| repo_name (string, len 5-100) | path (string, len 4-231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, len 0-8.16k) | middle (string, len 3-512) | suffix (string, len 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
eHealthAfrica/ureport
|
ureport/translation.py
|
Python
|
agpl-3.0
| 646
| 0.00774
|
from modeltranslation.translator import translator, TranslationOptions
from nsms.text.models import *
from django.utils.translation import ugettext as _
from django.utils.translation import get_language as _get_language
from modeltranslation import utils

class TextTranslationOptions(TranslationOptions):
    fields = ('text',)

translator.register(Text, TextTranslationOptions)

# need to translate something for django translations to kick in
_("Something to trigger localizations")

# monkey patch a version of get_language that isn't broken
def get_language():
    lang = _get_language()
    return lang

utils.get_language = get_language
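
# Illustration (added; not part of the original file): once Text is registered,
# django-modeltranslation adds one field per language in settings.LANGUAGES.
# Assuming LANGUAGES includes 'en' and 'fr' (hypothetical values):
#
#   obj = Text.objects.create(text_en="Hello", text_fr="Bonjour")
#   obj.text  # resolves to text_en or text_fr depending on the active language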
|
nigelb/SerialGrabber
|
examples/MqttXBeeStream/MqttXBee.py
|
Python
|
gpl-2.0
| 668
| 0.007485
|
import logging
from serial_grabber.reader import MessageVerifier

class XBeeMessageVerifier(MessageVerifier):
    logger = logging.getLogger("MessageVerifier")

    def verify_message(self, transaction):
        try:
            data = transaction.split("\n")
            if int(data[-2]) == len("\n".join(data[1:-2])):
                return True, "OK"
            else:
                self.logger.error("Reported length: %s, Actual length: %s" % (int(data[-2]), len("\n".join(data[1:-2]))))
                raise ValueError()
        except ValueError:
            self.logger.error("Could not convert %s to an integer." % data[-2])
            return False, "NA"
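
# Usage sketch (added; not part of the original file). The verifier expects a
# newline-delimited transaction whose second-to-last line is the byte length of
# the payload between the header line and the length line. Hypothetical example:
#
#   payload = "DATA,1,2,3"
#   transaction = "BEGIN\n%s\n%d\nEND" % (payload, len(payload))
#   ok, status = XBeeMessageVerifier().verify_message(transaction)  # (True, "OK")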
|
Nick-Hall/gramps
|
gramps/plugins/lib/libtreebase.py
|
Python
|
gpl-2.0
| 34,876
| 0.004014
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2008-2010 Craig J. Anderson
# Copyright (C) 2014 Paul Franklin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""Reports/Graphical Reports/Tree_Base"""
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from gramps.gen.plug.report import utils
from gramps.plugins.lib.libsubstkeyword import SubstKeywords
from gramps.gen.plug.docgen import (IndexMark, INDEX_TYPE_TOC)
PT2CM = utils.pt2cm
#------------------------------------------------------------------------
#
# Class Calc_Lines
#
#------------------------------------------------------------------------
class CalcLines:
""" wrapper for libsubstk
|
eyword and added functionality for
replacements.
Receive: Individual and family handle, and display format [string]
return: [Text] ready for a box.
"""
def __init__(self, dbase, repl, locale, name_displayer):
self.database = dbase
self.display_repl = repl
#self.default_string = default_str
self._locale = locale
self._nd = name_displayer
def calc_lines(self, _indi_handle, _fams_handle, workinglines):
"""
In this pass we will:
1. make our text and do our replacements
2. remove any extra (unwanted) lines with the compress option
"""
####################
#1.1 Get our line information here
subst = SubstKeywords(self.database, self._locale, self._nd,
_indi_handle, _fams_handle)
lines = subst.replace_and_clean(workinglines)
####################
#1.2 do our replacements
lns = []
for line in lines:
for pair in self.display_repl:
if pair.count("/") == 1:
repl = pair.split("/", 1)
line = line.replace(repl[0], repl[1])
lns.append(line)
return lns
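# Quick illustration (added note, hypothetical values): with
# display_repl = ["Dr./Doctor"], the line "Dr. Smith" becomes "Doctor Smith";
# pairs that do not contain exactly one "/" are skipped.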
#------------------------------------------------------------------------
#
# Class Canvas/Pages
#
#------------------------------------------------------------------------
class Page:
""" This class is a printable page.
Offsets from the canvas, Page numbers
boxes and lines
"""
def __init__(self, canvas):
#parts from canvas
#self.doc = doc
self.canvas = canvas
#parts about the page
self.page_x_offset = 0
self.page_y_offset = 0
self.x_page_num = 0
self.y_page_num = 0
self.boxes = [] #All objects must derive from BoxBase
self.lines = [] #All must derive from Linebase
self.note = None
def is_blank(self):
""" Am I a blank page? Notes and Titles are boxes too """
return self.boxes == [] and self.lines == []
def add_box(self, box):
""" The box must derive from class Box_Base(Object): """
self.boxes.append(box)
box.page = self
def add_line(self, line):
""" Add a line onto this page """
self.lines.append(line)
def draw_border(self, line_name):
doc = self.canvas.doc
if self.y_page_num == 0:
doc.draw_line(line_name, 0, 0,
doc.get_usable_width(), 0)
if self.x_page_num == 0:
doc.draw_line(line_name, 0, 0, 0,
doc.get_usable_height())
if self.y_page_num == self.canvas.y_pages-1:
doc.draw_line(line_name, 0,
doc.get_usable_height(),
doc.get_usable_width(),
doc.get_usable_height())
if self.x_page_num == self.canvas.x_pages-1:
doc.draw_line(line_name, doc.get_usable_width(),
0, doc.get_usable_width(),
doc.get_usable_height())
def display(self):
""" Display all boxes and lines that are on this page """
for box in self.boxes:
box.display()
for line in self.lines:
line.display(self)
class Canvas(Page):
""" The Canvas is two things.
The all in one canvas. a canvas is a page of unlimited size
a group of pages. each page is set is size and shows only a
part of what is on the entire canvas
"""
def __init__(self, doc, report_opts):
Page.__init__(self, self)
self.doc = doc
self.report_opts = report_opts
#How many pages are there in the report. one more than real.
self.x_pages = 1
self.y_pages = 1
self.__pages = {(0, 0): self} #set page 0,0 to me.
self.__fonts = {} #keep a list of fonts so we don't have to lookup.
self.title = None
self.note = None
def __new_page(self, x_page, y_page, x_offset, y_offset):
""" Make a new page. This will only happen if we are
paginating (making new pages to hold parts of the canvas) """
if x_page >= self.x_pages:
self.x_pages = x_page + 1
new_page = Page(self)
new_page.x_page_num = x_page
new_page.y_page_num = y_page
new_page.page_x_offset = x_offset
new_page.page_y_offset = y_offset
self.__pages[x_page, y_page] = new_page
return new_page
def sort_boxes_on_y_cm(self):
""" sorts the list of boxes on the canvas by .y_cm (top down) """
self.boxes.sort( key=lambda box: box.y_cm)
def add_title(self, title):
""" The title must derive from class TitleBox(BoxBase): """
self.title = title
self.title.cm_y = self.report_opts.littleoffset
def add_note(self, note):
""" The note must derive from class NoteBox(BoxBase, NoteType) """
self.note = note
self.set_box_height_width(self.note)
def __get_font(self, box):
""" returns the font used by a box. makes a list of all seen fonts
to be faster. If a new is found, run through the process to get it """
if box.boxstr not in self.__fonts:
style_sheet = self.doc.get_style_sheet()
style_name = style_sheet.get_draw_style(box.boxstr)
style_name = style_name.get_paragraph_style()
self.__fonts[box.boxstr] = \
style_sheet.get_paragraph_style(style_name).get_font()
return self.__fonts[box.boxstr]
def get_report_height_width(self):
""" returns the (max width, max height) of the report
This does not take into account any shadows """
max_width = 0
max_height = 0
for box in self.boxes:
tmp = box.x_cm + box.width
if tmp > max_width:
max_width = tmp
tmp = box.y_cm + box.height
if tmp > max_height:
max_height = tmp
max_width += self.report_opts.box_shadow
max_width += self.report_opts.littleoffset
max_height += self.report_opts.box_shadow
max_height += self.report_opts.littleoffset
return (max_width, max_height)
def __scale_canvas(self, scale_amount):
""" scales everything up/down depending upon scale_amount """
self.report_opts.scale_everything(scale_amount)
self.title.scale(scale_amount)
if self.note is not None:
|
sinotradition/meridian
|
meridian/acupoints/shuidao34.py
|
Python
|
apache-2.0
| 236
| 0.034783
|
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
SPELL=u'shuǐdào'
CN=u'水道'
NAME=u'shuidao34'
CHANNEL='stomach'
CHANNEL_FULLNAME='StomachChannelofFoot-Yangming'
SEQ='ST28'

if __name__ == '__main__':
    pass
|
PetrPPetrov/beautiful-capi
|
source/ExtensionSemantic.py
|
Python
|
gpl-3.0
| 2,815
| 0.001066
|
#!/usr/bin/env python
#
# Beautiful Capi generates beautiful C API wrappers for your C++ classes
# Copyright (C) 2015 Petr Petrovich Petrov
#
# This file is part of Beautiful Capi.
#
# Beautiful Capi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Beautiful Capi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Beautiful Capi. If not, see <http://www.gnu.org/licenses/>.
#
from copy import deepcopy
from Parser import TClass, TNamespace, TBeautifulCapiRoot
class ExtensionSemanticProcessor(object):
def __init__(self, root_node: TBeautifulCapiRoot):
self.root_node = root_node
self.class_stack = []
def process_class(self, cur_class: TClass, cur_namespace: TNamespace):
self.class_stack.append(cur_class.name)
for lifecycle_extension in cur_class.lifecycle_extensions:
new_extension_class = deepcopy(cur_class)
new_extension_class.name = lifecycle_extension.name
new_extension_class.lifecycle = lifecycle_extension.lifecycle
new_extension_class.lifecycle_filled = True
new_extension_class.wrap_name = lifecycle_extension.wrap_name
new_extension_class.wrap_name_filled = lifecycle_extension.wrap_name_filled
new_extension_class.cast_tos = deepcopy(lifecycle_extension.cast_tos)
new_extension_class.lifecycle_extensions = []
new_extension_class.lifecycle_extension = lifecycle_extension
new_extension_class.extension_base_class_name = '::'.join(self.class_stack)
new_extension_class.down_cast = lifecycle_extension.down_cast
new_extension_class.down_cast_filled = True
cur_namespace.classes.append(new_extension_class)
self.class_stack.pop()
def process_namespace(self, namespace: TNamespace):
self.class_stack.append(namespace.name)
for nested_namespace in namespace.namespaces:
self.process_namespace(nested_namespace)
for cur_class in namespace.classes:
self.process_class(cur_class, namespace)
self.class_stack.pop()
def process(self):
for cur_namespace in self.root_node.namespaces:
self.process_namespace(cur_namespace)
def process(root_node: TBeautifulCapiRoot):
semantic_processor = ExtensionSemanticProcessor(root_node)
semantic_processor.process()
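
# Usage sketch (added; assumes a TBeautifulCapiRoot built elsewhere, e.g. by the
# project's Parser module):
#
#   root = ...  # a parsed TBeautifulCapiRoot instance
#   process(root)  # appends one generated class per lifecycle extension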
|
giantas/elibrary
|
todo/urls.py
|
Python
|
mit
| 885
| 0.018079
|
from django.conf.urls import include, url
from . import views
app_name = 'todo'
urlpatterns = [
url(r'^lists/$', views.todolists, name='todo_lists'),
url(r'^lists/(?P<list_id>[\w\d]+)/update/$', views.UpdateTodoList.as_view(), name='update_list'),
url(r'^lists/card/(?P<card_id>[\w\d]+)/$', views.cards, name='cards'),
url(r'^lists/new/$', views.CreateList.as_view(), name='new_list'),
url(r'^lists/card/new/(?P<list_id>[\w\d]+)/$', views.CreateCard.as_view(), name='new_card'),
url(r'^lists/card/(?P<card_id>[\w\d]+)/update/$', views.UpdateCard.as_view(), name='update_card'),
url(r'^lists/card/(?P<card_id>[\w\d]+)/move/$', views.move_card, name='move_card'),
url(r'^lists/card/(?P<card_id>[\w\d]+)/items/new/$', views.CreateTodoItem.as_view(), name='new_item'),
url(r'^lists/card/(?P<item_id>[\w\d]+)/items/update/$', views.UpdateTodoItem.as_view(), name='update_item'),
]
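
# Illustration (added note): with app_name = 'todo', these named routes can be
# reversed in recent Django, e.g. (hypothetical list_id; the leading prefix
# depends on where this module is included):
#
#   from django.urls import reverse
#   reverse('todo:update_list', kwargs={'list_id': 'abc1'})  # -> '.../lists/abc1/update/'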
|
NIRALUser/FiberViewerLight
|
FiberLengthCleaning.py
|
Python
|
apache-2.0
| 11,548
| 0.015674
|
from __main__ import vtk, qt, ctk, slicer
#
# LengthStat
#
class FiberLengthCleaning:
def __init__(self, parent):
parent.title = "Fiber Length Cleaning"
parent.categories = ["Diffusion"]
parent.contributors = ["Jean-Baptiste Berger and Martin Styner"]
self.parent = parent
class FiberLengthCleaningWidget:
def __init__(self, parent=None):
if not parent:
self.parent = slicer.qMRMLWidget()
self.parent.setLayout(qt.QVBoxLayout())
self.parent.setMRMLScene(slicer.mrmlScene)
else:
self.parent = parent
self.distanceTable = list()
self.outputNode = None
if not parent:
self.setup()
self.vtkSelector.setMRMLScene(slicer.mrmlScene)
self.outputSelector.setMRMLScene(slicer.mrmlScene)
self.parent.show()
def setup(self):
self.vtkSelectorFrame = qt.QFrame(self.parent)
self.vtkSelectorFrame.setLayout(qt.QHBoxLayout())
self.parent.layout().addWidget(self.vtkSelectorFrame)
self.vtkSelectorLabel = qt.QLabel("Input Fiber Bundle: ", self.vtkSelectorFrame)
self.vtkSelectorFrame.layout().addWidget(self.vtkSelectorLabel)
self.vtkSelector = slicer.qMRMLNodeComboBox(self.vtkSelectorFrame)
self.vtkSelector.nodeTypes = ("vtkMRMLFiberBundleNode","vtkMRMLFiberBundleNode")
self.vtkSelector.selectNodeUponCreation = False
self.vtkSelector.addEnabled = False
self.vtkSelector.removeEnabled = False
self.vtkSelector.noneEnabled = True
self.vtkSelector.setMRMLScene(slicer.mrmlScene)
self.vtkSelector.setToolTip("Select the Fiber Bundle to filter")
self.vtkSelectorFrame.layout().addWidget(self.vtkSelector)
self.outputSelectorFrame = qt.QFrame(self.parent)
self.outputSelectorFrame.setLayout(qt.QHBoxLayout())
self.parent.layout().addWidget(self.outputSelectorFrame)
self.outputSelectorLabel = qt.QLabel("Output Fiber Bundle: ", self.outputSelectorFrame)
self.outputSelectorFrame.layout().addWidget(self.outputSelectorLabel)
self.outputSelector = slicer.qMRMLNodeComboBox(self.outputSelectorFrame)
self.outputSelector.nodeTypes = ("vtkMRMLFiberBundleNode","vtkMRMLFiberBundleNode")
self.outputSelector.selectNodeUponCreation = False
self.outputSelector.addEnabled = True
self.outputSelector.removeEnabled = True
self.outputSelector.noneEnabled = True
self.outputSelector.setMRMLScene(slicer.mrmlScene)
self.outputSelector.setToolTip("Select the output Fiber Bundle")
self.outputSelectorFrame.layout().addWidget(self.outputSelector)
self.thresholdFrame = qt.QFrame(self.parent)
self.thresholdFrame.setLayout(qt.QHBoxLayout())
self.parent.layout().addWidget(self.thresholdFrame)
self.thresholdMinLabel = qt.QLabel("Min: ",self.thresholdFrame)
self.thresholdFrame.layout().addWidget(self.thresholdMinLabel)
self.thresholdMin = qt.QSpinBox(self.thresholdFrame)
self.thresholdMin.setSingleStep(1)
self.thresholdMin.setRange(0,1000)
self.thresholdMin.setValue(0)
self.thresholdMin.enabled = False
self.thresholdFrame.layout().addWidget(self.thresholdMin)
self.thresholdMaxLabel = qt.QLabel("Max: ",self.thresholdFrame)
self.thresholdFrame.layout().addWidget(self.thresholdMaxLabel)
self.thresholdMax = qt.QSpinBox(self.thresholdFrame)
self.thresholdMax.setSingleStep(1)
self.thresholdMax.setRange(0,1000)
self.thresholdMax.setValue(1)
self.thresholdMax.enabled = False
self.thresholdFrame.layout().addWidget(self.thresholdMax)
self.calculateLengthButton = qt.QPushButton("Calculate Length Stats")
self.calculateLengthButton.enabled = False
self.parent.layout().addWidget(self.calculateLengthButton)
self.applyThresholdButton = qt.QPushButton("Apply Min-Max Threshold")
self.applyThresholdButton.enabled = False
self.parent.layout().addWidget(self.applyThresholdButton)
self.parent.layout().addStretch(1)
self.calculateLengthButton.connect('clicked()', self.onCalculateLength)
self.applyThresholdButton.connect('clicked()',self.onApplyThreshold)
self.vtkSelector.connect('nodeActivated(vtkMRMLNode*)',self.onVtkSelect)
self.outputSelector.connect('nodeActivated(vtkMRMLNode*)',self.onOutputSelect)
def onVtkSelect(self, node):
self.vtkNode = node
if node != None:
self.calculateLengthButton.enabled = True
else:
self.calculateLengthButton.enabled = False
def onOutputSelect(self, node):
self.outputNode = node
self.outputNode.CreateDefaultDisplayNodes()
self.outputPolyData = vtk.vtkPolyData()
def onCalculateLength(self):
self.inputPolyData = self.vtkNode.GetPolyData()
points = self.inputPolyData.GetPoints()
lines = self.inputPolyData.GetLines()
lines.InitTraversal()
self.distanceTable = list()
for i in range(self.inputPolyData.GetNumberOfCells()):
fiberLength = 0
ids = vtk.vtkIdList()
lines.GetNextCell(ids)
#print(ids.GetNumberOfIds())
for j in range(ids.GetNumberOfIds() - 1):
point1 = [0,0,0]
point2 = [0,0,0]
points.GetPoint(ids.GetId(j), point1)
points.GetPoint(ids.GetId(j+1), point2)
x = point2[0] - point1[0]
y = point2[1] - point1[1]
z = point2[2] - point1[2]
step = (x*x + y*y + z*z)**.5
fiberLength += step
self.distanceTable.append(fiberLength)
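# Equivalent vectorized form (added sketch, assuming pts is this fiber's points
# as an Nx3 numpy array):
#   import numpy as np
#   length = np.sqrt(((pts[1:] - pts[:-1]) ** 2).sum(axis=1)).sum()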
min,max=self.getDistanceBound()
self.thresholdMin.setValue(min)
self.thresholdMin.enabled = True
self.thresholdMax.setValue(max+1)
self.thresholdMax.enabled = True
self.applyThresholdButton.enabled = True
layoutNodes = slicer.mrmlScene.GetNodesByClass('vtkMRMLLayoutNode')
layoutNodes.InitTraversal()
layoutNode = layoutNodes.GetNextItemAsObject()
layoutNode.SetViewArrangement(slicer.vtkMRMLLayoutNode.SlicerLayoutConventionalQuantitativeView)
chartViewNodes = slicer.mrmlScene.GetNodesByClass('vtkMRMLChartViewNode')
chartViewNodes.InitTraversal()
chartViewNode = chartViewNodes.GetNextItemAsObject()
arrayNode = slicer.mrmlScene.AddNode(slicer.vtkMRMLDoubleArrayNode())
array = arrayNode.GetArray()
array.SetNumberOfTuples(10)
step = (max-min)/10
interMin = min
interMax = min+step
for i in range(10):
numberOfFibers = 0
for length in self.distanceTable:
if length<=interMax and length>=interMin and length<=self.thresholdMax.value and length>=self.thresholdMin.value:
numberOfFibers += 1
array.SetComponent(i,0,(interMin+interMax)/2)
array.SetComponent(i,1,numberOfFibers)
array.SetComponent(i,2,0)
interMin += step
interMax += step
chartNode = slicer.mrmlScene.AddNode(slicer.vtkMRMLChartNode())
chartNode.AddArray("Fiber Length",arrayNode.GetID())
chartViewNode.SetChartNodeID(chartNode.GetID())
chartNode.SetProperty('default', 'title', 'Length Distribution')
chartNode.SetProperty('default', 'xAxisLabel', 'Length')
chartNode.SetProperty('default', 'yAxisLabel', 'Distribution')
chartNode.SetProperty('default', 'type', 'Bar')
def getDistanceBound(self):
max = -1
min = 100000
for length in self.distanceTable:
if length>max:
max = length
if length<min:
|
wursm1/eurobot-hauptsteuerung
|
eurobot/tests/__init__.py
|
Python
|
gpl-3.0
| 235
| 0.008511
|
"""
This package cont
|
ains different `unittests <https://docs.python.org/3/library/unittest.html>`_ for the project.
Those tests help to validate difficult pieces of the software.
"""
__author
|
__ = 'Wuersch Marcel'
__license__ = "GPLv3"
|
mjm159/sunpy
|
sunpy/util/sysinfo.py
|
Python
|
bsd-2-clause
| 3,403
| 0.006465
|
from __future__ import absolute_import
import platform
import datetime
import sunpy
__all__ = ['system_info']
def system_info():
"""Prints system information.
Prints information about the runtime environment that SunPy lives in.
Information about the OS, architecture, Python, and all major dependencies
are included.
The goal of this function is to provide enough information for someone
running SunPy code or replicating a bug to set up a comparable environment
to that which was originally used.
Author: `Keith Hughitt <keith.hughitt@nasa.gov>`
"""
print("==========================================================")
print(" SunPy Installation Information\n")
print(" " + datetime.datetime.utcnow().strftime("%A, %d. %B %Y %I:%M%p UT"))
print("==========================================================\n")
system = platform.system()
proc = platform.processor()
print("###########")
print(" General")
print("###########")
# OS and architecture information
if system == "Linux":
distro = " ".join(platform.linux_distribution())
print("OS: %s (Linux %s %s)" % (distro, platform.release(), proc))
elif system == "Darwin":
print("OS: Mac OS X %s (%s)" % (platform.mac_ver()[0], proc))
elif system == "Windows":
print("OS: Windows %s %s (%s)" % (platform.release(),
platform.version(), proc))
else:
print ("Unknown OS (%s)" % proc)
# Python version
arch = platform.architecture()[0]
print("Python: %s (%s)\n" % (platform.python_version(), arch))
# Dependencies
try:
from numpy import __version__ as numpy_version
except ImportError:
numpy_version = "NOT INSTALLED"
try:
from scipy import __version__ as scipy_version
except ImportError:
scipy_version = "NOT INSTALLED"
try:
from matplotlib import __version__ as matplotlib_version
except ImportError:
matplotlib_version = "NOT INSTALLED"
try:
from pyfits import __version__ as pyfits_version
except ImportError:
pyfits_version = "NOT INSTALLED"
try:
from pandas import __version__ as pandas_version
except ImportError:
pandas_version = "NOT INSTALLED"
try:
from bs4 import __version__ as bs4_version
except ImportError:
bs4_version = "NOT INSTALLED"
try:
from PyQt4.QtCore import PYQT_VERSION_STR as pyqt_version
except ImportError:
pyqt_version = "NOT INSTALLED"
try:
from suds import __version__ as suds_version
except ImportError:
suds_version = "NOT INSTALLED"
print("####################")
print(" Required libraries")
print("####################")
print("SunPy: %s" % sunpy.__version__)
print("NumPy: %s" % numpy_version)
print("SciPy: %s" % scipy_version)
print("Matplotlib: %s" % matplotlib_version)
print("PyFITS: %s" % pyfits_version)
print("pandas: %s" % pandas_version)
print("")
print("#######################")
print(" Recommended libraries")
print("#######################")
print("beautifulsoup4: %s" % bs4_version)
print("PyQt: %s" % pyqt_version)
print("SUDS: %s" % suds_version)
print("")
|
hitchtest/hitchhttp
|
hitchhttp/main_request_handler.py
|
Python
|
agpl-3.0
| 7,882
| 0.011799
|
from hitchhttp import http_request
from ruamel.yaml import dump
from ruamel.yaml.dumper import RoundTripDumper
from ruamel.yaml.comments import CommentedMap
from hitchhttp.models import Database
from os import path
import tornado.web
import tornado
import requests
import random
import json
import time
import sys
class MockHTTPHandler(tornado.web.RequestHandler):
"""Mock REST server request handling."""
default_response = (
"""
<html><head><title>Nothing configured!</title></head>
<body>No matching URI found for {0}<br/><br/>
See <a href="http://hitchtest.readthedocs.org/">the docs</a>
for more information.</body>\n
"""
)
def log_json(self, name, request, response):
"""JSON to log to indicate what just happened."""
pair = {}
pair['match'] = name
pair['request'] = request
pair['response'] = response
sys.stdout.write(u"{0}\n".format(json.dumps(pair)))
sys.stdout.flush()
def process(self):
self.actual_request = http_request.MockRequest(self.request)
if self.settings['record']:
headers_to_request_with = self.actual_request.headers_without_host
if self.settings['intercept'] is not None:
headers_to_request_with.update(self.settings['intercept'])
self.response = requests.request(
self.request.method,
"{}{}".format(self.settings['redirection_url'], self.request.uri),
headers=headers_to_request_with,
data=self.actual_request.request_data,
)
#if len(response_content) < 1000:
#yaml_snip['response']['content'] = response_content
#else:
#response_filename = "{}.content".format(random.randrange(1, 99999999))
#full_response_filename = path.join(
#path.dirname(
#path.abspath(
#self.settings['record_to_filename']
#)
#),
#response_filename
#)
#with open(full_response_filename, 'w') as handle:
#handle.write(response_content)
#yaml_snip['response']['content'] = {"file": response_filename}
#with open(self.settings['record_to_filename'], 'a') as handle:
#handle.write("\n{}".format(
#dump([yaml_snip], default_flow_style=False, Dumper=RoundTripDumper))
#)
for header_var, header_val in self.response.headers.items():
if header_var.lower() not in ["transfer-encoding", "content-encoding", ]:
self.set_header(header_var, header_val)
self.set_status(self.response.status_code)
if self.response.status_code != 304:
self.write(self.response.content)
else:
uri = self.settings['config'].get_matching_uri(self.actual_request)
if uri is not None:
time.sleep(uri.wait)
self.set_status(uri.return_code)
for header_var, header_val in uri.response_headers.items():
if header_var.lower() not in [
"transfer-encoding", "content-encoding", "set-cookie",
]:
self.set_header(header_var, header_val)
if uri.return_code != 304:
self.write(uri.response_content.encode('utf8'))
#self.log_json(
#uri.name, actual_request.to_dict(uri.name), uri.response_content
#)
else:
self.set_status(404)
self.set_header('Content-type', 'text/html')
self.write(
self.default_response.format(self.request.path).encode('utf8')
)
#self.log_json(
#None,
#actual_request.to_dict(None),
#self.default_response.format(self.request.path)
#)
self.response_content = {}
def on_finish(self):
if self.settings['record']:
yaml_snip = {}
yaml_snip['request'] = {}
yaml_snip['request']['path'] = self.request.uri
yaml_snip['request']['method'] = self.request.method
yaml_snip['request']['headers'] = self.actual_request.headers_without_host
if self.actual_request.request_data is not None:
yaml_snip['request']['data'] = self.actual_request.body.strip()
yaml_snip['response'] = {}
yaml_snip['response']['code'] = self.response.status_code
yaml_snip['response']["headers"] = {
item[0]: item[1] for item in dict(self.response.headers).items()
if item[0].lower() not in ["transfer-encoding", "content-encoding", ]
}
#response_content = self.resp.text
database = Database(self.settings['record_to_filename'])
db_request = database.Request(
order=1,
request_path=self.request.uri,
request_method=self.request.method,
request_data=self.actual_request.body.strip(),
response_code=self.response.status_code,
response_content=self.response.text,
)
db_request.save()
for header_var, header_val in yaml_snip['request']['headers'].items():
db_request_header = database.RequestHeader(
request=db_request,
name=header_var,
value=header_val,
)
db_request_header.save()
for header_var, header_val in self.response.headers.items():
if header_var.lower() not in ["transfer-encoding", "content-encoding", ]:
db_response_header = database.ResponseHeader(
request=db_request,
name=header_var,
value=header_val,
)
db_response_header.save()
self.log_json("record", yaml_snip['request'], yaml_snip['response'])
else:
uri = self.settings['config'].get_matching_uri(self.actual_request)
if uri is not None:
#time.sleep(uri.wait)
#self.set_status(uri.return_code)
#for header_var, header_val in uri.response_headers.items():
#if header_var.lower() not in [
#"transfer-encoding", "content-encoding", "set-cookie",
#]:
#self.set_header(header_var, header_val)
#if uri.return_code != 304:
#self.write(uri.response_content.encode('utf8'))
self.log_json(
uri.name, self.actual_request.to_dict(uri.name), uri.response_content
)
else:
#self.set_status(404)
#self.set_header('Content-type', 'text/html')
#self.write(
#self.default_response.format(self.request.path).encode('utf8')
#)
self.log_json(
None,
self.actual_request.to_dict(None),
self.default_response.format(self.request.path)
)
def get(self):
self.process()
def post(self):
self.process()
def put(self):
self.process()
def delete(self):
self.process()
def options(self):
self.process()
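
# Wiring sketch (added; not from the original file): Tornado passes extra
# Application keyword arguments through as self.settings, which is how 'record',
# 'config', 'intercept', etc. are read above. Hypothetical setup:
#
#   app = tornado.web.Application(
#       [(r"/.*", MockHTTPHandler)],
#       record=False, config=my_config, intercept=None,
#       redirection_url="", record_to_filename="recording.db",
#   )
#   app.listen(8000)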
|
naterh/chipsec
|
source/tool/chipsec/modules/common/secureboot/keys.py
|
Python
|
gpl-2.0
| 5,186
| 0.021597
|
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2015, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#chipsec@intel.com
#
## \addtogroup modules
# __chipsec/modules/secureboot/keys.py__ - verify protections of Secure Boot key EFI variables
from chipsec.module_common import *
from chipsec.file import *
from chipsec.hal.uefi import *
# ############################################################
# SPECIFY PLATFORMS THIS MODULE IS APPLICABLE TO
# ############################################################
_MODULE_NAME = 'keys'
TAGS = [MTAG_SECUREBOOT]
class keys(BaseModule):
SECURE = 0x1
INSECURE = 0x2
ERROR = 0x4
def __init__(self):
BaseModule.__init__(self)
self._uefi = UEFI( self.cs.helper )
def is_supported(self):
supported = self.cs.helper.EFI_supported()
if not supported: self.logger.log_skipped_check( "OS does not support UEFI Runtime API" )
return supported
def check_EFI_variable_authentication( self, name, guid ):
self.logger.log( "[*] Checking EFI variable %s {%s}.." % (name, guid) )
orig_var = self._uefi.get_EFI_variable( name, guid, None )
if not orig_var:
self.logger.log( "[*] EFI variable %s {%s} doesn't exist" % (name, guid) )
return keys.ERROR
fname = name + '_' + guid + '.bin'
if self.logger.VERBOSE: write_file( fname, orig_var )
origvar_len = len(orig_var)
mod_var = chr( ord(orig_var[0]) ^ 0xFF ) + orig_var[1:]
if origvar_len > 1: mod_var = mod_var[:origvar_len-1] + chr( ord(mod_var[origvar_len-1]) ^ 0xFF )
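# Net effect of the two XORs above (added note): the first and last bytes of the
# variable are bit-inverted (e.g. 0x01 -> 0xFE) to attempt an unauthorized write.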
if self.logger.VERBOSE: write_file( fname + '.mod', mod_var )
status = self._uefi.set_EFI_variable( name, guid, mod_var )
if not status: self.logger.log( '[*] Writing EFI variable %s did not succeed. Verifying contents..' % name )
new_var = self._uefi.get_EFI_variable( name, guid, None )
if self.logger.VERBOSE: write_file( fname + '.new', new_var )
ok = (origvar_len == len(new_var))
for i in range( origvar_len ):
if not (new_var[i] == orig_var[i]):
ok = keys.INSECURE
break
if ok == keys.INSECURE:
self.logger.log_bad( "EFI variable %s is not protected! It has been modified. Restoring original contents.." % name )
self._uefi.set_EFI_variable( name, guid, orig_var )
else:
self.logger.log_good( "Could not modify EFI variable %s {%s}" % (name, guid) )
return ok
# checks authentication of Secure Boot EFI variables
def check_secureboot_key_variables(self):
sts = 0
sts |= self.check_EFI_variable_authentication( EFI_VAR_NAME_PK, EFI_VARIABLE_DICT[EFI_VAR_NAME_PK] )
sts |= self.check_EFI_variable_authentication( EFI_VAR_NAME_KEK, EFI_VARIABLE_DICT[EFI_VAR_NAME_KEK] )
sts |= self.check_EFI_variable_authentication( EFI_VAR_NAME_db, EFI_VARIABLE_DICT[EFI_VAR_NAME_db] )
sts |= self.check_EFI_variable_authentication( EFI_VAR_NAME_dbx, EFI_VARIABLE_DICT[EFI_VAR_NAME_dbx] )
sts |= self.check_EFI_variable_authentication( EFI_VAR_NAME_SecureBoot, EFI_VARIABLE_DICT[EFI_VAR_NAME_SecureBoot] )
sts |= self.check_EFI_variable_authentication( EFI_VAR_NAME_SetupMode, EFI_VARIABLE_DICT[EFI_VAR_NAME_SetupMode] )
#sts |= self.check_EFI_variable_authentication( EFI_VAR_NAME_CustomMode, EFI_VARIABLE_DICT[EFI_VAR_NAME_CustomMode] )
if (sts & keys.ERROR) != 0: self.logger.log_important( "Some Secure Boot variables don't exist" )
ok = ((sts & keys.INSECURE) == 0)
self.logger.log('')
if ok: self.logger.log_passed_check( 'All existing Secure Boot EFI variables seem to be protected' )
else: self.logger.log_failed_check( 'One or more Secure Boot variables are not protected' )
return ok
# --------------------------------------------------------------------------
# run( module_argv )
# Required function: run here all tests from this module
# --------------------------------------------------------------------------
def run( self, module_argv ):
#self.logger.VERBOSE = True
self.logger.start_test( "Protection of Secure Boot Key and Configuration EFI Variables" )
return self.check_secureboot_key_variables()
|
MSLNZ/msl-equipment
|
msl/examples/equipment/picotech/picoscope/acquire_AWG_custom.py
|
Python
|
mit
| 2,306
| 0.002602
|
"""
This example outputs a custom waveform and records the waveform on Channel A.
The output of the AWG must be connected to Channel A.
"""
import os
import numpy as np
# if matplotlib is available then plot the results
try:
import matplotlib.pyplot as plt
except ImportError:
plt = None
from msl.equipment import (
EquipmentRecord,
ConnectionRecord,
Backend,
)
record = EquipmentRecord(
manufacturer='Pico Technology',
model='5244B', # update for your PicoScope
serial='DY135/055', # update for your PicoScope
connection=ConnectionRecord(
backend=Backend.MSL,
address='SDK::ps5000a.dll', # update for your PicoScope
properties={
'resolution': '14bit', # only used for a ps5000a series PicoScope
'auto_select_power': True, # for PicoScopes that can be powered by an AC adaptor or a USB cable
},
)
)
# optional: ensure that the PicoTech DLLs are available on PATH
os.environ['PATH'] += os.pathsep + r'C:\Program Files\Pico Technology\SDK\lib'
print('Example :: Acquire AWG custom waveform')
# connect to the PicoScope
scope = record.connect()
# configure the PicoScope
scope.set_channel('A', scale='2V') # enable Channel A and set the voltage range to be +/-2V
dt, num_samples = scope.set_timebase(10e-3, 5.0) # sample the voltage on Channel A every 10 ms for 5 s
scope.set_trigger('A', -0.2, timeout=5.0, direction='falling') # use Channel A as the trigger source
# simulate the Lennard-Jones Potential
x = np.linspace(0.88, 2, 500)
awg = (1/x)**12 - 2*(1/x)**6
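# (Added note: this is the reduced Lennard-Jones form V(r) = (1/r)**12 - 2*(1/r)**6,
# whose minimum is V = -1 at r = 1, so the waveform dips to -1 within the x range.)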
scope.set_sig_gen_arbitrary(awg, repetition_rate=1e3, index_mode='quad', pk_to_pk=2.0)
scope.run_block(pre_trigger=2.5) # start acquisition
scope.wait_until_ready() # wait until all requested samples are collected
scope.set_data_buffer('A') # set the data buffer for Channel A
scope.get_values() # fill the data buffer of Channel A with the values saved in the PicoScope's internal memory
scope.stop() # stop the oscilloscope from sampling data
print('Channel A input')
t = np.arange(-scope.pre_trigger, dt*num_samples-scope.pre_trigger, dt)
for i in range(num_samples):
print('{0:f}, {1:f}'.format(t[i], scope.channel['A'].volts[i]))
if plt is not None:
plt.plot(t, scope.channel['A'].volts, 'bo')
plt.show()
|
PyCQA/baron
|
tests/test_grammator_operators.py
|
Python
|
lgpl-3.0
| 293,854
| 0.000163
|
#!/usr/bin/python
# -*- coding:Utf-8 -*-
import pytest
from baron.parser import ParsingError
from .test_utils import parse_simple
def test_simple_power():
"a**b"
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**'),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a',
},
"second": {
"type": "name",
"value": 'b'
},
"first_formatting": [],
"second_formatting": []
}
])
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**'),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a'
},
"second": {
"type": "name",
"value": 'b',
},
"first_formatting": [],
"second_formatting": []
}
])
def test_first_space_power():
"a **b"
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')]),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a',
},
"second": {
"type": "name",
"value": 'b'
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": []
}
])
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')]),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a'
},
"second": {
"type": "name",
"value": 'b',
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": []
}
])
def test_second_space_power():
"a** b"
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [], [('SPACE', ' ')]),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a',
},
"second": {
"type": "name",
"value": 'b'
},
"first_formatting": [],
"second_formatting": [{"type": "space", "value": " "}]
}
])
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [], [('SPACE', ' ')]),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a'
},
"second": {
"type": "name",
"value": 'b',
},
"first_formatting": [],
"second_formatting": [{"type": "space", "value": " "}]
}
])
def test_spaces_power():
"a ** b"
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a',
},
"second": {
"type": "name",
"value": 'b'
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
}
])
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'b')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a'
},
"second": {
"type": "name",
"value": 'b',
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
}
])
def test_power_power():
"a ** b ** c"
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'b'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'c')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a'
},
"second": {
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'b'
},
"second": {
"type": "name",
"value": 'c'
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
}
])
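# Note (added): the expected tree above reflects the right-associativity of
# Python's ** operator, i.e. "a ** b ** c" parses as "a ** (b ** c)".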
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'b'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'c')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a'
},
"second": {
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'b'
},
"second": {
"type": "name",
"value": 'c'
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
}
])
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'b'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'c')
], [
{
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'a'
},
"second": {
"type": "binary_operator",
"value": '**',
"first": {
"type": "name",
"value": 'b'
},
"second": {
"type": "name",
"value": 'c'
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
},
"first_formatting": [{"type": "space", "value": " "}],
"second_formatting": [{"type": "space", "value": " "}]
}
])
parse_simple([
('NAME', 'a'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'b'),
('DOUBLE_STAR', '**', [('SPACE', ' ')], [('SPACE', ' ')]),
('NAME', 'c')
], [
{
"type": "binary_operator",
"value": '**',
"first": {"type": "name", "value": 'a'},
"second": {
"type": "binary_operator",
"value": '**',
"first": {
|
mrcodehang/cqut-chat-server
|
test/test_http.py
|
Python
|
mpl-2.0
| 75
| 0.013333
|
from others import sms_request
print(sms_request('15683000435', '123456'))
|
kesara/tetrapy
|
tetrapy.py
|
Python
|
gpl-3.0
| 3,480
| 0.005747
|
###############################################################################
# tetrapy.py
# Tetrapy
#
# Copyright (C) 2013 Kesara Rathnayake
#
# Tetrapy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tetrapy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tetrapy. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import pygame
import random
import sys
# Screen Size
X = 240
Y = 400
# Colours
BLACK = (000,000,000)
WHITE = (255,255,255)
RED = (255,000,000)
GREEN = (000,255,000)
BLUE = (000,000,255)
class Tetromino(object):
def __init__(self, colour):
self.colour = colour
self.locked = False
def draw(self, screen):
pass
def move(self, matrix, direction, rotaion):
pass
def isLocked(self, matrix):
pass
def getMatrix(self):
pass
def collied(self, matrix):
pass
class TetrominoI(Tetromino):
def __init__(self, colour):
super(TetrominoI, self).__init__(colour)
self.W = 20
self.H = 80
self.x = 0
self.y = 0
def draw(self, screen):
rect = pygame.Rect(self.x, self.y, self.W, self.H)
pygame.draw.rect(screen, self.colour, rect)
def move(self, matrix, direction, rotaion=None):
if not self.isLocked(matrix):
if self.x + direction <= X-20 and self.x + direction >= 0:
self.x += direction
if self.y <= Y-80:
self.y += 20
def isLocked(self, matrix):
if self.y + 80 == Y:
self.locked = True
return True
elif (self.collied(matrix)):
return True
return False
def getMatrix(self):
matrix = []
for i in range(0, 4):
matrix.append((self.x, self.y+20*i))
return matrix
def collied(self, matrix):
for i in range(0, 4):
if (self.x, self.y+20*i) in matrix:
return True
return False
screen = pygame.display.set_mode((X, Y))
colours = [WHITE, RED, GREEN, BLUE]
active = None
tetrominos = []
matrix = []
while True:
print matrix
screen.fill(BLACK)
if not active:
active = TetrominoI(random.choice(colours))
elif active.isLocked(matrix):
matrix.extend(active.getMatrix())
tetrominos.append(active)
active = TetrominoI(random.choice(colours))
active.draw(screen)
pygame.display.flip()
k_left = k_right = 0
direction = 0
for event in pygame.event.get():
if not hasattr(event, 'key'): continue
if event.key == pygame.K_RIGHT: k_right += 10
elif event.key == pygame.K_LEFT: k_left += -10
elif event.key == pygame.K_ESCAPE: sys.exit(0)
direction += (k_right + k_left)
screen.fill(BLACK)
for tetromino in tetrominos:
tetromino.draw(screen)
active.move(matrix, direction)
active.draw(screen)
pygame.display.flip()
pygame.time.wait(100)
|
SaintlyVi/DLR_DB
|
evaluation/calibration.py
|
Python
|
mit
| 5,511
| 0.01869
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 15 14:09:59 2017
@author: SaintlyVi
"""
import pandas as pd
import numpy as np
from support import writeLog
def uncertaintyStats(submodel):
"""
Creates a dict with statistics for observed hourly profiles for a given year.
Use evaluation.evalhelpers.observedHourlyProfiles() to generate the input dataframe.
"""
allstats = list()
for c in submodel['class'].unique():
stats = submodel[submodel['class']==c].describe()
stats['customer_class'] = c
stats.reset_index(inplace=True)
stats.set_index(['customer_class','index'], inplace=True)
allstats.append(stats)
df = pd.concat(allstats)
return df[['AnswerID_count','valid_obs_ratio']]
def dataIntegrity(submodels, min_answerid, min_obsratio):
"""
This function returns the slice of submodels that meet the specified minimum uncertainty requirements. Submodels must form part of the same experiment (eg demand summary and hourly profiles).
"""
if isinstance(submodels, list):
models = submodels
else:
models = [submodels]
validmodels = pd.DataFrame(columns = ['submodel_name','valid_data','uncertainty_index',
'valid_unit_count', 'unit'])
for m in models:
name = m.name
valid_data = m[(m.AnswerID_count>=min_answerid) & (m.valid_obs_ratio>=min_obsratio)]
uix = len(valid_data) / len(m)
try:
valid_unit_count = valid_data['valid_hours'].sum()
unit = 'total_valid_hours'
except:
valid_unit_count = valid_data['AnswerID_count'].sum()
unit = 'valid_AnswerID_count'
validmodels = validmodels.append({'submodel_name':name,
'valid_data':valid_data,
'uncertainty_index':uix,
'valid_unit_count':valid_unit_count,
'unit':unit}, ignore_index=True)
validmodels.set_index('submodel_name', drop=True, inplace=True)
return validmodels
def modelSimilarity(ex_submodel, ex_ts, valid_new_submodel, new_ts, submod_type):
"""
This function calculates the evaluation measure for the run.
ex_submodel = (DataFrame) either existing/expert demand_summary or hourly_profiles submodel
valid_new_submodel = (DataFrame) output from dataIntegrity function
-> only want to compare valid data
submod_type = (str) one of [ds, hp]
-> ds=demand_summary, hp=hourly_profiles
"""
if submod_type == 'ds':
index_cols = ['class','YearsElectrified']
elif submod_type == 'hp':
index_cols = ['class','YearsElectrified','month','daytype','hour']
else:
return(print('Valid submod_type is one of [ds, hp] -> ds=demand_summary, hp=hourly_profiles.'))
merged_sub = ex_submodel.merge(valid_new_submodel, how='left', on=index_cols)
simvec = merged_sub[new_ts] - merged_sub[ex_ts]
simvec.dropna(inplace=True)
simveccount = len(simvec)
eucliddist = np.sqrt(sum(simvec**2))
return eucliddist, simveccount, merged_sub
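# In other words (added note): eucliddist = sqrt(sum((new_ts - ex_ts)**2)),
# computed only over rows present in both submodels (NaNs are dropped first).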
def logCalibration(bm_model, year, exp_model, min_answerid = 2, min_obsratio = 0.85):
"""
This function logs the evaluation results of the run.
ex_model = [demand_summary, hourly_profiles, ds_val_col_name, hp_val_col_name]
"""
#Generate data model
ods = pd.read_csv('data/experimental_model/'+exp_model+'/demand_summary_'+year+'.csv')
ohp = pd.read_csv('data/experimental_model/'+exp_model+'/hourly_profiles_'+year+'.csv')
#Check data integrity
ods.name = 'demand_summary'
ohp.name = 'hourly_profiles'
validmodels = dataIntegrity([ods, ohp], min_answerid, min_obsratio)
valid_new_ds = validmodels.at['demand_summary','valid_data']
valid_new_hp = validmodels.at['hourly_profiles','valid_data']
new_dsts = 'M_kw_mean'
new_hpts = 'kva_mean'
#Fetch benchmark model
bm_ds = bm_model[0]
bm_hp = bm_model[1]
bm_dsts = bm_model[2]
bm_hpts = bm_model[3]
#Calculate model similarity
euclid_ds, count_ds, slice_ex_ds = modelSimilarity(bm_ds, bm_dsts, valid_new_ds, new_dsts, 'ds')
euclid_hp, count_hp, sliced_ex_hp = modelSimilarity(bm_hp, bm_hpts, valid_new_hp, new_hpts, 'hp')
#Prepare and write logs
ds_uix = validmodels.at['demand_summary','uncertainty_index']
ds_vuc = validmodels.at['demand_summary','valid_unit_count']
ds_unit = validmodels.at['demand_summary','unit']
hp_uix = validmodels.at['hourly_profiles','uncertainty_index']
hp_vuc = validmodels.at['hourly_profiles','valid_unit_count']
hp_unit = validmodels.at['hourly_profiles','unit']
loglineds = [year, exp_model, ods.name, min_answerid, min_obsratio, ds_uix, ds_vuc,
ds_unit, euclid_ds, count_ds]
loglinehp = [year, exp_model, ohp.name, min_answerid, min_obsratio, hp_uix, hp_vuc,
hp_unit, euclid_hp, count_hp]
log_lines = pd.DataFrame([loglineds, loglinehp], columns = ['year','experiment',
'submodel','min_answerid_count','min_valid_obsratio',
'uncertainty_ix','valid_unit_count','unit','sim_eucliddist','sim_count'])
writeLog(log_lines,'log_calibration')
|
tylertian/Openstack
|
openstack F/python-keystoneclient/tests/test_memcache_crypt.py
|
Python
|
apache-2.0
| 3,163
| 0
|
import testtools
from keystoneclient.middleware import memcache_crypt
class MemcacheCryptPositiveTests(testtools.TestCase):
def _setup_keys(self, strategy):
return memcache_crypt.derive_keys('token', 'secret', strategy)
def test_constant_time_compare(self):
# make sure it works as a compare, the "constant time" aspect
# isn't appropriate to test in unittests
ctc = memcache_crypt.constant_time_compare
self.assertTrue(ctc('abcd', 'abcd'))
self.assertTrue(ctc('', ''))
self.assertFalse(ctc('abcd', 'efgh'))
self.assertFalse(ctc('abc', 'abcd'))
self.assertFalse(ctc('abc', 'abc\x00'))
self.assertFalse(ctc('', 'abc'))
def test_derive_keys(self):
keys = memcache_crypt.derive_keys('token', 'secret', 'strategy')
self.assertEqual(len(keys['ENCRYPTION']),
len(keys['CACHE_KEY']))
self.assertEqual(len(keys['CACHE_KEY']),
len(keys['MAC']))
self.assertNotEqual(keys['ENCRYPTION'],
keys['MAC'])
self.assertIn('strategy', keys.keys())
def test_key_strategy_diff(self):
k1 = self._setup_keys('MAC')
k2 = self._setup_keys('ENCRYPT')
self.assertNotEqual(k1, k2)
def test_sign_data(self):
keys = self._setup_keys('MAC')
sig = memcache_crypt.sign_data(keys['MAC'], 'data')
self.assertEqual(len(sig), memcache_crypt.DIGEST_LENGTH_B64)
def test_encryption(self):
keys = self._setup_keys('ENCRYPT')
# what you put in is what you get out
for data in ['data', '1234567890123456', '\x00\xFF' * 13
] + [chr(x % 256) * x for x in range(768)]:
crypt = memcache_crypt.encrypt_data(keys['ENCRYPTION'], data)
decrypt = memcache_crypt.decrypt_data(keys['ENCRYPTION'], crypt)
self.assertEqual(data, decrypt)
self.assertRaises(memcache_crypt.DecryptError,
memcache_crypt.decrypt_data,
keys['ENCRYPTION'], crypt[:-1])
def test_protect_wrappers(self):
data = 'My Pretty Little Data'
for strategy in ['MAC', 'ENCRYPT']:
keys = self._setup_keys(strategy)
protected = memcache_crypt.protect_data(keys, data)
self.assertNotEqual(protected, data)
if strategy == 'ENCRYPT':
self.assertNotIn(data, protected)
unprotected = memcache_crypt.unprotect_data(keys, protected)
self.assertEqual(data, unprotected)
self.assertRaises(memcache_crypt.InvalidMacError,
memcache_crypt.unprotect_data,
keys, protected[:-1])
self.assertIsNone(memcache_crypt.unprotect_data(keys, None))
def test_no_pycrypt(self):
aes = memcache_crypt.AES
memcache_crypt.AES = None
self.assertRaises(memcache_crypt.CryptoUnavailableError,
memcache_crypt.encrypt_data, 'token', 'secret',
'data')
memcache_crypt.AES = aes
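
# Reference sketch (added; not keystoneclient's implementation): a constant-time
# compare accumulates differences instead of returning at the first mismatch:
#
#   def constant_time_compare_sketch(a, b):
#       if len(a) != len(b):
#           return False
#       result = 0
#       for x, y in zip(a, b):
#           result |= ord(x) ^ ord(y)
#       return result == 0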
|
flavour/eden
|
modules/templates/historic/ARC/config.py
|
Python
|
mit
| 84,335
| 0.008044
|
# -*- coding: utf-8 -*-
from collections import OrderedDict
from gluon import current
from gluon.storage import Storage
def config(settings):
"""
Template settings for American Red Cross
Demo only, not in Production
"""
T = current.T
# =========================================================================
# System Settings
# -------------------------------------------------------------------------
# Pre-Populate
settings.base.prepopulate += ("historic/ARC", "historic/ARC/Demo", "default/users")
settings.base.system_name = T("Resource Management System")
settings.base.system_name_short = T("ARC Demo")
# -------------------------------------------------------------------------
# Security Policy
settings.security.policy = 8 # Delegations
settings.security.map = True
# Authorization Settings
settings.auth.registration_requires_approval = True
settings.auth.registration_requires_verification = True
settings.auth.registration_requests_organisation = True
settings.auth.registration_organisation_required = True
settings.auth.registration_requests_site = True
settings.auth.registration_link_user_to = {"staff": T("Staff"),
"volunteer": T("Volunteer"),
"member": T("Member")
}
settings.auth.record_approval = True
# @ToDo: Should we fallback to organisation_id if site_id is None?
settings.auth.registration_roles = {"site_id": ["reader",
],
}
# Owner Entity
settings.auth.person_realm_human_resource_site_then_org = True
settings.auth.person_realm_member_org = True
def ifrc_realm_entity(table, row):
"""
Assign a Realm Entity to records
"""
tablename = table._tablename
# Do not apply realms for Master Data
# @ToDo: Restore Realms and add a role/functionality support for Master Data
if tablename in ("hrm_certificate",
"hrm_department",
"hrm_job_title",
"hrm_course",
"hrm_programme",
"member_membership_type",
"vol_award",
):
return None
db = current.db
s3db = current.s3db
# Entity reference fields
EID = "pe_id"
#OID = "organisation_id"
SID = "site_id"
#GID = "group_id"
PID = "person_id"
# Owner Entity Foreign Key
realm_entity_fks = dict(pr_contact = EID,
pr_contact_emergency = EID,
pr_physical_description = EID,
pr_address = EID,
pr_image = EID,
pr_identity = PID,
pr_education = PID,
pr_note = PID,
hrm_human_resource = SID,
inv_recv = SID,
inv_send = SID,
inv_track_item = "track_org_id",
inv_adj_item = "adj_id",
req_req_item = "req_id"
)
# Default Foreign Keys (ordered by priority)
default_fks = ("catalog_id",
"project_id",
"project_location_id",
)
# Link Tables
#realm_entity_link_table = dict(
# project_task = Storage(tablename = "project_task_project",
# link_key = "task_id"
# )
# )
#if tablename in realm_entity_link_table:
# # Replace row with the record from the link table
# link_table = realm_entity_link_table[tablename]
# table = s3db[link_table.tablename]
# rows = db(table[link_table.link_key] == row.id).select(table.id,
# limitby=(0, 1))
# if rows:
# # Update not Create
# row = rows.first()
# Check if there is a FK to inherit the realm_entity
realm_entity = 0
fk = realm_entity_fks.get(tablename, None)
fks = [fk]
fks.extend(default_fks)
for default_fk in fks:
if default_fk in table.fields:
fk = default_fk
# Inherit realm_entity from parent record
if fk == EID:
ftable = s3db.pr_person
query = (ftable[EID] == row[EID])
else:
ftablename = table[fk].type[10:] # reference tablename
ftable = s3db[ftablename]
query = (table.id == row.id) & \
(table[fk] == ftable.id)
record = db(query).select(ftable.realm_entity,
limitby=(0, 1)).first()
if record:
realm_entity = record.realm_entity
break
#else:
# Continue to loop through the rest of the default_fks
# Fall back to default get_realm_entity function
use_user_organisation = False
# Suppliers & Partners are owned by the user's organisation
if realm_entity == 0 and tablename == "org_organisation":
ottable = s3db.org_organisation_type
ltable = db.org_organisation_organisation_type
query = (ltable.organisation_id == row.id) & \
(ltable.organisation_type_id == ottable.id)
row = db(query).select(ottable.name,
limitby=(0, 1)
).first()
if row and row.name != "Red Cross / Red Crescent":
use_user_organisation = True
# Groups are owned by the user's organisation
#elif tablename in ("pr_group",):
elif tablename == "pr_group":
use_user_organisation = True
user = current.auth.user
if use_user_organisation and user:
# @ToDo - this might cause issues if the user's org is different from the realm that gave them permissions to create the Org
realm_entity = s3db.pr_get_pe_id("org_organisation",
user.organisation_id)
return realm_entity
settings.auth.realm_entity = ifrc_realm_entity
# -------------------------------------------------------------------------
# Theme (folder to use for views/layout.html)
settings.base.theme = "historic.ARC"
settings.base.xtheme = "IFRC/xtheme-ifrc.css"
# Formstyle
settings.ui.formstyle = "table"
settings.ui.filter_formstyle = "table_inline"
# Icons
settings.ui.icons = "font-awesome3"
settings.gis.map_height = 600
settings.gis.map_width = 869
# Display Resources recorded to Admin-Level Locations on the map
# @ToDo: Move into gis_config?
settings.gis.display_L0 = True
# GeoNames username
settings.gis.geonames_username = "rms_dev"
# Resources which can be directly added to the main map
settings.gis.poi_create_resources = \
(dict(c="gis",
f="poi",
table="gis_poi",
type="point",
label=T("Add PoI"),
layer="PoIs",
),
dict(c="gis",
f="poi",
table="gis_poi",
type="line",
label=T("Add Route"),
layer="Routes",
),
)
# Restrict the Location Selector to just certain countries
# NB This can also be over-ridden for specific contexts later
# e.g. Activities filtered to those of parent Project
settings.gis.countri
|
johnchen902/toyoj
|
judge/toyojjudge/checker/exact.py
|
Python
|
agpl-3.0
| 259
| 0.003861
|
from . import Checker

class ExactChecker(Checker):
    async def check(self, sandbox, task):
        output = await sandbox.read("/tmp/output.txt")
        task.accepted = output == task.testcase.output
        task.verdict = "AC" if task.accepted else "WA"
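
# A tolerant variant is easy to sketch on the same interface (added example,
# hypothetical class): compare whitespace-separated tokens instead of raw bytes.
#
#   class TokenChecker(Checker):
#       async def check(self, sandbox, task):
#           output = await sandbox.read("/tmp/output.txt")
#           task.accepted = output.split() == task.testcase.output.split()
#           task.verdict = "AC" if task.accepted else "WA"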
|
I2Cvb/data_balancing
|
src/data_conversion/datasets/arrhythmia.py
|
Python
|
mit
| 6,789
| 0.001326
|
""" Cardiac Arrhythmia Database
The original dataset and further information can be found here:
https://archive.ics.uci.edu/ml/datasets/Arrhythmia
Brief description
-----------------
This data contains 452 observations on 279 variables (206 linear valued
+ 73 nominal) on ECG readings. The data was collected to determine the
type of arrhythmia based on the ECG.
7. Attribute Information:
-- Complete attribute documentation:
1 Age: Age in years , linear
2 Sex: Sex (0 = male; 1 = female) , nominal
3 Height: Height in centimeters , linear
4 Weight: Weight in kilograms , linear
5 QRS duration: Average of QRS duration in msec., linear
6 P-R interval: Average duration between onset of P and Q waves
in msec., linear
7 Q-T interval: Average duration between onset of Q and offset
of T waves in msec., linear
8 T interval: Average duration of T wave in msec., linear
9 P interval: Average duration of P wave in msec., linear
Vector angles in degrees on front plane of:, linear
10 QRS
11 T
12 P
13 QRST
14 J
15 Heart rate: Number of heart beats per minute ,linear
Of channel DI:
Average width, in msec., of: linear
16 Q wave
17 R wave
18 S wave
     19 R' wave, small peak just after R
20 S' wave
21 Number of intrinsic deflections, linear
22 Existence of ragged R wave, nominal
23 Existence of diphasic derivation of R wave, nominal
24 Existence of ragged P wave, nominal
25 Existence of diphasic derivation of P wave, nominal
     26 Existence of ragged T wave, nominal
27 Existence of diphasic derivation of T wave, nominal
Of channel DII:
28 .. 39 (similar to 16 .. 27 of channel DI)
Of channels DIII:
40 .. 51
Of channel AVR:
52 .. 63
Of channel AVL:
64 .. 75
Of channel AVF:
76 .. 87
Of channel V1:
88 .. 99
Of channel V2:
100 .. 111
Of channel V3:
112 .. 123
Of channel V4:
124 .. 135
Of channel V5:
136 .. 147
Of channel V6:
148 .. 159
Of channel DI:
Amplitude , * 0.1 milivolt, of
160 JJ wave, linear
161 Q wave, linear
162 R wave, linear
163 S wave, linear
164 R' wave, linear
165 S' wave, linear
166 P wave, linear
167 T wave, linear
168 QRSA , Sum of areas of all segments divided by 10,
( Area= width * height / 2 ), linear
169 QRSTA = QRSA + 0.5 * width of T wave * 0.1 * height of T
wave. (If T is diphasic then the bigger segment is
considered), linear
Of channel DII:
170 .. 179
Of channel DIII:
180 .. 189
Of channel AVR:
190 .. 199
Of channel AVL:
200 .. 209
Of channel AVF:
210 .. 219
Of channel V1:
220 .. 229
Of channel V2:
230 .. 239
Of channel V3:
240 .. 249
Of channel V4:
250 .. 259
Of channel V5:
260 .. 269
Of channel V6:
270 .. 279
8. Missing Attribute Values: Several. Distinguished with '?'.
9. Class Distribution:
Database: Arrhythmia
Class code : Class : Number of instances:
01 Normal 245
02 Ischemic changes (Coronary Artery Disease) 44
03 Old Anterior Myocardial Infarction 15
04 Old Inferior Myocardial Infarction 15
05 Sinus tachycardy 13
06 Sinus bradycardy 25
07 Ventricular Premature Contraction (PVC) 3
08 Supraventricular Premature Contraction 2
09 Left bundle branch block 9
10 Right bundle branch block 50
11 1. degree AtrioVentricular block 0
12 2. degree AV block 0
13 3. degree AV block 0
14 Left ventricule hypertrophy 4
15 Atrial Fibrillation or Flutter 5
16 Others 22
Original Owner and Donor
------------------------
H. Altay Guvenir, PhD., and, Burak Acar, M.S., and Haldun Muderrisoglu, M.D., Ph.D.,
Bilkent University,
06533 Ankara, Turkey
Email: guvenir@cs.bilkent.edu.tr
Email: buraka@ee.bilkent.edu.tr
References
----------
H. Altay Guvenir, Burak Acar, Gulsen Demiroz, Ayhan Cekin
"A Supervised Machine Learning Algorithm for Arrhythmia Analysis"
Proceedings of the Computers in Cardiology Conference,
Lund, Sweden, 1997.
#TODO: explain that we use class=14
"""
# Authors: Joan Massich and Guillaume Lemaitre
# License: MIT
from os.path import join, exists
from os import makedirs
try:
    # Python 2
    from urllib import urlretrieve
except ImportError:
    # Python 3+
    from urllib.request import urlretrieve
import numpy as np
# NOTE (added): get_data_home used below had no import in the original file;
# assuming it is scikit-learn's helper of the same name.
from sklearn.datasets import get_data_home
DATA_URL = "https://archive.ics.uci.edu/ml/machine-learning-databases/"\
"arrhythmia/arrhythmia.data"
RAW_DATA_LABEL = 'arrhythmia'
def get_dataset_home(data_home=None, dirname=RAW_DATA_LABEL):
    return join(get_data_home(data_home=data_home), dirname)
def fetch_arrhythmia(data_home=None, download_if_missing=True):
"""Fetcher for xxxxxxxxxxxxxxxxxxxxx.
Parameters
----------
data_home : optional, default: None
Specify another download and cache folder for the datasets. By default
the original datasets for this `data_balance` study are stored at
`../data/raw/` subfolders.
download_if_missing: optional, True by default
        If False, raise an IOError if the data is not locally available
instead of trying to download the data from the source site.
"""
    data_home = get_dataset_home(data_home=data_home)
    if not exists(data_home):
        makedirs(data_home)
    data_file = join(data_home, 'data.csv')
    if not exists(data_file):
        if not download_if_missing:
            raise IOError('Arrhythmia data not found and download_if_missing is False')
        print('downloading Arrhythmia data from %s to %s' % (DATA_URL, data_home))
        urlretrieve(DATA_URL, data_file)
def process_arrhythmia(target=14):
"""Process data of the CoIL 2000 dataset.
Parameters
----------
target: the target class [0..16]
Returns
-------
(data, label)
#TODO: check if files exist
#TODO: a generic file managing using get_data_home
#TODO:
"""
    #TODO: assert target
    f = join(get_dataset_home(), 'data.csv')
    # '?' marks missing values (see module docstring); map them to NaN
    tmp_input = np.genfromtxt(f, delimiter=',', missing_values='?', filling_values=np.nan)
    return (tmp_input[:, :-1], tmp_input[:, -1])
def convert_arrhythmia_14():
d, l = process_arrhythmia(target=14)
np.savez('../data/clean/uci-arrythmia_14.npz', data=d, label=l)
if __name__ == '__main__':
convert_arrhythmia_14()
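
# Added usage sketch (illustration only; assumes data.csv was already fetched
# via fetch_arrhythmia): reproduce the class-distribution table quoted in the
# module docstring.
def _print_class_distribution():
    data, label = process_arrhythmia()
    classes, counts = np.unique(label, return_counts=True)
    for c, n in zip(classes, counts):
        print('class %02d: %d instances' % (int(c), n))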
|
lmazuel/azure-sdk-for-python
|
azure-cognitiveservices-vision-face/azure/cognitiveservices/vision/face/models/__init__.py
|
Python
|
mit
| 3,118
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .error import Error
from .api_error import APIError, APIErrorException
from .face_rectangle import FaceRectangle
from .coordinate import Coordinate
from .face_landmarks import FaceLandmarks
from .facial_hair import FacialHair
from .head_pose import HeadPose
from .emotion import Emotion
from .hair_color import HairColor
from .hair import Hair
from .makeup import Makeup
from .occlusion import Occlusion
from .accessory import Accessory
from .blur import Blur
from .exposure import Exposure
from .noise import Noise
from .face_attributes import FaceAttributes
from .detected_face import DetectedFace
from .find_similar_request import FindSimilarRequest
from .similar_face import SimilarFace
from .group_request import GroupRequest
from .group_result import GroupResult
from .identify_request import IdentifyRequest
from .identify_candidate import IdentifyCandidate
from .identify_result import IdentifyResult
from .verify_face_to_person_request import VerifyFaceToPersonRequest
from .verify_face_to_face_request import VerifyFaceToFaceRequest
from .verify_result import VerifyResult
from .persisted_face import PersistedFace
from .face_list import FaceList
from .person_group import PersonGroup
from .person import Person
from .update_person_face_request import UpdatePersonFaceRequest
from .training_status import TrainingStatus
from .name_and_user_data_contract import NameAndUserDataContract
from .image_url import ImageUrl
from .face_api_enums import (
Gender,
GlassesType,
HairColorType,
AccessoryType,
BlurLevel,
ExposureLevel,
NoiseLevel,
FindSimilarMatchMode,
TrainingStatusType,
FaceAttributeType,
AzureRegions,
)
__all__ = [
'Error',
'APIError', 'APIErrorException',
'FaceRectangle',
'Coordinate',
'FaceLandmarks',
'FacialHair',
'HeadPose',
'Emotion',
'HairColor',
'Hair',
'Makeup',
'Occlusion',
'Accessory',
'Blur',
'Exposure',
'Noise',
'FaceAttributes',
'DetectedFace',
'FindSimilarRequest',
'SimilarFace',
'GroupRequest',
'GroupResult',
'IdentifyRequest',
'IdentifyCandidate',
'IdentifyResult',
'VerifyFaceToPersonRequest',
'VerifyFaceToFaceRequest',
'VerifyResult',
'PersistedFace',
'FaceList',
'PersonGroup',
'Person',
'UpdatePersonFaceRequest',
'TrainingStatus',
'NameAndUserDataContract',
'ImageUrl',
'Gender',
'GlassesType',
'HairColorType',
'AccessoryType',
'BlurLevel',
'ExposureLevel',
'NoiseLevel',
'FindSimilarMatchMode',
'TrainingStatusType',
'FaceAttributeType',
'AzureRegions',
]
|
rogerthat-platform/rogerthat-backend
|
src/rogerthat/models/properties/news.py
|
Python
|
apache-2.0
| 14,083
| 0.001562
|
# -*- coding: utf-8 -*-
# Copyright 2017 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.3@@
import logging
from collections import defaultdict
from contextlib import closing
from datetime import datetime
from google.appengine.ext import db
from mcfw.properties import long_list_property, long_property, unicode_property, azzert
from mcfw.serialization import s_long, ds_long_list, s_long_list, ds_long, s_unicode, ds_unicode, get_list_serializer, \
get_list_deserializer
from rogerthat.models import UserProfile
from rogerthat.models.properties.messaging import SpecializedList, DuplicateButtonIdException, \
DuplicateAppIdException
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class NewsStatisticPerApp(object):
def set_data(self, stream):
self._stream = stream
def _setup(self):
if self._initialized:
return
if not self._stream:
raise Exception("NewsStatisticPerApp not ready, but setup was called.")
self._data = defaultdict(NewsItemStatistics)
ds_long(self._stream) # version
for _ in xrange(ds_long(self._stream)):
app_id = ds_unicode(self._stream)
self._data[app_id] = _deserialize_news_item_statistics(self._stream)
self._initialized = True
def __init__(self):
self._initialized = False
self._stream = None
self._data = None
def get(self, key, default=None):
if not (self._initialized or self._stream):
return default
self._setup()
return self._data.get(key, default)
def __getitem__(self, key):
if not (self._initialized or self._stream):
return None
self._setup()
return self._data.get(key)
def __setitem__(self, key, value):
if not (self._initialized or self._stream):
self._data = defaultdict(NewsItemStatistics)
self._initialized = True
self._setup()
self._data[key] = value
def iterkeys(self):
self._setup()
return self._data.iterkeys()
def iteritems(self):
self._setup()
return self._data.iteritems()
def __iter__(self):
self._setup()
for val in self._data.values():
yield val
def keys(self):
self._setup()
return self._data.keys()
def has_key(self, key):
self._setup()
return key in self._data.keys()
def __contains__(self, key):
self._setup()
return key in self._data.keys()
def __len__(self):
self._setup()
return len(self._data)
def _serialize_news_statistic_per_app(stream, value):
s_long(stream, 1) # version
s_long(stream, len(value))
for app_id, stats in value.iteritems():
s_unicode(stream, app_id)
_serialize_news_item_statistics(stream, stats)
def _deserialize_news_item_statistic_per_app(stream):
news_stats_per_app = NewsStatisticPerApp()
news_stats_per_app.set_data(stream)
return news_stats_per_app
class NewsItemStatistics(object):
AGE_LENGTH = 21
GENDER_LENGTH = 3
@staticmethod
def default_age_stats():
return [0] * NewsItemStatistics.AGE_LENGTH
@staticmethod
def default_gender_stats():
return [0] * NewsItemStatistics.GENDER_LENGTH
@staticmethod
def default_time_stats():
return [0]
@property
def reached_total(self):
total = sum(self.reached_gender)
# for validating if stats work properly
if sum(self.reached_age) != total:
logging.error('Expected sum of reached_gender (%d) and reached_age (%d) to be the same', total,
sum(self.reached_age))
if sum(self.reached_time) != total:
logging.error('Expected sum of reached_gender (%d) and reached_time (%d) to be the same', total,
sum(self.reached_time))
return total
@property
def rogered_total(self):
return sum(self.rogered_gender)
@property
def action_total(self):
return sum(self.action_gender)
@property
def followed_total(self):
return sum(self.followed_gender)
    reached_age = long_list_property('reached_age')  # 0-5, 5-10, 10-15, ..., 95-100+
reached_gender = long_list_property('reached_gender') # male, female, other
reached_time = long_list_property('reached_time') # reach on first hour, reach on second hour, ... (max 30d)
rogered_age = long_list_property('rogered_age')
rogered_gender = long_list_property('rogered_gender')
rogered_time = long_list_property('rogered_time')
action_age = long_list_property('action_age')
action_gender = long_list_property('action_gender')
action_time = long_list_property('action_time')
followed_age = long_list_property('followed_age')
followed_gender = long_list_property('followed_gender')
followed_time = long_list_property('followed_time')
@classmethod
def default_statistics(cls):
stats = cls()
for prop in ('reached', 'rogered', 'action', 'followed'):
for statistic in ('age', 'gender', 'time'):
default_statistics = getattr(cls, 'default_%s_stats' % statistic)()
setattr(stats, '%s_%s' % (prop, statistic), default_statistics)
return stats
@staticmethod
def get_age_index(age):
i = int(age / 5) if age and age >= 0 else 0
if i > 20:
return 20
return i
@staticmethod
def get_gender_index(gender):
if gender == UserProfile.GENDER_MALE:
return 1
if gender == UserProfile.GENDER_FEMALE:
return 2
return 0
@staticmethod
def get_time_index(news_item_created_datetime, action_datetime):
# type: (datetime, datetime) -> int
diff = action_datetime - news_item_created_datetime
return int(diff.total_seconds() / 3600)
@staticmethod
def gender_translation_key(gender_index):
if gender_index == 1:
return u'gender-male'
elif gender_index == 2:
return u'gender-female'
else:
return u'unknown'
@classmethod
def get_age_label(cls, age_index):
azzert(age_index >= 0, 'Expected age_index to be positive, got %s' % age_index)
start_age = age_index * 5
end_age = start_age + 5
return u'%s - %s' % (start_age, end_age)
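# Added illustration (not part of the original module): how the bucketing
# helpers above map raw values to list indices.
def _bucket_examples():
    assert NewsItemStatistics.get_age_index(0) == 0     # bucket "0 - 5"
    assert NewsItemStatistics.get_age_index(37) == 7    # bucket "35 - 40"
    assert NewsItemStatistics.get_age_index(150) == 20  # capped at the last bucket
    assert NewsItemStatistics.get_gender_index(UserProfile.GENDER_MALE) == 1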
def _serialize_news_item_statistics(stream, stats):
"""
Args:
stream (StringIO)
stats (NewsItemStatistics)
"""
s_long(stream, 1) # version
s_long_list(stream, stats.reached_age)
s_long_list(stream, stats.reached_gender)
s_long_list(stream, stats.reached_time)
s_long_list(stream, stats.rogered_age)
s_long_list(stream, stats.rogered_gender)
s_long_list(stream, stats.rogered_time)
s_long_list(stream, stats.action_age)
s_long_list(stream, stats.action_gender)
s_long_list(stream, stats.action_time)
s_long_list(stream, stats.followed_age)
s_long_list(stream, stats.followed_gender)
s_long_list(stream, stats.followed_time)
def _deserialize_news_item_statistics(stream):
ds_long(stream) # version
stats = NewsItemStatistics()
stats.reached_age = ds_long_list(stream)
stats.reached_gender = ds_long_list(stream)
stats.reached_time = ds_long_list(stream)
stats.rogered_age = ds_long_list(stream)
stats.rogered_gender = ds_long_list(stream)
stats.rogered_time = ds_long_list(stream)
stats.action_age = ds_long_list(stream)
    stats.action_gender = ds_long_list(stream)
    stats.action_time = ds_long_list(stream)
    stats.followed_age = ds_long_list(stream)
    stats.followed_gender = ds_long_list(stream)
    stats.followed_time = ds_long_list(stream)
    return stats
|
iohannez/gnuradio
|
gr-analog/python/analog/qa_random_uniform_source.py
|
Python
|
gpl-3.0
| 2,736
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
from gnuradio import blocks, analog
import numpy as np
class qa_random_uniform_source(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_001_byte(self):
minimum = 0
maximum = 5
seed = 3
n_items = 10000
rnd_src = analog.random_uniform_source_b(minimum, maximum, seed)
head = blocks.head(1, n_items)
snk = blocks.vector_sink_b(1)
self.tb.connect(rnd_src, head, snk)
# set up fg
self.tb.run()
# check data
res = snk.data()
        self.assertGreaterEqual(np.min(res), minimum)
self.assertLess(np.max(res), maximum)
def test_002_short(self):
minimum = 42
maximum = 1025
seed = 3
n_items = 10000
rnd_src = analog.random_uniform_source_s(minimum, maximum, seed)
head = blocks.head(2, n_items)
snk = blocks.vector_sink_s(1)
self.tb.connect(rnd_src, head, snk)
# set up fg
self.tb.run()
# check data
res = snk.data()
        self.assertGreaterEqual(np.min(res), minimum)
self.assertLess(np.max(res), maximum)
def test_003_int(self):
minimum = 2 ** 12 - 2
maximum = 2 ** 17 + 5
seed = 3
n_items = 10000
rnd_src = analog.random_uniform_source_i(minimum, maximum, seed)
head = blocks.head(4, n_items)
snk = blocks.vector_sink_i(1)
self.tb.connect(rnd_src, head, snk)
# set up fg
self.tb.run()
# check data
res = snk.data()
# plt.hist(res)
# plt.show()
self.assertGreaterEqual(np.min(res), minimum)
self.assertLess(np.max(res), maximum)
if __name__ == '__main__':
gr_unittest.run(qa_random_uniform_source, "qa_random_uniform_source.xml")
|
ANickerson/kilosort-py
|
src/algorithms.py
|
Python
|
gpl-2.0
| 10,332
| 0.012195
|
"""
Module containing all the individual algorithms.
It would be possible to use alternative implementations
"""
import numpy as np
import theano
from theano import tensor as T
from theano.tensor import nlinalg
class algorithms_numpy():
"""
    The algorithms implemented in numpy.
This should be the base class for any extended implementations
global notes:
    gather_try is a concept of executing planned matrix operations. Not implemented.
global todos:
TODO: remove/alter ops.
TODO: better commenting of code
TODO: test output of functions with matlab code - find some input!
TODO: min function in matlab returns index as well as value. need to fix all calls as the argmin
# does not work in the same manner.
"""
def zca_whiten(self, data):
"""
zca_whiten the data
TODO: test with oct2py
"""
m = np.mean(data, 0) # take mean
_data = data - m # demean data
        cov = np.dot(_data.T, _data) / (_data.shape[0] - 1)  # dot product of data.T and data divided by len-1
U, S, _ = np.linalg.svd(cov) # svd of covariance matrix
s = np.sqrt(S) #S.clip(self.regularization))
s_inv = np.diag(1. / s)
s = np.diag(s)
_whiten = np.dot(np.dot(U, s_inv), U.T)
return np.dot(_data, _whiten.T)
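    # Added usage sketch (illustration only): whitened data should have an
    # (approximately) identity covariance matrix, e.g.
    #
    #     algo = algorithms_numpy()
    #     x = np.dot(np.random.randn(500, 8), np.random.randn(8, 8))
    #     xw = algo.zca_whiten(x)
    #     np.dot(xw.T, xw) / (xw.shape[0] - 1)   # ~ np.eye(8)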
#Main Loop
def alignW(self, W, ops):
"""
:param: W: 2d array: nt0, nFilt
TODO: test using oct2py?
TODO: find out the use of this function...
"""
nt0, nFilt = W.shape
imax = np.argmin(W, axis=0)
dmax = -(imax - ops.nt0min)
        for i in range(nFilt):
            if dmax[i] > 0:
                W[dmax[i]:nt0, i] = W[0:nt0 - dmax[i], i]
            else:
                W[0:nt0 + dmax[i], i] = W[-dmax[i]:nt0, i]
        return W
def alignWU(self, WU, ops):
"""
:param: WU: 3d array: nt0, nChan,nFilt
TODO: find out what this does
TODO: test using oct2py
"""
        nt0, n_chan, n_filt = WU.shape
        imin = np.argmin(WU.reshape(nt0 * n_chan, n_filt), axis=0)
        imin_chan = imin % n_chan  # channel holding the minimum (C-order reshape)
        dmax = np.zeros(n_filt, dtype=int)
        for i in range(n_filt):
            wu = WU[:, imin_chan[i], i]
            imin_t = np.argmin(wu)
            dmax[i] = -(imin_t - ops.nt0min)
            if dmax[i] > 0:
                WU[dmax[i]:nt0, :, i] = WU[0:nt0 - dmax[i], :, i]
            else:
                WU[0:nt0 + dmax[i], :, i] = WU[-dmax[i]:nt0, :, i]
        return WU
def decompose_dWU(self, ops, dWU, n_rank, kcoords):
"""
:param: dWU: 3d array nt0, n_rank, n_filt
TODO: find out what this does
TODO: test using oct2py depends on get_svds and zero_out_kcoords
"""
nt0, n_chan, n_filt = dWU.shape
        W = np.zeros((nt0, n_rank, n_filt))     # single precision in original code?
        U = np.zeros((n_chan, n_rank, n_filt))  # single precision in original code?
        mu = np.zeros((n_filt, 1))              # single precision in original code?
        dWU[np.isnan(dWU)] = 0  # replace NaNs
# original code parallel processing option
# TODO: add parallel processing
for k in range(n_filt):
            a, b, c = self.get_svd(dWU[:, :, k], n_rank)
W[:, :, k] = a
U[:, :, k] = b
mu[k] = c
        U = np.transpose(U, [0, 2, 1])  # MATLAB permute(U, [1 3 2]); TODO: improve this?
        W = np.transpose(W, [0, 2, 1])
        U[np.isnan(U)] = 0  # replace NaNs
        if np.unique(kcoords).size > 1:
            U = self.zero_out_K_coords(U, kcoords, ops.criterionNoiseChannels)
        UtU = np.abs(np.dot(U[:, :, 0].T, U[:, :, 0])) > 0.1
        # TODO: change. This seems like a strange function
        Wdiff = np.concatenate((W, np.zeros((2, n_filt, n_rank))), axis=0) \
            - np.concatenate((np.zeros((2, n_filt, n_rank)), W), axis=0)
        nu = np.sum(np.sum(Wdiff ** 2, axis=0), axis=1)
        return (W, U, mu, UtU, nu)
def get_svd(self, dWU, n_rank):
"""
:param dWU: array to apply svd to.
TODO: find out what this function does
TODO: test using oct2py
"""
        # numpy returns (U, s, V^H) where MATLAB returns (U, S, V)
        Wall, Sv, Uallh = np.linalg.svd(dWU, full_matrices=False)  # gather_try?
        Uall = Uallh.T
        imax = np.argmax(np.abs(Wall[:, 0]))
        Uall[:, 0] = -Uall[:, 0] * np.sign(Wall[imax, 0])
        Wall[:, 0] = -Wall[:, 0] * np.sign(Wall[imax, 0])
        Wall = Wall * Sv  # scale each column by its singular value
        mu = np.sum(Sv[:n_rank] ** 2) ** 0.5
        Wall = Wall / mu
        W = Wall[:, 0:n_rank]
        U = Uall[:, 0:n_rank]
        return (W, U, mu)
def merge_spikes_in(self, uBase, nS, uS, crit):
"""
TODO: find out what this function does
check if spikes already in uBase?
nS is a histogram of some description?
crit is a criteria for exclusion (similarity?)
TODO: test using oct2py
"""
if uBase is None:
# if uBase is empty then return all the positions
return ([], np.arange(uS.shape[1]))
        cdot = np.dot(uBase[:, :, 0].T, uS[:, :, 0])
        for j in range(1, uBase.shape[2]):
            cdot = cdot + np.dot(uBase[:, :, j].T, uS[:, :, j])
        base_norms = np.sum(np.sum(uBase ** 2, axis=2), axis=0)
        new_norms = np.sum(np.sum(uS ** 2, axis=2), axis=0)
        c_norms = 1e-10 + base_norms[:, None] + new_norms[None, :]
        cdot = 1 - 2 * (cdot / c_norms)
        imin = np.argmin(cdot, axis=0)
        cdotmin = cdot.min(axis=0)
        i_match = cdotmin < crit
        # np.histogram returns (counts, bin_edges); keep only the counts
        nS_new = np.histogram(imin[i_match], np.arange(0, uBase.shape[1] + 1))[0]
        nS = nS + nS_new
        i_non_match = np.where(cdotmin > crit)[0]
        return (nS, i_non_match)
def mexMPregMUcpu(self, Params, data_raw, fW, data, UtU, mu, lam, dWU, nu, ops):
"""
I believe this function does the heavy lifting. When using theano this is probably
the one to reimplement
get spike times and coefficients
:params: Params: [NT, n_filt, Th, , , , , pm]
TODO: figure out what this function does
TODO: test with oct2py
TODO: rename
TODO: change call signature to make more pythonic
TODO: use a data structure for the raw data
"""
nt0 = ops.nt0
        NT, n_filt, Th = Params[0:3]
        pm = Params[8]
fft_data = np.fft.fft(data,axis=0)
proj = np.fft.ifft(fft_data * fW[:,:]).real #convolution
proj = np.sum(proj.reshape(NT, n_filt,3), 2)
Ci = proj + (mu * lam).T
Ci = (Ci**2) / (1 + lam.T)
Ci = Ci - (lam*mu**2).T
        imax = np.argmax(Ci, axis=1)
        mX = Ci.max(axis=1)
maX = -my_min(-mX,31,1) # Err... my_min? This function seems odd.
#TODO: convert my_min. or remove?
        st = np.where((maX < mX + 1e-3) & (mX > Th ** 2))[0]
        st = st[st <= (NT - nt0)]  # drop spikes too close to the end of the batch
        imax = imax[st]
x = []
cost = []
nsp = []
if len(imax)>0:
            inds = st[None, :] + np.arange(nt0)[:, None]
            dspk = data_raw[inds, :].reshape(nt0, len(st), ops.n_chan)
            dspk = np.transpose(dspk, [0, 2, 1])
            x = np.zeros(len(st))
            cost = np.zeros(len(st))
            nsp = np.zeros((n_filt, 1))
            for j in range(dspk.shape[2]):
                dWU[:, :, imax[j]] = pm * dWU[:, :, imax[j]] + (1 - pm) * dspk[:, :, j]
                x[j] = proj[st[j], imax[j]]
                cost[j] = maX[st[j]]
                nsp[imax[j]] = nsp[imax[j]] + 1
        return (dWU, st, imax, x, cost, nsp)
def reduce_clusters(self, uS, crit):
"""
:param uS: 3d array
TODO: work out what this function does
TODO: test using matlab/oct2py
/mainLoop/reduce_clusters.m
"""
        cdot = np.dot(uS[:, :, 0].T, uS[:, :, 0])
        for j in range(1, uS.shape[2]):
            cdot += np.dot(uS[:, :, j].T, uS[:, :, j])
        # compute norms of each spike
newNorms
|
hugollm/foster
|
tests/frames/build/foobar/bar/bar.py
|
Python
|
mit
| 29
| 0
|
def bar():
    print('bar!')
|
gensmusic/test
|
l/python/book/learning-python/c22/module3.py
|
Python
|
gpl-2.0
| 137
| 0.043796
|
#!/usr/bin/python
#coding:utf-8
print 'start to load...'
import sys
name = 42
def func(): pass
class kclass: pass
print 'done loading.'
|
blitzmann/Pyfa
|
gui/fitCommands/guiRemoveBooster.py
|
Python
|
gpl-3.0
| 1,009
| 0.000991
|
import wx
from service.fit import Fit
import gui.mainFrame
from gui import globalEvents as GE
from .calc.fitRemoveBooster import FitRemoveBoosterCommand
class GuiRemoveBoosterCommand(wx.Command):
def __init__(self, fitID, position):
wx.Command.__init__(self, True, "")
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
self.sFit = Fit.getInstance()
self.internal_history = wx.CommandProcessor()
self.fitID = fitID
self.position = position
def Do(self):
if self.internal_history.Submit(FitRemoveBoosterCommand(self.fitID, self.position)):
self.sFit.recalc(self.fitID)
wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=self.fitID))
return True
return False
def Undo(self):
for _ in self.internal_history.Commands:
self.internal_history.Undo()
self.sFit.recalc(self.fitID)
wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=self.fitID))
return True
|
djfroofy/beatlounge
|
bl/midi.py
|
Python
|
mit
| 11,595
| 0.000172
|
import pypm
from bl.utils import getClock
from bl.debug import debug
__all__ = ['init', 'initialize', 'getInput', 'getOutput', 'printDeviceSummary',
'ClockSender', 'MidiDispatcher', 'FUNCTIONS', 'ChordHandler',
'MonitorHandler', 'NoteEventHandler']
class PypmWrapper:
"""
Simple wrapper around pypm calls which caches inputs and outputs.
"""
initialized = False
inputs = []
outputs = []
deviceMap = {}
inputNames = {}
outputNames = {}
_channels = {}
@classmethod
def initialize(cls):
"""
Initialize pypm if not already initialized.
"""
if cls.initialized:
return
pypm.Initialize()
cls._gatherDeviceInfo()
cls.initialized = True
@classmethod
def _gatherDeviceInfo(cls):
for devno in range(pypm.CountDevices()):
info = pypm.GetDeviceInfo(devno)
insouts = {'output': None, 'input': None}
m = cls.deviceMap.setdefault(info[1], insouts)
if info[3]:
cls.outputs.append(devno)
m['output'] = devno
if info[2]:
cls.inputs.append(devno)
m['input'] = devno
cls.inputNames = dict(
(v['input'], k) for (k, v) in cls.deviceMap.items()
if v['input'] is not None
)
cls.outputNames = dict(
(v['output'], k) for (k, v) in cls.deviceMap.items()
if v['output'] is not None
)
@classmethod
def getInput(cls, dev):
"""
        Get an input with device number 'dev' - dev may also be a string matching
the target device. If the input was previously loaded this will return
the cached device.
"""
no = dev
if isinstance(dev, basestring):
no = cls.deviceMap[dev]['input']
key = ('input', no)
if key not in cls._channels:
cls._channels[key] = pypm.Input(no)
return cls._channels[key]
@classmethod
def getOutput(cls, dev):
"""
        Get output with device number 'dev' - dev may also be a string matching
the target device. If the output was previously loaded this will return
the cached device.
"""
no = dev
if isinstance(dev, basestring):
no = cls.deviceMap[dev]['output']
key = ('output', no)
if key not in cls._channels:
cls._channels[key] = pypm.Output(no)
return cls._channels[key]
@classmethod
def printDeviceSummary(cls, printer=None):
"""
Print device summary - inputs followed by outputs.
"""
if printer is None:
def printer(line):
print line
printer('Inputs:')
for devno in cls.inputs:
printer('... %d %r' % (devno, cls.inputNames[devno]))
printer('Outputs:')
for devno in cls.outputs:
printer('... %d %r' % (devno, cls.outputNames[devno]))
initialize = init = PypmWrapper.initialize
getInput = PypmWrapper.getInput
getOutput = PypmWrapper.getOutput
printDeviceSummary = PypmWrapper.printDeviceSummary
FUNCTIONS = {}
FUNCTION_ARITY = {}
def _add_global(name, arity, v):
globals()[name] = v
FUNCTIONS[v] = name
FUNCTION_ARITY[v] = arity
__all__.append(name)
for i in range(16):
no = i + 1
_add_global('NOTEOFF_CHAN%d' % no, 2, 0x80 + i)
_add_global('NOTEON_CHAN%d' % no, 2, 0x90 + i)
_add_global('POLYAFTERTOUCH_CHAN%d' % no, 2, 0xA0 + i)
_add_global('CONTROLCHANGE_CHAN%d' % no, 2, 0xB0 + i)
_add_global('PROGRAMCHANGE_CHAN%d' % no, 2, 0xC0 + i)
_add_global('CHANAFTERTOUCH_CHAN%d' % no, 2, 0xD0 + i)
_add_global('PITCHWHEEL_CHAN%d' % no, 2, 0xE0 + i)
_add_global('SYSTEMEXCL', 2, 0xf0)
_add_global('MTC_QFRAME', 2, 0xf1)
_add_global('SONGPOSPOINTER', 2, 0xf2)
_add_global('SONGSELECT', 2, 0xF3)
_add_global('RESERVED1', 2, 0xF4)
_add_global('RESERVED2', 2, 0xF5)
_add_global('TUNEREQ', 2, 0xF6)
_add_global('EOX', 2, 0xF7)
_add_global('TIMINGCLOCK', 2, 0xF8)
_add_global('RESERVED3', 2, 0xF9)
_add_global('START', 2, 0xFA)
_add_global('CONTINUE', 2, 0xFB)
_add_global('STOP', 2, 0xFC)
_add_global('ACTIVESENSING', 2, 0xFE)
_add_global('SYSTEMRESET', 2, 0xFF)
del _add_global
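# Added illustration (not part of the original module): the loop above injects
# one constant per MIDI channel into the module namespace, e.g.
#
#     NOTEON_CHAN1 == 0x90 and NOTEON_CHAN16 == 0x9F
#     FUNCTIONS[0x90] == 'NOTEON_CHAN1' and FUNCTION_ARITY[0x90] == 2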
# pyflakes
START = globals()['START']
TIMINGCLOCK = globals()['TIMINGCLOCK']
class MidiDispatcher(object):
"""
Dispatcher for events received from a midi input channel.
Example usage:
        init()
input = getInput(3)
def debug_event(event):
print event
disp = MidiDispatcher(input, [debug_event, NoteOnOffHandler(instr)])
disp.start()
"""
def __init__(self, midiInput, handlers, clock=None):
self.clock = getClock(clock)
self.midiInput = midiInput
self.handlers = handlers
def start(self):
"""
Start the MidiDispatcher - this will schedule an event to call
        all its handlers every tick with any buffered events.
"""
nm = self.clock.meter.nm
n = self.clock.meter.dtt
self._event = self.clock.schedule(self).startAfterTicks(
nm(self.clock.ticks, 1) - self.clock.ticks,
n(1, 96))
def __call__(self):
"""
Call all our handlers with buffered events (max of 32 per call
are processed).
"""
for message in self.midiInput.Read(32):
for call in self.handlers:
call(message)
class MidiHandler(object):
def __call__(self, message):
"""
Parse method and call method on self based on midi function. For
example, if function is NOTEON_CHAN1, this will call our method
        noteon(), etc. If a message has a channel as part of its function,
this will be the first argument. After the first optional channel
argument, remaining positional arguments are passed to the method in
the same order as specified in MIDI. Not all MIDI functions need to be
supplied or implemented in subclass.
"""
packet, timestamp = message
func, arg1, arg2, _pad = packet
args = [arg1, arg2][:FUNCTION_ARITY.get(func, 0)]
args.append(timestamp)
funcname = FUNCTIONS[func]
tokens = funcname.split('_')
if len(tokens) == 2:
type, channel = tokens
channel = int(channel[4:])
            method = getattr(self, type.lower(), None)
            if method is None:
                debug('No handler defined for midi event of type: %s' % type)
                return
            method(channel, *args)
def noteon(self, channel, note, velocity, timestamp):
pass
def noteoff(self, channel, note, velocity, timestamp):
pass
class _DummyInstrument:
@classmethod
def playnote(cls, note, velocity):
pass
@classmethod
def stopnote(cls, note):
pass
class MonitorHandler(MidiHandler):
"""
A simple MidiHandler which takes a mapping of channels to instruments.
"""
def __init__(self, instrs):
self.instrs = instrs
def noteon(self, channel, note, velocity, timestamp):
"""
Immediately play instrument at channel with given note and velocity.
The timestamp is ignored. This is a noop if no instrument is mapped
to the given channel.
"""
self.instrs.get(channel, _DummyInstrument).playnote(note, velocity)
def noteoff(self, channel, note, velocity, timestamp):
"""
Immediately stop instrument at channel with given note. The velocity
and timestamp arguments are ignored. This is a noop if no instrument
is mapped to the given channel.
"""
self.instrs.get(channel, _DummyInstrument).stopnote(note)
NoteOnOffHandler = MonitorHandler
class ChordHandler(MidiHandler):
"""
A chord handler is a simple MidiHandler which recognizes chords and sends
to its callback.
todo: Currently this implementation doesn't care about channels; but this
behavior should likely change in the near future.
"""
de
|
markramm/jujulib
|
juju/exceptions.py
|
Python
|
lgpl-3.0
| 219
| 0
|
class EnvironmentNotBootstrapped(Exception):
    def __init__(self, environment):
        self.environment = environment
    def __str__(self):
        return "environment %s is not bootstrapped" % self.environment
|
tommy-u/enable
|
enable/text_field_style.py
|
Python
|
bsd-3-clause
| 1,133
| 0.013239
|
# Enthought library imports
from traits.api import HasTraits, Int, Bool
from kiva.trait_defs.api import KivaFont
from enable.colors import ColorTrait
class TextFieldStyle(HasTraits):
""" This class holds style settings for rendering an EnableTextField.
fixme: See docstring on EnableBoxStyle
"""
# The color of the text
text_color = ColorTrait((0,0,0,1.0))
# The font for the text (must be monospaced!)
    font = KivaFont("Courier 12")
# The color of highlighted text
highlight_color = ColorTrait((.65,0,0,1.0))
# The background color of highlighted items
highlight_bgcolor = ColorTrait("lightgray")
# The font for flagged text (must be monospaced!)
highlight_font = KivaFont("Courier 14 bold")
# The number of pixels between each line
line_spacing = Int(3)
# Space to offset text from the widget's border
text_offset = Int(5)
# Cursor properties
cursor_color = ColorTrait((0,0,0,1))
cursor_width = Int(2)
# Drawing properties
border_visible = Bool(False)
border_color = ColorTrait((0,0,0,1))
bgcolor = ColorTrait((1,1,1,1))
|
ergonomica/ergonomica
|
tests/stdlib/test_cd.py
|
Python
|
gpl-2.0
| 557
| 0.003591
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
[tests/stdlib/test_cd.py]
Test the cd command.
"""
import unittest
import os
import tempfile
from ergonomica import ergo
class TestCd(unittest.TestCase):
"""Tests the cd command."""
def test_cd(self):
"""
Tests the cd command.
"""
olddir = os.getcwd()
newdir = tempfile.mkdtemp()
ergo("cd {}".format(newdir))
self.assertEqual(os.getcwd(), newdir)
ergo("cd {}".format(olddir))
self.assertEqual(os.getcwd(), olddir)
|
ahmedaljazzar/edx-platform
|
lms/djangoapps/shoppingcart/admin.py
|
Python
|
agpl-3.0
| 5,178
| 0.000386
|
"""Django admin interface for the shopping cart models. """
from django.contrib import admin
from shoppingcart.models import (
Coupon,
CourseRegistrationCodeInvoiceItem,
DonationConfiguration,
Invoice,
InvoiceTransaction,
PaidCourseRegistrationAnnotation
)
class SoftDeleteCouponAdmin(admin.ModelAdmin):
"""
Admin for the Coupon table.
soft-delete on the coupons
"""
fields = ('code', 'description', 'course_id', 'percentage_discount', 'created_by', 'created_at', 'is_active')
raw_id_fields = ("created_by",)
readonly_fields = ('created_at',)
actions = ['really_delete_selected']
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site - used by changelist_view.
"""
qs = super(SoftDeleteCouponAdmin, self).get_queryset(request)
return qs.filter(is_active=True)
def get_actions(self, request):
actions = super(SoftDeleteCouponAdmin, self).get_actions(request)
del actions['delete_selected']
return actions
def really_delete_selected(self, request, queryset):
"""override the default behavior of selected delete method"""
for obj in queryset:
obj.is_active = False
obj.save()
if queryset.count() == 1:
message_bit = "1 coupon entry was"
else:
message_bit = "%s coupon entries were" % queryset.count()
self.message_user(request, "%s successfully deleted." % message_bit)
def delete_model(self, request, obj):
"""override the default behavior of single instance of model delete method"""
obj.is_active = False
obj.save()
    really_delete_selected.short_description = "Delete selected entries"
class CourseRegistrationCodeInvoiceItemInline(admin.StackedInline):
"""Admin for course registration code invoice items.
Displayed inline within the invoice admin UI.
"""
model = CourseRegistrationCodeInvoiceItem
extra = 0
can_delete = False
readonly_fields = (
'qty',
'unit_price',
'currency',
'course_id',
)
def has_add_permission(self, request):
return False
class InvoiceTransactionInline(admin.StackedInline):
"""Admin for invoice transactions.
Displayed inline within the invoice admin UI.
"""
model = InvoiceTransaction
extra = 0
    readonly_fields = (
'created',
'modified',
'created_by',
'last_modified_by'
)
class InvoiceAdmin(admin.ModelAdmin):
"""Admin for invoices.
This is intended for the internal finance team
to be able to view and update invoice information,
including payments and refunds.
"""
date_hierarchy = 'created'
can_delete = False
readonly_fields = ('created', 'modified')
search_fields = (
'internal_reference',
        'customer_reference_number',
'company_name',
)
fieldsets = (
(
None, {
'fields': (
'internal_reference',
'customer_reference_number',
'created',
'modified',
)
}
),
(
'Billing Information', {
'fields': (
'company_name',
'company_contact_name',
'company_contact_email',
'recipient_name',
'recipient_email',
'address_line_1',
'address_line_2',
'address_line_3',
'city',
'state',
'zip',
'country'
)
}
)
)
readonly_fields = (
'internal_reference',
'customer_reference_number',
'created',
'modified',
'company_name',
'company_contact_name',
'company_contact_email',
'recipient_name',
'recipient_email',
'address_line_1',
'address_line_2',
'address_line_3',
'city',
'state',
'zip',
'country'
)
inlines = [
CourseRegistrationCodeInvoiceItemInline,
InvoiceTransactionInline
]
def save_formset(self, request, form, formset, change):
"""Save the user who created and modified invoice transactions. """
instances = formset.save(commit=False)
for instance in instances:
if isinstance(instance, InvoiceTransaction):
if not hasattr(instance, 'created_by'):
instance.created_by = request.user
instance.last_modified_by = request.user
instance.save()
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
admin.site.register(PaidCourseRegistrationAnnotation)
admin.site.register(Coupon, SoftDeleteCouponAdmin)
admin.site.register(DonationConfiguration)
admin.site.register(Invoice, InvoiceAdmin)
|
ChrisTruncer/Just-Metadata
|
modules/analytics/cert_search.py
|
Python
|
gpl-3.0
| 1,648
| 0.003034
|
'''
This module searches the shodan data for IPs using a user-specified https certificate
'''
from common import helpers
class Analytics:
def __init__(self, cli_options):
self.cli_name = "CertSearch"
        self.description = "Searches for user-provided HTTPS certificate"
self.https_cert = ''
self.found_ips = []
def analyze(self, all_ip_objects):
if self.https_cert == '':
print "Please provide the HTTPS certificate you want to search for."
self.https_cert = raw_input(' \n\n[>] HTTPS Cert (including start and end tags): ').strip()
for path, single_ip in all_ip_objects.iteritems():
            if single_ip[0].shodan_info != '' and\
'No available information within Shodan about' not in\
single_ip[0].shodan_info:
for item in single_ip[0].shodan_info['data']:
if 'opts' in item:
if 'pem' in item['opts']:
if self.https_cert.strip() in item['opts']['pem'].encode('utf-8').replace('\n', '').replace('\r', ''):
self.found_ips.append(single_ip[0].ip_address)
if len(self.found_ips) > 0:
print helpers.color("\nCertificate Found!")
print "===================================="
for ip in self.found_ips:
print helpers.color(ip)
print
else:
print helpers.color("\nCertificate is not found within the currently loaded data!\n", warning=True)
self.https_cert = ''
self.found_ips = []
return
|
oknuutti/visnav-py
|
visnav/iotools/read-raw-img.py
|
Python
|
mit
| 811
| 0.006165
|
import sys
import numpy as np
import cv2
def main():
w, h = map(int, (sys.argv[1] if len(sys.argv) > 1 else '2048x1944').split('x'))
    imgfile = sys.argv[2] if len(sys.argv) > 2 else r'D:\Downloads\example-navcam-imgs\navcamTests0619\rubbleML-def-14062019-25.raw'
    imgout = sys.argv[3] if len(sys.argv) > 3 else r'D:\Downloads\example-navcam-imgs\navcamTests0619\rubbleML-def-14062019-25.png'
with open(imgfile, 'rb') as fh:
raw_img = np.fromfile(fh, dtype=np.uint16, count=w * h)
raw_img = raw_img.reshape((h, w))
    # swap adjacent columns pairwise
final_img = raw_img[:, np.array([(i*2+1, i*2) for i in range(0, w//2)]).flatten()]
cv2.imshow('img', final_img)
cv2.waitKey()
cv2.imwrite(imgout, final_img)
if __name__ == '__main__':
main()
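# Added illustration (not part of the original script): the fancy index in
# main() swaps adjacent columns, e.g. for w == 6:
#
#     >>> np.array([(i * 2 + 1, i * 2) for i in range(3)]).flatten()
#     array([1, 0, 3, 2, 5, 4])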
|
faneshion/MatchZoo
|
matchzoo/tasks/__init__.py
|
Python
|
apache-2.0
| 72
| 0
|
from .classification import Classification
from .ranking import Ranking
|
mcstrother/dicom-sr-qi
|
inquiries/high_cases.py
|
Python
|
bsd-2-clause
| 7,573
| 0.010828
|
from srqi.core import inquiry
import matplotlib.pyplot as plt
import numpy as np
def get_accumulation_fig(proc):
fig = plt.figure()
plt.title("Accumulation During Procedure for Patient " + str(proc.PatientID) + " on " + str(proc.StudyDate))
event_starts = [e.DateTime_Started for e in proc.get_events()]
# plot doses
dose_ax = plt.subplot(311)
dose_ax.plot(event_starts,
np.cumsum([e.Dose_RP for e in proc.get_events()])
)
plt.ylabel('Dose (Gy)')
# plot frames
frames_ax = plt.subplot(312, sharex = dose_ax)
frames_ax.plot(event_starts,
np.cumsum([e.Number_of_Pulses for e in proc.get_events()])
)
plt.ylabel('# of Frames')
# plot mag
mag_ax = plt.subplot(313, sharex = dose_ax)
mag_ax.plot(event_starts,
[e.iiDiameter for e in proc.get_events()])
plt.ylim((200,500))
plt.ylabel('iiDiameter')
# plot the event type on top of the mag plot
a_events = [e for e in proc.get_events() if e.Irradiation_Event_Type =='Stationary Acquisition']
s_events= [e for e in proc.get_events() if e.Acquisition_Protocol=='Spot']
f_events = [e for e in proc.get_fluoro_events()]
o_events = [e for e in proc.get_events() if not (e.Irradiation_Event_Type=="Stationary Acquisition" or e.Acquisition_Protocol=="Spot" or e.Irradiation_Event_Type=="Fluoroscopy")]
if len(f_events)>0:
plt.scatter([e.DateTime_Started for e in f_events],
[e.iiDiameter for e in f_events],
marker='+', c='blue')
if len(a_events)>0:
collection = plt.scatter([e.DateTime_Started for e in a_events],
[e.iiDiameter for e in a_events],
marker='o', c='red')
collection.set_edgecolor('red')
if len(s_events)>0:
collection = plt.scatter([e.DateTime_Started for e in s_events],
[e.iiDiameter for e in s_events],
marker='o', c='yellow')
collection.set_edgecolor('yellow')
if len(o_events)>0:
collection = plt.scatter([e.DateTime_Started for e in o_events],
[e.iiDiameter for e in o_events],
marker='o', c='cyan')
collection.set_edgecolor('cyan')
# format xlabels
fig.autofmt_xdate()
return fig
class High_Cases(inquiry.Inquiry):
NAME = "High Cases"
description = """Finds and analyzes cases where the dose exceeds a specified limit
Data required:
DICOM-SR xml
"""
    LIMIT = inquiry.Inquiry_Parameter(5.0, "Dose Limit", "The dosage above which cases should be analyzed")
DATE_RANGE_START = inquiry.get_standard_parameter("DATE_RANGE_START")
DATE_RANGE_END = inquiry.get_standard_parameter("DATE_RANGE_END")
def run(self, procs, context, extra_procs):
high_cases = {}
for proc in procs:
total_dose = sum([e.Dose_RP for e in proc.get_events()])
if total_dose > self.LIMIT.value:
high_cases[proc] = {'total dose' : total_dose}
for proc in high_cases.keys():
high_cases[proc]['acquisition dose'] = sum([e.Dose_RP for e in proc.get_events() if e.Irradiation_Event_Type =='Stationary Acquisition'])
high_cases[proc]['spot dose'] = sum([e.Dose_RP for e in proc.get_events() if e.Acquisition_Protocol=='Spot'])
high_cases[proc]['fluoro dose'] = sum([e.Dose_RP for e in proc.get_fluoro_events()])
high_cases[proc]['acquisition frames'] = sum([e.Number_of_Pulses for e in proc.get_events() if e.Irradiation_Event_Type =='Stationary Acquisition'])
            high_cases[proc]['spot frames'] = sum([e.Number_of_Pulses for e in proc.get_events() if e.Acquisition_Protocol == 'Spot'])
            high_cases[proc]['fluoro frames'] = sum([e.Number_of_Pulses for e in proc.get_fluoro_events()])
high_cases[proc]['total frames'] = sum([e.Number_of_Pulses for e in proc.get_events()])
self.high_cases = high_cases
def get_text(self):
if len(self.high_cases) == 0:
return "No cases exceeding the dose limit found in the specified date range."
else:
return ''
def get_figures(self):
hc = self.high_cases
figs = []
pies = []
for proc in hc.keys():
# Pie chart of dosages by modality
fig = plt.figure()
plt.title("Dose (Gy) By Modality Patient " + str(proc.PatientID) + " on " + str(proc.StudyDate))
def my_autopct(pct):
total=hc[proc]['total dose']
val=pct*total/100.0
return '{p:.2f}% ({v:.3f} Gy)'.format(p=pct,v=val)
other_dose = hc[proc]['total dose'] - hc[proc]['spot dose'] - hc[proc]['acquisition dose'] - hc[proc]['fluoro dose']
if other_dose <0:
other_dose = 0
plt.pie((hc[proc]['acquisition dose'],
hc[proc]['spot dose'],
hc[proc]['fluoro dose'],
other_dose),
labels = ('acquisition','spot','fluoro ', 'other'),
autopct = my_autopct)
figs.append(fig)
# Pie chart of frame counts by modality
fig = plt.figure()
plt.title("Frame Count by Modality for Patient " + str(proc.PatientID) + " on " + str(proc.StudyDate))
def my_autopct(pct):
total=hc[proc]['total frames']
val=pct*total/100.0
return '{p:.2f}% ({v:.0f})'.format(p=pct,v=val)
other_frames = hc[proc]['total frames'] - (hc[proc]['spot frames'] + hc[proc]['acquisition frames'] + hc[proc]['fluoro frames'])
if other_frames < 0:
other_frames = 0
plt.pie((hc[proc]['acquisition frames'],
hc[proc]['spot frames'],
hc[proc]['fluoro frames'],
other_frames),
labels = ('acquisition','spot','fluoro', 'other'),
autopct = my_autopct)
figs.append(fig)
# dose/frame accumulation plot
figs.append(get_accumulation_fig(proc))
return figs
def get_tables(self):
out = []
hc = self.high_cases
for proc in self.high_cases.keys():
heading = ["Patient " + str(proc.PatientID) + " on " + str(proc.StudyDate),
                       'fluoro', 'acquisition', 'spot', 'other', 'total']
doses = ['Dose (Gy)', hc[proc]['fluoro dose'],
hc[proc]['acquisition dose'],
hc[proc]['spot dose'],
hc[proc]['total dose'] - hc[proc]['acquisition dose'] - hc[proc]['spot dose'] - hc[proc]['fluoro dose'],
hc[proc]['total dose']]
frames = ['Frame Count', hc[proc]['fluoro frames'],
hc[proc]['acquisition frames'],
hc[proc]['spot frames'],
hc[proc]['total frames'] - hc[proc]['spot frames'] - hc[proc]['acquisition frames'] - hc[proc]['fluoro frames'],
hc[proc]['total frames']
]
out.append([heading, doses, frames])
return out
|
mancoast/pycdc
|
tests/22_test_expressions.ref.py
|
Python
|
gpl-3.0
| 119
| 0.016807
|
def _lsbStrToInt(str):
    return ord(str[0]) + (ord(str[1]) << 8) + (ord(str[2]) << 16) + (ord(str[3]) << 24)
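# Added note (illustration only): the expression above decodes four bytes as a
# little-endian unsigned 32-bit integer; the standard-library equivalent is
# struct.unpack('<I', s)[0] for a 4-byte string s.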
|
|
kerautret/ipolDevel
|
ipol_demo/lib/base_app.py
|
Python
|
gpl-3.0
| 27,462
| 0.003459
|
"""
base IPOL demo web app
includes interaction and rendering
"""
# TODO add steps (cf amazon cart)
import shutil
from mako.lookup import TemplateLookup
import traceback
import cherrypy
import os.path
import math
import copy
import threading
import time
from mako.exceptions import RichTraceback
from . import http
from . import config
from . import archive
from .empty_app import empty_app
from .image import thumbnail, image
from .misc import prod
from shutil import rmtree
class AppPool(object):
"""
App object pool used by the init_func decorator to
obtain instances of the app object
Used to fix a bug
https://tools.ipol.im/mailman/archive/discuss/2012-December/000969.html
"""
pool_lock = threading.Lock()
class __AppPool(object):
"""
App Pool singleton pattern implementation
"""
pool = {}
def pool_tidyup(self):
"""
Removes old app objects from the pool, to save memory
"""
keys_to_remove = []
# Get keys of the objects to remove
for key in self.pool.keys():
entry = self.pool[key]
timestamp = entry['timestamp']
if time.time() - timestamp > 7200: # two hours
keys_to_remove.append(key)
# Remove old objects
for key in keys_to_remove:
del self.pool[key]
def get_app(self, exec_id):
"""
Obtains the app object associated to the exec_id ID
"""
if exec_id in self.pool:
return self.pool[exec_id]['app_object']
else:
return None
def add_app(self, exec_id, app_object):
"""
Adds an app object and to the pool.
The creation time is also stored
"""
# Remove stored old app objects
self.pool_tidyup()
# Add app_object and timestamp
entry = {'app_object': app_object,
'timestamp': time.time()}
self.pool[exec_id] = entry
# Singleton object instance
instance = None
@staticmethod
def get_instance():
"""
Get an app pool singleton instance
"""
try:
# Acquire lock
AppPool.pool_lock.acquire()
# Set singleton object instance
if AppPool.instance is None:
AppPool.instance = AppPool.__AppPool()
finally:
# Release lock
AppPool.pool_lock.release()
return AppPool.instance
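# Added usage sketch (illustration only): the pool is keyed by execution id,
# so repeated requests carrying the same key reuse one app object.
#
#     pool = AppPool.get_instance()
#     pool.add_app('abc123', app_object)
#     assert pool.get_app('abc123') is app_object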
#
# ACTION DECORATOR TO HANDLE GENERIC SETTINGS
#
def init_app(func):
"""
decorator to reinitialize the app with the current request key
"""
def init_func(self, *args, **kwargs):
"""
original function, modified
"""
# key check
key = kwargs.pop('key', None)
# It might happen that here we receive a list with several copies of
# the key, if the demo passes it more than once in the URL.
#
# In that case, just use the first entry of the list.
if isinstance(key, list):
if len(key) > 0:
key = key[0]
# Obtain a copy of the object and use it instead of self
# Bug fix
pool = AppPool.get_instance() # Singleton pattern
#
self2 = pool.get_app(key)
if self2 is None:
self2 = base_app(self.base_dir)
self2.__class__ = self.__class__
self2.__dict__.update(self.__dict__)
pool.add_app(key, self2)
self2.init_key(key)
self2.init_cfg()
# public_archive cookie setup
# default value
        if not cherrypy.request.cookie.get('public_archive', '1') == '0':
            cherrypy.response.cookie['public_archive'] = '1'
self2.cfg['meta']['public'] = True
else:
self2.cfg['meta']['public'] \
= (cherrypy.request.cookie['public_archive'] != '0')
# user setting
if kwargs.has_key('set_public_archive'):
if kwargs.pop('set_public_archive') != '0':
cherrypy.response.cookie['public_archive'] = '1'
self2.cfg['meta']['public'] = True
else:
cherrypy.response.cookie['public_archive'] = '0'
self2.cfg['meta']['public'] = False
# TODO: dirty hack, fixme
ar_path = self2.archive_dir + archive.key2path(self2.key)
if os.path.isdir(ar_path):
ar = archive.bucket(path=self2.archive_dir,
cwd=self2.work_dir,
key=self2.key)
ar.cfg['meta']['public'] = self2.cfg['meta']['public']
ar.cfg.save()
archive.index_add(self2.archive_index,
buc=ar,
path=self2.archive_dir)
x = func(self2, *args, **kwargs)
self2.cfg.save()
return x
return init_func
class base_app(empty_app):
""" base demo app class with a typical flow """
# default class attributes
# to be modified in subclasses
title = "base demo"
input_nb = 1 # number of input files
input_max_pixels = 1024 * 1024 # max size of an input image
input_max_weight = 5 * 1024 * 1024 # max size (in bytes) of an input file
input_dtype = '1x8i' # input image expected data type
input_ext = '.tiff' # input image expected extention (ie. file format)
timeout = 60 # subprocess execution timeout
is_test = True
def __init__(self, base_dir):
"""
app setup
base_dir is supposed to be received from a subclass
"""
# setup the parent class
empty_app.__init__(self, base_dir)
cherrypy.log("base_dir: %s" % self.base_dir,
context='SETUP/%s' % self.id, traceback=False)
# local base_app templates folder
tmpl_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'template')
# first search in the subclass template dir
self.tmpl_lookup = TemplateLookup( \
directories=[self.base_dir + 'template', tmpl_dir],
input_encoding='utf-8',
output_encoding='utf-8', encoding_errors='replace')
#
# TEMPLATES HANDLER
#
def tmpl_out(self, tmpl_fname, **kwargs):
"""
templating shortcut, populated with the default app attributes
"""
# pass the app object
kwargs['app'] = self
# production flag
kwargs['prod'] = (cherrypy.config['server.environment']
== 'production')
tmpl = self.tmpl_lookup.get_template(tmpl_fname)
# Render the template
# If an exception occurs, render an error page showing the traceback
try:
return tmpl.render(**kwargs)
except:
traceback_string = "<h1>IPOL template rendering error</h1>"
traceback_string += "<h2>Template: %s</h2>" % tmpl_fname
traceback_string += "<h2>kwargs: %s</h2>" % kwargs
traceback = RichTraceback()
for (filename, lineno, function, line) in traceback.traceback:
traceback_string += \
"File <b>%s</b>, line <b>%d</b>, in <b>%s</b><br>" % \
(filename, lineno, function)
traceback_string += line + "<br><br>"
traceback_string += "%s: %s" % \
(str(traceback.error.__class__.__name__), \
traceback.error) + "<br>"
return traceback_string
#
# INDEX
#
def index(self):
"""
demo presentation and input menu
"""
# read the input index as a dict
inputd = config.file_dict(self.input_dir)
tn_size = int(cherrypy.config.get('input.thumbnail.size', '192'))
# TODO: build via list-comprehension
for (input_id, inp
|
okuchaiev/f-lm
|
language_model_test.py
|
Python
|
mit
| 1,667
| 0.0006
|
import random
import numpy as np
import tensorflow as tf
from language_model import LM
from hparams import HParams
def get_test_hparams():
return HParams(
batch_size=21,
num_steps=12,
num_shards=2,
num_layers=1,
learning_rate=0.2,
max_grad_norm=1.0,
vocab_size=1000,
        emb_size=14,
state_size=17,
projected_size=15,
num_sampled=500,
num_gpus=1,
average_params=True,
run_profiler=False,
)
def simple_data_generator(batch_size, num_steps):
x = np.zeros([batch_size, num_steps], np.int32)
y = np.zeros([batch_size, num_steps], np.int32)
for i in range(batch_size):
first = random.randrange(0, 20)
for j in range(num_steps):
x[i, j] = first + j
y[i, j] = first + j + 1
return x, y, np.ones([batch_size, num_steps], np.uint8)
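# Added illustration (not part of the original test): each row of x is a run of
# consecutive token ids and y is x shifted one step ahead, e.g. x[i] = [7, 8, 9, ...]
# and y[i] = [8, 9, 10, ...], so the model only has to learn "next id = id + 1".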
class TestLM(tf.test.TestCase):
def test_lm(self):
hps = get_test_hparams()
with tf.variable_scope("model"):
model = LM(hps)
with self.test_session() as sess:
tf.initialize_all_variables().run()
tf.initialize_local_variables().run()
loss = 1e5
for i in range(50):
x, y, w = simple_data_generator(hps.batch_size, hps.num_steps)
loss, _ = sess.run([model.loss, model.train_op], {model.x: x, model.y: y, model.w: w})
print("%d: %.3f %.3f" % (i, loss, np.exp(loss)))
if np.isnan(loss):
print("NaN detected")
break
self.assertLess(loss, 1.0)
|
Leon109/IDCMS-Web
|
web/app/auth/errors.py
|
Python
|
apache-2.0
| 349
| 0.011461
|
from flask import render_template
from . import auth
@auth.app_errorhandler(403)
def forbidden(e):
    return render_template('403.html'), 403
@auth.app_errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@auth.app_errorhandler(500)
def internal_server_error(e):
    return render_template('500.html'), 500
|
dungeonsnd/test-code
|
dev_examples/twisted_test/other_echo/echoserv_udp.py
|
Python
|
gpl-3.0
| 510
| 0.009804
|
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
# Here's a UDP version of the simplest possible protocol
class EchoUDP(DatagramProtocol):
def datagramReceived(self, datagram, address):
self.transport.write(datagram, address)
def main():
reactor.listenUDP(8000, EchoUDP())
reactor.run()
if __name__ == '__main__':
main()
|
daniel-dinu/rational-python
|
test_rational/test_rational.py
|
Python
|
mit
| 32,468
| 0.001016
|
import unittest2
from unittest2 import TestCase
from rational.rational import gcd
from rational.rational import Rational
__author__ = 'Daniel Dinu'
class TestRational(TestCase):
def setUp(self):
self.known_values = [(1, 2, 1, 2),
(-1, 2, -1, 2),
(1, -2, -1, 2),
(-1, -2, 1, 2),
(2, 4, 1, 2),
(-2, 4, -1, 2),
(2, -4, -1, 2),
(-2, -4, 1, 2),
(2, 1, 2, 1),
(-2, 1, -2, 1),
(2, -1, -2, 1),
(-2, -1, 2, 1),
(4, 2, 2, 1),
(-4, 2, -2, 1),
(4, -2, -2, 1),
(-4, -2, 2, 1)]
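        # Added note (illustration only): each tuple is (numerator, denominator,
        # expected_numerator, expected_denominator); the expected pair is the input
        # reduced by the gcd with the sign moved onto the numerator, e.g. (2, -4)
        # normalizes to (-1, 2).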
def tearDown(self):
del self.known_values
def test_constructor_numerator_type_error(self):
self.assertRaises(TypeError, Rational, 1.2)
def test_constructor_denominator_type_error(self):
self.assertRaises(TypeError, Rational, 1, 1.2)
def test_constructor_denominator_zero_division_error(self):
numerator = 1
denominator = 0
with self.subTest(numerator=numerator, denominator=denominator):
self.assertRaises(ZeroDivisionError, Rational, numerator, denominator)
numerator = Rational()
denominator = 0
with self.subTest(numerator=numerator, denominator=denominator):
self.assertRaises(ZeroDivisionError, Rational, numerator, denominator)
numerator = Rational()
denominator = Rational()
with self.subTest(numerator=numerator, denominator=denominator):
self.assertRaises(ZeroDivisionError, Rational, numerator, denominator)
def test_constructor_numerator(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
self.assertEqual(expected_numerator, r.numerator)
def test_constructor_denominator(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
self.assertEqual(expected_denominator, r.denominator)
def test_constructor_transform(self):
test_constructor_transform_values = [(Rational(1, 2), Rational(1, 2), Rational(1)),
(Rational(1, 2), Rational(1, 4), Rational(2)),
(Rational(1, 4), Rational(1, 2), Rational(1, 2)),
(Rational(-1, 2), Rational(1, 2), Rational(-1)),
(Rational(-1, 2), Rational(1, 4), Rational(-2)),
(Rational(-1, 4), Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(-1, 2), Rational(-1)),
(Rational(1, 2), Rational(-1, 4), Rational(-2)),
(Rational(1, 4), Rational(-1, 2), Rational(-1, 2)),
(Rational(-1, 2), Rational(-1, 2), Rational(1)),
(Rational(-1, 2), Rational(-1, 4), Rational(2)),
(Rational(-1, 4), Rational(-1, 2), Rational(1, 2))]
for a, b, expected_result in test_constructor_transform_values:
with self.subTest(a=a, b=b, expected_result=expected_result):
computed_result = Rational(a, b)
self.assertEqual(expected_result, computed_result)
def test_transform(self):
test_transform_values = [(1, 2, (1, 2)),
(2, 4, (2, 4)),
(-1, 2, (-1, 2)),
(-2, 4, (-2, 4)),
(1, -2, (1, -2)),
(2, -4, (2, -4)),
(-1, -2, (-1, -2)),
(-2, -4, (-2, -4)),
(Rational(1, 2), 1, (1, 2)),
(Rational(1, 2), 2, (1, 4)),
(Rational(-1, 2), 1, (-1, 2)),
(Rational(-1, 2), 2, (-1, 4)),
(Rational(1, -2), 1, (-1, 2)),
(Rational(1, -2), 2, (-1, 4)),
(Rational(1, 2), -1, (1, -2)),
(Rational(1, 2), -2, (1, -4)),
(Rational(-1, 2), -1, (-1, -2)),
(Rational(-1, 2), -2, (-1, -4)),
(1, Rational(1, 2), (2, 1)),
(2, Rational(1, 2), (4, 1)),
(-1, Rational(1, 2), (-2, 1)),
(-2, Rational(1, 2), (-4, 1)),
(1, Rational(-1, 2), (2, -1)),
(2, Rational(-1, 2), (4, -1)),
(1, Rational(1, -2), (2, -1)),
(2, Rational(1, -2), (4, -1)),
(-1, Rational(1, 2), (-2, 1)),
(-2, Rational(1, 2), (-4, 1)),
(Rational(1, 2), Rational(1, 2), (2, 2)),
(Rational(1, 2), Rational(1, 4), (4, 2)),
(Rational(1, 4), Rational(1, 2), (2, 4)),
(Rational(-1, 2), Rational(1, 2), (-2, 2)),
(Rational(-1, 2), Rational(1, 4), (-4, 2)),
(Rational(-1, 4), Rational(1, 2), (-2, 4)),
(Rational(1, 2), Rational(-1, 2), (2, -2)),
(Rational(1, 2), Rational(-1, 4), (4, -2)),
(Rational(1, 4), Rational(-1, 2), (2, -4)),
(Rational(-1, 2), Rational(-1, 2), (-2, -2)),
(Rational(-1, 2), Rational(-1, 4), (-4, -2)),
(Rational(-1, 4), Rational(-1, 2), (-2, -4))]
for a, b, expected_result in test_transform_values:
with self.subTest(a=a, b=b, expected_result=expected_result):
computed_result = Rational.transform(a, b)
self.assertEqual(expected_result, computed_result)
def test_gcd(self):
gcd_test_values = [(0, 0, 0),
(0, 1, 1),
(1, 0, 1),
(0, -1, -1),
(-1, 0, -1),
(2, 4, 2),
(-2, 4, 2),
(-2, -4, -2),
(42, 30, 6),
(42, -30, -6),
(-42, -30, -6)]
for a, b, expected_gcd in gcd_test_values:
with self.subTest(a=a, b=b, expected_gcd=expected_gcd):
computed_gcd = gcd(a, b)
self.assertEqual(expected_gcd, computed_gcd)
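    # Editor's note (hedged, inferred from the table above): this gcd
    # convention returns a result carrying the sign of the second argument
    # (e.g. gcd(42, -30) == -6), falling back to the first argument's sign
    # when the second is zero.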
def test_value(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
expected_value = expected_numerator / (expected_denominator * 1.0)
self.assertEqual(expected_value, r.value)
def test_quotient(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
            with self.subTest(numerator=numerator, denominator=denominator):
|
bszcz/python
|
tutorial_notes.py
|
Python
|
mit
| 4,575
| 0.007213
|
# Copyright (c) 2020 Bartosz Szczesny <bszcz@bszcz.org>
# This program is free software under the MIT license.
print('\n# avoid new line at the beginning')
s = """\
test
"""
print(s)
print('\n# strings are immutable')
s = 'string'
try:
s[1] = 'p'
except TypeError as e:
print(e)
print('\n# enumerate() function')
for n, c in enumerate(['a', 'b', 'c']):
print(n, c)
print('\n# list() materializes an iterator')
print(list(range(10)))
print('\n# else clause in loops')
for i in range(10):
    if i == 2:
break
else:
print('loop did not break')
print('\n# docstrings')
def documented():
"This function is documented."
pass
# now can run: help(documented)
print(documented.__doc__)
print('\n# unpacking arguments')
def unpack(n, c):
print('unpacked:', n, c)
arg_list = [1, 'a']
arg_dict = {'n': 1, 'c': 'a'}
unpack(*arg_list)
unpack(**arg_dict)
print('\n# function annotations')
def annotated(i: int, s: str) -> str:
return 's'
print(annotated.__annotations__)
print('\n# not feeling myself')
class NotSelf():
def __init__(o, n):
o.n = n
def get_n(o):
return o.n
ns = NotSelf(10)
print(ns.get_n())
print('\n# lists operations')
print("""\
a = list()
a.copy() => a[:] # return shallow copy
a.clear() => del a[:]
a.append(item)   => a[len(a):] = [item]
a.extend(iterable) => a[len(a):] = iterable
""")
print('\n# set comprehension')
a = 'abracadabra'
s = {x for x in a}
print(a, '->', s)
print('\n# keys can be any immutable type')
d = dict()
d[('a', 1)] = 100
d[('b', 2)] = 200
print(d)
print('\n# dictionary comprehension')
d = {x: 'got ' + str(x) for x in range(3)}
print(d)
print('\n# simple strings as keys')
d = dict(a=1, b=2, c=3)
print(d)
print('\n# reversed() function')
a = reversed(range(10)) # iterator
print(list(a))
print('\n# reload import')
# reload a module without
# restarting the interpreter
# or an already running script
import math
import importlib
importlib.reload(math)
print('\n# dir() function')
import builtins
print(dir()) # currently defined
print()
print(dir(math)) # defined by the module
print()
print(dir(builtins)) # built-in objects
print('\n# string formatting')
c = 299_792_458
print(f'Speed of light is {c} m/s.')
print('Speed of light is {c:.0f} km/s.'.format(c=c/1000))
pi = 3.14159
print(f'Pi is {pi:.2f}.')
d = {'a': 1, 'b': 2}
print('A: {a}, B: {b}.'.format(**d))
print('\n# exceptions')
class E1(Exception):
pass
class E2(E1):
pass
for e in [E1, E2, Exception]:
try:
raise e # no need for ()
except E1: # will catch E2 as well
print('E1.')
except E2:
print('E2.')
except: # will catch anything
print('Exception.')
finally:
print('Finally.')
print()
try:
pass
except:
pass
else: # if no exception was raised
print('No exception.')
finally:
print('Finally.')
print()
try:
try:
raise E1
except E2:
print('E2.')
except: # will catch anything
raise # re-raise
finally:
print('Finally (E2).')
except E1:
print('E1.')
finally:
print('Finally (E1).')
print('\n# global and nonlocal scope')
def scope_test():
def do_local():
s = 'local'
def do_nonlocal():
nonlocal s
s = 'nonlocal'
def do_global():
global s
s = 'global'
s = 's'
do_local()
print(s)
do_nonlocal()
print(s)
do_global()
print(s)
scope_test()
print(s)
print('\n# instance and subclass')
print(isinstance(1, int))
print(isinstance(1.0, int))
print(issubclass(bool, int))
print('\n# struct')
class Struct:
pass
s = Struct()
s.x = 1
s.y = 2
print(s.x, s.y)
print('\n# generator')
def rev(s):
for i in range(len(s) - 1, -1, -1):
yield s[i]
for c in rev('abc'):
print(c)
print('\n# generator expression')
# like list comprehension
# but with parentheses
s = sum(i * i for i in range(10))
print(s)
print('\n# regex')
import re
# can use \1 in regex string
r = re.sub(r'([0-9]) \1', r'\1', '1 2 2 3 3 3')
print(r)
print('\n# array')
# store numbers of the same type efficiently
import sys
from array import array
l = list([1, 2, 3, 4, 5])
a = array('B', [1, 2, 3, 4, 5]) # B - unsigned byte
print(sys.getsizeof(l))
print(sys.getsizeof(a))
print('\n# float as ratio')
pi = 3.14159
print(pi.as_integer_ratio())
print('\n# float as hex')
pi = 3.14159
print(pi.hex())
print(float.fromhex('0x1.921f9f01b866ep+1'))
print('\n# precise sum')
a = [0.3, 0.3, 0.3, 0.1]
print(sum(a) == 1)
print(math.fsum(a) == 1)
|
thumbor-community/librato
|
setup.py
|
Python
|
mit
| 441
| 0
|
# coding: utf-8
from setuptools import setup, find_packages
setup(
name='tc_librato',
version="0.0.1",
description='Thumbor Librato extensions',
author='Peter Schröder, Sebastian Eichner',
author_email='peter.schroeder@jimdo.com, sebastian.eichner@jimdo.com',
zip_safe=False,
include_package_data=True,
    packages=find_packages(),
install_requires=[
'thumbor',
'librato-metrics',
]
)
|
pivstone/andromeda
|
registry/middleware.py
|
Python
|
mit
| 797
| 0.00129
|
# coding=utf-8
from django import http
from django.conf import settings
from django.contrib.auth import authenticate
from django.utils.cache import patch_vary_headers
from registry.exceptions import RegistryException
__author__ = 'pivstone'
class CustomHeaderMiddleware(object):
"""
    Add custom headers.
"""
def process_response(self, request, response):
for key, value in settings.CUSTOM_HEADERS.items():
response[key] = value
return response
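# Editor's note (hedged sketch): CUSTOM_HEADERS is assumed to be a plain dict
# in the Django settings module, e.g. for a Docker registry:
# CUSTOM_HEADERS = {'Docker-Distribution-Api-Version': 'registry/2.0'}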
class ExceptionsHandleMiddleware(object):
"""
    Unified error responses.
"""
def process_exception(self, request, exception):
if isinstance(exception, RegistryException):
response = http.JsonResponse(status=exception.status, data=exception.errors())
return response
|
munhyunsu/Hobby
|
Signal/signal_propagation_shell.py
|
Python
|
gpl-3.0
| 819
| 0.003663
|
import os
import sys
import signal
import time
import subprocess
WHO = None
def handler(signum, frame):
global WHO
    print('Signal handler', signum, WHO, frame)
print('Disable handler', signum, WHO, frame)
signal.signal(signal.SIGINT, signal.SIG_DFL)
def main(argv):
global WHO
WHO = argv[1]
if WHO == 'parent':
signal.signal(signal.SIGINT, handler)
p = subprocess.Popen('python3 signal_propagation.py child',
shell=True)
for index in range(0, 10):
time.sleep(1)
        print('Sleep', index, WHO)
if WHO == 'parent':
p.send_signal(signal.SIGINT)
p.communicate()
else:
while True:
time.sleep(1)
print('Sleep 1 infinity')
if __name__ == '__main__':
main(sys.argv)
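# Editor's usage note (hedged): run as
#   python3 signal_propagation_shell.py parent
# The parent installs the SIGINT handler, spawns a child through the shell,
# sleeps ~10 s, then forwards SIGINT to the child. Note the Popen command
# above references a sibling script, signal_propagation.py, not this file.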
|
cmjatai/cmj
|
sapl/comissoes/migrations/0019_auto_20181214_1023.py
|
Python
|
gpl-3.0
| 509
| 0.001972
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-14 12:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('comissoes', '0018_auto_20180924_1724'),
]
operations = [
migrations.AlterField(
model_name='reuniao',
name='hora_fim',
            field=models.TimeField(blank=True, null=True, verbose_name='Horário de Término (hh:mm)'),
),
]
|
rembo10/headphones
|
lib/feedparser/exceptions.py
|
Python
|
gpl-3.0
| 1,957
| 0.000511
|
# Exceptions used throughout feedparser
# Copyright 2010-2021 Kurt McKee <contactme@kurtmckee.org>
# Copyright 2002-2008 Mark Pilgrim
# All rights reserved.
#
# This file is a part of feedparser.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__all__ = [
'ThingsNobodyCaresAboutButMe',
'CharacterEncodingOverride',
'CharacterEncodingUnknown',
'NonXMLContentType',
'UndeclaredNamespace',
]
class ThingsNobodyCaresAboutButMe(Exception):
pass
class CharacterEncodingOverride(ThingsNobodyCaresAboutButMe):
pass
class CharacterEncodingUnknown(ThingsNobodyCaresAboutButMe):
pass
class NonXMLContentType(ThingsNobodyCaresAboutButMe):
pass
class UndeclaredNamespace(Exception):
pass
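# Editor's note (hedged sketch): because the three encoding exceptions above
# subclass ThingsNobodyCaresAboutButMe, a caller can cover them with one
# handler; the parse() call below is hypothetical.
#
# try:
#     parse(source)
# except ThingsNobodyCaresAboutButMe:
#     pass  # advisory conditions; parsing already produced a result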
|
mhugent/Quantum-GIS
|
python/plugins/processing/interface.py
|
Python
|
gpl-2.0
| 38
| 0
|
# -*- coding: utf-8 -*-
iface = None
|
mpetyx/palmdrop
|
venv/lib/python2.7/site-packages/cms/plugins/flash/migrations/0001_initial.py
|
Python
|
apache-2.0
| 3,244
| 0.008323
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Flash'
db.create_table('cmsplugin_flash', (
('cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
('file', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
('width', self.gf('django.db.models.fields.CharField')(max_length=6)),
('height', self.gf('django.db.models.fields.CharField')(max_length=6)),
))
db.send_create_signal('flash', ['Flash'])
def backwards(self, orm):
# Deleting model 'Flash'
        db.delete_table('cmsplugin_flash')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'flash.flash': {
'Meta': {'object_name': 'Flash', 'db_table': "'cmsplugin_flash'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'height': ('django.db.models.fields.CharField', [], {'max_length': '6'}),
'width': ('django.db.models.fields.CharField', [], {'max_length': '6'})
}
}
complete_apps = ['flash']
|
carboncointrust/CarboncoinCore
|
contrib/linearize/linearize.py
|
Python
|
mit
| 3,360
| 0.034226
|
#!/usr/bin/python
#
# linearize.py: Construct a linear, no-fork, best version of the blockchain.
#
#
# Copyright (c) 2013 The Carboncoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import json
import struct
import re
import base64
import httplib
import sys
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
class CarboncoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblock(self, hash, verbose=True):
return self.rpc('getblock', [hash, verbose])
def getblockhash(self, index):
return self.rpc('getblockhash', [index])
def getblock(rpc, settings, n):
hash = rpc.getblockhash(n)
hexdata = rpc.getblock(hash, False)
data = hexdata.decode('hex')
return data
def get_blocks(settings):
rpc = CarboncoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpassword'])
outf = open(settings['output'], 'ab')
for height in xrange(settings['min_height'], settings['max_height']+1):
data = getblock(rpc, settings, height)
outhdr = settings['netmagic']
outhdr += struct.pack("<i", len(data))
outf.write(outhdr)
outf.write(data)
if (height % 1000) == 0:
sys.stdout.write("Wrote block " + str(height) + "\n")
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: linearize.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'netmagic' not in settings:
settings['netmagic'] = 'f9beb4d9'
if 'output' not in settings:
settings['output'] = 'bootstrap.dat'
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 8332
if 'min_height' not in settings:
settings['min_height'] = 0
if 'max_height' not in settings:
settings['max_height'] = 279000
if 'rpcuser' not in settings or 'rpcpassword' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['netmagic'] = settings['netmagic'].decode('hex')
settings['port'] = int(settings['port'])
settings['min_height'] = int(settings['min_height'])
settings['max_height'] = int(settings['max_height'])
get_blocks(settings)
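# Editor's addition (hedged): an example CONFIG-FILE accepted by the
# key=value parser above; all values are illustrative and mirror the
# defaults applied in __main__.
#
# host=127.0.0.1
# port=8332
# rpcuser=carbonrpc
# rpcpassword=secret
# netmagic=f9beb4d9
# output=bootstrap.dat
# min_height=0
# max_height=279000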
|
assaabloy-ppi/salt-channel-python
|
saltchannel/saltlib/pure_pynacl/tweetnacl.py
|
Python
|
mit
| 25,273
| 0.005342
|
# -*- coding: utf-8 -*-
# import python libs
import os
from array import array
# import pure_pynacl libs
from saltchannel.saltlib.pure_pynacl import lt_py3, lt_py33
from saltchannel.saltlib.pure_pynacl import TypeEnum, integer, Int, IntArray
class u8(Int):
'''unsigned char'''
bits = array('B').itemsize*8
mask = (1 << bits) - 1
signed = False
order = TypeEnum.u8
def __repr__(self):
return 'u8(%s)' % integer.__repr__(self)
class u32(Int):
'''unsigned long'''
bits = array('L').itemsize*8
mask = (1 << bits) - 1
signed = False
order = TypeEnum.u32
def __repr__(self):
return 'u32(%s)' % integer.__repr__(self)
class u64(Int):
'''unsigned long long'''
bits = array('L' if lt_py33 else 'Q').itemsize*8
mask = (1 << bits) - 1
signed = False
order = TypeEnum.u64
def __repr__(self):
return 'u64(%s)' % integer.__repr__(self)
class i64(Int):
'''long long'''
bits = array('l' if lt_py33 else 'q').itemsize*8
mask = (1 << bits - 1) - 1
    signed = True
order = TypeEnum.i64
def __repr__(self):
return 'i64(%s)' % integer.__repr__(self)
class gf(IntArray):
def __init__(self, init=()):
IntArray.__init__(self, i64, init=init, size=16)
def randombytes(c, s):
'''
insert s random bytes into c
'''
if lt_py3:
c[:s] = bytearray(os.urandom(s))
else:
c[:s] = os.urandom(s)
_0 = IntArray(u8, size=16)
_9 = IntArray(u8, size=32, init=[9])
gf0 = gf()
gf1 = gf([1])
_121665 = gf([0xDB41, 1])
D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203])
D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406])
X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169])
Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666])
I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83])
def L32(x, c):
'''static u32 L32(u32 x, int c)'''
return (u32(x) << c) | ((u32(x) & 0xffffffff) >> (32 - c))
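# Editor's note: L32 is a 32-bit left rotation; e.g. L32(0x80000000, 1) == 1.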
def ld32(x):
'''u32 ld32(const u8*x)'''
u = u32(x[3])
u = (u << 8) | u32(x[2])
u = (u << 8) | u32(x[1])
return (u << 8) | u32(x[0])
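# Editor's note: ld32 is a little-endian 32-bit load with x[0] least
# significant; e.g. ld32([0x78, 0x56, 0x34, 0x12]) == 0x12345678.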
def dl64(x):
'''u64 dl64(const u8*x)'''
u = u64()
for i in range(8): u = (u << 8) | u8(x[i])
return u
def st32(x, u):
'''void st32(u8*x, u32 u)'''
for i in range(4): x[i] = u8(u); u >>= 8
return x
def ts64(x, u):
'''void ts64(u8*x, u64 u)'''
for i in range(7, -1, -1): x[i] = u8(u); u >>= 8
return x
def vn(x, y, n):
'''int vn(const u8*x, const u8*y, int n)'''
d = u32()
for i in range(n): d |= x[i] ^ y[i]
return (1 & ((d - 1) >> 8)) - 1
def crypto_verify_16_tweet(x, y):
'''int crypto_verify_16_tweet(const u8*x, const u8*y)'''
return vn(x, y, 16)
def crypto_verify_32_tweet(x, y):
'''int crypto_verify_32_tweet(const u8*x, const u8*y)'''
return vn(x, y, 32)
def core(out, in_, k, c, h):
'''void core(u8*out, const u8*in, const u8*k, const u8*c, int h)'''
w = IntArray(u32, size=16)
x = IntArray(u32, size=16)
y = IntArray(u32, size=16)
t = IntArray(u32, size=4)
for i in range(4):
x[5*i] = ld32(c[4*i:])
x[1 + i] = ld32(k[4*i:])
x[6 + i] = ld32(in_[4*i:])
x[11 + i] = ld32(k[16 + 4*i:])
for i in range(16): y[i] = x[i]
for i in range(20):
for j in range(4):
for m in range(4): t[m] = x[(5*j + 4*m)%16]
t[1] ^= L32(t[0] + t[3], 7)
t[2] ^= L32(t[1] + t[0], 9)
t[3] ^= L32(t[2] + t[1],13)
t[0] ^= L32(t[3] + t[2],18)
for m in range(4): w[4*j + (j + m)%4] = t[m]
for m in range(16): x[m] = w[m]
if h:
for i in range(16): x[i] += y[i]
for i in range(4):
x[5*i] -= ld32(c[4*i:])
x[6+i] -= ld32(in_[4*i:])
for i in range(4):
out[4*i:] = st32(out[4*i:], x[5*i])
out[16 + 4*i:] = st32(out[16 + 4*i:], x[6 + i])
else:
for i in range(16):
out[4*i:] = st32(out[4*i:], x[i] + y[i])
def crypto_core_salsa20_tweet(out, in_, k, c):
'''int crypto_core_salsa20_tweet(u8*out, const u8*in, const u8*k, const u8*c)'''
core(out, in_, k, c, False)
return 0
def crypto_core_hsalsa20_tweet(out, in_, k, c):
'''int crypto_core_hsalsa20_tweet(u8*out, const u8*in, const u8*k, const u8*c)'''
core(out, in_, k, c, True)
return 0
sigma = IntArray(u8, size=16, init=b'expand 32-byte k')
def crypto_stream_salsa20_tweet_xor(c, m, b, n, k):
'''int crypto_stream_salsa20_tweet_xor(u8*c, const u8*m, u64 b, const u8*n, const u8*k)'''
z = IntArray(u8, size=16)
x = IntArray(u8, size=64)
if not b: return 0
for i in range(8): z[i] = n[i]
c_off = 0 ; m_off = 0
while b >= 64:
crypto_core_salsa20_tweet(x, z, k, sigma)
for i in range(64): c[i + c_off] = (m[i + m_off] if m else 0) ^ x[i]
u = u32(1)
for i in range(8, 16):
u += u32(z[i])
z[i] = u
u >>= 8
b -= 64
c_off += 64
if m: m_off += 64
if b:
crypto_core_salsa20_tweet(x, z, k, sigma)
for i in range(b): c[i + c_off] = (m[i + m_off] if m else 0) ^ x[i]
return 0
def crypto_stream_salsa20_tweet(c, d, n, k):
'''int crypto_stream_salsa20_tweet(u8*c, u64 d, const u8*n, const u8*k)'''
return crypto_stream_salsa20_tweet_xor(c, IntArray(u8), d, n, k)
def crypto_stream_xsalsa20_tweet(c, d, n, k):
'''int crypto_stream_xsalsa20_tweet(u8*c, u64 d, const u8*n, const u8*k)'''
s = IntArray(u8, size=32)
crypto_core_hsalsa20_tweet(s, n, k, sigma)
return crypto_stream_salsa20_tweet(c, d, n[16:], s)
def crypto_stream_xsalsa20_tweet_xor(c, m, d, n, k):
'''int crypto_stream_xsalsa20_tweet_xor(u8*c, const u8*m, u64 d, const u8*n, const u8*k)'''
s = IntArray(u8, size=32)
crypto_core_hsalsa20_tweet(s, n, k, sigma)
return crypto_stream_salsa20_tweet_xor(c, m, d, n[16:], s)
def add1305(h, c):
'''void add1305(u32*h, const u32*c)'''
u = u32()
for j in range(17):
u += u32(h[j] + c[j])
h[j] = u & 255
u >>= 8
minusp = IntArray(u32, size=17, init=(5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252))
def crypto_onetimeauth_poly1305_tweet(out, m, n, k):
'''int crypto_onetimeauth_poly1305_tweet(u8*out, const u8*m, u64 n, const u8*k)'''
s = u32()
u = u32()
x = IntArray(u32, size=17)
r = IntArray(u32, size=17)
h = IntArray(u32, size=17)
c = IntArray(u32, size=17)
g = IntArray(u32, size=17)
for j in range(16): r[j] = k[j]
r[3] &= 15
r[4] &= 252
r[7] &= 15
r[8] &= 252
r[11] &= 15
r[12] &= 252
r[15] &= 15
while n > 0:
c[:] = 17*[u32()]
for j in range(16):
if j >= n: j -= 1 ; break
c[j] = m[j]
j += 1
c[j] = 1
m = m[j:]; n -= j
add1305(h, c)
for i in range(17):
x[i] = 0
for j in range(17): x[i] += h[j]*(r[i - j] if j <= i else 320*r[i + 17 - j])
for i in range(17): h[i] = x[i]
u = 0
for j in range(16):
u += h[j]
h[j] = u & 255
u >>= 8
u += h[16]; h[16] = u & 3
u = 5*(u >> 2)
for j in range(16):
u += h[j]
h[j] = u & 255
u >>= 8
u += h[16]; h[16] = u
for j in range(17): g[j] = h[j]
add1305(h, minusp)
s = -(h[16] >> 7)
for j in range(17): h[j] ^= s & (g[j] ^ h[j])
for j in range(16): c[j] = k[j + 16]
c[16] = 0
add1305(h, c)
for j in range(16): out[j] = h[j]
return 0
def crypto_onetimeauth_poly1305_tweet_verify(h, m, n, k):
    '''int crypto_onetimeauth_poly1305_tweet_verify(const u8*h, const u8*m, u64 n, const u8*k)'''
|
Joel-U/sparkle
|
sparkle/stim/auto_parameter_model.py
|
Python
|
gpl-3.0
| 15,413
| 0.002336
|
import numpy as np
class AutoParameterModel():
"""Model to hold all the necessary information to generate
auto-tests, where parameters of components are systematically
manipulated
"""
def __init__(self):
self._parameters = []
def nrows(self):
"""The number of auto-parameters
:returns: int -- parameter count
"""
return len(self._parameters)
def clearParameters(self):
"""Clears all parameters out of this model"""
self._parameters = []
def param(self, row):
"""Gets the parameter indexed by *row*
:param row: the ith parameter number
:type row: int
:returns: dict -- the parameter
"""
return self._parameters[row]
def selection(self, row):
"""Gets the component selection for parameter number *row*
:param row: the ith parameter number
:type row: int
:returns: list<:class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>`>
"""
return self._parameters[row]['selection']
def allData(self):
"""Gets a list of all the parameters in this model
:returns: list<dict> -- all parameters
"""
return self._parameters
def toggleSelection(self, row, component):
"""Toggles the *component* in or out of the selection
for parameter *row*
:param row: the ith parameter number
:type row: int
:param component: the component to toggle its selection membership
:type component: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>`
"""
selection = self._parameters[row]['selection']
if component in selection:
selection.remove(component)
else:
selection.append(component)
def setVerifiedValue(self, row, field, value):
"""Sets the *value* for *field* in the parameter
indexed by *row*, only if the value is within set limits
:param row: the ith parameter number
:type row: int
:param field: detail of the parameter to set
:type field: str
:param value: pre-scaled value to assign to field
"""
if self._parameters[row]['parameter'] == 'filename':
return # cannot be set this way?
if field == 'parameter':
self.setParamValue(row, parameter=value)
elif field in ['start', 'stop', 'step']:
if self.checkLimits(row, value):
kwd = {field : value}
self.setParamValue(row, **kwd)
def setParamValue(self, row, **kwargs):
"""Sets the arguments as field=val for parameter
indexed by *row*
        :param row: the ith parameter number
:type row: int
"""
param = self._parameters[row]
for key, val in kwargs.items():
param[key] = val
def paramValue(self, row, field):
"""Gets the value for *field* for parameter indexed by
*row*
:param row: the ith parameter number
:type row: int
:param field: detail of the parameter to set
:type field: str
:returns: value -- type appropriate to parameter
"""
if field == 'nsteps':
return self.numSteps(row)
if field in ['start', 'stop', 'step'] and self._parameters[row]['parameter'] == 'filename':
return '-'
else:
param = self._parameters[row]
return param[field]
def overwriteParam(self, row, param):
"""Assigns *param* to index *row*, overwritting the
parameter at that location
:param row: the ith parameter number
:type row: int
:param param: parameter to set
:type param: dict
"""
if row == -1:
row = self.nrows() - 1
self._parameters[row] = param
def numSteps(self, row):
"""Gets the number of steps for the parameter at
index *row* will yeild
"""
param = self._parameters[row]
return self.nStepsForParam(param)
def nStepsForParam(self, param):
"""Gets the number of steps *parameter* will yeild
:param param: parameter to get the expansion count for
:type param: dict
"""
if param['parameter'] == 'filename':
return len(param['names'])
else:
if param['step'] > 0:
if abs(param['start'] - param['stop']) < param['step']:
return 0
# print 'range', param['start'] - param['stop']
nsteps = np.around(abs(param['start'] - param['stop']), 4) / float(param['step'])
nsteps = int(np.ceil(nsteps)+1)
elif param['start'] == param['stop']:
nsteps = 1
else:
nsteps = 0
return nsteps
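    # Editor's worked example (hedged): start=0, stop=10, step=2 gives
    # ceil(round(|0 - 10|, 4) / 2.0) + 1 = 6 steps, i.e. 0, 2, 4, 6, 8, 10.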
def getDetail(self, row, detail_field):
"""Gets the value of the detail *detail_field* of paramter
at index *row* from its selected components `auto_details`.
All of the selected components value for *detail_field* must
match
:param row: the ith parameter number
:type row: int
:param detail_field: auto_details member key
:type detail_field: str
:returns: value type appropriate for parameter
"""
param = self._parameters[row]
param_type = param['parameter']
components = param['selection']
if len(components) == 0 or param_type == '':
return None
# all components must match
matching_details = []
# for comp in components:
for comp in components:
alldetails = comp.auto_details()
if not param_type in alldetails:
                # self.hintRequested.emit('INCOMPATIBLE COMPONENTS FOR PARAMETER TYPE {}'.format(param_type))
return None
details = alldetails[param_type]
matching_details.append(details[detail_field])
matching_details = set(matching_details)
if len(matching_details) > 1:
print 'Components with mis-matched units!'
return None
return matching_details.pop()
def isFieldValid(self, row, field):
"""Verifies the value in *field* for parameter at index
*row*
:param row: the ith parameter number
:type row: int
:param field: detail of the parameter to check
:type field: str
:returns: bool -- True if valid
"""
param = self._parameters[row]
if param['parameter'] == '':
return False
if field == 'nsteps':
return self.numSteps(row) != 0
if param['parameter'] == 'filename':
# do something here... check filenames?
return True
if field == 'parameter':
return True
# else check that value is between min and max allowed
return self.checkLimits(row, param[field])
def findFileParam(self, comp):
"""Finds the filename auto-parameter that component *comp* is
        in, and returns all the filenames for that parameter. Note: this
assumes that *comp* will only be in a single filename auto-parameter.
:param comp: Component to search parameter membership for
:type comp: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>`
:returns: list<str> -- filenames the found parameter will loop through
"""
for p in self._parameters:
if p['parameter'] == 'filename' and comp in p['selection']:
return p['names']
def checkLimits(self, row, value):
"""Check that *value* is within the minimum and maximum allowable
range for the parameter at index *row*
:param row: the ith parameter number
:type row: int
        :param value: the candidate value for the start or stop fields
:returns: bool -- True if *value* within range
"""
# extract the selected comp
|
Oriphiel/Python
|
AlarmaTecno/Alarma/migrations/0005_djkombumessage_djkombuqueue.py
|
Python
|
apache-2.0
| 1,116
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('Alarma', '0004_auto_20151025_0206'),
]
operations = [
migrations.CreateModel(
name='DjkombuMessage',
fields=[
('id', models.IntegerField(serialize=False, primary_key=True)),
('visible', models.BooleanField()),
('sent_at', models.DateTimeField(null=True, blank=True)),
('payload', models.TextField()),
],
options={
'db_table': 'djkombu_message',
'managed': False,
},
),
        migrations.CreateModel(
name='DjkombuQueue',
fields=[
('id', models.IntegerField(serialize=False, primary_key=True)),
('name', models.CharField(unique=True, max_length=200)),
],
options={
'db_table': 'djkombu_queue',
'managed': False,
},
),
]
|
Twilight0/service.subtitles.subtitles.gr
|
resources/lib/addon.py
|
Python
|
gpl-3.0
| 12,802
| 0.002812
|
# -*- coding: utf-8 -*-
'''
Subtitles.gr Addon
Author Twilight0
SPDX-License-Identifier: GPL-3.0-only
See LICENSES/GPL-3.0-only for more information.
'''
import re, unicodedata
from shutil import copy
from os.path import splitext, exists, split as os_split
from resources.lib import subtitlesgr, xsubstv, podnapisi, vipsubs
from tulip.fuzzywuzzy.fuzz import ratio
from tulip import control
from tulip.compat import urlencode, py3_dec, concurrent_futures
from tulip.log import log_debug
if control.condVisibility('Player.HasVideo'):
infolabel_prefix = 'VideoPlayer'
else:
infolabel_prefix = 'ListItem'
class Search:
def __init__(self, syshandle, sysaddon, langs, action):
self.list = []
self.query = None
self.syshandle = syshandle
self.sysaddon = sysaddon
self.langs = langs
self.action = action
def run(self, query=None):
if 'Greek' not in str(self.langs).split(','):
control.directory(self.syshandle)
control.infoDialog(control.lang(30002))
return
dup_removal = False
if not query:
title = match_title = control.infoLabel('{0}.Title'.format(infolabel_prefix))
with concurrent_futures.ThreadPoolExecutor(5) as executor:
if re.search(r'[^\x00-\x7F]+', title) is not None:
title = control.infoLabel('{0}.OriginalTitle'.format(infolabel_prefix))
title = unicodedata.normalize('NFKD', title).encode('ascii', 'ignore')
title = py3_dec(title)
year = control.infoLabel('{0}.Year'.format(infolabel_prefix))
tvshowtitle = control.infoLabel('{0}.TVshowtitle'.format(infolabel_prefix))
season = control.infoLabel('{0}.Season'.format(infolabel_prefix))
if len(season) == 1:
season = '0' + season
episode = control.infoLabel('{0}.Episode'.format(infolabel_prefix))
if len(episode) == 1:
episode = '0' + episode
if 's' in episode.lower():
season, episode = '0', episode[-1:]
if tvshowtitle != '': # episode
title_query = '{0} {1}'.format(tvshowtitle, title)
season_episode_query = '{0} S{1} E{2}'.format(tvshowtitle, season, episode)
season_episode_query_nospace = '{0} S{1}E{2}'.format(tvshowtitle, season, episode)
threads = [
executor.submit(self.subtitlesgr, season_episode_query_nospace),
executor.submit(self.xsubstv, season_episode_query),
executor.submit(self.podnapisi, season_episode_query),
executor.submit(self.vipsubs, season_episode_query)
]
dup_removal = True
log_debug('Dual query used for subtitles search: ' + title_query + ' / ' + season_episode_query)
if control.setting('queries') == 'true':
threads.extend(
[
executor.submit(self.subtitlesgr, title_query),
executor.submit(self.vipsubs, title_query),
executor.submit(self.podnapisi, title_query),
executor.submit(self.subtitlesgr, season_episode_query)
]
)
elif year != '': # movie
query = '{0} ({1})'.format(title, year)
threads = [
executor.submit(self.subtitlesgr, query), executor.submit(self.xsubstv, query),
executor.submit(self.vipsubs, query), executor.submit(self.podnapisi, query)
]
else: # file
query, year = control.cleanmovietitle(title)
if year != '':
query = '{0} ({1})'.format(query, year)
threads = [
executor.submit(self.subtitlesgr, query), executor.submit(self.xsubstv, query),
executor.submit(self.vipsubs, query), executor.submit(self.podnapisi, query)
]
for future in concurrent_futures.as_completed(threads):
item = future.result()
if not item:
continue
self.list.extend(item)
if not dup_removal:
log_debug('Query used for subtitles search: ' + query)
self.query = query
self.query = py3_dec(self.query)
else: # Manual query
with concurrent_futures.ThreadPoolExecutor(5) as executor:
query = match_title = py3_dec(query)
threads = [
                    executor.submit(self.subtitlesgr, query), executor.submit(self.xsubstv, query),
executor.submit(self.vipsubs, query), executor.submit(self.podnapisi, query)
]
                for future in concurrent_futures.as_completed(threads):
item = future.result()
if not item:
continue
self.list.extend(item)
if len(self.list) == 0:
control.directory(self.syshandle)
return
f = []
# noinspection PyUnresolvedReferences
f += [i for i in self.list if i['source'] == 'xsubstv']
f += [i for i in self.list if i['source'] == 'subtitlesgr']
f += [i for i in self.list if i['source'] == 'podnapisi']
f += [i for i in self.list if i['source'] == 'vipsubs']
self.list = f
if dup_removal:
self.list = [dict(t) for t in {tuple(d.items()) for d in self.list}]
for i in self.list:
try:
if i['source'] == 'xsubstv':
i['name'] = u'[xsubstv] {0}'.format(i['name'])
elif i['source'] == 'podnapisi':
i['name'] = u'[podnapisi] {0}'.format(i['name'])
elif i['source'] == 'vipsubs':
i['name'] = u'[vipsubs] {0}'.format(i['name'])
except Exception:
pass
if control.setting('sorting') == '1':
key = 'source'
elif control.setting('sorting') == '2':
key = 'downloads'
elif control.setting('sorting') == '3':
key = 'rating'
else:
key = 'title'
self.list = sorted(self.list, key=lambda k: k[key].lower(), reverse=control.setting('sorting') in ['1', '2', '3'])
for i in self.list:
u = {'action': 'download', 'url': i['url'], 'source': i['source']}
u = '{0}?{1}'.format(self.sysaddon, urlencode(u))
item = control.item(label='Greek', label2=i['name'])
item.setArt({'icon': str(i['rating'])[:1], 'thumb': 'el'})
if ratio(splitext(i['title'].lower())[0], splitext(match_title)[0]) >= int(control.setting('sync_probability')):
item.setProperty('sync', 'true')
else:
item.setProperty('sync', 'false')
item.setProperty('hearing_imp', 'false')
control.addItem(handle=self.syshandle, url=u, listitem=item, isFolder=False)
control.directory(self.syshandle)
def subtitlesgr(self, query=None):
if not query:
query = self.query
try:
if control.setting('subtitles') == 'false':
raise TypeError
result = subtitlesgr.Subtitlesgr().get(query)
return result
except TypeError:
pass
def podnapisi(self, query=None):
if not query:
query = self.query
try:
if control.setting('podnapisi') == 'false':
raise TypeError
result = podnapisi.Podnapisi().get(query)
return result
        except TypeError:
            pass
|
FredHutch/swift-switch-account
|
sw2account/sw2account.py
|
Python
|
apache-2.0
| 11,887
| 0.025742
|
#!/usr/bin/python
__version__='0.2.1'
import argparse
import logging
import os
import os.path
import sys
import requests
import getpass
import ConfigParser
def _persist( export, rcfile ):
f = open( rcfile, 'w' )
logging.debug( "writing to {}".format( rcfile ) )
f.write( "\n".join( export ) )
f.close()
os.chmod( rcfile, 0600 )
logging.info( "saved Swift credentials" )
def sh(creds, auth_version, savepw=False, persist=False ):
export = []
if auth_version == 'v1':
export.append(
"unset OS_USERNAME OS_PASSWORD OS_TENANT_NAME OS_AUTH_URL" )
export.append(
"unset OS_AUTH_TOKEN OS_STORAGE_URL" )
export.append( "export ST_USER='{}'".format( creds['account'] ) )
export.append( "export ST_KEY='{}'".format( creds['password'] ) )
export.append( "export ST_AUTH='{}'".format( v1AuthUrl ) )
else:
export.append(
"unset ST_USER ST_KEY ST_AUTH" )
export.append( "export OS_USERNAME='{}'".format( creds['user'] ) )
export.append(
"export OS_TENANT_NAME='AUTH_Swift_{}'".format( creds['account'] ) )
if savepw:
export.append(
"export OS_PASSWORD='{}'".format( creds['password'] ) )
export.append( "export OS_AUTH_URL='{}'".format( v2AuthUrl ) )
print ";".join( export )
if persist:
rcfile = os.environ[ 'HOME' ] + "/.swiftrc"
logging.debug( "persisting environment variables" )
_persist( export, rcfile )
def csh(creds, auth_version, savepw=False, persist=False ):
export = []
if auth_version == 'v1':
export.append(
"unsetenv OS_USERNAME OS_PASSWORD OS_TENANT_NAME OS_AUTH_URL" )
export.append(
"unsetenv OS_AUTH_TOKEN OS_STORAGE_URL" )
export.append( "setenv ST_USER '{}'".format( creds['account'] ) )
export.append( "setenv ST_KEY '{}'".format( creds['password'] ) )
export.append( "setenv ST_AUTH '{}'".format( v1AuthUrl ) )
else:
export.append(
"unsetenv ST_USER ST_KEY ST_AUTH" )
export.append( "setenv OS_USERNAME '{}'".format( creds['user'] ) )
export.append(
"setenv OS_TENANT_NAME 'AUTH_Swift_{}'".format( creds['account'] ) )
if savepw:
export.append(
"setenv OS_PASSWORD '{}'".format( creds['password'] ) )
export.append( "setenv OS_AUTH_URL '{}'".format( v2AuthUrl ) )
print ";".join( export )
if persist:
rcfile = os.environ[ 'HOME' ] + "/.swift.cshrc"
logging.debug( "persisting environment variables" )
_persist( export, rcfile )
shell_output = {
'sh': sh,
'ksh': sh,
'bash': sh,
'zsh': sh,
'csh': csh,
'tcsh': csh
}
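# Editor's example (hedged, illustrative values only): for v2 auth the sh()
# writer above prints a single ';'-joined line along the lines of
#   unset ST_USER ST_KEY ST_AUTH;export OS_USERNAME='alice';export OS_TENANT_NAME='AUTH_Swift_lab_a';export OS_AUTH_URL='https://swift.example.org/auth/v2.0'
# (v1AuthUrl and v2AuthUrl are referenced but never defined in this snippet,
# so the URL shown here is an assumption.)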
class LocalParser( argparse.ArgumentParser ):
def error( self, message ):
sys.stderr.write( "Error: too few arguments\n" )
sys.stderr.write( "usage: sw2account lastname_f\n" )
sys.stderr.write(
"use \"sw2account --help\" for full help information\n" )
sys.exit(1)
def print_help( self ):
self._print_message( self.format_help(), sys.stderr )
sys.exit(0)
def return_v1_auth( args ):
# If server URL is unspecified, look for "SW2_URL" in current environment
account = args.account
server_url = args.server_url
logging.debug(
'asking {} for credentials for {}'.format( server_url, account )
)
if not server_url:
try:
server_url = os.environ[ 'SW2_URL' ]
except KeyError:
logging.error( "Server URL is unset (not in arguments or SW2_URL)" )
sys.exit(1)
# Add account name to URL
server_url = '/'.join( [ server_url, account ] )
logging.debug( 'final url is {}'.format( server_url ) )
# Get user authentication credentials
user = getpass.getuser()
passwd = getpass.getpass( 'Enter password for {}: '.format(user) )
# Get account credentials from server_url
r = requests.get( server_url, verify = args.verify_ssl, auth=( user, passwd ) )
if r.status_code == 200:
creds = r.json()
logging.debug(
"got credentials for account {}".format( creds['account'] )
)
creds['url'] = 'https://tin/some/crap'
shell_output[ args.shell ](
creds=creds,
persist=args.persist,
auth_version=args.auth_version
)
elif r.status_code == 401:
logging.error(
"invalid username/password supplied to server"
)
elif r.status_code == 403:
logging.error(
"user {} is not permitted to use {} ({})".format(
user, account, r.status_code
)
)
elif r.status_code == 404:
try:
message = r.json()['message']
except KeyError:
logging.error( "404 returned from server with no message" )
sys.exit(1)
logging.error("{} (HTTP{})".format(
message, r.status_code
)
)
else:
logging.error(
"error {} retrieving credentials from server".format(
r.status_code
)
)
def return_v2_auth( args ):
creds = {}
# authentication is done using Swiftstack version 2 authentication
# requires additional "tenant name" in addition to username and password
creds['account'] = args.account
# take username password from currently logged in user
creds['user'] = getpass.getuser()
if args.savepw:
logging.warning( "Saving passwords is insecure and not recommended." )
creds['password'] = getpass.getpass(
'Enter password for {}: '.format( creds['user'] ) )
logging.debug(
"got credentials for account {}".format( creds['account'] )
)
if args.savepw:
logging.debug( 'saving password in rc and environment' )
shell_output[ args.shell ](
creds=creds,
persist=args.persist,
savepw=args.savepw,
auth_version=args.auth_version
)
def add_common_args( aparser ):
aparser.add_argument(
'shell',
help = "format output for shell <shell>",
choices = shell_output.keys()
)
aparser.add_argument(
'account',
help = "retrieve credentials for account <account>"
)
aparser.add_argument(
'--config',
default = "/etc/sw2account.cfg",
help = "configuration file to use (default=/etc/sw2account.cfg)"
)
aparser.add_argument(
'--stack',
default = "default",
help = "stack name to authentication against (see configfile)"
)
aparser.add_argument(
'--save', '--persist',
dest = 'persist',
action = 'store_true',
help = "write credentials to $HOME/.swiftrc"
)
aparser.add_argument(
'--no-save', '--no-persist',
dest = 'persist',
action = 'store_false',
help = "do not write credentials to $HOME/.swiftrc"
)
aparser.add_argument(
'--version', '-v',
help = "show script version",
action = 'version',
version = "sw2account version {}".format( __version__)
)
aparser.add_argument(
'--debug',
action = "store_true",
help = "log level for client"
)
if __name__ == "__main__":
# Get the config first
# Need to prime the pump to find defaults
tparse = argparse.ArgumentParser()
tparse.add_argument(
'--config',
default = "/etc/sw2account.cfg",
help = "configuration file to use (default=/etc/sw2account.cfg)"
)
tparse.add_argument(
'--stack',
default = "default",
help = "stack name to authenticate against (see configfile)"
)
args, unknown = tparse.parse_known_args()
# Read config file with defaults
if not os.path.isfile( args.config ):
logging.error( "missing config file %s", args.config )
sys.exit(1)
appdefaults = ConfigParser.ConfigParser()
try:
appdefaults.read( args.config )
logging.debug( "reading config from %s"
|
tarashor/vibrations
|
py/fem/result.py
|
Python
|
mit
| 2,014
| 0.004965
|
import numpy as np
from . import finiteelements as fe
from . import matrices
from math import cos
class Result:
def __init__(self, freq, u1, u2, u3, mesh, geometry):
self.freq = freq
self.u1 = u1
self.u2 = u2
self.u3 = u3
self.mesh = mesh
self.geometry = geometry
    def rad_per_sec_to_Hz(self, rps):
return rps/(2*np.pi)
def get_displacement_and_deriv(self, x1, x2, x3, time):
        element = self.mesh.get_element(x1, x3)
if (element is None):
print ("x1 = {}, x2 = {}".format(x1, x3))
u_nodes = np.zeros((8))
u_nodes[0] = self.u1[element.top_left_index]
u_nodes[1] = self.u1[element.top_right_index]
u_nodes[2] = self.u1[element.bottom_right_index]
u_nodes[3] = self.u1[element.bottom_left_index]
u_nodes[4] = self.u3[element.top_left_index]
u_nodes[5] = self.u3[element.top_right_index]
u_nodes[6] = self.u3[element.bottom_right_index]
u_nodes[7] = self.u3[element.bottom_left_index]
h_e = matrices.element_aprox_functions(element, x1, x2, x3)
return h_e.dot(u_nodes) * self.fi(time)
def get_strain(self, x1, x2, x3, time):
B = matrices.deriv_to_grad(self.geometry, x1, x2, x3)
u = self.get_displacement_and_deriv(x1, x2, x3, time)
grad_u = B.dot(u)
E = matrices.grad_to_strain()
# E_NL = grad_to_strain_nonlinear_matrix(alpha1, alpha2, geometry, grad_u)
return E.dot(grad_u)
def get_strain_nl(self, x1, x2, x3, time):
B = matrices.deriv_to_grad(self.geometry, x1, x2, x3)
u = self.get_displacement_and_deriv(x1, x2, x3, time)
grad_u = B.dot(u)
E = matrices.grad_to_strain()
E_NL = matrices.deformations_nl(self.geometry, grad_u, x1, x2, x3)
return (E + E_NL).dot(grad_u)
def fi(self, time):
return cos(self.freq * time)
|
arseneyr/essentia
|
test/src/unittest/spectral/test_hpcp.py
|
Python
|
agpl-3.0
| 7,101
| 0.021405
|
#!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestHPCP(TestCase):
def testEmpty(self):
hpcp = HPCP()([], [])
self.assertEqualVector(hpcp, [0.]*12)
def testZeros(self):
hpcp = HPCP()([0]*10, [0]*10)
self.assertEqualVector(hpcp, [0.]*12)
def testSin440(self):
# Tests whether a real audio signal of one pure tone gets read as a
# single semitone activation, and gets read into the right pcp bin
sampleRate = 44100
audio = MonoLoader(filename = join(testdata.audio_dir, 'generated/synthesised/sin440_0db.wav'),
sampleRate = sampleRate)()
speaks = SpectralPeaks(sampleRate = sampleRate,
maxPeaks = 1,
maxFrequency = sampleRate/2,
minFrequency = 0,
magnitudeThreshold = 0,
orderBy = 'magnitude')
(freqs, mags) = speaks(Spectrum()(audio))
hpcp = HPCP()(freqs, mags)
self.assertEqualVector(hpcp, [1.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
def testAllSemitones(self):
# Tests whether a spectral peak output of 12 consecutive semitones
# yields a HPCP of all 1's
tonic = 440
freqs = [(tonic * 2**(x/12.)) for x in range(12)]
mags = [1] * 12
hpcp = HPCP()(freqs, mags)
self.assertEqualVector(hpcp, [1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.])
def testSubmediantPosition(self):
# Make sure that the submediant of a key based on 440 is in the
# correct location (submediant was randomly selected from all the
# tones)
tonic = 440
submediant = tonic * 2**(9./12.)
hpcp = HPCP()([submediant], [1])
self.assertEqualVector(hpcp, [0.,0.,0.,0.,0.,0.,0.,0.,0.,1.,0.,0.])
def testMaxShifted(self):
# Tests whether a HPCP reading with only the dominant semitone
# activated is correctly shifted so that the dominant is at the
# position 0
tonic = 440
dominant = tonic * 2**(7./12.)
hpcp = HPCP(maxShifted=True)([dominant], [1])
self.assertEqualVector(hpcp, [1.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
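    # Editor's note (hedged): the pitch math in these tests is equal
    # temperament, f = reference * 2**(n/12.); e.g. the dominant used above
    # is 440 * 2**(7/12.) ~= 659.255 Hz (E5), seven semitones above A4.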
def chordHelper(self, half_steps, tunning, strength):
notes = [tunning*(2.**(half_steps[i]/12.)) for i in range(len(half_steps))]
hpcp = HPCP(maxShifted=False)([notes[0], notes[1], notes[2]], strength)
for i in range(len(hpcp)):
if i in half_steps: self.assertTrue(hpcp[i]>0)
elif (i - 12) in half_steps: self.assertTrue(hpcp[i]>0)
else: self.assertEqual(hpcp[i], 0)
def testChord(self):
tunning = 440
AMajor = [0, 4, 7] # AMajor = A4-C#5-E5
self.chordHelper(AMajor, tunning, [1,1,1])
CMajor = [3, -4, -2] # CMajor = C5-F4-G4
self.chordHelper(CMajor, tunning, [1,1,1])
CMajor = [-4, 3, -2] # CMajor = C5-F4-G4
self.chordHelper(CMajor, tunning, [1,0.5,0.2])
CMajor = [-4, -2, 3] # CMajor = C5-F4-G4
self.chordHelper(CMajor, tunning, [1,0.5,0.2])
CMajor = [3, 8, 10] # CMajor = C5-F5-G5
self.chordHelper(CMajor, tunning, [1,0.5,0.2])
AMinor = [0, 3, 7] # AMinor = A4-C5-E5
self.chordHelper(AMinor, tunning, [1,0.5,0.2])
CMinor = [3, 6, 10] # CMinor = C5-E5-G5
self.chordHelper(CMinor, tunning, [1,0.5,0.2])
# Test of various parameter logical bounds
def testLowFrequency(self):
hpcp = HPCP(minFrequency=100, maxFrequency=1000)([99], [1])
self.assertEqualVector(hpcp, [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
def testHighFrequency(self):
hpcp = HPCP(minFrequency=100, maxFrequency=1000)([1001], [1])
self.assertEqualVector(hpcp, [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.])
def testSmallMinRange(self):
self.assertConfigureFails(HPCP(), {'minFrequency':1, 'splitFrequency':200})
def testSmallMaxRange(self):
self.assertConfigureFails(HPCP(), {'maxFrequency':1199, 'splitFrequency':1000})
def testSmallMinMaxRange(self):
self.assertConfigureFails(HPCP(), {'bandPreset':False, 'maxFrequency':200, 'minFrequency':1})
def testSizeNonmultiple12(self):
self.assertConfigureFails(HPCP(), {'size':13})
def testHarmonics(self):
# Regression test for the 'harmonics' parameter
tone = 100. # arbitrary frequency [Hz]
freqs = [tone, tone*2, tone*3, tone*4]
mags = [1]*4
hpcpAlg = HPCP(minFrequency=50, maxFrequency=500, bandPreset=False, harmonics=3)
hpcp = hpcpAlg(freqs, mags)
expected = [0., 0., 0., 0.1340538263, 0., 0.2476127148, 0., 0., 0., 0., 1., 0.]
self.assertAlmostEqualVector(hpcp, expected, 1e-4)
def testRegression(self):
# Just makes sure algorithm does not crash on a real data source. This
# test is not really looking for correctness. Maybe consider revising
# it.
inputSize = 512
sampleRate = 44100
audio = MonoLoader(filename = join(testdata.audio_dir, join('recorded', 'musicbox.wav')),
sampleRate = sampleRate)()
fc = FrameCutter(frameSize = inputSize,
hopSize = inputSize)
windowingAlg = Windowing(type = 'blackmanharris62')
specAlg = Spectrum(size=inputSize)
sPeaksAlg = SpectralPeaks(sampleRate = sampleRate,
maxFrequency = sampleRate/2,
minFrequency = 0,
orderBy = 'magnitude')
        hpcpAlg = HPCP(minFrequency=50, maxFrequency=500, bandPreset=False, harmonics=3)
frame = fc(audio)
while len(frame) != 0:
spectrum = specAlg(windowingAlg(frame))
(freqs, mags) = sPeaksAlg(spectrum)
hpcp = hpcpAlg(freqs,mags)
self.assertTrue(not any(numpy.isnan(hpcp)))
self.assertTrue(not any(numpy.isinf(hpcp)))
frame = fc(audio)
suite = allTests(TestHPCP)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
|
ssanderson/turing.py
|
turing.py
|
Python
|
mit
| 2,182
| 0.007333
|
required_states = ['accept', 'reject', 'init']
class TuringMachine(object):
def __init__(self, sigma, gamma, delta):
self.sigma = sigma
self.gamma = gamma
self.delta = delta
self.state = None
self.tape = None
self.head_position = None
return
def initialize(self, input_string):
for char in input_string:
assert char in self.sigma
self.tape = list(input_string)
self.state = 'init'
self.head_position = 0
return
def simulate_one_step(self, verbose=False):
        if self.state in ['accept', 'reject']:
            print "# %s " % self.state
            return
cur_symbol = self.tape[self.head_position]
transition = self.delta[(self.state, cur_symbol)]
if verbose:
self.print_tape_contents()
template = "delta({q_old}, {s_old}) = ({q}, {s}, {arr})"
print(template.format(q_old=self.state,
s_old=cur_symbol,
q=transition[0],
s=transition[1],
arr=transition[2])
)
self.state = transition[0]
self.tape[self.head_position] = transition[1]
if(transition[2] == 'left'):
self.head_position = max(0, self.head_position - 1)
else:
assert(transition[2] == 'right')
if self.head_position == len(self.tape) - 1:
self.tape.append('#')
self.head_position +=1
if verbose:
self.print_tape_contents()
return
def print_tape_contents(self):
formatted = ''.join(char if i != self.head_position else '[%s]' % char
for i, char in enumerate(self.tape))
print(formatted)
def run(self, input_string, verbose=False):
self.initialize(input_string)
while self.state not in ['reject', 'accept']:
self.simulate_one_step(verbose)
return str(self.tape)
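# --- Editor's addition: hedged usage sketch, not from the original repo ---
# A minimal machine over sigma = ['a'] that scans right over 'a' symbols and
# halts in 'accept' on the blank '#'; delta maps (state, symbol) to
# (new_state, write_symbol, direction), mirroring simulate_one_step above.
if __name__ == '__main__':
    demo_delta = {
        ('init', 'a'): ('init', 'a', 'right'),
        ('init', '#'): ('accept', '#', 'right'),
    }
    tm = TuringMachine(sigma=['a'], gamma=['a', '#'], delta=demo_delta)
    print(tm.run('aaa', verbose=True))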
|
dsp-jetpack/JetPack
|
src/pilot/control_overcloud.py
|
Python
|
apache-2.0
| 1,930
| 0
|
#!/usr/bin/python3
# Copyright (c) 2016-2021 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import os
from ironicclient import client
from subprocess import check_output
from credential_helper import CredentialHelper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--power", required=True, default=None,
choices=["on", "off", "reset", "cycle"],
                        help="Control power state of all overcloud nodes")
args = parser.parse_args()
os_auth_url, os_tenant_name, os_username, os_password, \
os_user_domain_name, os_project_domain_name = \
CredentialHelper.get_undercloud_creds()
    kwargs = {'os_username': os_username,
'os_password': os_password,
'os_auth_url': os_auth_url,
'os_tenant_name': os_tenant_name,
'os_user_domain_name': os_user_domain_name,
'os_project_domain_name': os_project_domain_name}
ironic = client.get_client(1, **kwargs)
for node in ironic.node.list(detail=True):
ip, username, password = \
CredentialHelper.get_drac_creds_from_node(node)
cmd = "ipmitool -H {} -I lanplus -U {} -P '{}' chassis power {}". \
format(ip, username, password, args.power)
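        # e.g. (editor's note, illustrative values only):
        #   ipmitool -H 192.0.2.10 -I lanplus -U root -P '***' chassis power on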
print(cmd)
os.system(cmd)
if __name__ == "__main__":
main()
|
rhcarvalho/kombu
|
kombu/tests/async/http/test_curl.py
|
Python
|
bsd-3-clause
| 5,102
| 0.000196
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from kombu.async.http.curl import READ, WRITE, CurlClient
from kombu.tests.case import (
HubCase, Mock, call, patch, case_requires, set_module_symbol,
)
@case_requires('pycurl')
class test_CurlClient(HubCase):
class Client(CurlClient):
Curl = Mock(name='Curl')
def test_when_pycurl_missing(self):
with set_module_symbol('kombu.async.http.curl', 'pycurl', None):
with self.assertRaises(ImportError):
self.Client()
def test_max_clients_set(self):
x = self.Client(max_clients=303)
self.assertEqual(x.max_clients, 303)
def test_init(self):
with patch('kombu.async.http.curl.pycurl') as _pycurl:
x = self.Client()
self.assertIsNotNone(x._multi)
self.assertIsNotNone(x._pending)
self.assertIsNotNone(x._free_list)
self.assertIsNotNone(x._fds)
self.assertEqual(
x._socket_action, x._multi.socket_action,
)
self.assertEqual(len(x._curls), x.max_clients)
self.assertTrue(x._timeout_check_tref)
x._multi.setopt.assert_has_calls([
call(_pycurl.M_TIMERFUNCTION, x._set_timeout),
call(_pycurl.M_SOCKETFUNCTION, x._handle_socket),
])
def test_close(self):
with patch('kombu.async.http.curl.pycurl'):
x = self.Client()
x._timeout_check_tref = Mock(name='timeout_check_tref')
x.close()
x._timeout_check_tref.cancel.assert_called_with()
for _curl in x._curls:
_curl.close.assert_called_with()
x._multi.close.assert_called_with()
def test_add_request(self):
with patch('kombu.async.http.curl.pycurl'):
x = self.Client()
x._process_queue = Mock(name='_process_queue')
x._set_timeout = Mock(name='_set_timeout')
request = Mock(name='request')
x.add_request(request)
self.assertIn(request, x._pending)
x._process_queue.assert_called_with()
x._set_timeout.assert_called_with(0)
def test_handle_socket(self):
with patch('kombu.async.http.curl.pycurl') as _pycurl:
hub = Mock(name='hub')
x = self.Client(hub)
fd = Mock(name='fd1')
# POLL_REMOVE
x._fds[fd] = fd
x._handle_socket(_pycurl.POLL_REMOVE, fd, x._multi, None, _pycurl)
hub.remove.assert_called_with(fd)
self.assertNotIn(fd, x._fds)
x._handle_socket(_pycurl.POLL_REMOVE, fd, x._multi, None, _pycurl)
# POLL_IN
hub = x.hub = Mock(name='hub')
fds = [fd, Mock(name='fd2'), Mock(name='fd3')]
x._fds = {f: f for f in fds}
x._handle_socket(_pycurl.POLL_IN, fd, x._multi, None, _pycurl)
hub.remove.assert_has_calls([call(fd)])
hub.add_reader.assert_called_with(fd, x.on_readable, fd)
self.assertEqual(x._fds[fd], READ)
# POLL_OUT
hub = x.hub = Mock(name='hub')
x._handle_socket(_pycurl.POLL_OUT, fd, x._multi, None, _pycurl)
hub.add_writer.assert_called_with(fd, x.on_writable, fd)
self.assertEqual(x._fds[fd], WRITE)
# POLL_INOUT
hub = x.hub = Mock(name='hub')
x._handle_socket(_pycurl.POLL_INOUT, fd, x._multi, None, _pycurl)
hub.add_reader.assert_called_with(fd, x.on_readable, fd)
hub.add_writer.assert_called_with(fd, x.on_writable, fd)
self.assertEqual(x._fds[fd], READ | WRITE)
# UNKNOWN EVENT
hub = x.hub = Mock(name='hub')
x._handle_socket(0xff3f, fd, x._multi, None, _pycurl)
            # FD NOT IN FDS
hub = x.hub = Mock(name='hub')
x._fds.clear()
x._handle_socket(0xff3f, fd, x._multi, None, _pycurl)
self.assertFalse(hub.remove.called)
def test_set_timeout(self):
x = self.Client()
x._set_timeout(100)
def test_timeout_check(self):
with patch('kombu.async.http.curl.pycurl') as _pycurl:
x = self.Client()
x._process_pending_requests = Mock(name='process_pending')
x._multi.socket_all.return_value = 333, 1
_pycurl.error = KeyError
x._timeout_check(_pycurl=_pycurl)
x._multi.socket_all.return_value = None
x._multi.socket_all.side_effect = _pycurl.error(333)
x._timeout_check(_pycurl=_pycurl)
def test_on_readable_on_writeable(self):
with patch('kombu.async.http.curl.pycurl') as _pycurl:
x = self.Client()
x._on_event = Mock(name='on_event')
fd = Mock(name='fd')
x.on_readable(fd, _pycurl=_pycurl)
x._on_event.assert_called_with(fd, _pycurl.CSELECT_IN)
x.on_writable(fd, _pycurl=_pycurl)
x._on_event.assert_called_with(fd, _pycurl.CSELECT_OUT)
|
rsinger86/goose-extractor
|
goose/article.py
|
Python
|
apache-2.0
| 4,751
| 0.00021
|
# -*- coding: utf-8 -*-
"""\
This is a python port of "Goose" originally licensed to Gravity.com
under one or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.
Python port was written by Xavier Grangier for Recrutae
Gravity.com licenses this file
to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class Article(object):
def __init__(self):
# title of the article
self.title = u""
# stores the lovely, pure text from the article,
# stripped of html, formatting, etc...
# just raw text with paragraphs separated by newlines.
# This is probably what you want to use.
self.cleaned_text = u""
# meta description field in HTML source
self.meta_description = u""
# meta lang field in HTML source
self.meta_lang = u""
self.body_html = u""
# meta favicon field in HTML source
self.meta_favicon = u""
# meta keywords field in the HTML source
self.meta_keywords = u""
# The canonical link of this article if found in the meta data
self.canonical_link = u""
# holds the domain of this article we're parsing
self.domain = u""
# holds the top Element we think
# is a candidate for the main body of the article
self.top_node = None
# holds the top Image object that
# we think represents this article
self.top_image = None
# holds a set of tags that may have
        # been in the article, these are not meta keywords
        self.tags = []
        # holds a dict of all opengraph data found
self.opengraph = {}
# holds twitter embeds
self.tweets = []
# holds a list of any movies
# we found on the page like youtube, vimeo
self.movies = []
# holds links found in the main article
self.links = []
# hold author names
self.authors = []
# stores the final URL that we're going to try
# and fetch content against, this would be expanded if any
self.final_url = u""
# stores the MD5 hash of the url
# to use for various identification tasks
self.link_hash = ""
# stores the RAW HTML
# straight from the network connection
self.raw_html = u""
# the lxml Document object
self.doc = None
# this is the original JSoup document that contains
# a pure object from the original HTML without any cleaning
# options done on it
self.raw_doc = None
# Sometimes useful to try and know when
        # the publish date of an article was
self.publish_date = None
# A property bucket for consumers of goose to store custom data extractions.
self.additional_data = {}
@property
def infos(self):
data = {
"meta": {
"description": self.meta_description,
"lang": self.meta_lang,
"keywords": self.meta_keywords,
"favicon": self.meta_favicon,
"canonical": self.canonical_link,
},
"image": None,
"domain": self.domain,
"title": self.title,
"cleaned_text": self.cleaned_text,
"opengraph": self.opengraph,
"tags": self.tags,
"tweets": self.tweets,
"movies": [],
"links": self.links,
"authors": self.authors,
"publish_date": self.publish_date
}
# image
if self.top_image is not None:
data['image'] = {
'url': self.top_image.src,
'width': self.top_image.width,
'height': self.top_image.height,
'type': 'image'
}
# movies
for movie in self.movies:
data['movies'].append({
'embed_type': movie.embed_type,
'provider': movie.provider,
'width': movie.width,
'height': movie.height,
'embed_code': movie.embed_code,
'src': movie.src,
})
return data
|
erwindl0/python-rpc
|
org.eclipse.triquetrum.python.service/scripts/scisoftpy/plot.py
|
Python
|
epl-1.0
| 26,232
| 0.006023
|
###
# Copyright 2011 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
'''
Wrapper of plotting functionality in DAWN
'''
import os
import sys
if os.name == 'java':
import jython.jyplot as _plot
_plot_set_port = _plot.setremoteport
import jython.jycore as _core #@Reimport @UnusedImport
import jython.jybeans as _beans #@Reimport @UnusedImport
else:
import python.pyplot as _pyplot
_plot_set_port = _pyplot.setremoteport
_plot = _pyplot.plotter()
import python.pycore as _core #@Reimport
import python.pybeans as _beans #@Reimport
_plot_line = _plot.plot_line
_plot_addline = _plot.plot_addline
_plot_updateline = _plot.plot_updateline
_plot_image = _plot.plot_image
_plot_images = _plot.plot_images
_plot_surface = _plot.plot_surface
_plot_stack = _plot.plot_stack
_plot_updatestack = _plot.plot_updatestack
_plot_points2d = _plot.plot_points2d
_plot_updatepoints2d = _plot.plot_updatepoints2d
_plot_points3d = _plot.plot_points3d
_plot_updatepoints3d = _plot.plot_updatepoints3d
_plot_scanforimages = _plot.plot_scanforimages
_plot_viewnexustree = _plot.plot_viewnexustree
_plot_volume = _plot.plot_volume
_plot_createaxis = _plot.plot_createaxis
_plot_renameactiveaxis = {'x':_plot.plot_renameactivexaxis, 'y':_plot.plot_renameactiveyaxis}
_plot_clear = _plot.plot_clear
_plot_export = _plot.plot_export
__orders = _plot.plot_orders
_exception = _beans.exception
parameters = _beans.parameters
plotmode = _beans.plotmode
_guibean = _beans.guibean
bean = _guibean
axismapbean = _beans.axismapbean
datasetwithaxisinformation = _beans.datasetwithaxisinformation
databean = _beans.databean
_plot_getbean = _plot.plot_getbean
_plot_setbean = _plot.plot_setbean
_plot_getdatabean = _plot.plot_getdatabean
_plot_setdatabean = _plot.plot_setdatabean
getguinames = _plot.plot_getguinames
window_manager = _plot.plot_window_manager
try:
import io as _io
_REMOTEVOLNAME = "Remote Volume Viewer"
def volume(v, name=_REMOTEVOLNAME):
'''Plot a volume dataset in remote volume view
'''
import tempfile
import os #@Reimport
tmp = tempfile.mkstemp('.dsr') # '/tmp/blah.dsr'
os.close(tmp[0])
vdatafile = tmp[1]
# convert to byte, int or float as volume viewer cannot cope with boolean, long or double datasets
if v.dtype == _core.bool:
v = _core.cast(v, _core.int8)
elif v.dtype == _core.int64:
v = _core.cast(v, _core.int32)
elif v.dtype == _core.float64 or v.dtype == _core.complex64 or v.dtype == _core.complex128:
v = _core.cast(v, _core.float32)
_io.save(vdatafile, v, format='binary')
_plot_volume(name, vdatafile)
os.remove(vdatafile)
except Exception, e:
print >> sys.stderr, "Could not import io for volume renderer, this part of plotting will not work"
print >> sys.stderr, e
import roi
_toList = _core.toList
_PVNAME = "Plot 1"
def setdefname(name):
'''Assign a default plot view name used by all plotters
This default name starts as "Plot 1"
'''
global _PVNAME
_PVNAME = name
def setremoteport(rpcport=0, rmiport=0):
'''Connect over RMI or Analysis RPC to the given port.
If RMI is used (default in Jython for SDS) rpcport is ignored.
If Analysis RPC is used (default in Python) rmiport is ignored.
If CORBA is used, both rpc and rmi ports are ignored.
Values of 0 mean use the default port.'''
_plot_set_port(rpcport=rpcport, rmiport=rmiport)
def _order(order):
try:
return __orders[order]
except KeyError:
raise ValueError, "Given order not one of none, alpha, chrono"
def clear(name=None):
'''Clear plot
Argument:
name -- name of plot view to use (if None, use default name)
'''
if name is None:
name = _PVNAME
_plot_clear(name)
_FILE_TYPES = ["PNG/JPEG File", "Postscript File", "SVG File"]
def export(name=None, format=None, savepath=None):
'''Export plot to svg, png, jpg, ps
Argument:
name -- name of plot view to use (if None, use default name)
format -- format of the file to export to: can be 'svg', 'png', 'jpg' or 'ps' (if None, svg is used by default)
savepath -- full path and filename of the file to export to (if none, the filename will be 'exported.svg')
'''
if name is None:
name = _PVNAME
if format is "svg" or format is "SVG" or format is None:
format = _FILE_TYPES[2]
if format is "ps" or format is "PS" or format is "eps" or format is "EPS":
format = _FILE_TYPES[1]
if format is "PNG" or format is "png" or format is "JPG" or format is "jpg":
format = _FILE_TYPES[0]
if savepath is None:
savepath = "exported.svg"
_plot_export(name, format, savepath)
'''
Store a global list of x and y axes names in a per-horizontal/vertical dictionary per plot name
'''
_DEF_NAMES = {'x':'X-Axis', 'y':'Y-Axis'}
_AXES_NAMES = { 'x':{}, 'y':{} }
import types as _types
def _parselinearg(x, y, title, name):
'''x and y can be lists of arrays or single-item dicts (each dict comprises an axis name (or tuple) and array)
'''
if y is None:
if isinstance(x, dict):
yl = [x]
else:
yl = _toList(x)
xl = None
else:
if isinstance(y, dict):
yl = [y]
else:
yl = _toList(y)
if x is None:
xl = None
else:
if isinstance(x, dict):
xl = [x]
else:
xl = _toList(x)
if len(xl) == 1:
x = xl[0]
if type(x) is _types.DictType: # has axis name
x = x.values()[0]
xLength = x.shape[0]
for i in yl:
if type(i) is _types.DictType: # has axis name
i = i.values()[0]
if xLength != i.shape[0]:
raise AttributeError("length of y does not match the length of x" )
elif len(xl) != len(yl):
raise ValueError("number of x datasets should be equal to number of y datasets")
else:
for i,j in zip(xl,yl):
if type(i) is _types.DictType: # has axis name
i = i.values()[0]
if type(j) is _types.DictType: # has axis name
j = j.values()[0]
if i.shape[0] != j.shape[0]:
raise AttributeError("length of y does not match the length of x")
return name, title, xl, yl
_AXES_SIDES = { 'x':{'default':_plot.axis_bottom, 'top':_plot.axis_top, 'bottom':_plot.axis_bottom},
'y':{'default':_plot.axis_left, 'left':_plot.axis_left, 'right':_plot.axis_right} }
def _setup_axes(al, dirn, name):
c = 0 # count use of default axis
for a in al:
if type(a) is _types.DictType: # has axis name
n = a.keys()[0]
if type(n) is _types.TupleType: # has side info
n = n[0]
if n == _DEF_NAMES[dirn]:
c += 1
else:
c += 1
rename = c == 0
an = []
for a in al:
if type(a) is _types.DictType: # has axis name
n = a.keys()[0]
rename, n = _setup_axis(rename, n, dirn, name)
an.append(n)
|
burbanom/python-utils
|
file_utils.py
|
Python
|
gpl-3.0
| 1,260
| 0.00873
|
from __future__ import print_function
import os
import sys
import fnmatch
import mmap
import numpy as np  # needed for the np.nan fallback in return_value()
def clean_files(path,pattern):
all_files = os.listdir(path)
filtered = fnmatch.filter(all_files,pattern+"*")
for element in filtered:
os.remove(os.path.join(path,element))
def find_files(path,target):
matches = []
for root, subFolders, files in os.walk(path):
if target in files:
matches.append(root)
return matches
def find_dirs_files_pattern(path,pattern):
matches = []
for root, dirnames, filenames in os.walk(path):
        for filename in fnmatch.filter(filenames, pattern):
matches.append([root,filename])
return matches
def return_value(filename,pattern):
if type(pattern) is str:
pattern = pattern.encode()
with open(filename, "r") as fin:
# memory-map the file, size 0 means whole file
m = mmap.mmap(fin.fileno(), 0, prot=mmap.PROT_READ)
# prot argument is *nix only
i = m.rfind(pattern)
try:
m.seek(i) # seek to the location
except ValueError:
return np.nan
line = m.readline() # read to the end of the line
return float(line.split()[-1])
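# Usage sketch (paths, patterns and file contents are illustrative
# assumptions):
#   clean_files('/tmp/run', 'scratch')       # removes /tmp/run/scratch*
#   find_files('/data', 'OUTCAR')            # dirs containing an OUTCAR file
#   return_value('run.log', 'Total energy')  # float at the end of the last
#                                            # line containing the pattern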
|
DelusionalLogic/TAS-100
|
python/test.py
|
Python
|
gpl-3.0
| 571
| 0.010508
|
import serial
import time
ser = serial.Serial(
port = "/dev/ttyACM0",
baudrate = 9600,
bytesize = serial.EIGHTBITS
)
time.sleep(2)
ser.write("\x36\x20\x00")
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
print(hex(ord(ser.read(1))))
|
repotvsupertuga/repo
|
plugin.video.exodus/resources/lib/sources/newlinks_mv.py
|
Python
|
gpl-2.0
| 4,985
| 0.016048
|
# -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
class source:
def __init__(self):
self.domains = ['newmyvideolink.xyz', 'beta.myvideolinks.xyz']
self.base_link = 'http://newmyvideolink.xyz'
self.search_link = '/?s=%s'
def movie(self, imdb, title, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
if debrid.status() == False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
query = re.sub('(\\\|/|:|;|\*|\?|"|\'|<|>|\|)', '', data['title'])
query = self.search_link % urllib.quote_plus(query)
query = urlparse.urljoin(self.base_link, query)
t = cleantitle.get(data['title'])
r = client.request(query)
r = client.parseDOM(r, 'ul', attrs = {'class': 'posts'})[0]
r = client.parseDOM(r, 'li')
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', attrs = {'title': '.+?'}), client.parseDOM(i, 'a', attrs = {'rel': 'category tag'})) for i in r]
r = [(i[0][0], i[1][0], i[2]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
r = [(i[0], i[1]) for i in r if 'MOVIES' in i[2]]
r = [(i[0], re.sub('(\.|\(|\[|\s)(\d{4}|3D)(\.|\)|\]|\s|)(.+|)', '', i[1]), re.findall('[\.|\(|\[|\s](\d{4}|)([\.|\)|\]|\s|].+)', i[1])) for i in r]
r = [(i[0], i[1], i[2][0][0], i[2][0][1]) for i in r if len(i[2]) > 0]
r = [(i[0], i[1], i[2], re.split('\.|\(|\)|\[|\]|\s|\-', i[3])) for i in r]
r = [i for i in r if t == cleantitle.get(i[1]) and data['year'] == i[2]]
r = [i for i in r if not any(x in i[3] for x in ['HDCAM', 'CAM', 'DVDR', 'DVDRip', 'DVDSCR', 'HDTS', 'TS', '3D'])]
r = [i for i in r if urlparse.urlparse(self.base_link).netloc in i[0]]
l = [(i[0], '1080p') for i in r if '1080p' in i[3]]
l += [(i[0], 'HD') for i in r if '720p' in i[3]]
l = l[:4]
hostDict = hostprDict + hostDict
links = []
for i in l:
try:
r = urlparse.urljoin(self.base_link, i[0])
r = client.replaceHTMLCodes(r)
r = client.request(r)
r = client.parseDOM(r, 'div', attrs = {'class': 'post_content'})[0]
r = re.sub('\s\s+', ' ', r)
try:
size = re.findall('Size\s*:\s*(.+? [M|G]B) ', r)[-1]
div = 1 if size.endswith(' GB') else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size))/div
info = '%.2f GB' % size
except:
info = ''
r = client.parseDOM(r, 'ul')[0]
r = client.parseDOM(r, 'a', ret='href')
for url in r: links.append({'url': url, 'quality': i[1], 'info': info})
except:
pass
for i in links:
try:
url = i['url']
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': i['quality'], 'provider': 'Newlinks', 'url': url, 'info': i['info'], 'direct': False, 'debridonly': True})
except:
pass
return sources
except:
return sources
def resolve(self, url):
return url
|
Sohojoe/damon
|
damon1/replace_it.py
|
Python
|
apache-2.0
| 14,439
| 0.019046
|
"""replace.py
Tool for replacing variable names in Damon.
Copyright (c) 2009 - 2011, Mark H. Moulton for Pythias Consulting, LLC.
Purpose: This tool was developed to replace CamelCase argument
variables with all lower case equivalents across all Damon modules.
It is being retained because it is readily adaptable to other
global search-and-replace problems.
Damon Version: 1.0.15
Damon Release Date: 5/1/2012
Damon is written in Python 2.7.2 and Numpy 1.6.1, as distributed
by Enthought (EPD 7.2-2 (64-bit)).
License
-------
This program references one or more software modules that are
under copyright to Pythias Consulting, LLC. Therefore, it is subject
to either the Gnu Affero General Public License or the Pythias
Commercial License, a copy of which is contained in the current
working directory.
How To Use
----------
The program is configured to convert CamelCase function/method
variables (but not regular variables) to lower case. To adapt
it to other uses, edit according to the following principles:
* Edit the names.extend(...) statement to get the
correct variables to edit. If you already know
the names to replace, you can comment out this
part of the program and rely on the special
dictionary.
* Edit the creation of the replace_ {} dictionary to
capture the names you are after. Currently, it
applies the s.lower() function, but it can be
anything.
* Set mode = 'inspect'. Obtain and review all names
to be replaced to make sure no new names will clash
with reserved Python or other package names or will
in other ways mangle the program.
The convention is to add a trailing underscore_ where
a Python clash would happen.
* Edit the removeit [] list to specify module
contents to ignore
    * Edit the special {} dictionary to specify
how to handle names that need special
handling.
* This function replaces only complete words -- those
governed by the regular expression '\b' (consult re
"regular expressions" module in the standard library).
Edit the re.sub(...) statement to replace characters
or other types of patterns.
* Make sure to save a backup of the module to be edited.
It is quite possible that a global search-and-replace
will result in unintended side-effects that require
debugging.
* Under filenames, list the Python modules in the current
working directory that you want to edit.
* Otherwise, you don't need to do any other file handling.
The program will automatically open and edit a Python
module in place.
* Set mode = 'replace' and hit F5 to run the program.
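
A minimal sketch of the whole-word replacement described above
(illustrative only, not part of this tool); the '\b' anchors keep
'RunSpecs' from matching inside longer names such as 'RunSpecs0':

    import re
    text = "RunSpecs = RunSpecs0 + baseEst"
    for old, new in {'RunSpecs': 'runspecs', 'baseEst': 'base_est'}.items():
        text = re.sub(r'\b%s\b' % re.escape(old), new, text)
    # text == "runspecs = RunSpecs0 + base_est"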
"""
import os
import sys
import cPickle
import inspect
import fileinput
import re
import glob
import damon1
#############
## Specs ##
#############
# Set mode to: <'inspect','replace'>
mode = 'replace'
# Files to edit
testpath = damon1.__path__[0]+'/tests/'
sys.path.append(testpath)
testfiles = glob.glob(testpath+'test_*.py')
testfiles.extend([testpath+'ut_template.py'])
files2inspect = ['core.py','tools.py','utils.py']
files2edit = files2inspect + testfiles + ['__init__.py','template.py']
print 'files2edit=\n',files2edit
# Module contents to ignore when getting variable names
removeit = ['core','utils','tools','npla','__package__','np','__doc__',
'core','cPickle','__builtins__','__file__','sys','__name__',
'npr','npma','os','__module__','__dict__','__weakref__',
'__doc__','self','npt','tab']
# Names that need special attention
special = {'DamonObj':'Damon',
'baseResid':'base_resid',
'RunSpecs':'runspecs',
'finSE':'fin_se',
'baseEst':'base_est',
'RandPercentNaN':'rand_nan',
'RandRange':'rand_range',
'Ents2Destd':'ents2restore',
'finEAR':'fin_ear',
'FixedRangeEnts':'ents2nan',
'FixedRangeLoc':'range2nan',
'AddSourceIDs':'source_ids',
'AddDataDict':'add_datadict',
'finEst':'fin_est',
'restoreInvalid':'restore_invalid',
'extractValid':'extract_valid',
'FacCoords':'fac_coords',
'Fac0Coord':'fac0coord',
'fac1coord':'fac1coord',
'finFit':'fin_fit',
'PredEnts':'pred_ents',
'Jolt':'jolt_',
'baseEAR':'base_ear',
'TabDataRCD':'tab_datadict',
'MissingLbls':'miss4headers',
'RecodeRange1':'recode1',
'RecodeRange2':'recode2',
'RecodeRange3':'recode3',
'baseSE':'base_se',
'baseFit':'base_fit',
'CondCoord':'condcoord_',
'ConstructLabel':'construct_label',
'ConstructEnts':'construct_ents',
'mergeAnsKey':'merge_anskey',
'XtraHeadRng':'extra_headers',
'PercentNaN':'p_nan',
'ScoreMC':'score_mc',
'RespCat':'resp_cat',
'Dtype':'dtype',
'finResid':'fin_resid',
'ConstructAtts':'construct_atts',
'ResidType':'resid_type',
'TargData':'targ_data',
'TargLabels':'targ_labels',
'OrigData':'orig_data',
'ItemDiff':'itemdiff',
'itemDiff':'item_diff',
'ParseParams':'parse_params',
'Params':'params',
'scoreMC':'score_mc',
'ObjEst':'obj_est',
'TargMeanSD':'mean_sd',
'BankF0Ents':'bankf0',
'BankF1Ents':'bankf1',
           'ObjEnts':'obj_ents',
'OutputAs':'output_as',
'RespCats':'resp_cats',
'RLRow':'rl_row',
           'RLCol':'rl_col',
'CLRow':'cl_row',
'CLCol':'cl_col',
'CoreRow':'core_row',
'CoreCol':'core_col',
'WholeRow':'whole_row',
'WholeCol':'whole_col',
'WholeArray':'whole',
'Fileh':'fileh',
'TextFile':'textfile',
'TextFiles':'textfiles',
'DataDictLink':'datadict_link',
'DataDictWhole':'datadict_whole',
'Pickle':'pickle',
'RCD_Whole':'RCD_whole',
'RCD_Dicts':'RCD_dicts',
'RCD_Dicts_Whole':'RCD_dicts_whole',
'ChunkFunc':'chunkfunc',
'ChunkDict':'chunkdict',
'Model':'model',
'Num':'num',
'extractValid_out':'extract_valid_out',
'pseudoMiss_out':'pseudomiss_out',
'scoreMC_out':'score_mc_out',
'baseEst_out':'base_est_out',
'baseResid_out':'base_resid_out',
'baseEAR_out':'base_ear_out',
'baseSE_out':'base_se_out',
'baseFit_out':'base_fit_out',
'finEst_out':'fin_est_out',
'est2Logit_out':'est2logit_out',
'itemDiff_out':'item_diff_out',
'fillMiss_out':'fillmiss_out',
'finResid_out':'fin_resid_out',
'finFit_out':'fin_fit_out',
'mergeAnsKey_out':'merge_anskey_out',
'restoreInvalid_out':'restore_invalid_out',
'summStat_out':'summstat_out',
'RowEnts':'row_ents',
'ColEnts':'col_ents',
'ObjPerDim':'objperdim',
'Stability':'stability',
'Objectivity':'objectivity',
'BestDim':'bestdim',
'MaxPosDim':'maxposdim',
'Accuracy':'accuracy',
'PsMsResid':'psmsresid',
'Fac0SE':'fac0_se',
'Fac1SE':'fac1_se',
'Fac0Infit':'fac0_infit',
'Fac1Infit':'fac1_infit',
'Fac0Outfit':'fac0_outfit',
'Fac1Outfit':'fac1_outfit',
'Reliability':'reliability',
'CellVar':'cellvar',
'CellFit':'cellfit',
'MsIndex':'msindex',
'PsMsIndex':'psmsindex',
'TrueMsIndex':'true_msindex',
'ParsedMsIndex':'parsed_msindex',
'ParsedTrueMsIndex':'parsed_true_msindex',
|
EVEprosper/ProsperAPI
|
scripts/manager.py
|
Python
|
mit
| 850
| 0.001176
|
"""manager.py: Flask-Script launcher for services
using https://github.com/yabb85/ueki as prototype
"""
from os import path
from flask_script import Manager, Server
from publicAPI import create_app
import prosper.common.prosper_logging as p_logging
import prosper.common.prosper_config as p_config
HERE = path.abspath(path.dirname(__file__))
ROOT = path.dirname(HERE)
CONFIG_FILEPATH = path.join(HERE, 'app.cfg')
CONFIG = p_config.ProsperConfig(CONFIG_FILEPATH)
SETTINGS = {
'PORT':8001
}
APP = create_app(SETTINGS, CONFIG)
MANAGER = Manager(APP)
MANAGER.add_command(
'runserver',
Server(
host='0.0.0.0',
port=CONFIG.get('PROD', 'PORT')
)
)
MANAGER.add_command(
'debug',
Server(
use_debugger=True,
port=CONFIG.get('DEBUG', 'PORT')
)
)
if __name__ == '__main__':
MANAGER.run()
|
opnsense/core
|
src/opnsense/scripts/filter/list_tables.py
|
Python
|
bsd-2-clause
| 1,992
| 0.000502
|
#!/usr/local/bin/python3
"""
Copyright (c) 2015-2019 Ad Schellevis <ad@opnsense.org>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------------
returns a list of pf tables (optional as a json container)
"""
import subprocess
import sys
import ujson
if __name__ == '__main__':
result = []
sp = subprocess.run(['/sbin/pfctl', '-sT'], capture_output=True, text=True)
for line in sp.stdout.strip().split('\n'):
result.append(line.strip())
# handle command line argument (type selection)
if len(sys.argv) > 1 and sys.argv[1] == 'json':
print(ujson.dumps(result))
else:
# output plain
for table in result:
            print(table)
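# Example run (the table names shown are illustrative assumptions):
#   $ list_tables.py json
#   ["bogons","sshlockout","virusprot"]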
|
suda/micropython
|
tests/micropython/heapalloc.py
|
Python
|
mit
| 891
| 0.007856
|
# check that we can do certain things without allocating heap memory
import gc
def f1(a):
print(a)
def f2(a, b=2):
print(a, b)
def f3(a, b, c, d):
x1 = x2 = a
x3 = x4 = b
x5 = x6 = c
x7 = x8 = d
print(x1, x3, x5, x7, x2 + x4 + x6 + x8)
global_var = 1
def test():
global global_var
global_var = 2 # set an existing global variable
for i in range(2): # for loop
        f1(i)  # function call
f1(i * 2 + 1) # binary operation with small ints
f1(a=i) # keyword arguments
f2(i) # default arg (second one)
f2(i, i) # 2 args
f3(1, 2, 3, 4) # function with lots of local state
# call h with heap allocation disabled and all memory used up
gc.disable()
try:
while True:
        'a'.lower  # allocates 1 cell for boundmeth
except MemoryError:
pass
test()
gc.enable()
|
HybridF5/jacket
|
jacket/tests/compute/unit/virt/libvirt/volume/test_volume.py
|
Python
|
apache-2.0
| 11,446
| 0
|
# Copyright 2010 OpenStack Foundation
# Copyright 2012 University Of Minho
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from jacket.compute import exception
from jacket.compute import test
from jacket.tests.compute.unit.virt.libvirt import fakelibvirt
from jacket.compute import utils
from jacket.compute.virt.libvirt import host
from jacket.compute.virt.libvirt.volume import volume
SECRET_UUID = '2a0a0d6c-babf-454d-b93e-9ac9957b95e0'
class FakeSecret(object):
def __init__(self):
self.uuid = SECRET_UUID
def getUUIDString(self):
return self.uuid
def UUIDString(self):
return self.uuid
def setValue(self, value):
self.value = value
return 0
def getValue(self, value):
return self.value
def undefine(self):
self.value = None
return 0
class LibvirtVolumeBaseTestCase(test.NoDBTestCase):
"""Contains common setup and helper methods for libvirt volume tests."""
def setUp(self):
super(LibvirtVolumeBaseTestCase, self).setUp()
self.executes = []
def fake_execute(*cmd, **kwargs):
self.executes.append(cmd)
return None, None
self.stubs.Set(utils, 'execute', fake_execute)
self.useFixture(fakelibvirt.FakeLibvirtFixture())
class FakeLibvirtDriver(object):
def __init__(self):
self._host = host.Host("qemu:///system")
def _get_all_block_devices(self):
return []
self.fake_conn = FakeLibvirtDriver()
self.connr = {
'ip': '127.0.0.1',
'initiator': 'fake_initiator',
'host': 'fake_host'
}
self.disk_info = {
"bus": "virtio",
"dev": "vde",
"type": "disk",
}
self.name = 'volume-00000001'
self.location = '10.0.2.15:3260'
self.iqn = 'iqn.2010-10.org.openstack:%s' % self.name
self.vol = {'id': 1, 'name': self.name}
self.uuid = '875a8070-d0b9-4949-8b31-104d125c9a64'
self.user = 'foo'
def _assertFileTypeEquals(self, tree, file_path):
self.assertEqual('file', tree.get('type'))
self.assertEqual(file_path, tree.find('./source').get('file'))
class LibvirtISCSIVolumeBaseTestCase(LibvirtVolumeBaseTestCase):
"""Contains common setup and helper methods for iSCSI volume tests."""
def iscsi_connection(self, volume, location, iqn, auth=False,
transport=None):
dev_name = 'ip-%s-iscsi-%s-lun-1' % (location, iqn)
if transport is not None:
dev_name = 'pci-0000:00:00.0-' + dev_name
dev_path = '/dev/disk/by-path/%s' % (dev_name)
ret = {
'driver_volume_type': 'iscsi',
'data': {
'volume_id': volume['id'],
'target_portal': location,
'target_iqn': iqn,
'target_lun': 1,
'device_path': dev_path,
'qos_specs': {
'total_bytes_sec': '102400',
'read_iops_sec': '200',
}
}
}
if auth:
ret['data']['auth_method'] = 'CHAP'
ret['data']['auth_username'] = 'foo'
ret['data']['auth_password'] = 'bar'
return ret
class LibvirtVolumeTestCase(LibvirtISCSIVolumeBaseTestCase):
def _assertDiskInfoEquals(self, tree, disk_info):
self.assertEqual(disk_info['type'], tree.get('device'))
self.assertEqual(disk_info['bus'], tree.find('./target').get('bus'))
self.assertEqual(disk_info['dev'], tree.find('./target').get('dev'))
def _test_libvirt_volume_driver_disk_info(self):
libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
connection_info = {
'driver_volume_type': 'fake',
'data': {
'device_path': '/foo',
},
'serial': 'fake_serial',
}
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
tree = conf.format_dom()
self._assertDiskInfoEquals(tree, self.disk_info)
def test_libvirt_volume_disk_info_type(self):
        self.disk_info['type'] = 'cdrom'
self._test_libvirt_volume_driver_disk_info()
def test_libvirt_volume_disk_info_dev(self):
self.disk_info['dev'] = 'hdc'
self._test_libvirt_volume_driver_disk_info()
def test_libvirt_volume_disk_info_bus(self):
self.disk_info['bus'] = 'scsi'
self._test_libvirt_volume_driver_disk_info()
def test_libvirt_volume_driver_serial(self):
libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
connection_info = {
'driver_volume_type': 'fake',
'data': {
'device_path': '/foo',
},
'serial': 'fake_serial',
}
conf = libvirt_driver.get_config(connection_info, self.disk_info)
tree = conf.format_dom()
self.assertEqual('block', tree.get('type'))
self.assertEqual('fake_serial', tree.find('./serial').text)
self.assertIsNone(tree.find('./blockio'))
self.assertIsNone(tree.find("driver[@discard]"))
def test_libvirt_volume_driver_blockio(self):
libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
connection_info = {
'driver_volume_type': 'fake',
'data': {
'device_path': '/foo',
'logical_block_size': '4096',
'physical_block_size': '4096',
},
'serial': 'fake_serial',
}
disk_info = {
"bus": "virtio",
"dev": "vde",
"type": "disk",
}
conf = libvirt_driver.get_config(connection_info, disk_info)
tree = conf.format_dom()
blockio = tree.find('./blockio')
self.assertEqual('4096', blockio.get('logical_block_size'))
self.assertEqual('4096', blockio.get('physical_block_size'))
def test_libvirt_volume_driver_iotune(self):
libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
connection_info = {
'driver_volume_type': 'fake',
'data': {
"device_path": "/foo",
'qos_specs': 'bar',
},
}
disk_info = {
"bus": "virtio",
"dev": "vde",
"type": "disk",
}
conf = libvirt_driver.get_config(connection_info, disk_info)
tree = conf.format_dom()
iotune = tree.find('./iotune')
# ensure invalid qos_specs is ignored
self.assertIsNone(iotune)
specs = {
'total_bytes_sec': '102400',
'read_bytes_sec': '51200',
'write_bytes_sec': '0',
'total_iops_sec': '0',
'read_iops_sec': '200',
'write_iops_sec': '200',
}
del connection_info['data']['qos_specs']
connection_info['data'].update(dict(qos_specs=specs))
conf = libvirt_driver.get_config(connection_info, disk_info)
tree = conf.format_dom()
self.assertEqual('102400', tree.find('./iotune/total_bytes_sec').text)
self.assertEqual('51200', tree.find('./iotune/read_bytes_sec').text)
self.assertEqual('0', tree.find('./iotune/write_bytes_sec').text)
self.assertEqual('0', tree.find('./iotune/total_iops_sec').text)
self.assertEqual('200', tree.find('./iotune/read_iops_sec').text)
        self.assertEqual('200', tree.find('./iotune/write_iops_sec').text)
|
jdufresne/staticsauce
|
staticsauce/commands/build.py
|
Python
|
gpl-3.0
| 3,466
| 0
|
# This file is part of Static Sauce <http://github.com/jdufresne/staticsauce>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import errno
import shutil
from staticsauce import commands
from staticsauce import routes
from staticsauce.conf import settings
from staticsauce.exceptions import AlreadyUpdatedError
from staticsauce.files import StaticFile
from staticsauce.utils import import_path, path_append, file_updated
class BuildCommand(commands.Command):
command = 'build'
def copy_public_dir(self):
for src_dir, dirnames, filenames in os.walk(settings.PUBLIC_DIR):
dest_dir = path_append(
settings.BUILD_DIR,
src_dir[len(settings.PUBLIC_DIR):]
)
try:
os.mkdir(dest_dir)
except OSError as err:
if err.errno != errno.EEXIST:
raise
for filename in filenames:
dest_path = os.path.join(dest_dir, filename)
src_path = os.path.join(src_dir, filename)
if file_updated(dest_path, src_path):
self.logger.info("[copy] %(src)s %(dest)s", {
'src': src_path,
'dest': dest_path,
})
shutil.copy(src_path, dest_dir)
def __call__(self):
self.copy_public_dir()
for name, route in routes.mapper:
filename = path_append(settings.BUILD_DIR, route.filename)
module, controller = route.controller.rsplit('.', 1)
module = import_path(module)
controller = getattr(module, controller)
permutations = route.permutations \
if route.permutations is not None else [{}]
for permutation in permutations:
fmt_filename = filename.format(**permutation)
try:
os.makedirs(os.path.dirname(fmt_filename))
except OSError as err:
if err.errno != errno.EEXIST:
raise
uri = 'http://{domain}{path}'.format(
domain=settings.SITE_DOMAIN,
path=route.filename
)
                static_file = StaticFile(fmt_filename, uri)
kwargs = {}
if route.kwargs:
kwargs.update(route.kwargs)
kwargs.update(permutation)
try:
controller(static_file, **kwargs)
except AlreadyUpdatedError:
pass
else:
self.logger.info("[%(controller)s] %(filename)s", {
'controller': route.controller,
'filename': fmt_filename,
})
static_file.save(fmt_filename)
|
potzenheimer/kotti_mb
|
setup.py
|
Python
|
mit
| 1,275
| 0.000784
|
import os
from setuptools import find_packages
from setuptools import setup
version = '1.0'
project = 'kotti_mb'
install_requires=[
'Kotti',
],
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
setup(name=project,
      version=version,
description="AddOn for Kotti",
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"License :: Repoze Public License",
],
      keywords='kotti addon',
author='Christoph Boehner',
author_email='cb@vorwaerts-werbung.de',
url='http://pypi.python.org/pypi/',
license='bsd',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=[],
entry_points={
'fanstatic.libraries': [
'kotti_mb = kotti_mb.fanstatic:library',
],
},
extras_require={},
message_extractors={'kotti_mb': [
('**.py', 'lingua_python', None),
('**.zcml', 'lingua_xml', None),
('**.pt', 'lingua_xml', None),
]},
)
|
mvidalgarcia/indico
|
indico/modules/networks/util.py
|
Python
|
mit
| 506
| 0
|
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
def serialize_ip_network_group(group):
"""Serialize group to JSON-like object"""
return {
'id': group.id,
'name': group.name,
        'identifier': 'IPNetworkGroup:{}'.format(group.id),
'_type': 'IPNetworkGroup'
}
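# Example result (the group values are illustrative assumptions):
#   serialize_ip_network_group(group)
#   -> {'id': 1, 'name': 'CERN LAN',
#       'identifier': 'IPNetworkGroup:1', '_type': 'IPNetworkGroup'}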
|
dede67/sokoban
|
SokoGoodFloors.py
|
Python
|
gpl-3.0
| 5,693
| 0.026464
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import deque
import SokoMove
import Helper as hlp
class SokoGoodFloors():
# Interface:
    # findGoodFloors() -> list of good floors
def __init__(self, pgs, pgdp, zfl, pp):
self.pgs=pgs
self.pgdp=pgdp
self.zfl=zfl
self.pp=pp
self.bf=[]
self.sm=SokoMove.SokoMove()
self.pppi=[(-1, 0), ( 1, 0), ( 0, -1), ( 0, 1)] # Player-Pull-Pos
        self.pdpi=[(-2, 0), ( 2, 0), ( 0, -2), ( 0, 2)] # Player-Destination-Pos
self.hrms=["L", "R", "U", "D"]
# ###########################################################
    # Returns a packed list of the possible pulls at the box
    # at (x, y), ignoring the position of the player figure.
def __pullableBordersForSingleBox(self, x, y):
pbl=[]
for i in range(4):
if self.pgs[y+self.pppi[i][1]][x+self.pppi[i][0]]!="#":
                # the square the player would pull from is free
if self.pgs[y+self.pdpi[i][1]][x+self.pdpi[i][0]]!="#":
                    # the square the player would land on is free
pbl.append(hlp.mpack((x+self.pppi[i][0], y+self.pppi[i][1]), i))
return(pbl)
# ###########################################################
    # Returns True if there is a path from pp to dp.
    # There must be exactly one box on the board, at
    # coordinates bp. The contents of the dynamic board
    # in self.pgdp are not taken into account!
def __testPlayerWayToPos(self, pp, dp, bp):
rc=self.__testPlayerWayToPosSubQueue(pp, dp, bp)
return(rc!=None)
# ###########################################################
    # On a board containing only one box (at bp), returns a
    # sequence of moves for the path from pp to dp. If no
    # path is found, None is returned.
def __testPlayerWayToPosSubQueue(self, pp, dp, bp):
queue=deque([(pp, "")])
visited=[pp]
while queue:
((x, y), rc)=queue.popleft()
if (x, y)==dp:
return(rc)
for i in range(4):
nx=x+self.pppi[i][0]
ny=y+self.pppi[i][1]
if (nx, ny) not in visited:
if self.pgs[ny][nx]!="#" and (nx, ny)!=bp:
queue.append(((nx, ny), rc+self.hrms[i]))
visited.append((nx, ny))
return(None)
# ###########################################################
    # like __testPlayerWayToPosSubQueue - but no good here.
    # Not used!
def __testPlayerWayToPosSubStack(self, rc, pp, dp, visited):
if rc==True:
return(True, visited)
if pp==dp:
return(True, visited)
if self.pgs[pp[1]][pp[0]]=="#":
return(False, visited)
if pp in visited:
return(False, visited)
visited.append(pp)
        rc, visited=self.__testPlayerWayToPosSubStack(rc, (pp[0]-1, pp[1]), dp, visited)
        rc, visited=self.__testPlayerWayToPosSubStack(rc, (pp[0]+1, pp[1]), dp, visited)
        rc, visited=self.__testPlayerWayToPosSubStack(rc, (pp[0], pp[1]-1), dp, visited)
        rc, visited=self.__testPlayerWayToPosSubStack(rc, (pp[0], pp[1]+1), dp, visited)
return(False, visited)
# ###########################################################
    # Returns a list of all floors a box can stand on after
    # being pulled from a goal square.
def findGoodFloors(self):
good_floors=[]
        for gsx, gsy in self.zfl:                             # for each goal square...
            pgdpt=[hlp.ppack(gsx, gsy)]                       # ...put a box on it
            rlst=self.__pullableBordersForSingleBox(gsx, gsy) # determine possible inverse moves
            # rlst can contain 0 to 4 elements
            for p in rlst:
                pgdpt=[hlp.ppack(gsx, gsy)] # reset, because __findGoodFloorsForSingleBox modifies pgdpt
good_floors=self.__findGoodFloorsForSingleBox(pgdpt, p, self.pp, good_floors)
rc=[]
        for i in good_floors:                                 # turn the list of pulls into a list of floors
(dp, d)=hlp.munpack(i)
if dp not in rc:
rc.append(dp)
return(rc)
# ###########################################################
    # Recursively walks all possible box positions that can be
    # reached by pulling from a goal square.
    # good_floors keeps a list of box positions (and NOT - as is
    # usual elsewhere - player positions) together with a pull
    # direction. Floors can thus be visited several times (up to
    # four) - each time with a different pull direction.
    #
    # May extend good_floors and modifies pgdp.
def __findGoodFloorsForSingleBox(self, pgdp, pull, pp, good_floors):
        bp=hlp.punpack(pgdp[0])  # current box position (unpacked) - there is only one in there
        dp, d=hlp.munpack(pull)  # box target position (unpacked)
        if hlp.mpack(bp, d) in good_floors:  # if this pull has already been performed...
            return(good_floors)              # ...that is enough
        if self.__testPlayerWayToPos(pp, dp, bp)==True: # if the player can reach the pull position...
            pp=dp                                       # ...then the player pos can be set to the pull pos
            good_floors.append(hlp.mpack(bp, d))        # ...and the pull recorded as good and performed
            rc, pp=self.sm.inverseMovePlayer(self.pgs, pgdp, pp, d) # perform the move according to "pull"
            bp=hlp.punpack(pgdp[0])                     # fetch the box position, which may have changed
            rlst=self.__pullableBordersForSingleBox(bp[0], bp[1]) # determine possible follow-up pulls
            for p in rlst:                              # over all follow-up pulls
                pgdpt=[pgdp[0]]  # reset, because __findGoodFloorsForSingleBox modifies pgdpt
good_floors=self.__findGoodFloorsForSingleBox(pgdpt, p, pp, good_floors)
return(good_floors)
|
testmana2/test
|
DocumentationTools/TemplatesListsStyle.py
|
Python
|
gpl-3.0
| 7,017
| 0.00456
|
# -*- coding: utf-8 -*-
# Copyright (c) 2004 - 2015 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing templates for the documentation generator (lists style).
"""
from __future__ import unicode_literals
#################################################
## Common templates for index and docu files ##
#################################################
headerTemplate = \
'''<!DOCTYPE html>
<html><head>
<title>{{Title}}</title>
<meta charset="UTF-8">
</head>
<body style="background-color:{BodyBgColor};color:{BodyColor}">'''
footerTemplate = '''
</body></html>'''
#########################################
## Templates for documentation files ##
#########################################
moduleTemplate = \
'''<a NAME="top" ID="top"></a>
<h1 style="background-color:{Level1HeaderBgColor};color:{Level1HeaderColor}">
{{Module}}</h1>
{{ModuleDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Global Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Classes</h3>
{{ClassList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Functions</h3>
{{FunctionList}}
<hr />'''
rbFileTemplate = \
'''<a NAME="top" ID="top"></a>
<h1 style="background-color:{Level1HeaderBgColor};color:{Level1HeaderColor}">
{{Module}}</h1>
{{ModuleDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Global Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Classes</h3>
{{ClassList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Modules</h3>
{{RbModulesList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Functions</h3>
{{FunctionList}}
<hr />'''
classTemplate = \
'''<hr />
<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Class}}</h2>
{{ClassDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Derived from</h3>
{{ClassSuper}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Class Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Class Methods</h3>
{{ClassMethodList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Methods</h3>
{{MethodList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Static Methods</h3>
{{StaticMethodList}}
{{MethodDetails}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''
methodTemplate = \
'''<a NAME="{{Anchor}}.{{Method}}" ID="{{Anchor}}.{{Method}}"></a>
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
{{Class}}.{{Method}}{{MethodClassifier}}</h3>
<b>{{Method}}</b>(<i>{{Params}}</i>)
{{MethodDescription}}'''
constructorTemplate = \
'''<a NAME="{{Anchor}}.{{Method}}" ID="{{Anchor}}.{{Method}}"></a>
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
{{Class}} (Constructor)</h3>
<b>{{Class}}</b>(<i>{{Params}}</i>)
{{MethodDescription}}'''
rbModuleTemplate = \
'''<hr />
<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Module}}</h2>
{{ModuleDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Module Attributes</h3>
{{GlobalsList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Classes</h3>
{{ClassesList}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Functions</h3>
{{FunctionsList}}
<hr />
{{ClassesDetails}}
{{FunctionsDetails}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''
rbModulesClassTemplate = \
'''<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Class}}</h2>
{{ClassDescription}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Derived from</h3>
{{ClassSuper}}
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Methods</h3>
{{MethodList}}
{{MethodDetails}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''
functionTemplate = \
'''<hr />
<a NAME="{{Anchor}}" ID="{{Anchor}}"></a>
<h2 style="background-color:{CFBgColor};color:{CFColor}">{{Function}}</h2>
<b>{{Function}}</b>(<i>{{Params}}</i>)
{{FunctionDescription}}
<div align="right"><a style="color:{LinkColor}" href="#top">Up</a></div>
<hr />'''
listTemplate = \
'''<table>
{{Entries}}
</table>'''
listEntryTemplate = \
'''<tr>
<td><a style="color:{LinkColor}" href="#{{Link}}">{{Name}}</a></td>
<td>{{Deprecated}}{{Description}}</td>
</tr>'''
listEntryNoneTemplate = '''<tr><td>None</td></tr>'''
listEntryDeprecatedTemplate = '''<b>Deprecated.</b>'''
listEntrySimpleTemplate = '''<tr><td>{{Name}}</td></tr>'''
paragraphTemplate = \
'''<p>
{{Lines}}
</p>'''
parametersListTemplate = \
'''<dl>
{{Parameters}}
</dl>'''
parametersListEntryTemplate = \
'''<dt><i>{{Name}}</i></dt>
<dd>
{{Description}}
</dd>'''
parameterTypesListEntryTemplate = \
'''<dt><i>{{Name}}</i> ({{Type}})</dt>
<dd>
{{Description}}
</dd>'''
returnsTemplate = \
'''<dl>
<dt>Returns:</dt>
<dd>
{{0}}
</dd>
</dl>'''
returnTypesTemplate = \
'''<dl>
<dt>Return Type:</dt>
<dd>
{{0}}
</dd>
</dl>'''
exceptionsListTemplate = \
'''<dl>
{{Exceptions}}
</dl>'''
exceptionsListEntryTemplate = \
'''<dt>Raises <b>{{Name}}</b>:</dt>
<dd>
{{Description}}
</dd>'''
signalsListTemplate = \
'''<h4>Signals</h4>
<dl>
{{Signals}}
</dl>'''
signalsListEntryTemplate = \
'''<dt>{{Name}}</dt>
<dd>
{{Description}}
</dd>'''
eventsListTemplate = \
'''<h4>Events</h4>
<dl>
{{Events}}
</dl>'''
eventsListEntryTemplate = \
'''<dt>{{Name}}</dt>
<dd>
{{Description}}
</dd>'''
deprecatedTemplate = \
'''<p>
<b>Deprecated.</b>
{{Lines}}
</p>'''
authorInfoTemplate = \
'''<p>
<i>Author(s)</i>:
{{Authors}}
</p>'''
seeListTemplate = \
'''<dl>
<dt><b>See Also:</b></dt>
{{Links}}
</dl>'''
seeListEntryTemplate = \
'''<dd>
{{Link}}
</dd>'''
seeLinkTemplate = '''<a style="color:{LinkColor}" {{Link}}'''
sinceInfoTemplate = \
'''<p>
<b>since</b> {{Info}}
</p>'''
#################################
## Templates for index files ##
#################################
indexBodyTemplate = '''
<h1 style="background-color:{Level1HeaderBgColor};color:{Level1HeaderColor}">
{{Title}}</h1>
{{Description}}
{{Subpackages}}
{{Modules}}'''
indexListPackagesTemplate = '''
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Packages</h3>
<table>
{{Entries}}
</table>'''
indexListModulesTemplate = '''
<h3 style="background-color:{Level2HeaderBgColor};color:{Level2HeaderColor}">
Modules</h3>
<table>
{{Entries}}
</table>'''
indexListEntryTemplate = \
'''<tr>
<td><a style="color:{LinkColor}" href="{{Link}}">{{Name}}</a></td>
<td>{{Description}}</td>
</tr>'''
|
verilylifesciences/analysis-py-utils
|
verily/query_kit/__init__.py
|
Python
|
apache-2.0
| 637
| 0
|
# Copyright 2019 Verily Life Sciences Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package marker file."""
|
surabujin/libpy
|
libpy/test/test_shadow.py
|
Python
|
mit
| 1,664
| 0
|
# -*- coding:utf-8 -*-
import mock
import pytest
import libpy
import libpy.shadow
class TestShadow(object):
def test(self):
proxy = libpy.shadow.Shadow(ShadowTarget())
assert isinstance(proxy, libpy.shadow.Shadow)
assert proxy.a == mock.sentinel.proxy_target_a
assert proxy.b == mock.sentinel.proxy_target_b
with pytest.raises(AttributeError):
_ = proxy.X
with pytest.raises(AttributeError):
_ = proxy._c
def test_keep_value(self):
target = ShadowTarget()
proxy = libpy.shadow.Shadow(target)
assert proxy.b == mock.sentinel.proxy_target_b
delattr(target, 'b')
assert not hasattr(target, 'b')
assert proxy.b == mock.sentinel.proxy_target_b
delattr(proxy, 'b')
with pytest.raises(AttributeError):
_ = proxy.b
def test_update(self):
        target = ShadowTarget()
proxy = libpy.shadow.Shadow(target)
assert proxy.a == mock.sentinel.proxy_target_a
target.a = 'new_value_for_a'
assert proxy.a == mock.sentinel.proxy_target_a
delattr(proxy, 'a')
assert proxy.a == 'new_value_for_a'
def test_override(self):
proxy = libpy.shadow.Shadow(ShadowTarget(), a='override_a')
assert proxy.a == 'override_a'
assert proxy.b == mock.sentinel.proxy_target_b
proxy.b = 'dynamic_override_b'
assert proxy.b == 'dynamic_override_b'
class ShadowTarget(object):
a = mock.sentinel.proxy_target_a
def __init__(self):
self.b = mock.sentinel.proxy_target_b
self._c = mock.sentinel.proxy_target_c
|
zachcp/qiime
|
tests/test_distance_matrix_from_mapping.py
|
Python
|
gpl-2.0
| 8,329
| 0.002641
|
#!/usr/bin/env python
# File created on 27 Sep 2011
from __future__ import division
__author__ = "Antonio Gonzalez Pena"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = [
"Antonio Gonzalez Pena",
"Andrew J. King",
"Michael S. Robeson",
]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Antonio Gonzalez Pena"
__email__ = "antgonza@gmail.com"
from qiime.distance_matrix_from_mapping import compute_distance_matrix_from_metadata, dist_vincenty, calculate_dist_vincenty
from numpy import array
from unittest import TestCase, main
from numpy.testing import assert_almost_equal
import StringIO
class FunctionTests(TestCase):
"""Tests of top-level functions"""
def setUp(self):
self.fasting_map = """#SampleID BarcodeSequence LinkerPrimerSequence Treatment DOB Float_Col Description
#Example mapping file for the QIIME analysis package. These 9 samples are from a study of the effects of exercise and diet on mouse cardiac physiology (Crawford, et al, PNAS, 2009).
PC.354 AGCACGAGCCTA YATGCTGCCTCCCGTAGGAGT Control 20061218 .1 Control_mouse__I.D._354
PC.355 AACTCGTCGATG YATGCTGCCTCCCGTAGGAGT Control 20061218 .2 Control_mouse__I.D._355
PC.356 ACAGACCACTCA YATGCTGCCTCCCGTAGGAGT Control 20061126 .3 Control_mouse__I.D._356
PC.481 ACCAGCGACTAG YATGCTGCCTCCCGTAGGAGT Control 20070314 .4 Control_mouse__I.D._481
PC.593 AGCAGCACTTGT YATGCTGCCTCCCGTAGGAGT Control 20071210 .5 Control_mouse__I.D._593
PC.607 AACTGTGCGTAC YATGCTGCCTCCCGTAGGAGT Fast 20071112 .6 Fasting_mouse__I.D._607
PC.634 ACAGAGTCGGCT YATGCTGCCTCCCGTAGGAGT Fast 20080116 .7 Fasting_mouse__I.D._634
PC.635 ACCGCAGAGTCA YATGCTGCCTCCCGTAGGAGT Fast 20080116 .8 Fasting_mouse__I.D._635
PC.636 ACGGTGAGTGTC YATGCTGCCTCCCGTAGGAGT Fast 20080116 .9 Fasting_mouse__I.D._636"""
self.DOB = [
20061218,
20061218,
20061126,
20070314,
20071210,
20071112,
20080116,
20080116,
20080116]
self.Float_Col = [.1, .2, .3, .4, .5, .6, .7, .8, .9]
self.latitudes = [30, 20, 30, 30, 0, 1, 90, 89, 0, 0]
self.longitudes = [60, -50, 60, 60, 0, 0, 0, 0, 0, 0]
def test_compute_distance_matrix_from_metadata_int(self):
""" distance calculations on ints should throw no errors"""
exp_out = array(
[[0, 0, 92, 9096, 9992, 9894, 18898, 18898, 18898], [0, 0, 92, 9096, 9992, 9894, 18898, 18898, 18898],
[92, 92, 0, 9188, 10084, 9986, 18990, 18990, 18990], [9096,
9096, 9188, 0, 896, 798, 9802, 9802, 9802],
[9992, 9992, 10084, 896, 0, 98, 8906, 8906, 8906], [9894,
9894, 9986, 798, 98, 0, 9004, 9004, 9004],
[18898, 18898, 18990, 9802, 8906, 9004, 0, 0,
0], [18898, 18898, 18990, 9802, 8906, 9004, 0, 0, 0],
[18898, 18898, 18990, 9802, 8906, 9004, 0, 0, 0]])
res_out = compute_distance_matrix_from_metadata(self.DOB)
assert_almost_equal(exp_out, res_out)
def test_compute_distance_matrix_from_metadata_floats(self):
""" distance calculations on floats should throw no errors"""
# testing floats
exp_out = array(
[[0., 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8], [0.1, 0., 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7],
[0.2, 0.1, 0., 0.1, 0.2, 0.3, 0.4, 0.5, 0.6], [0.3,
0.2, 0.1, 0., 0.1, 0.2, 0.3, 0.4, 0.5],
[0.4, 0.3, 0.2, 0.1, 0., 0.1, 0.2, 0.3, 0.4], [0.5,
0.4, 0.3, 0.2, 0.1, 0., 0.1, 0.2, 0.3],
[0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0., 0.1, 0.2], [0.7,
0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0., 0.1],
[0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0.]])
res_out = compute_distance_matrix_from_metadata(self.Float_Col)
assert_almost_equal(exp_out, res_out)
def test_dist_vincenty(self):
"""dist_Vincenty:Returns distance in meters between two lat long points"""
lat1, lon1, lat2, lon2, expected_value = 30, 60, 20, -50, 10709578.387
value = dist_vincenty(lat1, lon1, lat2, lon2, 20)
assert_almost_equal(value, expected_value)
lat1, lon1, lat2, lon2, expected_value = 30, 60, 30, 60, 0
value = dist_vincenty(lat1, lon1, lat2, lon2, 20)
assert_almost_equal(value, expected_value)
lat1, lon1, lat2, lon2, expected_value = 0, 0, 1, 0, 110574.389
value = dist_vincenty(lat1, lon1, lat2, lon2, 20)
assert_almost_equal(value, expected_value)
lat1, lon1, lat2, lon2, expected_value = 90, 0, 89, 0, 111693.865
value = dist_vincenty(lat1, lon1, lat2, lon2, 20)
assert_almost_equal(value, expected_value)
lat1, lon1, lat2, lon2, expected_value = 90, 0, -90, 0, 20003931.459
value = dist_vincenty(lat1, lon1, lat2, lon2, 20)
assert_almost_equal(value, expected_value)
lat1, lon1, lat2, lon2, expected_value = 90, 0, 0, 0, 10001965.729
value = dist_vincenty(lat1, lon1, lat2, lon2, 20)
assert_almost_equal(value, expected_value)
lat1, lon1, lat2, lon2, expected_value = 0, 0, 0, 0, 0
value = dist_vincenty(lat1, lon1, lat2, lon2, 20)
assert_almost_equal(value, expected_value)
# test for not converge
lat1, lon1, lat2, lon2 = 0, 180, 0, 0
self.assertRaises(
ValueError,
dist_vincenty,
lat1,
lon1,
lat2,
lon2,
20)
def test_calculate_dist_vincenty(self):
exp_out = array(
[[0.0, 10709578.387, 0.0, 0.0, 7154900.607, 7094106.828, 6681852.331, 6626434.332, 7154900.607, 7154900.607],
[10709578.387,
0.0,
10709578.387,
10709578.387,
5877643.846,
5831009.412,
7789599.475,
7718017.604,
5877643.846,
5877643.846],
[0.0,
10709578.387,
0.0,
0.0,
7154900.607,
7094106.828,
6681852.331,
6626434.332,
7154900.607,
7154900.607],
[0.0,
10709578.387,
0.0,
0.0,
7154900.607,
7094106.828,
6681852.331,
6626434.332,
7154900.607,
7154900.607],
[7154900.607,
5877643.846,
7154900.607,
7154900.607,
0.0,
110574.389,
10001965.729,
9890271.864,
0.0,
0.0],
[7094106.828,
5831009.412,
7094106.828,
7094106.828,
110574.389,
0.0,
9891391.341,
9779697.476,
110574.389,
110574.389],
[6681852.331,
7789599.475,
6681852.331,
6681852.331,
10001965.729,
9891391.341,
0.0,
111693.865,
10001965.729,
10001965.729],
[6626434.332,
7718017.604,
6626434.332,
6626434.332,
9890271.864,
9779697.476,
111693.865,
0.0,
9890271.864,
9890271.864],
[7154900.607,
5877643.846,
7154900.607,
7154900.607,
0.0,
110574.389,
10001965.729,
9890271.864,
0.0,
0.0],
             [7154900.607, 5877643.846, 7154900.607, 7154900.607, 0.0, 110574.389, 10001965.729, 9890271.864, 0.0, 0.0]])
        res_out = calculate_dist_vincenty(self.latitudes, self.longitudes)
        assert_almost_equal(exp_out, res_out)
|
squarebracket/star
|
scheduler/tests/schedule_generator_tests.py
|
Python
|
gpl-2.0
| 3,829
| 0.000522
|
from django.utils.unittest.case import TestCase
from scheduler.models import ScheduleGenerator
from uni_info.models import Semester, Course
class ScheduleGeneratorTest(TestCase):
"""
Test class for schedule generator, try different courses
"""
fixtures = ['/scheduler/fixtures/initial_data.json']
def setUp(self):
"""
Setup common data needed in each unit test
"""
self.fall_2013_semester = [sem for sem in Semester.objects.all() if sem.name == 'Fall 2013'][0]
def test_should_generate_empty_schedule(self):
"""
Test generator does not crash with empty list as edge case
"""
course_list = []
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(0, len(result))
def test_should_generate_with_1_course(self):
"""
Test generator with only 1 course as edge case
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
course_list = [soen341]
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(2, len(result))
def test_should_generate_schedule_for_2_course(self):
"""
Test generator with more than 1 course
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
soen287 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '287'][0]
course_list = [soen287, soen341]
        generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
        self.assertEqual(4, len(result))
def test_should_not_generate_schedule_for_3_course_conflict(self):
"""
Test generator with three conflicting courses
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
soen342 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '342'][0]
soen287 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '287'][0]
course_list = [soen287, soen341, soen342]
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(0, len(result))
def test_should_generate_schedule_for_3_course_no_conflict(self):
"""
Test generator with three courses that has no conflicts
"""
soen341 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '341'][0]
soen343 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '343'][0]
soen287 = [s for s in Course.objects.all() if
s.course_letters == 'SOEN' and
s.course_numbers == '287'][0]
course_list = [soen287, soen341, soen343]
generator = ScheduleGenerator(course_list, self.fall_2013_semester)
result = generator.generate_schedules()
self.assertIsNotNone(result)
self.assertEqual(4, len(result))
|
gusaul/gigsblog
|
urls/main.py
|
Python
|
mit
| 683
| 0.011713
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
import urls
from apps.blog import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gigsblog.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.Index.as_view(), name='index'),
url(r'^signup', views.SignUp.as_view(), name='signUp'),
    url(r'^login', views.Login.as_view(), name='login'),
url(r'^logout', 'django.contrib.auth.views.logout',{'next_page':'/'}, name='logout'),
url(r'^post/', include('urls.blog', namespace='post')),
url(r'^admin/', include('urls.admin')),
)
|
mmgen/mmgen
|
mmgen/wallet/brain.py
|
Python
|
gpl-3.0
| 1,905
| 0.026247
|
#!/usr/bin/env python3
#
# mmgen = Multi-Mode GENerator, a command-line cryptocurrency wallet
# Copyright (C)2013-2022 The MMGen Project <mmgen@tuta.io>
# Licensed under the GNU General Public License, Version 3:
# https://www.gnu.org/licenses
# Public project repositories:
# https://github.com/mmgen/mmgen
# https://gitlab.com/mmgen/mmgen
"""
wallet.brain: brainwallet wallet class
"""
from ..opts import opt
from ..util import msg,qmsg,qmsg_r
from ..color import yellow
from .enc import wallet
from .seed import Seed
import mmgen.crypto as crypto
class wallet(wallet):
stdin_ok = True
desc = 'brainwallet'
# brainwallet warning message? TODO
	def get_bw_params(self):
# already checked
a = opt.brain_params.split(',')
return int(a[0]),a[1]
def _deformat(self):
self.brainpasswd = ' '.join(self.fmt_data.split())
return True
def _decrypt(self):
d = self.ssdata
if opt.brain_params:
"""
Don't set opt.seed_len! When using multiple wallets, BW seed len might differ from others
"""
bw_seed_len,d.hash_preset = self.get_bw_params()
else:
if not opt.seed_len:
				qmsg(f'Using default seed length of {yellow(str(Seed.dfl_len))} bits\n'
+ 'If this is not what you want, use the --seed-len option' )
self._get_hash_preset()
bw_seed_len = opt.seed_len or Seed.dfl_len
qmsg_r('Hashing brainwallet data. Please wait...')
# Use buflen arg of scrypt.hash() to get seed of desired length
seed = crypto.scrypt_hash_passphrase(
self.brainpasswd.encode(),
b'',
d.hash_preset,
buflen = bw_seed_len // 8 )
qmsg('Done')
self.seed = Seed(seed)
msg(f'Seed ID: {self.seed.sid}')
qmsg('Check this value against your records')
return True
def _format(self):
raise NotImplementedError('Brainwallet not supported as an output format')
def _encrypt(self):
raise NotImplementedError('Brainwallet not supported as an output format')
|
wubr2000/googleads-python-lib
|
examples/dfa/v1_20/create_campaign.py
|
Python
|
apache-2.0
| 2,699
| 0.004817
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example creates a campaign in a given advertiser.
To create an advertiser, run create_advertiser.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import dfa
ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
CAMPAIGN_NAME = 'INSERT_CAMPAIGN_NAME_HERE'
URL = 'INSERT_LANDING_PAGE_URL_HERE'
LANDING_PAGE_NAME = 'INSERT_LANDING_PAGE_NAME_HERE'
START_DATE = '%(year)s-%(month)02d-%(day)02dT12:00:00' % {
'year': 'INSERT_START_YEAR_HERE',
'month': int('INSERT_START_MONTH_HERE'),
'day': int('INSERT_START_DAY_HERE')}
END_DATE = '%(year)s-%(month)02d-%(day)02dT12:00:00' % {
'year': 'INSERT_END_YEAR_HERE',
'month': int('INSERT_END_MONTH_HERE'),
'day': int('INSERT_END_DAY_HERE')}
def main(client, advertiser_id, campaign_name, url, landing_page_name,
start_date, end_date):
# Initialize appropriate service.
campaign_service = client.GetService(
'campaign', 'v1.20', 'https://advertisersapitest.doubleclick.net')
# Create a default landing page for the campaign and save it.
default_landing_page = {
'url': url,
      'name': landing_page_name
}
  default_landing_page_id = campaign_service.saveLandingPage(
default_landing_page)['id']
# Construct and save the campaign.
campaign = {
'name': campaign_name,
'advertiserId': advertiser_id,
'defaultLandingPageId': default_landing_page_id,
'archived': 'false',
'startDate': start_date,
'endDate': end_date
}
result = campaign_service.saveCampaign(campaign)
# Display results.
print 'Campaign with ID \'%s\' was created.' % result['id']
if __name__ == '__main__':
# Initialize client object.
dfa_client = dfa.DfaClient.LoadFromStorage()
main(dfa_client, ADVERTISER_ID, CAMPAIGN_NAME, URL, LANDING_PAGE_NAME,
START_DATE, END_DATE)
|
brodeau/barakuda
|
python/exec/orca_mesh_mask_to_bitmap.py
|
Python
|
gpl-2.0
| 842
| 0.011876
|
#!/usr/bin/env python
# B a r a K u d a
#
# L. Brodeau, 2017
import sys
import numpy as nmp
from PIL import Image
import string
import os
from netCDF4 import Dataset
narg = len(sys.argv)
if narg != 2:
print 'Usage: '+sys.argv[0]+' <mesh_mask>'; sys.exit(0)
cf_mm = sys.argv[1]
cf_bmp = string.replace(os.path.basename(cf_mm), '.nc', '_orig.bmp')
cf_bmp = string.replace(os.path.basename(cf_bmp), '_orig.bmp4', '_orig.bmp')
# Opening mesh_mask:
f_mm = Dataset(cf_mm)
mask = f_mm.variables['tmask'][0,0,:,:]
f_mm.close()
(nj, ni) = nmp.shape(mask)
print ' nj, ni =>', nj, ni
#imask= nmp.zeros((nj, ni), dtype=nmp.int8)
#imask[:,:] = mask[:,:]
#del mask
imask = (255*mask).astype(nmp.uint8)
# Then save it:
result = Image.fromarray(nmp.flipud(imask))
result.save(cf_bmp)
print ' *** Image '+cf_bmp+' saved!\n'
|
lunarca/fngrpt
|
models/BaseModels.py
|
Python
|
apache-2.0
| 1,637
| 0.001833
|
# -*- coding: utf-8 -*-
'''
Created on Mar 12, 2012
@author: moloch
Copyright 2012
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import re
from uuid import uuid4
from datetime import datetime
from sqlalchemy import Column
from sqlalchemy.types import DateTime, Integer, String
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.declarative import declarative_base
generate_uuid = lambda: str(uuid4())
class _DatabaseObject(object):
''' All game objects inherit from this object '''
@declared_attr
def __tablename__(self):
''' Converts name from camel case to snake case '''
name = self.__name__
return (
name[0].lower() +
re.sub(r'([A-Z])',
lambda letter: "_" + letter.group(0).lower(), name[1:]
)
)
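    # For illustration (hypothetical model name): a subclass declared as
    # "class PlayerScore(DatabaseObject)" would get the table name
    # "player_score" from the camel-to-snake conversion above.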
id = Column(Integer, unique=True, primary_key=True) # lint:ok
uuid = Column(String(36), unique=True, default=generate_uuid)
created = Column(DateTime, default=datetime.now)
# Create an instance called "BaseObject"
DatabaseObject = declarative_base(cls=_DatabaseObject)
|
vim-IDE/MatchTagAlways
|
python/mta_core.py
|
Python
|
gpl-3.0
| 8,519
| 0.024416
|
#!/usr/bin/env python
#
# Copyright (C) 2012 Strahinja Val Markovic <val@markovic.io>
#
# This file is part of MatchTagAlways.
#
# MatchTagAlways is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MatchTagAlways is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MatchTagAlways. If not, see <http://www.gnu.org/licenses/>.
import sys
import re
PY2 = (sys.version_info[0] == 2)
TAG_REGEX = re.compile(
r"""<\s* # the opening bracket + whitespace
(?P<start_slash>/)? # captures the slash if closing bracket
\s* # more whitespace
(?P<tag_name>[\w:-]+) # the tag name, captured
.*? # anything else in the tag
(?P<end_slash>/)? # ending slash, for self-closed tags
>""",
re.VERBOSE | re.DOTALL )
COMMENT_REGEX = re.compile( '<!--.*?-->', re.DOTALL )
class TagType( object ):
OPENING = 1
CLOSING = 2
SELF_CLOSED = 3
class Tag( object ):
def __init__( self, match_object ):
if not match_object:
self.valid = False
return
self.valid = True
self.name = match_object.group( 'tag_name' )
if match_object.group( 'start_slash' ):
self.kind = TagType.CLOSING
elif match_object.group( 'end_slash' ):
self.kind = TagType.SELF_CLOSED
else:
self.kind = TagType.OPENING
self.start_offset = match_object.start()
self.end_offset = match_object.end()
if PY2:
def __nonzero__( self ):
return self.valid
else:
def __bool__( self ):
return self.valid
def __eq__( self, other ):
if type( other ) is type( self ):
      return self.__dict__ == other.__dict__
return False
def PacifyHtmlComments( text ):
"""Replaces the contents (including delimiters) of all HTML comments in the
passed-in text with 'x'. For instance, 'foo <!-- bar -->' becomes
'foo xxxx xxx xxx'. We can't just remove the comments because that would screw
with the mapping of string offset to Vim line/column."""
def replacement( match ):
return re.sub( '\S', 'x', match.group() )
return COMMENT_REGEX.sub( replacement, text )
def ReverseFindTag( text, from_position ):
try:
bracket_index = text.rindex( '<', 0, from_position )
except ValueError:
return None
match = TAG_REGEX.match( text, bracket_index )
if not match:
return None
if match.end() <= from_position:
return Tag( match )
return None
def ForwardFindTag( text, from_position ):
return Tag( TAG_REGEX.search( text, from_position ) )
def OffsetForLineColumnInString( text, line, column ):
offset = -1
current_line = 1
current_column = 0
previous_char = ''
for char in text:
offset += 1
current_column += 1
if char == '\n':
current_line += 1
current_column = 0
if current_line == line and current_column == column:
return offset
if current_line > line:
# Vim allows the user to stop on an empty line and declares that column 1
# exists even when there are no characters on that line
if current_column == 0 and previous_char == '\n':
        return offset - 1
break
previous_char = char
return None
def LineColumnForOffsetInString( text, offset ):
current_offset = -1
current_line = 1
current_column = 0
for char in text:
current_offset += 1
current_column += 1
if char == '\n':
current_line += 1
current_column = 0
continue
if current_offset == offset:
return current_line, current_column
if current_offset > offset:
break
return None, None
def TagWithSameNameExistsInSequence( tag, sequence ):
for current_tag in sequence:
if current_tag.name == tag.name:
return True
return False
def GetPreviousUnmatchedOpeningTag( html, cursor_offset ):
search_index = cursor_offset
tags_to_close = []
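  # Scan backwards from the cursor: every closing tag found on the way must be
  # cancelled out by a matching earlier opening tag; the first opening tag left
  # uncancelled is the unmatched one we report.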
while True:
prev_tag = ReverseFindTag( html, search_index )
if not prev_tag:
break
search_index = prev_tag.start_offset
if prev_tag.kind == TagType.CLOSING:
tags_to_close.append( prev_tag )
elif prev_tag.kind == TagType.OPENING:
if tags_to_close:
if tags_to_close[ -1 ].name == prev_tag.name:
tags_to_close.pop()
else:
continue
else:
return prev_tag
# self-closed tags ignored
return None
def GetNextUnmatchedClosingTag( html, cursor_offset ):
def RemoveClosedOpenTags( tags_to_close, new_tag ):
i = 1
for tag in reversed( tags_to_close ):
if tag.name == new_tag.name:
break
else:
i += 1
assert i <= len( tags_to_close )
del tags_to_close[ -i: ]
return tags_to_close
search_index = cursor_offset
tags_to_close = []
while True:
next_tag = ForwardFindTag( html, search_index )
if not next_tag:
break
search_index = next_tag.end_offset
if next_tag.kind == TagType.OPENING:
tags_to_close.append( next_tag )
elif next_tag.kind == TagType.CLOSING:
if not tags_to_close or not TagWithSameNameExistsInSequence(
next_tag, tags_to_close ):
return next_tag
tags_to_close = RemoveClosedOpenTags( tags_to_close, next_tag )
# self-closed tags ignored
return None
def GetOpeningAndClosingTags( html, cursor_offset ):
current_offset = cursor_offset
closing_tag = GetNextUnmatchedClosingTag( html, current_offset )
while True:
opening_tag = GetPreviousUnmatchedOpeningTag( html, current_offset )
if not opening_tag or not closing_tag:
return None, None
    if opening_tag.name == closing_tag.name:
return opening_tag, closing_tag
    current_offset = opening_tag.start_offset
def AdaptCursorOffsetIfNeeded( sanitized_html, cursor_offset ):
"""The cursor offset needs to be adapted if it is inside a tag.
If the cursor is inside an opening tag, it will be moved to the index of the
character just past the '>'. If it's inside the closing tag, it will be moved
to the index of the '<'. This will ensure that both the opening and the
closing tags are correctly found.
If the cursor is inside a self-closed tag, then it doesn't really matter what
we do with it, the surrounding tags will be correctly found (the self-closed
tag is ignored, as it should be)."""
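  # For example, with the cursor inside an opening '<div ...>' tag, the offset
  # is moved just past its '>', so that same <div> pair is what gets matched.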
preceding_angle_bracket_index = cursor_offset
while True:
if preceding_angle_bracket_index < 0:
return cursor_offset
char = sanitized_html[ preceding_angle_bracket_index ]
if preceding_angle_bracket_index != cursor_offset and char == '>':
# Not inside a tag, no need for adaptation
return cursor_offset
if char == '<':
break
preceding_angle_bracket_index -= 1
tag = Tag( TAG_REGEX.match( sanitized_html,
preceding_angle_bracket_index ) )
if not tag:
return cursor_offset
if tag.kind == TagType.OPENING:
return tag.end_offset
return tag.start_offset
def LocationsOfEnclosingTags( input_html, cursor_line, cursor_column ):
bad_result = ( 0, 0, 0, 0 )
try:
sanitized_html = PacifyHtmlComments( input_html )
cursor_offset = OffsetForLineColumnInString( sanitized_html,
cursor_line,
cursor_column )
if cursor_offset == None:
return bad_result
adapted_cursor_offset = AdaptCursorOffsetIfNeeded( sanitized_html,
cursor_offset )
opening_tag, closing_tag = GetOpeningAndClosingTags( sanitized_html,
adapted_cursor_offset )
if not opening_tag or not closing_tag:
return bad_result
opening_tag_line, opening_tag_column = LineColumnForOffsetInString(
sanitized_h
|
lang-uk/lang.org.ua
|
languk/corpus/mongodb.py
|
Python
|
mit
| 4,932
| 0.002636
|
from pymongo import MongoClient
from pymongo.collection import Collection
from pymongo.errors import AutoReconnect
from django.conf import settings
from types import FunctionType
import functools
import time
__all__ = ("connection", "connections", "db", "get_db")
"""
Goals:
* To provide a clean universal handler for Mongo, similar to how Django does it
for other db connections, but Mongo is unique and simple enough to just live on
it's own.
* To wrap the pymongo Collection methods automatically with a reconnect decorator
in case a server is temporarily down, or a replica set is in the middle of failing
over to a secondary server.
"""
"""
In settings.py:
MONGODB = {
'default': {
'NAME': 'db1' # Default database to connect to
'LOCATION': [ # An array of host strings, similar to the CACHES setting.
'localhost:27017',
]
}
}
Usage:
from mongodb import connections, connection, db
connections['default'].db1.messages.find({'key': 'value'}) # manually select the 'default' connection
connection.db1.messages.find({'key': 'value'}) # manually specific the database to be used to override "NAME"
db.messages.find({'key': 'value'}) # Just let the library use all of the defaults
"""
def with_reconnect(func):
"""
Handle when AutoReconnect is raised from pymongo. This is the standard error
raised for everything from "host disconnected" to "couldn't connect to host"
and more.
The sleep handles the edge case when the state of a replica set changes, and
the cursor raises AutoReconnect because the master may have changed. It can
take some time for the replica set to stop raising this exception, and the
small sleep and iteration count gives us a couple of seconds before we fail
completely. See also http://jira.mongodb.org/browse/PYTHON-216
"""
@functools.wraps(func)
def _reconnector(*args, **kwargs):
for x in xrange(20):
try:
return func(*args, **kwargs)
except AutoReconnect:
time.sleep(0.250)
pass
raise
return _reconnector
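# Usage sketch (hypothetical function, for illustration only): any helper doing
# pymongo calls can be wrapped so transient failovers are retried for roughly
# five seconds (20 attempts x 0.25s) before the exception propagates:
#
#     @with_reconnect
#     def count_messages(db):
#         return db.messages.count()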
class ConnectionDoesNotExist(Exception):
pass
class CollectionWrapper(object):
def __init__(self, collection):
self._collection = collection
    def __getattr__(self, func):
old = getattr(self._collection, func)
if type(old) is FunctionType:
return with_reconnect(old)
return old
def __repr__(self):
return "<Coll
|
ectionWrapper %s>" % self._collection.__repr__()
def __str__(self):
return "<CollectionWrapper %s>" % self._collection.__str__()
class DatabaseWrapper(object):
def __init__(self, database):
self._database = database
def __getattr__(self, func):
old = getattr(self._database, func)
if type(old) is FunctionType:
return with_reconnect(old)
elif isinstance(old, Collection):
return CollectionWrapper(old)
return old
def __getitem__(self, func):
old = getattr(self._database, func)
if isinstance(old, Collection):
return CollectionWrapper(old)
return old
def __repr__(self):
return "<DatabaseWrapper %s>" % self._database.__repr__()
def __str__(self):
return "<DatabaseWrapper %s>" % self._database.__str__()
class ConnectionWrapper(object):
def __init__(self, connection, default=None):
self._connection = connection
self._databases = {}
self._default = default
def __getattr__(self, alias):
if self._default is not None and alias == "default":
alias = self._default
if alias in self._databases:
return self._databases[alias]
database = DatabaseWrapper(self._connection[alias])
self._databases[alias] = database
return database
def __repr__(self):
return "<ConnectionWrapper %s>" % self._connection.__repr__()
def __str__(self):
return "<ConnectionWrapper %s>" % self._connection.__str__()
class MongoHandler(object):
def __init__(self, databases):
self.databases = databases
self._connections = {}
def __getitem__(self, alias):
if alias in self._connections:
return self._connections[alias]
try:
conn = self.databases[alias]
except KeyError:
raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)
conn = MongoClient(
[node for node in self.databases[alias]["LOCATION"]], authSource=self.databases[alias]["authSource"]
)
self._connections[alias] = ConnectionWrapper(conn, self.databases[alias]["NAME"])
return self._connections[alias]
def get_db():
connections = MongoHandler(settings.MONGODB)
connection = connections["default"]
return connection.default
db = get_db()
|
tommo/gii
|
support/waf/waflib/Tools/kde4.py
|
Python
|
mit
| 2,732
| 0.031845
|
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"""
Support for the KDE4 libraries and msgfmt
"""
import os, sys, re
from waflib import Options, TaskGen, Task, Utils
from waflib.TaskGen import feature, after_method
@feature('msgfmt')
def apply_msgfmt(self):
"""
Process all languages to create .mo files and to install them::
def build(bld):
bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
"""
for lang in self.to_list(self.langs):
node = self.path.find_resource(lang+'.po')
task = self.create_task('msgfmt', node, node.change_ext('.mo'))
langname = lang.split('/')
langname = langname[-1]
inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
self.bld.install_as(
inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
task.outputs[0],
chmod = getattr(self, 'chmod', Utils.O644))
class msgfmt(Task.Task):
"""
Transform .po files into .mo files
"""
color = 'BLUE'
run_str = '${MSGFMT} ${SRC} -o ${TGT}'
def configure(self):
"""
Detect kde4-config and set various variables for the *use* system::
def options(opt):
opt.load('compiler_cxx kde4')
def configure(conf):
conf.load('compiler_cxx kde4')
def build(bld):
bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
"""
kdeconfig = self.find_program('kde4-config')
prefix = self.cmd_and_log('%s --prefix' % kdeconfig).strip()
fname = '%s/share/apps/cmake/modules/KDELibsDepen
|
dencies.cmake' % prefix
try: os.stat(fname)
except OSError:
fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
try: os.stat(fname)
except OSError: self.fatal('could not open %s' % fname)
try:
txt = Utils.readf(fname)
except (OSError, IOError):
self.fatal('could not read %s' % fname)
txt = txt.replace('\\\n', '\n')
fu = re.compile('#(.*)\n')
txt = fu.sub('', txt)
setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
	found = setregexp.findall(txt)
for (_, key, val) in found:
#print key, val
self.env[key] = val
# well well, i could just write an interpreter for cmake files
self.env['LIB_KDECORE']= ['kdecore']
self.env['LIB_KDEUI'] = ['kdeui']
self.env['LIB_KIO'] = ['kio']
self.env['LIB_KHTML'] = ['khtml']
self.env['LIB_KPARTS'] = ['kparts']
self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
self.find_program('msgfmt', var='MSGFMT')
|
andresailer/DIRAC
|
Core/Workflow/WorkflowReader.py
|
Python
|
gpl-3.0
| 4,858
| 0.01832
|
"""
This is a comment
"""
#try: # this part to import as part of the DIRAC framework
import xml.sax
from xml.sax.handler import ContentHandler
from DIRAC.Core.Workflow.Parameter import *
from DIRAC.Core.Workflow.Module import *
from DIRAC.Core.Workflow.Step import *
from DIRAC.Core.Workflow.Workflow import Workflow
__RCSID__ = "$Id$"
class WorkflowXMLHandler(ContentHandler):
def __init__(self, new_wf=None):
""" If new_wf defined, it will be used as root of document """
# this is an attribute for the object to be created from the XML document
self.root=new_wf # the reference on the all document
self.stack=None # to keep last object
self.strings=None # to accumulate string object (list of strings) used to split long string
def startDocument(self):
#reset the process
#self.root=None
self.stack=[]
self.strings=[]
def endDocument(self):
pass
def startElement(self, name, attrs):
#print name ,"startElement", "attr=", attrs.getLength(), attrs.getNames()
self.clearCharacters() # clear to remove empty or nonprintable characters
if name == "Workflow":
      if self.root == None: #if root not defined by constructor
self.root = Workflow()
self.stack.append(self.root)
elif name == "StepDefinition":
obj = StepDefinition("TemporaryXMLObject_StepDefinition")
if self.root == None: # in case we are saving Step only
self.root = obj
self.stack.append(obj)
elif name == "StepInstance":
obj = StepInstance("TemporaryXMLObject_StepInstance")
self.stack.append(obj)
elif name == "Mod
|
uleDefinition":
obj = ModuleDefinition("TemporaryXMLObject_ModuleDefinition")
if self.root == None: # in case we are saving Module only
self.root = obj
self.stack.append(obj)
elif name == "ModuleInstance":
obj = ModuleInstance("TemporaryXMLObject_ModuleInstance")
self.stack.append(obj)
elif name == "Parameter":
obj = Parameter(str(attrs['name']), None, str(attrs['type']), str(attrs['linked_module']), str(attrs['linked_parameter']), str(attrs['in']), str(attrs['out']), str(attrs['description']))
self.stack.append(obj)
# TEMPORARY CODE
elif name=="origin" or name == "version" or name == "name" or name == "type" or name == "value" or\
name == "required" or name == "descr_short" or name == "name" or name == "type" or name == "description" or name == "body":
pass
else:
print "UNTREATED! startElement name=", name, "attr=", attrs.getLength(), attrs.getNames()
pass
def endElement(self, name):
#print name, "endElement"
# attributes
if name=="origin":
self.stack[len(self.stack)-1].setOrigin(self.getCharacters())
elif name == "version":
self.stack[len(self.stack)-1].setVersion(self.getCharacters())
elif name == "name":
self.stack[len(self.stack)-1].setName(self.getCharacters())
elif name == "type":
self.stack[len(self.stack)-1].setType(self.getCharacters())
elif name == "required":
self.stack[len(self.stack)-1].setRequired(self.getCharacters())
elif name == "descr_short":
self.stack[len(self.stack)-1].setDescrShort(self.getCharacters())
elif name == "name":
self.stack[len(self.stack)-1].setName(self.getCharacters())
elif name == "type":
self.stack[len(self.stack)-1].setType(self.getCharacters())
elif name == "description":
self.stack[len(self.stack)-1].setDescription(self.getCharacters())
elif name == "body":
self.stack[len(self.stack)-1].setBody(self.getCharacters())
elif name == "value":
ch = self.getCharacters()
# to keep compatibility with the old version
# were """ was not used for the string
if self.stack[len(self.stack)-1].isTypeString():
self.stack[len(self.stack)-1].setValue(ch)
else:
self.stack[len(self.stack)-1].setValue(eval(ch))
#objects
elif name=="Workflow":
self.stack.pop()
elif name == "StepDefinition":
self.root.step_definitions.append(self.stack.pop())
elif name == "StepInstance":
self.root.step_instances.append(self.stack.pop())
elif name == "ModuleDefinition":
self.root.addModule(self.stack.pop())
elif name == "ModuleInstance":
obj=self.stack.pop()
self.stack[len(self.stack)-1].module_instances.append(obj)
elif name == "Parameter":
obj=self.stack.pop();
self.stack[len(self.stack)-1].addParameter(obj)
else:
print "UNTREATED! endElement", name
def getCharacters(self):
# combine all strings and clear the list
ret = ''.join(self.strings)
self.clearCharacters()
return str(ret)
def clearCharacters(self):
del self.strings
self.strings=[]
def characters(self, content):
self.strings.append(content)
|
Fillll/reddit2telegram
|
reddit2telegram/channels/~inactive/r_getmotivated/app.py
|
Python
|
mit
| 147
| 0.006803
|
#encoding:utf-8
subreddit = 'getmotivated'
t_channel = '@r_getmotivated'
def send_post(submission, r2t):
return r2t.send_simple(submission)
|
LegNeato/buck
|
scripts/rulekey_diff2_test.py
|
Python
|
apache-2.0
| 11,981
| 0.001335
|
import unittest
from contextlib import contextmanager
from StringIO import StringIO
from rulekey_diff2 import *
@contextmanager
def captured_output():
new_out, new_err = StringIO(), StringIO()
old_out, old_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = new_out, new_err
yield sys.stdout, sys.stderr
finally:
        sys.stdout, sys.stderr = old_out, old_err
class MockFile(object):
def __init__(self, lines):
self._index = -1
self._lines = lines
def readline(self):
self._index += 1
return self._lines[self._index]
def readlines(self):
return self._lines
def __iter__(self):
return iter(self._lines)
class TestRuleKeyDiff2(unittest.TestCase):
def test_gather(self):
self.assertEquals(
gather(range(10), lambda x: x % 3),
{0: [0, 3, 6, 9], 1: [1, 4, 7], 2: [2, 5, 8]})
def test_ordinals(self):
self.assertEquals(
ordinals([2, 3, 5, 7, 11]),
{2: 0, 3: 1, 5: 2, 7: 3, 11: 4})
def test_tokenize_rulekey_line(self):
self.assertEquals(
tokenize_rulekey_line('type1(val1a:val1b):type2(val2):type3("abc:def"):'),
['type3("abc:def")', 'type2(val2)', 'type1(val1a:val1b)'])
def test_read_rulekeys_from_diag_file(self):
file1 = MockFile([
'rulekey1 type1(val1a:val1b):type2(val2):',
'rulekey2 type3(val3):type4(val4):type5(val5):',
])
self.assertDictEqual(
read_rulekeys(file1, '', ' '),
{
'rulekey1': ['type2(val2)', 'type1(val1a:val1b)'],
'rulekey2': ['type5(val5)', 'type4(val4)', 'type3(val3)'],
})
def test_read_rulekeys_from_bucklog_file(self):
file1 = MockFile([
'[12:34:56][blah blah ...][com.facebook.buck.rules.keys.RuleKeyBuilder] ' +
'RuleKey rulekey1=type1(val1a:val1b):type2(val2):',
'[12:37:11][blah blah ...][com.facebook.buck.rules.keys.RuleKeyFactory] ' +
'I am a rulekey factory! Blah! Blah! Ignore me!',
'[13:41:08][blah blah ...][com.facebook.buck.rules.keys.RuleKeyBuilder] ' +
'RuleKey rulekey2=type3(val3):type4(val4):type5(val5):',
'[12:37:11][blah blah ...][com.facebook.buck.parser.Parser] ' +
'I am a parser! Blah! Blah! Ignore me!',
])
self.assertDictEqual(
read_rulekeys(file1, '[com.facebook.buck.rules.keys.RuleKeyBuilder] RuleKey ', '='),
{
'rulekey1': ['type2(val2)', 'type1(val1a:val1b)'],
'rulekey2': ['type5(val5)', 'type4(val4)', 'type3(val3)'],
})
def test_token_type(self):
self.assertEquals(token_type('key(field1)'), 'key')
self.assertEquals(token_type('wrapper(OPTIONAL)'), 'wrapper')
self.assertEquals(token_type('container(LIST,len=5)'), 'container')
self.assertEquals(token_type('container(TUPLE,len=2)'), 'container')
self.assertEquals(token_type('number(42)'), 'number')
self.assertEquals(token_type('string("ab(c)")'), 'string')
def test_token_value(self):
self.assertEquals(token_value('key(field1)'), 'field1')
self.assertEquals(token_value('wrapper(OPTIONAL)'), 'OPTIONAL')
self.assertEquals(token_value('container(LIST,len=5)'), 'LIST,len=5')
self.assertEquals(token_value('container(TUPLE,len=2)'), 'TUPLE,len=2')
self.assertEquals(token_value('number(42)'), '42')
self.assertEquals(token_value('string("ab(c)")'), '"ab(c)"')
def test_token_length(self):
self.assertEquals(token_length('key(field1)'), 1)
self.assertEquals(token_length('wrapper(OPTIONAL)'), 1)
self.assertEquals(token_length('container(LIST,len=5)'), 5)
self.assertEquals(token_length('container(TUPLE,len=2)'), 2)
self.assertEquals(token_length('number(42)'), 0)
self.assertEquals(token_length('string("ab(c)")'), 0)
def test_print_rulekey(self):
with captured_output() as (out, err):
print_rulekey([
'key(field1)',
'container(TUPLE,len=2)',
'container(LIST,len=3)',
'string("s1")',
'string("s2")',
'string("s3")',
'wrapper(OPTIONAL)',
'string("s4")',
'key(field2)',
'number(42)',
])
self.assertEquals('\n'.join([
'key(field1)',
' container(TUPLE,len=2)',
' container(LIST,len=3)',
' string("s1")',
' string("s2")',
' string("s3")',
' wrapper(OPTIONAL)',
' string("s4")',
'key(field2)',
' number(42)',
''
]), out.getvalue())
def test_reconstruct_rulekey(self):
s = reconstruct_rulekey([
'key(field1)',
'container(TUPLE,len=2)',
'container(LIST,len=3)',
'string("s1")',
'string("s2")',
'string("s3")',
'wrapper(OPTIONAL)',
'string("s4")',
'key(field2)',
'number(42)',
])
self.assertEquals(s.token, 'root()')
self.assertEquals(len(s), 2)
self.assertEquals(s[0].token, 'key(field1)')
self.assertEquals(len(s[0]), 1)
self.assertEquals(s[0][0].token, 'container(TUPLE,len=2)')
self.assertEquals(len(s[0][0]), 2)
self.assertEquals(s[0][0][0].token, 'container(LIST,len=3)')
self.assertEquals(len(s[0][0][0]), 3)
self.assertEquals(s[0][0][0][0].token, 'string("s1")')
self.assertEquals(len(s[0][0][0][0]), 0)
self.assertEquals(s[0][0][0][1].token, 'string("s2")')
self.assertEquals(len(s[0][0][0][1]), 0)
self.assertEquals(s[0][0][0][2].token, 'string("s3")')
self.assertEquals(len(s[0][0][0][2]), 0)
self.assertEquals(s[0][0][1].token, 'wrapper(OPTIONAL)')
self.assertEquals(len(s[0][0][1]), 1)
self.assertEquals(s[0][0][1][0].token, 'string("s4")')
self.assertEquals(len(s[0][0][1][0]), 0)
self.assertEquals(s[1].token, 'key(field2)')
self.assertEquals(len(s[1]), 1)
self.assertEquals(s[1][0].token, 'number(42)')
self.assertEquals(len(s[1][0]), 0)
@staticmethod
def diff_rulekeys_result(s1, s2):
res = []
def visitor(p1, _s1, p2, _s2): res.append((p1, p2))
diff_rulekeys(s1, s2, visitor)
return res
def test_diff_rulekeys_insert_or_remove_element(self):
s1 = reconstruct_rulekey(
['key(k1)', 'container(LIST,len=2)', 'string("s1")', 'string("s3")'])
s2 = reconstruct_rulekey(
['key(k1)', 'container(LIST,len=3)', 'string("s1")', 'string("s2")', 'string("s3")'])
self.assertEquals(
self.diff_rulekeys_result(s1, s2),
[
# report different length
('/root():0/key(k1):0/container(LIST,len=2)',
'/root():0/key(k1):0/container(LIST,len=3)'),
# report 'None' on the left != 'string("s2")' on the right
('/root():0/key(k1):0/container(LIST,len=2):None',
'/root():0/key(k1):0/container(LIST,len=3):1/string("s2")')
])
def test_diff_rulekeys_change_element_order(self):
s1 = reconstruct_rulekey(
['key(k1)', 'container(LIST,len=3)', 'string("s1")', 'string("s2")', 'string("s3")'])
s2 = reconstruct_rulekey(
['key(k1)', 'container(LIST,len=3)', 'string("s2")', 'string("s3")', 'string("s1")'])
self.assertEquals(
self.diff_rulekeys_result(s1, s2),
[
# report different order
('/root():0/key(k1):0/container(LIST,len=3):order[0, 1, 2]',
'/root():0/key(k1):0/container(LIST,len=3):order[2, 0, 1]'),
])
def test_diff_rulekeys_insert_or_remove_key(self):
s1 = reconstruct_rulekey(
['key(k1)', 'string("s
|
sgraham/nope
|
tools/perf/page_sets/pathological_mobile_sites.py
|
Python
|
bsd-3-clause
| 1,806
| 0.002769
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class PathologicalMobileSitesPage(page_module.Page):
  def __init__(self, url, page_set):
super(PathologicalMobileSitesPage, self).__init__(
url=url, page_set=page_set, credentials_path='data/credentials.json')
self.user_agent_type = 'mobile'
self.archive_data_file = 'data/pathological_mobile_sites.json'
def RunPageInteractions(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
'ScrollAction', is_smooth=True)
action_runner.ScrollPage()
interaction.End()
class PathologicalMobileSitesPageSet(page_set_module.PageSet):
"""Pathologically bad and janky sites on mobile."""
def __init__(self):
super(PathologicalMobileSitesPageSet, self).__init__(
user_agent_type='mobile',
archive_data_file='data/pathological_mobile_sites.json',
bucket=page_set_module.PARTNER_BUCKET)
sites = ['http://edition.cnn.com',
'http://m.espn.go.com/nhl/rankings',
'http://recode.net',
'http://www.latimes.com',
('http://www.pbs.org/newshour/bb/'
'much-really-cost-live-city-like-seattle/#the-rundown'),
('http://www.theguardian.com/politics/2015/mar/09/'
'ed-balls-tory-spending-plans-nhs-charging'),
'http://www.zdnet.com',
'http://www.wowwiki.com/World_of_Warcraft:_Mists_of_Pandaria',
'https://www.linkedin.com/in/linustorvalds']
for site in sites:
self.AddUserStory(PathologicalMobileSitesPage(site, self))
|
thekot/cam_to_webm
|
modules/tar_to_webm_compressor.py
|
Python
|
mit
| 2,578
| 0.003491
|
import os
import shutil
import logging
import tarfile
import tempfile
import subprocess
from threading import Thread
class TarToWebmCompressor(object):
@staticmethod
def compress(filename):
thread = Thread(target=TarToWebmCompressor._compress, args=(filename,))
thread.start()
@staticmethod
def _compress(filename):
# extract tar
        tmp_dir = tempfile.mkdtemp()
try:
tar = tarfile.open(filename, "r|")
tar.extractall(tmp_dir)
tar.close()
except:
logging.exception("Failed extract %s file" % (filename,))
os.rmdir(tmp_dir)
return
# compress images into web movie
vopts = "-c:v libvpx -quality good -cpu-used 0 -b:v 500k -qmin 4 -qmax 60 -bufsize 5000k -threads 1"
input_images = os.path.join(tmp_dir, '%04d.jpg')
hour_webm = filename + '.webm'
null = '/dev/null'
if os.name == 'nt':
null = 'NUL'
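        # Two-pass VP8 encode: pass 1 only gathers rate statistics (its video
        # output is discarded to the platform null device chosen above), and
        # pass 2 reads those statistics to write the final .webm file.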
cmd1 = 'ffmpeg -y -v quiet -nostats -f image2 -i "%s" -pass 1 -f rawvideo %s %s' % (input_images, vopts, null)
cmd2 = 'ffmpeg -y -v quiet -nostats -f image2 -i "%s" -pass 2 %s "%s"' % (input_images, vopts, hour_webm)
try:
subprocess.check_call(cmd1)
subprocess.check_call(cmd2)
os.remove(filename)
except subprocess.CalledProcessError:
logging.exception("Failed to compress %s file" % (filename,))
finally:
shutil.rmtree(tmp_dir, ignore_errors=True)
# combine 1-hour movies to daily movies
# combined_webm = (datetime.datetime.now() - datetime.timedelta(hours=6)).strftime('%Y-%m-%d.webm')
# combined_webm = os.path.join(os.path.dirname(filename), combined_webm)
# if not os.path.exists(combined_webm):
# try:
# os.rename(hour_webm, combined_webm)
# except:
# logging.exception("Failed to rename %s to %s" % (hour_webm, combined_webm))
# else:
# combined_webm_tmp = combined_webm + ".tmp"
# cmd = 'mkvmerge --quiet --webm -o "%s" "%s" "+" "%s"' % (combined_webm_tmp, combined_webm, hour_webm)
# try:
# subprocess.check_call(cmd)
# os.remove(combined_webm) # for windows os
# os.rename(combined_webm_tmp, combined_webm)
# os.remove(hour_webm)
# except subprocess.CalledProcessError:
# logging.exception("Failed to append %s to %s" % (hour_webm, combined_webm))
|
lunzhy/PyShanbay
|
tests/ui_test.py
|
Python
|
mit
| 2,044
| 0.000978
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'Lunzhy'
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from pyshanbay.shanbay import VisitShanbay
from pyshanbay import page_parser as parser
from gui.ui_main import UIMainWidget
class MainForm(QWidget):
def __init__(self):
super().__init__()
self.table = QTableWidget()
layout = QHBoxLayout()
layout.addWidget(self.table)
self.setLayout(layout)
self.table.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.table.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.table.setSelectionMode(QAbstractItemView.SingleSelection)
self.table.verticalHeader().setResizeMode(QHeaderView.Fixed)
self.table.itemClicked.connect(self.show_selected)
return
    def set_data(self, members_data):
self.table.setColumnCount(2)
self.table.setRowCount(len(members_data))
for row_index, member in enumerate(members_data):
new_item = QTableWidgetItem(member['nickname'])
self.table.setItem(row_index, 0, new_item)
new_item = QTableWidgetItem(str(member['checked_today']))
self.table.setItem(row_index, 1, new_item)
return
def show_selected(self):
select = self.table.selectionModel().selectedRows()
print(self.table.item(select[0].row(), 0).text())
print(self.table.item(select[0].row(), 1).text())
return
def get_data():
shanbay = VisitShanbay()
shanbay.login()
page_members = shanbay.members()
total_page = parser.total_page_members(page_members)
pages = []
for page in range(1, int(total_page) + 1):
page_html = shanbay.members_page(page)
pages.append(page_html)
members_info = parser.parse_members_info(pages)
return members_info
if __name__ == '__main__':
app = QApplication(sys.argv)
main_form = UIMainWidget()
main_form.set_data_members(get_data())
main_form.show()
app.exec_()
|
valohai/valohai-cli
|
valohai_cli/commands/yaml/pipeline.py
|
Python
|
mit
| 2,212
| 0.000452
|
from typing import List
import click
from valohai.internals.pipeline import get_pipeline_from_source
from valohai.yaml import config_to_yaml
from valohai_yaml.objs.config import Config
from valohai_cli.ctx import get_project
from valohai_cli.exceptions import ConfigurationError
from valohai_cli.messages import error, info
from valohai_cli.models.project import Project
@click.command()
@click.argument(
"filenames",
nargs=-1,
type=click.Path(file_okay=True, exists=True, dir_okay=False),
required=True,
)
def pipeline(filenames: List[str]) -> None:
"""
Update a pipeline config(s) in valohai.yaml based on Python source file(s).
Python source file is expected to have def main(config: Config) -> Pipeline
Example:
vh yaml pipeline mypipeline.py
:param filenames: Path(s) of the Python source code files.
"""
project = get_project(require=True)
yaml_filename = project.get_config_filename()
did_update = False
for source_path in filenames:
old_config = get_current_config(project)
try:
new_config = get_pipeline_from_source(source_path, old_config)
except Exception:
error(
f"Retrieving a new pipeline definition for project {project} for {source_path} failed.\n"
f"The configuration file in use is {yaml_filename}. "
f"See the full traceback below."
)
raise
merged_config = old_config.merge_with(new_config)
if old_config.serialize() != merged_config.serialize():
with open(yaml_filename, "w") as out_file:
out_file.write(config_to_yaml(merged_config))
did_update = True
if did_update:
info(f"{yaml_filename} updated.")
else:
info(f"{yaml_filename} already up-to-date.")
def get_current_config(project: Project) -> Config:
try:
return project.get_config()
except FileNotFoundError as fnfe:
valohai_yaml_name = project.get_config_filename()
raise ConfigurationError(
f"Did not find {valohai_yaml_name}. "
f"Can't create a pipeline without preconfigured steps."
) from fnfe
|
alquerci/pip
|
tests/unit/test_index.py
|
Python
|
mit
| 1,950
| 0.001538
|
import os
from pip.backwardcompat import urllib
from tests.lib.path import Path
from pip.index import package_to_requirement, HTMLPage
from pip.index import PackageFinder, Link, INSTALLED_VERSION
from tests.lib import path_to_url
from string import ascii_lowercase
from mock import patch
def test_package_name_should_be_converted_to_requirement():
"""
    Test that it translates a name like Foo-1.2 to Foo==1.2
"""
assert package_to_requirement('Foo-1.2') == 'Foo==1.2'
assert package_to_requirement('Foo-dev') == 'Foo==dev'
assert package_to_requirement('Foo') == 'Foo'
def test_html_page_should_be_able_to_scrap_rel_links():
"""
Test scraping page looking for url in href
"""
page = HTMLPage("""
<!-- The <th> elements below are a terrible terrible hack for setuptools -->
<li>
<strong>Home Page:</strong>
<!-- <th>Home Page -->
<a href="http://supervisord.org/">http://supervisord.org/</a>
</li>""", "supervisor")
links = list(page.scraped_rel_links())
assert len(links) == 1
assert links[0].url == 'http://supervisord.org/'
def test_sort_locations_file_find_link(data):
"""
Test that a file:// find-link dir gets listdir run
"""
finder = PackageFinder([data.find_links], [])
files, urls = finder._sort_locations([data.find_links])
assert files and not urls, "files and not urls should have been found at find-links url: %s" % data.find_links
def test_sort_locations_file_not_find_link(data):
"""
Test that a file:// url dir that's not a find-link, doesn't get a listdir run
"""
finder = PackageFinder([], [])
files, urls = finder._sort_locations(data.index_url("empty_with_pkg"))
assert urls and not files, "urls, but not files should have been found"
def test_INSTALLED_VERSION_greater():
"""Test INSTALLED_VERSION compares greater."""
assert INSTALLED_VERSION > Link("some link")
|
noiselabs/box-linux-sync
|
src/noiselabs/box/output.py
|
Python
|
lgpl-3.0
| 6,736
| 0.011435
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of box-linux-sync.
#
# Copyright (C) 2013 Vítor Brandão <noisebleed@noiselabs.org>
#
# box-linux-sync is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# box-linux-sync is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with box-linux-sync; if not, see
# <http://www.gnu.org/licenses/>.
from __future__ import print_function
import curses
import logging
import os
import time
import sys
import types
from noiselabs.box.config import BASEDIR
from noiselabs.box.utils import create_file
from noiselabs.box.ansistrm import ColorizingStreamHandler
################################################################################
##
## Color codes (taken from Portage)
##
################################################################################
_styles = {}
"""Maps style class to tuple of attribute
|
names."""
codes = {}
"""Maps attribute name to ansi code."""
esc_seq = "\x1b["
codes["normal"] = esc_seq + "0m"
codes['reset'] = esc_seq + "39;49;00m"
codes["bold"] = esc_seq + "01m"
codes["faint"] = esc_seq + "02m"
codes["standout"] = esc_seq + "03m"
codes["underline"] = esc_seq + "04m"
codes["blink"] = esc_seq + "05m"
codes["overline"] = esc_seq + "06m"
codes["reverse"] = esc_seq + "07m"
codes["invisible"] = esc_seq + "08m"
codes["no-attr"] = esc_seq + "22m"
codes["no-standout"] = esc_seq + "23m"
codes["no-underline"] = esc_seq + "24m"
codes["no-blink"] = esc_seq + "25m"
codes["no-overline"] = esc_seq + "26m"
codes["no-reverse"] = esc_seq + "27m"
codes["bg_black"] = esc_seq + "40m"
codes["bg_darkred"] = esc_seq + "41m"
codes["bg_darkgreen"] = esc_seq + "42m"
codes["bg_brown"] = esc_seq + "43m"
codes["bg_darkblue"] = esc_seq + "44m"
codes["bg_purple"] = esc_seq + "45m"
codes["bg_teal"] = esc_seq + "46m"
codes["bg_lightgray"] = esc_seq + "47m"
codes["bg_default"] = esc_seq + "49m"
codes["bg_darkyellow"] = codes["bg_brown"]
def color(fg, bg="default", attr=["normal"]):
mystr = codes[fg]
for x in [bg]+attr:
mystr += codes[x]
return mystr
ansi_codes = []
for x in range(30, 38):
ansi_codes.append("%im" % x)
ansi_codes.append("%i;01m" % x)
rgb_ansi_colors = ['0x000000', '0x555555', '0xAA0000', '0xFF5555', '0x00AA00',
'0x55FF55', '0xAA5500', '0xFFFF55', '0x0000AA', '0x5555FF', '0xAA00AA',
'0xFF55FF', '0x00AAAA', '0x55FFFF', '0xAAAAAA', '0xFFFFFF']
for x in range(len(rgb_ansi_colors)):
codes[rgb_ansi_colors[x]] = esc_seq + ansi_codes[x]
del x
codes["black"] = codes["0x000000"]
codes["darkgray"] = codes["0x555555"]
codes["red"] = codes["0xFF5555"]
codes["darkred"] = codes["0xAA0000"]
codes["green"] = codes["0x55FF55"]
codes["darkgreen"] = codes["0x00AA00"]
codes["yellow"] = codes["0xFFFF55"]
codes["brown"] = codes["0xAA5500"]
codes["blue"] = codes["0x5555FF"]
codes["darkblue"] = codes["0x0000AA"]
codes["fuchsia"] = codes["0xFF55FF"]
codes["purple"] = codes["0xAA00AA"]
codes["turquoise"] = codes["0x55FFFF"]
codes["teal"] = codes["0x00AAAA"]
codes["white"] = codes["0xFFFFFF"]
codes["lightgray"] = codes["0xAAAAAA"]
codes["darkteal"] = codes["turquoise"]
# Some terminals have darkyellow instead of brown.
codes["0xAAAA00"] = codes["brown"]
codes["darkyellow"] = codes["0xAAAA00"]
# Colors from /etc/init.d/functions.sh
_styles["NORMAL"] = ( "normal", )
_styles["GOOD"] = ( "green", )
_styles["WARN"] = ( "yellow", )
_styles["BAD"] = ( "red", )
_styles["HILITE"] = ( "teal", )
_styles["BRACKET"] = ( "blue", )
def style_to_ansi_code(style):
"""
@param style: A style name
@type style: String
@rtype: String
@return: A string containing one or more ansi escape codes that are
used to render the given style.
"""
ret = ""
for attr_name in _styles[style]:
        # allow stuff that has found its way through ansi_code_pattern
ret += codes.get(attr_name, attr_name)
return ret
def colorize(color_key, text):
if color_key in codes:
return codes[color_key] + text + codes["reset"]
elif color_key in _styles:
return style_to_ansi_code(color_key) + text + codes["reset"]
else:
return text
class BoxConsole():
"""
A class that performs fancy terminal formatting for status and informational
messages built upon the logging module.
"""
def __init__(self, opts, name):
self.name = name
self.opts = opts
self.logger = logging.getLogger(name)
self.level = logging.DEBUG if self.opts.verbose else logging.INFO
self.logger.setLevel(self.level)
# create console handler
ch = ColorizingStreamHandler()
ch.setLevel(self.level)
# create formatter and add it to the handlers
#ch.setFormatter(logging.Formatter('%(message)s'))
self.logger.addHandler(ch)
# create file handler
if self.opts.log:
logfile = os.path.join(BASEDIR, 'box-sync.log')
create_file(logfile)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter('[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s'))
self.logger.addHandler(fh)
def debug(self, msg):
self.logger.debug(msg)
def info(self, msg):
self.logger.info(msg)
def warning(self, msg):
self.logger.warning(msg)
def error(self, msg):
self.logger.error(msg)
def critical(self, msg):
self.logger.critical(msg)
def log(self, lvl, msg):
self.logger.log(lvl, msg)
def countdown(self, secs=5, doing="Starting"):
""" This method is based on Portage's _emerge.countdown
Copyright 1999-2009 Gentoo Foundation"""
if secs:
print("Waiting",secs,"seconds before starting (Control-C to abort)...")
print(doing+" in: ", end=' ')
ticks=list(range(secs))
ticks.reverse()
for sec in ticks:
sys.stdout.write(colorize("red", str(sec+1)+" "))
sys.stdout.flush()
time.sleep(1)
print()
|
joopert/home-assistant
|
tests/helpers/test_config_validation.py
|
Python
|
apache-2.0
| 26,925
| 0.000409
|
"""Test config validators."""
from datetime import date, datetime, timedelta
import enum
import os
from socket import _GLOBAL_DEFAULT_TIMEOUT
from unittest.mock import Mock, patch
import uuid
import pytest
import voluptuous as vol
import homeassistant
import homeassistant.helpers.config_validation as cv
def test_boolean():
"""Test boolean validation."""
schema = vol.Schema(cv.boolean)
for value in (
None,
"T",
"negative",
"lock",
"tr ue",
[],
[1, 2],
{"one": "two"},
test_boolean,
):
with pytest.raises(vol.MultipleInvalid):
schema(value)
for value in ("true", "On", "1", "YES", " true ", "enable", 1, 50, True, 0.1):
assert schema(value)
for value in ("false", "Off", "0", "NO", "disable", 0, False):
assert not schema(value)
def test_latitude():
"""Test latitude validation."""
schema = vol.Schema(cv.latitude)
for value in ("invalid", None, -91, 91, "-91", "91", "123.01A"):
with pytest.raises(vol.MultipleInvalid):
schema(value)
for value in ("-89", 89, "12.34"):
schema(value)
def test_longitude():
"""Test longitude validation."""
schema = vol.Schema(cv.longitude)
for value in ("invalid", None, -181, 181, "-181", "181", "123.01A"):
with pytest.raises(vol.MultipleInvalid):
schema(value)
for value in ("-179", 179, "12.34"):
schema(value)
def test_port():
"""Test TCP/UDP network port."""
schema = vol.Schema(cv.port)
for value in ("invalid", None, -1, 0, 80000, "81000"):
with pytest.raises(vol.MultipleInvalid):
schema(value)
for value in ("1000", 21, 24574):
schema(value)
def test_isfile():
"""Validate that the value is an existing file."""
schema = vol.Schema(cv.isfile)
fake_file = "this-file-does-not.exist"
assert not os.path.isfile(fake_file)
for value in ("invalid", None, -1, 0, 80000, fake_file):
with pytest.raises(vol.Invalid):
schema(value)
# patching methods that allow us to fake a file existing
# with write access
with patch("os.path.isfile", Mock(return_value=True)), patch(
"os.access", Mock(return_value=True)
):
schema("test.txt")
def test_url():
"""Test URL."""
schema = vol.Schema(cv.url)
for value in (
"invalid",
None,
100,
"htp://ha.io",
"http//ha.io",
"http://??,**",
"https://??,**",
):
with pytest.raises(vol.MultipleInvalid):
schema(value)
for value in (
"http://localhost",
"https://localhost/test/index.html",
"http://home-assistant.io",
"http://home-assistant.io/test/",
"https://community.home-assistant.io/",
):
assert schema(value)
def test_platform_config():
"""Test platform config validation."""
options = ({}, {"hello": "world"})
for value in options:
with pytest.raises(vol.MultipleInvalid):
cv.PLATFORM_SCHEMA(value)
options = ({"platform": "mqtt"}, {"platform": "mqtt", "beer": "yes"})
for value in options:
cv.PLATFORM_SCHEMA_BASE(value)
def test_ensure_list():
"""Test ensure_list."""
schema = vol.Schema(cv.ensure_list)
assert [] == schema(None)
assert [1] == schema(1)
assert [1] == schema([1])
assert ["1"] == schema("1")
assert ["1"] == schema(["1"])
assert [{"1": "2"}] == schema({"1": "2"})
def test_entity_id():
"""Test entity ID validation."""
schema = vol.Schema(cv.entity_id)
with pytest.raises(vol.MultipleInvalid):
schema("invalid_entity")
assert schema("sensor.LIGHT") == "sensor.light"
def test_entity_ids():
"""Test entity ID validation."""
schema = vol.Schema(cv.entity_ids)
options = (
"invalid_entity",
"sensor.light,sensor_invalid",
["invalid_entity"],
["sensor.light", "sensor_invalid"],
["sensor.light,sensor_invalid"],
)
for value in options:
with pytest.raises(vol.MultipleInvalid):
schema(value)
options = ([], ["sensor.light"], "sensor.light")
for value in options:
schema(value)
assert schema("sensor.LIGHT, light.kitchen ") == ["sensor.light", "light.kitchen"]
def test_entity_domain():
"""Test entity domain validation."""
schema = vol.Schema(cv.entity_domain("sensor"))
options = ("invalid_entity", "cover.demo")
for value in options:
with pytest.raises(vol.MultipleInvalid):
print(value)
schema(value)
assert schema("sensor.LIGHT") == "sensor.light"
def test_entities_domain():
"""Test entities domain validation."""
schema = vol.Schema(cv.entities_domain("sensor"))
options = (
None,
"",
"invalid_entity",
["sensor.light", "cover.demo"],
["sensor.light", "sensor_invalid"],
)
for value in options:
with pytest.raises(vol.MultipleInvalid):
schema(value)
options = ("sensor.light", ["SENSOR.light"], ["sensor.light", "sensor.demo"])
for value in options:
schema(value)
assert schema("sensor.LIGHT, sensor.demo ") == ["sensor.light", "sensor.demo"]
    assert schema(["sensor.light", "SENSOR.demo"]) == ["sensor.light", "sensor.demo"]
def test_ensure_list_csv():
"""Test ensure_list_csv."""
    schema = vol.Schema(cv.ensure_list_csv)
options = (None, 12, [], ["string"], "string1,string2")
for value in options:
schema(value)
assert schema("string1, string2 ") == ["string1", "string2"]
def test_event_schema():
"""Test event_schema validation."""
options = (
{},
None,
{"event_data": {}},
{"event": "state_changed", "event_data": 1},
)
for value in options:
with pytest.raises(vol.MultipleInvalid):
cv.EVENT_SCHEMA(value)
options = (
{"event": "state_changed"},
{"event": "state_changed", "event_data": {"hello": "world"}},
)
for value in options:
cv.EVENT_SCHEMA(value)
def test_icon():
"""Test icon validation."""
schema = vol.Schema(cv.icon)
for value in (False, "work"):
with pytest.raises(vol.MultipleInvalid):
schema(value)
schema("mdi:work")
schema("custom:prefix")
def test_time_period():
"""Test time_period validation."""
schema = vol.Schema(cv.time_period)
options = (None, "", "hello:world", "12:", "12:34:56:78", {}, {"wrong_key": -10})
for value in options:
with pytest.raises(vol.MultipleInvalid):
schema(value)
options = ("8:20", "23:59", "-8:20", "-23:59:59", "-48:00", {"minutes": 5}, 1, "5")
for value in options:
schema(value)
assert timedelta(seconds=180) == schema("180")
assert timedelta(hours=23, minutes=59) == schema("23:59")
assert -1 * timedelta(hours=1, minutes=15) == schema("-1:15")
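# For reference, every accepted shape above normalizes to a
# datetime.timedelta (a sketch of the behavior under test):
#
#     schema({"minutes": 5})  # -> timedelta(minutes=5)
#     schema("8:20")          # -> timedelta(hours=8, minutes=20)
#     schema(90)              # -> timedelta(seconds=90)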
def test_remove_falsy():
"""Test remove falsy."""
assert cv.remove_falsy([0, None, 1, "1", {}, [], ""]) == [1, "1"]
def test_service():
"""Test service validation."""
schema = vol.Schema(cv.service)
with pytest.raises(vol.MultipleInvalid):
schema("invalid_turn_on")
schema("homeassistant.turn_on")
def test_service_schema():
"""Test service_schema validation."""
options = (
{},
None,
{
"service": "homeassistant.turn_on",
"service_template": "homeassistant.turn_on",
},
{"data": {"entity_id": "light.kitchen"}},
{"service": "homeassistant.turn_on", "data": None},
{
"service": "homeassistant.turn_on",
"data_template": {"brightness": "{{ no_end"},
},
)
for value in options:
with pytest.raises(vol.MultipleInvalid):
cv.SERVICE_SCHEMA(value)
options = (
{"service": "homeassistant.turn_on"},
{"service": "homeassistant.turn_on", "entity_id": "light.kitchen"},
{"service": "light.turn_on", "entity_id": "all"},
|
seancug/python-example
|
fatiando-0.2/test/test_gravmag_euler.py
|
Python
|
gpl-2.0
| 2,686
| 0.004468
|
from __future__ import division
import numpy as np
from fatiando.gravmag.euler import Classic, ExpandingWindow, MovingWindow
from fatiando.gravmag import sphere, fourier
from fatiando.mesher import Sphere
from fatiando import utils, gridder
model = None
x, y, z = None, None, None
inc, dec = None, None
struct_ind = None
base = None
pos = None
field, xderiv, yderiv, zderiv = None, None, None, None
precision = 0.01
def setup():
global model, x, y, z, inc, dec, struct_ind, field, xderiv, yderiv, \
zderiv, base, pos
inc, dec = -30, 50
pos = np.array([1000, 1000, 200])
model = Sphere(pos[0], pos[1], pos[2], 1,
#{'magnetization':utils.ang2vec(100, 25, -10)})
{'magnetization':10000})
struct_ind = 3
shape = (128, 128)
x, y, z = gridder.regular((0, 3000, 0, 3000), shape, z=-1)
base = 10
field = utils.nt2si(sphere.tf(x, y, z, [model], inc, dec)) + base
    xderiv = fourier.derivx(x, y, field, shape)
yderiv = fourier.derivy(x, y, field, shape)
zderiv = fourier.derivz(x, y, field, shape)
def test_euler_classic_sphere_mag():
"gravmag.euler.Classic for sphere model and magnetic data"
euler = Classic(x, y, z, field, xderiv, yderiv, zderiv, struct_ind).fit()
assert (base - euler.baselevel_)/base <= precision, \
'baselevel: %g estimated: %g' % (base, euler.baselevel_)
assert np.all((pos - euler.estimate_)/pos <= precision), \
'position: %s estimated: %s' % (str(pos), str(euler.estimate_))
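# The fit/inspect pattern above is shared by every solver in this module
# (a sketch of the interface these tests exercise):
#
#     euler = Classic(x, y, z, field, xderiv, yderiv, zderiv, struct_ind)
#     euler.fit()        # least-squares solve
#     euler.estimate_    # [x, y, z] source position
#     euler.baselevel_   # estimated base level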
def test_euler_classic_expandingwindow_sphere_mag():
"gravmag.euler.ExpandingWindow w Classic for sphere model + magnetic data"
euler = ExpandingWindow(
Classic(x, y, z, field, xderiv, yderiv, zderiv, struct_ind),
center=[1000, 1000], sizes=np.linspace(100, 2000, 20)).fit()
assert (base - euler.baselevel_)/base <= precision, \
'baselevel: %g estimated: %g' % (base, euler.baselevel_)
assert np.all((pos - euler.estimate_)/pos <= precision), \
'position: %s estimated: %s' % (str(pos), str(euler.estimate_))
def test_euler_classic_movingwindow_sphere_mag():
"gravmag.euler.MovingWindow w Classic for sphere model + magnetic data"
euler = MovingWindow(
Classic(x, y, z, field, xderiv, yderiv, zderiv, struct_ind),
windows=[10, 10], size=(1000, 1000), keep=0.2).fit()
for b in euler.baselevel_:
assert (base - b)/base <= precision, \
'baselevel: %g estimated: %g' % (base, b)
for c in euler.estimate_:
assert np.all((pos - c)/pos <= precision), \
'position: %s estimated: %s' % (str(pos), str(c))
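# MovingWindow yields one solution per retained window (keep=0.2 retains a
# fraction of the window solutions), hence the loops over baselevel_ and
# estimate_ above.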
|
zxtstarry/src
|
book/rsf/rsf/gui/gui.py
|
Python
|
gpl-2.0
| 1,270
| 0.028346
|
#!/usr/bin/env python
import os, sys
try:
from Tkinter import *
except ImportError:
sys.stderr.write('Please install Tkinter!\n\n')
sys.exit(1)
root = Tk()
root.title('Wavelet Demo')
wtype = StringVar()
wtype.set('b')
type_frame = Frame(root,relief=SUNKEN,borderwidth=2)
type_frame.pack(side=TOP,fill=X)
Label(type_frame,text='Wavelet Type').pack(side=TOP)
types = {'h':'Haar',
'l':'Linear',
'b':'Bi-orthogonal'}
for t in 'hlb':
rbut = Radiobutton(type_frame,text=types[t],value=t,variable=wtype)
rbut.pack(side=LEFT)
pclip_frame = Frame(root,relief=SUNKEN,borderwidth=2)
pclip_frame.pack(side=TOP,fill=X)
pclip = IntVar()
pclip.set(50)
scale = Scale(pclip_frame,from_=1,to=99,resolution=1,orient=HORIZONTAL,
variable=pclip,length=200)
scale.pack(side=RIGHT)
Label(pclip_frame,text='Threshold\nPercentile').pack(side=RIGHT,anchor=SE)
frame = Frame(root)
frame.pack(side=TOP,fill=X)
quit = Button(frame,text='Quit',background='red',command=sys.exit)
quit.pack(side=RIGHT)
def scons():
'Get parameters from GUI and pass them to SCons'
os.system ('scons -Q type=%s pclip=%d view' % (wtype.get(),pclip.get()))
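# A hedged alternative to os.system that avoids shell string interpolation
# (subprocess is standard library; the name scons_safe is illustrative):
def scons_safe():
    'Pass parameters to SCons as an argument list, without a shell'
    import subprocess
    subprocess.call(['scons', '-Q', 'type=%s' % wtype.get(),
                     'pclip=%d' % pclip.get(), 'view'])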
cycle = Button(frame,text='Run',background='yellow',command=scons)
cycle.pack()
root.mainloop()
|