repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
ecell/ecell3 | ecell/frontend/model-editor/ecell/ui/model_editor/ComplexLine.py | Python | lgpl-3.0 | 13,350 | 0.027416 | #::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
# This file is part of the E-Cell System
#
# Copyright (C) 1996-2016 Keio University
# Copyright (C) 2008-2016 RIKEN
# Copyright (C) 2005-2009 The Molecular Sciences Institute
#
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
#
# E-Cell System is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# E-Cell System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with E-Cell System -- see the file COPYING.
# If not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#END_HEADER
try:
import gnomecanvas
except:
import gnome.canvas as gnomecanvas
from ecell.ui.model_editor.Constants import *
from ecell.ui.model_editor.Utils import *
from ecell.ui.model_editor.ResizeableText import *
class ComplexLine:
def __init__( self, anObject, aCanvas ):
self.theCanvas = aCanvas
self.parentObject = anObject
self.graphUtils = self.parentObject.getGraphUtils()
self.shapeMap = {}
self.lastmousex = 0
self.lastmousey = 0
self.buttonpressed = False
self.firstdrag=False
def show ( self ):
self.theRoot = self.parentObject.theCanvas.getRoot()
self.shapeDescriptorList = self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).getDescriptorList()
self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).reCalculate()
self.__sortByZOrder( self.shapeDescriptorList )
self.isSelected = False
for aKey in self.shapeDescriptorList.keys():
aDescriptor = self.shapeDescriptorList[aKey]
if aDescriptor[SD_TYPE] == CV_TEXT:
self.createText( aDescriptor )
elif aDescriptor[SD_TYPE] == CV_LINE:
self.createLine( aDescriptor )
elif aDescriptor[SD_TYPE] == CV_BPATH:
self.createBpath( aDescriptor )
self.isSelected = False
def repaint ( self ):
self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).reCalculate()
self.shapeDescriptorList = self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).getDescriptorList()
self.__sortByZOrder( self.shapeDescriptorList )
for aKey in self.shapeDescriptorList.keys():
aDescriptor = self.shapeDescriptorList[aKey]
if aDescriptor[SD_TYPE] == CV_TEXT:
self.redrawText( aDescriptor )
elif aDescriptor[SD_TYPE] == CV_LINE:
self.redrawLine( aDescriptor )
elif aDescriptor[SD_TYPE] == CV_BPATH:
self.redrawBpath( aDescriptor )
def reName( self ):
self.shapeDescriptorList = self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).getDescriptorList()
self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).renameLabel( self.parentObject.getProperty( CO_NAME ) )
aDescriptor = self.shapeDescriptorList["textbox"]
self.renameText( aDescriptor )
def delete( self ):
for aShapeName in self.shapeMap.keys():
self.shapeMap[ aShapeName ].destroy()
def selected( self ):
self.isSelected = True
def unselected( self ):
self.isSelected = False
def outlineColorChanged( self ):
self.fillColorChanged()
def fillColorChanged( self ):
# find shapes with outline color
anRGB = copyValue( self.parentObject.getProperty( OB_FILL_COLOR ) )
if self.isSelected:
for i in range(0,3):
anRGB[i] = 32768 + anRGB[i]
for aKey in self.shapeDescriptorList.keys():
aDescriptor = self.shapeDescriptorList[aKey]
if aDescriptor[ SD_COLOR ] == SD_FILL:
aColor = self.graphUtils.getGdkColorByRGB( anRGB )
if aDescriptor[SD_TYPE] in CV_LINE:
self.changeLineColor( aDescriptor[ SD_NAME ] , aColor )
elif aDescriptor[SD_TYPE] in CV_BPATH:
self.changeLineColorB( aDescriptor[ SD_NAME ] , aColor )
def createBpath(self, aDescriptor):
aSpecific= aDescriptor[SD_SPECIFIC]
# get pathdef
pathdef= aSpecific[BPATH_PATHDEF]
pd = gnomecanvas.path_def_new(pathdef)
aGdkColor = self.getGdkColor( aDescriptor )
#cheCk: 1starg > the Bpath, 2ndarg > Bpath width(def 3), 3rdarg > Color of Bpath(def black)
bpath = self.theRoot.add(gnomecanvas.CanvasBpath, width_units=3,
outline_color_gdk = aGdkColor)
bpath.set_bpath(pd)
self.addHandlers( bpath, aDescriptor[ SD_NAME ] )
self.shapeMap[ aDescriptor[ SD_NAME ] ] = bpath
#cheCk: createLine is in charge of the Simple Line, displaying it width, colour ..blabla..
#regardless of whether it is the arrowheads or the middle stuffs (MS), it creates all
#but, if the MS is a bpath (eg. curvedLi | neSD) it will overwrite the middle line, I THINK OLI
def createLine( self, aDes | criptor ):
lineSpec = aDescriptor[SD_SPECIFIC]
( X1, X2, Y1, Y2 ) = [lineSpec[0], lineSpec[2], lineSpec[1], lineSpec[3] ]
aGdkColor = self.getGdkColor( aDescriptor )
firstArrow = lineSpec[4]
secondArrow = lineSpec[5]
aLine = self.theRoot.add( gnomecanvas.CanvasLine,points=[X1,Y1,X2,Y2], width_units=lineSpec[ 6 ], fill_color_gdk = aGdkColor, first_arrowhead = firstArrow, last_arrowhead = secondArrow,arrow_shape_a=5, arrow_shape_b=5, arrow_shape_c=5 )
self.addHandlers( aLine, aDescriptor[ SD_NAME ] )
self.shapeMap[ aDescriptor[ SD_NAME ] ] = aLine
def changeLineColor ( self, shapeName, aColor ):
aShape = self.shapeMap[ shapeName ]
aShape.set_property('fill_color_gdk', aColor )
def changeLineColorB ( self, shapeName, aColor ):
aShape = self.shapeMap[ shapeName ]
aShape.set_property('outline_color_gdk', aColor )
def createText( self, aDescriptor ):
textSpec = aDescriptor[SD_SPECIFIC]
(X1, Y1) = ( textSpec[TEXT_ABSX], textSpec[TEXT_ABSY] )
aGdkColor = self.getGdkColor( aDescriptor )
aText = ResizeableText( self.theRoot, self.theCanvas, X1, Y1, aGdkColor, textSpec[TEXT_TEXT], gtk.ANCHOR_NW )
self.addHandlers( aText, aDescriptor[ SD_NAME ] )
self.shapeMap[ aDescriptor[ SD_NAME ] ] = aText
def redrawLine( self, aDescriptor ):
aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]
aSpecific = aDescriptor[ SD_SPECIFIC ]
x1 = aSpecific[0]
y1 = aSpecific[1]
x2 = aSpecific[2]
y2 = aSpecific[3]
hasFirstArrow = aSpecific[4]
hasLastArrow = aSpecific[5]
aShape.set_property( 'points', (x1, y1, x2, y2) )
aShape.set_property('first_arrowhead', hasFirstArrow )
aShape.set_property('last_arrowhead', hasLastArrow )
def redrawBpath( self, aDescriptor ):
aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]
pathdef = aDescriptor[ SD_SPECIFIC ][BPATH_PATHDEF]
pd=gnomecanvas.path_def_new(pathdef)
aShape.set_bpath(pd)
def redrawText( self, aDescriptor ):
aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]
aSpecific = aDescriptor[ SD_SPECIFIC ]
x = aSpecific[TEXT_ABSX]
y = aSpecific[TEXT_ABSY]
aShape.set_property( 'x', x )
aShape.set_property( 'y', y )
def renameText (self, aDescriptor ):
aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]
aSpecific = aDescriptor[ SD_SPECIFIC ]
label = aSpecific[ TEXT_TEXT ]
aShape.set_prope |
birkenfeld/rick | test.py | Python | gpl-2.0 | 4,893 | 0.002044 | # -------------------------------------------------------------------------------------------------
# Rick, a Rust intercal compiler. Save your souls!
#
# Copyright (c) 2015-2021 Georg Brandl
#
# This program is free software; you can redistribute it and/or modify it under the terms of the
# GNU General Public License as published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program;
# if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
# -------------------------------------------------------------------------------------------------
import os
import sys
import time
import difflib
from os import path
from subprocess import Popen, PIPE, STDOUT
already_compiled = set()
def run_test(testname, testcode, compiled):
stdin = b''
if path.isfile(testname + '.tst'):
with open(testname + '.tst', 'rb') as stdinfile:
stdin = stdinfile.read()
with open(testname + '.chk', 'r') as stdoutfile:
stdout = stdoutfile.read()
def check(proc, remove_cargo):
real_stdout, _ = proc.communicate(stdin)
real_stdout = real_stdout.decode()
# remove cargo's "Running" line
if remove_cargo:
errindex = real_stdout.find('An unknown error occurred')
if errindex == -1:
errindex = real_stdout.find('error: Process didn\'t exit successfully')
if errindex > -1:
real_stdout = real_stdout[:errindex]
if real_stdout != stdout:
print('*** ERROR: standard output does not match check file')
print(''.join(difflib.unified_diff(stdout.splitlines(True),
real_stdout.splitlines(True))))
raise RuntimeError
print('')
print('>>> Test: ' + testname)
print(' > Step 1: interpreted')
check(Popen(['cargo', 'run', '--release', '-q', '--', '-Rbi', testcode],
stdin=PIPE, stdout=PIPE, stderr=STDOUT), True)
print(' > Step 2: interpreted + optimized')
check(Popen(['cargo', 'run', '--release', '-q', '--', '-Rbio', testcode],
stdin=PIPE, stdout=PIPE, stderr=STDOUT), True)
if compiled:
print(' > Step 3: compiled + optimized')
if testcode not in already_compiled:
if os.system('cargo run --release -q -- -RFbo %s > /dev/null' % testcode) != 0:
print('*** ERROR: compilation failed')
raise RuntimeError
already_compiled.add(testcode)
check(Popen([testcode[:-2]], stdin=PIPE, stdout=PIPE, stderr=STDOUT),
False)
def main():
start = time.time()
compile_flag = '--nocompile' not in sys.argv
skip_flag = '--all' not in sys.argv
tests = [path.splitext(test.replace('/', os.sep))[0]
for test in sys.argv[1:] if not test.startswith('-')]
print('Building...')
if os.system('cargo build --release') != 0:
return 2
print('Running tests, please wait...')
passed = 0
total = 0
failed = []
for root, dirs, files in os.walk('code'):
dirs.sort()
for fn in sorted(files):
if not fn.endswith('.chk'):
continue
if skip_flag and fn.startswith(('fft-', 'flonck', 'unlambda')):
continue
testname = path.join(root, fn)[:-4]
if tests and testname not in tests:
continue
testcode = testname + '.i'
# special case
if fn.startswith('fft-'):
testcode = path.join(root, 'fft.i')
elif fn.startswith('life-'):
testcode = path.join(root, 'life2.i')
if not path.isfile(testcode):
print('')
| print('*** WARNING: found %s.chk, but not %s' % (testname, testcode))
continue
total += 1
try:
t1 = time.time()
run_test(testname, testcode, compile_flag)
t2 = time.time()
passed += 1
print('--- passed (%5.2f sec | )' % (t2 - t1))
except RuntimeError:
failed.append(testname)
end = time.time()
print('')
print('RESULT: %d/%d tests passed (%6.2f sec)' % (passed, total, end - start))
if failed:
print('Failed:')
for testname in failed:
print(' ' + testname)
return 0 if passed == total else 1
if __name__ == '__main__':
sys.exit(main())
|
Passtechsoft/TPEAlpGen | blender/release/scripts/addons_contrib/mesh_snap_utilities_line.py | Python | gpl-3.0 | 41,708 | 0.007025 | ### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
# Contact for more information about the Addon:
# Email: germano.costa@ig.com.br
# Twitter: wii_mano @mano_wii
bl_info = {
"name": "Snap_Utilities_Line",
"author": "Germano Cavalcante",
"version": (5, 2),
"blender": (2, 75, 0),
"location": "View3D > TOOLS > Snap Utilities > snap utilities",
"description": "Extends Blender Snap controls",
"wiki_url" : "http://blenderartists.org/forum/showthread.php?363859-Addon-CAD-Snap-Utilities",
"category": "Mesh"}
import bpy, bgl, bmesh, mathutils, math
from mathutils import Vector
from mathutils.geometry import (
intersect_point_line,
intersect_line_line,
intersect_line_plane,
intersect_ray_tri)
def get_units_info(scale, unit_system, separate_units):
if unit_system == 'METRIC':
scale_steps = ((1000, 'km'), (1, 'm'), (1 / 100, 'cm'),
(1 / 1000, 'mm'), (1 / 1000000, '\u00b5m'))
elif unit_system == 'IMPERIAL':
scale_steps = ((5280, 'mi'), (1, '\''),
(1 / 12, '"'), (1 / 12000, 'thou'))
scale /= 0.3048 # BU to feet
else:
scale_steps = ((1, ' BU'),)
separate_units = False
return (scale, scale_steps, separate_units)
def convert_distance(val, units_info, PRECISION = 5):
scale, scale_steps, separate_units = units_info
sval = val * scale
idx = 0
while idx < len(scale_steps) - 1:
if sval >= scale_steps[idx][0]:
break
idx += 1
factor, suffix = scale_steps[idx]
sval /= factor
if not separate_units or idx == len(scale_steps) - 1:
dval = str(round(sval, PRECISION)) + suffix
else:
ival = int(sval)
dval = str(round(ival, PRECISION)) + suffix
fval = sval - ival
idx += 1
while idx < len(scale_steps):
fval *= scale_steps[idx - 1][0] / scale_steps[idx][0]
if fval >= 1:
dval += ' ' \
+ ("%.1f" % fval) \
+ scale_steps[idx][1]
break
idx += 1
return dval
def location_3d_to_region_2d(region, rv3d, coord):
prj = rv3d.perspective_matrix * Vector((coord[0], coord[1], coord[2], 1.0))
width_half = region.width / 2.0
height_half = region.height / 2.0
return Vector((width_half + width_half * (prj.x / prj.w),
height_half + height_half * (prj.y / prj.w),
))
def region_2d_to_orig_and_view_vector(region, rv3d, coord, clamp=None):
viewinv = rv3d.view_matrix.inverted()
persinv = rv3d.perspective_matrix.inverted()
dx = (2.0 * coord[0] / region.width) - 1.0
dy = (2.0 * coord[1] / region.height) - 1.0
if rv3d.is_perspective:
origin_start = viewinv.translation.copy()
out = Vector((dx, dy, -0.5))
w = out.dot(persinv[3].xyz) + persinv[3][3]
view_vector = ((persinv * out) / w) - origin_start
else:
view_vector = -viewinv.col[2].xyz
origin_start = ((persinv.col[0].xyz * dx) +
(persinv.col[1].xyz * dy) +
viewinv.translation)
if clamp != 0.0:
if rv3d.view_perspective != 'CAMERA':
# this value is scaled to the far clip already
origin_offset = persinv.col[2].xyz
if clamp is not None:
if clamp < 0.0:
origin_offset.negate()
clamp = -clamp
if origin_offset.length > clamp:
origin_offset.length = clamp
origin_start -= origin_offset
view_vector.normalize()
return origin_start, view_vector
def out_Location(rv3d, region, orig, vector):
view_matrix = rv3d.view_matrix
v1 = Vector((int(view_matrix[0][0]*1.5),int(view_matrix[0][1]*1.5),int(view_matrix[0][2]*1.5)))
v2 = Vector((int(view_matrix[1][0]*1.5),int(view_matrix[1][1]*1.5),int(view_matrix[1][2]*1.5)))
hit = intersect_ray_tri(Vector((1,0,0)), Vector((0,1,0)), Vector((0,0,0)), (vector), (orig), False)
if hit == None:
hit = intersect_ray_tri(v1, v2, Vector((0,0,0)), (vector), (orig), False)
if hit == None:
hit = intersect_ray_tri(v1, v2, Vector((0,0,0)), (-vector), (orig), False)
if hit == None:
hit = Vector((0,0,0))
return hit
def snap_utilities(self,
context,
obj_matrix_world,
bm_geom,
bool_update,
mcursor,
outer_verts = False,
constrain = None,
previous_vert = None,
ignore_obj = None,
increment = 0.0):
rv3d = context.region_data
region = context.region
is_increment = False
if not hasattr(self, 'snap_cache'):
self.snap_cache = True
self.type = 'OUT'
self.bvert = None
self.bedge = None
self.bface = None
self.hit = False
self.out_obj = None
if bool_update:
#self.bvert = None
self.bedge = None
#self.bface = None
if isinstance(bm_geom, bmesh.types.BMVert):
self.type = 'VERT'
if self.bvert != bm_geom:
self.bvert = bm_geom
self.vert = obj_matrix_world * self.bvert.co
#self.Pvert = location_3d_to_region_2d(region, rv3d, self.vert)
if constrain:
#self.location = (self.vert-self.const).project(vector_constrain) + self.const
location = intersect_point_line(self.vert, constrain[0], constrain[1])
#factor = location[1]
self.location = location[0]
else:
self.location = self.vert
elif isinstance(bm_geom, bmesh.types.BMEdge):
if self.bedge != bm_geom:
self.bedge = bm_geom
self.vert0 = obj_matrix_world*self.bedge.verts[0].co
self.vert1 = obj_matrix_world*self.bedge.verts[1].co
self.po_cent = (self.vert0+self.vert1)/2
self.Pcent = location_3d_to_region_2d(region, rv3d, self.po_cent)
self.Pvert0 = location_3d_to_region_2d(region, rv3d, self.vert0)
self.Pvert1 = location_3d_to_region_2d(region, rv3d, self.vert1)
if previous_vert and previous_vert not in self.bedge.verts:
pvert_co = obj_matrix_world*previous_vert.co
point_perpendicular = intersect_point_line(pvert_co, self.vert0, self.vert1)
self.po_perp = point_perpendicular[0]
#factor = point_perpendicular[1]
self.Pperp = location_3d_to_region_2d(region, rv3d, self.po_perp)
if constrain:
location = intersect_line_line(constrain[0], constrain[1], self.vert0, self.vert1)
if location == None:
| is_increment = True
orig, view_vector = region_2d_to_orig_and_view_vector(region, rv3d, mcursor)
end = orig + view_vector
location = intersect_line_line(c | onstrain[0], constrain[1], orig, end)
if location:
self.location = location[0]
else:
self.location = constrain[0]
elif hasattr(self, 'Pperp') and abs(self.Pperp[0]-mcursor[0]) < 10 and abs(self.Pperp[1]-mcursor[1]) < 10:
self.type = 'PERPENDICULAR'
self.location = self.po_perp
elif abs(se |
awacha/cct | cct/qtgui2/measurement/monitor/monitor.py | Python | bsd-3-clause | 17,577 | 0.003983 | import logging
import time
from typing import Optional, Final, Any
import numpy as np
from PyQt5 import QtWidgets, QtGui, QtCore
from matplotlib.axes import Axes
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT, FigureCanvasQTAgg
from matplotlib.figure import Figure
from matplotlib.lines import Line2D
import matplotlib.transforms
from .monitor_ui import Ui_Form
from ...utils.window import WindowRequiresDevices
from ...utils.plotimage import PlotImage
from ....core2.devices.xraysource import GeniX
from ....core2.devices.device.frontend import DeviceFrontend
from ....core2.dataclasses import Exposure
from ....core2.algorithms.beamweighting import beamweights
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class MonitorMeasurement(QtWidgets.QWidget, WindowRequiresDevices, Ui_Form):
required_devicetypes = ['source', 'detector']
figureIntensity: Figure
figurePosition: Figure
canvasIntensity: FigureCanvasQTAgg
canvasPosition: FigureCanvasQTAgg
toolbarIntensity: NavigationToolbar2QT
toolbarPosition: NavigationToolbar2QT
axesIntensity: Axes
axesPosition: Axes
axesHPositionKDE: Axes
axesVPositionKDE: Axes
xTargetLine: Optional[Line2D] = None
yTargetLine: Optional[Line2D] = None
xTargetLineKDE: Optional[Line2D] = None
yTargetLineKDE: Optional[Line2D] = None
intensityTargetLine: Optional[Line2D] = None
buffer: Optional[np.ndarray] = None
cursor: Optional[int] = None # point in the buffer where the next measurement will be written
bufferdtype: Final[np.dtype] = np.dtype([('time', 'f4'), ('intensity', 'f4'), ('beamx', 'f4'), ('beamy', 'f4'), ])
kdepointcount: Final[int] = 1000
debugmode: bool=False
plotimage: PlotImage
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.setupUi(self)
def setupUi(self, Form):
super().setupUi(Form)
self.plotimage = PlotImage(self)
self.plotImageVerticalLayout.addWidget(self.plotimage, 1)
self.plotimage.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.MinimumExpanding)
self.figureIntensity = Figure(constrained_layout=True)
self.canvasIntensity = FigureCanvasQTAgg(self.figureIntensity)
self.toolbarIntensity = NavigationToolbar2QT(self.canvasIntensity, self)
self.intensityFigureVerticalLayout.addWidget(self.toolbarIntensity)
self.intensityFigureVerticalLayout.addWidget(self.canvasIntensity, 1.0)
self.axesIntensity = self.figureIntensity.add_subplot(1, 1, 1)
self.figurePosition = Figure(constrained_layout=True)
self.canvasPosition = FigureCanvasQTAgg(self.figurePosition)
self.toolbarPosition = NavigationToolbar2QT(self.canvasPosition, self)
self.positionFigureVerticalLayout.addWidget(self.toolbarPosition)
self.positionFigureVerticalLayout.addWidget(self.canvasPosition, 1.0)
gs = self.figurePosition.add_gridspec(4, 4)
self.axesPosition = self.figurePosition.add_subplot(gs[1:, :-1])
self.axesHPositionKDE = self.figurePosition.add_subplot(gs[0, :-1], sharex=self.axesPosition)
self.axesVPositionKDE = self.figurePosition.add_subplot(gs[1:, -1], sharey=self.axesPosition)
self.startStopToolButton.clicked.connect(self.startStop)
self.clearBufferToolButton.clicked.connect(self.clearBuffer)
self.shutterToolButton.toggled.connect(self.moveShutter)
self.bufferLengthSpinBox.valueChanged.connect(self.resizeBuffer)
self.debugModeGroupBox.setVisible(self.debugmode)
self.beamXTargetCheckBox.toggled.connect(self.updateTargetLines)
self.beamYTargetCheckBox.toggled.connect(self.updateTargetLines)
self.intensityTargetCheckBox.toggled.connect(self.updateTargetLines)
self.beamXTargetDoubleSpinBox.valueChanged.connect(self.updateTargetLines)
self.beamYTargetDoubleSpinBox.valueChanged.connect(self.updateTargetLines)
self.intensityTargetDoubleSpinBox.valueChanged.connect(self.updateTargetLines)
self.resizeBuffer()
def updateTargetLines(self):
logger.debug(f'UpdateTargetLines() called from {self.sender().objectName()=}')
if self.xTargetLine is not None:
logger.debug('xTargetLine')
self.xTargetLine.set_xdata(self.beamXTargetDoubleSpinBox.value())
self.xTargetLine.set_visible(self.beamXTargetCheckBox.isChecked())
if self.yTargetLine is not None:
logger.debug('yTargetLine')
self.yTargetLine.set_ydata(self.beamYTargetDoubleSpinBox.value())
self.yTargetLine.set_visible(self.beamYTargetCheckBox.isChecked())
if self.xTargetLineKDE is not None:
logger.debug('xTargetLineKDE')
self.xTargetLineKDE.set_xdata(self.beamXTargetDoubleSpinBox.value())
self.xTargetLineKDE.set_visible(self.beamXTargetCheckBox.isChecked())
if self.yTargetLineKDE is not None:
logger.debug('yTargetLineKDE')
self.yTargetLineKDE.set_ydata(self.beamYTargetDoubleSpinBox.value())
self.yTargetLineKDE.set_visible(self.beamYTargetCheckBox.isChecked())
if self.intensityTargetLine is not None:
logger.debug('intensityTargetLine')
self.intensityTargetLine.set_ydata(self.intensityTargetDoubleSpinBox.value())
self.intensityTargetLine.set_visible(self.intensityTargetCheckBox.isChecked())
# return
self.axesPosition.relim(visible_only=True)
self.axesPosition.autoscale_view(False)
self.axesHPositionKDE.relim(visible_only=True)
self.axesHPositionKDE.autoscale_view(False)
self.axesVPositionKDE.relim(visible_only=True)
self.axesVPositionKDE.autoscale_view(False)
self.axesIntensity.relim(visible_only=True)
self.axesIntensity.autoscale_view(False)
self.canvasPosition.draw_idle()
self.canvasIntensity.draw_idle()
def resizeBuffer(self):
"""Resize the measurement buffer"""
# the measurement buffer is a 1-dimensional structured array, initially filled with NaNs. Measured data starts
# being written from index 0, going towards the end. When the end is reached, the writing position loops back to
# the start, overwriting previous points.
newbuffer = np.empty(self.bufferLengthSpinBox.value(), dtype=self.bufferdtype)
newbuffer[:] = np.nan
if self.buffer is not None:
# first rotate the old buffer
if np.isnan(self.buffer['time']).sum():
# there are still NaNs: we have not yet looped | over. Next index to be wri | tten is `self.cursor`.
oldbuffer = self.buffer[:self.cursor]
else:
# we have already looped over
oldbuffer = np.hstack((self.buffer[self.cursor:], self.buffer[:self.cursor]))
# see how much of the old buffer fits in the new one. Prefer the most recent elements
newbuffer[:min(len(oldbuffer), len(newbuffer))] = oldbuffer[-min(len(oldbuffer), len(newbuffer)):]
self.buffer = newbuffer
self.cursor = (~np.isnan(self.buffer['time'])).sum() % len(self.buffer)
self.redraw()
def startStop(self):
if self.startStopToolButton.text() == 'Start':
# no measurement running, start it.
self.startStopToolButton.setText('Stop')
self.startStopToolButton.setIcon(QtGui.QIcon(QtGui.QPixmap(':/icons/stop.svg')))
if self.debugmode:
self.startTimer(int(self.waitTimeDoubleSpinBox.value() * 1000), QtCore.Qt.PreciseTimer)
else:
self.instrument.exposer.exposureFinished.connect(self.onExposureFinished)
self.instrument.exposer.imageReceived.connect(self.onImageReceived)
self.instrument.exposer.startExposure('mon', self.expTimeDoubleSpinBox.value(), 1)
else:
# measurement is running, stop it
self.startStopToolButton.setText('Start')
self.startStopToolButton.setIcon(QtGui.QIcon(QtGui.QPixmap(':/icons/start.svg')))
|
michaellas/streaming-vid-to-gifs | src/mark_frame_service/service.py | Python | mit | 3,218 | 0.016362 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#import modułów konektora msg_strea | m_connector
from ComssServiceDevelopment.connectors.tcp.msg_stream_connector import InputMessageConnector, OutputMessage | Connector
#import modułów klasy bazowej Service oraz kontrolera usługi
from ComssServiceDevelopment.service import Service, ServiceController
import cv2 #import modułu biblioteki OpenCV
import numpy as np #import modułu biblioteki Numpy
import os
import threading
from time import time
OPACITY = 0.4 # rectangle opacity
SIZE = 0.25 # occupied by rectangle
RECT_DISPLAY_LEN = 3 # seconds?
class MarkFrameService(Service):
"""klasa usługi musi dziedziczyć po ComssServiceDevelopment.service.Service"""
def __init__(self):
""""nie"konstruktor, inicjalizator obiektu usługi"""
#wywołanie metody inicjalizatora klasy nadrzędnej
super(MarkFrameService, self).__init__()
self.filters_lock = threading.RLock()
self.last_rect_shown_time = None
def declare_outputs(self):
"""deklaracja wyjść"""
#deklaracja wyjścia "videoOutput" będącego interfejsem
#wyjściowym konektora msg_stream_connector
self.declare_output("videoOutput", OutputMessageConnector(self))
def declare_inputs(self):
"""deklaracja wejść"""
#deklaracja wejścia "videoInput" będącego interfejsem wyjściowym konektora msg_stream_connector
self.declare_input("videoInput", InputMessageConnector(self))
def run(self):
"""główna metoda usługi"""
video_input = self.get_input("videoInput") #obiekt interfejsu wejściowego
video_output = self.get_output("videoOutput") #obiekt interfejsu wyjściowego
#pętla główna usługi
while self.running():
frame_obj = video_input.read() #odebranie danych z interfejsu wejściowego
frame = np.loads(frame_obj) #załadowanie ramki do obiektu NumPy
# filters
time_now = time()
with self.filters_lock:
current_filters = self.get_parameter("filtersOn")
if 1 in current_filters:
# self.set_parameter("filtersOn", [])
self.update_parameters({"filtersOn": []}) # reset filters
self.last_rect_shown_time = time_now
self.__draw_rectangle(frame)
elif self.last_rect_shown_time and (time_now - self.last_rect_shown_time) < RECT_DISPLAY_LEN:
self.__draw_rectangle(frame)
# forward
video_output.send(frame.dumps()) #przesłanie ramki za pomocą interfejsu wyjściowego
def __draw_rectangle(self, frame):
height, width, _ = frame.shape
overlay = frame.copy()
cv2.rectangle(overlay,(0,0),(int(width*SIZE),int(height*SIZE)),(255,0,0),-1)
cv2.addWeighted(overlay, OPACITY, frame, 1 - OPACITY, 0, frame)
if __name__=="__main__":
#utworzenie obiektu kontrolera usługi
config_name = os.path.join( os.path.dirname(__file__), "service.json") # f.e. src\mark_frame_service\service.json
sc = ServiceController(MarkFrameService, config_name)
sc.start() #uruchomienie usługi
|
Mariusz1970/enigma2 | lib/python/Plugins/Extensions/Infopanel/sundtek.py | Python | gpl-2.0 | 11,219 | 0.02781 | #
# sundtek control center
# coded by giro77
#
#
from Screens.Screen import Screen
from Screens.Console import Console
from Screens.MessageBox import MessageBox
from Screens.InputBox import InputBox
from Components.ActionMap import ActionMap
from Components.Input import Input
from Components.MenuList import MenuList
from Components.config import config, getConfigListEntry, ConfigSubsection, ConfigInteger, ConfigYesNo, ConfigText, ConfigSelection, configfile
from Components.ConfigList import ConfigListScreen
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Components.MultiContent import MultiContentEntryText
from Components.Label import Label
from Plugins.Plugin import PluginDescriptor
from Tools.NumericalTextInput import NumericalTextInput
import os
# for localized texts
from . import _
## configs ################################################################
config.plugins.SundtekControlCenter = ConfigSubsection()
config.plugins.SundtekControlCenter.dvbtransmission = ConfigSelection(default="0", choices = [("0", _("DVB-S/SVB-S2")),("1", _("DVB-C")),("2", _("DVB-T"))])
config.plugins.SundtekControlCenter.autostart = ConfigYesNo(default=False)
config.plugins.SundtekControlCenter.usbnet = ConfigSubsection()
config.plugins.SundtekControlCenter.usbnet.selection = ConfigSelection(default="0", choices = [("0", _("via USB")),("1", _("via Network"))])
config.plugins.SundtekControlCenter.usbnet.networkip = ConfigText(default="0.0.0.0", visible_width = 50, fixed_size = False)
## version string #########################################################
sundtekcontrolcenter_version = "1.0.r2"
###########################################################################
class SundtekControlCenter(Screen, ConfigListScreen):
skin = """
<screen title="SundtekControlCenter" position="center,center" size="570,400" name="SundtekControlCenter">
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
<widget name="btt_red" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget name="btt_green" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget name="btt_yellow" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget name="btt_blue" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget name="ok" position="10,292" zPosition="1" size="550,40" font="Regular;20" halign="left" valign="center" transparent="1" />
<widget name="infos" position="10,316" zPosition="1" size="450,40" font="Regular;20" halign="left" valign="center" transparent="1" />
<widget name="bouquets" position="10,340" zPosition="1" size="450,40" font="Regular;20" halign="left" valign="center" transparent="1" />
<widget name="netservers" position="10,364" zPosition="1" size="450,40" font="Regular;20" halign="left" valign="center" transparent="1" />
<widget name="config" position="100,100" size="370,200" scrollbarMode="showOnDemand" zPosition="1"/>
<ePixmap position="460, 350" size="100,40" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/icons/plugin.png" transparent="1" alphatest="on" />
</screen>"""
def __init__(self, session, args=0):
Screen.__init__(self, session)
ConfigListScreen.__init__(self, [])
self.updateSettingList()
self["btt_red"] = Label(_("Back"))
self["btt_green"] = Label(_("Setup"))
self["btt_yellow"] = Label(_("Stop Tuner"))
self["btt_blue"] = Label(_("Start Tuner"))
self["ok"] = Label(_("OK/ green = activate settings"))
self["infos"] = Label(_("Info = show tuner informations"))
self["bouquets"] = Label(_("Bouquet + = install or update driver"))
self["netservers"] = Label(_("Bouquet - = scan for IPTV server addresses"))
self["actions"] = ActionMap(["OkCancelActions", "ChannelSelectBaseActions", "ColorActions","ChannelSelectEPGActions"],
{
"ok": self.save,
"cancel": self.cancel,
"red": self.cancel,
"green": self.save,
"yellow": self.tunerstop,
"blue": self.tunerstart,
"showEPGList": self.dvbinfo,
"nextBouquet": self.fetchsundtekdriver,
"prevBouquet": self.scannetwork,
},-2)
self.onLayoutFinish.append(self.layoutFinished)
def keyLeft(self):
ConfigListScreen.keyLeft(self)
self.updateSettingList()
def keyRight(self):
ConfigListScreen.keyRight(self)
self.updateSettingList()
def updateSettingList(self):
list = [] ### creating list
list.append(getConfigListEntry(_("DVB Transmission Way"), config.plugins.SundtekControlCenter.dvbtransmission))
list.append(getConfigListEntry(_("USB/Network"), config.plugins.SundtekControlCenter.usbnet.selection))
if config.plugins.SundtekControlCenter.usbnet.selection.getValue() == "1": ## if networking then add ip mask to list
sublist = [
getConfigListEntry(_("Network IP"), config.plugins.SundtekControlCenter.usbnet.networkip)
]
list.extend(sublist)
list.append(getConfigListEntry(_("Autostart"), config.plugins.SundtekControlCenter.autostart))
self["config"].list = list
self["config"].l.setList(list)
def layoutFinished(self):
self.setTitle(_("Sundtek Control Center"))
def fetchsundtekdriver(self):
self.session.openWithCallback(self.disclaimer, MessageBox, _("Sundtek legal notice:\nThis software comes without any warranty, use it at your own risk?"), MessageBox.TYPE_YESNO)
def disclaimer(self, result):
if result:
self.prompt("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/sundtekinstall.sh")
def save(self):
for x in self["config"].list:
x[1].save()
configfile.save()
self.setsettings()
def cancel(self):
for x in self["config"].list:
x[1].cancel()
self.close(False, self.session)
####################################################################
def setsettings(self):
if (not os.path.exists("/usr/sundtek")):
#maybe the driver is not or installed incorrect.
self.session.openWithCallback(self.installdriverrequest, MessageBox, _("It seems the sundtek driver is not installed or not installed properly. Install the driver now?"), MessageBox.TYPE_YESNO)
else: # driver is installed
### disable autostart
if config.plugins.SundtekControlCenter.autostart.getValue() == False:
self.prompt("/usr/sundtek/sun_dvb.sh noautostart")
if config.plugins.SundtekControlCenter.usbnet.selection.getValue() == "1":
### save the IP for networking
f=open("/etc/sundtek.net", "w")
networkingip=config.plugins.SundtekControlCenter.usbnet.networkip.getValue()+"\n"
networkingip.lstrip().rstrip()
f.writelines('REMOTE_IPTV_SERVER='+networkingip)
f.close()
if config.plugins.SundtekControlCenter.autostart.getValue() == True:
self.prompt("/usr/sundte | k/sun_dvb.sh enable_net")
else:
if config.plugins.SundtekControlCenter.dvbtransmission.getValue() == "0":
### dvb-s/ dvb-s2
if config.plugins.SundtekControlCenter.autostart.getValue() == True:
### enable autostart
self.prompt("/usr/sundtek/sun_dvb.sh enable_s2")
elif config.plugins.SundtekControlCenter.dvbtransmis | sion.getValue() == "1":
### dvb-c
if config.plugins.SundtekControlCenter.autostart.getValue() == True:
### enable autostart
self.prompt("/usr/sundtek/sun_dvb.sh enable_c")
else:
### dvb-t
if config.plugins.SundtekControlCenter.autostart.getValue() == True:
### enable autostart
self.prompt("/usr/sundtek/sun_dvb.sh enable_t")
def tunerstart(self):
for x in self["config"].list:
x[1] |
Mirantis/pumphouse | setup.py | Python | apache-2.0 | 656 | 0 | # Copyright (c) 2014 Mirantis Inc. |
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF | ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
|
tuanvu216/udacity-course | data_wrangling_with_mongodb/Lesson_4_Problem_Set/02-Inserting_into_DB/dbinsert.py | Python | mit | 407 | 0.007371 | import json
d | ef insert_data(data, db):
# Your code here. Insert the data into a collection 'arachnid'
pass
if __name__ == "__main__":
from pymongo import MongoClient
client = MongoClient("mongodb://localhost:27017")
db = client.examples
with open('arachnid.json') as f:
data = json.loads(f.read())
insert_data(data, db)
| print db.arachnid.find_one() |
asm-products/formspree | formspree/settings.py | Python | agpl-3.0 | 2,551 | 0.002744 | import os
import sys
from flask import render_template
# load a bunch of environment
DEBUG = os.getenv('DEBUG') in ['True', 'true', '1', 'yes']
if DEBUG:
SQLALCHEMY_ECHO = True
TESTING = os.getenv('TESTING') in ['True', 'true', '1', 'yes']
SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI') or os.getenv('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = False
LOG_LEVEL = os.getenv('LOG_LEVEL') or 'debug'
SECRET_KEY = os.getenv('SECRET_KEY') or ''
HASHIDS_SALT = os.getenv('HASHIDS_SALT') or ''
NONCE_SECRET = (os.getenv('NONCE_SECRET') or '').encode('utf-8')
GRANDFATHER_MONTHLY_LIMIT = 1000
OVERLIMIT_NOTIFICATION_QUANTITY = 25
MONTHLY_SUBMISSIONS_LIMIT = int(os.getenv('MONTHLY_SUBMISSIONS_LIMIT') or 100)
ARCHIVED_SUBMISSIONS_LIMIT = int(os.getenv('ARCHIVED_SUBMISSIONS_LIMIT') or 1000)
FORM_LIMIT_DECREASE_ACTIVATION_SEQUENCE = int(os.getenv('FORM_LIMIT_DECREASE_ACTIVATION_SEQUENCE') or 0)
EXPENSIVELY_WIPE_SUBMISSIONS_FREQUENCY = float(os.getenv('EXPENSIVELY_WIPE_SUBMISSIONS_FREQUENCY') or 0.2)
REDIS_URL = os.getenv('REDISTOGO_URL') or os.getenv('REDISCLOUD_URL') or 'redis://localhost:6379'
CDN_URL = os.getenv('CDN_URL')
SERVICE_NAME = os.getenv('SERVICE_NAME') or 'Forms'
UPGRADED_PLAN_NAME = os.getenv('UPGRADED_PLAN_NAME') or 'Gold'
SERVICE_URL = os.getenv('SERVICE_URL') or 'http://example.com'
CONTACT_EMAIL = os.getenv('CONTACT_EMAIL') or 'team@example.com'
NEWSLETTER_EMAIL = os.getenv('NEWSLETTER_EMAIL') or 'signup@example.com'
DEFAULT_SENDER = os.getenv('DEFAULT_SENDER') or 'Forms Team <submissions@example.com>'
ACCOUNT_SENDER = os.getenv('ACCOUNT_SENDER') or DEFAULT_SENDER
API_ROOT = os.getenv('API_ROOT') or '//example.com'
SENDGRID_USERNAME = os.getenv('SENDGRID_USERNAME')
SENDGRID_PASSWORD = os.getenv('SENDGRID_PASSWORD')
STRIPE_TEST_PUBLISHABLE_KEY = os.getenv('STR | IPE_TEST_PUBLISHABLE_KEY')
STRIPE_TEST_SECRET | _KEY = os.getenv('STRIPE_TEST_SECRET_KEY')
STRIPE_PUBLISHABLE_KEY = os.getenv('STRIPE_PUBLISHABLE_KEY') or STRIPE_TEST_PUBLISHABLE_KEY
STRIPE_SECRET_KEY = os.getenv('STRIPE_SECRET_KEY') or STRIPE_TEST_SECRET_KEY
STRIPE_WEBHOOK_SECRET = os.getenv('STRIPE_WEBHOOK_SECRET')
GA_KEY = os.getenv('GA_KEY') or '123456'
RECAPTCHA_SECRET = os.getenv('RECAPTCHA_SECRET')
RECAPTCHA_KEY = os.getenv('RECAPTCHA_KEY')
RATE_LIMIT = os.getenv('RATE_LIMIT', '30 per hour')
REDIS_RATE_LIMIT = os.getenv('REDIS_URL') # heroku-redis
CONTACT_FORM_HASHID = os.getenv('CONTACT_FORM_HASHID', CONTACT_EMAIL)
TYPEKIT_KEY = os.getenv('TYPEKIT_KEY', '1234567')
CELERY_BROKER_URL = os.getenv('REDIS_URL')
|
changsimon/trove | trove/extensions/mgmt/volume/service.py | Python | apache-2.0 | 1,428 | 0 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# | a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. Se | e the
# License for the specific language governing permissions and limitations
# under the License.
from trove.common import wsgi
from trove.common.auth import admin_context
from trove.extensions.mgmt.volume import models
from trove.extensions.mgmt.volume import views
from trove.openstack.common import log as logging
from trove.openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class StorageController(wsgi.Controller):
"""Controller for storage device functionality."""
@admin_context
def index(self, req, tenant_id):
"""Return all storage devices."""
LOG.info(_("req : '%s'\n\n") % req)
LOG.info(_("Indexing storage info for tenant '%s'") % tenant_id)
context = req.environ[wsgi.CONTEXT_KEY]
storages = models.StorageDevices.load(context)
return wsgi.Result(views.StoragesView(storages).data(), 200)
|
mlperf/training_results_v0.5 | v0.5.0/nvidia/submission/code/object_detection/pytorch/maskrcnn_benchmark/config/defaults.py | Python | apache-2.0 | 11,814 | 0.001354 |
#
# | Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# Copyright (c) 2017-2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/license | s/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from yacs.config import CfgNode as CN
# -----------------------------------------------------------------------------
# Convention about Training / Test specific parameters
# -----------------------------------------------------------------------------
# Whenever an argument can be either used for training or for testing, the
# corresponding name will be post-fixed by a _TRAIN for a training parameter,
# or _TEST for a test-specific parameter.
# For example, the number of images during training will be
# IMAGES_PER_BATCH_TRAIN, while the number of images for testing will be
# IMAGES_PER_BATCH_TEST
# -----------------------------------------------------------------------------
# Config definition
# -----------------------------------------------------------------------------
_C = CN()
_C.MODEL = CN()
_C.MODEL.RPN_ONLY = False
_C.MODEL.MASK_ON = False
_C.MODEL.DEVICE = "cuda"
_C.MODEL.META_ARCHITECTURE = "GeneralizedRCNN"
# If the WEIGHT starts with a catalog://, like :R-50, the code will look for
# the path in paths_catalog. Else, it will use it as the specified absolute
# path
_C.MODEL.WEIGHT = ""
# -----------------------------------------------------------------------------
# Load pre-trained models from C2 Detectron
# -----------------------------------------------------------------------------
_C.MODEL.C2_COMPAT = CN()
# Weight file from C2 Detectron. Should be in .pkl format
_C.MODEL.C2_COMPAT.WEIGHTS = ""
# Name of the function that loads the C2 weights into our PyTorch model
_C.MODEL.C2_COMPAT.WEIGHT_LOADER = ""
# Load from C2 Detectron or not
_C.MODEL.C2_COMPAT.ENABLED = False
# -----------------------------------------------------------------------------
# INPUT
# -----------------------------------------------------------------------------
_C.INPUT = CN()
# Size of the smallest side of the image during training
_C.INPUT.MIN_SIZE_TRAIN = 800 # (800,)
# Maximum size of the side of the image during training
_C.INPUT.MAX_SIZE_TRAIN = 1333
# Size of the smallest side of the image during testing
_C.INPUT.MIN_SIZE_TEST = 800
# Maximum size of the side of the image during testing
_C.INPUT.MAX_SIZE_TEST = 1333
# Values to be used for image normalization
_C.INPUT.PIXEL_MEAN = [102.9801, 115.9465, 122.7717]
# Values to be used for image normalization
_C.INPUT.PIXEL_STD = [1., 1., 1.]
# -----------------------------------------------------------------------------
# Dataset
# -----------------------------------------------------------------------------
_C.DATASETS = CN()
# List of the dataset names for training, as present in paths_catalog.py
_C.DATASETS.TRAIN = ()
# List of the dataset names for testing, as present in paths_catalog.py
_C.DATASETS.TEST = ()
# -----------------------------------------------------------------------------
# DataLoader
# -----------------------------------------------------------------------------
_C.DATALOADER = CN()
# Number of data loading threads
_C.DATALOADER.NUM_WORKERS = 4
# If > 0, this enforces that each collated batch should have a size divisible
# by SIZE_DIVISIBILITY
_C.DATALOADER.SIZE_DIVISIBILITY = 0
# Number of images per batch
_C.DATALOADER.IMAGES_PER_BATCH_TRAIN = 2
_C.DATALOADER.IMAGES_PER_BATCH_TEST = 1
# If True, each batch should contain only images for which the aspect ratio
# is compatible. This groups portrait images together, and landscape images
# are not batched with portrait images.
_C.DATALOADER.ASPECT_RATIO_GROUPING = True
# ---------------------------------------------------------------------------- #
# Backbone options
# ---------------------------------------------------------------------------- #
_C.MODEL.BACKBONE = CN()
# The backbone conv body to use
# The string must match a function that is imported in modeling.model_builder
# (e.g., 'FPN.add_fpn_ResNet101_conv5_body' to specify a ResNet-101-FPN
# backbone)
_C.MODEL.BACKBONE.CONV_BODY = "R-50-C4"
# Add StopGrad at a specified stage so the bottom layers are frozen
_C.MODEL.BACKBONE.FREEZE_CONV_BODY_AT = 2
_C.MODEL.BACKBONE.OUT_CHANNELS = 256 * 4
# ---------------------------------------------------------------------------- #
# RPN options
# ---------------------------------------------------------------------------- #
_C.MODEL.RPN = CN()
_C.MODEL.RPN.USE_FPN = False
# RPN anchor sizes given in relative size w.r.t. BASE_ANCHOR_SIZE
_C.MODEL.RPN.SCALES = (0.125, 0.25, 0.5, 1., 2.)
# Base RPN anchor size given in absolute pixels w.r.t. the scaled network input
_C.MODEL.RPN.BASE_ANCHOR_SIZE = 256
# Stride of the feature map that RPN is attached.
# For FPN, number of strides should match number of scales
_C.MODEL.RPN.ANCHOR_STRIDE = (16,)
# RPN anchor aspect ratios
_C.MODEL.RPN.ASPECT_RATIOS = (0.5, 1.0, 2.0)
# Remove RPN anchors that go outside the image by RPN_STRADDLE_THRESH pixels
# Set to -1 or a large value, e.g. 100000, to disable pruning anchors
_C.MODEL.RPN.STRADDLE_THRESH = 0
# Minimum overlap required between an anchor and ground-truth box for the
# (anchor, gt box) pair to be a positive example (IoU >= FG_IOU_THRESHOLD
# ==> positive RPN example)
_C.MODEL.RPN.FG_IOU_THRESHOLD = 0.7
# Maximum overlap allowed between an anchor and ground-truth box for the
# (anchor, gt box) pair to be a negative examples (IoU < BG_IOU_THRESHOLD
# ==> negative RPN example)
_C.MODEL.RPN.BG_IOU_THRESHOLD = 0.3
# Total number of RPN examples per image
_C.MODEL.RPN.BATCH_SIZE_PER_IMAGE = 256
# Target fraction of foreground (positive) examples per RPN minibatch
_C.MODEL.RPN.POSITIVE_FRACTION = 0.5
# Number of top scoring RPN proposals to keep before applying NMS
# When FPN is used, this is *per FPN level* (not total)
_C.MODEL.RPN.PRE_NMS_TOP_N_TRAIN = 12000
_C.MODEL.RPN.PRE_NMS_TOP_N_TEST = 6000
# Number of top scoring RPN proposals to keep after applying NMS
_C.MODEL.RPN.POST_NMS_TOP_N_TRAIN = 2000
_C.MODEL.RPN.POST_NMS_TOP_N_TEST = 1000
# NMS threshold used on RPN proposals
_C.MODEL.RPN.NMS_THRESH = 0.7
# Proposal height and width both need to be greater than RPN_MIN_SIZE
# (a the scale used during training or inference)
_C.MODEL.RPN.MIN_SIZE = 0
# Number of top scoring RPN proposals to keep after combining proposals from
# all FPN levels
_C.MODEL.RPN.FPN_POST_NMS_TOP_N_TRAIN = 2000
_C.MODEL.RPN.FPN_POST_NMS_TOP_N_TEST = 2000
# ---------------------------------------------------------------------------- #
# ROI HEADS options
# ---------------------------------------------------------------------------- #
_C.MODEL.ROI_HEADS = CN()
_C.MODEL.ROI_HEADS.USE_FPN = False
# Overlap threshold for an RoI to be considered foreground (if >= FG_IOU_THRESHOLD)
_C.MODEL.ROI_HEADS.FG_IOU_THRESHOLD = 0.5
# Overlap threshold for an RoI to be considered background
# (class = 0 if overlap in [0, BG_IOU_THRESHOLD))
_C.MODEL.ROI_HEADS.BG_IOU_THRESHOLD = 0.5
# Default weights on (dx, dy, dw, dh) for normalizing bbox regression targets
# These are empirically chosen to approximately lead to unit variance targets
_C.MODEL.ROI_HEADS.BBOX_REG_WEIGHTS = (10., 10., 5., 5.)
# RoI minibatch size *per image* (number of regions of interest [ROIs])
# Total number of RoIs per training minibatch =
# TRAIN.BATCH_SIZE_PER_IM * TRAIN.IMS_PER_BATCH * NUM_GPUS
# E.g., a common configuration is: 512 * 2 * 8 = 8192
_C.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 512
# Target fraction of RoI minibatch that is labeled foreground (i.e. class > 0)
_C.MODEL.ROI_HEADS.POSITIVE_FRACTION = 0.25
# Only used on test mode
# Minimum score threshold (assuming scores in a [0, |
Sveder/pyweek24 | gamelib/player.py | Python | apache-2.0 | 2,447 | 0.001635 | import pygame
import data
from config import *
import platforms
class Player(pygame.sprite.Spri | te):
def __init__(self):
# Call the parent's constructor
pygame.s | prite.Sprite.__init__(self)
self.image = data.load_image("player.png")
self.rect = self.image.get_rect()
# Set speed vector of player
self.change_x = 0
self.change_y = 0
self.level = None
self.is_jumping = False
def update(self):
self.calc_grav()
# Move left/right
self.rect.x += self.change_x
self.rect.y += self.change_y
print "HAHAHA", self.change_y
# See if we hit anything
block_hit_list = pygame.sprite.spritecollide(self, self.level.level_elements, False)
for block in block_hit_list:
if isinstance(block, platforms.Trampoline):
print "jumpppp"
self.jump()
elif isinstance(block, platforms.Platform):
# If we are moving right,
# set our right side to the left side of the item we hit
# if self.change_x > 0:
# self.rect.right = block.rect.left
# elif self.change_x < 0:
# # Otherwise if we are moving left, do the opposite.
# self.rect.left = block.rect.right
# Reset our position based on the top/bottom of the object.
if self.change_y > 0:
self.rect.bottom = block.rect.top
self.change_y = 0
# # elif self.change_y < 0:
# # self.rect.top = block.rect.bottom
# pass
def calc_grav(self):
""" Calculate effect of gravity. """
if self.change_y == 0:
self.change_y = 1
else:
self.change_y += 2
if self.rect.y >= GROUND_HEIGHT - self.rect.height and self.change_y >= 0:
self.change_y = 0
# self.rect.y = GROUND_HEIGHT - self.rect.height
def jump(self):
self.change_y = -40
def go_left(self):
""" Called when the user hits the left arrow. """
self.change_x = -6
def go_right(self):
""" Called when the user hits the right arrow. """
self.change_x = 6
def stop(self):
""" Called when the user lets off the keyboard. """
self.change_x = 0 |
DemocracyClub/yournextrepresentative | ynr/apps/results/migrations/0011_resultevent_post_new.py | Python | agpl-3.0 | 539 | 0 | from django.db import migrations, models
class Migration(migrations.Migration):
dependen | cies = [
("popolo", "0002_update_models_from_upstream"),
("results", "0010_resultevent_winner_party_new"),
]
operations = [ |
migrations.AddField(
model_name="resultevent",
name="post_new",
field=models.ForeignKey(
blank=True,
to="popolo.Post",
null=True,
on_delete=models.CASCADE,
),
)
]
|
pmacosta/putil | tests/plot/basic_source.py | Python | mit | 9,026 | 0.001219 | # basic_source.py
# Copyright (c) 2013-2016 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0103,C0111,E0611,R0201,R0204,W0212,W0232,W0612
# PyPI imports
from numpy import array
import pytest
# Putil imports
from putil.plot import BasicSource as FUT
from putil.test import AE, AI, APROP, AROPROP
###
# Global variables
###
RIVAR = array([1, 2, 3])
RDVAR = array([10, 20, 30])
###
# Test classes
###
class TestBasicSource(object):
""" Tests for BasicSource """
def test_str(self):
""" Test that str behaves correctly """
# Full set
obj = str(FUT(RIVAR, RDVAR, indep_min=-10, indep_max=20.0))
ref = (
'Independent variable minimum: -10\n'
'Independent variable maximum: 20.0\n'
'Independent variable: [ 1.0, 2.0, 3.0 ]\n'
'Dependent variable: [ 10.0, 20.0, 30.0 ]'
)
assert obj == ref
# indep_min not set
obj = str(FUT(RIVAR, RDVAR, indep_max=20.0))
ref = (
'Independent variable minimum: -inf\n'
'Independent variable maximum: 20.0\n'
'Independent variable: [ 1.0, 2.0, 3.0 ]\n'
'Dependent variable: [ 10.0, 20.0, 30.0 ]'
)
assert obj == ref
# indep_max not set
obj = str(FUT(RIVAR, RDVAR, indep_min=-10))
ref = (
'Independent variable minimum: -10\n'
'Independent variable maximum: +inf\n'
'Independent variable: [ 1.0, 2.0, 3.0 ]\n'
'Dependent variable: [ 10.0, 20.0, 30.0 ]'
)
assert obj == ref
# indep_min and indep_max not set
obj = str(FUT(RIVAR, RDVAR))
ref = (
'Independent variable minimum: -inf\n'
'Independent variable maximum: +inf\n'
'Independent variable: [ 1.0, 2.0, 3.0 ]\n'
'Dependent variable: [ 10.0, 20.0, 30.0 ]'
)
assert obj == ref
def test_complete(self):
""" Test _complete property behavior """
obj = FUT(RIVAR, RDVAR, indep_min=0, indep_max=50)
obj._indep_var = None
assert not obj._complete
obj = FUT(RIVAR, RDVAR, indep_min=0, indep_max=50)
assert obj._complete
@pytest.mark.parametrize('indep_min', [1, 2.0])
def test_indep_min(self, indep_min):
""" Tests indep_min property behavior """
# __init__ path
FUT(RIVAR, RDVAR, indep_min=indep_min)
# Managed attribute path
obj = FUT(RIVAR, RDVAR)
obj.indep_min = indep_min
assert obj.indep_min == indep_min
@pytest.mark.basic_source
@pytest.mark.parametrize('indep_min', ['a', False])
def test_indep_min_exceptions(self, indep_min):
""" Tests indep_min property exceptions """
# __init__ path
AI(FUT, 'indep_min', RIVAR, RDVAR, indep_min=indep_min)
obj = FUT(RIVAR, RDVAR)
msg = 'Argument `indep_min` is not valid'
APROP(obj, 'indep_min', indep_min, RuntimeError, msg)
@pytest.mark.parametrize('indep_max', [1, 2.0])
def test_indep_max(self, indep_max):
""" Tests indep_max property behavior """
# __init__ path
FUT(RIVAR, RDVAR, indep_max=indep_max)
# Managed attribute path
obj = FUT(RIVAR, RDVAR)
obj.indep_max = indep_max
assert obj.indep_max == indep_max
@pytest.mark.basic_source
@pytest.mark.parametrize('indep_max', ['a', False])
def test_indep_max_exceptions(self, indep_max):
""" Tests indep_max property exceptions """
# | __init__ path
AI(FUT, 'indep_max', RIVAR, RDVAR, indep_max=indep_max)
# Managed attribute path
obj = FUT(RIVAR, RDVAR)
msg = 'Argument `indep_max` is not valid'
APROP(obj, 'indep_max', indep_max, RuntimeError, msg)
#with pytest.raises(RuntimeError) as exc | info:
# obj.indep_max = indep_max
#assert GET_EXMSG(excinfo) == 'Argument `indep_max` is not valid'
@pytest.mark.basic_source
def test_indep_min_greater_than_indep_max_exceptions(self):
"""
Test behavior when indep_min and indep_max are incongruous
"""
# Assign indep_min first
obj = FUT(RIVAR, RDVAR, indep_min=0.5)
exmsg = 'Argument `indep_min` is greater than argument `indep_max`'
APROP(obj, 'indep_max', 0, ValueError, exmsg)
#with pytest.raises(ValueError) as excinfo:
# obj.indep_max = 0
#assert GET_EXMSG(excinfo) == exmsg
# Assign indep_max first
obj = FUT(RIVAR, RDVAR)
obj.indep_max = 40
APROP(obj, 'indep_min', 50, ValueError, exmsg)
#with pytest.raises(ValueError) as excinfo:
# obj.indep_min = 50
#assert GET_EXMSG(excinfo) == exmsg
def test_indep_var(self):
""" Tests indep_var property behavior """
# __init__ path
indep_var1 = RIVAR
indep_var2 = array([4.0, 5.0, 6.0])
assert (FUT(indep_var1, RDVAR).indep_var == indep_var1).all()
assert (FUT(indep_var2, RDVAR).indep_var == indep_var2).all()
# Managed attribute path
obj = FUT(indep_var=indep_var1, dep_var=RDVAR)
obj.indep_var = indep_var2
assert (obj.indep_var == indep_var2).all()
@pytest.mark.basic_source
@pytest.mark.parametrize(
'indep_var', [None, 'a', array([1.0, 2.0, 0.0, 3.0]), []]
)
def test_indep_var_exceptions(self, indep_var):
""" Tests indep_var property exceptions """
# __init__ path
AI(FUT, 'indep_var', indep_var, RDVAR)
# Assign indep_min via attribute
msg = (
'Argument `indep_var` is empty after '
'`indep_min`/`indep_max` range bounding'
)
obj = FUT(RIVAR, RDVAR)
APROP(obj, 'indep_min', 45, ValueError, msg)
# Assign indep_max via attribute
obj = FUT(RIVAR, RDVAR)
APROP(obj, 'indep_max', 0, ValueError, msg)
# Assign both indep_min and indep_max via __init__ path
AE(FUT, ValueError, msg, RIVAR, RDVAR, indep_min=4, indep_max=10)
# Managed attribute path
obj = FUT(RIVAR, RDVAR)
# Wrong type
assert (obj.indep_var == RIVAR).all()
msg = 'Argument `indep_var` is not valid'
APROP(obj, 'indep_var', indep_var, RuntimeError, msg)
#with pytest.raises(RuntimeError) as excinfo:
# obj.indep_var = indep_var
#assert GET_EXMSG(excinfo) == 'Argument `indep_var` is not valid'
def test_dep_var(self):
""" Tests dep_var property behavior """
# __init__ path
# Valid values, these should not raise any exception
indep_var = array([10, 20, 30])
dep_var1 = array([1, 2, 3])
dep_var2 = array([4.0, 5.0, 6.0])
assert (FUT(indep_var, dep_var1).dep_var == dep_var1).all()
assert (FUT(indep_var, dep_var2).dep_var == dep_var2).all()
# Managed attribute path
obj = FUT(indep_var=indep_var, dep_var=dep_var1)
obj.dep_var = dep_var1
assert (obj.dep_var == dep_var1).all()
obj.dep_var = dep_var2
assert (obj.dep_var == dep_var2).all()
@pytest.mark.basic_source
@pytest.mark.parametrize('dep_var', [None, 'a', []])
def test_dep_var_exceptions(self, dep_var):
""" Tests dep_var property exceptions """
# __init__ path
msg = 'Argument `dep_var` is not valid'
AI(FUT, 'dep_var', RIVAR, dep_var)
# Managed attribute path
obj = FUT(RIVAR, array([1, 2, 3]))
APROP(obj, 'dep_var', dep_var, RuntimeError, msg)
#with pytest.raises(RuntimeError) as excinfo:
# obj.dep_var = dep_var
#assert GET_EXMSG(excinfo) == msg
@pytest.mark.basic_source
def test_indep_dep_var_not_same_number_of_elements_exceptions(self):
""" Tests indep_var and dep_var vector congruency """
msg = (
'Arguments `indep_var` and `dep_var` '
'must have the same number of elements'
)
# Both set at object creation
AE(FUT, ValueError, msg, RDVAR, array([1, 2, 3, 4, 5, 6]), 30, 50)
AE(FUT, ValueError, |
robocomp/robocomp | tools/cli/robocompdsl/robocompdsl/templates/templateCPP/plugins/base/functions/SERVANT_H.py | Python | gpl-3.0 | 1,893 | 0.005283 | import datetime
from robocompdsl.templates.common.templatedict import TemplateDict
from robocompdsl.templates.templateCPP.plugins.base.functions import function_utils as utils
INTERFACE_METHOD_STR = """
${ret} ${interface_name}I::${method_name}(${input_params})
{
${to_return}worker->${interface_name}_${method_name}(${param_str});
}
"""
class SERVANT_H(TemplateDict):
def __init__(self, component, interface_name):
super(SERVANT_H, self).__init__()
self.component = component
module = self.component.idsl_pool.module_providing_interface(interface_name)
self['year'] = str(datetime.date.today().year)
self['interface_name'] = interface_name
self['interface_name_upper'] = interface_name.upper()
self['filename_without_extension'] = module['filename'].split('/')[-1].split('.')[0]
self['module_name'] = module['name']
self['interface_methods_definition'] = self.interface_methods_definition(module,
interface_nam | e)
def interface_methods_definition(self, module, interface_name):
result = ""
for interface in module['interfaces']:
if interface['name'] == interface_name:
for mname in interface['methods']:
method = interface['methods'][mname]
ret = utils.get_type_string(method['return'], module['name'])
name = method['name']
param_str = utils. | get_parameters_string(method, module['name'], self.component.language)
if param_str:
param_str = f"{param_str}, const Ice::Current&"
else:
param_str = "const Ice::Current&"
result += ret + ' ' + name + '(' + param_str + ');\n'
return result
|
Victory/clicker-me-bliss | functional-tests/buy-item4.py | Python | mit | 727 | 0 | from clickerft.cft import Cft
from time import sleep
class Suite(Cft):
def test | _buy_item_4(self):
while int(self.clicksPerGeneration.text) < 2:
if int(self.clicksOwned.text) < 1:
sleep(.5)
conti | nue
self.increaseClicksPerGeneration.click()
while int(self.tr1.text) < int(self.pi4r1.text):
self.click_r_test('r1')
while int(self.tr2.text) < int(self.pi4r2.text):
self.click_r_test('r2')
self.i4.click()
assert int(self.oi4.text) == 1
sleep(1)
# todo put the modifier into the DOM to parse
assert int(self.tr1.text) == 5
pass
if __name__ == '__main__':
Suite()
|
pcmoritz/ray-1 | python/ray/serve/examples/doc/snippet_custom_metric.py | Python | apache-2.0 | 695 | 0 | import ray
from ray import serve
from ray.util import metrics
import time
ray.init(address="auto")
serve.start()
@serve.deploymen | t
class MyBackend:
def __init__(self):
self.my_counter = metrics.Counter(
"my_counter",
description=("The number of excellent requests to this backend."),
tag_keys=("backend", ))
self.my_counter.set_default_tags({
"backend": serve | .get_current_backend_tag()
})
def call(self, excellent=False):
if excellent:
self.my_counter.inc()
MyBackend.deploy()
handle = MyBackend.get_handle()
while True:
ray.get(handle.call.remote(excellent=True))
time.sleep(1)
|
mgautierfr/devparrot | devparrot/core/ui/statusBar.py | Python | gpl-3.0 | 3,065 | 0.001958 | # This file is part of DevParrot.
#
# Author: Matthieu Gautier <matthieu.gautier@devparrot.org>
#
# DevParrot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DevParrot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Pu | blic License
# along with DevParrot. If not, see <http://www.gnu.org/licenses/>.
#
#
# | Copyright 2011-2013 Matthieu Gautier
import tkinter, tkinter.ttk
import logging
from devparrot.core import session, userLogging
class StatusBar(tkinter.Frame, logging.Handler):
    """Bottom status bar: shows user-log messages on the left and the
    insert-mark position / selection range on the right."""

    def __init__(self, parent):
        tkinter.Frame.__init__(self, parent)
        logging.Handler.__init__(self)
        self.pack(side=tkinter.BOTTOM, fill=tkinter.X)
        self['relief'] = 'sunken'
        # Receive user-facing log records directly.
        session.userLogger.addHandler(self)
        # Message area (left side, stretches to fill).
        self.label = tkinter.Label(self)
        self.label.pack(side='left', fill=tkinter.BOTH, expand=True)
        self.defaultColor = self['background']
        self.label['anchor'] = 'nw'
        sep = tkinter.ttk.Separator(self, orient="vertical")
        sep.pack(side='left', fill='y')
        # Cursor/selection indicator (right side).
        self.insertLabel = tkinter.ttk.Label(self)
        self.insertLabel.pack(side='right', expand=False, fill="none")
        session.eventSystem.connect('mark_set', self.on_mark_set)
        self.currentLevel = 0
        self.callbackId = 0

    def flush(self):
        """Nothing is buffered; overrides logging.Handler.flush."""
        pass

    def clear(self):
        """Return the bar to its idle look and forget the pending callback."""
        self.label['text'] = ""
        self.label['background'] = self.defaultColor
        self.currentLevel = 0
        self.callbackId = 0

    def emit(self, record):
        """Show *record* if it is at least as severe as the one displayed.

        Overrides logging.Handler.emit. The message stays visible for five
        seconds, then the bar clears itself.
        """
        if record.levelno >= self.currentLevel:
            self.currentLevel = record.levelno
            self.label['text'] = record.getMessage()
            # Map the user-log level to the configured background colour.
            color_keys = {
                userLogging.INFO: 'ok_color',
                userLogging.ERROR: 'error_color',
                userLogging.INVALID: 'invalid_color',
            }
            key = color_keys.get(self.currentLevel)
            if key is not None:
                self.label['background'] = session.config.get(key)
            if self.callbackId:
                self.after_cancel(self.callbackId)
            self.callbackId = self.after(5000, self.clear)

    def on_mark_set(self, model, name, index):
        """Mirror the insert mark (or the selection bounds) on the right."""
        if name != "insert":
            return
        if model.sel_isSelection():
            text = "[%s:%s]" % (model.index("sel.first"), model.index("sel.last"))
        else:
            text = str(model.index("insert"))
        self.insertLabel['text'] = text
|
EssaAlshammri/django-by-example | bookmarks/bookmarks/actions/utils.py | Python | mit | 679 | 0.002946 | import datetime
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from .models import Action
def create_action(user, verb, target=None):
    """Record *verb* for *user*, skipping duplicates made in the last minute.

    Returns True when a new Action was saved, False when an identical action
    (same user and verb and, when given, the same target) already exists
    within the last 60 seconds.
    """
    now = timezone.now()
    last_minute = now - datetime.timedelta(seconds=60)
    similar_actions = Action.objects.filter(user_id=user.id, verb=verb, created__gte=last_minute)
    if target:
        target_ct = ContentType.objects.get_for_model(target)
        # Bug fix: chain onto the existing queryset. The original rebuilt the
        # query from Action.objects here, silently dropping the user/verb/
        # last-minute filters, so any action on the target blocked creation.
        similar_actions = similar_actions.filter(target_ct=target_ct, target_id=target.id)
    if not similar_actions:
        action = Action(user=user, verb=verb, target=target)
        action.save()
        return True
    return False
|
open-synergy/opnsynid-purchase-workflow | purchase_order_line_product_uom/models/__init__.py | Python | agpl-3.0 | 176 | 0 | # -*- coding: utf-8 -*-
# C | opyright 2019 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import (
purchase_ | order_line,
)
|
tovmeod/anaf | anaf/viewsets.py | Python | bsd-3-clause | 410 | 0.002439 | from rest_f | ramework import viewsets
from rest_framework.exceptions import MethodNotAllowed
class AnafViewSet(viewsets.ModelViewSet):
    """Base viewset shared by Anaf API endpoints."""
    accepted_formats = ('html', 'ajax')

    def retrieve(self, request, *args, **kwargs):
        """Serve detail views for GET only; reject every other method."""
        if request.method == 'GET':
            return super(AnafViewSet, self).retrieve(request, *args, **kwargs)
        raise MethodNotAllowed(request.method)
odoocn/odoomrp-wip | mrp_product_variants_configurable_timing/models/mrp_production.py | Python | agpl-3.0 | 1,568 | 0 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licens | es/.
#
##############################################################################
from openerp import models
import math
class MrpProduction(models.Model):
    _inherit = 'mrp.production'

    def _get_workorder_in_product_lines(
            self, workcenter_lines, product_lines, properties=None):
        """After the standard computation, recompute cycles/hours per order.

        Cycles = product_qty / cycle_nbr; when the company requires complete
        cycles the value is rounded up to a whole number of cycles.
        """
        super(MrpProduction, self)._get_workorder_in_product_lines(
            workcenter_lines, product_lines, properties=properties)
        for order in workcenter_lines:
            line = order.routing_wc_line
            # Same truthiness semantics as the old `a and b or 0` idiom:
            # 0 when cycle_nbr is falsy, and 0 when the quotient is falsy.
            n_cycles = (self.product_qty / line.cycle_nbr or 0) if line.cycle_nbr else 0
            if self.company_id.complete_cycle:
                n_cycles = int(math.ceil(n_cycles))
            order.cycle = n_cycles
            order.hour = line.hour_nbr * n_cycles
|
hydratk/hydratk-lib-network | src/hydratk/translation/lib/network/email/client/en/messages.py | Python | bsd-3-clause | 1,250 | 0.000801 | # -*- coding: utf-8 -*-
"""This code is a part of Hydra Toolkit
.. module:: hydratk.translation.lib.network.email.client.en.messages
:platform: Unix
:synopsis: English language translation for EMAIL client messages
.. moduleauthor:: Petr Rašek <bowman@hydratk.org>
"""
language = {
'name': 'English',
'ISO-639-1': 'en'
}
from hydratk.core import const
HIGHLIGHT_START = chr(27) + chr(91) + "1 | m"
HIGHLIGHT_US = chr(27) + chr(91) + "4m"
HIGHLIGHT_END = chr(27) + chr(91) + "0m"
msg = {
'htk_email_unknown_protocol': ["Unknown protocol: '{0}'"],
'htk_email_unknown_method': ["Unknown method for protocol: '{0}'"],
'htk_email_connecting': ["Connecting to server: '{0}'"],
'htk_email_connected': ["Connected successfully"],
'ht | k_email_disconnected': ["Disconnected from server"],
'htk_email_not_connected': ["Not connected to server"],
'htk_email_sending': ["Sending email: '{0}'"],
'htk_email_sent': ["Email sent"],
'htk_email_counting': ["Counting emails"],
'htk_email_count': ["Email count: '{0}'"],
'htk_email_listing': ["Listing emails"],
'htk_email_listed': ["Emails listed"],
'htk_email_receiving': ["Receiving email: '{0}'"],
'htk_email_received': ["Email received"]
}
|
cedexis/cedexis.radar | cedexis/radar/session/errors.py | Python | mit | 424 | 0.007075 |
class InvalidThroughputFileSizeError(Exception):
    """Session error: a throughput file did not have the expected size."""
    pass
class UniNotFoundError(Exception):
    """Session error: the expected UNI was not found."""
    pass
class UnexpectedStatusError(Exception):
    """Session error: a status value other than the expected one was seen."""
    pass
class UnexpectedHttpStatusError(Exception):
    """An HTTP exchange ended with a status/text pair the caller did not expect.

    Keeps both the status code and the response text so handlers can report
    the full server response.
    """

    def __init__(self, status, text):
        self.__status = status
        self.__text = text

    @property
    def status(self):
        """The unexpected HTTP status code."""
        return self.__status

    @property
    def text(self):
        """The response body text that accompanied the status."""
        return self.__text
|
DomNelson/tracts | scripts/tracts_sim.py | Python | gpl-2.0 | 6,478 | 0.017598 | # -*- coding: utf-8 -*-
"""
Created on Wed Jan 28 14:33:24 2015
@author: dominic
"""
#import matplotlib.pylab as pylab
import numpy as np
import tracts_ped as ped
import os
import tracts
import sys
import time
import numpy as np
# MigrantProps = [0.2, 0.05] # Proportion of pedigree that will be new migrants
# MigPropMat = [[8, 0.1, 0], [12, 0, 0.1]]
DemeSwitch = 0.1 # Chance of child leaving deme of parents
rho = 1 # Recombination rate
##@@ AllAncestry = True is assumed to assign parents ie no ancestry means you
## are not a leaf
# Ancestry label -> plot colour (alternatives kept below for reference).
colordict = {'EUR':'red', 'NAT':'blue', 'AFR':'green'}
#colordict = {'EUR':'yellow', 'NAT':'green'}
#colordict = {0:'red', 1:'blue', 2:'green'}
#colordict = {'AFR':'red', 'EUR':'blue'}#, 2:'green'}
#ChromLengths = [2.865747830, 2.64751457082595, 2.23363180733515,
#                2.15492839808593, 2.04089356863902, 1.92039918028429,
#                1.87852676459211, 1.68003441747308, 1.78206001355185,
#                1.81366917101923, 1.58218649890248, 1.74679023161126,
#                1.26778791112187, 1.20202583329567, 1.39297570875973,
#                1.340377262456, 1.2849052927734, 1.17708922675517,
#                1.07733846085975, 1.08266933913055, 0.627864782064372,
#                0.741095623349923]
# Chromosome lengths (1-22) in centimorgans; converted to Morgans just below.
cM_ChromLengths = [ 277.6846783 ,  263.4266571 ,  224.5261258 ,  212.8558223 ,
        203.9634184 ,  192.9822446 ,  186.9212679 ,  170.2156421 ,
        168.2431216 ,  179.0947462 ,  159.5132079 ,  172.8505693 ,
        126.9025447 ,  116.3957107 ,  131.405539  ,  134.9600594 ,
        129.2943145 ,  119.0324459 ,  107.8670432 ,  108.0521931 ,
         61.46827149,   72.68689882]
ChromLengths = [length / 100. for length in cM_ChromLengths]
#ChromLengths = [30]
# --- Command-line parsing (positional; Python 2 script) ---
try:
    migfile = sys.argv[1]
    if migfile != "None":
        migmat = np.genfromtxt(migfile)
        labels = ['EUR', 'NAT', 'AFR']
#        labels = range(len(migmat[0]))
    else:
        migmat = None
        labels = None
    pedfile = sys.argv[2]
    if pedfile == "None":
        pedfile = None
    ancfile = sys.argv[3]
    if ancfile == "None":
        ancfile = None
    numinds = int(sys.argv[4])
    method = sys.argv[5]
    outdir = os.path.expanduser(sys.argv[6])
    if not os.path.exists(outdir):
        print "Output path does not exist"
        sys.exit()
    bed_dir = outdir#os.path.join(outdir + 'BED/')
    if not os.path.exists(bed_dir):
        os.makedirs(bed_dir)
    popname = sys.argv[7]
except IndexError:
    print "Usage:"
    print "python tracts_sim.py migfile pedfile ancfile numinds method=[forward,PSMC] outdir popname"
    sys.exit()
#try:
#    popoutfile = sys.argv[7]
#    plotoutfile = sys.argv[8]
#except IndexError:
#    popoutfile = "None"
#    plotoutfile = "None"
# --- Simulate numinds individuals, timing each one ---
indlist = []
times = []
for i in range(numinds):
    start_time = time.time()
    if i % 100 == 0:
        print "Simulating individual", i, "of", numinds
    if method == "forward":
        P = ped.Pedigree(sampleind = None,
                        DemeSwitch = DemeSwitch,
                        MigPropMat = migmat,
                        pedfile = pedfile,
                        ancfile = ancfile,
                        labels = labels,
                        split_parents = False)
        leaflist, nodelist = P.SortLeafNode(P.indlist)
#        TMat = P.BuildTransMatrices(leaflist, nodelist)
        P.MakeGenomes(ChromLengths = ChromLengths, smoothed = True,
                        Gamete = False)
        ##@@ This could be slow in a large pedigree
        samp_ind = [ind for ind in P.indlist if ind.depth == 0]
        if len(samp_ind) == 1:
            samp_ind = samp_ind[0]
        else:
            print "Depth error: mutiple roots to the pedigree"
            print samp_ind
            sys.exit()
        tracts_ind = samp_ind.to_tracts_indiv()
    ## We split the pedigree into maternal/paternal sides when simulating with
    ## PSMC
    elif method == "PSMC":
        P = ped.Pedigree(sampleind = None,
                        DemeSwitch = DemeSwitch,
                        MigPropMat = migmat,
                        pedfile = pedfile,
                        ancfile = ancfile,
                        labels = labels,
                        split_parents = True)
        M_leaflist, M_nodelist = P.SortLeafNode(P.mother_indlist)
        F_leaflist, F_nodelist = P.SortLeafNode(P.father_indlist)
        M_TMat = P.BuildTransMatrices(M_leaflist, M_nodelist)
        F_TMat = P.BuildTransMatrices(F_leaflist, F_nodelist)
        tracts_ind = P.PSMC_ind(M_TMat, F_TMat, M_leaflist, F_leaflist, ChromLengths)
    else:
        print "Unknown simulation method"
        # NOTE(review): missing parentheses -- `sys.exit` is not called here,
        # so an unknown method falls through with tracts_ind unset.
        sys.exit
    ## Save simulated individual to list
    indlist.append(tracts_ind)
    ## Write simulated individuals to BED files
#    if bed_dir != "None":
#        outfile = os.path.join(bed_dir, "IND" + str(i + 1))
#        ped.tracts_ind_to_bed(tracts_ind, outfile, conv = "M->cM")
    times.append(time.time() - start_time)
print "Number of generations simulated:", len(P.MigPropMat)
print "Average time per simulated individual:", np.mean(times)
## Plot tracts distribution for simulated population
pop = tracts.population(list_indivs = indlist)
#(bins, data) = pop.get_global_tractlengths(npts=50)
##outdir = "./out"
#if migmat is None:
#    migmat, ancestries = P.ped_to_migmat(P.indlist)
#    print ancestries
#D = tracts.demographic_model(mig=migmat)
#
#with open(outdir + popname + "_bins", 'w') as fbins:
#    fbins.write("\t".join(map(str, bins)))
#
#with open(outdir + popname + "_dat", 'w') as fdat:
#    for label in data.keys():
#        fdat.write("\t".join(map(str, data[label])) + "\n")
#
#with open(outdir + popname + "_mig", 'w') as fmig:
#    for line in D.mig:
#        fmig.write("\t".join(map(str, line)) + "\n")
#
#with open(outdir + popname + "_pred", 'w') as fpred:
#    for popnum in range(len(data)):
#        fpred.write(
#                "\t".join(map(
#                    str,
#                    pop.nind * np.array(D.expectperbin(ChromLengths, popnum, bins))))
#                + "\n")
plotoutfile = os.path.join(outdir, popname + '_plot.png')
pop.plot_global_tractlengths(colordict, outfile = plotoutfile)
## Option to write population instance to file
#if popoutfile != "None":
#    os.path.join(outdir, popoutfile)
#    popoutpath = os.path.dirname(popoutfile)
#    if not os.path.exists(popoutpath):
#        os.makedirs(popoutpath)
#    with open(popoutfile, 'wb') as f:
#        cPickle.dump(pop, f, cPickle.HIGHEST_PROTOCOL)
|
Livefyre/pseudonym | pseudonym/__init__.py | Python | mit | 55 | 0 | fro | m errors import *
from manager import SchemaMa | nager
|
azavea/gtfs-feed-fetcher | feed_sources/Massdot.py | Python | mit | 3,164 | 0.000316 | """Fetch Massacheusetts Department of Transportation feeds.
MassDOT supplies the feeds for MA not covered by MBTA (Boston's transit authority).
http://www.massdot.state.ma.us/DevelopersData.aspx
"""
import logging
from FeedSource import FeedSource
BASE_URL = 'http://www.massdot.state.ma.us/Portals/0/docs/developers/'
LOG = logging.getLogger(__name__)
class Massdot(FeedSource):
    """Fetch MassDOT (MA, non-Boston) feeds."""
    def __init__(self):
        super(Massdot, self).__init__()
        # Output filename -> path component under BASE_URL. Private bus
        # services (Bloom, Boston Express, Coach, DATTCO, Peter Pan,
        # Plymouth & Brockton, Yankee) tend to have validation issues.
        suffixes = {
            'berkshire.zip': 'brta_google_transit.zip',
            'brockton.zip': 'bat_google_transit.zip',
            'cape_ann.zip': 'cata_google_transit.zip',
            'cape_cod.zip': 'ccrta_google_transit.zip',
            'franklin.zip': 'frta_google_transit.zip',
            'attleboro.zip': 'gatra_google_transit.zip',
            'lowell.zip': 'lrta_google_transit.zip',
            'merrimack.zip': 'mvrta_google_transit.zip',
            'metrowest.zip': 'mwrta_google_transit.zip',
            'montachusett.zip': 'mart_google_transit.zip',
            'nantucket.zip': 'nrta_google_transit.zip',
            'southeastern_ma.zip': 'srta_google_transit.zip',
            'vineyard_ma.zip': 'vta_google_transit.zip',
            'worchester.zip': 'wrta_google_transit.zip',
            'ma_ferries.zip': 'ferries_google_transit.zip',
            'bloom_ma.zip': 'Bloom_google_transit.zip',
            'boston_express.zip': 'boston_express_google_transit.zip',
            'coach_bus_ma.zip': 'coach_google_transit.zip',
            'dattco_ma.zip': 'dattco_google_transit.zip',
            'peter_pan_ma.zip': 'peter_pan_google_transit.zip',
            'plymouth_brockton_rail.zip': 'PB_google_transit.zip',
            'yankee_ma.zip': 'yankee_google_transit.zip',
        }
        self.urls = dict((name, BASE_URL + path) for name, path in suffixes.items())
        # Pioneer Valley (PVTA) hosts its own feed outside BASE_URL.
        self.urls['pioneer_valley.zip'] = 'http://www.pvta.com/g_trans/google_transit.zip'
|
OxES/k2sc | src/priors.py | Python | gpl-3.0 | 2,483 | 0.00443 | """ Module defining priors.
Copyright (C) 2016 Suzanne Aigrain
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import division
import math as mt
import numpy as np
from numpy import array, asarray, log, pi, inf
class Prior(object):
    """Abstract base class for one-dimensional parameter priors.

    Subclasses implement logpdf(x); instances are callable and return the
    log of the prior density (-inf outside the support).
    """
    def __init__(self):
        raise NotImplementedError()

    def logpdf(self, x):
        raise NotImplementedError()

    def __call__(self, x):
        return self.logpdf(x)


class UniformPrior(Prior):
    """Uniform prior on the open interval (vmin, vmax)."""
    def __init__(self, vmin, vmax):
        self.vmin = vmin
        self.vmax = vmax
        self.C = 1./(vmax-vmin)
        self.lnC = mt.log(self.C)
        self.lims = [vmin, vmax]

    def logpdf(self, x):
        # Constant log-density inside the support, -inf outside.
        if self.vmin < x < self.vmax:
            return self.lnC
        return -inf


class NormalPrior(Prior):
    """Normal prior N(mu, sigma^2), zeroed (unnormalised) outside lims."""
    def __init__(self, mu, sigma, lims=None):
        self.lims = np.array(lims)
        self.vmin, self.vmax = lims
        self.mu = float(mu)
        self.sigma = float(sigma)
        self._f1 = 1./ mt.sqrt(2.*pi*sigma*sigma)   # normalisation constant
        self._lf1 = mt.log(self._f1)
        self._f2 = 1./ (2.*sigma*sigma)

    def logpdf(self, x):
        # Vectorised path for ndarray input, scalar path otherwise.
        if isinstance(x, np.ndarray):
            return np.where((self.vmin < x) & (x < self.vmax),
                            self._lf1 - (x-self.mu)**2 * self._f2, -inf)
        return self._lf1 - (x-self.mu)**2 * self._f2 if self.vmin < x < self.vmax else -inf


class LogNormPrior(Prior):
    """Log-normal prior: ln x ~ N(mu, sigma^2), supported on lims (default (0, inf))."""
    def __init__(self, mu, sigma, lims=None):
        self.mu = mu
        self.sigma = sigma
        self.C = -mt.log(sigma*mt.sqrt(2*pi))
        self.lims = lims if lims is not None else [0, inf]
        self._B = 2*sigma**2

    def logpdf(self, x):
        if (x <= self.lims[0]) or (x > self.lims[1]):
            return -inf
        lnx = mt.log(x)
        # ln p(x) = -ln x - ln(sigma*sqrt(2*pi)) - (ln x - mu)^2 / (2 sigma^2).
        # Bug fix: the expanded quadratic previously used mu*lnx instead of
        # 2*mu*lnx, i.e. it was not (ln x - mu)^2, giving a wrong density
        # whenever mu != 0.
        return -lnx + self.C - ((lnx - self.mu)**2 / self._B)
jmcnamara/XlsxWriter | xlsxwriter/test/comparison/test_unicode_shift_jis.py | Python | bsd-2-clause | 1,538 | 0 | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.set_filename('unicode_shift_jis.xlsx')
        self.set_text_file('unicode_shift_jis.txt')

    def test_create_file(self):
        """Convert a Shift-JIS text file into a worksheet and compare."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        # Widen the first column to make the text clearer.
        worksheet.set_column('A:A', 50)
        # Copy each non-comment line of the sample file into column A.
        with open(self.txt_filename, mode='r', encoding='shift_jis') as textfile:
            row = 0
            for line in textfile:
                if line.startswith('#'):
                    continue
                worksheet.write(row, 0, line.rstrip("\n"))
                row += 1
            workbook.close()
        self.assertExcelEqual()
|
astronewts/Flight1 | misc/allaloft/groundstation/python/ground_station_base.py | Python | gpl-3.0 | 9,305 | 0.019774 |
import os
from optparse import OptionParser
import io
import time
import random
import thread
import sys
from smtp_stuff import sendMail
from imap_stuff import checkMessages
import datetime
import string
import array
from time import gmtime, strftime
from socket import *
# E-mail account / gateway settings -- presumably filled in from the
# command-line options parsed in main(); confirm (main is below).
user = ''
recipient = ''
incoming_server = ''
outgoing_server = ''
password = ''
imei = 0
# APRS-IS reporting settings.
aprs_server = 'second.aprs.net'
aprs_port = 20157
aprs_password = ''
aprs_callsign = ''
aprs_address = '>APRS,TCPIP*:'
aprs_is_enabled = False
# comment length is supposed to be 0 to 43 char.
email_enabled = False
ip_enabled = False
http_post_enabled = False
# Command opcodes: first byte of the message sent via send_mo_email().
COMMAND_GET_POS = 0
COMMAND_RELEASE = 1
COMMAND_SET_REPORT_INTERVAL = 2
def send_mo_email(msg):
    """Send *msg* to the modem as a mobile-originated SBD e-mail.

    The payload is written to msg.sbd and sent as an attachment; the subject
    line carries the target modem's IMEI. Module globals supply the account
    settings.
    """
    # Fix: the original declared `global email`, a name that does not exist
    # in this module; the globals are only read here, so no declaration is
    # needed at all.
    body = ''
    # Subject must be the IMEI of the destination modem.
    subject = '%d' % imei
    # The message itself travels as a binary attachment.
    attachment = 'msg.sbd'
    # `with` guarantees the file is closed even if the write fails.
    with open(attachment, 'wb') as fd:
        fd.write(msg)
    sendMail(subject, body, user, recipient, password, outgoing_server, attachment)
def log(string):
    """Print *string*; placeholder until file logging is implemented."""
    print string
    #TODO logic for text logging
def parse_text_report_no_fix(report):
    """Parse a report that carries temperatures but no GPS fix.

    Format after the ':' separator: "<int_temp>,<ext_temp>".
    """
    payload = report.split(":")[1]
    fields = payload.split(",")
    int_temp = float(fields[0])
    ext_temp = float(fields[1])
    # Readings above 100 degrees are treated as sensor glitches.
    if int_temp > 100.0 or ext_temp > 100.0:
        log("Probable invalid temperature readings.")
    else:
        log("Internal Temp:%.1f External Temp:%.1f" % (int_temp, ext_temp))
def send_aprs_packet(position):
    """Report *position* to the APRS-IS network over a TCP connection.

    position is [time_str, lat, lon, alt, kts, crs]; only lat/lon are used
    here -- altitude, speed and course are hard-coded placeholders below.
    """
    global aprs_callsign
    #print position
    # create socket & connect to server
    sSock = socket(AF_INET, SOCK_STREAM)
    sSock.connect((aprs_server, aprs_port))
    # logon
    sSock.send('user ' + aprs_callsign + ' pass ' + aprs_password + ' vers "' + aprs_callsign + ' Python" \n')
    #get position information and encode string
    lat = position[1]
    lon = position[2]
    # TODO: altitude/speed/course are placeholders, not taken from position.
    alt = 100
    kts = 0.1
    crs = 30
    # Latitude as ddmm.mm plus hemisphere letter.
    # Fix: format the magnitude (abs) -- "%02d" % lat embedded a minus sign
    # for southern latitudes; the hemisphere is carried by the N/S letter.
    lat_str = "=%02d" % (abs(lat)) + "%05.2f" % ((abs(lat) % 1) * 60.0)
    if lat > 0:
        lat_str += "N"
    else:
        lat_str += "S"
    # Longitude as dddmm.mm plus hemisphere letter.
    lon_str = "%03d" % (abs(lon)) + "%05.2f" % ((abs(lon) % 1) * 60.0)
    # Fix: the hemisphere must come from the longitude's sign (west is
    # negative). The original tested `lat` here, which only happened to
    # work for north-west (e.g. North American) positions.
    if lon < 0:
        lon_str += "W"
    else:
        lon_str += "E"
    #combine the two
    position_str = lat_str + "/" + lon_str
    #add course, speed, and altitude
    comment = "O%03d/%03d/A=%06d" % (crs, kts, alt)
    #comment = "-HELP ME"
    sSock.send(aprs_callsign + aprs_address + position_str + comment + '\n')
    print("Packet sent to APRS: " + time.ctime())
    # close socket -- must be closed to avoid buffer overflow
    sSock.shutdown(0)
    sSock.close()
def update_position(position):
    """Forward a new position fix to every enabled downstream reporter."""
    # Currently APRS is the only reporter wired up.
    if aprs_is_enabled:
        send_aprs_packet(position)
def parse_text_report(report):
report = report.split(":")
report = report[1]
report = report.split(",")
time_str = report[0]
lat = float(report[1])
lon = float(report[2])
alt = float(report[3])
kts = float(report[4])
crs = float(report[5])
position = [time_str,lat,lon,alt,kts,crs]
int_temp = float(report[6])
ext_temp = float(report[7])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
print "Report - Lat:",lat,"Lon:",lon,"Alt(ft):",alt,"Speed(kts):",kts,"Course(deg):",crs
update_position(position)
# One-byte message type tags at the start of each incoming report.
MSG_TEXT_REPORT = 'U'
MSG_TEXT_REPORT_NO_FIX = 'F'
def parse_incoming(msg):
    """Dispatch an incoming modem message to the matching parser."""
    #TODO: My gawd, this is ugly.. lets do something else?
    if msg[0] == MSG_TEXT_REPORT_NO_FIX:
        parse_text_report_no_fix(msg)
    elif msg[0] == MSG_TEXT_REPORT:
        parse_text_report(msg)
def email_check_task(name):
    """Poll the mailbox forever, parsing each newly received message.

    *name* is unused -- presumably this is a thread entry point (the module
    imports `thread`); confirm at the call site in main().
    """
    #check e-mail for messages
    while(1):
        #print 'Checking email'
        msg,subject,received_msg,unread_msgs = checkMessages(incoming_server,user,password)
        if received_msg:
            print "Received Message", msg,"\r"
            parse_incoming(msg)
        # Poll once per second.
        time.sleep(1.0)
def SET_REPORT_INTERVAL(args):
print "Setting reporting interval"
if RepresentsInt(args[0]):
value = int(args[0])
byte1 = ( value >> 8 ) & 0xFF
byte0 = ( value ) & 0xFF
msg = array.array('B',[COMMAND_SET_REPORT_INTERVAL,byte1,byte0])
send_mo_email(msg)
else:
"First argument must be int seconds between 1 - 65532. 0 to disable automatic reporting."
def GET_POS(args):
    """Request an immediate position report from the modem (*args* unused)."""
    print "Sending position request"
    msg = array.array('B',[COMMAND_GET_POS,1,2,3]) #extra bytes for not good reason
    send_mo_email(msg)
def RELEASE(args):
print "Sending ballast release command"
if RepresentsInt(args[0]):
msg = array.array('B',[COMMAND_RELEASE,int(args[0])])
print msg
send_mo_email(msg)
else:
"First argument must be int"
def RepresentsInt(s):
    """Return True when *s* parses as a base-10 integer, else False."""
    try:
        int(s)
    except ValueError:
        return False
    return True
def process_cmd(cmd_str):
    """Run the command named by *cmd_str* (case-insensitive).

    The first whitespace-separated token selects a same-named module-level
    function (commands are UPPERCASE); the remaining tokens are passed to it
    as a list. Unknown commands print a message.
    """
    tokens = cmd_str.split(' ')
    name = tokens[0].upper()
    args = tokens[1:] if len(tokens) > 1 else []
    # Look the handler up among module globals (command names are the
    # uppercase function names defined above).
    candidates = dict(globals())
    candidates.update(locals())
    handler = candidates.get(name)
    if handler:
        handler(args)
    else:
        print("Method %s not implemented" % name)
def main():
global user
global recipient
global incoming_server
global outgoing_server
global password
global email_enabled
global ip_enabled
global http_post_enabled
global aprs_server
global aprs_port
global aprs_password
global aprs_callsign
global aprs_is_enabled
parser = OptionParser()
parser.add_option("-p", "--passwd", dest="passwd", action="store", help="Password", metavar="PASSWD")
parser.add_option("-u", "--user", dest="user", action="store", help="E-mail account username", metavar="USER")
parser.add_option("-r", "--recipient", dest="recipient", action="store", help="Destination e-mail address.", metavar="USER")
parser.add_option("-i", "--in_srv", dest="in_srv", action="store", help="Incoming e-mail server url", metavar="IN_SRV")
parser.add_option("-o", "--out_srv", dest="out_srv", action="store", help="Outoging e-mail server", metavar="OUT_SRV")
parser.add_option("-m", "--mode", dest="mode", action="store", help="Mode: EMAIL,HTTP_POST,IP,NONE", default="NONE", metavar="MODE")
parser.add_option("-I", "--imei", dest="imei",action="store",help="IMEI of target modem.",metavar="IMEI")
parser.add_option("-A", "--aprs-server",dest="aprs_server",action="store",help="APRS server",metavar="APRS_SERVER")
parser.add_option("-a", "--aprs-port",dest="aprs_port",action="store",help="APRS port",metavar="APRS_PORT")
parser.add_option("-s", "--aprs-password",dest="aprs_password",action="store",help="APRS password",metavar="APRS_PASSWORD")
parser.add_option("-c", "--aprs-callsign",dest="aprs_callsign",action="store",help="APRS Callsign",metavar="APRS_CALLSIGN")
(options, args) = parser.parse_args()
if options.aprs_server:
aprs_server = options.aprs_server
if options.aprs_port:
aprs_port = options.aprs_port
if options.aprs_password:
aprs_password = options.aprs_password
aprs_is_enabled = True
if options.aprs_callsign:
aprs_callsign = options.aprs_callsign
#check for valid arguments
if options.mode == "EMAIL":
if options.passwd is None or options.user is None or options.recipient |
QuartetoFantastico/projetoPokemon | batalha.py | Python | gpl-2.0 | 4,953 | 0.032707 | import pokemon
import random
import re
import display
class Batalha:
    """A battle between two Pokemon, resolved turn by turn.

    pkmn[0] and pkmn[1] are the combatants; `turno` (0 or 1) indexes whose
    turn it currently is.
    """
    # NOTE(review): mutable default argument; harmless today because the
    # list is only read, but fragile -- prefer pokeList=None.
    def __init__(self, pokeList = []):
        self.display = display.Display()
        self.pkmn = []
        if (len(pokeList) == 0):
            # No combatants supplied: generate two random Pokemon.
            self.pkmn.append(pokemon.Pokemon())
            self.pkmn.append(pokemon.Pokemon())
        else:
            self.pkmn = pokeList
        self.turno = self.IniciaTurno()
    def IniciaTurno(self):
        """Pick who moves first: the faster Pokemon, coin flip on a tie."""
        if (self.pkmn[0].getSpd() > self.pkmn[1].getSpd()):
            return 0
        elif (self.pkmn[1].getSpd() > self.pkmn[0].getSpd()):
            return 1
        return random.randint(0, 1)
    def AlternaTurno(self):
        """End the current turn and hand control to the other Pokemon."""
        self.pkmn[self.turno].setStruggle()
        self.turno = (self.turno + 1) % 2
    def EscolheAtaque(self):
        """Return the index of the attack the active Pokemon will use.

        Returns 4 (Struggle) when forced to struggle; NPCs choose greedily
        via EscolheAtaqueInteligente, humans are prompted until they give a
        valid move number with PP remaining.
        """
        atacando = self.pkmn[self.turno]
        nAtks = atacando.getNatks()
        escolheu = 0
        if (atacando.isStruggling()):
            return 4
        else:
            if (atacando.npc):
                x = self.EscolheAtaqueInteligente()
                return x
            else:
                self.display.escolheAtaque(atacando)
                while True:
                    self.display.listaAtaques(nAtks, atacando.getAtkList())
                    while (not escolheu):
                        number = input("")
                        # Accept only strings starting with a digit.
                        p = re.compile('[0-9]')
                        if (p.match(number)):
                            number = int(number)
                            if (number > nAtks or number < 1):
                                self.display.atkInvalido()
                            else: escolheu = 1
                    if (escolheu):
                        # 1-based menu choice -> 0-based attack index.
                        if (atacando.getAtks(number - 1).ppCheck()):
                            return number - 1
                        self.display.ppInsuficiente()
                        escolheu = 0
                    else: self.display.atkInvalido()
    def EscolheAtaqueInteligente(self):
        """NPC move choice: maximise base power x type matchup x STAB.

        Accuracy is factored in only while the defender is below 100 HP.
        """
        # (translated) To maximise damage, maximise Base x Type x STAB.
        true = 1  # NOTE(review): unused variable.
        tab = self.TypeChart('tabela.txt')
        atacando = self.pkmn[self.turno]
        defendendo = self.pkmn[(self.turno + 1) % 2]
        BaseXType = 0
        lista = atacando.getAtkList()
        TypeMaior = tab[lista[0].getTyp()][defendendo.getTyp1()] * tab[lista[0].getTyp()][defendendo.getTyp2()]
        if (defendendo.getHpAtual() < 100):
            # NOTE(review): range(0, n - 1) never considers the last attack
            # in the list -- possible off-by-one; confirm.
            for i in range(0, atacando.getNatks() - 1):
                Type = tab[lista[i].getTyp()][defendendo.getTyp1()] * tab[lista[i].getTyp()][defendendo.getTyp2()]
                atual = lista[i].getPwr() * Type * (lista[i].getAcu()/100) * self.StabBonus(lista[i])
                maior = lista[BaseXType].getPwr() * TypeMaior * (lista[BaseXType].getAcu()/100) * self.StabBonus(lista[BaseXType])
                if (atual > maior and lista[i].ppCheck()):
                    BaseXType = i
                    TypeMaior = Type
        else:
            # Same scan without the accuracy weighting.
            for i in range(0, atacando.getNatks() - 1):
                Type = tab[lista[i].getTyp()][defendendo.getTyp1()] * tab[lista[i].getTyp()][defendendo.getTyp2()]
                atual = lista[i].getPwr() * Type * self.StabBonus(lista[i])
                maior = lista[BaseXType].getPwr() * TypeMaior * self.StabBonus(lista[BaseXType])
                if (atual > maior and lista[i].ppCheck()):
                    BaseXType = i
                    TypeMaior = Type
        return BaseXType
    def TypeChart(self, name):
        """Load the type-effectiveness matrix from *name* (one row per line)."""
        arquivo = open(name, 'r')
        tab = []
        i = 0
        line = arquivo.readline()
        while(line):
            tab.append(line)
            tab[i] = tab[i].split()
            tab[i] = [float(j) for j in tab[i]]
            i+= 1
            line = arquivo.readline()
        arquivo.close()
        return tab
    def StabBonus(self, atk):
        """Return 1.5 when *atk* shares a type with the attacker, else 1."""
        atacando = self.pkmn[self.turno]
        if (atk.getTyp() == atacando.getTyp1() or atk.getTyp() == atacando.getTyp2()): return 1.5
        return 1
    def CriticalHit(self):
        """Roll for a critical hit; return the damage multiplier (1 if none).

        Critical chance scales with the attacker's speed (spd/512).
        """
        atacando = self.pkmn[self.turno]
        critical = (atacando.getSpd()/512);
        temp = random.uniform(0, 1)
        if (temp <= critical):
            self.display.criticalHit()
            return (2 * atacando.getLvl() + 5)/(atacando.getLvl() + 5)
        return 1
    def CalculaDano(self, atk):
        """Compute and apply the damage of *atk*, plus Struggle recoil."""
        atacando = self.pkmn[self.turno]
        defendendo = self.pkmn[(self.turno + 1) % 2]
        Critical = self.CriticalHit();
        tab = self.TypeChart('tabela.txt');
        STAB = self.StabBonus(atk)
        Type = tab[atk.getTyp()][defendendo.getTyp1()] * tab[atk.getTyp()][defendendo.getTyp2()]
        Modifier = STAB * Type * Critical * random.uniform(0.85, 1)
        # NOTE(review): the branches look inverted -- special attacks use the
        # physical Atk/Def stats here while physical ones use Spc below;
        # confirm against the intended damage formula.
        if (atk.isSpecial()):
            Damage = round(((2 * atacando.getLvl() + 10)/250 * atacando.getAtk() / defendendo.getDefe() * atk.getPwr() + 2) * Modifier, 0);
        else:
            Damage = round(((2 * atacando.getLvl() + 10)/250 * atacando.getSpc() / defendendo.getSpc() * atk.getPwr() + 2) * Modifier, 0);
        if (self.isHit(atk)):
            defendendo.setHpAtual(int(defendendo.getHpAtual() - Damage))
            self.display.hit(atacando, defendendo, atk, Damage)
        else: self.display.miss(atacando, defendendo, atk)
        # Struggle recoil: attacker takes half the damage itself.
        if (atacando.isStruggling()):
            Damage = round(Damage / 2, 0)
            atacando.setHpAtual(int(atacando.getHpAtual() - Damage))
            self.display.hitSelf(atacando, Damage)
    def isHit(self, atk):
        """Accuracy roll: True when the attack connects."""
        x = random.uniform(0, 1)
        return x <= atk.getAcu() * 0.01
    def isOver(self):
        """True once either Pokemon has fainted."""
        return not (self.pkmn[0].isAlive() and self.pkmn[1].isAlive())
    def showResults(self):
        """Announce the winner, or a tie when both have fainted."""
        if (not self.pkmn[0].isAlive() and not self.pkmn[1].isAlive()):
            self.display.showTie()
        elif (self.pkmn[0].isAlive()):
            self.display.showWinner(self.pkmn[0])
        else: self.display.showWinner(self.pkmn[1])
    def showStatus(self):
        """Display both Pokemon's current HP."""
        self.display.pokemonHP(self.pkmn[1])
        self.display.pokemonHP(self.pkmn[0])
|
Stanislav-Rybonka/spaceshop | product/admin.py | Python | gpl-3.0 | 397 | 0 | from django.contrib import admin
from product.models import Category, Product
class CategoryAdmin(admin.ModelAdmin):
    """Admin form layout for product categories."""
    fields = ('name', 'description', 'image',)
class ProductAdmin(admin.ModelAdmin):
    """Admin form layout for products."""
    fields = ('name', 'description', 'price', 'category', 'image',)
# Register models for the admin site.
admin.site.register(Category, CategoryAdmin)
admin.site.register(Product, ProductAdmin)
|
jolyonb/edx-platform | lms/djangoapps/branding/tests/test_api.py | Python | agpl-3.0 | 10,470 | 0.005349 | # encoding: utf-8
"""Tests of Branding API """
from __future__ import absolute_import, unicode_literals
import mock
from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
from branding.api import _footer_business_links, get_footer, get_home_url, get_logo_url
from edxmako.shortcuts import marketing_link
from openedx.core.djangoapps.site_configuration.tests.test_util import with_site_configuration
test_config_disabled_contact_us = { # pylint: disable=invalid-name
"CONTACT_US_ENABLE": False,
}
test_config_custom_url_contact_us = { # pylint: disable=invalid-name
"CONTACT_US_ENABLE": True,
"CONTACT_US_CUSTOM_LINK": "https://open.edx.org/",
}
class TestHeader(TestCase):
    """Test API end-point for retrieving the header. """
    def test_cdn_urls_for_logo(self):
        # Ordinarily, we'd use `override_settings()` to override STATIC_URL,
        # which is what the staticfiles storage backend is using to construct the URL.
        # Unfortunately, other parts of the system are caching this value on module
        # load, which can cause other tests to fail.  To ensure that this change
        # doesn't affect other tests, we patch the `url()` method directly instead.
        cdn_url = "http://cdn.example.com/static/image.png"
        with mock.patch('branding.api.staticfiles_storage.url', return_value=cdn_url):
            logo_url = get_logo_url()
        self.assertEqual(logo_url, cdn_url)
    def test_home_url_with_mktg_disabled(self):
        # With the marketing site off, "home" is the LMS dashboard.
        expected_url = get_home_url()
        self.assertEqual(reverse('dashboard'), expected_url)
    @mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True})
    @mock.patch.dict('django.conf.settings.MKTG_URLS', {
        "ROOT": "https://edx.org",
    })
    def test_home_url_with_mktg_enabled(self):
        # With the marketing site on, "home" is the marketing ROOT link.
        expected_url = get_home_url()
        self.assertEqual(marketing_link('ROOT'), expected_url)
class TestFooter(TestCase):
"""Test retrieving the footer. """
maxDiff = None
@mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True})
@mock.patch.dict('django.conf.settings.MKTG_URLS', {
"ROOT": "https://edx.org",
"ENTERPRISE": "/enterprise"
})
@override_settings(ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS={}, PLATFORM_NAME='\xe9dX')
def test_footer_business_links_no_marketing_query_params(self):
"""
Enterprise marketing page values returned should be a concatenation of ROOT and
ENTERPRISE marketing url values when ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS
is not set.
"""
business_links = _footer_business_links()
assert business_links[0]['url'] == 'https://edx.org/enterprise'
@mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True})
@mock.patch.dict('django.conf.settings.MKTG_URLS', {
"ROOT": "https://edx.org",
"ABOUT": "/about-us",
"NEWS": "/news-announcements",
"CONTACT": "/contact",
"CAREERS": '/careers',
"FAQ": "/student-faq",
"BLOG": "/edx-blog",
"DONATE": "/donate",
"JOBS": "/jobs",
"SITE_MAP": "/sitemap",
"TRADEMARKS": "/trademarks",
"TOS_AND_HONOR": "/edx-terms-service",
"PRIVACY": "/edx-privacy-policy",
"ACCESSIBILITY": "/accessibility",
"AFFILIATES": '/affiliate-program',
"MEDIA_KIT": "/media-kit",
"ENTERPRISE": "https://business.edx.org"
})
@override_settings(PLATFORM_NAME='\xe9dX')
def test_get_footer(self):
actual_footer = get_footer(is_secure=True)
business_url = 'https://business.edx.org/?utm_campaign=edX.org+Referral&utm_source=edX.org&utm_medium=Footer'
expected_footer = {
'copyright': '\xa9 \xe9dX. All rights reserved except where noted. '
' EdX, Open edX and their respective logos are '
'trademarks or registered trademarks of edX Inc.',
'navigation_links': [
{'url': 'https://edx.org/about-us', 'name': 'about', 'title': 'About'},
{'url': 'https://business.edx.org', 'name': 'enterprise', 'title': '\xe9dX for Business'},
{'url': 'https://edx.org/edx-blog', 'name': 'blog', 'title': 'Blog'},
{'url': 'https://edx.org/news-announcements', 'name': 'news', 'title': 'News'},
{'url': 'https://support.example.com', 'name': 'help-center', 'title': 'Help Center'},
{'url': '/support/contact_us', 'name': 'contact', 'title': 'Contact'},
{'url': 'https://edx.org/careers', 'name': 'careers', 'title': 'Careers'},
{'url': 'https://edx.org/donate', 'name': 'donate', 'title': 'Donate'}
],
'business_links': [
{'url': 'https://edx.org/about-us', 'name': 'about', 'title': 'About'},
{'url': business_url, 'name': 'enterprise', 'title': '\xe9dX for Business'},
{'url': 'https://edx.org/affiliate-program', 'name': 'affiliates', 'title': 'Affiliates'},
{'url': 'http://open.edx.org', 'name': 'openedx', 'title': 'Open edX'},
{'url': 'https://edx.org/careers', 'name': 'careers', 'title': 'Careers'},
{'url': 'https://edx.org/news-announcements', 'name': 'news', 'title': 'News'},
],
'more_info_links': [
{'url': 'https://edx.org/edx-terms-service',
'name': 'terms_of_service_and_honor_code',
'title': 'Terms of Service & Honor Code'},
{'url': 'https://edx.org/edx-privacy-policy', 'name': 'privacy_policy', 'title': 'Privacy Policy'},
{'url': 'https://edx.org/accessibility',
'name': 'accessibility_policy',
'title': 'Accessibility Policy'},
{'url': 'https://edx.org/trademarks', 'name': 'trademarks', 'title': 'Trademark Policy'},
{'url': 'https://edx.org/sitemap', 'name': 'sitemap', 'title': 'Sitemap'},
],
'connect_links': [
{'url': 'https://edx.org/edx-blog', 'name': 'blog', 'title': 'Blog'},
# pylint: disable=line-too-long
{'url': '{base_url}/support/contact_us'.format(base_url=settings.LMS_ROOT_URL), 'name': 'contact', 'title': 'Contact Us'},
{'url': 'https://support.example.com', 'name': 'help-center', 'title': 'Help Center'},
{'url': 'https://edx.org/media-kit', 'name': 'media_kit', 'title': 'Media Kit'},
{'url': 'https://edx.org/donate', 'name': 'donate', 'title': 'Donate'}
],
'legal_links': [
{'url': 'https://edx.org/edx-terms-service',
'name': 'terms_of_service_and_honor_code',
'title': 'Terms of Service & Honor Code'},
{'url': 'https://edx.org/edx-privacy-policy', 'name': 'privacy_policy', 'title': 'Privacy Policy'},
{'url': 'https://edx.org/accessibility',
'name': 'accessibility_policy',
'title': 'Accessibility Policy'},
{'url': 'https://edx.org/sitemap', 'name': 'sitemap', 'title': 'Sitemap'},
{'name': 'media_kit',
'title': u'Media Kit',
'url': u'https://edx.org/media-kit'}
],
'social_links': [
{'url': '#', 'action': 'Like \xe9dX on Facebook', 'name': 'facebook',
'icon-class': 'fa-facebook-square', 'title': 'Facebook'},
{'url': '#', 'action': 'Follow \xe9dX on Twitter', 'name': 'twitter',
'icon-class': 'fa-twitter-square', 'title': 'Twitter'},
{'url': '#', 'action': 'Subscribe to the \xe9dX YouTube channel',
'name': 'youtube', 'icon-class': 'fa-youtube-square', 'title': 'Youtube'},
{'url': '#', 'action': 'Follow \xe9dX on LinkedIn', 'name': 'linkedin',
'icon-class': 'fa-linkedin-square', 'title': ' |
vykhand/pyspark-csv | pyspark_csv.py | Python | mit | 5,353 | 0.016813 | """
The MIT License (MIT)
Copyright (c) 2015 seahboonsiew
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import csv
import dateutil.parser
from pyspark import sql
from pyspark.sql import *
from pyspark.sql.types import *
"""
Convert CSV plain text RDD into SparkSQL DataFrame (former SchemaRDD) using PySpark
If columns not given, assume first row is the header
If separator not given, assume comma separated
"""
def csvToDataFrame(sqlCtx,rdd,columns=None,sep=",",parseDate=True, nSampl=1000):
    """Convert an RDD of CSV text lines into a SparkSQL DataFrame.

    sqlCtx    -- SQLContext used to create the DataFrame
    rdd       -- RDD of raw, unparsed CSV lines
    columns   -- column names; if None the first row is taken as the header
    sep       -- field separator (default comma)
    parseDate -- when True, also try to infer timestamp columns
    nSampl    -- number of leading data rows sampled for type inference
                 (<= 0 means sample every row)
    """
    # NOTE: uses Python 2 tuple-unpacking lambdas (`lambda (r,i):`); this
    # module is not Python 3 compatible as written.
    def toRow(line):
        # Bind the separator so the parser can be used with rdd.map().
        return toRowSep(line,sep)
    rdd_array = rdd.map(toRow)
    rdd_sql = rdd_array
    if columns is None:
        columns = rdd_array.first()
        # Drop the header row (index 0) before sampling and conversion.
        rdd_sampl = rdd_array.zipWithIndex().filter(lambda (r,i): (i > 0))
        if nSampl > 0:
            rdd_sampl = rdd_sampl.filter(lambda(r,i): (i < nSampl))
        rdd_sampl = rdd_sampl.keys()
        rdd_sql = rdd_array.zipWithIndex().filter(lambda (r,i): i > 0).keys()
    # NOTE(review): if `columns` is supplied by the caller, `rdd_sampl` is
    # never assigned and the next line raises NameError -- presumably the
    # sampling block should also run in that branch; verify against upstream.
    column_types = evaluateType(rdd_sampl,parseDate)
    def toSqlRow(row):
        # Bind the inferred column types for use with rdd.map().
        return toSqlRowWithType(row,column_types)
    schema = makeSchema(zip(columns,column_types))
    return sqlCtx.createDataFrame(rdd_sql.map(toSqlRow), schema=schema)
def makeSchema(columns):
    """Build a nullable StructType schema from (name, type-string) pairs."""
    type_for = {
        'string': StringType(),
        'date': TimestampType(),
        'double': DoubleType(),
        'int': IntegerType(),
        'none': NullType(),
    }
    return StructType([StructField(name, type_for[kind], True)
                       for name, kind in columns])
# Parse a row using csv.reader
def toRowSep(line,d):
    """Parse one CSV line with fields separated by ``d`` into a list."""
    # Encode to UTF-8 first: the Python 2 csv module cannot handle unicode.
    parsed = csv.reader([line.encode('utf-8')], delimiter=d)
    for fields in parsed:
        return fields
# Actual conversion to sql.Row
def toSqlRowWithType(row,col_types):
    """Coerce each string field of ``row`` in place to its inferred type.

    Missing values (per isNone) become None; 'none'-typed columns are left
    untouched.  Mutates and returns ``row``.
    """
    converted = row
    for idx, value in enumerate(row):
        kind = col_types[idx]
        if isNone(value):
            converted[idx] = None
        elif kind == 'int':
            # Round first so "3.0"-style values survive the int conversion.
            converted[idx] = int(round(float(value)))
        elif kind == 'double':
            converted[idx] = float(value)
        elif kind == 'date':
            converted[idx] = toDate(value)
        elif kind == 'string':
            converted[idx] = value
    return converted
# Type converter
def isNone(d):
    """Return True when ``d`` is one of the tokens treated as a null field."""
    return d in (None, 'None', '?', '', 'NULL', 'null', 'NA')
def toDate(d):
    # Parse an arbitrary date/time string via dateutil's flexible parser;
    # raises on strings that are not date-like (callers rely on that).
    return dateutil.parser.parse(d)
# Infer types for each row
def getRowType(row):
    """Classify every field of ``row`` in place as none/int/double/date/string.

    Mutates and returns ``row``: each cell is replaced by its type label.
    """
    labels = row
    for idx, value in enumerate(row):
        try:
            if isNone(value):
                labels[idx] = 'none'
            else:
                # Numeric first: whole-valued floats are labelled 'int'.
                labels[idx] = 'int' if float(value).is_integer() else 'double'
        except:
            # Not numeric -- try a date, otherwise fall back to string.
            try:
                toDate(value)
                labels[idx] = 'date'
            except:
                labels[idx] = 'string'
    return labels
# Infer types for each row
def getRowTypeNoDate(row):
    """Classify every field of ``row`` in place as none/int/double/string.

    Same as getRowType but never attempts date parsing.  Mutates and
    returns ``row``.
    """
    labels = row
    for idx, value in enumerate(row):
        try:
            if isNone(value):
                labels[idx] = 'none'
            else:
                labels[idx] = 'int' if float(value).is_integer() else 'double'
        except:
            labels[idx] = 'string'
    return labels
# Reduce column types among rows to find common denominator
def reduceTypes(a,b):
    """Combine two per-column type vectors into their common denominator.

    'none' always defers to the other side; otherwise the more general
    type wins, widening per ``reduce_map`` ('string' > 'date' > 'double'
    > 'int').  Mutates and returns ``a``.

    (Two lines of this function were garbled by dataset separators and
    have been reconstructed.)
    """
    # Lower order number == more general type.
    type_order = {'string':0, 'date':1, 'double':2, 'int':3, 'none':4}
    # reduce_map[narrow_type][order_of_other] -> widened common type.
    reduce_map = {'int': {0:'string', 1:'string', 2:'double'},
                  'double': {0:'string', 1:'string'},
                  'date': {0:'string'}}
    d = a
    for col, a_type in enumerate(a):
        b_type = b[col]
        if a_type == 'none':
            d[col] = b_type
        elif b_type == 'none':
            d[col] = a_type
        else:
            order_a = type_order[a_type]
            order_b = type_order[b_type]
            if order_a == order_b:
                d[col] = a_type
            elif order_a > order_b:
                d[col] = reduce_map[a_type][order_b]
            else:
                d[col] = reduce_map[b_type][order_a]
    return d
def evaluateType(rdd_sql,parseDate):
    """Infer the common column types over an RDD of parsed rows."""
    classifier = getRowType if parseDate else getRowTypeNoDate
    return rdd_sql.map(classifier).reduce(reduceTypes)
|
google/iree-llvm-sandbox | python/examples/reduction/column_reduction_2d_bench.py | Python | apache-2.0 | 3,738 | 0.011236 | # RUN: %PYTHON %s 2>&1 | FileCheck %s
# This file contains small benchmarks with reasonably-sized problem/tiling sizes
# and codegen options.
from ..core.experts import *
from ..core.harness import *
from ..core.transforms import *
from ..contraction.definitions import *
fun_name = 'column_reduction_2d'
op_name = 'linalg.generic'
################################################################################
### Compilation strategies.
################################################################################
# Note: `\` char at the end of next line prevents formatter reflows, keep it.
# One human-readable name per tiling configuration produced by
# `all_experts` below; the two lists must stay index-aligned.
# (The garbled closing bracket has been reconstructed.)
all_names = [ \
  "Tile4x8PeelInnerParallel", \
  "Tile6x8PeelInnerParallel", \
  "Tile8x8PeelInnerParallel", \
  "Tile4x16PeelInnerParallel", \
  "Tile6x16PeelInnerParallel", \
  "Tile8x16PeelInnerParallel", \
  "Tile4x32PeelInnerParallel", \
  "Tile6x32PeelInnerParallel", \
  "Tile8x32PeelInnerParallel", \
  "Tile16x32PeelInnerParallel", \
  "Tile4x64PeelInnerParallel", \
  "Tile6x64PeelInnerParallel", \
  "Tile8x64PeelInnerParallel", \
  "Tile16x64PeelInnerParallel", \
]
def all_experts(problem_sizes: List[int]):
  """Return one codegen expert per tiling configuration (order matches
  `all_names`).

  The reduced (second) dimension is only tiled/peeled when the problem is
  large enough for the tile; otherwise only the parallel dimension is
  tiled.  (One garbled tile_sizes row has been reconstructed.)
  """
  tile_sizes = [
      [4, 8], [6, 8], [8, 8], \
      [4, 16], [6, 16], [8, 16], \
      [4, 32], [6, 32], [8, 32], [16, 32], \
      [4, 64], [6, 64], [8, 64], [16, 64], \
  ]
  res = []
  for ts in tile_sizes:
    res.append(
        # Note: `\` char at the end of next line prevents formatter reflows, keep it.
        Tile(fun_name=fun_name, \
             op_name=op_name,
             # Don't tile too small dimensions.
             tile_sizes=[ts[0], ts[1]] if problem_sizes[1] > ts[1] else [ts[0]],
             peel=[0, 1] if problem_sizes[1] > ts[1] else [0])
        .then(Vectorize(fun_name, op_name))
        .then(LoweringOnlyExpert(fun_name,
                                 op_name,
                                 multi_reduction_lowering='innerparallel')),
    )
  return [e.print_ir(after_all=False, at_begin=False, llvm=False) for e in res]
################################################################################
### Problem instantiations.
################################################################################

# Einsum-style dimension names for the (m x n) -> (n) column reduction;
# used to label the entries of each problem-sizes list.
keys = ['m', 'n']


# CHECK-NOT: FAILURE
def main():
  """Run the 2-D column-reduction benchmark over all configured experts."""
  # Specify default configuration and parse command line.
  # Note: `\` char at the end of next line prevents formatter reflows, keep it.
  args = test_argparser( \
    "column reduction 2d benchmark",
    default_n_iters=100,
    default_problem_sizes_list=[
      [128, 256],
      [104, 128],
      [256, 256],
      [1000, 1024],
      [8000, 6144],
    ],
    default_expert_list=all_names,
    default_dynamic_at_compile_time_list=[
      [],  # case 1: static at compile time
      ['m', 'n']  # case 2: fully dynamic at compile time
    ],
    default_spec_list=[])

  # Reference implementations of the column reduction.
  # NOTE(review): these kernels are defined but not passed to test_harness
  # below; presumably kept for manual correctness checking -- confirm.
  def numpy_kernel(args, sizes, types):
    A, B = args
    B.fill(0.)
    np.sum(A, axis=0, out=B)

  def pytorch_kernel(args, sizes, types):
    A, B = args
    B.fill_(0.)
    torch.sum(A, dim=0, out=B)

  # Benchmark every (dynamism, problem size) combination.
  for dynamic_at_compile_time in args.dynamic_at_compile_time_list:
    for problem_sizes in args.problem_sizes_list:
      test_harness(lambda s, t: EinsumProblem('mn->n', 'mn', 1),
                   [[np.float32] * 2],
                   test_sizes(keys, [problem_sizes]),
                   test_experts(all_experts(problem_sizes), all_names,
                                args.expert_list),
                   n_iters=args.n_iters,
                   dynamic_at_compile_time_sizes=set(
                       dynamic_at_compile_time).intersection(keys),
                   function_name=fun_name,
                   dump_ir_to_file='/tmp/abcd.mlir',
                   dump_obj_to_file='/tmp/abcd.o',
                   dump_data_to_file=args.dump_data)


if __name__ == '__main__':
  main()
|
arsenovic/galgebra | examples/Old Format/terminal_check.py | Python | bsd-3-clause | 10,702 | 0.037283 | #!/usr/bin/python
from __future__ import print_function
import sys
from sympy import Symbol,symbols,sin,cos,Rational,expand,simplify,collect
from galgebra.printer import enhance_print,Get_Program,Print_Function,Format
from galgebra.deprecated import MV
from galgebra.mv import Nga,ONE,ZERO
from galgebra.ga import Ga
def basic_multivector_operations():
    """Print basic multivector products (*, ^, |, <, >) in 3-D and 2-D algebras."""
    Print_Function()
    # 3-D algebra with a general (symbolic) metric.
    (ex,ey,ez) = MV.setup('e*x|y|z')
    A = MV('A','mv')
    A.Fmt(1,'A')
    A.Fmt(2,'A')
    A.Fmt(3,'A')
    X = MV('X','vector')
    Y = MV('Y','vector')
    print('g_{ij} =\n',MV.metric)
    X.Fmt(1,'X')
    Y.Fmt(1,'Y')
    (X*Y).Fmt(2,'X*Y')
    (X^Y).Fmt(2,'X^Y')
    (X|Y).Fmt(2,'X|Y')
    # 2-D algebra, general metric: vector/spinor contractions.
    (ex,ey) = MV.setup('e*x|y')
    print('g_{ij} =\n',MV.metric)
    X = MV('X','vector')
    A = MV('A','spinor')
    X.Fmt(1,'X')
    A.Fmt(1,'A')
    (X|A).Fmt(2,'X|A')
    (X<A).Fmt(2,'X<A')
    (A>X).Fmt(2,'A>X')
    # 2-D euclidian metric [1,1]: full table of products in both orders.
    (ex,ey) = MV.setup('e*x|y',metric='[1,1]')
    print('g_{ii} =\n',MV.metric)
    X = MV('X','vector')
    A = MV('A','spinor')
    X.Fmt(1,'X')
    A.Fmt(1,'A')
    (X*A).Fmt(2,'X*A')
    (X|A).Fmt(2,'X|A')
    (X<A).Fmt(2,'X<A')
    (X>A).Fmt(2,'X>A')
    (A*X).Fmt(2,'A*X')
    (A|X).Fmt(2,'A|X')
    (A<X).Fmt(2,'A<X')
    (A>X).Fmt(2,'A>X')
    return
def check_generalized_BAC_CAB_formulas():
    """Print generalized bac-cab (triple-product) identities for symbolic vectors."""
    Print_Function()
    (a,b,c,d,e) = MV.setup('a b c d e')
    print('g_{ij} =\n',MV.metric)
    print('a|(b*c) =',a|(b^c))
    print('a|(b^c) =',a|(b^c))
    print('a|(b^c^d) =',a|(b^c^d))
    print('a|(b^c)+c|(a^b)+b|(c^a) =',(a|(b^c))+(c|(a^b))+(b|(c^a)))
    print('a*(b^c)-b*(a^c)+c*(a^b) =',a*(b^c)-b*(a^c)+c*(a^b))
    print('a*(b^c^d)-b*(a^c^d)+c*(a^b^d)-d*(a^b^c) =',a*(b^c^d)-b*(a^c^d)+c*(a^b^d)-d*(a^b^c))
    print('(a^b)|(c^d) =',(a^b)|(c^d))
    print('((a^b)|c)|d =',((a^b)|c)|d)
    # Ga.com is the commutator product.
    print('(a^b)x(c^d) =',Ga.com(a^b,c^d))
    print('(a|(b^c))|(d^e) =',(a|(b^c))|(d^e))
    return
def derivatives_in_rectangular_coordinates():
    """Print geometric derivatives (grad applied via *, |, ^, <, >) in rectangular coordinates."""
    Print_Function()
    X = (x,y,z) = symbols('x y z')
    (ex,ey,ez,grad) = MV.setup('e_x e_y e_z',metric='[1,1,1]',coords=X)
    # Coordinate-dependent fields: scalar, vector, bivector and full multivector.
    f = MV('f','scalar',fct=True)
    A = MV('A','vector',fct=True)
    B = MV('B','grade2',fct=True)
    C = MV('C','mv',fct=True)
    print('f =',f)
    print('A =',A)
    print('B =',B)
    print('C =',C)
    print('grad*f =',grad*f)
    print('grad|A =',grad|A)
    print('grad*A =',grad*A)
    print('-I*(grad^A) =',-MV.I*(grad^A))
    print('grad*B =',grad*B)
    print('grad^B =',grad^B)
    print('grad|B =',grad|B)
    print('grad<A =',grad<A)
    print('grad>A =',grad>A)
    print('grad<B =',grad<B)
    print('grad>B =',grad>B)
    print('grad<C =',grad<C)
    print('grad>C =',grad>C)
    return
def derivatives_in_spherical_coordinates():
    """Print geometric derivatives in spherical coordinates (r, theta, phi)."""
    Print_Function()
    X = (r,th,phi) = symbols('r theta phi')
    # Embedding of spherical coordinates into R^3 plus the second list
    # [1, r, r*sin(th)] -- presumably the coordinate scale factors; see the
    # galgebra MV.setup(curv=...) documentation to confirm.
    curv = [[r*cos(phi)*sin(th),r*sin(phi)*sin(th),r*cos(th)],[1,r,r*sin(th)]]
    (er,eth,ephi,grad) = MV.setup('e_r e_theta e_phi',metric='[1,1,1]',coords=X,curv=curv)
    f = MV('f','scalar',fct=True)
    A = MV('A','vector',fct=True)
    B = MV('B','grade2',fct=True)
    print('f =',f)
    print('A =',A)
    print('B =',B)
    print('grad*f =',grad*f)
    print('grad|A =',grad|A)
    print('-I*(grad^A) =',-MV.I*(grad^A))
    print('grad^B =',grad^B)
    return
def rounding_numerical_components():
    """Show rounding of float multivector coefficients via Nga(mv, ndigits)."""
    Print_Function()
    (ex,ey,ez) = MV.setup('e_x e_y e_z',metric='[1,1,1]')
    X = 1.2*ex+2.34*ey+0.555*ez
    Y = 0.333*ex+4*ey+5.3*ez
    print('X =',X)
    print('Nga(X,2) =',Nga(X,2))
    print('X*Y =',X*Y)
    print('Nga(X*Y,2) =',Nga(X*Y,2))
    return
def noneuclidian_distance_calculation():
    """Symbolically derive cosh(alpha) relating non-euclidian points X and Y.

    Comment tags like "D&L 10.152" cite equation numbers from a reference
    text (presumably Doran & Lasenby, "Geometric Algebra for Physicists").
    Two lines garbled by dataset separators (a rotor comment and the
    "Objective is..." print string) have been reconstructed.
    """
    from sympy import solve,sqrt
    Print_Function()
    metric = '0 # #,# 0 #,# # 1'
    (X,Y,e) = MV.setup('X Y e',metric)
    print('g_{ij} =',MV.metric)
    print('(X^Y)**2 =',(X^Y)*(X^Y))
    L = X^Y^e
    B = L*e # D&L 10.152
    print('B =',B)
    Bsq = B*B
    print('B**2 =',Bsq)
    Bsq = Bsq.scalar()
    print('#L = X^Y^e is a non-euclidian line')
    print('B = L*e =',B)
    BeBr =B*e*B.rev()
    print('B*e*B.rev() =',BeBr)
    print('B**2 =',B*B)
    print('L**2 =',L*L) # D&L 10.153
    (s,c,Binv,M,S,C,alpha,XdotY,Xdote,Ydote) = symbols('s c (1/B) M S C alpha (X.Y) (X.e) (Y.e)')
    Bhat = Binv*B # D&L 10.154
    R = c+s*Bhat # Rotor R = exp(alpha*Bhat/2)
    print('s = sinh(alpha/2) and c = cosh(alpha/2)')
    print('exp(alpha*B/(2*|B|)) =',R)
    Z = R*X*R.rev() # D&L 10.155
    Z.obj = expand(Z.obj)
    Z.obj = Z.obj.collect([Binv,s,c,XdotY])
    Z.Fmt(3,'R*X*R.rev()')
    W = Z|Y # Extract scalar part of multivector
    # From this point forward all calculations are with sympy scalars
    print('Objective is to determine value of C = cosh(alpha) such that W = 0')
    W = W.scalar()
    print('Z|Y =',W)
    W = expand(W)
    W = simplify(W)
    W = W.collect([s*Binv])
    M = 1/Bsq
    W = W.subs(Binv**2,M)
    W = simplify(W)
    Bmag = sqrt(XdotY**2-2*XdotY*Xdote*Ydote)
    W = W.collect([Binv*c*s,XdotY])
    #Double angle substitutions
    W = W.subs(2*XdotY**2-4*XdotY*Xdote*Ydote,2/(Binv**2))
    W = W.subs(2*c*s,S)
    W = W.subs(c**2,(C+1)/2)
    W = W.subs(s**2,(C-1)/2)
    W = simplify(W)
    W = W.subs(1/Binv,Bmag)
    W = expand(W)
    print('S = sinh(alpha) and C = cosh(alpha)')
    print('W =',W)
    Wd = collect(W,[C,S],exact=True,evaluate=False)
    Wd_1 = Wd[ONE]
    Wd_C = Wd[C]
    Wd_S = Wd[S]
    print('Scalar Coefficient =',Wd_1)
    print('Cosh Coefficient =',Wd_C)
    print('Sinh Coefficient =',Wd_S)
    print('|B| =',Bmag)
    Wd_1 = Wd_1.subs(Bmag,1/Binv)
    Wd_C = Wd_C.subs(Bmag,1/Binv)
    Wd_S = Wd_S.subs(Bmag,1/Binv)
    # Square both sides of Wd_1 + Wd_C*C = -Wd_S*S to eliminate S.
    lhs = Wd_1+Wd_C*C
    rhs = -Wd_S*S
    lhs = lhs**2
    rhs = rhs**2
    W = expand(lhs-rhs)
    W = expand(W.subs(1/Binv**2,Bmag**2))
    W = expand(W.subs(S**2,C**2-1))
    W = W.collect([C,C**2],evaluate=False)
    # Quadratic a*C**2 + b*C + c = 0 in C = cosh(alpha).
    a = simplify(W[C**2])
    b = simplify(W[C])
    c = simplify(W[ONE])
    print('Require a*C**2+b*C+c = 0')
    print('a =',a)
    print('b =',b)
    print('c =',c)
    x = Symbol('x')
    C = solve(a*x**2+b*x+c,x)[0]
    print('cosh(alpha) = C = -b/(2*a) =',expand(simplify(expand(C))))
    return
# Exact rational 1/2 so conformal coordinates stay symbolic (no floats).
HALF = Rational(1,2)

def F(x):
    # Conformal image of Euclidean point x: F(x) = (x^2*n + 2*x - nbar)/2,
    # where n and nbar are the null basis vectors installed by the caller.
    global n,nbar
    Fx = HALF*((x*x)*n+2*x-nbar)
    return(Fx)
def make_vector(a,n = 3):
    # Build an n-component symbolic vector named after string `a` (padded
    # with two zeros for the conformal null directions), or accept a ready
    # MV, and return its conformal image F(a).
    # NOTE(review): parameter `n` shadows the global null vector `n` used by
    # F; harmless here only because F declares `global n`.
    if isinstance(a,str):
        sym_str = ''
        for i in range(n):
            sym_str += a+str(i+1)+' '
        sym_lst = list(symbols(sym_str))
        sym_lst.append(ZERO)
        sym_lst.append(ZERO)
        a = MV(sym_lst,'vector')
    return(F(a))
def conformal_representations_of_circles_lines_spheres_and_planes():
    """Print conformal-model wedge conditions defining circles, lines, spheres and planes."""
    global n,nbar
    Print_Function()
    # 5-D conformal metric: euclidian e1,e2,e3 plus null vectors n, nbar
    # with n|nbar = 2.
    metric = '1 0 0 0 0,0 1 0 0 0,0 0 1 0 0,0 0 0 0 2,0 0 0 2 0'
    (e1,e2,e3,n,nbar) = MV.setup('e_1 e_2 e_3 n nbar',metric)
    print('g_{ij} =\n',MV.metric)
    e = n+nbar
    #conformal representation of points
    A = make_vector(e1) # point a = (1,0,0) A = F(a)
    B = make_vector(e2) # point b = (0,1,0) B = F(b)
    C = make_vector(-e1) # point c = (-1,0,0) C = F(c)
    D = make_vector(e3) # point d = (0,0,1) D = F(d)
    X = make_vector('x',3)
    print('F(a) =',A)
    print('F(b) =',B)
    print('F(c) =',C)
    print('F(d) =',D)
    print('F(x) =',X)
    print('a = e1, b = e2, c = -e1, and d = e3')
    print('A = F(a) = 1/2*(a*a*n+2*a-nbar), etc.')
    print('Circle through a, b, and c')
    print('Circle: A^B^C^X = 0 =',(A^B^C^X))
    print('Line through a and b')
    print('Line : A^B^n^X = 0 =',(A^B^n^X))
    print('Sphere through a, b, c, and d')
    print('Sphere: A^B^C^D^X = 0 =',(((A^B)^C)^D)^X)
    print('Plane through a, b, and d')
    print('Plane : A^B^n^D^X = 0 =',(A^B^n^D^X))
    L = (A^B^e)^X
    L.Fmt(3,'Hyperbolic Circle: (A^B^e)^X = 0 =')
    return
def properties_of_geometric_objects():
global n,nbar
Print_Function()
metric = '# # # 0 0,'+ \
'# # # 0 0,'+ \
'# # # 0 0,'+ \
'0 0 0 0 2,'+ \
'0 0 0 2 0'
(p1,p2,p3,n,nbar) = MV.setup('p1 p2 p3 n nbar',metric)
print('g_{ij} =\n',MV.metric)
P1 = F(p1)
P2 = F(p2)
P3 = F(p3)
print('Extracting direction o |
maaadc/corbit | plot.py | Python | gpl-2.0 | 3,754 | 0.029036 | #!/usr/bin/env python
#
# Visualization of planet movement via numpy and matplotlib
#
import numpy as np
import matplotlib
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import animation
# load position data and reshape it
Ndays = 0
N = 0
Nplanets = 0
Tstep = 0
V = np.array([])
W = np.array([])
X = np.array([])
filehandle = open("run.dat", "r")
section = ""
it = 0
for line in filehandle:
# skip comments
if line[0] == "#":
continue
# determine section
if line[0] in ("*"):
section = line[1]
it = 0
continue
# parameters
if section == "P":
temp = line.split()
Ndays = int(temp[0])
N = int(temp[1])
Nplanets = int(temp[2])
Tstep = float(temp[3])
# resize array structures
V.resize((Ndays,N,3))
W.resize((Ndays,3))
X.resize((Ndays,N,3))
continue
# abs(v)
if section == "V":
temp = line.split()
V[it] = np.array(temp).reshape((N,3))
it += 1
continue
# energy
if section == "W":
temp = line.split()
W[it,:] = temp
it += 1
continue
# positions
if section == "X":
temp = line.split()
X[it] = np.array(temp).reshape((N,3))
it += 1
continue
filehandle.close()
# calculate abs(V) and store in V[i,j,3]
V.resize((Ndays,N,4));
for i in range(Ndays):
for j in range(N):
V[i,j,3] = np.linalg.norm( V[i,j,0:3] )
# prepare shape [Ndays,N,3] => [N,Ndays,3] for plotting
X = np.swapaxes(X, 0,1)
V = np.swapaxes(V, 0,1)
# use real planet names and make up names for probes
n = ["sun", "mercury", "venus", "earth", "mars", "jupiter", "saturn", "uranus", "neptune", "pluto"]
n_probe = ["probe" + str(i) for i in range(N-Nplanets)]
n += n_probe
print str(N) + " objects found."
# set subplots
fig = plt.figure()
ax1 = fig.add_subplot(211, projection="3d", axisbg='black')
#ax2 = fig.add_subplot(122)
# choose different colors for each trajectory
color_planets = plt.cm.jet(np.linspace(0, 1, Nplanets))
color_probes = plt.cm.binary(np.linspace(0, 0.6, N-Nplanets))
colors = np.vstack((color_planets, color_probes))
#colors = ("#ffcc00",
# set trajectories, only lines get labels
lines = sum( [ax1.plot([], [], [], '-', color=c, label=l) for c, l in zip(colors, n)], [] )
points = sum( [ax1.plot([], [], [], 'o', color=c) for c in colors], [] )
# set plot layout
limit = (-5,5) # axes limits
#limit = (-20,20)
ax1.set_xlim(limit)
ax1.set_ylim(limit)
ax1.set_zlim(limit)
ax1.axis("off") # disable axes
# put legend to the right of the plot
#ax.legend(loc="center left", bbox_to_anchor=(1., 0.5), prop={"size":12})
#ax1.legend(loc="center right", bbox_to_anchor=(1., 0.5), prop={"size":12})
counter = plt.figtext(0.1, 0.9, "-", color="white") # prepare text window
# set point-of-view: theta and phi in degrees
ax1.view_init(90, 0) # 60, 20
# function is called each frame
def anim_sequence(i):
    """Draw frame i: extend each trajectory line to day i and move its marker.

    Reads module globals: counter, lines, points, X (positions), fig.
    """
    # fast forward
    #i *= 5
    # set counter
    counter.set_text("t = " + str(i) + " d")
    # set trajectories
    for line, point, pos in zip(lines, points, X):
        x, y, z = pos[:i].T
        line.set_data(x, y)
        line.set_3d_properties(z)
        # Marker sits at the most recent position (empty slice when i == 0).
        point.set_data(x[-1:], y[-1:])
        point.set_3d_properties(z[-1:])
    fig.canvas.draw()
    return lines + points
# start animator
#anim = animation.FuncAnimation(fig, anim_sequence, frames=Ndays, interval=30, blit=True)
# or simply draw the last frame to show full data
anim_sequence(Ndays)
# save animation as mp4, ffmpeg needed
#anim.save("test.mp4", fps=30), extra_args=["-vcodec", "libx264"])
# show speed
ax2 = plt.subplot(212, axisbg="black")
#ax2.set_yscale("log")
#ax2.set_ylim((1.e-2,1.e2))
for i in range(N):
plt.plot(np.arange(Ndays), V[i,:,3], c=colors[i])
# add a legend
fig.legend(lines, n, loc="center right", prop={"size":11})
# show plot with tight layout
#plt.tight_layout()
plt.show()
|
jekhokie/scriptbox | python--advent-of-code/2020/3/solve.py | Python | mit | 757 | 0.029062 | #!/usr/bin/env python3
# Read the puzzle grid, one row per line (trailing newlines stripped).
lines = []
with open('input.txt', 'r') as f:
    lines = f.read().splitlines()
#--- challenge 1
def get_trees(lines, right, down):
    """Count '#' cells hit while stepping (right, down) across the grid.

    The grid's rows repeat (wrap) horizontally.  The first cell checked is
    at row `down`, column `right` (one step past the origin).
    (The garbled slice in the loop header has been reconstructed.)
    """
    trees = 0
    pos = 0
    line_len = len(lines[0])
    for line in lines[down::down]:
        # Advance the column, wrapping around the repeated pattern width.
        if (pos + right) >= line_len:
            pos = right - (line_len - pos)
        else:
            pos += right
        if line[pos] == '#':
            trees += 1
    return trees
# Challenge 1: single slope (right 3, down 1).
trees = get_trees(lines, 3, 1)
print("Solution to challenge 1: {}".format(trees))

#--- challenge 2
# Challenge 2: product of tree counts over the five prescribed slopes.
# (The garbled `product = 1` line has been reconstructed.)
tree_list = []
sequences = [[1,1], [3,1], [5,1], [7,1], [1,2]]
for check in sequences:
    tree_list.append(get_trees(lines, check[0], check[1]))

product = 1
for trees in tree_list:
    product *= trees
print("Solution to challenge 2: {}".format(product))
|
fbradyirl/home-assistant | homeassistant/components/google_assistant/const.py | Python | apache-2.0 | 3,937 | 0.000254 | """Constants for Google Assistant."""
from homeassistant.components import (
binary_sensor,
camera,
climate,
cover,
fan,
group,
input_boolean,
light,
lock,
media_player,
scene,
script,
sensor,
switch,
vacuum,
)
DOMAIN = "google_assistant"
GOOGLE_ASSISTANT_API_ENDPOINT = "/api/google_assistant"
CONF_EXPOSE = "expose"
CONF_ENTITY_CONFIG = "entity_config"
CONF_EXPOSE_BY_DEFAULT = "expose_by_default"
CONF_EXPOSED_DOMAINS = "exposed_domains"
CONF_PROJECT_ID = "project_id"
CONF_ALIASES = "aliases"
CONF_API_KEY = "api_key"
CONF_ROOM_HINT = "room"
CONF_ALLOW_UNLOCK = "allow_unlock"
CONF_SECURE_DEVICES_PIN = "secure_devices_pin"
DEFAULT_EXPOSE_BY_DEFAULT = True
DEFAULT_EXPOSED_DOMAINS = [
"climate",
"cover",
"fan",
"group",
"input_boolean",
"light",
"media_player",
"scene",
"script",
"switch",
"vacuum",
"lock",
"binary_sensor",
"sensor",
]
PREFIX_TYPES = "action.devices.types."
TYPE_CAMERA = PREFIX_TYPES + "CAMERA"
TYPE_LIGHT = PREFIX_TYPES + "LIGHT"
TYPE_SWITCH = PREFIX_TYPES + "SWITCH"
TYPE_VACUUM = PREFIX_TYPES + "VACUUM"
TYPE_SCENE = PREFIX_TYPES + "SCENE"
TYPE_FAN = PREFIX_TYPES + "FAN"
TYPE_THERMOSTAT = PREFIX_TYPES + "THERMOSTAT"
TYPE_LOCK = PREFIX_TYPES + "LOCK"
TYPE_BLINDS = PREFIX_TYPES + "BLINDS"
TYPE_GARAGE = PREFIX_TYPES + "GARAGE"
TYPE_OUTLET = PREFIX_TYPES + "OUTLET"
TYPE_SENSOR = PREFIX_TYPES + "SENSOR"
TYPE_DOOR = PREFIX_TYPES + "DOOR"
TYPE_TV = PREFIX_TYPES + "TV"
TYPE_SPEAKER = PREFIX_TYPES + "SPEAKER"
SERVICE_REQUEST_SYNC = "request_sync"
HOMEGRAPH_URL = "https://homegraph.googleapis.com/"
REQUEST_SYNC_BASE_URL = HOMEGRAPH_URL + "v1/devices:requestSync"
# Error codes used for SmartHomeError class
# https://developers.google.com/actions/reference/smarthome/errors-exceptions
ERR_DEVICE_OFFLINE = "deviceOffline"
ERR_DEVICE_NOT_FOUND = "deviceNotFound"
ERR_VALUE_OUT_OF_RANGE = "valueOutOfRange"
ERR_NOT_SUPPORTED = "notSupported"
ERR_PROTOCOL_ERROR = "protocolError"
ERR_UNKNOWN_ERROR = "unknownError"
ERR_FUNCTION_NOT_SUPPORTED = "functionNotSupported"
ERR_CHALLENGE_NEEDED = "challengeNeeded"
ERR_CHALLENGE_NOT_SETUP = "challengeFailedNotSetup"
ERR_TOO_MANY_FAILED_ATTEMPTS = "tooManyFailedAttempts"
ERR_PIN_INCORRECT = "pinIncorrect"
ERR_USER_CANCELLED = "userCancelled"
# Event types
EVENT_COMMAND_RECEIVED = "google_assistant_command"
EVENT_QUERY_RECEIVED = "google_assistant_query"
EVENT_SYNC_RECEIVED = "google_assistant_sync"
# Default Google device type for each Home Assistant domain; overridden per
# device class by DEVICE_CLASS_TO_GOOGLE_TYPES.  (Two entries garbled by
# dataset separators have been reconstructed.)
DOMAIN_TO_GOOGLE_TYPES = {
    camera.DOMAIN: TYPE_CAMERA,
    climate.DOMAIN: TYPE_THERMOSTAT,
    cover.DOMAIN: TYPE_BLINDS,
    fan.DOMAIN: TYPE_FAN,
    group.DOMAIN: TYPE_SWITCH,
    input_boolean.DOMAIN: TYPE_SWITCH,
    light.DOMAIN: TYPE_LIGHT,
    lock.DOMAIN: TYPE_LOCK,
    media_player.DOMAIN: TYPE_SWITCH,
    scene.DOMAIN: TYPE_SCENE,
    script.DOMAIN: TYPE_SCENE,
    switch.DOMAIN: TYPE_SWITCH,
    vacuum.DOMAIN: TYPE_VACUUM,
}
# More specific Google device type keyed by (domain, device_class); takes
# precedence over the per-domain default in DOMAIN_TO_GOOGLE_TYPES.
DEVICE_CLASS_TO_GOOGLE_TYPES = {
    (cover.DOMAIN, cover.DEVICE_CLASS_GARAGE): TYPE_GARAGE,
    (cover.DOMAIN, cover.DEVICE_CLASS_DOOR): TYPE_DOOR,
    (switch.DOMAIN, switch.DEVICE_CLASS_SWITCH): TYPE_SWITCH,
    (switch.DOMAIN, switch.DEVICE_CLASS_OUTLET): TYPE_OUTLET,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_DOOR): TYPE_DOOR,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_GARAGE_DOOR): TYPE_GARAGE,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_LOCK): TYPE_SENSOR,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_OPENING): TYPE_SENSOR,
    (binary_sensor.DOMAIN, binary_sensor.DEVICE_CLASS_WINDOW): TYPE_SENSOR,
    (media_player.DOMAIN, media_player.DEVICE_CLASS_TV): TYPE_TV,
    (media_player.DOMAIN, media_player.DEVICE_CLASS_SPEAKER): TYPE_SPEAKER,
    (sensor.DOMAIN, sensor.DEVICE_CLASS_TEMPERATURE): TYPE_SENSOR,
}
CHALLENGE_ACK_NEEDED = "ackNeeded"
CHALLENGE_PIN_NEEDED = "pinNeeded"
CHALLENGE_FAILED_PIN_NEEDED = "challengeFailedPinNeeded"
|
hfp/tensorflow-xsmm | tensorflow/contrib/distribute/python/monitor.py | Python | apache-2.0 | 2,505 | 0.005988 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Monitor is responsible for training, checkpointing and recovery."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
from tensorflow.python.eager import context
from tensorflow.python.framework import errors
from tensorflow.python.ops import variables
class Monitor(object):
  """Executes training steps, recovers and checkpoints.

  Note that this class is particularly preliminary, experimental, and
  expected to change.
  """
  # TODO(isaprykin): Support step functions that need multiple session calls.
  # TODO(isaprykin): Support extra arguments to the step function.
  # TODO(isaprykin): Support recovery, checkpointing and summaries.

  def __init__(self, step_callable, session=None):
    """Initialize the Monitor with components for executing training steps.

    Args:
      step_callable: a training `Step` that's capable of signaling when done.
      session: a `Session` instance that's needed for graph mode.

    Raises:
      ValueError: if `session` was provided for eager mode or not provided for
        graph mode.
    """
    if context.executing_eagerly():
      if session is not None:
        raise ValueError("Should not provide a `session` in Eager mode.")
      self._run_step = step_callable
    else:
      if session is None:
        raise ValueError("Should provide a `session` in Graph mode.")
      # The step's input iterator must be initialized before the first call.
      session.run(step_callable._iterator.initializer)  # pylint: disable=protected-access
      # Build the step graph once and wrap it as a fast session callable.
      self._run_step = session.make_callable(step_callable())
      session.run(variables.global_variables_initializer())

  def run_steps(self, num_steps=None):
    """Run `num_steps` training steps, or until the input is exhausted.

    Args:
      num_steps: number of steps to execute; `None` means run until the
        step's input iterator raises `OutOfRangeError`.
    """
    step = 0
    while num_steps is None or step < num_steps:
      try:
        self._run_step()
        step += 1
      except errors.OutOfRangeError:
        # Input pipeline exhausted; stop cleanly.
        break
|
postlund/home-assistant | homeassistant/components/xiaomi_miio/air_quality.py | Python | apache-2.0 | 7,163 | 0.000419 | """Support for Xiaomi Mi Air Quality Monitor (PM2.5)."""
import logging
from miio import AirQualityMonitor, Device, DeviceException
import voluptuous as vol
from homeassistant.components.air_quality import PLATFORM_SCHEMA, AirQualityEntity
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN
from homeassistant.exceptions import NoEntitySpecifiedError, PlatformNotReady
import homeassistant.helpers.config_validation as cv
from .const import (
MODEL_AIRQUALITYMONITOR_B1,
MODEL_AIRQUALITYMONITOR_S1,
MODEL_AIRQUALITYMONITOR_V1,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Xiaomi Miio Air Quality Monitor"
ATTR_CO2E = "carbon_dioxide_equivalent"
ATTR_TVOC = "total_volatile_organic_compounds"
ATTR_TEMP = "temperature"
ATTR_HUM = "humidity"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
PROP_TO_ATTR = {
"carbon_dioxide_equivalent": ATTR_CO2E,
"total_volatile_organic_compounds": ATTR_TVOC,
"temperature": ATTR_TEMP,
"humidity": ATTR_HUM,
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the sensor from config.

    Queries the miio device for its model and registers the entity class
    matching that hardware revision.
    """
    host = config[CONF_HOST]
    token = config[CONF_TOKEN]
    name = config[CONF_NAME]

    # Log only a token prefix to avoid leaking the full credential.
    _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5])

    miio_device = Device(host, token)

    try:
        # miio calls are blocking; run them in the executor.
        device_info = await hass.async_add_executor_job(miio_device.info)
    except DeviceException:
        # Device unreachable right now; Home Assistant will retry setup later.
        raise PlatformNotReady

    model = device_info.model
    unique_id = f"{model}-{device_info.mac_address}"
    _LOGGER.debug(
        "%s %s %s detected",
        model,
        device_info.firmware_version,
        device_info.hardware_version,
    )
    device = AirQualityMonitor(host, token, model=model)

    # Pick the entity class matching the hardware revision.
    if model == MODEL_AIRQUALITYMONITOR_S1:
        entity = AirMonitorS1(name, device, unique_id)
    elif model == MODEL_AIRQUALITYMONITOR_B1:
        entity = AirMonitorB1(name, device, unique_id)
    elif model == MODEL_AIRQUALITYMONITOR_V1:
        entity = AirMonitorV1(name, device, unique_id)
    else:
        raise NoEntitySpecifiedError(f"Not support for entity {unique_id}")

    async_add_entities([entity], update_before_add=True)
class AirMonitorB1(AirQualityEntity):
    """Air Quality class for Xiaomi cgllc.airmonitor.b1 device."""
    def __init__(self, name, device, unique_id):
        """Initialize the entity."""
        self._name = name
        self._device = device
        self._unique_id = unique_id
        self._icon = "mdi:cloud"
        self._unit_of_measurement = "μg/m3"
        # None until the first update attempt; True/False afterwards.
        self._available = None
        # Sensor readings; all None until async_update succeeds.
        self._air_quality_index = None
        self._carbon_dioxide = None
        self._carbon_dioxide_equivalent = None
        self._particulate_matter_2_5 = None
        self._total_volatile_organic_compounds = None
        self._temperature = None
        self._humidity = None
    async def async_update(self):
        """Fetch state from the miio device."""
        try:
            # device.status() is blocking, so run it in the executor.
            state = await self.hass.async_add_executor_job(self._device.status)
            _LOGGER.debug("Got new state: %s", state)
            self._carbon_dioxide_equivalent = state.co2e
            self._particulate_matter_2_5 = round(state.pm25, 1)
            self._total_volatile_organic_compounds = round(state.tvoc, 3)
            self._temperature = round(state.temperature, 2)
            self._humidity = round(state.humidity, 2)
            self._available = True
        except DeviceException as ex:
            # Keep the last readings but mark the entity unavailable.
            self._available = False
            _LOGGER.error("Got exception while fetching the state: %s", ex)
    @property
    def name(self):
        """Return the name of this entity, if any."""
        return self._name
    @property
    def icon(self):
        """Return the icon to use for device if any."""
        return self._icon
    @property
    def available(self):
        """Return true when state is known."""
        return self._available
    @property
    def unique_id(self):
        """Return the unique ID."""
        return self._unique_id
    @property
    def air_quality_index(self):
        """Return the Air Quality Index (AQI)."""
        return self._air_quality_index
    @property
    def carbon_dioxide(self):
        """Return the CO2 (carbon dioxide) level."""
        return self._carbon_dioxide
    @property
    def carbon_dioxide_equivalent(self):
        """Return the CO2e (carbon dioxide equivalent) level."""
        return self._carbon_dioxide_equivalent
    @property
    def particulate_matter_2_5(self):
        """Return the particulate matter 2.5 level."""
        return self._particulate_matter_2_5
    @property
    def total_volatile_organic_compounds(self):
        """Return the total volatile organic compounds."""
        return self._total_volatile_organic_compounds
    @property
    def temperature(self):
        """Return the current temperature."""
        return self._temperature
    @property
    def humidity(self):
        """Return the current humidity."""
        return self._humidity
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        data = {}
        # Only expose attributes that currently have a reading.
        for prop, attr in PROP_TO_ATTR.items():
            value = getattr(self, prop)
            if value is not None:
                data[attr] = value
        return data
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measurement
class AirMonitorS1(AirMonitorB1):
    """Air Quality class for Xiaomi cgllc.airmonitor.s1 device."""

    async def async_update(self):
        """Fetch state from the miio device."""
        # Fixed: a stray " |" token after ``self._available = True`` made this
        # module unparsable.
        try:
            state = await self.hass.async_add_executor_job(self._device.status)
            _LOGGER.debug("Got new state: %s", state)
            # The S1 reports CO2 directly and values are stored unrounded,
            # unlike the B1 which reports CO2e and rounds.
            self._carbon_dioxide = state.co2
            self._particulate_matter_2_5 = state.pm25
            self._total_volatile_organic_compounds = state.tvoc
            self._temperature = state.temperature
            self._humidity = state.humidity
            self._available = True
        except DeviceException as ex:
            self._available = False
            _LOGGER.error("Got exception while fetching the state: %s", ex)
class AirMonitorV1(AirMonitorB1):
    """Air Quality class for Xiaomi cgllc.airmonitor.v1 device."""
    # Fixed: docstring previously said "s1" (copy/paste), and the logger name
    # was corrupted ("_LOGG | ER").

    async def async_update(self):
        """Fetch state from the miio device."""
        try:
            state = await self.hass.async_add_executor_job(self._device.status)
            _LOGGER.debug("Got new state: %s", state)
            # The V1 only exposes an air quality index.
            self._air_quality_index = state.aqi
            self._available = True
        except DeviceException as ex:
            self._available = False
            _LOGGER.error("Got exception while fetching the state: %s", ex)

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        # AQI is dimensionless, so the inherited "μg/m3" does not apply.
        return None
|
LuanP/futebolistica | futebolistica/games/models.py | Python | gpl-2.0 | 1,773 | 0.000564 | # -*- coding: utf-8 -*-
from django.db import models
from django.core.urlresolvers import reverse
class Stadium(models.Model):
    """A stadium where games are played; names are globally unique."""
    name = models.CharField(max_length=200, unique=True)
    def __unicode__(self):
        return self.name
class Judge(models.Model):
    """A match official (referee); names are globally unique."""
    name = models.CharField(max_length=200, unique=True)
    def __unicode__(self):
        return self.name
class Game(models.Model):
    """A match between two teams within a league round.

    Fixes applied:
    - ``__unicode__`` read ``team_home_score`` for both sides (copy/paste bug).
    - A legitimate score of 0 rendered as '' because 0 is falsy; compare
      against ``None`` instead.
    - ``save()`` contained corrupted tokens in the slug format call.
    """

    stadium = models.ForeignKey('Stadium', null=True, blank=True)
    judge = models.ForeignKey('Judge', null=True, blank=True)
    date = models.DateTimeField()
    team_home = models.ForeignKey('teams.Team', related_name=u'team_one')
    team_away = models.ForeignKey('teams.Team', related_name=u'team_two')
    team_home_score = models.PositiveSmallIntegerField(null=True, blank=True)
    team_away_score = models.PositiveSmallIntegerField(null=True, blank=True)
    game_round = models.ForeignKey('leagues.Round')
    long_slug = models.TextField(unique=True, null=True, blank=True)

    class Meta:
        # A stadium or judge cannot host/officiate two games at the same time.
        unique_together = (
            ('stadium', 'date'),
            ('judge', 'date')
        )

    def __unicode__(self):
        score_1 = self.team_home_score
        score_2 = self.team_away_score
        return u'{} {} x {} {}'.format(
            self.team_home.abbr, score_1 if score_1 is not None else '',
            score_2 if score_2 is not None else '', self.team_away.abbr,
        )

    def save(self, *args, **kwargs):
        # Build a stable URL slug: <league>/<round>/<home>-vs-<away>.
        self.long_slug = u'{}/{}/{}-vs-{}'.format(
            self.game_round.league.slug,
            self.game_round.slug,
            self.team_home.abbr,
            self.team_away.abbr
        )
        super(Game, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return reverse('games:detail', args=(self.long_slug, ))
tech-server/gondul | templating/templating.py | Python | gpl-2.0 | 3,215 | 0.002799 | #!/usr/bin/python3
import argparse
import traceback
import sys
import netaddr
import requests
from flask import Flask, request
from jinja2 import Environment, FileSystemLoader, TemplateNotFound
# Gondul API endpoints mirrored into the local cache (space-separated for brevity).
endpoints = "read/networks read/oplog read/snmp read/switches-management public/distro-tree public/config public/dhcp public/dhcp-summary public/ping public/switches public/switch-state".split()
# Cache of endpoint name -> last fetched JSON payload; refreshed per request.
objects = {}
def getEndpoint(endpoint):
    """Fetch a single gondul API endpoint and return the decoded JSON."""
    response = requests.get("http://localhost:80/api/{}".format(endpoint))
    if response.status_code != 200:
        raise Exception("Bad status code for endpoint {}: {}".format(endpoint, response.status_code))
    return response.json()
def updateData():
    """Refresh the cached JSON payload for every known API endpoint."""
    for endpoint in endpoints:
        objects[endpoint] = getEndpoint(endpoint)
env = Environment(loader=FileSystemLoader([]), trim_blocks=True)

# Jinja filters for address math (netaddr) and agent bookkeeping.
env.filters["netmask"] = lambda ip: netaddr.IPNetwork(ip).netmask
env.filters["cidr"] = lambda ip: netaddr.IPNetwork(ip).prefixlen
env.filters["networkId"] = lambda ip: netaddr.IPNetwork(ip).ip
env.filters["getFirstDhcpIp"] = lambda ip: netaddr.IPNetwork(ip)[3]
env.filters["getLastDhcpIp"] = lambda ip: netaddr.IPNetwork(ip)[-1]
env.filters["agentDistro"] = lambda src: src.split(":")[0]
env.filters["agentPort"] = lambda src: src.split(":")[1]
# Bug fix: use floor division -- ``size / 2`` is a float under Python 3 and
# netaddr rejects float indices with a TypeError.
env.filters["getFirstFapIP"] = lambda ip: netaddr.IPNetwork(ip)[netaddr.IPNetwork(ip).size // 2]
app = Flask(__name__)

@app.after_request
def add_header(response):
    """Attach a short cache lifetime to successful responses.

    max_age applies to clients, s_maxage to shared caches/proxies.
    """
    # Fixed: the status comparison was corrupted ("20 | 0" -> 200).
    if response.status_code == 200:
        response.cache_control.max_age = 5
        response.cache_control.s_maxage = 1
    return response
@app.route("/<path>", methods=["GET"])
def root_get(path):
    """Render an on-disk template against freshly fetched gondul data.

    Returns 404 for unknown templates and 400 (with a transcript) for
    templates that fail to render.
    """
    updateData()
    try:
        template = env.get_template(path)
        body = template.render(objects=objects, options=request.args)
    except TemplateNotFound:
        return 'Template "{}" not found\n'.format(path), 404
    except Exception as err:
        # Fixed: corrupted tokens inside this error-string literal.
        return 'Templating of "{}" failed to render. Most likely due to an error in the template. Error transcript:\n\n{}\n----\n\n{}\n'.format(path, err, traceback.format_exc()), 400
    return body, 200
@app.route("/<path>", methods=["POST"])
def root_post(path):
    """Render a template supplied in the request body against fresh gondul data."""
    updateData()
    try:
        length = int(request.headers["Content-Length"])
        raw = request.stream.read(length)
        template = env.from_string(raw.decode("utf-8"))
        body = template.render(objects=objects, options=request.args)
    except Exception as err:
        return 'Templating of "{}" failed to render. Most likely due to an error in the template. Error transcript:\n\n{}\n----\n\n{}\n'.format(path, err, traceback.format_exc()), 400
    return body, 200
parser = argparse.ArgumentParser(description="Process templates for gondul.", add_help=False)
parser.add_argument("-t", "--templates", type=str, nargs="+", help="location of templates")
parser.add_argument("-h", "--host", type=str, default="127.0.0.1", help="host address")
parser.add_argument("-p", "--port", type=int, default=8080, help="host port")
parser.add_argument("-d", "--debug", action="store_true", help="enable debug mode")

args = parser.parse_args()
env.loader.searchpath = args.templates

if not sys.argv[1:]:
    # Bug fix: previously the help text was printed but the server started
    # anyway (with no template directories configured). Bail out instead.
    parser.print_help()
    sys.exit(1)
app.run(host=args.host, port=args.port, debug=args.debug)
|
CarlFK/wafer | wafer/talks/views.py | Python | isc | 3,564 | 0 | from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied, ValidationError
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.views.generic import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.views.generic.list import ListView
from django.conf import settings
from wafer.talks.models import Talk, ACCEPTED
from wafer.talks.forms import TalkForm
class EditOwnTalksMixin(object):
    '''Users can edit their own talks as long as the talk is
    "Under Consideration"'''
    def get_object(self, *args, **kwargs):
        # Delegate lookup to the view, then gate access on ownership/state.
        object_ = super(EditOwnTalksMixin, self).get_object(*args, **kwargs)
        if object_.can_edit(self.request.user):
            return object_
        else:
            # 403 rather than 404: the talk exists, the user may not edit it.
            raise PermissionDenied
class LoginRequiredMixin(object):
    '''Must be logged in'''
    # Wraps dispatch so every HTTP method on the view requires authentication.
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(LoginRequiredMixin, self).dispatch(*args, **kwargs)
class UsersTalks(ListView):
    """Paginated talk list: everything for privileged users, accepted only otherwise."""
    # Fixed: "Talk.obj | ects" corruption in the final queryset expression.

    template_name = 'wafer.talks/talks.html'
    paginate_by = 25

    def get_queryset(self):
        # self.request will be None when we come here via the static site
        # renderer
        if (self.request and Talk.can_view_all(self.request.user)):
            return Talk.objects.all()
        return Talk.objects.filter(status=ACCEPTED)
class TalkView(DetailView):
    """Detail page for a single talk, visible per Talk.can_view rules."""
    # Fixed: "ra | ise" corruption in get_object.

    template_name = 'wafer.talks/talk.html'
    model = Talk

    def get_object(self, *args, **kwargs):
        '''Only talk owners can see talks, unless they've been accepted'''
        object_ = super(TalkView, self).get_object(*args, **kwargs)
        if object_.can_view(self.request.user):
            return object_
        else:
            raise PermissionDenied

    def get_context_data(self, **kwargs):
        context = super(TalkView, self).get_context_data(**kwargs)
        # Template uses this to show/hide the edit controls.
        context['can_edit'] = self.object.can_edit(self.request.user)
        return context
class TalkCreate(LoginRequiredMixin, CreateView):
    """Submission form for a new talk; requires login and open submissions."""
    model = Talk
    form_class = TalkForm
    template_name = 'wafer.talks/talk_form.html'
    def get_context_data(self, **kwargs):
        context = super(TalkCreate, self).get_context_data(**kwargs)
        # Template uses this to disable the submit button when closed.
        context['can_submit'] = getattr(settings, 'WAFER_TALKS_OPEN', True)
        return context
    def form_valid(self, form):
        # Re-check server-side: the form may have been rendered while open.
        if not getattr(settings, 'WAFER_TALKS_OPEN', True):
            raise ValidationError  # Should this be SuspiciousOperation?
        # Eaaargh we have to do the work of CreateView if we want to set values
        # before saving
        self.object = form.save(commit=False)
        self.object.corresponding_author = self.request.user
        self.object.save()
        # Save the author information as well (many-to-many fun)
        form.save_m2m()
        return HttpResponseRedirect(self.get_success_url())
class TalkUpdate(EditOwnTalksMixin, UpdateView):
    """Edit form for a talk; EditOwnTalksMixin restricts it to the owner."""
    model = Talk
    form_class = TalkForm
    template_name = 'wafer.talks/talk_form.html'
    def get_context_data(self, **kwargs):
        context = super(TalkUpdate, self).get_context_data(**kwargs)
        context['can_edit'] = self.object.can_edit(self.request.user)
        return context
class TalkDelete(EditOwnTalksMixin, DeleteView):
    """Delete confirmation for a talk; restricted to the owner by the mixin."""
    model = Talk
    template_name = 'wafer.talks/talk_delete.html'
    success_url = reverse_lazy('wafer_page', args=('index',))
|
apple/coremltools | coremltools/converters/mil/frontend/torch/test/test_internal_graph.py | Python | bsd-3-clause | 67,606 | 0.001686 | # Copyright (c) 2020, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import itertools
import numpy as np
import pytest
torch = pytest.importorskip("torch")
import torch.nn as nn
import torch.nn.functional as F
from coremltools.converters.mil.mil import types
from coremltools.converters.mil.mil import Builder as mb
from coremltools.converters.mil.mil import Function, get_new_symbol
from coremltools.converters.mil.testing_utils import random_gen
from .. import ops
from ..converter import TranscriptionContext
from ..internal_graph import InternalTorchIRNode
class TestTorchOps:
"""Class containing tests for converting TorchIR -> CoreML ops.
These tests interface with only the InternalTorchIRGraph and do not
build a torch module. Thus, they are much faster then the numerical tests.
However, for some ops it is necessary to use the torch module to verify
numerical output so they are placed the numerical tests.
NOTE: Confused where @context is coming from? Its from the pytest fixture defined below.
"""
    @pytest.fixture
    def context(self):
        # Fresh translation context per test so state never leaks between tests.
        return TranscriptionContext()
    @pytest.fixture
    def set_random_seeds(self):
        # Pin torch/numpy RNGs so tests that opt in are reproducible.
        torch.manual_seed(1)
        np.random.seed(1)
    @pytest.mark.parametrize("dtype", [torch.bool, torch.float, torch.int])
    def test_constant(self, context, dtype):
        # A constant node should convert to an ssa Var preserving value and
        # shape for each supported dtype.
        test_data = torch.ones(1, dtype=dtype)
        node = InternalTorchIRNode(
            attr={"value": test_data}, kind="constant", inputs=[], outputs=["1"]
        )
        ssa = self._construct_test_graph(context, ops.constant, node, "1")
        assert np.allclose(test_data, ssa.val)
        assert test_data.shape == ssa.shape
def test_constant_magic(self, cont | ext):
test_val = ops.PYTORCH_MAGIC_DEFAULT
node = InternalTorchIRNode(
attr={"value": test_val}, kind="constant", inputs=[], outputs=["1"]
)
ssa = self._construct_test_graph(context, ops.constant, node, "1")
# We expect the magic default to get converted to None
assert ssa is None
@staticmethod
def _gen_constants(size, vals):
"""Helper function. Generates a | list of internal constant nodes.
Arguments:
size: number of constants to generate
vals: Either a list of values for each constant or one value used for all constants."""
is_list = isinstance(vals, list)
if is_list:
if len(vals) != size:
raise ValueError("len(@vals): {} != size: {}".format(len(vals), size))
constants = []
for index in range(size):
if is_list:
val = vals[index]
else:
val = vals
constants.append(
InternalTorchIRNode(
attr={"value": val},
kind="constant",
inputs=[],
outputs=[str(index)],
)
)
input_list = [str(i) for i in range(size)]
output_name = str(len(input_list))
return constants, input_list, output_name
    @staticmethod
    def _construct_test_graph(
        context, test_op, test_node, output_name=None, graph_inputs=None, constants=None
    ):
        """ Construct an Function for the given @graph_inputs, @constants,
        and @test_node. Returns the output of the graph, which is the ssa
        Var of the given @output_name.
        """
        if graph_inputs is None:
            graph_inputs = {}
        if constants is None:
            constants = []
        with Function(inputs=graph_inputs) as ssa_func:
            # Register graph inputs, then materialize constants, then run the
            # op under test -- mirroring conversion order in the converter.
            for name in ssa_func.inputs.keys():
                context.add(ssa_func.inputs[name])
            for node in constants:
                ops.constant(context, node)
            test_op(context, test_node)
            ssa = None
            if output_name:
                ssa = context[output_name]
        return ssa
    def _test_elementwise_binary(
        self, context, op_name, op, test_input, num_constants, expected_result
    ):
        """Helper function, runs op on test input and compares against expected result"""
        # Feed inputs as constant nodes, then wire them into the binary op.
        constants, input_list, output_name = self._gen_constants(
            num_constants, test_input
        )
        eb_node = InternalTorchIRNode(
            kind=op_name, inputs=input_list, outputs=[output_name]
        )
        ssa = self._construct_test_graph(
            context, op, eb_node, output_name, constants=constants
        )
        np.testing.assert_allclose(expected_result, ssa.val, atol=1e-6)
    def _test_cast(self, context, test_val, op_kind, op_func, python_type):
        # Runs a single-input cast op and checks it matches the Python cast.
        constants, input_list, output_name = self._gen_constants(1, [test_val])
        node = InternalTorchIRNode(
            kind=op_kind, inputs=input_list, outputs=[output_name]
        )
        ssa = self._construct_test_graph(
            context, op_func, node, output_name, constants=constants
        )
        assert ssa.val == python_type(test_val)
    def test_add(self, context):
        # torch.add with an explicit alpha (scale factor) constant of 1.
        test_input_1 = np.random.rand(2, 3)
        test_input_2 = np.random.rand(2, 3)
        scale_factor = 1
        self._test_elementwise_binary(
            context,
            "Add",
            ops.add,
            [test_input_1, test_input_2, scale_factor],
            3,
            test_input_1 + test_input_2,
        )
def test_add_no_scale_factor(self, context):
test_input_1 = np.random.rand(2, 3)
test_input_2 = np.random.rand(2, 3)
self._test_elementwise_binary(
context,
"Add",
ops.add,
[test_input_1, test_input_2],
2,
test_input_1 + test_input_2,
)
@pytest.mark.parametrize(
"test_input_1, test_input_2",
[(np.random.rand(3, 2), np.random.rand(3, 2)), (np.random.rand(3, 2), 5), ],
)
def test_sub(self, context, test_input_1, test_input_2):
scale_factor = 1
self._test_elementwise_binary(
context,
"Sub",
ops.sub,
[test_input_1, test_input_2, scale_factor],
3,
test_input_1 - test_input_2,
)
@pytest.mark.parametrize(
"test_input_1, test_input_2",
[(np.random.rand(3, 2), np.random.rand(3, 2)), (np.random.rand(3, 2), 5), ],
)
def test_rsub(self, context, test_input_1, test_input_2):
scale_factor = 1
self._test_elementwise_binary(
context,
"rsub",
ops.sub,
[test_input_1, test_input_2, scale_factor],
3,
# Note the reversal of arg ordering relative to 'sub'
test_input_2 - test_input_1,
)
def test_mul(self, context):
test_input_1 = np.random.rand(3, 2)
test_input_2 = np.random.rand(3, 2)
self._test_elementwise_binary(
context,
"Mul",
ops.mul,
[test_input_1, test_input_2],
2,
test_input_1 * test_input_2,
)
def test_div(self, context):
test_input_1 = np.random.rand(3, 2)
test_input_2 = np.random.rand(3, 2)
self._test_elementwise_binary(
context,
"Div",
ops.div,
[test_input_1, test_input_2],
2,
np.divide(test_input_1, test_input_2),
)
def test_floor_divide(self, context):
test_input_1 = np.random.randint(low=1, high=100, size=(3, 2))
test_input_2 = np.random.randint(low=1, high=100, size=(3, 2))
self._test_elementwise_binary(
context,
"floor_divide",
ops.floor_divide,
[test_input_1, test_input_2],
2,
np.floor_divide(test_input_1, test_input_2),
)
def test_pow(self, context):
test_input_1 = np.random.rand(3, 2)
test_input_2 = np.random.rand(3, 2)
self._test_elementwise_binary(
conte |
igogorek/allure-python | allure-pytest/src/helper.py | Python | apache-2.0 | 1,026 | 0.002924 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals

import pytest

# Fixed: this import line was corrupted ("import allure_c | ommons").
import allure_commons
from allure_pytest.utils import ALLURE_LABEL_PREFIX, ALLURE_LINK_PREFIX
class AllureTestHelper(object):
    """Bridges allure-commons decoration hooks onto dynamically named pytest marks."""
    # Fixed: "pytest | .mark" corruption in decorate_as_label.

    def __init__(self, config):
        self.config = config

    @allure_commons.hookimpl
    def decorate_as_label(self, label_type, labels):
        """Translate an allure label into an ``allure_label.<type>`` pytest mark."""
        allure_label_marker = '{prefix}.{label_type}'.format(prefix=ALLURE_LABEL_PREFIX, label_type=label_type)
        allure_label = getattr(pytest.mark, allure_label_marker)
        return allure_label(*labels, label_type=label_type)

    @allure_commons.hookimpl
    def decorate_as_link(self, url, link_type, name):
        """Translate an allure link into a pytest mark, expanding URL patterns."""
        allure_link_marker = '{prefix}.{link_type}'.format(prefix=ALLURE_LINK_PREFIX, link_type=link_type)
        # Fall back to the identity pattern when none is configured for this
        # link type.
        pattern = dict(self.config.option.allure_link_pattern).get(link_type, u'{}')
        url = pattern.format(url)
        allure_link = getattr(pytest.mark, allure_link_marker)
        return allure_link(url, name=name, link_type=link_type)
|
UnrememberMe/pants | src/python/pants/backend/jvm/register.py | Python | apache-2.0 | 10,635 | 0.008275 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.jvm.artifact import Artifact
from pants.backend.jvm.ossrh_publication_metadata import (Developer, License,
OSSRHPublicationMetadata, Scm)
from pants.backend.jvm.repository import Repository as repo
from pants.backend.jvm.scala_artifact import ScalaArtifact
from pants.backend.jvm.subsystems.jar_dependency_management import JarDependencyManagementSetup
from pants.backend.jvm.subsystems.scala_platform import ScalaPlatform
from pants.backend.jvm.subsystems.shader import Shading
from pants.backend.jvm.targets.annotation_processor import AnnotationProcessor
from pants.backend.jvm.targets.benchmark import Benchmark
from pants.backend.jvm.targets.credentials import LiteralCredentials, NetrcCredentials
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_agent import JavaAgent
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.javac_plugin import JavacPlugin
from pants.backend.jvm.targets.junit_tests import JUnitTests
from pants.backend.jvm.targets.jvm_app import Bundle, DirectoryReMapper, JvmApp
from pants.backend.jvm.targets.jvm_binary import Duplicate, JarRules, JvmBinary, Skip
from pants.backend.jvm.targets.jvm_prep_command import JvmPrepCommand
from pants.backend.jvm.targets.managed_jar_dependencies import (ManagedJarDependencies,
ManagedJarLibraries)
from pants.backend.jvm.targets.scala_jar_dependency import ScalaJarDependency
from pants.backend.jvm.targets.scala_library import ScalaLibrary
from pants.backend.jvm.targets.scalac_plugin import ScalacPlugin
from pants.backend.jvm.targets.unpacked_jars import UnpackedJars
from pants.backend.jvm.tasks.benchmark_run import BenchmarkRun
from pants.backend.jvm.tasks.binary_create import BinaryCreate
from pants.backend.jvm.tasks.bootstrap_jvm_tools import BootstrapJvmTools
from pants.backend.jvm.tasks.bundle_create import BundleCreate
from pants.backend.jvm.tasks.check_published_deps import CheckPublishedDeps
from pants.backend.jvm.tasks.checkstyle import Checkstyle
from pants.backend.jvm.tasks.classmap import ClassmapTask
from pants.backend.jvm.tasks.consolidate_classpath import ConsolidateClasspath
from pants.backend.jvm.tasks.coursier_resolve import CoursierResolve
from pants.backend.jvm.tasks.detect_duplicates import DuplicateDetector
from pants.backend.jvm.tasks.ivy_imports import IvyImports
from pants.backend.jvm.tasks.ivy_outdated import IvyOutdated
from pants.backend.jvm.tasks.ivy_resolve import IvyResolve
from pants.backend.jvm.tasks.jar_create import JarCreate
from pants.backend.jvm.tasks.jar_publish import JarPublish
from pants.backend.jvm.tasks.javadoc_gen import JavadocGen
from pants.backend.jvm.tasks.junit_run import JUnitRun
from pants.backend.jvm.tasks.jvm_compile.jvm_classpath_publisher import RuntimeClasspathPublisher
from pants.backend.jvm.tasks.jvm_compile.zinc.zinc_compile import ZincCompile
from pants.backend.jvm.tasks.jvm_dependency_check import JvmDependencyCheck
from pants.backend.jvm.tasks.jvm_dependency_usage import JvmDependencyUsage
from pants.backend.jvm.tasks.jvm_platform_analysis import JvmPlatformExplain, JvmPlatformValidate
from pants.backend.jvm.tasks.jvm_run import JvmRun
from pants.backend.jvm.tasks.nailgun_task import NailgunKillall
from pants.backend.jvm.tasks.prepare_resources import PrepareResources
from pants.backend.jvm.tasks.prepare_services import PrepareServices
from pants.backend.jvm.tasks.provide_tools_jar import ProvideToolsJar
from pants.backend.jvm.tasks.run_jvm_prep_command import (RunBinaryJvmPrepCommand,
RunCompileJvmPrepCommand,
RunTestJvmPrepCommand)
from pants.backend.jvm.tasks.scala_repl import ScalaRepl
from pants.backend.jvm.tasks.scaladoc_gen import ScaladocGen
from pants.backend.jvm.tasks.scalafix import ScalaFixCheck, ScalaFixFix
from pants.backend.jvm.tasks.scalafmt import ScalaFmtCheckFormat, ScalaFmtFormat
from pants.backend.jvm.tasks.scalastyle import Scalastyle
from pants.backend.jvm.tasks.unpack_jars import UnpackJars
from pants.base.deprecated import warn_or_error
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.goal import Goal
from pants.goal.task_registrar import TaskRegistrar as task
from pants.java.jar.exclude import Exclude
from pants.java.jar.jar_dependency import JarDependencyParseContextWrapper
class DeprecatedJavaTests(JUnitTests):
  """Deprecated alias for junit_tests; warns per target at parse time."""
  def __init__(self, *args, **kwargs):
    super(DeprecatedJavaTests, self).__init__(*args, **kwargs)
    # Warns now; becomes an error once the removal version is reached.
    warn_or_error('1.4.0.dev0',
                  'java_tests(...) target type',
                  'Use junit_tests(...) instead for target {}.'.format(self.address.spec))
def build_file_aliases():
  """Register the JVM backend's BUILD-file symbols: targets, objects, factories."""
  return BuildFileAliases(
    targets={
      'annotation_processor': AnnotationProcessor,
      'benchmark': Benchmark,
      'credentials': LiteralCredentials,
      'jar_library': JarLibrary,
      'java_agent': JavaAgent,
      'java_library': JavaLibrary,
      'javac_plugin': JavacPlugin,
      'java_tests': DeprecatedJavaTests,
      'junit_tests': JUnitTests,
      'jvm_app': JvmApp,
      'jvm_binary': JvmBinary,
      'jvm_prep_command' : JvmPrepCommand,
      'managed_jar_dependencies' : ManagedJarDependencies,
      'netrc_credentials': NetrcCredentials,
      'scala_library': ScalaLibrary,
      'scalac_plugin': ScalacPlugin,
      'unpacked_jars': UnpackedJars,
    },
    objects={
      'artifact': Artifact,
      'scala_artifact': ScalaArtifact,
      'ossrh': OSSRHPublicationMetadata,
      'license': License,
      'scm': Scm,
      'developer': Developer,
      'github': Scm.github,
      'DirectoryReMapper': DirectoryReMapper,
      'Duplicate': Duplicate,
      'exclude': Exclude,
      'scala_jar': ScalaJarDependency,
      'jar_rules': JarRules,
      'repository': repo,
      'Skip': Skip,
      'shading_relocate': Shading.create_relocate,
      'shading_exclude': Shading.create_exclude,
      'shading_keep': Shading.create_keep,
      'shading_zap': Shading.create_zap,
      'shading_relocate_package': Shading.create_relocate_package,
      'shading_exclude_package': Shading.create_exclude_package,
      'shading_keep_package': Shading.create_keep_package,
      'shading_zap_package': Shading.create_zap_package,
    },
    context_aware_object_factories={
      'bundle': Bundle,
      'jar': JarDependencyParseContextWrapper,
      'managed_jar_libraries': ManagedJarLibraries,
    }
  )
def global_subsystems():
  """Subsystems this backend requires to be configured globally."""
  return (ScalaPlatform,)
# TODO https://github.com/pantsbuild/pants/issues/604 register_goals
def register_goals():
ng_killall = task(name='ng-killall', action=NailgunKillall)
ng_killall.install()
Goal.by_name('invalidate').install(ng_killall, first=True)
Goal.by_name('clean-all').install(ng_killall, first=True)
task(name='jar-dependency-management', action=JarDependencyManagementSetup).install('bootstrap')
task(name='jvm-platform-explain', action=JvmPlatformExplain).install('jvm-platform-explain')
task(name='jvm-platform-validate', action=JvmPlatformValidate).install | ('jvm-platform-validate')
task(name='bootstrap-jvm-tools', action=BootstrapJvmTools).install('bootstrap' | )
task(name='provide-tools-jar', action=ProvideToolsJar).install('bootstrap')
# Compile
task(name='zinc', action=ZincCompile).install('compile')
# Dependency resolution.
task(name='ivy', action=IvyResolve).install('resolve', first=True)
task(name='coursier', action=CoursierResolve).install('resolve')
task(name='ivy-imports', action=IvyImports).install('imports')
task(name='unpack-jars', action=UnpackJars).install()
task(name='ivy', action=IvyOutdated).install('outdated')
# Re |
codeforeurope/Change-By-Us | giveaminute/migrations/versions/004_Add_a_city_leader_model.py | Python | agpl-3.0 | 875 | 0.003429 | """
:copyright: (c) 2011 Local Projects, all rights reserved
:license: Affero GNU GPL v3, see LICENSE for more details.
"""
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
    """Create the community_leader table."""
    # Fixed: the trailing create call was corrupted ("create | ()").
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata.
    meta = MetaData(migrate_engine)

    communityleader = Table('community_leader', meta,
        Column('id', Integer, primary_key=True),
        Column('display_name', String(256)),
        Column('title', String(256)),
        Column('image_path', String(256)),
        Column('order', Integer),
    )
    communityleader.create()
def downgrade(migrate_engine):
    """Drop the community_leader table created by upgrade()."""
    # Operations to reverse the above upgrade go here.
    meta = MetaData(migrate_engine)
    # autoload=True reflects the existing table definition from the database.
    communityleader = Table('community_leader', meta, autoload=True)
    communityleader.drop()
|
MMeent/MANTHIS-terminal | Terminal/ActiveTileHandler.py | Python | lgpl-3.0 | 641 | 0.00156 | __author__ = 'Matthias'
from tkinter import *
from Terminal.Items.ItemStock import ItemStock
from Terminal.Items.Item import Item
class ActiveTileHandler:
    """Tracks the item tile currently selected in the terminal UI."""
    # Fixed: the class name was corrupted ("Ac | tive | TileHandler"), which
    # broke the module and the import in the package __init__.

    def __init__(self):
        # Start with a placeholder tile so callers never see None.
        self.active_tile = ItemStock(Item("", 0), 1).create_item_tile(None)

    def set_tile(self, tile: Frame):
        """Make *tile* the active tile and sync the window slider to its amount."""
        tile.winfo_toplevel().slider.set(tile.item_stock.get_amount())
        self.active_tile = tile

    def set(self, amount: int):
        """Set the stock amount of the active tile."""
        self.active_tile.item_stock.set_amount(amount)

    def get(self):
        """Return the currently active tile."""
        return self.active_tile

    def add(self, amount: int):
        """Add *amount* to the active tile's stock and refresh its widget."""
        self.get().item_stock.add(amount)
        self.get().update()
uclouvain/OSIS-Louvain | base/migrations/0458_auto_20190613_1614.py | Python | agpl-3.0 | 471 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2019-06-13 16:14
# Fixed: the __future__ import was corrupted ("import | unicode_literals").
from __future__ import unicode_literals

from django.db import migrations
class Migration(migrations.Migration):
    """Restrict AcademicCalendar to one entry per (academic_year, title)."""
    # Fixed: the field name in unique_together was corrupted
    # ("academic | _year").

    dependencies = [
        ('base', '0457_message_template_egys_automatic_postponement_update'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='academiccalendar',
            unique_together=set([('academic_year', 'title')]),
        ),
    ]
|
envoyproxy/envoy | tools/protoxform/protoprint.py | Python | apache-2.0 | 31,369 | 0.003379 | # FileDescriptorProtos pretty-printer tool.
#
# protoprint.py provides the canonical .proto formatting for the Envoy APIs.
#
# See https://github.com/google/protobuf/blob/master/src/google/protobuf/descriptor.proto
# for the underlying protos mentioned in this file.
#
# Usage: protoprint.py <source file path> <type database path> <load type db path>
# <api version file path>
from collections import deque
import copy
import functools
import io
import os
import pathlib
import re
import subprocess
import sys
from tools.api_proto_plugin import annotations, traverse, visitor
from tools.api_versioning import utils as api_version_utils
from tools.protoxform import options as protoxform_options, utils
from tools.type_whisperer import type_whisperer, types_pb2
from google.protobuf import descriptor_pb2
from google.protobuf import text_format
from envoy.annotations import deprecation_pb2
from udpa.annotations import migrate_pb2, status_pb2
from xds.annotations.v3 import status_pb2 as xds_status_pb2
# Messages whose next free field number is at or below this threshold do not
# get a [#next-free-field] annotation (the hint is only useful on big messages).
NEXT_FREE_FIELD_MIN = 5
# NOTE: "UNAVIALABLE" typo is preserved -- the name is referenced elsewhere.
ENVOY_DEPRECATED_UNAVIALABLE_NAME = 'DEPRECATED_AND_UNAVAILABLE_DO_NOT_USE'
class ProtoPrintError(Exception):
    """Base error class for the protoprint module.

    Raise (or subclass) this for all protoprint-specific failures.
    """
def extract_clang_proto_style(clang_format_text):
    """Extract a key:value dictionary for proto formatting.

    Scans a multi-section .clang-format file for the 'Language: Proto' section
    and collects its simple key:value settings until the first non key:value
    line.

    Args:
        clang_format_text: text from a .clang-format file.

    Returns:
        key:value dictionary suitable for passing to clang-format --style.
    """
    lang = None
    format_dict = {}
    for line in clang_format_text.split('\n'):
        if lang is None or lang != 'Proto':
            # Still searching for the Proto section header.
            # Fixed: regexes are now raw strings; '\s'/'\w' in plain strings
            # are invalid escape sequences (DeprecationWarning, and a syntax
            # error in future Python versions).
            match = re.match(r'Language:\s+(\w+)', line)
            if match:
                lang = match.group(1)
            continue
        match = re.match(r'(\w+):\s+(\w+)', line)
        if match:
            key, value = match.groups()
            format_dict[key] = value
        else:
            # First non key:value line terminates the Proto section.
            break
    return str(format_dict)
# Ensure we are using the canonical clang-format proto style.
# NOTE(review): assumes the process CWD contains .clang-format (repo root) --
# TODO confirm for all invocation paths.
CLANG_FORMAT_STYLE = extract_clang_proto_style(pathlib.Path('.clang-format').read_text())
def clang_format(contents):
    """Run proto-style oriented clang-format over given string.

    Args:
        contents: a string with proto contents.

    Returns:
        clang-formatted string
    """
    # Allow overriding the binary (e.g. in CI images); default to v11.
    clang_format_path = os.getenv("CLANG_FORMAT", "clang-format-11")
    # --assume-filename makes clang-format treat stdin as a .proto file.
    return subprocess.run(
        [clang_format_path,
         '--style=%s' % CLANG_FORMAT_STYLE, '--assume-filename=.proto'],
        input=contents.encode('utf-8'),
        stdout=subprocess.PIPE).stdout
def format_block(block):
    """Terminate a non-empty .proto section (comment, message definition,
    etc.) with a newline.

    Args:
        block: a string representing the section.

    Returns:
        The section followed by '\\n', or '' when the section is blank.
    """
    return block + '\n' if block.strip() else ''
def format_comments(comments):
    """Format a list of comment blocks from SourceCodeInfo.

    Prefixes // to each line, separates blocks by spaces.

    Args:
        comments: a list of blocks, each block is a list of strings representing
            lines in each block.

    Returns:
        A string representing the formatted comment blocks.
    """
    # TODO(htuch): not sure why this is needed, but clang-format does some weird
    # stuff with // comment indents when we have these trailing \
    def strip_trailing_backslash(line):
        if line.endswith('\\'):
            return line[:-1].rstrip()
        return line

    formatted_blocks = []
    for comment in comments:
        # The last element of the split is the remainder after the trailing
        # newline, so it is dropped.
        lines = comment.split('\n')[:-1]
        formatted_blocks.append(
            '\n'.join('//%s' % strip_trailing_backslash(line) for line in lines))
    return format_block('\n\n'.join(formatted_blocks))
def create_next_free_field_xform(msg_proto):
"""Return the next free field number annotation transformer of a message.
Args:
msg_proto: DescriptorProto for message.
Returns:
the next free field number annotation transformer.
"""
next_free = max(
sum([
[f.number + 1 for f in msg_proto.field],
[rr.end for rr in msg_proto.reserved_range],
[ex.end for ex in msg_proto.extension_range],
], [1]))
return lambda _: next_free if next_free > NEXT_FREE_FIELD_MIN else None
def format_type_context_comments(type_context, annotation_xforms=None):
"""Format the leading/trailing comments in a given TypeContext.
Args:
type_context: contextual information for message/enum/field.
annotation_xforms: a dict of transformers for annotations in leading
comment.
Returns:
Tuple of formatted leading and trailing comment blocks.
"""
leading_comment = type_context.leading_comment
if annotation_xforms:
leading_comment = leading_comment.get_comment_with_transforms(annotation_xforms)
leading = format_comments(list(type_context.leading_detached_comments) + [leading_comment.raw])
trailing = format_block(format_comments([type_context.trailing_comment]))
return leading, trailing
def format_header_from_file(
source_code_info, file_proto, empty_file, requires_deprecation_annotation):
"""Format proto header.
Args:
source_code_info: SourceCodeInfo object.
file_proto: FileDescriptorProto for file.
empty_file: are there no message/enum/service defs in file?
requires_deprecation_annotation: does the proto have the deprecated version annotation or
disallowed annotation.
Returns:
Formatted proto header as a string.
"""
# Load the type database.
typedb = utils.get_type_db()
# Figure out type dependencies in this .proto.
types = types_pb2.Types()
text_format.Merge(
traverse.traverse_file(file_proto, type_whisperer.TypeWhispererVisitor()), types)
type_dependencies = sum([list(t.type_dependencies) for t in types.types.values()], [])
for service in file_proto.service:
for m in service.method:
type_dependencies.extend([m.input_type[1:], m.output_type[1:]])
# Determine the envoy/ import paths from type deps.
envoy_proto_paths = set(
typedb.types[t].proto_path
for t in type_dependencies
if t.startswith('envoy.') and typedb.types[t].proto_path != file_proto.name)
def camel_case(s):
return ''.join(t.capitalize() for t in re.split('[\._]', s))
package_line = 'package %s;\n' % file_proto.package
file_block = '\n'.join(['syntax = "proto3";\n', package_line])
options = descriptor_pb2.FileOptions()
options.java_outer_classname = camel_case(os.path.basename(file_proto.name))
for msg in file_proto.message_type:
if msg.name == options.java_outer_classname:
# This is a workaround for Java outer class names that would otherwise
# conflict with types defined within the same proto file, see
# https://github.com/envoyproxy/envoy/pull/13378.
# TODO: in next major version, make this consistent.
options.java_outer_classname += "OuterClass"
options.java_multiple_files = True
options.java_package = 'io.envoyproxy.' + file_proto.package
# Workaround packages in generated go code conflicting by transforming:
# foo/bar/v2 to use barv2 as the package in the generated code
golang_package_name = ""
if file_proto.package.split(".")[-1] in ("v2", "v3"):
name = "".join(file_proto.package.split(".")[-2:])
golang_package_name = ";" + name
options.go_package = "".join([
"github.com/envoyproxy/go-control-plane/",
file_proto.package.replace(".", "/"), golang_package_name
])
# This is a workaround for C#/Ruby namespace conflicts between packages and
# objects, see https://github.com/envoyproxy/envoy/pull/3854.
# TODO(htuch): remove once v3 fixes this naming issue in
# https://github.com/envoyproxy/envoy/issues/8120.
if file_proto.package in ['envoy.api.v2.listener', 'envoy.api.v2.cluster']:
names = [s.capitalize() for s in file_proto.package.split('.')]
options.csharp_namespace = |
ulno/micropython-extra-ulno | examples/ehdemov3/wifi_config.py | Python | mit | 56 | 0 | name = "ehdemo-iotemp | ire"
password = "internetofthings" | |
sthyme/ZFSchizophrenia | BehaviorAnalysis/HSMovieAnalysis/pyTrack_usingprevmask_updated.py | Python | mit | 448 | 0.013393 | #!/usr/bin/p | ython
import highspeedmovieanalysis
import imageTools
import sys
pixThreshold = 0.005 # enter pixel threshold here
frameRate = 285 # enter frameRate here (usually 30 fps)
videoStream = imageTools.getVideoStream(sys.argv)
#vidInfo = deltaPix.cmdLine(pixThreshold,frameRate,videoStream)
#vidInfo = deltaPix_updated.cmdLine(pixThreshold,frameRate,videoStream)
vidInfo = highspeedmovieanalysis.cmdLine(pixThreshold,f | rameRate,videoStream)
|
cloudbase/nova-virtualbox | nova/servicegroup/api.py | Python | apache-2.0 | 5,543 | 0 | # Copyright 2012 IBM Corp.
# Copyright (c) | AT&T Labs Inc. 2012 Yun Mao <yunmao@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express o | r
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define APIs for the servicegroup access."""
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from nova.i18n import _, _LW
LOG = logging.getLogger(__name__)
_default_driver = 'db'
servicegroup_driver_opt = cfg.StrOpt('servicegroup_driver',
default=_default_driver,
help='The driver for servicegroup '
'service (valid options are: '
'db, zk, mc)')
CONF = cfg.CONF
CONF.register_opt(servicegroup_driver_opt)
# NOTE(geekinutah): By default drivers wait 5 seconds before reporting
INITIAL_REPORTING_DELAY = 5
class API(object):
_driver = None
_driver_name_class_mapping = {
'db': 'nova.servicegroup.drivers.db.DbDriver',
'zk': 'nova.servicegroup.drivers.zk.ZooKeeperDriver',
'mc': 'nova.servicegroup.drivers.mc.MemcachedDriver'
}
def __new__(cls, *args, **kwargs):
'''Create an instance of the servicegroup API.
args and kwargs are passed down to the servicegroup driver when it gets
created. No args currently exist, though. Valid kwargs are:
db_allowed - Boolean. False if direct db access is not allowed and
alternative data access (conductor) should be used
instead.
'''
if not cls._driver:
LOG.debug('ServiceGroup driver defined as an instance of %s',
str(CONF.servicegroup_driver))
driver_name = CONF.servicegroup_driver
try:
driver_class = cls._driver_name_class_mapping[driver_name]
except KeyError:
raise TypeError(_("unknown ServiceGroup driver name: %s")
% driver_name)
cls._driver = importutils.import_object(driver_class,
*args, **kwargs)
return super(API, cls).__new__(cls)
def __init__(self, *args, **kwargs):
self.basic_config_check()
def basic_config_check(self):
"""Perform basic config check."""
# Make sure report interval is less than service down time
report_interval = CONF.report_interval
if CONF.service_down_time <= report_interval:
new_service_down_time = int(report_interval * 2.5)
LOG.warning(_LW("Report interval must be less than service down "
"time. Current config: <service_down_time: "
"%(service_down_time)s, report_interval: "
"%(report_interval)s>. Setting service_down_time "
"to: %(new_service_down_time)s"),
{'service_down_time': CONF.service_down_time,
'report_interval': report_interval,
'new_service_down_time': new_service_down_time})
CONF.set_override('service_down_time', new_service_down_time)
def join(self, member_id, group_id, service=None):
"""Add a new member to the ServiceGroup
@param member_id: the joined member ID
@param group_id: the group name, of the joined member
@param service: the parameter can be used for notifications about
disconnect mode and update some internals
"""
LOG.debug('Join new ServiceGroup member %(member_id)s to the '
'%(group_id)s group, service = %(service)s',
{'member_id': member_id,
'group_id': group_id,
'service': service})
return self._driver.join(member_id, group_id, service)
def service_is_up(self, member):
"""Check if the given member is up."""
# NOTE(johngarbutt) no logging in this method,
# so this doesn't slow down the scheduler
return self._driver.is_up(member)
def leave(self, member_id, group_id):
"""Explicitly remove the given member from the ServiceGroup
monitoring.
"""
LOG.debug('Explicitly remove the given member %(member_id)s from the'
'%(group_id)s group monitoring',
{'member_id': member_id, 'group_id': group_id})
return self._driver.leave(member_id, group_id)
def get_all(self, group_id):
"""Returns ALL members of the given group."""
LOG.debug('Returns ALL members of the [%s] '
'ServiceGroup', group_id)
return self._driver.get_all(group_id)
def get_one(self, group_id):
"""Returns one member of the given group. The strategy to select
the member is decided by the driver (e.g. random or round-robin).
"""
LOG.debug('Returns one member of the [%s] group', group_id)
return self._driver.get_one(group_id)
|
mahabuber/erpnext | erpnext/patches/v6_5/show_in_website_for_template_item.py | Python | agpl-3.0 | 486 | 0.018519 | from __future__ import unicode_literals
import frappe
import frappe.website.render
def execute():
for item_code in frappe.db.sql_list("""select distinct variant_of from `tabItem`
where variant_of is not null and variant_of !='' and show_in_websit | e=1"""):
item = frappe.get_doc("Item", item_code)
item.db_set("show_in_we | bsite", 1, update_modified=False)
item.get_route()
item.db_set("page_name", item.page_name, update_modified=False)
frappe.website.render.clear_cache()
|
couchbaselabs/litmus | lib/couchdb/multipart.py | Python | apache-2.0 | 8,671 | 0.000692 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Support for streamed reading and writing of multipart MIME content."""
from base64 import b64encode
from cgi import parse_header
try:
from hashlib import md5
md5; # Pyflakes workaround
except ImportError:
from md5 import new as md5
import sys
__all__ = ['read_multipart', 'write_multipart']
__docformat__ = 'restructuredtext en'
CRLF = '\r\n'
def read_multipart(fileobj, boundary=None):
"""Simple streaming MIME multipart parser.
This function takes a file-like object reading a MIME envelope, and yields
a ``(headers, is_multipart, payload)`` tuple for every part found, where
``headers`` is a dictionary containing the MIME headers of that part (with
names lower-cased), ``is_multipart`` is a boolean indicating whether the
part is itself multipart, and ``payload`` is either a string (if
``is_multipart`` is false), or an iterator over the nested parts.
Note that the iterator produced for nested multipart payloads MUST be fully
consumed, even if you wish to skip over the content.
:param fileobj: a file-like object
:param boundary: the part boundary string, will generally be determined
automatically from the headers of the outermost multipart
envelope
:return: an iterator over the parts
:since: 0.5
"""
headers = {}
buf = []
outer = in_headers = boundary is None
next_boundary = boundary and '--' + boundary + '\n' or None
last_boundary = boundary and '--' + boundary + '--\n' or None
def _current_part():
payload = ''.join(buf)
if payload.endswith('\r\n'):
payload = payload[:-2]
elif payload.endswith('\n'):
payload = payload[:-1]
content_md5 = headers.get('content-md5')
if content_md5:
h = b64encode(md5(payload).digest())
if content_md5 != h:
raise ValueError('data integrity check failed')
return headers, False, payload
for line in fileobj:
if in_headers:
line = line.replace(CRLF, '\n')
if line != '\n':
name, value = line.split(':', 1)
headers[name.lower().strip()] = value.strip()
else:
in_headers = False
mimetype, params = parse_header(headers.get('content-type'))
if mimetype.startswith('multipart/'):
sub_boundary = params['boundary']
sub_parts = read_multipart(fileobj, boundary=sub_boundary)
if boundary is not None:
yield headers, True, sub_parts
headers.clear()
del buf[:]
else:
for part in sub_parts:
yield part
return
elif line.replace(CRLF, '\n') == next_boundary:
# We've reached the start of a new part, as indicated by the
# boundary
if headers:
if not outer:
yield _current_part()
else:
outer = False
headers.clear()
del buf[:]
in_headers = True
elif line.replace(CRLF, '\n') == last_boundary:
# We're done with this multipart envelope
break
else:
buf.append(line)
if not outer and headers:
yield _current_part()
class MultipartWriter(object):
def __init__(self, fileobj, headers=None, subtype='mixed', boundary=None):
self.fileobj = fileobj
if boundary is None:
boundary = self._make_boundary()
self.boundary = boundary
if headers is None:
headers = {}
headers['Content-Type'] = 'multipart/%s; boundary="%s"' % (
subtype, self.boundary
)
self._write_headers(headers)
def open(self, headers=None, subtype='mixed', boundary=None):
self.fileobj.write('--')
self.fileobj.write(self.boundary)
self.fileobj.write(CRLF)
return MultipartWriter(self.fileobj, headers=headers, subtype=subtype,
boundary=boundary)
def add(self, mimetype, content, headers=None):
self.fileobj.write('--')
self.fileobj.write(self.boundary)
self.fileobj.write(CRLF)
if headers is None:
headers = {}
if isinstance(content, unicode):
ctype, params = parse_header(mimetype)
if 'charset' in params:
content = content.encode(params['charset'])
else:
content = content.encode('utf-8')
mimetype = mimetype + ';charset=utf-8'
headers['Content-Type'] = mimetype
if content:
headers['Content-Length'] = str(len(content))
headers['Content-MD5'] = b64encode(md5(content).digest())
self._write_headers(headers)
if content:
# XXX: throw an exception if a boundary appears in the content??
self.fileobj.write(content)
self.fileobj.write(CRLF)
def close(self):
self.fileobj.write('--')
self.fileobj.write(self.boundary)
self.fileobj.write('--')
self.fileobj.write(CRLF)
def _make_boundary(self):
try:
from uuid import uuid4
return '==' + uuid4().hex + '=='
except ImportError:
from random import randrange
token = randrange(sys.maxint)
format = '%%0%dd' % len(repr(sys.maxint - 1))
return '===============' + (format % token) + '=='
def _write_headers(self, headers):
if headers:
for name in sorted(headers.keys()):
self.fileobj.write(name)
self.fileobj.write(': ')
self.fileobj.write(headers[name])
self.fileobj.write(CRLF)
self.fileobj.write(CRLF)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def write_multipart(fileobj, subtype='mixed', boundary=None):
r"""Simple streaming MIME multipart writer.
This function returns a `MultipartWriter` object that has a few methods to
control the nested MIME parts. For example, to write a flat multipart
envelope you call the ``add(mimetype, content, [headers])`` method for
every part, and finally call the ``close()`` method.
>>> from StringIO import StringIO
>>> buf = StringIO()
>>> envelope = write_multipart(buf, boundary='==123456789==')
>>> envelope.add('text/plain', 'Just testing')
>>> envelope.close()
>>> print buf.getvalue().replace('\r\n', '\n')
Content-Type: multipart/mixed; boundary="==123456789=="
<BLANKLINE>
--==123456789==
Content-Length: 12
Content-MD5: nHmX4a6el41B06x2uCpglQ==
Content-Type: text/plain
<BLANKLINE>
Just testing
--==123456789==--
<BLANKLINE>
Note that an explicit boundary is only specified for testing purposes. If
the `boundary` parameter is omitted, the multipart writer will generate a
random string for the boundary.
To write nested str | uctures, call the ``open([headers])`` method on the
respective envelope, and finish each envelope using the ``close()`` method:
>>> buf = StringIO()
>>> envelope = write_multipart(buf, boundary='==123456789==')
>>> part = envelope.open( | boundary='==abcdefghi==')
>>> part.add('text/plain', 'Just testing')
>>> part.close()
>>> envelope.close()
>>> print buf.getvalue().replace('\r\n', '\n') #:doctest +ELLIPSIS
Content-Type: multipart/mixed; boundary="==123456789=="
<BLANKLINE>
--==123456789==
Content-Type: multipart/mixed; boundary="==abcdefghi=="
<BLANKLINE>
--==abcdefghi==
Content-Length: 12
Content-MD5: nHmX4a6el4 |
NORCatUofC/rainapp | csos/migrations/0001_initial.py | Python | mit | 1,235 | 0.002429 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-04 15:26
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='RiverCso',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('open_time', models.DateTimeField()),
('close_time', models.DateTimeField()),
],
),
migrations.CreateModel(
name='RiverOutfall',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('n | ame', models.TextField()),
('lat', models.FloatField(null=True)),
('lon', models.FloatField(null=True)),
],
),
migrations.AddField(
model_name='rivercso',
name='river_outfall',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='csos.RiverOutfall'),
),
]
| |
thethythy/Mnemopwd | mnemopwd/client/uilayer/uicomponents/TitledOnBorderWindow.py | Python | bsd-2-clause | 2,237 | 0.001341 | # -*- coding: utf-8 -*-
# Copyright (c) 2016-2017, Thierry Lemeunier <thierry at lemeunier dot net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .BaseWindow import BaseWindow
class TitledOnBorderWindow(BaseWindow):
"""
A window with a border and a title on border. Subclass of BaseWindow.
"""
def __init__(self, parent, h, w, y, x, title, modal=False, menu=False, colourT=False, colourD=False):
"""Create base window"""
BaseWindow.__init__(self, parent, h, w, y, x, modal=modal, menu | =menu)
self.title = title
self.colourT = colourT
self.colourD = colo | urD
self._create()
def redraw(self):
"""See mother class"""
self._create()
BaseWindow.redraw(self)
def _create(self):
self.window.attrset(self.colourD)
self.window.border()
self.window.addstr(0, 2, '[ ' + self.title + ' ]', self.colourT)
self.window.refresh()
self.window.attrset(0)
|
dunkenj/dunkenj.github.io | old/process_ads.py | Python | apache-2.0 | 647 | 0.004637 | # -*- coding: utf-8 -*-
import ads
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader(''))
ads.config.token = '7scWI1Z2A8kPAukKoYUPKyzlQqn3eSY1m4r0QCTo'
all_query = 'author:"duncan, k" year:2013-2020 database:astronomy property:refereed'
first_author = 'author:"^duncan, k" year:2013-2020 database:astronomy property:refereed'
allp = list(ads.SearchQuery(q=all_query, sort="date"))
first = list(ads.SearchQuery(q=first_author, sort="citation_coun | t"))
template = env.get_template('publications_template.html')
out = template.render(all=allp)
with open("publications.html", | "wb") as f:
f.write(out) |
google/vulncode-db | tests/app_tests/api/test_routes.py | Python | apache-2.0 | 3,216 | 0.002488 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from tests.conftest import as_admin
from tests.conftest import as_user
from tests.conftest import set_user
SAVE_VARIANTS = [
({}, {}, 400, {"msg": "Please provide a valid CVE ID or Git commit link."}),
({"id": "CVE-1970-3000"}, {}, 404, {"msg": "Please create an entry first"}),
({"id": "CVE-1970-2000"}, {}, 404, {"msg": "Entry has no linked Git link!"}),
({"id": "CVE-1970-1000"}, {}, 200, {"msg": "Update successful."}),
(
{"id": "CVE-1970-1000"},
[
{
"path": "/etc/passwd",
"hash": "12345678",
"name": "passwd",
"comments": [
{
"row_from": 1,
"row_to": 10,
"text": "a comment",
"sort_pos": 0,
}
],
"markers": [
{
"row_from": 1,
"row_to": 10,
"column_from": 1,
"column_to": 10,
"class": "vulnerableMarker",
}
],
},
],
200,
{"msg": "Update successful."},
),
]
@pytest.mark.integration
@pytest.mark.parametrize("query, data, expected_code, expected_response", SAVE_VARIANTS)
def test_save_editor_data(client, query, data, expected_code, expected_response):
resp = client.post("/api/save_editor_data", json=data, query_string=query)
assert resp.status_code == 403
assert "application/json" in resp.headers["Content-Type"]
assert b"Forbidden" in resp.data
|
@pytest.mark.integration
@pytest.mark.parametrize("query, data, expected_code, expected_response", SAVE_VARIANTS)
def test_save_editor_data_as_admin(
app, client, query, data, expected_code, expected_response
):
as_admin(client)
resp = client.post("/api/save_editor_data", json=data, query_str | ing=query)
assert resp.status_code == expected_code
assert "application/json" in resp.headers["Content-Type"]
assert resp.json == expected_response
@pytest.mark.integration
@pytest.mark.parametrize("query, data, expected_code, expected_response", SAVE_VARIANTS)
def test_save_editor_data_as_user(
app, client, query, data, expected_code, expected_response
):
with set_user(app, as_user(client)):
resp = client.post("/api/save_editor_data", json=data, query_string=query)
assert resp.status_code == 403
assert "application/json" in resp.headers["Content-Type"]
assert b"Forbidden" in resp.data
|
YcheLanguageStudio/PythonStudy | crpytography/tests/test_finite_field.py | Python | mit | 675 | 0 | from crpyto_tool.libs.finite_field_op import FiniteFieldNumber
if __name__ == '__main__':
magical_number = FiniteFieldNumber(FiniteFieldNumber.magical_number, False)
print 'p(x): ' + str(magical_number)
number2 = FiniteFieldNumber('0')
number3 = FiniteFieldNumber('1000110')
print 'Q5-(1):' + str(number2 - number3)
number0 = FiniteField | Number('1000110')
number1 = FiniteFieldNumber('10001011')
print 'Q5-(2):' + str(number0 + number1)
print 'Q5-(3):' + str(number0 * number1)
number4 = FiniteFieldNumber('10000111111010')
| print number4 / magical_number
print FiniteFieldNumber('11110101') * FiniteFieldNumber('1000110')
|
zuck/prometeo-erp | core/menus/forms.py | Python | lgpl-3.0 | 1,345 | 0.003717 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This file is part of the prometeo project.
This program | is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the Licens | e, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2011 Emanuele Bertoldi'
__version__ = '0.0.5'
from django import forms
from prometeo.core.forms import enrich_form
from models import *
class LinkForm(forms.ModelForm):
"""Form for link data.
"""
class Meta:
model = Link
class BookmarkForm(forms.ModelForm):
"""Form for bookmark data.
"""
class Meta:
model = Bookmark
exclude = ['menu', 'slug', 'submenu', 'sort_order', 'only_authenticated', 'only_staff', 'only_with_perms']
enrich_form(LinkForm)
enrich_form(BookmarkForm)
|
updownlife/multipleK | dependencies/biopython-1.65/build/lib.linux-x86_64-2.7/Bio/GenBank/Record.py | Python | gpl-2.0 | 23,027 | 0.000478 | # This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
#
"""Hold GenBank data in | a straightforward format.
classes:
- Record - All of the information in a GenBank record.
- Reference - hold reference data for a record.
- Feature - Hold the information in a Feature Table.
- Qualifier - Qualifiers on a Feature.
17-MAR-2009: added support for WGS and WGS_SCAFL | D lines. Ying Huang & Iddo Friedberg
"""
# local stuff
import Bio.GenBank
__docformat__ = "restructuredtext en"
def _wrapped_genbank(information, indent, wrap_space=1, split_char=" "):
"""Write a line of GenBank info that can wrap over multiple lines.
This takes a line of information which can potentially wrap over
multiple lines, and breaks it up with carriage returns and
indentation so it fits properly into a GenBank record.
Arguments:
- information - The string holding the information we want
wrapped in GenBank method.
- indent - The indentation on the lines we are writing.
- wrap_space - Whether or not to wrap only on spaces in the
information.
- split_char - A specific character to split the lines on. By default
spaces are used.
"""
info_length = Record.GB_LINE_LENGTH - indent
if not information:
# GenBank files use "." for missing data
return ".\n"
if wrap_space:
info_parts = information.split(split_char)
else:
cur_pos = 0
info_parts = []
while cur_pos < len(information):
info_parts.append(information[cur_pos: cur_pos + info_length])
cur_pos += info_length
# first get the information string split up by line
output_parts = []
cur_part = ""
for info_part in info_parts:
if len(cur_part) + 1 + len(info_part) > info_length:
if cur_part:
if split_char != " ":
cur_part += split_char
output_parts.append(cur_part)
cur_part = info_part
else:
if cur_part == "":
cur_part = info_part
else:
cur_part += split_char + info_part
# add the last bit of information to the output
if cur_part:
output_parts.append(cur_part)
# now format the information string for return
output_info = output_parts[0] + "\n"
for output_part in output_parts[1:]:
output_info += " " * indent + output_part + "\n"
return output_info
def _indent_genbank(information, indent):
"""Write out information with the specified indent.
Unlike _wrapped_genbank, this function makes no attempt to wrap
lines -- it assumes that the information already has newlines in the
appropriate places, and will add the specified indent to the start of
each line.
"""
# split the info into lines based on line breaks
info_parts = information.split("\n")
# the first line will have no indent
output_info = info_parts[0] + "\n"
for info_part in info_parts[1:]:
output_info += " " * indent + info_part + "\n"
return output_info
class Record(object):
"""Hold GenBank information in a format similar to the original record.
The Record class is meant to make data easy to get to when you are
just interested in looking at GenBank data.
Attributes:
- locus - The name specified after the LOCUS keyword in the GenBank
record. This may be the accession number, or a clone id or something else.
- size - The size of the record.
- residue_type - The type of residues making up the sequence in this
record. Normally something like RNA, DNA or PROTEIN, but may be as
esoteric as 'ss-RNA circular'.
- data_file_division - The division this record is stored under in
GenBank (ie. PLN -> plants; PRI -> humans, primates; BCT -> bacteria...)
- date - The date of submission of the record, in a form like '28-JUL-1998'
- accession - list of all accession numbers for the sequence.
- nid - Nucleotide identifier number.
- pid - Proteint identifier number
- version - The accession number + version (ie. AB01234.2)
- db_source - Information about the database the record came from
- gi - The NCBI gi identifier for the record.
- keywords - A list of keywords related to the record.
- segment - If the record is one of a series, this is info about which
segment this record is (something like '1 of 6').
- source - The source of material where the sequence came from.
- organism - The genus and species of the organism (ie. 'Homo sapiens')
- taxonomy - A listing of the taxonomic classification of the organism,
starting general and getting more specific.
- references - A list of Reference objects.
- comment - Text with any kind of comment about the record.
- features - A listing of Features making up the feature table.
- base_counts - A string with the counts of bases for the sequence.
- origin - A string specifying info about the origin of the sequence.
- sequence - A string with the sequence itself.
- contig - A string of location information for a CONTIG in a RefSeq file
- project - The genome sequencing project numbers
(will be replaced by the dblink cross-references in 2009).
- dblinks - The genome sequencing project number(s) and other links.
(will replace the project information in 2009).
"""
# constants for outputting GenBank information
GB_LINE_LENGTH = 79
GB_BASE_INDENT = 12
GB_FEATURE_INDENT = 21
GB_INTERNAL_INDENT = 2
GB_OTHER_INTERNAL_INDENT = 3
GB_FEATURE_INTERNAL_INDENT = 5
GB_SEQUENCE_INDENT = 9
BASE_FORMAT = "%-" + str(GB_BASE_INDENT) + "s"
INTERNAL_FORMAT = " " * GB_INTERNAL_INDENT + "%-" + \
str(GB_BASE_INDENT - GB_INTERNAL_INDENT) + "s"
OTHER_INTERNAL_FORMAT = " " * GB_OTHER_INTERNAL_INDENT + "%-" + \
str(GB_BASE_INDENT - GB_OTHER_INTERNAL_INDENT) + \
"s"
BASE_FEATURE_FORMAT = "%-" + str(GB_FEATURE_INDENT) + "s"
INTERNAL_FEATURE_FORMAT = " " * GB_FEATURE_INTERNAL_INDENT + "%-" + \
str(GB_FEATURE_INDENT -
GB_FEATURE_INTERNAL_INDENT) + "s"
SEQUENCE_FORMAT = "%" + str(GB_SEQUENCE_INDENT) + "s"
def __init__(self):
self.locus = ''
self.size = ''
self.residue_type = ''
self.data_file_division = ''
self.date = ''
self.definition = ''
self.accession = []
self.nid = ''
self.pid = ''
self.version = ''
self.projects = []
self.dblinks = []
self.db_source = ''
self.gi = ''
self.keywords = []
self.segment = ''
self.source = ''
self.organism = ''
self.taxonomy = []
self.references = []
self.comment = ''
self.features = []
self.base_counts = ''
self.origin = ''
self.sequence = ''
self.contig = ''
self.primary=[]
self.wgs = ''
self.wgs_scafld = []
def __str__(self):
"""Provide a GenBank formatted output option for a Record.
The objective of this is to provide an easy way to read in a GenBank
record, modify it somehow, and then output it in 'GenBank format.'
We are striving to make this work so that a parsed Record that is
output using this function will look exactly like the original
record.
Much of the output is based on format description info at:
ftp://ncbi.nlm.nih.gov/genbank/gbrel.txt
"""
output = self._locus_line()
output += self._definition_line()
output += self._accession_line()
output += self._version_line()
output += self._project_line()
output += self._db |
remmihsorp/minicps | minicps/utils.py | Python | mit | 3,381 | 0.000592 | """
utils.py.
MiniCPS use a shared logger called mcps_logger.
It contains testing data objects.
TEST_LOG_LEVEL affects all the tests,
output, info and debug are in increasing order of verbosity.
It contains all the other data objects.
"""
import logging
import logging.handlers
import time
import os
from mininet.util import dumpNodeConnections
# logging {{{1
# https://docs.python.org/2/howto/logging.html
# TODO: add a parametric logging level
def build_debug_logger(
        name='log_name',
        bytes_per_file=10000,
        rotating_files=3,
        lformat='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        ldir='/tmp/',
        suffix=''):
    """Build a custom Python debug logger file.

    :name: name of the logger instance
    :bytes_per_file: defaults to 10KB
    :rotating_files: defaults to 3
    :lformat: defaults to time, name, level, message
    :ldir: defaults to /tmp/ (must end with a path separator, because the
        log file path is built by plain string concatenation)
    :suffix: appended to the file name, defaults to ''
    :returns: logger instance
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    # Rotating file handler: keeps at most `rotating_files` backups of
    # `bytes_per_file` bytes each.
    fh = logging.handlers.RotatingFileHandler(
        ldir + name + suffix,
        maxBytes=bytes_per_file,
        backupCount=rotating_files)
    fh.setLevel(logging.DEBUG)

    # Mirror every record to the console as well.
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # no thread information
    formatter = logging.Formatter(lformat)
    # fixed: original had a corrupted token ("formatte | r") on this line
    fh.setFormatter(formatter)
    ch.setFormatter(formatter)

    logger.addHandler(fh)
    logger.addHandler(ch)

    return logger
TEMP_DIR = '/tmp'
# NOTE(review): LOG_DIR is relative — the module must be imported from the
# repository root (where logs/ exists), otherwise the file handler creation
# below raises. TODO confirm intended working directory.
LOG_DIR = 'logs/'
LOG_BYTES = 20000
LOG_ROTATIONS = 5

# MiniCPS global logger (created at import time)
mcps_logger = build_debug_logger(
    name=__name__,
    bytes_per_file=LOG_BYTES,
    rotating_files=LOG_ROTATIONS,
    lformat='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    ldir=LOG_DIR,
    suffix='')
# multi-process {{{1
# http://stackoverflow.com/questions/1359383/python-run-a-process-and-kill-it-if-it-doesnt-end-within-one-hour
def wait_timeout(proc, seconds):
    """Wait for a process to finish, or raise exception after timeout

    :proc: subprocess.Popen instance
    :seconds: before raising the exception
    :returns: the process return code once it exits
    """
    deadline = time.time() + seconds
    # Poll frequently for short timeouts, but never more often than 4 Hz.
    poll_every = min(seconds / 1000.0, .25)

    while proc.poll() is None:
        if time.time() >= deadline:
            raise RuntimeError("Process timed out")
        time.sleep(poll_every)

    return proc.poll()
# testing {{{1
def setup_func(test_name):
    """Per-test setup hook; intentionally a no-op."""
    return None
def teardown_func(test_name):
    """Per-test teardown hook; intentionally a no-op."""
    return None
# TODO: test it
def _arp_cache_rtts(net, h1, h2):
    """Learning check on the first two ping ICMP packets RTT.

    :net: Mininet object.
    :h1: first host name.
    :h2: second host name.
    :returns: decimal RTTs from uncached and cached arp entries.
    """
    h1, h2 = net.get(h1, h2)

    # Flush the ARP cache so the first ping has to resolve h2's MAC
    # (output intentionally ignored; the original bound it to an unused var).
    h1.cmd('ip -s -s neigh flush all')

    ping_output = h1.cmd('ping -c5 %s' % h2.IP())
    lines = ping_output.split('\n')

    # lines[0] is the "PING ..." banner; lines[1] and lines[2] are the first
    # two replies.  Their 7th whitespace-separated field is "time=<rtt>",
    # so strip the 5-char "time=" prefix and convert to float.
    first_rtt = float(lines[1].split(' ')[6][5:])
    second_rtt = float(lines[2].split(' ')[6][5:])

    return first_rtt, second_rtt
|
barrand/CTRs | src/lib/flaskext/login.py | Python | mit | 25,927 | 0.00027 | # -*- coding: utf-8 -*-
'''
flask.ext.login
---------------
This module provides user session management for Flask. It lets you log
your users in and out in a database-independent manner.
:copyright: (c) 2011 by Matthew Frazier.
:license: MIT/X11, see LICENSE for more details.
'''
__version_info__ | = ('0', '2', '6')
__version__ = '.'.join(__version_info__)
__author__ = 'Matthew Frazier'
__license__ = 'MIT/X11'
__copyright__ = '(c) 2011 by Matthew Frazier'
__all__ = ['LoginManager']
from flask import (_request_ctx_stack, abort, current_app, flash, redirect,
request, session, url_for)
from flask.signals import Namespace
from werkzeug.local import LocalProxy
from werkzeug.security import safe_str_cmp
from werkzeug.urls import url_decode, url_encode
from datetime import datetime, timedelta
from functools import wraps
from hashlib import sha1, md5
import hmac
import warnings
import sys
if sys.version < '3': # pragma: no cover
from urlparse import urlparse, urlunparse
else: # pragma: no cover
from urllib.parse import urlparse, urlunparse
unicode = str
# Private signal namespace used by the user_* signals defined in this module.
_signals = Namespace()

#: A proxy for the current user. If no user is logged in, this will be an
#: anonymous user
current_user = LocalProxy(lambda: _get_user() or
                          current_app.login_manager.anonymous_user())

#: The default name of the "remember me" cookie (``remember_token``)
COOKIE_NAME = 'remember_token'

#: The default time before the "remember me" cookie expires (365 days).
COOKIE_DURATION = timedelta(days=365)

#: Whether the "remember me" cookie requires Secure; defaults to ``None``
COOKIE_SECURE = None

#: Whether the "remember me" cookie uses HttpOnly or not; defaults to ``False``
COOKIE_HTTPONLY = False

#: The default flash message to display when users need to log in.
LOGIN_MESSAGE = u'Please log in to access this page.'

#: The default flash message category to display when users need to log in.
LOGIN_MESSAGE_CATEGORY = 'message'

#: The default flash message to display when users need to reauthenticate.
REFRESH_MESSAGE = u'Please reauthenticate to access this page.'

#: The default flash message category to display when users need to
#: reauthenticate.
REFRESH_MESSAGE_CATEGORY = 'message'
class LoginManager(object):
'''
This object is used to hold the settings used for logging in. Instances of
:class:`LoginManager` are *not* bound to specific apps, so you can create
one in the main body of your code and then bind it to your
app in a factory function.
'''
    def __init__(self, app=None, add_context_processor=True):
        """Create a LoginManager with default settings.

        If *app* is given, bind it immediately via :meth:`init_app`;
        otherwise the manager can be bound later (factory pattern).
        """
        #: A class or factory function that produces an anonymous user, which
        #: is used when no one is logged in.
        self.anonymous_user = AnonymousUserMixin

        #: The name of the view to redirect to when the user needs to log in.
        #: (This can be an absolute URL as well, if your authentication
        #: machinery is external to your application.)
        self.login_view = None

        #: The message to flash when a user is redirected to the login page.
        self.login_message = LOGIN_MESSAGE

        #: The message category to flash when a user is redirected to the login
        #: page.
        self.login_message_category = LOGIN_MESSAGE_CATEGORY

        #: The name of the view to redirect to when the user needs to
        #: reauthenticate.
        self.refresh_view = None

        #: The message to flash when a user is redirected to the 'needs
        #: refresh' page.
        self.needs_refresh_message = REFRESH_MESSAGE

        #: The message category to flash when a user is redirected to the
        #: 'needs refresh' page.
        self.needs_refresh_message_category = REFRESH_MESSAGE_CATEGORY

        #: The mode to use session protection in. This can be either
        #: ``'basic'`` (the default) or ``'strong'``, or ``None`` to disable
        #: it.
        self.session_protection = 'basic'

        # Callbacks registered later via the decorator methods below.
        self.token_callback = None
        self.user_callback = None
        self.unauthorized_callback = None
        self.needs_refresh_callback = None

        if app is not None:
            self.init_app(app, add_context_processor)
def setup_app(self, app, add_context_processor=True): # pragma: no cover
'''
This method has been deprecated. Please use
:meth:`LoginManager.init_app` instead.
'''
warnings.warn('Warning setup_app is deprecated. Please use init_app.',
DeprecationWarning)
self.init_app(app, add_context_processor)
    def init_app(self, app, add_context_processor=True):
        '''
        Configures an application. This registers a `before_request` and an
        `after_request` call, and attaches this `LoginManager` to it as
        `app.login_manager`.

        :param app: The :class:`flask.Flask` object to configure.
        :type app: :class:`flask.Flask`
        :param add_context_processor: Whether to add a context processor to
            the app that adds a `current_user` variable to the template.
            Defaults to ``True``.
        :type add_context_processor: bool
        '''
        app.login_manager = self
        app.before_request(self._load_user)
        app.after_request(self._update_remember_cookie)

        # LOGIN_DISABLED falls back to TESTING so test suites skip real auth.
        self._login_disabled = app.config.get('LOGIN_DISABLED',
                                              app.config.get('TESTING', False))

        if add_context_processor:
            app.context_processor(_user_context_processor)
    def unauthorized(self):
        '''
        This is called when the user is required to log in. If you register a
        callback with :meth:`LoginManager.unauthorized_handler`, then it will
        be called. Otherwise, it will take the following actions:

            - Flash :attr:`LoginManager.login_message` to the user.

            - Redirect the user to `login_view`. (The page they were attempting
              to access will be passed in the ``next`` query string variable,
              so you can redirect there if present instead of the homepage.)

        If :attr:`LoginManager.login_view` is not defined, then it will simply
        raise a HTTP 401 (Unauthorized) error instead.

        This should be returned from a view or before/after_request function,
        otherwise the redirect will have no effect.
        '''
        # Notify signal subscribers before deciding how to respond.
        user_unauthorized.send(current_app._get_current_object())

        # A user-registered handler takes full control of the response.
        if self.unauthorized_callback:
            return self.unauthorized_callback()

        if not self.login_view:
            abort(401)

        if self.login_message:
            flash(self.login_message, category=self.login_message_category)

        return redirect(login_url(self.login_view, request.url))
def user_loader(self, callback):
'''
This sets the callback for reloading a user from the session. The
function you set should take a user ID (a ``unicode``) and return a
user object, or ``None`` if the user does not exist.
:param callback: The callback for retrieving a user object.
:type callback: unicode
'''
self.user_callback = callback
return callback
def token_loader(self, callback):
'''
This sets the callback for loading a user from an authentication
token. The function you set should take an authentication token
(a ``unicode``, as returned by a user's `get_auth_token` method) and
return a user object, or ``None`` if the user does not exist.
:param callback: The callback for retrieving a user object.
:type callback: unicode
'''
self.token_callback = callback
return callback
def unauthorized_handler(self, callback):
'''
This will set the callback for the `unauthorized` method, which among
other things is used by `login_required`. It takes no arguments, and
should return a response to be sent to the user instead of their
normal view.
:param callback: The callback for unauthorized users.
:type callback: function
'''
self.u |
Chilipp/psy-simple | tests/_base_testing.py | Python | gpl-2.0 | 3,925 | 0 | import os
import six
import sys
import shutil
import subprocess as spr
import tempfile
from unittest import TestCase
from get_ref_dir import get_ref_dir
import numpy as np
# Directory holding the reference figures and the directory of this file.
ref_dir = get_ref_dir()
test_dir = os.path.dirname(__file__)

# Set to False to keep the per-class temporary output directories around.
remove_temp_files = True

# check if the seaborn version is smaller than 0.8 (without actually importing
# it), due to https://github.com/mwaskom/seaborn/issues/966
# If so, disable the import of it when import psyplot.project
try:
    # Query the version in a subprocess so importing seaborn has no side
    # effects on this process.
    sns_version = spr.check_output(
        [sys.executable, '-c', 'import seaborn; print(seaborn.__version__)'])
except spr.CalledProcessError:  # seaborn is not installed
    sns_version = None
else:
    sns_version = sns_version.decode('utf-8')
class PsyPlotTestCase(TestCase):
    """Base class for testing the psyplot package.

    It only provides some useful methods to compare figures and arrays;
    subclasses set :attr:`plot_type` / :attr:`grid_type` to control the
    names of their reference figures.
    """

    longMessage = True

    # Identifiers combined into reference-figure file names (see
    # :meth:`get_ref_file`); ``None`` entries are omitted.
    plot_type = None
    grid_type = None

    ncfile = os.path.join(test_dir, 'test-t2m-u-v.nc')

    @classmethod
    def tearDownClass(cls):
        import psyplot
        from psyplot.config.rcsetup import defaultParams
        # Restore all rcParams to their defaults so one test class cannot
        # leak configuration into the next.
        psyplot.rcParams.update(
            **{key: val[0] for key, val in defaultParams.items()})
        if remove_temp_files and hasattr(cls, 'odir'):
            shutil.rmtree(cls.odir)

    @classmethod
    def create_dirs(cls):
        # Ensure the reference directory exists and create a fresh temporary
        # output directory for this test class.
        if not os.path.exists(ref_dir):
            os.makedirs(ref_dir)
        cls.odir = tempfile.mkdtemp()

    def get_ref_file(self, identifier):
        """
        Give the name of the reference file for a test

        This method combines the given `identifier` with this class's
        `plot_type` and `grid_type` to form the name of a reference figure

        Parameters
        ----------
        identifier: str
            The unique identifier for the plot (usually the formatoption name)

        Returns
        -------
        str
            The basename of the reference file"""
        identifiers = ['test']
        if self.plot_type is not None:
            identifiers.append(self.plot_type)
        identifiers.append(identifier)
        if self.grid_type is not None:
            identifiers.append(self.grid_type)
        # fixed: the original line carried a stray corpus marker "|"
        return "_".join(identifiers) + '.png'

    def compare_figures(self, fname, tol=5, **kwargs):
        """Saves and compares the figure to the reference figure with the same
        name"""
        import matplotlib.pyplot as plt
        from matplotlib.testing.compare import compare_images
        plt.savefig(os.path.join(self.odir, fname), **kwargs)
        results = compare_images(
            os.path.join(ref_dir, fname), os.path.join(self.odir, fname),
            tol=tol)
        self.assertIsNone(results, msg=results)

    def assertAlmostArrayEqual(self, actual, desired, rtol=1e-07, atol=0,
                               msg=None, **kwargs):
        """Asserts that the two given arrays are almost the same

        This method uses the :func:`numpy.testing.assert_allclose` function
        to compare the two given arrays.

        Parameters
        ----------
        actual : array_like
            Array obtained.
        desired : array_like
            Array desired.
        rtol : float, optional
            Relative tolerance.
        atol : float, optional
            Absolute tolerance.
        equal_nan : bool, optional.
            If True, NaNs will compare equal.
        err_msg : str, optional
            The error message to be printed in case of failure.
        verbose : bool, optional
            If True, the conflicting values are appended to the error message.
        """
        try:
            np.testing.assert_allclose(actual, desired, rtol=rtol, atol=atol,
                                       err_msg=msg or '', **kwargs)
        except AssertionError as e:
            # AssertionError.message only exists on Python 2.
            if six.PY2:
                self.fail(e.message)
            else:
                self.fail(str(e))
|
flavour/eden | modules/unit_tests/s3db/inv.py | Python | mit | 1,041 | 0.003842 | # -*- coding: utf-8 -*-
#
# Inv Unit Tests
#
# To run this script use:
# python web2py.py -S eden -M -R applications/eden/modules/unit_tests/s3db/inv.py
#
import unittest
import datetime
from gluon import *
from gluon.storage import Storage
from unit_tests import run_suite
# =============================================================================
class InvTests(unittest.TestCase):
    """ Inv Tests """

    def setUp(self):
        """ Set up location records """
        # Bypass normal authorization checks for the duration of the test.
        current.auth.override = True
        self.location_code = Storage()
        self.location_ids = Storage()
        s3db = current.s3db

    #--------------------------------------------------------------------------
    def tearDown(self):
        """ Roll back DB changes and restore normal authorization """
        current.db.rollback()
        current.auth.override = False
# =============================================================================
if __name__ == "__main__":
run_suite(
InvTests,
) |
# END ========================================================================
|
david-martin/atomic-reactor | atomic_reactor/plugins/pre_check_and_set_rebuild.py | Python | bsd-3-clause | 3,476 | 0.000575 | """
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals
import json
import os
from osbs.api import OSBS
from osbs.conf import Configuration
from atomic_reactor.plugin import PreBuildPlugin
def is_rebuild(workflow):
    """Return the truthy rebuild marker recorded by CheckAndSetRebuildPlugin,
    or False when no such marker exists in the prebuild results."""
    results = workflow.prebuild_results
    plugin_key = CheckAndSetRebuildPlugin.key
    return plugin_key in results and results[plugin_key]
class CheckAndSetRebuildPlugin(PreBuildPlugin):
    """
    Determine whether this is an automated rebuild

    This plugin checks for a specific label in the OSv3 Build
    metadata. If it exists and has the value specified in the
    configuration, this build is a rebuild. The module-level function
    'is_rebuild()' can be used by other plugins to determine this.

    After checking for the label, it sets the label in the
    metadata, allowing future automated rebuilds to be detected as
    rebuilds.

    Example configuration:

    {
        "name": "check_and_set_rebuild",
        "args": {
            "label_key": "rebuild",
            "label_value": "true",
            "url": "https://localhost:8443/"
        }
    }
    """

    key = "check_and_set_rebuild"
    is_allowed_to_fail = False  # We really want to stop the process

    def __init__(self, tasker, workflow, label_key, label_value,
                 url, verify_ssl=True, use_auth=True):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param label_key: str, key of label used to indicate first build
        :param label_value: str, value of label used to indicate first build
        :param url: str, URL to OSv3 instance
        :param verify_ssl: bool, verify SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        """
        # call parent constructor
        super(CheckAndSetRebuildPlugin, self).__init__(tasker, workflow)
        self.label_key = label_key
        self.label_value = label_value
        self.url = url
        self.verify_ssl = verify_ssl
        self.use_auth = use_auth

    def run(self):
        """
        Run the plugin.

        Returns True when the configured marker label is already present on
        the Build (i.e. this is an automated rebuild); otherwise sets the
        marker on the BuildConfig so the *next* build is detected as one.
        """
        try:
            # $BUILD holds the OpenShift Build object as JSON.
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container")
            raise

        metadata = build_json.get("metadata", {})
        labels = metadata.get("labels", {})
        buildconfig = labels["buildconfig"]
        # Marker label matching the configured value means an earlier run
        # already tagged this BuildConfig: this build is a rebuild.
        is_rebuild = labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)
        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            kwargs = {}
            if 'namespace' in metadata:
                kwargs['namespace'] = metadata['namespace']

            osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                      use_auth=self.use_auth,
                                      verify_ssl=self.verify_ssl)
            osbs = OSBS(osbs_conf, osbs_conf)
            # NOTE: 'labels' is rebound here from the Build's label dict to
            # the marker dict that gets stored on the BuildConfig.
            labels = {self.label_key: self.label_value}
            osbs.set_labels_on_build_config(buildconfig, labels, **kwargs)

        return is_rebuild
|
dbryant4/stelligent | tests/test_default.py | Python | mit | 590 | 0.005085 | def test_nginx_package(Package):
package = Package('nginx')
assert package.is_installed
def test_nginx_working(Command):
    """The default vhost must serve the expected landing page.

    (Fixed: the original 'def' line contained a stray corpus marker '|'.)
    """
    response = Command('curl http://127.0.0.1/')
    assert 'Automation for the People' in response.stdout
def test_nginx_service(Service, Socket):
    """nginx must be running, enabled at boot, and listening on port 80."""
    svc = Service('nginx')
    assert svc.is_running
    assert svc.is_enabled
    assert Socket('tcp://0.0.0.0:80').is_listening
def test_index_html(File):
    """index.html must contain exactly the expected sentence."""
    content = File('/usr/share/nginx/html/index.html').content
    assert content.strip() == 'Automation for the People'
|
examachine/pisi | tests/helloworld/actions.py | Python | gpl-3.0 | 3,200 | 0.00375 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.act | ionsapi import shelltools
from pisi.actionsapi import libtools
from pisi.actionsapi import get
WorkDir = "hello-2.0"
def setup():
    """Configure the unpacked source tree via its ./configure script."""
    autotools.configure()
def build():
    """Compile the source tree with make."""
    autotools.make()
def install():
    """Install the build products into the package image directory.

    This action deliberately exercises most pisi actionsapi helpers; each
    bare string literal below documents the path produced by the call that
    follows it.
    """
    autotools.install()

    '''/opt/helloworld/'''
    pisitools.dodir("/opt/helloworld")

    '''/usr/share/doc/helloworld-2.0/Makefile.am'''
    pisitools.dodoc("Makefile.am")

    '''/opt/helloworld/helloworld'''
    pisitools.doexe("src/helloworld", "/opt/helloworld")

    '''/usr/share/info/Makefile.am'''
    '''/usr/share/info/Makefile.cvs'''
    '''/usr/share/info/Makefile.in'''
    pisitools.doinfo("Makefile.*")

    '''/usr/lib/helloworld.o'''
    pisitools.dolib("src/helloworld.o")

    '''/opt/hello'''
    pisitools.insinto("/opt/", "src/helloworld", "hello")
    '''/opt/hi'''
    pisitools.insinto("/opt/", "src/helloworld", "hi")

    '''/opt/hello -> /var/hello'''
    pisitools.domove("/opt/hello", "/var/")
    '''/opt/hi -> /var/goodbye'''
    pisitools.domove("/opt/hi", "/var/", "goodbye")

    '''/usr/bin/helloworld'''
    pisitools.dobin("src/helloworld")
    '''/bin/helloworld'''
    pisitools.dobin("src/helloworld", "/bin")

    '''/usr/sbin/helloworld'''
    pisitools.dosbin("src/helloworld")
    '''/sbin/helloworld'''
    pisitools.dosbin("src/helloworld", "/sbin")

    '''Hello, world! -> Goodbye, world!'''
    pisitools.dosed("src/helloworld.cpp", "Hello, world!", "Goodbye, world!")

    '''/usr/sbin/goodbye --> helloworld'''
    pisitools.dosym("helloworld", "/usr/sbin/goodbye")
    '''/usr/bin/goodbye --> helloworld'''
    pisitools.dosym("helloworld", "/usr/bin/goodbye")

    '''/home/pardus/'''
    pisitools.dodir("/home/pardus")
    '''delete pardus'''
    pisitools.removeDir("/home/pardus")
    '''delete home'''
    pisitools.removeDir("/home")

    '''src/helloworld.cpp --> /usr/share/doc/helloworld-2.0/goodbyeworld.cpp'''
    pisitools.newdoc("src/helloworld.cpp", "goodbyeworld.cpp")

    '''/opt/pardus'''
    shelltools.touch("%s/opt/pardus" % get.installDIR())
    '''/opt/pardus --> /opt/uludag'''
    shelltools.copy("%s/opt/pardus" % get.installDIR(), "%s/opt/uludag" % get.installDIR())
    '''/opt/pardus --> /opt/Pardus'''
    shelltools.move("%s/opt/pardus" % get.installDIR(), "%s/opt/PARDUS" % get.installDIR())

    '''/opt/ --> /sys/'''
    shelltools.copytree("%s/opt/" % get.installDIR(), "%s/sys/" % get.installDIR())

    '''delete /sys/helloworld/helloworld'''
    shelltools.unlink("%s/sys/helloworld/helloworld" % get.installDIR())
    '''delete /sys/helloworld'''
    shelltools.unlinkDir("%s/sys/helloworld" % get.installDIR())

    '''generate /usr/lib/helloworld.o'''
    libtools.gen_usr_ldscript("helloworld.o")
|
sunForest/AviPost | e2e/features/steps/crud.py | Python | apache-2.0 | 2,306 | 0.005637 | import json
import re
from | behave import given, when, then
from behave import use_step_matcher
use_step_matcher("re")
# implicitly used
import sure # noqa
# We use this instead of the validator from json_schema_generator
# because its error reports are far better
from jsonschema import validate
from _lazy_request import LazyRequest
# supress requests logging
import logging
logging.getLogger("requests").setLevel(logging.WARNING)
# (?:xx) changes priority but will not be captured as args
@given('(.+) are users')
def step_impl(context, user_names_str):
    """Create one test user (with a fake token) per name in the
    'and'/comma-separated list, recording ids and tokens on the context."""
    if not hasattr(context, 'users'):
        context.users = {}
    user_names = [name.strip() for name in re.split('and|,', user_names_str)]
    for user_name in user_names:
        token = 'fake_token_' + user_name
        user_id = context.helpers.create_test_user(user_name, token)
        context.users[user_name] = {'token': token, 'id': user_id}
@given('(\w+) (?:is|am|are) logged in')
def step_impl(context, user_name):
    """Use the named user's token for all subsequent API requests."""
    context.token = context.users[user_name]['token']
@given('(\w+) received (\d+) postcards')
def step_impl(context, user_name, count):
    """Preload *count* postcards addressed to the named user."""
    context.helpers.load_postcards(user_name, count)
@when('GET "(\S+)"')
def step_impl(context, rel_url):
    """Prepare (but do not send) a GET request using the current token."""
    context.request = LazyRequest(
        'GET', context.helpers.url(rel_url), context.token)
@when('POST "(\S+)"')
def step_impl(context, rel_url):
    """Prepare (but do not send) a POST request using the current token."""
    context.request = LazyRequest(
        'POST', context.helpers.url(rel_url), context.token)
@when('with file "(\S+)" as (\w+)')
def step_impl(context, name, field):
    """Attach the named fixture file to the pending request under *field*."""
    context.request.add_file(context.helpers.file_path(name), field)
@when('with data')
def step_impl(context):
    """Attach the JSON body from the step text, resolving a "<name's id>"
    placeholder in the 'receiver' field to that user's real id."""
    data = json.loads(context.text)
    receiver_name = re.match(r"\<(\w+)'s id\>", data['receiver']).group(1)
    data['receiver'] = context.users[receiver_name]['id']
    context.request.add_data(data)
@then('request will (\w+) for (\d+)')
def step_impl(context, state, code):
    """Send the pending request and assert the expected HTTP status code.
    (*state* is only descriptive text in the scenario; it is not checked.)"""
    context.response = context.request.send()
    context.response.status_code.should.equal(int(code))
@then('return (\d+) items')
def step_impl(context, count):
    """Assert the JSON response is a collection of exactly *count* items."""
    cnt = len(context.response.json())
    cnt.should.equal(int(count))
@then('has structure')
def step_impl(context):
    """Validate the JSON response against the JSON Schema in the step text."""
    validate(context.response.json(), json.loads(context.text))
|
paulross/cpip | docs/doc_src/tutorial/demo/cpip_03.py | Python | gpl-2.0 | 376 | 0.010638 | import sys
from cpip.core import PpLexer, IncludeHandler
def main():
    """Preprocess the file named by argv[1] and print each token's text.

    (Fixed: the original body carried two stray corpus markers '|' that
    corrupted the 'def' line and the print statement.)
    """
    print('Processing:', sys.argv[1])
    # Resolve user includes from proj/usr and system includes from proj/sys.
    myH = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['proj/usr', ],
        theSysDirs=['proj/sys', ],
    )
    myLex = PpLexer.PpLexer(sys.argv[1], myH)
    for tok in myLex.ppTokens():
        print(tok.t, end=' ')
if __name__ == "__main__":
main()
|
crs4/omero.biobank | bl/vl/utils/graph.py | Python | gpl-2.0 | 1,304 | 0.000767 | import bl.vl.kb.config as blconf
from bl.vl.utils import _get_env_variable
def _config_value(var):
    """Resolve *var* from the environment, falling back to the
    bl.vl.kb.config module.

    :raises ValueError: if the variable is defined in neither place.
    """
    try:
        return _get_env_variable(var)
    except ValueError:
        try:
            return getattr(blconf, var)
        except AttributeError:
            # fixed message typo ("Cant't find config valuer")
            raise ValueError("Can't find config value for %s" % var)


def graph_driver():
    """Return the configured graph engine driver."""
    return _config_value('GRAPH_ENGINE_DRIVER')


def graph_uri():
    """Return the configured graph engine URI."""
    return _config_value('GRAPH_ENGINE_URI')


def graph_username():
    """Return the configured graph engine username."""
    return _config_value('GRAPH_ENGINE_USERNAME')


def graph_password():
    """Return the configured graph engine password."""
    return _config_value('GRAPH_ENGINE_PASSWORD')
def build_edge_id(source_node_hash, dest_node_hash):
    """Return the canonical edge identifier '<source>::<dest>'."""
    return '{0}::{1}'.format(source_node_hash, dest_node_hash)
hylje/lbtcex | lbtcex/main/views.py | Python | bsd-3-clause | 1,656 | 0.001208 | import json
from django.shortcuts import render
from django.contrib.auth.forms import AuthenticationForm
from registration.forms import RegistrationForm
from lbtcex.client.utils import api_token_required, api_get, api_post
from lbtcex.main.forms import ApiCallForm
def index(request):
    """Render the dashboard for authenticated users; otherwise render the
    landing page with empty login and registration forms."""
    if request.user.is_authenticated():
        return render(request, "dashboard.html")
    else:
        return render(
            request,
            "index.html",
            {
                "login_form": AuthenticationForm(),
                "registration_form": RegistrationForm(),
            }
        )
@api_token_required
def api_call(request):
    """Debug view letting an authenticated user issue an arbitrary API call.

    GET renders an empty form.  POST validates the form, performs the
    requested GET/POST API request, and renders the raw and pretty-printed
    JSON result.  An invalid POST falls through to re-render the form with
    its errors.
    """
    if request.method == "POST":
        opts = ApiCallForm(request.POST)
        if opts.is_valid():
            if opts.cleaned_data["data"]:
                # fixed: was json.laods (typo), which raised AttributeError
                data = json.loads(opts.cleaned_data["data"])
            else:
                data = {}
            if opts.cleaned_data["method"] == "POST":
                result = api_post(request,
                                  opts.cleaned_data["path"],
                                  data)
            else:
                result = api_get(request,
                                 opts.cleaned_data["path"],
                                 data)
            # fixed: stray corpus marker split "json.dumps | (" below
            return render(request,
                          "api_call.html",
                          {"form": opts,
                           "got_result": True,
                           "result": result,
                           "result_pretty_json": json.dumps(result.json(),
                                                            indent=4)})
    else:
        opts = ApiCallForm()
    return render(request, "api_call.html", {"form": opts})
|
bsmr-eve/Pyfa | eos/effects/subsystembonusgallenteoffensivedronedamagehp.py | Python | gpl-3.0 | 1,189 | 0.003364 | # subsystemBonusGallenteOffensiveDroneDamageHP
#
# Used by:
# Subsystem: Proteus Offensive - Drone Synthesis Projector
type = "passive"
def handler(fit, src, context):
    """Boost armor HP, hull HP, damage multiplier and shield capacity of all
    drones, scaled by the subsystem's 'subsystemBonusGallenteOffensive'
    attribute and the pilot's Gallente Offensive Systems skill.

    (Fixed: the original carried a stray corpus marker '|' that corrupted a
    'filteredItemBoost' call; the repeated attribute lookup is hoisted.)
    """
    bonus = src.getModifiedItemAttr("subsystemBonusGallenteOffensive")
    for attr in ("armorHP", "hp", "damageMultiplier", "shieldCapacity"):
        fit.drones.filteredItemBoost(
            lambda mod: mod.item.requiresSkill("Drones"),
            attr, bonus, skill="Gallente Offensive Systems")
|
unclejed613/gnuradio-projects-rtlsdr | scanner/scanner.py | Python | gpl-2.0 | 14,567 | 0.008032 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: NFM SCANNER
# Author: JED MARTIN
# Description: NFM SCANNER EXPERIMENT
# Generated: Tue Jan 31 21:07:35 2017
##################################################
if __name__ == '__main__':
    import ctypes
    import sys
    # Qt on X11 requires XInitThreads() before any GUI objects are created,
    # otherwise multi-threaded canvas updates can crash the X client.
    if sys.platform.startswith('linux'):
        try:
            x11 = ctypes.cdll.LoadLibrary('libX11.so')
            x11.XInitThreads()
        except:
            # best-effort: continue without thread-safe Xlib
            print "Warning: failed to XInitThreads()"
from PyQt4 import Qt
from PyQt4.QtCore import QObject, pyqtSlot
from gnuradio import analog
from gnuradio import audio
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import filter
from gnuradio import gr
from gnuradio import qtgui
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from gnuradio.qtgui import Range, RangeWidget
from grc_gnuradio import blks2 as grc_blks2
from optparse import OptionParser
import os
import osmosdr
import sip
import sys
import threading
import time
class scanner(gr.top_block, Qt.QWidget):
def __init__(self):
gr.top_block.__init__(self, "NFM SCANNER")
Qt.QWidget.__init__(self)
self.setWindowTitle("NFM SCANNER")
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "scanner")
self.restoreGeometry(self.settings.value("geometry").toByteArray())
##################################################
# Variables
##################################################
self.freqd = freqd = 0
self.stop_scan_a = stop_scan_a = 0
self.freq_set = freq_set = freqd
self.variable_qtgui_label_0 = variable_qtgui_label_0 = stop_scan_a
self.sql = sql = -50
self.samp_rate = samp_rate = 32000
self.s_rate = s_rate = 2
self.r_rate = r_rate = 240000
self.hold_scan = hold_scan = 0
self.gain = gain = 30
self.frequency = frequency = freq_set/1000000
##################################################
# Blocks
##################################################
self.stop_scan = blocks.probe_signal_f()
def _stop_scan_a_probe():
while True:
val = self.stop_scan.level()
try:
self.set_stop_scan_a(val)
except AttributeError:
pass
time.sleep(1.0 / (100))
_stop_scan_a_thread = threading.Thread(target=_stop_scan_a_probe)
_stop_scan_a_thread.daemon = True
_stop_scan_a_thread.start()
self._sql_range = Range(-100, 0, 1, -50, 50)
self._sql_win = RangeWidget(self._sql_range, self.set_sql, "squelch", "dial", float)
self.top_layout.addWidget(self._sql_win)
self._s_rate_options = (.5, 1, 2, 5, 10, )
self._s_rate_labels = (".5Hz", "1Hz", "2Hz", "5Hz", "10Hz", )
self._s_rate_tool_bar = Qt.QToolBar(self)
self._s_rate_tool_bar.addWidget(Qt.QLabel("SCAN RATE"+": "))
self._s_rate_combo_box = Qt.QComboBox()
self._s_rate_tool_bar.addWidget(self._s_rate_combo_box)
for label in self._s_rate_labels: self._s_rate_combo_box.addItem(label)
self._s_rate_callback = lambda i: Qt.QMetaObject.invokeMethod(self._s_rate_combo_box, "setCurrentIndex", Qt.Q_ARG("int", self._s_rate_options.index(i)))
self._s_rate_callback(self.s_rate)
self._s_rate_combo_box.currentIndexChanged.connect(
lambda i: self.set_s_rate(self._s_rate_options[i]))
self.top_layout.addWidget(self._s_rate_tool_bar)
_hold_scan_check_box = Qt.QCheckBox("hold_scan")
self._hold_scan_choices = {True: 1, False: 0}
self._hold_scan_choices_inv = dict((v,k) for k,v in self._hold_scan_choices.iteritems())
self._hold_scan_callback = lambda i: Qt.QMetaObject.invokeMethod(_hold_scan_check_box, "setChecked", Qt.Q_ARG("bool", self._hold_scan_choices_inv[i]))
self._hold_scan_callback(self.hold_scan)
_hold_scan_check_box.stateChanged.connect(lambda i: self.set_hold_scan(self._hold_scan_choices[bool(i)]))
self.top_layout.addWidget(_hold_scan_check_box)
self._gain_range = Range(0, 50, 1, 30, 25)
self._gain_win = RangeWidget(self._gain_range, self.set_gain, "GAIN", "dial", float)
self.top_layout.addWidget(self._gain_win)
self.freqb = blocks.probe_signal_f()
self._variable_qtgui_label_0_tool_bar = Qt.QToolBar(self)
if None:
self._variable_qtgui_label_0_formatter = None
else:
self._variable_qtgui_label_0_formatter = lambda x: x
self._variable_qtgui_label_0_tool_bar.addWidget(Qt.QLabel("scan"+": "))
self._variable_qtgui_label_0_label = Qt.QLabel(str(self._variable_qtgui_label_0_formatter(self.variable_qtgui_label_0)))
self._variable_qtgui_label_0_tool_bar.addWidget(self._variable_qtgui_label_0_label)
self.top_layout.addWidget(self._variable_qtgui_label_0_tool_bar)
self.valve = grc_blks2.valve(item_size=gr.sizeof_float*1, open=bool(stop_scan_a or hold_scan))
self.rtlsdr_source_0 = osmosdr.source( args="numchan=" + str(1) + " " + "" )
self.rtlsdr_source_0.set_sample_rate(r_rate)
self.rtlsdr_source_0.set_center_freq(freq_set-100000, 0)
self.rtlsdr_source_0.set_freq_corr(0, 0)
self.rtlsdr_source_0.set_dc_offset_mode(2, 0)
self.rtlsdr_source_0.set_iq_balance_mode(0, 0)
self.rtlsdr_source_0.set_gain_mode(True, 0)
self.rtlsdr_source_0.set_gain(gain, 0)
self.rtlsdr_source_0.set_if_gain(20, 0)
self.rtlsdr_source_0.set_bb_gain(20, 0)
self.rtlsdr_source_0.set_antenna("", 0)
self.rtlsdr_source_0.set_bandwidth(r_rate, 0)
self.qtgui_sink_x_0 = qtgui.sink_c(
1024, #fftsize
firdes.WIN_BLACKMAN_hARRIS, #wintype
freq_set, #fc
r_rate, #bw
"", #name
True, #plotfreq
True, #plotwat | erfall
False, #plottime
False, #plotconst
)
self.qtgui_sink_x_0.set_update_time(1.0/10)
self._qtgui_sink_x_0_win = sip.wrapinstance(self.qtgui_sink_x_0.pyqwidget(), Qt.QWidget)
self.top_layout.addWidget(self._qtgui_sink_x_0_win)
self.qtgui_sink_x_0. | enable_rf_freq(True)
self.low_pass_filter_0 = filter.fir_filter_ccf(5, firdes.low_pass(
1, r_rate, 7500, 5000, firdes.WIN_HAMMING, 6.76))
self._frequency_tool_bar = Qt.QToolBar(self)
if None:
self._frequency_formatter = None
else:
self._frequency_formatter = lambda x: x
self._frequency_tool_bar.addWidget(Qt.QLabel("frequency"+": "))
self._frequency_label = Qt.QLabel(str(self._frequency_formatter(self.frequency)))
self._frequency_tool_bar.addWidget(self._frequency_label)
self.top_layout.addWidget(self._frequency_tool_bar)
def _freqd_probe():
while True:
val = self.freqb.level()
try:
self.set_freqd(val)
except AttributeError:
pass
time.sleep(1.0 / (s_rate))
_freqd_thread = threading.Thread(target=_freqd_probe)
_freqd_thread.daemon = True
_freqd_thread.start()
self.freqa = blocks.fil |
n3storm/django-dynamic-preferences | dynamic_preferences/__init__.py | Python | bsd-3-clause | 193 | 0.005181 | from .dynamic_prefere | nces_registry import user_preferences_registry, global_preferences_registry
__version__ = "0.5.4"
default_app_config = 'dynam | ic_preferences.apps.DynamicPreferencesConfig'
|
jakevdp/bokeh | sphinx/source/tutorial/exercises/style.py | Python | bsd-3-clause | 2,134 | 0.003749 | import numpy as np
import pandas as pd
from bokeh.plotting import *
# Define some categories
categories = [
'ousia', 'poson', 'poion', 'pros ti', 'pou',
'pote', 'keisthai', 'echein', 'poiein', 'paschein',
]
# Create data
N = 10
data = { cat : np.random.randint(10, 100, size=N) for cat in categories }
# Define a little function to stack series together to make polygons. Soon
# this will be built into Bokeh.
def stacked(data, categories):
    """Return stacked polygon y-coordinates for the given categories.

    Walks *categories* in order, keeping a running cumulative level. For each
    category the polygon outline is the previous cumulative level reversed,
    followed by the new level — ready to be paired with the mirrored x values
    built by the caller.

    Fixes vs. the original:
    - ``next(iter(data.values()))`` instead of ``data.values()[0]`` so the
      helper works on both Python 2 lists and Python 3 dict views.
    - The loop variable is named ``level`` instead of shadowing the builtin
      ``next``.

    Args:
        data: mapping of category name -> sequence of numeric values, all of
            equal length.
        categories: iterable of keys of ``data`` giving the stacking order.

    Returns:
        list of 1-D numpy arrays, one per category.
    """
    ys = []
    # Seed the running total with zeros matching the series length.
    last = np.zeros(len(next(iter(data.values()))))
    for cat in categories:
        level = last + data[cat]
        ys.append(np.hstack((last[::-1], level)))
        last = level
    return ys
# Get the y coordinates of th | e stacked data
ys = stacked(data, categories)
# The x coordinates for each polygon are simply the series concatenated
# with its reverse.
xs = [np.hstack((categories[::-1], categories))] * len(ys)
# Pick out a color palette
colors = brewer["Spectral"][len(ys)]
# EXERCISE: output static HTML file
# EXERCISE: play around with parameters like:
# - line_color
# - line_alpha
# - line_width
# - line_dash (e.g., [2,4])
# | - fill_color
# - fill_alpha
# - background_fill
patches(xs, ys, x_range=categories, y_range=[0, 800],
color=colors, alpha=0.8, line_color=None, background_fill="lightgrey",
title="Categories of Brewering")
# EXERCISE: configure all of the following plot properties
ygrid().grid_line_color = # color, or None, to suppress the line
ygrid().grid_line_width = # line width for grid lines
axis().major_label_text_font_size = # "12pt", "1.5em", "10px", etc
axis().major_label_text_font_style = # "bold", "normal", "italic"
axis().major_label_standoff = # distance of tick labels from ticks
axis().axis_line_color = # color, or None, to suppress the line
xaxis().major_label_orientation = # radians, "horizontal", "vertical", "normal"
xaxis().major_tick_in = # distance ticks extends into the plot
xaxis().major_tick_out = # and distance they extend out
xaxis().major_tick_line_color = # color, or None, to suppress the line
show() |
Joergen/zamboni | apps/versions/models.py | Python | bsd-3-clause | 25,586 | 0.000469 | # -*- coding: utf-8 -*-
import datetime
import json
import os
import django.dispatch
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.storage import default_storage as storage
from django.db import models
import caching.base
import commonware.log
import jinja2
import addons.query
import amo
import amo.models
import amo.utils
from amo.urlresolvers import reverse
from applications.models import Application, AppVersion
from files import utils
from files.models import File, Platform, cleanup_file
from tower import ugettext as _
from translations.fields import (LinkifiedField, PurifiedField, save_signal,
TranslatedField)
from users.models import UserProfile
from versions.tasks import update_supported_locales_single
from .compare import version_dict, version_int
log = commonware.log.getLogger('z.versions')
class VersionManager(amo.models.ManagerBase):
    """Default manager for ``Version``.

    Hides soft-deleted rows (``deleted=True``) unless constructed with
    ``include_deleted=True`` — the ``Version.with_deleted`` manager does so.
    """

    def __init__(self, include_deleted=False):
        # include_deleted: when False (the default), get_query_set() excludes
        # soft-deleted versions.
        amo.models.ManagerBase.__init__(self)
        self.include_deleted = include_deleted

    def get_query_set(self):
        qs = super(VersionManager, self).get_query_set()
        # Swap in the search-index-aware queryset class used by the addons app.
        qs = qs._clone(klass=addons.query.IndexQuerySet)
        if not self.include_deleted:
            qs = qs.exclude(deleted=True)
        # Version.transformer is attached per-queryset; presumably it
        # bulk-loads related data for the fetched versions — defined elsewhere
        # in this file (outside this chunk).
        return qs.transform(Version.transformer)
class Version(amo.models.ModelBase):
addon = models.ForeignKey('addons.Addon', related_name='versions')
license = models.ForeignKey('License', null=True)
releasenotes = PurifiedField()
approvalnotes = models.TextField(default='', null=True)
version = models.CharField(max_length=255, default='0.1')
version_int = models.BigIntegerField(null=True, editable=False)
nomination = models.DateTimeField(null=True)
reviewed = models.DateTimeField(null=True)
has_info_request = models.BooleanField(default=False)
has_editor_comment = models.BooleanField(default=False)
deleted = models.BooleanField(default=False)
supported_locales = models.CharField(max_length=255)
_developer_name = models.CharField(max_length=255, default='',
editable=False)
objects = VersionManager()
with_deleted = VersionManager(include_deleted=True)
class Meta(amo.models.ModelBase.Meta):
db_table = 'versions'
ordering = ['-created', '-modified']
    def __init__(self, *args, **kwargs):
        """Initialize the model and expose parsed version parts as attributes."""
        super(Version, self).__init__(*args, **kwargs)
        # version_dict() splits the version string into its components; each
        # key becomes an instance attribute for easy access and comparison.
        self.__dict__.update(version_dict(self.version or ''))
    def __unicode__(self):
        # HTML-escape so the version string is safe to render in templates.
        return jinja2.escape(self.version)
    def save(self, *args, **kw):
        """Save the version, backfilling ``version_int`` and webapp features.

        On first save of a webapp version an ``AppFeatures`` row is created.
        Returns ``self`` (non-standard for a Django ``save``; presumably
        callers rely on chaining — verify before changing).
        """
        if not self.version_int and self.version:
            v_int = version_int(self.version)
            # Magic number warning, this is the maximum size
            # of a big int in MySQL to prevent version_int overflow, for
            # people who have rather crazy version numbers.
            # http://dev.mysql.com/doc/refman/5.5/en/numeric-types.html
            if v_int < 9223372036854775807:
                self.version_int = v_int
            else:
                # Overflow: leave version_int unset and log rather than fail.
                log.error('No version_int written for version %s, %s' %
                          (self.pk, self.version))
        # No id yet means this is an INSERT, not an UPDATE.
        creating = not self.id
        super(Version, self).save(*args, **kw)
        if creating:
            # To avoid circular import.
            from mkt.webapps.models import AppFeatures
            if self.addon.type == amo.ADDON_WEBAPP:
                AppFeatures.objects.create(version=self)
        return self
    @classmethod
    def from_upload(cls, upload, addon, platforms, send_signal=True):
        """Create a new Version (and its Files) from an uploaded package.

        Parses the upload, creates the Version row carrying over the latest
        version's license, records per-application compatibility, creates one
        File per platform, and deletes the upload from storage when done.

        Args:
            upload: the FileUpload-like object being promoted to a version.
            addon: the Addon this version belongs to.
            platforms: Platform instances the files should target (ignored for
                search engines and webapps, which are always all-platform).
            send_signal: when True, fire ``version_uploaded`` at the end.

        Returns:
            The newly created Version instance.
        """
        data = utils.parse_addon(upload, addon)
        try:
            # Carry the license over from the most recent version, if any.
            license = addon.versions.latest().license_id
        except Version.DoesNotExist:
            license = None
        # Truncate the developer name to the column's max length.
        max_len = cls._meta.get_field_by_name('_developer_name')[0].max_length
        developer = data.get('developer_name', '')[:max_len]
        v = cls.objects.create(addon=addon, version=data['version'],
                               license_id=license, _developer_name=developer)
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))
        AV = ApplicationsVersions
        for app in data.get('apps', []):
            AV(version=v, min=app.min, max=app.max,
               application_id=app.id).save()
        if addon.type in [amo.ADDON_SEARCH, amo.ADDON_WEBAPP]:
            # Search extensions and webapps are always for all platforms.
            platforms = [Platform.objects.get(id=amo.PLATFORM_ALL.id)]
        else:
            platforms = cls._make_safe_platform_files(platforms)
        if addon.is_webapp():
            from mkt.webapps.models import AppManifest
            # Create AppManifest if we're a Webapp.
            # Note: This must happen before we call `File.from_upload`.
            manifest = utils.WebAppParser().get_json_data(upload)
            AppManifest.objects.create(
                version=v, manifest=json.dumps(manifest))
        for platform in platforms:
            File.from_upload(upload, v, platform, parse_data=data)
        if addon.is_webapp():
            # Update supported locales from manifest.
            # Note: This needs to happen after we call `File.from_upload`.
            update_supported_locales_single.apply_async(
                args=[addon.id], kwargs={'latest': True},
                eta=datetime.datetime.now() +
                datetime.timedelta(seconds=settings.NFS_LAG_DELAY))
        v.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)
        # If packaged app and app is blocked, put in escalation queue.
        if (addon.is_webapp() and addon.is_packaged and
                addon.status == amo.STATUS_BLOCKED):
            # To avoid circular import.
            from editors.models import EscalationQueue
            EscalationQueue.objects.create(addon=addon)
        return v
@classmethod
def _make_safe_platform_files(cls, platforms):
"""Make file platform translations until all download pages
support desktop ALL + mobile ALL. See bug 646268.
"""
pl_set = set([p.id for p in platforms])
if pl_set == set([amo.PLATFORM_ALL_MOBILE.id, amo.PLATFORM_ALL.id]):
# Make it really ALL:
return [Platform.objects.get(id=amo.PLATFORM_ALL.id)]
has_mobile = any(p in amo.MOBILE_PLATFORMS for p in pl_set)
has_desktop = any(p in amo.DESKTOP_PLATFORMS for p in pl_set)
has_all = any(p in (amo.PLATFORM_ALL_MOBILE.id,
amo.PLATFORM_ALL.id) for p in pl_set)
is_mixed = has_mobile and has_desktop
if (is_mixed and has_all) or has_mobile:
# Mixing desktop and mobile w/ ALL is not safe;
# we have to split the files into exact platforms.
# Additionally, it is not safe to use all-mobile.
new_plats = []
for p in platforms:
if p.id == amo.PLATFORM_ALL_MOBILE.id:
new_plats.extend(list(Platform.objects
.filter(id__in=amo.MOBILE_PLATFORMS)
.exclude(id=amo.PLATFORM_ALL_MOBILE.id)))
elif p.id == amo.PLATFORM_ALL.id:
new_plats.extend(list(Platform.objects
.filter(id__in=amo.DESKTOP_PLATFORMS)
.exclude(id=amo.PLATFORM_ALL.id)))
| else:
new_plats.append(p)
| return new_plats
# Platforms are safe as is
return platforms
    @property
    def path_prefix(self):
        # Filesystem directory where this addon's files live.
        return os.path.join(settings.ADDONS_PATH, str(self.addon_id))
    @property
    def mirror_path_prefix(self):
        # Staging directory for the mirror copy of this addon's files.
        return os.path.join(settings.MIRROR_STAGE_PATH, str(self.addon_id))
    def license_url(self, impala=False):
        """Return the URL of this version's license page.

        NOTE(review): ``impala`` is accepted but unused here — presumably kept
        for signature compatibility with callers/overrides; confirm before
        removing.
        """
        return reverse('addons.license', args=[self.addon.slug, self.version])
|
sheeshmohsin/mozioproj | mozio/wsgi.py | Python | mit | 387 | 0 | """
WSGI config for mozio project.
It exposes th | e WSGI callable as a module-le | vel variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the WSGI callable; a
# DJANGO_SETTINGS_MODULE already set in the environment takes precedence.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mozio.settings")
application = get_wsgi_application()
|
alexandrucoman/labs | python/solutii/monica_vizitiu/unic/unic.py | Python | mit | 374 | 0 | #!/usr/bin/env python
# *-* coding: UTF-8 *-*
"""Problema unic."""
from __future__ import print_function
def g | aseste_unic(istoric):
"""unic"""
result = istoric.pop()
for numar in istoric:
result = result ^ numar
return result
if __name__ == "__main__":
assert gaseste_unic([1, 2, 3, 2, 1]) == 3
asser | t gaseste_unic([1, 1, 1, 2, 2]) == 1
|
DanielOaks/girc | docs/conf.py | Python | isc | 10,875 | 0.006437 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# girc documentation build configuration file, created by
# sphinx-quickstart on Fri Jul 10 20:20:32 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'girc'
copyright = '2015, Daniel Oaks'
author = 'Daniel Oaks'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, t | hat match files and
# directories to | ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
import sphinx_py3doc_enhanced_theme
html_theme = "sphinx_py3doc_enhanced_theme"
html_theme_path = [sphinx_py3doc_enhanced_theme.get_html_theme_path()]
html_theme_options = {
'bodyfont': '\'Lucida Grande\', Arial, sans-serif',
'headfont': '\'Lucida Grande\', Arial, sans-serif',
'footerbgcolor': 'white',
'footertextcolor': '#555555',
'relbarbgcolor': 'white',
'relbartextcolor': '#666666',
'relbarlinkcolor': '#444444',
'sidebarbgcolor': 'white',
'sidebartextcolor': '#444444',
'sidebarlinkcolor': '#444444',
'bgcolor': 'white',
'textcolor': '#222222',
'linkcolor': '#0072AA',
'visitedlinkcolor': '#6363bb',
'headtextcolor': '#1a1a1a',
'headbgcolor': 'white',
'headlinkcolor': '#aaaaaa',
'extrastyling': False,
}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'gircdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'girc.tex', 'girc Documentation',
'Daniel Oaks', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
|
iulian787/spack | var/spack/repos/builtin/packages/libuv/package.py | Python | lgpl-2.1 | 1,625 | 0.003692 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class Libuv(AutotoolsPackage):
    """Multi-platform library with a focus on asynchronous IO"""

    homepage = "http://libuv.org"
    url = "https://github.com/libuv/libuv/archive/v1.9.0.tar.gz"

    version('1.40.0', sha256='70fe1c9ba4f2c509e8166c0ca2351000237da573bb6c82092339207a9715ba6b')
    version('1.39.0', sha256='dc7b21f1bb7ef19f4b42c5ea058afabe51132d165da18812b70fb319659ba629')
    version('1.38.1', sha256='2177fca2426ac60c20f654323656e843dac4f568d46674544b78f416697bd32c')
    version('1.25.0', sha256='ce3036d444c3fb4f9a9e2994bec1f4fa07872b01456998b422ce918fdc55c254')
    version('1.10.0', sha256='50f4ed57d65af4ab634e2cbdd90c49213020e15b4d77d3631feb633cbba9239f')
    version('1.9.0', sha256='f8b8272a0d80138b709d38fad2baf771899eed61e7f9578d17898b07a1a2a5eb')

    # The GitHub tag tarball ships without a generated configure script, so
    # the full autotools chain is needed at build time (see autoreconf below).
    depends_on('automake', type='build')
    depends_on('autoconf', type='build')
    depends_on('libtool', type='build')

    # Tries to build an Objective-C file with GCC's C frontend
    # https://github.com/libuv/libuv/issues/2805
    conflicts('%gcc platform=darwin', when='@:1.37.9',
              msg='libuv does not compile with GCC on macOS yet, use clang. '
                  'See: https://github.com/libuv/libuv/issues/2805')

    def autoreconf(self, spec, prefix):
        """Regenerate the configure machinery via the bundled autogen.sh."""
        # This is needed because autogen.sh generates on-the-fly
        # an m4 macro needed during configuration
        bash = which("bash")
        bash('autogen.sh')
|
google-research/google-research | schema_guided_dst/baseline/pred_utils.py | Python | apache-2.0 | 6,649 | 0.005565 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Prediction and evaluation-related utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import os
import tensorflow.compat.v1 as tf
from schema_guided_dst import schema
from schema_guided_dst.baseline import data_utils
REQ_SLOT_THRESHOLD = 0.5
def get_predicted_dialog(dialog, all_predictions, schemas):
  """Update labels in a dialogue based on model predictions.

  Args:
    dialog: A json object containing dialogue whose labels are to be updated.
    all_predictions: A dict mapping prediction name to the predicted value. See
      SchemaGuidedDST class for the contents of this dict.
    schemas: A Schema object wrapping all the schemas for the dataset.

  Returns:
    A json object containing the dialogue with labels predicted by the model.
  """
  # Overwrite the labels in the turn with the predictions from the model. For
  # test set, these labels are missing from the data and hence they are added.
  dialog_id = dialog["dialogue_id"]
  # The slot values tracked for each service. Persists across turns, so slot
  # values accumulate over the dialogue (dialogue-state tracking).
  all_slot_values = collections.defaultdict(dict)
  for turn_idx, turn in enumerate(dialog["turns"]):
    if turn["speaker"] == "USER":
      user_utterance = turn["utterance"]
      # The system turn (if any) immediately precedes this user turn; the
      # first turn of a dialogue has no preceding system utterance.
      system_utterance = (
          dialog["turns"][turn_idx - 1]["utterance"] if turn_idx else "")
      turn_id = "{:02d}".format(turn_idx)
      for frame in turn["frames"]:
        # Predictions are keyed the same way write_predictions_to_file
        # indexes them: (dialog_id, turn_id, service).
        predictions = all_predictions[(dialog_id, turn_id, frame["service"])]
        slot_values = all_slot_values[frame["service"]]
        service_schema = schemas.get_service_schema(frame["service"])
        # Remove the slot spans and state if present.
        frame.pop("slots", None)
        frame.pop("state", None)
        # The baseline model doesn't predict slot spans. Only state predictions
        # are added.
        state = {}
        # Add prediction for active intent. Offset is subtracted to account for
        # NONE intent.
        active_intent_id = predictions["intent_status"]
        state["active_intent"] = (
            service_schema.get_intent_from_id(active_intent_id - 1)
            if active_intent_id else "NONE")
        # Add prediction for requested slots.
        requested_slots = []
        for slot_idx, slot in enumerate(service_schema.slots):
          if predictions["req_slot_status"][slot_idx] > REQ_SLOT_THRESHOLD:
            requested_slots.append(slot)
        state["requested_slots"] = requested_slots
        # Add prediction for user goal (slot values).
        # Categorical slots.
        for slot_idx, slot in enumerate(service_schema.categorical_slots):
          slot_status = predictions["cat_slot_status"][slot_idx]
          if slot_status == data_utils.STATUS_DONTCARE:
            slot_values[slot] = data_utils.STR_DONTCARE
          elif slot_status == data_utils.STATUS_ACTIVE:
            value_idx = predictions["cat_slot_value"][slot_idx]
            slot_values[slot] = (
                service_schema.get_categorical_slot_values(slot)[value_idx])
        # Non-categorical slots.
        for slot_idx, slot in enumerate(service_schema.non_categorical_slots):
          slot_status = predictions["noncat_slot_status"][slot_idx]
          if slot_status == data_utils.STATUS_DONTCARE:
            slot_values[slot] = data_utils.STR_DONTCARE
          elif slot_status == data_utils.STATUS_ACTIVE:
            tok_start_idx = predictions["noncat_slot_start"][slot_idx]
            tok_end_idx = predictions["noncat_slot_end"][slot_idx]
            # Character alignments are 1-based; the sign selects the source
            # utterance (negative -> system, positive -> user), hence the
            # +/-1 offsets applied in the slicing below.
            ch_start_idx = predictions["noncat_alignment_start"][tok_start_idx]
            ch_end_idx = predictions["noncat_alignment_end"][tok_end_idx]
            if ch_start_idx < 0 and ch_end_idx < 0:
              # Add span from the system utterance.
              slot_values[slot] = (
                  system_utterance[-ch_start_idx - 1:-ch_end_idx])
            elif ch_start_idx > 0 and ch_end_idx > 0:
              # Add span from the user utterance.
              slot_values[slot] = (user_utterance[ch_start_idx - 1:ch_end_idx])
        # Create a new dict to avoid overwriting the state in previous turns
        # because of use of same objects.
        state["slot_values"] = {s: [v] for s, v in slot_values.items()}
        frame["state"] = state
  return dialog
def write_predictions_to_file(predictions, input_json_files, schema_json_file,
                              output_dir):
  """Write the predicted dialogues as json files.

  Args:
    predictions: An iterator containing model predictions. This is the output of
      the predict method in the estimator.
    input_json_files: A list of json paths containing the dialogues to run
      inference on.
    schema_json_file: Path for the json file containing the schemas.
    output_dir: The directory where output json files will be created.
  """
  tf.compat.v1.logging.info("Writing predictions to %s.", output_dir)
  schemas = schema.Schema(schema_json_file)
  # Index all predictions by (dialog_id, turn_id, service_name) — the same
  # key get_predicted_dialog uses to look them up.
  all_predictions = {}
  for idx, prediction in enumerate(predictions):
    # Padding examples emitted by the estimator are skipped.
    if not prediction["is_real_example"]:
      continue
    tf.compat.v1.logging.log_every_n(
        tf.compat.v1.logging.INFO, "Processed %d examples.", 500, idx)
    # example_id format: "<prefix>-<dialog_id>-<turn_id>-<service_name>".
    _, dialog_id, turn_id, service_name = (
        prediction["example_id"].decode("utf-8").split("-"))
    all_predictions[(dialog_id, turn_id, service_name)] = prediction
  # Read each input file and write its predictions, mirroring the input file
  # names in output_dir.
  for input_file_path in input_json_files:
    with tf.io.gfile.GFile(input_file_path) as f:
      dialogs = json.load(f)
      pred_dialogs = []
      for d in dialogs:
        pred_dialogs.append(get_predicted_dialog(d, all_predictions, schemas))
    input_file_name = os.path.basename(input_file_path)
    output_file_path = os.path.join(output_dir, input_file_name)
    with tf.io.gfile.GFile(output_file_path, "w") as f:
      json.dump(
          pred_dialogs, f, indent=2, separators=(",", ": "), sort_keys=True)
|
technologiescollege/Blockly-rduino-communication | scripts_XP/Lib/site-packages/jedi/evaluate/utils.py | Python | gpl-3.0 | 4,702 | 0.000851 | """ A universal module with functions / classes without dependencies. """
import sys
import contextlib
import functools
import re
import os
from jedi._compatibility import reraise
# Build a regex over every path separator this platform accepts.
_sep = os.path.sep
if os.path.altsep is not None:
    _sep += os.path.altsep
# Matches a trailing file suffix (".py", ".pyd", ...) or a separator followed
# by "__init__.py", so dotted_from_fs_path() can strip it from a module path.
_path_re = re.compile('(?:\.[^{0}]+|[{0}]__init__\.py)$'.format(re.escape(_sep)))
del _sep
def to_list(func):
    """Decorator that materializes the decorated function's iterable result.

    Turns a generator function (or any function returning an iterable) into
    one that returns a fully built ``list``.

    Improvement: uses ``functools.wraps`` so the wrapper keeps the wrapped
    function's ``__name__``/``__doc__`` (the module already imports
    functools).
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return list(func(*args, **kwargs))
    return wrapper
def unite(iterable):
    """Flatten an iterable of iterables into a single set of its members."""
    result = set()
    for group in iterable:
        result.update(group)
    return result
class UncaughtAttributeError(Exception):
    """
    Important, because `__getattr__` and `hasattr` catch AttributeErrors
    implicitly. This is really evil (mainly because of `__getattr__`).
    `hasattr` in Python 2 is even more evil, because it catches ALL exceptions.
    Therefore this class originally had to be derived from `BaseException`
    instead of `Exception`. But because I removed relevant `hasattr` from
    the code base, we can now switch back to `Exception`.

    Instances are constructed with the original AttributeError as the sole
    argument (see ``reraise_uncaught``); no custom ``__init__`` is defined.
    """
def safe_property(func):
    """Like ``property``, but getter AttributeErrors are re-raised as
    ``UncaughtAttributeError`` (via ``reraise_uncaught``) so they cannot be
    silently swallowed by ``hasattr``/``__getattr__`` machinery."""
    return property(reraise_uncaught(func))
def reraise_uncaught(func):
    """
    Re-throw uncaught `AttributeError`.

    Usage: Put ``@rethrow_uncaught`` in front of the function
    which does **not** suppose to raise `AttributeError`.

    AttributeError is easily get caught by `hasattr` and another
    ``except AttributeError`` clause.  This becomes problem when you use
    a lot of "dynamic" attributes (e.g., using ``@property``) because you
    can't distinguish if the property does not exist for real or some code
    inside of the "dynamic" attribute through that error.  In a well
    written code, such error should not exist but getting there is very
    difficult.  This decorator is to help us getting there by changing
    `AttributeError` to `UncaughtAttributeError` to avoid unexpected catch.
    This helps us noticing bugs earlier and facilitates debugging.

    .. note:: Treating StopIteration here is easy.
              Add that feature when needed.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwds):
        try:
            return func(*args, **kwds)
        except AttributeError:
            exc_info = sys.exc_info()
            # Swap the exception type while keeping the original exception
            # (as the new exception's argument) and its traceback; `reraise`
            # is the py2/py3 compatibility shim from jedi._compatibility.
            reraise(UncaughtAttributeError(exc_info[1]), exc_info[2])
    return wrapper
class PushBackIterator(object):
    """Iterator wrapper that lets callers push values back onto the
    stream; pushed-back values are yielded (LIFO) before the underlying
    iterator resumes.  The last value produced is kept in ``current``.
    """

    def __init__(self, iterator):
        self.pushes = []
        self.iterator = iterator
        self.current = None

    def push_back(self, value):
        """Queue *value* so the next ``__next__`` call returns it."""
        self.pushes.append(value)

    def __iter__(self):
        return self

    def next(self):
        """ Python 2 Compatibility """
        return self.__next__()

    def __next__(self):
        value = self.pushes.pop() if self.pushes else next(self.iterator)
        self.current = value
        return value
@contextlib.contextmanager
def ignored(*exceptions):
    """
    Context manager that ignores all of the specified exceptions. This will
    be in the standard library starting with Python 3.4.
    """
    # Hand-rolled equivalent of contextlib.suppress(); kept local because
    # the surrounding code still supports Python versions before 3.4.
    try:
        yield
    except exceptions:
        pass
def indent_block(text, indention='    '):
    """This function indents a text block with a default of four spaces."""
    # Peel trailing newlines off so they are not indented, then re-append.
    trailing = ''
    while text.endswith('\n'):
        trailing += '\n'
        text = text[:-1]
    indented = [indention + line for line in text.split('\n')]
    return '\n'.join(indented) + trailing
def dotted_from_fs_path(fs_path, sys_path):
    """
    Changes `/usr/lib/python3.4/email/utils.py` to `email.utils`.  I.e.
    compares the path with sys.path and then returns the dotted_path. If the
    path is not in the sys.path, just returns None.
    """
    if os.path.basename(fs_path).startswith('__init__.'):
        # We are calculating the path. __init__ files are not interesting.
        fs_path = os.path.dirname(fs_path)

    # prefer
    #   - UNIX
    #     /path/to/pythonX.Y/lib-dynload
    #     /path/to/pythonX.Y/site-packages
    #   - Windows
    #     C:\path\to\DLLs
    #     C:\path\to\Lib\site-packages
    # over
    #   - UNIX
    #     /path/to/pythonX.Y
    #   - Windows
    #     C:\path\to\Lib
    path = ''
    for s in sys_path:
        # Keep the LONGEST matching sys.path prefix so that e.g.
        # site-packages wins over the bare pythonX.Y directory.
        if fs_path.startswith(s) and len(path) < len(s):
            path = s

    # - Windows
    #   X:\path\to\lib-dynload/datetime.pyd => datetime
    module_path = fs_path[len(path):].lstrip(os.path.sep).lstrip('/')
    # - Windows
    #   Replace like X:\path\to\something/foo/bar.py
    return _path_re.sub('', module_path).replace(os.path.sep, '.').replace('/', '.')
|
MTG/dunya-desktop | dunyadesktop_app/widgets/dockwidget.py | Python | gpl-3.0 | 12,917 | 0.000077 | import os
from PyQt5.QtWidgets import (QDockWidget, QSizePolicy, QWidget, QVBoxLayout,
QFrame, QLabel, QToolButton, QHBoxLayout,
QSpacerItem, QDialog)
from PyQt5.QtGui import QFont
from PyQt5.QtCore import QSize, Qt
from .table import TableWidget, TableViewCollections, DialogCollTable
from .listwidget import CollectionsWidget
from .newcollectiondialog import NewCollectionDialog
from utilities import database
COMPMUSIC_LOGO = os.path.join(os.path.dirname(__file__), '..', 'ui_files',
'icons', 'compmusic_white.svg')
class DockWidget(QDockWidget):
    """Dockwidget for the main window"""
    def __init__(self, min_width, min_height, max_width, max_height):
        QDockWidget.__init__(self)
        self._set_dockwidget(min_width, min_height, max_width, max_height)

    def _set_dockwidget(self, min_width, min_height, max_width, max_height):
        """Sets the size policies of the dock widget.

        The widget is effectively pinned: docking areas and dock features
        are disabled and the title bar is replaced by an empty widget so
        it cannot be dragged, floated or closed.
        """
        size_policy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Minimum)
        size_policy.setHorizontalStretch(0)
        size_policy.setVerticalStretch(0)
        size_policy.setHeightForWidth(self.sizePolicy().hasHeightForWidth())
        self.setSizePolicy(size_policy)
        self.setMinimumSize(QSize(min_width, min_height))
        self.setMaximumSize(QSize(max_width, max_height))
        self.setContextMenuPolicy(Qt.PreventContextMenu)
        self.setFeatures(QDockWidget.NoDockWidgetFeatures)
        self.setAllowedAreas(Qt.NoDockWidgetArea)
        # An empty title bar widget hides Qt's default title bar.
        self.setTitleBarWidget(QWidget(None))
class DockWidgetContentsLeft(QWidget):
"""Contains the contents of the dock widget on the left side of the main
window"""
    def __init__(self, parent=None):
        """Build the left-hand pane: a collections-list frame on top and a
        'downloaded' table frame below, then wire the button signals."""
        QWidget.__init__(self, parent)
        # self._set_widget()
        layout = QVBoxLayout(self)
        layout.setContentsMargins(8, 15, 15, 15)
        # layout.setSpacing(10)  # check it
        # Upper frame: list of collections plus "new collection" button.
        self.frame_collection = QFrame(self)
        self._set_frame()
        layout_3 = QVBoxLayout(self.frame_collection)
        layout_3.setContentsMargins(2, 5, 3, 2)
        layout_3.setSpacing(4)  # check it
        self.label_collections = QLabel(self.frame_collection)
        self.label_collections.setIndent(15)  # check it
        self.label_collections.setTextInteractionFlags(Qt.NoTextInteraction)
        layout_3.addWidget(self.label_collections)
        self.listView_collections = CollectionsWidget()
        layout_3.addWidget(self.listView_collections)
        layout.addWidget(self.frame_collection)
        # toolbutton
        self.toolButton_collection = QToolButton(self)
        self._set_toolbutton(self.toolButton_collection)
        layout_3.addWidget(self.toolButton_collection)
        # Lower frame: label + "Open Table" button above the downloads table.
        self.frame_downloaded = QFrame(self)
        self._set_frame_downloaded()
        layout_4 = QVBoxLayout(self.frame_downloaded)
        layout_4.setContentsMargins(3, 5, 3, 2)
        layout_5 = QHBoxLayout()
        self.button_colltable = QToolButton(self)
        self._set_toolbutton(self.button_colltable)
        self.button_colltable.setText('Open Table')
        self.label_downloaded = QLabel(self.frame_downloaded)
        self._set_label_downloaded()
        layout_5.addWidget(self.label_downloaded)
        layout_5.addWidget(self.button_colltable)
        layout_4.addLayout(layout_5)
        self.tableView_downloaded = TableWidget()
        layout_4.addWidget(self.tableView_downloaded)
        layout.addWidget(self.frame_downloaded)
        self.retranslateUi()
        # signals
        self.toolButton_collection.clicked.connect(self.new_collection)
        self.button_colltable.clicked.connect(self._open_coll_table)
    def _open_coll_table(self):
        """Open a dialog table listing the recordings of the currently
        selected collection; no-op when nothing is selected."""
        current_coll = self.listView_collections.currentItem()
        if current_coll:
            self.coll_dialog = DialogCollTable(self)
            self.coll_dialog.label_collection.setText(current_coll.text())
            # Fetch the collection's recordings from the local database.
            conn, c = database.connect()
            collection = database.fetch_collection(c, current_coll.text())
            self.coll_dialog.model.add_recording(collection)
            conn.close()
            self.coll_dialog.show()
    def _set_widget(self):
        """Sets the size policies."""
        # NOTE(review): appears unused — the call in __init__ is commented out.
        size_policy = QSizePolicy(QSizePolicy.MinimumExpanding,
                                  QSizePolicy.Preferred)
        size_policy.setHorizontalStretch(0)
        size_policy.setVerticalStretch(0)
        size_policy.setHeightForWidth(self.sizePolicy().hasHeightForWidth())
        self.setSizePolicy(size_policy)
        self.setMaximumSize(QSize(500, 16777215))
    def _set_frame(self):
        """Sets the size policies of the frame."""
        size_policy = QSizePolicy(QSizePolicy.MinimumExpanding,
                                  QSizePolicy.Preferred)
        # size_policy.setHorizontalStretch(0)
        # size_policy.setVerticalStretch(0)
        # size_policy.setHeightForWidth(
        #     self.frame_collection.sizePolicy().hasHeightForWidth())
        self.frame_collection.setSizePolicy(size_policy)
        # self.frame_collection.setMinimumSize(QSize(0, 200))
        # self.frame_collection.setMaximumSize(QSize(16777215, 200))
        # self.frame_collection.setBaseSize(QSize(10, 10))
        self.frame_collection.setFrameShape(QFrame.Box)
        # self.frame_collection.setFrameShadow(QFrame.Raised)
    def _set_toolbutton(self, button):
        """Sets the size policies of the new collection button.

        Shared helper: also used for the 'Open Table' button. Fixes the
        button height at 30px and shows text beside the icon.
        """
        size_policy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
        size_policy.setHorizontalStretch(0)
        size_policy.setVerticalStretch(0)
        size_policy.setHeightForWidth(button.sizePolicy().hasHeightForWidth())
        button.setSizePolicy(size_policy)
        button.setMinimumSize(QSize(0, 30))
        button.setMaximumSize(QSize(16777215, 30))
        button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
        button.setAutoRaise(True)
        button.setArrowType(Qt.NoArrow)
def _set_frame_downloaded(self):
"""Sets the size policies of the downloaded features frame."""
self.frame_downloaded = QFrame(self)
size_policy = QSizePolicy(QSizePolicy.MinimumExp | anding,
QSizePolicy.Preferred)
size_policy.setHorizontalStretch(0)
size_policy.setVerticalStretch(0)
size_policy.setHeightForWidth(
self.frame_downloaded.sizePolicy().hasHeightForWidth())
self.frame_downloaded.setSizePolicy(size_policy)
self.frame_downloaded.setMinimumSize(QSize(0, 150))
self.frame_downloaded.setBaseSize(QSize(0, 100))
se | lf.frame_downloaded.setFrameShape(QFrame.StyledPanel)
self.frame_downloaded.setFrameShadow(QFrame.Raised)
    def _set_label_downloaded(self):
        """Sets the label 'Downloaded'."""
        # Garuda is the font used elsewhere in the app's labels.
        font = QFont()
        font.setFamily("Garuda")
        self.label_downloaded.setFont(font)
        self.label_downloaded.setIndent(15)
def retranslateUi(self):
self.label_collections.setText("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD "
"HTML 4.0//EN\" "
"\"http://www.w3.org/TR/REC-html40"
"/strict.dtd\">\n "
"<html><head><meta name=\"qrichtext\" "
"content=\"1\" /><style "
"type=\"text/css\">\n "
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" "
"font-family:\'Ubuntu\'; "
"font-size:11pt; font-weight:400; "
"font-style:normal;\">\n "
"<p style=\" margin-top:12px; "
"margin-bottom:12px; margin-left:0px; "
"margin-right:0px; "
"-qt-block-indent:0; |
miRTop/mirtop | mirtop/bam/bam.py | Python | mit | 15,466 | 0.001552 | """ Read bam files"""
from __future__ import print_function
# from memory_profiler import profile
import os.path as op
import os
import pysam
from collections import defaultdict
import pybedtools
from mirtop.libs import do
from mirtop.libs.utils import file_exists
import mirtop.libs.logger as mylog
from mirtop.mirna.realign import isomir, hits, reverse_complement
from mirtop.mirna.mapper import get_primary_transcript, guess_database
from mirtop.bam import filter
from mirtop.gff import body
from mirtop.mirna.annotate import annotate
from mirtop.libs import sql
logger = mylog.getLogger(__name__)
# fp = open('memory_profiler.log', 'w+')
# @profile(stream=fp)
def read_bam(bam_fn, args, clean=True):
    """
    Read bam file and perform realignment of hits

    Args:
      *bam_fn*: a BAM file with alignments to the precursor
      *precursors*: dict with keys being precursor names and values
        being sequences. Come from mirtop.mirna.fasta.read_precursor().
      *clean*: Use mirtop.filter.clean_hits() to remove lower score hits.

    Returns:
      *reads (dict)*:
        keys are read_id and values are *mirtop.realign.hits*
    """
    bam_fn = _sam_to_bam(bam_fn)
    bam_fn = _bam_sort(bam_fn)
    reads = defaultdict(hits)
    if args.genomic:
        # Genomic mode: lift genome-coordinate alignments onto precursors
        # via a BED intersection and an intermediate SQLite database.
        logger.warning("This is under development and variants can be unexact.")
        bed_fn = os.path.join(args.out, os.path.basename(bam_fn) + ".bed")
        logger.info("Making bed file.")
        _bed(bam_fn, bed_fn)
        logger.info("Intersecting bed file.")
        intersect_fn = intersect(bed_fn, args.gtf)
        # logger.info("Analyzing hits.")
        # reads = _read_lifted_bam(intersect_fn, reads, args, clean)
        logger.info("Loading database.")
        conn = _read_lifted_bam_alpha(intersect_fn, bam_fn, args)
        rows = sql.select_all_reads(conn)
        logger.info("Analyzing database.")
        precursors = args.precursors
        database = guess_database(args)
        reads = _read_lifted_lines(rows, precursors, database)
        conn.close()
    else:
        # Precursor mode: alignments are already against precursor sequences.
        reads = _read_original_bam(bam_fn, reads, args, clean)
    logger.info("Done.")
    return reads
# @profile(stream=fp)
def low_memory_bam(bam_fn, sample, out_handle, args):
    """Stream a precursor-space BAM, writing GFF lines per read to *out_handle*.

    Consecutive alignments sharing a query name (the BAM is sorted first)
    are grouped, annotated and flushed immediately, so the whole file is
    never held in memory.  Incompatible with genomic coordinates.
    """
    if args.genomic:
        raise ValueError("low-memory option is not compatible with genomic coordinates.")
    precursors = args.precursors
    bam_fn = _sam_to_bam(bam_fn)
    bam_fn = _bam_sort(bam_fn)
    mode = "r" if bam_fn.endswith("sam") else "rb"
    handle = pysam.Samfile(bam_fn, mode)
    lines = []
    current = None
    for line in handle:
        if not current or current == line.query_name:
            # Same read as before: keep accumulating its alignments.
            lines.append(line)
            current = line.query_name
        else:
            # New read: annotate and write out the finished group.
            reads = _read_lines(lines, precursors, handle, args)
            ann = annotate(reads, args.matures, args.precursors, quiet=True)
            gff_lines = body.create(ann, args.database, sample, args, quiet=True)
            body.write_body_on_handle(gff_lines, out_handle)
            current = line.query_name
            lines = []
            lines.append(line)
    # Flush the final group after the loop ends.
    reads = _read_lines(lines, precursors, handle, args)
    ann = annotate(reads, args.matures, args.precursors, quiet=True)
    gff_lines = body.create(ann, args.database, sample, args, quiet=True)
    body.write_body_on_handle(gff_lines, out_handle)
def low_memory_genomic_bam(bam_fn, sample, out_handle, args):
    """Genomic-coordinate variant of ``low_memory_bam``.

    Lifts genome alignments onto precursors through a BED intersection and
    an intermediate SQLite database, then streams annotated GFF lines per
    read to *out_handle* without keeping all reads in memory.
    """
    logger.info("Reading BAM file in low memory mode.")
    logger.warning("This is under development and variants can be unexact.")
    precursors = args.precursors
    bam_fn = _sam_to_bam(bam_fn)
    bam_fn = _bam_sort(bam_fn)
    database = guess_database(args)
    bed_fn = os.path.join(args.out, os.path.basename(bam_fn) + ".bed")
    logger.info("Making bed file.")
    _bed(bam_fn, bed_fn)
    logger.info("Intersecting bed file.")
    intersect_fn = intersect(bed_fn, args.gtf)
    logger.info("Loading database.")
    # TODO this'll return conn_reads and conn_counts
    conn = _read_lifted_bam_alpha(intersect_fn, bam_fn, args)
    rows = sql.select_all_reads(conn)
    lines = []
    current = None
    logger.info("Analyzing database.")
    # Rows are grouped by read id (row[0]); flush each group as it ends.
    for row in rows:
        if not current or current == row[0]:
            lines.append(row)
            current = row[0]
        else:
            # TODO counts of sequence = conn_counts.query UID
            # it could be counts only same location UID+chrom+start, or counts all UID
            reads = _read_lifted_lines(lines, precursors, database)
            ann = annotate(reads, args.matures, args.precursors, quiet=True)
            gff_lines = body.create(ann, args.database, sample, args, quiet=True)
            body.write_body_on_handle(gff_lines, out_handle)
            current = row[0]
            lines = []
            lines.append(row)
    # Flush the last group.
    reads = _read_lifted_lines(lines, precursors, database)
    ann = annotate(reads, args.matures, args.precursors, quiet=True)
    gff_lines = body.create(ann, args.database, sample, args, quiet=True)
    body.write_body_on_handle(gff_lines, out_handle)
    conn.close()
    logger.info("Done")
def _analyze_line(line, reads, precursors, handle, args):
    """Fold one BAM alignment into *reads* (read_id -> hits).

    Returns *reads* unchanged when the alignment is unmapped, malformed,
    contains N bases, is reverse-stranded in precursor mode, or falls
    outside the precursor sequence.
    """
    if line.reference_id < 0:
        logger.debug("READ::Sequence not mapped: %s" % line.reference_id)
        return reads
    if not line.cigarstring:
        logger.debug("READ::Sequence malformed: %s" % line)
        return reads
    query_name = line.query_name
    if query_name not in reads and not line.query_sequence:
        return reads
    # Reverse alignments are stored as the reverse complement.
    sequence = line.query_sequence if not line.is_reverse else reverse_complement(line.query_sequence)
    logger.debug(("READ::Read name:{0} and Read sequence:{1}").format(line.query_name, sequence))
    if line.query_sequence and line.query_sequence.find("N") > -1:
        # Reads with ambiguous bases are skipped entirely.
        return reads
    if query_name not in reads:
        reads[query_name].set_sequence(sequence)
        reads[query_name].counts = _get_freq(query_name)
        # TODO if args.quant set to 0
        # TODO if args.quant increase by 1
    if line.is_reverse and not args.genomic:
        logger.debug("READ::Sequence is reverse: %s" % line.query_name)
        return reads
    chrom = handle.getrname(line.reference_id)
    start = line.reference_start
    cigar = line.cigartuples
    # if line.cigarstring.find("I") > -1:
    #     indels_skip += 1
    iso = isomir()
    iso.align = line
    iso.set_pos(start, len(reads[query_name].sequence))
    logger.debug("READ::From BAM start %s end %s at chrom %s" % (iso.start, iso.end, chrom))
    # Drop hits that would run past the (N-trimmed) precursor, with a
    # 3-base tolerance.
    if len(precursors[chrom].replace("N","")) + 3 < start + len(reads[query_name].sequence):
        logger.debug("READ::%s start + %s sequence size are bigger than"
                     " size precursor %s" % (
                         line.reference_id,
                         len(reads[query_name].sequence),
                         len(precursors[chrom])))
        return reads
    # Refine substitutions/additions and the CIGAR against the precursor.
    iso.subs, iso.add, iso.cigar = filter.tune(
        reads[query_name].sequence, precursors[chrom],
        start, cigar)
    logger.debug("READ::After tune start %s end %s" % (iso.start, iso.end))
    logger.debug("READ::iso add %s iso subs %s" % (iso.add, iso.subs))
    reads[query_name].set_precursor(chrom, iso)
    return reads
def _read_lines(lines, precursors, handle, args, clean=True):
    """Convert a batch of BAM alignments into a read_id -> hits mapping,
    optionally removing lower-score hits via ``filter.clean_hits``."""
    parsed = defaultdict(hits)
    for alignment in lines:
        parsed = _analyze_line(alignment, parsed, precursors, handle, args)
    return filter.clean_hits(parsed) if clean else parsed
def _read_lifted_bam_alpha(bed_fn, bam_fn, args):
database = guess_database(args)
conn = sql.create_connection()
key = "name" if args.keep_name else "sequence"
sql.create_reads_table(conn, key)
# TODO create counts table sequence and autoincrement or from read
cur = conn.cursor()
counts = 0
seen = set()
for line in bed_fn:
fields = _parse_intersect(line, database, bed=True)
# TODO add sequence to count table args.quant on/off name=UID or name=UID+chrom+pos
if fields:
hit = ". |
infobloxopen/heat-infoblox | heat_infoblox/tests/test_ha_pair.py | Python | apache-2.0 | 6,180 | 0 | # Copyright 2015 Infoblox Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os
from oslo_config import cfg
from heat.engine import stack
from heat.engine import template
from heat.tests import common
from heat.tests import utils
from heat_infoblox.resources import ha_pair
from heat_infoblox.tests.utils import create_side_effect
ha_pair_template = {
'heat_template_version': '2013-05-23',
'resources': {
'my_ha_pair': {
'type': 'Infoblox::Grid::HaPair',
'properties': {
'name': 'HaPair1',
'vip': 'VIP',
'node1_ha': 'NODE1_HA',
'node2_ha': 'NODE2_HA',
'node1_lan1': 'NODE1_LAN1',
'node2_lan1': 'NODE2_LAN1',
'vip_floating_ip': 'VIP_FLOATING_IP',
'node1_floating_ip': 'NODE1_FLOATING_IP',
'node2_floating_ip': 'NODE2_FLOATING_IP',
'virtual_router_id': 123
}
}
}
}
DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
class HaPairTest(common.HeatTestCase):
    def setUp(self):
        # Point Heat's plugin loader at the heat_infoblox package so the
        # custom Infoblox::Grid::HaPair resource type can be resolved.
        heat_infoblox_path = os.path.abspath(os.path.join(
            os.path.dirname(__file__), os.pardir))
        cfg.CONF.import_opt('plugin_dirs', 'heat.common.config')
        cfg.CONF.set_override('plugin_dirs', heat_infoblox_path)
        super(HaPairTest, self).setUp()
        self.ctx = utils.dummy_context()
    def set_stack(self, stack_template):
        """Build a Stack from *stack_template* and cache its HA pair resource."""
        self.stack = stack.Stack(
            self.ctx, 'ha_pair_test_stack',
            template.Template(stack_template)
        )
        self.my_ha_pair = self.stack['my_ha_pair']
    def test_resource_mapping(self):
        """The plugin must expose exactly one type: Infoblox::Grid::HaPair."""
        mapping = ha_pair.resource_mapping()
        self.assertEqual(1, len(mapping))
        self.assertEqual(ha_pair.HaPair,
                         mapping['Infoblox::Grid::HaPair'])
def prepair_ha_pair(self, update_ports=True):
props = ha_pair_template['resources']['my_ha_pair']['properties']
props['update_allowed_address_pairs'] = update_ports
self.set_stack(ha_pair_template)
self.my_ha_pair. | client = mock.MagicMock()
get_first_ip = mock.MagicMock()
ports = {
'vip': {'ip_address': '1.1.1.6', 'subnet_id': 'vip_subnet'},
'node1_lan1': {'ip_address': '1.1.1.4'},
| 'node1_ha': {'ip_address': '1.1.1.2'},
'node2_lan1': {'ip_address': '1.1.1.5'},
'node2_ha': {'ip_address': '1.1.1.3'},
}
get_first_ip.side_effect = create_side_effect(ports)
self.my_ha_pair._get_first_ip = get_first_ip
self.my_ha_pair.node = mock.MagicMock()
self.my_ha_pair.wait_for_https = mock.MagicMock()
show_subnet = mock.MagicMock()
show_subnet.return_value = {'subnet': {'cidr': '1.1.1.0/24',
'gateway_ip': '1.1.1.1'}}
neutron = mock.MagicMock()
neutron.show_subnet = show_subnet
self.my_ha_pair.client = mock.MagicMock(return_value=neutron)
return (props, neutron, ports)
    def test_handle_create(self):
        """handle_create must configure both nodes and join them as a grid."""
        (props, neutron, ports) = self.prepair_ha_pair()
        with mock.patch('heat_infoblox.resources.grid_member.'
                        'resource_utils.fix_ha_ports_mac') as fix_ha_ports:
            # Call 'handle_create' method
            self.my_ha_pair.handle_create()
            fix_ha_ports.assert_called_once_with(
                neutron,
                {'ipv4': {'address': ports['vip']['ip_address']}},
                props['virtual_router_id'],
                True,
                (props['node1_ha'], props['node2_ha']))
        # Check calls
        self.assertEqual(
            [mock.call('vip'), mock.call('node1_ha'), mock.call('node2_ha'),
             mock.call('node1_lan1'), mock.call('node2_lan1')],
            self.my_ha_pair._get_first_ip.mock_calls)
        self.assertEqual(
            [mock.call('NODE1_FLOATING_IP'), mock.call('VIP_FLOATING_IP'),
             mock.call('NODE2_FLOATING_IP')],
            self.my_ha_pair.wait_for_https.mock_calls)
        # Node 1 is reconfigured as the HA master; node 2 joins its grid.
        self.assertEqual(
            [mock.call('NODE1_FLOATING_IP', 'admin', 'infoblox'),
             mock.call().update_member(
                 'infoblox.localdomain',
                 {'enable_ha': True, 'router_id': 123,
                  'node_info': [
                      {'lan_ha_port_setting': {'mgmt_lan': '1.1.1.4',
                                               'ha_ip_address': '1.1.1.2'}},
                      {'lan_ha_port_setting': {'mgmt_lan': '1.1.1.5',
                                               'ha_ip_address': '1.1.1.3'}}],
                  'vip_setting': {'subnet_mask': '255.255.255.0',
                                  'gateway': '1.1.1.1',
                                  'address': '1.1.1.6'}
                  }),
             mock.call('NODE2_FLOATING_IP', 'admin', 'infoblox'),
             mock.call().join_grid('Infoblox', '1.1.1.6', 'test')
             ],
            self.my_ha_pair.node.mock_calls)
    def test_update_allowed_address_pairs(self):
        # Prepair member with update_allowed_address_pairs set to False
        (props, neutron, ports) = self.prepair_ha_pair(
            update_ports=False)
        # Call 'handle_create' method and check that fix_ha_ports not called
        with mock.patch('heat_infoblox.resources.grid_member.'
                        'resource_utils.fix_ha_ports_mac') as fix_ha_ports:
            self.my_ha_pair.handle_create()
            fix_ha_ports.assert_not_called()
|
irl/gajim | src/gui_interface.py | Python | gpl-3.0 | 137,383 | 0.003581 | # -*- coding:utf-8 -*-
## src/gajim.py
##
## Copyright (C) 2003-2014 Yann Leboulanger <asterix AT lagaule.org>
## Copyright (C) 2004-2005 Vincent Hanquez <tab AT snarc.org>
## Copyright (C) 2005 Alex Podaras <bigpod AT gmail.com>
## Norman Rasmussen <norman AT rasmussen.co.za>
## Stéphan Kochen <stephan AT kochen.nl>
## Copyright (C) 2005-2006 Dimitur Kirov <dkirov AT gmail.com>
## Alex Mauer <hawke AT hawkesnest.net>
## Copyright (C) 2005-2007 Travis Shirk <travis | AT pobox.com>
## Nikos Kouremenos <kourem AT gmail.com>
## Copyright (C) 2006 Junglecow J <junglecow AT gmail.com>
## Stefan Bethge <stefan AT lanpartei.de>
## Copyright (C) 2006-2008 Jean-Marie Traissard <jim AT lapin.org>
## Copyright (C) 2007 Lukas Petrovicky <lukas AT petrovicky.net>
## James Newton <redshodan AT gmail.com>
## Copyright (C) 2007-2008 Brendan Taylor <whateley AT gmail.com>
## Julien | Pivotto <roidelapluie AT gmail.com>
## Stephan Erb <steve-e AT h3c.de>
## Copyright (C) 2008 Jonathan Schleifer <js-gajim AT webkeks.org>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##
import os
import sys
import re
import time
import math
from subprocess import Popen
from gi.repository import Gtk
from gi.repository import GdkPixbuf
from gi.repository import GLib
from common import i18n
from common import gajim
from common import dbus_support
if dbus_support.supported:
from music_track_listener import MusicTrackListener
from common import location_listener
import dbus
import gtkgui_helpers
import dialogs
import notify
import message_control
from chat_control import ChatControlBase
from chat_control import ChatControl
from groupchat_control import GroupchatControl
from groupchat_control import PrivateChatControl
from atom_window import AtomWindow
from session import ChatControlSession
from common import sleepy
from nbxmpp import idlequeue
from nbxmpp import Hashes
from common.zeroconf import connection_zeroconf
from common import resolver
from common import caps_cache
from common import proxy65_manager
from common import socks5
from common import helpers
from common import dataforms
from common import passwords
from common import logging_helpers
from common.connection_handlers_events import OurShowEvent, \
FileRequestErrorEvent, InformationEvent
from common.connection import Connection
from common import jingle
from common.file_props import FilesProp
from common import pep
import roster_window
import profile_window
import config
from threading import Thread
from common import ged
from common.configpaths import gajimpaths
config_filename = gajimpaths['CONFIG_FILE']
from common import optparser
parser = optparser.OptionsParser(config_filename)
import logging
log = logging.getLogger('gajim.interface')
class Interface:
################################################################################
### Methods handling events from connection
################################################################################
    def handle_event_db_error(self, unused, data):
        #('DB_ERROR', account, (title_text, section_text))
        # Only one DB error dialog at a time: further errors are dropped
        # until the user closes the current one.
        if self.db_error_dialog:
            return
        self.db_error_dialog = dialogs.ErrorDialog(data[0], data[1])
        def destroyed(win):
            # Allow a new dialog once this one is closed.
            self.db_error_dialog = None
        self.db_error_dialog.connect('destroy', destroyed)
    def handle_event_information(self, obj):
        """Show a popup dialog matching the event's severity level
        ('error', 'warn' or 'info'); unknown levels are ignored."""
        if obj.popup:
            if obj.level == 'error':
                cls = dialogs.ErrorDialog
            elif obj.level == 'warn':
                cls = dialogs.WarningDialog
            elif obj.level == 'info':
                cls = dialogs.InformationDialog
            else:
                return
            cls(obj.pri_txt, GLib.markup_escape_text(obj.sec_txt))
    def handle_ask_new_nick(self, account, room_jid):
        """Prompt the user for another nickname after a MUC nick conflict."""
        title = _('Unable to join group chat')
        prompt = _('Your desired nickname in group chat %s is in use or '
            'registered by another occupant.\nPlease specify another nickname '
            'below:') % room_jid
        # NOTE(review): check_text is built but never passed on — looks
        # like a leftover; confirm whether the dialog should receive it.
        check_text = _('Always use this nickname when there is a conflict')
        if 'change_nick_dialog' in self.instances:
            # Reuse the open dialog so several conflicts queue up in it.
            self.instances['change_nick_dialog'].add_room(account, room_jid,
                prompt)
        else:
            self.instances['change_nick_dialog'] = dialogs.ChangeNickDialog(
                account, room_jid, title, prompt)
    def handle_event_http_auth(self, obj):
        #('HTTP_AUTH', account, (method, url, transaction_id, iq_obj, msg))
        """Ask the user to confirm an XEP-0070 HTTP authorization request."""
        def response(account, answer):
            # Send 'yes'/'no' back over the XMPP connection.
            obj.conn.build_http_auth_answer(obj.stanza, answer)

        def on_yes(is_checked, obj):
            response(obj, 'yes')

        account = obj.conn.name
        sec_msg = _('Do you accept this request?')
        if gajim.get_number_of_connected_accounts() > 1:
            # Disambiguate when several accounts are online.
            sec_msg = _('Do you accept this request on account %s?') % account
        if obj.msg:
            sec_msg = obj.msg + '\n' + sec_msg
        dialog = dialogs.YesNoDialog(_('HTTP (%(method)s) Authorization for '
            '%(url)s (id: %(id)s)') % {'method': obj.method, 'url': obj.url,
            'id': obj.iq_id}, sec_msg, on_response_yes=(on_yes, obj),
            on_response_no=(response, obj, 'no'))
    def handle_event_iq_error(self, obj):
        #('ERROR_ANSWER', account, (id_, fjid, errmsg, errcode))
        """Route IQ errors: 400/403/406 and 404 map to file-transfer
        failures; anything left over is printed into an open group chat."""
        if str(obj.errcode) in ('400', '403', '406') and obj.id_:
            # show the error dialog
            ft = self.instances['file_transfers']
            sid = obj.id_
            # Strip a two-char prefix + '_' to recover the transfer sid.
            if len(obj.id_) > 3 and obj.id_[2] == '_':
                sid = obj.id_[3:]
            file_props = FilesProp.getFileProp(obj.conn.name, sid)
            if file_props :
                if str(obj.errcode) == '400':
                    file_props.error = -3
                else:
                    file_props.error = -4
                gajim.nec.push_incoming_event(FileRequestErrorEvent(None,
                    conn=obj.conn, jid=obj.jid, file_props=file_props,
                    error_msg=obj.errmsg))
                obj.conn.disconnect_transfer(file_props)
                return
        elif str(obj.errcode) == '404':
            sid = obj.id_
            if len(obj.id_) > 3 and obj.id_[2] == '_':
                sid = obj.id_[3:]
            file_props = FilesProp.getFileProp(obj.conn.name, sid)
            if file_props:
                self.handle_event_file_send_error(obj.conn.name, (obj.fjid,
                    file_props))
                obj.conn.disconnect_transfer(file_props)
                return
        ctrl = self.msg_win_mgr.get_control(obj.fjid, obj.conn.name)
        if ctrl and ctrl.type_id == message_control.TYPE_GC:
            ctrl.print_conversation('Error %s: %s' % (obj.errcode, obj.errmsg))
    def handle_event_connection_lost(self, obj):
        # ('CONNECTION_LOST', account, [title, text])
        # Show a desktop notification with the connection-lost icon.
        path = gtkgui_helpers.get_icon_path('gajim-connection_lost', 48)
        account = obj.conn.name
        notify.popup(_('Connection Failed'), account, account,
            'connection_failed', path, obj.title, obj.msg)
    def unblock_signed_in_notifications(self, account):
        # Re-enable contact signed-in popups for *account* (presumably
        # blocked right after our own login to avoid a popup storm —
        # confirm against the caller that sets the flag).
        gajim.block_signed_in_notifications[account] = False
def handle_event_status(self, obj): # OUR status
#('STATUS', account, show)
account = obj.conn.name
if obj.show in ('offline', 'error'):
for name in list(self.instanc |
epicf/ef | examples/diode_childs_law/plot.py | Python | mit | 1,764 | 0.037982 | import os, glob
import operator
import h5py
import numpy as np
import matplotlib.pyplot as plt
def get_time_potential_charge_absrbd_on_anode_from_h5( filename ):
    """Read (time, anode potential, total absorbed charge) from an h5 file."""
    # Context manager guarantees the file is closed even if a read fails
    # (the original explicit open/close leaked the handle on error).
    with h5py.File( filename, mode="r" ) as h5:
        absorbed_charge = h5["/InnerRegions/anode"].attrs["total_absorbed_charge"][0]
        time = h5["/TimeGrid"].attrs["current_time"][0]
        potential = h5["/InnerRegions/anode"].attrs["potential"][0]
    return ( time,
             potential,
             absorbed_charge )
os.chdir("./")

# todo: remove hardcoding
prev_step_filename = "V*_*0900.h5"
last_step_filename = "V*_*1000.h5"

prev_step_vals = []
last_step_vals = []
for f in glob.glob( prev_step_filename ):
    prev_step_vals.append( get_time_potential_charge_absrbd_on_anode_from_h5( f ) )
for f in glob.glob( last_step_filename ):
    last_step_vals.append( get_time_potential_charge_absrbd_on_anode_from_h5( f ) )

# Sort both series by potential so the zip below pairs matching runs.
prev_step_vals.sort( key = operator.itemgetter(1) )
last_step_vals.sort( key = operator.itemgetter(1) )

current = []
voltage = []
cgs_to_v = 300  # presumably statvolt -> volt conversion (~299.8) — confirm
for (t1,V1,q1), (t2,V2,q2) in zip( prev_step_vals, last_step_vals ):
    print( t2 - t1, V2 - V1, q2 - q1 )
    # Current = charge absorbed between the two diagnostics / elapsed time.
    current.append( abs( ( q2 - q1 ) ) / ( t2 - t1 ) )
    voltage.append( V1 * cgs_to_v )
#print( current, voltage )

#A,B = np.polyfit( np.ln( current ), voltage, 1 )

plt.figure()
axes = plt.gca()
axes.set_xlabel( "Voltage [V]" )
axes.set_ylabel( "Current [?]" )
#axes.set_xlim( [0, 1500] )
plt.plot( voltage, current,
          linestyle='', marker='o',
          label = "Num" )
#plt.plot( current_an, voltage_an,
#          label = "An" )
plt.legend()
plt.savefig('diode_VC.png')
HackerEarth/django-allauth | allauth/socialaccount/providers/oauth/provider.py | Python | mit | 599 | 0.001669 | from django.core.urlresolvers import reverse
from django.utils.http import urlencode
from allauth.socialaccount.providers.base import Provider
class OAuthProvider(Provider):
    """Base class for allauth OAuth providers."""

    def get_login_url(self, request, **kwargs):
        """Return the URL that starts this provider's OAuth login flow.

        Extra keyword arguments are appended as a query string.
        """
        # Repaired extraction corruption: URL name is "<provider id>_login".
        url = reverse(self.id + "_login")
        if kwargs:
            url = url + '?' + urlencode(kwargs)
        return url

    def get_scope(self):
        """Return the configured SCOPE setting, or the provider default."""
        settings = self.get_settings()
        scope = settings.get('SCOPE')
        if scope is None:
            scope = self.get_default_scope()
        return scope

    def get_default_scope(self):
        """Scope requested when the settings do not specify one."""
        return []
|
lrq3000/unireedsolomon | unireedsolomon/tests/test_cpolynomial.py | Python | mit | 9,289 | 0.027775 | import unittest
# Skip this whole module test if running under PyPy (incompatible with Cython)
# Skip this whole module test if running under PyPy (incompatible with Cython)
try:
    import __pypy__

    # Placeholder test case whose skip message documents why the real
    # Cython-backed tests below cannot run under PyPy.
    class TestMissingDependency(unittest.TestCase):
        @unittest.skip('Missing dependency - Cython is incompatible with PyPy')
        def test_fail(self):  # takes self: well-formed even if ever unskipped
            pass
# Else we're not under PyPy, we can run the test
except ImportError:
    __pypy__ = None
from ..cpolynomial import Polynomial
from ..cff import GF2int, init_lut
def map_GF2int(L):
    """Coerce every element of *L* to ``GF2int`` and return them as a list."""
    return [GF2int(item) for item in L]
class cTestGFPoly(unittest.TestCase):
    """Tests that the Polynomial class works when given GF2int objects
    instead of regular integers
    """
    def setUp(self):
        # (Re) initialize the GF tables to avoid conflicts with previously ran tests
        init_lut(generator=3, prim=0x11b, c_exp=8)

    def test_add(self):
        # In GF(2^8) addition is XOR, so expected values differ from
        # plain-integer sums.
        one = Polynomial(map_GF2int([8,3,5,1]))
        two = Polynomial(map_GF2int([5,3,1,1,6,8]))
        r = one + two
        self.assertEqual(list(r.coefficients), [5,3,9,2,3,9])

    def test_sub(self):
        # Subtraction equals addition (XOR) in GF(2^8): same result as add.
        one = Polynomial(map_GF2int([8,3,5,1]))
        two = Polynomial(map_GF2int([5,3,1,1,6,8]))
        r = one - two
        self.assertEqual(list(r.coefficients), [5,3,9,2,3,9])

    def test_mul(self):
        one = Polynomial(map_GF2int([8,3,5,1]))
        two = Polynomial(map_GF2int([5,3,1,1,6,8]))
        r = one * two
        self.assertEqual(list(r.coefficients), [40,23,28,1,53,78,7,46,8])

    def test_mul_at(self):
        # mul_at(two, k) must match coefficient k of the full product.
        one = Polynomial(map_GF2int([2,4,7,3]))
        two = Polynomial(map_GF2int([5,2,4,2]))
        k = 3
        r1 = one * two
        r2 = one.mul_at(two, k)
        self.assertEqual(r1.get_coefficient(k), r2)

    def test_scale(self):
        one = Polynomial(map_GF2int([2,14,7,3]))
        scalar = 12
        r = one.scale(12)
        self.assertEqual(list(r.coefficients), [24, 72, 36, 20])

    def test_div(self):
        one = Polynomial(map_GF2int([8,3,5,1]))
        two = Polynomial(map_GF2int([5,3,1,1,6,8]))
        q, r = divmod(two,one)
        self.assertEqual(list(q.coefficients), [101, 152, 11])
        self.assertEqual(list(r.coefficients), [183, 185, 3])
        # Make sure they multiply back out okay
        self.assertEqual(q*one + r, two)

    def test_div_fast(self):
        # _fastdivmod must agree with the reference divmod above.
        one = Polynomial(map_GF2int([8,3,5,1]))
        two = Polynomial(map_GF2int([5,3,1,1,6,8]))
        q, r = two._fastdivmod(one)
        self.assertEqual(list(q.coefficients), [101, 152, 11])
        self.assertEqual(list(r.coefficients), [183, 185, 3])
        # Make sure they multiply back out okay
        self.assertEqual(q*one + r, two)

    def test_div_gffast(self):
        one = Polynomial(map_GF2int([1,3,5,1])) # must be monic! (because the function is optimized for monic divisor polynomial)
        two = Polynomial(map_GF2int([5,3,1,1,6,8]))
        q, r = two._gffastdivmod(one) # optimized for monic divisor polynomial
        q2, r2 = two._fastdivmod(one)
        self.assertEqual(q, q2)
        self.assertEqual(r, r2)
        self.assertEqual(list(q.coefficients), [5, 12, 4])
        self.assertEqual(list(r.coefficients), [52, 30, 12])
        # Make sure they multiply back out okay
        self.assertEqual(q*one + r, two)

    def test_div_scalar(self):
        """Tests division by a scalar"""
        numbers = map_GF2int([5,20,50,100,134,158,0,148,233,254,4,5,2])
        scalar = GF2int(17)
        poly = Polynomial(list(numbers))
        scalarpoly = Polynomial(x0=scalar)
        self.assertEqual(
            list((poly // scalarpoly).coefficients),
            [x / scalar for x in numbers]
        )

    def test_div_scalar2(self):
        """Test that dividing by a scalar is the same as multiplying by the
        scalar's inverse"""
        a = Polynomial(map_GF2int([5,3,1,1,6,8]))
        scalar = GF2int(50)
        self.assertEqual(
            a * Polynomial(x0=scalar),
            a // Polynomial(x0=scalar.inverse())
        )

    def test_evaluate(self):
        a = Polynomial(map_GF2int([5,3,1,1,6,8]))
        e = a.evaluate(3)
        self.assertEqual(e, 196)

    def test_evaluate_array(self):
        # evaluate_array returns each term's value plus their (XOR) sum.
        a = Polynomial(map_GF2int([5,3,1,1,6,8]))
        arr, sum = a.evaluate_array(3)
        self.assertEqual(sum, 196)
        self.assertEqual(list(arr), [255, 51, 15, 5, 10, 8])

    def test_derive(self):
        a = Polynomial(map_GF2int([5,3,1,1,6,8]))
        r = a.derive()
        self.assertEqual(list(r), [17, 12, 3, 2, 6])
class cTestPolynomial(unittest.TestCase):
def test_add_1(self):
one = Polynomial([2,4,7,3])
two = Polynomial([5,2,4,2])
r = one + two
self.assertEqual(list(r.coefficients), [7, 6, 11, 5])
def test_add_2(self):
one = Polynomial([2,4,7,3,5,2])
two = Polynomial([5,2,4,2])
r = one + two
self.assertEqual(list(r.coefficients), [2,4,12,5,9,4])
def test_add_3(self):
one = Polynomial([7,3,5,2])
two = Polynomial([6,8,5,2,4,2])
r = one + two
self.assertEqual(list(r.coefficients), [6,8,12,5,9,4])
def test_mul_1(self):
one = Polynomial([2,4,7,3])
two = Polynomial([5,2,4,2])
r = one * two
self.assertEqual(list(r.coefficients),
[10,24,51,49,42,26,6])
def test_mul_at_1(self):
one = Polynomial([2,4,7,3])
two = Polynomial([5,2,4,2])
k = 3
r1 = one * two
r2 = one.mul_at(two, k)
self.assertEqual(r1.get_coefficient(k), r2)
def test_scale_1(self):
one = Polynomial([2,4,7,3])
scalar = 12
r = one.scale(12)
self.assertEqual(list(r.coefficients), [24, 48, 84, 36])
def test_div_1(self):
one = Polynomial([1,4,0,3])
two = Polynomial([1,0,1])
q, r = divmod(one, two)
self.assertEqual(q, one // two)
self.assertEqual(r, one % two)
self.assertEqual(list(q.coefficients), [1,4])
self.assertEqual(list(r.coefficients), [-1,-1])
def test_div_2(self):
one = Polynomial([1,0,0,2,2,0,1,2,1])
two = Polynomial([1,0,-1])
q, r = divmod(one, two)
self.assertEqual(q, one // two)
self.assertEqual(r, one % two)
self.assertEqual(list(q.coefficients), [1,0,1,2,3,2,4])
self.assertEqual(list(r.coefficients), [4,5])
def test_div_3(self):
# 0 quotient
one = Polynomial([1,0,-1])
two = Polynomial([1,1,0,0,-1])
q, r = divmod(one, two)
self.assertEqual(q, one // two)
self.assertEqual(r, one % two)
self.assertEqual(list(q.coefficients), [0])
self.assertEqual(list(r.coefficients), [1,0,-1])
def test_div_4(self):
# no remander
one = Polynomial([1,0,0,2,2,0,1,-2,-4])
two = Polynomial([1,0,-1])
q, r = divmod(one, two)
self.assertEqual(q, one // two)
self.assertEqual(r, one % two)
self.assertEqual(list(q.coefficients), [1,0,1,2,3,2,4])
self.assertEqual(list(r.coefficients), [0])
def test_div_fast_1(self):
# no remander
one = Polynomial([1,0,0,2,2,0,1,-2,-4])
two = Polynomial([1,0,-1])
q, r = one._fastdivmod(two)
self.assertEqual(q, one._fastfloordiv(two))
self.assertEqual(r, one._fastmod(two))
self.assertEqual(list( |
mnestis/provglish | provglish/nl/templates/generation_template.py | Python | mit | 3,100 | 0.006129 | from provglish import transform, prov
from provglish.lexicalisation import urn_from_uri as lex
from provglish.lexicalisation import plural_p
from provglish.prov import PROV
from provglish.nl.tools import SETTINGS, realise_sentence
import rdflib
from rdflib.plugins import sparql
from rdflib import RDF
import urllib2
_generation_query = | sparql.prepareQuery(
"""
SELECT ?entity ?generation ?time ?activity WHERE {
GRAPH <prov_graph> {
{
?entity a prov:Entity .
?entity prov:qualifiedGeneration ?generation .
?generation a prov:Generation .
OPTIONAL { ?generation prov:atTime ?time } .
OPTIONAL { ?generation prov:activity ?activity } .
FILTER ( bound(?time) || bound(?activity))
} UNION {
?entity a prov: | Entity .
?entity prov:wasGeneratedBy ?activity .
?activity a prov:Activity
}
}
}
""",
initNs={"prov":PROV})
def _generation_binding(graph):
results = graph.query(_generation_query)
return results.bindings
def _generation_coverage(bindings, graph):
if "?generation" in bindings:
# Qualified
coverage = [(bindings["?entity"], RDF.type, PROV.Entity),
(bindings["?entity"], PROV.qualifiedGeneration, bindings["?generation"]),
(bindings["?generation"], RDF.type, PROV.Generation)]
if "?time" in bindings:
coverage.append((bindings["?generation"], PROV.atTime, bindings["?time"]))
if "?activity" in bindings:
coverage.extend([(bindings["?generation"], PROV.activity, bindings["?activity"]),
(bindings["?activity"], RDF.type, PROV.Activity)])
return coverage
else:
# Unqualified
return [(bindings["?entity"], RDF.type, PROV.Entity),
(bindings["?entity"], PROV.wasGeneratedBy, bindings["?activity"]),
(bindings["?activity"], RDF.type, PROV.Activity)]
def _generation_string(bindings, history):
sentence = {}
sentence["object"] = {"type": "noun_phrase",
"head": lex(bindings["?entity"]),
"features": {"number": "plural" if plural_p(bindings["?entity"]) else "singular"}}
sentence["verb"] = "generate"
sentence["features"] = {"tense": "past",
"passive": "true"}
sentence["modifiers"] = []
if "?time" in bindings:
sentence["modifiers"].append({"type":"preposition_phrase",
"preposition": "at",
"noun": bindings["?time"]})
if "?activity" in bindings:
sentence["modifiers"].append({"type":"preposition_phrase",
"preposition":"by",
"noun": lex(bindings["?activity"])})
return realise_sentence({"sentence":sentence})
generation = transform.Template("Generation", _generation_binding, _generation_coverage, _generation_string)
|
lucky/newf | example_app.py | Python | mit | 559 | 0.014311 | from newf import Application, Response, ResponseRedirect
def foo(request):
return Response("<h1>Hello Worl | d!</h1>")
def bar(request):
return ResponseRedirect("/foo")
def test_debug(request):
raise Exception, 'I am the exception'
urls = (
(r'^/foo$', foo),
(r'^/bar$', bar),
(r'^/test-debug$', test_debug),
)
application = Application(urls, debug=True)
if __name__ == '__main__':
from wsg | iref.simple_server import make_server
server = make_server('', 8000, application)
server.serve_forever()
|
byu-osl/city-issue-tracker | app/views.py | Python | gpl-2.0 | 12,663 | 0.033483 | import json
from flask import render_template, request, jsonify, Response
from app import app, db, ValidationError, genError
from fakeData import service_list, service_def, get_service_reqs, get_service_req, user_data
from models import Service, ServiceAttribute, Keyword, KeywordMapping, ServiceRequest, User, Note
from werkzeug.utils import secure_filename
from os import urandom
from passlib.hash import sha512_crypt
from datetime import datetime
JSON_ERR_MSG = "Invalid JSON or No JSON"
db.create_all()
############
# Helper functions
###########
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
#############
# Main Page #
#############
#TODO: Make sure the html is being served up correctly
@app.route('/index')
@app.route('/')
def home():
return render_template('index.html')
@app.route('/users.html')
def showUsers():
return render_template('users.html')
################
# User Section #
################
#TODO: How to do this securely
@app.route('/users/sign_in', methods=['POST'])
def signIn():
return "---"
#TODO: Implement
@app.route('/users/sign_out', methods=['POST'])
def signOut():
return "---"
#TODO: Implement
@app.route('/users', methods=['POST'])
def createUser():
'''
Create a User
'''
if not request.json:
return genError(400, JSON_ERR_MSG)
requestJson = request.get_json()
user = User()
user.email = requestJson['email']
user.firstName = requestJson['first_name']
user.lastName = requestJson['last_name']
user.phone = None
user.role = 'admin' if requestJson['admin'] else 'user'
# Generate a Cryptographically Secure Salt of length 16 bytes then generate the password
# hash using the password and salt and hashing 10,000 times.
password = requestJson['password']
user.passwordSalt = urandom(8).encode('hex')
user.passwordHash = sha512_crypt.encrypt(password, rounds = 10000, salt = user.passwordSalt)
user.lastLogin = None
user.joined = datetime.today()
user.subscriptionList = []
user.fromDict(requestJson)
db.session.add(user)
db.session.commit()
return user.toCitJSON()
#TODO: Implement
@app.route('/users/<int:user_id>', methods=['GET'])
def getUser(user_id):
'''
Retrieve a User's information
'''
user = User.query.get(user_id)
if user == None:
return genError(404, "User ID was not found");
return user.toCitJSON();
#TODO: Implement
@app.route('/users/<int:user_id>', methods=['POST'])
def updateUser(user_id):
if not request.json:
return genError(400, JSON_ERR_MSG)
requestJson = request.get_json()
user = User.query.get(user_id)
user.email = requestJson['email']
user.firstName = requestJson['first_name']
user.lastName = requestJson['last_name']
user.role = 'admin' if requestJson['admin'] else 'user'
if requestJson['password']:
password = requestJson['password']
user.passwordSalt = urandom(8).encode('hex')
user.passwordHash = sha512_crypt.encrypt(password, rounds = 10000, salt = user.passwordSalt)
user = user.fromDict(requestJson)
db.session.add(user)
db.session.commit()
return user.toCITJSON()
#TODO: Implement
@app.route('/users/signed_in_user')
def getOwnAccount():
return "---"
#TODO: Implement
@app.route('/users', methods=['GET'])
def getAllUsers():
'''
Return all users. Pagination is not implemented yet, so offset will always be 0, and total_results and total_returned will always be the same.
'''
allUsers = User.query.all()
userArray = []
for user in allUsers:
userArray.append(user.toCITDict())
return jsonify
(
{
"total_results": len(allUsers),
"total_returned": len(allusers),
"offset": 0,
"users": userArray
}
)
###################
# Service Section #
###################
#Create a new service
@app.route('/services', methods=['POST'])
def newService():
"""
A route to post a new Service
TODO: Test
"""
if not request.json:
return genError(400, JSON_ERR_MSG)
s = Service()
s.fromDict(request.json)
db.session.add(s)
db.session.commit()
jsonResp = s.toJSON()
jsonResp.status_code = 201
return jsonResp
#Get a list of all services
@app.route('/services', methods=['GET'])
def getServices():
"""
A route to get a list of all services
TODO: Have some filter options. Maybe get attributes
TODO: Test
"""
l = Service.query.all()
return Service.composeFormatList("json", l)
#Get a specific service
@app.route('/services/<int:serviceId>', methods=['GET'])
def getService(serviceId):
"""
A route to get a service
TODO: Test
Keyword arguments:
serviceId -- The serviceId of a Service to get
"""
s = Service.query.get(serviceId)
if s == None:
return genError(404, "Service ID was not found");
return s.toCitJSON();
#Updates a service
@app.route('/services/<int:serviceId>', methods=['POST'])
def postService(serviceId):
"""
A route to update a service
This should be working
Keyword arguments:
serviceId -- The serviceId of a Service we want to update
"""
#TODO: Check if json will error out
if not request.json:
return genError(400, JSON_ERR_MSG)
s = Service.query.get(serviceId)
if s == None:
return genError(404, "Service ID was not found");
try:
s.fromDict(request.json)
except ValidationError as e:
return genError(400, e.errorMsg)
db.session.commit()
return s.toJSON()
@app.route('/services/<int:serviceId>', methods=['DELETE'])
def deleteService(serviceId):
"""
A route to delete a service
NOT IMPLEMENTED
"""
s = Service.query.get(serviceId)
db.session.delete(s)
db.session.commit()
#TODO: Some other way of marking success
return "---"
#TODO: Implement
@app.route('/services/<int:serviceId>/attr', methods=['GET'])
def getServiceAttr(serviceId):
"""
A view to get the attributes of a specific Service
NOT IMPLEMENTED
"""
return "---"
##################
# Issues Section #
##################
#TODO: Test and deal with user authorization
@app.route('/issues/<int:issue_id>', methods=['GET'])
def getIssue(issue_id):
"""
Return the issue with id = issue_id
Keyword arguments:
issue_id -- The id of an issue t | o get
"""
serviceRequest = ServiceRequest.query.get(issue_id)
if serviceRequest == None:
return genError(404, "Issue ID was not found");
return serviceRequest.toCitJSON()
#TODO: Test and deal with user authorization
@app.route('/issues', methods=['POST'])
def createI | ssue():
"""
Create an issue
"""
#TODO: Authoization?
requestJson = request.get_json()
if not requestJson:
return genError(400, JSON_ERR_MSG)
serviceRequest = ServiceRequest()
try:
serviceRequest.fromCitDict(requestJson);
except ValidationError as e:
return genError(400, e.errorMsg)
db.session.add(serviceRequest)
db.session.commit()
return serviceRequest.toCitJSON()
#TODO: issue_id to issueId
#TODO: Test and deal with user authorization
@app.route('/issues/<int:issue_id>', methods=['POST'])
def updateIssue(issue_id):
"""
Update the given issue
Keyword Arguments:
issue_id -- The id of an issue to update
"""
#TODO: Make sure that the updator is authorized
requestJson = request.get_json()
if not requestJson:
return genError(400, JSON_ERR_MSG)
serviceRequest = ServiceRequest.query.get(issue_id)
#TODO: Check that it's not empty
try:
serviceRequest.fromCitDict(requestJson);
except ValidationError as e:
return genError(400, e.errorMsg)
db.session.commit()
return serviceRequest.toCitJSON()
#TODO: Test and deal with user authorization
@app.route('/issues', methods=['GET'])
def viewAllIssues():
"""
Return all the issues
There are JSON attirbutes that are send but maybe they should be part
of get request(not the body?)
JSON Attributes:
orderBy -- create_at, priority, open
offset -- (int) How many down the list you should start at
max -- (int) The number of items to get
query -- A string to look for in the issues
reversed -- (bool)
includeClosed -- (bool)
"""
requestJson = request.get_json()
#If there is no json that is ok since we have defaults
if not requestJson:
requestJson = {}
#TODO: Have defaults
orderBy = requestJson.get("orderBy", "created_at")
offset = int(requestJson.get("offset", 0))
max = int(requestJson.get("max", 50))
query = requestJson.get |
matthewpklein/battsimpy | docs/extra_files/electrode_ocv_gen.py | Python | gpl-3.0 | 5,199 | 0.025582 | import pickle
from matplotlib import pyplot as plt
plt.style.use('classic')
import matplotlib as mpl
fs = 12.
fw = 'bold'
mpl.rc('lines', linewidth=2., color='k')
mpl.rc('font', size=fs, weight=fw, family='Arial')
mpl.rc('legend', fontsize='small')
import numpy
def grad( x, u ) :
return numpy.gradient(u) / numpy.gradient(x)
date = '20160519'
base = '/home/mk-sim-linux/Battery_TempGrad/Python/batt_simulation/battsimpy/'
base_dir = '/home/mk-sim-linux/Battery_TempGrad/JournalPaper2/Paper2/ocv_unif35/'
fig_dir = '/home/mk-sim-linux/Battery_TempGrad/JournalPaper3/modeling_paper_p3/figs/'
#base_dir = '/home/m_klein/tgs_data/ocv_unif35/'
#base_dir = '/Volumes/Data/Paper2/ocv_dat/'
#bsp_path = '/Users/mk/Desktop/battsim/battsimpy | /'
nmc_rest_523 = numpy.loadtxt( base+'data/Model_nmc/Model_Pars/solid/thermo | dynamics/2012Yang_523NMC_dchg_restOCV.csv', delimiter=',' )
nmc_cby25_111 = numpy.loadtxt( base+'data/Model_nmc/Model_Pars/solid/thermodynamics/2012Wu_NMC111_Cby25_dchg.csv' , delimiter=',' )
nmc_YangWu_mix = numpy.loadtxt( base+'data/Model_nmc/Model_Pars/solid/thermodynamics/YangWuMix_NMC_20170607.csv' , delimiter=',' )
lfp_prada_dchg = numpy.loadtxt( base+'data/Model_v1/Model_Pars/solid/thermodynamics/2012Prada_LFP_U_dchg.csv' , delimiter=',' )
graph_hess_dchg = numpy.loadtxt( base+'data/Model_nmc/Model_Pars/solid/thermodynamics/Ua_cell4Fit_NMC_2012Yang_refx.csv' , delimiter=',' ) #graphite_Hess_discharge_x.csv
#xin, Uin = 1.-lfp_prada_dchg[:,0], lfp_prada_dchg[:,1]
#xin, Uin = 1.-nmc_rest_523[:,0], nmc_rest_523[:,1]
xin, Uin = 1.-nmc_YangWu_mix[:,0], nmc_YangWu_mix[:,1]
#xin, Uin = 1.-nmc_cby25_111[:,0], nmc_cby25_111[:,1]
xin2, Uin2 = graph_hess_dchg[:,0], graph_hess_dchg[:,1]#-0.025
pfiles2 = [ base_dir+'slowOCVdat_cell4_slow_ocv_'+date+'.p', ]
# Load the cell ocv c/60 data
d = pickle.load( open( pfiles2[0], 'rb' ) )
max_cap = numpy.amax( d['interp']['cap'] )
x_cell, U_cell = 1-numpy.array(d['interp']['cap'])/max_cap*1., d['interp']['dchg']['volt']
# NMC 532 scale - NMC cyl cells (cell 4)
#scale_x = 1.8#1.5 # 1.55
#shift_x = -.01#-.06 #-.12
scale_x = 1.42 # 1.55
shift_x = -.03 #-.12
#scale_x1 = 1.9
#shift_x1 = -.03
## LFP Prada - (cell 2)
#scale_x = 1.25
#shift_x = 1.05-scale_x
# Graphite - scale NMC cyl cells (cell 4)
scale_x2 = 1/.8 #1./0.83 #
shift_x2 = -.06 #-.035
#scale_x2 = 1/.74
#shift_x2 = -.04
figres = 300
figname = base_dir+'ocv-plots_'+date+'.pdf'
sty = [ '-', '--' ]
fsz = (190./25.4,120./25.4)
f1, axes = plt.subplots(1,2,figsize=fsz)
a1,a2 = axes
# Plot the full cell ocv
a1.plot( x_cell, U_cell, '-b', label='Cell C/60 Data' )
# Plot the cathode curve for the shifted soc operating window
a1.plot( xin*scale_x+shift_x, Uin, '-g', label='Cathode' )
# Plot the anode curve for the shifted soc operating window
#a1t = a1.twinx()
a1.plot( xin2*scale_x2+shift_x2, Uin2, '-k', label='Anode' )
# Compute the cathode ocv for the full cell soc operating window
if xin[1] < xin[0] :
Uc = numpy.interp( x_cell, numpy.flipud(xin*scale_x+shift_x), numpy.flipud(Uin) )
else :
Uc = numpy.interp( x_cell, xin*scale_x+shift_x, Uin )
Ua = numpy.interp( x_cell, xin2*scale_x2+shift_x2, Uin2 )
# Plot the estimated full cell ocv curve for the aligned anode and cathode equilibrium curves
#a1.plot( x_cell, Uc-U_cell, ':k', label='U$_{anode}$ fit' )
#a1t.set_ylim([0.,2.])
a1.plot( x_cell, Uc-Ua, ':k', label='U$_{cell}$ fit' )
# Calculate the alignment stoichs for anode and cathode
Ua_out = Uc - U_cell
xa_out = (x_cell-shift_x2)/scale_x2
#numpy.savetxt( base+'data/Model_v1/Model_Pars/solid/thermodynamics/Ua_lfp_2012Prada.csv', numpy.array([xa_out, Ua_out]).T, delimiter=',' )
#numpy.savetxt( base+'data/Model_v1/Model_Pars/solid/thermodynamics/Ua_nmc_2012Yang.csv', numpy.array([xa_out, Ua_out]).T, delimiter=',' )
yin = 1.-xin
xc_lo = 1. - (-shift_x/scale_x)
xc_hi = 1. - (1.-shift_x)/scale_x
xa_lo = (-shift_x2/scale_x2)
xa_hi = (1.-shift_x2)/scale_x2
# Print out the stoich limits for the anode and cathode
print 'xc_lo, xc_hi:',xc_lo, xc_hi
print 'xa_lo, xa_hi:',xa_lo, xa_hi
a1.set_xlabel( 'State of Charge', fontsize=fs, fontweight=fw )
a1.set_ylabel( 'Voltage vs. Li [V]', fontsize=fs, fontweight=fw )
a1.set_title( 'Full and Half Cell OCV', fontsize=fs, fontweight=fw )
a1.legend(loc='best')
a1.set_axisbelow(True)
a1.grid(color='gray')
a2.plot( x_cell, grad(x_cell, U_cell), label=r'$\frac{\partial U_{cell}}{\partial SOC}$' )
a2.plot( x_cell, -grad(x_cell, Ua), label=r'$\frac{\partial U_{anode}}{\partial SOC}$' )
a2.set_xlabel( 'State of Charge', fontsize=fs, fontweight=fw )
a2.set_ylabel( '$\partial U / \partial SOC$', fontsize=fs, fontweight=fw )
a2.set_title( 'OCV Gradients for Anode Alignment', fontsize=fs, fontweight=fw )
a2.legend(loc='best')
a2.set_axisbelow(True)
a2.grid(color='gray')
a2.set_ylim([-0.1,1.5])
#plt.suptitle('LFP/C$_6$ Half Cell OCV Alignment', fontsize=fs, fontweight=fw)
plt.suptitle('NMC/C$_6$ Half Cell OCV Alignment', fontsize=fs, fontweight=fw)
plt.tight_layout(rect=[0,0.03,1,0.97])
plt.show()
#f1.savefig( fig_dir+'ocv_alignment_cell2_lfp.pdf', dpi=figres)
#f1.savefig( fig_dir+'ocv_alignment_cell4_nmc.pdf', dpi=figres)
|
backupManager/pyflag | src/plugins_old/DiskForensics/FileHandlers/RFC2822.py | Python | gpl-2.0 | 8,766 | 0.013347 | """ This scanner handles RFC2822 type messages, creating VFS nodes for all their children """
# Michael Cohen <scudette@users.sourceforge.net>
# David Collett <daveco@users.sourceforge.net>
# Gavin Jackson <gavz@users.sourceforge.net>
#
# ******************************************************
# Version: FLAG $Version: 0.87-pre1 Date: Thu Jun 12 00:48:38 EST 2008$
# ******************************************************
#
# * This program is free software; you can redistribute it and/or
# * modify it under the terms of the GNU General Public License
# * as published by the Free Software Foundation; either version 2
# * of the License, or (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# ******************************************************
import os.path
import pyflag.pyflaglog as pyflaglog
import pyflag.Scanner as Scanner
import pyflag.Reports as Reports
import pyflag.DB as DB
import pyflag.conf
config=pyflag.conf.ConfObject()
import email, email.Utils,time
from pyflag.FileSystem import File
import pyflag.Time as Time
import pyflag.Magic as Magic
class MBox(Magic.Magic):
type = "MBox mail file"
mime = "message/x-application-mbox"
default_score = 19
literal_rules = [
( "from ", (0,0)),
( "\nmime-version: ", (0,1000)),
( "\nreceived: ", (0,1000)),
( "\nfrom: ", (0,1000)),
( "\nmessage_id: ",(0,1000)),
( "\nto: ", (0,1000)),
( "\nsubject: ", (0,1000)),
( "\nreturn-path: ", (0,1000))
]
samples = [ (95, """From \"Michael Cohen\" Thu Jan 6 14:49:13 2005
Message-ID: <42BE76A2.8090608@users.sourceforge.net>
Date: Sun, 26 Jun 2005 19:34:26 +1000
From: scudette <scudette@users.sourceforge.net>
User-Agent: Debian Thunderbird 1.0.2 (X11/20050602)
X-Accept-Language: en-us, en
MIME-Version: 1.0
To: scudette@users.sourceforge.net
Subject: The Queen
Content-Type: multipart/mixed;
boundary="-.-----------020606020801030004000306"
"""
) ]
class RFC2822Magic(Magic.Magic):
type = "RFC2822 Mime message"
mime = "message/rfc2822"
default_score = 20
literal_rules = [
( "\nmime-version:", (0,1000)),
( "\nreceived:", (0,1000)),
( "\nfrom:", (0,1000)),
( "\nmessage_id:",(0,1000)),
( "\nto:", (0,1000)),
( "\nsubject:", (0,1000)),
( "\nreturn-path:", (0,1000))
]
samples = [ (80, """Message-ID: <42BE76A2.8090608@users.sourceforge.net>
Date: Sun, 26 Jun 2005 19:34:26 +1000
From: scudette <scudette@users.sourceforge.net>
User-Agent: Debian Thunderbird 1.0.2 (X11/20050602)
X-Accept-Language: en-us, en
MIME-Version: 1.0
To: scudette@users.sourceforge.net
Subject: The Queen
Content-Type: multipart/mixed;
boundary="-.-----------020606020801030004000306"
"""
) ]
class RFC2822(Scanner.GenScanFactory):
""" Scan RFC2822 Mail messages and insert record into email table"""
default = True
depends = ['TypeScan']
group = 'FileScanners'
def __init__(self,fsfd):
Scanner.GenScanFactory.__init__(self,fsfd)
dbh=DB.DBO(self.case)
class Scan(Scanner.StoreAndScanType):
types = [ 'message/rfc2822', 'message/x-application-mbox' ]
def external_process(self, fd):
if self.mime_type==self.types[0]:
self.process_message(fd)
else:
self.process_mbox(fd)
def process_mbox(self, fd):
""" This is borrowed from python's mailbox module """
path, inode, inode_id = self.ddfs.lookup(inode = fd.inode)
starts, stops = [], []
while True:
line_pos = fd.tell()
line = fd.readline()
if line.startswith('From '):
if len(stops) < len(starts):
stops.append(line_pos - len(os.linesep))
starts.append(line_pos)
elif line == '':
stops.append(line_pos)
break
for i in range(len(starts)):
new_inode = "o%s:%s" % (starts[i], stops[i] - starts[i])
new_inode_id = self.ddfs.VFSCreate(inode, new_inode,
"Msg %s" % i)
tmpfd = self.ddfs.open(inode_id = new_inode_id)
self.process_message(tmpfd)
def process_message(self, fd):
count = 0
try:
new_path, new_inode, new_inode_id = self.ddfs.lookup(inode = fd.inode)
a = email.message_from_file(fd)
try:
subject = a['subject']
if len(subject)>50:
subject = subject[:50] + " ..."
new_name = "%s: %s" % (new_path, subject)
self.ddfs.VFSRename(new_inode_id, new_name)
except KeyError:
pass
pyflaglog.log(pyflaglog.DEBUG,"Found an email message in %s: %s" % (
new_inode, a['subject']))
#Mysql is really picky about the date formatting
date = email.Utils.parsedate(a.get('Date'))
if not date:
raise Exception("No Date field in message - this is probably not an RFC2822 message at all.")
dbh=DB.DBO(self.case)
dbh.insert('email',
inode = self.inode,
_date = "from_unixtime(%r)" % int(time.mktime(date)),
to = a.get('To'),
_from = "%r" % a.get('From'),
subject = a.get('Subject'))
for part in a.walk():
if part.get_content_maintype() == 'multipart':
continue
filename = part.get_filename()
data = part.get_payload(decode=1)
## Sometimes the filename is specified in the
## content-type header:
try:
for x,y in part.get_params():
if x =="name":
filename=y
break
except:
pass
if not filename: filename="Attachment %s" % count
## Create the VFSs node:
new_inode_id = self.ddfs.VFSCreate(
new_inode,"m%s" % count, filename,
_mtime = time.mktime(date), size=len(data)
)
## Now call the scanners on new file:
new_fd = self.ddfs.open(inode_id=new_inode_id)
Scanner.scanfile(self.ddfs,new_fd,self.factories)
| new_fd.close()
count+=1
except Exception,e:
pyflaglog.log(pyflaglog.DEBUG,"RFC2822 Scan: Unable to parse inode %s as an RFC2822 message (%s)" % (self.inode,e))
class RFC2822_File(File):
""" A VFS Driver for reading mail attachments """
specifier = 'm'
def __init__(self, case, fd, | inode):
File.__init__(self, case, fd, inode)
self.cache()
def read(self, length=None):
try:
return File.read(self,length)
except IOError:
pass
if self.readptr > 0:
return ''
self.fd.seek(0)
a=email.message_from_file(self.fd)
my_part = self.inode.split('|')[-1]
attachment_numb |
apyrgio/ganeti | lib/jqueue/exec.py | Python | bsd-2-clause | 5,254 | 0.006662 | #
#
# Copyright (C) 2014 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module implementing executing of a job as a separate process
The complete protocol of initializing a job is described in the haskell
module Ganeti.Query.Exec
"""
import contextlib
import logging
import os
import signal
import sys
import time
from ganeti import mcpu
from ganeti.server import masterd
from ganeti.rpc import transport
from ganeti import utils
from ganeti import pathutils
from ganeti.utils import livelock
def _GetMasterInfo():
"""Retrieves the job id and lock file name from the master process
This also closes standard input/output
"""
logging.debug("Opening transport over stdin/out")
with contextlib.closing(transport.FdTransport((0, 1))) as trans:
logging.debug("Reading job id from the master process")
job_id = int(trans.Call(""))
logging.debug("Got job id %d", job_id)
logging.debug("Reading the livelock name from the master process")
livelock_name = livelock.LiveLockName(trans.Call(""))
logging.debug("Got livelock %s", livelock_name)
return (job_id, livelock_name)
def main():
debug = int(os.environ["GNT_DEBUG"])
logname = pathutils.GetLogFilename("jobs")
utils.SetupLogging(logname, "job-startup", debug=debug)
(job_id, livelock_name) = _GetMasterInfo()
utils.SetupLogging(logname, "job-%s" % (job_id,), debug=debug)
exit_code = 1
try:
logging.debug("Preparing the context and the configuration")
context = masterd.GanetiContext(livelock_name)
logging.debug("Registering signal handlers")
cancel = [False]
prio_change = [False]
def _TermHandler(signum, _frame):
logging.info("Killed by signal %d", signum)
cancel[0] = True
signal.signal(signal.SIGTERM, _TermHandler)
def _HupHandler(signum, _frame):
logging.debug("Received signal %d, old flag was %s, will set to True",
signum, mcpu.sighupReceived)
mcpu.sighupReceived[0] = True
signal.signal(signal.SIGHUP, _HupHandler)
def _User1Handler(signum, _frame):
logging.info("Received signal %d, indicating priority change", signum)
prio_change[0] = True
signal.signal(signal.SIGUSR1, _User1Handler)
logging.debug("Picking up job %d", job_id)
context.jobqueue.PickupJob(job_id)
# waiting for the job to finish
time.sleep(1)
while not context.jobqueue.HasJobBeenFinalized(job_id):
if cancel[0]:
logging.debug("Got cancel request, cancelling job %d", job_id)
r = context.jobqueue.CancelJob(job_id)
logging.debug("CancelJob result for job %d: %s", job_id, r)
cancel[0] = False
if prio_change[0]:
logging.debug("Received priority-change request")
try:
fname = os.path.join(pathutils.LUXID_MESSAGE_DIR, "%d.prio" % job_id)
new_prio = int(utils.ReadFile(fname))
utils.RemoveFile(fname)
logging.debug("Changing priority of job %d to %d", j | ob_id, new_prio)
r = context.jobqueue.ChangeJobPrio | rity(job_id, new_prio)
logging.debug("Result of changing priority of %d to %d: %s", job_id,
new_prio, r)
except Exception: # pylint: disable=W0703
logging.warning("Informed of priority change, but could not"
" read new priority")
prio_change[0] = False
time.sleep(1)
# wait until the queue finishes
logging.debug("Waiting for the queue to finish")
while context.jobqueue.PrepareShutdown():
time.sleep(1)
logging.debug("Shutting the queue down")
context.jobqueue.Shutdown()
exit_code = 0
except Exception: # pylint: disable=W0703
logging.exception("Exception when trying to run job %d", job_id)
finally:
logging.debug("Job %d finalized", job_id)
logging.debug("Removing livelock file %s", livelock_name.GetPath())
os.remove(livelock_name.GetPath())
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
kgblll/libresoft-gymkhana | apps/gymkhana/core/api_team_member.py | Python | gpl-2.0 | 3,062 | 0.010451 | #
# Copyright (C) 2009 GSyC/LibreSoft
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>._
#
# Author : Jorge Fernandez Gonzalez <jorge.fernandez.gonzalez __at__ gmail.com>
#
import random
from social.core import api as api_lgs
from apps.gymkhana.models import *
def has_team(event, team_member):
    """Check whether *team_member*'s user belongs to a team for *event*.

    Returns a ``(found, message, membership)`` triple: ``(True, "ok", tm)``
    with the matching TeamMember row, or ``(False, msg, None)`` when no
    membership exists.
    """
    try:
        # BUG fix: the original referenced an undefined name `team`; the
        # membership must be resolved through the member's own team relation.
        membership = team_member.team.teammember_set.get(
            event=event, user=team_member.user)
        return True, "ok", membership
    except Exception:
        # Any lookup failure (DoesNotExist included) means "no team".
        return False, "Team Member Has Not Team.", None
#def create(team, first_name, last_name):
# team_member = TeamMember()
#
# num_team_members = len(team.teammember_set.all())
# username = str(random.randint(0,1000)) + "-" + team.group.name
# password = str(random.randint(0,1000)) + "-" + team.group.name
# user = {'username': username, 'password': password, 'first_name': first_name, 'last_name': last_name}
# correct, message = api_lgs.user.create_or_modify(user, modify=False)
# if correct:
# # message almacena user.id cuando todo ha ido correctamente
# user = Person.objects.get(id=message)
# team_member.user = user
# else:
# #return False, render_to_response('error.' + format, {'code': 500, 'description': message})
# return False, message
#
# team_member.team = team
# team_member.save()
# return True, "ok"
def create_by_user_id(event, team, user_id):
    """Create a TeamMember linking the user with id *user_id* to *team* for *event*.

    Returns the ``(True, "ok")`` success pair used throughout this API module.
    """
    person = Person.objects.get(id=user_id)
    membership = TeamMember(event=event, team=team, user=person)
    membership.save()
    return True, "ok"
def join(event, team_id, session_user_id):
    """Add the session user to team *team_id*, unless already a member.

    Reconstructs two extraction-garbled lines of the original
    (``TeamMember(user=user,tea | m=team)`` and the commented-out
    FirstProof lookup). Returns ``(True, "ok")`` either way.
    """
    user = Person.objects.get(id=session_user_id)
    # Unsaved Team stub: only its primary key is needed for the FK lookups.
    team = Team(id=team_id)
    try:
        # Already a member -> nothing to do (EAFP: .get() raises otherwise).
        TeamMember.objects.get(user=user, team=team)
        #return False, "Don't Repeat a Team Member."
    except Exception:
        # Not a member yet: create the membership row.
        team_member = TeamMember(user=user, team=team)
        team_member.save()
        #first_proof = FirstProof.objects.get(event=event, team=team)
    return True, "ok"
def delete(team_member):
    """Delete a team-membership record and report success.

    Only the membership row is removed. (Original Spanish note, translated:
    deleting ``team_member.user`` would also remove the user from the LGS
    social network; since team members are only ever created for users not
    already in LGS, removing the membership alone is sufficient here.)
    """
    #team_member.user.delete()
    team_member.delete()
    return True, "ok"
#def list_previous_teams(team_member):
|
sharkspeed/dororis | scripts/1w_1w.py | Python | bsd-2-clause | 555 | 0.007561 | # -*- encoding = utf-8 -*-
def test(n, flag=8):
return pow(3, n) < 10 ** flag
def main():
    """Print successive powers of 3 below 10**8, then the first exponent
    (and power) at or past that threshold.

    Reconstructs the extraction-garbled ``d | ef main():`` line of the
    original; behavior is otherwise unchanged.
    """
    exponent = 0
    while test(exponent):
        print(pow(3, exponent))
        exponent += 1
    # First exponent whose power reaches 10**8, and that power itself.
    print(exponent)
    print(pow(3, exponent))
# Script entry point: print the powers-of-3 table when run directly.
if __name__ == '__main__':
    main()
# 2D:
# 134217728 ---> (17 month) ---> 1024
# Pixel: x y color owner price is_avaliable is_random_choosen is_advanced_color has_tax is_show show_price
# ExchangeRecord: from_user to_user price datetime tax [属于前 6000w 且价格 | 低于 value 不收取费用]
# User:
# 3D:
# 129140163 ---> (13 month) ---> 81 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.