| repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6–947k) | score (float64 0–0.34) | prefix (stringlengths 0–8.16k) | middle (stringlengths 3–512) | suffix (stringlengths 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
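Each record below carries the repository metadata columns on one row, followed by the sampled file's text split into `prefix`, `middle`, and `suffix` strings (a fill-in-the-middle style split, judging by the column names). As a minimal sketch of how such a row might be consumed, assuming rows are exposed as plain Python dicts keyed by exactly these column names (an assumption; no loader is shown in this dump), the full file text is recovered by concatenating the three pieces:

```python
def reassemble(row):
    """Rebuild the sampled file's full text from its three string columns."""
    return row["prefix"] + row["middle"] + row["suffix"]


# Hypothetical row shaped like the records below; the string values are
# placeholders, not the actual split points of any record in this dump.
row = {
    "repo_name": "fablab-bayreuth/fablight",
    "path": "Fablight-Gui/hsv_picker.py",
    "language": "Python",
    "license": "mit",
    "size": 16281,
    "score": 0.021559,
    "prefix": "...text before the masked span...",
    "middle": "...the masked span itself...",
    "suffix": "...text after the masked span...",
}

print(reassemble(row))
```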
| fablab-bayreuth/fablight | Fablight-Gui/hsv_picker.py | Python | mit | 16,281 | 0.021559 |
from numpy import *
from colorsys import *
import Tkinter as tk
import ttk
import PIL.Image, PIL.ImageTk
#-----------------------------------------------------------------------------------------
# HSV picker
# Three horizontal scales for Hue, Sat, Val
class HSV_Picker:
panel_size = 290, 32
hue, sat, val = 0, 0, 0
hue_img, sat_img, val_img = None, None, None
def __init__( self, parent, color_broadcast=None ):
self.parent = parent
self.frame = tk.Frame(self.parent)
self.colorbc = color_broadcast
# Get initial color
self.receive_color()
# Create initial images
self.create_img()
# setup frames
self.hue_panel = tk.Label(self.frame, image=self.hue_img, bd=0,
width=self.panel_size[0], height=self.panel_size[1] )
self.sat_panel = tk.Label(self.frame, image=self.sat_img, bd=0,
width=self.panel_size[0], height=self.panel_size[1] )
self.val_panel = tk.Label(self.frame, image=self.val_img, bd=0,
width=self.panel_size[0], height=self.panel_size[1] )
# bind event handlers
self.hue_panel.bind('<Button-1>', self.on_hue_click)
self.hue_panel.bind('<B1-Motion>', self.on_hue_click)
self.sat_panel.bind('<Button-1>', self.on_sat_click)
self.sat_panel.bind('<B1-Motion>', self.on_sat_click)
self.val_panel.bind('<Button-1>', self.on_val_click)
self.val_panel.bind('<B1-Motion>', self.on_val_click)
self.parent.bind('<<NotebookTabChanged>>', self.on_tab_changed)
self.place()
def place(self,**args): # place frames on grid
self.frame.grid(args)
tk.Label(self.frame, text='Hue').grid(column=0, row=0, padx=8, pady=(6,0), sticky=tk.W)
self.hue_panel.grid(column=0, row=1, padx=8, pady=(0,6), sticky=tk.W+tk.E)
tk.Label(self.frame, text='Saturation').grid(column=0, row=2, padx=8, pady=0, sticky=tk.W)
self.sat_panel.grid(column=0, row=3, padx=8, pady=(0,6), sticky=tk.W+tk.E)
tk.Label(self.frame, text='Value (Brightness)').grid(column=0, row=4, padx=8, pady=0, sticky=tk.W)
self.val_panel.grid(column=0, row=5, padx=8, pady=(0,6), sticky=tk.W+tk.E)
##self.hue_panel.grid(column=0, row=0, padx=8, pady=8, sticky=tk.W+tk.E)
##self.sat_panel.grid(column=0, row=1, padx=8, pady=8, sticky=tk.W+tk.E)
##self.val_panel.grid(column=0, row=2, padx=8, pady=8, sticky=tk.W+tk.E)
def create_img(self):
self.create_hue_img()
self.create_sat_img()
self.create_val_img()
def create_hue_img(self):
w,h = self.panel_size
if (self.hue_img==None): # First call, create color scale
hue_scale = empty((h,w,3), dtype=uint8)
hue_scale[:] = 255*array([hsv_to_rgb(x,0.9,0.9) for x in 1.*arange(0,w)/w])
self.hue_scale = hue_scale
# Mark current value
hue_scale = self.hue_scale.copy()
hue_scale[:, int(self.hue*(w-1)), :] = 0
# Create image object for gui
hue_img = PIL.Image.frombuffer('RGB', (w,h), hue_scale, 'raw', 'RGB', 0, 1)
if (self.hue_img==None):
self.hue_img = PIL.ImageTk.PhotoImage( hue_img )
else:
self.hue_img.paste( hue_img ) # PASTE! Do not replace. Image frame remembers original object
def create_sat_img(self):
w,h = self.panel_size
sat_scale = empty((h,w,3), dtype=uint8)
sat_scale[:] = 255*array([hsv_to_rgb(self.hue, x, 1) for x in 1.*arange(0,w)/w])
#Mark current value
sat_scale[:, int(self.sat*(w-1)), :] = 0
# Create image object for gui
sat_img = PIL.Image.frombuffer('RGB', (w,h), sat_scale, 'raw', 'RGB', 0, 1)
if (self.sat_img==None):
self.sat_img = PIL.ImageTk.PhotoImage( sat_img )
else:
self.sat_img.paste( sat_img ) # PASTE! Do not replace. Image frame remembers original object
def create_val_img(self):
w,h = self.panel_size
val_scale = empty((h,w,3), dtype=uint8)
val_scale[:] = 255*array([hsv_to_rgb(self.hue, self.sat, x) for x in 1.*arange(0,w)/w])
# Mark current value
val_scale[:, int(self.val*(w-1)), :] = 255 if self.val<0.5 else 0
# Create image object for gui
val_img = PIL.Image.frombuffer('RGB', (w,h), val_scale, 'raw', 'RGB', 0, 1)
if (self.val_img==None):
self.val_img = PIL.ImageTk.PhotoImage( val_img )
else:
self.val_img.paste( val_img ) # PASTE! Do not replace. Image frame remembers original object
def on_hue_click(self, event):
x = clip( event.x, 0, self.panel_size[0] )
print 'x=', x
self.hue = float(x)/self.panel_size[0]
print "hue=", self.hue
self.create_hue_img()
self.create_sat_img()
self.create_val_img()
self.broadcast_color()
def on_sat_click(self, event):
x = clip( event.x, 0, self.panel_size[0] )
print 'x=', x
self.sat = float(x)/self.panel_size[0]
print "sat=", self.sat
self.create_sat_img()
self.create_val_img()
self.broadcast_color()
def on_val_click(self, event):
x = clip( event.x, 0, self.panel_size[0] )
print 'x=', x
self.val = float(x)/self.panel_size[0]
print "val=", self.val
self.create_sat_img()
self.create_val_img()
self.broadcast_color()
def on_tab_changed(self, event):
print 'HSV tab'
self.receive_color()
self.create_img()
self.broadcast_color()
def broadcast_color(self):
if self.colorbc:
rgb = hsv_to_rgb(self.hue, self.sat, self.val)
var = ( ('H',self.hue), ('S',self.sat), ('V',self.val) )
self.colorbc.set( rgb, var )
def receive_color(self):
if self.colorbc:
r,g,b = self.colorbc.get_rgb()
else: r,g,b = 0,0,0
self.hue, self.sat, self.val = rgb_to_hsv(r,g,b)
#-----------------------------------------------------------------------------------------
# H(SV) picker
# Two widgets: sat-val plane, vertical hue scale
class H_SV_Picker:
hue_panel_size = 32, 256
sv_panel_size = 256, 256
hue, sat, val = 0, 0, 0
hue_img = None
sv_img = None
def __init__(self, parent, color_broadcast=None):
self.parent = parent
self.frame = tk.Frame(self.parent)
self.colorbc = color_broadcast
# Get initial color
self.receive_color()
# Create initial images
self.create_img()
# setup frames
self.sv_panel = tk.Label(self.frame, image=self.sv_img, bd=0,
width=self.sv_panel_size[0], height=self.sv_panel_size[1])
self.hue_panel = tk.Label(self.frame, image=self.hue_img, bd=0,
width=self.hue_panel_size[0], height=self.hue_panel_size[1] )
# bind event handlers
self.sv_panel.bind('<Button-1>', self.on_sv_click)
self.sv_panel.bind('<B1-Motion>', self.on_sv_click)
self.hue_panel.bind('<Button-1>', self.on_hue_click)
self.hue_panel.bind('<B1-Motion>', self.on_hue_click)
self.parent.bind('<<NotebookTabChanged>>', self.on_tab_changed)
self.place()
def place(self, **args): # place frames on grid
self.frame.grid(args)
tk.Label(self.frame, text='Saturation / Value (Brightness)').grid(column=0, row=0, padx=(8,4), pady=(4,0), sticky=tk.W)
self.sv_panel.grid(column=0, row=1, padx=(8,4), pady=(2,8), sticky=tk.W+tk.E+tk.N+tk.S)
tk.Label(self.frame, text='Hue').grid(column=1, row=0, padx=(4,8), pady=(4,0))
self.hue_panel.grid(column=1, row=1, padx=(4,8), pady=(2,8), sticky=tk.N+tk.S)
def create_hue_img(self):
w,h = self.hue_panel_size
if (self.hue_img==None): # First call, create static hue-scale
hue_scale = 255*array([hsv_to_rgb(1.-y,0.9,0.9) for y in 1.*arange(0,h)/h])
self.hue_scal
| dani-i/bachelor-project | graphics/output/test_sess/test_sess_overall_results_output_f.py | Python | apache-2.0 | 5,036 | 0.000397 |
from graphics.widgets.single_line_output_f import SingleLineOutputF
from utils.test_sess_overall_results import TestSessOverallResults
import constants.output_constants as const
import tkinter as tk
class TestSessOverallResultsOutputF(tk.Frame):
"""
- Use to display overall results for a test session.
"""
def __init__(self,
parent,
disabled=False):
"""
:param parent: Parent.
:param disabled: - Default: False;
- If True all the widgets will be disabled.
"""
tk.Frame.__init__(self,
parent,
relief=const.TSOR_FRAME_RELIEF,
padx=const.TSOR_FRAME_PADX,
pady=const.TSOR_FRAME_PADY,
bd=const.TSOR_FRAME_BD)
self._slo_identifiers_classes = []
self._create_widgets()
self._place_widgets()
if disabled:
self.disable()
#########################################################################
# Widget handling
def _create_widgets(self):
self._lbl_title = tk.Label(
self,
font=const.TSOR_TITLE_FONT,
text=const.TSOR_TITLE_TEXT,
padx=const.TSOR_TITLE_PADX,
pady=const.TSOR_TITLE_PADY,
)
self._f_results = tk.Frame(
self,
relief=const.TSOR_SUBFRAME_RELIEF,
padx=const.TSOR_SUBFRAME_PADX,
pady=const.TSOR_SUBFRAME_PADY,
bd=const.TSOR_SUBFRAME_BD
)
self._slo_subtitle = SingleLineOutputF(
parent=self,
description_width=28,
font=const.TSOR_SUBTITLE_FONT,
description=const.TSOR_SUBTITLE_EVAL_METHOD_TEXT,
output_text=const.TSOR_SUBTITLE_RESULT_TEXT
)
self._slo_precision = SingleLineOutputF(
parent=self._f_results,
description_width=26,
font=const.TSOR_FONT,
description=const.TSOR_PRECISION_TEXT,
output_text=const.TSOR_PRECISION_INITIAL_TEXT
)
self._slo_recall = SingleLineOutputF(
parent=self._f_results,
description_width=26,
font=const.TSOR_FONT,
description=const.TSOR_RECALL_TEXT,
output_text=const.TSOR_RECALL_INITIAL_TEXT
)
self._slo_f_measure = SingleLineOutputF(
parent=self._f_results,
description_width=26,
font=const.TSOR_FONT,
description=const.TSOR_F_MEASURE_TEXT,
output_text=const.TSOR_F_MEASURE_INITIAL_TEXT
)
def _place_widgets(self):
self._lbl_title.pack(side='top',
fill='both',
expand=True)
self._slo_subtitle.pack(side='top',
fill='both',
expand=True)
self._slo_precision.pack(side='top',
fill='both',
expand=True)
self._slo_recall.pack(side='top',
fill='both',
expand=True)
self._slo_f_measure.pack(side='top',
fill='both',
expand=True)
self._f_results.pack(side='top',
fill='both',
expand=True)
#########################################################################
# Public methods
def update_results(
self,
overall_results: TestSessOverallResults):
"""
- Updates the results.
:param overall_results: Overall test session results.
"""
if overall_results.is_valid():
self._slo_precision.update_output(
output_text='%.2f' % overall_results.precision)
self._slo_recall.update_output(
output_text='%.2f' % overall_results.recall)
self._slo_f_measure.update_output(
output_text='%.2f' % overall_results.f_measure)
else:
raise ValueError('Overall results are not valid:\n\n'
+ str(overall_results))
def enable(self):
""" Enables all the widgets."""
self._lbl_title.config(state='normal')
self._slo_precision.enable()
self._slo_f_measure.enable()
self._slo_subtitle.enable()
self._slo_recall.enable()
for item in self._slo_identifiers_classes:
item.enable()
def disable(self):
""" Disables all the widgets."""
self._slo_recall.disable()
self._slo_subtitle.disable()
self._slo_f_measure.disable()
self._slo_precision.disable()
self._lbl_title.config(state='disabled')
#########################################################################
| AutorestCI/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/restart_deployed_code_package_description.py | Python | mit | 1,899 | 0.002106 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RestartDeployedCodePackageDescription(Model):
"""Defines description for restarting a deloyed code package on Service Fabric
node.
.
:param service_manifest_name:
:type service_manifest_name: str
:param service_package_activation_id:
:type service_package_activation_id: str
:param code_package_name:
:type code_package_name: str
:param code_package_instance_id:
:type code_package_instance_id: str
"""
_validation = {
'service_manifest_name': {'required': True},
'code_package_name': {'required': True},
'code_package_instance_id': {'required': True},
}
_attribute_map = {
'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'},
'service_package_activation_id': {'key': 'ServicePackageActivationId', 'type': 'str'},
'code_package_name': {'key': 'CodePackageName', 'type': 'str'},
'code_package_instance_id': {'key': 'CodePackageInstanceId', 'type': 'str'},
}
def __init__(self, service_manifest_name, code_package_name, code_package_instance_id, service_package_activation_id=None):
self.service_manifest_name = service_manifest_name
self.service_package_activation_id = service_package_activation_id
self.code_package_name = code_package_name
self.code_package_instance_id = code_package_instance_id
| SimyungYang/py-flask-signup | application.py | Python | apache-2.0 | 4,769 | 0.00692 |
# Copyright 2013. Amazon Web Services, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import json
import flask
from flask import request, Response
#from boto import dynamodb2
#from boto.dynamodb2.table import Table
#from boto.dynamodb2.items import Item
#from boto.dynamodb2.exceptions import ConditionalCheckFailedException
from boto import sns
from flask import Flask, jsonify
from flask.ext.sqlalchemy import SQLAlchemy
# Default config vals
THEME = 'default' if os.environ.get('THEME') is None else os.environ.get('THEME')
FLASK_DEBUG = 'false' if os.environ.get('FLASK_DEBUG') is None else os.environ.get('FLASK_DEBUG')
AWS_REGION = 'us-east-1' if os.environ.get('AWS_REGION') is None else os.environ.get('AWS_REGION')
#STARTUP_SIGNUP_TABLE = '' if os.environ.get('STARTUP_SIGNUP_TABLE') is None else os.environ.get('STARTUP_SIGNUP_TABLE')
STARTUP_SIGNUP_RDBMS = '' if os.environ.get('STARTUP_SIGNUP_RDBMS') is None else os.environ.get('STARTUP_SIGNUP_RDBMS')
NEW_SIGNUP_TOPIC = '' if os.environ.get('NEW_SIGNUP_TOPIC') is None else os.environ.get('NEW_SIGNUP_TOPIC')
# Create the Flask app
application = flask.Flask(__name__)
# Load config values specified above
application.config.from_object(__name__)
# Only enable Flask debugging if an env var is set to true
application.debug = application.config['FLASK_DEBUG'] in ['true', 'True']
# Connect to MySQL DB
application.config['MYSQL_DATABASE_USER'] = 'dbuser'
application.config['MYSQL_DATABASE_PASSWORD'] = 'dbpassword'
application.config['MYSQL_DATABASE_DB'] = 'userdb'
application.config['MYSQL_DATABASE_HOST'] = application.config['STARTUP_SIGNUP_RDBMS']
application.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://' + application.config['MYSQL_DATABASE_USER'] + ':' + application.config['MYSQL_DATABASE_PASSWORD'] + '@' + application.config['MYSQL_DATABASE_HOST'] + '/' + application.config['MYSQL_DATABASE_DB']
db = SQLAlchemy(application)
class User(db.Model):
__tablename__ = 'users'
email = db.Column(db.String(255), primary_key=True)
name = db.Column(db.String(255))
theme = db.Column(db.String(30))
previewAccess = db.Column(db.String(10))
# Connect to DynamoDB and get ref to Table
#ddb_conn = dynamodb2.connect_to_region(application.config['AWS_REGION'])
#ddb_table = Table(table_name=application.config['STARTUP_SIGNUP_TABLE'],connection=ddb_conn)
# Connect to SNS
sns_conn = sns.connect_to_region(application.config['AWS_REGION'])
@application.route('/')
def welcome():
theme = application.config['THEME']
return flask.render_template('index.html', theme=theme, flask_debug=application.debug)
@application.route('/signup', methods=['POST'])
def signup():
signup_data = dict()
for item in request.form:
signup_data[item] = request.form[item]
exists = User.query.filter_by(email=signup_data["email"]).first()
if exists is None:
store_in_rdbms(signup_data)
# store_in_dynamo(signup_data)
publish_to_sns(signup_data)
else:
return Response("", status=409, mimetype='application/json')
return Response(json.dumps(signup_data), status=201, mimetype='application/json')
def store_in_rdbms(signup_data):
db.session.add(User(**signup_data))
db.session.commit()
#def store_in_dynamo(signup_data):
# signup_item = Item(ddb_table, data=signup_data)
# signup_item.save()
def publish_to_sns(signup_data):
try:
sns_conn.publish(application.config['NEW_SIGNUP_TOPIC'], json.dumps(signup_data), "New signup: %s" % signup_data['email'])
except Exception as ex:
sys.stderr.write("Error publishing subscription message to SNS: %
|
s" % ex.message)
@application.errorhandler(404)
def not_found_error(error):
print u'{ "Page Not Found": "%s" }' % error
theme = application.config['THEME']
return flask.render_template('404.html', theme=theme, title='404 File Not Found'), 404
@application.errorhandler(500)
def internal_error(error):
db.session.rollback()
print u'{ "Reason": "%s" }' % error
theme = application.config['THEME']
return flask.render_template('500.html', theme=theme, title='Unexpected Error Occured'), 500
if __name__ == '__main__':
application.run(host='0.0.0.0')
| aidanheerdegen/payu | payu/fsops.py | Python | apache-2.0 | 4,890 | 0.003067 |
# coding: utf-8
"""payu.experiment
===============
Basic file system operations for Payu
:copyright: Copyright 2011 Marshall Ward, see AUTHORS for details.
:license: Apache License, Version 2.0, see LICENSE for details.
"""
# Standard library
import errno
import sys, os
import subprocess
import shlex
# Extensions
import yaml
DEFAULT_CONFIG_FNAME = 'config.yaml'
# Lustre target paths for symbolic paths cannot be 60 characters (yes, really)
# Delete this once this bug in Lustre is fixed
CHECK_LUSTRE_PATH_LEN = True
def mkdir_p(path):
"""Create a new directory; ignore if it already exists."""
try:
os.makedirs(path)
except EnvironmentError as exc:
if exc.errno != errno.EEXIST:
raise
def read_config(config_fname=None):
"""Parse input configuration file and return a config dict."""
if not config_fname:
config_fname = DEFAULT_CONFIG_FNAME
try:
with open(config_fname, 'r') as config_file:
config = yaml.load(config_file)
except IOError as exc:
if exc.errno == errno.ENOENT:
print('payu: warning: Configuration file {0} not found!'
.format(config_fname))
config = {}
else:
raise
collate_config = config.pop('collate', {})
# Transform legacy collate config options
if type(collate_config) is bool:
collate_config = {'enable': collate_config}
collatestr = 'collate_'
foundkeys = []
# Cycle through old collate config and convert to newer dict format
for key in list(config.keys()):
if key.startswith(collatestr):
foundkeys.append(key)
collate_config[key[len(collatestr):]] = config.pop(key)
if foundkeys:
print("Use of these keys is deprecated: {}.".format(
", ".join(foundkeys)))
print("Instead use collate dictionary and subkey "
"without 'collate_' prefix")
config['collate'] = collate_config
return config
def make_symlink(src_path, lnk_path):
"""Safely create a symbolic link to an input field."""
# Check for Lustre 60-character symbolic link path bug
if CHECK_LUSTRE_PATH_LEN:
src_path = patch_lustre_path(src_path)
lnk_path = patch_lustre_path(lnk_path)
# os.symlink will happily make a symlink to a non-existent
# file, but we don't want that behaviour
if not os.path.exists(src_path):
return
try:
os.symlink(src_path, lnk_path)
except EnvironmentError as exc:
if exc.errno != errno.EEXIST:
raise
elif not os.path.islink(lnk_path):
# Warn the user, but do not interrupt the job
print("Warning: Cannot create symbolic link to {p}; a file named "
"{f} already exists.".format(p=src_path, f=lnk_path))
else:
# Overwrite any existing symbolic link
if os.path.realpath(lnk_path) != src_path:
os.remove(lnk_path)
os.symlink(src_path, lnk_path)
def splitpath(path):
"""Recursively split a file
|
path into all directories and files."""
head, tail = os.path.split(path)
if tail == '':
return head,
elif head == '':
return tail,
else:
return splitpath(head) + (tail,)
def patch_lustre_path(f_path):
"""Patch any 60-character pathnames, to avoid a current Lustre bug."""
if CHECK_LUSTRE_PATH_LEN and len(f_path) == 60:
if os.path.isabs(f_path):
f_path = '/.' + f_path
else:
f_path = './' + f_path
return f_path
def get_commit_id(filepath):
"""
Return git commit hash for filepath
"""
cmd = shlex.split("git log -n 1 --pretty=format:%H -- ")
cmd.append(filepath)
try:
with open(os.devnull, 'w') as devnull:
hash = subprocess.check_output(cmd, stderr=devnull)
if sys.version_info.major==3:
hash = hash.decode('ascii')
return hash.strip()
except subprocess.CalledProcessError:
return None
def get_git_revision_hash(short=False):
"""
Return git commit hash for repository
"""
cmd = ['git', 'rev-parse', 'HEAD']
if short:
cmd.insert(-1,'--short')
try:
with open(os.devnull, 'w') as devnull:
hash = subprocess.check_output(cmd, stderr=devnull)
if sys.version_info.major==3:
hash = hash.decode('ascii')
return hash.strip()
except subprocess.CalledProcessError:
return None
def is_ancestor(id1, id2):
"""
Return True if git commit id1 is an ancestor of git commit id2
"""
try:
with open(os.devnull, 'w') as devnull:
revs = subprocess.check_output(['git', 'rev-list', id2], stderr=devnull)
except:
return None
else:
return id1 in revs
| adelina-t/compute-hyperv | hyperv/nova/__init__.py | Python | apache-2.0 | 687 | 0 |
# Copyright (c) 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from hyperv.nova import driver
HyperVDriver = driver.HyperVDriver
| imoverclocked/ServoBot | apwm_home/controller/controller/settings.py | Python | mit | 5,602 | 0.001428 |
# Django settings for controller project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Tim Spriggs', 'tims@arizona.edu'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '/home/apwm/controller/db.sqlite3', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.4/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Phoenix'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = '/home/apwm/controller/controller/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/apwm/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '^0@$9mm^v@+f#^su8&ee+=1y8q44#t2+$aiy%@)c6e1%_o27o$'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'controller.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'controller.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
'/home/apwm/controller/controller/basic/templates',
'/home/apwm/controller/controller/templates',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'controller.basic',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| sunfounder/SunFounder_SensorKit_for_RPi2 | Python/15_joystick_PS2.py | Python | gpl-2.0 | 1,235 | 0.045344 |
#!/usr/bin/env python3
#------------------------------------------------------
#
# This is a program for JoystickPS2 Module.
#
# This program depends on the PCF8591 ADC chip. Follow
# the instruction book to connect the module and
# ADC0832 to your Raspberry Pi.
#
#------------------------------------------------------
import PCF8591 as ADC
import time
def setup():
ADC.setup(0x48) # Setup PCF8591
global state
def direction(): #get joystick result
state = ['home', 'up', 'down', 'left', 'right', 'pressed']
i = 0
if ADC.read(0) <= 30:
i = 1 #up
if ADC.read(0) >= 225:
i = 2 #down
if ADC.read(1) >= 225:
i = 3 #left
if ADC.read(1) <= 30:
i = 4 #right
if ADC.read(2) <= 30:
i = 5 # Button pressed
if ADC.read(0) - 125 < 15 and ADC.read(0) - 125 > -15 and ADC.read(1) - 125 < 15 and ADC.read(1) - 125 > -15 and ADC.read(2) == 255:
i = 0
return state[i]
def loop():
status = ''
while True:
tmp = direction()
if tmp != None and tmp != status:
print (tmp)
status = tmp
def destroy():
pass
if __name__ == '__main__': # Program start from here
setup()
try:
loop()
except KeyboardInterrupt: # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
destroy()
| krathjen/studiolibrary | src/mutils/tests/test_attribute.py | Python | lgpl-3.0 | 5,307 | 0.002638 |
# Copyright 2020 by Kurt Rathjen. All Rights Reserved.
#
# This library is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. This library is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
"""
# Example:
import mutils.tests.test_attribute
reload(mutils.tests.test_attribute)
mutils.tests.test_attribute.run()
"""
import os
import unittest
import maya.cmds
import mutils
class TestAttribute(unittest.TestCase):
def setUp(self):
"""
Open an existing maya test scene for testing.
"""
dirname = os.path.dirname(mutils.__file__)
dirname = os.path.join(dirname, "tests", "data")
path = os.path.join(dirname, "sphere.ma")
maya.cmds.file(
path,
open=True,
force=True,
ignoreVersion=True,
executeScriptNodes=False,
)
def test_attribute_limit(self):
"""
Test the attribute limit when setting the attribute value.
"""
range_ = (-100, 100)
maya.cmds.cutKey("sphere", cl=True, time=range_, f=range_, at="testLimit")
attr = mutils.Attribute("sphere", "testLimit")
attr.set(200)
value = maya.cmds.getAttr("sphere.testLimit")
assert value == 10, "Maximum attibute limit was ignored when setting the attribute value"
def test_attribute_limit2(self):
"""
Test the maximum attribute limit when setting a keyframe.
"""
attr = mutils.Attribute("sphere", "testLimit")
attr.setKeyframe(200)
value = maya.cmds.keyframe("sphere.testLimit", query=True, eval=True)[0]
assert value == 10, "Maximum attibute limit was ignored
|
when setting animation keyframe"
def test_attribute_limit3(self):
"""
Test the minimum attribute limit when setting a keyframe.
"""
attr = mutils.Attribute("sphere", "testLimit")
attr.setKeyframe(-200)
value = maya.cmds.keyframe("sphere.testLimit", query=True, eval=True)[0]
assert value == -10, "Minimum attibute limit was ignored when setting animation keyframe"
def test_non_keyable(self):
"""
Test if non-keyable attributes can be keyed.
"""
range_ = (-100, 100)
maya.cmds.cutKey("sphere", cl=True, time=range_, f=range_, at="testNonKeyable")
attr = mutils.Attribute("sphere", "testNonKeyable")
attr.setKeyframe(200)
value = maya.cmds.keyframe("sphere.testNonKeyable", query=True, eval=True)
assert value is None, "Non keyable attribute was keyed"
def test_anim_curve(self):
"""
Test if get anim curve returns the right value.
"""
msg = "Incorrect anim curve was returned when using attr.animCurve "
attr = mutils.Attribute("sphere", "testFloat")
curve = attr.animCurve()
assert curve is None, msg + "1"
attr = mutils.Attribute("sphere", "testConnected")
curve = attr.animCurve()
assert curve is None, msg + "2"
attr = mutils.Attribute("sphere", "testAnimated")
curve = attr.animCurve()
assert curve == "sphere_testAnimated", msg + "3"
def test_set_anim_curve(self):
"""
Test if set anim curve
"""
msg = "No anim curve was set"
attr = mutils.Attribute("sphere", "testAnimated")
srcCurve = attr.animCurve()
attr = mutils.Attribute("sphere", "testFloat")
attr.setAnimCurve(srcCurve, time=(1, 15), option="replace")
curve = attr.animCurve()
assert curve is not None, msg
attr = mutils.Attribute("sphere", "testFloat")
attr.setAnimCurve(srcCurve, time=(15, 15), option="replaceCompletely")
curve = attr.animCurve()
assert curve is not None, msg
def test_set_static_keyframe(self):
"""
Test set static keyframes
"""
msg = "The inserted static keys have different values"
attr = mutils.Attribute("sphere", "testAnimated", cache=False)
attr.setStaticKeyframe(value=2, time=(4, 6), option="replace")
maya.cmds.currentTime(4)
value1 = attr.value()
maya.cmds.currentTime(6)
value2 = attr.value()
assert value1 == value2, msg
def testSuite():
"""
Return the test suite for the TestAttribute.
:rtype: unittest.TestSuite
"""
suite = unittest.TestSuite()
s = unittest.makeSuite(TestAttribute, 'test')
suite.addTest(s)
return suite
def run():
"""
Call from within Maya to run all valid tests.
Example:
import mutils.tests.test_attribute
reload(mutils.tests.test_attribute)
mutils.tests.test_attribute.run()
"""
tests = unittest.TextTestRunner()
tests.run(testSuite())
| dwatkinsweb/django-skin | skin/views/views.py | Python | mit | 1,288 | 0.001553 |
from django.contrib.sites.models import Site
from django.utils._os import safe_join
from django.views.generic import TemplateView
from skin.conf import settings
from skin.template.loaders.util import get_site_skin
class TemplateSkinView(TemplateView):
"""
A view that extends Django's base TemplateView to allow you to set up skins.
"""
skin_name = None
skin_path = None
def get_skin_name(self):
if self.skin_name is None:
return settings.SKIN_NAME
else:
return self.skin_name
def get_skin(self):
return get_site_skin(site=Site.objects.get_current(), name=self.get_skin_name())
def get_skin_path(self):
if self.skin_path is not None:
return self.skin_path
skin = self.get_skin()
if skin is not None:
return skin.path
else:
return None
def get_template_names(self):
template_names = super(TemplateSkinView, self).get_template_names()
skin_path = self.get_skin_path()
skin_template_names = []
if skin_path is not None:
for template_name in template_names:
skin_template_names.append(safe_join(skin_path, template_name))
return skin_template_names + template_names
| PaddlePaddle/Paddle | python/paddle/fluid/nets.py | Python | apache-2.0 | 27,095 | 0.004687 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import six
from . import layers
from .data_feeder import check_variable_and_dtype, convert_dtype
from ..utils import deprecated
__all__ = [
"simple_img_conv_pool",
"sequence_conv_pool",
"glu",
"scaled_dot_product_attention",
"img_conv_group",
]
def simple_img_conv_pool(input,
num_filters,
filter_size,
pool_size,
pool_stride,
pool_padding=0,
pool_type='max',
global_pooling=False,
conv_stride=1,
conv_padding=0,
conv_dilation=1,
conv_groups=1,
param_attr=None,
bias_attr=None,
act=None,
use_cudnn=True):
r"""
:api_attr: Static Graph
The simple_img_conv_pool api is composed of :ref:`api_fluid_layers_conv2d` and :ref:`api_fluid_layers_pool2d` .
Args:
input (Variable): 4-D Tensor, shape is [N, C, H, W], data type can be float32 or float64.
num_filters(int): The number of filters. It is the same as the output channels.
filter_size (int|list|tuple): The filter size. If filter_size is a list or
tuple, it must contain two integers, (filter_size_H, filter_size_W). Otherwise,
the filter_size_H = filter_size_W = filter_size.
pool_size (int|list|tuple): The pooling size of pool2d layer. If pool_size
is a list or tuple, it must contain two integers, (pool_size_H, pool_size_W).
Otherwise, the pool_size_H = pool_size_W = pool_size.
pool_stride (int|list|tuple): The pooling stride of pool2d layer. If pool_stride
is a list or tuple, it must contain two integers, (pooling_stride_H, pooling_stride_W).
Otherwise, the pooling_stride_H = pooling_stride_W = pool_stride.
pool_padding (int|list|tuple): The padding of pool2d layer. If pool_padding is a list or
tuple, it must contain two integers, (pool_padding_H, pool_padding_W).
Otherwise, the pool_padding_H = pool_padding_W = pool_padding. Default 0.
pool_type (str): Pooling type can be :math:`max` for max-pooling or :math:`avg` for
average-pooling. Default :math:`max`.
global_pooling (bool): Whether to use the global pooling. If global_pooling = true,
pool_size and pool_padding while be ignored. Default False
conv_stride (int|list|tuple): The stride size of the conv2d Layer. If stride is a
list or tuple, it must contain two integers, (conv_stride_H, conv_stride_W). Otherwise,
the conv_stride_H = conv_stride_W = conv_stride. Default: conv_stride = 1.
conv_padding (int|list|tuple): The padding size of the conv2d Layer. If padding is
a list or tuple, it must contain two integers, (conv_padding_H, conv_padding_W).
Otherwise, the conv_padding_H = conv_padding_W = conv_padding. Default: conv_padding = 0.
conv_dilation (int|list|tuple): The dilation size of the conv2d Layer. If dilation is
a list or tuple, it must contain two integers, (conv_dilation_H, conv_dilation_W).
Otherwise, the conv_dilation_H = conv_dilation_W = conv_dilation. Default: conv_dilation = 1.
conv_groups (int): The groups number of the conv2d Layer. According to grouped
convolution in Alex Krizhevsky's Deep CNN paper: when group=2,
the first half of the filters is only connected to the first half
of the input channels, while the second half of the filters is only
connected to the second half of the input channels. Default: groups=1.
param_attr (ParamAttr|None): The parameter attribute for learnable parameters/weights
of conv2d. If it is set to None or one attribute of ParamAttr, conv2d
will create ParamAttr as param_attr. If the Initializer of the param_attr
is not set, the parameter is initialized with :math:`Normal(0.0, std)`,
and the :math:`std` is :math:`(\\frac{2.0 }{filter\_elem\_num})^{0.5}`.
Default: None.
bias_attr (ParamAttr|bool|None): The parameter attribute for the bias of conv2d.
If it is set to False, no bias will be added to the output units.
If it is set to None or one attribute of ParamAttr, conv2d
will create ParamAttr as bias_attr. If the Initializer of the bias_attr
is not set, the bias is initialized zero. Default: None.
act (str): Activation type for conv2d, if it is set to None, activation is not
appended. Default: None.
use_cudnn (bool): Use cudnn kernel or not, it is valid only when the cudnn
library is installed. Default: True
Return:
4-D Tensor, the result of input after conv2d and pool2d, with the same data type as :attr:`input`
Return Type:
Variable
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
img = fluid.data(name='img', shape=[100, 1, 28, 28], dtype='float32')
conv_pool = fluid.nets.simple_img_conv_pool(input=img,
filter_size=5,
num_filters=20,
pool_size=2,
pool_stride=2,
act="relu")
"""
conv_out = layers.conv2d(
input=input,
num_filters=num_filters,
filter_size=filter_size,
stride=conv_stride,
padding=conv_padding,
dilation=conv_dilation,
groups=conv_groups,
param_attr=param_attr,
bias_attr=bias_attr,
act=act,
use_cudnn=use_cudnn)
pool_out = layers.pool2d(
input=conv_out,
pool_size=pool_size,
pool_type=pool_type,
pool_stride=pool_stride,
pool_padding=pool_padding,
global_pooling=global_pooling,
use_cudnn=use_cudnn)
return pool_out
def img_conv_group(input,
conv_num_filter,
pool_size,
conv_padding=1,
conv_filter_size=3,
conv_act=None,
param_attr=None,
conv_with_batchnorm=False,
conv_batchnorm_drop_rate=0.0,
pool_stride=1,
pool_type="max",
use_cudnn=True):
"""
:api_attr: Static Graph
The Image Convolution Group is composed of Convolution2d, BatchNorm, DropOut,
and Pool2D. According to the input arguments, img_conv_group will do serials of
computation for Input using Convolution2d, BatchNorm, DropOut, and pass the last
result to Pool2D.
Args:
input (Variable): The input is 4-D Tensor with shape [N, C, H, W], the data type of input is float32 or float64.
conv_num_filter(list|tuple): Indicates the numbers of filter of this group.
pool_size (int|list|tuple): The pooling size of Pool2D Layer. If pool_size
is a list or tuple, it must contain two integers, (pool_size_height, pool_size_width).
Otherwise, the pool_siz
| Thetoxicarcade/ac | congredi/auth/test/test_token.py | Python | gpl-3.0 | 1,288 | 0.000776 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
JWT tokens (for web interface, mostly, as all peer operations function on
public key cryptography)
JWT tokens can be one of:
* Good
* Expired
* Invalid
And granting them should not take database access. They are meant to
figure out if a user is auth'd without using the database to do so.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
from ...utils.timing import TimedTestCase
from ..token import token, jwt_get, jwt_use
class test_token(TimedTestCase):
def test_good_token(self):
"""Valid JWT Token"""
self.threshold = .32
bob = token(u'bob')
example = bob.make(u'print')
bob.check(example)
def test_expired_token(self):
"""Expire a token..."""
self.threshold = .1
a = datetime.datetime.now()
assert a != None
def test_invalid_token(self):
"""Invalid Tokens"""
self.threshold = .1
fred = token(u'fred')
alice = token(u'alice')
wrong = fred.make(u'well then')
alice.check(wrong)
class test_jwt(TimedTestCase):
def test_routes(self):
self.threshold = .1
tok = jwt_get(u'ten')
res = jwt_use(tok)
print(res)
| rlindner81/pyload | module/plugins/hoster/OronCom.py | Python | gpl-3.0 | 493 | 0 |
# -*- coding: utf-8 -*-
from module.plugins.internal.DeadHoster import DeadHoster
class OronCom(DeadHoster):
__name__ = "OronCom"
__type__ = "hoster"
__version__ = "0.18"
__status__ = "stable"
__pattern__ = r'https?://(?:www\.)?oron\.com/\w{12}'
__config__ = [] # @TODO: Remove in 0.4.10
__description__ = """Oron.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("chrox", "chrox@pyload.org"),
("DHMH", "DHMH@pyload.org")]
| paour/weblate | weblate/trans/migrations/0027_auto__chg_field_subproject_template.py | Python | gpl-3.0 | 15,556 | 0.007716 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2014 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
depends_on = (
('lang', '0003_auto__add_field_language_plural_type'),
)
def forwards(self, orm):
# Changing field 'SubProject.template'
db.alter_column('trans_subproject', 'template', self.gf('django.db.models.fields.CharField')(max_length=200, null=False))
def backwards(self, orm):
# Changing field 'SubProject.template'
db.alter_column('trans_subproject', 'template', self.gf('django.db.models.fields.CharField')(default='', max_length=200))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lang.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'direction': ('django.db.models.fields.CharField', [], {'default': "'ltr'", 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'nplurals': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'plural_type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'pluralequation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'trans.change': {
'Meta': {'ordering': "['-timestamp']", 'object_name': 'Change'},
'action': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Translation']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Unit']", 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
},
'trans.check': {
'Meta': {'object_name': 'Check'},
'check': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignore': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']", 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"})
},
'trans.comment': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Comment'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']", 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'trans.dictionary': {
'Meta': {'ordering': "['source']", 'object_name': 'Dictionary'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
| ATNF/askapsdp | Tools/Dev/rbuild/askapdev/rbuild/utils/pkginfo.py | Python | gpl-2.0 | 3,666 | 0.004364 |
# @brief helper function to turn pkgconfig files into ASKAP package.info
#
# @copyright (c) 2006 CSIRO
# Australia Telescope National Facility (ATNF)
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# PO Box 76, Epping NSW 1710, Australia
# atnf-enquiries@csiro.au
#
# This file is part of the ASKAP software distribution.
#
# The ASKAP software distribution is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the License
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
#
import os
import re
import string
def _replace_vars(lst, vardict):
'''a generator to replace allcaps variables found in .pc files
:param lst: a list of strings
:param vardict: the dictionary of variable definitions
'''
varrx = re.compile("\$\{([A-Z_]+)\}")
for item in lst:
vnames = varrx.search(item)
if vnames:
for v in vnames.groups():
dv = vardict.get(v, None)
if dv is not None:
replaced = varrx.sub(dv, item)
yield replaced
else:
yield item
def to_info(pkgfile=None):
'''To be executed from the build.py directory.
This will extract the information from a pkgconfig file and
writes it to a ASKAPsoft 'package.info' file.
This will only work if there is not already a 'package.info'.
@param pkgfile The path to the .pc file. Default None, means
look for a '.pc' file in 'install/lib/pkgconfig'
'''
if os.path.exists("package.info"):
# nothing to do
return
if not pkgfile:
pcdir = "install/lib/pkgconfig"
if not os.path.exists(pcdir):
return
files = os.listdir(pcdir)
if not files:
# assume no dependencies
return
# there should only be one pc file
pkgfile = os.path.join(pcdir, files[0])
incdir = None
libdir = None
libs = []
outlibs=[]
varnames = {}
varrx = re.compile("\$\{\w*prefix\}/")
f = file(pkgfile)
for line in f.readlines():
line = line.strip()
if line.count(":"):
k,v = line.split(":")
if k.startswith("Libs"):
ls = v.split()
for l in ls:
if l.startswith("-l"):
libs.append(l[2:])
if line.count("="):
k,v = line.split("=")
if varrx.search(v):
v = varrx.sub("", v)
varnames[k] = v
f.close()
outlibs = [i for i in _replace_vars(libs, varnames)]
incdir = [i for i in _replace_vars([varnames["includedir"]], varnames)][0]
if incdir == "include":
incdir = None
libdir = [i for i in _replace_vars([varnames["libdir"]], varnames)][0]
if libdir == "lib":
libdir = None
outtxt = "# Auto-generated by build.py - DO NOT MODIFY\n"
outtxt += "libs=%s\n" % string.join(outlibs)
if libdir: outtxt += "libdir=%s\n" % libdir
if incdir: outtxt += "incdir=%s\n" % incdir
f = file("package.info", "w+")
f.write(outtxt)
f.close()
| heiths/allura | Allura/allura/lib/helpers.py | Python | apache-2.0 | 42,752 | 0.000678 |
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import base64
import sys
import os
import os.path
import difflib
import urllib
import urllib2
import re
import unicodedata
import json
import logging
import string
import random
import cPickle as pickle
from hashlib import sha1
from datetime import datetime, timedelta
from collections import defaultdict
import shlex
import socket
from functools import partial
from cStringIO import StringIO
import tg
import genshi.template
try:
import cchardet as chardet
except ImportError:
import chardet
import pkg_resources
from formencode.validators import FancyValidator
from dateutil.parser import parse
from bson import ObjectId
from paste.deploy import appconfig
from pymongo.errors import InvalidId
from contextlib import contextmanager
from pylons import tmpl_context as c, app_globals as g
from pylons import response, request
from tg.decorators import before_validate
from formencode.variabledecode import variable_decode
import formencode
from jinja2 import Markup
from jinja2.filters import contextfilter, escape
from paste.deploy.converters import asbool, aslist, asint
from webhelpers import date, feedgenerator, html, number, misc, text
from webob.exc import HTTPUnauthorized
from allura.lib import exceptions as exc
from allura.lib import AsciiDammit
from allura.lib import utils
# import to make available to templates, don't delete:
from .security import has_access, is_allowed_by_role
log = logging.getLogger(__name__)
# http://stackoverflow.com/questions/2063213/regular-expression-for-validating-dns-label-host-name
# modified to remove capital A-Z and make length parameterized
# and not use lookbehind assertion since JS doesn't support that
dns_var_length = r'^(?![0-9]+$)(?!-)[a-z0-9-]{%s}[a-z0-9]$'
# project & tool names must comply to DNS since used in subdomains for emailing
re_mount_points = {
're_project_name': dns_var_length % '2,14', # validates project, subproject, and user names
're_tool_mount_point': dns_var_length % '0,62', # validates tool mount point names
're_tool_mount_point_fragment': r'[a-z][-a-z0-9]*',
're_relaxed_tool_mount_point': r'^[a-zA-Z0-9][-a-zA-Z0-9_\.\+]{0,62}$',
're_relaxed_tool_mount_point_fragment': r'[a-zA-Z0-9][-a-zA-Z0-9_\.\+]*'
}
# validates project, subproject, and user names
re_project_name = re.compile(re_mount_points['re_project_name'])
# validates tool mount point names
re_tool_mount_point = re.compile(re_mount_points['re_tool_mount_point'])
re_tool_mount_point_fragment = re.compile(re_mount_points['re_tool_mount_point_fragment'])
re_relaxed_tool_mount_point = re.compile(re_mount_points['re_relaxed_tool_mount_point'])
re_relaxed_tool_mount_point_fragment = re.compile(re_mount_points['re_relaxed_tool_mount_point_fragment'])
re_clean_vardec_key = re.compile(r'''\A
( # first part
\w+# name...
(-\d+)?# with optional -digits suffix
)
(\. # next part(s)
\w+# name...
(-\d+)?# with optional -digits suffix
)+
\Z''', re.VERBOSE)
# markdown escaping regexps
re_amp = re.compile(r'''
[&] # amp
(?= # look ahead for:
([a-zA-Z0-9]+;) # named HTML entity
|
(\#[0-9]+;) # decimal entity
|
(\#x[0-9A-F]+;) # hex entity
)
''', re.VERBOSE)
re_leading_spaces = re.compile(r'^[\t ]+', re.MULTILINE)
re_preserve_spaces = re.compile(r'''
[ ] # space
(?=[ ]) # lookahead for a space
''', re.VERBOSE)
re_angle_bracket_open = re.compile('<')
re_angle_bracket_close = re.compile('>')
md_chars_matcher_all = re.compile(r"([`\*_{}\[\]\(\)#!\\\.+-])")
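# Editor's note: illustrative sketch, not part of the original module. It shows
# that re_amp only matches ampersands that already introduce an HTML entity.
def _example_re_amp():
    assert re_amp.search('fish & chips') is None         # bare ampersand is left for escaping
    assert re_amp.search('fish &amp; chips') is not None  # existing entity is matched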
def make_safe_path_portion(ustr, relaxed=True):
"""Return an ascii representation of ``ustr`` that conforms to mount point
naming :attr:`rules <re_tool_mount_point_fragment>`.
Will return an empty string if no char in ``ustr`` is latin1-encodable.
:param relaxed: Use relaxed mount point naming rules (allows more
    characters; see :attr:`re_relaxed_tool_mount_point_fragment`).
:returns: The converted string.
"""
regex = (re_relaxed_tool_mount_point_fragment if relaxed else
re_tool_mount_point_fragment)
ustr = really_unicode(ustr)
s = ustr.encode('latin1', 'ignore')
s = AsciiDammit.asciiDammit(s)
if not relaxed:
s = s.lower()
s = '-'.join(regex.findall(s))
s = s.replace('--', '-')
return s
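# Editor's note: illustrative sketch, not part of the original module; the
# sample title is hypothetical and assumes AsciiDammit passes ASCII through.
def _example_make_safe_path_portion():
    assert make_safe_path_portion(u'My Wiki!') == 'My-Wiki'                  # relaxed rules keep case
    assert make_safe_path_portion(u'My Wiki!', relaxed=False) == 'my-wiki'   # strict rules lowercase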
def escape_json(data):
return json.dumps(data).replace('<', '\u003C')
def monkeypatch(*objs):
def patchem(func):
for obj in objs:
setattr(obj, func.__name__, func)
return patchem
def urlquote(url, safe="/"):
try:
return urllib.quote(str(url), safe=safe)
except UnicodeEncodeError:
return urllib.quote(url.encode('utf-8'), safe=safe)
def urlquoteplus(url, safe=""):
try:
return urllib.quote_plus(str(url), safe=safe)
except UnicodeEncodeError:
return urllib.quote_plus(url.encode('utf-8'), safe=safe)
def _attempt_encodings(s, encodings):
if s is None:
return u''
for enc in encodings:
try:
if enc is None:
return unicode(s) # try default encoding
else:
return unicode(s, enc)
except (UnicodeDecodeError, LookupError):
pass
# Return the repr of the str -- should always be safe
return unicode(repr(str(s)))[1:-1]
def really_unicode(s):
# Try to guess the encoding
def encodings():
yield None
yield 'utf-8'
yield chardet.detect(s[:1024])['encoding']
yield chardet.detect(s)['encoding']
yield 'latin-1'
return _attempt_encodings(s, encodings())
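# Editor's note: illustrative sketch, not part of the original module.
def _example_really_unicode():
    assert really_unicode(None) == u''
    assert really_unicode('caf\xc3\xa9') == u'caf\xe9'           # UTF-8 bytes are decoded
    assert really_unicode(u'd\xe9j\xe0 vu') == u'd\xe9j\xe0 vu'  # unicode passes straight through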
def find_user(email):
from allura import model as M
return M.User.by_email_address(email)
def find_project(url_path):
from allura import model as M
for n in M.Neighborhood.query.find():
if url_path.strip("/").startswith(n.url_prefix.strip("/")):
break
else:
return None, url_path
# easily off-by-one, might be better to join together everything but
# url_prefix
project_part = n.shortname_prefix + url_path[len(n.url_prefix):]
parts = project_part.split('/')
length = len(parts)
while length:
shortname = '/'.join(parts[:length])
p = M.Project.query.get(shortname=shortname, deleted=False,
neighborhood_id=n._id)
if p:
return p, parts[length:]
length -= 1
return None, url_path.split('/')
def make_neighborhoods(ids):
return _make_xs('Neighborhood', ids)
def make_projects(ids):
return _make_xs('Project', ids)
def make_users(ids):
return _make_xs('User', ids)
def make_roles(ids):
return _make_xs('ProjectRole', ids)
def _make_xs(X, ids):
from allura import model as M
X = getattr(M, X)
ids = list(ids)
results = dict(
(r._id, r)
for r in X.query.find(dict(_id={'$in': ids})))
result = (results.get(i) for i in ids)
return (r for r in result if r is not None)
def make_app_admin_only(app):
from allura.model.auth import ProjectRole
admin_role = ProjectRole.by_name('Admin', app.project)
for ace in [ace for ace in app.acl if ace.role_id != admin_role._id]:
app.acl.remove(ace)
@contextmanager
def push_config(obj, **kw):

RevansChen/online-judge | Codefights/arcade/python-arcade/level-5/34.Multiplication-Table/Python/test.py | Python | mit | 2,320 | 0.007759

# Python3
from solution1 import multiplicationTable as f
qa = [
(5,
[[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
[4, 8, 12, 16, 20],
[5, 10, 15, 20, 25]]),
(2,
[[1, 2],
[2, 4]]),
(4,
[[1, 2, 3, 4],
[2, 4, 6, 8],
[3, 6, 9, 12],
[4, 8, 12, 16]]),
(10,
[[ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
[ 2, 4, 6, 8, 10, 12, 14, 16, 18, 20],
[ 3, 6, 9, 12, 15, 18, 21, 24, 27, 30],
[ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40],
[ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50],
[ 6, 12, 18, 24, 30, 36, 42, 48, 54, 60],
[ 7, 14, 21, 28, 35, 42, 49, 56, 63, 70],
[ 8, 16, 24, 32, 40, 48, 56, 64, 72, 80],
[ 9, 18, 27, 36, 45, 54, 63, 72, 81, 90],
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]]),
(15,
[[ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
[ 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30],
[ 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45],
[ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60],
[ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75],
[ 6, 12, 18, 24, 30, 36, 42, 48, 54, 60, 66, 72, 78, 84, 90],
[ 7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98, 105],
[ 8, 16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104, 112, 120],
[ 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 99, 108, 117, 126, 135],
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150],
[11, 22, 33, 44, 55, 66, 77, 88, 99, 110, 121, 132, 143, 154, 165],
[12, 24, 36, 48, 60, 72, 84, 96, 108, 120, 132, 144, 156, 168, 180],
[13, 26, 39, 52, 65, 78, 91, 104, 117, 130, 143, 156, 169, 182, 195],
[14, 28, 42, 56, 70, 84, 98, 112, 126, 140, 154, 168, 182, 196, 210],
[15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225]])
]
for *q, a in qa:
for i, e in enumerate(q):
print('input{0}: {1}'.format(i + 1, e))
ans = f(*q)
if ans != a:
print(' [failed]')
print(' output:', ans)
print(' expected:', a)
else:
        print('  [ok]')
print(' output:', ans)
print()

01org/cloudeebus | setup.py | Python | apache-2.0 | 1,949 | 0.040021

#!/usr/bin/env python
# Cloudeebus
#
# Copyright (C) 2012 Intel Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Luc Yriarte <luc.yriarte@intel.com>
# Christophe Guiraud <christophe.guiraud@intel.com>
# Frederic Paut <frederic.paut@intel.com>
# Patrick Ohly <patrick.ohly@intel.com>
#
from setuptools import setup
setup(name = "cloudeebus",
version = "0.6.1",
description = "J
|
avascript-DBus bridge",
author = "Luc Yriarte, Christophe Guiraud, Frederic Paut, Patrick Ohly",
author_email = "luc.yriarte@intel.com, christophe.guiraud@intel.com, frederic.paut@intel.com, patrick.ohly@intel.com",
url = "https://github.com/01org/cloudeebus/wiki",
license = "http://www.apache.org/licenses/LICENSE-2.0",
scripts = ["cloudeebus/cloudeebus.py","cloudeebus/cloudeebusengine.py"],
packages = ["cloudeebus"],
data_files = [("cloudeebus" ,["AUTHORS", "README.md", "LICENSE"]),
('/etc/dbus-1/system.d/', ['org.cloudeebus.conf'])],
platforms = ("Any"),
install_requires = ["setuptools", "autobahn==0.5.8"],
classifiers = ["License :: OSI Approved :: Apache Software License",
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Internet",
"Topic :: Software Development :: Libraries"],
keywords = "cloudeebus autobahn websocket dbus javascript bridge")

Kryz/sentry | src/sentry/web/frontend/project_notifications.py | Python | bsd-3-clause | 3,802 | 0.000263

from __future__ import absolute_import
from django.conf import settings
from django.contrib import messages
from django.http import HttpResponseRedirect, HttpResponse
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from sentry import constants
from sentry.models import OrganizationMemberType
from sentry.plugins import plugins, NotificationPlugin
from sentry.web.forms.projects import NotificationSettingsForm
from sentry.web.frontend.base import ProjectView
OK_SETTINGS_SAVED = _('Your settings were saved successfully.')
class ProjectNotificationsView(ProjectView):
required_access = OrganizationMemberType.ADMIN
def _iter_plugins(self):
for plugin in plugins.all(version=1):
if not isinstance(plugin, NotificationPlugin):
continue
yield plugin
def _handle_enable_plugin(self, request, project):
plugin = plugins.get(request.POST['plugin'])
plugin.enable(project)
messages.add_message(
request, messages.SUCCESS,
constants.OK_PLUGIN_ENABLED.format(name=plugin.get_title()),
)
def _handle_disable_plugin(self, request, project):
plugin = plugins.get(request.POST['plugin'])
plugin.disable(project)
messages.add_message(
request, messages.SUCCESS,
constants.OK_PLUGIN_DISABLED.format(name=plugin.get_title()),
)
def handle(self, request, organization, team, project):
op = request.POST.get('op')
if op == 'enable':
self._handle_enable_plugin(request, project)
return HttpResponseRedirect(request.path)
elif op == 'disable':
self._handle_disable_plugin(request, project)
return HttpResponseRedirect(request.path)
if op == 'save-settings':
general_form = NotificationSettingsForm(
data=request.POST,
prefix='general',
initial={
                    'subject_prefix': project.get_option(
'mail:subject_prefix', settings.EMAIL_SUBJECT_PREFIX),
},
)
if general_form.is_valid():
                project.update_option(
'mail:subject_prefix', general_form.cleaned_data['subject_prefix'])
messages.add_message(
request, messages.SUCCESS,
OK_SETTINGS_SAVED)
return HttpResponseRedirect(request.path)
else:
general_form = NotificationSettingsForm(
prefix='general',
initial={
'subject_prefix': project.get_option(
'mail:subject_prefix', settings.EMAIL_SUBJECT_PREFIX),
},
)
enabled_plugins = []
other_plugins = []
for plugin in self._iter_plugins():
if plugin.is_enabled(project):
content = plugin.get_notification_doc_html()
form = plugin.project_conf_form
if form is not None:
view = plugin.configure(request, project=project)
if isinstance(view, HttpResponse):
return view
enabled_plugins.append((plugin, mark_safe(content + view)))
elif content:
enabled_plugins.append((plugin, mark_safe(content)))
else:
other_plugins.append(plugin)
context = {
'page': 'notifications',
'enabled_plugins': enabled_plugins,
'other_plugins': other_plugins,
'general_form': general_form,
}
return self.respond('sentry/project-notifications.html', context)

chippey/gaffer | python/GafferArnoldUITest/ArnoldShaderUITest.py | Python | bsd-3-clause | 4,413 | 0.027646

##########################################################################
#
# Copyright (c) 2016, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import Gaffer
import GafferUITest
import GafferArnold
import GafferArnoldUI
class ArnoldShaderUITest( GafferUITest.TestCase ) :
def testMetadata( self ) :
shader = GafferArnold.ArnoldShader()
shader.loadShader( "noise" )
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["octaves"], "nodule:type" ),
""
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["amplitude"], "nodule:type" ),
"GafferUI::StandardNodule"
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["octaves"], "plugValueWidget:type" ),
None
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "plugValueWidget:type" ),
"GafferUI.PresetsPlugValueWidget"
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "presetNames" ),
IECore.StringVectorData( [ "world", "object", "Pref" ] ),
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "presetValues" ),
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "presetNames" ),
)
def testLightMetadata( self ) :
light = GafferArnold.ArnoldLight()
with IECore.CapturingMessageHandler() as mh :
light.loadShader( "skydome_light" )
## \todo Here we're suppressing warnings about not being
# able to create plugs for some parameters. In many cases
# these are parameters like "matrix" and "time_samples"
# that we don't actually want to represent anyway. We should
# add a mechanism for ignoring irrelevant parameters (perhaps
# using custom gaffer.something metadata in additional Arnold
# .mtd files), and then remove this suppression.
for message in mh.messages :
self.assertEqual( message.level, mh.Level.Warning )
self.assertTrue( "Unsupported parameter" in message.message )
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["cast_shadows"], "nodule:type" ),
""
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["color"], "nodule:type" ),
"GafferUI::StandardNodule"
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["format"], "plugValueWidget:type" ),
"GafferUI.PresetsPlugValueWidget"
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["format"], "presetNames" ),
IECore.StringVectorData( [ "mirrored_ball", "angular", "latlong" ] ),
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["format"], "presetValues" ),
Gaffer.Metadata.value( light["parameters"]["format"], "presetNames" ),
)
if __name__ == "__main__":
unittest.main()

tensorflow/tensorflow | tensorflow/python/training/tracking/base_delegate.py | Python | apache-2.0 | 5,796 | 0.008282

# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A mixin class that delegates another Trackable to be used when saving.
This is intended to be used with wrapper classes that cannot directly proxy the
wrapped object (e.g. with wrapt.ObjectProxy), because there are inner attributes
that cannot be exposed.
The Wrapper class itself cannot contain any Trackable children, as only the
delegated Trackable will be saved to checkpoint and SavedModel.
This class will "disappear" and be replaced with the wrapped inner Trackable
after a cycle of SavedModel saving and loading, unless the object is registered
and loaded with Keras.
"""
from tensorflow.python.util.tf_export import tf_export
@tf_export("__internal__.tracking.DelegatingTrackableMixin", v1=[])
class DelegatingTrackableMixin(object):
"""A mixin that delegates all Trackable methods to another trackable object.
DO NOT USE THIS UNLESS YOU ARE THE KERAS LOSS SCALE OPTIMIZER.
This class must be used with multiple inheritance. A class that subclasses
Trackable can also subclass this class, which causes all Trackable methods to
be delegated to the trackable object passed in the constructor.
  A subclass can use this mixin to appear as if it were the trackable passed to
the constructor, from a Checkpoint's perspective. LossScaleOptimizer uses this
mixin, so that the checkpoint format for a LossScaleOptimizer is identical to
  the checkpoint format for a normal optimizer. This allows a model to be saved
with a normal Optimizer and restored with a LossScaleOptimizer, or vice versa.
The only difference in checkpoint format is that the loss scale is also saved
with a LossScaleOptimizer.
"""
def __init__(self, trackable_obj):
self._trackable = trackable_obj
# pylint: disable=protected-access
@property
def _setattr_tracking(self):
return self._trackable._setattr_tracking
@_setattr_tracking.setter
def _setattr_tracking(self, value):
self._trackable._setattr_tracking = value
@property
def _update_uid(self):
return self._trackable._update_uid
@_update_uid.setter
def _update_uid(self, value):
self._trackable._update_uid = value
@property
def _unconditional_checkpoint_dependencies(self):
return self._trackable._unconditional_checkpoint_dependencies
@property
def _unconditional_dependency_names(self):
return self._trackable._unconditional_dependency_names
@property
def _name_based_restores(self):
return self._trackable._name_based_restores
def _maybe_initialize_trackable(self):
return self._trackable._maybe_initialize_trackable()
@property
def _object_identifier(self):
return self._trackable._object_identifier
@property
def _tracking_metadata(self):
return self._trackable._tracking_metadata
def _no_dependency(self, *args, **kwargs):
return self._trackable._no_dependency(*args, **kwargs)
def _name_based_attribute_restore(self, *args, **kwargs):
return self._trackable._name_based_attribute_restore(*args, **kwargs)
@property
def _checkpoint_dependencies(self):
return self._trackable._checkpoint_dependencies
@property
def _deferred_dependencies(self):
return self._trackable._deferred_dependencies
def _lookup_dependency(self, *args, **kwargs):
return self._trackable._lookup_dependency(*args, **kwargs)
def _add_variable_with_custom_getter(self, *args, **kwargs):
return self._trackable._add_variable_with_custom_getter(*args, **kwargs)
def _preload_simple_restoration(self, *args, **kwargs):
return self._trackable._preload_simple_restoration(*args, **kwargs)
def _track_trackable(self, *args, **kwargs): # pylint: disable=redefined-outer-name
return self._trackable._track_trackable(*args, **kwargs)
def _handle_deferred_dependencies(self, name, trackable): # pylint: disable=redefined-outer-name
return self._trackable._handle_deferred_dependencies(name, trackable)
def _restore_from_checkpoint_position(self, checkpoint_position):
return self._trackable._restore_from_checkpoint_position(
checkpoint_position)
def _single_restoration_from_checkpoint_position(self, *args, **kwargs):
return self._trackable._single_restoration_from_checkpoint_position(
*args, **kwargs)
def _gather_saveables_for_checkpoint(self, *args, **kwargs):
return self._trackable._gather_saveables_for_checkpoint(*args, **kwargs)
def _list_extra_dependencies_for_serialization(self, *args, **kwargs):
return self._trackable._list_extra_dependencies_for_serialization(
*args, **kwargs)
def _list_functions_for_serialization(self, *args, **kwargs):
return self._trackable._list_functions_for_serialization(*args, **kwargs)
def _trackable_children(self, *args, **kwargs):
return self._trackable._trackable_children(*args, **kwargs)
def _deserialization_dependencies(self, *args, **kwargs):
return self._trackable._deserialization_dependencies(*args, **kwargs)
def _export_to_saved_model_graph(self, *args, **kwargs):
return self._trackable._export_to_saved_model_graph(*args, **kwargs)
# pylint: enable=protected-access
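# Editor's note: hypothetical sketch, not part of the original module. A wrapper
# (here called _ExampleWrapper, with SomeTrackableSubclass standing in for any
# Trackable subclass) would mix this class in and hand it the wrapped object:
#
#   class _ExampleWrapper(SomeTrackableSubclass, DelegatingTrackableMixin):
#
#     def __init__(self, inner_trackable):
#       SomeTrackableSubclass.__init__(self)
#       DelegatingTrackableMixin.__init__(self, inner_trackable)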

madhat2r/plaid2text | src/python/plaid2text/renderers.py | Python | gpl-3.0 | 15,244 | 0.00105

#! /usr/bin/env python3
from abc import ABCMeta, abstractmethod
import csv
import os
import re
import subprocess
import sys
import plaid2text.config_manager as cm
from plaid2text.interact import separator_completer, prompt
class Entry:
"""
This represents one entry (transaction) from Plaid.
"""
def __init__(self, transaction, options={}):
"""Parameters:
transaction: a plaid transaction
options: from CLI args and config file
"""
self.options = options
self.transaction = transaction
# TODO: document this
if 'addons' in options:
self.transaction['addons'] = dict(
(k, fields[v - 1]) for k, v in options.addons.items() # NOQA
)
else:
self.transaction['addons'] = {}
# The id for the transaction
self.transaction['transaction_id'] = self.transaction['transaction_id']
# Get the date and convert it into a ledger/beancount formatted date.
d8 = self.transaction['date']
d8_format = options.output_date_format if options and 'output_date_format' in options else '%Y-%m-%d'
self.transaction['transaction_date'] = d8.date().strftime(d8_format)
self.desc = self.transaction['name']
# amnt = self.transaction['amount']
self.transaction['currency'] = options.currency
# self.transaction['debit_amount'] = amnt
# self.transaction['debit_currency'] = currency
# self.transaction['credit_amount'] = ''
# self.transaction['credit_currency'] = ''
self.transaction['posting_account'] = options.posting_account
self.transaction['cleared_character'] = options.cleared_character
if options.template_file:
with open(options.template_file, 'r', encoding='utf-8') as f:
self.transaction['transaction_template'] = f.read()
else:
self.transaction['transaction_template'] = ''
def query(self):
"""
We print a summary of the record on the screen, and allow you to
choose the destination account.
"""
return '{0} {1:<40} {2}'.format(
self.transaction['date'],
self.desc,
self.transaction['amount']
)
def journal_entry(self, payee, account, tags):
"""
Return a formatted journal entry recording this Entry against
the specified posting account
"""
if self.options.output_format == 'ledger':
def_template = cm.DEFAULT_LEDGER_TEMPLATE
else:
def_template = cm.DEFAULT_BEANCOUNT_TEMPLATE
if self.transaction['transaction_template']:
template = (self.transaction['transaction_template'])
else:
template = (def_template)
if self.options.output_format == 'beancount':
ret_tags = ' {}'.format(tags) if tags else ''
else:
ret_tags = ' ; {}'.format(tags) if tags else ''
format_data = {
'associated_account': account,
'payee': payee,
'tags': ret_tags
}
format_data.update(self.transaction['addons'])
format_data.update(self.transaction)
return template.format(**format_data)
class OutputRenderer(metaclass=ABCMeta):
"""
Base class for output rendering.
"""
def __init__(self, transactions, options):
self.transactions = transactions
self.possible_accounts = set([])
self.possible_payees = set([])
self.possible_tags = set([])
self.mappings = []
self.map_file = options.mapping_file
self.read_mapping_file()
        self.journal_file = options.journal_file
self.journal_lines = []
self.options = options
self.get_possible_accounts_and_payees()
# Add payees/accounts/tags from mappings
for m in self.mappings:
self.possible_payees.add(m[1])
self.possible_accounts.add(m[2])
if m[3]:
if options.output_format == 'ledger':
                    self.possible_tags.update(set(m[3][0].split(':')))
else:
self.possible_tags.update([t.replace('#', '') for t in m[3][0].split(' ')])
def read_mapping_file(self):
"""
        Mappings are a CSV file with at least three columns.
        The first is a string to be matched against an entry description.
        The second is the payee against which such entries should be posted.
        The third is the account against which such entries should be posted.
        Any additional columns are read as tags.
If the match string begins and ends with '/' it is taken to be a
regular expression.
"""
if not self.map_file:
return
with open(self.map_file, 'r', encoding='utf-8', newline='') as f:
map_reader = csv.reader(f)
for row in map_reader:
if len(row) > 1:
pattern = row[0].strip()
payee = row[1].strip()
account = row[2].strip()
tags = row[3:]
if pattern.startswith('/') and pattern.endswith('/'):
try:
pattern = re.compile(pattern[1:-1], re.I)
except re.error as e:
print(
"Invalid regex '{0}' in '{1}': {2}"
.format(pattern, self.map_file, e),
file=sys.stderr)
sys.exit(1)
self.mappings.append((pattern, payee, account, tags))
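    # Editor's note: illustrative sketch, not part of the original module. Two
    # hypothetical rows in the mapping CSV described above -- a literal match and
    # a regular-expression match, the latter with one extra tag column:
    #
    #   SAFEWAY,Safeway,Expenses:Groceries
    #   /^AMZN.*/,Amazon,Expenses:Shopping,online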
def append_mapping_file(self, desc, payee, account, tags):
if self.map_file:
with open(self.map_file, 'a', encoding='utf-8', newline='') as f:
writer = csv.writer(f)
ret_tags = tags if len(tags) > 0 else ''
writer.writerow([desc, payee, account, ret_tags])
def process_transactions(self, callback=None):
"""
Read transactions from Mongo (Plaid) and
process them. Writes Ledger/Beancount formatted
lines either to out_file or stdout.
Parameters:
callback: A function taking a single transaction update object to store
in the DB immediately after collecting the information from the user.
"""
out = self._process_plaid_transactions(callback=callback)
if self.options.headers_file:
headers = ''.join(open(self.options.headers_file, mode='r').readlines())
print(headers, file=self.options.outfile)
print(*self.journal_lines, sep='\n', file=self.options.outfile)
return out
def _process_plaid_transactions(self, callback=None):
"""Process plaid transaction and return beancount/ledger formatted
lines.
"""
out = []
for t in self.transactions:
entry = Entry(t, self.options)
payee, account, tags = self.get_payee_and_account(entry)
dic = {}
dic['transaction_id'] = t['transaction_id']
dic['tags'] = tags
dic['associated_account'] = account
dic['payee'] = payee
dic['posting_account'] = self.options.posting_account
out.append(dic)
# save the transactions into the database as they are processed
if callback: callback(dic)
self.journal_lines.append(entry.journal_entry(payee, account, tags))
return out
def prompt_for_value(self, text_prompt, values, default):
sep = ':' if text_prompt == 'Payee' else ' '
a = prompt(
'{} [{}]: '.format(text_prompt, default),
completer=separator_completer(values, sep=sep)
)
# Handle tag returning none if accepting
return a if (a or text_prompt == 'Tag') else default
def get_payee_and_account(self, entry):
payee = entry.desc
account = self.options.default_expense
tags = ''
found = False
# Try to match entry desc with mappings patter

TwilioDevEd/api-snippets | ip-messaging/rest/services/update-service/update-service.6.x.py | Python | mit | 504 | 0

# Download the Python helper library from twilio.com/docs/python/install
import os
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
# To set up environmental variables, see http://twil.io/secure
account = os.environ['TWILIO_ACCOUNT_SID']
token = os.environ['TWILIO_AUTH_TOKEN']
client = Client(account, token)
service = client.chat.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update(
    friendly_name="NEW_FRIENDLY_NAME"
)
print(service.friendly_name)

epage/telepathy-bluewire | src/protocol/backend.py | Python | lgpl-2.1 | 13,051 | 0.022067

#!/usr/bin/python
"""
Resources:
http://code.google.com/p/pybluez/
http://lightblue.sourceforge.net/
http://code.google.com/p/python-bluetooth-scanner
"""
from __future__ import with_statement
import select
import logging
import bluetooth
import gobject
import util.misc as misc_utils
_moduleLogger = logging.getLogger(__name__)
class _BluetoothConnection(gobject.GObject):
__gsignals__ = {
'data_ready' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'closed' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
}
def __init__(self, socket, addr, protocol):
gobject.GObject.__init__(self)
self._socket = socket
self._address = addr
self._dataId = gobject.io_add_watch (self._socket, gobject.IO_IN, self._on_data)
self._protocol = protocol
def close(self):
gobject.source_remove(self._dataId)
self._dataId = None
self._socket.close()
self._socket = None
self.emit("closed")
@property
def socket(self):
return self._socket
@property
def address(self):
return self._address
@property
def protocol(self):
return self._protocol
@misc_utils.log_exception(_moduleLogger)
def _on_data(self, source, condition):
self.emit("data_ready")
return True
gobject.type_register(_BluetoothConnection)
class _BluetoothListener(gobject.GObject):
__gsignals__ = {
'incoming_connection' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, ),
),
'start_listening' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'stop_listening' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
}
def __init__(self, protocol, timeout):
gobject.GObject.__init__(self)
self._timeout = timeout
self._protocol = protocol
self._socket = None
self._incomingId = None
def start(self):
assert self._socket is None and self._incomingId is None
self._socket = bluetooth.BluetoothSocket(self._protocol["transport"])
self._socket.settimeout(self._timeout)
self._socket.bind(("", bluetooth.PORT_ANY))
self._socket.listen(1)
self._incomingId = gobject.io_add_watch(
self._socket, gobject.IO_IN, self._on_incoming
)
bluetooth.advertise_service(self._socket, self._protocol["name"], self._protocol["uuid"])
self.emit("start_listening")
def stop(self):
if self._socket is None or self._incomingId is None:
return
gobject.source_remove(self._incomingId)
self._incomingId = None
bluetooth.stop_advertising(self._socket)
self._socket.close()
self._socket = None
self.emit("stop_listening")
@property
def isListening(self):
return self._socket is not None and self._incomingId is not None
@property
def socket(self):
assert self._socket is not None
return self._socket
@misc_utils.log_exception(_moduleLogger)
def _on_incoming(self, source, condition):
newSocket, (address, port) = self._socket.accept()
newSocket.settimeout(self._timeout)
connection = _BluetoothConnection(newSocket, address, self._protocol)
self.emit("incoming_connection", connection)
return True
gobject.type_register(_BluetoothListener)
class _DeviceDiscoverer(bluetooth.DeviceDiscoverer):
def __init__(self, timeout):
bluetooth.DeviceDiscoverer.__init__(self)
self._timeout = timeout
self._devices = []
self._devicesInProgress = []
@property
def devices(self):
return self._devices
def find_devices(self, *args, **kwds):
		# Ensure we always start clean, which is the reason we overrode this
self._devicesInProgress = []
newArgs = [self]
newArgs.extend(args)
bluetooth.DeviceDiscoverer.find_devices(*newArgs, **kwds)
def process_inquiry(self):
# The default impl calls into some hci code but an example used select,
# so going with the example
while self.is_inquiring or 0 < len(self.names_to_find):
# The whole reason for overriding this
_moduleLogger.debug("Event (%r, %r)"% (self.is_inquiring, self.names_to_find))
rfds = select.select([self], [], [], self._timeout)[0]
if self in rfds:
self.process_event()
@misc_utils.log_exception(_moduleLogger)
def device_discovered(self, address, deviceclass, name):
device = address, deviceclass, name
_moduleLogger.debug("Device Discovered %r" % (device, ))
self._devicesInProgress.append(device)
@misc_utils.log_exception(_moduleLogger)
def inquiry_complete(self):
_moduleLogger.debug("Inquiry Complete")
self._devices = self._devicesInProgress
class BluetoothBackend(gobject.GObject):
__gsignals__ = {
'login' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'logout' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'contacts_update' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, ),
),
}
def __init__(self):
gobject.GObject.__init__(self)
self._disco = None
self._timeout = 8
self._listeners = {}
self._protocols = []
self._isListening = True
def add_protocol(self, protocol):
assert not self.is_logged_in()
self._protocols.append(protocol)
def login(self):
self._disco = _DeviceDiscoverer(self._timeout)
isListening = self._isListening
for protocol in self._protocols:
protoId = protocol["uuid"]
self._listeners[protoId] = _BluetoothListener(protocol, self._timeout)
if isListening:
self._listeners[protoId].start()
self.emit("login")
def logout(self):
for protocol in self._protocols:
protoId = protocol["uuid"]
listener = self._listeners[protoId]
listener.close()
self._listeners.clear()
self._disco.cancel_inquiry() # precaution
self.emit("logout")
def is_logged_in(self):
if self._listeners:
return True
else:
return False
def is_listening(self):
return self._isListening
def enable_listening(self, enable):
if enable:
for listener in self._listeners.itervalues():
assert not listener.isListening
for listener in self._listeners.itervalues():
listener.start()
else:
for listener in self._listeners.itervalues():
assert listener.isListening
for listener in self._listeners.itervalues():
listener.stop()
def get_contacts(self):
try:
self._disco.find_devices(
duration=self._timeout,
flush_cache = True,
lookup_names = True,
)
self._disco.process_inquiry()
except bluetooth.BluetoothError, e:
# lightblue does this, so I guess I will too
_moduleLogger.error("Error while getting contacts, attempting to cancel")
try:
self._disco.cancel_inquiry()
finally:
raise e
return self._disco.devices
def get_contact_services(self, address):
services = bluetooth.find_service(address = address)
return services
def connect(self, addr, transport, port):
sock = bluetooth.BluetoothSocket(transport)
sock.settimeout(self._timeout)
try:
sock.connect((addr, port))
except bluetooth.error, e:
sock.close()
raise
return _BluetoothConnection(sock, addr, "")
gobject.type_register(BluetoothBackend)
class BluetoothClass(object):
def __init__(self, description):
self.description = description
def __str__(self):
return self.description
MAJOR_CLASS = BluetoothClass("Major Class")
MAJOR_CLASS.MISCELLANEOUS = BluetoothClass("Miscellaneous")
MAJOR_CLASS.COMPUTER = BluetoothClass("Computer")
MAJOR_CLASS.PHONE = BluetoothClass("Phone")
MAJOR_CLASS.LAN = BluetoothClass("LAN/Network Access Point")
MAJOR_CLASS.AV = BluetoothClass("Audio/Video")
MAJOR_CLASS.PERIPHERAL = BluetoothClass("Peripheral")
MAJOR_CLASS.IMAGING = BluetoothClass("Imaging")
MAJOR_CLASS.UNCATEGORIZED = BluetoothClass("Uncategorized")
MAJOR_CLASS.MISCELLANEOUS.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.COMPUTER.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.COMPUTER.DESKTOP = BluetoothClass("Desktop workstation")
MAJOR_CLASS.COMPUTER.SERVER = BluetoothClass("Server-class computer")
MAJOR_CLASS.COMPUTER.LAPTOP = BluetoothClass("Laptop")
MAJOR_CLASS.COMPUTER.HANDHELD = BluetoothClass("Handheld PC/PDA (clam shell)")
MAJOR_CLASS.COMPUTER.PALM_SIZE = BluetoothClass("Palm sized PC/PDA")
MAJOR_CLASS.COMPUTER.WEARABLE = BluetoothClass("Wearable computer (Watch sized)")
MAJOR_CLASS.COMPUTER.RESERVED = B

tilacog/rows | tests/tests_plugin_txt.py | Python | gpl-3.0 | 2,876 | 0.000696

# coding: utf-8
# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import tempfile
import unittest
import rows
import rows.plugins.txt
import utils
class PluginTxtTestCase(utils.RowsTestMixIn, unittest.TestCase):
filename = 'tests/data/all-field-types.txt'
encoding = 'utf-8'
def test_imports(self):
self.assertIs(rows.export_to_txt, rows.plugins.txt.export_to_txt)
def test_export_to_txt_filename(self):
temp = tempfile.NamedTemporaryFile(delete=False)
self.files_to_delete.append(temp.name)
rows.export_to_txt(utils.table, temp.name)
self.assert_file_contents_equal(temp.name, self.filename)
def test_export_to_txt_fobj(self):
# TODO: may test with codecs.open passing an encoding
temp = tempfile.NamedTemporaryFile(delete=False)
self.files_to_delete.append(temp.name)
rows.export_to_txt(utils.table, temp.file)
self.assert_file_contents_equal(temp.name, self.filename)
def test_export_to_txt_fobj_some_fields_only(self):
# TODO: this test may be inside `tests_operations.py` (testing
        # `serialize` instead of a plugin which calls it)
temp = tempfile.NamedTemporaryFile(delete=False)
self.files_to_delete.append(temp.name)
fobj = temp.file
rows.export_to_txt(utils.table, temp.file) # all fields
fobj.seek(0)
table_fields = utils.table.fields.keys()
expected_fields = table_fields
_, second_line = fobj.readline(), fobj.readline()
        fields = [field.strip() for field in second_line.split('|')
if field.strip()]
        self.assertEqual(expected_fields, fields)
expected_fields = table_fields[2:5]
self.assertNotEqual(expected_fields, table_fields)
fobj.seek(0)
rows.export_to_txt(utils.table, temp.file, field_names=expected_fields)
fobj.seek(0)
_, second_line = fobj.readline(), fobj.readline()
fields = [field.strip() for field in second_line.split('|')
if field.strip()]
self.assertEqual(expected_fields, fields)

Zedmor/powerball | src/powerball/urls.py | Python | mit | 852 | 0

from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
import profiles.urls
import accounts.urls
from . import views
urlpatterns = [
url(r'^$', views.HomePage.as_view(), name='home'),
url(r'^users/', include(profiles.urls, namespace='profiles')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include(accounts.urls, namespace='accounts')),
url(r'^post_url/$', views.HomePage.as_view(), name='post')
]
# User-uploaded files like profile pics need to be served in development
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Include django debug toolbar if DEBUG is on
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]

apyrgio/snf-ganeti | test/py/ganeti.mcpu_unittest.py | Python | bsd-2-clause | 9,694 | 0.006911

#!/usr/bin/python
#
# Copyright (C) 2009, 2011 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for unittesting the mcpu module"""
import unittest
import itertools
from ganeti import compat
from ganeti import mcpu
from ganeti import opcodes
from ganeti import cmdlib
from ganeti import locking
from ganeti import constants
from ganeti.constants import \
LOCK_ATTEMPTS_TIMEOUT, \
LOCK_ATTEMPTS_MAXWAIT, \
LOCK_ATTEMPTS_MINWAIT
import testutils
REQ_BGL_WHITELIST = compat.UniqueFrozenset([
opcodes.OpClusterActivateMasterIp,
opcodes.OpClusterDeactivateMasterIp,
opcodes.OpClusterDestroy,
opcodes.OpClusterPostInit,
opcodes.OpClusterRename,
opcodes.OpInstanceRename,
opcodes.OpNodeAdd,
opcodes.OpNodeRemove,
opcodes.OpTestAllocator,
])
class TestLockAttemptTimeoutStrategy(unittest.TestCase):
def testConstants(self):
tpa = mcpu.LockAttemptTimeoutStrategy._TIMEOUT_PER_ATTEMPT
self.assert_(len(tpa) > LOCK_ATTEMPTS_TIMEOUT / LOCK_ATTEMPTS_MAXWAIT)
self.assert_(sum(tpa) >= LOCK_ATTEMPTS_TIMEOUT)
self.assertTrue(LOCK_ATTEMPTS_TIMEOUT >= 1800,
msg="Waiting less than half an hour per priority")
self.assertTrue(LOCK_ATTEMPTS_TIMEOUT <= 3600,
msg="Waiting more than an hour per priority")
def testSimple(self):
strat = mcpu.LockAttemptTimeoutStrategy(_random_fn=lambda: 0.5,
_time_fn=lambda: 0.0)
prev = None
for i in range(len(strat._TIMEOUT_PER_ATTEMPT)):
timeout = strat.NextAttempt()
self.assert_(timeout is not None)
self.assert_(timeout <= LOCK_ATTEMPTS_MAXWAIT)
self.assert_(timeout >= LOCK_ATTEMPTS_MINWAIT)
self.assert_(prev is None or timeout >= prev)
prev = timeout
for _ in range(10):
self.assert_(strat.NextAttempt() is None)
class TestDispatchTable(unittest.TestCase):
def test(self):
for opcls in opcodes.OP_MAPPING.values():
if not opcls.WITH_LU:
continue
self.assertTrue(opcls in mcpu.Processor.DISPATCH_TABLE,
msg="%s missing handler class" % opcls)
# Check against BGL whitelist
lucls = mcpu.Processor.DISPATCH_TABLE[opcls]
if lucls.REQ_BGL:
self.assertTrue(opcls in REQ_BGL_WHITELIST,
msg=("%s not whitelisted for BGL" % opcls.OP_ID))
else:
self.assertFalse(opcls in REQ_BGL_WHITELIST,
msg=("%s whitelisted for BGL, but doesn't use it" %
opcls.OP_ID))
class TestProcessResult(unittest.TestCase):
def setUp(self):
self._submitted = []
self._count = itertools.count(200)
def _Submit(self, jobs):
job_ids = [self._count.next() for _ in jobs]
self._submitted.extend(zip(job_ids, jobs))
return job_ids
def testNoJobs(self):
for i in [object(), [], False, True, None, 1, 929, {}]:
self.assertEqual(mcpu._ProcessResult(NotImplemented, NotImplemented, i),
i)
def testDefaults(self):
src = opcodes.OpTestDummy()
res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[
opcodes.OpTestDelay(),
opcodes.OpTestDelay(),
], [
opcodes.OpTestDelay(),
]]))
self.assertEqual(res, {
constants.JOB_IDS_KEY: [200, 201],
})
(_, (op1, op2)) = self._submitted.pop(0)
(_, (op3, )) = self._submitted.pop(0)
self.assertRaises(IndexError, self._submitted.pop)
for op in [op1, op2, op3]:
self.assertTrue("OP_TEST_DUMMY" in op.comment)
self.assertFalse(hasattr(op, "priority"))
self.assertFalse(hasattr(op, "debug_level"))
def testParams(self):
src = opcodes.OpTestDummy(priority=constants.OP_PRIO_HIGH,
debug_level=3)
res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[
opcodes.OpTestDelay(priority=constants.OP_PRIO_LOW),
], [
opcodes.OpTestDelay(comment="foobar", debug_level=10),
]], other=True, value=range(10)))
self.assertEqual(res, {
constants.JOB_IDS_KEY: [200, 201],
"other": True,
"value": range(10),
})
(_, (op1, )) = self._submitted.pop(0)
    (_, (op2, )) = self._submitted.pop(0)
self.assertRaises(IndexError, self._submitted.pop)
self.assertEqual(op1.priority, constants.OP_PRIO_LOW)
self.assertTrue("OP_TEST_DUMMY" in op1.comment)
self.assertEqual(op1.debug_level, 3)
    self.assertEqual(op2.priority, constants.OP_PRIO_HIGH)
self.assertEqual(op2.comment, "foobar")
self.assertEqual(op2.debug_level, 3)
class _FakeLuWithLocks:
def __init__(self, needed_locks, share_locks):
self.needed_locks = needed_locks
self.share_locks = share_locks
class _FakeGlm:
def __init__(self, owning_nal):
self._owning_nal = owning_nal
def check_owned(self, level, names):
assert level == locking.LEVEL_NODE_ALLOC
assert names == locking.NAL
return self._owning_nal
def owning_all(self, level):
return False
class TestVerifyLocks(unittest.TestCase):
def testNoLocks(self):
lu = _FakeLuWithLocks({}, {})
glm = _FakeGlm(False)
mcpu._VerifyLocks(lu, glm,
_mode_whitelist=NotImplemented,
_nal_whitelist=NotImplemented)
def testNotAllSameMode(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: ["foo"],
}, {
level: 0,
locking.LEVEL_NODE_ALLOC: 0,
})
glm = _FakeGlm(False)
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
def testDifferentMode(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: ["foo"],
}, {
level: 0,
locking.LEVEL_NODE_ALLOC: 1,
})
glm = _FakeGlm(False)
try:
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
except AssertionError, err:
self.assertTrue("using the same mode as nodes" in str(err))
else:
self.fail("Exception not raised")
# Once more with the whitelist
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks],
_nal_whitelist=[])
def testSameMode(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: ["foo"],
locking.LEVEL_NODE_ALLOC: locking.ALL_SET,
}, {
level: 1,
locking.LEVEL_NODE_ALLOC: 1,
})
glm = _FakeGlm(True)
try:
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks],
_nal_whitelist=[])
except AssertionError, err:
self.assertTrue("whitelisted to use different modes" in str(err))
else:
self.fail("Exception not raised")
# Once more without the whitelis

falcon-org/Falcon | test/TestCache.py | Python | bsd-3-clause | 2,048 | 0.025879

#!/usr/bin/env python
import time
makefile = '''
{
"rules":
[
{
"inputs": [ "source1" ],
"outputs": [ "output" ],
"cmd": "cat source1 > output && cat source2 >> output && echo 'output: source1 source2' > deps",
"depfile": "deps"
}
]
}
'''
def set_version_1(test):
test.write_file("source1", "1")
test.write_file("source2", "2")
def set_version_2(test):
test.write_file("source1", "2")
test.write_file("source2", "3")
def run(test):
test.create_makefile(makefile)
set_version_1(test)
test.start()
assert(set(["source1", "output"]) == set(test.get_dirty_targets()))
test.build()
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '12')
assert(set(["source1", "source2"]) == set(test.get_inputs_of("output")))
assert(set(["output"]) == set(test.get_outputs_of("source2")))
set_version_2(test)
test.expect_watchman_trigger("source1")
test.expect_watchman_trigger("source2")
assert(set(["source1", "source2",
|
"output"]) == set(test.get_dirty_targets()))
test.build()
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '23')
set_version_1(test)
test.expect_watchman_trigger("source1")
test.expect_watchman_trigger("source2")
assert(set(["source1", "source2", "output"]) == set(test.get_dirty_targets()))
# Build and check we retrieve output from the cache
data = test.build()
assert(len(data['cmds']) == 1)
  assert(data['cmds'][0] == { 'cache' : 'output' })
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '12')
set_version_2(test)
test.expect_watchman_trigger("source1")
test.expect_watchman_trigger("source2")
assert(set(["source1", "source2", "output"]) == set(test.get_dirty_targets()))
# Build and check we retrieve output from the cache
data = test.build()
assert(len(data['cmds']) == 1)
assert(data['cmds'][0] == { 'cache' : 'output' })
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '23')

valley3405/testMongo01 | test02.py | Python | gpl-2.0 | 2,298 | 0.046386

#!/usr/bin/env python
#coding:utf-8
# Author: --<qingfengkuyu>
# Purpose: Using MongoDB
# Created: 2014/4/14
# The 32-bit build can store at most 2.5GB of data (NoSQLFan: max file size 2GB; 64-bit recommended for production)
import pymongo
import datetime
import random
# Create the connection
conn = pymongo.MongoClient('localhost',27017)
# Connect to the database
db = conn.study
#db = conn['study']
# Print all collection names, then connect to the collection
print u'所有聚集:',db.collection_names()
posts = db.post
#posts = db['post']
print posts
# Insert records
new_post = {"AccountID":22,"UserName":"libing",'date':datetime.datetime.now()}
new_posts = [{"AccountID":22,"UserName":"liuw",'date':datetime.datetime.now()},
{"AccountID":23,"UserName":"urling",'date':datetime.datetime.now()}]#每条记录插入时间都不一样
posts.insert(new_post)
#posts.insert(new_posts)  # batch-insert multiple records
# Delete records
print u'删除指定记录:\n',posts.find_one({"AccountID":22,"UserName":"libing"})
posts.remove({"AccountID"
|
:22,"UserName":"libing"})
# Update a record in the collection
posts.update({"UserName":"urling"},{"$set":{'AccountID':random.randint(20,50)}})
# Query records and count them
print u'记录总计为:',posts.count(),posts.find().count()
print u'查询单条记录:\n',posts.find_one()
print posts.find_one({"UserName":"liuw"})
# Query all records
print u'查询多条记录:'
#for item in posts.find():  # query all records
#for item in posts.find({"UserName":"urling"}):  # query specific records
#for item in posts.find().sort("UserName"):  # sort results by UserName, ascending by default
#for item in posts.find().sort("UserName",pymongo.ASCENDING):  # sort by UserName; ASCENDING is ascending, DESCENDING is descending
for item in posts.find().sort([("UserName",pymongo.ASCENDING),('date',pymongo.DESCENDING)]):  # sort results by multiple columns
print item
# Check the performance of the query
#posts.create_index([("UserName", pymongo.ASCENDING), ("date", pymongo.DESCENDING)])  # add an index
print posts.find().sort([("UserName",pymongo.ASCENDING),('date',pymongo.DESCENDING)]).explain()["cursor"]  # without an index the query uses BasicCursor
print posts.find().sort([("UserName",pymongo.ASCENDING),('date',pymongo.DESCENDING)]).explain()["nscanned"]  # number of records scanned while executing the query

wolf1986/log_utils | log_utils/helper.py | Python | lgpl-3.0 | 4,265 | 0.001407

import datetime
import io
import logging
import logging.handlers
import os
import sys
from collections import deque
from time import perf_counter
import colorlog
class LogHelper:
FORMATTER_COLOR = colorlog.ColoredFormatter('{log_color}{asctime} {name}: {levelname} {message}', style='{')
FORMATTER = logging.Formatter('{asctime} {name}: {levelname} {message}', style='{')
@classmethod
def generate_color_handler(cls, stream=sys.stdout):
handler = logging.StreamHandler(stream)
handler.setFormatter(cls.FORMATTER_COLOR)
return handler
@classmethod
def get_script_name(cls):
script_name = os.path.basename(sys.argv[0])
script_name, _ = os.path.splitext(script_name)
return script_name
@classmethod
def generate_simple_rotating_file_handler(cls, path_log_file=None, when='midnight', files_count=7):
if path_log_file is None:
path_dir = os.path.dirname(sys.argv[0])
path_log_file = cls.suggest_script_log_name(path_dir)
handler = logging.handlers.TimedRotatingFileHandler(path_log_file, when=when, backupCount=files_count)
handler.setLevel(logging.DEBUG)
handler.setFormatter(cls.FORMATTER)
return handler
@classmethod
def suggest_script_log_name(cls, path_dir):
return os.path.join(path_dir, cls.get_script_name() + '.log')
@staticmethod
def timestamp(with_ms=False, time=None):
if time is None:
            time = datetime.datetime.now()
if with_ms:
return time.strftime('%Y%m%d_%H%M%S.%f')[:-3]
else:
return time.strftime('%Y%m%d_%H%M%S')
class PerformanceMetric:
def __init__(self, *, n_samples=1000, units_suffix='', units_format='.2f', name=None):
super().__init__()
self.name: str = name
self.queue_samples = deque(maxlen=n_samples)
self.total = 0
self.last = 0
self.units_str = units_suffix
        self.units_format = units_format
def reset(self):
self.total = 0
self.last = 0
self.queue_samples.clear()
@property
def n_samples(self):
return len(self.queue_samples)
def __str__(self):
str_name = f'[{self.name}] ' if self.name else ''
if self.n_samples == 0:
return f'{str_name}No measurements'
return '{}Average: {:{}} {}; Last: {:{}} {}; Samples: {};'.format(
str_name, self.average, self.units_format, self.units_str,
self.last, self.units_format, self.units_str,
self.n_samples
)
def last_str(self):
str_name = f'[{self.name}] ' if self.name else ''
return f'{str_name}{self.last:{self.units_format}} {self.units_str}'
@property
def average(self):
if self.n_samples == 0:
return None
return self.total / self.n_samples
def submit_sample(self, sample: float):
sample_popped = 0
if self.n_samples == self.queue_samples.maxlen:
sample_popped = self.queue_samples.popleft()
self.last = sample
self.total += self.last - sample_popped
self.queue_samples.append(self.last)
class PerformanceTimer(PerformanceMetric):
def __init__(self, n_samples=1000, units_format='.1f', **kwargs) -> None:
super().__init__(n_samples=n_samples, units_suffix='sec', units_format=units_format, **kwargs)
self.time_last_start = 0
def __enter__(self):
self.begin()
return self
def __exit__(self, t, value, tb):
self.end()
def begin(self):
self.time_last_start = perf_counter()
def end(self):
self.submit_sample(self.peek())
def peek(self):
return perf_counter() - self.time_last_start
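# Editor's note: illustrative usage sketch, not part of the original module;
# the metric name 'db-query' is hypothetical.
def _example_performance_timer():
    timer = PerformanceTimer(name='db-query')
    with timer:
        pass  # hypothetical timed work would go here
    print(timer)  # e.g. "[db-query] Average: 0.0 sec; Last: 0.0 sec; Samples: 1;"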
class PrintStream:
"""
Shortcut for using `StringIO`
printf = PrintStream()
printf('Case Results:')
printf(...)
string = str(printf)
"""
def __init__(self, stream=None):
if not stream:
stream = io.StringIO()
self.stream = stream
def __call__(self, *args, **kwargs):
print(*args, file=self.stream, **kwargs)
def __str__(self):
return self.stream.getvalue()

lalinsky/mb2freedb | mb2freedb/utils.py | Python | mit | 2,120 | 0.000472

# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import re
import syslog
from logging import Handler
from logging.handlers import SysLogHandler
class LocalSysLogHandler(Handler):
"""
Logging handler that logs to the local syslog using the syslog module
"""
facility_names = {
"auth": syslog.LOG_AUTH,
"cron": syslog.LOG_CRON,
"daemon": syslog.LOG_DAEMON,
"kern": syslog.LOG_KERN,
"lpr": syslog.LOG_LPR,
"mail": syslog.LOG_MAIL,
"news": syslog.LOG_NEWS,
"syslog": syslog.LOG_SYSLOG,
"user": syslog.LOG_USER,
"uucp": syslog.LOG_UUCP,
"local0": syslog.LOG_LOCAL0,
"local1": syslog.LOG_LOCAL1,
"local2": syslog.LOG_LOCAL2,
"local3": syslog.LOG_LOCAL3,
"local4": syslog.LOG_LOCAL4,
"local5": syslog.LOG_LOCAL5,
"local6": syslog.LOG_LOCAL6,
"local7": syslog.LOG_LOCAL7,
}
priority_map = {
"DEBUG": syslog.LOG_DEBUG,
"INFO": syslog.LOG_INFO,
"WARNING": syslog.LOG_WARNING,
"ERROR": syslog.LOG_ERR,
"CRITICAL": syslog.LOG_CRIT
}
def __init__(self, ident=None, facility=syslog.LOG_USER, log_pid=False):
Handler.__init__(self)
self.facility = facility
if isinstance(facility, basestring):
self.facility = self.facility_names[facility]
options = 0
if log_pid:
options |= syslog.LOG_PID
syslog.openlog(ident, options, self.facility)
        self.formatter = None
def close(self):
Handler.close(self)
syslog.closelog()
def emit(self, record):
try:
msg = self.format(record)
if isinstance(msg, unicode):
msg = msg.encode('utf-8')
priority = self.priority_map[record.levelname]
for m in msg.splitlines():
syslog.syslog(self.facility | priority, m)
except StandardError:
self.handleError(record)
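# Editor's note: illustrative usage sketch, not part of the original module;
# the logger name and ident below are hypothetical.
def _example_local_syslog_handler():
    import logging
    logger = logging.getLogger('mb2freedb.example')
    logger.addHandler(LocalSysLogHandler(ident='mb2freedb', facility='daemon', log_pid=True))
    logger.warning('this message goes to the local syslog')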

HanWenfang/syncless | examples/demo_gevent_only.py | Python | apache-2.0 | 4,479 | 0.016522

#! /usr/local/bin/stackless2.6
# by pts@fazekas.hu at Fri Jun 17 14:08:07 CEST 2011
"""Demo for hosting a gevent application with Stackless, without Syncless."""
__author__ = 'pts@fazekas.hu (Peter Szabo)'
import sys
# Import best_greenlet before gevent to add greenlet emulation for Stackless
# if necessary.
import syncless.best_greenlet
import gevent
import gevent.hub
import gevent.socket
class Lprng(object):
__slots__ = ['seed']
def __init__(self, seed=0):
self.seed = int(seed) & 0xffffffff
  def next(self):
"""Generate a 32-bit unsigned random number."""
# http://en.wikipedia.org/wiki/Linear_congruential_generator
self.seed = (
((1664525 * self.seed) & 0xffffffff) + 1013904223) & 0xffffffff
return self.seed
def __iter__(self):
return self
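# Editor's note: illustrative sketch, not part of the original demo. The class
# above applies the textbook LCG recurrence
#   seed' = (1664525 * seed + 1013904223) mod 2**32
# so a quick sanity check on the first outputs looks like this:
def _ExampleLprng():
  assert Lprng(0).next() == 1013904223
  assert Lprng(1).next() == (1664525 + 1013904223) & 0xffffffff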
def Worker(client_socket, addr):
print >>sys.stderr, 'info: connection from %r, handled by %r' % (
addr, gevent.hub.greenlet.getcurrent())
f = client_socket.makefile()
# Read HTTP request.
line1 = None
while True:
line = f.readline().rstrip('\r\n')
if not line: # Empty line, end of HTTP request.
break
if line1 is None:
line1 = line
# Parse HTTP request.
# Please note that an assertion here doesn't abort the server.
items = line1.split(' ')
assert 3 == len(items)
assert items[2] in ('HTTP/1.0', 'HTTP/1.1')
assert items[0] == 'GET'
assert items[1].startswith('/')
# This is to demonstrate the error reporting and recovery behavior of gevent:
# We get an error message like this, and the process execution continues:
#
# Traceback (most recent call last):
# File "/usr/local/lib/python2.6/site-packages/gevent/greenlet.py", line 388, in run
# result = self._run(*self.args, **self.kwargs)
# File "./s2.py", line 137, in Worker
# assert 'bad' not in items[1]
# AssertionError
# <Greenlet at 0xb71acbecL: Worker(<socket at 0xb747668cL fileno=10 sock=127.0.0.1:80, ('127.0.0.1', 55196))> failed with AssertionError
assert 'bad' not in items[1]
if 'sysexit' in items[1]:
print >>sys.stderr, 'info: exiting with SystemExit'
#sys.exit() # Doesn't work, gevent.core.__event_handler catches it.
gevent.hub.MAIN.throw(SystemExit)
if 'exit' in items[1]:
print >>sys.stderr, 'info: exiting with throw'
gevent.hub.MAIN.throw()
try:
num = int(items[1][1:])
except ValueError:
num = None
if 'slow' in items[1]:
gevent.hub.sleep(5)
# Write HTTP response.
if num is None:
f.write('HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n')
f.write('<a href="/0">start at 0</a><p>Hello, World!\n')
else:
next_num = Lprng(num).next()
f.write('HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n')
f.write('<a href="/%d">continue with %d</a>\n' %
(next_num, next_num))
#f.flush() # Not needed here.
def GeventListener(server_socket):
# Please note that exceptions raised here will be printed and then ignored
# by the gevent.hub main loop.
print >>sys.stderr, (
'info: accepting connections in %r' % gevent.hub.greenlet.getcurrent())
while True:
client_socket, addr = server_socket.accept()
gevent.spawn(Worker, client_socket, addr)
# Equally good:
#gevent.hub.spawn_raw(Worker, client_socket, addr)
client_socket = addr = None # Save memory.
if __name__ == '__main__':
# We need this patch so gevent.hub.spawn_raw below will create a greenlet
# of the correct type.
server_socket = gevent.socket.socket()
# Old:
# gevent.socket.set_reuse_addr(server_socket)
# server_socket.bind(('127.0.0.1', 8080))
# server_socket.listen(128)
gevent.socket.bind_and_listen(server_socket, ('127.0.0.1', 8080), 128,
reuse_addr=True)
print >>sys.stderr, 'listening on %r' % (server_socket.getsockname(),)
# All non-blocking gevent operations must be initiated from a greenlet
# invoked by the gevent hub. The easiest way to ensure that is to move these
# operations to a function (GeventListener), and call this function with
# gevent.hub.spawn_raw. (As a side effect, if an exception happens in that
# function, the process will continue running.)
gevent.hub.spawn_raw(GeventListener, server_socket)
# Run the gevent main loop indefinitely. This is not a requirement, we
# could do non-blocking Syncless operations instead right here for a long
# time.
syncless.best_greenlet.gevent_hub_main()
assert 0, 'unreached'
|
feliam/pysymemu
|
setup.py
|
Python
|
bsd-3-clause
| 1,306
| 0.022971
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "pysymemu",
version = "0.0.1-alpha",
author = "Felipe Andres Manzano",
author_email = "feliam@binamuse.com",
description = ("A tool for symbolic execution of Intel 64 binaries."),
requires = ['pyelftool', 'capstone' ],
provides = ['pysymemu'],
license = "BSD",
url = 'http://github.com/pysymemu',
download_url= 'http://github.com/',
platforms = ['linux', 'win32', 'win64'],
keywords = "testing reverse enginering symbolic execution white box fuzzing automatic test case generation",
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Testing"
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Deve
|
lopment :: Quality Assurance",
],
test_suite="test",
)
|
srange/SU2
|
preconfigure.py
|
Python
|
lgpl-2.1
| 28,082
| 0.012143
|
#!/usr/bin/env python
## \file configure.py
# \brief An extended configuration script.
# \author T. Albring
# \version 6.2.0 "Falcon"
#
# The current SU2 release has been coordinated by the
# SU2 International Developers Society <www.su2devsociety.org>
# with selected contributions from the open-source community.
#
# Copyright 2012-2019, Francisco D. Palacios, Thomas D. Economon,
# Tim Albring, and the SU2 contributors.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, division, absolute_import
from optparse import OptionParser, BadOptionError
import sys,time, os, subprocess, os.path, glob, re, shutil, fileinput
from subprocess import call
# "Pas
|
s-through" option parsing -- an OptionParser that ignores
# unknown options and lets them pile up in the leftover argument
# list. Useful to pass unknown arguments to the automake configure.
class PassThroughOptionParser(OptionParser):
def _process_long_opt(self, rargs, values):
try:
OptionParser._process_long_opt(self, rargs, values)
except BadOptionError as err:
self.largs.append(err.opt_str)
def _process_short_opts(self, rargs, values):
try:
OptionParser._process_short_opts(self, rargs, values)
except BadOptionError as err:
self.largs.append(err.opt_str)
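# Illustrative sketch (not part of the original script): unknown options are
# collected in the leftover-argument list instead of aborting, which is what
# lets main() forward them to the automake configure. Option names below are
# examples only.
#
#   parser = PassThroughOptionParser()
#   parser.add_option("--enable-mpi", action="store_true", dest="mpi", default=False)
#   opts, leftover = parser.parse_args(["--enable-mpi", "--prefix", "/opt/su2"])
#   # opts.mpi == True, leftover == ["--prefix", "/opt/su2"]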
def main():
# Command Line Options
usage = './preconfigure.py [options]' \
'\nNote: Options not listed below are passed to the automake configure.' \
'\n Compiler flags must be set with \'export CFLAGS=...\' or \'export CXXFLAGS=...\' ' \
'\n before calling this script.'
parser = PassThroughOptionParser(usage = usage)
parser.add_option("--enable-direct-diff", action="store_true",
help="Enable direct differentiation mode support", dest="directdiff", default=False)
parser.add_option("--enable-autodiff", action="store_true",
help="Enable Automatic Differentiation support", dest="ad_support", default=False)
parser.add_option("--with-ad", action="store", type = "string", help="AD Tool, CODI/ADOLC", default="CODI", dest="adtool")
parser.add_option("--enable-mpi", action="store_true",
help="Enable mpi support", dest="mpi_enabled", default=False)
parser.add_option("--enable-PY_WRAPPER", action="store_true",
help="Enable Python wrapper compilation", dest="py_wrapper_enabled", default=False)
parser.add_option("--disable-tecio", action="store_true",
help="Disable Tecplot binary support", dest="tecio_disabled", default=False)
parser.add_option("--disable-normal", action="store_true",
help="Disable normal mode support", dest="normal_mode", default=False)
parser.add_option("-c" , "--check", action="store_true",
help="Check the source code for potential problems", dest="check", default=False)
parser.add_option("-r" , "--replace", action="store_true",
help="Do a search and replace of necessary symbols. Creates back up of source files.", dest="replace", default=False)
parser.add_option("-d" , "--delete", action="store_true",
help="Removes the back up files.", dest="remove", default=False)
parser.add_option("-v" , "--revert", action="store_true",
help="Revert files to original state.", dest="revert", default=False)
parser.add_option("-u", "--update", action="store_true",
help="Update and recompile submodules.", dest="update", default=False)
(options, args)=parser.parse_args()
options.adtool = options.adtool.upper()
if options.directdiff == False:
adtool_dd = ""
else:
adtool_dd = options.adtool
if options.ad_support == False:
adtool_da = ""
else:
adtool_da = options.adtool
conf_environ = os.environ
made_adolc = False
made_codi = False
header()
modes = {'SU2_BASE' : not options.normal_mode == True,
'SU2_DIRECTDIFF' : adtool_dd ,
'SU2_AD' : adtool_da }
# Create a dictionary from the arguments
argument_dict = dict(zip(args[::2],args[1::2]))
# Set the default installation path (if not set with --prefix)
argument_dict['--prefix'] = argument_dict.get('--prefix', os.getcwd().rstrip())
if not options.check:
if any([modes["SU2_AD"] == 'CODI', modes["SU2_DIRECTDIFF"] == 'CODI']):
conf_environ, made_codi = init_codi(argument_dict,modes,options.mpi_enabled, options.update)
configure(argument_dict,
conf_environ,
options.mpi_enabled,
options.py_wrapper_enabled,
options.tecio_disabled,
modes,
made_adolc,
made_codi)
if options.check:
prepare_source(options.replace, options.remove, options.revert)
def prepare_source(replace = False, remove = False, revert = False):
# Directories containing the source code
print('Preparing source code ...')
dir_list = [ "Common",
"SU2_CFD",
"SU2_DEF",
"SU2_DOT",
"SU2_GEO",
"SU2_SOL",
"SU2_MSH"]
file_list = ""
exclude_dic_lines = {}
exclude_dic_files = {}
exclude_file_name = 'preconf.exclude'
# # Build the dictionaries for line and file excludes that
# # are defined in the exlude file 'preconf.exclude'.
# # Syntax:
# # PathTo/File[:Line1,Line2,...]
# if os.path.exists(exclude_file_name):
# print 'Reading \'' + exclude_file_name + '\' ...'
# with open(exclude_file_name, 'r') as exclude:
# for line in exclude:
# exclude_line = line.split(':')
# exclude_file = exclude_line[0].rstrip()
# if len(exclude_line) > 1:
# exclude_lines = exclude_line[1].split(',')
# for index,item in enumerate(exclude_lines):
# exclude_lines[index] = int(item.rstrip())
# exclude_dic_lines[exclude_line[0].rstrip()] = exclude_lines
# else:
# exclude_dic_files[exclude_line[0].rstrip()] = [-1]
# else:
# print('Exclude file \'' + exclude_file_name + '\' not found. Checking all files.')
# Hardcoded files that will be skipped
exclude_dic_files = { 'Common/include/datatype_structure.hpp' : [-1],
'Common/include/datatype_structure.inl' : [-1],
'Common/include/mpi_structure.hpp' : [-1],
'Common/include/mpi_structure.inl' : [-1],
'Common/src/datatype_structure.cpp': [-1],
'Common/src/mpi_structure.cpp' : [-1] }
str_double = 'double'
regex_double = re.compile(r'(^|[^\w])('+str_double+')([^\w]|$)')
replacement_double = r'\1su2double\3'
simple_replacements = {'MPI_Reduce' : 'SU2_MPI::Reduce',
'MPI_Allreduce' : 'SU2_MPI::Allreduce',
'MPI_Gather' : 'SU2_MPI::Gather',
'MPI_Allgather' : 'SU2_MPI::Allgather',
'MPI_Isend' : 'SU2_MPI::Isend',
'MPI_Irecv' : 'SU2_MPI::Irecv',
'MPI_Send' : 'SU2_MPI::Send',
'MPI_Wait' : 'SU2_MP
|
dougo/chugchanga-poll
|
musicbrainz.py
|
Python
|
agpl-3.0
| 3,581
| 0.004747
|
# Copyright 2009-2010 Doug Orleans. Distributed under the GNU Affero
# General Public License v3. See COPYING for details.
from google.appengine.api import urlfetch
import urllib
from xml.dom import minidom
import time
mbns = 'http://musicbrainz.org/ns/mmd-1.0#'
extns = 'http://musicbrainz.org/ns/ext-1.0#'
# Since the Musicbrainz XML Web service does rate-limiting by IP, and
# requests from multiple Google App Engine apps might all come from
# the same IP, we are likely to get rate-limited no matter how slow we
# ourselves go. So instead we have to go through a proxy at a
# different address.
def proxify(url):
return ('http://steak.place.org/servlets/mb-mirror.ss?'
+ urllib.urlencode({ 'url': url }))
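# Illustrative sketch (not part of the original module): the target URL is
# simply percent-encoded into the proxy's 'url' query parameter, e.g.
#
#   proxify('http://musicbrainz.org/ws/1/artist/?type=xml')
#   # -> 'http://steak.place.org/servlets/mb-mirror.ss?'
#   #    'url=http%3A%2F%2Fmusicbrainz.org%2Fws%2F1%2Fartist%2F%3Ftype%3Dxml'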
def xmlHttpRequest(url):
time.sleep(1)
url = proxify(url)
response = urlfetch.fetch(url, deadline=10)
if response.status_code != 200:
raise HTTPError(url, response)
return minidom.parseString(response.content)
class Resource:
@classmethod
def url(cls):
return 'http://musicbrainz.org/ws/1/' + cls.type + '/'
@classmethod
def getElement(cls, id, *inc):
fields = { 'type': 'xml', 'inc': ' '.join(inc) }
url = cls.url() + id + '?' + urllib.urlencode(fields)
doc = xmlHttpRequest(url)
return elementField(doc.documentElement, cls.type)
@classmethod
def searchElements(cls, **fields):
for key in fields:
fields[key] = fields[key].encode('utf-8')
fields['type'] = 'xml'
url = cls.url() + '?' + urllib.urlencode(fields)
doc = xmlHttpRequest(url)
return doc.getElementsByTagNameNS(mbns, cls.type)
class Artist(Resource):
type = 'artist'
def __init__(self, id=None, elt=None):
if elt == None:
elt = self.getElement(id)
self.score = elt.getAttributeNS(extns, 'score')
self.id = elt.getAttribute('id')
self.name = elementFieldValue(elt, 'name')
self.sortname = elementFieldValue(elt, 'sort-name')
self.disambiguation = elementFieldValue(elt, 'disambiguation')
def releaseGroups(self):
return ReleaseGroup.search(artistid=self.id)
@classmethod
def search(cls, **fields):
artists = cls.searchElements(**fields)
return [Artist(elt=elt) for elt in artists]
class ReleaseGroup(Resource):
type = 'release-group'
def __init__(self, id=None, elt=None):
if elt == None:
            elt = self.getElement(id, 'artist')
self.score = elt.getAttributeNS(extns, 'score')
self.id = elt.getAttribute('id')
self.type = elt.getAttribute('type')
self.artist = Artist(elt=elementField(elt, 'artist'))
self.title = elementFieldValue(elt, 'title')
@classmethod
def search(cls, **fields):
rgs = cls.searchElements(**fields)
return [ReleaseGroup(elt=elt) for elt in rgs]
def elementField(elt, fieldName):
fields = elt.getElementsByTagNameNS(mbns, fieldName)
if fields:
return fields[0]
def elementFieldValue(elt, fieldName):
field = elementField(elt, fieldName)
if field:
return textContent(field)
# Node.textContent is only in DOM Level 3...
def textContent(node):
node.normalize()
return ''.join(node.data for node in node.childNodes
if node.nodeType == node.TEXT_NODE)
class HTTPError(Exception):
def __init__(self, url, response):
self.url = url
self.response = response
def __str__(self):
return 'HTTPError: ' + str(self.response.status_code)
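# Illustrative usage sketch (not part of the original module); it must run
# inside App Engine since xmlHttpRequest relies on google.appengine.api.urlfetch,
# and the 'name' search field is assumed from the old MusicBrainz ws/1 API.
# The artist name is an example value.
#
#   artists = Artist.search(name=u'Radiohead')
#   if artists:
#       groups = artists[0].releaseGroups()
#       titles = [rg.title for rg in groups]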
|
Aurorastation/BOREALISbot2
|
cogs/dm_eval.py
|
Python
|
agpl-3.0
| 13,511
| 0.003553
|
import os
import aiohttp
import random
import string
import asyncio
import shutil
import re
from threading import Thread
from io import BytesIO
from zipfile import ZipFile
from discord.ext import commands
from core import BotError
DEFAULT_MAJOR = "512"
DEFAULT_MINOR = "1416"
class WindowsProcessThread(Thread):
def __init__(self, proc, p_args):
super().__init__()
self._proc = proc
self._args = p_args
self.errored = False
self.error_msg = None
def run(self):
winloop = asyncio.ProactorEventLoop()
future = self._proc(winloop, *self._args)
try:
winloop.run_until_complete(future)
except BotError as err:
self.errored = True
self.error_msg = err.message
except Exception:
self.errored = True
self.error_msg = "Unknown error caught in worker thread."
winloop.close()
def validate_byond_build(byond_str):
"""
A little shit of a failed command argument.
Return a tuple containing (major, minor) build information if the argument
string matches the defined format of: v:{major}.{minor} {rest of code here}.
Returns None if such a tuple can't be generated.
"""
if not byond_str.startswith("v:"):
return None
chunks = byond_str.split(" ")
if not len(chunks) > 1:
return None
chunks = chunks[0].split(".")
    # Return None if the version string does not match the expected format.
if len(chunks) != 2:
return None
try:
major = int(chunks[0][2:])
minor = int(chunks[1])
except ValueError:
raise BotError("Error processing BYOND version request.", "validate_byond_build")
return major, minor
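# Illustrative sketch (not part of the original cog): the version prefix is
# only honoured when it matches "v:{major}.{minor} " exactly. The code strings
# below are made-up examples.
#
#   validate_byond_build('v:510.1347 world.log << "hi"')   # -> (510, 1347)
#   validate_byond_build('world.log << "hi"')              # -> None (no prefix)
#   validate_byond_build('v:510 world.log << "hi"')        # -> None (no minor part)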
class DmCog(commands.Cog):
WORK_FOLDER = "cogs\\byond_eval"
DM_BOILERPLATE = "/world/loop_checks = FALSE;\n" + \
"\n/world/New() {{ dm_eval(); del(src); }}" + \
"\n{0}\n/proc/dm_eval() {{ {1} {2} }}"
def __init__(self, bot):
self.bot = bot
self._instances = []
self._safety_patterns = [r'#(\s*)?include', r'include', r'##',
r'```.*```', r'`.*`', r'Reboot']
self._safety_expressions = []
self._arg_expression = re.compile(r'(?:(?P<pre_proc>.*);;;)?(?:(?P<proc>.*);;)?(?P<to_out>.*)?')
for patt in self._safety_patterns:
self._safety_expressions.append(re.compile(patt))
def get_work_dir(self):
"""Returns the folder where BYOND versions and instances should be saved."""
cwd = os.getcwd()
return os.path.join(cwd, self.WORK_FOLDER)
def new_instance(self, length):
"""Generates a unique instance ID, one which is currently not in use."""
while True:
rand = "".join([random.choice(string.ascii_letters + string.digits) for _ in range(length)])
if rand not in self._instances:
self._instances.append(rand)
return rand
def cleanup_instance(self, instance_id, instance_dir):
"""Deletes all files associated with an instance and removes it from the list."""
if not os.path.isdir(instance_dir):
return
self._instances.remove(instance_id)
shutil.rmtree(instance_dir, ignore_errors=True)
def process_args(self, code):
"""
Generates an array of code segments to be placed into the compiled DM code.
Returned dictionary must have three keys: "pre_proc", "proc", and "to_out".
        If those pieces do not exist, they are set to None, to avoid key
        errors further down the call stack.
"""
res = self._arg_expression.match(code)
if not res or not res.groupdict():
raise BotError("No valid code sent.", "process_args")
code_segs = {"pre
|
_proc": None, "proc": None, "to_out": None}
res_dict = res.groupdict()
for key in code_segs:
if key in res_dict:
code_segs[key] = res_dict[key]
if (code_segs["pre_proc"] and
not code_segs["pre_proc"].endswith(";") and
not code_segs["pre_proc"].endswith("}")):
code_segs["pre_proc"] += ";"
if (code_segs["proc"] and not code_segs["proc"].endswith(";")
and not code_segs["proc"].endswith(";")):
code_segs["proc"] += ";"
if code_segs["to_out"]:
code_segs["to_out"] = code_segs["to_out"].split(";")
return code_segs
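    # Illustrative sketch (not part of the original cog): ";;;" separates the
    # declaration block from proc statements, ";;" separates statements from the
    # expressions dumped to world.log, and missing semicolons are added before
    # returning. The input string is a made-up example.
    #
    #   self.process_args("var/x = 5;;; x += 2;; x")
    #   # -> {"pre_proc": "var/x = 5;", "proc": " x += 2;", "to_out": [" x"]}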
def validate_dm(self, code):
"""Validates the code given for potential exploits."""
for expr in self._safety_expressions:
if expr.search(code):
raise BotError("Disallowed/dangerous code found. Aborting.", "validate_dm")
def generate_dm(self, segments, instance_dir):
"""Generates the .dme file to be compiled."""
with open(f"{instance_dir}\\eval.dme", "w+") as f:
if not segments["pre_proc"]:
segments["pre_proc"] = ""
if segments["to_out"]:
var_dump = ""
for var in segments["to_out"]:
var_dump += f"world.log << {var};"
segments["to_out"] = var_dump
self.validate_dm(var_dump)
else:
segments["to_out"] = ""
if not segments["proc"]:
segments["proc"] = ""
output = self.DM_BOILERPLATE
output = output.format(segments["pre_proc"], segments["proc"], segments["to_out"])
f.write(output)
async def compile_dm(self, loop, instance_dir, major, minor):
"""Executor proc to compile the .dme file provided."""
dm_path = os.path.join(self.get_work_dir(),
f"byond{major}.{minor}\\byond\\bin\\dm.exe")
if not os.path.isfile(dm_path):
raise BotError("dm.exe not found.", "compile_dm")
dme_path = os.path.join(instance_dir, "eval.dme")
if not os.path.isfile(dme_path):
raise BotError(".dme under evaluation not found.", "compile_dm")
process = await asyncio.create_subprocess_exec(*[dm_path, dme_path], loop=loop,
stderr=asyncio.subprocess.DEVNULL,
stdout=asyncio.subprocess.DEVNULL)
try:
await asyncio.wait_for(process.wait(), timeout=60.0, loop=loop)
except TimeoutError:
raise BotError("Compiler timed out.", "compile_dm")
if process.returncode != 0:
raise BotError("Error compiling or running DM.", "compile_dm")
def validate_compile(self, instance_dir):
"""Checks wether or not the compiled end result is safe to run."""
dmb_found = False
for fname in os.listdir(instance_dir):
if fname.endswith(".rsc"):
raise BotError("Resource file detected. Execution aborted.", "validate_compile")
elif fname.endswith(".dmb"):
dmb_found = True
if not dmb_found:
raise BotError("Compilation failed and no .dmb was generated.", "validate_compile")
async def run_dm(self, loop, instance_dir, major, minor):
"""Executor proc to host and run the .dmb file provided."""
dd_path = os.path.join(self.get_work_dir(),
f"byond{major}.{minor}\\byond\\bin\\dreamdaemon.exe")
if not os.path.isfile(dd_path):
raise BotError("dreadaemon.exe not found.", "run_dm")
dmb_path = os.path.join(instance_dir, "eval.dmb")
if not os.path.isfile(dmb_path):
raise BotError(".dmb under evaluation not found.", "run_dm")
p_args = [dd_path, dmb_path] + ["-invisible", "-ultrasafe", "-logself", "-log", "output.log", "-once", "-close", "-quiet"]
process = await asyncio.create_subprocess_exec(*p_args, loop=loop,
stderr=asyncio.subprocess.DEVNULL,
|
marshallward/payu
|
payu/models/test.py
|
Python
|
apache-2.0
| 649
| 0
|
"""Test driver interface
:copyright: Copyright 2019 Marshall Ward, see AUTHORS for details
:license: Apache License, Version 2.0, see LICENSE for details
"""
import os
import shlex
import shutil
import subprocess
from payu.models.model import Model
config_files = [
'data',
'diag',
'input.nml'
]
class Test(Model):
def __init__(self, expt, name, config):
# payu initialisation
super(Test, self).__init__(expt, name, config)
        # Model-specific configuration
self.model_type = 'test'
self.default_exec = 'test.exe'
self.config_files = config_files
|
cyanogen/uchroma
|
uchroma/client/dbus_client.py
|
Python
|
lgpl-3.0
| 2,107
| 0.002373
|
#
# uchroma - Copyright (C) 2021 Stefanie Kondik
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# pylint: disable=invalid-name
import re
import pydbus
BASE_PATH = '/org/chemlab/UChroma'
SERVICE = 'org.chemlab.UChroma'
class UChromaClient(object):
def __init__(self):
self._bus = pydbus.SessionBus()
def get_device_paths(self) -> list:
dm = self._bus.get(SERVICE)
return dm.GetDevices()
def get_device(self, identifier):
if identifier is None:
return None
use_key = False
if isinstance(identifier, str):
if identifier.startswith(BASE_PATH):
return self._bus.get(SERVICE, identifier)
if re.match(r'\w{4}:\w{4}.\d{2}', identifier):
use_key = True
elif re.match(r'\d+', identifier):
identifier = int(identifier)
else:
return None
for dev_path in self.get_device_paths():
dev = self.get_device(dev_path)
if use_key and identifier == dev.Key:
return dev
elif identifier == dev.DeviceIndex:
return dev
return None
def get_layer(self, device, layer_idx):
layers = device.CurrentRenderers
if layer_idx >= len(layers):
raise ValueError("Layer index out of range")
return self._bus.get(SERVICE, layers[layer_idx][1])
if __name__ == '__main__':
uclient = UChromaClient()
for u_dev_path in uclient.get_device_paths():
u_dev = uclient.get_device(u_dev_path)
print('[%s]: %s (%s / %s)' % \
(u_dev.Key, u_dev.Name, u_dev.SerialNumber, u_dev.FirmwareVersion))
|
rentalita/django-lutefiskdemo
|
src/python/lutefiskdemo/development.py
|
Python
|
mit
| 475
| 0
|
# -*- coding: utf-8 -*-
from lutefiskdemo.settings import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
SITE_ID = 1
MAINTENANCE_MODE = 'DEVELOPMENT'
EMAIL_PORT = 1025
INSTALLED_APPS += (
'debug_toolbar',
)
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
INTERNAL_IPS = (
'127.0.0.1',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
}
# Local Variables:
# indent-tabs-mode: nil
# End:
# vim: ai et sw=4 ts=4
|
sou81821/chainer
|
cupy/elementwise.py
|
Python
|
mit
| 22,222
| 0
|
import string
import numpy
import six
import cupy
from cupy import carray
from cupy import cuda
from cupy import util
six_range = six.moves.range
six_zip = six.moves.zip
def _get_simple_elementwise_kernel(
params, operation, name, preamble,
loop_prep='', after_loop='', options=()):
module_code = string.Template('''
${preamble}
extern "C" __global__ void ${name}(${params}) {
${loop_prep};
CUPY_FOR(i, _ind.size()) {
_ind.set(i);
${operation};
}
${after_loop};
}
''').substitute(
params=params,
operation=operation,
name=name,
preamble=preamble,
loop_prep=loop_prep,
after_loop=after_loop)
module = carray.compile_with_cache(module_code, options)
return module.get_function(name)
_typenames = {
numpy.dtype('float64'): 'double',
numpy.dtype('float32'): 'float',
numpy.dtype('float16'): 'float16',
numpy.dtype('int64'): 'long long',
numpy.dtype('int32'): 'int',
numpy.dtype('int16'): 'short',
numpy.dtype('int8'): 'signed char',
numpy.dtype('uint64'): 'unsigned long long',
numpy.dtype('uint32'): 'unsigned int',
numpy.dtype('uint16'): 'unsigned short',
numpy.dtype('uint8'): 'unsigned char',
numpy.dtype('bool'): 'bool',
}
_scalar_type = (int, float, bool) + tuple(t.type for t in _typenames.keys())
def _get_typename(dtype):
if dtype is None:
raise ValueError('dtype is None')
return _typenames[numpy.dtype(dtype)]
def _check_args(args):
dev = cuda.Device()
cp_array = cupy.ndarray
scalar_type = _scalar_type
for arg in args:
if isinstance(arg, cp_array):
if arg.data.device != dev:
raise ValueError('Array device must be same as the current '
'device: array device = %d while current = %d'
% (arg.device.id, dev.id))
        elif not isinstance(arg, scalar_type):
raise TypeError('Unsupported type %s' % type(arg))
def _get_args_info(args):
ret = []
carray_Indexer = carray.Indexer
ret_append = ret.append
for a in args:
t = type(a)
if t == carray_Indexer:
dtype = None
else:
dtype = a.dtype.type
ret_append((t, dtype, a.ndim))
return tuple(ret)
def _get_kernel_params(params, args_info):
ret = []
for p, a in six_zip(params, args_info):
type, dtype, ndim = a
is_array = type is cupy.ndarray
if type is carray.Indexer:
t = 'CIndexer<%d>' % ndim
else:
t = _get_typename(dtype)
if is_array:
t = 'CArray<%s, %d>' % (t, ndim)
ret.append('%s%s %s%s' % ('const ' if p.is_const else '',
t,
'_raw_' if is_array and not p.raw else '',
p.name))
return ', '.join(ret)
def _reduce_dims(args, params, shape):
ndim = len(shape)
if ndim <= 1:
return args, shape
cp_array = cupy.ndarray
is_array_flags = [not p.raw and isinstance(a, cp_array)
for p, a in six_zip(params, args)]
args_strides = [a._strides for a, f in six_zip(args, is_array_flags) if f]
src_shape = shape
shape = list(src_shape)
cnt = 0
for i in six_range(1, ndim):
j = i - 1
shape_i = shape[i]
shape_j = shape[j]
if shape_j == 1:
continue
for strides in args_strides:
if strides[i] * shape_i != strides[j]:
cnt += 1
axis = j
break
else:
shape[i] *= shape_j
shape[j] = 1
if shape[-1] != 1:
cnt += 1
axis = -1
if not cnt:
return args, src_shape
elif cnt == 1:
new_shape = shape[axis],
args = list(args)
for i, a in enumerate(args):
if is_array_flags[i]:
a = args[i] = a.view()
a._shape = new_shape
a._strides = a._strides[axis],
return args, new_shape
new_shape = tuple([dim for dim in shape if dim != 1])
args = list(args)
for i, a in enumerate(args):
if is_array_flags[i]:
a = args[i] = a.view()
a._shape = new_shape
a._strides = tuple(
[st for st, sh in six_zip(a._strides, shape) if sh != 1])
return args, new_shape
class ParameterInfo(object):
def __init__(self, str, is_const):
self.name = None
self.dtype = None
self.ctype = None
self.raw = False
self.is_const = is_const
s = tuple(i for i in str.split() if len(i) != 0)
if len(s) < 2:
raise Exception('Syntax error: %s' % str)
t, self.name = s[-2:]
if t == 'CIndexer':
pass
elif len(t) == 1:
self.ctype = t
else:
dtype = numpy.dtype(t)
self.dtype = dtype.type
if dtype.name != t:
raise ValueError('Wrong type %s' % t)
self.ctype = _get_typename(self.dtype)
for i in s[:-2]:
if i == 'raw':
self.raw = True
else:
                raise Exception('Unknown keyword "%s"' % i)
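# Illustrative sketch (not part of the original module): parameter declarations
# are plain strings of the form "[raw] <dtype-or-type-char> <name>". Examples:
#
#   p = ParameterInfo('raw float32 x', is_const=True)
#   # p.name == 'x', p.dtype == numpy.float32, p.ctype == 'float', p.raw == True
#   q = ParameterInfo('T y', is_const=False)
#   # q.ctype == 'T'  (a single-letter type stays a generic placeholder)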
@util.memoize()
def _get_param_info(s, is_const):
if len(s) == 0:
return ()
return tuple([ParameterInfo(i, is_const) for i in s.strip().split(',')])
@util.memoize()
def _decide_params_type(in_params, out_params, in_args_dtype, out_args_dtype):
type_dict = {}
if out_args_dtype:
assert len(out_params) == len(out_args_dtype)
for p, a in six_zip(out_params, out_args_dtype):
if a is None:
raise TypeError('Output arguments must be cupy.ndarray')
if p.dtype is not None:
if a != p.dtype:
raise TypeError(
'Type is mismatched. %s %s %s' % (p.name, a, p.dtype))
elif p.ctype in type_dict:
t = type_dict[p.ctype]
if t != a:
raise TypeError(
'Type is mismatched. %s %s %s %s' % (
p.name, a, t, p.ctype))
else:
type_dict[p.ctype] = a
assert len(in_params) == len(in_args_dtype)
unknown_ctype = []
for p, a in six_zip(in_params, in_args_dtype):
if a is None:
if p.dtype is None:
unknown_ctype.append(p.ctype)
else:
if p.dtype is not None:
if a != p.dtype:
raise TypeError(
'Type is mismatched. %s %s %s' % (p.name, a, p.dtype))
elif p.ctype in type_dict:
t = type_dict[p.ctype]
if t != a:
raise TypeError(
'Type is mismatched. %s %s %s %s' % (
p.name, a, t, p.ctype))
else:
type_dict[p.ctype] = a
in_types = tuple([type_dict[p.ctype] if p.dtype is None else p.dtype
for p in in_params])
out_types = tuple([type_dict[p.ctype] if p.dtype is None else p.dtype
for p in out_params])
return in_types, out_types, tuple(type_dict.items())
def _broadcast(args, params, use_size):
value = [a if not p.raw and isinstance(a, cupy.ndarray) else None
for p, a in six_zip(params, args)]
if use_size:
for i in value:
if i is None:
break
else:
raise ValueError("Specified 'size' can be used only "
"if all of the ndarray are 'raw'.")
else:
for i in value:
if i is not None:
break
else:
raise ValueError('Loop size is Undecided')
brod = cupy.broadcast(*value)
value = [b if a is None else a
for a, b in six_zip(brod.values, args)]
return value, brod.shape
def _get_out_args(out_args, out_types, out_shape):
if not ou
|
LoganRickert/foox
|
test/species/test_fourth.py
|
Python
|
mit
| 6,328
| 0.000474
|
"""
Tests for the module that encompasses fourth species counterpoint.
"""
import unittest
from foox.species.fourth import (Genome, create_population, is_parallel,
make_fitness_function, make_generate_function, make_halt_function,
MAX_REWARD, REWARD_SUSPENSION)
from foox.species.utils import is_suspension
# The cantus firmus to use in the test suite.
CANTUS_FIRMUS = [5, 7, 6, 5, 8, 7, 9, 8, 7, 6, 5]
class TestCreatePopulation(unittest.TestCase):
"""
Ensures the create_population function works as expected.
"""
def test_returns_valid_genomes(self):
"""
Checks the genomes returned by the create_population function are
of the correct type.
"""
result = create_population(1, CANTUS_FIRMUS)
self.assertEqual(Genome, type(result[0]))
def test_returns_correct_number_of_genomes(self):
"""
Ensures the correct number of genomes are returned by the function.
"""
result = create_population(100, CANTUS_FIRMUS)
self.assertEqual(100, len(result))
def test_uses_only_valid_intervals(self):
"""
Tests that only valid consonant intervals are used.
"""
valid_intervals = [2, 4, 5, 7, 9, 11]
result = create_population(20, CANTUS_FIRMUS)
for genome in result:
for i in range(len(genome.chromosome)):
contrapunctus_note = genome.chromosome[i]
cantus_firmus_note = CANTUS_FIRMUS[i]
interval = contrapunctus_note - cantus_firmus_note
self.assertIn(interval, valid_intervals)
def test_solutions_have_correct_number_of_notes(self):
"""
Ensures that all solutions have the expected number of notes.
"""
result = create_population(20, CANTUS_FIRMUS)
expected_length = len(CANTUS_FIRMUS)
for genome in result:
self.assertEqual(expected_length, len(genome.chromosome))
class TestFitnessFunction(unittest.TestCase):
"""
Ensures that the fitness function works as expected.
"""
def test_make_fitness_function_returns_callable(self):
"""
Ensures the make_fitness_function returns a callable.
"""
result = make_fitness_function(CANTUS_FIRMUS)
self.assertTrue(callable(result))
def test_fitness_function_returns_float(self):
"""
Makes sure the generated fitness function returns a fitness score as a
float.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
result = fitness_function(genome)
        self.assertEqual(float, type(result))
def test_fitness_function_sets_fitness_on_genome(self):
"""
Ensures the fitness score is set in the genome's fitness attribute and
is the same as the returned fitness score.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
self.assertEqual(None, genome.fitness)
result = fitness_function(genome)
self.assertNotEqual(None, genome.fitness)
self.assertEqual(result, genome.fitness)
def test_fitness_function_uses_cached_genome_fitness(self):
"""
Ensures the fitness function bails if there is already a score set for
the genome.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
genome.fitness = 12345
result = fitness_function(genome)
self.assertEqual(12345, result)
class TestHalt(unittest.TestCase):
"""
Ensure the halting function works as expected.
"""
def test_halt_expected(self):
"""
Ensure the function returns true if we're in a halting state.
"""
halt = make_halt_function([6, 5])
g1 = Genome([6, 5])
g1.fitness = MAX_REWARD
population = [g1, ]
result = halt(population, 1)
self.assertTrue(result)
def test_halt_checks_suspension_count(self):
"""
If the solution contains suspensions the halt function should ensure
that the MAX_REWARD is incremented by the number of suspensions
(rewarded because they're part of a valid step wise motion).
"""
halt = make_halt_function([9, 8, 7, 6, 5])
g1 = Genome([11, 10, 9, 8, 7])
        # only one out of two "correct" dissonances
g1.fitness = MAX_REWARD + REWARD_SUSPENSION
population = [g1, ]
result = halt(population, 1)
self.assertFalse(result)
# Try again
# two out of two "correct" dissonances
g1.fitness = MAX_REWARD + (REWARD_SUSPENSION * 2)
population = [g1, ]
result = halt(population, 1)
self.assertTrue(result)
def test_halt_not(self):
"""
Ensures if the fittest genome has fitness < MAX_REWARD then halt
doesn't succeed.
"""
halt = make_halt_function([3, 2, 1])
g1 = Genome([1, 2, 3])
g1.fitness = MAX_REWARD - 0.1
g2 = Genome([1, 2, 3])
g2.fitness = 3
g3 = Genome([1, 2, 3])
g3.fitness = 2
# Any fittest solution with fitness < MAX_REWARD means no halt.
population = [g1, g2, g3]
result = halt(population, 1)
self.assertFalse(result)
class TestGenome(unittest.TestCase):
"""
Ensures that the Genome class is overridden as expected.
"""
def test_mutate_is_implemented(self):
"""
Ensures that we have a mutate method implemented.
"""
genome = Genome([1, 2, 3])
self.assertNotEqual(NotImplemented, genome.mutate(2, 0.2, [1, 2, 3]))
def test_mutate_bounded_by_arg_values(self):
"""
        A rather contrived test but it proves that both the mutation_range and
mutation_rate are used correctly given the context given by a cantus
firmus.
"""
cantus_firmus = [1, 1, 1, 1, 1]
# mutate every time.
mutation_rate = 1
# will always mutate to thirds above the cf note.
mutation_range = 2
genome = Genome([5, 6, 7, 8, 9])
        genome.mutate(mutation_range, mutation_rate, cantus_firmus)
self.assertEqual([3, 3, 3, 3, 3], genome.chromosome)
|
mrawls/APO-1m-phot
|
imginventory.py
|
Python
|
mit
| 9,017
| 0.007541
|
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from astropy.io import ascii
import astropy.coordinates as coord
import astropy.units as u
from astropy.time import Time
from astropy.time import TimeDelta
import os
'''
Read in 1m observation metadata to figure out:
- which stars were imaged when
- whether they were in eclipse, or not
- some kind of image preview or quality flag (NOT IMPLEMENTED YET)
- make a plot of this info
To work, this program needs to be saved somewhere that can see imagedir and reffile.
It assumes there are date-formatted subdirectories in imagedir (e.g. 150212) with FITS
files saved in them. Some of those FITS files might be for targets we don't care about.
***IMPORTANT NOTE***
You will need to manually inspect the outfile and edit it before running imagereduce.py:
- replace each filter entry with a short (6 chars or fewer) string with no spaces
- ensure each filename starts with '1' (some may be truncated)
'''
#imagedir = '/mnt/mrawls/1m_obs/'
imagedir = '/virgo/mrawls/1mphot/'
reffile = 'RGEB_info_alpha.txt'
outfile = 'imginventory_list3.txt'
# Get the paths to the directories in imagedir which are 2014 or 2015 date format
dirs = [x for x in os.listdir(imagedir) if x[0:2] == '14' or x[0:2] == '15']
fulldirs = [imagedir+x+'/' for x in dirs]
# Read in reference data for the targets
refdata = ascii.read(reffile)
KICs = refdata['col1']
Porbs = refdata['col2']
BJD0s = refdata['col3']
RAs = refdata['col7']
Decs = refdata['col8']
# Create astropy Time objects for the zeropoints and orbital periods
Porbs_time = []; BJD0s_time = []
for Porb, BJD0 in zip(Porbs, BJD0s):
Porbs_time.append(TimeDelta(Porb, format='jd')) # duration of one orbit
BJD0s_time.append(Time(BJD0+2400000.0, format='jd', scale='utc')) # time of primary eclipse
# Eclipse timing information
pwid = refdata['col4']
swid = refdata['col5']
sep = refdata['col6']
# Find the files that are FITS images
# Save the date, time, RA, Dec, and filter from the header, as well as the filename
# Keep going if a file can't be opened for any reason
dateobs = []; UTobs = []
RAobs = []; Decobs = []
filtnameobs = []; filenamesave = []
for dir in fulldirs:
filesindir = os.listdir(dir)
for filename in filesindir:
# Keep only fits files that are not guiding images
        if filename[-4:] in ('fits', 'FITS') and 'g.' not in filename and 'flat' not in filename:
fullfile = dir+filename
#print(fullfile)
try:
hdu = fits.open(fullfile, ignore_missing_end = True)
except:
print('Error accessing {0}'.format(fullfile))
continue
else:
dateobs.append(hdu[0].header['date-obs'])
UTobs.append(hdu[0].header['UT'])
RAobs.append(hdu[0].header['RA'])
Decobs.append(hdu[0].header['Dec'])
filtnameobs.append(hdu[0].header['filtname'][0:17])
filenamesave.append(fullfile)
# Put RA and Dec values into less annoying formats
print('Done reading image files')
RAs = coord.Angle(RAs, unit=u.hour)
RAobs = coord.Angle(RAobs, unit=u.hour)
Decs = coord.Angle(Decs, unit=u.degree)
Decobs = coord.Angle(Decobs, unit=u.degree)
# Identify which catalog RA and Dec value are closest to the observed ones
# If the closest RA and closest Dec have the same index, assign the appropriate KIC
# Otherwise, assume the file doesn't contain a target of interest
print('Identifying which observations belong to KIC objects...')
KICobs = []
for RA, Dec in zip(RAobs, Decobs):
idx1 = min(range(len(RAs)), key=lambda i: abs(RAs[i] - RA))
idx2 = min(range(len(Decs)), key=lambda i: abs(Decs[i] - Dec))
if idx1 == idx2:
KICobs.append(KICs[idx1])
else:
KICobs.append('None')
# Keep only the good observations that have assigned KICS
# Consolidate the time and date info into a single object
# Keep track of the filters and filenames
KICgoods = []; datetimes = []; RAgoods = []; Decgoods = []; filtgoods = []; filenamegoods = []
for KIC, date, UT, RA, Dec, filtname, file in zip(KICobs, dateobs, UTobs, RAobs, Decobs, filtnameobs, filenamesave):
if KIC != 'None':
KICgoods.append(KIC)
datetimes.append(Time(str(date)+'T'+str(UT), format='isot', scale='utc'))
RAgoods.append(RA)
Decgoods.append(Dec)
filtgoods.append(filtname)
filenamegoods.append(file)
print('Done')
# Create a new list that contains a list of observation times for each object
# Keep track of the corresponding filters and filenames
# Initialize a parallel list that assumes all observations are not in eclipse, for now
observations = [[] for x in xrange(len(KICs))]
filtrefs = [[] for x in xrange(len(KICs))]
filenamerefs = [[] for x in xrange(len(KICs))]
isEclipse = [[] for x in xrange(len(KICs))]
for idx, (KIC, Porb, BJD0) in enumerate(zip(KICs, Porbs, BJD0s)): # loop over systems
for KIC_obs, datetime_obs, filt_ref, file_ref in zip(KICgoods, datetimes, filtgoods, filenamegoods): # loop over observations
if KIC_obs == KIC:
observations[idx].append(datetime_obs)
filtrefs[idx].append(filt_ref)
filenamerefs[idx].append(file_ref)
isEclipse[idx].append('No ')
# Define start and end of observation windows, with a 10-day buffer for plotting purposes
obs_tstart = min(datetimes) - TimeDelta(10, format='jd')
obs_tend = max(datetimes) + TimeDelta(10, format='jd')
# Calculate eclipse start and end points that fall within the observation window
# (This is blatantly stolen/adapted from 'eclipsefinder.py')
# These eclipse durations do NOT include any extra time for ensuring 100% of ingress/egress
print('Calculating eclipse times...')
print('New BJD0 values immediately before the obs window (KIC, newBJD0, start, end):')
pri_eclipse_mid = [[] for x in xrange(len(KICs))]
sec_eclipse_mid = [[] for x in xrange(len(KICs))]
pri_eclipse_start = [[] for x in xrange(len(KICs))]
pri_eclipse_end = [[] for x in xrange(len(KICs))]
sec_eclipse_start = [[] for x in xrange(len(KICs))]
sec_eclipse_end = [[] for x in xrange(len(KICs))]
for j in range(0,len(KICs)): # j is the *object* index here
# Find the most recent bjd0 time right BEFORE the observation window of interest
newbjd0_float = np.floor((obs_tstart.jd - BJD0s_time[j].jd)/Porbs_time[j].value) * Porbs_time[j].value + BJD0s_time[j].jd
newbjd0 = Time(newbjd0_float, format='jd', scale='utc')
print(KICs[j], newbjd0_float, obs_tstart.jd, obs_tend.jd)
    for i in range(0,len(observations[j])): # i is the *observation* index here
# Save eclipse midpoints
pri_eclipse_mid[j].append(newbjd0 + i*Porbs_time[j])
sec_eclipse_mid[j].append(newbjd0 + i*Porbs_time[j] + sep[j]*Porbs_time[j])
# Save primary eclipse start & end times
pri_eclipse_start[j].append(pri_eclipse_mid[j][i] - pwid[j]*Porbs_time[j]/2)
        pri_eclipse_end[j].append(pri_eclipse_mid[j][i] + pwid[j]*Porbs_time[j]/2)
# Save secondary eclipse start & end times
sec_eclipse_start[j].append(sec_eclipse_mid[j][i] - swid[j]*Porbs_time[j]/2)
sec_eclipse_end[j].append(sec_eclipse_mid[j][i] + swid[j]*Porbs_time[j]/2)
print('Done')
# Make a plot as a function of time
# Color-code points by whether they're in eclipse or not (red = primary, blue = secondary)
# Finally, save all the observation info to file
plt.figure(1, figsize=(17,12))
plt.yticks(range(0,len(KICs)), ['%.0f' % a for a in KICs])
plt.axis([obs_tstart.plot_date, obs_tend.plot_date, -1, len(KICs)])
print('Preparing plot and writing to outfile (be patient)...')
outfilelist = open(outfile, 'w')
for idx, KIC in enumerate(KICs): # loop over systems
print(KIC, '...')
for jdx, (obs, filt, file) in enumerate(zip(observations[idx], filtrefs[idx], filenamerefs[idx])): # loop over observations
plt.plot_date(obs.plot_date, idx, marker='o', mec=None, mfc='0.75') # plot all observations
for start, end in zip(pri_e
|
nkgilley/home-assistant
|
homeassistant/components/rfxtrx/__init__.py
|
Python
|
apache-2.0
| 13,930
| 0.000431
|
"""Support for RFXtrx devices."""
import binascii
from collections import OrderedDict
import logging
import RFXtrx as rfxtrxmod
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_NAME,
ATTR_STATE,
CONF_DEVICE,
CONF_DEVICES,
CONF_HOST,
CONF_PORT,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
POWER_WATT,
TEMP_CELSIUS,
UNIT_PERCENTAGE,
UV_INDEX,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
DOMAIN = "rfxtrx"
DEFAULT_SIGNAL_REPETITIONS = 1
ATTR_AUTOMATIC_ADD = "automatic_add"
ATTR_DEVICE = "device"
ATTR_DEBUG = "debug"
ATTR_FIRE_EVENT = "fire_event"
ATTR_DATA_TYPE = "data_type"
ATTR_DUMMY = "dummy"
CONF_DATA_BITS = "data_bits"
CONF_AUTOMATIC_ADD = "automatic_add"
CONF_DATA_TYPE = "data_type"
CONF_SIGNAL_REPETITIONS = "signal_repetitions"
CONF_FIRE_EVENT = "fire_event"
CONF_DUMMY = "dummy"
CONF_DEBUG = "debug"
CONF_OFF_DELAY = "off_delay"
EVENT_BUTTON_PRESSED = "button_pressed"
DATA_TYPES = OrderedDict(
[
("Temperature", TEMP_CELSIUS),
("Temperature2", TEMP_CELSIUS),
("Humidity", UNIT_PERCENTAGE),
("Barometer", ""),
("Wind direction", ""),
("Rain rate", ""),
("Energy usage", POWER_WATT),
("Total usage", POWER_WATT),
("Sound", ""),
("Sensor Status", ""),
("Counter value", ""),
("UV", UV_INDEX),
("Humidity status", ""),
("Forecast", ""),
("Forecast numeric", ""),
("Rain total", ""),
("Wind average speed", ""),
("Wind gust", ""),
("Chill", ""),
("Total usage", ""),
("Count", ""),
("Current Ch. 1", ""),
("Current Ch. 2", ""),
("Current Ch. 3", ""),
("Energy usage", ""),
("Voltage", ""),
("Current", ""),
("Battery numeric", ""),
("Rssi numeric", ""),
]
)
RECEIVED_EVT_SUBSCRIBERS = []
RFX_DEVICES = {}
_LOGGER = logging.getLogger(__name__)
DATA_RFXOBJECT = "rfxobject"
BASE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_DEBUG, default=False): cv.boolean,
vol.Optional(CONF_DUMMY, default=False): cv.boolean,
}
)
DEVICE_SCHEMA = BASE_SCHEMA.extend({vol.Required(CONF_DEVICE): cv.string})
PORT_SCHEMA = BASE_SCHEMA.extend(
{vol.Required(CONF_PORT): cv.port, vol.Optional(CONF_HOST): cv.string}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Any(DEVICE_SCHEMA, PORT_SCHEMA)}, extra=vol.ALLOW_EXTRA
)
def setup(hass, config):
"""Set up the RFXtrx component."""
# Declare the Handle event
def handle_receive(event):
"""Handle received messages from RFXtrx gateway."""
# Log RFXCOM event
if not event.device.id_string:
return
_LOGGER.debug(
"Receive RFXCOM event from "
"(Device_id: %s Class: %s Sub: %s, Pkt_id: %s)",
slugify(event.device.id_string.lower()),
event.device.__class__.__name__,
event.device.subtype,
"".join(f"{x:02x}" for x in event.data),
)
# Callback to HA registered components.
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
device = config[DOMAIN].get(ATTR_DEVICE)
host = config[DOMAIN].get(CONF_HOST)
port = config[DOMAIN].get(CONF_PORT)
debug = config[DOMAIN][ATTR_DEBUG]
dummy_connection = config[DOMAIN][ATTR_DUMMY]
if dummy_connection:
rfx_object = rfxtrxmod.Connect(
device, None, debug=debug, transport_protocol=rfxtrxmod.DummyTransport2,
)
elif port is not None:
# If port is set then we create a TCP connection
rfx_object = rfxtrxmod.Connect(
(host, port),
None,
debug=debug,
transport_protocol=rfxtrxmod.PyNetworkTransport,
)
else:
rfx_object = rfxtrxmod.Connect(device, None, debug=debug)
def _start_rfxtrx(event):
rfx_object.event_callback = handle_receive
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_rfxtrx)
def _shutdown_rfxtrx(event):
"""Close connection with RFXtrx."""
rfx_object.close_connection()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown_rfxtrx)
hass.data[DATA_RFXOBJECT] = rfx_object
return True
def get_rfx_object(packetid):
"""Return the RFXObject with the packetid."""
try:
binarypacket = bytearray.fromhex(packetid)
except ValueError:
return None
pkt = rfxtrxmod.lowlevel.parse(binarypacket)
if pkt is None:
return None
if isinstance(pkt, rfxtrxmod.lowlevel.SensorPacket):
obj = rfxtrxmod.SensorEvent(pkt)
elif isinstance(pkt, rfxtrxmod.lowlevel.Status):
obj = rfxtrxmod.StatusEvent(pkt)
else:
obj = rfxtrxmod.ControlEvent(pkt)
return obj
def get_pt2262_deviceid(device_id, nb_data_bits):
"""Extract and return the address bits from a Lighting4/PT2262 packet."""
if nb_data_bits is None:
return
try:
data = bytearray.fromhex(device_id)
except ValueError:
return None
mask = 0xFF & ~((1 << nb_data_bits) - 1)
data[len(data) - 1] &= mask
return binascii.hexlify(data)
def get_pt2262_cmd(device_id, data_bits):
"""Extract and return the data bits from a Lighting4/PT2262 packet."""
try:
data = bytearray.fromhex(device_id)
except ValueError:
return None
mask = 0xFF & ((1 << data_bits) - 1)
return hex(data[-1] & mask)
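# Illustrative sketch (not part of the original component): for a hypothetical
# Lighting4 packet id "22670e" with 4 data bits, the low nibble of the last
# byte holds the command and the remaining bits form the device address.
#
#   get_pt2262_deviceid("22670e", 4)   # -> b'226700' (data bits masked off)
#   get_pt2262_cmd("22670e", 4)        # -> '0xe'     (just the data bits)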
def get_pt2262_device(device_id):
"""Look for the device which id matches the given device_id parameter."""
for device in RFX_DEVICES.values():
if (
hasattr(device, "is_lighting4")
and device.masked_id is not None
and device.masked_id == get_pt2262_deviceid(device_id, device.data_bits)
):
_LOGGER.debug(
"rfxtrx: found matching device %s for %s", device_id, device.masked_id,
)
return device
return None
def find_possible_pt2262_device(device_id):
"""Look for the device which id matches the given device_id parameter."""
for dev_id, device in RFX_DEVICES.items():
if hasattr(device, "is_lighting4") and len(dev_id) == len(device_id):
size = None
for i, (char1, char2) in enumerate(zip(dev_id, device_id)):
if char1 != char2:
break
size = i
if size is not None:
size = len(dev_id) - size - 1
_LOGGER.info(
"rfxtrx: found possible device %s for %s "
"with the following configuration:\n"
"data_bits=%d\n"
"command_on=0x%s\n"
"command_off=0x%s\n",
device_id,
dev_id,
size * 4,
dev_id[-size:],
device_id[-size:],
)
return device
return None
def get_devices_from_config(config, device):
"""Read rfxtrx configuration."""
signal_repetitions = config[CONF_SIGNAL_REPETITIONS]
devices = []
for packet_id, entity_info in config[CONF_DEVICES].items():
event = get_rfx_object(packet_id)
if event is None:
_LOGGER.error("Invalid device: %s", packet_id)
continue
device_id = slugify(event.device.id_string.lower())
if device_id in RFX_DEVICES:
continue
_LOGGER.debug("Add %s rfxtrx", entity_info[ATTR_NAME])
        # Check if it must fire an event
fire_event = entity_info[ATTR_FIRE_EVENT]
datas = {ATTR_STATE: False, ATTR_FIRE_EVENT: fire_event}
new_device = device(entity_info[ATTR_NAME], event, datas, signal_repetitions)
RFX_DEVICES[device_id] = new_device
devices.append(new_device)
return devices
def get_new_device(event, config, device):
"""Add entity if not exist and the automatic_add is True."""
device_id = slugify(event.
|
pannellr/3132GroupProject
|
modules/database/where.py
|
Python
|
unlicense
| 34
| 0.029412
|
class Where:
    string = ''
|
timsnyder/bokeh
|
bokeh/sampledata/us_cities.py
|
Python
|
bsd-3-clause
| 1,884
| 0.010085
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import json
# External imports
# Bokeh imports
from ..util.sampledata import external_path
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'data',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
data = json.load(open(external_path('us_cities.json')))
|
lukas-hetzenecker/home-assistant
|
homeassistant/components/cast/media_player.py
|
Python
|
apache-2.0
| 31,771
| 0.000913
|
"""Provide functionality to interact with Cast devices on the network."""
from __future__ import annotations
import asyncio
from contextlib import suppress
from datetime import datetime, timedelta
import functools as ft
import json
import logging
from urllib.parse import quote
import pychromecast
from pychromecast.controllers.homeassistant import HomeAssistantController
from pychromecast.controllers.multizone import MultizoneManager
from pychromecast.controllers.plex import PlexController
from pychromecast.controllers.receiver import VOLUME_CONTROL_TYPE_FIXED
from pychromecast.quick_play import quick_play
from pychromecast.socket_client import (
CONNECTION_STATUS_CONNECTED,
CONNECTION_STATUS_DISCONNECTED,
)
import voluptuous as vol
from homeassistant.auth.models import RefreshToken
from homeassistant.components import media_source, zeroconf
from homeassistant.components.http.auth import async_sign_path
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
ATTR_MEDIA_EXTRA,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_TVSHOW,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.components.plex.const import PLEX_URI_SCHEME
from homeassistant.components.plex.services import lookup_plex_media
from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.network import NoURLAvailableError, get_url
import homeassistant.util.dt as dt_util
from homeassistant.util.logging import async_create_catching_coro
from .const import (
ADDED_CAST_DEVICES_KEY,
CAST_MULTIZONE_MANAGER_KEY,
CONF_IGNORE_CEC,
CONF_UUID,
DOMAIN as CAST_DOMAIN,
SIGNAL_CAST_DISCOVERED,
SIGNAL_CAST_REMOVED,
SIGNAL_HASS_CAST_SHOW_VIEW,
)
from .discovery import setup_internal_discovery
from .helpers import CastStatusListener, ChromecastInfo, ChromeCastZeroconf
_LOGGER = logging.getLogger(__name__)
CAST_SPLASH = "https://www.home-assistant.io/images/cast/splash.png"
SUPPORT_CAST = (
SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_STOP
| SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
)
ENTITY_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_UUID): cv.string,
vol.Optional(CONF_IGNORE_CEC): vol.All(cv.ensure_list, [cv.string]),
}
),
)
@callback
def _async_create_cast_device(hass: HomeAssistant, info: ChromecastInfo):
"""Create a CastDevice Entity from the chromecast object.
Returns None if the cast device has already been added.
"""
_LOGGER.debug("_async_create_cast_device: %s", info)
if info.uuid is None:
_LOGGER.error("_async_create_cast_device uuid none: %s", info)
return None
# Found a cast with UUID
added_casts = hass.data[ADDED_CAST_DEVICES_KEY]
if info.uuid in added_casts:
# Already added this one, the entity will take care of moved hosts
# itself
return None
# -> New cast device
added_casts.add(info.uuid)
if info.is_dynamic_group:
# This is a dynamic group, do not add it but connect to the service.
group = DynamicCastGroup(hass, info)
group.async_setup()
return None
return CastDevice(info)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Cast from a config entry."""
hass.data.setdefault(ADDED_CAST_DEVICES_KEY, set())
# Import CEC IGNORE attributes
pychromecast.IGNORE_CEC += config_entry.data.get(CONF_IGNORE_CEC) or []
wanted_uuids = config_entry.data.get(CONF_UUID) or None
@callback
def async_cast_discovered(discover: ChromecastInfo) -> None:
"""Handle discovery of a new chromecast."""
# If wanted_uuids is set, we're only accepting specific cast devices identified
# by UUID
if wanted_uuids is not None and discover.uuid not in wanted_uuids:
# UUID not matching, ignore.
return
cast_device = _async_create_cast_device(hass, discover)
if cast_device is not None:
async_add_entities([cast_device])
async_dispatcher_connect(hass, SIGNAL_CAST_DISCOVERED, async_cast_discovered)
ChromeCastZeroconf.set_zeroconf(await zeroconf.async_get_instance(hass))
hass.async_add_executor_job(setup_internal_discovery, hass, config_entry)
class CastDevice(MediaPlayerEntity):
"""Representation of a Cast device on the network.
This class is the holder of the pychromecast.Chromecast object and its
socket client. It therefore handles all reconnects and audio group changing
"elected leader" itself.
"""
_attr_should_poll = False
_attr_media_image_remotely_accessible = True
def __init__(self, cast_info: ChromecastInfo) -> None:
"""Initialize the cast device."""
self._cast_info = cast_info
self.services = cast_info.services
self._chromecast: pychromecast.Chromecast | None = None
self.cast_status = None
self.media_status = None
self.media_status_received = None
self.mz_media_status: dict[str, pychromecast.controllers.media.MediaStatus] = {}
self.mz_media_status_received: dict[str, datetime] = {}
self.mz_mgr = None
self._attr_available = False
self._status_listener: CastStatusListener | None = None
self._hass_cast_controller: HomeAssistantController | None = None
self._add_remove_handler = None
self._cast_view_remove_handler = None
self._attr_unique_id = cast_info.uuid
self._attr_name = cast_info.friendly_name
if cast_info.model_name != "Google Cast Group":
self._attr_device_info = {
"name": str(cast_info.friendly_name),
"identifiers": {(CAST_DOMAIN, str(cast_info.uuid).replace("-", ""))},
"model": cast_info.model_name,
"manufacturer": str(cast_info.manufacturer),
}
async def async_added_to_hass(self):
"""Create chromecast object when added to hass."""
self._add_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_CAST_DISCOVERED, self._async_cast_discovered
)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._async_stop)
self.async_set_cast_info(self._cast_info)
# asyncio.create_task is used to avoid delaying startup wrapup if the device
# is discovered already during startup but then fails to respond
asyncio.create_task(
async_create_catching_coro(self.async_connect_to_chromecast())
)
self._cast_view_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_HASS_CAST_SHOW_VIEW, self._handle_signal_show_view
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect Chromecast object when removed."""
await self._async_disconnect()
if self._add_remove_handler:
self._add_remove_handler()
self._add_remove_handler = None
if self._cast_view_remove_handler:
self._cast_view_remove_handler()
self._cast_view_remove_handler = None
def async_set_cast_info(self, cast_info):
"""Set the cast information."""
self._cast_info = cast_info
async def async_connect_to_chromecast(self):
"""Set up the chromecast object."""
_LOGGER.debug(
"[%s %s] Connecting to cast device by service %s",
self.entity_id,
self._cast_info.friendly_name,
self.services,
)
chromecast = await self.hass.async_add_executor_job(
p
|
smithbr/ut-itunes-import
|
import.py
|
Python
|
mit
| 5,276
| 0.005497
|
# -*- coding: utf-8 -*-
"""
this script is crap but I
don't feel like fixing it.
"""
import shutil
import os
import sys
import time
import tempfile
from bencode import *
base_dir = tempfile.gettempdir() + "\\ut-itunes-import"
item_list = []
file_count = 0
file_types = ['.mp3',]
if "--help" in str(sys.argv[1]).lower():
print """ Usage: python import.py [Path_to_resume.dat] [Path_to_Add_to_iTunes_folder] [Label(s) (optional)]
Optional arguments: [Label] only import files with specified label(s)"""
sys.exit()
if not os.path.isfile(str(sys.argv[1]).replace("\\","\\\\")) \
or not os.path.isdir(sys.argv[2]):
raise AssertionError("""Path does not exist. Please check your
resume.dat and Add to iTunes folder paths
are correct.""")
sys.exit()
else:
RESUME_DAT = sys.argv[1]
ADD_TO_ITUNES_FOLDER = sys.argv[2]
try:
# Labels don't do anything right now, sorry
CUSTOM_LABELS = sys.argv[3]
except:
pass
try:
META_INFO_FILE = open(RESUME_DAT, 'rb')
META_INFO_CONTENT = bdecode(META_INFO_FILE.read())
except Exception, e:
raise Exception("Could not find resume.dat file! Message: %s" % str(e))
try:
for torrent in META_INFO_CONTENT.keys():
item_list.append(torrent)
THIS_TORRENTS_FILE_LIST = []
if torrent == 'rec' or torrent == '.fileguard':
item_list.remove(torrent)
else:
if META_INFO_CONTENT[torrent]['labels'] == [] and META_INFO_CONTENT[torrent]['completed_on'] > 0:
print "[uTorrent metadata] Name: %s " % str(torrent)
print "[uTorrent metadata] Label(s): %s" % str(META_INFO_CONTENT[torrent]['labels'])
print "[uTorrent metadata] Path: %s" % str(META_INFO_CONTENT[torrent]['path'])
print "[uTorrent metadata] Completed: %s" % str(META_INFO_CONTENT[torrent]['completed_on'])
FINISHED_FOLDER_PATH = str(base_dir + str(torrent.strip(".torrent")))
print "Source: %s" % META_INFO_CONTENT[torrent]['path']
print "Destination %s" % FINISHED_FOLDER_PATH
print "Starting copy folder..."
if not os.path.isdir(FINISHED_FOLDER_PATH):
try:
print "Copying the folder to %s..." % str(base_dir)
shutil.copytree(META_INFO_CONTENT[torrent]['path'], FINISHED_FOLDER_PATH)
print "Copy finished."
except Exception, e:
raise Exception("""Error: Something went wrong when copying the %s
directory to %s! Message: %s"""
% (META_INFO_CONTENT[torrent]['path'], FINISHED_FOLDER_PATH, str(e)))
else:
print "Destination directory already exists. Skipping copy..."
print "Scanning for file types %s..." + str(file_types)
any_mp3s_in_here = False
for media_file in os.listdir(FINISHED_FOLDER_PATH):
for filetype in file_types:
if media_file[-4:] == filetype:
ADD_TO_ITUNES_SOURCE_FILE = str(FINISHED_FOLDER_PATH + "\\" + media_file)
THIS_TORRENTS_FILE_LIST.append(ADD_TO_ITUNES_SOURCE_FILE)
any_mp3s_in_here = True
file_count += 1
print "Found %s %s files..." % (str(file_count), str(file_types))
if not THIS_TORRENTS_FILE_LIST == []:
print str(THIS_TORRENTS_FILE_LIST)
if not file_count > 0:
print "Skipping copy..."
else:
print "Copying files to %s" + str(ADD_TO_ITUNES_FOLDER)
for file in THIS_TORRENTS_FILE_LIST:
try:
print "Copying: %s..." % file
shutil.copy(file, ADD_TO_ITUNES_FOLDER)
except Exception, e:
raise Exception("""Error: There was an issue copying the %s
file to the Add To iTunes directory! Message: %s"""
% (file, str(e)))
print "Success."
if THIS_TORRENTS_FILE_LIST == []:
print "KEEPING MOVED DIRECTORY INTACT SINCE THERE WERE NO MUSIC FILES MOVED..."
else:
try:
print "Cleaning up..."
shutil.rmtree(FINISHED_FOLDER_PATH)
except Exception, e:
raise Exception(""""Error: Could not delete the folder %s!"""
% (FINISHED_FOLDER_PATH, str(e)))
print "Success."
print "---"
except Exception, e:
print "Error: Something went wrong. Message: %s" % str(e)
finally:
print "Closing resume.dat..."
META_INFO_FILE.close()
print "Closed."
print "Cleaning up leftover files..."
try:
shutil.rmtree(base_dir)
except:
pass
print "All done."
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/stevedore/driver.py
|
Python
|
agpl-3.0
| 5,248
| 0
|
from .named import NamedExtensionManager
class DriverManager(NamedExtensionManager):
"""Load a single plugin with a given name from the namespace.
:param namespace: The namespace for the entry points.
:type namespace: str
:param name: The name of the driver to load.
:type name: str
:param invoke_on_load: Boolean controlling whether to invoke the
object returned by the entry point after the driver is loaded.
:type invoke_on_load: bool
:param invoke_args: Positional arguments to pass when invoking
the object returned by the entry point. Only used if invoke_on_load
is True.
:type invoke_args: tuple
:param invoke_kwds: Named arguments to pass when invoking
the object returned by the entry point. Only used if invoke_on_load
is True.
:type invoke_kwds: dict
:param on_load_failure_callback: Callback function that will be called when
a entrypoint can not be loaded. The arguments that will be provided
when this is called (when an entrypoint fails to load) are
(manager, entrypoint, exception)
:type on_load_failure_callback: function
:param verify_requirements: Use setuptools to enforce the
dependencies of the plugin(s) being loaded. Defaults to False.
:type verify_requirements: bool
"""
def __init__(self, namespace, name,
invoke_on_load=False, invoke_args=(), invoke_kwds={},
on_load_failure_callback=None,
verify_requirements=False):
super(DriverManager, self).__init__(
namespace=namespace,
names=[name],
invoke_on_load=invoke_on_load,
invoke_args=invoke_args,
invoke_kwds=invoke_kwds,
on_load_failure_callback=on_load_failure_callback,
verify_requirements=verify_requirements,
)
@classmethod
def make_test_instance(cls, extension, namespace='TESTING',
propagate_map_exceptions=False,
on_load_failure_callback=None,
verify_requirements=False):
"""Construct a test DriverManager
Test instances are passed a list of extensions to work from rather
than loading them from entry points.
:param extension: Pre-configured Extension instance
:type extension: :class:`~stevedore.extension.Extension`
:param namespace: The namespace for the manager; used only for
identification since the extensions are passed in.
:type namespace: str
:param propagate_map_exceptions: Boolean controlling whether exceptions
are propagated up through the map call or whether they are logged
and then ignored
:type propagate_map_exceptions: bool
:param on_load_failure_callback: Callback function that will
be called when a entrypoint can not be loaded. The
arguments that will be provided when this is called (when
an entrypoint fails to load) are (manager, entrypoint,
exception)
:type on_load_failure_callback: function
:param verify_requirements: Use setuptools to enforce the
dependencies of the plugin(s) being loaded. Defaults to False.
:type verify_requirements: bool
:return: The manager instance, initialized for testing
"""
o = super(DriverManager, cls).make_test_instance(
[extension], namespace=namespace,
propagate_map_exceptions=propagate_map_exceptions,
on_load_failure_callback=on_load_failure_callback,
verify_requirements=verify_requirements)
return o
def _init_plugins(self, extensions):
super(DriverManager, self)._init_plugins(extensions)
if not self.extensions:
name = self._names[0]
raise RuntimeError('No %r driver found, looking for %r' %
(self.namespace, name))
if len(self.extensions) > 1:
discovered_drivers = ','.join(e.entry_point_target
for e in self.extensions)
raise RuntimeError('Multiple %r drivers found: %s' %
(self.namespace, discovered_drivers))
def __call__(self, func, *args, **kwds):
"""Invokes func() for the single loaded extension.
The signature for func() should be::
def func(ext, *args, **kwds):
pass
The first argument to func(), 'ext', is the
:class:`~stevedore.extension.Extension` instance.
Exceptions raised from within func() are logged and ignored.
:param func: Callable to invoke for each extension.
:param args: Variable arguments to pass to func()
:param kwds: Keyword arguments to pass to func()
:returns: List of values returned from func()
"""
results = self.map(func, *args, **kwds)
if results:
return results[0]
@property
def driver(self):
"""Returns the driver being used by this manager.
"""
ext = self.extensions[0]
return ext.obj if ext.obj else ext.plugin
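# --- Editor's addition: minimal usage sketch, not part of the original module. ---
# Assumes some installed distribution registered an entry point named 'simple' under
# the hypothetical namespace 'example.drivers'; both names are placeholders.
if __name__ == '__main__':
    mgr = DriverManager(
        namespace='example.drivers',   # entry-point group to search (hypothetical)
        name='simple',                 # name of the single driver to load (hypothetical)
        invoke_on_load=True,           # call the loaded entry point object on load
    )
    # DriverManager loads exactly one extension; it is exposed via the .driver property,
    # and __call__ maps a function over that single extension.
    print(mgr.driver)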
|
nathandh/udacity-fullstack-MovieTrailerWebsite
|
fresh_tomatoes.py
|
Python
|
mit
| 6,106
| 0.003767
|
import webbrowser
import os
import re
# Styles and scripting for the page
main_page_head = '''
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Fresh Tomatoes!</title>
<!-- Bootstrap 3 -->
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap-theme.min.css">
<script src="http://code.jquery.com/jquery-1.10.1.min.js"></script>
<script src="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/js/bootstrap.min.js"></script>
<style type="text/css" media="screen">
body {
padding-top: 80px;
background-color: #9e9e9e;
}
#trailer .modal-dialog {
margin-top: 200px;
width: 640px;
height: 480px;
}
.hanging-close {
position: absolute;
top: -12px;
right: -12px;
z-index: 9001;
}
#trailer-video {
width: 100%;
height: 100%;
}
.movie-tile {
margin-bottom: 20px;
padding-top: 20px;
}
.movie-tile:hover {
background-color: #EEE;
cursor: pointer;
}
.scale-media {
padding-bottom: 56.25%;
position: relative;
}
.scale-media iframe {
border: none;
height: 100%;
position: absolute;
width: 100%;
left: 0;
top: 0;
background-color: white;
}
/* Nathan added 08/30/16 */
.navbar-inverse .navbar-brand {
color: antiquewhite;
}
.navbar-brand,
.navbar-nav li a {
line-height: 65px;
height: 65px;
padding-top: 0;
}
.navbar-inverse {
background-color: darkslategray;
background-image: none;
border-color: gray;
}
.navbar-brand > img {
width:50px;
height:50px;
}
</style>
<script type="text/javascript" charset="utf-8">
// Pause the video when the modal is closed
$(document).on('click', '.hanging-close, .modal-backdrop, .modal', function (event) {
// Remove the src so the player itself gets removed, as this is the only
// reliable way to ensure the video stops playing in IE
$("#trailer-video-container").empty();
});
// Start playing the video whenever the trailer modal is opened
$(document).on('click', '.movie-tile', function (event) {
var trailerYouTubeId = $(this).attr('data-trailer-youtube-id')
var sourceUrl = 'http://www.youtube.com/embed/' + trailerYouTubeId + '?autoplay=1&html5=1';
$("#trailer-video-container").empty().append($("<iframe></iframe>", {
'id': 'trailer-video',
'type': 'text-html',
'src': sourceUrl,
'frameborder': 0
}));
});
// Animate in the movies when the page loads
$(document).ready(function () {
$('.movie-tile').hide().first().show("fast", function showNext() {
$(this).next("div").show("fast", showNext);
});
});
</script>
</head>
'''
# The main page layout and title bar
main_page_content = '''
<body>
<!-- Trailer Video Modal -->
<div class="modal" id="trailer">
<div class="modal-dialog">
<div class="modal-content">
<a href="#" class="hanging-close" data-dismiss="modal" aria-hidden="true">
<img src="https://lh5.ggpht.com/v4-628SilF0HtHuHdu5EzxD7WRqOrrTIDi_MhEG6_qkNtUK5Wg7KPkofp_VJoF7RS2LhxwEFCO1ICHZlc-o_=s0#w=24&h=24"/>
</a>
<div class="scale-media" id="trailer-video-container">
</div>
</div>
</div>
</div>
<!-- Main Page Content -->
<div class="container">
<div class="navbar navbar-inverse navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand" href="#">
<img alt="Fresh Tomatoes Logo" src="Tomato-78.png">
Fresh Tomatoes! Trailers
</a>
</div>
</div>
</div>
</div>
<div class="container">
{movie_tiles}
</div>
</body>
</html>
'''
# A single movie entry html template
movie_tile_content = '''
<div class="col-md-6 col-lg-4 movie-tile text-center" data-trailer-youtube-id="{trailer_youtube_id}" data-toggle="modal" data-target="#trailer">
<img src="{poster_image_url}" width="220" height="342">
<h2>{movie_title}</h2>
</div>
'''
def create_movie_tiles_content(movies):
# The HTML content for this section of the page
content = ''
for movie in movies:
# Extract the youtube ID from the url
youtube_id_match = re.search(
r'(?<=v=)[^&#]+', movie.trailer_youtube_url)
youtube_id_match = youtube_id_match or re.search(
r'(?<=be/)[^&#]+', movie.trailer_youtube_url)
trailer_youtube_id = (youtube_id_match.group(0) if youtube_id_match
else None)
# Append the tile for the movie with its content filled in
content += movie_tile_content.format(
movie_title=movie.title,
poster_image_url=movie.poster_image_url,
trailer_youtube_id=trailer_youtube_id
)
return content
def open_movies_page(movies):
# Create or overwrite the output file
output_file = open('fresh_tomatoes.html', 'w')
# Replace the movie tiles placeholder with the generated content
rendered_content = main_page_content.format(
movie_tiles=create_movie_tiles_content(movies))
# Output the file
output_file.write(main_page_head + rendered_content)
output_file.close()
# open the output file in the browser (in a new tab, if possible)
url = os.path.abspath(output_file.name)
webbrowser.open('file://' + url, new=2)
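# --- Editor's addition: minimal usage sketch, not part of the original module. ---
# open_movies_page() only needs objects with .title, .poster_image_url and
# .trailer_youtube_url (see create_movie_tiles_content above). The Movie class and the
# URLs below are placeholders invented for illustration.
if __name__ == '__main__':
    class Movie(object):
        def __init__(self, title, poster_image_url, trailer_youtube_url):
            self.title = title
            self.poster_image_url = poster_image_url
            self.trailer_youtube_url = trailer_youtube_url

    demo_movies = [Movie('Example Film',
                         'https://example.com/poster.jpg',
                         'https://www.youtube.com/watch?v=dQw4w9WgXcQ')]
    open_movies_page(demo_movies)  # writes fresh_tomatoes.html and opens it in a browser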
|
woddx/privacyidea
|
setup.py
|
Python
|
agpl-3.0
| 5,648
| 0.001593
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
import glob
import sys
#VERSION="2.1dev4"
VERSION="2.6dev5"
# Taken from kennethreitz/requests/setup.py
package_directory = os.path.realpath(os.path.dirname(__file__))
def get_file_contents(file_path):
"""Get the context of the file using full path name."""
content = ""
try:
full_path = os.path.join(package_directory, file_path)
content = open(full_path, 'r').read()
except:
print >> sys.stderr, "### could not open file %r" % file_path
return content
setup(
name='privacyIDEA',
version=VERSION,
description='privacyIDEA: identity, multifactor authentication (OTP), '
'authorization, audit',
author='privacyidea.org',
license='AGPLv3',
author_email='cornelius@privacyidea.org',
url='http://www.privacyidea.org',
keywords='OTP, two factor authentication, management, security',
packages=find_packages(),
scripts=['pi-manage.py',
'tools/privacyidea-convert-token',
'tools/privacyidea-create-pwidresolver-user',
'tools/privacyidea-create-sqlidresolver-user',
'tools/privacyidea-pip-update',
'tools/privacyidea-create-certificate',
'tools/privacyidea-fix-access-rights',
'tools/privacyidea-create-ad-users',
'tools/privacyidea-fetchssh.sh',
'tools/privacyidea-create-userdb.sh'
],
extras_require={
'dev': ["Sphinx>=1.3.1",
"sphinxcontrib-httpdomain>=1.3.0"],
'test': ["coverage>=3.7.1",
"mock>=1.0.1",
"nose>=1.3.4",
"responses>=0.4.0",
"six>=1.8.0"],
},
install_requires=["Flask>=0.10.1",
"Flask-Cache>=0.13.1",
"Flask-Migrate>=1.2.0",
"Flask-SQLAlchemy>=2.0",
"Flask-Script>=2.0.5",
"Jinja2>=2.7.3",
"Mako>=0.9.1",
"MarkupSafe>=0.23",
"MySQL-python>=1.2.5",
"Pillow>=2.6.1",
"PyJWT>=1.3.0",
"PyYAML>=3.11",
"Pygments>=2.0.2",
"SQLAlchemy>=1.0.5",
"Werkzeug>=0.10.4",
"alembic>=0.6.7",
"argparse>=1.2.1",
"bcrypt>=1.1.0",
"beautifulsoup4>=4.3.2",
"cffi>=0.8.6",
"configobj>=5.0.6",
"docutils>=0.12",
"funcparserlib>=0.3.6",
"itsdangerous>=0.24",
"ldap3>=0.9.8.4",
"netaddr>=0.7.12",
"passlib>=1.6.2",
"pyasn1>=0.1.7",
"pyOpenSSL>=0.15.1",
"pycparser>=2.10",
"pycrypto>=2.6.1",
"pyrad>=2.0",
"pyusb>=1.0.0b2",
"qrcode>=5.1",
"requests>=2.7.0",
"sqlsoup>=0.9.0",
"wsgiref>=0.1.2"
],
include_package_data=True,
data_files=[('etc/privacyidea/',
['deploy/apache/privacyideaapp.wsgi',
'deploy/privacyidea/dictionary',
'deploy/privacyidea/enckey',
'deploy/privacyidea/private.pem',
'deploy/privacyidea/public.pem']),
('share/man/man1',
["tools/privacyidea-convert-token.1",
"tools/privacyidea-create-pwidresolver-user.1",
"tools/privacyidea-create-sqlidresolver-user.1",
"tools/privacyidea-pip-update.1",
"tools/privacyidea-create-certificate.1",
"tools/privacyidea-fix-access-rights.1"
]),
('lib/privacyidea/authmodules/FreeRADIUS',
["authmodules/FreeRADIUS/LICENSE",
"authmodules/FreeRADIUS/privacyidea_radius.pm"]),
('lib/privacyidea/authmodules/OTRS',
["authmodules/OTRS/privacyIDEA.pm"]),
('lib/privacyidea/migrations',
["migrations/alembic.ini",
"migrations/env.py",
"migrations/README",
"migrations/script.py.mako"]),
('lib/privacyidea/migrations/versions',
["migrations/versions/2551ee982544_.py",
"migrations/versions/4f32a4e1bf33_.py",
"migrations/versions/2181294eed0b_.py",
"migrations/versions/e5cbeb7c177_.py",
"migrations/versions/4d9178fa8336_.py",
"migrations/versions/20969b4cbf06_.py"])
],
classifiers=["Framework :: Flask",
"License :: OSI Approved :: "
"GNU Affero General Public License v3",
"Programming Language :: Python",
"Development Status :: 5 - Production/Stable",
"Topic :: Internet",
"Topic :: Security",
"Topic :: System ::"
" Systems Administration :: Authentication/Directory"
],
#message_extractors={'privacyidea': [
# ('**.py', 'python', None),
# ('static/**.html', 'html', {'input_encoding': 'utf-8'})]},
zip_safe=False,
long_description=get_file_contents('README.md')
)
|
mjstealey/exposures-api
|
python-client/exposures_api/models/date_range.py
|
Python
|
mit
| 4,058
| 0.000246
|
# coding: utf-8
"""
Environmental Exposures API
API for environmental exposure models for NIH Data Translator program
OpenAPI spec version: 1.0.0
Contact: stealey@renci.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class DateRange(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, start_date=None, end_date=None):
"""
DateRange - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'start_date': 'datetime',
'end_date': 'datetime'
}
self.attribute_map = {
'start_date': 'start_date',
'end_date': 'end_date'
}
self._start_date = start_date
self._end_date = end_date
@property
def start_date(self):
"""
Gets the start_date of this DateRange.
:return: The start_date of this DateRange.
:rtype: datetime
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""
Sets the start_date of this DateRange.
:param start_date: The start_date of this DateRange.
:type: datetime
"""
self._start_date = start_date
@property
def end_date(self):
"""
Gets the end_date of this DateRange.
:return: The end_date of this DateRange.
:rtype: datetime
"""
return self._end_date
@end_date.setter
def end_date(self, end_date):
"""
Sets the end_date of this DateRange.
:param end_date: The end_date of this DateRange.
:type: datetime
"""
self._end_date = end_date
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict")
else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
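# --- Editor's addition: minimal usage sketch, not part of the generated module. ---
# Shows the round trip offered by this Swagger model: construct it, read the values back
# through the properties, and serialise with to_dict()/to_str(). Dates are arbitrary.
if __name__ == '__main__':
    from datetime import datetime
    rng = DateRange(start_date=datetime(2016, 1, 1), end_date=datetime(2016, 12, 31))
    print(rng.start_date)   # property getter -> datetime(2016, 1, 1, 0, 0)
    print(rng.to_dict())    # {'start_date': datetime(...), 'end_date': datetime(...)}
    print(rng.to_str())     # pretty-printed form of the same dict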
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/python/lib/encodings/cp437.py
|
Python
|
gpl-3.0
| 34,564
| 0.019355
|
""" Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp437',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00a2, # CENT SIGN
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00a5, # YEN SIGN
0x009e: 0x20a7, # PESETA SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x2310, # REVERSED NOT SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
0x00e3: 0x03c0, # GREEK SMALL LETTER PI
0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
0x00ec: 0x221e, # INFINITY
0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
0x00ef: 0x2229, # INTERSECTION
0x00f0: 0x2261, # IDENTICAL TO
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
0x00f4: 0x2320, # TOP HALF INTEGRAL
0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x2248, # ALMOST EQUAL TO
0x00f8: 0
|
pcmoritz/ray-1
|
python/ray/tune/tests/test_horovod.py
|
Python
|
apache-2.0
| 3,092
| 0
|
import pytest
import ray
from ray import tune
pytest.importorskip("horovod")
try:
from ray.tune.integration.horovod import (
DistributedTrainableCreator, _train_simple, _train_validate_session)
except ImportError:
pass # This shouldn't be reached - the test should be skipped.
@pytest.fixture
def ray_start_2_cpus():
address_info = ray.init(num_cpus=2)
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.fixture
def ray_start_4_cpus():
address_info = ray.init(num_cpus=4)
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.fixture
def ray_connect_cluster():
try:
address_info = ray.init(address="auto")
except Exception as e:
pytest.skip(str(e))
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
def test_single_step(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(
_train_simple, num_hosts=1, num_slots=2)
trainer = trainable_cls()
trainer.train()
trainer.stop()
def test_step_after_completion(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(
_train_simple, num_hosts=1, num_slots=2)
trainer = trainable_cls(config={"epochs": 1})
with pytest.raises(RuntimeError):
for i in range(10):
trainer.train()
def test_validation(ray_start_2_cpus):
def bad_func(a, b, c):
return 1
t_cls = DistributedTrainableCreator(bad_func, num_slots=2)
with pytest.raises(ValueError):
t_cls()
def test_set_global(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(_train_simple, num_slots=2)
trainable = trainable_cls()
result = trainable.train()
trainable.stop()
assert result["rank"] == 0
@pytest.mark.parametrize("enabled_checkpoint", [True, False])
def test_simple_tune(ray_start_4_cpus, enabled_checkpoint):
trainable_cls = DistributedTrainableCreator(_train_simple, num_slots=2)
analysis = tune.run(
trainable_cls,
config={"enable_checkpoint": enabled_checkpoint},
num_samples=2,
stop={"training_iteration": 2})
assert analysis.trials[0].last_result["training_iteration"] == 2
assert analysis.trials[0].has_checkpoint() == enabled_checkpoint
@pytest.mark.parametrize("use_gpu", [True, False])
def test_resource_tune(ray_connect_cluster, use_gpu):
if use_gpu and ray.cluster_resources().get("GPU", 0) == 0:
pytest.skip("No GPU available.")
trainable_cls = DistributedTrainableCreator(
_train_simple, num_slots=2, use_gpu=use_gpu)
analysis = tune.run(
trainable_cls, num_samples=2, stop={"training_iteration": 2})
assert analysis.trials[0].last_result["training_iteration"] == 2
def test_validate_session(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(_train_validate_session)
tune.run(trainable_cls)
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
|
chiamingyen/PythonCAD_py3
|
Interface/Entity/arrowitem.py
|
Python
|
gpl-2.0
| 1,388
| 0.029539
|
#QLinearGradient myGradient;
#QPen myPen;
#QPolygonF myPolygon;
#QPainterPath myPath;
#myPath.addPolygon(myPolygon);
#QPainter painter(this);
#painter.setBrush(myGradient);
#painter.setPen(myPen);
#painter.drawPath(myPath);
import math
from PyQt5 import QtCore, QtGui, QtWidgets
class ArrowItem(QtWidgets.QGraphicsItem):
def definePath(self):
poligonArrow=QtGui.QPolygonF()
poligonArrow.append(QtCore.QPointF(0.0, 5.0))
poligonArrow.append(QtCore.QPointF(60.0, 5.0))
poligonArrow.append(QtCore.QPointF(60.0, 10.0))
poligonArrow.append(QtCore.QPointF(80.0, 0.0))
poligonArrow.append(QtCore.QPointF(60.0, -10.0))
poligonArrow.append(QtCore.QPointF(60.0, -5.0))
poligonArrow.append(QtCore.QPointF(0.0, -5.0))
poligonArrow.append(QtCore.QPointF(0.0, 5.0))
arrowPath=QtGui.QPainterPath()
arrowPath.addPolygon(poligonArrow)
return arrowPath
def boundingRect(self):
"""
overloading of the qt bounding rectangle
"""
return QtCore.QRectF(-1,-250 ,80,50)
def paint(self, painter,option,widget):
"""
overloading of the paint method
"""
painter.setPen(QtGui.QPen(QtGui.QColor(79, 106, 25)))
painter.setBrush(QtGui.QColor(122, 163, 39))
painter.drawPath(self.definePath())
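# --- Editor's addition: minimal usage sketch, not part of the original module. ---
# ArrowItem is an ordinary QGraphicsItem subclass, so it is displayed by adding it to a
# QGraphicsScene shown through a QGraphicsView; this is the generic PyQt5 pattern, not
# code taken from PythonCAD itself.
if __name__ == '__main__':
    import sys
    app = QtWidgets.QApplication(sys.argv)
    scene = QtWidgets.QGraphicsScene()
    scene.addItem(ArrowItem())           # the scene calls boundingRect() and paint()
    view = QtWidgets.QGraphicsView(scene)
    view.show()
    sys.exit(app.exec_())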
|
A425/django-nadmin
|
nadmin/plugins/mobile.py
|
Python
|
mit
| 904
| 0.004425
|
#coding:utf-8
from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, CommAdminView
class MobilePlugin(BaseAdminPlugin):
def _test_mobile(self):
try:
return self.request.META['HTTP_USER_AGENT'].find('Android') >= 0 or \
self.request.META['HTTP_USER_AGENT'].find('iPhone') >= 0
except Exception:
return False
def init_request(self, *args, **kwargs):
return self._test_mobile()
def get_context(self, context):
#context['base_template'] = 'nadmin/base_mobile.html'
context['is_mob'] = True
return context
# Media
# def get_media(self, media):
# return media + self.vendor('nadmin.mobile.css', )
def block_extrahead(self, context, nodes):
nodes.append('<script>window.__admin_ismobile__ = true;</script>')
site.register_plugin(MobilePlugin, CommAdminView)
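# --- Editor's addition: illustrative sketch only, not part of the original plugin. ---
# MobilePlugin shows the hooks a nadmin plugin typically implements: init_request()
# decides whether the plugin is active for the current request, get_context() can amend
# the template context, and block_*() methods inject markup. ExamplePlugin below is
# hypothetical and is deliberately left unregistered.
class ExamplePlugin(BaseAdminPlugin):
    def init_request(self, *args, **kwargs):
        return True  # activate for every request

    def get_context(self, context):
        context['example_flag'] = True
        return context

# site.register_plugin(ExamplePlugin, CommAdminView)  # how it would be activated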
|
JohnReid/biopsy
|
Python/bio_server/main_server.py
|
Python
|
mit
| 11,092
| 0.014245
|
import biopsy
import bifa_server
import base64, threading , os, socket
from sslUserCheck import CheckUserEngine
from soaplib.wsgi_soap import WSGISoapApp
from soaplib.wsgi_soap import SoapServiceBase
from soaplib.service import soapmethod
from soaplib.client import make_service_client
from soaplib.serializers.primitive import String, Integer, Array, Boolean, Float
from soaplib.serializers.binary import Attachment
from soaplib.serializers.clazz import ClassSerializer
# This does not need to be changed for local Windows testing
LOCALID = 'wsbc.warwick.ac.uk'
from tempfile import mkstemp
import os
global server
global cu
global portNo
def localIp():
if os.name == 'nt' :
return '127.0.0.1'
else :
name = socket.gethostname()
if name == 'wsbc.cov.warwick.ac.uk' :
return 'wsbc.warwick.ac.uk' #return external ethernet name
else :
pt = socket.gethostbyname(name)
print pt
return pt
#class userDataClass :
# username = String
# password = String
# OK = Boolean
# Used as part of the soap interface
class Version(ClassSerializer):
class types:
majorVersion = Integer
minorVersion = Integer
class BiFaWSGISoapApp(WSGISoapApp, SoapServiceBase):
'''
This object is a VERY simple extension of the base WSGISoapApp.
It subclasses both WSGISoapApp, and SoapServiceBase, so that
an object can simply subclass this single object, and it will
be both a wsgi application and a soap service. This is convenient
if you want to only expose some functionality, and don't need
complex handler mapping, and all of the functionality can be put
in a single class.
'''
def onWsdl(self, environ, wsdl):
'''
This is called when a wsdl is requested
@param the wsgi environment
@param the wsdl string
'''
client = make_service_client('http://%s' % (LOCALID), BiFa())
return client.server.wsdl('')
def __init__(self):
self.cookie = ""
self.state = -9
WSGISoapApp.__init__(self)
SoapServiceBase.__init__(self)
def getHandler(self, environ):
global userCheckedEvent
global checkUserEvent
auth = environ.get("HTTP_AUTHORIZATION")
if auth == None :
raise Exception("Requests must include HTTP authorization")
if auth == '' :
raise Exception("Requests must include HTTP authorization")
if auth[0:6]=="Basic " :
auth = auth[6:]
else :
raise Exception("Requests must include HTTP basic authorization")
auth = base64.decodestring(auth)
user, sep, password = auth.partition(':')
biopsy.UserAdmin.user = user
self.cookie,self.state = cu.check(user, password)
if self.cookie == "" :
print "No cookie"
raise Exception("Invalid user")
return self
# Soap types that are created in the bifa_server
from bifa_server import BiFaHit,PssmInfoData
class BiFa(BiFaWSGISoapApp):
@soapmethod(String, Float, Integer, Array(String), Boolean, String, Float, String, Boolean, Array(String), _returns=Array(BiFaHit))
def BiFaHits(self,sequence,threshold,algorithm,phyloSequences,useConsensusSequences,matrixSpecies,phyloThreshold,matrixNameMatch,useCumulativeLikelihoods,pssmSets):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
if pssmSets != None :
pssms = pssmSets[0]
else :
pssms = ""
hits=bifa_server.bifa_hits(sequence, threshold, algorithm, phyloSequences,
useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch,
useCumulativeLikelihoods, pssmSets)
return hits
@soapmethod(String, Float, String, Integer, Boolean, Array(String), Boolean, String, Float, String, Boolean, Boolean, Array(String), _returns=String)
def BiFaAnalyser(self, sequence, threshold, title, algorithm, showLabels, phyloSequences, useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch, useOldAlgorithm, useCumulativeLikelihoods, pssmSets) :
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
if pssmSets != None :
pssms = pssmSets[0]
else :
pssms = ""
ps = ""
if phyloSequences != None :
i = 1
for seq in phyloSequences :
ps += "> Seq %i\n" % i
ps += seq
ps += "\n"
i += 1
str = "> RefSeq\n%s\n,%s,%f,%i,%i,%i,%s,%f,%s,%i,%i,%s\n" % (sequence, ps, threshold, algorithm, showLabels, useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch, useOldAlgorithm, useCumulativeLikelihoods, pssms )
cu.log2(str)
temp=bifa_server.bifa_tool(sequence, threshold, title, algorithm, showLabels, phyloSequences, useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch, useOldAlgorithm, useCumulativeLikelihoods, pssmSets)
output_svg_file="".join([temp, ".svg"])
if os.path.isfile(output_svg_file):
f1=open(output_svg_file, 'r')
svg_string=f1.readlines()
f1.close()
os.remove(output_svg_file)
return "".join(svg_string)
else:
return "no file"
@soapmethod(String, _returns= String)
def returningString(self, key):
# If no password was provided then the cookie is used to validate the client
# otherwise it is a real cookie
p1, sep, p2 = key.partition(':')
if key == "connection_test" :
if self.state == -1 :
rv = "keyCheck:" + self.cookie
else :
rv = "established:" + self.cookie
return rv
elif key == "connection_info":
if self.state == -1 :
rv = "keyCheck:" + self.cookie
else :
rv = "established:" + self.cookie + ":"+self.transfacVersion() + "." + self.customPssmVersion()
return rv
else :
return "unknown request"
@soapmethod(String, String, Float, _returns=Array(Float))
def scorePssmOnSequence(self, pssm_name, sequence, threshold):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.score_pssm_on_sequence(pssm_name, sequence, threshold)
@soapmethod(Array(String), Array(String), Integer, _returns=Array(String))
def scorePssmsOnSequences(self, pssmNames, sequences, algorithm):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.score_pssms_on_sequences(pssmNames, sequences, algorithm)
@soapmethod(_returns=Float)
def bindingPrior(self):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return biopsy.Environment.bindingPrior
@soapmethod(_returns=Integer)
def maxChainMaxNumSequences(self):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return biopsy.Environment.max_chain_max_num_sequences
@soapmethod(_returns=Array(String))
def PssmSetNames(self):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.get_pssm_set_names()
@soapmethod(Boolean,String,String,Array(String),_returns=Array(String))
def Pssms(self,useConsensusSequences,matrixSpecies,matrixNameMatch,pssmSets):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.pssmAccs(pssmSets,useConsensusSequences,matrixSpecies,matrixNameMatch)
@soapmethod(String,_returns=PssmInfoData)
def PssmInfo(self,pssmName):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.get_pssm_info(pssmName)
@soapmethod(String,_returns=Array(String))
def PssmFreqs(self,pssmName):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
|
frostidaho/python-gpopup
|
tests/test_ipc2.py
|
Python
|
bsd-2-clause
| 2,655
| 0.008286
|
import pytest
def test_pack_unpack():
header = ('json', 301)
from gpopup.ipc import _pack_header, _unpack_header
header_bytes = _pack_header(*header)
header_out = _unpack_header(header_bytes)
assert header == header_out
assert header[0] == header_out.type
assert header[1] == header_out.length
def test_test_get_client(IpcServer):
Client = IpcServer.get_client()
c = Client()
s = IpcServer()
assert c.sock_name == s.sock_name
def test_ping(echo_client):
assert echo_client.ping() == True
def test_pargs(echo_client):
pargs = 9, 8, 7
args, kw = echo_client.echo(*pargs)
assert pargs == args
assert {} == kw
def test_kwargs(echo_client):
kwargs = {
'a': [0,1,2],
'b': 'some string',
'c': print,
}
args, kw = echo_client.echo(**kwargs)
assert () == args
assert kwargs == kw
def test_adding_cmds(MathServer):
Client = MathServer.get_client()
assert 'cos' in Client.__dict__
assert 'erf' in Client.__dict__
def test_calc(math_client):
import math
c = math_client
assert c.cos(0.5) == pytest.approx(math.cos(0.5))
assert c.erf(0.1) == pytest.approx(math.erf(0.1))
def test_json(IpcServer):
assert IpcServer.serial_method == 'pickle'
IpcServer.serial_method = 'json'
assert IpcServer.serial_method == 'json'
Client = IpcServer.get_client()
assert Client.serial_method == 'json'
c = Client()
c.start_server_maybe()
pargs, kwargs = c.echo(42)
assert c.serial_method == 'json'
assert kwargs == {}
assert pargs == [42,]
c.kill_server()
def test_no_server(IpcServer):
Client = IpcServer.get_client()
with pytest.raises(ConnectionError):
Client().ping()
def test_busy(IpcServer):
serv = IpcServer()
serv2 = IpcServer()
assert serv.sock_name == serv2.sock_name
Client = serv.get_client()
c = Client()
with pytest.raises(ConnectionError):
c.ping()
serv.run(background=True)
assert c.ping() == True
assert serv2.run() == False
c.kill_server()
def test_foreground(IpcServer):
serv = IpcServer()
Client = serv.get_client()
c = Client()
with pytest.raises(ConnectionError):
c.ping()
import threading
run = lambda: serv.run(background=False)
t = threading.Thread(target=run)
t.start()
assert c.ping() == True
assert c.echo(37, wow='okay') == ((37,), {'wow': 'okay'})
c.kill_server()
t.join(1)
def test_fail_cmd(echo_client):
assert echo_client.run_cmd('ping') == True
with pytest.raises(AttributeError):
echo_client.run_cmd('asdfasdf', 1, 3)
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/scripts/presets/tracking_camera/Nikon_DX.py
|
Python
|
gpl-3.0
| 192
| 0
|
import bpy
camera = bpy.context.edit_movieclip.tracking.camera
camera.sensor_width = 23.6
camera.units = 'MILLIMETERS'
camera.pixel_aspect = 1
camera.k1 = 0.0
camera.k2 = 0.0
camera.k3 = 0.0
|
felixbade/minecraft-proxy
|
app/server_manager/ec2.py
|
Python
|
artistic-2.0
| 902
| 0.001109
|
#!/usr/bin/env python
import logging
import boto.ec2
import config
class EC2Client:
def __init__(self):
self.conn = boto.ec2.connect_to_region(config.ec2_region)
def stop(self):
if self.get_status() in ['running', 'pending']:
logging.info('Stopping server...')
self.conn.stop_instances(instance_ids=[config.ec2_instance_id])
def start(self):
if self.get_status() == 'stopped':
logging.info('Starting server...')
self.conn.start_instances(instance_ids=[config.ec2_instance_id])
def get_status(self):
return self.get_instance()._state.name
def get_ip(self):
return self.get_instance().ip_address
def get_instance(self):
for instance in self.conn.get_only_instances():
if instance.id == config.ec2_instance_id:
return instance
return None
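# --- Editor's addition: minimal usage sketch, not part of the original module. ---
# Assumes the config module imported above provides ec2_region and ec2_instance_id and
# that boto can find AWS credentials in the environment.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    client = EC2Client()
    print(client.get_status())  # e.g. 'running', 'stopped' or 'pending'
    client.start()              # no-op unless the instance is currently stopped
    print(client.get_ip())      # public IP address, or None while stopped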
|
martinjrobins/hobo
|
pints/tests/test_toy_stochastic_degradation_model.py
|
Python
|
bsd-3-clause
| 4,627
| 0
|
#!/usr/bin/env python3
#
# Tests if the stochastic degradation (toy) model works.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import unittest
import numpy as np
import pints
import pints.toy
from pints.toy import StochasticDegradationModel
class TestStochasticDegradationModel(unittest.TestCase):
"""
Tests if the stochastic degradation (toy) model works.
"""
def test_start_with_zero(self):
# Test the special case where the initial molecule count is zero
model = StochasticDegradationModel(0)
times = [0, 1, 2, 100, 1000]
parameters = [0.1]
values = model.simulate(parameters, times)
self.assertEqual(len(values), len(times))
self.assertTrue(np.all(values == np.zeros(5)))
def test_start_with_twenty(self):
# Run small simulation
model = pints.toy.StochasticDegradationModel(20)
times = [0, 1, 2, 100, 1000]
parameters = [0.1]
values = model.simulate(parameters, times)
self.assertEqual(len(values), len(times))
self.assertEqual(values[0], 20)
self.assertEqual(values[-1], 0)
self.assertTrue(np.all(values[1:] <= values[:-1]))
def test_suggested(self):
model = pints.toy.StochasticDegradationModel(20)
times = model.suggested_times()
parameters = model.suggested_parameters()
self.assertTrue(len(times) == 101)
self.assertTrue(parameters > 0)
def test_simulate(self):
times = np.linspace(0, 100, 101)
model = StochasticDegradationModel(20)
time, mol_count = model.simulate_raw([0.1])
values = model.interpolate_mol_counts(time, mol_count, times)
self.assertTrue(len(time), len(mol_count))
# Test output of Gillespie algorithm
self.assertTrue(np.all(mol_count == np.array(range(20, -1, -1))))
# Check simulate function returns expected values
self.assertTrue(np.all(values[np.where(times < time[1])] == 20))
# Check interpolation function works as expected
temp_time = np.array([np.random.uniform(time[0], time[1])])
self.assertEqual(
model.interpolate_mol_counts(time, mol_count, temp_time)[0],
20)
temp_time = np.array([np.random.uniform(time[1], time[2])])
self.assertEqual(
model.interpolate_mol_counts(time, mol_count, temp_time)[0],
19)
def test_mean_variance(self):
# test mean
model = pints.toy.StochasticDegradationModel(10)
v_mean = model.mean([1], [5, 10])
self.assertEqual(v_mean[0], 10 * np.exp(-5))
self.assertEqual(v_mean[1], 10 * np.exp(-10))
model = pints.toy.StochasticDegradationModel(20)
v_mean = model.mean([5], [7.2])
self.assertEqual(v_mean[0], 20 * np.exp(-7.2 * 5))
# test variance
model = pints.toy.StochasticDegradationModel(10)
v_var = model.variance([1], [5, 10])
self.assertEqual(v_var[0], 10 * (np.exp(5) - 1.0) / np.exp(10))
self.assertAlmostEqual(v_var[1], 10 * (np.exp(10) - 1.0) / np.exp(20))
model = pints.toy.StochasticDegradationModel(20)
v_var = model.variance([2.0], [2.0])
self.assertAlmostEqual(v_var[0], 20 * (np.exp(4) - 1.0) / np.exp(8))
def test_errors(self):
model = pints.toy.StochasticDegradationModel(20)
# parameters, times cannot be negative
times = np.linspace(0, 100, 101)
parameters = [-0.1]
self.assertRaises(ValueError, model.simulate, parameters, times)
self.assertRaises(ValueError, model.mean, parameters, times)
self.assertRaises(ValueError, model.variance, parameters, times)
times_2 = np.linspace(-10, 10, 21)
parameters_2 = [0.1]
self.assertRaises(ValueError, model.simulate, parameters_2, times_2)
self.assertRaises(ValueError, model.mean, parameters_2, times_2)
self.assertRaises(ValueError, model.variance, parameters_2, times_2)
# this model should have 1 parameter
parameters_3 = [0.1, 1]
self.assertRaises(ValueError, model.simulate, parameters_3, times)
self.assertRaises(ValueError, model.mean, parameters_3, times)
self.assertRaises(ValueError, model.variance, parameters_3, times)
# Initial value can't be negative
self.assertRaises(ValueError, pints.toy.StochasticDegradationModel, -1)
if __name__ == '__main__':
unittest.main()
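# --- Editor's addition: minimal usage sketch mirroring the tests above; it is defined
# but never called, and is not part of the original test module. ---
def _example_usage():
    times = np.linspace(0, 100, 101)
    model = pints.toy.StochasticDegradationModel(20)  # start with 20 molecules
    values = model.simulate([0.1], times)             # single rate parameter k = 0.1
    return values                                     # 20 at t=0, decaying towards 0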
|
parallel-fs-utils/fs-drift
|
unit_test_module.py
|
Python
|
apache-2.0
| 295
| 0.00339
|
# for backwards compatibility with earlier python versions
unit_test_module = None
def get_unit_test_module():
try:
import unittest
unit_test_module = unittest
except ImportError:
import unittest2
unit_test_module = unittest2
return unit_test_module
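# --- Editor's addition: minimal usage sketch, not part of the original module. ---
# Whichever module is returned (unittest or the unittest2 backport) exposes the same
# TestCase API, so callers can stay version-agnostic:
if __name__ == '__main__':
    unittest = get_unit_test_module()

    class ExampleTest(unittest.TestCase):
        def test_truth(self):
            self.assertTrue(True)

    unittest.main()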
|
dagargo/phatty
|
tests/test_connector.py
|
Python
|
gpl-3.0
| 11,804
| 0.001779
|
# -*- coding: utf-8 -*-
#
# Copyright 2017 David García Goñi
#
# This file is part of Phatty.
#
# Phatty is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Phatty is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Phatty. If not, see <http://www.gnu.org/licenses/>.
import unittest
import os
import phatty
import mido
from mido import Message
import mock
from mock import Mock
from mock import call
from phatty.connector import Connector
from struct import unpack
BAD_BANK_FILE_NAME = os.path.join(
os.path.dirname(__file__), 'resources/preset.syx')
BANK_FILE_NAME = os.path.join(os.path.dirname(__file__), 'resources/bank.syx')
BULK_FILE_NAME = os.path.join(os.path.dirname(__file__), 'resources/bulk.syx')
class Test(unittest.TestCase):
def setUp(self):
self.connector = Connector()
self.connector.port = Mock()
def test_get_panel_as_preset(self):
def return_value():
return [i for i in range(0, 192)]
self.connector.get_panel = Mock(side_effect=return_value)
value = self.connector.get_panel_as_preset(37)
self.connector.get_panel.assert_called_once()
self.assertEqual(value[2], 0x5)
self.assertEqual(value[4], 37)
def test_get_panel(self):
def return_value():
return [i for i in range(0, 192)]
self.connector.tx_message = Mock()
self.connector.rx_message = Mock(side_effect=return_value)
value = self.connector.get_panel()
self.connector.tx_message.assert_called_once_with(
phatty.connector.REQUEST_PANEL)
self.connector.rx_message.assert_called_once()
self.assertEqual(value, return_value())
def test_get_preset(self):
def return_value():
return [i for i in range(0, 192)]
self.connector.tx_message = Mock()
self.connector.rx_message = Mock(side_effect=return_value)
value = self.connector.get_preset(37)
msg = []
msg.extend(phatty.connector.REQUEST_PATCH)
msg[phatty.connector.REQ_PATCH_BYTE] = 37
self.connector.tx_message.assert_called_once_with(msg)
self.connector.rx_message.assert_called_once()
self.assertEqual(value, return_value())
def test_set_preset(self):
self.connector.port.send = Mock()
self.connector.set_preset(37)
msg = Message('program_change', channel=0, program=37)
self.connector.port.send.assert_called_once_with(msg)
def test_set_bulk(self):
try:
data = []
data.extend(phatty.connector.BULK_START)
data.extend([0] * (phatty.connector.BULK_SIZE -
len(phatty.connector.BULK_START)))
self.connector.tx_message = Mock()
self.connector.set_bulk(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bulk_red(self):
try:
data = []
data.extend(phatty.connector.BULK_START)
data.extend([0] * (phatty.connector.RED_BULK_SIZE -
len(phatty.connector.BULK_START)))
self.connector.tx_message = Mock()
self.connector.set_bulk(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bulk_fail(self):
try:
data = []
self.connector.set_bulk(data)
self.assertTrue(False)
except ValueError as e:
self.assertTrue(str(e) == phatty.connector.INVALID_BULK_FILE)
def test_set_bank(self):
try:
data = []
data.extend(phatty.connector.BANK_START)
data.extend([0] * (phatty.connector.BANK_SIZE -
len(phatty.connector.BANK_START)))
self.connector.tx_message = Mock()
self.connector.set_bank(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bank_red(self):
try:
data = []
data.extend(phatty.connector.BANK_START)
data.extend([0] * (phatty.connector.RED_BANK_SIZE -
len(phatty.connector.BANK_START)))
self.connector.tx_message = Mock()
self.connector.set_bank(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bank_fail(self):
try:
data = []
self.connector.set_bank(data)
self.assertTrue(False)
except ValueError as e:
self.assertTrue(str(e) == phatty.connector.INVALID_BANK_FILE)
def set_bank_from_file(self, filename):
data = mido.read_syx_file(filename)[0].bytes()
data = data[1:len(data) - 1]
self.connector.set_bank_from_file(filename)
return data
def test_set_bank_from_bank_file(self):
self.connector.set_bank = Mock()
data = self.set_bank_from_file(BANK_FILE_NAME)
self.connector.set_bank.assert_called_once_with(data)
def test_set_bank_from_bulk_file(self):
self.connector.set_bank = Mock(side_effect=ValueError)
self.connector.set_bulk = Mock()
data = self.set_bank_from_file(BULK_FILE_NAME)
self.connector.set_bank.assert_called_once_with(data)
self.connector.set_bulk.assert_called_once_with(data)
def test_set_bank_from_bank_file_error(self):
try:
self.connector.set_bank = Mock(side_effect=ValueError)
self.connector.set_bank_from_file(BAD_BANK_FILE_NAME)
self.assertTrue(False)
except ValueError:
self.assertTrue(True)
def test_write_data_to_file(self):
data = [1, 2, 3]
filename = 'foo'
messages = [Message('sysex', data=data)]
mido.write_syx_file = Mock()
self.connector.write_data_to_file(filename, data)
mido.write_syx_file.assert_called_once_with(filename, messages)
def return_sysex(filename):
data = [1, 2, 3]
return [Message('sysex', data=data)]
@mock.patch('mido.read_syx_file', side_effect=return_sysex)
def test_read_data_from_file(self, mock):
filename = 'foo'
data = self.connector.read_data_from_file(filename)
mido.read_syx_file.assert_called_once_with(filename)
self.assertEqual(data, [1, 2, 3])
def test_set_panel_name(self):
name = 'ABCabc123'
calls = []
calls.append(call(
Message('control_change', channel=0, control=119, value=0)))
calls.append(call(
Message('control_change', channel=0, control=66, value=19)))
calls.append(call(
Message('control_change', channel=0, control=66, value=15)))
calls.append(call(
Message('control_change', channel=0, control=66, value=13)))
calls.append(call(
Message('control_change', channel=0, control=66, value=1)))
for c in name:
calls.append(call(
Message('control_change', channel=0, control=66, value=ord(c))))
self.connector.port.send = Mock()
self.connector.set_panel_name(name)
self.connector.port.send.assert_has_calls(calls, any_order=False)
def check_send_message(self, function, control, array):
for i in range(0, len(array)):
message = Message('control_change', channel=0,
control=control, value=array[i])
self.connector.port.send
|
punalpatel/st2
|
contrib/runners/action_chain_runner/tests/unit/test_actionchain.py
|
Python
|
apache-2.0
| 40,041
| 0.002273
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import action_chain_runner as acr
from st2actions.container.service import RunnerContainerService
from st2common.constants.action import LIVEACTION_STATUS_RUNNING
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED
from st2common.constants.action import LIVEACTION_STATUS_CANCELED
from st2common.constants.action import LIVEACTION_STATUS_TIMED_OUT
from st2common.constants.action import LIVEACTION_STATUS_FAILED
from st2common.exceptions import actionrunner as runnerexceptions
from st2common.models.api.notification import NotificationsHelper
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.keyvalue import KeyValuePairDB
from st2common.models.system.common import ResourceReference
from st2common.persistence.keyvalue import KeyValuePair
from st2common.persistence.runner import RunnerType
from st2common.services import action as action_service
from st2common.util import action_db as action_db_util
from st2common.exceptions.action import ParameterRenderingFailedException
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
class DummyActionExecution(object):
def __init__(self, status=LIVEACTION_STATUS_SUCCEEDED, result=''):
self.id = None
self.status = status
self.result = result
FIXTURES_PACK = 'generic'
TEST_MODELS = {
'actions': ['a1.yaml', 'a2.yaml', 'action_4_action_context_param.yaml'],
'runners': ['testrunner1.yaml']
}
MODELS = FixturesLoader().load_models(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
ACTION_1 = MODELS['actions']['a1.yaml']
ACTION_2 = MODELS['actions']['a2.yaml']
ACTION_3 = MODELS['actions']['action_4_action_context_param.yaml']
RUNNER = MODELS['runners']['testrunner1.yaml']
CHAIN_1_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain1.yaml')
CHAIN_2_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain2.yaml')
CHAIN_ACTION_CALL_NO_PARAMS_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_call_no_params.yaml')
CHAIN_NO_DEFAULT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'no_default_chain.yaml')
CHAIN_NO_DEFAULT_2 = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'no_default_chain_2.yaml')
CHAIN_BAD_DEFAULT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'bad_default_chain.yaml')
CHAIN_BROKEN_ON_SUCCESS_PATH_STATIC_TASK_NAME = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_broken_on_success_path_static_task_name.yaml')
CHAIN_BROKEN_ON_FAILURE_PATH_STATIC_TASK_NAME = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_broken_on_failure_path_static_task_name.yaml')
CHAIN_FIRST_TASK_RENDER_FAIL_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_first_task_parameter_render_fail.yaml')
CHAIN_SECOND_TASK_RENDER_FAIL_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_second_task_parameter_render_fail.yaml')
CHAIN_LIST_TEMP_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_list_template.yaml')
CHAIN_DICT_TEMP_PATH = FixturesLoader().get_fixture_file_path_abs(
    FIXTURES_PACK, 'actionchains', 'chain_dict_template.yaml')
CHAIN_DEP_INPUT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dependent_input.yaml')
CHAIN_DEP_RESULTS_INPUT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dep_result_input.yaml')
MALFORMED_CHAIN_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'malformedchain.yaml')
CHAIN_TYPED_PARAMS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_typed_params.yaml')
CHAIN_SYSTEM_PARAMS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_typed_system_params.yaml')
CHAIN_WITH_ACTIONPARAM_VARS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_actionparam_vars.yaml')
CHAIN_WITH_SYSTEM_VARS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_system_vars.yaml')
CHAIN_WITH_PUBLISH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_publish.yaml')
CHAIN_WITH_PUBLISH_PARAM_RENDERING_FAILURE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_publish_params_rendering_failure.yaml')
CHAIN_WITH_INVALID_ACTION = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_invalid_action.yaml')
CHAIN_ACTION_PARAMS_AND_PARAMETERS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_params_and_parameters.yaml')
CHAIN_ACTION_PARAMS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_params_attribute.yaml')
CHAIN_ACTION_PARAMETERS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_parameters_attribute.yaml')
CHAIN_ACTION_INVALID_PARAMETER_TYPE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_invalid_parameter_type_passed_to_action.yaml')
CHAIN_NOTIFY_API = {'notify': {'on-complete': {'message': 'foo happened.'}}}
CHAIN_NOTIFY_DB = NotificationsHelper.to_model(CHAIN_NOTIFY_API)
@mock.patch.object(action_db_util, 'get_runnertype_by_name',
mock.MagicMock(return_value=RUNNER))
class TestActionChainRunner(DbTestCase):
def test_runner_creation(self):
runner = acr.get_runner()
self.assertTrue(runner)
self.assertTrue(runner.runner_id)
def test_malformed_chain(self):
try:
chain_runner = acr.get_runner()
chain_runner.entry_point = MALFORMED_CHAIN_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
self.assertTrue(False, 'Expected pre_run to fail.')
except runnerexceptions.ActionRunnerPreRunError:
self.assertTrue(True)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_success_path(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.liveaction.notify = CHAIN_NOTIFY_DB
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(requ
|
budnyjj/vkstat
|
utils/print.py
|
Python
|
mit
| 1,418
| 0.000705
|
# Various functions for printing various specific values
# in human-readable format
import sys
import time
import pprint
# pretty print object
pp = pprint.PrettyPrinter(indent=4)
def pretty_print(value):
pp.pprint(value)
# print timedelta, provided in seconds,
# in human-readable format
def print_elapsed_time(timedelta):
gm_timedelta = time.gmtime(timedelta)
hours = int(time.strftime('%H', gm_timedelta))
minutes = int(time.strftime('%M', gm_timedelta))
seconds = int(time.strftime('%S', gm_timedelta))
print('Total time elapsed: ', end='')
if hours > 0:
print('{0} hours, '.format(hours), end='')
if minutes > 0:
print('{0} minutes, '.format(minutes), end='')
    print('{0} seconds.'.format(seconds), end='')
print()
def print_progress(cur_value, max_value, width=72):
"""Print progress bar in form: [###-------]."""
progress = int((cur_value * 100) / max_value)
# effective width -- width of bar without brackets
e_width = width - 2
# number of "#" in bar
num_hashes = int((cur_value * e_width) / max_value)
num_minuses = e_width - num_hashes
sys.stdout.write('\r[{hashes}{minuses}] '
                     '{percentage}%'.format(hashes='#' * num_hashes,
minuses='-' * num_minuses,
percentage=progress))
sys.stdout.flush()
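
# Illustrative usage sketch (not part of the original module; the workload below is
# made up): print_progress is meant to be called once per processed item with the
# running count and the total, then finished with a bare newline.
if __name__ == '__main__':
    items = list(range(200))                  # hypothetical workload
    for done, _ in enumerate(items, start=1):
        print_progress(done, len(items))
    print()                                   # terminate the bar with a newline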
|
erudit/zenon
|
eruditorg/erudit/migrations/0111_auto_20190312_1251.py
|
Python
|
gpl-3.0
| 422
| 0.002375
|
# Generated by Django 2.0.10 on 2019-03-12 17:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('erudit', '0110_auto_20181123_1558'),
]
operations = [
migrations.AlterField(
model_name='issue',
name='is_published',
            field=models.BooleanField(default=False, verbose_name='Est publié'),
),
]
|
hetica/bentools
|
modules/manager/manager.py
|
Python
|
gpl-3.0
| 9,426
| 0.006265
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Manage modules
"""
import sys, os
import time
import grp
#from subprocess import Popen, PIPE
from pathlib import Path
import shutil
from getpass import getpass
__appname__ = "manager"
__licence__ = "none"
__version__ = "0.1"
__author__ = "Benoit Guibert <benoit.guibert@free.fr>"
__shortdesc__ = "Manage {} modules".format(sys.argv[0].split('/')[-1])
__opts__ = []
def autocomplete (parent):
### Build autocompletion file
module_path = "/".join(os.path.realpath(__file__).split('/')[:-2])
modules = " ".join(os.listdir(module_path))
# content of autocompletion file
""" Function doc """
content = """# Build by {parent} to uodate module completion
_{parent}()
{op}
local cur prev opts
COMPREPLY=()
cur="${op}COMP_WORDS[COMP_CWORD]{cp}"
prev="${op}COMP_WORDS[COMP_CWORD-1]{cp}"
opts="{modules}"
case $prev in
{parent})
COMPREPLY=( $(compgen -W "${op}opts{cp}" -- ${op}cur{cp}) )
;;
esac
return 0
{cp}
complete -F _{parent} -o default {parent}
""".format(parent=parent, modules=modules, op='{', cp='}')
### check if bash_completion is here
autocomplete_dir = str(Path.home()) + '/.bashrc.d'
if not os.path.exists(autocomplete_dir):
os.makedirs(autocomplete_dir)
### Modify .bashrc if not entry
bashrc_file = str(Path.home()) + '/.bashrc'
keyword = '.bashrc.d/' + parent + '_completion'
print(keyword)
bashrc_new_header = "\n# build by {parent}, do not change it!\n".format(parent=parent)
bashrc_new_body = "[ -d $HOME/.bashrc.d ] && source $HOME/.bashrc.d/{parent}_completion\n".format(parent=parent)
with open( bashrc_file, 'r+') as stream:
bashrc = stream.read()
if not keyword in bashrc:
stream.write(bashrc_new_header + bashrc_new_body)
### Write completion file
bold = '\033[1m'
end = '\033[0m'
completion_file = autocomplete_dir + '/' + parent + '_completion'
with open(completion_file, 'w') as file:
file.write(content)
print('\nPlease execute :\n{bold}source {comp_file}{end}\nto refresh {parent} completion\n'.
format(comp_file=completion_file, parent=parent, bold=bold, end=end))
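
# Illustrative note (not from the original code; the tool name is hypothetical):
# calling autocomplete("bentools") writes ~/.bashrc.d/bentools_completion and makes
# sure ~/.bashrc sources it, so that "bentools <TAB>" completes the module names.
#
#   autocomplete("bentools")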
def appContent(parent, appname, shortdesc):
newappcontent = """#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys, os
import argparse
__appname__ = "{app}"
__licence__ = "none"
__version__ = "0.1"
__date__ = '{date}'
__modified__ = '{date}'
__author__ = "John Doe <john.doe@exemple.com>"
__shortdesc__ = "{desc}"
def usage(appname):
# https://docs.python.org/3/howto/argparse.html?highlight=argparse
# add a sub-name in --help (like: {parent} sub-name options) when command not in standalone
# and delete sub-name in sys.args (to avoid confusions with arguments)
subname = ""
if not __appname__ in sys.argv[0] and __appname__ in sys.argv[1]:
subname = "{par}".format(__appname__)
del sys.argv[1]
usage = ('{ret}{par}{nwl}{par}.' # {ret}{par} to replace the header 'usage:'
'{nwl}Version: {par}{nwl}{nwl}' # version number
' usage: %(prog)s {par} options' # usage : prog [sub-name] options...
.format(''.ljust(len('usage:')), __shortdesc__, __version__, subname)
)
parser = argparse.ArgumentParser(usage=usage)
### OPTION
parser.add_argument("option1", # mandatory positional argument
help = 'mandatory file (one or more)', # help text
nargs = "+", # argument options number
metavar = ("file_1 [file_n]"), # option name to display
)
### ARGUMENT WITH OPTION
parser.add_argument("-g", "--genome",
|
help = "reference genome (fasta file)",
metavar = 'genome',
nargs = 1,
required = True,
)
### ARGUMENT WITHOUT OPTION
parser.add_argument('--verbose', # positional argument
action = "store_true", # argument doesn't need option, i.e. tag
help = "Increase volubility",
)
### ARGUMENT WITH PREDEFINED OPTION
parser.add_argument("-n", "--number", # positional argument
type = int, # must be an integer
choices = [1,2,3], # between 1 and 3
help = "a number from 1 to 3",
)
### VERSIONNING
parser.add_argument('-v', '--version', # positional argument
action='version', # action "version" is executed and programm stop
version="%(prog)s version: {par}".format(__version__) # the action "version"
)
### Go to "usage()" without arguments
if len(sys.argv) == 1: # or (__appname__ != appname and len(sys.argv) == 2):
parser.print_help()
sys.exit(1)
return parser.parse_args(), subname
def main(appname):
args, module = usage(appname)
print("Application: ", appname, module)
print(args)
print("Work in progress...")
if __name__ == "__main__":
main(__appname__)
""".format(date=time.strftime('%Y-%m-%d') , parent=parent, app=appname,desc=shortdesc, par="{}", nwl="\\n", ret="\\r", tab="\\t" )
return newappcontent
def writeApp(appname, parent, shortdesc):
    # find the location of the parent application
    modulesdir = (os.path.dirname((os.path.dirname(__file__))))
    # check write permissions on the directory
    # ...
    parentdir = os.path.dirname(modulesdir)
    # copy the app into <parent>/modules/<app>/<app.py>
appdir = modulesdir + "/" + appname
appfull = modulesdir + "/" + appname + "/" + appname + ".py"
if os.path.isdir(appdir):
print("\n Module '{}' still exists, abort...".format(appname))
print(" Remove '{}' directory to continue\n".format(appdir))
sys.exit()
os.mkdir(appdir)
with open( appfull, "w") as fic:
fic.write(appContent(parent, appname, shortdesc))
st = os.stat(appfull)
os.chmod(appfull, st.st_mode | 0o111)
    # update the <parent>/bash_completion.d/bentools file
    # if running as root or with sudo, overwrite /etc/bash_completion.d/bentools
    # reload the /etc/bash_completion.d/bentools file
    # print a message telling where the app is located
print("\nModule {} has been created in directory {}".format(appname, appdir))
return True
def deleteApp (appname, parent, shortdesc):
""" Function doc """
    # find the location of the parent application
    modulesdir = (os.path.dirname((os.path.dirname(__file__))))
    # check write permissions on the directory
    # ...
    parentdir = os.path.dirname(modulesdir)
    # build the path of the app directory <parent>/modules/<app>
appdir = modulesdir + "/" + appname
if not os.path.isdir(appdir):
print("\n Module '{}' not found, abort...\n".format(appname))
sys.exit()
shutil.rmtree(appdir)
print('\nModule {} has been deleted'.format(appname))
return True
def argsChk(parent):
    """checks arguments"""
    args = sys.argv[1:] if __appname__ in sys.argv[0] else sys.argv[2:]
if "-h" in args:
__opts__.append("-h")
args.remove("-h")
helpme(parent)
try:
if '--add' in args:
ind = args.index('--add')
return { 'type':'add', 'name': args[ind+1]}
if '--del' in args:
ind = args.index('--del')
return { 'type':'del', 'name': args[ind+1]}
if '--complete' in args:
ind = args.index('--complete')
return { 'type':'complete'}
except IndexError:
helpme(parent)
if len(args) != 2:
helpme(parent)
    return [args[0]]
def helpme(parent):
print("\
|
|
plamut/ggrc-core
|
src/ggrc_workflows/notification/data_handler.py
|
Python
|
apache-2.0
| 14,000
| 0.007143
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import urllib
from copy import deepcopy
from datetime import date
from logging import getLogger
from urlparse import urljoin
from sqlalchemy import and_
from ggrc import db
from ggrc import utils
from ggrc.models.revision import Revision
from ggrc.notifications import data_handlers
from ggrc.utils import merge_dicts, get_url_root
from ggrc_basic_permissions.models import Role, UserRole
from ggrc_workflows.models import Cycle
from ggrc_workflows.models import CycleTaskGroupObjectTask
from ggrc_workflows.models import Workflow
# pylint: disable=invalid-name
logger = getLogger(__name__)
"""
exposed functions
get_cycle_data,
get_workflow_data,
get_cycle_task_data,
"""
def get_cycle_created_task_data(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task:
logger.warning(
'%s for notification %s not found.',
notification.object_type, notification.id)
return {}
cycle_task_group = cycle_task.cycle_task_group
cycle = cycle_task_group.cycle
force = cycle.workflow.notify_on_change
task_assignee = data_handlers.get_person_dict(cycle_task.contact)
task_group_assignee = data_handlers.get_person_dict(cycle_task_group.contact)
workflow_owners = get_workflow_owners_dict(cycle.context_id)
task = {
cycle_task.id: get_cycle_task_dict(cycle_task)
}
result = {}
assignee_data = {
task_assignee['email']: {
"user": task_assignee,
"force_notifications": {
notification.id: force
},
"cycle_data": {
cycle.id: {
"my_tasks": deepcopy(task)
}
}
}
}
tg_assignee_data = {
task_group_assignee['email']: {
"user": task_group_assignee,
"force_notifications": {
notification.id: force
},
"cycle_data": {
cycle.id: {
"my_task_groups": {
cycle_task_group.id: deepcopy(task)
}
}
}
}
}
for workflow_owner in workflow_owners.itervalues():
wf_owner_data = {
workflow_owner['email']: {
"user": workflow_owner,
"force_notifications": {
notification.id: force
},
"cycle_data": {
cycle.id: {
"cycle_tasks": deepcopy(task)
}
}
}
}
result = merge_dicts(result, wf_owner_data)
return merge_dicts(result, assignee_data, tg_assignee_data)
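
# Rough shape of the merged dict returned above (illustrative only; the emails and
# numeric ids are made up). There is one entry per recipient email, carrying the
# user dict, the per-notification force flags and the cycle data for that role:
#
#   {
#       "owner@example.com": {
#           "user": {...},
#           "force_notifications": {42: True},
#           "cycle_data": {7: {"cycle_tasks": {13: {...}}}},
#       },
#       "assignee@example.com": {
#           "user": {...},
#           "force_notifications": {42: True},
#           "cycle_data": {7: {"my_tasks": {13: {...}}}},
#       },
#   }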
def get_cycle_task_due(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task:
logger.warning(
'%s for notification %s not found.',
notification.object_type, notification.id)
return {}
if not cycle_task.contact:
logger.warning(
'Contact for cycle task %s not found.',
notification.object_id)
return {}
notif_name = notification.notification_type.name
due = "due_today" if notif_name == "cycle_task_due_today" else "due_in"
force = cycle_task.cycle_task_group.cycle.workflow.notify_on_change
return {
cycle_task.contact.email: {
"user": data_handlers.get_person_dict(cycle_task.contact),
"force_notifications": {
notification.id: force
},
due: {
cycle_task.id: get_cycle_task_dict(cycle_task)
}
}
}
def get_all_cycle_tasks_completed_data(notification, cycle):
workflow_owners = get_workflow_owners_dict(cycle.context_id)
force = cycle.workflow.notify_on_change
result = {}
for workflow_owner in workflow_owners.itervalues():
wf_data = {
workflow_owner['email']: {
"user": workflow_owner,
"force_notifications": {
notification.id: force
},
"all_tasks_completed": {
cycle.id: get_cycle_dict(cycle)
}
}
}
result = merge_dicts(result, wf_data)
return result
def get_cycle_created_data(notification, cycle):
if not cycle.is_current:
return {}
manual = notification.notification_type.name == "manual_cycle_created"
force = cycle.workflow.notify_on_change
result = {}
for user_role in cycle.workflow.context.user_roles:
person = user_role.person
result[person.email] = {
"user": data_handlers.get_person_dict(person),
"force_notifications": {
notification.id: force
},
"cycle_started": {
cycle.id: get_cycle_dict(cycle, manual)
}
}
return result
def get_cycle_data(notification):
cycle = get_object(Cycle, notification.object_id)
if not cycle:
return {}
notification_name = notification.notification_type.name
if notification_name in ["manual_cycle_created", "cycle_created"]:
return get_cycle_created_data(notification, cycle)
elif notification_name == "all_cycle_tasks_completed":
return get_all_cycle_tasks_completed_data(notification, cycle)
return {}
def get_cycle_task_declined_data(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task or not cycle_task.contact:
logger.warning(
'%s for notification %s not found.',
notification.object_type, notification.id)
return {}
force = cycle_task.cycle_task_group.cycle.workflow.notify_on_change
return {
cycle_task.contact.email: {
"user": data_handlers.get_person_dict(cycle_task.contact),
"force_notifications": {
notification.id: force
},
"task_declined": {
cycle_task.id: get_cycle_task_dict(cycle_task)
}
}
}
def get_cycle_task_data(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task or not cycle_task.cycle_task_group.cycle.is_current:
return {}
notification_name = notification.notification_type.name
if notification_name in ["manual_cycle_created", "cycle_created"]:
return get_cycle_created_task_data(notification)
elif notification_name == "cycle_task_declined":
return get_cycle_task_declined_data(notification)
elif notification_name in ["cycle_task_due_in",
"one_time_cycle_task_due_in",
"weekly_cycle_task_due_in",
"monthly_cycle_task_due_in",
"quarterly_cycle_task_due_in",
"annually_cycle_task_due_in",
"cycle_task_due_today"]:
return get_cycle_task_due(notification)
return {}
def get_workflow_starts_in_data(notification, workflow):
if workflow.status != "Active":
return {}
if (not workflow.next_cycle_start_date or
workflow.next_cycle_start_date < date.today()):
return {} # this can only be if the cycle has successfully started
result = {}
workflow_owners = get_workflow_owners_dict(workflow.context_id)
force = workflow.notify_on_change
for user_roles in workflow.context.user_roles:
wf_person = user_roles.person
result[wf_person.email] = {
"user": data_handlers.get_person_dict(wf_person),
"force_notifica
|
tions": {
notification.id: force
            },
            "cycle_starts_in": {
workflow.id: {
"workflow_owners": workflow_owners,
"workflow_url": get_workflow_url(workflow),
"start_date": workflow.next_cycle_start_date,
"start_date_statement": utils.get_digest_date_statement(
workflow.next_cycle_start_date, "start", True),
"custom_message": workflow.notify_custom_message,
"title": workflow.title,
}
}
}
return result
def get_cycle_start_failed_data(notification, workflow):
if workflow.status != "Active":
return {}
if (not workflow.next_cycle_start_date or
|
karllessard/tensorflow
|
tensorflow/lite/tools/flatbuffer_utils_test.py
|
Python
|
apache-2.0
| 8,309
| 0.003972
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for flatbuffer_utils.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os
import subprocess
from tensorflow.lite.tools import flatbuffer_utils
from tensorflow.lite.tools import test_utils
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
class WriteReadModelTest(test_util.TensorFlowTestCase):
def testWriteReadModel(self):
# 1. SETUP
# Define the initial model
initial_model = test_utils.build_mock_model()
# Define temporary files
tmp_dir = self.get_temp_dir()
model_filename = os.path.join(tmp_dir, 'model.tflite')
# 2. INVOKE
# Invoke the write_model and read_model functions
flatbuffer_utils.write_model(initial_model, model_filename)
final_model = flatbuffer_utils.read_model(model_filename)
# 3. VALIDATE
# Validate that the initial and final models are the same
# Validate the description
self.assertEqual(initial_model.description, final_model.description)
# Validate the main subgraph's name, inputs, outputs, operators and tensors
initial_subgraph = initial_model.subgraphs[0]
final_subgraph = final_model.subgraphs[0]
self.assertEqual(initial_subgraph.name, final_subgraph.name)
for i in range(len(initial_subgraph.inputs)):
self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i])
for i in range(len(initial_subgraph.outputs)):
self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i])
for i in range(len(initial_subgraph.operators)):
self.assertEqual(initial_subgraph.operators[i].opcodeIndex,
final_subgraph.operators[i].opcodeIndex)
initial_tensors = initial_subgraph.tensors
final_tensors = final_subgraph.tensors
for i in range(len(initial_tensors)):
self.assertEqual(initial_tensors[i].name, final_tensors[i].name)
self.assertEqual(initial_tensors[i].type, final_tensors[i].type)
self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer)
for j in range(len(initial_tensors[i].shape)):
self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j])
# Validate the first valid buffer (index 0 is always None)
initial_buffer = initial_model.buffers[1].data
final_buffer = final_model.buffers[1].data
for i in range(initial_buffer.size):
self.assertEqual(initial_buffer.data[i], final_buffer.data[i])
class StripStringsTest(test_util.TensorFlowTestCase):
def testStripStrings(self):
# 1. SETUP
# Define the initial model
    initial_model = test_utils.build_mock_model()
    final_model = copy.deepcopy(initial_model)
# 2. INVOKE
# Invoke the strip_strings function
flatbuffer_utils.strip_strings(final_model)
# 3. VALIDATE
# Validate that the initial and final models are the same except strings
# Validate the description
self.assertNotEqual('', initial_model.description)
self.assertEqual('', final_model.description)
# Validate the main subgraph's name, inputs, outputs, operators and tensors
initial_subgraph = initial_model.subgraphs[0]
final_subgraph = final_model.subgraphs[0]
self.assertNotEqual('', initial_model.subgraphs[0].name)
self.assertEqual('', final_model.subgraphs[0].name)
for i in range(len(initial_subgraph.inputs)):
self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i])
for i in range(len(initial_subgraph.outputs)):
self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i])
for i in range(len(initial_subgraph.operators)):
self.assertEqual(initial_subgraph.operators[i].opcodeIndex,
final_subgraph.operators[i].opcodeIndex)
initial_tensors = initial_subgraph.tensors
final_tensors = final_subgraph.tensors
for i in range(len(initial_tensors)):
self.assertNotEqual('', initial_tensors[i].name)
self.assertEqual('', final_tensors[i].name)
self.assertEqual(initial_tensors[i].type, final_tensors[i].type)
self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer)
for j in range(len(initial_tensors[i].shape)):
self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j])
# Validate the first valid buffer (index 0 is always None)
initial_buffer = initial_model.buffers[1].data
final_buffer = final_model.buffers[1].data
for i in range(initial_buffer.size):
self.assertEqual(initial_buffer.data[i], final_buffer.data[i])
class RandomizeWeightsTest(test_util.TensorFlowTestCase):
def testRandomizeWeights(self):
# 1. SETUP
# Define the initial model
initial_model = test_utils.build_mock_model()
final_model = copy.deepcopy(initial_model)
# 2. INVOKE
# Invoke the randomize_weights function
flatbuffer_utils.randomize_weights(final_model)
# 3. VALIDATE
# Validate that the initial and final models are the same, except that
# the weights in the model buffer have been modified (i.e, randomized)
# Validate the description
self.assertEqual(initial_model.description, final_model.description)
# Validate the main subgraph's name, inputs, outputs, operators and tensors
initial_subgraph = initial_model.subgraphs[0]
final_subgraph = final_model.subgraphs[0]
self.assertEqual(initial_subgraph.name, final_subgraph.name)
for i in range(len(initial_subgraph.inputs)):
self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i])
for i in range(len(initial_subgraph.outputs)):
self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i])
for i in range(len(initial_subgraph.operators)):
self.assertEqual(initial_subgraph.operators[i].opcodeIndex,
final_subgraph.operators[i].opcodeIndex)
initial_tensors = initial_subgraph.tensors
final_tensors = final_subgraph.tensors
for i in range(len(initial_tensors)):
self.assertEqual(initial_tensors[i].name, final_tensors[i].name)
self.assertEqual(initial_tensors[i].type, final_tensors[i].type)
self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer)
for j in range(len(initial_tensors[i].shape)):
self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j])
# Validate the first valid buffer (index 0 is always None)
initial_buffer = initial_model.buffers[1].data
final_buffer = final_model.buffers[1].data
for j in range(initial_buffer.size):
self.assertNotEqual(initial_buffer.data[j], final_buffer.data[j])
class XxdOutputToBytesTest(test_util.TensorFlowTestCase):
def testXxdOutputToBytes(self):
# 1. SETUP
# Define the initial model
initial_model = test_utils.build_mock_model()
initial_bytes = flatbuffer_utils.convert_object_to_bytearray(initial_model)
# Define temporary files
tmp_dir = self.get_temp_dir()
model_filename = os.path.join(tmp_dir, 'model.tflite')
# 2. Write model to temporary file (will be used as input for xxd)
flatbuffer_utils.write_model(initial_model, model_filename)
# 3. DUMP WITH xxd
input_cc_file = os.path.join(tmp_dir, 'model.cc')
command = 'xxd -i {} > {}'.format(model_filename, input_cc_file)
subprocess.call(command, shell=True)
# 4. VALIDATE
final_bytes = flatbuffer_utils.xxd_output_to_bytes(input_cc_file)
# Validate that the init
|
sdroege/cerbero
|
cerbero/utils/git.py
|
Python
|
lgpl-2.1
| 8,593
| 0.00128
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import shutil
from cerbero.config import Platform
from cerbero.utils import shell
# Clean-up LD environment to avoid library version mismatches while running
# the system git
CLEAN_ENV = os.environ.copy()
if CLEAN_ENV.has_key('LD_LIBRARY_PATH'):
CLEAN_ENV.pop('LD_LIBRARY_PATH')
GIT = 'git'
def init(git_dir):
'''
Initialize a git repository with 'git init'
@param git_dir: path of the git repository
@type git_dir: str
'''
shell.call('mkdir -p %s' % git_dir)
shell.call('%s init' % GIT, git_dir, env=CLEAN_ENV)
def clean(git_dir):
'''
    Clean a git repository with 'git clean -dfx'
@param git_dir: path of the git repository
@type git_dir: str
'''
return shell.call('%s clean -dfx' % GIT, git_dir, env=CLEAN_ENV)
def list_tags(git_dir, fail=True):
'''
List all tags
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
    @type fail: bool
@return: list of tag names (str)
@rtype: list
'''
tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
tags = tags.strip()
if tags:
tags = tags.split('\n')
return tags
def create_tag(git_dir, tagname, tagdescription, commit, fail=True):
'''
Create a tag using commit
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to create
@type tagname: str
@param tagdescription: the tag description
@type tagdescription: str
@param commit: the tag commit to use
@type commit: str
@param fail: raise an error if the command failed
    @type fail: bool
'''
shell.call('%s tag -s %s -m "%s" %s' %
(GIT, tagname, tagdescription, commit), git_dir, fail=fail,
env=CLEAN_ENV)
return shell.call('%s push origin %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def delete_tag(git_dir, tagname, fail=True):
'''
Delete a tag
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to delete
@type tagname: str
@param fail: raise an error if the command failed
    @type fail: bool
'''
return shell.call('%s tag -d %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def fetch(git_dir, fail=True):
'''
Fetch all refs from all the remotes
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
    @type fail: bool
'''
return shell.call('%s fetch --all' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
def submodules_update(git_dir, src_dir=None, fail=True):
'''
    Update submodules from a local directory
    @param git_dir: path of the git repository
    @type git_dir: str
    @param src_dir: path or base URI of the source directory
    @type src_dir: str
    @param fail: raise an error if the command failed
    @type fail: bool
'''
if src_dir:
config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
git_dir)
config_array = [s.split('=', 1) for s in config.split('\n')]
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.path'):
submodule = c[0][len('submodule.'):-len('.path')]
shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
(GIT, submodule, os.path.join(src_dir, c[1])),
git_dir)
shell.call("%s submodule init" % GIT, git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
shell.call("%s submodule update" % GIT, git_dir, fail=fail)
if src_dir:
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.url'):
shell.call("%s config --file=.gitmodules %s %s" %
(GIT, c[0], c[1]), git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
def checkout(git_dir, commit):
'''
Reset a git repository to a given commit
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
return shell.call('%s reset --hard %s' % (GIT, commit), git_dir,
env=CLEAN_ENV)
def get_hash(git_dir, commit):
'''
Get a commit hash from a valid commit.
Can be used to check if a commit exists
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to log
@type commit: str
'''
return shell.check_call('%s show -s --pretty=%%H %s' %
(GIT, commit), git_dir, env=CLEAN_ENV)
def local_checkout(git_dir, local_git_dir, commit):
'''
Clone a repository for a given commit in a different location
@param git_dir: destination path of the git repository
@type git_dir: str
@param local_git_dir: path of the source git repository
@type local_git_dir: str
@param commit: the commit to checkout
    @type commit: str
'''
    # reset to a commit in case it's the first checkout and the master branch is
# missing
branch_name = 'cerbero_build'
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s branch %s' % (GIT, branch_name), local_git_dir, fail=False,
env=CLEAN_ENV)
shell.call('%s checkout %s' % (GIT, branch_name), local_git_dir,
env=CLEAN_ENV)
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s clone %s -s -b %s .' % (GIT, local_git_dir,
branch_name),
git_dir, env=CLEAN_ENV)
submodules_update(git_dir, local_git_dir)
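
# Illustrative usage sketch (hypothetical paths and commit id, not from the
# original module): refresh a cached source checkout, then clone it into a build
# directory pinned to a known commit.
#
#   fetch('/var/cache/cerbero/sources/glib')
#   local_checkout('/work/glib', '/var/cache/cerbero/sources/glib', 'a1b2c3d')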
def add_remote(git_dir, name, url):
'''
Add a remote to a git repository
@param git_dir: destination path of the git repository
@type git_dir: str
@param name: name of the remote
@type name: str
@param url: url of the remote
@type url: str
'''
try:
shell.call('%s remote add %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
except:
shell.call('%s remote set-url %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
def check_line_endings(platform):
'''
Checks if on windows we don't use the automatic line endings conversion
as it breaks everything
@param platform: the host platform
@type platform: L{cerbero.config.Platform}
    @return: True if git config core.autocrlf is false
@rtype: bool
'''
if platform != Platform.WINDOWS:
return True
val = shell.check_call('%s config --get core.autocrlf' % GIT, env=CLEAN_ENV)
if ('false' in val.lower()):
return True
return False
def init_directory(git_dir):
'''
Initialize a git repository with the contents
of a directory
@param git_dir: path of the git repository
@type git_dir: str
'''
init(git_dir)
try:
shell.call('%s add --force -A .' % GIT, git_dir, env=CLEAN_ENV)
shell.call('%s commit -m "Initial commit" > /dev/null 2>&1' % GIT,
|
carolinux/QGIS
|
python/plugins/processing/algs/otb/OTBSpecific_XMLLoading.py
|
Python
|
gpl-2.0
| 12,848
| 0.001946
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
OTBUtils.py
---------------------
Date : 11-12-13
Copyright : (C) 2013 by CS Systemes d'information (CS SI)
Email : otb at c-s dot fr (CS SI)
    Contributors : Julien Malik (CS SI) - creation of otbspecific
                   Oscar Picas (CS SI) -
Alexia Mondot (CS SI) - split otbspecific into 2 files
add functions
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
When an OTB algorithm is run, this file allows adapting the user parameters to fit the OTB application.
Most of the following functions have the following form:
adaptNameOfTheOTBApplication(commands_list)
The command list is a list of all parameters of the given algorithm with all user values.
"""
__author__ = 'Julien Malik, Oscar Picas, Alexia Mondot'
__date__ = 'December 2013'
__copyright__ = '(C) 2013, CS Systemes d\'information (CS SI)'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
__version__ = "3.8"
import os
try:
import processing
except ImportError as e:
raise Exception("Processing must be installed and available in PYTHONPATH")
from processing.core.ProcessingConfig import ProcessingConfig
from OTBUtils import OTBUtils
def adaptBinaryMorphologicalOperation(commands_list):
val = commands_list[commands_list.index("-filter") + 1]
def replace_dilate(param, value):
if ".dilate" in str(param):
return param.replace("dilate", value)
else:
return param
import functools
com_list = map(functools.partial(replace_dilate, value=val), commands_list)
val = com_list[com_list.index("-structype.ball.xradius") + 1]
pos = com_list.index("-structype.ball.xradius") + 2
com_list.insert(pos, '-structype.ball.yradius')
com_list.insert(pos + 1, val)
return com_list
def adaptEdgeExtraction(commands_list):
"""
    Add filter.touzi.yradius with the same value as filter.touzi.xradius
"""
val = commands_list[commands_list.index("-filter") + 1]
if val == 'touzi':
bval = commands_list[commands_list.index("-filter.touzi.xradius") + 1]
pos = commands_list.index("-filter.touzi.xradius") + 2
commands_list.insert(pos, "-filter.touzi.yradius")
commands_list.insert(pos + 1, bval)
return commands_list
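
# Illustrative sketch (hypothetical values, not from the original file): for the
# touzi filter, a command list such as
#   ["-in", "img.tif", "-filter", "touzi", "-filter.touzi.xradius", "2", "-out", "edges.tif"]
# is rewritten to
#   ["-in", "img.tif", "-filter", "touzi", "-filter.touzi.xradius", "2",
#    "-filter.touzi.yradius", "2", "-out", "edges.tif"]
# i.e. the y radius is mirrored from the x radius before the OTB application runs.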
def adaptGrayScaleMorphologicalOperation(commands_list):
"""
    Add structype.ball.yradius with the same value as structype.ball.xradius (as it is a ball)
"""
val = commands_list[commands_list.index("-structype.ball.xradius") + 1]
pos = commands_list.index("-structype.ball.xradius") + 2
commands_list.insert(pos, "-structype.ball.yradius")
commands_list.insert(pos + 1, val)
return commands_list
def adaptSplitImage(commands_list):
"""
    By default, the extension of the output file is .file. Replace it with ".tif".
    If no extension is given, append ".tif" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".tif")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".tif" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptLSMSVectorization(commands_list):
"""
    By default, the extension of the output file is .file. Replace it with ".shp".
    If no extension is given, append ".shp" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".shp")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".shp" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptComputeImagesStatistics(commands_list):
"""
    By default, the extension of the output file is .file. Replace it with ".xml".
    If no extension is given, append ".xml" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".xml")
commands_list2.append(item)
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".xml" + commands_list[index + 1][-1]
return commands_list2
def adaptKmzExport(commands_list):
"""
    By default, the extension of the output file is .file. Replace it with ".kmz".
    If no extension is given, append ".kmz" to the filename.
Check geoid file, srtm folder and given elevation and manage arguments.
"""
adaptGeoidSrtm(commands_list)
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".kmz")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".kmz" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptColorMapping(commands_list):
"""
The output of this algorithm must be in uint8.
"""
indexInput = commands_list.index("-out")
commands_list[indexInput + 1] = commands_list[indexInput + 1] + " uint8"
return commands_list
def adaptStereoFramework(commands_list):
"""
    Remove the parameter and its user value instead of passing None.
Check geoid file, srtm folder and given elevation and manage arguments.
"""
commands_list2 = commands_list
adaptGeoidSrtm(commands_list2)
for item in commands_list:
if "None" in item:
index = commands_list2.index(item)
argumentToRemove = commands_list2[index - 1]
commands_list2.remove(item)
commands_list2.remove(argumentToRemove)
#commands_list2.append(item)
return commands_list2
def adaptComputeConfusionMatrix(commands_list):
"""
    By default, the extension of the output file is .file. Replace it with ".csv".
    If no extension is given, append ".csv" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".csv")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".csv" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptRadiometricIndices(commands_list):
"""
Replace indice nickname by its corresponding entry in the following dictionary :
indices = {"ndvi" : "Vegetation:NDVI", "tndvi" : "Vegetation:TNDVI", "rvi" : "Vegetation:RVI", "savi" : "Vegetation:SAVI",
"tsavi" : "Vegetation:TSAVI", "msavi" : "Vegetation:MSAVI", "msavi2" : "Vegetation:MSAVI2", "gemi" : "Vegetation:GEMI",
"ipvi" : "Vegetation:IPVI"
|
|
bertjwregeer/pyramid_keystone
|
pyramid_keystone/__init__.py
|
Python
|
isc
| 1,415
| 0.003534
|
from pyramid.exceptions import ConfigurationError
from pyramid.interfaces import ISessionFactory
from .settings import parse_settings
def includeme(config):
""" Set up standard configurator registrations.
|
Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
def ensure():
if config.registry.queryUtility(ISessionFactory) is None:
raise ConfigurationError('pyramid_keystone requires a registered'
' session factory. (use the set_session_factory method)')
config.action('keystone-configure', register)
# We need to make sure that this is executed after the default Pyramid
# actions, because otherwise our Session Factory may not exist yet
config.action(None, ensure, order=10)
# Allow the user to use our auth policy (recommended)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
# Add the keystone property to the request
config.add_request_method('.keystone.request_keystone', name='keystone',
property=True, reify=True)
|
skipmodea1/plugin.video.xbmctorrent
|
resources/site-packages/xbmctorrent/scrapers/btdigg.py
|
Python
|
gpl-3.0
| 2,061
| 0.001456
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from xbmctorrent import plugin
from xbmctorrent.scrapers import scraper
from xbmctorrent.ga import tracked
from xbmctorrent.caching import cached_route
from xbmctorrent.utils import ensure_fanart
from xbmctorrent.library import library_context
BASE_URL = plugin.get_setting("base_
|
btdigg")
HEADERS = {
"Referer": BASE_URL,
}
SORT_RELEVANCE = 0
SORT_POPULARITY = 1
SORT_ADDTIME = 2
SORT_SIZE = 3
SORT_FILES = 4
@scraper("BTDigg - DHT Search Engine", "%s/logo.png" % BASE_URL)
@plugin.route("/btdigg")
@ensure_fanart
@tracked
def btdigg_index():
plugin.redirect(plugin.url_for("btdigg_search"))
@plugin.route("/btdigg/search/<query>/<sort>/<page>")
@library_context
@ensure_fanart
@tracked
def btdigg_page(query, sort, page):
from bs4 import BeautifulSoup
from xbmctorrent.utils import url_get
html_data = url_get("%s/search" % BASE_URL, headers=HEADERS, params={
"order": sort,
"q": query,
"p": page,
})
soup = BeautifulSoup(html_data, "html5lib")
name_nodes = soup.findAll("td", "torrent_name")
attr_nodes = soup.findAll("table", "torrent_name_tbl")[1::2]
for name_node, attr_node in zip(name_nodes, attr_nodes):
attrs = attr_node.findAll("span", "attr_val")
title = "%s (%s, DLs:%s)" % (name_node.find("a").text, attrs[0].text, attrs[2].text)
yield {
"label": title,
"path": plugin.url_for("play", uri=attr_node.find("a")["href"]),
"is_playable": True,
}
yield {
"label": ">> Next page",
"path": plugin.url_for("btdigg_page", query=query, sort=sort, page=int(page) + 1),
"is_playable": False,
}
@plugin.route("/btdigg/search")
@tracked
def btdigg_search():
query = plugin.request.args_dict.pop("query", None)
if not query:
query = plugin.keyboard("", "XBMCtorrent - BTDigg - Search")
if query:
plugin.redirect(plugin.url_for("btdigg_page", query=query, sort=SORT_POPULARITY, page=0, **plugin.request.args_dict))
|
Azure/azure-sdk-for-python
|
sdk/rdbms/azure-mgmt-rdbms/azure/mgmt/rdbms/mariadb/aio/operations/_operations.py
|
Python
|
mit
| 3,780
| 0.004233
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
"""Operations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.rdbms.mariadb.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def list(
self,
**kwargs: Any
) -> "_models.OperationListResult":
"""Lists all of the available REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationListResult, or the result of cls(response)
:rtype: ~azure.mgmt.rdbms.mariadb.models.OperationListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OperationListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/providers/Microsoft.DBForMariaDB/operations'} # type: ignore
|
Daniel-Brosnan-Blazquez/DIT-100
|
examples/IMU/acc/raw_data.py
|
Python
|
gpl-3.0
| 461
| 0.023861
|
# Program to print raw data of the accelerometer device
import sys
sys.path.append ("../../../lib")
import accel
import time
import numpy
import os
A = accel.Init ()
while(1):
time.sleep(0.25)
os.system ("clear")
print "\n\n\n\n"
(status, x) = accel.get_x (A)
(status, y) = accel.get_y (A)
(status, z) = accel.get_z (A)
print("\t{:7.2f} {:7.2f} {:7.2f}".format(x, y, z))
print "\t|A| = %6.3F" % numpy.sqrt (x*x + y*y + z*z)
|
tboyce021/home-assistant
|
homeassistant/components/image_processing/__init__.py
|
Python
|
apache-2.0
| 6,082
| 0.000658
|
"""Provides functionality to interact with image processing services."""
import asyncio
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME, CONF_ENTITY_ID, CONF_NAME
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import make_entity_service_schema
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.util.async_ import run_callback_threadsafe
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "image_processing"
SCAN_INTERVAL = timedelta(seconds=10)
DEVICE_CLASSES = [
"alpr", # Automatic license plate recognition
"face", # Face
"ocr", # OCR
]
SERVICE_SCAN = "scan"
EVENT_DETECT_FACE = "image_processing.detect_face"
ATTR_AGE = "age"
ATTR_CONFIDENCE = "confidence"
ATTR_FACES = "faces"
ATTR_GENDER = "gender"
ATTR_GLASSES = "glasses"
ATTR_MOTION = "motion"
ATTR_TOTAL_FACES = "total_faces"
CONF_SOURCE = "source"
CONF_CONFIDENCE = "confidence"
DEFAULT_TIMEOUT = 10
DEFAULT_CONFIDENCE = 80
SOURCE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ENTITY_ID): cv.entity_domain("camera"),
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_SOURCE): vol.All(cv.ensure_list, [SOURCE_SCHEMA]),
vol.Optional(CONF_CONFIDENCE, default=DEFAULT_CONFIDENCE): vol.All(
vol.Coerce(float), vol.Range(min=0, max=100)
),
}
)
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE.extend(PLATFORM_SCHEMA.schema)
async def async_setup(hass, config):
"""Set up the image processing."""
component = EntityComponent(_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
await component.async_setup(config)
async def async_scan_service(service):
"""Service handler for scan."""
image_entities = await component.async_extract_from_service(service)
update_tasks = []
for entity in image_entities:
entity.async_set_context(service.context)
update_tasks.append(entity.async_update_ha_state(True))
if update_tasks:
await asyncio.wait(update_tasks)
hass.services.async_register(
DOMAIN, SERVICE_SCAN, async_scan_service, schema=make_entity_service_schema({})
)
return True
class ImageProcessingEntity(Entity):
"""Base entity class for image processing."""
timeout = DEFAULT_TIMEOUT
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return None
@property
def confidence(self):
"""Return minimum confidence for do some things."""
        return None
def process_image(self, image):
"""Process image."""
raise NotImplementedError()
    async def async_process_image(self, image):
"""Process image."""
return await self.hass.async_add_executor_job(self.process_image, image)
async def async_update(self):
"""Update image and process it.
This method is a coroutine.
"""
camera = self.hass.components.camera
image = None
try:
image = await camera.async_get_image(
self.camera_entity, timeout=self.timeout
)
except HomeAssistantError as err:
_LOGGER.error("Error on receive image from entity: %s", err)
return
# process image data
await self.async_process_image(image.content)
class ImageProcessingFaceEntity(ImageProcessingEntity):
"""Base entity class for face image processing."""
def __init__(self):
"""Initialize base face identify/verify entity."""
self.faces = []
self.total_faces = 0
@property
def state(self):
"""Return the state of the entity."""
confidence = 0
state = None
# No confidence support
if not self.confidence:
return self.total_faces
# Search high confidence
for face in self.faces:
if ATTR_CONFIDENCE not in face:
continue
f_co = face[ATTR_CONFIDENCE]
if f_co > confidence:
confidence = f_co
for attr in [ATTR_NAME, ATTR_MOTION]:
if attr in face:
state = face[attr]
break
return state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return "face"
@property
def state_attributes(self):
"""Return device specific state attributes."""
return {ATTR_FACES: self.faces, ATTR_TOTAL_FACES: self.total_faces}
def process_faces(self, faces, total):
"""Send event with detected faces and store data."""
run_callback_threadsafe(
self.hass.loop, self.async_process_faces, faces, total
).result()
@callback
def async_process_faces(self, faces, total):
"""Send event with detected faces and store data.
        Faces are passed as a list of dicts in the following format:
[
{
ATTR_CONFIDENCE: 80,
ATTR_NAME: 'Name',
ATTR_AGE: 12.0,
ATTR_GENDER: 'man',
ATTR_MOTION: 'smile',
ATTR_GLASSES: 'sunglasses'
},
]
This method must be run in the event loop.
"""
# Send events
for face in faces:
if ATTR_CONFIDENCE in face and self.confidence:
if face[ATTR_CONFIDENCE] < self.confidence:
continue
face.update({ATTR_ENTITY_ID: self.entity_id})
self.hass.async_add_job(self.hass.bus.async_fire, EVENT_DETECT_FACE, face)
# Update entity store
self.faces = faces
self.total_faces = total
|
TangXT/GreatCatMOOC
|
common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py
|
Python
|
agpl-3.0
| 49,426
| 0.00346
|
import json
import logging
import traceback
from lxml import etree
from xmodule.timeinfo import TimeInfo
from xmodule.capa_module import ComplexEncoder
from xmodule.progress import Progress
from xmodule.stringify import stringify_children
from xmodule.open_ended_grading_classes import self_assessment_module
from xmodule.open_ended_grading_classes import open_ended_module
from functools import partial
from .combined_open_ended_rubric import CombinedOpenEndedRubric, GRADER_TYPE_IMAGE_DICT, HUMAN_GRADER_TYPE, LEGEND_LIST
from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, MockPeerGradingService, GradingServiceError
from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild
log = logging.getLogger("edx.courseware")
# Set the default number of max attempts. Should be 1 for production
# Set higher for debugging/testing
# attempts specified in xml definition overrides this.
MAX_ATTEMPTS = 1
# The highest score allowed for the overall xmodule and for each rubric point
MAX_SCORE_ALLOWED = 50
# If true, default behavior is to score module as a practice problem. Otherwise, no grade at all is shown in progress
# Metadata overrides this.
IS_SCORED = False
# If true, then default behavior is to require a file upload or pasted link from a student for this problem.
# Metadata overrides this.
ACCEPT_FILE_UPLOAD = False
# Contains all reasonable bool and case combinations of True
TRUE_DICT = ["True", True, "TRUE", "true"]
HUMAN_TASK_TYPE = {
'selfassessment': "Self",
'openended': "edX",
'ml_grading.conf': "AI",
'peer_grading.conf': "Peer",
}
HUMAN_STATES = {
    'initial': "Not started.",
'assessing': "Being scored.",
'intermediate_done': "Scoring finished.",
'done': "Complete.",
}
# Default value that controls whether or not to skip basic spelling checks in the controller
# Metadata overrides this
SKIP_BASIC_CHECKS = False
class CombinedOpenEndedV1Module():
"""
This is a module that encapsulates all open ended grading (self assessment, peer assessment, etc).
    It transitions between problems, and supports arbitrary ordering.
Each combined open ended module contains one or multiple "child" modules.
Child modules track their own state, and can transition between states. They also implement get_html and
handle_ajax.
    The combined open ended module transitions between child modules as appropriate, tracks its own state, and passes
ajax requests from the browser to the child module or handles them itself (in the cases of reset and next problem)
ajax actions implemented by all children are:
'save_answer' -- Saves the student answer
'save_assessment' -- Saves the student assessment (or external grader assessment)
'save_post_assessment' -- saves a post assessment (hint, feedback on feedback, etc)
ajax actions implemented by combined open ended module are:
    'reset' -- resets the whole combined open ended module and returns to the first child module
'next_problem' -- moves to the next child module
Types of children. Task is synonymous with child module, so each combined open ended module
incorporates multiple children (tasks):
openendedmodule
selfassessmentmodule
"""
STATE_VERSION = 1
# states
INITIAL = 'initial'
ASSESSING = 'assessing'
INTERMEDIATE_DONE = 'intermediate_done'
DONE = 'done'
# Where the templates live for this problem
TEMPLATE_DIR = "combinedopenended"
def __init__(self, system, location, definition, descriptor,
instance_state=None, shared_state=None, metadata=None, static_data=None, **kwargs):
"""
Definition file should have one or many task blocks, a rubric block, and a prompt block. See DEFAULT_DATA in combined_open_ended_module for a sample.
"""
self.instance_state = instance_state
self.display_name = instance_state.get('display_name', "Open Ended")
# We need to set the location here so the child modules can use it
system.set('location', location)
self.system = system
# Tells the system which xml definition to load
self.current_task_number = instance_state.get('current_task_number', 0)
# This loads the states of the individual children
self.task_states = instance_state.get('task_states', [])
#This gets any old task states that have been persisted after the instructor changed the tasks.
self.old_task_states = instance_state.get('old_task_states', [])
# Overall state of the combined open ended module
self.state = instance_state.get('state', self.INITIAL)
self.student_attempts = instance_state.get('student_attempts', 0)
self.weight = instance_state.get('weight', 1)
# Allow reset is true if student has failed the criteria to move to the next child task
self.ready_to_reset = instance_state.get('ready_to_reset', False)
self.max_attempts = instance_state.get('max_attempts', MAX_ATTEMPTS)
self.is_scored = instance_state.get('graded', IS_SCORED) in TRUE_DICT
self.accept_file_upload = instance_state.get('accept_file_upload', ACCEPT_FILE_UPLOAD) in TRUE_DICT
self.skip_basic_checks = instance_state.get('skip_spelling_checks', SKIP_BASIC_CHECKS) in TRUE_DICT
if system.open_ended_grading_interface:
self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system)
else:
self.peer_gs = MockPeerGradingService()
self.required_peer_grading = instance_state.get('required_peer_grading', 3)
        self.peer_grader_count = instance_state.get('peer_grader_count', 3)
self.min_to_calibrate = instance_state.get('min_to_calibrate', 3)
self.max_to_calibrate = instance_state.get('max_to_calibrate', 6)
self.peer_grade_finished_submissions_when_none_pending = instance_state.get(
'peer_grade_finished_submissions_when_none_pending', False
)
        due_date = instance_state.get('due', None)
grace_period_string = instance_state.get('graceperiod', None)
try:
self.timeinfo = TimeInfo(due_date, grace_period_string)
except Exception:
log.error("Error parsing due date information in location {0}".format(location))
raise
self.display_due_date = self.timeinfo.display_due_date
self.rubric_renderer = CombinedOpenEndedRubric(system, True)
rubric_string = stringify_children(definition['rubric'])
self._max_score = self.rubric_renderer.check_if_rubric_is_parseable(rubric_string, location, MAX_SCORE_ALLOWED)
# Static data is passed to the child modules to render
self.static_data = {
'max_score': self._max_score,
'max_attempts': self.max_attempts,
'prompt': definition['prompt'],
'rubric': definition['rubric'],
'display_name': self.display_name,
'accept_file_upload': self.accept_file_upload,
'close_date': self.timeinfo.close_date,
's3_interface': self.system.s3_interface,
'skip_basic_checks': self.skip_basic_checks,
'control': {
'required_peer_grading': self.required_peer_grading,
'peer_grader_count': self.peer_grader_count,
'min_to_calibrate': self.min_to_calibrate,
'max_to_calibrate': self.max_to_calibrate,
'peer_grade_finished_submissions_when_none_pending': (
self.peer_grade_finished_submissions_when_none_pending
),
}
}
self.task_xml = definition['task_xml']
self.location = location
self.fix_invalid_state()
self.setup_next_task()
def validate_task_states(self, tasks_xml, task_states):
"""
Check whether the provided task_states are valid for the supplied task_xml.
Returns a list of messages indicating what is invalid about the state.
If the list is empty, then the state is valid
bukun/TorCMS | tester/test_model/test_entity.py | Python | mit | 2,865 | 0.002115
# -*- coding:utf-8 -*-
from torcms.core import tools
from torcms.model.entity_model import MEntity
class TestMEntity():
def setup(self):
        print('the setup method runs before every test case in this class')
self.uid = tools.get_uuid()
self.path = '/static/123123'
def test_create_entity(self):
uid = self.uid
path = self.path
desc = 'create entity'
kind = 'f'
tt = MEntity.create_entity(uid, path, desc, kind)
assert tt == True
self.tearDown()
def add_message(self):
desc = 'create entity'
kind = 'f'
        MEntity.create_entity(self.uid, self.path, desc, kind)
def test_query_recent(self):
a = MEntity.get_by_uid(self.uid)
assert a == None
self.add_message()
a = MEntity.get_by_uid(self.uid)
assert a
self.tearDown()
def test_query_all(self):
self.add_message()
a = MEntity.query_all()
tf = False
for i in a:
if i.uid == self.uid:
tf = True
assert tf
self.tearDown()
def test_get_by_kind(self):
self.add_message()
a = MEntity.get_by_kind(kind='f')
tf = False
for i in a:
if i.uid == self.uid:
tf = True
assert tf
self.tearDown()
def test_get_all_pager(self):
a = MEntity.get_all_pager()
tf = True
for i in a:
if i.uid == self.uid:
tf = False
assert tf
self.add_message()
a = MEntity.get_all_pager()
tf = False
for i in a:
if i.uid == self.uid:
tf = True
assert tf
self.tearDown()
def test_get_id_by_impath(self):
self.add_message()
path = self.path
a = MEntity.get_id_by_impath(path)
assert a.uid == self.uid
self.tearDown()
def test_total_number(self):
b = MEntity.total_number()
self.add_message()
a = MEntity.total_number()
assert b + 1 <= a
self.tearDown()
def test_delete_by_path(self):
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.add_message()
tf = MEntity.get_by_uid(self.uid)
assert tf
MEntity.delete_by_path(self.path)
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.tearDown()
def test_delete(self):
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.add_message()
tf = MEntity.delete(self.uid)
assert tf
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.tearDown()
def tearDown(self):
print("function teardown")
tt = MEntity.get_by_uid(self.uid)
if tt:
MEntity.delete(tt.uid)
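# Hypothetical invocation (not part of the test module above): these tests are
# written in pytest style (per-test setup plus plain asserts), so they would
# typically be run with something like:
#   pytest tester/test_model/test_entity.py -v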
jeremiedecock/snippets | python/pyqt/pyqt4/fullscreen.py | Python | mit | 2,472 | 0.004049
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: http://web.archive.org/web/20120426224840/http://zetcode.com/tutorials/pyqt4/widgets2
import sys
from PyQt4 import QtGui, QtCore
class Window(QtGui.QWidget):
def __init__(self):
super(Window, self).__init__()
# Create a label
label = QtGui.QLabel("Press Esc to quit.")
# Create the layout
vbox = QtGui.QVBoxLayout()
vbox.addWidget(label)
# Set the layout
self.setLayout(vbox)
self.resize(250, 150)
self.setWindowTitle('Hello')
#self.show()
self.showFullScreen() # <- Full screen
def keyPressEvent(self, e):
if e.key() == QtCore.Qt.Key_Escape:
self.close()
def main():
"""Main function"""
app = QtGui.QApplication(sys.argv)
# The default constructor has no parent.
# A widget with no parent is a window.
window = Window()
# The mainloop of the application. The event handling starts from this point.
    # The exec_() method has a trailing underscore because exec is a Python keyword.
exit_code = app.exec_()
# The sys.exit() method ensures a clean exit.
# The environment will be informed, how the application ended.
sys.exit(exit_code)
if __name__ == '__main__':
main()
n2o/guhema | products/migrations/0057_auto_20160118_2025.py | Python | mit | 2,432 | 0.004527
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-18 20:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0056_auto_20160118_2012'),
]
operations = [
migrations.AddField(
model_name='bandsawbladeindicator',
name='AE',
field=models.CharField(blank=True, max_length=255, verbose_name='Ä'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='F',
field=models.CharField(blank=True, max_length=255, verbose_name='F'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='RP',
field=models.CharField(blank=True, max_length=255, verbose_name='RP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='S',
field=models.CharField(blank=True, max_length=255, verbose_name='S'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='SP',
field=models.CharField(blank=True, max_length=255, verbose_name='SP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='TP',
field=models.CharField(blank=True, max_length=255, verbose_name='TP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='UE',
field=models.CharField(blank=True, max_length=255, verbose_name='Ü'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='UP',
field=models.CharField(blank=True, max_length=255, verbose_name='UP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='VP',
field=models.CharField(blank=True, max_length=255, verbose_name='VP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='WP',
field=models.CharField(blank=True, max_length=255, verbose_name='WP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='star_p',
field=models.CharField(blank=True, max_length=255, verbose_name='*P'),
),
]
croxis/kmr | app/user/__init__.py | Python | mit | 107 | 0.018692
__author__ = 'croxis'
from flask import Blueprint
user = Blueprint('user', __name__)
from . import views
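# Hypothetical registration sketch (not part of this package): the `user`
# blueprint above is normally attached to the application elsewhere, e.g. in
# an app factory.  The factory name and url_prefix are assumptions.
#
#   from flask import Flask
#   from app.user import user
#
#   def create_app():
#       app = Flask(__name__)
#       app.register_blueprint(user, url_prefix='/user')
#       return app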
effa/flocs | practice/services/__init__.py | Python | gpl-2.0 | 53 | 0.018868
"""Service lay
|
er (domain model) of practice app
"""
eikiu/tdf-actividades | _admin-scripts/jsontocsv(activities-name).py | Python | cc0-1.0 | 814 | 0.045455
'''
run where the files are
'''
import json
import os
final_file = "tipo,nombre,nombre_alt\n"
for root, subFolders, files in os.walk(os.getcwd()):
for filename in files:
filePath = os.path.join(root, filename)
if not filePath.endswith(".json") or filename.startswith("_"):
continue
print (" processing " + filePath)
current_text = ""
        with open(filePath, 'r', encoding='utf-8-sig') as readme:
            current_text = readme.read()
tmp_file = json.loads(current_text)
nombre_alt = "\"\""
if "nombre_alt" in tmp_file:
nombre_alt = tmp_file["nombre_alt"]
final_file += tmp_file["tipo"] + "," + tmp_file["nombre"] + "," + nombre_alt + "\n"
with open(os.path.join(os.getcwd(),"actividades_merged.csv"), 'w', encoding='utf-8-sig') as saveme:
saveme.writelines(final_file)
LowResourceLanguages/hltdi-l3 | l3xdg/graphics.py | Python | gpl-3.0 | 10,943 | 0.004132
from tkinter import *
# from tkinter.font import *
import math
CW = 800
CH = 600
SENT_H = 50
Y_OFF = 10
X_OFF = 20
DIM_GAP = 10
DIM_OFF = 70
class Multigraph(Canvas):
"""Canvas for displaying the multigraph for a sentence."""
node_rad = 3
def __init__(self, parent, width=CW, height=CH, nnodes=9,
dims=['En LP', 'En ID', 'Sem', 'Am ID', 'Am LP'],
# dims=['En ID', 'Sem', 'Am ID'],
translation=True):
Canvas.__init__(self, parent, width=width, height=height)
# self.draw_arrow(10, 50, 40, 20, 60, 10, 80, 10, 100, 20, 130, 50)
self.parent = parent
self.width = width
self.height = height
self.translation = translation
self.dim_labels = dims
# Calculate the width, height, and positions of the dimensions
self.get_dim_dims()
# Figure node coordinates
node_dist = self.dim_width / nnodes
node_offsets = [node_dist * (i + .5) for i in range(nnodes)]
## for index, off in enumerate(node_offsets):
## dim1.make_node(index, off,
## filled = (index % 3 != 0),
## eos = (index == nnodes - 1))
## dim2.make_node(index, off,
## eos = (index == nnodes - 1))
## dim3.make_node(index, off,
## eos = (index == nnodes - 1))
self.dims = []
for label, x, y in zip(dims, self.dim_x, self.dim_y):
d = Dimension(self, coords=(x, y), label=label,
width=self.dim_width, height=self.dim_height)
self.dims.append(d)
d.draw()
for index, off in enumerate(node_offsets):
d.make_node(index, off, eos = (index == nnodes - 1))
self.dims[0].make_arc(8, 0, tp='root')
self.dims[0].make_arc(1, 3, tp='sbj')
self.dims[0].make_arc(7, 4, tp='mod')
self.dims[0].make_arc(3, 4, tp='rel')
self.dims[0].make_arc(0, 5, tp='obj')
## dim1.make_arc(8, 1, tp='sbj')
## dim1.make_arc(1, 7, tp='obj', color='gray')
## self.dims = [dim1, dim2, dim3]
self.node_connections = []
self.connect_nodes()
self.sentences = []
in_sent = Sentence(self, ['the', 'woman', 'cleaned', 'the', 'house', 'in', 'the', 'city', '.'],
coords=(self.dim_x[0], 580),
width=self.dim_width)
in_sent.draw()
self.sentences.append(in_sent)
## self.connect_sent(in_sent, dim1)
out_sent = Sentence(self, ["እከተማዋ", "ያለውን", "ቤት", "ሴቷ", "ጠረገችው", "።"],
node_indices=[7, 5, 4, 1, 2, 8],
coords=(self.dim_x[-1], 20),
width=self.dim_width)
out_sent.draw()
self.sentences.append(out_sent)
## self.connect_sent(out_sent, dim3)
# self.draw_arrow(10, 80, 80, 20, 150, 80)
# self.draw_arc_label((80, 50), 'sbj')
def get_dim_dims(self):
# Calculate the width, height, and positions of the dimensions
w = self.width - 2 * X_OFF
h = self.height - SENT_H - 2 * Y_OFF
if self.translation:
h -= SENT_H
ndims = len(self.dim_labels)
# Width of dimensions
x_off = DIM_OFF * (ndims - 1)
w_sum = w - x_off
w1 = w_sum # / ndims
# print('Dim w {}'.format(w1))
# Height of dimensions
y_off = DIM_GAP * (ndims - 1)
h_sum = h - y_off
h1 = h_sum / ndims
# print('Dim h {}'.format(h1))
# Figure out the x coordinates of dimensions
x_coords = []
x = X_OFF
for d in self.dim_labels:
x_coords.append(x)
x += DIM_OFF
# Figure out the y coordinates of dimensions
y_coords = []
y = self.height - SENT_H - Y_OFF - h1
for d in self.dim_labels:
y_coords.append(y)
y -= DIM_GAP + h1
self.dim_width = w1
self.dim_height = h1
self.dim_x = x_coords
self.dim_y = y_coords
def connect_nodes(self):
for index, dim in enumerate(self.dims[:-1]):
next_dim = self.dims[index + 1]
for node1, node2 in zip(dim.nodes, next_dim.nodes):
cx1, cy1 = node1.center
cx2, cy2 = node2.center
c_id = self.create_line(cx1, cy1, cx2, cy2,
dash=(3,3))
self.node_connections.append(c_id)
def connect_sent(self, sent, dim):
dim_nodes = dim.nodes
nodes = [dim_nodes[index] for index in sent.node_indices]
for word, node in zip(sent.ids, nodes):
wx, wy = self.coords(word)
nx, ny = node.center
self.create_line(wx, wy, nx, ny, dash=(1, 3))
class Dimension:
"""Graphical representation of an XDG dimension."""
Y_OFF = 15
def __init__(self, canvas, coords=(50, 50), width=500,
height=160, color='black', label='ID'):
self.canvas = canvas
self.color = color
self.label = label
self.coords = coords
self.width = width
self.height = height
self.h2w = self.height / self.width
# print('h2w {}'.format(self.h2w))
self.nodes = []
def draw(self):
c0, c1 = self.coords
self.id = self.canvas.create_rectangle(c0, c1, c0 + self.width, c1 + self.height)
if self.label:
self.make_label()
    def make_label(self):
        x = self.coords[0] + 25
y = self.coords[1] + 10
self.label_id = self.canvas.create_text(x, y, text=self.label,
font = ("Helvetica", "14"))
def make_node(self, index, offset, eos=False,
filled=True):
node = Node(self.canvas,
center=(self.coords[0] + offset,
self.coords[1] + self.height - self.Y_OFF),
filled=filled,
index=index,
eos=eos)
self.nodes.append(node)
node.draw()
def make_arc(self, i_head, i_dep, tp='', color='black'):
head = self.nodes[i_head]
dep = self.nodes[i_dep]
right = i_dep > i_head
start = head.get_upper_right() if right else head.get_upper_left()
head.source
end = dep.top
# dep.get_upper_left() if right else dep.get_upper_right()
arc = Arc(self.canvas, head, dep, start=start, end=end,
tp=tp, color=color, h2w=1.6 * self.h2w)
arc.draw()
class Node:
"""Graphical representation of an XDG node."""
R = 7
CORNER_OFF = 7 * math.cos(math.radians(45))
def __init__(self, canvas, center=(100, 100), index=0, filled=True, eos=False):
self.canvas = canvas
self.center = center
self.filled = filled
self.index = index
self.eos = eos
self.arcs = []
# upper-left, upper-right,
# lower-right, lower-left
cx, cy = self.center
rad = 2 if self.eos else self.CORNER_OFF
self.corners = [(cx-rad, cy-rad),
(cx+rad , cy-rad ),
(cx+rad , cy+rad ),
(cx-rad , cy+rad )]
self.top = (cx, cy-rad)
self.source = center if self.eos else (cx, cy-rad)
def get_upper_left(self):
return self.corners[0]
def get_upper_right(self):
return self.corners[1]
def draw(self):
x1, y1 = self.corners[0]
x2, y2 = self.corners[2]
if self.eos:
self.id = self.canvas.create_oval(x1, y1, x2, y2, fill='black')
else:
self.id = self.canvas.create_oval(x1, y1, x2, y2,
fill='black' if self.filled else '')
class Arc:
"""Graphical representation of an XDG arc."""
def __init__(self, canvas, head, dep, tp='', color='black',
start=(0,0), end=(100,100), h2w=.625):
self.canvas = canvas
self.head = head
self.de
CarlosCebrian/RedesII_Ejercicios | Practica2_RedesII/chatudp.py | Python | gpl-2.0 | 1,117 | 0.014324
#!/usr/bin/env python3
from socket import *
import _thread
import sys
def enviardatos(sock):
data = input()
enviar = data.encode()
    sock.sendto(enviar,('localhost',23456))
if data == "bye":
print("Closing Client\n")
sock.close()
return 0
_thread.start_new_thread(recibirdatos,(('localhost',23456),sock))
while 1:
data = input()
enviar = data.encode()
sock.sendto(enviar,('localhost',23456))
if data == "bye":
print("Closing Client\n")
sock.close()
break
else:
if data == "bye":
print("Closing client\n")
sock.close()
sys.exit(0)
def recibirdatos(tupla,sock):
while 1:
try:
msg,server = sock.recvfrom(1024)
except OSError:
sys.exit(0)
data = msg.decode()
print(data)
if data == "bye":
print("Closing client\n")
sock.close()
def main():
sock = socket(AF_INET,SOCK_DGRAM)
enviardatos(sock)
main()
TheSighing/climber | climber/__init__.py | Python | mit | 7,197 | 0.000973
__version__ = '0.1.4'
import requests
import re
import json
from bs4 import BeautifulSoup
# TODO: def see_also() => makes a whole set of related things to the topic
# chosen
# TODO:
# def chossy() => parse disambiguation pages can be called
# when the page reached durign climb or
#                   when the page reached during climb or
# one that cannot be parsed in this custiomary
#                   one that cannot be parsed in this customary
# TODO:
# def flash() => grab directly a section of the overall page when supplied
# a set of context levels and/or a bit of text that it can match
# climb links should build based on a depth choice and and builds graph of
# links to help determine later searches
# TODO: add comments to this
# TODO: bolts should also allow for optional images.
# TODO:
# climb should have options (object) passed in to allow it to include images
# in route or to include graph of links with given
# level of depth
# TODO:
# You are creating context and subcontexts, text, links => Bolt() object
# and loading into an Array building structure to the wiki itself
# (or any large text based information page) that can be accessed
# parsed as such. Later should incorporate other checks to find titles and
# context that are more universal.
# TODO:
# Should also work with any amount of headers
# fix the h1 - ?? checks so they are extensible rather than hard coded
# this so it matches the h# set up and loops to
# decide on depth or just inputs the number found
# as the hash for the entry (headers define amounts of context)
# TODO: create overall function that sanitizes the strings for printing them
# "pretty"
# TODO: Replace complex words with definitions you find in the underlying link
# or using dictionary.
# TODO: Build some test harnesses for API and Restful-API.
# TODO: Return related topics and surrounding topics using the wiki's dropdowns,
# as part of climb or as separate API function.
def check_text(text):
if(text != "Contents" and text != ""):
return text
def chossy():
return {"error": "This is a Disambiguation Page...\n\n"}
class Bolt():
def __init__(self, text):
self.contexts = {}
self.text = text
self.images = None
# Add context to bolt.
def belay(self, context, level=None):
if(not level):
self.contexts = {}
self.contexts["one"] = context
else:
self.contexts[level] = context
# Encodes bolt for json formatting.
def encode(self):
return {"text": self.text, "contexts": self.contexts}
def __str__(self):
temp = "Text: " + self.text
temp += "\nContext:"
for key in self.contexts:
temp += "\nlvl" + key + ": " + self.contexts[key]
return temp
class Climber():
# Constructs route of entire wiki page based on topic chosen.
def __init__(self, options=None):
        self.options = {} if not options else options
def climb(self, topic):
self.depth = self.options["depth"] if "depth" in self.options.keys() else None
self.summary = self.options["summary"] if "summary" in self.options.keys() else None
if(topic is None):
return None
else:
url = 'http://en.wikipedia.org/?title=%s' % topic
content = requests.get(url)
self.soup = BeautifulSoup(content.text, "html.parser")
check = self.soup.find_all(id="disambigbox")
return self.get_scaffold(check)
# Extracts images given a topic.
def climb_images(self, topic=None):
images = []
if(topic is None):
check = self.soup.find_all(id="disambigbox")
for image in self.soup.findAll("img"):
images.append("https://" + image["src"])
else:
url = 'http://en.wikipedia.org/?title=%s' % topic
content = requests.get(url)
self.soup = BeautifulSoup(content.text, "html.parser")
check = self.soup.find_all(id="disambigbox")
if(check):
for image in self.soup.findAll("img"):
images.append("https://" + image["src"])
else:
return chossy()
return json.dumps(images)
def get_scaffold(self, check):
# TODO: WIll cause a toggle based on passed type in which case the
# include summary scaffold will be used but no matter what the depth
# will be passed to scaffold defaulting to 0
if(not len(check)):
images_list = None
wiki_parsed = self.scaffold_basic(self.summary, self.depth)
if("images" in self.options.keys()):
images_list = self.climb_images()
if(images_list is None):
return json.dumps({"data": wiki_parsed})
else:
return json.dumps({"data": wiki_parsed,
"images": images_list})
else:
# TODO: WIll return all the other options to search from
# disambiguation page
return chossy()
def scaffold_basic(self, summary, depth):
selected = []
h = ["", "", "", ""]
for section in self.soup.find_all(["h1", "h2", "h3", "h4", "p"]):
try:
if(section.name == "h1"):
text = section.get_text()
if(check_text(text)):
h[0] = text
elif(section.name == "h2"):
text = section.get_text()
if(check_text(text)):
h[1] = text
h[2] = ""
h[3] = ""
elif(section.name == "h3"):
text = section.get_text()
if(check_text(text)):
h[2] = text
h[3] = ""
elif(section.name == "h4"):
text = section.get_text()
if(check_text(text)):
h[3] = text
elif(section.name == "p"):
# Add text to the bolt.
string = section.get_text()
if(string != ""):
string = re.sub(r"\[\d+\]", "", string)
bolt = Bolt(string)
bolt.belay(h[0], "one")
bolt.belay(h[1], "two")
bolt.belay(h[2], "three")
bolt.belay(h[3], "four")
selected.append(bolt.encode())
else:
continue
pass
except Exception as e:
print e
continue
return selected
# Builds map of links with given search depth option as parameter.
# def climb_links(self, topic, options):
# if(not len(check)):
# link_query = 'div#mw-content-text a'
# links = [a.get('href') for a in self.soup.select(link_query)]
# return json.dumps(links)
# else:
# return chossy()
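# Hypothetical usage sketch (not part of the module above); the topic string is
# an assumption.  climb() returns a JSON string on success, or the dict built
# by chossy() when a disambiguation page is hit.
if __name__ == '__main__':
    climber = Climber()
    result = climber.climb("Rock_climbing")
    if isinstance(result, str):
        bolts = json.loads(result)["data"]
        print("parsed %d paragraphs" % len(bolts))
    else:
        print(result.get("error"))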
matthiaskramm/corepy | examples/spu_interspu.py | Python | bsd-3-clause | 5,421 | 0.009592
# Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import corepy.lib.extarray as extarray
import corepy.arch.spu.isa as spu
import corepy.arch.spu.platform as env
import corepy.arch.spu.lib.dma as dma
from corepy.arch.spu.lib.util import load_word
# This example program demonstrates sending mailbox messages from one SPU to
# another. In order for an SPU to send messages/signals to another SPU, the
# source SPU must know that base address of the memory-mapped problem state area
# of the target SPU. However the addresses are not known until the SPUs have
# been started, so the addresses must be passed to the SPUs by the PPU. The PPU
# builds one array of the addresses for the SPUs, then gives the address of this
# array to each SPU to DMA into local store and load into registers.
# A race condition is possible if mailboxes are used to send the address of the
# array. What can happen is that an earlier SPU gets the message, loads the
# array into registers, and sends a mailbox message to a following SPU, before
# that following SPU receives the initial array address message from the PPU.
# The solution used in this example program is to use signal to send the array
# address instead of a mailbox.
if __name__ == '__main__':
SPUS = 6
proc = env.Processor()
prgms = [env.Program() for i in xrange(0, SPUS)]
for rank, prgm in enumerate(prgms):
code = prgm.get_stream()
spu.set_active_code(code)
# First all the SPUs should start up and wait for an mbox message.
# The PPU will collect all the PS map addresses into an array for the SPUs.
r_psinfo_mma = dma.spu_read_signal1(code)
# DMA the PS info into local store
dma.mem_get(code, 0x0, r_psinfo_mma, SPUS * 4 * 4, 17)
dma.mem_complete(code, 17)
# Load the PS info into some registers.. one register per address
r_psinfo = prgm.acquire_registers(SPUS)
for i in xrange(0, SPUS):
spu.lqd(r_psinfo[i], code.r_zero, i)
# Initialize a data register with this rank and store it at LSA 0
r_send = prgm.acquire_register()
load_word(code, r_send, rank)
spu.stqd(r_send, code.r_zero, 0)
prgm.release_register(r_send)
# Send our rank as a mailbox message to the rank after this rank
dma.mem_write_in_mbox(code, r_psinfo[(rank + 1) % SPUS], 12, 18)
dma.mem_complete(code, 18)
# Receive the message the preceding rank sent
r_recv = dma.spu_read_in_mbox(code)
# Write the value out the interrupt mailbox for the PPU
dma.spu_write_out_intr_mbox(code, r_recv)
code.prgm.release_register(r_recv)
prgm.add(code)
# Start the SPUs
id = [proc.execute(prgms[i], async = True) for i in xrange(0, SPUS)]
  # Set up an array of pointers to PS maps.
psinfo = extarray.extarray('I', SPUS * 4)
for i in xrange(0, SPUS * 4, 4):
psinfo[i] = id[i / 4].spups
  psinfo.synchronize()
# Send the psinfo address to all the SPUs.
addr = psinfo.buffer_info()[0]
for i in xrange(0, SPUS):
env.spu_exec.write_signal(id[i], 1, addr)
# Wait for a mailbox message from each SPU; the value should be the preceding
# rank. Join each SPU once the message is received, too.
for i in xrange(0, SPUS):
val = env.spu_exec.read_out_ibox(id[i])
assert(val == (i - 1) % SPUS)
proc.join(id[i])
pozdnyakov/chromium-crosswalk | tools/telemetry/telemetry/page/page_measurement.py | Python | bsd-3-clause | 5,239 | 0.004772
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
from telemetry.page import block_page_measurement_results
from telemetry.page import buildbot_page_measurement_results
from telemetry.page import csv_page_measurement_results
from telemetry.page import page_measurement_results
from telemetry.page import page_test
class MeasurementFailure(page_test.Failure):
"""Exception that can be thrown from MeasurePage to indicate an undesired but
  designed-for problem."""
pass
class PageMeasurement(page_test.PageTest):
"""Glue code for running a measurement across a set of pages.
To use this, subclass from the measurement and override MeasurePage. For
example:
class BodyChildElementMeasurement(PageMeasurement):
def MeasurePage(self, page, tab, results):
body_child_count = tab.EvaluateJavaScript(
'document.body.children.length')
results.Add('body_children', 'count', body_child_count)
if __name__ == '__main__':
page_measurement.Main(BodyChildElementMeasurement())
To add test-specific options:
class BodyChildElementMeasurement(PageMeasurement):
def AddCommandLineOptions(parser):
parser.add_option('--element', action='store', default='body')
def MeasurePage(self, page, tab, results):
body_child_count = tab.EvaluateJavaScript(
            "document.querySelector('%s').children.length")
        results.Add('children', 'count', body_child_count)
"""
def __init__(self,
action_name_to_run='',
needs_browser_restart_after_each_run=False,
discard_first_result=False,
clear_cache_before_each_run=False):
super(PageMeasurement, self).__init__(
'_RunTest',
action_name_to_run,
needs_browser_restart_after_each_run,
discard_first_result,
clear_cache_before_each_run)
def _RunTest(self, page, tab, results):
results.WillMeasurePage(page)
self.MeasurePage(page, tab, results)
results.DidMeasurePage()
def AddOutputOptions(self, parser):
super(PageMeasurement, self).AddOutputOptions(parser)
parser.add_option('-o', '--output',
dest='output_file',
help='Redirects output to a file. Defaults to stdout.')
parser.add_option('--output-trace-tag',
default='',
help='Append a tag to the key of each result trace.')
@property
def output_format_choices(self):
return ['buildbot', 'block', 'csv', 'none']
def PrepareResults(self, options):
if hasattr(options, 'output_file') and options.output_file:
output_stream = open(os.path.expanduser(options.output_file), 'w')
else:
output_stream = sys.stdout
if not hasattr(options, 'output_format'):
options.output_format = self.output_format_choices[0]
if not hasattr(options, 'output_trace_tag'):
options.output_trace_tag = ''
if options.output_format == 'csv':
return csv_page_measurement_results.CsvPageMeasurementResults(
output_stream,
self.results_are_the_same_on_every_page)
elif options.output_format == 'block':
return block_page_measurement_results.BlockPageMeasurementResults(
output_stream)
elif options.output_format == 'buildbot':
return buildbot_page_measurement_results.BuildbotPageMeasurementResults(
trace_tag=options.output_trace_tag)
elif options.output_format == 'none':
return page_measurement_results.PageMeasurementResults(
trace_tag=options.output_trace_tag)
else:
# Should never be reached. The parser enforces the choices.
raise Exception('Invalid --output-format "%s". Valid choices are: %s'
% (options.output_format,
', '.join(self.output_format_choices)))
@property
def results_are_the_same_on_every_page(self):
"""By default, measurements are assumed to output the same values for every
page. This allows incremental output, for example in CSV. If, however, the
measurement discovers what values it can report as it goes, and those values
may vary from page to page, you need to override this function and return
False. Output will not appear in this mode until the entire pageset has
run."""
return True
def MeasurePage(self, page, tab, results):
"""Override to actually measure the page's performance.
page is a page_set.Page
tab is an instance of telemetry.core.Tab
Should call results.Add(name, units, value) for each result, or raise an
exception on failure. The name and units of each Add() call must be
the same across all iterations. The name 'url' must not be used.
Prefer field names that are in accordance with python variable style. E.g.
field_name.
Put together:
def MeasurePage(self, page, tab, results):
res = tab.EvaluateJavaScript('2+2')
if res != 4:
raise Exception('Oh, wow.')
results.Add('two_plus_two', 'count', res)
"""
raise NotImplementedError()
kmiller96/Shipping-Containers-Software | lib/containers.py | Python | mit | 14,805 | 0.002972
# AUTHOR: Kale Miller
# DESCRIPTION: Contains the core classes for the program.
# 50726f6772616d6d696e6720697320627265616b696e67206f66206f6e652062696720696d706f737369626c65207461736b20696e746f20736576
# 6572616c207665727920736d616c6c20706f737369626c65207461736b732e
# DEVELOPMENT LOG:
# 05/12/16: Initialized classes file. Created the parent container class along with children for the basic container,
# the heavy container and the refrigerated container.
# 06/12/16: Fixed some of the public attributes to make them private.
# 07/12/16: Renamed the file to 'containers'. Added 'real' time processing of containers for load/unload. Removed
# some magic numbers and placed them at the top of the script.
# 12/12/16: Fixed some of the methods that failed under unit tests.
# 15/12/16: Added methods to add auxilary labels. Added method to generate information label. Small bug fixes.
# TODO: Somehow make the init call on the base class not hardcoded in.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~IMPORTS/GLOBALS~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from time import sleep
TEMP_LOW = -20.0
TEMP_HIGH = 10.0
UNLOAD_TIME = 0.75
LOAD_TIME = 0.75
LABEL_APPLICATION_TIME = 0.1
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.:.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~MAIN~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class _BaseContainer:
"""The parent class for which all containers are inherited."""
def __init__(self, id, info, destination):
"""
Initialises the class. id is the id tag on the container while info is the additional information you
wish to include on the label, such as contents or comments.
"""
# TODO: Change the assertion error to a value error.
assert isinstance(id, str) and isinstance(info, str), "Some of the parameters passed aren't strings."
self._id = id
self._info = info
self._destination = destination
self._labels = list()
self._auxilarylabelsalreadyadded = True
self._type = 'N/A'
self._loaded = False
self._onship = True
self._weight = None
self._currentlocation = self.currentlocation()
def currentlocation(self):
"""Find where the container currently is."""
if self._onship:
loc = 'On the ship'
elif self._loaded:
loc = 'Loaded'
elif not self._onship and not self._loaded:
loc = 'Holding bay'
self._currentlocation = loc
return loc
def settype(self, type):
"""Sets the type of the container."""
self._type = type
return None
def id(self):
"""Fetches the container's id."""
return self._id
def information(self):
"""Print the information about this container."""
print "----------------------------------------------------------------------"
print "CONTAINER: %s" % self._id
print "INFORMATION: %s" % self._info
print "DESTINATION: %s" % self._destination
print "LABELS: %s" % str(self._labels)
print "CURRENT LOCATION: %s" % self._currentlocation
print "----------------------------------------------------------------------"
return None
def _informationlabel(self):
"""Generates a label that contains information about the container."""
return "INFORMATION: %s. DESTINATION: %s." % (self._info,self._destination)
def addidtag(self):
"""Adds a id tag to the container."""
self._labels.append(self._id)
return None
def addlabel(self, label2add, debug=False, quiet=False):
"""Add a label to the container (e.g. fragile)."""
self._labels.append(label2add)
if not quiet: print "Added the label %r to container %s." % (label2add, self._id)
if not debug: sleep(LABEL_APPLICATION_TIME)
return None
def defineweight(self, m):
"""Defines the weight of the container."""
self._weight = m
return None
def weight(self):
"""Returns the weight of the container."""
return self._weight
def removelabel(self, label2remove):
"""Removes a label from the container."""
try:
self._labels.index(label2remove)
except ValueError:
print "The label %r is not on container %s." % (label2remove, self._id)
else:
print "Successfully removed the label %r from container %s." % (label2remove, self._id)
def load(self, load_location, debug=False):
"""Loads the container."""
if not self._auxilarylabelsalreadyadded:
print "WARNING: All of the required labels haven't been added to container %s" % self._id
if self._onship:
print "Container %s is still on the ship." % self._id
elif self._loaded:
print "Container %s is already loaded on %s." % (self._id, load_location)
elif not self._loaded:
print "Loading container %s onto %s." % (self._id, load_location)
if not debug: sleep(LOAD_TIME)
self._loaded = True
else:
raise RuntimeError, "There was a problem with container %s while loading." % self._id
self.currentlocation()
return None
def unload(self, debug=False):
"""Unloads the container."""
if self._onship:
print "Unloading container %s." % self._id
if not debug: sleep(UNLOAD_TIME)
self._onship = False
elif not self._onship:
print "Container %s has already been unloaded." % self._id
else:
raise RuntimeError, "There was a problem with container %s while unloading from the ship." % self._id
self.currentlocation()
return None
class BasicContainer(_BaseContainer):
"""The most basic container possible."""
def __init__(self, id, info, destination):
_BaseContainer.__init__(self, id, info, destination) # Call the parent class' constructor.
self.settype('basic')
self._auxilarylabelsalreadyadded = True
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
return None
class HeavyContainer(_BaseContainer):
"""The heavy type container."""
    def __init__(self, id, info, destination):
_BaseContainer.__init__(self, id, info, destination) # Call the parent class' constructor.
self.settype('heavy')
        self._auxilarylabelsalreadyadded = False
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('NOTE: Heavy container.', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
class RefrigeratedContainer(_BaseContainer):
"""The refrigerated container."""
def __init__(self, id, info, destination):
_BaseContainer.__init__(self, id, info, destination) # Call the parent class' constructor.
self.settype('refrigerated')
self._auxilarylabelsalreadyadded = False
self._temp = 0.0 # Set in celsius.
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('NOTE: Refrigerated. Handle with care.', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
def assertValidTemp(self, T):
"""Asserts that the temperature is valid."""
assert isinstance(T, float) or isinstance(T, int), "Temperature must be a number."
assert TEMP_LOW <= T <= TEMP_HIGH, "Temperature specified is outsid
kanethemediocre/1strand | 1strandbushinga002.py | Python | gpl-2.0 | 6,978 | 0.019633
#1strand Bushing Tool
#Standalone program for minimized cruft
import math
print "This program is for printing the best possible circular bushings"
print "Printer config values are hardcoded for ease of use (for me)"
xpath = [] #These are initialized and default values
ypath = []
zpath = []
step = []
epath = []
xstart = 10.0
ystart = 10.0
zstart = 0.5
height = 0.0
LayerHeight = 0.3
ExtrusionWidth = 0.6
FilamentDiameter=3
FilamentArea = FilamentDiameter * FilamentDiameter * 3.14159 / 4.0
GooCoefficient = LayerHeight * ExtrusionWidth / FilamentArea
configlist = [LayerHeight, ExtrusionWidth, FilamentDiameter, GooCoefficient]
BrimDiameter = 0.0
OuterDiameter = 0.0
InnerDiameter = 0.0
N = 1
ActualExtrusionWidth = ExtrusionWidth
print "Current values are:"
print "LayerHeight =", configlist[0] #This assignment is super important
print "ExtrusionWidth=", configlist[1] #and needs to be consistent with
print "FilamentDiameter=", configlist[2] #with other code blocks related
print "GooCoefficient=", configlist[3] #to these options.
BrimDiameter = float(raw_input("Enter brim diameter in mm:"))
OuterDiameter = float(raw_input("Enter Outer Diameter in mm:"))
InnerDiameter = float(raw_input("Enter Inner Diameter in mm:"))
N = int(raw_input("Enter number of line segments in your alleged circles"))
anglestep = 2 * math.pi / N
print "Angular step is ", anglestep, " radians."
height = float(raw_input("Enter Height"))
centerx = (BrimDiameter / 2.0)+5 #Center is chosen so brim is 5mm from edge
centery = (BrimDiameter / 2.0)+5 #Center is chosen so brim is 5mm from edge
thickness = (OuterDiameter-InnerDiameter)/2
perimeters = thickness/ExtrusionWidth
print "Thickness = ", thickness
print "Needed perimeters = ", perimeters
perimeters = int(perimeters)
ActualExtrusionWidth = thickness/perimeters
print "Revised perimeters = ", perimeters
print "Revised extrusion width = ", ActualExtrusionWidth
BrimThickness = (BrimDiameter-InnerDiameter)/2
BrimPerimeters = int(BrimThickness/ActualExtrusionWidth)
print "Brim Thickness = ", BrimThickness
print "Brim Perimeters = ", BrimPerimeters
#Brim layer is first, and treated separately.
j=0
i=0
radius = BrimDiameter/2 - (j+0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(LayerHeight)
while (j<BrimPerimeters):
radius = BrimDiameter/2 - (j+0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<N):
i=i+1
#print "i=", i, "j=", j, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
zpath.append(LayerHeight)
#
#
#
#Now the actual bushing begins printing.
#
#
#
CurrentLayer=1
CurrentHeight=LayerHeight*CurrentLayer #Technically should be earlier but wutev
#
#
#
#Now the actual bushing begins printing.
#
#
#
#k=0
##Even layers (1st bushing layer is 2) are inside to outside
##odd layers are outside to inside, to maintain strand continuity
#j=0
#i=0
#radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
#xpath.append(centerx+radius)
#ypath.append(centery)
#zpath.append(CurrentHeight)
#while (j<=perimeters):
# radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
# j=j+1
# i=0
# while (i<N):
# i=i+1
# #print "i=", i, "j=", j, "radius=", radius
# xpath.append(centerx+radius*math.cos(i*anglestep))
# ypath.append(centery+radius*math.sin(i*anglestep))
# zpath.append(CurrentHeight)
##odd layers are outside to inside, to maintain strand continuity
#CurrentLayer=3
#CurrentHeight=LayerHeight*CurrentLayer
#j=0
#i=0
#radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
#xpath.append(centerx+radius)
#ypath.append(centery)
#zpath.append(CurrentHeight)
#while (j<perimeters):
# radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
# j=j+1
# i=0
# while (i<N):
# i=i+1
# #print "i=", i, "j=", j, "radius=", radius
# xpath.append(centerx+radius*math.cos(i*anglestep))
# ypath.append(centery+radius*math.sin(i*anglestep))
# zpath.append(CurrentHeight)
while (CurrentLayer*LayerHeight < height):
CurrentLayer=CurrentLayer+1
CurrentHeight=LayerHeight*CurrentLayer
#Even layers (1st bushing layer is 2) are inside to outside
#odd layers are outside to inside, to maintain strand continuity
j=1
i=0
radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(CurrentHeight-LayerHeight*0.75)
while (j<=perimeters):
radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<(N-1)): #kludge
i=i+1
#print "i=", i, "j=", j, "layer=", CurrentLayer, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
if (i==1 and j==1):
zpath.append(CurrentHeight-LayerHeight*.25)
else:
zpath.append(CurrentHeight)
#odd layers are outside to inside, to maintain strand continuity
CurrentLayer=CurrentLayer+1
CurrentHeight=LayerHeight*CurrentLayer
j=0
i=0
radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(CurrentHeight-LayerHeight*.75)
while (j<perimeters):
radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<(N-1)): #Same kludge as the even layers.
i=i+1
#print "i=", i, "j=", j, "layer=", CurrentLayer, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
if (i==1 and j==1):
zpath.append(CurrentHeight-LayerHeight*.25)
else:
zpath.append(CurrentHeight)
#Extrusion is only handled here temporarily for testing
for x in xrange(len(xpath)): # This initializes the arrays so I can
step.append(0.0) #avoid that append() bullshit where I dont
epath.append(0.0) #know where I'm writing.
for x in xrange(2, len(xpath)): # This calculates how much extruder movement per step
distance=((xpath[x]-xpath[x-1])**2+(ypath[x]-ypath[x-1])**2)**0.5
step[x]=distance*GooCoefficient
epath[x]=epath[x-1]+step[x]
#for x in range(len(xpath)): #Human readable raw output
# print xpath[x-1], ypath[x-1], zpath[x-1], step[x-1], epath[x-1]
goutput = open("output1.gcode", "wb") #Now save to output1.gcode
goutput.write("G28 \nG21 \nG90 \nG92 E0 \nM82")
x=0
for x in range(len(xpath)):
goutput.write("G1 X" );
goutput.write( str(xpath[x]) );
goutput.write( " Y" );
goutput.write( str(ypath[x]) );
goutput.write( " Z" );
goutput.write( str(zpath[x]) );
goutput.write( " E" );
goutput.write( str(epath[x]) );
goutput.write( " F2000 \n" );
goutput.close()
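# Worked example (assumed inputs, not part of the original script): with
# OuterDiameter=20, InnerDiameter=10 and ExtrusionWidth=0.6 the wall maths is
#   thickness            = (20 - 10) / 2  = 5.0 mm
#   perimeters           = int(5.0 / 0.6) = 8
#   ActualExtrusionWidth = 5.0 / 8        = 0.625 mm
# i.e. the tool widens each extrusion slightly so eight perimeters fill the
# wall exactly, which is what the "Revised ..." printouts above report.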
frink182/pi_temps | mqtt_listener.py | Python | gpl-2.0 | 882 | 0.003401
#!/usr/bin/env python
import paho.mqtt.client as mqtt
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("presence/+")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
print(msg.topic+" "+str(msg.payload))
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("slug", 1883, 60)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
client.loop_forever()
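# Hypothetical test sketch (not part of this script): publish a message that
# the listener above would print.  Broker host and port match the hard-coded
# values; the topic suffix and payload are assumptions.
#
#   import paho.mqtt.publish as publish
#   publish.single("presence/door", payload="open", hostname="slug", port=1883)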
DakRomo/2017Challenges | challenge_4/python/ning/challenge_4.py | Python | mit | 790 | 0
class BTree:
def __init__(self, b_tree_list=list()):
self.b_tree_list = b_tree_list
self.levels = len(b_tree_list)
def visualise(self):
for index, level in enumerate(self.b_tree_list):
spacing = 2 ** (self.levels - index) - 1
print(((spacing-1)//2)*' ', end='')
            for node in level:
if node is None:
print(' ', end='')
else:
print(node, end='')
print(spacing * ' ', end='')
print('') # newline
def invert(self):
for level in self.b_tree_list:
level.reverse()
example_tree = BTree([
[4],
[2, 7],
[1, 3, 6, 9]])
example_tree.visualise()
example_tree.invert()
example_tree.visualise()
dhuang/incubator-airflow | airflow/providers/amazon/aws/hooks/glue.py | Python | apache-2.0 | 7,995 | 0.002126
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
from typing import Dict, List, Optional
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
class AwsGlueJobHook(AwsBaseHook):
"""
Interact with AWS Glue - create job, trigger, crawler
:param s3_bucket: S3 bucket where logs and local etl script will be uploaded
:type s3_bucket: Optional[str]
:param job_name: unique job name per AWS account
:type job_name: Optional[str]
:param desc: job description
    :type desc: Optional[str]
:param concurrent_run_limit: The maximum number of concurrent runs allowed for a job
:type concurrent_run_limit: int
:param script_location: path to etl script on s3
:type script_location: Optional[str]
:param retry_limit: Maximum number of times to retry this job if it fails
:type retry_limit: int
:param num_of_dpus: Number of AWS Glue DPUs to allocate to this Job
:type num_of_dpus: int
:param region_name: aws region name (example: us-east-1)
:type region_name: Optional[str]
:param iam_role_name: AWS IAM Role for Glue Job Execution
:type iam_role_name: Optional[str]
:param create_job_kwargs: Extra arguments for Glue Job Creation
:type create_job_kwargs: Optional[dict]
"""
JOB_POLL_INTERVAL = 6 # polls job status after every JOB_POLL_INTERVAL seconds
def __init__(
self,
s3_bucket: Optional[str] = None,
job_name: Optional[str] = None,
desc: Optional[str] = None,
concurrent_run_limit: int = 1,
script_location: Optional[str] = None,
retry_limit: int = 0,
num_of_dpus: int = 10,
iam_role_name: Optional[str] = None,
create_job_kwargs: Optional[dict] = None,
*args,
**kwargs,
):
self.job_name = job_name
self.desc = desc
self.concurrent_run_limit = concurrent_run_limit
self.script_location = script_location
self.retry_limit = retry_limit
self.num_of_dpus = num_of_dpus
self.s3_bucket = s3_bucket
self.role_name = iam_role_name
self.s3_glue_logs = 'logs/glue-logs/'
self.create_job_kwargs = create_job_kwargs or {}
kwargs['client_type'] = 'glue'
super().__init__(*args, **kwargs)
def list_jobs(self) -> List:
""":return: Lists of Jobs"""
conn = self.get_conn()
return conn.get_jobs()
def get_iam_execution_role(self) -> Dict:
""":return: iam role for job execution"""
iam_client = self.get_client_type('iam', self.region_name)
try:
glue_execution_role = iam_client.get_role(RoleName=self.role_name)
self.log.info("Iam Role Name: %s", self.role_name)
return glue_execution_role
except Exception as general_error:
self.log.error("Failed to create aws glue job, error: %s", general_error)
raise
def initialize_job(self, script_arguments: Optional[dict] = None) -> Dict[str, str]:
"""
Initializes connection with AWS Glue
to run job
:return:
"""
glue_client = self.get_conn()
script_arguments = script_arguments or {}
try:
job_name = self.get_or_create_glue_job()
job_run = glue_client.start_job_run(JobName=job_name, Arguments=script_arguments)
return job_run
except Exception as general_error:
self.log.error("Failed to run aws glue job, error: %s", general_error)
raise
def get_job_state(self, job_name: str, run_id: str) -> str:
"""
Get state of the Glue job. The job state can be
running, finished, failed, stopped or timeout.
:param job_name: unique job name per AWS account
:type job_name: str
:param run_id: The job-run ID of the predecessor job run
:type run_id: str
:return: State of the Glue job
"""
glue_client = self.get_conn()
job_run = glue_client.get_job_run(JobName=job_name, RunId=run_id, PredecessorsIncluded=True)
job_run_state = job_run['JobRun']['JobRunState']
return job_run_state
def job_completion(self, job_name: str, run_id: str) -> Dict[str, str]:
"""
Waits until Glue job with job_name completes or
fails and return final state if finished.
Raises AirflowException when the job failed
:param job_name: unique job name per AWS account
:type job_name: str
:param run_id: The job-run ID of the predecessor job run
:type run_id: str
:return: Dict of JobRunState and JobRunId
"""
failed_states = ['FAILED', 'TIMEOUT']
finished_states = ['SUCCEEDED', 'STOPPED']
while True:
job_run_state = self.get_job_state(job_name, run_id)
if job_run_state in finished_states:
self.log.info("Exiting Job %s Run State: %s", run_id, job_run_state)
return {'JobRunState': job_run_state, 'JobRunId': run_id}
if job_run_state in failed_states:
job_error_message = "Exiting Job " + run_id + " Run State: " + job_run_state
self.log.info(job_error_message)
raise AirflowException(job_error_message)
else:
self.log.info(
"Polling for AWS Glue Job %s current run state with status %s", job_name, job_run_state
)
time.sleep(self.JOB_POLL_INTERVAL)
def get_or_create_glue_job(self) -> str:
"""
        Creates the Glue job if it does not already exist and returns its name
:return:Name of the Job
"""
glue_client = self.get_conn()
try:
get_job_response = glue_client.get_job(JobName=self.job_name)
self.log.info("Job Already exist. Returning Name of the job")
return get_job_response['Job']['Name']
except glue_client.exceptions.EntityNotFoundException:
self.log.info("Job doesn't exist. Now creating and running AWS Glue Job")
if self.s3_bucket is None:
raise AirflowException('Could not initialize glue job: the `s3_bucket` parameter must be specified')
s3_log_path = f's3://{self.s3_bucket}/{self.s3_glue_logs}{self.job_name}'
execution_role = self.get_iam_execution_role()
try:
create_job_response = glue_client.create_job(
Name=self.job_name,
Description=self.desc,
LogUri=s3_log_path,
Role=execution_role['Role']['Arn'],
ExecutionProperty={"MaxConcurrentRuns": self.concurrent_run_limit},
Command={"Name": "glueetl", "ScriptLocation": self.script_location},
MaxRetries=self.retry_limit,
AllocatedCapacity=self.num_of_dpus,
**self.create_job_kwargs,
)
return create_job_response['Name']
except Exception as general_error:
self.log.error("Failed to create aws glue job, error: %s", general_error)
raise
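# A minimal usage sketch (illustrative only, not part of the hook itself). The
# ``hook`` argument is assumed to be an instance of the Glue job hook defined above,
# already constructed with hypothetical values for job_name, s3_bucket, iam_role_name
# and script_location; the script argument below is a placeholder as well.
def _example_glue_hook_usage(hook):
    """Start a Glue job run and block until it reaches a terminal state."""
    # Creates (or reuses) the Glue job and starts a run with hypothetical arguments.
    job_run = hook.initialize_job({'--input_path': 's3://example-bucket/input/'})
    # Polls until SUCCEEDED/STOPPED and returns the final state,
    # or raises AirflowException on FAILED/TIMEOUT.
    return hook.job_completion(hook.job_name, job_run['JobRunId'])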
|
lorensen/VTKExamples
|
src/Python/DataManipulation/LineOnMesh.py
|
Python
|
apache-2.0
| 5,546
| 0
|
#!/usr/bin/env python
import numpy as np
import vtk
def main():
named_colors = vtk.vtkNamedColors()
# Make a 32 x 32 grid.
size = 32
# Define z values for the topography.
# Comment out the following line if you want a different random
# distribution each time the script is run.
np.random.seed(3)
topography = np.random.randint(0, 5, (size, size))
# Define points, triangles and colors
colors = vtk.vtkUnsignedCharArray()
colors.SetNumberOfComponents(3)
points = vtk.vtkPoints()
triangles = vtk.vtkCellArray()
# Build the meshgrid manually.
count = 0
for i in range(size - 1):
for j in range(size - 1):
z1 = topography[i][j]
z2 = topography[i][j + 1]
z3 = topography[i + 1][j]
# Triangle 1
points.InsertNextPoint(i, j, z1)
points.InsertNextPoint(i, (j + 1), z2)
points.InsertNextPoint((i + 1), j, z3)
triangle = vtk.vtkTriangle()
triangle.GetPointIds().SetId(0, count)
triangle.GetPointIds().SetId(1, count + 1)
triangle.GetPointIds().SetId(2, count + 2)
triangles.InsertNextCell(triangle)
z1 = topography[i][j + 1]
z2 = topography[i + 1][j + 1]
z3 = topography[i + 1][j]
# Triangle 2
points.InsertNextPoint(i, (j + 1), z1)
points.InsertNextPoint((i + 1), (j + 1), z2)
points.InsertNextPoint((i + 1), j, z3)
triangle = vtk.vtkTriangle()
triangle.GetPointIds().SetId(0, count + 3)
triangle.GetPointIds().SetId(1, count + 4)
triangle.GetPointIds().SetId(2, count + 5)
count += 6
triangles.InsertNextCell(triangle)
# Add some color.
r = [int(i / float(size) * 255), int(j / float(size) * 255), 0]
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
#
|
Create a polydata object.
trianglePolyData = vtk.vtkPolyData()
# Add the geometry and topology to the polydata.
trianglePolyData.SetPoints(points)
trianglePolyData.GetPointData().SetScalars(colors)
trianglePolyData.SetPolys(triangles)
# Clean the polydata so that the edges are shared!
cleanPolyData = vtk.vtkCleanPolyData()
cleanPolyData.SetInputData(trianglePolyData)
# Use a filter
|
to smooth the data (will add triangles and smooth).
smooth_loop = vtk.vtkLoopSubdivisionFilter()
smooth_loop.SetNumberOfSubdivisions(3)
smooth_loop.SetInputConnection(cleanPolyData.GetOutputPort())
# Create a mapper and actor for smoothed dataset.
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(smooth_loop.GetOutputPort())
actor_loop = vtk.vtkActor()
actor_loop.SetMapper(mapper)
actor_loop.GetProperty().SetInterpolationToFlat()
# Update the pipeline so that vtkCellLocator finds cells!
smooth_loop.Update()
# Define a cellLocator to be able to compute intersections between lines
# and the surface.
locator = vtk.vtkCellLocator()
locator.SetDataSet(smooth_loop.GetOutput())
locator.BuildLocator()
maxloop = 1000
dist = 20.0 / maxloop
tolerance = 0.001
# Make a list of points. Each point is the intersection of a vertical line
# defined by p1 and p2 and the surface.
points = vtk.vtkPoints()
for i in range(maxloop):
p1 = [2 + i * dist, 16, -1]
p2 = [2 + i * dist, 16, 6]
# Outputs (we need only pos which is the x, y, z position
# of the intersection)
t = vtk.mutable(0)
pos = [0.0, 0.0, 0.0]
pcoords = [0.0, 0.0, 0.0]
subId = vtk.mutable(0)
locator.IntersectWithLine(p1, p2, tolerance, t, pos, pcoords, subId)
# Add a slight offset in z.
pos[2] += 0.01
# Add the x, y, z position of the intersection.
points.InsertNextPoint(pos)
# Create a spline and add the points
spline = vtk.vtkParametricSpline()
spline.SetPoints(points)
functionSource = vtk.vtkParametricFunctionSource()
functionSource.SetUResolution(maxloop)
functionSource.SetParametricFunction(spline)
# Map the spline
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(functionSource.GetOutputPort())
# Define the line actor
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(named_colors.GetColor3d("Red"))
actor.GetProperty().SetLineWidth(3)
# Visualize
renderer = vtk.vtkRenderer()
renderWindow = vtk.vtkRenderWindow()
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
# Add actors and render
renderer.AddActor(actor)
renderer.AddActor(actor_loop)
renderer.SetBackground(named_colors.GetColor3d("Cornsilk"))
renderWindow.SetSize(800, 800)
renderWindow.Render()
renderer.GetActiveCamera().SetPosition(-32.471276, 53.258788, 61.209332)
renderer.GetActiveCamera().SetFocalPoint(15.500000, 15.500000, 2.000000)
renderer.GetActiveCamera().SetViewUp(0.348057, -0.636740, 0.688055)
renderer.ResetCameraClippingRange()
renderWindow.Render()
renderWindowInteractor.Start()
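# A hypothetical alternative sketch (not part of the original example): the point
# grid that main() builds with an explicit double loop can also be assembled with
# numpy, assuming vtk.util.numpy_support is available in this VTK build.
def build_grid_points_with_numpy(topography):
    """Return a vtkPoints object with one point per grid node (illustrative only)."""
    from vtk.util.numpy_support import numpy_to_vtk
    rows, cols = topography.shape
    xx, yy = np.meshgrid(np.arange(rows), np.arange(cols), indexing='ij')
    pts = np.column_stack([xx.ravel(), yy.ravel(), topography.ravel()]).astype(float)
    grid_points = vtk.vtkPoints()
    grid_points.SetData(numpy_to_vtk(pts, deep=True))
    # The shared points could then be triangulated with vtk.vtkDelaunay2D instead of
    # inserting six duplicated points per cell as the loop in main() does.
    return grid_points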
if __name__ == '__main__':
main()
|
jkadlec/knot-dns-zoneapi
|
tests-extra/tests/dnssec/case_sensitivity/test.py
|
Python
|
gpl-3.0
| 1,244
| 0.001608
|
#!/usr/bin/env python3
'''Test for no re-signing if the zone is properly signed.'''
from dnstest.utils import set_err
from dnstest.test import Test
import subprocess
def patch_zone(t, server, zone, script):
"""
Update zone file on a master server.
"""
zone = zone[0]
zonefile = "%s/master/%s" % (server.dir, zone.file_name)
modify_script = "%s/modify.sh" % t.data_dir
patch_script = "%s/%s" % (t.data_dir, script)
subprocess.check_call([modify_script, zonefile, patch_script])
t = Test()
server = t.server("knot")
zone = t.zone("example.", storage=".")
server.dnssec_enable = True
server.gen_key(zone, ksk=True)
server.gen_key(zone)
t.link(zone, server)
t.start()
serial = server.zone_wait(zone)
scripts = [
("insensitive RRs", "modify-insensitive.awk", False),
("NSEC RR", "modify-nsec.awk", True),
("LP RR", "modify-lp.awk", True),
]
for name, script, resign in scripts:
t.sleep(1)
server.flush()
server.stop()
|
patch_zone(t, server, zone, script)
server.start()
new_serial = server.zone_wait(zone)
signed = new_serial != serial
if signed != resign:
set_err("Invalid state after %s change" % name)
break
serial = new_
|
serial
t.stop()
|
wesley1001/WeVoteServer
|
config/wsgi.py
|
Python
|
bsd-3-clause
| 1,622
| 0
|
"""
WSGI config for WebAppPublic project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Use Whitenoise to serve static files
# See: https://whitenoise.readthedocs.org/
ap
|
plication = DjangoWhiteNoise(application)
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorld
|
Application
# application = HelloWorldApplication(application)
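# A minimal, hypothetical sketch of such middleware (illustrative only; the class
# below is not part of this project):
# class CountingMiddleware:
#     """Counts requests before delegating to the wrapped WSGI application."""
#     def __init__(self, wrapped):
#         self.wrapped = wrapped
#         self.requests_seen = 0
#     def __call__(self, environ, start_response):
#         self.requests_seen += 1
#         return self.wrapped(environ, start_response)
# application = CountingMiddleware(application)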
|
yaoxuanw007/forfun
|
leetcode/python/countAndSay.py
|
Python
|
mit
| 720
| 0.0125
|
# https://oj.leetcode.com/problems/count-and-say/
# 10:56 - 11:11
class Solution:
# @return a string
def countAndSay(self, n):
result = "1"
# For n == 1 the result is '1'; the loop below builds the remaining n - 1 terms
for i in xrange(1, n):
last, count, nextResult = result[0], 1, ""
for j in xrange(1, len(result)):
curr = result[j]
if last != curr:
nextResult += str(count)
|
+ str(last)
count = 0
count += 1
last = curr
nextResult += str(count) + str(last)
result = nextResult
return result
|
s = Solution()
print s.countAndSay(1), '1'
print s.countAndSay(2), '11'
print s.countAndSay(3), '21'
print s.countAndSay(4), '1211'
print s.countAndSay(5), '111221'
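# An equivalent single step written with itertools.groupby (an illustrative sketch,
# not part of the original solution): run-length encode one term to get the next.
from itertools import groupby
def count_and_say_step(s):
    # "1211" -> "111221": for each run of equal digits, emit "<count><digit>".
    return ''.join(str(len(list(group))) + digit for digit, group in groupby(s))
print count_and_say_step('1211'), '111221'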
|
spatial-computing/geotweets
|
data/tool/test/jsongener.py
|
Python
|
mit
| 2,441
| 0.003277
|
# Randomly generates data
import json, random, copy
data = {
'tweets': {},
'events': {},
'tweetsHeat': [],
'eventsHeat': []
}
tweetGeo = {
"type": "FeatureCollection",
"features": [],
"id": "tweetsyoulike.c22ab257"
}
tfeature = {
"geometry": {
"type": "Point",
"coordinates": [120.856705, 14.414455]
},
"type": "Feature",
"id": "55cd1bc45882980ff072054c",
"properties": {
"name": "jayzee guevarra",
"time": "Thu Aug 13 22:35:49 +0000 2015",
"importance": 0.2995732273553991,
"text": "Sweat is body fat crying right??? (@ Boulevard Fitness) https://t.co/rbRHRxzqjG",
"media_url": [],
"id": "55cd1bc05882980ff072054b",
"location": "Haiti Cherie"
}
}
eventGeo = {
"type": "FeatureCollection",
"features": [],
"id": "tweetsyoulike.c22ab257"
}
efeature = {
"geometry": {
"type": "Point",
"coordinates": [120.856705, 14.414455]
},
"type": "Feature",
"id": "55cd1bc45882980ff072054c",
"properties": {
"name": "jayzee guevarra",
"time": "Thu Aug 13 22:35:49 +0000 2015",
"text": "Sweat is body fat crying right??? (@ Boulevard Fitness) https://t.co/rbRHRxzqjG",
"media_url": [],
"id": "55cd1bc05882980ff072054b",
"location": "Haiti Cherie"
}
}
for i in range(0, 100):
tfea = copy.deepcopy(tfeature)
tfea['properties']['importance'] = random.random()
coordi = []
coordi.append(tfeature['geometry']['coordinates'][1] + (random.random() - 0.5) * 10)
coordi.append(tfeature['geometry']['coordinates'][0] + (random.random() - 0.5) * 10)
tfea['geometry']['coordinates'][0] = coordi[1]
tfea['geometry']['coordinates'][1] = coordi[0]
tweetGe
|
o['features'].append(tfea)
coordi.append(tfea['properties']['importance'])
data['tweetsHeat'].append(coordi)
efea = copy.deepcopy(efeature)
coordi = []
|
coordi.append(efeature['geometry']['coordinates'][1] + (random.random() - 0.5) * 10)
coordi.append(efeature['geometry']['coordinates'][0] + (random.random() - 0.5) * 10)
efea['geometry']['coordinates'][0] = coordi[1]
efea['geometry']['coordinates'][1] = coordi[0]
eventGeo['features'].append(efea)
coordi.append(1)
data['eventsHeat'].append(coordi)
data['tweets'] = tweetGeo
data['events'] = eventGeo
f = open("geo.json", "w")
json.dump(data, f)
f.close()
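# A small sanity-check sketch (not part of the original script): read the file back
# and confirm that 100 features were generated for each layer.
with open("geo.json") as check_file:
    loaded = json.load(check_file)
assert len(loaded['tweets']['features']) == 100
assert len(loaded['events']['features']) == 100
assert len(loaded['tweetsHeat']) == 100 and len(loaded['eventsHeat']) == 100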
|
bndl/bndl
|
bndl/__init__.py
|
Python
|
apache-2.0
| 1,142
| 0.000876
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging.config
import os.path
from bndl.util.conf import Config, String
from bndl.util.log import instal
|
l_trace_logging
from bndl.util.objects import LazyObject
# Expose a global BNDL configuration
conf = LazyObject(Config)
# Configure Logging
logging_conf = String('logging.conf')
install_trace_logging()
logging.captureWarnings(True)
if os.path.exists(conf['bndl.logging_conf']):
logging.config.fileConfig(conf['bndl.logging_conf'], disable_existing_loggers=False)
# BNDL version info
__version_info__ = (0, 7, 0, 'dev2')
__version__ = '.'.join(map(str, __version_info__))
|
YoApp/yo-water-tracker
|
db.py
|
Python
|
mit
| 154
| 0.006494
|
# -*- c
|
oding: utf-8 -*-
import pymongo
from config import MONGO_STRING
client = pymongo.MongoClient(MONGO_STRING, tz_aware=True)
db = cl
|
ient['yo-water']
|
photoninger/ansible
|
test/units/modules/network/onyx/test_onyx_lldp.py
|
Python
|
gpl-3.0
| 2,404
| 0
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansibl
|
e.modules.network.onyx import onyx_lldp
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxInterfaceModule(TestOnyxModule):
module = onyx_lldp
def setUp(self):
|
super(TestOnyxInterfaceModule, self).setUp()
self.mock_get_config = patch.object(
onyx_lldp.OnyxLldpModule, "_get_lldp_config")
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch(
'ansible.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestOnyxInterfaceModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
if commands == ['lldp']:
self.get_config.return_value = None
else:
config_file = 'onyx_lldp_show.cfg'
self.get_config.return_value = load_fixture(config_file)
self.load_config.return_value = None
def test_lldp_no_change(self):
set_module_args(dict())
self.execute_module(changed=False)
def test_lldp_disable(self):
set_module_args(dict(state='absent'))
commands = ['no lldp']
self.execute_module(changed=True, commands=commands)
def test_lldp_enable(self):
set_module_args(dict(state='present'))
commands = ['lldp']
self.execute_module(changed=True, commands=commands)
|