text stringlengths 4 1.02M | meta dict |
|---|---|
"""The volumes extension."""
from oslo_utils import strutils
from webob import exc
from nova.api.openstack import common
from nova.api.openstack.compute.schemas.v3 import volumes as volumes_schema
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova import compute
from nova import exception
from nova.i18n import _
from nova import objects
from nova import volume
# Extension alias; also used to build the policy rule names below.
ALIAS = "os-volumes"
# Policy check for the volume / snapshot resources.
authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS)
# Separate policy check for the volume-attachment sub-resource.
authorize_attach = extensions.extension_authorizer('compute',
                                                   'v3:os-volumes-attachments')
def _translate_volume_detail_view(context, vol):
    """Maps keys for volumes details view."""
    # The detail view currently carries no extra data beyond the summary.
    return _translate_volume_summary_view(context, vol)
def _translate_volume_summary_view(context, vol):
"""Maps keys for volumes summary view."""
d = {}
d['id'] = vol['id']
d['status'] = vol['status']
d['size'] = vol['size']
d['availabilityZone'] = vol['availability_zone']
d['createdAt'] = vol['created_at']
if vol['attach_status'] == 'attached':
d['attachments'] = [_translate_attachment_detail_view(vol['id'],
vol['instance_uuid'],
vol['mountpoint'])]
else:
d['attachments'] = [{}]
d['displayName'] = vol['display_name']
d['displayDescription'] = vol['display_description']
if vol['volume_type_id'] and vol.get('volume_type'):
d['volumeType'] = vol['volume_type']['name']
else:
d['volumeType'] = vol['volume_type_id']
d['snapshotId'] = vol['snapshot_id']
if vol.get('volume_metadata'):
d['metadata'] = vol.get('volume_metadata')
else:
d['metadata'] = {}
return d
class VolumeController(wsgi.Controller):
    """The Volumes API controller for the OpenStack API.

    Proxies volume CRUD requests to the volume service via
    ``nova.volume.API`` and renders results in the legacy
    ``os-volumes`` JSON views.
    """

    def __init__(self):
        self.volume_api = volume.API()
        super(VolumeController, self).__init__()

    @extensions.expected_errors(404)
    def show(self, req, id):
        """Return data about the given volume."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            vol = self.volume_api.get(context, id)
        except exception.VolumeNotFound as e:
            # Translate the internal not-found into an HTTP 404.
            raise exc.HTTPNotFound(explanation=e.format_message())
        return {'volume': _translate_volume_detail_view(context, vol)}

    @wsgi.response(202)
    @extensions.expected_errors(404)
    def delete(self, req, id):
        """Delete a volume."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            self.volume_api.delete(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.format_message())

    @extensions.expected_errors(())
    def index(self, req):
        """Returns a summary list of volumes."""
        return self._items(req, entity_maker=_translate_volume_summary_view)

    @extensions.expected_errors(())
    def detail(self, req):
        """Returns a detailed list of volumes."""
        return self._items(req, entity_maker=_translate_volume_detail_view)

    def _items(self, req, entity_maker):
        """Returns a list of volumes, transformed through entity_maker."""
        context = req.environ['nova.context']
        authorize(context)
        volumes = self.volume_api.get_all(context)
        # Honor the standard limit/offset pagination parameters.
        limited_list = common.limited(volumes, req)
        res = [entity_maker(context, vol) for vol in limited_list]
        return {'volumes': res}

    @extensions.expected_errors(400)
    @validation.schema(volumes_schema.create)
    def create(self, req, body):
        """Creates a new volume."""
        context = req.environ['nova.context']
        authorize(context)

        vol = body['volume']
        vol_type = vol.get('volume_type')
        metadata = vol.get('metadata')

        snapshot_id = vol.get('snapshot_id', None)
        if snapshot_id is not None:
            snapshot = self.volume_api.get_snapshot(context, snapshot_id)
        else:
            snapshot = None

        size = vol.get('size', None)
        # When creating from a snapshot, the size may be omitted and is
        # inherited from the snapshot's volume size.
        if size is None and snapshot is not None:
            size = snapshot['volume_size']

        availability_zone = vol.get('availability_zone')

        try:
            new_volume = self.volume_api.create(
                context,
                size,
                vol.get('display_name'),
                vol.get('display_description'),
                snapshot=snapshot,
                volume_type=vol_type,
                metadata=metadata,
                availability_zone=availability_zone
                )
        except exception.InvalidInput as err:
            raise exc.HTTPBadRequest(explanation=err.format_message())

        # TODO(vish): Instance should be None at db layer instead of
        #             trying to lazy load, but for now we turn it into
        #             a dict to avoid an error.
        retval = _translate_volume_detail_view(context, dict(new_volume))
        result = {'volume': retval}

        location = '%s/%s' % (req.url, new_volume['id'])

        return wsgi.ResponseObject(result, headers=dict(location=location))
def _translate_attachment_detail_view(volume_id, instance_uuid, mountpoint):
    """Maps keys for attachment details view."""
    # Detail and summary views are currently identical.
    return _translate_attachment_summary_view(volume_id,
                                              instance_uuid,
                                              mountpoint)
def _translate_attachment_summary_view(volume_id, instance_uuid, mountpoint):
"""Maps keys for attachment summary view."""
d = {}
# NOTE(justinsb): We use the volume id as the id of the attachment object
d['id'] = volume_id
d['volumeId'] = volume_id
d['serverId'] = instance_uuid
if mountpoint:
d['device'] = mountpoint
return d
class VolumeAttachmentController(wsgi.Controller):
    """The volume attachment API controller for the OpenStack API.

    A child resource of the server.  Note that we use the volume id
    as the ID of the attachment (though this is not guaranteed externally).
    """

    def __init__(self):
        self.compute_api = compute.API()
        self.volume_api = volume.API()
        super(VolumeAttachmentController, self).__init__()

    @extensions.expected_errors(404)
    def index(self, req, server_id):
        """Returns the list of volume attachments for a given instance."""
        context = req.environ['nova.context']
        authorize_attach(context, action='index')
        return self._items(req, server_id,
                           entity_maker=_translate_attachment_summary_view)

    @extensions.expected_errors(404)
    def show(self, req, server_id, id):
        """Return data about the given volume attachment."""
        context = req.environ['nova.context']
        authorize(context)
        authorize_attach(context, action='show')

        # 'id' of the attachment is the volume id (see class docstring).
        volume_id = id
        instance = common.get_instance(self.compute_api, context, server_id,
                                       want_objects=True)

        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
            context, instance.uuid)
        if not bdms:
            msg = _("Instance %s is not attached.") % server_id
            raise exc.HTTPNotFound(explanation=msg)

        # Find the block device mapping that backs this volume.
        assigned_mountpoint = None
        for bdm in bdms:
            if bdm.volume_id == volume_id:
                assigned_mountpoint = bdm.device_name
                break

        if assigned_mountpoint is None:
            msg = _("volume_id not found: %s") % volume_id
            raise exc.HTTPNotFound(explanation=msg)

        return {'volumeAttachment': _translate_attachment_detail_view(
            volume_id,
            instance.uuid,
            assigned_mountpoint)}

    @extensions.expected_errors((400, 404, 409))
    @validation.schema(volumes_schema.create_volume_attachment)
    def create(self, req, server_id, body):
        """Attach a volume to an instance."""
        context = req.environ['nova.context']
        authorize(context)
        authorize_attach(context, action='create')

        volume_id = body['volumeAttachment']['volumeId']
        device = body['volumeAttachment'].get('device')

        instance = common.get_instance(self.compute_api, context, server_id,
                                       want_objects=True)
        try:
            # attach_volume may pick a device when none was requested.
            device = self.compute_api.attach_volume(context, instance,
                                                    volume_id, device)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.format_message())
        except exception.InstanceIsLocked as e:
            raise exc.HTTPConflict(explanation=e.format_message())
        except exception.InstanceInvalidState as state_error:
            common.raise_http_conflict_for_instance_invalid_state(state_error,
                    'attach_volume', server_id)
        except (exception.InvalidVolume,
                exception.InvalidDevicePath) as e:
            raise exc.HTTPBadRequest(explanation=e.format_message())

        # The attach is async
        attachment = {}
        attachment['id'] = volume_id
        attachment['serverId'] = server_id
        attachment['volumeId'] = volume_id
        attachment['device'] = device

        # NOTE(justinsb): And now, we have a problem...
        # The attach is async, so there's a window in which we don't see
        # the attachment (until the attachment completes).  We could also
        # get problems with concurrent requests.  I think we need an
        # attachment state, and to write to the DB here, but that's a bigger
        # change.
        # For now, we'll probably have to rely on libraries being smart

        # TODO(justinsb): How do I return "accepted" here?
        return {'volumeAttachment': attachment}

    @wsgi.response(202)
    @extensions.expected_errors((400, 404, 409))
    @validation.schema(volumes_schema.update_volume_attachment)
    def update(self, req, server_id, id, body):
        """Swap the attached volume ``id`` for the volume named in the body."""
        context = req.environ['nova.context']
        authorize(context)
        authorize_attach(context, action='update')

        old_volume_id = id
        try:
            old_volume = self.volume_api.get(context, old_volume_id)

            new_volume_id = body['volumeAttachment']['volumeId']
            new_volume = self.volume_api.get(context, new_volume_id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.format_message())

        instance = common.get_instance(self.compute_api, context, server_id,
                                       want_objects=True)

        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
            context, instance.uuid)
        found = False
        try:
            for bdm in bdms:
                if bdm.volume_id != old_volume_id:
                    continue
                try:
                    self.compute_api.swap_volume(context, instance, old_volume,
                                                 new_volume)
                    found = True
                    break
                except exception.VolumeUnattached:
                    # The volume is not attached.  Treat it as NotFound
                    # by falling through.
                    pass
        except exception.InvalidVolume as e:
            raise exc.HTTPBadRequest(explanation=e.format_message())
        except exception.InstanceIsLocked as e:
            raise exc.HTTPConflict(explanation=e.format_message())
        except exception.InstanceInvalidState as state_error:
            common.raise_http_conflict_for_instance_invalid_state(state_error,
                    'swap_volume', server_id)

        if not found:
            msg = _("The volume was either invalid or not attached to the "
                    "instance.")
            raise exc.HTTPNotFound(explanation=msg)

    @wsgi.response(202)
    @extensions.expected_errors((400, 403, 404, 409))
    def delete(self, req, server_id, id):
        """Detach a volume from an instance."""
        context = req.environ['nova.context']
        authorize(context)
        authorize_attach(context, action='delete')

        volume_id = id
        instance = common.get_instance(self.compute_api, context, server_id,
                                       want_objects=True)

        try:
            volume = self.volume_api.get(context, volume_id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.format_message())

        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
            context, instance.uuid)
        if not bdms:
            msg = _("Instance %s is not attached.") % server_id
            raise exc.HTTPNotFound(explanation=msg)

        found = False
        try:
            for bdm in bdms:
                if bdm.volume_id != volume_id:
                    continue
                # Detaching the boot volume is forbidden.
                if bdm.is_root:
                    msg = _("Can't detach root device volume")
                    raise exc.HTTPForbidden(explanation=msg)
                try:
                    self.compute_api.detach_volume(context, instance, volume)
                    found = True
                    break
                except exception.VolumeUnattached:
                    # The volume is not attached.  Treat it as NotFound
                    # by falling through.
                    pass
        except exception.InvalidVolume as e:
            raise exc.HTTPBadRequest(explanation=e.format_message())
        except exception.InstanceIsLocked as e:
            raise exc.HTTPConflict(explanation=e.format_message())
        except exception.InstanceInvalidState as state_error:
            common.raise_http_conflict_for_instance_invalid_state(state_error,
                    'detach_volume', server_id)

        if not found:
            msg = _("volume_id not found: %s") % volume_id
            raise exc.HTTPNotFound(explanation=msg)

    def _items(self, req, server_id, entity_maker):
        """Returns a list of attachments, transformed through entity_maker."""
        context = req.environ['nova.context']
        # NOTE(review): index() checks the attachment policy while this helper
        # checks the base os-volumes policy -- presumably a deliberate double
        # gate, but worth confirming.
        authorize(context)

        instance = common.get_instance(self.compute_api, context, server_id,
                                       want_objects=True)

        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
            context, instance.uuid)
        limited_list = common.limited(bdms, req)
        results = []

        for bdm in limited_list:
            # Only volume-backed block devices count as attachments.
            if bdm.volume_id:
                results.append(entity_maker(bdm.volume_id,
                                            bdm.instance_uuid,
                                            bdm.device_name))

        return {'volumeAttachments': results}
def _translate_snapshot_detail_view(context, vol):
    """Maps keys for snapshots details view."""
    # NOTE(gagupta): No additional data / lookups at the moment
    return _translate_snapshot_summary_view(context, vol)
def _translate_snapshot_summary_view(context, vol):
"""Maps keys for snapshots summary view."""
d = {}
d['id'] = vol['id']
d['volumeId'] = vol['volume_id']
d['status'] = vol['status']
# NOTE(gagupta): We map volume_size as the snapshot size
d['size'] = vol['volume_size']
d['createdAt'] = vol['created_at']
d['displayName'] = vol['display_name']
d['displayDescription'] = vol['display_description']
return d
class SnapshotController(wsgi.Controller):
    """The Snapshots API controller for the OpenStack API."""

    def __init__(self):
        self.volume_api = volume.API()
        super(SnapshotController, self).__init__()

    @extensions.expected_errors(404)
    def show(self, req, id):
        """Return data about the given snapshot."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            vol = self.volume_api.get_snapshot(context, id)
        except exception.SnapshotNotFound as e:
            raise exc.HTTPNotFound(explanation=e.format_message())
        return {'snapshot': _translate_snapshot_detail_view(context, vol)}

    @wsgi.response(202)
    @extensions.expected_errors(404)
    def delete(self, req, id):
        """Delete a snapshot."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            self.volume_api.delete_snapshot(context, id)
        except exception.SnapshotNotFound as e:
            raise exc.HTTPNotFound(explanation=e.format_message())

    @extensions.expected_errors(())
    def index(self, req):
        """Returns a summary list of snapshots."""
        return self._items(req, entity_maker=_translate_snapshot_summary_view)

    @extensions.expected_errors(())
    def detail(self, req):
        """Returns a detailed list of snapshots."""
        return self._items(req, entity_maker=_translate_snapshot_detail_view)

    def _items(self, req, entity_maker):
        """Returns a list of snapshots, transformed through entity_maker."""
        context = req.environ['nova.context']
        authorize(context)
        snapshots = self.volume_api.get_all_snapshots(context)
        # Honor the standard limit/offset pagination parameters.
        limited_list = common.limited(snapshots, req)
        res = [entity_maker(context, snapshot) for snapshot in limited_list]
        return {'snapshots': res}

    @extensions.expected_errors(400)
    @validation.schema(volumes_schema.snapshot_create)
    def create(self, req, body):
        """Creates a new snapshot."""
        context = req.environ['nova.context']
        authorize(context)

        snapshot = body['snapshot']
        volume_id = snapshot['volume_id']

        force = snapshot.get('force', False)
        # NOTE(review): with strict=True an unrecognized 'force' value makes
        # bool_from_string raise -- presumably the request schema rejects such
        # values first; confirm.
        force = strutils.bool_from_string(force, strict=True)
        if force:
            create_func = self.volume_api.create_snapshot_force
        else:
            create_func = self.volume_api.create_snapshot

        new_snapshot = create_func(context, volume_id,
                                   snapshot.get('display_name'),
                                   snapshot.get('display_description'))

        retval = _translate_snapshot_detail_view(context, new_snapshot)
        return {'snapshot': retval}
class Volumes(extensions.V3APIExtensionBase):
    """Volumes support."""

    name = "Volumes"
    alias = ALIAS
    version = 1

    def get_resources(self):
        """Register the volume, boot, attachment and snapshot resources."""
        resources = []

        res = extensions.ResourceExtension(
            ALIAS, VolumeController(), collection_actions={'detail': 'GET'})
        resources.append(res)

        # os-volumes_boot reuses the servers controller.
        res = extensions.ResourceExtension('os-volumes_boot',
                                           inherits='servers')
        resources.append(res)

        # Attachments are a child resource of a server.
        res = extensions.ResourceExtension('os-volume_attachments',
                                           VolumeAttachmentController(),
                                           parent=dict(
                                               member_name='server',
                                               collection_name='servers'))
        resources.append(res)

        res = extensions.ResourceExtension(
            'os-snapshots', SnapshotController(),
            collection_actions={'detail': 'GET'})
        resources.append(res)

        return resources

    def get_controller_extensions(self):
        """This extension adds no controller extensions."""
        return []
| {
"content_hash": "f5c795208149d9e696779895d790e377",
"timestamp": "",
"source": "github",
"line_count": 554,
"max_line_length": 79,
"avg_line_length": 35.33754512635379,
"alnum_prop": 0.5916636869796189,
"repo_name": "cloudbase/nova-virtualbox",
"id": "5cd88a2c4f74760a5e7991dd3a1863a33a180be7",
"size": "20213",
"binary": false,
"copies": "1",
"ref": "refs/heads/virtualbox_driver",
"path": "nova/api/openstack/compute/plugins/v3/volumes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16016453"
},
{
"name": "Shell",
"bytes": "20716"
},
{
"name": "Smarty",
"bytes": "497954"
}
],
"symlink_target": ""
} |
"""
PyCOMPSs Testbench
========================
"""
# Imports
import unittest
from pycompss.api.api import compss_barrier
from pycompss.api.api import compss_wait_on
from pycompss.api.task import task
from pycompss.api.parameter import *
from pycompss.api.binary import binary
from pycompss.api.compss import compss
from pycompss.api.constraint import constraint
from pycompss.api.decaf import decaf
from pycompss.api.implement import implement
from pycompss.api.mpi import mpi
from pycompss.api.multinode import multinode
from pycompss.api.ompss import ompss
# from pycompss.api.opencl import opencl
# All of the following tasks include a deprecated argument which is
# still supported.
# However, they must raise a WARNING message through stderr which is
# checked in the result script.
# The correct way is in lower case and underscores (snake).
# Deprecated working dir argument
@binary(binary="date", workingDir="/tmp")  # the correct way is working_dir
@task()
def binary_task(dprefix, param):
    """Run the ``date`` binary; declared with deprecated camelCase args."""
    pass
# Deprecated app name, worker in master and computing nodes arguments
@compss(runcompss="${RUNCOMPSS}", flags="-d", appName="${APP_DIR}/src/simple_compss_nested.py", workerInMaster="false", computingNodes="2")
@constraint(computing_units="2")
@task(returns=int)
def compss_task(value):
    """Run a nested COMPSs application; declared with deprecated camelCase args."""
    pass
# Deprecated working dir, df script, df executor and df lib arguments
@decaf(workingDir=".", runner="mpirun", dfScript="${APP_DIR}/src/test_decaf.py", dfExecutor="test.sh", dfLib="lib")
@task(param=FILE_OUT)
def my_decaf_task(param):
    """Run a Decaf workflow; declared with deprecated camelCase args."""
    pass
@task(returns=int)
def slow_task(value):
    """Return the cube of ``value``."""
    squared = value * value
    return squared * value
# Deprecated source class argument
@implement(sourceClass="modules.testArgumentDeprecation", method="slow_task")
@constraint(computing_units="1")
@task(returns=list)
def better_task(value):
    """Alternative implementation of slow_task; deprecated ``sourceClass``."""
    return value ** 3
# Deprecated working dir argument
@mpi(binary="date", workingDir="/tmp", runner="mpirun")
@task()
def mpi_task(dprefix, param):
    """Run ``date`` under mpirun; declared with deprecated ``workingDir``."""
    pass
# Deprecated computing nodes argument
@constraint(computing_units="2")
@multinode(computingNodes="2")  # the correct way is computing_nodes
@task(returns=1)
def multi_node_task():
    """Multi-node task declared with deprecated ``computingNodes``."""
    return 0
# Deprecated working dir argument
@ompss(binary="date", workingDir="/tmp")
@task()
def ompss_task(dprefix, param):
    """Run ``date`` through OmpSs; declared with deprecated ``workingDir``."""
    pass
# # Deprecated working dir argument
# @opencl(kernel="date", workingDir="/tmp")
# @task()
# def opencl_task(dprefix, param):
# pass
class testArgumentDeprecation(unittest.TestCase):
    """Exercise tasks declared with deprecated camelCase decorator arguments.

    Each test only checks that the task runs; the deprecation WARNING each
    decorator must emit on stderr is verified by the result script.
    """

    def testBinaryArgDepr(self):
        binary_task("-d", "next friday")
        compss_barrier()

    def testCompssArgDepr(self):
        ev = compss_task(1)
        ev = compss_wait_on(ev)
        self.assertEqual(ev, 0)

    # TODO: currently, if the argument is not recognized -> ERROR
    @unittest.skip("The runtime throws an error if unrecognized constraing")
    def testConstraintArgDepr(self):
        pass

    def testDecafArgDepr(self):
        my_decaf_task("outFileAll")
        compss_barrier()

    def testImplementArgDepr(self):
        v = 20
        o = slow_task(v)
        o = compss_wait_on(o)
        # Either slow_task or its better_task replacement must cube v.
        self.assertEqual(o, v * v * v)

    def testMpiArgDepr(self):
        mpi_task("-d", "next friday")
        compss_barrier()

    def testMultinodeArgDepr(self):
        ev = multi_node_task()
        ev = compss_wait_on(ev)
        self.assertEqual(ev, 0)

    def testOmpssArgDepr(self):
        ompss_task("-d", "next monday")
        compss_barrier()

    @unittest.skip("OpenCL unsupported")
    def testOpenclArgDepr(self):
        opencl_task("-d", "next monday")
        compss_barrier()

    @unittest.skip("TODO: do the check with @parallel")
    def testParallelArgDepr(self):
        # TODO: check with @parallel
        pass
| {
"content_hash": "d1b2945eb88d852ec81c2fc0a594111f",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 139,
"avg_line_length": 27.93984962406015,
"alnum_prop": 0.6940258342303552,
"repo_name": "mF2C/COMPSs",
"id": "de78be35c82da78b7c7692dc535ff1317fb7f50d",
"size": "3760",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sources/python/8_argument_deprecation/src/modules/testArgumentDeprecation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "1595"
},
{
"name": "C",
"bytes": "222477"
},
{
"name": "C++",
"bytes": "200186"
},
{
"name": "Dockerfile",
"bytes": "901"
},
{
"name": "Gnuplot",
"bytes": "4195"
},
{
"name": "Java",
"bytes": "4213323"
},
{
"name": "JavaScript",
"bytes": "16906"
},
{
"name": "Jupyter Notebook",
"bytes": "10514"
},
{
"name": "Lex",
"bytes": "1356"
},
{
"name": "M4",
"bytes": "5538"
},
{
"name": "Makefile",
"bytes": "14740"
},
{
"name": "Python",
"bytes": "635267"
},
{
"name": "Shell",
"bytes": "1241476"
},
{
"name": "XSLT",
"bytes": "177323"
},
{
"name": "Yacc",
"bytes": "3655"
}
],
"symlink_target": ""
} |
import sys, os, getopt, sniper_lib, sniper_stats
def usage():
    """Print the command-line help text (Python 2 print statement)."""
    print 'Usage:', sys.argv[0], '[-h (help)] [-l|--list | -t|--topology | -m|--markers | -e|--events] [--partial <section-start>:<section-end> (default: roi-begin:roi-end)] [--through-time|tt <statname>] [-d <resultsdir (default: .)>]'
# Defaults for the command-line options parsed below.
jobid = 0
resultsdir = '.'
partial = None
through_time = None
do_list = False
do_topo = False
do_markers = False
do_events = False
do_stats = True

try:
    opts, args = getopt.getopt(sys.argv[1:], "hj:d:lmte", [ 'list', 'markers', 'topology', 'events', 'partial=', 'tt=', 'through-time=' ])
except getopt.GetoptError, e:
    # NOTE: Python 2 except syntax; this script predates Python 3.
    print e
    usage()
    sys.exit()
# Process options; selecting any of -l/-t/-m/-e disables the default
# statistics dump (do_stats).
for o, a in opts:
    if o == '-h':
        usage()
        sys.exit()
    if o == '-d':
        resultsdir = a
    if o == '-j':
        jobid = long(a)
    if o == '--partial':
        # Expect a <from>:<to> pair naming two statistics snapshots.
        if ':' not in a:
            sys.stderr.write('--partial=<from>:<to>\n')
            usage()
        partial = a.split(':')
    if o in ('--tt', '--through-time'):
        through_time = a.split(',')
    if o in ('-l', '--list'):
        do_list = True
        do_stats = False
    if o in ('-t', '--topology'):
        do_topo = True
        do_stats = False
    if o in ('-m', '--markers'):
        do_markers = True
        do_stats = False
    if o in ('-e', '--events'):
        do_events = True
        do_stats = False

# No positional arguments are accepted.
if args:
    usage()
    sys.exit(-1)
def format_event(timestamp, core, thread, message):
    """Render one event line with the timestamp converted to nanoseconds."""
    elapsed_ns = timestamp / 1e6
    return '%9ld ns: core(%2d) thread(%2d) %s' % (elapsed_ns, core, thread,
                                                  message)
def format_marker(value0, value1, description):
    """Render a marker as either (value, string) or (value, value)."""
    if not description:
        return 'a = %3d, b = %3d' % (value0, value1)
    return 'a = %3d, str = "%s"' % (value0, description)
# Each mode below opens the statistics database and dumps one aspect of it.

if do_list:
    # Print the available snapshot names.
    import sniper_stats
    stats = sniper_stats.SniperStats(resultsdir = resultsdir, jobid = jobid)
    print ', '.join(stats.get_snapshots())

if do_topo:
    # Print the simulated machine topology, one record per line.
    import sniper_stats
    stats = sniper_stats.SniperStats(resultsdir = resultsdir, jobid = jobid)
    for t in stats.get_topology():
        print ', '.join(map(str,t))

if do_markers:
    import sniper_stats
    stats = sniper_stats.SniperStats(resultsdir = resultsdir, jobid = jobid)
    try:
        markers = stats.get_markers()
    except Exception, e:
        print >> sys.stderr, e
        print >> sys.stderr, "--markers could not be fetched"
        sys.exit(1)
    for timestamp, core, thread, value0, value1, description in markers:
        print format_event(timestamp, core, thread, format_marker(value0, value1, description))

if do_events:
    import sniper_stats
    stats = sniper_stats.SniperStats(resultsdir = resultsdir, jobid = jobid)
    try:
        events = stats.get_events()
    except Exception, e:
        print >> sys.stderr, e
        print >> sys.stderr, "--events could not be fetched"
        sys.exit(1)
    # Decode each event record into a human-readable line.
    for event, timestamp, core, thread, value0, value1, description in events:
        if event == sniper_stats.EVENT_MARKER:
            print format_event(timestamp, core, thread, 'Marker: %s' % format_marker(value0, value1, description))
        elif event == sniper_stats.EVENT_THREAD_NAME:
            print format_event(timestamp, core, thread, 'Thread name: %s' % description)
        elif event == sniper_stats.EVENT_APP_START:
            print format_event(timestamp, core, thread, 'Application %d start' % value0)
        elif event == sniper_stats.EVENT_APP_EXIT:
            print format_event(timestamp, core, thread, 'Application %d exit' % value0)
        elif event == sniper_stats.EVENT_THREAD_CREATE:
            print format_event(timestamp, core, thread, 'Thread created: application %d by thread %d' % (value0, value1))
        elif event == sniper_stats.EVENT_THREAD_EXIT:
            print format_event(timestamp, core, thread, 'Thread exit')
        else:
            print format_event(timestamp, core, thread, 'Unknown event %d (%d, %d, %s)' % (event, value0, value1, description))
if do_stats:
    def print_result(key, value):
        # Recursively flatten nested dicts into dotted key names; lists are
        # printed comma-separated.
        if type(value) is dict:
            for _key, _value in sorted(value.items()):
                print_result(key+'.'+_key, _value)
        else:
            print key, '=',
            if type(value) is list:
                print ', '.join(map(str, value))
            else:
                print value

    if through_time:
        # Dump the selected metrics for every snapshot in turn.
        import sniper_stats
        stats = sniper_stats.SniperStats(resultsdir = resultsdir, jobid = jobid)
        names = stats.read_metricnames()
        # A leading '-' on a metric name requests per-snapshot deltas.
        metrics = [ metric[1:] if metric[0] in '-' else metric for metric in through_time ]
        nameids = dict([ ('%s.%s' % (objectname, metricname), nameid) for nameid, (objectname, metricname) in names.items() if '%s.%s' % (objectname, metricname) in metrics ])
        prefixes = stats.get_snapshots()
        prefixes_len = max(map(len, prefixes))
        data = dict([ (prefix, stats.read_snapshot(prefix, metrics)) for prefix in prefixes ])
        def do_op(op, state, v):
            # For the '-' op, replace each value with its delta versus the
            # previous snapshot (state keeps the running previous values).
            if op == '-':
                for i, _v in enumerate(v):
                    v[i], state[i] = v[i] - state.get(i, 0), v[i]
                return v
            else:
                return v
        with sniper_lib.OutputToLess():
            for metric, _metric in zip(metrics, through_time):
                op = _metric[0]
                print '==', metric, '=='
                state = {}
                for prefix in prefixes:
                    v = data[prefix].get(nameids[metric], {})
                    # Densify the sparse per-core dict into a list.
                    v = [ v.get(i, 0) for i in range(max(v.keys() or [0])+1) ]
                    v = do_op(op, state, v)
                    print_result('%-*s' % (prefixes_len, prefix), v)
    else:
        # Default mode: dump every statistic for the requested section.
        results = sniper_lib.get_results(jobid, resultsdir, partial = partial)
        with sniper_lib.OutputToLess():
            for key, value in sorted(results['results'].items(), key = lambda (key, value): key.lower()):
                print_result(key, value)
| {
"content_hash": "de7d8e1529f8c66e0d8a63da7c354d27",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 235,
"avg_line_length": 33.14545454545455,
"alnum_prop": 0.6182117388919364,
"repo_name": "yonggang985/Sniper",
"id": "7d5aea50d421e6ad27246c8174446dc83a45f0b3",
"size": "5492",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/dumpstats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "90653"
},
{
"name": "C++",
"bytes": "1722452"
},
{
"name": "Makefile",
"bytes": "21654"
},
{
"name": "Objective-C",
"bytes": "645"
},
{
"name": "Python",
"bytes": "103923"
}
],
"symlink_target": ""
} |
import logging
import numpy as np
from numpy import ma
from .qctests import QCCheckVar
module_logger = logging.getLogger(__name__)
class StuckValue(QCCheckVar):
    """Flag a variable whose unmasked values never change.

    A series with more than one observation where all values are (numerically)
    equal to the first is flagged bad; any other series is flagged good.
    Masked entries always receive flag 9.
    """
    def test(self):
        self.flags = {}
        raw = self.data[self.varname]
        values = ma.compressed(raw)

        flag = np.zeros(raw.shape, dtype='i1')
        stuck = (values.size > 1) and np.allclose(
            values, np.ones_like(values) * values[0])
        if stuck:
            flag[:] = self.flag_bad
        else:
            flag[:] = self.flag_good

        # Missing (masked) data gets flag 9 regardless of the outcome above.
        flag[ma.getmaskarray(raw)] = 9
        self.flags['stuck_value'] = flag
| {
"content_hash": "b900e705e1d6bc0f7975f542fdd917df",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 69,
"avg_line_length": 26.636363636363637,
"alnum_prop": 0.6040955631399317,
"repo_name": "castelao/CoTeDe",
"id": "89a4d9c1a94fa6bd09df3420c6c9a525772da50f",
"size": "676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cotede/qctests/stuck_value.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "178847"
},
{
"name": "TeX",
"bytes": "2430"
}
],
"symlink_target": ""
} |
from south.v2 import DataMigration
import desktop.management.commands.convert_documents
class Migration(DataMigration):
    def forwards(self, orm):
        """Convert all users' Doc1 documents to Doc2 in a single pass."""
        # Earlier we did the document conversions from Doc1 to Doc2 upon loading
        # the home page of a user. That approach had certain flaws like shared
        # documents didn't show up until the owner logged in and opened his home
        # page. Also, home page load time was affected when the conversions failed
        # and loading the home page retried the conversions every single time.
        # This migration handles the document conversion of all users at
        # the same time preventing such flaws. This migration is being done at
        # the useradmin level to avoid any dependency issues.
        desktop.management.commands.convert_documents.Command().handle()
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'useradmin.grouppermission': {
'Meta': {'object_name': 'GroupPermission'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'hue_permission': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['useradmin.HuePermission']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'useradmin.huepermission': {
'Meta': {'object_name': 'HuePermission'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'app': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'through': u"orm['useradmin.GroupPermission']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'useradmin.ldapgroup': {
'Meta': {'object_name': 'LdapGroup'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'group'", 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'useradmin.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'creation_method': ('django.db.models.fields.CharField', [], {'default': "'HUE'", 'max_length': '64'}),
'first_login': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'home_directory': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1969, 12, 31, 0, 0)', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['useradmin']
symmetrical = True
| {
"content_hash": "e5aad78924ee65d6a04506cc24d769e3",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 195,
"avg_line_length": 70.12222222222222,
"alnum_prop": 0.5736016479163366,
"repo_name": "cloudera/hue",
"id": "e4afa035596f5b92ffaf397a755808931408b86f",
"size": "6336",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "apps/useradmin/src/useradmin/old_migrations/0008_convert_documents.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
} |
"""Utilities for IODINE."""
# pylint: disable=g-doc-bad-indent, g-doc-return-or-yield, g-doc-args
# pylint: disable=missing-docstring
import importlib
import math
from absl import logging
from matplotlib.colors import hsv_to_rgb
import numpy as np
import shapeguard
import sonnet as snt
import tensorflow.compat.v1 as tf
import tensorflow_probability as tfp
# Shorthand for the TensorFlow Probability distributions namespace.
tfd = tfp.distributions

# Registry of activation functions addressable by (lowercase) name,
# used by get_act_func below.
ACT_FUNCS = {
    "identity": tf.identity,
    "sigmoid": tf.nn.sigmoid,
    "tanh": tf.nn.tanh,
    "relu": tf.nn.relu,
    "elu": tf.nn.elu,
    "selu": tf.nn.selu,
    "softplus": tf.nn.softplus,
    "exp": tf.exp,
    "softmax": tf.nn.softmax,
}
def get_act_func(name_or_func):
  """Resolve an activation function.

  Accepts None (maps to identity), a callable (returned unchanged), or the
  case-insensitive name of an entry in ACT_FUNCS.
  """
  if name_or_func is None:
    return tf.identity
  if callable(name_or_func):
    return name_or_func
  if isinstance(name_or_func, str):
    return ACT_FUNCS[name_or_func.lower()]
  raise KeyError(
      'Unknown activation function "{}" of type {}"'.format(
          name_or_func, type(name_or_func)))
# Registry of distribution classes addressable by (lowercase) name,
# used by get_distribution below.
DISTS = {
    "normal": tfd.Normal,
    "log_normal": tfd.LogNormal,
    "laplace": tfd.Laplace,
    "logistic": tfd.Logistic,
}
def get_distribution(name_or_dist):
  """Resolve a tfd distribution class from a name or pass a class through."""
  if isinstance(name_or_dist, type(tfd.Normal)):
    # Already a distribution class: return as-is.
    return name_or_dist
  if isinstance(name_or_dist, str):
    return DISTS[name_or_dist.lower()]
  raise KeyError(
      'Unknown distribution "{}" of type {}"'.format(name_or_dist,
                                                     type(name_or_dist)))
def get_mask_plot_colors(nr_colors):
  """Return `nr_colors` RGB colors with uniformly spaced hues.

  Output is a float32 array of shape (nr_colors, 3) with full saturation
  and value.
  """
  hues = np.linspace(0, 1, nr_colors, endpoint=False)
  hsv = np.ones((nr_colors, 3), dtype=np.float32)
  hsv[:, 0] = hues
  return hsv_to_rgb(hsv)
def color_transform(masks):
  """Turn per-component mask weights into RGB by mixing component colors.

  The last axis of `masks` indexes components; each component gets a fixed
  hue and the output is the weighted mix, shape (..., 3).
  """
  with tf.name_scope("color_transform"):
    num_components = masks.shape.as_list()[-1]
    palette = tf.constant(get_mask_plot_colors(num_components),
                          name="mask_colors")
    return tf.tensordot(masks, palette, axes=1)
def construct_diagnostic_image(
    images,
    recons,
    masks,
    components,
    border_width=2,
    nr_images=6,
    clip=True,
    mask_components=False,
):
  """Construct a single image containing image, recons., mask, and components.

  Args:
    images: (B, H, W, C)
    recons: (B, H, W, C)
    masks: (B, H, W, K)
    components: (B, H, W, K, C)
    border_width: int. width of the border in pixels. (default=2)
    nr_images: int. Number of images to include. (default=6)
    clip: bool. Whether to clip the final image to range [0, 1].
    mask_components: bool. If True, multiply each component by its mask
      before rendering.

  Returns:
    diag_images: (nr, H+border_width*2, (W+border_width*2) * (K+3), 3)
  """
  with tf.name_scope("diagnostic_image"):
    # transform the masks into RGB images
    recolored_masks = color_transform(masks[:nr_images])
    if images.get_shape().as_list()[-1] == 1:
      # deal with grayscale images
      images = tf.tile(images[:nr_images], [1, 1, 1, 3])
      recons = tf.tile(recons[:nr_images], [1, 1, 1, 3])
      components = tf.tile(components[:nr_images], [1, 1, 1, 1, 3])
    if mask_components:
      components *= masks[:nr_images, ..., tf.newaxis]
    # Pad everything (gray 0.5 borders for images, white for masks)
    no_pad, pad = (0, 0), (border_width, border_width)
    paddings = tf.constant([no_pad, pad, pad, no_pad])
    paddings_components = tf.constant([no_pad, pad, pad, no_pad, no_pad])
    pad_images = tf.pad(images[:nr_images], paddings, constant_values=0.5)
    pad_recons = tf.pad(recons[:nr_images], paddings, constant_values=0.5)
    pad_masks = tf.pad(recolored_masks, paddings, constant_values=1.0)
    pad_components = tf.pad(
        components[:nr_images], paddings_components, constant_values=0.5
    )
    # reshape components into single wide image (K slots side by side)
    pad_components = tf.transpose(pad_components, [0, 1, 3, 2, 4])
    pc_shape = pad_components.shape.as_list()
    pc_shape[2] = pc_shape[2] * pc_shape.pop(3)
    pad_components = tf.reshape(pad_components, pc_shape)
    # concatenate all parts along width
    diag_imgs = tf.concat(
        [pad_images, pad_recons, pad_masks, pad_components], axis=2
    )
    # concatenate all images along height
    diag_shape = diag_imgs.shape.as_list()
    final_img = tf.reshape(diag_imgs, [1, -1, diag_shape[2], diag_shape[3]])
    if clip:
      final_img = tf.clip_by_value(final_img, 0.0, 1.0)
    return final_img
def construct_reconstr_image(images, recons, border_width=2,
                             nr_images=6, clip=True):
  """Stack up to `nr_images` (input, reconstruction) pairs into one image.

  Args:
    images: (B, H, W, C)
    recons: (B, H, W, C)
    border_width: int. width of the border in pixels. (default=2)
    nr_images: int. Number of images to include. (default=6)
    clip: bool. Whether to clip the final image to range [0, 1].

  Returns:
    rec_images: (nr, H+border_width*2, (W+border_width*2) * 2, 3)
  """
  with tf.name_scope("diagnostic_image"):
    # Gray (0.5) border around each image.
    border = (border_width, border_width)
    paddings = tf.constant([(0, 0), border, border, (0, 0)])
    padded_images = tf.pad(images[:nr_images], paddings, constant_values=0.5)
    padded_recons = tf.pad(recons[:nr_images], paddings, constant_values=0.5)
    # Input and reconstruction side by side...
    pairs = tf.concat([padded_images, padded_recons], axis=2)
    # ...then all pairs stacked vertically into a single image.
    pair_shape = pairs.shape.as_list()
    final_img = tf.reshape(pairs, [1, -1, pair_shape[2], pair_shape[3]])
    if clip:
      final_img = tf.clip_by_value(final_img, 0.0, 1.0)
    return final_img
def construct_iterations_image(
    images, recons, masks, border_width=2, nr_seqs=2, clip=True
):
  """Construct a single image containing image, and recons.

  Iterations run along the width, sequences along the height; each cell shows
  input, reconstruction, and recolored masks stacked vertically.

  Args:
    images: (B, T, 1, H, W, C)
    recons: (B, T, 1, H, W, C)
    masks: (B, T, K, H, W, 1)
    border_width: int. width of the border in pixels. (default=2)
    nr_seqs: int. Number of sequences to include. (default=2)
    clip: bool. Whether to clip the final image to range [0, 1].

  Returns:
    rec_images: (nr, H+border_width*2, (W+border_width*2) * 2, 3)
  """
  # shapeguard both validates shapes and records named dims (B, T, H, W, ...).
  sg = shapeguard.ShapeGuard()
  sg.guard(recons, "B, T, 1, H, W, C")
  if images.get_shape().as_list()[1] == 1:
    # Static input: repeat it across all T iterations.
    images = tf.tile(images, sg["1, T, 1, 1, 1, 1"])
  sg.guard(images, "B, T, 1, H, W, C")
  sg.guard(masks, " B, T, K, H, W, 1")
  if sg.C == 1:  # deal with grayscale
    images = tf.tile(images, [1, 1, 1, 1, 1, 3])
    recons = tf.tile(recons, [1, 1, 1, 1, 1, 3])
  sg.S = min(nr_seqs, sg.B)
  with tf.name_scope("diagnostic_image"):
    # convert masks to rgb (move K to the last axis for color_transform)
    masks_trans = tf.transpose(masks[:nr_seqs], [0, 1, 5, 3, 4, 2])
    recolored_masks = color_transform(masks_trans)
    # Pad everything
    no_pad, pad = (0, 0), (border_width, border_width)
    paddings = tf.constant([no_pad, no_pad, no_pad, pad, pad, no_pad])
    pad_images = tf.pad(images[:nr_seqs], paddings, constant_values=0.5)
    pad_recons = tf.pad(recons[:nr_seqs], paddings, constant_values=0.5)
    pad_masks = tf.pad(recolored_masks, paddings, constant_values=0.5)
    # concatenate all parts along width
    triples = tf.concat([pad_images, pad_recons, pad_masks], axis=3)
    triples = sg.guard(triples[:, :, 0], "S, T, 3*Hp, Wp, 3")
    # concatenate iterations along width and sequences along height
    final = tf.reshape(
        tf.transpose(triples, [0, 2, 1, 3, 4]), sg["1, S*3*Hp, Wp*T, 3"]
    )
    if clip:
      final = tf.clip_by_value(final, 0.0, 1.0)
    return final
def get_overview_image(image, output_dist, mask_components=False):
  """Build a diagnostic overview image for `image` and its output distribution.

  If `output_dist` is a mixture (exposes mixture_distribution and
  components_distribution), the overview includes masks and per-component
  reconstructions; otherwise only input/reconstruction pairs are shown.
  """
  recons = output_dist.mean()[:, 0]
  image = image[:, 0]
  is_mixture = hasattr(output_dist, "mixture_distribution") and hasattr(
      output_dist, "components_distribution")
  if not is_mixture:
    return construct_reconstr_image(image, recons)
  mask = output_dist.mixture_distribution.probs[:, 0]
  components = output_dist.components_distribution.mean()[:, 0]
  return construct_diagnostic_image(
      image, recons, mask, components, mask_components=mask_components)
class OnlineMeanVarEstimator(snt.AbstractModule):
  """Online estimator for mean and variance using Welford's algorithm."""

  def __init__(self, axis=None, ddof=0.0, name="online_mean_var"):
    """Args:
      axis: axis (or axes) to reduce over when accumulating; None reduces all.
      ddof: delta degrees of freedom subtracted from the total weight in the
        variance denominator (0.0 gives the population variance).
      name: module name.
    """
    super().__init__(name=name)
    self._axis = axis
    self._ddof = ddof

  def _build(self, x, weights=None):
    # Default to uniform weight 1 per element; broadcast if shapes differ.
    if weights is None:
      weights = tf.ones_like(x)
    if weights.get_shape().as_list() != x.get_shape().as_list():
      weights = tf.broadcast_to(weights, x.get_shape().as_list())
    sum_weights = tf.reduce_sum(weights, axis=self._axis)
    shape = sum_weights.get_shape().as_list()
    # Non-trainable accumulator state: total weight, running mean, and M2
    # (sum of weighted squared deviations) as in Welford's algorithm.
    total = tf.get_variable(
        "total",
        shape=shape,
        dtype=weights.dtype,
        initializer=tf.zeros_initializer(),
        trainable=False,
    )
    mean = tf.get_variable(
        "mean",
        shape=shape,
        dtype=x.dtype,
        initializer=tf.zeros_initializer(),
        trainable=False,
    )
    m2 = tf.get_variable(
        "M2",
        shape=shape,
        dtype=x.dtype,
        initializer=tf.zeros_initializer(),
        trainable=False,
    )
    # The control_dependencies chain enforces the update order
    # total -> mean -> M2, which Welford's recurrences require.
    total_update = tf.assign_add(total, sum_weights)
    with tf.control_dependencies([total_update]):
      delta = (x - mean) * weights
      mean_update = tf.assign_add(
          mean, tf.reduce_sum(delta, axis=self._axis) / total
      )
    with tf.control_dependencies([mean_update]):
      # delta2 uses the *updated* mean; delta used the old one.
      delta2 = x - mean
      m2_update = tf.assign_add(
          m2, tf.reduce_sum(delta * delta2, axis=self._axis)
      )
    with tf.control_dependencies([m2_update]):
      # Returns (mean, variance, total weight).
      return tf.identity(mean), m2 / (total - self._ddof), tf.identity(total)
def print_shapes(name, value, indent=""):
if isinstance(value, dict):
print("{}{}:".format(indent, name))
for k, v in sorted(value.items(),
key=lambda x: (isinstance(x[1], dict), x[0])):
print_shapes(k, v, indent + " ")
elif isinstance(value, list):
print(
"{}{}[{}]: {} @ {}".format(
indent, name, len(value), value[0].shape, value[0].dtype
)
)
elif isinstance(value, np.ndarray):
print("{}{}: {} @ {}".format(indent, name, value.shape, value.dtype))
elif isinstance(value, tf.Tensor):
print(
"{}{}: {} @ {}".format(
indent, name, value.get_shape().as_list(), value.dtype
)
)
elif np.isscalar(value):
print("{}{}: {}".format(indent, name, value))
else:
print("{}{}.type: {}".format(indent, name, type(value)))
def _pad_images(images, image_border_value=0.5, border_width=2):
  """Pad images to create gray borders.

  Args:
    images: Tensor of shape [B, H], [B, H, W], or [B, H, W, C].
    image_border_value: Scalar greyscale value used for the border.
    border_width: Int. Border width in pixels.

  Raises:
    ValueError: if the image provided is not {2,3,4} dimensional.

  Returns:
    Tensor of same shape as images, except H and W grown by 2*border_width.
  """
  rank = len(images.get_shape())
  edge = (border_width, border_width)
  # Batch (and channel) axes are never padded; only spatial axes get borders.
  if rank == 2:  # [B, H]
    paddings = [(0, 0), edge]
  elif rank == 3:  # [B, H, W]
    paddings = [(0, 0), edge, edge]
  elif rank == 4:  # [B, H, W, C]
    paddings = [(0, 0), edge, edge, (0, 0)]
  else:
    raise ValueError("expected image to be 2D, 3D or 4D, got %d" % rank)
  return tf.pad(images, tf.constant(paddings), "CONSTANT",
                constant_values=image_border_value)
def images_to_grid(
    images,
    grid_height=None,
    grid_width=4,
    max_grid_height=4,
    max_grid_width=4,
    image_border_value=0.5,
):
  """Combine images and arrange them in a grid.

  Args:
    images: Tensor of shape [B, H], [B, H, W], or [B, H, W, C].
    grid_height: Height of the grid of images to output, or None. Either
      `grid_width` or `grid_height` must be set to an integer value.
      If None, `grid_height` is set to ceil(B/`grid_width`), and capped at
      `max_grid_height` when provided.
    grid_width: Width of the grid of images to output, or None. Either
      `grid_width` or `grid_height` must be set to an integer value.
      If None, `grid_width` is set to ceil(B/`grid_height`), and capped at
      `max_grid_width` when provided.
    max_grid_height: Maximum allowable height of the grid of images to
      output or None. Only used when `grid_height` is None.
    max_grid_width: Maximum allowable width of the grid of images to output,
      or None. Only used when `grid_width` is None.
    image_border_value: None or scalar value of greyscale border for images.
      If None, then no border is rendered.

  Raises:
    ValueError: if neither of grid_width or grid_height are set to a positive
      integer.

  Returns:
    images: Tensor of shape [height*H, width*W, C].
      C will be set to 1 if the input was provided with no channels.
      Contains all input images in a grid.
  """
  # If only one dimension is set, infer how big the other one should be.
  if grid_height is None:
    if not isinstance(grid_width, int) or grid_width <= 0:
      raise ValueError(
          "if `grid_height` is None, `grid_width` must be " "a positive integer"
      )
    grid_height = int(math.ceil(images.get_shape()[0].value / grid_width))
    if max_grid_height is not None:
      grid_height = min(max_grid_height, grid_height)
  if grid_width is None:
    if not isinstance(grid_height, int) or grid_height <= 0:
      raise ValueError(
          "if `grid_width` is None, `grid_height` must be " "a positive integer"
      )
    grid_width = int(math.ceil(images.get_shape()[0].value / grid_height))
    if max_grid_width is not None:
      grid_width = min(max_grid_width, grid_width)
  images = images[: grid_height * grid_width, ...]
  # Pad with extra blank frames if grid_height x grid_width is less than the
  # number of frames provided.
  pre_images_shape = images.get_shape().as_list()
  if pre_images_shape[0] < grid_height * grid_width:
    pre_images_shape[0] = grid_height * grid_width - pre_images_shape[0]
    if image_border_value is not None:
      dummy_frames = image_border_value * tf.ones(
          shape=pre_images_shape, dtype=images.dtype
      )
    else:
      dummy_frames = tf.zeros(shape=pre_images_shape, dtype=images.dtype)
    images = tf.concat([images, dummy_frames], axis=0)
  # NOTE(review): this truthiness test also skips the border when the value
  # is 0 or 0.0, unlike the `is not None` check above — confirm intended.
  if image_border_value:
    images = _pad_images(images, image_border_value=image_border_value)
  images_shape = images.get_shape().as_list()
  images = tf.reshape(images, [grid_height, grid_width] + images_shape[1:])
  # Restore W and C axes for rank-2/rank-3 inputs so the transpose below works.
  if len(images_shape) == 2:
    images = tf.expand_dims(images, -1)
  if len(images_shape) <= 3:
    images = tf.expand_dims(images, -1)
  image_height, image_width, channels = images.get_shape().as_list()[2:]
  # Interleave grid rows/columns into one [height*H, width*W, C] image.
  images = tf.transpose(images, perm=[0, 2, 1, 3, 4])
  images = tf.reshape(
      images, [grid_height * image_height, grid_width * image_width, channels]
  )
  return images
def flatten_all_but_last(tensor, n_dims=1):
  """Collapse all leading axes of `tensor`, keeping the last `n_dims`.

  Returns the flattened tensor plus an `unflatten` callback that restores the
  original leading axes on another tensor whose first axis matches.
  """
  full_shape = tensor.shape.as_list()
  lead_shape = full_shape[:-n_dims]
  flat_tensor = tf.reshape(tensor,
                           [np.prod(lead_shape)] + full_shape[-n_dims:])

  def unflatten(other_tensor):
    trailing = other_tensor.shape.as_list()[1:]
    return tf.reshape(other_tensor, lead_shape + trailing)

  return flat_tensor, unflatten
def ensure_3d(tensor):
  """Append a trailing singleton axis to rank-2 tensors; pass rank-3 through."""
  rank = tensor.shape.ndims
  if rank == 2:
    return tensor[..., None]
  assert rank == 3
  return tensor
# Cache of already-built plan elements, keyed by identifier; pre-seeded so
# plans can reference "none" and the global step by name.
# NOTE(review): creating the global step at import time couples importing this
# module to an active TF graph — confirm this is intended.
built_element_cache = {
    "none": None,
    "global_step": tf.train.get_or_create_global_step(),
}
def build(plan, identifier):
  """Recursively instantiate a config plan, memoizing in built_element_cache.

  A plan may be a pre-built value (returned as-is), a dict with a
  "constructor" key (constructed with its remaining entries as kwargs), or a
  plain dict (each entry built recursively).
  """
  logging.debug("building %s", identifier)
  if identifier in built_element_cache:
    logging.debug("%s is already built, returning", identifier)
    return built_element_cache[identifier]
  if not isinstance(plan, dict):
    # Leaf value: use as-is.
    return plan
  if "constructor" not in plan:
    # Plain dict: build every entry under its own identifier.
    return {key: build(val, identifier=key) for key, val in plan.items()}
  ctor = _resolve_constructor(plan)
  kwargs = {key: build(val, identifier=key)
            for key, val in plan.items() if key != "constructor"}
  with tf.variable_scope(identifier):
    built_element_cache[identifier] = ctor(**kwargs)
  return built_element_cache[identifier]
def _resolve_constructor(plan_subsection):
assert "constructor" in plan_subsection, plan_subsection
if isinstance(plan_subsection["constructor"], str):
module, _, ctor = plan_subsection["constructor"].rpartition(".")
mod = importlib.import_module(module)
return getattr(mod, ctor)
else:
return plan_subsection["constructor"]
| {
"content_hash": "80e6d8befc9ce4428ad0a2b1e86f516c",
"timestamp": "",
"source": "github",
"line_count": 488,
"max_line_length": 80,
"avg_line_length": 34.38319672131148,
"alnum_prop": 0.6289409380773586,
"repo_name": "deepmind/deepmind-research",
"id": "b8f8cf27cefe3769fea94afc3c5cdfade73e2e0b",
"size": "17373",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iodine/modules/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1002"
},
{
"name": "C++",
"bytes": "5765"
},
{
"name": "Jupyter Notebook",
"bytes": "12330730"
},
{
"name": "Lua",
"bytes": "76186"
},
{
"name": "OpenEdge ABL",
"bytes": "15630"
},
{
"name": "PureBasic",
"bytes": "8"
},
{
"name": "Python",
"bytes": "3419119"
},
{
"name": "Racket",
"bytes": "226692"
},
{
"name": "Shell",
"bytes": "84450"
},
{
"name": "Starlark",
"bytes": "3463"
}
],
"symlink_target": ""
} |
from Queue import Queue
from warnings import warn
import functools
import json
import os
import re
import struct
import subprocess
import sys
import threading
import traceback
import weakref
from sgactions.dispatch import dispatch as _dispatch
def log(*args):
    """Write a '[SGActions]'-tagged diagnostic line to stderr and flush."""
    message = ' '.join(str(arg) for arg in args)
    sys.stderr.write('[SGActions] %s\n' % message)
    sys.stderr.flush()
# We need to keep checking for this as long as the old Firefox plugin is
# in the wild.
_line_based = os.environ.get('SGACTIONS_HOST') == 'Firefox'

# Capabilities reported by the peer during the hello handshake.
_capabilities = {}
# Message-type name -> handler function, populated by the @handler decorator.
_handlers = {}
# session token -> handler thread; weak values so finished threads drop out.
_threads = weakref.WeakValueDictionary()
# Thread-local storage (reserved; sessions are attached to threads directly).
_local = threading.local()
def reply(orig, **msg):
    """Send `msg` back to the sender of message `orig`.

    The destination is `orig`'s 'src' field; if that is missing/falsy,
    `orig` itself is used as the destination.
    """
    destination = orig.get('src') or orig
    msg['dst'] = destination
    send(**msg)
def send(**msg):
    """JSON-encode `msg` and write it to the real stdout with host framing.

    The legacy Firefox host is newline-delimited; otherwise messages are
    length-prefixed (4-byte struct 'I') as in native messaging.
    """
    msg['src'] = 'native'
    payload = json.dumps(msg)
    log('send', len(payload), payload)
    out = sys.__stdout__
    if _line_based:
        out.write(payload + '\n')
    else:
        out.write(struct.pack('I', len(payload)))
        out.write(payload)
    out.flush()
def format_exception(e):
    """Serialize exception `e` into the wire-format error message dict."""
    return {
        'type': 'error',
        'error_type': e.__class__.__name__,
        'error': str(e),
    }
def reply_exception(orig, e):
    """Report exception `e` back to the sender of message `orig`."""
    payload = format_exception(e)
    reply(orig, **payload)
def handler(func, name=None):
    """Register a message handler; usable as @handler or @handler('name')."""
    if isinstance(func, basestring):
        # Called as @handler('name'): curry so the next call receives the
        # actual function with this name pre-bound.
        return functools.partial(handler, name=func)
    key = name or func.__name__
    _handlers[key] = func
    return func
@handler
def hello(capabilities=None, **kw):
    """Handshake: record the peer's capabilities and answer with ours."""
    _capabilities.update(capabilities or {})
    reply(
        kw,
        type='elloh',
        capabilities={'dispatch': True},
        executable=sys.executable,
        script=__file__,
        native=os.environ.get('SGACTIONS_NATIVE'),
        origin=os.environ.get('SGACTIONS_ORIGIN'),
        host=os.environ.get('SGACTIONS_HOST'),
    )
@handler
def elloh(**kw):
    # Acknowledgement of our own 'hello'; nothing to do.
    pass
@handler
def ping(**req):
    """Answer a ping by echoing the request back with type 'pong'."""
    res = req.copy()
    res['type'] = 'pong'
    # BUGFIX: reply() accepts only one positional argument (the original
    # message) and the response fields as keyword arguments; the previous
    # `reply(req, res)` passed the response positionally and raised TypeError.
    reply(req, **res)
@handler
def pong(**kw):
    # Response to our own ping; nothing to do.
    pass
@handler
def dispatch(entrypoint=None, kwargs=None, url=None, **kw):
    """Run an action and reply with its result or the exception it produced."""
    # We must respect the old URL method until the legacy Chrome/Firefox
    # addon is gone.
    log('dispatching:', entrypoint or url)
    outcome = _dispatch(entrypoint=entrypoint, kwargs=kwargs, url=url,
                        reload=None)
    if isinstance(outcome, Exception):
        reply_exception(kw, outcome)
        return
    reply(kw, type='result', result=outcome)
def send_and_recv(**kwargs):
    """From a handler thread: send a message and block for the user's response.

    Creates the session's single-slot result queue on first use; the reply is
    delivered by the `user_response` handler. Accepts an optional `timeout`
    keyword (seconds, default 300).
    """
    session = current_session()
    queue = session.get('result_queue')
    if not queue:
        queue = session['result_queue'] = Queue(1)
    timeout = kwargs.pop('timeout', 300)
    send(dst=session['src'], session_token=session['token'], **kwargs)
    # Local name chosen not to shadow the module-level reply() function.
    response = queue.get(timeout=timeout)
    log('async response:', repr(response))
    return response
@handler
def user_response(session_token, **kw):
    """Route a user's response back to the session thread waiting on it."""
    thread = _threads.get(session_token)
    if not thread:
        raise ValueError('no matching thread', session_token)
    result_queue = thread.session.get('result_queue')
    if not result_queue:
        raise ValueError('session not expecting result', session_token)
    # Non-blocking put: the queue has capacity 1 and a waiting consumer.
    result_queue.put(kw, block=False)
def main():
    """Read framed messages from stdin forever, dispatching each on a thread."""
    # We need to take over stdout so that print statements don't result in the
    # browser thinking it is getting a message back.
    sys.stdout = open('/tmp/sgactions.native.log', 'a')
    dispatch_counter = 0
    log('entering main loop')
    while True:
        try:
            if _line_based:
                raw_msg = sys.stdin.readline()
                if not raw_msg:
                    log('native port closed')
                    break
            else:
                # Native-messaging framing: 4-byte length prefix, then JSON.
                raw_size = sys.stdin.read(4)
                if not raw_size:
                    log('native port closed')
                    break
                size, = struct.unpack('I', raw_size)
                log('reading message of size', raw_size)
                raw_msg = sys.stdin.read(size)
            msg = json.loads(raw_msg)
        except Exception as e:
            traceback.print_exc()
            send(**format_exception(e))
            continue
        if len(_threads):
            log('%d sessions already open' % len(_threads))
        if msg.get('type') not in _handlers:
            reply(msg, type='error', error='unknown message type %r' % msg.get('type'))
            log('unknown message type: %s' % msg.get('type'))
            # BUGFIX: previously fell through and spawned a handler thread
            # anyway, which raised KeyError in _dispatch_target and sent a
            # second (redundant) error reply.
            continue
        dispatch_counter += 1
        thread = _threads[dispatch_counter] = threading.Thread(target=_dispatch_target, args=[msg])
        thread.daemon = True
        # Session state read by current_session()/send_and_recv().
        thread.session = {
            'type': msg['type'],
            'src': msg.get('src'),
            'token': dispatch_counter,
        }
        thread.start()
        del thread  # Kill this reference immediately.
def current_session(strict=True):
    """Return the session dict attached to the current handler thread.

    Outside a handler thread (no `session` attribute), raises RuntimeError
    when `strict`, otherwise returns None.
    """
    thread = threading.current_thread()
    if hasattr(thread, 'session'):
        return thread.session
    if strict:
        raise RuntimeError('no current native handler')
    return None
def _dispatch_target(msg):
    """Thread target: run the handler for `msg`, reporting any exception."""
    try:
        _handlers[msg['type']](**msg)
    except Exception as e:
        traceback.print_exc()
        try:
            reply_exception(msg, e)
        except Exception:
            # Just in case it is the exception reporting mechanism...
            log('exception during reply_exception')
            traceback.print_exc()
| {
"content_hash": "ba87e08ca3a47e2c4d48233c66a2076d",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 99,
"avg_line_length": 25.926470588235293,
"alnum_prop": 0.6010588012856872,
"repo_name": "vfxetc/sgactions",
"id": "0344e04b1c7c5525281f29e7ebba2eec15b75c94",
"size": "5312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sgactions/browsers/native/core.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "55989"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "JavaScript",
"bytes": "30264"
},
{
"name": "Makefile",
"bytes": "389"
},
{
"name": "Python",
"bytes": "54124"
},
{
"name": "Shell",
"bytes": "1843"
}
],
"symlink_target": ""
} |
"""
Tests For Multi Scheduler
"""
from nova.scheduler import driver
from nova.scheduler import multi
from nova.tests.scheduler import test_scheduler
class FakeComputeScheduler(driver.Scheduler):
    """Fake compute scheduler used to verify multi-driver wiring."""
    # Marker attribute checked by MultiDriverTestCase.test_drivers_inited.
    is_fake_compute = True

    def __init__(self):
        super(FakeComputeScheduler, self).__init__()
        # Flipped by the stubbed update_service_capabilities in tests.
        self.is_update_caps_called = False

    def schedule_theoretical(self, *args, **kwargs):
        # Intentionally a no-op; exists only to give this fake a schedule_*
        # method.
        pass
class FakeDefaultScheduler(driver.Scheduler):
    """Fake default scheduler used to verify multi-driver wiring."""
    # Marker attribute checked by MultiDriverTestCase.test_drivers_inited.
    is_fake_default = True

    def __init__(self):
        super(FakeDefaultScheduler, self).__init__()
        # Flipped by the stubbed update_service_capabilities in tests.
        self.is_update_caps_called = False
class MultiDriverTestCase(test_scheduler.SchedulerTestCase):
    """Test case for multi driver."""

    driver_cls = multi.MultiScheduler

    def setUp(self):
        super(MultiDriverTestCase, self).setUp()
        # Point the multi scheduler at the fake sub-drivers defined above.
        base_name = 'nova.tests.scheduler.test_multi_scheduler.%s'
        compute_cls_name = base_name % 'FakeComputeScheduler'
        default_cls_name = base_name % 'FakeDefaultScheduler'
        self.flags(compute_scheduler_driver=compute_cls_name,
                   default_scheduler_driver=default_cls_name)
        self._manager = multi.MultiScheduler()

    def test_drivers_inited(self):
        # Both sub-drivers must be instantiated from the configured fakes.
        mgr = self._manager
        self.assertEqual(len(mgr.drivers), 2)
        self.assertTrue(mgr.drivers['compute'].is_fake_compute)
        self.assertTrue(mgr.drivers['default'].is_fake_default)

    def test_update_service_capabilities(self):
        # Capability updates must fan out to the sub-drivers.
        def fake_update_service_capabilities(self, service, host, caps):
            self.is_update_caps_called = True
        mgr = self._manager
        self.stubs.Set(driver.Scheduler,
                       'update_service_capabilities',
                       fake_update_service_capabilities)
        self.assertFalse(mgr.drivers['compute'].is_update_caps_called)
        mgr.update_service_capabilities('foo_svc', 'foo_host', 'foo_caps')
        self.assertTrue(mgr.drivers['compute'].is_update_caps_called)
| {
"content_hash": "fcb0f9b411112b8c453d3dc5a19d57d7",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 74,
"avg_line_length": 33.59322033898305,
"alnum_prop": 0.6634712411705348,
"repo_name": "jessicalucci/NovaOrc",
"id": "fb25ae9da4e9b33bba9185a6b77c511a56a9a5f9",
"size": "2796",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "nova/tests/scheduler/test_multi_scheduler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from piliko import *
# this example shows the plotting of red and green circles with
# negative radial quadrances.

# Two circles centred on the origin: one with quadrance 4, one with
# quadrance -4.
c1=circle((0,0),4)
c2=circle((0,0),-4)
# Plot both circles in red and green; the blue plot is drawn for the
# positive-quadrance circle only.
plot_red_circles([c1,c2])
plot_blue_circles(c1)
plot_green_circles(c1,c2)
plotshow()
| {
"content_hash": "190e19e868cb26ae629eef1831632151",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 63,
"avg_line_length": 21.90909090909091,
"alnum_prop": 0.7302904564315352,
"repo_name": "donbright/piliko",
"id": "c6449e36478c1587070db3c9bec327f7baefb861",
"size": "241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/example15.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5303"
},
{
"name": "C++",
"bytes": "82774"
},
{
"name": "Go",
"bytes": "18074"
},
{
"name": "HTML",
"bytes": "43043"
},
{
"name": "JavaScript",
"bytes": "4554"
},
{
"name": "Python",
"bytes": "387953"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division
import imghdr
import urllib2 as urllib
import io
from PIL import Image
# Process-wide cache of image URLs that already passed validation; maps
# url -> True (populated by validate_img_on_web, checked before re-fetching).
validated_images = {}
class AtomIncompatibleImageType(Exception):
    """
    Exception raised when the Atom image is not a PNG.
    """
    pass
class AtomIncompatibleImageSize(Exception):
    """
    Exception raised when the Atom image is not the proper size or aspect ratio.
    """
    pass
class AtomImageHTTPError(Exception):
    """
    Exception raised when the Atom image can not be accessed over HTTP.
    """
    pass
def check_img(img):
    """
    Validate a local image against Apple Newsstand feed specifications v1.2.

    Raises AtomIncompatibleImageType for non-PNG input and
    AtomIncompatibleImageSize when the dimensions or aspect ratio are
    out of spec; returns True on success.
    """
    if imghdr.what(img) != "png":
        raise AtomIncompatibleImageType("image needs to be of PNG type")
    width, height = Image.open(img).size
    # Spec: the longest side must be at least 1024 pixels.
    if max(width, height) < 1024:
        raise AtomIncompatibleImageSize("image's biggest side has to be at least 1024px")
    # Spec: width/height ratio must lie in [0.5, 2] (true division via
    # the module's __future__ import).
    ratio = width / height
    if not 0.5 <= ratio <= 2:
        raise AtomIncompatibleImageSize("image's aspect ratio has to be between .5 and 2 ")
    return True
def _fetch_img(img_url):
    """Download *img_url* and return its body as an in-memory BytesIO.

    Raises:
        AtomImageHTTPError: if the URL can not be accessed.
    """
    try:
        fd = urllib.urlopen(img_url)
        try:
            return io.BytesIO(fd.read())
        finally:
            # Close the HTTP response once its body has been buffered.
            fd.close()
    except urllib.HTTPError:
        raise AtomImageHTTPError("%s can't be accessed" % img_url)
    # NOTE: the original trailing "return False" was unreachable (the try
    # block always returns and the except handler always raises), so it
    # has been removed; callers' truthiness checks are unaffected.
def validate_img_on_web(img_url):
    """
    checks a remote image for existance and Apple Newsstand feed specifications v1.2
    example:
        >>> from atomgen.validate_img import validate_img_on_web
        >>> validate_img_on_web("http://cdn.tennis.com/uploads/magazine/test_material/img_1200_500.png")
    """
    # Skip URLs already validated in this process (module-level cache).
    if img_url in validated_images:
        return
    else:
        img = _fetch_img(img_url)
        if img:
            if check_img(img):
                # Cache the positive result so repeat calls are cheap.
                validated_images[img_url]=True
        print ("%s validated" % img_url)
if __name__ == "__main__":
    # Run the doctest embedded in validate_img_on_web (note: it fetches a
    # remote URL, so this requires network access).
    import doctest
    doctest.testmod()
| {
"content_hash": "97aaf646b8285632416ddd9dd55022d9",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 100,
"avg_line_length": 23.226190476190474,
"alnum_prop": 0.6396719630958483,
"repo_name": "seperman/atomgen",
"id": "818046d4a6db7ee29250ed4a739b6f4bdaf5c765",
"size": "1951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atomgen/validate_img.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16435"
},
{
"name": "JavaScript",
"bytes": "54513"
},
{
"name": "Python",
"bytes": "27817"
},
{
"name": "Shell",
"bytes": "6703"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.settings import WAF_ATTACK_VECTORS
# Human-readable name of the WAF/IPS product this script detects.
__product__ = "AppWall (Radware)"
def detect(get_page):
    """Probe the target with the standard WAF attack vectors and report
    whether the AppWall (Radware) block page is observed."""
    for vector in WAF_ATTACK_VECTORS:
        page, headers, code = get_page(get=vector)
        # AppWall's rejection page carries this characteristic phrase.
        if re.search(r"Unauthorized Activity Has Been Detected.+Case Number:", page, re.I | re.S):
            return True
    return False
| {
"content_hash": "a3978c7ec0a006530b80a192cd7753ff",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 115,
"avg_line_length": 24.80952380952381,
"alnum_prop": 0.6641074856046065,
"repo_name": "JeyZeta/Dangerous",
"id": "951769b4ff276c0afe9a0a33de409c5db1d14e4e",
"size": "544",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "Dangerous/Golismero/tools/sqlmap/waf/radware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "13260"
},
{
"name": "C",
"bytes": "12851"
},
{
"name": "C++",
"bytes": "3174"
},
{
"name": "CSS",
"bytes": "267451"
},
{
"name": "HTML",
"bytes": "2686153"
},
{
"name": "JavaScript",
"bytes": "1356956"
},
{
"name": "Lua",
"bytes": "14436"
},
{
"name": "Makefile",
"bytes": "11190"
},
{
"name": "Objective-C",
"bytes": "998"
},
{
"name": "PHP",
"bytes": "619"
},
{
"name": "PLpgSQL",
"bytes": "536"
},
{
"name": "Perl",
"bytes": "263365"
},
{
"name": "Python",
"bytes": "16669102"
},
{
"name": "Roff",
"bytes": "9828"
},
{
"name": "Ruby",
"bytes": "503"
},
{
"name": "Shell",
"bytes": "6691"
}
],
"symlink_target": ""
} |
"""
Test lldb data formatter subsystem.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class DataFormatterBoolRefPtr(TestBase):
    """Checks lldb's summary formatters for Objective-C BOOL, BOOL& and BOOL*."""

    mydir = TestBase.compute_mydir(__file__)

    @skipUnlessDarwin
    def test_boolrefptr_with_run_command(self):
        """Test the formatters we use for BOOL& and BOOL* in Objective-C."""
        self.build()
        self.boolrefptr_data_formatter_commands()

    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Find the line number to break at.
        self.line = line_number('main.mm', '// Set break point at this line.')

    def boolrefptr_data_formatter_commands(self):
        """Test the formatters we use for BOOL& and BOOL* in Objective-C."""
        self.runCmd("file " + self.getBuildArtifact("a.out"), CURRENT_EXECUTABLE_SET)

        lldbutil.run_break_set_by_file_and_line(
            self, "main.mm", self.line, num_expected_locations=1, loc_exact=True)

        self.runCmd("run", RUN_SUCCEEDED)

        # The stop reason of the thread should be breakpoint.
        self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
                    substrs=['stopped',
                             'stop reason = breakpoint'])

        # This is the function to remove the custom formats in order to have a
        # clean slate for the next test case.
        def cleanup():
            self.runCmd('type format clear', check=False)
            self.runCmd('type summary clear', check=False)
            self.runCmd('type synth clear', check=False)

        # Execute the cleanup function during test case tear down.
        self.addTearDownHook(cleanup)

        # NOTE(review): the 'unset_*' raw-value checks below are skipped on
        # ios/watchos -- presumably BOOL formats differently there; confirm.
        isiOS = (lldbplatformutil.getPlatform() == 'ios' or lldbplatformutil.getPlatform() == 'watchos')

        # Now check that we use the right summary for BOOL&
        self.expect('frame variable yes_ref',
                    substrs=['YES'])
        self.expect('frame variable no_ref',
                    substrs=['NO'])
        if not(isiOS):
            self.expect('frame variable unset_ref', substrs=['12'])

        # Now check that we use the right summary for BOOL*
        self.expect('frame variable yes_ptr',
                    substrs=['YES'])
        self.expect('frame variable no_ptr',
                    substrs=['NO'])
        if not(isiOS):
            self.expect('frame variable unset_ptr', substrs=['12'])

        # Now check that we use the right summary for BOOL
        self.expect('frame variable yes',
                    substrs=['YES'])
        self.expect('frame variable no',
                    substrs=['NO'])
        if not(isiOS):
            self.expect('frame variable unset', substrs=['12'])
| {
"content_hash": "e41d232bc46818b43ae8ac9f62d9ebfc",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 104,
"avg_line_length": 35.96103896103896,
"alnum_prop": 0.5987721198988805,
"repo_name": "endlessm/chromium-browser",
"id": "25ecdef5948c4b9840e24ddec0172bf5ce3c953c",
"size": "2769",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/llvm/lldb/test/API/functionalities/data-formatter/boolreference/TestFormattersBoolRefPtr.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Decrypts the ciphertext stored in GCS to get MySQL password"""
from google.cloud import kms_v1
from hive_to_bigquery import client_info
def decrypt_symmetric(project_id, location_id, key_ring_id, crypto_key_id,
                      ciphertext):
    """Decrypts input ciphertext using the provided symmetric CryptoKey.

    Returns the decrypted plaintext from the Cloud KMS response.
    """
    # Build a KMS API client carrying this tool's client-info metadata.
    gapic_info = client_info.get_gapic_client_info()
    client = kms_v1.KeyManagementServiceClient(client_info=gapic_info)

    # Fully-qualified resource name of the CryptoKey to decrypt with.
    key_name = client.crypto_key_path_path(project_id, location_id,
                                           key_ring_id, crypto_key_id)

    # Ask the KMS API to decrypt the data and unwrap the plaintext.
    response = client.decrypt(key_name, ciphertext)
    return response.plaintext
| {
"content_hash": "a8d2d6831825398c7c5429f1479d016e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 75,
"avg_line_length": 38,
"alnum_prop": 0.6741854636591479,
"repo_name": "CloudVLab/professional-services",
"id": "fe11065f4a216561da30566762b54e76d851d135",
"size": "1373",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/hive-bigquery/hive_to_bigquery/kms_component.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12706"
},
{
"name": "Dockerfile",
"bytes": "6279"
},
{
"name": "Go",
"bytes": "28241"
},
{
"name": "HCL",
"bytes": "23513"
},
{
"name": "HTML",
"bytes": "1228123"
},
{
"name": "Java",
"bytes": "87077"
},
{
"name": "JavaScript",
"bytes": "19579"
},
{
"name": "Makefile",
"bytes": "5250"
},
{
"name": "Python",
"bytes": "1327492"
},
{
"name": "Scala",
"bytes": "298157"
},
{
"name": "Shell",
"bytes": "68560"
},
{
"name": "TSQL",
"bytes": "17166"
},
{
"name": "TypeScript",
"bytes": "137719"
}
],
"symlink_target": ""
} |
import pygame
class Animation(object):
    """Sprite-sheet animation that steps a fixed-size frame window across
    a sheet image at a given rate."""
    # Class-level caches shared by every instance so each sheet image is
    # loaded from disk only once per path.
    loadedImagePaths = []
    loadedImages = []

    def __init__(self, imgPath, width, height, updateRate, frameLimit):
        # Load the sheet, or reuse a previously loaded copy of the same path.
        if imgPath not in Animation.loadedImagePaths:
            self.img = pygame.image.load(imgPath).convert_alpha()
            Animation.loadedImagePaths.append(imgPath)
            Animation.loadedImages.append(self.img)
        else:
            self.img = Animation.loadedImages[Animation.loadedImagePaths.index(imgPath)]
        self.width = width
        self.height = height
        # Grid coordinates of every frame on the sheet, row-major.
        self.frameList = self.genLoopable(self.img.get_width(), self.img.get_height())
        self.position = pygame.rect.Rect((0, 0), (self.width, self.height))
        self.surface = pygame.Surface(self.position.size, pygame.SRCALPHA | pygame.HWSURFACE).convert_alpha()
        # Seconds between frame swaps (update() advances when swapTimer
        # exceeds this).
        self.rate = 1 / float(updateRate)
        self.swapTimer = 0
        self.frame = 0
        self.frameLimit = frameLimit
        # Optional callable invoked each time the animation wraps to frame 0.
        self.callback = None
        self.setPos(self.position.topleft)

    def genLoopable(self, width, height):
        # Build the (col, row) coordinate list for a width x height sheet.
        # xrange: this module targets Python 2.
        posList = []
        for y in xrange(height / self.height):
            for x in xrange(width / self.width):
                posList.append((x, y))
        return posList

    def setPos(self, pos):
        # Select the frame at grid coordinate *pos*: shift the sheet so
        # that frame lands at (0, 0) of the blit surface.
        self.position.topleft = (pos[0] * self.width, pos[1] * self.height)
        self.surface.fill(0)
        self.surface.blit(self.img, (-self.position.x, -self.position.y))

    def getSplice(self):
        # Surface holding only the currently selected frame.
        return self.surface

    def setCallback(self, callback):
        # Register the end-of-loop callback (see update()).
        self.callback = callback

    def update(self, dt):
        # Accumulate elapsed time; advance one frame per `rate` seconds,
        # wrapping at frameLimit and firing the callback on wrap.
        self.swapTimer = self.swapTimer + dt
        if self.swapTimer > self.rate:
            self.swapTimer -= self.rate
            self.frame += 1
            if self.frame == self.frameLimit:
                self.frame = 0
                if self.callback:
                    self.callback()
            self.setPos(self.frameList[self.frame])
| {
"content_hash": "0309f4772c7b6513513f4021bf85cab7",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 103,
"avg_line_length": 27.327868852459016,
"alnum_prop": 0.7060587882423516,
"repo_name": "pedro-b/layer-switcher",
"id": "df40de677e673da9697b26e290dcbfeba72f2358",
"size": "1667",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "animation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "141200"
}
],
"symlink_target": ""
} |
"""API serving config collection service implementation.
Contains the implementation for BackendService as defined in api_backend.py.
"""
# pylint: disable=g-statement-before-imports,g-import-not-at-top
try:
import json
except ImportError:
import simplejson as json
import logging
import api_backend
import api_exceptions
from protorpc import message_types
__all__ = [
'ApiConfigRegistry',
'BackendServiceImpl',
]
class ApiConfigRegistry(object):
  """Registry of active APIs to be registered with Google API Server."""

  def __init__(self):
    # Set of API classes that have been registered.
    self.__registered_classes = set()
    # Set of API config contents served by this App Engine AppId/version
    self.__api_configs = set()
    # Map of API method name to ProtoRPC method name.
    self.__api_methods = {}

  # pylint: disable=g-bad-name
  def register_backend(self, config_contents):
    """Register a single API and its config contents.

    Args:
      config_contents: String containing API configuration.
    """
    if config_contents is None:
      return
    parsed_config = json.loads(config_contents)
    self.__register_class(parsed_config)
    self.__api_configs.add(config_contents)
    self.__register_methods(parsed_config)

  def __register_class(self, parsed_config):
    """Register the class implementing this config, so we only add it once.

    Args:
      parsed_config: The JSON object with the API configuration being added.

    Raises:
      ApiConfigurationError: If the class has already been registered.
    """
    methods = parsed_config.get('methods')
    if not methods:
      return

    # Determine the name of the class that implements this configuration.
    # Use values()/items() instead of the Python 2-only itervalues()/
    # iteritems() so this module also runs under Python 3; on Python 2 the
    # iteration behavior is identical.
    service_classes = set()
    for method in methods.values():
      rosy_method = method.get('rosyMethod')
      if rosy_method and '.' in rosy_method:
        method_class = rosy_method.split('.', 1)[0]
        service_classes.add(method_class)

    for service_class in service_classes:
      if service_class in self.__registered_classes:
        raise api_exceptions.ApiConfigurationError(
            'API class %s has already been registered.' % service_class)
      self.__registered_classes.add(service_class)

  def __register_methods(self, parsed_config):
    """Register all methods from the given api config file.

    Methods are stored in a map from method_name to rosyMethod,
    the name of the ProtoRPC method to be called on the backend.
    If no rosyMethod was specified the value will be None.

    Args:
      parsed_config: The JSON object with the API configuration being added.
    """
    methods = parsed_config.get('methods')
    if not methods:
      return

    for method_name, method in methods.items():
      self.__api_methods[method_name] = method.get('rosyMethod')

  def lookup_api_method(self, api_method_name):
    """Looks an API method up by name to find the backend method to call.

    Args:
      api_method_name: Name of the method in the API that was called.

    Returns:
      Name of the ProtoRPC method called on the backend, or None if not found.
    """
    return self.__api_methods.get(api_method_name)

  def all_api_configs(self):
    """Return a list of all API configuration specs as registered above."""
    return list(self.__api_configs)
class BackendServiceImpl(api_backend.BackendService):
  """Implementation of BackendService."""

  def __init__(self, api_config_registry, app_revision):
    """Create a new BackendService implementation.

    Args:
      api_config_registry: ApiConfigRegistry to register and look up configs.
      app_revision: string containing the current app revision.
    """
    self.__api_config_registry = api_config_registry
    self.__app_revision = app_revision

  # pylint: disable=g-bad-name
  # pylint: disable=g-doc-return-or-yield
  # pylint: disable=g-doc-args
  @staticmethod
  def definition_name():
    """Override definition_name so that it is not BackendServiceImpl."""
    return api_backend.BackendService.definition_name()

  def getApiConfigs(self, request=None):
    """Return a list of active APIs and their configuration files.

    Args:
      request: A request which may contain an app revision

    Returns:
      ApiConfigList: A list of API config strings

    Raises:
      BadRequestException: If the request carries an appRevision that does
        not match this backend's app revision.
    """
    if (request and request.appRevision and
        request.appRevision != self.__app_revision):
      raise api_exceptions.BadRequestException(
          message='API backend app revision %s not the same as expected %s' % (
              self.__app_revision, request.appRevision))

    configs = self.__api_config_registry.all_api_configs()
    return api_backend.ApiConfigList(items=configs)

  def logMessages(self, request):
    """Write a log message from the Swarm FE to the log.

    Args:
      request: A log message request.

    Returns:
      Void message.
    """
    Level = api_backend.LogMessagesRequest.LogMessage.Level
    log = logging.getLogger(__name__)
    for message in request.messages:
      # Default to info when the frontend did not specify a level.
      level = message.level if message.level is not None else Level.info
      # Create a log record and override the pathname and lineno. These
      # messages come from the front end, so it's misleading to say that they
      # come from api_backend_service.
      # NOTE(review): lineno is passed as '' although logging.LogRecord
      # documents an int; %(lineno)d-style formatters would choke on it --
      # confirm whether 0 was intended.
      record = logging.LogRecord(name=__name__, level=level.number, pathname='',
                                 lineno='', msg=message.message, args=None,
                                 exc_info=None)
      log.handle(record)
    return message_types.VoidMessage()
| {
"content_hash": "dca2701786ab800ef7fcd09752384279",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 80,
"avg_line_length": 32.72941176470588,
"alnum_prop": 0.6843997124370956,
"repo_name": "catapult-project/catapult",
"id": "ca7670a52c00f2306cd86958098d25897a667759",
"size": "6161",
"binary": false,
"copies": "10",
"ref": "refs/heads/main",
"path": "third_party/google-endpoints/endpoints/api_backend_service.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1324"
},
{
"name": "C++",
"bytes": "46069"
},
{
"name": "CSS",
"bytes": "23376"
},
{
"name": "Dockerfile",
"bytes": "1541"
},
{
"name": "Go",
"bytes": "114396"
},
{
"name": "HTML",
"bytes": "12394298"
},
{
"name": "JavaScript",
"bytes": "1559584"
},
{
"name": "Makefile",
"bytes": "1774"
},
{
"name": "Python",
"bytes": "6778695"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
} |
import warnings

# Backwards-compatibility shim: re-export everything from the new
# sphinx_astropy.conf location.
from sphinx_astropy.conf import *

# Warn at import time that this module path is deprecated.
warnings.warn("Note that astropy_helpers.sphinx.conf is deprecated - use sphinx_astropy.conf instead")
| {
"content_hash": "d09466aeb53a29f648982eacb0b2bd01",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 102,
"avg_line_length": 31.2,
"alnum_prop": 0.7948717948717948,
"repo_name": "dpshelio/astropy-helpers",
"id": "97c7afdf5682224976b5c1bee73f85af4dcda6c1",
"size": "156",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "astropy_helpers/sphinx/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2811"
},
{
"name": "Python",
"bytes": "184628"
}
],
"symlink_target": ""
} |
import wx
import wx.xrc
import wx.combo
###########################################################################
## Class TestFrame
###########################################################################
class TestFrame ( wx.Frame ):
	"""wxFormBuilder-generated frame: a button, a text field, a label and a
	panel with three selection widgets, plus an Archivo/Salir menu."""

	def __init__( self, parent ):
		wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u"Aplicación 1", pos = wx.DefaultPosition, size = wx.Size( 400,300 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )

		self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )

		# Main vertical layout.
		bSizer1 = wx.BoxSizer( wx.VERTICAL )

		self.m_button1 = wx.Button( self, wx.ID_ANY, u"Botón 1", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.m_button1.SetForegroundColour( wx.Colour( 249, 96, 57 ) )
		self.m_button1.SetBackgroundColour( wx.Colour( 249, 249, 249 ) )
		bSizer1.Add( self.m_button1, 0, wx.ALL|wx.EXPAND, 10 )

		self.m_textCtrl1 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
		bSizer1.Add( self.m_textCtrl1, 0, wx.ALL|wx.EXPAND, 10 )

		self.m_staticText1 = wx.StaticText( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_CENTRE )
		self.m_staticText1.Wrap( -1 )
		bSizer1.Add( self.m_staticText1, 0, wx.ALL|wx.EXPAND, 10 )

		# Panel hosting the three selection widgets in a horizontal row.
		self.m_panel1 = wx.Panel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
		bSizer2 = wx.BoxSizer( wx.HORIZONTAL )

		m_comboBox1Choices = [ u"A", u"B", u"C" ]
		self.m_comboBox1 = wx.ComboBox( self.m_panel1, wx.ID_ANY, u"A", wx.DefaultPosition, wx.DefaultSize, m_comboBox1Choices, 0 )
		bSizer2.Add( self.m_comboBox1, 0, wx.ALL, 5 )

		self.m_bcomboBox1 = wx.combo.BitmapComboBox( self.m_panel1, wx.ID_ANY, u"Combo!", wx.DefaultPosition, wx.DefaultSize, "", 0 )
		bSizer2.Add( self.m_bcomboBox1, 0, wx.ALL, 5 )

		m_choice1Choices = []
		self.m_choice1 = wx.Choice( self.m_panel1, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, m_choice1Choices, 0 )
		self.m_choice1.SetSelection( 0 )
		bSizer2.Add( self.m_choice1, 0, wx.ALL, 5 )

		self.m_panel1.SetSizer( bSizer2 )
		self.m_panel1.Layout()
		bSizer2.Fit( self.m_panel1 )
		bSizer1.Add( self.m_panel1, 1, wx.EXPAND |wx.ALL, 5 )

		self.SetSizer( bSizer1 )
		self.Layout()

		# Menu bar with a single Archivo -> Salir entry.
		self.m_menubar1 = wx.MenuBar( 0 )
		self.m_menu1 = wx.Menu()
		self.m_menuItem1 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u"Salir", wx.EmptyString, wx.ITEM_NORMAL )
		self.m_menu1.AppendItem( self.m_menuItem1 )
		self.m_menubar1.Append( self.m_menu1, u"Archivo" )
		self.SetMenuBar( self.m_menubar1 )

		self.Centre( wx.BOTH )

		# Connect Events
		self.m_button1.Bind( wx.EVT_BUTTON, self.OnClick )
		self.Bind( wx.EVT_MENU, self.OnExit, id = self.m_menuItem1.GetId() )

	def __del__( self ):
		pass

	# Virtual event handlers, overide them in your derived class
	def OnClick( self, event ):
		event.Skip()

	def OnExit( self, event ):
		# Close the frame when the Salir menu item is selected.
		self.Close()
| {
"content_hash": "1ba15bcb825535ce126bfcbd4d8c901e",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 179,
"avg_line_length": 35.19512195121951,
"alnum_prop": 0.6406791406791407,
"repo_name": "JorgeDeLosSantos/curso-wxpython",
"id": "3466ca2b9a107ec6d09cba02e1ccadebfb78a4ab",
"size": "3204",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Clase 11/gui01.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14682"
}
],
"symlink_target": ""
} |
import json

# Read the samples database, append the (newline-stripped) contents of
# comment.txt to its 'comments' list, and write the database back.
with open('samples.json', 'r') as fh:
    samples = json.load(fh)

with open('comment.txt', 'r') as fh:
    comment = fh.read().replace('\n', '')

samples['comments'].append(comment)

with open('samples.json', 'w') as fh:
    json.dump(samples, fh)
| {
"content_hash": "b18377fa6f68b1e63624f2c6cc4e35ef",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 37,
"avg_line_length": 19.071428571428573,
"alnum_prop": 0.6104868913857678,
"repo_name": "CatalystOfNostalgia/hoot",
"id": "39786b5053ff1f1d15ef2ba40911f977a07e5cf0",
"size": "267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/hoot/testing/json_process.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1109"
},
{
"name": "Python",
"bytes": "62334"
},
{
"name": "Swift",
"bytes": "39515"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'DjFacilito.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    # Django admin site.
    url(r'^admin/', include(admin.site.urls)),
    # Authentication and registration views (the root URL is the login page).
    url(r'^home/$', 'TodoList.views.home', name='home'),
    url(r'^$', 'TodoList.views.log_in', name='log_in'),
    url(r'^login$', 'TodoList.views.log_in', name='login'),
    url(r'^logout/$', 'TodoList.views.log_out', name='log_out'),
    url(r'^registrar/$', 'TodoList.views.registrar', name='registrar'),
    # Todo item CRUD; item views take the primary key in the URL.
    url(r'^crear/$', 'TodoList.views.crear', name='crear'),
    url(r'^cumplir/(?P<pk>\d+)/$', 'TodoList.views.cumplir', name="cumplir"),
    url(r'^editar/(?P<pk>\d+)/$', 'TodoList.views.editar', name="editar"),
    url(r'^borrar/(?P<pk>\d+)/$', 'TodoList.views.borrar', name="borrar"),
    url(r'^nueva_categoria/$', 'TodoList.views.nueva_categoria', name='nueva_categoria'),
)
| {
"content_hash": "8f3e33e614d43a85d3f8ac33ac84f9bd",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 89,
"avg_line_length": 42.17391304347826,
"alnum_prop": 0.6164948453608248,
"repo_name": "0sw4l/Cuke",
"id": "3454c5c99dc74acdd68eca67868bba8275dc58da",
"size": "970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DjFacilito/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2789"
},
{
"name": "HTML",
"bytes": "13082"
},
{
"name": "Python",
"bytes": "11374"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib import admin
# Populate the admin registry from the installed apps.
admin.autodiscover()

import settings

urlpatterns = patterns('',
    # home
    url(r'^$', direct_to_template, { "template": "home.html", }, name="home"),

    # packages
    url(r'^build/', include("packages.urls"), name="build"),

    # search
    url(r'^search/', include("search.urls")),

    # django-sentry
    (r'^sentry/', include('sentry.web.urls')),

    # admin
    url(r'^admin/', include(admin.site.urls)),
)

# In DEBUG mode only, serve media and static files through Django's
# static-serving view (with directory indexes enabled).
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$',
         'django.views.static.serve',
         {'document_root': settings.MEDIA_ROOT, 'show_indexes': True, }),
        (r'^static/(?P<path>.*)$',
         'django.views.static.serve',
         {'document_root': settings.STATIC_ROOT, 'show_indexes': True, }),
    )
| {
"content_hash": "b453feddb284804af0f42d943015039c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 78,
"avg_line_length": 28,
"alnum_prop": 0.6136363636363636,
"repo_name": "jonathanchu/pickleback",
"id": "fcd9561261c8fb106a2c230815a8acdcdd106c65",
"size": "924",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24916"
}
],
"symlink_target": ""
} |
"""Test the fundrawtransaction RPC."""
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_fee_amount,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises_rpc_error,
connect_nodes_bi,
count_bytes,
find_vout_for_address,
)
def get_unspent(listunspent, amount):
    """Return the first entry of *listunspent* whose 'amount' equals *amount*.

    Raises AssertionError when no such entry exists.
    """
    match = next((utxo for utxo in listunspent if utxo['amount'] == amount), None)
    if match is None:
        raise AssertionError('Could not find unspent with amount={}'.format(amount))
    return match
class RawTransactionsTest(BitcoinTestFramework):
    def set_test_params(self):
        # Four nodes starting from an empty chain; blocks are mined in
        # run_test before funding.
        self.num_nodes = 4
        self.setup_clean_chain = True
    def setup_network(self, split=False):
        # Connect nodes 0-1, 1-2 and 0-2 in a triangle, with node 3
        # attached to node 0 only.
        self.setup_nodes()
        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 1, 2)
        connect_nodes_bi(self.nodes, 0, 2)
        connect_nodes_bi(self.nodes, 0, 3)
def run_test(self):
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value tests will fail,
# neg. delta always fail the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
# ensure that setting changePosition in fundraw with an exact match is handled properly
rawmatch = self.nodes[2].createrawtransaction([], {self.nodes[2].getnewaddress():50})
rawmatch = self.nodes[2].fundrawtransaction(rawmatch, {"changePosition":1, "subtractFeeFromOutputs":[0]})
assert_equal(rawmatch["changepos"], -1)
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].getaddressinfo(watchonly_address)["pubkey"]
watchonly_amount = Decimal(200)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
# Lock UTXO so nodes[0] doesn't accidentally spend it
watchonly_vout = find_vout_for_address(self.nodes[0], watchonly_txid, watchonly_address)
self.nodes[0].lockunspent(False, [{"txid": watchonly_txid, "vout": watchonly_vout}])
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test that we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
# test a fundrawtransaction with which will not get a change output #
#####################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
####################################################
# test a fundrawtransaction with an invalid option #
####################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_raises_rpc_error(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo':'bar'})
# reserveChangeKey was deprecated and is now removed
assert_raises_rpc_error(-3, "Unexpected key reserveChangeKey", lambda: self.nodes[2].fundrawtransaction(hexstring=rawtx, options={'reserveChangeKey': True}))
############################################################
# test a fundrawtransaction with an invalid change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_raises_rpc_error(-5, "changeAddress must be a valid bitcoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'})
############################################################
# test a fundrawtransaction with a provided change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
change = self.nodes[2].getnewaddress()
assert_raises_rpc_error(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':change, 'changePosition':2})
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
out = dec_tx['vout'][0]
assert_equal(change, out['scriptPubKey']['addresses'][0])
#########################################################
# test a fundrawtransaction with a provided change type #
#########################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[2].fundrawtransaction, rawtx, {'change_type': None})
assert_raises_rpc_error(-5, "Unknown change type ''", self.nodes[2].fundrawtransaction, rawtx, {'change_type': ''})
rawtx = self.nodes[2].fundrawtransaction(rawtx, {'change_type': 'bech32'})
dec_tx = self.nodes[2].decoderawtransaction(rawtx['hex'])
assert_equal('witness_v0_keyhash', dec_tx['vout'][rawtx['changepos']]['scriptPubKey']['type'])
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
rawtx = rawtx[:82] + "0100" + rawtx[84:]
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx)
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawtx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawtx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[1].getaddressinfo(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
inputs = []
outputs = {mSigObj:1.1}
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawtx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 4of5 multisig p2sh transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[1].getaddressinfo(addr2)
addr3Obj = self.nodes[1].getaddressinfo(addr3)
addr4Obj = self.nodes[1].getaddressinfo(addr4)
addr5Obj = self.nodes[1].getaddressinfo(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])['address']
inputs = []
outputs = {mSigObj:1.1}
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawtx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
# send 1.2 BTC to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawtx)
signedTx = self.nodes[2].signrawtransactionwithwallet(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.stop_node(0)
self.nodes[1].node_encrypt_wallet("test")
self.stop_node(2)
self.stop_node(3)
self.start_nodes()
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
# Again lock the watchonly UTXO or nodes[0] may spend it, because
# lockunspent is memory-only and thus lost on restart
self.nodes[0].lockunspent(False, [{"txid": watchonly_txid, "vout": watchonly_vout}])
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
self.nodes[1].getrawchangeaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
assert_raises_rpc_error(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx)
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].keypoolrefill(8) #need to refill the keypool to get an internal change address
self.nodes[1].walletlock()
assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2)
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawtx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 600)
signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node0
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawtx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawtx)
fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True })
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
# Backward compatibility test (2nd param is includeWatching)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransactionwithwallet(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
self.sync_all()
#######################
# Test feeRate option #
#######################
# Make sure there is exactly one input so coin selection can't skew the result
assert_equal(len(self.nodes[3].listunspent(1)), 1)
inputs = []
outputs = {self.nodes[3].getnewaddress() : 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee)
result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee})
result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee})
result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
################################
# Test no address reuse occurs #
################################
result3 = self.nodes[3].fundrawtransaction(rawtx)
res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
changeaddress = ""
for out in res_dec['vout']:
if out['value'] > 1.0:
changeaddress += out['scriptPubKey']['addresses'][0]
assert(changeaddress != "")
nextaddr = self.nodes[3].getnewaddress()
# Now the change address key should be removed from the keypool
assert(changeaddress != nextaddr)
######################################
# Test subtractFeeFromOutputs option #
######################################
# Make sure there is exactly one input so coin selection can't skew the result
assert_equal(len(self.nodes[3].listunspent(1)), 1)
inputs = []
outputs = {self.nodes[2].getnewaddress(): 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = [self.nodes[3].fundrawtransaction(rawtx), # uses min_relay_tx_fee (set by settxfee)
self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": []}), # empty subtraction list
self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0]}), # uses min_relay_tx_fee (set by settxfee)
self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}),
self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee, "subtractFeeFromOutputs": [0]})]
dec_tx = [self.nodes[3].decoderawtransaction(tx_['hex']) for tx_ in result]
output = [d['vout'][1 - r['changepos']]['value'] for d, r in zip(dec_tx, result)]
change = [d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result)]
assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee'])
assert_equal(result[3]['fee'], result[4]['fee'])
assert_equal(change[0], change[1])
assert_equal(output[0], output[1])
assert_equal(output[0], output[2] + result[2]['fee'])
assert_equal(change[0] + result[0]['fee'], change[2])
assert_equal(output[3], output[4] + result[4]['fee'])
assert_equal(change[3] + result[3]['fee'], change[4])
inputs = []
outputs = {self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3)}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = [self.nodes[3].fundrawtransaction(rawtx),
# split the fee between outputs 0, 2, and 3, but not output 1
self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})]
dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']),
self.nodes[3].decoderawtransaction(result[1]['hex'])]
# Nested list of non-change output amounts for each transaction
output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']]
for d, r in zip(dec_tx, result)]
# List of differences in output amounts between normal and subtractFee transactions
share = [o0 - o1 for o0, o1 in zip(output[0], output[1])]
# output 1 is the same in both transactions
assert_equal(share[1], 0)
# the other 3 outputs are smaller as a result of subtractFeeFromOutputs
assert_greater_than(share[0], 0)
assert_greater_than(share[2], 0)
assert_greater_than(share[3], 0)
# outputs 2 and 3 take the same share of the fee
assert_equal(share[2], share[3])
# output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3
assert_greater_than_or_equal(share[0], share[2])
assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0])
# the fee is the same in both transactions
assert_equal(result[0]['fee'], result[1]['fee'])
# the total subtracted from the outputs is equal to the fee
assert_equal(share[0] + share[2] + share[3], result[0]['fee'])
if __name__ == '__main__':
    # Run the functional test when this file is invoked as a script.
    RawTransactionsTest().main()
| {
"content_hash": "97769981a3e697ab0108595f4e997c24",
"timestamp": "",
"source": "github",
"line_count": 749,
"max_line_length": 223,
"avg_line_length": 45.19492656875835,
"alnum_prop": 0.5628194144929248,
"repo_name": "Bushstar/UFO-Project",
"id": "a806de43b4146e8afe9d4dfeb73f0e51ed6ae386",
"size": "34065",
"binary": false,
"copies": "2",
"ref": "refs/heads/master-0.17",
"path": "test/functional/rpc_fundrawtransaction.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "228583"
},
{
"name": "C",
"bytes": "743652"
},
{
"name": "C++",
"bytes": "6065077"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "195873"
},
{
"name": "Makefile",
"bytes": "116238"
},
{
"name": "Objective-C",
"bytes": "4124"
},
{
"name": "Objective-C++",
"bytes": "6581"
},
{
"name": "Python",
"bytes": "1441822"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Shell",
"bytes": "78920"
}
],
"symlink_target": ""
} |
"""
High-level interface to the Player client libraries.
The Player client libraries allow Python code to communicate with
hardware devices such as robots, cameras, and range sensors.
This is a temporary home for this file until it finds a permanent home
(maybe in the PlayerStage project or in PLASTK?)
"""
import time,array
from threading import RLock, Thread
from Queue import Queue
from operator import eq,ne
from math import pi
# Since this module ships with Topographica only as a sample and is not
# intended to be run, the following SkipTest statement was included to allow
# nose to handle this module (when looking for doctests) without raising an
# import error. If the file is moved elsewhere -- or run on its own,
# assuming the PlayerStage package has been installed -- the below code should
# be reworked to either import the package directly, or handle the import error
# differently. For more information on how nose works, see topo/tests/README.
try:
import playerc
SKIP = False
except ImportError:
SKIP = True
# JPALERT: Because of the global interpreter lock in Python, using
# Python threads (via the 'thread' or 'threading' modules does not
# necessarily provide low-latency polling of the player process. In
# particular, long-running native functions (e.g. C/C++ foreign
# functions) will not be pre-empted, so the polling loop won't
# get a timeslice until they complete. A better solution, especially
# on multicore machines, is true preemptive multiprocessing. The
# 'processing' module should provide that, but it doesn't yet work
# properly on MacOS, and I haven't tested it yet on linux. God knows
# what will happen on Windows.
def use_processing():
    """
    Configure the module to use the processing library for asynchronous
    process support.

    Use of the processing library requires the use of queues for
    communication with robot devices.
    """
    import processing
    global RLock, Thread, Queue
    # Rebind the module-level concurrency primitives to their
    # process-based equivalents in one shot.
    RLock, Thread, Queue = (processing.RLock,
                            processing.Process,
                            processing.Queue)  # pyflakes:ignore (optional alternative)
def use_threading():
    """
    Configure the module to use the threading library for asynchronous
    process support. (the default)
    """
    import threading
    # BUGFIX: re-import the Queue *module* under a local name.  At module
    # level the name ``Queue`` is bound to the Queue class (via
    # ``from Queue import Queue``) and may also have been rebound by
    # use_processing(), so the original ``Queue = Queue.Queue`` raised
    # AttributeError instead of restoring the threading-safe queue class.
    import Queue as _queue_module
    global RLock, Thread, Queue
    RLock = threading.RLock       # pyflakes:ignore (optional alternative)
    Thread = threading.Thread     # pyflakes:ignore (optional alternative)
    Queue = _queue_module.Queue   # pyflakes:ignore (optional alternative)
# JPALERT This is a HACK for the CVS version of Player, this value
# should be defined in the playerc module:
if not SKIP:
    # Only patch the constant when the playerc import above succeeded.
    playerc.PLAYERC_OPEN_MODE = 1
class PlayerException(Exception):
    """Error reported by a wrapped playerc library call."""
    pass
def player_fn(error_op=ne,error_val=0):
    """
    Player function decorator. Adds error checking.

    Takes an operator and a value, and compares the result of the
    function call with the value using the operator.  If the result is
    true, a PlayerException is raised.  The default error condition is
    error_op = ne, error_value = 0, which raises an exception if
    fn(*args) != 0.
    """
    def wrap(fn):
        def checked_call(*args):
            result = fn(*args)
            if error_op(result, error_val):
                # Surface the playerc error string as a Python exception.
                raise PlayerException(playerc.playerc_error_str())
        return checked_call
    return wrap
def synchronized(lock):
    """
    Simple synchronization decorator.

    Takes an existing lock and synchronizes a function or method on
    that lock.  Code taken from the Python Wiki
    PythonDecoratorLibrary:

    http://wiki.python.org/moin/PythonDecoratorLibrary
    """
    def wrap(f):
        def guarded(*args, **kw):
            # Hold the lock for the full duration of the call; the
            # finally clause guarantees release even if f raises.
            lock.acquire()
            try:
                result = f(*args, **kw)
            finally:
                lock.release()
            return result
        return guarded
    return wrap
def synched_method(f):
    """
    Synchronized method decorator.

    Like synchronized() decorator, except synched_method assumes
    that the first argument of the function is an instance containing
    a Lock object (as ``self._lock``), and this lock is used for
    synchronization.
    """
    def locked_method(self, *args, **kw):
        # Serialize on the instance's own lock; release is guaranteed
        # by the finally clause even when f raises.
        self._lock.acquire()
        try:
            return f(self, *args, **kw)
        finally:
            self._lock.release()
    return locked_method
class PlayerObject(object):
    """
    A generic threadsafe wrapper for client and proxy objects
    from the playerc library.

    PlayerObject wrappers are constructed automatically by PlayerRobot
    objects.  Each PlayerObject instance wraps a playerc device proxy
    or client object, and publishes a thread-safe version of each of
    proxy's methods, that is synchronized with the PlayerRobot
    instance's run-loop thread, and that catches playerc error
    conditions and raises them as PlayerExceptions.  The original
    playerc proxy object is available via the attribute .proxy.

    Specialized subclasses of PlayerObject can have additional
    interfaces for getting device state or setting commands specific
    to that device.

    Developer note: the PlayerObject base class __init__ method
    automatically wraps each method on the proxy that (a) doesn't
    begin with '__' and (b) is not already in dir(self).  This way,
    subclasses can override the wrapping process by defining their own
    wrappers *before* the base class __init__ method is called.
    """
    def __init__(self,proxy,lock):
        # Lock shared with the PlayerRobot run loop; every wrapped
        # proxy call is synchronized on it.
        self._lock = lock
        self.proxy = proxy
        for name in dir(proxy):
            attr = getattr(proxy,name)
            # Wrap each public callable that the subclass has not
            # already provided, adding synchronization plus playerc
            # error checking (nonzero return -> PlayerException).
            if name not in dir(self) and name[:2] != '__' and callable(attr):
                setattr(self,name,synchronized(lock)(player_fn()(attr)))
        # Queue of (method_name, args) commands to be executed later,
        # on the run-loop thread, by process_queues().
        self.cmd_queue = Queue()
    def process_queues(self):
        # Drain all pending commands, invoking each wrapped method by
        # name with the stored positional arguments.
        while not self.cmd_queue.empty():
            name,args = self.cmd_queue.get()
            try:
                print "Doing command:",name,args
                getattr(self,name)(*args)
            finally:
                # Mark the queue entry done even if the command raised.
                self.cmd_queue.task_done()
class PlayerClient(PlayerObject):
    """
    Player object wrapper for playerc.client objects.

    Identical to PlayerObject except that .read is wrapped with an
    error check matching the client API's convention (None on error)
    and the command-queue machinery is disabled.
    """
    def __init__(self,proxy,lock):
        # Override the wrapper on playerc.client.read, because
        # it returns None for errors, instead of returning 0 for
        # "no error."  Defining self.read before calling the base
        # __init__ prevents the generic wrapping loop from replacing it.
        self.read = synchronized(lock)(player_fn(eq,None)(proxy.read))
        super(PlayerClient,self).__init__(proxy,lock)
    def process_queues(self):
        # The client object services no command queue of its own.
        pass
class PlayerDevice(PlayerObject):
    """
    Generic Player device object.

    Overrides the default proxy .subscribe method so that the mode
    defaults to PLAYERC_OPEN_MODE.
    """
    @synched_method
    @player_fn()
    def subscribe(self, mode=None):
        """Subscribe to the device; mode defaults to PLAYERC_OPEN_MODE."""
        # BUGFIX: the original expression
        #   mode = playerc.PLAYERC_OPEN_MODE if None else mode
        # tested the constant ``None`` (always false), so the default
        # open mode was never substituted and None was passed through.
        if mode is None:
            mode = playerc.PLAYERC_OPEN_MODE
        return self.proxy.subscribe(mode)
class PTZDevice(PlayerDevice):
    """
    Player Pan/Tilt/Zoom (PTZ) device.

    Adds the following to the original proxy interface:

    state     = The tuple (pan,tilt,zoom) indicating the current state
                of the PTZ device.
    state_deg = Same as state, but returns values in degrees instead
                of radians.

    set_deg() and set_ws_deg() methods.  Same as .set() and .set_ws(),
    using degrees instead of radians.
    """
    def get_state(self):
        """Return (pan, tilt, zoom) in radians, read from the proxy."""
        dev = self.proxy
        return (dev.pan, dev.tilt, dev.zoom)
    state = property(get_state)

    def get_state_deg(self):
        """Return (pan, tilt, zoom) converted to degrees."""
        dev = self.proxy
        return tuple(v*180/pi for v in (dev.pan, dev.tilt, dev.zoom))
    state_deg = property(get_state_deg)

    def set_deg(self,pan,tilt,zoom):
        """Like .set(), but the angles are given in degrees."""
        radians = [v*pi/180 for v in (pan, tilt, zoom)]
        self.set(*radians)

    def set_ws_deg(self,pan,tilt,zoom,pan_speed,tilt_speed):
        """Like .set_ws(), but angles and speeds are given in degrees."""
        radians = [v*pi/180 for v in (pan, tilt, zoom, pan_speed, tilt_speed)]
        self.set_ws(*radians)
class CameraDevice(PlayerDevice):
    """
    A Player camera device.

    The synchronized method get_image grabs an uncompressed snapshot,
    along with the additional formatting information needed to make an
    image.
    """
    def __init__(self,proxy,lock):
        # Wrap proxy.decompress so that a non-None return raises
        # PlayerException (it does not follow the usual
        # zero-means-success convention handled by player_fn()).
        self.decompress = synchronized(lock)(player_fn(ne,None)(proxy.decompress))
        super(CameraDevice,self).__init__(proxy,lock)
        # Queue of captured image tuples, filled by process_queues().
        self.image_queue = Queue()
    def process_queues(self):
        im = self.image
        # check to make sure it's really an image
        # (im[1] is the width field of the tuple; presumably zero width
        # means no frame was available -- TODO confirm against playerc)
        if im[1] > 0:
            self.image_queue.put(im)
        super(CameraDevice,self).process_queues()
    # @synched_method
    def get_image(self):
        """
        Returns the tuple:

        (format,width,height,bpp,fdiv,data)

        Where data is a copy of the uncompressed image data.
        """
        if self.proxy.compression:
            self.decompress()
        # Copy the raw image bytes into an unsigned-byte array.
        im_array = array.array('B')
        im_array.fromstring(self.proxy.image[:self.proxy.image_count])
        return self.proxy.format, \
               self.proxy.width, \
               self.proxy.height, \
               self.proxy.bpp, \
               self.proxy.fdiv, \
               im_array
    image = property(get_image)
##################
# DEVICE TABLE
#
# This table contains the mapping from device type names
# to specialized device object types. Types not indexed in this table
# should default to type PlayerDevice.
# (PlayerRobot.add_device performs that fallback lookup.)
device_table = {'ptz'    :PTZDevice,
                'camera' :CameraDevice,
                }
class PlayerRobot(object):
    """
    Player Robot interface.

    A PlayerRobot instance encapsulates an interface to a Player
    robot.  It creates and manages a playerc.client object and a set of
    device proxies wrapped in PlayerDevice objects.  In addition, it
    maintains a run-loop in a separate thread that calls the client's
    .read() method at regular intervals.  The devices are published
    through standard interfaces on the PlayerRobot instance, and their
    methods and properties are synchronized with the run thread
    through a mutex.

    Example:

      # set up a robot object with position, laser, and camera objects
      robot = PlayerRobot(host='myrobot.mydomain.edu',port=6665,
                          devices = [('position2d',0),
                                     ('laser',0),
                                     ('camera',1)])

      # start the run thread, devices will be subscribed
      # automatically.
      robot.start()

      # start the robot turning at 30 deg/sec
      robot.position2d[0].set_cmd_vel(0, 0, 30*pi/180)

      # wait for a while
      time.sleep(5.0)

      # all stop
      robot.position2d[0].set_cmd_vel(0,0,0)

      # shut down the robot's thread, unsubscribing all devices and
      # disconnecting the client
      robot.stop()
    """
    def __init__(self,host='localhost',port=6665,speed=20,
                 devices=None):
        """
        host, port -- address of the Player server.
        speed      -- run-loop polling frequency in Hz.
        devices    -- sequence of (device_name, device_number) pairs
                      to add immediately.
        """
        # BUGFIX: the original signature used the mutable default
        # ``devices=[]``; use None as the sentinel instead.
        if devices is None:
            devices = []
        self._thread = None
        self._running = False
        self._lock = RLock()
        self.speed = speed
        self._client = PlayerClient(playerc.playerc_client(None,host,port),self._lock)
        self._queues_running = False
        self._devices = []
        for devname,devnum in devices:
            self.add_device(devname,devnum=devnum)
    def start(self):
        """Start the run-loop thread; devices are subscribed there."""
        assert self._thread is None
        self._thread = Thread(target=self.run_loop,name="PlayerRobot Run Loop")
        # Daemonize so a forgotten stop() cannot hang interpreter exit.
        self._thread.setDaemon(True)
        self._thread.start()
    def stop(self):
        """Signal the run loop to exit and wait for it to finish."""
        self._running = False
        self._thread.join()
        self._thread = None
    def run_loop(self):
        """Body of the run thread: poll the client at self.speed Hz."""
        self._client.connect()
        self._running = True
        self.subscribe_all()
        try:
            while self._running:
                self._client.read()
                if self._queues_running:
                    self.process_queues()
                time.sleep(1.0/self.speed)
        finally:
            # Always unsubscribe and disconnect, even on error.
            self.unsubscribe_all()
            self._client.disconnect()
    def run_queues(self,run_state):
        """
        When using queues for communication with devices, this method
        toggles queue processing.  It is often useful to turn off
        queue processing, e.g. when a client does not plan on using
        queued data for a while.
        """
        self._queues_running = run_state
    def process_queues(self):
        """Let every device drain its pending queues (run-loop thread)."""
        for d in self._devices:
            d.process_queues()
    def subscribe_all(self):
        """Subscribe every registered device."""
        for dev in self._devices:
            dev.subscribe()
    def unsubscribe_all(self):
        """Unsubscribe every registered device."""
        for dev in self._devices:
            dev.unsubscribe()
    def add_device(self,devname,devnum=0):
        """
        Create and register the device proxy named devname/devnum,
        publishing it as self.<devname>[devnum].  Subscribes it
        immediately if the run loop is already active.
        """
        if devname not in dir(self):
            setattr(self,devname,{})
        proxy_constr = getattr(playerc,'playerc_'+devname)
        devtype = device_table.get(devname,PlayerDevice)
        # BUGFIX: acquire the lock *before* entering the try block so a
        # failed acquire cannot trigger a release of an unheld lock.
        self._lock.acquire()
        try:
            dev = devtype(proxy_constr(self._client.proxy,devnum),self._lock)
        finally:
            self._lock.release()
        self._devices.append(dev)
        getattr(self,devname)[devnum] = dev
        if self._running:
            dev.subscribe()
| {
"content_hash": "69374b0c1abbbf017d53c8158f253b55",
"timestamp": "",
"source": "github",
"line_count": 416,
"max_line_length": 92,
"avg_line_length": 31.596153846153847,
"alnum_prop": 0.6426506390748631,
"repo_name": "mjabri/topographica",
"id": "90bb9dd10ba35d9e340d3e0e56f38d030ec69982",
"size": "13144",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "topo/hardware/playerrobot.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "14889"
},
{
"name": "C++",
"bytes": "5714"
},
{
"name": "Elixir",
"bytes": "202"
},
{
"name": "JavaScript",
"bytes": "122"
},
{
"name": "Makefile",
"bytes": "15490"
},
{
"name": "Python",
"bytes": "1878339"
},
{
"name": "Shell",
"bytes": "1577"
},
{
"name": "TeX",
"bytes": "253834"
}
],
"symlink_target": ""
} |
import time
import random
# Power up the dome subsystems first.
r2.DomeLightsRelay.Enable()
r2.DomeMotorRelay.Enable()
# Sound stays disabled when the script was launched with "-mute".
if not "-mute" in args:
    r2.SoundRelay.Enable()
r2.BodyServosRelay.Enable()
# Give the relays a moment to settle before driving servos.
time.sleep(1)
r2.Head.Enable()
r2.Head.SetPosition(0)
# Center both utility arms.
r2.LeftUtilityArm.SetTarget(0)
r2.RightUtilityArm.SetTarget(0)
# Scheduling state for the timed behaviours below (epoch seconds).
nextHeadMove = time.time() + 10
nextSound = time.time()
nextHologram = time.time()
hologramOff = time.time() + 1e6  # effectively "never" until scheduled
class HeadMove:
    """Turn the head to a random position at random intervals."""
    def __init__(self, head):
        self.Head = head
        self.NextMoveTime = 0  # epoch time of the next scheduled move

    def Update(self):
        now = time.time()
        if now <= self.NextMoveTime:
            return
        # Pick a new random bearing and schedule the following move
        # 1-10 seconds from now.
        self.Head.SetPosition(random.randint(-90, 90))
        self.NextMoveTime = now + random.randint(1, 10)
# Build the list of behaviour scripts; each exposes an Update() method.
scripts = []
scripts.append(HeadMove(r2.Head))
from HoloProjector import *
scripts.append(TwitchHoloProjector(r2.FrontHoloProjector))
scripts.append(TwitchHoloProjector(r2.TopHoloProjector))
scripts.append(TwitchHoloProjector(r2.RearHoloProjector))
# Main behaviour loop: run scripts and fire timed random events.
while running():
    t = time.time()
    for script in scripts:
        script.Update()
    # Show a hologram message every 60-180 seconds, for 30 seconds.
    if t > nextHologram:
        nextHologram = t + random.randint(60, 180)
        hologramOff = t + 30
        r2.FrontHoloProjector.SetMessage()
        #r2.Sound.Play("")
    elif t > hologramOff:
        r2.FrontHoloProjector.SetDefault()
        hologramOff = t + 1e6  # push far into the future until rescheduled
    # Random chatter every 10-20 seconds.
    if t > nextSound:
        nextSound = t + random.randint(10, 20)
        r2.Sound.Play("Generic")
    # React to the BB8 droid appearing/disappearing on the network.
    if r2.Network.Changed("BB8"):
        if r2.Network.IsConnected("BB8"):
            r2.Sound.Play("Happy")
        else:
            r2.Sound.Play("Sad")
# Loop exited: stop the dome motor before the script ends.
r2.DomeMotorRelay.Disable()
| {
"content_hash": "6ce4aceb9888fdc7d436ba0f51135312",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 58,
"avg_line_length": 23.720588235294116,
"alnum_prop": 0.6491010539367638,
"repo_name": "curtisblack/R2D2",
"id": "df57b7a838e65ab25b2b5bc4a2e987149cf4be40",
"size": "1613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "basic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "64121"
},
{
"name": "Shell",
"bytes": "163"
}
],
"symlink_target": ""
} |
import time
from importlib import import_module
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.core.exceptions import SuspiciousOperation
from django.utils.cache import patch_vary_headers
from django.utils.deprecation import MiddlewareMixin
from django.utils.http import http_date
class SessionMiddleware(MiddlewareMixin):
    """Attach a session to each request and persist/expire it on the response."""

    def __init__(self, get_response=None):
        """Resolve the configured session engine once, at construction."""
        self.get_response = get_response
        engine = import_module(settings.SESSION_ENGINE)
        self.SessionStore = engine.SessionStore

    def process_request(self, request):
        """Wrap the session cookie value (may be None) in a SessionStore."""
        session_key = request.COOKIES.get(settings.SESSION_COOKIE_NAME)
        request.session = self.SessionStore(session_key)

    def process_response(self, request, response):
        """
        If request.session was modified, or if the configuration is to save the
        session every time, save the changes and set a session cookie or delete
        the session cookie if the session has been emptied.
        """
        try:
            accessed = request.session.accessed
            modified = request.session.modified
            empty = request.session.is_empty()
        except AttributeError:
            # request.session was never attached (process_request did not
            # run for this request); nothing to do.
            return response
        # First check if we need to delete this cookie.
        # The session should be deleted only if the session is entirely empty.
        if settings.SESSION_COOKIE_NAME in request.COOKIES and empty:
            response.delete_cookie(
                settings.SESSION_COOKIE_NAME,
                path=settings.SESSION_COOKIE_PATH,
                domain=settings.SESSION_COOKIE_DOMAIN,
            )
            patch_vary_headers(response, ('Cookie',))
        else:
            if accessed:
                # The response depends on the session cookie; tell caches.
                patch_vary_headers(response, ('Cookie',))
            if (modified or settings.SESSION_SAVE_EVERY_REQUEST) and not empty:
                if request.session.get_expire_at_browser_close():
                    max_age = None
                    expires = None
                else:
                    max_age = request.session.get_expiry_age()
                    expires_time = time.time() + max_age
                    expires = http_date(expires_time)
                # Save the session data and refresh the client cookie.
                # Skip session save for 500 responses, refs #3881.
                if response.status_code != 500:
                    try:
                        request.session.save()
                    except UpdateError:
                        raise SuspiciousOperation(
                            "The request's session was deleted before the "
                            "request completed. The user may have logged "
                            "out in a concurrent request, for example."
                        )
                    response.set_cookie(
                        settings.SESSION_COOKIE_NAME,
                        request.session.session_key, max_age=max_age,
                        expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
                        path=settings.SESSION_COOKIE_PATH,
                        secure=settings.SESSION_COOKIE_SECURE or None,
                        httponly=settings.SESSION_COOKIE_HTTPONLY or None,
                        samesite=settings.SESSION_COOKIE_SAMESITE,
                    )
        return response
| {
"content_hash": "0a0ecf42bd96eff994ea94d0c09f89cf",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 79,
"avg_line_length": 45.75675675675676,
"alnum_prop": 0.5841701122268163,
"repo_name": "schinckel/django",
"id": "d36be4eca8bd2a6c2a6d6ee83afd3830406bfeed",
"size": "3386",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "django/contrib/sessions/middleware.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "85024"
},
{
"name": "HTML",
"bytes": "224566"
},
{
"name": "JavaScript",
"bytes": "251536"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "13234142"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
'''
@license: Apache License, Version 2.0
@copyright: 2007-2013 Marc-Antoine Gouillart
@author: Marc-Antoine Gouillart
'''
## Python imports
## Django imports
from django.contrib.auth.models import Group, User, Permission
## MAGE imports
import scm.models
from ref.models.parameters import setOrCreateParam
def post_migrate_handler(sender, **kwargs):
    """Seed default auth objects and parameters after migrations.

    Intended as a Django post_migrate signal handler (sender/kwargs are
    the standard signal arguments and are not used directly). Idempotent
    for the group/user part; parameters go through setOrCreateParam.
    """
    ## Create DEV group & first user
    if not Group.objects.filter(name='DEV').exists():
        devgroup = Group(name='DEV')
        devgroup.save()
        # DEV members get the scm.add_delivery permission.
        p = Permission.objects.get(content_type__app_label = 'scm', codename = 'add_delivery')
        devgroup.permissions.add(p)
        # NOTE(review): hard-coded 'dev'/'dev' credentials -- presumably a
        # bootstrap account; confirm it is disabled/changed in production.
        dev = User.objects.create_user(username = 'dev', email = None, password = 'dev')
        dev.save()
        dev.groups.add(devgroup)

    ## Parameters
    setOrCreateParam(key = u'APPLY_MERGE_LIMIT', value = u'60',
                     default_value = u'60',
                     description = u'Si deux éléments d\'une même livraison sont appliquées sur un même environnement à moins de n minutes, c\'est une même installation. 0 pour désactiver la fusion.')
    setOrCreateParam(key = u'BACKUP_MERGE_LIMIT', value = u'180',
                     default_value = u'180',
                     description = u'Si deux éléments d\'un même environnements sont sauvegardés à moins de n minutes, c\'est un même backupset. 0 pour désactiver la fusion.')
    setOrCreateParam(key = u'DELIVERY_FORM_DATA_FIELDS', value = u'0',
                     default_value = u'0',
                     description = u'nombre de champs data à afficher dans le formulaire de bon de livraison')
    setOrCreateParam(key = u'DELIVERY_FORM_DATAFILE_MODE', value = u'ONE_FILE_PER_ITEM',
                     default_value = u'ONE_FILE_PER_ITEM',
                     description = u'ONE_FILE_PER_SET, ONE_FILE_PER_ITEM, NO_UPLOAD')
| {
"content_hash": "c7893962e50fdda06eba7a47cf8a4864",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 200,
"avg_line_length": 42.644444444444446,
"alnum_prop": 0.6221990620114644,
"repo_name": "marcanpilami/MAGE",
"id": "c77eb9221c35c7fe1bc6034291f281cdbf59fdec",
"size": "1951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scm/management/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16033"
},
{
"name": "Dockerfile",
"bytes": "1730"
},
{
"name": "HTML",
"bytes": "88971"
},
{
"name": "JavaScript",
"bytes": "6024"
},
{
"name": "Python",
"bytes": "401724"
},
{
"name": "Shell",
"bytes": "20159"
}
],
"symlink_target": ""
} |
from tempfile import NamedTemporaryFile
from dune.xt import guarded_import
from dune.xt.common.vtk.plot import plot
# Import every compiled dune-gdt binding module through guarded_import so
# that its contents become part of this package's namespace.
for mod_name in ( # order should not matter!
        '_discretefunction_discretefunction',
        '_discretefunction_dof_vector',
        '_functionals_interfaces_common',
        '_functionals_interfaces_eigen',
        '_functionals_interfaces_istl',
        '_functionals_vector_based',
        '_interpolations_boundary',
        '_interpolations_default',
        '_interpolations_oswald',
        '_local_bilinear_forms_coupling_intersection_integrals',
        '_local_bilinear_forms_coupling_intersection_interface',
        '_local_bilinear_forms_element_integrals',
        '_local_bilinear_forms_element_interface',
        '_local_bilinear_forms_intersection_integrals',
        '_local_bilinear_forms_intersection_interface',
        '_local_bilinear_forms_restricted_coupling_intersection_integrals',
        '_local_bilinear_forms_restricted_intersection_integrals',
        '_local_functionals_element_integrals',
        '_local_functionals_element_interface',
        '_local_functionals_intersection_integrals',
        '_local_functionals_intersection_interface',
        '_local_functionals_restricted_intersection_integrals',
        '_local_integrands_binary_element_interface',
        '_local_integrands_binary_intersection_interface',
        '_local_integrands_element_product',
        '_local_integrands_intersection_product',
        '_local_integrands_ipdg_boundary_penalty',
        '_local_integrands_ipdg_inner_penalty',
        '_local_integrands_jump_boundary',
        '_local_integrands_jump_inner',
        '_local_integrands_laplace',
        '_local_integrands_laplace_ipdg_dirichlet_coupling',
        '_local_integrands_laplace_ipdg_inner_coupling',
        '_local_integrands_linear_advection',
        '_local_integrands_linear_advection_upwind_dirichlet_coupling',
        '_local_integrands_linear_advection_upwind_inner_coupling',
        '_local_integrands_quaternary_intersection_interface',
        '_local_integrands_unary_element_interface',
        '_local_integrands_unary_intersection_interface',
        '_local_operators_coupling_intersection_indicator',
        '_local_operators_element_indicator',
        '_local_operators_element_interface',
        '_local_operators_intersection_indicator',
        '_local_operators_intersection_interface',
        '_operators_bilinear_form',
        '_operators_interfaces_common',
        '_operators_interfaces_eigen',
        '_operators_interfaces_istl_1d',
        '_operators_interfaces_istl_2d',
        '_operators_interfaces_istl_3d',
        '_operators_laplace_ipdg_flux_reconstruction',
        '_operators_matrix_based_factory',
        '_operators_operator',
        '_prolongations',
        '_spaces_h1_continuous_lagrange',
        '_spaces_hdiv_raviart_thomas',
        '_spaces_interface',
        '_spaces_l2_discontinuous_lagrange',
        '_spaces_l2_finite_volume',
        '_spaces_skeleton_finite_volume',
        '_tools_adaptation_helper',
        '_tools_dirichlet_constraints',
        '_tools_grid_quality_estimates',
        '_tools_sparsity_pattern',
):
    guarded_import(globals(), 'dune.gdt', mod_name)
def visualize_function(function, grid=None, subsampling=False):
    """Visualize a 2d scalar function in a notebook.

    Tries the discrete-function path first (write a .vtu file, plot it);
    if ``function`` does not support that interface, falls back to the
    generic dune-xt function visualization, which requires ``grid``.
    """
    assert function.dim_domain == 2, f'Not implemented yet for {function.dim_domain}-dimensional grids!'
    # BUG FIX: the message previously interpolated dim_domain here, so a
    # failing range check reported the wrong number.
    assert function.dim_range == 1, f'Not implemented yet for {function.dim_range}-dimensional functions!'
    tmpfile = NamedTemporaryFile(mode='wb', delete=False, suffix='.vtu').name
    try:  # discrete function
        function.visualize(filename=tmpfile[:-4])
        return plot(tmpfile, color_attribute_name=function.name)
    except (TypeError, AttributeError):
        # Not a discrete function: use the generic xt visualization.
        from dune.xt.functions import visualize_function as visualize_xt_function
        assert grid
        return visualize_xt_function(function, grid, subsampling=subsampling)
| {
"content_hash": "ccee57cf27b0d01e99e0f8beb4184b50",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 107,
"avg_line_length": 44.31521739130435,
"alnum_prop": 0.6735344616139318,
"repo_name": "pymor/dune-gdt",
"id": "3f19fff046cb3a9d3b480def26cf9e2eeed6a6bc",
"size": "4581",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/dune/gdt/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C++",
"bytes": "1214423"
},
{
"name": "CMake",
"bytes": "10605"
}
],
"symlink_target": ""
} |
"""Sum square difference
The sum of the squares of the first ten natural numbers is,
1^2 + 2^2 + ... + 10^2 = 385
The square of the sum of the first ten natural numbers is,
(1 + 2 + ... + 10)^2 = 55^2 = 3025
Hence the difference between the sum of the squares of the first ten natural
numbers and the square of the sum is 3025 - 385 = 2640.
Find the difference between the sum of the squares of the first one hundred
natural numbers and the square of the sum.
Answer: 25164150
"""
def solve():
    """Return the difference between the square of the sum and the sum of
    the squares of the first one hundred natural numbers.

    Uses the builtin sum() over an explicit accumulation loop.

    >>> solve()
    25164150
    """
    numbers = range(1, 101)
    square_of_sum = sum(numbers) ** 2
    sum_of_squares = sum(i * i for i in numbers)
    return square_of_sum - sum_of_squares


if __name__ == '__main__':
    print(solve())
| {
"content_hash": "cd8049ab52163c24dd2c95f9f5c9d3ef",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 76,
"avg_line_length": 21.727272727272727,
"alnum_prop": 0.6262203626220363,
"repo_name": "yhlam/project-euler",
"id": "327cb5ba0426a6144956df44fdc95ffadc05dcd1",
"size": "717",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project_euler/p006.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45351"
}
],
"symlink_target": ""
} |
"""Track the accumulation of Path/Value pairs found."""
import collections
from citest.base import JsonSnapshotableEntity
# Separator used when specifying paths to JSON attributes in an
# object hierarchy.
PATH_SEP = '/'


def build_path(*parts):
    """Return the PATH_SEP-delimited path formed by joining *parts*."""
    return PATH_SEP.join(parts)
class PathValue(collections.namedtuple('PathValue', ['path', 'value']),
                JsonSnapshotableEntity):
    """An immutable (path, value) pair.

    Attributes:
      path: The slash-delimited string of field names leading to the value.
      value: The JSON object value found at the path leaf; it may itself
        be a compound object.
    """

    def __str__(self):
        # str() for the path, repr() for the value -- same rendering as
        # '"{0}"={1!r}'.format(path, value).
        return '"%s"=%r' % (self.path, self.value)

    def export_to_json_snapshot(self, snapshot, entity):
        """Implements JsonSnapshotableEntity interface."""
        builder = snapshot.edge_builder
        builder.make_control(entity, 'Path', self.path)
        builder.make_data(entity, 'Value', self.value, format='json')
| {
"content_hash": "0ba01c3f26dfb6f3eb508ee7f4c1f4dc",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 77,
"avg_line_length": 32.935483870967744,
"alnum_prop": 0.683643486777669,
"repo_name": "google/citest",
"id": "ca9f5c05888ff4a1b5b1ad5dad5ebec8af679e0f",
"size": "1619",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "citest/json_predicate/path_value.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "993608"
}
],
"symlink_target": ""
} |
"""
mbed SDK
Copyright (c) 2011-2020 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os
import binascii
import struct
import shutil
import inspect
import sys
from collections import namedtuple
from copy import copy
from future.utils import raise_from
from tools.resources import FileType
from tools.targets.LPC import patch
from tools.paths import TOOLS_BOOTLOADERS
from tools.utils import json_file_to_dict, NotSupportedException
from tools.psa import find_secure_image
# Public API of this module.
__all__ = ["target", "TARGETS", "TARGET_MAP", "TARGET_NAMES", "CORE_LABELS",
           "CORE_ARCH", "HookError", "generate_py_target", "Target",
           "CUMULATIVE_ATTRIBUTES", "get_resolution_order"]

# Map from a core name (as used in targets.json) to the list of build
# labels contributed by that core (see Target.labels).
CORE_LABELS = {
    "Cortex-M0": ["M0", "CORTEX_M", "LIKE_CORTEX_M0", "CORTEX"],
    "Cortex-M0+": ["M0P", "CORTEX_M", "LIKE_CORTEX_M0", "CORTEX"],
    "Cortex-M1": ["M1", "CORTEX_M", "LIKE_CORTEX_M1", "CORTEX"],
    "Cortex-M3": ["M3", "CORTEX_M", "LIKE_CORTEX_M3", "CORTEX"],
    "Cortex-M4": ["M4", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M4", "CORTEX"],
    "Cortex-M4F": ["M4", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M4", "CORTEX"],
    "Cortex-M7": ["M7", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M7", "CORTEX"],
    "Cortex-M7F": ["M7", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M7", "CORTEX"],
    "Cortex-M7FD": ["M7", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M7",
                    "CORTEX"],
    "Cortex-A9": ["A9", "CORTEX_A", "LIKE_CORTEX_A9", "CORTEX"],
    "Cortex-M23": ["M23", "CORTEX_M", "LIKE_CORTEX_M23", "CORTEX"],
    "Cortex-M23-NS": ["M23", "M23_NS", "CORTEX_M", "LIKE_CORTEX_M23",
                      "CORTEX"],
    "Cortex-M33": ["M33", "CORTEX_M", "LIKE_CORTEX_M33", "CORTEX"],
    "Cortex-M33-NS": ["M33", "M33_NS", "CORTEX_M", "LIKE_CORTEX_M33",
                      "CORTEX"],
    "Cortex-M33F": ["M33", "CORTEX_M", "LIKE_CORTEX_M33", "CORTEX"],
    "Cortex-M33F-NS": ["M33", "M33_NS", "CORTEX_M", "LIKE_CORTEX_M33",
                       "CORTEX"],
    "Cortex-M33FE": ["M33", "CORTEX_M", "LIKE_CORTEX_M33", "CORTEX"],
    "Cortex-M33FE-NS": ["M33", "M33_NS", "CORTEX_M", "LIKE_CORTEX_M33",
                        "CORTEX"]
}

# Map from core name to an integer 6/7/8 -- presumably the ARM
# architecture major version (ARMv6-M/ARMv7/ARMv8); confirm against the
# toolchain code that consumes it.
CORE_ARCH = {
    "Cortex-M0": 6,
    "Cortex-M0+": 6,
    "Cortex-M1": 6,
    "Cortex-M3": 7,
    "Cortex-M4": 7,
    "Cortex-M4F": 7,
    "Cortex-M7": 7,
    "Cortex-M7F": 7,
    "Cortex-M7FD": 7,
    "Cortex-A9": 7,
    "Cortex-M23": 8,
    "Cortex-M23-NS": 8,
    "Cortex-M33": 8,
    "Cortex-M33F": 8,
    "Cortex-M33-NS": 8,
    "Cortex-M33F-NS": 8,
    "Cortex-M33FE": 8,
    "Cortex-M33FE-NS": 8,
}
###############################################################################
# Generic Target class that reads and interprets the data in targets.json
class HookError(Exception):
    """Raised when a target's post-binary hook specification is invalid.

    Covers malformed hook names, missing classes and missing static
    functions referenced from a target's "post_binary_hook" entry.
    """
    pass
# Module-level memoization store; cleared by Target.add_extra_targets and
# Target.set_targets_json_location when the target definitions change.
CACHES = {}


def cached(func):
    """A simple decorator used for automatically caching data returned by a
    function
    """
    def wrapper(*args, **kwargs):
        """The wrapped function itself"""
        # BUG FIX: include keyword arguments in the cache key; previously
        # two calls differing only in kwargs shared a single cache entry,
        # so the second call returned the first call's result.
        key = (func.__name__, args, tuple(sorted(kwargs.items())))
        if key not in CACHES:
            CACHES[key] = func(*args, **kwargs)
        return CACHES[key]
    return wrapper
# Cumulative attributes can have values appended to them, so they
# need to be computed differently than regular attributes
# (see Target.__getattr_cumulative: children may extend or shrink them
# via "<name>_add" / "<name>_remove" keys in targets.json).
CUMULATIVE_ATTRIBUTES = [
    'extra_labels', 'macros', 'device_has', 'features', 'components'
]

# Metadata assumed for targets.json files that do not declare a
# "__build_tools_metadata__" section themselves.
default_build_tools_metadata = {u'version': 0, u'public': False}
def get_resolution_order(json_data, target_name, order, level=0):
    """ Return the order in which target descriptions are searched for
    attributes. This mimics the Python 2.2 method resolution order, which
    is what the old targets.py module used. For more details, check
    http://makina-corpus.com/blog/metier/2014/python-tutorial-understanding-python-mro-class-search-path
    The resolution order contains (name, level) tuples, where "name" is the
    name of the class and "level" is the level in the inheritance hierarchy
    (the target itself is at level 0, its first parent at level 1, its
    parent's parent at level 2 and so on)
    """
    # A target may be reachable through several parents; record it only
    # the first time it is encountered.
    already_seen = [name for name, _ in order]
    if target_name not in already_seen:
        order.append((target_name, level))
    for parent in json_data[target_name].get("inherits", []):
        order = get_resolution_order(json_data, parent, order, level + 1)
    return order
def target(name, json_data):
    """Construct a Target object named ``name`` from raw JSON target data.

    Raises NotSupportedException when the inheritance chain references an
    undefined target, and a plain Exception for reserved names starting
    with an underscore.
    """
    if name.startswith("_"):
        raise Exception(
            "Invalid target name '%s' specified,"
            " target name should not start with '_'" % name
        )
    try:
        order = get_resolution_order(json_data, name, [])
    except KeyError as exc:
        raise_from(NotSupportedException(
            "target {} has an incomplete target definition".format(name)
        ), exc)
    order_names = [entry for entry, _ in order]
    # Keep only the JSON entries that take part in this target's
    # resolution order.
    relevant_data = {
        key: value for key, value in json_data.items()
        if key in order_names
    }
    metadata = json_data.get(
        "__build_tools_metadata__", default_build_tools_metadata
    )
    return Target(
        name=name,
        json_data=relevant_data,
        resolution_order=order,
        resolution_order_names=order_names,
        build_tools_metadata=metadata,
    )
def generate_py_target(new_targets, name):
    """Add one or more new target(s), given as a Python dictionary in
    ``new_targets``, then construct the target named ``name``.

    It is an error for any entry of ``new_targets`` to collide with an
    existing target.
    """
    base_targets = Target.get_json_target_data()
    for new_name in new_targets:
        if new_name in base_targets:
            raise Exception("Attempt to add target '%s' that already exists"
                            % new_name)
    # Known targets take precedence over the newly supplied ones.
    total_data = dict(new_targets)
    total_data.update(base_targets)
    return target(name, total_data)
class Target(namedtuple(
        "Target",
        "name json_data resolution_order "
        "resolution_order_names build_tools_metadata"
)):
    """An object to represent a Target (MCU/Board).

    Attribute lookups fall through __getattr__, which resolves them
    against the target's JSON data following resolution_order (the
    target itself first, then its parents).
    """
    # Default location of the 'targets.json' file
    __targets_json_location_default = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        '..', '..', 'targets', 'targets.json'
    )

    # Current/new location of the 'targets.json' file
    __targets_json_location = None

    # Extra custom targets files
    __extra_target_json_files = []

    @staticmethod
    @cached
    def get_json_target_data():
        """Load the description of JSON target data"""
        from_file = (Target.__targets_json_location or
                     Target.__targets_json_location_default)
        targets = json_file_to_dict(from_file)
        # Remember where each target definition came from.
        for tgt in targets.values():
            tgt["_from_file"] = from_file
        # Merge custom target files; they may add targets but are not
        # allowed to replace one defined in targets.json.
        for extra_target in Target.__extra_target_json_files:
            for k, v in json_file_to_dict(extra_target).items():
                if k in targets:
                    print(
                        'WARNING: Custom target "%s" cannot replace existing '
                        'target.' % k
                    )
                else:
                    targets[k] = v
                    targets[k]["_from_file"] = extra_target
        return targets

    @staticmethod
    def add_extra_targets(source_dir):
        """Register <source_dir>/custom_targets.json (if present) as an
        extra source of target definitions and invalidate cached data.
        """
        extra_targets_file = os.path.join(source_dir, "custom_targets.json")
        if os.path.exists(extra_targets_file):
            Target.__extra_target_json_files.append(extra_targets_file)
            CACHES.clear()

    @staticmethod
    def set_targets_json_location(location=None):
        """Set the location of the targets.json file"""
        Target.__targets_json_location = (
            location or
            Target.__targets_json_location_default
        )
        # Changing the primary file drops any previously registered
        # custom target files as well.
        Target.__extra_target_json_files = []
        # Invalidate caches, since the location of the JSON file changed
        CACHES.clear()

    @staticmethod
    @cached
    def get_module_data():
        """Get the members of this module using Python's "inspect" module"""
        return dict([(m[0], m[1]) for m in
                     inspect.getmembers(sys.modules[__name__])])

    @staticmethod
    def __add_paths_to_progen(data):
        """Modify the exporter specification ("progen") by changing all
        "template" keys to full paths
        """
        out = {}
        for key, val in data.items():
            if isinstance(val, dict):
                # Recurse into nested dictionaries.
                out[key] = Target.__add_paths_to_progen(val)
            elif key == "template":
                out[key] = [
                    os.path.join(os.path.dirname(__file__), 'export', v)
                    for v in val
                ]
            else:
                out[key] = val
        return out

    def __getattr_cumulative(self, attrname):
        """Look for the attribute in the class and its parents, as defined by
        the resolution order.

        Cumulative attributes (see CUMULATIVE_ATTRIBUTES) start from the
        most-derived definition and are then adjusted by "<name>_add" /
        "<name>_remove" entries found in related targets.
        """
        tdata = self.json_data
        # For a cumulative attribute, figure out when it was defined the
        # last time (in attribute resolution order) then follow the "_add"
        # and "_remove" data fields
        for idx, tgt in enumerate(self.resolution_order):
            # the attribute was defined at this level in the resolution
            # order
            if attrname in tdata[tgt[0]]:
                def_idx = idx
                break
        else:
            # No definition anywhere in the hierarchy: empty list.
            return []
        # Get the starting value of the attribute
        starting_value = (tdata[self.resolution_order[def_idx][0]][attrname]
                          or [])[:]
        # Traverse the resolution list in high inheritance to low
        # inheritance level, left to right order to figure out all the
        # other classes that change the definition by adding or removing
        # elements
        for idx in range(self.resolution_order[def_idx][1] - 1, -1, -1):
            same_level_targets = [tar[0] for tar in self.resolution_order
                                  if tar[1] == idx]
            for tar in same_level_targets:
                data = tdata[tar]
                # Do we have anything to add ?
                if (attrname + "_add") in data:
                    starting_value.extend(data[attrname + "_add"])
                # Do we have anything to remove ?
                if (attrname + "_remove") in data:
                    # Macros can be defined either without a value (MACRO)
                    # or with a value (MACRO=10). When removing, we specify
                    # only the name of the macro, without the value. So we
                    # need to create a mapping between the macro name and
                    # its value. This will work for extra_labels and other
                    # type of arrays as well, since they fall into the
                    # "macros without a value" category (simple definitions
                    # without a value).
                    name_def_map = {}
                    for crtv in starting_value:
                        if crtv.find('=') != -1:
                            temp = crtv.split('=')
                            if len(temp) != 2:
                                raise ValueError(
                                    "Invalid macro definition '%s'" % crtv)
                            name_def_map[temp[0]] = crtv
                        else:
                            name_def_map[crtv] = crtv
                    for element in data[attrname + "_remove"]:
                        if element not in name_def_map:
                            raise ValueError(
                                ("Unable to remove '%s' in '%s.%s' since "
                                 % (element, self.name, attrname)) +
                                "it doesn't exist")
                        starting_value.remove(name_def_map[element])
        return starting_value

    def __getattr_helper(self, attrname):
        """Compute the value of a given target attribute"""
        if attrname in CUMULATIVE_ATTRIBUTES:
            return self.__getattr_cumulative(attrname)
        else:
            tdata = self.json_data
            # First definition found along the resolution order wins.
            for tgt in self.resolution_order:
                data = tdata[tgt[0]]
                try:
                    return data[attrname]
                except KeyError:
                    pass
            else:  # Attribute not found
                raise AttributeError(
                    "Attribute '%s' not found in target '%s'"
                    % (attrname, self.name))

    def __getattr__(self, attrname):
        """ Return the value of an attribute. This function only computes the
        attribute's value once, then adds it to the instance attributes (in
        __dict__), so the next time it is returned directly
        """
        result = self.__getattr_helper(attrname)
        self.__dict__[attrname] = result
        return result

    @staticmethod
    @cached
    def get_target(target_name):
        """ Return the target instance starting from the target name """
        return target(target_name, Target.get_json_target_data())

    @property
    def program_cycle_s(self):
        """Special override for program_cycle_s as it's default value depends
        upon is_disk_virtual
        """
        try:
            return self.__getattr__("program_cycle_s")
        except AttributeError:
            # Not declared in JSON: derive a default from is_disk_virtual.
            return 4 if self.is_disk_virtual else 1.5

    @property
    def labels(self):
        """Get all possible labels for this target"""
        names = copy(self.resolution_order_names)
        # The synthetic root "Target" never contributes a label.
        if "Target" in names:
            names.remove("Target")
        labels = (names + CORE_LABELS[self.core] + self.extra_labels)
        return labels

    @property
    def core_without_NS(self):
        """Core name with any trailing '-NS' (non-secure) suffix removed."""
        if self.core.endswith('-NS'):
            return self.core[:-3]
        else:
            return self.core

    @property
    def is_TrustZone_non_secure_target(self):
        """True when the core name carries the '-NS' suffix."""
        return self.core.endswith('-NS')

    @property
    def is_TrustZone_target(self):
        """Alias of is_TrustZone_non_secure_target."""
        return self.is_TrustZone_non_secure_target

    @property
    def is_PSA_non_secure_target(self):
        """True when the target carries the 'NSPE_Target' label."""
        return 'NSPE_Target' in self.labels

    @property
    def is_TFM_target(self):
        """Truthy when the target declares a 'tfm_target_name' attribute."""
        return getattr(self, 'tfm_target_name', False)

    def get_post_build_hook(self, toolchain_labels):
        """Initialize the post-build hooks for a toolchain. For now, this
        function only allows "post binary" hooks (hooks that are executed
        after the binary image is extracted from the executable file)

        Positional Arguments:
        toolchain_labels - labels of the toolchain in use, matched against
                           the hook's optional "toolchains" restriction

        Return Value:
        A callable if any post-build hook is applicable or None
        """
        try:
            hook_data = self.post_binary_hook
        except AttributeError:
            return None
        # If hook is null, also return
        if hook_data is None:
            return None
        # A hook was found. The hook's name is in the format
        # "classname.functionname"
        temp = hook_data["function"].split(".")
        if len(temp) != 2:
            raise HookError(
                ("Invalid format for hook '%s' in target '%s'"
                 % (hook_data["function"], self.name)) +
                " (must be 'class_name.function_name')")
        class_name, function_name = temp
        # "class_name" must refer to a class in this file, so check if the
        # class exists
        mdata = self.get_module_data()
        if not inspect.isclass(mdata.get(class_name, None)):
            raise HookError(
                ("Class '%s' required by '%s' in target '%s'"
                 % (class_name, hook_data["function"], self.name)) +
                " not found in targets.py")
        # "function_name" must refer to a static function inside class
        # "class_name"
        cls = mdata[class_name]
        if not inspect.isfunction(getattr(cls, function_name, None)):
            raise HookError(
                ("Static function '%s' " % function_name) +
                ("required by '%s' " % hook_data["function"]) +
                ("in target '%s' " % self.name) +
                ("not found in class '%s'" % class_name))
        # Check if the hook specification also has toolchain restrictions
        toolchain_restrictions = set(hook_data.get("toolchains", []))
        if toolchain_restrictions and \
           not set(toolchain_labels).intersection(toolchain_restrictions):
            return None
        return getattr(cls, function_name)
###############################################################################
# Target specific code goes in this section
# This code can be invoked from the target description using the
# "post_binary_hook" key
class LPCTargetCode(object):
    """General LPC Target patching code"""
    @staticmethod
    def lpc_patch(t_self, resources, elf, binf):
        """Patch an elf file.

        Delegates to tools.targets.LPC.patch, which modifies the binary
        in place -- presumably writing the LPC vector-table checksum;
        confirm in tools/targets/LPC.py.
        """
        t_self.notify.debug("LPC Patch: %s" % os.path.split(binf)[1])
        patch(binf)
class LPC4088Code(object):
    """Code specific to the LPC4088"""
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Hook to be run after an elf file is built.

        When ``binf`` is a directory (multi-region build), combine
        ER_IROM1 (internal flash, padded with 0xFF to 512k) and ER_IROM2
        (external SPIFI flash) into one image, then apply the LPC patch.
        """
        if not os.path.isdir(binf):
            # Regular binary file, nothing to do
            LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
            return
        # Use context managers so the file handles are closed even when an
        # exception occurs (they previously leaked on errors).
        with open(binf + ".temp", "wb") as outbin:
            # Pad the first part (internal flash) with 0xFF to 512k
            with open(os.path.join(binf, "ER_IROM1"), "rb") as partf:
                data = partf.read()
            outbin.write(data)
            outbin.write(b'\xFF' * (512*1024 - len(data)))
            # Read and append the second part (external flash) in chunks of
            # fixed size
            chunksize = 128 * 1024
            with open(os.path.join(binf, "ER_IROM2"), "rb") as partf:
                while True:
                    data = partf.read(chunksize)
                    outbin.write(data)
                    if len(data) < chunksize:
                        break
        # Remove the directory with the binary parts and rename the
        # temporary file to 'binf'
        shutil.rmtree(binf, True)
        os.rename(binf + '.temp', binf)
        t_self.notify.debug(
            "Generated custom binary file (internal flash + SPIFI)"
        )
        LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
class TEENSY3_1Code(object):
    """Hooks for the TEENSY3.1"""
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Hook that is run after elf is generated.

        Intentionally a no-op: old versions of targets.json still
        reference this function, so it is kept for backwards
        compatibility only.
        """
class MTSCode(object):
    """Generic MTS code"""
    @staticmethod
    def _combine_bins_helper(target_name, binf):
        """Combine bins with the bootloader for a particular target.

        Layout of the combined image: bootloader padded with 0xFF to
        64k, then the application image, then a 4-byte little-endian
        CRC32 of everything written so far.  The combined file replaces
        `binf` in place.  Returns early (with a console message) if the
        bootloader binary cannot be found.
        """
        loader = os.path.join(TOOLS_BOOTLOADERS, target_name, "bootloader.bin")
        target = binf + ".tmp"
        if not os.path.exists(loader):
            print("Can't find bootloader binary: " + loader)
            return
        # Context managers close the handles even on I/O errors (the
        # original leaked them if a read or write raised).
        with open(target, 'w+b') as outbin:
            with open(loader, 'rb') as part:
                data = part.read()
            outbin.write(data)
            outbin.write(b'\xFF' * (64*1024 - len(data)))
            with open(binf, 'rb') as part:
                data = part.read()
            outbin.write(data)
            # Rewind, checksum the full image, and append the CRC.
            outbin.seek(0, 0)
            data = outbin.read()
            outbin.seek(0, 1)
            crc = struct.pack('<I', binascii.crc32(data) & 0xFFFFFFFF)
            outbin.write(crc)
        os.remove(binf)
        os.rename(target, binf)
    @staticmethod
    def combine_bins_mts_dragonfly(t_self, resources, elf, binf):
        """A hook for the MTS Dragonfly"""
        MTSCode._combine_bins_helper("MTS_DRAGONFLY_F411RE", binf)
class MCU_NRF51Code(object):
    """NRF51 Hooks"""
    @staticmethod
    def binary_hook(t_self, resources, _, binf):
        """Hook that merges the soft device with the bin file"""
        # Scan to find the actual paths of soft device
        sdf = None
        sd_with_offsets = t_self.target.EXPECTED_SOFTDEVICES_WITH_OFFSETS
        for softdevice_and_offset_entry in sd_with_offsets:
            for hexf in resources.get_file_paths(FileType.HEX):
                if hexf.find(softdevice_and_offset_entry['name']) != -1:
                    t_self.notify.debug("SoftDevice file found %s."
                                        % softdevice_and_offset_entry['name'])
                    sdf = hexf
                if sdf is not None:
                    break
            if sdf is not None:
                break
        if sdf is None:
            t_self.notify.debug("Hex file not found. Aborting.")
            return
        # NOTE(review): after this point the code keeps using the loop
        # variable `softdevice_and_offset_entry`, which is the entry that
        # matched above (Python loop variables survive the loop).  The
        # break pattern guarantees it is the matching entry here.
        # Look for bootloader file that matches this soft device or bootloader
        # override image
        blf = None
        if t_self.target.MERGE_BOOTLOADER is True:
            for hexf in resources.get_file_paths(FileType.HEX):
                if hexf.find(t_self.target.OVERRIDE_BOOTLOADER_FILENAME) != -1:
                    t_self.notify.debug(
                        "Bootloader file found %s."
                        % t_self.target.OVERRIDE_BOOTLOADER_FILENAME
                    )
                    blf = hexf
                    break
                elif hexf.find(softdevice_and_offset_entry['boot']) != -1:
                    t_self.notify.debug("Bootloader file found %s."
                                        % softdevice_and_offset_entry['boot'])
                    blf = hexf
                    break
        # Merge user code with softdevice
        from intelhex import IntelHex
        binh = IntelHex()
        _, ext = os.path.splitext(binf)
        if ext == ".hex":
            binh.loadhex(binf)
        elif ext == ".bin":
            # Place the raw binary at the offset configured for the
            # matched soft device.
            binh.loadbin(binf, softdevice_and_offset_entry['offset'])
        if t_self.target.MERGE_SOFT_DEVICE is True:
            t_self.notify.debug("Merge SoftDevice file %s"
                                % softdevice_and_offset_entry['name'])
            sdh = IntelHex(sdf)
            # Drop the start address so merging does not conflict with
            # the user image's start address.
            sdh.start_addr = None
            binh.merge(sdh)
        if t_self.target.MERGE_BOOTLOADER is True and blf is not None:
            t_self.notify.debug("Merge BootLoader file %s" % blf)
            blh = IntelHex(blf)
            blh.start_addr = None
            binh.merge(blh)
        # Always emit a .hex next to (or replacing the name of) the input.
        with open(binf.replace(".bin", ".hex"), "w") as fileout:
            binh.write_hex_file(fileout, write_start_addr=False)
class NCS36510TargetCode(object):
    """NCS36510 Hooks"""
    @staticmethod
    def ncs36510_addfib(t_self, resources, elf, binf):
        """Add the FIB header at the start of the generated binary."""
        from tools.targets.NCS import add_fib_at_start
        # Use the build notifier instead of the stray debug print() the
        # original left in, consistent with every other hook here.
        t_self.notify.debug("binf %s" % binf)
        # Drop the 4-char extension (presumably ".bin" — TODO confirm);
        # add_fib_at_start works from the bare file name.
        add_fib_at_start(binf[:-4])
class RTL8195ACode(object):
    """RTL8195A Hooks"""
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Convert the built elf into an RTL8195A flashable binary."""
        from tools.targets.REALTEK_RTL8195AM import rtl8195a_elf2bin
        rtl8195a_elf2bin(t_self, elf, binf)
class PSOC6Code(object):
    """PSOC6 Hooks"""
    @staticmethod
    def complete(t_self, resources, elf, binf):
        """Complete the main image, merging the M0 image if one is configured."""
        from tools.targets.PSOC6 import complete as psoc6_complete
        if hasattr(t_self.target, "hex_filename"):
            hex_filename = t_self.target.hex_filename
            # Completing main image involves merging M0 image.
            from tools.targets.PSOC6 import find_cm0_image
            m0hexf = find_cm0_image(t_self, resources, elf, binf, hex_filename)
            psoc6_complete(t_self, elf, binf, m0hexf)
        else:
            psoc6_complete(t_self, elf, binf)
    @staticmethod
    def sign_image(t_self, resources, elf, binf):
        """
        Calls sign_image function to add signature to Secure Boot binary file.
        """
        version = sys.version_info
        # CySecureTools supports only Python 3+; warn (don't fail the
        # build) on older interpreters.  The original compared
        # "(version[0] < 3) is True" and misspelled the log message
        # ("sing_image", "refver").
        if version[0] < 3:
            t_self.notify.info("[PSOC6.sign_image] Be careful - produced HEX file was not signed and thus "
                               "is not compatible with Cypress Secure Boot target. "
                               "You are using Python " + str(sys.version[:5]) +
                               " which is not supported by CySecureTools. "
                               "Consider installing Python 3.4+ and rebuild target. "
                               "For more information refer to User Guide https://www.cypress.com/secureboot-sdk-user-guide")
        else:
            from tools.targets.PSOC6 import sign_image as psoc6_sign_image
            psoc6_sign_image(t_self, binf)
class ArmMuscaA1Code(object):
    """Musca-A1 Hooks"""
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Merge the non-secure binary with the configured secure image."""
        from tools.targets.ARM_MUSCA_A1 import musca_tfm_bin
        secure_image_name = t_self.target.secure_image_filename
        secure_bin = find_secure_image(
            t_self.notify, resources, binf, secure_image_name, FileType.BIN
        )
        musca_tfm_bin(t_self, binf, secure_bin)
class ArmMuscaB1Code(object):
    """Musca-B1 Hooks"""
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Merge the non-secure binary with the configured secure image."""
        from tools.targets.ARM_MUSCA_B1 import musca_tfm_bin
        secure_image_name = t_self.target.secure_image_filename
        secure_bin = find_secure_image(
            t_self.notify, resources, binf, secure_image_name, FileType.BIN
        )
        musca_tfm_bin(t_self, binf, secure_bin)
class LPC55S69Code(object):
    """LPC55S69 Hooks"""
    @staticmethod
    def binary_hook(t_self, resources, elf, binf):
        """Combine the binary with the configured secure image."""
        from tools.targets.LPC55S69 import lpc55s69_complete
        secure_image_name = t_self.target.secure_image_filename
        secure_bin = find_secure_image(
            t_self.notify, resources, binf, secure_image_name, FileType.BIN
        )
        lpc55s69_complete(t_self, binf, secure_bin)
class M2351Code(object):
    """M2351 Hooks"""
    @staticmethod
    def merge_secure(t_self, resources, ns_elf, ns_hex):
        """Merge the non-secure hex image with the configured secure image.

        The merged image overwrites `ns_hex`; the original non-secure
        image is kept alongside it with a "_no-secure-merge" suffix.
        Returns early (with a debug message) if either image is missing
        or not in Intel HEX format.
        """
        t_self.notify.info("Merging non-secure image with secure image")
        configured_secure_image_filename = t_self.target.secure_image_filename
        t_self.notify.info("Non-secure elf image %s" % ns_elf)
        t_self.notify.info("Non-secure hex image %s" % ns_hex)
        t_self.notify.info("Finding secure image %s" %
                           configured_secure_image_filename)
        s_hex = find_secure_image(
            t_self.notify,
            resources,
            ns_hex,
            configured_secure_image_filename,
            FileType.HEX
        )
        t_self.notify.info("Found secure image %s" % s_hex)
        _, ext = os.path.splitext(s_hex)
        if ext != ".hex":
            t_self.notify.debug("Secure image %s must be in Intel HEX format" % s_hex)
            return
        if not os.path.isfile(s_hex):
            t_self.notify.debug("Secure image %s must be regular file" % s_hex)
            return
        ns_main, ext = os.path.splitext(ns_hex)
        # Bug fix: the two diagnostics below reported the *secure* image
        # path (s_hex) while validating the non-secure image.
        if ext != ".hex":
            t_self.notify.debug("Non-secure image %s must be in Intel HEX format" % ns_hex)
            return
        if not os.path.isfile(ns_hex):
            t_self.notify.debug("Non-secure image %s must be regular file" % ns_hex)
            return
        # Keep original non-secure before merge with secure
        ns_nosecure_hex = ns_main + "_no-secure-merge" + ext
        t_self.notify.info("Keep no-secure-merge image %s" % ns_nosecure_hex)
        shutil.copy2(ns_hex, ns_nosecure_hex)
        # Merge secure and non-secure and save to non-secure (override it)
        from intelhex import IntelHex
        s_ih = IntelHex()
        s_ih.loadhex(s_hex)
        ns_ih = IntelHex()
        ns_ih.loadhex(ns_hex)
        # Drop the non-secure start address so the merge cannot conflict
        # with the secure image's start address.
        ns_ih.start_addr = None
        s_ih.merge(ns_ih)
        s_ih.tofile(ns_hex, 'hex')
# End Target specific section
###############################################################################
def update_target_data():
    """Instantiate all public targets.

    Mutates the module-level TARGETS, TARGET_MAP and TARGET_NAMES
    containers *in place* (slice assignment / clear+update) so modules
    that imported them keep valid references.
    """
    TARGETS[:] = [Target.get_target(tgt) for tgt, obj
                  in Target.get_json_target_data().items()
                  if obj.get("public", True)]
    # Map each target name to its unique instance
    TARGET_MAP.clear()
    # Dict comprehension instead of the original dict([(k, v) ...]).
    TARGET_MAP.update({tgt.name: tgt for tgt in TARGETS})
    TARGET_NAMES[:] = TARGET_MAP.keys()
# Shared, mutable containers of target data.  update_target_data()
# refreshes them in place, so code that imported these names keeps
# working after a refresh.
TARGETS = []
TARGET_MAP = dict()
TARGET_NAMES = []
update_target_data()
# Some targets with different name have the same exporters
EXPORT_MAP = {}
# Detection APIs
def get_target_detect_codes():
    """ Returns dictionary mapping detect_code -> platform_name
    """
    # Flattened dict comprehension; later targets overwrite earlier
    # ones on duplicate detect codes, same as the original loop.
    return {detect_code: tgt.name
            for tgt in TARGETS
            for detect_code in tgt.detect_code}
def set_targets_json_location(location=None):
    """Sets the location of the JSON file that contains the targets"""
    # First instruct Target about the new location
    Target.set_targets_json_location(location)
    # Then re-initialize TARGETS, TARGET_MAP and TARGET_NAMES. The
    # re-initialization does not create new variables, it keeps the old ones
    # instead. This ensures compatibility with code that does
    # "from tools.targets import TARGET_NAMES"
    update_target_data()
| {
"content_hash": "e384f1c5d26607ba33c75f886e468f99",
"timestamp": "",
"source": "github",
"line_count": 808,
"max_line_length": 125,
"avg_line_length": 37.54579207920792,
"alnum_prop": 0.5743811187658635,
"repo_name": "kjbracey-arm/mbed",
"id": "2ef5d6d40b0bdd1013d99e32294340df7d754098",
"size": "30337",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/targets/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4905917"
},
{
"name": "C",
"bytes": "121674109"
},
{
"name": "C++",
"bytes": "7228843"
},
{
"name": "CMake",
"bytes": "4724"
},
{
"name": "HTML",
"bytes": "1107049"
},
{
"name": "Makefile",
"bytes": "4212"
},
{
"name": "Objective-C",
"bytes": "61382"
},
{
"name": "Python",
"bytes": "1766"
}
],
"symlink_target": ""
} |
"""Test APIFlask class for input parameters causing TypeErrors.
"""
import apikit
import pytest
def test_lsstflask_type_errors():
    """Test APIFlask for input parameters causing TypeErrors.

    Each case keeps every other argument valid so the TypeError comes
    from the parameter under test.  (The original repository/description
    cases passed the version as float 2.0, so the raise could have come
    from the version check instead; the description case also had a
    stray extra positional argument.)
    """
    # No arguments at all.
    # Obviously the linter is correct here...
    with pytest.raises(TypeError):
        # pylint: disable=no-value-for-parameter
        apikit.APIFlask()
    # Name is not a string
    with pytest.raises(TypeError):
        apikit.APIFlask(("Beer", "me"), "2.0", "http://example.repo",
                        "BobApp")
    # Version is not a string
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", 2.0, "http://example.repo", "BobApp")
    # Repository is not a string
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", "2.0", ["repo", "man"], "BobApp")
    # Description is not a string
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", "2.0", "http://example.repo",
                        {"totally": "bogus"})
    # Auth is not None, the empty string or "none", or a dict
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", "2.0", "http://example.repo",
                        "BobApp", auth=5)
    # Auth is not None, the empty string or "none", or a dict
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", "2.0", "http://example.repo", "BobApp",
                        auth="bob")
    # Api_version is not a string
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", "2.0", "http://example.repo", "BobApp",
                        api_version=5, auth="")
    # Route is not None, a string, or a list of strings
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", "2.0", "http://example.repo", "BobApp",
                        route=2)
    # Route is a list that contains a non-string
    with pytest.raises(TypeError):
        apikit.APIFlask("bob", "2.0", "http://example.repo", "BobApp",
                        route=[2])
| {
"content_hash": "eafe820b7afbdffa78f929d2ac367811",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 70,
"avg_line_length": 41.0625,
"alnum_prop": 0.5809233891425672,
"repo_name": "lsst-sqre/sqre-apikit",
"id": "135af3694ea8668f9654418c874906c79dc37b6a",
"size": "1993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_lsstflask_type_errors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "36478"
}
],
"symlink_target": ""
} |
import argparse
import numpy as np
import matplotlib.pyplot as plt
from nitime.viz import winspect
# Parse the CSV data file given on the command line and show its
# window spectrum via nitime's winspect.
argParser = argparse.ArgumentParser()
argParser.add_argument('datafile')
args = argParser.parse_args()
time, signal = [], []
# Each line is "signal,time": column 0 is the signal value, column 1
# the timestamp.  `with` closes the file even if parsing raises (the
# original leaked the handle on a bad line).
with open(args.datafile, 'r') as fin:
    for line in fin:
        words = line.split(',')
        time.append(float(words[1]))
        signal.append(float(words[0]))
time, signal = np.array(time), np.array(signal)
fig01 = plt.figure()
winspect(signal, fig01)
plt.show() | {
"content_hash": "520d42c342d5946e5b042301db655fba",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 45,
"avg_line_length": 22.952380952380953,
"alnum_prop": 0.7178423236514523,
"repo_name": "NeuralProsthesisLab/unlock",
"id": "2a2c286b0045c390255af00e875027381482572b",
"size": "2075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unlock/analysis/__experiment_code/multitaper.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1386"
},
{
"name": "C++",
"bytes": "994297"
},
{
"name": "CSS",
"bytes": "8977"
},
{
"name": "Go",
"bytes": "62639"
},
{
"name": "HTML",
"bytes": "33643"
},
{
"name": "JavaScript",
"bytes": "711666"
},
{
"name": "Makefile",
"bytes": "402"
},
{
"name": "Matlab",
"bytes": "81353"
},
{
"name": "Python",
"bytes": "493447"
},
{
"name": "Shell",
"bytes": "3842"
},
{
"name": "TeX",
"bytes": "29718"
}
],
"symlink_target": ""
} |
"""Support for watching multiple cryptocurrencies."""
# pylint: disable=import-error
from __future__ import annotations
from datetime import timedelta
from pysochain import ChainSo
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import ATTR_ATTRIBUTION, CONF_ADDRESS, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
ATTRIBUTION = "Data provided by chain.so"
# Platform-configuration key for the coin network (e.g. provided in YAML).
CONF_NETWORK = "network"
DEFAULT_NAME = "Crypto Balance"
# Poll the chain.so API every five minutes.
SCAN_INTERVAL = timedelta(minutes=5)
# Address and network are required; the display name is optional.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_ADDRESS): cv.string,
        vol.Required(CONF_NETWORK): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the sochain sensors."""
    network = config[CONF_NETWORK]
    chainso = ChainSo(
        network,
        config[CONF_ADDRESS],
        hass.loop,
        async_get_clientsession(hass),
    )
    sensor = SochainSensor(config[CONF_NAME], network.upper(), chainso)
    async_add_entities([sensor], True)
class SochainSensor(SensorEntity):
    """Representation of a Sochain sensor."""

    def __init__(self, name, unit_of_measurement, chainso):
        """Initialize the sensor."""
        self._name = name
        self._unit_of_measurement = unit_of_measurement
        self.chainso = chainso

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def native_value(self):
        """Return the state of the sensor."""
        # Guard clause instead of the original conditional expression.
        if self.chainso is None:
            return None
        return self.chainso.data.get("confirmed_balance")

    @property
    def native_unit_of_measurement(self):
        """Return the unit of measurement this sensor expresses itself in."""
        return self._unit_of_measurement

    @property
    def extra_state_attributes(self):
        """Return the state attributes of the sensor."""
        return {ATTR_ATTRIBUTION: ATTRIBUTION}

    async def async_update(self):
        """Get the latest state of the sensor."""
        await self.chainso.async_get_data()
| {
"content_hash": "2b042d87d2bf5069ac73ec6e56f495c1",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 77,
"avg_line_length": 29.943181818181817,
"alnum_prop": 0.6944971537001897,
"repo_name": "toddeye/home-assistant",
"id": "157d94b87062c7c21fc75b52c451ecf330085566",
"size": "2635",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sochain/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3005"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "47414832"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer import configuration
from chainer import function_node
from chainer.utils import type_check
# Shortcuts to the cuDNN wrapper and raw libcudnn bindings, only bound
# when a cuDNN-enabled CuPy is available.
if cuda.cudnn_enabled:
    cudnn = cuda.cudnn
    libcudnn = cuda.cuda.cudnn
class GroupNormalization(function_node.FunctionNode):
    """Function node computing group normalization.

    Normalizes each group of channels by its own mean/std, then scales
    and shifts per channel with ``gamma``/``beta``.
    """

    def __init__(self, groups, eps=1e-5):
        if not isinstance(groups, int):
            raise TypeError('Argument: \'groups\' type must be (int).')
        self.groups = groups
        self.eps = eps
        # Per-group statistics saved by forward for use in backward.
        self.mean = None
        self.inv_std = None
        # Dummy scale vector required by the cuDNN batch-norm call;
        # stays None on the pure-NumPy/CuPy path.
        self.dummy_gamma = None
    def check_type_forward(self, in_types):
        # Inputs are (x, gamma, beta); gamma/beta are per-channel vectors.
        type_check.expect(in_types.size() == 3)
        x_type, gamma_type, beta_type = in_types
        type_check.expect(
            x_type.dtype.kind == 'f',
            x_type.ndim >= 2,
            gamma_type.ndim == 1,
            beta_type.ndim == 1,
            gamma_type.dtype == x_type.dtype,
            beta_type.dtype == x_type.dtype,
            x_type.shape[1] == gamma_type.shape[0],
            gamma_type.shape == beta_type.shape,
        )
    def forward(self, inputs):
        if inputs[0].shape[1] % self.groups != 0:
            raise ValueError('The number of channels {} is not divisible by '
                             '\'groups\' argument {}.'
                             .format(inputs[0].shape[1], self.groups))
        xp = backend.get_array_module(*inputs)
        # Dispatch to cuDNN when available (5000 = minimum cuDNN version).
        if xp is cuda.cupy and chainer.should_use_cudnn('>=auto', 5000):
            return self.forward_cudnn(inputs)
        self.retain_inputs((0, 1))
        x, gamma, beta = inputs
        orig_shape = x.shape
        batch_size, channels = orig_shape[:2]
        groups = self.groups
        # Flatten to (batch * groups, elements_per_group) so the
        # statistics reduce along axis 1.
        reduced_shape = (batch_size * groups, -1)
        x = x.reshape(reduced_shape)
        self.mean = x.mean(axis=1)
        x_hat = x - self.mean[:, None]
        var = (x_hat * x_hat).mean(axis=1)
        var += self.eps
        # Compute inv_std = 1/sqrt(var + eps) in place, reusing `var`'s
        # buffer; `del var` avoids accidental use of the stale alias.
        self.inv_std = var
        del var
        xp.sqrt(self.inv_std, out=self.inv_std, dtype=x.dtype)
        xp.reciprocal(self.inv_std, out=self.inv_std)
        x_hat *= self.inv_std[:, None]
        # Per-channel scale and shift on the (batch, channel, rest) view.
        y = x_hat.reshape((batch_size, channels, -1))
        y *= gamma[:, None]
        y += beta[:, None]
        y = y.reshape(orig_shape)
        return y,
    def forward_cudnn(self, inputs):
        if self.eps < libcudnn.CUDNN_BN_MIN_EPSILON:
            raise RuntimeError(
                'cuDNN does not allow an eps value '
                'less than {}.'.format(libcudnn.CUDNN_BN_MIN_EPSILON))
        self.retain_inputs((0, 1))
        x, gamma, beta = inputs
        xp = cuda.cupy
        orig_shape = x.shape
        batch_size, channels = orig_shape[:2]
        groups = self.groups
        # Reuse cuDNN's batch normalization by treating each group as
        # one "channel" of an NCHW tensor.
        cudnn_shape = (1, batch_size * groups, -1, 1)
        x = x.reshape(cudnn_shape)
        with x.device:
            # Identity scale / zero shift: normalization only; the real
            # gamma/beta are applied afterwards per channel.
            dummy_beta = xp.zeros(batch_size * groups, dtype=x.dtype)
            self.dummy_gamma = xp.ones_like(dummy_beta)
        x_hat, self.mean, self.inv_std = \
            cudnn.batch_normalization_forward_training(
                x, self.dummy_gamma, dummy_beta, dummy_beta, dummy_beta, None,
                None, self.eps, 1.0, True, libcudnn.CUDNN_BATCHNORM_SPATIAL,
                configuration.config.debug)
        y = x_hat.reshape((batch_size, channels, -1))
        # Fused elementwise per-channel scale-and-shift kernel.
        cuda.elementwise(
            'T gamma, T beta', 'T y',
            'y = y * gamma + beta',
            'groupnorm_y')(gamma[:, None], beta[:, None], y)
        y = y.reshape(orig_shape)
        return y,
    def backward(self, indexes, grad_outputs):
        x, gamma = self.get_retained_inputs()
        gy, = grad_outputs
        orig_shape = x.shape
        batch_size = orig_shape[0]
        groups = self.groups
        reduced_shape = (batch_size * groups, -1)
        x = x.reshape(reduced_shape)
        # Recompute the normalized activations from the saved statistics.
        x_hat, = _XHat(
            self.eps, self.mean, self.inv_std,
            self.dummy_gamma).apply((x,))
        # Gradients through the per-channel scale/shift ...
        gx_hat, ggamma, gbeta = _ScaleShiftGrad().apply((x_hat, gamma, gy))
        # ... and through the normalization itself.
        gx, = _XHatGrad(
            self.eps, self.mean, self.inv_std,
            self.dummy_gamma, x_hat.array).apply((x, gx_hat))
        gx = gx.reshape(orig_shape)
        return gx, ggamma, gbeta
class _ScaleShiftGrad(function_node.FunctionNode):
    """Gradient of the per-channel scale-and-shift ``y = x_hat*gamma + beta``.

    Takes (x_hat, gamma, gy) and returns (gx_hat, ggamma, gbeta).
    """

    def forward(self, inputs):
        self.retain_inputs((0, 1, 2))
        x_hat, gamma, gy = inputs
        batch_size, channels = gy.shape[:2]
        # View both tensors as (batch, channel, rest) for broadcasting.
        gy = gy.reshape((batch_size, channels, -1))
        reduced_shape = x_hat.shape
        x_hat = x_hat.reshape((batch_size, channels, -1))
        gx_hat = gy * gamma[:, None]
        gbeta = gy.sum(axis=(0, 2))
        if backend.get_array_module(x_hat) is cuda.cupy:
            # Fused reduction kernel on GPU avoids materializing gy*x_hat.
            ggamma = cuda.reduce(
                'T gy, T x_hat', 'T ggamma',
                'gy * x_hat', 'a + b', 'ggamma = a', '0',
                'groupnorm_ggamma')(gy, x_hat, axis=(0, 2))
        else:
            ggamma = (gy * x_hat).sum(axis=(0, 2))
        gx_hat = gx_hat.reshape(reduced_shape)
        return gx_hat, ggamma, gbeta
    def backward(self, indexes, grad_outputs):
        # Second-order gradients (double backprop) of the scale/shift.
        x_hat, gamma, gy = self.get_retained_inputs()
        ggx_hat, gggamma, ggbeta = grad_outputs
        orig_shape = gy.shape
        batch_size, channels = gy.shape[:2]
        gy = gy.reshape((batch_size, channels, -1))
        reduced_shape = x_hat.shape
        x_hat = x_hat.reshape((batch_size, channels, -1))
        ggx_hat = ggx_hat.reshape((batch_size, channels, -1))
        gx_hat2 = gggamma[:, None] * gy
        ggamma2 = chainer.functions.sum(ggx_hat * gy, axis=(0, 2))
        ggy = (ggx_hat * gamma[:, None] + gggamma[:, None] * x_hat +
               ggbeta[:, None])
        gx_hat2 = gx_hat2.reshape(reduced_shape)
        ggy = ggy.reshape(orig_shape)
        return gx_hat2, ggamma2, ggy
class _XHat(function_node.FunctionNode):
    """Recompute normalized activations ``x_hat = (x - mean) * inv_std``.

    ``mean``/``inv_std`` are the statistics saved by the forward pass;
    ``dummy_gamma`` is carried only so it can be handed to _XHatGrad
    for the cuDNN backward path.
    """

    def __init__(self, eps, mean, inv_std, dummy_gamma):
        self.eps = eps
        self.mean = mean
        self.inv_std = inv_std
        self.dummy_gamma = dummy_gamma
    def forward_cpu(self, inputs):
        self.retain_inputs((0,))
        x, = inputs
        x_hat = x - self.mean[:, None]
        x_hat *= self.inv_std[:, None]
        self.retain_outputs((0,))
        return x_hat,
    def forward_gpu(self, inputs):
        self.retain_inputs((0,))
        x, = inputs
        # Single fused elementwise kernel on GPU.
        x_hat = cuda.elementwise(
            'T x, T mean, T inv_std', 'T x_hat',
            'x_hat = (x - mean) * inv_std',
            'groupnorm_x_hat')(x, self.mean[:, None], self.inv_std[:, None])
        self.retain_outputs((0,))
        return x_hat,
    def backward(self, indexes, grad_outputs):
        x, = self.get_retained_inputs()
        x_hat, = self.get_retained_outputs()
        gx_hat, = grad_outputs
        return _XHatGrad(
            self.eps, self.mean, self.inv_std,
            self.dummy_gamma, x_hat.array).apply((x, gx_hat))
class _XHatGrad(function_node.FunctionNode):
    """Gradient of the normalization ``x_hat = (x - mean) * inv_std``.

    Inputs are (x, gx_hat); output is gx.  ``x_hat`` (the precomputed
    normalized activations) is passed in to avoid recomputation and is
    released after use.
    """

    def __init__(self, eps, mean, inv_std, dummy_gamma, x_hat):
        self.eps = eps
        self.mean = mean
        self.inv_std = inv_std
        self.dummy_gamma = dummy_gamma
        self.x_hat = x_hat
    def forward(self, inputs):
        xp = backend.get_array_module(*inputs)
        # dummy_gamma is only set when the forward pass went through
        # cuDNN; its backward must be used in that case too.
        if xp is cuda.cupy and chainer.should_use_cudnn('>=auto', 5000) and \
                self.dummy_gamma is not None:
            return self.forward_cudnn(inputs)
        self.retain_inputs((0, 1))
        _, gx_hat = inputs
        x_hat = self.x_hat
        # Drop the reference so the array can be freed after this pass.
        self.x_hat = None
        gx_hat_avg = gx_hat.mean(axis=1, keepdims=True)
        gx_hat_x_hat_avg = (gx_hat * x_hat).mean(axis=1, keepdims=True)
        # Standard normalization backward: remove the mean and the
        # component along x_hat, then rescale by inv_std.
        gx_std = gx_hat - gx_hat_avg - x_hat * gx_hat_x_hat_avg
        gx = self.inv_std[:, None] * gx_std
        self.retain_outputs((0,))
        return gx,
    def forward_cudnn(self, inputs):
        if self.eps < libcudnn.CUDNN_BN_MIN_EPSILON:
            raise RuntimeError(
                'cuDNN does not allow an eps value '
                'less than {}.'.format(libcudnn.CUDNN_BN_MIN_EPSILON))
        self.retain_inputs((0, 1))
        x, gx_hat = inputs
        self.x_hat = None
        # `x[None, :, :, None]` is slower because it results in a different
        # strides and cuDNN doesn't recognize it as a contiguous array.
        reduced_shape = x.shape
        cudnn_shape = (1,) + reduced_shape + (1,)
        x = x.reshape(cudnn_shape)
        gx_hat = gx_hat.reshape(cudnn_shape)
        gx, _, _ = cudnn.batch_normalization_backward(
            x, self.dummy_gamma, gx_hat,
            self.mean, self.inv_std, self.eps,
            True, libcudnn.CUDNN_BATCHNORM_SPATIAL,
            configuration.config.debug)
        gx = gx.reshape(reduced_shape)
        self.retain_outputs((0,))
        return gx,
    def backward(self, indexes, grad_outputs):
        # Double-backprop of the normalization gradient; see the sketch
        # comments below for the derivation.
        F = chainer.functions
        x, gx_hat = self.get_retained_inputs()
        gx, = self.get_retained_outputs()
        ggx, = grad_outputs
        x_hat, = _XHat(
            self.eps, self.mean, self.inv_std,
            self.dummy_gamma).apply((x,))
        ret = []
        if 0 in indexes:
            # -- sketch of gx2, which is grad of x through gx
            # gx = inv_std * gx_std
            # dgx = dinv_std * gx_std + inv_std * dgx_std
            #
            # -gx2l = (ggx * dinv_std * gx_std) / dx
            #       = sum(ggx * gx_std) * (dinv_std / dx)
            #       = -sum(ggx * gx_std) * inv_std^2 * x_hat / N
            #       = -inv_std * x_hat * mean(ggx * gx)
            #
            # By `gx_std = gx_hat - gx_hat_avg - x_hat * gx_hat_x_hat_avg`,
            # -gx_hat2r = (ggx * inv_std * dgx_std) / dx_hat
            #           = -inv_std * (ggx * mean(gx_hat * x_hat) +
            #                         gx_hat * mean(ggx * x_hat))
            gx2l_std = x_hat * F.mean(ggx * gx, axis=1, keepdims=True)
            gx2l, = _MulInvStd(
                self.eps, self.mean, self.inv_std,
                self.dummy_gamma).apply((x, gx2l_std))
            gx_hat2r_std = (
                ggx * F.mean(gx_hat * x_hat, axis=1, keepdims=True) +
                gx_hat * F.mean(ggx * x_hat, axis=1, keepdims=True))
            gx_hat2r, = _MulInvStd(
                self.eps, self.mean, self.inv_std,
                self.dummy_gamma).apply((x, gx_hat2r_std))
            gx2r, = _XHatGrad(
                self.eps, self.mean, self.inv_std,
                self.dummy_gamma, x_hat.array).apply((x, gx_hat2r))
            gx2 = -(gx2l + gx2r)
            ret.append(gx2)
        if 1 in indexes:
            # Grad w.r.t. gx_hat is another application of the same
            # normalization backward to ggx.
            ggx_hat, = _XHatGrad(
                self.eps, self.mean, self.inv_std,
                self.dummy_gamma, x_hat.array).apply((x, ggx))
            ret.append(ggx_hat)
        return ret
class _MulInvStd(function_node.FunctionNode):
    """Differentiable multiply by the saved ``inv_std`` statistics.

    Inputs are (x, y); output is ``z = inv_std[:, None] * y``.  ``x`` is
    retained only so the backward pass can recompute x_hat and the
    gradient of inv_std with respect to x.
    """

    def __init__(self, eps, mean, inv_std, dummy_gamma):
        self.eps = eps
        self.mean = mean
        self.inv_std = inv_std
        self.dummy_gamma = dummy_gamma
    def forward(self, inputs):
        self.retain_inputs((0,))
        _, y = inputs
        z = self.inv_std[:, None] * y
        self.retain_outputs((0,))
        return z,
    def backward(self, indexes, grad_outputs):
        x, = self.get_retained_inputs()
        z, = self.get_retained_outputs()
        gz, = grad_outputs
        x_hat, = _XHat(
            self.eps, self.mean, self.inv_std,
            self.dummy_gamma).apply((x,))
        # Gradient through inv_std's dependence on x.
        gx_std = x_hat * chainer.functions.mean(gz * z, axis=1, keepdims=True)
        gx, = _MulInvStd(
            self.eps, self.mean, self.inv_std,
            self.dummy_gamma).apply((x, gx_std))
        # Gradient w.r.t. y is simply another multiply by inv_std.
        gy, = _MulInvStd(
            self.eps, self.mean, self.inv_std,
            self.dummy_gamma).apply((x, gz))
        return -gx, gy
def group_normalization(x, groups, gamma, beta, eps=1e-5):
    """Group normalization function.

    Divides the channels into ``groups`` groups, computes mean and
    variance within each group, normalizes by those statistics, then
    scales and shifts the result per channel.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Batch tensors.
            First dimension of this value must be the size of minibatch and
            second dimension must be the number of channels.
            Moreover, this value must have one or more following dimensions,
            such as height and width.
        groups (int):
            The number of channel groups.
            This value must be a divisor of the number of channels.
        gamma (:class:`~chainer.Variable` or :ref:`ndarray`):
            Scaling parameter.
        beta (:class:`~chainer.Variable` or :ref:`ndarray`):
            Shifting parameter.
        eps (float): Epsilon value for numerical stability of normalization.
    Returns:
        ~chainer.Variable: The output variable which has the same shape
        as :math:`x`.
    See: `Group Normalization <https://arxiv.org/abs/1803.08494>`_
    """
    y, = GroupNormalization(groups, eps).apply((x, gamma, beta))
    return y
| {
"content_hash": "f1260c6b24e2c8d2dc9d8f2bffe3b91c",
"timestamp": "",
"source": "github",
"line_count": 386,
"max_line_length": 78,
"avg_line_length": 34.12176165803109,
"alnum_prop": 0.5418722951939868,
"repo_name": "tkerola/chainer",
"id": "82a57b9aad4bdd9e52914e4f42a9b8b5049a6d25",
"size": "13171",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "chainer/functions/normalization/group_normalization.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "3471733"
}
],
"symlink_target": ""
} |
import sublime # type: ignore
from os import path
from unittest import TestCase
from typing import List, Tuple, Optional
try:
from lib import entities
except ImportError:
# If we're running these tests in UnitTesting, then we need to use
# The package name - Tab Filter - so let's grab import lib and try again.
from importlib import import_module
entities = import_module(".lib.entities", "Tab Filter")
Tab = entities.Tab
class TabTestCase(TestCase):
"""Tests the tab entity works as expected."""
def setUp(self) -> None:
# Close any existing views so as to avoid polluting the results.
for view in sublime.active_window().views():
view.window().focus_view(view)
view.window().run_command("close_file")
def tearDown(self) -> None:
for view in sublime.active_window().views():
view.window().focus_view(view)
view.set_scratch(True)
view.window().run_command("close_file")
def test_initialisation(self) -> None:
"""Test initialising a Tab."""
dir: str = path.dirname(__file__)
fixture: str = path.normpath(
path.join(dir, "./fixtures/foo.txt")
)
scratch_view: sublime.View = sublime.active_window().new_file()
file_view: sublime.View = sublime.active_window().open_file(fixture)
dataset: Tuple[Tuple[sublime.View, str, bool, Optional[str]], ...] = (
(scratch_view, "untitled", False, ""),
(file_view, path.basename(fixture), True, path.dirname(fixture))
)
for (view, name, is_file, pathname) in dataset:
with self.subTest(
view=view,
name=name,
is_file=is_file,
pathname=pathname
):
entity: Tab = Tab(view)
self.assertEquals(name, entity.get_title())
self.assertEquals(bool(is_file), entity.is_file_view())
self.assertEquals(pathname, entity.get_path())
def test_get_title(self) -> None:
"""Tests getting the title of the Tab."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertEquals("untitled", entity.get_title())
def test_get_subtitle(self) -> None:
"""Tests getting the subtitle of the Tab."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertEquals("untitled", entity.get_subtitle())
def test_set_title(self) -> None:
"""Tests setting the title of the Tab."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertEquals("untitled", entity.get_title())
entity.set_title("foo")
self.assertEquals("foo", entity.get_title())
def test_set_subtitle(self) -> None:
"""Tests setting the subtitle of the Tab."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertEquals("untitled", entity.get_subtitle())
entity.set_subtitle("foo")
self.assertEquals("foo", entity.get_subtitle())
def test_is_file_view(self) -> None:
"""Tests checking whether the Tab's view is a file or not."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertEquals(False, entity.is_file_view())
dir: str = path.dirname(__file__)
fixture: str = path.normpath(
path.join(dir, "./fixtures/foo.txt")
)
file_view: sublime.View = sublime.active_window().open_file(fixture)
entity = Tab(file_view)
self.assertEquals(True, entity.is_file_view())
def test_get_path(self) -> None:
"""Tests getting the path for a Tab."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertEquals("", entity.get_path())
dir: str = path.dirname(__file__)
fixture: str = path.normpath(
path.join(dir, "./fixtures/foo.txt")
)
file_view: sublime.View = sublime.active_window().open_file(fixture)
entity = Tab(file_view)
expected: str = path.dirname(fixture)
self.assertEquals(expected, entity.get_path())
def test_get_view(self) -> None:
"""Tests getting the underlying view for a Tab."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertIs(scratch_view, entity.get_view())
def test_add_caption(self) -> None:
"""Test adding captions to a Tab."""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
# Ensure we start with no captions.
self.assertListEqual([], entity.get_captions())
entity.add_caption("bar")
# Ensure a regular caption can be added.
self.assertListEqual(["bar"], entity.get_captions())
entity.add_caption("baz")
# Ensure additional captions can be added.
self.assertListEqual(["bar", "baz"], entity.get_captions())
second_scratch_view: sublime.View = sublime.active_window().new_file()
entity = Tab(second_scratch_view)
entity.add_caption(123) # type: ignore
# Ensure captions are stringified
self.assertListEqual(["123"], entity.get_captions())
def test_get_captions(self) -> None:
"""Tests getting the captions for a Tab"""
scratch_view: sublime.View = sublime.active_window().new_file()
entity: Tab = Tab(scratch_view)
self.assertListEqual([], entity.get_captions())
entity.add_caption("test")
self.assertListEqual(["test"], entity.get_captions())
def test_get_details_caption_configuration(self) -> None:
    """Details lead with title/subtitle and append the joined captions."""
    view: sublime.View = sublime.active_window().new_file()
    tab: Tab = Tab(view)
    # Without captions at all.
    self.assertListEqual(["untitled", "untitled"], tab.get_details())
    # Still the same with (empty) captions on a second read.
    self.assertListEqual(["untitled", "untitled"], tab.get_details())
    tab.add_caption("bar")
    # A bespoke caption is appended as a trailing entry.
    self.assertListEqual(["untitled", "untitled", "bar"], tab.get_details())
    tab.add_caption("baz")
    # Multiple captions are joined with a comma into a single entry.
    self.assertListEqual(
        ["untitled", "untitled", "bar, baz"], tab.get_details()
    )
def test_equality_check(self) -> None:
    """Tabs over the same view compare equal until their captions diverge."""
    scratch_view: sublime.View = sublime.active_window().new_file()
    t1: Tab = Tab(scratch_view)
    t2: Tab = Tab(scratch_view)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(t1, t2)
    t3: Tab = Tab(scratch_view)
    t3.add_caption("Force a difference")
    self.assertNotEqual(t1, t3)
def test_to_string(self) -> None:
    """str() of a Tab is its title."""
    scratch_view: sublime.View = sublime.active_window().new_file()
    entity: Tab = Tab(scratch_view)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(entity.get_title(), str(entity))
| {
"content_hash": "a47f982d610c25439294ddfb276b6c41",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 78,
"avg_line_length": 33.91228070175438,
"alnum_prop": 0.588334195550957,
"repo_name": "robinmalburn/sublime-tabfilter",
"id": "23eaf278174f460b9c81a9dc5a7ace5228300425",
"size": "7827",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_entities.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46344"
}
],
"symlink_target": ""
} |
import os
import re
import numpy as np
import pyproj
import rasterio as rio
from sklearn.neighbors import KernelDensity
from l8 import BANDS, SCENE_ID_PATTERN, get_date, spectrum
from l8 import timeseries
# Log-probability divisor used to normalise KDE scores into roughly [0, 1]
# before scaling to the uint16 range. Empirically found for demo.
log_probability_threshold = -3000000
def is_scene_directory(srcpath):
    """Return True when the basename of srcpath matches an L8 scene id."""
    scene_id = os.path.basename(os.path.normpath(srcpath))
    matched = re.match(SCENE_ID_PATTERN, scene_id)
    return matched is not None
def detect_change(directory):
    """Score per-pixel change probabilities for a stack of L8 scenes.

    For every pixel, fits a 2D kernel density estimate over (blue-red,
    green-ir) using the first 20 time points, scores the remaining time
    points against it, and writes the scaled scores into one GeoTIFF per
    remaining date (named "<date>.tif" in the working directory).

    :param directory:
        Parent directory containing various L8 scenes corresponding to the same path/row.
    """
    dst_proj = pyproj.Proj(init='epsg:4326')

    # Materialise as lists so they can be indexed and sliced repeatedly
    # below (filter/map return one-shot iterators on Python 3).
    sceneids = list(filter(lambda sid: re.match(SCENE_ID_PATTERN, sid), os.listdir(directory)))
    sceneids = timeseries.sort_by_date(sceneids)
    srcdirs = [os.path.join(directory, d) for d in sceneids]

    # Probe a single image to get metadata (e.g. array shape, projection)
    srcdir = srcdirs[0]
    sceneid = os.path.basename(os.path.normpath(srcdir))
    srcpath = os.path.join(srcdir, "%s_B1.TIF" % sceneid)

    with rio.drivers():
        with rio.open(srcpath, 'r') as src:
            src_proj = pyproj.Proj(src.crs)
            shape = src.shape
            metadata = src.meta.copy()

    # Instantiate (empty) probability maps. Since 20 time points are used
    # to fit the KDE below, only the remaining dates get an output file.
    with rio.drivers():
        for srcdir in srcdirs[20:]:
            sid = os.path.basename(os.path.normpath(srcdir))
            date = str(get_date(sid))
            with rio.open("%s.tif" % date, 'w', **metadata) as dst:
                pass

        # Iterate over all pixels in the image.
        for j in range(shape[0]):
            for i in range(shape[1]):
                # NOTE(review): `src` is used here after its `with` block
                # has exited — confirm rasterio keeps .ul() usable on a
                # closed dataset, or hoist this loop into the block above.
                lng, lat = pyproj.transform(src_proj, dst_proj, *src.ul(j, i))
                # timeseries.extract expects lng/lat coordinates.
                dates, ts = timeseries.extract(srcdirs, lng, lat)

                kde = KernelDensity(kernel='gaussian', bandwidth=1.0, algorithm='ball_tree')

                # For now we just look at b-r vs. g-ir
                bl = ts[:, 1]
                gr = ts[:, 2]
                rd = ts[:, 3]
                ir = ts[:, 4]
                blrd = bl - rd
                grir = gr - ir
                x = np.vstack((blrd, grir)).transpose()

                # Fit on the first 20 time points ...
                kde.fit(x[0:20])
                # ... and score the remaining time points against them.
                logprob = kde.score_samples(x[20:])

                # Log probabilities aren't good for visualizing within an
                # image, so convert to 16bit integer range.
                probabilities = logprob / log_probability_threshold
                values = (65535 * probabilities).astype(np.uint16)

                # Write each score into its date's map at this pixel.
                window = ((j, j + 1), (i, i + 1))
                for idx, srcdir in enumerate(srcdirs[20:]):
                    sid = os.path.basename(os.path.normpath(srcdir))
                    date = str(get_date(sid))
                    with rio.open("%s.tif" % date, 'r+') as dst:
                        value = np.array([[values[idx]]])
                        dst.write_band(1, value, window=window)
| {
"content_hash": "0c6503a762f0122eec8a24c28ba25c79",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 89,
"avg_line_length": 32.15094339622642,
"alnum_prop": 0.5475352112676056,
"repo_name": "kapadia/l8",
"id": "d3cb1b28cc42f3d2344ca1b230f261c6c0385675",
"size": "3597",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "l8/change.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26641"
}
],
"symlink_target": ""
} |
import numpy as np
from collections import namedtuple
optimise = None  # cached scipy.optimize module; imported lazily (expensive)

RadialFitResult = namedtuple('RadialFitResult', ['centre', 'radius'])


def radial_fit(p):
    """
    Find the least squares radial fitting a set of ND points.

    Parameters
    ----------
    p : ``(N, D)`` `ndarray`
        Points to find the centre of

    Returns
    -------
    centre_i : (D,) `ndarray`
        The ND coordinates of the centre of the circle.
    r_i : `float`
        The radius of the circle.

    References
    ----------
    .. [1] http://www.scipy.org/Cookbook/Least_Squares_Circle
    """
    global optimise
    if optimise is None:
        # Import once and cache in the module-level name. The original
        # code bound the module to a *local* name only, so the cache was
        # never populated and the import ran on every call.
        from scipy import optimize  # expensive
        optimise = optimize

    def error(tuple_c, x):
        # Residuals: deviation of each point's radius from the mean radius.
        c = np.array(tuple_c)
        err = r(x, c)
        return err - err.mean()

    # Distance of every point in x from a candidate centre c.
    r = lambda x, c: np.sqrt(np.sum((x - c) ** 2, axis=1))
    # Mean radius for a candidate centre.
    av_r = lambda x, c: np.mean(r(x, c))

    # The centroid is a good initial guess for the circle centre.
    c_est = np.mean(p, axis=0)
    c_i, ier = optimise.leastsq(error, c_est, args=(p,))
    return RadialFitResult(centre=c_i, radius=av_r(p, c_i))
| {
"content_hash": "7680d33bb7725aacb520f0c874d508cb",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 69,
"avg_line_length": 24.930232558139537,
"alnum_prop": 0.5802238805970149,
"repo_name": "nontas/menpo3d",
"id": "a718cffdd5d69ace31ab1a421c7b9a2731648a17",
"size": "1072",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "menpo3d/math.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "155"
},
{
"name": "Python",
"bytes": "388496"
},
{
"name": "Shell",
"bytes": "124"
}
],
"symlink_target": ""
} |
import pytest
from etapi.public.forms import LoginForm
from etapi.user.forms import RegisterForm
from .factories import UserFactory
class TestRegisterForm:
    """Validation behaviour of the registration form."""

    def test_validate_user_already_registered(self, user):
        """A username that is already taken is rejected."""
        form = RegisterForm(
            username=user.username,
            email='foo@bar.com',
            password='example',
            confirm='example',
        )
        assert form.validate() is False
        assert 'Username already registered' in form.username.errors

    def test_validate_email_already_registered(self, user):
        """An email address that is already taken is rejected."""
        form = RegisterForm(
            username='unique',
            email=user.email,
            password='example',
            confirm='example',
        )
        assert form.validate() is False
        assert 'Email already registered' in form.email.errors

    def test_validate_success(self, db):
        """A brand-new username/email combination validates."""
        form = RegisterForm(
            username='newusername',
            email='new@test.test',
            password='example',
            confirm='example',
        )
        assert form.validate() is True
class TestLoginForm:
    """Validation behaviour of the login form."""

    def test_validate_success(self, user):
        """Correct credentials validate and expose the matched user."""
        user.set_password('example')
        user.save()
        form = LoginForm(username=user.username, password='example')
        assert form.validate() is True
        assert form.user == user

    def test_validate_unknown_username(self, db):
        """An unknown username fails and no user is attached."""
        form = LoginForm(username='unknown', password='example')
        assert form.validate() is False
        assert 'Unknown username' in form.username.errors
        assert form.user is None

    def test_validate_invalid_password(self, user):
        """A wrong password fails validation."""
        user.set_password('example')
        user.save()
        form = LoginForm(username=user.username, password='wrongpassword')
        assert form.validate() is False
        assert 'Invalid password' in form.password.errors

    def test_validate_inactive_user(self, user):
        """Correct credentials still fail for a deactivated account."""
        user.active = False
        user.set_password('example')
        user.save()
        form = LoginForm(username=user.username, password='example')
        assert form.validate() is False
        assert 'User not activated' in form.username.errors
| {
"content_hash": "1bfa693fdd9bf08f8998abc1cb17d2c7",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 74,
"avg_line_length": 36.81666666666667,
"alnum_prop": 0.6731552738795835,
"repo_name": "hypebeast/etapi",
"id": "6d5afbd189c1d1573006d84030e3809788cc6e46",
"size": "2233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "791"
},
{
"name": "HTML",
"bytes": "33818"
},
{
"name": "JavaScript",
"bytes": "10890"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "71826"
},
{
"name": "Shell",
"bytes": "885"
}
],
"symlink_target": ""
} |
import sys
import pylibvw
class SearchTask():
    """Base class for search-based structured prediction tasks.

    Subclasses implement ``_run`` (and optionally ``_setup`` /
    ``_takedown``) and drive training/prediction through ``learn`` and
    ``predict``. The real input example is delivered to ``_run`` via VW's
    structured-predict hook rather than through the learner call itself.
    """

    def __init__(self, vw, sch, num_actions):
        # Handles to the underlying VW instance and search object.
        self.vw = vw
        self.sch = sch
        # A blank line triggers VW's end-of-multiline-example processing.
        self.blank_line = self.vw.example("")
        self.blank_line.finish()
        # Dummy example used only to invoke the learner; the task's real
        # input reaches ._run through the hook installed in _call_vw.
        self.bogus_example = self.vw.example("1 | x")

    def __del__(self):
        self.bogus_example.finish()
        pass

    def _run(self, your_own_input_example):
        # Subclass hook: perform the structured prediction for one input.
        pass

    def _call_vw(self, my_example, isTest):  # run_fn, setup_fn, takedown_fn, isTest):
        """Install the hook closures and drive VW over the dummy example."""
        self._output = None
        self.bogus_example.set_test_only(isTest)
        # Captures my_example; its result is surfaced through self._output.
        def run(): self._output = self._run(my_example)
        setup = None
        takedown = None
        # Optional per-example hooks, wired up only if the subclass defines them.
        if callable(getattr(self, "_setup", None)): setup = lambda: self._setup(my_example)
        if callable(getattr(self, "_takedown", None)): takedown = lambda: self._takedown(my_example)
        self.sch.set_structured_predict_hook(run, setup, takedown)
        self.vw.learn(self.bogus_example)
        self.vw.learn(self.blank_line)  # this will cause our ._run hook to get called

    def learn(self, data_iterator):
        """Run one training pass over every example from data_iterator."""
        for my_example in data_iterator.__iter__():
            self._call_vw(my_example, isTest=False);

    def example(self, initStringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Construct an example; only builds a real one when the search
        predictor actually needs example input."""
        if self.sch.predict_needs_example():
            return self.vw.example(initStringOrDict, labelType)
        else:
            return self.vw.example(None, labelType)

    def predict(self, my_example):
        """Predict (test mode) on my_example and return the task output."""
        self._call_vw(my_example, isTest=True);
        return self._output
class vw(pylibvw.vw):
    """The pyvw.vw object is a (trivial) wrapper around the pylibvw.vw
    object; you're probably best off using this directly and ignoring
    the pylibvw.vw structure entirely."""

    def __init__(self, argString=None, **kw):
        """Initialize the vw object. The (optional) argString is the
        same as the command line arguments you'd use to run vw (eg,"--audit").
        you can also use key/value pairs as in:
          pyvw.vw(audit=True, b=24, k=True, c=True, l2=0.001)
        or a combination, for instance:
          pyvw.vw("--audit", b=26)"""
        def format(key,val):
            # False booleans vanish; True booleans become bare flags;
            # everything else renders as "-k val" / "--key val".
            if type(val) is bool and val == False: return ''
            s = ('-'+key) if len(key) == 1 else ('--'+key)
            if type(val) is not bool or val != True: s += ' ' + str(val)
            return s
        # NOTE: dict.iteritems() — this module targets Python 2.
        l = [format(k,v) for k,v in kw.iteritems()]
        if argString is not None: l = [argString] + l
        #print ' '.join(l)
        pylibvw.vw.__init__(self,' '.join(l))
        # Guards against double-finish (finish() and __del__ both call it).
        self.finished = False

    def get_weight(self, index, offset=0):
        """Given an (integer) index (and an optional offset), return
        the weight for that position in the (learned) weight vector."""
        return pylibvw.vw.get_weight(self, index, offset)

    def learn(self, ec):
        """Perform an online update; ec can either be an example
        object or a string (in which case it is parsed and then
        learned on)."""
        if isinstance(ec, str):
            self.learn_string(ec)
        else:
            # Lazily run setup (quadratic features, etc.) before learning.
            if hasattr(ec, 'setup_done') and not ec.setup_done:
                ec.setup_example()
            pylibvw.vw.learn(self, ec)

    def finish(self):
        """stop VW by calling finish (and, eg, write weights to disk)"""
        if not self.finished:
            pylibvw.vw.finish(self)
            self.finished = True

    def example(self, stringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Construct a pyvw.example bound to this vw instance."""
        return example(self, stringOrDict, labelType)

    def __del__(self):
        self.finish()

    def init_search_task(self, search_task, task_data=None):
        """Instantiate search_task, after monkey-patching a `predict`
        function onto this instance's search object."""
        sch = self.get_search_ptr()

        def predict(examples, my_tag, oracle, condition=None, allowed=None, learner_id=0):
            """The basic (via-reduction) prediction mechanism. Several
            variants are supported through this overloaded function:

            'examples' can be a single example (interpreted as
            non-LDF mode) or a list of examples (interpreted as
            LDF mode). it can also be a lambda function that
            returns a single example or list of examples, and in
            that list, each element can also be a lambda function
            that returns an example. this is done for lazy
            example construction (aka speed).

            'my_tag' should be an integer id, specifying this prediction

            'oracle' can be a single label (or in LDF mode a single
            array index in 'examples') or a list of such labels if
            the oracle policy is indecisive; if it is None, then
            the oracle doesn't care

            'condition' should be either: (1) a (tag,char) pair, indicating
            to condition on the given tag with identifier from the char;
            or (2) a (tag,len,char) triple, indicating to condition on
            tag, tag-1, tag-2, ..., tag-len with identifiers char,
            char+1, char+2, ..., char+len. or it can be a (heterogenous)
            list of such things.

            'allowed' can be None, in which case all actions are allowed;
            or it can be list of valid actions (in LDF mode, this should
            be None and you should encode the valid actions in 'examples')

            'learner_id' specifies the underlying learner id

            Returns a single prediction.
            """
            P = sch.get_predictor(my_tag)
            if sch.is_ldf():
                # we need to know how many actions there are, even if we don't know their identities
                while hasattr(examples, '__call__'): examples = examples()
                if not isinstance(examples, list): raise TypeError('expected example _list_ in LDF mode for SearchTask.predict()')
                P.set_input_length(len(examples))
                if sch.predict_needs_example():
                    for n in range(len(examples)):
                        ec = examples[n]
                        while hasattr(ec, '__call__'): ec = ec()  # unfold the lambdas
                        if not isinstance(ec, example) and not isinstance(ec, pylibvw.example): raise TypeError('non-example in LDF example list in SearchTask.predict()')
                        if hasattr(ec, 'setup_done') and not ec.setup_done:
                            ec.setup_example()
                        P.set_input_at(n, ec)
                else:
                    pass  # TODO: do we need to set the examples even though they're not used?
            else:
                if sch.predict_needs_example():
                    while hasattr(examples, '__call__'): examples = examples()
                    P.set_input(examples)
                else:
                    pass  # TODO: do we need to set the examples even though they're not used?

            # (commented-out historical variant, kept for reference)
            # if (isinstance(examples, list) and all([isinstance(ex, example) or isinstance(ex, pylibvw.example) for ex in examples])) or \
            #    isinstance(examples, example) or isinstance(examples, pylibvw.example):
            #     if isinstance(examples, list): # LDF
            #         P.set_input_length(len(examples))
            #         for n in range(len(examples)):
            #             P.set_input_at(n, examples[n])
            #     else: # non-LDF
            #         P.set_input(examples)
            if True:  # TODO: get rid of this
                if oracle is None: pass
                elif isinstance(oracle, list):
                    if len(oracle) > 0: P.set_oracles(oracle)
                elif isinstance(oracle, int): P.set_oracle(oracle)
                else: raise TypeError('expecting oracle to be a list or an integer')

                if condition is not None:
                    if not isinstance(condition, list): condition = [condition]
                    for c in condition:
                        if not isinstance(c, tuple): raise TypeError('item ' + str(c) + ' in condition list is malformed')
                        if len(c) == 2 and isinstance(c[0], int) and isinstance(c[1], str) and len(c[1]) == 1:
                            P.add_condition(max(0, c[0]), c[1])
                        elif len(c) == 3 and isinstance(c[0], int) and isinstance(c[1], int) and isinstance(c[2], str) and len(c[2]) == 1:
                            P.add_condition_range(max(0,c[0]), max(0,c[1]), c[2])
                        else:
                            raise TypeError('item ' + str(c) + ' in condition list malformed')

                if allowed is None: pass
                elif isinstance(allowed, list):
                    P.set_alloweds(allowed)
                else: raise TypeError('allowed argument wrong type')

                if learner_id != 0: P.set_learner_id(learner_id)

                p = P.predict()
                return p
            else:
                raise TypeError("'examples' should be a pyvw example (or a pylibvw example), or a list of said things")

        sch.predict = predict
        num_actions = sch.get_num_actions()
        return search_task(self, sch, num_actions) if task_data is None else search_task(self, sch, num_actions, task_data)
class namespace_id():
    """The namespace_id class is simply a wrapper to convert between
    hash spaces referred to by character (eg 'x') versus their index
    in a particular example. Mostly used internally, you shouldn't
    really need to touch this."""

    def __init__(self, ex, id):
        """Resolve `id` against example `ex`. An integer is treated as an
        index into the example's namespaces; for a string, the first
        character is the namespace id (an empty string maps to the
        default ' ' namespace)."""
        if isinstance(id, int):  # you've specified a namespace by index
            if id < 0 or id >= ex.num_namespaces():
                raise Exception('namespace ' + str(id) + ' out of bounds')
            self.id = id
            self.ord_ns = ex.namespace(id)
            self.ns = chr(self.ord_ns)
        elif isinstance(id, str):  # you've specified a namespace by string
            # The index stays unknown: recovering it would need a linear
            # search we'd rather avoid.
            self.ns = (id or ' ')[0]
            self.ord_ns = ord(self.ns)
            self.id = None
        else:
            raise Exception("ns_to_characterord failed because id type is unknown: " + str(type(id)))
class example_namespace():
    """The example_namespace class is a helper class that allows you
    to extract namespaces from examples and operate at a namespace
    level rather than an example level. Mainly this is done to enable
    indexing like ex['x'][0] to get the 0th feature in namespace 'x'
    in example ex."""

    def __init__(self, ex, ns, ns_hash=None):
        """Construct an example_namespace given an example and a
        target namespace (ns should be a namespace_id). ns_hash, when
        provided, is the precomputed hash of the namespace and avoids a
        re-hash on the first push_feature call."""
        if not isinstance(ns, namespace_id):
            raise TypeError
        self.ex = ex
        self.ns = ns
        # Bug fix: the caller-supplied ns_hash used to be discarded
        # (always reset to None), forcing a pointless re-hash later.
        self.ns_hash = ns_hash

    def num_features_in(self):
        """Return the total number of features in this namespace."""
        return self.ex.num_features_in(self.ns)

    def __getitem__(self, i):
        """Get the feature/value pair for the ith feature in this
        namespace."""
        f = self.ex.feature(self.ns, i)
        v = self.ex.feature_weight(self.ns, i)
        return (f, v)

    def iter_features(self):
        """iterate over all feature/value pairs in this namespace."""
        for i in range(self.num_features_in()):
            yield self[i]

    def push_feature(self, feature, v=1.):
        """Add an unhashed feature to the current namespace (fails if
        setup has already run on this example)."""
        if self.ns_hash is None:
            # Hash the namespace lazily, at most once.
            self.ns_hash = self.ex.vw.hash_space(self.ns)
        self.ex.push_feature(self.ns, feature, v, self.ns_hash)

    def pop_feature(self):
        """Remove the top feature from the current namespace; returns True
        if a feature was removed, returns False if there were no
        features to pop."""
        return self.ex.pop_feature(self.ns)

    def push_features(self, ns, featureList):
        """Push a list of features to a given namespace. Each feature
        in the list can either be an integer (already hashed) or a
        string (to be hashed) and may be paired with a value or not
        (if not, the value is assumed to be 1.0). See example.push_features
        for examples.

        NOTE(review): the `ns` argument is ignored — features always go
        to self.ns. Kept as-is for interface compatibility."""
        self.ex.push_features(self.ns, featureList)
class abstract_label:
    """Base class for the Python-side view of a VW label."""

    def __init__(self):
        # Nothing to initialise; subclasses hold the actual label fields.
        pass

    def from_example(self, ex):
        """Populate this label from a given VW example (subclass hook)."""
        raise Exception("from_example not yet implemented")
class simple_label(abstract_label):
    """Regression label: float label plus weight/initial/prediction."""

    def __init__(self, label=0., weight=1., initial=0., prediction=0.):
        abstract_label.__init__(self)
        # Passing an example copies the label fields out of it.
        if isinstance(label, example):
            self.from_example(label)
        else:
            self.label = label
            self.weight = weight
            self.initial = initial
            self.prediction = prediction

    def from_example(self, ex):
        """Read the simple-label fields out of the given VW example."""
        self.label = ex.get_simplelabel_label()
        self.weight = ex.get_simplelabel_weight()
        self.initial = ex.get_simplelabel_initial()
        self.prediction = ex.get_simplelabel_prediction()

    def __str__(self):
        # VW text format: "label[:weight]"; the weight is elided when 1.
        s = str(self.label)
        if self.weight != 1.:
            # Bug fix: the float weight must be stringified before
            # concatenation (the original raised TypeError here).
            s += ':' + str(self.weight)
        return s
class multiclass_label(abstract_label):
    """Multiclass label: integer class id plus a weight."""

    def __init__(self, label=1, weight=1., prediction=1):
        abstract_label.__init__(self)
        self.label = label
        self.weight = weight
        self.prediction = prediction

    def from_example(self, ex):
        """Read the multiclass-label fields out of the given VW example."""
        self.label = ex.get_multiclass_label()
        self.weight = ex.get_multiclass_weight()
        self.prediction = ex.get_multiclass_prediction()

    def __str__(self):
        # VW text format: "label[:weight]"; the weight is elided when 1.
        s = str(self.label)
        if self.weight != 1.:
            # Bug fix: the float weight must be stringified before
            # concatenation (the original raised TypeError here).
            s += ':' + str(self.weight)
        return s
class cost_sensitive_label(abstract_label):
    """Cost-sensitive label: a list of per-class cost entries."""

    class wclass:
        """One per-class cost entry."""
        def __init__(self, label, cost=0., partial_prediction=0., wap_value=0.):
            self.label = label
            self.cost = cost
            self.partial_prediction = partial_prediction
            self.wap_value = wap_value

    def __init__(self, costs=None, prediction=0):
        """costs defaults to a fresh empty list; the original signature's
        shared mutable default ([]) leaked appends across instances."""
        abstract_label.__init__(self)
        self.costs = [] if costs is None else costs
        self.prediction = prediction

    def from_example(self, ex):
        """Read the cost-sensitive fields out of the given VW example."""
        self.prediction = ex.get_costsensitive_prediction()
        self.costs = []
        # Bug fixes: the cost-count accessor must actually be *called*,
        # and wclass must be referenced through self (it is a nested
        # class, not a module-level name).
        for i in range(ex.get_costsensitive_num_costs()):
            # NOTE(review): these accessors likely need the index `i`
            # (each iteration currently reads the same entry) — confirm
            # against the pylibvw API.
            wc = self.wclass(ex.get_costsensitive_class(),
                             ex.get_costsensitive_cost(),
                             ex.get_costsensitive_partial_prediction(),
                             ex.get_costsensitive_wap_value())
            self.costs.append(wc)

    def __str__(self):
        # Bug fix: the original omitted the closing bracket.
        return '[' + ' '.join([str(c.label) + ':' + str(c.cost) for c in self.costs]) + ']'
class cbandits_label(abstract_label):
    """Contextual-bandit label: a list of per-action cost/probability entries."""

    class wclass:
        """One per-action cost/probability entry."""
        def __init__(self, label, cost=0., partial_prediction=0., probability=0.):
            self.label = label
            self.cost = cost
            self.partial_prediction = partial_prediction
            self.probability = probability

    def __init__(self, costs=None, prediction=0):
        """costs defaults to a fresh empty list; the original signature's
        shared mutable default ([]) leaked appends across instances."""
        abstract_label.__init__(self)
        self.costs = [] if costs is None else costs
        self.prediction = prediction

    def from_example(self, ex):
        """Read the contextual-bandit fields out of the given VW example."""
        self.prediction = ex.get_cbandits_prediction()
        self.costs = []
        # Bug fixes: the cost-count accessor must actually be *called*,
        # and wclass must be referenced through self (it is a nested
        # class, not a module-level name).
        for i in range(ex.get_cbandits_num_costs()):
            # NOTE(review): these accessors likely need the index `i` —
            # confirm against the pylibvw API.
            wc = self.wclass(ex.get_cbandits_class(),
                             ex.get_cbandits_cost(),
                             ex.get_cbandits_partial_prediction(),
                             ex.get_cbandits_probability())
            self.costs.append(wc)

    def __str__(self):
        # Bug fix: the original omitted the closing bracket.
        return '[' + ' '.join([str(c.label) + ':' + str(c.cost) for c in self.costs]) + ']'
class example(pylibvw.example):
    """The example class is a (non-trivial) wrapper around
    pylibvw.example. Most of the wrapping is to make the interface
    easier to use (by making the types safer via namespace_id) and
    also with added python-specific functionality."""

    def __init__(self, vw, initStringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Construct a new example from vw. If initString is None, you
        get an "empty" example which you can construct by hand (see, eg,
        example.push_features). If initString is a string, then this
        string is parsed as it would be from a VW data file into an
        example (and "setup_example" is run). if it is a dict, then we
        add all features in that dictionary. finally, if it's a
        function, we (repeatedly) execute it fn() until it's not a
        function any more (for lazy feature computation)."""
        # Unfold lazy constructors until we have a concrete value.
        while hasattr(initStringOrDict, '__call__'):
            initStringOrDict = initStringOrDict()
        if initStringOrDict is None:
            pylibvw.example.__init__(self, vw, labelType)
            self.setup_done = False
        elif isinstance(initStringOrDict, str):
            pylibvw.example.__init__(self, vw, labelType, initStringOrDict)
            self.setup_done = False
        elif isinstance(initStringOrDict, dict):
            pylibvw.example.__init__(self, vw, labelType)
            # Set early here because push_feature_dict needs them; they
            # are (re)assigned unconditionally after the if/elif chain.
            self.vw = vw
            self.stride = vw.get_stride()
            self.finished = False
            self.push_feature_dict(vw, initStringOrDict)
            self.setup_done = False
        else:
            raise TypeError('expecting string or dict as argument for example construction')
        self.vw = vw
        self.stride = vw.get_stride()
        # Guards against double-finish (finish() and __del__ both call it).
        self.finished = False
        self.labelType = labelType

    def __del__(self):
        self.finish()

    def __enter__(self):
        # Context-manager support: `with vw.example(...) as ex:`.
        return self

    def __exit__(self,typ,value,traceback):
        self.finish()
        # Returning False when typ is not None re-raises the exception.
        return typ is None

    def get_ns(self, id):
        """Construct a namespace_id from either an integer or string
        (or, if a namespace_id is fed it, just return it directly)."""
        if isinstance(id, namespace_id):
            return id
        else:
            return namespace_id(self, id)

    def __getitem__(self, id):
        """Get an example_namespace object associated with the given
        namespace id."""
        return example_namespace(self, self.get_ns(id))

    def feature(self, ns, i):
        """Get the i-th hashed feature id in a given namespace (i can
        range from 0 to self.num_features_in(ns)-1)"""
        ns = self.get_ns(ns)  # guaranteed to be a single character
        f = pylibvw.example.feature(self, ns.ord_ns, i)
        if self.setup_done:
            # Undo the offset/stride transform applied by setup_example.
            # NOTE(review): Python-2 integer division; under Python 3
            # `/` would yield a float — confirm before porting.
            f = (f - self.get_ft_offset()) / self.stride
        return f

    def feature_weight(self, ns, i):
        """Get the value(weight) associated with a given feature id in
        a given namespace (i can range from 0 to
        self.num_features_in(ns)-1)"""
        return pylibvw.example.feature_weight(self, self.get_ns(ns).ord_ns, i)

    def set_label_string(self, string):
        """Give this example a new label, formatted as a string (ala
        the VW data file format)."""
        pylibvw.example.set_label_string(self, self.vw, string, self.labelType)

    def setup_example(self):
        """If this example hasn't already been setup (ie, quadratic
        features constructed, etc.), do so."""
        if self.setup_done:
            raise Exception('trying to setup_example on an example that is already setup')
        self.vw.setup_example(self)
        self.setup_done = True

    def unsetup_example(self):
        """If this example has been setup, reverse that process so you
        can continue editing the examples."""
        if not self.setup_done:
            raise Exception('trying to unsetup_example that has not yet been setup')
        self.vw.unsetup_example(self)
        self.setup_done = False

    def learn(self):
        """Learn on this example (and before learning, automatically
        call setup_example if the example hasn't yet been setup)."""
        if not self.setup_done:
            self.setup_example()
        self.vw.learn(self)

    def sum_feat_sq(self, ns):
        """Return the total sum feature-value squared for a given
        namespace."""
        return pylibvw.example.sum_feat_sq(self, self.get_ns(ns).ord_ns)

    def num_features_in(self, ns):
        """Return the total number of features in a given namespace."""
        return pylibvw.example.num_features_in(self, self.get_ns(ns).ord_ns)

    def get_feature_id(self, ns, feature, ns_hash=None):
        """Return the hashed feature id for a given feature in a given
        namespace. feature can either be an integer (already a feature
        id) or a string, in which case it is hashed. Note that if
        --hash all is on, then get_feature_id(ns,"5") !=
        get_feature_id(ns, 5). If you've already hashed the namespace,
        you can optionally provide that value to avoid re-hashing it."""
        if isinstance(feature, int):
            return feature
        if isinstance(feature, str):
            if ns_hash is None:
                ns_hash = self.vw.hash_space( self.get_ns(ns).ns )
            return self.vw.hash_feature(feature, ns_hash)
        raise Exception("cannot extract feature of type: " + str(type(feature)))

    def push_hashed_feature(self, ns, f, v=1.):
        """Add a hashed feature to a given namespace."""
        # Editing a set-up example requires unsetup first.
        if self.setup_done: self.unsetup_example();
        pylibvw.example.push_hashed_feature(self, self.get_ns(ns).ord_ns, f, v)

    def push_feature(self, ns, feature, v=1., ns_hash=None):
        """Add an unhashed feature to a given namespace."""
        f = self.get_feature_id(ns, feature, ns_hash)
        self.push_hashed_feature(ns, f, v)

    def pop_feature(self, ns):
        """Remove the top feature from a given namespace; returns True
        if a feature was removed, returns False if there were no
        features to pop."""
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.pop_feature(self, self.get_ns(ns).ord_ns)

    def push_namespace(self, ns):
        """Push a new namespace onto this example. You should only do
        this if you're sure that this example doesn't already have the
        given namespace."""
        if self.setup_done: self.unsetup_example();
        pylibvw.example.push_namespace(self, self.get_ns(ns).ord_ns)

    def pop_namespace(self):
        """Remove the top namespace from an example; returns True if a
        namespace was removed, or False if there were no namespaces
        left."""
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.pop_namespace(self)

    def ensure_namespace_exists(self, ns):
        """Check to see if a namespace already exists. If it does, do
        nothing. If it doesn't, add it."""
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.ensure_namespace_exists(self, self.get_ns(ns).ord_ns)

    def push_features(self, ns, featureList):
        """Push a list of features to a given namespace. Each feature
        in the list can either be an integer (already hashed) or a
        string (to be hashed) and may be paired with a value or not
        (if not, the value is assumed to be 1.0).

        Examples:
            ex.push_features('x', ['a', 'b'])
            ex.push_features('y', [('c', 1.), 'd'])

            space_hash = vw.hash_space( 'x' )
            feat_hash  = vw.hash_feature( 'a', space_hash )
            ex.push_features('x', [feat_hash])  # note: 'x' should match the space_hash!
        """
        ns = self.get_ns(ns)
        self.ensure_namespace_exists(ns)
        self.push_feature_list(self.vw, ns.ord_ns, featureList)  # much faster just to do it in C++
        # (equivalent pure-Python implementation kept for reference:)
        # ns_hash = self.vw.hash_space( ns.ns )
        # for feature in featureList:
        #     if isinstance(feature, int) or isinstance(feature, str):
        #         f = feature
        #         v = 1.
        #     elif isinstance(feature, tuple) and len(feature) == 2 and (isinstance(feature[0], int) or isinstance(feature[0], str)) and (isinstance(feature[1], int) or isinstance(feature[1], float)):
        #         f = feature[0]
        #         v = feature[1]
        #     else:
        #         raise Exception('malformed feature to push of type: ' + str(type(feature)))
        #     self.push_feature(ns, f, v, ns_hash)

    def finish(self):
        """Tell VW that you're done with this example and it can
        recycle it for later use."""
        if not self.finished:
            self.vw.finish_example(self)
            self.finished = True

    def iter_features(self):
        """Iterate over all feature/value pairs in this example (all
        namespace included)."""
        for ns_id in range( self.num_namespaces() ):  # iterate over every namespace
            ns = self.get_ns(ns_id)
            for i in range(self.num_features_in(ns)):
                f = self.feature(ns, i)
                v = self.feature_weight(ns, i)
                yield f,v

    def get_label(self, label_class=simple_label):
        """Given a known label class (default is simple_label), get
        the corresponding label structure for this example."""
        return label_class(self)
#help(example)
| {
"content_hash": "71e692f9ab7b3de52d642a0654357829",
"timestamp": "",
"source": "github",
"line_count": 599,
"max_line_length": 238,
"avg_line_length": 43.18530884808013,
"alnum_prop": 0.583771455079635,
"repo_name": "zzzbit/vowpal_wabbit",
"id": "dd2d0677c1d52146e927daec263a80cc754ea0c4",
"size": "25868",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/pyvw.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "9191"
},
{
"name": "C#",
"bytes": "53087"
},
{
"name": "C++",
"bytes": "949995"
},
{
"name": "Eagle",
"bytes": "99"
},
{
"name": "HTML",
"bytes": "13166"
},
{
"name": "Java",
"bytes": "7222"
},
{
"name": "Makefile",
"bytes": "34916"
},
{
"name": "Perl",
"bytes": "127068"
},
{
"name": "Python",
"bytes": "63596"
},
{
"name": "R",
"bytes": "7730"
},
{
"name": "Ruby",
"bytes": "5219"
},
{
"name": "Shell",
"bytes": "43394"
},
{
"name": "Tcl",
"bytes": "182"
}
],
"symlink_target": ""
} |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .AuditModel import AuditModel
from django.db import models
class ScreenIntakeMethodCode(AuditModel):
    """
    Refers to the type of intake mechanism for a well screen, i.e. Screen, Open Bottom, Uncased Hole.
    """
    # Natural-key code for the intake method; immutable once created.
    screen_intake_code = models.CharField(primary_key=True, max_length=10, editable=False)
    # Human-readable label (also used as the string representation).
    description = models.CharField(max_length=100)
    # Sort position for pick-lists; see Meta.ordering below.
    display_order = models.PositiveIntegerField()
    # Optional validity window for this code value.
    effective_date = models.DateTimeField(blank=True, null=True)
    expiry_date = models.DateTimeField(blank=True, null=True)

    class Meta:
        db_table = 'screen_intake_method_code'
        ordering = ['display_order', 'description']

    def __str__(self):
        return self.description
| {
"content_hash": "d83fd61c704cd660c27f9e13afb6b47f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 102,
"avg_line_length": 39.06060606060606,
"alnum_prop": 0.7152831652443755,
"repo_name": "rstens/gwells",
"id": "790ab2edb90fd0066f6d5b94210ace24210702f9",
"size": "1289",
"binary": false,
"copies": "1",
"ref": "refs/heads/developer",
"path": "gwells/models/ScreenIntakeMethodCode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1707"
},
{
"name": "CSS",
"bytes": "71007"
},
{
"name": "Groovy",
"bytes": "91669"
},
{
"name": "HTML",
"bytes": "136711"
},
{
"name": "JavaScript",
"bytes": "193917"
},
{
"name": "PLpgSQL",
"bytes": "49465"
},
{
"name": "Python",
"bytes": "481602"
},
{
"name": "Shell",
"bytes": "52420"
},
{
"name": "TSQL",
"bytes": "3727"
},
{
"name": "Vue",
"bytes": "65417"
}
],
"symlink_target": ""
} |
import numpy as np
from scipy.spatial import cKDTree
from .bary import RbfInterpolator
class KDTreeInterpolator(object):
    """
    KDTreeInterpolator(points, values)

    Nearest-neighbours (barycentric) interpolation in N dimensions.

    This interpolator uses a KDTree to find the closest neighbours of a ND point
    and returns a barycentric interpolation (uses .bary.RbfInterpolator).
    If the number of neighbours is 1, or if the barycentric interpolation
    fails for a point, the value of the closest match is returned instead.

    Parameters
    ----------
    points : (Npoints, Ndims) ndarray of floats
        Data point coordinates.
    values : (Npoints,) ndarray of float or complex
        Data values.

    Notes
    -----
    Uses ``scipy.spatial.cKDTree``
    """
    def __init__(self, x, y):
        self.points = np.asarray(x)
        # use the converted array so list-of-lists input also works
        npoints, ndim = self.points.shape
        self.npoints = npoints
        self.ndim = ndim
        self.values = np.asarray(y)
        if npoints != len(self.values):
            raise ValueError('different number of points in x and y')
        self.tree = cKDTree(x)

    def __call__(self, *args, **kwargs):
        """
        Evaluate interpolator at given points.

        Parameters
        ----------
        xi : ndarray of float, shape (..., ndim)
            Points where to interpolate data at.
        k : integer
            The number of nearest neighbors to use.
        eps : non-negative float
            Return approximate nearest neighbors; the kth returned value
            is guaranteed to be no further than (1+eps) times the
            distance to the real k-th nearest neighbor.
        p : float, 1<=p<=infinity
            Which Minkowski p-norm to use.
            1 is the sum-of-absolute-values "Manhattan" distance
            2 is the usual Euclidean distance
            infinity is the maximum-coordinate-difference distance
        """
        xi = np.squeeze(np.asarray(args))
        s = xi.shape
        if s[1] != self.ndim:
            raise AttributeError('Points must have {0:d} dimensions, found {1:d}.'.format(self.ndim, s[1]))
        k = kwargs.get('k', 1)
        eps = kwargs.get('eps', 0)
        p = kwargs.get('p', 2)
        # BUG FIX: `p` was previously read from kwargs but never forwarded,
        # so the documented Minkowski norm option was silently ignored.
        dist, i = self.tree.query(xi, k=k, eps=eps, p=p)
        if k <= 1:
            return self.values[i]
        pts = self.points
        val = self.values
        # renamed from `p`, which shadowed the Minkowski-norm parameter above
        results = []
        for xik, ik in zip(xi, i):
            try:
                results.append(self._NDInterp(pts[ik], val[ik], xik))
            except Exception:
                # barycentric interpolation failed: fall back to the value
                # of the nearest neighbour (first index returned by query)
                results.append(np.asarray(self.values[ik[0]]))
        return np.squeeze(np.asarray(results))

    def _NDInterp(self, X, Y, x):
        """Barycentric (RBF) interpolation of values Y at points X, evaluated at x."""
        rb = RbfInterpolator(*((X.T).tolist() + [Y]))
        self._rb = rb
        return rb(*x)
| {
"content_hash": "a06fd94e44c3547d050b5dec1ce1adfb",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 107,
"avg_line_length": 30.340425531914892,
"alnum_prop": 0.5634642356241234,
"repo_name": "mfouesneau/faststats",
"id": "83fe47c576c2eeaab1d8262addf8e812d037bece",
"size": "2852",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "faststats/interpolate/knn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "328"
},
{
"name": "Python",
"bytes": "97538"
}
],
"symlink_target": ""
} |
import os
SCRIPT_PATH = os.path.dirname(__file__)


class Lua:
    """Registers every ``.lua`` script found next to this module on a redis
    client and exposes each one as an attribute named after the file."""

    def __init__(self, redis):
        self.redis = redis
        for filename in os.listdir(SCRIPT_PATH):
            if not filename.endswith('.lua'):
                continue
            with open(os.path.join(SCRIPT_PATH, filename)) as handle:
                self.register(filename.split('.')[0], handle.read())

    def register(self, name, contents):
        """Register *contents* as a redis script available as ``self.<name>``."""
        setattr(self, name, self.redis.register_script(contents))
| {
"content_hash": "be12b946ea19ea93f2b6f0b8b038fb98",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 93,
"avg_line_length": 30.11764705882353,
"alnum_prop": 0.626953125,
"repo_name": "getfleety/coralillo",
"id": "463f67f31af1f9df6135c7d3bf11b86df1f9de3d",
"size": "512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coralillo/lua/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "2851"
},
{
"name": "Makefile",
"bytes": "239"
},
{
"name": "Python",
"bytes": "88550"
}
],
"symlink_target": ""
} |
from setuptools import find_packages, setup
import retask.release as rl
# PyPI trove classifiers for the supported environments.
CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Topic :: Software Development :: Libraries',
    'License :: OSI Approved :: MIT License',
    'Topic :: System :: Distributed Computing',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3.2',
]

# Package metadata comes from retask.release so it lives in one place.
setup(
    name='retask',
    version=rl.VERSION,
    description=rl.DESCRIPTION,
    long_description=rl.LONG_DESCRIPTION,
    author=rl.AUTHOR,
    author_email=rl.EMAIL,
    maintainer='Kushal Das',
    maintainer_email='kushaldas@gmail.com',
    license=rl.LICENSE,
    url=rl.URL,
    classifiers=CLASSIFIERS,
    packages=find_packages(),
    data_files=[],
    install_requires=['redis', 'six'],
    test_suite='tests',
    tests_require=['mock'],
)
| {
"content_hash": "f213b8c4e5f9922286e4f9ed53978ffc",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 55,
"avg_line_length": 27.771428571428572,
"alnum_prop": 0.5576131687242798,
"repo_name": "rtnpro/retask",
"id": "b3c051eeb3673fab909b318f9b44da4636681154",
"size": "1020",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14741"
}
],
"symlink_target": ""
} |
from time import sleep
from datetime import datetime
from threading import Thread, Event
import logging
from ftp.db import FtpDb
class Monitor(Thread):
    """Background thread that periodically runs *checker* against *db*.

    The loop re-reads settings from the database on every iteration so that
    interval changes and force-update requests take effect while running.
    """

    # Fallback update interval in seconds (one hour).
    DEFAULT_INTERVAL = 60 * 60

    db = None             # database handle/config; passed to FtpDb and checker
    checker = None        # checker class, instantiated as checker(db) per run
    interval = None       # current update interval in seconds
    force_update = False  # True when an immediate update was requested
    logger = None
    _stop_event = None    # signals run() to exit

    def __init__(self, db, checker, *args, **kwargs):
        super(Monitor, self).__init__(*args, **kwargs)
        self.db = db
        self.checker = checker
        self._stop_event = Event()
        self.logger = logging.getLogger('ftwatch')

    def update_interval(self):
        """Refresh ``interval`` and ``force_update`` from stored settings.

        On failure, falls back to ``DEFAULT_INTERVAL`` and re-raises.
        """
        try:
            db = FtpDb(self.db)
            settings = db.get_settings()
            # stored value is multiplied by 60 -- presumably stored in
            # minutes, while DEFAULT_INTERVAL is in seconds; TODO confirm
            # the intended unit of the default.
            self.interval = int(settings.get('update_interval', Monitor.DEFAULT_INTERVAL)) * 60
            self.force_update = (settings.get('force_update', 'false').lower() == 'true')
            if self.force_update:
                self.logger.info('Checker {}: performing force update'.format(self.checker.NAME))
                # reset the one-shot flag so the next pass is periodic again
                db.update_settings({'force_update': 'false'})
        except Exception:
            # was a bare `except:`, which also swallowed KeyboardInterrupt/
            # SystemExit before re-raising; keep a sane interval and let the
            # caller see the failure. NOTE(review): run() has no handler, so
            # a settings failure still terminates the thread -- confirm that
            # this is intended.
            self.interval = Monitor.DEFAULT_INTERVAL
            raise

    def run(self):
        # epoch start forces an update on the very first iteration
        last_check = datetime(1970, 1, 1)
        while not self._stop_event.is_set():
            self.update_interval()
            if int((datetime.now() - last_check).total_seconds()) > self.interval or self.force_update:
                self.logger.info('Checker {}: updating file list'.format(self.checker.NAME))
                try:
                    self.checker(self.db).check()
                    self.logger.info('Checker {}: done'.format(self.checker.NAME))
                except Exception as e:
                    # typo fix: "occured" -> "occurred"
                    self.logger.warning('Checker {}: exception occurred: {}'.format(self.checker.NAME, e))
                finally:
                    last_check = datetime.now()
            else:
                # poll the stop flag once per second between updates
                sleep(1)

    def stop_processing(self):
        """Ask the monitor loop to exit at its next iteration."""
        self._stop_event.set()
| {
"content_hash": "19cf9b0c3b770a7d11469bd967ced958",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 105,
"avg_line_length": 32.6,
"alnum_prop": 0.565439672801636,
"repo_name": "grazor/ftwatch",
"id": "3efb3db550747e8671874475e0deca8ba5bbe61f",
"size": "1956",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ftp/monitor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "294"
},
{
"name": "HTML",
"bytes": "6547"
},
{
"name": "JavaScript",
"bytes": "7133"
},
{
"name": "Python",
"bytes": "15021"
},
{
"name": "Shell",
"bytes": "533"
}
],
"symlink_target": ""
} |
"""The gcloud app regions group."""
from googlecloudsdk.calliope import base
# Available on both the beta and GA release tracks of gcloud.
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Regions(base.Group):
    """View regional availability of App Engine runtime environments.

    This command can be used to view availability of App Engine standard and
    flexible runtime environments in all geographic regions.
    """

    # Help text consumed by the calliope framework; '{description}' and
    # '{command}' are substituted at render time.
    detailed_help = {
        'DESCRIPTION': '{description}',
        'EXAMPLES': """\
To view regional availability of App Engine runtime environments, run:

    $ {command} list
""",
    }
| {
"content_hash": "25a1c9678a5c0a02efd0ba2eb703fea2",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 80,
"avg_line_length": 29.45,
"alnum_prop": 0.6977928692699491,
"repo_name": "KaranToor/MA450",
"id": "c017720e421eb09761e96bafcf046a082bde1259",
"size": "1185",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/.install/.backup/lib/surface/app/regions/__init__.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
} |
from girder.exceptions import ValidationException
from girder.utility import setting_utilities
class PluginSettings:
    """Setting keys used by the item_licenses plugin."""
    # Settings key under which the list of license categories is stored.
    LICENSES = 'item_licenses.licenses'
@setting_utilities.default(PluginSettings.LICENSES)
def _defaultLicenses():
    """Return the default license list, grouped into code and content categories."""
    # Selected open source licenses from:
    # - https://github.com/ufal/public-license-selector/tree/57e31db
    codeLicenseNames = [
        'Affero General Public License 3 (AGPL-3.0)',
        'Apache License 2',
        'The BSD 2-Clause "Simplified" or "FreeBSD" License',
        'The BSD 3-Clause "New" or "Revised" License (BSD)',
        'Common Development and Distribution License (CDDL-1.0)',
        'Eclipse Public License 1.0 (EPL-1.0)',
        'GNU General Public License 2 or later (GPL-2.0)',
        'GNU General Public License 3 (GPL-3.0)',
        'GNU Library or "Lesser" General Public License 2.1 or later (LGPL-2.1)',
        'GNU Library or "Lesser" General Public License 3.0 (LGPL-3.0)',
        'The MIT License (MIT)',
        'Mozilla Public License 2.0',
    ]
    # Licenses from:
    # - http://creativecommons.org/licenses/
    #
    # Names match those from:
    # https://github.com/ufal/public-license-selector/tree/57e31db
    contentLicenseNames = [
        'Public Domain Dedication (CC Zero)',
        'Creative Commons Attribution (CC-BY)',
        'Creative Commons Attribution-ShareAlike (CC-BY-SA)',
        'Creative Commons Attribution-NoDerivs (CC-BY-ND)',
        'Creative Commons Attribution-NonCommercial (CC-BY-NC)',
        'Creative Commons Attribution-NonCommercial-ShareAlike (CC-BY-NC-SA)',
        'Creative Commons Attribution-NonCommercial-NoDerivs (CC-BY-NC-ND)',
        'Public Domain Mark (PD)',
        'All Rights Reserved',
    ]
    return [
        {
            'category': 'Code Licenses',
            'licenses': [{'name': name} for name in codeLicenseNames],
        },
        {
            'category': 'Content Licenses',
            'licenses': [{'name': name} for name in contentLicenseNames],
        },
    ]
@setting_utilities.validator(PluginSettings.LICENSES)
def _validateLicenses(doc):
    """Validate the licenses setting value.

    The value must be a list of category dicts, each with a non-empty string
    'category' and a 'licenses' list of dicts with non-empty string 'name'.

    :raises ValidationException: if any part of the structure is invalid.
    """
    val = doc['value']
    if not isinstance(val, list):
        raise ValidationException('Licenses setting must be a list.', 'value')
    for item in val:
        # robustness fix: previously a non-dict entry crashed with
        # AttributeError on .get instead of raising a validation error
        if not isinstance(item, dict):
            raise ValidationException('License category must be a dict.', 'value')
        category = item.get('category', None)
        if not category or not isinstance(category, str):
            raise ValidationException(
                'License category is required and must be a non-empty string.', 'category')
        licenses = item.get('licenses', None)
        if not isinstance(licenses, list):
            raise ValidationException('Licenses in category must be a list.', 'licenses')
        # renamed loop variable: `license` shadowed the builtin of the same name
        for licenseInfo in licenses:
            if not isinstance(licenseInfo, dict):
                raise ValidationException('License must be a dict.', 'license')
            name = licenseInfo.get('name', None)
            if not name or not isinstance(name, str):
                raise ValidationException(
                    'License name is required and must be a non-empty string.', 'name')
| {
"content_hash": "72395b68f89b5d6597b0bb2043b3e69f",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 91,
"avg_line_length": 36.112,
"alnum_prop": 0.4237926451041205,
"repo_name": "girder/girder",
"id": "5fd0474bbcceefb677305bff4cb97dd72e4ac573",
"size": "4514",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "plugins/item_licenses/girder_item_licenses/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "26244"
},
{
"name": "CSS",
"bytes": "6537"
},
{
"name": "Dockerfile",
"bytes": "1528"
},
{
"name": "HTML",
"bytes": "14"
},
{
"name": "JavaScript",
"bytes": "1176017"
},
{
"name": "Jinja",
"bytes": "322"
},
{
"name": "Mako",
"bytes": "7571"
},
{
"name": "Pug",
"bytes": "137980"
},
{
"name": "Python",
"bytes": "2018697"
},
{
"name": "Roff",
"bytes": "17"
},
{
"name": "Shell",
"bytes": "3354"
},
{
"name": "Stylus",
"bytes": "48706"
}
],
"symlink_target": ""
} |
import os, time, re, io
import json
import mimetypes, hashlib
import traceback, logging
from collections import OrderedDict
import requests
from .. import config, utils
from ..returnvalues import ReturnValue
from ..storage import templates
from .contact import update_local_uin
logger = logging.getLogger('itchat')
def load_messages(core):
    """Attach the message-sending functions defined in this module to *core*."""
    handlers = (
        ('send_raw_msg', send_raw_msg),
        ('send_msg', send_msg),
        ('upload_file', upload_file),
        ('send_file', send_file),
        ('send_image', send_image),
        ('send_video', send_video),
        ('send', send),
    )
    for attrName, fn in handlers:
        setattr(core, attrName, fn)
def get_download_fn(core, url, msgId):
    """Build a closure that downloads the media of message *msgId* from *url*.

    The returned function yields raw bytes when called without a directory,
    or writes the file and returns a status ReturnValue when given a path.
    """
    def download_fn(downloadDir=None):
        params = {
            'msgid': msgId,
            'skey': core.loginInfo['skey'],}
        headers = { 'User-Agent' : config.USER_AGENT }
        response = core.s.get(url, params=params, stream=True, headers=headers)
        buffer = io.BytesIO()
        for block in response.iter_content(1024):
            buffer.write(block)
        if downloadDir is None:
            # no target path: hand back the raw bytes
            return buffer.getvalue()
        with open(downloadDir, 'wb') as f:
            f.write(buffer.getvalue())
        # peek at the first bytes to report the real image type
        buffer.seek(0)
        return ReturnValue({'BaseResponse': {
            'ErrMsg': 'Successfully downloaded',
            'Ret': 0, },
            'PostFix': utils.get_image_postfix(buffer.read(20)), })
    return download_fn
def produce_msg(core, msgList):
    ''' for messages types
        * 40 msg, 43 videochat, 50 VOIPMSG, 52 voipnotifymsg
        * 53 webwxvoipnotifymsg, 9999 sysnotice

        Normalizes every raw message dict in msgList: attaches a 'User'
        object, a friendly 'Type' string and a 'Text' payload (either a
        string or a zero-argument/one-argument download closure), then
        returns the list of merged dicts.
    '''
    rl = []
    # message types that carry nothing useful here (see the list above)
    srl = [40, 43, 50, 52, 53, 9999]
    for m in msgList:
        # get actual opposite
        if m['FromUserName'] == core.storageClass.userName:
            actualOpposite = m['ToUserName']
        else:
            actualOpposite = m['FromUserName']
        # produce basic message
        # '@@' inside a user name marks a chatroom (group chat)
        if '@@' in m['FromUserName'] or '@@' in m['ToUserName']:
            produce_group_chat(core, m)
        else:
            utils.msg_formatter(m, 'Content')
        # set user of msg
        if '@@' in actualOpposite:
            m['User'] = core.search_chatrooms(userName=actualOpposite) or \
                templates.Chatroom({'UserName': actualOpposite})
            # we don't need to update chatroom here because we have
            # updated once when producing basic message
        elif actualOpposite in ('filehelper', 'fmessage'):
            m['User'] = templates.User({'UserName': actualOpposite})
        else:
            m['User'] = core.search_mps(userName=actualOpposite) or \
                core.search_friends(userName=actualOpposite) or \
                templates.User(userName=actualOpposite)
            # by default we think there may be a user missing not a mp
        m['User'].core = core
        if m['MsgType'] == 1: # words
            if m['Url']:
                # a text message carrying a Url is a shared location
                regx = r'(.+?\(.+?\))'
                data = re.search(regx, m['Content'])
                data = 'Map' if data is None else data.group(1)
                msg = {
                    'Type': 'Map',
                    'Text': data,}
            else:
                msg = {
                    'Type': 'Text',
                    'Text': m['Content'],}
        elif m['MsgType'] == 3 or m['MsgType'] == 47: # picture
            # 'Text' is a closure that downloads the image on demand
            download_fn = get_download_fn(core,
                '%s/webwxgetmsgimg' % core.loginInfo['url'], m['NewMsgId'])
            msg = {
                'Type' : 'Picture',
                'FileName' : '%s.%s' % (time.strftime('%y%m%d-%H%M%S', time.localtime()),
                    'png' if m['MsgType'] == 3 else 'gif'),
                'Text' : download_fn, }
        elif m['MsgType'] == 34: # voice
            download_fn = get_download_fn(core,
                '%s/webwxgetvoice' % core.loginInfo['url'], m['NewMsgId'])
            msg = {
                'Type': 'Recording',
                'FileName' : '%s.mp3' % time.strftime('%y%m%d-%H%M%S', time.localtime()),
                'Text': download_fn,}
        elif m['MsgType'] == 37: # friends
            # friend request: expose the verification info for later acceptance
            m['User']['UserName'] = m['RecommendInfo']['UserName']
            msg = {
                'Type': 'Friends',
                'Text': {
                    'status' : m['Status'],
                    'userName' : m['RecommendInfo']['UserName'],
                    'verifyContent' : m['Ticket'],
                    'autoUpdate' : m['RecommendInfo'], }, }
            m['User'].verifyDict = msg['Text']
        elif m['MsgType'] == 42: # name card
            msg = {
                'Type': 'Card',
                'Text': m['RecommendInfo'], }
        elif m['MsgType'] in (43, 62): # tiny video
            # bind msgId now; the closure runs after the loop has moved on
            msgId = m['MsgId']
            def download_video(videoDir=None):
                # Returns raw bytes when videoDir is None, otherwise writes
                # the file and returns a status ReturnValue.
                url = '%s/webwxgetvideo' % core.loginInfo['url']
                params = {
                    'msgid': msgId,
                    'skey': core.loginInfo['skey'],}
                # the video endpoint is fetched with an explicit Range header
                headers = {'Range': 'bytes=0-', 'User-Agent' : config.USER_AGENT }
                r = core.s.get(url, params=params, headers=headers, stream=True)
                tempStorage = io.BytesIO()
                for block in r.iter_content(1024):
                    tempStorage.write(block)
                if videoDir is None:
                    return tempStorage.getvalue()
                with open(videoDir, 'wb') as f:
                    f.write(tempStorage.getvalue())
                return ReturnValue({'BaseResponse': {
                    'ErrMsg': 'Successfully downloaded',
                    'Ret': 0, }})
            msg = {
                'Type': 'Video',
                'FileName' : '%s.mp4' % time.strftime('%y%m%d-%H%M%S', time.localtime()),
                'Text': download_video, }
        elif m['MsgType'] == 49: # sharing
            if m['AppMsgType'] == 6:
                # attachment; snapshot message and cookies now, the
                # download closure may run much later
                rawMsg = m
                cookiesList = {name:data for name,data in core.s.cookies.items()}
                def download_atta(attaDir=None):
                    url = core.loginInfo['fileUrl'] + '/webwxgetmedia'
                    params = {
                        'sender': rawMsg['FromUserName'],
                        'mediaid': rawMsg['MediaId'],
                        'filename': rawMsg['FileName'],
                        'fromuser': core.loginInfo['wxuin'],
                        'pass_ticket': 'undefined',
                        'webwx_data_ticket': cookiesList['webwx_data_ticket'],}
                    headers = { 'User-Agent' : config.USER_AGENT }
                    r = core.s.get(url, params=params, stream=True, headers=headers)
                    tempStorage = io.BytesIO()
                    for block in r.iter_content(1024):
                        tempStorage.write(block)
                    if attaDir is None:
                        return tempStorage.getvalue()
                    with open(attaDir, 'wb') as f:
                        f.write(tempStorage.getvalue())
                    return ReturnValue({'BaseResponse': {
                        'ErrMsg': 'Successfully downloaded',
                        'Ret': 0, }})
                msg = {
                    'Type': 'Attachment',
                    'Text': download_atta, }
            elif m['AppMsgType'] == 8:
                # shared gif/emoticon, fetched like a picture
                download_fn = get_download_fn(core,
                    '%s/webwxgetmsgimg' % core.loginInfo['url'], m['NewMsgId'])
                msg = {
                    'Type' : 'Picture',
                    'FileName' : '%s.gif' % (
                        time.strftime('%y%m%d-%H%M%S', time.localtime())),
                    'Text' : download_fn, }
            elif m['AppMsgType'] == 17:
                msg = {
                    'Type': 'Note',
                    'Text': m['FileName'], }
            elif m['AppMsgType'] == 2000:
                # pull the description text out of the CDATA blocks
                regx = r'\[CDATA\[(.+?)\][\s\S]+?\[CDATA\[(.+?)\]'
                data = re.search(regx, m['Content'])
                if data:
                    # keep only the text before the first ideographic full stop
                    data = data.group(2).split(u'\u3002')[0]
                else:
                    data = 'You may found detailed info in Content key.'
                msg = {
                    'Type': 'Note',
                    'Text': data, }
            else:
                msg = {
                    'Type': 'Sharing',
                    'Text': m['FileName'], }
        elif m['MsgType'] == 51: # phone init
            msg = update_local_uin(core, m)
        elif m['MsgType'] == 10000:
            # plain system message
            msg = {
                'Type': 'Note',
                'Text': m['Content'],}
        elif m['MsgType'] == 10002:
            # system notice wrapped in CDATA
            regx = r'\[CDATA\[(.+?)\]\]'
            data = re.search(regx, m['Content'])
            data = 'System message' if data is None else data.group(1).replace('\\', '')
            msg = {
                'Type': 'Note',
                'Text': data, }
        elif m['MsgType'] in srl:
            msg = {
                'Type': 'Useless',
                'Text': 'UselessMsg', }
        else:
            logger.debug('Useless message received: %s\n%s' % (m['MsgType'], str(m)))
            msg = {
                'Type': 'Useless',
                'Text': 'UselessMsg', }
        # merge the produced fields into a copy of the raw message dict
        m = dict(m, **msg)
        rl.append(m)
    return rl
def produce_group_chat(core, msg):
    """Annotate a chatroom message with the actual speaker and @-mention info.

    Sets 'ActualUserName', 'ActualNickName' and 'IsAt' on *msg*, strips the
    speaker prefix from 'Content', and runs the content formatter.
    """
    # incoming group messages are prefixed '@<member-id>:<br/>'
    r = re.match('(@[0-9a-z]*?):<br/>(.*)$', msg['Content'])
    if r:
        actualUserName, content = r.groups()
        chatroomUserName = msg['FromUserName']
    elif msg['FromUserName'] == core.storageClass.userName:
        # message sent by the logged-in account itself
        actualUserName = core.storageClass.userName
        content = msg['Content']
        chatroomUserName = msg['ToUserName']
    else:
        # no speaker prefix and not self-sent: fall back to attributing the
        # message to the logged-in account and return without member lookup
        msg['ActualUserName'] = core.storageClass.userName
        msg['ActualNickName'] = core.storageClass.nickName
        msg['IsAt'] = False
        utils.msg_formatter(msg, 'Content')
        return
    chatroom = core.storageClass.search_chatrooms(userName=chatroomUserName)
    member = utils.search_dict_list((chatroom or {}).get(
        'MemberList') or [], 'UserName', actualUserName)
    if member is None:
        # member not cached yet: refresh the chatroom and retry once
        chatroom = core.update_chatroom(msg['FromUserName'])
        member = utils.search_dict_list((chatroom or {}).get(
            'MemberList') or [], 'UserName', actualUserName)
    if member is None:
        logger.debug('chatroom member fetch failed with %s' % actualUserName)
        msg['ActualNickName'] = ''
        msg['IsAt'] = False
    else:
        msg['ActualNickName'] = member.get('DisplayName', '') or member['NickName']
        atFlag = '@' + (chatroom['Self'].get('DisplayName', '') or core.storageClass.nickName)
        # an @-mention is followed by U+2005 (four-per-em space) or a plain
        # space, or sits at the very end of the content
        msg['IsAt'] = (
            (atFlag + (u'\u2005' if u'\u2005' in msg['Content'] else ' '))
            in msg['Content'] or msg['Content'].endswith(atFlag))
    msg['ActualUserName'] = actualUserName
    msg['Content'] = content
    utils.msg_formatter(msg, 'Content')
def send_raw_msg(self, msgType, content, toUserName):
    """POST a message of *msgType* with *content* to *toUserName*.

    Falls back to sending to the logged-in account itself when *toUserName*
    is falsy. Returns a ReturnValue wrapping the raw server response.
    """
    url = '%s/webwxsendmsg' % self.loginInfo['url']
    receiver = toUserName if toUserName else self.storageClass.userName
    payload = {
        'BaseRequest': self.loginInfo['BaseRequest'],
        'Msg': {
            'Type': msgType,
            'Content': content,
            'FromUserName': self.storageClass.userName,
            'ToUserName': receiver,
            'LocalID': int(time.time() * 1e4),
            'ClientMsgId': int(time.time() * 1e4),
        },
        'Scene': 0,
    }
    headers = {
        'ContentType': 'application/json; charset=UTF-8',
        'User-Agent': config.USER_AGENT,
    }
    body = json.dumps(payload, ensure_ascii=False).encode('utf8')
    return ReturnValue(rawResponse=self.s.post(url, headers=headers, data=body))
def send_msg(self, msg='Test Message', toUserName=None):
    """Send a plain text message (type 1) to *toUserName*."""
    logger.debug('Request to send a text message to %s: %s' % (toUserName, msg))
    return self.send_raw_msg(1, msg, toUserName)
def _prepare_file(fileDir, file_=None):
fileDict = {}
if file_:
if hasattr(file_, 'read'):
file_ = file_.read()
else:
return ReturnValue({'BaseResponse': {
'ErrMsg': 'file_ param should be opened file',
'Ret': -1005, }})
else:
if not utils.check_file(fileDir):
return ReturnValue({'BaseResponse': {
'ErrMsg': 'No file found in specific dir',
'Ret': -1002, }})
with open(fileDir, 'rb') as f:
file_ = f.read()
fileDict['fileSize'] = len(file_)
fileDict['fileMd5'] = hashlib.md5(file_).hexdigest()
fileDict['file_'] = io.BytesIO(file_)
return fileDict
def upload_file(self, fileDir, isPicture=False, isVideo=False,
        toUserName='filehelper', file_=None, preparedFile=None):
    """Upload a file to the server in 512 KiB chunks.

    :param fileDir: path of the file (also used for the reported filename).
    :param isPicture: upload as picture ('pic'); isVideo uploads as 'video';
        otherwise the file is uploaded as a document ('doc').
    :param toUserName: receiver recorded in the upload request.
    :param file_: opened binary file object used instead of reading fileDir.
    :param preparedFile: result of _prepare_file to reuse; computed if absent.
    :return: ReturnValue wrapping the last chunk's response (carries MediaId)
        or an error dict.
    """
    logger.debug('Request to upload a %s: %s' % (
        'picture' if isPicture else 'video' if isVideo else 'file', fileDir))
    if not preparedFile:
        preparedFile = _prepare_file(fileDir, file_)
        if not preparedFile:
            # propagate the error ReturnValue from _prepare_file
            return preparedFile
    fileSize, fileMd5, file_ = \
        preparedFile['fileSize'], preparedFile['fileMd5'], preparedFile['file_']
    fileSymbol = 'pic' if isPicture else 'video' if isVideo else'doc'
    # 524288 = 512 KiB chunk size; ceil-divide to get the chunk count
    chunks = int((fileSize - 1) / 524288) + 1
    clientMediaId = int(time.time() * 1e4)
    # OrderedDict keeps the serialized field order fixed (presumably
    # required by the upload endpoint -- unverified)
    uploadMediaRequest = json.dumps(OrderedDict([
        ('UploadType', 2),
        ('BaseRequest', self.loginInfo['BaseRequest']),
        ('ClientMediaId', clientMediaId),
        ('TotalLen', fileSize),
        ('StartPos', 0),
        ('DataLen', fileSize),
        ('MediaType', 4),
        ('FromUserName', self.storageClass.userName),
        ('ToUserName', toUserName),
        ('FileMd5', fileMd5)]
        ), separators = (',', ':'))
    # fallback result in case the chunk loop body never runs
    r = {'BaseResponse': {'Ret': -1005, 'ErrMsg': 'Empty file detected'}}
    for chunk in range(chunks):
        r = upload_chunk_file(self, fileDir, fileSymbol, fileSize,
            file_, chunk, chunks, uploadMediaRequest)
    file_.close()
    if isinstance(r, dict):
        # no chunk was uploaded; wrap the fallback error dict
        return ReturnValue(r)
    return ReturnValue(rawResponse=r)
def upload_chunk_file(core, fileDir, fileSymbol, fileSize,
        file_, chunk, chunks, uploadMediaRequest):
    """POST one 512 KiB chunk of *file_* to the upload-media endpoint.

    Returns the raw requests response for the chunk.
    """
    url = core.loginInfo.get('fileUrl', core.loginInfo['url']) + \
        '/webwxuploadmedia?f=json'
    # save it on server
    cookiesList = dict(core.s.cookies.items())
    fileType = mimetypes.guess_type(fileDir)[0] or 'application/octet-stream'
    baseName = os.path.basename(fileDir)
    files = OrderedDict([
        ('id', (None, 'WU_FILE_0')),
        ('name', (None, baseName)),
        ('type', (None, fileType)),
        ('lastModifiedDate', (None, time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (CST)'))),
        ('size', (None, str(fileSize))),
        ('chunks', (None, None)),
        ('chunk', (None, None)),
        ('mediatype', (None, fileSymbol)),
        ('uploadmediarequest', (None, uploadMediaRequest)),
        ('webwx_data_ticket', (None, cookiesList['webwx_data_ticket'])),
        ('pass_ticket', (None, core.loginInfo['pass_ticket'])),
        ('filename', (baseName, file_.read(524288), 'application/octet-stream'))])
    if chunks == 1:
        # single-chunk uploads omit the chunk bookkeeping fields entirely
        del files['chunk']
        del files['chunks']
    else:
        files['chunk'] = (None, str(chunk))
        files['chunks'] = (None, str(chunks))
    headers = { 'User-Agent' : config.USER_AGENT }
    return requests.post(url, files=files, headers=headers)
def send_file(self, fileDir, toUserName=None, mediaId=None, file_=None):
    """Send a file as an attachment (app message, type 6).

    :param fileDir: path string of the file; an opened file object is
        rejected here (pass it via file_ instead).
    :param toUserName: receiver; defaults to the logged-in account itself.
    :param mediaId: reuse an already-uploaded media id and skip the upload.
    :param file_: opened binary file object providing the content.
    :return: ReturnValue wrapping the server response or an error dict.
    """
    logger.debug('Request to send a file(mediaId: %s) to %s: %s' % (
        mediaId, toUserName, fileDir))
    if hasattr(fileDir, 'read'):
        return ReturnValue({'BaseResponse': {
            'ErrMsg': 'fileDir param should not be an opened file in send_file',
            'Ret': -1005, }})
    if toUserName is None:
        toUserName = self.storageClass.userName
    preparedFile = _prepare_file(fileDir, file_)
    if not preparedFile:
        # _prepare_file returned an error ReturnValue; pass it through
        return preparedFile
    fileSize = preparedFile['fileSize']
    if mediaId is None:
        r = self.upload_file(fileDir, preparedFile=preparedFile)
        if r:
            mediaId = r['MediaId']
        else:
            return r
    url = '%s/webwxsendappmsg?fun=async&f=json' % self.loginInfo['url']
    data = {
        'BaseRequest': self.loginInfo['BaseRequest'],
        'Msg': {
            'Type': 6,
            # app-message XML payload describing the attachment
            'Content': ("<appmsg appid='wxeb7ec651dd0aefa9' sdkver=''><title>%s</title>" % os.path.basename(fileDir) +
                "<des></des><action></action><type>6</type><content></content><url></url><lowurl></lowurl>" +
                "<appattach><totallen>%s</totallen><attachid>%s</attachid>" % (str(fileSize), mediaId) +
                "<fileext>%s</fileext></appattach><extinfo></extinfo></appmsg>" % os.path.splitext(fileDir)[1].replace('.','')),
            'FromUserName': self.storageClass.userName,
            'ToUserName': toUserName,
            'LocalID': int(time.time() * 1e4),
            'ClientMsgId': int(time.time() * 1e4), },
        'Scene': 0, }
    headers = {
        'User-Agent': config.USER_AGENT,
        'Content-Type': 'application/json;charset=UTF-8', }
    r = self.s.post(url, headers=headers,
        data=json.dumps(data, ensure_ascii=False).encode('utf8'))
    return ReturnValue(rawResponse=r)
def send_image(self, fileDir=None, toUserName=None, mediaId=None, file_=None):
    """Send an image; a '.gif' path is sent as an emoticon (type 47).

    :param fileDir: path of the image, or an opened file object (then moved
        into file_ and the name defaults to 'tmp.jpg').
    :param toUserName: receiver; defaults to the logged-in account itself.
    :param mediaId: reuse an already-uploaded media id and skip the upload.
    :param file_: opened binary file object providing the content.
    :return: ReturnValue wrapping the server response or an error dict.
    """
    logger.debug('Request to send a image(mediaId: %s) to %s: %s' % (
        mediaId, toUserName, fileDir))
    if fileDir or file_:
        if hasattr(fileDir, 'read'):
            # an opened file was passed positionally; treat it as file_
            file_, fileDir = fileDir, None
        if fileDir is None:
            fileDir = 'tmp.jpg' # specific fileDir to send gifs
    else:
        return ReturnValue({'BaseResponse': {
            'ErrMsg': 'Either fileDir or file_ should be specific',
            'Ret': -1005, }})
    if toUserName is None:
        toUserName = self.storageClass.userName
    if mediaId is None:
        # gifs are uploaded as plain files, other images as pictures
        r = self.upload_file(fileDir, isPicture=not fileDir[-4:] == '.gif', file_=file_)
        if r:
            mediaId = r['MediaId']
        else:
            return r
    url = '%s/webwxsendmsgimg?fun=async&f=json' % self.loginInfo['url']
    data = {
        'BaseRequest': self.loginInfo['BaseRequest'],
        'Msg': {
            'Type': 3,
            'MediaId': mediaId,
            'FromUserName': self.storageClass.userName,
            'ToUserName': toUserName,
            'LocalID': int(time.time() * 1e4),
            'ClientMsgId': int(time.time() * 1e4), },
        'Scene': 0, }
    if fileDir[-4:] == '.gif':
        # gifs go through the emoticon endpoint as message type 47
        url = '%s/webwxsendemoticon?fun=sys' % self.loginInfo['url']
        data['Msg']['Type'] = 47
        data['Msg']['EmojiFlag'] = 2
    headers = {
        'User-Agent': config.USER_AGENT,
        'Content-Type': 'application/json;charset=UTF-8', }
    r = self.s.post(url, headers=headers,
        data=json.dumps(data, ensure_ascii=False).encode('utf8'))
    return ReturnValue(rawResponse=r)
def send_video(self, fileDir=None, toUserName=None, mediaId=None, file_=None):
    """Send a video message (type 43).

    Accepts a path, an opened file object, or an already-uploaded mediaId;
    returns a ReturnValue wrapping the server response or an error dict.
    """
    logger.debug('Request to send a video(mediaId: %s) to %s: %s' % (
        mediaId, toUserName, fileDir))
    if not (fileDir or file_):
        return ReturnValue({'BaseResponse': {
            'ErrMsg': 'Either fileDir or file_ should be specific',
            'Ret': -1005, }})
    if hasattr(fileDir, 'read'):
        # an opened file was passed positionally; treat it as file_
        file_, fileDir = fileDir, None
    if fileDir is None:
        fileDir = 'tmp.mp4' # specific fileDir to send other formats
    if toUserName is None:
        toUserName = self.storageClass.userName
    if mediaId is None:
        r = self.upload_file(fileDir, isVideo=True, file_=file_)
        if not r:
            return r
        mediaId = r['MediaId']
    url = '%s/webwxsendvideomsg?fun=async&f=json&pass_ticket=%s' % (
        self.loginInfo['url'], self.loginInfo['pass_ticket'])
    data = {
        'BaseRequest': self.loginInfo['BaseRequest'],
        'Msg': {
            'Type': 43,
            'MediaId': mediaId,
            'FromUserName': self.storageClass.userName,
            'ToUserName': toUserName,
            'LocalID': int(time.time() * 1e4),
            'ClientMsgId': int(time.time() * 1e4), },
        'Scene': 0, }
    headers = {
        'User-Agent': config.USER_AGENT,
        'Content-Type': 'application/json;charset=UTF-8', }
    body = json.dumps(data, ensure_ascii=False).encode('utf8')
    return ReturnValue(rawResponse=self.s.post(url, headers=headers, data=body))
def send(self, msg, toUserName=None, mediaId=None):
    """Dispatch *msg* to the matching sender based on its 5-char prefix.

    '@fil@', '@img@' and '@vid@' route to file/image/video senders (with
    mediaId when given); '@msg@' and unprefixed text go out as plain text.
    """
    if not msg:
        return ReturnValue({'BaseResponse': {
            'ErrMsg': 'No message.',
            'Ret': -1005, }})
    prefix = msg[:5]
    if prefix == '@msg@':
        return self.send_msg(msg[5:], toUserName)
    mediaSenders = {
        '@fil@': self.send_file,
        '@img@': self.send_image,
        '@vid@': self.send_video,
    }
    sender = mediaSenders.get(prefix)
    if sender is None:
        return self.send_msg(msg, toUserName)
    if mediaId is None:
        return sender(msg[5:], toUserName)
    return sender(msg[5:], toUserName, mediaId)
| {
"content_hash": "ffb498e0d9d28341a3da8d4da9559993",
"timestamp": "",
"source": "github",
"line_count": 508,
"max_line_length": 128,
"avg_line_length": 42.16338582677165,
"alnum_prop": 0.5279424809748354,
"repo_name": "ArmsZhou/Raspberry-Pi-Python-scripts",
"id": "8f00bab12381106aeea7130dcf2073ab68676516",
"size": "21419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "itchat/components/messages.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "125420"
}
],
"symlink_target": ""
} |
from .convunet import unet
from .dilatedunet import dilated_unet
from .dilateddensenet import dilated_densenet, dilated_densenet2, dilated_densenet3
| {
"content_hash": "58b34a22a2ce4bafd8fce9fc4aef6d07",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 83,
"avg_line_length": 49.666666666666664,
"alnum_prop": 0.8456375838926175,
"repo_name": "chuckyee/cardiac-segmentation",
"id": "b056e6ab28b8f7a1042649663dd9b9a4ba23021b",
"size": "149",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rvseg/models/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "10789725"
},
{
"name": "Mask",
"bytes": "221184"
},
{
"name": "Python",
"bytes": "70491"
}
],
"symlink_target": ""
} |
"""django_nice_stats URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from stats.views import StatsTemplateView
from django_git_info.views import git_info
# Route table: Django admin, the stats dashboard and the git-info endpoint.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^stats/', StatsTemplateView.as_view() ),
    url(r'^gitinfo/', git_info ),
]
| {
"content_hash": "280852191e6a2cf8bf3d849ae25ef202",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 79,
"avg_line_length": 36.30769230769231,
"alnum_prop": 0.7033898305084746,
"repo_name": "spapas/django-nice-stats",
"id": "12998e3bc5d399561bdca51d010f61f7503ed14c",
"size": "944",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_nice_stats/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3062"
},
{
"name": "Python",
"bytes": "46940"
}
],
"symlink_target": ""
} |
import os
from pyramid.authentication import AuthTktAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.security import Allow, Authenticated
from pyramid.session import SignedCookieSessionFactory
from passlib.apps import custom_app_context as pwd_context
class NewRoot(object):
    """Default Pyramid root factory.

    The ACL grants the 'manage' permission to any authenticated user.
    """
    # NOTE: this was previously a stray string literal in the class body
    # (a no-op statement); kept here as a proper comment.
    # TODO: create second level of authentication?
    __acl__ = [
        (Allow, Authenticated, 'manage'),
    ]

    def __init__(self, request):
        self.request = request
def check_credentials(input_password, real_password):
    """Return True when *input_password* verifies against the stored hash."""
    is_valid = pwd_context.verify(input_password, real_password)
    return is_valid
def includeme(config):
    """Pyramid security configuration.

    Installs AuthTkt authentication, ACL authorization, the NewRoot
    root factory and a signed-cookie session factory.  Secrets are read
    from the AUTH_SECRET / SESSION_SECRET environment variables, with
    insecure development-only defaults.
    """
    auth_secret = os.environ.get("AUTH_SECRET", "potato")
    authn_policy = AuthTktAuthenticationPolicy(
        secret=auth_secret,
        hashalg="sha512"
    )
    authz_policy = ACLAuthorizationPolicy()
    config.set_authentication_policy(authn_policy)
    config.set_authorization_policy(authz_policy)
    config.set_root_factory(NewRoot)
    # Session stuff for CSRF Protection
    session_secret = os.environ.get("SESSION_SECRET", "itsaseekrit")
    session_factory = SignedCookieSessionFactory(session_secret)
    config.set_session_factory(session_factory)
    # config.set_default_csrf_options(require_csrf=True)
| {
"content_hash": "f1c354e41f8e58b536ac32fdfa63b0ee",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 68,
"avg_line_length": 34.225,
"alnum_prop": 0.7341124908692477,
"repo_name": "PyListener/CF401-Project-1---PyListener",
"id": "2203e82cba2787be8bdaee9bba876f1c99246808",
"size": "1369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pylistener/security.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6972"
},
{
"name": "JavaScript",
"bytes": "1319"
},
{
"name": "Python",
"bytes": "45861"
},
{
"name": "Shell",
"bytes": "88"
}
],
"symlink_target": ""
} |
import copy
import netaddr
from oslo_config import cfg
from oslo_log import log as logging
from oslo_policy import policy as oslo_policy
from oslo_utils import excutils
import six
import webob.exc
from neutron.api import api_common
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.api.v2 import resource as wsgi_resource
from neutron.common import constants as const
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.i18n import _LE, _LI
from neutron import policy
from neutron import quota
LOG = logging.getLogger(__name__)

# Maps exceptions raised by plugins / libraries to the webob HTTP fault
# returned to the API client by the WSGI resource wrapper.
FAULT_MAP = {exceptions.NotFound: webob.exc.HTTPNotFound,
             exceptions.Conflict: webob.exc.HTTPConflict,
             exceptions.InUse: webob.exc.HTTPConflict,
             exceptions.BadRequest: webob.exc.HTTPBadRequest,
             exceptions.ServiceUnavailable: webob.exc.HTTPServiceUnavailable,
             exceptions.NotAuthorized: webob.exc.HTTPForbidden,
             netaddr.AddrFormatError: webob.exc.HTTPBadRequest,
             oslo_policy.PolicyNotAuthorized: webob.exc.HTTPForbidden
             }
class Controller(object):
    """Generic v2 API controller mapping REST requests onto plugin calls."""
    # CRUD operation names; combined with the resource/collection name to
    # derive plugin handler names such as 'get_ports' or 'create_network'.
    LIST = 'list'
    SHOW = 'show'
    CREATE = 'create'
    UPDATE = 'update'
    DELETE = 'delete'
    def __init__(self, plugin, collection, resource, attr_info,
                 allow_bulk=False, member_actions=None, parent=None,
                 allow_pagination=False, allow_sorting=False):
        """Wire a REST controller to one API resource.

        :param plugin: backend plugin CRUD calls are dispatched to
        :param collection: plural resource name (dashes allowed)
        :param resource: singular resource name (dashes allowed)
        :param attr_info: attribute map describing the resource schema
        :param allow_bulk: enable bulk create requests
        :param member_actions: plugin member actions exposed via __getattr__
        :param parent: parent resource descriptor for sub-resources
        :param allow_pagination: enable paginated list requests
        :param allow_sorting: enable sorted list requests
        """
        if member_actions is None:
            member_actions = []
        self._plugin = plugin
        # URLs use dashes; python identifiers use underscores.
        self._collection = collection.replace('-', '_')
        self._resource = resource.replace('-', '_')
        self._attr_info = attr_info
        self._allow_bulk = allow_bulk
        self._allow_pagination = allow_pagination
        self._allow_sorting = allow_sorting
        self._native_bulk = self._is_native_bulk_supported()
        self._native_pagination = self._is_native_pagination_supported()
        self._native_sorting = self._is_native_sorting_supported()
        # Attributes the policy engine always needs, even if the caller
        # did not request them.
        self._policy_attrs = [name for (name, info) in self._attr_info.items()
                              if info.get('required_by_policy')]
        self._notifier = n_rpc.get_notifier('network')
        # use plugin's dhcp notifier, if this is already instantiated
        agent_notifiers = getattr(plugin, 'agent_notifiers', {})
        self._dhcp_agent_notifier = (
            agent_notifiers.get(const.AGENT_TYPE_DHCP) or
            dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
        )
        if cfg.CONF.notify_nova_on_port_data_changes:
            # Imported lazily so nova notification support is optional.
            from neutron.notifiers import nova
            self._nova_notifier = nova.Notifier()
        self._member_actions = member_actions
        self._primary_key = self._get_primary_key()
        if self._allow_pagination and self._native_pagination:
            # Native pagination need native sorting support
            if not self._native_sorting:
                raise exceptions.Invalid(
                    _("Native pagination depend on native sorting")
                )
            if not self._allow_sorting:
                LOG.info(_LI("Allow sorting is enabled because native "
                             "pagination requires native sorting"))
                self._allow_sorting = True
        if parent:
            self._parent_id_name = '%s_id' % parent['member_name']
            parent_part = '_%s' % parent['member_name']
        else:
            self._parent_id_name = None
            parent_part = ''
        # Plugin handler names, e.g. 'get_ports' or 'create_router_port'
        # when a parent resource is involved.
        self._plugin_handlers = {
            self.LIST: 'get%s_%s' % (parent_part, self._collection),
            self.SHOW: 'get%s_%s' % (parent_part, self._resource)
        }
        for action in [self.CREATE, self.UPDATE, self.DELETE]:
            self._plugin_handlers[action] = '%s%s_%s' % (action, parent_part,
                                                         self._resource)
def _get_primary_key(self, default_primary_key='id'):
for key, value in six.iteritems(self._attr_info):
if value.get('primary_key', False):
return key
return default_primary_key
def _is_native_bulk_supported(self):
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_bulk_attr_name, False)
def _is_native_pagination_supported(self):
native_pagination_attr_name = ("_%s__native_pagination_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_pagination_attr_name, False)
def _is_native_sorting_supported(self):
native_sorting_attr_name = ("_%s__native_sorting_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_sorting_attr_name, False)
    def _exclude_attributes_by_policy(self, context, data):
        """Identifies attributes to exclude according to authZ policies.

        Return a list of attribute names which should be stripped from the
        response returned to the user because the user is not authorized
        to see them.
        """
        attributes_to_exclude = []
        for attr_name in data.keys():
            attr_data = self._attr_info.get(attr_name)
            # Only attributes declared visible are even policy-checked;
            # undeclared or invisible attributes are always stripped.
            if attr_data and attr_data['is_visible']:
                if policy.check(
                    context,
                    '%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
                    data,
                    might_not_exist=True,
                    pluralized=self._collection):
                    # this attribute is visible, check next one
                    continue
            # if the code reaches this point then either the policy check
            # failed or the attribute was not visible in the first place
            attributes_to_exclude.append(attr_name)
        return attributes_to_exclude
def _view(self, context, data, fields_to_strip=None):
"""Build a view of an API resource.
:param context: the neutron context
:param data: the object for which a view is being created
:param fields_to_strip: attributes to remove from the view
:returns: a view of the object which includes only attributes
visible according to API resource declaration and authZ policies.
"""
fields_to_strip = ((fields_to_strip or []) +
self._exclude_attributes_by_policy(context, data))
return self._filter_attributes(context, data, fields_to_strip)
def _filter_attributes(self, context, data, fields_to_strip=None):
if not fields_to_strip:
return data
return dict(item for item in six.iteritems(data)
if (item[0] not in fields_to_strip))
def _do_field_list(self, original_fields):
fields_to_add = None
# don't do anything if fields were not specified in the request
if original_fields:
fields_to_add = [attr for attr in self._policy_attrs
if attr not in original_fields]
original_fields.extend(self._policy_attrs)
return original_fields, fields_to_add
    def __getattr__(self, name):
        # Fallback lookup: expose each configured member action as a
        # request handler closure that proxies to the plugin method of
        # the same name, after an authZ check on the target resource.
        if name in self._member_actions:
            def _handle_action(request, id, **kwargs):
                arg_list = [request.context, id]
                # Ensure policy engine is initialized
                policy.init()
                # Fetch the resource and verify if the user can access it
                try:
                    resource = self._item(request, id, True)
                except oslo_policy.PolicyNotAuthorized:
                    msg = _('The resource could not be found.')
                    raise webob.exc.HTTPNotFound(msg)
                body = kwargs.pop('body', None)
                # Explicit comparison with None to distinguish from {}
                if body is not None:
                    arg_list.append(body)
                # It is ok to raise a 403 because accessibility to the
                # object was checked earlier in this method
                policy.enforce(request.context,
                               name,
                               resource,
                               pluralized=self._collection)
                return getattr(self._plugin, name)(*arg_list, **kwargs)
            return _handle_action
        else:
            raise AttributeError()
def _get_pagination_helper(self, request):
if self._allow_pagination and self._native_pagination:
return api_common.PaginationNativeHelper(request,
self._primary_key)
elif self._allow_pagination:
return api_common.PaginationEmulatedHelper(request,
self._primary_key)
return api_common.NoPaginationHelper(request, self._primary_key)
def _get_sorting_helper(self, request):
if self._allow_sorting and self._native_sorting:
return api_common.SortingNativeHelper(request, self._attr_info)
elif self._allow_sorting:
return api_common.SortingEmulatedHelper(request, self._attr_info)
return api_common.NoSortingHelper(request, self._attr_info)
    def _items(self, request, do_authz=False, parent_id=None):
        """Retrieves and formats a list of elements of the requested entity.

        Applies request filters, sorting and pagination; optionally runs
        an authZ check per returned object and strips attributes the
        caller is not allowed to see.
        """
        # NOTE(salvatore-orlando): The following ensures that fields which
        # are needed for authZ policy validation are not stripped away by the
        # plugin before returning.
        original_fields, fields_to_add = self._do_field_list(
            api_common.list_args(request, 'fields'))
        filters = api_common.get_filters(request, self._attr_info,
                                         ['fields', 'sort_key', 'sort_dir',
                                          'limit', 'marker', 'page_reverse'])
        kwargs = {'filters': filters,
                  'fields': original_fields}
        sorting_helper = self._get_sorting_helper(request)
        pagination_helper = self._get_pagination_helper(request)
        sorting_helper.update_args(kwargs)
        sorting_helper.update_fields(original_fields, fields_to_add)
        pagination_helper.update_args(kwargs)
        pagination_helper.update_fields(original_fields, fields_to_add)
        if parent_id:
            kwargs[self._parent_id_name] = parent_id
        obj_getter = getattr(self._plugin, self._plugin_handlers[self.LIST])
        obj_list = obj_getter(request.context, **kwargs)
        # sort/paginate are no-ops for natively supported plugins.
        obj_list = sorting_helper.sort(obj_list)
        obj_list = pagination_helper.paginate(obj_list)
        # Check authz
        if do_authz:
            # FIXME(salvatore-orlando): obj_getter might return references to
            # other resources. Must check authZ on them too.
            # Omit items from list that should not be visible
            obj_list = [obj for obj in obj_list
                        if policy.check(request.context,
                                        self._plugin_handlers[self.SHOW],
                                        obj,
                                        plugin=self._plugin,
                                        pluralized=self._collection)]
        # Use the first element in the list for discriminating which attributes
        # should be filtered out because of authZ policies
        # fields_to_add contains a list of attributes added for request policy
        # checks but that were not required by the user. They should be
        # therefore stripped
        fields_to_strip = fields_to_add or []
        if obj_list:
            fields_to_strip += self._exclude_attributes_by_policy(
                request.context, obj_list[0])
        collection = {self._collection:
                      [self._filter_attributes(
                          request.context, obj,
                          fields_to_strip=fields_to_strip)
                       for obj in obj_list]}
        pagination_links = pagination_helper.get_links(obj_list)
        if pagination_links:
            collection[self._collection + "_links"] = pagination_links
        return collection
    def _item(self, request, id, do_authz=False, field_list=None,
              parent_id=None):
        """Retrieves and formats a single element of the requested entity.

        :param do_authz: when True, enforce the SHOW policy on the object
        :param field_list: restrict the fields fetched from the plugin
        :param parent_id: id of the parent resource, for sub-resources
        """
        kwargs = {'fields': field_list}
        action = self._plugin_handlers[self.SHOW]
        if parent_id:
            kwargs[self._parent_id_name] = parent_id
        obj_getter = getattr(self._plugin, action)
        obj = obj_getter(request.context, id, **kwargs)
        # Check authz
        # FIXME(salvatore-orlando): obj_getter might return references to
        # other resources. Must check authZ on them too.
        if do_authz:
            policy.enforce(request.context,
                           action,
                           obj,
                           pluralized=self._collection)
        return obj
def _send_dhcp_notification(self, context, data, methodname):
if cfg.CONF.dhcp_agent_notification:
if self._collection in data:
for body in data[self._collection]:
item = {self._resource: body}
self._dhcp_agent_notifier.notify(context, item, methodname)
else:
self._dhcp_agent_notifier.notify(context, data, methodname)
def _send_nova_notification(self, action, orig, returned):
if hasattr(self, '_nova_notifier'):
self._nova_notifier.send_network_change(action, orig, returned)
    def index(self, request, **kwargs):
        """Returns a list of the requested entity."""
        # kwargs may carry the parent resource id for sub-resources.
        parent_id = kwargs.get(self._parent_id_name)
        # Ensure policy engine is initialized
        policy.init()
        return self._items(request, True, parent_id)
    def show(self, request, id, **kwargs):
        """Returns detailed information about the requested entity."""
        try:
            # NOTE(salvatore-orlando): The following ensures that fields
            # which are needed for authZ policy validation are not stripped
            # away by the plugin before returning.
            field_list, added_fields = self._do_field_list(
                api_common.list_args(request, "fields"))
            parent_id = kwargs.get(self._parent_id_name)
            # Ensure policy engine is initialized
            policy.init()
            # Fields added only for the policy check are stripped from
            # the response by _view.
            return {self._resource:
                    self._view(request.context,
                               self._item(request,
                                          id,
                                          do_authz=True,
                                          field_list=field_list,
                                          parent_id=parent_id),
                               fields_to_strip=added_fields)}
        except oslo_policy.PolicyNotAuthorized:
            # To avoid giving away information, pretend that it
            # doesn't exist
            msg = _('The resource could not be found.')
            raise webob.exc.HTTPNotFound(msg)
    def _emulate_bulk_create(self, obj_creator, request, body, parent_id=None):
        """Emulate an atomic bulk create for plugins without native support.

        Items are created one by one; if any creation fails, the items
        already created are deleted (best effort) before the original
        exception is re-raised.
        """
        objs = []
        try:
            for item in body[self._collection]:
                kwargs = {self._resource: item}
                if parent_id:
                    kwargs[self._parent_id_name] = parent_id
                fields_to_strip = self._exclude_attributes_by_policy(
                    request.context, item)
                objs.append(self._filter_attributes(
                    request.context,
                    obj_creator(request.context, **kwargs),
                    fields_to_strip=fields_to_strip))
            return objs
        # Note(salvatore-orlando): broad catch as in theory a plugin
        # could raise any kind of exception
        except Exception:
            with excutils.save_and_reraise_exception():
                for obj in objs:
                    obj_deleter = getattr(self._plugin,
                                          self._plugin_handlers[self.DELETE])
                    try:
                        kwargs = ({self._parent_id_name: parent_id}
                                  if parent_id else {})
                        obj_deleter(request.context, obj['id'], **kwargs)
                    except Exception:
                        # broad catch as our only purpose is to log the
                        # exception
                        LOG.exception(_LE("Unable to undo add for "
                                          "%(resource)s %(id)s"),
                                      {'resource': self._resource,
                                       'id': obj['id']})
                # TODO(salvatore-orlando): The object being processed when the
                # plugin raised might have been created or not in the db.
                # We need a way for ensuring that if it has been created,
                # it is then deleted
    def create(self, request, body=None, **kwargs):
        """Creates a new instance of the requested entity.

        Handles both single and bulk create (native or emulated),
        enforcing authZ policy and quotas per item and emitting
        .create.start/.create.end notifications.
        """
        parent_id = kwargs.get(self._parent_id_name)
        self._notifier.info(request.context,
                            self._resource + '.create.start',
                            body)
        body = Controller.prepare_request_body(request.context, body, True,
                                               self._resource, self._attr_info,
                                               allow_bulk=self._allow_bulk)
        action = self._plugin_handlers[self.CREATE]
        # Check authz
        if self._collection in body:
            # Have to account for bulk create
            items = body[self._collection]
            deltas = {}
            bulk = True
        else:
            items = [body]
            bulk = False
        # Ensure policy engine is initialized
        policy.init()
        # NOTE: 'kwargs' (the view keyword args) is intentionally reused
        # below for quota and then plugin-call arguments; parent_id was
        # already extracted above.
        for item in items:
            self._validate_network_tenant_ownership(request,
                                                    item[self._resource])
            policy.enforce(request.context,
                           action,
                           item[self._resource],
                           pluralized=self._collection)
            try:
                tenant_id = item[self._resource]['tenant_id']
                count = quota.QUOTAS.count(request.context, self._resource,
                                           self._plugin, self._collection,
                                           tenant_id)
                if bulk:
                    # Accumulate per-tenant deltas across the bulk payload.
                    delta = deltas.get(tenant_id, 0) + 1
                    deltas[tenant_id] = delta
                else:
                    delta = 1
                kwargs = {self._resource: count + delta}
            except exceptions.QuotaResourceUnknown as e:
                # We don't want to quota this resource
                LOG.debug(e)
            else:
                quota.QUOTAS.limit_check(request.context,
                                         item[self._resource]['tenant_id'],
                                         **kwargs)
        def notify(create_result):
            # Emit .create.end and DHCP notifications, pass result through.
            notifier_method = self._resource + '.create.end'
            self._notifier.info(request.context,
                                notifier_method,
                                create_result)
            self._send_dhcp_notification(request.context,
                                         create_result,
                                         notifier_method)
            return create_result
        kwargs = {self._parent_id_name: parent_id} if parent_id else {}
        if self._collection in body and self._native_bulk:
            # plugin does atomic bulk create operations
            obj_creator = getattr(self._plugin, "%s_bulk" % action)
            objs = obj_creator(request.context, body, **kwargs)
            # Use first element of list to discriminate attributes which
            # should be removed because of authZ policies
            fields_to_strip = self._exclude_attributes_by_policy(
                request.context, objs[0])
            return notify({self._collection: [self._filter_attributes(
                request.context, obj, fields_to_strip=fields_to_strip)
                for obj in objs]})
        else:
            obj_creator = getattr(self._plugin, action)
            if self._collection in body:
                # Emulate atomic bulk behavior
                objs = self._emulate_bulk_create(obj_creator, request,
                                                 body, parent_id)
                return notify({self._collection: objs})
            else:
                kwargs.update({self._resource: body})
                obj = obj_creator(request.context, **kwargs)
                self._send_nova_notification(action, {},
                                             {self._resource: obj})
                return notify({self._resource: self._view(request.context,
                                                          obj)})
    def delete(self, request, id, **kwargs):
        """Deletes the specified entity.

        Fetches the object first so its view can be included in the
        .delete.end notification; a failed authZ check is reported as
        404 to avoid leaking the object's existence.
        """
        self._notifier.info(request.context,
                            self._resource + '.delete.start',
                            {self._resource + '_id': id})
        action = self._plugin_handlers[self.DELETE]
        # Check authz
        policy.init()
        parent_id = kwargs.get(self._parent_id_name)
        obj = self._item(request, id, parent_id=parent_id)
        try:
            policy.enforce(request.context,
                           action,
                           obj,
                           pluralized=self._collection)
        except oslo_policy.PolicyNotAuthorized:
            # To avoid giving away information, pretend that it
            # doesn't exist
            msg = _('The resource could not be found.')
            raise webob.exc.HTTPNotFound(msg)
        obj_deleter = getattr(self._plugin, action)
        obj_deleter(request.context, id, **kwargs)
        notifier_method = self._resource + '.delete.end'
        self._notifier.info(request.context,
                            notifier_method,
                            {self._resource + '_id': id})
        # No value is returned: the WSGI layer answers 204 No Content.
        result = {self._resource: self._view(request.context, obj)}
        self._send_nova_notification(action, {}, result)
        self._send_dhcp_notification(request.context,
                                     result,
                                     notifier_method)
    def update(self, request, id, body=None, **kwargs):
        """Updates the specified entity's attributes.

        Loads the current object, merges the request body into it for the
        policy check, then dispatches the update to the plugin and emits
        .update.start/.update.end notifications.
        """
        parent_id = kwargs.get(self._parent_id_name)
        try:
            payload = body.copy()
        except AttributeError:
            msg = _("Invalid format: %s") % request.body
            raise exceptions.BadRequest(resource='body', msg=msg)
        payload['id'] = id
        self._notifier.info(request.context,
                            self._resource + '.update.start',
                            payload)
        body = Controller.prepare_request_body(request.context, body, False,
                                               self._resource, self._attr_info,
                                               allow_bulk=self._allow_bulk)
        action = self._plugin_handlers[self.UPDATE]
        # Load object to check authz
        # but pass only attributes in the original body and required
        # by the policy engine to the policy 'brain'
        field_list = [name for (name, value) in six.iteritems(self._attr_info)
                      if (value.get('required_by_policy') or
                          value.get('primary_key') or
                          'default' not in value)]
        # Ensure policy engine is initialized
        policy.init()
        orig_obj = self._item(request, id, field_list=field_list,
                              parent_id=parent_id)
        orig_object_copy = copy.copy(orig_obj)
        orig_obj.update(body[self._resource])
        # Make a list of attributes to be updated to inform the policy engine
        # which attributes are set explicitly so that it can distinguish them
        # from the ones that are set to their default values.
        orig_obj[const.ATTRIBUTES_TO_UPDATE] = body[self._resource].keys()
        try:
            policy.enforce(request.context,
                           action,
                           orig_obj,
                           pluralized=self._collection)
        except oslo_policy.PolicyNotAuthorized:
            with excutils.save_and_reraise_exception() as ctxt:
                # If a tenant is modifying it's own object, it's safe to return
                # a 403. Otherwise, pretend that it doesn't exist to avoid
                # giving away information.
                if request.context.tenant_id != orig_obj['tenant_id']:
                    ctxt.reraise = False
            # Only reached when re-raise was disabled above.
            msg = _('The resource could not be found.')
            raise webob.exc.HTTPNotFound(msg)
        obj_updater = getattr(self._plugin, action)
        kwargs = {self._resource: body}
        if parent_id:
            kwargs[self._parent_id_name] = parent_id
        obj = obj_updater(request.context, id, **kwargs)
        result = {self._resource: self._view(request.context, obj)}
        notifier_method = self._resource + '.update.end'
        self._notifier.info(request.context, notifier_method, result)
        self._send_dhcp_notification(request.context,
                                     result,
                                     notifier_method)
        self._send_nova_notification(action, orig_object_copy, result)
        return result
@staticmethod
def _populate_tenant_id(context, res_dict, is_create):
if (('tenant_id' in res_dict and
res_dict['tenant_id'] != context.tenant_id and
not context.is_admin)):
msg = _("Specifying 'tenant_id' other than authenticated "
"tenant in request requires admin privileges")
raise webob.exc.HTTPBadRequest(msg)
if is_create and 'tenant_id' not in res_dict:
if context.tenant_id:
res_dict['tenant_id'] = context.tenant_id
else:
msg = _("Running without keystone AuthN requires "
" that tenant_id is specified")
raise webob.exc.HTTPBadRequest(msg)
    @staticmethod
    def prepare_request_body(context, body, is_create, resource, attr_info,
                             allow_bulk=False):
        """Verifies required attributes are in request body.

        Also checking that an attribute is only specified if it is allowed
        for the given operation (create/update).
        Attribute with default values are considered to be optional.
        body argument must be the deserialized body.
        """
        collection = resource + "s"
        if not body:
            raise webob.exc.HTTPBadRequest(_("Resource body required"))
        LOG.debug("Request body: %(body)s", {'body': body})
        try:
            if collection in body:
                if not allow_bulk:
                    raise webob.exc.HTTPBadRequest(_("Bulk operation "
                                                     "not supported"))
                if not body[collection]:
                    raise webob.exc.HTTPBadRequest(_("Resources required"))
                # Recursively validate each item of the bulk payload,
                # wrapping bare items in a {resource: item} envelope.
                bulk_body = [
                    Controller.prepare_request_body(
                        context, item if resource in item
                        else {resource: item}, is_create, resource, attr_info,
                        allow_bulk) for item in body[collection]
                ]
                return {collection: bulk_body}
            res_dict = body.get(resource)
        except (AttributeError, TypeError):
            msg = _("Body contains invalid data")
            raise webob.exc.HTTPBadRequest(msg)
        if res_dict is None:
            msg = _("Unable to find '%s' in request body") % resource
            raise webob.exc.HTTPBadRequest(msg)
        Controller._populate_tenant_id(context, res_dict, is_create)
        Controller._verify_attributes(res_dict, attr_info)
        if is_create:  # POST
            for attr, attr_vals in six.iteritems(attr_info):
                if attr_vals['allow_post']:
                    if ('default' not in attr_vals and
                        attr not in res_dict):
                        msg = _("Failed to parse request. Required "
                                "attribute '%s' not specified") % attr
                        raise webob.exc.HTTPBadRequest(msg)
                    res_dict[attr] = res_dict.get(attr,
                                                  attr_vals.get('default'))
                else:
                    if attr in res_dict:
                        msg = _("Attribute '%s' not allowed in POST") % attr
                        raise webob.exc.HTTPBadRequest(msg)
        else:  # PUT
            for attr, attr_vals in six.iteritems(attr_info):
                if attr in res_dict and not attr_vals['allow_put']:
                    msg = _("Cannot update read-only attribute %s") % attr
                    raise webob.exc.HTTPBadRequest(msg)
        for attr, attr_vals in six.iteritems(attr_info):
            if (attr not in res_dict or
                res_dict[attr] is attributes.ATTR_NOT_SPECIFIED):
                continue
            # Convert values if necessary
            if 'convert_to' in attr_vals:
                res_dict[attr] = attr_vals['convert_to'](res_dict[attr])
            # Check that configured values are correct
            if 'validate' not in attr_vals:
                continue
            for rule in attr_vals['validate']:
                res = attributes.validators[rule](res_dict[attr],
                                                  attr_vals['validate'][rule])
                if res:
                    msg_dict = dict(attr=attr, reason=res)
                    msg = _("Invalid input for %(attr)s. "
                            "Reason: %(reason)s.") % msg_dict
                    raise webob.exc.HTTPBadRequest(msg)
        return body
@staticmethod
def _verify_attributes(res_dict, attr_info):
extra_keys = set(res_dict.keys()) - set(attr_info.keys())
if extra_keys:
msg = _("Unrecognized attribute(s) '%s'") % ', '.join(extra_keys)
raise webob.exc.HTTPBadRequest(msg)
def _validate_network_tenant_ownership(self, request, resource_item):
# TODO(salvatore-orlando): consider whether this check can be folded
# in the policy engine
if (request.context.is_admin or request.context.is_advsvc or
self._resource not in ('port', 'subnet')):
return
network = self._plugin.get_network(
request.context,
resource_item['network_id'])
# do not perform the check on shared networks
if network.get('shared'):
return
network_owner = network['tenant_id']
if network_owner != resource_item['tenant_id']:
msg = _("Tenant %(tenant_id)s not allowed to "
"create %(resource)s on this network")
raise webob.exc.HTTPForbidden(msg % {
"tenant_id": resource_item['tenant_id'],
"resource": self._resource,
})
def create_resource(collection, resource, plugin, params, allow_bulk=False,
                    member_actions=None, parent=None, allow_pagination=False,
                    allow_sorting=False):
    """Build a WSGI Resource exposing *plugin* for one API resource.

    Plugin exceptions are translated to HTTP faults via FAULT_MAP.
    """
    controller = Controller(plugin, collection, resource, params,
                            allow_bulk=allow_bulk,
                            member_actions=member_actions, parent=parent,
                            allow_pagination=allow_pagination,
                            allow_sorting=allow_sorting)
    return wsgi_resource.Resource(controller, FAULT_MAP)
| {
"content_hash": "a83d64c1b4aaf39cb90981af8c4ff19d",
"timestamp": "",
"source": "github",
"line_count": 688,
"max_line_length": 79,
"avg_line_length": 46.78343023255814,
"alnum_prop": 0.5419579333271196,
"repo_name": "NeCTAR-RC/neutron",
"id": "8237905d26bfc101b768a552c50f00d91b81c57e",
"size": "32828",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "neutron/api/v2/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "980"
},
{
"name": "Python",
"bytes": "7228162"
},
{
"name": "Shell",
"bytes": "12807"
}
],
"symlink_target": ""
} |
import sys
from config import redisdb
if __name__ == '__main__':
    # Python 2 script (print statement / raw_input).
    # Usage: python publish.py <name> <channel>
    name = sys.argv[1]
    channel = sys.argv[2]
    print 'Welcome to {channel}'.format(**locals())
    # Read lines from stdin and publish them to the redis channel
    # until the user types 'exit'.
    while True:
        message = raw_input('Enter a message: ')
        if message.lower() == 'exit':
            break
        message = '{name} says: {message}'.format(**locals())
        redisdb.publish(channel, message)
| {
"content_hash": "f529176f79671b6c77911754d55e85ab",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 61,
"avg_line_length": 22.5,
"alnum_prop": 0.5530864197530864,
"repo_name": "hugoxia/Python",
"id": "4e36b3068696b5ef58288bafcc3067075a9219fc",
"size": "405",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chatroom/base-redis/publish.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "394"
},
{
"name": "HTML",
"bytes": "4511"
},
{
"name": "JavaScript",
"bytes": "1426"
},
{
"name": "Python",
"bytes": "91850"
},
{
"name": "Shell",
"bytes": "217"
}
],
"symlink_target": ""
} |
from security_monkey.views import AuthenticatedService
from security_monkey.views import __check_auth__
from security_monkey.views import ITEM_FIELDS
from security_monkey.views import ITEM_COMMENT_FIELDS
from security_monkey.views import AUDIT_FIELDS
from security_monkey.views import REVISION_FIELDS
from security_monkey.datastore import Item
from security_monkey.datastore import Account
from security_monkey.datastore import Technology
from security_monkey.datastore import ItemRevision
from security_monkey import db
from security_monkey import api
from flask.ext.restful import marshal, reqparse
from sqlalchemy.sql.expression import cast
from sqlalchemy import String
from sqlalchemy.orm import joinedload
class ItemGet(AuthenticatedService):
    """REST endpoint returning one item with its revisions, issues and
    comments."""
    def __init__(self):
        super(ItemGet, self).__init__()
    def get(self, item_id):
        """
            .. http:get:: /api/1/item/1234
            Get a specific item
            **Example Request**:
            .. sourcecode:: http
                GET /api/1/item/1234 HTTP/1.1
                Host: example.com
                Accept: application/json
            **Example Response**:
            .. sourcecode:: http
                HTTP/1.1 200 OK
                Vary: Accept
                Content-Type: application/json
                {
                    "item": {
                        "account": "example_account",
                        "region": "us-east-1",
                        "technology": "elb",
                        "id": 1234,
                        "name": "example_name"
                    },
                    "revisions": [
                        {
                            "active": false,
                            "date_created": "2014-04-11 17:05:06.701936",
                            "config": {},
                            "item_id": 1234,
                            "id": 213784
                        } ],
                    "auth": {
                        "authenticated": true,
                        "user": "user@example.com"
                    },
                    "issues": [],
                    "comments": []
                }
            :statuscode 200: no error
            :statuscode 401: Authentication Error. Please login.
        """
        auth, retval = __check_auth__(self.auth_dict)
        if auth:
            return retval
        query = Item.query.filter(Item.id == item_id)
        result = query.first()
        # result should be an Item with a list of audit thingers and a list of
        # revisions
        # NOTE(review): result is None when no item matches item_id; the
        # marshal call below would then raise AttributeError — confirm a
        # 404 path exists upstream.
        retval = {}
        item_marshaled = marshal(result.__dict__, ITEM_FIELDS)
        # Python 2 idiom: dict(a.items() + b.items()) merges dicts; on
        # Python 3 dict views do not support '+'.
        item_marshaled = dict(
            item_marshaled.items() +
            {'account': result.account.name}.items() +
            {'technology': result.technology.name}.items()
        )
        retval['item'] = item_marshaled
        retval['issues'] = []
        retval['auth'] = self.auth_dict
        comments_marshaled = []
        for comment in result.comments:
            comment_marshaled = marshal(comment, ITEM_COMMENT_FIELDS)
            comment_marshaled = dict(
                comment_marshaled.items() +
                {'user': comment.user.email}.items()
            )
            comments_marshaled.append(comment_marshaled)
        retval['comments'] = comments_marshaled
        for issue in result.issues:
            issue_marshaled = marshal(issue.__dict__, AUDIT_FIELDS)
            if issue.user is not None:
                issue_marshaled = dict(issue_marshaled.items() +
                                       {'justified_user': issue.user.email}.items()
                                       )
            retval['issues'].append(issue_marshaled)
        retval['revisions'] = []
        for revision in result.revisions:
            revision_marshaled = marshal(revision.__dict__, REVISION_FIELDS)
            revision_marshaled = dict(
                revision_marshaled.items() +
                {'config': revision.config}.items()
            )
            retval['revisions'].append(revision_marshaled)
        return retval, 200
# Returns a list of items optionally filtered by
# account, region, name, ctype or id.
class ItemList(AuthenticatedService):
    """REST endpoint returning a filtered, paginated list of items."""
    def __init__(self):
        super(ItemList, self).__init__()
def get(self):
"""
.. http:get:: /api/1/items
Get a list of items matching the given criteria.
**Example Request**:
.. sourcecode:: http
GET /api/1/items HTTP/1.1
Host: example.com
Accept: application/json
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"items": [
{
"account": "example_account",
"region": "us-east-1",
"technology": "sqs",
"id": 14414,
"name": "example_name",
"num_issues": 3,
"issue_score": 9,
"unjustified_issue_score": 3,
"active" true,
"first_seen": "2014-06-17 19:47:07.299760",
"last_seen": "2014-06-18 11:53:16.467709"
}
],
"total": 144,
"page": 1,
"auth": {
"authenticated": true,
"user": "user@example.com"
}
}
:statuscode 200: no error
:statuscode 401: Authenciation Error. Please Login.
"""
(auth, retval) = __check_auth__(self.auth_dict)
if auth:
return retval
self.reqparse.add_argument('count', type=int, default=30, location='args')
self.reqparse.add_argument('page', type=int, default=1, location='args')
self.reqparse.add_argument('regions', type=str, default=None, location='args')
self.reqparse.add_argument('accounts', type=str, default=None, location='args')
self.reqparse.add_argument('active', type=str, default=None, location='args')
self.reqparse.add_argument('names', type=str, default=None, location='args')
self.reqparse.add_argument('technologies', type=str, default=None, location='args')
self.reqparse.add_argument('searchconfig', type=str, default=None, location='args')
self.reqparse.add_argument('ids', type=int, default=None, location='args')
args = self.reqparse.parse_args()
page = args.pop('page', None)
count = args.pop('count', None)
for k, v in args.items():
if not v:
del args[k]
# Read more about filtering:
# http://docs.sqlalchemy.org/en/rel_0_7/orm/query.html
query = Item.query.join((ItemRevision, Item.latest_revision_id == ItemRevision.id))
if 'regions' in args:
regions = args['regions'].split(',')
query = query.filter(Item.region.in_(regions))
if 'accounts' in args:
accounts = args['accounts'].split(',')
query = query.join((Account, Account.id == Item.account_id))
query = query.filter(Account.name.in_(accounts))
if 'technologies' in args:
technologies = args['technologies'].split(',')
query = query.join((Technology, Technology.id == Item.tech_id))
query = query.filter(Technology.name.in_(technologies))
if 'names' in args:
names = args['names'].split(',')
query = query.filter(Item.name.in_(names))
if 'ids' in args:
ids = args['ids'].split(',')
query = query.filter(Item.id.in_(ids))
if 'active' in args:
active = args['active'].lower() == "true"
query = query.filter(ItemRevision.active == active)
if 'searchconfig' in args:
searchconfig = args['searchconfig']
query = query.filter(cast(ItemRevision.config, String).ilike('%{}%'.format(searchconfig)))
# Eager load the joins and leave the config column out of this.
query = query.options(joinedload('issues'))
query = query.options(joinedload('revisions').defer('config'))
query = query.options(joinedload('account'))
query = query.options(joinedload('technology'))
query = query.order_by(ItemRevision.date_created.desc())
items = query.paginate(page, count)
marshaled_dict = {
'page': items.page,
'total': items.total,
'auth': self.auth_dict
}
marshaled_items = []
for item in items.items:
num_issues = len(item.issues)
issue_score = 0
unjustified_issue_score = 0
for issue in item.issues:
issue_score = issue_score + issue.score
if not issue.justified:
unjustified_issue_score += issue.score
first_seen = str(item.revisions[-1].date_created)
last_seen = str(item.revisions[0].date_created)
active = item.revisions[0].active
item_marshaled = marshal(item.__dict__, ITEM_FIELDS)
item_marshaled = dict(item_marshaled.items() +
{
'account': item.account.name,
'technology': item.technology.name,
'num_issues': num_issues,
'issue_score': issue_score,
'unjustified_issue_score': unjustified_issue_score,
'active': active,
'first_seen': first_seen,
'last_seen': last_seen
#'last_rev': item.revisions[0].config,
}.items())
marshaled_items.append(item_marshaled)
marshaled_dict['items'] = marshaled_items
marshaled_dict['count'] = len(marshaled_items)
return marshaled_dict, 200
| {
"content_hash": "3dbef9091f49e3a0c9aaf9fb4cc174ab",
"timestamp": "",
"source": "github",
"line_count": 280,
"max_line_length": 102,
"avg_line_length": 37.364285714285714,
"alnum_prop": 0.5019116803670426,
"repo_name": "pradeep-aradhya/security_monkey",
"id": "f78817dc1e75ed1b55b7d399e5a09d6acb9a72eb",
"size": "11080",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "security_monkey/views/item.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "22086"
},
{
"name": "Dart",
"bytes": "81727"
},
{
"name": "HTML",
"bytes": "77501"
},
{
"name": "JavaScript",
"bytes": "8629"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "460136"
},
{
"name": "Shell",
"bytes": "16916"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ExtensionsV1beta1DeploymentStrategy(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self, rolling_update=None, type=None):
        """
        ExtensionsV1beta1DeploymentStrategy - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
            and the value is attribute type.
        :param dict attributeMap: The key is attribute name
            and the value is json key in definition.
        """
        # Attribute name -> declared Swagger type.
        self.swagger_types = {
            'rolling_update': 'ExtensionsV1beta1RollingUpdateDeployment',
            'type': 'str'
        }
        # Attribute name -> key used in the JSON definition.
        self.attribute_map = {
            'rolling_update': 'rollingUpdate',
            'type': 'type'
        }
        self._rolling_update = rolling_update
        self._type = type

    @property
    def rolling_update(self):
        """Rolling update config params. Present only if
        DeploymentStrategyType = RollingUpdate.

        :rtype: ExtensionsV1beta1RollingUpdateDeployment
        """
        return self._rolling_update

    @rolling_update.setter
    def rolling_update(self, rolling_update):
        """Set the rolling update configuration for this strategy.

        :type: ExtensionsV1beta1RollingUpdateDeployment
        """
        self._rolling_update = rolling_update

    @property
    def type(self):
        """Type of deployment. Can be \"Recreate\" or \"RollingUpdate\".
        Default is RollingUpdate.

        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Set the deployment type (\"Recreate\" or \"RollingUpdate\").

        :type: str
        """
        self._type = type

    def to_dict(self):
        """Return the model's properties as a plain dict, converting contained
        models (and one level of lists/dicts of models) via their to_dict()."""
        result = {}
        for attr_name in self.swagger_types:
            value = getattr(self, attr_name)
            if isinstance(value, list):
                result[attr_name] = [
                    elem.to_dict() if hasattr(elem, "to_dict") else elem
                    for elem in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr_name] = value.to_dict()
            elif isinstance(value, dict):
                result[attr_name] = dict(
                    (key, val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items()
                )
            else:
                result[attr_name] = value
        return result

    def to_str(self):
        """Pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects hold identical attribute state."""
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| {
"content_hash": "a8dd7d37e0cc2cde00802ecd24273b33",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 105,
"avg_line_length": 30.333333333333332,
"alnum_prop": 0.5805064500716675,
"repo_name": "skuda/client-python",
"id": "6b4ac38b8c2d750863ef84578c1081b7774ea6d9",
"size": "4203",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/extensions_v1beta1_deployment_strategy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5907789"
},
{
"name": "Shell",
"bytes": "8195"
}
],
"symlink_target": ""
} |
import base
try:
from django.utils import simplejson as json
except ImportError:
import json
from test_utils.testmaker.serializers import REQUEST_UNIQUE_STRING, RESPONSE_UNIQUE_STRING
class Serializer(base.Serializer):
    """Serializer that writes requests/responses to the stream as JSON,
    delimited by the testmaker unique marker strings."""

    def __init__(self, name='pickle'):
        super(Serializer, self).__init__(name)

    def save_request(self, request):
        """Saves the Request to the serialization stream"""
        payload = self.process_request(request)
        try:
            self.ser.info(json.dumps(payload))
            self.ser.info(REQUEST_UNIQUE_STRING)
        except TypeError:
            # Can't serialize wsgi.error objects
            pass

    def save_response(self, request, response):
        """Saves the Response-like objects information that might be tested"""
        payload = self.process_response(request.path, response)
        try:
            self.ser.info(json.dumps(payload))
            self.ser.info(RESPONSE_UNIQUE_STRING)
        except TypeError:
            # Can't serialize wsgi.error objects
            pass
| {
"content_hash": "d438249a61749931d94b911d13a272b9",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 90,
"avg_line_length": 33.84375,
"alnum_prop": 0.6454293628808865,
"repo_name": "ericholscher/django-test-utils",
"id": "d5a9b48a6209e51cf44f5a959da47b2cab56f876",
"size": "1083",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_utils/testmaker/serializers/json_serializer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1114"
},
{
"name": "Python",
"bytes": "114462"
}
],
"symlink_target": ""
} |
"""
Handles updating the database with allowed minutes
and override allowed minutes for the gradeable timer
"""
from sqlalchemy import create_engine, MetaData, text, exc
import datetime
import os
import sys
import json
# Load the core Submitty configuration relative to this script's location;
# abort immediately if it is missing or malformed, since nothing below can
# work without it.
try:
    CONFIG_PATH = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), '..', 'config')
    with open(os.path.join(CONFIG_PATH, 'submitty.json')) as open_file:
        SUBMITTY_CONFIG = json.load(open_file)
except Exception as config_fail_error:
    print("[{}] ERROR: CORE SUBMITTY CONFIGURATION ERROR {}".format(
        str(datetime.datetime.now()), str(config_fail_error)))
    sys.exit(1)
# Positional command-line arguments (no validation performed here):
# path to the gradeable's autograding config, then semester/course/gradeable.
CONFIG_FILE_PATH = sys.argv[1]
SEMESTER = sys.argv[2]
COURSE = sys.argv[3]
GRADEABLE = sys.argv[4]
def setup_db():
    """Set up a connection with the course database.

    Returns a (connection, metadata) pair bound to the course database
    named after the current semester and course.
    """
    with open(os.path.join(CONFIG_PATH, 'database.json')) as db_file:
        db_config = json.load(db_file)

    db_name = "submitty_{}_{}".format(SEMESTER, COURSE)
    host = db_config['database_host']

    # A host that is a directory on disk denotes a UNIX socket, which needs a
    # slightly different connection-string form.
    if os.path.isdir(host):
        conn_string = "postgresql://{}:{}@/{}?host={}".format(
            db_config['database_user'],
            db_config['database_password'],
            db_name,
            host
        )
    else:
        conn_string = "postgresql://{}:{}@{}/{}".format(
            db_config['database_user'],
            db_config['database_password'],
            host,
            db_name
        )

    connection = create_engine(conn_string).connect()
    return connection, MetaData(bind=connection)
def send_data(db, allowed_minutes, override):
    """Persist the gradeable's allowed minutes and per-user overrides.

    The overall limit is written to the gradeable row; the override table is
    wiped for this gradeable and repopulated from `override` (if given).
    """
    update_query = """UPDATE gradeable SET g_allowed_minutes = :minutes
        WHERE g_id=:gradeable"""
    db.execute(text(update_query), minutes=allowed_minutes, gradeable=GRADEABLE)

    # Clear out any previously-stored overrides before re-inserting.
    delete_query = """DELETE FROM gradeable_allowed_minutes_override WHERE g_id=:gradeable"""
    db.execute(text(delete_query), gradeable=GRADEABLE)

    if override is None:
        return
    insert_query = ("INSERT INTO gradeable_allowed_minutes_override (g_id, user_id, allowed_minutes) "
                    "VALUES (:gradeable, :userid, :minutes)")  # noqa: E501
    for entry in override:
        db.execute(text(insert_query), gradeable=GRADEABLE, userid=entry['user'],
                   minutes=entry['allowed_minutes'])
def main():
    """Read the autograding config and sync its time limit into the database.

    Finds the first "Check Time Limit" testcase whose first validation entry
    defines 'allowed_minutes', then writes that limit (plus any per-user
    overrides) to the course database.
    """
    with open(CONFIG_FILE_PATH) as config_file:
        config = json.loads(config_file.read())

    timelimit_case = None
    for testcase in config['testcases']:
        if testcase['title'] != "Check Time Limit":
            continue
        if 'validation' in testcase and len(testcase['validation']) > 0:
            if 'allowed_minutes' in testcase['validation'][0]:
                timelimit_case = testcase
                break

    if timelimit_case is None:
        # No time-limit testcase configured; nothing to update.
        return

    validation = timelimit_case['validation'][0]
    allowed_minutes = validation['allowed_minutes']
    override = validation.get('override')

    try:
        db, _metadata = setup_db()
        send_data(db, allowed_minutes, override)
    except exc.IntegrityError:
        sys.exit(1)
    except IOError:
        print("WARNING: You do not have access to set allowed minutes from CLI."
              " Please use website to set that.")
        exit()
# Script entry point.
if __name__ == "__main__":
    main()
| {
"content_hash": "bbfc9ec7763bd365d33f94c81d4c7b13",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 105,
"avg_line_length": 35.46,
"alnum_prop": 0.6080090242526791,
"repo_name": "Submitty/Submitty",
"id": "c04638fb5e09b9b652607158e5bc066dd4c09da2",
"size": "3570",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "bin/set_allowed_mins.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8450"
},
{
"name": "C++",
"bytes": "496998"
},
{
"name": "CMake",
"bytes": "1561"
},
{
"name": "CSS",
"bytes": "210295"
},
{
"name": "HTML",
"bytes": "799796"
},
{
"name": "Java",
"bytes": "3828"
},
{
"name": "JavaScript",
"bytes": "981630"
},
{
"name": "PHP",
"bytes": "3103857"
},
{
"name": "PLpgSQL",
"bytes": "122825"
},
{
"name": "Python",
"bytes": "1589891"
},
{
"name": "Shell",
"bytes": "205161"
},
{
"name": "TeX",
"bytes": "21960"
},
{
"name": "Twig",
"bytes": "1239136"
},
{
"name": "TypeScript",
"bytes": "17328"
}
],
"symlink_target": ""
} |
import traceback
class MSSQLEXEC:
    """Executes operating-system commands on a MSSQL server via xp_cmdshell."""
    def __init__(self, connection):
        # `connection` is an already-authenticated MSSQL connection; it must
        # expose sql_query()/printReplies()/printRows() and the internal
        # _MSSQL__rowsPrinter used below (impacket-style API) -- TODO confirm
        # against callers.
        self.mssql_conn = connection
        self.outputBuffer = ''
    def execute(self, command, output=False):
        """Run `command` through xp_cmdshell; return the captured output.

        xp_cmdshell is enabled before the call and disabled again afterwards.
        When `output` is False the buffer is left untouched (empty string on a
        fresh instance).  Any exception is printed, not raised, in which case
        None is returned.
        """
        try:
            self.enable_xp_cmdshell()
            self.mssql_conn.sql_query("exec master..xp_cmdshell '{}'".format(command))
            if output:
                self.mssql_conn.printReplies()
                # Widen the first column so long output lines are not truncated.
                self.mssql_conn.colMeta[0]['TypeData'] = 80*2
                self.mssql_conn.printRows()
                # Pull back what the rows printer collected (private impacket API).
                self.outputBuffer = self.mssql_conn._MSSQL__rowsPrinter.getMessage()
                # Drop the first two lines (presumably header noise) from the output.
                if len(self.outputBuffer):
                    self.outputBuffer = self.outputBuffer.split('\n', 2)[2]
            self.disable_xp_cmdshell()
            return self.outputBuffer
        except Exception:
            traceback.print_exc()
    def enable_xp_cmdshell(self):
        # 'show advanced options' must be enabled before xp_cmdshell can be toggled.
        self.mssql_conn.sql_query("exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;exec master.dbo.sp_configure 'xp_cmdshell', 1;RECONFIGURE;")
    def disable_xp_cmdshell(self):
        # Restore both settings to their locked-down defaults.
        self.mssql_conn.sql_query("exec sp_configure 'xp_cmdshell', 0 ;RECONFIGURE;exec sp_configure 'show advanced options', 0 ;RECONFIGURE;")
"content_hash": "4110d1cc947ebbd3f9a67d853e0f6dc5",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 162,
"avg_line_length": 38.15625,
"alnum_prop": 0.6134316134316135,
"repo_name": "Waffle-Wrath/CrackMapExec",
"id": "cda4a09d46369933251ed773a92b7336e7a0d244",
"size": "1221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cme/protocols/mssql/mssqlexec.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "232"
},
{
"name": "PowerShell",
"bytes": "228779"
},
{
"name": "Python",
"bytes": "328025"
}
],
"symlink_target": ""
} |
import itertools
from typing import Optional
import openfermion
from openfermion.utils._testing_utils import random_interaction_operator
def random_interaction_operator_term(
        order: int,
        real: bool = True,
        seed: Optional[int] = None,
) -> openfermion.InteractionOperator:
    """Generates a random interaction operator with non-zero coefficients only
    on terms corresponding to the given number of unique orbitals.

    The number of orbitals is equal to the given order.

    Args:
        order: How many unique orbitals the non-zero terms should correspond to.
        real: Whether or not the coefficients should be real. Defaults to True.
        seed: The seed. If None (default), uses np.random.
    """
    if order > 4:
        # Terms have at most four indices, so no term can touch more than
        # four distinct orbitals; return the zero operator.
        return openfermion.InteractionOperator.zero(order)

    n_orbitals = order
    operator = random_interaction_operator(n_orbitals, real=real, seed=seed)
    operator.constant = 0

    # Zero every coefficient whose index tuple does not touch exactly
    # `order` distinct orbitals.
    for tensor, n_indices in ((operator.one_body_tensor, 2),
                              (operator.two_body_tensor, 4)):
        for index_tuple in itertools.product(range(n_orbitals), repeat=n_indices):
            if len(set(index_tuple)) != order:
                tensor[index_tuple] = 0

    return operator
| {
"content_hash": "0c4616bcb7855ddff0984f1d40177601",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 80,
"avg_line_length": 32.25,
"alnum_prop": 0.6844961240310078,
"repo_name": "quantumlib/OpenFermion-Cirq",
"id": "8080d4427a64e6842f368f6525aaa3219c136b42",
"size": "1853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openfermioncirq/testing/random.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "59276"
},
{
"name": "Python",
"bytes": "658631"
},
{
"name": "Shell",
"bytes": "27150"
}
],
"symlink_target": ""
} |
import pretend
import pytest
from pyramid.httpexceptions import HTTPBadRequest, HTTPMovedPermanently
from warehouse import redirects
class TestRedirectView:
    def test_redirect_view(self):
        # The target template can interpolate both matchdict entries and
        # request attributes.
        view = redirects.redirect_view_factory("/{wat}/{_request.method}")
        fake_request = pretend.stub(method="GET", matchdict={"wat": "the-thing"})

        response = view(fake_request)

        assert isinstance(response, HTTPMovedPermanently)
        assert response.headers["Location"] == "/the-thing/GET"

    def test_redirect_view_raises_for_invalid_chars(self):
        view = redirects.redirect_view_factory("/{wat}/{_request.method}")
        # A control character in the matched segment must be rejected.
        fake_request = pretend.stub(method="GET", matchdict={"wat": "the-thing\n"})

        with pytest.raises(
            HTTPBadRequest, match="URL may not contain control characters"
        ):
            view(fake_request)
def test_add_redirect(monkeypatch):
    # Stub out the view factory so we can observe how add_redirect wires it up.
    fake_view = pretend.stub()
    fake_view_factory = pretend.call_recorder(lambda target, redirect: fake_view)
    monkeypatch.setattr(redirects, "redirect_view_factory", fake_view_factory)

    config = pretend.stub(
        add_route=pretend.call_recorder(lambda name, route, **kw: None),
        add_view=pretend.call_recorder(lambda view, route_name: None),
    )

    source = "/the/{thing}/"
    target = "/other/{thing}/"
    redirect = pretend.stub()
    kwargs = {"redirect": redirect}

    redirects.add_redirect(config, source, target, **kwargs)

    expected_route_name = "warehouse.redirects." + source + str(kwargs)
    assert config.add_route.calls == [
        pretend.call(expected_route_name, source, **kwargs)
    ]
    assert config.add_view.calls == [
        pretend.call(fake_view, route_name=expected_route_name)
    ]
    assert fake_view_factory.calls == [pretend.call(target, redirect=redirect)]
def test_includeme():
    # includeme should register the add_redirect directive on the configurator.
    configurator = pretend.stub(
        add_directive=pretend.call_recorder(lambda n, fn, action_wrap: None)
    )

    redirects.includeme(configurator)

    assert configurator.add_directive.calls == [
        pretend.call("add_redirect", redirects.add_redirect, action_wrap=False)
    ]
| {
"content_hash": "91ffe122595c5393bc67c41770b41933",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 85,
"avg_line_length": 32.953125,
"alnum_prop": 0.6614509246088194,
"repo_name": "pypa/warehouse",
"id": "6070d62d50c9daace087690440a18be43804ba24",
"size": "2650",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/test_redirects.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "676"
},
{
"name": "Dockerfile",
"bytes": "6745"
},
{
"name": "HCL",
"bytes": "42"
},
{
"name": "HTML",
"bytes": "663799"
},
{
"name": "JavaScript",
"bytes": "128585"
},
{
"name": "Makefile",
"bytes": "5068"
},
{
"name": "Mako",
"bytes": "2040"
},
{
"name": "Procfile",
"bytes": "527"
},
{
"name": "Python",
"bytes": "3315335"
},
{
"name": "SCSS",
"bytes": "205844"
},
{
"name": "Shell",
"bytes": "9424"
},
{
"name": "YARA",
"bytes": "9079"
}
],
"symlink_target": ""
} |
# Authors of this module.
__authors__ = [
  '"Leo (Chong Liu)" <HiddenPython@gmail.com>',
  ]
import datetime
import httplib
from google.appengine.api import users
from google.appengine.ext import db
from soc.logic.models.host import logic as host_logic
from soc.logic.models.sponsor import logic as sponsor_logic
from soc.logic.models.user import logic as user_logic
from soc.modules.gsoc.logic.models.mentor import logic as mentor_logic
from soc.modules.gsoc.logic.models.organization import logic \
as gsoc_organization_logic
from soc.modules.gsoc.logic.models.program import logic as gsoc_program_logic
from soc.modules.gsoc.logic.models.timeline import logic as gsoc_timeline_logic
from soc.modules.gsoc.logic.models.student import logic as student_logic
from soc.modules.gsoc.logic.models.student_proposal import logic \
as student_proposal_logic
from tests.test_utils import DjangoTestCase
from tests.test_utils import MailTestCase
from tests.test_utils import TaskQueueTestCase
class AcceptProposalsTest(DjangoTestCase, TaskQueueTestCase, MailTestCase):
"""Tests related to soc.modules.gsoc.tasks.accept_proposals.
"""
  def setUp(self):
    """Set up required for the task tests.

    Builds a complete fixture graph in the datastore: a sponsor (with its
    founding user), a program and its timeline, three organizations (one of
    which, aa_org, only serves as a query-cursor stub), a mentor, three
    students and three student proposals with scores 90, 100 and 10.
    """
    # Setup TaskQueueTestCase and MailTestCase first
    super(AcceptProposalsTest, self).setUp()
    # Create a user for the founder of sponsor
    email = "a_sponsor@example.com"
    account = users.User(email=email)
    link_id = 'a_sponsor_user'
    name = 'A Sponsor User'
    sponsor_user_properties = {
        'account': account,
        'link_id': link_id,
        'name': name,
        }
    sponsor_user = user_logic.updateOrCreateFromFields(sponsor_user_properties)
    # Create a sponsor
    link_id = 'a_sponsor'
    name = link_id
    founder = 'a_founder'
    phone = '01234567'
    contact_postalcode = 'A postalcode'
    description = 'A description'
    contact_country = 'United States'
    short_name = 'AS'
    contact_city = 'A city'
    home_page = 'http://www.asponsor.com'
    email = 'email@asponsor.com'
    sponsor_properties = {
        'link_id': link_id,
        'name': name,
        'short_name': short_name,
        'founder': sponsor_user,
        'phone': phone,
        'description': description,
        'contact_country': contact_country,
        'contact_city': 'A City',
        'contact_street': 'A Street',
        'contact_postalcode': contact_postalcode,
        'home_page': home_page,
        'email': email,
        'status': 'active',
        }
    sponsor = sponsor_logic.updateOrCreateFromFields(sponsor_properties)
    # Create a timeline for a program; the accepted-students deadline is set
    # 10 days in the future so the program is in its pre-announcement phase.
    timeline_properties = {
        'link_id': 'a_program',
        'scope_path': 'a_sponsor',
        'scope': sponsor,
        'accepted_students_announced_deadline': datetime.datetime.now() \
            + datetime.timedelta(10)
        }
    timeline = gsoc_timeline_logic.updateOrCreateFromFields(timeline_properties)
    # Create a program for a_sponsor
    program_properties = {
        'key_name': 'a_sponsor/a_program',
        'link_id': 'a_program',
        'scope': sponsor,
        'scope_path': 'a_sponsor',
        'name': 'A Program 2010',
        'short_name': 'AP2010',
        'group_label': 'AP',
        'description': 'This is the program for AP2010.',
        'apps_tasks_limit': 42,
        'slots': 42,
        'allocations_visible': True,
        'timeline': timeline,
        'status': 'visible',
        }
    # GSoC program logic does not work: error in updatePredefinedOrgTags
    from soc.modules.gsoc.models.program import GSoCProgram
    program = GSoCProgram(**program_properties)
    program.put()
    self.program = program
    # Create an organization for a_program.  NOTE: slots is 1, so only one
    # proposal per organization can be accepted by the accept task.
    organization_properties = {
        'link_id': 'an_org',
        'name': 'An Organization',
        'short_name': 'AO',
        'scope_path': 'a_sponsor/a_program',
        'scope': program,
        'founder': sponsor_user,
        'home_page': 'http://www.an_org.com',
        'phone': '1-555-2222',
        'description': 'An Organization',
        'license_name': 'Apache License',
        'ideas': 'http://www.an_org.com/ideas',
        'contact_country': contact_country,
        'contact_city': 'A City',
        'contact_street': 'A Street',
        'contact_postalcode': contact_postalcode,
        'home_page': home_page,
        'email': email,
        'slots': 1,
        'status': 'active',
        }
    organization = gsoc_organization_logic.updateOrCreateFromFields(
        organization_properties)
    self.organization = organization
    # Create another organization for a_program
    organization_properties.update({
        'link_id': 'another_org',
        })
    another_organization = gsoc_organization_logic.updateOrCreateFromFields(
        organization_properties)
    # Create an organization to serve as cursor sub for a_program, which should
    # come as the first result of query
    organization_properties.update({
        'link_id': 'aa_org',
        })
    stub_organization = gsoc_organization_logic.updateOrCreateFromFields(
        organization_properties)
    self.stub_organization = stub_organization
    # Create a user for all roles except sponsor
    email = "a_role_user@example.com"
    account = users.User(email=email)
    link_id = 'a_role_user'
    name = 'A Role User'
    properties = {
        'account': account,
        'link_id': link_id,
        'name': name,
        }
    key_name = user_logic.getKeyNameFromFields(properties)
    role_user = user_logic.updateOrCreateFromKeyName(properties, key_name)
    # Create a mentor for an_org
    mentor_properties = sponsor_properties.copy()
    mentor_properties.update({
        'link_id': 'a_mentor',
        'scope_path': organization.scope_path + '/' + organization.link_id,
        'scope': organization,
        'program': program,
        'given_name': 'A',
        'surname': 'Mentor',
        'res_country': 'United States',
        'res_city': 'A City',
        'res_street': 'A Street',
        'res_postalcode': '12345',
        'birth_date': db.DateProperty.now(),
        'user': role_user,
        'email': 'a_mentor@email.com',
        })
    mentor = mentor_logic.updateOrCreateFromFields(mentor_properties)
    self.mentor = mentor
    # Create a student for a_program
    student_properties = mentor_properties.copy()
    student_properties.update({
        'link_id': 'a_student',
        'scope_path': program.scope_path + '/' + program.link_id,
        'scope': program,
        'program': program,
        'given_name': 'A',
        'surname': 'Student',
        'major': 'A Major',
        'name_on_documents': 'A Name on Documents',
        'publish_location': True,
        'blog': 'http://www.ablog.com/',
        'home_page': 'http://www.ahomepage.com/',
        'email': 'a_student@email.com',
        'photo_url': 'http://www.astudent.com/aphoto.png',
        'expected_graduation': 2011,
        'school_country': 'United States',
        'school_name': 'A School',
        'tshirt_size': 'XS',
        'tshirt_style': 'male',
        'degree': 'Undergraduate',
        'phone': '1650253000',
        'can_we_contact_you': True,
        'program_knowledge': 'I heard about this program through a friend.'
        })
    student = student_logic.updateOrCreateFromFields(student_properties)
    self.student = student
    # Create another student for a_program
    student_properties.update({
        'link_id': 'another_student',
        'email': 'another_student@email.com',
        })
    another_student = student_logic.updateOrCreateFromFields(student_properties)
    self.another_student = another_student
    # Create a third student for a_program
    student_properties.update({
        'link_id': 'third_student',
        'email': 'third_student@email.com',
        })
    third_student = student_logic.updateOrCreateFromFields(student_properties)
    self.third_student = third_student
    # Create a student proposal to an_org for a_student (score 90, the lower
    # of the two proposals submitted to an_org).
    student_proposal_properties = {
        'link_id': 'a_proposal',
        'scope_path': student.scope_path + '/' + student.link_id,
        'scope': student,
        'title': 'A Proposal Title',
        'abstract': 'A Proposal Abstract',
        'content': 'A Proposal Content',
        'additional_info': 'http://www.a_proposal.com',
        'mentor': mentor,
        'status': 'pending',
        'org': organization,
        'program': program,
        'score': 90,
        }
    self.proposal = student_proposal_logic.updateOrCreateFromFields(
        student_proposal_properties)
    # Create another student proposal to an_org for another_student
    student_proposal_properties.update({
        'link_id': 'another_proposal',
        'scope_path': another_student.scope_path + '/' + another_student.link_id,
        'scope': another_student,
        'score': 100,
        })
    self.another_proposal = student_proposal_logic.updateOrCreateFromFields(
        student_proposal_properties)
    # Create a third student proposal to another_org for third_student
    student_proposal_properties.update({
        'link_id': 'third_proposal',
        'scope_path': third_student.scope_path + '/' + third_student.link_id,
        'scope': third_student,
        'org': another_organization,
        'score': 10,
        })
    student_proposal_logic.updateOrCreateFromFields(student_proposal_properties)
def testConvertProposalsThroughPostWithoutCorrectXsrfToken(self):
"""Tests that converting proposals is forbidden without correct XSRF token.
Without a correct XSRF token, the attempt to convert proposals is forbidden.
"""
url = '/tasks/accept_proposals/main'
postdata = {'programkey': self.program.key().name(),
'orgkey': self.stub_organization.key().name()}
response = self.client.post(url, postdata)
self.assertEqual(response.status_code, httplib.FORBIDDEN)
def testConvertProposalsThroughPostWithCorrectXsrfToken(self):
"""Tests that tasks for converting proposals spawned with correct token.
Through HTTP POST with correct XSRF token, proposals of all
organizations which have a key equal to or more than 'orgkey' are converted:
tasks for converting them (one task for each organization) are spawned.
"""
url = '/tasks/accept_proposals/main'
postdata = {'programkey': self.program.key().name(),
'orgkey': self.stub_organization.key().name()}
xsrf_token = self.getXsrfToken(url, data=postdata)
postdata.update(xsrf_token=xsrf_token)
response = self.client.post(url, postdata)
self.assertEqual(response.status_code, httplib.OK)
task_url = "/tasks/accept_proposals/accept"
self.assertTasksInQueue(n=3, url=task_url)
def testAcceptProposalsThroughPostWithoutCorrectXsrfToken(self):
"""Tests that accepting proposals is forbidden without correct XSRF token.
Without correct XSRF token, the attempt to accept proposals
for an organization is forbidden.
"""
url = '/tasks/accept_proposals/accept'
next_path = "/tasks/accept_proposals/reject"
postdata = {'orgkey': self.organization.key().name(),
"timelimit": 20000,
"nextpath": next_path}
response = self.client.post(url, postdata)
self.assertEqual(response.status_code, httplib.FORBIDDEN)
def testAcceptProposalsThroughPostWithCorrectXsrfToken(self):
"""Tests that proposals can be accepted with a correct XSRF token.
Through HTTP POST with correct XSRF token, proposals of
an organization with higher score are accepted if the organization has
enough slots, confirmation emails are sent to students, and a task of
rejecting the remaining proposals is spawned.
"""
self.assertEqual(self.proposal.status, 'pending')
self.assertEqual(self.another_proposal.status, 'pending')
url = '/tasks/accept_proposals/accept'
next_path = "/tasks/accept_proposals/reject"
postdata = {'orgkey': self.organization.key().name(),
"timelimit": 20000,
"nextpath": next_path}
xsrf_token = self.getXsrfToken(url, data=postdata)
postdata.update(xsrf_token=xsrf_token)
response = self.client.post(url, postdata)
self.assertEqual(response.status_code, httplib.OK)
self.assertEqual(db.get(self.proposal.key()).status, 'pending')
self.assertEmailNotSent(to=self.student.email)
self.assertEqual(db.get(self.another_proposal.key()).status, 'accepted')
self.assertEmailSent(to=self.another_student.email, html='accepted')
task_url = next_path
self.assertTasksInQueue(n=1, url=task_url)
def testRejectProposalsThroughPostWithoutCorrectXsrfToken(self):
"""Tests that rejecting proposals is forbidden without correct XSRF token.
Without correct XSRF token, the attempt to reject proposals
for an organization is forbidden.
"""
url = '/tasks/accept_proposals/reject'
postdata = {'orgkey': self.organization.key().name(), "timelimit": 20000}
response = self.client.post(url, postdata)
self.assertEqual(response.status_code, httplib.FORBIDDEN)
def testRejectProposalsThroughPostWithCorrectXsrfToken(self):
    """Tests that proposals can be rejected with a correct XSRF token.
    Through HTTP POST with correct XSRF token, the remaining
    proposals whose status is still 'pending' are rejected and confirmation
    emails are sent to students.
    """
    # Both fixture proposals start out pending.
    self.assertEqual(self.proposal.status, 'pending')
    self.assertEqual(self.another_proposal.status, 'pending')
    url = '/tasks/accept_proposals/reject'
    # timelimit should be long enough; otherwise not all tasks can be completed.
    postdata = {'orgkey': self.organization.key().name(),
                "timelimit": 20000}
    xsrf_token = self.getXsrfToken(url, data=postdata)
    postdata.update(xsrf_token=xsrf_token)
    response = self.client.post(url, postdata)
    self.assertEqual(response.status_code, httplib.OK)
    # Both pending proposals are rejected and both students notified.
    self.assertEqual(db.get(self.proposal.key()).status, 'rejected')
    self.assertEmailSent(to=self.student.email, html='not selected')
    self.assertEqual(db.get(self.another_proposal.key()).status, 'rejected')
    self.assertEmailSent(to=self.another_student.email, html='not selected')
def testRejectProposalsShortTimelimitThroughPostWithCorrectXsrfToken(self):
    """Tests that not all tasks can be completed if timelimit is too short.
    Through HTTP POST with correct XSRF token, if timelimit is
    too short, not all tasks can be completed; in the extreme case, when
    timelimit is 0, the status will not be changed and a confirmation email
    will not be sent; however, a clone task will be spawned.
    """
    self.assertEqual(self.proposal.status, 'pending')
    self.assertEqual(self.another_proposal.status, 'pending')
    url = '/tasks/accept_proposals/reject'
    # A timelimit of 0 gives the handler no time to process any proposal.
    postdata = {'orgkey': self.organization.key().name(), "timelimit": 0}
    xsrf_token = self.getXsrfToken(url, data=postdata)
    postdata.update(xsrf_token=xsrf_token)
    response = self.client.post(url, postdata)
    self.assertEqual(response.status_code, httplib.OK)
    # Nothing was processed: statuses unchanged, no mail sent.
    self.assertEqual(db.get(self.proposal.key()).status, 'pending')
    self.assertEmailNotSent(to=self.student.email, html='not selected')
    self.assertEqual(db.get(self.another_proposal.key()).status, 'pending')
    self.assertEmailNotSent(to=self.another_student.email, html='not selected')
    # A clone task is re-queued at the same URL to finish the work later.
    task_url = url
    self.assertTasksInQueue(n=1, url=task_url)
| {
"content_hash": "3eb83298c2133c0ee019d04a4da19d32",
"timestamp": "",
"source": "github",
"line_count": 377,
"max_line_length": 80,
"avg_line_length": 40.52785145888594,
"alnum_prop": 0.6745205838078409,
"repo_name": "SRabbelier/Melange",
"id": "b153d1f9a7a65351911fe8246f7467299c51547f",
"size": "15890",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/app/soc/modules/gsoc/tasks/test_accept_proposals.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "400472"
},
{
"name": "C++",
"bytes": "20"
},
{
"name": "Java",
"bytes": "1496"
},
{
"name": "JavaScript",
"bytes": "1623582"
},
{
"name": "PHP",
"bytes": "1032"
},
{
"name": "Perl",
"bytes": "177565"
},
{
"name": "Python",
"bytes": "15317793"
},
{
"name": "Ruby",
"bytes": "59"
},
{
"name": "Shell",
"bytes": "15303"
}
],
"symlink_target": ""
} |
from django.shortcuts import render, get_object_or_404
from rags.models import Rag, Category
def Home(request):
    """Render the home page listing every Rag in the catalogue."""
    return render(
        request,
        'rags/home.pug',
        context={"title": "SomeTitle", "rags": Rag.objects.all()},
    )
def RagDetail(request, pk):
    """Render the detail page for a single Rag.

    Raises Http404 (via get_object_or_404) when no Rag with ``pk`` exists.
    """
    rag = get_object_or_404(Rag, pk=pk)
    # Iterate the related queryset directly: the original guarded with
    # ``images.count() > 0``, which issued an extra COUNT query before
    # iterating.  An empty queryset simply yields an empty list here.
    img_urls = [image.image.url for image in rag.ragimage_set.all()]
    return render(request, "rags/rag_detail.pug",
                  context={"rag": rag, "img_urls": img_urls})
def CategoryDetail(request, pk):
    """Render a category page with its child categories and child rags."""
    category = get_object_or_404(Category, pk=pk)
    context = {
        "rags": category.rag_set.all(),
        "cats": category.category_set.all(),
        "category": category,
    }
    return render(request, "rags/category_detail.pug", context=context)
| {
"content_hash": "7e8a0ca2c38ac4c0cff4b185b7ebacdf",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 120,
"avg_line_length": 30.071428571428573,
"alnum_prop": 0.6543942992874109,
"repo_name": "Intey/rags",
"id": "a4a83f7b5d28457647b11a3634eb1a3256b07f15",
"size": "866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rags/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "873"
},
{
"name": "HTML",
"bytes": "2780"
},
{
"name": "JavaScript",
"bytes": "2430"
},
{
"name": "Python",
"bytes": "10260"
}
],
"symlink_target": ""
} |
# Tells Django which AppConfig class to use when this app is referenced by
# bare module path.  NOTE(review): ``default_app_config`` is deprecated from
# Django 3.2 onward (AppConfig is auto-discovered) -- confirm target version.
default_app_config = 'dartcms.apps.filemanager.apps.FileManagerConfig'
| {
"content_hash": "77cd8084d9c3833d6f9ceffa884438fa",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 70,
"avg_line_length": 72,
"alnum_prop": 0.8194444444444444,
"repo_name": "astrikov-d/dartcms",
"id": "6002859d78b6f88c90cb15d74e7292d362ea05bf",
"size": "72",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dartcms/apps/filemanager/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "628621"
},
{
"name": "HTML",
"bytes": "72527"
},
{
"name": "JavaScript",
"bytes": "816668"
},
{
"name": "Python",
"bytes": "240030"
}
],
"symlink_target": ""
} |
"""
DBSCAN: Density-Based Spatial Clustering of Applications with Noise
"""
# Author: Robert Layton <robertlayton@gmail.com>
# Joel Nothman <joel.nothman@gmail.com>
# Lars Buitinck
#
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from ..base import BaseEstimator, ClusterMixin
from ..utils import check_array, check_consistent_length
from ..neighbors import NearestNeighbors
from ._dbscan_inner import dbscan_inner
def dbscan(X, eps=0.5, min_samples=5, metric='minkowski', metric_params=None,
           algorithm='auto', leaf_size=30, p=2, sample_weight=None, n_jobs=1):
    """Perform DBSCAN clustering from vector array or distance matrix.
    Read more in the :ref:`User Guide <dbscan>`.
    Parameters
    ----------
    X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
            array of shape (n_samples, n_samples)
        A feature array, or array of distances between samples if
        ``metric='precomputed'``.
    eps : float, optional
        The maximum distance between two samples for them to be considered
        as in the same neighborhood.
    min_samples : int, optional
        The number of samples (or total weight) in a neighborhood for a point
        to be considered as a core point. This includes the point itself.
    metric : string, or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string or callable, it must be one of
        the options allowed by metrics.pairwise.pairwise_distances for its
        metric parameter.
        If metric is "precomputed", X is assumed to be a distance matrix and
        must be square. X may be a sparse matrix, in which case only "nonzero"
        elements may be considered neighbors for DBSCAN.
    metric_params : dict, optional
        Additional keyword arguments for the metric function.
        .. versionadded:: 0.19
    algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, optional
        The algorithm to be used by the NearestNeighbors module
        to compute pointwise distances and find nearest neighbors.
        See NearestNeighbors module documentation for details.
    leaf_size : int, optional (default = 30)
        Leaf size passed to BallTree or cKDTree. This can affect the speed
        of the construction and query, as well as the memory required
        to store the tree. The optimal value depends
        on the nature of the problem.
    p : float, optional
        The power of the Minkowski metric to be used to calculate distance
        between points.
    sample_weight : array, shape (n_samples,), optional
        Weight of each sample, such that a sample with a weight of at least
        ``min_samples`` is by itself a core sample; a sample with negative
        weight may inhibit its eps-neighbor from being core.
        Note that weights are absolute, and default to 1.
    n_jobs : int, optional (default = 1)
        The number of parallel jobs to run for neighbors search.
        If ``-1``, then the number of jobs is set to the number of CPU cores.
    Returns
    -------
    core_samples : array [n_core_samples]
        Indices of core samples.
    labels : array [n_samples]
        Cluster labels for each point. Noisy samples are given the label -1.
    Notes
    -----
    See examples/cluster/plot_dbscan.py for an example.
    This implementation bulk-computes all neighborhood queries, which increases
    the memory complexity to O(n.d) where d is the average number of neighbors,
    while original DBSCAN had memory complexity O(n).
    Sparse neighborhoods can be precomputed using
    :func:`NearestNeighbors.radius_neighbors_graph
    <sklearn.neighbors.NearestNeighbors.radius_neighbors_graph>`
    with ``mode='distance'``.
    References
    ----------
    Ester, M., H. P. Kriegel, J. Sander, and X. Xu, "A Density-Based
    Algorithm for Discovering Clusters in Large Spatial Databases with Noise".
    In: Proceedings of the 2nd International Conference on Knowledge Discovery
    and Data Mining, Portland, OR, AAAI Press, pp. 226-231. 1996
    """
    if not eps > 0.0:
        raise ValueError("eps must be positive.")
    X = check_array(X, accept_sparse='csr')
    if sample_weight is not None:
        sample_weight = np.asarray(sample_weight)
        check_consistent_length(X, sample_weight)
    # Calculate neighborhood for all samples. This leaves the original point
    # in, which needs to be considered later (i.e. point i is in the
    # neighborhood of point i. While True, its useless information)
    if metric == 'precomputed' and sparse.issparse(X):
        # Precomputed sparse distances: extract, per row, the column indices
        # whose stored distance is <= eps, without densifying the matrix.
        neighborhoods = np.empty(X.shape[0], dtype=object)
        X.sum_duplicates()  # XXX: modifies X's internals in-place
        X_mask = X.data <= eps
        masked_indices = X.indices.astype(np.intp, copy=False)[X_mask]
        # Running count of kept entries, sampled at each row boundary:
        # i.e. the end offset of every row within masked_indices.
        masked_indptr = np.concatenate(([0], np.cumsum(X_mask)))[X.indptr[1:]]
        # insert the diagonal: a point is its own neighbor, but 0 distance
        # means absence from sparse matrix data
        masked_indices = np.insert(masked_indices, masked_indptr,
                                   np.arange(X.shape[0]))
        # Shift the offsets to account for the inserted diagonal entries.
        masked_indptr = masked_indptr[:-1] + np.arange(1, X.shape[0])
        # split into rows
        neighborhoods[:] = np.split(masked_indices, masked_indptr)
    else:
        neighbors_model = NearestNeighbors(radius=eps, algorithm=algorithm,
                                           leaf_size=leaf_size,
                                           metric=metric,
                                           metric_params=metric_params, p=p,
                                           n_jobs=n_jobs)
        neighbors_model.fit(X)
        # This has worst case O(n^2) memory complexity
        neighborhoods = neighbors_model.radius_neighbors(X, eps,
                                                         return_distance=False)
    # Per-point "density": neighbor count, or total neighbor weight when
    # sample_weight is given.
    if sample_weight is None:
        n_neighbors = np.array([len(neighbors)
                                for neighbors in neighborhoods])
    else:
        n_neighbors = np.array([np.sum(sample_weight[neighbors])
                                for neighbors in neighborhoods])
    # Initially, all samples are noise.
    labels = -np.ones(X.shape[0], dtype=np.intp)
    # A list of all core samples found.
    core_samples = np.asarray(n_neighbors >= min_samples, dtype=np.uint8)
    # Cython routine: expands clusters from core samples, writing labels
    # in place.
    dbscan_inner(core_samples, neighborhoods, labels)
    return np.where(core_samples)[0], labels
class DBSCAN(BaseEstimator, ClusterMixin):
    """Perform DBSCAN clustering from vector array or distance matrix.
    DBSCAN - Density-Based Spatial Clustering of Applications with Noise.
    Finds core samples of high density and expands clusters from them.
    Good for data which contains clusters of similar density.
    Read more in the :ref:`User Guide <dbscan>`.
    Parameters
    ----------
    eps : float, optional
        The maximum distance between two samples for them to be considered
        as in the same neighborhood.
    min_samples : int, optional
        The number of samples (or total weight) in a neighborhood for a point
        to be considered as a core point. This includes the point itself.
    metric : string, or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string or callable, it must be one of
        the options allowed by metrics.pairwise.calculate_distance for its
        metric parameter.
        If metric is "precomputed", X is assumed to be a distance matrix and
        must be square. X may be a sparse matrix, in which case only "nonzero"
        elements may be considered neighbors for DBSCAN.
        .. versionadded:: 0.17
           metric *precomputed* to accept precomputed sparse matrix.
    metric_params : dict, optional
        Additional keyword arguments for the metric function.
        .. versionadded:: 0.19
    algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, optional
        The algorithm to be used by the NearestNeighbors module
        to compute pointwise distances and find nearest neighbors.
        See NearestNeighbors module documentation for details.
    leaf_size : int, optional (default = 30)
        Leaf size passed to BallTree or cKDTree. This can affect the speed
        of the construction and query, as well as the memory required
        to store the tree. The optimal value depends
        on the nature of the problem.
    p : float, optional
        The power of the Minkowski metric to be used to calculate distance
        between points.
    n_jobs : int, optional (default = 1)
        The number of parallel jobs to run.
        If ``-1``, then the number of jobs is set to the number of CPU cores.
    Attributes
    ----------
    core_sample_indices_ : array, shape = [n_core_samples]
        Indices of core samples.
    components_ : array, shape = [n_core_samples, n_features]
        Copy of each core sample found by training.
    labels_ : array, shape = [n_samples]
        Cluster labels for each point in the dataset given to fit().
        Noisy samples are given the label -1.
    Notes
    -----
    See examples/cluster/plot_dbscan.py for an example.
    This implementation bulk-computes all neighborhood queries, which increases
    the memory complexity to O(n.d) where d is the average number of neighbors,
    while original DBSCAN had memory complexity O(n).
    Sparse neighborhoods can be precomputed using
    :func:`NearestNeighbors.radius_neighbors_graph
    <sklearn.neighbors.NearestNeighbors.radius_neighbors_graph>`
    with ``mode='distance'``.
    References
    ----------
    Ester, M., H. P. Kriegel, J. Sander, and X. Xu, "A Density-Based
    Algorithm for Discovering Clusters in Large Spatial Databases with Noise".
    In: Proceedings of the 2nd International Conference on Knowledge Discovery
    and Data Mining, Portland, OR, AAAI Press, pp. 226-231. 1996
    """
    def __init__(self, eps=0.5, min_samples=5, metric='euclidean',
                 metric_params=None, algorithm='auto', leaf_size=30, p=None,
                 n_jobs=1):
        # Store parameters verbatim (scikit-learn convention: no validation
        # or transformation in __init__; validation happens in fit()).
        self.eps = eps
        self.min_samples = min_samples
        self.metric = metric
        self.metric_params = metric_params
        self.algorithm = algorithm
        self.leaf_size = leaf_size
        self.p = p
        self.n_jobs = n_jobs
    def fit(self, X, y=None, sample_weight=None):
        """Perform DBSCAN clustering from features or distance matrix.
        Parameters
        ----------
        X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
                array of shape (n_samples, n_samples)
            A feature array, or array of distances between samples if
            ``metric='precomputed'``.
        sample_weight : array, shape (n_samples,), optional
            Weight of each sample, such that a sample with a weight of at least
            ``min_samples`` is by itself a core sample; a sample with negative
            weight may inhibit its eps-neighbor from being core.
            Note that weights are absolute, and default to 1.
        Returns
        -------
        self : DBSCAN
            The fitted estimator.
        """
        X = check_array(X, accept_sparse='csr')
        # Delegate to the module-level dbscan() function, forwarding every
        # constructor parameter via get_params().
        clust = dbscan(X, sample_weight=sample_weight,
                       **self.get_params())
        self.core_sample_indices_, self.labels_ = clust
        if len(self.core_sample_indices_):
            # fix for scipy sparse indexing issue
            self.components_ = X[self.core_sample_indices_].copy()
        else:
            # no core samples
            self.components_ = np.empty((0, X.shape[1]))
        return self
    def fit_predict(self, X, y=None, sample_weight=None):
        """Performs clustering on X and returns cluster labels.
        Parameters
        ----------
        X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
                array of shape (n_samples, n_samples)
            A feature array, or array of distances between samples if
            ``metric='precomputed'``.
        sample_weight : array, shape (n_samples,), optional
            Weight of each sample, such that a sample with a weight of at least
            ``min_samples`` is by itself a core sample; a sample with negative
            weight may inhibit its eps-neighbor from being core.
            Note that weights are absolute, and default to 1.
        Returns
        -------
        y : ndarray, shape (n_samples,)
            cluster labels
        """
        self.fit(X, sample_weight=sample_weight)
        return self.labels_
| {
"content_hash": "3348a82fc2945c335e5000e573647682",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 79,
"avg_line_length": 41.00970873786408,
"alnum_prop": 0.6414141414141414,
"repo_name": "ldirer/scikit-learn",
"id": "6c7bba5af9f8c9385370413bc4872c1d481cd727",
"size": "12696",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sklearn/cluster/dbscan_.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "451996"
},
{
"name": "C++",
"bytes": "140322"
},
{
"name": "Makefile",
"bytes": "1512"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "7013349"
},
{
"name": "Shell",
"bytes": "19532"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import errno
import itertools
import logging
import os.path
import tempfile
from pip._internal.utils.misc import rmtree
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Optional
logger = logging.getLogger(__name__)
class TempDirectory(object):
    """Owns a temporary directory and cleans it up on request.

    Works both as a plain object and as a context manager; in the latter
    case the directory is removed on exit when ``delete`` is true.

    Attributes:
        path
            Location of the created temporary directory
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)
    """

    def __init__(self, path=None, delete=None, kind="temp"):
        super(TempDirectory, self).__init__()

        # No explicit directory and no explicit delete preference means
        # the directory is ours alone, so default to removing it.
        if path is None and delete is None:
            delete = True
        if path is None:
            path = self._create(kind)

        self._path = path
        self._deleted = False
        self.delete = delete
        self.kind = kind

    @property
    def path(self):
        # type: () -> str
        # Guard against use-after-cleanup.
        assert not self._deleted, (
            "Attempted to access deleted path: {}".format(self._path)
        )
        return self._path

    def __repr__(self):
        return "<{} {!r}>".format(type(self).__name__, self.path)

    def __enter__(self):
        return self

    def __exit__(self, exc, value, tb):
        if self.delete:
            self.cleanup()

    def _create(self, kind):
        """Make the backing directory on disk and return its real path."""
        # realpath because the default tmpdir may itself be a symlink on
        # some systems, which tends to confuse build scripts.
        created = tempfile.mkdtemp(prefix="pip-{}-".format(kind))
        path = os.path.realpath(created)
        logger.debug("Created temporary directory: {}".format(path))
        return path

    def cleanup(self):
        """Remove the directory (if present) and mark this object unusable."""
        self._deleted = True
        if not os.path.exists(self._path):
            return
        rmtree(self._path)
class AdjacentTempDirectory(TempDirectory):
    """Helper class that creates a temporary directory adjacent to a real one.
    Attributes:
        original
            The original directory to create a temp directory for.
        path
            After calling create() or entering, contains the full
            path to the temporary directory.
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)
    """
    # The characters that may be used to name the temp directory
    # We always prepend a ~ and then rotate through these until
    # a usable name is found.
    # pkg_resources raises a different error for .dist-info folder
    # with leading '-' and invalid metadata
    LEADING_CHARS = "-~.=%0123456789"
    def __init__(self, original, delete=None):
        # Strip trailing separators so os.path.split() in _create yields
        # the directory's own name, not an empty component.
        self.original = original.rstrip('/\\')
        super(AdjacentTempDirectory, self).__init__(delete=delete)
    @classmethod
    def _generate_names(cls, name):
        """Generates a series of temporary names.
        The algorithm replaces the leading characters in the name
        with ones that are valid filesystem characters, but are not
        valid package names (for both Python and pip definitions of
        package).
        """
        # First pass: same-length candidates.  Replace the first ``i``
        # characters of ``name`` with '~' plus i-1 characters drawn (with
        # repetition) from LEADING_CHARS, keeping the rest of the name.
        for i in range(1, len(name)):
            for candidate in itertools.combinations_with_replacement(
                    cls.LEADING_CHARS, i - 1):
                new_name = '~' + ''.join(candidate) + name[i:]
                if new_name != name:
                    yield new_name
        # If we make it this far, we will have to make a longer name
        for i in range(len(cls.LEADING_CHARS)):
            for candidate in itertools.combinations_with_replacement(
                    cls.LEADING_CHARS, i):
                new_name = '~' + ''.join(candidate) + name
                if new_name != name:
                    yield new_name
    def _create(self, kind):
        """Create the temp directory next to ``self.original``.

        Tries each generated sibling name until mkdir succeeds; falls back
        to a regular system temp directory if every candidate is taken.
        """
        root, name = os.path.split(self.original)
        for candidate in self._generate_names(name):
            path = os.path.join(root, candidate)
            try:
                os.mkdir(path)
            except OSError as ex:
                # Continue if the name exists already
                if ex.errno != errno.EEXIST:
                    raise
            else:
                path = os.path.realpath(path)
                break
        else:
            # Final fallback on the default behavior.
            path = os.path.realpath(
                tempfile.mkdtemp(prefix="pip-{}-".format(kind))
            )
        logger.debug("Created temporary directory: {}".format(path))
        return path
| {
"content_hash": "1a6ae36f1fcfbcb9ab576bd3d123230a",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 79,
"avg_line_length": 32.02958579881657,
"alnum_prop": 0.5896914834657306,
"repo_name": "rouge8/pip",
"id": "77d40be6da34417ee4710b7ded6933ff16c15b59",
"size": "5521",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "src/pip/_internal/utils/temp_dir.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Gherkin",
"bytes": "305"
},
{
"name": "HTML",
"bytes": "2625"
},
{
"name": "Python",
"bytes": "1567511"
},
{
"name": "Shell",
"bytes": "2095"
}
],
"symlink_target": ""
} |
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the ``Enquiry.creator`` field to ``created_by``."""
    dependencies = [("members", "0041_auto_20190513_1803")]
    operations = [
        migrations.RenameField(
            model_name="enquiry", old_name="creator", new_name="created_by"
        )
    ]
| {
"content_hash": "e9802d2deeed2d2ece4a05ddbb78baf9",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 75,
"avg_line_length": 23.25,
"alnum_prop": 0.6344086021505376,
"repo_name": "ianastewart/cwltc-admin",
"id": "9f3d34e3775c4c3bfa11965f0aef4fd59fd853f5",
"size": "328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "members/migrations/0042_auto_20190513_1806.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "945975"
},
{
"name": "Dockerfile",
"bytes": "882"
},
{
"name": "HTML",
"bytes": "526368"
},
{
"name": "JavaScript",
"bytes": "843481"
},
{
"name": "Python",
"bytes": "8389886"
},
{
"name": "Shell",
"bytes": "1023"
}
],
"symlink_target": ""
} |
from datetime import datetime
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import FieldDoesNotExist
from django.core.exceptions import ImproperlyConfigured
from model_utils.managers import InheritanceCastManager, QueryManager
from model_utils.fields import AutoCreatedField, AutoLastModifiedField, \
StatusField, MonitorField
class InheritanceCastModel(models.Model):
    """
    An abstract base class that provides a ``real_type`` FK to ContentType.
    For use in trees of inherited models, to be able to downcast
    parent instances to their child types.
    """
    # Set automatically on first save; hidden from forms (editable=False),
    # nullable so pre-existing rows remain valid.
    real_type = models.ForeignKey(ContentType, editable=False, null=True)
    objects = InheritanceCastManager()
    def save(self, *args, **kwargs):
        # Stamp the concrete type only on first save (no primary key yet),
        # so the recorded type is the class the row was created as.
        if not self.id:
            self.real_type = self._get_real_type()
        super(InheritanceCastModel, self).save(*args, **kwargs)
    def _get_real_type(self):
        # ContentType of this instance's concrete (leaf) class.
        return ContentType.objects.get_for_model(type(self))
    def cast(self):
        # Re-fetch this row as an instance of its recorded concrete class.
        return self.real_type.get_object_for_this_type(pk=self.pk)
    class Meta:
        abstract = True
class TimeStampedModel(models.Model):
    """
    An abstract base class model that provides self-updating
    ``created`` and ``modified`` fields.
    """
    # Set once at creation time.
    created = AutoCreatedField(_('created'))
    # Refreshed on every save.
    modified = AutoLastModifiedField(_('modified'))
    class Meta:
        abstract = True
class TimeFramedModel(models.Model):
    """
    An abstract base class model that provides ``start``
    and ``end`` fields to record a timeframe.
    """
    # Both ends are optional: a null bound means the timeframe is open
    # on that side (see add_timeframed_query_manager below in this module).
    start = models.DateTimeField(_('start'), null=True, blank=True)
    end = models.DateTimeField(_('end'), null=True, blank=True)
    class Meta:
        abstract = True
class StatusModel(models.Model):
    """
    An abstract base class model with a ``status`` field that
    automatically uses a ``STATUS`` class attribute of choices, a
    ``status_changed`` date-time field that records when ``status``
    was last modified, and an automatically-added manager for each
    status that returns objects with that status only.
    """
    # Choices are taken from the subclass's STATUS attribute.
    status = StatusField(_('status'))
    # Updated automatically whenever ``status`` changes value.
    status_changed = MonitorField(_('status changed'), monitor='status')
    class Meta:
        abstract = True
def add_status_query_managers(sender, **kwargs):
    """
    Add a Querymanager for each status item dynamically.

    Connected to the ``class_prepared`` signal; no-op for models that
    are not StatusModel subclasses.
    """
    if not issubclass(sender, StatusModel):
        return
    for value, name in getattr(sender, 'STATUS', ()):
        try:
            # A real field with the status's name would be shadowed by the
            # generated manager -- treat that as a configuration error.
            sender._meta.get_field(name)
            raise ImproperlyConfigured("StatusModel: Model '%s' has a field "
                                       "named '%s' which conflicts with a "
                                       "status of the same name."
                                       % (sender.__name__, name))
        except FieldDoesNotExist:
            # Expected case: no clash, safe to attach the manager.
            pass
        sender.add_to_class(value, QueryManager(status=value))
def add_timeframed_query_manager(sender, **kwargs):
    """
    Add a QueryManager for a specific timeframe.

    Connected to the ``class_prepared`` signal; no-op for models that
    are not TimeFramedModel subclasses.
    """
    if not issubclass(sender, TimeFramedModel):
        return
    try:
        # A real field named 'timeframed' would be shadowed by the manager.
        sender._meta.get_field('timeframed')
        raise ImproperlyConfigured("Model '%s' has a field named "
                                   "'timeframed' which conflicts with "
                                   "the TimeFramedModel manager."
                                   % sender.__name__)
    except FieldDoesNotExist:
        pass
    # "Current" rows: started (or no start) AND not yet ended (or no end).
    # datetime.now is passed as a callable so it is evaluated per query.
    sender.add_to_class('timeframed', QueryManager(
        (models.Q(start__lte=datetime.now) | models.Q(start__isnull=True)) &
        (models.Q(end__gte=datetime.now) | models.Q(end__isnull=True))
    ))
# Attach the dynamic managers to every model class as it is prepared.
models.signals.class_prepared.connect(add_status_query_managers)
models.signals.class_prepared.connect(add_timeframed_query_manager)
| {
"content_hash": "50b5e109dffc95e6f2f51135c41ee090",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 77,
"avg_line_length": 32.825,
"alnum_prop": 0.6430566133536431,
"repo_name": "gregmuellegger/django-model-utils",
"id": "ad265b65943e31b277f12a657a226364b2a25ed5",
"size": "3939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model_utils/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "39817"
}
],
"symlink_target": ""
} |
import os
import sys
import stat
import string
import textwrap
import argparse
import shutil
import getpass
import subprocess
from collections import OrderedDict
import yaml
from wlauto import ExtensionLoader, Command, settings
from wlauto.exceptions import CommandError, ConfigError
from wlauto.utils.cli import init_argument_parser
from wlauto.utils.misc import (capitalize, check_output,
ensure_file_directory_exists as _f, ensure_directory_exists as _d)
from wlauto.utils.types import identifier
from wlauto.utils.doc import format_body
__all__ = ['create_workload']
TEMPLATES_DIR = os.path.join(os.path.dirname(__file__), 'templates')
UIAUTO_BUILD_SCRIPT = """#!/bin/bash
class_dir=bin/classes/com/arm/wlauto/uiauto
base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
mkdir -p $$class_dir
cp $$base_class $$class_dir
ant build
if [[ -f bin/${package_name}.jar ]]; then
cp bin/${package_name}.jar ..
fi
"""
class CreateSubcommand(object):
    """Base class for ``create`` subcommands.

    Subclasses set the class attributes below and override
    ``initialize()`` to register their own arguments on ``self.parser``.
    """
    # Subcommand name as typed on the command line.
    name = None
    # Short help shown in the subcommand list (falls back to description).
    help = None
    usage = None
    # Long description shown in the subcommand's own --help output.
    description = None
    epilog = None
    # Optional argparse formatter class override.
    formatter_class = None
    def __init__(self, logger, subparsers):
        self.logger = logger
        self.group = subparsers
        parser_params = dict(help=(self.help or self.description), usage=self.usage,
                             description=format_body(textwrap.dedent(self.description), 80),
                             epilog=self.epilog)
        if self.formatter_class:
            parser_params['formatter_class'] = self.formatter_class
        self.parser = subparsers.add_parser(self.name, **parser_params)
        init_argument_parser(self.parser) # propagate top-level options
        self.initialize()
    def initialize(self):
        # Hook for subclasses to register their arguments.
        pass
class CreateWorkloadSubcommand(CreateSubcommand):
    """``create workload``: scaffold a new workload from a template."""
    name = 'workload'
    description = '''Create a new workload. By default, a basic workload template will be
                     used but you can use options to specify a different template.'''
    def initialize(self):
        self.parser.add_argument('name', metavar='NAME',
                                 help='Name of the workload to be created')
        self.parser.add_argument('-p', '--path', metavar='PATH', default=None,
                                 help='The location at which the workload will be created. If not specified, ' +
                                      'this defaults to "~/.workload_automation/workloads".')
        self.parser.add_argument('-f', '--force', action='store_true',
                                 help='Create the new workload even if a workload with the specified ' +
                                      'name already exists.')
        # The template flags are mutually exclusive: a workload is created
        # from exactly one template.
        template_group = self.parser.add_mutually_exclusive_group()
        template_group.add_argument('-A', '--android-benchmark', action='store_true',
                                    help='Use android benchmark template. This template allows you to specify ' +
                                         ' an APK file that will be installed and run on the device. You should ' +
                                         ' place the APK file into the workload\'s directory at the same level ' +
                                         'as the __init__.py.')
        template_group.add_argument('-U', '--ui-automation', action='store_true',
                                    help='Use UI automation template. This template generates a UI automation ' +
                                         'Android project as well as the Python class. This a more general ' +
                                         'version of the android benchmark template that makes no assumptions ' +
                                         'about the nature of your workload, apart from the fact that you need ' +
                                         'UI automation. If you need to install an APK, start an app on device, ' +
                                         'etc., you will need to do that explicitly in your code.')
        template_group.add_argument('-B', '--android-uiauto-benchmark', action='store_true',
                                    help='Use android uiauto benchmark template. This generates a UI automation ' +
                                         'project as well as a Python class. This template should be used ' +
                                         'if you have a APK file that needs to be run on the device. You ' +
                                         'should place the APK file into the workload\'s directory at the ' +
                                         'same level as the __init__.py.')
    def execute(self, args): # pylint: disable=R0201
        where = args.path or 'local'
        check_name = not args.force
        # Map the mutually exclusive template flags onto a template kind.
        if args.android_benchmark:
            kind = 'android'
        elif args.ui_automation:
            kind = 'uiauto'
        elif args.android_uiauto_benchmark:
            kind = 'android_uiauto'
        else:
            kind = 'basic'
        try:
            create_workload(args.name, kind, where, check_name)
        except CommandError, e:
            print "ERROR:", e
class CreatePackageSubcommand(CreateSubcommand):
    """``create package``: scaffold an empty WA extensions package."""
    name = 'package'
    description = '''Create a new empty Python package for WA extensions. On installation,
                     this package will "advertise" itself to WA so that Extensions with in it will
                     be loaded by WA when it runs.'''
    def initialize(self):
        self.parser.add_argument('name', metavar='NAME',
                                 help='Name of the package to be created')
        self.parser.add_argument('-p', '--path', metavar='PATH', default=None,
                                 help='The location at which the new pacakge will be created. If not specified, ' +
                                      'current working directory will be used.')
        self.parser.add_argument('-f', '--force', action='store_true',
                                 help='Create the new package even if a file or directory with the same name '
                                      'already exists at the specified location.')
    def execute(self, args): # pylint: disable=R0201
        package_dir = args.path or os.path.abspath('.')
        template_path = os.path.join(TEMPLATES_DIR, 'setup.template')
        self.create_extensions_package(package_dir, args.name, template_path, args.force)
    def create_extensions_package(self, location, name, setup_template_path, overwrite=False):
        """Create ``<location>/<name>/`` with a rendered setup.py and an
        inner ``<name>/`` package directory containing an empty __init__.py.
        """
        package_path = os.path.join(location, name)
        if os.path.exists(package_path):
            if overwrite:
                self.logger.info('overwriting existing "{}"'.format(package_path))
                shutil.rmtree(package_path)
            else:
                raise CommandError('Location "{}" already exists.'.format(package_path))
        # Layout: <location>/<name>/setup.py and <location>/<name>/<name>/__init__.py
        actual_package_path = os.path.join(package_path, name)
        os.makedirs(actual_package_path)
        setup_text = render_template(setup_template_path, {'package_name': name, 'user': getpass.getuser()})
        with open(os.path.join(package_path, 'setup.py'), 'w') as wfh:
            wfh.write(setup_text)
        touch(os.path.join(actual_package_path, '__init__.py'))
class CreateAgendaSubcommand(CreateSubcommand):
    """Generate an agenda with the given extensions enabled and every
    parameter set to its default value."""

    name = 'agenda'
    description = """
    Create an agenda with the specified extensions enabled. And parameters set to their
    default values.
    """

    def initialize(self):
        self.parser.add_argument('extensions', nargs='+',
                                 help='Extensions to be added')
        self.parser.add_argument('-i', '--iterations', type=int, default=1,
                                 help='Sets the number of iterations for all workloads')
        self.parser.add_argument('-r', '--include-runtime-params', action='store_true',
                                 help="""
                                 Adds runtime parameters to the global section of the generated
                                 agenda. Note: these do not have default values, so only name
                                 will be added. Also, runtime parameters are devices-specific, so
                                 a device must be specified (either in the list of extensions,
                                 or in the existing config).
                                 """)
        self.parser.add_argument('-o', '--output', metavar='FILE',
                                 help='Output file. If not specified, STDOUT will be used instead.')

    def execute(self, args):  # pylint: disable=no-self-use,too-many-branches,too-many-statements
        """Build the agenda structure from the requested extensions and dump it as YAML."""
        loader = ExtensionLoader(packages=settings.extension_packages,
                                 paths=settings.extension_paths)
        agenda = OrderedDict()
        agenda['config'] = OrderedDict(instrumentation=[], result_processors=[])
        agenda['global'] = OrderedDict(iterations=args.iterations)
        agenda['workloads'] = []
        device = None
        device_config = None
        for name in args.extensions:
            extcls = loader.get_extension_class(name)
            config = loader.get_default_config(name)
            # 'modules' is not meaningful inside an agenda; drop it if present
            # rather than raising KeyError when a config does not carry it.
            config.pop('modules', None)
            if extcls.kind == 'workload':
                entry = OrderedDict()
                entry['name'] = extcls.name
                if name != extcls.name:
                    # The user referred to the workload by an alias; keep it as a label.
                    entry['label'] = name
                entry['params'] = config
                agenda['workloads'].append(entry)
            elif extcls.kind == 'device':
                if device is not None:
                    raise ConfigError('Specifying multiple devices: {} and {}'.format(device.name, name))
                device = extcls
                device_config = config
                agenda['config']['device'] = name
                agenda['config']['device_config'] = config
            else:
                if extcls.kind == 'instrument':
                    agenda['config']['instrumentation'].append(name)
                if extcls.kind == 'result_processor':
                    agenda['config']['result_processors'].append(name)
                agenda['config'][name] = config
        if args.include_runtime_params:
            if not device:
                # Fall back to the device from the existing config, if any.
                if settings.device:
                    device = loader.get_extension_class(settings.device)
                    device_config = loader.get_default_config(settings.device)
                else:
                    raise ConfigError('-r option requires a device to be in the list of extensions')
            rps = OrderedDict()
            for rp in device.runtime_parameters:
                if hasattr(rp, 'get_runtime_parameters'):
                    # a core parameter needs to be expanded for each of the
                    # device's cores, if they're available
                    for crp in rp.get_runtime_parameters(device_config.get('core_names', [])):
                        rps[crp.name] = None
                else:
                    rps[rp.name] = None
            agenda['global']['runtime_params'] = rps
        if args.output:
            wfh = open(args.output, 'w')
        else:
            wfh = sys.stdout
        yaml.dump(agenda, wfh, indent=4, default_flow_style=False)
        if args.output:
            wfh.close()
class CreateCommand(Command):
    """Top-level `wa create` command; dispatches to its registered subcommands."""

    name = 'create'
    description = '''Used to create various WA-related objects (see positional arguments list for what
                     objects may be created).\n\nUse "wa create <object> -h" for object-specific arguments.'''
    formatter_class = argparse.RawDescriptionHelpFormatter
    subcmd_classes = [
        CreateWorkloadSubcommand,
        CreatePackageSubcommand,
        CreateAgendaSubcommand,
    ]

    def initialize(self, context):
        """Instantiate each subcommand, registering it on a 'what' subparser."""
        subparsers = self.parser.add_subparsers(dest='what')
        self.subcommands = []  # pylint: disable=W0201
        for subcmd_cls in self.subcmd_classes:
            subcmd = subcmd_cls(self.logger, subparsers)
            self.subcommands.append(subcmd)

    def execute(self, args):
        """Forward execution to the subcommand selected on the command line."""
        for subcmd in self.subcommands:
            if subcmd.name == args.what:
                subcmd.execute(args)
                break
        else:
            # BUG FIX: the chosen subcommand lives in args.what (the subparser
            # dest); args.name does not exist for all subcommands and would
            # raise AttributeError here.
            raise CommandError('Not a valid create parameter: {}'.format(args.what))
def create_workload(name, kind='basic', where='local', check_name=True, **kwargs):
    """Create a skeleton workload of the given ``kind``.

    :param name: Name of the new workload.
    :param kind: One of 'basic', 'uiauto', 'android', 'android_uiauto'.
    :param where: 'local' for the WA environment root, otherwise a directory path.
    :param check_name: If True, refuse to shadow an existing workload name.
    :raises CommandError: on a duplicate name or an unknown ``kind``.
    """
    if check_name:
        extloader = ExtensionLoader(packages=settings.extension_packages, paths=settings.extension_paths)
        if name in [wl.name for wl in extloader.list_workloads()]:
            raise CommandError('Workload with name "{}" already exists.'.format(name))
    # Dispatch table for the workload creators; validated *before* any
    # directories are created so an unknown kind does not leave an empty
    # workload directory behind.
    creators = {
        'basic': create_basic_workload,
        'uiauto': create_uiautomator_workload,
        'android': create_android_benchmark,
        'android_uiauto': create_android_uiauto_benchmark,
    }
    if kind not in creators:
        raise CommandError('Unknown workload type: {}'.format(kind))
    class_name = get_class_name(name)
    if where == 'local':
        workload_dir = _d(os.path.join(settings.environment_root, 'workloads', name))
    else:
        workload_dir = _d(os.path.join(where, name))
    creators[kind](workload_dir, name, class_name, **kwargs)
    # Single-argument print with parentheses is valid Python 2 and 3.
    print('Workload created in {}'.format(workload_dir))
def create_basic_workload(path, name, class_name):
    """Write a basic workload skeleton into ``path``/__init__.py."""
    rendered = render_template('basic_workload', {'name': name, 'class_name': class_name})
    with open(os.path.join(path, '__init__.py'), 'w') as wfh:
        wfh.write(rendered)
def create_uiautomator_workload(path, name, class_name):
    """Write a UiAutomator workload skeleton, including its uiauto project."""
    create_uiauto_project(_d(os.path.join(path, 'uiauto')), name)
    rendered = render_template('uiauto_workload', {'name': name, 'class_name': class_name})
    with open(os.path.join(path, '__init__.py'), 'w') as wfh:
        wfh.write(rendered)
def create_android_benchmark(path, name, class_name):
    """Write an Android benchmark skeleton into ``path``/__init__.py."""
    rendered = render_template('android_benchmark', {'name': name, 'class_name': class_name})
    with open(os.path.join(path, '__init__.py'), 'w') as wfh:
        wfh.write(rendered)
def create_android_uiauto_benchmark(path, name, class_name):
    """Write an Android+UiAutomator benchmark skeleton, including its uiauto project."""
    create_uiauto_project(_d(os.path.join(path, 'uiauto')), name)
    rendered = render_template('android_uiauto_benchmark', {'name': name, 'class_name': class_name})
    with open(os.path.join(path, '__init__.py'), 'w') as wfh:
        wfh.write(rendered)
def create_uiauto_project(path, name, target='1'):
    """Generate an Android uitest project for the named workload under ``path``.

    Runs the SDK's ``android create uitest-project`` tool, then writes a
    build.sh wrapper and a UiAutomation.java skeleton.

    :raises CommandError: if no matching Android SDK target is installed.
    """
    sdk_path = get_sdk_path()
    android_path = os.path.join(sdk_path, 'tools', 'android')
    package_name = 'com.arm.wlauto.uiauto.' + name.lower()
    # e.g. ${ANDROID_HOME}/tools/android create uitest-project -n com.arm.wlauto.uiauto.linpack -t 1 -p ../test2
    command = '{} create uitest-project --name {} --target {} --path {}'.format(android_path,
                                                                                package_name,
                                                                                target,
                                                                                path)
    try:
        check_output(command, shell=True)
    except subprocess.CalledProcessError as e:
        # "is is not valid" is (verbatim) the message the android tool emits
        # for a missing/invalid --target.
        if 'is is not valid' in e.output:
            message = 'No Android SDK target found; have you run "{} update sdk" and downloaded a platform?'
            raise CommandError(message.format(android_path))
        # BUG FIX: any other failure used to be silently swallowed here.
        raise
    build_script = os.path.join(path, 'build.sh')
    with open(build_script, 'w') as wfh:
        template = string.Template(UIAUTO_BUILD_SCRIPT)
        wfh.write(template.substitute({'package_name': package_name}))
    # Make build.sh executable by everyone.
    os.chmod(build_script, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
    source_file = _f(os.path.join(path, 'src',
                                  os.sep.join(package_name.split('.')[:-1]),
                                  'UiAutomation.java'))
    with open(source_file, 'w') as wfh:
        wfh.write(render_template('UiAutomation.java', {'name': name, 'package_name': package_name}))
# Utility functions
def get_sdk_path():
    """Return the Android SDK location from the ANDROID_HOME environment
    variable.

    :raises CommandError: if ANDROID_HOME is not set (or is empty).
    """
    sdk_path = os.getenv('ANDROID_HOME')
    if not sdk_path:
        raise CommandError('Please set ANDROID_HOME environment variable to point to ' +
                           'the location of Android SDK')
    return sdk_path
def get_class_name(name, postfix=''):
    """Convert an extension name into a CamelCase class name (plus optional postfix)."""
    parts = identifier(name).split('_')
    return ''.join(capitalize(part) for part in parts) + postfix
def render_template(name, params):
    """Render the named template from TEMPLATES_DIR, substituting ``params``."""
    template_file = os.path.join(TEMPLATES_DIR, name)
    with open(template_file) as fh:
        template = string.Template(fh.read())
    return template.substitute(params)
def touch(path):
    """Create an empty file at ``path`` (truncating any existing content)."""
    open(path, 'w').close()
| {
"content_hash": "70e10235ee9f1094c5534e4625b9953e",
"timestamp": "",
"source": "github",
"line_count": 384,
"max_line_length": 143,
"avg_line_length": 44.4296875,
"alnum_prop": 0.5772229060430221,
"repo_name": "chase-qi/workload-automation",
"id": "9d5e7b48f5cf2d7aa676fef20d1ffcbe7832fe74",
"size": "17648",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "wlauto/commands/create.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "35633"
},
{
"name": "HTML",
"bytes": "243720"
},
{
"name": "Java",
"bytes": "97331"
},
{
"name": "JavaScript",
"bytes": "6578"
},
{
"name": "Jupyter Notebook",
"bytes": "1322"
},
{
"name": "Makefile",
"bytes": "430"
},
{
"name": "Python",
"bytes": "1373835"
},
{
"name": "Shell",
"bytes": "24344"
},
{
"name": "VimL",
"bytes": "901"
}
],
"symlink_target": ""
} |
"""
Tests for django test runner
"""
from __future__ import absolute_import
from optparse import make_option
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command
from django import db
from django.test import simple, TransactionTestCase, skipUnlessDBFeature
from django.test.simple import DjangoTestSuiteRunner, get_tests
from django.test.testcases import connections_support_transactions
from django.utils import unittest
from django.utils.importlib import import_module
from ..admin_scripts.tests import AdminScriptTestCase
from .models import Person
TEST_APP_OK = 'regressiontests.test_runner.valid_app.models'
TEST_APP_ERROR = 'regressiontests.test_runner.invalid_app.models'
class DependencyOrderingTests(unittest.TestCase):
    """Exercise simple.dependency_ordered() with various dependency graphs."""

    @staticmethod
    def _ordered_signatures(raw, dependencies):
        # Run the function under test and return just the signatures, ordered.
        ordered = simple.dependency_ordered(raw, dependencies=dependencies)
        return [signature for signature, _ in ordered]

    def test_simple_dependencies(self):
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
            ('s3', ('s3_db', ['charlie'])),
        ]
        dependencies = {
            'alpha': ['charlie'],
            'bravo': ['charlie'],
        }
        sigs = self._ordered_signatures(raw, dependencies)
        for expected in ('s1', 's2', 's3'):
            self.assertIn(expected, sigs)
        self.assertLess(sigs.index('s3'), sigs.index('s1'))
        self.assertLess(sigs.index('s3'), sigs.index('s2'))

    def test_chained_dependencies(self):
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
            ('s3', ('s3_db', ['charlie'])),
        ]
        dependencies = {
            'alpha': ['bravo'],
            'bravo': ['charlie'],
        }
        sigs = self._ordered_signatures(raw, dependencies)
        for expected in ('s1', 's2', 's3'):
            self.assertIn(expected, sigs)
        # Explicit dependencies
        self.assertLess(sigs.index('s2'), sigs.index('s1'))
        self.assertLess(sigs.index('s3'), sigs.index('s2'))
        # Implied dependencies
        self.assertLess(sigs.index('s3'), sigs.index('s1'))

    def test_multiple_dependencies(self):
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
            ('s3', ('s3_db', ['charlie'])),
            ('s4', ('s4_db', ['delta'])),
        ]
        dependencies = {
            'alpha': ['bravo', 'delta'],
            'bravo': ['charlie'],
            'delta': ['charlie'],
        }
        sigs = self._ordered_signatures(raw, dependencies)
        for expected in ('s1', 's2', 's3', 's4'):
            self.assertIn(expected, sigs)
        # Explicit dependencies
        self.assertLess(sigs.index('s2'), sigs.index('s1'))
        self.assertLess(sigs.index('s4'), sigs.index('s1'))
        self.assertLess(sigs.index('s3'), sigs.index('s2'))
        self.assertLess(sigs.index('s3'), sigs.index('s4'))
        # Implicit dependencies
        self.assertLess(sigs.index('s3'), sigs.index('s1'))

    def test_circular_dependencies(self):
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
        ]
        dependencies = {
            'bravo': ['alpha'],
            'alpha': ['bravo'],
        }
        self.assertRaises(ImproperlyConfigured, simple.dependency_ordered,
                          raw, dependencies=dependencies)

    def test_own_alias_dependency(self):
        # A signature must not depend on another alias of itself.
        raw = [
            ('s1', ('s1_db', ['alpha', 'bravo']))
        ]
        dependencies = {
            'alpha': ['bravo']
        }
        with self.assertRaises(ImproperlyConfigured):
            simple.dependency_ordered(raw, dependencies=dependencies)
        # reordering aliases shouldn't matter
        raw = [
            ('s1', ('s1_db', ['bravo', 'alpha']))
        ]
        with self.assertRaises(ImproperlyConfigured):
            simple.dependency_ordered(raw, dependencies=dependencies)
class MockTestRunner(object):
    """Test-runner stand-in recording (on the class) that run_tests() was called."""

    invoked = False  # flipped to True, class-wide, by run_tests()

    def __init__(self, *args, **kwargs):
        # Accept and ignore whatever the framework passes.
        pass

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        MockTestRunner.invoked = True
class ManageCommandTests(unittest.TestCase):
    """Check that `manage.py test` honours a custom --testrunner."""

    def test_custom_test_runner(self):
        runner_path = 'regressiontests.test_runner.tests.MockTestRunner'
        call_command('test', 'sites', testrunner=runner_path)
        self.assertTrue(MockTestRunner.invoked,
                        "The custom test runner has not been invoked")
class CustomOptionsTestRunner(simple.DjangoTestSuiteRunner):
    """Suite runner exposing three custom command-line options and echoing
    their values from run_tests() so callers can assert on stdout."""

    option_list = tuple(
        make_option('--option_%s' % letter, '-%s' % letter, action='store',
                    dest='option_%s' % letter, default=default)
        for letter, default in (('a', '1'), ('b', '2'), ('c', '3'))
    )

    def __init__(self, verbosity=1, interactive=True, failfast=True,
                 option_a=None, option_b=None, option_c=None, **kwargs):
        super(CustomOptionsTestRunner, self).__init__(
            verbosity=verbosity, interactive=interactive, failfast=failfast)
        self.option_a = option_a
        self.option_b = option_b
        self.option_c = option_c

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        # No tests are run; just report the received option values.
        print("%s:%s:%s" % (self.option_a, self.option_b, self.option_c))
class CustomTestRunnerOptionsTests(AdminScriptTestCase):
    """End-to-end checks that custom runner options reach the runner."""

    def setUp(self):
        settings = {
            'TEST_RUNNER': '\'regressiontests.test_runner.tests.CustomOptionsTestRunner\'',
        }
        self.write_settings('settings.py', sdict=settings)

    def tearDown(self):
        self.remove_settings('settings.py')

    def _assert_run_output(self, extra_args, expected):
        # Run django-admin with the custom runner and assert on its stdout.
        args = ['test', '--settings=regressiontests.settings'] + extra_args
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, expected)

    def test_default_options(self):
        self._assert_run_output([], '1:2:3')

    def test_default_and_given_options(self):
        self._assert_run_output(['--option_b=foo'], '1:foo:3')

    def test_option_name_and_value_separated(self):
        self._assert_run_output(['--option_b', 'foo'], '1:foo:3')

    def test_all_options_given(self):
        self._assert_run_output(
            ['--option_a=bar', '--option_b=foo', '--option_c=31337'],
            'bar:foo:31337')
class Ticket17477RegressionTests(AdminScriptTestCase):
    """Regression test for ticket #17477."""

    def setUp(self):
        self.write_settings('settings.py')

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_ticket_17477(self):
        """'manage.py help test' works after r16352."""
        out, err = self.run_manage(['help', 'test'])
        self.assertNoOutput(err)
class ModulesTestsPackages(unittest.TestCase):
    """Behaviour of the get_tests() helper for app packages."""

    def test_get_tests(self):
        "Check that the get_tests helper function can find tests in a directory"
        app_module = import_module(TEST_APP_OK)
        found = get_tests(app_module)
        self.assertIsInstance(found, type(app_module))

    def test_import_error(self):
        "Test for #12658 - Tests with ImportError's shouldn't fail silently"
        broken_module = import_module(TEST_APP_ERROR)
        self.assertRaises(ImportError, get_tests, broken_module)
class Sqlite3InMemoryTestDbs(unittest.TestCase):
    # NOTE: this test replaces the module-global db.connections handler;
    # the original is restored in the finally block so later tests are
    # unaffected even when an assertion fails.
    @unittest.skipUnless(all(db.connections[conn].vendor == 'sqlite' for conn in db.connections),
                         "This is a sqlite-specific issue")
    def test_transaction_support(self):
        """Ticket #16329: sqlite3 in-memory test databases"""
        old_db_connections = db.connections
        # Either NAME or TEST_NAME may carry the ':memory:' value; both
        # spellings must leave transaction-support detection working.
        for option in ('NAME', 'TEST_NAME'):
            try:
                db.connections = db.ConnectionHandler({
                    'default': {
                        'ENGINE': 'django.db.backends.sqlite3',
                        option: ':memory:',
                    },
                    'other': {
                        'ENGINE': 'django.db.backends.sqlite3',
                        option: ':memory:',
                    },
                })
                other = db.connections['other']
                # setup_databases() triggers the feature detection under test.
                DjangoTestSuiteRunner(verbosity=0).setup_databases()
                msg = "DATABASES setting '%s' option set to sqlite3's ':memory:' value shouldn't interfere with transaction support detection." % option
                # Transaction support should be properly initialised for the 'other' DB
                self.assertTrue(other.features.supports_transactions, msg)
                # And all the DBs should report that they support transactions
                self.assertTrue(connections_support_transactions(), msg)
            finally:
                db.connections = old_db_connections
class AutoIncrementResetTest(TransactionTestCase):
    """
    Here we test creating the same model two times in different test methods,
    and check that both times they get "1" as their PK value. That is, we test
    that AutoField values start from 1 for each transactional test case.
    """

    def _create_and_check_pk(self):
        # Each TransactionTestCase run should see the sequence reset to 1.
        person = Person.objects.create(first_name='Jack', last_name='Smith')
        self.assertEqual(person.pk, 1)

    @skipUnlessDBFeature('supports_sequence_reset')
    def test_autoincrement_reset1(self):
        self._create_and_check_pk()

    @skipUnlessDBFeature('supports_sequence_reset')
    def test_autoincrement_reset2(self):
        self._create_and_check_pk()
| {
"content_hash": "967ba2a5af8f1022597045ae012a7602",
"timestamp": "",
"source": "github",
"line_count": 278,
"max_line_length": 152,
"avg_line_length": 36.79496402877698,
"alnum_prop": 0.6023071659008701,
"repo_name": "rebost/django",
"id": "8c6dabf771487c84cde7e53f851d22f21541ba54",
"size": "10229",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/regressiontests/test_runner/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import recognizer
import os
#============= Matching each images in test folder with the known faces ===============
# Get path to all the test images
# Filtering on .jpg extension - so this will only work with JPEG images ending with .jpg
def match_faces():
    """Match every .jpg image in the test/ folder against the known faces."""
    # Full paths to every candidate image in the test folder.
    # Filtering on .jpg extension - so this will only work with JPEG images ending with .jpg
    paths_to_test_images = ['test/' + filename
                            for filename in os.listdir('test/')
                            if filename.endswith('.jpg')]
    res_path = 'recognized_faces/'
    # Names of the known people, derived by stripping the extension from the filenames.
    names = [filename[:-4] for filename in recognizer.image_filenames]
    for path_to_image in paths_to_test_images:
        encodings = recognizer.get_face_encodings(path_to_image)
        # Matching only works on images that contain exactly one face.
        if len(encodings) != 1:
            print()
            print()
            print("Please change image: " + path_to_image + " - it has " +
                  str(len(encodings)) + " faces; it can only have one")
            print()
            print()
            continue
        # Find and report the best match for the single face found.
        match = recognizer.find_match(recognizer.face_encodings, names, encodings[0])
        print(path_to_image + "==> \t\t\t" + match)
        # Write result to result path
        # cv2.imwrite(match)
match_faces() | {
"content_hash": "1e50605aa1e59028494c9320873a4807",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 99,
"avg_line_length": 39.9,
"alnum_prop": 0.62531328320802,
"repo_name": "gereziherw/DeepFaceRecognizer",
"id": "7485751ba6768866907aee161dc433c0fc3c3340",
"size": "1596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "find_match.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5939"
}
],
"symlink_target": ""
} |
import re
from flask_admin.babel import gettext
from flask_admin.model import filters
from .tools import parse_like_term
class BasePyMongoFilter(filters.BaseFilter):
    """
    Base pymongo filter.
    """
    def __init__(self, column, name, options=None, data_type=None):
        """
        Constructor.

        :param column: Document field name
        :param name: Display name
        :param options: Fixed set of options
        :param data_type: Client data type
        """
        super(BasePyMongoFilter, self).__init__(name, options, data_type)
        # Remember which document field this filter applies to.
        self.column = column
# Common filters
class FilterEqual(BasePyMongoFilter):
    """Match documents whose column equals the given value."""

    def apply(self, query, value):
        condition = {self.column: value}
        query.append(condition)
        return query

    def operation(self):
        return gettext('equals')
class FilterNotEqual(BasePyMongoFilter):
    """Match documents whose column differs from the given value."""

    def apply(self, query, value):
        condition = {self.column: {'$ne': value}}
        query.append(condition)
        return query

    def operation(self):
        return gettext('not equal')
class FilterLike(BasePyMongoFilter):
    """Match documents whose column matches a LIKE-style pattern."""

    def apply(self, query, value):
        # Translate the LIKE term into a regex the server can evaluate.
        pattern = parse_like_term(value)
        query.append({self.column: {'$regex': pattern}})
        return query

    def operation(self):
        return gettext('contains')
class FilterNotLike(BasePyMongoFilter):
    """Match documents whose column does NOT match a LIKE-style pattern."""

    def apply(self, query, value):
        # $not requires a compiled regex rather than a plain pattern string.
        pattern = parse_like_term(value)
        query.append({self.column: {'$not': re.compile(pattern)}})
        return query

    def operation(self):
        return gettext('not contains')
class FilterGreater(BasePyMongoFilter):
    """Match documents whose column is strictly greater than the value."""

    def apply(self, query, value):
        try:
            threshold = float(value)
        except ValueError:
            # Non-numeric input degrades to comparing against zero.
            threshold = 0
        query.append({self.column: {'$gt': threshold}})
        return query

    def operation(self):
        return gettext('greater than')
class FilterSmaller(BasePyMongoFilter):
    """Match documents whose column is strictly smaller than the value."""

    def apply(self, query, value):
        try:
            threshold = float(value)
        except ValueError:
            # Non-numeric input degrades to comparing against zero.
            threshold = 0
        query.append({self.column: {'$lt': threshold}})
        return query

    def operation(self):
        return gettext('smaller than')
# Customized type filters
class BooleanEqualFilter(FilterEqual, filters.BaseBooleanFilter):
    """Equality filter whose raw value is parsed as a boolean ('1' means True)."""

    def clean(self, value):
        return value == '1'
class BooleanNotEqualFilter(FilterNotEqual, filters.BaseBooleanFilter):
    """Inequality filter whose raw value is parsed as a boolean ('1' means True)."""

    def clean(self, value):
        return value == '1'
| {
"content_hash": "a62172f926376c297de8c869c7573faa",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 73,
"avg_line_length": 24.298076923076923,
"alnum_prop": 0.6062524732884844,
"repo_name": "dxmo/flask-admin",
"id": "179866c019894b95697b3fb7aa0675295c37a59c",
"size": "2527",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "flask_admin/contrib/pymongo/filters.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "120"
},
{
"name": "CSS",
"bytes": "1309"
},
{
"name": "HTML",
"bytes": "96469"
},
{
"name": "JavaScript",
"bytes": "30025"
},
{
"name": "Makefile",
"bytes": "5587"
},
{
"name": "Python",
"bytes": "647894"
},
{
"name": "Shell",
"bytes": "1264"
}
],
"symlink_target": ""
} |
"""
Simple example how to a run WsgiDAV in a 3rd-party WSGI server.
"""
from tempfile import gettempdir
from avax.webdav.wsgidav.fs_dav_provider import FilesystemProvider
from avax.webdav.wsgidav.version import __version__
from avax.webdav.wsgidav.wsgidav_app import DEFAULT_CONFIG, WsgiDAVApp
__docformat__ = "reStructuredText"
# Serve a temporary directory over WebDAV.
rootpath = gettempdir()
provider = FilesystemProvider(rootpath)
# Start from the stock configuration and override only what this sample needs.
config = DEFAULT_CONFIG.copy()
config.update({
    "provider_mapping": {"/": provider},
    "user_mapping": {},
    "verbose": 1,
    "enable_loggers": [],
    "propsmanager": True,      # True: use property_manager.PropertyManager
    "locksmanager": True,      # True: use lock_manager.LockManager
    "domaincontroller": None,  # None: domain_controller.WsgiDAVDomainController(user_mapping)
    })
app = WsgiDAVApp(config)
# As an example, use paste.httpserver
# (See http://pythonpaste.org/modules/httpserver.html for more options)
from paste import httpserver
httpserver.serve(app,
                 host="localhost",
                 port=8080,
                 server_version="WsgiDAV/%s" % __version__,
                 )
# Or we could use the default server that is part of the WsgiDAV package:
#from wsgidav.server import ext_wsgiutils_server
#ext_wsgiutils_server.serve(config, app)
| {
"content_hash": "ff74f771a209bc064744308707220683",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 95,
"avg_line_length": 36.13157894736842,
"alnum_prop": 0.6576839038601602,
"repo_name": "eavatar/avax.webdav",
"id": "90e0b5a38472e27a2b3908b613c182e60a0ee93e",
"size": "1599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "avax/webdav/wsgidav/server/server_sample.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "697720"
}
],
"symlink_target": ""
} |
import uuid
import fixtures
import mock
from oslo_config import fixture as config_fixture
from oslo_log import log
from pycadf import cadftaxonomy
from pycadf import cadftype
from pycadf import eventfactory
from pycadf import resource as cadfresource
import keystone.conf
from keystone import notifications
from keystone.tests import unit
from keystone.tests.unit import test_v3
CONF = keystone.conf.CONF

# Resource type used throughout these tests; random hex so values from one
# test run cannot collide with real resource types.
EXP_RESOURCE_TYPE = uuid.uuid4().hex

# Shorthand aliases for the notification action constants.
CREATED_OPERATION = notifications.ACTIONS.created
UPDATED_OPERATION = notifications.ACTIONS.updated
DELETED_OPERATION = notifications.ACTIONS.deleted
DISABLED_OPERATION = notifications.ACTIONS.disabled
class ArbitraryException(Exception):
    """Exception class with no special behavior, for use in tests."""
    pass
def register_callback(operation, resource_type=EXP_RESOURCE_TYPE):
    """Helper for creating and registering a mock callback."""
    # The callback needs __name__/im_class attributes so the notification
    # machinery can log it like a real bound method.
    mock_class = mock.Mock(__name__='class')
    callback = mock.Mock(__name__='callback', im_class=mock_class)
    notifications.register_event_callback(operation, resource_type, callback)
    return callback
class AuditNotificationsTestCase(unit.BaseTestCase):
    """Verify the notifications.Audit helpers invoke registered callbacks."""

    def setUp(self):
        super(AuditNotificationsTestCase, self).setUp()
        self.config_fixture = self.useFixture(config_fixture.Config(CONF))
        self.addCleanup(notifications.clear_subscribers)

    def _test_notification_operation(self, notify_function, operation):
        resource_id = uuid.uuid4().hex
        callback = register_callback(operation)
        notify_function(EXP_RESOURCE_TYPE, resource_id)
        callback.assert_called_once_with('identity', EXP_RESOURCE_TYPE,
                                         operation,
                                         {'resource_info': resource_id})
        # Repeat with the CADF notification format: the CADF payload builder
        # must be invoked exactly once, and not again for public=False.
        self.config_fixture.config(notification_format='cadf')
        with mock.patch(
                'keystone.notifications._create_cadf_payload') as cadf_notify:
            notify_function(EXP_RESOURCE_TYPE, resource_id)
            initiator = None
            cadf_notify.assert_called_once_with(
                operation, EXP_RESOURCE_TYPE, resource_id,
                notifications.taxonomy.OUTCOME_SUCCESS, initiator)
            notify_function(EXP_RESOURCE_TYPE, resource_id, public=False)
            cadf_notify.assert_called_once_with(
                operation, EXP_RESOURCE_TYPE, resource_id,
                notifications.taxonomy.OUTCOME_SUCCESS, initiator)

    def test_resource_created_notification(self):
        self._test_notification_operation(notifications.Audit.created,
                                          CREATED_OPERATION)

    def test_resource_updated_notification(self):
        self._test_notification_operation(notifications.Audit.updated,
                                          UPDATED_OPERATION)

    def test_resource_deleted_notification(self):
        self._test_notification_operation(notifications.Audit.deleted,
                                          DELETED_OPERATION)

    def test_resource_disabled_notification(self):
        self._test_notification_operation(notifications.Audit.disabled,
                                          DISABLED_OPERATION)
class NotificationsTestCase(unit.BaseTestCase):
    # Unit tests for the private notification senders and the
    # notification_opt_out configuration option.

    def test_send_notification(self):
        """Test _send_notification.
        Test the private method _send_notification to ensure event_type,
        payload, and context are built and passed properly.
        """
        resource = uuid.uuid4().hex
        resource_type = EXP_RESOURCE_TYPE
        operation = CREATED_OPERATION
        # NOTE(ldbragst): Even though notifications._send_notification doesn't
        # contain logic that creates cases, this is supposed to test that
        # context is always empty and that we ensure the resource ID of the
        # resource in the notification is contained in the payload. It was
        # agreed that context should be empty in Keystone's case, which is
        # also noted in the /keystone/notifications.py module. This test
        # ensures and maintains these conditions.
        expected_args = [
            {},  # empty context
            'identity.%s.created' % resource_type,  # event_type
            {'resource_info': resource},  # payload
            'INFO',  # priority is always INFO...
        ]
        with mock.patch.object(notifications._get_notifier(),
                               '_notify') as mocked:
            notifications._send_notification(operation, resource_type,
                                             resource)
            mocked.assert_called_once_with(*expected_args)

    def test_send_notification_with_opt_out(self):
        """Test the private method _send_notification with opt-out.
        Test that _send_notification does not notify when a valid
        notification_opt_out configuration is provided.
        """
        resource = uuid.uuid4().hex
        resource_type = EXP_RESOURCE_TYPE
        operation = CREATED_OPERATION
        event_type = 'identity.%s.created' % resource_type
        # NOTE(diazjf): Here we add notification_opt_out to the
        # configuration so that we should return before _get_notifer is
        # called. This is because we are opting out notifications for the
        # passed resource_type and operation.
        conf = self.useFixture(config_fixture.Config(CONF))
        conf.config(notification_opt_out=event_type)
        with mock.patch.object(notifications._get_notifier(),
                               '_notify') as mocked:
            notifications._send_notification(operation, resource_type,
                                             resource)
            # Opted-out event: the underlying notifier must not be touched.
            mocked.assert_not_called()

    def test_send_audit_notification_with_opt_out(self):
        """Test the private method _send_audit_notification with opt-out.
        Test that _send_audit_notification does not notify when a valid
        notification_opt_out configuration is provided.
        """
        resource_type = EXP_RESOURCE_TYPE
        action = CREATED_OPERATION + '.' + resource_type
        # The audit sender only forwards these, so any object works here.
        initiator = mock
        target = mock
        outcome = 'success'
        event_type = 'identity.%s.created' % resource_type
        conf = self.useFixture(config_fixture.Config(CONF))
        conf.config(notification_opt_out=event_type)
        with mock.patch.object(notifications._get_notifier(),
                               '_notify') as mocked:
            notifications._send_audit_notification(action,
                                                   initiator,
                                                   outcome,
                                                   target,
                                                   event_type)
            mocked.assert_not_called()

    def test_opt_out_authenticate_event(self):
        """Test that authenticate events are successfully opted out."""
        resource_type = EXP_RESOURCE_TYPE
        action = CREATED_OPERATION + '.' + resource_type
        initiator = mock
        target = mock
        outcome = 'success'
        event_type = 'identity.authenticate'
        # Authentication events are opted out per-outcome via a meter name.
        meter_name = '%s.%s' % (event_type, outcome)
        conf = self.useFixture(config_fixture.Config(CONF))
        conf.config(notification_opt_out=meter_name)
        with mock.patch.object(notifications._get_notifier(),
                               '_notify') as mocked:
            notifications._send_audit_notification(action,
                                                   initiator,
                                                   outcome,
                                                   target,
                                                   event_type)
            mocked.assert_not_called()
class BaseNotificationTest(test_v3.RestfulTestCase):
    # Base class that captures notifications and CADF audit events emitted
    # during a test (by monkeypatching the keystone.notifications senders)
    # and provides assertion helpers over the captured records.

    def setUp(self):
        super(BaseNotificationTest, self).setUp()
        # Records captured by the fake senders installed below.
        self._notifications = []
        self._audits = []

        def fake_notify(operation, resource_type, resource_id,
                        actor_dict=None, public=True):
            # Record the would-be notification instead of sending it.
            note = {
                'resource_id': resource_id,
                'operation': operation,
                'resource_type': resource_type,
                'send_notification_called': True,
                'public': public}
            if actor_dict:
                note['actor_id'] = actor_dict.get('id')
                note['actor_type'] = actor_dict.get('type')
                note['actor_operation'] = actor_dict.get('actor_operation')
            self._notifications.append(note)

        self.useFixture(fixtures.MockPatchObject(
            notifications, '_send_notification', fake_notify))

        def fake_audit(action, initiator, outcome, target,
                       event_type, **kwargs):
            # Build a real CADF event (as the production code would) and
            # record its dict form instead of sending it.
            service_security = cadftaxonomy.SERVICE_SECURITY
            event = eventfactory.EventFactory().new_event(
                eventType=cadftype.EVENTTYPE_ACTIVITY,
                outcome=outcome,
                action=action,
                initiator=initiator,
                target=target,
                observer=cadfresource.Resource(typeURI=service_security))
            for key, value in kwargs.items():
                setattr(event, key, value)
            audit = {
                'payload': event.as_dict(),
                'event_type': event_type,
                'send_notification_called': True}
            self._audits.append(audit)

        self.useFixture(fixtures.MockPatchObject(
            notifications, '_send_audit_notification', fake_audit))

    def _assert_last_note(self, resource_id, operation, resource_type,
                          actor_id=None, actor_type=None,
                          actor_operation=None):
        # NOTE(stevemar): If 'basic' format is not used, then simply
        # return since this assertion is not valid.
        if CONF.notification_format != 'basic':
            return
        self.assertTrue(len(self._notifications) > 0)
        note = self._notifications[-1]
        self.assertEqual(operation, note['operation'])
        self.assertEqual(resource_id, note['resource_id'])
        self.assertEqual(resource_type, note['resource_type'])
        self.assertTrue(note['send_notification_called'])
        if actor_id:
            self.assertEqual(actor_id, note['actor_id'])
            self.assertEqual(actor_type, note['actor_type'])
            self.assertEqual(actor_operation, note['actor_operation'])

    def _assert_last_audit(self, resource_id, operation, resource_type,
                           target_uri):
        # NOTE(stevemar): If 'cadf' format is not used, then simply
        # return since this assertion is not valid.
        if CONF.notification_format != 'cadf':
            return
        self.assertTrue(len(self._audits) > 0)
        audit = self._audits[-1]
        payload = audit['payload']
        self.assertEqual(resource_id, payload['resource_info'])
        action = '%s.%s' % (operation, resource_type)
        self.assertEqual(action, payload['action'])
        self.assertEqual(target_uri, payload['target']['typeURI'])
        self.assertEqual(resource_id, payload['target']['id'])
        event_type = '%s.%s.%s' % ('identity', resource_type, operation)
        self.assertEqual(event_type, audit['event_type'])
        self.assertTrue(audit['send_notification_called'])

    def _assert_initiator_data_is_set(self, operation, resource_type, typeURI):
        # Check the most recent audit event carries the authenticated
        # user/project as its initiator.
        self.assertTrue(len(self._audits) > 0)
        audit = self._audits[-1]
        payload = audit['payload']
        self.assertEqual(self.user_id, payload['initiator']['id'])
        self.assertEqual(self.project_id, payload['initiator']['project_id'])
        self.assertEqual(typeURI, payload['target']['typeURI'])
        action = '%s.%s' % (operation, resource_type)
        self.assertEqual(action, payload['action'])

    def _assert_notify_not_sent(self, resource_id, operation, resource_type,
                                public=True):
        # Assert that NO captured notification matches the given fields.
        unexpected = {
            'resource_id': resource_id,
            'operation': operation,
            'resource_type': resource_type,
            'send_notification_called': True,
            'public': public}
        for note in self._notifications:
            self.assertNotEqual(unexpected, note)

    def _assert_notify_sent(self, resource_id, operation, resource_type,
                            public=True):
        # Assert that at least one captured notification matches exactly.
        expected = {
            'resource_id': resource_id,
            'operation': operation,
            'resource_type': resource_type,
            'send_notification_called': True,
            'public': public}
        for note in self._notifications:
            if expected == note:
                break
        else:
            self.fail("Notification not sent.")
class NotificationsForEntities(BaseNotificationTest):
    """CRUD tests for identity entities, checking emitted notifications.

    Each test drives a create/update/delete through a manager API
    (identity_api, resource_api, role_api, catalog_api, policy_api,
    trust_api) and then checks the 'basic' notification and/or CADF
    audit record captured by the BaseNotificationTest fixtures.
    """
    def test_create_group(self):
        group_ref = unit.new_group_ref(domain_id=self.domain_id)
        group_ref = self.identity_api.create_group(group_ref)
        self._assert_last_note(group_ref['id'], CREATED_OPERATION, 'group')
        self._assert_last_audit(group_ref['id'], CREATED_OPERATION, 'group',
                                cadftaxonomy.SECURITY_GROUP)
    def test_create_project(self):
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        self._assert_last_note(
            project_ref['id'], CREATED_OPERATION, 'project')
        self._assert_last_audit(project_ref['id'], CREATED_OPERATION,
                                'project', cadftaxonomy.SECURITY_PROJECT)
    def test_create_role(self):
        role_ref = unit.new_role_ref()
        self.role_api.create_role(role_ref['id'], role_ref)
        self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
        self._assert_last_audit(role_ref['id'], CREATED_OPERATION, 'role',
                                cadftaxonomy.SECURITY_ROLE)
    def test_create_user(self):
        user_ref = unit.new_user_ref(domain_id=self.domain_id)
        user_ref = self.identity_api.create_user(user_ref)
        self._assert_last_note(user_ref['id'], CREATED_OPERATION, 'user')
        self._assert_last_audit(user_ref['id'], CREATED_OPERATION, 'user',
                                cadftaxonomy.SECURITY_ACCOUNT_USER)
    def test_create_trust(self):
        trustor = unit.new_user_ref(domain_id=self.domain_id)
        trustor = self.identity_api.create_user(trustor)
        trustee = unit.new_user_ref(domain_id=self.domain_id)
        trustee = self.identity_api.create_user(trustee)
        role_ref = unit.new_role_ref()
        self.role_api.create_role(role_ref['id'], role_ref)
        trust_ref = unit.new_trust_ref(trustor['id'],
                                       trustee['id'])
        self.trust_api.create_trust(trust_ref['id'],
                                    trust_ref,
                                    [role_ref])
        self._assert_last_note(
            trust_ref['id'], CREATED_OPERATION, 'OS-TRUST:trust')
        self._assert_last_audit(trust_ref['id'], CREATED_OPERATION,
                                'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
    def test_delete_group(self):
        group_ref = unit.new_group_ref(domain_id=self.domain_id)
        group_ref = self.identity_api.create_group(group_ref)
        self.identity_api.delete_group(group_ref['id'])
        self._assert_last_note(group_ref['id'], DELETED_OPERATION, 'group')
        self._assert_last_audit(group_ref['id'], DELETED_OPERATION, 'group',
                                cadftaxonomy.SECURITY_GROUP)
    def test_delete_project(self):
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        self.resource_api.delete_project(project_ref['id'])
        self._assert_last_note(
            project_ref['id'], DELETED_OPERATION, 'project')
        self._assert_last_audit(project_ref['id'], DELETED_OPERATION,
                                'project', cadftaxonomy.SECURITY_PROJECT)
    def test_delete_role(self):
        role_ref = unit.new_role_ref()
        self.role_api.create_role(role_ref['id'], role_ref)
        self.role_api.delete_role(role_ref['id'])
        self._assert_last_note(role_ref['id'], DELETED_OPERATION, 'role')
        self._assert_last_audit(role_ref['id'], DELETED_OPERATION, 'role',
                                cadftaxonomy.SECURITY_ROLE)
    def test_delete_user(self):
        user_ref = unit.new_user_ref(domain_id=self.domain_id)
        user_ref = self.identity_api.create_user(user_ref)
        self.identity_api.delete_user(user_ref['id'])
        self._assert_last_note(user_ref['id'], DELETED_OPERATION, 'user')
        self._assert_last_audit(user_ref['id'], DELETED_OPERATION, 'user',
                                cadftaxonomy.SECURITY_ACCOUNT_USER)
    def test_create_domain(self):
        domain_ref = unit.new_domain_ref()
        self.resource_api.create_domain(domain_ref['id'], domain_ref)
        self._assert_last_note(domain_ref['id'], CREATED_OPERATION, 'domain')
        self._assert_last_audit(domain_ref['id'], CREATED_OPERATION, 'domain',
                                cadftaxonomy.SECURITY_DOMAIN)
    def test_update_domain(self):
        domain_ref = unit.new_domain_ref()
        self.resource_api.create_domain(domain_ref['id'], domain_ref)
        domain_ref['description'] = uuid.uuid4().hex
        self.resource_api.update_domain(domain_ref['id'], domain_ref)
        self._assert_last_note(domain_ref['id'], UPDATED_OPERATION, 'domain')
        self._assert_last_audit(domain_ref['id'], UPDATED_OPERATION, 'domain',
                                cadftaxonomy.SECURITY_DOMAIN)
    def test_delete_domain(self):
        domain_ref = unit.new_domain_ref()
        self.resource_api.create_domain(domain_ref['id'], domain_ref)
        # Disable first -- the delete below apparently requires a disabled
        # domain (the sequence mirrors test_disable_domain); confirm against
        # the resource manager's delete_domain contract.
        domain_ref['enabled'] = False
        self.resource_api.update_domain(domain_ref['id'], domain_ref)
        self.resource_api.delete_domain(domain_ref['id'])
        self._assert_last_note(domain_ref['id'], DELETED_OPERATION, 'domain')
        self._assert_last_audit(domain_ref['id'], DELETED_OPERATION, 'domain',
                                cadftaxonomy.SECURITY_DOMAIN)
    def test_delete_trust(self):
        trustor = unit.new_user_ref(domain_id=self.domain_id)
        trustor = self.identity_api.create_user(trustor)
        trustee = unit.new_user_ref(domain_id=self.domain_id)
        trustee = self.identity_api.create_user(trustee)
        # NOTE(review): unlike test_create_trust, the role is never persisted
        # via role_api here -- presumably create_trust does not require the
        # role to exist; confirm this is intentional.
        role_ref = unit.new_role_ref()
        trust_ref = unit.new_trust_ref(trustor['id'], trustee['id'])
        self.trust_api.create_trust(trust_ref['id'],
                                    trust_ref,
                                    [role_ref])
        self.trust_api.delete_trust(trust_ref['id'])
        self._assert_last_note(
            trust_ref['id'], DELETED_OPERATION, 'OS-TRUST:trust')
        self._assert_last_audit(trust_ref['id'], DELETED_OPERATION,
                                'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
    def test_create_endpoint(self):
        endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
                                             interface='public',
                                             region_id=self.region_id)
        self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
        self._assert_notify_sent(endpoint_ref['id'], CREATED_OPERATION,
                                 'endpoint')
        self._assert_last_audit(endpoint_ref['id'], CREATED_OPERATION,
                                'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
    def test_update_endpoint(self):
        endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
                                             interface='public',
                                             region_id=self.region_id)
        self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
        self.catalog_api.update_endpoint(endpoint_ref['id'], endpoint_ref)
        self._assert_notify_sent(endpoint_ref['id'], UPDATED_OPERATION,
                                 'endpoint')
        self._assert_last_audit(endpoint_ref['id'], UPDATED_OPERATION,
                                'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
    def test_delete_endpoint(self):
        endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
                                             interface='public',
                                             region_id=self.region_id)
        self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
        self.catalog_api.delete_endpoint(endpoint_ref['id'])
        self._assert_notify_sent(endpoint_ref['id'], DELETED_OPERATION,
                                 'endpoint')
        self._assert_last_audit(endpoint_ref['id'], DELETED_OPERATION,
                                'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
    def test_create_service(self):
        service_ref = unit.new_service_ref()
        self.catalog_api.create_service(service_ref['id'], service_ref)
        self._assert_notify_sent(service_ref['id'], CREATED_OPERATION,
                                 'service')
        self._assert_last_audit(service_ref['id'], CREATED_OPERATION,
                                'service', cadftaxonomy.SECURITY_SERVICE)
    def test_update_service(self):
        service_ref = unit.new_service_ref()
        self.catalog_api.create_service(service_ref['id'], service_ref)
        self.catalog_api.update_service(service_ref['id'], service_ref)
        self._assert_notify_sent(service_ref['id'], UPDATED_OPERATION,
                                 'service')
        self._assert_last_audit(service_ref['id'], UPDATED_OPERATION,
                                'service', cadftaxonomy.SECURITY_SERVICE)
    def test_delete_service(self):
        service_ref = unit.new_service_ref()
        self.catalog_api.create_service(service_ref['id'], service_ref)
        self.catalog_api.delete_service(service_ref['id'])
        self._assert_notify_sent(service_ref['id'], DELETED_OPERATION,
                                 'service')
        self._assert_last_audit(service_ref['id'], DELETED_OPERATION,
                                'service', cadftaxonomy.SECURITY_SERVICE)
    def test_create_region(self):
        region_ref = unit.new_region_ref()
        self.catalog_api.create_region(region_ref)
        self._assert_notify_sent(region_ref['id'], CREATED_OPERATION,
                                 'region')
        self._assert_last_audit(region_ref['id'], CREATED_OPERATION,
                                'region', cadftaxonomy.SECURITY_REGION)
    def test_update_region(self):
        region_ref = unit.new_region_ref()
        self.catalog_api.create_region(region_ref)
        self.catalog_api.update_region(region_ref['id'], region_ref)
        self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION,
                                 'region')
        self._assert_last_audit(region_ref['id'], UPDATED_OPERATION,
                                'region', cadftaxonomy.SECURITY_REGION)
    def test_delete_region(self):
        region_ref = unit.new_region_ref()
        self.catalog_api.create_region(region_ref)
        self.catalog_api.delete_region(region_ref['id'])
        self._assert_notify_sent(region_ref['id'], DELETED_OPERATION,
                                 'region')
        self._assert_last_audit(region_ref['id'], DELETED_OPERATION,
                                'region', cadftaxonomy.SECURITY_REGION)
    def test_create_policy(self):
        policy_ref = unit.new_policy_ref()
        self.policy_api.create_policy(policy_ref['id'], policy_ref)
        self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION,
                                 'policy')
        self._assert_last_audit(policy_ref['id'], CREATED_OPERATION,
                                'policy', cadftaxonomy.SECURITY_POLICY)
    def test_update_policy(self):
        policy_ref = unit.new_policy_ref()
        self.policy_api.create_policy(policy_ref['id'], policy_ref)
        self.policy_api.update_policy(policy_ref['id'], policy_ref)
        self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION,
                                 'policy')
        self._assert_last_audit(policy_ref['id'], UPDATED_OPERATION,
                                'policy', cadftaxonomy.SECURITY_POLICY)
    def test_delete_policy(self):
        policy_ref = unit.new_policy_ref()
        self.policy_api.create_policy(policy_ref['id'], policy_ref)
        self.policy_api.delete_policy(policy_ref['id'])
        self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION,
                                 'policy')
        self._assert_last_audit(policy_ref['id'], DELETED_OPERATION,
                                'policy', cadftaxonomy.SECURITY_POLICY)
    def test_disable_domain(self):
        domain_ref = unit.new_domain_ref()
        self.resource_api.create_domain(domain_ref['id'], domain_ref)
        domain_ref['enabled'] = False
        self.resource_api.update_domain(domain_ref['id'], domain_ref)
        # 'disabled' events are internal-only, hence public=False.
        self._assert_notify_sent(domain_ref['id'], 'disabled', 'domain',
                                 public=False)
    def test_disable_of_disabled_domain_does_not_notify(self):
        domain_ref = unit.new_domain_ref(enabled=False)
        self.resource_api.create_domain(domain_ref['id'], domain_ref)
        # The domain_ref above is not changed during the create process. We
        # can use the same ref to perform the update.
        self.resource_api.update_domain(domain_ref['id'], domain_ref)
        self._assert_notify_not_sent(domain_ref['id'], 'disabled', 'domain',
                                     public=False)
    def test_update_group(self):
        group_ref = unit.new_group_ref(domain_id=self.domain_id)
        group_ref = self.identity_api.create_group(group_ref)
        self.identity_api.update_group(group_ref['id'], group_ref)
        self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group')
        self._assert_last_audit(group_ref['id'], UPDATED_OPERATION, 'group',
                                cadftaxonomy.SECURITY_GROUP)
    def test_update_project(self):
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        self.resource_api.update_project(project_ref['id'], project_ref)
        self._assert_notify_sent(
            project_ref['id'], UPDATED_OPERATION, 'project', public=True)
        self._assert_last_audit(project_ref['id'], UPDATED_OPERATION,
                                'project', cadftaxonomy.SECURITY_PROJECT)
    def test_disable_project(self):
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        project_ref['enabled'] = False
        self.resource_api.update_project(project_ref['id'], project_ref)
        self._assert_notify_sent(project_ref['id'], 'disabled', 'project',
                                 public=False)
    def test_disable_of_disabled_project_does_not_notify(self):
        project_ref = unit.new_project_ref(domain_id=self.domain_id,
                                           enabled=False)
        self.resource_api.create_project(project_ref['id'], project_ref)
        # The project_ref above is not changed during the create process. We
        # can use the same ref to perform the update.
        self.resource_api.update_project(project_ref['id'], project_ref)
        self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project',
                                     public=False)
    def test_update_project_does_not_send_disable(self):
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        # Updating with enabled=True (unchanged) must not emit 'disabled'.
        project_ref['enabled'] = True
        self.resource_api.update_project(project_ref['id'], project_ref)
        self._assert_last_note(
            project_ref['id'], UPDATED_OPERATION, 'project')
        self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project')
    def test_update_role(self):
        role_ref = unit.new_role_ref()
        self.role_api.create_role(role_ref['id'], role_ref)
        self.role_api.update_role(role_ref['id'], role_ref)
        self._assert_last_note(role_ref['id'], UPDATED_OPERATION, 'role')
        self._assert_last_audit(role_ref['id'], UPDATED_OPERATION, 'role',
                                cadftaxonomy.SECURITY_ROLE)
    def test_update_user(self):
        user_ref = unit.new_user_ref(domain_id=self.domain_id)
        user_ref = self.identity_api.create_user(user_ref)
        self.identity_api.update_user(user_ref['id'], user_ref)
        self._assert_last_note(user_ref['id'], UPDATED_OPERATION, 'user')
        self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user',
                                cadftaxonomy.SECURITY_ACCOUNT_USER)
    def test_config_option_no_events(self):
        self.config_fixture.config(notification_format='basic')
        role_ref = unit.new_role_ref()
        self.role_api.create_role(role_ref['id'], role_ref)
        # The regular notifications will still be emitted, since they are
        # used for callback handling.
        self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
        # No audit event should have occurred
        self.assertEqual(0, len(self._audits))
    def test_add_user_to_group(self):
        user_ref = unit.new_user_ref(domain_id=self.domain_id)
        user_ref = self.identity_api.create_user(user_ref)
        group_ref = unit.new_group_ref(domain_id=self.domain_id)
        group_ref = self.identity_api.create_group(group_ref)
        self.identity_api.add_user_to_group(user_ref['id'], group_ref['id'])
        # Membership changes are reported as an update on the group, with
        # the user recorded as the actor.
        self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group',
                               actor_id=user_ref['id'], actor_type='user',
                               actor_operation='added')
    def test_remove_user_from_group(self):
        user_ref = unit.new_user_ref(domain_id=self.domain_id)
        user_ref = self.identity_api.create_user(user_ref)
        group_ref = unit.new_group_ref(domain_id=self.domain_id)
        group_ref = self.identity_api.create_group(group_ref)
        self.identity_api.add_user_to_group(user_ref['id'], group_ref['id'])
        self.identity_api.remove_user_from_group(user_ref['id'],
                                                 group_ref['id'])
        self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group',
                               actor_id=user_ref['id'], actor_type='user',
                               actor_operation='removed')
class CADFNotificationsForEntities(NotificationsForEntities):
    """Re-run the entity notification tests with CADF formatting enabled."""

    def setUp(self):
        super(CADFNotificationsForEntities, self).setUp()
        # Switch the emitted notification format from the default to 'cadf'.
        self.config_fixture.config(notification_format='cadf')

    def test_initiator_data_is_set(self):
        """Creating a domain over HTTP records the request initiator."""
        domain = unit.new_domain_ref()
        response = self.post('/domains', body={'domain': domain})
        domain_id = response.result.get('domain').get('id')
        self._assert_last_audit(domain_id, CREATED_OPERATION, 'domain',
                                cadftaxonomy.SECURITY_DOMAIN)
        self._assert_initiator_data_is_set(
            CREATED_OPERATION, 'domain', cadftaxonomy.SECURITY_DOMAIN)
class V2Notifications(BaseNotificationTest):
    """CADF notification tests driven through the v2.0 admin HTTP API.

    Each test creates and then deletes an entity via admin_request and
    checks that the audit payload carries the request initiator.
    """
    def setUp(self):
        super(V2Notifications, self).setUp()
        self.config_fixture.config(notification_format='cadf')
    def test_user(self):
        token = self.get_scoped_token()
        resp = self.admin_request(
            method='POST',
            path='/v2.0/users',
            body={
                'user': {
                    'name': uuid.uuid4().hex,
                    'password': uuid.uuid4().hex,
                    'enabled': True,
                },
            },
            token=token,
        )
        user_id = resp.result.get('user').get('id')
        self._assert_initiator_data_is_set(CREATED_OPERATION,
                                           'user',
                                           cadftaxonomy.SECURITY_ACCOUNT_USER)
        # test for delete user
        self.admin_request(
            method='DELETE',
            path='/v2.0/users/%s' % user_id,
            token=token,
        )
        self._assert_initiator_data_is_set(DELETED_OPERATION,
                                           'user',
                                           cadftaxonomy.SECURITY_ACCOUNT_USER)
    def test_role(self):
        token = self.get_scoped_token()
        resp = self.admin_request(
            method='POST',
            path='/v2.0/OS-KSADM/roles',
            body={
                'role': {
                    'name': uuid.uuid4().hex,
                    'description': uuid.uuid4().hex,
                },
            },
            token=token,
        )
        role_id = resp.result.get('role').get('id')
        self._assert_initiator_data_is_set(CREATED_OPERATION,
                                           'role',
                                           cadftaxonomy.SECURITY_ROLE)
        # test for delete role
        self.admin_request(
            method='DELETE',
            path='/v2.0/OS-KSADM/roles/%s' % role_id,
            token=token,
        )
        self._assert_initiator_data_is_set(DELETED_OPERATION,
                                           'role',
                                           cadftaxonomy.SECURITY_ROLE)
    def test_service_and_endpoint(self):
        # Endpoints depend on a service, so both lifecycles are exercised
        # in one test: create service -> create endpoint -> delete endpoint
        # -> delete service.
        token = self.get_scoped_token()
        resp = self.admin_request(
            method='POST',
            path='/v2.0/OS-KSADM/services',
            body={
                'OS-KSADM:service': {
                    'name': uuid.uuid4().hex,
                    'type': uuid.uuid4().hex,
                    'description': uuid.uuid4().hex,
                },
            },
            token=token,
        )
        service_id = resp.result.get('OS-KSADM:service').get('id')
        self._assert_initiator_data_is_set(CREATED_OPERATION,
                                           'service',
                                           cadftaxonomy.SECURITY_SERVICE)
        resp = self.admin_request(
            method='POST',
            path='/v2.0/endpoints',
            body={
                'endpoint': {
                    'region': uuid.uuid4().hex,
                    'service_id': service_id,
                    'publicurl': uuid.uuid4().hex,
                    'adminurl': uuid.uuid4().hex,
                    'internalurl': uuid.uuid4().hex,
                },
            },
            token=token,
        )
        endpoint_id = resp.result.get('endpoint').get('id')
        self._assert_initiator_data_is_set(CREATED_OPERATION,
                                           'endpoint',
                                           cadftaxonomy.SECURITY_ENDPOINT)
        # test for delete endpoint
        self.admin_request(
            method='DELETE',
            path='/v2.0/endpoints/%s' % endpoint_id,
            token=token,
        )
        self._assert_initiator_data_is_set(DELETED_OPERATION,
                                           'endpoint',
                                           cadftaxonomy.SECURITY_ENDPOINT)
        # test for delete service
        self.admin_request(
            method='DELETE',
            path='/v2.0/OS-KSADM/services/%s' % service_id,
            token=token,
        )
        self._assert_initiator_data_is_set(DELETED_OPERATION,
                                           'service',
                                           cadftaxonomy.SECURITY_SERVICE)
    def test_project(self):
        token = self.get_scoped_token()
        resp = self.admin_request(
            method='POST',
            path='/v2.0/tenants',
            body={
                'tenant': {
                    'name': uuid.uuid4().hex,
                    'description': uuid.uuid4().hex,
                    'enabled': True
                },
            },
            token=token,
        )
        project_id = resp.result.get('tenant').get('id')
        self._assert_initiator_data_is_set(CREATED_OPERATION,
                                           'project',
                                           cadftaxonomy.SECURITY_PROJECT)
        # test for delete project
        self.admin_request(
            method='DELETE',
            path='/v2.0/tenants/%s' % project_id,
            token=token,
        )
        self._assert_initiator_data_is_set(DELETED_OPERATION,
                                           'project',
                                           cadftaxonomy.SECURITY_PROJECT)
class TestEventCallbacks(test_v3.RestfulTestCase):
    """Tests for registering and invoking notification event callbacks."""
    class FakeManager(object):
        # Minimal stand-in exposing a callback with the expected
        # (service, resource_type, operation, payload) signature.
        def _project_deleted_callback(self, service, resource_type, operation,
                                      payload):
            """Used just for the callback interface."""
    def test_notification_received(self):
        # register_callback is a module-level helper (defined earlier in this
        # file); its return value exposes a .called flag asserted below.
        callback = register_callback(CREATED_OPERATION, 'project')
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        self.assertTrue(callback.called)
    def test_notification_method_not_callable(self):
        # Registering a non-callable must be rejected up front.
        fake_method = None
        self.assertRaises(TypeError,
                          notifications.register_event_callback,
                          UPDATED_OPERATION,
                          'project',
                          [fake_method])
    def test_notification_event_not_valid(self):
        # Only the known operations (created/updated/deleted/...) are valid.
        manager = self.FakeManager()
        self.assertRaises(ValueError,
                          notifications.register_event_callback,
                          uuid.uuid4().hex,
                          'project',
                          manager._project_deleted_callback)
    def test_event_registration_for_unknown_resource_type(self):
        # Registration for unknown resource types should succeed. If no event
        # is issued for that resource type, the callback wont be triggered.
        manager = self.FakeManager()
        notifications.register_event_callback(
            DELETED_OPERATION,
            uuid.uuid4().hex,
            manager._project_deleted_callback)
        resource_type = uuid.uuid4().hex
        notifications.register_event_callback(
            DELETED_OPERATION,
            resource_type,
            manager._project_deleted_callback)
    def test_provider_event_callback_subscription(self):
        # @notifications.listener wires up event_callbacks at instantiation;
        # a single bound-method callback is exercised here.
        callback_called = []
        @notifications.listener
        class Foo(object):
            def __init__(self):
                self.event_callbacks = {
                    CREATED_OPERATION: {'project': self.foo_callback}}
            def foo_callback(self, service, resource_type, operation,
                             payload):
                # uses callback_called from the closure
                callback_called.append(True)
        Foo()
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        self.assertEqual([True], callback_called)
    def test_provider_event_callbacks_subscription(self):
        # Same as above, but with a list of callbacks for one event key.
        callback_called = []
        @notifications.listener
        class Foo(object):
            def __init__(self):
                self.event_callbacks = {
                    CREATED_OPERATION: {
                        'project': [self.callback_0, self.callback_1]}}
            def callback_0(self, service, resource_type, operation, payload):
                # uses callback_called from the closure
                callback_called.append('cb0')
            def callback_1(self, service, resource_type, operation, payload):
                # uses callback_called from the closure
                callback_called.append('cb1')
        Foo()
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)
        self.assertItemsEqual(['cb1', 'cb0'], callback_called)
    def test_invalid_event_callbacks(self):
        # event_callbacks must be a dict; anything else fails at __init__.
        @notifications.listener
        class Foo(object):
            def __init__(self):
                self.event_callbacks = 'bogus'
        self.assertRaises(AttributeError, Foo)
    def test_invalid_event_callbacks_event(self):
        # The per-operation value must itself be a dict of resource types.
        @notifications.listener
        class Foo(object):
            def __init__(self):
                self.event_callbacks = {CREATED_OPERATION: 'bogus'}
        self.assertRaises(AttributeError, Foo)
    def test_using_an_unbound_method_as_a_callback_fails(self):
        # NOTE(dstanek): An unbound method is when you reference a method
        # from a class object. You'll get a method that isn't bound to a
        # particular instance so there is no magic 'self'. You can call it,
        # but you have to pass in the instance manually like: C.m(C()).
        # If you reference the method from an instance then you get a method
        # that effectively curries the self argument for you
        # (think functools.partial). Obviously is we don't have an
        # instance then we can't call the method.
        @notifications.listener
        class Foo(object):
            def __init__(self):
                self.event_callbacks = {CREATED_OPERATION:
                                        {'project': Foo.callback}}
            def callback(self, service, resource_type, operation, payload):
                pass
        # TODO(dstanek): it would probably be nice to fail early using
        # something like:
        #     self.assertRaises(TypeError, Foo)
        Foo()
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        self.assertRaises(TypeError, self.resource_api.create_project,
                          project_ref['id'], project_ref)
class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
    """CADF notifications for authentication and role-assignment events.

    A fake `_send_audit_notification` records every emitted event so the
    tests can inspect the action, initiator and event payload.
    """

    LOCAL_HOST = 'localhost'
    ACTION = 'authenticate'
    ROLE_ASSIGNMENT = 'role_assignment'

    def setUp(self):
        super(CadfNotificationsWrapperTestCase, self).setUp()
        self._notifications = []

        def fake_notify(action, initiator, outcome, target,
                        event_type, **kwargs):
            service_security = cadftaxonomy.SERVICE_SECURITY
            event = eventfactory.EventFactory().new_event(
                eventType=cadftype.EVENTTYPE_ACTIVITY,
                outcome=outcome,
                action=action,
                initiator=initiator,
                target=target,
                observer=cadfresource.Resource(typeURI=service_security))
            # Extra keyword arguments (role, project, inherited_to_projects,
            # ...) become attributes on the CADF event.
            for key, value in kwargs.items():
                setattr(event, key, value)
            note = {
                'action': action,
                'initiator': initiator,
                'event': event,
                'event_type': event_type,
                'send_notification_called': True}
            self._notifications.append(note)

        # Record audit notifications locally instead of sending them.
        self.useFixture(fixtures.MockPatchObject(
            notifications, '_send_audit_notification', fake_notify))

    def _assert_last_note(self, action, user_id, event_type=None):
        """Check action/initiator (and optionally event_type) of the
        most recently recorded notification.
        """
        self.assertTrue(self._notifications)
        note = self._notifications[-1]
        self.assertEqual(action, note['action'])
        initiator = note['initiator']
        self.assertEqual(user_id, initiator.id)
        self.assertEqual(self.LOCAL_HOST, initiator.host.address)
        self.assertTrue(note['send_notification_called'])
        if event_type:
            self.assertEqual(event_type, note['event_type'])

    def _assert_event(self, role_id, project=None, domain=None,
                      user=None, group=None, inherit=False):
        """Assert that the CADF event is valid.

        In the case of role assignments, the event will have extra data,
        specifically, the role, target, actor, and if the role is inherited.

        An example event, as a dictionary is seen below:
            {
                'typeURI': 'http://schemas.dmtf.org/cloud/audit/1.0/event',
                'initiator': {
                    'typeURI': 'service/security/account/user',
                    'host': {'address': 'localhost'},
                    'id': 'openstack:0a90d95d-582c-4efb-9cbc-e2ca7ca9c341',
                    'name': u'bccc2d9bfc2a46fd9e33bcf82f0b5c21'
                },
                'target': {
                    'typeURI': 'service/security/account/user',
                    'id': 'openstack:d48ea485-ef70-4f65-8d2b-01aa9d7ec12d'
                },
                'observer': {
                    'typeURI': 'service/security',
                    'id': 'openstack:d51dd870-d929-4aba-8d75-dcd7555a0c95'
                },
                'eventType': 'activity',
                'eventTime': '2014-08-21T21:04:56.204536+0000',
                'role': u'0e6b990380154a2599ce6b6e91548a68',
                'domain': u'24bdcff1aab8474895dbaac509793de1',
                'inherited_to_projects': False,
                'group': u'c1e22dc67cbd469ea0e33bf428fe597a',
                'action': 'created.role_assignment',
                'outcome': 'success',
                'id': 'openstack:782689dd-f428-4f13-99c7-5c70f94a5ac1'
            }
        """
        note = self._notifications[-1]
        event = note['event']
        if project:
            self.assertEqual(project, event.project)
        if domain:
            self.assertEqual(domain, event.domain)
        if group:
            self.assertEqual(group, event.group)
        elif user:
            self.assertEqual(user, event.user)
        self.assertEqual(role_id, event.role)
        self.assertEqual(inherit, event.inherited_to_projects)

    def test_v3_authenticate_user_name_and_domain_id(self):
        user_id = self.user_id
        user_name = self.user['name']
        password = self.user['password']
        domain_id = self.domain_id
        data = self.build_authentication_request(username=user_name,
                                                 user_domain_id=domain_id,
                                                 password=password)
        self.post('/auth/tokens', body=data)
        self._assert_last_note(self.ACTION, user_id)

    def test_v3_authenticate_user_id(self):
        user_id = self.user_id
        password = self.user['password']
        data = self.build_authentication_request(user_id=user_id,
                                                 password=password)
        self.post('/auth/tokens', body=data)
        self._assert_last_note(self.ACTION, user_id)

    def test_v3_authenticate_user_name_and_domain_name(self):
        user_id = self.user_id
        user_name = self.user['name']
        password = self.user['password']
        domain_name = self.domain['name']
        data = self.build_authentication_request(username=user_name,
                                                 user_domain_name=domain_name,
                                                 password=password)
        self.post('/auth/tokens', body=data)
        self._assert_last_note(self.ACTION, user_id)

    def _test_role_assignment(self, url, role, project=None, domain=None,
                              user=None, group=None):
        """PUT then DELETE a grant URL, asserting a notification for each."""
        self.put(url)
        action = "%s.%s" % (CREATED_OPERATION, self.ROLE_ASSIGNMENT)
        event_type = '%s.%s.%s' % (notifications.SERVICE,
                                   self.ROLE_ASSIGNMENT, CREATED_OPERATION)
        self._assert_last_note(action, self.user_id, event_type)
        self._assert_event(role, project, domain, user, group)
        self.delete(url)
        action = "%s.%s" % (DELETED_OPERATION, self.ROLE_ASSIGNMENT)
        event_type = '%s.%s.%s' % (notifications.SERVICE,
                                   self.ROLE_ASSIGNMENT, DELETED_OPERATION)
        self._assert_last_note(action, self.user_id, event_type)
        # NOTE(review): group is deliberately not passed for the delete
        # assertion, so _assert_event falls back to checking event.user --
        # confirm delete events are expected to omit the group attribute.
        self._assert_event(role, project, domain, user, None)

    def test_user_project_grant(self):
        url = ('/projects/%s/users/%s/roles/%s' %
               (self.project_id, self.user_id, self.role_id))
        self._test_role_assignment(url, self.role_id,
                                   project=self.project_id,
                                   user=self.user_id)

    def test_group_domain_grant(self):
        group_ref = unit.new_group_ref(domain_id=self.domain_id)
        group = self.identity_api.create_group(group_ref)
        self.identity_api.add_user_to_group(self.user_id, group['id'])
        url = ('/domains/%s/groups/%s/roles/%s' %
               (self.domain_id, group['id'], self.role_id))
        self._test_role_assignment(url, self.role_id,
                                   domain=self.domain_id,
                                   user=self.user_id,
                                   group=group['id'])

    def test_add_role_to_user_and_project(self):
        # A notification is sent when add_role_to_user_and_project is called on
        # the assignment manager.
        # Pass domain_id by keyword for consistency with every other
        # new_project_ref() call in this file.
        project_ref = unit.new_project_ref(domain_id=self.domain_id)
        project = self.resource_api.create_project(
            project_ref['id'], project_ref)
        tenant_id = project['id']
        self.assignment_api.add_role_to_user_and_project(
            self.user_id, tenant_id, self.role_id)
        self.assertTrue(self._notifications)
        note = self._notifications[-1]
        self.assertEqual('created.role_assignment', note['action'])
        self.assertTrue(note['send_notification_called'])
        self._assert_event(self.role_id, project=tenant_id, user=self.user_id)

    def test_remove_role_from_user_and_project(self):
        # A notification is sent when remove_role_from_user_and_project is
        # called on the assignment manager.
        self.assignment_api.remove_role_from_user_and_project(
            self.user_id, self.project_id, self.role_id)
        self.assertTrue(self._notifications)
        note = self._notifications[-1]
        self.assertEqual('deleted.role_assignment', note['action'])
        self.assertTrue(note['send_notification_called'])
        self._assert_event(self.role_id, project=self.project_id,
                           user=self.user_id)
class TestCallbackRegistration(unit.BaseTestCase):
    """Tests for the debug logging emitted when callbacks are registered.

    The expected 'callback' strings embed the module path and qualified
    name of the locally defined callables, so the local function/class
    names in these tests are load-bearing -- do not rename them.
    """

    def setUp(self):
        super(TestCallbackRegistration, self).setUp()
        self.mock_log = mock.Mock()
        # Force the callback logging to occur
        self.mock_log.logger.getEffectiveLevel.return_value = log.DEBUG

    def verify_log_message(self, data):
        """Verify log message.

        Tests that use this are a little brittle because adding more
        logging can break them.

        TODO(dstanek): remove the need for this in a future refactoring
        """
        log_fn = self.mock_log.debug
        self.assertEqual(len(data), log_fn.call_count)
        for datum in data:
            log_fn.assert_any_call(mock.ANY, datum)

    def test_a_function_callback(self):
        def callback(*args, **kwargs):
            pass
        resource_type = 'thing'
        with mock.patch('keystone.notifications.LOG', self.mock_log):
            notifications.register_event_callback(
                CREATED_OPERATION, resource_type, callback)
        callback = 'keystone.tests.unit.common.test_notifications.callback'
        expected_log_data = {
            'callback': callback,
            'event': 'identity.%s.created' % resource_type
        }
        self.verify_log_message([expected_log_data])

    def test_a_method_callback(self):
        class C(object):
            def callback(self, *args, **kwargs):
                pass
        with mock.patch('keystone.notifications.LOG', self.mock_log):
            notifications.register_event_callback(
                CREATED_OPERATION, 'thing', C().callback)
        callback = 'keystone.tests.unit.common.test_notifications.C.callback'
        expected_log_data = {
            'callback': callback,
            'event': 'identity.thing.created'
        }
        self.verify_log_message([expected_log_data])

    def test_a_list_of_callbacks(self):
        def callback(*args, **kwargs):
            pass
        class C(object):
            def callback(self, *args, **kwargs):
                pass
        with mock.patch('keystone.notifications.LOG', self.mock_log):
            notifications.register_event_callback(
                CREATED_OPERATION, 'thing', [callback, C().callback])
        callback_1 = 'keystone.tests.unit.common.test_notifications.callback'
        callback_2 = 'keystone.tests.unit.common.test_notifications.C.callback'
        expected_log_data = [
            {
                'callback': callback_1,
                'event': 'identity.thing.created'
            },
            {
                'callback': callback_2,
                'event': 'identity.thing.created'
            },
        ]
        self.verify_log_message(expected_log_data)

    def test_an_invalid_callback(self):
        # NOTE: the arguments must be passed to assertRaises individually.
        # The previous version bundled them into a single tuple, which made
        # register_event_callback raise TypeError for the wrong reason
        # (missing positional arguments) -- the invalid-callback validation
        # was never actually exercised.
        self.assertRaises(TypeError,
                          notifications.register_event_callback,
                          CREATED_OPERATION, 'thing', object())

    def test_an_invalid_event(self):
        def callback(*args, **kwargs):
            pass
        self.assertRaises(ValueError,
                          notifications.register_event_callback,
                          uuid.uuid4().hex,
                          'thing',
                          callback)
class CADFNotificationsDataTestCase(test_v3.RestfulTestCase):
    """Checks on the content of emitted CADF audit notifications."""
    # NOTE: the previous no-op setUp override (which only called
    # super().setUp()) has been removed; the parent's setUp runs directly.

    def test_receive_identityId_from_audit_notification(self):
        """The audit observer should carry the identity service's ID.

        Registers a catalog service of type 'identity', emits an audit
        notification, and verifies that the 'observer' resource passed
        to the notifier resolves to that service's ID.
        """
        observer = None
        resource_type = EXP_RESOURCE_TYPE
        ref = unit.new_service_ref()
        ref['type'] = 'identity'
        self.catalog_api.create_service(ref['id'], ref.copy())
        action = CREATED_OPERATION + '.' + resource_type
        initiator = notifications._get_request_audit_info(self.user_id)
        target = cadfresource.Resource(typeURI=cadftaxonomy.ACCOUNT_USER)
        outcome = 'success'
        event_type = 'identity.authenticate.created'
        with mock.patch.object(notifications._get_notifier(),
                               '_notify') as mocked:
            notifications._send_audit_notification(action,
                                                   initiator,
                                                   outcome,
                                                   target,
                                                   event_type)
        # call_args is an (args, kwargs) pair; scan both halves for the
        # 'observer' keyword argument handed to the notifier.
        for mock_args_list in mocked.call_args:
            if len(mock_args_list) != 0:
                for mock_args in mock_args_list:
                    if 'observer' in mock_args:
                        observer = mock_args['observer']
                        break
        self.assertEqual(ref['id'], observer['id'])
| {
"content_hash": "b2f1be256825920d875845fa7b73f38a",
"timestamp": "",
"source": "github",
"line_count": 1273,
"max_line_length": 79,
"avg_line_length": 43.45797329143755,
"alnum_prop": 0.5654893170890424,
"repo_name": "cernops/keystone",
"id": "674bef05e839b645690074b3a1f74e7cf7350519",
"size": "55912",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keystone/tests/unit/common/test_notifications.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "665"
},
{
"name": "Python",
"bytes": "4691908"
}
],
"symlink_target": ""
} |
# Sphinx configuration for the RainbowStream documentation build.
# Settings left at their Sphinx defaults are kept as commented examples.

# Sphinx extension module names; none are needed for this project.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'RainbowStream'
copyright = u'2014, Vu Nhat Minh'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.5.5'
# The full version, including alpha/beta/rc tags.
release = '1.5.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'RainbowStreamdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'RainbowStream.tex', u'RainbowStream Documentation',
   u'Vu Nhat Minh', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'rainbowstream', u'RainbowStream Documentation',
     [u'Vu Nhat Minh'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'RainbowStream', u'RainbowStream Documentation',
   u'Vu Nhat Minh', 'RainbowStream', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| {
"content_hash": "3c8b8d1d47042c3f5e855f94f360e226",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 79,
"avg_line_length": 31.37719298245614,
"alnum_prop": 0.7078557450377411,
"repo_name": "jthelin/rainbowstream",
"id": "8831a8af5f1044996144f6ccb2b2d92b0a2f39d4",
"size": "8180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1631"
},
{
"name": "Python",
"bytes": "155481"
}
],
"symlink_target": ""
} |
from datetime import datetime
class BaseUser(object):
    """Abstract user backend.

    Skills use this at minimum for error logging, but subclasses may add
    more behaviour. Both methods must be overridden.
    """

    def log_error(self, user_id, message):
        """Persist *message* as the latest error for *user_id*."""
        raise NotImplementedError("Not Implemented.")

    def get_error(self, user_id):
        """Return the latest error stored for *user_id*."""
        raise NotImplementedError("Not Implemented.")
class AllVoiceUser(BaseUser):
    """In-memory user backend; stores the newest error per user at class level."""

    # Maps user_id -> (message truncated to 64 chars, datetime logged).
    _database = {}
    _db_limit = 50

    @classmethod
    def log_error(cls, user_id, message):
        """
        Saves error for usage later.
        :param user_id: <str> Unique user ID
        :param message: <str> Message to save
        """
        cls._database[user_id] = (message[:64], datetime.now())
        cls.clean()

    @classmethod
    def get_error(cls, user_id):
        """
        Retrieves error from storage. Returns blank str otherwise
        :param user_id: <str>
        :return: <str>
        """
        record = cls._database.get(user_id)
        if record is None:
            return ""
        return record[0] or ""

    @classmethod
    def clean(cls):
        """Evict the oldest entries once the store exceeds _db_limit."""
        overflow = len(cls._database) - cls._db_limit
        if overflow > 0:
            by_age = sorted(cls._database.items(), key=lambda item: item[1][1])
            for stale_key, _unused in by_age[:overflow]:
                cls._database.pop(stale_key)

    @classmethod
    def reset(cls):
        """Drops every stored entry."""
        cls._database = {}
| {
"content_hash": "af6a03dc3586a56a5a84567418a417c2",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 76,
"avg_line_length": 26.490909090909092,
"alnum_prop": 0.5634866163349348,
"repo_name": "TheLampshady/all_voice",
"id": "ca57d615b87264b35b08c66ed584af4afe0d3479",
"size": "1457",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "all_voice/models/user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "47"
},
{
"name": "Python",
"bytes": "28489"
}
],
"symlink_target": ""
} |
from lazy import lazy
from ploy.common import BaseMaster, BaseInstance, import_paramiko, yesno
import getpass
import logging
import os
import socket
import subprocess
import sys
log = logging.getLogger('ploy')
def get_key_fingerprint(key):
    """Return the key's fingerprint as colon-separated lowercase hex pairs."""
    raw = key.get_fingerprint()
    # Indexing Python 3 bytes yields ints; Python 2 str yields 1-char strings.
    to_int = (lambda b: b) if isinstance(raw[0], int) else ord
    return ':'.join('%02x' % to_int(b) for b in raw)
def ServerHostKeyPolicy(*args, **kwarks):
    """Factory for a paramiko MissingHostKeyPolicy that checks a configured
    fingerprint.

    Wrapped in a function so paramiko is only imported when actually used.
    """
    paramiko = import_paramiko()

    class ServerHostKeyPolicy(paramiko.MissingHostKeyPolicy):
        def __init__(self, fingerprint_func):
            # fingerprint_func is evaluated lazily (and only once) via the
            # `fingerprint` property below.
            self.fingerprint_func = fingerprint_func
            self.ask = True

        @lazy
        def fingerprint(self):
            return self.fingerprint_func()

        def missing_host_key(self, client, hostname, key):
            fingerprint = get_key_fingerprint(key)
            if self.fingerprint.lower() == 'ask':
                # Interactive mode: confirm the fingerprint once, then
                # accept silently; a "no" aborts the whole process.
                if not self.ask:
                    return
                if yesno("WARNING! Automatic fingerprint checking disabled.\nGot fingerprint %s.\nContinue?" % fingerprint):
                    self.ask = False
                    return
                sys.exit(1)
            elif fingerprint == self.fingerprint or self.fingerprint.lower() == 'ignore':
                if self.fingerprint.lower() == 'ignore':
                    log.warn("Fingerprint verification disabled!")
                # Accept the key and persist it to the client's host-key file.
                client.get_host_keys().add(hostname, key.get_name(), key)
                if client._host_keys_filename is not None:
                    client.save_host_keys(client._host_keys_filename)
                return
            raise paramiko.SSHException("Fingerprint doesn't match for %s (got %s, expected %s)" % (hostname, fingerprint, self.fingerprint))

    return ServerHostKeyPolicy(*args, **kwarks)
class InstanceFormattingWrapper(object):
    """Expose an instance's config entries as attributes.

    Lets proxy-command templates reference config values with
    ``{instances.name.key}``-style format fields.
    """

    def __init__(self, instance):
        self.instance = instance

    def __getattr__(self, name):
        # Any unknown attribute falls through to the wrapped config mapping.
        return self.instance.config[name]
class Instance(BaseInstance):
    """A plain SSH-reachable instance (``plain-instance`` config section)."""

    sectiongroupname = 'plain-instance'

    def get_host(self):
        """Return the configured host name, falling back to ``ip``."""
        if 'host' not in self.config:
            return self.config['ip']
        return self.config['host']

    def get_port(self):
        """Return the configured SSH port (default 22)."""
        return self.config.get('port', 22)

    def get_fingerprint(self):
        """Return the expected host-key fingerprint.

        Looks in the instance config, then the master config. If the value
        names an existing file relative to the main config path, the
        fingerprint is extracted from it with ``ssh-keygen -lf``.
        """
        fingerprint = self.config.get('fingerprint')
        if fingerprint is None:
            fingerprint = self.master.master_config.get('fingerprint')
        if fingerprint is None:
            raise self.paramiko.SSHException("No fingerprint set in config.")
        path = os.path.join(self.master.main_config.path, fingerprint)
        if os.path.exists(path):
            try:
                result = subprocess.check_output(['ssh-keygen', '-lf', path])
            except subprocess.CalledProcessError as e:
                log.error("Couldn't get fingerprint from '%s':\n%s" % (path, e))
                sys.exit(1)
            else:
                # ssh-keygen -l output is "<bits> <fingerprint> <comment>".
                fingerprint = result.split()[1]
        return fingerprint

    @lazy
    def sshconfig(self):
        """Parsed ~/.ssh/config (empty SSHConfig when the file is absent)."""
        sshconfig = self.paramiko.SSHConfig()
        path = os.path.expanduser('~/.ssh/config')
        if not os.path.exists(path):
            return sshconfig
        with open(path) as f:
            sshconfig.parse(f)
        return sshconfig

    @lazy
    def proxy_command(self):
        """The ProxyCommand string to use, or None.

        An explicit ``proxycommand`` config value is formatted with the
        instance config plus ``instances``, ``known_hosts`` and ``path``;
        otherwise the value from ~/.ssh/config is used as-is.
        """
        proxy_command = self.config.get('proxycommand', None)
        if proxy_command is None:
            return self.sshconfig.lookup(self.get_host()).get('proxycommand', None)
        else:
            d = dict(
                instances=dict(
                    (k, InstanceFormattingWrapper(v))
                    for k, v in self.master.instances.items()))
            d.update(self.config)
            d['known_hosts'] = self.master.known_hosts
            d['path'] = self.master.main_config.path
            return proxy_command.format(**d)

    def get_proxy_sock(self, hostname, port):
        """Open a paramiko ProxyCommand socket, or return None when unset."""
        paramiko = self.paramiko
        proxy_command = self.proxy_command
        if proxy_command:
            try:
                sock = paramiko.ProxyCommand(proxy_command)
            except Exception:
                log.error("The following ProxyCommand failed:\n%s" % proxy_command)
                raise
        else:
            sock = None
        return sock

    def _fix_known_hosts(self, known_hosts):
        """Rewrite the known_hosts file, dropping blank/comment/invalid lines."""
        lines = []
        with open(known_hosts, 'r') as f:
            for lineno, line in enumerate(f):
                line = line.strip()
                if (len(line) == 0) or (line[0] == '#'):
                    continue
                try:
                    # Parse purely for validation; invalid entries are skipped.
                    self.paramiko.hostkeys.HostKeyEntry.from_line(line, lineno)
                except self.paramiko.hostkeys.InvalidHostKey:
                    continue
                lines.append(line + '\n')
        with open(known_hosts, 'w') as f:
            f.writelines(lines)

    def init_ssh_key(self, user=None):
        """Connect via SSH and return the connection parameters.

        Retries in a loop: on authentication failure it may fall back to a
        password prompt (``password-fallback``), and on a bad host key it
        scrubs the offending entry from known_hosts and tries again.
        Returns a dict with user/host/port, the connected paramiko client
        and ssh option values; raises paramiko.SSHException on fatal errors.
        """
        paramiko = self.paramiko
        sshconfig = self.sshconfig
        try:
            host = self.get_host()
        except KeyError:
            raise paramiko.SSHException("No host or ip set in config.")
        port = self.get_port()
        # ~/.ssh/config may override both host name and port.
        hostname = sshconfig.lookup(host).get('hostname', host)
        port = sshconfig.lookup(host).get('port', port)
        password = None
        client = paramiko.SSHClient()
        fingerprint_func = self.get_fingerprint
        client.set_missing_host_key_policy(ServerHostKeyPolicy(fingerprint_func))
        known_hosts = self.master.known_hosts
        client.known_hosts = None
        while 1:
            sock = self.get_proxy_sock(hostname, port)
            if os.path.exists(known_hosts):
                self._fix_known_hosts(known_hosts)
                client.load_host_keys(known_hosts)
            try:
                if user is None:
                    # Config value wins over ~/.ssh/config, which wins over 'root'.
                    user = sshconfig.lookup(host).get('user', 'root')
                    user = self.config.get('user', user)
                client_args = dict(
                    port=int(port),
                    username=user,
                    key_filename=self.config.get('ssh-key-filename', None),
                    password=password,
                    sock=sock)
                client.connect(hostname, **client_args)
                break
            except paramiko.AuthenticationException:
                if not self.config.get('password-fallback', False):
                    log.error('Failed to connect to %s (%s)' % (self.config_id, hostname))
                    for option in ('username', 'password', 'port', 'key_filename', 'sock'):
                        if client_args[option] is not None:
                            log.error('%s: %r' % (option, client_args[option]))
                    raise
                if password is None and 'password' in self.config:
                    password = self.config['password']
                else:
                    password = getpass.getpass("Password for '%s@%s:%s': " % (user, host, port))
            except paramiko.BadHostKeyException:
                # Drop the mismatching key and rewrite known_hosts, then retry.
                host_keys = client.get_host_keys()
                if port == 22:
                    key_hostname = hostname
                else:
                    key_hostname = "[%s]:%s" % (hostname, port)
                bad_key = host_keys.lookup(key_hostname)
                keys = [x for x in host_keys.items() if x[1] != bad_key]
                if os.path.exists(known_hosts):
                    os.remove(known_hosts)
                open(known_hosts, 'w').close()
                host_keys.clear()
                for name, key in keys:
                    for subkey in key.values():
                        host_keys.add(name, subkey.get_name(), subkey)
                client.save_host_keys(known_hosts)
            except (paramiko.SSHException, socket.error):
                log.error('Failed to connect to %s (%s)' % (self.config_id, hostname))
                for option in ('username', 'password', 'port', 'key_filename', 'sock'):
                    if client_args[option] is not None:
                        log.error('%s: %r' % (option, client_args[option]))
                raise
            # Retry path: close the proxy socket before reconnecting.
            if sock is not None:
                sock.close()
        client.save_host_keys(known_hosts)
        result = dict(
            user=user,
            host=host,
            port=port,
            client=client,
            UserKnownHostsFile=known_hosts,
            StrictHostKeyChecking="yes")
        if self.proxy_command:
            result['ProxyCommand'] = self.proxy_command
        return result
class Master(BaseMaster):
    """Master for plain (pre-existing, SSH-reachable) instances."""
    sectiongroupname = 'plain-instance'
    instance_class = Instance
def get_massagers():
    """Ploy plugin hook: config massagers for plain-instance sections."""
    from ploy.config import BooleanMassager, UserMassager

    section = 'plain-instance'
    return [
        UserMassager(section, 'user'),
        BooleanMassager(section, 'password-fallback'),
    ]
def get_masters(ctrl):
    """Ploy plugin hook: yield one Master per plain-master config section."""
    default_sections = {'plain': {}}
    for name, master_config in ctrl.config.get('plain-master', default_sections).items():
        yield Master(ctrl, name, master_config)
# Entry point consumed by ploy's plugin loader.
plugin = dict(
    get_massagers=get_massagers,
    get_masters=get_masters)
| {
"content_hash": "ce1710fc0b9fff8b464a1271dfe512e9",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 141,
"avg_line_length": 37.72357723577236,
"alnum_prop": 0.5535560344827586,
"repo_name": "fschulze/ploy",
"id": "cce6ac761a68d70a976df424dd80d13e36ff6a98",
"size": "9280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ploy/plain.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "145099"
}
],
"symlink_target": ""
} |
from pyoauth2 import Client, AccessToken
from everydo.api import OCEverydoApi, WOEverydoApi
from everydo.api.base import check_execption
class EverydoApiClient:
    """OAuth2 client for the Everydo API.

    Handles the authorization-code, password and refresh-token flows and
    hands out per-account (OC) and per-site (WO) sub-clients.
    """

    def __init__(self, key, secret, api_host, redirect=''):
        self.key = key
        self.secret = secret
        self.api_host = api_host
        self.redirect_uri = redirect
        self.client = Client(key, secret,
                site=self.api_host,
                authorize_url=self.api_host + '/@@authorize',
                token_url= self.api_host + '/@@access_token')
        self.access_token = None

    def __repr__(self):
        return '<EverydoClient OAuth2>'

    @property
    def authorize_url(self):
        """URL the user must visit to grant access (authorization-code flow)."""
        return self.client.auth_code.authorize_url(redirect_uri=self.redirect_uri)

    def auth_with_code(self, code):
        """Exchange an authorization code for an access token."""
        self.access_token = self.client.auth_code.get_token(code, redirect_uri=self.redirect_uri, header_format="Oauth2 %s")

    def auth_with_token(self, token):
        """Adopt an already-issued access token string."""
        self.access_token = AccessToken(self.client, token, header_format="Oauth2 %s")

    def auth_with_password(self, username, password, **opt):
        """Obtain an access token via the resource-owner password flow."""
        self.access_token = self.client.password.get_token(username=username,
                password=password, redirect_uri=self.redirect_uri, **opt)

    @property
    def token_code(self):
        """The raw access-token string, or None when unauthenticated."""
        return self.access_token and self.access_token.token

    @property
    def refresh_token_code(self):
        """The refresh-token string, or None when not available."""
        return getattr(self.access_token, 'refresh_token', None)

    def refresh_token(self, refresh_token):
        """Swap a refresh token for a fresh access token."""
        access_token = AccessToken(self.client, token='', refresh_token=refresh_token, header_format="Oauth2 %s")
        self.access_token = access_token.refresh()

    def get_account(self):
        """Return an OCApiClient (account level) sharing this token."""
        client = OCApiClient(self.key, self.secret, self.api_host, self.redirect_uri)
        client.auth_with_token(self.token_code)
        return client

    @check_execption
    def _get(self, url, **opts):
        return self.access_token.get(url, **opts)

    @property
    def list_sites(self):
        # NOTE(review): a property, not a method — access it without
        # parentheses; each access performs an HTTP request.
        return self._get('/list_sites')

    def get_site(self, site_name):
        """Return a WOApiClient for *site_name*, or None when unknown."""
        site = self.list_sites.get(site_name, {})
        if not site:
            return None
        client = WOApiClient(self.key, self.secret, site['site_url'], self.redirect_uri)
        client.auth_with_token(self.token_code)
        return client
class OCApiClient(OCEverydoApi):
    """Account-level (OC) API client that replays an existing OAuth2 token."""

    def __init__(self, key, secret, api_host, redirect=''):
        self.redirect_uri = redirect
        # No authorize/token URLs: this client never performs a grant flow.
        self.client = Client(key, secret,
                             site=api_host, authorize_url='', token_url='')
        self.access_token = None

    def __repr__(self):
        return '<OCClient OAuth2>'

    def auth_with_token(self, token):
        """Adopt an already-issued access token string."""
        self.access_token = AccessToken(self.client, token, header_format="Oauth2 %s")

    @property
    def token_code(self):
        """The raw access-token string, or None when unauthenticated."""
        return self.access_token and self.access_token.token
class WOApiClient(WOEverydoApi):
    """Site-level (WO) API client that replays an existing OAuth2 token."""

    def __init__(self, key, secret, api_host, redirect=''):
        self.redirect_uri = redirect
        # No authorize/token URLs: this client never performs a grant flow.
        self.client = Client(key, secret,
                             site=api_host, authorize_url='', token_url='')
        self.access_token = None

    def __repr__(self):
        return '<WOClient OAuth2>'

    def auth_with_token(self, token):
        """Adopt an already-issued access token string."""
        self.access_token = AccessToken(self.client, token, header_format="Oauth2 %s")

    @property
    def token_code(self):
        """The raw access-token string, or None when unauthenticated."""
        return self.access_token and self.access_token.token
if __name__ == '__main__':
args = {'key': '',
'secret': '',
'api_host' : '',
'redirect' : ''}
# 初始化输入参数
edo_api = EverydoApiClient(**args)
print edo_api.authorize_url
code = input('input the code')
# 通过code获取access_token
edo_api.auth_with_code(str(code))
# 获取oc的API操作对象
oc_api = edo_api.get_account()
sites = edo_api.list_sites()
for key in sites.keys():
print "site_name: %s ,site_title: %s \nsite_url: %s\n" % (sites[key]['site_name'], sites[key]['site_title'], sites[key]['site_url'])
# 特定站点的API操作对象
wo_api = edo_api.get_site('default')
# 调用特定的API
file_info = wo_api.files.file_info(file_id=9284298392)
| {
"content_hash": "04372e8f7e20b48a51212474f1c39a83",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 140,
"avg_line_length": 32.20454545454545,
"alnum_prop": 0.612796988943778,
"repo_name": "audoe/EverydoApi",
"id": "eb332caf45bfd06caa3bad942f206e17d29df8cd",
"size": "4339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "everydo/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7049"
}
],
"symlink_target": ""
} |
# CGI page: browse, filter, edit and delete Redis keys for alarm groups.
import cgi
import cgitb
# Enable error reporting through the web/HTTP server
cgitb.enable()
# My libraries: mjl (JSON, files), mhl (HTML), flt (tools w/ Redis)
import mjl, mhl, flt
import redis
# General parameters
TestoPagina="Aggiunge un gruppo di allarmi (chiave Redis)"
DirBase="/var/www"
ConfigFile=DirBase+"/conf/config.json"
ExecFile="/cgi-bin/writesetsredis.py"
# Redis "key"
RedisKey = "sets:*" # default key pattern; in practice it is reset further below
# Form name/s
FormName = "rkey"
# Open the Redis database with my library helper
MyDB = flt.OpenDBFile(ConfigFile)
# Would generate key/values if missing
# (assigning more or less standard defaults)
#if not MyDB.exists(RedisKey):
#    MyDB.hmset(RedisKey,{"hostname":"nessuno","port":6379,"database":0,"password":""})
# Start web page - these are blocks of HTML provided by the library
print (mhl.MyHtml())
print (mhl.MyHtmlHead())
# Write the page title/text
print ("<h1>","<center>",TestoPagina,"</center>","</h1>")
#print ("<hr/>","<br/>")
# Optional help/annotation
print ("Non ho rinominato i campi e non sono stato a riordinare le voci.<br/>")
form=cgi.FieldStorage()
# In reality this script is not run by another one passing parameters,
# so the following 4 lines could be removed.
if FormName not in form:
    pass
else:
    RedisKey = cgi.escape(form[FormName].value)
print ("<h2>","<center>","Filtra chiave Redis","</center>","</h2>")
print ("Puoi usare i caratteri \"*\" e \"?\", esempi:<br/>")
print ("*stringa*, *stringafinale, *stringacon3caratterifinali???<br/><br/>")
# Start of the filter form
print (mhl.MyActionForm("/cgi-bin/readsetsredis.py","POST"))
print ("<table>") # 2 columns
print ("<tr>")
print ("<td>")
print ("Inserisci il filtro:")
print ("</td>")
print ("<td>")
print (mhl.MyTextForm(FormName,RedisKey,"40","required","")) # Width 40, but a key can be up to 125 characters (if I remember right)
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td colspan=\"2\">")
#print ("<hr/>") # The horizontal line
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td>")
print ("") # Text in the 1st column
print ("</td>")
print ("<td>")
print (mhl.MyButtonForm("submit","Attiva filtro"))
print ("</td>")
print ("</tr>")
print ("</table>")
# End form
print (mhl.MyEndForm())
print ("<hr/><br/>")
# Table displaying key:value pairs
print ("<table>")
# The first entry is not editable: it is the Redis key (display only)
print ("<tr>")
print ("<td>")
print ("<b>Key/s: ",RedisKey,"</b>")
print ("</td>")
print ("<td>")
print ("<b>Type : Value</b>")
print ("</td>")
print ("</tr>")
# For each key ... print the key and its value ("Decode()" converts bytes -> str)
for i in MyDB.keys(RedisKey):
    print ("<tr>")
    print ("<td>")
    #print (flt.Decode(i),": ",sep="")
    print (flt.Decode(i))
    print ("</td>")
    print ("<td>")
    # Decode twice: once to read the key-type string, once to compare it
    if flt.Decode(MyDB.type(flt.Decode(i))) == "hash":
        print (MyDB.type(flt.Decode(i)),": ",MyDB.hgetall(flt.Decode(i)))
    elif flt.Decode(MyDB.type(flt.Decode(i))) == "string":
        print (MyDB.type(flt.Decode(i)),": ",MyDB.get(flt.Decode(i)))
    elif flt.Decode(MyDB.type(flt.Decode(i))) == "list":
        print (MyDB.type(flt.Decode(i)),": ",MyDB.llen(flt.Decode(i)),"valori, il primo e`: ",MyDB.lindex(flt.Decode(i),"0"))
    elif flt.Decode(MyDB.type(flt.Decode(i))) == "sets":
        print (MyDB.type(flt.Decode(i)),": ",MyDB.smembers(flt.Decode(i)))
    else:
        print (MyDB.type(flt.Decode(i)),": ","Non ancora contemplata")
    print ("</td>")
    print ("</tr>")
print ("</table>")
# Done
print ("<hr/><br/>")
print ("<h2>","<center>","Modifica chiave Redis","</center>","</h2>")
# Start of the edit form
print (mhl.MyActionForm("/cgi-bin/changesetredis.py","POST"))
print ("<table>") # 2 columns
print ("<tr>")
print ("<td>")
print ("Seleziona la chiave da modificare:")
print ("</td>")
print ("<td>")
print (mhl.MyDropDown(FormName,flt.DecodeList(MyDB.keys(RedisKey)),"")) #
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td colspan=\"2\">")
#print ("<hr/>") # The horizontal line
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td>")
print ("") # Text in the 1st column
print ("</td>")
print ("<td>")
print (mhl.MyButtonForm("submit","Modifica"))
print ("</td>")
print ("</tr>")
print ("</table>")
# End form
print (mhl.MyEndForm())
print ("<hr/><br/>")
print ("<h2>","<center>","Eliminazione chiave Redis","</center>","</h2>")
print ("<strong><center>ATTENZIONE: Non ci sara` una richiesta di conferma</center></strong><br/>")
# Start of the delete form
print (mhl.MyActionForm("/cgi-bin/deletekeyredis.py","POST"))
print ("<table>") # 2 columns
print ("<tr>")
print ("<td>")
print ("Seleziona la chiave da eliminare:")
print ("</td>")
print ("<td>")
print (mhl.MyDropDown(FormName,flt.DecodeList(MyDB.keys(RedisKey)),"")) #
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td colspan=\"2\">")
#print ("<hr/>") # The horizontal line
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td>")
print ("") # Text in the 1st column
print ("</td>")
print ("<td>")
print (mhl.MyButtonForm("submit","ELIMINA"))
print ("</td>")
print ("</tr>")
print ("</table>")
# End form
print (mhl.MyEndForm())
# End web page
print (mhl.MyHtmlBottom())
| {
"content_hash": "784f930fe39b5f40c5478f8c36f603d6",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 142,
"avg_line_length": 25.35981308411215,
"alnum_prop": 0.6285240464344942,
"repo_name": "raspibo/Livello1",
"id": "0e7506a41c55844439a76e994415f6c3101e748f",
"size": "5543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "var/www/cgi-bin/readsetsredis.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2004"
},
{
"name": "PHP",
"bytes": "39"
},
{
"name": "Python",
"bytes": "163109"
},
{
"name": "Shell",
"bytes": "10111"
}
],
"symlink_target": ""
} |
'''Status line at the bottom
is working good here.
'''
#####################
from tkinter import *
from tkinter.filedialog import *
from tkinter import ttk
from tkinter.scrolledtext import *
import os
from sanHarvardKyoto import *
from engVirgin import *
TITLE = "संयुक्ता"
class IndicEditor(Text, object):
    """A Tk Text widget specialised for editing Indic-script documents.

    Tracks the current file name, mirrors it into the toplevel window
    title, and provides UTF-8 load/save plus undo/redo helpers.
    """

    def __init__(self, master, **options):
        Text.__init__(self, master, **options)
        self.config(
            borderwidth=0,
            font=("Mangal", 11),
            foreground="white",
            background="black",
            insertbackground="yellow",  # cursor
            selectforeground="white",   # selection
            selectbackground="grey",
            wrap=WORD,                  # use word wrapping
            undo=True,
            width=64,
        )
        self.filename = None            # current document

    def _getfilename(self):
        return self._filename

    def _setfilename(self, filename):
        # Mirror the document name into the toplevel window title.
        self._filename = filename
        title = os.path.basename(filename or "(new document)")
        title = title + " - " + TITLE
        self.winfo_toplevel().title(title)

    filename = property(_getfilename, _setfilename)

    def resetTitle(self):
        """Reset the window title to the '(new document)' form."""
        title = "(new document)"
        title = title + " - " + TITLE
        self.winfo_toplevel().title(title)

    def load(self, filename):
        """Replace the widget contents with the UTF-8 decoded file."""
        # BUG FIX: the file object returned by open() was never closed.
        with open(filename, mode='rb') as f:
            text = f.read()
        self.delete(1.0, END)
        self.insert(END, text.decode('utf8'))
        self.mark_set(INSERT, 1.0)
        self.edit_modified(False)
        self.filename = filename

    def save(self, filename=None):
        """Write the widget contents to *filename* (default: current file)."""
        if filename is None:
            filename = self.filename
        # BUG FIX: the handle was closed twice (once in the try body and
        # again in finally); a context manager closes it exactly once.
        data = self.get(1.0, END).rstrip()  # drop tkinter's trailing junk
        with open(filename, "wb") as f:
            f.write(data.encode('utf8'))    # write data
            f.write("\n".encode('utf8'))    # add a newline
        self.edit_modified(False)
        self.filename = filename

    def onUndo(self):
        """Undo the last edit, or report that the undo stack is empty."""
        try:
            self.edit_undo()
        except TclError:                    # exception if stacks empty
            # NOTE(review): showinfo must come from tkinter.messagebox;
            # confirm it is actually bound at module level.
            showinfo('SaMyuktA', 'Nothing to undo')

    def onRedo(self):
        """Redo the last undone edit, or report an empty redo stack."""
        try:
            self.edit_redo()
        except TclError:
            showinfo('SaMyuktA', 'Nothing to redo')
# File-type filter presented by the open/save dialogs.
FILETYPES = [
    ("Text files", "*.txt"), ("All files", "*")
]

class Cancel(Exception):
    """Raised internally to abort a file operation the user cancelled."""
    pass
def open_as():
    """Prompt for a file name and load it into the editor.

    Raises Cancel when the user aborts the dialog or the file cannot
    be read.
    """
    # BUG FIX: the name 'filedialog' is not bound by this module's
    # wildcard imports (tkinter's * does not export submodules).
    from tkinter import filedialog
    f = filedialog.askopenfilename(parent=root, filetypes=FILETYPES)
    if not f:
        raise Cancel
    try:
        editor.load(f)
    except IOError:
        # BUG FIX: 'tkMessageBox' is the Python 2 module name; this file
        # uses Python 3 tkinter, so import from tkinter.messagebox.
        from tkinter.messagebox import showwarning
        showwarning("Open", "Cannot open the file.")
        raise Cancel
def save_as():
    """Prompt for a target file name and save the editor contents there.

    Raises Cancel when the user aborts the dialog or the file cannot
    be written.
    """
    # BUG FIX: the name 'filedialog' is not bound by this module's
    # wildcard imports (tkinter's * does not export submodules).
    from tkinter import filedialog
    f = filedialog.asksaveasfilename(parent=root, defaultextension=".txt")
    if not f:
        raise Cancel
    try:
        editor.save(f)
    except IOError:
        # BUG FIX: 'messageBox' does not exist as a module; the Python 3
        # location is tkinter.messagebox.
        from tkinter.messagebox import showwarning
        showwarning("Save As", "Cannot save the file.")
        raise Cancel
def save():
    """Save to the current file, or fall back to the Save-As dialog.

    Raises Cancel when the write fails or the user aborts Save-As.
    """
    if editor.filename:
        try:
            editor.save(editor.filename)
        except IOError:
            # BUG FIX: 'tkMessageBox' is the Python 2 module name; this
            # file uses Python 3 tkinter, so import tkinter.messagebox.
            from tkinter.messagebox import showwarning
            showwarning("Save", "Cannot save the file.")
            raise Cancel
    else:
        save_as()
def saveIfModified():
    """If the document has unsaved changes, offer to save them."""
    if (editor.edit_modified() == False):
        return
    # BUG FIX: 'messagebox' is not bound by the module's wildcard imports
    # (tkinter's * does not export submodules); import it explicitly.
    from tkinter import messagebox
    if messagebox.askyesno(TITLE, "Document modified. Save changes?"):
        save()
    editor.edit_modified(False)
def file_new(event=None):
    """Menu/shortcut handler: start a fresh, untitled document."""
    try:
        saveIfModified()
    except Cancel:
        return "break"  # don't propagate events
    editor.delete('1.0', END)
    editor.resetTitle()
    return "break"  # don't propagate events
def file_open(event=None):
    """Menu/shortcut handler: save pending changes, then open a document."""
    try:
        saveIfModified()
        open_as()
    except Cancel:
        pass
    # Suppress further event propagation.
    return "break"
def file_save(event=None):
    """Menu/shortcut handler: save the document if it has unsaved changes."""
    try:
        if editor.edit_modified() == True:
            save()
    except Cancel:
        pass
    # Suppress further event propagation.
    return "break"
def file_save_as(event=None):
    """Menu/shortcut handler: save the document under a new name."""
    try:
        save_as()
    except Cancel:
        pass
    # Suppress further event propagation.
    return "break"
def file_quit(event=None):
    """Menu/shortcut handler: save pending changes, then exit."""
    try:
        saveIfModified()
    except Cancel:
        # User backed out: keep the application running.
        return
    root.destroy()
def about_command():
    """Show the About dialog with the application's copyright notice."""
    # BUG FIX: 'messagebox' is not bound by the module's wildcard imports
    # (tkinter's * does not export submodules); import it explicitly.
    from tkinter import messagebox
    label = messagebox.showinfo(
        "About",
        "संयुक्ता - Sanskrit Editor\nCopyright © 2016 www.SpokenSanskrit.org\nAll Rights Reserved.")
def howtouse():
    ''' This has to have a tab for each encoding
        scheme and one for Intro and one tab for
        misc. info
    '''
    # Secondary window showing the Harvard-Kyoto transliteration chart
    # as a static image inside a read-oriented Text widget.
    hwin = Toplevel( root )
    hwin.title( "Harvard-Kyoto Transliteration Map" )
    frame1 = Frame( master = hwin, bg = '#001a00' )
    frame1.pack(fill='both', expand='yes')
    editArea = Text( master = frame1, wrap = WORD, width = 86, height = 39 )
    editArea.pack( padx=10, pady=10, fill=BOTH, expand=True )
    # NOTE(review): assumes KH-scheme.gif sits in the process working
    # directory — confirm the launcher chdirs appropriately.
    khImage = PhotoImage(file ="./KH-scheme.gif")
    editArea.image = khImage # keep ref. to image!
    editArea.image_create( '1.0', image = khImage)
    hwin.protocol( "WM_DELETE_WINDOW", hwin.destroy)
def dummy():
    """Placeholder handler for menu items that are not implemented yet."""
    print( "Not Implemented" )
## .......................................
def onFind( event=None ):
    """Open the Find dialog (Ctrl+F): one-line search box, direction
    radio buttons, Find Next and Cancel."""
    t2=Toplevel( root )
    def close_search():
        # Clear highlights before tearing the dialog down.
        editor.tag_remove( 'match', '1.0', END)
        t2.destroy()
    t2.title( "Find" )
    t2.geometry( '381x68+200+250' )
    t2.transient( root )
    Label(t2, text = "Find:").grid( row = 0, column = 0, sticky = 'e' )
    # A one-line Text widget (not Entry) is used as the search box --
    # presumably so the same transliteration key handling applies; confirm.
    e = Text( t2 )
    e.grid( row=0, column =1, padx =2, pady=2, sticky='we')
    e.config(
        width = 30,
        height = 1,
        font=("Mangal", 11),
        foreground="white",
        background="black",
        insertbackground="yellow", # cursor
    )
    e.lastneedle = None
    editor.srchindex = '1.0'
    e.focus_set()
    # c is defined below but only read when the button is clicked, so the
    # lambda's late binding is safe here.
    Button(t2, text = "Find Next", command = lambda: search( c.get(), editor,
                                   t2, e)).grid(row =0, column =2,
                                   sticky = 'e'+'w', padx = 2,
                                   pady = 2)
    # Direction selector: 2 = forward (default), 1 = backward.
    c = IntVar()
    c.set(2)
    Radiobutton(t2, text="Forward", variable=c, value=2).grid( row = 1, column = 1, sticky = 'w' )
    Radiobutton(t2, text="Backward", variable=c, value=1).grid( row = 1, column = 1, sticky = 'e' )
    Button(t2, text = "Cancel", command = close_search).grid( row =1, column =2,
                                   sticky = 'e', padx = 2,
                                   pady = 2)
    t2.protocol( "WM_DELETE_WINDOW", close_search)
## .................Local scope function.....................
def search( upordown, editor, t2, e ):
    """Find the next occurrence of the text typed in |e| inside |editor|.

    upordown: 1 searches backward (up); any other value searches forward.
    The hit is highlighted with the 'match' tag and selected, and the
    insert mark is moved past it so repeated clicks step through matches.
    """
    # The needle is whatever was typed on the search box's single line.
    needle = e.get( "insert linestart", INSERT)
    if not needle: return
    if upordown == 1:
        backwards = 1 # up
        # Start len(needle) chars before the cursor so the match we just
        # landed on is not found again.
        idx = '%s-%dc' % (editor.index( INSERT ), len( needle ))
    else:
        backwards = 0 # down
        idx = editor.index( INSERT )
    # Case-insensitive Text.search; returns '' when nothing is found.
    idx = editor.search( needle, idx, nocase=1, backwards = backwards )
    if not idx: return
    lastidx = '%s+%dc' % (idx, len( needle ))
    # Replace any previous highlight/selection with the new one.
    editor.tag_remove('match', '1.0', 'end')
    editor.tag_remove(SEL, '1.0', 'end')
    editor.tag_add('match', idx, lastidx)
    editor.tag_add(SEL, idx, lastidx)
    editor.tag_config( 'match', foreground = 'red', background = 'yellow')
    editor.mark_set("insert", lastidx)
    editor.see(INSERT)
    editor.focus_force()
    e.focus_set()
## .............................................
# --- Main window and editor widget -------------------------------------
root = Tk()
root.wm_state("zoomed")
menu = Menu(root)
root.config(menu=menu, background ="#001a00") #margin color
## -----
editor = IndicEditor(root)
editor.pack(fill=Y, expand=1, pady=0)
editor.focus_set()
## --- File menu ---
filemenu = Menu( menu, tearoff = 0)
menu.add_cascade(label="File", menu=filemenu)
filemenu.add_command(label="New", command=file_new, accelerator="Ctrl+N")
filemenu.add_command(label="Open", command=file_open, accelerator="Ctrl+O")
filemenu.add_command(label="Save", command=file_save, accelerator="Ctrl+S")
filemenu.add_command(label="Save as...", command=file_save_as)
filemenu.add_separator()
# NOTE(review): accelerator text "<Ctrl+Q>" is displayed with angle
# brackets, unlike the other entries -- cosmetic inconsistency.
filemenu.add_command(label="Exit", command=file_quit, accelerator="<Ctrl+Q>")
# --- Edit menu ---
editmenu = Menu(menu, tearoff = 0)
menu.add_cascade(label="Edit", menu=editmenu)
editmenu.add_command(label="Undo", command=editor.onUndo, accelerator="Ctrl+Z")
editmenu.add_command(label="Redo", command=editor.onRedo, accelerator="Ctrl+Y")
editmenu.add_separator()
editmenu.add_command(label="Find", command=onFind, accelerator="Ctrl+F")
editmenu.add_separator()
editmenu.add_command(label="Preferences", command=dummy)
# --- Help menu ---
helpmenu = Menu(menu, tearoff = 0)
menu.add_cascade(label="Help", menu=helpmenu)
helpmenu.add_command(label="How to use...", command=howtouse)
helpmenu.add_separator()
helpmenu.add_command(label="About...", command=about_command)
# ---Provide status bar at the bottom to display current settings
# Left side: map / script / language; right side: line / column.
statusFrame = Frame(root)
statusFrame.pack(side=BOTTOM, fill=X)
mapLabel = Label( statusFrame, text="Map: ", relief=SUNKEN)
mapLabel.pack( side = LEFT)
scriptLabel = Label( statusFrame, text="Script: ", relief=SUNKEN)
scriptLabel.pack( side = LEFT )
langLabel = Label( statusFrame, text="Lang: ", relief=SUNKEN)
langLabel.pack( side = LEFT )
colLabel = Label( statusFrame, text="Col: ", relief=SUNKEN)
colLabel.pack( side = RIGHT)
lineLabel = Label( statusFrame, text="Ln: " ,relief=SUNKEN)
lineLabel.pack( side = RIGHT )
#------------
# punctuations
# Tk keysym names for punctuation keys; a key press whose keysym appears
# here triggers transliteration of the word before the cursor (see
# ghaTanA).  Bug fix: ' backslash' had a leading space, so the real
# keysym 'backslash' could never match.
lekhanacihnam = ['exclam', 'quotedbl', 'numbersign', 'dollar', 'percent',
    'quoteright', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma',
    'minus', 'period', 'slash', 'colon', 'semicolon', 'less', 'equal',
    'greater', 'question', 'at', 'bracketleft', 'backslash', 'bracketright',
    'asciicircum', 'underscore', 'quoteleft', 'braceleft', 'bar', 'braceright',
    'asciitilde', 'ampersand']
class _nirdezakaH( object ):
    """Input-mode manager (translated from the Sanskrit docstring:
    "indicates by which scheme the conversion is to be done").

    Tracks the configured transliteration map, target script and
    language, the Escape-toggled raw-Latin state, and the cursor
    position, mirroring all of them in the status-bar labels.
    """
    def __init__(self, map='KyotoHarvard', form='Devanagari',
                 infont='Mangal', infontsz=11, bhASA='Sanskrit'):
        # Configured (persistent) settings.
        self.lang = bhASA
        self.script = form
        self.transliteration = map
        self.font = infont
        self.fontsz = infontsz
        # True while Escape has switched input to raw Latin (see toggle()).
        self.escaped = False
        # Effective settings currently shown in the status bar.
        self.currentLang = bhASA
        self.currentScript = form
        self.currentTrans = map
        self.currentLine = 1
        self.currentCol = 0
    def displayStatus( self, event = None ):
        """Refresh the status-bar labels from the editor cursor position."""
        strLoc = editor.index( INSERT )
        # Text indices are "line.column" strings.
        loc = strLoc.split('.')
        self.currentLine = loc[0]
        self.currentCol = loc[1]
        langLabel.config( text = 'Lang: ' + self.currentLang )
        scriptLabel.config( text = 'Script: ' + self.currentScript )
        mapLabel.config( text = 'Map: ' + self.currentTrans )
        lineLabel.config( text = 'Line: ' + str(self.currentLine))
        colLabel.config( text = 'Col: ' + str(self.currentCol ))
        statusFrame.update_idletasks()
    def setLang( self, lang ):
        # Setter for the configured language (not the displayed one).
        self.lang = lang
    def setScript ( self, script ):
        # Setter for the configured target script.
        self.script = script
    def setMap( self, map ):
        # Setter for the configured transliteration map.
        self.transliteration = map
    def toggle( self ):
        """Flip between transliterated input and raw Latin (Escape key)."""
        self.escaped = not( self.escaped )
        if self.escaped:
            self.currentLang = 'LatinAny'
            self.currentScript = 'Latin'
            self.currentTrans = 'None'
        else:
            # Restore the configured settings.
            self.currentLang = self.lang
            self.currentScript = self.script
            self.currentTrans = self.transliteration
        self.displayStatus() # update GUI status
#............................................................
def siddhatAkR(event):
    """Transliterate the text before the cursor in the event's widget.

    (Sanskrit comments translated to English below.)
    """
    ## Fetch the current line up to the insert cursor from the widget.
    saAva = event.widget.get( "insert linestart", INSERT)
    ## Split where there is a space boundary.
    zabdAvali = saAva.split(' ')
    if len( zabdAvali ) > 1:
        # Re-join the last two fragments so the converter sees the
        # previous word together with the current one.
        zabdAvali[-1] = zabdAvali[-2] + ' ' + zabdAvali[-1]
        zabdAvali.pop(-2)
    ## Perform the conversion.
    # NOTE(review): sanHK_parivRtyatAm / engVirgin_parivRtyatAm are defined
    # elsewhere in this file; presumably Harvard-Kyoto -> Devanagari and a
    # Latin pass-through respectively -- confirm.
    if nirdezakaH.currentScript == 'Devanagari':
        zabdAvali[-1] = sanHK_parivRtyatAm( event.keysym, zabdAvali[-1] )
    else:
        # Latin mode, skip conversion
        zabdAvali[-1] = engVirgin_parivRtyatAm( event.keysym, zabdAvali[-1] )
    # Join everything back together.
    nu_saAva = ' '.join(zabdAvali)
    ## Write the converted text back into the widget.
    event.widget.delete( "insert linestart", INSERT ) # delete old
    event.widget.insert( INSERT, nu_saAva)
    return
#............................................................
def ghaTanA( event ):
    """Global key handler: refresh the status bar, transliterate on
    printable keys and punctuation keysyms, toggle Latin mode on Escape."""
    nirdezakaH.displayStatus()  # update status at the bottom
    # event.char == event.keysym holds exactly for plain printable keys.
    if event.char == event.keysym or event.keysym in lekhanacihnam:
        siddhatAkR(event)
    elif event.keysym == "Escape":
        nirdezakaH.toggle()
#............................................................
# instantiate mode manager
nirdezakaH = _nirdezakaH()
nirdezakaH.displayStatus() # update status at the bottom
# Global key handler drives transliteration for every keystroke.
root.bind_all( '<Key>', ghaTanA )
# Clicking moves the cursor, so refresh the line/column display.
root.bind("<Button-1>", nirdezakaH.displayStatus )
root.bind("<Control-n>", file_new)
root.bind("<Control-o>", file_open)
root.bind("<Control-s>", file_save)
root.bind("<Control-Shift-S>", file_save_as)
root.bind("<Control-f>", onFind)
root.bind("<Control-q>", file_quit)
root.protocol("WM_DELETE_WINDOW", file_quit) # window close button
# Optionally open a file named on the command line; ignore a missing
# argument or an unreadable file.
try:
    editor.load(sys.argv[1])
except (IndexError, IOError):
    pass
mainloop()
''' To-do
Turn the code into OO
config menu/pop-up - change translit., target script, target lang, font & size
preserve in a cookie - (ala darkroom)
current config,
unsaved editor,
search string,
[un]zoom option]
Recently saved files
Add ITRANS, Velthuis, WX, SLP1,
Backspace - "completely delete what was entered with a single key"
From Basic-English spell checker, create dictionary of 5000+ words and their 7 vibhatis. use that for
spell-check :-)
'''
| {
"content_hash": "05b175f69da7b46affb40f4178a7bc21",
"timestamp": "",
"source": "github",
"line_count": 444,
"max_line_length": 114,
"avg_line_length": 32.30855855855856,
"alnum_prop": 0.5696061345416521,
"repo_name": "sammohan/dyukSI",
"id": "98893f94b6d6a5af9485eeedc1c9eb07126e77f2",
"size": "14641",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dyukSI.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45205"
}
],
"symlink_target": ""
} |
"""Generic utilities for all python scripts."""
import atexit
import httplib
import os
import platform
import signal
import stat
import subprocess
import sys
import tempfile
import urlparse
import zipfile
def GetPlatformName():
  """Return a string to be used in paths for the platform."""
  checks = ((IsWindows, 'win'), (IsMac, 'mac'), (IsLinux, 'linux'))
  for predicate, name in checks:
    if predicate():
      return name
  raise NotImplementedError('Unknown platform "%s".' % sys.platform)
def IsWindows():
  """True on native Windows or cygwin."""
  return sys.platform.startswith('win') or sys.platform == 'cygwin'


def IsLinux():
  """True on Linux."""
  return sys.platform.startswith('linux')


def IsMac():
  """True on macOS."""
  return sys.platform.startswith('darwin')


def Is64Bit():
  """True when the running interpreter is a 64-bit build."""
  bits, _linkage = platform.architecture()
  return bits == '64bit'
def GetAbsolutePathOfUserPath(user_path):
  """Expand the given |user_path| (like "~/file") and return its absolute
  path; None maps to None."""
  return (None if user_path is None
          else os.path.abspath(os.path.expanduser(user_path)))
def _DeleteDir(path):
"""Deletes a directory recursively, which must exist."""
# Don't use shutil.rmtree because it can't delete read-only files on Win.
for root, dirs, files in os.walk(path, topdown=False):
for name in files:
filename = os.path.join(root, name)
os.chmod(filename, stat.S_IWRITE)
os.remove(filename)
for name in dirs:
os.rmdir(os.path.join(root, name))
os.rmdir(path)
def Delete(path):
  """Delete the given file or directory (recursively); it must exist."""
  remover = _DeleteDir if os.path.isdir(path) else os.remove
  remover(path)
def MaybeDelete(path):
  """Delete |path| (file or directory, recursively) if it exists."""
  if not os.path.exists(path):
    return
  Delete(path)
def MakeTempDir(parent_dir=None):
  """Create a temporary directory and return its absolute path.

  The directory is removed automatically when the interpreter exits
  normally (via an atexit hook).

  Args:
    parent_dir: directory to create the temp dir in; the system temp
        directory is used when None.

  Returns:
    Absolute path of the new directory.
  """
  temp_path = tempfile.mkdtemp(prefix='chromedriver_', dir=parent_dir)
  atexit.register(MaybeDelete, temp_path)
  return temp_path
def Zip(path):
  """Deflate-compress the single file |path| into a fresh temp dir and
  return the path of the resulting build.zip."""
  zip_path = os.path.join(MakeTempDir(), 'build.zip')
  archive = zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED)
  try:
    # Store the file under its base name only, without directories.
    archive.write(path, os.path.basename(path))
  finally:
    archive.close()
  return zip_path
def Unzip(zip_path, output_dir):
  """Unzip |zip_path| into |output_dir| using a system-installed tool.

  Args:
    zip_path: zip file to unzip.
    output_dir: directory to unzip into; it must already exist.

  Raises:
    RuntimeError if the unzip operation fails.
  """
  if IsWindows():
    command = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y', zip_path]
  else:
    command = ['unzip', '-o', zip_path]
  if RunCommand(command, output_dir) != 0:
    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
def Kill(pid):
  """Terminate the given pid (and, on Windows, its child processes)."""
  if not IsWindows():
    os.kill(pid, signal.SIGTERM)
    return
  # /T kills the whole process tree; /F forces termination.
  subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
def RunCommand(cmd, cwd=None):
  """Run the given command to completion and return its exit code.

  Args:
    cmd: list of command arguments.
    cwd: working directory for the command; the current working
        directory is used when None.

  Returns:
    The exit code of the command.
  """
  # Flush around the child so its output interleaves correctly with ours.
  sys.stdout.flush()
  process = subprocess.Popen(cmd, cwd=cwd)
  exit_code = process.wait()
  sys.stdout.flush()
  return exit_code
def DoesUrlExist(url):
  """Determines whether a resource exists at the given URL.

  Issues a HEAD request and follows 301/302 redirects recursively.

  Args:
    url: URL to be verified.

  Returns:
    True if url exists, otherwise False.
  """
  parsed = urlparse.urlparse(url)
  try:
    conn = httplib.HTTPConnection(parsed.netloc)
    conn.request('HEAD', parsed.path)
    response = conn.getresponse()
  except httplib.HTTPException:
    return False
  finally:
    # NOTE(review): if HTTPConnection() itself raised, 'conn' would be
    # unbound here and close() would raise NameError; also socket.error
    # from request() is not caught -- confirm these cases cannot occur.
    conn.close()
  # Follow both permanent (301) and temporary (302) redirects.
  if response.status == 302 or response.status == 301:
    return DoesUrlExist(response.getheader('location'))
  return response.status == 200
# Buildbot annotator helpers: each prints a magic @@@...@@@ marker that
# the buildbot log parser interprets, then flushes so the marker is not
# reordered relative to child-process output.  (Python 2 print syntax.)
def MarkBuildStepStart(name):
  """Start a new named build step in the buildbot log."""
  print '@@@BUILD_STEP %s@@@' % name
  sys.stdout.flush()
def MarkBuildStepError():
  """Mark the current build step as failed."""
  print '@@@STEP_FAILURE@@@'
  sys.stdout.flush()
def AddBuildStepText(text):
  """Attach extra display text to the current build step."""
  print '@@@STEP_TEXT@%s@@@' % text
  sys.stdout.flush()
def PrintAndFlush(text):
  """Print |text| and flush stdout immediately."""
  print text
  sys.stdout.flush()
def AddLink(label, url):
  """Adds a link with name |label| linking to |url| to current buildbot step.

  Args:
    label: A string with the name of the label.
    url: A string of the URL.
  """
  print '@@@STEP_LINK@%s@%s@@@' % (label, url)
| {
"content_hash": "a533e51c507435dc8281599335c27696",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 79,
"avg_line_length": 23.995049504950494,
"alnum_prop": 0.6771198679595626,
"repo_name": "wuhengzhi/chromium-crosswalk",
"id": "35ac49964ccb1ec9ddfe81792c72cd11419b661f",
"size": "5014",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "chrome/test/chromedriver/util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import pytest
import json
import time
import msgpack
from api import create_app
@pytest.fixture
def taxcalc_inputs():
    """Minimal valid job payload: a single-reform list with user_mods,
    start year, data-source flag and year offset."""
    return [{
        'user_mods': {
            "policy": {
                2017: {"_FICA_ss_trt": [0.1]}},
            "consumption": {},
            "behavior": {},
            "growdiff_baseline": {},
            "growdiff_response": {},
            "growmodel": {}
        },
        'start_year': 2017,
        'use_puf_not_cps': False,
        'year_n': 0
    }]
@pytest.fixture
def app():
    """Flask application configured for testing."""
    app = create_app({'TESTING': True})
    yield app
@pytest.fixture
def client(app):
    """Test client bound to the testing app."""
    return app.test_client()
def post_and_poll(client, url, data, exp_status='YES', tries=30):
    """POST a msgpack-encoded job to |url|, poll it to completion, and
    return the result response.

    Args:
        client: Flask test client.
        url: endpoint that enqueues the job and returns a job_id.
        data: payload to msgpack-encode and POST.
        exp_status: status expected from /dropq_query_result once the job
            leaves the pending ('NO') state, e.g. 'YES' or 'FAIL'.
        tries: maximum number of 1-second polls before giving up.

    Returns:
        The /dropq_get_result response for the finished job.
    """
    packed = msgpack.dumps(data, use_bin_type=True)
    resp = client.post(url,
                       data=packed,
                       headers={'Content-Type': 'application/octet-stream'}
                       )
    assert resp.status_code == 200
    # Don't clobber the 'data' argument with the response payload.
    payload = json.loads(resp.data.decode('utf-8'))
    job_id = payload['job_id']
    status = 'NO'
    while status == 'NO' and tries > 0:
        resp = client.get(
            '/dropq_query_result?job_id={job_id}'.format(job_id=job_id)
        )
        assert resp.status_code == 200
        status = resp.data.decode('utf-8')
        if status != 'NO':
            break  # terminal status: skip the pointless final sleep
        time.sleep(1)
        tries -= 1
    assert status == exp_status
    resp = client.get(
        '/dropq_get_result?job_id={job_id}'.format(job_id=job_id)
    )
    assert resp.status_code == 200
    return resp
def test_hello(client):
    """Smoke test: the service answers on /hello.

    Bug fix: the original only printed the response and asserted nothing,
    so it could never fail.
    """
    resp = client.get('/hello')
    assert resp.status_code == 200
def test_dropq_small_start_job(client, taxcalc_inputs):
    """A small job run end-to-end yields aggregate outputs."""
    resp = post_and_poll(client, '/dropq_small_start_job', taxcalc_inputs)
    result = json.loads(resp.data.decode('utf-8'))
    assert 'aggr_outputs' in result
def test_dropq_job_fails(client, taxcalc_inputs):
    """A payload missing the 'policy' key fails and the result carries a
    traceback."""
    del taxcalc_inputs[0]['user_mods']['policy']
    resp = post_and_poll(client, '/dropq_start_job', exp_status='FAIL',
                         data=taxcalc_inputs)
    assert 'Traceback' in resp.data.decode('utf-8')
| {
"content_hash": "9c262e2b2c962b20e7ad5c5106f22224",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 75,
"avg_line_length": 24.96385542168675,
"alnum_prop": 0.5603281853281853,
"repo_name": "OpenSourcePolicyCenter/webapp-public",
"id": "ce1cc51e3ea868219702b0262c979e9eab6b7c3f",
"size": "2072",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "distributed/api/tests/test_flask.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3035251"
},
{
"name": "HTML",
"bytes": "167710"
},
{
"name": "JavaScript",
"bytes": "154977"
},
{
"name": "Python",
"bytes": "743832"
},
{
"name": "SaltStack",
"bytes": "4265"
},
{
"name": "Shell",
"bytes": "6693"
}
],
"symlink_target": ""
} |
"""Zwave discovery schemas."""
from . import const
# Optional values merged into every per-component schema below.  The
# 'power' value may come from either the multilevel-sensor power index
# or the meter power index.
DEFAULT_VALUES_SCHEMA = {
    'power': {
        const.DISC_SCHEMAS: [
            {const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SENSOR_MULTILEVEL],
             const.DISC_INDEX: [const.INDEX_SENSOR_MULTILEVEL_POWER]},
            {const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_METER],
             const.DISC_INDEX: [const.INDEX_METER_POWER]},
        ],
        const.DISC_OPTIONAL: True,
    },
}
# Each entry maps a Home Assistant component to the Z-Wave generic (and
# optionally specific) device classes it matches and the values it uses.
# DISC_PRIMARY is the value required for the entity to be created; values
# marked DISC_OPTIONAL enrich the entity when present on the node.
DISCOVERY_SCHEMAS = [
    # Binary sensor: any boolean user-genre value on SENSOR_BINARY.
    {const.DISC_COMPONENT: 'binary_sensor',
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_ENTRY_CONTROL,
         const.GENERIC_TYPE_SENSOR_ALARM,
         const.GENERIC_TYPE_SENSOR_BINARY,
         const.GENERIC_TYPE_SWITCH_BINARY,
         const.GENERIC_TYPE_METER,
         const.GENERIC_TYPE_SENSOR_MULTILEVEL,
         const.GENERIC_TYPE_SWITCH_MULTILEVEL,
         const.GENERIC_TYPE_SENSOR_NOTIFICATION,
         const.GENERIC_TYPE_THERMOSTAT],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SENSOR_BINARY],
             const.DISC_TYPE: const.TYPE_BOOL,
             const.DISC_GENRE: const.GENRE_USER,
         },
         # Configuration parameter 9 -- presumably a device-specific
         # off-delay setting; confirm which devices use this index.
         'off_delay': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_CONFIGURATION],
             const.DISC_INDEX: [9],
             const.DISC_OPTIONAL: True,
         }})},
    # Climate: requires a thermostat setpoint; everything else optional.
    {const.DISC_COMPONENT: 'climate',
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_THERMOSTAT,
         const.GENERIC_TYPE_SENSOR_MULTILEVEL],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [
                 const.COMMAND_CLASS_THERMOSTAT_SETPOINT],
         },
         'temperature': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SENSOR_MULTILEVEL],
             const.DISC_INDEX: [const.INDEX_SENSOR_MULTILEVEL_TEMPERATURE],
             const.DISC_OPTIONAL: True,
         },
         'mode': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_THERMOSTAT_MODE],
             const.DISC_OPTIONAL: True,
         },
         'fan_mode': {
             const.DISC_COMMAND_CLASS: [
                 const.COMMAND_CLASS_THERMOSTAT_FAN_MODE],
             const.DISC_OPTIONAL: True,
         },
         'operating_state': {
             const.DISC_COMMAND_CLASS: [
                 const.COMMAND_CLASS_THERMOSTAT_OPERATING_STATE],
             const.DISC_OPTIONAL: True,
         },
         'fan_state': {
             const.DISC_COMMAND_CLASS: [
                 const.COMMAND_CLASS_THERMOSTAT_FAN_STATE],
             const.DISC_OPTIONAL: True,
         },
         # Configuration parameter 33 -- presumably the swing-mode setting
         # of the Remotec ZXT-120 (per the key name); confirm.
         'zxt_120_swing_mode': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_CONFIGURATION],
             const.DISC_INDEX: [33],
             const.DISC_OPTIONAL: True,
         }})},
    {const.DISC_COMPONENT: 'cover',  # Rollershutter
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_SWITCH_MULTILEVEL,
         const.GENERIC_TYPE_ENTRY_CONTROL],
     const.DISC_SPECIFIC_DEVICE_CLASS: [
         const.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_MOTOR_MULTIPOSITION,
         const.SPECIFIC_TYPE_SECURE_BARRIER_ADDON,
         const.SPECIFIC_TYPE_SECURE_DOOR],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL],
             const.DISC_GENRE: const.GENRE_USER,
         },
         # Bright/Dim multilevel indices drive open/close motion.
         'open': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL],
             const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_BRIGHT],
             const.DISC_OPTIONAL: True,
         },
         'close': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL],
             const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_DIM],
             const.DISC_OPTIONAL: True,
         }})},
    {const.DISC_COMPONENT: 'cover',  # Garage Door Switch
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_SWITCH_MULTILEVEL,
         const.GENERIC_TYPE_ENTRY_CONTROL],
     const.DISC_SPECIFIC_DEVICE_CLASS: [
         const.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_MOTOR_MULTIPOSITION,
         const.SPECIFIC_TYPE_SECURE_BARRIER_ADDON,
         const.SPECIFIC_TYPE_SECURE_DOOR],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_BINARY],
             const.DISC_GENRE: const.GENRE_USER,
         }})},
    {const.DISC_COMPONENT: 'cover',  # Garage Door Barrier
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_SWITCH_MULTILEVEL,
         const.GENERIC_TYPE_ENTRY_CONTROL],
     const.DISC_SPECIFIC_DEVICE_CLASS: [
         const.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL,
         const.SPECIFIC_TYPE_MOTOR_MULTIPOSITION,
         const.SPECIFIC_TYPE_SECURE_BARRIER_ADDON,
         const.SPECIFIC_TYPE_SECURE_DOOR],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_BARRIER_OPERATOR],
             const.DISC_INDEX: [const.INDEX_BARRIER_OPERATOR_LABEL],
         }})},
    # Fan: byte-typed multilevel level on a fan-switch device.
    {const.DISC_COMPONENT: 'fan',
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_SWITCH_MULTILEVEL],
     const.DISC_SPECIFIC_DEVICE_CLASS: [
         const.SPECIFIC_TYPE_FAN_SWITCH],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL],
             const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_LEVEL],
             const.DISC_TYPE: const.TYPE_BYTE,
         }})},
    # Light: dimmer level plus optional duration and color support.
    {const.DISC_COMPONENT: 'light',
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_SWITCH_MULTILEVEL,
         const.GENERIC_TYPE_SWITCH_REMOTE],
     const.DISC_SPECIFIC_DEVICE_CLASS: [
         const.SPECIFIC_TYPE_POWER_SWITCH_MULTILEVEL,
         const.SPECIFIC_TYPE_SCENE_SWITCH_MULTILEVEL,
         const.SPECIFIC_TYPE_NOT_USED],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL],
             const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_LEVEL],
             const.DISC_TYPE: const.TYPE_BYTE,
         },
         'dimming_duration': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_MULTILEVEL],
             const.DISC_INDEX: [const.INDEX_SWITCH_MULTILEVEL_DURATION],
             const.DISC_OPTIONAL: True,
         },
         'color': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_COLOR],
             const.DISC_INDEX: [const.INDEX_SWITCH_COLOR_COLOR],
             const.DISC_OPTIONAL: True,
         },
         'color_channels': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_COLOR],
             const.DISC_INDEX: [const.INDEX_SWITCH_COLOR_CHANNELS],
             const.DISC_OPTIONAL: True,
         }})},
    # Lock: door-lock state plus optional alarm/notification values.
    {const.DISC_COMPONENT: 'lock',
     const.DISC_GENERIC_DEVICE_CLASS: [const.GENERIC_TYPE_ENTRY_CONTROL],
     const.DISC_SPECIFIC_DEVICE_CLASS: [
         const.SPECIFIC_TYPE_DOOR_LOCK,
         const.SPECIFIC_TYPE_ADVANCED_DOOR_LOCK,
         const.SPECIFIC_TYPE_SECURE_KEYPAD_DOOR_LOCK,
         const.SPECIFIC_TYPE_SECURE_LOCKBOX],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_DOOR_LOCK],
             const.DISC_INDEX: [const.INDEX_DOOR_LOCK_LOCK],
         },
         'access_control': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_ALARM],
             const.DISC_INDEX: [const.INDEX_ALARM_ACCESS_CONTROL],
             const.DISC_OPTIONAL: True,
         },
         'alarm_type': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_ALARM],
             const.DISC_INDEX: [const.INDEX_ALARM_TYPE],
             const.DISC_OPTIONAL: True,
         },
         'alarm_level': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_ALARM],
             const.DISC_INDEX: [const.INDEX_ALARM_LEVEL],
             const.DISC_OPTIONAL: True,
         },
         # Configuration parameter 12 -- presumably an "advanced" setting
         # of the Vision ZD2102 (per the v2btze key name); confirm.
         'v2btze_advanced': {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_CONFIGURATION],
             const.DISC_INDEX: [12],
             const.DISC_OPTIONAL: True,
         }})},
    # Sensor: catch-all for user-genre sensor/meter/alarm values; note it
    # declares no device-class filter, so it matches on values alone.
    {const.DISC_COMPONENT: 'sensor',
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [
                 const.COMMAND_CLASS_SENSOR_MULTILEVEL,
                 const.COMMAND_CLASS_METER,
                 const.COMMAND_CLASS_ALARM,
                 const.COMMAND_CLASS_SENSOR_ALARM],
             const.DISC_GENRE: const.GENRE_USER,
         }})},
    # Switch: boolean user-genre value on SWITCH_BINARY for a broad range
    # of generic device classes.
    {const.DISC_COMPONENT: 'switch',
     const.DISC_GENERIC_DEVICE_CLASS: [
         const.GENERIC_TYPE_METER,
         const.GENERIC_TYPE_SENSOR_ALARM,
         const.GENERIC_TYPE_SENSOR_BINARY,
         const.GENERIC_TYPE_SWITCH_BINARY,
         const.GENERIC_TYPE_ENTRY_CONTROL,
         const.GENERIC_TYPE_SENSOR_MULTILEVEL,
         const.GENERIC_TYPE_SWITCH_MULTILEVEL,
         const.GENERIC_TYPE_SENSOR_NOTIFICATION,
         const.GENERIC_TYPE_GENERIC_CONTROLLER,
         const.GENERIC_TYPE_SWITCH_REMOTE,
         const.GENERIC_TYPE_REPEATER_SLAVE,
         const.GENERIC_TYPE_THERMOSTAT,
         const.GENERIC_TYPE_WALL_CONTROLLER],
     const.DISC_VALUES: dict(DEFAULT_VALUES_SCHEMA, **{
         const.DISC_PRIMARY: {
             const.DISC_COMMAND_CLASS: [const.COMMAND_CLASS_SWITCH_BINARY],
             const.DISC_TYPE: const.TYPE_BOOL,
             const.DISC_GENRE: const.GENRE_USER,
         }})},
]
| {
"content_hash": "ecb0ea723838592c6a479e6620ea5bdc",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 79,
"avg_line_length": 42.936974789915965,
"alnum_prop": 0.5985908601624426,
"repo_name": "persandstrom/home-assistant",
"id": "2a4e42ab92c21dfff11cd2380b42e6ea75693c21",
"size": "10219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homeassistant/components/zwave/discovery_schemas.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1067"
},
{
"name": "Python",
"bytes": "11745210"
},
{
"name": "Ruby",
"bytes": "518"
},
{
"name": "Shell",
"bytes": "16652"
}
],
"symlink_target": ""
} |
import time
from raiden.mtree import Merkletree
from raiden.utils import keccak
def do_test_speed(rounds=100, num_hashes=1000):
    """Benchmark Merkle-root computation.

    Builds a tree of |num_hashes| keccak leaves |rounds| times and
    prints the achieved rate.  (Python 2 print syntax below.)
    """
    values = [
        keccak(str(i))
        for i in range(num_hashes)
    ]
    start_time = time.time()
    for __ in range(rounds):
        # Property access triggers the root computation; the result is
        # intentionally discarded.
        Merkletree(values).merkleroot
    elapsed = time.time() - start_time
    print '%d additions per second' % (num_hashes * rounds / elapsed)


if __name__ == '__main__':
    do_test_speed()
| {
"content_hash": "490576410f72daefe33b208eb9ad9068",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 69,
"avg_line_length": 20.304347826086957,
"alnum_prop": 0.6209850107066381,
"repo_name": "charles-cooper/raiden",
"id": "88fd63ef7dfb1607e3a59b21e5bfb8ed8c3cf806",
"size": "491",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raiden/tests/benchmark/merkle_tree_speed.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "5202"
},
{
"name": "Python",
"bytes": "952454"
},
{
"name": "Shell",
"bytes": "4384"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Widen Event.slug to 250 characters."""
        # Changing field 'Event.slug'
        db.alter_column('operations_event', 'slug', self.gf('django.db.models.fields.SlugField')(max_length=250))
def backwards(self, orm):
# Changing field 'Event.slug'
db.alter_column('operations_event', 'slug', self.gf('django.db.models.fields.SlugField')(max_length=50))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'maps.map': {
'Meta': {'object_name': 'Map'},
'center_x': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'center_y': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '800', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'projection': ('django.db.models.fields.CharField', [], {'default': "'EPSG:4326'", 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75'}),
'zoom': ('django.db.models.fields.IntegerField', [], {})
},
'operations.agency': {
'Meta': {'ordering': "['name']", 'object_name': 'Agency'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'operations.deployment': {
'Meta': {'object_name': 'Deployment'},
'closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deployers': ('django.db.models.fields.related.ManyToManyField', [], {'max_length': '250', 'to': "orm['auth.User']", 'null': 'True', 'symmetrical': 'False', 'blank': 'True'}),
'deployment_location': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'description': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['operations.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {}),
'longitude': ('django.db.models.fields.FloatField', [], {}),
'point': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1'})
},
'operations.event': {
'Meta': {'ordering': "['-last_updated']", 'object_name': 'Event'},
'agencies': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['operations.Agency']", 'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'collaboration_link': ('django.db.models.fields.URLField', [], {'default': "'https://connect.dco.dod.mil/r3ops?launcher=false'", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'event_location': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'event_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'filedropoff_path': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'gq_job_ids': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'gq_project_ids': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {}),
'map': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['maps.Map']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'poc': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'point': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'posture': ('django.db.models.fields.CharField', [], {'default': "'Monitoring'", 'max_length': '25'}),
'product_feed_url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'rfi_generator_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'services': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['operations.Service']", 'null': 'True', 'blank': 'True'}),
'show_deployments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_event_on_map': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_geomedia_triage': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_notes': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_products': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_related_files': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_rfis': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_services': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_supporting_agencies': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '250'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1'}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'})
},
'operations.lessonlearned': {
'Meta': {'ordering': "['-created']", 'unique_together': "(('submitted_by', 'description', 'event'),)", 'object_name': 'LessonLearned'},
'action': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'assigned_to': ('django.db.models.fields.related.ForeignKey', [], {'max_length': '250', 'related_name': "'lesson_learned_assignment'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'max_length': '50', 'to': "orm['operations.LessonLearnedCategory']", 'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True'}),
'due': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['operations.Event']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'Low'", 'max_length': '25', 'null': 'True', 'blank': 'True'}),
'resolution': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1', 'null': 'True', 'blank': 'True'}),
'submitted_by': ('django.db.models.fields.related.ForeignKey', [], {'max_length': '250', 'related_name': "'lesson_learned_submission'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'work_around': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'})
},
'operations.lessonlearnedcategory': {
'Meta': {'ordering': "['name']", 'object_name': 'LessonLearnedCategory'},
'description': ('tinymce.models.HTMLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'operations.service': {
'Meta': {'ordering': "['name']", 'object_name': 'Service'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('tinymce.models.HTMLField', [], {'max_length': '800', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'service_type': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['operations.ServiceType']", 'symmetrical': 'False'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'operations.servicetype': {
'Meta': {'ordering': "['name']", 'object_name': 'ServiceType'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('tinymce.models.HTMLField', [], {'max_length': '800', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1'})
},
'operations.sitrep': {
'Meta': {'ordering': "['-created']", 'object_name': 'SitRep'},
'closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'content': ('tinymce.models.HTMLField', [], {'max_length': '6000'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['operations.Event']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'max_length': '250', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1', 'null': 'True', 'blank': 'True'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['operations'] | {
"content_hash": "790e85a3291a751fcdae869bb719dc01",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 208,
"avg_line_length": 86.13065326633166,
"alnum_prop": 0.5469661610268378,
"repo_name": "ngageoint/geoevents",
"id": "a4589813bc32c6a40e261396eb70060c2cabdce1",
"size": "17164",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "geoevents/operations/migrations/0011_auto__chg_field_event_slug.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7006"
},
{
"name": "CSS",
"bytes": "169395"
},
{
"name": "JavaScript",
"bytes": "10629452"
},
{
"name": "Python",
"bytes": "1589774"
},
{
"name": "Shell",
"bytes": "4212"
}
],
"symlink_target": ""
} |
"""
pygments.formatters.svg
~~~~~~~~~~~~~~~~~~~~~~~
Formatter for SVG output.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
from pygments.token import Comment
from pygments.util import get_bool_opt, get_int_opt
# Public API of this module.
__all__ = ['SvgFormatter']
def escape_html(text):
    """Escape &, <, > as well as single and double quotes for HTML."""
    # str.translate performs all substitutions in a single pass, so the
    # '&' of an inserted entity is never re-escaped.
    return text.translate({
        ord('&'): '&amp;',
        ord('<'): '&lt;',
        ord('>'): '&gt;',
        ord('"'): '&quot;',
        ord("'"): '&#39;',
    })
# NOTE(review): appears unused within this module — presumably a leftover;
# confirm no external code imports it before removing.
class2style = {}
class SvgFormatter(Formatter):
    """
    Format tokens as an SVG graphics file.  This formatter is still experimental.
    Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
    coordinates containing ``<tspan>`` elements with the individual token styles.

    By default, this formatter outputs a full SVG document including doctype
    declaration and the ``<svg>`` root element.

    .. versionadded:: 0.9

    Additional options accepted:

    `nowrap`
        Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
        don't add a XML declaration and a doctype. If true, the `fontfamily`
        and `fontsize` options are ignored. Defaults to ``False``.

    `fontfamily`
        The value to give the wrapping ``<g>`` element's ``font-family``
        attribute, defaults to ``"monospace"``.

    `fontsize`
        The value to give the wrapping ``<g>`` element's ``font-size``
        attribute, defaults to ``"14px"``.

    `linenos`
        If ``True``, add line numbers (default: ``False``).

    `linenostart`
        The line number for the first line (default: ``1``).

    `linenostep`
        If set to a number n > 1, only every nth line number is printed.

    `linenowidth`
        Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
        for up to 4-digit line numbers. Increase width for longer code blocks).

    `xoffset`
        Starting offset in X direction, defaults to ``0``.

    `yoffset`
        Starting offset in Y direction, defaults to the font size if it is given
        in pixels, or ``20`` else. (This is necessary since text coordinates
        refer to the text baseline, not the top edge.)

    `ystep`
        Offset to add to the Y coordinate for each subsequent line. This should
        roughly be the text size plus 5. It defaults to that value if the text
        size is given in pixels, or ``25`` else.

    `spacehack`
        Convert spaces in the source to ``&#160;``, which are non-breaking
        spaces. SVG provides the ``xml:space`` attribute to control how
        whitespace inside tags is handled, in theory, the ``preserve`` value
        could be used to keep all whitespace as-is. However, many current SVG
        viewers don't obey that rule, so this option is provided as a workaround
        and defaults to ``True``.
    """
    name = 'SVG'
    aliases = ['svg']
    filenames = ['*.svg']

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        self.nowrap = get_bool_opt(options, 'nowrap', False)
        self.fontfamily = options.get('fontfamily', 'monospace')
        self.fontsize = options.get('fontsize', '14px')
        self.xoffset = get_int_opt(options, 'xoffset', 0)
        fs = self.fontsize.strip()
        if fs.endswith('px'):
            fs = fs[:-2].strip()
        try:
            int_fs = int(fs)
        except ValueError:
            # Font size is not a plain pixel count (e.g. '1em'); fall back
            # to a sane default for the derived y offsets.
            int_fs = 20
        self.yoffset = get_int_opt(options, 'yoffset', int_fs)
        self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
        self.spacehack = get_bool_opt(options, 'spacehack', True)
        self.linenos = get_bool_opt(options, 'linenos', False)
        self.linenostart = get_int_opt(options, 'linenostart', 1)
        self.linenostep = get_int_opt(options, 'linenostep', 1)
        self.linenowidth = get_int_opt(options, 'linenowidth', 3 * self.ystep)
        # Cache of token type -> SVG attribute string (see _get_style).
        self._stylecache = {}

    def format_unencoded(self, tokensource, outfile):
        """
        Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
        tuples and write it into ``outfile``.

        For our implementation we put all lines in their own 'line group'.
        """
        x = self.xoffset
        y = self.yoffset
        if not self.nowrap:
            if self.encoding:
                outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
                              self.encoding)
            else:
                outfile.write('<?xml version="1.0"?>\n')
            outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
                          '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
                          'svg10.dtd">\n')
            outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
            outfile.write('<g font-family="%s" font-size="%s">\n' %
                          (self.fontfamily, self.fontsize))

        counter = self.linenostart
        counter_step = self.linenostep
        # Line numbers are styled like comments.
        counter_style = self._get_style(Comment)
        line_x = x

        if self.linenos:
            if counter % counter_step == 0:
                outfile.write('<text x="%s" y="%s" %s text-anchor="end">%s</text>' %
                              (x + self.linenowidth, y, counter_style, counter))
            # Shift code to the right of the line-number column.
            line_x += self.linenowidth + self.ystep
            counter += 1

        outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
        for ttype, value in tokensource:
            style = self._get_style(ttype)
            tspan = style and '<tspan' + style + '>' or ''
            tspanend = tspan and '</tspan>' or ''
            value = escape_html(value)
            if self.spacehack:
                value = value.expandtabs().replace(' ', '&#160;')
            parts = value.split('\n')
            # Every newline in the token closes the current <text> line and
            # opens a new one at the next y position.
            for part in parts[:-1]:
                outfile.write(tspan + part + tspanend)
                y += self.ystep
                outfile.write('</text>\n')
                if self.linenos and counter % counter_step == 0:
                    outfile.write('<text x="%s" y="%s" text-anchor="end" %s>%s</text>' %
                                  (x + self.linenowidth, y, counter_style, counter))
                counter += 1
                outfile.write('<text x="%s" y="%s" ' 'xml:space="preserve">' % (line_x, y))
            outfile.write(tspan + parts[-1] + tspanend)
        outfile.write('</text>')

        if not self.nowrap:
            outfile.write('</g></svg>\n')

    def _get_style(self, tokentype):
        """Return (and cache) the SVG attribute string for *tokentype*."""
        if tokentype in self._stylecache:
            return self._stylecache[tokentype]
        otokentype = tokentype
        # Walk up the token hierarchy until a styled ancestor is found.
        while not self.style.styles_token(tokentype):
            tokentype = tokentype.parent
        value = self.style.style_for_token(tokentype)
        result = ''
        if value['color']:
            result = ' fill="#' + value['color'] + '"'
        if value['bold']:
            result += ' font-weight="bold"'
        if value['italic']:
            result += ' font-style="italic"'
        self._stylecache[otokentype] = result
        return result
| {
"content_hash": "0cd9d9f1e71361b95eadf0de37a013d1",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 90,
"avg_line_length": 38.82446808510638,
"alnum_prop": 0.5581586518701191,
"repo_name": "tmm1/pygments.rb",
"id": "547a7bbcdd86d91fb2b5c78bf4f435288a903bf5",
"size": "7299",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "vendor/pygments-main/pygments/formatters/svg.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "99523"
},
{
"name": "Python",
"bytes": "26555"
},
{
"name": "Ruby",
"bytes": "32068"
}
],
"symlink_target": ""
} |
def test():
    """Placeholder smoke test; always succeeds."""
    assert True
| {
"content_hash": "6ddaf06c079421734c6a99a8e48c91d9",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 15,
"avg_line_length": 14,
"alnum_prop": 0.6071428571428571,
"repo_name": "hiidef/django-quickbooks",
"id": "8ec8593d85d122cfb2046203d844abaf441a60f3",
"size": "70",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "quickbooks/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "144"
},
{
"name": "Python",
"bytes": "22656"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals

# Current Frappe release; single source of truth for packaging tools.
__version__ = "5.0.29"
| {
"content_hash": "d611c4257c2568a28917fce690b30018",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 39,
"avg_line_length": 31.5,
"alnum_prop": 0.6666666666666666,
"repo_name": "indictranstech/tele-frappe",
"id": "902d2412b994ce54a8a80cde7ed1273ee7f0301b",
"size": "63",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/__version__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "237130"
},
{
"name": "HTML",
"bytes": "131820"
},
{
"name": "JavaScript",
"bytes": "1331586"
},
{
"name": "Python",
"bytes": "1086457"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
} |
"""Event notifications."""
from happening.notifications import Notification
class EmailNotification(Notification):
    """Notification type recording that an email has been sent."""

    # Fields that must be supplied when creating this notification.
    required_data = ["subject", "content"]

    # This type only records the event; it is not itself delivered.
    send_notification = False
| {
"content_hash": "4089b2ed865bd6122dcba62cc2bda36a",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 48,
"avg_line_length": 20.636363636363637,
"alnum_prop": 0.7092511013215859,
"repo_name": "jscott1989/happening",
"id": "4c6e10b82da210ae8b761c7d9f019b7493ab1029",
"size": "227",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/emails/notifications.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48575"
},
{
"name": "CoffeeScript",
"bytes": "1402"
},
{
"name": "HTML",
"bytes": "219438"
},
{
"name": "JavaScript",
"bytes": "32186"
},
{
"name": "Python",
"bytes": "421487"
},
{
"name": "Shell",
"bytes": "2955"
}
],
"symlink_target": ""
} |
import sys
import os
import socket
import zmq
from PyQt4 import QtGui, QtNetwork, QtCore
from struct import *
from time import sleep
# UDP port on which the cFS TO_Lab application sends telemetry packets.
udpRecvPort = 1235
#
# Receive telemetry packets, apply the appropriate header
# and publish the message with zeroMQ
#
class RoutingService(QtCore.QThread):
    """Qt thread that receives telemetry over UDP and republishes it on zeroMQ.

    Each datagram is tagged with a per-host spacecraft name and forwarded on
    the ``GroundSystem.<name>.TelemetryPackets.<pktId>`` channel.
    """

    def __init__(self, mainWindow):
        # NOTE(review): mainWindow is accepted but not stored or used here.
        QtCore.QThread.__init__(self)
        # Signal to update the spacecraft combo box (list) on main window GUI
        self.signalUpdateIpList = QtCore.SIGNAL("changeIpList")
        # Init lists (index 0 is the catch-all "All" entry)
        self.ipAddressesList = ["All"]
        self.spacecraftNames = ["All"]
        self.specialPktId = []
        self.specialPktName = []
        # Init zeroMQ publisher over a local IPC endpoint
        self.context = zmq.Context()
        self.publisher = self.context.socket(zmq.PUB)
        self.publisher.bind("ipc:///tmp/GroundSystem")

    # Run thread
    def run(self):
        """Main loop: receive UDP datagrams and forward them until stopped."""
        # Init udp socket
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.bind(('', udpRecvPort))
        # Wait for UDP messages
        while True:
            try:
                # Receive message (up to 4096 bytes per datagram)
                datagram, host = self.sock.recvfrom(4096)
                # Ignore datagram if it is not long enough (doesn't contain a
                # full 6-byte telemetry header)
                if len(datagram) < 6:
                    continue
                # Read host address
                hostIpAddress = host[0]
                #
                # Add Host to the list if not already in list
                #
                if not any(hostIpAddress in s for s in self.ipAddressesList):
                    hostName = "Spacecraft" + str(len(self.spacecraftNames))
                    print "Detected " + hostName + " at " + hostIpAddress
                    self.ipAddressesList.append(hostIpAddress);
                    self.spacecraftNames.append(hostName)
                    # Notify the GUI so its spacecraft list stays in sync
                    self.emit(self.signalUpdateIpList, hostIpAddress, hostName)
                # Forward the message using zeroMQ
                name = self.spacecraftNames[self.ipAddressesList.index(hostIpAddress)]
                self.forwardMessage(datagram, name)
            # Handle errors (best-effort: log and keep receiving)
            except socket.error, v:
                print 'Ignored socket error.'
                sleep(1)

    # Apply header using hostname and packet id and send msg using zeroMQ
    def forwardMessage(self, datagram, hostName):
        """Publish *datagram* on its per-host, per-packet-id zeroMQ channel."""
        # Forward message to channel GroundSystem.<Hostname>.<pktId>
        pktId = self.getPktId(datagram)
        header = "GroundSystem." + hostName + ".TelemetryPackets." + pktId
        self.publisher.send_multipart([header, datagram])
        #print header

    # Read the packet id from the telemetry packet
    def getPktId(self, datagram):
        """Return the first 16-bit header field of *datagram* as a hex string."""
        # Read the telemetry header: three big-endian unsigned shorts
        streamId, Sequence, Length = unpack(">HHH",datagram[:6])
        # Uncomment the next line to debug
        # print "Packet ID = " , hex(streamId)
        return hex(streamId)

    # Close ZMQ vars
    def stop(self):
        """Release the UDP socket and tear down the zeroMQ context."""
        self.sock.close()
        self.context.destroy()
| {
"content_hash": "e17bec6de5b2403ecd6a476aa813430f",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 87,
"avg_line_length": 33.294736842105266,
"alnum_prop": 0.5991147644641164,
"repo_name": "CACTUS-Mission/TRAPSat",
"id": "9ce4ae0cd253602716be5c1b5f5a0313bff58aa5",
"size": "3188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TRAPSat_cFS/cfs/cfe/tools/cFS-GroundSystem/RoutingService.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "3346"
},
{
"name": "C",
"bytes": "5898670"
},
{
"name": "C++",
"bytes": "843022"
},
{
"name": "Java",
"bytes": "1041409"
},
{
"name": "Makefile",
"bytes": "262573"
},
{
"name": "Objective-C",
"bytes": "36682"
},
{
"name": "Perl",
"bytes": "79709"
},
{
"name": "Perl6",
"bytes": "21884"
},
{
"name": "Python",
"bytes": "597468"
},
{
"name": "Shell",
"bytes": "14444"
}
],
"symlink_target": ""
} |
import random
import unittest
from data_structures import bst, stack
class TestDataStructures(unittest.TestCase):
    """Unit tests for the bst.BST and stack.Stack implementations."""

    def test_BST(self):
        tree = bst.BST()
        # Insertion
        for value in (5, 3, 7, 2, 8, 4, 6):
            tree.insert(value)

        visit_func = lambda node: node._value

        # Each traversal should yield its expected visiting order.
        expectations = (
            (tree.traverse_preorder(visit_func), (5, 3, 2, 4, 7, 6, 8)),
            (tree.traverse_inorder(visit_func), (2, 3, 4, 5, 6, 7, 8)),
            (tree.traverse_postorder(visit_func), (2, 4, 3, 6, 8, 7, 5)),
        )
        for generator, expected_order in expectations:
            for expected in expected_order:
                self.assertEqual(generator.next(), expected)

        # Membership queries.
        self.assertFalse(tree.search(1))
        self.assertTrue(tree.search(3))
        self.assertTrue(tree.search(7))
        self.assertFalse(tree.search(9))

    def test_Stack(self):
        stk = stack.Stack()
        self.assertTrue(stk.is_empty())

        for item in (9, 1, 8, 2, 7, 3):
            stk.push(item)
        self.assertFalse(stk.is_empty())
        self.assertEqual(stk.size(), 6)

        # pop removes the top element; peek does not.
        self.assertEqual(stk.pop(), 3)
        self.assertEqual(stk.size(), 5)
        self.assertEqual(stk.peek(), 7)
        self.assertEqual(stk.size(), 5)

        # search returns depth from the top, or -1 when absent.
        for item, depth in ((7, 0), (8, 2), (9, 4), (5, -1)):
            self.assertEqual(stk.search(item), depth)
# Run the test suite above when executed directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "97cd0eacbdac4c2eca89decc5585f4c0",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 65,
"avg_line_length": 33.57142857142857,
"alnum_prop": 0.6085106382978723,
"repo_name": "pauljxtan/pystuff",
"id": "dc9acfde1ad1e45ecd677e43f040a06719489014",
"size": "2585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_structures/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "82226"
}
],
"symlink_target": ""
} |
'''
Install software from the FreeBSD ``ports(7)`` system
.. versionadded:: 2014.1.0
This module allows you to install ports using ``BATCH=yes`` to bypass
configuration prompts. It is recommended to use the :mod:`ports state
<salt.states.freebsdports>` to install ports, but it is also possible to use
this module exclusively from the command line.
.. code-block:: bash
salt minion-id ports.config security/nmap IPV6=off
salt minion-id ports.install security/nmap
'''
from __future__ import absolute_import
# Import python libs
import fnmatch
import os
import re
import logging
# Import salt libs
import salt.utils
from salt.ext.six import string_types
from salt.exceptions import SaltInvocationError, CommandExecutionError
import salt.ext.six as six
# Module-level logger for this execution module.
log = logging.getLogger(__name__)

# Define the module's virtual name
__virtualname__ = 'ports'
def __virtual__():
    '''
    Only make this module available on FreeBSD minions.
    '''
    if __grains__.get('os', '') == 'FreeBSD':
        return __virtualname__
    return False
def _portsnap():
    '''
    Return 'portsnap --interactive' for FreeBSD 10, otherwise 'portsnap'
    '''
    interactive = float(__grains__['osrelease']) >= 10
    return 'portsnap' + (' --interactive' if interactive else '')
def _check_portname(name):
    '''
    Check if portname is valid and whether or not the directory exists in the
    ports tree.
    '''
    # A port must be named as '<category>/<portname>'.
    valid = isinstance(name, string_types) and '/' in name
    if not valid:
        raise SaltInvocationError(
            'Invalid port name {0!r} (category required)'.format(name)
        )
    path = os.path.join('/usr/ports', name)
    if os.path.isdir(path):
        return path
    raise SaltInvocationError('Path {0!r} does not exist'.format(path))
def _options_dir(name):
    '''
    Retrieve the path to the dir containing OPTIONS file for a given port
    '''
    _check_portname(name)
    root = '/var/db/ports'
    # Legacy layout: /var/db/ports/<portname>
    legacy_dir = os.path.join(root, name.split('/')[-1])
    # Current layout: /var/db/ports/<category>_<portname>
    current_dir = os.path.join(root, name.replace('/', '_'))
    # Prefer the legacy location when it already exists on disk.
    return legacy_dir if os.path.isdir(legacy_dir) else current_dir
def _options_file_exists(name):
    '''
    Check whether a saved options file exists for the specified port.
    '''
    options_path = os.path.join(_options_dir(name), 'options')
    return os.path.isfile(options_path)
def _write_options(name, configuration):
    '''
    Write a new OPTIONS file for *name* from *configuration*, a dict of the
    form ``{pkg-version: {option: 'on'|'off'}}``.
    '''
    _check_portname(name)
    pkg = next(iter(configuration))
    opts = configuration[pkg]
    dirname = _options_dir(name)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError as exc:
            raise CommandExecutionError(
                'Unable to make {0}: {1}'.format(dirname, exc)
            )
    sorted_options = sorted(opts)
    with salt.utils.fopen(os.path.join(dirname, 'options'), 'w') as fp_:
        # Header mirrors what `make config` itself writes.
        fp_.write(
            '# This file was auto-generated by Salt (http://saltstack.com)\n'
            '# Options for {0}\n'
            '_OPTIONS_READ={0}\n'
            '_FILE_COMPLETE_OPTIONS_LIST={1}\n'
            .format(pkg, ' '.join(sorted_options))
        )
        for opt in sorted_options:
            # 'on' -> OPTIONS_FILE_SET, anything else -> OPTIONS_FILE_UNSET
            prefix = '' if opts[opt] == 'on' else 'UN'
            fp_.write('OPTIONS_FILE_{0}SET+={1}\n'.format(prefix, opt))
def _normalize(val):
'''
Fix Salt's yaml-ification of on/off, and otherwise normalize the on/off
values to be used in writing the options file
'''
if isinstance(val, bool):
return 'on' if val else 'off'
return str(val).lower()
def install(name, clean=True):
    '''
    Install a port from the ports tree. Installs using ``BATCH=yes`` for
    non-interactive building. To set config options for a given port, use
    :mod:`ports.config <salt.modules.freebsdports.config>`.

    clean : True
        If ``True``, cleans after installation. Equivalent to running ``make
        install clean BATCH=yes``.

    .. note::

        It may be helpful to run this function using the ``-t`` option to set a
        higher timeout, since compiling a port may cause the Salt command to
        exceed the default timeout.

    CLI Example:

    .. code-block:: bash

        salt -t 1200 '*' ports.install security/nmap
    '''
    portpath = _check_portname(name)
    old = __salt__['pkg.list_pkgs']()
    # pkg.list_pkgs keys are bare package names, so compare against the
    # portion of the port name after the category. Deinstall first so the
    # subsequent `make install` does not fail on an existing package.
    if old.get(name.rsplit('/')[-1]):
        deinstall(name)
    result = __salt__['cmd.run_all'](
        'make install{0} BATCH=yes'.format(' clean' if clean else ''),
        cwd=portpath, reset_system_locale=False
    )
    if result['retcode'] != 0:
        # Stash stderr so the ports.installed state can surface build errors.
        __context__['ports.install_error'] = result['stderr']
    # Invalidate the cached package list before re-querying it.
    __context__.pop('pkg.list_pkgs', None)
    new = __salt__['pkg.list_pkgs']()
    ret = salt.utils.compare_dicts(old, new)
    if not ret and result['retcode'] == 0:
        # No change in package list, but the make install was successful.
        # Assume that the installation was a recompile with new options, and
        # set return dict so that changes are detected by the ports.installed
        # state.
        ret = {name: {'old': old.get(name, ''),
                      'new': new.get(name, '')}}
    return ret
def deinstall(name):
    '''
    De-install a port.

    CLI Example:

    .. code-block:: bash

        salt '*' ports.deinstall security/nmap
    '''
    portpath = _check_portname(name)
    # Snapshot the installed packages so the removal can be reported as a
    # {pkgname: {'old': ..., 'new': ...}} diff.
    old = __salt__['pkg.list_pkgs']()
    __salt__['cmd.run']('make deinstall BATCH=yes', cwd=portpath)
    # Invalidate the cached package list before re-querying it.
    __context__.pop('pkg.list_pkgs', None)
    new = __salt__['pkg.list_pkgs']()
    return salt.utils.compare_dicts(old, new)
def rmconfig(name):
    '''
    Clear the cached options for the specified port; run a ``make rmconfig``

    name
        The name of the port to clear

    CLI Example:

    .. code-block:: bash

        salt '*' ports.rmconfig security/nmap
    '''
    return __salt__['cmd.run'](
        'make rmconfig', cwd=_check_portname(name)
    )
def showconfig(name, default=False, dict_return=False):
    '''
    Show the configuration options for a given port.

    default : False
        Show the default options for a port (not necessarily the same as the
        current configuration)

    dict_return : False
        Instead of returning the output of ``make showconfig``, return the data
        in an dictionary

    CLI Example:

    .. code-block:: bash

        salt '*' ports.showconfig security/nmap
        salt '*' ports.showconfig security/nmap default=True
    '''
    portpath = _check_portname(name)
    if default and _options_file_exists(name):
        # To read defaults we must temporarily remove the saved options file,
        # re-run showconfig, then restore the saved options. Order matters.
        saved_config = showconfig(name, default=False, dict_return=True)
        rmconfig(name)
        if _options_file_exists(name):
            raise CommandExecutionError('Unable to get default configuration')
        default_config = showconfig(name, default=False,
                                    dict_return=dict_return)
        _write_options(name, saved_config)
        return default_config
    try:
        result = __salt__['cmd.run_all']('make showconfig', cwd=portpath)
        output = result['stdout'].splitlines()
        if result['retcode'] != 0:
            error = result['stderr']
        else:
            error = ''
    except TypeError:
        # cmd.run_all returned something that is not subscriptable; treat the
        # raw return value as the error message.
        error = result
    if error:
        msg = ('Error running \'make showconfig\' for {0}: {1}'
               .format(name, error))
        log.error(msg)
        raise SaltInvocationError(msg)
    if not dict_return:
        return '\n'.join(output)
    # First line is expected to look like:
    #   ===> The following configuration options are available for pkg-1.0:
    if (not output) or ('configuration options' not in output[0]):
        return {}
    try:
        pkg = output[0].split()[-1].rstrip(':')
    except (IndexError, AttributeError, TypeError) as exc:
        log.error(
            'Unable to get pkg-version string: {0}'.format(exc)
        )
        return {}
    ret = {pkg: {}}
    output = output[1:]
    # Remaining lines look like '     OPT=on: description'; skip anything
    # that does not match.
    for line in output:
        try:
            opt, val, desc = re.match(
                r'\s+([^=]+)=(off|on): (.+)', line
            ).groups()
        except AttributeError:
            continue
        ret[pkg][opt] = val
    if not ret[pkg]:
        return {}
    return ret
def config(name, reset=False, **kwargs):
    '''
    Modify configuration options for a given port. Multiple options can be
    specified. To see the available options for a port, use
    :mod:`ports.showconfig <salt.modules.freebsdports.showconfig>`.

    name
        The port name, in ``category/name`` format

    reset : False
        If ``True``, runs a ``make rmconfig`` for the port, clearing its
        configuration before setting the desired options

    CLI Examples:

    .. code-block:: bash

        salt '*' ports.config security/nmap IPV6=off
    '''
    # Validates the port name; the returned path itself is not needed here.
    portpath = _check_portname(name)
    if reset:
        rmconfig(name)
    configuration = showconfig(name, dict_return=True)
    if not configuration:
        raise CommandExecutionError(
            'Unable to get port configuration for {0!r}'.format(name)
        )
    # Get top-level key for later reference
    pkg = next(iter(configuration))
    conf_ptr = configuration[pkg]
    # Normalize requested options, ignoring Salt-internal kwargs (which are
    # prefixed with an underscore).
    opts = dict(
        (str(x), _normalize(kwargs[x]))
        for x in kwargs
        if not x.startswith('_')
    )
    bad_opts = [x for x in opts if x not in conf_ptr]
    if bad_opts:
        raise SaltInvocationError(
            'The following opts are not valid for port {0}: {1}'
            .format(name, ', '.join(bad_opts))
        )
    bad_vals = [
        '{0}={1}'.format(x, y) for x, y in six.iteritems(opts)
        if y not in ('on', 'off')
    ]
    if bad_vals:
        raise SaltInvocationError(
            'The following key/value pairs are invalid: {0}'
            .format(', '.join(bad_vals))
        )
    # Merge the requested options into the current config and write it out,
    # then re-read to verify every requested option took effect.
    conf_ptr.update(opts)
    _write_options(name, configuration)
    new_config = showconfig(name, dict_return=True)
    try:
        new_config = new_config[next(iter(new_config))]
    except (StopIteration, TypeError):
        return False
    return all(conf_ptr[x] == new_config.get(x) for x in conf_ptr)
def update(extract=False):
    '''
    Update the ports tree

    extract : False
        If ``True``, runs a ``portsnap extract`` after fetching, should be used
        for first-time installation of the ports tree.

    CLI Example:

    .. code-block:: bash

        salt '*' ports.update
    '''
    result = __salt__['cmd.run_all']('{0} fetch'.format(_portsnap()))
    if not result['retcode'] == 0:
        raise CommandExecutionError(
            'Unable to fetch ports snapshot: {0}'.format(result['stderr'])
        )
    ret = []
    # Scrape the fetch output for summary counts; portsnap omits these lines
    # when there is nothing to fetch, hence the AttributeError fallbacks.
    try:
        patch_count = re.search(
            r'Fetching (\d+) patches', result['stdout']
        ).group(1)
    except AttributeError:
        patch_count = 0
    try:
        new_port_count = re.search(
            r'Fetching (\d+) new ports or files', result['stdout']
        ).group(1)
    except AttributeError:
        new_port_count = 0
    ret.append('Applied {0} new patches'.format(patch_count))
    ret.append('Fetched {0} new ports or files'.format(new_port_count))
    # 'extract' lays down the full tree (first-time setup); 'update' then
    # applies the fetched snapshot.
    if extract:
        result = __salt__['cmd.run_all']('{0} extract'.format(_portsnap()))
        if not result['retcode'] == 0:
            raise CommandExecutionError(
                'Unable to extract ports snapshot {0}'.format(result['stderr'])
            )
    result = __salt__['cmd.run_all']('{0} update'.format(_portsnap()))
    if not result['retcode'] == 0:
        raise CommandExecutionError(
            'Unable to apply ports snapshot: {0}'.format(result['stderr'])
        )
    # The tree changed, so drop the cached port list.
    __context__.pop('ports.list_all', None)
    return '\n'.join(ret)
def list_all():
    '''
    Lists all ports available.

    CLI Example:

    .. code-block:: bash

        salt '*' ports.list_all

    .. warning::

        Takes a while to run, and returns a **LOT** of output
    '''
    if 'ports.list_all' not in __context__:
        ports = []
        for path, dirs, files in os.walk('/usr/ports'):
            stripped = path[len('/usr/ports'):]
            # Keep only category/port directories (exactly two slashes deep)
            # and skip CVS metadata directories.
            if stripped.count('/') == 2 and not stripped.endswith('/CVS'):
                ports.append(stripped[1:])
        # Cache the result for the remainder of this Salt run.
        __context__['ports.list_all'] = ports
    return __context__['ports.list_all']
def search(name):
    '''
    Search for matches in the ports tree. Globs are supported, and the category
    is optional

    CLI Examples:

    .. code-block:: bash

        salt '*' ports.search 'security/*'
        salt '*' ports.search 'security/n*'
        salt '*' ports.search nmap

    .. warning::

        Takes a while to run
    '''
    name = str(name)
    all_ports = list_all()
    if '/' in name:
        if name.count('/') > 1:
            # Bug fix: .format(name) was missing, so the error message was
            # emitted with a literal '{0!r}' placeholder in it.
            raise SaltInvocationError(
                'Invalid search string {0!r}. Port names cannot have more '
                'than one slash'.format(name)
            )
        # Category supplied: match the glob against full category/port names.
        return fnmatch.filter(all_ports, name)
    # No category supplied: match against the port-name portion only.
    return [x for x in all_ports if fnmatch.fnmatch(x.rsplit('/')[-1], name)]
| {
"content_hash": "c1e38ba29456fc432d3e781a625ba705",
"timestamp": "",
"source": "github",
"line_count": 478,
"max_line_length": 79,
"avg_line_length": 27.784518828451883,
"alnum_prop": 0.5839168737293878,
"repo_name": "smallyear/linuxLearn",
"id": "d3ace225d4c2db97dba435d501ad00c10d4bb8db",
"size": "13305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/salt/modules/freebsdports.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "269"
},
{
"name": "CSS",
"bytes": "35"
},
{
"name": "HTML",
"bytes": "23373"
},
{
"name": "JavaScript",
"bytes": "510"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "12800734"
},
{
"name": "Shell",
"bytes": "240576"
}
],
"symlink_target": ""
} |
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
import pytest
def testShouldFireKeyPressEvents(driver, pages):
    """Typing a printable character should fire a keypress event."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("a")
    assert "press:" in driver.find_element(By.ID, "result").text
def testShouldFireKeyDownEvents(driver, pages):
    """Typing a character should fire a keydown event."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("I")
    assert "down" in driver.find_element(By.ID, "result").text
def testShouldFireKeyUpEvents(driver, pages):
    """Typing a character should fire a keyup event."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("a")
    assert "up:" in driver.find_element(By.ID, "result").text
def testShouldTypeLowerCaseLetters(driver, pages):
    """Lower-case text should be typed verbatim."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("abc def")
    assert reporter.get_attribute("value") == "abc def"
def testShouldBeAbleToTypeCapitalLetters(driver, pages):
    """Upper-case text should be typed verbatim."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("ABC DEF")
    assert reporter.get_attribute("value") == "ABC DEF"
def testShouldBeAbleToTypeQuoteMarks(driver, pages):
    """A double-quote character should be typed verbatim."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("\"")
    assert reporter.get_attribute("value") == "\""
def testShouldBeAbleToTypeTheAtCharacter(driver, pages):
    """The '@' character should be typed verbatim."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("@")
    assert reporter.get_attribute("value") == "@"
def testShouldBeAbleToMixUpperAndLowerCaseLetters(driver, pages):
    """Mixed-case text with symbols should be typed verbatim."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("me@eXample.com")
    assert reporter.get_attribute("value") == "me@eXample.com"
def testArrowKeysShouldNotBePrintable(driver, pages):
    """Arrow keys must not insert any printable character."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys(Keys.ARROW_LEFT)
    assert reporter.get_attribute("value") == ""
def testListOfArrowKeysShouldNotBePrintable(driver, pages):
    """A list argument of arrow keys must not insert any character."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys([Keys.ARROW_LEFT])
    assert reporter.get_attribute("value") == ""
def testShouldBeAbleToUseArrowKeys(driver, pages):
    """Arrow keys should move the caret so typing inserts mid-string."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("Tet", Keys.ARROW_LEFT, "s")
    assert reporter.get_attribute("value") == "Test"
def testWillSimulateAKeyUpWhenEnteringTextIntoInputElements(driver, pages):
    """keyup handlers on an input see the complete typed value."""
    pages.load("javascriptPage.html")
    driver.find_element(By.ID, "keyUp").send_keys("I like cheese")
    result = driver.find_element(By.ID, "result")
    assert result.text == "I like cheese"
def testWillSimulateAKeyDownWhenEnteringTextIntoInputElements(driver, pages):
    """keydown handlers on an input fire before the value updates, so the
    reported text is one character short."""
    pages.load("javascriptPage.html")
    driver.find_element(By.ID, "keyDown").send_keys("I like cheese")
    result = driver.find_element(By.ID, "result")
    assert result.text == "I like chees"
def testWillSimulateAKeyPressWhenEnteringTextIntoInputElements(driver, pages):
    """keypress handlers on an input fire before the value updates, so the
    reported text is one character short."""
    pages.load("javascriptPage.html")
    driver.find_element(By.ID, "keyPress").send_keys("I like cheese")
    result = driver.find_element(By.ID, "result")
    assert result.text == "I like chees"
def testWillSimulateAKeyUpWhenEnteringTextIntoTextAreas(driver, pages):
    """keyup handlers on a textarea see the complete typed value."""
    pages.load("javascriptPage.html")
    driver.find_element(By.ID, "keyUpArea").send_keys("I like cheese")
    result = driver.find_element(By.ID, "result")
    assert result.text == "I like cheese"
def testWillSimulateAKeyDownWhenEnteringTextIntoTextAreas(driver, pages):
    """keydown handlers on a textarea fire before the value updates, so the
    reported text is one character short."""
    pages.load("javascriptPage.html")
    driver.find_element(By.ID, "keyDownArea").send_keys("I like cheese")
    result = driver.find_element(By.ID, "result")
    assert result.text == "I like chees"
def testWillSimulateAKeyPressWhenEnteringTextIntoTextAreas(driver, pages):
    """keypress handlers on a textarea fire before the value updates, so
    the reported text is one character short."""
    pages.load("javascriptPage.html")
    driver.find_element(By.ID, "keyPressArea").send_keys("I like cheese")
    result = driver.find_element(By.ID, "result")
    assert result.text == "I like chees"
def testShouldReportKeyCodeOfArrowKeysUpDownEvents(driver, pages):
    """Each arrow key should report its keycode on keydown and keyup
    without leaving any printable character behind."""
    pages.load("javascriptPage.html")
    result = driver.find_element(By.ID, "result")
    element = driver.find_element(By.ID, "keyReporter")
    for key, code in ((Keys.ARROW_DOWN, 40), (Keys.ARROW_UP, 38),
                      (Keys.ARROW_LEFT, 37), (Keys.ARROW_RIGHT, 39)):
        element.send_keys(key)
        reported = result.text.strip()
        assert "down: %d" % code in reported
        assert "up: %d" % code in reported
    # And leave no rubbish/printable keys in the "keyReporter"
    assert element.get_attribute("value") == ""
def testNumericNonShiftKeys(driver, pages):
    """Unshifted number-row and punctuation keys should type verbatim."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    unshifted = "`1234567890-=[]\\,.'/42"
    reporter.send_keys(unshifted)
    assert reporter.get_attribute("value") == unshifted
@pytest.mark.xfail_marionette(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
@pytest.mark.xfail_remote(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
def testNumericShiftKeys(driver, pages):
    """Shifted symbols should type verbatim and report a Shift keyup."""
    pages.load("javascriptPage.html")
    result = driver.find_element(By.ID, "result")
    reporter = driver.find_element(By.ID, "keyReporter")
    shifted = "~!@#$%^&*()_+{}:i\"<>?|END~"
    reporter.send_keys(shifted)
    assert reporter.get_attribute("value") == shifted
    # 16 is the keycode of the Shift key itself.
    assert "up: 16" in result.text.strip()
def testLowerCaseAlphaKeys(driver, pages):
    """The full lower-case alphabet should be typed verbatim."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    lower_alphabet = "abcdefghijklmnopqrstuvwxyz"
    reporter.send_keys(lower_alphabet)
    assert reporter.get_attribute("value") == lower_alphabet
@pytest.mark.xfail_marionette(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
@pytest.mark.xfail_remote(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
def testUppercaseAlphaKeys(driver, pages):
    """The upper-case alphabet should type verbatim via simulated Shift."""
    pages.load("javascriptPage.html")
    result = driver.find_element(By.ID, "result")
    reporter = driver.find_element(By.ID, "keyReporter")
    upper_alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    reporter.send_keys(upper_alphabet)
    assert reporter.get_attribute("value") == upper_alphabet
    # 16 is the keycode of the Shift key itself.
    assert "up: 16" in result.text.strip()
@pytest.mark.xfail_marionette(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
@pytest.mark.xfail_remote(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
def testAllPrintableKeys(driver, pages):
    """Every printable ASCII character should be typed verbatim."""
    pages.load("javascriptPage.html")
    result = driver.find_element(By.ID, "result")
    reporter = driver.find_element(By.ID, "keyReporter")
    printable = "!\"#$%&'()*+,-./0123456789:<=>?@ ABCDEFGHIJKLMNOPQRSTUVWXYZ [\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
    reporter.send_keys(printable)
    assert reporter.get_attribute("value") == printable
    # 16 is the keycode of the Shift key itself.
    assert "up: 16" in result.text.strip()
def testArrowKeysAndPageUpAndDown(driver, pages):
    """Arrow and page keys should navigate without inserting characters."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys(
        "a" + Keys.LEFT + "b" + Keys.RIGHT +
        Keys.UP + Keys.DOWN + Keys.PAGE_UP + Keys.PAGE_DOWN + "1")
    assert reporter.get_attribute("value") == "ba1"
# def testHomeAndEndAndPageUpAndPageDownKeys(driver, pages):
# // FIXME: macs don't have HOME keys, would PGUP work?
# if (Platform.getCurrent().is(Platform.MAC)) {
# return
# }
# pages.load("javascriptPage.html")
# element = driver.find_element(by=By.ID, value="keyReporter")
# element.send_keys("abc" + Keys.HOME + "0" + Keys.LEFT + Keys.RIGHT +
# Keys.PAGE_UP + Keys.PAGE_DOWN + Keys.END + "1" + Keys.HOME +
# "0" + Keys.PAGE_UP + Keys.END + "111" + Keys.HOME + "00")
# assert element.get_attribute("value") == "0000abc1111"
def testDeleteAndBackspaceKeys(driver, pages):
    """DELETE removes the character after the caret; BACKSPACE the one
    before it."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("abcdefghi")
    assert reporter.get_attribute("value") == "abcdefghi"
    reporter.send_keys(Keys.LEFT, Keys.LEFT, Keys.DELETE)
    assert reporter.get_attribute("value") == "abcdefgi"
    reporter.send_keys(Keys.LEFT, Keys.LEFT, Keys.BACK_SPACE)
    assert reporter.get_attribute("value") == "abcdfgi"
@pytest.mark.xfail_marionette(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
@pytest.mark.xfail_remote(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
def testSpecialSpaceKeys(driver, pages):
    """Keys.SPACE should behave exactly like a literal space."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("abcd" + Keys.SPACE + "fgh" + Keys.SPACE + "ij")
    assert reporter.get_attribute("value") == "abcd fgh ij"
@pytest.mark.xfail_marionette(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
@pytest.mark.xfail_remote(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1255258')
def testNumberpadAndFunctionKeys(driver, pages):
    """Numpad keys should type their symbols; function keys should be
    ignored entirely."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    numpad_sequence = (
        "abcd" + Keys.MULTIPLY + Keys.SUBTRACT + Keys.ADD +
        Keys.DECIMAL + Keys.SEPARATOR + Keys.NUMPAD0 + Keys.NUMPAD9 +
        Keys.ADD + Keys.SEMICOLON + Keys.EQUALS + Keys.DIVIDE +
        Keys.NUMPAD3 + "abcd")
    reporter.send_keys(numpad_sequence)
    assert reporter.get_attribute("value") == "abcd*-+.,09+;=/3abcd"
    reporter.clear()
    reporter.send_keys("FUNCTION" + Keys.F2 + "-KEYS" + Keys.F2)
    reporter.send_keys("" + Keys.F2 + "-TOO" + Keys.F2)
    assert reporter.get_attribute("value") == "FUNCTION-KEYS-TOO"
def testShiftSelectionDeletes(driver, pages):
    """Shift+arrow selects text, and DELETE removes the whole selection."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys("abcd efgh")
    assert reporter.get_attribute("value") == "abcd efgh"
    reporter.send_keys(Keys.SHIFT, Keys.LEFT, Keys.LEFT, Keys.LEFT)
    reporter.send_keys(Keys.DELETE)
    assert reporter.get_attribute("value") == "abcd e"
def testShouldTypeIntoInputElementsThatHaveNoTypeAttribute(driver, pages):
    """Inputs without a type attribute should still accept typed text."""
    pages.load("formPage.html")
    untyped_input = driver.find_element(By.ID, "no-type")
    untyped_input.send_keys("Should Say Cheese")
    assert untyped_input.get_attribute("value") == "Should Say Cheese"
def testShouldTypeAnInteger(driver, pages):
    """Non-string send_keys arguments should be coerced to text."""
    pages.load("javascriptPage.html")
    reporter = driver.find_element(By.ID, "keyReporter")
    reporter.send_keys(1234)
    assert reporter.get_attribute("value") == "1234"
| {
"content_hash": "e0ef926b8a4170b0e155a096a40b4b35",
"timestamp": "",
"source": "github",
"line_count": 318,
"max_line_length": 118,
"avg_line_length": 38.80503144654088,
"alnum_prop": 0.7069692058346839,
"repo_name": "asashour/selenium",
"id": "71230f3709eae1923d1a9778881071e0e128176d",
"size": "13128",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "py/test/selenium/webdriver/common/typing_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "825"
},
{
"name": "Batchfile",
"bytes": "347"
},
{
"name": "C",
"bytes": "46990"
},
{
"name": "C#",
"bytes": "2832474"
},
{
"name": "C++",
"bytes": "2144267"
},
{
"name": "CSS",
"bytes": "16299"
},
{
"name": "HTML",
"bytes": "1629111"
},
{
"name": "Java",
"bytes": "5467352"
},
{
"name": "JavaScript",
"bytes": "3564885"
},
{
"name": "Makefile",
"bytes": "4655"
},
{
"name": "Python",
"bytes": "926661"
},
{
"name": "Ragel",
"bytes": "3086"
},
{
"name": "Ruby",
"bytes": "920668"
},
{
"name": "Shell",
"bytes": "2799"
},
{
"name": "XSLT",
"bytes": "1047"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division
from neuralnilm import Net, RealApplianceSource, BLSTMLayer, SubsampleLayer, DimshuffleLayer
from lasagne.nonlinearities import sigmoid
from lasagne.objectives import crossentropy
from lasagne.init import Uniform, Normal
from lasagne.layers import LSTMLayer, DenseLayer, Conv1DLayer, ReshapeLayer
"""
Setup:
* in_to_cell init weights are now Normal(1.0)
* output all appliances
* fix bug in RealApplianceSource
* use cross-entropy
* smaller network
* power targets
* trying without first two sigmoid layers.
* updated to craffel/nntools commit 097aca480d60fdfada513c20070f8132d71a26b0
which fixes LSTM bug.
https://github.com/craffel/nntools/commit/097aca480d60fdfada513c20070f8132d71a26b0
* Subsampling *bidirectional* LSTM
* Output every sequence in the batch
* Change W_in_to_cell from Normal(1.0) to Uniform(5)
* put back the two sigmoid layers
* use Conv1D to create a hierarchical subsampling LSTM
* Using LSTM (not BLSTM) to speed up training while testing
* Use dimshuffle not reshape
Changes
* init Conv1DLayer with big weights
"""
# Training data: three appliances from one month of the UK-DALE dataset.
# NOTE(review): RealApplianceSource is project code; parameter semantics are
# inferred from the names — confirm against neuralnilm before relying on them.
source = RealApplianceSource(
    '/data/dk3810/ukdale.h5',
    ['fridge freezer', 'hair straighteners', 'television'],
    max_input_power=1000, max_appliance_powers=[300, 500, 200],
    window=("2013-06-01", "2013-07-01"),
    output_one_appliance=False,
    boolean_targets=False,
    min_on_duration=60,
    # Target sequences are downsampled 5x relative to the input; this matches
    # the stride-5 Conv1DLayer in layers_config below.
    subsample_target=5
)
net = Net(
    experiment_name="e52a",
    source=source,
    learning_rate=1e-1,
    save_plot_interval=50,
    loss_function=crossentropy,
    layers_config=[
        # Conv1DLayer expects channels-first input, so shuffle
        # (batch, time, features) -> (batch, features, time).
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        # Hierarchical subsampling: stride == filter_length == 5 reduces the
        # time axis 5x. Deliberately large init weights (see notes above).
        {
            'type': Conv1DLayer,
            'num_filters': 20,
            'filter_length': 5,
            'stride': 5,
            'W': Uniform(10),
            'b': Uniform(5),
            'nonlinearity': sigmoid
        },
        # Shuffle back to (batch, time, features) for the recurrent layers.
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': LSTMLayer,
            'num_units': 40,
            'W_in_to_cell': Uniform(5)
        },
        {
            'type': LSTMLayer,
            'num_units': 80,
            'W_in_to_cell': Uniform(5)
        },
        # One sigmoid output per target appliance.
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': sigmoid
        }
    ]
)
net.print_net()
net.compile()
net.fit()
| {
"content_hash": "827719fb5e9f3523f3ef3ec8af534724",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 92,
"avg_line_length": 27.055555555555557,
"alnum_prop": 0.62217659137577,
"repo_name": "JackKelly/neuralnilm_prototype",
"id": "628c23f86cbb8a5fd085df153112a1f4e274a702",
"size": "2435",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/e52a.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4536723"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import json
import sys
import fieldtrial_to_struct
def _hex(ch):
hv = hex(ord(ch)).replace('0x', '')
hv.zfill(2)
return hv.upper()
# URL escapes the delimiter characters from the output. urllib.quote is not
# used because it cannot escape '.'.
def _escape(str):
    # '%' must be escaped first so already-present percent signs are not
    # double-mangled by the subsequent replacements.
    result = str.replace('%', '%' + _hex('%'))
    for delimiter in ':/.,':
        result = result.replace(delimiter, '%' + _hex(delimiter))
    return result
def _FindDuplicates(entries):
seen = set()
duplicates = set()
for entry in entries:
if entry in seen:
duplicates.add(entry)
else:
seen.add(entry)
return duplicates
def _CheckForDuplicateFeatures(enable_features, disable_features):
    """Raises if either list has duplicates or the two lists intersect.

    Feature strings may carry a '<StudyName' suffix, which is stripped
    before comparing.
    """
    enabled = [f.split('<')[0] for f in enable_features]
    if len(set(enabled)) != len(enabled):
        raise Exception('Duplicate feature(s) in enable_features: ' +
                        ', '.join(_FindDuplicates(enabled)))
    disabled = [f.split('<')[0] for f in disable_features]
    if len(set(disabled)) != len(disabled):
        raise Exception('Duplicate feature(s) in disable_features: ' +
                        ', '.join(_FindDuplicates(disabled)))
    overlap = set(enabled).intersection(disabled)
    if overlap:
        raise Exception('Conflicting features set as both enabled and disabled: ' +
                        ', '.join(overlap))
def _FindFeaturesOverriddenByArgs(args):
"""Returns a list of the features enabled or disabled by the flags in args."""
overridden_features = []
for arg in args:
if (arg.startswith('--enable-features=')
or arg.startswith('--disable-features=')):
_, _, arg_val = arg.partition('=')
overridden_features.extend(arg_val.split(','))
return [f.split('<')[0] for f in overridden_features]
def MergeFeaturesAndFieldTrialsArgs(args):
    """Merges duplicate features and field trials arguments.

    Merges multiple instances of --enable-features, --disable-features,
    --force-fieldtrials and --force-fieldtrial-params. Any such merged flags
    are moved to the end of the returned list; the original ordering of all
    other arguments is maintained.

    TODO(crbug.com/1033090): Add functionality to handle duplicate flags using
    the Foo<Bar syntax, and the case where the same trial is specified with
    different groups via --force-fieldtrials.

    Args:
      args: An iterable of strings representing command line arguments.

    Returns:
      A new list of strings representing the merged command line arguments.
    """
    other_args = []
    disable, enable = set(), set()
    trials, trial_params = set(), set()
    for arg in args:
        flag, sep, value = arg.partition('=')
        if not sep:
            other_args.append(arg)
        elif flag == '--disable-features':
            disable.update(value.split(','))
        elif flag == '--enable-features':
            enable.update(value.split(','))
        elif flag == '--force-fieldtrials':
            # A trailing '/' is optional. Do not split by '/' as that would
            # separate each group name from the corresponding trial name.
            trials.add(value.rstrip('/'))
        elif flag == '--force-fieldtrial-params':
            trial_params.update(value.split(','))
        else:
            other_args.append(arg)
    merged = list(other_args)
    # Sorted joins keep the output deterministic.
    if disable:
        merged.append('--disable-features=%s' % ','.join(sorted(disable)))
    if enable:
        merged.append('--enable-features=%s' % ','.join(sorted(enable)))
    if trials:
        merged.append('--force-fieldtrials=%s' % '/'.join(sorted(trials)))
    if trial_params:
        merged.append(
            '--force-fieldtrial-params=%s' % ','.join(sorted(trial_params)))
    return merged
def GenerateArgs(config_path, platform, override_args=None):
    """Generates command-line flags for enabling field trials.

    Generates a list of command-line switches to enable field trials for the
    provided config_path and platform. If override_args is set, all field
    trials that conflict with any listed --enable-features or
    --disable-features argument are skipped.

    Args:
      config_path: The path to the fieldtrial testing config JSON file.
      platform: A string representing the platform on which the tests will be
        run.
      override_args (optional): An iterable of string command line arguments.

    Returns:
      A list of string command-line arguments. Empty if the config is missing,
      unparseable, or yields no applicable studies.
    """
    # A missing or malformed config silently yields no flags by design.
    try:
        with open(config_path, 'r') as config_file:
            config = json.load(config_file)
    except (IOError, ValueError):
        return []
    platform_studies = fieldtrial_to_struct.ConfigToStudies(config, [platform])
    if override_args is None:
        override_args = []
    overriden_features_set = set(_FindFeaturesOverriddenByArgs(override_args))
    # Should skip any experiment that will enable or disable a feature that is
    # also enabled or disabled in the override_args.
    def ShouldSkipExperiment(experiment):
        experiment_features = (experiment.get('disable_features', [])
                               + experiment.get('enable_features', []))
        return not overriden_features_set.isdisjoint(experiment_features)
    studies = []
    params = []
    enable_features = []
    disable_features = []
    for study in platform_studies:
        study_name = study['name']
        experiments = study['experiments']
        # For now, only take the first experiment.
        experiment = experiments[0]
        if ShouldSkipExperiment(experiment):
            continue
        selected_study = [study_name, experiment['name']]
        studies.extend(selected_study)
        # Flatten each experiment's params into alternating key/value entries.
        param_list = []
        if 'params' in experiment:
            for param in experiment['params']:
                param_list.append(param['key'])
                param_list.append(param['value'])
        if len(param_list):
            # Escape the variables for the command-line.
            selected_study = [_escape(x) for x in selected_study]
            param_list = [_escape(x) for x in param_list]
            # Format: Study.Group:key1/value1/key2/value2
            param = '%s:%s' % ('.'.join(selected_study), '/'.join(param_list))
            params.append(param)
        # Tie each feature to its study via the 'Feature<Study' syntax.
        for feature in experiment.get('enable_features', []):
            enable_features.append(feature + '<' + study_name)
        for feature in experiment.get('disable_features', []):
            disable_features.append(feature + '<' + study_name)
    if not len(studies):
        return []
    # Raises on duplicated or conflicting feature overrides.
    _CheckForDuplicateFeatures(enable_features, disable_features)
    args = ['--force-fieldtrials=%s' % '/'.join(studies)]
    if len(params):
        args.append('--force-fieldtrial-params=%s' % ','.join(params))
    if len(enable_features):
        args.append('--enable-features=%s' % ','.join(enable_features))
    if len(disable_features):
        args.append('--disable-features=%s' % ','.join(disable_features))
    return args
def main():
  """Command-line entry point.

  Usage: fieldtrial_util.py [config_path] [platform] [shell_cmd]
  Prints the generated field-trial command-line arguments, either as a
  Python list (default) or as shell-quoted tokens when 'shell_cmd' is given.
  """
  argv = sys.argv
  # Guard: both the config path and the platform are mandatory.
  if len(argv) < 3:
    print('Usage: fieldtrial_util.py [config_path] [platform]')
    print('Optionally pass \'shell_cmd\' as an extra argument to print')
    print('quoted command line arguments.')
    exit(-1)
  supported_platforms = ['android', 'android_webview', 'chromeos', 'ios',
                         'linux', 'mac', 'windows']
  platform = argv[2]
  if platform not in supported_platforms:
    print('\'%s\' is an unknown platform. Supported platforms: %s' %
          (platform, supported_platforms))
    exit(-1)
  generated_args = GenerateArgs(argv[1], platform)
  if len(argv) >= 4 and argv[3] == 'shell_cmd':
    # Emit each argument wrapped in double quotes, space-separated.
    print(" ".join('"{0}"'.format(arg) for arg in generated_args))
  else:
    print(generated_args)
if __name__ == '__main__':
  main()
| {
"content_hash": "c70ccd82931d1cb51f8cd32d2e3843c4",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 80,
"avg_line_length": 37.36150234741784,
"alnum_prop": 0.6727821060567982,
"repo_name": "endlessm/chromium-browser",
"id": "d03307cdea9719470fde56e0ed10300ece1a1d3a",
"size": "8121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/variations/fieldtrial_util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
Django settings for api project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
from urlparse import urlparse
from website import settings as osf_settings
# Project root: three directory levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# Primary PostgreSQL connection; every value is overridable through the
# OSF_DB_* environment variables and defaults to a local dev database.
DATABASES = {
    'default': {
        'CONN_MAX_AGE': 0,
        'ENGINE': 'osf.db.backends.postgresql',  # django.db.backends.postgresql
        'NAME': os.environ.get('OSF_DB_NAME', 'osf'),
        'USER': os.environ.get('OSF_DB_USER', 'postgres'),
        'PASSWORD': os.environ.get('OSF_DB_PASSWORD', ''),
        'HOST': os.environ.get('OSF_DB_HOST', '127.0.0.1'),
        'PORT': os.environ.get('OSF_DB_PORT', '5432'),
        'ATOMIC_REQUESTS': True,
    }
}
# Route reads/writes through the custom failover-aware router.
DATABASE_ROUTERS = ['osf.db.router.PostgreSQLFailoverRouter', ]
# Management-command modules that Celery must import so their tasks register.
CELERY_IMPORTS = [
    'osf.management.commands.migratedata',
    'osf.management.commands.migraterelations',
    'osf.management.commands.verify',
]
# bcrypt-based hashers only (SHA256-wrapped variant first, as the default).
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
    'django.contrib.auth.hashers.BCryptPasswordHasher',
]
AUTH_USER_MODEL = 'osf.OSFUser'
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = osf_settings.SECRET_KEY
AUTHENTICATION_BACKENDS = (
    'api.base.authentication.backends.ODMBackend',
)
# SECURITY WARNING: don't run with debug turned on in production!
DEV_MODE = osf_settings.DEV_MODE
DEBUG = osf_settings.DEBUG_MODE
DEBUG_PROPAGATE_EXCEPTIONS = True
# session:
SESSION_COOKIE_NAME = 'api'
SESSION_COOKIE_SECURE = osf_settings.SECURE_MODE
SESSION_COOKIE_HTTPONLY = osf_settings.SESSION_COOKIE_HTTPONLY
# csrf:
CSRF_COOKIE_NAME = 'api-csrf'
CSRF_COOKIE_SECURE = osf_settings.SECURE_MODE
CSRF_COOKIE_HTTPONLY = osf_settings.SECURE_MODE
# Any subdomain of osf.io may serve this app.
ALLOWED_HOSTS = [
    '.osf.io'
]
# Application definition
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    # 3rd party
    'rest_framework',
    'rest_framework_swagger',
    'corsheaders',
    'raven.contrib.django.raven_compat',
    'django_extensions',
    # OSF
    'osf',
    # Addons
    'addons.osfstorage',
    'addons.bitbucket',
    'addons.box',
    'addons.dataverse',
    'addons.dropbox',
    'addons.figshare',
    'addons.forward',
    'addons.github',
    'addons.googledrive',
    'addons.mendeley',
    'addons.owncloud',
    'addons.s3',
    'addons.twofactor',
    'addons.wiki',
    'addons.zotero',
)
# local development using https
if osf_settings.SECURE_MODE and DEBUG:
    INSTALLED_APPS += ('sslserver',)
# Sentry error reporting (via raven), tagged so API events are identifiable.
# TODO: Are there more granular ways to configure reporting specifically related to the API?
RAVEN_CONFIG = {
    'tags': {'App': 'api'},
    'dsn': osf_settings.SENTRY_DSN,
    'release': osf_settings.VERSION,
}
# Maximum number of objects accepted in a single bulk request.
BULK_SETTINGS = {
    'DEFAULT_BULK_LIMIT': 100
}
MAX_PAGE_SIZE = 100
# Django REST framework configuration for the JSON-API style endpoints.
REST_FRAMEWORK = {
    'PAGE_SIZE': 10,
    # Order is important here because of a bug in rest_framework_swagger. For now,
    # rest_framework.renderers.JSONRenderer needs to be first, at least until
    # https://github.com/marcgibbons/django-rest-swagger/issues/271 is resolved.
    'DEFAULT_RENDERER_CLASSES': (
        'api.base.renderers.JSONAPIRenderer',
        'api.base.renderers.JSONRendererWithESISupport',
        'api.base.renderers.BrowsableAPIRendererNoForms',
    ),
    'DEFAULT_PARSER_CLASSES': (
        'api.base.parsers.JSONAPIParser',
        'api.base.parsers.JSONAPIParserForRegularJSON',
        'rest_framework.parsers.FormParser',
        'rest_framework.parsers.MultiPartParser'
    ),
    'EXCEPTION_HANDLER': 'api.base.exceptions.json_api_exception_handler',
    'DEFAULT_CONTENT_NEGOTIATION_CLASS': 'api.base.content_negotiation.JSONAPIContentNegotiation',
    'DEFAULT_VERSIONING_CLASS': 'api.base.versioning.BaseVersioning',
    'DEFAULT_VERSION': '2.0',
    # API versions a client may request; requests outside this list are rejected.
    'ALLOWED_VERSIONS': (
        '2.0',
        '2.1',
        '2.2',
        '2.3',
        '2.4',
        '2.5',
        '2.6',
    ),
    'DEFAULT_FILTER_BACKENDS': ('api.base.filters.ODMOrderingFilter',),
    'DEFAULT_PAGINATION_CLASS': 'api.base.pagination.JSONAPIPagination',
    'ORDERING_PARAM': 'sort',
    'DEFAULT_AUTHENTICATION_CLASSES': (
        # Custom auth classes
        'api.base.authentication.drf.OSFBasicAuthentication',
        'api.base.authentication.drf.OSFSessionAuthentication',
        'api.base.authentication.drf.OSFCASAuthentication'
    ),
    'DEFAULT_THROTTLE_CLASSES': (
        'rest_framework.throttling.UserRateThrottle',
        'api.base.throttling.NonCookieAuthThrottle',
    ),
    # Per-scope rate limits; scope names are referenced by the throttle classes.
    'DEFAULT_THROTTLE_RATES': {
        'user': '10000/day',
        'non-cookie-auth': '100/hour',
        'add-contributor': '10/second',
        'create-guid': '1000/hour',
        'root-anon-throttle': '1000/hour',
        'test-user': '2/hour',
        'test-anon': '1/hour',
    }
}
# Settings related to CORS Headers addon: allow API to receive authenticated requests from OSF
# CORS plugin only matches based on "netloc" part of URL, so as workaround we add that to the list
CORS_ORIGIN_ALLOW_ALL = False
CORS_ORIGIN_WHITELIST = (urlparse(osf_settings.DOMAIN).netloc,
                         osf_settings.DOMAIN,
                         )
# This needs to remain True to allow cross origin requests that are in CORS_ORIGIN_WHITELIST to
# use cookies.
CORS_ALLOW_CREDENTIALS = True
# Set dynamically on app init
ORIGINS_WHITELIST = ()
MIDDLEWARE_CLASSES = (
    'api.base.middleware.DjangoGlobalMiddleware',
    'api.base.middleware.CeleryTaskMiddleware',
    'api.base.middleware.PostcommitTaskMiddleware',
    # A profiling middleware. ONLY FOR DEV USE
    # Uncomment and add "prof" to url params to receive a profile for that url
    # 'api.base.middleware.ProfileMiddleware',
    # 'django.contrib.sessions.middleware.SessionMiddleware',
    'api.base.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    # 'django.contrib.auth.middleware.AuthenticationMiddleware',
    # 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    # 'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)
# Single Django-template backend; templates live under <BASE_DIR>/templates.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True
    }]
ROOT_URLCONF = 'api.base.urls'
WSGI_APPLICATION = 'api.base.wsgi.application'
LANGUAGE_CODE = 'en-us'
# Disabled to make a test work (TestNodeLog.test_formatted_date)
# TODO Try to understand what's happening to cause the test to break when that line is active.
# TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static/vendor')
API_BASE = 'v2/'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
('rest_framework_swagger/css', os.path.join(BASE_DIR, 'static/css')),
('rest_framework_swagger/images', os.path.join(BASE_DIR, 'static/images')),
)
# TODO: Revisit methods for excluding private routes from swagger docs
SWAGGER_SETTINGS = {
'api_path': '/',
'info': {
'description':
"""
Welcome to the fine documentation for the Open Science Framework's API! Please click
on the <strong>GET /v2/</strong> link below to get started.
For the most recent docs, please check out our <a href="/v2/">Browsable API</a>.
""",
'title': 'OSF APIv2 Documentation',
},
'doc_expansion': 'list',
'exclude_namespaces': ['applications', 'tokens', 'test'],
}
NODE_CATEGORY_MAP = osf_settings.NODE_CATEGORY_MAP
DEBUG_TRANSACTIONS = DEBUG
JWT_SECRET = 'osf_api_cas_login_jwt_secret_32b'
JWE_SECRET = 'osf_api_cas_login_jwe_secret_32b'
ENABLE_VARNISH = osf_settings.ENABLE_VARNISH
ENABLE_ESI = osf_settings.ENABLE_ESI
VARNISH_SERVERS = osf_settings.VARNISH_SERVERS
ESI_MEDIA_TYPES = osf_settings.ESI_MEDIA_TYPES
ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud']
ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'mendeley', 'zotero', 'forward']
BYPASS_THROTTLE_TOKEN = 'test-token'
OSF_SHELL_USER_IMPORTS = None
# Settings for use in the admin
OSF_URL = 'https://osf.io'
| {
"content_hash": "02f94cd27a92db9c292d8bc99ca29087",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 113,
"avg_line_length": 30.458620689655174,
"alnum_prop": 0.681535152269897,
"repo_name": "caneruguz/osf.io",
"id": "9dc59328a948441f83169690395d1b451c2b37ec",
"size": "8833",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "api/base/settings/defaults.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "131204"
},
{
"name": "HTML",
"bytes": "27618"
},
{
"name": "JavaScript",
"bytes": "1351399"
},
{
"name": "Mako",
"bytes": "610759"
},
{
"name": "Python",
"bytes": "4588002"
},
{
"name": "Shell",
"bytes": "2118"
}
],
"symlink_target": ""
} |
from sympy.core.basic import Basic
import combinatorial
import elementary
import special
from special.polynomials import legendre, hermite, chebyshevt, chebyshevu, \
chebyshevu_root, chebyshevt_root
# see #391
from combinatorial.factorials import factorial, rf, ff, binomial
from combinatorial.factorials import Factorial, RisingFactorial, FallingFactorial, Binomial
from combinatorial.numbers import fibonacci, lucas, harmonic, bernoulli, bell
from elementary.miscellaneous import sqrt, min_, max_
from elementary.complexes import re, im, sign, abs, conjugate, arg
from elementary.trigonometric import acot, cot, tan, cos, sin, asin, acos, atan
from elementary.exponential import exp, log
from elementary.hyperbolic import sinh, cosh, tanh, coth, asinh, acosh, atanh, acoth
from elementary.integers import floor, ceiling
from special.error_functions import erf
from special.gamma_functions import gamma, lowergamma, uppergamma, polygamma, loggamma
from special.zeta_functions import dirichlet_eta, zeta
# Conventional alias: the natural logarithm is commonly written ``ln``.
ln = log

# Publish every registered singleton function class as an instance under its
# registered name in this module's namespace. Assigning through globals() is
# equivalent to the old Python-2 ``exec '%s = _cls()' % _n`` statement, but
# avoids building and executing source text at import time.
for _n, _cls in Basic.singleton.items():
    globals()[_n] = _cls()
| {
"content_hash": "9c1db0c447c72f8eea86f418d16eb951",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 91,
"avg_line_length": 37.89655172413793,
"alnum_prop": 0.7898089171974523,
"repo_name": "certik/sympy-oldcore",
"id": "bf6d0ebdad14b6bbd9492e9f780c4724cf93de7e",
"size": "1100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympy/functions/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import scrapy
class ItuneApp(scrapy.Item):
    """Scrapy item holding the fields scraped for one iTunes app listing."""
    guid = scrapy.Field()
    updatedAt = scrapy.Field()
    name = scrapy.Field()
    url = scrapy.Field()
    thumbnail = scrapy.Field()
    appLastUpdated = scrapy.Field()
    developer = scrapy.Field()
    website = scrapy.Field()
    # Review counts: all versions vs. current version only.
    ofReviews = scrapy.Field()
    ofReviewsCurrent = scrapy.Field()
    # NOTE(review): "starts"/"startsCurrent" look like typos for star ratings
    # ("stars") — renaming would break downstream consumers, so left as-is.
    starts = scrapy.Field()
    startsCurrent = scrapy.Field()
    version = scrapy.Field()
    # description = scrapy.Field()
| {
"content_hash": "497349ad41385cfa084c132196aba945",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 37,
"avg_line_length": 18.615384615384617,
"alnum_prop": 0.640495867768595,
"repo_name": "trujunzhang/djzhang-targets",
"id": "b494a37a633ef0b911c9193e5112685636610d94",
"size": "636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cwitunes/cwitunes/items.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7418804"
},
{
"name": "JavaScript",
"bytes": "936547"
},
{
"name": "PHP",
"bytes": "94539"
},
{
"name": "Python",
"bytes": "564898"
},
{
"name": "Shell",
"bytes": "167"
}
],
"symlink_target": ""
} |
"""
Written by Lucas Sinclair.
MIT Licensed.
Contact at www.sinclair.bio
"""
# Built-in modules #
# Internal modules #
from fasta import FASTA
from plumbing.databases import convert_to_sql
from plumbing.databases.sqlite_database import SQLiteDatabase
from plumbing.common import GenWithLength
# Third party modules #
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from Bio.Seq import Seq
from tqdm import tqdm
# Constants #
base_keys = ('id', 'description', 'seq')
###############################################################################
class DatabaseFASTA(SQLiteDatabase):
    """An SQLite database holding FASTA records as (id, description, seq) rows."""
    def __init__(self, path=None):
        # NOTE(review): the parent SQLiteDatabase.__init__ is deliberately not
        # called here; confirm the base class only needs `path` and `factory`.
        self.path = path
        # Row factory: rebuild a Bio.SeqRecord from a stored row.
        self.factory = lambda cursor, row: SeqRecord(Seq(row[2]), id=row[0], description=row[1])
    def parse(self):
        # Intentionally a no-op: records come back through `factory`, not parsing.
        pass
###############################################################################
def generate_values(path, progress=False):
    """Yield (id, description, sequence string) for every record in *path*.

    When *progress* is true, iteration is wrapped in a tqdm progress bar
    sized by counting the sequences in the FASTA file first.
    """
    records = SeqIO.parse(path, 'fasta')
    if progress:
        records = tqdm(GenWithLength(records, len(FASTA(path))))
    for record in records:
        yield record.id, record.description, str(record.seq)
###############################################################################
def fasta_to_sql(source, dest):
    """Convert the FASTA file at *source* into an SQLite database at *dest*.

    Returns a DatabaseFASTA handle pointing at the new database.
    """
    convert_to_sql(dest, base_keys, generate_values(source, progress=True))
    return DatabaseFASTA(dest)
"content_hash": "c1a9e12c831e570dabeae91c9000d8d7",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 96,
"avg_line_length": 30.19148936170213,
"alnum_prop": 0.5828047921071177,
"repo_name": "xapple/fasta",
"id": "030bc6ec0414f88c47c5a0743b481f1dec46afc2",
"size": "1467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fasta/indexed.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "67728"
}
],
"symlink_target": ""
} |
from paintApparatus import *
from abb import *
import json
from color import Color
class Control(object):
    """High-level painting controller.

    Walks a JSON list of stroke instructions and drives the robot arm
    (through ``serial_connection``) and the paint/cleaning hardware
    (through ``apparatus``) to execute each stroke.
    """
    def __init__(self, serial_connection, apparatus):
        # Brush currently mounted; 0 is the initial/default brush.
        self.last_brush = 0
        # Colour currently loaded on the brush.
        self.last_color = Color()
        self.serial_connection = serial_connection
        self.apparatus = apparatus
        self.instructions = []
        # Seconds to pause between consecutive hardware commands so each
        # motion can finish before the next one is queued.
        self.delay = 0.5
    def load_instructions(self, path_to_instructions):
        """Load the JSON-encoded stroke list from *path_to_instructions*."""
        print('Loading instructions located at {0}'.format(path_to_instructions))
        # Context manager closes the handle promptly (the previous
        # open(...).read() call leaked it).
        with open(path_to_instructions) as instructions_file:
            self.instructions = json.load(instructions_file)
    def switch_brush(self, brush):
        """Mount *brush*, skipping the hardware call if it is already mounted."""
        if self.last_brush != brush:
            print('Switching to brush {0}'.format(brush))
            self.serial_connection.switch_brush(brush)
            # Bug fix: record the newly mounted brush. Previously last_brush
            # was never updated, so the (slow) switch was re-issued every call.
            self.last_brush = brush
    def clean_brush(self):
        """Run the full wash cycle: rinse, dry, rinse again, dry again.

        The exact command order matters to the hardware; do not reorder.
        """
        print('cleaning brush')
        conn = self.serial_connection
        delay = self.delay
        conn.moveToSafe()
        time.sleep(delay)
        conn.moveApproachClean()
        time.sleep(delay)
        conn.moveOverClean()
        time.sleep(delay)
        conn.moveClean()
        time.sleep(delay)
        # First rinse.
        conn.rinse()
        conn.moveOverClean()
        time.sleep(delay)
        # First dry.
        conn.moveOverDry()
        time.sleep(delay)
        conn.moveDry()
        time.sleep(delay)
        self.apparatus.brush_dryer(2)
        time.sleep(delay)
        conn.moveOverDry()
        time.sleep(delay)
        # Second rinse.
        conn.moveOverClean()
        time.sleep(delay)
        conn.rinse()
        time.sleep(delay)
        # Second dry.
        conn.moveOverDry()
        time.sleep(delay)
        conn.moveDry()
        time.sleep(delay)
        self.apparatus.brush_dryer(2)
        time.sleep(delay)
        conn.moveOverDry()
        time.sleep(delay)
        conn.moveToSafe()
        time.sleep(delay)
    def switch_or_create_color(self, ColorRGB):
        ''' Returns False if the paint color was the same, True if new paint was mixed '''
        print('creating ColorRGB R:{r}, G:{g}, B:{b}'
              .format(r=ColorRGB.red, b=ColorRGB.blue, g=ColorRGB.green))
        self.serial_connection.moveToSafe()
        if self.apparatus.create_or_activate(ColorRGB):
            # Fresh paint was dispensed; stir before use.
            self.serial_connection.mixPaint()
            return True
        # Colour already available in well 0: just pick it up.
        self.serial_connection.getPaint(0)
        return False
    def single_step(self, step):
        """Execute one stroke instruction.

        Layout of *step* (flat list): indices 0-5 are stroke coordinates
        (normalized, scaled by 2530 to canvas units), 6-8 the RGB colour,
        9 the brush id (brush switching is currently disabled).
        """
        print(step)
        stroke_color = Color()
        stroke_color.setRGB(step[6:9])
        stroke_color.rgb2cmyk()
        # Brush switching intentionally disabled for now:
        # if step[9] != self.last_brush:
        #     self.switch_brush(step[9])
        if stroke_color != self.last_color:
            # Colour changed: wash the brush before loading the new paint.
            self.clean_brush()
        self.switch_or_create_color(stroke_color)
        self.serial_connection.moveToSafe()
        self.serial_connection.sendCoordQ(step[0]*2530, step[1]*2530, step[2]*2530, step[3]*2530,
                                         step[4]*2530, step[5]*2530)
        self.last_color = stroke_color
    def run(self):
        """Paint every loaded instruction, then park and clean the brush."""
        self.serial_connection.moveToSafe()
        for index in range(len(self.instructions)):
            self.single_step(self.instructions[index])
            print("STROKE:", index)
        self.serial_connection.moveToSafe()
        self.clean_brush()
if __name__ == '__main__':
    # Canvas is 2530x2530 units; the serial port is hard-coded for the
    # machine the robot is attached to.
    abb = ABBRunner(2530, 2530)
    abb.connectToSerial('/dev/ttyUSB0')
    abb.sendCanvasInfo()
    apparatus = PaintApparatus()
    ctrl = Control(abb, apparatus)
    # Stroke instructions file is expected in the current working directory.
    ctrl.load_instructions('kmeansflowers.json')
    ctrl.run()
| {
"content_hash": "73183f6c4e9493b3d59725a6c50b2610",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 97,
"avg_line_length": 35.80373831775701,
"alnum_prop": 0.6256851996867658,
"repo_name": "Roboartitsts/PiPaintingServer",
"id": "945e69009311fbe6cf3d4cdd758f2b6e7a470bf1",
"size": "3831",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "control.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7172"
},
{
"name": "Python",
"bytes": "74841"
}
],
"symlink_target": ""
} |
import json
from server.models import database, DoctorModel, LoginModel
from server.hmsexceptions import UserNotExistException
from server.utils import logger
def register_doctor(post_data):
    """
    Register a doctor in the system: create the DoctorModel row, then the
    matching LoginModel row (role 'doctor').

    :param post_data: dict of doctor attributes
    :returns: (status, info, password) — on success (1, doctor str, generated
              password str); on failure (0, error message str, '')
    """
    doctor = ''
    try:
        logger.debug('in register_doctor')
        with database.atomic():
            doctor = DoctorModel.create_by_dict(post_data)
            logger.debug(doctor)
            logger.debug('in database.atomic')
    except Exception as ex:
        logger.error('Exception: ', ex)
        # NOTE(review): if create_by_dict raised, ``doctor`` is still '' here,
        # so this delete targets rows whose email is the empty string rather
        # than the half-created record — confirm the intended clean-up target.
        q = DoctorModel.delete().where(DoctorModel.email==doctor)
        q.execute()
        return 0, 'create doctor failed, did not create doctor', ''
    try:
        with database.atomic():
            user = LoginModel.create_by_dict('doctor', post_data)
            logger.debug(doctor)
            logger.debug('in database.atomic')
    except Exception as ex:
        logger.error('Exception: ', ex)
        # Roll back the login row if the second create failed.
        # NOTE(review): ``doctor`` is a DoctorModel instance at this point;
        # comparing it against LoginModel.username relies on peewee/str
        # coercion — verify this matches the row that must be removed.
        q = LoginModel.delete().where(LoginModel.username==doctor)
        q.execute()
        return 0, 'create doctor failed, did not create doctor', ''
    else:
        return 1, str(doctor), str(user.password)
def edit_doctor(doctorid, post_data):
    """
    Edit a doctor in the system.

    :param doctorid: doctor's uid
    :param post_data: dict of fields to update
    :returns: (status, info) — (1, doctorid str) on success,
              (0, error message str) on failure
    """
    try:
        logger.debug('in edit_doctor')
        DoctorModel.update_by_dict(doctorid, post_data)
        logger.debug('executed')
    except Exception as ex:
        logger.error('Exception: ', ex)
        return 0, 'edit_doctor failed, did not edit doctor'
    else:
        return 1, str(doctorid)
def get_doctor(doctorid):
    """
    Get info of a doctor in the system.

    :param doctorid: doctor's uid
    :returns: (status, info) — (1, JSON string of the doctor's data) on
              success, (0, error message str) when the doctor does not
              exist or the lookup fails
    """
    # (Removed an unused ``info = {}`` local that was never read.)
    try:
        logger.debug('in get_doctor')
        doctor_dict = DoctorModel.get_dict(doctorid)
        logger.debug(doctor_dict)
    except UserNotExistException:
        logger.debug('in UserNotExistException')
        return 0, 'get doctor failed, the required Doctor Did Not Exist'
    except Exception as ex:
        logger.error('Exception: ', ex)
        return 0, 'get doctor failed'
    else:
        doctor_json = json.dumps(doctor_dict)
        logger.debug(doctor_json)
        return 1, doctor_json
def get_doctors():
    """
    Get summary info of all doctors in the system.

    :returns: (status, data) — (1, list of {doctorid, last_name, first_name})
              on success, (0, {'errinfo': ...}) on failure
    """
    logger.debug('in get_doctors')
    summaries = []
    try:
        all_doctors = DoctorModel.select()
        print(all_doctors)
        for doc in all_doctors:
            print('doc')
            logger.debug('docid: %s' % (doc))
            summaries.append({
                'doctorid': doc.email,
                'last_name': doc.last_name,
                'first_name': doc.first_name,
            })
        logger.debug('doctors:{}'.format(summaries))
    except Exception as ex:
        logger.error('Exception: ', ex)
        return 0, {'errinfo':'get doctors failed'}
    return 1, summaries
| {
"content_hash": "4022aa81b29e871a75c6eb7559c7f083",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 78,
"avg_line_length": 34.83125,
"alnum_prop": 0.5998564507446618,
"repo_name": "pa2515-group2/server",
"id": "bb9c5fb18b0a5a58437ce2a397b89dded319c5b0",
"size": "5573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/doctor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "186602"
}
],
"symlink_target": ""
} |
from typing import Any, Dict, Optional
import os
from collections import OrderedDict
from ConfigSpace import Configuration
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter
from sklearn.base import BaseEstimator
from autosklearn.askl_typing import FEAT_TYPE_TYPE
from autosklearn.pipeline.base import DATASET_PROPERTIES_TYPE, PIPELINE_DATA_DTYPE
from ...base import (
AutoSklearnChoice,
AutoSklearnPreprocessingAlgorithm,
ThirdPartyComponents,
_addons,
find_components,
)
bow_directory = os.path.split(__file__)[0]
_bows = find_components(__package__, bow_directory, AutoSklearnPreprocessingAlgorithm)
additional_components = ThirdPartyComponents(AutoSklearnPreprocessingAlgorithm)
_addons["data_preprocessing.text_encoding"] = additional_components
def add_bow(classifier: "BagOfWordChoice") -> None:
    """Register a third-party bag-of-words component with the module registry.

    The registered component becomes visible through
    ``BagOfWordChoice.get_components``.
    """
    additional_components.add_component(classifier)
class BagOfWordChoice(AutoSklearnChoice):
    """Choice component that selects one bag-of-words text encoder.

    Exposes a ``__choice__`` categorical hyperparameter over all available
    encoders and nests each encoder's own search space beneath it.
    """
    @classmethod
    def get_components(cls: BaseEstimator) -> Dict[str, BaseEstimator]:
        """Return all known encoders: built-ins plus registered third-party ones."""
        components: Dict[str, BaseEstimator] = OrderedDict()
        components.update(_bows)
        components.update(additional_components.components)
        return components
    def get_hyperparameter_search_space(
        self,
        feat_type: Optional[FEAT_TYPE_TYPE] = None,
        dataset_properties: Optional[DATASET_PROPERTIES_TYPE] = None,
        default: Optional[str] = None,
        include: Optional[Dict[str, str]] = None,
        exclude: Optional[Dict[str, str]] = None,
    ) -> ConfigurationSpace:
        """Build the ConfigurationSpace: a ``__choice__`` categorical plus one
        conditional sub-space per available encoder.

        :raises ValueError: if no encoder survives the include/exclude filters.
        """
        cs = ConfigurationSpace()
        if dataset_properties is None:
            dataset_properties = {}
        # Compile a list of legal preprocessors for this problem
        available_preprocessors = self.get_available_components(
            dataset_properties=dataset_properties, include=include, exclude=exclude
        )
        if len(available_preprocessors) == 0:
            raise ValueError(
                "No bag of word encoders found, please add any bag of word encoder"
                "component."
            )
        # Prefer tfidf_encoding as the default when the caller did not pick one.
        if default is None:
            defaults = ["tfidf_encoding"]
            for default_ in defaults:
                if default_ in available_preprocessors:
                    default = default_
                    break
        preprocessor = CategoricalHyperparameter(
            "__choice__", list(available_preprocessors.keys()), default_value=default
        )
        cs.add_hyperparameter(preprocessor)
        # Attach each encoder's search space, activated only when chosen.
        for name in available_preprocessors:
            preprocessor_configuration_space = available_preprocessors[
                name
            ].get_hyperparameter_search_space(
                feat_type=feat_type, dataset_properties=dataset_properties
            )
            parent_hyperparameter = {"parent": preprocessor, "value": name}
            cs.add_configuration_space(
                name,
                preprocessor_configuration_space,
                parent_hyperparameter=parent_hyperparameter,
            )
        self.configuration_space = cs
        self.dataset_properties = dataset_properties
        return cs
    def set_hyperparameters(
        self,
        configuration: Configuration,
        feat_type: Optional[FEAT_TYPE_TYPE] = None,
        init_params: Optional[Dict[str, Any]] = None,
    ) -> "BagOfWordChoice":
        """Instantiate the chosen encoder from *configuration*.

        Strips the "<choice>:" prefix from each nested hyperparameter name and
        forwards the cleaned values (plus random_state) to the encoder's
        constructor. Returns self for chaining.
        """
        new_params = {}
        params = configuration.get_dictionary()
        choice = params["__choice__"]
        del params["__choice__"]
        for param, value in params.items():
            param = param.replace(choice, "").replace(":", "")
            new_params[param] = value
        if init_params is not None:
            for param, value in init_params.items():
                # These next two lines are different than in the base class -
                # they allow removing the categorical feature indicator array
                # in order to not pass it to the no encoding
                if choice not in param:
                    continue
                param = param.replace(choice, "").replace(":", "")
                new_params[param] = value
        new_params["random_state"] = self.random_state
        self.new_params = new_params
        self.choice = self.get_components()[choice](**new_params)
        return self
    def transform(self, X: PIPELINE_DATA_DTYPE) -> PIPELINE_DATA_DTYPE:
        """Delegate transformation to the currently selected encoder."""
        return self.choice.transform(X)
| {
"content_hash": "b4ec5d217aac390fbccd27f01618c85e",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 86,
"avg_line_length": 35.265625,
"alnum_prop": 0.6366858661940629,
"repo_name": "automl/auto-sklearn",
"id": "f858781997c2761067d4cef76d8f3ba576c75eaf",
"size": "4514",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "autosklearn/pipeline/components/data_preprocessing/text_encoding/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "950"
},
{
"name": "Makefile",
"bytes": "3513"
},
{
"name": "Python",
"bytes": "2008151"
},
{
"name": "Shell",
"bytes": "4744"
}
],
"symlink_target": ""
} |
from tastypie.api import Api as BaseApi
from whippedcream.api import Api
from .resources import (NamesResourceDefault, NamesResource,
DateTimeResource, FileResource)
# API exposed under the explicit 'v1' namespace using the stock tastypie Api.
v1_api = BaseApi(api_name='v1')
v1_api.register(NamesResourceDefault())
v1_api.register(NamesResource())
v1_api.register(DateTimeResource())
v1_api.register(FileResource())
# Second Api instance (whippedcream variant) created without an api_name.
noname_api = Api()
noname_api.register(NamesResourceDefault())
noname_api.register(NamesResource())
| {
"content_hash": "c580cf0474b8b8e4f17a46ab3ae60488",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 60,
"avg_line_length": 27.529411764705884,
"alnum_prop": 0.7564102564102564,
"repo_name": "paulcwatts/django-whippedcream",
"id": "396167938e241afe10d6754c60bc80aa8024d97e",
"size": "468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "whippedcream/tests/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "22141"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.