repo_name (string) | path (string) | language (string) | license (string) | size (int64) | score (float64) | prefix (string) | middle (string) | suffix (string) |
|---|---|---|---|---|---|---|---|---|
clarkperkins/stackdio | stackdio/api/stacks/workflows.py | Python | apache-2.0 | 9,270 | 0.001834 | # -*- coding: utf-8 -*-
# Copyright 2017, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
import logging
import actstream
from celery import chain
from stackdio.api.stacks import tasks
from stackdio.core.constants import Action, Activity
logger = logging.getLogger(__name__)
class WorkflowOptions(object):
DEFAULTS = {
'max_attempts': 3,
}
def __init__(self, opts):
self.user_opts = opts
def __getattr__(self, item):
if item in self.user_opts:
return self.user_opts[item]
elif item in self.DEFAULTS:
            return self.DEFAULTS[item]
else:
raise AttributeError(item)
class LaunchWorkflowOptions(WorkflowOptions):
DEFAULTS = {
'max_attempts': 3,
# Skips launching if set to False
'launch': True,
'provision': True,
# Launches in parallel mode if set to True
'parallel': True,
# See stacks.tasks::launch_hosts for information on these params
        'simulate_launch_failures': False,
'simulate_ssh_failures': False,
'failure_percent': 0.3,
}
class DestroyWorkflowOptions(WorkflowOptions):
DEFAULTS = {
'parallel': True,
}
class BaseWorkflow(object):
_options_class = WorkflowOptions
def __init__(self, stack, host_ids=None, opts=None):
if opts is None:
opts = {}
self.stack = stack
self.host_ids = host_ids
self.opts = self._options_class(opts)
def task_list(self):
return []
def execute(self):
task_chain = chain(*self.task_list())
task_chain.apply_async()
class LaunchWorkflow(BaseWorkflow):
"""
Encapsulates all tasks required to launch a new stack or new hosts into
a stack.
"""
_options_class = LaunchWorkflowOptions
def task_list(self):
stack_id = self.stack.id
host_ids = self.host_ids
opts = self.opts
if not opts.launch:
return []
task_list = [
tasks.launch_hosts.si(
stack_id,
parallel=opts.parallel,
max_attempts=opts.max_attempts,
simulate_launch_failures=opts.simulate_launch_failures,
simulate_ssh_failures=opts.simulate_ssh_failures,
failure_percent=opts.failure_percent
),
tasks.update_metadata.si(stack_id, Activity.LAUNCHING, host_ids=host_ids),
tasks.tag_infrastructure.si(stack_id, activity=Activity.LAUNCHING, host_ids=host_ids),
tasks.register_dns.si(stack_id, Activity.LAUNCHING, host_ids=host_ids),
tasks.ping.si(stack_id, Activity.LAUNCHING),
tasks.sync_all.si(stack_id),
tasks.highstate.si(stack_id, max_attempts=opts.max_attempts),
tasks.global_orchestrate.si(stack_id, max_attempts=opts.max_attempts),
]
if opts.provision:
task_list.append(tasks.orchestrate.si(stack_id, max_attempts=opts.max_attempts))
task_list.append(tasks.finish_stack.si(stack_id))
self.stack.set_activity(Activity.QUEUED)
actstream.action.send(self.stack, verb='was submitted to launch queue')
return task_list
class DestroyHostsWorkflow(BaseWorkflow):
"""
Encapsulates all tasks required to destroy a set of hosts on a stack.
"""
_options_class = DestroyWorkflowOptions
def task_list(self):
stack_id = self.stack.pk
host_ids = self.host_ids
return [
tasks.update_metadata.si(stack_id, Activity.TERMINATING, host_ids=host_ids),
tasks.register_volume_delete.si(stack_id, host_ids=host_ids),
tasks.unregister_dns.si(stack_id, Activity.TERMINATING, host_ids=host_ids),
tasks.destroy_hosts.si(stack_id,
host_ids=host_ids,
delete_security_groups=False),
tasks.finish_stack.si(stack_id, Activity.IDLE),
]
class DestroyStackWorkflow(BaseWorkflow):
"""
Encapsulates all tasks required to destroy an entire stack.
"""
_options_class = DestroyWorkflowOptions
def __init__(self, stack, opts=None):
super(DestroyStackWorkflow, self).__init__(stack, opts=opts)
# Force host_ids to None since we're destroying the entire stack
self.host_ids = None
def task_list(self):
stack_id = self.stack.pk
return [
tasks.update_metadata.si(stack_id, Activity.TERMINATING),
tasks.register_volume_delete.si(stack_id),
tasks.unregister_dns.si(stack_id, Activity.TERMINATING),
tasks.destroy_hosts.si(stack_id, parallel=self.opts.parallel),
tasks.destroy_stack.si(stack_id),
]
class ActionWorkflow(BaseWorkflow):
"""
Runs an action
"""
def __init__(self, stack, action, args):
super(ActionWorkflow, self).__init__(stack)
self.action = action
self.args = args
def task_list(self):
# TODO: not generic enough
base_tasks = {
Action.LAUNCH: [
tasks.launch_hosts.si(self.stack.id),
],
Action.TERMINATE: [
tasks.update_metadata.si(self.stack.id, Activity.TERMINATING),
tasks.register_volume_delete.si(self.stack.id),
tasks.unregister_dns.si(self.stack.id, Activity.TERMINATING),
tasks.destroy_hosts.si(self.stack.id, delete_hosts=False,
delete_security_groups=False),
],
Action.PAUSE: [
tasks.execute_action.si(self.stack.id, self.action, Activity.PAUSING, *self.args),
],
Action.RESUME: [
tasks.execute_action.si(self.stack.id, self.action, Activity.RESUMING, *self.args),
],
Action.PROPAGATE_SSH: [
tasks.propagate_ssh.si(self.stack.id),
],
Action.SINGLE_SLS: [
tasks.single_sls.si(self.stack.id, arg['component'], arg.get('host_target'))
for arg in self.args
],
}
action_to_activity = {
Action.LAUNCH: Activity.LAUNCHING,
Action.TERMINATE: Activity.TERMINATING,
Action.PAUSE: Activity.PAUSING,
Action.RESUME: Activity.RESUMING,
Action.PROVISION: Activity.PROVISIONING,
Action.ORCHESTRATE: Activity.ORCHESTRATING,
Action.PROPAGATE_SSH: Activity.PROVISIONING,
Action.SINGLE_SLS: Activity.ORCHESTRATING,
}
action_to_end_activity = {
Action.LAUNCH: Activity.IDLE,
Action.TERMINATE: Activity.TERMINATED,
Action.PAUSE: Activity.PAUSED,
Action.RESUME: Activity.IDLE,
Action.PROVISION: Activity.IDLE,
Action.ORCHESTRATE: Activity.IDLE,
Action.PROPAGATE_SSH: Activity.IDLE,
Action.SINGLE_SLS: Activity.IDLE,
}
# Start off with the base
task_list = base_tasks.get(self.action, [])
# Update the metadata after the main action has been executed
if self.action not in (Action.SINGLE_SLS, Action.TERMINATE):
task_list.append(tasks.update_metadata.si(self.stack.id,
action_to_activity[self.action]))
# Resuming and launching requires DNS updates
if self.action in (Action.RESUME, Action.LAUNCH):
task_list.append(tasks.tag_infrastructure.si(
|
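# A minimal, hypothetical sketch of the options-fallback pattern WorkflowOptions
# implements above: user-supplied options shadow class-level DEFAULTS, and
# unknown names raise AttributeError. Class and option names are illustrative,
# not part of stackdio's API.
class _OptionsSketch(object):
    DEFAULTS = {'max_attempts': 3}

    def __init__(self, opts):
        self.user_opts = opts

    def __getattr__(self, item):
        # only reached when normal attribute lookup fails
        if item in self.user_opts:
            return self.user_opts[item]
        if item in self.DEFAULTS:
            return self.DEFAULTS[item]
        raise AttributeError(item)

opts = _OptionsSketch({'parallel': False})
assert opts.parallel is False   # user option wins
assert opts.max_attempts == 3   # falls back to DEFAULTS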
GoogleCloudPlatform/appengine-config-transformer | yaml_conversion/lib/google/appengine/api/yaml_object.py | Python | apache-2.0 | 10,552 | 0.007487 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/python2.4
#
# Copyright 2007 Google Inc. All Rights Reserved.
# WARNING: This file is externally viewable by our users. All comments from
# this file will be stripped. The docstrings will NOT. Do not put sensitive
# information in docstrings. If you must communicate internal information in
# this source file, please place them in comments only.
from yaml_conversion.lib.google.appengine.api import validation
from yaml_conversion.lib.google.appengine.api import yaml_listener
from yaml_conversion.lib.google.appengine.api import yaml_builder
from yaml_conversion.lib.google.appengine.api import yaml_errors
import yaml
class _ObjectMapper(object):
  """Wrapper used for mapping attributes from a yaml file to an object.
This wrapper is required because objects do not know what property they are
  associated with at creation time, and therefore can not be instantiated
with the correct class until they are mapped to their parents.
"""
def __init__(self):
"""Object mapper starts off with empty value."""
self.value = None
self.seen = set()
def set_value(self, value):
"""Set value of instance to map to.
Args:
value: Instance that this mapper maps to.
"""
self.value = value
def see(self, key):
if key in self.seen:
raise yaml_errors.DuplicateAttribute("Duplicate attribute '%s'." % key)
self.seen.add(key)
class _ObjectSequencer(object):
"""Wrapper used for building sequences from a yaml file to a list.
This wrapper is required because objects do not know what property they are
  associated with at creation time, and therefore can not be instantiated
with the correct class until they are mapped to their parents.
"""
def __init__(self):
"""Object sequencer starts off with empty value."""
self.value = []
self.constructor = None
def set_constructor(self, constructor):
"""Set object used for constructing new sequence instances.
Args:
constructor: Callable which can accept no arguments. Must return
an instance of the appropriate class for the container.
"""
self.constructor = constructor
class ObjectBuilder(yaml_builder.Builder):
"""Builder used for constructing validated objects.
Given a class that implements validation.ValidatedBase, it will parse a YAML
document and attempt to build an instance of the class.
ObjectBuilder will only map YAML fields that are accepted by the
ValidatedBase's GetValidator function.
  Lists are mapped to validated.Repeated attributes and maps are mapped to
validated.Type properties.
For a YAML map to be compatible with a class, the class must have a
constructor that can be called with no parameters. If the provided type
does not have such a constructor a parse time error will occur.
"""
def __init__(self, default_class):
"""Initialize validated object builder.
Args:
default_class: Class that is instantiated upon the detection of a new
document. An instance of this class will act as the document itself.
"""
self.default_class = default_class
def _GetRepeated(self, attribute):
"""Get the ultimate type of a repeated validator.
Looks for an instance of validation.Repeated, returning its constructor.
Args:
attribute: Repeated validator attribute to find type for.
Returns:
      The expected class of the Type validator, otherwise object.
"""
if isinstance(attribute, validation.Optional):
attribute = attribute.validator
if isinstance(attribute, validation.Repeated):
return attribute.constructor
return object
def BuildDocument(self):
"""Instantiate new root validated object.
Returns:
New instance of validated object.
"""
return self.default_class()
def BuildMapping(self, top_value):
"""New instance of object mapper for opening map scope.
Args:
top_value: Parent of nested object.
Returns:
New instance of object mapper.
"""
result = _ObjectMapper()
# The first map encountered must be mapped directly to the
# main document instance.
if isinstance(top_value, self.default_class):
result.value = top_value
return result
def EndMapping(self, top_value, mapping):
"""When leaving scope, makes sure new object is initialized.
This method is mainly for picking up on any missing required attributes.
Args:
top_value: Parent of closing mapping object.
mapping: _ObjectMapper instance that is leaving scope.
"""
# make sure that mapping.value is a non-built-in type (i.e. can have
# 'CheckInitialized' called on it)
if not hasattr(mapping.value, 'CheckInitialized'):
raise validation.ValidationError('Cannot convert map to non-map value.')
try:
mapping.value.CheckInitialized()
except validation.ValidationError:
# These should just pass through.
raise
except Exception, e:
# Some errors may have problematic encoding or other issues.
# Re-raising an error in this block would be very hard to debug
# for the time being so instead, on error, the value is merely
# obscured.
try:
error_str = str(e)
except Exception:
error_str = '<unknown>'
# Wrap in a ValidationError
raise validation.ValidationError(error_str, e)
def BuildSequence(self, top_value):
"""New instance of object sequence.
Args:
top_value: Object that contains the new sequence.
Returns:
A new _ObjectSequencer instance.
"""
return _ObjectSequencer()
def MapTo(self, subject, key, value):
"""Map key-value pair to an objects attribute.
Args:
subject: _ObjectMapper of object that will receive new attribute.
key: Key of attribute.
value: Value of new attribute.
Raises:
UnexpectedAttribute when the key is not a validated attribute of
the subject value class.
"""
assert isinstance(subject.value, validation.ValidatedBase)
try:
attribute = subject.value.GetValidator(key)
except validation.ValidationError, err:
raise yaml_errors.UnexpectedAttribute(err)
if isinstance(value, _ObjectMapper):
# Now know what class the new instance should be.
# Time to construct it from the attributes expected type.
value.set_value(attribute.expected_type())
value = value.value
elif isinstance(value, _ObjectSequencer):
# Now know what class new instances within the sequence should be.
value.set_constructor(self._GetRepeated(attribute))
value = value.value
subject.see(key)
try:
subject.value.Set(key, value)
except validation.ValidationError, e:
# Some errors may have problematic encoding or other issues.
# Re-raising an error in this block would be very hard to debug
# for the time being so instead, on error, the value is merely
# obscured.
try:
error_str = str(e)
except Exception:
error_str = '<unknown>'
try:
value_str = str(value)
except Exception:
value_str = '<unknown>'
# Update error message with a better message.
e.message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
(value_str, key, error_str))
raise e
except Exception, e:
try:
error_str = str(e)
except Exception:
error_str = '<unknown>'
try:
value_str = str(value)
except Exception:
value_str = '<unknown>'
# Raise a |
lorensen/VTKExamples | src/Python/GeometricObjects/ConvexPointSet.py | Python | apache-2.0 | 2,484 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import vtk
def main():
cps = vtk.vtkConvexPointSet()
points = vtk.vtkPoints()
points.InsertNextPoint(0, 0, 0)
points.InsertNextPoint(1, 0, 0)
points.InsertNextPoint(1, 1, 0)
points.InsertNextPoint(0, 1, 0)
points.InsertNextPoint(0, 0, 1)
points.InsertNextPoint(1, 0, 1)
points.InsertNextPoint(1, 1, 1)
points.InsertNextPoint(0, 1, 1)
points.InsertNextPoint(0.5, 0, 0)
points.InsertNextPoint(1, 0.5, 0)
points.InsertNextPoint(0.5, 1, 0)
points.InsertNextPoint(0, 0.5, 0)
points.InsertNextPoint(0.5, 0.5, 0)
for i in range(0, 13):
cps.GetPointIds().InsertId(i, i)
ug = vtk.vtkUnstructuredGrid()
ug.Allocate(1, 1)
ug.InsertNextCell(cps.GetCellType(), cps.GetPointIds())
ug.SetPoints(points)
colors = vtk.vtkNamedColors()
mapper = vtk.vtkDataSetMapper()
mapper.SetInputData(ug)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(colors.GetColor3d("Tomato"))
actor.GetProperty().SetLineWidth(3)
actor.GetProperty().EdgeVisibilityOn()
# Glyph the points
sphere = vtk.vtkSphereSource()
sphere.SetPhiResolution(21)
sphere.SetThetaResolution(21)
sphere.SetRadius(.03)
# Create a polydata to store everything in
polyData = vtk.vtkPolyData()
polyData.SetPoints(points)
pointMapper = vtk.vtkGlyph3DMapper()
    pointMapper.SetInputData(polyData)
    pointMapper.SetSourceConnection(sphere.GetOutputPort())
pointActor = vtk.vtkActor()
pointActor.SetMapper(pointMapper)
pointActor.GetProperty().SetColor(colors.GetColor3d("Peacock"))
# Create a renderer, render window, and interactor
renderer = vtk.vtkRenderer()
renderWindow = vtk.vtkRenderWindow()
renderWindow.SetWindowName("Convex Point Set")
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
# Add the actors to the scene
renderer.AddActor(actor)
renderer.AddActor(pointActor)
renderer.SetBackground(colors.GetColor3d("Silver"))
renderer.ResetCamera()
renderer.GetActiveCamera().Azimuth(210)
renderer.GetActiveCamera().Elevation(30)
renderer.ResetCameraClippingRange()
# Render and interact
renderWindow.SetSize(640, 480)
renderWindow.Render()
renderWindowInteractor.Start()
if __name__ == '__main__':
main()
|
HDE/python-lambda-local | lambda_local/event.py | Python | mit | 206 | 0 | '''
Copyright 2015-2020 HENNGE K.K. (formerly known as HDE, Inc.)
Licensed under MIT.
'''
import json
def read_event(path):
with open(path) as event:
data = json.load(event)
    return data
|
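# A short usage sketch for read_event above, assuming a JSON event payload on
# disk; the helper is repeated here (condensed) so the sketch runs standalone.
import json
import tempfile

def read_event_sketch(path):
    with open(path) as event:
        return json.load(event)

with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as tmp:
    json.dump({'key1': 'value1'}, tmp)
print(read_event_sketch(tmp.name))  # -> {'key1': 'value1'}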
hms-dbmi/exac_browser | src/precompute_histogram.py | Python | mit | 3,502 | 0.002856 | # Adapted from Daniel Birnbaum's histogram script
import argparse
import gzip
import pipes
import sys
from collections import Counter
import numpy
metrics = ['DP', 'GQ']
def main(args):
f = gzip.open(args.vcf) if args.vcf.endswith('.gz') else open(args.vcf)
if args.output is None: args.output = args.vcf.replace('.vcf', '.hist.vcf')
if not args.output.endswith('.gz'): args.output += '.gz'
pipe = pipes.Template()
pipe.append('bgzip -c /dev/stdin', '--')
g = pipe.open(args.output, 'w')
header = None
for line in f:
line = line.strip()
# Reading header lines to get VEP and individual arrays
if line.startswith('#'):
line = line.lstrip('#')
if line.startswith('CHROM'):
header = line.split()
header = dict(zip(header, range(len(header))))
continue
if header is None:
print >> sys.stderr, "VCF file does not have a header line (CHROM POS etc.). Exiting."
sys.exit(1)
fields = line.split('\t')
# Pull out annotation info from INFO and ALT fields
new_info = fields[header['INFO']].rstrip(';')
for metric in metrics:
data = get_histogram_for_variant(line, metric)
midpoints, hist = data
new_info += ';%s_MID=' % (metric) + '|'.join(map(str, midpoints))
new_info += ';%s_HIST=' % (metric) + '|'.join(map(str, hist))
fields[header['INFO']] = new_info
g.write('\t'.join(fields) + '\n')
f.close()
g.close()
def convert_to_int(val):
"""
Converts string to int if possible, otherwise returns initial string
"""
try:
return int(val)
except ValueError:
pass
try:
return float(val)
except ValueError:
return val
def get_histogram_for_variant(vcf_line, metric="DP", num_bins=40, midpoints=True, variants_only=False):
vcf_line = vcf_line.strip('\n')
if vcf_line.startswith('#'):
return None
else:
fields = vcf_line.split('\t')
# alts = fields[4].split(',')
try:
idx = fields[8].split(':').index(metric)
except Exception, e:
return None
distr = []
# get distribution for metric
for sample in fields[9:]:
# This is only DP/GQ for now
sample_info = sample.split(':')
if sample_info[0] == './.': continue
        if idx < len(sample_info) and sample_info[idx] != '.':
distr.append(sample_info[idx])
mids, hist = get_hist_from_distribution(distr, midpoints, num_bins)
return map(str, mids), map(str, hist)
def get_hist_from_distribution(distr, midpoints, num_bins):
distr = [convert_to_int(x) for x in distr]
if any([type(x) == str for x in distr]):
c = Counter(distr)
counts = zip(*c.items())
return counts
else:
hist = numpy.histogram(distr, bins=num_bins)
        if midpoints:
edges = hist[1]
mids = [(edges[i]+edges[i+1])/2 for i in range(len(edges)-1)]
return mids, hist[0]
else:
return hist[1], hist[0]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--vcf', '--input', '-i', help='Input VCF file; may be gzipped', required=True)
parser.add_argument('--output', '-o', help='Output VCF file; may be gzipped')
args = parser.parse_args()
main(args) |
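# A standalone sketch of the bin-midpoint logic in get_hist_from_distribution
# above: numpy.histogram returns num_bins+1 edges, and the script reports the
# midpoint of each adjacent pair of edges. The sample data is made up.
import numpy

distr = [10, 12, 12, 13, 20, 21, 35]
hist, edges = numpy.histogram(distr, bins=5)
mids = [(edges[i] + edges[i + 1]) / 2.0 for i in range(len(edges) - 1)]
print(mids)  # one midpoint per bin
print(hist)  # one count per bin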
openstack/heat | heat/tests/openstack/sahara/test_data_source.py | Python | apache-2.0 | 4,214 | 0 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from heat.common import exception
from heat.common import template_format
from heat.engine.resources.openstack.sahara import data_source
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
data_source_template = """
heat_template_version: 2015-10-15
resources:
data-source:
type: OS::Sahara::DataSource
properties:
name: my-ds
type: swift
url: swift://container.sahara/text
credentials:
user: admin
password: swordfish
"""
class SaharaDataSourceTest(common.HeatTestCase):
def setUp(self):
super(SaharaDataSourceTest, self).setUp()
t = template_format.parse(data_source_template)
self.stack = utils.parse_stack(t)
resource_defns = self.stack.t.resource_definitions(self.stack)
self.rsrc_defn = resource_defns['data-source']
self.client = mock.Mock()
self.patchobject(data_source.DataSource, 'client',
return_value=self.client)
def _create_resource(self, name, snippet, stack):
ds = data_source.DataSource(name, snippet, stack)
value = mock.MagicMock(id='12345')
self.client.data_sources.create.return_value = value
scheduler.TaskRunner(ds.create)()
return ds
def test_create(self):
ds = self._create_resource('data-source', self.rsrc_defn, self.stack)
args = self.client.data_sources.create.call_args[1]
expected_args = {
'name': 'my-ds',
'description': '',
'data_source_type': 'swift',
'url': 'swift://container.sahara/text',
'credential_user': 'admin',
'credential_pass': 'swordfish'
}
self.assertEqual(expected_args, args)
self.assertEqual('12345', ds.resource_id)
expected_state = (ds.CREATE, ds.COMPLETE)
self.assertEqual(expected_state, ds.state)
def test_update(self):
ds = self._create_resource('data-source', self.rsrc_defn,
self.stack)
props = self.stack.t.t['resources']['data-source']['properties'].copy()
props['type'] = 'hdfs'
props['url'] = 'my/path'
self.rsrc_defn = self.rsrc_defn.freeze(properties=props)
scheduler.TaskRunner(ds.update, self.rsrc_defn)()
data = {
'name': 'my-ds',
'description': '',
'type': 'hdfs',
'url': 'my/path',
'credentials': {
'user': 'admin',
'password': 'swordfish'
}
}
self.client.data_sources.update.assert_called_once_with(
'12345', data)
self.assertEqual((ds.UPDATE, ds.COMPLETE), ds.state)
def test_show_attribute(self):
ds = self._create_resource('data-source', self.rsrc_defn, self.stack)
value = mock.MagicMock()
value.to_dict.return_value = {'ds': 'info'}
self.client.data_sources.get.return_value = value
self.assertEqual({'ds': 'info'}, ds.FnGetAtt('show'))
def test_validate_password_without_user(self):
props = self.stack.t.t['resources']['data-source']['properties'].copy()
del props['credentials']['user']
self.rsrc_defn = self.rsrc_defn.freeze(properties=props)
        ds = data_source.DataSource('data-source', self.rsrc_defn, self.stack)
ex = self.assertRaises(exception.StackValidationFailed, ds.validate)
error_msg = ('Property error: resources.data-source.properties.'
'credentials: Property user not assigned')
        self.assertEqual(error_msg, str(ex))
|
fahhem/mbed-os | tools/export/kds/__init__.py | Python | apache-2.0 | 1,603 | 0.001248 | """
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os.path import splitext, basename
from tools.export.exporters import Exporter, deprecated_exporter
@deprecated_exporter
class KDS(Exporter):
NAME = 'Kinetis Design Studio'
TOOLCHAIN = 'GCC_ARM'
TARGETS = [
'K64F',
'HEXIWEAR',
'K22F',
]
def generate(self):
libraries = []
for lib in self.resources.libraries:
l, _ = splitext(basename(lib))
libraries.append(l[3:])
ctx = {
'name': self.project_name,
'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script,
'object_files': self.resources.objects,
'libraries': libraries,
'symbols': self.toolchain.get_symbols()
}
self.gen_file('kds/%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('kds/%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
self.gen_file('kds/launch.tmpl', ctx, '%s.launch' % self.project_name)
|
Mikescher/Project-Euler_Befunge | compiled/Python3/Euler_Problem-080.py | Python | mit | 5,544 | 0.059163 | #!/usr/bin/env python3
# transpiled with BefunCompile v1.3.0 (c) 2017
import gzip, base64
_g = ("AR+LCAAAAAAABACtkrFyhCAQhl+FQ6+RMbdy4kXCMCnyECkYTJEZWioqHz4/RM85c13UUWB32f32h8Q+85t/BzzVP54j6jP2fgTJyxEk4QgSOoLEj7WtWGXuhvTgTrtw"
+ "s9rMzlGzNPmq5d/ciU6vXmNrbrjtWq1HoYIbRRd15aearM6ma1BKnPE1ydJIUVotKSIJ3I2k0FZpqv180m0XlXh1baBboCFSv1XeLodxyzqhqK7Bg2Xem3FsrteFHkkH"
+ "iqU2CvSqaZobRbI3CgOFr9OsiixABzX9EpNbdPD3WvyNg6JhOpCMin9wiqYSq3dQy2TW4B4d6XgVo2E2I7QndLUJgpq5S8B0rSTPujUJrekg4gfvsiwEdSwiddNTZJu0"
+ "pRdWmkGDgPWRBoENwYiVfLKbZCkfFTQSRZoF8oz4yKi/YNTCJBA7cRFKhev+tMvVmJJ3ARu1NsTqPyEJ2aHJk52ZTqJxkrh79wi/Cy0JbRa6nZ/lWMZyN7IuNIpAYz6O"
+ "K+TCXDZgPxd+GB4AfwDY6hCc2gQAAA==")
g = base64.b64decode(_g)[1:]
for i in range(base64.b64decode(_g)[0]):
g = gzip.decompress(g)
g=list(g)
def gr(x,y):
if(x>=0 and y>=0 and x<69 and y<18):
return g[y*69 + x];
return 0;
def gw(x,y,v):
if(x>=0 and y>=0 and x<69 and y<18):
g[y*69 + x]=v;
def td(a,b):
return ((0)if(b==0)else(a//b))
def tm(a,b):
return ((0)if(b==0)else(a%b))
s=[]
def sp():
global s
if (len(s) == 0):
return 0
return s.pop()
def sa(v):
global s
s.append(v)
def sr():
global s
if (len(s) == 0):
return 0
return s[-1]
def _0():
gw(9,0,0)
gw(2,0,2)
sa(2)
return 1
def _1():
sa(100)
sa(10000-gr(2,0))
return 2
def _2():
return (3)if(sp()!=0)else(26)
def _3():
sa(sp()-1)
sa(sr());
return (4)if(sp()!=0)else(5)
def _4():
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sp()-gr(2,0))
return 2
def _5():
gw(68,1,0)
gw(68,3,0)
gw(68,5,0)
sp();
sa(sr());
sa(59)
sa(59)
return 6
def _6():
return (34)if(sp()!=0)else(7)
def _7():
sp();
gw(68,1,sp())
gw(2,0,0)
sa(100)
return 8
def _8():
global t0
gw(4,0,gr(2,0)*gr(2,0))
t0=((gr(68,3)*gr(2,0)*20)+gr(4,0))%100
gw(4,0,((gr(68,3)*gr(2,0)*20)+gr(4,0))/100)
gw(68,5,t0)
sa(59)
sa(59)
return 9
def _9():
return (33)if(sp()!=0)else(10)
def _10():
sp();
sa(0)
sa(gr(9,5)-gr(9,1))
return 11
def _11():
return (15)if(sp()!=0)else(12)
def _12():
sa(sp()+1)
return (13)if(sr()!=60)else(14)
def _13():
global t0
global t1
sa(sr());
t0=gr(sr()+9,5)
sa(sp()+9)
sa(1)
v0=sp()
t1=gr(sp(),v0)
sa(t0-t1)
return 11
def _14():
gw(2,0,gr(2,0)+1)
sp();
return 8
def _15():
global t0
global t1
global t2
sa(sr());
t0=gr(sr()+9,5)
sa(sp()+9)
sa(1)
v0=sp()
t1=gr(sp(),v0)
t2=(1)if(t0>t1)else(0)
return (16)if((t2)!=0)else(14)
def _16():
gw(2,0,gr(2,0)-1)
gw(68,5,0)
gw(4,0,gr(2,0)*gr(2,0))
gw(6,0,gr(68,1)-gr(4,0))
gw(7,0,gr(68,3)*gr(2,0)*20)
sp();
sa(59)
sa(59)
return 17
def _17():
global t0
t0=0
return 18
def _18():
return (32)if(gr(7,0)>gr(6,0))else(19)
def _19():
global t0
gw(4,0,t0)
sa(gr(6,0)-gr(7,0))
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+8)
sa(5)
v0=sp()
v1=sp()
gw(v1,v0,sp())
sa(sp()-1)
sa(sr());
return (20)if(sp()!=0)else(21)
def _20():
sa(sr());
gw(6,0,gr(sr()+9,1)-gr(4,0))
gw(7,0,gr(sr()+9,3)*gr(2,0)*20)
return 17
def _21():
gw(68,1,gr(68,5))
sp();
sa(59)
sa(59)
return 22
def _22():
return (31)if(sp()!=0)else(23)
def _23():
gw(9,3,((gr(9,3)%10)*10)+(gr(10,3)/10))
sp();
sa(1)
sa(-58)
return 24
def _24():
return (30)if(sp()!=0)else(25)
def _25():
gw(68,3,((gr(68,3)%10)*10)+gr(2,0))
gw(9,0,gr(2,0)+gr(9,0))
gw(2,0,0)
sp();
sa(sp()-1)
sa(sr());
return (8)if(sp()!=0)else(26)
def _26():
sp();
return 27
def _27():
sa(sr()+1)
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()-100)
return (28)if(sp()!=0)else(29)
def _28():
sa(sr());
gw(2,0,sp())
return 1
def _29():
print(gr(9,0),end=" ",flush=True)
sp();
return 35
def _30():
global t0
global t1
sa(sr());
sa(sr());
t0=(gr(sr()+9,3)%10)*10
sa(sp()+10)
sa(3)
v0=sp()
t1=gr(sp(),v0)
t1=t1/10
sa(t0+t1)
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+9)
sa(3)
v0=sp()
v1=sp()
gw(v1,v0,sp())
sa(sp()+1)
sa(sr()-59)
return 24
def _31():
sa(sp()-1)
sa(sr());
sa(gr(sr()+9,5))
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+9)
sa(1)
v0=sp()
v1=sp()
gw(v1,v0,sp())
sa(sr());
return 22
def _32():
global t0
gw(6,0,gr(6,0)+100)
t0=t0+1
return 18
def _33():
sa(sp()-1)
sa(sr());
sa((gr(sr()+9,3)*gr(2,0)*20)+gr(4,0))
gw(4,0,sr()/100)
sa(sp()%100);
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+9)
sa(5)
v0=sp()
v1=sp()
gw(v1,v0,sp())
sa(sr());
    return 9
def _34():
sa(sp()-1)
sa(sr());
sa(0)
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+9)
sa(1)
v0=sp()
v1=sp()
gw(v1,v0,sp())
sa(sr());
sa(0)
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+9)
sa(3)
v0=sp()
v1=sp()
gw(v1,v0,sp())
sa(sr());
    sa(0)
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+9)
sa(5)
v0=sp()
v1=sp()
gw(v1,v0,sp())
sa(sr());
return 6
m=[_0,_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14,_15,_16,_17,_18,_19,_20,_21,_22,_23,_24,_25,_26,_27,_28,_29,_30,_31,_32,_33,_34]
c=0
while c<35:
c=m[c]()
|
swharden/SWHLab | doc/uses/EPSCs-and-IPSCs/variance method/2016-12-16 tryout2.py | Python | mit | 4,500 | 0.032222 | """
This script investigates how calculating phasic currents from voltage clamp
recordings may benefit from subtracting-out the "noise" determined from a
subset of the quietest pieces of the recording, rather than using smoothing
or curve fitting to guess a guassian-like RMS noise function.
"""
import os
import sys
sys.path.append("../")
sys.path.append("../../")
sys.path.append("../../../")
sys.path.append("../../../../")
import swhlab
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.mlab as mlab
POINTS_PER_SEC=20000
POINTS_PER_MS=int(POINTS_PER_SEC/1000)
CHUNK_POINTS=POINTS_PER_MS*10 # size of Y pieces to calculate variance from
PERCENT_STEP=10 # percentile steps to display
HIST_RESOLUTION=.1 # pA per bin
COLORMAP=plt.get_cmap('jet') # which color scheme do we want to use?
#COLORMAP=plt.get_cmap('winter') # which color scheme do we want to use?
def quietParts(data,percentile=10):
"""
Given some data (Y) break it into chunks and return just the quiet ones.
Returns data where the variance for its chunk size is below the given percentile.
CHUNK_POINTS should be adjusted so it's about 10ms of data.
"""
nChunks=int(len(Y)/CHUNK_POINTS)
chunks=np.reshape(Y[:nChunks*CHUNK_POINTS],(nChunks,CHUNK_POINTS))
variances=np.var(chunks,axis=1)
percentiles=np.empty(len(variances))
for i,variance in enumerate(variances):
percentiles[i]=sorted(variances).index(variance)/len(variances)*100
selected=chunks[np.where(percentiles<=percentile)[0]].flatten()
return selected
def ndist(data,Xs):
"""
given some data and a list of X posistions, return the normal
distribution curve as a Y point at each of those Xs.
"""
sigma=np.sqrt(np.var(data))
center=np.average(data)
curve=mlab.normpdf(Xs,center,sigma)
curve*=len(data)*HIST_RESOLUTION
return curve
if __name__=="__main__":
Y=np.load("sweepdata.npy")
    baseline=swhlab.common.lowpass(Y,POINTS_PER_MS*250)
plt.figure(figsize=(15,5))
plt.plot(Y)
plt.plot(baseline,color='r',alpha=.5,lw=5)
plt.savefig("baseline.png")
plt.figure(figsize=(15,5))
plt.plot(Y-baseline)
plt.axhline(0,color='r',alpha=.5,lw=5)
plt.savefig("baseline2.png")
plt.show()
if __name__=="__main__" and False:
# apply baseline
Y=Y-baseline
# predict what our histogram will look like
padding=50
    histCenter=int(np.average(Y))
histRange=(histCenter-padding,histCenter+padding)
histBins=int(abs(histRange[0]-histRange[1])/HIST_RESOLUTION)
# FIRST CALCULATE THE 10-PERCENTILE CURVE
data=quietParts(Y,10) # assume 10% is a good percentile to use
hist,bins=np.histogram(data,bins=histBins,range=histRange,density=False)
hist=hist.astype(np.float) # histogram of data values
curve=ndist(data,bins[:-1]) # normal distribution curve
hist[hist == 0] = np.nan
histValidIs=np.where(~np.isnan(hist))
histX,histY=bins[:-1][histValidIs],hist[histValidIs] # remove nans
baselineCurve=curve/np.max(curve) # max is good for smooth curve
# THEN CALCULATE THE WHOLE-SWEEP HISTOGRAM
hist,bins=np.histogram(Y,bins=histBins,range=histRange,density=False)
hist=hist.astype(np.float) # histogram of data values
hist[hist == 0] = np.nan
histValidIs=np.where(~np.isnan(hist))
histX,histY=bins[:-1][histValidIs],hist[histValidIs] # remove nans
histY/=np.percentile(histY,98) # percentile is needed for noisy data
# DETERMINE THE DIFFERENCE
diffX=bins[:-1][histValidIs]
diffY=histY-baselineCurve[histValidIs]
diffY[diffY<0]=np.nan
# NOW PLOT THE DIFFERENCE
plt.figure(figsize=(10,10))
plt.subplot(211)
plt.grid()
plt.plot(histX,histY,'b.',ms=10,alpha=.5,label="data points")
plt.plot(bins[:-1],baselineCurve,'r-',lw=3,alpha=.5,label="10% distribution")
plt.legend(loc='upper left',shadow=True)
plt.ylabel("normalized distribution")
plt.axis([histCenter-20,histCenter+20,0,1.5])
plt.subplot(212)
plt.grid()
plt.plot(diffX,diffY,'.',ms=10,alpha=.5,color='b')
plt.axvline(histCenter,color='r',lw=3,alpha=.5,ls='--')
plt.legend(loc='upper left',shadow=True)
plt.ylabel("difference")
plt.xlabel("histogram data points (pA)")
plt.margins(0,.1)
plt.axis([histCenter-20,histCenter+20,0,None])
plt.tight_layout()
plt.savefig("2016-12-16-tryout-yesSub.png")
plt.show()
print("DONE") |
wtarimo/CFCScorePredictor | Fixture.py | Python | artistic-2.0 | 1,032 | 0.022287 | """
William Tarimo
COSI 157 - Final Project: CFC Score Predictor
Fixture Class: Implements a soccer game fixture
11/30/2012
"""
class Fixture(object):
"""Implements a soccer game fixture"""
def __init__(self,game,opponent="",time="",venue="",result=[],cfcScorers=[],oppositionScorers=[]):
self.game = game
self.opponent = opponent
        self.time = time
self.venue = venue
self.result = [] if result=='[]' else [int(x) for x in result[1:-1].split(", ")]
self.cfcScorers = [] if cfcScorers=='[]' else [int(x) for x in cfcScorers[1:-1].split(", ")]
self.oppositionScorers = [] if oppositionScorers=='[]' else [int(x) for x in oppositionScorers[1:-1].split(", ")]
def updateResults(self,result,cfcScorers,oppositionScorers):
"""Sets the result of the fixture"""
self.result = result
self.cfcScorers = cfcScorers
self.oppositionScorers = oppositionScorers
return self
|
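# A sketch of the string-to-list parsing Fixture.__init__ performs above on
# serialized values like "[2, 1]"; an empty list arrives as the literal "[]".
# The sample values are made up.
def parse_scores(raw):
    return [] if raw == '[]' else [int(x) for x in raw[1:-1].split(', ')]

assert parse_scores('[]') == []
assert parse_scores('[2, 1]') == [2, 1]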
miing/mci_migo | acceptance/tests/new_account/new_account_8_confirmation_requires_login.py | Python | agpl-3.0 | 862 | 0 | from sst.actions import (
assert_element,
    assert_title,
check_flags,
click_button,
get_element,
go_to,
    wait_for,
)
from u1testutils import mail
from u1testutils.sso import mail as sso_mail
from u1testutils.sst import config
from acceptance import helpers
check_flags('allow_unverified')
config.set_base_url_from_env()
edit_account_anchor = {'data-qa-id': 'edit_account'}
email_address = mail.make_unique_test_email_address()
password = "Admin007"
helpers.register_account(email_address, password=password)
wait_for(assert_element, **edit_account_anchor)
helpers.logout()
link = sso_mail.get_verification_link_for_address(email_address)
go_to(link)
wait_for(assert_title, "Log in")
helpers.login(email_address, password)
go_to(link)
click_button(get_element(name='continue'))
wait_for(assert_element, **edit_account_anchor)
|
phoebeargon/BigDataForEducation | big_data_for_education/users/models.py | Python | mit | 837 | 0.001195 | from django.contrib.auth.models import AbstractUser
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class User(AbstractUser):
# First Name and Last Name do not cover name patterns
# around the globe.
    name = models.CharField(_('Name of User'), blank=True, max_length=255)
def __str__(self):
return self.username
def get_absolute_url(self):
return reverse('users:detail', kwargs={'username': self.username})
class Document(models.Model):
    description = models.CharField(max_length=255, blank=True)
document = models.FileField(upload_to='documents/%Y/%m/%d/')
uploaded_at = models.DateTimeField(auto_now_add=True)
|
ismaelgaudioso/pihome | Events/draw.py | Python | mit | 193 | 0.020725 | from Events.event import *
class DrawEvent(Event):
"""
    The event for when the CPU Spinner wants the main view to update the display
"""
def __init__(self):
self.name = "Draw Event" |
drufat/sympy | sympy/printing/tests/test_theanocode.py | Python | bsd-3-clause | 9,802 | 0.006223 | from sympy.external import import_module
from sympy.utilities.pytest import raises, SKIP
from sympy.core.compatibility import range
theano = import_module('theano')
if theano:
import numpy as np
ts = theano.scalar
tt = theano.tensor
xt, yt, zt = [tt.scalar(name, 'floatX') for name in 'xyz']
else:
#bin/test will not execute any tests now
disabled = True
import sympy
from sympy import S
sy = sympy
from sympy.abc import x, y, z
from sympy.printing.theanocode import (theano_code, dim_handling,
theano_function)
def fgraph_of(*exprs):
""" Transform SymPy expressions into Theano Computation """
outs = list(map(theano_code, exprs))
ins = theano.gof.graph.inputs(outs)
ins, outs = theano.gof.graph.clone(ins, outs)
return theano.gof.FunctionGraph(ins, outs)
def theano_simplify(fgraph):
""" Simplify a Theano Computation """
mode = theano.compile.get_default_mode().excluding("fusion")
fgraph = fgraph.clone()
mode.optimizer.optimize(fgraph)
return fgraph
def theq(a, b):
""" theano equality """
astr = theano.printing.debugprint(a, file='str')
bstr = theano.printing.debugprint(b, file='str')
if not astr == bstr:
print()
print(astr)
print(bstr)
return astr == bstr
def test_symbol():
xt = theano_code(x)
assert isinstance(xt, (tt.TensorVariable, ts.ScalarVariable))
assert xt.name == x.name
assert theano_code(x, broadcastables={x: (False,)}).broadcastable == (False,)
assert theano_code(x, broadcastables={x: (False,)}).name == x.name
def test_add():
expr = x + y
comp = theano_code(expr)
assert comp.owner.op == theano.tensor.add
comp = theano_code(expr, broadcastables={x: (False,), y: (False,)})
assert comp.broadcastable == (False,)
comp = theano_code(expr, broadcastables={x: (False, True), y: (False, False)})
assert comp.broadcastable == (False, False)
def test_trig():
assert theq(theano_code(sympy.sin(x)), tt.sin(xt))
assert theq(theano_code(sympy.tan(x)), tt.tan(xt))
def test_many():
expr = sy.exp(x**2 + sy.cos(y)) * sy.log(2*z)
comp = theano_code(expr)
expected = tt.exp(xt**2 + tt.cos(yt)) * tt.log(2*zt)
# assert theq(comp, expected)
def test_dtype():
assert theano_code(x, dtypes={x: 'float32'}).type.dtype == 'float32'
assert theano_code(x, dtypes={x: 'float64'}).type.dtype == 'float64'
assert theano_code(x+1, dtypes={x: 'float32'}).type.dtype == 'float32'
assert theano_code(x+y, dtypes={x: 'float64', y: 'float32'}).type.dtype == 'float64'
def test_MatrixSymbol():
X = sympy.MatrixSymbol('X', 4, 5)
Xt = theano_code(X)
assert isinstance(Xt, tt.TensorVariable)
assert Xt.broadcastable == (False, False)
def test_MatMul():
X = sympy.MatrixSymbol('X', 4, 4)
Y = sympy.MatrixSymbol('X', 4, 4)
Z = sympy.MatrixSymbol('X', 4, 4)
expr = X*Y*Z
assert isinstance(theano_code(expr).owner.op, tt.Dot)
def test_Transpose():
X = sympy.MatrixSymbol('X', 4, 4)
assert isinstance(theano_code(X.T).owner.op, tt.DimShuffle)
def test_MatAdd():
X = sympy.MatrixSymbol('X', 4, 4)
Y = sympy.MatrixSymbol('X', 4, 4)
Z = sympy.MatrixSymbol('X', 4, 4)
expr = X+Y+Z
assert isinstance(theano_code(expr).owner.op, tt.Elemwise)
def test_symbols_are_created_once():
expr = x**x
comp = theano_code(expr)
assert theq(comp, xt**xt)
def test_dim_handling():
assert dim_handling([x], dim=2) == {x: (False, False)}
assert dim_handling([x, y], dims={x: 1, y: 2}) == {x: (False, True),
y: (False, False)}
assert dim_handling([x], broadcastables={x: (False,)}) == {x: (False,)}
def test_Rationals():
assert theq(theano_code(sympy.Integer(2) / 3), tt.true_div(2, 3))
assert theq(theano_code(S.Half), tt.true_div(1, 2))
def test_Integers():
assert theano_code(sympy.Integer(3)) == 3
def test_factorial():
n = sympy.Symbol('n')
assert theano_code(sympy.factorial(n))
def test_Derivative():
simp = lambda expr: theano_simplify(fgraph_of(expr))
assert theq(simp(theano_code(sy.Derivative(sy.sin(x), x, evaluate=False))),
simp(theano.grad(tt.sin(xt), xt)))
def test_theano_function_simple():
f = theano_function([x, y], [x+y])
assert f(2, 3) == 5
def test_theano_function_numpy():
f = theano_function([x, y], [x+y], dim=1,
dtypes={x: 'float64', y: 'float64'})
assert np.linalg.norm(f([1, 2], [3, 4]) - np.asarray([4, 6])) < 1e-9
f = theano_function([x, y], [x+y], dtypes={x: 'float64', y: 'float64'},
dim=1)
xx = np.arange(3).astype('float64')
yy = 2*np.arange(3).astype('float64')
assert np.linalg.norm(f(xx, yy) - 3*np.arange(3)) < 1e-9
def test_theano_function_kwargs():
import numpy as np
f = theano_function([x, y, z], [x+y], dim=1, on_unused_input='ignore',
dtypes={x: 'float64', y: 'float64', z: 'float64'})
assert np.linalg.norm(f([1, 2], [3, 4], [0, 0]) - np.asarray([4, 6])) < 1e-9
f = theano_function([x, y, z], [x+y],
dtypes={x: 'float64', y: 'float64', z: 'float64'},
dim=1, on_unused_input='ignore')
xx = np.arange(3).astype('float64')
yy = 2*np.arange(3).astype('float64')
zz = 2*np.arange(3).astype('float64')
assert np.linalg.norm(f(xx, yy, zz) - 3*np.arange(3)) < 1e-9
def test_slice():
assert theano_code(slice(1, 2, 3)) == slice(1, 2, 3)
assert str(theano_code(slice(1, x, 3), dtypes={x: 'int32'})) ==\
str(slice(1, xt, 3))
def test_MatrixSlice():
n = sympy.Symbol('n', integer=True)
X = sympy.MatrixSymbol('X', n, n)
Y = X[1:2:3, 4:5:6]
Yt = theano_code(Y)
from theano.scalar import Scalar
from theano import Constant
s = Scalar('int64')
assert tuple(Yt.owner.op.idx_list) == (slice(s, s, s), slice(s, s, s))
assert Yt.owner.inputs[0] == theano_code(X)
# == doesn't work in theano like it does in SymPy. You have to use
# equals.
assert [i.equals(j) for i, j in zip(Yt.owner.inputs[1:],[
Constant(s, 1),
Constant(s, 2),
Constant(s, 3),
Constant(s, 4),
Constant(s, 5),
Constant(s, 6),
])]
k = sympy.Symbol('k')
kt = theano_code(k, dtypes={k: 'int32'})
start, stop, step = 4, k, 2
Y = X[start:stop:step]
Yt = theano_code(Y, dtypes={n: 'int32', k: 'int32'})
# assert Yt.owner.op.idx_list[0].stop == kt
def test_BlockMatrix():
n = sympy.Symbol('n', integer=True)
A = sympy.MatrixSymbol('A', n, n)
B = sympy.MatrixSymbol('B', n, n)
C = sympy.MatrixSymbol('C', n, n)
D = sympy.MatrixSymbol('D', n, n)
At, Bt, Ct, Dt = map(theano_code, (A, B, C, D))
Block = sympy.BlockMatrix([[A, B], [C, D]])
Blockt = theano_code(Block)
solutions = [tt.join(0, tt.join(1, At, Bt), tt.join(1, Ct, Dt)),
tt.join(1, tt.join(0, At, Ct), tt.join(0, Bt, Dt))]
assert any(theq(Blockt, solution) for solution in solutions)
@SKIP
def test_BlockMatrix_Inverse_execution():
k, n = 2, 4
dtype = 'float32'
A = sympy.MatrixSymbol('A', n, k)
B = sympy.MatrixSymbol('B', n, n)
inputs = A, B
output = B.I*A
cutsizes = {A: [(n//2, n//2), (k//2, k//2)],
                B: [(n//2, n//2), (n//2, n//2)]}
    cutinputs = [sympy.blockcut(i, *cutsizes[i]) for i in inputs]
cutoutput = output.subs(dict(zip(inputs, cutinputs)))
dtypes = dict(zip(inputs, [dtype]*len(inputs)))
f = theano_function(inputs, [output], dtypes=dtypes, cache={})
fblocked = theano_function(inputs, [sympy.block_collapse(cutoutput)],
dtypes=dtypes, cache={})
ninputs = [np.random.rand(*x.shape).astype(dtype) for x in inputs]
ninputs = [np.arange(n*k).reshape(A.shape).astype(dtype),
np.eye(n).astype(dtype)]
ninputs[1] += np.ones(B.shape)*1e-5
assert np.allclose(f(*ninputs), fblocked(*ninputs), rtol=1e-5)
def test_DenseMatrix():
t = sy.Symbol('theta')
for MatrixType in [sy.Matrix, sy.ImmutableMatrix]:
X |
DakRomo/2017Challenges | challenge_0/python/slandau/hello_world.py | Python | mit | 77 | 0 | #!/usr/bin/env pyth | on3
if __name__ == '__main__':
print('Hello W | orld!')
|
OTL/jps | test/test_utils.py | Python | apache-2.0 | 2,258 | 0 | import jps
import json
import time
class MessageHolder(object):
def __init__(self):
self._saved_msg = []
def __call__(self, msg):
self._saved_msg.append(msg)
def get_msg(self):
return self._saved_msg
def test_multi_pubsub_once():
holder1 = MessageHolder()
holder2 = MessageHolder()
holder3 = MessageHolder()
sub1 = jps.Subscriber('test_utils1', holder1)
sub2 = jps.Subscriber('test_utils2', holder2)
sub3 = jps.Subscriber('test_utils3', holder3)
pub = jps.utils.JsonMultiplePublisher()
time.sleep(0.1)
pub.publish(
        '{"test_utils1": "hoge", "test_utils2": {"x": 3}, "test_utils3": 5}')
time.sleep(0.1)
sub1.spin_once()
sub2.spin_once()
sub3.spin_once()
assert len(holder1.get_msg()) == 1
assert json.loads(holder1.get_msg()[0]) == 'hoge'
assert len(holder2.get_msg()) == 1
obj = json.loads(holder2.get_msg()[0])
assert obj['x'] == 3
assert len(holder3.get_msg()) == 1
assert json.loads(holder3.get_msg()[0]) == 5
def test_to_obj():
msg = '{"aa": 1, "bb": ["hoge", "hogi"], "cc": {"cc1" : 50}}'
    converted = jps.utils.to_obj(msg)
assert converted.aa == 1
assert converted.bb[0] == 'hoge'
assert converted.bb[1] == 'hogi'
assert len(converted.bb) == 2
assert converted.cc.cc1 == 50
# todo: do
# json = converted.to_json()
# assert json == msg
# todo
def test_to_obj_list():
msg = '["hoge", "hogi", {"atr1": "val2", "atr2": 1.0}]'
bb = jps.utils.to_obj(msg)
assert len(bb) == 2
assert bb[0] == 'hoge'
assert bb[1] == 'hogi'
assert bb[2].atr1 == 'val2'
assert bb[2].atr2 == 1.0
# json = bb.to_json()
# assert json == msg
def test_to_obj_list():
msg = '[{"hoge": 1}, {"hogi": 2}]'
bb = jps.utils.to_obj(msg)
assert len(bb) == 2
assert bb[0].hoge == 1
assert bb[1].hogi == 2
# todo: list support
# json = bb.to_json()
# assert json == msg
def test_to_obj_simple():
msg = '{"aa": 1, "cc": 3, "bb": 2}'
converted = jps.utils.to_obj(msg)
assert converted.aa == 1
assert converted.bb == 2
assert converted.cc == 3
# works only super simple case
json1 = converted.to_json()
assert json1 == msg
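# jps.utils.to_obj is exercised above but not defined in this file; this is a
# minimal, hypothetical reimplementation of the same idea (JSON text to
# attribute-style access) via json's object_hook, not jps's actual code.
import json

class _Obj(object):
    def __init__(self, d):
        self.__dict__.update(d)

def to_obj_sketch(msg):
    return json.loads(msg, object_hook=lambda d: _Obj(d))

o = to_obj_sketch('{"aa": 1, "cc": {"cc1": 50}}')
assert o.aa == 1 and o.cc.cc1 == 50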
|
ulope/nearest_pypi | wsgi.py | Python | mit | 104 | 0 | from werkzeug.contrib.fixers import ProxyFix
from app import app
app.wsgi_app = ProxyFix(app.wsgi_app)
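# ProxyFix (werkzeug.contrib.fixers, pre-1.0 werkzeug) rewrites REMOTE_ADDR and
# HTTP_HOST in the WSGI environ from the X-Forwarded-For / X-Forwarded-Host
# headers a fronting proxy sets. A hedged sketch of the same wrapping applied
# to a bare WSGI callable (names hypothetical):
#
#     def application(environ, start_response):
#         start_response('200 OK', [('Content-Type', 'text/plain')])
#         return [environ.get('REMOTE_ADDR', '').encode('utf-8')]
#
#     application = ProxyFix(application)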
|
rsalmaso/huey | huey/contrib/kyototycoon.py | Python | mit | 3,659 | 0 | from functools import partial
import time
from ukt import KT_NONE
from ukt import KyotoTycoon
from huey.api import Huey
from huey.constants import EmptyData
from huey.storage import BaseStorage
from huey.utils import decode
class KyotoTycoonStorage(BaseStorage):
priority = True
def __init__(self, name='huey', host='127.0.0.1', port=1978, db=None,
timeout=None, max_age=3600, queue_db=None, client=None,
blocking=False, result_expire_time=None):
super(KyotoTycoonStorage, self).__init__(name)
if client is None:
client = KyotoTycoon(host, port, timeout, db, serializer=KT_NONE,
max_age=max_age)
self.blocking = blocking
self.expire_time = result_expire_time
self.kt = client
self._db = db
self._queue_db = queue_db if queue_db is not None else db
self.qname = self.name + '.q'
self.sname = self.name + '.s'
self.q = self.kt.Queue(self.qname, self._queue_db)
self.s = self.kt.Schedule(self.sname, self._queue_db)
def enqueue(self, data, priority=None):
self.q.add(data, priority)
def dequeue(self):
if self.blocking:
return self.q.bpop(timeout=30)
else:
return self.q.pop()
def queue_size(self):
return len(self.q)
def enqueued_items(self, limit=None):
return self.q.peek(n=limit or -1)
def flush_queue(self):
return self.q.clear()
def convert_ts(self, ts):
return int(time.mktime(ts.timetuple()))
def add_to_schedule(self, data, ts, utc):
self.s.add(data, self.convert_ts(ts))
def read_schedule(self, ts):
return self.s.read(self.convert_ts(ts))
def schedule_size(self):
return len(self.s)
def scheduled_items(self, limit=None):
return self.s.items(limit)
def flush_schedule(self):
return self.s.clear()
def prefix_key(self, key):
return '%s.%s' % (self.qname, decode(key))
def put_data(self, key, value, is_result=False):
xt = self.expire_time if is_result else None
self.kt.set(self.prefix_key(key), value, self._db, expire_time=xt)
def peek_data(self, key):
result = self.kt.get_bytes(self.prefix_key(key), self._db)
return EmptyData if result is None else result
def pop_data(self, key):
if self.expire_time is not None:
return self.peek_data(key)
result = self.kt.seize(self.prefix_key(key), self._db)
return EmptyData if result is None else result
def delete_data(self, key):
return self.kt.seize(self.prefix_key(key), self._db) is not None
def has_data_for_key(self, key):
return self.kt.exists(self.prefix_key(key), self._db)
def put_if_empty(self, key, value):
return self.kt.add(self.prefix_key(key), value, self._db)
def result_store_size(self):
return len(self.kt.match_prefix(self.prefix_key(''), db=self._db))
def result_items(self):
prefix = self.prefix_key('')
keys = self.kt.match_prefix(prefix, db=self._db)
result = self.kt.get_bulk(keys, self._db)
plen = len(prefix)
        return {key[plen:]: value for key, value in result.items()}
def flush_results(self):
prefix = self.prefix_key('')
keys = self.kt.match_prefix(prefix, db=self._db)
return self.kt.remove_bulk(keys, self._db)
def flush_all(self):
        self.flush_queue()
self.flush_schedule()
self.flush_results()
class KyotoTycoonHuey(Huey):
storage_class = KyotoTycoonStorage
|
asm-products/pants-party | subscriptions/admin.py | Python | agpl-3.0 | 100 | 0 | from django.contrib import admin
from models import Subscription
admin.site.register(Subscription)
| |
pombredanne/voc | tests/datatypes/test_float.py | Python | bsd-3-clause | 6,425 | 0.000156 | from .. utils import TranspileTestCase, UnaryOperationTestCase, BinaryOperationTestCase, InplaceOperationTestCase
class FloatTests(TranspileTestCase):
def test_setattr(self):
self.assertCodeExecution("""
x = 3.14159
try:
x.attr = 42
except AttributeError as err:
print(err)
""")
def test_getattr(self):
s | elf.assertCodeExecution("""
x = 3.14159
try:
print(x.attr)
except AttributeError as err:
print(err)
""")
def test_repr(self):
self.assertCodeExecution("""
x = 350000000000000000.0
print(x)
x = 3500.0
print(x)
x = 35.0
print(x)
x = 3.5
print(x)
x = 0.35
print(x)
x = 0.035
print(x)
x = 0.0035
print(x)
x = 0.00035
print(x)
x = 0.000035
print(x)
x = 0.0000035
print(x)
x = 0.00000000000000035
print(x)
x = 0.0
print(x)
x = float('-0.0')
print(x)
x = float('nan')
print(x)
x = float('inf')
print(x)
x = float('-inf')
print(x)
""")
def test_negative_zero_constant(self):
self.assertCodeExecution("""
x = -0.0
y = 0.0
print(x, y)
""")
def test_is_integer(self):
self.assertCodeExecution("""
x = 0.0
print(x.is_integer())
x = 3.14
print(x.is_integer())
x = -1.0
print(x.is_integer())
x = -62.5
print(x.is_integer())
x = float('nan')
print(x.is_integer())
x = float('inf')
print(x.is_integer())
x = float('-inf')
print(x.is_integer())
""")
def test_hex(self):
numbers = [
0e0, -0e0, 10000152587890625e-16, -566e85,
-87336362425182547697e-280, 4.9406564584124654e-324,
'nan', 'inf', '-inf'
]
template = """
x = float('{}')
print(x.hex())
"""
code = '\n'.join(template.format(number) for number in numbers)
self.assertCodeExecution(code)
class UnaryFloatOperationTests(UnaryOperationTestCase, TranspileTestCase):
data_type = 'float'
not_implemented = [
'test_unary_invert',
]
class BinaryFloatOperationTests(BinaryOperationTestCase, TranspileTestCase):
data_type = 'float'
not_implemented = [
'test_add_class',
'test_add_frozenset',
'test_and_class',
'test_and_frozenset',
'test_direct_eq_bytes',
'test_direct_ge_bytes',
'test_direct_gt_bytes',
'test_direct_le_bytes',
'test_direct_lt_bytes',
'test_direct_ne_bytes',
'test_direct_eq_frozenset',
'test_direct_ge_frozenset',
'test_direct_gt_frozenset',
'test_direct_le_frozenset',
'test_direct_lt_frozenset',
'test_direct_ne_frozenset',
'test_eq_class',
'test_eq_frozenset',
'test_ge_class',
'test_ge_frozenset',
'test_gt_class',
'test_gt_frozenset',
'test_le_class',
'test_le_frozenset',
'test_lshift_class',
'test_lshift_frozenset',
'test_lt_class',
'test_lt_frozenset',
'test_modulo_class',
'test_modulo_complex',
'test_modulo_frozenset',
'test_multiply_bytearray',
'test_multiply_bytes',
'test_multiply_class',
'test_multiply_complex',
'test_multiply_frozenset',
'test_multiply_NotImplemented',
'test_multiply_range',
'test_ne_class',
'test_ne_frozenset',
'test_or_class',
'test_or_frozenset',
'test_power_class',
'test_power_complex',
'test_power_float',
'test_power_frozenset',
'test_rshift_class',
'test_rshift_frozenset',
'test_subscr_bool',
'test_subscr_bytearray',
'test_subscr_bytes',
'test_subscr_class',
'test_subscr_complex',
'test_subscr_dict',
'test_subscr_float',
'test_subscr_frozenset',
'test_subscr_int',
'test_subscr_list',
'test_subscr_None',
'test_subscr_NotImplemented',
'test_subscr_range',
'test_subscr_set',
'test_subscr_slice',
'test_subscr_str',
'test_subscr_tuple',
'test_subtract_class',
'test_subtract_complex',
'test_subtract_frozenset',
'test_true_divide_class',
'test_true_divide_complex',
'test_true_divide_frozenset',
'test_xor_class',
'test_xor_frozenset',
]
class InplaceFloatOperationTests(InplaceOperationTestCase, TranspileTestCase):
data_type = 'float'
not_implemented = [
'test_add_class',
'test_add_complex',
'test_add_frozenset',
'test_and_class',
'test_and_frozenset',
'test_floor_divide_class',
'test_floor_divide_complex',
'test_floor_divide_frozenset',
'test_lshift_class',
'test_lshift_frozenset',
'test_modulo_class',
'test_modulo_complex',
'test_modulo_frozenset',
'test_multiply_bytearray',
'test_multiply_bytes',
'test_multiply_class',
'test_multiply_complex',
'test_multiply_frozenset',
'test_multiply_list',
'test_multiply_NotImplemented',
'test_multiply_range',
'test_multiply_str',
'test_multiply_tuple',
'test_or_class',
'test_or_frozenset',
'test_power_class',
'test_power_complex',
'test_power_float',
'test_power_frozenset',
'test_rshift_class',
'test_rshift_frozenset',
'test_subtract_class',
'test_subtract_complex',
'test_subtract_frozenset',
'test_true_divide_class',
'test_true_divide_complex',
'test_true_divide_frozenset',
'test_xor_class',
'test_xor_frozenset',
]
|
timpalpant/KaggleBillionWordImputation | scripts/predict_missing_word.space.py | Python | gpl-3.0 | 770 | 0.009091 | #!/usr/bin/env python
'''
Predict missing words with n-gram model
'''
import sys, argparse
from itertools import izip
from util import tokenize_words
def opts():
    parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('sample', type=argparse.FileType('r'),
help='Sentences with one missing word')
parser.add_argument('removed', type=argparse.FileType('r'),
help='File with predicted indices of missing words')
return parser
if __name__ == "__main__":
args = opts().parse_args()
for sentence, i_missing in izip(args.sample, args.removed):
words = tokenize_words(sentence)
i_missing = int(i_missing)
print ' '.join(words[:i_missing]) + ' e ' + ' '.join(words[i_missing:])
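# A minimal sketch of the splice performed in the loop above: rejoin the
# tokens with the filler word 'e' at the predicted index (sample data only).
demo_words = "the quick fox".split()
demo_index = 2
print(' '.join(demo_words[:demo_index]) + ' e ' + ' '.join(demo_words[demo_index:]))
# -> the quick e fox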
|
eino-makitalo/odoo | openerp/addons/base/ir/ir_fields.py | Python | agpl-3.0 | 17,664 | 0.002434 | # -*- coding: utf-8 -*-
import cStringIO
import datetime
import functools
import itertools
import time
import psycopg2
import pytz
from openerp import models, api, _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, ustr
REFERENCING_FIELDS = set([None, 'id', '.id'])
def only_ref_fields(record):
return dict((k, v) for k, v in record.iteritems()
if k in REFERENCING_FIELDS)
def exclude_ref_fields(record):
return dict((k, v) for k, v in record.iteritems()
if k not in REFERENCING_FIELDS)
CREATE = lambda values: (0, False, values)
UPDATE = lambda id, values: (1, id, values)
DELETE = lambda id: (2, id, False)
FORGET = lambda id: (3, id, False)
LINK_TO = lambda id: (4, id, False)
DELETE_ALL = lambda: (5, False, False)
REPLACE_WITH = lambda ids: (6, False, ids)
class ImportWarning(Warning):
""" Used to send warnings upwards the stack during the import process """
pass
class ConversionNotFound(ValueError): pass
class ir_fields_converter(models.Model):
_name = 'ir.fields.converter'
@api.model
def _format_import_error(self, error_type, error_msg, error_params=(), error_args=None):
# sanitize error params for later formatting by the import system
sanitize = lambda p: p.replace('%', '%%') if isinstance(p, basestring) else p
if error_params:
if isinstance(error_params, basestring):
error_params = sanitize(error_params)
elif isinstance(error_params, dict):
error_params = dict((k, sanitize(v)) for k, v in error_params.iteritems())
elif isinstance(error_params, tuple):
error_params = tuple(map(sanitize, error_params))
return error_type(error_msg % error_params, error_args)
@api.model
def for_model(self, model, fromtype=str):
""" Returns a converter object for the model. A converter is a
callable taking a record-ish (a dictionary representing an openerp
record with values of typetag ``fromtype``) and returning a converted
        record matching what :meth:`openerp.osv.orm.Model.write` expects.
:param model: :class:`openerp.osv.orm.Model` for the conversion base
:returns: a converter callable
:rtype: (record: dict, logger: (field, error) -> None) -> dict
"""
# make sure model is new api
model = self.env[model._name]
converters = {
name: self.to_field(model, field, fromtype)
for name, field in model._fields.iteritems()
}
def fn(record, log):
converted = {}
for field, value in record.iteritems():
if field in (None, 'id', '.id'):
continue
if not value:
converted[field] = False
continue
try:
converted[field], ws = converters[field](value)
for w in ws:
if isinstance(w, basestring):
# wrap warning string in an ImportWarning for
# uniform handling
w = ImportWarning(w)
log(field, w)
except ValueError, e:
log(field, e)
return converted
return fn
@api.model
def to_field(self, model, field, fromtype=str):
""" Fetches a converter for the provided field object, from the
specified type.
A converter is simply a callable taking a value of type ``fromtype``
(or a composite of ``fromtype``, e.g. list or dict) and returning a
value acceptable for a write() on the field ``field``.
By default, tries to get a method on itself with a name matching the
pattern ``_$fromtype_to_$field.type`` and returns it.
Converter callables can either return a value and a list of warnings
to their caller or raise ``ValueError``, which will be interpreted as a
validation & conversion failure.
ValueError can have either one or two parameters. The first parameter
is mandatory, **must** be a unicode string and will be used as the
user-visible message for the error (it should be translatable and
translated). It can contain a ``field`` named format placeholder so the
caller can inject the field's translated, user-facing name (@string).
The second parameter is optional and, if provided, must be a mapping.
This mapping will be merged into the error dictionary returned to the
client.
If a converter can perform its function but has to make assumptions
about the data, it can send a warning to the user through adding an
instance of :class:`~.ImportWarning` to the second value
it returns. The handling of a warning at the upper levels is the same
as ``ValueError`` above.
:param field: field object to generate a value for
:type field: :class:`openerp.fields.Field`
:param fromtype: type to convert to something fitting for ``field``
:type fromtype: type | str
:param context: openerp request context
:return: a function (fromtype -> field.write_type), if a converter is found
:rtype: Callable | None
"""
assert isinstance(fromtype, (type, str))
# FIXME: return None
typename = fromtype.__name__ if isinstance(fromtype, type) else fromtype
converter = getattr(self, '_%s_to_%s' % (typename, field.type), None)
if not converter:
return None
return functools.partial(converter, model, field)
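    # Illustrative note on the lookup above (the str/char pairing is just an
    # example): importing str values into a "char" field resolves the name
    #     '_%s_to_%s' % ('str', 'char')  ->  '_str_to_char'
    # which is defined further below and is returned partially applied with
    # (model, field).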
@api.model
def _str_to_boolean(self, model, field, value):
# all translatables used for booleans
true, yes, false, no = _(u"true"), _(u"yes"), _(u"false"), _(u"no")
# potentially broken casefolding? What about locales?
trues = set(word.lower() for word in itertools.chain(
[u'1', u"true", u"yes"], # don't use potentially translated values
self._get_translations(['code'], u"true"),
self._get_translations(['code'], u"yes"),
))
if value.lower() in trues:
return True, []
# potentially broken casefolding? What about locales?
falses = set(word.lower() for word in itertools.chain(
[u'', u"0", u"false", u"no"],
self._get_translations(['code'], u"false"),
self._get_translations(['code'], u"no"),
))
if value.lower() in falses:
return False, []
return True, [self._format_import_error(
ImportWarning,
_(u"Unknown value '%s' for boolean field '%%(field)s', assuming '%s'"),
(value, yes),
{'moreinfo': _(u"Use '1' for yes and '0' for no")}
)]
@api.model
def _str_to_integer(self, model, field, value):
try:
return int(value), []
except ValueError:
raise self._format_import_error(
ValueError,
_(u"'%s' does not seem to be | an integer for field '%%(field)s'"),
value
)
@api.model
def _str_to_float(self, model, field, value):
try:
return float(value), []
except ValueError:
raise self._format_import_error(
ValueError,
| _(u"'%s' does not seem to be a number for field '%%(field)s'"),
value
)
@api.model
def _str_id(self, model, field, value):
return value, []
_str_to_reference = _str_to_char = _str_to_text = _str_to_binary = _str_to_html = _str_id
@api.model
def _str_to_date(self, model, field, value):
try:
time.strptime(value, DEFAULT_SERVER_DATE_FORMAT)
return value, []
except ValueError:
raise self._format_import_error(
ValueError,
_(u"'%s' does not seem to be a valid date for field '%%(field)s'"),
value,
{'moreinfo': _(u"Use th |
thombashi/DataProperty | test/test_function.py | Python | mit | 4,806 | 0.000625 | """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
from dataproperty import get_integer_digit, get_number_of_digit
nan = float("nan")
inf = float("inf")
class Test_get_integer_digit:
@pytest.mark.parametrize(
["value", "expected"],
[
[0, 1],
[-0, 1],
[0.99, 1],
[-0.99, 1],
[".99", 1],
["-.99", 1],
[1.01, 1],
[-1.01, 1],
[9.99, 1],
[-9.99, 1],
["9.99", 1],
["-9.99", 1],
["0", 1],
["-0", 1],
[10, 2],
[-10, 2],
[99.99, 2],
[-99.99, 2],
["10", 2],
["-10", 2],
["99.99", 2],
["-99.99", 2],
[100, 3],
[-100, 3],
[999.99, 3],
[-999.99, 3],
["100", 3],
["-100", 3],
["999.99", 3],
["-999.99", 3],
[10000000000000000000, 20],
[-10000000000000000000, 20],
# float not enough precision
[10000000000000000000.99, 20],
[-10000000000000000000.99, 20],
["10000000000000000000", 20],
["-10000000000000000000", 20],
["99999999999999099999.99", 20],
["-99999999999999099999.99", 20],
],
)
def test_normal(self, value, expected):
assert get_integer_digit(value) == expected
@pytest.mark.parametrize(
["value", "expected"],
[
[999999999999999999999999999999.9999999999, 31],
[-999999999999999999999999999999.9999999999, 31],
["999999999999999999999999999999.9999999999", 30],
["-999999999999999999999999999999.9999999999", 30],
],
)
def test_abnormal(self, value, expected):
assert get_integer_digit(value) == expected
@pytest.mark.parametrize(
["value", "exception"],
[
[True, ValueError],
[False, ValueError],
[None, ValueError],
["test", ValueError],
["a", ValueError],
["0xff", ValueError],
[nan, ValueError],
[inf, ValueError],
],
)
def test_exception(self, value, exception):
with pytest.raises(exception):
get_integer_digit(value)
class Test_get_number_of_digit:
@pytest.mark.parametrize(
["value", "expected"],
[
[0, (1, 0)],
[-0, (1, 0)],
| ["0", (1, 0)],
["-0", (1, 0)],
[10, (2, 0)],
[-10, (2, 0)],
["10", (2, 0)],
["-10", (2, 0)],
[10.1, (2, 1 | )],
[-10.1, (2, 1)],
["10.1", (2, 1)],
["-10.1", (2, 1)],
[10.01, (2, 2)],
[-10.01, (2, 2)],
[10.001, (2, 3)],
[-10.001, (2, 3)],
[100.1, (3, 1)],
[-100.1, (3, 1)],
[100.01, (3, 2)],
[-100.01, (3, 2)],
[0.1, (1, 1)],
[-0.1, (1, 1)],
["0.1", (1, 1)],
["-0.1", (1, 1)],
[0.99, (1, 2)],
[-0.99, (1, 2)],
[".99", (1, 2)],
["-.99", (1, 2)],
[0.01, (1, 2)],
[-0.01, (1, 2)],
["0.01", (1, 2)],
["-0.01", (1, 2)],
[0.001, (1, 3)],
[-0.001, (1, 3)],
["0.001", (1, 3)],
["-0.001", (1, 3)],
[0.0001, (1, 4)],
[-0.0001, (1, 4)],
["0.0001", (1, 4)],
["-0.0001", (1, 4)],
[0.00001, (1, 5)],
[-0.00001, (1, 5)],
["0.00001", (1, 5)],
["-0.00001", (1, 5)],
[2e-05, (1, 5)],
[-2e-05, (1, 5)],
["2e-05", (1, 5)],
["-2e-05", (1, 5)],
["0.000000000000001", (1, 15)],
["1e+15", (16, 0)],
],
)
def test_normal(self, value, expected):
assert get_number_of_digit(value) == expected
@pytest.mark.parametrize(
["value", "max_decimal_places", "expected"],
[
[0, 5, (1, 0)],
["0.000000000000001", 5, (1, 5)],
],
)
def test_normal_max_decimal_places(self, value, max_decimal_places, expected):
assert get_number_of_digit(value, max_decimal_places=max_decimal_places) == expected
@pytest.mark.parametrize(
["value"], [[None], [True], [inf], [nan], ["0xff"], ["test"], ["いろは".encode()]]
)
def test_nan(self, value):
integer_digits, decimal_places = get_number_of_digit(value)
assert integer_digits is None
assert decimal_places is None
|
NovikovMA/python_training | model/address.py | Python | apache-2.0 | 2,625 | 0.002667 | # -*- coding: utf-8 -*-
__author__ = 'M.Novikov'
from sys import maxsize
class Address:
def __init__(self, id=None, first_name=None, middle_name=None, last_name=None,
nickname=None, title=None, company=None, address=None,
tel_home=None, tel_mobile=None, tel_work=None, tel_fax=None,
web_email=None, web_email2=None, web_email3=None, web_homepage=None,
birthday_day=None, birthday_month=None, birthday_year=None,
anniversary_day=None, anniversary_month=None, anniversary_year=None,
sec_address=None, home=None, notes=None,
all_phones_from_home_page=None, all_email_from_home_page=None):
self.id = id
self.first_name = first_name
self.middle_name = middle_name
self.last_name = last_name
self.nickname = nickname
self.title = title
self.company = company
self.address = address
self.tel_home = tel_home
self.tel_mobile = tel_mobile
self.tel_work = tel_work
self.tel_fax = tel_fax
self.web_email = web_email
self.web_email2 = web_email2
self.web_email3 = web_email3
self.web_homepage = web_homepage
self.birthday_day = birthday_day
self.birthday_month = birthday_month
self.birthday_year = birthday_year
self.anniversary_day = anniversary_day
self.anniversary_month = anniversary_month
self.anniversary_year = anniversary_year
self.sec_address = sec_address
self.home = home
self.notes = notes
self.all_phones_from_home_page = all_phones_from_home_page
self.all_emai | l_from_home_page = all_email_from_home_page
def __repr__(self):
return "%s: %s %s" % (self.id, self.last_name, self.first_name)
def __eq__(self, other):
return (self.id is None or other.id is None or self.id == other.id)\
and self.first_name == other.first_name\
and self.last_name == other.last_name\
and (self.address is None or other.address is None or self.address == other.address)\
| and (self.all_phones_from_home_page is None or other.all_phones_from_home_page is None or self.all_phones_from_home_page == other.all_phones_from_home_page)\
and (self.all_email_from_home_page is None or other.all_email_from_home_page is None or self.all_email_from_home_page == other.all_email_from_home_page)
def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize
|
DemiMarie/SlipRock | .ycm_extra_conf.py | Python | isc | 6,434 | 0.017718 | # This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
import os
import ycm_core
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
# THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know which
# language to use when compiling headers. So it will guess. Badly. So C++
# headers will be compiled as C headers. You don't want that so ALWAYS specify
# a "-std=<something>".
# For a C project, you would set this to something like 'c99' instead of
# 'c++11'.
'-std=c99',
# ...and the same thing goes for the magic -x option which specifies the
# language that the files to be compiled are written in. This is mostly
# relevant for c++ headers.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x', 'c', '-I', os.path.join(DirectoryOfThisScript(), 'src'),
'-I.',
'-Iinclude',
'-Weverything',
'-Werror',
#'-I', '/usr/x86_64-w64-mingw32/sys-root/mingw/include/',
]
compilation_database_folder = '/dev/null/etc'
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break |
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
| if new_flag:
new_flags.append( new_flag )
return new_flags
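# A quick illustration of the helper above (hypothetical paths, assuming a
# POSIX-style os.path):
#   MakeRelativePathsInFlagsAbsolute( [ '-I', 'include' ], '/home/me/proj' )
#     -> [ '-I', '/home/me/proj/include' ]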
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
def FlagsForFile( filename, **kwargs ):
p = os.path
fname = p.basename(filename)
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
if fname == 'sliprock_unix.h' or fname == 'sliprock_windows.h':
filename = p.join(p.dirname(filename), 'sliprock.c')
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
else:
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
if 'windows' in fname or 'state_machine' in fname:
final_flags += (
'-I/usr/x86_64-w64-mingw32/sys-root/mingw/include',
'-fms-extensions')
if p.basename(p.dirname(filename)) == 'wip':
final_flags += ('-Iwip', '-fms-extensions', '-target=x86_64-w64-mingw32')
if p.splitext(fname)[1] == '.cpp':
final_flags[final_flags.index('-std=c99')] = '-std=c++11'
final_flags[final_flags.index('c')] = 'c++'
final_flags += [
'-Wno-old-style-cast',
'-Wno-c++98-compat',
'-Wno-c++98-compat-pedantic',
'-Wno-disabled-macro-expansion',
'-Wno-c99-extensions',
'-Wno-global-constructors',
]
return {
'flags': final_flags,
'do_cache': True
}
|
lycopersin/x6hr-python | x6hr.py | Python | gpl-3.0 | 17,896 | 0.013093 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: x6hr.py
# Purpose:     log reader of SUUNTO X6HR
# Author: tomoya kamata (iware pref. japan)
# Created: 15 Mar 2010
# Copyright: GPL. please read COPYING file
# mailto:      lycopersin@gmail.com
# web: http://lycopersin.blogspot.com
#-------------------------------------------------------------------------------
import serial
## my investigation (2010.3.17 T.Kamata)
## comments are my hypothesis, please keep in mind that they might be wrong.
## ********************************************************************************
## 05 00 04 | 5A 00 | (01) 05 5E
## ********************************************************************************
## general setting
## 05 00 0E | 64 00 | (0B) 00 01 02 01 00 01 02 01 01 01 6B 04
## ********************************************************************************
## = 0D48 (l 12): Retrieve watch history.
## Format as follows:
## Max alt: YY MM DD ALT_L ALT_H
## Ascent : ASC_L .. .. .. (supposedly on 4 bytes as 2 are not enough)
## Descent: DESC_L .. .. .. (supposedly on 4 bytes as 2 are not enough)
## ??: .. ..
## Last reset date: YY MM DD
## 05 00 15 | 48 0D | (12) 08 05 17 E5 00 00 00 00 00 00 00 00 00 04 00 FF FF FF 53
## ********************************************************************************
## hiking index table
## 05 00 17 | B4 0F | (14) 01 02 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 AC
## ********************************************************************************
## hiking log index 1
## 05 00 33 | C8 0F | (30) 00 0A 03 11 0E 10 0A 01 00 00 00 00 00 00 00 03 31 00 00 00 00 00 00 D8 03 11 0E 10 00 D8 03 11 0E 10 4B 61 58 E6 1E 00 00 00 11 00 00 FF FF FF AC
## ********************************************************************************
## hiking log index 2
## 05 00 33 | 48 10 | (30) 00 0A 03 11 0E 1F 0A 01 00 00 00 00 00 00 00 05 14 00 00 00 00 00 00 D8 03 11 0E 1F 00 D8 03 11 0E 1F 4A 53 4E E6 1E 00 00 00 20 00 00 FF FF FF 0B
## ********************************************************************************
## chrono index table
## 05 00 21 | C9 19 | (1E) 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 CF
## ********************************************************************************
## chrono log index 1
## 05 00 35 | FA 19 | (32) 09 0A 03 11 0E 1C 0A 01 00 00 00 00 00 01 00 03 0D 05 00 00 00 00 00 D8 03 11 0E 1C 00 D8 03 11 0E 1C 4A 60 54 E6 1E 00 00 00 11 00 00 FF FF FF FF FF BB
## ********************************************************************************
## product or protocol number?
## 05 00 04 | 5A 00 | (01) 05 5E
## ********************************************************************************
## serial number
## 05 00 07 | 5D 00 | (04) 3D 1E 20 60 3A
class x6hr:
def __init__(self):
self.x6hr = None
def open(self, serial_port = "COM1"):
"""
        Open the serial communication port.
        serial_port : port name
        return : the serial object; stays None when opening fails.
"""
if self.x6hr == None:
self.x6hr = serial.Serial(port=serial_port, timeout=3)
return self.x6hr
def write_raw(self, bin):
"""
        Write raw binary data.
        bin : sequence of bytes to write to the Suunto
"""
cmd = "".join(map(chr, bin))
self.x6hr.write(cmd)
def write_cmd(self, bin):
"""
        Write data. Before writing, this function wraps the data in the packet format.
        bin : sequence of bytes to write to the Suunto
"""
cmd2 = [0x05, 0x00, len(bin)] + bin
c = 0
for i in cmd2[3:]: c = c ^ i
cmd3 = cmd2 + [c]
self.write_raw(cmd3)
def read_raw(self, length):
"""
        Read raw data.
        This function returns the raw packet binary data.
"""
return map(ord, list(self.x6hr.read(length)))
def read_register(self, addr, length):
self.write_cmd([addr & 0xff, (addr >> 8) & 0xff, length])
data = self.read_raw(length + 7)
ret = data[6:-1]
return ret
def read_units(self):
data = self.read_register(0x64, 0x0b)
# light night [1, 0, 2, 1, 0, 1, 2, 1, 1, 1, 107]
# light off [1, 0, 1, 1, 0, 1, 2, 1, 1, 1, 107]
# light normal [1, 0, 0, 1, 0, 1, 2, 1, 1, 1, 107]
# tone on [1, 0, 1, 1, 0, 1, 2, 1, 1, 1, 107]
# tone off [0, 0, 1, 1, 0, 1, 2, 1, 1, 1, 107]
# time 24h [1, 0, 1, 1, 0, 1, 2, 1, 1, 1, 107]
        # time 12h     [1, 0, 1, 0, 0, 1, 2, 1, 1, 1, 107]
# icon on [1, 1, 2, 1, 0, 1, 2, 1, 1, 1, 107]
        # icon off     [1, 0, 2, 1, 0, 1, 2, 1, 1, 1, 107]
# I investigated above. (T.Kamata)
#
# 12: altitude ft(00) / m(01)
        # 13: ascensional speed m/s(00) / m/mn(01) / m/h(02) / ft/s(03) / ft/mn(04) / ft/h(05)
# 14: pression inHg(00) / hPa(01)
# 15: temperature F(00) / C (01)
# Referenced from http://wiki.terre-adelie.org/SuuntoX6HR
units = {}
units['tone'] = data[0] == 1
units['icon'] = data[1] == 1
units['light'] = ['Night', 'OFF', 'Normal'][data[2]]
units['time'] = ['12h', '24h'][data[3]]
units['date'] = ['MM.DD', 'DD.MM', 'Day'][data[4]]
units['altitude'] = ['ft', 'm'][data[5]]
units['ascsp'] = ['m/s', 'm/mn', 'm/h', 'ft/s', 'ft/mn', 'ft/h'][data[6]]
units['pressure'] = ['inHg', 'hPa'][data[7]]
units['temperature'] = ['F', 'C'][data[8]]
return units
def read_serial_number(self):
#read serial number
data = self.read_register(0x005d, 4)
return (data[0] * 1000000) + (data[1] * 10000) + (data[2] * 100) + data[3]
# Get list of "hiking" (logbook) logs
def read_hiking_index(self):
data = self.read_register(0x0fb4, 0x14)
lut = []
for i in data:
if i != 0:
| lut.append(i)
return lut
def read_hiking_log(self, index):
p = self.read_register | (0x0fc8 + (index - 1) * 128, 0x30)
log = {}
log['start'] = "20%02d/%d/%d %02d:%02d" % (p[1],p[2],p[3],p[4],p[5])
log['interval'] = p[6]
log['hrdata'] = p[7] == 1
log['total ascent'] = p[8] * 256 + p[9]
log['total descent'] = p[10] * 256 + p[11]
log['laps'] = p[13]
log['duration'] = "%02d:%02d:%02d.%d" % (p[14], p[15], p[16], p[17])
log['highest time'] = "%d/%d %02d:%02d" % (p[0x18],p[0x19],p[0x1a],p[0x1b])
log['highest point altitude'] = p[0x16] * 256 + p[0x17]
log['lowest time'] = "%d/%d %02d:%02d" % (p[0x1e],p[0x1f],p[0x20],p[0x21])
log['lowest altitude'] = p[0x1c] * 256 + p[0x1d]
log['HR min'] = p[34]
log['HR max'] = p[35]
log['HR average'] = p[36]
log['HR limit high'] = p[37]
log['HR limit low'] = p[38]
log['HR over limit'] = p[39] * 256 + p[40]
log['HR in limit'] = p[41] * 256 + p[42]
log['HR under limit'] = p[43] * 256 + p[44]
return log
# idx 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, F,
# hex 00, 0A, 03, 11, 0E, 10, 0A, 01, 00, 00, 00, 00, 00, 00, 00, 03,
# dec 0, 10, 3, 17, 14, 16, 10, 1, 0, 0, 0, 0, 0, 0, 0, 3,
# idx 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 1A, 1B, 1C, 1D, 1E, 1F,
# hex 31, 00, 00, 00, 00, 00, 00, D8, 03, 11, 0E, 10, 00, D8, 03, 11,
# dec 49, 0, 0, 0, 0, 0, 0,216, 3, 17, 14, 16, 0,216, 3, 17,
# idx 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, |
iw3hxn/LibrERP | sale_order_analysis/__openerp__.py | Python | agpl-3.0 | 1,472 | 0.000679 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (C) 2015 Didotech srl (<http://www.didotech.com>).
#
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
########## | ####################################################################
{
"name": "Sale Order Analysis",
"version": "3.1.1.2",
"author": "Didotech SRL",
"website": "http://www.didotech.com",
"category": "Sales Management",
"description": """
    This module permits creating a simple analysis on sale shops, based on date, sales team and user.
""",
"depends": [
'sale_order_confirm',
],
"data": [
'sale/sale_shop_view.xml'
],
" | active": False,
"installable": True,
}
|
censusreporter/censusreporter | censusreporter/apps/census/templatetags/results.py | Python | mit | 1,848 | 0.001082 | from collections import defaultdict
from django import template
from django.utils.safestring import mark_safe
from censusreporter.apps.census.utils import parse_table_id, generic_table_description, table_link
|
register = template.Library()
@register.filter
def format_subtables_for_results(table_ids):
parts = []
deferred_racials = defaultdict(list)
deferred_pr = []
for table in table_ids:
parsed = parse_table_id(table)
if parsed['racial']:
key = parsed['table_type']
if parsed['puerto_ | rico']:
key += 'PR'
deferred_racials[key].append(parsed)
elif parsed['puerto_rico']:
deferred_pr.append(table)
else:
parts.append(table_link(table, generic_table_description(table)))
for table in deferred_pr:
parts.append(table_link(table, generic_table_description(table)))
racial_label_tests = [
('B', 'Detailed (by race)'),
('C', 'Simplified (by race)'),
('BPR', 'Detailed (by race) for Puerto Rico'),
('CPR', 'Simplified (by race) for Puerto Rico'),
]
for test, label in racial_label_tests:
try:
iteration_parts = []
for table_dict in deferred_racials[test]:
iteration_parts.append(table_link(table_dict['table_id'], table_dict['race']))
group_table_id = table_dict['table_id']
if iteration_parts:
contents = ' / '.join(iteration_parts)
iter_wrapper = """
<a class="toggler" data-id="{}">{}</a>
<span data-id="{}" class='racial-iteration'>{}</span>
""".format(group_table_id, label, group_table_id, contents)
parts.append(iter_wrapper)
except Exception as e:
parts.append(e.message)
return mark_safe(', '.join(parts))
|
rchakra3/x9115rc3 | hw/code/1/ok.py | Python | gpl-2.0 | 1,020 | 0.028431 | """
Source: https://github.com/txt/mase/blob/master/src/ok.md
# Unit tests in Python
Python has some great unit testing tools. The one
shown below is a "less-is-more" approach and is
based on [Kent Beck video on how to write a test engine in just a
few lines of code](https://www.youtube.com/watch?v=nIonZ6-4nuU).
For example usages, see [okok.py](okok.md) which can be loaded via
```
python okok.py
```
Share and enjoy.
"""
def ok(*lst):
print "### ",lst[0].__name__
for one in lst: unittest(one)
return one
class unittest:
tries = fails = 0 # tracks the record so far
@staticmethod
def score():
t = unittest.tries
f = unittest.fails
return "# TRIES= %s FAIL= %s %% | PASS = %s%%" % (
t,f,int(round(t*100/(t+f+0.001))))
def __init__(i,test):
unittest.tries += 1
try:
test( | )
except Exception,e:
unittest.fails += 1
i.report(test)
def report(i,test):
import traceback
print traceback.format_exc()
print unittest.score(),':',test.__name__ |
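# a minimal usage sketch (the test function here is illustrative, not part
# of the original engine):
if __name__ == '__main__':
  def _test_example():
    assert 1 + 1 == 2
  ok(_test_example)
  print unittest.score()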
OreCruncher/DynamicSurroundings | rtd/source/conf.py | Python | mit | 5,517 | 0.000363 | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Dynamic Surroundings'
copyright = '2019, OreCruncher'
author = 'OreCruncher'
# The short X.Y version
version = '3.5'
# The full version, including alpha/beta/rc tags
release = '3.5.x'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['ntemplates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['nstatic']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# Don't want source attached to HTML
html_copy_source = False
html_show_sourcelink = False
html_logo = "images/logo.png"
# -- Options for HTMLHelp output ---------------------------- | -----------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'DynamicSurroundingsdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size | ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'DynamicSurroundings.tex', 'Dynamic Surroundings Documentation',
'OreCruncher', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dynamicsurroundings', 'Dynamic Surroundings Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'DynamicSurroundings', 'Dynamic Surroundings Documentation',
author, 'DynamicSurroundings', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
def setup(app):
    app.add_stylesheet('custom.css')  # may also be a URL
sciunto-org/scifig | libscifig/collogging.py | Python | gpl-3.0 | 1,446 | 0.009682 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Code adapted from https://stackoverflow.com/questions/21923249/logging-formatting-right-justify-level-name
import logging
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
#The background is set with 40 plus the number of the color, and the foreground with 30
#These are the sequences needed to get colored output
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
def formatter_message(message, use_color = True):
if use_color:
message = message.replace("$RESET", RESET_SEQ).replace("$BOLD", BOLD_SEQ)
else:
message = message.replace("$RESET", "").replace("$BOLD", "")
return message
COLORS = {
'WARNING': YELLOW,
'INFO': GREEN,
'DEBUG': BLUE,
'CRITICAL': RED,
'ERROR': RED
}
class ColoredFormatter(logging.Formatter):
def __init__(self, msg, use_color = True):
logging.Formatter.__init__(self, msg)
self.use_color = use_color
def format(self, record):
levelname = record.levelname
message = record.msg
if self.use_color and levelname in COLORS:
levelname_color = COLOR_SEQ % (30 + COLORS[levelname]) + levelname + RESET_SEQ
record.levelname = levelname_color
message_color = COLOR_SEQ % (30 + COLORS[levelname]) + message + RESET_SE | Q
record.msg = message_color
| return logging.Formatter.format(self, record)
|
tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Scripts/f2py.py | Python | bsd-3-clause | 827 | 0 | #!C:\Users\DMora | n\Downloads\WinPython-64bit-2.7.13.1Zero\python-2.7.13.amd64\python.exe
# See http://cens.ioc.ee/projects/f2py2e/
from __future__ import division, print_function
import os
import sys
for mode in ["g3-numpy", "2e-numeric", "2e-numarray", "2e-numpy"]:
try:
i = sys.argv.index("--" + mode)
del sys.argv[i]
break
excep | t ValueError:
pass
os.environ["NO_SCIPY_IMPORT"] = "f2py"
if mode == "g3-numpy":
sys.stderr.write("G3 f2py support is not implemented, yet.\\n")
sys.exit(1)
elif mode == "2e-numeric":
from f2py2e import main
elif mode == "2e-numarray":
sys.argv.append("-DNUMARRAY")
from f2py2e import main
elif mode == "2e-numpy":
from numpy.f2py import main
else:
sys.stderr.write("Unknown mode: " + repr(mode) + "\\n")
sys.exit(1)
main()
|
herow/planning_qgis | python/plugins/processing/algs/qgis/MeanCoords.py | Python | gpl-2.0 | 4,557 | 0.001097 | # -*- coding: utf-8 -*-
"""
***************************************************************************
MeanCoords.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QVariant
from qgis.core import QGis, QgsField, QgsFeature, QgsGeometry, QgsPoint
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterTableField
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class MeanCoords(GeoAlgorithm):
POINTS = 'POINTS'
WEIGHT = 'WEIGHT'
OUTPUT = 'OUTPUT'
UID = 'UID'
def defineCharacteristics(self):
self.name = 'Mean coordinate(s)'
self.group = 'Vector analysis tools'
self.addParameter(ParameterVector(self.POINTS,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addParameter(ParameterTableField(self.WEIGHT,
self.tr('Weight field'), MeanCoords.POINTS,
ParameterTableField.DATA_TYPE_NUMBER, optional=True))
self.addParameter(ParameterTableField(self.UID,
self.tr('Unique ID field'), MeanCoords.POINTS,
ParameterTableField.DATA_TYPE_NUMBER, optional=True))
self.addOutput(OutputVector(MeanCoords.OUTPUT, self.tr('Result')))
def processAlgorithm(self, progress):
layer = dataobjects.getObjectFromUri(self.getParameterValue(self.POINTS))
weightField = self.getParameterValue(self.WEIGHT)
uniqueField = self.getParameterValue(self.UID)
if weightField is None:
weightIndex = -1
else:
weightIndex = layer.fieldNameIndex(weightField)
if uniqueField is None:
uniqueIndex = -1
else:
uniqueIndex = layer.fieldNameIndex(uniqueField)
fieldList = [QgsField('MEAN_X', QVariant.Double, '', 24, 15),
QgsField('MEAN_Y', QVariant.Double, '', 24, 15),
QgsField('UID', QVariant.String, '', 255)]
writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
fieldList, QGis.WKBPoint, layer.crs()
)
current = 0
features = vector.features(layer)
total = 100.0 / float(len(features))
means = {}
for feat in features:
current += 1
progress.setPercentage(current * total)
if uniqueIndex == -1:
clazz = "Single class"
else:
clazz = str(feat.attributes()[uniqueIndex]).strip()
if weightIndex == -1:
weight = 1.00
else:
try:
weight = float(feat.attributes()[weightIndex])
except:
weight = 1.00
if clazz not in means:
means[clazz] = (0, 0, 0)
(cx, cy, totalweight) = means[clazz]
geom = QgsGeometry(feat.geometry())
geom = vector.extractPoints(geom)
for i in geom:
cx += i.x() * weight
cy += i.y() * weight
totalweight += weight
means[clazz] = (cx, cy, totalweight)
for (cl | azz, values) in means.iteritems():
outFeat = QgsF | eature()
cx = values[0] / values[2]
cy = values[1] / values[2]
meanPoint = QgsPoint(cx, cy)
outFeat.setGeometry(QgsGeometry.fromPoint(meanPoint))
outFeat.setAttributes([cx, cy, clazz])
writer.addFeature(outFeat)
del writer
|
MrPablozOne/kaira | gui/cmdutils.py | Python | gpl-3.0 | 2,053 | 0.003897 | #
# Copyright (C) 2010 Stanislav Bohm
#
# This file is part of Kaira.
#
# Kaira is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License, or
# (at your option) any later version.
#
# Kaira is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kaira. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
import sys
import paths
sys.path.append(paths.PTP_DIR)
import loader
import os
import tracelog
def export(filename, directory, trace, lib):
p = loader.load_project(filename)
if trace and lib:
target = "libtraced"
elif trace:
target = "traced"
elif lib:
target = "lib"
else:
target = "release"
build_config = p.get_build_config(target)
if directory is not None:
build_config.directory = directory
else:
build_config.directory = os.path.dirname(filename)
p.export(build_config)
def check_tracelog(filename):
t = tracelog.TraceLog(filename)
print t.get_runinsta | nces_count()
def main():
parser = argparse.ArgumentParser(description='Kaira gui command line controller')
parser.add_argument('--export', metavar='filename', type=str)
parser.add_argument('--output', metavar='directory', type=str)
parser.add_argument("--trace", action='store_true')
parser.add_argument('--tracelog', metavar='filename', t | ype=str)
parser.add_argument("--lib", action='store_true')
args = parser.parse_args()
if args.export:
export(os.path.abspath(args.export), args.output, args.trace, args.lib)
return
if args.tracelog:
check_tracelog(args.tracelog)
if __name__ == "__main__":
main()
|
MapuH/Interactive-Programming-In-Python | simplegui-demo.py | Python | mit | 613 | 0.014682 | try:
import simplegui
except ImportError:
import SimpleGUI | CS2Pygame.simpleguics2pygame as simplegui
message = "Welcome!"
# Handlers for mouse click
def click():
global message
message = "Good job!"
def reset():
global message
message = "Welcome!"
# Handler to draw on canvas
def draw(canvas):
canvas.draw_text(message, [50,112], 48, "Red")
# Create a frame and assign callbacks to event handlers
frame = simplegui.create_frame("Home", 300, 200)
frame.add_button("Click me", click)
frame.add_button("Reset", reset)
frame.set_draw_handler(draw)
# S | tart the frame animation
frame.start()
|
andres-hurtado-lopez/naranjaverdeprod | app/produccion/__init__.py | Python | mit | 163 | 0.006135 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from bottle import template, redirect
import utils
def GET(**p | arams):
return template('materiales_ | index.html')
|
jawilson/home-assistant | homeassistant/components/cloudflare/config_flow.py | Python | apache-2.0 | 6,680 | 0.000599 | """Config flow for Cloudflare integration."""
from __future__ import annotations
import logging
from typing import Any
from pycfdns import CloudflareUpdater
from pycfdns.exceptions import (
CloudflareAuthenticationException,
CloudflareConnectionException,
CloudflareZoneException,
)
import voluptuous as vol
from homeassistant.components import persistent_notification
from homeassistant.config_entries import ConfigEntry, ConfigFlow
from homeassistant.const import CONF_API_TOKEN, CONF_ZONE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers | import config_validation as cv
from homeassistant.h | elpers.aiohttp_client import async_get_clientsession
from .const import CONF_RECORDS, DOMAIN
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_TOKEN): str,
}
)
def _zone_schema(zones: list | None = None):
"""Zone selection schema."""
zones_list = []
if zones is not None:
zones_list = zones
return vol.Schema({vol.Required(CONF_ZONE): vol.In(zones_list)})
def _records_schema(records: list | None = None):
"""Zone records selection schema."""
records_dict = {}
if records:
records_dict = {name: name for name in records}
return vol.Schema({vol.Required(CONF_RECORDS): cv.multi_select(records_dict)})
async def validate_input(hass: HomeAssistant, data: dict):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
zone = data.get(CONF_ZONE)
records = None
cfupdate = CloudflareUpdater(
async_get_clientsession(hass),
data[CONF_API_TOKEN],
zone,
[],
)
try:
zones = await cfupdate.get_zones()
if zone:
zone_id = await cfupdate.get_zone_id()
records = await cfupdate.get_zone_records(zone_id, "A")
except CloudflareConnectionException as error:
raise CannotConnect from error
except CloudflareAuthenticationException as error:
raise InvalidAuth from error
except CloudflareZoneException as error:
raise InvalidZone from error
return {"zones": zones, "records": records}
class CloudflareConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Cloudflare."""
VERSION = 1
entry: ConfigEntry | None = None
def __init__(self):
"""Initialize the Cloudflare config flow."""
self.cloudflare_config = {}
self.zones = None
self.records = None
async def async_step_reauth(self, data: dict[str, Any]) -> FlowResult:
"""Handle initiation of re-authentication with Cloudflare."""
self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle re-authentication with Cloudflare."""
errors = {}
if user_input is not None and self.entry:
_, errors = await self._async_validate_or_error(user_input)
if not errors:
self.hass.config_entries.async_update_entry(
self.entry,
data={
**self.entry.data,
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
},
)
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry.entry_id)
)
return self.async_abort(reason="reauth_successful")
return self.async_show_form(
step_id="reauth_confirm",
data_schema=DATA_SCHEMA,
errors=errors,
)
async def async_step_user(self, user_input: dict | None = None):
"""Handle a flow initiated by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
persistent_notification.async_dismiss(self.hass, "cloudflare_setup")
errors = {}
if user_input is not None:
info, errors = await self._async_validate_or_error(user_input)
if not errors:
self.cloudflare_config.update(user_input)
self.zones = info["zones"]
return await self.async_step_zone()
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_zone(self, user_input: dict | None = None):
"""Handle the picking the zone."""
errors = {}
if user_input is not None:
self.cloudflare_config.update(user_input)
info, errors = await self._async_validate_or_error(self.cloudflare_config)
if not errors:
await self.async_set_unique_id(user_input[CONF_ZONE])
self.records = info["records"]
return await self.async_step_records()
return self.async_show_form(
step_id="zone",
data_schema=_zone_schema(self.zones),
errors=errors,
)
async def async_step_records(self, user_input: dict | None = None):
"""Handle the picking the zone records."""
if user_input is not None:
self.cloudflare_config.update(user_input)
title = self.cloudflare_config[CONF_ZONE]
return self.async_create_entry(title=title, data=self.cloudflare_config)
return self.async_show_form(
step_id="records",
data_schema=_records_schema(self.records),
)
async def _async_validate_or_error(self, config):
errors = {}
info = {}
try:
info = await validate_input(self.hass, config)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidZone:
errors["base"] = "invalid_zone"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
return info, errors
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
class InvalidZone(HomeAssistantError):
"""Error to indicate we cannot validate zone exists in account."""
|
arthurSena/processors | tests/processors/base/processors/test_trial.py | Python | mit | 1,381 | 0.006517 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import pytest
import uuid
from processors.base import helpers
import processors.nct.extractors as nct_extractors
from processors.base.processors.trial import process_trials
class TestTrialProcessor(object):
def test_updates_which_record_is_primary(self, conn, extractors, trial, record,
nct_record, euctr_source):
current_primary = conn['database']['records'].find_one(id=record)
nct_record_attrs = conn['warehouse']['nct'].find_one(nct_id=nct_record)
current_primary.update({
| 'trial_id': trial,
'is_primary': True,
'source_id': euctr_source,
'identi | fiers': {'nct': nct_record},
})
conn['database']['records'].update(current_primary, ['id'])
process_trials(conn, 'nct', extractors)
updated_current_primary = conn['database']['records'].find_one(id=record)
new_record = conn['database']['records'].find_one(id=nct_record_attrs['meta_id'])
assert updated_current_primary['is_primary'] == False
assert new_record['is_primary'] == True
@pytest.fixture
def extractors():
return helpers.get_variables(nct_extractors,
lambda x: x.startswith('extract_'))
|
CRImier/pyLCI | output/output.py | Python | apache-2.0 | 717 | 0.004184 | from helpers import read_config
import importlib
screen = None
def init():
""" This function is called by main.py to read the output configuration, pick the corresponding drivers and initialize a Screen object.
    It also sets the ``screen`` global of the ``output`` module to the created ``Screen`` object."""
global screen
config = read_config("config.json")
output_config = config["output"][0]
driver_name = output_config["driver"]
driver_module = importlib.import_module("out | put.drivers."+driver_name)
args = output_config["args"] if "args" in output_config else []
kwargs = output_config["kwargs"] if "kwargs" in output_config else {}
screen = driver_module.Screen(*args, **kwargs)
|
heqiao2010/demo | exlogutil/ExLogUtilPy/ExLogUtil/__init__.py | Python | gpl-3.0 | 45 | 0.022222 | __autho | r__ = 'joel'
__all__ = ["ExLogUtil"] | |
dloman/FiestaMonsterz | Drawing/Drawable.py | Python | gpl-3.0 | 1,861 | 0.017195 | from Utility.Utility import DoesntHaveMethod
################################################################################
################################################################################
class Drawable(object):
##############################################################################
def __init__(self, DrawFu | nctor, GetPositionFunctor):
self.DrawFunctor = DrawFunctor
self.GetPositionFunctor = GetPositionFunctor
##############################################################################
@property
def DrawFunctor(self):
return self.__DrawFunctor
##############################################################################
@DrawFunctor.setter
def DrawFunctor(self, | DrawFunctor):
if not callable(DrawFunctor):
raise TypeError(str(type(DrawFunctor)) + " must be a callable type")
self.__DrawFunctor = DrawFunctor
##############################################################################
@property
def GetPositionFunctor(self):
return self.__GetPositionFunctor
##############################################################################
@GetPositionFunctor.setter
def GetPositionFunctor(self, GetPositionFunctor):
if DoesntHaveMethod(GetPositionFunctor, 'GetPosition'):
      raise TypeError(str(type(GetPositionFunctor)) + ' must have a GetPosition method')
self.__GetPositionFunctor = GetPositionFunctor
##############################################################################
def Draw(self):
self.__DrawFunctor(self.GetPosition())
##############################################################################
def GetPosition(self):
return self.GetPositionFunctor.GetPosition()
################################################################################
################################################################################
|
shravanshandilya/catching-up-with-python | Recipes/class_example.py | Python | mit | 245 | 0.093878 | class F | riend:
def walk(self,shravan=""):
'''
>>> Friend().walk()
walking
'''
print "walking",
def talk(self):
print "talking",
def fight(self):
print "fighting",
f1=Friend()
f1.walk()
import do | ctest
doctest.testmod() |
spyder-ide/spyder-terminal | spyder_terminal/api.py | Python | mit | 764 | 0 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the | terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
class TerminalMainWidgetActions:
NewTerminal = 'new_terminal_toolbar_action'
NewTerminalForCWD = 'new_terminal'
NewTerminalForProje | ct = 'new_terminal_project_action'
NewTerminalForFile = 'new_terminal_file_action'
RenameTab = 'rename_tab_action'
Copy = 'copy'
Paste = 'paste'
Clear = 'clear'
ZoomIn = 'zoom_in'
ZoomOut = 'zoom_out'
class TermViewMenus:
Context = 'context_menu'
|
CINPLA/expipe-dev | python-neo/neo/io/nestio.py | Python | gpl-3.0 | 32,456 | 0.000216 | # -*- coding: utf-8 -*-
"""
Class for reading output files from NEST simulations
( http://www.nest-simulator.org/ ).
Tested with NEST2.10.0
Depends on: numpy, quantities
Supported: Read
Authors: Julia Sprenger, Maximilian Schmidt, Johanna Senk
"""
# needed for Python3 compatibility
from __future__ import absolute_import
import os.path
import warnings
from datetime import datetime
import numpy as np
import quantities as pq
from neo.io.baseio import BaseIO
from neo.core import Block, Segment, SpikeTrain, AnalogSignal
value_type_dict = {'V': pq.mV,
'I': pq.pA,
'g': pq.CompoundUnit("10^-9*S"),
'no type': pq.dimensionless}
class NestIO(BaseIO):
"""
Class for reading NEST output files. GDF files for the spike data and DAT
files for analog signals are possible.
Usage:
from neo.io.nestio import NestIO
files = ['membrane_voltages-1261-0.dat',
'spikes-1258-0.gdf']
r = NestIO(filenames=files)
seg = r.read_segment(gid_list=[], t_start=400 * pq.ms,
t_stop=600 * pq.ms,
id_column_gdf=0, time_column_gdf=1,
id_column_dat=0, time_column_dat=1,
value_columns_dat=2)
"""
is_readable = True # class supports reading, but not writing
is_writable = False
supported_objects = [SpikeTrain, AnalogSignal, Segment, Block]
readable_objects = [SpikeTrain, AnalogSignal, Segment, Block]
has_header = False
is_streameable = False
write_params = None # writing is not supported
name = 'nest'
extensions = ['gdf', 'dat']
mode = 'file'
def __init__(self, filenames=None):
"""
Parameters
----------
filenames: string or list of strings, default=None
The filename or list of filenames to load.
"""
if isinstance(filenames, str):
filenames = [filenames]
self.filenames = filenames
self.avail_formats = {}
self.avail_IOs = {}
for filename in filenames:
path, ext = os.path.splitext(filename)
ext = ext.strip('.')
if ext in self.extensions:
if ext in self.avail_IOs:
raise ValueError('Received multiple files with "%s" '
'extention. Can only load single file of '
'this type.' % ext)
self.avail_IOs[ext] = ColumnIO(filename)
self.avail_formats[ext] = path
def __read_analogsignals(self, gid_list, time_unit, t_start=None,
t_stop=None, sampling_period=None,
id_column=0, time_column=1,
value_columns=2, value_types=None,
value_units=None, lazy=False):
"""
Internal function called by read_analogsignal() and read_segment().
"""
| if 'dat' not in self.avail_formats:
raise ValueError('Can not load analogsignals. No DAT file '
| 'provided.')
# checking gid input parameters
gid_list, id_column = self._check_input_gids(gid_list, id_column)
# checking time input parameters
t_start, t_stop = self._check_input_times(t_start, t_stop,
mandatory=False)
# checking value input parameters
(value_columns, value_types, value_units) = \
self._check_input_values_parameters(value_columns, value_types,
value_units)
# defining standard column order for internal usage
# [id_column, time_column, value_column1, value_column2, ...]
column_ids = [id_column, time_column] + value_columns
for i, cid in enumerate(column_ids):
if cid is None:
column_ids[i] = -1
# assert that no single column is assigned twice
column_list = [id_column, time_column] + value_columns
column_list_no_None = [c for c in column_list if c is not None]
if len(np.unique(column_list_no_None)) < len(column_list_no_None):
raise ValueError(
'One or more columns have been specified to contain '
'the same data. Columns were specified to %s.'
'' % column_list_no_None)
# extracting condition and sorting parameters for raw data loading
(condition, condition_column,
sorting_column) = self._get_conditions_and_sorting(id_column,
time_column,
gid_list,
t_start,
t_stop)
# loading raw data columns
data = self.avail_IOs['dat'].get_columns(
column_ids=column_ids,
condition=condition,
condition_column=condition_column,
sorting_columns=sorting_column)
sampling_period = self._check_input_sampling_period(sampling_period,
time_column,
time_unit,
data)
analogsignal_list = []
if not lazy:
# extracting complete gid list for anasig generation
if (gid_list == []) and id_column is not None:
gid_list = np.unique(data[:, id_column])
# generate analogsignals for each neuron ID
for i in gid_list:
selected_ids = self._get_selected_ids(
i, id_column, time_column, t_start, t_stop, time_unit,
data)
# extract starting time of analogsignal
if (time_column is not None) and data.size:
anasig_start_time = data[selected_ids[0], 1] * time_unit
else:
# set t_start equal to sampling_period because NEST starts
# recording only after 1 sampling_period
anasig_start_time = 1. * sampling_period
# create one analogsignal per value column requested
for v_id, value_column in enumerate(value_columns):
signal = data[
selected_ids[0]:selected_ids[1], value_column]
# create AnalogSignal objects and annotate them with
# the neuron ID
analogsignal_list.append(AnalogSignal(
signal * value_units[v_id],
sampling_period=sampling_period,
t_start=anasig_start_time,
id=i,
type=value_types[v_id]))
# check for correct length of analogsignal
assert (analogsignal_list[-1].t_stop ==
anasig_start_time + len(signal) * sampling_period)
return analogsignal_list
def __read_spiketrains(self, gdf_id_list, time_unit,
t_start, t_stop, id_column,
time_column, **args):
"""
Internal function for reading multiple spiketrains at once.
This function is called by read_spiketrain() and read_segment().
"""
if 'gdf' not in self.avail_IOs:
            raise ValueError('Cannot load spiketrains. No GDF file provided.')
# assert that the file contains spike times
if time_column is None:
raise ValueError('Time column is None. No spike times to '
'be read in.')
gdf_id_list, id_column = self._check_input_gids(gdf_id_list, id_column)
t_start, t_stop = self._check_input_times(t_start, t_stop,
|
valmynd/MediaFetcher | src/plugins/youtube_dl/youtube_dl/extractor/tweakers.py | Python | gpl-3.0 | 1,742 | 0.029851 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
int_or_none,
determine_ext,
mimetype2ext,
)
class TweakersIE(InfoExtractor):
_VALID_URL = r'https?://tweakers\.net/video/(?P<id>\d+)'
_TEST = {
'url': 'https://tweakers.net/video/9926/new-nintendo-3ds-xl-op-alle-fronten-beter.html',
'md5': 'fe73e417c093a788e0160c4025f88b15',
'info_dict': {
'id': '9926',
'ext': 'mp4',
'title': 'New Nintendo 3DS XL - Op alle fronten beter',
'description': 'md5:3789b21fed9c0219e9bcaacd43fab280',
'thumbnail': r're:^https?://.*\.jpe?g$',
'duration': 386,
'uploader_id': 's7JeEm',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
video_data = self._download_json(
'https://tweakers.net/video/s1playlist/%s/1920/1080/playlist.json' % video_id,
video_id)['items'][0]
title = video_data['title']
formats = []
for location in video_data.get('locations', {}).get('progressive', []):
format_id = location.get('label')
width = int_or_none(location.get('width'))
height = int_or_none(location.get('height'))
for source in location.get('sources', []):
				source_url = source.get('src')
				if not source_url:
					continue
ext = mimetype2ext(source.get('type')) or determine_ext(source_url)
formats.append({
'format_id': format_id,
'url': source_url,
'width': width,
'height': height,
'ext': ext,
})
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'description': video_data.get('description'),
'thumbnail': video_data.get('poster'),
'duration': int_or_none(video_data.get('duration')),
'uploader_id': video_data.get('account'),
'formats': formats,
}
|
mortcanty/SARDocker | src/build/lib.linux-x86_64-2.7/auxil/png.py | Python | mit | 142,825 | 0.002962 | #!/usr/bin/env python
# $URL: http://pypng.googlecode.com/svn/trunk/code/png.py $
# $Rev: 201 $
# png.py - PNG encoder/decoder in pure Python
#
# Copyright (C) 2006 Johann C. Rocholl <johann@browsershots.org>
# Portions Copyright (C) 2009 David Jones <drj@pobox.com>
# And probably portions Copyright (C) 2006 Nicko van Someren <nicko@nicko.org>
#
# Original concept by Johann C. Rocholl.
#
# LICENSE (The MIT License)
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Changelog (recent first):
# 2009-03-11 David: interlaced bit depth < 8 (writing).
# 2009-03-10 David: interlaced bit depth < 8 (reading).
# 2009-03-04 David: Flat and Boxed pixel formats.
# 2009-02-26 David: Palette support (writing).
# 2009-02-23 David: Bit-depths < 8; better PNM support.
# 2006-06-17 Nicko: Reworked into a class, faster interlacing.
# 2006-06-17 Johann: Very simple prototype PNG decoder.
# 2006-06-17 Nicko: Test suite with various image generators.
# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support.
# 2006-06-15 Johann: Scanline iterator interface for large input files.
# 2006-06-09 Johann: Very simple prototype PNG encoder.
# Incorporated into Bangai-O Development Tools by drj on 2009-02-11 from
# http://trac.browsershots.org/browser/trunk/pypng/lib/png.py?rev=2885
# Incorporated into pypng by drj on 2009-03-12 from
# //depot/prj/bangaio/master/code/png.py#67
"""
Pure Python PNG Reader/Writer
This Python module implements support for PNG images (see PNG
specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads
and writes PNG files with all allowable bit depths (1/2/4/8/16/24/32/48/64
bits per pixel) and colour combinations: greyscale (1/2/4/8/16 bit); RGB,
RGBA, LA (greyscale with alpha) with 8/16 bits per channel; colour mapped
images (1/2/4/8 bit). Adam7 interlacing is supported for reading and
writing. A number of optional chunks can be specified (when writing)
and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``.
For help, type ``import png; help(png)`` in your python interpreter.
A good place to start is the :class:`Reader` and :class:`Writer` classes.
Requires Python 2.3. Limited support is available for Python 2.2, but
not everything works. Best with Python 2.4 and higher. Installation is
trivial, but see the ``README.txt`` file (with the source distribution)
for details.
This file can also be used as a command-line utility to convert
`Netpbm <http://netpbm.sourceforge.net/>`_ PNM files to PNG, and the reverse conversion from PNG to
PNM. The interface is similar to that of the ``pnmtopng`` program from
Netpbm. Type ``python png.py --help`` at the shell prompt
for usage and a list of options.
A note on spelling and terminology
----------------------------------
Generally British English spelling is used in the documentation. So
that's "greyscale" and "colour". This not only matches the author's
native language, it's also used by the PNG specification.
The major colour models supported by PNG (and hence by PyPNG) are:
greyscale, RGB, greyscale--alpha, RGB--alpha. These are sometimes
referred to using the abbreviations: L, RGB, LA, RGBA. In this case
each letter abbreviates a single channel: *L* is for Luminance or Luma or
Lightness which is the channel used in greyscale images; *R*, *G*, *B* stand
for Red, Green, Blue, the components of a colour image; *A* stands for
Alpha, the opacity channel (used for transparency effects, but higher
values are more opaque, so it makes sense to call it opacity).
A note on formats
-----------------
When getting pixel data out of this module (reading) and presenting
data to this module (writing) there are a number of ways the data could
be represented as a Python value. Generally this module uses one of
three formats called "flat row flat pixel", "boxed row flat pixel", and
"boxed row boxed pixel". Basically the concern is whether each pixel
and each row comes in its own little tuple (box), or not.
Consider an image that is 3 pixels wide by 2 pixels high, and each pixel
has RGB components:
Boxed row flat pixel::
list([R,G,B, R,G,B, R,G,B],
[R,G,B, R,G,B, R,G,B])
Each row appears as its own list, but the pixels are flattened so that
three values for one pixel simply follow the three values for the previous
pixel. This is the most common format used, because it provides a good
compromise between space and convenience. PyPNG regards itself as
at liberty to replace any sequence type with any sufficiently compatible
other sequence type; in practice each row is an array (from the array
module), and the outer list is sometimes an iterator rather than an
explicit list (so that streaming is possible).
Flat row flat pixel::
[R,G,B, R,G,B, R,G,B,
R,G,B, R,G,B, R,G,B]
The entire image is one single giant sequence of colour values.
Generally an array will be used (to save space), not a list.
Boxed row boxed pixel::
list([ (R,G,B), (R,G,B), (R,G,B) ],
[ (R,G,B), (R,G,B), (R,G,B) ])
Each row appears in its own list, but each pixel also appears in its own
tuple. A serious memory burn in Python.
In all cases the top row comes first, and for each row the pixels are
ordered from left-to-right. Within a pixel the values appear in the
order, R-G-B-A (or L-A for greyscale--alpha).
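A worked sketch tying these together (the file name and pixel values are
illustrative; ``png.Writer`` is introduced below)::
    import png
    rows = [[255, 0, 0, 0, 255, 0, 0, 0, 255],
            [0, 0, 0, 128, 128, 128, 255, 255, 255]]
    w = png.Writer(width=3, height=2, greyscale=False)
    with open('tiny.png', 'wb') as f:
        w.write(f, rows)  # rows is in boxed row flat pixel format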
There is a fourth format, mentioned because it is used internally,
is close to what lies inside a PNG file itself, and has some support
from the public API. This format is called packed. When packed,
each row is a sequence of bytes (integers from 0 to 255), just as
it is before PNG scanline filtering is applied. When the bit depth
is 8 this is essentially the same as boxed row flat pixel; when the
bit depth is less than 8, several pixels are packed into each byte;
when the bit depth is 16 (the only value more than 8 that is supported
by the PNG image format) each pixel value is decomposed into 2 bytes
(and `packed` is a misnomer). This format is used by the
:meth:`Writer.write_packed` method. It isn't usually a convenient
format, but may be just right if the source data for the PNG image
comes from something that uses a similar format (for example, 1-bit
BMPs, or another PNG file).
And now, my famous members
--------------------------
"""
# http://www.python.org/doc/2.2.3/whatsnew/node5.html
from __future__ import generators
__version__ = "$URL: http://pypng.googlecode.com/svn/trunk/code/png.py $ $Rev: 201 $"
from array import array
try: # See :pyver:old
import itertools
except:
pass
import math
# http://www.python.org/doc/2.4.4/lib/module-operator.html
import operator
import struct
import sys
import zlib
# http://www.python.org/doc/2.4.4/lib/module-warnings.html
import warnings
__all__ = ['Reader', 'Writer', 'write_chunks']
# The PNG signature.
# http://www.w3.org/TR/PNG/#5PNG-file-signature
_signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10)
_adam7 = ((0, 0, 8, 8),
(4, 0, 8, 8),
(0, 4, 4, 8),
(2, 0, 4, 4),
(0, 2, 2, 4),
(1, 0, 2, 2),
(0, 1, 1, 2))
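# Each _adam7 entry gives (xstart, ystart, xstep, ystep) for one of the
# seven Adam7 interlace passes, listed in pass order.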
def group(s, n):
# See
# http://www.python.org/doc/2.6/library/functions.html#zip
return zip(*[iter( |
ucdavis-agecon/gunicorn-init | tree/etc/gunicorn/py/example.py | Python | mit | 272 | 0.018382 | # Example taken from the http://gunicorn.org/configure.html
# page.
import os
def numCPUs():
if not hasattr(os, "sysconf"):
raise RuntimeError("No sysconf detected.")
    return os.sysconf("SC_NPROCESSORS_ONLN")
bind = "127.0.0.1:8000"
workers = numCPUs() * 2 + 1
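# Hypothetical invocation of this config module (the WSGI app path
# "myproject.wsgi:application" is illustrative):
#     gunicorn -c /etc/gunicorn/py/example.py myproject.wsgi:application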
|
sam-roth/Keypad | keypad/abstract/editor.py | Python | gpl-3.0 | 4,153 | 0.004816 | import abc
from ..control import BufferController
from ..buffers import Buffer
from .application import app, AbstractApplication
from ..control.interactive import interactive
from ..core import Signal
import logging
class AbstractEditor(metaclass=abc.ABCMeta):
def __init__(self, view, config):
self.__view = view
self.__buffer = Buffer()
self.__buffer_controller = BufferController(None,
view,
self.__buffer,
True,
config)
self.buffer_controller.modified_was_changed.connect(self.__modified_changed)
self.buffer_controller.path_changed.connect(self.path_changed)
self.config = config
@Signal
def editor_activated(self):
pass
def __modified_changed(self, value):
self.is_modified_changed()
@abc.abstractmethod
def activate(self):
'''
Bring this editor to the front and give it focus.
'''
@property
def buffer_controller(self):
return self.__buffer_controller
@abc.abstractmethod
def kill(self):
'''
Immediately close the editor without prompting the user.
'''
@property
def is_modified(self):
'''
Returns True iff the editor's contents reflect the last-saved state.
'''
r = self.__buffer_controller.is_modified
return r
@Signal
def is_modified_changed(self):
pass
@Signal
def saved(self):
pass
@Signal
def path_changed(self):
pass
@property
def path(self):
'''
Returns the current path of the file that this editor will save to, or None
if there is no such file.
'''
return self.__buffer_controller.path
@path.setter
def path(self, value):
if value is not None:
import pathlib
value = pathlib.Path(value)
self.__buffer_controller.add_tags(path=value)
def save(self, path, *, codec_errors='strict', prompt_on_error=True):
'''
Save the file to the path given.
'''
try:
self.__buffer_controller.write_to_path(path, codec_errors=codec_errors)
except UnicodeEncodeError as exc:
if not prompt_on_error:
raise
res = app().message_box(self,
'This buffer contains non-plaintext characters: ' + str(exc),
['Write As-Is', 'DELETE Unknown Characters', 'Cancel'],
kind=AbstractApplication.MessageBoxKind.warning)
if res == 'Write As-Is':
                self.__buffer_controller.write_to_path(path, codec_errors='surrogateescape')
            elif res == 'DELETE Unknown Characters':
                self.__buffer_controller.write_to_path(path, codec_errors='ignore')
else:
raise
except OSError as exc:
if not prompt_on_error:
raise
res = app().message_box(self,
'An error occurred while trying to save the file: ' + str(exc),
                                     ['Cancel (the file will remain open)'],
kind=AbstractApplication.MessageBoxKind.warning)
raise
self.saved()
def load(self, path, *, codec_errors='strict'):
'''
Load the file from the path given.
'''
from ..core.notification_queue import run_in_main_thread
with self.__buffer_controller.history.transaction():
self.__buffer_controller.replace_from_path(path, create_new=True, codec_errors=codec_errors)
self.__buffer_controller.history.clear()
self.__buffer_controller.is_modified = False
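# A minimal concrete subclass sketch (hypothetical; a real editor would
# bind these methods to an actual window system):
class NullEditor(AbstractEditor):
    def activate(self):
        pass  # a real implementation raises and focuses the window
    def kill(self):
        pass  # a real implementation closes the view without prompting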
@interactive('gui_save', 'gsave', 'gsv')
def gui_save(ed: AbstractEditor):
from .application import app
app().save(ed)
|
josenavas/american-gut-web | amgut/handlers/human_survey.py | Python | bsd-3-clause | 7,184 | 0.000139 | from json import dumps, loads
import logging
from tornado.web import authenticated
from tornado.escape import url_escape
from amgut import media_locale, text_locale
from amgut.connections import ag_data, redis
from amgut.handlers.base_handlers import BaseHandler
from amgut.lib.util import store_survey, make_survey_class
from amgut.lib.survey_supp import primary_human_survey
from amgut.lib.mail import send_email
phs_groups = primary_human_survey.groups
surveys = [make_survey_class(group, survey_type='HumanSurvey')
for group in phs_groups]
def build_consent_form(consent_info):
tl = text_locale['new_participant.html']
# build out the consent form
if consent_info['age_range'] == '0-6':
message = ("%s<br/>%s<br/>%s<p>%s: %s</p><p>%s: %s</p><p>%s: %s</p>"
"<p>%s: %s</p><p>%s: %s</p><p>%s: %s</p>") %\
(tl['CONSENT_YOUR_CHILD'],
tl['PARTICIPATION_AGREEMENT'],
tl['EXHIBIT_A'],
tl['PARTICIPANT_NAME'], consent_info['participant_name'],
tl['PARTICIPANT_EMAIL'], consent_info['participant_email'],
tl['PARTICIPANT_PARENT_1'], consent_info['parent_1_name'],
tl['PARTICIPANT_PARENT_2'], consent_info['parent_2_name'],
tl['PARTICIPANT_DECEASED_PARENTS'],
consent_info['deceased_parent'],
tl['DATE_SIGNED'], str(consent_info['date_signed']))
elif consent_info['age_range'] == '7-12':
message = ("%s<br/>%s<p>%s: %s</p><p>%s: %s</p><p>%s: %s</p>"
"%s<p>%s: %s</p><p>%s: %s</p><p>%s: %s</p>"
"<p>%s: %s</p>") %\
(tl['ASSENT_7_12'],
tl['PARTICIPATION_AGREEMENT'],
tl['PARTICIPANT_NAME'], consent_info['participant_name'],
tl['PARTICIPANT_EMAIL'], consent_info['participant_email'],
tl['OBTAINER_NAME'], consent_info['assent_obtainer'],
tl['CONSENT_YOUR_CHILD'],
tl['PARTICIPANT_PARENT_1'], consent_info['parent_1_name'],
tl['PARTICIPANT_PARENT_2'], consent_info['parent_2_name'],
tl['PARTICIPANT_DECEASED_PARENTS'],
consent_info['deceased_parent'],
tl['DATE_SIGNED'], str(consent_info['date_signed']))
elif consent_info['age_range'] == '13-17':
message = ("%s<br/>%s<p>%s: %s</p><p>%s: %s</p>"
"%s<p>%s: %s</p><p>%s: %s</p><p>%s: %s</p>"
"<p>%s: %s</p>") %\
(tl['ASSENT_13_17'],
tl['PARTICIPATION_AGREEMENT'],
tl['PARTICIPANT_NAME'], consent_info['participant_name'],
tl['PARTICIPANT_EMAIL'], consent_info['participant_email'],
tl['CONSENT_YOUR_CHILD'],
tl['PARTICIPANT_PARENT_1'], consent_info['parent_1_name'],
tl['PARTICIPANT_PARENT_2'], consent_info['parent_2_name'],
tl['PARTICIPANT_DECEASED_PARENTS'],
consent_info['deceased_parent'],
tl['DATE_SIGNED'], str(consent_info['date_signed']))
elif consent_info['age_range'] == '18-plus':
message = "%s<br/>%s<p>%s: %s</p><p>%s: %s</p><p>%s: %s</p>" %\
(tl['CONSENT_18'],
tl['PARTICIPATION_AGREEMENT'],
tl['PARTICIPANT_NAME'], consent_info['participant_name'],
tl['PARTICIPANT_EMAIL'], consent_info['participant_email'],
                   tl['DATE_SIGNED'], str(consent_info['date_signed']))
else:
# old consent so no idea of age range and text, only juv/non-juv
raise NotImplementedError("Old consent, no text available")
return message
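# Illustrative call for the adult path (values are placeholders; the dict
# mirrors what ag_data.getConsent() returns):
# build_consent_form({'age_range': '18-plus',
#                     'participant_name': 'Jane Doe',
#                     'participant_email': 'jane@example.org',
#                     'date_signed': '2015-01-01'})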
class HumanSurveyHandler(BaseHandler):
@authenticated
def post(self):
# see if we're coming from an edit
        human_survey_id = self.get_argument('survey_id', None)
page_number = int(self.get_argument('page_number'))
sitebase = media_locale['SITEBASE']
if human_survey_id is None:
# we came from consent
human_survey_id = self.get_secure_cookie('human_survey_id')
if human_survey_id is None:
err_msg = url_escape("There was an unexpected error.")
self.redirect(sitebase + "/authed/portal/?errmsg=%s" % err_msg)
return
else:
# we came from participant_overview
consent = ag_data.getConsent(human_survey_id)
# make sure is string so can be serialized
consent['date_signed'] = str(consent['date_signed'])
self.set_secure_cookie('human_survey_id', human_survey_id)
data = primary_human_survey.fetch_survey(human_survey_id)
redis.hset(human_survey_id, 'consent', dumps(consent))
redis.hset(human_survey_id, 'existing', dumps(data))
redis.expire(human_survey_id, 86400)
next_page_number = page_number + 1
if page_number >= 0:
form_data = surveys[page_number]()
form_data.process(data=self.request.arguments)
data = {'questions': form_data.data}
redis.hset(human_survey_id, page_number, dumps(data))
progress = int(100.0 * (page_number + 2) / (len(phs_groups) + 1))
# if this is not the last page, render the next page
if next_page_number < len(surveys):
the_form = surveys[next_page_number]()
existing_responses = redis.hget(human_survey_id, 'existing')
if existing_responses:
existing_responses = loads(existing_responses)
the_form = surveys[next_page_number](data=existing_responses)
title = phs_groups[next_page_number].name
self.render('human_survey.html', the_form=the_form,
skid=self.current_user, TITLE=title,
page_number=next_page_number,
progress=progress)
else:
# only get the cookie if you complete the survey
self.clear_cookie('human_survey_id')
self.set_secure_cookie('completed_survey_id', human_survey_id)
store_survey(primary_human_survey, human_survey_id)
existing = redis.hget(human_survey_id, 'existing')
if existing is None:
# Send consent info email since new participant
consent_info = ag_data.getConsent(human_survey_id)
try:
message = build_consent_form(consent_info)
send_email(message, 'American Gut-Signed Consent Form(s)',
recipient=consent_info['participant_email'],
sender='donotreply@americangut.com', html=True)
except:
logging.exception('Error sending signed consent form for '
'survey ID: %s to email: %s' %
(human_survey_id,
consent_info['participant_email']))
self.redirect(sitebase + '/authed/human_survey_completed/')
@authenticated
def get(self, *args, **kwargs):
self.redirect(media_locale['SITEBASE'] + "/")
|
guegue/forocacao | forocacao/app/context_processors.py | Python | bsd-3-clause | 1,036 | 0.003861 | from django.conf import settings
from forocacao.app.models import Event, Content
def get_or_none(model, objects, *args, **kwargs):
try:
return objects.get(*args, **kwargs)
except model.DoesNotExist:
return None
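# Usage sketch: returns the matching row or None instead of raising, e.g.
# get_or_none(Content, current_event.contents, page='info')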
def current_event(request):
'''
    A context processor to add the "current event" to the current Context
'''
try:
current_event = Event.objects.filter(status='frontpage')[0]
return {
'event': current_event,
'current_event': current_event.name,
'current_slug': current_event.slug,
'current_info': get_or_none(Content, current_event.contents, page='info'),
'current_footer': get_or_none(Content, current_event.contents, page='footer'),
}
except Event.DoesNotExist:
        # always return a dict, no matter what!
return {
'event': None,
'current_event': '',
'current_slug': '',
'current_info': '',
'current_footer': '',
}
|
LLNL/spack | var/spack/repos/builtin/packages/py-azure-mgmt-resource/package.py | Python | lgpl-2.1 | 853 | 0.001172 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyAzureMgmtResource(PythonPackage):
"""Microsoft Azure Resource Management Client Library for Python."""
homepage = "https://github.com/Azure/azure-sdk-for-python"
    pypi = "azure-mgmt-resource/azure-mgmt-resource-10.0.0.zip"
    version('10.0.0', sha256='bd9a3938f5423741329436d2da09693845c2fad96c35fadbd7c5ae5213208345')
depends_on('py-setuptools', type='build')
depends_on('py-msrest@0.5.0:', type=('build', 'run'))
depends_on('py-msrestazure@0.4.32:1', type=('build', 'run'))
depends_on('py-azure-common@1.1:1', type=('build', 'run'))
depends_on('py-azure-mgmt-nspkg', when='^python@:2', type=('build', 'run'))
|
openstack/ceilometer | ceilometer/network/services/lbaas.py | Python | apache-2.0 | 15,220 | 0 | #
# Copyright 2014 Cisco Systems,Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import collections
import warnings
from oslo_log import log
from ceilometer.i18n import _
from ceilometer.network.services import base
from ceilometer import neutron_client
from ceilometer import sample
LOG = log.getLogger(__name__)
LBStatsData = collections.namedtuple(
'LBStats',
['active_connections', 'total_connections', 'bytes_in', 'bytes_out']
)
LOAD_BALANCER_STATUS_V2 = {
'offline': 0,
'online': 1,
'no_monitor': 3,
'error': 4,
'degraded': 5,
'disabled': 6
}
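# Example of the mapping above: a v2 load balancer reported as 'ONLINE' is
# lowercased and mapped to sample volume 1, i.e.
# LOAD_BALANCER_STATUS_V2.get('online', -1) == 1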
class BaseLBPollster(base.BaseServicesPollster):
"""Base Class for Load Balancer pollster"""
def __init__(self, conf):
super(BaseLBPollster, self).__init__(conf)
        self.lb_version = self.conf.service_types.neutron_lbaas_version
warnings.warn('Support for Neutron LBaaS has been deprecated '
'and will be removed in a future release.',
category=DeprecationWarning, stacklevel=3)
def get_load_balancer_status_id(self, value):
if self.lb_version == 'v1':
            resource_status = self.get_status_id(value)
elif self.lb_version == 'v2':
status = value.lower()
resource_status = LOAD_BALANCER_STATUS_V2.get(status, -1)
return resource_status
class LBPoolPollster(BaseLBPollster):
"""Pollster to capture Load Balancer pool status samples."""
FIELDS = ['admin_state_up',
'description',
'lb_method',
'name',
'protocol',
'provider',
'status',
'status_description',
'subnet_id',
'vip_id'
]
@property
def default_discovery(self):
return 'lb_pools'
def get_samples(self, manager, cache, resources):
resources = resources or []
for pool in resources:
LOG.debug("Load Balancer Pool : %s" % pool)
status = self.get_load_balancer_status_id(pool['status'])
if status == -1:
# unknown status, skip this sample
LOG.warning(_("Unknown status %(stat)s received on pool "
"%(id)s, skipping sample")
% {'stat': pool['status'], 'id': pool['id']})
continue
yield sample.Sample(
name='network.services.lb.pool',
type=sample.TYPE_GAUGE,
unit='pool',
volume=status,
user_id=None,
project_id=pool['tenant_id'],
resource_id=pool['id'],
resource_metadata=self.extract_metadata(pool)
)
class LBVipPollster(base.BaseServicesPollster):
"""Pollster to capture Load Balancer Vip status samples."""
FIELDS = ['admin_state_up',
'address',
'connection_limit',
'description',
'name',
'pool_id',
'port_id',
'protocol',
'protocol_port',
'status',
'status_description',
'subnet_id',
'session_persistence',
]
@property
def default_discovery(self):
return 'lb_vips'
def get_samples(self, manager, cache, resources):
resources = resources or []
for vip in resources:
LOG.debug("Load Balancer Vip : %s" % vip)
status = self.get_status_id(vip['status'])
if status == -1:
# unknown status, skip this sample
LOG.warning(_("Unknown status %(stat)s received on vip "
"%(id)s, skipping sample")
% {'stat': vip['status'], 'id': vip['id']})
continue
yield sample.Sample(
name='network.services.lb.vip',
type=sample.TYPE_GAUGE,
unit='vip',
volume=status,
user_id=None,
project_id=vip['tenant_id'],
resource_id=vip['id'],
resource_metadata=self.extract_metadata(vip)
)
class LBMemberPollster(BaseLBPollster):
"""Pollster to capture Load Balancer Member status samples."""
FIELDS = ['admin_state_up',
'address',
'pool_id',
'protocol_port',
'status',
'status_description',
'weight',
]
@property
def default_discovery(self):
return 'lb_members'
def get_samples(self, manager, cache, resources):
resources = resources or []
for member in resources:
LOG.debug("Load Balancer Member : %s" % member)
status = self.get_load_balancer_status_id(member['status'])
if status == -1:
LOG.warning(_("Unknown status %(stat)s received on member "
"%(id)s, skipping sample")
% {'stat': member['status'], 'id': member['id']})
continue
yield sample.Sample(
name='network.services.lb.member',
type=sample.TYPE_GAUGE,
unit='member',
volume=status,
user_id=None,
project_id=member['tenant_id'],
resource_id=member['id'],
resource_metadata=self.extract_metadata(member)
)
class LBHealthMonitorPollster(base.BaseServicesPollster):
"""Pollster to capture Load Balancer Health probes status samples."""
FIELDS = ['admin_state_up',
'delay',
'max_retries',
'pools',
'timeout',
'type'
]
@property
def default_discovery(self):
return 'lb_health_probes'
def get_samples(self, manager, cache, resources):
for probe in resources:
LOG.debug("Load Balancer Health probe : %s" % probe)
yield sample.Sample(
name='network.services.lb.health_monitor',
type=sample.TYPE_GAUGE,
unit='health_monitor',
volume=1,
user_id=None,
project_id=probe['tenant_id'],
resource_id=probe['id'],
resource_metadata=self.extract_metadata(probe)
)
class _LBStatsPollster(base.BaseServicesPollster, metaclass=abc.ABCMeta):
"""Base Statistics pollster.
It is capturing the statistics info and yielding samples for connections
and bandwidth.
"""
def __init__(self, conf):
super(_LBStatsPollster, self).__init__(conf)
self.client = neutron_client.Client(self.conf)
self.lb_version = self.conf.service_types.neutron_lbaas_version
@staticmethod
def make_sample_from_pool(pool, name, type, unit, volume,
resource_metadata=None):
if not resource_metadata:
resource_metadata = {}
return sample.Sample(
name=name,
type=type,
unit=unit,
volume=volume,
user_id=None,
project_id=pool['tenant_id'],
resource_id=pool['id'],
resource_metadata=resource_metadata,
)
def _populate_stats_cache(self, pool_id, cache):
i_cache = cache.setdefault("lbstats", {})
if pool_id not in i_cache:
stats = self.client.pool_stats(pool_id)['stats']
i_cache[pool_id] = LBStatsData(
active_ |
uclouvain/osis | program_management/ddd/service/write/postpone_program_tree_service_mini_training.py | Python | agpl-3.0 | 2,696 | 0.002597 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from typing import List
from django.db import transaction
from program_management.ddd.command import PostponeProgramTreeCommand, CopyProgramTreeToNextYearCommand
from program_management.ddd.domain.program_tree import ProgramTreeIdentity
from program_management.ddd.domain.service.calculate_end_postponement import CalculateEndPostponement
from program_management.ddd.repositories import program_tree_version as tree_version_repo
from program_management.ddd.service.write import copy_program_tree_service
@transaction.atomic()
def postpone_program_tree(
postpone_cmd: 'PostponeProgramTreeCommand'
) -> List['ProgramTreeIdentity']:
identities_created = []
# GIVEN
from_year = postpone_cmd.from_year
end_postponement_year = CalculateEndPostponement.calculate_end_postponement_year_program_tree(
identity=ProgramTreeIdentity(code=postpone_cmd.from_code, year=postpone_cmd.from_year),
        repository=tree_version_repo.ProgramTreeVersionRepository()
)
# WHEN
while from_year < end_postponement_year:
identity_next_year = copy_program_tree_service.copy_program_tree_to_next_year(
copy_cmd=CopyProgramTreeToNextYearCommand(
code=postpone_cmd.from_c | ode,
year=from_year,
)
)
# THEN
identities_created.append(identity_next_year)
from_year += 1
return identities_created
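# Usage sketch (the code and year are placeholders):
# identities = postpone_program_tree(
#     PostponeProgramTreeCommand(from_code='LDROI100B', from_year=2021)
# )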
|
asajeffrey/servo | tests/wpt/web-platform-tests/worklets/resources/credentials.py | Python | mpl-2.0 | 591 | 0 | # Returns a valid response when a request has appropriate credentials.
def main(request, response):
cookie = request.cookies.first(b"cookieName", None)
expected_value = request.GET.first(b"value", None)
source_origin = request.headers.get(b"origin", None)
response_headers = [(b"Content-Type", b"text/javascript"),
(b"Access-Control-Allo | w-Origin", source_origin),
(b"Access-Control-Allow-Credentials", b"true")]
if cookie == expected_value:
return (200, response_headers, u"")
    return (404, response_headers)
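# Illustrative matching request: fetching credentials.py?value=foo with
# credentials included and a cookie cookieName=foo yields the 200 response;
# any other combination yields 404.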
|
wooga/airflow | tests/providers/opsgenie/hooks/test_opsgenie_alert.py | Python | apache-2.0 | 5,041 | 0.000198 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
import requests_mock
from airflow.exceptions import AirflowException
from airflow.models import Connection
from airflow.providers.opsgenie.hooks.opsgenie_alert import OpsgenieAlertHook
from airflow.utils import db
class TestOpsgenieAlertHook(unittest.TestCase):
conn_id = 'opsgenie_conn_id_test'
opsgenie_alert_endpoint = 'https://api.opsgenie.com/v2/alerts'
_payload = {
'message': 'An example alert message',
'alias': 'Life is too short for no alias',
'description': 'Every alert needs a description',
'responders': [
{'id': '4513b7ea-3b91-438f-b7e4-e3e54af9147c', 'type': 'team'},
{'name': 'NOC', 'type': 'team'},
{'id': 'bb4d9938-c3c2-455d-aaab-727aa701c0d8', 'type': 'user'},
{'username': 'trinity@opsgenie.com', 'type': 'user'},
{'id': 'aee8a0de-c80f-4515-a232-501c0bc9d715', 'type': 'escalation'},
{'name': 'Nightwatch Escalation', 'type': 'escalation'},
{'id': '80564037-1984-4f38-b98e-8a1f662df552', 'type': 'schedule'},
{'name': 'First Responders Schedule', 'type': 'schedule'}
],
'visibleTo': [
{'id': '4513b7ea-3b91-438f-b7e4-e3e54af9147c', 'type': 'team'},
{'name': 'rocket_team', 'type': 'team'},
{'id': 'bb4d9938-c3c2-455d-aaab-727aa701c0d8', 'type': 'user'},
{'username': 'trinity@opsgenie.com', 'type': 'user'}
],
'actions': ['Restart', 'AnExampleAction'],
'tags': ['OverwriteQuietHours', 'Critical'],
'details': {'key1': 'value1', 'key2': 'value2'},
'entity': 'An example entity',
'source': 'Airflow',
'priority': 'P1',
'user': 'Jesse',
'note': 'Write this down'
}
_mock_success_response_body = {
"result": "Request will be processed",
"took": 0.302,
"requestId": "43a29c5c-3dbf-4fa4-9c26-f4f71023e120"
}
def setUp(self):
db.merge_conn(
Connection(
conn_id=self.conn_id,
host='https://api.opsgenie.com/',
password='eb243592-faa2-4ba2-a551q-1afdf565c889'
)
)
def test_get_api_key(self):
hook = OpsgenieAlertHook(opsgenie_conn_id=self.conn_id)
api_key = hook._get_api_key()
self.assertEqual('eb243592-faa2-4ba2-a551q-1afdf565c889', api_key)
    def test_get_conn_defaults_host(self):
hook = OpsgenieAlertHook()
hook.get_conn()
self.assertEqual('https://api.opsgenie.com', hook.base_url)
@requests_mock.mock()
def test_call_with_success(self, m):
hook = OpsgenieAlertHook(opsgenie_conn_id=self.conn_id)
m.post(
self.opsgenie_alert_endpoint,
status_code=202,
            json=self._mock_success_response_body
)
resp = hook.execute(payload=self._payload)
self.assertEqual(resp.status_code, 202)
self.assertEqual(resp.json(), self._mock_success_response_body)
@requests_mock.mock()
def test_api_key_set(self, m):
hook = OpsgenieAlertHook(opsgenie_conn_id=self.conn_id)
m.post(
self.opsgenie_alert_endpoint,
status_code=202,
json=self._mock_success_response_body
)
resp = hook.execute(payload=self._payload)
self.assertEqual(resp.request.headers.get('Authorization'),
'GenieKey eb243592-faa2-4ba2-a551q-1afdf565c889')
@requests_mock.mock()
def test_api_key_not_set(self, m):
hook = OpsgenieAlertHook()
m.post(
self.opsgenie_alert_endpoint,
status_code=202,
json=self._mock_success_response_body
)
with self.assertRaises(AirflowException):
hook.execute(payload=self._payload)
@requests_mock.mock()
def test_payload_set(self, m):
hook = OpsgenieAlertHook(opsgenie_conn_id=self.conn_id)
m.post(
self.opsgenie_alert_endpoint,
status_code=202,
json=self._mock_success_response_body
)
resp = hook.execute(payload=self._payload)
self.assertEqual(json.loads(resp.request.body), self._payload)
|
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/core/debug.py | Python | mit | 503 | 0 | # pylint: disable=C0111,R0903
"""Shows that debug is enabled"""
import platform
import core.module
import core.widget
import core.decorators
class Module(core.module.Module):
@core.decorators.every(minutes=60)
def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.full_text))
def full_text(self, widgets):
return "debug"
    def state(self, widget):
return "warning"
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
novapost/django-pimpmytheme | example/example/settings.py | Python | mit | 3,921 | 0 | """
Django settings for example project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(os.path.join(BASE_DIR, ".."))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#^qtdm4ad9_44k+pf+2^ecrm(w9j@w(+s(^e$@s8l=zq%pqtwl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'pimpmytheme',
'example',
'subapp',
'compressor',
'django_nose'
)
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'example.urls'
WSGI_APPLICATION = 'example.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, "example", "static")
STATIC_URL = '/static/'
SITE_ID = 1
COMPRESS_PRECOMPILERS = (('text/less', 'lessc {infile} {outfile}'),)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'context_processors': [
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.request",
"django.template.context_processors.static",
"pimpmytheme.context_processors.get_site",
],
'builtins': [
'django.templatetags.i18n',
'django.templatetags.static',
'django.templatetags.tz',
],
'loaders': [
'pimpmytheme.template_loader.Loader',
'django.template.loaders.app_directories.Loader',
],
}
},
]
STATICFILES_FINDERS = (
"pimpmytheme.static_finder.CustomFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
"compressor.finders.CompressorFinder"
)
NOSE_ARGS = [
'--with-coverage',
'--cover-package=pimpmytheme',
'--verbosity=3',
'--nocapture'
]
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
CUSTOM_THEME_LOOKUP_OBJECT = "example.models.PimpSite"
CUSTOM_THEME_LOOKUP_ATTR = "name"
PIMPMYTHEME_FOLDER = os.path.join(BASE_DIR, "pimp_theme")
LOGGING = {
'version': 1,
'formatters': {
'oneline': {
'format': '%(asctime)s %(levelname)-8s %(name)s %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'oneline',
},
},
'root': {
'level': 'DEBUG',
'handlers': ['console'],
},
}
|
suraj-jayakumar/lstm-rnn-ad | src/testdata/random_data_time_series/generate_data.py | Python | apache-2.0 | 1,042 | 0.019194 | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 23 11:15:12 2016
@author: suraj
"""
import random
import numpy as np
import pickle
import matplotlib.pyplot as plt
attachRateList = []
for i in range(3360):
attachRateList.append(random.uniform(4,6))
attachRateList = np.array(attachRateList)
encoded_attach_rate_list = np.fft.fft(attachRateList)
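# Sanity check (illustrative): the FFT encoding is invertible, so
# np.allclose(np.fft.ifft(encoded_attach_rate_list).real, attachRateList)
# holds for the raw series.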
day_number_list = [i%7 for i in range(3360)]
encoded_day_number_list = np.fft.fft(day_number_list)
time_number_list = [i%96 for i in range(3360)]
encoded_time_number_list = np.fft.fft(time_number_list)
final_list_x = np.array([[encoded_day_number_list.real[i],encoded_day_number_list.imag[i],encoded_time_number_list.real[i],encoded_time_number_list.imag[i],encoded_attach_rate_list.real[i],encoded_attach_rate_list.imag[i]] for i in range(3360)])
final_list_y = [ (encoded_attach_rate_list[i].real,encoded_attach_rate_list[i].imag) for i in range(len(encoded_attach_rate_list)) ]
pickle.dump(final_list_x,open('x_att.p','wb'))
pickle.dump(final_list_y,open('y_att.p','wb'))
|
jasperges/blenderseed | properties/nodes/kelemen_brdf.py | Python | mit | 5,574 | 0 |
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2014-2017 The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import bpy
from bpy.types import NodeSocket, Node
from ...util import asUpdate
from ..materials import AppleseedMatLayerProps
from . import AppleseedNode, AppleseedSocket
class AppleseedKelemenReflectanceSocket(NodeSocket, AppleseedSocket):
bl_idname = "AppleseedKelemenReflectance"
bl_label = "Diffuse Reflectance"
socket_value = AppleseedMatLayerProps.kelemen_matte_reflectance
def draw(self, context, layout, node, text):
if self.is_output or self.is_linked:
layout.label(text)
else:
layout.prop(self, "socket_value", text=text)
def draw_color(self, context, node):
return 0.8, 0.8, 0.5, 1.0
class AppleseedKelemenMultiplierSocket(NodeSocket, AppleseedSocket):
bl_idname = "AppleseedKelemenMultiplier"
bl_label = "Diffuse Multiplier"
socket_value = AppleseedMatLayerProps.kelemen_matte_multiplier
def draw(self, context, layout, node, text):
if self.is_output or self.is_linked:
layout.label(text)
else:
layout.prop(self, "socket_value", text=text)
def draw_color(self, context, node):
return 0.5, 0.5, 0.5, 1.0
class AppleseedKelemenRoughnessSocket(NodeSocket, AppleseedSocket):
bl_idname = "AppleseedKelemenRoughness"
bl_label = "Roughness"
socket_value = AppleseedMatLayerProps.kelemen_roughness
def draw(self, context, layout, node, text):
if self.is_output or self.is_linked:
layout.label(text)
else:
layout.prop(self, "socket_value", text=text)
def draw_color(self, context, node):
return 0.5, 0.5, 0.5, 1.0
class AppleseedKelemenSpecReflSocket(NodeSocket, AppleseedSocket):
bl_idname = "AppleseedKelemenSpecRefl"
bl_label = "Specular Reflectance"
socket_value = AppleseedMatLayerProps.kelemen_specular_reflectance
def draw(self, context, layout, node, text):
if self.is_output or self.is_linked:
layout.label(text)
else:
layout.prop(self, "socket_value", text=text)
def draw_color(self, context, node):
return 0.8, 0.8, 0.5, 1.0
class AppleseedKelemenSpecMultSocket(NodeSocket, AppleseedSocket):
bl_idname = "AppleseedKelemenSpecMult"
bl_label = "Specular Multiplier"
socket_value = AppleseedMatLayerProps.kelemen_specular_multiplier
def draw(self, context, layout, node, text):
if self.is_output or self.is_linked:
layout.label(text)
else:
layout.prop(self, "socket_value", text=text)
def draw_color(self, context, node):
return 0.5, 0.5, 0.5, 1.0
class AppleseedKelemenNode(Node, AppleseedNode):
bl_idname = "AppleseedKelemenNode"
bl_label = "Kelemen BRDF"
bl_icon = 'SMOOTH'
node_type = 'kelemen'
def init(self, context):
self.inputs.new('AppleseedKelemenReflectance', "Reflectance")
self.inputs.new('AppleseedKelemenMultiplier', "Multiplier")
self.inputs.new('AppleseedKelemenSpecRefl', "Specular Reflectance")
self.inputs.new('AppleseedKelemenSpecMult', "Specular Multiplier")
self.inputs.new('AppleseedKelemenRoughness', "Roughness")
self.outputs.new('NodeSocketShader', "BRDF | ")
def draw_buttons(self, context, layout):
pass
def draw_buttons_ext(self, context, layout):
pass
def copy(self, node):
        pass
def free(self):
asUpdate("Removing node ", self)
def draw_label(self):
return self.bl_label
def register():
bpy.utils.register_class(AppleseedKelemenMultiplierSocket)
bpy.utils.register_class(AppleseedKelemenReflectanceSocket)
bpy.utils.register_class(AppleseedKelemenRoughnessSocket)
bpy.utils.register_class(AppleseedKelemenSpecReflSocket)
bpy.utils.register_class(AppleseedKelemenSpecMultSocket)
bpy.utils.register_class(AppleseedKelemenNode)
def unregister():
bpy.utils.unregister_class(AppleseedKelemenNode)
bpy.utils.unregister_class(AppleseedKelemenMultiplierSocket)
bpy.utils.unregister_class(AppleseedKelemenReflectanceSocket)
bpy.utils.unregister_class(AppleseedKelemenRoughnessSocket)
bpy.utils.unregister_class(AppleseedKelemenSpecReflSocket)
bpy.utils.unregister_class(AppleseedKelemenSpecMultSocket)
|
configuresystems/restful-api-with-flask | app/modules/todo/tests.py | Python | mit | 5,136 | 0 | from flask import url_for
from app.testing import BaseTestCase
import json
class TodoTests(BaseTestCase):
def test_get_all_tasks(self):
"""Test intended to pull all tasks, ensure status 200 response,
        and that the list has a length of 2"""
with self.client:
tasks = [{'title': "Get Bread",
'description': "Wheat!"
},
{'title': "Get my To-Do List",
'description': "Check all of the items on my ToDo List"
}]
for task in tasks:
add = self.client.post(url_for('create_task'),
headers={"Content-Type":
"application/json"},
data=json.dumps(task))
with self.client:
response = self.client.get(url_for('get_tasks'))
self.assert200(response)
            self.assertEqual(len(response.json.get('tasks')), 2)
def test_get_task(self):
"""Test indended to ensure status 200 and pull the task
with an id of 1, then check its title"""
with self.client:
tasks = {'title': "Test Single Get",
'description': "Can we get it successfully"
}
add = self.client.post(url_for('create_task'),
headers={"Content-Type":
"application | /json"},
data=json.dumps(tasks))
with self.client:
response = self.client.get(url_for('get_task', id=1))
self.assert200(response)
self.assertEqual(response.json.get('task')['title'],
"Test Single Get")
def test_post_task(self):
"""Test the ability to post a new task to our ToDo list.
        We are testing for response 201 and a content match, then we
        check what happens when you post an invalid request
        (response 400).
"""
new_task = {'title': "I've created a post",
'description': "I should take a break"
}
with self.client:
response = self.client.post(url_for('create_task'),
headers={"Content-Type":
"application/json"},
data=json.dumps(new_task))
self.assert_status(response, 201)
self.assertEqual(response.json.get('task')['title'],
"I've created a post")
"""Test invalid POST request"""
fail = self.client.post(url_for('create_task'),
headers={"Content-Type":
"application/json"},
data=json.dumps({'titel':
"This should fail"})
)
self.assert400(fail)
def test_update_task(self):
"""Test the ability to update our tasks"""
new_task = {'title': "Test Updattin Tasks",
'description': "Let's ensure we can delete tasks"
}
with self.client:
add = self.client.post(url_for('create_task'),
headers={"Content-Type":
"application/json"},
data=json.dumps(new_task))
self.assert_status(add, 201)
self.assertEqual(add.json.get('task')['title'],
"Test Updattin Tasks"
)
with self.client:
update = {'title': "Test Updating Tasks"}
fix = self.client.put(url_for('update_task',
id=1),
headers={"Content-Type":
"application/json"},
data=json.dumps(update))
self.assert200(fix)
def test_delete_tasks(self):
"""Test to ensure we can delete data"""
new_task = {'title': "Test Deleting Tasks",
'description': "Let's ensure we can delete tasks"
}
with self.client:
add = self.client.post(url_for('create_task'),
headers={"Content-Type":
"application/json"},
data=json.dumps(new_task))
self.assert_status(add, 201)
self.assertEqual(add.json.get('task')['title'],
"Test Deleting Tasks"
)
with self.client:
response = self.client.delete(url_for('reset_task',
id=add.json.get('id')))
self.assert200(response)
self.assertEqual(len(response.json.get('tasks')), 0)
|
yhteentoimivuuspalvelut/ytp-tools | continuous-deployment/visualization/generate_build_duration_summary.py | Python | agpl-3.0 | 2,157 | 0.003245 | #!/usr/bin/env python
import sys
import os
import glob
from collections import defaultdict
from operator import itemgetter
import json
import logging
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s')
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
def parse_task_durations_from_log_files():
log_directory = '../deployment_cache'
results = defaultdict(list)
for build_directory in os.walk(log_directory).next()[1]:
logfile_list = sorted(glob.glob(log_directory + '/' + build_directory + '/time_log_*.log'))
timestamp = int(build_directory.strip('cd-ytp-')[:-3])
if len(logfile_list) != 5:
            log.warn("could not find expected log files for build {0}, skipping build".format(build_directory))
continue
for playbook_logfile in logfile_list[1:3]:
with open(playbook_logfile, 'r') as logfile:
for task_line in logfile:
split = task_line.replace('\n', '')
split = split.split(' s, ')
duration = float(split[0])
                    split = split[1].split(' | ')
if len(split) == 2:
results['{0} ({1})'.format(split[1], split[0])].append({"x": timestamp, "y": duration})
elif len(split) == 1:
results['{0} (no role)'.format(split[0])].append({"x": timestamp, "y": duration})
else:
log.error("Encountered unparseable line {0}".format(task_line))
sys.exit()
return results
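# Example input lines the parser above expects (contents are hypothetical):
# "12.34 s, nginx | install nginx" -> key "install nginx (nginx)"
# "0.56 s, gather facts"           -> key "gather facts (no role)"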
def transform_data_for_rickshaw(results):
series = []
for task, durations in results.iteritems():
series.append({"name": task, "data": sorted(durations, key=itemgetter('x'))})
return series
def write_data_as_json_to_file(data, filename):
with open(filename, 'w') as jsonfile:
json.dump(data, jsonfile, sort_keys=True, indent=4)
if __name__ == "__main__":
results = parse_task_durations_from_log_files()
write_data_as_json_to_file(transform_data_for_rickshaw(results), "build_durations.json")
|
dpgaspar/Flask-AppBuilder | examples/simpleform/app/__init__.py | Python | bsd-3-clause | 382 | 0 | import logging
from flask_appbuilder import AppBuilder, SQLA
"""
Logging configuration
"""
logging.basicConfig(format="%(asctime)s:%(levelname)s:%(name)s:%(message)s")
logging.getLogger().setLevel(logging.DEBUG)
app = Flask(__name__)
app.config.from_object("config")
db = SQLA(app)
appbuilder = AppBuilder(app, db.session)
from . import views # noqa
|
OpenSciViz/cloudstack | openstack/src/python/nova-libvirt/backup/utils.py | Python | mit | 18,922 | 0.000106 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright (c) 2011 Piston Cloud Computing, Inc
# Copyright (c) 2011 OpenStack Foundation
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import os
import re
from lxml import etree
from oslo_concurrency import processutils
from oslo_log import log as logging
import nova.conf
from nova.i18n import _
from nova.i18n import _LI
from nova.i18n import _LW
from nova.objects import fields as obj_fields
from nova import utils
from nova.virt.disk import api as disk
from nova.virt import images
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt.volume import remotefs
from nova.virt import volumeutils
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
RESIZE_SNAPSHOT_NAME = 'nova-resize'
def execute(*args, **kwargs):
return utils.execute(*args, **kwargs)
def get_iscsi_initiator():
return volumeutils.get_iscsi_initiator()
def create_image(disk_format, path, size):
"""Create a disk image
:param disk_format: Disk image format (as known by qemu-img)
:param path: Desired location of the disk image
:param size: Desired size of disk im | age. May be given as an int or
a string. If given as an int, it will be interpreted
as bytes. If it's a string, it should consist of a number
with an optional suffix ('K' for Kibibytes,
M for Mebibytes, 'G' for Gibibytes, 'T' for Tebibytes).
If no suffix is given, it will be interpreted as bytes.
"""
execute('qemu-img', 'create', '-f', disk_format, path, size)
def create_cow_image(backing_file, path, size=None):
"""Create COW image
Creates a COW image with the given backing file
:param backing_file: Existing image on which to base the COW image
    :param path: Desired location of the COW image
    :param size: Desired size (in bytes) of the COW image, or None to
                 inherit the virtual size of the backing file
"""
base_cmd = ['qemu-img', 'create', '-f', 'qcow2']
cow_opts = []
if backing_file:
cow_opts += ['backing_file=%s' % backing_file]
base_details = images.qemu_img_info(backing_file)
else:
base_details = None
# Explicitly inherit the value of 'cluster_size' property of a qcow2
# overlay image from its backing file. This can be useful in cases
# when people create a base image with a non-default 'cluster_size'
# value or cases when images were created with very old QEMU
# versions which had a different default 'cluster_size'.
if base_details and base_details.cluster_size is not None:
cow_opts += ['cluster_size=%s' % base_details.cluster_size]
if size is not None:
cow_opts += ['size=%s' % size]
if cow_opts:
# Format as a comma separated list
csv_opts = ",".join(cow_opts)
cow_opts = ['-o', csv_opts]
cmd = base_cmd + cow_opts + [path]
execute(*cmd)
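# Illustrative effect (paths and cluster size are hypothetical): with a
# qcow2 backing file this runs roughly
#     qemu-img create -f qcow2 \
#         -o backing_file=/base/abc123,cluster_size=65536 /instances/disk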
def create_ploop_image(disk_format, path, size, fs_type):
"""Create ploop image
:param disk_format: Disk image format (as known by ploop)
:param path: Desired location of the ploop image
:param size: Desired size of ploop image. May be given as an int or
a string. If given as an int, it will be interpreted
as bytes. If it's a string, it should consist of a number
with an optional suffix ('K' for Kibibytes,
                 'M' for Mebibytes, 'G' for Gibibytes, 'T' for Tebibytes).
If no suffix is given, it will be interpreted as bytes.
:param fs_type: Filesystem type
"""
if not fs_type:
fs_type = CONF.default_ephemeral_format or \
disk.FS_FORMAT_EXT4
execute('mkdir', '-p', path)
disk_path = os.path.join(path, 'root.hds')
execute('ploop', 'init', '-s', size, '-f', disk_format, '-t', fs_type,
disk_path, run_as_root=True, check_exit_code=True)
# Add read access for all users, because "ploop init" creates
# disk with rw rights only for root. OpenStack user should have access
# to the disk to request info via "qemu-img info"
execute('chmod', '-R', 'a+r', path,
run_as_root=True, check_exit_code=True)
def pick_disk_driver_name(hypervisor_version, is_block_dev=False):
"""Pick the libvirt primary backend driver name
    If the hypervisor supports multiple backend drivers, we have to tell
    libvirt which one should be used.
    Xen supports the following drivers: "tap", "tap2", "phy", "file", or
    "qemu", with "qemu" being the preferred one. Qemu only supports "qemu".
    :param is_block_dev: whether the image is backed by a block device
:returns: driver_name or None
"""
if CONF.libvirt.virt_type == "xen":
if is_block_dev:
return "phy"
else:
# 4002000 == 4.2.0
if hypervisor_version >= 4002000:
try:
execute('xend', 'status',
run_as_root=True, check_exit_code=True)
except OSError as exc:
if exc.errno == errno.ENOENT:
LOG.debug("xend is not found")
# libvirt will try to use libxl toolstack
return 'qemu'
else:
raise
except processutils.ProcessExecutionError:
LOG.debug("xend is not started")
# libvirt will try to use libxl toolstack
return 'qemu'
# libvirt will use xend/xm toolstack
try:
out, err = execute('tap-ctl', 'check', check_exit_code=False)
if out == 'ok\n':
# 4000000 == 4.0.0
if hypervisor_version > 4000000:
return "tap2"
else:
return "tap"
else:
LOG.info(_LI("tap-ctl check: %s"), out)
except OSError as exc:
if exc.errno == errno.ENOENT:
LOG.debug("tap-ctl tool is not installed")
else:
raise
return "file"
elif CONF.libvirt.virt_type in ('kvm', 'qemu'):
return "qemu"
else:
# UML doesn't want a driver_name set
return None
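# Minimal sketch of the version encoding assumed by the comparisons above
# (e.g. "4002000 == 4.2.0"): major * 1000000 + minor * 1000 + micro.
def _encode_hypervisor_version(major, minor, micro):
    # Hypothetical helper for illustration; not part of the original module.
    return major * 1000000 + minor * 1000 + micro
assert _encode_hypervisor_version(4, 2, 0) == 4002000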
def get_disk_size(path, format=None):
"""Get the (virtual) size of a disk image
:param path: Path to the disk image
:param format: the on-disk format of path
:returns: Size (in bytes) of the given disk image as it would be seen
by a virtual machine.
"""
size = images.qemu_img_info(path, format).virtual_size
return int(size)
def get_disk_backing_file(path, basename=True, format=None):
"""Get the backing file of a disk image
:param path: Path to the disk image
:returns: a path to the image's backing store
"""
backing_file = images.qemu_img_info(path, format).backing_file
if backing_file and basename:
backing_file = os.path.basename(backing_file)
return backing_file
def copy_image(src, dest, host=None, receive=False,
on_execute=None, on_completion=None,
compression=True):
"""Copy a disk image to an existing directory
:param src: Source image
:param dest: Destination path
:param host: Remote host
:param receive: Reverse the rsync direction
:param on_execute: Callback method to store pid of process in cache
:param on_completion: Callback |
fabric8-analytics/fabric8-analytics-worker | tests/storages/test_postgres.py | Python | gpl-3.0 | 4,228 | 0.003075 | """Test the Postgres interface - data storage and retrieval."""
import datetime
import flexmock
import selinon
from f8a_worker.defaults import configuration
from f8a_worker.enums import EcosystemBackend
from f8a_worker.models import (Ecosystem, Package, Version, Analysis, WorkerResult,
create_db_scoped_session)
from f8a_worker.storages.postgres import BayesianPostgres
from ..conftest import rdb
class TestBayesianPostgres:
"""Test the Postgress interface - data storage and retrieval."""
def setup_method(self, method):
"""Get the DB session and prepare test data."""
rdb()
self.s = create_db_scoped_session()
self.en = 'foo'
self.pn = 'bar'
self.vi = '1.1.1'
self.e = Ecosystem(name=self.en, backend=EcosystemBackend.mave | n)
self.p = Package(ecosystem=self.e, name=self.pn)
self.v = Version(package=self.p, identifier=self.vi)
self.a = Analysis(version=self.v, finished_at=datetime.datetime.utcnow())
self.a2 = Analysis(version=self.v,
finished_at=datetime.datetime.utcnow() + datetime.timedelta(seconds=10))
self.s.add(self.a)
self.s.add(self.a2)
self.s.commit()
self.bp = BayesianPost | gres(connection_string=configuration.POSTGRES_CONNECTION)
assert method
def test_retrieve_normal(self):
"""Test the ability to retrieve data from Postgress."""
wid = 'x'
w = 'y'
tr = {'1': '2'}
wr = WorkerResult(analysis=self.a, worker_id=wid, worker=w, task_result=tr)
self.s.add(wr)
self.s.commit()
assert self.bp.retrieve('whatever', w, wid) == tr
def test_retrieve_s3(self):
"""Test the ability to retrieve data from Postgress, target is mocked S3 storage."""
wid = 'x'
w = 'y'
tr = {'version_id': 123}
res = {'real': 'result'}
wr = WorkerResult(analysis=self.a, worker_id=wid, worker=w, task_result=tr)
self.s.add(wr)
self.s.commit()
s3_storage = flexmock()
s3_storage.\
should_receive('retrieve_task_result').\
with_args(self.en, self.pn, self.vi, w).\
and_return(res)
flexmock(selinon.StoragePool).\
should_receive('get_connected_storage').\
with_args('S3Data').\
and_return(s3_storage)
assert self.bp.retrieve('blahblah', w, wid) == res
def test_store_normal(self):
"""Test the ability to store data to Postgress."""
tn = 'asd'
tid = 'sdf'
res = {'some': 'thing'}
self.bp.store(node_args={}, flow_name='blah', task_name=tn, task_id=tid, result=res)
assert self.bp.retrieve('doesntmatter', tn, tid) == res
def test_store_already_exists(self):
"""Test if database integrity is checked.
The second attempt to store results should be ignored.
"""
tn = 'asd'
tid = 'sdf'
res = {'some': 'thing'}
self.bp.store(node_args={}, flow_name='blah', task_name=tn, task_id=tid, result=res)
self.bp.store(
node_args={}, flow_name='blah', task_name=tn, task_id=tid,
result={'some': 'other-thing'}
)
result = self.bp.retrieve(flow_name='blah', task_name=tn, task_id=tid)
assert result.get('some') == 'thing'
def test_get_latest_task_result(self):
"""Test the function to get the latest task result from database."""
tn = 'asd'
tid = 'sdf'
res = {'some': 'thing'}
self.bp.store(node_args={'document_id': self.a.id},
flow_name='blah', task_name=tn, task_id=tid, result=res)
res['later'] = 'aligator'
self.bp.store(node_args={'document_id': self.a2.id},
flow_name='blah', task_name=tn, task_id=tid + '2', result=res)
assert self.bp.get_latest_task_result(self.en, self.pn, self.vi, tn) == res
def test_get_latest_task_result_no_results(self):
"""Test the function to get the latest task result from empty database."""
assert self.bp.get_latest_task_result(self.en, self.pn, self.vi, 'asd') is None
|
itsapi/pycraft | events.py | Python | gpl-2.0 | 1,083 | 0.003693 | from random import random
import render, player
def process_events(events, server):
new_blocks = {}
for event in events:
if event['time_remaining'] <= 0:
new_blocks.update(event['func'](server, *eve | nt['args']))
events.remove(event)
else:
event['time_remaining'] -= 1
return new_blocks
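# Illustrative event shape consumed by process_events (hypothetical values):
# the event fires once time_remaining counts down to zero, e.g. a delayed
# explosion three ticks from now centred on block (10, 20).
# example_event = {'time_remaining': 3, 'func': boom, 'args': (10, 20)}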
def boom(server, x, y):
new_blocks = {}
radius = 5
blast_strength = 85
for tx in range(x - radius*2, x + radius*2):
new_blocks[tx] = {}
for ty in rang | e(y - radius, y + radius):
if (render.in_circle(tx, ty, x, y, radius) and tx in server.map_ and ty >= 0 and ty < len(server.map_[tx]) and
player.can_strength_break(server.map_[tx][ty], blast_strength)):
if not render.in_circle(tx, ty, x, y, radius - 1):
if random() < .5:
new_blocks[tx][ty] = ' '
else:
new_blocks[tx][ty] = ' '
server.splash_damage(x, y, radius*2, blast_strength/3)
return new_blocks |
sipwise/repoapi | repoapi/signals.py | Python | gpl-3.0 | 6,714 | 0 | # Copyright (C) 2022 The Sipwise Team - http://sipwise.com
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import structlog
from django.apps import apps
from django.db.models.signals import post_save
from django.dispatch import receiver
from . import utils
from .models.wni import workfront_re_branch
from .tasks import get_jbi_files
from .tasks import jenkins_remove_project
from release_dashboard.utils.build import is_ngcp_project
logger = structlog.get_logger(__name__)
@receiver(
post_save, sender="repoapi.JenkinsBuildInfo", dispatch_uid="jbi_manage"
)
def jbi_manage(sender, **kwargs):
if kwargs["created"]:
instance = kwargs["instance"]
if instance.is_job_url_allowed():
get_jbi_files.delay(
instance.pk, instance.jobname, instance.buildnumber
)
def gerrit_repo_add(instance):
log = logger.bind(
instance=str(instance),
)
if instance.param_ppa == "$ppa": |
log.warn("ppa unset, skip removal")
return
GerritRepoInfo = apps.get_model("repoapi", "GerritRepoInfo")
gri = GerritRepoInfo.objects
ppa, created = gri.get_or_create(
param_ppa=instance.param_ppa,
gerrit_change=instance.gerrit_change,
defaults={"projectname": instance.projectname},
)
if created:
lo | g.debug("ppa created", ppa=str(ppa))
elif ppa.projectname == "unknown":
ppa.projectname = instance.projectname
ppa.save()
log.info("ppa projectname updated")
def gerrit_repo_del(instance):
log = logger.bind(
instance=str(instance),
)
if instance.param_ppa == "$ppa":
log.warn("ppa unset, skip removal")
return
GerritRepoInfo = apps.get_model("repoapi", "GerritRepoInfo")
gri = GerritRepoInfo.objects
try:
ppa = gri.get(
param_ppa=instance.param_ppa, gerrit_change=instance.gerrit_change
)
ppa.delete()
log.debug("removed ppa", ppa=str(ppa))
except GerritRepoInfo.DoesNotExist:
pass
qs = gri.filter(param_ppa=instance.param_ppa)
ppa_count = qs.count()
project_ppa_count = qs.filter(projectname=instance.projectname).count()
if ppa_count == 0:
utils.jenkins_remove_ppa(instance.param_ppa)
elif project_ppa_count == 0:
log.debug("remove source+packages from ppa")
jenkins_remove_project.delay(instance.id)
else:
log.debug(
"nothing to do here",
ppa_count=ppa_count,
project_ppa_count=project_ppa_count,
)
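# Decision summary for gerrit_repo_del (derived from the branches above):
#   ppa_count == 0         -> remove the whole PPA
#   project_ppa_count == 0 -> remove only this project's source+packages
#   otherwise              -> other open changes still use the PPA; no-op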
@receiver(
post_save,
sender="repoapi.JenkinsBuildInfo",
dispatch_uid="gerrit_repo_manage",
)
def gerrit_repo_manage(sender, **kwargs):
if kwargs["created"]:
instance = kwargs["instance"]
log = logger.bind(
instance=str(instance),
ppa=instance.param_ppa,
)
if instance.param_ppa == "$ppa":
log.warn("ppa unset, skip")
return
if (
instance.jobname.endswith("-repos")
and instance.result == "SUCCESS"
):
logger.debug("we need to count this")
if instance.gerrit_eventtype == "patchset-created":
gerrit_repo_add(instance)
elif instance.gerrit_eventtype == "change-merged":
gerrit_repo_del(instance)
elif (
instance.jobname.endswith("-cleanup")
and instance.result == "SUCCESS"
and instance.gerrit_eventtype == "change-abandoned"
):
log.debug("we need to count this")
gerrit_repo_del(instance)
def workfront_release_target(instance, wid):
if not is_ngcp_project(instance.projectname):
logger.info(
"%s not a NGCP project, skip release_target", instance.projectname
)
return
branch = instance.param_branch
if workfront_re_branch.search(branch):
release = branch
else:
release = utils.get_next_release(branch)
if release:
utils.workfront_set_release_target(wid, release)
def workfront_note_add(instance, message, release_target=False):
WorkfrontNoteInfo = apps.get_model("repoapi", "WorkfrontNoteInfo")
wni = WorkfrontNoteInfo.objects
workfront_ids = WorkfrontNoteInfo.getIds(instance.git_commit_msg)
from django.conf import settings
for wid in workfront_ids:
if not instance.gerrit_eventtype:
change = WorkfrontNoteInfo.getCommit(instance.git_commit_msg)
url = settings.GITWEB_URL.format(instance.projectname, change)
eventtype = "git-commit"
else:
change = instance.gerrit_change
url = settings.GERRIT_URL.format(instance.gerrit_change)
eventtype = instance.gerrit_eventtype
note, created = wni.get_or_create(
workfront_id=wid, gerrit_change=change, eventtype=eventtype
)
if created:
if not utils.workfront_note_send(wid, "%s %s " % (message, url)):
logger.error("remove related WorkfrontNoteInfo")
note.delete()
if release_target:
workfront_release_target(instance, wid)
def workfront_note_manage(sender, **kwargs):
"""
<name>-get-code job is the first in the flow that has the proper
GIT_CHANGE_SUBJECT envVar set, so git_commit_msg is fine
"""
if kwargs["created"]:
instance = kwargs["instance"]
if instance.result != "SUCCESS":
return
if instance.jobname.endswith("-get-code"):
set_release_target = True
if instance.gerrit_eventtype == "change-merged":
msg = "%s.git[%s] review merged"
elif instance.gerrit_eventtype == "patchset-created":
msg = "%s.git[%s] review created"
set_release_target = False
else:
msg = "%s.git[%s] commit created"
workfront_note_add(
instance,
msg % (instance.projectname, instance.param_branch),
set_release_target,
)
|
nyu-mhealth/project-smsurvey | main/smsurvey/core/services/question_service.py | Python | gpl-3.0 | 2,074 | 0.001929 | import pickle
import boto3
from botocore.exceptions import ClientError
from smsurvey.core.model.question import Question
from smsurvey.core.model.question import QuestionOperationException
from smsurvey import config
class QuestionService:
def __init__(self, cache_name=config.question_backend_name, local=config.local):
if local:
self.dynamo = boto3.client('dynamodb', region_name='us-west-2', endpoint_url=config.dynamo_url_local)
else:
self.dynamo = boto3.client('dynamodb', region_name='us-east-1')
self.cache_name = cache_name
def insert(self, protocol_id, question_number, question, safe=True):
if not issubclass(type(question), Question):
raise QuestionOperationException("Object is not a survey question")
if safe:
if self.get(protocol_id, question_number) is not None:
raise QuestionOperationException("Question with this ID already exists in cache")
dumped = pickle.dumps(question)
self.dynamo.put_item(
TableName=self.cache_name,
Item={
'question_number': {
'S': str(question_number)
},
'protocol_id': {
'S': str(protocol_id)
},
'question': {
'B': dumped
}
}
)
def get(self, protocol_id, question_number):
try:
response = self.dynamo.get_item | (
TableName=self.cache_ | name,
Key={
'question_number': {'S': str(question_number)},
'protocol_id': {'S': str(protocol_id)}
}
)
except ClientError as e:
print(e.response['Error']['Message'])
raise QuestionOperationException("Error occurred trying to get item")
else:
if 'Item' in response:
return pickle.loads(response['Item']['question']['B'])
else:
return None
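# Minimal usage sketch (assumes a reachable DynamoDB table; the protocol id,
# question number and Question subclass instance are hypothetical):
# service = QuestionService()
# service.insert('protocol-1', 1, question)  # safe=True raises on duplicates
# loaded = service.get('protocol-1', 1)      # returns None when absent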
|
mmcardle/MServe | django-mserve/jobservice/tasks.py | Python | lgpl-2.1 | 4,498 | 0.012228 | ########################################################################
#
# University of Southampton IT Innovation Centre, 2011
#
# Copyright in this library belongs to the University of Southampton
# University Road, Highfield, Southampton, UK, SO17 1BJ
#
# This software may not be used, sold, licensed, transferred, copied
# or reproduced in whole or in part in any manner or form or in or
# on any media by any person other than in accordance with the terms
# of the Licence Agreement supplied with the software, or otherwise
# without the prior written consent of the copyright owners.
#
# This software is distributed WITHOUT ANY WARRANTY, without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE, except where stated in the Licence Agreement supplied with
# the software.
#
# Created By : Mark McArdle
# Created Date : 2011-03-25
# Created for Project : PrestoPrime
#
########################################################################
import os.path
from celery.task import task
from celery.task.sets import subtask
from django.core.files import File
import logging
import subprocess
import string
import shutil
import pycurl
import tempfile
import re
import sys
class Storage:
def __init__(self):
self.contents = []
def store(self, | buf):
        self.contents.append(buf)  # collect each received header line
def __str__(self):
return ", ".join(self.contents)
@task
def copyfromurl(inputs,outputs,options={},callbacks=[]):
url = options["url"]
logging.info(url)
tfile = tempfile.NamedTemporaryFile('wb',delete=False)
retrieved_headers = Storage()
f = open(tfile.name,'w')
c = pycurl.Curl()
c.setopt(c.URL, str(url))
c.setopt(pycurl.FO | LLOWLOCATION, 1)
c.setopt(c.WRITEFUNCTION, f.write)
c.setopt(c.HEADERFUNCTION, retrieved_headers.store)
c.perform()
status = c.getinfo(c.HTTP_CODE)
c.close()
f.close()
logging.debug(retrieved_headers)
filename = "Imported File"
for header in retrieved_headers.contents:
if header.lower().startswith("content-disposition"):
filename = re.match(".*filename=(?P<filename>.*)", header).group('filename')
    if status >= 400:
logging.warn("Copy From URL %s return error status code '%s' " % (url, status))
return { "message" : "Copy from url failed error_code '%s'" % status }
else:
mfileid = inputs[0]
from dataservice.models import MFile
mfile = MFile.objects.get(id=mfileid)
filename = mfile.service.get_unique_name(filename)
mfile.update_mfile(filename, file=File(open(tfile.name, 'r')))
mfile.save()
for callback in callbacks:
subtask(callback).delay()
return { "message" : "Copy from url was successful"}
# Blender Command Line API
#
# Render a Picture
# blender -b file.blend -o //file -F JPEG -x 1 -f 1
#
# Render a movie
# blender -b file.blend -x 1 -o //file -F MOVIE -s 003 -e 005 -a
#
# Render a Series
# blender -b file.blend -x 1 -o //file -F "PNG" -s ss -e ee -a
@task
def render_blender(inputs,outputs,options={},callbacks=[]):
padding = 4
frame = options["frame"]
if options.has_key("fname"):
fname = options["format"]
else:
fname="image"
if options.has_key("format"):
format = options["format"]
else:
format="PNG"
mfileid = inputs[0]
from dataservice.models import MFile
mf = MFile.objects.get(id=mfileid)
inputfile = mf.file.path
outputfile = outputs[0]
logging.info("Processing render job %s frame: %s " % (inputfile,frame))
if not os.path.exists(inputfile):
logging.info("Scene %s does not exist" % inputfile)
return False
[outputdir,ffff]= os.path.split(outputfile)
hashes = "#" * padding
outputformat = "%s/%s.%s" % (outputdir,fname,hashes)
ss= string.zfill(str(frame), padding)
args = ["blender","-b",inputfile,"-x","1","-o",outputformat,"-F",format.upper(),"-s",ss,"-e",ss,"-a"]
logging.info(args)
n = str(frame).zfill(padding)
resultfile = os.path.join(outputdir,"%s.%s.%s"%(fname,n,format.lower()))
ret = subprocess.call(args)
if resultfile != outputfile:
logging.debug("result file %s is not outputfile %s ... Moving" % (resultfile, outputfile))
shutil.move(resultfile, outputfile)
for callback in callbacks:
subtask(callback).delay()
return ret
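# Worked example of the frame naming above (padding=4, fname='image',
# format='PNG', frame=3): outputformat ends in 'image.####', blender expands
# '####' to the zero-filled frame number, and resultfile becomes
# '<outputdir>/image.0003.png' before the optional move to outputfile.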
|
Trii/NoseGAE | examples/function_manual_config/test.py | Python | bsd-2-clause | 348 | 0.002874 | fr | om webtest import TestApp
import helloworld
import os
def test_index():
test_index.testbed.init_taskqueue_stub(task_retry_seconds=42, root_path=os.path.dirname(__file__))
app = TestApp(helloworld.app)
# fires off a task queue and should pass without exceptions
response = app.get('/')
ass | ert 'Hello world!' in str(response)
|
wukong-m2m/NanoKong | tools/inteldemo201202/setThreshold.py | Python | gpl-2.0 | 542 | 0.012915 | #!/usr/bin/python
import sys
import pynvc
def setThreshold(destination, threshold):
pynvc.sendWithRetryAndCheckedReceiveAPPMSG(
| destination=destination,
command=2,
allowedReplies=[12],
payload=[threshold])
if __name__ == "__main__":
pynvc.init()
if len(sys.argv)<3 or int(sys.argv[2]) < 0 or int(sys.argv[2]) > | 255:
print "setThreshold <nodeid> <threshold>"
print "0<=threshold<=255"
else:
setThreshold(int(sys.argv[1]), int(sys.argv[2]))
|
jessamynsmith/boards-backend | blimp_boards/utils/request.py | Python | agpl-3.0 | 192 | 0 | def get_ip_address(request):
ip_addre | ss = request.META.get('HTTP_X_FORWARDED_FOR')
if ip_address:
return ip_address.split(',')[-1]
r | eturn request.META.get('REMOTE_ADDR')
|
valentin-krasontovitsch/ansible | test/runner/lib/env.py | Python | gpl-3.0 | 6,627 | 0.002113 | """Show information about the test environment."""
from __future__ import absolute_import, print_function
import datetime
import json
import os
import platform
import re
import sys
from lib.config import (
CommonConfig,
)
from lib.util import (
display,
find_executable,
raw_command,
SubprocessError,
ApplicationError,
)
from lib.ansible_util import (
ansible_environment,
)
from lib.git import (
Git,
)
from lib.docker_util import (
docker_info,
docker_version
)
class EnvConfig(CommonConfig):
"""Configuration for the tools command."""
def __init__(self, args):
"""
:type args: any
"""
super(EnvConfig, self).__init__(args, 'env')
self.show = args.show or not args.dump
self.dump = args.dump
def command_env(args):
"""
:type args: EnvConfig
"""
data = dict(
ansible=dict(
version=get_ansible_version(args),
),
docker=get_docker_details(args),
environ=os.environ.copy(),
git=get_git_details(args),
platform=dict(
datetime=datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
platform=platform.platform(),
uname=platform.uname(),
),
python=dict(
executable=sys.executable,
version=platform.python_version(),
),
)
if args.show:
verbose = {
'docker': 3,
'docker.executable': 0,
'environ': 2,
'platform.uname': 1,
}
show_dict(data, verbose)
if args.dump and not args.explain:
with open('test/results/bot/data-environment.json', 'w') as results_fd:
results_fd.write(json.dumps(data, sort_keys=True))
def show_dict(data, verbose, root_verbosity=0, path=None):
"""
:type data: dict[str, any]
:type verbose: dict[str, int]
:type root_verbosity: int
:type path: list[str] | None
"""
path = path if path else []
for key, value in sorted(data.items()):
indent = ' ' * len(path)
key_path = path + [key]
key_name = '.'.join(key_path)
verbosity = verbose.get(key_name, root_verbosity)
if isinstance(value, (tuple, list)):
display.info(indent + '%s:' % key, verbosity=verbosity)
for item in value:
display.info(indent + ' - %s' % item, verbosity=verbosity)
elif isinstance(value, dict):
min_verbosity = min([verbosity] + [v f | or k, v in verbose.items() if k.startswith('%s.' % key)])
display.info(indent + '%s:' % key, verbosity=min_verbosity)
show_dict(value, verbose, verbosity, key_path)
else:
display.info(indent + '%s: %s' % (key, value), verbosity=verbosity)
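# Illustrative call (hypothetical data): per-key entries in `verbose` override
# the inherited verbosity, so 'environ' below is only shown at -vv and above.
# show_dict({'environ': {'PATH': '/usr/bin'}, 'python': {'version': '3.8'}},
#           {'environ': 2})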
def get_ansible_version(args):
"""
:ty | pe args: CommonConfig
:rtype: str | None
"""
code = 'from __future__ import (print_function); from ansible.release import __version__; print(__version__)'
cmd = [sys.executable, '-c', code]
env = ansible_environment(args)
try:
ansible_version, _dummy = raw_command(cmd, env=env, capture=True)
ansible_version = ansible_version.strip()
except SubprocessError as ex:
display.warning('Unable to get Ansible version:\n%s' % ex)
ansible_version = None
return ansible_version
def get_docker_details(args):
"""
:type args: CommonConfig
:rtype: dict[str, any]
"""
docker = find_executable('docker', required=False)
info = None
version = None
if docker:
try:
info = docker_info(args)
except SubprocessError as ex:
display.warning('Failed to collect docker info:\n%s' % ex)
try:
version = docker_version(args)
except SubprocessError as ex:
display.warning('Failed to collect docker version:\n%s' % ex)
docker_details = dict(
executable=docker,
info=info,
version=version,
)
return docker_details
def get_git_details(args):
"""
:type args: CommonConfig
:rtype: dict[str, any]
"""
commit = os.environ.get('COMMIT')
base_commit = os.environ.get('BASE_COMMIT')
git_details = dict(
base_commit=base_commit,
commit=commit,
merged_commit=get_merged_commit(args, commit),
root=os.getcwd(),
)
return git_details
def get_merged_commit(args, commit):
"""
:type args: CommonConfig
:type commit: str
:rtype: str | None
"""
if not commit:
return None
git = Git(args)
try:
show_commit = git.run_git(['show', '--no-patch', '--no-abbrev', commit])
except SubprocessError as ex:
# This should only fail for pull requests where the commit does not exist.
# Merge runs would fail much earlier when attempting to checkout the commit.
raise ApplicationError('Commit %s was not found:\n\n%s\n\n'
'The commit was likely removed by a force push between job creation and execution.\n'
'Find the latest run for the pull request and restart failed jobs as needed.'
% (commit, ex.stderr.strip()))
head_commit = git.run_git(['show', '--no-patch', '--no-abbrev', 'HEAD'])
if show_commit == head_commit:
# Commit is HEAD, so this is not a pull request or the base branch for the pull request is up-to-date.
return None
match_merge = re.search(r'^Merge: (?P<parents>[0-9a-f]{40} [0-9a-f]{40})$', head_commit, flags=re.MULTILINE)
if not match_merge:
# The most likely scenarios resulting in a failure here are:
# A new run should or does supersede this job, but it wasn't cancelled in time.
# A job was superseded and then later restarted.
raise ApplicationError('HEAD is not commit %s or a merge commit:\n\n%s\n\n'
'This job has likely been superseded by another run due to additional commits being pushed.\n'
'Find the latest run for the pull request and restart failed jobs as needed.'
% (commit, head_commit.strip()))
parents = set(match_merge.group('parents').split(' '))
if len(parents) != 2:
raise ApplicationError('HEAD is a %d-way octopus merge.' % len(parents))
if commit not in parents:
raise ApplicationError('Commit %s is not a parent of HEAD.' % commit)
parents.remove(commit)
last_commit = parents.pop()
return last_commit
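# Summary of get_merged_commit (from the branches above): on a pull request,
# CI checks out a merge commit whose two parents are the PR head (`commit`)
# and the target branch tip; the function returns that other parent, and
# None when HEAD is the commit itself (i.e. not a pull request merge run).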
|
kantel/processingpy | sketches/modes/PythonMode/examples/Basics/Objects/CompositeObjects/ring.py | Python | mit | 600 | 0.001667 | class Ring(object):
def start(self, xpos, ypos):
self.x = xpos # x-coordina | te
self.y = ypos # y-coordinate
self.diameter = 1 # Diameter of the ring
self.on = False # Turns the display on and off
self.on = True
def grow(self):
if self.on:
self.diamet | er += 0.5
if self.diameter > width * 2:
self.diameter = 0.0
def display(self):
if self.on:
noFill()
strokeWeight(4)
stroke(155, 153)
ellipse(self.x, self.y, self.diameter, self.diameter)
|
ingenioustechie/zamboni | mkt/webapps/views.py | Python | bsd-3-clause | 7,946 | 0 | from django import forms as django_forms
from django.core.urlresolvers import reverse
from django.http import Http404
import commonware
import waffle
from rest_framework import exceptions, response, serializers, status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from lib.metrics import record_action
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import CORSMixin, MarketplaceView, SlugOrIdMixin
from mkt.api.exceptions import HttpLegallyUnavailable
from mkt.api.forms import IconJSONForm
from mkt.api.permissions import (AllowAppOwner, AllowReadOnlyIfPublic,
AllowReviewerReadOnly, AnyOf)
from mkt.developers import tasks
from mkt.developers.forms import (AppFormMedia, IARCGetAppInfoForm,
IARCV2ExistingCertificateForm)
from mkt.files.models import FileUpload
from mkt.regions import get_region
from mkt.submit.views import PreviewViewSet
from mkt.webapps.models import AddonUser, get_excluded_in, Webapp
from mkt.webapps.serializers import AppSerializer
log = commonware.log.getLogger('z.api')
class AppViewSet(CORSMixin, SlugOrIdMixin, MarketplaceView,
viewsets.ModelViewSet):
serializer_class = AppSerializer
slug_field = 'app_slug'
cors_allowed_methods = ('get', 'put', 'post', 'delete')
permission_classes = [AnyOf(AllowAppOwner, AllowReviewerReadOnly,
AllowReadOnlyIfPublic)]
authentication_classes = [RestOAuthAuthentication,
RestSharedSecretAuthentication,
RestAnonymousAuthentication]
def get_queryset(self):
return Webapp.objects.all().exclude(
id__in=get_excluded_in(get_region().id))
def get_base_queryset(self):
return Webapp.objects.all()
def get_object(self):
try:
app = super(AppViewSet, self).get_object()
except Http404:
self.get_queryset = self.get_base_queryset
app = super(AppViewSet, self).get_object()
            # Owners and reviewers can see apps regardless of region.
            owner_or_reviewer = AnyOf(AllowAppOwner, AllowReviewerReadOnly)
            if owner_or_reviewer.has_object_permission(self.request, self,
                                                       app):
                return app
            data = {}
            for key in ('name', 'support_email', 'support_url'):
                value = getattr(app, key)
                data[key] = unicode(value) if value else ''
            data['reason'] = 'Not available in your region.'
            raise HttpLegallyUnavailable(data)
        self.check_object_permissions(self.request, app)
        return app
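    # Region-visibility summary for get_object (from the branches above):
    # owners and reviewers always see the app; other callers receive an
    # HttpLegallyUnavailable payload when the app is excluded in their region.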
def create(self, request, *args, **kwargs):
uuid = request.data.get('upload', '')
if uuid:
is_packaged = True
else:
uuid = request.data.get('manifest', '')
is_packaged = False
if not uuid:
raise exceptions.ParseError(
'No upload or manifest specified.')
try:
upload = FileUpload.objects.get(uuid=uuid)
except FileUpload.DoesNotExist:
raise exceptions.ParseError('No upload found.')
if not upload.valid:
raise exceptions.ParseError('Upload not valid.')
if not request.user.read_dev_agreement:
log.info(u'Attempt to use API without dev agreement: %s'
% request.user.pk)
raise exceptions.PermissionDenied('Terms of Service not accepted.')
if not (upload.user and upload.user.pk == request.user.pk):
raise exceptions.PermissionDenied('You do not own that app.')
# Create app, user and fetch the icon.
try:
obj = Webapp.from_upload(upload, is_packaged=is_packaged)
except (serializers.ValidationError,
django_forms.ValidationError) as e:
raise exceptions.ParseError(unicode(e))
AddonUser(addon=obj, user=request.user).save()
tasks.fetc | h_icon.delay(obj.pk, obj.latest_version.all_files[0].pk)
record_action('app-submitted', request, {'app-id': obj.pk})
log.info('App created: %s' % obj.pk)
data = AppSerializer(
context=self.get_serializer_context(), instance=obj).data
return response.Response(
data, status=201,
headers={'Location': reverse('app-detail', kwargs={'p | k': obj.pk})})
def update(self, request, *args, **kwargs):
# Fail if the app doesn't exist yet.
self.get_object()
r = super(AppViewSet, self).update(request, *args, **kwargs)
# Be compatible with tastypie responses.
if r.status_code == 200:
r.status_code = 202
return r
def list(self, request, *args, **kwargs):
if not request.user.is_authenticated():
log.info('Anonymous listing not allowed')
raise exceptions.PermissionDenied('Anonymous listing not allowed.')
self.object_list = self.filter_queryset(self.get_queryset().filter(
authors=request.user))
page = self.paginate_queryset(self.object_list)
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
def partial_update(self, request, *args, **kwargs):
raise exceptions.MethodNotAllowed('PATCH')
@detail_route(methods=['POST'])
def content_ratings(self, request, *args, **kwargs):
app = self.get_object()
if waffle.switch_is_active('iarc-upgrade-v2'):
form = IARCV2ExistingCertificateForm(data=request.data, app=app)
else:
form = IARCGetAppInfoForm(data=request.data, app=app)
if form.is_valid():
try:
form.save(app)
return Response(status=status.HTTP_201_CREATED)
except django_forms.ValidationError:
pass
return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
@detail_route(
methods=['POST'],
cors_allowed_methods=PreviewViewSet.cors_allowed_methods)
def preview(self, request, *args, **kwargs):
kwargs['app'] = self.get_object()
view = PreviewViewSet.as_view({'post': '_create'})
return view(request, *args, **kwargs)
@detail_route(methods=['PUT'], cors_allowed_methods=['put'])
def icon(self, request, *args, **kwargs):
app = self.get_object()
data_form = IconJSONForm(request.data)
if not data_form.is_valid():
return Response(data_form.errors,
status=status.HTTP_400_BAD_REQUEST)
form = AppFormMedia(data_form.cleaned_data, request=request)
if not form.is_valid():
return Response(data_form.errors,
status=status.HTTP_400_BAD_REQUEST)
form.save(app)
return Response(status=status.HTTP_200_OK)
class PrivacyPolicyViewSet(CORSMixin, SlugOrIdMixin, MarketplaceView,
viewsets.GenericViewSet):
queryset = Webapp.objects.all()
cors_allowed_methods = ('get',)
permission_classes = [AnyOf(AllowAppOwner, AllowReviewerReadOnly,
AllowReadOnlyIfPublic)]
slug_field = 'app_slug'
authentication_classes = [RestOAuthAuthentication,
RestSharedSecretAuthentication,
RestAnonymousAuthentication]
def retrieve(self, request, *args, **kwargs):
app = self.get_object()
return response.Response(
{'privacy_policy': unicode(app.privacy_policy)},
content_type='application/json')
|
nehal96/Deep-Learning-ND-Exercises | MiniFlow/5 - Cost/miniflow.py | Python | mit | 4,928 | 0.000406 | import numpy as np
class Node(object):
"""
Base class for nodes in the network.
Arguments:
`inbound_nodes`: A list of nodes with edges into this node.
"""
def __init__(self, inbound_nodes=[]):
"""
Node's constructor (runs when the object is instantiated). Sets
properties that all nodes need.
"""
# A list of nodes with edges into this node.
self.inbound_nodes = inbound_nodes
# The eventual value of this node. Set by running
# the forward() method.
self.value = None
# A list of nodes that this node outputs to.
self.outbound_nodes = []
# Sets this node as an outbound node for all of
# this node's inputs.
for node in inbound_nodes:
node.outbound_nodes.append(self)
def forward(self):
"""
Every node that uses this class as a base class will
need to define its own `forward` method.
"""
raise NotImplementedError
class Input(Node):
"""
A generic input into the network.
"""
def __init__(self):
# The base class constructor has to run to set all
# the properties here.
#
# The most important property on an Input is value.
# self.value is set during `topological_sort` later.
Node.__init__(self)
def forward(self):
# Do nothing because nothing is calculated.
pass
class Linear(Node):
"""
Represents a node that performs a linear transform.
"""
def __init__(self, X, W, b):
# The base class (Node) constructor. Weights and bias
# are treated like inbound nodes.
Node.__init__(self, [X, W, b])
def forward(self):
"""
Performs the math behind a linear transform.
"""
X = self.inbound_nodes[0].value
W = self.inbound_nodes[1].value
b = self.inbound_nodes[2].value
self.value = np.dot(X, W) + b
class Sigmoid(Node):
"""
Represents a node that performs the sigmoid activation function.
"""
def __init__(self, node):
# The base class constructor.
Node.__init__(self, [node])
def _sigmoid(self, x):
"""
This method is separate from `forward` because it
will be used with `backward` as well.
`x`: A numpy array-like object.
"""
return 1. / (1. + np.exp(-x))
def forward(self):
"""
Perform the sigmoid function and set the value.
"""
input_value = self.inbound_nodes[0].value
self.value = self._sigmoid(input_value)
class MSE(Node):
def __init__(self, y, a):
"""
The mean squared error cost function.
Should be used as the last node for a network.
"""
# Call the base class' constructor.
Node.__init__(self, [y, a])
def forward(self):
"""
Calculates the mean squared error.
"""
# NOTE: We reshape these to avoid possible matrix/vector broadcast
# errors.
#
# For example, if we subtract an array of shape (3,) from an array of shape
# (3,1) we get an array of shape(3,3) as the result when we want
# an array of shape (3,1) instead.
#
        # Making both arrays (3,1) ensures the result is (3,1) and does
# an elementwise subtraction as expected.
y = self.inbound_nodes[0].value.reshape(-1, 1)
a = self.inbound_nodes[1].value.reshape(-1, 1)
# TODO: your code here
self.value = np.mean(np.square(y - a))
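# Worked example for the MSE above (hypothetical values):
# y = [1, 2, 3], a = [1.5, 2, 3.5] -> mean((y - a)^2)
# = (0.25 + 0 + 0.25) / 3 ~ 0.1667.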
def topological_sort(feed_dict):
"""
Sort the nodes in topological order using Kahn's Algorithm.
    `feed_dict`: A dictionary where the key is an `Input` Node and the value is the respective | value fed to that Node.
Returns a list of sorted nodes.
"""
input_nodes = [n for n in feed_dict.keys()]
G = {}
nodes = [n for n in input_nodes]
while len(nodes) > 0:
n = nodes.pop(0)
if n not in G:
G[n] = {'in': | set(), 'out': set()}
for m in n.outbound_nodes:
if m not in G:
G[m] = {'in': set(), 'out': set()}
G[n]['out'].add(m)
G[m]['in'].add(n)
nodes.append(m)
L = []
S = set(input_nodes)
while len(S) > 0:
n = S.pop()
if isinstance(n, Input):
n.value = feed_dict[n]
L.append(n)
for m in n.outbound_nodes:
G[n]['out'].remove(m)
G[m]['in'].remove(n)
# if no other incoming edges add to S
if len(G[m]['in']) == 0:
S.add(m)
return L
def forward_pass(graph):
"""
Performs a forward pass through a list of sorted Nodes.
Arguments:
`graph`: The result of calling `topological_sort`.
"""
# Forward pass
for n in graph:
n.forward()
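# Minimal end-to-end sketch (hypothetical values): wire Input -> Linear ->
# Sigmoid, feed values through topological_sort, then run the forward pass.
if __name__ == '__main__':
    X, W, b = Input(), Input(), Input()
    out = Sigmoid(Linear(X, W, b))
    graph = topological_sort({X: np.array([[1., 2.]]),
                              W: np.array([[0.5], [0.5]]),
                              b: np.array([0.])})
    forward_pass(graph)
    print(out.value)  # sigmoid(1*0.5 + 2*0.5 + 0) = sigmoid(1.5) ~ 0.8176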
|
rmcgibbo/mdtraj | tests/test_restart.py | Python | lgpl-2.1 | 4,202 | 0.000476 | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Jason Swails
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
import os
import tempfile
import mdtraj as md
import numpy as np
import pytest
from mdtraj.formats import AmberRestartFile, AmberNetCDFRestartFile
from mdtraj.testing import eq
fd1, temp1 = tempfile.mkstemp(suffix='.rst7')
fd2, temp2 = tempfile.mkstemp(suffix='.ncrst')
os.close(fd1)
os.close(fd2)
def teardown_module(module):
"""remove the temporary file created by tests in this file
this gets automatically called by pytest"""
if os.path.exists(temp1): os.unlink(temp1)
if os.path.exists(temp2): os.unlink(temp2)
def test_read_after_close(get_fn):
f = AmberNetCDFRestartFile(get_fn('ncinpcrd.rst7'))
assert eq(f.n_atoms, 2101)
assert eq(f.n_frames, 1)
f.close()
with pytest.raises(IOError):
f.read()
def test_shape(get_fn):
with AmberRestartFile(get_fn('inpcrd')) as f:
xyz, time, lengths, angles = f.read()
assert eq(xyz.shape, (1, 2101, 3))
assert eq(time.shape, (1,))
assert eq(lengths, np.asarray([[30.2642725] * 3]))
assert eq(angles, np.asarray([[109.471219] * 3]))
def test_shape_2(get_fn):
with AmberNetCDFRestartFile(get_fn('ncinpcrd.rst7')) as f:
xyz, time, lengths, angles = f.read()
assert eq(xyz.shape, (1, 2101, 3))
assert eq(time.shape, (1,))
assert eq(lengths, np.asarray([[30.2642725] * 3]))
assert eq(angles, np.asarray([[109.471219] * 3]))
def test_read_write_1():
| xyz = np.random.randn(1, 10, 3)
time = np.random.randn(1)
boxlengths = np.random.randn(1, 3)
boxangles = np.random.randn(1, 3)
with AmberRestartFile(temp1, 'w', force_overwrite=True) as f:
f.write(xyz, time, boxlengths, boxangles)
with AmberRestartFile(temp1) as f:
a, b, c, d = f.read()
| assert eq(a, xyz)
assert eq(b, time)
assert eq(c, boxlengths)
assert eq(d, boxangles)
def test_read_write_2():
xyz = np.random.randn(1, 10, 3)
time = np.random.randn(1)
boxlengths = np.random.randn(1, 3)
boxangles = np.random.randn(1, 3)
with AmberNetCDFRestartFile(temp2, 'w', force_overwrite=True) as f:
f.write(xyz, time, boxlengths, boxangles)
with AmberNetCDFRestartFile(temp2) as f:
a, b, c, d = f.read()
assert eq(a, xyz)
assert eq(b, time)
assert eq(c, boxlengths)
assert eq(d, boxangles)
def test_read_write_3(get_fn):
traj = md.load(get_fn('frame0.nc'), top=get_fn('native.pdb'))
traj[0].save(temp1)
assert os.path.exists(temp1)
rsttraj = md.load(temp1, top=get_fn('native.pdb'))
eq(rsttraj.xyz, traj[0].xyz)
os.unlink(temp1)
traj.save(temp1)
for i in range(traj.n_frames):
assert os.path.exists('%s.%03d' % (temp1, i + 1))
os.unlink('%s.%03d' % (temp1, i + 1))
def test_read_write_4(get_fn):
traj = md.load(get_fn('frame0.nc'), top=get_fn('native.pdb'))
traj[0].save(temp2)
assert os.path.exists(temp2)
rsttraj = md.load(temp2, top=get_fn('native.pdb'))
eq(rsttraj.xyz, traj[0].xyz)
os.unlink(temp2)
traj.save(temp2)
for i in range(traj.n_frames):
assert os.path.exists('%s.%03d' % (temp2, i + 1))
os.unlink('%s.%03d' % (temp2, i + 1))
|
alejandro-mc/BDM-hw8 | application.py | Python | mit | 2,660 | 0.018045 | ##Imports
import pyspark
import rtree
import geopandas as gpd
import shapely.geometry as geom
import sys
### yellow_tripdata_2011-05.csv
### 0 vendor_id,pickup_datetime,dropoff_datetime,passenger_count,
### 4 trip_distance,pickup_longitude,pickup_latitude,rate_code,
### 8 store_and_fwd_flag,dropoff_longitude,dropoff_latitude,
### 11 payment_type,fare_amount,surcharge,mta_tax,tip_amount,
### 16 tolls_amount,total_amount
import operator
import heapq
def tripMapper(records):
import rtree
import geopandas as gpd
import shapely.geometry as geom
#first create index
index = rtree.Rtree()
hoods = gpd.read_file('neighborhoods.geojson')
for idx,geometry in enumerate(hoods.geometry):
index.insert(idx,geometry.bounds)
    # for each record, yield ((neighborhood_name, borough_name), 1)
neiborhood_name = "Undefined"
boro_name = "Undefined"
for record in records:
list_record = record.split(',')
if record != '' and list_record[0] != 'vendor_id':#check for empty row and header
pux = list_record[5]#pick up location x coordinate
puy = list_record[6]#pick up location y coordinate
dox = list_record[9]#drop off location x coordinate
doy = list_record[10]#drop off location y coordinate
if pux == '' or puy =='' or dox == '' or doy == '':
continue
point_origin = geom.Point(float(pux),float(puy))
point_destination = geom.Point(float(dox),float(doy))
#get origin neighborhood
matches = list(index.intersection((point_origin.x,point_origin.y)))
for ind in matches:
if any(map(lambda x: x.contains(point_origin), hoods.geometry[ind])):
neiborhood_name = hoods.neighborhood[ind]
#get destination borough
matches = list(index.intersection | ((point_destination.x,point_destination.y)))
for ind in matches:
if any(map(lambda x: x.contains(poin | t_destination), hoods.geometry[ind])):
boro_name = hoods.borough[ind]
yield ((neiborhood_name,boro_name), 1)
def top3Reducer(lst1,lst2):
return heapq.nlargest(3,lst1+lst2,lambda x: x[1])
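# Worked example for top3Reducer (hypothetical counts): merging two partial
# top-3 lists keeps the three largest pairs by trip count.
# top3Reducer([('Astoria', 9), ('Flushing', 4)], [('Harlem', 7)])
# -> [('Astoria', 9), ('Harlem', 7), ('Flushing', 4)]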
if __name__=='__main__':
if len(sys.argv)<3:
print "Usage: <input files> <output path>"
sys.exit(-1)
sc = pyspark.SparkContext()
trips = sc.textFile(','.join(sys.argv[1:-1]))
output = trips.mapPartitions(tripMapper).reduceByKey(operator.add).\
map(lambda x: (x[0][1],[(x[0][0],x[1])])).reduceByKey(top3Reducer)
output.saveAsTextFile(sys.argv[-1])
|
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_12_01/aio/operations/_public_ip_addresses_operations.py | Python | mit | 43,544 | 0.004983 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PublicIPAddressesOperations:
"""PublicIPAddressesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified public IP address.
:param resource_group_name: The name of the re | source group.
:type resource_group_name: str
:param public_ip_address_name: The name of the subnet.
:type public_ip_addre | ss_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
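    # Illustrative usage (client construction assumed elsewhere):
    # poller = await client.public_ip_addresses.begin_delete('my-rg', 'my-ip')
    # await poller.result()  # resolves once the long-running delete completes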
async def get(
self,
resource_group_name: str,
public_ip_address_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.PublicIPAddress":
"""Gets the specified public IP address in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the subnet.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.m |
quattor/aquilon | tests/broker/orderedsuite.py | Python | apache-2.0 | 30,308 | 0.000132 | #!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008-2019 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for testing the broker commands.
Ideally, real unit tests are self-contained. In practice, for many of
these commands that would be painful. The 'del' commands generally rely
on some 'add' commands having been run first. The same holds for 'bind'
and 'unbind', 'map' and 'unmap', etc.
"""
from __future__ import absolute_import
import unittest
import os
from subprocess import Popen
from aquilon.config import Config
from .test_add_address import TestAddAddress
from .test_add_address_alias import TestAddAddressAlias
from .test_add_alias import TestAddAlias
from .test_add_allowed_personality import TestAddAllowedPersonality
from .test_add_application import TestAddApplication
from .test_add_aquilon_host import TestAddAquilonHost
from .test_add_archetype import TestAddArchetype
from .test_add_aurora_host import TestAddAuroraHost
from .test_add_auxiliary import TestAddAuxiliary
from .test_add_building import TestAddBuilding
from .test_add_building_preference import TestAddBuildingPreference
from .test_add_bunker import TestAddBunker
from .test_add_campus import TestAddCampus
from .test_add_chassis import TestAddChassis
from .test_add_city import TestAddCity
from .test_add_cluster import TestAddCluster
from .test_add_cluster_autostartlist import TestAddClusterAutoStartList
from .test_add_cluster_systemlist import TestAddClusterSystemList
from .test_add_console_server import TestAddConsoleServer
from .test_add_cpu import TestAddCpu
from .test_add_desk import TestAddDesk
from .test_add_disk import TestAddDisk
from .test_add_dns_domain import TestAddDnsDomain
from .test_add_dns_environment import TestAddDnsEnvironment
from .test_add_domain import TestAddDomain
from .test_add_dynamic_range import TestAddDynamicRange
from .test_add_entitlement import TestAddEntitlement
from .test_add_entitlement_type import TestAddEntitlementType
from .test_add_esx_cluster import TestAddESXCluster
from .test_add_feature import TestAddFeature
from .test_add_filesystem import TestAddFilesystem
from .test_add_host import TestAddHost
from .test_add_hostlink import TestAddHostlink
from .test_add_interface import TestAddInterface
from .test_add_interface_address import TestAddInterfaceAddress
from .test_add_intervention import TestAddIntervention
from .test_add_machine import TestAddMachine
from .test_add_manager import TestAddManager
from .test_add_metacluster import TestAddMetaCluster
from .test_add_model import TestAddModel
from .test_add_netdev import TestAddNetworkDevice
from .test_add_network import TestAddNetwork
from .test_add_network_compartment import TestAddNetworkCompartment
from .test_add_network_environment import TestAddNetworkEnvironment
from .test_add_ns_record import TestAddNSRecord
from .test | _add_os import TestAddOS
from .test_add_parameter import TestAddParameter
from .test_add_parameter_definition import TestAddParameterDefinition
from .test_add_parameter_feature import TestAddParameterFeature
from .test_add_personality import TestAddPersonality
from .test_add_rack import TestAddRack
from .test_add_reboot_intervention import TestAddRebootIntervention
from .test_add_reboot_schedule import TestAddRebootSchedule
from .test_add_req | uired_service import TestAddRequiredService
from .test_add_resourcegroup import TestAddResourceGroup
from .test_add_role import TestAddRole
from .test_add_room import TestAddRoom
from .test_add_router_address import TestAddRouterAddress
from .test_add_sandbox import TestAddSandbox
from .test_add_service import TestAddService
from .test_add_service_address import TestAddServiceAddress
from .test_add_service_address_sn_aliases import TestAddServiceAddressSNAliases
from .test_add_share import TestAddShare
from .test_add_shared_service_name import TestAddSharedServiceName
from .test_add_srv_record import TestAddSrvRecord
from .test_add_static_route import TestAddStaticRoute
from .test_add_user import TestAddUser
from .test_add_user_type import TestAddUserType
from .test_add_vendor import TestAddVendor
from .test_add_virtual_hardware import TestAddVirtualHardware
from .test_add_virtual_switch import TestAddVirtualSwitch
from .test_add_vlan import TestAddVlan
from .test_add_windows_host import TestAddWindowsHost
from .test_appliance import TestAppliance
from .test_audit import TestAudit
from .test_bind_client import TestBindClient
from .test_bind_cluster import TestBindCluster
from .test_bind_console_server import TestBindConsoleServer
from .test_bind_feature import TestBindFeature
from .test_bind_server import TestBindServer
from .test_build_clusters import TestBuildClusters
from .test_change_status import TestChangeStatus
from .test_change_status_cluster import TestChangeClusterStatus
from .test_client_bypass import TestClientBypass
from .test_client_failure import TestClientFailure
from .test_cluster import TestCluster
from .test_cm_logger import TestCMLogger
from .test_compile import TestCompile
from .test_consistency import TestConsistency
from .test_constraints_archetype import TestArchetypeConstraints
from .test_constraints_bind_client import TestBindClientConstraints
from .test_constraints_bind_server import TestBindServerConstraints
from .test_constraints_chooser import TestChooserConstraints
from .test_constraints_cluster import TestClusterConstraints
from .test_constraints_cluster_no_members import \
TestClusterConstraintsNoMembers
from .test_constraints_cluster_no_vms import TestClusterConstraintsNoVMs
from .test_constraints_dns import TestDnsConstraints
from .test_constraints_domain import TestDomainConstraints
from .test_constraints_interface import TestInterfaceConstraints
from .test_constraints_location import TestLocationConstraints
from .test_constraints_machine import TestMachineConstraints
from .test_constraints_metacluster import TestMetaClusterConstraints
from .test_constraints_model import TestModelConstraints
from .test_constraints_netdev import TestNetworkDeviceConstraints
from .test_constraints_network import TestNetworkConstraints
from .test_constraints_parameter import TestParameterConstraints
from .test_constraints_personality import TestPersonalityConstraints
from .test_constraints_service import TestServiceConstraints
from .test_constraints_umask import TestUmaskConstraints
from .test_constraints_vendor import TestVendorConstraints
from .test_continent import TestContinent
from .test_copy_personality import TestCopyPersonality
from .test_country import TestCountry
from .test_del_10gig_hardware import TestDel10GigHardware
from .test_del_address import TestDelAddress
from .test_del_address_alias import TestDelAddressAlias
from .test_del_alias import TestDelAlias
from .test_del_allowed_personality import TestDelAllowedPersonality
from .test_del_application import TestDelApplication
from .test_del_archetype import TestDelArchetype
from .test_del_auxiliary import TestDelAuxiliary
from .test_del_building import TestDelBuilding
from .test_del_building_preference import TestDelBuildingPreference
from .test_del_bunker import TestDelBunker
from .test_del_campus import TestDelCampus
from .test_del_chassis import TestDelChassis
from .test_del_city import TestDelCity
from .test_del_cluster import TestDelCluster
from .test_del_cluster_autostartlist import TestDelClusterAutoStartList
from .test_del_cluster_systemlist import TestDelClusterSystemList
from .test_del_console_server import TestDelConsoleServer
from .test_del_desk import TestDelDesk
from .test_del_disk import TestDelDisk
from . |
slosar/april | Run/wqdriver_eboss.py | Python | gpl-2.0 | 410 | 0.004878 | #!/usr/bin/env python
# this script runs everything on the | bnl cluster
import os
import sys
from wqdriver import wqsubmit
if len(sys.argv) > 1:
lst = sys.argv[1:]
else:
lst = ['TakadaFlat']
for l in lst:
if l == 'TakadaFlat':
wqsubmit('pre', 'PolyOk', 'BBAO+SN', 5, 30000)
wqsubmit('pr | e', 'PolyOkc', 'BBAO+SN', 5, 30000)
wqsubmit('pre', 'PolyOkf', 'BBAO+SN', 5, 30000)
|
HBPNeurorobotics/nest-simulator | pynest/nest/tests/test_labeled_synapses.py | Python | gpl-2.0 | 6,034 | 0 | # -*- coding: utf-8 -*-
#
# test_labeled_synapses.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Test setting and getting labels on synapses.
"""
import unittest
import nest
HAVE_GSL = nest.sli_func("statusdict/have_gsl ::")
@nest.check_stack
@unittest.skipIf(not HAVE_GSL, 'GSL is not available')
class LabeledSynapsesTestCase(unittest.TestCase):
"""Test labeled synapses"""
def default_network(self):
nest.ResetKernel()
# set volume transmitter for stdp_dopamine_synapse_lbl
vol = nest.Create('volume_transmitter', 3)
nest.SetDefaults('stdp_dopamine_synapse', {'vt': vol[0]})
nest.SetDefaults('stdp_dopamine_synapse_lbl', {'vt': vol[1]})
nest.SetDefaults('stdp_dopamine_synapse_hpc', {'vt': vol[2]})
# create neurons that accept all synapse connections (especially gap
# junctions)... hh_psc_alpha_gap is only available with GSL, hence the
# skipIf above
return nest.Create("hh_psc_alpha_gap", 5)
def test_SetLabelToSynapseOnConnect(self):
"""Set a label to a labeled synapse on connect."""
labeled_synapse_models = [s for s in nest.Models(
mtype='synapses') | if s.endswith("_lbl")]
for syn in la | beled_synapse_models:
a = self.default_network()
# set a label during connection
nest.Connect(a, a, {"rule": "one_to_one"}, {
"model": syn, "synapse_label": 123})
c = nest.GetConnections(a, a)
self.assertTrue(
all([
status['synapse_label'] == 123
for status in nest.GetStatus(c)
])
)
def test_SetLabelToSynapseSetStatus(self):
"""Set a label to a labeled synapse on SetStatus."""
labeled_synapse_models = [s for s in nest.Models(
mtype='synapses') if s.endswith("_lbl")]
for syn in labeled_synapse_models:
a = self.default_network()
# set no label during connection
nest.Connect(a, a, {"rule": "one_to_one"}, {"model": syn})
c = nest.GetConnections(a, a)
# still unlabeled
self.assertTrue(
all([
status['synapse_label'] == -1
for status in nest.GetStatus(c)
])
)
# set a label
nest.SetStatus(c, {'synapse_label': 123})
self.assertTrue(
all([
status['synapse_label'] == 123
for status in nest.GetStatus(c)
])
)
def test_SetLabelToSynapseSetDefaults(self):
"""Set a label to a labeled synapse on SetDefaults."""
labeled_synapse_models = [s for s in nest.Models(
mtype='synapses') if s.endswith("_lbl")]
for syn in labeled_synapse_models:
a = self.default_network()
# set a label during SetDefaults
nest.SetDefaults(syn, {'synapse_label': 123})
nest.Connect(a, a, {"rule": "one_to_one"}, {"model": syn})
c = nest.GetConnections(a, a)
self.assertTrue(
all([
status['synapse_label'] == 123
for status in nest.GetStatus(c)
])
)
def test_GetLabeledSynapses(self):
"""Get labeled synapses with GetConnections."""
labeled_synapse_models = [s for s in nest.Models(
mtype='synapses') if s.endswith("_lbl")]
for syn in labeled_synapse_models:
a = self.default_network()
# some more connections
nest.Connect(a, a, {"rule": "one_to_one"},
{"model": "static_synapse"})
# set a label during connection
nest.Connect(a, a, {"rule": "one_to_one"}, {
"model": syn, "synapse_label": 123})
c = nest.GetConnections(a, a, synapse_label=123)
self.assertTrue(
all([
status['synapse_label'] == 123
for status in nest.GetStatus(c)
])
)
def test_SetLabelToNotLabeledSynapse(self):
"""Try set a label to an 'un-label-able' synapse."""
labeled_synapse_models = [s for s in nest.Models(
mtype='synapses') if not s.endswith("_lbl")]
for syn in labeled_synapse_models:
a = self.default_network()
# try set a label during SetDefaults
with self.assertRaises(nest.NESTError):
nest.SetDefaults(syn, {'synapse_label': 123})
# try set on connect
with self.assertRaises(nest.NESTError):
nest.Connect(a, a, {"rule": "one_to_one"}, {
"model": syn, "synapse_label": 123})
# plain connection
nest.Connect(a, a, {"rule": "one_to_one"}, {"model": syn})
# try set on SetStatus
c = nest.GetConnections(a, a)
with self.assertRaises(nest.NESTError):
nest.SetStatus(c, {'synapse_label': 123})
def suite():
suite = unittest.makeSuite(LabeledSynapsesTestCase, 'test')
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == "__main__":
run()
|
anbangr/trusted-juju | juju/charm/tests/test_directory.py | Python | agpl-3.0 | 8,532 | 0 | import gc
import os
import hashlib
import inspect
import shutil
import tempfile
import yaml
import zipfile
from juju.errors import CharmError, FileNotFound
from juju.charm.errors import InvalidCharmFile
from juju.charm.metadata import MetaData
from juju.charm.directory import CharmDirectory
from juju.charm.bundle import CharmBundle
from juju.lib.filehash import compute_file_hash
from juju.charm import tests
from juju.charm.tests.test_repository import RepositoryTestBase
sample_directory = os.path.join(
os.path.dirname(
inspect.getabsfile(tests)), "repository", "series", "dummy")
class DirectoryTest(RepositoryTestBase):
def setUp(self):
super(DirectoryTest, self).setUp()
# Ensure the empty/ directory exists under the dummy sample
# charm. Depending on how the source code is exported,
# empty directories may be ignored.
empty_dir = os.path.join(sample_directory, "empty")
if not os.path.isdir(empty_dir):
os.mkdir(empty_dir)
def copy_charm(self):
dir_ = os.path.join(self.makeDir(), "sample")
shutil.copytree(sample_directory, dir_)
return dir_
def delete_revision(self, dir_):
os.remove(os.path.join(dir_, "revision"))
def set_metadata_revision(self, dir_, revision):
metadata_path = os.path.join(dir_, "metadata.yaml")
with open(metadata_path) as f:
data = yaml.load(f.read())
data["revision"] = 999
with open(metadata_path, "w") as f:
f.write(yaml.dump(data))
def test_metadata_is_required(self):
directory = self.makeDir()
self.assertRaises(FileNotFound, CharmDirectory, directory)
def test_no_revision(self):
dir_ = self.copy_charm()
self.delete_revision(dir_)
charm = CharmDirectory(dir_)
self.assertEquals(charm.get_revision(), 0)
with open(os.path.join(dir_, "revision")) as f:
self.assertEquals(f.read(), "0\n")
def test_nonsense_revision(self):
dir_ = self.copy_charm()
with open(os.path.join(dir_, "revision"), "w") as f:
f.write("shifty look")
err = self.assertRaises(CharmError, CharmDirectory, dir_)
self.assertEquals(
str(err),
"Error processing %r: invalid charm revision 'shifty look'" % dir_)
def test_revision_in_metadata(self):
dir_ = self.copy_charm()
self.delete_revision(dir_)
self.set_metadata_revision(dir_, 999)
log = self.capture_logging("juju.charm")
charm = CharmDirectory(dir_)
self.assertEquals(charm.get_revision(), 999)
self.assertIn(
"revision field is obsolete. Move it to the 'revision' file | .",
log.getvalue())
def test_competing_revisions(self):
dir_ = self.copy_charm()
self.set_metadata_revision(dir_, 999)
log = self.capture_logging("juju.charm")
charm = CharmDirectory(dir_)
self.assertEquals(charm.get_revision(), 1)
self.assertIn(
"revision field is obsolete. Move it to the 'revision' file.",
log.getvalue())
def test_set | _revision(self):
dir_ = self.copy_charm()
charm = CharmDirectory(dir_)
charm.set_revision(123)
self.assertEquals(charm.get_revision(), 123)
with open(os.path.join(dir_, "revision")) as f:
self.assertEquals(f.read(), "123\n")
def test_info(self):
directory = CharmDirectory(sample_directory)
self.assertTrue(directory.metadata is not None)
self.assertTrue(isinstance(directory.metadata, MetaData))
self.assertEquals(directory.metadata.name, "dummy")
self.assertEquals(directory.type, "dir")
def test_make_archive(self):
# make archive from sample directory
directory = CharmDirectory(sample_directory)
f = tempfile.NamedTemporaryFile(suffix=".charm")
directory.make_archive(f.name)
# open archive in .zip-format and assert integrity
from zipfile import ZipFile
zf = ZipFile(f.name)
self.assertEqual(zf.testzip(), None)
# assert included
included = [info.filename for info in zf.infolist()]
self.assertEqual(
set(included),
set(("metadata.yaml", "empty/", "src/", "src/hello.c",
"config.yaml", "hooks/", "hooks/install", "revision")))
def test_as_bundle(self):
directory = CharmDirectory(self.sample_dir1)
charm_bundle = directory.as_bundle()
self.assertEquals(type(charm_bundle), CharmBundle)
self.assertEquals(charm_bundle.metadata.name, "sample")
self.assertIn("sample-1.charm", charm_bundle.path)
total_compressed = 0
total_uncompressed = 0
zip_file = zipfile.ZipFile(charm_bundle.path)
for n in zip_file.namelist():
info = zip_file.getinfo(n)
total_compressed += info.compress_size
total_uncompressed += info.file_size
self.assertTrue(total_compressed < total_uncompressed)
def test_as_bundle_file_lifetime(self):
"""
The temporary bundle file created should have a life time
equivalent to that of the directory object itself.
"""
directory = CharmDirectory(self.sample_dir1)
charm_bundle = directory.as_bundle()
gc.collect()
self.assertTrue(os.path.isfile(charm_bundle.path))
del directory
gc.collect()
self.assertFalse(os.path.isfile(charm_bundle.path))
def test_compute_sha256(self):
"""
Computing the sha256 of a directory will use the bundled
charm, since the hash of the file itself is needed.
"""
directory = CharmDirectory(self.sample_dir1)
sha256 = directory.compute_sha256()
charm_bundle = directory.as_bundle()
self.assertEquals(type(charm_bundle), CharmBundle)
self.assertEquals(compute_file_hash(hashlib.sha256,
charm_bundle.path),
sha256)
def test_as_bundle_with_relative_path(self):
"""
Ensure that as_bundle works correctly with relative paths.
"""
current_dir = os.getcwd()
os.chdir(self.sample_dir2)
self.addCleanup(os.chdir, current_dir)
charm_dir = "../%s" % os.path.basename(self.sample_dir1)
directory = CharmDirectory(charm_dir)
charm_bundle = directory.as_bundle()
self.assertEquals(type(charm_bundle), CharmBundle)
self.assertEquals(charm_bundle.metadata.name, "sample")
def test_charm_base_inheritance(self):
"""
get_sha256() should be implemented in the base class,
and should use compute_sha256 to calculate the digest.
"""
directory = CharmDirectory(self.sample_dir1)
bundle = directory.as_bundle()
digest = compute_file_hash(hashlib.sha256, bundle.path)
self.assertEquals(digest, directory.get_sha256())
def test_as_directory(self):
directory = CharmDirectory(self.sample_dir1)
self.assertIs(directory.as_directory(), directory)
def test_config(self):
"""Validate that ConfigOptions are available on the charm"""
from juju.charm.tests.test_config import sample_yaml_data
directory = CharmDirectory(sample_directory)
self.assertEquals(directory.config.get_serialization_data(),
sample_yaml_data)
def test_file_type(self):
charm_dir = self.copy_charm()
os.mkfifo(os.path.join(charm_dir, "foobar"))
directory = CharmDirectory(charm_dir)
e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
self.assertIn("foobar' Invalid file type for a charm", str(e))
def test_internal_symlink(self):
charm_path = self.copy_charm()
os.symlink("/etc/lsb-release", os.path.join(charm_path, "foobar"))
directory = CharmDirectory(charm_path)
e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
self.assertIn("foobar' Absolute links are in |
testmana2/test | MultiProject/__init__.py | Python | gpl-3.0 | 531 | 0 | # -*- coding: utf-8 -*-
# Copyright (c) 2008 - 2015 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Package implementing the multi project management module of eric6.
The multi project management module consists of the | main part, which is
used for reading and writ | ing of eric6 multi project files (*.e5m) and
for performing all operations on the multi project. It is accompanied by
various UI related modules implementing different dialogs and a browser
for the display of projects belonging to the current multi project.
"""
|
xuru/pyvisdk | pyvisdk/do/dvs_port_created_event.py | Python | mit | 1,163 | 0.009458 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def DvsPortCreatedEvent(vim, *args, **kwargs):
'''New ports | are created in the distributed virtual switch.'''
obj = vim.client.factory.create('ns0:DvsPortCreatedEvent')
# do some validation checking...
    if (len(args) + len(kwargs)) < 5:
        raise IndexError('Expected at least 5 arguments got: %d' % (len(args) + len(kwargs)))
required = [ 'portKey', 'chainId', 'createdTime', 'key', 'userName' ]
| optional = [ 'changeTag', 'computeResource', 'datacenter', 'ds', 'dvs',
'fullFormattedMessage', 'host', 'net', 'vm', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
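# Hedged usage sketch -- the values below are illustrative only, not from the
# vSphere docs; just the required positional order (portKey, chainId,
# createdTime, key, userName) comes from the list above:
#   event = DvsPortCreatedEvent(vim, ['port-1'], 42, '2012-04-24T18:08:08Z', 7, 'admin')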
|
DemocracyLab/CivicTechExchange | salesforce/contact.py | Python | mit | 1,381 | 0.001448 | from common.models import Tag
from .client import SalesforceClient
import json
import requests
import threading
''' Contributor model maps to the Contact object in Salesforce '''
client = SalesforceClient()
def run(request):
response = SalesforceClient().send(request)
def save(contributor: object):
data = {
"ownerid": client.owner_id,
"firstname": contributor.first_name,
"lastname": contributor.last_name,
"email": contributor.username,
"phone": contributor.phone_primary,
"mailingpostalcode": contributor.postal_code,
"mailingcountry": contributor.country,
"npo02__membershipjoindate__c": contributor.date_joined.strftime('%Y-%m-%d'),
"description": con | tributor.about_me,
'technologies__c': Tag.tags_field_descriptions(contributor.user_technologies)
}
req = requests.Request(
method="PATCH",
url=f'{client.contact_endpoint}/platform_id__c/{contributor.id}',
data=json.dumps(data),
)
thread = threading.Thread(target=run, args=(req,))
thread.daemon = True
thread.start()
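    # Design note: the HTTP call runs on a fire-and-forget daemon thread so a
    # slow Salesforce round-trip never blocks the caller; because the thread
    # is daemonic, an interpreter exit can drop an in-flight sync.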
def delete(contributor: object):
req = requests.Request(
| method="DELETE",
url=f'{client.contact_endpoint}/platform_id__c/{contributor.id}'
)
thread = threading.Thread(target=run, args=(req,))
thread.daemon = True
thread.start()
|
sivaprakashniet/push_pull | p2p/lib/python2.7/site-packages/celery/tests/concurrency/test_prefork.py | Python | bsd-3-clause | 8,523 | 0 | from __future__ import absolute_import
import errno
import socket
import time
from itertools import cycle
fro | m mock import Mock, call, patch
from nose import SkipTest
from celery.five import items, range
from celery.utils.functional import noop
from celery.tests.case import AppCase
try:
from celery.concurrency import prefork as mp
from celery.concurrency import asynpool
except ImportError:
class _mp(object):
RUN = 0x1
class TaskPool(object):
_pool = Mock() |
def __init__(self, *args, **kwargs):
pass
def start(self):
pass
def stop(self):
pass
def apply_async(self, *args, **kwargs):
pass
mp = _mp() # noqa
asynpool = None # noqa
class Object(object): # for writeable attributes.
def __init__(self, **kwargs):
[setattr(self, k, v) for k, v in items(kwargs)]
class MockResult(object):
def __init__(self, value, pid):
self.value = value
self.pid = pid
def worker_pids(self):
return [self.pid]
def get(self):
return self.value
class MockPool(object):
started = False
closed = False
joined = False
terminated = False
_state = None
def __init__(self, *args, **kwargs):
self.started = True
self._timeout_handler = Mock()
self._result_handler = Mock()
self.maintain_pool = Mock()
self._state = mp.RUN
self._processes = kwargs.get('processes')
self._pool = [Object(pid=i, inqW_fd=1, outqR_fd=2)
for i in range(self._processes)]
self._current_proc = cycle(range(self._processes))
def close(self):
self.closed = True
self._state = 'CLOSE'
def join(self):
self.joined = True
def terminate(self):
self.terminated = True
def terminate_job(self, *args, **kwargs):
pass
def restart(self, *args, **kwargs):
pass
def handle_result_event(self, *args, **kwargs):
pass
def flush(self):
pass
def grow(self, n=1):
self._processes += n
def shrink(self, n=1):
self._processes -= n
def apply_async(self, *args, **kwargs):
pass
def register_with_event_loop(self, loop):
pass
class ExeMockPool(MockPool):
def apply_async(self, target, args=(), kwargs={}, callback=noop):
from threading import Timer
res = target(*args, **kwargs)
Timer(0.1, callback, (res, )).start()
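        # The Timer above fires the callback ~0.1s later, mimicking the
        # asynchronous result delivery of a real worker pool.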
return MockResult(res, next(self._current_proc))
class TaskPool(mp.TaskPool):
Pool = BlockingPool = MockPool
class ExeMockTaskPool(mp.TaskPool):
Pool = BlockingPool = ExeMockPool
class PoolCase(AppCase):
def setup(self):
try:
import multiprocessing # noqa
except ImportError:
raise SkipTest('multiprocessing not supported')
class test_AsynPool(PoolCase):
def test_gen_not_started(self):
def gen():
yield 1
yield 2
g = gen()
self.assertTrue(asynpool.gen_not_started(g))
next(g)
self.assertFalse(asynpool.gen_not_started(g))
list(g)
self.assertFalse(asynpool.gen_not_started(g))
def test_select(self):
ebadf = socket.error()
ebadf.errno = errno.EBADF
with patch('select.select') as select:
select.return_value = ([3], [], [])
self.assertEqual(
asynpool._select(set([3])),
([3], [], 0),
)
select.return_value = ([], [], [3])
self.assertEqual(
asynpool._select(set([3]), None, set([3])),
([3], [], 0),
)
eintr = socket.error()
eintr.errno = errno.EINTR
select.side_effect = eintr
readers = set([3])
self.assertEqual(asynpool._select(readers), ([], [], 1))
self.assertIn(3, readers)
with patch('select.select') as select:
select.side_effect = ebadf
readers = set([3])
self.assertEqual(asynpool._select(readers), ([], [], 1))
select.assert_has_calls([call([3], [], [], 0)])
self.assertNotIn(3, readers)
with patch('select.select') as select:
select.side_effect = MemoryError()
with self.assertRaises(MemoryError):
asynpool._select(set([1]))
with patch('select.select') as select:
def se(*args):
select.side_effect = MemoryError()
raise ebadf
select.side_effect = se
with self.assertRaises(MemoryError):
asynpool._select(set([3]))
with patch('select.select') as select:
def se2(*args):
select.side_effect = socket.error()
select.side_effect.errno = 1321
raise ebadf
select.side_effect = se2
with self.assertRaises(socket.error):
asynpool._select(set([3]))
with patch('select.select') as select:
select.side_effect = socket.error()
select.side_effect.errno = 34134
with self.assertRaises(socket.error):
asynpool._select(set([3]))
def test_promise(self):
fun = Mock()
x = asynpool.promise(fun, (1, ), {'foo': 1})
x()
self.assertTrue(x.ready)
fun.assert_called_with(1, foo=1)
def test_Worker(self):
w = asynpool.Worker(Mock(), Mock())
w.on_loop_start(1234)
w.outq.put.assert_called_with((asynpool.WORKER_UP, (1234, )))
class test_ResultHandler(PoolCase):
def test_process_result(self):
x = asynpool.ResultHandler(
Mock(), Mock(), {}, Mock(),
Mock(), Mock(), Mock(), Mock(),
fileno_to_outq={},
on_process_alive=Mock(),
on_job_ready=Mock(),
)
self.assertTrue(x)
hub = Mock(name='hub')
recv = x._recv_message = Mock(name='recv_message')
recv.return_value = iter([])
x.on_state_change = Mock()
x.register_with_event_loop(hub)
proc = x.fileno_to_outq[3] = Mock()
reader = proc.outq._reader
reader.poll.return_value = False
x.handle_event(6) # KeyError
x.handle_event(3)
x._recv_message.assert_called_with(
hub.add_reader, 3, x.on_state_change,
)
class test_TaskPool(PoolCase):
def test_start(self):
pool = TaskPool(10)
pool.start()
self.assertTrue(pool._pool.started)
self.assertTrue(pool._pool._state == asynpool.RUN)
_pool = pool._pool
pool.stop()
self.assertTrue(_pool.closed)
self.assertTrue(_pool.joined)
pool.stop()
pool.start()
_pool = pool._pool
pool.terminate()
pool.terminate()
self.assertTrue(_pool.terminated)
def test_apply_async(self):
pool = TaskPool(10)
pool.start()
pool.apply_async(lambda x: x, (2, ), {})
def test_grow_shrink(self):
pool = TaskPool(10)
pool.start()
self.assertEqual(pool._pool._processes, 10)
pool.grow()
self.assertEqual(pool._pool._processes, 11)
pool.shrink(2)
self.assertEqual(pool._pool._processes, 9)
def test_info(self):
pool = TaskPool(10)
procs = [Object(pid=i) for i in range(pool.limit)]
class _Pool(object):
_pool = procs
_maxtasksperchild = None
timeout = 10
soft_timeout = 5
def human_write_stats(self, *args, **kwargs):
return {}
pool._pool = _Pool()
info = pool.info
self.assertEqual(info['max-concurrency'], pool.limit)
self.assertEqual(info['max-tasks-per-child'], 'N/A')
self.assertEqual(info['timeouts'], (5, 10))
def test_num_processes(self):
pool = TaskPool(7)
pool.start()
self.assertEqual(pool.num_processes, 7)
de |
jekhokie/scriptbox | python--learnings/coding-practice/swap_sort_array.py | Python | mit | 684 | 0.005848 | #!/usr/bin/env python
#
# Given an array of elements, determine how many pair-swaps (minimum) are needed
# to get the array into ascending sorted order.
#
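# Worked example (illustrative): arr = [4, 3, 1, 2] needs 3 swaps:
#   swap 4,1 -> [1, 3, 4, 2]; swap 3,2 -> [1, 2, 4, 3]; swap 4,3 -> [1, 2, 3, 4]
# Note: arr.index() makes this O(n^2); a value->position map would give O(n).
#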
#!/bin/python3
import math
import os
| import random
import re
import sys
# Complete the minimumSwaps function below.
def minimumSwaps(arr):
swaps = 0
for i in range(len(arr) - 1):
cur_val = i + 1
# swap needed
if arr[i] != cur_val:
swaps += 1
arr[arr.index(cur_val)] = arr[i]
arr[i] | = cur_val
return swaps
if __name__ == '__main__':
n = int(input())
arr = list(map(int, input().rstrip().split()))
res = minimumSwaps(arr)
print(res)
|
allmende/synnefo | snf-django-lib/snf_django/lib/api/urls.py | Python | gpl-3.0 | 1,762 | 0.000568 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core import urlresolvers
from django.views.decorators import csrf
from django.conf.urls import patterns
def _patch_pattern(regex_pattern):
"""
Patch pattern callback using csrf_exempt. Enforce
RegexURLPattern callback to get resolved if required.
"""
regex_pattern._callback = \
csrf.csrf_exempt(regex_pattern.callback)
def _patch_resolver(r):
"""
Patch all patterns found in resolver with _patch_pattern
"""
if hasattr(r, 'url_patterns'):
| entries = r.url_patterns
else:
# first level view in patterns ?
entries = [r]
for entry in entries:
| if isinstance(entry, urlresolvers.RegexURLResolver):
_patch_resolver(entry)
#if isinstance(entry, urlresolvers.RegexURLPattern):
# let it break...
else:
_patch_pattern(entry)
def api_patterns(*args, **kwargs):
"""
Protect all url patterns from csrf attacks.
"""
_patterns = patterns(*args, **kwargs)
for entry in _patterns:
_patch_resolver(entry)
return _patterns
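# Usage sketch (module and view names are illustrative, not from this project):
#   urlpatterns = api_patterns(
#       'myapp.views',
#       url(r'^items/$', 'list_items'),
#   )
# Every resolved pattern ends up wrapped in csrf_exempt, presumably because
# API clients authenticate with tokens rather than session cookies.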
|
CptLemming/django-socket-server | tests/test_models.py | Python | bsd-3-clause | 390 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-socket-server
------------
Tests for `django-socket-server` models module.
"""
import os
import shutil
from django.test import TestCase
from socket_server import models
class TestSocket_server(TestCase):
| def setUp(self):
pass
| def test_something(self):
pass
def tearDown(self):
pass
|
tristanfisher/ffi4wd | tests/test_cython.py | Python | agpl-3.0 | 647 | 0.017002 | import sys
sys.path.append('..')
sys.path.append('.')
import cProfile
from jumper | .blueprints.backends.fixture_data import test_cities_eleven_tuples as test_11_cities
from jumper.blueprints.backends.modules.cython \
import ffi_cython_py_compat as ffi_py
from jumper.blueprints.backends.modules.cython \
import ffi_cython as ffi_cy
def test_profile_backend_cypy():
cmd = "ffi_py.backend_cython(test_11_cities)"
cProfile.run(cmd)
def test_profile_backend_cython():
#tes | t_11_cities is test data
cmd = "ffi_cy.backend_cython(test_11_cities)"
cProfile.run(cmd)
test_profile_backend_cypy()
#test_profile_backend_cython() |
tivaliy/python-gerritclient | gerritclient/tests/utils/fake_commit.py | Python | apache-2.0 | 1,630 | 0 | #
# Copyright 2018 Vitalii Kulanov
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or i | mplied. See the
# License for the specific language governing permissions and limitations
# under the License.
def get_fake_commit(commit_id=None):
return {
"commit": commit_id or "184ebe53805e102605d11f6b143486d15c23a09c",
"parents": [
{
"commit": "1eee2c9d8f352483781e772f35dc586a69ff5646",
| "subject": "Migrate contributor agreements to All-Projects."
}
],
"author": {
"name": "Shawn O. Pearce",
"email": "sop@google.com",
"date": "2012-04-24 18:08:08.000000000",
"tz": -420
},
"committer": {
"name": "Shawn O. Pearce",
"email": "sop@google.com",
"date": "2012-04-24 18:08:08.000000000",
"tz": -420
},
"subject": "Use an EventBus to manage star icons",
"message": "Use an EventBus to manage star icons\n\n"
"Image widgets that need to ..."
}
def get_fake_commit_affiliation():
return {
"branches": ["master", "fake/branch"],
"tags": ["fake_tag"]
}
|
makinacorpus/django | tests/mail/tests.py | Python | bsd-3-clause | 35,289 | 0.003065 | # coding: utf-8
from __future__ import unicode_literals
import asyncore
import email
import os
import shutil
import smtpd
import sys
import tempfile
import threading
from django.core import mail
from django.core.mail import (EmailMessage, mail_admins, mail_managers,
EmailMultiAlternatives, send_mail, send_mass_mail)
from django.core.mail.backends import console, dummy, locmem, filebased, smtp
from django.core.mail.message import BadHeaderError
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.encoding import force_str, force_text
from django.utils.six import PY3, StringIO
from django.utils.translation import ugettext_lazy
class MailTests(TestCase):
"""
Non-backend specific tests.
"""
def test_ascii(self):
email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com'])
message = email.message()
self.assertEqual(message['Subject'], 'Subject')
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message['From'], 'from@example.com')
self.assertEqual(message['To'], 'to@example.com')
def test_multiple_recipients(self):
email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com', 'other@example.com'])
message = email.message()
self.assertEqual(message['Subject'], 'Subject')
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message['From'], 'from@example.com')
self.assertEqual(message['To'], 'to@example.com, other@example.com')
def test_cc(self):
"""Regression test for #7722"""
email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com'], cc=['cc@example.com'])
message = email.message()
self.assertEqual(message['Cc'], 'cc@example.com')
self.assertEqual(email.recipients(), ['to@example.com', 'cc@example.com'])
# Test multiple CC with multiple To
email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com', 'other@example.com'], cc=['cc@example.com', 'cc.other@example.com'])
message = email.message()
self.assertEqual(message['Cc'], 'cc@example.com, cc.other@example.com')
self.assertEqual(email.recipients(), ['to@example.com', 'other@example.com', 'cc@example.com', 'cc.other@example.com'])
# Testing with Bcc
email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com', 'other@example.com'], cc=['cc@example.com', 'cc.other@example.com'], bcc=['bcc@example.com'])
message = email.message()
self.assertEqual(message['Cc'], 'cc@example.com, cc.other@example.com')
self.assertEqual(email.recipients(), ['to@example.com', 'other@example.com', 'cc@example.com', 'cc.other@example.com', 'bcc@example.com'])
def test_recipients_as_tuple(self):
email = EmailMessage('Subject', 'Content', 'from@example.com', ('to@example.com', 'other@example.com'), cc=('cc@example.com', 'cc.other@example.com'), bcc=('bcc@example.com',))
message = email.message()
self.assertEqual(message['Cc'], 'cc@example.com, cc.other@example.com')
self.assertEqual(email.recipients(), ['to@example.com', 'other@example.com', 'cc@example.com', 'cc.other@example.com', 'bcc@example.com'])
def test_header_injection(self):
email = EmailMessage('Subject\nInjection Test', 'Content', 'from@example.com', ['to@example.com'])
self.assertRaises(BadHeaderError, email.message)
email = EmailMessage(ugettext_lazy('Subject\nInjection Test'), 'Content', 'from@example.com', ['to@example.com'])
self.assertRaises(BadHeaderError, email.message)
def test_space_continuation(self):
"""
Test for space continuation character in long (ascii) subject headers (#7747)
"""
email = EmailMessage('Long subject lines that get wrapped should contain a space continuation character to get expected behavior in Outlook and Thunderbird', 'Content', 'from@example.com', | ['to@example.com'])
message = email.message()
# Note that in Python 3, maximum line length has increased from 76 to 78
self.assertEqual(message['Subject'].encode(), b'Long subject lines that get wrapped should contain a space continuation\n character to get expected behavior in Outlook and Thunderbird')
def test_message_header_overri | des(self):
"""
Specifying dates or message-ids in the extra headers overrides the
default values (#9233)
"""
headers = {"date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
email = EmailMessage('subject', 'content', 'from@example.com', ['to@example.com'], headers=headers)
self.assertEqual(sorted(email.message().items()), [
('Content-Transfer-Encoding', '7bit'),
('Content-Type', 'text/plain; charset="utf-8"'),
('From', 'from@example.com'),
('MIME-Version', '1.0'),
('Message-ID', 'foo'),
('Subject', 'subject'),
('To', 'to@example.com'),
('date', 'Fri, 09 Nov 2001 01:08:47 -0000'),
])
def test_from_header(self):
"""
Make sure we can manually set the From header (#9214)
"""
email = EmailMessage('Subject', 'Content', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
message = email.message()
self.assertEqual(message['From'], 'from@example.com')
def test_to_header(self):
"""
Make sure we can manually set the To header (#17444)
"""
email = EmailMessage('Subject', 'Content', 'bounce@example.com',
['list-subscriber@example.com', 'list-subscriber2@example.com'],
headers={'To': 'mailing-list@example.com'})
message = email.message()
self.assertEqual(message['To'], 'mailing-list@example.com')
self.assertEqual(email.to, ['list-subscriber@example.com', 'list-subscriber2@example.com'])
# If we don't set the To header manually, it should default to the `to` argument to the constructor
email = EmailMessage('Subject', 'Content', 'bounce@example.com',
['list-subscriber@example.com', 'list-subscriber2@example.com'])
message = email.message()
self.assertEqual(message['To'], 'list-subscriber@example.com, list-subscriber2@example.com')
self.assertEqual(email.to, ['list-subscriber@example.com', 'list-subscriber2@example.com'])
def test_multiple_message_call(self):
"""
Regression for #13259 - Make sure that headers are not changed when
calling EmailMessage.message()
"""
email = EmailMessage('Subject', 'Content', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
message = email.message()
self.assertEqual(message['From'], 'from@example.com')
message = email.message()
self.assertEqual(message['From'], 'from@example.com')
def test_unicode_address_header(self):
"""
Regression for #11144 - When a to/from/cc header contains unicode,
make sure the email addresses are parsed correctly (especially with
regards to commas)
"""
email = EmailMessage('Subject', 'Content', 'from@example.com', ['"Firstname Sürname" <to@example.com>', 'other@example.com'])
self.assertEqual(email.message()['To'], '=?utf-8?q?Firstname_S=C3=BCrname?= <to@example.com>, other@example.com')
email = EmailMessage('Subject', 'Content', 'from@example.com', ['"Sürname, Firstname" <to@example.com>', 'other@example.com'])
self.assertEqual(email.message()['To'], '=?utf-8?q?S=C3=BCrname=2C_Firstname?= <to@example.com>, other@example.com')
def test_unicode_headers(self):
email = EmailMessage("Gżegżółka", "Content", "from@example.com", ["to@example.com"],
headers={"Sender": '"Firstname Sürname" <sender@example.com>',
"Comments": 'My Sürname is non-A |
helloqiu/AsyncFTP | asyncftp/console/app.py | Python | mit | 2,207 | 0 | # -*- coding: utf-8 -*-
import json
from flask import Flask
from flask import request
from flask import jsonify
import time
from psutil import net_io_counters
from asyncftp import __version__
import threading
from asyncftp.Logger import _LogFormatter
t = time.time()
net = net_io_counters()
formatter = _LogFormatter(color=False)
log_message = str()
def make_app(server, queue):
app = Flask(__name__)
@app.route('/api/info', methods=['GET'])
def speed():
if request.method == 'GET':
global t
global net
temp_t = time.time()
p = net_io_counters()
result = dict()
result['speed'] = dict(
up=(p[0] - net[0]) / (temp_t - t),
down=(p[1] - net[1]) / (temp_t - t)
)
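            # up/down are bytes per second: deltas of the cumulative psutil
            # counters divided by the time elapsed since the previous poll.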
result['up_time'] = server.up_time
result['running'] = True if server.up_time else False
t = temp_t
net = p
return jsonify(result)
@app.route('/api/start', methods=['GET'])
def run_server():
if not server.running:
thread = threading.Thread(target=server.run)
thread.start()
return 'ok'
@app.route('/api/stop', methods=['GET'])
def close_server():
server.close()
return 'ok'
@app.route('/api/config', methods=['GET', 'POST'])
def config():
if request.method == 'GET':
return jsonify({
| 'host': server.host,
'port': str(server.port),
'version': __version__,
'refuse_ip': server.ip_refuse
})
if request.method == 'POST':
data = json.loads(request.data.decode('utf-8'))
for ip in data['refuse_ip']:
server.add_refuse_ip(ip)
re | turn 'ok'
@app.route('/api/log', methods=['GET'])
def log():
if request.method == 'GET':
result = str()
while not queue.empty():
record = queue.get(block=False)
result += formatter.format(record) + '\n'
global log_message
log_message += result
return log_message
return app
|
Rahul91/Pytweets | setup.py | Python | mit | 666 | 0.087087 | #!/usr/bin/python2.7
"""Twitter Project"""
__version__ = '1. | 1.1'
from setuptools import find_packages, setup
setup(name = 'Pytweets',
package = ['Pytweets'],
version = '0.3',
      description = 'A test module: Pytweets',
      Summary = 'A test module: Pytweets',
long_description = 'A module for printing all the tweets/re-tweets you/anyone have/has done till date.',
platforms = ["Linux"],
author = "Rahul Mishra",
author_email = "priyrahulmishra@gmail.com",
url= "https://github.com/Rahul91/pytweet",
download_url = "https://github.com/Rahul91/pytweet/tarball/0.1",
license = "MIT",
keywords = ['twitter', 'Tweets', 'testing'],
packages = find_packages()
) |
davischau/CMPUT410Lab6 | bookmarks/main/urls.py | Python | apache-2.0 | 295 | 0.037288 | from django.conf.urls import patterns,url
from main import views
urlpatterns = patterns('',
url(r'^$',views.index,name='index'),
url(r'^tags/$',views.tags,name='tags'),
url(r'^tags/(?P<tag_name>\w+ | )/$',views.tag,name='tag'), |
url(r'^add_link/$',views.add_link,name='add_link'),
) |
JulienMcJay/eclock | windows/Python27/Lib/site-packages/pywin32-218-py2.7-win32.egg/win32comext/shell/test/testShellItem.py | Python | gpl-2.0 | 2,921 | 0.003766 | # Test IShellItem and related interfaces
from win32com.shell import shell, shellcon, knownfolders
import unittest
class TestShellItem(unittest.TestCase):
def assertShellItemsEqual(self, i1, i2):
n1 = i1.GetDisplayName(shellcon.SHGDN_FORPARSING)
n2 = i2.GetDisplayName(shellcon.SHGDN_FORPARSING)
self.assertEqual(n1, n2)
def test_idlist_roundtrip(self):
pidl = shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_DESKTOP)
item = shell.SHCreateItemFromIDList(pidl, shell.IID_IShellItem)
pidl_back = shell.SHGetIDListFromObject(item)
self.assertEqual(pidl, pidl_back)
def test_parsing_name(self):
sf = shell.SHGetDesktopFolder()
flags = shellcon.SHCONTF_FOLDERS | shellcon.SHCONTF_NONFOLDERS
children = sf.EnumObjects(0, flags)
child_pidl = children.next()
name = sf.GetDisplayNameOf(child_pidl, shellcon.SHGDN_FORPARSING)
item = shell.SHCreateItemFromParsingName(name, None, shell.IID_IShellItem)
# test the name we get from the item is the same as from the folder.
self.assertEqual(name, item.GetDisplayName(shellcon.SHGDN_FORPARSING))
def test_pars | ing_relative(self):
desktop_pidl = shell.SHGetSpecialFolde | rLocation(0, shellcon.CSIDL_DESKTOP)
desktop_item = shell.SHCreateItemFromIDList(desktop_pidl, shell.IID_IShellItem)
sf = shell.SHGetDesktopFolder()
flags = shellcon.SHCONTF_FOLDERS | shellcon.SHCONTF_NONFOLDERS
children = sf.EnumObjects(0, flags)
child_pidl = children.next()
name_flags = shellcon.SHGDN_FORPARSING | shellcon.SHGDN_INFOLDER
name = sf.GetDisplayNameOf(child_pidl, name_flags)
item = shell.SHCreateItemFromRelativeName(desktop_item, name, None,
shell.IID_IShellItem)
# test the name we get from the item is the same as from the folder.
self.assertEqual(name, item.GetDisplayName(name_flags))
def test_create_in_known_folder(self):
item = shell.SHCreateItemInKnownFolder(knownfolders.FOLDERID_Desktop, 0,
None, shell.IID_IShellItem)
# this will do for now :)
def test_create_item_with_parent(self):
desktop_pidl = shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_DESKTOP)
desktop_item = shell.SHCreateItemFromIDList(desktop_pidl, shell.IID_IShellItem)
sf = shell.SHGetDesktopFolder()
flags = shellcon.SHCONTF_FOLDERS | shellcon.SHCONTF_NONFOLDERS
children = sf.EnumObjects(0, flags)
child_pidl = children.next()
item1 = shell.SHCreateItemWithParent(desktop_pidl, None, child_pidl, shell.IID_IShellItem)
item2 = shell.SHCreateItemWithParent(None, sf, child_pidl, shell.IID_IShellItem)
self.assertShellItemsEqual(item1, item2)
if __name__=='__main__':
unittest.main()
|
lfcnassif/MultiContentViewer | release/modules/ext/libreoffice/program/python-core-3.3.0/lib/fractions.py | Python | lgpl-3.0 | 23,033 | 0.00026 | # Originally contributed by Sjoerd Mullender.
# Significantly modified by Jeffrey Yasskin <jyasskin at gmail.com>.
"""Fraction, infinite-precision, real numbers."""
from decimal import Decimal
import math
import numbers
import operator
import re
import sys
__all__ = ['Fraction', 'gcd']
def gcd(a, b):
"""Calculate the Greatest Common Divisor of a and b.
Unless b==0, the result will have the same sign as b (so that when
b is divided by it, the result comes out positive).
"""
while b:
a, b = b, a%b
return a
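# e.g. gcd(24, 16) -> 8, and gcd(3, -7) -> -1 (the sign follows b, as noted above).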
# Constants related to the hash implementation; hash(x) is based
# on the reduction of x modulo the prime _PyHASH_MODULUS.
_PyHASH_MODUL | US = sys.hash_info.modulus
# Value to be used for rationals that reduce to infinity modu | lo
# _PyHASH_MODULUS.
_PyHASH_INF = sys.hash_info.inf
_RATIONAL_FORMAT = re.compile(r"""
\A\s* # optional whitespace at the start, then
(?P<sign>[-+]?) # an optional sign, then
(?=\d|\.\d) # lookahead for digit or .digit
(?P<num>\d*) # numerator (possibly empty)
(?: # followed by
(?:/(?P<denom>\d+))? # an optional denominator
| # or
(?:\.(?P<decimal>\d*))? # an optional fractional part
(?:E(?P<exp>[-+]?\d+))? # and optional exponent
)
\s*\Z # and optional whitespace to finish
""", re.VERBOSE | re.IGNORECASE)
class Fraction(numbers.Rational):
"""This class implements rational numbers.
In the two-argument form of the constructor, Fraction(8, 6) will
produce a rational number equivalent to 4/3. Both arguments must
be Rational. The numerator defaults to 0 and the denominator
defaults to 1 so that Fraction(3) == 3 and Fraction() == 0.
Fractions can also be constructed from:
- numeric strings similar to those accepted by the
float constructor (for example, '-2.3' or '1e10')
- strings of the form '123/456'
- float and Decimal instances
- other Rational instances (including integers)
"""
__slots__ = ('_numerator', '_denominator')
# We're immutable, so use __new__ not __init__
def __new__(cls, numerator=0, denominator=None):
"""Constructs a Rational.
Takes a string like '3/2' or '1.5', another Rational instance, a
numerator/denominator pair, or a float.
Examples
--------
>>> Fraction(10, -8)
Fraction(-5, 4)
>>> Fraction(Fraction(1, 7), 5)
Fraction(1, 35)
>>> Fraction(Fraction(1, 7), Fraction(2, 3))
Fraction(3, 14)
>>> Fraction('314')
Fraction(314, 1)
>>> Fraction('-35/4')
Fraction(-35, 4)
>>> Fraction('3.1415') # conversion from numeric string
Fraction(6283, 2000)
>>> Fraction('-47e-2') # string may include a decimal exponent
Fraction(-47, 100)
>>> Fraction(1.47) # direct construction from float (exact conversion)
Fraction(6620291452234629, 4503599627370496)
>>> Fraction(2.25)
Fraction(9, 4)
>>> Fraction(Decimal('1.47'))
Fraction(147, 100)
"""
self = super(Fraction, cls).__new__(cls)
if denominator is None:
if isinstance(numerator, numbers.Rational):
self._numerator = numerator.numerator
self._denominator = numerator.denominator
return self
elif isinstance(numerator, float):
# Exact conversion from float
value = Fraction.from_float(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, Decimal):
value = Fraction.from_decimal(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, str):
# Handle construction from strings.
m = _RATIONAL_FORMAT.match(numerator)
if m is None:
raise ValueError('Invalid literal for Fraction: %r' %
numerator)
numerator = int(m.group('num') or '0')
denom = m.group('denom')
if denom:
denominator = int(denom)
else:
denominator = 1
decimal = m.group('decimal')
if decimal:
scale = 10**len(decimal)
numerator = numerator * scale + int(decimal)
denominator *= scale
exp = m.group('exp')
if exp:
exp = int(exp)
if exp >= 0:
numerator *= 10**exp
else:
denominator *= 10**-exp
if m.group('sign') == '-':
numerator = -numerator
else:
raise TypeError("argument should be a string "
"or a Rational instance")
elif (isinstance(numerator, numbers.Rational) and
isinstance(denominator, numbers.Rational)):
numerator, denominator = (
numerator.numerator * denominator.denominator,
denominator.numerator * numerator.denominator
)
else:
raise TypeError("both arguments should be "
"Rational instances")
if denominator == 0:
raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
g = gcd(numerator, denominator)
self._numerator = numerator // g
self._denominator = denominator // g
return self
@classmethod
def from_float(cls, f):
"""Converts a finite float to a rational number, exactly.
Beware that Fraction.from_float(0.3) != Fraction(3, 10).
"""
if isinstance(f, numbers.Integral):
return cls(f)
elif not isinstance(f, float):
raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
(cls.__name__, f, type(f).__name__))
if math.isnan(f) or math.isinf(f):
raise TypeError("Cannot convert %r to %s." % (f, cls.__name__))
return cls(*f.as_integer_ratio())
@classmethod
def from_decimal(cls, dec):
"""Converts a finite Decimal instance to a rational number, exactly."""
from decimal import Decimal
if isinstance(dec, numbers.Integral):
dec = Decimal(int(dec))
elif not isinstance(dec, Decimal):
raise TypeError(
"%s.from_decimal() only takes Decimals, not %r (%s)" %
(cls.__name__, dec, type(dec).__name__))
if not dec.is_finite():
# Catches infinities and nans.
raise TypeError("Cannot convert %s to %s." % (dec, cls.__name__))
sign, digits, exp = dec.as_tuple()
digits = int(''.join(map(str, digits)))
if sign:
digits = -digits
if exp >= 0:
return cls(digits * 10 ** exp)
else:
return cls(digits, 10 ** -exp)
def limit_denominator(self, max_denominator=1000000):
"""Closest Fraction to self with denominator at most max_denominator.
>>> Fraction('3.141592653589793').limit_denominator(10)
Fraction(22, 7)
>>> Fraction('3.141592653589793').limit_denominator(100)
Fraction(311, 99)
>>> Fraction(4321, 8765).limit_denominator(10000)
Fraction(4321, 8765)
"""
# Algorithm notes: For any real number x, define a *best upper
# approximation* to x to be a rational number p/q such that:
#
# (1) p/q >= x, and
# (2) if p/q > r/s >= x then s > q, for any rational r/s.
#
# Define *best lower approximation* similarly. Then it can be
|
yosef-gao/zhuaxia | zhuaxia/commander.py | Python | mit | 7,171 | 0.012848 | # -*- coding:utf-8 -*-
import sys
import config ,util ,logging ,log,downloader
import xiami as xm
import netease
import re
from threadpool import ThreadPool
from time import sleep
from os import path
from threadpool import Terminate_Watcher
from proxypool import ProxyPool
LOG = log.get_logger("zxLogger")
dl_songs = []
total = 0
done = 0
fmt_parsing = u'Parsing: "%s" ..... [%s] %s'
fmt_has_song_nm = u'contains %d songs.'
fmt_single_song = u'[Track] %s'
border = log.hl(u'%s'% ('='*90), 'cyan')
pat_xm = r'^https?://[^/.]*\.xiami\.com/'
pat_163 = r'^https?://music\.163\.com/'
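# e.g. 'http://www.xiami.com/song/1775610' matches pat_xm and
# 'http://music.163.com/song?id=27955653' matches pat_163 (ids are illustrative).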
#proxypool
ppool = None
def shall_I_begin(in_str, is_file=False, is_hq=False, need_proxy_pool = False):
#start terminate_watcher
Terminate_Watcher()
global ppool
if need_proxy_pool:
        LOG.info(u'Initializing proxy pool')
        ppool = ProxyPool()
        LOG.info(u'proxy pool:[%d] initialized' % len(ppool.proxies))
#xiami obj
xiami_obj = xm.Xiami(config.XIAMI_LOGIN_EMAIL,\
config.XIAMI_LOGIN_PASSWORD, \
is_hq,proxies=ppool)
#netease obj
m163 = netease.Netease(is_hq, proxies=ppool)
if is_file:
from_file(xiami_obj, m163,in_str)
elif re.match(pat_xm, in_str):
from_url_xm(xiami_obj, in_str)
elif re.match(pat_163, in_str):
from_url_163(m163, in_str)
print border
if len(dl_songs):
        LOG.info(u' Total download tasks: %d \n Download starts in 3 seconds' % len(dl_songs))
        sleep(3)
        downloader.start_download(dl_songs)
    else:
        LOG.warning(u' Nothing to download, exiting automatically.')
def from_url_163(m163, url, verbose=True):
""" parse the input string (163 url), and do download"""
LOG.debug('processing 163 url: "%s"'% url)
msg = u''
if '/song?id=' in url:
song =netease.NeteaseSong(m163,url=url)
dl_songs.append(song)
        msg = fmt_parsing % (m163_url_abbr(url),u'Track', song.song_name)
elif '/album?id=' in url:
album = netease.NeteaseAlbum(m163, url)
dl_songs.extend(album.songs)
        msgs = [fmt_parsing % (m163_url_abbr(url),u'Album', album.artist_name+u' => '+album.album_name)]
if verbose:
for s in album.songs:
msgs.append(fmt_single_song %s.song_name)
msg = u'\n |-> '.join(msgs)
else:
msgs.append(fmt_has_song_nm % len(album.songs))
msg= u' => '.join(msgs)
elif '/playlist?id=' in url:
playlist = netease.NeteasePlayList(m163, url)
dl_songs.extend(playlist.songs)
        msgs = [ fmt_parsing % (m163_url_abbr(url),u'Playlist',playlist.playlist_name)]
if verbose:
for s in playlist.songs:
msgs.append( fmt_single_song % s.song_name)
msg = u'\n |-> '.join(msgs)
else:
msgs.append(fmt_has_song_nm % len(playlist.songs))
msg= u' => '.join(msgs)
elif '/artist?id=' in url:
topsong= netease.NeteaseTopSong(m163, url)
dl_songs.extend(topsong.songs)
        msgs = [fmt_parsing % (m163_url_abbr(url), u"Artist's top songs",topsong.artist_name)]
if verbose:
for s in topsong.songs:
msgs.append(fmt_single_song %s.song_name)
msg = u'\n |-> '.join(msgs)
else:
msgs.append( fmt_has_song_nm % len(topsong.songs))
msg = u' => '.join(msgs)
global total, done
done +=1
pre = ('[%d/%d] ' % (done, total)) if not verbose else ''
if not msg:
#unknown url
        LOG.error(u'%s [Netease] Unrecognized url [%s].' % (pre,url))
else:
LOG.info(u'%s%s'% (pre,msg))
def from_url_xm(xm_obj, url, verbose=True):
""" parse the input string (xiami url), and do download"""
LOG.debug('processing xiami url: "%s"'% url)
msg = u''
if '/collect/' in url:
collect = xm.Collection(xm_obj, url)
dl_songs.extend(collect.songs)
        msgs = [ fmt_parsing % (xiami_url_abbr(url),u'Collection',collect.collection_name)]
if verbose:
for s in collect.songs:
msgs.append( fmt_single_song % s.song_name)
msg = u'\n |-> '.join(msgs)
else:
msgs.append(fmt_has_song_nm % len(collect.songs))
msg= u' => '.join(msgs)
elif '/song/' in url:
song = xm.XiamiSong(xm_obj, url=url)
dl_songs.append(song)
        msg = fmt_parsing % (xiami_url_abbr(url),u'Track', song.song_name)
elif '/album/' in url:
album = xm.Album(xm_obj, url)
dl_songs.extend(album.songs)
        msgs = [fmt_parsing % (xiami_url_abbr(url),u'Album', album.artist_name+u' => '+album.album_name)]
if verbose:
for s in album.songs:
msgs.append(fmt_single_song %s.song_name)
msg = u'\n |-> '.join(msgs)
else:
msgs.append(fmt_has_song_nm % len(album.songs))
msg= u' => '.join(msgs)
elif '/lib-song/u/' in url:
if verbose:
            LOG.warning(u'[Xiami] Parsing songs may take a while for users with large favorites, please be patient')
fav = xm.Favorite(xm_obj, url, verbose)
dl_songs.extend(fav.songs)
        msgs = [fmt_parsing % (xiami_url_abbr(url), u'User favorites','')]
if verbose:
for s in fav.songs:
msgs.append(fmt_single_song %s.song_name)
msg = u'\n |-> '.join(msgs)
else:
msgs.append( fmt_has_song_nm % len(fav.songs))
msg = u' => '.join(msgs)
elif re.search(r'/artist/top/id/\d+', url):
topsong=xm.TopSong(xm_obj, url)
dl_songs.extend(topsong.songs)
        msgs = [fmt_parsing % (xiami_url_abbr(url), u"Artist's top songs",topsong.artist_name)]
if verbose:
for s in topsong.songs:
msgs.append(fmt_single_song %s.song_name)
msg = u'\n |-> '.join(msgs)
else:
msgs.append( fmt_has_song_nm | % len(topsong.songs))
msg = u' => '.join(msgs)
global total, done
done +=1
pre = ('[%d/%d] ' % (done, total)) if not verbose else ''
if not msg:
#unknown url
        LOG.error(u'%s [Xiami] Unrecognized url [%s].' % (pre,url))
else:
LOG.info(u'%s%s'% (pre,msg))
def from_file(xm_obj,m163, infile):
""" download objects (songs, albums...) from an input file. """
urls = []
with open(infile) as f:
urls = f.readlines()
global total, done
total = len(urls)
print border
    LOG.info(u' Total links in file: %d' % total)
print border
pool = ThreadPool(config.THREAD_POOL_SIZE)
for link in [u for u in urls if u]:
link = link.rstrip('\n')
if re.match(pat_xm, link):
pool.add_task(from_url_xm, xm_obj,link, verbose=False)
elif re.match(pat_163, link):
pool.add_task(from_url_163, m163,link, verbose=False)
else:
            LOG.warning(u' Skipping unrecognized url [%s].' % link)
pool.wait_completion()
def xiami_url_abbr(url):
    return re.sub(pat_xm,u'[Xiami] ',url)
def m163_url_abbr(url):
    return re.sub(pat_163,u'[Netease] ',url)
|
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/behave/model_describe.py | Python | bsd-3-clause | 3,364 | 0 | # -*- coding: utf-8 -*-
"""
Provides textual descriptions for :mod:`behave.model` elements.
"""
from behave.textutil import indent
# -----------------------------------------------------------------------------
# FUNCTIONS:
# ------------------------------------------------------- | ----------------------
def escape_cell(cell):
"""
Escape table cell contents.
:param cell: Table cell (as unicode string).
:return: Escaped cell (as unicode string).
"""
cell = cell.replace(u'\\', u'\\\\')
cell = cell.replace(u'\n', u'\\n')
cell = cell.replace(u'|', u'\\|')
return cell
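# e.g. escape_cell(u'a|b\nc') returns u'a\\|b\\nc', i.e. the literal text: a\|b\nc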
def escape_triple_quotes(text | ):
"""
Escape triple-quotes, used for multi-line text/doc-strings.
"""
return text.replace(u'"""', u'\\"\\"\\"')
# -----------------------------------------------------------------------------
# CLASS:
# -----------------------------------------------------------------------------
class ModelDescriptor(object):
@staticmethod
def describe_table(table, indentation=None):
"""
Provide a textual description of the table (as used w/ Gherkin).
:param table: Table to use (as :class:`behave.model.Table`)
:param indentation: Line prefix to use (as string, if any).
:return: Textual table description (as unicode string).
"""
# -- STEP: Determine output size of all cells.
cell_lengths = []
all_rows = [table.headings] + table.rows
for row in all_rows:
lengths = [len(escape_cell(c)) for c in row]
cell_lengths.append(lengths)
# -- STEP: Determine max. output size for each column.
max_lengths = []
for col in range(0, len(cell_lengths[0])):
max_lengths.append(max([c[col] for c in cell_lengths]))
# -- STEP: Build textual table description.
lines = []
for r, row in enumerate(all_rows):
line = u"|"
for c, (cell, max_length) in enumerate(zip(row, max_lengths)):
pad_size = max_length - cell_lengths[r][c]
line += u" %s%s |" % (escape_cell(cell), " " * pad_size)
line += u"\n"
lines.append(line)
if indentation:
return indent(lines, indentation)
# -- OTHERWISE:
return u"".join(lines)
@staticmethod
def describe_docstring(doc_string, indentation=None):
"""
Provide a textual description of the multi-line text/triple-quoted
doc-string (as used w/ Gherkin).
:param doc_string: Multi-line text to use.
:param indentation: Line prefix to use (as string, if any).
:return: Textual table description (as unicode string).
"""
text = escape_triple_quotes(doc_string)
text = u'"""\n' + text + '\n"""\n'
if indentation:
text = indent(text, indentation)
return text
class ModelPrinter(ModelDescriptor):
def __init__(self, stream):
super(ModelPrinter, self).__init__()
self.stream = stream
def print_table(self, table, indentation=None):
self.stream.write(self.describe_table(table, indentation))
self.stream.flush()
def print_docstring(self, text, indentation=None):
self.stream.write(self.describe_docstring(text, indentation))
self.stream.flush()
|
sunfounder/SunFounder_SensorKit_for_RPi2 | Python/24_touch_switch.py | Python | gpl-2.0 | 1,264 | 0.04193 | #!/usr/bin/env python3
import RPi.GPIO as GPIO
TouchPin = 11
Gpin = 13
Rpin = 12
tmp = 0
def setup():
GPIO.setmode(GPIO.BOARD) # Numbers GPIOs by physical location
GPIO.setup(Gpin, GPIO.OUT) # Set Green Led Pin mode to output
GPIO.setup(Rpin, GPIO.OUT) # Set Red Led Pin mode to output
GPIO.setup(TouchPin, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Set BtnPin's mode is input, and pull up to high level(3.3V)
def Led(x):
if x == 0:
GPIO.output(Rpin, 1)
GPI | O.output(Gpin, 0)
if x == 1:
GPIO.output(Rpin, 0)
GPIO.output(Gpin, 1)
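# Assumption: the dual-color LED module is wired active-low, so writing 0
# lights an LED and 1 turns it off; a touch reading of 0 (pressed) shows green.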
def Print(x):
global tmp
if x != tmp:
if x == 0:
print (' **********')
print (' * ON *')
print (' **********')
if x == 1:
print (' **********')
print (' * OFF *')
print (' **********')
tmp = x
def loop():
while True:
Led(GPIO.input(TouchPin))
Print(GPIO.input(TouchPin))
def destroy():
GPIO.output(Gpi | n, GPIO.HIGH) # Green led off
GPIO.output(Rpin, GPIO.HIGH) # Red led off
GPIO.cleanup() # Release resource
if __name__ == '__main__': # Program start from here
setup()
try:
loop()
except KeyboardInterrupt: # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
destroy()
|