| repo_name (string, length 5–100) | path (string, length 4–231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, length 0–8.16k) | middle (string, length 3–512) | suffix (string, length 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
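Each row below pairs repository metadata with a Python source file split into prefix, middle, and suffix cells (a fill-in-the-middle-style split); concatenating the three cells in order reconstructs the file excerpt. A minimal sketch (the `row` dict and field access are assumptions based on the header above, not part of the dataset):

def reassemble(row):
    # prefix + middle + suffix restores the original source excerpt
    return row["prefix"] + row["middle"] + row["suffix"]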
| bitkeeper/python-opcua | opcua/common/ua_utils.py | Python | lgpl-3.0 | 8,286 | 0.002414 |
"""
Usefull method and classes not belonging anywhere and depending on opcua library
"""
from dateutil import parser
from datetime import datetime
from enum import Enum, IntEnum
import uuid
from opcua import ua
from opcua.ua.uaerrors import UaError
def val_to_string(val):
"""
convert a python object or python-opcua object to a string
which should be easy to understand for human
easy to modify, and not too hard to parse back ....not easy
meant for UI or command lines
"""
if isinstance(val, (list, tuple)):
res = []
for v in val:
res.append(val_to_string(v))
return "[" + ", ".join(res) + "]"
if hasattr(val, "to_string"):
val = val.to_string()
elif isinstance(val, ua.StatusCode):
val = val.name
elif isinstance(val, (Enum, IntEnum)):
val = val.name
elif isinstance(val, ua.DataValue):
val = variant_to_string(val.Value)
elif isinstance(val, ua.XmlElement):
val = val.Value
elif isinstance(val, str):
pass
elif isinstance(val, bytes):
val = str(val)
elif isinstance(val, datetime):
val = val.isoformat()
elif isinstance(val, (int, float)):
val = str(val)
else:
# FIXME: Some types are probably missing!
val = str(val)
return val
def variant_to_string(var):
"""
convert a variant to a string which should be easy to understand for human
easy to modify, and not too hard to parse back ....not easy
meant for UI or command lines
"""
return val_to_string(var.Value)
def string_to_val(string, vtype):
"""
Convert back a string to a python or python-opcua object
Note: no error checking is done here, supplying null strings could raise exceptions (datetime and guid)
"""
string = string.strip()
if string.startswith("["):
string = string[1:-1]
var = []
for s in string.split(","):
s = s.strip()
val = string_to_val(s, vtype)
var.append(val)
return var
if vtype == ua.VariantType.Null:
val = None
elif vtype == ua.VariantType.Boolean:
if string in ("True", "true", "on", "On", "1"):
val = True
else:
val = False
elif vtype in (ua.VariantType.SByte, ua.VariantType.Int16, ua.VariantType.Int32, ua.VariantType.Int64):
val = int(string)
elif vtype in (ua.VariantType.Byte, ua.VariantType.UInt16, ua.VariantType.UInt32, ua.VariantType.UInt64):
val = int(string)
elif vtype in (ua.VariantType.Float, ua.VariantType.Double):
val = float(string)
elif vtype == ua.VariantType.XmlElement:
val = ua.XmlElement(string)
elif vtype == ua.VariantType.String:
val = string
elif vtype == ua.VariantType.ByteString:
val = string.encode("utf-8")
elif vtype in (ua.VariantType.NodeId, ua.VariantType.ExpandedNodeId):
val = ua.NodeId.from_string(string)
elif vtype == ua.VariantType.QualifiedName:
val = ua.QualifiedName.from_string(string)
elif vtype == ua.VariantType.DateTime:
val = parser.parse(string)
elif vtype == ua.VariantType.LocalizedText:
val = ua.LocalizedText(string)
elif vtype == ua.VariantType.StatusCode:
val = ua.StatusCode(string)
elif vtype == ua.VariantType.Guid:
val = uuid.UUID(string)
else:
# FIXME: Some types are probably missing!
raise NotImplementedError
return val
def string_to_variant(string, vtype):
"""
convert back a string to an ua.Variant
"""
return ua.Variant(string_to_val(string, vtype), vtype)
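# A hedged round-trip sketch (example values assumed, not from the source):
#   string_to_val("[1, 2, 3]", ua.VariantType.Int32)  ->  [1, 2, 3]
#   val_to_string([1, 2, 3])                          ->  "[1, 2, 3]"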
def get_node_children(node, nodes=None):
"""
Get recursively all children of a node
"""
if nodes is None:
nodes = [node]
for child in node.get_children():
nodes.append(child)
get_node_children(child, nodes)
return nodes
def get_node_subtypes(node, nodes=None):
if nodes is None:
nodes = [node]
for child in node.get_children(refs=ua.ObjectIds.HasSubtype):
nodes.append(child)
get_node_subtypes(child, nodes)
return nodes
def get_node_supertypes(node, includeitself=False, skipbase=True):
"""
return get all subtype parents of node recursive
:param node: can be a ua.Node or ua.NodeId
:param includeitself: include also node to the list
:param skipbase don't include the toplevel one
:returns list of ua.Node, top parent first
"""
parents = []
if includeitself:
parents.append(node)
parents.extend(_get_node_supertypes(node))
if skipbase and len(parents) > 1:
parents = parents[:-1]
return parents
def _get_node_supertypes(node):
"""
recursive implementation of get_node_derived_from_types
"""
basetypes = []
parent = get_node_supertype(node)
if parent:
basetypes.append(parent)
basetypes.extend(_get_node_supertypes(parent))
return basetypes
def get_node_supertype(node):
"""
return node supertype or None
"""
supertypes = node.get_referenced_nodes(refs=ua.ObjectIds.HasSubtype,
direction=ua.BrowseDirection.Inverse,
includesubtypes=True)
if supertypes:
return supertypes[0]
else:
return None
def is_child_present(node, browsename):
"""
return if a browsename is present a child from the provide node
:param node: node wherein to find the browsename
:param browsename: browsename to search
:returns returne True if the browsename is present else False
"""
child_descs = node.get_children_descriptions()
for child_desc in child_descs:
if child_desc.BrowseName == browsename:
return True
return False
def data_type_to_variant_type(dtype_node):
"""
Given a Node datatype, find out the variant type to encode
data. This is not exactly straightforward...
"""
base = get_base_data_type(dtype_node)
if base.nodeid.Identifier != 29:
return ua.VariantType(base.nodeid.Identifier)
else:
        # we have an enumeration, value is an Int32
return ua.VariantType.Int32
def get_base_data_type(datatype):
"""
Looks up the base datatype of the provided datatype Node
The base datatype is either:
A primitive type (ns=0, i<=21) or a complex one (ns=0 i>21 and i<=30) like Enum and Struct.
Args:
datatype: NodeId of a datype of a variable
Returns:
NodeId of datatype base or None
|
in case base datype can not be determined
"""
base = datatype
while base:
if base.nodeid.NamespaceIndex == 0 and isinstance(base.nodeid.Identifier, int) and base.nodeid.Identifier <= 30:
return base
base = get_node_supertype(base)
raise ua.UaError("Datatype must be a subtype of builtin types
|
{0!s}".format(datatype))
def get_nodes_of_namespace(server, namespaces=None):
"""
    Get the nodes of one or more namespaces.
Args:
server: opc ua server to use
namespaces: list of string uri or int indexes of the namespace to export
Returns:
List of nodes that are part of the provided namespaces
"""
if namespaces is None:
namespaces = []
ns_available = server.get_namespace_array()
if not namespaces:
namespaces = ns_available[1:]
elif isinstance(namespaces, (str, int)):
namespaces = [namespaces]
    # make sure all namespaces are indexes (if needed, convert strings to indexes)
    namespace_indexes = [n if isinstance(n, int) else ns_available.index(n) for n in namespaces]
    # filter nodeids based on the provided namespaces and convert each nodeid to a node
nodes = [server.get_node(nodeid) for nodeid in server.iserver.aspace.keys()
if nodeid.NamespaceIndex != 0 and nodeid.NamespaceIndex in namespace_indexes]
return nodes
def get_default_value(uatype):
if isinstance(uatype, ua.VariantType):
return ua.get_default_values(uatype)
    elif hasattr(ua.VariantType, uatype):
        return ua.get_default_values(getattr(ua.VariantType, uatype))
| mbbill/thefuck | thefuck/rules/no_command.py | Python | mit | 580 | 0 |
from difflib import get_close_matches
from thefuck.utils import sudo_support, get_all_executables, get_closest
@sudo_support
def match(command, settings):
    return 'not found' in command.stderr and \
        bool(get_close_matches(command.script.split(' ')[0],
                               get_all_executables()))
@sudo_support
def get_new_command(command, settings):
old_command = command.script.split(' ')[0]
new_command = get_closest(old_command, get_all_executables())
return ' '.join([new_command] + command.script.split(' ')[1:])
priority = 3000
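# Hedged usage sketch (the command below is hypothetical, not from the source):
# for a command with script 'gti status' and stderr 'gti: command not found',
# match() is True when a close executable such as 'git' exists, and
# get_new_command() rewrites the script to 'git status'.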
| akaihola/django | tests/modeltests/timezones/tests.py | Python | bsd-3-clause | 49,481 | 0.002466 |
import datetime
import os
import sys
import time
import warnings
try:
import pytz
except ImportError:
pytz = None
from django.conf import settings
from django.core import serializers
from django.core.urlresolvers import reverse
from django.db import connection
from django.db.models import Min, Max
from django.http import HttpRequest
from django.template import Context, RequestContext, Template, TemplateSyntaxError
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.tzinfo import FixedOffset
from django.utils.unittest import skipIf, skipUnless
from .forms import EventForm, EventSplitForm, EventModelForm
from .models import Event, MaybeEvent, Session, SessionEvent, Timestamp, AllDayEvent
# These tests use the EAT (Eastern Africa Time) and ICT (Indochina Time)
# time zones, which don't have Daylight Saving Time, so we can represent them
# easily with FixedOffset, and use them directly as tzinfo in the constructors.
# settings.TIME_ZONE is forced to EAT. Most tests use a variant of
# datetime.datetime(2011, 9, 1, 13, 20, 30), which translates to
# 10:20:30 in UTC and 17:20:30 in ICT.
UTC = timezone.utc
EAT = FixedOffset(180) # Africa/Nairobi
ICT = FixedOffset(420) # Asia/Bangkok
TZ_SUPPORT = hasattr(time, 'tzset')
# On OSes that don't provide tzset (Windows), we can't set the timezone
# in which the program runs. As a consequence, we must skip tests that
# don't enforce a specific timezone (with timezone.override or equivalent),
# or attempt to interpret naive datetimes in the default timezone.
requires_tz_support = skipUnless(TZ_SUPPORT,
"This test relies on the ability to run a program in an arbitrary "
"time zone, but your operating system isn't able to do that.")
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=False)
class LegacyDatabaseTests(TestCase):
def test_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipUnlessDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipIfDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
Event.objects.create(dt=dt)
event = Event.objects.get()
# microseconds are lost during a round-trip in the database
self.assertEqual(event.dt, dt.replace(microsecond=0))
@skipUnlessDBFeature('supports_timezones')
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination actually never happens.
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
# microseconds are lost during a round-trip in the database
self.assertEqual(event.dt.replace(tzinfo=EAT), dt.replace(microsecond=0))
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination is no longer possible since timezone support
# was removed from the SQLite backend -- it didn't work.
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_utc_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
        event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# django.db.backend.utils.typecast_dt will just drop the
# timezone, so a round-trip in the database alters the data (!)
# interpret the naive datetime in local time and you get a wrong value
self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt)
# interpret the naive datetime in original time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=UTC), dt)
    @skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination is no longer possible since timezone support
# was removed from the SQLite backend -- it didn't work.
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_other_timezone_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# django.db.backend.utils.typecast_dt will just drop the
# timezone, so a round-trip in the database alters the data (!)
# interpret the naive datetime in local time and you get a wrong value
self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt)
# interpret the naive datetime in original time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=ICT), dt)
@skipIfDBFeature('supports_timezones')
    def test_aware_datetime_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
with self.assertRaises(ValueError):
Event.objects.create(dt=dt)
def test_auto_now_and_auto_now_add(self):
now = datetime.datetime.now()
past = now - datetime.timedelta(seconds=2)
future = now + datetime.timedelta(seconds=2)
Timestamp.objects.create()
ts = Timestamp.objects.get()
        self.assertLess(past, ts.created)
        self.assertLess(past, ts.updated)
        self.assertGreater(future, ts.created)
        self.assertGreater(future, ts.updated)
def test_query_filter(self):
dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30)
dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30)
Event.objects.create(dt=dt1)
Event.objects.create(dt=dt2)
self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2)
self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1)
self.assertEqual(Event.object
| hehongliang/tensorflow | tensorflow/python/keras/engine/training.py | Python | apache-2.0 | 109,170 | 0.004598 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training-related part of the Keras engine.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import weakref
import numpy as np
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import losses
from tensorflow.python.keras import metrics as metrics_module
from tensorflow.python.keras import optimizers
from tensorflow.python.keras.engine import distributed_training_utils
from tensorflow.python.keras.engine import training_arrays
from tensorflow.python.keras.engine import training_distributed
from tensorflow.python.keras.engine import training_eager
from tensorflow.python.keras.engine import training_generator
from tensorflow.python.keras.engine import training_utils
from tensorflow.python.keras.engine.network import Network
from tensorflow.python.keras.utils import data_utils
from tensorflow.python.keras.utils.generic_utils import slice_arrays
from tensorflow.python.keras.utils.losses_utils import squeeze_or_expand_dimensions
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import optimizer as tf_optimizer_module
from tensorflow.python.training.checkpointable import base as checkpointable
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
@tf_export('keras.models.Model', 'keras.Model')
class Model(Network):
"""`Model` groups layers into an object with training and inference features.
There are two ways to instantiate a `Model`:
1 - With the "functional API", where you start from `Input`,
you chain layer calls to specify the model's forward pass,
and finally you create your model from inputs and outputs:
```python
import tensorflow as tf
inputs = tf.keras.Input(shape=(3,))
x = tf.keras.layers.Dense(4, activation=tf.nn.relu)(inputs)
outputs = tf.keras.layers.Dense(5, activation=tf.nn.softmax)(x)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
```
2 - By subclassing the `Model` class: in that case, you should define your
layers in `__init__` and you should implement the model's forward pass
in `call`.
```python
import tensorflow as tf
class MyModel(tf.keras.Model):
def __init__(self):
super(MyModel, self).__init__()
self.dense1 = tf.keras.layers.Dense(4, activation=tf.nn.relu)
self.dense2 = tf.keras.layers.Dense(5, activation=tf.nn.softmax)
def call(self, inputs):
x = self.dense1(inputs)
return self.dense2(x)
model = MyModel()
```
If you subclass `Model`, you can optionally have
a `training` argument (boolean) in `call`, which you can use to specify
a different behavior in training and inference:
```python
import tensorflow as tf
class MyModel(tf.keras.Model):
def __init__(self):
super(MyModel, self).__init__()
self.dense1 = tf.keras.layers.Dense(4, activation=tf.nn.relu)
self.dense2 = tf.keras.layers.Dense(5, activation=tf.nn.softmax)
self.dropout = tf.keras.layers.Dropout(0.5)
def call(self, inputs, training=False):
x = self.dense1(inputs)
if training:
x = self.dropout(x, training=training)
return self.dense2(x)
model = MyModel()
```
"""
def __init__(self, *args, **kwargs):
super(Model, self).__init__(*args, **kwargs)
# Create a cache for iterator get_next op.
self._iterator_get_next = weakref.WeakKeyDictionary()
# Create a cache for dataset - uninitialized iterators
self._dataset_iterator_cache = weakref.WeakKeyDictionary()
# initializing _distribution_strategy here since it is possible to call
# predict on a model without compiling it.
self._distribution_strategy = None
self.run_eagerly = None
  def _set_sample_weight_attributes(self, sample_weight_mode,
                                    skip_target_weighing_indices):
    """Sets sample weight related attributes on the model."""
    sample_weights, sample_weight_modes = training_utils.prepare_sample_weights(
        self.output_names, sample_weight_mode, skip_target_weighing_indices)
self.sample_weights = sample_weights
self.sample_weight_modes = sample_weight_modes
self._feed_sample_weight_modes = [
sample_weight_modes[i]
for i in range(len(self.outputs))
if i not in skip_target_weighing_indices
]
self._feed_sample_weights = [
sample_weights[i]
for i in range(len(sample_weights))
if i not in skip_target_weighing_indices
]
def _cache_output_metric_attributes(self, metrics, weighted_metrics):
"""Caches metric name and function attributes for every model output."""
output_shapes = [
None if output is None else output.get_shape().as_list()
for output in self.outputs
]
self._per_output_metrics = training_utils.collect_per_output_metric_info(
metrics, self.output_names, output_shapes, self.loss_functions)
self._per_output_weighted_metrics = \
training_utils.collect_per_output_metric_info(
weighted_metrics, self.output_names, output_shapes,
self.loss_functions, self.sample_weights)
def _add_unique_metric_name(self, metric_name, output_index):
"""Makes the metric name unique and adds it to the model's metric name list.
If there are multiple outputs for which the metrics are calculated, the
metric names have to be made unique by appending an integer.
Arguments:
metric_name: Metric name that corresponds to the metric specified by the
user. For example: 'acc'.
output_index: The index of the model output for which the metric name is
being added.
Returns:
string, name of the model's unique metric name
"""
if len(self.output_names) > 1:
metric_name = '%s_%s' % (self.output_names[output_index], metric_name)
j = 1
base_metric_name = metric_name
while metric_name in self._compile_metrics_names:
metric_name = '%s_%d' % (base_metric_name, j)
j += 1
return metric_name
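  # Example (sketch): with output_names ['out1', 'out2'] and metric 'acc',
  # this yields 'out1_acc' and 'out2_acc'; a name already present in
  # _compile_metrics_names gets an integer suffix, e.g. 'out1_acc_1'.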
@property
def metrics(self):
"""Returns the model's metrics added using `compile`, `add_metric` APIs."""
metrics = []
if self._is_compiled:
metrics += self._compile_stateful_metric_functions
return metrics + super(Model, self).metrics
@property
def metrics_names(self):
"""Returns the model's display labels for all outputs."""
metrics_names = []
if self._is_compiled:
metrics_names += self._compile_metrics_names # Includes names of losses.
# Add metric names from layers.
for layer in self.layers:
metrics_names += [m.name for m in layer._metrics] # pylint: disable=protected-access
metrics_names += [m.name for m in self._metrics]
return metrics_names
@property
def _all_metrics_tensors(self):
"""Returns the network's symbolic metric tensors."""
metrics_tensors = {}
if self._is_compiled:
metrics_tensors.update(self._compile_metrics_tensors)
metrics_tensors.update(super(Model, self)._all_metrics_tensors)
return metrics_tensors
@property
def _all_stateful_me
| uwosh/uwosh.emergency.client | uwosh/emergency/client/importexport.py | Python | gpl-2.0 | 877 | 0.010262 |
import rsa
from Products.CMFCore.utils import getToolByName
from uwosh.simpleemergency.utils import disable_emergency
se_default_profile = 'profile-uwosh.simpleemergency:default'
def install(context):
if not context.readDataFile('uwosh.emergency.client.txt'):
return
site = context.getSite()
    qi = getToolByName(site, 'portal_quickinstaller')
if qi.isProductInstalled('uwosh.simpleemergency') or qi.isProductInstalled('uwosh.emergency.master'):
        raise Exception('You cannot install uwosh.simpleemergency or uwosh.emergency.master on the same site as the client.')
portal_setup = getToolByName(site, 'portal_setup')
portal_setup.runImportStepFromProfile(se_default_profile, 'propertiestool')
portal_setup.runImportStepFromProfile(se_default_profile, 'cssregistry')
    disable_emergency(site)  # disable by default
| mlecours/netman | netman/api/switch_api.py | Python | apache-2.0 | 29,194 | 0.002877 |
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from netman.api.api_utils import BadRequest, to_response
from netman.api.objects.interface import SerializableInterface
from netman.api.objects.bond import SerializableBond
from netman.api.objects.vlan import SerializableVlan
from netman.api.switch_api_base import SwitchApiBase
from netman.api.validators import Switch, is_boolean, is_vlan_number, Interface, Vlan, resource, content, is_ip_network, \
IPNetworkResource, is_access_group_name, Direction, is_vlan, is_bond, Bond, \
is_bond_link_speed, is_bond_number, is_description, is_vrf_name, \
is_vrrp_group, VrrpGroup, is_dict_with, optional, is_type
class SwitchApi(SwitchApiBase):
def hook_to(self, server):
server.add_url_rule('/switches/<hostname>/vlans', view_func=self.get_vlans, methods=['GET'])
server.add_url_rule('/switches/<hostname>/vlans', view_func=self.add_vlan, methods=['POST'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>', view_func=self.remove_vlan, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/ips', view_func=self.add_ip, methods=['POST'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/ips/<path:ip_network>', view_func=self.remove_ip, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/vrrp-groups', view_func=self.add_vrrp_group, methods=['POST'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/vrrp-groups/<vrrp_group_id>', view_func=self.remove_vrrp_group, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/access-groups/<direction>', view_func=self.set_vlan_access_group, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/access-groups/<direction>', view_func=self.remove_vlan_access_group, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/vrf-forwarding', view_func=self.set_vlan_vrf, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/vrf-forwarding', view_func=self.remove_vlan_vrf, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/dhcp-relay-server', view_func=self.add_dhcp_relay_server, methods=['POST'])
server.add_url_rule('/switches/<hostname>/vlans/<vlan_number>/dhcp-relay-server/<ip_network>', view_func=self.remove_dhcp_relay_server, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/interfaces', view_func=self.get_interfaces, methods=['GET'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/shutdown', view_func=self.set_shutdown_state, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/port-mode', view_func=self.set_port_mode, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/access-vlan', view_func=self.set_access_vlan, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/access-vlan', view_func=self.remove_access_vlan, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/trunk-vlans', view_func=self.add_trunk_vlan, methods=['POST'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/trunk-vlans/<vlan_number>', view_func=self.remove_trunk_vlan, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/trunk-native-vlan', view_func=self.configure_native_vlan, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/trunk-native-vlan', view_func=self.remove_native_vlan, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/bond-master', view_func=self.add_interface_to_bond, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/bond-master', view_func=self.remove_interface_from_bond, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/description', view_func=self.set_interface_description, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/description', view_func=self.remove_interface_description, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/spanning-tree', view_func=self.edit_interface_spanning_tree, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/interfaces/<path:interface_id>/lldp', view_func=self.enable_lldp, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/bonds', view_func=self.get_bonds, methods=['GET'])
server.add_url_rule('/switches/<hostname>/bonds', view_func=self.add_bond, methods=['POST'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>', view_func=self.get_bond, methods=['GET'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>', view_func=self.remove_bond, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/link-speed', view_func=self.set_bond_link_speed, methods=['PUT'])
        server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/port-mode', view_func=self.set_bond_port_mode, methods=['PUT'])
        server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/access-vlan', view_func=self.set_bond_access_vlan, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/access-vlan', view_func=self.remove_bond_access_vlan, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/trunk-vlans', view_func=self.add_bond_trunk_vlan, methods=['POST'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/trunk-vlans/<vlan_number>', view_func=self.remove_bond_trunk_vlan, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/trunk-native-vlan', view_func=self.configure_bond_native_vlan, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/trunk-native-vlan', view_func=self.remove_bond_native_vlan, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/description', view_func=self.set_bond_description, methods=['PUT'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/description', view_func=self.remove_bond_description, methods=['DELETE'])
server.add_url_rule('/switches/<hostname>/bonds/<bond_number>/spanning-tree', view_func=self.edit_bond_spanning_tree, methods=['PUT'])
return self
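    # Route shape (summarizing the rules above; 'sw01' is a hypothetical host):
    # POST on a collection adds, PUT on a leaf sets, DELETE on a leaf removes,
    # e.g. PUT /switches/sw01/vlans/100/vrf-forwarding -> set_vlan_vrf.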
@to_response
@resource(Switch)
def get_vlans(self, switch):
"""
        Displays information about all VLANs
:arg str hostname: Hostname or IP of the switch
:code 200 OK:
Example output:
.. literalinclude:: ../../../tests/api/fixtures/get_switch_hostname_vlans.json
:language: json
"""
vlans = sorted(switch.get_vlans(), key=lambda x: x.number)
return 200, [SerializableVlan(vlan) for vlan in vlans]
@to_response
@content(is_vlan)
@resource(Switch)
def add_vlan(self, switch, number, name):
"""
        Create a new VLAN
:arg str hostname: Hostname or IP of the switch
:body:
Highlighted fields are mandatory
.. literalinclude:: ../../../tests/api/fixtures/post_switch_hostname_vlans.json
:language: json
| h2oloopan/easymerge | EasyMerge/merger/unparse.py | Python | mit | 24,407 | 0.007211 |
"Usage: unparse.py <path to source file>"
import sys
import ast
import cStringIO
import os
# Large float and imaginary literals get turned into infinities in the AST.
# We unparse those infinities to INFSTR.
INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
def interleave(inter, f, seq):
"""Call f on each item in seq, calling inter() in between.
"""
seq = iter(seq)
try:
f(next(seq))
except StopIteration:
pass
else:
for x in seq:
inter()
f(x)
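# Example (sketch; write_item, items and out are hypothetical names):
#   interleave(lambda: out.write(", "), write_item, items)
# calls write_item on every element, writing ", " between items but not after
# the last one.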
class UTree:
def __init__(self,name=None):
self._name = name
self._attr = {}
def addAttr(self, attr, value):
self._attr[attr] = value
def output(self,level,tree=None):
        for i in range(level):
            print "\t",  # trailing comma: indent without a newline (Python 2)
if tree==None:
print "Type:",self._name
for i in self._attr:
self.output(level+1,self._attr[i])
        elif tree.__class__.__name__=="UTree":
print "Type:",tree._name
for i in tree._attr:
self.output(level+1,tree._attr[i])
else:
print "Other:",tree
class Unifier:
def __init__(self, tree1, tree2):
self._t1 = tree1
self._t2 = tree2
self._utree = UTree()
self._utree = self.compare_trees(self._t1, self._t2)
def compare_trees(self,t1,t2):
if self.check_entire_tree(t1,t2):
print "t1=t2"
return t1
else:
if self.check_tree_name(t1, t2):
print "t1,t2 have same type:",t1.__class__.__name__
utree = UTree(t1.__class__.__name__)
meth = getattr(self, "_"+t1.__class__.__name__)
(vals,nodes) = meth(t1,t2)
print nodes
for attr in nodes:
node = nodes[attr]
utree.addAttr(attr, self.compare_trees(node[0], node[1]))
if not vals and not nodes:
print "t1,t2 have different numbers of attributes"
return ("???",t1,t2)
return utree
else:
                print "t1,t2 have different types:",t1.__class__.__name__,",",t2.__class__.__name__
return ("???",t1,t2)
def _Module(self, t1,t2):
nodes = {}
if len(t1.body)!=len(t2.body):
return (None, None)
for i in range(len(t1.body)):
nodes["body["+str(i)+"]"]=[t1.body[i],t2.body[i]]
return ([],nodes)
def _If(self, t1,t2):
node = {}
node["test"] = [t1.test, t2.test]
node["body"] = [t1.body, t2.body]
# collapse nested ifs into equivalent elifs.
elif_counter = -1
while True:
has_elif1 = t1.orelse and len(t1.orelse) == 1 and isinstance(t1.orelse[0], ast.If)
has_elif2 = t2.orelse and len(t2.orelse) == 1 and isinstance(t2.orelse[0], ast.If)
if has_elif1 and has_elif2:
elif_counter+=1
t1 = t1.orelse[0]
t2 = t2.orelse[0]
node["elif["+str(elif_counter)+"].test"] = [t1.test, t2.test]
node["elif["+str(elif_counter)+"].body"] = [t1.body, t2.body]
elif not has_elif1 and not has_elif2:
break
else:
return (None,None)
# final else
if t1.orelse and t2.orelse:
node["orelse"]=[t1.orelse,t2.orelse]
elif not t1.orelse and not t2.orelse:
pass
else:
return (None, None)
return ([],node)
def check_entire_tree(self,t1,t2):
if t1==t2:
return True
else:
return False
def check_tree_name(self,t1,t2):
if t1.__class__.__name__==t2.__class__.__name__:
return True
else:
return False
def check_value(self,v1,v2):
if v1==v2:
return True
else:
return False
class Unparser:
"""Methods in this class recursively traverse an AST and
output source code for the abstract syntax; original formatting
is disregarded. """
def __init__(self, tree, lines, src_ast, file = sys.stdout):
"""Unparser(tree, file=sys.stdout) -> None.
Print the source for tree to file."""
self.calls = self.crop_calls(lines, src_ast)
self.mod_calls = []
self.functions = src_ast.functions
self.classes = src_ast.classes
self.lines = lines
self.cur_call = -1
self.incall = False
self.cur_str = ""
self.ret_str = False
self.toplevel = True
self.f = file
self.future_imports = []
self._indent = 0
self.dispatch(tree)
self.f.write("\n")
print self.mod_calls
self.f.flush()
def crop_calls(self, lines, src_ast):
calls = []
for i in src_ast.calls:
if i.line<lines[0] or i.line>lines[1]:
continue
else:
calls.append(i)
print "======================="
print "Name:"+str(i.name)
print "line:"+str(i.line)
print "scope:"+str(i.scope)
print "source:"+str(i.source)
print "tree:"+str(i.tree)
print "======================="
return calls
def call_dealer(self,tree):
#self.write("CALL_HERE"+str(tree.lineno)+","+str(tree.col_offset))
def check_reachable(type,id):
if type[id].lines[0]>=self.lines[0] and type[id].lines[1]<=self.lines[1]:
return True
else:
return False
def process_mod_call(call):
self.write("unreachable_method["+str(len(self.mod_calls))+"]")
#self.write("$CALL:"+str(call.source)+"$")
self.mod_calls.append(call)
self.cur_call+=1
call = self.calls[self.cur_call]
if isinstance(call.source, tuple):
source = call.source
else:
source = ("Unknown", call.source)
if source==("Unknown",-1) or source==("member",-1):
return False
elif source[0]=="function":
id = source[1]
if check_reachable(self.functions,id):
return False
else:
process_mod_call(call)
return True
elif source[0]=="class":
id = source[1]
if check_reachable(self.classes,id):
return False
else:
process_mod_call(call)
return True
else: #call import
process_mod_call(call)
return True
def fill(self, text = ""):
"Indent a piece of text, according to the current indentation level"
if not self.ret_str:
self.f.write("\n"+" "*self._indent + text)
else:
self.cur_str+=("\n"+" "*self._indent + text)
def write(self, text):
"Append a piece of text to the current line."
if not self.ret_str:
self.f.write(text)
else:
self.cur_str+=(text)
def enter(self):
"Print ':', and increase the indentation."
self.write(":")
self._indent += 1
def leave(self):
"Decrease the indentation level."
self._indent -= 1
def dispatch(self, tree):
"Dispatcher function, dispatching tree type T to method _T."
if isinstance(tree, list):
for t in tree:
self.dispatch(t)
return
meth = getattr(self, "_"+tree.__class__.__name__)
meth(tree)
############### Unparsing methods ######################
# There should be one method per concrete grammar type #
# Constructors should be grouped by sum type. Ideally, #
# this would follow the order in the grammar, but #
# currently doesn't. #
################################
| Unifield/ufload | ufload/webdav.py | Python | mit | 8,486 | 0.003889 |
# -*- coding: utf-8 -*-
import cgi
import logging
import os
import uuid
from collections import namedtuple
import requests
from office365.runtime.auth.authentication_context import AuthenticationContext
from office365.runtime.client_request import ClientRequest
from office365.runtime.utilities.http_method import HttpMethod
from office365.runtime.utilities.request_options import RequestOptions
from office365.sharepoint.client_context import ClientContext
import time
class ConnectionFailed(Exception):
pass
class Client(object):
def __init__(self, host, port=0, auth=None, username=None, password=None, protocol='http', path=None):
if not port:
port = 443 if protocol == 'https' else 80
self.path = path or ''
if not self.path.endswith('/'):
self.path = '%s/' % self.path
self.username = username
self.password = password
# oneDrive: need to split /site/ and path
# in our config site is /personal/UF_OCX_msf_geneva_msf_org/
# path is /Documents/Tests/
self.baseurl = '{0}://{1}:{2}{3}/'.format(protocol, host, port, '/'.join(self.path.split('/')[0:3]) )
self.login()
def login(self):
ctx_auth = AuthenticationContext(self.baseurl)
if ctx_auth.acquire_token_for_user(self.username, cgi.escape(self.password)):
self.request = ClientRequest(ctx_auth)
            self.request.context = ClientContext(self.baseurl, ctx_auth)
if not ctx_auth.provider.FedAuth or not ctx_auth.provider.rtFa:
raise ConnectionFailed(ctx_auth.get_last_error())
else:
raise ConnectionFailed(ctx_auth.get_last_error())
def change_oc(self, baseurl, dir):
if dir == 'OCA':
            dir = '/personal/UF_OCA_msf_geneva_msf_org/'
elif dir == 'OCB':
dir = '/personal/UF_OCB_msf_geneva_msf_org/'
elif dir == 'OCG':
dir = '/personal/UF_OCG_msf_geneva_msf_org/'
elif dir == 'OCP':
dir = '/personal/UF_OCP_msf_geneva_msf_org/'
self.baseurl = baseurl + dir
def delete(self, remote_path):
webUri = '%s%s' % (self.path, remote_path)
request_url = "%s/_api/web/getfilebyserverrelativeurl('%s')" % (self.baseurl, webUri)
options = RequestOptions(request_url)
options.method = HttpMethod.Delete
options.set_header("X-HTTP-Method", "DELETE")
self.request.context.authenticate_request(options)
self.request.context.ensure_form_digest(options)
result = requests.post(url=request_url, data="", headers=options.headers, auth=options.auth)
if result.status_code not in (200, 201):
raise Exception(result.content)
return True
def list(self, remote_path):
#webUri = '%s%s' % (self.path, remote_path)
#request_url = "%s_api/web/getfilebyserverrelativeurl('%s')/files" % (self.baseurl, webUri)
request_url = "%s_api/web/getfolderbyserverrelativeurl('%s')/files" % (self.baseurl, remote_path)
options = RequestOptions(request_url)
options.method = HttpMethod.Get
options.set_header("X-HTTP-Method", "GET")
options.set_header('accept', 'application/json;odata=verbose')
self.request.context.authenticate_request(options)
self.request.context.ensure_form_digest(options)
result = requests.get(url=request_url, headers=options.headers, auth=options.auth)
#result = requests.post(url=request_url, data="", headers=options.headers, auth=options.auth)
result = result.json()
'''if result.status_code not in (200, 201):
print 'Error code: '
print result.status_code
raise Exception(result.content)
'''
#return True
files=[]
for i in range(len(result['d']['results'])):
item = result['d']['results'][i]
files.append(item)
return files
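    # The odata=verbose JSON nests the folder listing under d.results; each
    # item dict describes one remote file (the exact fields are SharePoint's,
    # not guaranteed by this code).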
def download(self, remote_path, filename):
request_url = "%s_api/web/getfilebyserverrelativeurl('%s')/$value" % (self.baseurl, remote_path)
options = RequestOptions(request_url)
options.method = HttpMethod.Get
options.set_header("X-HTTP-Method", "GET")
options.set_header('accept', 'application/json;odata=verbose')
retry = 5
while retry:
try:
self.request.context.authenticate_request(options)
self.request.context.ensure_form_digest(options)
with requests.get(url=request_url, headers=options.headers, auth=options.auth, stream=True, timeout=120) as r:
if r.status_code not in (200, 201):
error = self.parse_error(r)
raise requests.exceptions.RequestException(error)
with open(filename, 'wb') as file:
for chunk in r.iter_content(chunk_size=8192):
if chunk:
file.write(chunk)
except requests.exceptions.RequestException:
time.sleep(3)
self.login()
retry -= 1
if not retry:
raise
continue
retry = 0
return filename
def upload(self, fileobj, remote_path, buffer_size=None, log=False, progress_obj=False):
iid = uuid.uuid1()
if progress_obj:
log = True
if log:
logger = logging.getLogger('cloud.backup')
try:
size = os.path.getsize(fileobj.name)
except:
size = None
offset = -1
if not buffer_size:
buffer_size = 10* 1024 * 1024
x = ""
webUri = '%s%s' % (self.path, remote_path)
while True:
if offset == -1:
request_url = "%s/_api/web/GetFolderByServerRelativeUrl('%s')/Files/add(url='%s',overwrite=true)" % (self.baseurl, self.path, remote_path)
offset = 0
elif not offset:
if len(x) == buffer_size:
request_url="%s/_api/web/getfilebyserverrelativeurl('%s')/startupload(uploadId=guid'%s')" % (self.baseurl, webUri, iid)
else:
request_url = "%s/_api/web/GetFolderByServerRelativeUrl('%s')/Files/add(url='%s',overwrite=true)" % (self.baseurl, self.path, remote_path)
elif len(x) == buffer_size:
request_url = "%s/_api/web/getfilebyserverrelativeurl('%s')/continueupload(uploadId=guid'%s',fileOffset=%s)" % (self.baseurl, webUri, iid, offset)
else:
request_url = "%s/_api/web/getfilebyserverrelativeurl('%s')/finishupload(uploadId=guid'%s',fileOffset=%s)" % (self.baseurl, webUri, iid, offset)
offset += len(x)
options = RequestOptions(request_url)
options.method = HttpMethod.Post
self.request.context.authenticate_request(options)
self.request.context.ensure_form_digest(options)
result = requests.post(url=request_url, data=x, headers=options.headers, auth=options.auth)
if result.status_code not in (200, 201):
raise Exception(result.content)
if log and offset and offset % buffer_size*10 == 0:
percent_txt = ''
if size:
percent = round(offset*100/size)
percent_txt = '%d%%' % percent
if progress_obj:
progress_obj.write({'name': percent})
logger.info('OneDrive: %d bytes sent on %s bytes %s' % (offset, size or 'unknown', percent_txt))
x = fileobj.read(buffer_size)
if not x:
break
return True
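    # Chunked-upload flow (as implemented above): an initial empty Files/add
    # creates the remote file; a payload that fits one buffer is re-sent whole
    # via Files/add, while larger files stream full chunks through
    # startupload/continueupload and the final partial chunk through
    # finishupload, all keyed on the same uploadId GUID.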
def parse_error(self, result):
try:
if 'application/json' in result.headers.get('Content-Type'):
resp_content = result.json()
msg = resp_content['odata.error']['message']
error = []
if isinstance(msg, dict):
error = [msg['va
| basnijholt/holoviews | holoviews/ipython/preprocessors.py | Python | bsd-3-clause | 7,474 | 0.003077 |
"""
Prototype demo:
python holoviews/ipython/convert.py Conversion_Example.ipynb | python
"""
import ast
from nbconvert.preprocessors import Preprocessor
def comment_out_magics(source):
"""
Utility used to make sure AST parser does not choke on unrecognized
magics.
"""
filtered = []
for line in source.splitlines():
if line.strip().startswith('%'):
filtered.append('# ' + line)
else:
filtered.append(line)
return '\n'.join(filtered)
def wrap_cell_expression(source, template='{expr}'):
"""
If a cell ends in an expression that could be displaying a HoloViews
object (as determined using the AST), wrap it with a given prefix
and suffix string.
If the cell doesn't end in an expression, return the source unchanged.
"""
cell_output_types = (ast.IfExp, ast.BoolOp, ast.BinOp, ast.Call,
ast.Name, ast.Attribute)
try:
node = ast.parse(comment_out_magics(source))
except SyntaxError:
return source
filtered = source.splitlines()
if node.body != []:
last_expr = node.body[-1]
if not isinstance(last_expr, ast.Expr):
pass # Not an expression
elif isinstance(last_expr.value, cell_output_types):
# CAREFUL WITH UTF8!
expr_end_slice = filtered[last_expr.lineno-1][:last_expr.col_offset]
expr_start_slice = filtered[last_expr.lineno-1][last_expr.col_offset:]
start = '\n'.join(filtered[:last_expr.lineno-1]
+ ([expr_end_slice] if expr_end_slice else []))
ending = '\n'.join(([expr_start_slice] if expr_start_slice else [])
+ filtered[last_expr.lineno:])
# BUG!! Adds newline for 'foo'; <expr>
return start + '\n' + template.format(expr=ending)
return source
def filter_magic(source, magic, strip=True):
"""
Given the source of a cell, filter out the given magic and collect
the lines using the magic into a list.
If strip is True, the IPython syntax part of the magic (e.g %magic
or %%magic) is stripped from the returned lines.
"""
filtered, magic_lines=[],[]
for line in source.splitlines():
if line.strip().startswith(magic):
magic_lines.append(line)
else:
filtered.append(line)
if strip:
magic_lines = [el.replace(magic,'') for el in magic_lines]
return '\n'.join(filtered), magic_lines
def strip_magics(source):
"""
Given the source of a cell, filter out all cell and line magics.
"""
filtered=[]
for line in source.splitlines():
        if not line.startswith('%') or line.startswith('%%'):
filtered.append(line)
return '\n'.join(filtered)
def replace_line_magic(source, magic, template='{line}'):
"""
Given a cell's source, replace line magics using a formatting
template, where {line} is the string that follows the magic.
"""
filtered = []
for line in source.splitlines():
if line.strip().startswith(magic):
substitution = template.format(line=line.replace(magic, ''))
filtered.append(substitution)
else:
filtered.append(line)
return '\n'.join(filtered)
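# Example (sketch; the magic line is hypothetical):
#   replace_line_magic("%opts Curve [width=300]", '%opts',
#                      template='hv.util.opts({line!r})')
# rewrites that line to: hv.util.opts(' Curve [width=300]')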
class OptsMagicProcessor(Preprocessor):
"""
Preprocessor to convert notebooks to Python source to convert use of
opts magic to use the util.opts utility instead.
"""
def preprocess_cell(self, cell, resources, index):
if cell['cell_type'] == 'code':
source = replace_line_magic(cell['source'], '%opts',
template='hv.util.opts({line!r})')
source, opts_lines = filter_magic(source, '%%opts')
if opts_lines:
# Escape braces e.g normalization options as they pass through format
template = 'hv.util.opts({options!r}, {{expr}})'.format(
options=' '.join(opts_lines).replace('{','{{').replace('}','}}'))
source = wrap_cell_expression(source, template)
cell['source'] = source
return cell, resources
def __call__(self, nb, resources): return self.preprocess(nb,resources)
class OutputMagicProcessor(Preprocessor):
"""
Preprocessor to convert notebooks to Python source to convert use of
output magic to use the util.output utility instead.
"""
def preprocess_cell(self, cell, resources, index):
if cell['cell_type'] == 'code':
source = replace_line_magic(cell['source'], '%output',
template='hv.util.output({line!r})')
source, output_lines = filter_magic(source, '%%output')
if output_lines:
template = 'hv.util.output({options!r}, {{expr}})'.format(
options=output_lines[-1])
source = wrap_cell_expression(source, template)
cell['source'] = source
return cell, resources
def __call__(self, nb, resources): return self.preprocess(nb,resources)
class StripMagicsProcessor(Preprocessor):
"""
Preprocessor to convert notebooks to Python source to strips out all
magics. To be applied after the preprocessors that can handle
holoviews magics appropriately.
"""
def preprocess_cell(self, cell, resources, index):
if cell['cell_type'] == 'code':
cell['source'] = strip_magics(cell['source'])
return cell, resources
def __call__(self, nb, resources): return self.preprocess(nb,resources)
class Substitute(Preprocessor):
"""
An nbconvert preprocessor that substitutes one set of HTML data
output for another, adding annotation to the output as required.
The constructor accepts the notebook format version and a
substitutions dictionary:
{source_html:(target_html, annotation)}
Where the annotation may be None (i.e. no annotation).
"""
annotation = '<center><b>%s</b></center>'
def __init__(self, version, substitutions, **kw):
self.nbversion = version
self.substitutions = substitutions
super(Preprocessor, self).__init__(**kw)
def __call__(self, nb, resources): # Temporary hack around 'enabled' flag
return self.preprocess(nb,resources)
def replace(self, src):
"Given some source html substitute and annotated as applicable"
for html in self.substitutions.keys():
if src == html:
annotation = self.annotation % self.substitutions[src][1]
return annotation + self.substitutions[src][0]
return src
def preprocess_cell(self, cell, resources, index):
v4 = (self.nbversion[0] == 4)
if cell['cell_type'] == 'code':
for outputs in cell['outputs']:
output_key = ('execute_result' if v4 else 'pyout')
if outputs['output_type'] == output_key:
# V1-3
if not v4 and 'html' in outputs:
outputs['html'] = self.replace(outputs['html'])
# V4
for data in outputs.get('data',[]):
if v4 and data == 'text/html':
substitution = self.replace(outputs['data']['text/html'])
outputs['data']['text/html'] = substitution
return cell, resources
| nelango/ViralityAnalysis | model/lib/nltk/treeprettyprinter.py | Python | mit | 24,360 | 0.001314 |
# -*- coding: utf-8 -*-
# Natural Language Toolkit: ASCII visualization of NLTK trees
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Andreas van Cranenburgh <A.W.vanCranenburgh@uva.nl>
# Peter Ljunglöf <peter.ljunglof@gu.se>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Pretty-printing of discontinuous trees.
Adapted from the disco-dop project, by Andreas van Cranenburgh.
https://github.com/andreasvc/disco-dop
Interesting reference (not used for this code):
T. Eschbach et al., Orth. Hypergraph Drawing, Journal of
Graph Algorithms and Applications, 10(2) 141--157 (2006)149.
http://jgaa.info/accepted/2006/EschbachGuentherBecker2006.10.2.pdf
"""
from __future__ import division, print_function, unicode_literals
from nltk.util import slice_bounds, OrderedDict
from nltk.compat import string_types, python_2_unicode_compatible, unicode_repr
from nltk.internals import raise_unorderable_types
from nltk.tree import Tree
import re
import sys
import codecs
from cgi import escape
from collections import defaultdict
from operator import itemgetter
from itertools import chain, islice
ANSICOLOR = {
'black': 30,
'red': 31,
'green': 32,
'yellow': 33,
'blue': 34,
'magenta': 35,
'cyan': 36,
'white': 37,
}
@python_2_unicode_compatible
class TreePrettyPrinter(object):
"""
Pretty-print a tree in text format, either as ASCII or Unicode.
The tree can be a normal tree, or discontinuous.
``TreePrettyPrinter(tree, sentence=None, highlight=())``
creates an object from which different visualizations can be created.
:param tree: a Tree object.
:param sentence: a list of words (strings). If `sentence` is given,
`tree` must contain integers as leaves, which are taken as indices
in `sentence`. Using this you can display a discontinuous tree.
:param highlight: Optionally, a sequence of Tree objects in `tree` which
should be highlighted. Has the effect of only applying colors to nodes
in this sequence (nodes should be given as Tree objects, terminals as
indices).
>>> from nltk.tree import Tree
>>> tree = Tree.fromstring('(S (NP Mary) (VP walks))')
>>> print(TreePrettyPrinter(tree).text())
... # doctest: +NORMALIZE_WHITESPACE
S
____|____
NP VP
| |
Mary walks
"""
def __init__(self, tree, sentence=None, highlight=()):
if sentence is None:
leaves = tree.leaves()
if (leaves and not any(len(a) == 0 for a in tree.subtrees())
and all(isinstance(a, int) for a in leaves)):
sentence = [str(a) for a in leaves]
else:
# this deals with empty nodes (frontier non-terminals)
# and multiple/mixed terminals under non-terminals.
tree = tree.copy(True)
sentence = []
                for a in tree.subtrees():
if len(a) == 0:
a.append(len(sentence))
sentence.append(None)
elif any(not isinstance(b, Tree) for b in a):
for n, b in enumerate(a):
if not isinstance(b, Tree):
a[n] = len(sentence)
sentence.append('%s' % b)
        self.nodes, self.coords, self.edges, self.highlight = self.nodecoords(
            tree, sentence, highlight)
def __str__(self):
return self.text()
def __repr__(self):
return '<TreePrettyPrinter with %d nodes>' % len(self.nodes)
@staticmethod
def nodecoords(tree, sentence, highlight):
"""
Produce coordinates of nodes on a grid.
Objective:
- Produce coordinates for a non-overlapping placement of nodes and
horizontal lines.
- Order edges so that crossing edges cross a minimal number of previous
horizontal lines (never vertical lines).
Approach:
- bottom up level order traversal (start at terminals)
- at each level, identify nodes which cannot be on the same row
- identify nodes which cannot be in the same column
- place nodes into a grid at (row, column)
- order child-parent edges with crossing edges last
Coordinates are (row, column); the origin (0, 0) is at the top left;
the root node is on row 0. Coordinates do not consider the size of a
node (which depends on font, &c), so the width of a column of the grid
should be automatically determined by the element with the greatest
width in that column. Alternatively, the integer coordinates could be
converted to coordinates in which the distances between adjacent nodes
are non-uniform.
Produces tuple (nodes, coords, edges, highlighted) where:
- nodes[id]: Tree object for the node with this integer id
- coords[id]: (n, m) coordinate where to draw node with id in the grid
- edges[id]: parent id of node with this id (ordered dictionary)
- highlighted: set of ids that should be highlighted
"""
def findcell(m, matrix, startoflevel, children):
"""
Find vacant row, column index for node ``m``.
Iterate over current rows for this level (try lowest first)
and look for cell between first and last child of this node,
add new row to level if no free row available.
"""
candidates = [a for _, a in children[m]]
minidx, maxidx = min(candidates), max(candidates)
leaves = tree[m].leaves()
center = scale * sum(leaves) // len(leaves) # center of gravity
if minidx < maxidx and not minidx < center < maxidx:
center = sum(candidates) // len(candidates)
if max(candidates) - min(candidates) > 2 * scale:
center -= center % scale # round to unscaled coordinate
if minidx < maxidx and not minidx < center < maxidx:
center += scale
if ids[m] == 0:
startoflevel = len(matrix)
for rowidx in range(startoflevel, len(matrix) + 1):
if rowidx == len(matrix): # need to add a new row
matrix.append([vertline if a not in (corner, None)
else None for a in matrix[-1]])
row = matrix[rowidx]
i = j = center
if len(children[m]) == 1: # place unaries directly above child
return rowidx, next(iter(children[m]))[1]
elif all(a is None or a == vertline for a
in row[min(candidates):max(candidates) + 1]):
# find free column
for n in range(scale):
i = j = center + n
while j > minidx or i < maxidx:
if i < maxidx and (matrix[rowidx][i] is None
or i in candidates):
return rowidx, i
elif j > minidx and (matrix[rowidx][j] is None
or j in candidates):
return rowidx, j
i += scale
j -= scale
raise ValueError('could not find a free cell for:\n%s\n%s'
'min=%d; max=%d' % (tree[m], minidx, maxidx, dumpmatrix()))
def dumpmatrix():
"""Dump matrix contents for debugging purposes."""
return '\n'.join(
'%2d: %s' % (n, ' '.join(('%2r' % i)[:2] for i in row))
for n, row in enumerate(matrix))
leaves = tree.leaves()
if not all(isinstance(n, int) for n in leaves):
raise ValueError('All leaves must be integer indices.')
if len(leaves) != len(set(leaves)):
raise ValueError('Indices must occur at most once.')
if not all(0 <= n < len(sentence) for n in leaves):
| silly-wacky-3-town-toon/SOURCE-COD | toontown/catalog/CatalogNametagItem.py | Python | apache-2.0 | 3,692 | 0.002167 |
import CatalogItem
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPLocalizer
from direct.interval.IntervalGlobal import *
from direct.gui.DirectGui import *
class CatalogNametagItem(CatalogItem.CatalogItem):
sequenceNumber = 0
def makeNewItem(self, nametagStyle, isSpecial = False):
self.nametagStyle = nametagStyle
self.isSpecial = isSpecial
CatalogItem.CatalogItem.makeNewItem(self)
def getPurchaseLimit(self):
return 1
    def reachedPurchaseLimit(self, avatar):
if self in avatar.onOrder or self in avatar.mailboxContents or self in avatar.onGiftOrder or self in avatar.awardMailboxContents or self in avatar.onAwardOrder:
return 1
if avatar.nametagStyle == self.nametagStyle:
return 1
return 0
def getAcceptItemErrorText(self, retcode):
if retcode == ToontownGlobals.P_ItemAvailable:
return TTLocalizer.CatalogAcceptNametag
return CatalogItem.CatalogItem.getAcceptItemErrorText(self, retcode)
def saveHistory(self):
return 1
def getTypeName(self):
return TTLocalizer.NametagTypeName
def getName(self):
if self.nametagStyle == 100:
name = TTLocalizer.UnpaidNameTag
else:
name = TTLocalizer.NametagFontNames[self.nametagStyle]
if TTLocalizer.NametagReverse:
name = TTLocalizer.NametagLabel + name
else:
name = name + TTLocalizer.NametagLabel
        return name
        # NOTE: the branch below is unreachable dead code retained from the
        # original source; the lookup above already covers these styles.
        if self.nametagStyle == 0:
            name = TTLocalizer.NametagPaid
        elif self.nametagStyle == 1:
            name = TTLocalizer.NametagAction
        elif self.nametagStyle == 2:
            name = TTLocalizer.NametagFrilly
def recordPurchase(self, avatar, optional):
if avatar:
avatar.b_setNametagStyle(self.nametagStyle)
return ToontownGlobals.P_ItemAvailable
def getPicture(self, avatar):
frame = self.makeFrame()
if self.nametagStyle == 100:
inFont = ToontownGlobals.getToonFont()
else:
inFont = ToontownGlobals.getNametagFont(self.nametagStyle)
nameTagDemo = DirectLabel(parent=frame, relief=None, pos=(0, 0, 0.24), scale=0.5, text=localAvatar.getName(), text_fg=(1.0, 1.0, 1.0, 1), text_shadow=(0, 0, 0, 1), text_font=inFont, text_wordwrap=9)
self.hasPicture = True
base.graphicsEngine.renderFrame()
return (frame, None)
def output(self, store = -1):
return 'CatalogNametagItem(%s%s)' % (self.nametagStyle, self.formatOptionalData(store))
def compareTo(self, other):
return self.nametagStyle - other.nametagStyle
def getHashContents(self):
return self.nametagStyle
def getBasePrice(self):
        return 500
        # NOTE: everything below is unreachable dead code retained from the
        # original source; the per-style pricing never runs.
        cost = 500
if self.nametagStyle == 0:
cost = 600
elif self.nametagStyle == 1:
cost = 600
elif self.nametagStyle == 2:
cost = 600
elif self.nametagStyle == 100:
cost = 50
return cost
def decodeDatagram(self, di, versionNumber, store):
CatalogItem.CatalogItem.decodeDatagram(self, di, versionNumber, store)
self.nametagStyle = di.getUint16()
self.isSpecial = di.getBool()
def encodeDatagram(self, dg, store):
CatalogItem.CatalogItem.encodeDatagram(self, dg, store)
dg.addUint16(self.nametagStyle)
dg.addBool(self.isSpecial)
def isGift(self):
return 0
def getBackSticky(self):
itemType = 1
numSticky = 4
return (itemType, numSticky)
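# Illustrative sketch (not part of the original file): the payload that
# encodeDatagram/decodeDatagram above append after the base-class fields is a
# 16-bit style id plus a bool flag, shown here with the struct module
# (Panda3D datagrams are assumed little-endian).
import struct
def _pack_nametag_sketch(style, special):
    return struct.pack('<H?', style, special)
def _unpack_nametag_sketch(payload):
    return struct.unpack('<H?', payload)
assert _unpack_nametag_sketch(_pack_nametag_sketch(100, True)) == (100, True)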
|
StepicOrg/codejail
|
codejail/tests/test_jail_code.py
|
Python
|
agpl-3.0
| 11,655
| 0.000601
|
"""Test jail_code.py"""
import os
import shutil
import sys
import textwrap
import tempfile
import unittest
from nose.plugins.skip import SkipTest
from codejail.jail_code import jail_code, is_configured, Jail, configure, auto_configure
auto_configure()
def jailpy(code=None, *args, **kwargs):
"""Run `jail_code` on Python."""
if code:
code = textwrap.dedent(code)
result = jail_code("python", code, *args, **kwargs)
if isinstance(result.stdout, bytes):
result.stdout = result.stdout.decode()
if isinstance(result.stderr, bytes):
result.stderr = result.stderr.decode()
return result
def file_here(fname):
"""Return the full path to a file alongside this code."""
return os.path.join(os.path.dirname(__file__), fname)
class JailCodeHelpers(unittest.TestCase):
"""Assert helpers for jail_code tests."""
def setUp(self):
super(JailCodeHelpers, self).setUp()
if not is_configured("python"):
raise SkipTest
def assertResultOk(self, res):
"""Assert that `res` exited well (0), and had no stderr output."""
self.assertEqual(res.stderr, "")
self.assertEqual(res.status, 0)
class TestFeatures(JailCodeHelpers):
"""Test features of how `jail_code` runs Python."""
def test_hello_world(self):
res = jailpy(code="print('Hello, world!')")
self.assertResultOk(res)
self.assertEqual(res.stdout, 'Hello, world!\n')
def test_argv(self):
res = jailpy(
code="import sys; print(':'.join(sys.argv[1:]))",
argv=["Hello", "world", "-x"]
)
self.assertResultOk(res)
self.assertEqual(res.stdout, "Hello:world:-x\n")
def test_ends_with_exception(self):
res = jailpy(code="""raise Exception('FAIL')""")
self.assertNotEqual(res.status, 0)
self.assertEqual(res.stdout, "")
self.assertEqual(res.stderr, textwrap.dedent("""\
Traceback (most recent call last):
File "jailed_code", line 1, in <module>
raise Exception('FAIL')
Exception: FAIL
"""))
def test_stdin_is_provided(self):
res = jailpy(
code="import json,sys; print(sum(json.load(sys.stdin)))",
stdin="[1, 2.5, 33]"
)
self.assertResultOk(res)
self.assertEqual(res.stdout, "36.5\n")
def test_files_are_copied(self):
res = jailpy(
code="print('Look:', open('hello.txt').read())",
files=[file_here("hello.txt")]
)
self.assertResultOk(res)
self.assertEqual(res.stdout, 'Look: Hello there.\n\n')
def test_directories_are_copied(self):
res = jailpy(
code="""\
import os
for path, dirs, files in os.walk("."):
print((path, sorted(dirs), sorted(files)))
""",
files=[file_here("hello.txt"), file_here("pylib")]
)
self.assertResultOk(res)
self.assertIn("hello.txt", res.stdout)
self.assertIn("pylib", res.stdout)
self.assertIn("module.py", res.stdout)
def test_executing_a_copied_file(self):
res = jailpy(
files=[file_here("doit.py")],
argv=["doit.py", "1", "2", "3"]
)
        self.assertResultOk(res)
self.assertEqual(
res.stdout,
"This is doit.py!\nMy args are ['doit.py', '1', '2', '3']\n"
)
def test_context_managers(self):
first = textwrap.dedent("""
with open("hello.txt", "w") as f:
f.write("Hello, second")
""")
second = textwrap.dedent("""
with open("hello.txt") as f:
print(f.read())
""")
limits = {"TIME": 1, "MEMORY": 128*1024*1024,
"CAN_FORK": True, "FILE_SIZE": 256}
configure("unconfined_python", sys.prefix + "/bin/python", limits_conf=limits)
with Jail() as j:
res = j.run_code("unconfined_python", first)
self.assertEqual(res.status, 0)
res = j.run_code("python", second)
self.assertEqual(res.status, 0)
self.assertEqual(res.stdout.decode().strip(), "Hello, second")
class TestLimits(JailCodeHelpers):
"""Tests of the resource limits, and changing them."""
def test_cant_use_too_much_memory(self):
# This will fail after setting the limit to 30Mb.
res = jailpy(code="print(len(bytearray(50000000)))", limits={'MEMORY': 30000000})
self.assertEqual(res.stdout, "")
self.assertNotEqual(res.status, 0)
def test_changing_vmem_limit(self):
# Up the limit, it will succeed.
res = jailpy(code="print(len(bytearray(50000000)))", limits={'MEMORY': 80000000})
self.assertEqual(res.stdout, "50000000\n")
self.assertEqual(res.status, 0)
def test_disabling_vmem_limit(self):
# Disable the limit, it will succeed.
res = jailpy(code="print(len(bytearray(50000000)))", limits={'MEMORY': None})
self.assertEqual(res.stdout, "50000000\n")
self.assertEqual(res.status, 0)
def test_cant_use_too_much_cpu(self):
res = jailpy(code="print(sum(range(10**9)))")
self.assertEqual(res.stdout, "")
self.assertNotEqual(res.status, 0)
self.assertTrue(res.time_limit_exceeded)
def test_cant_use_too_much_time(self):
# time limit is 5 * cpu_time
res = jailpy(code="import time; time.sleep(7); print('Done!')", limits={'TIME': 1})
self.assertNotEqual(res.status, 0)
self.assertEqual(res.stdout, "")
self.assertTrue(res.time_limit_exceeded)
def test_cant_write_files(self):
res = jailpy(code="""\
print("Trying")
with open("mydata.txt", "w") as f:
f.write("hello")
with open("mydata.txt") as f2:
print("Got this:", f2.read())
""")
self.assertNotEqual(res.status, 0)
self.assertEqual(res.stdout, "Trying\n")
self.assertIn("ermission denied", res.stderr)
def test_cant_use_network(self):
res = jailpy(code="""\
import urllib.request
print("Reading google")
u = urllib.request.urlopen("http://google.com")
google = u.read()
print(len(google))
""")
self.assertNotEqual(res.status, 0)
self.assertEqual(res.stdout, "Reading google\n")
self.assertIn("URLError", res.stderr)
def test_cant_fork(self):
res = jailpy(code="""\
import os
print("Forking")
child_ppid = os.fork()
""")
self.assertNotEqual(res.status, 0)
self.assertEqual(res.stdout, "Forking\n")
self.assertIn("IOError", res.stderr)
def test_cant_see_environment_variables(self):
os.environ['HONEY_BOO_BOO'] = 'Look!'
res = jailpy(code="""\
import os
for name, value in os.environ.items():
print("%s: %r" % (name, value))
""")
self.assertResultOk(res)
self.assertNotIn("HONEY", res.stdout)
def test_reading_dev_random(self):
# We can read 10 bytes just fine.
res = jailpy(code="x = open('/dev/random', 'rb').read(10); print(len(x))")
self.assertResultOk(res)
self.assertEqual(res.stdout, "10\n")
# If we try to read all of it, we'll be killed by the real-time limit.
res = jailpy(code="x = open('/dev/random').read(); print('Done!')")
self.assertNotEqual(res.status, 0)
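# Illustrative reference (not part of the original suite): the shape of the
# limits mapping that jailpy()/jail_code() accept in the tests above; None
# disables a limit. The values here are hypothetical, not project defaults.
EXAMPLE_LIMITS = {
    "TIME": 1,                    # CPU seconds; allowed real time is larger
    "MEMORY": 32 * 1024 * 1024,   # bytes of address space (None = unlimited)
    "CAN_FORK": False,            # whether the jailed process may fork
    "FILE_SIZE": 256,             # bytes a created file may grow to
}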
class TestSymlinks(JailCodeHelpers):
"""Testing symlink behavior."""
def setUp(self):
# Make a temp dir, and arrange to have it removed when done.
tmp_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmp_dir)
# Make a directory that won't be copied into the sandbox.
self.not_copied = os.path.join(tmp_dir, "not_copied")
os.mkdir(self.not_copied)
self.linked_txt = os.path.j
|
uclouvain/osis
|
assessments/views/common/score_encoding_progress_overview.py
|
Python
|
agpl-3.0
| 2,568
| 0.001558
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.functional import cached_property
from django.views.generic import TemplateView
from assessments.calendar.scores_exam_submission_calendar import ScoresExamSubmissionCalendar
from base.utils.cache import CacheFilterMixin
from osis_role.contrib.views import PermissionRequiredMixin
class ScoreEncodingProgressOverviewBaseView(PermissionRequiredMixin, CacheFilterMixin, TemplateView):
# PermissionRequiredMixin
permission_required = "assessments.can_access_scoreencoding"
# CacheFilterMixin
timeout = 10800 # seconds = 3 hours
@cached_property
def person(self):
return self.request.user.person
def dispatch(self, request, *args, **kwargs):
opened_calendars = ScoresExamSubmissionCalendar().get_opened_academic_events()
if not opened_calendars:
redirect_url = reverse('outside_scores_encodings_period')
return HttpResponseRedirect(redirect_url)
return super().dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
return {
**super().get_context_data(**kwargs),
'person': self.person
}
def get_permission_object(self):
return None
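# Illustrative wiring sketch (hypothetical names, not part of this module): a
# concrete subclass supplies template_name and is routed in urls.py, e.g.
#
#   path('scores/progress/', ScoreEncodingProgressOverviewView.as_view(),
#        name='score_encoding_progress_overview'),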
|
jawilson/home-assistant
|
tests/components/litejet/__init__.py
|
Python
|
apache-2.0
| 1,438
| 0
|
"""Tests for the litejet component."""
from homeassistant.components import scene, switch
from homeassistant.components.litejet import DOMAIN
from homeassistant.const import CONF_PORT
from homeassistant.helpers import entity_registry as er
from tests.common import MockConfigEntry
async def async_init_integration(
hass, use_switch=False, use_scene=False
) -> MockConfigEntry:
"""Set up the LiteJet integration in Home Assistant."""
registry = er.async_get(hass)
entry_data = {CONF_PORT: "/dev/mock"}
entry = MockConfigEntry(
domain=DOMAIN, unique_id=entry_data[CONF_PORT], data=entry_data
)
if use_switch:
registry.async_get_or_create(
switch.DOMAIN,
DOMAIN,
f"{entry.entry_id}_1",
suggested_object_id="mock_switch_1",
disabled_by=None,
)
registry.async_get_or_create(
switch.DOMAIN,
DOMAIN,
f"{entry.entry_id}_2",
suggested_object_id="mock_switch_2",
disabled_by=None,
)
if use_scene:
registry.async_get_or_create(
scene.DOMAIN,
            DOMAIN,
            f"{entry.entry_id}_1",
suggested_object_id="mock_scene_1",
disabled_by=None,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
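# Illustrative usage sketch (hypothetical test, not part of this helper):
#
#   async def test_switch_entities(hass):
#       await async_init_integration(hass, use_switch=True)
#       assert hass.states.get("switch.mock_switch_1") is not None
#       assert hass.states.get("switch.mock_switch_2") is not None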
|
ewheeler/rapidpro
|
temba/orgs/tasks.py
|
Python
|
agpl-3.0
| 1,662
| 0.001203
|
from __future__ import absolute_import, unicode_literals
import time
from datetime import timedelta
from djcelery_transactions import task
from django.utils import timezone
from redis_cache import get_redis_connection
from .models import CreditAlert, Invitation, Org, TopUpCredits
@task(track_started=True, name='send_invitation_email_task')
def send_invitation_email_task(invitation_id):
invitation = Invitation.objects.get(pk=invitation_id)
invitation.send_email()
@task(track_started=True, name='send_alert_email_task')
def send_alert_email_task(alert_id):
alert = CreditAlert.objects.get(pk=alert_id)
alert.send_email()
@task(track_started=True, name='check_credits_task')
def check_credits_task():
CreditAlert.check_org_credits()
@task(track_started=True, name='calculate_credit_caches')
def calculate_credit_caches():
"""
Repopulates the active topup and total credits for each organization
that received messages in the past week.
"""
# get all orgs that have sent a message in the past week
last_week = timezone.now() - timedelta(days=7)
# for every org that has sent a message in the past week
for org in Org.objects.filter(msgs__created_on__gte=last_week).distinct('pk'):
start = time.time()
org._calculate_credit_caches()
print " -- recalculated credits for %s in %0.2f seconds" % (org.name, time.time() - start)
@task(track_started=True, name="squash_topupcredits")
def squash_topupcredits():
r = get_redis_connection()
key = 'squash_topupcredits'
    # skip when another worker already holds the lock; otherwise take it
    if not r.get(key):
with r.lock(key, timeout=900):
TopUpCredits.squash_credits()
|
lizardsystem/flooding
|
gislib/vectors.py
|
Python
|
gpl-3.0
| 6,539
| 0.000153
|
# (c) Nelen & Schuurmans. GPL licensed, see LICENSE.rst.
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from osgeo import ogr
import numpy as np
from gislib import projections
def array2polygon(array):
"""
Return a polygon geometry.
    This method uses numpy to prepare a wkb string. Seems only faster for
larger polygons, compared to adding points individually.
"""
# 13 bytes for the header, 16 bytes per point
nbytes = 13 + 16 * array.shape[0]
data = np.empty(nbytes, dtype=np.uint8)
# little endian
data[0:1] = 1
# wkb type, number of rings, number of points
data[1:13].view(np.uint32)[:] = (3, 1, array.shape[0])
# set the points
data[13:].view(np.float64)[:] = array.ravel()
return ogr.CreateGeometryFromWkb(data.tostring())
def points2polygon(points):
"""
Return a polygon geometry.
Adds points individually. Faster for small amounts of points.
"""
ring = ogr.Geometry(ogr.wkbLinearRing)
for x, y in points:
ring.AddPoint_2D(x, y)
polygon = ogr.Geometry(ogr.wkbPolygon)
polygon.AddGeometry(ring)
return polygon
def point2geometry(point):
""" Return geometry. """
geometry = ogr.Geometry(ogr.wkbPoint)
geometry.AddPoint_2D(*map(float, point))
return geometry
def line2geometry(line):
""" Return geometry. """
geometry = ogr.Geometry(ogr.wkbLineString)
for point in line:
geometry.AddPoint_2D(*map(float, point))
return geometry
def magnitude(vectors):
""" Return magnitudes. """
return np.sqrt((vectors ** 2).sum(1))
def normalize(vectors):
""" Return unit vectors. """
return vectors / magnitude(vectors).reshape(-1, 1)
def rotate(vectors, degrees):
""" Return vectors rotated by degrees. """
return np.vstack([
+np.cos(np.radians(degrees)) * vectors[:, 0] +
-np.sin(np.radians(degrees)) * vectors[:, 1],
+np.sin(np.radians(degrees)) * vectors[:, 0] +
+np.cos(np.radians(degrees)) * vectors[:, 1],
]).transpose()
class Geometry(object):
""" Wrapper around ogr geometry for working with extents. """
def __init__(self, geometry):
self.geometry = geometry
def transform(self, source, target):
"""
Transform geometry from source projection to target projection.
"""
transformation = projections.get_coordinate_transformation(
source=source, target=target,
)
self.geometry.Transform(transformation)
@classmethod
def fromextent(cls, x1, y1, x2, y2):
points = ((x1, y1),
(x2, y1),
(x2, y2),
(x1, y2),
(x1, y1))
return cls(geometry=points2polygon(points))
@property
    def extent(self):
""" Return x1, y1, x2, y2. """
        x1, x2, y1, y2 = self.geometry.GetEnvelope()
return x1, y1, x2, y2
@property
def envelope(self):
""" Return polygon representing envelope. """
x1, x2, y1, y2 = self.geometry.GetEnvelope()
points = (x1, y1), (x2, y1), (x2, y2), (x1, y2), (x1, y1)
return points2polygon(points)
@property
def size(self):
""" Return width, height. """
x1, x2, y1, y2 = self.geometry.GetEnvelope()
return x2 - x1, y2 - y1
class MagicLine(object):
"""
LineString with handy parameterization and projection properties.
"""
def __init__(self, points):
# Data
self.points = np.array(points)
# Views
self.p = self.points[:-1]
self.q = self.points[1:]
self.lines = np.hstack([self.p, self.q]).reshape(-1, 2, 2)
# Derivatives
self.length = len(points) - 1
self.vectors = self.q - self.p
self.centers = (self.p + self.q) / 2
def __getitem__(self, parameters):
""" Return points corresponding to parameters. """
i = np.uint64(np.where(parameters == self.length,
self.length - 1, parameters))
t = np.where(parameters == self.length,
1, np.remainder(parameters, 1)).reshape(-1, 1)
return self.p[i] + t * self.vectors[i]
def _pixelize_to_parameters(self, size):
"""
Return array of parameters where pixel boundary intersects self.
"""
extent = np.array([self.points.min(0), self.points.max(0)])
size = np.array([1, 1]) * size # Coerce scalar size
parameters = []
# Loop dimensions for intersection parameters
for i in range(extent.shape[-1]):
intersects = np.arange(
size[i] * np.ceil(extent[0, i] / size[i]),
size[i] * np.ceil(extent[1, i] / size[i]),
size[i],
).reshape(-1, 1)
# Calculate intersection parameters for each vector
nonzero = self.vectors[:, i].nonzero()
lparameters = ((intersects - self.p[nonzero, i]) /
self.vectors[nonzero, i])
# Add integer to parameter and mask outside line
global_parameters = np.ma.array(
np.ma.array(lparameters + np.arange(nonzero[0].size)),
mask=np.logical_or(lparameters < 0, lparameters > 1),
)
# Only unmasked values must be in parameters
parameters.append(global_parameters.compressed())
# Add parameters for original points
parameters.append(np.arange(self.length + 1))
return np.sort(np.unique(np.concatenate(parameters)))
def pixelize(self, size, endsonly=False):
"""
Return pixelized MagicLine instance.
"""
all_parameters = self._pixelize_to_parameters(size)
if endsonly:
index_points = np.equal(all_parameters,
np.round(all_parameters)).nonzero()[0]
index_around_points = np.sort(np.concatenate([
index_points,
index_points[:-1] + 1,
index_points[1:] - 1,
]))
parameters = all_parameters[index_around_points]
else:
parameters = all_parameters
return self.__class__(self[parameters])
def project(self, points):
"""
Return array of parameters.
Find closest projection of each point on the magic line.
"""
pass
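# Illustrative sketch (not part of the module): MagicLine.project above is an
# unimplemented stub. One way to compute the parameters is to project every
# point onto every segment and keep the closest candidate; this assumes no
# zero-length segments.
def _project_sketch(magicline, points):
    """ Return one parameter (segment index + fraction) per point. """
    points = np.asarray(points, dtype='f8')
    # offsets of each point to each segment start: (npoints, nsegments, 2)
    offsets = points[:, np.newaxis, :] - magicline.p[np.newaxis, :, :]
    # projection fraction onto each segment, clipped to the segment
    t = (offsets * magicline.vectors).sum(2) / (magicline.vectors ** 2).sum(1)
    t = np.clip(t, 0, 1)
    projections = magicline.p + t[..., np.newaxis] * magicline.vectors
    dist2 = ((points[:, np.newaxis, :] - projections) ** 2).sum(2)
    nearest = dist2.argmin(1)
    return nearest + t[np.arange(len(points)), nearest]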
|
madedotcom/photon-pump
|
test/connection/fake_server.py
|
Python
|
mit
| 1,317
| 0
|
import asyncio
class FakeProtocol(asyncio.Protocol):
def __init__(self, name):
self.name = name
self.connections_made = 0
self.data = []
self.connection_errors = []
self.expected_received = 0
        self.expected = 0
        self.expectation = None  # set by expect(); checked in data_received()
self.connected = asyncio.Future()
self.closed = asyncio.Future()
def connection_made(self, transport):
print("%s: connection made" % self.name)
self.transport = transport
self.connections_made += 1
self.connected.set_result(None)
def data_received(self, data):
print("%s: data received" % self.name)
self.data.append(data)
if self.expectation and not self.expectation.done():
self.expected_received += len(data)
if self.expected_received >= self.expected:
                self.expectation.set_result(None)
def connection_lost(self, exc):
print("%s: connection lost" % self.name)
self.closed.set_result(None)
self.connected = asyncio.Future()
self.connection_errors.append(exc)
def expect(self, count):
self.expected_received = 0
self.expected = count
self.expectation = asyncio.Future()
return self.expectation
def write(self, msg):
self.transport.write(msg)
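# Illustrative usage sketch (not part of the module; names are hypothetical):
#
#   async def start_fake_server():
#       loop = asyncio.get_running_loop()
#       proto = FakeProtocol("fake-server")
#       server = await loop.create_server(lambda: proto, "127.0.0.1", 0)
#       return server, proto   # callers can then `await proto.connected`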
|
daanwierstra/pybrain
|
pybrain/rl/environments/twoplayergames/capturegame.py
|
Python
|
bsd-3-clause
| 9,101
| 0.007911
|
__author__ = 'Tom Schaul, tom@idsia.ch'
from random import choice
from scipy import zeros
from twoplayergame import TwoPlayerGame
# TODO: undo operation
class CaptureGame(TwoPlayerGame):
""" the capture game is a simplified version of the Go game: the first player to capture a stone wins!
Pass moves are forbidden."""
# CHECKME: suicide allowed?
BLACK = 1
WHITE = -1
EMPTY = 0
startcolor = BLACK
def __init__(self, size, suicideenabled = True):
""" the size of the board is generally between 3 and 19. """
self.size = size
self.suicideenabled = suicideenabled
self.reset()
def _iterPos(self):
""" an iterator over all the positions of the board. """
for i in range(self.size):
for j in range(self.size):
yield (i,j)
def reset(self):
""" empty the board. """
TwoPlayerGame.reset(self)
self.movesDone = 0
self.b = {}
for p in self._iterPos():
self.b[p] = self.EMPTY
# which stone belongs to which group
self.groups = {}
# how many liberties does each group have
self.liberties = {}
@property
def indim(self):
return self.size**2
@property
def outdim(self):
return 2*self.size**2
def getBoardArray(self):
""" an array with thow boolean values per position, indicating
'white stone present' and 'black stone present' respectively. """
a = zeros(self.outdim)
for i, p in enumerate(self._iterPos()):
if self.b[p] == self.WHITE:
a[2*i] = 1
elif self.b[p] == self.BLACK:
a[2*i+1] = 1
return a
def isLegal(self, c, pos):
if pos not in self.b:
return False
elif self.b[pos] != self.EMPTY:
return False
elif not self.suicideenabled:
return not self._suicide(c, pos)
return True
def doMove(self, c, pos):
""" the action is a (color, position) tuple, for the next stone to move.
returns True if the move was legal. """
self.movesDone += 1
if pos == 'resign':
self.winner = -c
return True
elif not self.isLegal(c, pos):
return False
elif self._suicide(c, pos):
assert self.suicideenabled
self.b[pos] = 'y'
self.winner = -c
return True
elif self._capture(c, pos):
self.winner = c
self.b[pos] = 'x'
return True
else:
self._setStone(c, pos)
return True
def getSensors(self):
""" just a list of the board position states. """
return map(lambda x: x[1], sorted(self.b.items()))
def __str__(self):
s = ''
for i in range(self.size):
for j in range(self.size):
val = self.b[(i,j)]
if val == self.EMPTY: s += ' .'
elif val == self.BLACK: s += ' X'
elif val == self.WHITE: s += ' O'
else: s += ' '+str(val)
s += '\n'
if self.winner:
if self.winner == self.BLACK:
w = 'Black (#)'
elif self.winner == self.WHITE:
w = 'White (*)'
else:
w = self.winner
s += 'Winner: '+w
s += ' (moves done:'+str(self.movesDone)+')\n'
return s
def _neighbors(self, pos):
""" the 4 neighboring positions """
res = []
if pos[1] < self.size -1: res.append((pos[0], pos[1]+1))
        if pos[1] > 0: res.append((pos[0], pos[1]-1))
if pos[0] < self.size -1: res.append((pos[0]+1, pos[1]))
if pos[0] > 0: res.append((pos[0]-1, pos[1]))
return res
    def _setStone(self, c, pos):
""" set stone, and update liberties and groups. """
self.b[pos] = c
merge = False
self.groups[pos] = self.size*pos[0]+pos[1]
freen = filter(lambda n: self.b[n] == self.EMPTY, self._neighbors(pos))
self.liberties[self.groups[pos]] = set(freen)
for n in self._neighbors(pos):
if self.b[n] == -c:
self.liberties[self.groups[n]].difference_update([pos])
elif self.b[n] == c:
if merge:
newg = self.groups[pos]
oldg = self.groups[n]
if newg == oldg:
self.liberties[newg].difference_update([pos])
else:
# merging 2 groups
for p in self.groups.keys():
if self.groups[p] == oldg:
self.groups[p] = newg
self.liberties[newg].update(self.liberties[oldg])
self.liberties[newg].difference_update([pos])
del self.liberties[oldg]
else:
# connect to this group
del self.liberties[self.groups[pos]]
self.groups[pos] = self.groups[n]
self.liberties[self.groups[n]].update(freen)
self.liberties[self.groups[n]].difference_update([pos])
merge = True
def _suicide(self, c, pos):
""" would putting a stone here be suicide for c? """
# any free neighbors?
for n in self._neighbors(pos):
if self.b[n] == self.EMPTY:
return False
# any friendly neighbor with extra liberties?
for n in self._neighbors(pos):
if self.b[n] == c:
if len(self.liberties[self.groups[n]]) > 1:
return False
        # capture all surrounding enemies?
if self._capture(c, pos):
return False
return True
def _capture(self, c, pos):
""" would putting a stone here lead to a capture? """
for n in self._neighbors(pos):
if self.b[n] == -c:
if len(self.liberties[self.groups[n]]) == 1:
return True
return False
def getLiberties(self, pos):
""" how many liberties does the stone at pos have? """
if self.b[pos] == self.EMPTY:
return None
return len(self.liberties[self.groups[pos]])
def getGroupSize(self, pos):
""" what size is the worm that this stone is part of? """
if self.b[pos] == self.EMPTY:
return None
g = self.groups[pos]
return len(filter(lambda x: x==g, self.groups.values()))
def getLegals(self, c):
""" return all the legal positions for a color """
return filter(lambda p: self.b[p] == self.EMPTY, self._iterPos())
def getAcceptable(self, c):
""" return all legal positions for a color that don't commit suicide. """
return filter(lambda p: not self._suicide(c, p), self.getLegals(c))
def getKilling(self, c):
""" return all legal positions for a color that immediately kill the opponent. """
return filter(lambda p: self._capture(c, p), self.getAcceptable(c))
def randomBoard(self, nbmoves):
""" produce a random, undecided and legal capture-game board, after at most nbmoves.
@return: the number of moves actually done. """
c = self.BLACK
self.reset()
for i in range(nbmoves):
l = set(self.getAcceptable(c))
l.difference_update(self.getKilling(c))
if len(l) == 0:
return i
self._setStone(c, choice(list(l)))
c = -c
return nbmoves
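    # Illustrative usage sketch (not part of the original class):
    #   game = CaptureGame(size=5)
    #   game.randomBoard(10)    # up to 10 random, non-killing moves
    #   print game              # ASCII board: X = black, O = white
    #   game.doMove(CaptureGame.BLACK, (0, 0))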
def giveHandicap(self, h, color = BLACK):
i = 0
for pos in self._handicapIterator():
i += 1
if i > h:
|
yaelelmatad/EtsyApiTest
|
etsy/_core.py
|
Python
|
gpl-3.0
| 10,263
| 0.005164
|
from __future__ import with_statement
from contextlib import closing
import simplejson as json
import urllib2
from urllib import urlencode
import os
import re
import tempfile
import time
from _multipartformdataencode import encode_multipart_formdata
missing = object()
class TypeChecker(object):
def __init__(self):
self.checkers = {
'int': self.check_int,
'float': self.check_float,
'string': self.check_string,
}
def __call__(self, method, **kwargs):
params = method['params']
for k, v in kwargs.items():
if k == 'includes': continue
if k not in params:
raise ValueError('Unexpected argument: %s=%s' % (k, v))
t = params[k]
checker = self.checkers.get(t, None) or self.compile(t)
ok, converted = checker(v)
if not ok:
raise ValueError(
"Bad value for parameter %s of type '%s' - %s" % (k, t, v))
kwargs[k] = converted
def compile(self, t):
if t.startswith('enum'):
f = self.compile_enum(t)
else:
f = self.always_ok
self.checkers[t] = f
return f
def compile_enum(self, t):
terms = [x.strip() for x in t[5:-1].split(',')]
def check_enum(value):
return (value in terms), value
return check_enum
def always_ok(self, value):
return True, value
def check_int(self, value):
if isinstance(value, long):
return True, value
return isinstance(value, int), value
def check_float(self, value):
if isinstance(value, int):
return True, value
return isinstance(value, float), value
def check_string(self, value):
return isinstance(value, basestring), value
class APIMethod(object):
def __init__(self, api, spec):
"""
Parameters:
api - API object that this method is associated with.
spec - dict with the method specification; e.g.:
{'name': 'createListing', 'uri': '/listings', 'visibility':
'private', 'http_method': 'POST', 'params': {'description':
'text', 'tags': 'array(string)', 'price': 'float', 'title':
'string', 'materials': 'array(string)', 'shipping_template_id':
'int', 'quantity': 'int', 'shop_section_id': 'int'}, 'defaults':
{'materials': None, 'shop_section_id': None}, 'type': 'Listing',
'description': 'Creates a new Listing'}
"""
self.api = api
self.spec = spec
self.type_checker = self.api.type_checker
self.__doc__ = self.spec['description']
self.compiled = False
def __call__(self, *args, **kwargs):
if not self.compiled:
self.compile()
return self.invoke(*args, **kwargs)
def compile(self):
uri = self.spec['uri']
        self.positionals = re.findall('{(.*?)}', uri)  # non-greedy: one name per placeholder
for p in self.positionals:
uri = uri.replace('{%s}' % p, '%%(%s)s' % p)
self.uri_format = uri
self.compiled = True
def invoke(self, *args, **kwargs):
if args and not self.positionals:
raise ValueError(
'Positional argument(s): %s provided, but this method does '
'not support them.' % (args,))
if len(args) > len(self.positionals):
raise ValueError('Too many positional arguments.')
for k, v in zip(self.positionals, args):
if k in kwargs:
raise ValueError(
'Positional argument duplicated in kwargs: %s' % k)
kwargs[k] = v
ps = {}
for p in self.positionals:
if p not in kwargs:
raise ValueError("Required argument '%s' not provided." % p)
ps[p] = kwargs[p]
del kwargs[p]
self.type_checker(self.spec, **kwargs)
return self.api._get(self.spec['http_method'], self.uri_format % ps, **kwargs)
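# Illustrative sketch (hypothetical spec, not from the live method table): how
# a compiled APIMethod substitutes positionals into its uri_format.
#
#   spec = {'name': 'getListing', 'uri': '/listings/{listing_id}',
#           'http_method': 'GET', 'params': {'listing_id': 'int'},
#           'defaults': {}, 'type': 'Listing',
#           'description': 'Get a single listing.'}
#   APIMethod(api, spec)(42)   # -> api._get('GET', '/listings/42')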
class MethodTableCache(object):
max_age = 60*60*24
def __init__(self, api, method_cache):
self.api = api
self.filename = self.resolve_file(method_cache)
self.used_cache = False
self.wrote_cache = False
def resolve_file(self, method_cache):
if method_cache is missing:
return self.default_file()
return method_cache
def etsy_home(self):
return self.api.etsy_home()
def default_file(self):
etsy_home = self.etsy_home()
d = etsy_home if os.path.isdir(etsy_home) else tempfile.gettempdir()
return os.path.join(d, 'methods.%s.json' % self.api.api_version)
def get(self):
ms = self.get_cached()
if not ms:
ms = self.api.get_method_table()
self.cache(ms)
return ms
def get_cached(self):
if self.filename is None or not os.path.isfile(self.filename):
self.api.log('Not using cached method table.')
return None
if time.time() - os.stat(self.filename).st_mtime > self.max_age:
self.api.log('Method table too old.')
return None
with open(self.filename, 'r') as f:
self.used_cache = True
self.api.log('Reading method table cache: %s' % self.filename)
return json.loads(f.read())
def cache(self, methods):
if self.filename is None:
self.api.log('Method table caching disabled, not writing new cache.')
return
with open(self.filename, 'w') as f:
json.dump(methods, f)
self.wrote_cache = True
self.api.log('Wrote method table cache: %s' % self.filename)
class API(object):
def __init__(self, api_key='', key_file=None, method_cache=missing,
log=None):
"""
Creates a new API instance. When called with no arguments,
reads the appropriate API key from the default ($HOME/.etsy/keys)
file.
Parameters:
api_key - An explicit API key to use.
key_file - A file to read the API keys from.
method_cache - A file to save the API method table in for
24 hours. This speeds up the creation of API
objects.
            log - A callable that accepts a string parameter.
Receives log messages. No logging is done if
this is None.
        Only one of api_key and key_file may be passed.
If method_cache is explicitly set to None, no method table
caching is performed. If the parameter is not passed, a file in
$HOME/.etsy is used if that directory exists. Otherwise, a
temp file is used.
"""
if not getattr(self, 'api_url', None):
raise AssertionError('No api_url configured.')
        if self.api_url.endswith('/'):
raise AssertionError('api_url should not end with a slash.')
if not getattr(self, 'api_version', None):
raise AssertionError('API object should define api_version')
if api_key and key_file:
raise AssertionError('Keys can be read from a file or passed, '
'but not both.')
if api_key:
self.api_key = api_key
else:
self.api_key = self._read_key(key_file)
self.log = log or self._ignore
if not callable(self.log):
raise ValueError('log must be a callable.')
self.type_checker = TypeChecker()
self.decode = json.loads
self.log('Creating %s Etsy API, base url=%s.' % (
self.api_version, self.api_url))
self._get_methods(method_cache)
def _ignore(self, _):
pass
def _get_methods(self, method_cache):
self.method_cache = MethodTableCache(self, method_cache)
ms = self.method_cache.get()
self._methods = dict([(m['na
|
nickmeharry/django-mysql
|
tests/testapp/test_operations.py
|
Python
|
bsd-3-clause
| 11,174
| 0
|
# -*- coding:utf-8 -*-
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from unittest import SkipTest
import pytest
from django.db import connection, migrations, models, transaction
from django.db.migrations.state import ProjectState
from django.test import TransactionTestCase
from django.test.utils import CaptureQueriesContext
from django_mysql.operations import (
AlterStorageEngine, InstallPlugin, InstallSOName
)
from django_mysql.test.utils import override_mysql_variables
def plugin_exists(plugin_name):
with connection.cursor() as cursor:
cursor.execute(
"""SELECT COUNT(*) FROM INFORMATION_SCHEMA.PLUGINS
WHERE PLUGIN_NAME = %s""",
(plugin_name,)
)
return (cursor.fetchone()[0] > 0)
def table_storage_engine(table_name):
with connection.cursor() as cursor:
cursor.execute(
"""SELECT ENGINE FROM INFORMATION_SCHEMA.TABLES
WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = %s""",
(table_name,)
)
return cursor.fetchone()[0]
class PluginOperationTests(TransactionTestCase):
@classmethod
def setUpClass(cls):
super(PluginOperationTests, cls).setUpClass()
has_metadata_lock_plugin = (
(connection.is_mariadb and connection.mysql_version >= (10, 0, 7))
)
if not has_metadata_lock_plugin:
raise SkipTest("The metadata_lock_info plugin is required")
def test_install_plugin(self):
"""
Test we can load the example plugin that every version of MySQL ships
with.
"""
assert not plugin_exists("metadata_lock_info")
state = ProjectState()
operation = InstallPlugin("metadata_lock_info",
"metadata_lock_info.so")
assert (
operation.describe() ==
"Installs plugin metadata_lock_info from metadata_lock_info.so"
)
new_state = state.clone()
with connection.schema_editor() as editor:
operation.database_forwards("testapp", editor,
state, new_state)
assert plugin_exists("metadata_lock_info")
new_state = state.clone()
with connection.schema_editor() as editor:
operation.database_backwards("testapp", editor,
new_state, state)
assert not plugin_exists("metadata_lock_info")
def test_install_soname(self):
"""
Test we can load the 'metadata_lock_info' library.
"""
assert not plugin_exists("metadata_lock_info")
state = ProjectState()
operation = InstallSOName("metadata_lock_info.so")
assert operation.describe() == "Installs library metadata_lock_info.so"
new_state = state.clone()
with connection.schema_editor() as editor:
operation.database_forwards("testapp", editor,
state, new_state)
assert plugin_exists("metadata_lock_info")
new_state = state.clone()
with connection.schema_editor() as editor:
operation.database_backwards("testapp", editor,
new_state, state)
assert not plugin_exists("metadata_lock_info")
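# Illustrative sketch (hypothetical migration, not part of this suite): the
# operations exercised here are declared in a project migration like so.
#
#   from django.db import migrations
#   from django_mysql.operations import AlterStorageEngine
#
#   class Migration(migrations.Migration):
#       dependencies = [('testapp', '0001_initial')]
#       operations = [
#           AlterStorageEngine('Pony', from_engine='MyISAM',
#                              to_engine='InnoDB'),
#       ]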
class AlterStorageEngineTests(TransactionTestCase):
def test_no_from_means_unreversible(self):
operation = AlterStorageEngine("mymodel", to_engine="InnoDB")
assert not operation.reversible
with pytest.raises(NotImplementedError) as excinfo:
operation.database_backwards(None, None, None, None)
assert str(excinfo.value) == "You cannot reverse this operation"
def test_from_means_reversible(self):
operation = AlterStorageEngine("mymodel", from_engine="MyISAM",
to_engine="InnoDB")
assert operation.reversible
def test_describe_without_from(self):
operation = AlterStorageEngine("Pony", "InnoDB")
assert (operation.describe() ==
"Alter storage engine for Pony to InnoDB")
def test_describe_with_from(self):
operation = AlterStorageEngine("Pony", from_engine="MyISAM",
to_engine="InnoDB")
assert (operation.describe() ==
"Alter storage engine for Pony from MyISAM to InnoDB")
def test_references_model(self):
operation = AlterStorageEngine("Pony", "InnoDB")
assert operation.references_model("PONY")
assert operation.references_model("Pony")
assert operation.references_model("pony")
assert not operation.references_model("poony")
@override_mysql_variables(default_storage_engine='MyISAM')
def test_running_with_changes(self):
project_state = self.set_up_test_model("test_arstd")
operation = AlterStorageEngine("Pony", from_engine="MyISAM",
to_engine="InnoDB")
assert table_storage_engine("test_arstd_pony") == "MyISAM"
# Forwards
new_state = project_state.clone()
operation.state_forwards("test_arstd", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_arstd", editor, project_state,
new_state)
assert table_storage_engine("test_arstd_pony") == "InnoDB"
# Backwards
with connection.schema_editor() as editor:
operation.database_backwards("test_arstd", editor, new_state,
project_state)
assert table_storage_engine("test_arstd_pony") == "MyISAM"
@override_mysql_variables(default_storage_engine='InnoDB')
def test_running_without_changes(self):
project_state = self.set_up_test_model("test_arstd")
operation = AlterStorageEngine("Pony", from_engine="MyISAM",
to_engine="InnoDB")
assert table_storage_engine("test_arstd_pony") == "InnoDB"
# Forwards - shouldn't actually do an ALTER since it is already InnoDB
new_state = project_state.clone()
operation.state_forwards("test_arstd", new_state)
capturer = CaptureQueriesContext(connection)
with capturer, connection.schema_editor() as editor:
operation.database_forwards("test_arstd", editor, project_state,
new_state)
queries = [q['sql'] for q in capturer.captured_queries]
assert not any(q.startswith('ALTER TABLE ') for q in queries), (
"One of the executed queries was an unexpected ALTER TABLE:\n{}"
.format("\n".join(queries))
)
assert table_storage_engine("test_arstd_pony") == "InnoDB"
# Backwards - will actually ALTER since it is going 'back' to MyISAM
with connection.schema_editor() as editor:
operation.database_backwards("test_arstd", editor, new_state,
project_state)
assert table_storage_engine("test_arstd_pony") == "MyISAM"
# Copied from django core migration tests
def set_up_test_model(
self, app_label, second_model=False, third_model=False,
related_model=False, mti_model=False, proxy_model=False,
unique_together=False, options=False, db_table=None,
index_together=False):
"""
Creates a test model state and database table.
"""
# Delete the tables if they already exist
table_names = [
# Start with ManyToMany tables
'_pony_stables', '_pony_vans',
# Then standard model tables
'_pony', '_stable', '_van',
]
tables = [(app_label + table_name) for table_name in table_names]
with connection.cursor() as cursor:
table_names = connection.introspection.table_names(cursor)
connection.disable_constraint_checking()
sql_delete_table = connection.schema_editor().sql_delete_table
|
mbox/django
|
tests/migrations/test_state.py
|
Python
|
bsd-3-clause
| 12,382
| 0.001131
|
from django.apps.registry import Apps
from django.db import models
from django.db.migrations.state import ProjectState, ModelState, InvalidBasesError
from django.test import TestCase
class StateTests(TestCase):
"""
Tests state construction, rendering and modification by operations.
"""
def test_create(self):
"""
Tests making a ProjectState from an Apps
"""
new_apps = Apps(["migrations"])
class Author(models.Model):
name = models.CharField(max_length=255)
bio = models.TextField()
age = models.IntegerField(blank=True, null=True)
class Meta:
app_label = "migrations"
apps = new_apps
unique_together = ["name", "bio"]
index_together = ["bio", "age"]
class AuthorProxy(Author):
class Meta:
app_label = "migrations"
apps = new_apps
proxy = True
ordering = ["name"]
class SubAuthor(Author):
width = models.FloatField(null=True)
            class Meta:
                app_label = "migrations"
                apps = new_apps
class Book(models.Model):
title = models.CharField(max_length=1000)
author = models.ForeignKey(Author)
contributors = models.ManyToManyField(Author)
class Meta:
app_label = "migrations"
apps = new_apps
verbose_name = "tome"
db_table = "test_tome"
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
author_proxy_state = project_state.models['migrations', 'authorproxy']
sub_author_state = project_state.models['migrations', 'subauthor']
book_state = project_state.models['migrations', 'book']
self.assertEqual(author_state.app_label, "migrations")
self.assertEqual(author_state.name, "Author")
self.assertEqual([x for x, y in author_state.fields], ["id", "name", "bio", "age"])
self.assertEqual(author_state.fields[1][1].max_length, 255)
self.assertEqual(author_state.fields[2][1].null, False)
self.assertEqual(author_state.fields[3][1].null, True)
self.assertEqual(author_state.options, {"unique_together": set([("name", "bio")]), "index_together": set([("bio", "age")])})
self.assertEqual(author_state.bases, (models.Model, ))
self.assertEqual(book_state.app_label, "migrations")
self.assertEqual(book_state.name, "Book")
self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author", "contributors"])
self.assertEqual(book_state.fields[1][1].max_length, 1000)
self.assertEqual(book_state.fields[2][1].null, False)
self.assertEqual(book_state.fields[3][1].__class__.__name__, "ManyToManyField")
self.assertEqual(book_state.options, {"verbose_name": "tome", "db_table": "test_tome"})
self.assertEqual(book_state.bases, (models.Model, ))
self.assertEqual(author_proxy_state.app_label, "migrations")
self.assertEqual(author_proxy_state.name, "AuthorProxy")
self.assertEqual(author_proxy_state.fields, [])
self.assertEqual(author_proxy_state.options, {"proxy": True, "ordering": ["name"]})
self.assertEqual(author_proxy_state.bases, ("migrations.author", ))
self.assertEqual(sub_author_state.app_label, "migrations")
self.assertEqual(sub_author_state.name, "SubAuthor")
self.assertEqual(len(sub_author_state.fields), 2)
self.assertEqual(sub_author_state.bases, ("migrations.author", ))
def test_render(self):
"""
Tests rendering a ProjectState into an Apps.
"""
project_state = ProjectState()
project_state.add_model_state(ModelState(
"migrations",
"Tag",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("hidden", models.BooleanField()),
],
{},
None,
))
project_state.add_model_state(ModelState(
"migrations",
"SubTag",
[
('tag_ptr', models.OneToOneField(
auto_created=True,
primary_key=True,
to_field='id',
serialize=False,
to='migrations.Tag',
)),
("awesome", models.BooleanField()),
],
options={},
bases=("migrations.Tag",),
))
new_apps = project_state.render()
self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field_by_name("name")[0].max_length, 100)
self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field_by_name("hidden")[0].null, False)
self.assertEqual(len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2)
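    # Illustrative note (not part of the original tests): a ModelState mirrors
    # a model declaratively; the positional arguments above are app_label,
    # name, a list of (field_name, field) pairs, an options dict, and a bases
    # tuple (None falls back to inheriting from models.Model).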
def test_render_model_inheritance(self):
class Book(models.Model):
title = models.CharField(max_length=1000)
class Meta:
app_label = "migrations"
apps = Apps()
class Novel(Book):
class Meta:
app_label = "migrations"
apps = Apps()
# First, test rendering individually
apps = Apps(["migrations"])
# We shouldn't be able to render yet
ms = ModelState.from_model(Novel)
with self.assertRaises(InvalidBasesError):
ms.render(apps)
# Once the parent model is in the app registry, it should be fine
ModelState.from_model(Book).render(apps)
ModelState.from_model(Novel).render(apps)
def test_render_model_with_multiple_inheritance(self):
class Foo(models.Model):
class Meta:
app_label = "migrations"
apps = Apps()
class Bar(models.Model):
class Meta:
app_label = "migrations"
apps = Apps()
class FooBar(Foo, Bar):
class Meta:
app_label = "migrations"
apps = Apps()
class AbstractSubFooBar(FooBar):
class Meta:
abstract = True
apps = Apps()
class SubFooBar(AbstractSubFooBar):
class Meta:
app_label = "migrations"
apps = Apps()
apps = Apps(["migrations"])
# We shouldn't be able to render yet
ms = ModelState.from_model(FooBar)
with self.assertRaises(InvalidBasesError):
ms.render(apps)
# Once the parent models are in the app registry, it should be fine
ModelState.from_model(Foo).render(apps)
self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model])
ModelState.from_model(Bar).render(apps)
self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model])
ModelState.from_model(FooBar).render(apps)
self.assertSequenceEqual(ModelState.from_model(FooBar).bases, ['migrations.foo', 'migrations.bar'])
ModelState.from_model(SubFooBar).render(apps)
self.assertSequenceEqual(ModelState.from_model(SubFooBar).bases, ['migrations.foobar'])
def test_render_project_dependencies(self):
"""
Tests that the ProjectState render method correctly renders models
to account for inter-model base dependencies.
"""
new_apps = Apps()
class A(models.Model):
class Meta:
app_label = "migrations"
apps = new_apps
class B(A):
class Meta:
app_label = "migrations"
apps = new_apps
class C(B):
class Meta:
app_label = "migrations"
apps = new_apps
class D(A):
class Meta:
app_label = "migrations"
apps = new_apps
class E(B):
|
idea4bsd/idea4bsd
|
python/testData/copyPaste/multiLine/IndentInnerFunction2.dst.py
|
Python
|
apache-2.0
| 55
| 0.090909
|
def foo(self):
x = 1
y = 2
    <caret>
z = 3
|
bussiere/pypyjs
|
website/demo/home/rfk/repos/pypy/lib-python/2.7/uuid.py
|
Python
|
mit
| 21,448
| 0.001958
|
r"""UUID objects (universally unique identifiers) according to RFC 4122.
This module provides immutable UUID objects (class UUID) and the functions
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
UUIDs as specified in RFC 4122.
If all you want is a unique ID, you should probably call uuid1() or uuid4().
Note that uuid1() may compromise privacy since it creates a UUID containing
the computer's network address. uuid4() creates a random UUID.
Typical usage:
>>> import uuid
# make a UUID based on the host ID and current time
>>> uuid.uuid1()
UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
# make a UUID using an MD5 hash of a namespace UUID and a name
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
# make a random UUID
>>> uuid.uuid4()
UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
# make a UUID using a SHA-1 hash of a namespace UUID and a name
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
# make a UUID from a string of hex digits (braces and hyphens ignored)
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
# convert a UUID to a string of hex digits in standard form
>>> str(x)
'00010203-0405-0607-0809-0a0b0c0d0e0f'
# get the raw 16 bytes of the UUID
>>> x.bytes
'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
# make a UUID from a 16-byte string
>>> uuid.UUID(bytes=x.bytes)
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
"""
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
'reserved for NCS compatibility', 'specified in RFC 4122',
'reserved for Microsoft compatibility', 'reserved for future definition']
class UUID(object):
"""Instances of the UUID class represent UUIDs as specified in RFC 4122.
UUID objects are immutable, hashable, and usable as dictionary keys.
Converting a UUID to a string with str() yields something in the form
    '12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts
five possible forms: a similar string of hexadecimal digits, or a tuple
of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
    48-bit values respectively) as an argument named 'fields', or a string
of 16 bytes (with all the integer fields in big-endian order) as an
argument named 'bytes', or a string of 16 bytes (with the first three
fields in little-endian order) as an argument named 'bytes_le', or a
single 128-bit integer as an argument named 'int'.
UUIDs have these read-only attributes:
bytes the UUID as a 16-byte string (containing the six
integer fields in big-endian byte order)
bytes_le the UUID as a 16-byte string (with time_low, time_mid,
and time_hi_version in little-endian byte order)
fields a tuple of the six integer fields of the UUID,
which are also available as six individual attributes
and two derived attributes:
time_low the first 32 bits of the UUID
time_mid the next 16 bits of the UUID
time_hi_version the next 16 bits of the UUID
clock_seq_hi_variant the next 8 bits of the UUID
clock_seq_low the next 8 bits of the UUID
node the last 48 bits of the UUID
time the 60-bit timestamp
clock_seq the 14-bit sequence number
hex the UUID as a 32-character hexadecimal string
int the UUID as a 128-bit integer
urn the UUID as a URN as specified in RFC 4122
variant the UUID variant (one of the constants RESERVED_NCS,
RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)
version the UUID version number (1 through 5, meaningful only
when the variant is RFC_4122)
"""
def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None,
int=None, version=None):
r"""Create a UUID from either a string of 32 hexadecimal digits,
a string of 16 bytes as the 'bytes' argument, a string of 16 bytes
in little-endian order as the 'bytes_le' argument, a tuple of six
integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
the 'fields' argument, or a single 128-bit integer as the 'int'
argument. When a string of hex digits is given, curly braces,
hyphens, and a URN prefix are all optional. For example, these
expressions all yield the same UUID:
UUID('{12345678-1234-5678-1234-567812345678}')
UUID('12345678123456781234567812345678')
UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
UUID(bytes='\x12\x34\x56\x78'*4)
UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' +
'\x12\x34\x56\x78\x12\x34\x56\x78')
UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
UUID(int=0x12345678123456781234567812345678)
Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must
be given. The 'version' argument is optional; if given, the resulting
UUID will have its variant and version set according to RFC 4122,
overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'.
"""
if [hex, bytes, bytes_le, fields, int].count(None) != 4:
raise TypeError('need one of hex, bytes, bytes_le, fields, or int')
if hex is not None:
hex = hex.replace('urn:', '').replace('uuid:', '')
hex = hex.strip('{}').replace('-', '')
if len(hex) != 32:
raise ValueError('badly formed hexadecimal UUID string')
int = long(hex, 16)
if bytes_le is not None:
if len(bytes_le) != 16:
raise ValueError('bytes_le is not a 16-char string')
bytes = (bytes_le[3] + bytes_le[2] + bytes_le[1] + bytes_le[0] +
bytes_le[5] + bytes_le[4] + bytes_le[7] + bytes_le[6] +
bytes_le[8:])
if bytes is not None:
if len(bytes) != 16:
raise ValueError('bytes is not a 16-char string')
int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
if fields is not None:
if len(fields) != 6:
raise ValueError('fields is not a 6-tuple')
(time_low, time_mid, time_hi_version,
clock_seq_hi_variant, clock_seq_low, node) = fields
if not 0 <= time_low < 1<<32L:
raise ValueError('field 1 out of range (need a 32-bit value)')
if not 0 <= time_mid < 1<<16L:
raise ValueError('field 2 out of range (need a 16-bit value)')
if not 0 <= time_hi_version < 1<<16L:
raise ValueError('field 3 out of range (need a 16-bit value)')
if not 0 <= clock_seq_hi_variant < 1<<8L:
raise ValueError('field 4 out of range (need an 8-bit value)')
if not 0 <= clock_seq_low < 1<<8L:
raise ValueError('field 5 out of range (need an 8-bit value)')
if not 0 <= node < 1<<48L:
raise ValueError('field 6 out of range (need a 48-bit value)')
clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low
int = ((time_low << 96L) | (time_mid << 80L) |
(time_hi_version << 64L) | (clock_seq << 48L) | node)
if int is not None:
if not 0 <= int < 1<<128L:
raise ValueError('int is out of range (need a 128-bit value)')
if version is not None:
if not 1 <= version <= 5:
raise ValueError('illegal version number')
# Set the variant to RFC 4122.
int &= ~(0xc000 << 48L)
int |= 0x8000 << 48L
|
Comp-UFSCar/neural-networks-2
|
mutant-networks/experiments/cifar-genetic/cifar-genetic.py
|
Python
|
mit
| 2,889
| 0.000346
|
"""Training and Predicting Cifar10 with Mutant Networks.
The networks mutate their architecture using genetic algorithms.
Author: Lucas David -- <ld492@drexel.edu>
Licence: MIT License 2016 (c)
"""
import logging
import artificial as art
import numpy as np
import tensorflow as tf
from artificial.utils.experiments import arg_parser, ExperimentSet, Experiment
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
import mutant
class Cifar10MutantEnvironment(mutant.Environment):
def build(self):
tf.logging.info('building environment...')
tf.logging.info('|-loading data...')
(X, y), (X_test, y_test) = cifar10.load_data()
X = X.astype('float32')
X_test = X_test.astype('float32')
X /= 255
X_test /= 255
g = ImageDataGenerator(
featurewise_center=True,
featurewise_std_normalization=True,
rotation_range=15,
width_shift_range=0.1,
height_shift_range=0.1,
horizontal_flip=True)
tf.logging.info('|-fitting image generator...')
g.fit(X)
tf.logging.info('|-defining data sets...')
self.dataset_ = g.flow(X, y, batch_size=self.consts.batch_size,
shuffle=self.consts.shuffle_train)
self.test_dataset_ = self.val_dataset_ = (X_test, y_test)
tf.logging.info('building complete')
return self
class MutateOverCifar10Experiment(Experiment):
env_ = None
def setup(self):
consts = self.consts
# Settings for logging.
verbosity_level = logging.INFO if consts.verbose else logging.WARNING
l = logging.getLogger('artificial')
l.setLevel(verbosity_level)
l.addHandler(logging.FileHandler(consts.log_file))
np.random.seed(consts.seed)
# Create mutation environment.
env = Cifar10MutantEnvironment(consts)
env.agents = [
mutant.Agent(search=art.searches.genetic.GeneticAlgorithm,
search_params=consts.search_params,
environment=env,
random_state=consts.agent_seed)
]
        self.env_ = env
def run(self):
try:
self.env_.live(n_cycles=1)
finally:
answer = self.env_.current_state
if answer:
tf.logging.info('train and validation loss after %i epochs: '
'(%f, %f)', self.consts.n_epochs,
answer.loss_, answer.validation_loss_)
if __name__ == '__main__':
print(__doc__, flush=True)
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tensorflow').propagate = False
(ExperimentSet(MutateOverCifar10Experiment)
.load_from_json(arg_parser.parse_args().constants)
.run())
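# Illustrative invocation sketch (not part of the script; the params path is
# hypothetical, and whether `constants` is a flag or a positional depends on
# how artificial's arg_parser declares it):
#
#   python cifar-genetic.py --constants params.json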
|
grischa/django-tastypie
|
tests/alphanumeric/tests/http.py
|
Python
|
bsd-3-clause
| 4,725
| 0.002751
|
import httplib
try:
import json
except ImportError: # < Python 2.6
from django.utils import simplejson as json
from testcases import TestServerTestCase
class HTTPTestCase(TestServerTestCase):
def setUp(self):
self.start_test_server(address='localhost', port=8001)
def tearDown(self):
self.stop_test_server()
def get_connection(self):
return httplib.HTTPConnection('localhost', 8001)
def test_get_apis_json(self):
connection = self.get_connection()
connection.request('GET', '/api/v1/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
data = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(data, '{"products": {"list_endpoint": "/api/v1/products/", "schema": "/api/v1/products/schema/"}}')
def test_get_apis_xml(self):
connection = self.get_connection()
connection.request('GET', '/api/v1/', headers={'Accept': 'application/xml'})
response = connection.getresponse()
connection.close()
data = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(data, '<?xml version=\'1.0\' encoding=\'utf-8\'?>\n<response><products type="hash"><list_endpoint>/api/v1/products/</list_endpoint><schema>/api/v1/products/schema/</schema></products></response>')
def test_get_list(self):
connection = self.get_connection()
connection.request('GET', '/api/v1/products/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
        expected = {
'meta': {
'previous': None,
'total_count': 6,
'offset': 0,
'limit': 20,
'next': None
},
'objects': [
{
'updated': '2010-03-30T20:05:00',
                'resource_uri': '/api/v1/products/11111/',
'name': 'Skateboardrampe',
'artnr': '11111',
'created': '2010-03-30T20:05:00'
},
{
'updated': '2010-05-04T20:05:00',
'resource_uri': '/api/v1/products/76123/',
'name': 'Bigwheel',
'artnr': '76123',
'created': '2010-05-04T20:05:00'
},
{
'updated': '2010-05-04T20:05:00',
'resource_uri': '/api/v1/products/WS65150-01/',
'name': 'Trampolin',
'artnr': 'WS65150-01',
'created': '2010-05-04T20:05:00'
},
{
'updated': '2010-05-04T20:05:00',
'resource_uri': '/api/v1/products/65100A-01/',
'name': 'Laufrad',
'artnr': '65100A-01',
'created': '2010-05-04T20:05:00'
},
{
'updated': '2010-05-04T20:05:00',
'resource_uri': '/api/v1/products/76123/01/',
'name': 'Bigwheel',
'artnr': '76123/01',
'created': '2010-05-04T20:05:00'
},
{
'updated': '2010-05-04T20:05:00',
'resource_uri': '/api/v1/products/WS65150/01-01/',
'name': 'Trampolin',
'artnr': 'WS65150/01-01',
'created': '2010-05-04T20:05:00'
}
]
}
self.assertEqual(json.loads(response.read()), expected)
def test_post_object(self):
connection = self.get_connection()
post_data = '{"artnr": "A76124/03", "name": "Bigwheel XXL"}'
connection.request('POST', '/api/v1/products/', body=post_data, headers={'Accept': 'application/json', 'Content-type': 'application/json'})
response = connection.getresponse()
self.assertEqual(response.status, 201)
self.assertEqual(dict(response.getheaders())['location'], 'http://localhost:8001/api/v1/products/A76124/03/')
# make sure posted object exists
connection.request('GET', '/api/v1/products/A76124/03/', headers={'Accept': 'application/json'})
response = connection.getresponse()
connection.close()
self.assertEqual(response.status, 200)
data = response.read()
obj = json.loads(data)
self.assertEqual(obj['name'], 'Bigwheel XXL')
self.assertEqual(obj['artnr'], 'A76124/03')
13lcp2000/pythonExercises | script3.py | Python | mit | 414 | 0.014493
def ContinueCurse():
test_note_1 = (int(input("Type first Test Value: ")))
test_note_2 = (int(input("Type Second Test Value: ")))
tmp_note = (((test_note_1 + test_note_2)/2)*0.6)
if ((3 - tmp_note)/0.4) <= 5:
print "Keep Studying :D"
else:
print "C
|
ancel course :/"
print "=== Program Finished ==="
print " ==== Starting the -ContinueCourse- Script ===="
ContinueCurse()
cloudbase/neutron-virtualbox | neutron/tests/unit/test_linux_dhcp.py | Python | apache-2.0 | 56,280 | 0.000018
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
import netaddr
from oslo_config import cfg
from neutron.agent.common import config
from neutron.agent.dhcp import config as dhcp_config
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.common import config as base_config
from neutron.common import constants
from neutron.openstack.common import log as logging
from neutron.tests import base
LOG = logging.getLogger(__name__)
class FakeIPAllocation(object):
def __init__(self, address, subnet_id=None):
self.ip_address = address
self.subnet_id = subnet_id
class DhcpOpt(object):
def __init__(self, **kwargs):
self.__dict__.update(ip_version=4)
self.__dict__.update(kwargs)
def __str__(self):
return str(self.__dict__)
class FakePort1(object):
id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
admin_state_up = True
device_owner = 'foo1'
fixed_ips = [FakeIPAllocation('192.168.0.2',
'dddddddd-dddd-dddd-dddd-dddddddddddd')]
mac_address = '00:00:80:aa:bb:cc'
def __init__(self):
self.extra_dhcp_opts = []
class FakePort2(object):
id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
admin_state_up = False
device_owner = 'foo2'
fixed_ips = [FakeIPAllocation('192.168.0.3',
'dddddddd-dddd-dddd-dddd-dddddddddddd')]
mac_address = '00:00:f3:aa:bb:cc'
def __init__(self):
self.extra_dhcp_opts = []
class FakePort3(object):
id = '44444444-4444-4444-4444-444444444444'
admin_state_up = True
device_owner = 'foo3'
fixed_ips = [FakeIPAllocation('192.168.0.4',
'dddddddd-dddd-dddd-dddd-dddddddddddd'),
FakeIPAllocation('192.168.1.2',
'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
mac_address = '00:00:0f:aa:bb:cc'
def __init__(self):
self.extra_dhcp_opts = []
class FakePort4(object):
id = 'gggggggg-gggg-gggg-gggg-gggggggggggg'
admin_state_up = False
device_owner = 'foo3'
fixed_ips = [FakeIPAllocation('192.168.0.4',
'dddddddd-dddd-dddd-dddd-dddddddddddd'),
FakeIPAllocation('ffda:3ba5:a17a:4ba3:0216:3eff:fec2:771d',
'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
mac_address = '00:16:3E:C2:77:1D'
def __init__(self):
self.extra_dhcp_opts = []
class FakeV6Port(object):
id = 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'
admin_state_up = True
device_owner = 'foo3'
fixed_ips = [FakeIPAllocation('fdca:3ba5:a17a:4ba3::2',
'ffffffff-ffff-ffff-ffff-ffffffffffff')]
mac_address = '00:00:f3:aa:bb:cc'
def __init__(self):
self.extra_dhcp_opts = []
class FakeV6PortExtraOpt(object):
id = 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'
admin_state_up = True
device_owner = 'foo3'
fixed_ips = [FakeIPAllocation('ffea:3ba5:a17a:4ba3:0216:3eff:fec2:771d',
'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
mac_address = '00:16:3e:c2:77:1d'
def __init__(self):
self.extra_dhcp_opts = [
DhcpOpt(opt_name='dns-server',
opt_value='ffea:3ba5:a17a:4ba3::100',
ip_version=6)]
class FakeDualPort(object):
id = 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'
admin_state_up = True
device_owner = 'foo3'
fixed_ips = [FakeIPAllocation('192.168.0.3',
'dddddddd-dddd-dddd-dddd-dddddddddddd'),
FakeIPAllocation('fdca:3ba5:a17a:4ba3::3',
'ffffffff-ffff-ffff-ffff-ffffffffffff')]
mac_address = '00:00:0f:aa:bb:cc'
def __init__(self):
self.extra_dhcp_opts = []
class FakeRouterPort(object):
id = 'rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr'
admin_state_up = True
device_owner = constants.DEVICE_OWNER_ROUTER_INTF
mac_address = '00:00:0f:rr:rr:rr'
def __init__(self, dev_owner=constants.DEVICE_OWNER_ROUTER_INTF,
ip_address='192.168.0.1'):
self.extra_dhcp_opts = []
self.device_owner = dev_owner
self.fixed_ips = [FakeIPAllocation(
ip_address, 'dddddddd-dddd-dddd-dddd-dddddddddddd')]
class FakePortMultipleAgents1(object):
id = 'rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr'
admin_state_up = True
device_owner = constants.DEVICE_OWNER_DHCP
fixed_ips = [FakeIPAllocation('192.168.0.5',
'dddddddd-dddd-dddd-dddd-dddddddddddd')]
mac_address = '00:00:0f:dd:dd:dd'
def __init__(self):
self.extra_dhcp_opts = []
class FakePortMultipleAgents2(object):
id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
admin_state_up = True
device_owner = constants.DEVICE_OWNER_DHCP
    fixed_ips = [FakeIPAllocation('192.168.0.6',
'dddddddd-dddd-dddd-dddd-dddddddddddd')]
mac_address = '00:00:0f:ee:ee:ee'
def __init__(self):
self.extra_dhcp_opts = []
class FakeV4HostRoute(object):
destination = '20.0.0.1/24'
nexthop = '20.0.0.1'
class FakeV4HostRouteGateway(object):
destination = '0.0.0.0/0'
nexthop = '10.0.0.1'
class FakeV6HostRoute(object):
destination = '2001:0200:feed:7ac0::/64'
nexthop = '2001:0200:feed:7ac0::1'
class FakeV4Subnet(object):
id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
ip_version = 4
cidr = '192.168.0.0/24'
gateway_ip = '192.168.0.1'
enable_dhcp = True
host_routes = [FakeV4HostRoute]
dns_nameservers = ['8.8.8.8']
class FakeV4MetadataSubnet(object):
id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
ip_version = 4
cidr = '169.254.169.254/30'
gateway_ip = '169.254.169.253'
enable_dhcp = True
host_routes = []
dns_nameservers = []
class FakeV4SubnetGatewayRoute(object):
id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
ip_version = 4
cidr = '192.168.0.0/24'
gateway_ip = '192.168.0.1'
enable_dhcp = True
host_routes = [FakeV4HostRouteGateway]
dns_nameservers = ['8.8.8.8']
class FakeV4SubnetMultipleAgentsWithoutDnsProvided(object):
id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
ip_version = 4
cidr = '192.168.0.0/24'
gateway_ip = '192.168.0.1'
enable_dhcp = True
dns_nameservers = []
host_routes = []
class FakeV4MultipleAgentsWithoutDnsProvided(object):
id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
subnets = [FakeV4SubnetMultipleAgentsWithoutDnsProvided()]
ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort(),
FakePortMultipleAgents1(), FakePortMultipleAgents2()]
namespace = 'qdhcp-ns'
class FakeV4SubnetMultipleAgentsWithDnsProvided(object):
id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
ip_version = 4
cidr = '192.168.0.0/24'
gateway_ip = '192.168.0.1'
enable_dhcp = True
dns_nameservers = ['8.8.8.8']
host_routes = []
class FakeV4MultipleAgentsWithDnsProvided(object):
id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
subnets = [FakeV4SubnetMultipleAgentsWithDnsProvided()]
ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort(),
FakePortMultipleAgents1(), FakePortMultipleAgents2()]
namespace = 'qdhcp-ns'
class FakeV6Subnet(object):
id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
ip_version = 6
cidr = 'fdca:3ba5:a17a:4ba3::/64'
gateway_ip = 'fdca:3ba5:a17a:4ba3::1'
enable_dhcp = True
    host_routes = [FakeV6HostRoute]
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_07_01/operations/_operations.py | Python | mit | 5,336 | 0.003936
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
**kwargs: Any
) -> HttpRequest:
api_version = "2021-07-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/providers/Microsoft.Compute/operations')
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class Operations(object):
"""Operations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.compute.v2021_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
**kwargs: Any
) -> Iterable["_models.ComputeOperationListResult"]:
"""Gets a list of compute operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ComputeOperationListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2021_07_01.models.ComputeOperationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeOperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
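        # ItemPaged plumbing: prepare_request builds the GET (initial URL or
        # a next_link), extract_data deserialises one page and returns a
        # (continuation, iterator) pair -- the continuation is None here, so
        # the pager stops after a single page.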
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ComputeOperationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.Compute/operations'} # type: ignore
emilio-simoes/qt-rest-client | tools/test-service/server.py | Python | gpl-3.0 | 4,688 | 0.009812
#!/usr/bin/env python
import os
import json
from flask import Flask, abort, jsonify, request, g, url_for
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.httpauth import HTTPBasicAuth
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import (TimedJSONWebSignatureSerializer
as Serializer, BadSignature, SignatureExpired)
colors = json.load(file('colors.json', 'r'))
# initialization
app = Flask(__name__)
app.config['SECRET_KEY'] = 'the quick brown fox jumps over the lazy dog'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
# extensions
db = SQLAlchemy(app)
auth = HTTPBasicAuth()
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(32), index=True)
password_hash = db.Column(db.String(64))
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
def generate_auth_token(self, expiration=600):
s = Serializer(app.config['SECRET_KEY'], expires_in=expiration)
return s.dumps({'id': self.id})
@staticmethod
def verify_auth_token(token):
s = Serializer(app.config['SECRET_KEY'])
try:
data = s.loads(token)
except SignatureExpired:
return None # valid token, but expired
except BadSignature:
return None # invalid token
user = User.query.get(data['id'])
return user
@auth.verify_password
def verify_password(username_or_token, password):
# first try to authenticate by token
user = User.verify_auth_token(username_or_token)
    if not user:
# try to authenticate with username/password
user = User.query.filter_by(username=username_or_token).first()
if not user or not user.verify_password(password):
return False
g.user = user
return True
@app.route('/api/users', methods=['POST'])
def new_user():
username = request.json.get('username')
password = request.json.get('password')
if username is None or password is None:
        abort(400)    # missing arguments
if User.query.filter_by(username=username).first() is not None:
abort(400) # existing user
user = User(username=username)
user.hash_password(password)
db.session.add(user)
db.session.commit()
return (jsonify({'username': user.username}), 201,
{'Location': url_for('get_user', id=user.id, _external=True)})
@app.route('/api/users/<int:id>', methods = ['GET'])
def get_user(id):
user = User.query.get(id)
if not user:
abort(400)
return jsonify({'username': user.username})
@app.route('/api/token', methods = ['GET'])
@auth.login_required
def get_auth_token():
print(request.headers.get('Date'))
token = g.user.generate_auth_token(600)
return jsonify({'token': token.decode('ascii'), 'duration': 600})
@app.route('/api/resource')
@auth.login_required
def get_resource():
return jsonify({'data': 'Hello, %s!' % g.user.username})
@app.route('/api/colors', methods = ['GET'])
def get_colors():
print(colors)
return jsonify( { "data" : colors })
@app.route('/api/colors/<name>', methods = ['GET'])
def get_color(name):
for color in colors:
if color["name"] == name:
return jsonify( color )
return jsonify( { 'error' : True } )
@app.route('/api/colors', methods= ['POST'])
@auth.login_required
def create_color():
print('create color')
color = {
'name': request.json['name'],
'value': request.json['value']
}
print(color)
colors.append(color)
return jsonify( color ), 201
@app.route('/api/colors/<name>', methods= ['PUT'])
@auth.login_required
def update_color(name):
success = False
print('update color')
for color in colors:
if color["name"] == name:
color['value'] = request.json.get('value', color['value'])
print(color)
return jsonify( color )
return jsonify( { 'error' : True } )
@app.route('/api/colors/<name>', methods=['DELETE'])
@auth.login_required
def delete_color(name):
success = False
print('delete color')
for color in colors:
if color["name"] == name:
colors.remove(color)
print(color)
return jsonify(color)
return jsonify( { 'error' : True } )
if __name__ == '__main__':
if not os.path.exists('db.sqlite'):
db.create_all()
app.run(debug = True)
offlinehacker-playground/sltv | sltv/input/xinput.py | Python | gpl-2.0 | 2,132 | 0.001876
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Holoscopio Tecnologia
# Author: Marcelo Jorge Vieira <metal@holoscopio.com>
# Author: Thadeu Lima de Souza Cascardo <cascardo@holoscopio.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gobject
import pygst
pygst.require("0.10")
import gst
from core import Input, INPUT_TYPE_VIDEO
from sltv.utils import Fract
CAPABILITIES = INPUT_TYPE_VIDEO
class XInput(Input):
def __init__(self):
Input.__init__(self, CAPABILITIES)
self.video_src = gst.element_factory_make("ximagesrc", "video_src")
# Setting format to time, to work with input-selector, since they're
# were not working together in version 0.10.18-1 from Debian.
# This should be fixed in ximagesrc's code and input-selector should
# also be fixed to work with byte format.
self.video_src.set_format(gst.FORMAT_TIME)
self.video_src.set_property("use-damage", False)
self.add(self.video_src)
self.capsfilter = gst.element_factory_make("capsfilter", "capsfilter")
self.add(self.capsfilter)
gst.element_link_many(self.video_src, self.capsfilter)
self.video_pad.set_target(self.capsfilter.src_pads().next())
def config(self, dict):
num, den = Fract.fromdecimal(dict["framerate"])
caps = gst.caps_from_string(
"video/x-raw-rgb, framerate=%d/%d" % (num, den)
)
        self.capsfilter.set_property("caps", caps)
lisiyuan656/Gesture-Recognizer | preprocessing/__init__.py | Python | gpl-2.0 | 250 | 0
from preprocessing.img_segmenting import ImgSegmenter
from preprocessing.NoiseRemoval import NoiseRemoval
from preprocessing.imageScaler import imageScaler
noise_remover = NoiseRemoval()
img_scaler = imageScaler()
segmenter = ImgSegmenter(5)
aligoren/pyalgo | ruler_algorithm.py | Python | mit | 1,641 | 0.014016
import sys
def ruler_algorithm(n1, n2):
"""
Ruler Algorithm yani Cetvel Algoritmasi. Verilen aralik ikiye bolundugunde
her alt parca da devamli olarak ikiye bolunmekte. Alt parcalarin uzunlugu
onceden verilen degere erisinceye kadar devam edilir.
"""
dot = (n1+n2)/2
    if abs((n2-n1)) < 2:
return
print("[%s]" % dot, end=' ')
ruler_algorithm(n1, dot)
ruler_algorithm(dot, n2)
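# Example: ruler_algorithm(0, 8) prints the midpoints in "ruler" order:
# [4.0] [2.0] [1.0] [3.0] [6.0] [5.0] [7.0]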
def ruler_algorithm_main():
sys.stdout.write("Ruler Algorithm
|
: ")
ruler_algorithm(0,20000)
ruler_algorithm_main()
# profiling result for 20.000 numbers
# profile: python -m profile ruler_algorithm.py
"""
147457 function calls (114691 primitive calls) in 0.998 seconds
Ordered by: standard name
ncalls   tottime  percall  cumtime  percall  filename:lineno(function)
32767      0.125    0.000    0.125    0.000  :0(abs)
32767      0.125    0.000    0.125    0.000  :0(charmap_encode)
1          0.000    0.000    0.998    0.998  :0(exec)
16383      0.265    0.000    0.484    0.000  :0(print)
1          0.000    0.000    0.000    0.000  :0(setprofile)
1          0.000    0.000    0.000    0.000  :0(write)
32767      0.094    0.000    0.218    0.000  cp857.py:18(encode)
1          0.000    0.000    0.998    0.998  profile:0(<code object <module> at 0x01E1C2A0, file "ruler_algorithm.py", line 1>)
0          0.000             0.000           profile:0(profiler)
1          0.000    0.000    0.998    0.998  ruler_algorithm.py:1(<module>)
1          0.000    0.000    0.998    0.998  ruler_algorithm.py:16(ruler_algorithm_main)
32767/1    0.390    0.000    0.998    0.998  ruler_algorithm.py:3(ruler_algorithm)
"""
caspar/KentLab | scripts/GMRplot3.py | Python | mit | 1,307 | 0.026014
import sys
import numpy as np
import matplotlib.pyplot as plt
import math
#from datetime import datetime
#import os
# load csv file
PATH = "./data/"
FILENAME = "Sample_F5_21_2_1mA_trial2";
#TIMESTAMP = datetime.now().strftime('%D_%H:%M')
#OUTPUT = '' + os.path.splitext(FILENAME)[0] + '_' + TIMESTAMP + '.png'
OUTPUT = FILENAME + '.png'  # assumed plain output name so the script runs; timestamped variant kept commented out above
field, resistance = np.loadtxt((PATH + FILENAME), skiprows=0, unpack=True, delimiter=' ')
print(OUTPUT)
color = []
array = []
i = 0
while (i < len(field) - 1):
if (float(field[i]) >= float(field[i+1])):
color = 'blue'
else:
color = 'red'
# array[i] = field[i], resistance[i], color[i]
# print(math.fabs(float(i)/10000.00))
    fig = plt.plot(field[i], resistance[i], '.', color=color)
i = i+1
def color():
global i
if i%3 == 0:
i = i + 1
print(i)
return 'red'
else:
i = i + 1
return 'blue'
# plot temperature vs. pressure + error bars
plt.ylabel("Resistance (Ohms)");
plt.xlabel("Magnetic Field Strength (Tesla)");
plt.title("Magnetoresistance in F5 Nanopillar");
# Save before show(): once the window is closed the current figure is
# destroyed and a later savefig() can write out a blank canvas.
plt.savefig(OUTPUT, dpi=None, facecolor='w', edgecolor='w',
            orientation='portrait', papertype=None, format=None,
            transparent=False, bbox_inches=None, pad_inches=0.1,
            frameon=None)
plt.show()
plt.close()
vivisect/synapse | synapse/tests/test_lib_fifo.py | Python | apache-2.0 | 9,331 | 0.000214
from synapse.tests.common import *
import synapse.lib.fifo as s_fifo
class FifoTest(SynTest):
def test_fifo_nack_past(self):
with self.getTestDir() as dirn:
conf = {
'dir': dirn,
'file:maxsize': 1024,
'window:max': 4,
'window:min': 2,
'window:fill': 1,
}
sent = []
with s_fifo.Fifo(conf) as fifo:
self.eq(fifo.wind.nack, 0)
while fifo.atom.size != 0:
fifo.put('whee')
nseq = fifo.nseq
path = fifo._getSeqPath(0)
os.unlink(path)
with s_fifo.Fifo(conf) as fifo:
self.eq(fifo.wind.nack, nseq)
def test_fifo_flush(self):
with self.getTestDir() as dirn:
conf = {'dir': dirn}
sent = []
with s_fifo.Fifo(conf) as fifo:
fifo.put('whee')
fifo.put('whee')
fifo.resync(xmit=sent.append)
fifo.ack(sent[0][1])
# dirty
fifo.flush()
# not dirty
fifo.flush()
def test_fifo_ack_neg1(self):
with self.getTestDir() as dirn:
conf = {'dir': dirn}
sent = []
with s_fifo.Fifo(conf, xmit=sent.append) as fifo:
fifo.put('foo')
fifo.put('bar')
slen = len(sent)
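                # ack(-1) requests a full re-transmit of everything still
                # outstanding, so the sent list should double, with equal halves.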
fifo.ack(-1)
self.eq(len(sent), slen * 2)
self.eq(sent[:slen], sent[slen:])
# also test ack of lower than nack
self.true(fifo.ack(sent[0][1]))
self.false(fifo.ack(sent[0][1]))
def test_fifo_fifo(self):
with self.getTestDir() as dirn:
# some very small settings so we trigger
# more of the cleanup / culling code...
conf = {
'dir': dirn,
'file:maxsize': 1024,
'window:max': 4,
'window:min': 2,
'window:fill': 1,
}
sent = []
with s_fifo.Fifo(conf) as fifo:
fifo.put('foo')
fifo.resync(xmit=sent.append)
self.eq(sent[0][2], 'foo')
self.len(1, fifo.wind.dequ)
# these should all end up in the window
fifo.put('bar')
fifo.put('baz')
fifo.put('faz')
self.eq(sent[1][2], 'bar')
self.eq(sent[2][2], 'baz')
self.eq(sent[3][2], 'faz')
self.len(4, fifo.wind.dequ)
                self.true(fifo.wind.caught)
# the next should *not* make it in the window
                fifo.put('hehe')
fifo.put('haha')
fifo.put('hoho')
self.len(4, fifo.wind.dequ)
self.false(fifo.wind.caught)
# ack 0 should shrink the window, but not fill()
self.true(fifo.ack(sent[0][0]))
self.len(3, fifo.wind.dequ)
# ack next should trigger fill and load hehe/haha
# but still not catch us up...
self.true(fifo.ack(sent[1][0]))
self.len(6, sent)
self.len(4, fifo.wind.dequ)
self.false(fifo.wind.caught)
# ack skip to the window end (which should also set caught)
self.true(fifo.ack(sent[-1][0]))
self.len(7, sent)
self.true(fifo.wind.caught)
# now that we are caught up again, a put should xmit
fifo.put('visi')
self.eq(sent[-1][2], 'visi')
# now lets check starting with an existing one...
with s_fifo.Fifo(conf) as fifo:
sent = []
fifo.resync(xmit=sent.append)
self.eq(sent[0][2], 'hoho')
self.eq(sent[1][2], 'visi')
self.true(fifo.wind.caught)
# send a skip ack
fifo.ack(sent[1][0])
# put in enough messages to cause file next
while fifo.atom.size != 0:
fifo.put('whee')
self.len(2, fifo.seqs)
self.false(fifo.wind.caught)
# put in enough that when we jump over to
# reading this file, we will not be caught up
fifo.put('foo1')
fifo.put('bar1')
fifo.put('baz1')
fifo.put('faz1')
fifo.put('zip1')
self.true(os.path.isfile(fifo._getSeqPath(0)))
while sent[-1][2] == 'whee':
fifo.ack(sent[-1][0])
self.len(1, fifo.seqs)
self.none(fifo.atoms.get(0))
self.false(os.path.isfile(fifo._getSeqPath(0)))
# the last message should be one of the following
# ( we may not know exactly which due to whee mod math )
self.true(sent[-1][2] in ('foo1', 'bar1', 'baz1', 'faz1'))
self.false(fifo.wind.caught)
# by acking until faz1 we will fill zip1 into the window
# and cause ourselves to be caught up again...
# faz1 should be lifted into the window
while sent[-1][2] != 'zip1':
fifo.ack(sent[-1][0])
self.true(fifo.wind.caught)
def test_fifo_puts(self):
with self.getTestDir() as dirn:
sent = []
conf = {'dir': dirn}
with s_fifo.Fifo(conf) as fifo:
fifo.resync(xmit=sent.append)
fifo.puts(('foo', 'bar'))
self.eq(tuple(sent), ((0, 4, 'foo'), (4, 8, 'bar')))
def test_fifo_resync_race_put(self):
with self.getTestDir() as dirn:
N = 1000
conf = {'dir': dirn}
evt = threading.Event()
items = ['foo' + str(i) for i in range(N)]
sent = []
def race(data):
evt.set()
time.sleep(0)
sent.append(data[2])
@firethread
def otherwrite():
evt.wait()
fifo.puts(('attempting to mutate', 'during iteration'))
with s_fifo.Fifo(conf) as fifo:
fifo.puts(items)
thr = otherwrite()
fifo.resync(xmit=race)
thr.join()
self.len(N + 2, sent)
self.eq(sent[0:N], items)
self.eq(sent[N:N + 1], ['attempting to mutate'])
self.eq(sent[N + 1:N + 2], ['during iteration'])
with s_fifo.Fifo(conf) as fifo:
fifo.resync(xmit=race)
self.len(2 * (N + 2), sent)
self.eq(sent[0:N], items)
self.eq(sent[N:N + 1], ['attempting to mutate'])
self.eq(sent[N + 1:N + 2], ['during iteration'])
self.eq(sent[N + 2:2 * N + 2], items)
self.eq(sent[2 * N + 2:2 * N + 3], ['attempting to mutate'])
self.eq(sent[2 * N + 3:2 * N + 4], ['during iteration'])
def test_fifo_resync_race_ack(self):
with self.getTestDir() as dirn:
N = 1000
conf = {'dir': dirn}
evt = threading.Event()
items = ['foo' + str(i) for i in range(N)]
sent = []
def race(data):
evt.set()
time.sleep(0)
sent.append(data[2])
@firethread
def otherwrite():
evt.wait()
# This call to ack will not actually cull anything because
# it won't run until after iteration has completed.
fifo.ack(100)
with s_fifo.Fifo(conf) as fifo:
fifo.puts(items)
thr = otherwrite()
fifo.resync(xmit=race)
thr.join()
# The end result should be all of the items in order.
apatriciu/OpenStackOpenCL | computeOpenCL/nova/nova/OpenCL/OpenCLClientException.py | Python | apache-2.0 | 173 | 0.017341
import exceptions
class OpenCLClientException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
        return repr(self.value)
gista/django-geoshortcuts | geoshortcuts/tests/test_app/models.py | Python | gpl-2.0 | 1,245 | 0.036948
from django.contrib.gis.db import models
class TestTable(models.Model):
field0 = models.CharField(max_length = 50)
def __unicode__(self):
return self.field0
class TestPoint(models.Model):
field0 = models.CharField(max_length = 50)
field1 = models.IntegerField()
    field2 = models.DateTimeField()
field3 = models.FloatField()
field4 = models.BooleanField()
field5 = models.ForeignKey(TestTable)
the_geom = models.PointField(srid=102067)
objects = models.GeoManager()
def __unicode__(self):
return self.field0
class TestLineString(models.Model):
field0 = models.CharField(max_length = 50)
field1 = models.IntegerField()
field2 = models.DateTimeField()
field3 = models.FloatField()
field4 = models.BooleanField()
field5 = models.ForeignKey(TestTable)
    the_geom = models.LineStringField()
objects = models.GeoManager()
def __unicode__(self):
return self.field0
class TestPolygon(models.Model):
field0 = models.CharField(max_length = 50)
field1 = models.IntegerField()
field2 = models.DateTimeField()
field3 = models.FloatField()
field4 = models.BooleanField()
field5 = models.ForeignKey(TestTable)
the_geom = models.PolygonField()
objects = models.GeoManager()
def __unicode__(self):
return self.field0
yw374cornell/e-mission-server | emission/analysis/configs/config_utils.py | Python | bsd-3-clause | 967 | 0.004137
import logging
import emission.storage.timeseries.abstract_timeseries as esta
import emission.core.wrapper.entry as ecwe
def get_last_entry(user_id, time_query, config_key):
user_ts = esta.TimeSeries.get_time_series(user_id)
# get the list of overrides for this time range. This should be non zero
    # only if there has been an override since the last run, which needs to be
# saved back into the cache.
config_overrides = list(user_ts.find_entries([config_key], time_query))
logging.debug("Found %d user overrides for user %s" % (len(config_overrides), user_id))
if len(config_overrides) == 0:
logging.warning("No user defined overrides for %s, early return" % user_id)
return (None, None)
else:
        # entries are sorted by the write_ts, we can take the last value
coe = ecwe.Entry(config_overrides[-1])
logging.debug("last entry is %s" % coe)
return (coe.data, coe.metadata.write_ts)
ttycl/keyhub | keyhub/wsgi.py | Python | apache-2.0 | 46 | 0
from flask import Flask
app = Flask('keyhub')
ryandoherty/RaceCapture_App | autosportlabs/racecapture/views/dashboard/widgets/graphicalgauge.py | Python | gpl-3.0 | 864 | 0.013889
import kivy
kivy.require('1.9.1')
from utils import kvFind
from kivy.core.window import Window
from kivy.properties import NumericProperty
from autosportlabs.racecapture.views.dashboard.widgets.gauge import CustomizableGauge
class GraphicalGauge(CustomizableGauge):
_gaugeView = None
gauge_size = NumericProperty(0)
def _update_gauge_size(self, size):
self.gauge_size = size
def __init__(self, **kwargs):
super(GraphicalGauge, self).__init__(**kwargs)
def on_size(self, instance, value):
width = value[0]
height = value[1]
size = width if width < height else height
self._update_gauge_size(size)
@property
def graphView(self):
if not self._gaugeView:
self._gaugeView = kvFind(self, 'rcid', 'gauge')
return self._gaugeView
kuropatkin/lte | src/virtual-net-device/bindings/callbacks_list.py | Python | gpl-2.0 | 1,101 | 0.00545
callback_classes = [
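    # Each row is one ns-3 callback template instantiation: a return type
    # followed by up to nine argument types, padded out with 'ns3::empty'.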
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Packet>', 'ns3::Address const&', 'ns3::Address const&', 'unsigned short', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>'
|
, 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['unsigned char', 'ns3::Ptr<ns3::QueueItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
RomuloOliveira/django-dojo | minitwitter/app/views.py | Python | mit | 544 | 0.005515
from django.shortcuts import render
from django.http import HttpResponse, Http404
from minitwitter.app.models import Tweet
def index(request):
    last_tweets = Tweet.objects.all().order_by('-timestamp')[:15]
    return render(request, 'tweets/index.html', {'last_tweets': last_tweets})
def show(request, user, tweet_id):
try:
tweet = Tweet.objects.get(id=tweet_id, user=user)
except Tweet.DoesNotExist, e:
raise Http404('Tweet not found')
else:
return HttpResponse('Here\'s your tweet: ' + str(tweet))
warehouseman/trello-swagger-generator | processDefaults.py | Python | mit | 931 | 0.025779
#!/usr/bin/env python
from bs4 import BeautifulSoup
# Constants
DEFAULT = "Default:"
# Methods
def processDefaults(soup, swagger):
default = ''
for string in soup.code.stripped_strings :
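        # Join the comma-separated values with ', ' and swap the final
        # separator for ' and', e.g. "a,b,c" -> "a, b and c".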
        default = ' and'.join(', '.join(string.split(',')).rsplit(',', 1))
swagger['default'] = default
if "true" in default : swagger['default'] = True
if "false" in default : swagger['default'] = False
return
# - - - - - - - - - - -
# Main routine (for testing)
def main():
from test.testdataDefaults import test_values
from test.testdataDefaults import swagger
idx = 0
    for frag in test_values:
print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
soup = BeautifulSoup(frag)
# if idx == 1 :
if idx != -1 :
processDefaults(soup.body.li, swagger)
print "swagger = {}".format(swagger)
idx = idx + 1
print(">>~~~~~~~~~~~~~~~~~~~~~~~~~<<")
if __name__ == "__main__": main()
tinloaf/home-assistant | homeassistant/components/sensor/elkm1.py | Python | apache-2.0 | 7,872 | 0
"""
Support for control of ElkM1 sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.elkm1/
"""
from homeassistant.components.elkm1 import (
DOMAIN as ELK_DOMAIN, create_elk_entities, ElkEntity)
DEPENDENCIES = [ELK_DOMAIN]
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Create the Elk-M1 sensor platform."""
if discovery_info is None:
return
elk = hass.data[ELK_DOMAIN]['elk']
entities = create_elk_entities(
        hass, elk.counters, 'counter', ElkCounter, [])
entities = create_elk_entities(
hass, elk.keypads, 'keypad', ElkKeypad, entities)
entities = create_elk_entities(
hass, [elk.panel], 'panel', ElkPanel, entities)
entities = create_elk_entities(
hass, elk.settings, 'setting', ElkSetting, entities)
entities = create_elk_entities(
hass, elk.zones, 'zone', ElkZone, entities)
async_add_entities(entities, True)
def temperature_to_state(temperature, undefined_temperature):
"""Convert temperature to a state."""
return temperature if temperature > undefined_temperature else None
class ElkSensor(ElkEntity):
"""Base representation of Elk-M1 sensor."""
def __init__(self, element, elk, elk_data):
"""Initialize the base of all Elk sensors."""
super().__init__(element, elk, elk_data)
self._state = None
@property
def state(self):
"""Return the state of the sensor."""
return self._state
class ElkCounter(ElkSensor):
"""Representation of an Elk-M1 Counter."""
@property
def icon(self):
"""Icon to use in the frontend."""
return 'mdi:numeric'
def _element_changed(self, element, changeset):
self._state = self._element.value
class ElkKeypad(ElkSensor):
"""Representation of an Elk-M1 Keypad."""
@property
def temperature_unit(self):
"""Return the temperature unit."""
return self._temperature_unit
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._temperature_unit
@property
def icon(self):
"""Icon to use in the frontend."""
return 'mdi:thermometer-lines'
@property
def device_state_attributes(self):
"""Attributes of the sensor."""
from elkm1_lib.util import username
attrs = self.initial_attrs()
attrs['area'] = self._element.area + 1
attrs['temperature'] = self._element.temperature
attrs['last_user_time'] = self._element.last_user_time.isoformat()
attrs['last_user'] = self._element.last_user + 1
attrs['code'] = self._element.code
attrs['last_user_name'] = username(self._elk, self._element.last_user)
attrs['last_keypress'] = self._element.last_keypress
return attrs
def _element_changed(self, element, changeset):
self._state = temperature_to_state(self._element.temperature, -40)
async def async_added_to_hass(self):
"""Register callback for ElkM1 changes and update entity state."""
await super().async_added_to_hass()
self.hass.data[ELK_DOMAIN]['keypads'][
self._element.index] = self.entity_id
class ElkPanel(ElkSensor):
"""Representation of an Elk-M1 Panel."""
@property
def icon(self):
"""Icon to use in the frontend."""
return "mdi:home"
@property
def device_state_attributes(self):
"""Attributes of the sensor."""
attrs = self.initial_attrs()
attrs['system_trouble_status'] = self._element.system_trouble_status
return attrs
def _element_changed(self, element, changeset):
if self._elk.is_connected():
self._state = 'Paused' if self._element.remote_programming_status \
else 'Connected'
else:
self._state = 'Disconnected'
class ElkSetting(ElkSensor):
"""Representation of an Elk-M1 Setting."""
@property
def icon(self):
"""Icon to use in the frontend."""
return 'mdi:numeric'
def _element_changed(self, element, changeset):
self._state = self._element.value
@property
def device_state_attributes(self):
"""Attributes of the sensor."""
from elkm1_lib.const import SettingFormat
attrs = self.initial_attrs()
attrs['value_format'] = SettingFormat(
self._element.value_format).name.lower()
return attrs
class ElkZone(ElkSensor):
"""Representation of an Elk-M1 Zone."""
@property
def icon(self):
"""Icon to use in the frontend."""
from elkm1_lib.const import ZoneType
zone_icons = {
ZoneType.FIRE_ALARM.value: 'fire',
ZoneType.FIRE_VERIFIED.value: 'fire',
ZoneType.FIRE_SUPERVISORY.value: 'fire',
ZoneType.KEYFOB.value: 'key',
ZoneType.NON_ALARM.value: 'alarm-off',
ZoneType.MEDICAL_ALARM.value: 'medical-bag',
ZoneType.POLICE_ALARM.value: 'alarm-light',
ZoneType.POLICE_NO_INDICATION.value: 'alarm-light',
ZoneType.KEY_MOMENTARY_ARM_DISARM.value: 'power',
ZoneType.KEY_MOMENTARY_ARM_AWAY.value: 'power',
ZoneType.KEY_MOMENTARY_ARM_STAY.value: 'power',
ZoneType.KEY_MOMENTARY_DISARM.value: 'power',
ZoneType.KEY_ON_OFF.value: 'toggle-switch',
ZoneType.MUTE_AUDIBLES.value: 'volume-mute',
ZoneType.POWER_SUPERVISORY.value: 'power-plug',
ZoneType.TEMPERATURE.value: 'thermometer-lines',
ZoneType.ANALOG_ZONE.value: 'speedometer',
ZoneType.PHONE_KEY.value: 'phone-classic',
ZoneType.INTERCOM_KEY.value: 'deskphone'
}
return 'mdi:{}'.format(
zone_icons.get(self._element.definition, 'alarm-bell'))
@property
def device_state_attributes(self):
"""Attributes of the sensor."""
from elkm1_lib.const import (
ZoneLogicalStatus, ZonePhysicalStatus, ZoneType)
attrs = self.initial_attrs()
attrs['physical_status'] = ZonePhysicalStatus(
self._element.physical_status).name.lower()
attrs['logical_status'] = ZoneLogicalStatus(
self._element.logical_status).name.lower()
attrs['definition'] = ZoneType(
self._element.definition).name.lower()
attrs['area'] = self._element.area + 1
attrs['bypassed'] = self._element.bypassed
attrs['triggered_alarm'] = self._element.triggered_alarm
return attrs
@property
def temperature_unit(self):
"""Return the temperature unit."""
from elkm1_lib.const import ZoneType
if self._element.definition == ZoneType.TEMPERATURE.value:
return self._temperature_unit
return None
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
from elkm1_lib.const import ZoneType
if self._element.definition == ZoneType.TEMPERATURE.value:
return self._temperature_unit
if self._element.definition == ZoneType.ANALOG_ZONE.value:
return 'V'
return None
def _element_changed(self, element, changeset):
from elkm1_lib.const import ZoneLogicalStatus, ZoneType
from elkm1_lib.util import pretty_const
if self._element.definition == ZoneType.TEMPERATURE.value:
self._state = temperature_to_state(self._element.temperature, -60)
elif self._element.definition == ZoneType.ANALOG_ZONE.value:
self._state = self._element.voltage
else:
self._state = pretty_const(ZoneLogicalStatus(
self._element.logical_status).name)
ronas/PythonGNF | Eduardo/L01.ExeSeq01.py | Python | gpl-3.0 | 25 | 0.04
print ("Hello World!!")
nuagenetworks/vspk-python | vspk/v6/nuavatar.py | Python | bsd-3-clause | 9,831 | 0.009155
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUAvatar(NURESTObject):
""" Represents a Avatar in the VSD
Notes:
Avatar
"""
__rest_name__ = "avatar"
__resource_name__ = "avatars"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a Avatar instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> avatar = NUAvatar(id=u'xxxx-xxx-xxx-xxx', name=u'Avatar')
>>> avatar = NUAvatar(data=my_dict)
"""
super(NUAvatar, self).__init__()
# Read/Write Attributes
self._last_updated_by = None
self._last_updated_date = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._owner = None
self._external_id = None
self._type = None
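        # Expose each local attribute under its remote (VSD API) name so the
        # bambou base class can (de)serialise it; externalID must be unique.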
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name=
|
"creationDate", attribute_type=str, is_required=False, is_uniq
|
ue=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
                Time stamp when this object was created.
                This attribute is named `creationDate` in VSD API.
        """
        self._creation_date = value
uclaros/QGIS | python/plugins/sagaprovider/SagaProviderPlugin.py | Python | gpl-2.0 | 1,562 | 0
# -*- coding: utf-8 -*-
"""
***************************************************************************
sagaproviderplugin.py
---------------------
Date : May 2021
Copyright : (C) 2021 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or      *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'May 2021'
__copyright__ = '(C) 2021, Alexander Bruy'
from qgis.core import QgsApplication, QgsRuntimeProfiler
with QgsRuntimeProfiler.profile('Import SAGA Provider'):
from sagaprovider.SagaAlgorithmProvider import SagaAlgorithmProvider
class SagaProviderPlugin:
def __init__(self):
self.provider = SagaAlgorithmProvider()
def initProcessing(self):
QgsApplication.processingRegistry().addProvider(self.provider)
def initGui(self):
self.initProcessing()
def unload(self):
QgsApplication.processingRegistry().removeProvider(self.provider)
NeuroDataDesign/seelviz | scripts/clearity/__init__.py | Python | apache-2.0 | 5,716 | 0.013821
#!/usr/bin/python
#-*- coding:utf-8 -*-
__author__ = 'david'
import numpy as np
import nibabel as nib
import resources as rs
from vispy import app  # required by Clarity.show(), which calls app.run()
from plot import Canvas
import matplotlib.pyplot as plt
import gc
np.random.seed()
class Clarity(object):
def __init__(self,token,imgfile=None,pointsfile=None):
if token not in rs.TOKENS:
raise ValueError("Token %s not found."%(token))
self._token = token
self._imgfile = imgfile
self._pointsfile = pointsfile
self._img = None # img data
self._points = None # [[x],[y],[z],[v]]
self._shape = None # (x,y,z)
self._max = None # max value
def loadImg(self, path=None, info=False):
if path is None:
path = rs.RAW_DATA_PATH
pathname = path+self._token+".img"
img = nib.load(pathname)
if info:
print(img)
self._img = img.get_data()[:,:,:,0]
self._shape = self._img.shape
self._max = np.max(self._img)
print("Image Loaded: %s"%(pathname))
return self
def loadEqImg(self, path=None, info=False):
if path is None:
path = rs.RAW_DATA_PATH
pathname = path+self._token+".nii"
img = nib.load(pathname)
if info:
print(img)
self._img = img.get_data()[:,:,:,0]
self._shape = self._img.shape
self._max = np.max(self._img)
print("Image Loaded: %s"%(pathname))
return self
def getShape(self):
return self._shape
def getMax(self):
return self._max
def discardImg(self):
del self._img
gc.collect()
return self
def getHistogram(self,bins,range,density=True):
if self._img is None:
raise ValueError("Img haven't loaded, please call loadImg() first.")
return np.histogram(self._img.flatten(), bins=bins, range=range, density=density)
def imgToPoints(self, threshold=0.1, sample=0.5, optimize=True):
if not 0 <= threshold < 1:
            raise ValueError("Threshold should be within [0,1).")
if not 0 < sample <= 1:
raise ValueError("Sample rate should be within (0,1].")
if self._img is None:
raise ValueError("Img haven't loaded, please call loadImg() first.")
total = self._shape[0]*self._shape[1]*self._shape[2]
print("Coverting to points...\ntoken=%s\ntotal=
|
%d\nmax=%f\nthreshold=%f\nsample=%f"\
%(self._token,total,self._max,threshold,sample))
print("(This will take couple minutes)")
# threshold
filt = self._img > threshold * self._max
x, y, z = np.where(filt)
v = self._img[filt]
if optimize:
self.discardImg()
v = np.int16(255*(np.float32(v)/np.float32(self._max)))
l = v.shape
print("Above threshold=%d"%(l))
# sample
if sample < 1.0:
filt = np.random.random(size=l) < sample
x = x[filt]
y = y[filt]
z = z[filt]
v = v[filt]
self._points = np.vstack([x,y,z,v])
self._points = np.transpose(self._points)
print("Samples=%d"%(self._points.shape[0]))
print("Finished")
return self
def loadPoints(self,path=None):
if path is None:
path = rs.POINTS_DATA_PATH
pathname = path+self._token+".csv"
self._points = np.loadtxt(pathname,dtype=np.int16,delimiter=',')
print("Points Loaded: %s"%(pathname))
return self
def savePoints(self,path=None):
if self._points is None:
raise ValueError("Points is empty, please call imgToPoints() first.")
if path is None:
path = rs.POINTS_DATA_PATH
pathname = path+self._token+".csv"
np.savetxt(pathname,self._points,fmt='%d',delimiter=',')
return self
def centralize(self):
# Centralize the data
# use mean or median
centerX = np.mean(self._points[:,0])
centerY = np.mean(self._points[:,1])
centerZ = np.mean(self._points[:,2])
self._points[:,0] -= np.int16(centerX)
self._points[:,1] -= np.int16(centerY)
self._points[:,2] -= np.int16(centerZ)
return self
def histogramEqualize(self,scale=30):
# get image histogram
imhist, bins = np.histogram(self._points[:,3],256,density=True)
cdf = imhist.cumsum() # cumulative distribution function
cdf = scale * cdf / cdf[-1] # normalize
# use linear interpolation of cdf to find new pixel values
return np.interp(self._points[:,3],bins[:-1],cdf)
def showHistogram(self,bins=255):
plt.hist(self._points[:,3],bins=bins)
plt.title("%s Points Histogram"%(self._token))
plt.ylabel("count")
plt.xlabel("level")
plt.grid()
plt.show()
def show(self):
if self._points is None:
raise ValueError("Points is empty, please call imgToPoints() first.")
# centralize
self.centralize()
# colors
colors = np.array(np.abs(self._points[:,:3]),dtype=np.float32)
mx = np.max(colors[:,0])
my = np.max(colors[:,1])
mz = np.max(colors[:,2])
brighter = 0.1
colors[:,0]/=mx+brighter
colors[:,1]/=my+brighter
colors[:,2]/=mz+brighter
alpha = np.empty((len(colors[:,0]),1))
alpha.fill(0.8)
colors = np.hstack([colors,alpha])
# sizes
sizes = self.histogramEqualize()
# visualize
c = Canvas(self._points[:,:3],colors,sizes)
app.run()
if __name__ == '__main__':
pass
hsoft/dgeq-rdf | freebase_pull.py | Python | bsd-3-clause | 2,455 | 0.004481
import argparse
import os.path as op
from datetime import datetime
from rdflib import Graph, Literal
from rdflib.namespace import RDF, OWL
from ns import ns_property, ns_type
from util import query_freebase
def node2mid(node):
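    # A Freebase RDF node URI looks like http://rdf.freebase.com/ns/m.0abc12;
    # the part after the last '.' is the MID suffix, giving e.g. '/m/0abc12'.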
s = node.toPython()
return '/m/' + s.split('.')[-1]
def pull_genelection(graph, key):
# Pulls election date for every linked general election
query = """select distinct ?node ?year ?freebaseNode where {
?node rdf:type type:ProvincialGeneralElection .
?node owl:sameAs ?freebaseNode .
?node prop:year ?year .
filter regex(str(?freebaseNode), "^http://rdf.freebase.com") .
}
"""
res = graph.query(query, initNs={'rdf': RDF, 'type': ns_type, 'prop': ns_property, 'owl': OWL})
for node, year, freebaseNode in res:
print("Fetching date for %s election" % year)
query = {
'mid': node2mid(freebaseNode),
'name': None,
'/time/event/start_date': None,
}
fbres = query_freebase(query, key)
start_date = datetime.strptime(fbres['/time/event/start_date'], '%Y-%m-%d').date()
election_name = fbres['name']
print("Got date %s for election %s" % (start_date, election_name))
graph.add((node, ns_property.startDate, Literal(start_date)))
PULL_FUNCTIONS = {
'genelection': pull_genelection,
}
def main(rdfpath, domain):
    if not op.exists('freebase_api_key'):
print("You need a Freebase API key in the file 'freebase_api_key' to run this script.")
return
if not op.exists(rdfpath):
print("Invalid path: %s" % rdfpath)
return
if domain not in PULL_FUNCTIONS:
print("Invalid domain: %s. Valid domains are %s" % (domain, ', '.join(PULL_FUNCTIONS.keys())))
return
with open('freebase_api_key', 'rt') as fp:
key = fp.read()
graph = Graph()
print("Loading RDF...")
graph.parse(rdfpath)
print("Starting linking operation for domain %s" % domain)
PULL_FUNCTIONS[domain](graph, key)
graph.serialize(rdfpath)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Pulls information from freebase.com to put in our DGEQ RDF")
parser.add_argument('rdfpath', help="The path of the RDF file to link")
parser.add_argument('domain', help="Element domain to link (election, candidate, division)")
args = parser.parse_args()
main(args.rdfpath, args.domain)
|
CityOfNewYork/NYCOpenRecords
|
tests/conftest.py
|
Python
|
apache-2.0
| 2,096
| 0.001431
|
# -*- coding: utf-8 -*-
"""ConfigTest Module
This module handles the setup for running tests against the OpenRecords application.
.. _Flask Tutorial:
http://flask.pocoo.org/docs/1.0/tutorial/
"""
import sys
import os
import pytest
from app import create_app, db as _db
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
sys.path.append(os.path.join(os.path.dirname(__file__), 'helpers'))
@pytest.yield_fixture(scope='session')
def app():
"""
Create a session
Yields:
app (Flask): Instance of a Flask application
"""
_app = create_app(config_name='testing')
ctx = _app.app_context()
ctx.push()
yield _app
ctx.pop()
@pytest.fixture(scope='session')
def client(app: Flask):
"""Retrieve an instance of the Flask test_client.
.. _Flask Test Client:
http://flask.pocoo.org/docs/1.0/api/#flask.Flask.test_client
Args:
app (Flask): Instance of the Flask application
Returns:
app.test_client (Flask.test_client): Returns a client used to test the Flask application
"""
return app.test_client()
@pytest.yield_fixture(scope='session')
def db(app: Flask):
"""
Create all of the database tables and yield an instance of the database.
Args:
app (Flask): Instance of the flask application.
Yields:
db (SQLAlchemy): Instance of the SQLAlchemy DB connector
"""
_db.app = app
_db.create_all()
yield _db
_db.drop_all()
@pytest.fixture(scope='function', autouse=True)
def session(db: SQLAlchemy):
"""Create a database session to be used for the function.
Args:
db (SQLAlchemy): Instance of the SQLAlchemy DB connector
Yields:
session_ (SQLAlchemy.session): Session object to be used by the function
"""
connection = db.engine.connect()
transaction = connection.begin()
options = dict(bind=connection, binds={})
session_ = db.create_scoped_session(options=options)
db.session = session_
yield session_
transaction.rollback()
connection.close()
session_.remove()
|
jongha/stock-ai
|
modules/valuations/per.py
|
Python
|
mit
| 540
| 0.015385
|
#-*- coding: utf-8 -*-
import os
import pandas as pd
import config
import re
import math
from modules.valuations.valuation import Valuation
# Current EPS multiplied by the trailing five-year average PER
class PER(Valuation):
def __init__(self, valuation):
data = valuation.get_data()
json = valuation.get_json()
Valuation.__init__(self, data, json)
self.set_json('PER', self.valuate())
def valuate(self):
try:
json = self.get_json()
return int(json['EPS'] * json['PER_5'])
        except Exception:
            return None
|
wes342/EasyDevStudio
|
scripts/RomOther.py
|
Python
|
apache-2.0
| 18,124
| 0.008828
|
#Copyright 2012 EasyDevStdio , wes342
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
#!/usr/bin/env python
from scripts.GI import *
import datetime
# zipfile and fileinput are used by the callbacks below; import them
# explicitly rather than relying on names pulled in by the star import.
import zipfile
import fileinput
from kivy.uix.textinput import TextInput
from EdsNotify import EdsNotify
def add_aroma(self):
root = BoxLayout(orientation='vertical', spacing=20)
btn_layout = GridLayout(cols=2, row_force_default=True, row_default_height=50, spacing=25)
add = Button(text='Add', size_hint_x=None, width=150)
cancel = Button(text='Cancel', size_hint_x=None, width=150)
    root.add_widget(Label(text='Are You Sure You Want To\nAdd Aroma Installer?'))
root.add_widget(btn_layout)
btn_layout.add_widget(add)
btn_layout.add_widget(cancel)
popup = Popup(background='atlas://images/eds/pop', title='Add Aroma',content=root, auto_dismiss=False,
size_hint=(None, None), size=(350, 200))
cancel.bind(on_release=popup.dismiss)
popup.open()
def callback(instance):
try:
pathtofile = '%s/Aroma/Aroma.zip' % (Tools)
destpath = '%s/META-INF/com/google/android' % (Rom)
z = zipfile.ZipFile(pathtofile)
z.extractall(destpath)
f = open(UScript)
text = f.read()
f.close()
f = open(UScript, 'w')
f.write('''ui_print("");
#
# WIPE = config.prop => selected.1=2
#
if
file_getprop("/tmp/aroma-data/config.prop","selected.1") == "2"
then
sleep(3);\n''')
f.write(text)
f.close()
aroma(self)
dev_name(self)
device(self)
today(self)
contact(self)
        except Exception:
            EdsNotify().run("Custom Rom Not Found", "\nPlease Select Base Rom Then Try Again\n")
add.bind(on_press=callback)
add.bind(on_release=popup.dismiss)
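# Illustrative sketch of the fileinput "inplace" rewrite pattern used by the
# name/version/developer handlers below, on an assumed throwaway file; unlike
# those handlers, which emit the replacement on the line after a match, this
# simpler variant replaces the matching line itself.
def _demo_inplace_rewrite(path='/tmp/demo.prop'):
    import fileinput
    for line in fileinput.input(path, inplace=1):
        if line.startswith('key='):
            print 'key=new-value'
        else:
            print line,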
def aroma(self):
self.panel_layout.clear_widgets()
title = Label(text='[b][color=#22A0D6][size=20]Custom Aroma Configuration[/size][/color][/b]', markup = True, pos_hint={'x':-.05, 'y':.20})
name_lbl = Label(text='[b][color=ffffff][size=12]Set Rom Name :[/size][/color][/b]', markup = True, pos_hint={'x':-.30, 'y':.08})
ver_lbl = Label(text='[b][color=ffffff][size=12]Rom Version Number :[/size][/color][/b]', markup = True, pos_hint={'x':-.30, 'y':-.02})
dev_lbl = Label(text='[b][color=ffffff][size=12]Developer Name :[/size][/color][/b]', markup = True, pos_hint={'x':-.30, 'y':-.12})
name = TextInput(text='', multiline=False, pos_hint={'x':.400, 'y':.550}, size_hint=(.50, .05))
ver = TextInput(text='', multiline=False, pos_hint={'x':.400, 'y':.450}, size_hint=(.50, .05))
dev = TextInput(text='', multiline=False, pos_hint={'x':.400, 'y':.350}, size_hint=(.50, .05))
self.panel_layout.add_widget(title)
self.panel_layout.add_widget(name_lbl)
self.panel_layout.add_widget(name)
self.panel_layout.add_widget(ver_lbl)
self.panel_layout.add_widget(ver)
self.panel_layout.add_widget(dev_lbl)
self.panel_layout.add_widget(dev)
def name_enter(self):
processing_change = False
    for line in fileinput.input(Aroma, inplace=1):
if line.startswith('ini_set("rom_name", "'):
processing_change = True
else:
if processing_change:
print r'ini_set("rom_name", "' + name.text + r'");'
processing_change = False
print line,
for line in fileinput.input(Aroma, inplace=1):
if line.startswith(' "You are about to Install <b>'):
processing_change = True
else:
if processing_change:
print r' "You are about to Install <b>' + name.text + r'</b>. \n\n"+'
processing_change = False
print line,
for line in fileinput.input(Aroma, inplace=1):
if line.startswith(' "Please read carefully <b>'):
processing_change = True
else:
if processing_change:
print r' "Please read carefully <b>' + name.text + r'</b> Terms of Use Below...",'
processing_change = False
print line,
for line in fileinput.input(Terms, inplace=1):
if line.startswith('Rom: '):
processing_change = True
else:
if processing_change:
print r'Rom: ' + name.text + r''
processing_change = False
print line,
for line in fileinput.input(BuildProp, inplace=1):
if line.startswith(r'ro.build.description='):
processing_change = True
else:
if processing_change:
print r'ro.build.description=' + name.text + r''
processing_change = False
print line,
name.bind(on_text_validate=name_enter)
def ver_enter(self):
processing_change = False
for line in fileinput.input(Aroma, inplace=1):
if line.startswith(r'ini_set("rom_version",'):
processing_change = True
else:
if processing_change:
print r'ini_set("rom_version", "' + ver.text + r'");'
processing_change = False
print line,
for line in fileinput.input(Aroma, inplace=1):
if line.startswith(r' "\t\tVERSION\t: <#080>'):
processing_change = True
else:
if processing_change:
print r' "\t\tVERSION\t: <#080>' + ver.text + r'\n"+'
processing_change = False
print line,
for line in fileinput.input(Terms, inplace=1):
if line.startswith(r'Build: '):
processing_change = True
else:
if processing_change:
print r'Build: ' + ver.text + r''
processing_change = False
print line,
for line in fileinput.input(BuildProp, inplace=1):
if line.startswith('ro.product.version='):
processing_change = True
else:
if processing_change:
print r'ro.product.version=' + ver.text + r''
processing_change = False
print line,
ver.bind(on_text_validate=ver_enter)
def dev_enter(self):
processing_change = False
for line in fileinput.input(Aroma, inplace=1):
if line.startswith(r'ini_set("rom_author", "'):
processing_change = True
else:
if processing_change:
print 'ini_set("rom_author", "' + dev.text + '");'
processing_change = False
print line,
for line in fileinput.input(Terms, inplace=1):
if line.startswith(r'Developer: '):
processing_change = True
else:
if processing_change:
print r'Developer: ' + dev.text + r''
processing_change = False
print line,
for line in fileinput.input(BuildProp, inplace=1):
if line.startswith(r'ro.build.host='):
processing_change = True
else:
if processing_change:
print r'ro.build.host=' + dev.text + r''
|
estansifer/mape
|
src/thermo.py
|
Python
|
mit
| 10,645
| 0.007797
|
import math
import scipy.optimize as opt
log = math.log
exp = math.exp
small = 1e-20 # unitless
T0 = 1 # K
Tcrit = 650 # K
zero_C = 273.15 # K
p0 = 1 # Pa
atm = 101325 # Pa
bar = 100000 # Pa
# Tcrit is slightly above the critical point of water. This is used as an upperbound
# on values of T that would be even vaguely physically reasonable for our thermodynamic
# equations. Exact value here is unimportant.
#####
# The following values are quoted exactly* from
# CRC Handbook of Chemistry and Physics, 84th edition, 2003-2004, ISBN 0-8493-0484-9
# Comments specify units and section number in the source.
# *changes in units like g -> kg are done silently
#####
R = 8.314510 # J / mol / K 1-54
Mw_ = 0.01801528 # kg / mol 6-4 molecular weight of water
# enthalpy of vaporization of water at specified temperature
vap_T = zero_C # K 6-3
vap_enthalpy = 45054 # J / mol 6-3
# heat capacity and density of air (dry air?) at specified temperature and 1 bar pressure
air_T = 300 # K 6-1
air_cp = 1007 # J / kg / K 6-1
air_rho = 1.161 # kg / m^3 6-1
# heat capacity of liquid water at specified temperature
lw_T = 10 + zero_C # K 6-3
lw_cp = 4192.1 # J / kg / K 6-3
# saturation vapor pressure at specified temperature
sat_T = 10 + zero_C # K 6-10
sat_p_star = 1228.1 # Pa 6-10
####
# End of CRC reference values
###
# Old value of cv_ I was using is 37.47 J / mol / K.
# New value is from the following source:
# 1870 J / kg / K (or 33.68857 J / mol / K)
# page 77
# Iribarne & Godson (Eds.) (2012). Atmospheric thermodynamics (Vol. 6). Springer Science & Business Media.
# Derived values
Md_ = air_rho * R * air_T / bar # kg / mol molecular weight of air
cd_ = air_cp * Md_ # J / mol / K heat capacity of air, constant pressure
cv_ = 1870 * Mw_ # J / mol / K heat capacity of water vapor, constant p
cl_ = lw_cp * Mw_ # J / mol / K heat capacity of liquid water, constant p
cd = cd_ / R # unitless
cv = cv_ / R # unitless
cl = cl_ / R # unitless
Md = Md_ / R # kg K / J
Mw = Mw_ / R # kg K / J
epsilon = Mw_ / Md_ # unitless
Lc = vap_enthalpy / R + (cl - cv) * vap_T # K
Tc = sat_T # K
pc = sat_p_star * exp(Lc / Tc) # Pa
# Clausius-Clapeyron relation
def compute_p_star(T):
return pc * exp((cv - cl) * log(T / Tc) - Lc / T)
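# Consistency sketch (not in the original module): by construction,
# compute_p_star evaluated at the CRC reference temperature sat_T must
# reproduce the reference saturation pressure sat_p_star to float precision.
def _check_p_star_reference():
    assert abs(compute_p_star(sat_T) - sat_p_star) < 1e-9 * sat_p_star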
def compute_y_s(p, p_star):
return p_star / (p - p_star)
def compute_y_s_from_T(p, T):
return compute_y_s(p, compute_p_star(T))
def compute_ell(T):
return cv - cl + Lc / T
def compute_issat_ypT(y, p, T):
y_s = compute_y_s_from_T(p, T)
return (y_s > 0) and (y > y_s)
# Correctness of this is non-trivial.
def compute_issat_yps(y, p, s):
return compute_issat_ypT(y, p, compute_T_unsat(y, p, s))
def compute_M(y):
return Md * (1 + epsilon * y)
def compute_Ms_unsat(y, p, T):
if y < small:
return cd * log(T / T0) - log(p / p0)
else:
return ((cd + y * cv) * log(T / T0)
- (1 + y) * log(p / p0)
+ (1 + y) * log(1 + y)
- y * log(y))
def compute_Ms_sat(y, p, T):
p_star = compute_p_star(T)
y_s = compute_y_s(p, p_star)
ell = compute_ell(T)
if y < small:
# Unlikely to represent a physical situation,
# since y > y_s for saturated parcels.
        return cd * log(T / T0) - log(p_star / p0) + log(y_s) + y_s * ell
else:
return ((cd + y * cv) * log(T / T0)
- (1 + y) * log(p_star / p0)
            + log(y_s)
            + (y_s - y) * ell)
def compute_T_unsat(y, p, s):
Ms = compute_M(y) * s
if y < small:
return T0 * exp((Md * s + log(p / p0)) / cd)
else:
return T0 * exp(
(Ms + (1 + y) * log(p / p0) - (1 + y) * log(1 + y) + y * log(y))
/ (cd + y * cv)
)
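# Round-trip sketch with assumed parcel values: compute_T_unsat inverts the
# unsaturated entropy relation compute_Ms_unsat at fixed (y, p), so
# recovering T from s should agree to floating-point precision.
def _check_T_unsat_roundtrip():
    y, p, T = 0.01, 9.0e4, 290.0
    s = compute_Ms_unsat(y, p, T) / compute_M(y)
    assert abs(compute_T_unsat(y, p, s) - T) < 1e-6 * T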
#
# For ease of writing this function and computation speed, we assume that the parcel
# specified is saturated, that y > 1e-10, that p < 1e10 Pa, and that the parcel's temperature
# is less than Tcrit. If any of these assumptions are violated this function may diverge,
# throw an exception, or return a nonsense value.
#
# This function is the main bottleneck in speeding up the code.
#
def compute_T_sat(y, p, s):
if y < 1e-10 or p > 1e10:
raise ValueError()
#
# Equation we wish to solve:
# M * s = (cd + y*cv) * log(T / T0) - (1 + y)*log(p_star / p0) + log(y_s) + (y_s - y) * ell
# where
# p_star is a function of T
# y_s = p_star / (p - p_star)
# ell = cv - cl + Lc / T
#
# Note that for T < Tcrit, ell > 0 and d p_star/dT > 0.
#
# Let
# f(T) = c0 * log(T) - (1 + y) * log(p_star) + log(y_s) + (y_s - y) * ell + c1
# = c0 * log(T) - y * log(p_star) - log(p - p_star) + (y_s - y) * ell + c1
# = c0 * log(T) - y * ((cv - cl) log(T / Tc) - Lc / T) - log(p - p_star)
# + y_s * ell - y * (cv - cl) - y * Lc / T + c1 - y * log(pc)
# = c0 * log(T) - y * (cv - cl) * log(T) - log(p - p_star)
# + y_s * ell + c2
# = c3 * log(T) - log(p - p_star) + y_s * ell + c2
# where
# c0 = cd + y * cv
# c1 = - (cd + y * cv) * log(T0) + (1 + y) * log(p0) - compute_M(y) * s
# c2 = c1 - y * log(pc) - y * (cv - cl) + y * (cv - cl) * log(Tc)
# c3 = cd + y * cl
#
# Note that f(T) is increasing in T for reasonable values of p and T. We want to find
# where f(T) = 0.
#
c1 = - (cd + y * cv) * log(T0) + (1 + y) * log(p0) - compute_M(y) * s
c2 = c1 - y * log(pc) - y * (cv - cl) + y * (cv - cl) * log(Tc)
c3 = cd + y * cl
#
# Since the parcel is saturated we know that y_s < y, so
# p_star = p (y_s / (1 + y_s)) = p (1 - 1 / (1 + y_s)) < p (1 - 1 / (1 + y))
# so we have an upperbound on the value of p_star. Furthermore, since cv - cl < 0,
# p_star = pc exp((cv - cl) log(T / Tc) - Lc / T)
# > pc exp((cv - cl) log(Tcrit / Tc) - Lc / T)
# so
# -Lc / T < log(p_star / pc) + (cl - cv) log(Tcrit / Tc)
# Lc / T > -log(p_star / pc) + (cv - cl) log(Tcrit / Tc) [1]
# T < Lc / (-log(p_star / pc) + (cv - cl) log(Tcrit / Tc))
# T < Lc / (-log(p / pc) - log(y / (1 + y)) + (cv - cl) log(Tcrit / Tc))
# where we have used that the right side of [1] is positive for p_star smaller than 1e11 Pa
# or so.
#
c4 = (cv - cl) * log(Tcrit / Tc)
p_star_max = p * y / (1 + y)
Tmax = Lc / (c4 - log(p_star_max / pc))
Tmax = min(Tmax, Tcrit)
# Couldn't figure out a good way to lower bound it. 100 K is pretty safe.
Tmin = 100
def f(T):
p_star = compute_p_star(T)
if p_star >= p_star_max:
return T * 1.0e200
y_s = p_star / (p - p_star)
ell = cv - cl + Lc / T
return c3 * log(T) - log(p - p_star) + y_s * ell + c2
if f(Tmin) >= 0:
return Tmin
if f(Tmax) <= 0:
return Tmax
return opt.brentq(f, Tmin, Tmax)
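# Saturated analogue of the round trip above, again with assumed values; the
# parcel is first checked to be saturated so that compute_Ms_sat applies, and
# brentq then recovers T to well within the tolerance used here.
def _check_T_sat_roundtrip():
    y, p, T = 0.05, 9.0e4, 290.0
    assert compute_issat_ypT(y, p, T)
    s = compute_Ms_sat(y, p, T) / compute_M(y)
    assert abs(compute_T_sat(y, p, s) - T) < 1e-6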
def compute_Tv_sat(y, p, s):
T = compute_T_sat(y, p, s)
y_s = compute_y_s_from_T(p, T)
return T * (1 + y_s) / (1 + y * epsilon)
def compute_Tv_unsat(y, p, s):
return compute_T_unsat(y, p, s) * (1 + y) / (1 + y * epsilon)
def compute_Mh_unsat(y, p, s):
return (cd + y * cv) * compute_T_unsat(y, p, s)
def compute_Mh_sat(y, p, s):
T = compute_T_sat(y, p, s)
y_s = compute_y_s_from_T(p, T)
ell = compute_ell(T)
return (cd + y * cv + (y_s - y) * ell) * T
|
home-assistant/home-assistant
|
homeassistant/components/mqtt/number.py
|
Python
|
apache-2.0
| 8,276
| 0.000967
|
"""Configure number in a device through MQTT topic."""
from __future__ import annotations
import functools
import logging
import voluptuous as vol
from homeassistant.components import number
from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
DEFAULT_MIN_VALUE,
DEFAULT_STEP,
NumberEntity,
)
from homeassistant.const import (
CONF_NAME,
CONF_OPTIMISTIC,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from . import PLATFORMS, MqttCommandTemplate, subscription
from .. import mqtt
from .const import CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC, DOMAIN
from .debug_info import log_messages
from .mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, async_setup_entry_helper
CONF_COMMAND_TEMPLATE = "command_template"
_LOGGER = logging.getLogger(__name__)
CONF_MIN = "min"
CONF_MAX = "max"
CONF_PAYLOAD_RESET = "payload_reset"
CONF_STEP = "step"
DEFAULT_NAME = "MQTT Number"
DEFAULT_OPTIMISTIC = False
DEFAULT_PAYLOAD_RESET = "None"
MQTT_NUMBER_ATTRIBUTES_BLOCKED = frozenset(
{
number.ATTR_MAX,
number.ATTR_MIN,
number.ATTR_STEP,
}
)
def validate_config(config):
"""Validate that the configuration is valid, throws if it isn't."""
if config.get(CONF_MIN) >= config.get(CONF_MAX):
raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
return config
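# A minimal illustration (hypothetical values, not part of the platform
# module) of the guard above: a config whose min is not strictly below max
# is rejected with vol.Invalid, while a valid pair passes through unchanged.
def _demo_validate_config():
    try:
        validate_config({CONF_MIN: 5.0, CONF_MAX: 1.0})
    except vol.Invalid:
        pass
    else:
        raise AssertionError("expected vol.Invalid for min >= max")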
_PLATFORM_SCHEMA_BASE = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): vol.Coerce(float),
vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_RESET, default=DEFAULT_PAYLOAD_RESET): cv.string,
vol.Optional(CONF_STEP, default=DEFAULT_STEP): vol.All(
vol.Coerce(float), vol.Range(min=1e-3)
),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
},
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
PLATFORM_SCHEMA = vol.All(
_PLATFORM_SCHEMA_BASE,
validate_config,
)
DISCOVERY_SCHEMA = vol.All(
_PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA),
validate_config,
)
async def async_setup_platform(
hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None
):
"""Set up MQTT number through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT number dynamically through MQTT discovery."""
setup = functools.partial(
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
)
await async_setup_entry_helper(hass, number.DOMAIN, setup, DISCOVERY_SCHEMA)
async def _async_setup_entity(
hass, async_add_entities, config, config_entry=None, discovery_data=None
):
"""Set up the MQTT number."""
async_add_entities([MqttNumber(hass, config, config_entry, discovery_data)])
class MqttNumber(MqttEntity, NumberEntity, RestoreEntity):
    """Representation of an MQTT number."""
_entity_id_format = number.ENTITY_ID_FORMAT
_attributes_extra_blocked = MQTT_NUMBER_ATTRIBUTES_BLOCKED
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize the MQTT Number."""
self._config = config
self._optimistic = False
self._sub_state = None
self._current_number = None
NumberEntity.__init__(self)
MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
@staticmethod
def config_schema():
"""Return the config schema."""
return DISCOVERY_SCHEMA
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._optimistic = config[CONF_OPTIMISTIC]
self._templates = {
CONF_COMMAND_TEMPLATE: MqttCommandTemplate(
config.get(CONF_COMMAND_TEMPLATE), self.hass
).async_render,
CONF_VALUE_TEMPLATE: config.get(CONF_VALUE_TEMPLATE),
}
value_template = self._templates[CONF_VALUE_TEMPLATE]
if value_template is None:
self._templates[CONF_VALUE_TEMPLATE] = lambda value: value
else:
value_template.hass = self.hass
self._templates[
CONF_VALUE_TEMPLATE
] = value_template.async_render_with_possible_json_value
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
@callback
@log_messages(self.hass, self.entity_id)
def message_received(msg):
"""Handle new MQTT messages."""
payload = self._templates[CONF_VALUE_TEMPLATE](msg.payload)
try:
if payload == self._config[CONF_PAYLOAD_RESET]:
num_value = None
elif payload.isnumeric():
num_value = int(payload)
else:
num_value = float(payload)
except ValueError:
_LOGGER.warning("Payload '%s' is not a Number", msg.payload)
return
if num_value is not None and (
num_value < self.min_value or num_value > self.max_value
):
_LOGGER.error(
"Invalid value for %s: %s (range %s - %s)",
self.entity_id,
num_value,
self.min_value,
self.max_value,
)
return
self._current_number = num_value
self.async_write_ha_state()
if self._config.get(CONF_STATE_TOPIC) is None:
# Force into optimistic mode.
self._optimistic = True
else:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._config.get(CONF_STATE_TOPIC),
"msg_callback": message_received,
"qos": self._config[CONF_QOS],
}
},
)
if self._optimistic and (last_state := await self.async_get_last_state()):
self._current_number = last_state.state
@property
def min_value(self) -> float:
"""Return the minimum value."""
return self._config[CONF_MIN]
@property
def max_value(self) -> float:
"""Return the maximum value."""
return self._config[CONF_MAX]
@property
def step(self) -> float:
"""Return the increment/decrement step."""
return self._config[CONF_STEP]
@property
def unit_of_measurement(self) -> str | None:
"""Return the unit of measurement."""
return self._config.get(CONF_UNIT_OF_MEASUREMENT)
@property
def value(self):
"""Return the current value."""
return self._current_number
async def async_set_value(self, value: float) -> None:
"""Update the current value."""
current_number = value
if value.is_integer():
current_number = int(value)
payload = self._templates[CONF_COMMAND_TEMPLATE](current_number)
if self._optimistic:
self._current_number = current_number
self.async_write_ha_state()
await mqtt.async_publish(
self.hass,
self._config[CONF_COMMAND_TOPIC],
payload,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
@property
    def assumed_state(self):
        """Return true if we do optimistic updates."""
        return self._optimistic
|
hmpf/nav
|
python/nav/web/alertprofiles/views.py
|
Python
|
gpl-3.0
| 87,463
| 0.00104
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007, 2008, 2011 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 3 as published by the Free
# Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Alert Profiles view functions"""
# TODO This module has crazy-many lines and should use class-based views
# TODO Check that functions that should require permission do require
# permission
# TODO Filter/filter_groups have owners, check that the account that performs
# the operation is the owner
from django.http import HttpResponseRedirect
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.shortcuts import render
from django.urls import reverse
from django.utils import six
from nav.web.utils import SubListView
from nav.models.profiles import (
Account,
AccountGroup,
AlertAddress,
AlertPreference,
AlertProfile,
TimePeriod,
AlertSubscription,
FilterGroupContent,
Operator,
Expression,
Filter,
FilterGroup,
MatchField,
SMSQueue,
AccountAlertQueue,
)
from nav.django.utils import get_account, is_admin
from nav.web.message import Messages, new_message
from nav.web.alertprofiles.forms import TimePeriodForm, LanguageForm
from nav.web.alertprofiles.forms import AlertProfileForm, AlertSubscriptionForm
from nav.web.alertprofiles.forms import AlertAddressForm, FilterForm
from nav.web.alertprofiles.forms import ExpressionForm, FilterGroupForm
from nav.web.alertprofiles.forms import MatchFieldForm
from nav.web.alertprofiles.utils import alert_subscriptions_table
from nav.web.alertprofiles.utils import read_time_period_templates
from nav.web.alertprofiles.utils import resolve_account_admin_and_owner
from nav.web.alertprofiles.utils import account_owns_filters
from nav.web.alertprofiles.utils import order_filter_group_content
from nav.web.alertprofiles.shortcuts import (
alertprofiles_response_forbidden,
alertprofiles_response_not_found,
BASE_PATH,
)
from .decorators import requires_post
_ = lambda a: a
PAGINATE_BY = 25
def overview(request):
"""The Alert Profiles overview / index page"""
account = get_account(request)
# Get information about user
groups = account.accountgroup_set.all()
try:
        active_profile = account.get_active_profile()
except ObjectDoesNotExist:
active_profile = None
if not active_profile:
subscriptions = None
else:
periods = TimePeriod.objects.filter(profile=active_profile).order_by('start')
subscriptions = alert_subscriptions_table(periods)
# Get information about users privileges
sms_privilege = account.has_perm('alert_by', 'sms')
filter_dict = {'group_permissions__in': [g.id for g in groups]}
filter_groups = FilterGroup.objects.filter(**filter_dict).order_by('name')
language = account.preferences.get(account.PREFERENCE_KEY_LANGUAGE, 'en')
language_form = LanguageForm(initial={'language': language})
info_dict = {
'active': {'overview': True},
'groups': groups,
'active_profile': active_profile,
'sms_privilege': sms_privilege,
'filter_groups': filter_groups,
'language_form': language_form,
'alert_subscriptions': subscriptions,
'navpath': [
('Home', '/'),
('Alert profiles', None),
],
'title': 'NAV - Alert profiles',
}
return render(request, 'alertprofiles/account_detail.html', info_dict)
def show_profile(request):
"""Shows a single profile"""
account = get_account(request)
# Define valid options for ordering
valid_ordering = ['name', '-name']
order_by = request.GET.get('order_by', 'name').lower()
if order_by not in valid_ordering:
order_by = 'name'
try:
active_profile = account.alertpreference.active_profile
    except ObjectDoesNotExist:
active_profile = None
if not active_profile:
new_message(request, _('There\'s no active profile set.'), Messages.NOTICE)
profiles = AlertProfile.objects.filter(account=account.pk).order_by(order_by)
info_dict = {
'active': {'profile': True},
'subsection': {'list': True},
'profiles': profiles,
'active_profile': active_profile,
'page_link': reverse('alertprofiles-profile'),
'order_by': order_by,
'navpath': BASE_PATH + [('Profiles', None)],
'title': 'NAV - Alert profiles',
}
return SubListView.as_view(
queryset=profiles,
paginate_by=PAGINATE_BY,
template_name='alertprofiles/profile.html',
extra_context=info_dict,
)(request)
def profile_show_form(
request, profile_id=None, profile_form=None, time_period_form=None
):
"""Shows the profile edit form"""
account = get_account(request)
profile = None
periods = []
detail_id = None
page_name = 'New profile'
if profile_id:
try:
profile = AlertProfile.objects.get(pk=profile_id, account=account)
except AlertProfile.DoesNotExist:
new_message(
request, _('The requested profile does not exist.'), Messages.ERROR
)
return HttpResponseRedirect(reverse('alertprofiles-profile'))
detail_id = profile.id
page_name = profile.name
periods = TimePeriod.objects.filter(profile=profile).order_by('start')
if not time_period_form:
time_period_form = TimePeriodForm(initial={'profile': profile.id})
if not profile_form:
profile_form = AlertProfileForm(instance=profile)
elif not profile_form:
profile_form = AlertProfileForm()
templates = None
if not profile_id:
templates = read_time_period_templates()
subsection = {'new': True}
else:
subsection = {'detail': profile.id}
info_dict = {
'form': profile_form,
'time_period_form': time_period_form,
'detail_id': detail_id,
'owner': True,
'alert_subscriptions': alert_subscriptions_table(periods),
'time_period_templates': templates,
'active': {'profile': True},
'subsection': subsection,
'navpath': BASE_PATH
+ [('Profiles', reverse('alertprofiles-profile')), (page_name, None)],
'title': 'NAV - Alert profiles',
}
return render(request, 'alertprofiles/profile_detail.html', info_dict)
def profile_detail(request, profile_id=None):
"""Shows the profile form a specific profile"""
return profile_show_form(request, profile_id)
def profile_new(request):
"""Shows an empty profile form"""
return profile_show_form(request)
def set_active_profile(request, profile):
"""Set active profile to given profile"""
preference, _created = AlertPreference.objects.get_or_create(
account=request.account, defaults={'active_profile': profile}
)
preference.active_profile = profile
preference.save()
new_message(
request,
u'Active profile automatically set to {}'.format(profile.name),
Messages.NOTICE,
)
def create_time_periods(request, profile):
"""Creates time periods for this profile based on template chosen"""
templates = read_time_period_templates()
template = templates.get(request.POST.get('template'), None)
if template:
        # A template was selected. Loop through each subsection and make
# periods if the title of the subsection is 'all_week', 'weekends'
# or 'weekdays'.
for key, value in template.items():
periods = {}
if key == 'all_week':
valid_during = TimePeriod.ALL_WEEK
|
mc2014/anvil
|
setup.py
|
Python
|
apache-2.0
| 2,199
| 0
|
#!/usr/bin/env python
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import setuptools
from anvil import version
def read_requires(filename):
requires = []
with open(filename, "rb") as fh:
for line in fh:
            line = line.strip()
if not line or line.startswith("#"):
continue
            requires.append(line)
return requires
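# Sketch of the expected behaviour on a hypothetical requirements file:
# comment lines and blank lines are skipped, everything else is collected
# verbatim. Note the file is read in binary mode, which matches this
# package's Python 2 target (see the classifiers below).
def _demo_read_requires(path="demo-requirements.txt"):
    with open(path, "wb") as fh:
        fh.write("# pinned deps\n\nsix>=1.9\n")
    assert read_requires(path) == ["six>=1.9"]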
setuptools.setup(
name='anvil',
description='A tool to forge raw OpenStack into a productive tool',
author='OpenStack Foundation',
author_email='anvil-dev@lists.launchpad.net',
url='http://anvil.readthedocs.org/',
long_description=open("README.rst", 'rb').read(),
packages=setuptools.find_packages(),
license='Apache Software License',
version=version.version_string(),
scripts=[
"tools/yyoom",
"tools/py2rpm",
"tools/multipip",
"tools/specprint",
],
install_requires=read_requires("requirements.txt"),
tests_require=read_requires("test-requirements.txt"),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: OpenStack',
'Intended Audience :: Information Technology',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
krishauser/Klampt
|
Python/python2_version/klampt/math/symbolic_linalg.py
|
Python
|
bsd-3-clause
| 9,854
| 0.032576
|
"""Defines the symbolic functions:
- norm(x): returns the L-2 norm of a vector
- norm2(x): returns the squared L-2 norm of a vector
- norm_L1(x): returns the L-1 norm of a vector
- norm_Linf(x): returns the L-infinity norm of a vector
- norm_fro(A): returns the Frobeneus norm of a matrix
- distance(x,y): returns the L-2 distance between two vectors
- distance2(x,y): returns the squared L-2 distance between two vectors
- distance_L1(x,y): returns the L-1 distance between two vectors
- distance_Linf(x,y): returns the L-infinity distance between two vectors
- mahalanobis_distance(x,y,A): returns the mahalanobis distance between two vectors weighted by A, i.e. sqrt((x-y)^T A (x-y))
- mahalanobis_distance2(x,y,A): returns the squared mahalanobis distance between two vectors weighted by A
- unit(x): returns the unit vector in the direction x
- inv(A): returns the inverse of a matrix
- pinv(A): returns the pseudoinverse of a matrix
- linear(x,A): the linear function A*x
- quadratic(x,A): the quadratic function x^T*A*x
- bilinear(x,A,y): the bilinear function x^T*A*y
- bound_contains(xmin,xmax,x): returns True if xmin <= x <= xmax element-wise
- bound_margin(xmin,xmax,x): returns the distance from x to the nearest boundary of the bounding box [xmin,xmax] (positive when x is inside)
- bound_overlaps(xmin,xmax,ymin,ymax): returns True if the bounding boxes [xmin,xmax] and [ymin,ymax] overlap
Also defines a context LinAlgContext that can be included into a Context under "linalg"
Completeness table
================= ============= ==============
Function Derivative Simplification
================= ============= ==============
norm Y
norm2 Y
norm_L1 Y
norm_Linf Y
norm_fro Y
distance Y
distance2 Y
distance_L1 Y
distance_Linf Y
mahalanobis_dis Y,Y,N
mahalanobis...2 Y,Y,N
unit Y
inv Y
pinv Y
linear Y
quadratic Y,Y,N
bilinear Y,Y,N
bound_contains N/A
bound_margin
bound_overlaps    N/A
================= ============= ==============
"""
from symbolic import *
import vectorops
import numpy as np
import weakref
_x = Variable("x","V")
_y = Variable("y","V")
_A = Variable("A","M")
norm = Function('norm',np.linalg.norm,['x'],returnType='N')
norm2 = Function('norm2',dot(_x,_x),['x'],returnType='N')
norm_L1 = Function('norm_L1',lambda x:np.linalg.norm(x,ord=1),returnType='N')
_inf = float('inf')
norm_Linf = Function('norm_Linf',lambda x:np.linalg.norm(x,ord=_inf),returnType='N')
norm_fro = Function('norm_fro',np.linalg.norm,['A'],returnType='N')
norm2_fro = Function('norm2_fro',lambda x:np.linalg.norm(x)**2,['A'],returnType='N')
distance = Function('distance',norm(_x-_y),['x','y'],returnType='N')
distance2 = Function('distance2',norm2(_x-_y),['x','y'],returnType='N')
distance_L1 = Function('distance_L1',norm_L1(_x-_y),['x','y'],returnType='N')
distance_Linf = Function('distance_Linf',norm_Linf(_x-_y),['x','y'],returnType='N')
unit = Function('unit',vectorops.unit,['x'])
norm.argTypes = [Vector]
norm2.argTypes = [Vector]
norm_L1.argTypes = [Vector]
norm_Linf.argTypes = [Vector]
norm_fro.argTypes = [Matrix]
norm.properties['nonnegative'] = True
norm2.properties['nonnegative'] = True
norm_L1.properties['nonnegative'] = True
norm_Linf.properties['nonnegative'] = True
norm_fro.properties['nonnegative'] = True
norm2_fro.properties['nonnegative'] = True
norm.addSimplifier(['zero'],lambda x:0)
norm.addSimplifier(['unit'],lambda x:1)
norm2.addSimplifier(['zero'],lambda x:0)
norm2.addSimplifier(['unit'],lambda x:1)
norm_L1.addSimplifier(['zero'],lambda x:0)
norm_L1.addSimplifier(['basis'],lambda x:1)
norm_Linf.addSimplifier(['zero'],lambda x:0)
norm_Linf.addSimplifier(['basis'],lambda x:1)
norm.setDeriv(0,(lambda x,dx:dot(x,dx)/norm(x)),asExpr=True,stackable=True)
norm2.setDeriv(0,(lambda x,dx:dot(x,dx)*2),asExpr=True,stackable=True)
norm_L1.setDeriv(0,lambda x,dx:dot(sign(x),dx),asExpr=True,stackable=True)
def _norm_Linf_deriv(x,dx):
imax = argmax(abs_(x))
return sign(x[imax])*dx[imax]
norm_Linf.setDeriv(0,_norm_Linf_deriv,asExpr=True,stackable=True)
norm_fro.setDeriv(0,(lambda A,dA:multiply(A,dA)/norm_fro(A)),asExpr=True,stackable=True)
norm2_fro.setDeriv(0,(lambda A,dA:multiply(A,dA)*2),asExpr=True,stackable=True)
distance.argTypes = [Vector,Vector]
distance2.argTypes = [Vector,Vector]
distance_L1.argTypes = [Vector,Vector]
distance_Linf.argTypes = [Vector,Vector]
distance.properties['nonnegative'] = True
distance2.properties['nonnegative'] = True
distance_L1.properties['nonnegative'] = True
distance_Linf.properties['nonnegative'] = True
distance.addSimplifier(['zero',None],lambda x,y:norm(y))
distance.addSimplifier([None,'zero'],lambda x,y:norm(x))
distance2.addSimplifier(['zero',None],lambda x,y:norm2(y))
distance2.addSimplifier([None,'zero'],lambda x,y:norm2(x))
distance_L1.addSimplifier(['zero',None],lambda x,y:norm_L1(y))
distance_L1.addSimplifier([None,'zero'],lambda x,y:norm_L1(x))
distance_Linf.addSimplifier(['zero',None],lambda x,y:norm_Linf(y))
distance_Linf.addSimplifier([None,'zero'],lambda x,y:norm_Linf(x))
distance.setDeriv(0,(lambda x,y,dx:dot(x-y,dx)/distance(x,y)),asExpr=True,stackable=True)
distance.setDeriv(1,(lambda x,y,dy:dot(x-y,dy)/distance(x,y)),asExpr=True,stackable=True)
distance2.setDeriv(0,(lambda x,y,dx:dot(x-y,dx)*2),asExpr=True,stackable=True)
distance2.setDeriv(1,(lambda x,y,dy:dot(y-x,dy)*2),asExpr=True,stackable=True)
distance_L1.autoSetJacobians()
distance_Linf.autoSetJacobians()
linear = Function('linear',dot(_A,_x),["x","A"])
linear.argTypes = [Vector,Matrix]
linear.autoSetJacobians()
bilinear = Function('bilinear',dot(_x,dot(_A,_y)),["x","A","y"])
bilinear.argTypes = [Vector,Matrix,Vector]
bilinear.autoSetJacobians()
quadratic = Function('quadratic',dot(_x,dot(_A,_x)),["x","A"],returnType='N')
quadratic.argTypes = [Vector,Matrix]
#quadratic.setJacobian('x',lambda x,A:2*dot(x,A),asExpr=True)
#quadratic.setJacobian('A',lambda x,A:outer(x,x),asExpr=True)
quadratic.autoSetJacobians()
mahalanobis_distance2 = Function('mahalanobis_distance2',quadratic(_x-_y,_A),['x','y','A'])
mahalanobis_distance2.autoSetJacobians()
mahalanobis_distance = Function('mahalanobis_distance',sqrt(mahalanobis_distance2(_x,_y,_A)),['x','y','A'])
mahalanobis_distance.autoSetJacobians()
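# Numeric sanity sketch (assumed values), independent of the symbolic
# machinery: sqrt((x-y)^T A (x-y)) with A = I reduces to the plain L2
# distance, which is what mahalanobis_distance generalizes.
def _check_mahalanobis_identity():
    x = np.array([1.0, 2.0]); y = np.array([0.0, 0.0]); A = np.eye(2)
    d = np.sqrt((x - y).dot(A).dot(x - y))
    assert abs(d - np.linalg.norm(x - y)) < 1e-12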
unit.setDeriv(0,lambda x,dx:if_(x==0,zero(shape(x)),dx/norm(x)-x*dot(x,dx)/norm(x)**3),stackable=True)
inv = Function('inv',np.linalg.inv,['A'],returnType='M')
inv.argTypes = [Matrix]
def _inv_deriv(A,dA):
Ainv = inv(A)
return -dot(Ainv,dot(dA,Ainv))
inv.properties['inverse'] = weakref.proxy(inv)
#patch the dot function
dot.addSimplifier(['inv','inv'],lambda Ainv,Binv:inv(dot(Binv.args[0],Ainv.args[0])))
dot.addSimplifier(['linalg.inv','linalg.inv'],lambda Ainv,Binv:inv(dot(Binv.args[0],Ainv.args[0])))
inv.addSimplifier(['neg'],lambda x:-inv(x.args[0]))
inv.setDeriv(0,_inv_deriv,asExpr=True)
inv.printers['str'] = lambda expr,astr:astr[0]+'^-1'
pinv = Function('pinv',np.linalg.pinv,['A'],returnType='M')
pinv.argTypes = [Matrix]
pinv.addSimplifier(['neg'],lambda x:-pinv(x.args[0]))
pinv.printers['str'] = lambda expr,astr:astr[0]+'^+'
def _pinv_deriv(A,dA):
Ainv = pinv(A)
return -dot(Ainv,dot(dA,Ainv))
pinv.setDeriv(0,_pinv_deriv,asExpr=True)
def _bound_contains(xmin,xmax,x):
return all(a <= v and v <= b for v,a,b in zip(x,xmin,xmax))
def _bound_overlaps(xmin,xmax,ymin,ymax):
for a,b,c,d in zip(xmin,xmax,ymin,ymax):
if d < a or c > b: return False
if b < c or a > d: return False
|
sjaoudi/hangout-app
|
transcribe_streaming.py
|
Python
|
mit
| 8,465
| 0.000236
|
#!/usr/bin/python
# Copyright (C) 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Sample that streams audio to the Google Cloud Speech API via GRPC."""
from __future__ import division
import contextlib
import functools
import re
import signal
import sys
import google.auth
import google.auth.transport.grpc
import google.auth.transport.requests
from google.cloud.grpc.speech.v1beta1 import cloud_speech_pb2
from google.rpc import code_pb2
import grpc
import pyaudio
from six.moves import queue
# Audio recording parameters
RATE = 16000
CHUNK = int(RATE / 10) # 100ms
# The Speech API has a streaming limit of 60 seconds of audio*, so keep the
# connection alive for that long, plus some more to give the API time to figure
# out the transcription.
# * https://g.co/cloud/speech/limits#content
DEADLINE_SECS = 60 * 3 + 5
SPEECH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'
def make_channel(host, port):
"""Creates a secure channel with auth credentials from the environment."""
    # Grab application default credentials from the environment
credentials, _ = google.auth.default(scopes=[SPEECH_SCOPE])
# Create a secure channel using the credentials.
http_request = google.auth.transport.requests.Request()
target = '{}:{}'.format(host, port)
return google.auth.transport.grpc.secure_authorized_channel(
credentials, http_request, target)
def _audio_data_generator(buff):
"""A generator that yields all available data in the given buffer.
Args:
buff - a Queue object, where each element is a chunk of data.
Yields:
A chunk of data that is the aggregate of all chunks of data in `buff`.
The function will block until at least one data chunk is available.
"""
stop = False
while not stop:
# Use a blocking get() to ensure there's at least one chunk of data.
data = [buff.get()]
# Now consume whatever other data's still buffered.
while True:
try:
data.append(buff.get(block=False))
except queue.Empty:
break
# `None` in the buffer signals that the audio stream is closed. Yield
# the final bit of the buffer and exit the loop.
if None in data:
stop = True
data.remove(None)
yield b''.join(data)
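# Small illustration with synthetic chunks (not part of the sample): queued
# chunks are coalesced into a single yield, and a trailing None terminates
# the generator.
def _demo_audio_data_generator():
    buff = queue.Queue()
    for chunk in (b'ab', b'cd', None):
        buff.put(chunk)
    assert list(_audio_data_generator(buff)) == [b'abcd']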
def _fill_buffer(buff, in_data, frame_count, time_info, status_flags):
"""Continuously collect data from the audio stream, into the buffer."""
buff.put(in_data)
return None, pyaudio.paContinue
# [START audio_stream]
@contextlib.contextmanager
def record_audio(rate, chunk):
"""Opens a recording stream in a context manager."""
# Create a thread-safe buffer of audio data
buff = queue.Queue()
audio_interface = pyaudio.PyAudio()
audio_stream = audio_interface.open(
format=pyaudio.paInt16,
# The API currently only supports 1-channel (mono) audio
# https://goo.gl/z757pE
channels=1, rate=rate,
input=True, frames_per_buffer=chunk,
# Run the audio stream asynchronously to fill the buffer object.
# This is necessary so that the input device's buffer doesn't overflow
# while the calling thread makes network requests, etc.
stream_callback=functools.partial(_fill_buffer, buff),
)
yield _audio_data_generator(buff)
audio_stream.stop_stream()
audio_stream.close()
# Signal the _audio_data_generator to finish
buff.put(None)
audio_interface.terminate()
# [END audio_stream]
def request_stream(data_stream, rate, interim_results=True):
"""Yields `StreamingRecognizeRequest`s constructed from a recording audio
stream.
Args:
data_stream: A generator that yields raw audio data to send.
rate: The sampling rate in hertz.
interim_results: Whether to return intermediate results, before the
transcription is finalized.
"""
# The initial request must contain metadata about the stream, so the
# server knows how to interpret it.
recognition_config = cloud_speech_pb2.RecognitionConfig(
# There are a bunch of config options you can specify. See
# https://goo.gl/KPZn97 for the full list.
encoding='LINEAR16', # raw 16-bit signed LE samples
sample_rate=rate, # the rate in hertz
# See http://g.co/cloud/speech/docs/languages
# for a list of supported languages.
language_code='en-US', # a BCP-47 language tag
)
streaming_config = cloud_speech_pb2.StreamingRecognitionConfig(
interim_results=interim_results,
config=recognition_config,
)
yield cloud_speech_pb2.StreamingRecognizeRequest(
streaming_config=streaming_config)
for data in data_stream:
# Subsequent requests can all just have the content
yield cloud_speech_pb2.StreamingRecognizeRequest(audio_content=data)
def listen_print_loop(recognize_stream, q):
"""Iterates through server responses and prints them.
The recognize_stream passed is a generator that will block until a response
is provided by the server. When the transcription response comes, print it.
In this case, responses are provided for interim results as well. If the
response is an interim one, print a carriage return at the end of it, to allow
the next result to overwrite it, until the response is a final one. For the
final one, print a newline to preserve the finalized transcription.
"""
num_chars_printed = 0
print 'begin call!'
for resp in recognize_stream:
if resp.error.code != code_pb2.OK:
raise RuntimeError('Server error: ' + resp.error.message)
if not resp.results:
continue
# Display the top transcription
result = resp.results[0]
transcript = result.alternatives[0].transcript
# Display interim results, but with a carriage return at the end of the
# line, so subsequent lines will overwrite them.
#
# If the previous result was longer than this one, we need to print
# some extra spaces to overwrite the previous result
overwrite_chars = ' ' * max(0, num_chars_printed - len(transcript))
if not result.is_final:
sys.stdout.write(transcript + overwrite_chars + '\r')
sys.stdout.flush()
num_chars_printed = len(transcript)
else:
#print(transcript + overwrite_chars) # where the program prints msg??
q.put(transcript + overwrite_chars)
# Exit recognition if any of the transcribed phrases could be
# one of our keywords.
if re.search(r'\b(exit|quit)\b', transcript, re.I):
print('Exiting..')
break
num_chars_printed = 0
def main(q):
service = cloud_speech_pb2.SpeechStub(
make_channel('speech.googleapis.com', 443))
# For streaming audio from the microphone, there are three threads.
# First, a thread that collects audio data as it comes in
with record_audio(RATE, CHUNK) as buffered_audio_data:
# Second, a thread that sends requests with that data
requests = request_stream(buffered_audio_data, RATE)
# Third, a thread that listens for transcription responses
recognize_stream = service.StreamingRecognize(
requests, DEADLINE_SECS)
# Exit things cleanly on interrupt
signal.signal(signal.SIGINT, lambda *_: recognize_stream.cancel())
# Now, put the transcription responses to use.
try:
listen_print_loop(recognize_stream, q)
|
TravelModellingGroup/TMGToolbox
|
TMGToolbox/src/analysis/traffic/Export_Count_Station_Location.py
|
Python
|
gpl-3.0
| 5,176
| 0.013717
|
'''
Copyright 2015 Travel Modelling Group, Department of Civil Engineering, University of Toronto
This file is part of the TMG Toolbox.
The TMG Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The TMG Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the TMG Toolbox. If not, see <http://www.gnu.org/licenses/>.
'''
#---METADATA---------------------
'''
Export Count Station Link Correspondence File
Authors: David King
Latest revision by:
[Description]
'''
#---VERSION HISTORY
'''
0.0.1 Created
0.1.1 Created on 2015-03-13 by David King
'''
import inro.modeller as _m
import csv
import traceback as _traceback
from contextlib import contextmanager
from contextlib import nested
_mm = _m.Modeller()
net =_mm.scenario.get_network()
_util = _mm.module('tmg.common.utilities')
_tmgTPB = _mm.module('tmg.common.TMG_tool_page_builder')
class ExportCountStationLocation(_m.Tool()):
version = '0.1.1'
tool_run_msg = ""
number_of_tasks = 1
Scenario = _m.Attribute(_m.InstanceType)
CordonExportFile = _m.Attribute(str)
def __init__(self):
#---Init internal variables
self.TRACKER = _util.ProgressTracker(self.number_of_tasks) #init the ProgressTracker
#---Set the defaults of parameters used by Modeller
self.Scenario = _mm.scenario #Default is primary scenario
def page(self):
pb = _tmgTPB.TmgToolPageBuilder(self, title="Export Count Station-Link Correspondence File v%s" %self.version,
description="Exports a link and countpost correspondence file.\
                     Contained within is the link on which each countpost is found.\
Assumes that count stations are defined by '@stn1'.",
branding_text="- TMG Toolbox")
if self.tool_run_msg != "": # to display messages in the page
pb.tool_run_status(self.tool_run_msg_status)
pb.add_header("EXPORT CORDON DATA FILE")
pb.add_select_file(tool_attribute_name='CordonExportFile',
window_type='save_file', file_filter='*.csv',
title="Cordon Count File",
note="Select Export Location:\
<ul><li>countpost_id</li>\
<li>link id (inode-jnode)</li>\
</ul>")
return pb.render()
def __call__(self, Scen, TruthTable):
self.tool_run_msg = ""
self.TRACKER.reset()
self.Scenario = Scen
self.CordonTruthTable = TruthTable
try:
self._Execute()
except Exception as e:
self.tool_run_msg = _m.PageBuilder.format_exception(
e, _traceback.format_exc())
raise
self.tool_run_msg = _m.PageBuilder.format_info("Done.")
def run(self):
self.tool_run_msg = ""
self.TRACKER.reset()
try:
self._Execute()
except Exception as e:
self.tool_run_msg = _m.PageBuilder.format_exception(
e, _traceback.format_exc())
raise
self.tool_run_msg = _m.PageBuilder.format_info("Done.")
def _Execute(self):
with _m.logbook_trace(name="{classname} v{version}".format(classname=(self.__class__.__name__), version=self.version),
attributes=self._GetAtts()):
lines =[]
for link in net.links():
if int(link['@stn1']) > 0:
lines.append((link['@stn1'],link.id))
with open(self.CordonExportFile, 'w') as writer:
writer.write("Countpost ID ,Link (i-node j-node)")
for line in lines:
line = [str(c) for c in line]
writer.write("\n" + ','.join(line))
#----SUB FUNCTIONS---------------------------------------------------------------------------------
def _GetAtts(self):
atts = {
"Scenario" : str(self.Scenario.id),
"Version": self.version,
"self": self.__MODELLER_NAMESPACE__}
return atts
@_m.method(return_type=_m.TupleType)
def percent_completed(self):
return self.TRACKER.getProgress()
@_m.method(return_type=unicode)
def tool_run_msg_status(self):
return self.tool_run_msg
|
R3v1L/django-landingpages
|
landingpages/admin.py
|
Python
|
mit
| 813
| 0.012315
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Landing pages administration module
===============================================
.. module:: landingpages.admin
:platform: Django
:synopsis: Landing pages administration module
.. moduleauthor:: (C) 2015 Oliver Gutiérrez
"""
# Django imports
from django.contrib import admin
# Site tools imports
from sitetools.admin import BaseModelAdmin
# Application imports
from landingpages.models import LandingPage
class LandingPageAdmin(BaseModelAdmin):
"""
Landing page administration class
"""
list_display = ('name','url','language','template',)
list_filter = ('language','template',)
search_fields = ('name','title','keywords',)
# Register models
admin.site.register(LandingPage,LandingPageAdmin)
|
dooma/Events
|
events/controllers/event.py
|
Python
|
mit
| 6,395
| 0.003911
|
__author__ = 'Călin Sălăgean'
from events.models.event import Event
from events.repositories.event import EventRepository
from events.repositories.person_event import PersonEventRepository
from events.repositories.person import PersonRepository
class EventController():
def __init__(self):
'''
EventController constructor
:return: object
'''
self.repository = EventRepository()
self.person_event_repository = PersonEventRepository()
self.person_repository = PersonRepository()
def index(self):
'''
Return all events in one string ready to display
:return: string
'''
events = self.repository.get_all()
output = ""
for event in events:
output += str(event.get_id()) + "\t" + event.get_date() + "\t" + event.get_time() + "\t" + event.get_description() + "\n"
return output
def show(self, id):
'''
Return a found event ready to display
:param id:
:return: string
'''
try:
id = int(id)
except ValueError:
return "Please enter an integer for ID"
try:
event = self.repository.get(id)
return str(event.get_id()) + "\t" + event.get_date() + "\t" + event.get_time() + "\t" + event.get_description() + "\n"
except (ValueError, AttributeError) as e:
return e
def insert(self, date = None, time = None, description = None):
'''
Insert an event
:param date:
:param time:
:param description:
:return: string
'''
try:
event = Event(date, time, description)
except (AttributeError, ValueError) as e:
return e
self.repository.insert(event)
return "Event inserted successfully"
def update(self, id, date = None, time = None, description = None):
'''
Update an event
:param id:
:param date:
:param time:
:param description:
:return: string
'''
try:
id = int(id)
except ValueError:
return "Please insert a valid ID"
try:
event = self.repository.get(id)
except ValueError as e:
return e
if date == '':
date = None
if time == '':
time = None
if description == '':
description = None
try:
event.update(date, time, description)
self.repository.update(event)
except (ValueError, AttributeError) as e:
return e
return "Event updated successfully!"
def delete(self, id):
'''
Delete an event
:param id:
:return: string
'''
try:
id = int(id)
except ValueError:
return "Please enter an integer for ID"
try:
event = self.repository.get(id)
self.repository.delete(event)
except (ValueError, AttributeError) as e:
return e
return "Event was deleted successfully"
def get_top_events(self):
'''
Show top events with most visitors
:return: string
'''
relations = self.get_instantiated_relation()
occurences = self.determine_occurences(relations)
relations.sort(
key=lambda rel: EventController.find_dict2(occurences, rel[0].get_id())[rel[0].get_id()],
reverse=True
)
# Unique relations
unique_relations = []
actual_event = -1
for rel in relations:
if rel[0].get_id() != actual_event:
actual_event = rel[0].get_id()
unique_relations.append(rel)
output = ""
for rel in unique_relations:
number_events = EventController.find_dict2(occurences, rel[0].get_id())[rel[0].get_id()]
            output += str(rel[0].get_id()) + "\t" + rel[0].get_description() + "\tattended by\t" + str(number_events) + " people\n"
return output
def get_instantiated_relation(self, person_id = None, event_id = None):
'''
Instantiate a Repository Many-to-Many relation
:param person_id:
:param event_id:
:return: array
'''
if person_id is None and event_id is None:
person_events = self.person_event_repository.get_all()
else:
person_events = self.person_event_repository.get_by_id(person_id, event_id)
result = []
for relation in person_events:
person = self.person_repository.get(relation.get_person_id())
event = self.repository.get(relation.get_event_id())
date = relation.get_date()
if date is None:
date = "Doesn't exist"
result.append((event, person, date))
return result
@staticmethod
def determine_occurences(relations):
'''
        Find occurrences in multiple tuples of relations
:param relations:
:return: array
'''
occurences = []
for relation in relations:
event_id = relation[0].get_id()
try:
occurence = EventController.find_dict2(occurences, event_id)
            except ValueError:
occurences.append({
event_id: 0
})
occurence = occurences[-1]
finally:
occurence[event_id] += 1
return occurences
@staticmethod
def find_dict(array, id):
'''
        Find dictionary with given id
:param array:
:param id:
:return: dictionary
:raise: ValueError if id is not found
'''
for elem in array:
try:
elem[id]
return elem
except KeyError:
continue
        raise ValueError
@staticmethod
def find_dict2(array, id):
'''
Find dictionary with given id
:param array:
:param id:
:return: dictionary
:raise: ValueError if id is not found
'''
if not len(array):
raise ValueError
        try:
            array[0][id]
            return array[0]
        except KeyError:
            return EventController.find_dict2(array[1:], id)
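    @staticmethod
    def _demo_find_dict2():
        '''
        Illustration with hypothetical data (not part of the original
        controller): find_dict2 walks the list recursively and returns the
        first dictionary containing the key, raising ValueError otherwise
        '''
        assert EventController.find_dict2([{1: 'a'}, {2: 'b'}], 2) == {2: 'b'}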
|
oli-kester/advanced-av-examples
|
amp-osc-lv2/.waf-1.8.5-3556be08f33a5066528395b11fed89fa/waflib/Tools/qt5.py
|
Python
|
gpl-2.0
| 14,329
| 0.056878
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml=False
ContentHandler=object
else:
has_xml=True
import os,sys
from waflib.Tools import cxx
from waflib import Task,Utils,Options,Errors,Context
from waflib.TaskGen import feature,after_method,extension
from waflib.Configure import conf
from waflib import Logs
MOC_H=['.h','.hpp','.hxx','.hh']
EXT_RCC=['.qrc']
EXT_UI=['.ui']
EXT_QT5=['.cpp','.cc','.cxx','.C']
QT5_LIBS='''
qtmain
Qt5Bluetooth
Qt5CLucene
Qt5Concurrent
Qt5Core
Qt5DBus
Qt5Declarative
Qt5DesignerComponents
Qt5Designer
Qt5Gui
Qt5Help
Qt5MultimediaQuick_p
Qt5Multimedia
Qt5MultimediaWidgets
Qt5Network
Qt5Nfc
Qt5OpenGL
Qt5Positioning
Qt5PrintSupport
Qt5Qml
Qt5QuickParticles
Qt5Quick
Qt5QuickTest
Qt5Script
Qt5ScriptTools
Qt5Sensors
Qt5SerialPort
Qt5Sql
Qt5Svg
Qt5Test
Qt5WebKit
Qt5WebKitWidgets
Qt5Widgets
Qt5WinExtras
Qt5X11Extras
Qt5XmlPatterns
Qt5Xml'''
class qxx(Task.classes['cxx']):
def __init__(self,*k,**kw):
Task.Task.__init__(self,*k,**kw)
self.moc_done=0
def runnable_status(self):
if self.moc_done:
return Task.Task.runnable_status(self)
else:
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
self.add_moc_tasks()
return Task.Task.runnable_status(self)
def create_moc_task(self,h_node,m_node):
try:
moc_cache=self.generator.bld.moc_cache
except AttributeError:
moc_cache=self.generator.bld.moc_cache={}
try:
return moc_cache[h_node]
except KeyError:
tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator)
tsk.set_inputs(h_node)
tsk.set_outputs(m_node)
if self.generator:
self.generator.tasks.append(tsk)
gen=self.generator.bld.producer
gen.outstanding.insert(0,tsk)
gen.total+=1
return tsk
else:
delattr(self,'cache_sig')
def moc_h_ext(self):
try:
ext=Options.options.qt_header_ext.split()
except AttributeError:
pass
if not ext:
ext=MOC_H
return ext
def add_moc_tasks(self):
node=self.inputs[0]
bld=self.generator.bld
try:
self.signature()
except KeyError:
pass
else:
delattr(self,'cache_sig')
include_nodes=[node.parent]+self.generator.includes_nodes
moctasks=[]
mocfiles=set([])
for d in bld.raw_deps.get(self.uid(),[]):
if not d.endswith('.moc'):
continue
if d in mocfiles:
continue
mocfiles.add(d)
h_node=None
base2=d[:-4]
for x in include_nodes:
for e in self.moc_h_ext():
h_node=x.find_node(base2+e)
if h_node:
break
if h_node:
m_node=h_node.change_ext('.moc')
break
else:
for k in EXT_QT5:
if base2.endswith(k):
for x in include_nodes:
h_node=x.find_node(base2)
if h_node:
break
if h_node:
m_node=h_node.change_ext(k+'.moc')
break
if not h_node:
raise Errors.WafError('No source found for %r which is a moc file'%d)
task=self.create_moc_task(h_node,m_node)
moctasks.append(task)
self.run_after.update(set(moctasks))
self.moc_done=1
class trans_update(Task.Task):
run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
color='BLUE'
Task.update_outputs(trans_update)
class XMLHandler(ContentHandler):
def __init__(self):
self.buf=[]
self.files=[]
def startElement(self,name,attrs):
if name=='file':
self.buf=[]
def endElement(self,name):
if name=='file':
self.files.append(str(''.join(self.buf)))
def characters(self,cars):
self.buf.append(cars)
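# Sketch of the .qrc structure XMLHandler walks (file names invented): it
# collects the text of every <file> element into self.files, so
#   <!DOCTYPE RCC><RCC version="1.0">
#   <qresource><file>images/icon.png</file></qresource>
#   </RCC>
# would yield files == ['images/icon.png'].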
@extension(*EXT_RCC)
def create_rcc_task(self,node):
rcnode=node.change_ext('_rc.cpp')
self.create_task('rcc',node,rcnode)
cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o'))
try:
self.compiled_tasks.append(cpptask)
except AttributeError:
self.compiled_tasks=[cpptask]
return cpptask
@extension(*EXT_UI)
def create_uic_task(self,node):
uictask=self.create_task('ui5',node)
uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])]
@extension('.ts')
def add_lang(self,node):
self.lang=self.to_list(getattr(self,'lang',[]))+[node]
@feature('qt5')
@after_method('apply_link')
def apply_qt5(self):
if getattr(self,'lang',None):
qmtasks=[]
for x in self.to_list(self.lang):
if isinstance(x,str):
x=self.path.find_resource(x+'.ts')
qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm')))
if getattr(self,'update',None)and Options.options.trans_qt5:
cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update',cxxnodes,x.inputs)
if getattr(self,'langname',None):
qmnodes=[x.outputs[0]for x in qmtasks]
rcnode=self.langname
if isinstance(rcnode,str):
            rcnode=self.path.find_or_declare(rcnode+'.qrc')
t=self.create_task('qm2rcc',qmnodes,rcnode)
k=create_rcc_task(self,t.outputs[0])
self.link_task.inputs.append(k.outputs[0])
lst=[]
for flag in self.to_list(self.env['CXXFLAGS']):
        if len(flag)<2:continue
f=flag[0:2]
if f in('-D','-I','/D','/I'):
if(f[0]=='/'):
lst.append('-'+flag[1:])
else:
lst.append(flag)
self.env.append_value('MOC_FLAGS',lst)
@extension(*EXT_QT5)
def cxx_hook(self,node):
return self.create_compiled_task('qxx',node)
class rcc(Task.Task):
color='BLUE'
run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
ext_out=['.h']
def rcname(self):
return os.path.splitext(self.inputs[0].name)[0]
def scan(self):
if not has_xml:
Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
return([],[])
parser=make_parser()
curHandler=XMLHandler()
parser.setContentHandler(curHandler)
fi=open(self.inputs[0].abspath(),'r')
try:
parser.parse(fi)
finally:
fi.close()
nodes=[]
names=[]
root=self.inputs[0].parent
for x in curHandler.files:
nd=root.find_resource(x)
if nd:nodes.append(nd)
else:names.append(x)
return(nodes,names)
class moc(Task.Task):
color='BLUE'
run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
class ui5(Task.Task):
color='BLUE'
run_str='${QT_UIC} ${SRC} -o ${TGT}'
ext_out=['.h']
class ts2qm(Task.Task):
color='BLUE'
run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
color='BLUE'
after='ts2qm'
def run(self):
txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs])
code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt
self.outputs[0].write(code)
def configure(self):
self.find_qt5_binaries()
self.set_qt5_libs_to_check()
self.set_qt5_defines()
self.find_qt5_libraries()
self.add_qt5_rpath()
self.simplify_qt5_libs()
@conf
def find_qt5_binaries(self):
env=self.env
opt=Options.options
qtdir=getattr(opt,'qtdir','')
qtbin=getattr(opt,'qtbin','')
paths=[]
if qtdir:
qtbin=os.path.join(qtdir,'bin')
if not qtdir:
qtdir=os.environ.get('QT5_ROOT','')
qtbin=os.environ.get('QT5_BIN',None)or os.path.join(qtdir,'bin')
if qtbin:
paths=[qtbin]
if not qtdir:
paths=os.environ.get('PATH','').split(os.pathsep)
paths.append('/usr/share/qt5/bin/')
try:
lst=Utils.listdir('/usr/local/Trolltech/')
except OSError:
pass
else:
if lst:
lst.sort()
lst.reverse()
qtdir='/usr/local/Trolltech/%s/'%lst[0]
qtbin=os.path.join(qtdir,'bin')
paths.append(qtbin)
cand=None
prev_ver=['5','0','0']
for qmk in('qmake-qt5','qmake5','qmake'):
try:
qmake=self.find_program(qmk,path_list=paths)
except self.errors.ConfigurationError:
pass
else:
try:
version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip()
except self.errors.WafError:
pass
else:
if version:
new_ver=version.split('.')
if new_ver>prev_ver:
cand=qmake
prev_ver=new_ver
if cand:
self.env.QMAKE=cand
else:
self.fatal('Could not find qmake for qt5')
qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep
def find_bin(ls
pinax/django-user-accounts | makemigrations.py | Python | mit | 904 | 0
#!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
DEFAULT_SETTINGS = dict(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"account",
"account.tests"
],
MIDDLEWARE_CLASSES=[],
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
},
SITE_ID=1,
ROOT_URLCONF="account.tests.urls",
SECRET_KEY="notasecret",
)
def run(*args):
    if not settings.configured:
        settings.configure(**DEFAULT_SETTINGS)
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
django.core.management.call_command(
"makemigrations",
"account",
*args
)
if __name__ == "__main__":
run(*sys.argv[1:])
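# Usage sketch (assumption): run from the repository root, forwarding any
# extra makemigrations flags, e.g.
#   python makemigrations.py --dry-run --verbosity 2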
attente/snapcraft | snapcraft/tests/test_plugin_gulp.py | Python | gpl-3.0 | 5,389 | 0
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from os import path
from unittest import mock
import fixtures
import snapcraft
from snapcraft.plugins import gulp, nodejs
from snapcraft import tests
class GulpPluginTestCase(tests.TestCase):
def setUp(self):
super().setUp()
self.project_options = snapcraft.ProjectOptions()
patcher = mock.patch('snapcraft.internal.common.run')
self.run_mock = patcher.start()
self.addCleanup(patcher.stop)
patcher = mock.patch('snapcraft.sources.Tar')
self.tar_mock = patcher.start()
self.addCleanup(patcher.stop)
patcher = mock.patch('sys.stdout')
patcher.start()
self.addCleanup(patcher.stop)
def test_pull_local_sources(self):
class Options:
source = '.'
gulp_tasks = []
node_engine = '4'
plugin = gulp.GulpPlugin('test-part', Options(), self.project_options)
os.makedirs(plugin.sourcedir)
plugin.pull()
self.assertFalse(self.run_mock.called, 'run() was called')
self.tar_mock.assert_has_calls([
mock.call(
nodejs.get_nodejs_release(plugin.options.node_engine),
path.join(os.path.abspath('.'), 'parts', 'test-part', 'npm')),
mock.call().download()])
def test_build(self):
self.useFixture(tests.fixture_setup.CleanEnvironment())
self.useFixture(fixtures.EnvironmentVariable(
'PATH', '/bin'))
class Options:
source = '.'
gulp_tasks = []
node_engine = '4'
plugin = gulp.GulpPlugin('test-part', Options(), self.project_options)
os.makedirs(plugin.sourcedir)
open(os.path.join(plugin.sourcedir, 'package.json'), 'w').close()
plugin.build()
path = '{}:/bin'.format(os.path.join(plugin._npm_dir, 'bin'))
self.run_mock.assert_has_calls([
mock.call(['npm', 'install', '-g', 'gulp-cli'],
cwd=plugin.builddir, env={'PATH': path}),
mock.call(['npm', 'install', '--only-development'],
cwd=plugin.builddir, env={'PATH': path}),
])
self.tar_mock.assert_has_calls([
mock.call(
nodejs.get_nodejs_release(plugin.options.node_engine),
os.path.join(plugin._npm_dir)),
mock.call().provision(
plugin._npm_dir, clean_target=False, keep_tarball=True)])
@mock.patch('platform.machine')
def test_unsupported_arch_raises_exception(self, machine_mock):
machine_mock.return_value = 'fantasy-arch'
class Options:
source = None
gulp_tasks = []
node_engine = '4'
with self.assertRaises(EnvironmentError) as raised:
gulp.GulpPlugin('test-part', Options(), self.project_options)
self.assertEqual(raised.exception.__str__(),
'architecture not supported (fantasy-arch)')
def test_schema(self):
self.maxDiff = None
plugin_schema = {
'$schema': 'http://json-schema.org/draft-04/schema#',
'additionalProperties': False,
'properties': {
'gulp-tasks': {'default': [],
'items': {'type': 'string'},
'minitems': 1,
'type': 'array',
'uniqueItems': True},
'node-engine': {'default': '4.4.4',
'type': 'string'},
'source': {'type': 'string'},
'source-branch': {'default': '', 'type': 'string'},
'source-subdir': {'default': None, 'type': 'string'},
'source-tag': {'default': '', 'type:': 'string'},
'source-type': {'default': '', 'type': 'string'},
'disable-parallel': {'default': False, 'type': 'boolean'}},
'pull-properties': ['source', 'source-type', 'source-branch',
'source-tag', 'source-subdir', 'node-engine'],
'build-properties': ['disable-parallel', 'gulp-tasks'],
'required': ['source', 'gulp-tasks'],
'type': 'object'}
self.assertEqual(gulp.GulpPlugin.schema(), plugin_schema)
def test_clean_pull_step(self):
class Options:
source = '.'
gulp_tasks = []
node_engine = '4'
plugin = gulp.GulpPlugin('test-part', Options(), self.project_options)
os.makedirs(plugin.sourcedir)
plugin.pull()
self.assertTrue(os.path.exists(plugin._npm_dir))
plugin.clean_pull()
self.assertFalse(os.path.exists(plugin._npm_dir))
annayqho/TheCannon | TheCannon/find_continuum_pixels.py | Python | mit | 3,465 | 0.003463
import numpy as np
LARGE = 200.
SMALL = 1. / LARGE
def _find_contpix_given_cuts(f_cut, sig_cut, wl, fluxes, ivars):
""" Find and return continuum pixels given the flux and sigma cut
Parameters
----------
f_cut: float
the upper limit imposed on the quantity (fbar-1)
sig_cut: float
the upper limit imposed on the quantity (f_sig)
wl: numpy ndarray of length npixels
rest-frame wavelength vector
fluxes: numpy ndarray of shape (nstars, npixels)
pixel intensities
    ivars: numpy ndarray of shape (nstars, npixels)
inverse variances, parallel to fluxes
Returns
-------
contmask: boolean mask of length npixels
True indicates that the pixel is continuum
"""
f_bar = np.median(fluxes, axis=0)
sigma_f = np.var(fluxes, axis=0)
bad = np.logical_and(f_bar==0, sigma_f==0)
cont1 = np.abs(f_bar-1) <= f_cut
cont2 = sigma_f <= sig_cut
contmask = np.logical_and(cont1, cont2)
contmask[bad] = False
return contmask
def _find_contpix(wl, fluxes, ivars, target_frac):
""" Find continuum pix in spec, meeting a set target fraction
Parameters
----------
wl: numpy ndarray
rest-frame wavelength vector
fluxes: numpy ndarray
pixel intensities
ivars: numpy ndarray
inverse variances, parallel to fluxes
target_frac: float
the fraction of pixels in spectrum desired to be continuum
Returns
-------
contmask: boolean numpy ndarray
True corresponds to continuum pixels
"""
print("Target frac: %s" %(target_frac))
bad1 = np.median(ivars, axis=0) == SMALL
bad2 = np.var(ivars, axis=0) == 0
bad = np.logical_and(bad1, bad2)
npixels = len(wl)-sum(bad)
f_cut = 0.0001
stepsize = 0.0001
sig_cut = 0.0001
contmask = _find_contpix_given_cuts(f_cut, sig_cut, wl, fluxes, ivars)
if npixels > 0:
frac = sum(contmask)/float(npixels)
else:
frac = 0
while (frac < target_frac):
f_cut += stepsize
sig_cut += stepsize
contmask = _find_contpix_given_cuts(f_cut, sig_cut, wl, fluxes, ivars)
if npixels > 0:
frac = sum(contmask)/float(npixels)
else:
frac = 0
    if frac > 0.10:
print("Warning: Over 10% of pixels identified as continuum.")
print("%s out of %s pixels identified as continuum" %(sum(contmask),
npixels))
print("Cuts: f_cut %s, sig_cut %s" %(f_cut, sig_cut))
return contmask
def _find_contpix_regions(wl, fluxes, ivars, frac, ranges):
""" Find continuum pix in a spectrum split into chunks
Parameters
----------
wl: numpy ndarray
rest-frame wavelength vector
fluxes: numpy ndarray
pixel intensities
ivars: numpy ndarray
        inverse variances, parallel to fluxes
frac: float
fraction of pixels in spectrum to be found as continuum
ranges: list, array
starts and ends indicating location of chunks in array
Returns
    -------
contmask: numpy ndarray, boolean
True indicates continuum pixel
"""
contmask = np.zeros(len(wl), dtype=bool)
for chunk in ranges:
start = chunk[0]
stop = chunk[1]
        contmask[start:stop] = _find_contpix(
wl[start:stop], fluxes[:,start:stop], ivars[:,start:stop], frac)
return contmask
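# Minimal sketch (not in the original module): exercising
# _find_contpix_given_cuts on synthetic spectra. Shapes follow the
# docstrings above; the cut values are arbitrary illustrations.
if __name__ == "__main__":
    wl_demo = np.linspace(4000., 4010., 5)
    fluxes_demo = np.ones((3, 5))
    fluxes_demo[:, 2] = 0.5  # one strong absorption pixel in every star
    ivars_demo = np.ones_like(fluxes_demo)
    mask = _find_contpix_given_cuts(0.01, 0.01, wl_demo, fluxes_demo, ivars_demo)
    # pixel 2 deviates from unity by more than f_cut, so it is excluded
    assert list(mask) == [True, True, False, True, True]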
Psychedelic-Engineering/sleep-machine | hardware/sensors.py | Python | mit | 2,531 | 0.034374
import os
import time
import datetime
import logging
import gzip
from hardware.channel import Channel
"""
Sensor Class
- optionally accept a logfile as data source via a parameter
- move logging into its own class or into the scheduler
"""
class Sensor:
def __init__(self, device):
self.initialized = False
self.device = device
self.initSensor()
self.logging = False
self.buffer = ""
self.lastTime = 0
def initSensor(self):
self.initialized = False
try:
header = self.device.sendCommand("?")
header = header.split(";")
logging.info("Initsensor: %s", header)
self.initChannels(header)
self.device.write("b 0")
self.device.write("l 0")
        except:
            logging.error("Initsensor error")
            raise
def initChannels(self, header):
        self.channels = []
for sensor in header:
if sensor != "":
name, params = sensor.split(":")
num, min, max = params.split(",")
num = int(num)
min, max = float(min), float(max)
for i in range(num):
self.channels.append(Channel(name, min, max, 100))
self.initialized = True
def readData(self):
if not self.initialized:
self.initSensor()
values = None
try:
values = self.device.sendCommand("!", ",")
if values and len(values) == len(self.channels):
values = map(float, values)
self.logData(values)
for i, v in enumerate(values):
self.channels[i].putValue(v)
except:
pass
def readRaw(self):
now = int(10*time.time())
if now > self.lastTime:
#data = self.device.readWaiting()
self.lastTime = now
data = self.device.sendCommand("!")
self.buffer += str(now) + "," + data + "\n"
if len(self.buffer) >= 20000:
logging.info("Writing Buffer")
file = gzip.open('logfile.txt.gz', 'a')
#file = open("logfile.txt", "a")
file.write(self.buffer)
file.close()
self.buffer = ""
def calibrate(self):
for c in self.channels:
c.calibrate()
    # logging, possibly move into its own class
def startLogging(self):
logging.debug("startLogging")
self.logging = True
self.logDate = datetime.date.today()
self.logFile = self.logDate.strftime("data/%Y-%m-%d.csv")
def stopLogging(self):
logging.debug("stopLogging")
self.logging = False
def logData(self, values):
if self.logging:
fileExists = os.path.isfile(self.logFile)
timestamp = time.time()
file = open(self.logFile, "a")
if not fileExists:
names = [c.name for c in self.channels]
file.write("time," + ','.join(names) + "\n")
file.write(str(timestamp) + "," + ','.join(map(str, values)) + "\n")
file.close()
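# Sketch (an assumption about the device protocol, inferred from
# initChannels above): the "?" command is expected to return a header like
#   "temp:2,0.0,100.0;light:1,0.0,1023.0;"
# i.e. name:count,min,max entries separated by ";", which would expand to
# two "temp" channels and one "light" channel, each with a 100-sample buffer.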
niavlys/kivy | kivy/config.py | Python | mit | 27,319 | 0.000146
'''
Configuration object
====================
The :class:`Config` object is an instance of a modified Python ConfigParser.
See the `ConfigParser documentation
<http://docs.python.org/library/configparser.html>`_ for more information.
Kivy has a configuration file which determines the default settings. In
order to change these settings, you can alter this file manually or use
the Config object. Please see the :ref:`Configure Kivy` section for more
information.
Note: To avoid instances where a blank screen appears before resizing, kivy.config
should be imported right after kivy.app, or before any modules affecting the window.
Usage of the Config object
--------------------------
To read a configuration token from a particular section::
>>> from kivy.config import Config
>>> Config.getint('kivy', 'show_fps')
0
Change the configuration and save it::
>>> Config.set('postproc', 'retain_time', '50')
>>> Config.write()
.. versionchanged:: 1.7.1
The ConfigParser should work correctly with utf-8 now. The values are
converted from ascii to unicode only when needed. The method get() returns
utf-8 strings.
Available configuration tokens
------------------------------
.. |log_levels| replace:: 'debug', 'info', 'warning', 'error' or 'critical'
:kivy:
`desktop`: int, 0 or 1
This option controls desktop OS specific features, such as enabling
drag-able scroll-bar in scroll views, disabling of bubbles in
TextInput etc. 0 is disabled, 1 is enabled.
`exit_on_escape`: int, 0 or 1
Enables exiting kivy when escape is pressed.
0 is disabled, 1 is enabled.
`log_level`: string, one of |log_levels|
Set the minimum log level to use.
`log_dir`: string
Path of log directory.
`log_name`: string
Format string to use for the filename of log file.
`log_enable`: int, 0 or 1
Activate file logging. 0 is disabled, 1 is enabled.
`keyboard_mode`: string
        Specifies the keyboard mode to use. It can be one of the following:
* '' - Let Kivy choose the best option for your current platform.
* 'system' - real keyboard.
* 'dock' - one virtual keyboard docked to a screen side.
* 'multi' - one virtual keyboard for every widget request.
* 'systemanddock' - virtual docked keyboard plus input from real
keyboard.
* 'systemandmulti' - analogous.
`keyboard_layout`: string
Identifier of the layout to use.
`window_icon`: string
Path of the window icon. Use this if you want to replace the default
pygame icon.
:postproc:
`double_tap_time`: int
Time allowed for the detection of double tap, in milliseconds.
`double_tap_distance`: float
Maximum distance allowed for a double tap, normalized inside the range
0 - 1000.
`triple_tap_time`: int
Time allowed for the detection of triple tap, in milliseconds.
`triple_tap_distance`: float
Maximum distance allowed for a triple tap, normalized inside the range
0 - 1000.
`retain_time`: int
Time allowed for a retain touch, in milliseconds.
`retain_distance`: int
If the touch moves more than is indicated by retain_distance, it will
not be retained. Argument should be an int between 0 and 1000.
`jitter_distance`: int
Maximum distance for jitter detection, normalized inside the range 0
- 1000.
`jitter_ignore_devices`: string, separated with commas
List of devices to ignore from jitter detection.
`ignore`: list of tuples
List of regions where new touches are ignored.
This configuration token can be used to resolve hotspot problems
with DIY hardware. The format of the list must be::
ignore = [(xmin, ymin, xmax, ymax), ...]
All the values must be inside the range 0 - 1.
:graphics:
`maxfps`: int, defaults to 60
Maximum FPS allowed.
    `fullscreen`: int or string, one of 0, 1, 'fake' or 'auto'
Activate fullscreen. If set to `1`, a resolution of `width`
times `height` pixels will be used.
If set to `auto`, your current display's resolution will be
        used instead. This is most likely what you want.
If you want to place the window in another display,
use `fake` and adjust `width`, `height`, `top` and `left`.
`width`: int
Width of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
`height`: int
Height of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
`fbo`: string, one of 'hardware', 'software' or 'force-hardware'
Selects the FBO backend to use.
`show_cursor`: int, one of 0 or 1
Show the cursor on the screen.
`position`: string, one of 'auto' or 'custom'
Position of the window on your display. If `auto` is used, you have no
control of the initial position: `top` and `left` are ignored.
`top`: int
Top position of the :class:`~kivy.core.window.Window`.
`left`: int
Left position of the :class:`~kivy.core.window.Window`.
`rotation`: int, one of 0, 90, 180 or 270
Rotation of the :class:`~kivy.core.window.Window`.
`resizable`: int, one of 0 or 1
If 0, the window will have a fixed size. If 1, the window will be
resizable.
:input:
You can create new input devices using this syntax::
# example of input provider instance
yourid = providerid,parameters
# example for tuio provider
default = tuio,127.0.0.1:3333
mytable = tuio,192.168.0.1:3334
.. seealso::
Check the providers in kivy.input.providers for the syntax to use
inside the configuration file.
:widgets:
`scroll_distance`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_distance`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_friction`: float
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_friction`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_timeout`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_timeout`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_stoptime`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_stoptime`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
`scroll_moves`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_moves`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
:modules:
You can activate modules with this syntax::
modulename =
Anything after the = will be passed to the module as arguments.
Check the specific module's documentation for a list of accepted
arguments.
.. versionchanged:: 1.8.0
    `systemanddock` and `systemandmulti` have been added as possible values for
`keyboard_mode` in the kivy section. `exit_on_escape` has been added
to the kivy section.
.. versionchanged:: 1.2.0
`resizable` has been added to graphics section.
.. versionchanged:: 1.1.0
tuio no longer listens by default. Window icons are not copied to
user directory anymore. You can still set a new window icon by using the
``window_icon`` config setting.
.. versionchanged:: 1.0.8
`scro
asgardproject/asgard-calendar | events/sitemaps.py | Python | bsd-3-clause | 251 | 0.047809
from django.contrib.sitemaps import Sitemap
from events.models import Event
class EventsSitemap(Sitemap):
changefreq = "never"
priority = 1.0
def items(self):
return Event.objects.public()
def lastmod(self, obj):
return obj.date_modified
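# Sketch (assumption, not from this repo): exposing the sitemap in a urlconf.
# from django.contrib.sitemaps.views import sitemap
# urlpatterns = [
#     url(r'^sitemap\.xml$', sitemap,
#         {'sitemaps': {'events': EventsSitemap}}, name='sitemap'),
# ]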
jeromecc/doctoctocbot | src/registration/views.py | Python | mpl-2.0 | 1,823 | 0.003291
import logging
from django_registration.backends.one_step.views import RegistrationView
from django.urls import reverse
from django.contrib.auth import authenticate, get_user_model, login
from django_registration import signals
from django.conf import settings
from invite.models import CategoryInvitation
logger=logging.getLogger(__name__)
User = get_user_model()
class OneStepRegistrationView(RegistrationView):
success_url = "/user/profile/"
disallowed_url = "/accounts/register/disallowed/"
def register(self, form):
# Check that the email used to register is identical to invitation email
registration_email = form.cleaned_data["email"]
key = self.request.session.get(settings.INVITATION_SESSION_KEY, None)
try:
            invitation = CategoryInvitation.objects.get(key=key)
except CategoryInvitation.DoesNotExist:
return
if registration_email != invitation.email:
return
new_user = form.save()
new_user = authenticate(
**{
User.USERNAME_FIELD: new_user.get_username(),
"password": form.cleaned_data["password1"],
}
)
login(self.request, new_user)
signals.user_registered.send(
            sender=self.__class__, user=new_user, request=self.request
)
return new_user
def registration_allowed(self):
"""
Override this to enable/disable user registration, either
globally or on a per-request basis.
"""
key = self.request.session.get(settings.INVITATION_SESSION_KEY, None)
if not key:
logger.debug("no key")
return False
logger.debug(f"key:{key}")
return CategoryInvitation.objects.filter(key=key, accepted=True).exists()
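# Sketch (assumption, not from this repo): the view expects the invitation
# key to already sit in the session, so wiring it up might look like
# from django.urls import path
# urlpatterns = [
#     path('accounts/register/', OneStepRegistrationView.as_view(),
#          name='django_registration_register'),
# ]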
sfromm/snmpryte | lib/netspryte/db/influx.py | Python | gpl-3.0 | 4,597 | 0.00261
# Written by Stephen Fromm <stephenf nero net>
# Copyright (C) 2016-2017 University of Oregon
#
# This file is part of netspryte
#
# netspryte is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# netspryte is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with netspryte. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
import time
import subprocess
from netspryte.db import *
import netspryte.snmp
from netspryte import constants as C
try:
import influxdb
HAVE_INFLUXDB = True
except ImportError:
HAVE_INFLUXDB = False
class InfluxDatabaseBackend(BaseDatabaseBackend):
TIME_PRECISION = 's'
def __init__(self, backend, **kwargs):
super(InfluxDatabaseBackend, self).__init__(backend, **kwargs)
self.client = None
if not HAVE_INFLUXDB:
logging.error("do not have influxdb bindings for python")
return None
self.host = kwargs.get('host', C.DEFAULT_INFLUXDB_HOST)
self.port = kwargs.get('port', C.DEFAULT_INFLUXDB_PORT)
self.database = kwargs.get('database', C.DEFAULT_INFLUXDB_DATABASE)
self.user = kwargs.get('user', C.DEFAULT_INFLUXDB_USER)
self.password = kwargs.get('password', C.DEFAULT_INFLUXDB_PASSWORD)
self.client = influxdb.InfluxDBClient(self.host, self.port,
self.user, self.password, self.database)
        logging.warning("influx host is %s", self.host)
databases = influxdb_list_databases(self.client)
if self.database not in databases:
influxdb_create_database(self.client, self.database)
def write(self, data):
''' write data to rrd database '''
if not self.client:
return None
influxdb_switch_database(self.client, self.database)
return influxdb_write(self.client, data)
@property
def host(self):
return self._host
@host.setter
def host(self, arg):
self._host = arg
@property
def port(self):
return self._port
@port.setter
def port(self, arg):
try:
self._port = int(arg)
except ValueError:
raise ValueError("InflubDB port must be an integer")
@property
def database(self):
return self._database
@database.setter
def database(self, arg):
self._database = arg
@property
def user(self):
return self._user
@user.setter
def user(self, arg):
self._user = arg
@property
def password(self):
return self._password
@password.setter
def password(self, arg):
self._password = arg
def influxdb_create_database(client, dbname):
try:
logging.info("creating database %s", dbname)
client.create_database(dbname)
influxdb_switch_database(client, dbname)
except ConnectionError as e:
logging.error("failed to create database %s: %s", dbname, str(e))\
def influxdb_switch_database(client, dbname):
logging.info("switching to database %s", dbname)
client.switch_database(dbname)
def influxdb_list_databases(client):
dbs = client.get_list_database()
return [ x['name'] for x in dbs ]
def influxdb_write(client, data, ts=None):
    # a default of time.time() would be evaluated once at import time,
    # so compute the timestamp per call instead
    if ts is None:
        ts = time.time()
    points = list()
data_class = data.measurement_class.name
data_title = data.presentation.title
data_id = data.name
data_host = data.host.name
for k, v in list(data.items()):
if k.startswith('_'):
continue
if hasattr(v, 'prettyPrint'):
v = v.prettyPrint()
point = {
"measurement": data_class,
"time": int(ts),
"tags": {
"host" : data_host,
"title" : data_title,
"instance" : data_id,
"type": k.lower()
},
"fields": {
"value": v
}
}
points.append(point)
try:
client.write_points(points, time_precision=InfluxDatabaseBackend.TIME_PRECISION)
except influxdb.exceptions.InfluxDBClientError as e:
logging.error("failed to write data: %s", str(e))
Piotr95/Yummy_Pies | UserManagement/urls.py | Python | mit | 359 | 0.002786
from django.conf.urls import url
from django.contrib import admin
from UserManagement import views
from django.contrib.auth import views as auth_views
app_name = 'UserManagement'
urlpatterns = [
url(r'^register$', views.register, name="register"),
url(r'^login$', views.login, name='login'),
    url(r'^logout_user/$', views.log_out, name='logout'),
]
eunchong/build | third_party/buildbot_8_4p1/buildbot/manhole.py | Python | bsd-3-clause | 11,523 | 0.001909
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import os
import types
import binascii
import base64
from twisted.python import log
from twisted.application import service, strports
from twisted.cred import checkers, portal
from twisted.conch import manhole, telnet, manhole_ssh, checkers as conchc
from twisted.conch.insults import insults
from twisted.internet import protocol
from buildbot.util import ComparableMixin
from zope.interface import implements # requires Twisted-2.0 or later
# makeTelnetProtocol and _TelnetRealm are for the TelnetManhole
class makeTelnetProtocol:
# this curries the 'portal' argument into a later call to
# TelnetTransport()
def __init__(self, portal):
self.portal = portal
    def __call__(self):
auth = telnet.AuthenticatingTelnetProtocol
return telnet.TelnetTransport(auth, self.portal)
class _TelnetRealm:
implements(portal.IRealm)
def __init__(self, namespace_maker):
self.namespace_maker = namespace_maker
def requestAvatar(self, avatarId, *interfaces):
if telnet.ITelnetProtocol in interfaces:
namespace = self.namespace_maker()
p = telnet.TelnetBootstrapProtocol(insults.ServerProtocol,
manhole.ColoredManhole,
namespace)
return (telnet.ITelnetProtocol, p, lambda: None)
raise NotImplementedError()
class chainedProtocolFactory:
# this curries the 'namespace' argument into a later call to
# chainedProtocolFactory()
def __init__(self, namespace):
self.namespace = namespace
def __call__(self):
return insults.ServerProtocol(manhole.ColoredManhole, self.namespace)
class AuthorizedKeysChecker(conchc.SSHPublicKeyDatabase):
"""Accept connections using SSH keys from a given file.
SSHPublicKeyDatabase takes the username that the prospective client has
requested and attempts to get a ~/.ssh/authorized_keys file for that
username. This requires root access, so it isn't as useful as you'd
like.
Instead, this subclass looks for keys in a single file, given as an
argument. This file is typically kept in the buildmaster's basedir. The
file should have 'ssh-dss ....' lines in it, just like authorized_keys.
"""
def __init__(self, authorized_keys_file):
self.authorized_keys_file = os.path.expanduser(authorized_keys_file)
def checkKey(self, credentials):
f = open(self.authorized_keys_file)
for l in f.readlines():
l2 = l.split()
if len(l2) < 2:
continue
try:
if base64.decodestring(l2[1]) == credentials.blob:
return 1
except binascii.Error:
continue
return 0
class _BaseManhole(service.MultiService):
"""This provides remote access to a python interpreter (a read/exec/print
loop) embedded in the buildmaster via an internal SSH server. This allows
detailed inspection of the buildmaster state. It is of most use to
buildbot developers. Connect to this by running an ssh client.
"""
def __init__(self, port, checker, using_ssh=True):
"""
@type port: string or int
@param port: what port should the Manhole listen on? This is a
strports specification string, like 'tcp:12345' or
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@type checker: an object providing the
L{twisted.cred.checkers.ICredentialsChecker} interface
@param checker: if provided, this checker is used to authenticate the
client instead of using the username/password scheme. You must either
provide a username/password or a Checker. Some useful values are::
import twisted.cred.checkers as credc
import twisted.conch.checkers as conchc
c = credc.AllowAnonymousAccess # completely open
c = credc.FilePasswordDB(passwd_filename) # file of name:passwd
c = conchc.UNIXPasswordDatabase # getpwnam() (probably /etc/passwd)
@type using_ssh: bool
@param using_ssh: If True, accept SSH connections. If False, accept
regular unencrypted telnet connections.
"""
# unfortunately, these don't work unless we're running as root
#c = credc.PluggableAuthenticationModulesChecker: PAM
#c = conchc.SSHPublicKeyDatabase() # ~/.ssh/authorized_keys
# and I can't get UNIXPasswordDatabase to work
service.MultiService.__init__(self)
if type(port) is int:
port = "tcp:%d" % port
self.port = port # for comparison later
self.checker = checker # to maybe compare later
def makeNamespace():
# close over 'self' so we can get access to .parent later
master = self.parent
namespace = {
'master': master,
'status': master.getStatus(),
'show': show,
}
return namespace
def makeProtocol():
namespace = makeNamespace()
p = insults.ServerProtocol(manhole.ColoredManhole, namespace)
return p
self.using_ssh = using_ssh
if using_ssh:
r = manhole_ssh.TerminalRealm()
r.chainedProtocolFactory = makeProtocol
p = portal.Portal(r, [self.checker])
f = manhole_ssh.ConchFactory(p)
else:
r = _TelnetRealm(makeNamespace)
p = portal.Portal(r, [self.checker])
f = protocol.ServerFactory()
f.protocol = makeTelnetProtocol(p)
s = strports.service(self.port, f)
s.setServiceParent(self)
def startService(self):
service.MultiService.startService(self)
if self.using_ssh:
via = "via SSH"
else:
via = "via telnet"
log.msg("Manhole listening %s on port %s" % (via, self.port))
class TelnetManhole(_BaseManhole, ComparableMixin):
"""This Manhole accepts unencrypted (telnet) connections, and requires a
username and password authorize access. You are encouraged to use the
encrypted ssh-based manhole classes instead."""
compare_attrs = ["port", "username", "password"]
def __init__(self, port, username, password):
"""
@type port: string or int
@param port: what port should the Manhole listen on? This is a
strports specification string, like 'tcp:12345' or
'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
simple tcp port.
@param username:
@param password: username= and password= form a pair of strings to
use when authenticating the remote user.
"""
self.username = username
self.password = password
c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
c.addUser(username, password)
_BaseManhole.__init__(self, port, c, using_ssh=False)
class PasswordManhole(_BaseManhole, ComparableMixin):
"""This Manhole accepts encrypted (ssh) connections, and requires a
username and password to authorize access.
"""
compare_attrs = ["port", "username", "password"]
def __init__(self, port, username, password):
"""
@type port: string or int
@param port: what port should the Manhole listen
brkt/brkt-cli | brkt_cli/test_version.py | Python | apache-2.0 | 2,893 | 0
# Copyright 2017 Bracket Computing, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# https://github.com/brkt/brkt-cli/blob/master/LICENSE
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and
# limitations under the License.
import unittest
import datetime
import iso8601
from brkt_cli import CLIConfig
from brkt_cli import version
class TestVersionCheck(unittest.TestCase):
def test_is_version_supported(self):
supported = [
'0.9.8', '0.9.9', '0.9.9.1', '0.9.10', '0.9.11', '0.9.12'
]
self.assertFalse(
version._is_version_supported('0.9.7', supported)
)
self.assertTrue(
version._is_version_supported('0.9.8', supported)
)
self.assertTrue(
version._is_version_supported('0.9.12', supported)
)
self.assertTrue(
version._is_version_supported('0.9.13pre1', supported)
)
self.assertTrue(
version._is_version_supported('0.9.13', supported)
)
def test_is_later_version_available(self):
supported = [
'0.9.8', '0.9.9', '0.9.9.1', '0.9.10', '0.9.11', '0.9.12'
]
self.assertTrue(
version._is_later_version_available('0.9.11', supported)
)
self.assertFalse(
version._is_later_version_available('0.9.12', supported)
)
self.assertFalse(
version._is_later_version_available('0.9.13pre1', supported)
)
def test_version_check_time(self):
cfg = CLIConfig()
# Not set.
self.assertIsNone(version.get_last_version_check_time(cfg))
# Set.
before = datetime.datetime.now(tz=iso8601.UTC)
version.set_last_version_check_time(cfg)
t = version.get_last_version_check_time(cfg)
self.assertIsNotNone(t)
after = datetime.datetime.now(tz=iso8601.UTC)
self.assertTrue(before <= t <= after)
def test_is_version_check_needed(self):
cfg = CLIConfig()
# Not set.
self.assertTrue(version.is_version_check_needed(cfg))
# Set to 25 hours ago.
now = datetime.datetime.now(tz=iso8601.UTC)
dt = now - datetime.timedelta(hours=25)
version.set_last_version_check_time(cfg, dt=dt)
self.assertTrue(version.is_version_check_needed(cfg))
# Set to 23 hours ago.
        dt = now - datetime.timedelta(hours=23)
version.set_last_version_check_time(cfg, dt=dt)
self.assertFalse(version.is_version_check_needed(cfg))
Feandil/webapp-config | WebappConfig/selinux.py | Python | gpl-2.0 | 4,699 | 0.005533
#!/usr/bin/python -O
#
# /usr/sbin/webapp-config
# Python script for managing the deployment of web-based
# applications
#
# Originally written for the Gentoo Linux distribution
#
# Copyright (c) 1999-2007 Authors
# Released under v2 of the GNU GPL
#
# ========================================================================
# ========================================================================
# Dependencies
# ------------------------------------------------------------------------
import os, os.path, re, shutil, subprocess, tempfile
from WebappConfig.debug import OUT
# ========================================================================
# Constants
# ------------------------------------------------------------------------
MAKE_CONF_FILE = ['/etc/make.conf', '/etc/portage/make.conf']
# ========================================================================
# SELinux handler
# ------------------------------------------------------------------------
class SELinux:
def __init__(self, package_name, vhost_hostname, policy_types = ()):
self.package_name = package_name
self.vhost_hostname = vhost_hostname
self.policy_name = '{}_{}'.format(package_name, vhost_hostname)
self.policy_types = policy_types
if not self.policy_types:
for filename in MAKE_CONF_FILE:
try:
with open(filename) as file:
for line in file.readlines():
if line.startswith('POLICY_TYPES='):
self.policy_types = line[len('POLICY_TYPES='):-1].strip(' "').split()
break
                        if self.policy_types:
break
except IOError:
pass
if not self.policy_types:
            OUT.die('No SELinux policy was found, aborting')
def remove_module(self):
OUT.info('Removing SELinux modules')
for policy in self.policy_types:
if subprocess.call(['semodule', '-s', policy, '-r', self.policy_name]):
OUT.warn('Unable to remove {} SELinux module for {} @ {}'.format(policy, self.package_name, self.vhost_hostname))
def create_module(self, package_version, vhost_root, server_files, server_dirs):
temp_dir = tempfile.mkdtemp()
OUT.info('Creating SELinux modules')
cleaned_version = re.match(r'(?P<version>[0-9]*\.[0-9]*(?:\.[0-9]*)?)', package_version).group('version')
for policy in self.policy_types:
base_dir = os.path.join(temp_dir, policy)
os.mkdir(base_dir)
with open(os.path.join(base_dir, '{}.te'.format(self.policy_name)), 'w') as te_file:
te_file.write('policy_module({},{})\n'.format(self.policy_name, cleaned_version))
te_file.write('require {\n')
te_file.write(' type httpd_sys_rw_content_t;\n')
te_file.write('}')
with open(os.path.join(base_dir, '{}.fc'.format(self.policy_name)), 'w') as fc_file:
for files in server_files:
fc_file.write('{} gen_context(system_u:object_r:httpd_sys_rw_content_t,s0)\n'.format(SELinux.filename_re_escape(os.path.join(vhost_root, files.rstrip('\n')))))
for dirs in server_dirs:
fc_file.write('{}(/.*)? gen_context(system_u:object_r:httpd_sys_rw_content_t,s0)\n'.format(SELinux.filename_re_escape(os.path.join(vhost_root, dirs.rstrip('\n')))))
if subprocess.call(['make', '-s', '-C', base_dir, '-f', os.path.join('/usr/share/selinux', policy, 'include/Makefile'), '{}.pp'.format(self.policy_name)]):
if not os.path.isfile(os.path.join('/usr/share/selinux', policy, 'include/Makefile')):
OUT.die('Policy {} is not supported, please fix your configuration'.format(policy))
OUT.die('Unable to create {} SELinux module for {} @ {}'.format(policy, self.package_name, self.vhost_hostname))
OUT.info('Installing SELinux modules')
try:
for policy in self.policy_types:
if subprocess.call(['semodule', '-s', policy, '-i', os.path.join(temp_dir, policy, '{}.pp'.format(self.policy_name))]):
                    OUT.die('Unable to install {} SELinux module for {} @ {}'.format(policy, self.package_name, self.vhost_hostname))
except IOError:
            OUT.die('"semodule" was not found, please check your SELinux installation')
shutil.rmtree(temp_dir)
@staticmethod
def filename_re_escape(string):
        return re.sub(r'\.', r'\.', string)
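# Sketch of what filename_re_escape produces for a typical vhost path (only
# dots are escaped, matching the .fc lines written in create_module above):
#   filename_re_escape('/var/www/example.com/htdocs')
#   -> '/var/www/example\.com/htdocs'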
bitkeeper/python-opcua | tests/tests_server.py | Python | lgpl-3.0 | 27,210 | 0.00294
import unittest
import os
import shelve
import time
from tests_common import CommonTests, add_server_methods
from tests_xml import XmlTests
from tests_subscriptions import SubscriptionTests
from datetime import timedelta, datetime
from tempfile import NamedTemporaryFile
import opcua
from opcua import Server
from opcua import Client
from opcua import ua
from opcua import uamethod
from opcua.common.event_objects import BaseEvent, AuditEvent, AuditChannelEvent, AuditSecurityEvent, AuditOpenSecureChannelEvent
from opcua.common import ua_utils
port_num = 48540
port_discovery = 48550
class TestServer(unittest.TestCase, CommonTests, SubscriptionTests, XmlTests):
'''
Run common tests on server side
Tests that can only be run on server side must be defined here
'''
@classmethod
def setUpClass(cls):
cls.srv = Server()
cls.srv.set_endpoint('opc.tcp://localhost:{0:d}'.format(port_num))
add_server_methods(cls.srv)
cls.srv.start()
cls.opc = cls.srv
cls.discovery = Server()
cls.discovery.set_application_uri("urn:freeopcua:python:discovery")
        cls.discovery.set_endpoint('opc.tcp://localhost:{0:d}'.format(port_discovery))
cls.discovery.start()
@classmethod
def tearDownClass(cls):
cls.srv.stop()
cls.discovery.stop()
def test_discovery(self):
client = Client(self.discovery.endpoint.geturl())
client.connect()
try:
servers = client.find_servers()
new_app_uri = "urn:freeopcua:python:server:test_discovery"
self.srv.application_uri = new_app_uri
self.srv.register_to_discovery(self.discovery.endpoint.geturl(), 0)
time.sleep(0.1) # let server register registration
new_servers = client.find_servers()
self.assertEqual(len(new_servers) - len(servers) , 1)
self.assertFalse(new_app_uri in [s.ApplicationUri for s in servers])
self.assertTrue(new_app_uri in [s.ApplicationUri for s in new_servers])
finally:
client.disconnect()
def test_find_servers2(self):
client = Client(self.discovery.endpoint.geturl())
client.connect()
try:
servers = client.find_servers()
new_app_uri1 = "urn:freeopcua:python:server:test_discovery1"
self.srv.application_uri = new_app_uri1
self.srv.register_to_discovery(self.discovery.endpoint.geturl(), period=0)
new_app_uri2 = "urn:freeopcua:python:test_discovery2"
self.srv.application_uri = new_app_uri2
self.srv.register_to_discovery(self.discovery.endpoint.geturl(), period=0)
time.sleep(0.1) # let server register registration
new_servers = client.find_servers()
self.assertEqual(len(new_servers) - len(servers) , 2)
self.assertFalse(new_app_uri1 in [s.ApplicationUri for s in servers])
self.assertFalse(new_app_uri2 in [s.ApplicationUri for s in servers])
self.assertTrue(new_app_uri1 in [s.ApplicationUri for s in new_servers])
self.assertTrue(new_app_uri2 in [s.ApplicationUri for s in new_servers])
# now do a query with filer
new_servers = client.find_servers(["urn:freeopcua:python:server"])
self.assertEqual(len(new_servers) - len(servers) , 0)
self.assertTrue(new_app_uri1 in [s.ApplicationUri for s in new_servers])
self.assertFalse(new_app_uri2 in [s.ApplicationUri for s in new_servers])
# now do a query with filer
new_servers = client.find_servers(["urn:freeopcua:python"])
self.assertEqual(len(new_servers) - len(servers) , 2)
self.assertTrue(new_app_uri1 in [s.ApplicationUri for s in new_servers])
self.assertTrue(new_app_uri2 in [s.ApplicationUri for s in new_servers])
finally:
client.disconnect()
"""
# not sure if this test is necessary, and there is a lot repetition with previous test
def test_discovery_server_side(self):
servers = self.discovery.find_servers()
self.assertEqual(len(servers), 1)
self.srv.register_to_discovery(self.discovery.endpoint.geturl(), 1)
time.sleep(1) # let server register registration
servers = self.discovery.find_servers()
print("SERVERS 2", servers)
self.assertEqual(len(servers), 2)
"""
# def test_register_server2(self):
# servers = self.opc.register_server()
def test_register_namespace(self):
uri = 'http://mycustom.Namespace.com'
idx1 = self.opc.register_namespace(uri)
idx2 = self.opc.get_namespace_index(uri)
self.assertEqual(idx1, idx2)
def test_register_existing_namespace(self):
uri = 'http://mycustom.Namespace.com'
idx1 = self.opc.register_namespace(uri)
idx2 = self.opc.register_namespace(uri)
idx3 = self.opc.get_namespace_index(uri)
self.assertEqual(idx1, idx2)
self.assertEqual(idx1, idx3)
def test_register_use_namespace(self):
uri = 'http://my_very_custom.Namespace.com'
idx = self.opc.register_namespace(uri)
root = self.opc.get_root_node()
myvar = root.add_variable(idx, 'var_in_custom_namespace', [5])
myid = myvar.nodeid
self.assertEqual(idx, myid.NamespaceIndex)
def test_server_method(self):
def func(parent, variant):
variant.Value *= 2
return [variant]
o = self.opc.get_objects_node()
v = o.add_method(3, 'Method1', func, [ua.VariantType.Int64], [ua.VariantType.Int64])
result = o.call_method(v, ua.Variant(2.1))
self.assertEqual(result, 4.2)
def test_historize_variable(self):
o = self.opc.get_objects_node()
var = o.add_variable(3, "test_hist", 1.0)
self.srv.iserver.enable_history_data_change(var, timedelta(days=1))
time.sleep(1)
var.set_value(2.0)
var.set_value(3.0)
self.srv.iserver.disable_history_data_change(var)
def test_historize_events(self):
srv_node = self.srv.get_node(ua.ObjectIds.Server)
self.assertEqual(
srv_node.get_event_notifier(),
{ua.EventNotifier.SubscribeToEvents}
)
srvevgen = self.srv.get_event_generator()
self.srv.iserver.enable_history_event(srv_node, period=None)
self.assertEqual(
srv_node.get_event_notifier(),
{ua.EventNotifier.SubscribeToEvents, ua.EventNotifier.HistoryRead}
)
srvevgen.trigger(message="Message")
self.srv.iserver.disable_history_event(srv_node)
def test_references_for_added_nodes_method(self):
objects = self.opc.get_objects_node()
o = objects.add_object(3, 'MyObject')
nodes = objects.get_referenced_nodes(refs=ua.ObjectIds.Organizes, direction=ua.BrowseDirection.Forward, includesubtypes=False)
self.assertTrue(o in nodes)
nodes = o.get_referenced_nodes(refs=ua.ObjectIds.Organizes, direction=ua.BrowseDirection.Inverse, includesubtypes=False)
self.assertTrue(objects in nodes)
self.assertEqual(o.get_parent(), objects)
self.assertEqual(o.get_type_definition().Identifier, ua.ObjectIds.BaseObjectType)
@uamethod
def callback(parent):
return
m = o.add_method(3, 'MyMethod', callback)
nodes = o.get_referenced_nodes(refs=ua.ObjectIds.HasComponent, direction=ua.BrowseDirection.Forward, includesubtypes=False)
self.assertTrue(m in nodes)
nodes = m.get_referenced_nodes(refs=ua.ObjectIds.HasComponent, direction=ua.BrowseDirection.Inverse, includesubtypes=False)
self.assertTrue(o in nodes)
self.assertEqual(m.get_parent(), o)
    # This must run first for the following BaseEvent tests to work (maybe rewrite them a bit since they are not independent)
def test_get_event_from_type_node_BaseEvent(self):
ev = opcua.common.events.get_event_obj_from_type_node(opcua.Node(self.opc.iserver.isession, ua.NodeId(ua.ObjectIds.BaseEventType)))
chec
hellwen/mytrade | mytrade/form/widgets.py | Python | mit | 8,982 | 0.001113
from jinja2 import escape
from flask.globals import _request_ctx_stack
from flask import json
from wtforms import widgets
from mytrade.utils import _, get_url
class Select2Widget(widgets.Select):
"""
`Select2 <https://github.com/ivaynberg/select2>`_ styled select widget.
You must include select2.js, form-x.x.x.js and select2 stylesheet for it to
work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'select2')
allow_blank = getattr(field, 'allow_blank', False)
if allow_blank and not self.multiple:
kwargs['data-allow-blank'] = u'1'
return super(Select2Widget, self).__call__(field, **kwargs)
class Select2TagsWidget(widgets.TextInput):
"""`Select2 <http://ivaynberg.github.com/select2/#tags>`_ styled text widget.
You must include select2.js, form-x.x.x.js and select2 stylesheet for it to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'select2')
kwargs.setdefault('data-tags', u'1')
return super(Select2TagsWidget, self).__call__(field, **kwargs)
class DatePickerWidget(widgets.TextInput):
"""
Date picker widget.
You must include bootstrap-datepicker.js and form-x.x.x.js for styling to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'datepicker')
kwargs.setdefault('data-date-format', u'YYYY-MM-DD')
self.date_format = kwargs['data-date-format']
return super(DatePickerWidget, self).__call__(field, **kwargs)
class DateTimePickerWidget(widgets.TextInput):
"""
Datetime picker widget.
You must include bootstrap-datepicker.js and form-x.x.x.js for styling to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'datetimepicker')
kwargs.setdefault('data-date-format', u'YYYY-MM-DD HH:mm:ss')
return super(DateTimePickerWidget, self).__call__(field, **kwargs)
class TimePickerWidget(widgets.TextInput):
"""
    Time picker widget.
You must include bootstrap-datepicker.js and form-x.x.x.js for styling to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'timepicker')
kwargs.setdefault('data-date-format', u'HH:mm:ss')
return super(TimePickerWidget, self).__call__(field, **kwargs)
class RenderTemplateWidget(object):
"""
WTForms widget that renders Jinja2 template
"""
def __init__(self, template):
"""
Constructor
:param template:
Template path
"""
self.template = template
def __call__(self, field, **kwargs):
ctx = _request_ctx_stack.top
jinja_env = ctx.app.jinja_env
kwargs.update({
'field': field,
'_gettext': _,
'_ngettext': _,
})
template = jinja_env.get_template(self.template)
return template.render(kwargs)
class InlineFieldListWidget(RenderTemplateWidget):
def __init__(self):
super(InlineFieldListWidget, self).__init__('widgets/inline_field_list.html')
class InlineFormWidget(RenderTemplateWidget):
def __init__(self):
super(InlineFormWidget, self).__init__('widgets/inline_form.html')
def __call__(self, field, **kwargs):
kwargs.setdefault('form_opts', getattr(field, 'form_opts', None))
return super(InlineFormWidget, self).__call__(field, **kwargs)
class AjaxSelect2Widget(object):
def __init__(self, multiple=False):
self.multiple = multiple
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', 'select2-ajax')
kwargs.setdefault('data-url', get_url('.ajax_lookup', name=field.loader.name))
allow_blank = getattr(field, 'allow_blank', False)
if allow_blank and not self.multiple:
kwargs['data-allow-blank'] = u'1'
kwargs.setdefault('id', field.id)
kwargs.setdefault('type', 'hidden')
if self.multiple:
result = []
ids = []
for value in field.data:
data = field.loader.format(value)
result.append(data)
ids.append(data[0])
separator = getattr(field, 'separator', ',')
kwargs['value'] = separator.join(ids)
kwargs['data-json'] = json.dumps(result)
kwargs['data-multiple'] = u'1'
else:
data = field.loader.format(field.data)
if data:
kwargs['value'] = data[0]
kwargs['data-json'] = json.dumps(data)
placeholder = _(field.loader.options.get('placeholder', 'Please select model'))
kwargs.setdefault('data-placeholder', placeholder)
return widgets.HTMLString('<input %s>' % widgets.html_params(name=field.name, **kwargs))
class XEditableWidget(object):
"""
WTForms widget that provides in-line editing for the list view.
Determines how to display the x-editable/ajax form based on the
field inside of the FieldList (StringField, IntegerField, etc).
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-value', kwargs.pop('value', ''))
kwargs.setdefault('data-role', 'x-editable')
kwargs.setdefault('data-url', './ajax/update/')
kwargs.setdefault('id', field.id)
kwargs.setdefault('name', field.name)
kwargs.setdefault('href', '#')
if not kwargs.get('pk'):
raise Exception('pk required')
kwargs['data-pk'] = str(kwargs.pop("pk"))
kwargs['data-csrf'] = kwargs.pop("csrf", "")
# subfield is the first entry (subfield) from FieldList (field)
subfield = field.entries[0]
kwargs = self.get_kwargs(subfield, kwargs)
return widgets.HTMLString(
'<a %s>%s</a>' % (widgets.html_params(**kwargs),
escape(kwargs['data-value']))
)
def get_kwargs(self, subfield, kwargs):
"""
Return extra kwargs based on the subfield type.
"""
        if subfield.type == 'StringField':
            kwargs['data-type'] = 'text'
elif subfield.type == 'TextAreaField':
kwargs['data-type'] = 'textarea'
kwargs['data-rows'] = '5'
elif subfield.type == 'BooleanField':
kwargs['data-type'] = 'select'
# data-source = dropdown options
kwargs['data-source'] = {'': 'False', '1': 'True'}
kwargs['data-role'] = 'x-editable-boolean'
elif subfield.type == 'Select2Field':
kwargs['data-type'] = 'select'
choices = [{'value': x, 'text': y} for x, y in subfield.choices]
# prepend a blank field to choices if allow_blank = True
if getattr(subfield, 'allow_blank', False):
choices.insert(0, {'value': '__None', 'text': ''})
# json.dumps fixes issue with unicode strings not loading correctly
kwargs['data-source'] = json.dumps(choices)
elif subfield.type == 'DateField':
kwargs['data-type'] = 'combodate'
kwargs['data-format'] = 'YYYY-MM-DD'
kwargs['data-template'] = 'YYYY-MM-DD'
elif subfield.type == 'DateTimeField':
kwargs['data-type'] = 'combodate'
kwargs['data-format'] = 'YYYY-MM-DD HH:mm:ss'
kwargs['data-template'] = 'YYYY-MM-DD HH:mm:ss'
# x-editable-combodate uses 1 minute increments
kwargs['data-role'] = 'x-editable-combodate'
elif subfield.type == 'TimeField':
kwargs['data-type'] = 'combodate'
kwargs['data-format'] = 'HH:mm:ss'
kwargs['data-template'] = 'HH:mm:ss'
kwargs['data-role'] = 'x-editable-combodate'
elif subfield.type == 'IntegerField':
kwargs['data-type'] = 'number'
elif subfield.type in ['FloatField', 'DecimalField']:
kwargs['data-type'] = 'number'
kwargs['data-step'] = 'any'
elif subfield.type in ['Quer
|
PhilHarnish/forge
|
src/data/meta.py
|
Python
|
mit
| 1,361
| 0.011756
|
import collections
import typing
from typing import TypeVar
Key = TypeVar('Key')
class Meta(collections.OrderedDict, typing.MutableMapping[Key, float]):
def __init__(self, *args, **kwargs) -> None:
self._smallest = float('inf')
self._largest = 0
    self._ordered = True
super(Meta, self).__init__(*args, **kwargs)
def __setitem__(self, key: Key, value: float) -> None:
    if key in self and self[key] == value:
raise AssertionError('Redundant assignment: %s = %s' % (key, value))
if value > self._smallest:
self._ordered = False
else:
self._smallest = value
if value > self._largest:
self._largest = value
super(Meta, self).__setitem__(key, value)
self._changed()
def items(self) -> typing.ItemsView[Key, float]:
self._reorder()
return super(Meta, self).items()
def first(self) -> typing.Tuple[Key, float]:
self._reorder()
for k, v in self.items():
return k, v
def peek(self) -> Key:
self._reorder()
for first in self:
return first
def magnitude(self) -> float:
return self._largest
def _reorder(self) -> None:
if self._ordered:
return
order = sorted(super(Meta, self).items(), key=lambda x: x[1], reverse=True)
for k, v in order:
self.move_to_end(k)
self._ordered = True
def _changed(self) -> None:
pass
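# A minimal usage sketch (hypothetical keys and weights): items() always
# yields entries in descending weight order, even after out-of-order inserts.
if __name__ == '__main__':
  m = Meta()
  m['a'] = 0.5
  m['b'] = 0.9  # larger than the current smallest value, so a lazy re-sort is queued
  assert m.first() == ('b', 0.9)
  assert list(m.items()) == [('b', 0.9), ('a', 0.5)]
  assert m.magnitude() == 0.9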
|
Shopify/shopify_python
|
tests/functional/blank_line_after_class_required.py
|
Python
|
mit
| 178
| 0
|
# pylint:disable=missing-docstring,invalid-name,too-few-public-methods,old-style-class
class SomeClass: # [blank-line-after-class-required]
    def __init__(self):
        pass
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/loot/tool/shared_recording_rod_broken.py
|
Python
|
mit
| 461
| 0.045553
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/loot/tool/shared_recording_rod_broken.iff"
	result.attribute_template_id = -1
result.stfName("item_n","recording_rod_broken")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
kingmotley/SickRage
|
sickrage/recompiled/tags.py
|
Python
|
gpl-3.0
| 869
| 0.002301
|
import re
# Resolutions
resolution = re.compile(r'(?P<vres>4320|2160|1080|720|480|360)(?P<scan>[pi])', re.I)
# Sources
tv = re.compile(r'([sph]d).?tv|tv(rip|mux)', re.I)
dvd = re.compile(r'(?P<hd>hd)?dvd(?P<rip>rip|mux)?', re.I)
web = re.compile(r'(web(?P<type>rip|mux|hd|.?dl|\b))', re.I)
bluray = re.compile(r'(blue?-?ray|b[rd](?:rip|mux))', re.I)
sat = re.compile(r'(dsr|satrip)', re.I)
itunes = re.compile(r'(itunes)', re.I)
netflix = re.compile(r'netflix(hd|uhd)', re.I)
# Codecs
avc = re.compile(r'([xh].?26[45])', re.I)
xvid = re.compile(r'(xvid|divx)', re.I)
mpeg = re.compile(r'(mpeg-?2)', re.I)
# anime
anime_sd = re.compile(r'(848x480|480p|360p|xvid)', re.I)
anime_hd = re.compile(r'((1280|960)x720|720p)', re.I)
anime_fullhd = re.compile(r'(1920x1080|1080p)', re.I)
anime_bluray = re.compile(r'(blue?-?ray|b[rd](?:rip|mux)|(?:\b|_)bd(?:\b|_))', re.I)
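# A quick illustrative check of the patterns above (hypothetical release name):
if __name__ == '__main__':
    name = 'Show.Name.S01E01.1080p.WEB-DL.x264'
    match = resolution.search(name)
    assert match and match.group('vres') == '1080'
    assert web.search(name) is not None
    assert avc.search(name) is not None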
|
lauhuiyik/same-page
|
src/lib/utils.py
|
Python
|
mit
| 1,770
| 0.015819
|
##########
import os
import web
import pylibmc
from etherpad_lite import EtherpadLiteClient
from jinja2 import Environment,FileSystemLoader
##########
def render(template_name, **context):
"""
    Jinja2 template handler.
    Renders the given HTML template with Jinja2; extra keyword
    arguments are substituted into the template context.
"""
extensions = context.pop('extensions', [])
globals = context.pop('globals', {})
jinja_env = Environment(loader = FileSystemLoader(
os.path.join(os.path.dirname(
os.path.dirname(__file__) ), 'templates') ),
extensions=extensions)
jinja_env.globals.update(globals)
return jinja_env.get_template(template_name).render(context)
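# A minimal usage sketch (hypothetical template name and context values):
#
#   html = render('index.html', title='Same Page', user='demo')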
##########
"""
Initialize Memcache
binary = True
This specifies that the client talks to memcached servers using the binary protocol.
'tcp_nodelay' = True
Setting this behavior will enable the TCP_NODELAY socket option which makes sense for TCP connections.
'ketama' = True
Setting this behavior is a shortcut for setting 'hash' to 'md5' and distribution to 'consistent ketama'.
"""
memcache = pylibmc.Client(['127.0.0.1'], binary = True,
behaviors = {
'ketama': True,
'tcp_nodelay': True,
})
##########
"""
Initializes etherpad instance
"""
etherpad = EtherpadLiteClient(base_params = {
    'apikey': '2cf9c48c15aa877c1aad4383aa2bd6abf7de75986f6ba036f90ba9ef18ac274c'
})
"""
Configuration for db
"""
db = web.database(user = 'guanhao97',
dbn = 'postgres',
db = 'same_page',
pw = '55popo')
|
lukasklein/django-newsletter2go
|
newsletter2go/backends.py
|
Python
|
bsd-3-clause
| 1,590
| 0.001258
|
# -*- coding: utf-8 -*-
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.message import sanitize_address
from django.conf import settings
import requests
import logging
logger = logging.getLogger(__name__)
class Newsletter2GoEmailBackend(BaseEmailBackend):
    n2g_api_endpoint = 'https://www.newsletter2go.de/de/api/send/email/'
def send_messages(self, emails):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
if not emails:
return
num_sent = 0
for email in emails:
if not email.recipients():
continue
from_email = sanitize_address(email.from_email, email.encoding)
recipients = [sanitize_address(addr, email.encoding)
for addr in email.recipients()]
logger.debug('Sending email from {0} to {1}'.format(from_email, ', '.join(recipients)))
for recipient in recipients:
payload = {
'key': settings.NEWSLETTER2GO_API_KEY,
'to': recipient,
'from': from_email,
'subject': email.subject,
}
payload['html' if email.content_subtype == 'html' else 'text'] = email.body
response = requests.post(self.n2g_api_endpoint, payload)
response_json = response.json()
if response_json.get('status') == 200:
num_sent += 1
return num_sent
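# A minimal usage sketch (hypothetical addresses; assumes NEWSLETTER2GO_API_KEY
# is configured in Django settings):
#
#   from django.core.mail.message import EmailMessage
#   msg = EmailMessage('Subject', 'Body', 'from@example.com', ['to@example.com'])
#   sent = Newsletter2GoEmailBackend().send_messages([msg])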
|
yuxng/Deep_ISM
|
FCN/lib/setup.py
|
Python
|
mit
| 5,133
| 0.003702
|
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import os
from os.path import join as pjoin
import numpy as np
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
def find_in_path(name, path):
"Find a file in a search path"
    #adapted from http://code.activestate.com/recipes/52224-find-a-file-given-a-search-path/
for dir in path.split(os.pathsep):
binpath = pjoin(dir, name)
if os.path.exists(binpath):
return os.path.abspath(binpath)
return None
def locate_cuda():
"""Locate the CUDA environment on the system
Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64'
and values giving the absolute path to each directory.
Starts by looking for the CUDAHOME env variable. If not found, everything
is based on finding 'nvcc' in the PATH.
"""
# first check if the CUDAHOME env variable is in use
if 'CUDAHOME' in os.environ:
home = os.environ['CUDAHOME']
nvcc = pjoin(home, 'bin', 'nvcc')
else:
# otherwise, search the PATH for NVCC
default_path = pjoin(os.sep, 'usr', 'local', 'cuda', 'bin')
nvcc = find_in_path('nvcc', os.environ['PATH'] + os.pathsep + default_path)
if nvcc is None:
raise EnvironmentError('The nvcc binary could not be '
'located in your $PATH. Either add it to your path, or set $CUDAHOME')
home = os.path.dirname(os.path.dirname(nvcc))
cudaconfig = {'home':home, 'nvcc':nvcc,
'include': pjoin(home, 'include'),
'lib64': pjoin(home, 'lib64')}
for k, v in cudaconfig.iteritems():
if not os.path.exists(v):
raise EnvironmentError('The CUDA %s path could not be located in %s' % (k, v))
return cudaconfig
CUDA = locate_cuda()
# Obtain the numpy include directory. This logic works across numpy versions.
try:
numpy_include = np.get_include()
except AttributeError:
numpy_include = np.get_numpy_include()
def customize_compiler_for_nvcc(self):
"""inject deep into distutils to customize how the dispatch
to gcc/nvcc works.
If you subclass UnixCCompiler, it's not trivial to get your subclass
injected in, and still have the right customizations (i.e.
distutils.sysconfig.customize_compiler) run on it. So instead of going
    the OO route, I have this. Note, it's kind of like a weird functional
subclassing going on."""
    # tell the compiler it can process .cu
self.src_extensions.append('.cu')
    # save references to the default compiler_so and _compile methods
default_compiler_so = self.compiler_so
super = self._compile
# now redefine the _compile method. This gets executed for each
# object but distutils doesn't have the ability to change compilers
# based on source extension: we add it.
def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
if os.path.splitext(src)[1] == '.cu':
# use the cuda for .cu files
self.set_executable('compiler_so', CUDA['nvcc'])
# use only a subset of the extra_postargs, which are 1-1 translated
# from the extra_compile_args in the Extension class
postargs = extra_postargs['nvcc']
else:
postargs = extra_postargs['gcc']
super(obj, src, ext, cc_args, postargs, pp_opts)
# reset the default compiler_so, which we might have changed for cuda
self.compiler_so = default_compiler_so
    # inject our redefined _compile method into the class
self._compile = _compile
# run the customize_compiler
class custom_build_ext(build_ext):
def build_extensions(self):
customize_compiler_for_nvcc(self.compiler)
build_ext.build_extensions(self)
ext_modules = [
Extension('normals.gpu_normals',
['normals/compute_normals.cu', 'normals/gpu_normals.pyx', 'normals/cuda/convolutionSeparable.cu', 'normals/cuda/cuda_pcl_helpers.cu'],
              library_dirs=[CUDA['lib64']],
libraries=['cudart'],
language='c++',
runtime_library_dirs=[CUDA['lib64']],
# this syntax is specific to this build system
# we're only going to use certain compiler args with nvcc and not with gcc
# the implementation of this trick is in customize_compiler() below
extra_compile_args={'gcc': ["-Wno-unused-function"],
'nvcc': ['-arch=sm_35',
'--ptxas-options=-v',
'-c',
'--compiler-options',
"'-fPIC'"]},
include_dirs = [numpy_include, CUDA['include'], './normals/include', '/usr/local/include/eigen3']
)
]
setup(
name='fcn',
ext_modules=ext_modules,
# inject our custom trigger
cmdclass={'build_ext': custom_build_ext},
)
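# Typical invocation for a Cython/CUDA build like this one (illustrative;
# assumes nvcc is on PATH or CUDAHOME is set):
#   python setup.py build_ext --inplace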
|
futurepr0n/Books-solutions
|
Python-For-Everyone-Horstmann/Chapter3-Decisions/P3.22.py
|
Python
|
mit
| 2,601
| 0.001924
|
# Write a program that prompts for the day and month of the user’s birthday and then
# prints a horoscope. Make up fortunes for programmers, like this:
# Please enter your birthday.
# month: 6
# day: 16
# Gemini are experts at figuring out the behavior of complicated programs.
# You feel where bugs are coming from and then stay one step ahead. Tonight,
# your style wins approval from a tough critic.
# Each fortune should contain the name of the astrological sign. (You will find the
# names and date ranges of the signs at a distressingly large number of sites on the
# Internet.)
print("Please enter your birthday.")
month = int(input("month: "))
day = int(input("day: "))
sign = ""
if month == 1:
if day <= 19:
sign = "Capricorn"
else:
sign = "Aquarius"
elif month == 2:
if day <= 18:
sign = "Aquari
|
us"
else:
sign = "Pisces"
elif month == 3:
if day <= 20:
sign = "Pisces"
else:
sign = "Aries"
elif month == 4:
if day <= 19:
sign = "Aries"
else:
sign = "Taurus"
elif month == 5:
if day <= 20:
sign = "Taurus"
else:
sign = "Gemini"
elif month == 6:
if day <= 21:
sign = "Gemini"
else:
sign = "Cancer"
elif month == 7:
if day <= 22:
sign = "Cancer"
else:
sign = "Leo"
elif month == 8:
if day <= 22:
sign = "Leo"
else:
sign = "Virgo"
elif month == 9:
if day <= 22:
sign = "Virgo"
else:
sign = "Libra"
elif month == 10:
if day <= 23:
sign = "Libra"
else:
sign = "Scorpio"
elif month == 11:
if day <= 21:
sign = "Scorpio"
else:
sign = "Sagittarius"
elif month == 12:
if day <= 21:
sign = "Sagittarius"
else:
sign = "Capricorn"
# fortunes
if sign == "Capricorn":
print("Example Capricorn fortune.")
elif sign == "Aquarius":
print("Example Aquarius fortune.")
elif sign == "Pisces":
print("Example Pisces fortune.")
elif sign == "Aries":
print("Example Aries fortune.")
elif sign == "Taurus":
print("Example Taurus fortune.")
elif sign == "Gemini":
print("Example Gemini fortune.")
elif sign == "Cancer":
print("You have cancer, you don't get a fortune.")
elif sign == "Leo":
print("Example Leo fortune")
elif sign == "Virgo":
print("Example Virgo fortune")
elif sign == "Libra":
print("Example Libra fortune")
elif sign == "Scorpio":
print("Example Scorpio fortune")
else:
print("Unknown sign. You're a special one")
|
JohnSpeno/owney
|
owney/conf/settings.py
|
Python
|
mit
| 635
| 0.006299
|
from django.conf import settings
_TRACKING_USPS_URL = 'http://trkcnfrm1.smi.usps.com/PTSInternetWeb/InterLabelInquiry.do?origTrackNum='
TRACKING_USPS_URL = getattr(settings, 'OWNEY_USPS_TRACKING_URL', _TRACKING_USPS_URL)
_USPS_API_URL = 'http://production.shippingapis.com/ShippingAPI.dll'
USPS_API_URL = getattr(settings, 'OWNEY_USPS_API_URL', _USPS_API_URL)
_USPS_API_USERID = 'Set your USPS API userid here'
USPS_API_USERID = getattr(settings, 'OWNEY_USPS_API_USERID', _USPS_API_USERID)
_CS_URL = 'Set the URL for your Customer Service application here'
TRACKING_CS_URL = getattr(settings, 'OWNEY_TRACKING_CS_URL', _CS_URL)
|
Huyuwei/tvm
|
python/tvm/contrib/debugger/debug_runtime.py
|
Python
|
apache-2.0
| 8,948
| 0.000894
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Graph debug runtime executes TVM debug packed functions."""
import os
import tempfile
import shutil
from tvm._ffi.base import string_types
from tvm._ffi.function import get_global_func
from tvm.contrib import graph_runtime
from tvm.ndarray import array
from tvm.rpc import base as rpc_base
from . import debug_result
_DUMP_ROOT_PREFIX = "tvmdbg_"
_DUMP_PATH_PREFIX = "_tvmdbg_"
def create(graph_json_str, libmod, ctx, dump_root=None):
"""Create a runtime executor module given a graph and module.
Parameters
----------
graph_json_str : str or graph class
The graph to be deployed in json format output by nnvm graph.
The graph can only contain one operator(tvm_op) that
points to the name of PackedFunc in the libmod.
libmod : tvm.Module
The module of the corresponding function.
ctx : TVMContext
The context to deploy the module, can be local or remote.
dump_root : str
To select which folder the outputs should be kept.
        If None, a temporary folder /tmp/tvmdbg<rand_string> is created and used for dumping.
Returns
-------
    graph_module : GraphModuleDebug
        Debug Runtime graph module that can be used to execute the graph.
    """
if not isinstance(graph_json_str, string_types):
try:
graph_json_str = graph_json_str._tvm_graph_json()
except AttributeError:
raise ValueError("Type %s is not supported" % type(graph_json_str))
try:
fcreate = get_global_func("tvm.graph_runtime_debug.create")
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
if num_rpc_ctx == len(ctx):
libmod = rpc_base._ModuleHandle(libmod)
try:
fcreate = ctx[0]._rpc_sess.get_function(
"tvm.graph_runtime_debug.remote_create"
)
except ValueError:
raise ValueError(
"Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
"config.cmake and rebuild TVM to enable debug mode"
)
func_obj = fcreate(graph_json_str, libmod, *device_type_id)
return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
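# A minimal usage sketch (hypothetical graph, lib and input objects), mirroring
# the plain graph_runtime workflow:
#
#   m = create(graph_json_str, lib, tvm.cpu(0), dump_root='/tmp/tvmdbg')
#   m.set_input('data', tvm.nd.array(x))
#   m.run()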
class GraphModuleDebug(graph_runtime.GraphModule):
"""Graph debug runtime module.
This is a debug wrapper over the TVM runtime.
Runtime interfaces are wrapped with debug functionalities.
Manage the debug framework to format the debug data and
trigger the user interfaces.
Parameters
----------
module : Module
        The internal tvm module that holds the actual graph functions.
ctx : TVMContext
The context this module is under.
graph_json_str : str or graph class
Content of graph json file in string format
dump_root : str
To select which folder the outputs should be kept.
        If None, a temporary folder /tmp/tvmdbg<rand_string> is created and used for dumping.
"""
def __init__(self, module, ctx, graph_json_str, dump_root):
self._dump_root = dump_root
self._dump_path = None
self._get_output_by_layer = module["get_output_by_layer"]
self._run_individual = module["run_individual"]
graph_runtime.GraphModule.__init__(self, module)
self._create_debug_env(graph_json_str, ctx)
def _format_context(self, ctx):
return str(ctx[0]).upper().replace("(", ":").replace(")", "")
def _ensure_dir(self, directory):
"""Create a directory if not exists
Parameters
----------
directory : str
File path to create
"""
if not os.path.exists(directory):
os.makedirs(directory, 0o700)
def _get_dump_path(self, ctx):
"""Make the graph and tensor dump folder and return the path.
Parameters
----------
ctx : TVMContext
The context this module is under.
Returns
-------
path : str
Directory path where the graph and node outputs will be stored.
"""
# save to file
folder_name = _DUMP_PATH_PREFIX + "ctx_"
folder_name = folder_name + ctx.replace(":", "_")
path = os.path.join(self._dump_root, folder_name)
self._ensure_dir(path)
return path
def _remove_dump_root(self):
if os.path.isdir(self._dump_root):
shutil.rmtree(self._dump_root)
def _create_debug_env(self, graph_json, ctx):
"""Create UI wrapper framework to handle multiple UI frontends for tvmdbg
Parameters
----------
graph_json : json format
json formatted NNVM graph contain list of each node's name, shape and type.
nodes_list : list
List of all the nodes presented in the graph
ctx : TVMContext
The context this module is under.
"""
# make the dump folder if not given
if not self._dump_root:
self._dump_root = tempfile.mkdtemp(prefix=_DUMP_ROOT_PREFIX)
# format the context
ctx = self._format_context(ctx)
# updates the dumping directories
self._dump_path = self._get_dump_path(ctx)
# init the debug dumping environment
self.debug_datum = debug_result.DebugResult(graph_json, self._dump_path)
def _run_debug(self):
"""Execute the node specified with index will be executed.
Each debug output will be copied to the buffer
Time consumed for each execution will be set as debug output.
"""
self.debug_datum._time_list = [
[float(t) * 1e-6] for t in self.run_individual(10, 1, 1)
]
for i, node in enumerate(self.debug_datum.get_graph_nodes()):
num_outputs = self.debug_datum.get_graph_node_output_num(node)
for j in range(num_outputs):
out_tensor = self._get_output_by_layer(i, j)
out_tensor = array(out_tensor)
self.debug_datum._output_tensor_list.append(out_tensor)
def debug_get_output(self, node, out):
"""Run graph up to node and get the output to out
Parameters
----------
node : int / str
The node index or name
out : NDArray
The output array container
"""
ret = None
if isinstance(node, str):
output_tensors = self.debug_datum.get_output_tensors()
try:
ret = output_tensors[node]
            except KeyError:
node_list = output_tensors.keys()
raise RuntimeError(
"Node "
+ node
+ " not found, available nodes are: "
+ str(node_list)
+ "."
)
elif isinstance(node, int):
output_tensors = self.debug_datum._output_tensor_list
ret = output_tensors[node]
else:
raise RuntimeError("Require node index or name only.")
return ret
def run(self, **input_dict):
"""Run forward execution of the graph with debug
Parameters
----------
input_dict : dict of str to NDArray
List of input val
|
zhanqxun/cv_fish
|
pythonwin/pywin/Demos/dyndlg.py
|
Python
|
apache-2.0
| 2,568
| 0.030763
|
# dyndlg.py
# contributed by Curt Hagenlocher <chi@earthlink.net>
# Dialog Template params:
# Parameter 0 - Window caption
# Parameter 1 - Bounds (rect tuple)
# Parameter 2 - Window style
# Parameter 3 - Extended style
# Parameter 4 - Font tuple
# Parameter 5 - Menu name
# Parameter 6 - Window class
# Dialog item params:
# Parameter 0 - Window class
# Parameter 1 - Text
# Parameter 2 - ID
# Parameter 3 - Bounds
# Parameter 4 - Style
# Parameter 5 - Extended style
# Parameter 6 - Extra data
import win32ui
import win32con
from pywin.mfc import dialog, window
def MakeDlgTemplate():
style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
cs = win32con.WS_CHILD | win32con.WS_VISIBLE
dlg = [ ["Select Warehouse", (0, 0, 177, 93), style, None, (8, "MS Sans Serif")], ]
dlg.append([130, "Current Warehouse:", -1, (7, 7, 69, 9), cs | win32con.SS_LEFT])
dlg.append([130, "ASTORIA", 128, (16, 17, 99, 7), cs | win32con.SS_LEF
|
T])
dlg.append([130, "New &Warehouse:", -1, (7, 29, 69, 9), cs | win32con.SS_LEFT])
s = win32con.WS_TABSTOP | cs
# dlg.append([131, None, 130, (5, 40, 110, 48),
#	 s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER])
dlg.append(["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),win32con.WS_TABSTOP])
dlg.append([128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
s = win32con.BS_PUSHBUTTON | s
dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14), s])
dlg.append([128, "&Help", 100, (124, 74, 50, 14), s])
return dlg
def test1():
win32ui.CreateDialogIndirect( MakeDlgTemplate() ).DoModal()
def test2():
dialog.Dialog( MakeDlgTemplate() ).DoModal()
def test3():
dlg = win32ui.LoadDialogResource(win32ui.IDD_SET_TABSTOPS)
dlg[0][0] = 'New Dialog Title'
dlg[0][1] = (80, 20, 161, 60)
dlg[1][1] = '&Confusion:'
cs = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON
dlg.append([128, "&Help", 100, (111, 41, 40, 14), cs])
dialog.Dialog( dlg ).DoModal()
def test4():
page1=dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO1))
page2=dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO2))
ps=dialog.PropertySheet('Property Sheet/Page Demo', None, [page1, page2])
ps.DoModal()
def testall():
test1()
test2()
test3()
test4()
if __name__=='__main__':
testall()
|
arunchaganty/presidential-debates
|
third-party/stanza/stanza/research/output.py
|
Python
|
mit
| 770
| 0
|
import sys
def output_results(results, split_id='results', output_stream=None):
'''
Log `results` readably to `output_stream`, with a header
containing `split_id`.
:param results: a dictionary of summary statistics from an evaluation
:type results: dict(str -> object)
:param str split_id: an identifier for the source of `results` (e.g. 'dev')
:param file output_stream: the file-like object to which to log the results
(default: stdout)
'''
if output_stream is None:
        output_stream = sys.stdout
output_stream.write('----- %s -----\n' % split_id)
    for name in sorted(results.keys()):
output_stream.write('%s: %s\n' % (name, repr(results[name])))
output_stream.flush()
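# A minimal usage sketch (hypothetical metrics), and the output it would log:
#
#   output_results({'accuracy': 0.91, 'loss': 0.27}, split_id='dev')
#   # ----- dev -----
#   # accuracy: 0.91
#   # loss: 0.27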
|
plotly/plotly.py
|
packages/python/plotly/plotly/graph_objs/scattergeo/marker/_colorbar.py
|
Python
|
mit
| 78,668
| 0.000953
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class ColorBar(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scattergeo.marker"
_path_str = "scattergeo.marker.colorbar"
_valid_props = {
"bgcolor",
"bordercolor",
"borderwidth",
"dtick",
"exponentformat",
"len",
"lenmode",
"minexponent",
"nticks",
"orientation",
"outlinecolor",
"outlinewidth",
"separatethousands",
"showexponent",
"showticklabels",
"showtickprefix",
"showticksuffix",
"thickness",
"thicknessmode",
"tick0",
"tickangle",
"tickcolor",
"tickfont",
"tickformat",
"tickformatstopdefaults",
"tickformatstops",
"ticklabeloverflow",
"ticklabelposition",
"ticklabelstep",
"ticklen",
"tickmode",
"tickprefix",
"ticks",
"ticksuffix",
"ticktext",
"ticktextsrc",
"tickvals",
"tickvalssrc",
"tickwidth",
"title",
"titlefont",
"titleside",
"x",
"xanchor",
"xpad",
"y",
"yanchor",
"ypad",
}
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the color of padded area.
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bgcolor"]
@bgcolor.setter
def bgcolor(self, val):
self["bgcolor"] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the axis line color.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
          - An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
                chartreuse, chocolate, coral, cornflowerblue,
                cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["bordercolor"]
@bordercolor.setter
def bordercolor(self, val):
self["bordercolor"] = val
# borderwidth
# -----------
@property
def borderwidth(self):
"""
        Sets the width (in px) of the border enclosing this color bar.
The 'borderwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["borderwidth"]
@borderwidth.setter
def borderwidth(self, val):
self["borderwidth"] = val
# dtick
# -----
@property
def dtick(self):
"""
Sets the step in-between ticks on this axis. Use with `tick0`.
Must be a positive number, or special strings available to
"log" and "date" axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick number. For
example, to set a tick mark at 1, 10, 100, 1000, ... set dtick
to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2.
To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special values;
"L<f>", where `f` is a positive number, gives ticks linearly
spaced in value (but not position). For example `tick0` = 0.1,
|
Hackplayers/Empire-mod-Hpys-tests
|
lib/modules/powershell/management/lock.py
|
Python
|
bsd-3-clause
| 3,056
| 0.008835
|
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-LockWorkStation',
'Author': ['@harmj0y'],
'Description': ("Locks the workstation's display."),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : False,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'http://poshcode.org/1640'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
                'Description'   :   'Agent to run module on.',
'Required' : True,
'Value' : ''
}
}
        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
script = """
Function Invoke-LockWorkStation {
# region define P/Invoke types dynamically
# stolen from PowerSploit https://github.com/mattifestation/PowerSploit/blob/master/Mayhem/Mayhem.psm1
# thanks matt and chris :)
$DynAssembly = New-Object System.Reflection.AssemblyName('Win32')
$AssemblyBuilder = [AppDomain]::CurrentDomain.DefineDynamicAssembly($DynAssembly, [Reflection.Emit.AssemblyBuilderAccess]::Run)
$ModuleBuilder = $AssemblyBuilder.DefineDynamicModule('Win32', $False)
$TypeBuilder = $ModuleBuilder.DefineType('Win32.User32', 'Public, Class')
$DllImportConstructor = [Runtime.InteropServices.DllImportAttribute].GetConstructor(@([String]))
$SetLastError = [Runtime.InteropServices.DllImportAttribute].GetField('SetLastError')
$SetLastErrorCustomAttribute = New-Object Reflection.Emit.CustomAttributeBuilder($DllImportConstructor,
@('User32.dll'),
[Reflection.FieldInfo[]]@($SetLastError),
@($True))
# Define [Win32.User32]::LockWorkStation()
$PInvokeMethod = $TypeBuilder.DefinePInvokeMethod('LockWorkStation',
'User32.dll',
([Reflection.MethodAttributes]::Public -bor [Reflection.MethodAttributes]::Static),
[Reflection.CallingConventions]::Standard,
[Bool],
[Type[]]@(),
[Runtime.InteropServices.CallingConvention]::Winapi,
[Runtime.InteropServices.CharSet]::Ansi)
$PInvokeMethod.SetCustomAttribute($SetLastErrorCustomAttribute)
$User32 = $TypeBuilder.CreateType()
$Null = $User32::LockWorkStation()
}
Invoke-LockWorkStation; "Workstation locked."
"""
return script
|
XTAv2/Enigma2
|
lib/python/Screens/InstallWizard.py
|
Python
|
gpl-2.0
| 5,974
| 0.026783
|
from Screens.Screen import Screen
from Components.ConfigList import ConfigListScreen, ConfigList
from Components.ActionMap import ActionMap
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigSubsection, ConfigBoolean, getConfigListEntry, ConfigSelection, ConfigYesNo, ConfigIP
from Components.Network import iNetwork
from Components.Ipkg import IpkgComponent
from enigma import eDVBDB
config.misc.installwizard = ConfigSubsection()
config.misc.installwizard.hasnetwork = ConfigBoolean(default = False)
config.misc.installwizard.ipkgloaded = ConfigBoolean(default = False)
config.misc.installwizard.channellistdownloaded = ConfigBoolean(default = False)
class InstallWizard(Screen, ConfigListScreen):
STATE_UPDATE = 0
STATE_CHOISE_CHANNELLIST = 1
STATE_CHOISE_SOFTCAM = 2
def __init__(self, session, args = None):
Screen.__init__(self, session)
self.index = args
self.list = []
ConfigListScreen.__init__(self, self.list)
if self.index == self.STATE_UPDATE:
config.misc.installwizard.hasnetwork.value = False
config.misc.installwizard.ipkgloaded.value = False
modes = {0: " "}
self.enabled = ConfigSelection(choices = modes, default = 0)
self.adapters = [(iNetwork.getFriendlyAdapterName(x),x) for x in iNetwork.getAdapterList()]
is_found = False
for x in self.adapters:
if x[1] == 'eth0' or x[1] == 'eth1':
if iNetwork.getAdapterAttribute(x[1], 'up'):
self.ipConfigEntry = ConfigIP(default = iNetwork.getAdapterAttribute(x[1], "ip"))
iNetwork.checkNetworkState(self.checkNetworkCB)
						is_found = True
else:
iNetwork.restartNetwork(self.checkNetworkLinkCB)
break
if is_found is False:
self.createMenu()
elif self.index == self.STATE_CHOISE_CHANNELLIST:
self.enabled = ConfigYesNo(default = True)
modes = {"openxta": "XTA(13e-19e)", "19e": "Astra 1", "23e": "Astra 3", "19e-23e": "Astra 1 Astra 3", "19e-23e-28e": "Astra 1 Astra 2 Astra 3", "13e-19e-23e-28e": "Astra 1 Astra 2 Astra 3 Hotbird"}
self.channellist_type = ConfigSelection(choices = modes, default = "openxta")
self.createMenu()
elif self.index == self.STATE_CHOISE_SOFTCAM:
self.enabled = ConfigYesNo(default = True)
modes = {"cccam": _("default") + " (CCcam)", "scam": "scam"}
self.softcam_type = ConfigSelection(choices = modes, default = "cccam")
self.createMenu()
def checkNetworkCB(self, data):
if data < 3:
config.misc.installwizard.hasnetwork.value = True
self.createMenu()
def checkNetworkLinkCB(self, retval):
if retval:
iNetwork.checkNetworkState(self.checkNetworkCB)
else:
self.createMenu()
def createMenu(self):
try:
test = self.index
except:
return
self.list = []
if self.index == self.STATE_UPDATE:
if config.misc.installwizard.hasnetwork.value:
self.list.append(getConfigListEntry(_("Your internet connection is working (ip: %s)") % (self.ipConfigEntry.getText()), self.enabled))
else:
self.list.append(getConfigListEntry(_("Your receiver does not have an internet connection"), self.enabled))
elif self.index == self.STATE_CHOISE_CHANNELLIST:
self.list.append(getConfigListEntry(_("Install channel list"), self.enabled))
if self.enabled.value:
self.list.append(getConfigListEntry(_("Channel list type"), self.channellist_type))
elif self.index == self.STATE_CHOISE_SOFTCAM:
self.list.append(getConfigListEntry(_("Install softcam"), self.enabled))
if self.enabled.value:
self.list.append(getConfigListEntry(_("Softcam type"), self.softcam_type))
self["config"].list = self.list
self["config"].l.setList(self.list)
def keyLeft(self):
if self.index == 0:
return
ConfigListScreen.keyLeft(self)
self.createMenu()
def keyRight(self):
if self.index == 0:
return
ConfigListScreen.keyRight(self)
self.createMenu()
def run(self):
if self.index == self.STATE_UPDATE:
if config.misc.installwizard.hasnetwork.value:
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (updating packages)'), IpkgComponent.CMD_UPDATE)
elif self.index == self.STATE_CHOISE_CHANNELLIST and self.enabled.value:
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading channel list)'), IpkgComponent.CMD_REMOVE, {'package': 'enigma2-plugin-settings-henksat-' + self.channellist_type.value})
elif self.index == self.STATE_CHOISE_SOFTCAM and self.enabled.value:
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading softcam)'), IpkgComponent.CMD_INSTALL, {'package': 'enigma2-plugin-softcams-' + self.softcam_type.value})
return
class InstallWizardIpkgUpdater(Screen):
skin = """
<screen position="c-300,c-25" size="600,50" title=" ">
<widget source="statusbar" render="Label" position="10,5" zPosition="10" size="e-10,30" halign="center" valign="center" font="Regular;22" transparent="1" shadowColor="black" shadowOffset="-1,-1" />
</screen>"""
def __init__(self, session, index, info, cmd, pkg = None):
self.skin = InstallWizardIpkgUpdater.skin
Screen.__init__(self, session)
self["statusbar"] = StaticText(info)
self.pkg = pkg
self.index = index
self.state = 0
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
if self.index == InstallWizard.STATE_CHOISE_CHANNELLIST:
			self.ipkg.startCmd(cmd, {'package': 'enigma2-plugin-settings-*'})
else:
self.ipkg.startCmd(cmd, pkg)
def ipkgCallback(self, event, param):
if event == IpkgComponent.EVENT_DONE:
if self.index == InstallWizard.STATE_UPDATE:
				config.misc.installwizard.ipkgloaded.value = True
elif self.index == InstallWizard.STATE_CHOISE_CHANNELLIST:
if self.state == 0:
self.ipkg.startCmd(IpkgComponent.CMD_INSTALL, self.pkg)
self.state = 1
return
else:
config.misc.installwizard.channellistdownloaded.value = True
eDVBDB.getInstance().reloadBouquets()
eDVBDB.getInstance().reloadServicelist()
self.close()
|
swxs/web
|
cloudplat/center/Models/Tools_todo_Model.py
|
Python
|
mit
| 1,457
| 0.004844
|
# -*- coding: utf-8 -*-
from django.db import models
from ..Models import *
from center.Exceptions.ModelsExceptions import *
class Tools_todo(models.Model):
STATUS_TYPES = (
(1, u'New'),
(2, u'Doing'),
(3, u'Waiting'),
(4, u'Done'),
)
SPECIES_TYPES = (
        (1, u'Task'),   # task
        (2, u'Check'),  # check
)
PRIORITY_TYPES = (
(1, 'Low'),
(2, 'Normal'),
(3, 'High'),
)
user = models.ForeignKey(User)
title = models.CharField(max_length=100)
summary = models.TextField()
status = models.IntegerField(choices=STATUS_TYPES, default=1)
species = models.IntegerField(choices=SPECIES_TYPES, default=1)
priority = models.IntegerField(choices=PRIORITY_TYPES, default=2)
creation_date = models.DateTimeField(auto_now_add=True)
modify_date = models.DateTimeField(auto_now=True)
model_name = "计划"
def create(self, *args, **kwargs):
        try:
            super(Tools_todo, self).save(*args, **kwargs)
except:
raise createException(self.model_name)
    def update(self, *args, **kwargs):
        try:
super(Tools_todo, self).update(*args, **kwargs)
except:
raise updateException(self.model_name)
def delete(self, *args, **kwargs):
try:
super(Tools_todo, self).delete(*args, **kwargs)
except:
raise updateException(self.model_name)
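# A minimal usage sketch (hypothetical user object and field values):
#
#   todo = Tools_todo(user=some_user, title=u'Write report',
#                     summary=u'Q3 numbers', priority=3)
#   todo.create()  # wraps Model.save() and raises createException on failure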
|
cooperlees/peerme
|
peerme/main.py
|
Python
|
bsd-2-clause
| 3,155
| 0.001585
|
import asyncio
import click
import logging
import sys
import time
from os.path import expanduser
from . import config as peerme_config
from . import peeringdb_mysql
from . import peeringdb_api
from . import euroix_json
from .commands.generate import GenerateConfigCli
from .commands.discover import DiscoverCli
from .commands.request import RequestCli
from .commands.version import VersionCli
CLICK_CONTEXT_SETTINGS = {'help_option_names': ('-h', '--help')}
class Options():
    ''' Object for holding shared state between subcommands '''
def __init__(self, debug, start_time, db, loop, config):
self.config = config
self.db = db
self.debug = debug
self.loop = loop
self.start_time = start_time
self.human_start_time = time.strftime('%Y%m%d%H%M%S', time.gmtime(start_time))
logging.debug("{} started @ {}".format(sys.argv[0], self.human_start_time))
def __str__(self):
''' String Representation for debugging '''
        return 'CLI started @ {} - Debug is {}'.format(self.human_start_time, self.debug)
def _handle_debug(ctx, param, debug):
    '''Turn on DEBUG logging if asked, otherwise default to INFO'''
log_level = logging.DEBUG if debug else logging.INFO
    logging.basicConfig(
        format=('[%(asctime)s] %(levelname)s: %(message)s (%(filename)s:%(lineno)d)'),
level=log_level,
)
return debug
@click.group(context_settings=CLICK_CONTEXT_SETTINGS)
@click.option(
'-c',
    '--config',
default='{}/.peerme.conf'.format(expanduser('~')),
help='Config File Location - Default: ~/.peerme.conf',
)
@click.option(
'-d',
'--debug',
is_flag=True,
help='Turn on verbose logging',
callback=_handle_debug,
)
@click.option(
'--refresh-data',
is_flag=True,
help='Fetch fresh data from Internet sources (EuroIX only)',
)
@click.option(
'-s',
'--data-source',
help='Choose Peering datasource (pdbapi [default], pdbsql, euroix)',
default='pdbapi',
)
@click.pass_context
def main(ctx, config, debug, data_source, refresh_data):
'''
Discover and generate potential peering endpoints @ IXs
'''
loop = asyncio.get_event_loop()
config_obj = peerme_config.PeermeConfig(config)
if data_source == 'pdbsql':
peering_api = peeringdb_mysql.PeermeDb(config_obj, loop)
elif data_source == 'pdbapi':
peering_api = peeringdb_api.PeermeDb(config_obj, loop)
elif data_source == 'euroix':
peering_api = euroix_json.PeermeDb(config_obj, refresh_data, loop)
else:
raise Exception('Invalid option "{}" for data source.'.format(data_source))
# Class to hold all shared options
ctx.obj = Options(debug, time.time(), peering_api, loop, config_obj)
def add_internal_modules():
''' Add internal modules to main parser '''
main.add_command(DiscoverCli().discover)
main.add_command(GenerateConfigCli().generate)
main.add_command(RequestCli().pinder)
main.add_command(VersionCli().version)
def script_entry():
# Add subcommands
add_internal_modules()
sys.exit(main())
if __name__ == '__main__':
script_entry()
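# Illustrative invocations (the entry-point name and subcommand are assumed
# from the commands registered above):
#   peerme --debug --data-source euroix discover
#   peerme version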
|
cs-chan/fuzzyDCN
|
prune_neon/transformation/cost.py
|
Python
|
bsd-3-clause
| 979
| 0.003064
|
from neon.transforms.cost import Cost
class MulticlsSVMLoss(Cost):
    def __init__(self, delta=1.):
self.delta = delta
def __call__(self, y, t):
T = self.be.empty_like(y)
T[:] = self.be.max(y * t, axis=0)
# T = self.be.array(self.be.max(y * t, axis=0).asnumpyarray(), y.shape[0], axis=0)
margin = self.be.square(self.be.maximum(0, y - T + self.delta)) * 0.5
return self.be.sum(margin) / self.be.bsz
def bprop(self, y, t):
T = self.be.empty_like(y)
T[:] = self.be.max(y * t, axis=0)
        return self.be.maximum(0, y - T + self.delta) / self.be.bsz
class L1SVMLoss(Cost):
def __init__(self, C=10):
self.C = C
def __call__(self, y, t):
return self.C * self.be.sum(self.be.square(self.be.maximum(0, 1 - y * (t * 2 - 1)))) * 0.5 / y.shape[0]
def bprop(self, y, t):
return - self.C * (t * 2 - 1) * self.be.maximum(0, 1 - y * (t * 2 - 1)) / self.be.bsz / y.shape[0]
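# For reference, the losses above compute (as implemented):
#   MulticlsSVMLoss: L = sum(max(0, y - T + delta)^2) / (2 * bsz),
#                    where T is the per-sample score of the true class, max(y * t).
#   L1SVMLoss:       L = C * sum(max(0, 1 - y*(2t - 1))^2) / (2 * y.shape[0]),
#                    with targets t in {0, 1} mapped to {-1, +1}.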
|
theouf/CMStools
|
CMS_Conf.py
|
Python
|
gpl-2.0
| 6,570
| 0.059209
|
#!/usr/bin/python
# coding=utf-8
'''
@authors: David,Erwan, Theo
version Python 2.7 and 3.4
This program configures Precidot and Novar according to 2 input files.
The first one is created from Eagle by a module "testeur_UM.pnp" and contains component and pin x & y coordinates.
The second one is not yet defined and will contain component locations in the Novar component room.
Below is the configuration of the machines.
'''
floppyBanks={
"precidot" : "bank4",
"novar" : "bank1"
}
# ~ Bank address on the floppy
hexAddr = {
'bank1' : 0x04000,
'bank2' : 0x16206,
'bank3' : 0x28206,
'bank4' : 0x3A000,
    'bank4P' : 0x39000 # TODO: check what this address is for
}
# ~ We define the dictionary pack2Mag with keys and their values
# ~ the value is the LAB
# ~ machine unit = 0.0508 mm
pack2Mag = {}
#pack2Mag['3,17/1,2'] = 0
pack2Mag['SO08'] = 1
pack2Mag['SO12'] = 2
pack2Mag['SO14'] = 3
pack2Mag['SO16'] = 4
pack2Mag['SO20'] = 5
pack2Mag['SO24'] = 6
pack2Mag['SO28'] = 7
pack2Mag['SOT23'] = 8
pack2Mag['SOT89'] = 9
pack2Mag['SOT143'] = 10
pack2Mag['SOT194'] = 11
pack2Mag['SOT223'] = 12
pack2Mag['SOD80'] = 13
pack2Mag['SOD87'] = 14
pack2Mag['0402'] = 15
pack2Mag['0603'] = 16
pack2Mag['0805'] = 17
pack2Mag['1206'] = 18
pack2Mag['1210'] = 19
pack2Mag['1812'] = 20
pack2Mag['2220'] = 21
pack2Mag['R3216'] = 22
LabInfo= {
'3,17/1,2':{'Lab':'0','submission':0,'tool':0,'speed':[0,4,0,0]},
'SO08':{'Lab':'1','submission':148,'tool':3,'speed':[0,5,0,0]},
'SO12':{'Lab':'2','submission':148,'tool':3,'speed':[0,5,0,0]},
'SO14':{'Lab':'3','submission':148,'tool':4,'speed':[0,5,0,0]},
'SO16':{'Lab':'4','submission':148,'tool':4,'speed':[0,5,0,0]},
'SO20':{'Lab':'5','submission':148,'tool':4,'speed':[0,5,0,0]},
'SO24':{'Lab':'6','submission':148,'tool':4,'speed':[0,5,0,0]},
'SO28':{'Lab':'7','submission':148,'tool':4,'speed':[0,5,0,0]},
'SOT23':{'Lab':'8','submission':223,'tool':2,'speed':[0,4,0,0]},
'SOT89':{'Lab':'9','submission':223,'tool':2,'speed':[0,4,0,0]},
'SOT143':{'Lab':'10','submission':148,'tool':3,'speed':[0,4,0,0]},
'SOT194':{'Lab':'11','submission':148,'tool':3,'speed':[0,4,0,0]},
'SOT223':{'Lab':'12','submission':148,'tool':3,'speed':[0,4,0,0]},
'SOD80':{'Lab':'13','submission':148,'tool':3,'speed':[0,4,0,0]},
'SOD87':{'Lab':'14','submission':148,'tool':3,'speed':[0,4,0,0]},
'0402':{'Lab':'15','submission':400,'tool':2,'speed':[0,4,0,0]},
'0603':{'Lab':'16','submission':400,'tool':2,'speed':[0,4,0,0]},
'0805':{'Lab':'17','submission':400,'tool':2,'speed':[0,4,0,0]},
'1206':{'Lab':'18','submission':400,'tool':2,'speed':[0,4,0,0]},
'1210':{'Lab':'19','submission':400,'tool':2,'speed':[0,4,0,0]},
'1812':{'Lab':'20','submission':400,'tool':2,'speed':[0,4,0,0]},
'2220':{'Lab':'21','submission':400,'tool':2,'speed':[0,4,0,0]},
'R3216':{'Lab':'22','submission':400,'tool':2,'speed':[0,4,0,0]}
}
# ~ We define the dictionary storeAddr with keys and 4 values
# ~ the first value is the MT
# ~ the second value is the LAB
# ~ the third value is the center Dx of the component
# ~ the fourth value is the center Dy of the component
# ~ machine unit = 0.0508 mm
# Values 3 and 4 are the machine coordinates of the component center:
# ((length / 0.0508) / 2)
# The buffer is fixed: all magazines with their Mag address
storeAddr ={
'0': [0,0,0,0],
'1': [0, 0, 153 , 1725],
'2': [0, 0, 153 , 2034],
'3': [0, 0, 153 , 2357],
'4': [0, 0, 153 , 2671],
'5': [0, 0, 153 , 2986],
'6': [0, 0, 153 , 3299],
'7': [0, 0, 153 , 3614],
'8': [0, 0, 153 , 3929],
'9': [0, 0, 153 , 4242],
'10': [0, 0, 153 , 4557],
'11': [0, 0, 153 , 4969],
'12': [0, 0, 153 , 5362],
'13': [0, 0, 153 , 5804],
'14': [0, 0, 153 , 6278], #14
'15': [0, 0, 153 , 6278],
'16': [0, 0, 153 , 6278],
'17': [0, 0, 153 , 6278],
'18': [0, 0, 153 , 6278],
'19': [0, 0, 153 , 6278],
'20': [0, 0, 7619, 5572],
'21': [0, 0, 7619, 5572], #21
'22': [0, 0, 7619 , 5253],
'23': [0, 0, 7619 , 4940],
'24': [0, 0, 7619 , 4625],
'25': [0, 0, 7619 , 4311],
'26': [0, 0, 7619 , 4005],
'27': [0, 0, 7619 , 3686],
'28': [0, 0, 7619 , 3369],
'29': [0, 0, 7698 , 3055],
'30': [0, 0, 7619 , 2737],
'31': [0, 0, 7690 , 2480],
'32': [0, 0, 7690 , 2087],
'33': [0, 0, 7690 , 1652],
'34': [0, 0, 7690 , 1190], #34
'35': [0, 0, 7690 , 1190],
'36': [0, 0, 7690 , 1190],
'37': [0, 0, 7690 , 1190],
'38': [0, 0, 7690 , 1190],
'39': [0, 0, 7690 , 1190],
'40': [0, 0, 7690 , 1190],
'41': [0, 0, 366 , 6855] #41
}
ListBuffer=[
[0,0,0,0],
[0, 0, 153 , 1725],
[0, 0, 153 , 2034],
[0, 0, 153 , 2357],
[0, 0, 153 , 2671],
[0, 0, 153 , 2986],
[0, 0, 153 , 3299],
[0, 0, 153 , 3614],
[0, 0, 153 , 3929],
[0, 0, 153 , 4242],
[0, 0, 153 , 4557],
[0, 0, 153 , 4969],
[0, 0, 153 , 5362],
[0, 0, 153 , 5804],
[0, 0, 153 , 6278], #14
[0, 0, 153 , 6278],
[0, 0, 153 , 6278],
[0, 0, 153 , 6278],
[0, 0, 153 , 6278],
[0, 0, 153 , 6278],
[0, 0, 7619, 5572],
[0, 0, 7619, 5572], #21
[0, 0, 7619 , 5253],
[0, 0, 7619 , 4940],
[0, 0, 7619 , 4625],
[0, 0, 7619 , 4311],
[0, 0, 7619 , 4005],
[0, 0, 7619 , 3686],
[0, 0, 7619 , 3369],
[0, 0, 7698 , 3055],
[0, 0, 7619 , 2737],
[0, 0, 7690 , 2480],
[0, 0, 7690 , 2087],
[0, 0, 7690 , 1652],
[0, 0, 7690 , 1190], #34
[0, 0, 7690 , 1190],
[0, 0, 7690 , 1190],
[0, 0, 7690 , 1190],
[0, 0, 7690 , 1190],
[0, 0, 7690 , 1190],
[0, 0, 7690 , 1190],
[0, 0, 366 , 6855] #41
]
|
maeotaku/leaf_recognition_sdk
|
src/Features/Morphology.py
|
Python
|
mit
| 3,273
| 0.018637
|
import numpy as np
import cv2
import math
from Contours import *
#Basic measures
def calcLeafMinAndMaxContourPoints(binaryImage, contours=None):
if (contours==None):
contours = getContours(binaryImage)
xs = contours[0][:,0][:,0]
ys = contours[0][:,0][:,1]
minx = np.min(xs)
miny = np.min(ys)
maxx = np.max(xs)
maxy = np.max(ys)
return minx, miny, maxx, maxy
def calcLeafWidthAndLength(binaryImage, contours=None, draw=False):
minx, miny, maxx, maxy = calcLeafMinAndMaxContourPoints(binaryImage, contours)
first = maxx - minx
second = maxy - miny
    if (first > second): # we always take the larger dimension as the length
length = first
width = second
else:
length = second
width = first
if (draw):
return length, width, binaryImage
else:
return length, width, binaryImage
#a binary image must be set, 0 for non-leaf, meaning segmentation has to be done previously
def calcLeafArea(binaryImage, draw=False):
if (draw):
return len(binaryImage[binaryImage > 0]), binaryImage
else:
return len(binaryImage[binaryImage > 0]), binaryImage
def calcLeafPerimeter(image, contours=None, draw=False):
if (contours==None):
contours = getContours(image)
if (draw):
return len(contours[0]), image
else:
return len(contours[0]), image
def calcLeafCompactness(binaryImage, area=None, perimeter=None, draw=False):
if (area==None):
area = calcLeafArea(binaryImage)
if(perimeter==None):
perimeter= calcLeafPerimeter(binaryImage)
if(draw):
return float(area) / float(math.sqrt(perimeter)), binaryImage
else:
return float(area) / float(math.sqrt(perimeter)), binaryImage
#longest distance between any 2 points of the margin of the leaf
def calcLeafDiameter(binaryImage, contours=None, draw=False):
return None
#Proportions
def calcLeafAspectRatio(binaryImage, width=None, length=None, contours=None, draw=False):
if (width==None or length==None):
length, width = calcLeafWidthAndLength(binaryImage, contours, draw)
if (draw):
        return float(length) / float(width), binaryImage
else:
return float(length) / float(width), binaryImage
#differ between the shape and a circle, also called Form Factor
def calcLeafRoundness(binaryImage, area=None, perimeter=None, draw=False):
if (area==None):
area= calcLeafArea(binaryImage)
if (perimeter==None):
perimeter=calcLeafPerimeter(binaryImage)
if (draw):
return float(4.0 * math.pi * area) / float(perimeter**2), binaryImage
else:
return float(4.0 * math.pi * area) / float(perimeter**2), binaryImage
def calcLeafRectangularity(binaryImage, width=None, length=None, area=None, contours=None, draw=False):
if (width==None or length==None):
length, width = calcLeafWidthAndLength(binaryImage, contours, draw)
if (area==None):
area = calcLeafArea(binaryImage)
if (draw):
return float(length * width) / float(area), binaryImage
else:
return float(length * width) / float(area), binaryImage
#Vein related
def calcMainVein(image, draw=False):
return None
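# A minimal usage sketch (hypothetical pre-segmented leaf mask):
#
#   mask = cv2.imread('leaf_mask.png', cv2.IMREAD_GRAYSCALE)
#   area, _ = calcLeafArea(mask)
#   perimeter, _ = calcLeafPerimeter(mask)
#   roundness, _ = calcLeafRoundness(mask, area=area, perimeter=perimeter)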
|
lowdev/alfred
|
robot/robotFactory.py
|
Python
|
gpl-3.0
| 721
| 0.006935
|
from .stt import ApiRobot
from .stt import BingRobot
from .stt import WatsonRobot
from .stt import WitaiRobot
from .stt import GoogleRobot
class RobotFactory:
@staticmethod
def produce(config, speaker, actions):
configSTT = config['stt']
        if configSTT == 'bing':
return BingRobot(config['bing'], speaker, actions)
if configSTT == 'watson':
return WatsonRobot(config['watson-stt'], speaker, actions)
if configSTT == 'witai':
return WitaiRobot(config['witai-stt'], speaker, actions)
        if configSTT == 'google':
return GoogleRobot(config['google-stt'], speaker, actions)
return ApiRobot(config['apiai'], speaker, actions)
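# A minimal usage sketch (hypothetical config dict, speaker and actions objects):
#
#   config = {'stt': 'google', 'google-stt': {...}}
#   robot = RobotFactory.produce(config, speaker, actions)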
|
satra/NiPypeold
|
nipype/externals/pynifti/funcs.py
|
Python
|
bsd-3-clause
| 2,788
| 0.001435
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
''' Processor functions for images '''
import numpy as np
def squeeze_image(img):
''' Return image, remove axes length 1 at end of image shape
For example, an image may have shape (10,20,30,1,1). In this case
squeeze will result in an image with shape (10,20,30). See doctests
for further description of behavior.
Parameters
----------
img : ``SpatialImage``
Returns
-------
squeezed_img : ``SpatialImage``
Copy of img, such that data, and data shape have been squeezed,
for dimensions > 3rd, and at the end of the shape list
Examples
--------
>>> import nipype.externals.pynifti as nf
>>> shape = (10,20,30,1,1)
>>> data = np.arange(np.prod(shape)).reshape(shape)
>>> affine = np.eye(4)
>>> img = nf.Nifti1Image(data, affine)
>>> img.get_shape()
(10, 20, 30, 1, 1)
>>> img2 = squeeze_image(img)
>>> img2.get_shape()
(10, 20, 30)
If the data are 3D then last dimensions of 1 are ignored
>>> shape = (10,1,1)
>>> data = np.arange(np.prod(shape)).reshape(shape)
>>> img = nf.ni1.Nifti1Image(data, affine)
>>> img.get_shape()
(10, 1, 1)
>>> img2 = squeeze_image(img)
>>> img2.get_shape()
(10, 1, 1)
Only *final* dimensions of 1 are squeezed
>>> shape = (1, 1, 5, 1, 2, 1, 1)
>>> data = data.reshape(shape)
>>> img = nf.ni1.Nifti1Image(data, affine)
>>> img.get_shape()
(1, 1, 5, 1, 2, 1, 1)
>>> img2 = squeeze_image(img)
>>> img2.get_shape()
    (1, 1, 5, 1, 2)
'''
klass = img.__class__
    shape = img.get_shape()
    slen = len(shape)
if slen < 4:
return klass.from_image(img)
for bdim in shape[3::][::-1]:
if bdim == 1:
slen-=1
else:
break
if slen == len(shape):
return klass.from_image(img)
shape = shape[:slen]
data = img.get_data()
data = data.reshape(shape)
return klass(data,
img.get_affine(),
img.get_header(),
img.extra)
def concat_images(images):
''' Concatenate images in list to single image, along last dimension '''
n_imgs = len(images)
img0 = images[0]
i0shape = img0.get_shape()
affine = img0.get_affine()
header = img0.get_header()
out_shape = (n_imgs, ) + i0shape
out_data = np.empty(out_shape)
for i, img in enumerate(images):
if not np.all(img.get_affine() == affine):
raise ValueError('Affines do not match')
out_data[i] = img.get_data()
out_data = np.rollaxis(out_data, 0, len(i0shape)+1)
klass = img0.__class__
return klass(out_data, affine, header)
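# A minimal usage sketch (hypothetical 3D images sharing one affine):
#
#   vol4d = concat_images([img_a, img_b, img_c])   # (X, Y, Z) -> (X, Y, Z, 3)
#   img3d = squeeze_image(img_with_trailing_ones)  # drops final length-1 dims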
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/py-flask-socketio/package.py
|
Python
|
lgpl-2.1
| 2,117
| 0.000945
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyFlaskSocketio(PythonPackage):
"""Flask-SocketIO gives Flask applications access to low latency
bi-directional communications between the clients and the server.
The client-side application can use any of the SocketIO official clients
libraries in Javascript, C++, Java and Swift, or any compatible client to
establish a permanent connection to the server.
"""
homepage = "https://flask-socketio.readth
|
edocs.io"
url = "https://pypi.io/packages/source/F/Flask-SocketIO/Flask-SocketIO-2.9.6.tar.gz"
version('2.9.6', 'bca83faf38355bd91911f2f140f9b50f')
depends_on('py-setuptools', type='build')
depends_on('py-flask@0.9:', type=('build', 'run'))
depends_on('py-python-socketio@1.6.1:', type=('build', 'run'))
depends_on('py-werkzeug', type=('build', 'run'))
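# A hedged sketch of how a downstream Spack package could consume this one;
# 'PyMyApp' and its version constraint are hypothetical:
# class PyMyApp(PythonPackage):
#     depends_on('py-flask-socketio@2.9.6:', type=('build', 'run'))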
|
Aloomaio/googleads-python-lib
|
examples/adwords/v201809/advanced_operations/add_multi_asset_responsive_display_ad.py
|
Python
|
apache-2.0
| 5,311
| 0.005649
|
#!/usr/bin/env python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example adds a responsive display ad to an ad group.
Image assets are uploaded using AssetService. To get ad groups, run
get_ad_groups.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from googleads import adwords
import requests
AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE'
def UploadImageAsset(client, url):
"""Uploads the image from the specified url.
Args:
client: An AdWordsClient instance.
url: The image URL.
Returns:
The ID of the uploaded image.
"""
# Initialize appropriate service.
asset_service = client.GetService('AssetService', version='v201809')
# Download the image.
image_request = requests.get(url)
# Create the image asset.
image_asset = {
'xsi_type': 'ImageAsset',
'imageData': image_request.content,
# This field is optional, and if provided should be unique.
# 'assetName': 'Image asset ' + str(uuid.uuid4()),
}
# Create the operation.
operation = {
'operator': 'ADD',
'operand': image_asset
}
# Create the asset and return the ID.
result = asset_service.mutate([operation])
return result['value'][0]['assetId']
def main(client, ad_group_id):
# Initialize appropriate service.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201809')
# Create the ad.
multi_asset_responsive_display_ad = {
'xsi_type': 'MultiAssetResponsiveDisplayAd',
'headlines': [{
'asset': {
'xsi_type': 'TextAsset',
'assetText': 'Travel to Mars'
}
}, {
'asset': {
'xsi_type': 'TextAsset',
'assetText': 'Travel to Jupiter',
}
}, {
'asset': {
'xsi_type': 'TextAsset',
'assetText': 'Travel to Pluto'
}
}],
'descriptions': [{
'asset': {
'xsi_type': 'TextAsset',
'assetText': 'Visit the planet in a luxury spaceship.',
}
}, {
'asset': {
'xsi_type': 'TextAsset',
'assetText': 'See the planet in style.',
}
}],
'businessName': 'Galactic Luxury Cruises',
'longHeadline': {
'asset': {
'xsi_type': 'TextAsset',
'assetText': 'Visit the planet in a luxury spaceship.',
}
},
# This ad format does not allow the creation of an image asset by setting
# the asset.imageData field. An image asset must first be created using
# the AssetService, and asset.assetId must be populated when creating
# the ad.
'marketingImages': [{
'asset': {
'xsi_type': 'ImageAsset',
'assetId': UploadImageAsset(client, 'https://goo.gl/3b9Wfh')
}
}],
'squareMarketingImages': [{
'asset': {
'xsi_type': 'ImageAsset',
'assetId': UploadImageAsset(client, 'https://goo.gl/mtt54n')
}
}],
# Optional values
'finalUrls': ['http://www.example.com'],
'callToActionText': 'Shop Now',
# Set color settings using hexadecimal values. Set allowFlexibleColor to
# false if you want your ads to render by always using your colors
# strictly.
'mainColor': '#0000ff',
'accentColor': '#ffff00',
'allowFlexibleColor': False,
'formatSetting': 'NON_NATIVE',
# Set dynamic display ad settings, composed of landscape logo image,
# promotion text, and price prefix.
'dynamicSettingsPricePrefix': 'as low as',
'dynamicSettingsPromoText': 'Free shipping!',
'logoImages': [{
'asset': {
'xsi_type': 'ImageAsset',
'assetId': UploadImageAsset(client, 'https://goo.gl/mtt54n')
}
}]
}
# Create ad group ad.
ad_group_ad = {
'adGroupId': ad_group_id,
'ad': multi_asset_responsive_display_ad,
# Optional.
'status': 'PAUSED'
}
# Add ad.
ads = ad_group_ad_service.mutate([
{'operator': 'ADD', 'operand': ad_group_ad}
])
# Display results.
if 'value' in ads:
for ad in ads['value']:
      print('Added new responsive display ad with ID "%d" '
            'and long headline "%s".'
            % (ad['ad']['id'], ad['ad']['longHeadline']['asset']['assetText']))
else:
    print('No ads were added.')
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, AD_GROUP_ID)
|
sychan/RRoller
|
lib/RRoller/RRollerImpl.py
|
Python
|
mit
| 4,705
| 0.003826
|
# -*- coding: utf-8 -*-
#BEGIN_HEADER
# The header block is where all import statments should live
import os
import uuid
from KBaseReport.KBaseReportClient import KBaseReport
#END_HEADER
class RRoller:
'''
Module Name:
RRoller
Module Description:
A KBase module: RRoller
This sample module contains one small method - rick_roll.
'''
######## WARNING FOR GEVENT USERS ####### noqa
# Since asynchronous IO can lead to methods - even the same method -
# interrupting each other, you must be *very* careful when using global
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "0.0.1"
GIT_URL = ""
GIT_COMMIT_HASH = ""
#BEGIN_CLASS_HEADER
# Class variables and functions can be defined in this block
#END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
def __init__(self, config):
#BEGIN_CONSTRUCTOR
# Any configuration parameters that are important should be parsed and
# saved in the constructor.
self.callback_url = os.environ['SDK_CALLBACK_URL']
self.shared_folder = config['scratch']
#END_CONSTRUCTOR
pass
def rick_roll(self, ctx, input_params):
"""
The actual function is declared using 'funcdef' to specify the name
and input/return arguments to the function. For all typical KBase
Apps that run in the Narrative, your function should have the
'authentication required' modifier.
        :param input_params: instance of mapping with String values for keys
        "roll_id" and "workspace_name"
:returns: instance of type "RRoll_Output" (Here is the definition of
the output of the function. The output can be used by other SDK
modules which call your code, or the output visualizations in the
Narrative. 'report_name' and 'report_ref' are special output
fields- if defined, the Narrative can automatically render your
Report.) -> structure: parameter "report_name" of String,
parameter "report_url" of String
"""
# ctx is the context object
# return variables are: output
#BEGIN rick_roll
report_name = str(input_params["roll_id"])
workspace_name = str(input_params["workspace_name"])
print "Report name is {}".format(report_name)
report_url = "https://www.youtube.com/watch?v=oHg5SJYRHA0"
self.callbackURL = os.environ.get('SDK_CALLBACK_URL')
if self.callbackURL is None:
raise ValueError("SDK_CALLBACK_URL not set in environment")
# build report
#
reportName = 'kb_rickroll_{}_{}'.format(report_name, str(uuid.uuid4()))
reportObj = {'objects_created': [],
'message': '',
'direct_html': '',
'direct_html_index': 0,
'file_links': [],
'html_links': [],
'workspace_name': workspace_name,
'report_object_name': reportName
}
# html report
html_report_lines = []
html_report_lines += ['<html>']
#html_report_lines += ["<script>"]
#html_report_lines += ['window.location.assign("https://www.youtube.com/watch?v=oHg5SJYRHA0&autoplay=on")']
#html_report_lines += ["</script>"]
html_report_lines += ['<body bgcolor="whi
|
te">']
html_report_lines += ['<a target="_blank" href="{}">{}</a>'.format(report_url, report_url)]
html_report_lines += ['</body>']
html_report_lines += ['</html>']
reportObj['direct_html'] = "\n".join(html_report_lines)
SERVICE_VER = 'release'
report = KBaseReport(self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
report_info = report.create_extended_report(reportObj)
output = {'report_name': report_info['name'],
'report_ref': report_info['ref']}
#END rick_roll
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method rick_roll return value ' +
'output is not type dict as required.')
# return the results
return [output]
def status(self, ctx):
#BEGIN_STATUS
returnVal = {'state': "OK",
'message': "",
'version': self.VERSION,
'git_url': self.GIT_URL,
'git_commit_hash': self.GIT_COMMIT_HASH}
#END_STATUS
return [returnVal]
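# Minimal local-invocation sketch, assuming SDK_CALLBACK_URL is set in the
# environment and a ctx dict carrying a token (both normally supplied by the
# KBase runtime); the config values here are hypothetical:
# impl = RRoller({'scratch': '/tmp'})
# [result] = impl.rick_roll({'token': 'TOKEN'},
#                           {'roll_id': 'demo', 'workspace_name': 'my_ws'})
# print(result['report_name'], result['report_ref'])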
|