| repo_name (string, len 5–92) | path (string, len 4–221) | copies (19 classes) | size (string, len 4–6) | content (string, len 766–896k) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 32–997) | alpha_frac (float64, 0.25–0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5–13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| jcfr/girder | tests/test_plugins/test_plugin/server.py | 1 | 1795 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
from girder.utility.server import staticFile
class CustomAppRoot(object):
"""
The webroot endpoint simply serves the main index HTML file.
"""
exposed = True
def GET(self):
return "hello world"
class Other(Resource):
def __init__(self):
self.resourceName = 'other'
self.route('GET', (), self.getResource)
@access.public
def getResource(self, params):
return ['custom REST route']
getResource.description = Description('Get something.')
def load(info):
info['serverRoot'], info['serverRoot'].girder = (
CustomAppRoot(), info['serverRoot'])
info['serverRoot'].api = info['serverRoot'].girder.api
del info['serverRoot'].girder.api
info['apiRoot'].other = Other()
path = os.path.join(globals()['PLUGIN_ROOT_DIR'], 'static.txt')
info['serverRoot'].static_route = staticFile(path)
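
After load() runs, the plugin has rearranged the server tree: CustomAppRoot answers at "/", the original webroot is remounted at "/girder", the API stays mounted, and "/static_route" serves the plugin's static.txt. A minimal client-side sketch of the resulting routes, assuming a local server on port 8080 (the URL, port, and requests-based check are illustrative, not part of the plugin):

import requests  # hypothetical smoke test, not part of the plugin

base = 'http://localhost:8080'
print(requests.get(base + '/').text)                # -> "hello world" (CustomAppRoot.GET)
print(requests.get(base + '/api/v1/other').json())  # -> ["custom REST route"] (Other.getResource)
print(requests.get(base + '/static_route').text)    # -> contents of static.txt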
| apache-2.0 | 4,997,767,409,729,860,000 | 30.491228 | 79 | 0.630641 | false | 4.098174 | false | false | false |
| mkobos/tree_crawler | concurrent_tree_crawler/tree_accessor.py | 1 | 3966 |
import logging
import threading
from concurrent_tree_crawler.abstract_tree_accessor import \
AbstractTreeAccessor, NodeAction
from concurrent_tree_crawler.abstract_node import NodeState
class TreeAccessor(AbstractTreeAccessor):
"""
An interface for the tree made of L{AbstractNode}s.
Access to sensitive methods is protected by concurrent programming objects:
locks and conditions.
"""
def __init__(self, sentinel):
"""
@param sentinel: a technical node which will be made parent of the
root node.
@type sentinel: L{AbstractNode}
"""
self.__sentinel = sentinel
"""
The sentinel is a purely technical object. It shouldn't be
analyzed by the navigator. It is here just to make sure that the
root of the tree has a parent. This is because it is required by our
algorithm that all of the nodes in the tree have a parent.
"""
self.__root = None
"""The main business-level element of the tree"""
## The one and only child of the sentinel is the root node
if self.__sentinel.has_child("root"):
self.__root = self.__sentinel.get_child("root")
else:
self.__root = self.__sentinel.add_child("root", NodeState.OPEN)
def get_sentinel(self):
return self.__sentinel
def get_root(self):
return self.__root
def update_and_get_child(self, node, possible_children_names):
while True:
node.get_children_cond().acquire()
try:
child = node.update_and_get_child(possible_children_names)
if child is None: ## No accessible children are available
return None
state = child.get_state()
if state == NodeState.OPEN:
child.set_state(NodeState.PROCESSING)
return (child, NodeAction.TO_PROCESS)
elif state == NodeState.VISITED:
return (child, NodeAction.TO_VISIT)
elif state == NodeState.PROCESSING:
self.__log("Starting to wait on \"{}\" node children".\
format(node.get_name()))
node.get_children_cond().wait()
self.__log("Done waiting on \"{}\" node children".format(
node.get_name()))
else:
assert False, "Unknown node state: {}".format(state)
finally:
node.get_children_cond().release()
def set_node_type(self, node, is_leaf):
assert node != self.__sentinel, "Processing sentinel is not allowed"
parent = node.get_parent()
parent.get_children_cond().acquire()
try:
if is_leaf:
node.set_state(NodeState.CLOSED)
self.__internal_update_node_state(parent)
else:
node.set_state(NodeState.VISITED)
finally:
parent.get_children_cond().notify_all()
parent.get_children_cond().release()
def set_error(self, node):
self.__set_node_state_and_update(node, NodeState.ERROR)
def __set_node_state_and_update(self, node, new_state):
assert node != self.__sentinel, "Changing sentinel state is not allowed"
parent = node.get_parent()
parent.get_children_cond().acquire()
try:
node.set_state(new_state)
self.__internal_update_node_state(parent)
finally:
parent.get_children_cond().notify_all()
parent.get_children_cond().release()
def __internal_update_node_state(self, node):
"""@param node: L{AbstractNode}"""
if node == self.__sentinel:
## The state of the sentinel is undefined and not used
## in the program, it should not be changed
return
new_state = None
if node.all_children_are_in_one_of_states({NodeState.CLOSED}):
new_state = NodeState.CLOSED
elif node.all_children_are_in_one_of_states(
{NodeState.ERROR, NodeState.CLOSED}):
new_state = NodeState.ERROR
## Node state does not have to be changed
if new_state is None:
return
parent = node.get_parent()
parent.get_children_cond().acquire()
try:
node.set_state(new_state)
self.__internal_update_node_state(parent)
finally:
parent.get_children_cond().notify_all()
parent.get_children_cond().release()
def __log(self, message):
"""
@type message: string
"""
logging.debug("thread=\"{}\", {}".format(
threading.current_thread().name, message))
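
A sketch of the intended call pattern, assuming some concrete AbstractNode implementation (SimpleNode below is a hypothetical stand-in; all names are illustrative):

from concurrent_tree_crawler.abstract_tree_accessor import NodeAction

sentinel = SimpleNode('sentinel')          # hypothetical AbstractNode subclass
accessor = TreeAccessor(sentinel)          # creates or reuses the "root" child
root = accessor.get_root()
result = accessor.update_and_get_child(root, ['page1', 'page2'])
if result is not None:
    child, action = result
    if action == NodeAction.TO_PROCESS:    # this thread owns the OPEN -> PROCESSING node
        accessor.set_node_type(child, is_leaf=True)   # closes it and wakes waiting threads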
| mit | -5,910,058,468,039,156,000 | 30.736 | 76 | 0.688603 | false | 3.305 | false | false | false |
| danakj/chromium | services/shell/public/tools/manifest/manifest_collator.py | 1 | 2806 |
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" A collator for Service Manifests """
import argparse
import json
import os
import shutil
import sys
import urlparse
eater_relative = '../../../../../../tools/json_comment_eater'
eater_relative = os.path.join(os.path.abspath(__file__), eater_relative)
sys.path.insert(0, os.path.normpath(eater_relative))
try:
import json_comment_eater
finally:
sys.path.pop(0)
def ParseJSONFile(filename):
with open(filename) as json_file:
try:
return json.loads(json_comment_eater.Nom(json_file.read()))
except ValueError:
print "%s is not a valid JSON document" % filename
return None
def MergeDicts(left, right):
for k, v in right.iteritems():
if k not in left:
left[k] = v
else:
if isinstance(v, dict):
assert isinstance(left[k], dict)
MergeDicts(left[k], v)
elif isinstance(v, list):
assert isinstance(left[k], list)
left[k].extend(v)
else:
raise "Refusing to merge conflicting non-collection values."
return left
def MergeBaseManifest(parent, base):
MergeDicts(parent["capabilities"], base["capabilities"])
if "services" in base:
if "services" not in parent:
parent["services"] = []
parent["services"].extend(base["services"])
if "process-group" in base:
parent["process-group"] = base["process-group"]
def main():
parser = argparse.ArgumentParser(
description="Collate Service Manifests.")
parser.add_argument("--parent")
parser.add_argument("--output")
parser.add_argument("--name")
parser.add_argument("--base-manifest", default=None)
args, children = parser.parse_known_args()
parent = ParseJSONFile(args.parent)
if parent == None:
return 1
if args.base_manifest:
base = ParseJSONFile(args.base_manifest)
if base == None:
return 1
MergeBaseManifest(parent, base)
service_path = parent['name'].split(':')[1]
if service_path.startswith('//'):
raise ValueError("Service name path component '%s' must not start " \
"with //" % service_path)
if args.name != service_path:
raise ValueError("Service name '%s' specified in build file does not " \
"match name '%s' specified in manifest." %
(args.name, service_path))
services = []
for child in children:
service = ParseJSONFile(child)
if service == None:
return 1
services.append(service)
if len(services) > 0:
parent['services'] = services
with open(args.output, 'w') as output_file:
json.dump(parent, output_file)
return 0
if __name__ == "__main__":
sys.exit(main())
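
For orientation, a worked example of MergeDicts (values invented): nested dicts merge key-by-key, lists extend, and conflicting scalar values are refused:

left = {'capabilities': {'net': ['bind']}, 'services': [{'name': 'a'}]}
right = {'capabilities': {'net': ['listen'], 'fs': ['read']}, 'services': [{'name': 'b'}]}
MergeDicts(left, right)
# left == {'capabilities': {'net': ['bind', 'listen'], 'fs': ['read']},
#          'services': [{'name': 'a'}, {'name': 'b'}]}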
| bsd-3-clause | 4,538,789,842,080,426,000 | 25.980769 | 76 | 0.650036 | false | 3.721485 | false | false | false |
| skevy/django | django/db/models/fields/related.py | 1 | 54908 |
from django.conf import settings
from django.db import connection, router, transaction
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import (AutoField, Field, IntegerField,
PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist)
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.db.models.deletion import CASCADE
from django.utils.encoding import smart_unicode
from django.utils.translation import (ugettext_lazy as _, string_concat,
ungettext, ugettext)
from django.utils.functional import curry
from django.core import exceptions
from django import forms
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
"""
Adds a lookup on ``cls`` when a related field is defined using a string,
i.e.::
class MyModel(Model):
fk = ForeignKey("AnotherModel")
This string can be:
* RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a recursive
relation.
* The name of a model (i.e "AnotherModel") to indicate another model in
the same app.
* An app-label and model name (i.e. "someapp.AnotherModel") to indicate
another model in a different app.
If the other model hasn't yet been loaded -- almost a given if you're using
lazy relationships -- then the relation won't be set up until the
class_prepared signal fires at the end of model initialization.
operation is the work that must be performed once the relation can be resolved.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
app_label = cls._meta.app_label
model_name = cls.__name__
else:
# Look for an "app.Model" relation
try:
app_label, model_name = relation.split(".")
except ValueError:
# If we can't split, assume a model in current app
app_label = cls._meta.app_label
model_name = relation
except AttributeError:
# If it doesn't have a split it's actually a model class
app_label = relation._meta.app_label
model_name = relation._meta.object_name
# Try to look up the related model, and if it's already loaded resolve the
# string right away. If get_model returns None, it means that the related
# model isn't loaded yet, so we need to pend the relation until the class
# is prepared.
model = get_model(app_label, model_name,
seed_cache=False, only_installed=False)
if model:
operation(field, model, cls)
else:
key = (app_label, model_name)
value = (cls, field, operation)
pending_lookups.setdefault(key, []).append(value)
def do_pending_lookups(sender, **kwargs):
"""
Handle any pending relations to the sending model. Sent from class_prepared.
"""
key = (sender._meta.app_label, sender.__name__)
for cls, field, operation in pending_lookups.pop(key, []):
operation(field, sender, cls)
signals.class_prepared.connect(do_pending_lookups)
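# Illustrative flow of the machinery above (model and app names invented):
#
#   class Author(models.Model):           # "app.Book" is not loaded yet, so the
#       book = ForeignKey("app.Book")     # lookup is parked in pending_lookups
#
#   class Book(models.Model):             # class_prepared fires for Book, and
#       pass                              # do_pending_lookups resolves the FK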
#HACK
class RelatedField(object):
def contribute_to_class(self, cls, name):
sup = super(RelatedField, self)
# Store the opts for related_query_name()
self.opts = cls._meta
if hasattr(sup, 'contribute_to_class'):
sup.contribute_to_class(cls, name)
if not cls._meta.abstract and self.rel.related_name:
self.rel.related_name = self.rel.related_name % {
'class': cls.__name__.lower(),
'app_label': cls._meta.app_label.lower(),
}
other = self.rel.to
if isinstance(other, basestring) or other._meta.pk is None:
def resolve_related_class(field, model, cls):
field.rel.to = model
field.do_related_class(model, cls)
add_lazy_relation(cls, self, other, resolve_related_class)
else:
self.do_related_class(other, cls)
def set_attributes_from_rel(self):
self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
if self.verbose_name is None:
self.verbose_name = self.rel.to._meta.verbose_name
self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.related = RelatedObject(other, cls, self)
if not cls._meta.abstract:
self.contribute_to_related_class(other, self.related)
def get_prep_lookup(self, lookup_type, value):
if hasattr(value, 'prepare'):
return value.prepare()
if hasattr(value, '_prepare'):
return value._prepare()
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return self._pk_trace(value, 'get_prep_lookup', lookup_type)
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_prep_lookup', lookup_type) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
if not prepared:
value = self.get_prep_lookup(lookup_type, value)
if hasattr(value, 'get_compiler'):
value = value.get_compiler(connection=connection)
if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
# If the value has a relabel_aliases method, it will need to
# be invoked before the final SQL is evaluated
if hasattr(value, 'relabel_aliases'):
return value
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
else:
sql, params = value._as_sql(connection=connection)
return QueryWrapper(('(%s)' % sql), params)
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return [self._pk_trace(value, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)]
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)
for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def _pk_trace(self, value, prep_func, lookup_type, **kwargs):
# Value may be a primary key, or an object held in a relation.
# If it is an object, then we need to get the primary key value for
# that object. In certain conditions (especially one-to-one relations),
# the primary key may itself be an object - so we need to keep drilling
# down until we hit a value that can be used for a comparison.
v = value
# In the case of an FK to 'self', this check allows to_field to be used
# for both forwards and reverse lookups across the FK. (For normal FKs,
# it's only relevant for forward lookups).
if isinstance(v, self.rel.to):
field_name = getattr(self.rel, "field_name", None)
else:
field_name = None
try:
while True:
if field_name is None:
field_name = v._meta.pk.name
v = getattr(v, field_name)
field_name = None
except AttributeError:
pass
except exceptions.ObjectDoesNotExist:
v = None
field = self
while field.rel:
if hasattr(field.rel, 'field_name'):
field = field.rel.to._meta.get_field(field.rel.field_name)
else:
field = field.rel.to._meta.pk
if lookup_type in ('range', 'in'):
v = [v]
v = getattr(field, prep_func)(lookup_type, v, **kwargs)
if isinstance(v, list):
v = v[0]
return v
def related_query_name(self):
# This method defines the name that can be used to identify this
# related object in a table-spanning query. It uses the lower-cased
# object_name by default, but this can be overridden with the
# "related_name" option.
return self.rel.related_name or self.opts.object_name.lower()
class SingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class pointed to by a related field.
# In the example "place.restaurant", the restaurant attribute is a
# SingleRelatedObjectDescriptor instance.
def __init__(self, related):
self.related = related
self.cache_name = related.get_cache_name()
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
return getattr(instance, self.cache_name)
except AttributeError:
params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
db = router.db_for_read(self.related.model, instance=instance)
rel_obj = self.related.model._base_manager.using(db).get(**params)
setattr(instance, self.cache_name, rel_obj)
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
# The similarity of the code below to the code in
# ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
# of small differences that would make a common base class convoluted.
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.related.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.related.get_accessor_name()))
elif value is not None and not isinstance(value, self.related.model):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.related.get_accessor_name(), self.related.opts.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
# Set the value of the related field to the value of the related object's related field
setattr(value, self.related.field.attname, getattr(instance, self.related.field.rel.get_related_field().attname))
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class that defines the related field.
# In the example "choice.poll", the poll attribute is a
# ReverseSingleRelatedObjectDescriptor instance.
def __init__(self, field_with_rel):
self.field = field_with_rel
def __get__(self, instance, instance_type=None):
if instance is None:
return self
cache_name = self.field.get_cache_name()
try:
return getattr(instance, cache_name)
except AttributeError:
val = getattr(instance, self.field.attname)
if val is None:
# If NULL is an allowed value, return it.
if self.field.null:
return None
raise self.field.rel.to.DoesNotExist
other_field = self.field.rel.get_related_field()
if other_field.rel:
params = {'%s__pk' % self.field.rel.field_name: val}
else:
params = {'%s__exact' % self.field.rel.field_name: val}
# If the related manager indicates that it should be used for
# related fields, respect that.
rel_mgr = self.field.rel.to._default_manager
db = router.db_for_read(self.field.rel.to, instance=instance)
if getattr(rel_mgr, 'use_for_related_fields', False):
rel_obj = rel_mgr.using(db).get(**params)
else:
rel_obj = QuerySet(self.field.rel.to).using(db).get(**params)
setattr(instance, cache_name, rel_obj)
return rel_obj
def __set__(self, instance, value):
if instance is None:
            raise AttributeError("%s must be accessed via instance" % self.field.name)
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.field.name))
elif value is not None and not isinstance(value, self.field.rel.to):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.field.name, self.field.rel.to._meta.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
# If we're setting the value of a OneToOneField to None, we need to clear
# out the cache on any old related object. Otherwise, deleting the
# previously-related object will also cause this object to be deleted,
# which is wrong.
if value is None:
# Look up the previously-related object, which may still be available
# since we've not yet cleared out the related field.
# Use the cache directly, instead of the accessor; if we haven't
# populated the cache, then we don't care - we're only accessing
# the object to invalidate the accessor cache, so there's no
# need to populate the cache just to expire it again.
related = getattr(instance, self.field.get_cache_name(), None)
# If we've got an old related object, we need to clear out its
# cache. This cache also might not exist if the related object
# hasn't been accessed yet.
if related:
cache_name = self.field.related.get_cache_name()
try:
delattr(related, cache_name)
except AttributeError:
pass
# Set the value of the related field
try:
val = getattr(value, self.field.rel.get_related_field().attname)
except AttributeError:
val = None
setattr(instance, self.field.attname, val)
# Since we already know what the related object is, seed the related
# object cache now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.field.get_cache_name(), value)
class ForeignRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ForeignKey pointed at them by
# some other model. In the example "poll.choice_set", the choice_set
# attribute is a ForeignRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
return self
return self.create_manager(instance,
self.related.model._default_manager.__class__)
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
manager = self.__get__(instance)
# If the foreign key can support nulls, then completely clear the related set.
# Otherwise, just move the named objects into the set.
if self.related.field.null:
manager.clear()
manager.add(*value)
def delete_manager(self, instance):
"""
Returns a queryset based on the related model's base manager (rather
than the default manager, as returned by __get__). Used by
Model.delete().
"""
return self.create_manager(instance,
self.related.model._base_manager.__class__)
def create_manager(self, instance, superclass):
"""
Creates the managers used by other methods (__get__() and delete()).
"""
rel_field = self.related.field
rel_model = self.related.model
class RelatedManager(superclass):
def get_query_set(self):
db = self._db or router.db_for_read(rel_model, instance=instance)
return superclass.get_query_set(self).using(db).filter(**(self.core_filters))
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected" % self.model._meta.object_name)
setattr(obj, rel_field.name, instance)
obj.save()
add.alters_data = True
def create(self, **kwargs):
kwargs.update({rel_field.name: instance})
db = router.db_for_write(rel_model, instance=instance)
return super(RelatedManager, self.db_manager(db)).create(**kwargs)
create.alters_data = True
def get_or_create(self, **kwargs):
# Update kwargs with the related object that this
# ForeignRelatedObjectsDescriptor knows about.
kwargs.update({rel_field.name: instance})
db = router.db_for_write(rel_model, instance=instance)
return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
get_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
if rel_field.null:
def remove(self, *objs):
val = getattr(instance, rel_field.rel.get_related_field().attname)
for obj in objs:
# Is obj actually part of this descriptor set?
if getattr(obj, rel_field.attname) == val:
setattr(obj, rel_field.name, None)
obj.save()
else:
raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, instance))
remove.alters_data = True
def clear(self):
for obj in self.all():
setattr(obj, rel_field.name, None)
obj.save()
clear.alters_data = True
manager = RelatedManager()
attname = rel_field.rel.get_related_field().name
manager.core_filters = {'%s__%s' % (rel_field.name, attname):
getattr(instance, attname)}
manager.model = self.related.model
return manager
def create_many_related_manager(superclass, rel=False):
"""Creates a manager that subclasses 'superclass' (which is a Manager)
and adds behavior for many-to-many related objects."""
through = rel.through
class ManyRelatedManager(superclass):
def __init__(self, model=None, core_filters=None, instance=None, symmetrical=None,
join_table=None, source_field_name=None, target_field_name=None,
reverse=False):
super(ManyRelatedManager, self).__init__()
self.core_filters = core_filters
self.model = model
self.symmetrical = symmetrical
self.instance = instance
self.source_field_name = source_field_name
self.target_field_name = target_field_name
self.through = through
self._pk_val = self.instance.pk
self.reverse = reverse
if self._pk_val is None:
raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)
def get_query_set(self):
db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
return superclass.get_query_set(self).using(db)._next_is_sticky().filter(**(self.core_filters))
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if rel.through._meta.auto_created:
def add(self, *objs):
self._add_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
self._remove_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
if self.symmetrical:
self._remove_items(self.target_field_name, self.source_field_name, *objs)
remove.alters_data = True
def clear(self):
self._clear_items(self.source_field_name)
# If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
if self.symmetrical:
self._clear_items(self.target_field_name)
clear.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if not rel.through._meta.auto_created:
opts = through._meta
raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
db = router.db_for_write(self.instance.__class__, instance=self.instance)
new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = \
super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def _add_items(self, source_field_name, target_field_name, *objs):
# join_table: name of the m2m link table
# source_field_name: the PK fieldname in join_table for the source object
# target_field_name: the PK fieldname in join_table for the target object
# *objs - objects to add. Either object instances, or primary keys of object instances.
# If there aren't any objects, there is nothing to do.
from django.db.models import Model
if objs:
new_ids = set()
for obj in objs:
if isinstance(obj, self.model):
if not router.allow_relation(obj, self.instance):
raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
(obj, self.instance._state.db, obj._state.db))
new_ids.add(obj.pk)
elif isinstance(obj, Model):
raise TypeError("'%s' instance expected" % self.model._meta.object_name)
else:
new_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
vals = vals.filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: new_ids,
})
new_ids = new_ids - set(vals)
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action='pre_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
# Add the ones that aren't there already
for obj_id in new_ids:
self.through._default_manager.using(db).create(**{
'%s_id' % source_field_name: self._pk_val,
'%s_id' % target_field_name: obj_id,
})
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action='post_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
def _remove_items(self, source_field_name, target_field_name, *objs):
# source_col_name: the PK colname in join_table for the source object
# target_col_name: the PK colname in join_table for the target object
# *objs - objects to remove
# If there aren't any objects, there is nothing to do.
if objs:
# Check that all the objects are of the right type
old_ids = set()
for obj in objs:
if isinstance(obj, self.model):
old_ids.add(obj.pk)
else:
old_ids.add(obj)
# Work out what DB we're operating on
db = router.db_for_write(self.through, instance=self.instance)
# Send a signal to the other end if need be.
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="pre_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
# Remove the specified objects from the join table
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: old_ids
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="post_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
def _clear_items(self, source_field_name):
db = router.db_for_write(self.through, instance=self.instance)
# source_col_name: the PK colname in join_table for the source object
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="pre_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="post_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
return ManyRelatedManager
class ManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField pointed at them by
# some other model (rather than having a ManyToManyField themselves).
# In the example "publication.article_set", the article_set attribute is a
# ManyRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
return self
# Dynamically create a class that subclasses the related
# model's default manager.
rel_model = self.related.model
superclass = rel_model._default_manager.__class__
RelatedManager = create_many_related_manager(superclass, self.related.field.rel)
manager = RelatedManager(
model=rel_model,
core_filters={'%s__pk' % self.related.field.name: instance._get_pk_val()},
instance=instance,
symmetrical=False,
source_field_name=self.related.field.m2m_reverse_field_name(),
target_field_name=self.related.field.m2m_field_name(),
reverse=True
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.related.field.rel.through._meta.auto_created:
opts = self.related.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField defined in their
# model (rather than having another model pointed *at* them).
# In the example "article.publications", the publications attribute is a
# ReverseManyRelatedObjectsDescriptor instance.
def __init__(self, m2m_field):
self.field = m2m_field
def _through(self):
# through is provided so that you have easy access to the through
# model (Book.authors.through) for inlines, etc. This is done as
# a property to ensure that the fully resolved value is returned.
return self.field.rel.through
through = property(_through)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
# Dynamically create a class that subclasses the related
# model's default manager.
        rel_model = self.field.rel.to
superclass = rel_model._default_manager.__class__
RelatedManager = create_many_related_manager(superclass, self.field.rel)
manager = RelatedManager(
model=rel_model,
core_filters={'%s__pk' % self.field.related_query_name(): instance._get_pk_val()},
instance=instance,
symmetrical=self.field.rel.symmetrical,
source_field_name=self.field.m2m_field_name(),
target_field_name=self.field.m2m_reverse_field_name(),
reverse=False
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.field.rel.through._meta.auto_created:
opts = self.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ManyToOneRel(object):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
self.to, self.field_name = to, field_name
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.multiple = True
self.parent_link = parent_link
self.on_delete = on_delete
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
Returns the Field in the 'to' object to which this relationship is
tied.
"""
data = self.to._meta.get_field_by_name(self.field_name)
if not data[2]:
raise FieldDoesNotExist("No related field named '%s'" %
self.field_name)
return data[0]
class OneToOneRel(ManyToOneRel):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
super(OneToOneRel, self).__init__(to, field_name,
related_name=related_name, limit_choices_to=limit_choices_to,
parent_link=parent_link, on_delete=on_delete
)
self.multiple = False
class ManyToManyRel(object):
def __init__(self, to, related_name=None, limit_choices_to=None,
symmetrical=True, through=None):
self.to = to
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.symmetrical = symmetrical
self.multiple = True
self.through = through
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
        Returns the field in the 'to' object to which this relationship is tied
(this is always the primary key on the target model). Provided for
symmetry with ManyToOneRel.
"""
return self.to._meta.pk
class ForeignKey(RelatedField, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('Model %(model)s with pk %(pk)r does not exist.')
}
description = _("Foreign Key (type determined by related field)")
def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
try:
to_name = to._meta.object_name.lower()
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
else:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
if 'db_index' not in kwargs:
kwargs['db_index'] = True
kwargs['rel'] = rel_class(to, to_field,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
parent_link=kwargs.pop('parent_link', False),
on_delete=kwargs.pop('on_delete', CASCADE),
)
Field.__init__(self, **kwargs)
def validate(self, value, model_instance):
if self.rel.parent_link:
return
super(ForeignKey, self).validate(value, model_instance)
if value is None:
return
using = router.db_for_read(model_instance.__class__, instance=model_instance)
qs = self.rel.to._default_manager.using(using).filter(
**{self.rel.field_name: value}
)
qs = qs.complex_filter(self.rel.limit_choices_to)
if not qs.exists():
raise exceptions.ValidationError(self.error_messages['invalid'] % {
'model': self.rel.to._meta.verbose_name, 'pk': value})
def get_attname(self):
return '%s_id' % self.name
def get_validator_unique_lookup_type(self):
return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)
def get_default(self):
"Here we check if the default value is an object and return the to_field if so."
field_default = super(ForeignKey, self).get_default()
if isinstance(field_default, self.rel.to):
return getattr(field_default, self.rel.get_related_field().attname)
return field_default
def get_db_prep_save(self, value, connection):
if value == '' or value == None:
return None
else:
return self.rel.get_related_field().get_db_prep_save(value,
connection=connection)
def value_to_string(self, obj):
if not obj:
# In required many-to-one fields with only one available choice,
# select that one available choice. Note: For SelectFields
# we have to check that the length of choices is *2*, not 1,
# because SelectFields always have an initial "blank" value.
if not self.blank and self.choices:
choice_list = self.get_choices_default()
if len(choice_list) == 2:
return smart_unicode(choice_list[1][0])
return Field.value_to_string(self, obj)
def contribute_to_class(self, cls, name):
super(ForeignKey, self).contribute_to_class(cls, name)
setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
if isinstance(self.rel.to, basestring):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "o2m")
def contribute_to_related_class(self, cls, related):
# Internal FK's - i.e., those with a related name ending with '+' -
# don't get a related descriptor.
if not self.rel.is_hidden():
setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
if self.rel.limit_choices_to:
cls._meta.related_fkey_lookups.append(self.rel.limit_choices_to)
if self.rel.field_name is None:
self.rel.field_name = cls._meta.pk.name
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
defaults = {
'form_class': forms.ModelChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to),
'to_field_name': self.rel.field_name,
}
defaults.update(kwargs)
return super(ForeignKey, self).formfield(**defaults)
def db_type(self, connection):
# The database column type of a ForeignKey is the column type
# of the field to which it points. An exception is if the ForeignKey
# points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
# in which case the column type is simply that of an IntegerField.
# If the database needs similar types for key fields however, the only
# thing we can do is making AutoField an IntegerField.
rel_field = self.rel.get_related_field()
if (isinstance(rel_field, AutoField) or
(not connection.features.related_fields_match_type and
isinstance(rel_field, (PositiveIntegerField,
PositiveSmallIntegerField)))):
return IntegerField().db_type(connection=connection)
return rel_field.db_type(connection=connection)
class OneToOneField(ForeignKey):
"""
    A OneToOneField is essentially the same as a ForeignKey, with the exception
    that it always carries a "unique" constraint with it and the reverse relation
always returns the object pointed to (since there will only ever be one),
rather than returning a list.
"""
description = _("One-to-one relationship")
def __init__(self, to, to_field=None, **kwargs):
kwargs['unique'] = True
super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(),
SingleRelatedObjectDescriptor(related))
def formfield(self, **kwargs):
if self.rel.parent_link:
return None
return super(OneToOneField, self).formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.rel.to):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
def create_many_to_many_intermediary_model(field, klass):
from django.db import models
managed = True
if isinstance(field.rel.to, basestring) and field.rel.to != RECURSIVE_RELATIONSHIP_CONSTANT:
to_model = field.rel.to
to = to_model.split('.')[-1]
def set_managed(field, model, cls):
field.rel.through._meta.managed = model._meta.managed or cls._meta.managed
add_lazy_relation(klass, field, to_model, set_managed)
elif isinstance(field.rel.to, basestring):
to = klass._meta.object_name
to_model = klass
managed = klass._meta.managed
else:
to = field.rel.to._meta.object_name
to_model = field.rel.to
managed = klass._meta.managed or to_model._meta.managed
name = '%s_%s' % (klass._meta.object_name, field.name)
if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass._meta.object_name:
from_ = 'from_%s' % to.lower()
to = 'to_%s' % to.lower()
else:
from_ = klass._meta.object_name.lower()
to = to.lower()
meta = type('Meta', (object,), {
'db_table': field._get_m2m_db_table(klass._meta),
'managed': managed,
'auto_created': klass,
'app_label': klass._meta.app_label,
'unique_together': (from_, to),
'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to},
'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to},
})
# Construct and return the new class.
return type(name, (models.Model,), {
'Meta': meta,
'__module__': klass.__module__,
from_: models.ForeignKey(klass, related_name='%s+' % name),
to: models.ForeignKey(to_model, related_name='%s+' % name)
})
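# For orientation, given hypothetical models Article and Tag with
# "tags = ManyToManyField(Tag)" on Article, the function above builds roughly:
#
#   class Article_tags(models.Model):
#       article = models.ForeignKey(Article, related_name='Article_tags+')
#       tag = models.ForeignKey(Tag, related_name='Article_tags+')
#       class Meta:
#           db_table = 'app_article_tags'          # from field._get_m2m_db_table()
#           auto_created = Article
#           unique_together = ('article', 'tag')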
class ManyToManyField(RelatedField, Field):
description = _("Many-to-many relationship")
def __init__(self, to, **kwargs):
try:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = ManyToManyRel(to,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
symmetrical=kwargs.pop('symmetrical', to==RECURSIVE_RELATIONSHIP_CONSTANT),
through=kwargs.pop('through', None))
self.db_table = kwargs.pop('db_table', None)
if kwargs['rel'].through is not None:
assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
Field.__init__(self, **kwargs)
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
self.help_text = string_concat(self.help_text, ' ', msg)
def get_choices_default(self):
return Field.get_choices(self, include_blank=False)
def _get_m2m_db_table(self, opts):
"Function that can be curried to provide the m2m table name for this relation"
if self.rel.through is not None:
return self.rel.through._meta.db_table
elif self.db_table:
return self.db_table
else:
return util.truncate_name('%s_%s' % (opts.db_table, self.name),
connection.ops.max_name_length())
def _get_m2m_attr(self, related, attr):
"Function that can be curried to provide the source accessor or DB column name for the m2m table"
cache_attr = '_m2m_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
for f in self.rel.through._meta.fields:
if hasattr(f,'rel') and f.rel and f.rel.to == related.model:
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"Function that can be curried to provide the related accessor or DB column name for the m2m table"
cache_attr = '_m2m_reverse_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
for f in self.rel.through._meta.fields:
if hasattr(f,'rel') and f.rel and f.rel.to == related.parent_model:
if related.model == related.parent_model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
else:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def value_to_string(self, obj):
data = ''
if obj:
qs = getattr(obj, self.name).all()
data = [instance._get_pk_val() for instance in qs]
else:
# In required many-to-many fields with only one available choice,
# select that one available choice.
if not self.blank:
choices_list = self.get_choices_default()
if len(choices_list) == 1:
data = [choices_list[0][0]]
return smart_unicode(data)
def contribute_to_class(self, cls, name):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
if self.rel.symmetrical and (self.rel.to == "self" or self.rel.to == cls._meta.object_name):
self.rel.related_name = "%s_rel_+" % name
super(ManyToManyField, self).contribute_to_class(cls, name)
# The intermediate m2m model is not auto created if:
# 1) There is a manually specified intermediate, or
# 2) The class owning the m2m field is abstract.
if not self.rel.through and not cls._meta.abstract:
self.rel.through = create_many_to_many_intermediary_model(self, cls)
# Add the descriptor for the m2m relation
setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
# Set up the accessor for the m2m table name for the relation
self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
# Populate some necessary rel arguments so that cross-app relations
# work correctly.
if isinstance(self.rel.through, basestring):
def resolve_through_model(field, model, cls):
field.rel.through = model
add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
if isinstance(self.rel.to, basestring):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "m2m")
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# don't get a related descriptor.
if not self.rel.is_hidden():
setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
# Set up the accessors for the column names on the m2m table
self.m2m_column_name = curry(self._get_m2m_attr, related, 'column')
self.m2m_reverse_name = curry(self._get_m2m_reverse_attr, related, 'column')
self.m2m_field_name = curry(self._get_m2m_attr, related, 'name')
self.m2m_reverse_field_name = curry(self._get_m2m_reverse_attr, related, 'name')
get_m2m_rel = curry(self._get_m2m_attr, related, 'rel')
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = curry(self._get_m2m_reverse_attr, related, 'rel')
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
"Returns the value of this field in the given model instance."
return getattr(obj, self.attname).all()
def save_form_data(self, instance, data):
setattr(instance, self.attname, data)
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
defaults = {
'form_class': forms.ModelMultipleChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to)
}
defaults.update(kwargs)
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get('initial') is not None:
initial = defaults['initial']
if callable(initial):
initial = initial()
defaults['initial'] = [i._get_pk_val() for i in initial]
return super(ManyToManyField, self).formfield(**defaults)
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
| bsd-3-clause | -3,232,830,973,483,323,000 | 45.889838 | 222 | 0.602153 | false | 4.154661 | false | false | false |
| noironetworks/heat | heat/engine/resources/openstack/designate/zone.py | 1 | 5889 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
class DesignateZone(resource.Resource):
"""Heat Template Resource for Designate Zone.
    Designate provides DNS-as-a-Service for OpenStack. A zone is the part of
    the DNS namespace identified by a unique name, such as a domain.
"""
support_status = support.SupportStatus(
version='8.0.0')
PROPERTIES = (
NAME, TTL, DESCRIPTION, EMAIL, TYPE, MASTERS
) = (
'name', 'ttl', 'description', 'email', 'type', 'masters'
)
ATTRIBUTES = (
SERIAL,
) = (
'serial',
)
TYPES = (
PRIMARY, SECONDARY
) = (
'PRIMARY', 'SECONDARY'
)
properties_schema = {
# Based on RFC 1035, length of name is set to max of 255
NAME: properties.Schema(
properties.Schema.STRING,
_('DNS Name for the zone.'),
required=True,
constraints=[constraints.Length(max=255)]
),
# Based on RFC 1035, range for ttl is set to 1 to signed 32 bit number
TTL: properties.Schema(
properties.Schema.INTEGER,
_('Time To Live (Seconds) for the zone.'),
update_allowed=True,
constraints=[constraints.Range(min=1,
max=2147483647)]
),
# designate mandates to the max length of 160 for description
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of zone.'),
update_allowed=True,
constraints=[constraints.Length(max=160)]
),
EMAIL: properties.Schema(
properties.Schema.STRING,
_('E-mail for the zone. Used in SOA records for the zone. '
'It is required for PRIMARY Type, otherwise ignored.'),
update_allowed=True,
),
TYPE: properties.Schema(
properties.Schema.STRING,
_('Type of zone. PRIMARY is controlled by Designate, SECONDARY '
'zones are slaved from another DNS Server.'),
default=PRIMARY,
constraints=[constraints.AllowedValues(
allowed=TYPES)]
),
MASTERS: properties.Schema(
properties.Schema.LIST,
_('The servers to slave from to get DNS information and is '
'mandatory for zone type SECONDARY, otherwise ignored.'),
update_allowed=True
)
}
attributes_schema = {
SERIAL: attributes.Schema(
_("DNS zone serial number."),
type=attributes.Schema.STRING
),
}
default_client_name = 'designate'
entity = 'zones'
def client(self):
return super(DesignateZone,
self).client(version=self.client_plugin().V2)
def validate(self):
super(DesignateZone, self).validate()
def raise_invalid_exception(zone_type, prp):
if self.properties.get(self.TYPE) == zone_type:
if not self.properties.get(prp):
msg = _('Property %(prp)s is required for zone type '
'%(zone_type)s') % {
"prp": prp,
"zone_type": zone_type
}
raise exception.StackValidationFailed(message=msg)
raise_invalid_exception(self.PRIMARY, self.EMAIL)
raise_invalid_exception(self.SECONDARY, self.MASTERS)
def handle_create(self):
args = dict((k, v) for k, v in six.iteritems(self.properties) if v)
args['type_'] = args.pop(self.TYPE)
zone = self.client().zones.create(**args)
self.resource_id_set(zone['id'])
def _check_status_complete(self):
zone = self.client().zones.get(self.resource_id)
if zone['status'] == 'ERROR':
raise exception.ResourceInError(
resource_status=zone['status'],
status_reason=_('Error in zone'))
return zone['status'] != 'PENDING'
def check_create_complete(self, handler_data=None):
return self._check_status_complete()
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
args = dict()
for prp in (self.EMAIL, self.TTL, self.DESCRIPTION, self.MASTERS):
if prop_diff.get(prp):
args[prp] = prop_diff.get(prp)
        if args:
self.client().zones.update(self.resource_id, args)
def check_update_complete(self, handler_data=None):
return self._check_status_complete()
def _resolve_attribute(self, name):
if self.resource_id is None:
return
if name == self.SERIAL:
zone = self.client().zones.get(self.resource_id)
return zone[name]
def check_delete_complete(self, handler_data=None):
if handler_data:
with self.client_plugin().ignore_not_found:
return self._check_status_complete()
return True
def resource_mapping():
return {
'OS::Designate::Zone': DesignateZone
}
|
apache-2.0
| 857,568,286,610,432,500
| 31.716667
| 78
| 0.587366
| false
| 4.239741
| false
| false
| false
|
selboo/starl-mangle
|
webvirtmgr/webvirtmgr/server.py
|
1
|
30677
|
# Utility functions used for guest installation
#
import libvirt
from libvirt import VIR_DOMAIN_XML_SECURE
from network.IPy import IP
import re
import time
import libxml2
from datetime import datetime
import string
def get_xml_path(xml, path=None, func=None):
"""
Return the content from the passed xml xpath, or return the result
of a passed function (receives xpathContext as its only arg)
"""
doc = None
ctx = None
result = None
try:
doc = libxml2.parseDoc(xml)
ctx = doc.xpathNewContext()
if path:
ret = ctx.xpathEval(path)
if ret is not None:
if type(ret) == list:
if len(ret) >= 1:
result = ret[0].content
else:
result = ret
elif func:
result = func(ctx)
else:
raise ValueError("'path' or 'func' is required.")
finally:
if doc:
doc.freeDoc()
if ctx:
ctx.xpathFreeContext()
return result
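# A minimal sketch of a typical get_xml_path call (the XML literal below is
# illustrative, not taken from a live domain):
#
#   get_xml_path("<domain><name>test</name></domain>", "/domain/name")
#   # -> 'test'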
def network_size(net, dhcp=None):
"""
    Return gateway, netmask and DHCP pool for the given network.
"""
mask = IP(net).strNetmask()
addr = IP(net)
if addr[0].strNormal()[-1] == '0':
gateway = addr[1].strNormal()
dhcp_pool = [addr[2].strNormal(), addr[addr.len() - 2].strNormal()]
else:
gateway = addr[0].strNormal()
dhcp_pool = [addr[1].strNormal(), addr[addr.len() - 2].strNormal()]
if dhcp:
return gateway, mask, dhcp_pool
else:
return gateway, mask, None
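# Example (assuming network.IPy follows stock IPy semantics; these values
# are what the logic above yields for this input):
#
#   network_size('192.168.1.0/24', dhcp=True)
#   # -> ('192.168.1.1', '255.255.255.0', ['192.168.1.2', '192.168.1.254'])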
class ConnServer(object):
def __init__(self, host):
"""
Return connection object.
"""
self.login = host.login
self.host = host.hostname
self.passwd = host.password
self.type = host.type
self.port = host.port
if self.type == 'tcp':
def creds(credentials, user_data):
for credential in credentials:
if credential[0] == libvirt.VIR_CRED_AUTHNAME:
credential[4] = self.login
if len(credential[4]) == 0:
credential[4] = credential[3]
elif credential[0] == libvirt.VIR_CRED_PASSPHRASE:
credential[4] = self.passwd
else:
return -1
return 0
flags = [libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_PASSPHRASE]
auth = [flags, creds, None]
uri = 'qemu+tcp://%s/system' % self.host
self.conn = libvirt.openAuth(uri, auth, 0)
if self.type == 'ssh':
uri = 'qemu+ssh://%s@%s:%s/system' % (self.login, self.host, self.port)
self.conn = libvirt.open(uri)
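    # Illustrative usage (the `host` argument is assumed to be an object
    # exposing the hostname/login/password/type/port attributes read above):
    #
    #   conn = ConnServer(host)
    #   conn.vds_get_node()    # {'vm-name': state, ...}
    #   conn.close()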
def lookupVM(self, vname):
"""
Return VM object.
"""
try:
dom = self.conn.lookupByName(vname)
        except libvirt.libvirtError:
dom = None
return dom
def storagePool(self, storage):
"""
Return storage object.
"""
try:
stg = self.conn.storagePoolLookupByName(storage)
        except libvirt.libvirtError:
stg = None
return stg
def networkPool(self, network):
"""
Return network object.
"""
try:
net = self.conn.networkLookupByName(network)
        except libvirt.libvirtError:
net = None
return net
def storageVol(self, volume, storage):
"""
Return volume object.
"""
stg = self.storagePool(storage)
stg_type = get_xml_path(stg.XMLDesc(0), "/pool/@type")
if stg_type == 'dir':
volume += '.img'
stg_volume = stg.storageVolLookupByName(volume)
return stg_volume
def storageVolPath(self, volume):
"""
Return volume object by path.
"""
stg_volume = self.conn.storageVolLookupByPath(volume)
return stg_volume
def hard_accel_node(self):
"""
Check hardware acceleration.
"""
xml = self.conn.getCapabilities()
kvm = re.search('kvm', xml)
if kvm:
return True
else:
return False
def add_vm(self, name, ram, cpu, host_model, images, nets, virtio, storages, passwd=None):
"""
        Create a new VM, define its XML and enable autostart.
"""
ram = int(ram) * 1024
iskvm = re.search('kvm', self.conn.getCapabilities())
if iskvm:
dom_type = 'kvm'
else:
dom_type = 'qemu'
machine = get_xml_path(self.conn.getCapabilities(), "/capabilities/guest/arch/machine/@canonical")
if not machine:
machine = 'pc-1.0'
if re.findall('/usr/libexec/qemu-kvm', self.conn.getCapabilities()):
emulator = '/usr/libexec/qemu-kvm'
elif re.findall('/usr/bin/kvm', self.conn.getCapabilities()):
emulator = '/usr/bin/kvm'
elif re.findall('/usr/bin/qemu-kvm', self.conn.getCapabilities()):
emulator = '/usr/bin/qemu-kvm'
else:
emulator = '/usr/bin/qemu-system-x86_64'
disks = []
for image in images:
img = self.storageVolPath(image)
image_type = self.get_vol_image_type(storages, img.name())
disks.append({'image': image, 'type': image_type})
xml = """<domain type='%s'>
<name>%s</name>
<description>None</description>
<memory unit='KiB'>%s</memory>
<vcpu>%s</vcpu>""" % (dom_type, name, ram, cpu)
if host_model:
xml += """<cpu mode='host-model'/>"""
xml += """<os>
<type arch='x86_64' machine='%s'>hvm</type>
<boot dev='hd'/>
<boot dev='cdrom'/>
<bootmenu enable='yes'/>
</os>
<features>
<acpi/>
<apic/>
<pae/>
</features>
<clock offset='utc'/>
<on_poweroff>destroy</on_poweroff>
<on_reboot>restart</on_reboot>
<on_crash>restart</on_crash>
<devices>
<emulator>%s</emulator>""" % (machine, emulator)
disk_letters = list(string.lowercase)
for disk in disks:
xml += """<disk type='file' device='disk'>
<driver name='qemu' type='%s'/>
<source file='%s'/>""" % (disk['type'], disk['image'])
if virtio:
xml += """<target dev='vd%s' bus='virtio'/>""" % (disk_letters.pop(0),)
else:
xml += """<target dev='hd%s' bus='ide'/>""" % (disk_letters.pop(0),)
xml += """</disk>"""
xml += """<disk type='file' device='cdrom'>
<driver name='qemu' type='raw'/>
<source file=''/>
<target dev='sda' bus='ide'/>
<readonly/>
</disk>"""
for net in nets.split(','):
xml += """
<interface type='network'>
<source network='%s'/>""" % net
if virtio:
xml += """<model type='virtio'/>"""
xml += """
</interface>"""
xml += """
<input type='tablet' bus='usb'/>
<input type='mouse' bus='ps2'/>
<graphics type='vnc' port='-1' autoport='yes' listen='0.0.0.0' passwd='%s'>
<listen type='address' address='0.0.0.0'/>
</graphics>
<memballoon model='virtio'/>
</devices>
</domain>""" % (passwd)
self.conn.defineXML(xml)
dom = self.lookupVM(name)
dom.setAutostart(1)
def get_vol_image_type(self, storages, vol):
for storage in storages:
stg = self.storagePool(storage)
if stg.info()[0] != 0:
stg.refresh(0)
for img in stg.listVolumes():
if img == vol:
vol = stg.storageVolLookupByName(img)
xml = vol.XMLDesc(0)
image_type = get_xml_path(xml, "/volume/target/format/@type")
return image_type
def vds_get_node(self):
"""
        Return all VMs on the host mapped to their current state.
"""
vname = {}
for vm_id in self.conn.listDomainsID():
vm_id = int(vm_id)
dom = self.conn.lookupByID(vm_id)
vname[dom.name()] = dom.info()[0]
for name in self.conn.listDefinedDomains():
dom = self.lookupVM(name)
vname[dom.name()] = dom.info()[0]
return vname
def networks_get_node(self):
"""
Function return host server virtual networks.
"""
virtnet = {}
for network in self.conn.listNetworks():
net = self.conn.networkLookupByName(network)
status = net.isActive()
virtnet[network] = status
for network in self.conn.listDefinedNetworks():
net = self.networkPool(network)
status = net.isActive()
virtnet[network] = status
return virtnet
def storages_get_node(self):
"""
Function return host server storages.
"""
storages = {}
for storage in self.conn.listStoragePools():
stg = self.conn.storagePoolLookupByName(storage)
status = stg.isActive()
storages[storage] = status
for storage in self.conn.listDefinedStoragePools():
stg = self.storagePool(storage)
status = stg.isActive()
storages[storage] = status
return storages
def node_get_info(self):
"""
Function return host server information: hostname, cpu, memory, ...
"""
info = []
info.append(self.conn.getHostname())
info.append(self.conn.getInfo()[0])
info.append(self.conn.getInfo()[2])
try:
info.append(get_xml_path(self.conn.getSysinfo(0),
"/sysinfo/processor/entry[6]"))
except:
info.append('Unknown')
info.append(self.conn.getURI())
info.append(self.conn.getLibVersion())
return info
def memory_get_usage(self):
"""
Function return memory usage on node.
"""
allmem = self.conn.getInfo()[1] * 1048576
get_freemem = self.conn.getMemoryStats(-1, 0)
if type(get_freemem) == dict:
freemem = (get_freemem.values()[0] + \
get_freemem.values()[2] + \
get_freemem.values()[3]) * 1024
percent = (freemem * 100) / allmem
percent = 100 - percent
memusage = (allmem - freemem)
else:
memusage = None
percent = None
return allmem, memusage, percent
def cpu_get_usage(self):
"""
Function return cpu usage on node.
"""
prev_idle = 0
prev_total = 0
cpu = self.conn.getCPUStats(-1, 0)
if type(cpu) == dict:
for num in range(2):
idle = self.conn.getCPUStats(-1, 0).values()[1]
total = sum(self.conn.getCPUStats(-1, 0).values())
diff_idle = idle - prev_idle
diff_total = total - prev_total
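                # Percent busy = (diff_total - diff_idle) / diff_total * 100,
                # computed in tenths of a percent with +5 for rounding.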
diff_usage = (1000 * (diff_total - diff_idle) / diff_total + 5) / 10
prev_total = total
prev_idle = idle
if num == 0:
time.sleep(1)
else:
if diff_usage < 0:
diff_usage = 0
else:
diff_usage = None
return diff_usage
def new_volume(self, storage, name, size, format='qcow2'):
"""
Add new volume in storage
"""
stg = self.storagePool(storage)
size = int(size) * 1073741824
stg_type = get_xml_path(stg.XMLDesc(0), "/pool/@type")
if stg_type == 'dir':
name += '.img'
alloc = 0
else:
alloc = size
xml = """
<volume>
<name>%s</name>
<capacity>%s</capacity>
<allocation>%s</allocation>
<target>
<format type='%s'/>
</target>
</volume>""" % (name, size, alloc, format)
stg.createXML(xml, 0)
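    # Example call (pool name and sizes are hypothetical); in a 'dir' pool
    # this creates a sparse 10 GiB qcow2 file named 'disk1.img':
    #
    #   conn.new_volume('default', 'disk1', 10)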
def clone_volume(self, storage, img, new_img, format=None):
"""
Function clone volume
"""
stg = self.storagePool(storage)
stg_type = get_xml_path(stg.XMLDesc(0), "/pool/@type")
if stg_type == 'dir':
new_img += '.img'
vol = stg.storageVolLookupByName(img)
if not format:
xml = vol.XMLDesc(0)
format = get_xml_path(xml, "/volume/target/format/@type")
xml = """
<volume>
<name>%s</name>
<capacity>0</capacity>
<allocation>0</allocation>
<target>
<format type='%s'/>
</target>
</volume>""" % (new_img, format)
stg.createXMLFrom(xml, vol, 0)
def images_get_storages(self, storages):
"""
Function return all images on all storages
"""
disk = []
for storage in storages:
stg = self.storagePool(storage)
if stg.info()[0] != 0:
stg.refresh(0)
for img in stg.listVolumes():
if re.findall(".img", img):
disk.append(img)
return disk
def image_get_path(self, vol, storages):
"""
Function return volume path.
"""
for storage in storages:
stg = self.storagePool(storage)
for img in stg.listVolumes():
if vol == img:
stg_volume = stg.storageVolLookupByName(vol)
return stg_volume.path()
def storage_get_info(self, storage):
"""
Function return storage info.
"""
stg = self.storagePool(storage)
if stg:
if stg.info()[3] == 0:
percent = 0
else:
percent = (stg.info()[2] * 100) / stg.info()[1]
info = stg.info()[1:4]
info.append(int(percent))
info.append(stg.isActive())
xml = stg.XMLDesc(0)
info.append(get_xml_path(xml, "/pool/@type"))
info.append(get_xml_path(xml, "/pool/target/path"))
else:
info = [None] * 7
return info
def new_storage_pool(self, type_pool, name, source, target):
"""
Function create storage pool.
"""
xml = """
<pool type='%s'>
<name>%s</name>""" % (type_pool, name)
if type_pool == 'logical':
xml += """
<source>
<device path='%s'/>
<name>%s</name>
<format type='lvm2'/>
</source>""" % (source, name)
if type_pool == 'logical':
target = '/dev/' + name
xml += """
<target>
<path>%s</path>
</target>
</pool>""" % target
self.conn.storagePoolDefineXML(xml, 0)
stg = self.storagePool(name)
if type_pool == 'logical':
stg.build(0)
stg.create(0)
stg.setAutostart(1)
def volumes_get_info(self, storage):
"""
Function return volume info.
"""
stg = self.storagePool(storage)
volume_info = {}
for name in stg.listVolumes():
if re.findall(".img", name) or re.findall(".iso", name):
vol = stg.storageVolLookupByName(name)
xml = vol.XMLDesc(0)
size = vol.info()[1]
volume_format = get_xml_path(xml, "/volume/target/format/@type")
volume_info[name] = size, volume_format
return volume_info
def new_network_pool(self, name, forward, gateway, mask, dhcp, bridge_name):
"""
Function create network pool.
"""
xml = """
<network>
<name>%s</name>""" % name
if forward in ['nat', 'route', 'bridge']:
xml += """<forward mode='%s'/>""" % forward
xml += """<bridge """
if forward in ['nat', 'route', 'none']:
xml += """stp='on' delay='0'"""
if forward == 'bridge':
xml += """name='%s'""" % bridge_name
xml += """/>"""
if forward != 'bridge':
xml += """
<ip address='%s' netmask='%s'>""" % (gateway, mask)
if dhcp:
xml += """<dhcp>
<range start='%s' end='%s' />
</dhcp>""" % (dhcp[0], dhcp[1])
xml += """</ip>"""
xml += """</network>"""
self.conn.networkDefineXML(xml)
net = self.networkPool(name)
net.create()
net.setAutostart(1)
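    # Example call (all values hypothetical); defines a NAT network with a
    # DHCP range, starts it and enables autostart:
    #
    #   conn.new_network_pool('testnet', 'nat', '192.168.100.1',
    #                         '255.255.255.0',
    #                         ['192.168.100.2', '192.168.100.254'], None)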
def network_get_info(self, network):
"""
Function return network info.
"""
info = []
net = self.networkPool(network)
if net:
info.append(net.isActive())
info.append(net.bridgeName())
else:
info = [None] * 2
return info
def network_get_subnet(self, network):
"""
Function return virtual network info: ip, netmask, dhcp, type forward.
"""
net = self.networkPool(network)
xml_net = net.XMLDesc(0)
ipv4 = []
fw_type = get_xml_path(xml_net, "/network/forward/@mode")
fw_dev = get_xml_path(xml_net, "/network/forward/@dev")
if fw_type:
ipv4.append([fw_type, fw_dev])
else:
ipv4.append(None)
# Subnet block
addr_str = get_xml_path(xml_net, "/network/ip/@address")
mask_str = get_xml_path(xml_net, "/network/ip/@netmask")
if addr_str and mask_str:
netmask = IP(mask_str)
gateway = IP(addr_str)
network = IP(gateway.int() & netmask.int())
ipv4.append(IP(str(network) + "/" + mask_str))
else:
ipv4.append(None)
# DHCP block
dhcp_start = get_xml_path(xml_net, "/network/ip/dhcp/range[1]/@start")
dhcp_end = get_xml_path(xml_net, "/network/ip/dhcp/range[1]/@end")
if not dhcp_start or not dhcp_end:
pass
else:
ipv4.append([IP(dhcp_start), IP(dhcp_end)])
return ipv4
def snapshots_get_node(self):
"""
        Return all VMs on the node that have snapshots.
"""
vname = {}
for vm_id in self.conn.listDomainsID():
vm_id = int(vm_id)
dom = self.conn.lookupByID(vm_id)
if dom.snapshotNum(0) != 0:
vname[dom.name()] = dom.info()[0]
for name in self.conn.listDefinedDomains():
dom = self.lookupVM(name)
if dom.snapshotNum(0) != 0:
vname[dom.name()] = dom.info()[0]
return vname
def snapshots_get_vds(self, vname):
"""
        Return all snapshots of the given VM.
"""
snapshots = {}
dom = self.lookupVM(vname)
all_snapshot = dom.snapshotListNames(0)
for snapshot in all_snapshot:
snapshots[snapshot] = (datetime.fromtimestamp(int(snapshot)), dom.info()[0])
return snapshots
def snapshot_delete(self, vname, name_snap):
"""
        Delete the given VM snapshot.
"""
dom = self.lookupVM(vname)
snap = dom.snapshotLookupByName(name_snap, 0)
snap.delete(0)
def snapshot_revert(self, vname, name_snap):
"""
        Revert the VM to the given snapshot.
"""
dom = self.lookupVM(vname)
snap = dom.snapshotLookupByName(name_snap, 0)
dom.revertToSnapshot(snap, 0)
def vnc_get_port(self, vname):
"""
        Return the VNC port of the VM.
"""
dom = self.lookupVM(vname)
port = get_xml_path(dom.XMLDesc(0), "/domain/devices/graphics/@port")
return port
def vds_mount_iso(self, vname, image):
"""
        Mount an ISO image on the VM and update its XML config.
"""
storages = self.storages_get_node()
dom = self.lookupVM(vname)
for storage in storages:
stg = self.storagePool(storage)
for img in stg.listVolumes():
if image == img:
if dom.info()[0] == 1:
vol = stg.storageVolLookupByName(image)
xml = """<disk type='file' device='cdrom'>
<driver name='qemu' type='raw'/>
<target dev='sda' bus='ide'/>
<source file='%s'/>
</disk>""" % vol.path()
dom.attachDevice(xml)
xmldom = dom.XMLDesc(VIR_DOMAIN_XML_SECURE)
self.conn.defineXML(xmldom)
if dom.info()[0] == 5:
vol = stg.storageVolLookupByName(image)
xml = dom.XMLDesc(VIR_DOMAIN_XML_SECURE)
newxml = "<disk type='file' device='cdrom'>\n <driver name='qemu' type='raw'/>\n <source file='%s'/>" % vol.path()
xmldom = xml.replace(
"<disk type='file' device='cdrom'>\n <driver name='qemu' type='raw'/>", newxml)
self.conn.defineXML(xmldom)
def vds_umount_iso(self, vname, image):
"""
        Unmount the ISO image from the VM and update its XML config.
"""
dom = self.lookupVM(vname)
if dom.info()[0] == 1:
xml = """<disk type='file' device='cdrom'>
<driver name="qemu" type='raw'/>
<target dev='sda' bus='ide'/>
<readonly/>
</disk>"""
dom.attachDevice(xml)
xmldom = dom.XMLDesc(VIR_DOMAIN_XML_SECURE)
self.conn.defineXML(xmldom)
if dom.info()[0] == 5:
xml = dom.XMLDesc(VIR_DOMAIN_XML_SECURE)
xmldom = xml.replace("<source file='%s'/>\n" % image, '')
self.conn.defineXML(xmldom)
def vds_cpu_usage(self, vname):
"""
Function return vds cpu usage.
"""
dom = self.lookupVM(vname)
if dom.info()[0] == 1:
nbcore = self.conn.getInfo()[2]
cpu_use_ago = dom.info()[4]
time.sleep(1)
cpu_use_now = dom.info()[4]
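            # dom.info()[4] is cumulative CPU time in nanoseconds, so over
            # this 1-second window: usage% = delta / (cores * 1e9) * 100.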
diff_usage = cpu_use_now - cpu_use_ago
cpu_usage = 100 * diff_usage / (1 * nbcore * 10 ** 9L)
else:
cpu_usage = 0
return cpu_usage
def vds_memory_usage(self, vname):
"""
Function return vds memory usage.
"""
dom = self.lookupVM(vname)
allmem = self.conn.getInfo()[1] * 1048576
if dom.info()[0] == 1:
dom_mem = dom.info()[1] * 1024
percent = (dom_mem * 100) / allmem
else:
percent = 0
return allmem, percent
def vds_get_info(self, vname):
"""
Function return vds info.
"""
info = []
dom = self.lookupVM(vname)
xml = dom.XMLDesc(0)
info.append(get_xml_path(xml, "/domain/vcpu"))
mem = get_xml_path(xml, "/domain/memory")
mem = int(mem) / 1024
info.append(int(mem))
def get_networks(ctx):
result = []
for interface in ctx.xpathEval('/domain/devices/interface'):
mac = interface.xpathEval('mac/@address')[0].content
nic = interface.xpathEval('source/@network|source/@bridge')[0].content
result.append({'mac': mac, 'nic': nic})
return result
info.append(get_xml_path(xml, func=get_networks))
description = get_xml_path(xml, "/domain/description")
info.append(description)
return info
def vds_get_hdd(self, vname):
"""
Function return vds hdd info.
"""
all_hdd_dev = {}
storages = self.storages_get_node()
dom = self.lookupVM(vname)
xml = dom.XMLDesc(0)
for num in range(1, 5):
hdd_dev = get_xml_path(xml, "/domain/devices/disk[%s]/@device" % (num))
if hdd_dev == 'disk':
dev_bus = get_xml_path(xml, "/domain/devices/disk[%s]/target/@dev" % (num))
hdd = get_xml_path(xml, "/domain/devices/disk[%s]/source/@file" % (num))
# If xml create custom
if not hdd:
hdd = get_xml_path(xml, "/domain/devices/disk[%s]/source/@dev" % (num))
try:
img = self.storageVolPath(hdd)
img_vol = img.name()
for storage in storages:
stg = self.storagePool(storage)
if stg.info()[0] != 0:
stg.refresh(0)
for img in stg.listVolumes():
if img == img_vol:
vol = img
vol_stg = storage
all_hdd_dev[dev_bus] = vol, vol_stg
except:
all_hdd_dev[dev_bus] = hdd, 'Not in the pool'
return all_hdd_dev
def vds_get_media(self, vname):
"""
Function return vds media info.
"""
dom = self.lookupVM(vname)
xml = dom.XMLDesc(0)
for num in range(1, 5):
hdd_dev = get_xml_path(xml, "/domain/devices/disk[%s]/@device" % (num))
if hdd_dev == 'cdrom':
media = get_xml_path(xml, "/domain/devices/disk[%s]/source/@file" % (num))
if media:
try:
vol = self.storageVolPath(media)
return vol.name(), vol.path()
except:
return media, media
else:
return None, None
return None, None
def vds_set_vnc_passwd(self, vname, passwd):
"""
Function set vnc password to vds.
"""
dom = self.lookupVM(vname)
xml = dom.XMLDesc(VIR_DOMAIN_XML_SECURE)
find_tag = re.findall('<graphics.*/>', xml)
if find_tag:
close_tag = '/'
else:
close_tag = ''
newxml = "<graphics type='vnc' passwd='%s'%s>" % (passwd, close_tag)
xmldom = re.sub('<graphics.*>', newxml, xml)
self.conn.defineXML(xmldom)
def vds_edit(self, vname, description, ram, vcpu):
"""
Function change ram and cpu on vds.
"""
dom = self.lookupVM(vname)
xml = dom.XMLDesc(VIR_DOMAIN_XML_SECURE)
memory = int(ram) * 1024
xml_memory = "<memory unit='KiB'>%s</memory>" % memory
xml_memory_change = re.sub('<memory.*memory>', xml_memory, xml)
xml_curmemory = "<currentMemory unit='KiB'>%s</currentMemory>" % memory
xml_curmemory_change = re.sub('<currentMemory.*currentMemory>', xml_curmemory, xml_memory_change)
xml_vcpu = "<vcpu>%s</vcpu>" % vcpu
xml_vcpu_change = re.sub('<vcpu.*vcpu>', xml_vcpu, xml_curmemory_change)
xml_description = "<description>%s</description>" % description
xml_description_change = re.sub('<description.*description>', xml_description, xml_vcpu_change)
self.conn.defineXML(xml_description_change)
def defineXML(self, xml):
"""
        Define a VM from the given XML config.
"""
self.conn.defineXML(xml)
def get_all_media(self):
"""
Function return all media.
"""
iso = []
storages = self.storages_get_node()
for storage in storages:
stg = self.storagePool(storage)
if stg.info()[0] != 0:
stg.refresh(0)
for img in stg.listVolumes():
if re.findall(".iso", img):
iso.append(img)
return iso
def vds_remove_hdd(self, vname):
"""
Function delete vds hdd.
"""
dom = self.lookupVM(vname)
img = get_xml_path(dom.XMLDesc(0), "/domain/devices/disk[1]/source/@file")
vol = self.storageVolPath(img)
vol.delete(0)
def vds_create_snapshot(self, vname):
"""
Function create vds snapshot.
"""
dom = self.lookupVM(vname)
xml = """<domainsnapshot>\n
<name>%d</name>\n
<state>shutoff</state>\n
<creationTime>%d</creationTime>\n""" % (time.time(), time.time())
xml += dom.XMLDesc(VIR_DOMAIN_XML_SECURE)
xml += """<active>0</active>\n
</domainsnapshot>"""
dom.snapshotCreateXML(xml, 0)
def vds_on_cluster(self):
"""
        Return all VMs with state, vcpu, memory and memory usage
        for the cluster view.
"""
vname = {}
host_mem = self.conn.getInfo()[1] * 1048576
for vm_id in self.conn.listDomainsID():
vm_id = int(vm_id)
dom = self.conn.lookupByID(vm_id)
mem = get_xml_path(dom.XMLDesc(0), "/domain/memory")
mem = int(mem) * 1024
mem_usage = (mem * 100) / host_mem
vcpu = get_xml_path(dom.XMLDesc(0), "/domain/vcpu")
vname[dom.name()] = (dom.info()[0], vcpu, mem, mem_usage)
for name in self.conn.listDefinedDomains():
dom = self.lookupVM(name)
mem = get_xml_path(dom.XMLDesc(0), "/domain/memory")
mem = int(mem) * 1024
mem_usage = (mem * 100) / host_mem
vcpu = get_xml_path(dom.XMLDesc(0), "/domain/vcpu")
vname[dom.name()] = (dom.info()[0], vcpu, mem, mem_usage)
return vname
def close(self):
"""
Close libvirt connection.
"""
self.conn.close()
|
apache-2.0
| -4,393,413,968,680,874,500
| 29.800201
| 148
| 0.478991
| false
| 3.969591
| false
| false
| false
|
mitodl/open-discussions
|
open_discussions/authentication.py
|
1
|
2405
|
"""Custom authentication for DRF"""
import logging
from django.contrib.auth import get_user_model
import jwt
from rest_framework.authentication import BaseAuthentication
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
User = get_user_model()
HEADER_PREFIX = "Token "
HEADER_PREFIX_LENGTH = len(HEADER_PREFIX)
logger = logging.getLogger()
class IgnoreExpiredJwtAuthentication(JSONWebTokenAuthentication):
"""Version of JSONWebTokenAuthentication that ignores JWT values if they're expired"""
def get_jwt_value(self, request):
"""Returns the JWT values as long as it's not expired"""
value = super().get_jwt_value(request)
try:
# try to decode the value just to see if it's expired
from rest_framework_jwt.settings import api_settings
jwt_decode_handler = api_settings.JWT_DECODE_HANDLER
jwt_decode_handler(value)
except jwt.ExpiredSignature:
# if it is expired, treat it as if the user never passed a token
logger.debug("Ignoring expired JWT")
return None
except: # pylint: disable=bare-except
# we're only interested in jwt.ExpiredSignature above
# exception handling in general is already handled in the base class
pass
return value
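# A minimal sketch of wiring this class into DRF (the dotted settings path
# is hypothetical):
#
#   REST_FRAMEWORK = {
#       'DEFAULT_AUTHENTICATION_CLASSES': (
#           'open_discussions.authentication.IgnoreExpiredJwtAuthentication',
#       ),
#   }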
class StatelessTokenAuthentication(BaseAuthentication):
"""
    Stateless authentication via an authorization token
NOTE: this is a highly trusting version of authentication and should only be
used for certain things such as email unsubscribes
"""
def authenticate(self, request):
"""
Attempts to authenticate using a stateless token
"""
from open_discussions.auth_utils import unsign_and_verify_username_from_token
if "HTTP_AUTHORIZATION" in request.META:
header_value = request.META["HTTP_AUTHORIZATION"]
if not header_value.startswith(HEADER_PREFIX):
return None
token = header_value[HEADER_PREFIX_LENGTH:]
username = unsign_and_verify_username_from_token(token)
if not username:
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
return (user, None)
return None
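# The request header consumed above (the token value is hypothetical):
#
#   Authorization: Token <signed-username-token>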
|
bsd-3-clause
| -7,893,392,934,490,459,000
| 30.233766
| 90
| 0.656133
| false
| 4.625
| false
| false
| false
|
UnitedThruAction/Data
|
Tools/StandardizeAddress.py
|
1
|
4188
|
"""Standardize a list of addresses using the USPS API.
Multi-threaded, since the API response time is slow.
Get an API key at https://registration.shippingapis.com.
"""
from __future__ import print_function
import threading
import sys
import pandas as pd
from tqdm import tqdm
from collections import deque
from pyusps import address_information
NUM_THREADS = 100
def standardize_address(
df,
type='vf',
col1=None,
col2=None,
key=None,
usps_key=None,
new_col='standardized_address'):
"""Standardize a list of addresses using the USPS API.
Arguments:
df: a DataFrame of data
        type: 'vf' (NY State Voter File) or 'raw' (two free-form
            address columns)
col1: if using 'raw', column name for first line of address
col2: if using 'raw', column name for second line of address
key: if using 'raw', column name for the key to lookup on
usps_key: USPS API key
new_col: name of new column to add."""
threads = deque()
results = {}
    for obj in tqdm(df.iterrows(), total=df.shape[0]):
        row = obj[1]
        if len(threads) >= NUM_THREADS:
            # Wait for the oldest worker before dispatching this row, so
            # that no row is silently skipped when the pool is full.
            t = threads.popleft()
            t.join()
        if type == 'vf':
            t = threading.Thread(
                target=vf_standardize_address, args=(
                    row, results, usps_key))
        elif type == 'raw':
            t = threading.Thread(
                target=gen_standardize_address, args=(
                    row[col1], row[col2], row[key], results, usps_key))
        else:
            raise Exception("type not recognized")
        t.start()
        threads.append(t)
while threads:
t = threads.popleft()
t.join()
sys.stderr.flush()
sys.stdout.flush()
if type == 'vf':
df[new_col] = df['SBOEID'].map(results)
elif type == 'raw':
df[new_col] = df[key].map(results)
def vf_standardize_address(row, results, usps_key):
"""Used for the NY State Voter File only."""
rhalfcode = '' if pd.isnull(row['RHALFCODE']) else row['RHALFCODE']
raddnumber = '' if pd.isnull(row['RADDNUMBER']) else row['RADDNUMBER']
rpredirection = '' if pd.isnull(
row['RPREDIRECTION']) else row['RPREDIRECTION']
rstreetname = '' if pd.isnull(row['RSTREETNAME']) else row['RSTREETNAME']
rpostdirection = '' if pd.isnull(
row['RPOSTDIRECTION']) else row['RPOSTDIRECTION']
rapartment = '' if pd.isnull(row['RAPARTMENT']) else row['RAPARTMENT']
if ('APT' in str(row['RAPARTMENT']).upper()) \
or ('UNIT' in str(row['RAPARTMENT']).upper()) \
or (row['RAPARTMENT'] == ''):
address = "{} {} {} {} {} {}".format(
raddnumber,
rhalfcode,
rpredirection,
rstreetname,
rpostdirection,
rapartment)
else:
address = "{} {} {} {} {} APT {}".format(
raddnumber,
rhalfcode,
rpredirection,
rstreetname,
rpostdirection,
rapartment)
try:
address = address.upper()
addr = {'address': address, 'city': row['RCITY'], 'state': 'NY'}
result = address_information.verify(usps_key, addr)
zip4 = "-{}".format(result['zip4']) if result['zip4'] else ''
results[row['SBOEID']] = "{}, {} {} {}{}".format(
result['address'], result['city'], result['state'], result['zip5'], zip4)
except Exception:
results[row['SBOEID']] = address
def gen_standardize_address(addr1, addr2, key, results, usps_key):
addr = {'address': addr1, 'city': addr2, 'state': 'NY'}
try:
result = address_information.verify(usps_key, addr)
zip4 = "-{}".format(result['zip4']) if ('zip4' in result) and result['zip4'] else ''
results[key] = "{}, {} {} {}{}".format(
result['address'],
result['city'],
result['state'],
result['zip5'],
zip4)
except Exception as e:
results[key] = "{}, {}".format(addr1, addr2)
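# A minimal usage sketch (file name, column names and the API key are
# hypothetical):
#
#   df = pd.read_csv('addresses.csv')
#   standardize_address(df, type='raw', col1='addr1', col2='city',
#                       key='id', usps_key='YOUR_USPS_KEY')
#   df['standardized_address'].head()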
|
apache-2.0
| 227,579,348,872,957,340
| 32.504
| 92
| 0.546323
| false
| 3.706195
| false
| false
| false
|
pombredanne/pytype
|
pytype/pyc/opcodes_test.py
|
1
|
52430
|
from pytype.pyc import opcodes
import unittest
class Python2Test(unittest.TestCase):
"""Test bytecodes.dis for Python 2 opcodes."""
PYTHON_VERSION = (2, 7, 6)
def dis(self, data):
return opcodes.dis(data, self.PYTHON_VERSION)
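  # Each test feeds raw CPython 2.7 bytecode as a byte string: a bare byte
  # such as '\x01' is an argument-less opcode (POP_TOP), while opcodes at
  # or above 0x5a, e.g. 'Z\x00\x00' (STORE_NAME), carry a 16-bit
  # little-endian argument.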
def test_stop_code(self):
self.assertEquals(self.dis('\x00')[0].name, 'STOP_CODE')
def test_pop_top(self):
self.assertEquals(self.dis('\x01')[0].name, 'POP_TOP')
def test_rot_two(self):
self.assertEquals(self.dis('\x02')[0].name, 'ROT_TWO')
def test_rot_three(self):
self.assertEquals(self.dis('\x03')[0].name, 'ROT_THREE')
def test_dup_top(self):
self.assertEquals(self.dis('\x04')[0].name, 'DUP_TOP')
def test_rot_four(self):
self.assertEquals(self.dis('\x05')[0].name, 'ROT_FOUR')
def test_nop(self):
self.assertEquals(self.dis('\t')[0].name, 'NOP')
def test_unary_positive(self):
self.assertEquals(self.dis('\n')[0].name, 'UNARY_POSITIVE')
def test_unary_negative(self):
self.assertEquals(self.dis('\x0b')[0].name, 'UNARY_NEGATIVE')
def test_unary_not(self):
self.assertEquals(self.dis('\x0c')[0].name, 'UNARY_NOT')
def test_unary_convert(self):
self.assertEquals(self.dis('\r')[0].name, 'UNARY_CONVERT')
def test_unary_invert(self):
self.assertEquals(self.dis('\x0f')[0].name, 'UNARY_INVERT')
def test_binary_power(self):
self.assertEquals(self.dis('\x13')[0].name, 'BINARY_POWER')
def test_binary_multiply(self):
self.assertEquals(self.dis('\x14')[0].name, 'BINARY_MULTIPLY')
def test_binary_divide(self):
self.assertEquals(self.dis('\x15')[0].name, 'BINARY_DIVIDE')
def test_binary_modulo(self):
self.assertEquals(self.dis('\x16')[0].name, 'BINARY_MODULO')
def test_binary_add(self):
self.assertEquals(self.dis('\x17')[0].name, 'BINARY_ADD')
def test_binary_subtract(self):
self.assertEquals(self.dis('\x18')[0].name, 'BINARY_SUBTRACT')
def test_binary_subscr(self):
self.assertEquals(self.dis('\x19')[0].name, 'BINARY_SUBSCR')
def test_binary_floor_divide(self):
self.assertEquals(self.dis('\x1a')[0].name, 'BINARY_FLOOR_DIVIDE')
def test_binary_true_divide(self):
self.assertEquals(self.dis('\x1b')[0].name, 'BINARY_TRUE_DIVIDE')
def test_inplace_floor_divide(self):
self.assertEquals(self.dis('\x1c')[0].name, 'INPLACE_FLOOR_DIVIDE')
def test_inplace_true_divide(self):
self.assertEquals(self.dis('\x1d')[0].name, 'INPLACE_TRUE_DIVIDE')
def test_slice_0(self):
self.assertEquals(self.dis('\x1e')[0].name, 'SLICE_0')
def test_slice_1(self):
self.assertEquals(self.dis('\x1f')[0].name, 'SLICE_1')
def test_slice_2(self):
self.assertEquals(self.dis(' ')[0].name, 'SLICE_2')
def test_slice_3(self):
self.assertEquals(self.dis('!')[0].name, 'SLICE_3')
def test_store_slice_0(self):
self.assertEquals(self.dis('(')[0].name, 'STORE_SLICE_0')
def test_store_slice_1(self):
self.assertEquals(self.dis(')')[0].name, 'STORE_SLICE_1')
def test_store_slice_2(self):
self.assertEquals(self.dis('*')[0].name, 'STORE_SLICE_2')
def test_store_slice_3(self):
self.assertEquals(self.dis('+')[0].name, 'STORE_SLICE_3')
def test_delete_slice_0(self):
self.assertEquals(self.dis('2')[0].name, 'DELETE_SLICE_0')
def test_delete_slice_1(self):
self.assertEquals(self.dis('3')[0].name, 'DELETE_SLICE_1')
def test_delete_slice_2(self):
self.assertEquals(self.dis('4')[0].name, 'DELETE_SLICE_2')
def test_delete_slice_3(self):
self.assertEquals(self.dis('5')[0].name, 'DELETE_SLICE_3')
def test_store_map(self):
self.assertEquals(self.dis('6')[0].name, 'STORE_MAP')
def test_inplace_add(self):
self.assertEquals(self.dis('7')[0].name, 'INPLACE_ADD')
def test_inplace_subtract(self):
self.assertEquals(self.dis('8')[0].name, 'INPLACE_SUBTRACT')
def test_inplace_multiply(self):
self.assertEquals(self.dis('9')[0].name, 'INPLACE_MULTIPLY')
def test_inplace_divide(self):
self.assertEquals(self.dis(':')[0].name, 'INPLACE_DIVIDE')
def test_inplace_modulo(self):
self.assertEquals(self.dis(';')[0].name, 'INPLACE_MODULO')
def test_store_subscr(self):
self.assertEquals(self.dis('<')[0].name, 'STORE_SUBSCR')
def test_delete_subscr(self):
self.assertEquals(self.dis('=')[0].name, 'DELETE_SUBSCR')
def test_binary_lshift(self):
self.assertEquals(self.dis('>')[0].name, 'BINARY_LSHIFT')
def test_binary_rshift(self):
self.assertEquals(self.dis('?')[0].name, 'BINARY_RSHIFT')
def test_binary_and(self):
self.assertEquals(self.dis('@')[0].name, 'BINARY_AND')
def test_binary_xor(self):
self.assertEquals(self.dis('A')[0].name, 'BINARY_XOR')
def test_binary_or(self):
self.assertEquals(self.dis('B')[0].name, 'BINARY_OR')
def test_inplace_power(self):
self.assertEquals(self.dis('C')[0].name, 'INPLACE_POWER')
def test_get_iter(self):
self.assertEquals(self.dis('D')[0].name, 'GET_ITER')
def test_print_expr(self):
self.assertEquals(self.dis('F')[0].name, 'PRINT_EXPR')
def test_print_item(self):
self.assertEquals(self.dis('G')[0].name, 'PRINT_ITEM')
def test_print_newline(self):
self.assertEquals(self.dis('H')[0].name, 'PRINT_NEWLINE')
def test_print_item_to(self):
self.assertEquals(self.dis('I')[0].name, 'PRINT_ITEM_TO')
def test_print_newline_to(self):
self.assertEquals(self.dis('J')[0].name, 'PRINT_NEWLINE_TO')
def test_inplace_lshift(self):
self.assertEquals(self.dis('K')[0].name, 'INPLACE_LSHIFT')
def test_inplace_rshift(self):
self.assertEquals(self.dis('L')[0].name, 'INPLACE_RSHIFT')
def test_inplace_and(self):
self.assertEquals(self.dis('M')[0].name, 'INPLACE_AND')
def test_inplace_xor(self):
self.assertEquals(self.dis('N')[0].name, 'INPLACE_XOR')
def test_inplace_or(self):
self.assertEquals(self.dis('O')[0].name, 'INPLACE_OR')
def test_break_loop(self):
self.assertEquals(self.dis('P')[0].name, 'BREAK_LOOP')
def test_with_cleanup(self):
self.assertEquals(self.dis('Q')[0].name, 'WITH_CLEANUP')
def test_load_locals(self):
self.assertEquals(self.dis('R')[0].name, 'LOAD_LOCALS')
def test_return_value(self):
self.assertEquals(self.dis('S')[0].name, 'RETURN_VALUE')
def test_import_star(self):
self.assertEquals(self.dis('T')[0].name, 'IMPORT_STAR')
def test_exec_stmt(self):
self.assertEquals(self.dis('U')[0].name, 'EXEC_STMT')
def test_yield_value(self):
self.assertEquals(self.dis('V')[0].name, 'YIELD_VALUE')
def test_pop_block(self):
self.assertEquals(self.dis('W')[0].name, 'POP_BLOCK')
def test_end_finally(self):
self.assertEquals(self.dis('X')[0].name, 'END_FINALLY')
def test_build_class(self):
self.assertEquals(self.dis('Y')[0].name, 'BUILD_CLASS')
def test_store_name(self):
self.assertEquals(self.dis('Z\x00\x00')[0].name, 'STORE_NAME')
def test_delete_name(self):
self.assertEquals(self.dis('[\x00\x00')[0].name, 'DELETE_NAME')
def test_unpack_sequence(self):
self.assertEquals(self.dis('\\\x00\x00')[0].name, 'UNPACK_SEQUENCE')
def test_for_iter(self):
self.assertEquals(self.dis(']\x00\x00\t')[0].name, 'FOR_ITER')
def test_list_append(self):
self.assertEquals(self.dis('^\x00\x00')[0].name, 'LIST_APPEND')
def test_store_attr(self):
self.assertEquals(self.dis('_\x00\x00')[0].name, 'STORE_ATTR')
def test_delete_attr(self):
self.assertEquals(self.dis('`\x00\x00')[0].name, 'DELETE_ATTR')
def test_store_global(self):
self.assertEquals(self.dis('a\x00\x00')[0].name, 'STORE_GLOBAL')
def test_delete_global(self):
self.assertEquals(self.dis('b\x00\x00')[0].name, 'DELETE_GLOBAL')
def test_dup_topx(self):
self.assertEquals(self.dis('c\x00\x00')[0].name, 'DUP_TOPX')
def test_load_const(self):
self.assertEquals(self.dis('d\x00\x00')[0].name, 'LOAD_CONST')
def test_load_name(self):
self.assertEquals(self.dis('e\x00\x00')[0].name, 'LOAD_NAME')
def test_build_tuple(self):
self.assertEquals(self.dis('f\x00\x00')[0].name, 'BUILD_TUPLE')
def test_build_list(self):
self.assertEquals(self.dis('g\x00\x00')[0].name, 'BUILD_LIST')
def test_build_set(self):
self.assertEquals(self.dis('h\x00\x00')[0].name, 'BUILD_SET')
def test_build_map(self):
self.assertEquals(self.dis('i\x00\x00')[0].name, 'BUILD_MAP')
def test_load_attr(self):
self.assertEquals(self.dis('j\x00\x00')[0].name, 'LOAD_ATTR')
def test_compare_op(self):
self.assertEquals(self.dis('k\x00\x00')[0].name, 'COMPARE_OP')
def test_import_name(self):
self.assertEquals(self.dis('l\x00\x00')[0].name, 'IMPORT_NAME')
def test_import_from(self):
self.assertEquals(self.dis('m\x00\x00')[0].name, 'IMPORT_FROM')
def test_jump_forward(self):
self.assertEquals(self.dis('n\x00\x00\t')[0].name, 'JUMP_FORWARD')
def test_jump_if_false_or_pop(self):
self.assertEquals(self.dis('o\x03\x00\t')[0].name, 'JUMP_IF_FALSE_OR_POP')
def test_jump_if_true_or_pop(self):
self.assertEquals(self.dis('p\x03\x00\t')[0].name, 'JUMP_IF_TRUE_OR_POP')
def test_jump_absolute(self):
self.assertEquals(self.dis('q\x03\x00\t')[0].name, 'JUMP_ABSOLUTE')
def test_pop_jump_if_false(self):
self.assertEquals(self.dis('r\x03\x00\t')[0].name, 'POP_JUMP_IF_FALSE')
def test_pop_jump_if_true(self):
self.assertEquals(self.dis('s\x03\x00\t')[0].name, 'POP_JUMP_IF_TRUE')
def test_load_global(self):
self.assertEquals(self.dis('t\x00\x00')[0].name, 'LOAD_GLOBAL')
def test_continue_loop(self):
self.assertEquals(self.dis('w\x03\x00\t')[0].name, 'CONTINUE_LOOP')
def test_setup_loop(self):
self.assertEquals(self.dis('x\x00\x00\t')[0].name, 'SETUP_LOOP')
def test_setup_except(self):
self.assertEquals(self.dis('y\x00\x00\t')[0].name, 'SETUP_EXCEPT')
def test_setup_finally(self):
self.assertEquals(self.dis('z\x00\x00\t')[0].name, 'SETUP_FINALLY')
def test_load_fast(self):
self.assertEquals(self.dis('|\x00\x00')[0].name, 'LOAD_FAST')
def test_store_fast(self):
self.assertEquals(self.dis('}\x00\x00')[0].name, 'STORE_FAST')
def test_delete_fast(self):
self.assertEquals(self.dis('~\x00\x00')[0].name, 'DELETE_FAST')
def test_raise_varargs(self):
self.assertEquals(self.dis('\x82\x00\x00')[0].name, 'RAISE_VARARGS')
def test_call_function(self):
self.assertEquals(self.dis('\x83\x00\x00')[0].name, 'CALL_FUNCTION')
def test_make_function(self):
self.assertEquals(self.dis('\x84\x00\x00')[0].name, 'MAKE_FUNCTION')
def test_build_slice(self):
self.assertEquals(self.dis('\x85\x00\x00')[0].name, 'BUILD_SLICE')
def test_make_closure(self):
self.assertEquals(self.dis('\x86\x00\x00')[0].name, 'MAKE_CLOSURE')
def test_load_closure(self):
self.assertEquals(self.dis('\x87\x00\x00')[0].name, 'LOAD_CLOSURE')
def test_load_deref(self):
self.assertEquals(self.dis('\x88\x00\x00')[0].name, 'LOAD_DEREF')
def test_store_deref(self):
self.assertEquals(self.dis('\x89\x00\x00')[0].name, 'STORE_DEREF')
def test_call_function_var(self):
self.assertEquals(self.dis('\x8c\x00\x00')[0].name, 'CALL_FUNCTION_VAR')
def test_call_function_kw(self):
self.assertEquals(self.dis('\x8d\x00\x00')[0].name, 'CALL_FUNCTION_KW')
def test_call_function_var_kw(self):
self.assertEquals(self.dis('\x8e\x00\x00')[0].name, 'CALL_FUNCTION_VAR_KW')
def test_setup_with(self):
self.assertEquals(self.dis('\x8f\x00\x00\t')[0].name, 'SETUP_WITH')
def test_set_add(self):
self.assertEquals(self.dis('\x92\x00\x00')[0].name, 'SET_ADD')
def test_map_add(self):
self.assertEquals(self.dis('\x93\x00\x00')[0].name, 'MAP_ADD')
def test_binary(self):
code = ''.join(chr(c) for c in ([
0x7c, 0, 0, # 0 LOAD_FAST, arg=0,
0x7c, 0, 0, # 3 LOAD_FAST, arg=0,
0x17, # 6 BINARY_ADD,
0x01, # 7 POP_TOP,
0x7c, 0, 0, # 8 LOAD_FAST, arg=0,
0x7c, 0, 0, # 11 LOAD_FAST, arg=0,
0x14, # 14 BINARY_MULTIPLY,
0x01, # 15 POP_TOP,
0x7c, 0, 0, # 16 LOAD_FAST, arg=0,
0x7c, 0, 0, # 19 LOAD_FAST, arg=0,
0x16, # 22 BINARY_MODULO,
0x01, # 23 POP_TOP,
0x7c, 0, 0, # 24 LOAD_FAST, arg=0,
0x7c, 0, 0, # 27 LOAD_FAST, arg=0,
0x15, # 30 BINARY_DIVIDE,
0x01, # 31 POP_TOP,
0x64, 0, 0, # 32 LOAD_CONST, arg=0,
0x53, # 35 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 18)
self.assertEquals(ops[0].name, 'LOAD_FAST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_FAST')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'BINARY_ADD')
self.assertEquals(ops[3].name, 'POP_TOP')
self.assertEquals(ops[4].name, 'LOAD_FAST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'LOAD_FAST')
self.assertEquals(ops[5].arg, 0)
self.assertEquals(ops[6].name, 'BINARY_MULTIPLY')
self.assertEquals(ops[7].name, 'POP_TOP')
self.assertEquals(ops[8].name, 'LOAD_FAST')
self.assertEquals(ops[8].arg, 0)
self.assertEquals(ops[9].name, 'LOAD_FAST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'BINARY_MODULO')
self.assertEquals(ops[11].name, 'POP_TOP')
self.assertEquals(ops[12].name, 'LOAD_FAST')
self.assertEquals(ops[12].arg, 0)
self.assertEquals(ops[13].name, 'LOAD_FAST')
self.assertEquals(ops[13].arg, 0)
self.assertEquals(ops[14].name, 'BINARY_DIVIDE')
self.assertEquals(ops[15].name, 'POP_TOP')
self.assertEquals(ops[16].name, 'LOAD_CONST')
self.assertEquals(ops[16].arg, 0)
self.assertEquals(ops[17].name, 'RETURN_VALUE')
def test_break(self):
code = ''.join(chr(c) for c in ([
0x78, 4, 0, # 0 SETUP_LOOP, dest=7,
0x50, # 3 BREAK_LOOP,
0x71, 3, 0, # 4 JUMP_ABSOLUTE, dest=3,
0x64, 0, 0, # 7 LOAD_CONST, arg=0,
0x53, # 10 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 5)
self.assertEquals(ops[0].name, 'SETUP_LOOP')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'BREAK_LOOP')
self.assertEquals(ops[2].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[2].arg, 1)
self.assertEquals(ops[2].target, ops[1])
self.assertEquals(ops[3].name, 'LOAD_CONST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'RETURN_VALUE')
def test_call(self):
code = ''.join(chr(c) for c in ([
0x74, 0, 0, # 0 LOAD_GLOBAL, arg=0,
0x83, 0, 0, # 3 CALL_FUNCTION, arg=0,
0x01, # 6 POP_TOP,
0x64, 0, 0, # 7 LOAD_CONST, arg=0,
0x53, # 10 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 5)
self.assertEquals(ops[0].name, 'LOAD_GLOBAL')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'CALL_FUNCTION')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'POP_TOP')
self.assertEquals(ops[3].name, 'LOAD_CONST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'RETURN_VALUE')
def test_continue(self):
code = ''.join(chr(c) for c in ([
0x78, 6, 0, # 0 SETUP_LOOP, dest=9,
0x71, 3, 0, # 3 JUMP_ABSOLUTE, dest=3,
0x71, 3, 0, # 6 JUMP_ABSOLUTE, dest=3,
0x64, 0, 0, # 9 LOAD_CONST, arg=0,
0x53, # 12 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 5)
self.assertEquals(ops[0].name, 'SETUP_LOOP')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[1].arg, 1)
self.assertEquals(ops[1].target, ops[1])
self.assertEquals(ops[2].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[2].arg, 1)
self.assertEquals(ops[2].target, ops[1])
self.assertEquals(ops[3].name, 'LOAD_CONST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'RETURN_VALUE')
def test_except(self):
code = ''.join(chr(c) for c in ([
0x79, 4, 0, # 0 SETUP_EXCEPT, dest=7,
0x57, # 3 POP_BLOCK,
0x6e, 7, 0, # 4 JUMP_FORWARD, dest=14,
0x01, # 7 POP_TOP,
0x01, # 8 POP_TOP,
0x01, # 9 POP_TOP,
0x6e, 1, 0, # 10 JUMP_FORWARD, dest=14,
0x58, # 13 END_FINALLY,
0x64, 0, 0, # 14 LOAD_CONST, arg=0,
0x53, # 17 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 10)
self.assertEquals(ops[0].name, 'SETUP_EXCEPT')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'POP_BLOCK')
self.assertEquals(ops[2].name, 'JUMP_FORWARD')
self.assertEquals(ops[2].arg, 8)
self.assertEquals(ops[2].target, ops[8])
self.assertEquals(ops[3].name, 'POP_TOP')
self.assertEquals(ops[4].name, 'POP_TOP')
self.assertEquals(ops[5].name, 'POP_TOP')
self.assertEquals(ops[6].name, 'JUMP_FORWARD')
self.assertEquals(ops[6].arg, 8)
self.assertEquals(ops[6].target, ops[8])
self.assertEquals(ops[7].name, 'END_FINALLY')
self.assertEquals(ops[8].name, 'LOAD_CONST')
self.assertEquals(ops[8].arg, 0)
self.assertEquals(ops[9].name, 'RETURN_VALUE')
def test_finally(self):
code = ''.join(chr(c) for c in ([
0x7a, 4, 0, # 0 SETUP_FINALLY, dest=7,
0x57, # 3 POP_BLOCK,
0x64, 0, 0, # 4 LOAD_CONST, arg=0,
0x58, # 7 END_FINALLY,
0x64, 0, 0, # 8 LOAD_CONST, arg=0,
0x53, # 11 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 6)
self.assertEquals(ops[0].name, 'SETUP_FINALLY')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'POP_BLOCK')
self.assertEquals(ops[2].name, 'LOAD_CONST')
self.assertEquals(ops[2].arg, 0)
self.assertEquals(ops[3].name, 'END_FINALLY')
self.assertEquals(ops[4].name, 'LOAD_CONST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'RETURN_VALUE')
def test_inplace(self):
code = ''.join(chr(c) for c in ([
0x7c, 0, 0, # 0 LOAD_FAST, arg=0,
0x7c, 0, 0, # 3 LOAD_FAST, arg=0,
0x4b, # 6 INPLACE_LSHIFT,
0x7d, 0, 0, # 7 STORE_FAST, arg=0,
0x7c, 0, 0, # 10 LOAD_FAST, arg=0,
0x7c, 0, 0, # 13 LOAD_FAST, arg=0,
0x4c, # 16 INPLACE_RSHIFT,
0x7d, 0, 0, # 17 STORE_FAST, arg=0,
0x7c, 0, 0, # 20 LOAD_FAST, arg=0,
0x7c, 0, 0, # 23 LOAD_FAST, arg=0,
0x37, # 26 INPLACE_ADD,
0x7d, 0, 0, # 27 STORE_FAST, arg=0,
0x7c, 0, 0, # 30 LOAD_FAST, arg=0,
0x7c, 0, 0, # 33 LOAD_FAST, arg=0,
0x38, # 36 INPLACE_SUBTRACT,
0x7d, 0, 0, # 37 STORE_FAST, arg=0,
0x64, 0, 0, # 40 LOAD_CONST, arg=0,
0x53, # 43 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 18)
self.assertEquals(ops[0].name, 'LOAD_FAST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_FAST')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'INPLACE_LSHIFT')
self.assertEquals(ops[3].name, 'STORE_FAST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'LOAD_FAST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'LOAD_FAST')
self.assertEquals(ops[5].arg, 0)
self.assertEquals(ops[6].name, 'INPLACE_RSHIFT')
self.assertEquals(ops[7].name, 'STORE_FAST')
self.assertEquals(ops[7].arg, 0)
self.assertEquals(ops[8].name, 'LOAD_FAST')
self.assertEquals(ops[8].arg, 0)
self.assertEquals(ops[9].name, 'LOAD_FAST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'INPLACE_ADD')
self.assertEquals(ops[11].name, 'STORE_FAST')
self.assertEquals(ops[11].arg, 0)
self.assertEquals(ops[12].name, 'LOAD_FAST')
self.assertEquals(ops[12].arg, 0)
self.assertEquals(ops[13].name, 'LOAD_FAST')
self.assertEquals(ops[13].arg, 0)
self.assertEquals(ops[14].name, 'INPLACE_SUBTRACT')
self.assertEquals(ops[15].name, 'STORE_FAST')
self.assertEquals(ops[15].arg, 0)
self.assertEquals(ops[16].name, 'LOAD_CONST')
self.assertEquals(ops[16].arg, 0)
self.assertEquals(ops[17].name, 'RETURN_VALUE')
def test_list(self):
code = ''.join(chr(c) for c in ([
0x67, 0, 0, # 0 BUILD_LIST, arg=0,
0x7c, 0, 0, # 3 LOAD_FAST, arg=0,
0x44, # 6 GET_ITER,
0x5d, 12, 0, # 7 FOR_ITER, dest=22,
0x7d, 1, 0, # 10 STORE_FAST, arg=1,
0x7c, 1, 0, # 13 LOAD_FAST, arg=1,
0x5e, 2, 0, # 16 LIST_APPEND, arg=2,
0x71, 7, 0, # 19 JUMP_ABSOLUTE, dest=7,
0x01, # 22 POP_TOP,
0x64, 0, 0, # 23 LOAD_CONST, arg=0,
0x53, # 26 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 11)
self.assertEquals(ops[0].name, 'BUILD_LIST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_FAST')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'GET_ITER')
self.assertEquals(ops[3].name, 'FOR_ITER')
self.assertEquals(ops[3].arg, 8)
self.assertEquals(ops[3].target, ops[8])
self.assertEquals(ops[4].name, 'STORE_FAST')
self.assertEquals(ops[4].arg, 1)
self.assertEquals(ops[5].name, 'LOAD_FAST')
self.assertEquals(ops[5].arg, 1)
self.assertEquals(ops[6].name, 'LIST_APPEND')
self.assertEquals(ops[6].arg, 2)
self.assertEquals(ops[7].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[7].arg, 3)
self.assertEquals(ops[7].target, ops[3])
self.assertEquals(ops[8].name, 'POP_TOP')
self.assertEquals(ops[9].name, 'LOAD_CONST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'RETURN_VALUE')
def test_loop(self):
code = ''.join(chr(c) for c in ([
0x78, 10, 0, # 0 SETUP_LOOP, dest=13,
0x74, 0, 0, # 3 LOAD_GLOBAL, arg=0,
0x72, 12, 0, # 6 POP_JUMP_IF_FALSE, dest=12,
0x71, 3, 0, # 9 JUMP_ABSOLUTE, dest=3,
0x57, # 12 POP_BLOCK,
0x64, 0, 0, # 13 LOAD_CONST, arg=0,
0x53, # 16 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 7)
self.assertEquals(ops[0].name, 'SETUP_LOOP')
self.assertEquals(ops[0].arg, 5)
self.assertEquals(ops[0].target, ops[5])
self.assertEquals(ops[1].name, 'LOAD_GLOBAL')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'POP_JUMP_IF_FALSE')
self.assertEquals(ops[2].arg, 4)
self.assertEquals(ops[2].target, ops[4])
self.assertEquals(ops[3].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[3].arg, 1)
self.assertEquals(ops[3].target, ops[1])
self.assertEquals(ops[4].name, 'POP_BLOCK')
self.assertEquals(ops[5].name, 'LOAD_CONST')
self.assertEquals(ops[5].arg, 0)
self.assertEquals(ops[6].name, 'RETURN_VALUE')
def test_raise_one(self):
code = ''.join(chr(c) for c in ([
0x64, 0, 0, # 0 LOAD_CONST, arg=0,
0x82, 1, 0, # 3 RAISE_VARARGS, arg=1,
0x64, 0, 0, # 6 LOAD_CONST, arg=0,
0x53, # 9 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 4)
self.assertEquals(ops[0].name, 'LOAD_CONST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'RAISE_VARARGS')
self.assertEquals(ops[1].arg, 1)
self.assertEquals(ops[2].name, 'LOAD_CONST')
self.assertEquals(ops[2].arg, 0)
self.assertEquals(ops[3].name, 'RETURN_VALUE')
def test_unary(self):
code = ''.join(chr(c) for c in ([
0x7c, 0, 0, # 0 LOAD_FAST, arg=0,
0x0b, # 3 UNARY_NEGATIVE,
0x01, # 4 POP_TOP,
0x7c, 0, 0, # 5 LOAD_FAST, arg=0,
0x0f, # 8 UNARY_INVERT,
0x01, # 9 POP_TOP,
0x7c, 0, 0, # 10 LOAD_FAST, arg=0,
0x0a, # 13 UNARY_POSITIVE,
0x01, # 14 POP_TOP,
0x64, 0, 0, # 15 LOAD_CONST, arg=0,
0x53, # 18 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 11)
self.assertEquals(ops[0].name, 'LOAD_FAST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'UNARY_NEGATIVE')
self.assertEquals(ops[2].name, 'POP_TOP')
self.assertEquals(ops[3].name, 'LOAD_FAST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'UNARY_INVERT')
self.assertEquals(ops[5].name, 'POP_TOP')
self.assertEquals(ops[6].name, 'LOAD_FAST')
self.assertEquals(ops[6].arg, 0)
self.assertEquals(ops[7].name, 'UNARY_POSITIVE')
self.assertEquals(ops[8].name, 'POP_TOP')
self.assertEquals(ops[9].name, 'LOAD_CONST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'RETURN_VALUE')
def test_with(self):
code = ''.join(chr(c) for c in ([
0x64, 0, 0, # 0 LOAD_CONST, arg=0,
0x8f, 5, 0, # 3 SETUP_WITH, dest=11,
0x01, # 6 POP_TOP,
0x57, # 7 POP_BLOCK,
0x64, 0, 0, # 8 LOAD_CONST, arg=0,
0x51, # 11 WITH_CLEANUP,
0x58, # 12 END_FINALLY,
0x64, 0, 0, # 13 LOAD_CONST, arg=0,
0x53, # 16 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 9)
self.assertEquals(ops[0].name, 'LOAD_CONST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'SETUP_WITH')
self.assertEquals(ops[1].arg, 5)
self.assertEquals(ops[1].target, ops[5])
self.assertEquals(ops[2].name, 'POP_TOP')
self.assertEquals(ops[3].name, 'POP_BLOCK')
self.assertEquals(ops[4].name, 'LOAD_CONST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'WITH_CLEANUP')
self.assertEquals(ops[6].name, 'END_FINALLY')
self.assertEquals(ops[7].name, 'LOAD_CONST')
self.assertEquals(ops[7].arg, 0)
self.assertEquals(ops[8].name, 'RETURN_VALUE')
class Python3Test(unittest.TestCase):
"""Test bytecodes.dis for Python 3 opcodes."""
PYTHON_VERSION = (3, 3, 0)
def dis(self, data):
return opcodes.dis(data, self.PYTHON_VERSION)
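  # Same byte-string encoding as the Python 2 tests above: Python 3.3
  # bytecode still uses 16-bit little-endian arguments (the one-byte
  # "wordcode" format only arrived in CPython 3.6).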
def test_pop_top(self):
self.assertEquals(self.dis('\x01')[0].name, 'POP_TOP')
def test_rot_two(self):
self.assertEquals(self.dis('\x02')[0].name, 'ROT_TWO')
def test_rot_three(self):
self.assertEquals(self.dis('\x03')[0].name, 'ROT_THREE')
def test_dup_top(self):
self.assertEquals(self.dis('\x04')[0].name, 'DUP_TOP')
def test_dup_top_two(self):
self.assertEquals(self.dis('\x05')[0].name, 'DUP_TOP_TWO')
def test_nop(self):
self.assertEquals(self.dis('\t')[0].name, 'NOP')
def test_unary_positive(self):
self.assertEquals(self.dis('\n')[0].name, 'UNARY_POSITIVE')
def test_unary_negative(self):
self.assertEquals(self.dis('\x0b')[0].name, 'UNARY_NEGATIVE')
def test_unary_not(self):
self.assertEquals(self.dis('\x0c')[0].name, 'UNARY_NOT')
def test_unary_invert(self):
self.assertEquals(self.dis('\x0f')[0].name, 'UNARY_INVERT')
def test_binary_power(self):
self.assertEquals(self.dis('\x13')[0].name, 'BINARY_POWER')
def test_binary_multiply(self):
self.assertEquals(self.dis('\x14')[0].name, 'BINARY_MULTIPLY')
def test_binary_modulo(self):
self.assertEquals(self.dis('\x16')[0].name, 'BINARY_MODULO')
def test_binary_add(self):
self.assertEquals(self.dis('\x17')[0].name, 'BINARY_ADD')
def test_binary_subtract(self):
self.assertEquals(self.dis('\x18')[0].name, 'BINARY_SUBTRACT')
def test_binary_subscr(self):
self.assertEquals(self.dis('\x19')[0].name, 'BINARY_SUBSCR')
def test_binary_floor_divide(self):
self.assertEquals(self.dis('\x1a')[0].name, 'BINARY_FLOOR_DIVIDE')
def test_binary_true_divide(self):
self.assertEquals(self.dis('\x1b')[0].name, 'BINARY_TRUE_DIVIDE')
def test_inplace_floor_divide(self):
self.assertEquals(self.dis('\x1c')[0].name, 'INPLACE_FLOOR_DIVIDE')
def test_inplace_true_divide(self):
self.assertEquals(self.dis('\x1d')[0].name, 'INPLACE_TRUE_DIVIDE')
def test_store_map(self):
self.assertEquals(self.dis('6')[0].name, 'STORE_MAP')
def test_inplace_add(self):
self.assertEquals(self.dis('7')[0].name, 'INPLACE_ADD')
def test_inplace_subtract(self):
self.assertEquals(self.dis('8')[0].name, 'INPLACE_SUBTRACT')
def test_inplace_multiply(self):
self.assertEquals(self.dis('9')[0].name, 'INPLACE_MULTIPLY')
def test_inplace_modulo(self):
self.assertEquals(self.dis(';')[0].name, 'INPLACE_MODULO')
def test_store_subscr(self):
self.assertEquals(self.dis('<')[0].name, 'STORE_SUBSCR')
def test_delete_subscr(self):
self.assertEquals(self.dis('=')[0].name, 'DELETE_SUBSCR')
def test_binary_lshift(self):
self.assertEquals(self.dis('>')[0].name, 'BINARY_LSHIFT')
def test_binary_rshift(self):
self.assertEquals(self.dis('?')[0].name, 'BINARY_RSHIFT')
def test_binary_and(self):
self.assertEquals(self.dis('@')[0].name, 'BINARY_AND')
def test_binary_xor(self):
self.assertEquals(self.dis('A')[0].name, 'BINARY_XOR')
def test_binary_or(self):
self.assertEquals(self.dis('B')[0].name, 'BINARY_OR')
def test_inplace_power(self):
self.assertEquals(self.dis('C')[0].name, 'INPLACE_POWER')
def test_get_iter(self):
self.assertEquals(self.dis('D')[0].name, 'GET_ITER')
def test_print_expr(self):
self.assertEquals(self.dis('F')[0].name, 'PRINT_EXPR')
def test_load_build_class(self):
self.assertEquals(self.dis('G')[0].name, 'LOAD_BUILD_CLASS')
def test_yield_from(self):
self.assertEquals(self.dis('H')[0].name, 'YIELD_FROM')
def test_inplace_lshift(self):
self.assertEquals(self.dis('K')[0].name, 'INPLACE_LSHIFT')
def test_inplace_rshift(self):
self.assertEquals(self.dis('L')[0].name, 'INPLACE_RSHIFT')
def test_inplace_and(self):
self.assertEquals(self.dis('M')[0].name, 'INPLACE_AND')
def test_inplace_xor(self):
self.assertEquals(self.dis('N')[0].name, 'INPLACE_XOR')
def test_inplace_or(self):
self.assertEquals(self.dis('O')[0].name, 'INPLACE_OR')
def test_break_loop(self):
self.assertEquals(self.dis('P')[0].name, 'BREAK_LOOP')
def test_with_cleanup(self):
self.assertEquals(self.dis('Q')[0].name, 'WITH_CLEANUP')
def test_return_value(self):
self.assertEquals(self.dis('S')[0].name, 'RETURN_VALUE')
def test_import_star(self):
self.assertEquals(self.dis('T')[0].name, 'IMPORT_STAR')
def test_yield_value(self):
self.assertEquals(self.dis('V')[0].name, 'YIELD_VALUE')
def test_pop_block(self):
self.assertEquals(self.dis('W')[0].name, 'POP_BLOCK')
def test_end_finally(self):
self.assertEquals(self.dis('X')[0].name, 'END_FINALLY')
def test_pop_except(self):
self.assertEquals(self.dis('Y')[0].name, 'POP_EXCEPT')
def test_store_name(self):
self.assertEquals(self.dis('Z\x00\x00')[0].name, 'STORE_NAME')
def test_delete_name(self):
self.assertEquals(self.dis('[\x00\x00')[0].name, 'DELETE_NAME')
def test_unpack_sequence(self):
self.assertEquals(self.dis('\\\x00\x00')[0].name, 'UNPACK_SEQUENCE')
def test_for_iter(self):
self.assertEquals(self.dis(']\x00\x00\t')[0].name, 'FOR_ITER')
def test_unpack_ex(self):
self.assertEquals(self.dis('^\x00\x00')[0].name, 'UNPACK_EX')
def test_store_attr(self):
self.assertEquals(self.dis('_\x00\x00')[0].name, 'STORE_ATTR')
def test_delete_attr(self):
self.assertEquals(self.dis('`\x00\x00')[0].name, 'DELETE_ATTR')
def test_store_global(self):
self.assertEquals(self.dis('a\x00\x00')[0].name, 'STORE_GLOBAL')
def test_delete_global(self):
self.assertEquals(self.dis('b\x00\x00')[0].name, 'DELETE_GLOBAL')
def test_load_const(self):
self.assertEquals(self.dis('d\x00\x00')[0].name, 'LOAD_CONST')
def test_load_name(self):
self.assertEquals(self.dis('e\x00\x00')[0].name, 'LOAD_NAME')
def test_build_tuple(self):
self.assertEquals(self.dis('f\x00\x00')[0].name, 'BUILD_TUPLE')
def test_build_list(self):
self.assertEquals(self.dis('g\x00\x00')[0].name, 'BUILD_LIST')
def test_build_set(self):
self.assertEquals(self.dis('h\x00\x00')[0].name, 'BUILD_SET')
def test_build_map(self):
self.assertEquals(self.dis('i\x00\x00')[0].name, 'BUILD_MAP')
def test_load_attr(self):
self.assertEquals(self.dis('j\x00\x00')[0].name, 'LOAD_ATTR')
def test_compare_op(self):
self.assertEquals(self.dis('k\x00\x00')[0].name, 'COMPARE_OP')
def test_import_name(self):
self.assertEquals(self.dis('l\x00\x00')[0].name, 'IMPORT_NAME')
def test_import_from(self):
self.assertEquals(self.dis('m\x00\x00')[0].name, 'IMPORT_FROM')
def test_jump_forward(self):
self.assertEquals(self.dis('n\x00\x00\t')[0].name, 'JUMP_FORWARD')
def test_jump_if_false_or_pop(self):
self.assertEquals(self.dis('o\x03\x00\t')[0].name, 'JUMP_IF_FALSE_OR_POP')
def test_jump_if_true_or_pop(self):
self.assertEquals(self.dis('p\x03\x00\t')[0].name, 'JUMP_IF_TRUE_OR_POP')
def test_jump_absolute(self):
self.assertEquals(self.dis('q\x03\x00\t')[0].name, 'JUMP_ABSOLUTE')
def test_pop_jump_if_false(self):
self.assertEquals(self.dis('r\x03\x00\t')[0].name, 'POP_JUMP_IF_FALSE')
def test_pop_jump_if_true(self):
self.assertEquals(self.dis('s\x03\x00\t')[0].name, 'POP_JUMP_IF_TRUE')
def test_load_global(self):
self.assertEquals(self.dis('t\x00\x00')[0].name, 'LOAD_GLOBAL')
def test_continue_loop(self):
self.assertEquals(self.dis('w\x03\x00\t')[0].name, 'CONTINUE_LOOP')
def test_setup_loop(self):
self.assertEquals(self.dis('x\x00\x00\t')[0].name, 'SETUP_LOOP')
def test_setup_except(self):
self.assertEquals(self.dis('y\x00\x00\t')[0].name, 'SETUP_EXCEPT')
def test_setup_finally(self):
self.assertEquals(self.dis('z\x00\x00\t')[0].name, 'SETUP_FINALLY')
def test_load_fast(self):
self.assertEquals(self.dis('|\x00\x00')[0].name, 'LOAD_FAST')
def test_store_fast(self):
self.assertEquals(self.dis('}\x00\x00')[0].name, 'STORE_FAST')
def test_delete_fast(self):
self.assertEquals(self.dis('~\x00\x00')[0].name, 'DELETE_FAST')
def test_raise_varargs(self):
self.assertEquals(self.dis('\x82\x00\x00')[0].name, 'RAISE_VARARGS')
def test_call_function(self):
self.assertEquals(self.dis('\x83\x00\x00')[0].name, 'CALL_FUNCTION')
def test_make_function(self):
self.assertEquals(self.dis('\x84\x00\x00')[0].name, 'MAKE_FUNCTION')
def test_build_slice(self):
self.assertEquals(self.dis('\x85\x00\x00')[0].name, 'BUILD_SLICE')
def test_make_closure(self):
self.assertEquals(self.dis('\x86\x00\x00')[0].name, 'MAKE_CLOSURE')
def test_load_closure(self):
self.assertEquals(self.dis('\x87\x00\x00')[0].name, 'LOAD_CLOSURE')
def test_load_deref(self):
self.assertEquals(self.dis('\x88\x00\x00')[0].name, 'LOAD_DEREF')
def test_store_deref(self):
self.assertEquals(self.dis('\x89\x00\x00')[0].name, 'STORE_DEREF')
def test_delete_deref(self):
self.assertEquals(self.dis('\x8a\x00\x00')[0].name, 'DELETE_DEREF')
def test_call_function_var(self):
self.assertEquals(self.dis('\x8c\x00\x00')[0].name, 'CALL_FUNCTION_VAR')
def test_call_function_kw(self):
self.assertEquals(self.dis('\x8d\x00\x00')[0].name, 'CALL_FUNCTION_KW')
def test_call_function_var_kw(self):
self.assertEquals(self.dis('\x8e\x00\x00')[0].name, 'CALL_FUNCTION_VAR_KW')
def test_setup_with(self):
self.assertEquals(self.dis('\x8f\x00\x00\t')[0].name, 'SETUP_WITH')
def test_list_append(self):
self.assertEquals(self.dis('\x91\x00\x00')[0].name, 'LIST_APPEND')
def test_set_add(self):
self.assertEquals(self.dis('\x92\x00\x00')[0].name, 'SET_ADD')
def test_map_add(self):
self.assertEquals(self.dis('\x93\x00\x00')[0].name, 'MAP_ADD')
def test_load_classderef(self):
self.assertEquals(self.dis('\x94\x00\x00')[0].name, 'LOAD_CLASSDEREF')
def test_binary(self):
code = ''.join(chr(c) for c in ([
0x7c, 0, 0, # 0 LOAD_FAST, arg=0,
0x7c, 0, 0, # 3 LOAD_FAST, arg=0,
0x17, # 6 BINARY_ADD,
0x01, # 7 POP_TOP,
0x7c, 0, 0, # 8 LOAD_FAST, arg=0,
0x7c, 0, 0, # 11 LOAD_FAST, arg=0,
0x14, # 14 BINARY_MULTIPLY,
0x01, # 15 POP_TOP,
0x7c, 0, 0, # 16 LOAD_FAST, arg=0,
0x7c, 0, 0, # 19 LOAD_FAST, arg=0,
0x16, # 22 BINARY_MODULO,
0x01, # 23 POP_TOP,
0x7c, 0, 0, # 24 LOAD_FAST, arg=0,
0x7c, 0, 0, # 27 LOAD_FAST, arg=0,
0x1b, # 30 BINARY_TRUE_DIVIDE,
0x01, # 31 POP_TOP,
0x64, 0, 0, # 32 LOAD_CONST, arg=0,
0x53, # 35 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 18)
self.assertEquals(ops[0].name, 'LOAD_FAST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_FAST')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'BINARY_ADD')
self.assertEquals(ops[3].name, 'POP_TOP')
self.assertEquals(ops[4].name, 'LOAD_FAST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'LOAD_FAST')
self.assertEquals(ops[5].arg, 0)
self.assertEquals(ops[6].name, 'BINARY_MULTIPLY')
self.assertEquals(ops[7].name, 'POP_TOP')
self.assertEquals(ops[8].name, 'LOAD_FAST')
self.assertEquals(ops[8].arg, 0)
self.assertEquals(ops[9].name, 'LOAD_FAST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'BINARY_MODULO')
self.assertEquals(ops[11].name, 'POP_TOP')
self.assertEquals(ops[12].name, 'LOAD_FAST')
self.assertEquals(ops[12].arg, 0)
self.assertEquals(ops[13].name, 'LOAD_FAST')
self.assertEquals(ops[13].arg, 0)
self.assertEquals(ops[14].name, 'BINARY_TRUE_DIVIDE')
self.assertEquals(ops[15].name, 'POP_TOP')
self.assertEquals(ops[16].name, 'LOAD_CONST')
self.assertEquals(ops[16].arg, 0)
self.assertEquals(ops[17].name, 'RETURN_VALUE')
def test_break(self):
code = ''.join(chr(c) for c in ([
0x78, 4, 0, # 0 SETUP_LOOP, dest=7,
0x50, # 3 BREAK_LOOP,
0x71, 3, 0, # 4 JUMP_ABSOLUTE, dest=3,
0x64, 0, 0, # 7 LOAD_CONST, arg=0,
0x53, # 10 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 5)
self.assertEquals(ops[0].name, 'SETUP_LOOP')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'BREAK_LOOP')
self.assertEquals(ops[2].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[2].arg, 1)
self.assertEquals(ops[2].target, ops[1])
self.assertEquals(ops[3].name, 'LOAD_CONST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'RETURN_VALUE')
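  # Note (inferred from the assertions above): opcodes.dis rewrites jump
  # arguments from byte offsets in the raw bytecode into indices into the
  # returned op list (e.g. SETUP_LOOP dest=7 becomes arg=3, target=ops[3]).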
def test_call(self):
code = ''.join(chr(c) for c in ([
0x74, 0, 0, # 0 LOAD_GLOBAL, arg=0,
0x83, 0, 0, # 3 CALL_FUNCTION, arg=0,
0x01, # 6 POP_TOP,
0x64, 0, 0, # 7 LOAD_CONST, arg=0,
0x53, # 10 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 5)
self.assertEquals(ops[0].name, 'LOAD_GLOBAL')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'CALL_FUNCTION')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'POP_TOP')
self.assertEquals(ops[3].name, 'LOAD_CONST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'RETURN_VALUE')
def test_continue(self):
code = ''.join(chr(c) for c in ([
0x78, 6, 0, # 0 SETUP_LOOP, dest=9,
0x71, 3, 0, # 3 JUMP_ABSOLUTE, dest=3,
0x71, 3, 0, # 6 JUMP_ABSOLUTE, dest=3,
0x64, 0, 0, # 9 LOAD_CONST, arg=0,
0x53, # 12 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 5)
self.assertEquals(ops[0].name, 'SETUP_LOOP')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[1].arg, 1)
self.assertEquals(ops[1].target, ops[1])
self.assertEquals(ops[2].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[2].arg, 1)
self.assertEquals(ops[2].target, ops[1])
self.assertEquals(ops[3].name, 'LOAD_CONST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'RETURN_VALUE')
def test_except(self):
code = ''.join(chr(c) for c in ([
0x79, 4, 0, # 0 SETUP_EXCEPT, dest=7,
0x57, # 3 POP_BLOCK,
0x6e, 8, 0, # 4 JUMP_FORWARD, dest=15,
0x01, # 7 POP_TOP,
0x01, # 8 POP_TOP,
0x01, # 9 POP_TOP,
0x59, # 10 POP_EXCEPT,
0x6e, 1, 0, # 11 JUMP_FORWARD, dest=15,
0x58, # 14 END_FINALLY,
0x64, 0, 0, # 15 LOAD_CONST, arg=0,
0x53, # 18 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 11)
self.assertEquals(ops[0].name, 'SETUP_EXCEPT')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'POP_BLOCK')
self.assertEquals(ops[2].name, 'JUMP_FORWARD')
self.assertEquals(ops[2].arg, 9)
self.assertEquals(ops[2].target, ops[9])
self.assertEquals(ops[3].name, 'POP_TOP')
self.assertEquals(ops[4].name, 'POP_TOP')
self.assertEquals(ops[5].name, 'POP_TOP')
self.assertEquals(ops[6].name, 'POP_EXCEPT')
self.assertEquals(ops[7].name, 'JUMP_FORWARD')
self.assertEquals(ops[7].arg, 9)
self.assertEquals(ops[7].target, ops[9])
self.assertEquals(ops[8].name, 'END_FINALLY')
self.assertEquals(ops[9].name, 'LOAD_CONST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'RETURN_VALUE')
def test_finally(self):
code = ''.join(chr(c) for c in ([
0x7a, 4, 0, # 0 SETUP_FINALLY, dest=7,
0x57, # 3 POP_BLOCK,
0x64, 0, 0, # 4 LOAD_CONST, arg=0,
0x58, # 7 END_FINALLY,
0x64, 0, 0, # 8 LOAD_CONST, arg=0,
0x53, # 11 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 6)
self.assertEquals(ops[0].name, 'SETUP_FINALLY')
self.assertEquals(ops[0].arg, 3)
self.assertEquals(ops[0].target, ops[3])
self.assertEquals(ops[1].name, 'POP_BLOCK')
self.assertEquals(ops[2].name, 'LOAD_CONST')
self.assertEquals(ops[2].arg, 0)
self.assertEquals(ops[3].name, 'END_FINALLY')
self.assertEquals(ops[4].name, 'LOAD_CONST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'RETURN_VALUE')
def test_inplace(self):
code = ''.join(chr(c) for c in ([
0x7c, 0, 0, # 0 LOAD_FAST, arg=0,
0x7c, 0, 0, # 3 LOAD_FAST, arg=0,
0x4b, # 6 INPLACE_LSHIFT,
0x7d, 0, 0, # 7 STORE_FAST, arg=0,
0x7c, 0, 0, # 10 LOAD_FAST, arg=0,
0x7c, 0, 0, # 13 LOAD_FAST, arg=0,
0x4c, # 16 INPLACE_RSHIFT,
0x7d, 0, 0, # 17 STORE_FAST, arg=0,
0x7c, 0, 0, # 20 LOAD_FAST, arg=0,
0x7c, 0, 0, # 23 LOAD_FAST, arg=0,
0x37, # 26 INPLACE_ADD,
0x7d, 0, 0, # 27 STORE_FAST, arg=0,
0x7c, 0, 0, # 30 LOAD_FAST, arg=0,
0x7c, 0, 0, # 33 LOAD_FAST, arg=0,
0x38, # 36 INPLACE_SUBTRACT,
0x7d, 0, 0, # 37 STORE_FAST, arg=0,
0x64, 0, 0, # 40 LOAD_CONST, arg=0,
0x53, # 43 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 18)
self.assertEquals(ops[0].name, 'LOAD_FAST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_FAST')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'INPLACE_LSHIFT')
self.assertEquals(ops[3].name, 'STORE_FAST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'LOAD_FAST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'LOAD_FAST')
self.assertEquals(ops[5].arg, 0)
self.assertEquals(ops[6].name, 'INPLACE_RSHIFT')
self.assertEquals(ops[7].name, 'STORE_FAST')
self.assertEquals(ops[7].arg, 0)
self.assertEquals(ops[8].name, 'LOAD_FAST')
self.assertEquals(ops[8].arg, 0)
self.assertEquals(ops[9].name, 'LOAD_FAST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'INPLACE_ADD')
self.assertEquals(ops[11].name, 'STORE_FAST')
self.assertEquals(ops[11].arg, 0)
self.assertEquals(ops[12].name, 'LOAD_FAST')
self.assertEquals(ops[12].arg, 0)
self.assertEquals(ops[13].name, 'LOAD_FAST')
self.assertEquals(ops[13].arg, 0)
self.assertEquals(ops[14].name, 'INPLACE_SUBTRACT')
self.assertEquals(ops[15].name, 'STORE_FAST')
self.assertEquals(ops[15].arg, 0)
self.assertEquals(ops[16].name, 'LOAD_CONST')
self.assertEquals(ops[16].arg, 0)
self.assertEquals(ops[17].name, 'RETURN_VALUE')
def test_list(self):
code = ''.join(chr(c) for c in ([
0x64, 1, 0, # 0 LOAD_CONST, arg=1,
0x64, 2, 0, # 3 LOAD_CONST, arg=2,
0x84, 0, 0, # 6 MAKE_FUNCTION, arg=0,
0x7c, 0, 0, # 9 LOAD_FAST, arg=0,
0x44, # 12 GET_ITER,
0x83, 1, 0, # 13 CALL_FUNCTION, arg=1,
0x01, # 16 POP_TOP,
0x64, 0, 0, # 17 LOAD_CONST, arg=0,
0x53, # 20 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 9)
self.assertEquals(ops[0].name, 'LOAD_CONST')
self.assertEquals(ops[0].arg, 1)
self.assertEquals(ops[1].name, 'LOAD_CONST')
self.assertEquals(ops[1].arg, 2)
self.assertEquals(ops[2].name, 'MAKE_FUNCTION')
self.assertEquals(ops[2].arg, 0)
self.assertEquals(ops[3].name, 'LOAD_FAST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'GET_ITER')
self.assertEquals(ops[5].name, 'CALL_FUNCTION')
self.assertEquals(ops[5].arg, 1)
self.assertEquals(ops[6].name, 'POP_TOP')
self.assertEquals(ops[7].name, 'LOAD_CONST')
self.assertEquals(ops[7].arg, 0)
self.assertEquals(ops[8].name, 'RETURN_VALUE')
def test_loop(self):
code = ''.join(chr(c) for c in ([
0x78, 3, 0, # 0 SETUP_LOOP, dest=6,
0x71, 3, 0, # 3 JUMP_ABSOLUTE, dest=3,
0x64, 0, 0, # 6 LOAD_CONST, arg=0,
0x53, # 9 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 4)
self.assertEquals(ops[0].name, 'SETUP_LOOP')
self.assertEquals(ops[0].arg, 2)
self.assertEquals(ops[0].target, ops[2])
self.assertEquals(ops[1].name, 'JUMP_ABSOLUTE')
self.assertEquals(ops[1].arg, 1)
self.assertEquals(ops[1].target, ops[1])
self.assertEquals(ops[2].name, 'LOAD_CONST')
self.assertEquals(ops[2].arg, 0)
self.assertEquals(ops[3].name, 'RETURN_VALUE')
def test_raise_zero(self):
code = ''.join(chr(c) for c in ([
0x82, 0, 0, # 0 RAISE_VARARGS, arg=0,
0x64, 0, 0, # 3 LOAD_CONST, arg=0,
0x53, # 6 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 3)
self.assertEquals(ops[0].name, 'RAISE_VARARGS')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_CONST')
self.assertEquals(ops[1].arg, 0)
self.assertEquals(ops[2].name, 'RETURN_VALUE')
def test_raise_one(self):
code = ''.join(chr(c) for c in ([
0x64, 0, 0, # 0 LOAD_CONST, arg=0,
0x82, 1, 0, # 3 RAISE_VARARGS, arg=1,
0x64, 0, 0, # 6 LOAD_CONST, arg=0,
0x53, # 9 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 4)
self.assertEquals(ops[0].name, 'LOAD_CONST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'RAISE_VARARGS')
self.assertEquals(ops[1].arg, 1)
self.assertEquals(ops[2].name, 'LOAD_CONST')
self.assertEquals(ops[2].arg, 0)
self.assertEquals(ops[3].name, 'RETURN_VALUE')
def test_raise_two(self):
code = ''.join(chr(c) for c in ([
0x74, 0, 0, # 0 LOAD_GLOBAL, arg=0,
0x74, 1, 0, # 3 LOAD_GLOBAL, arg=1,
0x82, 2, 0, # 6 RAISE_VARARGS, arg=2,
0x64, 0, 0, # 9 LOAD_CONST, arg=0,
0x53, # 12 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 5)
self.assertEquals(ops[0].name, 'LOAD_GLOBAL')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_GLOBAL')
self.assertEquals(ops[1].arg, 1)
self.assertEquals(ops[2].name, 'RAISE_VARARGS')
self.assertEquals(ops[2].arg, 2)
self.assertEquals(ops[3].name, 'LOAD_CONST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'RETURN_VALUE')
def test_raise_three(self):
code = ''.join(chr(c) for c in ([
0x74, 0, 0, # 0 LOAD_GLOBAL, arg=0,
0x74, 1, 0, # 3 LOAD_GLOBAL, arg=1,
0x64, 1, 0, # 6 LOAD_CONST, arg=1,
0x82, 3, 0, # 9 RAISE_VARARGS, arg=3,
0x64, 0, 0, # 12 LOAD_CONST, arg=0,
0x53, # 15 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 6)
self.assertEquals(ops[0].name, 'LOAD_GLOBAL')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'LOAD_GLOBAL')
self.assertEquals(ops[1].arg, 1)
self.assertEquals(ops[2].name, 'LOAD_CONST')
self.assertEquals(ops[2].arg, 1)
self.assertEquals(ops[3].name, 'RAISE_VARARGS')
self.assertEquals(ops[3].arg, 3)
self.assertEquals(ops[4].name, 'LOAD_CONST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'RETURN_VALUE')
def test_unary(self):
code = ''.join(chr(c) for c in ([
0x7c, 0, 0, # 0 LOAD_FAST, arg=0,
0x0b, # 3 UNARY_NEGATIVE,
0x01, # 4 POP_TOP,
0x7c, 0, 0, # 5 LOAD_FAST, arg=0,
0x0f, # 8 UNARY_INVERT,
0x01, # 9 POP_TOP,
0x7c, 0, 0, # 10 LOAD_FAST, arg=0,
0x0a, # 13 UNARY_POSITIVE,
0x01, # 14 POP_TOP,
0x64, 0, 0, # 15 LOAD_CONST, arg=0,
0x53, # 18 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 11)
self.assertEquals(ops[0].name, 'LOAD_FAST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'UNARY_NEGATIVE')
self.assertEquals(ops[2].name, 'POP_TOP')
self.assertEquals(ops[3].name, 'LOAD_FAST')
self.assertEquals(ops[3].arg, 0)
self.assertEquals(ops[4].name, 'UNARY_INVERT')
self.assertEquals(ops[5].name, 'POP_TOP')
self.assertEquals(ops[6].name, 'LOAD_FAST')
self.assertEquals(ops[6].arg, 0)
self.assertEquals(ops[7].name, 'UNARY_POSITIVE')
self.assertEquals(ops[8].name, 'POP_TOP')
self.assertEquals(ops[9].name, 'LOAD_CONST')
self.assertEquals(ops[9].arg, 0)
self.assertEquals(ops[10].name, 'RETURN_VALUE')
def test_with(self):
code = ''.join(chr(c) for c in ([
0x64, 0, 0, # 0 LOAD_CONST, arg=0,
0x8f, 5, 0, # 3 SETUP_WITH, dest=11,
0x01, # 6 POP_TOP,
0x57, # 7 POP_BLOCK,
0x64, 0, 0, # 8 LOAD_CONST, arg=0,
0x51, # 11 WITH_CLEANUP,
0x58, # 12 END_FINALLY,
0x64, 0, 0, # 13 LOAD_CONST, arg=0,
0x53, # 16 RETURN_VALUE
]))
ops = opcodes.dis(code, self.PYTHON_VERSION)
self.assertEquals(len(ops), 9)
self.assertEquals(ops[0].name, 'LOAD_CONST')
self.assertEquals(ops[0].arg, 0)
self.assertEquals(ops[1].name, 'SETUP_WITH')
self.assertEquals(ops[1].arg, 5)
self.assertEquals(ops[1].target, ops[5])
self.assertEquals(ops[2].name, 'POP_TOP')
self.assertEquals(ops[3].name, 'POP_BLOCK')
self.assertEquals(ops[4].name, 'LOAD_CONST')
self.assertEquals(ops[4].arg, 0)
self.assertEquals(ops[5].name, 'WITH_CLEANUP')
self.assertEquals(ops[6].name, 'END_FINALLY')
self.assertEquals(ops[7].name, 'LOAD_CONST')
self.assertEquals(ops[7].arg, 0)
self.assertEquals(ops[8].name, 'RETURN_VALUE')
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| -2,192,197,559,645,915,400
| 35.034364
| 79
| 0.625424
| false
| 2.74776
| true
| false
| false
|
ikargis/horizon_fod
|
openstack_dashboard/api/glance.py
|
1
|
3362
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import itertools
import logging
import thread
import urlparse
from django.conf import settings # noqa
import glanceclient as glance_client
from horizon.utils import functions as utils
from openstack_dashboard.api import base
LOG = logging.getLogger(__name__)
def glanceclient(request):
o = urlparse.urlparse(base.url_for(request, 'image'))
url = "://".join((o.scheme, o.netloc))
insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
LOG.debug('glanceclient connection created using token "%s" and url "%s"'
% (request.user.token.id, url))
return glance_client.Client('1', url, token=request.user.token.id,
insecure=insecure, cacert=cacert)
def image_delete(request, image_id):
return glanceclient(request).images.delete(image_id)
def image_get(request, image_id):
"""Returns an Image object populated with metadata for image
with supplied identifier.
"""
image = glanceclient(request).images.get(image_id)
if not hasattr(image, 'name'):
image.name = None
return image
def image_list_detailed(request, marker=None, filters=None, paginate=False):
limit = getattr(settings, 'API_RESULT_LIMIT', 1000)
page_size = utils.get_page_size(request)
if paginate:
request_size = page_size + 1
else:
request_size = limit
kwargs = {'filters': filters or {}}
if marker:
kwargs['marker'] = marker
images_iter = glanceclient(request).images.list(page_size=request_size,
limit=limit,
**kwargs)
has_more_data = False
if paginate:
images = list(itertools.islice(images_iter, request_size))
if len(images) > page_size:
images.pop(-1)
has_more_data = True
else:
images = list(images_iter)
return (images, has_more_data)
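# Pagination sketch (behavior of the function above): with page_size == 20 the
# client requests 21 images; if all 21 come back, the extra one is dropped and
# has_more_data is set to True so callers can render a "next page" link.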
def image_update(request, image_id, **kwargs):
return glanceclient(request).images.update(image_id, **kwargs)
def image_create(request, **kwargs):
copy_from = None
if kwargs.get('copy_from'):
copy_from = kwargs.pop('copy_from')
image = glanceclient(request).images.create(**kwargs)
if copy_from:
thread.start_new_thread(image_update,
(request, image.id),
{'copy_from': copy_from})
return image
|
apache-2.0
| 2,677,192,707,964,886,500
| 29.844037
| 78
| 0.649018
| false
| 3.941383
| false
| false
| false
|
alex/changes
|
changes/api/project_test_history.py
|
1
|
2669
|
from __future__ import absolute_import, division, unicode_literals
from flask.ext.restful import reqparse
from sqlalchemy.orm import contains_eager, joinedload
from changes.api.base import APIView
from changes.constants import Status
from changes.models import Project, TestCase, Job, Source
class ProjectTestHistoryAPIView(APIView):
get_parser = reqparse.RequestParser()
get_parser.add_argument('per_page', type=int, location='args',
default=100)
def get(self, project_id, test_hash):
project = Project.get(project_id)
if not project:
return '', 404
# use the most recent test run to find basic details
test = TestCase.query.filter(
TestCase.project_id == project_id,
TestCase.name_sha == test_hash,
).order_by(TestCase.date_created.desc()).limit(1).first()
if not test:
return '', 404
args = self.get_parser.parse_args()
num_results = args.per_page
        # restrict the join to the last N jobs, otherwise this can get
        # significantly expensive as we have to seek quite a long way
job_sq = Job.query.filter(
Job.status == Status.finished,
Job.project_id == project_id,
).order_by(Job.date_created.desc()).limit(num_results * 10).subquery()
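        # The 10x over-fetch above is presumably a heuristic: enough finished
        # jobs are pulled in so that, after filtering to this test and to
        # non-patch revisions, roughly num_results matching runs remain.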
recent_runs = list(TestCase.query.options(
contains_eager('job', alias=job_sq),
contains_eager('job.source'),
joinedload('job.build'),
joinedload('job.build.author'),
joinedload('job.build.source'),
joinedload('job.build.source.revision'),
).join(
job_sq, TestCase.job_id == job_sq.c.id,
).join(
Source, job_sq.c.source_id == Source.id,
).filter(
Source.repository_id == project.repository_id,
Source.patch_id == None, # NOQA
Source.revision_sha != None, # NOQA
TestCase.name_sha == test.name_sha,
).order_by(job_sq.c.date_created.desc())[:num_results])
jobs = set(r.job for r in recent_runs)
builds = set(j.build for j in jobs)
serialized_jobs = dict(zip(jobs, self.serialize(jobs)))
serialized_builds = dict(zip(builds, self.serialize(builds)))
results = []
for recent_run, s_recent_run in zip(recent_runs, self.serialize(recent_runs)):
s_recent_run['job'] = serialized_jobs[recent_run.job]
s_recent_run['job']['build'] = serialized_builds[recent_run.job.build]
results.append(s_recent_run)
return self.respond(results, serialize=False)
|
apache-2.0
| 2,706,287,891,616,317,400
| 37.128571
| 86
| 0.606969
| false
| 3.90776
| true
| false
| false
|
masfaraud/volmdlr
|
scripts/distance/tore_tore.py
|
1
|
4675
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 2 14:15:50 2020
@author: Mack Pro
"""
import numpy as npy
import volmdlr as volmdlr
import volmdlr.primitives3D as primitives3D
import volmdlr.primitives2D as primitives2D
import matplotlib.pyplot as plt
import random
import math
rmin, rmax = 100, 1000
posmin, posmax = -100, 100
x1, y1, z1 = random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100
x2, y2, z2 = random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100
R1, R2 = random.randrange(rmin, rmax, 1)/1000, random.randrange(rmin, rmax, 1)/1000 #Radius of the generative arc3D
r1, r2 = random.randrange(rmin//10, rmax//10, 1)/1000, random.randrange(rmin//10, rmax//10, 1)/1000 #Radius of the arc3d generated (randrange needs integer bounds)
c1, c2 = volmdlr.Point3D([x1,y1,z1]), volmdlr.Point3D([x2,y2,z2]) #Choose the coordinate of the center
x3, y3, z3 = random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100
x4, y4, z4 = random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100, random.randrange(posmin, posmax, 1)/100
n1, n2 = volmdlr.Vector3D([x3,y3,z3]), volmdlr.Vector3D([x4,y4,z4]) #Choose the normal
n1.Normalize() #Normalize the normal in case it is not already a unit vector
n2.Normalize()
plane1, plane2 = volmdlr.Plane3D.from_normal(c1, n1), volmdlr.Plane3D.from_normal(c2, n2) #Create a plane to give us two others vector
frame1 = volmdlr.Frame3D(c1, plane1.vectors[0], plane1.vectors[1], n1) #Frame in the center of the Tore
frame2 = volmdlr.Frame3D(c2, plane2.vectors[0], plane2.vectors[1], n2)
toresurface1 = volmdlr.ToroidalSurface3D(frame1, R1, r1)
toresurface2 = volmdlr.ToroidalSurface3D(frame2, R2, r2)
angle_min, angle_max = 0, int(2*math.pi*100) #randrange needs integer bounds
theta1 = random.randrange(angle_min, angle_max, 20)/100 #Tore's length
phi1 = 2*math.pi #angle of circle
offset_theta1 = random.randrange(angle_min, angle_max, 20)/100 #Theta's offset if you want to turn it with normal's reference
offset_phi1 = random.randrange(angle_min, angle_max, 20)/100 #Idem but with circle's normal
print('param1', phi1, theta1, offset_phi1, offset_theta1)
#You have to create a cutting pattern in 2D
pt1, pt2, pt3, pt4 = volmdlr.Point2D((offset_theta1, offset_phi1)), volmdlr.Point2D((offset_theta1, offset_phi1+phi1)), volmdlr.Point2D((offset_theta1+theta1, offset_phi1+phi1)), volmdlr.Point2D((offset_theta1+theta1, offset_phi1))
seg1, seg2, seg3, seg4 = volmdlr.LineSegment2D(pt1, pt2), volmdlr.LineSegment2D(pt2, pt3), volmdlr.LineSegment2D(pt3, pt4), volmdlr.LineSegment2D(pt4, pt1)
edges = [seg1, seg2, seg3, seg4]
contours2d = [volmdlr.Contour2D(edges)]
points = [theta1, phi1]
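# The 2D contour above lives in (theta, phi) parameter space: theta runs along
# the generative circle of radius R and phi around the tube of radius r, so the
# rectangle [offset_theta, offset_theta+theta] x [offset_phi, offset_phi+phi]
# cuts a patch out of the torus.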
theta2 = random.randrange(angle_min, angle_max, 20)/100 #Tore's length
phi2 = random.randrange(angle_min, angle_max, 20)/100 #angle of circle
offset_theta2 = random.randrange(angle_min, angle_max, 20)/100 #Theta's offset if you want to turn it with normal's reference
offset_phi2 = random.randrange(angle_min, angle_max, 20)/100 #Idem but with circle's normal
print('param2', phi2, theta2, offset_phi2, offset_theta2)
#You have to create a cutting pattern in 2D
pt1_2, pt2_2, pt3_2, pt4_2 = volmdlr.Point2D((offset_theta2, offset_phi2)), volmdlr.Point2D((offset_theta2, offset_phi2+phi2)), volmdlr.Point2D((offset_theta2+theta2, offset_phi2+phi2)), volmdlr.Point2D((offset_theta2+theta2, offset_phi2))
seg1_2, seg2_2, seg3_2, seg4_2 = volmdlr.LineSegment2D(pt1_2, pt2_2), volmdlr.LineSegment2D(pt2_2, pt3_2), volmdlr.LineSegment2D(pt3_2, pt4_2), volmdlr.LineSegment2D(pt4_2, pt1_2)
edges_2 = [seg1_2, seg2_2, seg3_2, seg4_2]
contours2d_2 = [volmdlr.Contour2D(edges_2)]
points_2 = [theta2, phi2]
toroidalface1 = volmdlr.ToroidalFace3D(contours2d, toresurface1, points)
toroidalface2 = volmdlr.ToroidalFace3D(contours2d_2, toresurface2, points_2)
pts1, tangle1 = toroidalface1.triangulation(resolution=10)
pts2, tangle2 = toroidalface2.triangulation(resolution=10)
p1, p2 = toroidalface1.minimum_distance_points_tore(toroidalface2)
print('p1, p2', p1,p2)
print(p1.point_distance(p2))
# fig = plt.figure()
# ax = fig.add_subplot(111, projection='3d')
# [pt.MPLPlot(ax=ax) for pt in pts1]
# [pt.MPLPlot(ax=ax) for pt in pts2]
# p1.MPLPlot(ax=ax, color='r')
# p2.MPLPlot(ax=ax, color='b')
# toroidalface1.start.MPLPlot(ax=ax, color='m')
# toroidalface2.start.MPLPlot(ax=ax, color='g')
# LS = volmdlr.LineSegment3D(p1, p2)
shell = volmdlr.Shell3D([toroidalface1,toroidalface2])
vol = volmdlr.VolumeModel([shell, p1, p2])
vol.babylonjs_from_script()
# m = volmdlr.VolumeModel([shell])
# m.babylonjs()
|
gpl-3.0
| 296,104,524,173,989,760
| 45.75
| 239
| 0.73861
| false
| 2.446363
| false
| false
| false
|
thinkobscure/PantheROV
|
topside/SDL/endian.py
|
1
|
1027
|
#!/usr/bin/env python
'''Functions for converting to native byte order
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import sys
import SDL.constants
def SDL_Swap16(x):
return (x << 8 & 0xff00) | \
(x >> 8 & 0x00ff)
def SDL_Swap32(x):
return (x << 24 & 0xff000000) | \
(x << 8 & 0x00ff0000) | \
(x >> 8 & 0x0000ff00) | \
(x >> 24 & 0x000000ff)
def SDL_Swap64(x):
return (SDL_Swap32(x & 0xffffffff) << 32) | \
(SDL_Swap32(x >> 32 & 0xffffffff))
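# Worked examples: SDL_Swap16(0x1234) == 0x3412,
# SDL_Swap32(0x12345678) == 0x78563412, and SDL_Swap64 byte-swaps each
# 32-bit half and exchanges them.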
def _noop(x):
return x
if sys.byteorder == 'big':
SDL_BYTEORDER = SDL.constants.SDL_BIG_ENDIAN
SDL_SwapLE16 = SDL_Swap16
SDL_SwapLE32 = SDL_Swap32
SDL_SwapLE64 = SDL_Swap64
SDL_SwapBE16 = _noop
SDL_SwapBE32 = _noop
SDL_SwapBE64 = _noop
else:
SDL_BYTEORDER = SDL.constants.SDL_LIL_ENDIAN
SDL_SwapLE16 = _noop
SDL_SwapLE32 = _noop
SDL_SwapLE64 = _noop
SDL_SwapBE16 = SDL_Swap16
SDL_SwapBE32 = SDL_Swap32
SDL_SwapBE64 = SDL_Swap64
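# Usage sketch: SDL_SwapLE16 converts a 16-bit value between little-endian and
# native byte order (a no-op on little-endian hosts), so round-tripping holds:
# SDL_SwapLE16(SDL_SwapLE16(x)) == x for any 16-bit x.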
|
gpl-3.0
| -3,626,432,104,365,442,600
| 21.326087
| 49
| 0.584226
| false
| 2.821429
| false
| false
| false
|
gmariotti/lassim
|
source/core/lassim_context.py
|
1
|
4587
|
from logging import Logger
from typing import Dict, Type, Callable, List, Optional
import psutil
from core.base_solution import BaseSolution
from core.core_system import CoreSystem
from core.utilities.type_aliases import Vector
__author__ = "Guido Pio Mariotti"
__copyright__ = "Copyright (C) 2016 Guido Pio Mariotti"
__license__ = "GNU General Public License v3.0"
__version__ = "0.2.0"
class LassimContext:
"""
Represents the context of the current optimization. Should allow dependency
injection of common parameters, like the class that represents the
    solutions, the ODE function to use, etc.
"""
def __init__(self, core: CoreSystem, primary_opt: List['OptimizationArgs'],
ode_fun: Callable[..., Vector], pert_fun: Callable[..., float],
solution_class: Type[BaseSolution],
secondary_opt: List['OptimizationArgs'] = None):
self.__core_system = core
if len(primary_opt) == 0:
raise ValueError("Primary optimization list can't be empty")
self.__primary_opt = primary_opt
self.__ode_function = ode_fun
self.__pert_function = pert_fun
self.__solution_class = solution_class
self.__secondary_opt = list()
if secondary_opt is not None:
self.__secondary_opt = secondary_opt
@property
def core(self) -> CoreSystem:
return self.__core_system
@property
def primary_opts(self) -> List['OptimizationArgs']:
        # return a copy of the list so that the internal one cannot be
        # modified by callers
return [val for val in self.__primary_opt]
@property
def primary_first(self) -> 'OptimizationArgs':
return self.primary_opts[0]
@property
def secondary_opts(self) -> List['OptimizationArgs']:
return [val for val in self.__secondary_opt]
# FIXME - use my Optional
@property
def secondary_first(self) -> Optional['OptimizationArgs']:
if len(self.secondary_opts) > 0:
return self.secondary_opts[0]
else:
return None
@property
def ode(self) -> Callable[..., Vector]:
return self.__ode_function
@property
def perturbation(self) -> Callable[..., float]:
return self.__pert_function
@property
def SolutionClass(self) -> Type[BaseSolution]:
return self.__solution_class
def __str__(self) -> str:
# TODO
return "LassimContext"
__repr__ = __str__
class OptimizationArgs:
"""
This class represents the list of arguments for an optimization. Except for
the number of cores, each argument is read-only and is initialized at class
instantiation.
"""
def __init__(self, opt_type: str, params: Dict, num_cores: int,
evolutions: int, individuals: int, pert_factor: float):
self.__type = opt_type
self.__params = params
self.__islands = num_cores
self.__evolutions = evolutions
self.__individuals = individuals
self.__pert_factor = pert_factor
@property
def type(self) -> str:
return self.__type
@property
def params(self) -> Dict:
return self.__params
@property
def num_islands(self) -> int:
return self.__islands
@num_islands.setter
def num_islands(self, num_islands: int):
# if the number is less than one, then use all the CPUs available
        if num_islands < 1:
            self.__islands = psutil.cpu_count()
        else:
            self.__islands = num_islands
@property
def num_evolutions(self) -> int:
return self.__evolutions
@property
def num_individuals(self) -> int:
return self.__individuals
@property
def pert_factor(self) -> float:
return self.__pert_factor
def log_args(self, logger: Logger, is_pert: bool = False):
"""
Used to log the optimization arguments inserted by the user.
:param logger: the logging object to use
:param is_pert: if the presence of the perturbations factor has to be
logged or not.
"""
logger.info("Algorithm used is {}".format(self.__type))
logger.info("Number of cores is {}".format(self.__islands))
logger.info("Number of evolutions for archipelago is {}".format(
self.__evolutions
))
logger.info("Number of individuals for each island is {}".format(
self.__individuals
))
if is_pert:
logger.info("Perturbations factor is {}".format(self.__pert_factor))
|
gpl-3.0
| 5,525,869,121,874,791,000
| 30.417808
| 80
| 0.615871
| false
| 4.117594
| false
| false
| false
|
jingriver/stocktracker
|
pytoolkit/randombitmap/bitmap.py
|
1
|
1072
|
from PIL import Image
import random
size = (640, 640)
black = (0,0,0)
white = (255,255,255)
def draw(size):
im = Image.new("RGB", size)
ll = []
for i in range(size[0]):
for j in range(size[1]):
if random.random()>0.5:
ll.append(white)
else:
ll.append(black)
im.putdata(ll)
im.show()
im.save("1.png")
def drawColor(size):
im = Image.new("RGB", size)
ll = []
for i in range(size[0]):
for j in range(size[1]):
ll.append((random.randint(1,255),random.randint(1, 255),random.randint(1,255)))
im.putdata(ll)
im.show()
im.save("2.png")
def drawStyle(size):
im = Image.new("RGB", size)
ll = []
for i in range(size[0]):
for j in range(size[1]):
c = (i+j)%255
ll.append((i%255,c,j%255))
im.putdata(ll)
im.show()
im.save("3.png")
if __name__ == "__main__":
draw(size)
drawColor(size)
drawStyle(size)
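    # The three calls above write 1.png (random black/white noise),
    # 2.png (random RGB noise) and 3.png (a deterministic gradient pattern).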
|
mit
| 6,033,678,608,516,749,000
| 20.375
| 91
| 0.476679
| false
| 3.08046
| false
| false
| false
|
CollabQ/CollabQ
|
.google_appengine/google/appengine/api/images/images_stub.py
|
1
|
15868
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the images API."""
import logging
import StringIO
try:
import PIL
from PIL import _imaging
from PIL import Image
except ImportError:
import _imaging
import Image
from google.appengine.api import apiproxy_stub
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import blobstore
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import images
from google.appengine.api.images import images_service_pb
from google.appengine.runtime import apiproxy_errors
def _ArgbToRgbaTuple(argb):
"""Convert from a single ARGB value to a tuple containing RGBA.
Args:
argb: Signed 32 bit integer containing an ARGB value.
Returns:
RGBA tuple.
"""
unsigned_argb = argb % 0x100000000
return ((unsigned_argb >> 16) & 0xFF,
(unsigned_argb >> 8) & 0xFF,
unsigned_argb & 0xFF,
(unsigned_argb >> 24) & 0xFF)
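# Sanity check (hypothetical value, not part of the original stub):
# 0xFF112233 encodes A=0xFF, R=0x11, G=0x22, B=0x33, so
# _ArgbToRgbaTuple(0xFF112233) == (0x11, 0x22, 0x33, 0xFF).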
class ImagesServiceStub(apiproxy_stub.APIProxyStub):
"""Stub version of images API to be used with the dev_appserver."""
def __init__(self, service_name="images"):
"""Preloads PIL to load all modules in the unhardened environment.
Args:
service_name: Service name expected for all calls.
"""
super(ImagesServiceStub, self).__init__(service_name)
Image.init()
def _Dynamic_Composite(self, request, response):
"""Implementation of ImagesService::Composite.
Based off documentation of the PIL library at
http://www.pythonware.com/library/pil/handbook/index.htm
Args:
request: ImagesCompositeRequest, contains image request info.
response: ImagesCompositeResponse, contains transformed image.
"""
width = request.canvas().width()
height = request.canvas().height()
color = _ArgbToRgbaTuple(request.canvas().color())
canvas = Image.new("RGBA", (width, height), color)
sources = []
if (not request.canvas().width() or request.canvas().width() > 4000 or
not request.canvas().height() or request.canvas().height() > 4000):
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
if not request.image_size():
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
if not request.options_size():
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
if request.options_size() > images.MAX_COMPOSITES_PER_REQUEST:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
for image in request.image_list():
sources.append(self._OpenImageData(image))
for options in request.options_list():
if (options.anchor() < images.TOP_LEFT or
options.anchor() > images.BOTTOM_RIGHT):
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
if options.source_index() >= len(sources) or options.source_index() < 0:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
if options.opacity() < 0 or options.opacity() > 1:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
source = sources[options.source_index()]
x_anchor = (options.anchor() % 3) * 0.5
y_anchor = (options.anchor() / 3) * 0.5
x_offset = int(options.x_offset() + x_anchor * (width - source.size[0]))
y_offset = int(options.y_offset() + y_anchor * (height - source.size[1]))
alpha = options.opacity() * 255
mask = Image.new("L", source.size, alpha)
canvas.paste(source, (x_offset, y_offset), mask)
response_value = self._EncodeImage(canvas, request.canvas().output())
response.mutable_image().set_content(response_value)
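    # Anchor math in the loop above: anchors enumerate a 3x3 grid
    # (TOP_LEFT=0 .. BOTTOM_RIGHT=8), so x_anchor = (anchor % 3) * 0.5 and
    # y_anchor = (anchor / 3) * 0.5; e.g. CENTER (4) gives (0.5, 0.5),
    # centering the source on the canvas before x/y offsets are applied.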
def _Dynamic_Histogram(self, request, response):
"""Trivial implementation of ImagesService::Histogram.
Based off documentation of the PIL library at
http://www.pythonware.com/library/pil/handbook/index.htm
Args:
request: ImagesHistogramRequest, contains the image.
response: ImagesHistogramResponse, contains histogram of the image.
"""
image = self._OpenImageData(request.image())
img_format = image.format
if img_format not in ("BMP", "GIF", "ICO", "JPEG", "PNG", "TIFF"):
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.NOT_IMAGE)
image = image.convert("RGBA")
red = [0] * 256
green = [0] * 256
blue = [0] * 256
for pixel in image.getdata():
red[int((pixel[0] * pixel[3]) / 255)] += 1
green[int((pixel[1] * pixel[3]) / 255)] += 1
blue[int((pixel[2] * pixel[3]) / 255)] += 1
histogram = response.mutable_histogram()
for value in red:
histogram.add_red(value)
for value in green:
histogram.add_green(value)
for value in blue:
histogram.add_blue(value)
def _Dynamic_Transform(self, request, response):
"""Trivial implementation of ImagesService::Transform.
Based off documentation of the PIL library at
http://www.pythonware.com/library/pil/handbook/index.htm
Args:
request: ImagesTransformRequest, contains image request info.
response: ImagesTransformResponse, contains transformed image.
"""
original_image = self._OpenImageData(request.image())
new_image = self._ProcessTransforms(original_image,
request.transform_list())
response_value = self._EncodeImage(new_image, request.output())
response.mutable_image().set_content(response_value)
def _EncodeImage(self, image, output_encoding):
"""Encode the given image and return it in string form.
Args:
image: PIL Image object, image to encode.
output_encoding: ImagesTransformRequest.OutputSettings object.
Returns:
str with encoded image information in given encoding format.
"""
image_string = StringIO.StringIO()
image_encoding = "PNG"
if (output_encoding.mime_type() == images_service_pb.OutputSettings.JPEG):
image_encoding = "JPEG"
image = image.convert("RGB")
image.save(image_string, image_encoding)
return image_string.getvalue()
def _OpenImageData(self, image_data):
"""Open image data from ImageData protocol buffer.
Args:
image_data: ImageData protocol buffer containing image data or blob
reference.
Returns:
Image containing the image data passed in or reference by blob-key.
Raises:
ApplicationError if both content and blob-key are provided.
NOTE: 'content' must always be set because it is a required field,
however, it must be the empty string when a blob-key is provided.
"""
if image_data.content() and image_data.has_blob_key():
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.INVALID_BLOB_KEY)
if image_data.has_blob_key():
image = self._OpenBlob(image_data.blob_key())
else:
image = self._OpenImage(image_data.content())
img_format = image.format
if img_format not in ("BMP", "GIF", "ICO", "JPEG", "PNG", "TIFF"):
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.NOT_IMAGE)
return image
def _OpenImage(self, image):
"""Opens an image provided as a string.
Args:
image: image data to be opened
Raises:
apiproxy_errors.ApplicationError if the image cannot be opened or if it
is an unsupported format.
Returns:
Image containing the image data passed in.
"""
if not image:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.NOT_IMAGE)
image = StringIO.StringIO(image)
try:
return Image.open(image)
except IOError:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
def _OpenBlob(self, blob_key):
key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND, blob_key)
try:
datastore.Get(key)
except datastore_errors.Error:
logging.exception('Blob with key %r does not exist', blob_key)
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR)
blobstore_stub = apiproxy_stub_map.apiproxy.GetStub("blobstore")
try:
blob_file = blobstore_stub.storage.OpenBlob(blob_key)
except IOError:
logging.exception('Could not get file for blob_key %r', blob_key)
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
try:
return Image.open(blob_file)
except IOError:
logging.exception('Could not open image %r for blob_key %r',
blob_file, blob_key)
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
def _ValidateCropArg(self, arg):
"""Check an argument for the Crop transform.
Args:
arg: float, argument to Crop transform to check.
Raises:
apiproxy_errors.ApplicationError on problem with argument.
"""
if not isinstance(arg, float):
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
if not (0 <= arg <= 1.0):
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
def _CalculateNewDimensions(self,
current_width,
current_height,
req_width,
req_height):
"""Get new resize dimensions keeping the current aspect ratio.
This uses the more restricting of the two requested values to determine
the new ratio.
Args:
current_width: int, current width of the image.
current_height: int, current height of the image.
req_width: int, requested new width of the image.
req_height: int, requested new height of the image.
Returns:
tuple (width, height) which are both ints of the new ratio.
"""
width_ratio = float(req_width) / current_width
height_ratio = float(req_height) / current_height
if req_width == 0 or (width_ratio > height_ratio and req_height != 0):
return int(height_ratio * current_width), req_height
else:
return req_width, int(width_ratio * current_height)
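    # Worked example: a 400x200 image resized with req_width=100 and
    # req_height=100 gives width_ratio=0.25 and height_ratio=0.5; width is the
    # more restrictive dimension, so the result is (100, 50) and the original
    # aspect ratio is preserved.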
def _Resize(self, image, transform):
"""Use PIL to resize the given image with the given transform.
Args:
image: PIL.Image.Image object to resize.
transform: images_service_pb.Transform to use when resizing.
Returns:
PIL.Image.Image with transforms performed on it.
Raises:
BadRequestError if the resize data given is bad.
"""
width = 0
height = 0
if transform.has_width():
width = transform.width()
if width < 0 or 4000 < width:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
if transform.has_height():
height = transform.height()
if height < 0 or 4000 < height:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
current_width, current_height = image.size
new_width, new_height = self._CalculateNewDimensions(current_width,
current_height,
width,
height)
return image.resize((new_width, new_height), Image.ANTIALIAS)
def _Rotate(self, image, transform):
"""Use PIL to rotate the given image with the given transform.
Args:
image: PIL.Image.Image object to rotate.
transform: images_service_pb.Transform to use when rotating.
Returns:
PIL.Image.Image with transforms performed on it.
Raises:
BadRequestError if the rotate data given is bad.
"""
degrees = transform.rotate()
if degrees < 0 or degrees % 90 != 0:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
degrees %= 360
degrees = 360 - degrees
return image.rotate(degrees)
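    # PIL's Image.rotate rotates counter-clockwise, while the API specifies
    # clockwise degrees; the 360 - degrees conversion above bridges the two,
    # e.g. a 90-degree clockwise request becomes image.rotate(270).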
def _Crop(self, image, transform):
"""Use PIL to crop the given image with the given transform.
Args:
image: PIL.Image.Image object to crop.
transform: images_service_pb.Transform to use when cropping.
Returns:
PIL.Image.Image with transforms performed on it.
Raises:
BadRequestError if the crop data given is bad.
"""
left_x = 0.0
top_y = 0.0
right_x = 1.0
bottom_y = 1.0
if transform.has_crop_left_x():
left_x = transform.crop_left_x()
self._ValidateCropArg(left_x)
if transform.has_crop_top_y():
top_y = transform.crop_top_y()
self._ValidateCropArg(top_y)
if transform.has_crop_right_x():
right_x = transform.crop_right_x()
self._ValidateCropArg(right_x)
if transform.has_crop_bottom_y():
bottom_y = transform.crop_bottom_y()
self._ValidateCropArg(bottom_y)
width, height = image.size
    box = (int(left_x * width),
           int(top_y * height),
           int(right_x * width),
           int(bottom_y * height))
return image.crop(box)
def _ProcessTransforms(self, image, transforms):
"""Execute PIL operations based on transform values.
Args:
image: PIL.Image.Image instance, image to manipulate.
      transforms: list of ImagesTransformRequest.Transform objects.
Returns:
PIL.Image.Image with transforms performed on it.
Raises:
      BadRequestError if more than images.MAX_TRANSFORMS_PER_REQUEST
      transforms are requested.
"""
new_image = image
if len(transforms) > images.MAX_TRANSFORMS_PER_REQUEST:
raise apiproxy_errors.ApplicationError(
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
for transform in transforms:
if transform.has_width() or transform.has_height():
new_image = self._Resize(new_image, transform)
elif transform.has_rotate():
new_image = self._Rotate(new_image, transform)
elif transform.has_horizontal_flip():
new_image = new_image.transpose(Image.FLIP_LEFT_RIGHT)
elif transform.has_vertical_flip():
new_image = new_image.transpose(Image.FLIP_TOP_BOTTOM)
elif (transform.has_crop_left_x() or
transform.has_crop_top_y() or
transform.has_crop_right_x() or
transform.has_crop_bottom_y()):
new_image = self._Crop(new_image, transform)
elif transform.has_autolevels():
logging.info("I'm Feeling Lucky autolevels will be visible once this "
"application is deployed.")
else:
logging.warn("Found no transformations found to perform.")
return new_image
|
apache-2.0
| -8,046,123,606,114,034,000
| 33.051502
| 79
| 0.664104
| false
| 3.970971
| false
| false
| false
|
chappers/sklearn-recipes
|
streaming_take2/dpp_classifier_dpp_only.py
|
1
|
13063
|
import sklearn
from sklearn.datasets import make_regression, make_classification
from sklearn.linear_model import SGDRegressor, SGDClassifier
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics.pairwise import euclidean_distances
import pandas as pd
import numpy as np
from scipy import stats
from scipy.stats import wilcoxon
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.decomposition import PCA, KernelPCA
from sklearn.kernel_approximation import Nystroem
from dpp import sample_dpp, decompose_kernel, sample_conditional_dpp
import random
from collections import Counter
def fast_euclid(X):
gamma = 1.0/X.shape[1]
if X.shape[0] < 1000:
L = rbf_kernel(X, gamma=gamma)
else:
L = Nystroem(gamma=gamma).fit_transform(X)
L = L.dot(L.T)
Ls = np.log(L)*(-1.0/(gamma))
return Ls
def class_separability(X, y, mode='mitra'):
"""
Calculates the class separability based on the mitra paper
"""
from ogfs_classifier import spec_supervised
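    # NOTE: the early return below makes the Mitra-style scatter-matrix code
    # underneath unreachable; it is kept as found in the original source.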
return spec_supervised(X, y)
# get prior probs
prior_proba = Counter(y)
s_w = []
s_b = []
m_o = np.mean(X, axis=0).reshape(-1, 1)
if X.shape[0] > 1000:
mode = 'kernel'
for class_ in prior_proba.keys():
mask = y==class_
X_sel = X[mask, :]
if mode == 'mitra':
cov_sig = np.cov(X_sel.T)
s_w.append(cov_sig * prior_proba[class_])
else:
K = fast_euclid(X_sel.T)
s_w.append(K * prior_proba[class_])
mu_m = prior_proba[class_] - m_o
s_b.append(np.dot(mu_m, mu_m.T))
s_w = np.atleast_2d(np.add(*s_w))
s_b = np.add(*s_b)
return s_b, s_w
def evaluate_feats0(s_b, s_w):
curr_u1 = []
curr_u2 = []
my_feats = []
prev_score = None
try:
s_b_inv = np.linalg.inv(s_b)
except:
s_b_inv = np.linalg.pinv(s_b)
S = np.trace(np.dot(s_b_inv, s_w))
eval_order = np.argsort(S).flatten()
for idx in list(eval_order):
if prev_score is None:
curr_u1.append(s_b[idx])
curr_u2.append(s_w[idx])
my_feats.append(idx)
else:
test_u1 = curr_u1[:]
test_u2 = curr_u2[:]
test_u1.append(s_b[idx])
test_u2.append(s_w[idx])
score = (prev_score - (np.sum(test_u1)/np.sum(test_u2)))
if score > 0.001:
my_feats.append(idx)
curr_u1.append(s_b[idx])
curr_u2.append(s_w[idx])
prev_score = np.sum(curr_u1)/np.sum(curr_u2)
return list(my_feats)
def evaluate_feats1(s_b, s_w, highest_best=True):
curr_u1 = []
curr_u2 = []
my_feats = []
prev_score = None
X = s_b/s_w
eval_order = np.argsort(X).flatten()
if highest_best:
eval_order = eval_order[::-1]
for idx in list(eval_order):
if prev_score is None:
curr_u1.append(s_b[idx])
curr_u2.append(s_w[idx])
my_feats.append(idx)
else:
test_u1 = curr_u1[:]
test_u2 = curr_u2[:]
test_u1.append(s_b[idx])
test_u2.append(s_w[idx])
score = ((np.sum(test_u1)/np.sum(test_u2)) - prev_score)
if score > 0.001:
my_feats.append(idx)
curr_u1.append(s_b[idx])
curr_u2.append(s_w[idx])
prev_score = np.sum(curr_u1)/np.sum(curr_u2)
return list(my_feats)
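# Both evaluators above do greedy forward selection: features are visited in
# score order and kept only while the cumulative s_b/s_w ratio improves (in
# the appropriate direction) by more than 0.001 per step.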
def evaluate_feats2(X, alpha=0.05, highest_best=True):
"""
    X is the raw scores
alpha is the level of significance
This version uses T-test
Returns: set of indices indicating selected features.
"""
eval_order = np.argsort(X)
if highest_best:
eval_order = eval_order[::-1]
selected_feats = []
selected_idx = []
for idx in eval_order:
if len(selected_feats) == 0:
selected_feats.append(X[idx])
selected_idx.append(idx)
continue
# now continue on and decide what to do
mu = np.mean(selected_feats)
sigma = np.std(selected_feats)
U = len(selected_feats)
if sigma == 0.0 and U > 1:
return selected_idx
elif sigma == 0.0:
selected_feats.append(X[idx])
selected_idx.append(idx)
continue
# otherwise compute score for T test.
t_stat = (mu - X[idx])/(sigma/np.sqrt(U))
t_alpha = stats.t.pdf(t_stat, U)
if t_alpha <= alpha:
selected_feats.append(X[idx])
selected_idx.append(idx)
else:
return selected_idx
return selected_idx
def evaluate_feats(s_b, s_w, alpha=0.05):
set1 = evaluate_feats1(s_b,s_w)
eval2 = s_b/s_w
if len(eval2.shape) > 1:
eval2 = np.diag(s_b)/np.diag(s_w)
set2 = evaluate_feats2(eval2, alpha)
return list(set(set1 + set2))
def entropy(X):
mm = MinMaxScaler()
X_mm = mm.fit_transform(X)
Dpq = euclidean_distances(X_mm)
D_bar = np.mean([x for x in np.triu(Dpq).flatten() if x != 0])
alpha = -np.log(0.5)/D_bar
sim_pq = np.exp(-alpha * Dpq)
log_sim_pq = np.log(sim_pq)
entropy = -2*np.sum(np.triu(sim_pq*log_sim_pq + ((1-sim_pq)*np.log((1-sim_pq))), 1))
return entropy
def wilcoxon_group(X, f):
"""
Wilcoxon is a very aggressive selector in an unsupervised sense.
Do we require a supervised group selection? (probably)
Probably one that is score based in order to select the "best" ones
similar to OGFS?
"""
# X is a matrix, f is a single vector
if len(X.shape) == 1:
return wilcoxon(X, f).pvalue
    # otherwise test each column and return the largest (most conservative) p-value
    return np.max([wilcoxon(x, f).pvalue for x in X.T])
"""
Implement DPP version that is similar to what is done above
sketch of solution
------------------
DPP requires a known number of parameters to check at each partial fit!
"""
class DPPClassifier(SGDClassifier):
def __init__(self, loss="log", penalty='l2', alpha=0.0001, l1_ratio=0.15,
fit_intercept=True, max_iter=None, tol=None, shuffle=True,
verbose=0, epsilon=0.1, n_jobs=1,
random_state=None, learning_rate="optimal", eta0=0.0,
power_t=0.5, class_weight=None, warm_start=False,
average=False, n_iter=None,
intragroup_decay = 0.9, pca_alpha=0.05,
intragroup_alpha=0.05, intergroup_thres=None):
super(DPPClassifier, self).__init__(
loss=loss, penalty=penalty, alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept, max_iter=max_iter, tol=tol,
shuffle=shuffle, verbose=verbose, epsilon=epsilon, n_jobs=n_jobs,
random_state=random_state, learning_rate=learning_rate, eta0=eta0,
power_t=power_t, class_weight=class_weight, warm_start=warm_start,
average=average, n_iter=n_iter)
self.coef_info = {'cols': [], 'coef':[], 'excluded_cols': []}
self.seen_cols = []
self.base_shape = None
self.dpp_k = {'pca': 0, 'kpca':0}
self.unseen_only = False
self.intragroup_alpha = intragroup_alpha
self.intergroup_thres = intergroup_thres if intergroup_thres is not None else epsilon
def _dpp_estimate_k(self, L):
"""
L is the input kernel
"""
"""
pca = PCA(n_components=None)
pca.fit(L)
pca_k = np.min(np.argwhere(np.cumsum(pca.explained_variance_ratio_) >
(1-self.intragroup_alpha)))
# also use KernelPCA
kpca = KernelPCA(kernel='rbf')
kpca.fit(L)
kpca_k = np.argwhere(kpca.lambdas_ > 0.01).flatten().shape[0]
self.dpp_k['pca'] = pca_k
self.dpp_k['kpca'] = kpca_k
"""
self.dpp_k['pca'] = None
def add_column_exclusion(self, cols):
self.coef_info['excluded_cols'] = list(self.coef_info['excluded_cols']) + list(cols)
def _fit_columns(self, X_, return_x=True, transform_only=False):
"""
        Method that filters out "unselected" columns; the goal is to drop
        any uninformative columns. Selection is based on column names only.
        (The return_x and transform_only flags are currently unused: the
        filtered frame is always returned.)
"""
X = X_[X_.columns.difference(self.coef_info['excluded_cols'])]
# order the columns correctly...
col_order = self.coef_info['cols'] + list([x for x in X.columns if x not in self.coef_info['cols']])
X = X[col_order]
return X
def _reg_penalty(self, X):
col_coef = [(col, coef) for col, coef in zip(X.columns.tolist(), self.coef_.flatten()) if np.abs(coef) >= self.intergroup_thres]
self.coef_info['cols'] = [x for x, _ in col_coef]
self.coef_info['coef'] = [x for _, x in col_coef]
self.coef_info['excluded_cols'] = [x for x in self.seen_cols if x not in self.coef_info['cols']]
self.coef_ = np.array(self.coef_info['coef']).reshape(1, -1)
def _dpp_sel(self, X_, y=None):
"""
DPP only relies on X.
We will condition the sampling based on:
* `self.coef_info['cols']`
After sampling it will go ahead and then perform grouped wilcoxon selection.
"""
X = np.array(X_)
print(X.shape)
cols_to_index = [idx for idx, x in enumerate(X_.columns) if x in self.coef_info['cols']]
unseen_cols_to_index = [idx for idx, x in enumerate(X_.columns) if x not in self.coef_info['cols']]
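        # NOTE: both branches below currently compute the same Nystroem
        # approximation; an exact rbf_kernel variant for small inputs is left
        # commented out.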
if X.shape[0] < 1000 or X.shape[1] < 100:
#feat_dist = rbf_kernel(X.T)
feat_dist = Nystroem().fit_transform(X.T)
feat_dist = feat_dist.dot(feat_dist.T)
else:
feat_dist = Nystroem().fit_transform(X.T)
feat_dist = feat_dist.dot(feat_dist.T)
#self._dpp_estimate_k(feat_dist)
#k = self.dpp_k['pca'] #- len(self.coef_info['cols'])
k = None
feat_index = []
#while len(feat_index) == 0:
if len(self.coef_info['cols']) == 0:
feat_index = sample_dpp(decompose_kernel(feat_dist), k=k)
else:
feat_index = sample_conditional_dpp(feat_dist, cols_to_index, k=k)
feat_index = [x for x in feat_index if x is not None]
index_to_col = [col for idx, col in enumerate(X_.columns) if idx in feat_index]
self.coef_info['cols'] = list(set(self.coef_info['cols'] + index_to_col))
col_rem = X_.columns.difference(self.coef_info['cols'])
# update column exclusion...
self.coef_info['excluded_cols'] = [x for x in self.coef_info['excluded_cols'] if x not in self.coef_info['cols']]
self.add_column_exclusion(col_rem)
def fit(self, X, y, coef_init=None, intercept_init=None,
sample_weight=None):
self.seen_cols = list(set(self.seen_cols + X.columns.tolist()))
# TODO: add DPP selection
self.coef_info = {'cols': [], 'coef':[], 'excluded_cols': []}
#self._dpp_sel(X, y)
#X = self._fit_columns(X)
super(DPPClassifier, self).fit(X, y, coef_init=coef_init, intercept_init=intercept_init,
sample_weight=sample_weight)
self._reg_penalty(X)
return self
def partial_fit(self, X, y, sample_weight=None):
X_ = X.copy()
unseen_col_size = len([1 for x in X.columns if x not in self.seen_cols])
self.seen_cols = list(set(self.seen_cols + X.columns.tolist()))
#sample_from_exclude_size = int(len(self.coef_info['excluded_cols']) - (len(self.coef_info['cols'])/2.0))+1
sample_from_exclude_size = int(len(self.coef_info['excluded_cols']) - unseen_col_size)
if sample_from_exclude_size > 0:
cols_excl_sample = random.sample(self.coef_info['excluded_cols'], sample_from_exclude_size)
X = X[X.columns.difference(cols_excl_sample)]
#X = X[X.columns.difference(self.coef_info['excluded_cols'])]
# TODO: add DPP selection
self._dpp_sel(X, y)
X = self._fit_columns(X_)
# now update coefficients
n_samples, n_features = X.shape
coef_list = np.zeros(n_features, dtype=np.float64, order="C")
coef_list[:len(self.coef_info['coef'])] = self.coef_info['coef']
self.coef_ = np.array(coef_list).reshape(1, -1)
super(DPPClassifier, self).partial_fit(X, y, sample_weight=None)
self._reg_penalty(X)
return self
def predict(self, X):
X = self._fit_columns(X, transform_only=True)
return super(DPPClassifier, self).predict(X)
def predict_proba(self, X):
X = self._fit_columns(X, transform_only=True)
return super(DPPClassifier, self).predict_proba(X)
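# Minimal usage sketch (not part of the original module): the estimator
# expects pandas DataFrames, since column names drive the selection
# bookkeeping. All data below is random and purely illustrative.
def _dpp_classifier_example():
    import numpy as np
    import pandas as pd
    X1 = pd.DataFrame(np.random.randn(100, 10),
                      columns=["c{}".format(i) for i in range(10)])
    y = np.random.randint(0, 2, 100)
    model = DPPClassifier(max_iter=5)
    model.fit(X1, y)
    # A later batch may bring new columns; partial_fit runs the DPP column
    # selection before updating the coefficients.
    X2 = pd.DataFrame(np.random.randn(100, 12),
                      columns=["c{}".format(i) for i in range(12)])
    model.partial_fit(X2, y)
    return model.predict(X2)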
|
mit
| 1,082,041,100,579,539,800
| 35.8
| 136
| 0.569318
| false
| 3.21274
| true
| false
| false
|
ym2050/pythonpractice
|
CF_SVD++_K-means/refine/utils.py
|
1
|
4130
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
# Imports next-version features into the current version so that some newer
# features can be used here; must be placed at the top of the file
'''
Utility classes and helper methods
'''
# from builtins import *
# Imports the builtins module, which contains common functions such as
# str(), min() and max(). Python loads it automatically at startup, before
# running any user code, so this import is not required.
import time
from os.path import isfile
from sklearn.externals import joblib
# joblib provides a set of tools for lightweight pipelining of Python
# functions, including transparent disk IO caching, fast serialization,
# simple parallelization and logging services.
# Timer: times the operations wrapped by it and prints the results.
class Timer:
    # def a(): defines a function named a
    # __init__ is the initializer, similar to a constructor
def __init__(self, progress=100000,msg=''):
self.starttime = None
self.progress = progress
self.i = 0
self.msg = msg
    # __enter__ is called before entering the with-block
def __enter__(self):
return self.start()
    # __exit__ is called after leaving the with-block (even if an exception
    # occurred inside it)
def __exit__(self, *args):
self.stop()
    # Start timing and print a start message
def start(self):
self.starttime = time.clock()
        # format(self.msg) substitutes self.msg at the braces in the string
print("{}: Started.".format(self.msg))
return self
    # Stop timing and print a finish message
def stop(self):
interval = time.clock() - self.starttime
print("{}: Finished in {:.3f} secs.".format(self.msg,interval))
    # Increment the counter and report progress every self.progress steps
def increment(self):
self.i += 1
if self.i % self.progress == 0:
interval = time.clock() - self.starttime
print("{}: {} step has been made in {:.3f} secs.".format(self.msg,self.i, interval))
def diskcache(fname, recache=False):
    '''
    Disk cache decorator.
    If the file fname exists, load and return it; otherwise run the function,
    write its result to the cache file, and return the result.
    Suited to functions that execute a fixed pipeline and return a result.
    :param fname: cache file name
    :param recache: whether to regenerate the cache
    '''
def wrapper(F):
def docache(*args, **kwargs):
if isfile(fname) and not recache:
                # load the cache file with joblib.load and return it
return joblib.load(fname)
else:
                # create the cache file with joblib.dump and return the
                # function's result
result = F(*args, **kwargs)
joblib.dump(result, fname)
return result
return docache
return wrapper
# Tests for the class and methods above; nothing important
def __timertest():
with Timer(100,msg="COUNTING") as t:
for i in range(10000):
t.increment()
def __diskcachetest():
import numpy as np
@diskcache('../tmp/computetest.cache')
def compute():
print("Compute Run")
return np.arange(100000)
result1 = compute()
result2 = compute()
print(np.array_equal(result1,result2))
def writeToFile(file, content):
    # the with statement closes the file automatically
    with open(file, 'wt+') as fd:
        fd.write(content)
def writeMatrix(file, M):
content = ''
for i in range(len(M)):
content = content + '{}: '.format(i+1) + str(M[i]) + '\n'
writeToFile(file, content)
def writeRates(file, uid, rate):
content = '{}: '.format(uid) + str(rate) + '\n'
writeToFile(file, content)
if __name__ == '__main__':
# __timertest()
# __diskcachetest()
# data = ['1', '2', '3']
data = [[1, 2, 3, 4, 7],[2, 4, 5, 6]]
rate = [2, 4, 5, 6]
# writeMatrix('e.txt', data)
writeRates('rate.txt', 1009, rate)
|
gpl-3.0
| -5,676,998,464,668,595,000
| 25.495798
| 96
| 0.564792
| false
| 2.265928
| false
| false
| false
|
eeshangarg/zulip
|
zerver/lib/email_notifications.py
|
1
|
28170
|
# See https://zulip.readthedocs.io/en/latest/subsystems/notifications.html
import re
from collections import defaultdict
from datetime import timedelta
from email.headerregistry import Address
from typing import Any, Dict, Iterable, List, Optional, Tuple
import html2text
import lxml.html
import pytz
from bs4 import BeautifulSoup
from django.conf import settings
from django.contrib.auth import get_backends
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import override as override_language
from lxml.cssselect import CSSSelector
from confirmation.models import one_click_unsubscribe_link
from zerver.decorator import statsd_increment
from zerver.lib.markdown.fenced_code import FENCE_RE
from zerver.lib.message import bulk_access_messages
from zerver.lib.queue import queue_json_publish
from zerver.lib.send_email import FromAddress, send_future_email
from zerver.lib.types import DisplayRecipientT
from zerver.lib.url_encoding import (
huddle_narrow_url,
personal_narrow_url,
stream_narrow_url,
topic_narrow_url,
)
from zerver.models import (
Message,
Recipient,
Stream,
UserMessage,
UserProfile,
get_context_for_message,
get_display_recipient,
get_user_profile_by_id,
receives_offline_email_notifications,
)
def relative_to_full_url(base_url: str, content: str) -> str:
# Convert relative URLs to absolute URLs.
fragment = lxml.html.fromstring(content)
# We handle narrow URLs separately because of two reasons:
    # 1: 'lxml' seems to have an issue dealing with URLs that begin with
    #    `#`, due to which it doesn't add a `/` before joining the base_url
    #    to the relative URL.
# 2: We also need to update the title attribute in the narrow links which
# is not possible with `make_links_absolute()`.
for link_info in fragment.iterlinks():
elem, attrib, link, pos = link_info
match = re.match("/?#narrow/", link)
if match is not None:
link = re.sub(r"^/?#narrow/", base_url + "/#narrow/", link)
elem.set(attrib, link)
# Only manually linked narrow URLs have title attribute set.
if elem.get("title") is not None:
elem.set("title", link)
# Inline images can't be displayed in the emails as the request
# from the mail server can't be authenticated because it has no
# user_profile object linked to it. So we scrub the inline image
# container.
inline_image_containers = fragment.find_class("message_inline_image")
for container in inline_image_containers:
container.drop_tree()
# The previous block handles most inline images, but for messages
# where the entire Markdown input was just the URL of an image
# (i.e. the entire body is a message_inline_image object), the
# entire message body will be that image element; here, we need a
# more drastic edit to the content.
if fragment.get("class") == "message_inline_image":
image_link = fragment.find("a").get("href")
image_title = fragment.find("a").get("title")
fragment = lxml.html.Element("p")
a = lxml.html.Element("a")
a.set("href", image_link)
a.set("target", "_blank")
a.set("title", image_title)
a.text = image_link
fragment.append(a)
fragment.make_links_absolute(base_url)
content = lxml.html.tostring(fragment, encoding="unicode")
return content
def fix_emojis(content: str, base_url: str, emojiset: str) -> str:
def make_emoji_img_elem(emoji_span_elem: CSSSelector) -> Dict[str, Any]:
# Convert the emoji spans to img tags.
classes = emoji_span_elem.get("class")
match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
# re.search is capable of returning None,
# but since the parent function should only be called with a valid css element
# we assert that it does not.
assert match is not None
emoji_code = match.group("emoji_code")
emoji_name = emoji_span_elem.get("title")
alt_code = emoji_span_elem.text
image_url = base_url + f"/static/generated/emoji/images-{emojiset}-64/{emoji_code}.png"
img_elem = lxml.html.fromstring(
f'<img alt="{alt_code}" src="{image_url}" title="{emoji_name}">'
)
img_elem.set("style", "height: 20px;")
img_elem.tail = emoji_span_elem.tail
return img_elem
fragment = lxml.html.fromstring(content)
for elem in fragment.cssselect("span.emoji"):
parent = elem.getparent()
img_elem = make_emoji_img_elem(elem)
parent.replace(elem, img_elem)
for realm_emoji in fragment.cssselect(".emoji"):
del realm_emoji.attrib["class"]
realm_emoji.set("style", "height: 20px;")
content = lxml.html.tostring(fragment, encoding="unicode")
return content
def fix_spoilers_in_html(content: str, language: str) -> str:
with override_language(language):
spoiler_title: str = _("Open Zulip to see the spoiler content")
fragment = lxml.html.fromstring(content)
spoilers = fragment.find_class("spoiler-block")
for spoiler in spoilers:
header = spoiler.find_class("spoiler-header")[0]
spoiler_content = spoiler.find_class("spoiler-content")[0]
header_content = header.find("p")
if header_content is None:
            # Create a new element to append the spoiler to
header_content = lxml.html.fromstring("<p></p>")
header.append(header_content)
else:
            # Add a space. It's simpler to append a new span element than to
            # insert text after the last node, since neither .text nor .tail
            # does the right thing for us here.
header_content.append(lxml.html.fromstring("<span> </span>"))
span_elem = lxml.html.fromstring(
            f'<span class="spoiler-title" title="{spoiler_title}">({spoiler_title})</span>'
)
header_content.append(span_elem)
header.drop_tag()
spoiler_content.drop_tree()
content = lxml.html.tostring(fragment, encoding="unicode")
return content
def fix_spoilers_in_text(content: str, language: str) -> str:
with override_language(language):
spoiler_title: str = _("Open Zulip to see the spoiler content")
lines = content.split("\n")
output = []
open_fence = None
for line in lines:
m = FENCE_RE.match(line)
if m:
fence = m.group("fence")
lang = m.group("lang")
if lang == "spoiler":
open_fence = fence
output.append(line)
output.append(f"({spoiler_title})")
elif fence == open_fence:
open_fence = None
output.append(line)
elif not open_fence:
output.append(line)
return "\n".join(output)
def build_message_list(
user: UserProfile,
messages: List[Message],
stream_map: Dict[int, Stream], # only needs id, name
) -> List[Dict[str, Any]]:
"""
Builds the message list object for the message notification email template.
The messages are collapsed into per-recipient and per-sender blocks, like
    our web interface.
"""
messages_to_render: List[Dict[str, Any]] = []
def sender_string(message: Message) -> str:
if message.recipient.type in (Recipient.STREAM, Recipient.HUDDLE):
return message.sender.full_name
else:
return ""
def fix_plaintext_image_urls(content: str) -> str:
# Replace image URLs in plaintext content of the form
# [image name](image url)
# with a simple hyperlink.
return re.sub(r"\[(\S*)\]\((\S*)\)", r"\2", content)
def append_sender_to_message(
message_plain: str, message_html: str, sender: str
) -> Tuple[str, str]:
message_plain = f"{sender}: {message_plain}"
message_soup = BeautifulSoup(message_html, "html.parser")
sender_name_soup = BeautifulSoup(f"<b>{sender}</b>: ", "html.parser")
first_tag = message_soup.find()
if first_tag.name == "p":
first_tag.insert(0, sender_name_soup)
else:
message_soup.insert(0, sender_name_soup)
return message_plain, str(message_soup)
def build_message_payload(message: Message, sender: Optional[str] = None) -> Dict[str, str]:
plain = message.content
plain = fix_plaintext_image_urls(plain)
# There's a small chance of colliding with non-Zulip URLs containing
# "/user_uploads/", but we don't have much information about the
# structure of the URL to leverage. We can't use `relative_to_full_url()`
# function here because it uses a stricter regex which will not work for
# plain text.
plain = re.sub(r"/user_uploads/(\S*)", user.realm.uri + r"/user_uploads/\1", plain)
plain = fix_spoilers_in_text(plain, user.default_language)
assert message.rendered_content is not None
html = message.rendered_content
html = relative_to_full_url(user.realm.uri, html)
html = fix_emojis(html, user.realm.uri, user.emojiset)
html = fix_spoilers_in_html(html, user.default_language)
if sender:
plain, html = append_sender_to_message(plain, html, sender)
return {"plain": plain, "html": html}
def build_sender_payload(message: Message) -> Dict[str, Any]:
sender = sender_string(message)
return {"sender": sender, "content": [build_message_payload(message, sender)]}
def message_header(message: Message) -> Dict[str, Any]:
if message.recipient.type == Recipient.PERSONAL:
narrow_link = get_narrow_url(user, message)
header = f"You and {message.sender.full_name}"
header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
elif message.recipient.type == Recipient.HUDDLE:
display_recipient = get_display_recipient(message.recipient)
assert not isinstance(display_recipient, str)
narrow_link = get_narrow_url(user, message, display_recipient=display_recipient)
other_recipients = [r["full_name"] for r in display_recipient if r["id"] != user.id]
header = "You and {}".format(", ".join(other_recipients))
header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
else:
stream_id = message.recipient.type_id
stream = stream_map.get(stream_id, None)
if stream is None:
# Some of our callers don't populate stream_map, so
# we just populate the stream from the database.
stream = Stream.objects.only("id", "name").get(id=stream_id)
narrow_link = get_narrow_url(user, message, stream=stream)
header = f"{stream.name} > {message.topic_name()}"
stream_link = stream_narrow_url(user.realm, stream)
header_html = f"<a href='{stream_link}'>{stream.name}</a> > <a href='{narrow_link}'>{message.topic_name()}</a>"
return {
"plain": header,
"html": header_html,
"stream_message": message.recipient.type_name() == "stream",
}
    # Collapse message list to
# [
# {
# "header": {
# "plain":"header",
# "html":"htmlheader"
# }
# "senders":[
# {
# "sender":"sender_name",
# "content":[
# {
# "plain":"content",
# "html":"htmlcontent"
# }
# {
# "plain":"content",
# "html":"htmlcontent"
# }
# ]
# }
# ]
# },
# ]
messages.sort(key=lambda message: message.date_sent)
for message in messages:
header = message_header(message)
# If we want to collapse into the previous recipient block
if len(messages_to_render) > 0 and messages_to_render[-1]["header"] == header:
sender = sender_string(message)
sender_block = messages_to_render[-1]["senders"]
# Same message sender, collapse again
if sender_block[-1]["sender"] == sender:
sender_block[-1]["content"].append(build_message_payload(message))
else:
# Start a new sender block
sender_block.append(build_sender_payload(message))
else:
# New recipient and sender block
recipient_block = {"header": header, "senders": [build_sender_payload(message)]}
messages_to_render.append(recipient_block)
return messages_to_render
def get_narrow_url(
user_profile: UserProfile,
message: Message,
display_recipient: Optional[DisplayRecipientT] = None,
stream: Optional[Stream] = None,
) -> str:
"""The display_recipient and stream arguments are optional. If not
provided, we'll compute them from the message; they exist as a
performance optimization for cases where the caller needs those
data too.
"""
if message.recipient.type == Recipient.PERSONAL:
assert stream is None
assert display_recipient is None
return personal_narrow_url(
realm=user_profile.realm,
sender=message.sender,
)
elif message.recipient.type == Recipient.HUDDLE:
assert stream is None
if display_recipient is None:
display_recipient = get_display_recipient(message.recipient)
assert display_recipient is not None
assert not isinstance(display_recipient, str)
other_user_ids = [r["id"] for r in display_recipient if r["id"] != user_profile.id]
return huddle_narrow_url(
realm=user_profile.realm,
other_user_ids=other_user_ids,
)
else:
assert display_recipient is None
if stream is None:
stream = Stream.objects.only("id", "name").get(id=message.recipient.type_id)
return topic_narrow_url(user_profile.realm, stream, message.topic_name())
def message_content_allowed_in_missedmessage_emails(user_profile: UserProfile) -> bool:
return (
user_profile.realm.message_content_allowed_in_email_notifications
and user_profile.message_content_in_email_notifications
)
@statsd_increment("missed_message_reminders")
def do_send_missedmessage_events_reply_in_zulip(
user_profile: UserProfile, missed_messages: List[Dict[str, Any]], message_count: int
) -> None:
"""
Send a reminder email to a user if she's missed some PMs by being offline.
The email will have its reply to address set to a limited used email
address that will send a Zulip message to the correct recipient. This
allows the user to respond to missed PMs, huddles, and @-mentions directly
from the email.
`user_profile` is the user to send the reminder to
    `missed_messages` is a list of dictionaries, each containing a Message
    object and other data, for a group of messages that share a recipient
    (and topic)
"""
from zerver.context_processors import common_context
recipients = {
(msg["message"].recipient_id, msg["message"].topic_name()) for msg in missed_messages
}
if len(recipients) != 1:
raise ValueError(
f"All missed_messages must have the same recipient and topic {recipients!r}",
)
# This link is no longer a part of the email, but keeping the code in case
# we find a clean way to add it back in the future
unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
context = common_context(user_profile)
context.update(
name=user_profile.full_name,
message_count=message_count,
unsubscribe_link=unsubscribe_link,
realm_name_in_notifications=user_profile.realm_name_in_notifications,
)
triggers = [message["trigger"] for message in missed_messages]
unique_triggers = set(triggers)
context.update(
mention="mentioned" in unique_triggers or "wildcard_mentioned" in unique_triggers,
stream_email_notify="stream_email_notify" in unique_triggers,
mention_count=triggers.count("mentioned") + triggers.count("wildcard_mentioned"),
)
# If this setting (email mirroring integration) is enabled, only then
# can users reply to email to send message to Zulip. Thus, one must
# ensure to display warning in the template.
if settings.EMAIL_GATEWAY_PATTERN:
context.update(
reply_to_zulip=True,
)
else:
context.update(
reply_to_zulip=False,
)
from zerver.lib.email_mirror import create_missed_message_address
reply_to_address = create_missed_message_address(user_profile, missed_messages[0]["message"])
if reply_to_address == FromAddress.NOREPLY:
reply_to_name = ""
else:
reply_to_name = "Zulip"
narrow_url = get_narrow_url(user_profile, missed_messages[0]["message"])
context.update(
narrow_url=narrow_url,
)
senders = list({m["message"].sender for m in missed_messages})
if missed_messages[0]["message"].recipient.type == Recipient.HUDDLE:
display_recipient = get_display_recipient(missed_messages[0]["message"].recipient)
# Make sure that this is a list of strings, not a string.
assert not isinstance(display_recipient, str)
other_recipients = [r["full_name"] for r in display_recipient if r["id"] != user_profile.id]
context.update(group_pm=True)
if len(other_recipients) == 2:
huddle_display_name = " and ".join(other_recipients)
context.update(huddle_display_name=huddle_display_name)
elif len(other_recipients) == 3:
huddle_display_name = (
f"{other_recipients[0]}, {other_recipients[1]}, and {other_recipients[2]}"
)
context.update(huddle_display_name=huddle_display_name)
else:
huddle_display_name = "{}, and {} others".format(
", ".join(other_recipients[:2]), len(other_recipients) - 2
)
context.update(huddle_display_name=huddle_display_name)
elif missed_messages[0]["message"].recipient.type == Recipient.PERSONAL:
context.update(private_message=True)
elif context["mention"] or context["stream_email_notify"]:
# Keep only the senders who actually mentioned the user
if context["mention"]:
senders = list(
{
m["message"].sender
for m in missed_messages
if m["trigger"] == "mentioned" or m["trigger"] == "wildcard_mentioned"
}
)
message = missed_messages[0]["message"]
stream = Stream.objects.only("id", "name").get(id=message.recipient.type_id)
stream_header = f"{stream.name} > {message.topic_name()}"
context.update(
stream_header=stream_header,
)
else:
raise AssertionError("Invalid messages!")
# If message content is disabled, then flush all information we pass to email.
if not message_content_allowed_in_missedmessage_emails(user_profile):
realm = user_profile.realm
context.update(
reply_to_zulip=False,
messages=[],
sender_str="",
realm_str=realm.name,
huddle_display_name="",
show_message_content=False,
message_content_disabled_by_user=not user_profile.message_content_in_email_notifications,
message_content_disabled_by_realm=not realm.message_content_allowed_in_email_notifications,
)
else:
context.update(
messages=build_message_list(
user=user_profile,
messages=[m["message"] for m in missed_messages],
stream_map={},
),
sender_str=", ".join(sender.full_name for sender in senders),
realm_str=user_profile.realm.name,
show_message_content=True,
)
with override_language(user_profile.default_language):
from_name: str = _("Zulip notifications")
from_address = FromAddress.NOREPLY
if len(senders) == 1 and settings.SEND_MISSED_MESSAGE_EMAILS_AS_USER:
# If this setting is enabled, you can reply to the Zulip
# message notification emails directly back to the original sender.
# However, one must ensure the Zulip server is in the SPF
# record for the domain, or there will be spam/deliverability
# problems.
#
# Also, this setting is not really compatible with
# EMAIL_ADDRESS_VISIBILITY_ADMINS.
sender = senders[0]
from_name, from_address = (sender.full_name, sender.email)
context.update(
reply_to_zulip=False,
)
email_dict = {
"template_prefix": "zerver/emails/missed_message",
"to_user_ids": [user_profile.id],
"from_name": from_name,
"from_address": from_address,
"reply_to_email": str(Address(display_name=reply_to_name, addr_spec=reply_to_address)),
"context": context,
}
queue_json_publish("email_senders", email_dict)
user_profile.last_reminder = timezone_now()
user_profile.save(update_fields=["last_reminder"])
def handle_missedmessage_emails(
user_profile_id: int, missed_email_events: Iterable[Dict[str, Any]]
) -> None:
message_ids = {event.get("message_id"): event.get("trigger") for event in missed_email_events}
user_profile = get_user_profile_by_id(user_profile_id)
if not receives_offline_email_notifications(user_profile):
return
# Note: This query structure automatically filters out any
# messages that were permanently deleted, since those would now be
# in the ArchivedMessage table, not the Message table.
messages = Message.objects.filter(
usermessage__user_profile_id=user_profile,
id__in=message_ids,
usermessage__flags=~UserMessage.flags.read,
)
# Cancel missed-message emails for deleted messages
messages = [um for um in messages if um.content != "(deleted)"]
if not messages:
return
# We bucket messages by tuples that identify similar messages.
# For streams it's recipient_id and topic.
# For PMs it's recipient id and sender.
messages_by_bucket: Dict[Tuple[int, str], List[Message]] = defaultdict(list)
for msg in messages:
if msg.recipient.type == Recipient.PERSONAL:
# For PM's group using (recipient, sender).
messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
else:
messages_by_bucket[(msg.recipient_id, msg.topic_name())].append(msg)
message_count_by_bucket = {
bucket_tup: len(msgs) for bucket_tup, msgs in messages_by_bucket.items()
}
for msg_list in messages_by_bucket.values():
msg = min(msg_list, key=lambda msg: msg.date_sent)
if msg.is_stream_message():
context_messages = get_context_for_message(msg)
filtered_context_messages = bulk_access_messages(user_profile, context_messages)
msg_list.extend(filtered_context_messages)
# Sort emails by least recently-active discussion.
bucket_tups: List[Tuple[Tuple[int, str], int]] = []
for bucket_tup, msg_list in messages_by_bucket.items():
max_message_id = max(msg_list, key=lambda msg: msg.id).id
bucket_tups.append((bucket_tup, max_message_id))
bucket_tups = sorted(bucket_tups, key=lambda x: x[1])
# Send an email per bucket.
for bucket_tup, ignored_max_id in bucket_tups:
unique_messages = {}
for m in messages_by_bucket[bucket_tup]:
unique_messages[m.id] = dict(
message=m,
trigger=message_ids.get(m.id),
)
do_send_missedmessage_events_reply_in_zulip(
user_profile,
list(unique_messages.values()),
message_count_by_bucket[bucket_tup],
)
def followup_day2_email_delay(user: UserProfile) -> timedelta:
days_to_delay = 2
user_tz = user.timezone
if user_tz == "":
user_tz = "UTC"
signup_day = user.date_joined.astimezone(pytz.timezone(user_tz)).isoweekday()
if signup_day == 5:
# If the day is Friday then delay should be till Monday
days_to_delay = 3
elif signup_day == 4:
# If the day is Thursday then delay should be till Friday
days_to_delay = 1
# The delay should be 1 hour before the above calculated delay as
# our goal is to maximize the chance that this email is near the top
# of the user's inbox when the user sits down to deal with their inbox,
# or comes in while they are dealing with their inbox.
return timedelta(days=days_to_delay, hours=-1)
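# Pure-arithmetic sketch (not part of the upstream module) of the delays
# produced by followup_day2_email_delay for each signup-day branch.
def _followup_day2_delay_examples() -> Dict[str, timedelta]:
    return {
        "friday": timedelta(days=3, hours=-1),  # lands Monday morning
        "thursday": timedelta(days=1, hours=-1),  # lands Friday morning
        "other": timedelta(days=2, hours=-1),  # two days later
    }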
def enqueue_welcome_emails(user: UserProfile, realm_creation: bool = False) -> None:
from zerver.context_processors import common_context
if settings.WELCOME_EMAIL_SENDER is not None:
# line break to avoid triggering lint rule
from_name = settings.WELCOME_EMAIL_SENDER["name"]
from_address = settings.WELCOME_EMAIL_SENDER["email"]
else:
from_name = None
from_address = FromAddress.support_placeholder
other_account_count = (
UserProfile.objects.filter(delivery_email__iexact=user.delivery_email)
.exclude(id=user.id)
.count()
)
unsubscribe_link = one_click_unsubscribe_link(user, "welcome")
context = common_context(user)
context.update(
unsubscribe_link=unsubscribe_link,
keyboard_shortcuts_link=user.realm.uri + "/help/keyboard-shortcuts",
realm_name=user.realm.name,
realm_creation=realm_creation,
email=user.delivery_email,
is_realm_admin=user.is_realm_admin,
)
if user.is_realm_admin:
context["getting_started_link"] = (
user.realm.uri + "/help/getting-your-organization-started-with-zulip"
)
else:
context["getting_started_link"] = "https://zulip.com"
# Imported here to avoid import cycles.
from zproject.backends import ZulipLDAPAuthBackend, email_belongs_to_ldap
if email_belongs_to_ldap(user.realm, user.delivery_email):
context["ldap"] = True
for backend in get_backends():
            # If the user is doing authentication via LDAP, record the LDAP
            # username. Note that we exclude ZulipLDAPUserPopulator here,
            # since that isn't used for authentication.
if isinstance(backend, ZulipLDAPAuthBackend):
context["ldap_username"] = backend.django_to_ldap_username(user.delivery_email)
break
send_future_email(
"zerver/emails/followup_day1",
user.realm,
to_user_ids=[user.id],
from_name=from_name,
from_address=from_address,
context=context,
)
if other_account_count == 0:
send_future_email(
"zerver/emails/followup_day2",
user.realm,
to_user_ids=[user.id],
from_name=from_name,
from_address=from_address,
context=context,
delay=followup_day2_email_delay(user),
)
def convert_html_to_markdown(html: str) -> str:
parser = html2text.HTML2Text()
markdown = parser.handle(html).strip()
# We want images to get linked and inline previewed, but html2text will turn
# them into links of the form ``, which is
# ugly. Run a regex over the resulting description, turning links of the
# form `` into
# `[image.png](http://foo.com/image.png)`.
return re.sub("!\\[\\]\\((\\S*)/(\\S*)\\?(\\S*)\\)", "[\\2](\\1/\\2)", markdown)
|
apache-2.0
| -4,329,368,778,265,539,600
| 39.416069
| 123
| 0.625204
| false
| 3.867912
| false
| false
| false
|
Guiiix/ip150_interfacer
|
classes/Interfacer.py
|
1
|
5394
|
from classes.Parser import Parser
from classes.Paracrypt import Paracrypt
from threading import Thread
import urllib2
import time
import random
class Interfacer:
def __init__(self, ip, port, username, password, verbose_level, zones_identifier,
area_identifier, ses_identifier, status_identifier, states_identifier, keep_alive_allowed_errors):
self.ip = ip
self.port = str(port)
self.username = str(username)
self.password = str(password)
self.connected = False
self.verbose_level = verbose_level
self.parser = Parser(self, zones_identifier, area_identifier, ses_identifier, status_identifier, states_identifier)
self.current_status = "Init phase"
self.paracrypt = Paracrypt(username, password)
self.running = False
        self.keep_alive_allowed_errors = keep_alive_allowed_errors
self.keep_alive_errors = 0
### Main method ###
def run(self, login_max_try, ready_wait_time, update_time_interval):
th = Thread(target=self.keep_alive)
if not self.loop_login(login_max_try, ready_wait_time):
return False
self.connected = True
equipment = self.get_equipment()
if not equipment:
return False
print equipment
self.zones = equipment[0]
self.areas = equipment[1]
self.update_status()
self.running = True
self.current_status = "Running"
th.start()
while self.running and self.connected:
self.update_status()
time.sleep(update_time_interval)
        self.running = False
th.join()
if self.connected:
self.logout()
    ### These methods provide some useful helper features ###
def display_message(self, msg, verbose_level):
if verbose_level <= self.verbose_level:
print '\033[94m' + "* <INTERFACER> : " + msg + '\033[0m'
def raise_error(self, msg):
print '\033[94m' + "* <INTERFACER> : /!\ " + msg + '\033[0m'
self.current_status = msg
def do_request(self, location):
try:
html = urllib2.urlopen("http://" + self.ip + ":" + self.port + "/" + location, timeout=1).read()
self.display_message("Making request to /" + location, 2)
return html
except Exception:
self.raise_error('Unable to make request to /' + location)
return False
### Login/logout methods ###
def loop_login(self, login_max_try, ready_wait_time):
# Trying to connect
retry = True
i = 0
while retry:
if self.login():
retry = False
else:
i += 1
if (i == login_max_try):
return False
# Waiting for server to be ready
while not self.do_request("index.html"):
self.raise_error("Not yes ready...")
time.sleep(ready_wait_time)
self.display_message("Seems to be ready", 1)
return True
def login(self):
html = self.do_request("login_page.html")
if not html:
return False
js = self.parser.js_from_html(html)
if not js:
return False
self.display_message("Looking for someone connected...", 1)
if self.parser.someone_connected(js):
self.raise_error('Unable to login : someone is already connected')
time.sleep(30)
return False
        ses = self.parser.parse_ses(js)
        if ses == False:
            self.raise_error('Unable to login : No SES value found')
            return False
self.display_message('SES Value found, encrypting credentials...', 2)
credentials = self.paracrypt.login_encrypt(ses)
self.display_message('Sending auth request...', 2)
html = self.do_request("default.html?u=" + str(credentials['user']) + "&p=" + str(credentials['password']))
if not html:
return False
return True
def logout(self):
self.connected = False
return self.do_request("logout.html")
### Status/equipment methods ###
def get_status(self):
html = self.do_request("statuslive.html")
if not html:
return False
js = self.parser.js_from_html(html)
if not js:
return False
return self.parser.parse_status(js)
def get_equipment(self):
html = self.do_request("index.html")
if not html:
return False
js = self.parser.js_from_html(html)
if not js:
return False
return self.parser.parse_equipment(js)
def update_status(self):
status = self.get_status()
if not status:
return False
states = status[1]
status = status[0]
if len(status) == len(self.zones):
for i in range(0, len(status)):
self.zones[i]["status"] = status[i]
else:
self.raise_error("status (" + str(len(status)) + ") != zones (" + str(len(self.zones)) + " )...")
return False
if len(states) == len(self.areas):
for i in range(0, len(states)):
self.areas[i]["armed"] = states[i]
else:
self.raise_error("Erf, states (" + str(len(states)) + ") != areas (" + str(len(Areas)) + " )...")
return False
return True
### Stay connected ###
def keep_alive(self):
while (self.running):
#generate random id
rand = random.randint(1000000000000000,9999999999999999)
html = self.do_request("keep_alive.html?msgid=1&" + str(rand))
if not html:
self.keep_alive_errors += 1
if self.keep_alive_allowed_errors == self.keep_alive_errors:
self.raise_error("Keep alive errors exceeded")
self.running = False
return False
else:
if "javascript" in html:
self.raise_error("Connection lost")
self.running = False
self.connected = False
return False
time.sleep(2.5)
### Commands methods ###
def arm(self):
self.do_request("statuslive.html?area=00&value=r")
def desarm(self):
self.do_request("statuslive.html?area=00&value=d")
def partiel(self):
self.do_request("statuslive.html?area=00&value=s")
|
gpl-3.0
| 2,701,808,814,002,473,000
| 25.975
| 117
| 0.673526
| false
| 3.04918
| false
| false
| false
|
koomik/CouchPotatoServer
|
couchpotato/core/media/_base/providers/torrent/torrentday.py
|
1
|
3584
|
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
log = CPLog(__name__)
class Base(TorrentProvider):
urls = {
'test': 'http://www.td.af/',
'login': 'http://www.td.af/torrents/',
'login_check': 'http://www.torrentday.com/userdetails.php',
'detail': 'http://www.td.af/details.php?id=%s',
'search': 'http://www.td.af/V3/API/API.php',
'download': 'http://www.td.af/download.php/%s/%s',
}
http_time_between_calls = 1 # Seconds
def _search(self, media, quality, results):
query = self.buildUrl(media)
data = {
'/browse.php?': None,
'cata': 'yes',
'jxt': 8,
'jxw': 'b',
'search': query,
}
data = self.getJsonData(self.urls['search'], data = data)
        try:
            torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
        except Exception:
            return
for torrent in torrents:
results.append({
'id': torrent['id'],
'name': torrent['name'],
'url': self.urls['download'] % (torrent['id'], torrent['fname']),
'detail_url': self.urls['detail'] % torrent['id'],
'size': self.parseSize(torrent.get('size')),
'seeders': tryInt(torrent.get('seed')),
'leechers': tryInt(torrent.get('leech')),
})
def getLoginParams(self):
return {
'username': self.conf('username'),
'password': self.conf('password'),
'submit.x': 18,
'submit.y': 11,
'submit': 'submit',
}
def loginSuccess(self, output):
return 'Password not correct' not in output
def loginCheckSuccess(self, output):
return 'logout.php' in output.lower()
config = [{
'name': 'torrentday',
'groups': [
{
'tab': 'searcher',
'list': 'torrent_providers',
'name': 'TorrentDay',
'description': 'See <a href="http://www.td.af/">TorrentDay</a>',
'wizard': True,
'options': [
{
'name': 'enabled',
'type': 'enabler',
'default': False,
},
{
'name': 'username',
'default': '',
},
{
'name': 'password',
'default': '',
'type': 'password',
},
{
'name': 'seed_ratio',
'label': 'Seed ratio',
'type': 'float',
'default': 1,
'description': 'Will not be (re)moved until this seed ratio is met.',
},
{
'name': 'seed_time',
'label': 'Seed time',
'type': 'int',
'default': 40,
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
},
{
'name': 'extra_score',
'advanced': True,
'label': 'Extra Score',
'type': 'int',
'default': 0,
'description': 'Starting score for each release found via this provider.',
}
],
},
],
}]
|
gpl-3.0
| -641,086,129,201,212,200
| 30.716814
| 99
| 0.429129
| false
| 4.281959
| false
| false
| false
|
bitmovin/bitmovin-python
|
bitmovin/resources/models/encodings/muxings/information/progressive_ts_information.py
|
1
|
4987
|
from bitmovin.resources import Resource
from bitmovin.resources.models.encodings.muxings.information import ByteRange
from bitmovin.errors import InvalidTypeError
from bitmovin.utils.serialization import Serializable
from .muxing_information_video_track import MuxingInformationVideoTrack
from .muxing_information_audio_track import MuxingInformationAudioTrack
class ProgressiveTSInformation(Resource, Serializable):
def __init__(self, mime_type=None, file_size=None, container_format=None, container_bitrate=None, duration=None,
video_tracks=None, audio_tracks=None, byte_ranges=None):
super().__init__()
self.mime_type = mime_type
self.file_size = file_size
self.container_format = container_format
self.container_bitrate = container_bitrate
self.duration = duration
self._video_tracks = None
self._audio_tracks = None
self._byte_ranges = None
self.video_tracks = video_tracks
self.audio_tracks = audio_tracks
self.byte_ranges = byte_ranges
@classmethod
def parse_from_json_object(cls, json_object):
mime_type = json_object.get('mimeType')
file_size = json_object.get('fileSize')
container_format = json_object.get('containerFormat')
container_bitrate = json_object.get('containerBitrate')
duration = json_object.get('duration')
video_tracks = json_object.get('videoTracks')
audio_tracks = json_object.get('audioTracks')
byte_ranges = json_object.get('byteRanges')
progressive_ts_muxing_information = ProgressiveTSInformation(mime_type=mime_type,
file_size=file_size,
container_format=container_format,
container_bitrate=container_bitrate,
duration=duration,
video_tracks=video_tracks,
audio_tracks=audio_tracks,
byte_ranges=byte_ranges)
return progressive_ts_muxing_information
@property
def audio_tracks(self):
return self._audio_tracks
@audio_tracks.setter
def audio_tracks(self, new_audio_tracks):
if new_audio_tracks is None:
return
if not isinstance(new_audio_tracks, list):
raise InvalidTypeError('new_audio_tracks has to be a list of MuxingInformationAudioTrack objects')
if all(isinstance(audio_track, MuxingInformationAudioTrack) for audio_track in new_audio_tracks):
self._audio_tracks = new_audio_tracks
else:
audio_tracks = []
for json_object in new_audio_tracks:
audio_track = MuxingInformationAudioTrack.parse_from_json_object(json_object)
audio_tracks.append(audio_track)
self._audio_tracks = audio_tracks
@property
def video_tracks(self):
return self._video_tracks
@video_tracks.setter
def video_tracks(self, new_video_tracks):
if new_video_tracks is None:
return
if not isinstance(new_video_tracks, list):
raise InvalidTypeError('new_video_tracks has to be a list of MuxingInformationVideoTrack objects')
if all(isinstance(video_track, MuxingInformationVideoTrack) for video_track in new_video_tracks):
self._video_tracks = new_video_tracks
else:
video_tracks = []
for json_object in new_video_tracks:
video_track = MuxingInformationVideoTrack.parse_from_json_object(json_object)
video_tracks.append(video_track)
self._video_tracks = video_tracks
@property
def byte_ranges(self):
return self._byte_ranges
@byte_ranges.setter
def byte_ranges(self, new_value):
if new_value is None:
return
if not isinstance(new_value, list):
raise InvalidTypeError('byte_ranges has to be a list of ByteRange instances')
if all(isinstance(output, ByteRange) for output in new_value):
byte_ranges = []
for item in new_value:
byte_ranges.append(item)
self._byte_ranges = byte_ranges
else:
byte_ranges = []
for item in new_value:
byte_ranges.append(ByteRange.parse_from_json_object(item))
self._byte_ranges = byte_ranges
def serialize(self):
serialized = super().serialize()
serialized['videoTracks'] = self.video_tracks
serialized['audioTracks'] = self.audio_tracks
serialized['byteRanges'] = self.byte_ranges
return serialized
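# Illustrative sketch (not part of the original module): building the
# resource from an API-style JSON payload and serializing it back. All field
# values below are made up.
def _progressive_ts_information_example():
    payload = {
        'mimeType': 'video/mp2t',
        'fileSize': 12345678,
        'containerFormat': 'mpegts',
        'containerBitrate': 4500000,
        'duration': 600.0,
        'videoTracks': [],
        'audioTracks': [],
        'byteRanges': None,
    }
    info = ProgressiveTSInformation.parse_from_json_object(payload)
    return info.serialize()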
|
unlicense
| -5,852,554,402,090,742,000
| 39.544715
| 116
| 0.594947
| false
| 4.533636
| false
| false
| false
|
bosichong/17python.com
|
thread/threadlock.py
|
1
|
2692
|
# coding=utf-8
# @Time : 2017-10.04
# @Author : J.sky
# @Mail : bosichong@qq.com
# @Site : www.17python.com
# @Title : Python multithreading (2): thread locks with threading.Lock
# @Url : http://www.17python.com/blog/33
# @Details : Python multithreading (2): thread locks with threading.Lock
# @Other : OS X 10.11.6
# Python 3.6.1
# VSCode 1.15.1
###################################
# Python multithreading (2): thread locks with threading.Lock
###################################
'''
In a multithreaded task, the order in which threads operate on data is random
and cannot be predicted. If multiple threads modify a single piece of data,
that randomness inevitably affects the correctness of the result, so
multithreaded code must use a thread lock.
## Python's thread lock: threading.Lock
The example below shows how important the lock is. We define two counters and
use multiple threads to add and subtract the same value on each. If the
operations were ordered (one add, one subtract) the counters would end up
unchanged, but because unlocked threads grab the data at random, extra adds
or subtracts occur. See the example below:
'''
import threading
data = 0
lock_data = 0
lock = threading.Lock()  # create a thread lock
lock.acquire()  # the lock can also be taken and released manually
lock.release()
def change_d(n):
    '''Modify the unlocked counter'''
global data
data += n
data -= n
def change_l_d(n):
    '''Modify the locked counter'''
global lock_data
lock_data += n
lock_data -= n
def myfun(n):
for i in range(500000):
change_d(n)
#lock.acquire()
#change_l_d(n)
#lock.release()
        # same as the with statement below
with lock:
change_l_d(n)
def main():
threads = []
k = 5
for i in range(k):
t = threading.Thread(target=myfun, args=(10,))
threads.append(t)
for i in range(k):
threads[i].start()
for i in range(k):
threads[i].join()
print("无锁数据最终结果=={0}".format(data))
print("有锁数据最终结果=={0}".format(lock_data))
if __name__ == '__main__':
main()
'''
Run this several times and the final value of the unlocked counter differs
from run to run, which proves that unlocked multithreaded operations happen
in random order. So in multithreaded code, whenever several threads operate
on a single piece of data, always lock it so that only one thread touches it
at a time. Besides locking, Python can also control threads with semaphores
or events, but the author finds the Lock object the most convenient.
'''
|
apache-2.0
| 8,619,549,271,112,287,000
| 21.21519
| 96
| 0.607184
| false
| 1.603291
| false
| false
| false
|
gaetano-guerriero/eyeD3-debian
|
src/eyed3/utils/console.py
|
1
|
18555
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import struct
import sys
import time
from . import formatSize, formatTime
from .. import LOCAL_ENCODING, compat
from .log import log
try:
import fcntl
import termios
import signal
_CAN_RESIZE_TERMINAL = True
except ImportError:
_CAN_RESIZE_TERMINAL = False
class AnsiCodes(object):
_USE_ANSI = False
_CSI = '\033['
def __init__(self, codes):
def code_to_chars(code):
return AnsiCodes._CSI + str(code) + 'm'
for name in dir(codes):
if not name.startswith('_'):
value = getattr(codes, name)
setattr(self, name, code_to_chars(value))
# Add color function
for reset_name in ("RESET_%s" % name, "RESET"):
if hasattr(codes, reset_name):
reset_value = getattr(codes, reset_name)
setattr(self, "%s" % name.lower(),
AnsiCodes._mkfunc(code_to_chars(value),
code_to_chars(reset_value)))
break
@staticmethod
def _mkfunc(color, reset):
def _cwrap(text, *styles):
if not AnsiCodes._USE_ANSI:
return text
s = u''
for st in styles:
s += st
s += color + text + reset
if styles:
s += Style.RESET_ALL
return s
return _cwrap
def __getattribute__(self, name):
attr = super(AnsiCodes, self).__getattribute__(name)
if (hasattr(attr, "startswith") and
attr.startswith(AnsiCodes._CSI) and
not AnsiCodes._USE_ANSI):
return ""
else:
return attr
def __getitem__(self, name):
return getattr(self, name.upper())
@classmethod
def init(cls, allow_colors):
cls._USE_ANSI = allow_colors and cls._term_supports_color()
@staticmethod
def _term_supports_color():
if (os.environ.get("TERM") == "dumb" or
os.environ.get("OS") == "Windows_NT"):
return False
return hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
class AnsiFore:
GREY = 30 # noqa
RED = 31 # noqa
GREEN = 32 # noqa
YELLOW = 33 # noqa
BLUE = 34 # noqa
MAGENTA = 35 # noqa
CYAN = 36 # noqa
WHITE = 37 # noqa
RESET = 39 # noqa
class AnsiBack:
GREY = 40 # noqa
RED = 41 # noqa
GREEN = 42 # noqa
YELLOW = 43 # noqa
BLUE = 44 # noqa
MAGENTA = 45 # noqa
CYAN = 46 # noqa
WHITE = 47 # noqa
RESET = 49 # noqa
class AnsiStyle:
RESET_ALL = 0 # noqa
BRIGHT = 1 # noqa
RESET_BRIGHT = 22 # noqa
DIM = 2 # noqa
RESET_DIM = RESET_BRIGHT # noqa
ITALICS = 3 # noqa
RESET_ITALICS = 23 # noqa
UNDERLINE = 4 # noqa
RESET_UNDERLINE = 24 # noqa
BLINK_SLOW = 5 # noqa
RESET_BLINK_SLOW = 25 # noqa
BLINK_FAST = 6 # noqa
RESET_BLINK_FAST = 26 # noqa
INVERSE = 7 # noqa
RESET_INVERSE = 27 # noqa
STRIKE_THRU = 9 # noqa
RESET_STRIKE_THRU = 29 # noqa
Fore = AnsiCodes(AnsiFore)
Back = AnsiCodes(AnsiBack)
Style = AnsiCodes(AnsiStyle)
def ERROR_COLOR():
return Fore.RED
def WARNING_COLOR():
return Fore.YELLOW
def HEADER_COLOR():
return Fore.GREEN
class Spinner(object):
"""
A class to display a spinner in the terminal.
It is designed to be used with the `with` statement::
with Spinner("Reticulating splines", "green") as s:
for item in enumerate(items):
s.next()
"""
_default_unicode_chars = u"◓◑◒◐"
_default_ascii_chars = u"-/|\\"
def __init__(self, msg, file=None, step=1,
chars=None, use_unicode=True, print_done=True):
self._msg = msg
self._file = file or sys.stdout
self._step = step
if not chars:
if use_unicode:
chars = self._default_unicode_chars
else:
chars = self._default_ascii_chars
self._chars = chars
self._silent = not self._file.isatty()
self._print_done = print_done
def _iterator(self):
chars = self._chars
index = 0
write = self._file.write
flush = self._file.flush
while True:
write(u'\r')
write(self._msg)
write(u' ')
write(chars[index])
flush()
yield
for i in range(self._step):
yield
index += 1
if index == len(chars):
index = 0
def __enter__(self):
if self._silent:
return self._silent_iterator()
else:
return self._iterator()
def __exit__(self, exc_type, exc_value, traceback):
write = self._file.write
flush = self._file.flush
if not self._silent:
write(u'\r')
write(self._msg)
if self._print_done:
if exc_type is None:
write(Fore.GREEN + u' [Done]\n')
else:
write(Fore.RED + u' [Failed]\n')
else:
write("\n")
flush()
def _silent_iterator(self):
self._file.write(self._msg)
self._file.flush()
while True:
yield
class ProgressBar(object):
"""
A class to display a progress bar in the terminal.
It is designed to be used either with the `with` statement::
with ProgressBar(len(items)) as bar:
for item in enumerate(items):
bar.update()
or as a generator::
for item in ProgressBar(items):
item.process()
"""
def __init__(self, total_or_items, file=None):
"""
total_or_items : int or sequence
If an int, the number of increments in the process being
tracked. If a sequence, the items to iterate over.
file : writable file-like object, optional
The file to write the progress bar to. Defaults to
`sys.stdout`. If `file` is not a tty (as determined by
calling its `isatty` member, if any), the scrollbar will
be completely silent.
"""
self._file = file or sys.stdout
if not self._file.isatty():
self.update = self._silent_update
self._silent = True
else:
self._silent = False
try:
self._items = iter(total_or_items)
self._total = len(total_or_items)
except TypeError:
try:
self._total = int(total_or_items)
self._items = iter(range(self._total))
except TypeError:
raise TypeError("First argument must be int or sequence")
self._start_time = time.time()
self._should_handle_resize = (
_CAN_RESIZE_TERMINAL and self._file.isatty())
self._handle_resize()
if self._should_handle_resize:
signal.signal(signal.SIGWINCH, self._handle_resize)
self._signal_set = True
else:
self._signal_set = False
self.update(0)
def _handle_resize(self, signum=None, frame=None):
self._terminal_width = getTtySize(self._file,
self._should_handle_resize)[1]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if not self._silent:
if exc_type is None:
self.update(self._total)
self._file.write('\n')
self._file.flush()
if self._signal_set:
signal.signal(signal.SIGWINCH, signal.SIG_DFL)
def __iter__(self):
return self
def next(self):
try:
rv = next(self._items)
except StopIteration:
self.__exit__(None, None, None)
raise
else:
self.update()
return rv
def update(self, value=None):
"""
Update the progress bar to the given value (out of the total
given to the constructor).
"""
if value is None:
value = self._current_value = self._current_value + 1
else:
self._current_value = value
if self._total == 0:
frac = 1.0
else:
frac = float(value) / float(self._total)
file = self._file
write = file.write
suffix = self._formatSuffix(value, frac)
self._bar_length = self._terminal_width - 37
bar_fill = int(float(self._bar_length) * frac)
write(u'\r|')
write(Fore.BLUE + u'=' * bar_fill + Fore.RESET)
if bar_fill < self._bar_length:
write(Fore.GREEN + u'>' + Fore.RESET)
write(u'-' * (self._bar_length - bar_fill - 1))
write(u'|')
write(suffix)
self._file.flush()
def _formatSuffix(self, value, frac):
if value >= self._total:
t = time.time() - self._start_time
time_str = ' '
elif value <= 0:
t = None
time_str = ''
else:
t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
time_str = u' ETA '
if t is not None:
time_str += formatTime(t, short=True)
suffix = ' {0:>4s}/{1:>4s}'.format(formatSize(value, short=True),
formatSize(self._total, short=True))
suffix += u' ({0:>6s}%)'.format(u'{0:.2f}'.format(frac * 100.0))
suffix += time_str
return suffix
def _silent_update(self, value=None):
pass
@classmethod
def map(cls, function, items, multiprocess=False, file=None):
"""
Does a `map` operation while displaying a progress bar with
percentage complete.
::
def work(i):
print(i)
ProgressBar.map(work, range(50))
Parameters:
function : function
Function to call for each step
items : sequence
Sequence where each element is a tuple of arguments to pass to
*function*.
multiprocess : bool, optional
If `True`, use the `multiprocessing` module to distribute each
task to a different processor core.
file : writeable file-like object, optional
The file to write the progress bar to. Defaults to
`sys.stdout`. If `file` is not a tty (as determined by
calling its `isatty` member, if any), the scrollbar will
be completely silent.
"""
results = []
if file is None:
file = sys.stdout
with cls(len(items), file=file) as bar:
step_size = max(200, bar._bar_length)
steps = max(int(float(len(items)) / step_size), 1)
if not multiprocess:
for i, item in enumerate(items):
function(item)
if (i % steps) == 0:
bar.update(i)
else:
import multiprocessing
p = multiprocessing.Pool()
for i, result in enumerate(p.imap_unordered(function, items,
steps)):
bar.update(i)
results.append(result)
return results
def _encode(s):
'''This is a helper for output of unicode. With Python2 it is necessary to
do encoding to the LOCAL_ENCODING since by default unicode will be encoded
    to ascii. In python3 this conversion is not necessary for the user
    to perform; in fact sys.std*.write, for example, requires unicode strings
be passed in. This function will encode for python2 and do nothing
for python3 (except assert that ``s`` is a unicode type).'''
if compat.PY2:
if isinstance(s, compat.unicode):
try:
return s.encode(LOCAL_ENCODING)
except Exception as ex:
log.error("Encoding error: " + str(ex))
return s.encode(LOCAL_ENCODING, "replace")
elif isinstance(s, str):
return s
else:
raise TypeError("Argument must be str or unicode")
else:
assert(isinstance(s, str))
return s
def printMsg(s):
fp = sys.stdout
s = _encode(s)
try:
fp.write("%s\n" % s)
except UnicodeEncodeError:
fp.write("%s\n" % compat.unicode(s.encode("utf-8", "replace"), "utf-8"))
fp.flush()
def printError(s):
_printWithColor(s, ERROR_COLOR(), sys.stderr)
def printWarning(s):
_printWithColor(s, WARNING_COLOR(), sys.stdout)
def printHeader(s):
_printWithColor(s, HEADER_COLOR(), sys.stdout)
def boldText(s, c=None):
return formatText(s, b=True, c=c)
def formatText(s, b=False, c=None):
return ((Style.BRIGHT if b else '') +
(c or '') +
s +
(Fore.RESET if c else '') +
(Style.RESET_BRIGHT if b else ''))
def _printWithColor(s, color, file):
s = _encode(s)
file.write(color + s + Fore.RESET + '\n')
file.flush()
def cformat(msg, fg, bg=None, styles=None):
'''Format ``msg`` with foreground and optional background. Optional
``styles`` lists will also be applied. The formatted string is returned.'''
fg = fg or ""
bg = bg or ""
styles = "".join(styles or [])
reset = Fore.RESET + Back.RESET + Style.RESET_ALL if (fg or bg or styles) \
else ""
output = u"%(fg)s%(bg)s%(styles)s%(msg)s%(reset)s" % locals()
return output
def getTtySize(fd=sys.stdout, check_tty=True):
hw = None
if check_tty:
try:
data = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 4)
hw = struct.unpack("hh", data)
except (OSError, IOError, NameError):
pass
if not hw:
try:
hw = (int(os.environ.get('LINES')),
int(os.environ.get('COLUMNS')))
except (TypeError, ValueError):
hw = (78, 25)
return hw
def cprint(msg, fg, bg=None, styles=None, file=sys.stdout):
'''Calls ``cformat`` and prints the result to output stream ``file``.'''
print(cformat(msg, fg, bg=bg, styles=styles), file=file)
if __name__ == "__main__":
AnsiCodes.init(True)
def checkCode(c):
return (c[0] != '_' and
"RESET" not in c and
c[0] == c[0].upper()
)
for bg_name, bg_code in ((c, getattr(Back, c))
for c in dir(Back) if checkCode(c)):
sys.stdout.write('%s%-7s%s %s ' %
(bg_code, bg_name, Back.RESET, bg_code))
for fg_name, fg_code in ((c, getattr(Fore, c))
for c in dir(Fore) if checkCode(c)):
sys.stdout.write(fg_code)
for st_name, st_code in ((c, getattr(Style, c))
for c in dir(Style) if checkCode(c)):
sys.stdout.write('%s%s %s %s' %
(st_code, st_name,
getattr(Style, "RESET_%s" % st_name),
bg_code))
sys.stdout.write("%s\n" % Style.RESET_ALL)
sys.stdout.write("\n")
with Spinner(Fore.GREEN + u"Phase #1") as spinner:
for i in range(50):
time.sleep(.05)
spinner.next()
with Spinner(Fore.RED + u"Phase #2" + Fore.RESET,
print_done=False) as spinner:
for i in range(50):
time.sleep(.05)
spinner.next()
with Spinner(u"Phase #3", print_done=False, use_unicode=False) as spinner:
for i in range(50):
spinner.next()
time.sleep(.05)
with Spinner(u"Phase #4", print_done=False, chars='.oO°Oo.') as spinner:
for i in range(50):
spinner.next()
time.sleep(.05)
items = range(200)
with ProgressBar(len(items)) as bar:
for item in enumerate(items):
bar.update()
time.sleep(.05)
for item in ProgressBar(items):
time.sleep(.05)
progress = 0
max = 320000000
with ProgressBar(max) as bar:
while progress < max:
progress += 23400
bar.update(progress)
time.sleep(.001)
|
gpl-3.0
| 7,526,668,568,266,435,000
| 31.031088
| 80
| 0.463496
| false
| 4.296039
| false
| false
| false
|
NicoSantangelo/sublime-gulp
|
base_command.py
|
1
|
5042
|
import sublime
import sublime_plugin
import os.path
is_sublime_text_3 = int(sublime.version()) >= 3000
if is_sublime_text_3:
    from .settings import Settings
    from .status_bar import StatusBar
    from .insert_in_output_view import insert_in_output_view
    from .timeout import set_timeout, defer_sync
else:
    from settings import Settings
    from status_bar import StatusBar
    from insert_in_output_view import insert_in_output_view
    from timeout import set_timeout, defer_sync

#
# A base for each command
#
class BaseCommand(sublime_plugin.WindowCommand):
    def run(self, task_name=None, task_flag=None, silent=False, paths=[]):
        self.settings = None
        self.setup_data_from_settings()
        self.task_name = task_name
        self.task_flag = task_flag if task_name is not None and task_flag is not None else self.get_flag_from_task_name()
        self.silent = silent
        self._working_dir = ""
        self.searchable_folders = [os.path.dirname(path) for path in paths] if len(paths) > 0 else self.window.folders()
        self.output_view = None
        self.status_bar = StatusBar(self.window)
        self.work()

    def setup_data_from_settings(self):
        Settings.gather_shared_data()
        self.settings = Settings()
        self.results_in_new_tab = self.settings.get("results_in_new_tab", False)
        self.check_for_gulpfile = self.settings.get('check_for_gulpfile', True)

    def get_flag_from_task_name(self):
        flags = self.settings.get("flags", {})
        return flags[self.task_name] if self.task_name in flags else ""

    # Properties
    @property
    def working_dir(self):
        return self._working_dir

    @working_dir.setter
    def working_dir(self, value):
        if self.check_for_gulpfile:
            self._working_dir = os.path.dirname(value)
        else:
            self._working_dir = value

    # Main method, override
    def work(self):
        pass

    # Panels and message
    def show_quick_panel(self, items, on_done=None, font=sublime.MONOSPACE_FONT):
        defer_sync(lambda: self.window.show_quick_panel(items, on_done, font))

    def show_input_panel(self, caption, initial_text="", on_done=None, on_change=None, on_cancel=None):
        self.window.show_input_panel(caption, initial_text, on_done, on_change, on_cancel)

    def status_message(self, text):
        sublime.status_message("%s: %s" % (Settings.PACKAGE_NAME, text))

    def error_message(self, text):
        sublime.error_message("%s: %s" % (Settings.PACKAGE_NAME, text))

    # Output view
    def show_output_panel(self, text):
        if self.silent:
            self.status_message(text)
            return

        if self.results_in_new_tab:
            new_tab_path = os.path.join(self.gulp_results_path(), "Gulp Results")
            self.output_view = self.window.open_file(new_tab_path)
            self.output_view.set_scratch(True)
        else:
            self.output_view = self.window.get_output_panel("gulp_output")
            self.show_panel()

        self.output_view.settings().set("scroll_past_end", False)
        self.add_syntax()
        self.append_to_output_view(text)

    def gulp_results_path(self):
        return next(folder_path for folder_path in self.searchable_folders if self.working_dir.find(folder_path) != -1) if self.working_dir else ""

    def gulp_results_view(self):
        if self.output_view is None:
            gulp_results = [view for view in sublime.active_window().views() if view.file_name() and os.path.basename(view.file_name()) == "Gulp Results"]
            return gulp_results[0] if len(gulp_results) > 0 else None
        else:
            return self.output_view

    def add_syntax(self):
        if self.settings.has("syntax_override"):
            syntax_file = self.settings.get("syntax_override")
        else:
            syntax_file = self.settings.get_from_user_settings("syntax", "Packages/Gulp/syntax/GulpResults.tmLanguage")
        if syntax_file:
            self.output_view.set_syntax_file(syntax_file)

    def append_to_output_view_in_main_thread(self, text):
        defer_sync(lambda: self.append_to_output_view(text))

    def append_to_output_view(self, text):
        if not self.silent:
            insert_in_output_view(self.output_view, text, self.results_in_new_tab)

    def set_output_close_on_timeout(self):
        timeout = self.settings.get("results_autoclose_timeout_in_milliseconds", False)
        if timeout:
            set_timeout(self.close_panel, timeout)

    def close_panel(self):
        if self.results_in_new_tab:
            self.output_view = self.gulp_results_view()
            if self.output_view and self.output_view.file_name():
                self.window.focus_view(self.output_view)
                self.window.run_command('close_file')
        else:
            self.window.run_command("hide_panel", { "panel": "output.gulp_output" })

    def show_panel(self):
        self.window.run_command("show_panel", { "panel": "output.gulp_output" })
|
mit
| 4,268,979,282,055,752,000
| 36.626866
| 154
| 0.639825
| false
| 3.48686
| false
| false
| false
|
loonycyborg/scons-plusplus
|
python_modules/Tool/lex.py
|
1
|
4870
|
"""SCons.Tool.lex
Tool-specific initialization for lex.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/lex.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os.path
import sys
import SCons.Action
import SCons.Tool
import SCons.Util
import SCons.Warnings  # used below when no lex/flex binary is found
from SCons.Platform.mingw import MINGW_DEFAULT_PATHS
from SCons.Platform.cygwin import CYGWIN_DEFAULT_PATHS
from SCons.Platform.win32 import CHOCO_DEFAULT_PATH
LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR")
if sys.platform == 'win32':
    BINS = ['flex', 'lex', 'win_flex']
else:
    BINS = ["flex", "lex"]

def lexEmitter(target, source, env):
    sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0]))
    if sourceExt == ".lm":           # If using Objective-C
        target = [sourceBase + ".m"] # the extension is ".m".

    # This emitter essentially tries to add to the target all extra
    # files generated by flex.

    # Different options that are used to trigger the creation of extra files.
    fileGenOptions = ["--header-file=", "--tables-file="]

    lexflags = env.subst("$LEXFLAGS", target=target, source=source)
    for option in SCons.Util.CLVar(lexflags):
        for fileGenOption in fileGenOptions:
            l = len(fileGenOption)
            if option[:l] == fileGenOption:
                # A file generating option is present, so add the
                # file name to the target list.
                fileName = option[l:].strip()
                target.append(fileName)
    return (target, source)

def get_lex_path(env, append_paths=False):
    """
    Find the path to the lex tool, searching several possible names

    Only called in the Windows case, so the default_path
    can be Windows-specific

    :param env: current construction environment
    :param append_paths: if set, add the path to the tool to PATH
    :return: path to lex tool, if found
    """
    for prog in BINS:
        bin_path = SCons.Tool.find_program_path(
            env,
            prog,
            default_paths=CHOCO_DEFAULT_PATH + MINGW_DEFAULT_PATHS + CYGWIN_DEFAULT_PATHS)
        if bin_path:
            if append_paths:
                env.AppendENVPath('PATH', os.path.dirname(bin_path))
            return bin_path
    SCons.Warnings.Warning('lex tool requested, but lex or flex binary not found in ENV PATH')

def generate(env):
    """Add Builders and construction variables for lex to an Environment."""
    c_file, cxx_file = SCons.Tool.createCFileBuilders(env)

    # C
    c_file.add_action(".l", LexAction)
    c_file.add_emitter(".l", lexEmitter)

    c_file.add_action(".lex", LexAction)
    c_file.add_emitter(".lex", lexEmitter)

    # Objective-C
    cxx_file.add_action(".lm", LexAction)
    cxx_file.add_emitter(".lm", lexEmitter)

    # C++
    cxx_file.add_action(".ll", LexAction)
    cxx_file.add_emitter(".ll", lexEmitter)

    env["LEXFLAGS"] = SCons.Util.CLVar("")

    if sys.platform == 'win32':
        # ignore the return - we do not need the full path here
        _ = get_lex_path(env, append_paths=True)
        env["LEX"] = env.Detect(BINS)
        if not env.get("LEXUNISTD"):
            env["LEXUNISTD"] = SCons.Util.CLVar("")
        env["LEXCOM"] = "$LEX $LEXUNISTD $LEXFLAGS -t $SOURCES > $TARGET"
    else:
        env["LEX"] = env.Detect(BINS)
        env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET"

def exists(env):
    if sys.platform == 'win32':
        return get_lex_path(env)
    else:
        return env.Detect(BINS)
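
# Illustrative usage (added; not part of the original tool file). Inside an
# SConstruct, SCons injects Environment into the namespace; 'scanner.l' is a
# hypothetical input:
#
#     env = Environment(tools=['default', 'lex'])
#     env.CFile(target='scanner.c', source='scanner.l')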
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
gpl-3.0
| 1,416,306,155,329,501,000
| 33.539007
| 114
| 0.674127
| false
| 3.631618
| false
| false
| false
|
ryansb/tremendous
|
tremendous/colors.py
|
1
|
12059
|
from functools import partial
from tremendous.api import (
apply_color,
apply_256,
apply_256_bg,
apply_256_hl,
)
from tremendous.bindings import lib as __lib
from tremendous.bindings import ffi
colors_16 = dict(
bold=__lib.BOLD,
italic=__lib.ITALIC,
under=__lib.UNDER,
under2=__lib.UNDER2,
strike=__lib.STRIKE,
blink=__lib.BLINK,
flip=__lib.FLIP,
black=__lib.BLACK,
red=__lib.RED,
green=__lib.GREEN,
yellow=__lib.YELLOW,
blue=__lib.BLUE,
magenta=__lib.MAGENTA,
cyan=__lib.CYAN,
white=__lib.WHITE,
hblack=__lib.HBLACK,
hred=__lib.HRED,
hgreen=__lib.HGREEN,
hyellow=__lib.HYELLOW,
hblue=__lib.HBLUE,
hmagenta=__lib.HMAGENTA,
hcyan=__lib.HCYAN,
hwhite=__lib.HWHITE,
bgblack=__lib.BGBLACK,
bgred=__lib.BGRED,
bggreen=__lib.BGGREEN,
bgyellow=__lib.BGYELLOW,
bgblue=__lib.BGBLUE,
bgmagenta=__lib.BGMAGENTA,
bgcyan=__lib.BGCYAN,
bgwhite=__lib.BGWHITE,
)
__funcs = {}
# This is also gross. Sorry.
for k, v in colors_16.items():
    if k.startswith('h'):
        __funcs['highlight_' + k[1:]] = partial(apply_color, v)
        __funcs['hi_' + k[1:]] = partial(apply_color, v)
        __funcs['hl_' + k[1:]] = partial(apply_color, v)
    elif k.startswith('bg'):
        __funcs['background_' + k[2:]] = partial(apply_color, v)
        __funcs['bg_' + k[2:]] = partial(apply_color, v)
    elif k.startswith('under'):
        __funcs[k] = partial(apply_color, v)
        __funcs['underline' + k[5:]] = partial(apply_color, v)
    else:
        __funcs[k] = partial(apply_color, v)
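
# Note (added): several xterm-256 color names below repeat (e.g. 'Blue3',
# 'SpringGreen3'); in a Python dict literal the last duplicate wins, so the
# earlier RGB triples are silently overwritten.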
extended_colors = {
'Grey0': [0, 0, 0],
'NavyBlue': [0, 0, 95],
'DarkBlue': [0, 0, 135],
'Blue3': [0, 0, 175],
'Blue3': [0, 0, 215],
'Blue1': [0, 0, 255],
'DarkGreen': [0, 95, 0],
'DeepSkyBlue4': [0, 95, 95],
'DeepSkyBlue4': [0, 95, 135],
'DeepSkyBlue4': [0, 95, 175],
'DodgerBlue3': [0, 95, 215],
'DodgerBlue2': [0, 95, 255],
'Green4': [0, 135, 0],
'SpringGreen4': [0, 135, 95],
'Turquoise4': [0, 135, 135],
'DeepSkyBlue3': [0, 135, 175],
'DeepSkyBlue3': [0, 135, 215],
'DodgerBlue1': [0, 135, 255],
'Green3': [0, 175, 0],
'SpringGreen3': [0, 175, 95],
'DarkCyan': [0, 175, 135],
'LightSeaGreen': [0, 175, 175],
'DeepSkyBlue2': [0, 175, 215],
'DeepSkyBlue1': [0, 175, 255],
'Green3': [0, 215, 0],
'SpringGreen3': [0, 215, 95],
'SpringGreen2': [0, 215, 135],
'Cyan3': [0, 215, 175],
'DarkTurquoise': [0, 215, 215],
'Turquoise2': [0, 215, 255],
'Green1': [0, 255, 0],
'SpringGreen2': [0, 255, 95],
'SpringGreen1': [0, 255, 135],
'MediumSpringGreen': [0, 255, 175],
'Cyan2': [0, 255, 215],
'Cyan1': [0, 255, 255],
'DarkRed': [95, 0, 0],
'DeepPink4': [95, 0, 95],
'Purple4': [95, 0, 135],
'Purple4': [95, 0, 175],
'Purple3': [95, 0, 215],
'BlueViolet': [95, 0, 255],
'Orange4': [95, 95, 0],
'Grey37': [95, 95, 95],
'MediumPurple4': [95, 95, 135],
'SlateBlue3': [95, 95, 175],
'SlateBlue3': [95, 95, 215],
'RoyalBlue1': [95, 95, 255],
'Chartreuse4': [95, 135, 0],
'DarkSeaGreen4': [95, 135, 95],
'PaleTurquoise4': [95, 135, 135],
'SteelBlue': [95, 135, 175],
'SteelBlue3': [95, 135, 215],
'CornflowerBlue': [95, 135, 255],
'Chartreuse3': [95, 175, 0],
'DarkSeaGreen4': [95, 175, 95],
'CadetBlue': [95, 175, 135],
'CadetBlue': [95, 175, 175],
'SkyBlue3': [95, 175, 215],
'SteelBlue1': [95, 175, 255],
'Chartreuse3': [95, 215, 0],
'PaleGreen3': [95, 215, 95],
'SeaGreen3': [95, 215, 135],
'Aquamarine3': [95, 215, 175],
'MediumTurquoise': [95, 215, 215],
'SteelBlue1': [95, 215, 255],
'Chartreuse2': [95, 255, 0],
'SeaGreen2': [95, 255, 95],
'SeaGreen1': [95, 255, 135],
'SeaGreen1': [95, 255, 175],
'Aquamarine1': [95, 255, 215],
'DarkSlateGray2': [95, 255, 255],
'DarkRed': [135, 0, 0],
'DeepPink4': [135, 0, 95],
'DarkMagenta': [135, 0, 135],
'DarkMagenta': [135, 0, 175],
'DarkViolet': [135, 0, 215],
'Purple': [135, 0, 255],
'Orange4': [135, 95, 0],
'LightPink4': [135, 95, 95],
'Plum4': [135, 95, 135],
'MediumPurple3': [135, 95, 175],
'MediumPurple3': [135, 95, 215],
'SlateBlue1': [135, 95, 255],
'Yellow4': [135, 135, 0],
'Wheat4': [135, 135, 95],
'Grey53': [135, 135, 135],
'LightSlateGrey': [135, 135, 175],
'MediumPurple': [135, 135, 215],
'LightSlateBlue': [135, 135, 255],
'Yellow4': [135, 175, 0],
'DarkOliveGreen3': [135, 175, 95],
'DarkSeaGreen': [135, 175, 135],
'LightSkyBlue3': [135, 175, 175],
'LightSkyBlue3': [135, 175, 215],
'SkyBlue2': [135, 175, 255],
'Chartreuse2': [135, 215, 0],
'DarkOliveGreen3': [135, 215, 95],
'PaleGreen3': [135, 215, 135],
'DarkSeaGreen3': [135, 215, 175],
'DarkSlateGray3': [135, 215, 215],
'SkyBlue1': [135, 215, 255],
'Chartreuse1': [135, 255, 0],
'LightGreen': [135, 255, 95],
'LightGreen': [135, 255, 135],
'PaleGreen1': [135, 255, 175],
'Aquamarine1': [135, 255, 215],
'DarkSlateGray1': [135, 255, 255],
'Red3': [175, 0, 0],
'DeepPink4': [175, 0, 95],
'MediumVioletRed': [175, 0, 135],
'Magenta3': [175, 0, 175],
'DarkViolet': [175, 0, 215],
'Purple': [175, 0, 255],
'DarkOrange3': [175, 95, 0],
'IndianRed': [175, 95, 95],
'HotPink3': [175, 95, 135],
'MediumOrchid3': [175, 95, 175],
'MediumOrchid': [175, 95, 215],
'MediumPurple2': [175, 95, 255],
'DarkGoldenrod': [175, 135, 0],
'LightSalmon3': [175, 135, 95],
'RosyBrown': [175, 135, 135],
'Grey63': [175, 135, 175],
'MediumPurple2': [175, 135, 215],
'MediumPurple1': [175, 135, 255],
'Gold3': [175, 175, 0],
'DarkKhaki': [175, 175, 95],
'NavajoWhite3': [175, 175, 135],
'Grey69': [175, 175, 175],
'LightSteelBlue3': [175, 175, 215],
'LightSteelBlue': [175, 175, 255],
'Yellow3': [175, 215, 0],
'DarkOliveGreen3': [175, 215, 95],
'DarkSeaGreen3': [175, 215, 135],
'DarkSeaGreen2': [175, 215, 175],
'LightCyan3': [175, 215, 215],
'LightSkyBlue1': [175, 215, 255],
'GreenYellow': [175, 255, 0],
'DarkOliveGreen2': [175, 255, 95],
'PaleGreen1': [175, 255, 135],
'DarkSeaGreen2': [175, 255, 175],
'DarkSeaGreen1': [175, 255, 215],
'PaleTurquoise1': [175, 255, 255],
'Red3': [215, 0, 0],
'DeepPink3': [215, 0, 95],
'DeepPink3': [215, 0, 135],
'Magenta3': [215, 0, 175],
'Magenta3': [215, 0, 215],
'Magenta2': [215, 0, 255],
'DarkOrange3': [215, 95, 0],
'IndianRed': [215, 95, 95],
'HotPink3': [215, 95, 135],
'HotPink2': [215, 95, 175],
'Orchid': [215, 95, 215],
'MediumOrchid1': [215, 95, 255],
'Orange3': [215, 135, 0],
'LightSalmon3': [215, 135, 95],
'LightPink3': [215, 135, 135],
'Pink3': [215, 135, 175],
'Plum3': [215, 135, 215],
'Violet': [215, 135, 255],
'Gold3': [215, 175, 0],
'LightGoldenrod3': [215, 175, 95],
'Tan': [215, 175, 135],
'MistyRose3': [215, 175, 175],
'Thistle3': [215, 175, 215],
'Plum2': [215, 175, 255],
'Yellow3': [215, 215, 0],
'Khaki3': [215, 215, 95],
'LightGoldenrod2': [215, 215, 135],
'LightYellow3': [215, 215, 175],
'Grey84': [215, 215, 215],
'LightSteelBlue1': [215, 215, 255],
'Yellow2': [215, 255, 0],
'DarkOliveGreen1': [215, 255, 95],
'DarkOliveGreen1': [215, 255, 135],
'DarkSeaGreen1': [215, 255, 175],
'Honeydew2': [215, 255, 215],
'LightCyan1': [215, 255, 255],
'Red1': [255, 0, 0],
'DeepPink2': [255, 0, 95],
'DeepPink1': [255, 0, 135],
'DeepPink1': [255, 0, 175],
'Magenta2': [255, 0, 215],
'Magenta1': [255, 0, 255],
'OrangeRed1': [255, 95, 0],
'IndianRed1': [255, 95, 95],
'IndianRed1': [255, 95, 135],
'HotPink': [255, 95, 175],
'HotPink': [255, 95, 215],
'MediumOrchid1': [255, 95, 255],
'DarkOrange': [255, 135, 0],
'Salmon1': [255, 135, 95],
'LightCoral': [255, 135, 135],
'PaleVioletRed1': [255, 135, 175],
'Orchid2': [255, 135, 215],
'Orchid1': [255, 135, 255],
'Orange1': [255, 175, 0],
'SandyBrown': [255, 175, 95],
'LightSalmon1': [255, 175, 135],
'LightPink1': [255, 175, 175],
'Pink1': [255, 175, 215],
'Plum1': [255, 175, 255],
'Gold1': [255, 215, 0],
'LightGoldenrod2': [255, 215, 95],
'LightGoldenrod2': [255, 215, 135],
'NavajoWhite1': [255, 215, 175],
'MistyRose1': [255, 215, 215],
'Thistle1': [255, 215, 255],
'Yellow1': [255, 255, 0],
'LightGoldenrod1': [255, 255, 95],
'Khaki1': [255, 255, 135],
'Wheat1': [255, 255, 175],
'Cornsilk1': [255, 255, 215],
'Grey100': [255, 255, 255],
'Grey3': [8, 8, 8],
'Grey7': [18, 18, 18],
'Grey11': [28, 28, 28],
'Grey15': [38, 38, 38],
'Grey19': [48, 48, 48],
'Grey23': [58, 58, 58],
'Grey27': [68, 68, 68],
'Grey30': [78, 78, 78],
'Grey35': [88, 88, 88],
'Grey39': [98, 98, 98],
'Grey42': [108, 108, 108],
'Grey46': [118, 118, 118],
'Grey50': [128, 128, 128],
'Grey54': [138, 138, 138],
'Grey58': [148, 148, 148],
'Grey62': [158, 158, 158],
'Grey66': [168, 168, 168],
'Grey70': [178, 178, 178],
'Grey74': [188, 188, 188],
'Grey78': [198, 198, 198],
'Grey82': [208, 208, 208],
'Grey85': [218, 218, 218],
'Grey89': [228, 228, 228],
'Grey93': [238, 238, 238],
}
__extended_funcs = {}
# This is also gross. Sorry.
for k, v in extended_colors.items():
    color = ffi.new('rgb_t *', v)
    __extended_funcs[k.lower()] = partial(apply_256, v)
    __extended_funcs['bg_' + k.lower()] = partial(apply_256_bg, v)
    __extended_funcs['background_' + k.lower()] = partial(apply_256_bg, v)
    __extended_funcs['hl_' + k.lower()] = partial(apply_256_hl, v)
    __extended_funcs['highlight_' + k.lower()] = partial(apply_256_hl, v)
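
# Illustrative usage (added; assumes these tables are reachable through the
# package's public API -- the key names shown are examples only):
#
#     >>> red = __funcs['red']
#     >>> print(red('error!'))                          # 16-color foreground
#     >>> print(__extended_funcs['bg_navyblue']('ok'))  # 256-color background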
|
mit
| 2,573,298,628,492,698,000
| 37.404459
| 74
| 0.43992
| false
| 2.683953
| false
| false
| false
|
kuba/letsencrypt
|
acme/acme/messages.py
|
1
|
13660
|
"""ACME protocol messages."""
import collections
from acme import challenges
from acme import errors
from acme import fields
from acme import jose
from acme import util
class Error(jose.JSONObjectWithFields, errors.Error):
"""ACME error.
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00
:ivar unicode typ:
:ivar unicode title:
:ivar unicode detail:
"""
ERROR_TYPE_DESCRIPTIONS = dict(
('urn:acme:error:' + name, description) for name, description in (
('badCSR', 'The CSR is unacceptable (e.g., due to a short key)'),
('badNonce', 'The client sent an unacceptable anti-replay nonce'),
('connection', 'The server could not connect to the client to '
'verify the domain'),
('dnssec', 'The server could not validate a DNSSEC signed domain'),
('invalidEmail',
'The provided email for a registration was invalid'),
('malformed', 'The request message was malformed'),
('rateLimited', 'There were too many requests of a given type'),
('serverInternal', 'The server experienced an internal error'),
('tls', 'The server experienced a TLS error during domain '
'verification'),
('unauthorized', 'The client lacks sufficient authorization'),
('unknownHost', 'The server could not resolve a domain name'),
)
)
typ = jose.Field('type')
title = jose.Field('title', omitempty=True)
detail = jose.Field('detail')
@property
def description(self):
"""Hardcoded error description based on its type.
:returns: Description if standard ACME error or ``None``.
:rtype: unicode
"""
return self.ERROR_TYPE_DESCRIPTIONS.get(self.typ)
def __str__(self):
return ' :: '.join(
part for part in
(self.typ, self.description, self.detail, self.title)
if part is not None)
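# Illustrative example (added; not part of the original module):
#
#     >>> err = Error(typ='urn:acme:error:badNonce', title=None,
#     ...             detail='nonce is no longer valid')
#     >>> err.description
#     'The client sent an unacceptable anti-replay nonce'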
class _Constant(jose.JSONDeSerializable, collections.Hashable):
"""ACME constant."""
__slots__ = ('name',)
POSSIBLE_NAMES = NotImplemented
def __init__(self, name):
self.POSSIBLE_NAMES[name] = self
self.name = name
def to_partial_json(self):
return self.name
@classmethod
def from_json(cls, value):
if value not in cls.POSSIBLE_NAMES:
raise jose.DeserializationError(
'{0} not recognized'.format(cls.__name__))
return cls.POSSIBLE_NAMES[value]
def __repr__(self):
return '{0}({1})'.format(self.__class__.__name__, self.name)
def __eq__(self, other):
return isinstance(other, type(self)) and other.name == self.name
def __hash__(self):
return hash((self.__class__, self.name))
def __ne__(self, other):
return not self == other
class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES = {}
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES = {}
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
"""ACME identifier.
:ivar IdentifierType typ:
:ivar unicode value:
"""
typ = jose.Field('type', decoder=IdentifierType.from_json)
value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
"""Directory."""
_REGISTERED_TYPES = {}
@classmethod
def _canon_key(cls, key):
return getattr(key, 'resource_type', key)
@classmethod
def register(cls, resource_body_cls):
"""Register resource."""
assert resource_body_cls.resource_type not in cls._REGISTERED_TYPES
cls._REGISTERED_TYPES[resource_body_cls.resource_type] = resource_body_cls
return resource_body_cls
def __init__(self, jobj):
canon_jobj = util.map_keys(jobj, self._canon_key)
if not set(canon_jobj).issubset(self._REGISTERED_TYPES):
# TODO: acme-spec is not clear about this: 'It is a JSON
# dictionary, whose keys are the "resource" values listed
# in {{https-requests}}'
raise ValueError('Wrong directory fields')
# TODO: check that everything is an absolute URL; acme-spec is
# not clear on that
self._jobj = canon_jobj
def __getattr__(self, name):
try:
return self[name.replace('_', '-')]
except KeyError as error:
raise AttributeError(str(error))
def __getitem__(self, name):
try:
return self._jobj[self._canon_key(name)]
except KeyError:
raise KeyError('Directory field not found')
def to_partial_json(self):
return self._jobj
@classmethod
def from_json(cls, jobj):
try:
return cls(jobj)
except ValueError as error:
raise jose.DeserializationError(str(error))
class Resource(jose.JSONObjectWithFields):
"""ACME Resource.
:ivar acme.messages.ResourceBody body: Resource body.
"""
body = jose.Field('body')
class ResourceWithURI(Resource):
"""ACME Resource with URI.
:ivar unicode uri: Location of the resource.
"""
uri = jose.Field('uri') # no ChallengeResource.uri
class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""
class Registration(ResourceBody):
"""Registration Resource Body.
:ivar acme.jose.jwk.JWK key: Public key.
:ivar tuple contact: Contact information following ACME spec,
`tuple` of `unicode`.
:ivar unicode agreement:
:ivar unicode authorizations: URI where
`messages.Registration.Authorizations` can be found.
:ivar unicode certificates: URI where
`messages.Registration.Certificates` can be found.
"""
# on new-reg key server ignores 'key' and populates it based on
# JWS.signature.combined.jwk
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
contact = jose.Field('contact', omitempty=True, default=())
agreement = jose.Field('agreement', omitempty=True)
authorizations = jose.Field('authorizations', omitempty=True)
certificates = jose.Field('certificates', omitempty=True)
class Authorizations(jose.JSONObjectWithFields):
"""Authorizations granted to Account in the process of registration.
:ivar tuple authorizations: URIs to Authorization Resources.
"""
authorizations = jose.Field('authorizations')
class Certificates(jose.JSONObjectWithFields):
"""Certificates granted to Account in the process of registration.
:ivar tuple certificates: URIs to Certificate Resources.
"""
certificates = jose.Field('certificates')
phone_prefix = 'tel:'
email_prefix = 'mailto:'
@classmethod
def from_data(cls, phone=None, email=None, **kwargs):
"""Create registration resource from contact details."""
details = list(kwargs.pop('contact', ()))
if phone is not None:
details.append(cls.phone_prefix + phone)
if email is not None:
details.append(cls.email_prefix + email)
kwargs['contact'] = tuple(details)
return cls(**kwargs)
def _filter_contact(self, prefix):
return tuple(
detail[len(prefix):] for detail in self.contact
if detail.startswith(prefix))
@property
def phones(self):
"""All phones found in the ``contact`` field."""
return self._filter_contact(self.phone_prefix)
@property
def emails(self):
"""All emails found in the ``contact`` field."""
return self._filter_contact(self.email_prefix)
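# Illustrative example (added; not part of the original module):
#     >>> reg = Registration.from_data(phone='1234567', email='admin@example.com')
#     >>> reg.contact
#     ('tel:1234567', 'mailto:admin@example.com')
#     >>> reg.emails
#     ('admin@example.com',)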
@Directory.register
class NewRegistration(Registration):
"""New registration."""
resource_type = 'new-reg'
resource = fields.Resource(resource_type)
class UpdateRegistration(Registration):
"""Update registration."""
resource_type = 'reg'
resource = fields.Resource(resource_type)
class RegistrationResource(ResourceWithURI):
"""Registration Resource.
:ivar acme.messages.Registration body:
:ivar unicode new_authzr_uri: URI found in the 'next' ``Link`` header
:ivar unicode terms_of_service: URL for the CA TOS.
"""
body = jose.Field('body', decoder=Registration.from_json)
new_authzr_uri = jose.Field('new_authzr_uri')
terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
"""Challenge Resource Body.
.. todo::
Confusingly, this has a similar name to `.challenges.Challenge`,
as well as `.achallenges.AnnotatedChallenge`. Please use names
such as ``challb`` to distinguish instances of this class from
``achall``.
:ivar acme.challenges.Challenge: Wrapped challenge.
Conveniently, all challenge fields are proxied, i.e. you can
call ``challb.x`` to get ``challb.chall.x`` contents.
:ivar acme.messages.Status status:
:ivar datetime.datetime validated:
:ivar messages.Error error:
"""
__slots__ = ('chall',)
uri = jose.Field('uri')
status = jose.Field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
validated = fields.RFC3339Field('validated', omitempty=True)
error = jose.Field('error', decoder=Error.from_json,
omitempty=True, default=None)
def to_partial_json(self):
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj
@classmethod
def fields_from_json(cls, jobj):
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields
def __getattr__(self, name):
return getattr(self.chall, name)
class ChallengeResource(Resource):
"""Challenge Resource.
:ivar acme.messages.ChallengeBody body:
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
"""
body = jose.Field('body', decoder=ChallengeBody.from_json)
authzr_uri = jose.Field('authzr_uri')
@property
def uri(self): # pylint: disable=missing-docstring,no-self-argument
# bug? 'method already defined line None'
# pylint: disable=function-redefined
return self.body.uri # pylint: disable=no-member
class Authorization(ResourceBody):
"""Authorization Resource Body.
:ivar acme.messages.Identifier identifier:
:ivar list challenges: `list` of `.ChallengeBody`
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
of `int`, as opposed to `list` of `list` from the spec).
:ivar acme.messages.Status status:
:ivar datetime.datetime expires:
"""
identifier = jose.Field('identifier', decoder=Identifier.from_json)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
# general, but for Key Authorization '[t]he "expires" field MUST
# be absent'... then acme-spec gives example with 'expires'
# present... That's confusing!
expires = fields.RFC3339Field('expires', omitempty=True)
@challenges.decoder
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
return tuple(ChallengeBody.from_json(chall) for chall in value)
@property
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations)
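# Example (added): with combinations ((0,), (1, 2)), resolved_combinations
# yields ((challenges[0],), (challenges[1], challenges[2])).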
@Directory.register
class NewAuthorization(Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.
:ivar acme.messages.Authorization body:
:ivar unicode new_cert_uri: URI found in the 'next' ``Link`` header
"""
body = jose.Field('body', decoder=Authorization.from_json)
new_cert_uri = jose.Field('new_cert_uri')
@Directory.register
class CertificateRequest(jose.JSONObjectWithFields):
"""ACME new-cert request.
:ivar acme.jose.util.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
resource_type = 'new-cert'
resource = fields.Resource(resource_type)
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
class CertificateResource(ResourceWithURI):
"""Certificate Resource.
:ivar acme.jose.util.ComparableX509 body:
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
"""
cert_chain_uri = jose.Field('cert_chain_uri')
authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(jose.JSONObjectWithFields):
"""Revocation message.
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
"""
resource_type = 'revoke-cert'
resource = fields.Resource(resource_type)
certificate = jose.Field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
|
apache-2.0
| 6,495,732,572,067,539,000
| 30.693735
| 82
| 0.651171
| false
| 3.918531
| false
| false
| false
|
Marzona/rig-remote
|
test/test_queuecomms.py
|
1
|
2132
|
#!/usr/bin/env python
# import modules
import pytest
from rig_remote.queue_comms import QueueComms
from rig_remote.constants import QUEUE_MAX_SIZE
from Queue import Queue, Empty, Full
def test_queued_for_parent1():
    qc = QueueComms()
    qc.parent_queue.put("2")
    qc.parent_queue.get()
    assert(qc.queued_for_parent() == False)

def test_queued_for_parent2():
    qc = QueueComms()
    qc.parent_queue.put("2")
    assert(qc.queued_for_parent() == True)

def test_get_from_parent1():
    qc = QueueComms()
    qc.parent_queue.put("2")
    assert(qc.get_from_parent() == "2")

def test_get_from_parent2():
    qc = QueueComms()
    assert(qc.get_from_parent() == None)

def test_get_from_child1():
    qc = QueueComms()
    qc.child_queue.put("2")
    assert(qc.get_from_child() == "2")

def test_get_from_child2():
    qc = QueueComms()
    assert(qc.get_from_child() == None)

def test_queued_for_child1():
    qc = QueueComms()
    qc.child_queue.put("2")
    qc.child_queue.get()
    assert(qc.queued_for_child() == False)

def test_queued_for_child2():
    qc = QueueComms()
    qc.child_queue.put("2")
    assert(qc.queued_for_child() == True)

def test_queue_max_size_parent():
    qc = QueueComms()
    for i in range(QUEUE_MAX_SIZE):
        qc.send_to_parent(i)
    with pytest.raises(Full):
        qc.send_to_parent("overflow")

def test_queue_max_size_child1():
    qc = QueueComms()
    for i in range(QUEUE_MAX_SIZE):
        qc.send_to_child(i)
    with pytest.raises(Full):
        qc.send_to_child("overflow")

def test_queue_value_error_child2():
    qc = QueueComms()
    with pytest.raises(ValueError):
        qc.signal_child("overflow")

def test_queue_max_size_child3():
    qc = QueueComms()
    for i in range(QUEUE_MAX_SIZE):
        qc.signal_child(i)
    with pytest.raises(Full):
        qc.signal_child(1)

def test_queue_value_error_parent2():
    qc = QueueComms()
    with pytest.raises(ValueError):
        qc.signal_parent("overflow")

def test_queue_max_size_parent3():
    qc = QueueComms()
    for i in range(QUEUE_MAX_SIZE):
        qc.signal_parent(i)
    with pytest.raises(Full):
        qc.signal_parent(1)
|
mit
| -5,075,182,294,116,822,000
| 24.082353
| 47
| 0.643996
| false
| 2.81638
| true
| false
| false
|
willblev/RNA_TwiZe
|
rna_twize.py
|
1
|
12512
|
import Tkinter
import tkFileDialog
import tkMessageBox
from Tkinter import *
import os
import sys
from math import factorial
class Unbuffered(object):
"""Attempts to create an unbuffered STDOUT"""
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = Unbuffered(sys.stdout)
files_list=[]
class RNA_Twilight_Zone_Curve(Tkinter.Frame):
"""Tkinter GUI that lets a user select PDB files with RNA structures and creates a Twilight-Zone curve."""
def open_pdb_files(self):
"""Allows the user to select multiple PDB files from a Tkinter prompt"""
if len(self.display_list)>0:
answer = tkMessageBox.askokcancel(message = "Are you sure you want to load new PDB files? Current workspace will be lost.")
if not answer:
return
else:
del files_list[:]
print "#### Started a new project ####"
self.display_list=[]
list_filename_paths = tkFileDialog.askopenfilenames(parent=root,title="Select multiple files (by holding SHIFT or CTRL).", filetypes=[("PDB files","*.pdb"),("All files","*")] )
if len(list_filename_paths)==1:
tkMessageBox.showerror("Too Few PDB Files!","You must select at least two PDB files.")
return
elif len(list_filename_paths)==0:
return
for each_file in list_filename_paths:
filename=os.path.basename(each_file)[:-4]
print >> sys.stderr, "Loaded %s"% filename
if each_file not in files_list: # could use a set to avoid redundancies
files_list.append(each_file)
if filename not in self.display_list:
self.display_list.append(filename)
#Sort the list by id
self.display_list.sort(key=lambda x: x)
#Add the identifiers to the workspace list
self.pdb_id_listbox.delete(0, Tkinter.END)
index = 1
for record in self.display_list:
self.pdb_id_listbox.insert(index, record.upper())
index+=1
print "Loaded %d PDB files."%len(self.display_list)
self.current_SeqRecord = None
print >> sys.stderr, "Locations of PDB files:"
for fils_paths in files_list:
print >> sys.stderr, fils_paths
def open_list_file(self): #Opens a list file and gets each ID
"""Opens a prompt that allows the user to select a text file containing a list of PDB IDs, which is then used to download the PDB files if the do not already exist."""
if len(self.display_list)>0:
answer = tkMessageBox.askokcancel(message = "Are you sure you want to load new PDB files? Current workspace will be lost.")
if answer is False:
return
else:
del files_list[:]
print "#### Started a new project ####"
self.display_list=[]
list_filename_path = tkFileDialog.askopenfilename( title="Select a list of PDB IDs.", filetypes=[("Text files","*.txt"),("Text files","*.tbl"),("Text files","*.tsv"),("Text files","*.csv"),("All files","*")] )
if list_filename_path=="":
return
self.display_list = []
just_path=os.path.dirname(list_filename_path)
new_dir_name=os.path.join(just_path,os.path.basename(list_filename_path)+"_pdb_files")
if not os.path.exists(new_dir_name):
os.makedirs(os.path.join(just_path,os.path.basename(list_filename_path)+"_pdb_files"))
#open list and parse PDB IDs
handle = open(list_filename_path,"r")
entire_file=''
print >> sys.stderr, "Fetching PDB files..."
for line in handle:
entire_file+=line
if "," in entire_file:
pdb_id_list=[x.strip() for x in entire_file.split(',')]
elif ";" in entire_file:
pdb_id_list=[x.strip() for x in entire_file.split(';')]
else:
pdb_id_list=[x.strip() for x in entire_file.split()]
for pdb_id in pdb_id_list:
if pdb_id[:4].upper() not in self.display_list:
self.display_list.append(pdb_id[:4].upper())
self.display_list.sort(key=lambda x: x)
#Add the identifiers to the list
self.pdb_id_listbox.delete(0, Tkinter.END)
index = 1
answer = tkMessageBox.askokcancel(message = "Download %d PDB files? This will probably take between %0.2f and %0.2f minutes. This window will close when process has completed." % (len(self.display_list), len(self.display_list)*0.03,len(self.display_list)*0.07))
if answer is False:
return
from pdb_getter import get_pdb_structure
for record in self.display_list:
self.pdb_id_listbox.insert(index, record.upper())
files_list.append(get_pdb_structure(record,new_dir_name))
index+=1
handle.close()
print "Loaded %d PDB files." % (len(self.display_list))
self.current_SeqRecord = None
print >> sys.stderr, "Locations of PDB files:"
for fils in files_list:
print >> sys.stderr, fils
print "You may now run an analysis with 'File' >> 'Run Analysis'."
def open_previous_files(self):
"""Allows the user to select files from previously running an analysis."""
if len(self.display_list)>0:
answer = tkMessageBox.askokcancel(message = "Are you sure you want to load new PDB files? Current workspace will be lost.")
if answer is False:
return
else:
del files_list[:]
print "#### Started a new project ####"
self.display_list=[]
list_filename_paths = tkFileDialog.askopenfilenames(parent=root,title="Select multiple files (by holding SHIFT or CTRL).", filetypes=[("PDB files","SARA_*.pdb"),("All files","*")] )
if len(list_filename_paths)==0:
return
for each_file in list_filename_paths:
filename=os.path.basename(each_file)[5:-4]
print >> sys.stderr, "Loaded %s"% filename
if each_file not in files_list:
files_list.append(each_file)
if filename not in self.display_list:
self.display_list.append(filename)
#Sort the list by id
self.display_list.sort(key=lambda x: x)
#Add the identifiers to the list
self.pdb_id_listbox.delete(0, Tkinter.END)
index = 1
for record in self.display_list:
self.pdb_id_listbox.insert(index, record.upper())
index+=1
print "Loaded %d files from previous analysis." % len(files_list)
def run_analysis(self):
"""Using the previously selected PDB files, filters out RNA structures that are not identical and between 20 and 500 bases long. This filtered list is then compared with SARA"""
if len(files_list)>0:
runtime_approx=factorial(len(files_list))/(factorial(len(files_list)-2)*factorial(2))
answer = tkMessageBox.askokcancel("Run Analysis","The analysis will probably take between %0.2f and %0.2f minutes to run these comparisons. Do you want to continue now? This window will close when process has completed." % (runtime_approx*0.04,runtime_approx*0.09))
if answer is False:
return
from pdb_comparer import compare_files
refresh_listbox=compare_files(files_list)
print "The analysis has created %d pairwise comparison files."% len(refresh_listbox)
self.pdb_id_listbox.delete(0, Tkinter.END)
index = 1
for record in refresh_listbox:
self.pdb_id_listbox.insert(index, record.upper())
index+=1
self.current_SeqRecord = None
print "You may now plot your results with 'File' >> 'Make Plot'."
else:
tkMessageBox.showerror("No files loaded!","There are currently no files loaded. First you should select PDB files.")
def make_plot(self):
"""Uses the files created by the pairwise alignments to make a plot."""
if len(files_list)>0:
from make_twize_graph import make_twilight_zone_curve
make_twilight_zone_curve(os.path.dirname(files_list[0]))
else:
tkMessageBox.showerror("No files loaded!","There are currently no files loaded. First import PDBs, then run then analysis before you try to plot.")
def show_help(self):
"""Displays the help dialogue, and provides extra information by searching for and opening the README file."""
answer=tkMessageBox.askokcancel(title="Help", message="Welcome to RNA_Twize. The basic flow of the program is as follows: \n 1. Open several PDB files\n 2. Run the analysis\n 3. Plot your results\n 4. Save your plot \nFor more detailed information, please see the README.txt file included with this package.\n\n Open README.txt now?")
if answer:
where_are_we = os.path.dirname(os.path.realpath(__file__))
try:
open_help_string="gedit %s &" % (os.path.join(os.path.dirname(where_are_we),"README.txt"))
os.system(open_help_string)
except:
open_help_string="more %s " % (os.path.join(os.path.dirname(where_are_we),"README.txt"))
os.system(open_help_string)
def show_about(self):
"""Displays a short message from the creators of the program"""
tkMessageBox.showinfo(title="About", message="This program was written by Andres Lanzos Camionai and Will Blevins in 2014. We would like to thank Emidio Capriotti for creating SARA, and Javier Garcia Garcia for providing us with useful templates for our Tkinter GUI.")
def create_left_frame(self):
self.left_frame = Tkinter.LabelFrame(self, text="Workspace List", padx=5, pady=5)
self.create_pdb_id_listbox()
self.left_frame.grid(row=0, column=0, sticky=Tkinter.W+Tkinter.E+Tkinter.N+Tkinter.S)
def create_pdb_id_listbox(self):
"""Creates a frame that contains a listbox with a scroll bar"""
frame = Tkinter.Frame(self.left_frame)
scrollbar = Tkinter.Scrollbar(frame, orient=Tkinter.VERTICAL)
self.pdb_id_listbox = Tkinter.Listbox(frame, selectmode=Tkinter.SINGLE, height=20, yscrollcommand = scrollbar.set)
scrollbar.config(command=self.pdb_id_listbox.yview)
scrollbar.pack( side=Tkinter.RIGHT, fill=Tkinter.Y)
self.pdb_id_listbox.pack( side=Tkinter.LEFT, expand=True, fill=Tkinter.BOTH)
frame.pack( fill=Tkinter.BOTH )
def create_right_frame(self):
"""Makes a tkinter frame"""
self.text_frame = Tkinter.LabelFrame(self, text="Program Feedback", width=400, padx=5, pady=5)
self.text_frame.grid(row=0, column=2, sticky=Tkinter.W)
self.right_frame = Tkinter.Frame(self.text_frame, borderwidth=5)
self.right_frame.grid()
def create_feedback_label(self):
"""A label that scrapes STDOUT and prints it in a feedback window"""
class IORedirector(object):
def __init__(self,TEXT_INFO):
self.TEXT_INFO = TEXT_INFO
class StdoutRedirector(IORedirector):
def write(self,str):
self.TEXT_INFO.config(text=self.TEXT_INFO.cget('text') + str)
self.TEXT_INFO = Label(self.right_frame, height=20, width=70, bg="grey",borderwidth=5, relief=RIDGE)
self.TEXT_INFO.grid(row=1, column=1)
sys.stdout = StdoutRedirector(self.TEXT_INFO)
def quit(self):
if tkMessageBox.askyesno("Quit","Are you sure you want to exit?"):
Tkinter.Frame.quit(self)
exit(0)
#CREATE THE FILEMENU
def create_menu(self):
self.menubar = Tkinter.Menu(self)
filemenu = Tkinter.Menu(self.menubar)
filemenu.add_command(label="Open PDB Files", command=self.open_pdb_files)
filemenu.add_command(label="Open List Of PDBs", command=self.open_list_file)
filemenu.add_command(label="Open Previous Analysis", command=self.open_previous_files)
filemenu.add_separator()
filemenu.add_command(label="Run Analysis", command=self.run_analysis)
filemenu.add_separator()
filemenu.add_command(label="Make Plot", command=self.make_plot)
filemenu.add_separator()
filemenu.add_command(label="QUIT", command=self.quit)
#CREATE THE HELP MENU
helpmenu = Tkinter.Menu(self.menubar)
helpmenu.add_command(label="Help", command=self.show_help)
helpmenu.add_command(label="About", command=self.show_about)
self.menubar.add_cascade(label="File", menu=filemenu)
self.menubar.add_cascade(label="Help", menu=helpmenu)
self.master.config(menu=self.menubar)
def createWidgets(self):
self.create_menu()
self.create_left_frame()
self.create_right_frame()
self.create_feedback_label()
self.grid(row=0)
def __init__(self, master=None, **kwargs):
Tkinter.Frame.__init__(self, master, **kwargs)
self.master.wm_title("RNA TwiZe: Twilight-Zone Curve Maker")
self.master.resizable(width=False, height=False)
#DEFINE ATTRIBUTES
self.display_list = []
self.pdb_id_listbox = None
self.menubar = None
self.current_SeqRecord = None
self.sequence_text = None
self.createWidgets()
"""Makes the GUI pop up in the middle of the screen"""
root = Tkinter.Tk()
app = RNA_Twilight_Zone_Curve(master=root,padx=10, pady=10)
#make screen dimensions work
w = 800
h = 380
# get screen width and height
ws = root.winfo_screenwidth()
hs = root.winfo_screenheight()
# calculate position x, y
x = (ws/2) - (w/2)
y = (hs/2) - (h/2)
root.geometry('%dx%d+%d+%d' % (w, h, x, y))
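# Worked example (added): on a 1920x1080 screen, x = 1920/2 - 800/2 = 560 and
# y = 1080/2 - 380/2 = 350, so the geometry string becomes '800x380+560+350'.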
print "Welcome to RNA_TwiZe. Open files using the toolbar menu to begin."
app.mainloop()
|
gpl-3.0
| -2,537,356,710,290,258,400
| 39.7557
| 352
| 0.706682
| false
| 3.113987
| false
| false
| false
|
nriley/NewsBlur
|
apps/rss_feeds/icon_importer.py
|
2
|
13539
|
import urllib2
import lxml.html
import numpy
import scipy
import scipy.misc
import scipy.cluster
import urlparse
import struct
import operator
import gzip
import datetime
import requests
import httplib
from PIL import BmpImagePlugin, PngImagePlugin, Image
from socket import error as SocketError
from boto.s3.key import Key
from StringIO import StringIO
from django.conf import settings
from apps.rss_feeds.models import MFeedPage, MFeedIcon
from utils import log as logging
from utils.feed_functions import timelimit, TimeoutError
from OpenSSL.SSL import Error as OpenSSLError
from pyasn1.error import PyAsn1Error
from requests.packages.urllib3.exceptions import LocationParseError
class IconImporter(object):
def __init__(self, feed, page_data=None, force=False):
self.feed = feed
self.force = force
self.page_data = page_data
self.feed_icon = MFeedIcon.get_feed(feed_id=self.feed.pk)
def save(self):
if not self.force and self.feed.favicon_not_found:
# print 'Not found, skipping...'
return
if (
not self.force
and not self.feed.favicon_not_found
and self.feed_icon.icon_url
and self.feed.s3_icon
):
# print 'Found, but skipping...'
return
image, image_file, icon_url = self.fetch_image_from_page_data()
if not image:
image, image_file, icon_url = self.fetch_image_from_path(force=self.force)
if image:
image = self.normalize_image(image)
try:
color = self.determine_dominant_color_in_image(image)
except IndexError:
return
try:
image_str = self.string_from_image(image)
except TypeError:
return
if len(image_str) > 500000:
image = None
if (image and
(self.force or
self.feed_icon.data != image_str or
self.feed_icon.icon_url != icon_url or
self.feed_icon.not_found or
(settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon))):
logging.debug(" ---> [%-30s] ~SN~FBIcon difference:~FY color:%s (%s/%s) data:%s url:%s notfound:%s no-s3:%s" % (
self.feed,
self.feed_icon.color != color, self.feed_icon.color, color,
self.feed_icon.data != image_str,
self.feed_icon.icon_url != icon_url,
self.feed_icon.not_found,
settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon))
self.feed_icon.data = image_str
self.feed_icon.icon_url = icon_url
self.feed_icon.color = color
self.feed_icon.not_found = False
self.feed_icon.save()
if settings.BACKED_BY_AWS.get('icons_on_s3'):
self.save_to_s3(image_str)
if self.feed.favicon_color != color:
self.feed.favicon_color = color
self.feed.favicon_not_found = False
self.feed.save(update_fields=['favicon_color', 'favicon_not_found'])
if not image:
self.feed_icon.not_found = True
self.feed_icon.save()
self.feed.favicon_not_found = True
self.feed.save()
return not self.feed.favicon_not_found
def save_to_s3(self, image_str):
expires = datetime.datetime.now() + datetime.timedelta(days=60)
expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
k = Key(settings.S3_ICONS_BUCKET)
k.key = self.feed.s3_icons_key
k.set_metadata('Content-Type', 'image/png')
k.set_metadata('Expires', expires)
k.set_contents_from_string(image_str.decode('base64'))
k.set_acl('public-read')
self.feed.s3_icon = True
self.feed.save()
def load_icon(self, image_file, index=None):
'''
DEPRECATED
Load Windows ICO image.
See http://en.wikipedia.org/w/index.php?oldid=264332061 for file format
description.
Cribbed and modified from http://djangosnippets.org/snippets/1287/
'''
try:
image_file.seek(0)
header = struct.unpack('<3H', image_file.read(6))
except Exception, e:
return
# Check magic
if header[:2] != (0, 1):
return
# Collect icon directories
directories = []
for i in xrange(header[2]):
directory = list(struct.unpack('<4B2H2I', image_file.read(16)))
for j in xrange(3):
if not directory[j]:
directory[j] = 256
directories.append(directory)
if index is None:
# Select best icon
directory = max(directories, key=operator.itemgetter(slice(0, 3)))
else:
directory = directories[index]
# Seek to the bitmap data
image_file.seek(directory[7])
prefix = image_file.read(16)
image_file.seek(-16, 1)
if PngImagePlugin._accept(prefix):
# Windows Vista icon with PNG inside
try:
image = PngImagePlugin.PngImageFile(image_file)
except IOError:
return
else:
# Load XOR bitmap
try:
image = BmpImagePlugin.DibImageFile(image_file)
except IOError:
return
if image.mode == 'RGBA':
# Windows XP 32-bit color depth icon without AND bitmap
pass
else:
# Patch up the bitmap height
image.size = image.size[0], image.size[1] >> 1
d, e, o, a = image.tile[0]
image.tile[0] = d, (0, 0) + image.size, o, a
# Calculate AND bitmap dimensions. See
# http://en.wikipedia.org/w/index.php?oldid=264236948#Pixel_storage
# for description
offset = o + a[1] * image.size[1]
stride = ((image.size[0] + 31) >> 5) << 2
size = stride * image.size[1]
# Load AND bitmap
image_file.seek(offset)
string = image_file.read(size)
mask = Image.frombytes('1', image.size, string, 'raw',
('1;I', stride, -1))
image = image.convert('RGBA')
image.putalpha(mask)
return image
def fetch_image_from_page_data(self):
image = None
image_file = None
if self.page_data:
content = self.page_data
elif settings.BACKED_BY_AWS.get('pages_on_s3') and self.feed.s3_page:
key = settings.S3_PAGES_BUCKET.get_key(self.feed.s3_pages_key)
compressed_content = key.get_contents_as_string()
stream = StringIO(compressed_content)
gz = gzip.GzipFile(fileobj=stream)
try:
content = gz.read()
except IOError:
content = None
else:
content = MFeedPage.get_data(feed_id=self.feed.pk)
url = self._url_from_html(content)
if not url:
try:
content = requests.get(self.cleaned_feed_link).content
url = self._url_from_html(content)
except (AttributeError, SocketError, requests.ConnectionError,
requests.models.MissingSchema, requests.sessions.InvalidSchema,
requests.sessions.TooManyRedirects,
requests.models.InvalidURL,
requests.models.ChunkedEncodingError,
requests.models.ContentDecodingError,
httplib.IncompleteRead,
LocationParseError, OpenSSLError, PyAsn1Error), e:
logging.debug(" ---> ~SN~FRFailed~FY to fetch ~FGfeed icon~FY: %s" % e)
if url:
image, image_file = self.get_image_from_url(url)
return image, image_file, url
@property
def cleaned_feed_link(self):
if self.feed.feed_link.startswith('http'):
return self.feed.feed_link
return 'http://' + self.feed.feed_link
def fetch_image_from_path(self, path='favicon.ico', force=False):
image = None
url = None
if not force:
url = self.feed_icon.icon_url
if not url and self.feed.feed_link and len(self.feed.feed_link) > 6:
url = urlparse.urljoin(self.feed.feed_link, 'favicon.ico')
if not url:
return None, None, None
image, image_file = self.get_image_from_url(url)
if not image:
url = urlparse.urljoin(self.feed.feed_link, '/favicon.ico')
image, image_file = self.get_image_from_url(url)
# print 'Found: %s - %s' % (url, image)
return image, image_file, url
def get_image_from_url(self, url):
# print 'Requesting: %s' % url
if not url:
return None, None
@timelimit(30)
def _1(url):
headers = {
'User-Agent': 'NewsBlur Favicon Fetcher - %s subscriber%s - %s '
'(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_1) '
'AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 '
'Safari/534.48.3)' %
(
self.feed.num_subscribers,
's' if self.feed.num_subscribers != 1 else '',
self.feed.permalink
),
'Connection': 'close',
}
try:
request = urllib2.Request(url, headers=headers)
icon = urllib2.urlopen(request).read()
except Exception:
return None
return icon
try:
icon = _1(url)
except TimeoutError:
return None, None
try:
icon_file = StringIO(icon)
image = Image.open(icon_file)
except (IOError, ValueError):
return None, None
return image, icon_file
def _url_from_html(self, content):
url = None
if not content:
return url
try:
if isinstance(content, unicode):
content = content.encode('utf-8')
icon_path = lxml.html.fromstring(content).xpath(
'//link[@rel="icon" or @rel="shortcut icon"]/@href'
)
except (lxml.etree.ParserError, TypeError):
return url
if icon_path:
if str(icon_path[0]).startswith('http'):
url = icon_path[0]
else:
url = urlparse.urljoin(self.feed.feed_link, icon_path[0])
return url
def normalize_image(self, image):
# if image.size != (16, 16):
# image = image.resize((16, 16), Image.BICUBIC)
if image.mode != 'RGBA':
try:
image = image.convert('RGBA')
except IOError:
pass
return image
def determine_dominant_color_in_image(self, image):
NUM_CLUSTERS = 5
# Convert image into array of values for each point.
if image.mode == '1':
image.convert('L')
ar = numpy.array(image)
# ar = scipy.misc.fromimage(image)
shape = ar.shape
# Reshape array of values to merge color bands. [[R], [G], [B], [A]] => [R, G, B, A]
if len(shape) > 2:
ar = ar.reshape(scipy.product(shape[:2]), shape[2])
# Get NUM_CLUSTERS worth of centroids.
ar = ar.astype(numpy.float)
codes, _ = scipy.cluster.vq.kmeans(ar, NUM_CLUSTERS)
# Pare centroids, removing blacks and whites and shades of really dark and really light.
original_codes = codes
for low, hi in [(60, 200), (35, 230), (10, 250)]:
codes = scipy.array([code for code in codes
if not ((code[0] < low and code[1] < low and code[2] < low) or
(code[0] > hi and code[1] > hi and code[2] > hi))])
if not len(codes):
codes = original_codes
else:
break
# Assign codes (vector quantization). Each vector is compared to the centroids
# and assigned the nearest one.
vecs, _ = scipy.cluster.vq.vq(ar, codes)
# Count occurences of each clustered vector.
counts, bins = scipy.histogram(vecs, len(codes))
# Show colors for each code in its hex value.
# colors = [''.join(chr(c) for c in code).encode('hex') for code in codes]
# total = scipy.sum(counts)
# print dict(zip(colors, [count/float(total) for count in counts]))
# Find the most frequent color, based on the counts.
index_max = scipy.argmax(counts)
peak = codes.astype(int)[index_max]
color = ''.join(chr(c) for c in peak).encode('hex')
return color[:6]
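# Illustrative check (added; not part of the original): the clustering above
# can be exercised with a synthetic solid-color image; the hex digits come
# straight from the RGB bytes (200, 30, 30):
#     >>> img = Image.new('RGBA', (16, 16), (200, 30, 30, 255))
#     >>> importer.determine_dominant_color_in_image(img)  # importer: an IconImporter built around a feed object
#     'c81e1e'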
def string_from_image(self, image):
output = StringIO()
image.save(output, 'png', quality=95)
contents = output.getvalue()
output.close()
return contents.encode('base64')
|
mit
| -3,460,954,564,688,705,500
| 35.395161
| 128
| 0.532609
| false
| 4.00918
| false
| false
| false
|
johnnoone/aiovault
|
tests/test_ldap.py
|
1
|
1590
|
from aiovault import Vault
from conftest import async_test
import pytest
@async_test
def test_ldap(dev_server):
    client = Vault(dev_server.addr, token=dev_server.root_token)
    backend = yield from client.auth.enable('ldap')
    configured = yield from backend.configure(url='ldap://ldap.forumsys.com',
                                              userattr='uid',
                                              userdn='dc=example,dc=com',
                                              groupdn='dc=example,dc=com')
    assert configured
    writen = yield from backend.write_group(name='scientists', policies='foo')
    assert writen
    token = yield from backend.login(username='tesla', password='password')
    assert token['metadata']['username'] == 'tesla'

@async_test
def test_ldap_crud(dev_server):
    client = Vault(dev_server.addr, token=dev_server.root_token)
    backend = yield from client.auth.enable('ldap')
    configured = yield from backend.configure(url='ldap://ldap.forumsys.com',
                                              userattr='uid',
                                              userdn='dc=example,dc=com',
                                              groupdn='dc=example,dc=com')
    assert configured
    writen = yield from backend.write_group(name='g1', policies='foo')
    assert writen
    data = yield from backend.read_group(name='g1')
    assert data['policies'] == {'foo'}
    deleted = yield from backend.delete_group(name='g1')
    assert deleted
    with pytest.raises(KeyError):
        yield from backend.read_group(name='g1')
|
bsd-3-clause
| 9,064,265,453,553,204,000
| 32.829787
| 78
| 0.579245
| false
| 4.195251
| true
| false
| false
|
DataDog/integrations-core
|
tokumx/datadog_checks/tokumx/vendor/pymongo/write_concern.py
|
1
|
4561
|
# Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for working with write concerns."""
from datadog_checks.tokumx.vendor.bson.py3compat import integer_types, string_type
from datadog_checks.tokumx.vendor.pymongo.errors import ConfigurationError
class WriteConcern(object):
    """WriteConcern

    :Parameters:
        - `w`: (integer or string) Used with replication, write operations
          will block until they have been replicated to the specified number
          or tagged set of servers. `w=<integer>` always includes the replica
          set primary (e.g. w=3 means write to the primary and wait until
          replicated to **two** secondaries). **w=0 disables acknowledgement
          of write operations and can not be used with other write concern
          options.**
        - `wtimeout`: (integer) Used in conjunction with `w`. Specify a value
          in milliseconds to control how long to wait for write propagation
          to complete. If replication does not complete in the given
          timeframe, a timeout exception is raised.
        - `j`: If ``True`` block until write operations have been committed
          to the journal. Cannot be used in combination with `fsync`. Prior
          to MongoDB 2.6 this option was ignored if the server was running
          without journaling. Starting with MongoDB 2.6 write operations will
          fail with an exception if this option is used when the server is
          running without journaling.
        - `fsync`: If ``True`` and the server is running without journaling,
          blocks until the server has synced all data files to disk. If the
          server is running with journaling, this acts the same as the `j`
          option, blocking until write operations have been committed to the
          journal. Cannot be used in combination with `j`.
    """

    __slots__ = ("__document", "__acknowledged")

    def __init__(self, w=None, wtimeout=None, j=None, fsync=None):
        self.__document = {}
        self.__acknowledged = True

        if wtimeout is not None:
            if not isinstance(wtimeout, integer_types):
                raise TypeError("wtimeout must be an integer")
            self.__document["wtimeout"] = wtimeout

        if j is not None:
            if not isinstance(j, bool):
                raise TypeError("j must be True or False")
            self.__document["j"] = j

        if fsync is not None:
            if not isinstance(fsync, bool):
                raise TypeError("fsync must be True or False")
            if j and fsync:
                raise ConfigurationError("Can't set both j "
                                         "and fsync at the same time")
            self.__document["fsync"] = fsync

        if self.__document and w == 0:
            raise ConfigurationError("Can not use w value "
                                     "of 0 with other options")

        if w is not None:
            if isinstance(w, integer_types):
                self.__acknowledged = w > 0
            elif not isinstance(w, string_type):
                raise TypeError("w must be an integer or string")
            self.__document["w"] = w

    @property
    def document(self):
        """The document representation of this write concern.

        .. note::
            :class:`WriteConcern` is immutable. Mutating the value of
            :attr:`document` does not mutate this :class:`WriteConcern`.
        """
        return self.__document.copy()

    @property
    def acknowledged(self):
        """If ``True`` write operations will wait for acknowledgement before
        returning.
        """
        return self.__acknowledged

    def __repr__(self):
        return ("WriteConcern(%s)" % (
            ", ".join("%s=%s" % kvt for kvt in self.document.items()),))

    def __eq__(self, other):
        return self.document == other.document

    def __ne__(self, other):
        return self.document != other.document

    def __bool__(self):
        return bool(self.document)
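
# Illustrative example (added; not part of the vendored module):
#
#     >>> wc = WriteConcern(w='majority', wtimeout=1000)
#     >>> wc.document == {'w': 'majority', 'wtimeout': 1000}
#     True
#     >>> wc.acknowledged
#     True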
|
bsd-3-clause
| 2,093,304,799,596,190,200
| 40.463636
| 82
| 0.623328
| false
| 4.502468
| false
| false
| false
|
HHSIDEAlab/hhs_ansible
|
lib/mark_deployment.py
|
1
|
1988
|
import argparse
import json
import sys
import urllib.error
import urllib.request
from argparse import RawTextHelpFormatter
DESCRIPTION = """
Mark a new deployment in New Relic
Example:
python ./lib/mark_deployment.py \\
--api_key API_KEY_GOES_HERE \\
--app_id APP_ID_GOES_HERE \\
--version VERSION_STRING_GOES_HERE
"""
def main(user, app_id, version, api_key):
data = {
"deployment": {
"revision": version,
"changelog": "",
"description": "",
"user": user
}
}
url = 'https://api.newrelic.com/v2/applications/%s/deployments.json' % app_id
headers = {
'Content-Type': 'application/json',
'X-Api-Key': api_key
}
req = urllib.request.Request(
url, data=json.dumps(data).encode('utf-8'), headers=headers, method='POST')
    # urlopen raises HTTPError for 4xx/5xx responses, so catch it and fall
    # through to the same status check instead of crashing.
    try:
        resp = urllib.request.urlopen(req)
        status = resp.status
    except urllib.error.HTTPError as err:
        status = err.code
    if status != 201:
        print("Could not post deployment info to New Relic (HTTP %s)" % status)
    else:
        print("Successfully marked deployment in New Relic")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=DESCRIPTION,
formatter_class=RawTextHelpFormatter
)
parser.add_argument(
'--user', dest='user', type=str, default='jenkins',
help='Identifies the user marking the deployment in New Relic'
)
parser.add_argument(
'--app_id', dest='app_id', type=str,
help='The New Relic application ID'
)
parser.add_argument(
'--version', dest='version', type=str,
help='The version or release number of the deployment'
)
parser.add_argument(
'--api_key', dest='api_key', type=str,
help='The New Relic API Key used to authenticate'
)
args = parser.parse_args()
if not args.api_key or not args.app_id or not args.version:
print("Missing required arguments.\n")
parser.print_help()
sys.exit(1)
main(args.user, args.app_id, args.version, args.api_key)
|
gpl-2.0
| 9,018,319,330,410,440,000
| 24.818182
| 83
| 0.607646
| false
| 3.708955
| false
| false
| false
|
wking/pygrader
|
pygrader/model/assignment.py
|
1
|
1233
|
# Copyright (C) 2012 W. Trevor King <wking@tremily.us>
#
# This file is part of pygrader.
#
# pygrader is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# pygrader is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# pygrader. If not, see <http://www.gnu.org/licenses/>.
class Assignment (object):
def __init__(self, name, points=1, weight=0, due=0, submittable=True):
self.name = name
self.points = points
self.weight = weight
self.due = due
self.submittable = submittable
def __str__(self):
return '<{} {}>'.format(type(self).__name__, self.name)
def __lt__(self, other):
if self.due < other.due:
return True
elif other.due < self.due:
return False
return self.name < other.name
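# Minimal ordering sketch: assignments sort by due date first, then by name,
# so ties on the due date fall back to alphabetical order. The example values
# are hypothetical.
def _assignment_ordering_sketch():  # pragma: no cover
    hw1 = Assignment('hw1', points=10, due=100)
    quiz = Assignment('quiz', due=100)
    hw2 = Assignment('hw2', points=10, due=200)
    assert sorted([hw2, quiz, hw1]) == [hw1, quiz, hw2]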
|
gpl-3.0
| -1,820,910,119,033,053,400
| 36.363636
| 79
| 0.6691
| false
| 3.853125
| false
| false
| false
|
avastjohn/maventy_new
|
growthcalc/growthcalc.py
|
1
|
22700
|
'''
Created on Sep 18, 2011
@author: mike
'''
import logging
import healthdb.util
import math
from xml.dom import minidom
import time
from datetime import datetime
from google.appengine.ext import db
from google.appengine.api import datastore_errors
import urllib2
import urllib
from xml.parsers.expat import ExpatError
import csv
# Boundaries for input values from WHO's AnthroComputation.cs.
# The min weight for a child, in kg.
input_minweight = 0.9
# The max weight for a child, in kg.
input_maxweight = 58
# The min length/height for a child, in cm.
input_minlengthorheight = 38
# The max length/height for a child, in cm.
input_maxlengthorheight = 150
# The min HC for a child, in cm.
input_minhc = 25
# The max HC for a child, in cm.
input_maxhc = 64
# Correction used for converting from recumbent to standing
heightcorrection = 0.7
# cutoff number of days for converting from recumbent to standing
height_mindays = 731
# The min age for a child.
mindays = 0
# The max age for a child to be considered in calculations.
maxdays = 1856
# The min length, in cm (WHO standard).
minlength = 45
# The max length, in cm (WHO standard).
maxlength = 110
# The min height, in cm (WHO standard).
minheight = 65
# The max height, in cm (WHO standard).
maxheight = 120
class Sex:
MALE="MALE"
FEMALE="FEMALE"
map = {}
map[MALE] = 1
map[FEMALE] = 2
class Measured:
STANDING="STANDING"
RECUMBENT="RECUMBENT"
def calculate_scores(pmap, visit=None):
""" This function calculates the anthropometric values based on the input
provided by the user at the command prompt. The z-scores and
percentiles are calculated for
Weight-for-age
Length/height-for-age
Weight-for-length
Weight-for-height
BMI-for-age
Head circumference-for-age
We do not at present plan to do: Arm circumference-for-age,
Triceps skinfold-for-age and Subscapular skinfold-for-age.
This program requires access to the WHO datasets. The nine datasets
corresponding to the nine measurements should be made available somewhere
in the classpath. These files should be of
.csv extension, with comma-separated values. The following are the
file names corresponding to its measurement.
Weight-for-age : weianthro.csv
Length/height-for-age : lenanthro.csv
Weight-for-length : wflanthro.csv
Weight-for-height : wfhanthro.csv
BMI-for-age : bmianthro.csv
Head circumference-for-age : hcanthro.csv
Not currently used:
Arm circumference-for-age : acanthro.csv
Triceps skinfold-for-age : tsanthro.csv
Subscapular skinfold-for-age: ssanthro.csv """
attrmap = {}
attrmap['generated_date'] = datetime.now()
  if pmap['date_of_visit'] is not None and pmap['date_of_birth'] is not None:
attrmap['age_in_days'] = (pmap['date_of_visit'] - pmap['date_of_birth']).days
else:
attrmap['age_in_days'] = -1
loh = NormalizedLengthOrHeight(attrmap['age_in_days'],
pmap['length'], pmap['measured'])
attrmap['weight'] = pmap['weight']
attrmap['height'] = loh.lengthOrHeight
if u'head_circumference' in pmap:
attrmap['head_circumference'] = pmap['head_circumference']
anthro = Anthro()
anthroConfigMap = {}
if not pmap['hasOedema']:
attrmap['body_mass_index'] = heightAndWeightToBmi(loh.lengthOrHeight, pmap['weight'])
anthroConfigMap['body_mass_index_for_age'] = anthro.getBodyMassIndexZscoreConfigForAge(Sex.map[pmap['sex']], attrmap['age_in_days'], attrmap['body_mass_index'], attrmap['weight'], attrmap['height'])
anthroConfigMap['weight_for_length_or_height'] = anthro.getWeightZscoreConfigForLengthOrHeight(Sex.map[pmap['sex']], loh, attrmap['weight'], attrmap['age_in_days'])
anthroConfigMap['weight_for_age'] = anthro.getWeightZscoreConfigForAge(Sex.map[pmap['sex']], attrmap['age_in_days'], attrmap['weight'])
else:
attrmap['body_mass_index'] = healthdb.util.NaN
anthroConfigMap['body_mass_index_for_age'] = healthdb.util.NaN
anthroConfigMap['weight_for_length_or_height'] = healthdb.util.NaN
anthroConfigMap['weight_for_age'] = healthdb.util.NaN
if 'head_circumference' in attrmap:
anthroConfigMap['head_circumference_for_age'] = healthdb.util.NaN
anthroConfigMap['length_or_height_for_age'] = anthro.getLengthOrHeightZscoreConfigForAge(Sex.map[pmap['sex']], attrmap['age_in_days'], attrmap['height'], pmap['measured'])
if 'head_circumference' in attrmap:
anthroConfigMap['head_circumference_for_age'] = anthro.getHeadCircumferenceZscoreConfigForAge(Sex.map[pmap['sex']], attrmap['age_in_days'], attrmap['head_circumference'])
for att in VisitStatistics.INDICATORS:
# map key is str(att) because **attrmap requires string keys
if att in anthroConfigMap:
zscore = anthroConfigMap[att]
percentile = zscoreToPercentile(zscore)
attrmap[str(att)] = ZscoreAndPercentile(zscore, percentile)
  return VisitStatistics(parent=visit, **attrmap)
class Anthro():
"""Anthro contains all the parameters for the Box-Cox score computations. """
def getBodyMassIndexZscoreConfigForAge(self, sex, ageInDays, bodyMassIndex, weight, height):
ret = healthdb.util.NaN
hasOedema = False
if hasOedema or ageInDays < mindays or ageInDays > maxdays or not (weight > 0 and height > 0):
ret = healthdb.util.NaN
else:
config = AnthroConfig('growthcalc/bmianthro.csv', bodyMassIndex, sex, ageInDays)
ret = zscoreFromAttribute(config)
return ret
def getWeightZscoreConfigForLengthOrHeight(self, sex, loh, weight, ageInDays):
ret = healthdb.util.NaN
hasOedema = False
if hasOedema or not(input_minweight <= weight and weight <= input_maxweight):
ret = healthdb.util.NaN
else:
if loh.measured == Measured.STANDING:
config = AnthroConfig('growthcalc/wfhanthro.csv', weight, sex, loh.lengthOrHeight)
elif loh.measured == Measured.RECUMBENT:
config = AnthroConfig('growthcalc/wflanthro.csv', weight, sex, loh.lengthOrHeight)
ret = zscoreFromAttribute(config)
return ret
def getWeightZscoreConfigForAge(self, sex, ageInDays, weight):
ret = healthdb.util.NaN
hasOedema = False
if hasOedema or ageInDays < 0 or ageInDays > maxdays or not (input_minweight <= weight and weight <= input_maxweight):
ret = healthdb.util.NaN
else:
config = AnthroConfig('growthcalc/weianthro.csv', weight, sex, ageInDays)
ret = zscoreFromAttribute(config)
return ret
def getLengthOrHeightZscoreConfigForAge(self, sex, ageInDays, height, measured):
ret = healthdb.util.NaN
if ageInDays < 0 or ageInDays > maxdays or not (height >= 1):
ret = healthdb.util.NaN
else:
config = AnthroConfig('growthcalc/lenanthro.csv', height, sex, ageInDays)
ret = zscoreFromAttribute(config)
return ret
def getHeadCircumferenceZscoreConfigForAge(self, sex, ageInDays, headCircumference):
ret = healthdb.util.NaN
if ageInDays < 0 or ageInDays > maxdays or not (input_minhc <= headCircumference and headCircumference <= input_maxhc):
ret = healthdb.util.NaN
else:
config = AnthroConfig('growthcalc/hcanthro.csv', headCircumference, sex, ageInDays)
ret = zscoreFromAttribute(config)
return ret
def zscoreFromAttribute(anthroConfig):
""" Return a restrictred zscore from a map of data filename and physical
attributes.
The filename must fit the design of a WHO data file defined as:
sex,[age|height|length],l,m,s,[loh]
sex: 1 indicating MALE, 2 indicating FEMALE
age: age in days since birth
height: height in cm
length: length in cm
l: power,
m: median, and
s: variation coefficient as used in calculating zscore
loh: 'L' for length, 'H' for height """
for row in csv.DictReader(open(anthroConfig.fileName)):
if 'age' in row:
dataAgeHeightOrLength = row['age']
elif 'length' in row:
dataAgeHeightOrLength = row['length']
elif 'height' in row:
dataAgeHeightOrLength = row['height']
if int(row['sex']) == anthroConfig.sex and float(dataAgeHeightOrLength) == anthroConfig.ageHeightOrLength:
return zscoreOtherRestricted(anthroConfig.measureKey, float(row['l']), float(row['m']), float(row['s']), True)
return healthdb.util.NaN
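# Illustrative row for one of these tables (the constants shown are the
# widely published WHO boys weight-for-age values at day 0; treat them as an
# example of the shape, not as data this module ships with):
#   sex,age,l,m,s
#   1,0,0.3487,3.3464,0.14602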
def zscoreOtherRestricted(measure, power, median, variationCoefficient, computeFinalZScore):
"""Return a restricted zscore.
Modified as follows:
- If within -3 .. 3 inclusive, zscore
- If outside, NaN if computeFinalZScore is false, otherwise
extrapolated in a particular way given by the WHO standard """
zscoreNorm = zscore(measure, power, median, variationCoefficient)
if math.fabs(zscoreNorm) > 3 and computeFinalZScore:
if zscoreNorm > 3:
std3Pos = cutoff(3, power, median, variationCoefficient)
std23Pos = std3Pos - cutoff(2, power, median, variationCoefficient)
zscoreNorm = 3 + ((measure - std3Pos) / std23Pos)
    elif zscoreNorm < -3:
std3Neg = cutoff(-3, power, median, variationCoefficient)
std23Neg = cutoff(-2, power, median, variationCoefficient) - std3Neg
zscoreNorm = -3 + ((measure - std3Neg) / std23Neg)
return zscoreNorm
def zscore(measure, power, median, variationCoefficient):
return (math.pow((measure / median), power) - 1) / (power * variationCoefficient)
def cutoff(desiredZscore, power, median, variationCoefficient):
return median * (math.pow((1 + (power * variationCoefficient * desiredZscore)), (1 / power)))
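# Worked sketch of the LMS formulas above, reusing the illustrative day-0
# constants from the comment block: the median scores 0 by construction, and
# cutoff(2, ...) recovers the weight at +2 SD (roughly 4.4 kg for these inputs).
def _lms_sketch():  # pragma: no cover
  l, m, s = 0.3487, 3.3464, 0.14602
  assert abs(zscore(m, l, m, s)) < 1e-9
  assert 4.3 < cutoff(2, l, m, s) < 4.5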
def heightAndWeightToBmi(height, weight):
"""standard metric conversion from weight and height to BMI, height in cm, weight in kg"""
if weight < input_minweight or weight > input_maxweight or height < input_minlengthorheight or height > input_maxlengthorheight:
output = healthdb.util.NaN
else:
output = weight / ((height / 100.0) ** 2.0)
return output
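# e.g. heightAndWeightToBmi(80.0, 10.0) == 10 / 0.8 ** 2 == 15.625 (kg/m**2)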
def zscoreToPercentile(zscore):
"""Produce a number between 0 and 100 inclusive that is the percentile for
the given zscore, or Double.NaN if the zscore is outside of -3 to 3."""
retVal = healthdb.util.NaN
# WHO technical specs chapter 7: "However, a restriction was imposed on
# all indicators to enable the derivation of percentiles only within
# the interval corresponding to z-scores between -3 and 3. The
# underlying reasoning is that percentiles beyond +-3 SD are invariant
# to changes in equivalent z-scores. The loss accruing to this
# restriction is small since the inclusion range corresponds to the
# 0.135th to 99.865th percentiles."
if math.fabs(zscore) <= 3:
absVal = math.fabs(zscore)
P1 = (1 - 1 / math.sqrt(2 * math.pi) * math.exp(-math.pow(absVal, 2) / 2)
* (
0.31938 * (1 / (1 + 0.2316419 * absVal))
- 0.356563782 * math.pow((1 / (1 + 0.2316419 * absVal)), 2)
+ 1.781477937 * math.pow((1 / (1 + 0.2316419 * absVal)), 3)
- 1.82125 * math.pow((1 / (1 + 0.2316419 * absVal)), 4)
+ 1.330274429 * math.pow((1 / (1 + 0.2316419 * absVal)), 5)
))
if zscore > 0:
P1 = P1 * 100
else:
P1 = 100 - P1 * 100
if 0 <= P1 and P1 <= 100:
retVal = P1
return retVal
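# Sanity sketch: a zscore of 0 maps to the 50th percentile, and anything
# outside +-3 comes back as NaN per the WHO restriction quoted above.
def _percentile_sketch():  # pragma: no cover
  assert abs(zscoreToPercentile(0) - 50.0) < 0.01
  assert healthdb.util.isNaN(zscoreToPercentile(4))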
class NormalizedLengthOrHeight():
"""Adjust length-or-height by whether person is standing or recumbent (lying
down)."""
def __init__(self, ageInDays, lengthOrHeight, measured):
self.lengthOrHeight = lengthOrHeight
self.measured = measured
if lengthOrHeight < input_minlengthorheight or lengthOrHeight > input_maxlengthorheight:
self.lengthOrHeight = healthdb.util.NaN
if ageInDays >= height_mindays and measured == Measured.RECUMBENT:
self.lengthOrHeight -= heightcorrection
self.measured = Measured.STANDING
elif 0 <= ageInDays and ageInDays < height_mindays and measured == Measured.STANDING:
self.lengthOrHeight += heightcorrection
self.measured = Measured.RECUMBENT
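# Example of the 0.7 cm correction above: a two-year-old (>= 731 days)
# measured RECUMBENT has 0.7 cm subtracted and is treated as STANDING, while a
# younger child measured STANDING gains 0.7 cm and is treated as RECUMBENT.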
class ZscoreAndPercentile():
"""A class to contain zscore and percentile, each a float or NaN."""
def __init__(self, zscore, percentile):
self.zscore = zscore
self.percentile = percentile
def __str__(self):
"""String for debugging"""
return "zscore %s percentile %s" % (self.zscore, self.percentile)
def is_alertworthy(self):
"""Alertable if zscore not NaN and <0, and percentile < 25 or NaN.
This means that this statistic shows the patient is in bad shape.
"""
return (not healthdb.util.isNaN(self.zscore)) and self.zscore < 0 and (
healthdb.util.isNaN(self.percentile) or (self.percentile < 25))
def zscore_is_nan(self):
"""Return True if self.zscore is Nan, otherwise false.
Convenience method for Django templates, which have no good logic.
"""
return healthdb.util.isNaN(self.zscore)
def percentile_is_nan(self):
"""Return True if self.percentile is Nan, otherwise false.
Convenience method for Django templates, which have no good logic.
"""
return healthdb.util.isNaN(self.percentile)
class AnthroConfig:
def __init__(self, fileName, measureKey, sex, ageHeightOrLength):
self.fileName = fileName
self.measureKey = measureKey
self.sex = sex
self.ageHeightOrLength = ageHeightOrLength
class ZscoreAndPercentileProperty(db.Property):
"""A ZscoreAndPercentile property class."""
data_type = ZscoreAndPercentile
def get_value_for_datastore(self, model_instance):
zandp = super(ZscoreAndPercentileProperty, self
).get_value_for_datastore(model_instance)
if zandp:
zandp = str(zandp.zscore) + ':' + str(zandp.percentile)
return zandp
def make_value_from_datastore(self, value):
ret = None
if value:
zscore, percentile = value.split(':')
try:
zscore = float(zscore)
except ValueError, dummy:
        assert healthdb.util.isNaNString(zscore), 'value is %s, zscore is %s' % (
          value, zscore)
zscore = healthdb.util.NaN
try:
percentile = float(percentile)
except ValueError, dummy:
#logging.warning('percentile was invalid: %s' % percentile)
# On some platforms, float('NaN') doesn't work
        assert healthdb.util.isNaNString(percentile), 'value is %s, percentile is %s' % (
          value, percentile)
percentile = healthdb.util.NaN
ret = ZscoreAndPercentile(zscore, percentile)
return ret
def validate(self, value):
value = super(ZscoreAndPercentileProperty, self).validate(value)
if value is None or isinstance(value, ZscoreAndPercentile):
return value
elif isinstance(value, basestring):
return self.make_value_from_datastore(value)
raise db.BadValueError(
"Property %s must be a ZscoreAndPercentile or string." % self.name)
class VisitStatistics(db.Model):
# Constants for datastore
GROWTHSERVER_MALE = Sex.MALE
GROWTHSERVER_FEMALE = Sex.FEMALE
GROWTHSERVER_STANDING = Measured.STANDING
GROWTHSERVER_RECUMBENT = Measured.RECUMBENT
# Different models computed from WHO model
INDICATORS = [u'weight_for_length_or_height', u'weight_for_age',
u'length_or_height_for_age', u'body_mass_index_for_age',
u'head_circumference_for_age']
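  # Assumed mapping to the growthserver wire values: get_stats() below refers
  # to SEX_MAP and HEIGHT_POSITION_MAP, and the Patient/Visit constants are
  # assumed to equal the Sex/Measured strings, making the maps identities.
  SEX_MAP = {GROWTHSERVER_MALE: Sex.MALE,
             GROWTHSERVER_FEMALE: Sex.FEMALE}
  HEIGHT_POSITION_MAP = {GROWTHSERVER_STANDING: Measured.STANDING,
                         GROWTHSERVER_RECUMBENT: Measured.RECUMBENT}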
# parent is Visit
generated_date = db.DateTimeProperty(required=True)
weight_for_length_or_height = ZscoreAndPercentileProperty(required=True)
weight_for_age = ZscoreAndPercentileProperty(required=True)
length_or_height_for_age = ZscoreAndPercentileProperty(required=True)
body_mass_index_for_age = ZscoreAndPercentileProperty(required=True)
head_circumference_for_age = ZscoreAndPercentileProperty(required=False)
age_in_days = db.IntegerProperty(required=True)
body_mass_index = db.FloatProperty(required=True)
@property
def id(self):
return self.key().id()
def __str__(self):
return ("<VisitStatistics id=%s, generated_date=%s, "
"age_in_days=%s, body_mass_index=%s, "
"weight_for_length_or_height=%s, "
"weight_for_age=%s, "
"length_or_height_for_age=%s, "
"body_mass_index_for_age=%s, "
"head_circumference_for_age=%s" % (
self.id,
self.generated_date,
self.age_in_days,
self.body_mass_index,
self.weight_for_length_or_height,
self.weight_for_age,
self.length_or_height_for_age,
self.body_mass_index_for_age,
self.head_circumference_for_age))
@staticmethod
def _parse_zscore_and_percentile(att, doc):
zandp = None
results_elem = doc.getElementsByTagName('results')
if results_elem:
attp = att + u'_percentile'
attz = att + u'_zscore'
zscore = results_elem[0].getAttribute(attz)
# Note: float('NaN') only works sometimes, so go by the string instead
if zscore and not healthdb.util.isNaNString(zscore):
zscore = float(zscore)
else:
zscore = healthdb.util.NaN
percentile = results_elem[0].getAttribute(attp)
if percentile and not healthdb.util.isNaNString(percentile):
percentile = float(percentile)
else:
percentile = healthdb.util.NaN
      # Truthiness would drop a legitimate zscore of exactly 0.0; both values
      # are floats (possibly NaN) by this point, so build the pair directly.
      zandp = ZscoreAndPercentile(zscore, percentile)
return zandp
@staticmethod
def _parse_visit_statistics(result, visit=None):
'''Parse an XML string from growthserver, return a VisitStatistics object with visit as its parent
'''
#logging.info("start parse visit obj %s" % visit)
visit_stats = None
try:
doc = minidom.parseString(result)
assert doc.documentElement.tagName == 'growthserver_response'
attrmap = {}
response_elem = doc.getElementsByTagName('growthserver_response')
stime = time.strptime(response_elem[0].getAttribute('date_generated'),
"%Y-%m-%d %H:%M:%S +0000")
attrmap['generated_date'] = datetime(*stime[:6])
results_elem = doc.getElementsByTagName('results')
att = u'age_in_days'
attrmap[str(att)] = int(results_elem[0].getAttribute(att))
att = u'body_mass_index'
bmi = results_elem[0].getAttribute(att)
try:
bmi = float(bmi)
except ValueError, err:
        assert healthdb.util.isNaNString(bmi), 'bmi is %s' % bmi
# TODO(dan): Unit test that NaN bmi is okay
        bmi = healthdb.util.NaN
attrmap[str(att)] = bmi
if not healthdb.util.isNaN(bmi):
try:
for att in VisitStatistics.INDICATORS:
# map key is str(att) because **attrmap requires string keys
attrmap[str(att)] = VisitStatistics._parse_zscore_and_percentile(
att, doc)
#print "attrmap: %s" % attrmap
visit_stats = VisitStatistics(parent=visit, **attrmap)
except ValueError, err:
logging.error("Couldn't parse visit statistics xml: %s from '%s'"
% (err, result))
except datastore_errors.BadValueError, err:
logging.error("Visit statistics missing values: %s: from '%s'"
% (err, result))
except ExpatError, err:
logging.error("error '%s' parsing '%s'" % (err, result))
return visit_stats
def is_alertworthy(self):
ret = False
for indicator in VisitStatistics.INDICATORS:
if hasattr(self, indicator):
zandp = getattr(self, indicator)
if zandp and zandp.is_alertworthy():
ret = True
return ret
def get_zandp(self, indicator):
return getattr(self, indicator)
def get_worst_zscore(self):
"""Get the worst zscore of any indicator, NOT INCLUDING NaNs!
We ignore NaNs because they are troublesome to sort or filter by.
"""
worst_zscore = None
for indicator in VisitStatistics.INDICATORS:
if hasattr(self, indicator):
zandp = getattr(self, indicator)
if zandp and not healthdb.util.isNaN(zandp.zscore):
if worst_zscore is None or worst_zscore > zandp.zscore:
# logging.info("new worst_zscore = %s" % zandp.zscore)
worst_zscore = zandp.zscore
# logging.info("worst_zscore = %s" % worst_zscore)
return worst_zscore
# TODO(dan): Unit test this method
@staticmethod
def get_stats_for_visit(visit):
patient = visit.get_patient()
# TODO(dan): Add oedema as an attribute in future
hasOedema = False
return VisitStatistics.get_stats(patient.birth_date,
visit.visit_date,
patient.sex,
visit.weight,
visit.head_circumference,
visit.height,
visit.height_position,
hasOedema,
visit)
@staticmethod
def get_stats(birth_date, visit_date, sex, weight, head_circumference, height,
height_position, hasOedema, visit = None):
'''Get growth statistics from growthserver.
head_circumference is optional
sex is Patient.MALE or Patient.FEMALE
height_position is Visit.STANDING or Visit.RECUMBENT
'''
# For debugging a future version:
#rooturl = 'http://4.latest.growthserver.appspot.com/growthserver'
rooturl = 'http://growthserver.appspot.com/growthserver'
# TODO(dan): Return none if visit date is too far, or rather push that
# logic to the growthserver
pmap = {'date_of_birth': birth_date,
'date_of_visit': visit_date,
'sex' : VisitStatistics.SEX_MAP[sex],
'weight' : weight,
'length' : height,
'measured' : VisitStatistics.HEIGHT_POSITION_MAP[height_position],
'hasOedema' : hasOedema,
'format' : 'xml'}
# head_circumference is optional
if head_circumference: pmap['head_circumference'] = head_circumference
remote_growthcalc = False
if remote_growthcalc:
try:
data = urllib.urlencode(pmap)
result = urllib2.urlopen(rooturl, data)
result_string = result.read()
visit_stats = VisitStatistics._parse_visit_statistics(result_string, visit)
logging.debug("result %s" % result_string)
except urllib2.URLError, e:
logging.error("get_stats_for_visit: %s" % e)
visit_stats = None
else:
visit_stats = calculate_scores(pmap, visit)
return visit_stats
|
bsd-3-clause
| 5,793,959,453,705,045,000
| 36.959866
| 202
| 0.662643
| false
| 3.470948
| true
| false
| false
|
I-sektionen/i-portalen
|
wsgi/iportalen_django/exchange_portal/models.py
|
1
|
6767
|
__author__ = 'Magnus Forzelius & Jesper Lehtonen'
from django.db import models
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from utils.validators import liu_id_validator
from tags.models import Tag
import os
import datetime
class Continent(models.Model):
name = models.CharField(max_length=50)
class Meta:
verbose_name = _("Världsdel")
verbose_name_plural = _("Världsdelar")
ordering = ['name']
def __str__(self):
return self.name
class Country(models.Model):
name = models.CharField(max_length=50)
in_continent = models.ForeignKey(Continent, on_delete=models.CASCADE, null=True)
class Meta:
verbose_name = _("Land")
verbose_name_plural = _("Länder")
ordering = ['name']
def __str__(self):
return self.name
    @staticmethod
    def get_country_list():
        # Django managers are not accessible from model instances, so this
        # has to go through the model class itself.
        return Country.objects.all()
class City(models.Model):
name = models.CharField(max_length=50)
in_country = models.ForeignKey(Country, on_delete=models.CASCADE)
class Meta:
verbose_name = _("Stad")
verbose_name_plural = _("Städer")
ordering = ['name']
def __str__(self):
return self.name
class School(models.Model):
name = models.CharField(max_length=50)
in_city = models.ForeignKey(City, on_delete=models.CASCADE)
freemover = models.BooleanField(default=False)
exchange_with_liu = models.BooleanField(default=False)
class Meta:
verbose_name = _("Skola")
verbose_name_plural = _("Skolor")
ordering = ['name']
def __str__(self):
return self.name
# To add: HP (credits), level
class Liu_Course(models.Model):
name = models.CharField(max_length=50)
course_code = models.CharField(max_length=20)
liu_hp = models.IntegerField(default=0)
level = models.CharField(max_length=10, default='N/A')
is_compulsary = models.BooleanField(default=False)
class Meta:
verbose_name = _("Liukurs")
verbose_name_plural = _("Liukurser")
def __str__(self):
return self.name
class Exchange_Course(models.Model):
    # Maybe expand this
TECH_PROFILES = (
('D', "D"),
('E', "E"),
('M', "M"),
('B', "B"),
('S', "S"),
('Övr', "Övr")
)
name = models.CharField(max_length=50)
course_code = models.CharField(max_length=20)
year = models.IntegerField()
technical_profile = models.CharField(verbose_name='Teknisk inriktning', max_length=12, choices=TECH_PROFILES, default='NONE')
in_school = models.ForeignKey(School, on_delete=models.CASCADE)
corresponding_liu_course = models.ForeignKey(Liu_Course, on_delete=models.CASCADE)
credits = models.IntegerField(default=0)
level = models.CharField(max_length=10, default='N/A')
# Add attribute "credits", and att the credits/hp quota in school model
# A course can be required in several tech profile
class Meta:
verbose_name = _("Utlandskurs")
verbose_name_plural = _("Utlandskurser")
ordering = ['name']
def __str__(self):
return self.name
def _file_path(instance, filename):
return os.path.join(
'travel_stories', str(instance.about_school.pk), filename
)
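# e.g. a story about the school with pk 7 and an upload "report.pdf" lands at
# travel_stories/7/report.pdf (the filename here is hypothetical).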
class Travel_Story(models.Model):
TERM_CHOICES = (
("HT", "HT"),
("VT", "VT"),
("Helår", "Helår")
)
YEAR_CHOICES = []
for y in range(1969, (datetime.datetime.now().year + 1)):
YEAR_CHOICES.append((y, y))
#Change from file to form
about_school = models.ForeignKey(School, on_delete=models.CASCADE)
added_by_user = models.CharField(verbose_name=_("liu-id"), max_length=10)
term_abroad = models.CharField(verbose_name=("termin utomlands"), help_text="Termin du var utomlands", max_length=5, choices=TERM_CHOICES)
year_abroad = models.IntegerField(verbose_name=("tid utomlands"), help_text="År när du var utomlands", choices=YEAR_CHOICES)
headline = models.CharField(
verbose_name=_("rubrik"),
max_length=255,
help_text=_("Rubriken till reseberättelsen"))
lead = models.TextField(
verbose_name=_("ingress"),
help_text=_("Ingressen är den text som syns i reseberättelse"))
prep_text = models.TextField(
verbose_name=_("förberedelser"),
help_text=_("Var det några särskilda förberedelser som krävdes?
Har du några generella tips gällande ansökan? Visum?"),
null=True,
blank=True)
location_text = models.TextField(
verbose_name=_("landet och staden"),
help_text=_("Hur upplevdes landet? Staden? Kultur? Billigt eller dyrt?"),
null=True,
blank=True)
school_text = models.TextField(
verbose_name=_("skolan"),
help_text=_("Geografisk placering i staden?
Hur var campus?
Var det lätt att träffa lokalbefolkning?
Hur var studentlivet? Kurser: var det lätt/svårt att få kurser? Var de lätta/svåra att få tillgodoräknade?"),
null=True,
blank=True)
studies_text = models.TextField(
verbose_name=_("studier"),
help_text=_("Hur var nivån på kurserna?
Råd angående att välja kurser på plats?
Svårt att hitta kurser på engelska?
Hur var språket? (framförallt för de som läser ii eller som inte läste på engelska)"),
null=True,
blank=True)
living_text = models.TextField(
verbose_name=_("boende"),
help_text=_("Hur bodde du?
Hur hittade du ditt boende? Tips på eventuell mäklare eller liknande? Vilka alternativ finns?
Priser och standard?
"),
null=True,
blank=True)
sparetime_text = models.TextField(
verbose_name=_("fritid"),
help_text=_("Vad gör man på fritiden?
Resor?
Tips på saker man inte får missa"),
null=True,
blank=True)
other_text = models.TextField(
verbose_name=_("övrigt"),
help_text=_("Brödtext syns när en reseberättelse visas enskilt."),
null=True,
blank=True)
#tags = models.ManyToManyField(
# Tag,
# verbose_name=_("tag"),
# blank=True,
# help_text=_("Håll ner Ctrl för att markera flera."))
class Meta:
verbose_name = _("Reseberättelse")
verbose_name_plural = _("Reseberättelser")
ordering= ['-year_abroad','term_abroad']
def __str__(self):
return self.headline
class Feedback(models.Model):
message = models.CharField(max_length=500)
def __str__(self):
return self.message
#def get_absolute_url(self):
# """Get url of object"""
# return reverse(self)
#self.about_school
|
mit
| 4,308,718,308,841,344,500
| 30.976077
| 142
| 0.634595
| false
| 3.233188
| false
| false
| false
|
citrix-openstack-build/neutron
|
neutron/plugins/ml2/rpc.py
|
1
|
9476
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.common import constants as q_const
from neutron.common import rpc as q_rpc
from neutron.common import topics
from neutron.db import agents_db
from neutron.db import api as db_api
from neutron.db import dhcp_rpc_base
from neutron.db import securitygroups_rpc_base as sg_db_rpc
from neutron import manager
from neutron.openstack.common import log
from neutron.openstack.common.rpc import proxy
from neutron.plugins.ml2 import db
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_tunnel
# REVISIT(kmestery): Allow the type and mechanism drivers to supply the
# mixins and eventually remove the direct dependencies on type_tunnel.
LOG = log.getLogger(__name__)
TAP_DEVICE_PREFIX = 'tap'
TAP_DEVICE_PREFIX_LENGTH = 3
class RpcCallbacks(dhcp_rpc_base.DhcpRpcCallbackMixin,
sg_db_rpc.SecurityGroupServerRpcCallbackMixin,
type_tunnel.TunnelRpcCallbackMixin):
RPC_API_VERSION = '1.1'
# history
# 1.0 Initial version (from openvswitch/linuxbridge)
# 1.1 Support Security Group RPC
def __init__(self, notifier, type_manager):
# REVISIT(kmestery): This depends on the first three super classes
# not having their own __init__ functions. If an __init__() is added
# to one, this could break. Fix this and add a unit test to cover this
# test in H3.
super(RpcCallbacks, self).__init__(notifier, type_manager)
def create_rpc_dispatcher(self):
'''Get the rpc dispatcher for this manager.
If a manager would like to set an rpc API version, or support more than
one class as the target of rpc messages, override this method.
'''
return q_rpc.PluginRpcDispatcher([self,
agents_db.AgentExtRpcCallback()])
@classmethod
def _device_to_port_id(cls, device):
# REVISIT(rkukura): Consider calling into MechanismDrivers to
# process device names, or having MechanismDrivers supply list
# of device prefixes to strip.
if device.startswith(TAP_DEVICE_PREFIX):
return device[TAP_DEVICE_PREFIX_LENGTH:]
else:
return device
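    # e.g. _device_to_port_id('tapdeadbeef-12') -> 'deadbeef-12'; names
    # without the 'tap' prefix pass through unchanged.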
@classmethod
def get_port_from_device(cls, device):
port_id = cls._device_to_port_id(device)
port = db.get_port_and_sgs(port_id)
if port:
port['device'] = device
return port
def get_device_details(self, rpc_context, **kwargs):
"""Agent requests device details."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s details requested by agent "
"%(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port_id = self._device_to_port_id(device)
session = db_api.get_session()
with session.begin(subtransactions=True):
port = db.get_port(session, port_id)
if not port:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s not found in database"),
{'device': device, 'agent_id': agent_id})
return {'device': device}
segments = db.get_network_segments(session, port.network_id)
if not segments:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s has network %(network_id)s with "
"no segments"),
{'device': device,
'agent_id': agent_id,
'network_id': port.network_id})
return {'device': device}
binding = db.ensure_port_binding(session, port.id)
if not binding.segment:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s on network %(network_id)s not "
"bound, vif_type: %(vif_type)s"),
{'device': device,
'agent_id': agent_id,
'network_id': port.network_id,
'vif_type': binding.vif_type})
return {'device': device}
segment = self._find_segment(segments, binding.segment)
if not segment:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s on network %(network_id)s "
"invalid segment, vif_type: %(vif_type)s"),
{'device': device,
'agent_id': agent_id,
'network_id': port.network_id,
'vif_type': binding.vif_type})
return {'device': device}
new_status = (q_const.PORT_STATUS_BUILD if port.admin_state_up
else q_const.PORT_STATUS_DOWN)
if port.status != new_status:
port.status = new_status
entry = {'device': device,
'network_id': port.network_id,
'port_id': port.id,
'admin_state_up': port.admin_state_up,
'network_type': segment[api.NETWORK_TYPE],
'segmentation_id': segment[api.SEGMENTATION_ID],
'physical_network': segment[api.PHYSICAL_NETWORK]}
LOG.debug(_("Returning: %s"), entry)
return entry
def _find_segment(self, segments, segment_id):
for segment in segments:
if segment[api.ID] == segment_id:
return segment
def update_device_down(self, rpc_context, **kwargs):
"""Device no longer exists on agent."""
# TODO(garyk) - live migration and port status
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s no longer exists at agent "
"%(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port_id = self._device_to_port_id(device)
plugin = manager.NeutronManager.get_plugin()
port_exists = plugin.update_port_status(rpc_context, port_id,
q_const.PORT_STATUS_DOWN)
return {'device': device,
'exists': port_exists}
def update_device_up(self, rpc_context, **kwargs):
"""Device is up on agent."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s up at agent %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port_id = self._device_to_port_id(device)
plugin = manager.NeutronManager.get_plugin()
plugin.update_port_status(rpc_context, port_id,
q_const.PORT_STATUS_ACTIVE)
class AgentNotifierApi(proxy.RpcProxy,
sg_rpc.SecurityGroupAgentRpcApiMixin,
type_tunnel.TunnelAgentRpcApiMixin):
"""Agent side of the openvswitch rpc API.
API version history:
1.0 - Initial version.
1.1 - Added get_active_networks_info, create_dhcp_port,
update_dhcp_port, and removed get_dhcp_port methods.
"""
BASE_RPC_API_VERSION = '1.1'
def __init__(self, topic):
super(AgentNotifierApi, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
self.topic_network_delete = topics.get_topic_name(topic,
topics.NETWORK,
topics.DELETE)
self.topic_port_update = topics.get_topic_name(topic,
topics.PORT,
topics.UPDATE)
def network_delete(self, context, network_id):
self.fanout_cast(context,
self.make_msg('network_delete',
network_id=network_id),
topic=self.topic_network_delete)
def port_update(self, context, port, network_type, segmentation_id,
physical_network):
self.fanout_cast(context,
self.make_msg('port_update',
port=port,
network_type=network_type,
segmentation_id=segmentation_id,
physical_network=physical_network),
topic=self.topic_port_update)
|
apache-2.0
| -6,509,788,417,810,741,000
| 42.269406
| 79
| 0.550232
| false
| 4.370849
| false
| false
| false
|
eXma/meet-and-eat-registration-system
|
src/cfg/__init__.py
|
1
|
2351
|
import os
from contextlib import contextmanager
import yaml
import locale
from datetime import datetime
__author__ = 'jan'
def parse_cfg_date(cfg_date):
return datetime.strptime(cfg_date, "%Y-%m-%d %H:%M")
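# e.g. parse_cfg_date("2014-03-01 18:30") -> datetime(2014, 3, 1, 18, 30)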
@contextmanager
def _fix_locale(prefix):
oldloc = locale.getlocale(locale.LC_TIME)
if not oldloc[0] == prefix:
tried = []
for suffx in ("", ".UTF8", ".ISO-8859-1", "@euro"):
tried.append(prefix + suffx)
try:
locale.setlocale(locale.LC_TIME, prefix + suffx)
yield
locale.setlocale(locale.LC_TIME, oldloc)
return
except locale.Error:
pass
raise Exception("Cannot set locale with prefix %s. Tried: %s" % (prefix,
", ".join(tried)))
else:
yield
def pretty_date(date, month_name=False, show_year=False, with_weekday=False):
"""Pretty print the date
:type date: datetime
"""
format = ["%d."]
if month_name:
format.append(" %B ")
else:
format.append("%m.")
if show_year:
format.append("%Y")
if with_weekday:
format = ["%A, den "] + format
with _fix_locale("de_DE"):
pretty = date.strftime("".join(format).strip())
return pretty
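# e.g. pretty_date(datetime(2014, 3, 1), month_name=True, with_weekday=True)
# renders under the forced de_DE locale as roughly "Samstag, den 01. März".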
class GlobalConfig(object):
def __init__(self):
self.data = None
def initialize(self, data):
self.data = data
def clear(self):
self.data = None
def loaded(self):
return self.data is not None
def __getattr__(self, item):
assert self.data is not None, "No configuration loaded!"
if item not in self.data:
raise AttributeError, item
return self.data[item]
def __getitem__(self, key):
assert self.data is not None, "No configuration loaded!"
if key not in self.data:
raise KeyError, key
return self.data[key]
config = GlobalConfig()
def load_config(fname=None):
if fname is None:
fname = os.getenv("CONFIG_FILE_PATH", None)
assert fname is not None, "No config file set!"
assert os.path.exists(fname), "Config file %s does not exist" % fname
with open(fname, "r") as fn:
        # safe_load is sufficient for plain config data and avoids
        # constructing arbitrary Python objects from YAML tags.
        data = yaml.safe_load(fn)
config.initialize(data)
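# Minimal usage sketch (the file name and key below are hypothetical):
#
#     load_config("settings.yaml")
#     start = parse_cfg_date(config["event_start"])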
|
bsd-3-clause
| -6,858,246,530,555,979,000
| 23.237113
| 91
| 0.560613
| false
| 3.905316
| true
| false
| false
|
DarthMaulware/EquationGroupLeaks
|
Leak #4 - Don't Forget Your Base/EQGRP-Auction-File/Linux/bin/fg.py
|
1
|
23049
|
#!/usr/local/bin/python
# VER=2.0.0.2
# 09 FEB 2012
"""
fg UTILITIES
requires:
+ winscp for win32
+ pexpect 2.3 on linux
"""
import re, sys, time, os, getpass, string, traceback
from os import popen
from optparse import OptionParser
from subprocess import *
try:
import pexpect
except:
pass
class fg:
def __init__(self, userLogin, userID, userPassword, server, **kwargs):
"""
        Initializes the class and sets up some variables.
fg = fg(userLogin, userID, userPassword, server, kwargs[sharedDIRBool, userDIRBool, diskDIRBool, fileWildcard, debugBool, timeout, privKeyFile])
"""
self.sharedDIRBool = self.userDIRBool = self.diskDIRBool = False
self.fileWildcard = ""
self.debugBool = False
self.timeout = 120
#determine OS
self.platform = sys.platform
if self.debugBool: print "Running on %s" % self.platform
self.userLogin = userLogin
self.userID = userID
self.userPassword = userPassword
self.server = server
self.remoteDir = ""
self.destDir = "."
        self.privKeyFile = None  # replaced below when a key file is passed in
if kwargs.__contains__("sharedDIRBool"):
self.sharedDIRBool = kwargs["sharedDIRBool"]
if self.sharedDIRBool: self.remoteDir = "/data/shared/"
if kwargs.__contains__("userDIRBool"):
self.userDIRBool = kwargs["userDIRBool"]
if self.userDIRBool: self.remoteDir = "/data/users/" + self.userID + "/"
if kwargs.__contains__("diskDIRBool"):
self.diskDIRBool = kwargs["diskDIRBool"]
if self.diskDIRBool: self.remoteDir = "/data/gc/"
if kwargs.__contains__("privKeyFile"):
self.privKeyFile = kwargs["privKeyFile"]
if kwargs.__contains__("fileWildcard"):
self.fileWildcard = kwargs["fileWildcard"]
self.debugBool = kwargs["debugBool"]
self.timeout = int(kwargs["timeout"])
#ask for a password if the user didn't specify one or a privKeyFile
if not self.userPassword and not self.privKeyFile:
self.userPassword = self.setPass()
if not self.userID:
print "USER ID NOT SET!!"
exit(0)
        if self.privKeyFile and not os.path.isfile(self.privKeyFile):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] Key file does not exist: " + self.privKeyFile + bcolors.ENDC + "\n\n"
sys.stdout.flush()
exit(0)
#this is the host key for the server to SSH into, needed for winscp
self.host_key = "ssh-rsa 2048 xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx"
if(self.platform == "linux2"):
self.sshKeys = [
'authenticity',
'assword:',
'denied',
'No such file or directory',
'100%',
'ETA',
pexpect.EOF,
'Permission denied',
'total '
]
self.sftpKeys = [
'authenticity',
'assword:',
'denied',
pexpect.EOF,
'sftp>',
'Connecting to'
]
#--------------------------------
def setPass(self):
"""
Prompts the user for a password if this class was not passed the password by another script
"""
print "\n"
userPassword = getpass.getpass()
if self.debugBool: print "Password set: %s" % (userPassword)
print "\n\n"
return(userPassword)
#--------------------------------
def fgAutoGet(self):
"""
Automatically gets the files. Does a dir, displays the file list, prompts user for all, #, or wildcard get
"""
#if self.debugBool: print "Using options: %s --> %s" % (self.type, self.userLogin)
if(self.platform == "win32"):
# list the files then display them to the user
print "AUTO GET FILES WIN32"
print "===================================="
#cmd = 'cmd.exe /c winscp ' + self.userLogin + ":" + self.userPassword + '@' + self.server + " -hostkey\=\"" + self.host_key + "\" /command \"option confirm off\" \"get " + self.remoteDir + self.fileWildcard + "* " + self.destDir + "\ \" exit \n"
#cmdnopass = 'cmd.exe /c winscp ' + self.userLogin + ":" + "<PASSWORD>" + '@' + self.server + " -hostkey\=\"" + self.host_key + "\" /command \"option confirm off\" \"get " + self.remoteDir + self.fileWildcard + "* " + self.destDir + "\ \" exit \n"
cmd = 'cmd.exe /c winscp ' + "/console /command \"open " + self.userLogin + ":" + self.userPassword + '@' + self.server + "\" \"option confirm off\" \"get " + self.remoteDir + self.fileWildcard + "* " + self.destDir + "\ \" exit" + " -hostkey\=\"" + self.host_key
print cmd
#print "SENDING COMMAND: %s" % cmdnopass
#output = fg.winRunIt(cmd)
#print "\t[+] " + output.strip()
elif(self.platform == "linux2"):
print "AUTO GET FILES LINUX"
additionalArgs=""
#If we need to pass some additional args, do so here
if (self.privKeyFile):
additionalArgs= '-i ' + self.privKeyFile + ' '
if (self.fileWildcard[0]=='^'):
cmd = 'scp ' + str(additionalArgs) + self.userLogin + '@' + self.server + ':' + self.remoteDir + self.fileWildcard.lstrip('^') + "* " + self.destDir
else:
cmd = 'scp ' + str(additionalArgs) + self.userLogin + '@' + self.server + ':' + self.remoteDir + "*" + self.fileWildcard + "* " + self.destDir
print "===================================="
print "\t" + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#--------------------------------
def fgManualGet(self):
"""
Provides the user with a list of files then gets the user selected files.
"""
file_re = re.compile(r"^[drwx-]+\s", re.IGNORECASE | re.VERBOSE)
if(self.platform == "win32"):
#cd into directory then dir
print "====================================\n"
print " SORRY NOT WORKING YET! PUNT!"
exit(0)
#cmd = 'cmd.exe /c winscp ' + self.userLogin + ":" + self.userPassword + '@' + self.server + " -hostkey\=\"" + self.host_key + "\" /command \"cd " + self.remoteDir + "\" dir exit \n"
#output = fg.winRunIt(cmd)
elif(self.platform == "linux2"):
additionalArgs=""
#If we need to pass some additional args, do so here
if (self.privKeyFile):
additionalArgs= '-oIdentityFile=' + self.privKeyFile + ' '
# TODO, implement this with sftp: sftp -oIdentityFile=/root/testKey op@server
sftpCmd = 'sftp ' + str(additionalArgs) + self.userLogin + '@' + self.server
sftpRunCmd='ls -l ' + self.remoteDir
print sftpCmd + " THEN RUNNING " + sftpRunCmd
print "===================================="
try:
#outputChild = fg.sftpRunCmd(sftpCmd,sftpRunCmd, self.sftpKeys)
result = fg.sftpRunCmd(sftpCmd,sftpRunCmd, self.sftpKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#lines = string.split(str(outputChild.before), "\r\n")
#outputChild.close()
#result = string.split(str(outputChild.before), "\r\n")
lines = string.split(str(result), "\r\n")
fileList = {}
print "\t[+] Getting list of files...\n"
for line in lines:
if file_re.match(line):
filename = re.split('\s+', line)
nf = string.strip(filename[len(filename)-1])
nftype = string.strip(filename[0])
if not (nf == "." or nf == ".."):
fileList[nf] = nftype
cnt = 1
keys = fileList.keys()
keys.sort()
fileList2 = {}
for key in keys:
print "\t[%3s] %10s %s" % (cnt, fileList[key], key)
fileList2[cnt] = [key, fileList[key]]
cnt = cnt + 1
if cnt > 1:
print "Please select file(s) to copy: (\"all\" | num,[num...] | part of the filename) q = quit"
filesget = raw_input('-->')
print "====================================\n"
else:
print "NO FILES WAITING! SKIPPING PROMPT!"
filesget = "quit"
if filesget == "q" or filesget == "quit":
exit(0)
elif filesget == "all":
#get all files
for key in keys:
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + key + " " + self.destDir
print "\t[+] " + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
print "\t======="
#get #,# | # #
elif re.match("[0-9\,]+", filesget):
filesget = filesget.replace(", ", ",")
tmpF = re.split(",|\s", filesget)
for i in tmpF:
#catch error when user put in number out of index, or not an INT
if str(i).isdigit() and int(i) <= int(len(keys)):
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + str(fileList2[int(i)][0]) + " " + self.destDir
print "\t[+] " + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
print "\t======="
else:
#raise CustomException("\t[!] BAD USER INPUT FORMAT! - %s, MALFORMED CHARACTER OR INDEX OUT OF BOUNDS!!" % i)
if str(i).isdigit() and int(i) > int(len(keys)):
#try a wildcard get on the file even though it is an integer before bailing out
getFileStr = "*" +str(i) + "*"
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + getFileStr + " " + self.destDir
print "\t[+] " + cmd
try:
#TODO properly handle the output for when this matches multiple files (it works it just doesn't show all the files that got copied)
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "You either entered a number that was invalid or a filename with digits only which apparently wasn't on the server"
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#print bcolors.BOLD + bcolors.FAIL + "\t[!] BAD USER INPUT! <" + str(i) + "> INDEX OUT OF BOUNDS, SKIPPING TO NEXT ONE..." + bcolors.ENDC
#print "\t======="
else:
print bcolors.BOLD + bcolors.FAIL + "\t[!] NO IDEA WHAT YOU DID! <" + str(i) + ">, SKIPPING TO NEXT ONE..." + bcolors.ENDC
print "\t======="
#get filename match
#TODO fixup case where string is given that doesn't match ( ie someone accidentally types filename,1,3 )
elif re.match('\w+', filesget):
for key in keys:
if re.search(filesget, key, re.IGNORECASE | re.VERBOSE):
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + key + " " + self.destDir
print "\t[+] " + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
print "\t======="
#This seems to not be needed
#elif (keys=1): #if we get througnall keys and no match:
# print "DEBUGGING key " + key + " keys " + str(keys) + " filesget " + filesget
# raise CustomException("\t[!] FILE MATCH NOT FOUND! - THINK ABOUT WHAT YOU WANT THEN TRY AGAIN!!")
else:
raise CustomException("\t[!] BAD USER INPUT FORMAT! - THINK ABOUT WHAT YOU WANT THEN TRY AGAIN!!")
#--------------------------------
def winRunIt(self, cmd):
"""
Run a command
"""
pass
#print "Running " + cmd
#p1 = Popen(cmd, stdout=PIPE, stderr=PIPE)
#output = p1.communicate()[0]
#erroutput = p1.communicate()[1]
#p1.wait()
#return output
#--------------------------------
def sftpRunCmd(self, sftpConnectCmd, sftpCommand, expectKeys):
child = pexpect.spawn(sftpConnectCmd, timeout=self.timeout,)
seen = child.expect(expectKeys)
workedB = False
printWorkedCNT = 0
cnt = 0
cnt2 = 0
#yup, this is a horrible duplication of code
while seen != 3:
#print "Debugging " + str(child)
cnt = cnt + 1
if printWorkedCNT == 1:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write("\t[+] RUNNING COMMAND [ " + sftpConnectCmd + " ]")
sys.stdout.flush()
#~~~~~~~~~~~~~~~
#authenticty
if seen == 0:
sys.stdout.write("\t[+] ACCEPTING RSA KEY...")
sys.stdout.flush()
child.sendline('yes')
seen = child.expect(expectKeys)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
#assword:
if seen == 1:
child.sendline(self.userPassword)
if cnt2 < 1:
sys.stdout.write("\t[+] AUTHENTICATING WITH SSH SERVER...")
sys.stdout.flush()
else:
if cnt2 == 1:
sys.stdout.write("\r|")
sys.stdout.flush()
if cnt2 == 2:
sys.stdout.write("\r/")
sys.stdout.flush()
if cnt2 == 3:
sys.stdout.write("\r-")
sys.stdout.flush()
if cnt2 == 4:
sys.stdout.write("\r\\")
sys.stdout.flush()
cnt2 = 0
cnt2 = cnt2 + 1
seen = child.expect(expectKeys)
#sftp>
if seen == 4:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
print "Sending command " + sftpCommand
sys.stdout.flush()
child.sendline(sftpCommand)
seen = child.expect(expectKeys)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
workedB = True
#print "DEBUGGING case 4 " + str(child)
result=str(child.before)
#now quit and cleanup
child.sendline("quit")
seen = child.expect(expectKeys)
child.close()
return result
#Connecting to ...
if seen == 5:
print "Connecting to server"
seen = child.expect(expectKeys)
if workedB:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write(bcolors.OKGREEN + "[OK]" + bcolors.ENDC + "\t[+] SESSION COMPLETE!\n")
sys.stdout.flush()
else:
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] CONNECTION ERROR - CHECK IP ADDRESS, USERNAME, OR PASSWORD\n\n"
sys.stdout.flush()
#seen = child.expect(expectKeys)
return(child)
#--------------------------------
def nixRunIt(self, cmd, expectKeys):
"""
Controls Pexpect for
"""
child = pexpect.spawn(cmd, timeout=self.timeout,)
seen = child.expect(expectKeys)
workedB = False
printWorkedCNT = 0
cnt = 0
cnt2 = 0
while seen != 6:
cnt = cnt + 1
if printWorkedCNT == 1:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write("\t[+] RUNNING COMMAND [ " + cmd + " ]")
sys.stdout.flush()
#~~~~~~~~~~~~~~~
#authenticty
if seen == 0:
sys.stdout.write("\t[+] ACCEPTING RSA KEY...")
sys.stdout.flush()
child.sendline('yes')
seen = child.expect(expectKeys)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
#assword:
if seen == 1:
child.sendline(self.userPassword)
if cnt2 < 1:
sys.stdout.write("\t[+] AUTHENTICATING WITH SSH SERVER...")
sys.stdout.flush()
else:
if cnt2 == 1:
sys.stdout.write("\r|")
sys.stdout.flush()
if cnt2 == 2:
sys.stdout.write("\r/")
sys.stdout.flush()
if cnt2 == 3:
sys.stdout.write("\r-")
sys.stdout.flush()
if cnt2 == 4:
sys.stdout.write("\r\\")
sys.stdout.flush()
cnt2 = 0
cnt2 = cnt2 + 1
seen = child.expect(expectKeys)
#denied:
if seen == 2:
workedB = False
child.kill(0)
raise CustomException("ACCESS DENIED! - CHECK USERNAME OR PASSWORD\n\n\t!! IF YOU SEE A DIALOG BOX CLOSE PRESS CANCEL !!")
#'No such file or directory',
if seen == 3:
#workedB = False
child.kill(0)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
raise CustomException("FILE MATCH NOT FOUND! - MAYBE THERE ARE NO FILES WAITING FOR YOU ON THE SERVER?")
#100%
if seen == 4:
printWorkedCNT = printWorkedCNT + 1
workedB = True
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write("\t")
sys.stdout.flush()
tmpStr = str(child.before)
tmpStr = tmpStr.replace("\r", "")
tmpStr = tmpStr.replace("\d", "")
tmpStr = tmpStr.replace("\n", "")
sys.stdout.write(tmpStr)
sys.stdout.flush()
seen = child.expect(expectKeys)
#ETA
if seen == 5:
printWorkedCNT = printWorkedCNT + 1
workedB = True
if cnt == 1:
sys.stdout.write("\r|")
sys.stdout.flush()
if cnt == 2:
sys.stdout.write("\r/")
sys.stdout.flush()
if cnt == 3:
sys.stdout.write("\r-")
sys.stdout.flush()
if cnt == 4:
sys.stdout.write("\r\\")
sys.stdout.flush()
cnt = 1
seen = child.expect(expectKeys)
#Permission denied
if seen == 7:
workedB = False
child.kill(0)
raise CustomException("ACCESS DENIED! - CHECK USERNAME OR PASSWORD\n\n\t!! IF YOU SEE A DIALOG BOX CLOSE PRESS CANCEL !!")
workedB = True
#total (result from an ls when a key is used versus password authentication)
if seen == 8:
                workedB = True
sys.stdout.write("\t[+] REMOTE LISTING COMPLETE.")
sys.stdout.flush()
seen = child.expect(expectKeys)
if workedB:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write(bcolors.OKGREEN + "[OK]" + bcolors.ENDC + "\t[+] SESSION COMPLETE!\n")
sys.stdout.flush()
else:
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] CONNECTION ERROR - CHECK IP ADDRESS, USERNAME, OR PASSWORD\n\n"
sys.stdout.flush()
#seen = child.expect(expectKeys)
return(child)
#--------------------------------
class CustomException(Exception):
"""
Custom Exceptions...kinda
"""
def __init__(self, value):
self.parameter = value
def __str__(self):
return repr(self.parameter)
#--------------------------------
class bcolors:
"""
Pretty colors on the console
"""
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
def disable(self):
self.HEADER = ''
self.OKBLUE = ''
self.OKGREEN = ''
self.WARNING = ''
self.FAIL = ''
self.BOLD = ''
self.ENDC = ''
#--------------------------------
if(__name__ == "__main__"):
"""
Main
"""
# setup args
    VER = '2.0.0.2'  # keep in sync with the version in the header comment
parser = OptionParser(usage='%prog -l <USERLOGIN> -u <USERID> -p <USERPASS> -s <SERVER> (--sharedDIR|--userDIR|--diskDIR) [-f PART_OF_FILENAME]', add_help_option = True)
#connection info
parser.add_option("-v", dest="versionB", action="store_true", default=False)
parser.add_option("-l", "--LoginUser", dest="userLogin", help="Your server login username")
parser.add_option("-u", "--userID", dest="userID", help="Your user ID number")
parser.add_option("-p", "--pass", dest="userPassword", default=None, help="Your password")
parser.add_option("-s", "--server", dest="server", help="The server to connect to")
#types
parser.add_option("--sharedDIR", dest="sharedDIRBool", action="store_true", default=False, help="Get files from shared directory")
parser.add_option("--userDIR", dest="userDIRBool", action="store_true", default=False, help="Get files from user directory")
parser.add_option("--diskDIR", dest="diskDIRBool", action="store_true", default=False, help="Get files from disk directory")
parser.add_option("-f", "--file", dest="fileWildcard", default=None, help="Get files with this wildcard; REGEX used => .*YOURTEXT.*")
parser.add_option("-i", "--privKeyFile", dest="privKeyFile", default=None, help="Keyfile to use for server authentication")
parser.add_option("--debug", dest="debugBool", action="store_true", default=False, help="Prints more stuff to the screen")
parser.add_option("--timeout", dest="timeout", default=120, help="Overrides the timeout for ssh sessions to server")
(options, sys.argv) = parser.parse_args(sys.argv)
#print "login:" + options.userLogin + "\nuser:" + options.userID + "\npass:" + options.userPassword + "\nserver:" + options.server + "\nshared:" + str(options.sharedDIRBool) + "\nuser:" + str(options.userDIRBool) + "\ndisk:" + str(options.diskDIRBool) + "\nwildcard:" + str(options.fileWildcard) + "\ndebug:" + str(options.debugBool) + "\ntimeout:" + str(options.timeout)
if options.versionB:
print VER
exit(0)
#User must put in one of these options or fail!
if not(options.sharedDIRBool or options.userDIRBool or options.diskDIRBool):
print "\n\n!!! DID NOT SPECIFY TYPE !!!\n\t[--sharedDIR | --userDIR | --diskDIR]\n\n"
exit(0)
try:
fg = fg(options.userLogin, options.userID, options.userPassword, options.server, sharedDIRBool=options.sharedDIRBool, userDIRBool=options.userDIRBool, diskDIRBool=options.diskDIRBool, fileWildcard=options.fileWildcard, debugBool=options.debugBool, timeout=options.timeout, privKeyFile=options.privKeyFile)
except:
print "\n\n!!! FG EXCEPTION !!!\n!!! CHECK USAGE !!!"
print "usage: fg.py -l <USERLOGIN> -u <USERID> -p <USERPASS> -s <SERVER> (--sharedDIR|--userDIR|--diskDIR) [-f PART_OF_FILENAME]\n\n"
try:
raise CustomException("ACCESS DENIED! - CHECK USERNAME OR PASSWORD\n\n\t!! IF YOU SEE A DIALOG BOX CLOSE PRESS CANCEL !!")
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
if options.debugBool: print sys.exc_info()
if options.debugBool: print str(traceback.tb_lineno(sys.exc_traceback))
exit(0)
#shared / user / disk directories all share the same fetch logic
if options.sharedDIRBool or options.userDIRBool or options.diskDIRBool:
    if options.debugBool:
        if options.sharedDIRBool: print "SHARED!!"
        elif options.userDIRBool: print "USER_DIR!!"
        else: print "DISK!!"
    if options.fileWildcard:
        print "AUTO GET WITH WILDCARD %s" % options.fileWildcard
        try:
            fg.fgAutoGet()
        except CustomException, (instance):
            print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
            exit(0)
    else:
        print "PROMPT USER FILENAMES TO GET"
        try:
            fg.fgManualGet()
        except CustomException, (instance):
            print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
            exit(0)
print "\n\n\n"
#----------------------------------
|
unlicense
| 1,231,811,236,877,711,400
| 35.412322
| 372
| 0.613302
| false
| 2.978292
| false
| false
| false
|
lioupayphone/gdeploy
|
gdeployfeatures/volume/volume.py
|
1
|
12197
|
"""
Add functions corresponding to each of the actions in the json file.
The function should be named as follows <feature name>_<action_name>
"""
from gdeploylib import defaults, Helpers, Global, YamlWriter
import os, re
from os.path import basename
from collections import defaultdict
helpers = Helpers()
writers = YamlWriter()
def volume_create(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
if Global.trace:
Global.logger.info("Splitting volume and hostnames")
if not section_dict.get('brick_dirs'):
section_dict = get_common_brick_dirs(section_dict)
if Global.trace:
Global.logger.info("Retrieving common brick directories among hosts.")
else:
section_dict = validate_brick_dirs(section_dict, 'brick_dirs')
if Global.trace:
Global.logger.info("Error in retrieving brick directories"\
" Validating brick directories.")
section_dict['service'] = 'glusterd'
section_dict['state'] = 'started'
Global.current_hosts = helpers.unique(Global.current_hosts)
section_dict['hosts'] = Global.current_hosts
yamls = [defaults.SERVICE_MGMT, defaults.CREATEDIR_YML]
if Global.trace:
Global.logger.info("Executing yamls %s and %s."\
% (defaults.SERVICE_MGMT, defaults.CREATEDIR_YML))
ret = call_peer_probe(section_dict)
if ret:
section_dict = ret
yamls.append(defaults.PROBE_YML)
if Global.trace:
Global.logger.info("Executing %s."% defaults.PROBE_YML)
yamls.append(defaults.VOLCREATE_YML)
if Global.trace:
Global.logger.info("Executing %s."% defaults.VOLCREATE_YML)
section_dict, set_yml = volume_set(section_dict)
if set_yml:
yamls.append(set_yml)
section_dict, start_yml = volume_start(section_dict)
yamls.append(start_yml)
sdict, yml = get_smb_data(section_dict)
if Global.trace:
Global.logger.info("Checking if Samba is enabled on volume.")
if sdict:
yml = helpers.listify(yml)
section_dict = sdict
yamls.extend(yml)
if type(section_dict['transport']) is list:
section_dict['transport'] = ','.join(section_dict['transport'])
# Configure SSL on the volume if enable_ssl is set.
if section_dict['enable_ssl'].lower() == "yes":
if section_dict.has_key('ssl_clients'):
section_dict['ssl_hosts'] = list(set(helpers.listify
(section_dict['ssl_clients'])\
+ Global.hosts))
else:
section_dict['ssl_hosts'] = list(set(Global.hosts))
section_dict['ssl_allow_list'] = ','.join(section_dict['ssl_hosts'])
section_dict['ssl_base_dir'] = Global.base_dir
helpers.write_to_inventory('ssl_hosts', section_dict['ssl_hosts'])
# Enable SSL on the volume
yamls.append(defaults.ENABLE_SSL)
if Global.trace:
Global.logger.info("Executing %s."% defaults.ENABLE_SSL)
return section_dict, yamls
def get_smb_data(section_dict):
smb = section_dict.get('smb')
if smb:
if smb.lower() == 'yes':
return volume_smb_setup(section_dict)
elif smb.lower() == 'no':
return volume_smb_disable(section_dict)
return False, False
def call_peer_probe(section_dict):
global helpers
peer_action = helpers.config_section_map(
'peer', 'action', False) or 'True'
if peer_action != 'ignore':
to_be_probed = Global.current_hosts + Global.brick_hosts
to_be_probed = helpers.unique(to_be_probed)
section_dict['to_be_probed'] = to_be_probed
return section_dict
return False
def get_common_brick_dirs(section_dict):
global helpers, writers
f_brick_list, brick_name = [], []
host_files = os.listdir(Global.host_vars_dir)
for host in host_files:
filename = helpers.get_file_dir_path(Global.host_vars_dir,
host)
ret = read_brick_dir_from_file(filename)
if not ret:
continue
brick_list, brick_name = ret
check_brick_name_format(brick_name)
writers.create_yaml_dict('brick_dirs', sorted(
set(brick_name)), filename)
Global.brick_hosts.append(host)
f_brick_list.extend(brick_list)
if set(Global.current_hosts) - set(Global.brick_hosts):
ret = read_brick_dir_from_file(Global.group_file)
if ret:
brick_list, brick_name = ret
check_brick_name_format(brick_name)
f_brick_list.extend(brick_list)
section_dict['brick_dirs'] = helpers.unique(brick_name)
else:
print "\nError: 'brick_dirs' not provided for all the "\
"hosts."
helpers.cleanup_and_quit()
section_dict['mountpoints'] = helpers.unique(f_brick_list)
return section_dict
def read_brick_dir_from_file(filename):
global helpers, writers
brick_list, brick_name = [], []
if basename(filename) == 'all':
hostlist = Global.current_hosts
else:
hostlist = [basename(filename)]
if helpers.is_present_in_yaml(filename, 'mountpoints'):
brick_name = helpers.get_value_from_yaml(filename,
'mountpoints')
for each in brick_name:
brick_list.extend([host + ':' + each for host in
hostlist])
return (brick_list, brick_name)
return False
def validate_brick_dirs(section_dict, section):
global helpers, writers
brick_list, brick_name = [], []
brick_dict = {}
brick_dict = defaultdict(lambda: [], brick_dict)
brick_dirs = helpers.listify(section_dict[section])
for brick in brick_dirs:
bpat = re.match('(.*):(.*)', brick)
if not bpat:
if not Global.hosts:
print "Please provide the brick_dirs in the format " \
"<hostname>:<brick_dir name>"
helpers.cleanup_and_quit()
brick_list.extend([host + ':' + brick for host in
Global.hosts])
brick_name.append(brick)
else:
brick_list.append(brick)
brick_name.append(bpat.group(2))
brick_dict[bpat.group(1)].append(bpat.group(2))
if bpat.group(1) not in Global.brick_hosts:
Global.brick_hosts.append(bpat.group(1))
if brick_dict:
for host, bname in zip(brick_dict.keys(), brick_dict.values()):
filename = helpers.get_file_dir_path(Global.host_vars_dir, host)
helpers.touch_file(filename)
helpers.create_yaml_dict('brick_dirs', bname, filename)
check_brick_name_format(brick_name)
section_dict['brick_dirs'] = helpers.unique(brick_name)
section_dict['mountpoints'] = helpers.unique(brick_list)
return section_dict
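# Illustrative input (hypothetical hosts and paths):
#   brick_dirs = ['10.70.0.1:/gluster/brick1', '/gluster/brick2']
# keeps the host-qualified entry as-is and expands '/gluster/brick2' across
# every host in Global.hosts, so 'mountpoints' ends up holding host:path
# pairs and 'brick_dirs' the deduplicated directory names.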
def check_brick_name_format(brick_name):
global helpers
if False in [brick.startswith('/') for brick in
helpers.unique(brick_name)]:
msg = "values to 'brick_dirs' should be absolute"\
" path. Relative given. Exiting!"
print msg
helpers.cleanup_and_quit()
return
def volume_delete(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, defaults.VOLDEL_YML
def volume_start(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, defaults.VOLUMESTART_YML
def volume_stop(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, defaults.VOLSTOP_YML
def volume_add_brick(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
yamls = []
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
section_dict = validate_brick_dirs(section_dict, 'bricks')
ret = call_peer_probe(section_dict)
if ret:
section_dict = ret
yamls.append(defaults.PROBE_YML)
yamls.append(defaults.ADDBRICK_YML)
return section_dict, yamls
def volume_remove_brick(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
section_dict['old_bricks'] = section_dict.pop('bricks')
return section_dict, defaults.REMOVEBRK_YML
def volume_rebalance(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, [defaults.VOLUMESTART_YML,
defaults.REBALANCE_YML]
def volume_set(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
keys = section_dict.get('key')
values = section_dict.get('value')
if not keys or not values:
return section_dict, ''
data = []
key = helpers.listify(keys)
value = helpers.listify(values)
# If a value is a colon- or semicolon-separated string,
# replace the separators with commas
for idx, item in enumerate(value):
if type(item) == str and (item.__contains__(':')
or item.__contains__(';')):
value[idx] = item.replace(';', ',').replace(':', ',')
for k,v in zip(key, value):
names = {}
names['key'] = k
names['value'] = v
data.append(names)
section_dict['set'] = data
return section_dict, defaults.VOLUMESET_YML
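# Illustrative mapping (hypothetical keys/values):
#   key=['performance.cache-size', 'nfs.disable'], value=['256MB', 'on']
# becomes
#   section_dict['set'] = [{'key': 'performance.cache-size', 'value': '256MB'},
#                          {'key': 'nfs.disable', 'value': 'on'}]
# for the VOLUMESET_YML playbook to consume.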
def volume_smb_setup(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
SMB_DEFAULTS = {
'glusterfs:logfile': '/var/log/samba/' +
section_dict['volname'] + '.log',
}
section_dict = helpers.set_default_values(section_dict, SMB_DEFAULTS)
options = ''
for key, value in SMB_DEFAULTS.iteritems():
if section_dict[key]:
options += key + ' = ' + str(section_dict[key]) + '\n'
section_dict['key'] = ['server.allow-insecure',
'storage.batch-fsync-delay-usec']
section_dict['value'] = ['on', 0]
section_dict, yml = volume_set(section_dict)
section_dict['service'] = 'glusterd'
section_dict['state'] = 'started'
return section_dict, [defaults.SERVICE_MGMT, yml, defaults.SMBREPLACE_YML,
defaults.SMBSRV_YML]
def volume_smb_disable(section_dict):
section_dict['key'] = "user.smb"
section_dict['value'] = "disable"
return volume_set(section_dict)
def volume_enable_ssl(section_dict):
"""
Enable ssl on an existing volume
"""
print "Ensure clients are unmounted before continuing. Add umount "\
"section in config."
if section_dict.has_key('ssl_clients'):
section_dict['ssl_hosts'] = list(set(section_dict['ssl_clients'] +
Global.hosts))
else:
section_dict['ssl_hosts'] = list(set(Global.hosts))
section_dict['ssl_allow_list'] = ','.join(section_dict['ssl_hosts'])
section_dict['ssl_base_dir'] = Global.base_dir
helpers.write_to_inventory('ssl_hosts', section_dict['ssl_hosts'])
# Enable SSL on the volume
return section_dict, [defaults.ENABLE_SSL]
|
gpl-2.0
| -1,763,972,171,218,459,100
| 37.720635
| 81
| 0.618185
| false
| 3.645248
| false
| false
| false
|
LCBRU/reporter
|
reporter/uol_redcap_reports/limb/demographics_data_quality.py
|
1
|
2538
|
#!/usr/bin/env python3
import re
from reporter.connections import RedcapInstance
from reporter.emailing import (
RECIPIENT_LIMB_ADMIN as RECIPIENT_ADMIN,
RECIPIENT_LIMB_MANAGER as RECIPIENT_MANAGER,
)
from reporter.application_abstract_reports.redcap.data_quality import (
RedcapFieldMatchesRegularExpression,
RedcapInvalidDate,
RedcapInvalidNhsNumber,
RedcapInvalidUhlSystemNumber,
RedcapInvalidPostCode,
RedcapInvalidEmailAddress,
)
REDCAP_PROJECT_ID = 34
class LimbDemographicsRedcapStudyNumber(RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['record_id'],
regular_expression='^[A-Z]{2}\d{4}$',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
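# The pattern above accepts two uppercase letters followed by four digits,
# e.g. a record id like 'AB1234' (illustrative value only).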
class LimbDemographicsRedcapInvalidDate(RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['nhs_no'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidUhlSystemNumber(RedcapInvalidUhlSystemNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['s_no'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidPostCode(RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['postcode', 'gp_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidEmailAddress(RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['email_add'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
|
mit
| 9,036,729,170,423,756,000
| 30.538462
| 81
| 0.6316
| false
| 3.776786
| false
| false
| false
|
kyleellefsen/Glams
|
Glams/glams/glamsTemplate.py
|
1
|
3083
|
# encoding: utf-8
def glamsTemplate(article, username=None, resources='', rightbar=''):
'''Wraps an article string in the html template'''
if username:
userLogin="""<a href="/home">""" + username +"""</a>"""
else:
userLogin="""<a href="/home/login/">Login</a>"""
webpage="""<!DOCTYPE HTML>
<html>
<head>
<title>Glams</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
<link href='http://fonts.googleapis.com/css?family=Belleza' rel='stylesheet' type='text/css'>
<link rel="stylesheet" type="text/css" href="/support/css/almanacstyle.css" />
<link rel="stylesheet" type="text/css" href="/support/css/searchform.css" />
<link rel="stylesheet" type="text/css" href="/support/css/sharebubble.css" />
<script type="text/javascript" src="/support/javascript/jquery.js"></script>
<script type="text/javascript" src="/support/javascript/jquery-ui.js"></script>
<script type="text/javascript" src="/support/javascript/glamsscript.js"></script>
<script type="text/javascript" src="/support/javascript/jquery-color.js"></script>
<link rel="icon" type="image/png" href="/support/images/neurons.png">
"""+resources+"""
</head>
<body>
<div class='bubble' style='display:none;'></div>
<div id="everything">
<header>
<div id='logo'><a href='/'> <p style="color: white;font-family:'Belleza', sans-serif;">Glams Database</p> </a></div>
<!-- <form id='topsearchform' method="post" action="/search/" class="searchform">
<input class="searchfield" name="tags" type="text" value="Search..." onfocus="if (this.value == 'Search...') {this.value = '';}" onblur="if (this.value == '') {this.value = 'Search...';}" />
<input class="searchbutton" type="submit" value="Go" />
</form> -->
<div id='userLoginBox'>
<div id='userLogin'>"""+userLogin+"""</div>"""
if username:
webpage+="""
<ul id='userLoginList'>
<li><a class='button-link' href='/home/logout/'>Log out</a></li>
<li><a class='button-link' href='/home/settings/'>Account Settings</a></li>
</ul>"""
webpage+="""
</div>
</header>
<div id="content">
<article>"""
webpage+= article+"""</article>
<div id='between_article_and_aside'></div>
<aside>"""
webpage+=rightbar+"""</aside>
</div>
</div>
<footer><a href='http://scitru.com/kyleellefsen/'>Kyle Ellefsen. © 2015-2016</a></footer>
</body>
</html>"""
return webpage
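# Minimal usage sketch (argument values are illustrative):
#   html = glamsTemplate("<h1>Hello</h1>", username="kyle")
# wraps the article fragment in the full page; passing a username switches
# the header from the Login link to the logged-in menu.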
|
mit
| -3,873,556,045,646,859,300
| 54.035714
| 214
| 0.49416
| false
| 4.002597
| false
| false
| false
|
knarfeh/HiaBlog
|
app/main/signals.py
|
1
|
1539
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from flask import request, current_app
from blinker import Namespace
from . import models, ext
from app.hia.config import HiaBlogSettings
hiablog_signals = Namespace()
post_visited = hiablog_signals.signal('post-visited')
post_published = hiablog_signals.signal('post-published')
search_engine_submit_urls = HiaBlogSettings['search_engine_submit_urls']
@post_visited.connect
def on_post_visited(sender, post, **extra):
tracker = models.Tracker()
tracker.post = post
proxy_list = request.headers.getlist('X-Forwarded-For')
tracker.ip = request.remote_addr if not proxy_list else proxy_list[0]
tracker.user_agent = request.headers.get('User-Agent')
tracker.save()
try:
post_statistic = models.PostStatistics.objects.get(post=post)
except models.PostStatistics.DoesNotExist:
post_statistic = models.PostStatistics()
post_statistic.post = post
from random import randint
post_statistic.verbose_count_base = randint(500, 5000)
post_statistic.save()
post_statistic.modify(inc__visit_count=1)
@post_published.connect
def on_post_published(sender, post, **extra):
post_url = request.host + post.get_absolute_url()
# print post_url
baidu_url = search_engine_submit_urls['baidu']
if baidu_url:
# print 'Ready to post to baidu'
res = ext.submit_url_to_baidu(baidu_url, post_url)
print res.status_code, res.text
else:
print 'Not ready to submit urls yet'
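# Typical emitters (sketch; the actual call sites live outside this module):
#   post_visited.send(app, post=post) from a post detail view, and
#   post_published.send(app, post=post) after a post is published.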
|
gpl-2.0
| -2,330,866,395,023,611,400
| 28.596154
| 73
| 0.693957
| false
| 3.442953
| false
| false
| false
|
timy/dm_spec
|
pkg/pymod_data.py
|
1
|
11796
|
import numpy as np
def readDirIndex(fileName):
import json
fileDir = open(fileName)
idxDir = json.load(fileDir)['order']
fileDir.close()
order = ['1', '3', '5']
idxStart = [0, 6, 44]
return order, idxDir, idxStart
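# Sketch of the expected JSON layout (assumed, not shown here):
#   {"order": [[1, 0, 0], [0, 1, 0], ...]}
# 'order' lists the phase-matching directions, while the perturbation
# orders and their starting offsets are hardcoded above.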
def calcAmplitude(fileNameFunc, nt, nCoo, iDir):
s = np.zeros(nt)
for iCoo in range(nCoo):
data = np.loadtxt( fileNameFunc(iCoo, iDir) )
for i in range(2):
s += data[:,i]**2
return np.sqrt(s)
# unused helper: returns the index of direction l = [l1, l2, l3] in idxDir
def indexOfDir( idxDir, l ):
# for iOrder in range(nOrder):
for index, item in enumerate(idxDir):
if item == l:
return index
def calcAmplitudeMax(fileNameFunc, nt, nDir, nCoo=3, idxStart=0):
maxAmp = np.zeros(nDir)
for iDir in range(nDir):
maxAmp[iDir] = np.amax( calcAmplitude(fileNameFunc, nt, nCoo, idxStart+iDir) )
print( "%d: %le" % (iDir, maxAmp[iDir]) )
return maxAmp
def plotAmplitudeMax(maxAmp, idxDir, fileName):
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
nDir = len(idxDir)
fig = plt.figure(figsize=(26, 10))
ax = fig.add_subplot( 1, 1, 1 )
ax.plot(maxAmp, marker='.')
ax.grid(True)
ax.xaxis.set_major_locator(MaxNLocator(nbins=nDir-1, integer=True))
ax.set_xticks(range(nDir))
ax.set_xticklabels(idxDir, rotation='90')
plt.savefig(fileName, bbox_inches='tight')
plt.close(fig)
# def var_direction():
# dir1 = [ [ 1, 0, 0], [ 0, 1, 0], [ 0, 0, 1],
# [-1, 1, 1], [ 1,-1, 1], [ 1, 1,-1],
# [-2, 1, 0], [ 1,-2, 0], [-2, 0, 1],
# [ 0,-2, 1], [ 1, 0,-2], [ 0, 1,-2] ]
# dir2 = map( lambda a1: map( lambda a2: -1 * a2, a1 ), dir1 )
# dir3 = [ [ 3, 0, 0], [ 0, 3, 0], [ 0, 0, 3],
# [ 2, 1, 0], [ 2, 0, 1], [ 1, 2, 0],
# [ 0, 2, 1], [ 1, 0, 2], [ 0, 1, 2],
# [ 1, 1, 1] ]
# dir4 = map( lambda a1: map( lambda a2: -1 * a2, a1 ), dir3)
# # flatten to obtain a whole list of all directions
# direction = [ elem for s in [dir1, dir2, dir3, dir4] for elem in s ]
# return direction
# def var_coord():
# return [ 'x', 'y', 'z' ]
# def var_re_im():
# return [ "Re", "Im" ]
# def var_key_name( i_dir, i_coo, i_re ):
# return "%s[%2d,%2d,%2d]%s"%(i_re, i_dir[0], i_dir[1], i_dir[2], i_coo)
# def construct_var_name():
# direction, coord, re_im = var_direction(), var_coord(), var_re_im()
# name = []
# for i_dir in direction:
# for i_coo in coord:
# for i_re in re_im:
# name.append( var_key_name( i_dir, i_coo, i_re ) )
# return name
# def extract_data( d ):
# name_list = construct_var_name()
# indx_list = range( 1, 1+len(name_list) )
# data_list = [ d[:, i] for i in indx_list ]
# return dict( zip( name_list, data_list ) )
# def plot_data( data, dat_idx, handle, legend, ax, lim=[],
# lbl="", factor=1 ):
# t = data[:,0]
# dat = extract_data( data )
# if lim:
# idx_range = range( *map( lambda v: (np.abs(t-v)).argmin(), lim ) )
# else:
# idx_range = range( len(t) )
# for i_dat in dat_idx:
# legend.append( "%s:%s" % (lbl, i_dat) )
# handle.append( ax.plot( t[idx_range],
# dat[i_dat][idx_range] * factor,
# marker='.')[0] )
# def plot_data_abs( data, dir_idx, handle, legend, ax, lim=[],
# lbl="", factor=1 ):
# t = data[:,0]
# dat = extract_data( data )
# if lim:
# idx_range = range( *map( lambda v: (np.abs(t-v)).argmin(), lim ) )
# else:
# idx_range = range( len(t) )
# coord, re_im = var_coord(), var_re_im()
# for i_dir in dir_idx:
# s = np.zeros( len(t) )
# for i_coo in coord:
# for i_re in re_im:
# name = var_key_name( i_dir, i_coo, i_re )
# s += np.array( dat[name] ) * np.array( dat[name] )
# s = np.array( map( np.sqrt, s ) )
# legend.append( "%s:%s" % (lbl, i_dir) )
# handle.append( ax.plot( t[idx_range],
# s[idx_range] * factor,
# marker='.')[0] )
import matplotlib.pyplot as plt
# import pymod_plot
# import sys
# def plot_data_abs_all( file_name, lbl, factor=[], lim=[] ):
# """
# Loop over all directions. Each image shows abs results for
# the same direction (results may come from different files)
# """
# data = map( np.loadtxt, file_name )
# n_file = len( file_name )
# direction = var_direction()
# for i_dir in direction:
# fig = plt.figure()
# ax = fig.add_subplot( 1, 1, 1 )
# handle, legend = [], []
# print "Processing direction %s..." % i_dir
# for i in range(n_file):
# plot_data_abs( data[i], [i_dir], handle, legend, ax, lim,
# lbl[i], factor[i] )
# fname = "fig/ppar_%2d_%2d_%2d.png" % (i_dir[0], i_dir[1], i_dir[2])
# pymod_plot.plot_1d_draw( handle, legend, ax,
# xlabel=r"Time (fs)",
# ylabel=r"Polarization (arb.)",
# filename=fname,
# show=False )
# #plt.xlim( [ t[idx_range[0]], t[idx_range[-1]] ] )
# plt.close()
def fft_1d( t, ns, data, t0=0.0, row=True, inv=False, w_shift=[],
debug=False ):
C_cm2au, C_fs2au = 4.55941e-6, 41.3413733
coef = C_cm2au * C_fs2au
if row == True:
signal = np.zeros( (ns, len(t)), dtype=complex )
else:
signal = np.zeros( (len(t), ns), dtype=complex )
# frequencies
dt= t[1] - t[0]
idx_0 = np.abs( t - t0 ).argmin()
if inv == False: # don't know if this is correct..
f = np.fft.fftshift( np.fft.fftfreq( len(t), dt*C_fs2au ) )
else:
f = np.fft.ifftshift( np.fft.fftfreq( len(t), dt*C_fs2au ) )
w = 2.0 * np.pi * f / C_cm2au
if w_shift:
w -= w_shift
# data
for i in range(ns):
if row == True:
data_1d = data[i,:]
else:
data_1d = data[:,i]
data_1d = np.squeeze( np.asarray( data_1d ) )
# hanning window
# data_1d = data_1d * np.hanning(len(data_1d))
if w_shift:
data_1d = map( lambda it: data_1d[it] *
np.exp( 1j * coef * w_shift * t[it] ),
range( len(t) ) )
d1 = np.roll( data_1d, 0-idx_0 )
if debug == True:
plt.figure()
t1 = np.roll( t, 0-idx_0 )
plt.plot( t1, np.abs(d1), marker='.' )
if inv == False:
ft = np.fft.fftshift( np.fft.fft( d1 ) )
else:
ft = np.fft.fftshift( np.fft.ifft( d1 ) )
if row == True:
signal[i,:] = ft
else:
signal[:,i] = ft
return w, signal
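# Usage sketch (p_t is a hypothetical complex trace sampled on t):
#   w, spec = fft_1d(t, 1, np.asmatrix(p_t), t0=0.0)
#   spectrum = np.squeeze(spec)
# w comes back in cm^-1 and spec holds the fft-shifted spectrum row-wise,
# mirroring how plot_signal_1d calls this function below.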
def plot_signal_1d( t, data, func, handle, legend, ax, obj=[],
t0=0.0, lim=[], lbl="", factor=1, padding=0.0,
w_shift=[] ):
"""
plot_signal_1d plots one 1d signal/spectrum read from a single file.
Unless obj == 'p' (raw polarization), the data are FFT-transformed
and drawn in the frequency domain.
data:   two-column array [Re, Im] loaded from the signal file
func:   function applied to the complex signal/spectrum before
        plotting, e.g. np.real or np.abs
handle: handle of plots
legend: handle of legends
ax: axis of the main figure
t0: the t0 time for FFT
lim: list, defining the time range for use, e.g., [-300, 500]
lbl: the common label for plots of current data
factor: the scale of current plot
padding [fs]: the extra time padding for higher resolution with FFT
thus "lim" can be go beyond actual data after padding
|++++++++++ data format ++++++++++|
"t" has the format:
t0 t1 t2 ... tn, should be read from file "res/time.dat"
------------------------------
"data" has the format:
Re[p(t0)] Im[p(t0)]
Re[p(t1)] Im[p(t1)]
...
Re[p(tn)] Im[p(tn)]
* The i-th row represent signals for the i-th time point
* Each line can be defined as a complex variable
------------------------------
"""
legend.append( "%s" % lbl )
# convert data to complex variable, and Pol -> Ef
d = 1j * ( data[:,0] + 1j * data[:,1] )
dt = t[1] - t[0] # fs.
# padding should be done before invoking other operations
if padding > 0:
t_padding = np.arange( t[-1]+dt, t[-1]+padding+dt, dt )
n_padding = len(t_padding)
t = np.concatenate( [t, t_padding] )
d = np.concatenate( [ d, np.zeros( n_padding, dtype=complex ) ] )
# find index within lim
if lim:
idx_range = range( *map( lambda v: (np.abs(t-v)).argmin(), lim ) )
else:
idx_range = range( len(t) )
# truncated time
tt, dd = t[idx_range], d[idx_range]
if obj == 'p': # draw the 'polarization' to be fft transformed
handle.append( ax.plot( tt, func( dd ) * factor, marker='.' )[0] )
return
dd = np.asmatrix( dd ) # shape (n,) -> (1, n)
w, signal = fft_1d( tt, 1, dd, t0=t0, row=True, inv=False,
w_shift=w_shift )
signal = np.squeeze( signal ) # shape (1, n) -> (n,)
handle.append( ax.plot( w, func( signal ) * factor, marker='.' )[0] )
def plot_2D( signal, ax, extent ):
origin = 'lower'
norm = plt.cm.colors.Normalize(vmax=signal.max(), vmin=signal.min())
im = ax.imshow( signal, cmap=plt.cm.summer, norm=norm,
#aspect=1 ) #aspect='auto',
origin=origin,
extent=extent )
ax.contour( signal, 20, hold='on', colors = 'k',
origin=origin,
extent=extent )
from mpl_toolkits.axes_grid1 import make_axes_locatable
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.5)
cbar = plt.colorbar(im, cax=cax)
cbar.ax.tick_params(labelsize=16)
import sys
def plot_signal_2d( t, s, data, func, ax, obj=[],
t0=[0.0, 0.0], lim=[], lbl="",
factor=1, padding=0.0,
w_shift=[] ):
nt, ns = len(t), len(s)
print nt, ns
print len(data)
print np.shape(data)
if ( len(data) != nt*ns ) or ( np.shape(data)[1] != 2 ):
print( "data does not have proper shape!" )
sys.exit()
# t_lower, t_upper, s_lower, s_upper = t[0], t[-1], s[0], s[-1]
data = np.reshape( data[:,0] + 1j * data[:,1], (ns, nt) )
# convert Pol -> Ef
data *= +1j
if obj == 'p': # draw the 'polarization' to be fft transformed
extent = ( t[0], t[-1], s[0], s[-1])
plot_2D( func(data), ax, extent );
return
x, signal = fft_1d( t, ns, data, t0=t0[0], row=True, inv=False,
w_shift=w_shift[0] )
y, signal = fft_1d( s, nt, signal, t0=t0[1], row=False, inv=False,
w_shift=w_shift[1] )
# the correct one for (FFT, IFFT) sequence
# then no need to process the signal if axes is given by ( w_t, -w_tau )
# since the signal under IFFT is automatically generated for -w_tau
# extent = (x[0], x[-1], -y[-1], -y[0])
# the most general results
# the correct one for (FFT, FFT), with axes given by ( w_t, w_tau )
# extent = (x[0], x[-1], y[0], y[-1])
# After general (FFT, FFT), can also flip for axes ( w_t, -w_tau )
# can be directly compared with the paper
signal = np.flipud( signal )
extent = ( x[0], x[-1], -y[-1], -y[0] )
plot_2D( func(signal), ax, extent )
|
mit
| 5,346,539,297,204,704,000
| 35.63354
| 86
| 0.508562
| false
| 2.939447
| false
| false
| false
|
oscurart/BlenderAddons
|
old/oscurart_resize_resolution.py
|
1
|
1753
|
# Compensa el tamanio de imagen al modificar el lente de la camara.
bl_info = {
"name": "Resize Render Resolution",
"author": "Oscurart",
"version": (1, 0),
"blender": (2, 66, 0),
"location": "Search > Resize Resolution by Camera Angle",
"description": "Resize render dimension by camera angle.",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"category": "Render"}
import bpy
import math
def defResizeResolution(context, anguloInicio, anguloPrimero, resx, resy):
# calcula valores
anguloActual = math.degrees(anguloInicio / 2)  # computed for reference; not used below
proportionxy = resx / resy
opuesto = resx / 2
adyacente = opuesto / math.tan(anguloInicio / 2)
newx = (adyacente * math.tan(math.radians(anguloPrimero/2))) * 2
# setea valores
context.scene.render.resolution_x = newx
context.scene.render.resolution_y = newx / proportionxy
context.scene.camera.data.angle = math.radians(anguloPrimero)
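# Worked example (illustrative numbers): at 1920x1080 with a 49.1 deg
# horizontal FOV, adyacente = 960 / tan(24.55 deg) ~ 2102 px; narrowing the
# FOV to 30 deg gives newx = 2102 * tan(15 deg) * 2 ~ 1126 px, and the
# height follows from the preserved aspect ratio (1126 / (1920/1080) ~ 634).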
class ResizeResolution(bpy.types.Operator):
bl_idname = "scene.resize_resolution"
bl_label = "Resize Resolution by Camera Angle"
bl_options = {"REGISTER", "UNDO"}
anguloPrimero = bpy.props.FloatProperty(name="Field of View", default=math.degrees(.8575), min=.01 )
def execute(self, context):
anguloInicio = context.scene.camera.data.angle
resx = context.scene.render.resolution_x
resy = context.scene.render.resolution_y
print(resx)
defResizeResolution(context, anguloInicio, self.anguloPrimero, resx, resy)
return {'FINISHED'}
def register():
bpy.utils.register_class(ResizeResolution)
def unregister():
bpy.utils.unregister_class(ResizeResolution)
if __name__ == "__main__":
register()
|
gpl-2.0
| 4,949,207,930,317,479,000
| 27.274194
| 104
| 0.661723
| false
| 3.246296
| false
| false
| false
|
BjornFJohansson/molbio-test-generator
|
exam_generator.py
|
1
|
3576
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
make_only_first_exam = False
encrypt_exam = True
import time
import string
import re
#import py7zlib
import subprocess
import os
import shelve
import shutil
from bio_info_questions import *
from exam_settings import *
import ParseStudentList
mecs, names = ParseStudentList.parse_student_file(student_list_file)
if not encrypt_exam:
print "No encryption!"
password_to_open_exam=""
password_to_see_correct_exam=""
else:
print u"password_to_open_exam = {}".format(password_to_open_exam)
print u"password_to_see_correct_exam = {}".format(password_to_see_correct_exam)
password_to_open_exam= u"-p{}".format(password_to_open_exam)
password_to_see_correct_exam= u"-p{}".format(password_to_see_correct_exam)
studentlist = zip(mecs,names)
if make_only_first_exam:
studentlist = studentlist[:1]
shelf = shelve.open(shelf_file)
if not os.path.isdir(exam_folder):
os.makedirs(exam_folder)
for student in studentlist:
timestamp = int(time.time())
mec, name = student
print "Start prep exam for",mec,name
q=[]
q.append( reverse_complement.question(1,50) )
q.append( change_origin.question(2) )
q.append( find_feature_rc.question(1) )
q.append( find_region_of_similarity.question(4) )
q.append( find_repeated_sequences.question(4) )
q.append( pcr_cloning.question(8) )
empty_exam = header.format(name=name,
mec=mec,
timestamp=timestamp,
question_separator=question_separator,
number_of_questions=len(q) )
correct_exam = empty_exam
for index, question in enumerate(q):
empty_exam += question_separator.format(index+1)
correct_exam += question_separator.format(index+1)
empty_exam += question.empty_question
correct_exam += question.correct_answer
shelf[question.id] = question
empty_exam += endseparator
correct_exam += endseparator
empty_exam = re.sub("\r?\n", "\r\n", empty_exam)
correct_exam = re.sub("\r?\n", "\r\n", correct_exam)
if os.path.exists(u"/tmp/exam"):
shutil.rmtree(u"/tmp/exam")
os.makedirs(u"/tmp/exam")
os.makedirs(u"/tmp/exam/files")
#os.chdir(u"/tmp/exam")
with open(u"/tmp/exam/correct_exam.txt".format(mec=mec), "w") as f:
f.write(correct_exam.encode("latin-1"))
cmd = u'7z a -tzip /tmp/exam/correct_exam_encrypted.zip /tmp/exam/correct_exam.txt {pw} '.format(pw=password_to_see_correct_exam)
slask=subprocess.call(cmd, shell=True)
os.remove(u"/tmp/exam/correct_exam.txt")
for file in os.listdir(included_files_location):
if "~" not in file and not file.startswith("."):
shutil.copy(os.path.join(included_files_location, file),u"/tmp/exam/files/"+file)
filename = u"{}_{}".format(name.replace(" ","_"),mec)
with open(u"/tmp/exam/{filename}.txt".format(filename=filename).format(mec=mec), "w") as f:
f.write( empty_exam.encode("latin-1"))
cmd = u'7za a -tzip "{exam_folder}/{filename}.zip" /tmp/exam/ {pw} '.format(pw = password_to_open_exam,
exam_folder = exam_folder,
filename = filename)
slask=subprocess.call(cmd, shell=True)
shelf.close()
print "Finished"
|
bsd-2-clause
| 3,588,146,955,561,264,000
| 31.509091
| 133
| 0.598993
| false
| 3.425287
| false
| false
| false
|
eads/deck-blueprint
|
blueprint.py
|
1
|
5367
|
# -*- coding: utf-8 -*-
import codecs
import getpass
import json
import os
import requests
import shutil
from clint.textui import colored, puts
from cssmin import cssmin
from flask import g, Blueprint
from jinja2 import Markup
from slimit import minify
from smartypants import smartypants
from tarbell.hooks import register_hook
from tarbell.utils import ensure_directory
NAME = "deck.js slideshow"
EXCLUDES = [
'app',
'styles',
'lib',
'bower.json',
'requirements.txt',
'*.md',
]
blueprint = Blueprint('base', __name__)
class Includer(object):
"""
Base class for Javascript and CSS pseudo-template-tags.
See `make_context` for an explanation of `asset_depth`.
"""
def __init__(self):
self.includes = []
self.tag_string = None
def push(self, path):
self.includes.append(path)
return ''
def _compress(self):
raise NotImplementedError()
def _get_path(self, path):
blueprint_root = os.path.dirname(os.path.realpath(__file__))
project_path = os.path.join(blueprint_root, '../', path)
if os.path.isfile(project_path):
return project_path
blueprint_path = os.path.join(blueprint_root, path)
if os.path.isfile(blueprint_path):
return blueprint_path
def render(self, path):
config = g.current_site.app.config
# If we're in a build context, mash everything together
if config.get('BUILD_PATH'):
fullpath = os.path.join(config.get('BUILD_PATH'), path)
ensure_directory(fullpath)
with codecs.open(fullpath, 'w', encoding='utf-8') as f:
f.write(self._compress())
response = self.tag_string.format(path)
else:
response = '\n'.join([
self.tag_string.format(src) for src in self.includes
])
markup = Markup(response)
del self.includes[:]
return markup
class JavascriptIncluder(Includer):
"""
Pseudo-template tag that handles collecting Javascript and serving appropriate clean or compressed versions.
"""
def __init__(self, *args, **kwargs):
Includer.__init__(self, *args, **kwargs)
self.tag_string = '<script type="text/javascript" src="{0}"></script>'
def _compress(self):
output = []
for src in self.includes:
with codecs.open(self._get_path(src), encoding='utf-8') as f:
output.append(minify(f.read()))
return '\n'.join(output)
class CSSIncluder(Includer):
"""
Pseudo-template tag that handles collecting CSS and serving appropriate clean or compressed versions.
"""
def __init__(self, *args, **kwargs):
Includer.__init__(self, *args, **kwargs)
self.tag_string = '<link rel="stylesheet" type="text/css" href="{0}" />'
def _compress(self):
output = []
for src in self.includes:
with codecs.open(self._get_path(src), encoding='utf-8') as f:
output.append(cssmin(f.read()))
return '\n'.join(output)
@blueprint.app_context_processor
def context_processor():
"""
Add helper functions to context for all projects.
"""
return {
'JS': JavascriptIncluder(),
'CSS': CSSIncluder(),
'enumerate': enumerate,
}
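# Template usage sketch (Jinja syntax; file paths are illustrative):
#   {{ CSS.push('styles/style.css') }}
#   {{ JS.push('app/main.js') }}
#   {{ CSS.render('css/styles.min.css') }}
#   {{ JS.render('js/scripts.min.js') }}
# During a build the pushed assets are minified into the rendered path;
# otherwise render() emits one <link>/<script> tag per pushed file.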
@register_hook('newproject')
def copy_files(site, git):
"""
Copy the files
"""
puts('\nCopying files from blueprint\n')
style_dir = '{0}/styles'.format(site.path)
os.mkdir(style_dir)
style_src_path = '{0}/_blueprint/styles/style.css'.format(site.path)
shutil.copy(style_src_path, style_dir)
git.add('styles/style.css')
git.commit(m='Add style.css')
slide_dir = '{0}/_slides'.format(site.path)
os.mkdir(slide_dir)
slide_src_path = '{0}/_blueprint/_slides/example.md'.format(site.path)
shutil.copy(slide_src_path, slide_dir)
git.add('_slides/example.md')
git.commit(m='Add example slide content')
bowerrc_src_path = '{0}/_blueprint/.bowerrc'.format(site.path)
shutil.copy(bowerrc_src_path, site.path)
git.add('.bowerrc')
git.commit(m='Add Bower configuration')
@register_hook('newproject')
def create_repo(site, git):
"""
Create repo when making new project
"""
create = raw_input("Want to create a Github repo for this project [Y/n]? ")
if create and not create.lower() == "y":
return puts("Not creating Github repo...")
name = site.path.split('/')[-1]
user = raw_input("What is your Github username? ")
password = getpass.getpass("What is your Github password? ")
headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
data = {'name': name, 'has_issues': True, 'has_wiki': True}
resp = requests.post('https://api.github.com/user/repos', auth=(user, password), headers=headers, data=json.dumps(data))
puts("Created {0}".format(colored.green("https://github.com/{0}/{1}".format(user, name))))
clone_url = resp.json().get("clone_url")
puts(git.remote.add("origin", "git@github.com:{0}/{1}.git".format(user,name)))
puts(git.push("origin", "master"))
@blueprint.app_template_filter('smartypants')
def smartypants_filter(text):
"""
Smarty pants
"""
if text:
return smartypants(text)
else:
return ''
|
mit
| 422,399,089,869,528,400
| 27.854839
| 124
| 0.617477
| false
| 3.616577
| false
| false
| false
|
inTagger/UpdateDDNS
|
DNSPod/DNSPodAPI.py
|
1
|
3202
|
__author__ = 'Alexander Logger'
from string import Template
import requests
import json
import logging
base_url = 'https://www.dnspod.com/api'
get_records_url_tpl = Template(base_url + '/records/$domain')
modify_record_url_tpl = Template(base_url + '/records/$domain/$record_id')
get_auth_token_url_tpl = Template(base_url + '/auth?email=$email&password=$password')
class DNSPodAPI():
def __init__(self, auth_token: dict=None):
self.auth_token = auth_token
self.logger = logging.getLogger("UpdateDDNS")
@staticmethod
def get_auth_token(email: str, password: str) -> dict:
url = get_auth_token_url_tpl.substitute(email=email, password=password)
auth_token = requests.get(url).json()
return auth_token
def auth(self, email: str, password: str):
self.auth_token = DNSPodAPI.get_auth_token(email, password)
def get_domain_records(self, domain: str) -> dict:
self._check_auth()
url = get_records_url_tpl.substitute(domain=domain)
records = requests.get(url, cookies=self.auth_token).json()
return records
def modify_domain_record(self, domain: str, record: dict) -> dict:
self._check_auth()
url = modify_record_url_tpl.substitute(domain=domain, record_id=record['id'])
resp = requests.put(url, data=json.dumps(record), cookies=self.auth_token).json()
return resp
def update_ips(self, domain: str, sub_domains: dict, ipv4: str, ipv6: str):
self.logger.info("Getting records for domain '%s'..." % domain)
records = self.get_domain_records(domain)
self.logger.info(' Got %d records.' % len(records))
self.logger.info("Updating records...")
records_updated = 0
for RECORD in records:
# self.logger.info(" '%s' " % RECORD['sub_domain'], end='')
sub_domain = RECORD['sub_domain']
if sub_domain not in sub_domains:
continue
if RECORD['record_type'] not in sub_domains[sub_domain]:
continue
if RECORD['area'] != 'default':
# only handle the default area; the API returns the area as the string 'default', not a numeric code
continue
record_type = RECORD['record_type']
if record_type == 'A':
value = ipv4
elif record_type == 'AAAA':
value = ipv6
else:
continue
if RECORD['value'] == value: # Skip when record already contains actual value.
continue
self.logger.info(" '%s' [%s]->[%s]... " % (sub_domain, RECORD['value'], value))
record = {'id': RECORD['id'], 'area': '0', 'sub_domain': sub_domain, 'record_type': record_type,
'value': value, 'ttl': RECORD['ttl']}
resp = self.modify_domain_record(domain, record)
self.logger.info(resp['message'] + '.')
records_updated += 1
self.logger.info("%d records were updated." % records_updated)
def _check_auth(self):
if self.auth_token is None:
raise NotAuthenticated()
class NotAuthenticated(Exception):
pass
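# Usage sketch (domain and credentials are placeholders):
#   api = DNSPodAPI()
#   api.auth('user@example.com', 'password')
#   api.update_ips('example.com', {'www': ['A', 'AAAA']},
#                  '203.0.113.10', '2001:db8::1')
# rewrites only the www records whose stored value differs from the given IPs.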
|
mit
| -6,272,639,740,326,752,000
| 36.682353
| 108
| 0.590881
| false
| 3.862485
| false
| false
| false
|
jtucker1972/XBMC-TVTime
|
resources/lib/Overlay.py
|
1
|
68585
|
# Copyright (C) 2011 James A. Tucker
#
#
# This file is part of TV Time.
#
# TV Time is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TV Time is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TV Time. If not, see <http://www.gnu.org/licenses/>.
import xbmc, xbmcgui, xbmcaddon
import subprocess, os
import time, threading, thread
import datetime
import sys, re
import random
from operator import itemgetter
from time import time, localtime, strftime, strptime, mktime, sleep
from datetime import datetime, date, timedelta
from decimal import *
import Globals
from xml.dom.minidom import parse, parseString
from Playlist import Playlist
from Globals import *
from Channel import Channel
from EPGWindow import EPGWindow
from ChannelList import ChannelList
from PrestageThread import *
class MyPlayer(xbmc.Player):
def __init__(self):
xbmc.Player.__init__(self, xbmc.PLAYER_CORE_AUTO)
self.stopped = False
def log(self, msg, level = xbmc.LOGDEBUG):
log('Player: ' + msg, level)
def onPlayBackStopped(self):
if self.stopped == False:
self.log('Playback stopped')
if self.overlay.sleepTimeValue == 0:
self.overlay.sleepTimer = threading.Timer(1, self.overlay.sleepAction)
self.overlay.sleepTimeValue = 1
self.overlay.startSleepTimer()
self.stopped = True
# overlay window to catch events and change channels
class TVOverlay(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
xbmcgui.WindowXMLDialog.__init__(self, *args, **kwargs)
self.log('Overlay: __init__')
# initialize all variables
self.channels = []
self.Player = MyPlayer()
self.Player.overlay = self
self.inputChannel = -1
self.channelLabel = []
self.lastActionTime = 0
self.actionSemaphore = threading.BoundedSemaphore()
self.setCoordinateResolution(1)
self.timeStarted = 0
self.infoOnChange = True
self.infoOffset = 0
self.invalidatedChannelCount = 0
self.showingInfo = False
self.showChannelBug = False
random.seed()
for i in range(3):
self.channelLabel.append(xbmcgui.ControlImage(50 + (50 * i), 50, 50, 50, IMAGES_LOC + 'solid.png', colorDiffuse='0xAA00ff00'))
self.addControl(self.channelLabel[i])
self.channelLabel[i].setVisible(False)
self.doModal()
self.log('Overlay: __init__ return')
def resetChannelTimes(self):
curtime = time.time()
for i in range(self.maxChannels):
self.channels[i].setAccessTime(curtime - self.channels[i].totalTimePlayed)
def onFocus(self, controlId):
pass
# override the doModal function so we can setup everything first
def onInit(self):
self.log('Overlay: onInit')
migrate()
self.channelLabelTimer = threading.Timer(5.0, self.hideChannelLabel)
self.infoTimer = threading.Timer(5.0, self.hideInfo)
self.background = self.getControl(101)
self.getControl(102).setVisible(False)
self.channelList = ChannelList()
# need to reset for scheduled auto channel reset
Globals.prestageThreadExit = 0
# setup directories
self.createDirectories()
self.myEPG = EPGWindow("script.pseudotv.EPG.xml", ADDON_INFO, "default")
self.myEPG.MyOverlayWindow = self
# Don't allow any actions during initialization
self.actionSemaphore.acquire()
self.log('Overlay: Read Config')
if self.readConfig() == False:
return
# build meta files if first time loading
if (
REAL_SETTINGS.getSetting("bumpers") == "true" or
REAL_SETTINGS.getSetting("commercials") == "true" or
REAL_SETTINGS.getSetting("trailers") == "true"
):
self.buildMetaFiles()
# read in channel playlists in video, music and mixed folders
channelNum = 0
for i in range(500):
# video, mixed and music smart playlists are registered identically,
# so probe each location in turn instead of three copied branches
for pltype in ('video', 'mixed', 'music'):
    plpath = xbmc.translatePath('special://profile/playlists/' + pltype + '/') + 'Channel_' + str(i + 1) + '.xsp'
    if os.path.exists(plpath):
        channelNum = channelNum + 1
        ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_type", "0")
        ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_time", "0")
        ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_1", plpath)
        ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_3", self.channelList.cleanString(self.channelList.getSmartPlaylistName(plpath)))
        for sub in (2, 4, 5, 6, 7, 8, 9):
            ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_" + str(sub), str(""))
        ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_playlist", plpath)
        #self.updateDialog(progressIndicator,"Auto Tune","Found " + str(self.channelList.getSmartPlaylistName(plpath)),"")
        break
if (REAL_SETTINGS.getSetting("autoFindMixGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindMovieGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindNetworks") == "true" or
REAL_SETTINGS.getSetting("autoFindStudios") == "true" or
REAL_SETTINGS.getSetting("autoFindTVGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindTVShows") == "true" or
REAL_SETTINGS.getSetting("autoFindMusicGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindLive") == "true"):
Globals.resetSettings2 = 1
Globals.resetPrestage = 1
self.channelList.autoTune()
# There are two types of force resets
# 1. Force All Channels Reset (Addon Setting)
# 2. Force a changed channel to reset (Channel Config Change)
forceReset = int(REAL_SETTINGS.getSetting("ForceChannelReset"))
# Loop through each channel and determine if channel setting has changed
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Channel Check")
progressIndicator = 0
self.log("setMaxChannels")
self.channelList.setMaxChannels()
maxChannels = int(REAL_SETTINGS.getSetting("maxChannels"))
for i in range(maxChannels):
progressIndicator = progressIndicator + (100/maxChannels)
self.dlg.update(progressIndicator,"Channel Check","Checking if Channel " + str(i+1) + " needs to be reset")
channelChanged = ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_changed")
if channelChanged == "true":
self.log("Channel Configuration Changed")
self.log("Resetting Channel Playlist " + str(i+1))
# rebuild playlist
self.channelList.resetPlaylist(i+1)
# force channel reset does not use pre-staged file lists
# this will only reset the channel that changed
# it will not reset channels which have not changed
# only want to force channel reset once, so if force reset
# is on then skip since we will force reset the channel later
if forceReset == 0:
self.log("Force Channel " + str(i+1) + " Reset")
# reset only the specified channel
self.forceChannelReset(i+1)
Globals.resetPrestage = 1
self.dlg.close()
# update settings2.xml file
ADDON_SETTINGS.writeSettings()
# pause while settings file is being written to
while int(Globals.savingSettings) == 1:
    xbmc.sleep(100)
# Check if a force reset is required for all channels
# This will force rebuilding of ALL channel file lists
if forceReset > 0:
self.log("Force All Channels Reset")
# reset all channels
self.forceChannelReset("all")
Globals.resetPrestage = 1
# check auto reset
if self.checkAutoChannelReset() == True:
self.log("Auto Reset Channels")
# auto channel reset copies over pre-staged file lists to speed up loading
self.autoChannelReset()
# update live channels
self.resetLiveChannels()
# time to load in the channels
if self.loadChannels() == False:
return
self.myEPG.channelLogos = self.channelLogos
self.maxChannels = len(self.channels)
if self.maxChannels == 0:
#self.Error('Unable to find any channels. \nPlease go to the Addon Settings to configure TV Time.')
#return
dlg = xbmcgui.Dialog()
autoTune = False
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune TV Network\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindNetworks","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune TV Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindTVGenre","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Movie Studio\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindStudios","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Movie Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindMovieGenres","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Mix Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindMixGenres","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Music Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindMusicGenres","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Live\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindLive","true")
autoTune = True
if autoTune:
self.end()
return
del dlg
found = False
for i in range(self.maxChannels):
if self.channels[i].isValid:
self.log("Channel " + str(i) + " isValid")
found = True
break
if found == False:
self.Error('Unable to find any channels. \nPlease go to the Addon Settings to configure TV Time.')
return
if self.sleepTimeValue > 0:
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
# start thread to build prestage channel files in the background
self.prestageThread = PrestageThread()
self.prestageThread.start()
# shutdown check timer
self.shutdownTimer = threading.Timer(1, self.checkShutdownFlag)
self.shutdownTimer.start()
try:
if int(self.forceReset) == 0:
self.currentChannel = self.fixChannel(int(REAL_SETTINGS.getSetting("CurrentChannel")))
else:
self.currentChannel = self.fixChannel(1)
except:
self.currentChannel = self.fixChannel(1)
self.resetChannelTimes()
self.setChannel(self.currentChannel)
self.timeStarted = time.time()
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.log("onInit: startSleepTimer")
self.startSleepTimer()
self.log("onInit: releasing semaphore")
self.actionSemaphore.release()
self.log('Overlay: onInit return')
def checkShutdownFlag(self):
if Globals.userExit == 1:
self.log("Calling TV Time Exit")
self.shutdownTimer.cancel()
self.end()
else:
self.shutdownTimer = threading.Timer(1, self.checkShutdownFlag)
self.shutdownTimer.start()
def createDirectories(self):
self.log("createDirectories")
# setup directories
self.createDirectory(CHANNELS_LOC)
self.createDirectory(GEN_CHAN_LOC)
self.createDirectory(PRESTAGE_LOC)
self.createDirectory(TEMP_LOC)
self.createDirectory(META_LOC)
self.createDirectory(FEED_LOC)
def copySourcesXML(self):
self.log("copySourcesXML")
if not os.path.exists(os.path.join(FEED_LOC,"sources.xml")):
# copy the default live channel sources
self.channelList.copyFiles(os.path.join(ADDON_INFO, 'resources', 'live'), LIVE_LOC)
def buildMetaFiles(self):
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Initializing")
progressIndicator = 0
if REAL_SETTINGS.getSetting("bumpers"):
if not os.path.exists(META_LOC + "bumpers.meta"):
# prompt user that we need to build this meta file
self.dlg.update(progressIndicator,"Initializing","Creating Bumper File List")
bumpersfolder = REAL_SETTINGS.getSetting("bumpersfolder")
if len(bumpersfolder) > 0:
self.buildMetaFile("bumpers",bumpersfolder)
if REAL_SETTINGS.getSetting("commercials"):
if not os.path.exists(META_LOC + "commercials.meta"):
# prompt user that we need to build this meta file
self.dlg.update(progressIndicator,"Initializing","Creating Commercial File List")
commercialsfolder = REAL_SETTINGS.getSetting("commercialsfolder")
if len(commercialsfolder) > 0:
self.buildMetaFile("commercials",commercialsfolder)
if REAL_SETTINGS.getSetting("trailers"):
if not os.path.exists(META_LOC + "trailers.meta"):
# prompt user that we need to build this meta file
self.dlg.update(progressIndicator,"Initializing","Creating Trailer File List")
trailersfolder = REAL_SETTINGS.getSetting("trailersfolder")
if len(trailersfolder) > 0:
self.buildMetaFile("trailers",trailersfolder)
self.dlg.close()
def buildMetaFile(self, type, folder):
if (Globals.prestageThreadExit == 0):
self.log("buildMetaFile")
self.videoParser = VideoParser()
flext = [".avi",".mp4",".m4v",".3gp",".3g2",".f4v",".flv",".mkv",".flv"]
metaFileList = []
if os.path.exists(folder):
# get a list of valid filenames from the folder
fnlist = []
for root, subFolders, files in os.walk(folder):
for filename in files:
if (Globals.prestageThreadExit == 0): # pseudo break point to exit thread
# get file extension
basename, extension = os.path.splitext(filename)
if extension in flext: # passed first test
if (Globals.prestageThreadExit == 0):
# get file duration
filepath = os.path.join(root, filename)
dur = self.videoParser.getVideoLength(filepath)
if (dur > 0): # passed second test
# parse title/showtitle/description out of the file name,
# preferring whichever delimiter ("_" or "-") yields more parts
filename_base = filename.split(".")
filename_parts = filename_base[0].split("_")
filename_parts2 = filename_base[0].split("-")
if len(filename_parts) <= len(filename_parts2):
filename_parts = filename_parts2
title = filename_parts[0]
showtitle = filename_parts[1] if len(filename_parts) > 1 else ""
description = filename_parts[2] if len(filename_parts) > 2 else ""
metastr = str(filepath) + '|' + str(dur) + '|' + str(title) + '|' + str(showtitle) + '|' + str(description)
metaFileList.append(metastr)
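# Editorial note: each line written to the .meta file is pipe-delimited as
# filepath|duration|title|showtitle|description. For example, a hypothetical
# 30-second bumper named "StationID_Promo.avi" would yield
# /media/bumpers/StationID_Promo.avi|30|StationID|Promo|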
else:
self.abort()
else:
self.abort()
self.writeMetaFile(type, metaFileList)
else:
self.abort()
def writeMetaFile(self, type, metaFileList):
if (Globals.prestageThreadExit == 0):
try:
metafile = open(META_LOC + str(type) + ".meta", "w")
except:
self.Error('Unable to open the meta file ' + META_LOC + str(type) + '.meta')
return False
for file in metaFileList:
metafile.write(file + "\n")
metafile.close()
else:
self.abort()
# setup all basic configuration parameters, including creating the playlists that
# will be used to actually run this thing
def readConfig(self):
self.log('readConfig')
# Sleep setting is in 30-minute increments, so multiply by 30 and then by 60 (minutes to seconds)
self.sleepTimeValue = int(REAL_SETTINGS.getSetting('AutoOff')) * 1800
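# Editorial sketch: a hypothetical AutoOff setting of 4 gives
# 4 * 1800 = 7200 seconds, i.e. two hours of idle time before sleep.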
self.infoOnChange = REAL_SETTINGS.getSetting("InfoOnChange") == "true"
self.showChannelBug = REAL_SETTINGS.getSetting("ShowChannelBug") == "true"
self.forceReset = REAL_SETTINGS.getSetting('ForceChannelReset')
self.channelLogos = xbmc.translatePath(REAL_SETTINGS.getSetting('ChannelLogoFolder'))
if self.channelLogos == "":
self.channelLogos = xbmc.translatePath("special://home/addons/script.tvtime/resources/images/")
if os.path.exists(self.channelLogos) == False:
self.channelLogos = IMAGES_LOC
self.startupTime = time.time()
try:
self.lastResetTime = int(REAL_SETTINGS.getSetting("LastResetTime"))
except:
self.lastResetTime = 0
# Output all settings for debugging purposes
self.log('#####################################################################################')
self.log('General Settings:')
self.log(' Auto off is - ' + str(REAL_SETTINGS.getSetting('AutoOff')))
self.log(' Show info label on channel change is - ' + str(REAL_SETTINGS.getSetting('InfoOnChange') == "true"))
self.log(' Force Channel Reset is - ' + str(REAL_SETTINGS.getSetting('ForceChannelReset')))
self.log(' Auto Channel Reset is - ' + str(REAL_SETTINGS.getSetting('autoChannelReset') == "true"))
self.log(' Auto Channel Reset Setting is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetSetting')))
self.log(' Auto Channel Reset Interval is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetInterval')))
self.log(' Auto Channel Reset Time is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetTime')))
self.log(' Auto Channel Reset Shutdown is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetShutdown') == "true"))
self.log(' Show Channel Bug is - ' + str(REAL_SETTINGS.getSetting('ShowChannelBug') == "true"))
self.log(' Channel Logo Folder is - ' + str(REAL_SETTINGS.getSetting('ChannelLogoFolder')))
self.log(' Version is - ' + str(REAL_SETTINGS.getSetting('Version')))
self.log('Channels Settings:')
self.log(' Auto Find TV Network Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindNetworks')))
self.log(' Auto Find Movie Studios Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindStudios')))
self.log(' Auto Find TV Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindTVGenres')))
self.log(' Auto Find Movie Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindMovieGenres')))
self.log(' Auto Find Mixed Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindMixGenres')))
self.log(' Auto Find Music Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindMusicGenres')))
self.log(' Auto Find Live Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindLive')))
self.log(' Channel Limit is - ' + str(REAL_SETTINGS.getSetting('limit')))
self.log('Off Air Settings:')
self.log(' Off Air Mode is - ' + str(REAL_SETTINGS.getSetting('offair') == "true" ))
self.log(' Off Air File is - ' + str(REAL_SETTINGS.getSetting('offairfile')))
self.log('Bumpers Settings:')
self.log(' Bumpers Mode is - ' + str(REAL_SETTINGS.getSetting('bumpers') == "true" ))
self.log(' Bumpers Folder is - ' + str(REAL_SETTINGS.getSetting('bumpersfolder')))
self.log(' Number of Bumpers is - ' + str(REAL_SETTINGS.getSetting('numbumpers')))
self.log(' Max Number of Bumpers is - ' + str(REAL_SETTINGS.getSetting('maxbumpers')))
self.log('Commercials Settings:')
self.log(' Commercials Mode is - ' + str(REAL_SETTINGS.getSetting('commercials') == "true" ))
self.log(' Commercials Folder is - ' + str(REAL_SETTINGS.getSetting('commercialsfolder')))
self.log(' Number of Commercials is - ' + str(REAL_SETTINGS.getSetting('numcommercials')))
self.log(' Max Number of Commercials is - ' + str(REAL_SETTINGS.getSetting('maxcommercials')))
self.log('Trailers Settings:')
self.log(' Trailers Mode is - ' + str(REAL_SETTINGS.getSetting('trailers') == "true" ))
self.log(' Trailers Folder is - ' + str(REAL_SETTINGS.getSetting('trailersfolder')))
self.log(' Number of Trailers is - ' + str(REAL_SETTINGS.getSetting('numtrailers')))
self.log(' Max Number of Trailers is - ' + str(REAL_SETTINGS.getSetting('maxtrailers')))
self.log('Runtime Settings:')
self.log(' Current Channel is - ' + str(REAL_SETTINGS.getSetting('CurrentChannel')))
self.log(' Last Reset Time is - ' + str(REAL_SETTINGS.getSetting('LastResetTime')))
self.log(' Next Auto Reset Date/Time is - ' + str(REAL_SETTINGS.getSetting('nextAutoResetDateTime')))
self.log(' Next Auto Reset Time Interval is - ' + str(REAL_SETTINGS.getSetting('nextAutoResetDateTimeInterval')))
self.log(' Next Auto Reset Hour is - ' + str(REAL_SETTINGS.getSetting('nextAutoResetDateTimeResetTime')))
self.log('#####################################################################################')
self.log('readConfig return')
return True
def loadChannels(self):
self.log('loadChannels')
self.background.setVisible(True)
self.channels = self.channelList.setupList()
if self.channels is None:
self.log('loadChannels: No channel list returned')
self.log("loadChannels: calling end")
self.end()
return False
self.Player.stop()
return True
def channelDown(self):
self.log('channelDown')
if self.maxChannels == 1:
return
self.background.setVisible(True)
channel = self.fixChannel(self.currentChannel - 1, False)
self.setChannel(channel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.log('channelDown return')
def channelUp(self):
self.log('channelUp')
if self.maxChannels == 1:
return
self.background.setVisible(True)
channel = self.fixChannel(self.currentChannel + 1)
self.setChannel(channel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.log('channelUp return')
# set the channel, the proper show offset, and time offset
def setChannel(self, channel):
self.log('setChannel ' + str(channel))
if channel < 1 or channel > self.maxChannels:
self.log('setChannel invalid channel ' + str(channel), xbmc.LOGERROR)
return
if self.channels[channel - 1].isValid == False:
self.log('setChannel channel not valid ' + str(channel), xbmc.LOGERROR)
return
self.lastActionTime = 0
timedif = 0
self.getControl(102).setVisible(False)
self.showingInfo = False
# first of all, save playing state, time, and playlist offset for
# the currently playing channel
if self.Player.isPlaying():
if channel != self.currentChannel:
self.channels[self.currentChannel - 1].setPaused(xbmc.getCondVisibility('Player.Paused'))
# Automatically pause in serial mode
#if self.channels[self.currentChannel - 1].mode & MODE_ALWAYSPAUSE > 0:
# self.channels[self.currentChannel - 1].setPaused(True)
self.channels[self.currentChannel - 1].setShowTime(self.Player.getTime())
self.channels[self.currentChannel - 1].setShowPosition(xbmc.PlayList(xbmc.PLAYLIST_MUSIC).getposition())
self.channels[self.currentChannel - 1].setAccessTime(time.time())
self.currentChannel = channel
# now load the proper channel playlist
xbmc.PlayList(xbmc.PLAYLIST_MUSIC).clear()
if xbmc.PlayList(xbmc.PLAYLIST_MUSIC).load(self.channels[channel - 1].fileName) == False:
self.log("Error loading playlist")
self.InvalidateChannel(channel)
return
# Disable auto playlist shuffling if it's on
if xbmc.getInfoLabel('Playlist.Random').lower() == 'random':
self.log('Random on. Disabling.')
xbmc.PlayList(xbmc.PLAYLIST_MUSIC).unshuffle()
xbmc.executebuiltin("self.PlayerControl(repeatall)")
timedif += (time.time() - self.channels[self.currentChannel - 1].lastAccessTime)
# adjust the show and time offsets to properly position inside the playlist
while self.channels[self.currentChannel - 1].showTimeOffset + timedif > self.channels[self.currentChannel - 1].getCurrentDuration():
timedif -= self.channels[self.currentChannel - 1].getCurrentDuration() - self.channels[self.currentChannel - 1].showTimeOffset
self.channels[self.currentChannel - 1].addShowPosition(1)
self.channels[self.currentChannel - 1].setShowTime(0)
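# Editorial sketch: if this channel was left 70s into a 100s item and 60s of
# wall time has since passed (timedif), the loop spends 30s finishing that
# item, advances the playlist position by one, and leaves 30s to be seeked
# into the following item below.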
# set the show offset
self.Player.playselected(self.channels[self.currentChannel - 1].playlistPosition)
# set the time offset
self.channels[self.currentChannel - 1].setAccessTime(time.time())
if self.channels[self.currentChannel - 1].isPaused:
self.channels[self.currentChannel - 1].setPaused(False)
try:
self.Player.seekTime(self.channels[self.currentChannel - 1].showTimeOffset)
if self.channels[self.currentChannel - 1].mode & MODE_ALWAYSPAUSE == 0:
self.Player.pause()
if self.waitForVideoPaused() == False:
return
except:
self.log('Exception during seek on paused channel', xbmc.LOGERROR)
else:
seektime = self.channels[self.currentChannel - 1].showTimeOffset + timedif
try:
self.Player.seekTime(seektime)
except:
self.log('Exception during seek', xbmc.LOGERROR)
self.showChannelLabel(self.currentChannel)
self.lastActionTime = time.time()
self.log('setChannel return')
def InvalidateChannel(self, channel):
self.log("InvalidateChannel" + str(channel))
if channel < 1 or channel > self.maxChannels:
self.log("InvalidateChannel invalid channel " + str(channel))
return
self.channels[channel - 1].isValid = False
self.invalidatedChannelCount += 1
if self.invalidatedChannelCount > 3:
self.Error("Exceeded 3 invalidated channels. Exiting.")
return
remaining = 0
for i in range(self.maxChannels):
if self.channels[i].isValid:
remaining += 1
if remaining == 0:
self.Error("No channels available. Exiting.")
return
self.setChannel(self.fixChannel(channel))
def waitForVideoPaused(self):
self.log('waitForVideoPaused')
sleeptime = 0
while sleeptime < TIMEOUT:
xbmc.sleep(100)
if self.Player.isPlaying():
if xbmc.getCondVisibility('Player.Paused'):
break
sleeptime += 100
else:
self.log('Timeout waiting for pause', xbmc.LOGERROR)
return False
self.log('waitForVideoPaused return')
return True
def setShowInfo(self):
self.log('setShowInfo')
if self.infoOffset > 0:
self.getControl(502).setLabel('COMING UP:')
elif self.infoOffset < 0:
self.getControl(502).setLabel('ALREADY SEEN:')
elif self.infoOffset == 0:
self.getControl(502).setLabel('NOW WATCHING:')
position = xbmc.PlayList(xbmc.PLAYLIST_MUSIC).getposition() + self.infoOffset
self.getControl(503).setLabel(self.channels[self.currentChannel - 1].getItemTitle(position))
self.getControl(504).setLabel(self.channels[self.currentChannel - 1].getItemEpisodeTitle(position))
self.getControl(505).setLabel(self.channels[self.currentChannel - 1].getItemDescription(position))
self.getControl(506).setImage(self.channelLogos + self.channels[self.currentChannel - 1].name + '.png')
self.log('setShowInfo return')
# Display the current channel based on self.currentChannel.
# Start the timer to hide it.
def showChannelLabel(self, channel):
self.log('showChannelLabel ' + str(channel))
if self.channelLabelTimer.isAlive():
self.channelLabelTimer.cancel()
self.channelLabelTimer = threading.Timer(5.0, self.hideChannelLabel)
tmp = self.inputChannel
#self.hideChannelLabel()
self.inputChannel = tmp
curlabel = 0
if channel > 99:
self.channelLabel[curlabel].setImage(IMAGES_LOC + 'label_' + str(channel // 100) + '.png')
self.channelLabel[curlabel].setVisible(True)
curlabel += 1
if channel > 9:
self.channelLabel[curlabel].setImage(IMAGES_LOC + 'label_' + str((channel % 100) // 10) + '.png')
self.channelLabel[curlabel].setVisible(True)
curlabel += 1
self.channelLabel[curlabel].setImage(IMAGES_LOC + 'label_' + str(channel % 10) + '.png')
self.channelLabel[curlabel].setVisible(True)
##ADDED BY SRANSHAFT: USED TO SHOW NEW INFO WINDOW WHEN CHANGING CHANNELS
if self.inputChannel == -1 and self.infoOnChange == True:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.infoOffset = 0
self.showInfo(5.0)
if self.showChannelBug == True:
try:
self.getControl(103).setImage(self.channelLogos + self.channels[self.currentChannel - 1].name + '.png')
except:
pass
##
self.channelLabelTimer.start()
self.log('showChannelLabel return')
# Called from the timer to hide the channel label.
def hideChannelLabel(self):
self.log('hideChannelLabel')
self.channelLabelTimer = threading.Timer(5.0, self.hideChannelLabel)
for i in range(3):
self.channelLabel[i].setVisible(False)
self.inputChannel = -1
self.log('hideChannelLabel return')
def hideInfo(self):
self.getControl(102).setVisible(False)
self.infoOffset = 0
self.showingInfo = False
if self.infoTimer.isAlive():
self.infoTimer.cancel()
self.infoTimer = threading.Timer(5.0, self.hideInfo)
def showInfo(self, timer):
self.getControl(102).setVisible(True)
self.showingInfo = True
self.setShowInfo()
if self.infoTimer.isAlive():
self.infoTimer.cancel()
self.infoTimer = threading.Timer(timer, self.hideInfo)
self.infoTimer.start()
# return a valid channel in the proper range
def fixChannel(self, channel, increasing = True):
while channel < 1 or channel > self.maxChannels:
if channel < 1: channel = self.maxChannels + channel
if channel > self.maxChannels: channel -= self.maxChannels
if increasing:
direction = 1
else:
direction = -1
if self.channels[channel - 1].isValid == False:
return self.fixChannel(channel + direction, increasing)
return channel
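# Editorial sketch: with maxChannels == 10 and every channel valid,
# fixChannel(0) wraps to 10 and fixChannel(11) wraps to 1; if channel 3 is
# invalid, fixChannel(3) returns 4 (or 2 when increasing is False).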
# Handle all input while videos are playing
def onAction(self, act):
action = act.getId()
self.log('onAction ' + str(action))
# Since onAction isn't always called from the same thread (weird),
# ignore all actions while we're in the middle of processing one
self.log("acquiring semaphore")
if self.actionSemaphore.acquire(False) == False:
self.log('onAction: Unable to get semaphore')
return
else:
lastaction = time.time() - self.lastActionTime
# during certain times we just want to discard all input
if lastaction < 2:
# unless it is an exit action
if action == ACTION_STOP:
Globals.userExit = 1
self.log("Exiting because user pressed exit")
#self.end()
else:
self.log('Not allowing actions')
action = ACTION_INVALID
self.log("onAction: startSleepTimer")
self.startSleepTimer()
if action == ACTION_SELECT_ITEM:
# If we're manually typing the channel, set it now
if self.inputChannel > 0:
if self.inputChannel != self.currentChannel:
self.setChannel(self.inputChannel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.inputChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.inputChannel = -1
else:
# Otherwise, show the EPG
if self.sleepTimeValue > 0:
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
self.hideInfo()
self.newChannel = 0
self.myEPG.doModal()
if self.newChannel != 0:
self.background.setVisible(True)
self.setChannel(self.newChannel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
elif action == ACTION_MOVE_UP or action == ACTION_PAGEUP:
self.channelUp()
elif action == ACTION_MOVE_DOWN or action == ACTION_PAGEDOWN:
self.channelDown()
elif action == ACTION_MOVE_LEFT:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
if self.showingInfo:
self.infoOffset -= 1
self.showInfo(10.0)
elif action == ACTION_MOVE_RIGHT:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
if self.showingInfo:
self.infoOffset += 1
self.showInfo(10.0)
elif action == ACTION_PREVIOUS_MENU:
if self.showingInfo:
self.hideInfo()
else:
dlg = xbmcgui.Dialog()
if self.sleepTimeValue > 0:
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
if dlg.yesno("Exit?", "Are you sure you want to exit TV Time?"):
Globals.userExit = 1
self.log("Exiting because user selected yes")
#self.end()
else:
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
self.startSleepTimer()
del dlg
elif action == ACTION_SHOW_INFO:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
if self.showingInfo:
self.hideInfo()
else:
self.showInfo(10.0)
elif action >= ACTION_NUMBER_0 and action <= ACTION_NUMBER_9:
if self.inputChannel < 0:
self.inputChannel = action - ACTION_NUMBER_0
else:
if self.inputChannel < 100:
self.inputChannel = self.inputChannel * 10 + action - ACTION_NUMBER_0
self.showChannelLabel(self.inputChannel)
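# Editorial sketch: pressing 1, 2, 3 grows inputChannel 1 -> 12 -> 123
# (three digits at most); ACTION_SELECT_ITEM then tunes the typed channel
# via the branch at the top of this handler.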
elif action == ACTION_OSD:
xbmc.executebuiltin("ActivateWindow(12901)")
elif action == ACTION_STOP:
Globals.userExit = 1
self.log("Exiting because user pressed exit")
#self.end()
self.log("onAction: releasing semaphore")
self.actionSemaphore.release()
self.log('onAction return')
# Reset the sleep timer
def startSleepTimer(self):
if self.sleepTimeValue == 0:
return
# Cancel the timer if it is still running
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
# resetting sleep time value
self.sleepTimeValue = int(REAL_SETTINGS.getSetting('AutoOff')) * 1800
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
self.sleepTimer.start()
# This is called when the sleep timer expires
def sleepAction(self):
self.log("sleepAction: acquiring semaphore")
# TODO: show some dialog, allow the user to cancel the sleep
# perhaps modify the sleep time based on the current show
self.log("sleepAction: calling end")
self.end()
# cleanup and end
def end(self):
self.log("end")
self.background.setVisible(True)
# guard so the script doesn't call end() twice,
# which it sometimes does for reasons unknown
if Globals.exitingTVTime == 0:
Globals.exitingTVTime = 1
self.log('EXITING TV TIME')
# trigger prestage thread to exit
self.log("end: triggering prestage thread to exit")
Globals.prestageThreadExit = 1
# wait a few seconds to allow script to exit threads, etc.
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Exiting")
self.dlg.update(0,"Exiting TV Time","Please wait...")
time.sleep(3)
# shutdown check timer
self.shutdownTimer = threading.Timer(1, self.checkShutdownFlag)
self.shutdownTimer.start()
try:
if self.shutdownTimer.isAlive():
self.log("shutdownTimer is still alive")
self.shutdownTimer.cancel()
self.log("channelLabelTimer is cancelled")
except:
self.log("error cancelling shutdownTimer")
pass
try:
if self.channelLabelTimer.isAlive():
self.log("channelLabelTimer is still alive")
self.channelLabelTimer.cancel()
self.log("channelLabelTimer is cancelled")
except:
self.log("error cancelling channelLabelTimer")
pass
try:
if self.infoTimer.isAlive():
self.log("infoTimer is still alive")
self.infoTimer.cancel()
self.log("infoTimer is cancelled")
except:
self.log("error cancelling infoTimer")
pass
try:
if self.sleepTimeValue > 0:
if self.sleepTimer.isAlive():
self.log("sleepTimer is still alive")
self.sleepTimer.cancel()
self.log("sleepTimer is cancelled")
except:
self.log("error cancelling sleepTimer")
pass
#if self.autoResetTimer > 0:
try:
if self.autoResetTimer.isAlive():
self.log("autoResetTimer is still alive")
self.autoResetTimer.cancel()
self.log("autoResetTimer is cancelled")
except:
self.log("error cancelling autoResetTimer")
pass
if self.Player.isPlaying():
self.Player.stop()
if self.timeStarted > 0 and int(Globals.channelsReset) == 0:
# for i in range(self.maxChannels):
for i in range(int(REAL_SETTINGS.getSetting("maxChannels"))):
if self.channels[i].isValid:
if self.channels[i].mode & MODE_RESUME == 0:
ADDON_SETTINGS.setSetting('Channel_' + str(i + 1) + '_time', str(int(time.time() - self.timeStarted + self.channels[i].totalTimePlayed)))
else:
tottime = 0
for j in range(self.channels[i].playlistPosition):
tottime += self.channels[i].getItemDuration(j)
tottime += self.channels[i].showTimeOffset
if i == self.currentChannel - 1:
tottime += (time.time() - self.channels[i].lastAccessTime)
ADDON_SETTINGS.setSetting('Channel_' + str(i + 1) + '_time', str(int(tottime)))
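# Editorial sketch: for a MODE_RESUME channel parked 40s into its third
# playlist item, with the first two items lasting 100s each, tottime is
# 100 + 100 + 40, plus the wall time elapsed since last access when it is
# the channel currently on screen.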
ADDON_SETTINGS.writeSettings()
try:
self.log("saving current channel " + str(self.currentChannel))
REAL_SETTINGS.setSetting('CurrentChannel', str(self.currentChannel))
except:
self.log("unable to save current channel " + str(self.currentChannel))
pass
# wait while the settings file is being written;
# settings2.xml wasn't always completely written out
# before the script ended
while int(Globals.savingSettings) == 1:
self.dlg.update(25,"Exiting TV Time","Waiting on settings to be saved...")
pass
self.dlg.update(50,"Exiting TV Time","Please wait...")
time.sleep(3)
self.dlg.close()
ADDON_SETTINGS.setSetting('LastExitTime', str(int(time.time())))
self.background.setVisible(False)
# need to distinguish between user exits and auto shutdown
if int(Globals.userExit) == 0 and REAL_SETTINGS.getSetting("autoChannelResetShutdown") == "true":
#print xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "JSONRPC.Introspect", "id": 1}')
#XBMC.Quit
self.log("Threads - " + str(threading.enumerate()))
self.log("Exiting XBMC")
json_query = '{"jsonrpc": "2.0", "method": "XBMC.Quit", "id": 1}'
xbmc.executeJSONRPC(json_query)
#self.close()
else:
self.log("Threads - " + str(threading.enumerate()))
self.close()
else:
self.log("TVTime already triggered end")
#####################################################
#####################################################
#
# Channel Reset Functions
#
#####################################################
#####################################################
# rebuild filelists
def forceChannelReset(self, channel):
self.log('forceChannelReset: Channel ' + str(channel))
self.channels = []
if channel == "all":
# reset all channels
# only one reset should occur at a time, so guard with a flag
if Globals.forceChannelResetActive == 0:
Globals.forceChannelResetActive = 1
REAL_SETTINGS.setSetting('LastResetTime', str( int ( time.time() ) ) )
# if force reset, delete all cache files
self.channelList.deleteFiles(CHANNELS_LOC)
# if force reset, delete all prestage files
self.channelList.deleteFiles(PRESTAGE_LOC)
# call function to rebuild all channel file lists
self.channelList.buildChannelFileList(CHANNELS_LOC, "all")
# reset finished
Globals.channelsReset = 1
Globals.forceChannelResetActive = 0
else:
pass
else:
# only reset the channel passed
if Globals.forceChannelResetActive == 0:
Globals.forceChannelResetActive = 1
filename = "Channel_" + str(channel) + ".m3u"
REAL_SETTINGS.setSetting('LastResetTime', str(int(time.time())))
# delete cache file
if os.path.exists(os.path.join(CHANNELS_LOC, filename)):
os.remove(os.path.join(CHANNELS_LOC, filename))
# delete prestage files
if os.path.exists(os.path.join(PRESTAGE_LOC, filename)):
os.remove(os.path.join(PRESTAGE_LOC, filename))
# call function to rebuild channel file lists
self.channelList.buildChannelFileList(CHANNELS_LOC, channel)
# reset finished
Globals.channelsReset = 1
Globals.forceChannelResetActive = 0
def resetLiveChannels(self):
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Updating Live Channels")
progressIndicator = 0
self.dlg.update(progressIndicator,"Updating Live Channels")
channel = 0
maxChannels = REAL_SETTINGS.getSetting("maxChannels")
for i in range(int(maxChannels)):
channel = channel + 1
if int(ADDON_SETTINGS.getSetting("Channel_" + str(channel) + "_type")) == 9:
chname = ADDON_SETTINGS.getSetting("Channel_" + str(channel) + "_3")
progressIndicator = (channel * 100) / int(maxChannels)
self.dlg.update(progressIndicator,"Updating Live Channels","Updating Channel " + str(channel) + " - " + str(chname))
self.channelList.buildChannelFileList(CHANNELS_LOC, channel)
self.dlg.close()
# check if auto reset times have expired
def checkAutoChannelReset(self):
needsreset = False
"""
autoChannelResetSetting
values:
0 = automatic
1 = each day
2 = each week
3 = each month
4 = scheduled
"""
autoChannelResetSetting = int(REAL_SETTINGS.getSetting("autoChannelResetSetting"))
if autoChannelResetSetting == "":
autoChannelResetSetting = 0
self.log("autoChannelResetSetting " + str(autoChannelResetSetting))
"""
if autoChannelResetSetting is set to automatic
loop through all channels to get their totalduration and time values
if total time played for the channel is greater than total duration
watched since last auto reset, then set needsreset flag to true
"""
if autoChannelResetSetting == 0:
# need to get channel settings
self.channels = []
needsreset = False
# loop through channel settings to get
# totalTimePlayed
# totalDuration
for i in range(int(REAL_SETTINGS.getSetting("maxChannels"))):
if not ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_offair") == "1":
# need to figure out how to store
totalTimePlayed = ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_time")
if totalTimePlayed == "":
totalTimePlayed = 0
self.log("Channel_" + str(i+1) + "_time " + str(totalTimePlayed))
totalDuration = ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_totalDuration")
if totalDuration == "":
totalDuration = 0
self.log("Channel_" + str(i+1) + "_totalDuration " + str(totalDuration))
if int(totalTimePlayed) > int(totalDuration):
needsreset = True
if needsreset:
REAL_SETTINGS.setSetting('LastResetTime', str(int(time.time())))
elif autoChannelResetSetting > 0 and autoChannelResetSetting < 4: # each day, each week, each month
try:
self.lastResetTime = int(REAL_SETTINGS.getSetting("LastResetTime"))
except:
self.lastResetTime = 0
timedif = time.time() - self.lastResetTime
if int(autoChannelResetSetting) == 1 and timedif > (60 * 60 * 24):
needsreset = True
if int(autoChannelResetSetting) == 2 and timedif > (60 * 60 * 24 * 7):
needsreset = True
if int(autoChannelResetSetting) == 3 and timedif > (60 * 60 * 24 * 30):
needsreset = True
if timedif < 0:
needsreset = True
if needsreset:
REAL_SETTINGS.setSetting('LastResetTime', str(int(time.time())))
elif autoChannelResetSetting == 4: # scheduled
"""
if autoChannelResetSetting = 4,
set next reset date/time,
set timer until next reset date/time,
start auto reset timer
"""
if REAL_SETTINGS.getSetting('nextAutoResetDateTime') == "":
self.setNextAutoResetTime()
elif REAL_SETTINGS.getSetting('nextAutoResetDateTimeInterval') != REAL_SETTINGS.getSetting('autoChannelResetInterval'):
self.setNextAutoResetTime()
elif REAL_SETTINGS.getSetting('nextAutoResetDateTimeResetTime') != REAL_SETTINGS.getSetting('autoChannelResetTime'):
self.setNextAutoResetTime()
# set auto reset timer
self.setAutoResetTimer()
# start auto reset timer
self.startAutoResetTimer()
return needsreset
def setNextAutoResetTime(self):
# set next auto resetChannel time
# need to get current datetime in local time
currentDateTimeTuple = localtime()
# parse out year, month and day so we can compute resetDate
cd = datetime.datetime(*(currentDateTimeTuple[0:6]))
year = cd.strftime('%Y')
month = cd.strftime('%m')
day = cd.strftime('%d')
hour = cd.strftime('%H')
minutes = cd.strftime('%M')
seconds = cd.strftime('%S')
# convert to date object so we can add timedelta in the next step
currentDateTime = year + "-" + month + "-" + day + " " + hour + ":" + minutes + ":" + seconds
currentDateTimeTuple = strptime(currentDateTime,"%Y-%m-%d %H:%M:%S")
currentDate = date(int(year), int(month), int(day))
# need to get setting of when to auto reset
# Daily|Weekly|Monthly
# 0 = Daily
# 1 = Weekly
# 2 = Monthly
# Daily = Current Date + 1 Day
# Weekly = Current Date + 1 Week
# Monthly = Current Date + 1 Month
resetInterval = REAL_SETTINGS.getSetting("autoChannelResetInterval")
# Time to Reset: 12:00am, 1:00am, 2:00am, etc.
# get resetTime setting
resetTime = REAL_SETTINGS.getSetting("autoChannelResetTime")
if resetInterval == "0":
# Daily
interval = timedelta(days=1)
elif resetInterval == "1":
# Weekly
interval = timedelta(days=7)
elif resetInterval == "2":
# Monthly
interval = timedelta(days=30)
# determine resetDate based on current date and interval
if int(resetTime) > int(hour) and resetInterval == "0":
resetDate = currentDate
else:
resetDate = currentDate + interval
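# Editorial sketch: with a daily interval ("0") and resetTime "2", running
# at 23:00 schedules the reset for 02:00 the next day, while running at
# 01:00 keeps it at 02:00 the same day (resetTime still ahead of hour).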
# need to convert to tuple to be able to parse out components
resetDateTuple = strptime(str(resetDate), "%Y-%m-%d")
# parse out year, month, and day
rd = datetime.datetime(*(resetDateTuple[0:3]))
year = rd.strftime('%Y')
month = rd.strftime('%m')
day = rd.strftime('%d')
# set hour, minutes and seconds
hour = resetTime
minutes = 0
seconds = 0
# join components together to form reset date and time
resetDateTime = str(year) + "-" + str(month) + "-" + str(day) + " " + str(hour) + ":" + str(minutes) + ":" + str(seconds)
# save next resetDateTime to settings
REAL_SETTINGS.setSetting('nextAutoResetDateTime', str(resetDateTime))
REAL_SETTINGS.setSetting('nextAutoResetDateTimeInterval', str(resetInterval))
REAL_SETTINGS.setSetting('nextAutoResetDateTimeResetTime', str(resetTime))
def setAutoResetTimer(self):
# set next auto resetChannel time
# need to get current datetime in local time
currentDateTimeTuple = localtime()
nextAutoResetDateTime = REAL_SETTINGS.getSetting('nextAutoResetDateTime')
nextAutoResetDateTimeTuple = strptime(nextAutoResetDateTime,"%Y-%m-%d %H:%M:%S")
# need to get difference between the two
self.autoResetTimeValue = mktime(nextAutoResetDateTimeTuple) - mktime(currentDateTimeTuple)
self.log("Next auto reset will occur in " + str(self.autoResetTimeValue) + " seconds")
# set timer
self.autoResetTimer = threading.Timer(self.autoResetTimeValue, self.autoChannelReset)
# Reset the sleep timer
def startAutoResetTimer(self):
if self.autoResetTimeValue == 0:
return
# Cancel the auto reset timer if it is still running
if self.autoResetTimer.isAlive():
self.autoResetTimer.cancel()
self.autoResetTimer = threading.Timer(self.autoResetTimeValue, self.autoChannelReset)
self.autoResetTimer.start()
def autoChannelReset(self):
self.log("autoChannelReset")
# need to allow user to abort the channel reset
self.resetDialog = xbmcgui.DialogProgress()
self.resetDialog.create("TV Time", "Preparing for Auto Channel Reset")
self.resetDialog.update(0, "Preparing for Auto Channel Reset")
if self.resetDialog.iscanceled():
self.log("autoResetChannels: auto channel reset Cancelled")
self.resetDialog.close()
return False
progressPercentage = 0
for count in self.countdown(10):
progressPercentage = progressPercentage + 10
self.resetDialog.update(progressPercentage, "Preparing for Auto Channel Reset")
self.resetDialog.close()
if not self.resetDialog.iscanceled():
if Globals.autoResetChannelActive == 0:
# trigger prestage thread to exit
Globals.prestageThreadExit = 1
# block any attempt to run concurrent auto channel resets
Globals.autoResetChannelActive = 1
self.log("autoChannelReset: reset started")
# reset started
REAL_SETTINGS.setSetting('LastResetTime', str( int ( time.time() ) ) )
# delete previous files in the cache
self.log("autoChannelReset: delete previous files in the cache")
self.channelList.deleteFiles(CHANNELS_LOC)
# copy pre-staged channel file lists to cache
self.log("autoChannelReset: copying prestaged files to the cache")
self.channelList.copyFiles(PRESTAGE_LOC, CHANNELS_LOC)
# reset next auto reset time
self.setNextAutoResetTime()
try:
if self.channelLabelTimer.isAlive():
self.channelLabelTimer.cancel()
if self.infoTimer.isAlive():
self.infoTimer.cancel()
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
if self.autoResetTimer.isAlive():
self.autoResetTimer.cancel()
except:
pass
if xbmc.Player().isPlaying():
xbmc.Player().stop()
# reset channel times
if self.timeStarted > 0:
for i in range(int(REAL_SETTINGS.getSetting("maxChannels"))):
if self.channels[i].isValid:
#ADDON_SETTINGS.setSetting('Channel_' + str(i + 1) + '_time', str(int(time() - self.timeStarted + self.channels[i].totalTimePlayed)))
channel = i + 1
ADDON_SETTINGS.setSetting("Channel_" + str(channel) + "_time","0")
totalDuration = self.channelList.getTotalDuration(channel,CHANNELS_LOC)
ADDON_SETTINGS.setSetting("Channel_" + str(channel) + "_totalDuration",str(totalDuration))
try:
ADDON_SETTINGS.setSetting('CurrentChannel', str(self.currentChannel))
except:
pass
ADDON_SETTINGS.writeSettings()
Globals.channelsReset = 1
Globals.autoResetChannelActive = 0
# need to find the right way to reinitialize the script
# reload channels
# update EPC and restart
autoChannelResetSetting = int(REAL_SETTINGS.getSetting("autoChannelResetSetting"))
if autoChannelResetSetting > 0 and autoChannelResetSetting < 5:
if REAL_SETTINGS.getSetting("autoChannelResetShutdown") == "false":
self.log("Restarting TV Time")
self.__init__()
else:
self.log("Exiting because auto channel reset shutdown")
self.end()
#####################################################
#####################################################
#
# Utility Functions
#
#####################################################
#####################################################
def log(self, msg, level = xbmc.LOGDEBUG):
log('TVOverlay: ' + msg, level)
def createDirectory(self, directory):
if not os.path.exists(directory):
try:
os.makedirs(directory)
except:
self.Error('Unable to create the directory - ' + str(directory))
return
# handle fatal errors: log it, show the dialog, and exit
def Error(self, message):
self.log('FATAL ERROR: ' + message, xbmc.LOGFATAL)
dlg = xbmcgui.Dialog()
dlg.ok('Error', message)
del dlg
self.log("Error: calling end")
self.end()
def message(self, data):
log('Dialog message: ' + data)
dlg = xbmcgui.Dialog()
dlg.ok('Info', data)
del dlg
def countdown(self, secs, interval=1):
while secs > 0:
yield secs
secs = secs - 1
sleep(interval)
|
gpl-3.0
| 426,476,372,236,932,100
| 43.739074
| 240
| 0.563534
| false
| 4.424553
| false
| false
| false
|
maxpinto/Ptz
|
bootcamp/auth/forms.py
|
1
|
3750
|
from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from bootcamp.settings import ALLOWED_SIGNUP_DOMAINS
def Validardominio(value):
if '*' not in ALLOWED_SIGNUP_DOMAINS:
try:
dominio = value[value.index("@"):]
if dominio not in ALLOWED_SIGNUP_DOMAINS:
raise ValidationError(u'Dominio no valido, esta aplicacion actualmente es solo para direcciones de correo especificas {0}'.format(','.join(ALLOWED_SIGNUP_DOMAINS)))
except Exception, e:
raise ValidationError(u'Dominio no valido {0}'.format(','.join(ALLOWED_SIGNUP_DOMAINS)))
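# Editorial sketch: with ALLOWED_SIGNUP_DOMAINS = ['@example.com']
# (hypothetical), 'alice@example.com' validates while 'bob@gmail.com'
# raises ValidationError; a '*' entry in the list disables the check.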
def Palabras_reservadas(value):
listado = ['admin', 'settings', 'news', 'about', 'help', 'signin', 'signup',
'signout', 'terms', 'privacy', 'cookie', 'new', 'login', 'logout', 'administrator',
'join', 'account', 'username', 'root', 'blog', 'user', 'users', 'billing', 'subscribe',
'reviews', 'review', 'blog', 'blogs', 'edit', 'mail', 'email', 'home', 'job', 'jobs',
'contribute', 'newsletter', 'shop', 'profile', 'register', 'auth', 'authentication',
'campaign', 'config', 'delete', 'remove', 'forum', 'forums', 'download', 'downloads',
'contact', 'blogs', 'feed', 'feeds', 'faq', 'intranet', 'log', 'registration', 'search',
'explore', 'rss', 'support', 'status', 'static', 'media', 'setting', 'css', 'js',
'follow', 'activity', 'questions', 'articles', 'network',]
if value.lower() in listado:
raise ValidationError('Esta es una palabra Reservada')
def Usuario_no_valido(value):
if '@' in value or '+' in value or '-' in value:
raise ValidationError('Introduce un nombre de usuario valido')
def Registro_unico_correo(value):
if User.objects.filter(email__iexact=value).exists():
raise ValidationError('Esta direccion ya se encuentra registrada')
def Registro_unico_usuario(value):
if User.objects.filter(username__iexact=value).exists():
raise ValidationError('Ya existe un usuario con este nombre')
class SignUpForm(forms.ModelForm):
username = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}),
max_length=30,
required=True,
help_text='El nombre de usuario puede contener <strong>Alfanumericos</strong>, <strong>_</strong> y <strong>.</strong> caracteres')
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control'}))
confirm_password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control'}),
label="Confirm your password",
required=True)
email = forms.CharField(widget=forms.EmailInput(attrs={'class':'form-control'}),
required=True,
max_length=75)
class Meta:
model = User
exclude = ['last_login', 'date_joined']
fields = ['username', 'email', 'password', 'confirm_password',]
def __init__(self, *args, **kwargs):
super(SignUpForm, self).__init__(*args, **kwargs)
self.fields['username'].validators.append(Palabras_reservadas)
self.fields['username'].validators.append(Usuario_no_valido)
self.fields['username'].validators.append(Registro_unico_usuario)
self.fields['email'].validators.append(Registro_unico_correo)
self.fields['email'].validators.append(Validardominio)
def clean(self):
super(SignUpForm, self).clean()
password = self.cleaned_data.get('password')
confirm_password = self.cleaned_data.get('confirm_password')
if password and password != confirm_password:
self._errors['password'] = self.error_class(['Passwords no coinciden'])
return self.cleaned_data
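# Editorial sketch (not part of the original module): typical use from a
# hypothetical signup view.
#
# form = SignUpForm(request.POST)
# if form.is_valid():
#     user = form.save(commit=False)
#     user.set_password(form.cleaned_data['password'])
#     user.save()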
|
mit
| 6,717,397,107,873,970,000
| 51.097222
| 180
| 0.652267
| false
| 3.571429
| false
| false
| false
|
ryankynor/Hello-friend
|
hellofriend.py
|
1
|
1070
|
"""
hellofriend.py
Author: Ryan Kynor
Credit:
http://stackoverflow.com/questions/19664840/typeerror-cant-convert-float-object-to-str-implicitly
Milo
Assignment:
Write and submit an interactive Python program that asks for the user's name and age,
then prints how much older Python is than the user (based on a simple comparison of
birth year). Python's first public release occurred in 1991. Something like this:
Please tell me your name: Guido
Please tell me your age: 16
Hello, Guido. Python is 8 years older than you are!
Note that the text: "Guido" and "16" are entered by the user running the program.
The final line ("Hello...") is generated dynamically when you run the program, based
on the name and age that the user enters.
"""
name = input("Please tell me your name: ")
age = input("Please tell me your age: ")
# Python's first public release was in 1991; the hard-coded 24 assumes
# the program is run in 2015 (2015 - 1991 = 24)
x = int(age)
y = 24 - x
print("Hello, {0}. Python is {1} years older than you are!".format(name, y))
#s1 = "You are {0} years old."
#s2 = "pythin is {y} years older than you."
#print(int) s1.format((age))
#print(s2.format(age + 5))
|
mit
| -7,764,921,810,251,109,000
| 33.548387
| 97
| 0.729907
| false
| 3.222892
| false
| false
| false
|
tompecina/legal
|
legal/hjp/urls.py
|
1
|
1385
|
# -*- coding: utf-8 -*-
#
# hjp/urls.py
#
# Copyright (C) 2011-19 Tomáš Pecina <tomas@pecina.cz>
#
# This file is part of legal.pecina.cz, a web-based toolbox for lawyers.
#
# This application is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.conf.urls import url
from legal.common.views import genrender
from legal.hjp.views import mainpage, transform, transdel
urlpatterns = [
url(r'^$', mainpage, name='mainpage'),
url(r'^transform/(\d+)/$', transform, name='transform'),
url(r'^transform/$', transform, name='transform'),
url(r'^transdel/(\d+)/$', transdel, name='transdel'),
url(r'^transdeleted/$',
genrender,
kwargs={
'template': 'hjp_transdeleted.xhtml',
'page_title': 'Smazání transakce'},
name='transdeleted'),
]
|
gpl-3.0
| -6,588,313,614,671,599,000
| 33.525
| 72
| 0.692976
| false
| 3.596354
| false
| false
| false
|
dsalazarr/pfc_ii
|
pfc/pfc/applications/models.py
|
1
|
2347
|
from __future__ import unicode_literals
from django.db import models
from django.conf import settings
from oauth2_provider.models import Application as ApplicationModel, AccessToken as AccessTokenModel
from pfc.users.models import Company, User
class ApplicationConfig(models.Model):
id = models.AutoField(primary_key=True)
application = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL)
key = models.CharField(max_length=255, null=False)
value = models.CharField(max_length=255, null=False)
class Meta:
unique_together = ('application', 'key')
class License(models.Model):
LICENSE_TYPES = (
('DAY', 'DAY'),
('MONTH', 'MONTH'),
('YEAR', 'YEAR'),
)
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=256)
type = models.CharField(max_length=15, choices=LICENSE_TYPES)
max_users = models.IntegerField("Maximum number of users")
duration_days = models.IntegerField("Duration days of the license")
def __str__(self):
return self.name
class CompanyApplicationLicense(models.Model):
company = models.ForeignKey(Company, related_name='licenses')
license = models.ForeignKey(License)
application = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL)
active = models.BooleanField(default=True)
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True)
def __str__(self):
return "%s %s" % (self.application, self.license)
class UserApplicationLicense(models.Model):
user = models.ForeignKey(User, related_name='licenses')
company_license = models.ForeignKey(CompanyApplicationLicense)
class Meta:
unique_together = (
('user', 'company_license')
)
class Permission(models.Model):
application = models.ForeignKey(ApplicationModel)
id = models.AutoField(primary_key=True)
codename = models.CharField(max_length=50)
name = models.CharField(max_length=256)
class Meta:
unique_together = (
('application', 'codename')
)
def __str__(self):
return "{} | {}".format(self.application.name, self.name)
class Application(ApplicationModel):
class Meta:
proxy = True
class AccessToken(AccessTokenModel):
class Meta:
proxy = True
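# Editorial sketch (hypothetical helper, not part of the original module):
# checking whether a user holds an active license for an application.
#
# def has_active_license(user, application):
#     return UserApplicationLicense.objects.filter(
#         user=user,
#         company_license__application=application,
#         company_license__active=True,
#     ).exists()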
|
gpl-3.0
| -8,329,098,107,580,959,000
| 27.621951
| 99
| 0.683852
| false
| 4.074653
| false
| false
| false
|
larlequin/CleanMyBib
|
CleanMyBib/CleanMyBib_Qt.py
|
1
|
12400
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys, os
import platform
import re
import csv
from PyQt4 import QtGui
from PyQt4 import QtCore
from CleanMyBib import CleanFileBib
from pybtex.database.input import bibtex
__version__ = "4.0.0"
# Allow to read the picture files in pyinstaller
datadir = ".img"
if not hasattr(sys, "frozen"): # not packed
datadir = os.path.join(os.path.dirname(__file__), datadir)
elif "_MEIPASS2" in os.environ: # one-file temp's directory
datadir = os.path.join(os.environ["_MEIPASS2"], datadir)
else: # one-dir
datadir = os.path.join(os.path.dirname(sys.argv[0]), datadir)
# --------------------------------------------------------------
# GRAPHICAL INTERFACE FOR CLEAN MY BIB
# --------------------------------------------------------------
class MainWindow(QtGui.QMainWindow):
def __init__(self):
""" Define the main widgets and options
"""
super(MainWindow, self).__init__()
# Default fields to keep in the cleaned file
self.chx = ['abstract','author','year','title','booktitle', 'journal',\
'pages', 'volume', 'editor','publisher','address']
# Create the main frame to handle the widgets
self.mainWidget=QtGui.QWidget(self)
self.setCentralWidget(self.mainWidget)
self.grid = QtGui.QGridLayout(self.mainWidget) # Define a grid
self.setLayout(self.grid)
# Create a status bar
self.status = self.statusBar()
self.status.showMessage("Ready", 5000)
# Call the menu, options and status bar
self.menu()
self.style_block()
self.bibFile()
self.statusBib()
# Define the main window size and name
self.setWindowTitle('Clean My Bib')
self.show()
def menu(self):
""" Define the action in the Menu and Toolbar
"""
# Options
options = QtGui.QAction(QtGui.QIcon('opt.jpeg'), 'Options', self)
options.setShortcut('Ctrl+O')
options.setStatusTip('Change the fields to ignore')
options.triggered.connect(self.Opts)
# Exit
exitAction = QtGui.QAction(QtGui.QIcon('exit2.jpeg'), 'Exit', self)
exitAction.setShortcut('Ctrl+Q')
exitAction.setStatusTip('Exit application')
exitAction.triggered.connect(self.close)
# About
aboutAction = QtGui.QAction(QtGui.QIcon('about.jpeg'), 'About', self)
aboutAction.setStatusTip('About Clean My Bib')
aboutAction.triggered.connect(self.about)
# Fill the Menu
menubar = self.menuBar()
mainMenu = menubar.addMenu('&Menu')
mainMenu.addAction(options)
mainMenu.addAction(aboutAction)
mainMenu.addAction(exitAction)
def style_block(self):
""" Define the section of the GUI dedicated to the style format
User can choose between the 'Long' and 'Short' style for the journal
name and the page numbers.
An option is provided to add or not the DOIs
"""
# Create a ComboBox to select the journal name's style
cx_journal_style = QtGui.QComboBox(self)
list_style = [" Long format", "Short format"]
cx_journal_style.addItems(list_style)
cx_journal_style.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContentsOnFirstShow)
# Create a ComboBox to select the page numbers' style
cx_pages_style = QtGui.QComboBox(self)
cx_pages_style.addItems(list_style)
cx_pages_style.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContentsOnFirstShow)
# Create a checkbox for the DOIs
self.add_doi = QtGui.QCheckBox("Add DOIs")
# Define some Logo and Labels to display information to the user
logo1 = QtGui.QLabel(self)
icon1 = QtGui.QPixmap(datadir+"/nb1.png")
logo1.setPixmap(icon1)
first_step = QtGui.QLabel("<b><font color ='darkblue'><h2> \
Define a style</b></h2>")
lab_style = QtGui.QLabel("<b>Journal</b>", self)
lab_format = QtGui.QLabel("<b>Pages</b>", self)
lab_example = QtGui.QLabel("<b>DOI</b>", self)
self.lab_empty = QtGui.QLabel("", self)
self.lab_empty.setMinimumSize(90, 25)
# Place the widgets on the grid.
self.grid.addWidget(logo1, 0, 1, 1, 2)
self.grid.addWidget(first_step, 0, 1, 1, 2)
self.grid.addWidget(lab_style, 2, 1)
self.grid.addWidget(cx_journal_style, 2, 2)
self.grid.addWidget(lab_format, 3, 1)
self.grid.addWidget(lab_example, 4, 1)
self.grid.addWidget(cx_pages_style, 3, 2)
self.grid.addWidget(self.add_doi, 4, 2)
# Control the style and the choice done
self.journal_style = "long"
self.pages_style = "long"
cx_journal_style.activated[int].connect(self.styleJournal)
cx_pages_style.activated[int].connect(self.stylePages)
self.mainWidget.connect(self.add_doi,
QtCore.SIGNAL('stateChanged(int)'), self.doi)
def doi(self):
""" Add and remove the doi fields in the list of fields to keep
"""
if self.add_doi.isChecked():
self.chx.append("doi")
else:
if "doi" in self.chx:
self.chx.remove("doi")
def styleJournal(self, style):
if style == 0:
self.journal_style = "long"
else:
self.journal_style = "short"
def stylePages(self, style):
if style == 0:
self.pages_style = "long"
else:
self.pages_style = "short"
def bibFile(self):
""" GUI section to receive a dropped file
And take it as the bib file to clean
"""
self.setAcceptDrops(True)
# Define a picture where to drop the file
self.dropIcon = QtGui.QLabel(self)
dragdrop = QtGui.QPixmap(datadir+"/drop.png")
self.dropIcon.setPixmap(dragdrop)
self.dropIcon.setAlignment(QtCore.Qt.AlignCenter)
# Define some Logo and Labels to display information to the user
logo2 = QtGui.QLabel(self)
icon2 = QtGui.QPixmap(datadir+"/nb2.png")
logo2.setPixmap(icon2)
second_step = QtGui.QLabel("<b><font color ='darkblue'><h2> \
Bibtex file</b></h2>")
lab_drop = QtGui.QLabel("<b><h3>Drop a bib file here</b></h3>", self)
lab_drop.setAlignment(QtCore.Qt.AlignCenter)
# Place the widgets on the grid
self.grid.addWidget(self.lab_empty, 2, 3, 1, 5) # Add an empty column
self.grid.addWidget(self.lab_empty, 2, 4, 1, 5) # Add an empty column
self.grid.addWidget(logo2, 0, 6, 1, 2)
self.grid.addWidget(second_step, 0, 6, 1, 2)
self.grid.addWidget(self.dropIcon, 2, 6, 2, 3)
self.grid.addWidget(lab_drop, 4, 6, 1, 3)
def dragEnterEvent(self, event):
if event.mimeData().hasUrls:
event.accept()
else:
event.ignore()
def dropEvent(self, event):
""" Extract the path of the dropped file
Call the CleanMyBib script and update the status bar
"""
for url in event.mimeData().urls():
path = url.toLocalFile().toLocal8Bit().data()
if os.path.isfile(path):
# Extract the path and open the cleaned file
rep, name = os.path.split(path)
name_bibOk = 'Cleaned_'+name
fileBibOK = open(os.path.join(rep, name_bibOk), 'w')
# Update the status bar
self.status.showMessage("File to clean: "+name, 5000)
# Prepare the fields to keep in the final file
fields = []
for item in self.chx:
fields.append(item.lower())
try:
CleanFileBib(path, fileBibOK, fields, self.journal_style, self.pages_style)
self.statusClean.setText("File cleaned successfully!")
icon4 = QtGui.QPixmap(datadir+"/success.png")
self.waitLogo.setPixmap(icon4)
self.status.showMessage("Drop another file", 5000)
except:
self.statusClean.setText("An error has occurred.\
\nPlease check your bibtex file\nand the log file")
icon5 = QtGui.QPixmap(datadir+"/error.png")
self.waitLogo.setPixmap(icon5)
fileBibOK.close()
def statusBib(self):
""" The third panel of the main frame is used to display the current
status of the file to be cleaned
"""
logo3 = QtGui.QLabel(self)
icon3 = QtGui.QPixmap(datadir+"/nb3.png")
logo3.setPixmap(icon3)
third_step = QtGui.QLabel("<b><font color ='darkblue'><h2> \
Clean my bib...</b></h2>")
self.statusClean = QtGui.QLabel("<i> <BR>Ready to receive <BR>a bibtex file</i>")
self.statusClean.setAlignment(QtCore.Qt.AlignCenter)
self.waitLogo = QtGui.QLabel(self)
self.icon4 = QtGui.QPixmap(datadir+"/wait.png")
self.waitLogo.setPixmap(self.icon4)
self.waitLogo.setAlignment(QtCore.Qt.AlignCenter)
# Display the widgets on the grid
self.grid.addWidget(logo3, 0, 12, 1, 2)
self.grid.addWidget(third_step, 0, 12, 1, 2)
self.grid.addWidget(self.statusClean, 1, 12, 2, 3)
self.grid.addWidget(self.waitLogo, 3, 12, 1, 2)
def about(self):
QtGui.QMessageBox.about(self, "About Clean My Bib",
"""<b>Clean My Bib</b> v %s
<p><b>Licence:</b> GPLv3 by GT Vallet
<p>This application can be used to prepare a bibtex
file to prepare the journal name and page numbers format into short or
long forms.
<p>Python %s - on %s""" % (
__version__, platform.python_version(), platform.system()))
def Opts(self):
""" Option panel to add/remove key words defining the fields
to add in the cleaned bibtex file
"""
opt = QtGui.QDialog(self)
opt.setWindowTitle('Options -- Fields to keep')
self.listOpt = QtGui.QListWidget(opt)
for item in sorted(self.chx):
self.listOpt.addItem(item.capitalize())
# Define the buttons
AddBt = QtGui.QPushButton('Add', opt)
RemBt = QtGui.QPushButton('Remove', opt)
QtBt = QtGui.QPushButton('Quit', opt)
Cl_Bt = QtGui.QPushButton('Cancel', opt)
# Define the action associated to the buttons
RemBt.clicked.connect(self.RemoveField)
AddBt.clicked.connect(self.Add)
Cl_Bt.clicked.connect(opt.close)
QtBt.clicked.connect(opt.close)
QtBt.clicked.connect(self.UpList)
# Place the widgets on the grid
grid_opt = QtGui.QGridLayout()
grid_opt.addWidget(self.listOpt, 0, 0, 5, 3)
grid_opt.addWidget(AddBt, 0, 3)
grid_opt.addWidget(RemBt, 1, 3)
grid_opt.addWidget(QtBt, 5, 3)
grid_opt.addWidget(Cl_Bt, 5, 2)
# Show the option window
opt.setLayout(grid_opt)
opt.show()
def Add(self):
""" Add a new field to the list
"""
text, ok = QtGui.QInputDialog.getText(self, 'Input Dialog',
'Add a field:')
if ok:
self.listOpt.addItem(str(text))
self.listOpt.sortItems(order = QtCore.Qt.AscendingOrder)
def RemoveField(self):
""" Remove a field for the list
"""
index = self.listOpt.currentRow()
self.listOpt.takeItem(index)
self.listOpt.sortItems(order = QtCore.Qt.AscendingOrder)
def UpList(self):
""" Finally update the list of field to send back to the program
"""
self.chx = []
for index in xrange(self.listOpt.count()):
self.chx.append(str(self.listOpt.item(index).text()))
# --------------------------------------------------------------
# START THE APPLICATION
# --------------------------------------------------------------
def main():
"""Define the main application
Calling the UI
"""
app = QtGui.QApplication(sys.argv)
ex = MainWindow()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
gpl-3.0
| 7,906,700,085,643,785,000
| 37.509317
| 95
| 0.575081
| false
| 3.831891
| false
| false
| false
|
KhronosGroup/COLLADA-CTS
|
Core/Gui/Dialog/FOpenDialog.py
|
1
|
6196
|
# Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
import os
import os.path
import wx
from Core.Common.FConstants import *
class FOpenDialog(wx.Dialog):
__DIALOG_TITLE = "Open Test Procedure"
def __init__(self, parent):
wx.Dialog.__init__(self, parent, wx.ID_ANY, FOpenDialog.__DIALOG_TITLE)
self.__ID_OK = wx.NewId()
self.__ID_CANCEL = wx.NewId()
self.__ID_PROCEDURE = wx.NewId()
self.__ID_PROCEDURE = wx.NewId()
self.__commentsCtrl = None
self.__proceduresCtrl = None
outterSizer = wx.BoxSizer(wx.VERTICAL)
self.SetSizer(outterSizer)
procedureSizer = self.__GetProcedureSizer()
commentSizer = self.__GetCommentsSizer()
bottomSizer = self.__GetBottomSizer()
outterSizer.Add(procedureSizer, 0, wx.EXPAND | wx.ALL, 5)
outterSizer.Add(commentSizer, 0, wx.EXPAND | wx.ALL, 5)
outterSizer.Add(bottomSizer, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.Fit()
def GetPath(self):
selection = self.__proceduresCtrl.GetStringSelection()
if (selection == ""): return None
return os.path.abspath(os.path.join(
RUNS_FOLDER, self.__proceduresCtrl.GetStringSelection(),
TEST_PROCEDURE_FILENAME))
def __OnOk(self, e):
if (self.__proceduresCtrl.GetStringSelection() == ""): return
if (self.IsModal()):
self.EndModal(wx.ID_OK)
else:
self.SetReturnCode(wx.ID_OK)
self.Show(False)
def __OnCancel(self, e):
if (self.IsModal()):
self.EndModal(wx.ID_CANCEL)
else:
self.SetReturnCode(wx.ID_CANCEL)
self.Show(False)
def __OnClick(self, e):
file = os.path.join(RUNS_FOLDER,
self.__proceduresCtrl.GetStringSelection(),
TEST_PROCEDURE_COMMENTS)
comments = ""
if (os.path.isfile(file)):
f = open(file)
line = f.readline()
while (line):
comments = comments + line
line = f.readline()
f.close()
self.__commentsCtrl.SetValue(comments)
def __OnDClick(self, e):
self.__OnOk(e)
def __GetProcedureSizer(self):
"""Retuns the Sizer used to display test procedures."""
staticBox = wx.StaticBox(self, wx.ID_ANY, "Available Test Procedures")
sizer = wx.StaticBoxSizer(staticBox, wx.HORIZONTAL)
choices = []
if (os.path.isdir(RUNS_FOLDER)):
for entry in os.listdir(RUNS_FOLDER):
if (os.path.isfile(os.path.join(
RUNS_FOLDER, entry, TEST_PROCEDURE_FILENAME))):
choices.append(entry)
self.__proceduresCtrl = wx.ListBox(self, self.__ID_PROCEDURE,
size = wx.Size(300, 140), choices = choices,
style = wx.LB_SINGLE | wx.LB_SORT)
self.Bind(wx.EVT_LISTBOX, self.__OnClick, self.__proceduresCtrl,
self.__ID_PROCEDURE)
self.Bind(wx.EVT_LISTBOX_DCLICK, self.__OnDClick,
self.__proceduresCtrl, self.__ID_PROCEDURE)
sizer.Add(self.__proceduresCtrl, 1, wx.EXPAND | wx.ALL, 5)
return sizer
def __GetCommentsSizer(self):
"""Returns the Sizer used for comments."""
staticBox = wx.StaticBox(self, wx.ID_ANY, "Test Procedure Comments")
sizer = wx.StaticBoxSizer(staticBox, wx.HORIZONTAL)
self.__commentsCtrl = wx.TextCtrl(self, wx.ID_ANY, "",
size = wx.Size(300, 60),
style = wx.TE_MULTILINE | wx.TE_READONLY)
self.__commentsCtrl.SetBackgroundColour(
wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE))
sizer.Add(self.__commentsCtrl, 1, wx.EXPAND | wx.ALL, 5)
return sizer
def __GetBottomSizer(self):
"""Returns the Sizer used to confirm or cancel this dialog."""
okButton = wx.Button(self, self.__ID_OK, "Ok")
wx.EVT_BUTTON(self, self.__ID_OK, self.__OnOk)
cancelButton = wx.Button(self, self.__ID_CANCEL, "Cancel")
wx.EVT_BUTTON(self, self.__ID_CANCEL, self.__OnCancel)
bottomSizer = wx.BoxSizer(wx.HORIZONTAL)
bottomSizer.Add(okButton, 0, wx.ALIGN_LEFT)
bottomSizer.Add(cancelButton, 0, wx.ALIGN_RIGHT)
return bottomSizer
# Used to start up this dialog without the entire application.
##class MainFrame(wx.MDIParentFrame):
## def __init__(self, parent, id, title):
## wx.MDIParentFrame.__init__(self, parent, id, title, size = (600, 480),
## style = wx.DEFAULT_FRAME_STYLE | wx.NO_FULL_REPAINT_ON_RESIZE)
##
## dialog = FOpenDialog(self)
## if (dialog.ShowModal() == wx.ID_OK):
## print dialog.GetPath()
## print "ok"
## else:
## print "cancelled"
##
##app = wx.PySimpleApp()
##frame = MainFrame(None,-1, "Test")
##app.MainLoop()
|
mit
| -3,829,923,993,941,251,600
| 40.864865
| 466
| 0.598612
| false
| 3.824691
| true
| false
| false
|
deepmind/distrax
|
distrax/_src/bijectors/tfp_compatible_bijector.py
|
1
|
7577
|
# Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrapper to adapt a Distrax bijector for use in TFP."""
from typing import Any, Optional
import chex
from distrax._src.bijectors import bijector
from distrax._src.utils import math
import jax
import jax.numpy as jnp
from tensorflow_probability.substrates import jax as tfp
tfb = tfp.bijectors
tfd = tfp.distributions
Array = chex.Array
Bijector = bijector.Bijector
def tfp_compatible_bijector(
base_bijector: Bijector,
name: Optional[str] = None):
"""Create a TFP-compatible bijector from a Distrax bijector.
Given a Distrax bijector, return a wrapped bijector that behaves as a TFP
bijector, to be used in TFP meta-bijectors and the TransformedDistribution.
In particular, the wrapped bijector implements the methods
`[forward|inverse]_event_ndims`, `[forward|inverse]_event_shape`,
`[forward|inverse]_event_shape_tensor`, `[forward|inverse]_log_det_jacobian`,
and the properties `[forward|inverse]_min_event_ndims`. Other attributes are
delegated to the `base_bijector`.
The methods of the resulting object do not take a `name` argument,
unlike their TFP equivalents.
The `shape` methods are implemented by tracing the `forward` and `inverse`
methods of the bijector, applied to a zero tensor of the requested dtype. If
the `forward` or `inverse` methods are not traceable or cannot be applied to a
zero tensor, then we cannot guarantee the correctness of the result.
Args:
base_bijector: A Distrax bijector.
name: The bijector name.
Returns:
An object that behaves like a TFP bijector.
"""
name_ = name
class TFPCompatibleBijector(base_bijector.__class__):
"""Class to wrap a Distrax bijector."""
def __init__(self):
self._is_injective = True
self._is_permutation = False
self._parts_interact = False
self.dtype = None
self.has_static_min_event_ndims = True
self.forward_min_event_ndims = base_bijector.event_ndims_in
self.inverse_min_event_ndims = base_bijector.event_ndims_out
def __getattr__(self, name: str):
return getattr(base_bijector, name)
def forward_and_log_det(self, x: Array) -> Array:
"""See `Bijector.forward_and_log_det`."""
return base_bijector.forward_and_log_det(x)
@property
def name(self) -> str:
"""The name of the wrapped bijector."""
return name_ or f"TFPCompatible{base_bijector.name}"
def experimental_batch_shape(self, x_event_ndims=None, y_event_ndims=None):
raise NotImplementedError()
def experimental_batch_shape_tensor(
self, x_event_ndims=None, y_event_ndims=None):
raise NotImplementedError()
def forward_dtype(self, _: jnp.dtype) -> None:
"""Returns None, making no promise regarding dtypes."""
return None
def inverse_dtype(self, _: jnp.dtype) -> None:
"""Returns None, making no promise regarding dtypes."""
return None
def forward_event_ndims(self, event_ndims: int) -> int:
"""Returns the number of event dimensions of the output of `forward`."""
extra_event_ndims = self._check_ndims(
"Forward", event_ndims, base_bijector.event_ndims_in)
return base_bijector.event_ndims_out + extra_event_ndims
def inverse_event_ndims(self, event_ndims: int) -> int:
"""Returns the number of event dimensions of the output of `inverse`."""
extra_event_ndims = self._check_ndims(
"Inverse", event_ndims, base_bijector.event_ndims_out)
return base_bijector.event_ndims_in + extra_event_ndims
def forward_event_shape(self, event_shape) -> tfp.tf2jax.TensorShape:
"""Returns the shape of the output of `forward` as a `TensorShape`."""
self._check_shape("Forward", event_shape, base_bijector.event_ndims_in)
forward_event_shape = jax.eval_shape(
base_bijector.forward, jnp.zeros(event_shape)).shape
return tfp.tf2jax.TensorShape(forward_event_shape)
def inverse_event_shape(self, event_shape) -> tfp.tf2jax.TensorShape:
"""Returns the shape of the output of `inverse` as a `TensorShape`."""
self._check_shape("Inverse", event_shape, base_bijector.event_ndims_out)
inverse_event_shape = jax.eval_shape(
base_bijector.inverse, jnp.zeros(event_shape)).shape
return tfp.tf2jax.TensorShape(inverse_event_shape)
def forward_event_shape_tensor(self, event_shape) -> Array:
"""Returns the shape of the output of `forward` as a `jnp.array`."""
self._check_shape("Forward", event_shape, base_bijector.event_ndims_in)
forward_event_shape = jax.eval_shape(
base_bijector.forward, jnp.zeros(event_shape)).shape
return jnp.array(forward_event_shape, dtype=jnp.int32)
def inverse_event_shape_tensor(self, event_shape) -> Array:
"""Returns the shape of the output of `inverse` as a `jnp.array`."""
self._check_shape("Inverse", event_shape, base_bijector.event_ndims_out)
inverse_event_shape = jax.eval_shape(
base_bijector.inverse, jnp.zeros(event_shape)).shape
return jnp.array(inverse_event_shape, dtype=jnp.int32)
def forward_log_det_jacobian(
self, x: Array, event_ndims: Optional[int] = None) -> Array:
"""See `Bijector.forward_log_det_jacobian`."""
extra_event_ndims = self._check_ndims(
"Forward", event_ndims, base_bijector.event_ndims_in)
fldj = base_bijector.forward_log_det_jacobian(x)
return math.sum_last(fldj, extra_event_ndims)
def inverse_log_det_jacobian(
self, y: Array, event_ndims: Optional[int] = None) -> Array:
"""See `Bijector.inverse_log_det_jacobian`."""
extra_event_ndims = self._check_ndims(
"Inverse", event_ndims, base_bijector.event_ndims_out)
ildj = base_bijector.inverse_log_det_jacobian(y)
return math.sum_last(ildj, extra_event_ndims)
def _check_ndims(
self, direction: str, event_ndims: int, expected_ndims: int) -> int:
"""Checks that `event_ndims` are correct and returns any extra ndims."""
if event_ndims is not None and event_ndims < expected_ndims:
raise ValueError(f"{direction} `event_ndims` of {self.name} must be at "
f"least {expected_ndims} but was passed {event_ndims} "
f"instead.")
return 0 if event_ndims is None else event_ndims - expected_ndims
def _check_shape(
self, direction: str, event_shape: Any, expected_ndims: int):
"""Checks that `event_shape` is correct, raising ValueError otherwise."""
if len(event_shape) < expected_ndims:
raise ValueError(f"{direction} `event_shape` of {self.name} must have "
f"at least {expected_ndims} dimensions, but was "
f"{event_shape} which has only {len(event_shape)} "
f"dimensions instead.")
return TFPCompatibleBijector()
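# Illustrative usage sketch (added here for this edit; not part of the
# library). It shows the wrapper driving a TFP TransformedDistribution;
# `distrax.Tanh` and the PRNG seed are assumptions of this example.
def _example_usage():
  """Minimal sketch: wrap a Distrax bijector and sample through TFP."""
  import distrax  # assumed importable alongside this module
  wrapped = tfp_compatible_bijector(distrax.Tanh())
  dist = tfd.TransformedDistribution(
      distribution=tfd.Normal(loc=0., scale=1.), bijector=wrapped)
  return dist.sample(seed=jax.random.PRNGKey(0))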
|
apache-2.0
| 1,191,359,019,594,068,500
| 41.567416
| 80
| 0.674145
| false
| 3.712396
| false
| false
| false
|
uwosh/uwosh.dropcard
|
uwosh/dropcard/__init__.py
|
1
|
2069
|
"""Main product initializer
"""
from zope.i18nmessageid import MessageFactory
from uwosh.dropcard import config
from Products.Archetypes import atapi
from Products.CMFCore import utils
# Define a message factory for when this product is internationalised.
# This will be imported with the special name "_" in most modules. Strings
# like _(u"message") will then be extracted by i18n tools for translation.
dropcardMessageFactory = MessageFactory('uwosh.dropcard')
def initialize(context):
"""Initializer called when used as a Zope 2 product.
    This is referenced from configure.zcml. Registration as a "Zope 2 product"
    is necessary for GenericSetup profiles to work, for example.
Here, we call the Archetypes machinery to register our content types
with Zope and the CMF.
"""
# Retrieve the content types that have been registered with Archetypes
# This happens when the content type is imported and the registerType()
# call in the content type's module is invoked. Actually, this happens
# during ZCML processing, but we do it here again to be explicit. Of
# course, even if we import the module several times, it is only run
# once.
content_types, constructors, ftis = atapi.process_types(
atapi.listTypes(config.PROJECTNAME),
config.PROJECTNAME)
# Now initialize all these content types. The initialization process takes
# care of registering low-level Zope 2 factories, including the relevant
# add-permission. These are listed in config.py. We use different
# permissions for each content type to allow maximum flexibility of who
# can add which content types, where. The roles are set up in rolemap.xml
# in the GenericSetup profile.
for atype, constructor in zip(content_types, constructors):
utils.ContentInit('%s: %s' % (config.PROJECTNAME, atype.portal_type),
content_types=(atype, ),
permission=config.ADD_PERMISSIONS[atype.portal_type],
extra_constructors=(constructor,),
).initialize(context)
|
gpl-2.0
| -6,773,124,266,639,195,000
| 40.38
| 78
| 0.727888
| false
| 4.222449
| true
| false
| false
|
edf-hpc/hpcstats
|
HPCStats/Model/Cluster.py
|
1
|
6981
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2015 EDF SA
# Contact:
# CCN - HPC <dsp-cspit-ccn-hpc@edf.fr>
# 1, Avenue du General de Gaulle
# 92140 Clamart
#
# Authors: CCN - HPC <dsp-cspit-ccn-hpc@edf.fr>
#
# This file is part of HPCStats.
#
# HPCStats is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with HPCStats. If not, see
# <http://www.gnu.org/licenses/>.
#
# On Calibre systems, the complete text of the GNU General
# Public License can be found in `/usr/share/common-licenses/GPL'.
"""
Schema of the ``Cluster`` table in HPCStats database:
.. code-block:: sql
Cluster(
cluster_id SERIAL,
cluster_name character varying(30) NOT NULL,
CONSTRAINT Cluster_pkey PRIMARY KEY (cluster_id),
CONSTRAINT Cluster_unique UNIQUE (cluster_name)
)
"""
import logging
logger = logging.getLogger(__name__)
from HPCStats.Exceptions import HPCStatsDBIntegrityError, HPCStatsRuntimeError
class Cluster(object):
"""Model class for Cluster table"""
def __init__(self, name, cluster_id=None):
self.cluster_id = cluster_id
self.name = name
def __str__(self):
return self.name
def __eq__(self, other):
return self.name == other.name
def find(self, db):
"""Search the Cluster in the database based on its name. If exactly
one cluster matches in database, set cluster_id attribute properly
and returns its value. If more than one cluster matches, raises
HPCStatsDBIntegrityError. If no cluster is found, returns None.
"""
req = """
SELECT cluster_id
FROM Cluster
WHERE cluster_name = %s
"""
params = ( self.name, )
db.execute(req, params)
nb_rows = db.cur.rowcount
if nb_rows == 0:
logger.debug("cluster %s not found in DB", str(self))
return None
elif nb_rows > 1:
raise HPCStatsDBIntegrityError(
"several cluster_id found in DB for cluster %s" \
% (str(self)))
else:
self.cluster_id = db.cur.fetchone()[0]
logger.debug("cluster %s found in DB with id %d",
str(self),
self.cluster_id )
return self.cluster_id
def save(self, db):
"""Insert Cluster in database. You must make sure that the Cluster does
not already exist in database yet (typically using Cluster.find()
method else there is a risk of future integrity errors because of
duplicated clusters. If cluster_id attribute is set, it raises
HPCStatsRuntimeError.
"""
if self.cluster_id is not None:
raise HPCStatsRuntimeError(
"could not insert cluster %s since already existing in "\
"database" \
% (str(self)))
req = """
INSERT INTO Cluster ( cluster_name )
VALUES ( %s )
RETURNING cluster_id
"""
params = ( self.name, )
#print db.cur.mogrify(req, params)
db.execute(req, params)
self.cluster_id = db.cur.fetchone()[0]
def get_nb_cpus(self, db):
"""Returns the total number of CPUs available on the cluster"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT SUM(node_nbCpu)
FROM Node
WHERE cluster_id = %s
"""
params = ( self.cluster_id, )
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
def get_min_datetime(self, db):
"""Returns the start datetime of the oldest started and unfinished
job on the cluster.
"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT MIN(job_start)
FROM Job
WHERE cluster_id = %s
AND job_state NOT IN ('CANCELLED', 'NODE_FAIL', 'PENDING')
"""
params = ( self.cluster_id, )
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
def get_nb_accounts(self, db, creation_date):
"""Returns the total of users on the cluster whose account have been
created defore date given in parameter.
"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT COUNT (userhpc_id)
FROM Userhpc,
Account
WHERE Account.userhpc_id = Userhpc.userhpc_id
AND Account.account_creation < %s
AND Account.cluster_id = %s
"""
params = (creation_date, self.cluster_id )
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
def get_nb_active_users(self, db, start, end):
"""Returns the total number of users who have run job(s) on the cluster
between start and end datetimes in parameters.
"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT COUNT(DISTINCT userhpc_id)
FROM Job
               WHERE Job.cluster_id = %s
AND ((job_start BETWEEN %s AND %s)
OR (job_end BETWEEN %s AND %s)
OR (job_start <= %s AND job_end >= %s))
"""
params = (self.cluster_id, start, end, start, end, start, end)
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
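# Illustrative usage sketch (added for this edit, not part of the module):
# the find-or-save flow recommended by the docstrings above. The cluster
# name and the `db` wrapper instance are assumptions of this example.
def _example_find_or_save(db):
    cluster = Cluster('curie')
    if cluster.find(db) is None:
        cluster.save(db)
    return cluster.get_nb_cpus(db)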
|
gpl-2.0
| -7,844,108,807,252,249,000
| 32.085308
| 79
| 0.545481
| false
| 4.230909
| false
| false
| false
|
airanmehr/bio
|
Scripts/Plasmodium/Data.py
|
1
|
7725
|
'''
Copyleft Oct 10, 2015 Arya Iranmehr, PhD Student, Bafna's Lab, UC San Diego, Email: airanmehr@gmail.com
'''
import numpy as np
import pandas as pd
import os,sys;home=os.path.expanduser('~') +'/'
class Data:
@staticmethod
def read(param):
"""
data is sorted first by Chrom and then POS in addGlobalPos. Important to have them sorted together
"""
try:
meta=pd.read_pickle(param['dspath']+param['dsname']+'.meta.df')
snp=pd.read_pickle(param['dspath']+param['dsname']+'.snp.df')
except:
if param['Region']=='Peru' and param['dsname']=='all':
meta= Data.readPeruAll()
elif param['Region']=='Peru' and param['dsname']=='winzeler':
meta= Data.readPeruFiltered()
elif param['Region']=='Sudan':
meta= Data.readSudan()
else:
print >> sys.stderr, 'Bad Parameter: ',param
exit()
meta= Data.removeNonPolymorphicandTriAllele(meta, param)
meta = Data.correctCall(meta, param)
meta= Data.computeRC(meta, param)
meta.ix[:,'hetero']= meta[param['names']].apply(lambda x: ((x=='0/1')|(x=='1/0')).sum(),axis=1)
meta=pd.concat([meta, meta[param['names']].apply(lambda x: x.value_counts(),axis=1).fillna(0)],axis=1)
meta['0/1']+=meta['1/0'];meta.drop(['1/0'],axis=1,inplace=True)
calls=meta[param['names']]
snp=pd.concat([pd.DataFrame(calls.applymap(lambda x: x.split('/')[0]).values, columns=calls.columns+'maj') , pd.DataFrame(calls.applymap(lambda x: x.split('/')[1]).values, columns=calls.columns+'min')],axis=1).astype(int).T.sort_index();snp.columns=calls.index.values #major is always zero in heterozygotes in the other getsnp function 1/0 is possible for example line 7 mdio08 in the xlsx
from Scripts.Plasmodium.Run import runHW
meta=runHW(param,meta)
meta.to_pickle(param['dspath']+param['dsname']+'.meta.df')
snp.to_pickle(param['dspath']+param['dsname']+'.snp.df')
return snp,meta
@staticmethod
def computeRC(meta,param):
meta.ix[:,map(lambda x: x[-2:]=='rc' ,meta.columns.values)]=meta.ix[:,map(lambda x: x[-2:]=='rc' ,meta.columns.values)].astype(int)
meta.ix[:,'totrc']=0
for n in param['names']:
meta.ix[:,n+'rc']=meta[n+'majrc']+meta[n+'minrc']
meta['totrc']+=meta[n+'majrc']+meta[n+'minrc']
return meta
@staticmethod
def correctCall(meta,param):
names=[]
for x in param['names']:
names+= [x +'maj']
names+= [x +'min']
b=meta[names].apply(lambda x: x!=meta.REF,axis=0).astype(int)
c=pd.DataFrame(b.apply(lambda c: map(lambda x: '{}/{}'.format(x[0],x[1]), zip(c[np.arange(0,12,2)].values,c[np.arange(1,12,2)].values)) +list('000000'),axis=1).icol(range(6)))
c.columns=param['names']
meta.loc[:,param['names']]=c
return meta
@staticmethod
def removeNonPolymorphicandTriAllele(meta,param):
geno=meta[param['names']].apply(lambda x: x.value_counts(), axis=1)
geno.fillna(0,inplace=True)
biallele=param['biallele']
print 'Total sites: {}'.format(meta.shape[0])
print 'Discarding {} tri-allelic sites...'.format(sum(geno[biallele].sum(1)!=6))
print 'Discarding {} non-polymorphic sites...'.format(sum((geno['0/0']==6) | (geno['1/1']==6)))
idx= (geno[biallele].sum(1)==6) &(geno[biallele]['0/0']!=6) & (geno[biallele]['1/1']!=6)
return meta[idx]
@staticmethod
def getName(dsname,Peru):
return ('Sudan', ('PeruFiltered','PeruAll')[not dsname])[Peru]
@staticmethod
def readSudan():
df = Data.read_xl( home+ 'datasets/vineet/additionalmergedPvivax.xlsx')
df=Data.addGlobalPos(df)
meta=df.icol(range(9))
# geno=df.icol(range(9,12))
# SNP=Data.getSNP(geno, meta.REF)
return meta
@staticmethod
def getSNP(calls,ref):
SNP=np.zeros(calls.shape,dtype=int)
for j in range(calls.shape[1]):
SNP[:,j]= (ref!=calls.icol(j)).astype(int)
return pd.DataFrame(SNP.T,calls.columns,columns=calls.index).sort_index()
@staticmethod
def readPeruAll():
path=home+ 'datasets/popgen/mdio_annotated_passcalled_filteredwindow_majmin_nocontigs_32K.df'
try:
df=pd.read_pickle(path)
except:
df=Data.read_xl(path.replace('.df','.xlsx'))
df=Data.addGlobalPos(df)
df.to_pickle(path)
return df
@staticmethod
def correctWeinzlerFilterDataColumnNames(df):
names=map(unicode.lower,df.loc[0].dropna().values)
df.dropna(inplace=True)
df.columns=df.iloc[0].values.copy()
df=df.iloc[1:]
df.rename(columns={'substitution AA change':'AAeff', 'Alt allele':'ALT','quality':'QUAL', 'gene id':'GENE', 'Chromosome': '#CHROM', 'position': 'POS', 'Ref allele':'REF', 'substitution effect':'TYPE','minor allele read count': 'minrc', 'maj allele read count': 'majrc','min allele': 'min', 'maj allele': 'maj'}, inplace=True)
i=8
for n in names:
for _ in range( 5):
if df.columns.values[i]=='genotype':
df.columns.values[i]= n
else:
df.columns.values[i]= n+df.columns.values[i]
i+=1
return df
@staticmethod
def readPeruFiltered():
path=home+ 'datasets/popgen/SuppTableS2_SNVs.df'
try:
df = pd.read_pickle(path)
except:
df=Data.read_xl(path.replace('.df','.xlsx'))
df=Data.correctWeinzlerFilterDataColumnNames(df)
df=Data.addGlobalPos(df)
df.to_pickle(path)
# calls=pd.concat([df.icol(range(12,df.shape[1],5)), df.icol(range(13,df.shape[1],5))] , axis=1)
# SNP=Data.getSNP(calls, meta.REF)
return df
@staticmethod
def getChromOffset(results):
try:
chromOffset=results.groupby('#CHROM').end.agg('max').sort_index()
except AttributeError:
chromOffset=results.groupby('#CHROM').POS.agg('max').sort_index()
chromOffset.iloc[:]=chromOffset.values.cumsum()
chromOffset.loc[chromOffset.index.max()+1] =0
chromOffset.iloc[1:]=chromOffset.iloc[:-1].values
chromOffset.iloc[0]=0
return chromOffset
@staticmethod
def addGlobalPos(df):
Lens=df.groupby('#CHROM').POS.agg('max').values
chromOffset=np.append([1],Lens.cumsum())
df.insert(0, 'ChromOffset', chromOffset[(df['#CHROM'].values-1).astype(int)] )
df.insert(0, 'POSGlobal', chromOffset[(df['#CHROM'].values-1).astype(int)] +df.POS)
df.insert(0, 'CHROMLen', Lens[(df['#CHROM'].values-1).astype(int)] )
df.sort(['#CHROM', 'POS'],inplace=True) # Important
df.index=range(df.shape[0])
return df
@staticmethod
def read_xl(path):
xl_file = pd.ExcelFile(path)
return [ xl_file.parse(sheet_name) for sheet_name in xl_file.sheet_names][0]
@staticmethod
def getSNPfromString(x):
x=map(str.strip,x.split('\n'))
snp=[]
for l in x:
if len(l):
snp.append([])
for i in l:
snp[-1].append(int(i))
snp = np.array(snp)
return snp
@staticmethod
def getSNPfromDataframe(df):
return df[(df=='2').sum(axis=1)==0].transpose().values.astype(int)
|
mit
| 6,538,085,675,037,243,000
| 41.445055
| 401
| 0.574369
| false
| 3.234925
| false
| false
| false
|
franklinsales/udacity-data-analyst-nanodegree
|
project3/class-works/data-wrangling/data-extract-fundamentals/set_problem_2013_ERCOT_Hourly_Load_Data_corrected.py
|
1
|
3217
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 15 18:05:20 2017
@author: franklin
"""
'''
Find the time and value of max load for each of the regions
COAST, EAST, FAR_WEST, NORTH, NORTH_C, SOUTHERN, SOUTH_C, WEST
and write the result out in a csv file, using pipe character | as the delimiter.
An example output can be seen in the "example.csv" file.
'''
import xlrd
import os
import csv
from zipfile import ZipFile
datafile = "data/2013_ERCOT_Hourly_Load_Data.xls"
outfile = "data/2013_Max_Loads.csv"
def open_zip(datafile):
with ZipFile('{0}.zip'.format(datafile), 'r') as myzip:
myzip.extractall()
def parse_file(datafile):
workbook = xlrd.open_workbook(datafile)
sheet = workbook.sheet_by_index(0)
data = {}
# process all rows that contain station data
for n in range (1, 9):
station = sheet.cell_value(0, n)
cv = sheet.col_values(n, start_rowx=1, end_rowx=None)
maxval = max(cv)
maxpos = cv.index(maxval) + 1
maxtime = sheet.cell_value(maxpos, 0)
realtime = xlrd.xldate_as_tuple(maxtime, 0)
data[station] = {"maxval": maxval,
"maxtime": realtime}
print data
return data
def save_file(data, filename):
with open(filename, "w") as f:
w = csv.writer(f, delimiter='|')
w.writerow(["Station", "Year", "Month", "Day", "Hour", "Max Load"])
for s in data:
year, month, day, hour, _ , _= data[s]["maxtime"]
w.writerow([s, year, month, day, hour, data[s]["maxval"]])
def test():
#open_zip(datafile)
data = parse_file(datafile)
save_file(data, outfile)
number_of_rows = 0
stations = []
ans = {'FAR_WEST': {'Max Load': '2281.2722140000024',
'Year': '2013',
'Month': '6',
'Day': '26',
'Hour': '17'}}
correct_stations = ['COAST', 'EAST', 'FAR_WEST', 'NORTH',
'NORTH_C', 'SOUTHERN', 'SOUTH_C', 'WEST']
fields = ['Year', 'Month', 'Day', 'Hour', 'Max Load']
with open(outfile) as of:
csvfile = csv.DictReader(of, delimiter="|")
for line in csvfile:
print line
station = line['Station']
if station == 'FAR_WEST':
for field in fields:
# Check if 'Max Load' is within .1 of answer
if field == 'Max Load':
max_answer = round(float(ans[station][field]), 1)
max_line = round(float(line[field]), 1)
assert max_answer == max_line
# Otherwise check for equality
else:
assert ans[station][field] == line[field]
number_of_rows += 1
stations.append(station)
# Output should be 8 lines not including header
assert number_of_rows == 8
# Check Station Names
assert set(stations) == set(correct_stations)
if __name__ == "__main__":
test()
|
mit
| 2,643,257,169,679,058,000
| 27.723214
| 80
| 0.539322
| false
| 3.477838
| false
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/aio/operations/_backup_protected_items_crr_operations.py
|
1
|
6308
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BackupProtectedItemsCrrOperations:
"""BackupProtectedItemsCrrOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservicesbackup.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
vault_name: str,
resource_group_name: str,
filter: Optional[str] = None,
skip_token: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.ProtectedItemResourceList"]:
"""Provides a pageable list of all items that are backed up within a vault.
:param vault_name: The name of the recovery services vault.
:type vault_name: str
:param resource_group_name: The name of the resource group where the recovery services vault is
present.
:type resource_group_name: str
:param filter: OData filter options.
:type filter: str
:param skip_token: skipToken Filter.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProtectedItemResourceList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.recoveryservicesbackup.models.ProtectedItemResourceList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProtectedItemResourceList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-20"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'vaultName': self._serialize.url("vault_name", vault_name, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if skip_token is not None:
query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ProtectedItemResourceList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.NewErrorResponseAutoGenerated, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupProtectedItems/'} # type: ignore
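# Illustrative usage sketch (added for this edit, not part of the generated
# client). The client attribute name `backup_protected_items_crr` and the
# vault/resource-group names are assumptions of this example.
async def _example_list(client):
    async for item in client.backup_protected_items_crr.list(
            vault_name="myVault", resource_group_name="myGroup"):
        print(item.name)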
|
mit
| 1,538,860,436,925,158,100
| 48.28125
| 191
| 0.64331
| false
| 4.557803
| true
| false
| false
|
fyookball/electrum
|
plugins/shuffle_deprecated/crypto.py
|
1
|
3181
|
import ecdsa
from ecdsa.util import number_to_string, string_to_number
from electroncash.bitcoin import (generator_secp256k1, point_to_ser, EC_KEY,
Hash, InvalidPassword)
class CryptoError(Exception):
    ''' base class for a subset of the possible exceptions raised in this module.
Subclasses have 4 items in their .args, see below '''
pass
class DecryptError(CryptoError):
''' always has 4 .args:
args[0] = programmer string message explaining what was caught
        args[1] = the wrapped exception generated by bitcoin.py (may be InvalidPassword or Exception)
        args[2] = the private key used for decryption
        args[3] = the message that failed to decrypt '''
pass
class EncryptError(CryptoError):
''' always has 4 .args:
args[0] = programmer string message explaining what was caught
        args[1] = the wrapped exception generated by bitcoin.py (may be InvalidPassword or Exception)
        args[2] = the public key used for encryption
        args[3] = the message that failed to encrypt '''
pass
class Crypto:
""" Functions related to cryptography """
def __init__(self):
self.G = generator_secp256k1
self._r = self.G.order()
self.private_key, self.eck, self.public_key = None, None, None
def generate_key_pair(self):
""" generate encryption/decryption pair """
self.private_key = ecdsa.util.randrange( self._r )
self.eck = EC_KEY(number_to_string(self.private_key, self._r))
self.public_key = point_to_ser(self.private_key*self.G, True)
def export_private_key(self):
""" Export private key as hex string """
if self.private_key:
return bytes.hex(number_to_string(self.private_key, self._r))
else:
return None
def restore_from_privkey(self, secret_string):
"restore key pair from private key expressed in a hex form"
self.private_key = string_to_number(bytes.fromhex(secret_string))
self.eck = EC_KEY(bytes.fromhex(secret_string))
self.public_key = point_to_ser(self.private_key*self.G, True)
def export_public_key(self):
""" serialization of public key """
return bytes.hex(self.public_key)
def encrypt(self, message, pubkey):
""" encrypt message with pubkey """
try:
res = self.eck.encrypt_message(message.encode('utf-8'), bytes.fromhex(pubkey))
return res.decode('utf-8')
except Exception as e: # grrr.. bitcoin.py raises 'Exception' :/
raise EncryptError("Bitcoin.py raised '{}' during Crypto.encrypt".format(type(e).__name__), e, pubkey, message) from e
def decrypt(self, message):
""" decrypt message """
try:
return self.eck.decrypt_message(message)
except (InvalidPassword, Exception) as e:
raise DecryptError("Bitcoin.py raised '{}' during Crypto.decrypt".format(type(e).__name__), e, self.private_key, message) from e
@staticmethod
def hash(text):
''' Returns sha256(sha256(text)) as bytes. text may be bytes or str. '''
return Hash(text) # bitcoin.Hash is sha256(sha256(x))
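# Illustrative round-trip sketch (added for this edit, not part of the
# plugin): encrypt to a peer's public key, then decrypt with their key pair.
def _example_round_trip():
    alice, bob = Crypto(), Crypto()
    alice.generate_key_pair()
    bob.generate_key_pair()
    ciphertext = alice.encrypt('hello', bob.export_public_key())
    return bob.decrypt(ciphertext)  # expected to recover the plaintext bytes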
|
mit
| 2,820,235,096,471,262,700
| 39.782051
| 140
| 0.647281
| false
| 3.83253
| false
| false
| false
|
goett/TRPS
|
Tools/CrystalCalculator/slownessZnSe.py
|
1
|
2953
|
import math
import numpy as np
import continuum as dyn
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
from matplotlib import cm
#plt.rcParams['image.cmap'] = 'viridis';
numt = 161;
nump = 321;
t = np.linspace(0,math.pi,numt)
p = np.linspace(0,math.pi*2.0,nump)
print(t)
print(p)
pm,tm = np.meshgrid(p,t);
#vectors holding results for phase velocity
L = np.zeros(numt*nump);
FT = np.zeros(numt*nump);
ST = np.zeros(numt*nump);
rho = dyn.density('ZnSe')
C = dyn.elasticity('ZnSe')
# flat index into the result vectors over the (theta, phi) grid
i = 0
for tt in t:
for pp in p:
Vp,pol = dyn.CalcPhaseVelocityPol(tt,pp,C,rho);
print(Vp)
L[i] = Vp[0];
FT[i] = Vp[1];
ST[i] = Vp[2];
i+=1
#Save resulting data
np.savetxt('ZnSeslowL.dat',L);
np.savetxt('ZnSeslowFT.dat',FT);
np.savetxt('ZnSeslowST.dat',ST);
Lm = L.reshape(numt,nump);
Lmc = Lm/np.amax(L);
FTm = FT.reshape(numt,nump);
FTmc = FTm/np.amax(FT);
STm = ST.reshape(numt,nump);
STmc = STm/np.amax(ST);
xl=np.sin(tm)*np.cos(pm)/Lm;
yl=np.sin(tm)*np.sin(pm)/Lm;
zl=np.cos(tm)/Lm;
xft=np.sin(tm)*np.cos(pm)/FTm;
yft=np.sin(tm)*np.sin(pm)/FTm;
zft=np.cos(tm)/FTm;
xst=np.sin(tm)*np.cos(pm)/STm;
yst=np.sin(tm)*np.sin(pm)/STm;
zst=np.cos(tm)/STm;
fig1 = plt.figure(figsize=(8.5,8.5));
#f,(ax, bx, cx) = plt.subplots(1,3);
ax = fig1.add_subplot(111,projection='3d');
#ax.plot_surface(xl,yl,zl,facecolors=Lmc,cmap='PuBuGn');
ax.plot_wireframe(xl,yl,zl,color='k',alpha=0.3);
cset = ax.contour(xl, yl, zl, zdir='z', offset=1.2*np.amin(zl), cmap=cm.coolwarm)
cset = ax.contour(xl, yl, zl, zdir='y', offset=1.2*np.amax(yl), cmap=cm.coolwarm)
cset = ax.contour(xl, yl, zl, zdir='x', offset=1.2*np.amin(xl), cmap=cm.coolwarm)
ax.set_xlim((1.2*np.amin(xl),1.2*np.amax(xl)));
ax.set_ylim((1.2*np.amin(yl),1.2*np.amax(yl)));
ax.set_zlim((1.2*np.amin(zl),1.2*np.amax(zl)));
fig2 = plt.figure(figsize=(8.5,8.5));
bx = fig2.add_subplot(111,projection='3d');
bx.plot_wireframe(xft,yft,zft,color='k',alpha=0.3);
cset = bx.contour(xft, yft, zft, zdir='z', offset=1.2*np.amin(zft), cmap=cm.coolwarm)
cset = bx.contour(xft, yft, zft, zdir='y', offset=1.2*np.amax(yft), cmap=cm.coolwarm)
cset = bx.contour(xft, yft, zft, zdir='x', offset=1.2*np.amin(xft), cmap=cm.coolwarm)
bx.set_xlim((1.2*np.amin(xft),1.2*np.amax(xft)));
bx.set_ylim((1.2*np.amin(yft),1.2*np.amax(yft)));
bx.set_zlim((1.2*np.amin(zft),1.2*np.amax(zft)));
fig3 = plt.figure(figsize=(8.5,8.5));
cx = fig3.add_subplot(111,projection='3d');
cx.plot_wireframe(xst,yst,zst,color='k',alpha=0.3);
cset = cx.contour(xst, yst, zst, zdir='z', offset=1.2*np.amin(zst), cmap=cm.coolwarm)
cset = cx.contour(xst, yst, zst, zdir='y', offset=1.2*np.amax(yst), cmap=cm.coolwarm)
cset = cx.contour(xst, yst, zst, zdir='x', offset=1.2*np.amin(xst), cmap=cm.coolwarm)
cx.set_xlim((1.2*np.amin(xst),1.2*np.amax(xst)));
cx.set_ylim((1.2*np.amin(yst),1.2*np.amax(yst)));
cx.set_zlim((1.2*np.amin(zst),1.2*np.amax(zst)));
#ax.set_axis_off()
plt.show()
|
mit
| 339,710,894,652,596,600
| 29.132653
| 85
| 0.655943
| false
| 1.960823
| false
| false
| false
|
Nikolas1814/HackTues
|
webServer/all/views.py
|
1
|
2898
|
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from django.template.loader import get_template
from django.template import Context
from django.http import StreamingHttpResponse
from django.http import HttpResponseRedirect
import datetime
from django.db.models import Q
import os.path
from news.forms import ClassesForm
from news.models import classes
from django.contrib.auth.models import *
from register.models import userInformation
from models import *
from forms import *
@csrf_exempt
def all(request):
if not request.user.is_authenticated():
return HttpResponseRedirect('/index/login/')
if request.method == 'POST':
html = validateForm(request)
return html
else:
form = HomeworkForm()
signals = homeworks.objects.filter().order_by("-id")
userInfo = userInformation.objects.filter(userID = request.user.id)
userName = request.user.first_name
t = get_template('all.html')
html = t.render(Context({'profilePicturePath' : userInfo[0].userProfilePicturePath, 'username' : userName, 'form' : form, 'signals' : signals}))
return StreamingHttpResponse(html)
def validateForm(request):
form = HomeworkForm(request.POST)
signals = homeworks.objects.filter().order_by('-modified')
if form.is_valid():
data = form.cleaned_data
homework = homeworks(date = data['date'], classesId = data['classHomework'], day = data['day'], petitionDescription = data['petitionDescription'],
homework = data['homework'], username = request.user.username, modified = datetime.datetime.now())
homework.save()
return HttpResponseRedirect('/index/all/')
    userInfo = userInformation.objects.filter(userID = request.user.id)
    userName = request.user.first_name
    t = get_template('all.html')
    html = t.render(Context({'profilePicturePath' : userInfo[0].userProfilePicturePath, 'username' : userName, 'form' : form, 'signals' : signals}))
    return StreamingHttpResponse(html)
#def validateForm(request):
# form = ProgramForm(request.POST)
# if form.is_valid():
# data = form.cleaned_data
# pr = program(first = data['first'], second = data['second'], third = data['third'], fourth = data['fourth'],
# five = data['five'], sixth = data['sixth'], seven = data['seven'], eight = data['eight'], nine = data['nine'],
# ten = data['ten'],firstTime = data['firstTime'], secondTime = data['secondTime'], thirdTime = data['thirdTime'],
# fourthTime = data['fourthTime'],fiveTime = data['fiveTime'], sixthTime = data['sixthTime'], sevenTime = data['sevenTime'],
# eightTime = data['eightTime'], nineTime = data['nineTime'], tenTime = data['tenTime'], date = data['date'], classesId = data['grade'])
# pr.save()
# return HttpResponseRedirect('/index/all/')
# userInfo = userInformation.objects.filter(userID = request.user.id)
# userName = request.user.first_name
# t = get_template('all.html')
# html = t.render(Context({'profilePicturePath' : userInfo[0].userProfilePicturePath, 'username' : userName, 'form' : form}))
# return StreamingHttpResponse(html)
|
mit
| -6,916,202,942,270,189,000
| 45.015873
| 149
| 0.730504
| false
| 3.433649
| false
| false
| false
|
olatoft/reverse-hangman
|
Main.py
|
1
|
2693
|
import Words
def get_word_length():
word_length = 0
while word_length == 0:
try:
word_length = int(input('Kor mange bokstavar er det i ordet?\n'))
        except ValueError:
print('Du må skrive inn eit tal. Prøv igjen.\n')
return word_length
def get_if_letter_in_word(letter):
answer = ''
while answer == '':
answer = input('\nInneheldt ordet bokstaven ' + letter +
'? Ja eller nei?\n')
if (answer == 'Ja') or (answer == 'ja'):
return True
elif (answer == 'Nei') or (answer == 'nei'):
return False
else:
answer = ''
print('Du må skrive enten ja eller nei\n')
def get_letter_pos_list(letter, words):
letter_pos_list = ''
while letter_pos_list == '':
try:
while letter_pos_list == '':
letter_pos_list = input(
'Skriv inn nummer på posisjonar i ordet der bokstaven ' +
letter + ' er med:\n').split()
if len(letter_pos_list) == 0:
letter_pos_list = ''
print('Du må skrive inn minst 1 tal. Prøv igjen\n')
for i in range(len(letter_pos_list)):
letter_pos_list[i] = int(letter_pos_list[i]) - 1
if (min(letter_pos_list) < 0) or (
(max(letter_pos_list) + 1) > words.word_length):
letter_pos_list = ''
print('Tal må vere større enn null og mindre enn ordlengde.\n')
        except ValueError:
letter_pos_list = ''
print('Du må skrive inn tal. Prøv igjen.\n')
return letter_pos_list
def loop(words):
while True:
letter = words.get_letter_to_guess()
answer = get_if_letter_in_word(letter)
if answer:
letter_pos_list = get_letter_pos_list(letter, words)
for element in letter_pos_list:
words.set_words_with_letter_in_pos(letter, element)
for i in range(words.word_length):
if i not in letter_pos_list:
words.set_words_without_letter_in_pos(letter, i)
else:
words.set_words_without_letter(letter)
if len(words.get_words()) == 1:
print('\nOrdet er ' + words.get_words()[0])
break
elif len(words.get_words()) == 0:
print('\nOrdet er ikkje i ordboka')
break
print(words.get_words())
def main():
words = Words.Words()
words.word_length = get_word_length()
words.set_words_with_length(words.word_length)
loop(words)
if __name__ == '__main__':
main()
|
gpl-3.0
| 4,939,119,348,872,317,000
| 32.123457
| 83
| 0.513231
| false
| 3.435339
| false
| false
| false
|
CeON/avroknife
|
avroknife/test/command_line_runner.py
|
1
|
8137
|
# Copyright 2013-2015 University of Warsaw
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import subprocess
from avroknife.test.tools import Tools
from avroknife.test.file_system import HDFS, LocalFS
class RunResult:
def __init__(self, stdout, input_dir, output_dir):
self.__stdout = stdout
self.__input_dir = input_dir
self.__output_dir = output_dir
def get_stdout(self):
"""Get produced stdout string"""
return self.__stdout
def get_input_path(self, name):
"""Get path to the input in the local file system"""
return os.path.join(self.__input_dir, name)
def get_output_path(self, name):
"""Get path to the output in the local file system"""
return os.path.join(self.__output_dir, name)
def __str__(self):
return 'stdout: {}\n\ninput_dir={}\noutput_dir={}'.format(
self.__stdout, self.__input_dir, self.__output_dir)
class CommandLineRunnerException:
def __init__(self, exception):
"""
Args:
exception: instance of subprocess.CalledProcessError
"""
self.returncode = exception.returncode
self.cmd = exception.cmd
self.output = exception.output
class CommandLineRunner:
__input_file_prefix = '@in:'
__output_file_prefix = '@out:'
__input_subdir = 'input'
__outputs_subdir = 'outputs'
__hdfs_path_prefix = ''
__local_path_prefix = 'local:'
def __init__(self, program_path, local_input_dir, enforce_local=False):
"""
Args:
program_path: path to the command line program to be executed
local_input_dir: path to the directory containing input files
that can be referenced through placeholders
enforce_local: allow running the program only on local file system.
HDFS is not accessed in this mode.
"""
self.__program_path = program_path
self.__enforce_local = enforce_local
self.__local_fs = LocalFS()
self.__local_tmp_dir = \
self.__initialize_tmp_dirs(self.__local_fs, local_input_dir)
if not self.__enforce_local:
self.__hdfs = HDFS()
self.__hdfs_tmp_dir = \
self.__initialize_tmp_dirs(self.__hdfs, local_input_dir)
self.__is_closed = False
@staticmethod
def __initialize_tmp_dirs(fs, local_input_dir):
dir_ = fs.create_temporary_dir()
fs.copy_from_local_dir(local_input_dir,
fs.join_path([dir_, CommandLineRunner.__input_subdir]))
fs.create_dir(fs.join_path([dir_, CommandLineRunner.__outputs_subdir]))
return dir_
def run(self, args_string, is_input_local, is_output_local,
discard_stderr=False):
"""
        Execute the program after replacing placeholders in the arguments string
Args:
args_string: parameters of the program with file placeholders
is_input_local: if True, the input placeholders will be replaced
with paths in local file system. If False, they will be replaced
with paths in HDFS.
is_output_local: if True, the output placeholders will be replaced
with path in local file system. If False, they will be replaced
with paths in HDFS.
            discard_stderr: if True, the standard error is discarded
Returns:
RunResult object
Raises:
CommandLineRunnerException: exception raised when executed process
returns non-zero exit status.
"""
if self.__is_closed:
raise Exception('This object has been already closed')
if self.__enforce_local:
if not (is_input_local and is_output_local):
raise Exception('is_input_local={}, is_output_local={}, while '\
'the enforce_local mode allows running the program only '\
'on local file system '.\
format(is_input_local, is_output_local))
local_out_dir = self.__local_fs.create_temporary_dir(
self.__local_fs.join_path([self.__local_tmp_dir, self.__outputs_subdir]))
hdfs_out_dir = None
if not is_output_local:
hdfs_out_dir = self.__hdfs.create_temporary_dir(
self.__hdfs.join_path([self.__hdfs_tmp_dir, self.__outputs_subdir]))
args_replaced = self.__replace_args(args_string,
is_input_local, is_output_local, local_out_dir, hdfs_out_dir)
stdout = self.run_raw(args_replaced, discard_stderr)
if not is_output_local:
## We need to delete this directory because the copying operation
## requires that the destination directory doesn't already exist
self.__local_fs.delete_dir(local_out_dir)
self.__hdfs.copy_to_local_dir(hdfs_out_dir, local_out_dir)
return RunResult(stdout,
os.path.join(self.__local_tmp_dir, self.__input_subdir),
local_out_dir)
def run_raw(self, args_string, discard_stderr=False):
"""
        Execute the program WITHOUT replacing placeholders in the arguments string
Args:
args_string: parameters of the program
discard_stderr: if True, the standard error is discarded
Returns:
stdout string
"""
return self.__system(self.__program_path + ' ' + args_string,
discard_stderr)
def __replace_args(self, args_string, is_input_local, is_output_local,
local_out_dir, hdfs_out_dir):
text = args_string
if is_input_local:
text = self.__replace(text, self.__input_file_prefix,
self.__local_path_prefix, self.__local_fs.join_path,
[self.__local_tmp_dir, self.__input_subdir])
else:
text = self.__replace(text, self.__input_file_prefix,
self.__hdfs_path_prefix, self.__hdfs.join_path,
[self.__hdfs_tmp_dir, self.__input_subdir])
if is_output_local:
text = self.__replace(text, self.__output_file_prefix,
self.__local_path_prefix, self.__local_fs.join_path,
[local_out_dir])
else:
text = self.__replace(text, self.__output_file_prefix,
self.__hdfs_path_prefix, self.__hdfs.join_path,
[hdfs_out_dir])
return text
@staticmethod
def __replace(text, placeholder_prefix, path_prefix, path_joiner_function,
dir_name_elements):
"""Replace placeholders with paths to files"""
replaced = Tools.replace(text, placeholder_prefix,
lambda s: path_prefix + path_joiner_function(dir_name_elements + [s]))
return replaced
def close(self):
"""Do the cleanup"""
if self.__is_closed:
raise Exception('This object has been already closed')
self.__is_closed = True
self.__local_fs.delete_dir(self.__local_tmp_dir)
if not self.__enforce_local:
self.__hdfs.delete_dir(self.__hdfs_tmp_dir)
@staticmethod
def __system(command, discard_stderr):
try:
if discard_stderr:
with open(os.devnull, 'w') as devnull:
return subprocess.check_output(
command, shell=True, stderr=devnull)
else:
return subprocess.check_output(command, shell=True)
except subprocess.CalledProcessError as ex:
raise CommandLineRunnerException(ex)
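# Illustrative usage sketch (added for this edit, not part of the test
# helpers): the placeholder substitution described in `run`. The program
# path, input directory and avroknife arguments are assumptions here.
def _example_run():
    runner = CommandLineRunner('avroknife', '/tmp/avro_inputs',
                               enforce_local=True)
    try:
        result = runner.run('tojson @in:dataset --output @out:dump.json',
                            is_input_local=True, is_output_local=True)
        return result.get_stdout()
    finally:
        runner.close()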
|
apache-2.0
| 9,083,836,292,020,347,000
| 38.5
| 85
| 0.598501
| false
| 4.181398
| false
| false
| false
|
bostonlink/pamalt
|
pamalt/transforms/log_queries.py
|
1
|
5494
|
#!/usr/bin/env python
# Copyright (C) 2012 pamalt Developer.
# This file is part of pamalt - https://github.com/bostonlink/pamalt
# See the file 'LICENSE' for copying permission.
# PaloAlto Log query Maltego transforms module
# Author: David Bressler (@bostonlink)
import urllib, urllib2
import time, sys
import xml.etree.ElementTree as ET
from pamalt.lib import pamod
# Threat Log queries
def ip_2_threat(pa_hostname, key, ip):
query = '(addr.dst in %s) or (addr.src in %s)' % (ip, ip)
jobid = pamod.pa_log_query(pa_hostname, key, 'threat', query)
time.sleep(5)
# Loop function to check if the log query job is done
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
while status.text == 'ACT':
time.sleep(5)
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
if status.text == 'FIN':
break
# parse the log data and create dictionaries stored in a list for each individual log
log_list = []
for entry in root.findall(".//log/logs/entry"):
entry_dic = {}
for data in entry:
entry_dic[data.tag] = data.text
log_list.append(entry_dic)
# Maltego XML Output
print "<MaltegoMessage>\n<MaltegoTransformResponseMessage>"
print " <Entities>"
threat_list = []
for dic in log_list:
if dic['threatid'] in threat_list:
continue
else:
print """ <Entity Type="pamalt.paThreat">
<Value>%s</Value>
<AdditionalFields>
<Field Name="ipsrc" DisplayName="IP Source">%s</Field>
<Field Name="ipdst" DisplayName="IP Destination">%s</Field>
<Field Name="tid" DisplayName="Threat ID">%s</Field>
</AdditionalFields>
</Entity>""" % (dic['threatid'], dic['src'], dic['dst'], dic['tid'])
threat_list.append(dic['threatid'])
print " </Entities>"
print "</MaltegoTransformResponseMessage>\n</MaltegoMessage>"
def threat_2_ipsrc(pa_hostname, key, tid):
query = '(threatid eq %s)' % (tid)
jobid = pamod.pa_log_query(pa_hostname, key, 'threat', query)
time.sleep(5)
# Loop function to check if the log query job is done
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
while status.text == 'ACT':
time.sleep(5)
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
if status.text == 'FIN':
break
# parse the log data and create dictionaries stored in a list for each individual log
log_list = []
for entry in root.findall(".//log/logs/entry"):
entry_dic = {}
for data in entry:
entry_dic[data.tag] = data.text
log_list.append(entry_dic)
# Maltego XML Output
print "<MaltegoMessage>\n<MaltegoTransformResponseMessage>"
print " <Entities>"
ip_list = []
for dic in log_list:
if dic['src'] in ip_list:
continue
else:
print """ <Entity Type="maltego.IPv4Address">
<Value>%s</Value>
<AdditionalFields>
<Field Name="ipdst" DisplayName="IP Destination">%s</Field>
<Field Name="tid" DisplayName="Threat ID">%s</Field>
</AdditionalFields>
</Entity>""" % (dic['src'], dic['dst'], dic['tid'])
ip_list.append(dic['src'])
print " </Entities>"
print "</MaltegoTransformResponseMessage>\n</MaltegoMessage>"
def threat_2_ipdst(pa_hostname, key, tid):
query = '(threatid eq %s)' % (tid)
jobid = pamod.pa_log_query(pa_hostname, key, 'threat', query)
time.sleep(5)
# Loop function to check if the log query job is done
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
while status.text == 'ACT':
time.sleep(5)
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
if status.text == 'FIN':
break
# parse the log data and create dictionaries stored in a list for each individual log
log_list = []
for entry in root.findall(".//log/logs/entry"):
entry_dic = {}
for data in entry:
entry_dic[data.tag] = data.text
log_list.append(entry_dic)
# Maltego XML Output
print "<MaltegoMessage>\n<MaltegoTransformResponseMessage>"
print " <Entities>"
ip_list = []
for dic in log_list:
if dic['dst'] in ip_list:
continue
else:
print """ <Entity Type="maltego.IPv4Address">
<Value>%s</Value>
<AdditionalFields>
<Field Name="ipdst" DisplayName="IP Source">%s</Field>
<Field Name="tid" DisplayName="Threat ID">%s</Field>
</AdditionalFields>
</Entity>""" % (dic['dst'], dic['src'], dic['tid'])
ip_list.append(dic['dst'])
print " </Entities>"
print "</MaltegoTransformResponseMessage>\n</MaltegoMessage>"
|
gpl-3.0
| -3,957,049,368,385,212,400
| 34.681818
| 89
| 0.568074
| false
| 3.699663
| false
| false
| false
|
Bouke/django-user-sessions
|
user_sessions/management/commands/migratesessions.py
|
1
|
2472
|
# -*- coding: UTF-8 -*-
import importlib
import logging
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from user_sessions.models import Session as UserSession
logger = logging.getLogger(__name__)
def get_model_class(full_model_name):
try:
old_model_package, old_model_class_name = full_model_name.rsplit('.', 1)
package = importlib.import_module(old_model_package)
return getattr(package, old_model_class_name)
except RuntimeError as e:
        if 'INSTALLED_APPS' in str(e):
raise RuntimeError(
"To run this command, temporarily append '{model}' to settings.INSTALLED_APPS"
.format(model=old_model_package.rsplit('.models')[0]))
raise
class Command(BaseCommand):
"""
Convert existing (old) sessions to the user_sessions SessionStore.
If you have an operational site and switch to user_sessions, you might want to keep your
active users logged in. We assume the old sessions are stored in a database table `oldmodel`.
This command creates a `user_session.Session` object for each session of the previous model.
"""
def add_arguments(self, parser):
parser.add_argument(
'--oldmodel',
dest='oldmodel',
default='django.contrib.sessions.models.Session',
help='Existing session model to migrate to the new UserSessions database table'
)
def handle(self, *args, **options):
User = get_user_model()
old_sessions = get_model_class(options['oldmodel']).objects.all()
logger.info("Processing %d session objects" % old_sessions.count())
conversion_count = 0
for old_session in old_sessions:
if not UserSession.objects.filter(session_key=old_session.session_key).exists():
data = old_session.get_decoded()
user = None
if '_auth_user_id' in data:
user = User.objects.filter(pk=data['_auth_user_id']).first()
UserSession.objects.create(
session_key=old_session.session_key,
session_data=old_session.session_data,
expire_date=old_session.expire_date,
user=user,
ip='127.0.0.1'
)
conversion_count += 1
logger.info("Created %d new session objects" % conversion_count)
|
mit
| -1,303,204,494,783,075,600
| 38.870968
| 97
| 0.619741
| false
| 4.262069
| false
| false
| false
|
rjungbeck/rasterizer
|
servicebase.py
|
1
|
3168
|
import argparse
import json
import multiprocessing
import os
import logging
import win32api
import win32service
import win32serviceutil
cmdline_style="pywin32"
logger=logging.getLogger("servicebase")
class ServiceBase(win32serviceutil.ServiceFramework):
_svc_name_ = "RsjService"
_svc_display_name_ = "RSJ Service"
_svc_deps_=[]
epilog="(C) Copyright 2013-2014 by RSJ Software GmbH Germering. All rights reserved."
options={}
def __init__(self, args=None):
if args:
#self._svc_name_=args[0]
try:
win32serviceutil.ServiceFramework.__init__(self, args)
except Exception:
# instantiated outside the service control manager (e.g. while debugging);
# ignore framework initialisation errors in that case
pass
def SvcDoRun(self):
import servicemanager
servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE,servicemanager.PYS_SERVICE_STARTED,(self._svc_name_, ''))
directory=self.getOption("directory")
if directory:
os.chdir(directory)
self.ServiceRun()
servicemanager.LogInfoMsg("%s - STOPPED!" %(self._svc_display_name_,))
def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
self.ServiceStop()
def ServiceMain(self):
multiprocessing.freeze_support()
win32api.SetConsoleCtrlHandler(self.ctrlHandler, True)
# NOTE: this parser mostly documents the expected options; the actual
# command-line parsing is delegated to win32serviceutil.HandleCommandLine below
parser=argparse.ArgumentParser(self._svc_display_name_, epilog=self.epilog, fromfile_prefix_chars="@")
customInstallOptions=""
for k,v in self.options.iteritems():
customInstallOptions+=k[1:]+":"
parser.add_argument(k, type=str, default=v.get("default", None),help=v.get("help", None))
parser.add_argument("--username", type=str, default=None, help="User name")
parser.add_argument("--password", type=str, default=None, help="Password")
parser.add_argument("--startup", type=str, default="manual", help="Startup type (auto, manual, disabled)")
subparsers=parser.add_subparsers(help="Subcommands")
parserInstall=subparsers.add_parser("install", help="Install Service")
parserUninstall=subparsers.add_parser("remove", help="Remove Service")
parserConfig=subparsers.add_parser("update", help="Update Service")
parserDebug=subparsers.add_parser("debug", help="Debug")
parserStart=subparsers.add_parser("start", help="Start Service")
parserStop=subparsers.add_parser("stop", help="Stop Service")
parserRestart=subparsers.add_parser("restart", help="Restart Service")
self.__name__=self.__class__.__name__
win32serviceutil.HandleCommandLine(self,customInstallOptions=customInstallOptions, customOptionHandler=self.customOptionHandler)
def ServiceRun(self):
pass
def ServiceStop(self):
pass
def ctrlHandler(self, ctrlType):
return True
def customOptionHandler(self, opts):
logger.debug(opts)
for opt,val in opts:
if opt in self.options:
if "name" in self.options[opt]:
self.setOption(self.options[opt]["name"], val)
self.setOption("directory", os.getcwd())
def setOption(self, name, val):
win32serviceutil.SetServiceCustomOption(self, name, val)
def getOption(self, name, default=None):
return win32serviceutil.GetServiceCustomOption(self, name, default)
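# Typical wiring (a sketch): subclass ServiceBase, override the _svc_*
# attributes and ServiceRun/ServiceStop, then dispatch from __main__:
#   class MyService(ServiceBase):
#       _svc_name_ = "MyService"
#       _svc_display_name_ = "My Service"
#       def ServiceRun(self):
#           pass  # main service loop goes here
#   if __name__ == "__main__":
#       MyService().ServiceMain()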
|
agpl-3.0
| 4,275,224,004,615,823,400
| 27.886792
| 130
| 0.703598
| false
| 3.373802
| false
| false
| false
|
cdegroc/scikit-learn
|
examples/covariance/plot_outlier_detection.py
|
2
|
3882
|
"""
==========================================
Outlier detection with several methods.
==========================================
This example illustrates two ways of performing :ref:`outlier_detection`
when the amount of contamination is known:
- based on a robust estimator of covariance, which is assuming that the
data are Gaussian distributed and performs better than the One-Class SVM
in that case.
- using the One-Class SVM and its ability to capture the shape of the
data set, hence performing better when the data is strongly
non-Gaussian, i.e. with two well-separated clusters;
The ground truth about inliers and outliers is given by the points colors
while the orange-filled area indicates which points are reported as outliers
by each method.
Here, we assume that we know the fraction of outliers in the datasets.
Thus rather than using the 'predict' method of the objects, we set the
threshold on the decision_function to separate out the corresponding
fraction.
"""
print __doc__
import numpy as np
import pylab as pl
import matplotlib.font_manager
from scipy import stats
from sklearn import svm
from sklearn.covariance import EllipticEnvelop
# Example settings
n_samples = 200
outliers_fraction = 0.25
clusters_separation = [0, 1, 2]
# define two outlier detection tools to be compared
classifiers = {
"One-Class SVM": svm.OneClassSVM(nu=0.95 * outliers_fraction + 0.05,
kernel="rbf", gamma=0.1),
"robust covariance estimator": EllipticEnvelop(contamination=.1),
}
# Compare given classifiers under given settings
xx, yy = np.meshgrid(np.linspace(-7, 7, 500), np.linspace(-7, 7, 500))
n_inliers = int((1. - outliers_fraction) * n_samples)
n_outliers = int(outliers_fraction * n_samples)
ground_truth = np.ones(n_samples, dtype=int)
ground_truth[-n_outliers:] = 0
# Fit the problem with varying cluster separation
for i, offset in enumerate(clusters_separation):
np.random.seed(42)
# Data generation
    # newer numpy requires integer shapes, hence the explicit int()
    X1 = 0.3 * np.random.randn(int(0.5 * n_inliers), 2) - offset
    X2 = 0.3 * np.random.randn(int(0.5 * n_inliers), 2) + offset
X = np.r_[X1, X2]
# Add outliers
X = np.r_[X, np.random.uniform(low=-6, high=6, size=(n_outliers, 2))]
# Fit the model with the One-Class SVM
pl.figure(figsize=(10, 5))
pl.set_cmap(pl.cm.Blues_r)
for i, (clf_name, clf) in enumerate(classifiers.iteritems()):
# fit the data and tag outliers
clf.fit(X)
y_pred = clf.decision_function(X).ravel()
threshold = stats.scoreatpercentile(y_pred,
100 * outliers_fraction)
y_pred = y_pred > threshold
n_errors = (y_pred != ground_truth).sum()
# plot the levels lines and the points
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
subplot = pl.subplot(1, 2, i + 1)
subplot.set_title("Outlier detection")
subplot.contourf(xx, yy, Z,
levels=np.linspace(Z.min(), threshold, 7))
a = subplot.contour(xx, yy, Z, levels=[threshold],
linewidths=2, colors='red')
subplot.contourf(xx, yy, Z, levels=[threshold, Z.max()],
colors='orange')
b = subplot.scatter(X[:-n_outliers, 0], X[:-n_outliers, 1], c='white')
c = subplot.scatter(X[-n_outliers:, 0], X[-n_outliers:, 1], c='black')
subplot.axis('tight')
subplot.legend(
[a.collections[0], b, c],
['learned decision function', 'true inliers', 'true outliers'],
prop=matplotlib.font_manager.FontProperties(size=11))
subplot.set_xlabel("%d. %s (errors: %d)" % (i + 1, clf_name, n_errors))
subplot.set_xlim((-7, 7))
subplot.set_ylim((-7, 7))
pl.subplots_adjust(0.04, 0.1, 0.96, 0.94, 0.1, 0.26)
pl.show()
|
bsd-3-clause
| -2,521,126,800,319,898,000
| 37.82
| 79
| 0.62983
| false
| 3.500451
| false
| false
| false
|
bgewehr/RPiMower
|
sens_groundCam.py
|
1
|
1775
|
#!/usr/bin/python
__author__ = 'mp911de'
import time
import os,sys
import picamera
import picamera.array
import time
import numpy as np
import lib_mqtt as MQTT
from math import sqrt, atan2, degrees
DEBUG = False
def get_colour_name(rgb):
rgb = rgb / 255
alpha = (2 * rgb[0] - rgb[1] - rgb[2])/2
beta = sqrt(3)/2*(rgb[1] - rgb[2])
hue = int(degrees(atan2(beta, alpha)))
std = np.std(rgb)
mean = np.mean(rgb)
if hue < 0:
hue = hue + 360
if std < 0.055:
if mean > 0.85:
colour = "white"
elif mean < 0.15:
colour = "black"
else:
colour = "grey"
elif (hue > 50) and (hue <= 160):
colour = "green"
elif (hue > 160) and (hue <= 250):
colour = "blue"
else:
colour = "red"
if DEBUG:
print rgb, hue, std, mean, colour
return colour
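# Example (a sketch): get_colour_name(np.array([200, 30, 40])) yields "red":
# the normalised channel std (~0.31) is above 0.055 and the hue (~357 deg)
# falls outside the green (50-160) and blue (160-250) bands.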
if __name__ == '__main__':
# os.nice(10)
try:
MQTT.init()
while True:
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as stream:
camera.start_preview()
camera.resolution = (100, 100)
for foo in camera.capture_continuous(stream, 'rgb', use_video_port=False, resize=None, splitter_port=0, burst=True):
stream.truncate()
stream.seek(0)
RGBavg = stream.array.mean(axis=0).mean(axis=0)
colour = get_colour_name(RGBavg)
MQTT.mqttc.publish("/RPiMower/Ground_Colour", colour)
# interrupt
except KeyboardInterrupt:
print("Programm interrupted")
camera.stop_preview()
MQTT.cleanup()
sys.exit(2)
|
gpl-2.0
| 3,489,056,492,993,856,500
| 26.307692
| 137
| 0.522817
| false
| 3.557114
| false
| false
| false
|
pcampese/codewars
|
next_bigger3.py
|
1
|
1877
|
# https://www.codewars.com/kata/next-bigger-number-with-the-same-digits/train/python
def next_bigger(n):
import itertools
# Print the arguments
print('n = {}'.format(n))
# Define default result as -1
result = -1
# Convert the number to a list of digits
numbers = [int(d) for d in str(n)]
print('numbers = {}'.format(numbers))
# Save a permanent copy of the original numbers list
number_vault = numbers[:]
# Create next largest number
# Start from right to left
# Goal is to keep as many as the left most digits as possible, as they are and
# for the right-most digits, sort as few as possible (sorted from low to high)
# Current number sorted
numbers_sorted = sorted(numbers)
print('numbers_sorted = {}'.format(numbers_sorted))
# Get the number length
number_length = len(numbers)
# For length of 2
if (number_length == 2):
sorted_number = int(''.join([str(d) for d in sorted(numbers)]))
if (sorted_number > n):
result = sorted_number
else:
element = numbers.pop(1)
print(element)
numbers.insert(0, element)
print(numbers)
result = list_to_int(numbers)
print(result)
# For length of 3 or more
elif (number_length >= 3):
numbers_on_right = next_bigger(list_to_int(numbers[1:]))
if numbers_on_right >= 0:
result = list(str(numbers_on_right))
result.insert(0,numbers[0])
result = list_to_int(result)
print('result = {}'.format(result))
return result
def swap(array, item1, item2):
index_1 = array.index(item1)
index_2 = array.index(item2)
array[index_1], array[index_2] = array[index_2], array[index_1]
return array
def shift_left(arr, index):
print('Array = {}'.format(arr))
print('Index = {}'.format(index))
# move the element at `index` one position to the left
element = arr.pop(index)
arr.insert(index - 1, element)
def list_to_int(numbers):
return int(''.join([str(d) for d in numbers]))
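# Kata examples (from the problem statement): next_bigger(12) == 21,
# next_bigger(513) == 531, next_bigger(2017) == 2071; next_bigger(9) == -1
# since no bigger number can be composed of the same digits.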
|
gpl-3.0
| -2,691,056,460,727,516,700
| 25.083333
| 84
| 0.67821
| false
| 2.960568
| false
| false
| false
|
pytroll/pytroll-schedule
|
trollsched/spherical.py
|
1
|
11055
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2013, 2014, 2015, 2018 Martin Raspaud
# Author(s):
# Martin Raspaud <martin.raspaud@smhi.se>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Some generalized spherical functions.
base type is a numpy array of size (n, 2) (2 for lon and lat)
"""
import numpy as np
import pyresample.spherical
import logging
logger = logging.getLogger(__name__)
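# Example usage (a sketch; all angles are in radians):
#   p1 = SCoordinate(np.deg2rad(0.), np.deg2rad(0.))
#   p2 = SCoordinate(np.deg2rad(10.), np.deg2rad(0.))
#   p1.hdistance(p2)  # ~0.1745 rad, i.e. 10 degrees along the equator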
class SCoordinate(object):
"""Spherical coordinates
"""
def __init__(self, lon, lat):
self.lon = lon
self.lat = lat
def cross2cart(self, point):
"""Compute the cross product, and convert to cartesian coordinates
"""
lat1 = self.lat
lon1 = self.lon
lat2 = point.lat
lon2 = point.lon
ad = np.sin(lat1 - lat2) * np.cos((lon1 - lon2) / 2.0)
be = np.sin(lat1 + lat2) * np.sin((lon1 - lon2) / 2.0)
c = np.sin((lon1 + lon2) / 2.0)
f = np.cos((lon1 + lon2) / 2.0)
g = np.cos(lat1)
h = np.cos(lat2)
i = np.sin(lon2 - lon1)
res = CCoordinate(np.array([-ad * c + be * f,
ad * f + be * c,
g * h * i]))
return res
def to_cart(self):
"""Convert to cartesian.
"""
return CCoordinate(np.array([np.cos(self.lat) * np.cos(self.lon),
np.cos(self.lat) * np.sin(self.lon),
np.sin(self.lat)]))
def distance(self, point):
"""Vincenty formula.
"""
dlambda = self.lon - point.lon
num = ((np.cos(point.lat) * np.sin(dlambda)) ** 2 +
(np.cos(self.lat) * np.sin(point.lat) -
np.sin(self.lat) * np.cos(point.lat) *
np.cos(dlambda)) ** 2)
den = (np.sin(self.lat) * np.sin(point.lat) +
np.cos(self.lat) * np.cos(point.lat) * np.cos(dlambda))
return np.arctan2(num ** .5, den)
def hdistance(self, point):
"""Haversine formula
"""
return 2 * np.arcsin((np.sin((point.lat - self.lat) / 2.0) ** 2.0 +
np.cos(point.lat) * np.cos(self.lat) *
np.sin((point.lon - self.lon) / 2.0) ** 2.0) ** .5)
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
return np.allclose((self.lon, self.lat), (other.lon, other.lat))
def __str__(self):
return str((np.rad2deg(self.lon), np.rad2deg(self.lat)))
def __repr__(self):
return str((np.rad2deg(self.lon), np.rad2deg(self.lat)))
def __iter__(self):
return [self.lon, self.lat].__iter__()
class CCoordinate(object):
"""Cartesian coordinates
"""
def __init__(self, cart):
self.cart = np.array(cart)
def norm(self):
"""Euclidean norm of the vector.
"""
return np.sqrt(np.einsum('...i, ...i', self.cart, self.cart))
def normalize(self):
"""normalize the vector.
"""
self.cart /= np.sqrt(np.einsum('...i, ...i', self.cart, self.cart))
return self
def cross(self, point):
"""cross product with another vector.
"""
return CCoordinate(np.cross(self.cart, point.cart))
def dot(self, point):
"""dot product with another vector.
"""
return np.inner(self.cart, point.cart)
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
return np.allclose(self.cart, other.cart)
def __str__(self):
return str(self.cart)
def __repr__(self):
return str(self.cart)
def __add__(self, other):
try:
return CCoordinate(self.cart + other.cart)
except AttributeError:
return CCoordinate(self.cart + np.array(other))
def __radd__(self, other):
return self.__add__(other)
def __mul__(self, other):
try:
return CCoordinate(self.cart * other.cart)
except AttributeError:
return CCoordinate(self.cart * np.array(other))
def __rmul__(self, other):
return self.__mul__(other)
def to_spherical(self):
return SCoordinate(np.arctan2(self.cart[1], self.cart[0]),
np.arcsin(self.cart[2]))
EPSILON = 0.0000001
def modpi(val, mod=np.pi):
"""Puts *val* between -*mod* and *mod*.
"""
return (val + mod) % (2 * mod) - mod
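# e.g. modpi(3 * np.pi / 2) returns -np.pi / 2: values are wrapped into
# the half-open interval [-mod, mod)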
class Arc(object):
"""An arc of the great circle between two points.
"""
start = None
end = None
def __init__(self, start, end):
self.start, self.end = start, end
    def __eq__(self, other):
        return self.start == other.start and self.end == other.end
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return (str(self.start) + " -> " + str(self.end))
def __repr__(self):
return (str(self.start) + " -> " + str(self.end))
def angle(self, other_arc):
"""Oriented angle between two arcs.
"""
if self.start == other_arc.start:
a__ = self.start
b__ = self.end
c__ = other_arc.end
elif self.start == other_arc.end:
a__ = self.start
b__ = self.end
c__ = other_arc.start
elif self.end == other_arc.end:
a__ = self.end
b__ = self.start
c__ = other_arc.start
elif self.end == other_arc.start:
a__ = self.end
b__ = self.start
c__ = other_arc.end
else:
raise ValueError("No common point in angle computation.")
ua_ = a__.cross2cart(b__)
ub_ = a__.cross2cart(c__)
val = ua_.dot(ub_) / (ua_.norm() * ub_.norm())
if abs(val - 1) < EPSILON:
angle = 0
elif abs(val + 1) < EPSILON:
angle = np.pi
else:
angle = np.arccos(val)
n__ = ua_.normalize()
if n__.dot(c__.to_cart()) > 0:
return -angle
else:
return angle
def intersections(self, other_arc):
"""Gives the two intersections of the greats circles defined by the
current arc and *other_arc*.
From http://williams.best.vwh.net/intersect.htm
"""
if self.end.lon - self.start.lon > np.pi:
self.end.lon -= 2 * np.pi
if other_arc.end.lon - other_arc.start.lon > np.pi:
other_arc.end.lon -= 2 * np.pi
if self.end.lon - self.start.lon < -np.pi:
self.end.lon += 2 * np.pi
if other_arc.end.lon - other_arc.start.lon < -np.pi:
other_arc.end.lon += 2 * np.pi
ea_ = self.start.cross2cart(self.end).normalize()
eb_ = other_arc.start.cross2cart(other_arc.end).normalize()
cross = ea_.cross(eb_)
lat = np.arctan2(cross.cart[2],
np.sqrt(cross.cart[0] ** 2 + cross.cart[1] ** 2))
lon = np.arctan2(cross.cart[1], cross.cart[0])
return (SCoordinate(lon, lat),
SCoordinate(modpi(lon + np.pi), -lat))
def intersects(self, other_arc):
"""Says if two arcs defined by the current arc and the *other_arc*
intersect. An arc is defined as the shortest tracks between two points.
"""
return bool(self.intersection(other_arc))
def intersection(self, other_arc):
"""Says where, if two arcs defined by the current arc and the
*other_arc* intersect. An arc is defined as the shortest tracks between
two points.
"""
if self == other_arc:
return None
# if (self.end == other_arc.start or
# self.end == other_arc.end or
# self.start == other_arc.start or
# self.start == other_arc.end):
# return None
for i in self.intersections(other_arc):
a__ = self.start
b__ = self.end
c__ = other_arc.start
d__ = other_arc.end
ab_ = a__.hdistance(b__)
cd_ = c__.hdistance(d__)
if(((i in (a__, b__)) or
(abs(a__.hdistance(i) + b__.hdistance(i) - ab_) < EPSILON)) and
((i in (c__, d__)) or
(abs(c__.hdistance(i) + d__.hdistance(i) - cd_) < EPSILON))):
return i
return None
def get_next_intersection(self, arcs, known_inter=None):
"""Get the next intersection between the current arc and *arcs*
"""
res = []
for arc in arcs:
inter = self.intersection(arc)
if (inter is not None and
inter != arc.end and
inter != self.end):
res.append((inter, arc))
def dist(args):
"""distance key.
"""
return self.start.distance(args[0])
take_next = False
for inter, arc in sorted(res, key=dist):
if known_inter is not None:
if known_inter == inter:
take_next = True
elif take_next:
return inter, arc
else:
return inter, arc
return None, None
class SphPolygon(pyresample.spherical.SphPolygon):
def draw(self, mapper, options, **more_options):
lons = np.rad2deg(self.lon.take(np.arange(len(self.lon) + 1),
mode="wrap"))
lats = np.rad2deg(self.lat.take(np.arange(len(self.lat) + 1),
mode="wrap"))
rx, ry = mapper(lons, lats)
mapper.plot(rx, ry, options, **more_options)
def get_twilight_poly(utctime):
"""Return a polygon enclosing the sunlit part of the globe at *utctime*.
"""
from pyorbital import astronomy
ra, dec = astronomy.sun_ra_dec(utctime)
lon = modpi(ra - astronomy.gmst(utctime))
lat = dec
vertices = np.zeros((4, 2))
vertices[0, :] = modpi(lon - np.pi / 2), 0
if lat <= 0:
vertices[1, :] = lon, np.pi / 2 + lat
vertices[3, :] = modpi(lon + np.pi), -(np.pi / 2 + lat)
else:
vertices[1, :] = modpi(lon + np.pi), np.pi / 2 - lat
vertices[3, :] = lon, -(np.pi / 2 - lat)
vertices[2, :] = modpi(lon + np.pi / 2), 0
return SphPolygon(vertices)
|
gpl-3.0
| 8,368,029,202,223,411,000
| 28.717742
| 81
| 0.520669
| false
| 3.540999
| false
| false
| false
|
valexandersaulys/airbnb_kaggle_contest
|
venv/lib/python3.4/site-packages/keras/preprocessing/image.py
|
1
|
7684
|
from __future__ import absolute_import
import numpy as np
import re
from scipy import ndimage
from scipy import linalg
from os import listdir
from os.path import isfile, join
import random, math
from six.moves import range
'''
Fairly basic set of tools for realtime data augmentation on image data.
Can easily be extended to include new transforms, new preprocessing methods, etc...
'''
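# A minimal usage sketch (hypothetical model and shapes; X is
# (n_samples, channels, height, width), y is (n_samples,)):
#   datagen = ImageDataGenerator(rotation_range=20., horizontal_flip=True)
#   datagen.fit(X)
#   for bX, by in datagen.flow(X, y, batch_size=32):
#       model.train_on_batch(bX, by)  # `model` is assumed to exist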
def random_rotation(x, rg, fill_mode="nearest", cval=0.):
angle = random.uniform(-rg, rg)
x = ndimage.interpolation.rotate(x, angle, axes=(1,2), reshape=False, mode=fill_mode, cval=cval)
return x
def random_shift(x, wrg, hrg, fill_mode="nearest", cval=0.):
crop_left_pixels = 0
crop_right_pixels = 0
crop_top_pixels = 0
crop_bottom_pixels = 0
original_w = x.shape[1]
original_h = x.shape[2]
if wrg:
crop = random.uniform(0., wrg)
split = random.uniform(0, 1)
crop_left_pixels = int(split*crop*x.shape[1])
crop_right_pixels = int((1-split)*crop*x.shape[1])
if hrg:
crop = random.uniform(0., hrg)
split = random.uniform(0, 1)
crop_top_pixels = int(split*crop*x.shape[2])
crop_bottom_pixels = int((1-split)*crop*x.shape[2])
x = ndimage.interpolation.shift(x, (0, crop_left_pixels, crop_top_pixels), mode=fill_mode, cval=cval)
return x
def horizontal_flip(x):
for i in range(x.shape[0]):
x[i] = np.fliplr(x[i])
return x
def vertical_flip(x):
for i in range(x.shape[0]):
x[i] = np.flipud(x[i])
return x
def random_barrel_transform(x, intensity):
# TODO
pass
def random_shear(x, intensity):
# TODO
pass
def random_channel_shift(x, rg):
# TODO
pass
def random_zoom(x, rg, fill_mode="nearest", cval=0.):
zoom_w = random.uniform(1.-rg, 1.)
zoom_h = random.uniform(1.-rg, 1.)
x = ndimage.interpolation.zoom(x, zoom=(1., zoom_w, zoom_h), mode=fill_mode, cval=cval)
return x # shape of result will be different from shape of input!
def array_to_img(x, scale=True):
from PIL import Image
x = x.transpose(1, 2, 0)
if scale:
x += max(-np.min(x), 0)
x /= np.max(x)
x *= 255
if x.shape[2] == 3:
# RGB
return Image.fromarray(x.astype("uint8"), "RGB")
else:
# grayscale
return Image.fromarray(x[:,:,0].astype("uint8"), "L")
def img_to_array(img):
x = np.asarray(img, dtype='float32')
if len(x.shape)==3:
# RGB: height, width, channel -> channel, height, width
x = x.transpose(2, 0, 1)
else:
# grayscale: height, width -> channel, height, width
x = x.reshape((1, x.shape[0], x.shape[1]))
return x
def load_img(path, grayscale=False):
from PIL import Image
img = Image.open(path)
if grayscale:
img = img.convert('L')
else: # Assure 3 channel even when loaded image is grayscale
img = img.convert('RGB')
return img
def list_pictures(directory, ext='jpg|jpeg|bmp|png'):
return [join(directory,f) for f in listdir(directory) \
if isfile(join(directory,f)) and re.match('([\w]+\.(?:' + ext + '))', f)]
class ImageDataGenerator(object):
'''
Generate minibatches with
realtime data augmentation.
'''
def __init__(self,
featurewise_center=True, # set input mean to 0 over the dataset
samplewise_center=False, # set each sample mean to 0
featurewise_std_normalization=True, # divide inputs by std of the dataset
samplewise_std_normalization=False, # divide each input by its std
zca_whitening=False, # apply ZCA whitening
rotation_range=0., # degrees (0 to 180)
width_shift_range=0., # fraction of total width
height_shift_range=0., # fraction of total height
horizontal_flip=False,
vertical_flip=False,
):
self.__dict__.update(locals())
self.mean = None
self.std = None
self.principal_components = None
def flow(self, X, y, batch_size=32, shuffle=False, seed=None, save_to_dir=None, save_prefix="", save_format="jpeg"):
if seed:
random.seed(seed)
if shuffle:
            seed = random.randint(1, 10 ** 7)  # randint needs an integer bound
np.random.seed(seed)
np.random.shuffle(X)
np.random.seed(seed)
np.random.shuffle(y)
nb_batch = int(math.ceil(float(X.shape[0])/batch_size))
for b in range(nb_batch):
batch_end = (b+1)*batch_size
if batch_end > X.shape[0]:
nb_samples = X.shape[0] - b*batch_size
else:
nb_samples = batch_size
bX = np.zeros(tuple([nb_samples]+list(X.shape)[1:]))
for i in range(nb_samples):
x = X[b*batch_size+i]
x = self.random_transform(x.astype("float32"))
x = self.standardize(x)
bX[i] = x
if save_to_dir:
for i in range(nb_samples):
img = array_to_img(bX[i], scale=True)
img.save(save_to_dir + "/" + save_prefix + "_" + str(i) + "." + save_format)
yield bX, y[b*batch_size:b*batch_size+nb_samples]
def standardize(self, x):
if self.featurewise_center:
x -= self.mean
if self.featurewise_std_normalization:
x /= self.std
if self.zca_whitening:
flatx = np.reshape(x, (x.shape[0]*x.shape[1]*x.shape[2]))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, (x.shape[0], x.shape[1], x.shape[2]))
if self.samplewise_center:
x -= np.mean(x)
if self.samplewise_std_normalization:
x /= np.std(x)
return x
def random_transform(self, x):
if self.rotation_range:
x = random_rotation(x, self.rotation_range)
if self.width_shift_range or self.height_shift_range:
x = random_shift(x, self.width_shift_range, self.height_shift_range)
if self.horizontal_flip:
if random.random() < 0.5:
x = horizontal_flip(x)
if self.vertical_flip:
if random.random() < 0.5:
x = vertical_flip(x)
# TODO:
# zoom
# barrel/fisheye
# shearing
# channel shifting
return x
def fit(self, X,
augment=False, # fit on randomly augmented samples
rounds=1, # if augment, how many augmentation passes over the data do we use
seed=None):
'''
Required for featurewise_center, featurewise_std_normalization and zca_whitening.
'''
X = np.copy(X)
if augment:
aX = np.zeros(tuple([rounds*X.shape[0]]+list(X.shape)[1:]))
for r in range(rounds):
                for i in range(X.shape[0]):
                    # random_transform expects an array, not a PIL image; the
                    # copy avoids the in-place flips mutating X between rounds
                    aX[i+r*X.shape[0]] = self.random_transform(X[i].copy())
X = aX
if self.featurewise_center:
self.mean = np.mean(X, axis=0)
X -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(X, axis=0)
X /= self.std
if self.zca_whitening:
flatX = np.reshape(X, (X.shape[0], X.shape[1]*X.shape[2]*X.shape[3]))
fudge = 10e-6
sigma = np.dot(flatX.T, flatX) / flatX.shape[1]
U, S, V = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(U, np.diag(1. / np.sqrt(S + fudge))), U.T)
|
gpl-2.0
| 4,823,282,614,734,855,000
| 30.235772
| 120
| 0.560776
| false
| 3.362801
| false
| false
| false
|
crosenth/csvpandas
|
csvpandas/subcommands/sample.py
|
1
|
2063
|
# This file is part of csvpandas
#
# csvpandas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# csvpandas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with csvpandas. If not, see <http://www.gnu.org/licenses/>.
"""Randomly sample rows of a csv file
"""
import logging
import time
from csvpandas import utils
log = logging.getLogger(__name__)
def build_parser(parser):
# required inputs
parser.add_argument(
'n',
type=float,
help='number of rows to sample. Can be a decimal fraction.')
parser.add_argument(
'--seed-in',
type=utils.opener('r'),
help=('file containing integer to generate random seed'))
parser.add_argument(
'--seed-out',
type=utils.opener('w'),
help=('file containing integer used to generate seed'))
parser.add_argument(
'--rest',
help='file to output rows not included in sample.')
parser.add_argument(
'--replace',
action='store_true',
help=('Sample with or without replacement.'))
def action(args):
if args.seed_in:
seed = int(args.seed_in.read().strip())
else:
seed = int(time.time())
df = args.csv
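    # n < 1 is treated as a fraction of the rows, n >= 1 as an absolute count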
if args.n < 1:
sample = df.sample(
frac=args.n, replace=args.replace, random_state=seed)
else:
sample = df.sample(
n=int(args.n), replace=args.replace, random_state=seed)
sample.to_csv(args.out)
if args.rest:
df[~df.index.isin(sample.index)].to_csv(args.rest)
if args.seed_out:
args.seed_out.write(str(seed))
|
gpl-3.0
| -6,485,512,836,406,878,000
| 27.260274
| 73
| 0.636452
| false
| 3.82037
| false
| false
| false
|
arpitprogressive/arpittest
|
pursuite/settings/staging.py
|
1
|
2573
|
# -*- coding: utf-8 -*-
"""
Settings for the staging environment
:copyright: (c) 2013 by Openlabs Technologies & Consulting (P) Limited
:license: see LICENSE for more details.
"""
#Flake8: noqa
from common import *
STATIC_ROOT = '/opt/pursuite/www/static'
MEDIA_ROOT = '/opt/pursuite/www/media'
ALLOWED_HOSTS = ['pursuite.openlabs.us']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'pursuite',
'USER': 'mysqluser',
'PASSWORD': 'mysqlpassword',
'HOST': 'pursuite.c6ga5pe5mdoq.ap-southeast-1.rds.amazonaws.com',
'PORT': '3306',
}
}
# Email Settings
EMAIL_USE_TLS = False
EMAIL_HOST = 'mailtrap.io'
EMAIL_PORT = 2525
EMAIL_HOST_USER = 'nasscom-5ae7880ac967ae5d'
EMAIL_HOST_PASSWORD = 'eb5073db7bdb7af1'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# Raven configuration
# Set your DSN value
RAVEN_CONFIG = {
'dsn': 'http://e542381309e640bebb79ae26123e52e5:' + \
'85869376ce9143a699ed05d07b552059@sentry.openlabs.co.in/22',
}
# Add amazon s3 as a storage mechanism
INSTALLED_APPS += ('storages', 's3_folder_storage',)
DEFAULT_FILE_STORAGE = 's3_folder_storage.s3.DefaultStorage'
DEFAULT_S3_PATH = "media"
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = "AKIAIBGU6ZPMYAHTFOWQ"
AWS_SECRET_ACCESS_KEY = "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"
AWS_STORAGE_BUCKET_NAME = "pursuite"
AWS_QUERYSTRING_AUTH = False
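# NOTE: this overrides the MEDIA_ROOT defined near the top of this file,
# since media files are now stored on S3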
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = '//s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
# Setup caching
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
'LOCATION': '127.0.0.1:11211',
}
}
|
bsd-3-clause
| -8,043,136,736,176,889,000
| 28.238636
| 74
| 0.643218
| false
| 3.041371
| false
| false
| false
|
wkarmistead/InvestmentAnalysisDashboard
|
pyBackend/businessLayer/ValueScoreObject.py
|
1
|
1740
|
'''
Created on Apr 5, 2014
@author: Walker Armistead
'''
class ValueScoreObject(object):
_intrinsicValueToSharePrice = 0
_pe_ratio = 0
_peg = 0
_currentPrice = 0
_debtToEquityRatio = 0
_currentAssets = 0
_currentLiabilities = 0
_dividendYield = 0
_earningsGrowth = 0
def setIntrinsicValueToSharePrice(self):
# TODO
self._intrinsicValueToSharePrice = 1
def getIntrinsicValueToSharePrice(self):
return self._intrinsicValueToSharePrice
def setPEratio(self):
# TODO
self._pe_ratio = 1
def getPEratio(self):
return self._pe_ratio
def setPEG(self):
# TODO
self._peg = 1
def getPEG(self):
return self._peg
def setCurrentPrice(self):
# TODO
self._currentPrice = 1
def getCurrentPrice(self):
return self._currentPrice
def setDebtToEquityRatio(self):
# TODO
self._debtToEquityRatio = 1
def getDebtToEquityRatio(self):
return self._debtToEquityRatio
def setCurrentAssets(self):
# TODO
self._currentAssets = 1
def getCurrentAssets(self):
return self._currentAssets
def setCurrentLiabilities(self):
# TODO
self._currentLiabilities = 1
def getCurrentLiabilities(self):
return self._currentLiabilities
def setDividendYield(self):
# TODO
self._dividendYield = 1
def getDividendYield(self):
return self._dividendYield
def setEarningsGrowth(self):
# TODO
self._earningsGrowth = 1
def getEarningsGrowth(self):
return self._earningsGrowth
|
gpl-2.0
| -3,799,703,472,309,803,000
| 20.493827
| 47
| 0.597126
| false
| 3.832599
| false
| false
| false
|
Frodox/buildbot
|
master/buildbot/test/unit/test_steps_shell.py
|
1
|
41551
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import re
import textwrap
from twisted.trial import unittest
from buildbot import config
from buildbot.process import properties
from buildbot.process import remotetransfer
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.steps import shell
from buildbot.test.fake.remotecommand import Expect
from buildbot.test.fake.remotecommand import ExpectRemoteRef
from buildbot.test.fake.remotecommand import ExpectShell
from buildbot.test.util import config as configmixin
from buildbot.test.util import steps
class TestShellCommandExecution(steps.BuildStepMixin, unittest.TestCase, configmixin.ConfigErrorsMixin):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def assertLegacySummary(self, step, running, done=None):
done = done or running
self.assertEqual(
(step._getLegacySummary(done=False),
step._getLegacySummary(done=True)),
(running, done))
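    # e.g. assertLegacySummary(step, u"'echo hello'") asserts that both the
    # in-progress and the completed summaries render to that same string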
def test_doStepIf_False(self):
self.setupStep(
shell.ShellCommand(command="echo hello", doStepIf=False))
self.expectOutcome(result=SKIPPED,
state_string=u"'echo hello' (skipped)")
return self.runStep()
def test_constructor_args_kwargs(self):
# this is an ugly way to define an API, but for now check that
# the RemoteCommand arguments are properly passed on
step = shell.ShellCommand(workdir='build', command="echo hello",
want_stdout=0, logEnviron=False)
self.assertEqual(step.remote_kwargs, dict(want_stdout=0,
logEnviron=False,
workdir='build',
usePTY=None))
def test_constructor_args_validity(self):
# this checks that an exception is raised for invalid arguments
self.assertRaisesConfigError(
"Invalid argument(s) passed to RemoteShellCommand: ",
lambda: shell.ShellCommand(workdir='build', command="echo Hello World",
wrongArg1=1, wrongArg2='two'))
def test_getLegacySummary_from_empty_command(self):
# this is more of a regression test for a potential failure, really
step = shell.ShellCommand(workdir='build', command=' ')
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_from_short_command(self):
step = shell.ShellCommand(workdir='build', command="true")
step.rendered = True
self.assertLegacySummary(step, u"'true'")
def test_getLegacySummary_from_short_command_list(self):
step = shell.ShellCommand(workdir='build', command=["true"])
step.rendered = True
self.assertLegacySummary(step, "'true'")
def test_getLegacySummary_from_med_command(self):
step = shell.ShellCommand(command="echo hello")
step.rendered = True
self.assertLegacySummary(step, u"'echo hello'")
def test_getLegacySummary_from_med_command_list(self):
step = shell.ShellCommand(command=["echo", "hello"])
step.rendered = True
self.assertLegacySummary(step, u"'echo hello'")
def test_getLegacySummary_from_long_command(self):
step = shell.ShellCommand(command="this is a long command")
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_long_command_list(self):
step = shell.ShellCommand(command="this is a long command".split())
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_list(self):
step = shell.ShellCommand(command=["this", ["is", "a"], "nested"])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_tuples(self):
step = shell.ShellCommand(command=["this", ("is", "a"), "nested"])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_list_empty(self):
step = shell.ShellCommand(command=["this", [], ["is", "a"], "nested"])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_list_deep(self):
step = shell.ShellCommand(command=[["this", [[["is", ["a"]]]]]])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_custom(self):
step = shell.ShellCommand(command="echo hello",
description=["echoing"],
descriptionDone=["echoed"])
step.rendered = True
self.assertLegacySummary(step, None) # handled by parent class
def test_getLegacySummary_with_suffix(self):
step = shell.ShellCommand(
command="echo hello", descriptionSuffix="suffix")
step.rendered = True
self.assertLegacySummary(step, u"'echo hello' suffix")
def test_getLegacySummary_unrendered_WithProperties(self):
step = shell.ShellCommand(command=properties.WithProperties(''))
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_unrendered_custom_new_style_class_renderable(self):
step = shell.ShellCommand(command=object())
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_unrendered_custom_old_style_class_renderable(self):
class C:
pass
step = shell.ShellCommand(command=C())
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_unrendered_WithProperties_list(self):
step = shell.ShellCommand(
command=['x', properties.WithProperties(''), 'y'])
step.rendered = True
self.assertLegacySummary(step, "'x y'")
def test_run_simple(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello"))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello')
+ 0
)
self.expectOutcome(result=SUCCESS, state_string="'echo hello'")
return self.runStep()
def test_run_list(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_nested_description(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=properties.FlattenList(
['trial', ['-b', '-B'], 'buildbot.test']),
descriptionDone=properties.FlattenList(
['test', ['done']]),
descriptionSuffix=properties.FlattenList(['suff', ['ix']])))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string='test done suff ix')
return self.runStep()
def test_run_nested_command(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=['trial', ['-b', '-B'], 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_nested_deeply_command(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=[['trial', ['-b', ['-B']]], 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_nested_empty_command(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=['trial', [], '-b', [], 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_env(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello"),
worker_env=dict(DEF='HERE'))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello',
env=dict(DEF='HERE'))
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_env_override(self):
self.setupStep(
shell.ShellCommand(workdir='build', env={'ABC': '123'},
command="echo hello"),
worker_env=dict(ABC='XXX', DEF='HERE'))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello',
env=dict(ABC='123', DEF='HERE'))
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_usePTY(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello",
usePTY=False))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello',
usePTY=False)
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_usePTY_old_worker(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello",
usePTY=True),
worker_version=dict(shell='1.1'))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello')
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_decodeRC(self, rc=1, results=WARNINGS, extra_text=" (warnings)"):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello",
decodeRC={1: WARNINGS}))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello')
+ rc
)
self.expectOutcome(
result=results, state_string="'echo hello'" + extra_text)
return self.runStep()
def test_run_decodeRC_defaults(self):
return self.test_run_decodeRC(2, FAILURE, extra_text=" (failure)")
def test_run_decodeRC_defaults_0_is_failure(self):
return self.test_run_decodeRC(0, FAILURE, extra_text=" (failure)")
def test_missing_command_error(self):
# this checks that an exception is raised for invalid arguments
self.assertRaisesConfigError(
"ShellCommand's `command' argument is not specified",
lambda: shell.ShellCommand())
class TreeSize(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_run_success(self):
self.setupStep(shell.TreeSize())
self.expectCommands(
ExpectShell(workdir='wkdir',
command=['du', '-s', '-k', '.'])
+ ExpectShell.log('stdio', stdout='9292 .\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="treesize 9292 KiB")
self.expectProperty('tree-size-KiB', 9292)
return self.runStep()
def test_run_misparsed(self):
self.setupStep(shell.TreeSize())
self.expectCommands(
ExpectShell(workdir='wkdir',
command=['du', '-s', '-k', '.'])
            + ExpectShell.log('stdio', stdout='abcdef\n')
+ 0
)
self.expectOutcome(result=WARNINGS,
state_string="treesize unknown (warnings)")
return self.runStep()
def test_run_failed(self):
self.setupStep(shell.TreeSize())
self.expectCommands(
ExpectShell(workdir='wkdir',
command=['du', '-s', '-k', '.'])
+ ExpectShell.log('stdio', stderr='abcdef\n')
+ 1
)
self.expectOutcome(result=FAILURE,
state_string="treesize unknown (failure)")
return self.runStep()
class SetPropertyFromCommand(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_constructor_conflict(self):
self.assertRaises(config.ConfigErrors, lambda:
shell.SetPropertyFromCommand(property='foo', extract_fn=lambda: None))
def test_run_property(self):
self.setupStep(
shell.SetPropertyFromCommand(property="res", command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='\n\nabcdef\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="property 'res' set")
self.expectProperty("res", "abcdef") # note: stripped
self.expectLogfile('property changes', r"res: " + repr(u'abcdef'))
return self.runStep()
def test_renderable_workdir(self):
self.setupStep(
shell.SetPropertyFromCommand(property="res", command="cmd", workdir=properties.Interpolate('wkdir')))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='\n\nabcdef\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="property 'res' set")
self.expectProperty("res", "abcdef") # note: stripped
self.expectLogfile('property changes', r"res: " + repr(u'abcdef'))
return self.runStep()
def test_run_property_no_strip(self):
self.setupStep(shell.SetPropertyFromCommand(property="res", command="cmd",
strip=False))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='\n\nabcdef\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="property 'res' set")
self.expectProperty("res", "\n\nabcdef\n")
self.expectLogfile('property changes', r"res: " + repr(u'\n\nabcdef\n'))
return self.runStep()
def test_run_failure(self):
self.setupStep(
shell.SetPropertyFromCommand(property="res", command="blarg"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="blarg")
+ ExpectShell.log('stdio', stderr='cannot blarg: File not found')
+ 1
)
self.expectOutcome(result=FAILURE,
state_string="'blarg' (failure)")
self.expectNoProperty("res")
return self.runStep()
def test_run_extract_fn(self):
def extract_fn(rc, stdout, stderr):
self.assertEqual(
(rc, stdout, stderr), (0, 'startend\n', 'STARTEND\n'))
return dict(a=1, b=2)
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='start', stderr='START')
+ ExpectShell.log('stdio', stdout='end')
+ ExpectShell.log('stdio', stderr='END')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="2 properties set")
self.expectLogfile('property changes', 'a: 1\nb: 2')
self.expectProperty("a", 1)
self.expectProperty("b", 2)
return self.runStep()
def test_run_extract_fn_cmdfail(self):
def extract_fn(rc, stdout, stderr):
self.assertEqual((rc, stdout, stderr), (3, '', ''))
return dict(a=1, b=2)
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ 3
)
# note that extract_fn *is* called anyway
self.expectOutcome(result=FAILURE,
state_string="2 properties set (failure)")
self.expectLogfile('property changes', 'a: 1\nb: 2')
return self.runStep()
def test_run_extract_fn_cmdfail_empty(self):
def extract_fn(rc, stdout, stderr):
self.assertEqual((rc, stdout, stderr), (3, '', ''))
return dict()
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ 3
)
# note that extract_fn *is* called anyway, but returns no properties
self.expectOutcome(result=FAILURE,
state_string="'cmd' (failure)")
return self.runStep()
def test_run_extract_fn_exception(self):
def extract_fn(rc, stdout, stderr):
raise RuntimeError("oh noes")
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ 0
)
# note that extract_fn *is* called anyway, but returns no properties
self.expectOutcome(result=EXCEPTION,
state_string="'cmd' (exception)")
d = self.runStep()
d.addCallback(lambda _:
self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1))
return d
def test_error_both_set(self):
"""
If both ``extract_fn`` and ``property`` are defined,
``SetPropertyFromCommand`` reports a config error.
"""
self.assertRaises(config.ConfigErrors,
shell.SetPropertyFromCommand, command=["echo", "value"], property="propname", extract_fn=lambda x: {"propname": "hello"})
def test_error_none_set(self):
"""
If neither ``extract_fn`` and ``property`` are defined,
``SetPropertyFromCommand`` reports a config error.
"""
self.assertRaises(config.ConfigErrors,
shell.SetPropertyFromCommand, command=["echo", "value"])
class PerlModuleTest(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_new_version_success(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
Test Summary Report
Result: PASS
Tests: 10 Failed: 0
Tests: 10 Failed: 0
Files=93, Tests=20"""))
+ 0
)
self.expectOutcome(result=SUCCESS, state_string='20 tests 20 passed')
return self.runStep()
def test_new_version_warnings(self):
self.setupStep(shell.PerlModuleTest(command="cmd",
warningPattern='^OHNOES'))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
Test Summary Report
-------------------
foo.pl (Wstat: 0 Tests: 10 Failed: 0)
Failed test: 0
OHNOES 1
OHNOES 2
Files=93, Tests=20, 0 wallclock secs ...
Result: PASS"""))
+ 0
)
self.expectOutcome(
result=WARNINGS,
state_string='20 tests 20 passed 2 warnings (warnings)')
return self.runStep()
def test_new_version_failed(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
foo.pl .. 1/4"""))
+ ExpectShell.log('stdio', stderr=textwrap.dedent("""\
# Failed test 2 in foo.pl at line 6
# foo.pl line 6 is: ok(0);"""))
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
foo.pl .. Failed 1/4 subtests
Test Summary Report
-------------------
foo.pl (Wstat: 0 Tests: 4 Failed: 1)
Failed test: 0
Files=1, Tests=4, 0 wallclock secs ( 0.06 usr 0.01 sys + 0.03 cusr 0.01 csys = 0.11 CPU)
Result: FAIL"""))
+ ExpectShell.log('stdio', stderr=textwrap.dedent("""\
Failed 1/1 test programs. 1/4 subtests failed."""))
+ 1
)
self.expectOutcome(result=FAILURE,
state_string='4 tests 3 passed 1 failed (failure)')
return self.runStep()
def test_old_version_success(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
All tests successful
Files=10, Tests=20, 100 wall blah blah"""))
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string='20 tests 20 passed')
return self.runStep()
def test_old_version_failed(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
Failed 1/1 test programs, 3/20 subtests failed."""))
+ 1
)
self.expectOutcome(result=FAILURE,
state_string='20 tests 17 passed 3 failed (failure)')
return self.runStep()
class SetPropertyDeprecation(unittest.TestCase):
"""
Tests for L{shell.SetProperty}
"""
def test_deprecated(self):
"""
Accessing L{shell.SetProperty} reports a deprecation error.
"""
shell.SetProperty
warnings = self.flushWarnings([self.test_deprecated])
self.assertEqual(len(warnings), 1)
self.assertIdentical(warnings[0]['category'], DeprecationWarning)
self.assertEqual(warnings[0]['message'],
"buildbot.steps.shell.SetProperty was deprecated in Buildbot 0.8.8: "
"It has been renamed to SetPropertyFromCommand"
)
class Configure(unittest.TestCase):
def test_class_attrs(self):
# nothing too exciting here, but at least make sure the class is
# present
step = shell.Configure()
self.assertEqual(step.command, ['./configure'])
class WarningCountingShellCommand(steps.BuildStepMixin, unittest.TestCase,
configmixin.ConfigErrorsMixin):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_no_warnings(self):
self.setupStep(shell.WarningCountingShellCommand(workdir='w',
command=['make']))
self.expectCommands(
ExpectShell(workdir='w',
command=["make"])
+ ExpectShell.log('stdio', stdout='blarg success!')
+ 0
)
self.expectOutcome(result=SUCCESS)
self.expectProperty("warnings-count", 0)
return self.runStep()
def test_default_pattern(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make']))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio',
stdout='normal: foo\nwarning: blarg!\n'
'also normal\nWARNING: blarg!\n')
+ 0
)
self.expectOutcome(result=WARNINGS)
self.expectProperty("warnings-count", 2)
self.expectLogfile("warnings (2)",
"warning: blarg!\nWARNING: blarg!\n")
return self.runStep()
def test_custom_pattern(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make'],
warningPattern=r"scary:.*"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio',
stdout='scary: foo\nwarning: bar\nscary: bar')
+ 0
)
self.expectOutcome(result=WARNINGS)
self.expectProperty("warnings-count", 2)
self.expectLogfile("warnings (2)", "scary: foo\nscary: bar\n")
return self.runStep()
def test_maxWarnCount(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make'],
maxWarnCount=9))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout='warning: noo!\n' * 10)
+ 0
)
self.expectOutcome(result=FAILURE)
self.expectProperty("warnings-count", 10)
return self.runStep()
def test_fail_with_warnings(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make']))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout='warning: I might fail')
+ 3
)
self.expectOutcome(result=FAILURE)
self.expectProperty("warnings-count", 1)
self.expectLogfile("warnings (1)", "warning: I might fail\n")
return self.runStep()
def do_test_suppressions(self, step, supps_file='', stdout='',
exp_warning_count=0, exp_warning_log='',
exp_exception=False):
self.setupStep(step)
# Invoke the expected callbacks for the suppression file upload. Note
# that this assumes all of the remote_* are synchronous, but can be
# easily adapted to suit if that changes (using inlineCallbacks)
def upload_behavior(command):
writer = command.args['writer']
writer.remote_write(supps_file)
writer.remote_close()
command.rc = 0
if supps_file is not None:
self.expectCommands(
# step will first get the remote suppressions file
Expect('uploadFile', dict(blocksize=32768, maxsize=None,
workersrc='supps', workdir='wkdir',
writer=ExpectRemoteRef(remotetransfer.StringFileWriter)))
+ Expect.behavior(upload_behavior),
# and then run the command
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout=stdout)
+ 0
)
else:
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout=stdout)
+ 0
)
if exp_exception:
self.expectOutcome(result=EXCEPTION,
state_string="'make' (exception)")
else:
if exp_warning_count != 0:
self.expectOutcome(result=WARNINGS,
state_string="'make' (warnings)")
self.expectLogfile("warnings (%d)" % exp_warning_count,
exp_warning_log)
else:
self.expectOutcome(result=SUCCESS,
state_string="'make'")
self.expectProperty("warnings-count", exp_warning_count)
return self.runStep()
def test_suppressions(self):
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps')
supps_file = textwrap.dedent("""\
# example suppressions file
amar.c : .*unused variable.*
holding.c : .*invalid access to non-static.*
""").strip()
stdout = textwrap.dedent("""\
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
amar.c: In function 'write_record':
amar.c:164: warning: unused variable 'x'
amar.c:164: warning: this should show up
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
holding.c: In function 'holding_thing':
holding.c:984: warning: invalid access to non-static 'y'
""")
exp_warning_log = textwrap.dedent("""\
amar.c:164: warning: this should show up
""")
return self.do_test_suppressions(step, supps_file, stdout, 1,
exp_warning_log)
def test_suppressions_directories(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor)
supps_file = textwrap.dedent("""\
# these should be suppressed:
amar-src/amar.c : XXX
.*/server-src/.* : AAA
# these should not, as the dirs do not match:
amar.c : YYY
server-src.* : BBB
""").strip()
# note that this uses the unicode smart-quotes that gcc loves so much
stdout = textwrap.dedent(u"""\
make: Entering directory \u2019amar-src\u2019
amar.c:164: warning: XXX
amar.c:165: warning: YYY
make: Leaving directory 'amar-src'
make: Entering directory "subdir"
make: Entering directory 'server-src'
make: Entering directory `one-more-dir`
holding.c:999: warning: BBB
holding.c:1000: warning: AAA
""")
exp_warning_log = textwrap.dedent("""\
amar.c:165: warning: YYY
holding.c:999: warning: BBB
""")
return self.do_test_suppressions(step, supps_file, stdout, 2,
exp_warning_log)
def test_suppressions_directories_custom(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor,
directoryEnterPattern="^IN: (.*)",
directoryLeavePattern="^OUT:")
supps_file = "dir1/dir2/abc.c : .*"
stdout = textwrap.dedent(u"""\
IN: dir1
IN: decoy
OUT: decoy
IN: dir2
abc.c:123: warning: hello
""")
return self.do_test_suppressions(step, supps_file, stdout, 0, '')
def test_suppressions_linenos(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor)
supps_file = "abc.c:.*:100-199\ndef.c:.*:22"
stdout = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:150: warning: unseen
def.c:22: warning: unseen
abc.c:200: warning: seen 2
""")
exp_warning_log = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:200: warning: seen 2
""")
return self.do_test_suppressions(step, supps_file, stdout, 2,
exp_warning_log)
def test_suppressions_warningExtractor_exc(self):
def warningExtractor(step, line, match):
raise RuntimeError("oh noes")
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor)
# need at least one supp to trigger warningExtractor
supps_file = 'x:y'
stdout = "abc.c:99: warning: seen 1"
d = self.do_test_suppressions(step, supps_file, stdout,
exp_exception=True)
d.addCallback(lambda _:
self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1))
return d
def test_suppressions_addSuppression(self):
# call addSuppression "manually" from a subclass
class MyWCSC(shell.WarningCountingShellCommand):
def start(self):
self.addSuppression([('.*', '.*unseen.*', None, None)])
return shell.WarningCountingShellCommand.start(self)
def warningExtractor(step, line, match):
return line.split(':', 2)
step = MyWCSC(command=['make'], suppressionFile='supps',
warningExtractor=warningExtractor)
stdout = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:150: warning: unseen
abc.c:200: warning: seen 2
""")
exp_warning_log = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:200: warning: seen 2
""")
return self.do_test_suppressions(step, '', stdout, 2,
exp_warning_log)
def test_suppressions_suppressionsParameter(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
supps = (
("abc.c", ".*", 100, 199),
("def.c", ".*", 22, 22),
)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionList=supps,
warningExtractor=warningExtractor)
stdout = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:150: warning: unseen
def.c:22: warning: unseen
abc.c:200: warning: seen 2
""")
exp_warning_log = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:200: warning: seen 2
""")
return self.do_test_suppressions(step, None, stdout, 2,
exp_warning_log)
def test_warnExtractFromRegexpGroups(self):
step = shell.WarningCountingShellCommand(command=['make'])
we = shell.WarningCountingShellCommand.warnExtractFromRegexpGroups
line, pat, exp_file, exp_lineNo, exp_text = \
('foo:123:text', '(.*):(.*):(.*)', 'foo', 123, 'text')
self.assertEqual(we(step, line, re.match(pat, line)),
(exp_file, exp_lineNo, exp_text))
def test_missing_command_error(self):
# this checks that an exception is raised for invalid arguments
self.assertRaisesConfigError(
"WarningCountingShellCommand's `command' argument is not "
"specified",
lambda: shell.WarningCountingShellCommand())
class Compile(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_class_args(self):
        # since this step is just a pre-configured WarningCountingShellCommand,
        # there's not much to test!
step = self.setupStep(shell.Compile())
self.assertEqual(step.name, "compile")
self.assertTrue(step.haltOnFailure)
self.assertTrue(step.flunkOnFailure)
self.assertEqual(step.description, ["compiling"])
self.assertEqual(step.descriptionDone, ["compile"])
self.assertEqual(step.command, ["make", "all"])
class Test(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
self.setUpBuildStep()
def tearDown(self):
self.tearDownBuildStep()
def test_setTestResults(self):
step = self.setupStep(shell.Test())
step.setTestResults(total=10, failed=3, passed=5, warnings=3)
self.assertEqual(step.statistics, {
'tests-total': 10,
'tests-failed': 3,
'tests-passed': 5,
'tests-warnings': 3,
})
# ensure that they're additive
step.setTestResults(total=1, failed=2, passed=3, warnings=4)
self.assertEqual(step.statistics, {
'tests-total': 11,
'tests-failed': 5,
'tests-passed': 8,
'tests-warnings': 7,
})
def test_describe_not_done(self):
step = self.setupStep(shell.Test())
step.rendered = True
self.assertEqual(step.describe(), None)
def test_describe_done(self):
step = self.setupStep(shell.Test())
step.rendered = True
step.statistics['tests-total'] = 93
step.statistics['tests-failed'] = 10
step.statistics['tests-passed'] = 20
step.statistics['tests-warnings'] = 30
self.assertEqual(step.describe(done=True),
['93 tests', '20 passed', '30 warnings', '10 failed'])
def test_describe_done_no_total(self):
step = self.setupStep(shell.Test())
step.rendered = True
step.statistics['tests-total'] = 0
step.statistics['tests-failed'] = 10
step.statistics['tests-passed'] = 20
step.statistics['tests-warnings'] = 30
# describe calculates 60 = 10+20+30
self.assertEqual(step.describe(done=True),
['60 tests', '20 passed', '30 warnings', '10 failed'])
|
gpl-2.0
| 6,412,873,024,448,098,000
| 38.952885
| 147
| 0.550047
| false
| 4.401589
| true
| false
| false
|
dequis/qtile
|
docs/conf.py
|
1
|
9459
|
# -*- coding: utf-8 -*-
#
# Qtile documentation build configuration file, created by
# sphinx-quickstart on Sat Feb 11 15:20:21 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
try:
# Python >=3.3
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
class Mock(MagicMock):
    # xcbq does a dir() on objects and pulls stuff out of them and tries to sort
    # the result. MagicMock has a bunch of stuff that can't be sorted, so let's
    # lie about dir().
def __dir__(self):
return []
MOCK_MODULES = [
'libqtile._ffi_pango',
'libqtile._ffi_xcursors',
'cairocffi',
'cffi',
'dateutil',
'dateutil.parser',
'dbus',
'dbus.mainloop.glib',
'iwlib',
'keyring',
'mpd',
'trollius',
'xcffib',
'xcffib.randr',
'xcffib.xfixes',
'xcffib.xinerama',
'xcffib.xproto',
'xdg.IconTheme',
]
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.coverage',
'sphinx.ext.graphviz',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'sphinxcontrib.seqdiag',
'sphinx_qtile',
'numpydoc',
]
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Qtile'
copyright = u'2008-2016, Aldo Cortesi and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.10.5'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'man']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {'index': 'index.html'}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Qtiledoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Qtile.tex', u'Qtile Documentation',
u'Aldo Cortesi', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('man/qtile', 'qtile', u'Qtile Documentation',
[u'Tycho Andersen'], 1),
('man/qsh', 'qsh', u'Qtile Documentation',
[u'Tycho Andersen'], 1),
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Qtile', u'Qtile Documentation',
u'Aldo Cortesi', 'Qtile', 'A hackable tiling window manager.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# only import and set the theme if we're building docs locally
if not os.environ.get('READTHEDOCS', None):
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
graphviz_dot_args = ['-Lg']
# A workaround for the responsive tables always having annoying scrollbars.
def setup(app):
app.add_stylesheet("no_scrollbars.css")
|
mit
| 5,405,494,409,025,351,000
| 29.61165
| 86
| 0.693625
| false
| 3.621363
| true
| false
| false
|
bolkedebruin/airflow
|
airflow/providers/amazon/aws/operators/datasync.py
|
1
|
16833
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Create, get, update, execute and delete an AWS DataSync Task.
"""
import random
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.amazon.aws.hooks.datasync import AWSDataSyncHook
from airflow.utils.decorators import apply_defaults
# pylint: disable=too-many-instance-attributes, too-many-arguments
class AWSDataSyncOperator(BaseOperator):
r"""Find, Create, Update, Execute and Delete AWS DataSync Tasks.
If ``do_xcom_push`` is True, then the DataSync TaskArn and TaskExecutionArn
which were executed will be pushed to an XCom.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:AWSDataSyncOperator`
.. note:: There may be 0, 1, or many existing DataSync Tasks defined in your AWS
environment. The default behavior is to create a new Task if there are 0, or
execute the Task if there was 1 Task, or fail if there were many Tasks.
:param str aws_conn_id: AWS connection to use.
:param int wait_interval_seconds: Time to wait between two
consecutive calls to check TaskExecution status.
:param str task_arn: AWS DataSync TaskArn to use. If None, then this operator will
attempt to either search for an existing Task or attempt to create a new Task.
:param str source_location_uri: Source location URI to search for. All DataSync
Tasks with a LocationArn with this URI will be considered.
Example: ``smb://server/subdir``
:param str destination_location_uri: Destination location URI to search for.
All DataSync Tasks with a LocationArn with this URI will be considered.
Example: ``s3://airflow_bucket/stuff``
:param bool allow_random_task_choice: If multiple Tasks match, one must be chosen to
execute. If allow_random_task_choice is True then a random one is chosen.
:param bool allow_random_location_choice: If multiple Locations match, one must be chosen
when creating a task. If allow_random_location_choice is True then a random one is chosen.
:param dict create_task_kwargs: If no suitable TaskArn is identified,
it will be created if ``create_task_kwargs`` is defined.
``create_task_kwargs`` is then used internally like this:
``boto3.create_task(**create_task_kwargs)``
Example: ``{'Name': 'xyz', 'Options': ..., 'Excludes': ..., 'Tags': ...}``
:param dict create_source_location_kwargs: If no suitable LocationArn is found,
a Location will be created if ``create_source_location_kwargs`` is defined.
``create_source_location_kwargs`` is then used internally like this:
``boto3.create_location_xyz(**create_source_location_kwargs)``
The xyz is determined from the prefix of source_location_uri, eg ``smb:/...`` or ``s3:/...``
Example: ``{'Subdirectory': ..., 'ServerHostname': ..., ...}``
:param dict create_destination_location_kwargs: If no suitable LocationArn is found,
a Location will be created if ``create_destination_location_kwargs`` is defined.
``create_destination_location_kwargs`` is used internally like this:
``boto3.create_location_xyz(**create_destination_location_kwargs)``
        The xyz is determined from the prefix of destination_location_uri, eg ``smb:/...`` or ``s3:/...``
Example: ``{'S3BucketArn': ..., 'S3Config': {'BucketAccessRoleArn': ...}, ...}``
:param dict update_task_kwargs: If a suitable TaskArn is found or created,
it will be updated if ``update_task_kwargs`` is defined.
``update_task_kwargs`` is used internally like this:
``boto3.update_task(TaskArn=task_arn, **update_task_kwargs)``
Example: ``{'Name': 'xyz', 'Options': ..., 'Excludes': ...}``
:param dict task_execution_kwargs: Additional kwargs passed directly when starting the
Task execution, used internally like this:
``boto3.start_task_execution(TaskArn=task_arn, **task_execution_kwargs)``
:param bool delete_task_after_execution: If True then the TaskArn which was executed
will be deleted from AWS DataSync on successful completion.
:raises AirflowException: If ``task_arn`` was not specified, or if
either ``source_location_uri`` or ``destination_location_uri`` were
not specified.
:raises AirflowException: If source or destination Location weren't found
and could not be created.
:raises AirflowException: If ``choose_task`` or ``choose_location`` fails.
:raises AirflowException: If Task creation, update, execution or delete fails.
"""
template_fields = (
"task_arn",
"source_location_uri",
"destination_location_uri",
"create_task_kwargs",
"create_source_location_kwargs",
"create_destination_location_kwargs",
"update_task_kwargs",
"task_execution_kwargs"
)
ui_color = "#44b5e2"
@apply_defaults
def __init__(
self,
aws_conn_id="aws_default",
wait_interval_seconds=5,
task_arn=None,
source_location_uri=None,
destination_location_uri=None,
allow_random_task_choice=False,
allow_random_location_choice=False,
create_task_kwargs=None,
create_source_location_kwargs=None,
create_destination_location_kwargs=None,
update_task_kwargs=None,
task_execution_kwargs=None,
delete_task_after_execution=False,
*args,
**kwargs
):
super().__init__(*args, **kwargs)
# Assignments
self.aws_conn_id = aws_conn_id
self.wait_interval_seconds = wait_interval_seconds
self.task_arn = task_arn
self.source_location_uri = source_location_uri
self.destination_location_uri = destination_location_uri
self.allow_random_task_choice = allow_random_task_choice
self.allow_random_location_choice = allow_random_location_choice
self.create_task_kwargs = create_task_kwargs if create_task_kwargs else dict()
self.create_source_location_kwargs = dict()
if create_source_location_kwargs:
self.create_source_location_kwargs = create_source_location_kwargs
self.create_destination_location_kwargs = dict()
if create_destination_location_kwargs:
self.create_destination_location_kwargs = create_destination_location_kwargs
self.update_task_kwargs = update_task_kwargs if update_task_kwargs else dict()
self.task_execution_kwargs = task_execution_kwargs if task_execution_kwargs else dict()
self.delete_task_after_execution = delete_task_after_execution
# Validations
valid = False
if self.task_arn:
valid = True
if self.source_location_uri and self.destination_location_uri:
valid = True
if not valid:
raise AirflowException(
"Either specify task_arn or both source_location_uri and destination_location_uri. "
"task_arn={0} source_location_uri={1} destination_location_uri={2}".format(
task_arn, source_location_uri, destination_location_uri
)
)
# Others
self.hook = None
# Candidates - these are found in AWS as possible things
# for us to use
self.candidate_source_location_arns = None
self.candidate_destination_location_arns = None
self.candidate_task_arns = None
# Actuals
self.source_location_arn = None
self.destination_location_arn = None
self.task_execution_arn = None
def get_hook(self):
"""Create and return AWSDataSyncHook.
:return AWSDataSyncHook: An AWSDataSyncHook instance.
"""
if not self.hook:
self.hook = AWSDataSyncHook(
aws_conn_id=self.aws_conn_id,
wait_interval_seconds=self.wait_interval_seconds,
)
return self.hook
def execute(self, context):
# If task_arn was not specified then try to
# find 0, 1 or many candidate DataSync Tasks to run
if not self.task_arn:
self._get_tasks_and_locations()
# If some were found, identify which one to run
if self.candidate_task_arns:
self.task_arn = self.choose_task(
self.candidate_task_arns)
            # If we couldn't find one then try to create one
if not self.task_arn and self.create_task_kwargs:
self._create_datasync_task()
if not self.task_arn:
raise AirflowException(
"DataSync TaskArn could not be identified or created.")
self.log.info("Using DataSync TaskArn %s", self.task_arn)
# Update the DataSync Task
if self.update_task_kwargs:
self._update_datasync_task()
# Execute the DataSync Task
self._execute_datasync_task()
if not self.task_execution_arn:
raise AirflowException("Nothing was executed")
# Delete the DataSyncTask
if self.delete_task_after_execution:
self._delete_datasync_task()
return {"TaskArn": self.task_arn, "TaskExecutionArn": self.task_execution_arn}
def _get_tasks_and_locations(self):
"""Find existing DataSync Task based on source and dest Locations."""
hook = self.get_hook()
self.candidate_source_location_arns = self._get_location_arns(
self.source_location_uri
)
self.candidate_destination_location_arns = self._get_location_arns(
self.destination_location_uri
)
if not self.candidate_source_location_arns:
self.log.info("No matching source Locations")
return
if not self.candidate_destination_location_arns:
self.log.info("No matching destination Locations")
return
self.log.info("Finding DataSync TaskArns that have these LocationArns")
self.candidate_task_arns = hook.get_task_arns_for_location_arns(
self.candidate_source_location_arns,
self.candidate_destination_location_arns,
)
self.log.info("Found candidate DataSync TaskArns %s",
self.candidate_task_arns)
def choose_task(self, task_arn_list):
"""Select 1 DataSync TaskArn from a list"""
if not task_arn_list:
return None
if len(task_arn_list) == 1:
return task_arn_list[0]
if self.allow_random_task_choice:
            # Items are unordered so we don't want to just take
            # the [0] one as it implies ordered items were received
            # from AWS and might lead to confusion. Rather explicitly
            # choose a random one
return random.choice(task_arn_list)
raise AirflowException(
"Unable to choose a Task from {}".format(task_arn_list))
def choose_location(self, location_arn_list):
"""Select 1 DataSync LocationArn from a list"""
if not location_arn_list:
return None
if len(location_arn_list) == 1:
return location_arn_list[0]
if self.allow_random_location_choice:
            # Items are unordered so we don't want to just take
            # the [0] one as it implies ordered items were received
            # from AWS and might lead to confusion. Rather explicitly
            # choose a random one
return random.choice(location_arn_list)
raise AirflowException(
"Unable to choose a Location from {}".format(location_arn_list))
def _create_datasync_task(self):
"""Create a AWS DataSyncTask."""
hook = self.get_hook()
self.source_location_arn = self.choose_location(
self.candidate_source_location_arns
)
if not self.source_location_arn and self.create_source_location_kwargs:
self.log.info('Attempting to create source Location')
self.source_location_arn = hook.create_location(
self.source_location_uri, **self.create_source_location_kwargs
)
if not self.source_location_arn:
raise AirflowException(
"Unable to determine source LocationArn."
" Does a suitable DataSync Location exist?")
self.destination_location_arn = self.choose_location(
self.candidate_destination_location_arns
)
if not self.destination_location_arn and self.create_destination_location_kwargs:
self.log.info('Attempting to create destination Location')
self.destination_location_arn = hook.create_location(
self.destination_location_uri, **self.create_destination_location_kwargs
)
if not self.destination_location_arn:
raise AirflowException(
"Unable to determine destination LocationArn."
" Does a suitable DataSync Location exist?")
self.log.info("Creating a Task.")
self.task_arn = hook.create_task(
self.source_location_arn,
self.destination_location_arn,
**self.create_task_kwargs
)
if not self.task_arn:
raise AirflowException("Task could not be created")
self.log.info("Created a Task with TaskArn %s", self.task_arn)
return self.task_arn
def _update_datasync_task(self):
"""Update a AWS DataSyncTask."""
hook = self.get_hook()
self.log.info("Updating TaskArn %s", self.task_arn)
hook.update_task(self.task_arn, **self.update_task_kwargs)
self.log.info("Updated TaskArn %s", self.task_arn)
return self.task_arn
def _execute_datasync_task(self):
"""Create and monitor an AWSDataSync TaskExecution for a Task."""
hook = self.get_hook()
# Create a task execution:
self.log.info("Starting execution for TaskArn %s", self.task_arn)
self.task_execution_arn = hook.start_task_execution(
self.task_arn, **self.task_execution_kwargs)
self.log.info("Started TaskExecutionArn %s", self.task_execution_arn)
# Wait for task execution to complete
self.log.info("Waiting for TaskExecutionArn %s",
self.task_execution_arn)
result = hook.wait_for_task_execution(self.task_execution_arn)
self.log.info("Completed TaskExecutionArn %s", self.task_execution_arn)
task_execution_description = hook.describe_task_execution(
task_execution_arn=self.task_execution_arn
)
self.log.info("task_execution_description=%s",
task_execution_description)
if not result:
raise AirflowException(
"Failed TaskExecutionArn %s" % self.task_execution_arn
)
return self.task_execution_arn
def on_kill(self):
"""Cancel the submitted DataSync task."""
hook = self.get_hook()
if self.task_execution_arn:
self.log.info("Cancelling TaskExecutionArn %s",
self.task_execution_arn)
hook.cancel_task_execution(
task_execution_arn=self.task_execution_arn)
self.log.info("Cancelled TaskExecutionArn %s",
self.task_execution_arn)
def _delete_datasync_task(self):
"""Deletes an AWS DataSync Task."""
hook = self.get_hook()
# Delete task:
self.log.info("Deleting Task with TaskArn %s", self.task_arn)
hook.delete_task(self.task_arn)
self.log.info("Task Deleted")
return self.task_arn
def _get_location_arns(self, location_uri):
location_arns = self.get_hook().get_location_arns(
location_uri
)
self.log.info(
"Found LocationArns %s for LocationUri %s", location_arns, location_uri
)
return location_arns
|
apache-2.0
| 2,900,169,435,753,842,000
| 42.608808
| 104
| 0.641775
| false
| 4.166584
| false
| false
| false
|
mivanov/editkit
|
editkit/ckeditor/tests/tests.py
|
1
|
6356
|
# coding=utf-8
import os
from django.test import TestCase
from django.db import models
from django.core import exceptions
from ckeditor.models import XHTMLField
from ckeditor.models import XMLField
from ckeditor.models import HTML5Field
from ckeditor.models import HTML5FragmentField
from ckeditor.widgets import CKEditor
class XHTMLModel(models.Model):
html = XHTMLField()
class HTML5Model(models.Model):
html = HTML5Field()
class HTML5FragmentModel(models.Model):
html = HTML5FragmentField()
class RestrictedHTML5FragmentModel(models.Model):
html = HTML5FragmentField(allowed_elements=['a', 'span'],
allowed_attributes_map={'a': ['href'],
'span': ['style']},
allowed_styles_map={'span': ['width']},
rename_elements={'div': 'span'})
class XHTMLFieldTest(TestCase):
def test_html_schema_set(self):
html = XHTMLField()
self.assertTrue(isinstance(html, XMLField))
self.assertEquals(html.schema_path, XHTMLField.schema_path)
def test_html_schema_exists(self):
self.assertTrue(os.path.exists(XHTMLField.schema_path))
def test_valid_html(self):
m = XHTMLModel()
m.html = ('<html><head><title>Lorem</title></head>'
'<body>Ipsum</body></html>')
m.clean_fields()
def test_invalid_html(self):
m = XHTMLModel()
m.html = 'invalid html'
self.assertRaises(exceptions.ValidationError, m.clean_fields)
class HTML5FieldTest(TestCase):
def test_sanitize(self):
m = HTML5Model()
m.html = '<html><head/><body><script/></body></html>'
m.clean_fields()
self.assertEquals(m.html,
('<html><head/><body><html><head/><body>'
'<script/></body></html></body></html>')
)
class HTML5FragmentFieldTest(TestCase):
def test_sanitize(self):
m = HTML5FragmentModel()
m.html = '<script/>'
m.clean_fields()
self.assertEquals(m.html, '<script/>')
def test_allowed_elements(self):
m = RestrictedHTML5FragmentModel()
m.html = '<p><a href="#top">This link</a> takes you to the top</p>'
m.clean_fields()
self.assertEquals(m.html, ('<p><a href="#top">This link</a>'
' takes you to the top</p>'))
def test_allowed_attributes(self):
m = RestrictedHTML5FragmentModel()
m.html = ('<span style="width: 300px;" class="myclass">'
'Click <a href="www.example.com" target="_top">here</a>'
'</span>')
m.clean_fields()
self.assertEquals(m.html, ('<span style="width: 300px;">'
'Click <a href="www.example.com">here</a></span>'))
def test_allowed_styles(self):
m = RestrictedHTML5FragmentModel()
m.html = ('<span style="width: 300px; height:100px">Blah</span>')
m.clean_fields()
self.assertEquals(m.html, '<span style="width: 300px;">Blah</span>')
def test_rename_elements(self):
m = RestrictedHTML5FragmentModel()
m.html = '<div>This should be a span</div>'
m.clean_fields()
self.assertEquals(m.html, '<span>This should be a span</span>')
def test_empty_a_element(self):
m = HTML5FragmentModel()
m.html = '<p><a name="test"></a></p>'
m.clean_fields()
self.assertEquals(m.html, '<p><a name="test"></a></p>')
def test_nbsp(self):
''' We store UTF-8, so should be stored as \xc2\xa0 (2 chars)
'''
m = HTML5FragmentModel()
m.html = '<p> </p> '
m.clean_fields()
self.assertEquals(m.html, '<p>\xc2\xa0</p>\xc2\xa0')
def test_charset(self):
m = HTML5FragmentModel()
m.html = '<p>Привет</p>'
m.clean_fields()
self.assertEquals(m.html, '<p>Привет</p>')
class CKEditorWidgetTest(TestCase):
def test_default_config(self):
ck = CKEditor()
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\n'
'CKEDITOR.basePath = \'/static/js/ckeditor/\';\n'
"CKEDITOR.replace('id_ck');\n"
'-->\n'
'</script>\n')
self.assertEqual(rendered, expected)
def test_config_based_on_allowed_tags(self):
ck = CKEditor(allowed_tags=['a'])
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\nCKEDITOR.basePath = \'/static/js/ckeditor/\';'
'\nCKEDITOR.replace(\'id_ck\', {"toolbar": [["Link",'
' "Unlink", "Anchor"]]});\n-->\n</script>\n'
)
self.assertEqual(rendered, expected)
def test_custom_config(self):
ck = CKEditor(ck_config={'extraPlugins': 'myThing'})
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\nCKEDITOR.basePath = \'/static/js/ckeditor/\';\n'
'CKEDITOR.replace(\'id_ck\', {"extraPlugins": "myThing"});'
'\n-->\n</script>\n')
self.assertEqual(rendered, expected)
class CustomCKEditor(CKEditor):
def get_extra_plugins(self):
plugins = ["myPlugin1", "myPlugin2"]
return ','.join(plugins)
class CustomCKEditorTest(TestCase):
def test_config(self):
ck = CustomCKEditor()
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\nCKEDITOR.basePath = \'/static/js/ckeditor/\';\n'
"CKEDITOR.replace('id_ck', "
'{"extraPlugins": "myPlugin1,myPlugin2"});\n'
'-->\n'
'</script>\n')
self.assertEqual(rendered, expected)
|
gpl-2.0
| 8,648,883,103,976,645,000
| 35.045455
| 79
| 0.54918
| false
| 3.654378
| true
| false
| false
|
uannight/reposan
|
plugin.video.tvalacarta/channels/vuittv.py
|
1
|
7934
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# tvalacarta - XBMC Plugin
# Canal para 8TV
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#------------------------------------------------------------
import re
import sys
import os
import traceback
import urllib2
from core import logger
from core import config
from core import scrapertools
from core.item import Item
from servers import servertools
__channel__ = "vuittv"
__category__ = "R"
__type__ = "generic"
__title__ = "8TV"
__language__ = "ES"
__creationdate__ = "20160928"
DEBUG = config.get_setting("debug")
URL_LIVE = "rtmp://streaming.8tv.cat:1935/8TV?videoId=3998198240001&lineUpId=&pubId=1589608506001&playerId=1982328835001&affiliateId=/8aldia-directe?videoId=3998198240001&lineUpId=&pubId=1589608506001&playerId=1982328835001&affiliateId="
def isGeneric():
return True
def mainlist(item):
logger.info("tvalacarta.channels.8tv mainlist")
itemlist = []
itemlist.append( Item(channel=__channel__, title="8tv directe", action="play", url = URL_LIVE, folder=False) )
itemlist.append( Item(channel=__channel__, title="8aldia Inici (destacat)", action="loadprogram", url = "http://www.8tv.cat/8aldia/", folder=True) )
itemlist.append( Item(channel=__channel__, title="8aldia Reflexió Cuní", action="loadprogram", url = "http://www.8tv.cat/8aldia/reflexio-de-josep-cuni/", folder=True) )
itemlist.append( Item(channel=__channel__, title="8aldia Seccions", action="loadsections", folder=True) )
itemlist.append( Item(channel=__channel__, title="8aldia Programes sencers", action="loadprogram", url = "http://www.8tv.cat/8aldia/programes-sencers/", folder=True) )
return itemlist
# Load sections
def loadsections(item):
logger.info("tvalacarta.channels.8tv loadsection")
itemlist = []
itemlist.append( Item(channel=__channel__, title="Entrevistes", action="loadprogram", url="http://www.8tv.cat/8aldia/category/entrevistes/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Pilar Rahola", action="loadprogram", url="http://www.8tv.cat/8aldia/category/pilar-rahola/", folder=True) )
itemlist.append( Item(channel=__channel__, title="La Tertúlia", action="loadprogram", url="http://www.8tv.cat/8aldia/category/tertulia/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Opinió", action="loadprogram", url="http://www.8tv.cat/8aldia/category/opinio/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Política", action="loadprogram", url="http://www.8tv.cat/8aldia/category/politica/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Internacional", action="loadprogram", url="http://www.8tv.cat/8aldia/category/internacional/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Economia", action="loadprogram", url="http://www.8tv.cat/8aldia/category/economia-videos/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Societat", action="loadprogram", url="http://www.8tv.cat/8aldia/category/societat/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Successos", action="loadprogram", url="http://www.8tv.cat/8aldia/category/successos/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Tribunals", action="loadprogram", url="http://www.8tv.cat/8aldia/category/tribunals/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Cultura", action="loadprogram", url="http://www.8tv.cat/8aldia/category/cultura/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Tecnologia", action="loadprogram", url="http://www.8tv.cat/8aldia/category/tecnologia/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Esports", action="loadprogram", url="http://www.8tv.cat/8aldia/category/esports/", folder=True) )
return itemlist
# Load the programs of a section
def loadprogram(item):
logger.info("tvalacarta.channels.8tv loadprogram")
return pager(item.url, item.channel, item)
# Generate the list of videos with a pager
def pager(url, channel=__channel__, item=None):
logger.info("tvalacarta.channels.8tv pager")
try:
itemlist = []
data = scrapertools.downloadpage(url)
data = data.replace("\\\"","")
#logger.error("DATA: " + str(data))
# --------------------------------------------------------
        # Extract the videos (article tag)
# --------------------------------------------------------
patron = '<article class="entry-box entry-video (.*?)</article>'
matches = re.compile(patron,re.DOTALL).findall(data)
if len(matches) > 0:
for chapter in matches:
try:
#
# Ex: <h2 class="entry-title"><a href="http://www.8tv.cat/8aldia/videos/el-proxim-11-de-setembre-marcat-pel-referendum/" title="El pròxim 11 de Setembre, marcat pel referèndum">
#
patron = ' src="([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(chapter)
scrapedthumbnail = matches[0]
patron = '<h2 class="entry-title"><a href="([^"]+)" title="([^"]+)">'
matches = re.compile(patron,re.DOTALL).findall(chapter)
urlprog = matches[0][0]
scrapedtitle = matches[0][1]
date = scrapertools.find_single_match(chapter, '<time datetime="[^"]+" pubdate class="updated">(.*?) - [^<]+</time>')
                    # Add to the list
itemlist.append(
Item(channel=channel,
action = 'play',
title = date.strip() + " - " + str(scrapedtitle).replace(""", "'").replace("“", "").replace("”", "").replace('“', "").replace('”', "").strip(),
url = urlprog,
thumbnail = scrapedthumbnail,
server = channel,
folder = False
)
)
except:
for line in sys.exc_info():
logger.error("tvalacarta.channels.8tv pager ERROR1: %s" % line)
        # Extract the pager for the next page
patron = "<a class="+"'"+"no_bg"+"'"+' href="([^"]+)">Següent</a>'
urlpager = re.compile(patron,re.DOTALL).findall(data)
#logger.info("URLPAGER: %s" % urlpager[0])
if len(urlpager)>0 :
next_page_item = Item(channel=channel,
action = 'loadprogram',
title = '>> Següent',
url = urlpager[0],
thumbnail = ''
)
itemlist.append(next_page_item)
except:
for line in sys.exc_info():
logger.error("tvalacarta.channels.8tv pager ERROR2: %s" % line)
return itemlist
# Play the item with its own server
def play(item):
item.server = __channel__;
itemlist = [item]
return itemlist
# Automatic channel check: this function must return "True" if everything is OK in the channel.
def test():
    # Check that the first option has something
items = mainlist(Item())
section = loadsections(items[1])
if len(section)==0:
return False,"No hay videos en portada"
section = loadprogram(items[4])
if len(section)==0:
return False,"No hay videos en 8aldia"
return True,""
|
gpl-2.0
| 5,244,642,386,767,732,000
| 45.274854
| 238
| 0.573739
| false
| 3.653278
| false
| false
| false
|
almc/nao_basic
|
scripts/greedy_rrt.py
|
1
|
8935
|
#!/usr/bin/env python
import numpy as np
import sys, random, math, pygame
from pygame.locals import *
from math import sqrt,cos,sin,atan2
# constants
DIM, DIM_X, DIM_Y = 2, 640, 480 # range 0 to 640, and 0 to 480
WINSIZE = [DIM_X, DIM_Y] # could be any range for each var
# parameters
NUMNODES = 2000
DELTA_IN = np.array([0, 0]) # 15
DELTA_UT = np.array([20, 20]) # 20
PEN_DIST_EXP = 5
PEN_DIST_OBS = 8
EXP_DIST = 5
GOAL_DIST = 10
# EPSILON_X = 20.0
# EPSILON_Y = 15.0
# DELTA_X = 10.0
# DELTA_Y = 7.0
# PEN_EXPANSION = 0.9
# PEN_INVALID = 0.1
# PATCH_VALID = np.array([])
# PATCH_INVALID = np.array([])
def dist(p1, p2):
return np.linalg.norm(p1[0:2]-p2[0:2])
def raw_score(node1, node2):
return 1.0/dist(node1, node2)
def generate_random():
rand = np.array([np.random.uniform(DELTA_IN[0], DELTA_UT[0], 1)[0],
np.random.uniform(DELTA_IN[1], DELTA_UT[1], 1)[0], 0])
sign = np.random.uniform(0, 1, 2)
for r_i, r in enumerate(rand[:-1]):
if sign[r_i] >= 0.5:
rand[r_i] *= -1
return rand
# def avg_score(nn, nodes):
# box = pygame.Rect(nn[0], nn[1], int(DELTA_X), int(DELTA_Y))
# delta_n_counter = 0
# delta_s_counter = 0
# for n_i, n in enumerate(nodes):
# if box.collidepoint(n[0], n[1]): # check delta neighbourhood
# delta_n_counter += 1
# delta_s_counter += n[2]
# print ">>>>>>>>>>>>>>FOUND THIS MANY NODES IN THE NEIGHBOURHOOD", delta_n_counter
# if delta_n_counter >= 1: # could be a parameter
# avg_score = delta_s_counter / delta_n_counter
# return 0.5*nn[2] + 0.5*avg_score # could be two parameters
# else:
# return nn[2]
def check_collision(node, obsta):
for o in obsta: # check node collision with every obstacle
if (o.collidepoint(node[0], node[1])):
return True # return as soon as one of them is true
return False # if no obstacle collides return false
def check_unexplored(nn, nodes):
for n_i, n in enumerate(nodes):
d = dist(nn, n)
if d < EXP_DIST:
# print "explored node, rejected node, distance", n, nn, d
return False
return True
def check_goal(nn, goals):
if dist(nn, goals) < GOAL_DIST: return True
else: return False
def draw_obsta(screen, obsta, color):
for o in obsta:
pygame.draw.rect(screen, color, o, 0)
def draw_nodes(screen, nodes, color, node_radius):
for n in nodes:
pygame.draw.circle(screen, color, (int(n[0]),int(n[1])), node_radius, 2)
def draw_goals(screen, goals, color, node_radius):
for g in goals:
pygame.draw.circle(screen, color, (int(g[0]),int(g[1])), node_radius, 2)
def penalize_nodes(nn, nodes, pen_dist):
for n_i, n in enumerate(nodes):
d = dist(nn, n)
if d < pen_dist:
# print "penalizing node", n_i, nodes[n_i][2]
nodes[n_i][2] *= d/pen_dist
# print "score after penalization", nodes[n_i][2]
return nodes
def organize_nodes(nodes):
# nodes = np.sort(nodes, axis=0)[::-1]
temp = nodes[nodes[:,2].argsort()]
nodes = temp[::-1]
# print "nodes organized\n", nodes
return nodes
def insert_node(nn, nodes):
flag_inserted = False
for p_i, p in enumerate(nodes):
if nn[2] > p[2]: # if avg_score of new node is higher than p_i
# print "adding node", nn
nodes = np.insert(nodes, [p_i], nn, 0)
flag_inserted = True
break
if flag_inserted == False:
# print "score is worse than others"
# print nodes
# print nn
nodes = np.append(nodes, [nn], 0)
return nodes
def generate_newnode(nodes):
rand = generate_random()
nn = nodes[0] + rand # new node, carefull with third coord still has prev raw_score
# nodes = penalize_nodes(nn, nodes, PEN_DIST_EXP) # penalize nodes closer than PEN_DIST
# nodes = organize_nodes(nodes)
scaling_factor = 1.01
while not check_unexplored(nn, nodes):
# penalize node for being close to other nodes
# print "penalizing node", nodes[0][2]
nodes = penalize_nodes(nn, nodes, PEN_DIST_EXP) # penalize nodes closer than PEN_DIST
nodes = organize_nodes(nodes)
if (scaling_factor >= 2):
scaling_factor = 2
rand = scaling_factor*generate_random()
nn = nodes[0] + rand
scaling_factor *= scaling_factor
# nodes[0][2] *= 0.5
# nodes = organize_nodes(nodes)
# print "score after penalization", nodes[0][2]
# generate another node
# rand = generate_random()
# nn = nodes[0] + rand # new node, carefull with third coord still has prev raw_score
# print "newnode", nn
return nn, nodes
## MAIN ##
def main():
np.set_printoptions(precision=10, suppress=True)
pygame.init()
pygame.display.set_caption('RRT mod - Alejandro Marzinotto - June 2014')
screen = pygame.display.set_mode(WINSIZE)
white = (255, 240, 200)
black = ( 20, 20, 40)
red = (192, 0, 0)
green = ( 0, 192, 0)
blue = ( 0, 0, 192)
yellow = (192, 192, 0)
node_radius = 3
screen.fill(black)
# variables
goals = np.array([[DIM_X/2.0,DIM_Y*1.0/12.0, 1.0]]) # goal importance
nodes = np.array([[DIM_X/2.0,DIM_Y*3.0/ 4.0, 0.0]]) # node raw_score
nodes[0][2] = raw_score(nodes[0], goals[0])
x, y = DIM_X*1.0/8.0, DIM_Y*1.0/8.0
# obsta = [pygame.Rect(x, y, 380, 100), pygame.Rect(x, y, 100, 250)]
# obsta = [pygame.Rect(x, y, 380, 100), pygame.Rect(x, y, 100, 250),
# pygame.Rect(x+400, y, 100, 250)]
# obsta = [pygame.Rect(x, y, 390, 100), pygame.Rect(x, y, 100, 250),
# pygame.Rect(x+400, y, 100, 250)]
obsta = [pygame.Rect(100, 100, 200, 40),
pygame.Rect(310, 100, 200, 40),
pygame.Rect(200, 200, 300, 40),
pygame.Rect(100, 300, 300, 40)]
invalid = check_collision(nodes[0], obsta)
assert invalid == False, "The initial pose is in a collision state"
draw_obsta(screen, obsta, red)
draw_nodes(screen, nodes, white, node_radius)
draw_goals(screen, goals, green, node_radius)
pygame.display.update()
a=raw_input()
for i in range(NUMNODES): # assumes that node[0] has the highest score
# print ">>>>>>>>>>expansion number:", i, "node:", nodes[0]
# a=raw_input()
[nn, nodes] = generate_newnode(nodes)
# raw_input()
        # check that the generated node does not fall inside a patched area;
        # if it falls inside, penalize all the nodes involved, sort the nodes,
        # extract the best one and expand again (until we get out of the problem)
cn_i, cn = 0, nodes[0] # closest node hypothesis
for p_i, p in enumerate(nodes):
if dist(p, nn) < dist(cn, nn):
cn_i, cn = p_i, p
# print "closest node found:", cn, cn_i
# print "nodes before check_collision\n", nodes
if check_collision(nn, obsta):
# print ">>> in-valid node, penalizing"
nodes = penalize_nodes(nn, nodes, PEN_DIST_OBS) # penalize nodes closer than PEN_DIST
nodes = organize_nodes(nodes)
pygame.draw.circle(screen, blue, (int(nn[0]),int(nn[1])), node_radius, 2)
# print "nodes after check_collision\n", nodes
# a=raw_input()
else:
# print ">>> valid node, scoring"
# print goals[0]
# print nn
# print "***************"
nn[2] = raw_score(nn, goals[0]) # overwriting raw_score of node who generated it
# nn[2] = avg_score(nn, nodes) # overwriting raw_score of the goal heuristic
pygame.draw.line(screen, white, nodes[0][0:2], nn[0:2])
nodes = insert_node(nn, nodes)
# print "new node list:", nodes
pygame.draw.circle(screen, yellow, (int(nn[0]),int(nn[1])), node_radius, 2)
# print "nodes after check_collision\n", nodes
pygame.display.update()
if check_goal(nn, goals[0]):
print "found path, finishing"
break
# for i in range(NUMNODES):
# rand = random.random()*640.0, random.random()*480.0
# nn = nodes[0]
# for p in nodes:
# if dist(p,rand) < dist(nn,rand):
# nn = p
# newnode = step_from_to(nn,rand)
# nodes.append(newnode)
# pygame.draw.line(screen,white,nn,newnode)
# pygame.display.update()
# #print i, " ", nodes
# for e in pygame.event.get():
# if e.type == QUIT or (e.type == KEYUP and e.key == K_ESCAPE):
# sys.exit("Leaving because you requested it.")
# if python says run, then we should run
if __name__ == '__main__':
main()
|
gpl-2.0
| -7,462,432,645,537,458,000
| 33.498069
| 97
| 0.566536
| false
| 3.044293
| false
| false
| false
|
prateeksan/python-design-patterns
|
behavioural/chain_of_responsibility.py
|
1
|
3358
|
""" The Chain of Responsibility Pattern
Notes:
The Chain of Responsibility pattern allows the client programmer to dynamically
create a recursive chain of objects - each of which tries to fulfill a
'responsibility' (usually represented by a method call). If an object in the
chain is unable to fulfill it, the request propagates to the next level of the
chain until it can be fulfilled. This chain is usually implemented as a linked
list (but it can be implemented using other iterable structures).
In the following example, we simulate a service that searches for a job
candidate from several pools of candidates. The pools are categorized by
geographical clusters (local/regional/global) and we assume that the user of
this service wants to find the nearest candidate (in the smallest cluster)
that meets all requirements. The chain of responsibility will thus be a linked
list of the pools which the user will recursively check (smallest to largest)
in order to find a good candidate.
"""
class AbstractPool:
"""The interface for the pool classes. All pools inherit from this."""
candidates = []
def __init__(self, successor_pool=None):
"""Note how each pool object can store a pointer to a successor_pool.
If no such pointer is assigned, we assume that is the last pool in the
chain.
"""
self._successor = successor_pool
def get_match(self, params):
"""If a match is found in the pool of candidates, the candidate is
returned, else the responsibility is propagated to the next pool in the
chain.
"""
match = self._find(params)
if match:
return match
elif self._successor:
return self._successor.get_match(params)
def _find(self, params):
"""Returns the first matching candidate in the pool if a match is found.
The exact implementation of this method is irrelevant to the concept of
the pattern. It may also be implemented differently for each pool.
"""
for candidate in self.__class__.candidates:
if all(key in candidate.items() for key in params.items()):
print("> Match found in {}:".format(self.__class__.__name__))
return candidate
print("> No match found in {}.".format(self.__class__.__name__))
class LocalPool(AbstractPool):
candidates = [
{"id": 12, "type": "developer", "level": "intermediate"},
{"id": 21, "type": "analyst", "level": "junior"}
]
class RegionalPool(AbstractPool):
candidates = [
{"id": 123, "type": "project_manager", "level": "intermediate"},
{"id": 321, "type": "designer", "level": "intermediate"}
]
class GlobalPool(AbstractPool):
candidates = [
# The following candidate is the only one that matches the needs.
{"id": 1234, "type": "developer", "level": "senior"},
{"id": 4321, "type": "designer", "level": "senior"}
]
if __name__ == "__main__":
# Setting up recursive propagation in this order: local > regional > global.
global_pool = GlobalPool()
regional_pool = RegionalPool(global_pool)
local_pool = LocalPool(regional_pool)
print("Searching for a senior developer in the pools chain:")
print(local_pool.get_match({"type": "developer", "level": "senior"}))
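    # Tracing the chain above, the output should be roughly (exact dict
    # formatting depends on the Python version):
    #
    #   Searching for a senior developer in the pools chain:
    #   > No match found in LocalPool.
    #   > No match found in RegionalPool.
    #   > Match found in GlobalPool:
    #   {'id': 1234, 'type': 'developer', 'level': 'senior'}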
|
mit
| -5,352,763,461,890,104,000
| 36.741573
| 80
| 0.662001
| false
| 4.234552
| false
| false
| false
|
ewiger/runstat
|
python/runstat.py
|
1
|
3779
|
'''
Implementation of running variance/standard deviation.
The MIT License (MIT)
Copyright (c) 2015 Yauhen Yakimovich <eugeny.yakimovitch@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from math import sqrt
from decimal import Decimal
__version__ = '1.0.0'
class RunStat(object):
'''
Class for keeping the running statistics of a continuously sampled one-
or multi-dimensional process/signal.
'''
def __init__(self, dtype=Decimal):
self.dtype = dtype
# (running) mean
self.m = dtype(0)
# counter of updates
self.n = 0
# (running) sum of the recurrence form:
# M(2,n) = M(2,n-1) + (x - mean(x_n))*(x - mean(x_{n-1}))
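        # (this is Welford's online recurrence for the second central moment)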
self.M2 = dtype(0)
# max/min
self.max_value = dtype(0)
self.min_value = dtype(0)
# weight of items seen
# TODO: implement this
self.total_weight = dtype(1)
@property
def mean(self):
return self.m
@property
def var(self):
        if self.n > 1:
return self.M2 / (self.n - 1)
return self.M2 / self.n
@property
def std(self):
return self.dtype(sqrt(self.var))
@property
def min(self):
return self.min_value
@property
def max(self):
return self.max_value
@property
def count(self):
return self.n
def reset(self):
self.n = 0
self.is_started = False
def update(self, value, weight=None):
'''
Update running stats with weight equals 1 by default.
'''
# Initialize.
value = self.dtype(value)
self.n = self.n + 1
if self.n <= 1:
# First update.
self.m = value
self.M2 = self.dtype(0)
self.total_weight = self.dtype(0)
self.n = self.dtype(1)
return
# Update max/min.
if value > self.max_value:
self.max_value = value
elif value < self.min_value:
self.min_value = value
# No update.
delta = value - self.m
if delta == 0:
return
# Update running moments.
self.m = self.m + delta / self.n
if weight is None:
# Ignore weight
if self.n > 1:
self.M2 = self.M2 + delta * (value - self.m)
return
# Weight-aware implementation.
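        # (West's weighted incremental update: the mean moves by delta scaled
        # with this sample's share of the new total weight)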
weight = self.dtype(weight)
next_weight = self.total_weight + weight
R = self.dtype(delta * (weight / next_weight))
self.m = self.m + R
if self.total_weight > 0:
self.M2 = self.M2 + self.total_weight * delta * R
self.total_weight = next_weight
def __call__(self, *args, **kwds):
self.update(*args, **kwds)
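# A minimal usage sketch (hypothetical sample values):
#
#   rs = RunStat()
#   for x in (2, 4, 4, 4, 5, 5, 7, 9):
#       rs.update(x)
#   # rs.mean ≈ 5, M2 ≈ 32, so rs.var ≈ 32/7 (sample variance, Decimal-rounded)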
|
mit
| -5,212,956,734,936,615,000
| 27.413534
| 78
| 0.604922
| false
| 3.899897
| false
| false
| false
|
Rezzie/Batcher
|
generators/g_cycle.py
|
1
|
2394
|
#!/usr/bin/env python
# Copyright (c) 2011, The University of York
# All rights reserved.
# Author(s):
# James Arnold <jarnie@gmail.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the The University of York nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF YORK BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from generator import Generator
class cycle(Generator):
def __init__(self, choices, initial_offset=0):
assert len(choices) > 0
assert initial_offset <= len(choices)
self.__choices = choices
self.__offset = initial_offset
def Generate(self):
"""Return the next item in the list, wrapping around if necessary."""
while True:
yield self.__choices[self.__offset]
self.__offset += 1
if self.__offset >= len(self.__choices):
self.__offset = 0
if __name__ == "__main__":
from generator import PrintExamples
options = {'choices': ["James", "Ralph"],
'initial_offset': 0}
gen = cycle(**options)
PrintExamples(gen)
|
bsd-3-clause
| 8,469,629,835,516,487,000
| 39.576271
| 80
| 0.702172
| false
| 4.508475
| false
| false
| false
|
mekkablue/Glyphs-Scripts
|
Color Fonts/Merge Suffixed Glyphs into Color Layers.py
|
1
|
10746
|
#MenuTitle: Merge Suffixed Glyphs into Color Layers
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Takes the master layer of suffixed glyphs (e.g., x.shadow, x.body, x.front) and turns them in a specified order into CPAL Color layers of the unsuffixed glyph (e.g., Color 1, Color 0, Color 2 of x).
"""
import vanilla
from copy import copy as copy
from AppKit import NSFont
class MergeSuffixedGlyphsIntoColorLayers( object ):
def __init__( self ):
# Window 'self.w':
windowWidth = 400
windowHeight = 300
windowWidthResize = 1000 # user can resize width by this value
windowHeightResize = 1000 # user can resize height by this value
self.w = vanilla.FloatingWindow(
( windowWidth, windowHeight ), # default window size
"Merge Suffixed Glyphs into Color Layers", # window title
minSize = ( windowWidth, windowHeight ), # minimum size (for resizing)
maxSize = ( windowWidth + windowWidthResize, windowHeight + windowHeightResize ), # maximum size (for resizing)
autosaveName = "com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.mainwindow" # stores last window position and size
)
# UI elements:
linePos, inset, lineHeight = 12, 15, 22
self.w.descriptionText = vanilla.TextBox( (inset, linePos+2, -inset, 14), "Merge suffixed glyphs into the following color indexes:", sizeStyle='small', selectable=True )
linePos += lineHeight
self.w.indexToSuffix = vanilla.TextEditor( (2, linePos, -2, -110), "# Syntax: CPAL index = glyph name suffix\n# list them in chronological order (bottom-up)\n# use hashtags for comments\n0=.shadow\n2=.body\n1=.front", callback=self.SavePreferences, checksSpelling=False )
#self.w.indexToSuffix.getNSTextEditor().setToolTip_("Syntax: colorindex=.suffix, use hashtags for comments. List them in chronological order (bottom-up). Example:\n0=.shadow\n2=.body\n1=.front")
self.w.indexToSuffix.getNSScrollView().setHasVerticalScroller_(1)
self.w.indexToSuffix.getNSScrollView().setHasHorizontalScroller_(1)
self.w.indexToSuffix.getNSScrollView().setRulersVisible_(0)
legibleFont = NSFont.legibileFontOfSize_(NSFont.systemFontSize())
textView = self.w.indexToSuffix.getNSTextView()
textView.setFont_(legibleFont)
textView.setHorizontallyResizable_(1)
textView.setVerticallyResizable_(1)
textView.setAutomaticDataDetectionEnabled_(1)
textView.setAutomaticLinkDetectionEnabled_(1)
textView.setDisplaysLinkToolTips_(1)
textSize = textView.minSize()
textSize.width = 1000
textView.setMinSize_(textSize)
linePos = -105
self.w.disableSuffixedGlyphs = vanilla.CheckBox( (inset, linePos-1, -inset, 20), "Deactivate export for glyphs with listed suffixes", value=True, callback=self.SavePreferences, sizeStyle='small' )
linePos += lineHeight
self.w.deletePreexistingColorLayers = vanilla.CheckBox( (inset, linePos-1, -inset, 20), "Delete preexisting Color layers in target glyphs", value=True, callback=self.SavePreferences, sizeStyle='small' )
linePos += lineHeight
self.w.processCompleteFont = vanilla.CheckBox( (inset, linePos-1, -inset, 20), "Process complete font (otherwise only add into selected glyphs)", value=False, callback=self.SavePreferences, sizeStyle='small' )
linePos += lineHeight
# Run Button:
self.w.runButton = vanilla.Button( (-80-inset, -20-inset, -inset, -inset), "Merge", sizeStyle='regular', callback=self.MergeSuffixedGlyphsIntoColorLayersMain )
# self.w.setDefaultButton( self.w.runButton )
# Load Settings:
if not self.LoadPreferences():
print("Note: 'Merge Suffixed Glyphs into Color Layers' could not load preferences. Will resort to defaults")
# Open window and focus on it:
self.w.open()
self.w.makeKey()
def SavePreferences( self, sender=None ):
try:
# write current settings into prefs:
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix"] = self.w.indexToSuffix.get()
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs"] = self.w.disableSuffixedGlyphs.get()
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers"] = self.w.deletePreexistingColorLayers.get()
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont"] = self.w.processCompleteFont.get()
return True
except:
import traceback
print(traceback.format_exc())
return False
def LoadPreferences( self ):
try:
# register defaults:
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix", "# CPAL index, followed by ‘=’, followed by glyph name suffix\n# list them in chronological order, i.e., bottom-up\n# use hashtags for comments\n0=.shadow\n2=.body\n1=.front")
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs", 1)
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers", 1)
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont", 1)
# load previously written prefs:
self.w.indexToSuffix.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix"] )
self.w.disableSuffixedGlyphs.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs"] )
self.w.deletePreexistingColorLayers.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers"] )
self.w.processCompleteFont.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont"] )
return True
except:
import traceback
print(traceback.format_exc())
return False
def nameContainsAnyOfTheseSuffixes(self, glyphName, allSuffixes):
for suffix in allSuffixes:
if suffix in glyphName:
return True
return False
def allSuffixes(self, suffixMapping):
suffixes = []
for mapping in suffixMapping:
suffix = mapping[0]
suffixes.append(suffix)
return set(suffixes)
def parseIndexSuffixList(self, textEntry):
suffixMapping = []
for line in textEntry.splitlines():
if "#" in line:
hashtagOffset = line.find("#")
line = line[:hashtagOffset]
if "=" in line:
items = line.split("=")
colorIndex = int(items[0].strip())
suffix = items[1].strip().split()[0]
suffixMapping.append((suffix, colorIndex))
return suffixMapping
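	# Example (illustrative): the two entry lines "0=.shadow" and "2=.body"
	# parse to [('.shadow', 0), ('.body', 2)].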
def MergeSuffixedGlyphsIntoColorLayersMain( self, sender=None ):
try:
# clear macro window log:
Glyphs.clearLog()
# update settings to the latest user input:
if not self.SavePreferences():
print("Note: 'Merge Suffixed Glyphs into Color Layers' could not write preferences.")
thisFont = Glyphs.font # frontmost font
if thisFont is None:
Message(title="No Font Open", message="The script requires a font. Open a font and run the script again.", OKButton=None)
else:
print("Merge Suffixed Glyphs into Color Layers Report for %s" % thisFont.familyName)
if thisFont.filepath:
print(thisFont.filepath)
else:
print("⚠️ The font file has not been saved yet.")
print()
indexToSuffix = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix"]
disableSuffixedGlyphs = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs"]
deletePreexistingColorLayers = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers"]
processCompleteFont = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont"]
suffixMapping = self.parseIndexSuffixList(indexToSuffix)
if not suffixMapping:
Message(title="Merge Error", message="No mapping could be derived from your text entry. Stick to the colorindex=.suffix syntax.", OKButton=None)
else:
allSuffixes = self.allSuffixes(suffixMapping)
if processCompleteFont:
glyphsToProcess = [g for g in thisFont.glyphs if not self.nameContainsAnyOfTheseSuffixes(g.name, allSuffixes)]
else:
glyphsToProcess = [l.parent for l in thisFont.selectedLayers if not self.nameContainsAnyOfTheseSuffixes(l.parent.name, allSuffixes)]
for targetGlyph in glyphsToProcess:
glyphName = targetGlyph.name
print("🔠 %s" % glyphName)
if deletePreexistingColorLayers:
print("⚠️ Deleting preexisting Color layers...")
for i in reversed(range(len(targetGlyph.layers))):
potentialColorLayer = targetGlyph.layers[i]
if not potentialColorLayer.isMasterLayer:
deleteThisLayer = False
try:
# GLYPHS 3
if potentialColorLayer.isColorPaletteLayer():
deleteThisLayer = True
except:
# GLYPHS 2
if potentialColorLayer.name.startswith("Color "):
deleteThisLayer = True
if deleteThisLayer:
print(" 🚫 Removing Color layer ‘%s’" % potentialColorLayer.name)
currentLayerID = potentialColorLayer.layerId
try:
# GLYPHS 3
targetGlyph.removeLayerForId_(currentLayerID)
except:
# GLYPHS 2
targetGlyph.removeLayerForKey_(currentLayerID)
for mapping in suffixMapping:
suffix = mapping[0]
colorIndex = mapping[1]
suffixGlyphName = "%s%s"%(glyphName, suffix)
suffixGlyph = thisFont.glyphs[suffixGlyphName]
if not suffixGlyph:
print("⚠️ Not found: %s"%suffixGlyphName)
else:
print("✅ Merging %s into CPAL Color %i" % (suffixGlyphName, colorIndex))
if suffixGlyph.export and disableSuffixedGlyphs:
suffixGlyph.export = False
for master in thisFont.masters:
mID = master.id
colorLayer = copy(suffixGlyph.layers[mID])
colorLayer.associatedMasterId = mID
try:
# GLYPHS 3
colorLayer.setColorPaletteLayer_(1)
colorLayer.setAttribute_forKey_(colorIndex, "colorPalette")
except:
# GLYPHS 2
colorLayer.name = "Color %i" % colorIndex
targetGlyph.layers.append(colorLayer)
# self.w.close() # delete if you want window to stay open
# Final report:
Glyphs.showNotification(
"%s: Done" % (thisFont.familyName),
"Merge Suffixed Glyphs into Color Layers is finished. Details in Macro Window",
)
print("\nDone.")
except Exception as e:
# brings macro window to front and reports error:
Glyphs.showMacroWindow()
print("Merge Suffixed Glyphs into Color Layers Error: %s" % e)
import traceback
print(traceback.format_exc())
MergeSuffixedGlyphsIntoColorLayers()
|
apache-2.0
| -2,315,050,609,437,465,600
| 44.227848
| 273
| 0.727654
| false
| 3.366206
| false
| false
| false
|
april1452/craigslove
|
generate_post.py
|
1
|
3348
|
import string
import sys
import argparse
import csv
import random
# import scipy
# import scipy.stats
# from scipy.stats import rv_discrete
from collections import defaultdict
# locations = ['chicago', 'dallas', 'denver', 'jacksonville', 'lasvegas', 'losangeles', 'miami', 'minneapolis', 'newyork', 'oklahomacity', 'providence', 'seattle', 'sfbay', 'washingtondc']
locations = ['providence']
posttype = ['m4m', 'm4w', 'msr', 'stp', 'w4m', 'w4w']
def main():
for location in locations:
with open('posts/' + location + '/w4w.csv') as f:
reader = csv.reader(f)
entries = []
for row in reader:
entry = row[8]
entries.append(entry)
transition_matrix = calculate_transition_matrix(entries)
# sampling_mechanism = defaultdict(lambda: None)
# for w in transition_matrix:
# xk = []
# pk = []
# for w_prime in transition_matrix:
# xk.append(w_prime)
# pk.append(transition_matrix[w][w_prime])
# sampling_mechanism[w] = rv_discrete(values=(xk, pk))
prev_word = None
next_word = '*START*'
generated_post = ''
while next_word != '*END*':
prev_word = next_word
random_probability = random.random() # between 0 and 1
cumulative_probability = 0.0
# next_word = sampling_mechanism[prev_word].rvs()
for w_prime in transition_matrix[prev_word]:
cumulative_probability += transition_matrix[prev_word][w_prime]
if cumulative_probability > random_probability:
next_word = w_prime
break
if len(next_word) > 1 or next_word in (string.punctuation + 'i' + 'a'):
generated_post += next_word + ' '
print generated_post[:-7]
def tokenize(words):
index = 0
while index < len(words):
        if len(words[index]) > 1 and words[index][-1] in string.punctuation:
            # Split trailing punctuation into its own token; save it before
            # truncating the word so the right character is inserted.
            punct = words[index][-1]
            words[index] = words[index][:-1]
            words.insert(index + 1, punct)
        index += 1
return words
def calculate_transition_matrix(training_data): # training_data is a list of strings
transition_matrix = defaultdict(lambda: defaultdict(float))
for post in training_data:
words = tokenize(post.lower().split())
transition_matrix['*START*'][words[0]] += 1.0
for i in range(len(words) - 1):
transition_matrix[words[i]][words[i + 1]] += 1.0
transition_matrix[words[len(words) - 1]]['*END*'] += 1.0
for w in transition_matrix:
unigram_count = 0
for w_prime in transition_matrix[w]:
unigram_count += transition_matrix[w][w_prime]
for w_prime in transition_matrix[w]:
transition_matrix[w][w_prime] = transition_matrix[w][w_prime] / unigram_count
return transition_matrix
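# Illustrative example (added, not part of the original script): for the
# one-post corpus ["hi there"], calculate_transition_matrix returns
#   {'*START*': {'hi': 1.0}, 'hi': {'there': 1.0}, 'there': {'*END*': 1.0}}
# after normalization; the sampling loop in main() then walks this chain
# from '*START*' until it draws '*END*'.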
if __name__ == "__main__":
main()
|
mit
| -8,948,929,042,323,856,000
| 32.828283
| 188
| 0.512843
| false
| 4.174564
| false
| false
| false
|
NoahPeeters/pymathexpressions
|
mathexpressions/lib.py
|
1
|
2465
|
__author__ = 'Noah Peeters'
import math
const = {
'pi': [math.pi, '\pi'],
'e': [math.e, 'e']
}
float_chars = [str(x) for x in range(10)]
float_chars.append('.')
operators = ['+', '-', '*', '/', '^', '%', '=']
operators_priorities = [0, 0, 1, 1, 2, 1, 0]
operators_latex = ['%s+%s', '%s-%s', '%s*%s', '\\frac{%s}{%s}', '%s^{%s}', '%s\\mod%s', '%s=%s']
max_priority = 2
def use_operator(o, para1, para2):
if o == '+':
return para1 + para2
elif o == '-':
return para1 - para2
elif o == '*':
return para1 * para2
elif o == '/':
return para1 / para2
elif o == '^':
return math.pow(para1, para2)
    elif o == '%':
        return math.fmod(para1, para2)
elif o == '=':
return None
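# Usage sketch (illustrative):
#   use_operator('+', 2, 3)   # -> 5
#   use_operator('^', 2, 10)  # -> 1024.0 (via math.pow)
#   use_operator('%', 7, 3)   # -> 1.0 (via math.fmod)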
def latex_operator(o, para1, para2):
index = operators.index(o)
return operators_latex[index] % (para1, para2)
def get_priority(p):
return operators_priorities[operators.index(p.name)]
def is_number(name):
if len(name) == 0:
return False
for i in name:
if i not in float_chars:
return False
return True
functions = {
'acos': '\\arccos(%s)',
'acosh': None,
'asin': '\\arcsin(%s)',
'asinh': None,
'atan': '\\arctan(%s)',
'atan2': None,
'atanh': None,
'ceil': None,
'copysign': None,
'cos': '\\cos(%s)',
'cosh': '\\cosh(%s)',
'degrees': None,
'erf': None,
'erfc': None,
'exp': 'e^{%s}',
'expm1': 'e^{%s}-1',
'abs': '|%s|',
'factorial': '%s!',
'floor': None,
'fmod': '%s\\mod%s',
'gamma': None,
'hypot': '\\sqrt(%s^{2}+%s^{2})',
'ldexp': None,
'lgamma': None,
'log': '\\log(%s)',
'log10': '\\log_10(%s)',
    'logn': None,  # no direct LaTeX template; handled specially in get_function_latex
'pow': '%s^{%s}',
'radians': None,
'round': None,
'roundn': None,
'sin': '\\sin(%s)',
'sinh': '\\sinh(%s)',
'sqrt': '\\sqrt(%s)',
'tan': '\\tan(%s)',
'tanh': '\\tanh(%s)'
}
def use_function(name, para):
if name == 'logn':
return math.log(para[0], para[1])
elif name == 'round':
return round(para[0])
elif name == 'roundn':
return round(para[0], para[1])
elif name == 'abs':
return math.fabs(para[0])
else:
return getattr(math, name)(*para)
def get_function_latex(name, para):
if name == 'logn':
return '\\log_%s(%s)' % (para[1], para[0])
else:
return functions[name] % tuple(para)
|
mit
| 4,933,296,013,770,592,000
| 21.017857
| 96
| 0.481136
| false
| 2.927553
| false
| false
| false
|
boreq/botnet
|
botnet/modules/builtin/meta.py
|
1
|
2191
|
from ...signals import _request_list_commands, _list_commands
from .. import BaseResponder
from ..lib import parse_command
class Meta(BaseResponder):
"""Displays basic info about this bot."""
ignore_help = False
ibip_repo = 'https://github.com/boreq/botnet'
def __init__(self, config):
super().__init__(config)
_list_commands.connect(self.on_list_commands)
def command_git(self, msg):
"""Alias for the IBIP identification.
Syntax: git
"""
self.ibip(msg)
@parse_command([('command_names', '*')])
def command_help(self, msg, args):
"""Sends a list of commands. If COMMAND is specified sends detailed help
in a private message.
Syntax: help [COMMAND ...]
"""
if len(args.command_names) == 0:
_request_list_commands.send(self, msg=msg, admin=False)
else:
super().command_help(msg)
@parse_command([('command_names', '*')])
def admin_command_help(self, msg, args):
"""Sends a list of commands. If COMMAND is specified sends detailed help
in a private message.
Syntax: help [COMMAND ...]
"""
if len(args.command_names) == 0:
_request_list_commands.send(self, msg=msg, admin=True)
else:
super().command_help(msg)
def ibip(self, msg):
"""Makes the bot identify itself as defined by The IRC Bot
Identification Protocol Standard.
"""
text = 'Reporting in! [Python] {ibip_repo} try {prefix}help'.format(
ibip_repo=self.ibip_repo,
prefix=self.config_get('command_prefix')
)
self.respond(msg, text)
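        # Example response (illustrative, assuming command_prefix is '.'):
        #   Reporting in! [Python] https://github.com/boreq/botnet try .help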
def on_list_commands(self, sender, msg, admin, commands):
"""Sends a list of commands received from the Manager."""
if admin:
text = 'Supported admin commands: %s' % ', '.join(commands)
else:
text = 'Supported commands: %s' % ', '.join(commands)
self.respond(msg, text)
def handle_privmsg(self, msg):
# Handle IBIP:
if self.is_command(msg, 'bots', command_prefix='.'):
self.ibip(msg)
mod = Meta
|
mit
| -4,800,175,909,826,175,000
| 29.859155
| 80
| 0.581013
| false
| 3.891652
| false
| false
| false
|
harikishen/addons-server
|
src/olympia/addons/tasks.py
|
1
|
14975
|
import hashlib
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import transaction
from elasticsearch_dsl import Search
from PIL import Image
import olympia.core.logger
from olympia import amo
from olympia.addons.models import (
Addon, attach_tags, attach_translations, AppSupport, CompatOverride,
IncompatibleVersions, Persona, Preview)
from olympia.addons.indexers import AddonIndexer
from olympia.amo.celery import task
from olympia.amo.decorators import set_modified_on, write
from olympia.amo.helpers import user_media_path
from olympia.amo.storage_utils import rm_stored_dir
from olympia.amo.utils import cache_ns_key, ImageCheck, LocalFileStorage
from olympia.editors.models import RereviewQueueTheme
from olympia.lib.es.utils import index_objects
from olympia.tags.models import Tag
from olympia.versions.models import Version
log = olympia.core.logger.getLogger('z.task')
@task
@write
def version_changed(addon_id, **kw):
update_last_updated(addon_id)
update_appsupport([addon_id])
def update_last_updated(addon_id):
queries = Addon._last_updated_queries()
try:
addon = Addon.objects.get(pk=addon_id)
except Addon.DoesNotExist:
log.info('[1@None] Updating last updated for %s failed, no addon found'
% addon_id)
return
log.info('[1@None] Updating last updated for %s.' % addon_id)
if addon.is_persona():
q = 'personas'
elif addon.status == amo.STATUS_PUBLIC:
q = 'public'
else:
q = 'exp'
qs = queries[q].filter(pk=addon_id).using('default')
res = qs.values_list('id', 'last_updated')
if res:
pk, t = res[0]
Addon.objects.filter(pk=pk).update(last_updated=t)
@write
def update_appsupport(ids):
log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids))
addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms()
support = []
for addon in addons:
for app, appver in addon.compatible_apps.items():
if appver is None:
# Fake support for all version ranges.
min_, max_ = 0, 999999999999999999
else:
min_, max_ = appver.min.version_int, appver.max.version_int
support.append(AppSupport(addon=addon, app=app.id,
min=min_, max=max_))
if not support:
return
with transaction.atomic():
AppSupport.objects.filter(addon__id__in=ids).delete()
AppSupport.objects.bulk_create(support)
# All our updates were sql, so invalidate manually.
Addon.objects.invalidate(*addons)
@task
def delete_preview_files(id, **kw):
log.info('[1@None] Removing preview with id of %s.' % id)
p = Preview(id=id)
for f in (p.thumbnail_path, p.image_path):
try:
storage.delete(f)
except Exception, e:
log.error('Error deleting preview file (%s): %s' % (f, e))
@task(acks_late=True)
def index_addons(ids, **kw):
log.info('Indexing addons %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))
transforms = (attach_tags, attach_translations)
index_objects(ids, Addon, AddonIndexer.extract_document,
kw.pop('index', None), transforms, Addon.unfiltered)
@task
def unindex_addons(ids, **kw):
for addon in ids:
log.info('Removing addon [%s] from search index.' % addon)
Addon.unindex(addon)
@task
def delete_persona_image(dst, **kw):
log.info('[1@None] Deleting persona image: %s.' % dst)
if not dst.startswith(user_media_path('addons')):
log.error("Someone tried deleting something they shouldn't: %s" % dst)
return
try:
storage.delete(dst)
except Exception, e:
log.error('Error deleting persona image: %s' % e)
@set_modified_on
def create_persona_preview_images(src, full_dst, **kw):
"""
Creates a 680x100 thumbnail used for the Persona preview and
a 32x32 thumbnail used for search suggestions/detail pages.
"""
log.info('[1@None] Resizing persona images: %s' % full_dst)
preview, full = amo.PERSONA_IMAGE_SIZES['header']
preview_w, preview_h = preview
orig_w, orig_h = full
with storage.open(src) as fp:
i_orig = i = Image.open(fp)
# Crop image from the right.
i = i.crop((orig_w - (preview_w * 2), 0, orig_w, orig_h))
# Resize preview.
i = i.resize(preview, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[0], 'wb') as fp:
i.save(fp, 'png')
_, icon_size = amo.PERSONA_IMAGE_SIZES['icon']
icon_w, icon_h = icon_size
# Resize icon.
i = i_orig
i.load()
i = i.crop((orig_w - (preview_h * 2), 0, orig_w, orig_h))
i = i.resize(icon_size, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[1], 'wb') as fp:
i.save(fp, 'png')
return True
@set_modified_on
def save_persona_image(src, full_dst, **kw):
"""Creates a PNG of a Persona header/footer image."""
log.info('[1@None] Saving persona image: %s' % full_dst)
img = ImageCheck(storage.open(src))
if not img.is_image():
log.error('Not an image: %s' % src, exc_info=True)
return
with storage.open(src, 'rb') as fp:
i = Image.open(fp)
with storage.open(full_dst, 'wb') as fp:
i.save(fp, 'png')
return True
@task
def update_incompatible_appversions(data, **kw):
"""Updates the incompatible_versions table for this version."""
log.info('Updating incompatible_versions for %s versions.' % len(data))
addon_ids = set()
for version_id in data:
# This is here to handle both post_save and post_delete hooks.
IncompatibleVersions.objects.filter(version=version_id).delete()
try:
version = Version.objects.get(pk=version_id)
except Version.DoesNotExist:
log.info('Version ID [%d] not found. Incompatible versions were '
'cleared.' % version_id)
return
addon_ids.add(version.addon_id)
try:
compat = CompatOverride.objects.get(addon=version.addon)
except CompatOverride.DoesNotExist:
log.info('Compat override for addon with version ID [%d] not '
'found. Incompatible versions were cleared.' % version_id)
return
app_ranges = []
ranges = compat.collapsed_ranges()
for range in ranges:
if range.min == '0' and range.max == '*':
# Wildcard range, add all app ranges
app_ranges.extend(range.apps)
else:
# Since we can't rely on add-on version numbers, get the min
# and max ID values and find versions whose ID is within those
# ranges, being careful with wildcards.
min_id = max_id = None
if range.min == '0':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('id')
.values_list('id', flat=True)[:1])
if versions:
min_id = versions[0]
else:
try:
min_id = Version.objects.get(addon=version.addon_id,
version=range.min).id
except Version.DoesNotExist:
pass
if range.max == '*':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('-id')
.values_list('id', flat=True)[:1])
if versions:
max_id = versions[0]
else:
try:
max_id = Version.objects.get(addon=version.addon_id,
version=range.max).id
except Version.DoesNotExist:
pass
if min_id and max_id:
if min_id <= version.id <= max_id:
app_ranges.extend(range.apps)
for app_range in app_ranges:
IncompatibleVersions.objects.create(version=version,
app=app_range.app.id,
min_app_version=app_range.min,
max_app_version=app_range.max)
log.info('Added incompatible version for version ID [%d]: '
'app:%d, %s -> %s' % (version_id, app_range.app.id,
app_range.min, app_range.max))
# Increment namespace cache of compat versions.
for addon_id in addon_ids:
cache_ns_key('d2c-versions:%s' % addon_id, increment=True)
def make_checksum(header_path, footer_path):
ls = LocalFileStorage()
footer = footer_path and ls._open(footer_path).read() or ''
raw_checksum = ls._open(header_path).read() + footer
return hashlib.sha224(raw_checksum).hexdigest()
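# Note: these checksums are what theme_checksum() and
# rereviewqueuetheme_checksum() below compare against existing Persona rows
# to detect duplicate header/footer images.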
def theme_checksum(theme, **kw):
theme.checksum = make_checksum(theme.header_path, theme.footer_path)
dupe_personas = Persona.objects.filter(checksum=theme.checksum)
if dupe_personas.exists():
theme.dupe_persona = dupe_personas[0]
theme.save()
def rereviewqueuetheme_checksum(rqt, **kw):
"""Check for possible duplicate theme images."""
dupe_personas = Persona.objects.filter(
checksum=make_checksum(rqt.header_path or rqt.theme.header_path,
rqt.footer_path or rqt.theme.footer_path))
if dupe_personas.exists():
rqt.dupe_persona = dupe_personas[0]
rqt.save()
@task
@write
def save_theme(header, footer, addon, **kw):
"""Save theme image and calculates checksum after theme save."""
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'header.png')
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'footer.png')
try:
save_persona_image(src=header, full_dst=header_dst)
if footer:
save_persona_image(src=footer, full_dst=footer_dst)
create_persona_preview_images(
src=header, full_dst=[os.path.join(dst_root, 'preview.png'),
os.path.join(dst_root, 'icon.png')],
set_modified_on=[addon])
theme_checksum(addon.persona)
except IOError:
addon.delete()
raise
@task
@write
def save_theme_reupload(header, footer, addon, **kw):
header_dst = None
footer_dst = None
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
try:
if header:
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'pending_header.png')
save_persona_image(src=header, full_dst=header_dst)
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'pending_footer.png')
save_persona_image(src=footer, full_dst=footer_dst)
except IOError as e:
log.error(str(e))
raise
if header_dst or footer_dst:
theme = addon.persona
header = 'pending_header.png' if header_dst else theme.header
# Theme footer is optional, but can't be None.
footer = theme.footer or ''
if footer_dst:
footer = 'pending_footer.png'
# Store pending header and/or footer file paths for review.
RereviewQueueTheme.objects.filter(theme=theme).delete()
rqt = RereviewQueueTheme(theme=theme, header=header, footer=footer)
rereviewqueuetheme_checksum(rqt=rqt)
rqt.save()
@task
@write
def calc_checksum(theme_id, **kw):
"""For migration 596."""
lfs = LocalFileStorage()
theme = Persona.objects.get(id=theme_id)
header = theme.header_path
footer = theme.footer_path
# Delete invalid themes that are not images (e.g. PDF, EXE).
try:
Image.open(header)
Image.open(footer)
except IOError:
log.info('Deleting invalid theme [%s] (header: %s) (footer: %s)' %
(theme.addon.id, header, footer))
theme.addon.delete()
theme.delete()
rm_stored_dir(header.replace('header.png', ''), storage=lfs)
return
# Calculate checksum and save.
try:
theme.checksum = make_checksum(header, footer)
theme.save()
except IOError as e:
log.error(str(e))
@task
@write # To bypass cache and use the primary replica.
def find_inconsistencies_between_es_and_db(ids, **kw):
length = len(ids)
log.info(
'Searching for inconsistencies between db and es %d-%d [%d].',
ids[0], ids[-1], length)
db_addons = Addon.unfiltered.in_bulk(ids)
es_addons = Search(
doc_type=AddonIndexer.get_doctype_name(),
index=AddonIndexer.get_index_alias(),
using=amo.search.get_es()).filter('ids', values=ids)[:length].execute()
db_len = len(db_addons)
es_len = len(es_addons)
if db_len != es_len:
log.info('Inconsistency found: %d in db vs %d in es.',
db_len, es_len)
for result in es_addons.hits.hits:
pk = result['_source']['id']
db_modified = db_addons[pk].modified.isoformat()
es_modified = result['_source']['modified']
if db_modified != es_modified:
log.info('Inconsistency found for addon %d: '
'modified is %s in db vs %s in es.',
pk, db_modified, es_modified)
db_status = db_addons[pk].status
es_status = result['_source']['status']
if db_status != es_status:
log.info('Inconsistency found for addon %d: '
'status is %s in db vs %s in es.',
pk, db_status, es_status)
@task
@write
def add_firefox57_tag(ids, **kw):
"""Add firefox57 tag to addons with the specified ids."""
log.info(
'Adding firefox57 tag to addons %d-%d [%d].',
ids[0], ids[-1], len(ids))
addons = Addon.objects.filter(id__in=ids)
for addon in addons:
# This will create a couple extra queries to check for tag/addontag
# existence, and then trigger update_tag_stat tasks. But the
# alternative is adding activity log manually, making sure we don't
# add duplicate tags, manually updating the tag stats, so it's ok for
# a one-off task.
Tag(tag_text='firefox57').save_tag(addon)
|
bsd-3-clause
| 1,826,447,814,899,760,000
| 34.070258
| 79
| 0.585576
| false
| 3.665851
| false
| false
| false
|
kevinrigney/PlaylistDatabase
|
youtube_search.py
|
1
|
4208
|
#!/usr/bin/env python3
import httplib2
import os
import sys
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import argparser, run_flow
class YoutubeSearcher():
# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google Developers Console at
# https://console.developers.google.com/.
# Please ensure that you have enabled the YouTube Data API for your project.
# For more information about using OAuth2 to access the YouTube Data API, see:
# https://developers.google.com/youtube/v3/guides/authentication
# For more information about the client_secrets.json file format, see:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = "client_secrets.json"
# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the Developers Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
# This OAuth 2.0 access scope allows for full read/write access to the
# authenticated user's account.
YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def __init__(self):
flow = flow_from_clientsecrets(self.CLIENT_SECRETS_FILE,
message=self.MISSING_CLIENT_SECRETS_MESSAGE,
scope=self.YOUTUBE_READ_WRITE_SCOPE)
storage = Storage("ytpl-oauth2.json")
credentials = storage.get()
if credentials is None or credentials.invalid:
flags = argparser.parse_args()
credentials = run_flow(flow, storage, flags)
self.youtube = build(self.YOUTUBE_API_SERVICE_NAME, self.YOUTUBE_API_VERSION,
http=credentials.authorize(httplib2.Http()))
def get_most_viewed_link(self,query,max_results=5):
videos = self.youtube_search(query,max_results)
try:
video = videos[0]
return ('https://www.youtube.com/watch?v='+video,video)
except IndexError:
return ('','')
def youtube_search(self,query,max_results=5):
# Call the search.list method to retrieve results matching the specified
# query term.
search_response = self.youtube.search().list(
q=query,
part="id",
maxResults=max_results,
order="relevance"
).execute()
videos = []
# Add each result to the appropriate list, and then display the lists of
# matching videos, channels, and playlists.
for search_result in search_response.get("items", []):
if search_result["id"]["kind"] == "youtube#video":
#print(search_result)
videos.append(search_result["id"]["videoId"])
#print("Videos:\n", "\n".join(videos), "\n")
return videos
def is_video_valid(self,video_id):
# Check if a video is still valid.
# (make sure it hasn't been deleted)
# The part is "id" because it has a quota cost of 0
search_response = self.youtube.videos().list(
id=video_id,
part="id"
).execute()
return search_response['pageInfo']['totalResults'] > 0
if __name__ == "__main__":
argparser.add_argument("--q", help="Search term", default="Google")
argparser.add_argument("--max-results", help="Max results", default=25)
args = argparser.parse_args()
searcher = YoutubeSearcher()
try:
        # get_most_viewed_link() returns a (url, video_id) tuple
        video_url, video_id = searcher.get_most_viewed_link(args.q)
        print("Video: " + video_url)
except HttpError as e:
print("An HTTP error %d occurred:\n%s" % (e.resp.status, e.content))
|
mit
| 6,983,723,532,908,328,000
| 34.361345
| 86
| 0.694867
| false
| 3.700967
| false
| false
| false
|
south-coast-science/scs_dev
|
src/scs_dev/control_receiver.py
|
1
|
7257
|
#!/usr/bin/env python3
"""
Created on 17 Apr 2017
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
DESCRIPTION
The function of the control_receiver utility is execute commands received over a messaging topic. In addition to
enabling secure remote management, the utility provides a secure challenge-response facility.
A typical South Coast Science device is provided with a messaging topic whose purpose is to enable bidirectional
command-and-response communications between the device and one or more remote management systems. Commands
are in the form of a specific JSON format, which is validated as follows:
* the message must identify the device as the recipient
* the digest in the incoming message matches the digest computed by the device
* the command must be listed in the device's ~/SCS/cmd/ directory, or be "?"
The digest is computed using a shared secret generated by the scs_mfr/shared_secret utility.
If validated, the control_receiver utility executes the command, then publishes a receipt message which includes:
* the command stdout
* the command stderr
* the command return code
* the original message digest
* a new message digest
Entries in ~/SCS/cmd/ are typically symbolic links to commands that are implemented elsewhere, either by the operating
system, or by South Coast Science packages.
It is the responsibility of the device administrator to manage the ~/SCS/cmd/ directory. Care should be taken to exclude
commands that:
* could cause harm to the system
* require an interactive mode
* require root privileges
* can change the contents of the ~/SCS/cmd/ directory
SYNOPSIS
control_receiver.py [-r] [-e] [-v]
EXAMPLES
/home/pi/SCS/scs_dev/src/scs_dev/aws_topic_subscriber.py -cX -s /home/pi/SCS/pipes/mqtt_control_subscription.uds | \
/home/pi/SCS/scs_dev/src/scs_dev/control_receiver.py -r -v | \
/home/pi/SCS/scs_dev/src/scs_dev/aws_topic_publisher.py -v -cX -p /home/pi/SCS/pipes/mqtt_publication.uds
FILES
~/SCS/cmd/*
~/SCS/conf/system_id.json
~/SCS/conf/shared_secret.json
DOCUMENT EXAMPLE - REQUEST
{"/orgs/south-coast-science-dev/development/device/alpha-pi-eng-000006/control":
{"tag": "bruno", "attn": "scs-ap1-6", "rec": "2018-04-04T14:41:11.872+01:00", "cmd_tokens": ["?"],
"digest": "bf682976cb45d889207bf3e3b4a6e12336859a93d7023b8454514"}}
DOCUMENT EXAMPLE - RESPONSE
{"/orgs/south-coast-science-dev/development/device/alpha-pi-eng-000006/control":
{"tag": "scs-ap1-6", "rec": "2018-04-04T13:41:59.521+00:00",
"cmd": {"cmd": "?", "params": [],
"stdout": ["[\"afe_baseline\", \"afe_calib\", \"opc_power\", \"ps\", \"schedule\", \"shared_secret\"]"],
"stderr": [], "ret": 0},
"omd": "40ef7a9c0f70033bbe21827ed25286b448a5ad3ace9b16f44f3d94da6a89ab25",
"digest": "597f8de3852f1067f52f126398777204c378e8f5d30bad6d8d99ee536"}}
SEE ALSO
scs_analysis/aws_mqtt_control
scs_analysis/osio_mqtt_control
scs_mfr/shared_secret
"""
import json
import sys
import time
from scs_core.control.command import Command
from scs_core.control.control_datum import ControlDatum
from scs_core.control.control_receipt import ControlReceipt
from scs_core.data.datetime import LocalizedDatetime
from scs_core.data.json import JSONify
from scs_core.sys.shared_secret import SharedSecret
from scs_core.sys.signalled_exit import SignalledExit
from scs_core.sys.system_id import SystemID
from scs_dev.cmd.cmd_control_receiver import CmdControlReceiver
from scs_host.sys.host import Host
# --------------------------------------------------------------------------------------------------------------------
if __name__ == '__main__':
# ----------------------------------------------------------------------------------------------------------------
# config...
deferred_commands = ('shutdown', 'reboot', 'restart')
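    # These commands are only executed after the receipt has been published
    # (see "execute deferred commands..." below), so the acknowledgement can
    # leave the device before it shuts down or restarts.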
# ----------------------------------------------------------------------------------------------------------------
# cmd...
cmd = CmdControlReceiver()
if cmd.verbose:
print("control_receiver: %s" % cmd, file=sys.stderr)
# ------------------------------------------------------------------------------------------------------------
# resources...
# SystemID...
system_id = SystemID.load(Host)
if system_id is None:
print("control_receiver: SystemID not available.", file=sys.stderr)
exit(1)
if cmd.verbose:
print("control_receiver: %s" % system_id, file=sys.stderr)
# SharedSecret...
secret = SharedSecret.load(Host)
if secret is None:
print("control_receiver: SharedSecret not available.", file=sys.stderr)
exit(1)
if cmd.verbose:
print("control_receiver: %s" % secret, file=sys.stderr)
sys.stderr.flush()
system_tag = system_id.message_tag()
key = secret.key
try:
# ------------------------------------------------------------------------------------------------------------
# run...
# signal handler...
SignalledExit.construct("control_receiver", cmd.verbose)
for line in sys.stdin:
# control...
try:
jdict = json.loads(line)
except ValueError:
continue
try:
datum = ControlDatum.construct_from_jdict(jdict)
except TypeError:
continue
if datum.attn != system_tag:
continue
if cmd.verbose:
print("control_receiver: %s" % datum, file=sys.stderr)
sys.stderr.flush()
if not datum.is_valid(key):
print("control_receiver: invalid digest: %s" % datum, file=sys.stderr)
sys.stderr.flush()
continue
if cmd.echo:
print(JSONify.dumps(datum))
sys.stdout.flush()
# command...
command = Command.construct_from_tokens(datum.cmd_tokens)
if command.cmd is not None and not command.is_valid(Host):
command.error("invalid command")
# execute immediate commands...
elif command.cmd not in deferred_commands:
command.execute(Host, datum.timeout)
# receipt...
if cmd.receipt:
now = LocalizedDatetime.now().utc()
receipt = ControlReceipt.construct_from_datum(datum, now, command, key)
print(JSONify.dumps(receipt))
sys.stdout.flush()
if cmd.verbose:
print("control_receiver: %s" % receipt, file=sys.stderr)
sys.stderr.flush()
# execute deferred commands...
if command.cmd in deferred_commands:
time.sleep(10.0) # wait, hoping that the receipt is sent
command.execute(Host, datum.timeout)
# ----------------------------------------------------------------------------------------------------------------
# end...
except ConnectionError as ex:
print("control_receiver: %s" % ex, file=sys.stderr)
except (KeyboardInterrupt, SystemExit):
pass
finally:
if cmd and cmd.verbose:
print("control_receiver: finishing", file=sys.stderr)
|
mit
| 8,452,370,803,216,645,000
| 32.911215
| 119
| 0.592945
| false
| 3.926948
| false
| false
| false
|
afraser/CellProfiler-Analyst
|
cpa/dirichletintegrate.py
|
1
|
6480
|
from numpy import *
from scipy.integrate import quadrature, romberg, fixed_quad
from scipy.special import gammaln, betaln, digamma, polygamma, betainc, gamma
import pdb
from hypergeom import hyper3F2regularizedZ1, hyper3F2Z1, hyper3F2aZ1
def dirichlet_integrate(alpha):
normalizer = exp(sum(gammaln(alpha)) - gammaln(sum(alpha)))
def f_recur(x, idx, upper, vals):
if idx == 1:
# base case.
# set values for last two components
vals[1] = x
vals[0] = 1.0 - sum(vals[1:])
# compute Dirichlet value
print vals.T, prod(vals ** (alpha - 1)) , normalizer, alpha
return prod(vals.T ** (alpha - 1)) / normalizer
else:
vals[idx] = x
split = alpha[idx-1] / sum(alpha)
if (split < upper - x):
return romberg(f_recur, 0, split, args=(idx - 1, upper - x, vals), vec_func=False) + \
romberg(f_recur, split, upper - x, args=(idx - 1, upper - x, vals), vec_func=False)
else:
return romberg(f_recur, 0, upper - x, args=(idx - 1, upper - x, vals), vec_func=False)
split = alpha[-1] / sum(alpha)
print alpha / sum(alpha)
return romberg(f_recur, 0, split, args=(len(alpha) - 1, 1.0, zeros((len(alpha), 1), float64)), vec_func=False) + \
romberg(f_recur, split, 1, args=(len(alpha) - 1, 1.0, zeros((len(alpha), 1), float64)), vec_func=False)
def dirichlet_integrate_near0(alpha):
normalizer = exp(sum(gammaln(alpha)) - gammaln(sum(alpha)))
K = len(alpha)
def f_recur(x, idx, vals):
if idx == K - 2:
# base case.
# set values for last two components
vals[K - 2] = x
vals[K - 1] = 1.0 - sum(vals[0:K-1])
# print vals, prod(vals ** (alpha - 1)) / normalizer, normalizer
for v in vals[1:]:
assert v <= vals[0]+0.001
# compute Dirichlet value
return prod(vals.T ** (alpha - 1)) / normalizer
else:
vals[idx] = x
# we have to fulfill three requirements:
# vals[i] > 0 for all i
# vals[0] >= vals[i] for all i
# vals[i] sum to 1
# how much weight is left to assign?
remaining = 1.0 - sum(vals[:(idx+1)])
# require vals[i] > 0, and vals[0] >= vals[i]
lower_bound = max(0.0, remaining - vals[0] * (K - idx - 2))
upper_bound = min(remaining, vals[0])
assert lower_bound <= upper_bound+0.001
v = romberg(f_recur, lower_bound, upper_bound, args=(idx + 1, vals), vec_func=False)
return v
return romberg(f_recur, 1.0 / len(alpha), 1, args=(0, zeros((len(alpha), 1), float64)), vec_func=False)
def dirichlet_integrate_zero_enriched(alpha, base_level):
normalizer = exp(sum(gammaln(alpha)) - gammaln(sum(alpha)))
K = len(alpha)
def f_recur(x, idx, vals, remaining):
if idx == K - 2:
# base case.
# set values for last two components
vals[K - 2] = x
vals[K - 1] = remaining - x
# compute Dirichlet value
return prod(vals.T ** (alpha - 1)) / normalizer
else:
vals[idx] = x
remaining = remaining - x
v = romberg(f_recur, 0, remaining, args=(idx + 1, vals, remaining), vec_func=False)
return v
return romberg(f_recur, base_level, 1, args=(0, zeros((len(alpha), 1), float64), 1.0), vec_func=False)
def integrate_splits(prior, posterior):
splits = [finfo(float64).eps, 1.0 - finfo(float64).eps, prior[0] / sum(prior),
prior[1] / sum(prior), posterior[0] / sum(posterior),
posterior[1] / sum (posterior)]
splits.sort()
return splits
def integrate(f, splits):
return sum([romberg(f, lo, hi, vec_func=True, tol=1e-4, divmax=10) for lo, hi in zip(splits[:-1], splits[1:])])
def integrateold(f, splits):
return sum([fixed_quad(f, lo, hi, n=100)[0] for lo, hi in zip(splits[:-1], splits[1:])])
def pdf_cdf_prod(x, prior, posterior):
lnCDF = log(betainc(prior[0], prior[1], x))
lnPDF = (posterior[0] - 1) * log(x) + (posterior[1] - 1) * log(1 - x) - betaln(posterior[0], posterior[1])
return exp(lnCDF + lnPDF)
def beta_enriched(prior, posterior):
# def f(x):
# return beta.cdf(x, prior[0], prior[1]) * beta.pdf(x, posterior[0], posterior[1])
# def g(x):
# return beta.pdf(x, posterior[0], posterior[1])
# def h(x):
# return pdf_cdf_prod(x, prior, posterior)
# # compute by integration
# splits = integrate_splits(prior, posterior)
# v = integrate(f, splits) / integrate(g, splits)
# use closed form
a = prior[0]
b = prior[1]
c = posterior[0]
d = posterior[1]
# See Integration.mathetmatica
# This would be better if we computed the log of the
# hypergeometric function, but I don't think that's generally
# possible.
hyper = hyper3F2aZ1(a, 1-b, a+c, a+c+d)
scale = exp(gammaln(a) + gammaln(a+c) + gammaln(d) - gammaln(1+a) - gammaln(a+c+d) - betaln(a,b) - betaln(c,d))
if isnan(hyper * scale):
# This can happen if hyper and scale are 0 and inf (or vice versa).
if prior[0] / sum(prior) > posterior[0] / sum(posterior):
return 0.0
return 1.0
return clip(hyper * scale, 0, 1)
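# Note: this closed form evaluates P(X < Y) for X ~ Beta(prior) and
# Y ~ Beta(posterior), i.e. the probability that a draw from the posterior
# exceeds a draw from the prior; the commented-out integration above computes
# the same quantity numerically.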
def score(prior, counts):
''' score a well based on the prior fit to the data and the observed counts '''
assert prior.shape==counts.shape, "dirichletintegrate.score: array shapes do not match: "+str(prior.shape)+' and '+str(counts.shape)
K = len(prior)
posterior = prior + counts
def score_idx(idx):
prior_a = prior[idx]
prior_b = sum(prior) - prior_a
posterior_a = posterior[idx]
posterior_b = sum(posterior) - posterior_a
return beta_enriched((prior_a, prior_b), (posterior_a, posterior_b))
return [score_idx(i) for i in range(K)]
def logit(p):
return log2(p) - log2(1-p)
if __name__ == '__main__':
from polyafit import fit_to_data_infile
alpha, converged, wellnums, wellcounts = fit_to_data_infile('PBcounts.txt')
print "Fit alpha:", alpha, "\tconverged:", converged
for idx, wellnum in enumerate(wellnums):
print wellnum, "\t", "\t".join([str(logit(v)) for v in score(alpha, wellcounts[idx])]), "\t", "\t".join([str(v) for v in wellcounts[idx]])
|
gpl-2.0
| -7,962,094,248,256,210,000
| 39.754717
| 146
| 0.572685
| false
| 3.14258
| false
| false
| false
|
leathersole/midi-accordion-fancy
|
src/main/player.py
|
1
|
1967
|
#!/bin/env python
import pygame.midi
import soundvalue
class Player:
def __init__(self):
instrument = 22
port = 2
self.button2sound = {'a':(60,61), 's':(62,63), 'd':(64,65) }
self.buttons = self.button2sound.keys()
self.volume = 127
pygame.midi.init()
self.midiOutput = pygame.midi.Output(port, 0)
self.midiOutput.set_instrument(instrument)
self.currently_playing = {k : False for k in self.button2sound.iterkeys()}
def play(self, key):
note = self.button2sound[key][0]
self.midiOutput.note_on(note,self.volume)
self.currently_playing[key] = True
def stop(self, key):
note = self.button2sound[key][0]
self.midiOutput.note_off(note,self.volume)
self.currently_playing[key] = False
def quit(self):
pygame.midi.quit()
if __name__ == "__main__":
p = Player()
print(p.button2sound)
#import pygame
#import pygame.midi
#
#pygame.init()
#pygame.midi.init()
#
#pygame.display.set_mode((640,480))
#
#instrument = 22
#note = 74
#volume = 127
#port = 2
#
#midiOutput = pygame.midi.Output(port, 0)
#midiOutput.set_instrument(instrument)
#
#finished = False
#
#key2sound = {'a':60, 's':62, 'd':64 }
#
#print "Press q to quit..."
#currently_playing = {k : False for k in key2sound.iterkeys()}
#
#while not finished:
#
# event = pygame.event.wait()
#
# if event.type == pygame.QUIT:
# finished = True
# elif event.type in (pygame.KEYDOWN,pygame.KEYUP):
# key = pygame.key.name(event.key)
# if key == 'q':
# finished = True
#
# if key in key2sound:
# if event.type == pygame.KEYDOWN:
# note = key2sound[key]
# midiOutput.note_on(note,volume)
# currently_playing[key] = True
# elif event.type == pygame.KEYUP:
# midiOutput.note_off(note,volume)
# currently_playing[key] = False
#
#del midiOutput
#pygame.midi.quit()
#
#print "-- END --"
|
gpl-3.0
| 1,122,424,234,700,175,700
| 21.352273
| 82
| 0.605999
| false
| 3.030817
| false
| false
| false
|
tristan-c/massive-octo-tribble
|
massive/views.py
|
1
|
3540
|
import uuid
import os
from flask_restful import Resource, reqparse
from flask_login import login_required
from flask import redirect, send_file, g, url_for
from massive import api, app
from massive.models import *
from massive.utils import *
from io import BytesIO
class Resource(Resource):
method_decorators = [login_required]
@app.route('/')
def index():
if g.user is not None and g.user.is_authenticated:
return redirect('/index.html')
else:
return redirect("/login")
parser = reqparse.RequestParser()
parser.add_argument('url', type=str)
parser.add_argument('tags', type=str, default=None)
class links(Resource):
def get(self):
user = User.query.get(g.user.id)
links = Link.query.join(User).filter(User.email == user.email)
return [link.dump() for link in links]
def post(self, linkId=None):
args = parser.parse_args()
user = User.query.get(g.user.id)
if linkId:
link = Link.query.get(id=linkId)
if not link:
return "no link found", 404
#taglist = [t.name for t in link.tags]
# for tag in args['tags']:
# if tag not in taglist:
# db_tag = Tags.get(name=tag)
# if not db_tag:
# db_tag = Tags(name=tag)
# link.tags.add(db_tag)
return link.dump()
url = args['url']
tags = args['tags']
#prepend if no protocole specified
if url.find("http://") == -1 and url.find("https://") == -1:
url = "http://%s" % url
if Link.query.filter_by(url=url, user_id=user.id).first():
return "already in db", 400
if tags:
tags = tags.split(",")
link = save_link(
get_page_title(url),
url,
tags,
user
)
return link.dump()
def delete(self, linkId=None):
if linkId == None:
return "no link provided", 400
link = Link.query.get(linkId)
if not link:
return "no link found", 404
#delete favicon
if link.favicon:
favicon_path = os.path.join(app.config['FAVICON_REPO'],link.favicon)
try:
os.remove(favicon_path)
except Exception as e:
app.logger.warning("error while trying to remove a favicon")
app.logger.warning(e)
db.session.delete(link)
db.session.commit()
return ""
api.add_resource(links, '/links', '/links/<string:linkId>')
@app.route('/ico/<icoId>')
def get_avatar(icoId=None):
file_path = os.path.join(app.config['FAVICON_REPO'],icoId)
if os.path.isfile(file_path):
return send_file(file_path, as_attachment=True)
else:
return "no favicon found",404
def save_link(title, url, tags=[], user=None):
if not title:
title = url.split('/')[-1]
iconfile_name = "%s.ico" % str(uuid.uuid4())
favicon = get_page_favicon(url,iconfile_name)
link = Link(
title=title,
url=url,
favicon=iconfile_name,
#tags=db_tags,
user=user
)
for tag in tags:
db_tag = Tags.query.filter_by(name=tag).first()
if not db_tag:
db_tag = Tags(name=tag)
db.session.add(db_tag)
link.tags.append(db_tag)
if favicon:
link.favicon = favicon
db.session.add(link)
db.session.commit()
return link
|
bsd-2-clause
| 8,048,658,675,714,831,000
| 24.467626
| 80
| 0.554237
| false
| 3.623337
| false
| false
| false
|
branden/dcos
|
packages/dcos-net/extra/dcos-net-setup.py
|
1
|
1380
|
#!/opt/mesosphere/bin/python
"""
This script allows network interfaces and IP addresses to be added multiple
times: the ip command exits with code 2 if the interface or address already
exists [1], so dcos-net-setup.py inspects the output of the ip command and
returns a success exit code instead [2].
[1] ExecStartPre=-/usr/bin/ip link add name type dummy
[2] ExecStartPre=/path/dcos-net-setup.py ip link add name type dummy
The script also prevents duplicate iptables rules [3].
[3] ExecStartPre=/path/dcos-net-setup.py iptables --wait -A FORWARD -j ACCEPT
"""
import subprocess
import sys
def main():
if sys.argv[1:4] in [['ip', 'link', 'add'], ['ip', 'addr', 'add'], ['ip', '-6', 'addr']]:
result = subprocess.run(sys.argv[1:], stderr=subprocess.PIPE)
sys.stderr.buffer.write(result.stderr)
if result.stderr.strip().endswith(b'File exists'):
result.returncode = 0
elif sys.argv[1] == 'iptables':
# check whether a rule matching the specification does exist
argv = ['-C' if arg in ['-A', '-I'] else arg for arg in sys.argv[1:]]
result = subprocess.run(argv)
if result.returncode != 0:
# if it doesn't exist append or insert that rules
result = subprocess.run(sys.argv[1:])
else:
result = subprocess.run(sys.argv[1:])
sys.exit(result.returncode)
if __name__ == "__main__":
main()
|
apache-2.0
| -2,490,736,207,287,210,000
| 34.384615
| 93
| 0.65
| false
| 3.538462
| false
| false
| false
|
itsMichael/uair-pro
|
app/launcher.py
|
1
|
19189
|
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
import os
import sys
import random
import socket
import subprocess
import signal
import hashlib
from functions import lt
from langs import langs
from loaders import load_config, save_config
ROOT_PATH=os.path.dirname(__file__)
ICON_PATH=os.path.join(ROOT_PATH, "static/launcher.png")
PIDFILE_PATH=os.path.expanduser("~/.uair.pid")
DEFAULT_IMAGES_PATH=os.path.expanduser("~/Pictures")
try:
import gtk
CLIMODE=False
if len(sys.argv)>1 and sys.argv[1]=="cli":
CLIMODE=True
sys.argv=sys.argv[1:]
except:
CLIMODE=True
def check_pidfile():
if os.path.exists(PIDFILE_PATH):
return int(open(PIDFILE_PATH).read())
else:
return False
def create_pidfile(pid):
ff=open(PIDFILE_PATH, "w")
ff.write(str(pid))
ff.close()
def delete_pidfile():
if check_pidfile():
os.remove(PIDFILE_PATH)
return True
else:
return False
def remove_orphaned_pidfile(pid):
if not os.path.exists("/proc/%s" % str(pid)):
result=delete_pidfile()
if result:
print("Removed orphaned pid file")
def generate_password(length=5):
alphabet = "abcdefghijkmnoprstuwxyz1234567890"
pwd = ''
for count in range(length):
for x in random.choice(alphabet):
pwd+=x
return pwd
def hash_password(password):
from main import SECRET_KEY
#hash password
hashed=hashlib.md5()
hashed.update(SECRET_KEY+password)
return hashed.hexdigest()
def get_local_ip_address():
import socket
try:
s = socket.socket()
s.connect(('google.com', 80))
ip=s.getsockname()[0]
except:
ip=""
if ip:
return ip
else:
return "127.0.0.1"
def get_global_ip_address():
import urllib2
try:
ip=urllib2.urlopen('http://icanhazip.com').read()
ip=ip.strip()
except:
ip=""
if ip:
return ip
else:
return "127.0.0.1"
def start_server(config):
    #Don't start the server when it is already running
if check_pidfile():
print("Server already started.")
return
#create server path
path=os.path.join(ROOT_PATH, "main.pyc")
#Start server
cmd=["nohup", "python", path,"launch"]
server=subprocess.Popen(cmd)
    #create pid file and disable the start button
create_pidfile(server.pid)
return server
def stop_server(config):
    #stop www server by sending SIGTERM signal
pid=check_pidfile()
if pid:
#remove pid file
delete_pidfile()
#Kill process
try:
os.kill(pid, signal.SIGTERM)
except OSError:pass
print("Web server stopped")
ddd = subprocess.Popen("/usr/bin/notify-send Server Stopped", shell=True)
ddd.poll()
return True
else:
print("Server not started.")
return False
######################
# CLI Launcher
######################
if CLIMODE:
#ignore not run as script
if not __name__=="__main__":
exit(0)
#ignore arguments
if len(sys.argv)<2:
print("Usage: sudo python launcher.pyc start/stop")
print("launcher.pyc password <newpassword>")
print("launcher.pyc port <new port>")
exit(0)
#ignore others commands
if sys.argv[1] not in ["start", "stop", "password", "port"]:
print("Invalid command")
exit(0)
#remove pid
remove_orphaned_pidfile(check_pidfile())
#load config
try:
config=load_config()
except: pass
if sys.argv[1]=="start":
if check_pidfile():
print("Server already started.")
exit(0)
gip=get_global_ip_address()
lip=get_local_ip_address()
#print addresses
print("Local IP: %s" % lip)
print("Public IP: %s" % gip)
#gen passwords
pass1=generate_password()
pass2=generate_password()
config["gen_password"]=hash_password(pass1)
config["gen_password_shared"]=hash_password(pass2)
print("Login password:%s" % pass1)
print("Shared password:%s" % pass2)
config["local_ip"]=lip
config["global_ip"]=gip
save_config(config)
start_server(config)
config["status"]=1
save_config(config)
if sys.argv[1]=="stop":
done=stop_server(config)
if done:
config["status"]=0
save_config(config)
if sys.argv[1]=="password" and len(sys.argv)>2:
config["password"]=hash_password(sys.argv[2].strip())
save_config(config)
print("New password set")
if sys.argv[1]=="port":
if len(sys.argv)>2:
try:
config["port"]=int(sys.argv[2].strip())
save_config(config)
print("Port set to %s "% int(sys.argv[2].strip()))
except:pass
else:
print("Current port: %s "% config["port"])
#exit
exit(0)
class MainWindow(gtk.Window):
def __init__(self):
gtk.Window.__init__(self)
self.set_title("U-Air Launcher")
self.set_icon_from_file(ICON_PATH)
self.set_resizable(False)
self.set_size_request(440, 320)
self.set_border_width(20)
self.set_position(gtk.WIN_POS_CENTER)
#load config
self.config=load_config()
save_config(self.config)
#www server process
self.server=None
#get lang from config
self.lang=self.config.get("lang", "en")
#connect close event
self.connect("destroy", self.close_app)
self.fixed = gtk.Fixed()
self.label_status = gtk.Label("Status:")
self.label_status.set_text(lt("Status", self.lang)+":")
#local IP label
self.label_local_ip = gtk.Label("Local IP:")
label=lt("Local IP", self.lang)+": "+self.config["local_ip"]
label+=":"+str(self.config["port"])
self.label_local_ip.set_text(label)
self.label_public_ip = gtk.Label("Public IP:")
label=lt("Public IP", self.lang)+": "+self.config["global_ip"]+":"+str(self.config["port"])
self.label_public_ip.set_text(label)
self.label_gen_password = gtk.Label("Login password:")
self.label_gen_password.set_text(lt("Login password", self.lang)+":")
self.label_gen_password_shared = gtk.Label("Shared password:")
self.label_gen_password_shared.set_text(lt("Shared password", self.lang)+":")
self.label_set_gen_password = gtk.Label("...")
self.label_set_gen_password_shared = gtk.Label("...")
self.button_regenerate = gtk.Button("Regenerate password")
self.button_regenerate.set_label(lt("Regenerate password", self.lang))
self.button_regenerate.connect("clicked", self.regenerate)
self.button_start = gtk.Button("Start")
self.button_start.set_label(lt("Start", self.lang))
self.button_start.connect("clicked", self.start)
self.button_start.set_size_request(110, 50)
self.button_stop = gtk.Button("Stop")
self.button_stop.set_label(lt("Stop", self.lang))
self.button_stop.connect("clicked", self.stop)
self.button_stop.set_size_request(110, 50)
self.button_options = gtk.Button("Options")
self.button_options.set_label(lt("Options", self.lang))
self.button_options.set_size_request(130, 30)
self.button_options.connect("clicked", self.show_option_window)
self.button_about = gtk.Button("About")
self.button_about.set_label(lt("About", self.lang))
self.button_about.set_size_request(130, 30)
self.button_about.connect("clicked", self.show_about_window)
self.button_quit = gtk.Button("Quit")
self.button_quit.set_label(lt("Quit", self.lang))
self.button_quit.set_size_request(130, 30)
self.button_quit.connect("clicked", self.close_app)
self.img_banner = gtk.Image()
self.img_banner.set_from_file(os.path.join(ROOT_PATH,
"static/banner1.png"))
self.fixed.put(self.img_banner, 0, 0)
self.fixed.put(self.label_status, 5, 5)
#self.fixed.put(self.label_local_ip, 3, 130)
#self.fixed.put(self.label_public_ip, 200 ,130)
self.fixed.put(self.label_local_ip, 5, 110)
self.fixed.put(self.label_public_ip, 5 ,130)
self.fixed.put(self.button_regenerate, 70, 200)
self.fixed.put(self.button_start, 0, 230)
self.fixed.put(self.button_stop, 120, 230)
self.fixed.put(self.label_gen_password, 0, 160)
self.fixed.put(self.label_set_gen_password, 150, 160)
self.fixed.put(self.label_gen_password_shared, 0, 180)
self.fixed.put(self.label_set_gen_password_shared, 150, 180)
self.fixed.put(self.button_options, 250, 170)
self.fixed.put(self.button_about, 250, 210)
self.fixed.put(self.button_quit, 250, 250)
self.add(self.fixed)
#show all
self.show_all()
#create pictures folder if not exist
if not os.path.exists(DEFAULT_IMAGES_PATH):
os.mkdir(DEFAULT_IMAGES_PATH)
#remove pid file when process not exist
remove_orphaned_pidfile(check_pidfile())
#set status
self.setstatus()
#update start stop buttons
self.toggle_start_stop_buttons()
#generate new login password
self.gen_login_password()
self.gen_shared_password()
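	#lt() is the translation helper defined earlier in this module. A minimal
	#sketch of the assumed contract (hypothetical, for illustration only):
	#a dictionary lookup keyed by language code, falling back to the English
	#source text when no translation exists:
	#
	#	def lt(text, lang):
	#		return translations.get(lang, {}).get(text, text)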
	def regenerate(self, widget, data=None):
		self.gen_login_password()
		self.gen_shared_password()
def setstatus(self):
self.config["status"]=bool(check_pidfile())
#get and save global ip
gip=get_global_ip_address()
self.config["global_ip"]=gip
self.label_public_ip.set_text(lt("Public IP", self.lang)+": "+\
self.config["global_ip"]+":"+str(self.config["port"]))
# get and save local IP
lip=get_local_ip_address()
self.config["local_ip"]=lip
self.label_local_ip.set_text(lt("Local IP", self.lang)+": "+\
self.config["local_ip"]+":"+str(self.config["port"]))
		if self.config["status"]:
			self.label_status.set_text(lt("Status", self.lang)+": "+lt("Online", self.lang))
		else:
			self.label_status.set_text(lt("Status", self.lang)+": "+lt("Offline", self.lang))
def toggle_start_stop_buttons(self):
serverpid=check_pidfile()
if serverpid:
self.button_start.set_sensitive(False)
self.button_stop.set_sensitive(True)
else:
self.button_start.set_sensitive(True)
self.button_stop.set_sensitive(False)
	def start(self, widget, data=None):
		"""Start web server"""
		if self.server: return
		serv=start_server(self.config)
		if serv:
			self.server=serv
			self.toggle_start_stop_buttons()
			#refresh the status label and persist the new state
			self.setstatus()
			save_config(self.config)
			print("Web server started")
			#desktop notification; requires notify-send from libnotify
			notify = subprocess.Popen("/usr/bin/notify-send Server Started", shell=True)
			notify.poll()
def stop(self, widget, data=None):
"""Stop web server"""
if stop_server(self.config):
self.server=None
self.setstatus()
self.toggle_start_stop_buttons()
#save config
save_config(self.config)
def close_app(self, widget, data=None):
exit(0)
def show_option_window(self, widget, data=None):
OptionWindow(self.config)
def show_about_window(self, widget, data=None):
AboutWindow(self.config)
def gen_login_password(self):
pwd=generate_password()
hpwd=hash_password(pwd)
self.config["gen_password"] = hpwd
save_config(self.config)
#set text for widget with password
self.label_set_gen_password.set_text(pwd)
def gen_shared_password(self):
pwd=generate_password()
hpwd=hash_password(pwd)
self.config["gen_password_shared"] = hpwd
save_config(self.config)
#set text for widget with password
self.label_set_gen_password_shared.set_text(pwd)
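#generate_password() and hash_password() are defined earlier in this module.
#A minimal sketch of what such helpers might look like (hypothetical, shown
#only to document the assumed contract; the digest mirrors the
#md5(SECRET_KEY + password) pattern used in OptionWindow.onsave below):
#
#	def generate_password(length=8):
#		import random, string
#		chars = string.ascii_letters + string.digits
#		return ''.join(random.choice(chars) for _ in range(length))
#
#	def hash_password(password):
#		from main import SECRET_KEY
#		return hashlib.md5(SECRET_KEY + password).hexdigest()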
class OptionWindow(gtk.Window):
def __init__(self, config):
self.config=config
self.lang=self.config.get("lang", "en")
gtk.Window.__init__(self)
self.set_title(lt("Options", self.lang))
self.set_resizable(False)
self.set_size_request(300, 250)
self.set_border_width(20)
self.set_position(gtk.WIN_POS_CENTER)
self.connect("destroy", self.close_window)
self.fixed = gtk.Fixed()
self.label_set_pass = gtk.Label("Password:")
self.label_set_pass.set_text(lt("Password", self.lang)+":")
self.entry_set_pass = gtk.Entry()
self.label_startup = gtk.Label("Load in startup Ubuntu")
self.label_startup.set_text(lt("Load in startup Ubuntu", self.lang))
self.check_startup = gtk.CheckButton()
self.check_startup.set_active(self.config["startup"])
self.check_startup.connect("toggled", self.entry_checkbox)
self.label_set_port = gtk.Label("Port:")
self.label_set_port.set_text(lt("Port", self.lang)+":")
self.entry_set_port = gtk.Entry()
self.entry_set_port.set_text(str(self.config["port"]))
self.label_choose_image = gtk.Label("Choose images folder")
self.label_choose_image.set_text(lt("Choose images folder", self.lang)+":")
self.chooser_image_folder = \
gtk.FileChooserButton(lt("Choose images folder", self.lang))
self.chooser_image_folder.set_action(
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER
)
self.chooser_image_folder.set_size_request(150,35)
self.label_set_language = gtk.Label("Set language:")
self.label_set_language.set_text(lt("Set language", self.lang)+":")
self.combo_language = gtk.combo_box_new_text()
#add languages
for lang in langs:
self.combo_language.append_text(lang)
self.combo_language.set_active(langs.keys().index(\
self.config["lang"]))
self.combo_language.connect("changed", self.select_language)
#get images path
imgpath=self.config.get("images_path", \
os.path.expanduser("~/Pictures"))
self.chooser_image_folder.set_filename(imgpath)
self.button_save = gtk.Button("Save")
self.button_save.set_size_request(130, 30)
self.button_save.connect("clicked", self.onsave)
self.fixed.put(self.label_set_pass, 10, 5)
self.fixed.put(self.entry_set_pass, 90, 0)
#self.fixed.put(self.check_startup, 5, 42)
#self.fixed.put(self.label_startup, 40, 44)
self.fixed.put(self.label_set_port, 10, 44)
		self.fixed.put(self.entry_set_port, 90, 42)
self.fixed.put(self.label_choose_image, 10, 90)
self.fixed.put(self.chooser_image_folder, 10, 110)
self.fixed.put(self.label_set_language, 10, 150)
self.fixed.put(self.combo_language, 10, 170)
self.fixed.put(self.button_save, 120, 170)
self.add(self.fixed)
self.show_all()
	def select_language(self, combo_language, data=None):
		index = self.combo_language.get_active()
		self.config["lang"]=langs.keys()[index]
		#the new language takes effect after the launcher restarts
		dlg=gtk.MessageDialog(self, gtk.DIALOG_DESTROY_WITH_PARENT,
			gtk.MESSAGE_INFO, gtk.BUTTONS_CLOSE,
			"Launcher restart required.")
		dlg.run()
		dlg.destroy()
	def onsave(self, widget, data=None):
		"""Save options configuration"""
		from main import SECRET_KEY
		passwd = self.entry_set_pass.get_text()
		check = self.check_startup.get_active()
		port = self.entry_set_port.get_text()
		#store only a salted md5 digest, never the plain password
		hashed=hashlib.md5()
		hashed.update(SECRET_KEY+passwd)
		if passwd:
			self.config["password"] = hashed.hexdigest()
		self.config["startup"] = check
		try:
			self.config["port"] = int(port)
		except ValueError:
			pass #keep the previous port if the entry is not a number
		self.config["images_path"] = self.chooser_image_folder.get_filename()
		save_config(self.config)
		self.destroy()
	def add_startup(self, data=None):
		#append the launcher start command to ~/.xinitrc (created if missing)
		path = os.path.join(os.path.expanduser("~"), ".xinitrc")
		with open(path, 'a') as xinitrc:
			xinitrc.write("/opt/uair/bin/uairlauncher start\n")
	def del_startup(self, data=None):
		#strip the launcher start command from ~/.xinitrc;
		#open() does not expand "~", so expanduser is required
		try:
			path = os.path.expanduser("~/.xinitrc")
			lines = open(path).readlines()
			target = open(path, 'w')
			for line in lines:
				target.write(line.replace("/opt/uair/bin/uairlauncher start", ""))
			target.close()
		except IOError:
			pass
	def entry_checkbox(self, widget):
		#add or remove the startup entry to match the checkbox state
		if self.check_startup.get_active():
			self.add_startup()
		else:
			self.del_startup()
		return
def close_window(self, widget, data=None):
self.destroy()
class AboutWindow(gtk.Window):
def __init__(self, config):
self.config=config
self.lang=self.config.get("lang", "en")
gtk.Window.__init__(self)
self.set_resizable(False)
self.set_title(lt("About", self.lang))
self.set_size_request(540, 250)
self.set_border_width(20)
self.set_position(gtk.WIN_POS_CENTER)
self.connect("destroy", self.close_window)
self.fixed = gtk.Fixed()
		self.label_about = gtk.Label(lt("U-Air\n\
---------------------\n\
U-Air allows you to browse, upload and download your files, wherever you are.\n\
Forgot to take a file from home to a friend's place? Not a problem anymore.\n\
You can easily browse your files, upload new ones and listen to MP3 songs.", self.lang))
self.label_authors=gtk.Label("Authors:")
self.label_authors.set_text(lt("Authors", self.lang)+":")
author1="Michal Rosiak <michal0468@gmail.com>"
author2="Marcin Swierczynski <orneo1212@gmail.com>"
self.label_autor1 = gtk.Label(author1)
self.label_autor2 = gtk.Label(author2)
self.fixed.put(self.label_about, 5, 10)
self.fixed.put(self.label_authors, 5, 120)
self.fixed.put(self.label_autor1, 10, 150)
self.fixed.put(self.label_autor2, 10, 180)
self.add(self.fixed)
self.show_all()
def close_window(self, widget, data=None):
self.destroy()
#class HelpConsole():
#	def __init__(self):
#		print("Help Console:\n")
#		print("start_console - Start web server in console")
if __name__ == "__main__":
	main=MainWindow()
	#command-line arguments also work in GUI mode: run the action and quit
	if "start" in sys.argv:
		main.hide()
		main.start(None)
		main.close_app(None)
	elif "stop" in sys.argv:
		main.hide()
		main.stop(None)
		main.close_app(None)
	try:
		gtk.main()
	except:
		#make sure the server is stopped even if the main loop crashes
		import traceback
		traceback.print_exc()
		main.stop(None)
|
gpl-3.0
| 3,870,659,540,756,294,000
| 31.196309
| 99
| 0.597738
| false
| 3.507403
| true
| false
| false
|
Maescool/libk8000
|
setup.py
|
1
|
1727
|
#!/usr/bin/env python
#
# k8000 install script
#
# See COPYING for info about the license (GNU GPL)
# Check AUTHORS to see who wrote this software.
from distutils.core import setup
from distutils.extension import Extension
import sys, glob, re, os
# Check for Python < 2.2
if sys.version < '2.2':
sys.exit('Error: Python-2.2 or newer is required. Current version:\n %s'
% sys.version)
authors = [ ('Pieter Maes', 'maescool@gmail.com') ]
lname = max([len(author[0]) for author in authors])
__author__ = '\n'.join(['%s <%s>' % (author[0].ljust(lname), author[1])
for author in authors])
short = 'Connection library to Velleman Kit K8000'
long = '''\
This is a connection Library to the Velleman K8000 Input/Output interface card.'''
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Input/output :: Driver' ]
py_version = 'python%d.%d' % (sys.version_info[0], sys.version_info[1])
incl_dir = [os.path.join(sys.prefix, 'include', py_version), os.curdir]
setup(name='pyk8000',
      version='0.1',
      description=short,
      long_description=long,
      classifiers=classifiers,
      author=', '.join([author[0] for author in authors]),
      author_email=', '.join([author[1] for author in authors]),
      url='http://github.com/Maescool/libk8000',
      ext_modules=[Extension('_k8000',
                             sources=['k8000.c', 'k8000.i'],
                             swig_opts=['-shadow'],
                             include_dirs=incl_dir)],
      py_modules=["k8000"],
      license='GPL'
     )
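# Typical build and install steps for this SWIG extension (a sketch; it
# assumes SWIG and the Python development headers are installed):
#   python setup.py build_ext --inplace   # distutils invokes swig on k8000.i
#   sudo python setup.py install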
|
gpl-2.0
| 6,887,290,133,431,512,000
| 33.54
| 95
| 0.643312
| false
| 3.467871
| false
| false
| false
|