repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
rodxavier/open-pse-initiative | django_project/jobs/management/commands/update_listed_companies.py | Python | mit | 1,437 | 0.001392 | import logging
from datetime import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import requests
from companies.models import Company
logger = logging.getLogger('jobs.management.commands')
class Command(BaseCommand):
help = 'Update currently listed companies'
def handle(self, *args, **options):
logger.info('Started updating currently listed companies')
companies = Company.objects.filter(is_inde | x=False)
r = requests.get(settings.COMPANY_LIST_URL)
records = r.json()['records']
for record in records:
symbol = record['securitySymbol']
name = record['securityName']
listing_date = record['listingDate'].split()[0]
status = record['securityStatus']
try:
company = companies.get(symbol=symbol)
companies = companies.exclude(id=c | ompany.id)
except Company.DoesNotExist:
company = Company(symbol=symbol)
company.name = name
company.is_currently_listed = True
company.is_suspended = True if status == 'S' else False
company.listing_date = datetime.strptime(listing_date, '%Y-%m-%d').date()
company.save()
companies.update(is_currently_listed=False, is_suspended=False)
logger.info('Finished updating currently listed companies')
|
qtproject/qt-creator | tests/system/tools/toolfunctions.py | Python | gpl-3.0 | 1,722 | 0.000581 | ############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: http | s://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at ht | tps://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
import os
import sys
def checkDirectory(directory):
if not os.path.exists(directory):
print("Given path '%s' does not exist" % directory)
sys.exit(1)
objMap = os.path.join(directory, "objects.map")
if not os.path.exists(objMap):
print("Given path '%s' does not contain an objects.map file" % directory)
sys.exit(1)
return objMap
def getFileContent(filePath):
if os.path.isfile(filePath):
f = open(filePath, "r")
data = f.read()
f.close()
return data
return ""
|
tarikgwa/nfd | newfies/dialer_campaign/models.py | Python | mpl-2.0 | 26,728 | 0.003218 | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <info@star2billing.com>
#
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
from django.utils.timezone import now
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.db.models.signals import post_save
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db import transaction
from django_lets_go.intermediate_model_base_class import Model
from django_lets_go.common_functions import get_unique_code, percentage
from audiofield.models import AudioFile
from datetime import datetime
from django.utils.timezone import utc
from dateutil.relative | delta import relativedelta
import jsonfield
import logging
import re
from .constants import SUBSCRIBER_STATUS, CAMPAIGN_STATUS, AMD_BEHAVIOR
from dialer_contact.constants import CONTACT_STATU | S
from dialer_contact.models import Phonebook, Contact
from dialer_gateway.models import Gateway
from sms.models import Gateway as SMS_Gateway
from dnc.models import DNC
#from agent.models import Agent
logger = logging.getLogger('newfies.filelog')
def build_kwargs_runnning_campaign():
"""Return kwargs configured to filter running Campaign"""
kwargs = {}
kwargs['status'] = CAMPAIGN_STATUS.START
tday = datetime.utcnow().replace(tzinfo=utc)
kwargs['startingdate__lte'] = datetime(
tday.year, tday.month, tday.day, tday.hour,
tday.minute, tday.second, tday.microsecond).replace(tzinfo=utc)
kwargs['expirationdate__gte'] = datetime(
tday.year, tday.month, tday.day, tday.hour,
tday.minute, tday.second, tday.microsecond).replace(tzinfo=utc)
# s_time = "%s:%s:%s" % (str(tday.hour), str(tday.minute), str(tday.second))
# Fix for timezone
today = datetime.now()
# or Fix for timezone
# today = datetime.utcnow().replace(tzinfo=utc)
kwargs['daily_start_time__lte'] = today.strftime('%H:%M:%S')
kwargs['daily_stop_time__gte'] = today.strftime('%H:%M:%S')
# weekday status 1 - YES
# self.model._meta.get_field(tday.strftime("%A").lower()).value()
kwargs[tday.strftime("%A").lower()] = 1
return kwargs
class CampaignManager(models.Manager):
"""Campaign Manager"""
def get_running_campaign(self):
"""Return all the active campaigns which will be running based on
the expiry date, the daily start/stop time and days of the week"""
kwargs = build_kwargs_runnning_campaign()
return Campaign.objects.filter(**kwargs)
def get_expired_campaign(self):
"""
Return all the campaigns which are expired or going to expire
based on the expiry date but status is not 'END'
"""
kwargs = {}
kwargs['expirationdate__lte'] = datetime.utcnow().replace(tzinfo=utc)
return Campaign.objects.filter(**kwargs).exclude(status=CAMPAIGN_STATUS.END)
def common_contact_authorization(dialersetting, str_contact):
"""
Common Function to check contact no is authorized or not.
For this we will check the dialer settings : whitelist and blacklist
"""
whitelist = dialersetting.whitelist
blacklist = dialersetting.blacklist
if whitelist == '*':
whitelist = ''
if blacklist == '*':
blacklist = ''
if whitelist and len(whitelist) > 0:
try:
if re.search(whitelist, str_contact):
return True
except ValueError:
logger.error('Error to identify the whitelist')
if blacklist and len(blacklist) > 0:
try:
if re.search(blacklist, str_contact):
return False
except ValueError:
logger.error('Error to identify the blacklist')
return True
def set_campaign_code():
return get_unique_code(length=5)
def set_expirationdate():
return datetime.utcnow().replace(tzinfo=utc) + relativedelta(days=+1)
class Campaign(Model):
"""This defines the Campaign
**Attributes**:
* ``campaign_code`` - Auto-generated campaign code to identify the campaign
* ``name`` - Campaign name
* ``description`` - Description about the Campaign
* ``status`` - Campaign status
* ``callerid`` - Caller ID
* ``startingdate`` - Starting date of the Campaign
* ``expirationdate`` - Expiry date of the Campaign
* ``daily_start_time`` - Start time
* ``daily_stop_time`` - End time
* ``week_day_setting`` (monday, tuesday, wednesday, thursday, friday, \
saturday, sunday)
* ``frequency`` - Frequency, speed of the campaign. number of calls/min
* ``callmaxduration`` - Max retry allowed per user
* ``maxretry`` - Max retry allowed per user
* ``intervalretry`` - Time to wait between retries in seconds
* ``completion_maxretry`` - Number of retries until a contact completes survey
* ``completion_intervalretry`` - Time delay in seconds before retrying contact \
to complete survey
* ``calltimeout`` - Number of seconds to timeout on calls
* ``aleg_gateway`` - Gateway to use to reach the contact
* ``extra_data`` - Additional data to pass to the application
* ``totalcontact`` - Total Contact for this campaign
* ``completed`` - Total Contact that completed Call / Survey
* ``has_been_started`` - campaign started flag
* ``has_been_duplicated`` - campaign duplicated flag
* ``voicemail`` - Enable Voicemail Detection
* ``amd_behavior`` - Detection Behaviour
* ``sms_gateway`` - Gateway to transport the SMS
**Relationships**:
* ``content_type`` - Defines the application (``survey``) \
to use when the call is established on the A-Leg
* ``object_id`` - Defines the object of content_type application
* ``content_object`` - Used to define the Voice App or the Survey with generic ForeignKey
* ``phonebook`` - Many-To-Many relationship to the Phonebook model.
* ``user`` - Foreign key relationship to the a User model. \
Each campaign assigned to a User
* ``voicemail_audiofile`` - Foreign key relationship to the a AudioFile model.
* ``dnc`` - Foreign key relationship to the a DNC model.
**Name of DB table**: dialer_campaign
"""
campaign_code = models.CharField(
unique=True, max_length=20, blank=True, verbose_name=_("campaign code"),
help_text=_('this code is auto-generated by the platform, this is used to identify the campaign'),
default=set_campaign_code)
name = models.CharField(max_length=100, verbose_name=_('name'))
description = models.TextField(verbose_name=_('description'), blank=True,
null=True, help_text=_("campaign description"))
user = models.ForeignKey('auth.User', related_name='Campaign owner')
status = models.IntegerField(choices=list(CAMPAIGN_STATUS), default=CAMPAIGN_STATUS.PAUSE,
verbose_name=_("status"), blank=True, null=True)
callerid = models.CharField(max_length=80, blank=True, verbose_name=_("Caller ID Number"),
help_text=_("outbound Caller ID"))
caller_name = models.CharField(max_length=80, blank=True, verbose_name=_("Caller Name"),
help_text=_("outbound Caller Name"))
# General Starting & Stopping date
startingdate = models.DateTimeField(default=now, verbose_name=_('start'))
stoppeddate = models.DateTimeField(default=set_expirationdate, verbose_name=_('stopped'))
expirationdate = models.DateTimeField(default=set_expirationdate, verbose_name=_('finish'))
# Per Day Starting & Stopping Time
|
mitsuhiko/flask | src/flask/config.py | Python | bsd-3-clause | 11,068 | 0.000542 | import errno
import os
import types
import typing as t
from werkzeug.utils import import_string
class ConfigAttribute:
"""Makes an attribute forward to the config"""
def __init__(self, name: str, get_converter: t.Optional[t.Callable] = None) -> None:
self.__name__ = name
self.get_converter = get_converter
def __get__(self, obj: t.Any, owner: t.Any = None) -> t.Any:
if obj is None:
return self
rv = obj.config[self.__name__]
if self.get_converter is not None:
rv = self.get_converter(rv)
return rv
def __set__(self, obj: t.Any, value: t.Any) -> None:
obj.config[self.__name__] = value
class Config(dict):
"""Works exactly like a dict but provides ways to fill it from files
or special dictionaries. There are two common patterns to populate the
config.
Either you can fill the config from a config file::
app.config.from_pyfile('yourconfig.cfg')
Or alternatively you can define the configuration options in the
module that calls :meth:`from_object` or provide an import path to
a module that should be loaded. It is also possible to tell it to
use the same module and with that provide the configuration values
just before the call::
DEBUG = True
SECRET_KEY = 'development key'
app.config.from_object(__name__)
In both cases (loading from any Python file or loading from modules),
only uppercase keys are added to the config. This makes it possible to use
lowercase values in the config file for temporary values that are not added
to the config or to define the config keys in the same file that implements
the application.
Probably the most interesting way to load configurations is from an
environment variable pointing to a file::
app.config.from_envvar('YOURAPPLICATION_SETTINGS')
In this case before launching the application you have to set this
environment variable to the file you want to use. On Linux and OS X
use the export statement::
export YOURAPPLICATION_SETTINGS='/path/to/config/file'
On windows use `set` instead.
:param root_path: path to which files are read relative from. When the
config object is created by the application, this is
the application's :attr:`~flask.Flask.root_path`.
:param defaults: an optional dictionary of default values
"""
def __init__(self, root_path: str, defaults: t.Optional[dict] = None) -> None:
dict.__init__(self, defaults or {})
self.root_path = root_path
def from_envvar(self, variable_name: str, silent: bool = False) -> bool:
"""Loads a configuration from an environment variable pointing to
a configuration file. This is basically just a shortcut with nicer
error messages for this line of code::
app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])
:param variable_name: name of the environment variable
:param silent: set to ``True`` if you want silent failure for missing
files.
:return: bool. ``True`` if able to load config, ``False`` otherwise.
"""
rv = os.environ.get(variable_name)
if not rv:
if silent:
return False
raise RuntimeError(
f"The environment variable {variable_name!r} is not set"
" and as such configuration could not be loaded. Set"
" this variable and make it point to a configuration"
" file"
)
return self.from_pyfile(rv, silent=silent)
def from_pyfile(self, filename: str, silent: bool = False) -> bool:
"""Updates the values in the config from a Python file. This function
behaves as if the file was imported as module with the
:meth:`from_object` function.
:param filename: the filename of the config. This can either be an
absolute filename or a filename relative to the
root path.
:param silent: set to ``True`` if you want silent failure for missing
files.
.. versionadded:: 0.7
`silent` parameter.
"""
filename = os.path.join(self.root_path, filename)
d = types.ModuleType("config")
d.__file__ = filename
try:
with open(filename, mode="rb") as config_file:
exec(compile(config_file.read(), filename, "exec"), d.__dict__)
except OSError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR):
return False
e.strerror = f"Unable to load configuration file ({e.strerror})"
raise
self.from_object(d)
return True
def from_object(self, obj: t.Union[object, str]) -> None:
"""Updates the values from the given object. An object can be of one
of the following two types:
- a string: in this case the object with that name will be imported
- an actual object reference: that object is used directly
Objects are usually either modules or classes. :meth:`from_object`
loads only the uppercase attributes of the module/class. A ``dict``
object will not work with :meth:`from_object` because the keys of a
``dict`` are not attributes of the ``dict`` class.
Example of module-based configuration::
app.config.from_object('yourapplication.default_config')
from yourapplication import default_config
app.config.from_object(default_config)
Nothing is done to the object before loading. If the object is a
class and has ``@property`` attributes, it needs to be
instantiated before being passed to this method.
You should not use this function to load the actual configuration but
rather configuration defaults. The actual config should be loaded
with :meth:`from_pyfile` and ideally from a location not within the
package because the package might be installed system wide.
See :ref:`config-dev-prod` for an example of class-based configuration
using :meth:`from_object`.
:param obj: an import name or object
"""
if isinstance(obj, str):
obj = import_string(obj)
for key in dir(obj):
if key.isupper():
self[key] = getattr(obj, key)
def from_file(
self,
filename: str,
load: t.Callable[[t.IO[t.Any]], t.Mapping],
silent: bool = False,
) -> bool | :
"""Update the values in the config from a file that is loaded
using the ``load`` parameter. The loaded data is passed to the
:m | eth:`from_mapping` method.
.. code-block:: python
import toml
app.config.from_file("config.toml", load=toml.load)
:param filename: The path to the data file. This can be an
absolute path or relative to the config root path.
:param load: A callable that takes a file handle and returns a
mapping of loaded data from the file.
:type load: ``Callable[[Reader], Mapping]`` where ``Reader``
implements a ``read`` method.
:param silent: Ignore the file if it doesn't exist.
.. versionadded:: 2.0
"""
filename = os.path.join(self.root_path, filename)
try:
with open(filename) as f:
obj = load(f)
except OSError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = f"Unable to load configuration file ({e.strerror})"
raise
return self.from_mapping(obj)
def from_json(self, filename: str, silent: bool = False) -> bool:
"""Update the values in the config from a JSON file. The loaded
data is passed to the :meth:`from_mapping` method.
:param filename: The path to the JSON file. This can be an
absolute path or rela |
gitmill/gitmill | django/repository/models.py | Python | mit | 2,579 | 0.002326 | from django.db import models
from django.contrib.auth.models import User, Group
from django.utils.translation import ugettext_lazy as _
from django.core.validators import RegexValidator
from django.conf import settings
class Repository(models.Model):
"""
Git repository
"""
# basic info
name = models.CharField(
max_length=64,
validators=[RegexValidator(regex=r'^[^\x00-\x2c\x2f\x3a-\x40\x5b-\x5e\x60\x7b-\x7f\s]+$')],
verbose_name=_('name'),
help_text=_('Name of the repository, cannot contain special characters other than hyphens.'),
)
description = models.TextField(blank=True, verbose_name=_('description'))
# owner
user = models.ForeignKey(
User,
blank=True,
null=True,
related_name='repositories',
| on_delete=models.SET_NULL,
verbose_name=_('user'),
help_text=_('Owner of the repository. Repository path will be prefixed by owner\'s username.'),
)
# access control
users = models.ManyToManyField(
User,
blank=True,
verbose_name=_('users'),
help_text=_('These users have right access to the repository.'),
)
groups = models.ManyToManyField(
Group,
blank=True,
verbose_name=_('groups'),
help_text | =_('Users in these groups have right access to the repository.'),
)
is_private = models.BooleanField(
default=True,
verbose_name=_('is private'),
help_text=_('Restrict read access to specified users and groups.'),
)
# meta
created = models.DateTimeField(auto_now_add=True, verbose_name=_('created'))
modified = models.DateTimeField(auto_now=True, verbose_name=_('modified'))
class Meta:
verbose_name = _('repository')
verbose_name_plural = _('repositories')
ordering = ['user', 'name']
unique_together = ['user', 'name']
def __unicode__(self):
if self.user:
return u'%s/%s' % (self.user.username, self.name)
return u'./%s' % (self.name)
def can_read(self, user):
if not user and settings.PROTECTED:
return False
if not self.is_private:
return True
return self.can_write(user)
def can_write(self, user):
if not user:
return False
if user.id == self.user_id:
return True
if self.users.filter(pk=user.id).exists():
return True
if self.groups.filter(user__pk=user.id).exists():
return True
return False
|
ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/cherrypy/cherrypy/lib/xmlrpcutil.py | Python | bsd-3-clause | 1,606 | 0.002491 | import sys
import cherrypy
fro | m cherrypy._cpcompat import ntob
def get_xmlrpclib():
try:
import xmlrpc.client as x
except ImportError:
import xmlrpclib as x
return x
def process_body():
"""Retu | rn (params, method) from request body."""
try:
return get_xmlrpclib().loads(cherrypy.request.body.read())
except Exception:
return ('ERROR PARAMS', ), 'ERRORMETHOD'
def patched_path(path):
"""Return 'path', doctored for RPC."""
if not path.endswith('/'):
path += '/'
if path.startswith('/RPC2/'):
# strip the first /rpc2
path = path[5:]
return path
def _set_response(body):
# The XML-RPC spec (http://www.xmlrpc.com/spec) says:
# "Unless there's a lower-level error, always return 200 OK."
# Since Python's xmlrpclib interprets a non-200 response
# as a "Protocol Error", we'll just return 200 every time.
response = cherrypy.response
response.status = '200 OK'
response.body = ntob(body, 'utf-8')
response.headers['Content-Type'] = 'text/xml'
response.headers['Content-Length'] = len(body)
def respond(body, encoding='utf-8', allow_none=0):
xmlrpclib = get_xmlrpclib()
if not isinstance(body, xmlrpclib.Fault):
body = (body,)
_set_response(xmlrpclib.dumps(body, methodresponse=1,
encoding=encoding,
allow_none=allow_none))
def on_error(*args, **kwargs):
body = str(sys.exc_info()[1])
xmlrpclib = get_xmlrpclib()
_set_response(xmlrpclib.dumps(xmlrpclib.Fault(1, body)))
|
goodfeli/pylearn2 | pylearn2/models/mlp.py | Python | bsd-3-clause | 166,284 | 0 | """
Multilayer Perceptron
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2012-2013, Universite de Montreal"
__credits__ = ["Ian Goodfellow", "David Warde-Farley"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
import logging
import math
import operator
import sys
import warnings
import numpy as np
from theano.compat import six
from theano.compat.six.moves import reduce, xrange
from theano import config
from theano.gof.op import get_debug_values
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.sandbox.cuda.dnn import dnn_available, dnn_pool
from theano.tensor.signal.downsample import max_pool_2d
import theano.tensor as T
from pylearn2.compat import OrderedDict
from pylearn2.costs.mlp import Default
from pylearn2.expr.probabilistic_max_pooling import max_pool_channels
from pylearn2.linear import conv2d
from pylearn2.linear.matrixmul import MatrixMul
from pylearn2.model_extensions.norm_constraint import MaxL2FilterNorm
from pylearn2.models.model import Model
from pylearn2.monitor import get_monitor_doc
from pylearn2.expr.nnet import arg_of_softmax
from pylearn2.expr.nnet import pseudoinverse_softmax_numpy
from pylearn2.space import CompositeSpace
from pylearn2.space import Conv2DSpace
from pylearn2.space import Space
from pylearn2.space import VectorSpace, IndexSpace
from pylearn2.utils import function
from pylearn2.utils import is_iterable
from pylearn2.utils import py_float_types
from pylearn2.utils import py_integer_types
from pylearn2.utils import safe_union
from pylearn2.utils import safe_zip
from pylearn2.utils import safe_izip
from pylearn2.utils import sharedX
from pylearn2.utils import wraps
from pylearn2.utils import contains_inf
from pylearn2.utils import isfinite
from pylearn2.utils.data_specs import DataSpecsMapping
from pylearn2.expr.nnet import (elemwise_kl, kl, compute_precision,
compute_recall, compute_f1)
# Only to be used by the deprecation warning wrapper functions
from pylearn2.costs.mlp import L1WeightDecay as _L1WD
from pylearn2.costs.mlp import WeightDecay as _WD
from pylearn2.sandbox.rnn.models.mlp_hook import RNNWrapper
logger = logging.getLogger(__name__)
logger.debug("MLP changing the recursion limit.")
# We need this to be high enough that the big theano graphs we make
# when doing max pooling via subtensors don't cause python to complain.
# python intentionally declares stack overflow well before the stack
# segment is actually exceeded. But we can't make this value too big
# either, or we'll get seg faults when the python interpreter really
# does go over the stack segment.
# IG encountered seg faults on eos3 (a machine at LISA labo) when using
# 50000 so for now it is set to 40000.
# I think the actual safe recursion limit can't be predicted in advance
# because you don't know how big of a stack frame each function will
# make, so there is not really a "correct" way | to do this. Really the
# python interpreter should provide an option to raise the error
# precisely when you're going to exceed the stack segment.
sys.setrecursionlimit(40000)
if six.PY3:
LayerBase = six.with_metaclass(RNNWrapper, Model)
else:
LayerBase = Model
cla | ss Layer(LayerBase):
"""
Abstract class. A Layer of an MLP.
May only belong to one MLP.
Parameters
----------
kwargs : dict
Passed on to the superclass.
Notes
-----
This is not currently a Block because as far as I know the Block interface
assumes every input is a single matrix. It doesn't support using Spaces to
work with composite inputs, stacked multichannel image inputs, etc. If the
Block interface were upgraded to be that flexible, then we could make this
a block.
"""
# This enables RNN compatibility
__metaclass__ = RNNWrapper
# When applying dropout to a layer's input, use this for masked values.
# Usually this will be 0, but certain kinds of layers may want to override
# this behaviour.
dropout_input_mask_value = 0.
def get_mlp(self):
"""
Returns the MLP that this layer belongs to.
Returns
-------
mlp : MLP
The MLP that this layer belongs to, or None if it has not been
assigned to an MLP yet.
"""
if hasattr(self, 'mlp'):
return self.mlp
return None
def set_mlp(self, mlp):
"""
Assigns this layer to an MLP. This layer will then use the MLP's
random number generator, batch size, etc. This layer's name must
be unique within the MLP.
Parameters
----------
mlp : MLP
"""
assert self.get_mlp() is None
self.mlp = mlp
def get_layer_monitoring_channels(self, state_below=None,
state=None, targets=None):
"""
Returns monitoring channels.
Parameters
----------
state_below : member of self.input_space
A minibatch of states that this Layer took as input.
Most of the time providing state_blow is unnecessary when
state is given.
state : member of self.output_space
A minibatch of states that this Layer took on during fprop.
Provided externally so that we don't need to make a second
expression for it. This helps keep the Theano graph smaller
so that function compilation runs faster.
targets : member of self.output_space
Should be None unless this is the last layer.
If specified, it should be a minibatch of targets for the
last layer.
Returns
-------
channels : OrderedDict
A dictionary mapping channel names to monitoring channels of
interest for this layer.
"""
return OrderedDict()
def fprop(self, state_below):
"""
Does the forward prop transformation for this layer.
Parameters
----------
state_below : member of self.input_space
A minibatch of states of the layer below.
Returns
-------
state : member of self.output_space
A minibatch of states of this layer.
"""
raise NotImplementedError(
str(type(self)) + " does not implement fprop.")
def cost(self, Y, Y_hat):
"""
The cost of outputting Y_hat when the true output is Y.
Parameters
----------
Y : theano.gof.Variable
The targets
Y_hat : theano.gof.Variable
The predictions.
Assumed to be the output of the layer's `fprop` method.
The implmentation is permitted to do things like look at the
ancestors of `Y_hat` in the theano graph. This is useful for
e.g. computing numerically stable *log* probabilities when
`Y_hat` is the *probability*.
Returns
-------
cost : theano.gof.Variable
A Theano scalar describing the cost.
"""
raise NotImplementedError(
str(type(self)) + " does not implement mlp.Layer.cost.")
def cost_from_cost_matrix(self, cost_matrix):
"""
The cost final scalar cost computed from the cost matrix
Parameters
----------
cost_matrix : WRITEME
Examples
--------
>>> # C = model.cost_matrix(Y, Y_hat)
>>> # Do something with C like setting some values to 0
>>> # cost = model.cost_from_cost_matrix(C)
"""
raise NotImplementedError(
str(type(self)) + " does not implement "
"mlp.Layer.cost_from_cost_matrix.")
def cost_matrix(self, Y, Y_hat):
"""
The element wise cost of outputting Y_hat when the true output is Y.
Parameters
----------
Y : WRITEME
Y_hat : WRITEME
Returns
-------
WRITEME
"""
raise NotImplementedError(
str(type(self)) + " does not implement mlp.Layer.cost_matrix")
def set_weights(self, weights):
"""
Sets t |
alirizakeles/tendenci | tendenci/apps/payments/payflowlink/urls.py | Python | gpl-3.0 | 243 | 0.00823 | from django.conf.ur | ls import *
urlpatterns = patterns('tendenci.apps.payments.payflowlink.views',
url(r'^thankyou/$', 'thank_you', name="payflowlink.thank_you"),
url(r'^silentpost/', 'silent_post', name="payflowlink.silent_post"),
) | |
bluecap-se/yarr.client | tests/conftest.py | Python | mit | 161 | 0 | # -*- coding: utf-8 -*-
import pytes | t
from yarr_client.app import configurate_app
@pytest.fixture
def app():
app, _, | _ = configurate_app()
return app
|
ydkhatri/mac_apt | plugins/helpers/hfs_alt.py | Python | mit | 22,333 | 0.006358 |
'''
Copyright 2011 Jean-Baptiste B'edrune, Jean Sigwald
Using New BSD License:
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
#
# This code has since been edited to improve HFS parsing, add lzvn/lzfse support
# and is now a part of the mac_apt framework
#
import os
import mmap
import sys
import struct
import tempfile
import zlib
import pytsk3
import logging
from plugins.helpers.common import CommonFunctions
from plugins.helpers.btree import AttributesTree, CatalogTree, ExtentsOverflowTree
from plugins.helpers.structs import *
log = logging.getLogger('MAIN.HELPERS.HFS_ALT')
lzfse_capable = False
try:
import liblzfse
lzfse_capable = True
except ImportError:
print("liblzfse not found. Won't decompress lzfse/lzvn streams")
def write_file(filename,data):
f = open(filename, "wb")
f.write(data)
f.close()
def lzvn_decompress(compressed_stream, compressed_size, uncompressed_size): #TODO: Move to a class!
'''Adds Prefix and Postfix bytes as required by decompressor,
then decompresses and returns uncompressed bytes buffer
'''
header = b'bvxn' + struct.pack('<I', uncompressed_size) + struct.pack('<I', compressed_size)
footer = b'bvx$'
return liblzfse.decompress(header + compressed_stream + footer)
class HFSFile(object):
    """Random access to one HFS+ file fork via its extent list.

    Gathers the fork's initial extents plus any extents-overflow records
    from the volume so blocks can be read by index or streamed out.
    """
    def __init__(self, volume, hfsplusfork, fileID, deleted=False):
        # volume: parent volume object providing read() and extent lookups.
        self.volume = volume
        self.blockSize = volume.blockSize
        self.fileID = fileID
        self.totalBlocks = hfsplusfork.totalBlocks
        self.logicalSize = hfsplusfork.logicalSize
        self.extents = []
        # For deleted files the allocation-bitmap check in readBlock() is
        # skipped: their blocks may legitimately be marked free again.
        self.deleted = deleted
        b = 0
        # The first extents live in the fork record itself.
        for extent in hfsplusfork.HFSPlusExtentDescriptor:
            self.extents.append(extent)
            b += extent.blockCount
        # Any remaining extents must be chased through the overflow tree
        # until the running block count matches the fork's total.
        while b != hfsplusfork.totalBlocks:
            #log.debug("extents overflow {}".format(b))
            k,v = volume.getExtentsOverflowForFile(fileID, b)
            if not v:
                log.debug("extents overflow missing, startblock={}".format(b))
                break
            for extent in v:
                self.extents.append(extent)
                b += extent.blockCount
    def copyOutFile(self, outputfile, truncate=True):
        """Copy the fork block-by-block to *outputfile*, trimming the
        trailing allocation padding to logicalSize unless truncate=False."""
        f = open(outputfile, "wb")
        for i in range(self.totalBlocks):
            f.write(self.readBlock(i))
        if truncate:
            f.truncate(self.logicalSize)
        f.close()
    '''def readAllBuffer(self, truncate=True):
        r = b""
        for i in range(self.totalBlocks):
            r += self.readBlock(i)
        if truncate:
            r = r[:self.logicalSize]
        return r
    '''
    def readAllBuffer(self, truncate=True, output_file=None):
        '''Write to output_file if valid, else return a buffer of data.
        Warning: If file size > 200 MiB, b'' is returned, file data is only written to output_file.
        '''
        r = b""
        bs = self.volume.blockSize
        blocks_max = 52428800 // bs # 50MB per read keeps peak memory bounded
        for extent in self.extents:
            if extent.blockCount == 0: continue
            #if not self.deleted and self.fileID != kHFSAllocationFileID and not self.volume.isBlockInUse(lba):
            #    log.debug("FAIL, block "0x{:x}" not marked as used".format(n))
            if extent.blockCount > blocks_max:
                # Large extent: read it in blocks_max-sized slices.
                counter = blocks_max  # NOTE(review): unused variable, kept as-is
                remaining_blocks = extent.blockCount
                start_address = extent.startBlock * bs
                while remaining_blocks > 0:
                    num_blocks_to_read = min(blocks_max, remaining_blocks)
                    size = num_blocks_to_read * bs
                    data = self.volume.read(start_address, size)
                    if output_file:
                        output_file.write(data)
                    elif self.logicalSize < 209715200: # 200MiB cap on in-memory buffer
                        r += data
                    remaining_blocks -= num_blocks_to_read
                    start_address += size
            else:
                # Small extent: one read covers it.
                data = self.volume.read(extent.startBlock * bs, bs * extent.blockCount)
                if output_file:
                    output_file.write(data)
                elif self.logicalSize < 209715200: # 200MiB
                    r += data
        if truncate:
            # Drop allocation-block padding beyond the logical EOF.
            if output_file:
                output_file.truncate(self.logicalSize)
            elif self.logicalSize < 209715200: # 200MiB
                r = r[:self.logicalSize]
        return r
    def processBlock(self, block, lba):
        # Hook for subclasses (e.g. compressed forks) to transform raw blocks.
        return block
    def readBlock(self, n):
        """Return allocation block *n* of the fork, or b"" if *n* is past
        the last extent. Raises ValueError for out-of-bounds or free blocks."""
        bs = self.volume.blockSize
        if n*bs > self.logicalSize:
            raise ValueError("BLOCK OUT OF BOUNDS")
        bc = 0
        # Walk the extent list until the extent containing block n is found.
        for extent in self.extents:
            bc += extent.blockCount
            if n < bc:
                # Translate the fork-relative index into a volume LBA.
                lba = extent.startBlock+(n-(bc-extent.blockCount))
                if not self.deleted and self.fileID != kHFSAllocationFileID and not self.volume.isBlockInUse(lba):
                    raise ValueError("FAIL, block %x not marked as used" % n)
                return self.processBlock(self.volume.read(lba*bs, bs), lba)
        return b""
class HFSCompressedResourceFork(HFSFile):
def __init__(self, volume, hfsplusfork, fileID, compression_type, uncompressed_size):
super(HFSCompressedResourceFork,self).__init__(volume, hfsplusfork | , fileID)
block0 = self.r | eadBlock(0)
self.compression_type = compression_type
self.uncompressed_size = uncompressed_size
if compression_type in [8, 12]: # 8 is lzvn, 12 is lzfse
#only tested for 8
self.header = HFSPlusCmpfLZVNRsrcHead.parse(block0)
#print(self.header)
else:
self.header = HFSPlusCmpfRsrcHead.parse(block0)
#print(self.header)
self.blocks = HFSPlusCmpfRsrcBlockHead.parse(block0[self.header.headerSize:])
log.debug("HFSCompressedResourceFork numBlocks:{}".format(self.blocks.numBlocks))
#HAX, readblock not implemented
def readAllBuffer(self, truncate=True, output_file=None):
'''Warning: If output size > 200 MiB, b'' is returned, file data is only written to output_file.'''
if self.compression_type in [7, 8, 11, 12] and not lzfse_capable:
raise ValueError('LZFSE/LZVN compression detected, no decompressor available!')
if self.logicalSize >= 209715200:
temp_file = tempfile.SpooledTemporaryFile(209715200)
super(HFSCompressedResourceFork, self).readAllBuffer(True, temp_file)
temp_file.seek(0)
buff = mmap.mmap(temp_file.fileno(), 0) # memory mapped file to access as buffer
else:
buff = super(HFSCompressedResourceFork, self).readAllBuffer()
r = b""
if self.compression_type in |
plotly/python-api | packages/python/plotly/plotly/validators/scatter3d/marker/line/_cmin.py | Python | mit | 533 | 0 | import _plotly_utils.basevalidators
class CminValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``scatter3d.marker.line.cmin`` attribute."""

    def __init__(
        self, plotly_name="cmin", parent_name="scatter3d.marker.line", **kwargs
    ):
        # Pop the overridable options out of kwargs first so that values
        # supplied explicitly by the caller win over these defaults.
        edit_type = kwargs.pop("edit_type", "calc")
        implied_edits = kwargs.pop("implied_edits", {"cauto": False})
        role = kwargs.pop("role", "info")
        super(CminValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            implied_edits=implied_edits,
            role=role,
            **kwargs
        )
MartinHjelmare/home-assistant | homeassistant/components/deconz/gateway.py | Python | apache-2.0 | 8,357 | 0 | """Representation of a deCONZ gateway."""
import asyncio
import async_timeout
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.const import CONF_EVENT, CONF_HOST, CONF_ID
from homeassistant.core import EventOrigin, callback
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect, async_dispatcher_send)
from homeassistant.util import slugify
from .const import (
_LOGGER, CONF_ALLOW_CLIP_SENSOR, CONF_ALLOW_DECONZ_GROUPS, CONF_BRIDGEID,
CONF_MASTER_GATEWAY, DOMAIN, NEW_DEVICE, NEW_SENSOR, SUPPORTED_PLATFORMS)
from .errors import AuthenticationRequired, CannotConnect
@callback
def get_gateway_from_config_entry(hass, config_entry):
"""Return gateway with a matching bridge id."""
return hass.data[DOMAIN][config_entry.data[CONF_BRIDGEID]]
class DeconzGateway:
    """Manages a single deCONZ gateway."""
    def __init__(self, hass, config_entry):
        """Initialize the system."""
        self.hass = hass
        self.config_entry = config_entry
        self.available = True   # last known connection state
        self.api = None         # pydeconz session, set in async_setup()
        self.deconz_ids = {}    # entity_id -> deconz id mapping
        self.events = []        # DeconzEvent wrappers for remotes
        self.listeners = []     # dispatcher unsubscribe callbacks
    @property
    def bridgeid(self) -> str:
        """Return the unique identifier of the gateway."""
        return self.config_entry.data[CONF_BRIDGEID]
    @property
    def master(self) -> bool:
        """Gateway which is used with deCONZ services without defining id."""
        return self.config_entry.options[CONF_MASTER_GATEWAY]
    @property
    def allow_clip_sensor(self) -> bool:
        """Allow loading clip sensor from gateway."""
        return self.config_entry.data.get(CONF_ALLOW_CLIP_SENSOR, True)
    @property
    def allow_deconz_groups(self) -> bool:
        """Allow loading deCONZ groups from gateway."""
        return self.config_entry.data.get(CONF_ALLOW_DECONZ_GROUPS, True)
    async def async_update_device_registry(self):
        """Register the gateway itself as a device in the device registry."""
        device_registry = await \
            self.hass.helpers.device_registry.async_get_registry()
        device_registry.async_get_or_create(
            config_entry_id=self.config_entry.entry_id,
            connections={(CONNECTION_NETWORK_MAC, self.api.config.mac)},
            identifiers={(DOMAIN, self.api.config.bridgeid)},
            manufacturer='Dresden Elektronik',
            model=self.api.config.modelid,
            name=self.api.config.name,
            sw_version=self.api.config.swversion
        )
    async def async_setup(self):
        """Set up a deCONZ gateway.

        Returns False on unexpected errors; raises ConfigEntryNotReady so
        Home Assistant retries later when the bridge is unreachable.
        """
        hass = self.hass
        try:
            self.api = await get_gateway(
                hass, self.config_entry.data, self.async_add_device_callback,
                self.async_connection_status_callback
            )
        except CannotConnect:
            raise ConfigEntryNotReady
        except Exception:  # pylint: disable=broad-except
            _LOGGER.error('Error connecting with deCONZ gateway')
            return False
        # Forward setup to every entity platform this integration supports.
        for component in SUPPORTED_PLATFORMS:
            hass.async_create_task(
                hass.config_entries.async_forward_entry_setup(
                    self.config_entry, component))
        # Listen for new sensors so remotes are wrapped as events, then
        # process the sensors already known to the bridge.
        self.listeners.append(async_dispatcher_connect(
            hass, self.async_event_new_device(NEW_SENSOR),
            self.async_add_remote))
        self.async_add_remote(self.api.sensors.values())
        self.api.start()
        self.config_entry.add_update_listener(self.async_new_address_callback)
        return True
    @staticmethod
    async def async_new_address_callback(hass, entry):
        """Handle signals of gateway getting new address.
        This is a static method because a class method (bound method),
        can not be used with weak references.
        """
        gateway = hass.data[DOMAIN][entry.data[CONF_BRIDGEID]]
        gateway.api.close()
        gateway.api.host = entry.data[CONF_HOST]
        gateway.api.start()
    @property
    def event_reachable(self):
        """Gateway specific event to signal a change in connection status."""
        return 'deconz_reachable_{}'.format(self.bridgeid)
    @callback
    def async_connection_status_callback(self, available):
        """Handle signals of gateway connection status."""
        self.available = available
        # NOTE(review): the payload always reports state True even when
        # available is False -- listeners appear to re-read
        # gateway.available instead; confirm before changing.
        async_dispatcher_send(self.hass, self.event_reachable,
                              {'state': True, 'attr': 'reachable'})
    @callback
    def async_event_new_device(self, device_type):
        """Gateway specific event to signal new device."""
        return NEW_DEVICE[device_type].format(self.bridgeid)
    @callback
    def async_add_device_callback(self, device_type, device):
        """Handle event of new device creation in deCONZ."""
        # Normalize to a list so listeners always receive an iterable.
        if not isinstance(device, list):
            device = [device]
        async_dispatcher_send(
            self.hass, self.async_event_new_device(device_type), device)
    @callback
    def async_add_remote(self, sensors):
        """Set up remote from deCONZ."""
        from pydeconz.sensor import SWITCH as DECONZ_REMOTE
        # Wrap switch-type sensors as events; skip CLIP (virtual) sensors
        # when the user has disabled them.
        for sensor in sensors:
            if sensor.type in DECONZ_REMOTE and \
                    not (not self.allow_clip_sensor and
                         sensor.type.startswith('CLIP')):
                self.events.append(DeconzEvent(self.hass, sensor))
    @callback
    def shutdown(self, event):
        """Wrap the call to deconz.close.
        Used as an argument to EventBus.async_listen_once.
        """
        self.api.close()
    async def async_reset(self):
        """Reset this gateway to default state.
        Will cancel any scheduled setup retry and will unload
        the config entry.
        """
        self.api.close()
        for component in SUPPORTED_PLATFORMS:
            await self.hass.config_entries.async_forward_entry_unload(
                self.config_entry, component)
        # Drop dispatcher subscriptions registered during setup.
        for unsub_dispatcher in self.listeners:
            unsub_dispatcher()
        self.listeners = []
        # NOTE(review): removing from self.events while iterating it skips
        # every other element -- confirm whether all events should be torn
        # down here.
        for event in self.events:
            event.async_will_remove_from_hass()
            self.events.remove(event)
        self.deconz_ids = {}
        return True
async def get_gateway(hass, config, async_add_device_callback,
                      async_connection_status_callback):
    """Create a gateway object and verify configuration.

    Raises AuthenticationRequired when the API key is rejected and
    CannotConnect when the bridge times out or the request fails.
    """
    from pydeconz import DeconzSession, errors
    websession = aiohttp_client.async_get_clientsession(hass)
    gateway = DeconzSession(
        hass.loop, websession, **config,
        async_add_device=async_add_device_callback,
        connection_status=async_connection_status_callback)
    try:
        # Give the bridge ten seconds to answer the initial parameter load.
        with async_timeout.timeout(10):
            await gateway.async_load_parameters()
    except errors.Unauthorized:
        _LOGGER.warning("Invalid key for deCONZ at %s", config[CONF_HOST])
        raise AuthenticationRequired
    except (asyncio.TimeoutError, errors.RequestError):
        _LOGGER.error(
            "Error connecting to deCONZ gateway at %s", config[CONF_HOST])
        raise CannotConnect
    return gateway
class DeconzEvent:
"""When you want signals instead of entities.
Stateless sensors such as remotes are expected to generate an event
instead of a sensor entity in hass.
"""
def __init__(self, hass, device):
"""Register callback that will be used for signals."""
self._hass = hass
self._device = device
self._device.register_async_callback(self.async_update_callback)
self._event = 'deconz_{}'.format(CONF_EVENT)
self._id = slugify(self._device.name)
_LOGGER.debug("deCONZ event created: %s", self._id)
@callback
def async_will_remove_from_hass(self) -> None:
"""Disconnect event object when removed."""
self._device.remove_callback(self.async_update_callback)
self._device = None
@callback
def async_update_callback(self, reason):
"""Fire the event if reason is that state is updated.""" |
mwhooker/jones | tests/__init__.py | Python | apache-2.0 | 532 | 0 | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
|
blitzmann/Pyfa | gui/fitCommands/calc/fitRemoveBooster.py | Python | gpl-3.0 | 849 | 0.002356 | import wx
import eos.db
from logbook import Logger
pyfalog = Logger(__name__)
class FitRemoveBoosterCommand(wx.Command):
    """Undoable command that removes a booster from a fit.

    Extracted from sFit.removeBooster.
    """
    def __init__(self, fitID, position):
        # Bug fix: the undo-stack label previously read "Implant remove"
        # (copy/paste from the implant command); this command removes boosters.
        wx.Command.__init__(self, True, "Booster remove")
        self.fitID = fitID
        self.position = position
        self.old = None  # itemID of the removed booster, kept for Undo()
    def Do(self):
        """Remove the booster at self.position and remember its item ID."""
        pyfalog.debug("Removing booster from position ({0}) for fit ID: {1}", self.position, self.fitID)
        fit = eos.db.getFit(self.fitID)
        booster = fit.boosters[self.position]
        self.old = booster.itemID
        fit.boosters.remove(booster)
        return True
    def Undo(self):
        """Re-add the previously removed booster."""
        from .fitAddBooster import FitAddBoosterCommand  # Avoid circular import
        cmd = FitAddBoosterCommand(self.fitID, self.old)
        cmd.Do()
        return True
|
mnot/redbot | redbot/message/cache.py | Python | mit | 29,383 | 0.004016 | #!/usr/bin/env python
"""
Cacheability checking function.
"""
from redbot.formatter import relative_time, f_num
from redbot.message import HttpRequest, HttpResponse
from redbot.speak import Note, categories, levels
### configuration
cacheable_methods = ["GET"]  # request methods whose responses may be stored
heuristic_cacheable_status = ["200", "203", "206", "300", "301", "410"]  # statuses cacheable without explicit freshness info
max_clock_skew = 5  # seconds of tolerated client/server clock difference
def checkCaching(response: H | ttpResponse, request: HttpRequest = None) -> None:
"Examine HTTP caching characteristics."
# get header values
lm_hdr = response.parsed_headers.get("last-modified", None)
date_hdr = response.parsed_headers.get("date", None)
expires_hdr = response.parsed_headers.get("expires", None)
etag_hdr = response.parsed_headers.get("etag", None)
age_hdr = response.parsed_headers.get("age", None)
cc_set = response.parsed_headers.get("cache-control", [])
cc_list = [k for (k, | v) in cc_set]
cc_dict = dict(cc_set)
cc_keys = list(cc_dict.keys())
# Last-Modified
if lm_hdr:
serv_date = date_hdr or response.start_time
if lm_hdr > serv_date:
response.add_note("header-last-modified", LM_FUTURE)
else:
response.add_note(
"header-last-modified",
LM_PRESENT,
last_modified_string=relative_time(lm_hdr, serv_date),
)
# known Cache-Control directives that don't allow duplicates
known_cc = [
"max-age",
"no-store",
"s-maxage",
"public",
"private",
"pre-check",
"post-check",
"stale-while-revalidate",
"stale-if-error",
]
# check for mis-capitalised directives /
# assure there aren't any dup directives with different values
for cc in cc_keys:
if cc.lower() in known_cc and cc != cc.lower():
response.add_note(
"header-cache-control", CC_MISCAP, cc_lower=cc.lower(), cc=cc
)
if cc in known_cc and cc_list.count(cc) > 1:
response.add_note("header-cache-control", CC_DUP, cc=cc)
# Who can store this?
if request and request.method not in cacheable_methods:
response.store_shared = response.store_private = False
request.add_note("method", METHOD_UNCACHEABLE, method=request.method)
return # bail; nothing else to see here
if "no-store" in cc_keys:
response.store_shared = response.store_private = False
response.add_note("header-cache-control", NO_STORE)
return # bail; nothing else to see here
if "private" in cc_keys:
response.store_shared = False
response.store_private = True
response.add_note("header-cache-control", PRIVATE_CC)
elif (
request
and "authorization" in [k.lower() for k, v in request.headers]
and "public" not in cc_keys
):
response.store_shared = False
response.store_private = True
response.add_note("header-cache-control", PRIVATE_AUTH)
else:
response.store_shared = response.store_private = True
response.add_note("header-cache-control", STOREABLE)
# no-cache?
if "no-cache" in cc_keys:
if lm_hdr is None and etag_hdr is None:
response.add_note("header-cache-control", NO_CACHE_NO_VALIDATOR)
else:
response.add_note("header-cache-control", NO_CACHE)
return
# pre-check / post-check
if "pre-check" in cc_keys or "post-check" in cc_keys:
if "pre-check" not in cc_keys or "post-check" not in cc_keys:
response.add_note("header-cache-control", CHECK_SINGLE)
else:
pre_check = post_check = None
try:
pre_check = int(cc_dict["pre-check"])
post_check = int(cc_dict["post-check"])
except ValueError:
response.add_note("header-cache-control", CHECK_NOT_INTEGER)
if pre_check is not None and post_check is not None:
if pre_check == 0 and post_check == 0:
response.add_note("header-cache-control", CHECK_ALL_ZERO)
elif post_check > pre_check:
response.add_note("header-cache-control", CHECK_POST_BIGGER)
post_check = pre_check
elif post_check == 0:
response.add_note("header-cache-control", CHECK_POST_ZERO)
else:
response.add_note(
"header-cache-control",
CHECK_POST_PRE,
pre_check=pre_check,
post_check=post_check,
)
# vary?
vary = response.parsed_headers.get("vary", set())
if "*" in vary:
response.add_note("header-vary", VARY_ASTERISK)
return # bail; nothing else to see here
if len(vary) > 3:
response.add_note("header-vary", VARY_COMPLEX, vary_count=f_num(len(vary)))
else:
if "user-agent" in vary:
response.add_note("header-vary", VARY_USER_AGENT)
if "host" in vary:
response.add_note("header-vary", VARY_HOST)
# calculate age
response.age = age_hdr or 0
age_str = relative_time(response.age, 0, 0)
if date_hdr and date_hdr > 0:
apparent_age = max(0, int(response.start_time - date_hdr))
else:
apparent_age = 0
current_age = max(apparent_age, response.age)
current_age_str = relative_time(current_age, 0, 0)
if response.age >= 1:
response.add_note("header-age header-date", CURRENT_AGE, age=age_str)
# Check for clock skew and dateless origin server.
if not date_hdr:
response.add_note("", DATE_CLOCKLESS)
if expires_hdr or lm_hdr:
response.add_note(
"header-expires header-last-modified", DATE_CLOCKLESS_BAD_HDR
)
else:
skew = date_hdr - response.start_time + (response.age)
if response.age > max_clock_skew and (current_age - skew) < max_clock_skew:
response.add_note("header-date header-age", AGE_PENALTY)
elif abs(skew) > max_clock_skew:
response.add_note(
"header-date",
DATE_INCORRECT,
clock_skew_string=relative_time(skew, 0, 2),
)
else:
response.add_note("header-date", DATE_CORRECT)
# calculate freshness
freshness_lifetime = 0
has_explicit_freshness = False
has_cc_freshness = False
freshness_hdrs = ["header-date"]
if "s-maxage" in cc_keys:
freshness_lifetime = cc_dict["s-maxage"]
freshness_hdrs.append("header-cache-control")
has_explicit_freshness = True
has_cc_freshness = True
elif "max-age" in cc_keys:
freshness_lifetime = cc_dict["max-age"]
freshness_hdrs.append("header-cache-control")
has_explicit_freshness = True
has_cc_freshness = True
elif "expires" in response.parsed_headers:
# An invalid Expires header means it's automatically stale
has_explicit_freshness = True
freshness_hdrs.append("header-expires")
freshness_lifetime = (expires_hdr or 0) - (date_hdr or int(response.start_time))
freshness_left = freshness_lifetime - current_age
freshness_left_str = relative_time(abs(int(freshness_left)), 0, 0)
freshness_lifetime_str = relative_time(int(freshness_lifetime), 0, 0)
response.freshness_lifetime = freshness_lifetime
fresh = freshness_left > 0
if has_explicit_freshness:
if fresh:
response.add_note(
" ".join(freshness_hdrs),
FRESHNESS_FRESH,
freshness_lifetime=freshness_lifetime_str,
freshness_left=freshness_left_str,
current_age=current_age_str,
)
elif has_cc_freshness and response.age > freshness_lifetime:
response.add_note(
" ".join(freshness_hdrs),
FRESHNESS_STALE_CACHE,
freshness_lifetime=freshness_lifetime_str,
freshness_left=freshness_left_str,
current_age=current_ag |
Juniper/neutron | neutron/db/loadbalancer/loadbalancer_db.py | Python | apache-2.0 | 34,064 | 0.000059 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from sqlalchemy.orm import validates
from neutron.api.v2 import attributes
from neutron.common import exceptions as n_exc
from neutron.db import db_base_plugin_v2 as base_db
from neutron.db import model_base
from neutron.db import models_v2
from neutron.db import servicetype_db as st_db
from neutron.extensions import loadbalancer
from neutron.extensions.loadbalancer import LoadBalancerPluginBase
from neutron import manager
from neutron.openstack.common.db import exception
from neutron.openstack.common import excutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants
from neutron.services.loadbalancer import constants as lb_const
LOG = logging.getLogger(__name__)
class SessionPersistence(model_base.BASEV2):
    """Session persistence settings attached to a single vip (1:1)."""
    # The owning vip's id doubles as this row's primary key.
    vip_id = sa.Column(sa.String(36),
                       sa.ForeignKey("vips.id"),
                       primary_key=True)
    type = sa.Column(sa.Enum("SOURCE_IP",
                             "HTTP_COOKIE",
                             "APP_COOKIE",
                             name="sesssionpersistences_type"),
                     nullable=False)
    # Only meaningful for APP_COOKIE persistence.
    cookie_name = sa.Column(sa.String(1024))
class PoolStatistics(model_base.BASEV2):
    """Represents pool statistics."""
    # One stats row per pool; the pool id is the primary key.
    pool_id = sa.Column(sa.String(36), sa.ForeignKey("pools.id"),
                        primary_key=True)
    bytes_in = sa.Column(sa.BigInteger, nullable=False)
    bytes_out = sa.Column(sa.BigInteger, nullable=False)
    active_connections = sa.Column(sa.BigInteger, nullable=False)
    total_connections = sa.Column(sa.BigInteger, nullable=False)
    @validates('bytes_in', 'bytes_out',
               'active_connections', 'total_connections')
    def validate_non_negative_int(self, key, value):
        # Counters must never be negative; fail loudly on assignment rather
        # than persisting a corrupt statistics row.
        if value < 0:
            data = {'key': key, 'value': value}
            raise ValueError(_('The %(key)s field can not have '
                               'negative value. '
                               'Current value is %(value)d.') % data)
        return value
class Vip(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant,
          models_v2.HasStatusDescription):
    """Represents a v2 neutron loadbalancer vip."""
    name = sa.Column(sa.String(255))
    description = sa.Column(sa.String(255))
    port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
    protocol_port = sa.Column(sa.Integer, nullable=False)
    protocol = sa.Column(sa.Enum("HTTP", "HTTPS", "TCP", name="lb_protocols"),
                         nullable=False)
    # Each vip fronts exactly one pool, hence the unique constraint.
    pool_id = sa.Column(sa.String(36), nullable=False, unique=True)
    session_persistence = orm.relationship(SessionPersistence,
                                           uselist=False,
                                           backref="vips",
                                           cascade="all, delete-orphan")
    admin_state_up = sa.Column(sa.Boolean(), nullable=False)
    # Maximum concurrent connections; NULL means unlimited.
    connection_limit = sa.Column(sa.Integer)
    port = orm.relationship(models_v2.Port)
class Member(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant,
             models_v2.HasStatusDescription):
    """Represents a v2 neutron loadbalancer member."""
    # A given address:port may appear at most once per pool.
    __table_args__ = (
        sa.schema.UniqueConstraint('pool_id', 'address', 'protocol_port',
                                   name='uniq_member0pool_id0address0port'),
    )
    pool_id = sa.Column(sa.String(36), sa.ForeignKey("pools.id"),
                        nullable=False)
    address = sa.Column(sa.String(64), nullable=False)
    protocol_port = sa.Column(sa.Integer, nullable=False)
    # Relative load-balancing weight of this member.
    weight = sa.Column(sa.Integer, nullable=False)
    admin_state_up = sa.Column(sa.Boolean(), nullable=False)
class Pool(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant,
           models_v2.HasStatusDescription):
    """Represents a v2 neutron loadbalancer pool."""
    vip_id = sa.Column(sa.String(36), sa.ForeignKey("vips.id"))
    name = sa.Column(sa.String(255))
    description = sa.Column(sa.String(255))
    subnet_id = sa.Column(sa.String(36), nullable=False)
    protocol = sa.Column(sa.Enum("HTTP", "HTTPS", "TCP", name="lb_protocols"),
                         nullable=False)
    lb_method = sa.Column(sa.Enum("ROUND_ROBIN",
                                  "LEAST_CONNECTIONS",
                                  "SOURCE_IP",
                                  name="pools_lb_method"),
                          nullable=False)
    admin_state_up = sa.Column(sa.Boolean(), nullable=False)
    # Child rows are removed with the pool via delete-orphan cascades.
    stats = orm.relationship(PoolStatistics,
                             uselist=False,
                             backref="pools",
                             cascade="all, delete-orphan")
    members = orm.relationship(Member, backref="pools",
                               cascade="all, delete-orphan")
    monitors = orm.relationship("PoolMonitorAssociation", backref="pools",
                                cascade="all, delete-orphan")
    vip = orm.relationship(Vip, backref='pool')
    # Service provider (driver) association, eagerly loaded.
    provider = orm.relationship(
        st_db.ProviderResourceAssociation,
        uselist=False,
        lazy="joined",
        primaryjoin="Pool.id==ProviderResourceAssociation.resource_id",
        foreign_keys=[st_db.ProviderResourceAssociation.resource_id]
    )
class HealthMonitor(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
    """Represents a v2 neutron loadbalancer healthmonitor."""
    type = sa.Column(sa.Enum("PING", "TCP", "HTTP", "HTTPS",
                             name="healthmontiors_type"),
                     nullable=False)
    delay = sa.Column(sa.Integer, nullable=False)
    timeout = sa.Column(sa.Integer, nullable=False)
    max_retries = sa.Column(sa.Integer, nullable=False)
    # The following three fields are only used for HTTP/HTTPS monitors.
    http_method = sa.Column(sa.String(16))
    url_path = sa.Column(sa.String(255))
    expected_codes = sa.Column(sa.String(64))
    admin_state_up = sa.Column(sa.Boolean(), nullable=False)
    # Pools using this monitor, through the association table below.
    pools = orm.relationship(
        "PoolMonitorAssociation", backref="healthmonitor",
        cascade="all", lazy="joined"
    )
class PoolMonitorAssociation(model_base.BASEV2,
                             models_v2.HasStatusDescription):
    """Many-to-many association between pool and healthMonitor classes."""
    # Composite primary key: one row per (pool, monitor) pair.
    pool_id = sa.Column(sa.String(36),
                        sa.ForeignKey("pools.id"),
                        primary_key=True)
    monitor_id = sa.Column(sa.String(36),
                           sa.ForeignKey("healthmonitors.id"),
                           primary_key=True)
class LoadBalancerPluginDb(LoadBalancerPluginBase,
base_db.CommonDbMixin):
"""Wraps loadbalancer with SQLAlchemy models.
A class that wraps the implementation of the Neutron loadbalancer
plugin database access interface using SQLAlchemy models.
"""
@property
def _core_plugin(self):
return manager.NeutronManager.get_plugin()
def update_status(self, context, model, id, status,
status_description=None):
with context.session.begin(subtransactions=True):
v_db = self._get_resource(context, model, id)
if v_db.status != status:
v_db.status = status
# update status_description in two cases:
# - new value is passed
# - old value is not None (needs to be updated anyway)
if status_desc |
TinyOS-Camp/DDEA-DEV | [Python]Collection/retrieve_weather.py | Python | gpl-2.0 | 6,690 | 0.009716 | #!/adsc/DDEA_PROTO/bin/python
"""
@author: NGO Quang Minh Khiem
@contact: khiem.ngo@adsc.com.sg
"""
import urllib
import urllib2
from datetime import *
from pathos.multiprocessing import ProcessingPool
import pathos.multiprocessing as pmp
from toolset import dill_save_obj
airport_codes = {
'SDH' : {'code' : 'KOAK', 'city': 'Berkeley', 'state' : 'CA', 'statename' : 'California'},
'VTT' : {'code' : 'EFHF', 'city': 'Espoo', 'state' : '', 'statename' : 'Finland'},
'GValley' : {'code' : 'RKSS', 'city' : 'Seoul', 'state': '', 'statename' : 'South Korea'},
'SG' : {'code' : 'WSSS', 'city' : 'Singapore', 'state': '', 'statename' : 'Singapore'}
}
save_path = {
'SDH':'/adsc/bigdata/input_data/sdh/weather/',
'VTT':'/adsc/bigdata/input_data/VTT/data_year/weather/',
'GValley':'/adsc/bigdata/input_data/gvalley/weather/',
'SG':'/adsc/bigdata/input_data/sg/weather/',
}
site_prefix = {
'SDH':'sdh_',
'VTT':'VTT_',
'GValley':'gvalley_',
'SG':'sg_'
}
view_type = {'d' : 'DailyHistory', 'w' : 'WeeklyHistory', 'm': 'MonthlyHistory', 'custom' : "CustomHistory"}
URL_PREFIX = 'http://www.wunderground.com/history/airport/'
############
# construct the request url, based on the locations,
# time period and history view type
############
def construct_url(site_code, sy, sm, sd, view='d', ey=2014, em=12, ed=31):
    """Build the wunderground history URL and POST values for one site.

    Args:
        site_code: Key into ``airport_codes`` ('SDH', 'VTT', 'GValley', 'SG').
        sy, sm, sd: Start year/month/day.
        view: History view type: 'd' (day), 'w' (week), 'm' (month), 'custom'.
        ey, em, ed: End year/month/day, used only when view == 'custom'.

    Returns:
        (url, values): request URL and the form-values dict to POST.
    """
    airport_code = airport_codes[site_code]['code']
    city = airport_codes[site_code]['city']
    # Bug fix: this used to read ``state = city = airport_codes[...]['state']``,
    # silently overwriting the city with the state value.
    state = airport_codes[site_code]['state']
    statename = airport_codes[site_code]['statename']
    request_option = 'DailyHistory.html'
    if view in view_type:  # membership test instead of Py2-only has_key()
        request_option = view_type[view] + '.html'
    url = URL_PREFIX + airport_code + '/' + str(sy) + '/' + str(sm) + '/' + str(sd) + '/' + request_option
    values = {}
    values['req_city'] = city
    values['req_state'] = state
    values['req_statename'] = statename
    values['format'] = 1  # data will be returned in CSV format
    if view == 'custom':
        values['yearend'] = ey
        values['monthend'] = em
        values['dayend'] = ed
    return url, values
def construct_filepath(sitecode, sy, sm, sd):
    """Return the output .bin path for one site/date combination."""
    stamp = '{:04d}_{:02d}_{:02d}'.format(sy, sm, sd)
    return '{0}{1}{2}.bin'.format(save_path[sitecode], site_prefix[sitecode], stamp)
############
# Retrieve the weather data, given the site code,
# the history view type, and the time period
# Return the data from server (text), in CSV format
# site_code: SDH, VTT, GValley, SG
# view: history view type: 'd' (day), 'w' (week), 'm' (month), 'custom'
# sy, sm, sd: start year/month/day
# view='d': retrieve hourly weather data during the day sy/sm/sd
# view='w': retrieve daily weather data during the week of sy/sm/sd
# view='m': retrieve daily weather data during the month of sy/sm/sd
#
# view='custom': retrieve daily weather data from sy/sm/sd to ey/em/ed
# if view='custom': the parameters ey,em,ed should be specified
############
def retrieve_data_package(site_code, sy, sm, sd, view='d', ey=2014, em=12, ed=31):
    """Bundle one download job: [url, post values, destination path].

    The triple is consumed by retrieve_data(), typically via a worker pool.
    """
    # ## construct url based on the parameters
    url, values = construct_url(site_code, sy, sm, sd, view, ey, em, ed)
    path = construct_filepath(site_code, sy, sm, sd)
    return [url, values, path]
def retrieve_data(target):
    """Fetch one weather page and persist the cleaned CSV payload.

    Args:
        target: [url, values, path] triple from retrieve_data_package().
    """
    url, values, path = target
    req_data = urllib.urlencode(values)
    # Send the request; close the response even if read() raises (the
    # previous version leaked the connection).
    req = urllib2.Request(url, req_data)
    response = urllib2.urlopen(req)
    try:
        data = response.read()
    finally:
        response.close()
    if not data:
        return
    # The server wraps lines in <br /> tags; strip them and outer whitespace.
    data = data.strip()
    data = data.replace('<br />', '')
    if data:
        dill_save_obj(data, path)
def daterange(start_date, end_date):
    """Yield one date per whole day in [start_date, end_date)."""
    total_days = (end_date - start_date).days
    for offset in range(int(total_days)):
        yield start_date + timedelta(offset)
"""
The following section demonstrates how to use the retrieve_data() method,
with different history view types
"""
"""
View type: Day
granularity support: hourly
Ex: Retrieve hourly weather data on the date Jan 8th, 2014
"""
#data = retrieve_data('VTT', 2014, 1, 8, view='d')
#print data
"""
View type: Week
granularity support: daily
Ex: Retrieve daily weather data during the week of Jan 8th, 2014
"""
# data = retrieve_data('VTT', 2014, 1, 8, view='w')
# print data
"""
View type: Month
granularity support: daily
Ex: Retrieve daily weather data during the month of Jan 8th, 2014
"""
# data = retrieve_data('VTT', 2014, 1, 8, view='m')
# print data
"""
View type: Custom
granularity support: daily
Ex: Retrieve daily weather data from Jan 1, 2014 to Jan 7, 2014
"""
# data = retrieve_data('VTT', 2014, 1, 1, view='custom', ey=2014, em=1, ed=7)
# print data
if __name__ == '__main__':
    # Fetch yesterday's daily weather for all four sites in parallel.
    target = date.today() - timedelta(days=1)
    y = target.year
    m = target.month
    d = target.day
    ey = y + 1
    weather_points = list()
    weather_points.append(retrieve_data_package('SDH', y, m, d, view='d', ey=ey, em=12, ed=31))
    weather_points.append(retrieve_data_package('VTT', y, m, d, view='d', ey=ey, em=12, ed=31))
    weather_points.append(retrieve_data_package('GValley', y, m, d, view='d', ey=ey, em=12, ed=31))
    weather_points.append(retrieve_data_package('SG', y, m, d, view='d', ey=ey, em=12, ed=31))
    # pool = ProcessingPool(nodes=4)
    # pool.map(retrieve_data, weather_points)
    # One worker per site; join so the process exits only when all
    # downloads have completed.
    p = pmp.Pool(4)
    p.map(retrieve_data, weather_points)
    p.close()
    p.join()
"""
pool = Pool(4)
pool.map(retrieve_data, weather_points)
pool.close()
pool.join()
for single_date in daterange(date(2014, 5, 11), date(2014, 10, 10)):
#print date.strftime("%Y-%m-%d", single_date.timetuple())
#print single_date.year, single_date.month, single_date.day
y = single_date.year
m = single_date.month
d = single_date.day
weather_points = list()
weather_points.append(retrieve_data_package('SDH', y, m, d, view='d',ey=2014, em=12, ed=31))
weather_points.append(retrieve_data_package('VTT', y, m, d, view='d',ey=2014, em=12, ed=31))
weather_points.append(retrieve_data_package('GValley', y, m, d, view='d',ey=2014, em=12, ed=31))
weather_points.append(retrieve_data_package('SG', y, m, d, view='d',ey=2014, em=12, ed=31))
pool = Pool(4)
pool.map(retrieve_data, weather_points)
pool.close()
pool.join()
""" |
wmanley/stb-tester | tests/preconditions.py | Python | lgpl-2.1 | 276 | 0 | from stbt import press, wait_f | or_match
def checkers_via_gamut():
"""Change input video to "gamut" patterns, then "checker | s" pattern"""
wait_for_match("videotestsrc-redblue.png")
press("gamut")
wait_for_match("videotestsrc-gamut.png")
press("checkers-8")
|
tensorflow/tfx | tfx/experimental/templates/penguin/pipeline/pipeline.py | Python | apache-2.0 | 6,485 | 0.005705 | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TFX penguin template pipeline definition.
This file defines TFX pipeline and various components in the pipeline.
"""
from typing import List, Optional
import tensorflow_model_analysis as tfma
from tfx import v1 as tfx
from tfx.experimental.templates.penguin.models import features
from ml_metadata.proto import metadata_store_pb2
def create_pipeline(
pipeline_name: str,
pipeline_root: str,
data_path: str,
preprocessing_fn: str,
run_fn: str,
train_args: tfx.proto.TrainArgs,
eval_args: tfx.proto.EvalArgs,
eval_accuracy_threshold: float,
serving_model_dir: str,
schema_path: Optional[str] = None,
metadata_connection_config: Optional[
metadata_store_pb2.ConnectionConfig] = None,
beam_pipeline_args: Optional[List[str]] = None,
) -> tfx.dsl.Pipeline:
"""Implements the penguin pipeline with TFX."""
components = []
# Brings data into the pipeline or otherwise joins/converts training data.
# TODO(step 2): Might use another ExampleGen class for your data.
example_gen = tfx.components.CsvExampleGen(input_base=data_path)
components.append(example_gen)
# Computes statistics over data for visualization and example validation.
statistics_gen = tfx.components.StatisticsGen(
examples=example_gen.outputs['examples'])
components.append(statistics_gen)
if schema_path is None:
# Generates schema based on statistics files.
schema_gen = tfx.components.SchemaGen(
statistics=statistics_gen.outputs['statistics'])
components.append(schema_gen)
else:
# Import user provided schema into the pipeline.
schema_gen = tfx.components.ImportSchemaGen(schema_file=schema_path)
components.append(schema_gen)
# Performs anomaly detection bas | ed on statistics and data schema.
example_validator = tfx.components.ExampleValidator( # pylint: disable=unused-variable
stati | stics=statistics_gen.outputs['statistics'],
schema=schema_gen.outputs['schema'])
components.append(example_validator)
# Performs transformations and feature engineering in training and serving.
transform = tfx.components.Transform( # pylint: disable=unused-variable
examples=example_gen.outputs['examples'],
schema=schema_gen.outputs['schema'],
preprocessing_fn=preprocessing_fn)
# TODO(step 3): Uncomment here to add Transform to the pipeline.
# components.append(transform)
# Uses user-provided Python function that implements a model using Tensorflow.
trainer = tfx.components.Trainer(
run_fn=run_fn,
examples=example_gen.outputs['examples'],
# Use outputs of Transform as training inputs if Transform is used.
# examples=transform.outputs['transformed_examples'],
# transform_graph=transform.outputs['transform_graph'],
schema=schema_gen.outputs['schema'],
train_args=train_args,
eval_args=eval_args)
# TODO(step 4): Uncomment here to add Trainer to the pipeline.
# components.append(trainer)
# Get the latest blessed model for model validation.
model_resolver = tfx.dsl.Resolver(
strategy_class=tfx.dsl.experimental.LatestBlessedModelStrategy,
model=tfx.dsl.Channel(type=tfx.types.standard_artifacts.Model),
model_blessing=tfx.dsl.Channel(
type=tfx.types.standard_artifacts.ModelBlessing)).with_id(
'latest_blessed_model_resolver')
# TODO(step 5): Uncomment here to add Resolver to the pipeline.
# components.append(model_resolver)
# Uses TFMA to compute a evaluation statistics over features of a model and
# perform quality validation of a candidate model (compared to a baseline).
eval_config = tfma.EvalConfig(
model_specs=[
tfma.ModelSpec(
signature_name='serving_default',
label_key=features.LABEL_KEY,
# Use transformed label key if Transform is used.
# label_key=features.transformed_name(features.LABEL_KEY),
preprocessing_function_names=['transform_features'])
],
slicing_specs=[tfma.SlicingSpec()],
metrics_specs=[
tfma.MetricsSpec(metrics=[
tfma.MetricConfig(
class_name='SparseCategoricalAccuracy',
threshold=tfma.MetricThreshold(
value_threshold=tfma.GenericValueThreshold(
lower_bound={'value': eval_accuracy_threshold}),
change_threshold=tfma.GenericChangeThreshold(
direction=tfma.MetricDirection.HIGHER_IS_BETTER,
absolute={'value': -1e-10})))
])
])
evaluator = tfx.components.Evaluator( # pylint: disable=unused-variable
examples=example_gen.outputs['examples'],
model=trainer.outputs['model'],
baseline_model=model_resolver.outputs['model'],
# Change threshold will be ignored if there is no baseline (first run).
eval_config=eval_config)
# TODO(step 5): Uncomment here to add Evaluator to the pipeline.
# components.append(evaluator)
# Pushes the model to a file destination if check passed.
pusher = tfx.components.Pusher( # pylint: disable=unused-variable
model=trainer.outputs['model'],
model_blessing=evaluator.outputs['blessing'],
push_destination=tfx.proto.PushDestination(
filesystem=tfx.proto.PushDestination.Filesystem(
base_directory=serving_model_dir)))
# TODO(step 5): Uncomment here to add Pusher to the pipeline.
# components.append(pusher)
return tfx.dsl.Pipeline(
pipeline_name=pipeline_name,
pipeline_root=pipeline_root,
components=components,
# Change this value to control caching of execution results. Default value
# is `False`.
# enable_cache=True,
metadata_connection_config=metadata_connection_config,
beam_pipeline_args=beam_pipeline_args,
)
|
GeoCat/QGIS | python/plugins/processing/gui/BatchInputSelectionPanel.py | Python | gpl-2.0 | 7,904 | 0.001772 | # -*- coding: utf-8 -*-
"""
***************************************************************************
BatchInputSelectionPanel.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
from builtins import range
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright_ | _ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtCore import pyqtSignal
from qgis.PyQt.QtWidgets import QWidget, QHBoxLayout, QMenu, QPushButton, QLineEdit, QSizePolicy, QAction, QFileDialog
from qgis.PyQt.QtGui import QCursor
from qgis.core import (QgsMapLayer,
QgsSettings,
| QgsProject,
QgsProcessing,
QgsProcessingUtils,
QgsProcessingParameterMultipleLayers,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterDefinition,
QgsProcessingParameterVectorLayer,
QgsProcessingParameterFeatureSource)
from processing.gui.MultipleInputDialog import MultipleInputDialog
from processing.gui.ParameterGuiUtils import getFileFilter
from processing.tools import dataobjects
class BatchInputSelectionPanel(QWidget):
valueChanged = pyqtSignal()
def __init__(self, param, row, col, dialog):
super(BatchInputSelectionPanel, self).__init__(None)
self.param = param
self.dialog = dialog
self.row = row
self.col = col
self.horizontalLayout = QHBoxLayout(self)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setMargin(0)
self.text = QLineEdit()
self.text.setObjectName('text')
self.text.setMinimumWidth(300)
self.setValue('')
self.text.editingFinished.connect(self.textEditingFinished)
self.text.setSizePolicy(QSizePolicy.Expanding,
QSizePolicy.Expanding)
self.horizontalLayout.addWidget(self.text)
self.pushButton = QPushButton()
self.pushButton.setText('…')
self.pushButton.clicked.connect(self.showPopupMenu)
self.horizontalLayout.addWidget(self.pushButton)
self.setLayout(self.horizontalLayout)
def _panel(self):
return self.dialog.mainWidget
def _table(self):
return self._panel().tblParameters
def showPopupMenu(self):
popupmenu = QMenu()
if not (isinstance(self.param, QgsProcessingParameterMultipleLayers) and
self.param.datatype == dataobjects.TYPE_FILE):
selectLayerAction = QAction(
self.tr('Select from open layers'), self.pushButton)
selectLayerAction.triggered.connect(self.showLayerSelectionDialog)
popupmenu.addAction(selectLayerAction)
selectFileAction = QAction(
self.tr('Select from filesystem'), self.pushButton)
selectFileAction.triggered.connect(self.showFileSelectionDialog)
popupmenu.addAction(selectFileAction)
popupmenu.exec_(QCursor.pos())
def showLayerSelectionDialog(self):
layers = []
if (isinstance(self.param, QgsProcessingParameterRasterLayer) or
(isinstance(self.param, QgsProcessingParameterMultipleLayers) and
self.param.layerType() == QgsProcessing.TypeRaster)):
layers = QgsProcessingUtils.compatibleRasterLayers(QgsProject.instance())
elif isinstance(self.param, QgsProcessingParameterVectorLayer):
layers = QgsProcessingUtils.compatibleVectorLayers(QgsProject.instance())
else:
datatypes = [QgsProcessing.TypeVectorAnyGeometry]
if isinstance(self.param, QgsProcessingParameterFeatureSource):
datatypes = self.param.dataTypes()
elif isinstance(self.param, QgsProcessingParameterMultipleLayers):
datatypes = [self.param.layerType()]
if QgsProcessing.TypeVectorAnyGeometry not in datatypes:
layers = QgsProcessingUtils.compatibleVectorLayers(QgsProject.instance(), datatypes)
else:
layers = QgsProcessingUtils.compatibleVectorLayers(QgsProject.instance())
dlg = MultipleInputDialog([layer.name() for layer in layers])
dlg.exec_()
if dlg.selectedoptions is not None:
selected = dlg.selectedoptions
if len(selected) == 1:
self.setValue(layers[selected[0]].id())
else:
if isinstance(self.param, QgsProcessingParameterMultipleLayers):
self.text.setText(';'.join(layers[idx].id() for idx in selected))
else:
rowdif = len(selected) - (self._table().rowCount() - self.row)
for i in range(rowdif):
self._panel().addRow()
for i, layeridx in enumerate(selected):
self._table().cellWidget(i + self.row,
self.col).setValue(layers[layeridx].id())
def showFileSelectionDialog(self):
settings = QgsSettings()
text = str(self.text.text())
if os.path.isdir(text):
path = text
elif os.path.isdir(os.path.dirname(text)):
path = os.path.dirname(text)
elif settings.contains('/Processing/LastInputPath'):
path = str(settings.value('/Processing/LastInputPath'))
else:
path = ''
ret, selected_filter = QFileDialog.getOpenFileNames(self, self.tr('Open file'), path,
self.tr('All files (*.*);;') + getFileFilter(self.param))
if ret:
files = list(ret)
settings.setValue('/Processing/LastInputPath',
os.path.dirname(str(files[0])))
for i, filename in enumerate(files):
files[i] = dataobjects.getRasterSublayer(filename, self.param)
if len(files) == 1:
self.text.setText(files[0])
self.textEditingFinished()
else:
if isinstance(self.param, QgsProcessingParameterMultipleLayers):
self.text.setText(';'.join(str(f) for f in files))
else:
rowdif = len(files) - (self._table().rowCount() - self.row)
for i in range(rowdif):
self._panel().addRow()
for i, f in enumerate(files):
self._table().cellWidget(i + self.row,
self.col).setValue(f)
def textEditingFinished(self):
self._value = self.text.text()
self.valueChanged.emit()
def value(self):
return self._value
def setValue(self, value):
self._value = value
if isinstance(value, QgsMapLayer):
self.text.setText(value.name())
else: # should be basestring
self.text.setText(value)
self.valueChanged.emit()
|
dahool/vertaal | versioncontrol/lib/types/filesystem.py | Python | gpl-3.0 | 2,573 | 0.005441 | # -*- coding: utf-8 -*-
"""Copyright (c) 2012 Sergio Gabriel Teves
All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import time
import shutil
from django.utils.translation import ugettext as _
import versioncontrol.lib.browser as browser
import logging
logger = logging.getLogger('vertaal.vcs')
# This is for test only, is not completed
class FileSystemBrowser(browser.RepositoryBrowser):
def __init__(self, location, u | rl, folder, branch='', auth=None):
super(FileSystemBrowser, self).__init__(location, url, fol | der, branch, auth)
if self.url.startswith('file://'):
self.url = self.url[7:]
@property
def _remote_location(self):
return os.path.join(self.url, self.branch, self.folder)
def init_repo(self):
logger.debug("init")
self._send_callback(self.callback_on_action_notify,_('Initializing repository %s') % self._remote_location)
logger.debug("Checkout %s on %s" % (self._remote_location, self.location))
self._process_files(self._remote_location, self.location)
return int(time.time())
def cleanup(self):
pass
def update(self):
self._send_callback(self.callback_on_action_notify,_('Updating repository %s') % self._remote_location)
self._process_files(self._remote_location, self.location)
return int(time.time())
def _process_files(self, src, tgt):
for filename in os.listdir(src):
sourcefile = os.path.join(src, filename)
targetfile = os.path.join(tgt, filename)
shutil.copy(sourcefile, targetfile)
self._send_callback(self.callback_on_file_add,targetfile)
def revert(self):
pass
def submit(self, auth, files, msg):
logger.debug("Perform submit %s (%s) [%s]" % (self.location, files, msg))
self._send_callback(self.callback_on_action_notify,_('Checking in'))
return int(time.time())
|
zeza/gnuradio-rc-testcode | gr-flysky/python/qa_flysky_dumpsync.py | Python | gpl-3.0 | 1,124 | 0.010676 | #!/usr/bin/env python
#
# Copyright 2012 <+YOU OR YOUR COM | PANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR | A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
#
from gnuradio import gr, gr_unittest
import flysky_swig
class qa_dumpsync (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_001_t (self):
# set up fg
self.tb.run ()
# check data
if __name__ == '__main__':
gr_unittest.main ()
|
errantlinguist/tangrams-analysis | add_tabular_participant_metadata.py | Python | apache-2.0 | 4,239 | 0.020996 | #!/usr/bin/env python3
"""
Adds participant metadata to a given tabular data file for those sessions.
"""
__author__ = "Todd Shore <errantlinguist+github@gmail.com>"
__copyright__ = "Copyright 2018 Todd Shore"
__license__ = "Apache License, Version 2.0"
import argparse
import csv
import os
import sys
from typing import Iterable, Mapping, Tuple
import pandas as pd
import tangrams_analysis.session_data
TABULAR_FILE_CSV_DIALECT = csv.excel_tab
TABULAR_FILE_DTYPES = {"session": "category", "Adt": "bool", "Wgt": "bool", "RndAdt": "bool"}
TABULAR_FILE_ENCODING = "utf-8"
def add_tabular_participant_metadata(df: pd.DataFrame,
session_data: Iterable[Tuple[str, tangrams_analysis.session_data.SessionData]]):
df["Instructor"] = df["round"].transform(lambda game_round: "B" if game_round % 2 == 0 else "A")
session_participant_metadata = dict(
(session_name, sd.read_participant_metadata()) for (session_name, sd) in session_data)
metadata_names = tuple(sorted(frozenset(
metadatum_name for participant_metadata in session_participant_metadata.values() for metadatum_name in
participant_metadata.keys())))
print("Metadata to add: {}.".format(metadata_names), file=sys.stderr)
for metadatum_name in metadata_names:
df["Instructor" + metadatum_name] = df.apply(
lambda row: instructor_datum(row, metadatum_name, session_participant_metadata),
axis=1)
df["Manipulator" + metadatum_name] = df.apply(
lambda row: other_datum(row, metadatum_name, session_participant_metadata), axis=1)
def instructor_datum(row: pd.Series, datum_name: str,
session_participant_metadata: Mapping[str, Mapping[str, Mapping[str, str]]]) -> str:
session = row["session"]
participant_metadata = session_participant_metadata[session]
gender_metadata = participant_metadata[datum_name]
instructor = row["Instructor"]
return gender_metadata[instructor]
def other_datum(row: pd.Series, datum_name: str,
session_participant_metadata: Mapping[str, Mapping[str, Mapping[str, str]]]) -> str:
session = row["session"]
participant_metadata = session_participant_metadata[session]
gender_metadata = participant_metadata[datum_name]
instructor = row["Instructor"]
other_genders = tuple(
gender for (participant_id, gender) in gender_metadata.items() if participant_id != instructor)
if len(other_genders) > 1:
raise ValueError("Dyads with more than two participants are (currently) not supported.")
else:
return other_genders[0]
def parse_dir_session_name(dirpath: str) -> str:
return os.path.basename(dirpath)
def read_tabular_data(infile: str) -> pd.DataFrame:
return pd.read_csv(infile, dialect=TABULAR_FILE_CSV_DIALECT, sep=TABULAR_FILE_CSV_DIALECT.delimiter,
dtype=TABULAR_FILE_DTYPES,
float_precision="round_trip",
encoding=TABULAR_FILE_ENCODING, memory_map=True)
def __create_argparser() -> argparse.ArgumentParser:
result = argparse.ArgumentParser(
description="Adds par | ticipant metadata to a given tabular data file for those sessions.")
result.add_argument("infile", metavar="INFILE", help="The tabular file to add to.")
result.add_argument("session_dir", metavar="PATH", help="The directory under which the dyad files are to be found.")
return result
def __main(args):
infile = args.infile
pr | int("Reading tabular data from \"{}\".".format(infile), file=sys.stderr)
df = read_tabular_data(infile)
session_names = frozenset(df["session"].unique())
print("Read results for {} sessions.".format(len(session_names)), file=sys.stderr)
session_dir = args.session_dir
print("Will look for sessions underneath \"{}\".".format(session_dir), file=sys.stderr)
session_data = tuple((parse_dir_session_name(indir), sd) for (indir, sd) in
tangrams_analysis.session_data.walk_session_data((session_dir,)))
missing_session_names = session_names.difference(frozenset(session_name for session_name, _ in session_data))
if missing_session_names:
raise ValueError("Missing sessions: {}".format(missing_session_names))
else:
add_tabular_participant_metadata(df, session_data)
df.to_csv(sys.stdout, sep=TABULAR_FILE_CSV_DIALECT.delimiter, encoding=TABULAR_FILE_ENCODING, index=False)
if __name__ == "__main__":
__main(__create_argparser().parse_args())
|
eclee25/flu-SDI-exploratory-age | scripts/create_fluseverity_figs_v5/ILINet_RR_time_v5.py | Python | mit | 3,186 | 0.016949 | #!/usr/bin/python
##############################################
###Python template
###Author: Elizabeth Lee
###Date: 11/4/14
###Function: RR of incidence in adults to incidence in children vs. week number. Incidence in children and adults is normalized by the size of the child and adult populations in the second calendar year of the flu season. ILINet data
# 11/4 changed to v5: coverage & careseek adjustment, a:c, RR
###Import data: CDC_Source/Import_Data/all_cdc_source_data.csv, Census/Import_Data/totalpop_age_Census_98-14.csv
###Command Line: python ILINet_RR_time_v5.py
##############################################
### notes ###
# Incidence per 100,000 is normalized by total population by second calendar year of the flu season
# 2013-14 ILINet data is normalized by estimated population size from December 2013 because 2014 estimates are not available at this time
### packages/modules ###
import csv
import matplotlib.pyplot as plt
## local modules ##
import functions_v5 as fxn
### data structures ###
### functions ###
### data files ###
incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/CDC_Source/Import_Data/all | _cdc_source_data.csv','r')
incidin.readline() # remove header
incid = csv.reader(incidin, delimiter=',')
popin = open('/home/elee/Dropbox/Eliz | abeth_Bansal_Lab/Census/Import_Data/totalpop_age_Census_98-14.csv', 'r')
pop = csv.reader(popin, delimiter=',')
### called/local plotting parameters ###
ps = fxn.pseasons
fw = fxn.gp_fluweeks
sl = fxn.gp_ILINet_seasonlabels
colvec = fxn.gp_ILINet_colors
wklab = fxn.gp_weeklabels
fs = 24
fssml = 16
### program ###
# import data
d_wk, d_pop, d_totILI53ls, d_totILIadj53ls, d_ageILIadj_season = fxn.ILINet_week_RR_processing(incid, pop)
d_totIncid53ls, d_totIncidAdj53ls, d_RR53ls, d_zRR53ls = fxn.week_RR_processing_part2(d_pop, d_totILI53ls, d_totILIadj53ls, d_ageILIadj_season)
d_indices = fxn.identify_retro_early_weeks(d_wk, d_totIncidAdj53ls)
# plot values
fig = plt.figure()
ax = plt.subplot(111)
for s, i in zip(ps, xrange(len(ps))):
ax.plot(xrange(fw), d_RR53ls[s][:fw], marker = fxn.gp_marker, color = colvec[i], label = sl[i], linewidth = fxn.gp_linewidth)
for s in ps:
beg_retro, end_retro = d_indices[(s, 'r')]
beg_early, end_early = d_indices[(s, 'e')]
plt.plot(range(beg_retro, end_retro), d_RR53ls[s][beg_retro:end_retro], marker = 'o', color = fxn.gp_retro_early_colors[0], linewidth = 2)
plt.plot(range(beg_early, end_early), d_RR53ls[s][beg_early:end_early], marker = 'o', color = fxn.gp_retro_early_colors[1], linewidth = 2)
plt.xlim([0, fw-1])
plt.xticks(range(fw)[::5], wklab[:fw:5])
plt.ylim([0, 1.5])
plt.xlabel('Week Number', fontsize=fs)
plt.ylabel('RR, adult:child', fontsize=fs)
# shrink current axis by 10%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width*0.9, box.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs_v5/ILINet/ILINet_RR_time.png', transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
# plt.show()
# print sorted([wk for wk in d_wk if d_wk[wk]==-2])
# print d_totIncid53ls[-2]
# print d_totIncidAdj53ls[-2]
|
roninio/gae-boilerplate | boilerplate/routes.py | Python | lgpl-3.0 | 2,607 | 0.008055 | """
Using redirect route instead of simple routes since it supports strict_slash
Simple route: http://webapp-improved.appspot.com/guide/routing.html#simple-routes
RedirectRoute: http://webapp-improved.appspot.com/api/webapp2_extras/routes.html#webapp2_extras.routes.RedirectRoute
"""
from webapp2_extras.routes import RedirectRoute
import handlers
secure_scheme = 'https'
_routes = [
RedirectRoute('/taskqueue-send-email/', handlers.SendEmailHandler, name='taskqueue-send-email', strict_slash=True),
RedirectRoute('/_ah/login_required', handlers.LoginRequiredHandler),
RedirectRoute('/login/', handlers.LoginHandler, name='login', strict_slash=True),
RedirectRoute('/logout/', handlers.LogoutHandler, name='logout', strict_slash=True),
RedirectRoute('/social_login/<provider_name>', handlers.SocialLoginHandler, name='social-login', strict_slash=True),
RedirectRoute('/social_login/<provider_name>/complete', handlers.CallbackSocialLoginHandler, name='social-login-complete', strict_slash=True),
RedirectRoute('/social_login/<provider_name>/delete', handlers.DeleteSocialProviderHandler, name='delete-social-provider', strict_slash=True),
RedirectRoute('/register/', handlers.RegisterHandler, name='register', strict_slash=True),
RedirectRoute('/activation/<user_id>/<token>', handlers.AccountActivationHandler, name='account-activation', strict_slash=True),
RedirectRoute('/resend/<user_id>/<token>', handlers.ResendActivationEmailHandler, name='resend-account-activation', strict_slash=True),
RedirectRoute('/contact/', handlers.ContactHandler, name='contact', strict_slash=True),
RedirectRoute('/settings/profile', handlers.EditProfileHandler, name='edit-profile', strict_slash=True),
RedirectRoute('/settings/password', handlers.EditPasswordHandler, name='edit-password', strict_slash=True),
RedirectRoute('/settings/email', handlers.EditEmailHandler, name='edit-email', strict_slash=True), |
RedirectRoute('/password-reset/', handlers.PasswordResetHandler, name='password-reset', strict_slash=True),
RedirectRoute('/password-reset/<user_id>/<token>', handlers.PasswordResetCompleteHandler, name='password-reset-check', strict_slash=True),
RedirectRoute('/change-email/<user_id>/<encoded_email>/<token>', handlers.EmailChangedCompleteHandler, name='email-changed-check', strict_slash=True),
| RedirectRoute('/', handlers.HomeRequestHandler, name='home', strict_slash=True)
]
def get_routes():
return _routes
def add_routes(app):
if app.debug:
secure_scheme = 'http'
for r in _routes:
app.router.add(r)
|
uclouvain/osis_louvain | base/migrations/0024_documentfile.py | Python | agpl-3.0 | 1,981 | 0.003029 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-10 16:10
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('base', '0023_messagetemplate'),
]
operations = [
migrations.CreateModel(
name='DocumentFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('content_type', models.CharField(choices=[('APPLICATION_CSV', 'application/csv'), ('APPLICATION_DOC', 'application/doc'), ('APPLICATION_PDF', 'application/pdf'), ('APPLICATION_XLS', 'application/xls'), ('APPLICATION_XLSX', 'application/xlsx'), ('APPLICATION_XML', 'application/xml'), ('APPLICATION_ZIP', 'application/zip'), ('IMAGE_JPEG', 'image/jpeg'), ('IMAGE_GIF', 'image/gif'), ('IMAGE_PNG', 'image/png'), ('TEXT_HTML', 'text/html'), ('TEXT_PLAIN', 'text/plain')], max_length=50)),
('creation_date', models.DateTimeField(auto_now=True)),
('storage_duration', models.IntegerField()),
('full_path', models.CharField(max_length=255)),
('physical_name', models.UUIDField(default=uuid.uuid4, editable=False)),
('physical_extension', models.CharField(max_length=10)),
('description', models.CharField(blank=True, max_length=255, null=True)),
('sub_directory', models.CharField(blank=True, max_length=100, null=True)),
('size', models.IntegerFi | eld(blank=True, null=True)),
('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH | _USER_MODEL)),
],
),
]
|
des-testbed/des_chan_algorithms | dga/dmp.py | Python | gpl-3.0 | 27,779 | 0.011124 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
DGA: Implementation of the Distributed Greedy Algorithm for channel assignment
The DMP class handles the control flow after the initialization of DGA.
Authors: Simon Seif <seif.simon@googlemail.com>,
Felix Juraschek <fjuraschek@gmail.com>
Copyright 2008-2013, Freie Universitaet Berlin (FUB). All rights reserved.
These sources were developed at the Freie Universitaet Berlin,
Computer Systems and Telematics / Distributed, embedded Systems (DES) group
(http://cst.mi.fu-berlin.de, http://www.des-testbed.net)
-------------------------------------------------------------------------------
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see http://www.gnu.org/licenses/ .
--------------------------------------------------------------------------------
For further information and questions please use the web site
http://www.des-testbed.net
"""
import dga
from dga import hostname
from twisted.internet import protocol, reactor
from twisted.protocols import basic
from logger import Logger
import random
import socket
import sys
import re
import os
import subprocess
# insert path to des_chan framework
p = subprocess.Popen('logname', stdout=subprocess.PIPE,stderr=subprocess.PIPE)
logname, errors = p.communicate()
sys.path.insert(0, '/home/' + logname.strip())
from des_chan import util
### CONSTANTS
# LOGGING
DEBUG = True
log = Logger(DEBUG)
# STATES
STATE_MOVING = 1
STATE_STALLED = 0
# MESSAGE HEADERS
MESSAGE_REQUEST = "REQUEST"
MESSAGE_ABORT = "ABORT"
MESSAGE_UPDATE = "UPDATE"
MESSAGE_ACCEPT = "ACCEPT"
MESSAGE_REJECT = "REJECT"
MESSAGE_QUERY = "QUERY"
# OTHER MESSAGE RELATED STUFF
DELIMITER = ";"
# TIMEOUT
STALLED_TIMEOUT = 15 # time between retrieving query and getting either abort or update
MOVING_TIMEOUT = 15 # timeout between sending request and getting accept/reject
GRACE_TIMEOUT = 5*60 # time between last "action" (except query) and shutdown
RETRY_DELAY = 2 # delay between two connection retries
MAX_RETRIES = 10 # max number of retries for each conncection
QUERY_TIMEOUT = 40 # max time allowed to query
CONNECTION_TIMEOUT = 5 # general connection timeout for any outgoing TCP connection
# time the protocol shall wait before issueing the own request
STALL_MOVE_TIMEOUT_LOWER_BOUND = 0
STALL_MOVE_TIMEOUT_UPPER_BOUND = 4
LAMBDA = 0.2 # lambda for the exponential distribution
### MESSAGE PROCESSING
def createAssignmentString(assignment):
"""Creates a textual representation of a list.
"""
return repr(assignment)[1:-1].replace(" ","")
def parseAssignmentString(line):
"""Parses a textual representation of a list back to an integer array.
"""
try:
return map(l | ambda x:int(x),line.split(","))
except:
log.error("cannot parse assignment string:"+line)
return list()
def parseRequest(line):
"""Parses a request message.
Returns a quadruple containing old_channel,new_channel,re | duction,assignment.
"""
tokens = line.split(DELIMITER)
old_channel = int(tokens[1])
new_channel = int(tokens[2])
reduction = int(tokens[3])
assignment = parseAssignmentString(tokens[4])
return old_channel, new_channel, reduction, assignment
def parseQuery(line):
"""Parses a query message. The assignment piggy bakced in the message is returned.
"""
return parseReject(line) # both message formats are the same
def parseReject(line):
"""Parses a reject message. The assignment piggy backed in the message is returned.
"""
tokens = line.split(DELIMITER)
return parseAssignmentString(tokens[1])
def createRequest(request, assignment):
"""Creates a request message.
Format: <REQUEST>;old_channel;new_channel;reduction;c1,...,cn
"""
line = MESSAGE_REQUEST
line += DELIMITER
line += repr(request.channel)
line += DELIMITER
line += repr(request.new_channel)
line += DELIMITER
line += repr(request.reduction)
line += DELIMITER
line += createAssignmentString(assignment)
return line
def createQuery(assignment):
    """Build a query message piggy backing an assignment.

    Format: <Query>;c1,...,cn
    """
    return DELIMITER.join([MESSAGE_QUERY, createAssignmentString(assignment)])
def createReject(assignment):
    """Build a reject message piggy backing an assignment.

    Format: <Reject>;c1,...,cn
    """
    return DELIMITER.join([MESSAGE_REJECT, createAssignmentString(assignment)])
def createAbort():
    """Creates an abort message (no payload)."""
    return MESSAGE_ABORT
def createAccept():
    """Creates an accept message (no payload)."""
    return MESSAGE_ACCEPT
def createUpdate():
    """Creates an update message (no payload)."""
    return MESSAGE_UPDATE
### STALLED STATE
class StalledFactory(protocol.Factory):
"""Factory for the Stalled Protocol.
Handels incoming foreign requests.
"""
    def __init__(self, dmp):
        # dmp is the owning protocol state machine; every accepted
        # incoming connection is handled by a Stalled protocol instance
        self.dmp = dmp
        self.protocol = Stalled
    def notifyRequest(self, protocol, request, assignment):
        """Callback for the protocol instances.
        Will drop the latter of two concurrent requests.
        Determines if the request can be approved or not.
        """
        # any incoming request counts as activity: postpone shutdown
        self.dmp.cancelGraceTimeOut()
        if self.dmp.state != STATE_STALLED:
            # we are mid-move ourselves: remember and reject the foreign request
            log.warn("Incoming request from " + protocol.node + ", but I am moving.")
            # remember the request so that the abort message will not cause confusion
            self.dmp.foreignRequests[protocol.node] = request
            protocol.sendReject()
        else:
            self.dmp.foreignRequests[protocol.node] = request
            if self.dmp.dga.isAssignmentUpToDate(assignment): # it's valid
                # possible that we've lost a node...but now got req from it, so put it again in our interference set
                if protocol.node not in self.dmp.dga.interferenceSet:
                    log.error("Got request from node that is not in interference set..."+repr(protocol.node))
                    self.dmp.dga.interferenceSet[protocol.node]=list()
                reactor.callLater(1,self.dmp.move) # in any case (apply/abort) something changed -> check for new possible assignment
                if self.dmp.request is not None and self.dmp.request.conflicts(request): # we've got something on our own
                    # tie-break between our pending request and the incoming one
                    if self.dmp.request.wins(request): # and i win
                        log.debug("Incoming request from node " + protocol.node + " is losing.")
                        protocol.sendReject()
                    else: # but i loose
                        log.debug("Incoming request from node " + protocol.node + " is winning.")
                        protocol.sendAccept()
                        # our own pending request lost: drop it
                        self.dmp.request.invalidate()
                else: # not conflicting and up to date
                    log.debug("Incoming request from node " + protocol.node + " is not conflicting.")
                    protocol.sendAccept()
            else: # out of date
                log.warn("Incoming request from node " + protocol.node + " is out of date.")
                protocol.sendReject()
def notifyUpdate(self,client):
"""Callback for the protocol instances.
Will commit the foreign request.
"""
request = self.dmp.fo |
eguven/mobai | mobai/engine/game.py | Python | mit | 7,171 | 0.001116 | import enum
import gzip
import pickle
from .base import Player
from .map import Map
class ActionType(enum.Enum):
    # Player-issued unit actions; values are the wire-format ids.
    target = 0        # acquire a unit or tile target
    clear_target = 1  # drop the current target
    stop = 2          # halt the unit
class Command(object):
    '''a command received from a player

    {'id': '<uuid>', 'action': '<action-type>.name', 'target': '<uuid>' | {'posx': X, 'posy': Y} }

    Validation is assert-based on purpose: GameState.commands_from_player
    catches AssertionError and reports the command as an error.
    '''
    def __init__(self, player, command):
        # id present and not empty
        assert command.get('id') and isinstance(command['id'], str)
        # action present and valid
        assert 'action' in command and command['action'] in ActionType.__members__
        if ActionType[command['action']] is ActionType.target:
            # target present and valid
            assert 'target' in command and command['target']
            assert isinstance(command['target'], (str, dict))
        self.id = command['id']
        self.action = ActionType[command['action']]
        if self.action is ActionType.target:
            self.target = command['target']
            if isinstance(self.target, dict):
                # position target valid
                assert 'posx' in self.target and 'posy' in self.target
                assert isinstance(self.target['posx'], int)
                assert isinstance(self.target['posy'], int)
        self.player = player

    def verify_unit(self, units):
        '''check if id is correct and player owns unit'''
        assert self.id in units and units[self.id].player == self.player
        self.unit = units[self.id]

    def verify_target(self, units, map):
        '''make sure target is valid'''
        if isinstance(self.target, str): # targeting a unit
            # enemy units only
            assert self.target in units and units[self.target].player != self.player
            target = units[self.target]
        elif isinstance(self.target, dict): # targeting a tile (position)
            assert self.unit.mobile
            target = map.get_tile(self.target['posx'], self.target['posy'])
            # only tiles the player can currently see are legal targets
            assert map.player_has_vision(self.player, target)
        self.target = target

    def execute(self):
        # dispatch the validated action onto the unit
        if self.action is ActionType.target:
            self.unit.set_target(self.target)
        elif self.action is ActionType.clear_target:
            self.unit.clear_target()
        elif self.action is ActionType.stop:
            self.unit.stop()
        else:
            raise Exception('Uhm?')
class GameState(object):
    '''Authoritative game state for one match.

    * creating a GameState object initializes a game with map, players,
      tiles and buildings
    * `begin_turn` runs through the steps necessary prior to sending
      players the game state (eg. spawning units if necessary)
    * State representations are sent to players and actions are retrieved
    * Actions are verified and applied to units
    * `evaluate_turn` runs through the steps of executing actions and
      finishes the turn
    '''
    def __init__(self):
        self.player0, self.player1 = Player(0), Player(1)
        self.players = {0: self.player0, 1: self.player1}
        self.init_map()
        self.turn = 0
        self.spawn_interval = 10  # spawn new soldiers every N turns
        self._all_units = None    # per-turn cache, see `all_units`

    @staticmethod
    def serialize(gamestate):
        # compresslevel=1: fastest setting; pickles compress well anyway
        return gzip.compress(pickle.dumps(gamestate), compresslevel=1)

    @staticmethod
    def deserialize(serialized):
        return pickle.loads(gzip.decompress(serialized))

    @property
    def all_units(self):
        '''All units on the map; cached per turn (reset in evaluate_turn).'''
        if self._all_units is None:
            self._all_units = self.map.get_all_units()
        return self._all_units

    @property
    def finished(self):
        '''True once at least one player has no units left.'''
        p0, p1 = 0, 0  # unit counts per player
        for unit in self.all_units:
            if unit.player == self.player0:
                p0 += 1
            else:
                p1 += 1
            # check AFTER counting so the final unit is included; the
            # original checked before the increment and therefore reported
            # a game as finished when the last unit gave both players > 0
            if p0 and p1:
                return False
        return True

    @property
    def winner(self):
        '''Winning player; None while unfinished, False on mutual wipeout.'''
        if not self.finished:
            return None
        if not self.all_units:
            return False
        return self.all_units[0].player

    def init_map(self):
        # guard against double initialization
        assert not hasattr(self, 'map') or self.map is None
        self.map = Map(p0=self.player0, p1=self.player1)

    def begin_turn(self):
        '''Pre-turn step: periodic spawning and action-point refresh.'''
        if self.turn % self.spawn_interval == 0:
            self._spawn_new_units()
        assert not self.finished, 'Game is finished'
        for unit in self.all_units:
            unit.action_points = 1

    def state_for_player(self, player):
        '''Serializable view of the state limited to *player*'s vision.'''
        return dict(
            player_id=player.id, turn=self.turn,
            map=self.map.to_array(by_player=player),
        )

    def commands_from_player(self, player, commands):
        '''Validate and apply *commands* from *player*.

        Actions are limited to total unit count, extras will be trimmed
        from the beginning. Returns dict(actions=..., errors=...).
        '''
        unit_lookup = {unit.id: unit for unit in self.all_units}
        actions = []
        errors = []  # TODO maybe feedback, maybe clear error definitions
        commands = commands[-1 * len(unit_lookup):]
        for command in commands:
            try:
                # Command validates via asserts; invalid input lands in errors
                cmd = Command(player, command)
                cmd.verify_unit(unit_lookup)
                if cmd.action is ActionType.target:
                    cmd.verify_target(unit_lookup, self.map)
            except AssertionError:
                errors.append(command)
                continue
            cmd.execute()
            actions.append(command)
        return dict(actions=actions, errors=errors)

    def _spawn_new_units(self):
        for fort in self.map.get_forts():
            fort.spawn_soldiers(count=3)

    def _remove_dead_units(self):
        '''Remove dead units and clear targets on them.'''
        # TODO: might use for feedback
        dead_units = []
        for tile in self.map.tiles():
            dead_units.extend([unit for unit in tile.occupants if unit.health <= 0])
            tile.occupants = [unit for unit in tile.occupants if unit.health > 0]
        for unit in self.all_units:
            if unit.target in dead_units:
                unit.clear_target()

    def evaluate_turn(self):
        '''Execute planned actions for one turn.'''
        for step in ('attack', 'move', 'chase', 'finish'):
            # NOTE: execution order by-tile, all actions need to be synced,
            # otherwise can be unfair
            for unit in self.all_units:
                unit.end_of_turn(step)
        self._remove_dead_units()
        self._all_units = None  # invalidate the per-turn unit cache
        self.turn += 1

    def ascii(self, pid=None):
        '''ASCII rendering of the map, optionally limited to player *pid*'s vision.'''
        tmp_map = Map()
        if pid is not None:
            assert pid in (0, 1)
            player = self.player0 if pid == 0 else self.player1
            positions = self.map.vision_by_player(player)
        else:
            positions = None
        for y in range(self.map.size_y):
            for x in range(self.map.size_x):
                if not self.map.is_valid_position(x, y):
                    continue
                if positions is None or (x, y) in positions:
                    tmp_map.map[y][x] = self.map.get_tile(x, y)
                else:
                    # outside vision: hidden tile
                    tmp_map.map[y][x] = None
        return tmp_map.as_string()
|
gnowledge/ncert_nroer | demo/urls.py | Python | agpl-3.0 | 9,860 | 0.012069 | # Copyright (c) 2011, 2012 Free Software Foundation
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Urls for the demo of Gstudio"""
from django.conf import settings
from django.contrib import admin
from django.conf.urls.defaults import url
from django.conf.urls.defaults import include
from django.conf.urls.defaults import patterns
from django.views.generic.simple import direct_to_template
from django.contrib.auth.views import password_change,password_change_done,login, password_reset_confirm, password_reset, password_reset_done
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.decorators import login_required
from gstudio.sitemaps import TagSitemap
from gstudio.sitemaps import NodetypeSitemap
from gstudio.sitemaps import MetatypeSitemap
from gstudio.sitemaps import AuthorSitemap
from gstudio.forms import *
from objectapp.sitemaps import GbobjectSitemap
from registration.views import register
from views import home_view, more_view, nroer_view, proceed_view
from decorator import decorated_includes
admin.autodiscover()
# Dotted paths to the project-wide error views used by Django's handler hooks.
handler500 = 'demo.views.server_error'
handler404 = 'django.views.defaults.page_not_found'
urlpatterns = patterns(
'',
(r'^$', 'django.views.generic.simple.redirect_to',
{'url': '/home/'}),
url(r'^home/', nroer_view),
url(r'^nroer/', home_view),
#url(r'^browserError', browserError_view),
url(r'^proceed', proceed_view),
url(r'^browserError/', direct_to_template, {'template': 'gstudio/browserError.html'}),
# url(r'^more/',more_view),
url(r'^gstudio/', include('gstudio.urls')),
url(r'^nodetypes/', include('gstudio.urls')),
url(r'^objects/', include('objectapp.urls')),
#url(r'^tagclouds/',include('gstudio.urls.tagclouds')),
url(r'^comments/', include('django.contrib.comments.urls')),
url(r'^browsecollection/','gstudio.views.browseCollection.browseCollection'),
url(r'^collection/(\w+)/','gstudio.views.collection.collection'),
url(r'^AboutUs/', direct_to_template, {'template': 'gstudio/aboutUs.html'}),
url(r'^TermsOfUse/', direct_to_template, {'template': 'gstudio/termsOfUse.html'}),
url(r'^EnrichTimeline/', direct_to_template, {'template': 'gstudio/enrichtimeline.html'}),
url(r'^ShareResources/', direct_to_template, {'template': 'gstudio/shareresources.html'}),
url(r'^KnowledgeKingdom/', direct_to_template, {'template': 'gstudio/knowledgekingdom.html'}),
url(r'^RejuvenatingHistory/', direct_to_template, {'template': 'gstudio/rejuvenatinghistory.html'}),
url(r'^CaptureNature/', direct_to_template, {'template': 'gstudio/capturenature.html'}),
url(r'^ScienceinHands/', direct_to_template, {'template': 'gstudio/scienceinhands.html'}),
url(r'^SharingIdeas/', direct_to_template, {'template': 'gstudio/sharingideas.html'}),
url(r'^VoicetoMasses/', direct_to_template, {'template': 'gstudio/voicetomasses.html'}),
url(r'^ShareKnowledge/', direct_to_template, {'template': 'gstudio/shareknowledge.html'}),
url(r'^CommentOnResources/', direct_to_template, {'template': 'gstudio/commentonresources.html'}),
url(r'^ContactUs/', direct_to_template, {'template': 'gstudio/contactUs.html'}),
url(r'^Contribute/', direct_to_template, {'template': 'gstudio/contribute.html'}),
url(r'^SrinivasaRamanujan/', direct_to_template, {'template': 'gstudio/ramanujan.html'}),
url(r'^EnrichTheTimeline/', direct_to_template, {'template': 'gstudio/enrichthetimeline.html'}),
url(r'^MagicSquare/', direct_to_template, {'template': 'gstudio/magicsquare.html'}),
url(r'^MathsTricks/', direct_to_template, {'template': 'gstudio/mathstricks.html'}),
url(r'^ILoveMaths/', direct_to_template, {'template': 'gstudio/ilovemaths.html'}),
url(r'^ConstitutionCalling/', direct_to_template, {'template': 'gstudio/constitution.html'}),
url(r'^ConstitutionShareResources/', direct_to_template, {'template': 'gstudio/constitutionshareresources.html'}),
url(r'^ConstitutionEnrichTimeline/', direct_to_template, {'template': 'gstudio/constitutionenrichtimeline.html'}),
url(r'^ConstitutionShareKnowledge/', direct_to_template, {'template': 'gstudio/constitutionshareknowledge.html'}),
url(r'^MyHand/', direct_to_template, {'template': 'gstudio/myhand.html'}),
url(r'^Democratic/', direct_to_template, {'template': 'gstudio/democratic.html'}),
url(r'^Amendment/', direct_to_template, {'template': 'gstudio/amendment.html'}),
url(r'^CourseOnOERFindOutMore/', direct_to_template, {'template': 'gstudio/eventpage2.html'}),
url(r'^CourseOnOER/', direct_to_template, {'template': 'gstudio/eventpage.html'}),
url(r'^DiscoverPatterns/', direct_to_template, {'template': 'gstudio/discoverpatterns.html'}),
url(r'^NaturePatterns/', direct_to_template, {'template': 'gstudio/naturepatterns.html'}),
url(r'^ArtPatterns/', direct_to_template, {'template': 'gstudio/artpatterns.html'}),
url(r'^Tangram/', direct_to_template, {'template': 'gstudio/tangram.html'}),
url(r'^TurtleArt/', direct_to_template, {'template': 'gstudio/turtlepatterns.html'}),
url(r'^ShareCreativity/', direct_to_template, {'template': 'gstudio/sharecreativity.html'}),
url(r'^contribute/resources', direct_to_template, {'template': 'gstudio/contrib_resource.html'}),
url(r'^ShareResources/', direct_to_template, {'template': 'gstudio/shareresources.html'}),
url(r'^ContributeResourceForm/', direct_to_template, {'template': 'gstudio/contributeresform.html'}),
url(r'^ContributeForm2/', direct_to_template, {'template': 'gstudio/contributeresform2.html'}),
url(r'^dashboard/','gstudio.views.dashboard.dashboard'),
url(r'^translate/',include('gstudio.urls.translate')),
url(r'^contribute/theresources', di | rect_to_template, {'template': 'gstudio/contribute_resource.html'}),
url(r'^Ganit/', direct_to_template, {'template': 'gstudio/GanitPoster.html'}),
url(r'^FunWithGeogebra/', direct_to_template, {'template': 'gstudio/FunWithGeogebra.html'}),
url(r'^ExploringMathKits/', direc | t_to_template, {'template': 'gstudio/ExploringMathKits.html'}),
url(r'^GanitMagicSquare/', direct_to_template, {'template': 'gstudio/GanitMagicSquare.html'}),
url(r'^KnowSrinivasa/', direct_to_template, {'template': 'gstudio/KnowSrinivasa.html'}),
url(r'^MoreonMaths/', direct_to_template, {'template': 'gstudio/MoreonMaths.html'}),
url(r'^GanitInnerTemplate/', direct_to_template, {'template': 'gstudio/GanitInnerTemplate.html'}),
url(r'^ExpressionSeries/', direct_to_template, {'template': 'gstudio/ExpressionSeries.html'}),
#URL for XMLRPC
#url(r'^xmlrpc/$','django_xmlrpc.views.handle_xmlrpc'),
#url(r'^i18n/', include('django.conf.urls.i18n')),
#url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/gstudio/', include('gstudio.urls.ajaxurls')),
url(r'^admin/password_change/', password_change,{'password_change_form':UserChangeform,'template_name':'registration/password_change_form1.html'}),
url(r'^account/password/change/', password_change,{'password_change_form':UserChangeform,'template_name':'registration/password_change_form1.html'}),
url(r'^accounts/password/change/done/', password_change_done,{'template_name':'registration/password_change_done1.html'}),
url(r'^admin/', decorated_includes(login_required,include(admin.site.urls))),
url(r'^objects/admin/', decorated_includes(login_required,include(admin.site.urls))),
url(r'^nodetypes/admin/', decorated_includes(login_required,include(admin.site.urls))),
url(r'^grappelli/', |
openshift-mobile/openshift-mobile-app-demo | wsgi/osmdemo/questionnaire/views.py | Python | gpl-3.0 | 2,536 | 0.039038 | from django.template import RequestContext
from django.shortcuts import render_to_response,get_object_or_404
from django.core.mail import send_mail
from questionnaire.models import *
import re,os
# %-format template for the thank-you email: (user email, questionnaire title).
# NOTE(review): the "sumbission" typo is in the mail actually sent to users.
body_template = """
Thank you %s for your sumbission for the %s.
We appreciate your help in improving the OpenShift Mobile Project.
Thanks again,
The OpenShift Mobile Team
"""
def index(request):
    """Render the first (lowest-pk) questionnaire as the landing page."""
    return render_to_response('questionnaire/questionnaire.html', {
        'questionnaire' : Questionnaire.objects.order_by('pk')[0]
    },context_instance=RequestContext(request))
def questionnaire(request, questionnaire_id):
    """Render a questionnaire (GET) or record a submission (POST).

    A POST validates the email address, stores one UserQuestionnaire per
    (email, questionnaire) pair plus an Answer per question, optionally
    sends a thank-you mail, and renders the thanks page.
    """
    questionnaire = get_object_or_404(Questionnaire, pk=questionnaire_id)
    if len(request.POST):
        email = request.POST['email']
        # NOTE(review): the pattern is unanchored at the end, so trailing
        # garbage after a valid address still passes -- kept for compatibility.
        if not re.match(r"[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?", email):
            return render_to_response('questionnaire/questionnaire.html', {
                'questionnaire': questionnaire,
                'error': 'Invalid Email Address'
            }, context_instance=RequestContext(request))
        try:
            # one submission per (email, questionnaire)
            UserQuestionnaire.objects.get(email=email, questionnaire=questionnaire)
            return render_to_response('questionnaire/questionnaire.html', {
                'questionnaire': questionnaire,
                'error': 'Questionnaire already submitted for this email address'
            }, context_instance=RequestContext(request))
        except UserQuestionnaire.DoesNotExist:
            # First submission for this address: record it. The original
            # bare `except:` also swallowed database errors and
            # MultipleObjectsReturned, silently creating duplicates.
            resp = UserQuestionnaire(
                email=email,
                questionnaire=questionnaire
            )
            resp.save()
        for section in questionnaire.section_set.all():
            for question in section.questions.all():
                if question.name in request.POST:
                    # checkboxes carry no useful value; store a boolean flag
                    value = "true" if question.elem_type == 'checkbox' else request.POST[question.name]
                else:
                    value = 'false'  # unanswered / unchecked
                answer = Answer(
                    answer=value,
                    question=question,
                    user_questionnaire=resp
                )
                answer.save()
        if 'EMAIL_HOST' in os.environ:
            # fixed subject typo ("for You" -> "for Your")
            subject, from_user = ('Thank You for Your Submission!', os.environ['EMAIL_FROM'])
            body = body_template % (email, questionnaire.title)
            send_mail(subject, body, from_user, [email])
        return render_to_response('questionnaire/thanks.html', {
            'email': email
        }, context_instance=RequestContext(request))
    return render_to_response('questionnaire/questionnaire.html', {
        'questionnaire': questionnaire
    }, context_instance=RequestContext(request))
|
ssorgatem/pulsar | test/pulsar_objectstore_test.py | Python | apache-2.0 | 5,154 | 0.002716 | from os import makedirs
from os.path import join, dirname, exists
from string import Template
from galaxy.util.bunch import Bunch
from galaxy.objectstore import build_object_store_from_config
from .test_utils import TempDirectoryTestCase
from .test_objectstore import MockDataset
class PulsarObjectStoreTest(TempDirectoryTestCase):
    """Exercise the Pulsar object-store proxy against a live test server."""

    def __write(self, contents, name):
        """Write *contents* to *name* under the temp dir, creating parents."""
        path = join(self.temp_directory, name)
        directory = dirname(path)
        if not exists(directory):
            makedirs(directory)
        # context manager closes the handle deterministically (the original
        # open(...).write(...) leaked it until garbage collection)
        with open(path, "wb") as fh:
            fh.write(contents)
        return path

    def test_pulsar_objectstore(self):
        # Define real object store used by Pulsar server.
        object_store_config_file = join(self.temp_directory, "object_store_conf.xml")
        with open(object_store_config_file, "w") as configf:
            config_template = Template("""<?xml version="1.0"?>
<object_store type="disk">
    <files_dir path="${temp_directory}"/>
    <extra_dir type="temp" path="${temp_directory}"/>
    <extra_dir type="job_work" path="${temp_directory}"/>
</object_store>
""")
            config_contents = config_template.safe_substitute(temp_directory=self.temp_directory)
            configf.write(config_contents)
        app_conf = dict(
            object_store_config_file=object_store_config_file,
            private_token="12345",
        )
        from .test_utils import test_pulsar_server
        with test_pulsar_server(app_conf=app_conf) as server:
            url = server.application_url
            # Define a proxy Pulsar object store.
            proxy_object_store_config_file = join(self.temp_directory, "proxy_object_store_conf.xml")
            with open(proxy_object_store_config_file, "w") as configf:
                config_template = Template("""<?xml version="1.0"?>
<object_store type="pulsar" url="$url" private_token="12345" transport="urllib">
    <!-- private_token is optional - see Pulsar documentation for more information. -->
    <!-- transport is optional, set to curl to use libcurl instead of urllib for communication with Pulsar. -->
</object_store>
""")
                contents = config_template.safe_substitute(url=url)
                configf.write(contents)
            config = Bunch(object_store_config_file=proxy_object_store_config_file)
            object_store = build_object_store_from_config(config=config)
            # Test no dataset with id 1 exists.
            absent_dataset = MockDataset(1)
            assert not object_store.exists(absent_dataset)
            # Write empty dataset 2 in second backend, ensure it is empty and
            # exists.
            empty_dataset = MockDataset(2)
            self.__write(b"", "000/dataset_2.dat")
            assert object_store.exists(empty_dataset)
            assert object_store.empty(empty_dataset)
            # Write non-empty dataset in backend 1, test it is not emtpy & exists.
            hello_world_dataset = MockDataset(3)
            self.__write(b"Hello World!", "000/dataset_3.dat")
            assert object_store.exists(hello_world_dataset)
            assert not object_store.empty(hello_world_dataset)
            # Test get_data
            data = object_store.get_data(hello_world_dataset)
            assert data == "Hello World!"
            data = object_store.get_data(hello_world_dataset, start=1, count=6)
            assert data == "ello W"
            # Test Size
            # Test absent and empty datasets yield size of 0.
            assert object_store.size(absent_dataset) == 0
            assert object_store.size(empty_dataset) == 0
            # Elsewise
            assert object_store.size(hello_world_dataset) > 0  # Should this always be the number of bytes?
            # Test percent used (to some degree)
            percent_store_used = object_store.get_store_usage_percent()
            assert percent_store_used > 0.0
            assert percent_store_used < 100.0
            # Test update_from_file test
            output_dataset = MockDataset(4)
            output_real_path = join(self.temp_directory, "000", "dataset_4.dat")
            assert not exists(output_real_path)
            output_working_path = self.__write(b"NEW CONTENTS", "job_working_directory1/example_output")
            object_store.update_from_file(output_dataset, file_name=output_working_path, create=True)
            assert exists(output_real_path)
            # Test delete
            to_delete_dataset = MockDataset(5)
            to_delete_real_path = self.__write(b"content to be deleted!", "000/dataset_5.dat")
            assert object_store.exists(to_delete_dataset)
            assert object_store.delete(to_delete_dataset)
            assert not object_store.exists(to_delete_dataset)
            assert not exists(to_delete_real_path)
            # Test json content.
            complex_contents_dataset = MockDataset(6)
            complex_content = b'{"a":6}'
            self.__write(complex_content, "000/dataset_6.dat")
            assert object_store.exists(complex_contents_dataset)
            # BUG FIX: the original line assigned the result of the comparison
            # to `data` and never asserted it, so a wrong payload passed silently.
            assert object_store.get_data(complex_contents_dataset) == complex_content
|
priscillaboyd/SPaT_Prediction | src/decision_tree/DT_Utils.py | Python | apache-2.0 | 3,533 | 0.000566 | # Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
    """
    Score a decision tree model and persist the scores to disk.

    :param string model_name: title for the model used on the output filename
    :param dataframe model: model reference
    :param dataframe X: examples
    :param dataframe y: targets
    :param dataframe y_actual: target results
    :param string output_folder: location of the output / results
    """
    print("Scoring model...")
    model_score = model.score(X, y)
    mse = mean_squared_error(y, y_actual)
    mse_score = model_name, "- Mean Squared Error:", mse
    accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
    # write both metrics to <output_folder>/models/score_<model_name>.txt
    path = output_folder + '/models'
    create_folder_if_not_exists(path)
    filename = path + '/score_' + model_name + '.txt'
    with open(filename, 'w') as scores:
        # the with-block closes the file; the original also called
        # scores.close() inside the block, which was redundant
        print(mse_score, file=scores)
        print(accuracy, file=scores)
    print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
    """
    Plot decision tree, y (training) vs y (test/actual).

    :param string model_name: title for the model used on the output filename
    :param dataframe y_actual: target results
    :param dataframe y_test: test targets
    :param string output_folder: location of the output / results
    """
    # initialise plot path
    path = output_folder + '/models'
    print("Plotting results...")
    plt.scatter(y_actual, y_test, label='Duration')
    plt.title('Decision Tree')
    # dashed diagonal reference line: perfect predictions fall on y = x
    plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
    plt.xlabel('y (actual)')
    plt.ylabel('y (test)')
    plt.legend()
    plot_path = path + '/plot_' + model_name + '.png'
    plt.savefig(plot_path)
    print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, | folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
    """
    Retrieve model using Pickle binary format.

    :param bytes pickle_model: pickled model payload (as produced by
        ``pickle.dumps``) -- note this is the serialized data itself,
        not a file path
    :return: Pickle model for re-use
    :rtype: object
    """
    return pickle.loads(pickle_model)
|
kevinzhou96/CascadingFailureSimulation | rescale_power.py | Python | gpl-3.0 | 2,340 | 0.00641 | import pypower.api as pp
import networkx as nx
import numpy as np
import copy
import pypower.idx_brch as idx_brch
import pypower.idx_bus as idx_bus
import pypower.idx_gen as idx_gen
def rescale_power_down(ppc):
    """Rescales power generation or load within a component uniformly among all
    buses to balance generation and load. Only scales values downwards.

    ARGUMENTS: ppc: dict (representing a PYPOWER case file)

    RETURNS: None (does in-place update of ppc)
    """
    buses = set(ppc['bus'][:, idx_bus.BUS_I].astype(int))
    genInComponent = lambda gen: int(gen[idx_gen.GEN_BUS]) in buses
    component_generators = np.array(list(filter(genInComponent, ppc['gen'])))
    # Sum only the generators inside this component -- the original summed
    # every generator in the case, inconsistent with rescale_power_gen.
    total_gen = sum(component_generators[:, idx_gen.PG]) if len(component_generators) > 0 else 0
    total_load = sum(ppc['bus'][:, idx_bus.PD])
    if np.isclose(total_gen, total_load):
        # no need to scale
        return
    elif total_gen > total_load:
        # scale generation down
        scale_factor = total_load / total_gen  # note total_gen > 0
        ppc['gen'][:, idx_gen.PG] *= scale_factor
    else:  # total_load > total_gen
        # scale load down
        scale_factor = total_gen / total_load  # note total_load > 0
        ppc['bus'][:, idx_bus.PD] *= scale_factor
def rescale_power_gen(ppc):
    """Rescales power generation only (not load) within a component uniformly
    among all buses to match load. If total generation is zero, we cannot fulfill
    the load and so load is set to 0.

    ARGUMENTS: ppc: dict (representing a PYPOWER case file)

    RETURNS: None (does in-place update of ppc)
    """
    buses = set(ppc['bus'][:,idx_bus.BUS_I].astype(int))
    genInComponent = lambda gen : int(gen[idx_gen.GEN_BUS]) in buses
    component_generators = np.array(list(filter(genInComponent, ppc['gen'])))
    total_gen = sum(component_generators[:, idx_gen.PG]) if len(component_generators)>0 else 0
    total_load = sum(ppc['bus'][:, idx_bus.PD])
    if np.isclose(total_gen, 0):
        # no power generated, set loads to zero
        ppc['bus'][:, idx_bus.PD] = np.zeros(len(ppc['bus']))
    elif np.isclose(total_gen, total_load):
        # no need to scale
        return
    else:
        # scale generation to match load
        # NOTE(review): this scales every generator row in ppc['gen'], not only
        # the component's rows -- assumes callers pass a single-component case;
        # confirm against call sites.
        scale_factor = total_load / total_gen
        ppc['gen'][:, idx_gen.PG] *= scale_factor
|
lawzou/shoop | shoop/admin/modules/methods/views/edit_detail.py | Python | agpl-3.0 | 1,471 | 0.00068 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import six
from django.utils.translation import ugettext_lazy as _
from django.views.generic.detail import DetailView
from shoop.core.models import PaymentMethod, ShippingMethod
from shoop.utils.excs import Problem
from shoop.utils.importing import load
class _BaseMethodDetailView(DetailView):
    """Forwards the detail request to the view class declared by the
    method's provider module."""

    model = None  # bound by the concrete subclasses below
    title = _(u"Edit Details")

    def dispatch(self, request, *args, **kwargs):
        # This view only dispatches further to the method module's own
        # detail view class.
        method = self.get_object()
        module = method.module
        view_class = module.admin_detail_view_class
        if not view_class:
            raise Problem("Module %s has no admin detail view" % module.name)
        if isinstance(view_class, six.text_type):
            # dotted-path string: resolve it to the actual class
            view_class = load(view_class)
        kwargs["object"] = method
        return view_class(model=self.model).dispatch(request, *args, **kwargs)
class ShippingMethodEditDetailView(_BaseMethodDetailView):
    # concrete binding for shipping-method detail pages
    model = ShippingMethod
class PaymentMethodEditDetailView(_BaseMethodDetailView):
    # concrete binding for payment-method detail pages
    model = PaymentMethod
|
tmbdev/clstm | display_server.py | Python | apache-2.0 | 819 | 0.015873 | import os
import numpy
from pylab import *
import traceback
import zmq
context = zmq.Context()
socket = context.socket(zmq.REP)
addr = os.envir | on.get("PYSERVER","tcp://127.0.0.1:9876")
socket.bind(addr)
poller = zmq.Poller()
poller.register(socket, zmq.POLLIN)
def farg(index):
    """Decode multipart message part *index* into a 1-D float32 array."""
    global args
    raw = args[index]
    return numpy.fromstring(raw, dtype=float32)
def farg2(index, d0, d1):
    """Decode multipart message part *index* into a (d0, d1) float32 array."""
    global args
    flat = numpy.fromstring(args[index], dtype=float32)
    return flat.reshape(d0, d1)
# Serve remote plotting commands forever (Python 2 script).
while True:
    # inner loop: poll with a timeout so the matplotlib GUI event loop
    # keeps running while we wait for a request
    while True:
        evts = poller.poll(100)
        if evts!=[]: break
        ginput(1,0.01)  # pumps the GUI event loop
    args = socket.recv_multipart()
    print "----------------"
    print args[0]
    result = None
    try:
        # first message part is Python source; it may read further parts
        # via farg()/farg2() and assign `result`
        exec args[0]
    except Exception,e:
        print "FAILED"
        traceback.print_exc()
    draw()
    # REP socket must always answer; send the textual result (or 'None')
    socket.send(str(result))
|
OpenSeizureDetector/ESP8266_SD | monitor.py | Python | gpl-3.0 | 516 | 0.001938 | #!/usr/bin/python
#
# Simple script to echo the ttyUSB0 serial port to the console at 74880 baud.
# Based on http://www.esp8266.com/viewtopic.php?p=33650.
# I found it really really hard to do using standard tools like cu...
# By doing ./monitory. | py and resetting the esp8226 (while it is connected
# via USB) you can see the boot up messages and anything you 'printf'
# to stdout.
#
import sys
from serial import Serial
dev = Serial("/dev/ttyUSB0", 74880)  # ESP8266 boot ROM logs at 74880 baud
while True:
    c = dev.read(1)  # blocking single-byte read from the serial port
    sys.stdout.write(c)  # echo straight to the console
|
vileopratama/vitech | src/addons/purchase/tests/test_onchange_product_id.py | Python | mit | 3,448 | 0.00348 | from datetime import datetime
from openerp.tests.common import TransactionCase
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
class TestOnchangeProductId(TransactionCase):
    """Test that when an included tax is mapped by a fiscal position, the
    included tax must be subtracted from the price of the product.
    """

    def setUp(self):
        """Cache the model registries used by the test."""
        super(TestOnchangeProductId, self).setUp()
        self.fiscal_position_model = self.env['account.fiscal.position']
        self.fiscal_position_tax_model = self.env['account.fiscal.position.tax']
        self.tax_model = self.env['account.tax']
        self.po_model = self.env['purchase.order']
        self.po_line_model = self.env['purchase.order.line']
        self.res_partner_model = self.env['res.partner']
        self.product_tmpl_model = self.env['product.template']
        self.product_model = self.env['product.product']
        self.product_uom_model = self.env['product.uom']
        self.supplierinfo_model = self.env["product.supplierinfo"]

    def test_onchange_product_id(self):
        uom_id = self.product_uom_model.search([('name', '=', 'Unit(s)')])[0]
        partner_id = self.res_partner_model.create(dict(name="George"))
        # A 21% tax included in the price, and the 0% excluded tax the
        # fiscal position maps it to.
        tax_include_id = self.tax_model.create(dict(name="Include tax",
                                                    amount='21.00',
                                                    price_include=True,
                                                    type_tax_use='purchase'))
        tax_exclude_id = self.tax_model.create(dict(name="Exclude tax",
                                                    amount='0.00',
                                                    type_tax_use='purchase'))
        supplierinfo_vals = {
            'name': partner_id.id,
            'price': 121.0,
        }
        supplierinfo = self.supplierinfo_model.create(supplierinfo_vals)
        # (Repaired two garbled lines below: "| supplier_taxes_id" and
        # "crea | te" from the extraction.)
        product_tmpl_id = self.product_tmpl_model.create(dict(
            name="Voiture",
            list_price=121,
            seller_ids=[(6, 0, [supplierinfo.id])],
            supplier_taxes_id=[(6, 0, [tax_include_id.id])]))
        product_id = self.product_model.create(dict(product_tmpl_id=product_tmpl_id.id))
        fp_id = self.fiscal_position_model.create(dict(name="fiscal position", sequence=1))
        fp_tax_id = self.fiscal_position_tax_model.create(dict(position_id=fp_id.id,
                                                               tax_src_id=tax_include_id.id,
                                                               tax_dest_id=tax_exclude_id.id))
        po_vals = {
            'partner_id': partner_id.id,
            'fiscal_position_id': fp_id.id,
            'order_line': [
                (0, 0, {
                    'name': product_id.name,
                    'product_id': product_id.id,
                    'product_qty': 1.0,
                    'product_uom': uom_id.id,
                    'price_unit': 121.0,
                    'date_planned': datetime.today().strftime(DEFAULT_SERVER_DATETIME_FORMAT),
                })],
        }
        po = self.po_model.create(po_vals)
        po_line = po.order_line[0]
        po_line.onchange_product_id()
        # 121 with a 21% included tax -> 100 once the mapping removes it.
        self.assertEquals(100, po_line.price_unit, "The included tax must be subtracted to the price")
|
therealjumbo/python_summer | py31eg/grepword-m.py | Python | gpl-3.0 | 4,049 | 0.00247 | #!/usr/bin/env python3
# Copyright (c) 2008-11 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import multiprocessing
import optparse
import os
# The maximum length of the word to be searched for is BLOCK_SIZE
BLOCK_SIZE = 8000
class Worker(multiprocessing.Process):
    """Worker process that scans queued files for a fixed word.

    Filenames arrive on a JoinableQueue; each hit is reported by printing
    the (optionally numbered) filename to stdout.
    """

    def __init__(self, work_queue, word, number):
        super().__init__()
        self.work_queue = work_queue
        # word to search for; must be shorter than BLOCK_SIZE
        self.word = word
        # debug prefix such as "3: ", or "" when not debugging
        self.number = number

    def run(self):
        # Loop forever; workers are started as daemons, so they die with the
        # parent once the queue has been fully processed and joined.
        while True:
            try:
                filename = self.work_queue.get()
                self.process(filename)
            finally:
                self.work_queue.task_done()

    def process(self, filename):
        """Print this worker's number and *filename* if it contains the word.

        The file is read in BLOCK_SIZE chunks; `previous` keeps the prior
        chunk so occurrences straddling a chunk boundary are still found.
        """
        previous = ""
        try:
            with open(filename, "rb") as fh:
                while True:
                    current = fh.read(BLOCK_SIZE)
                    if not current:
                        break
                    # Decode leniently: undecodable byte sequences are
                    # dropped rather than aborting the scan.
                    current = current.decode("utf8", "ignore")
                    if (self.word in current or
                        self.word in previous[-len(self.word):] +
                        current[:len(self.word)]):
                        print("{0}{1}".format(self.number, filename))
                        break
                    if len(current) != BLOCK_SIZE:
                        break
                    previous = current
        except EnvironmentError as err:
            # Unreadable files are reported, not fatal.
            print("{0}{1}".format(self.number, err))
def parse_options():
    """Parse command-line options for the parallel grep.

    Returns (opts, word, names) where *word* is the search term and *names*
    are file or directory paths.  Exits via parser.error() on bad usage.
    (Repairs two garbled lines from the extraction: "| help=(" and
    "len(ar | gs)".)
    """
    parser = optparse.OptionParser(
            usage=("usage: %prog [options] word name1 "
                   "[name2 [... nameN]]\n\n"
                   "names are filenames or paths; paths only "
                   "make sense with the -r option set"))
    parser.add_option("-p", "--processes", dest="count", default=7,
            type="int",
            help=("the number of processes to use (1..20) "
                  "[default %default]"))
    parser.add_option("-r", "--recurse", dest="recurse",
            default=False, action="store_true",
            help="recurse into subdirectories")
    parser.add_option("-d", "--debug", dest="debug", default=False,
            action="store_true")
    opts, args = parser.parse_args()
    if len(args) == 0:
        parser.error("a word and at least one path must be specified")
    elif len(args) == 1:
        parser.error("at least one path must be specified")
    # Without -r at least one argument must be an existing plain file.
    if (not opts.recurse and
        not any([os.path.isfile(arg) for arg in args])):
        parser.error("at least one file must be specified; or use -r")
    if not (1 <= opts.count <= 20):
        parser.error("process count must be 1..20")
    return opts, args[0], args[1:]
def get_files(args, recurse):
    """Expand *args* into a flat list of filenames.

    Plain files are kept as-is; directories contribute every file beneath
    them, but only when *recurse* is true.  Anything else is ignored.
    """
    collected = []
    for entry in args:
        if os.path.isfile(entry):
            collected.append(entry)
        elif recurse:
            collected.extend(
                os.path.join(dirpath, name)
                for dirpath, _subdirs, names in os.walk(entry)
                for name in names)
    return collected
def main():
    """Spawn daemon worker processes and feed them every file to search."""
    opts, word, args = parse_options()
    filelist = get_files(args, opts.recurse)
    work_queue = multiprocessing.JoinableQueue()
    # Daemon workers exit automatically once the main process finishes.
    for i in range(opts.count):
        number = "{0}: ".format(i + 1) if opts.debug else ""
        worker = Worker(work_queue, word, number)
        worker.daemon = True
        worker.start()
    for filename in filelist:
        work_queue.put(filename)
    # Block until every queued filename has been task_done()'d by a worker.
    work_queue.join()
if __name__ == "__main__": # This is *vital* on Windows!
    # Windows has no fork(): multiprocessing re-imports this module in each
    # child, so the guard stops children from re-running main().
    main()
|
Snaipe/Tequila | tequila/server/group/exception.py | Python | gpl-3.0 | 1,272 | 0.000786 | """
Tequila: a command-line Minecraft server manager written in python
    Copyright (C) 2014 Snaipe
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will b | e useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from ...exception import TequilaException
class ServerGroupException(TequilaException):
    """Base class for server-group errors; fills the $name placeholder of the
    message from the group and keeps the group on the exception.

    NOTE(review): the attribute is named ``server`` although it holds a
    group -- kept as-is since callers may rely on it.
    """
    def __init__(self, message, group, **kwargs):
        super().__init__(message, name=group.name, **kwargs)
        self.server = group
class ServerGroupDoesNotExistException(ServerGroupException):
    """Raised when an operation targets a group that has not been created."""
    def __init__(self, group):
        super().__init__('Server group $name does not exist', group)
class ServerGroupAlreadyExistsException(ServerGroupException):
    """Raised when creating a group whose name is already taken."""
    def __init__(self, group):
        # '$name' is substituted by the base class from group.name.
        super().__init__('Server group $name already exists', group)
citrix-openstack-build/cliff | cliff/tests/test_commandmanager.py | Python | apache-2.0 | 3,511 | 0 |
import mock
from cliff.commandmanager import CommandManager
class TestCommand(object):
    """Minimal stand-in command used by the manager tests."""

    @classmethod
    def load(cls):
        # Mimics the plugin-loading hook: "loading" just yields the class.
        return cls

    def __init__(self):
        # Nothing to initialise; present so instantiation is explicit.
        return
class TestCommandManager(CommandManager):
    """CommandManager wired with a fixed set of fake commands."""
    def _load_commands(self):
        # Bypass entry-point discovery; the keys exercise one-, two- and
        # three-word command names.
        self.commands = {
            'one': TestCommand,
            'two words': TestCommand,
            'three word command': TestCommand,
        }
def test_lookup_and_find():
    # nose-style generator test: every registered command must be found by
    # its exact (multi-)word name, with no remaining argv.
    def check(mgr, argv):
        cmd, name, remaining = mgr.find_command(argv)
        assert cmd
        assert name == ' '.join(argv)
        assert not remaining
    mgr = TestCommandManager('test')
    for expected in [['one'],
                     ['two', 'words'],
                     ['three', 'word', 'command'],
                     ]:
        yield check, mgr, expected
    return
def test_lookup_with_remainder():
    # Arguments following the command name must be handed back untouched.
    def check(mgr, argv):
        cmd, name, remaining = mgr.find_command(argv)
        assert cmd
        assert remaining == ['--opt']
    mgr = TestCommandManager('test')
    for expected in [['one', '--opt'],
                     ['two', 'words', '--opt'],
                     ['three', 'word', 'command', '--opt'],
                     ]:
        yield check, mgr, expected
    return
def test_find_invalid_command():
    mgr = TestCommandManager('test')
    # Option-looking arguments are rejected with a ValueError that names
    # the offending token.
    def check_one(argv):
        try:
            mgr.find_command(argv)
        except ValueError as err:
            assert '-b' in ('%s' % err)
        else:
            assert False, 'expected a failure'
    for argv in [['a', '-b'],
                 ['-b'],
                 ]:
        yield check_one, argv
def test_find_unknown_command():
    """Unknown command names must raise ValueError naming the argv.

    (Repaired the garbled assertion message "' | expected a failure'".)
    """
    mgr = TestCommandManager('test')
    try:
        mgr.find_command(['a', 'b'])
    except ValueError as err:
        assert "['a', 'b']" in ('%s' % err)
    else:
        assert False, 'expected a failure'
def test_add_command():
    # Commands registered at runtime take part in lookup like any other.
    mgr = TestCommandManager('test')
    mock_cmd = mock.Mock()
    mgr.add_command('mock', mock_cmd)
    found_cmd, name, args = mgr.find_command(['mock'])
    assert found_cmd is mock_cmd
def test_load_commands():
testcm | d = mock.Mock(name='testcmd')
testcmd.name.replace.return_value = 'test'
mock_pkg_resources = mock.Mock(return_value=[testcmd])
with mock.patch('pkg_resources.iter_entry_points',
mock_pkg_resources) as iter_entry_points:
mgr = CommandManager('test')
assert iter_entry_points.called_once_with('test')
names = [n for n, v in mgr]
assert names == ['test']
def test_load_commands_keep_underscores():
    # convert_underscores=False keeps the entry-point name verbatim.
    testcmd = mock.Mock()
    testcmd.name = 'test_cmd'
    mock_pkg_resources = mock.Mock(return_value=[testcmd])
    with mock.patch('pkg_resources.iter_entry_points',
                    mock_pkg_resources) as iter_entry_points:
        mgr = CommandManager('test', convert_underscores=False)
        assert iter_entry_points.called_once_with('test')
        names = [n for n, v in mgr]
        assert names == ['test_cmd']
def test_load_commands_replace_underscores():
    # convert_underscores=True turns underscores into spaces.
    testcmd = mock.Mock()
    testcmd.name = 'test_cmd'
    mock_pkg_resources = mock.Mock(return_value=[testcmd])
    with mock.patch('pkg_resources.iter_entry_points',
                    mock_pkg_resources) as iter_entry_points:
        mgr = CommandManager('test', convert_underscores=True)
        assert iter_entry_points.called_once_with('test')
        names = [n for n, v in mgr]
        assert names == ['test cmd']
|
AndersenLab/cegwas-web | base/views/api/api_docs.py | Python | mit | 370 | 0.002703 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Author: Daniel E. Cook
Handles redirecting the user to the API Documentation.
"""
from base | .application import app
from flask import send_from_directory
@app.route("/data/api/docs/")
@app.route("/data/api/docs/<path:path>")
def docs(path="index.html"):
    """Serve the pre-built API documentation; bare /docs/ returns index.html."""
    return send_from_directory('../cendr-api-docs/docs/', path)
|
Comunitea/CMNT_004_15 | project-addons/purchase_picking/models/stock.py | Python | agpl-3.0 | 14,639 | 0.002528 | ##############################################################################
#
# Copyright (C) 2014 Pexego Sistemas Informáticos All Rights Reserved
# $Jesús Ventosinos Mayor <jesus@pexego.es>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import models, fields, api, _, exceptions
class StockContainer(models.Model):
    """Shipping container used to group incoming moves and track transport
    milestones (ready / ETD / ETA / arrival).

    Fixes in this revision: the `cost` field label keyword was misspelled
    (``sting=``), and two extraction-garbled lines in `_set_eta` and
    `_set_arrived` were repaired.
    """
    _name = 'stock.container'

    type = fields.Selection([
        ('air', 'Air'),
        ('sea', 'Sea'),
        ('road', 'Road'),
    ])
    dimensions = fields.Char(string="CBM/KG", help="Dimensions")
    ready = fields.Date(string="Ready", help="Ready merchandise date")
    etd = fields.Date(string="ETD", help="Date of departure of transport")
    eta = fields.Date(string="ETA", help="Arrival date at port / destination")
    notes_purchases = fields.Char(string="Notes", help="Purchases notes")
    notes_warehouse = fields.Text(string="Warehouse notes", help="Warehouse notes")
    conf = fields.Boolean(string="Conf", help="Confirmed")
    telex = fields.Boolean(string="Telex", help="Telex")
    arrived = fields.Boolean(string="Arrived", help="Arrived", compute="_set_arrived", store=True)
    cost = fields.Float(string="Cost")
    n_ref = fields.Integer(string="Nº ref", store=False, compute="_get_ref")
    forwarder = fields.Many2one('res.partner', domain="['&',('supplier','=',True),('forwarder','=',True)]",
                                string="FWDR")
    forwarder_comercial = fields.Char(related="forwarder.comercial", store=False, string="FWDR")
    incoterm = fields.Many2one('stock.incoterms', string='Incoterm', ondelete="restrict")
    destination_port = fields.Many2one('stock.container.port', string='NAV/PTO', ondelete="restrict")
    status = fields.Many2one('stock.container.status', string='Status', help='For more information click on the status', ondelete="restrict")
    ctns = fields.Char(string="Ctns")
    departure = fields.Boolean(string="Departure", help="Transport departure")
    pickings_warehouse = fields.Char(string="Pickings", store=False, compute="_get_picking_ids")
    set_eta = fields.Boolean(string="set_eta", help="Set eta", default=0, compute="_set_eta", store=True)
    set_date_exp = fields.Boolean(string="set_date_expected", help="Set date expected", default=0, compute="_set_date_exp", store=True)

    @api.multi
    @api.depends('eta')
    def _set_eta(self):
        # Latches to True once an ETA has been entered.
        for container in self:
            if container.eta:
                container.set_eta = True

    @api.multi
    @api.depends('date_expected')
    def _set_date_exp(self):
        # Latches to True once a date expected has been entered.
        for container in self:
            if container.date_expected:
                container.set_date_exp = True

    @api.multi
    @api.depends('move_ids.picking_id.state')
    def _set_arrived(self):
        # A container has arrived when every related picking is done.
        for container in self:
            container.arrived = False
            if container.picking_ids and all(pick_state == 'done' for pick_state in container.picking_ids.mapped('state')):
                container.arrived = True

    @api.multi
    def _set_date_expected(self):
        # Inverse: push the container's expected date down to its moves and
        # their pickings.
        for container in self:
            if container.move_ids:
                date_expected = container.date_expected
                container.move_ids.write({'date_expected': date_expected})
                picking_ids = container.move_ids.mapped('picking_id')
                if picking_ids:
                    picking_ids.write({'scheduled_date': date_expected})
        return True

    @api.multi
    @api.depends('move_ids.picking_id.scheduled_date')
    def _get_date_expected(self):
        # Compute: take the latest expected date across the moves and
        # re-synchronise all moves to that same date.
        for container in self:
            if container.move_ids:
                max_date = max(container.move_ids.mapped('date_expected') or fields.Date.today())
                if max_date:
                    container.move_ids.write({'date_expected': max_date})
                    container.date_expected = max_date

    @api.multi
    def _get_picking_ids(self):
        # Collect the distinct related pickings and build a short display
        # string of at most two picking names, then "...".
        for container in self:
            res = []
            pickings_warehouse = ""
            aux = 0
            for line in container.move_ids:
                if line.picking_id.id not in res:
                    res.append(line.picking_id.id)
                    container.picking_ids = res
                    aux += 1
                    if line.picking_id and aux < 3:
                        pickings_warehouse = pickings_warehouse + line.picking_id.name + ", "
            if pickings_warehouse and aux < 3:
                container.pickings_warehouse = pickings_warehouse[:-2]
            elif pickings_warehouse and aux >= 3:
                container.pickings_warehouse = pickings_warehouse + "..."

    @api.multi
    def _get_ref(self):
        # Number of distinct products across the container's moves.
        for container in self:
            res = []
            n_ref = 0
            for line in container.move_ids:
                if line.product_id.id not in res:
                    res.append(line.product_id.id)
                    n_ref += 1
            container.n_ref = n_ref

    @api.multi
    def _get_responsible(self):
        # NOTE(review): references container.picking_id / container.origin,
        # which are not declared on this model here -- confirm they exist.
        for container in self:
            responsible = ''
            if container.picking_id:
                responsible = container.picking_id.commercial
            elif container.origin:
                responsible = self.env['sale.order'].search([('name', '=', container.origin)]).user_id
            container.user_id = responsible

    name = fields.Char("Container Ref.", required=True)
    date_expected = fields.Date("Date expected", compute='_get_date_expected', inverse='_set_date_expected',
                                store=True, readonly=False, required=False)
    move_ids = fields.One2many("stock.move", "container_id", "Moves",
                               readonly=True, copy=False, domain=[('state', '!=', 'cancel')])
    picking_ids = fields.One2many('stock.picking', "container_ids", compute='_get_picking_ids', string='Pickings', readonly=True)
    user_id = fields.Many2one(string='Responsible', compute='_get_responsible')
    company_id = fields.Many2one("res.company", "Company", required=True,
                                 default=lambda self: self.env['res.company']._company_default_get('stock.container'))

    _sql_constraints = [
        ('name_uniq', 'unique(name)', 'Container name must be unique')
    ]
class StockPicking(models.Model):
    _inherit = 'stock.picking'
    # Non-stored helpers: `usage` mirrors the source location's usage and
    # `container_ids` aggregates the containers referenced by the moves.
    usage = fields.Char(compute='_get_usage')
    shipping_identifier = fields.Char('Shipping identifier', size=64)
    temp = fields.Boolean("Temp.")
    container_ids = fields.Many2many('stock.container', string='Containers', compute='_get_containers')
    @api.model
    def create(self, vals):
        # Default the shipping identifier to the concatenated container names.
        res = super().create(vals)
        if not res.shipping_identifier and res.container_ids:
            res.shipping_identifier = ''.join(res.container_ids.mapped('name'))
        return res
    @api.multi
    def _get_usage(self):
        # NOTE(review): the fallback branch assigns the default source
        # *location record*, while the else branch assigns a usage string --
        # confirm this mismatch is intended.
        for pick in self:
            if not pick.location_id:
                pick.usage = pick.picking_type_id.default_location_src_id
            else:
                pick.usage = pick.location_id.usage
@api.multi
def action_cancel(self):
for pick in self:
if pick.temp:
for move in pick.move_lines:
if move.state == "assigned":
move._do_unreserve()
move.state = "draft"
move.picking_id = False
pick.state = "cancel"
r |
SUSE-Cloud/nova | nova/tests/api/openstack/compute/contrib/test_aggregates.py | Python | apache-2.0 | 18,885 | 0.000688 | # Copyright (c) 2012 Citrix Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for the aggregates admin api."""
from webob import exc
from nova.api.openstack.compute.contrib import aggregates
from nova import context
from nova import exception
from nova import test
from nova.tests import matchers
# Canned aggregate fixtures returned by the stubbed API calls below.
AGGREGATE_LIST = [
        {"name": "aggregate1", "id": "1", "availability_zone": "nova1"},
        {"name": "aggregate2", "id": "2", "availability_zone": "nova1"},
        {"name": "aggregate3", "id": "3", "availability_zone": "nova2"},
        {"name": "aggregate1", "id": "4", "availability_zone": "nova1"}]
# NOTE(review): "host1, host2" is a single string -- possibly meant as two
# separate host entries.
AGGREGATE = {"name": "aggregate1",
             "id": "1",
             "availability_zone": "nova1",
             "metadata": {"foo": "bar"},
             "hosts": ["host1, host2"]}
class FakeRequest(object):
    """Minimal request double carrying only an admin nova.context."""
    environ = {"nova.context": context.get_admin_context()}
class AggregateTestCase(test.NoDBTestCase):
"""Test Case for aggregates admin api."""
    def setUp(self):
        # Build the controller under test and a fake admin request.
        super(AggregateTestCase, self).setUp()
        self.controller = aggregates.AggregateController()
        self.req = FakeRequest()
        self.context = self.req.environ['nova.context']
    def test_index(self):
        # The view must pass the request context through and return the
        # stubbed aggregate list unmodified.
        def stub_list_aggregates(context):
            if context is None:
                raise Exception()
            return AGGREGATE_LIST
        self.stubs.Set(self.controller.api, 'get_aggregate_list',
                       stub_list_aggregates)
        result = self.controller.index(self.req)
        self.assertEqual(AGGREGATE_LIST, result["aggregates"])
def test_create(self):
def stub_create_aggregate(context, name, availability_zone):
self.assertEqual(context, self.context, "context")
self.assertEqual("test", name, "name")
self.assertEqual("nova1", availability_zone, "availability_zone")
return AGGREGATE
self.stubs.Set(self.controller.api, "create_aggregate",
| stub_create_aggregate)
result = self.controller.create(self.req, {"aggregate":
{"name": "test",
"ava | ilability_zone": "nova1"}})
self.assertEqual(AGGREGATE, result["aggregate"])
    def test_create_with_duplicate_aggregate_name(self):
        # A duplicate aggregate name surfaces as HTTP 409 Conflict.
        def stub_create_aggregate(context, name, availability_zone):
            raise exception.AggregateNameExists(aggregate_name=name)
        self.stubs.Set(self.controller.api, "create_aggregate",
                       stub_create_aggregate)
        self.assertRaises(exc.HTTPConflict, self.controller.create,
                          self.req, {"aggregate":
                                     {"name": "test",
                                      "availability_zone": "nova1"}})
    def test_create_with_incorrect_availability_zone(self):
        # An invalid zone propagates the API exception unchanged.
        def stub_create_aggregate(context, name, availability_zone):
            raise exception.InvalidAggregateAction(action='create_aggregate',
                                                   aggregate_id="'N/A'",
                                                   reason='invalid zone')
        self.stubs.Set(self.controller.api, "create_aggregate",
                       stub_create_aggregate)
        self.assertRaises(exception.InvalidAggregateAction,
                          self.controller.create,
                          self.req, {"aggregate":
                                     {"name": "test",
                                      "availability_zone": "nova_bad"}})
    def test_create_with_no_aggregate(self):
        # A body missing the "aggregate" key is a 400.
        self.assertRaises(exc.HTTPBadRequest, self.controller.create,
                          self.req, {"foo":
                                     {"name": "test",
                                      "availability_zone": "nova1"}})
    def test_create_with_no_name(self):
        # A missing "name" key is a 400.
        self.assertRaises(exc.HTTPBadRequest, self.controller.create,
                          self.req, {"aggregate":
                                     {"foo": "test",
                                      "availability_zone": "nova1"}})
    def test_create_with_no_availability_zone(self):
        # A missing "availability_zone" key is a 400.
        self.assertRaises(exc.HTTPBadRequest, self.controller.create,
                          self.req, {"aggregate":
                                     {"name": "test",
                                      "foo": "nova1"}})
    def test_create_with_null_name(self):
        # An empty name is a 400.
        self.assertRaises(exc.HTTPBadRequest, self.controller.create,
                          self.req, {"aggregate":
                                     {"name": "",
                                      "availability_zone": "nova1"}})
    def test_create_with_name_too_long(self):
        # Names longer than 255 characters are a 400.
        self.assertRaises(exc.HTTPBadRequest, self.controller.create,
                          self.req, {"aggregate":
                                     {"name": "x" * 256,
                                      "availability_zone": "nova1"}})
    def test_create_with_extra_invalid_arg(self):
        # Unexpected top-level keys are rejected with a 400.
        self.assertRaises(exc.HTTPBadRequest, self.controller.create,
                          self.req, dict(name="test",
                                         availability_zone="nova1",
                                         foo='bar'))
    def test_show(self):
        # show must pass context and id through and wrap the result.
        def stub_get_aggregate(context, id):
            self.assertEqual(context, self.context, "context")
            self.assertEqual("1", id, "id")
            return AGGREGATE
        self.stubs.Set(self.controller.api, 'get_aggregate',
                       stub_get_aggregate)
        aggregate = self.controller.show(self.req, "1")
        self.assertEqual(AGGREGATE, aggregate["aggregate"])
    def test_show_with_invalid_id(self):
        # An unknown aggregate id surfaces as HTTP 404.
        def stub_get_aggregate(context, id):
            raise exception.AggregateNotFound(aggregate_id=2)
        self.stubs.Set(self.controller.api, 'get_aggregate',
                       stub_get_aggregate)
        self.assertRaises(exc.HTTPNotFound,
                          self.controller.show, self.req, "2")
    def test_update(self):
        # The request body values must reach the API layer unchanged.
        body = {"aggregate": {"name": "new_name",
                              "availability_zone": "nova1"}}
        def stub_update_aggregate(context, aggregate, values):
            self.assertEqual(context, self.context, "context")
            self.assertEqual("1", aggregate, "aggregate")
            self.assertEqual(body["aggregate"], values, "values")
            return AGGREGATE
        self.stubs.Set(self.controller.api, "update_aggregate",
                       stub_update_aggregate)
        result = self.controller.update(self.req, "1", body=body)
        self.assertEqual(AGGREGATE, result["aggregate"])
    def test_update_with_only_name(self):
        # Partial update carrying only a name is accepted.
        body = {"aggregate": {"name": "new_name"}}
        def stub_update_aggregate(context, aggregate, values):
            return AGGREGATE
        self.stubs.Set(self.controller.api, "update_aggregate",
                       stub_update_aggregate)
        result = self.controller.update(self.req, "1", body=body)
        self.assertEqual(AGGREGATE, result["aggregate"])
def test_update_with_only_availability_zone(self):
body = {"aggregate": {"availability_zone": "nova1"}}
def stub_update_aggregate(context, aggregate, values):
return AGGREGATE
self.stubs.Set(self.controller.api, "update_aggregate",
stub_update_aggregate)
result = self.controller.update(self.req, "1", body=body)
self.assertEqual(AGGREGATE, r |
brad999/nikita | client/modules/Gmail.py | Python | mit | 3,834 | 0 | # -*- coding: utf-8-*-
import imaplib
import email
import re
from dateutil import parser
# Keywords the intent matcher associates with this module.
WORDS = ["EMAIL", "INBOX"]
def getSender(email):
    """
    Returns the best-guess sender of an email.

    Arguments:
        email -- the email whose sender is desired

    Returns:
        Sender of the email.
    """
    raw_sender = email['From']
    match = re.match(r'(.*)\s<.*>', raw_sender)
    if match is None:
        # No "Name <address>" structure; fall back to the raw header value.
        return raw_sender
    return match.group(1)
def getDate(email):
    # Parse the RFC 2822 Date header into a datetime; dateutil's parser
    # copes with the many real-world date format variations.
    return parser.parse(email.get('date'))
def getMostRecentDate(emails):
    """
    Returns the most recent date of any email in the list provided.

    Arguments:
        emails -- a list of emails to check

    Returns:
        Date of the most recent email, or None when the list is empty.
    """
    dates = [getDate(e) for e in emails]
    # max() is O(n); the original reverse-sorted the whole list just to
    # read its first element.
    return max(dates) if dates else None
def fetchUnreadEmails(profile, since=None, markRead=False, limit=None):
    """
    Fetches a list of unread email objects from a user's Gmail inbox.

    Arguments:
        profile -- contains information related to the user (e.g., Gmail
                   address)
        since -- if provided, no emails before this date will be returned
        markRead -- if True, marks all returned emails as read in target inbox
        limit -- if set and more unread emails exist, only the unread count
                 (an int) is returned instead of message objects

    Returns:
        A list of unread email objects, or an int count when over *limit*.
    """
    conn = imaplib.IMAP4_SSL('imap.gmail.com')
    conn.debug = 0
    conn.login(profile['gmail_address'], profile['gmail_password'])
    # A read-only SELECT keeps the messages flagged unseen unless markRead.
    conn.select(readonly=(not markRead))
    msgs = []
    try:
        (retcode, messages) = conn.search(None, '(UNSEEN)')
        if retcode == 'OK' and messages != ['']:
            numUnread = len(messages[0].split(' '))
            if limit and numUnread > limit:
                return numUnread
            for num in messages[0].split(' '):
                # parse email RFC822 format
                ret, data = conn.fetch(num, '(RFC822)')
                msg = email.message_from_string(data[0][1])
                if not since or getDate(msg) > since:
                    msgs.append(msg)
    finally:
        # BUG FIX: the original leaked the connection on the early
        # over-limit return (and on any fetch error); always release it.
        conn.close()
        conn.logout()
    return msgs
def handle(text, mic, profile):
    """
    Responds to user-input, typically speech text, with a summary of
    the user's Gmail inbox, reporting on the number of unread emails
    in the inbox, as well as their senders.

    Arguments:
        text -- user-input, typically transcribed speech
        mic -- used to interact with the user (for both input and output)
        profile -- contains information related to the user (e.g., Gmail
                   address)

    (Repaired the extraction-garbled "send | ers" assignment.)
    """
    try:
        msgs = fetchUnreadEmails(profile, limit=5)
        # Over the limit, fetchUnreadEmails returns only the unread count.
        if isinstance(msgs, int):
            response = "You have %d unread emails." % msgs
            mic.say('A', response)
            return
        senders = [getSender(e) for e in msgs]
    except imaplib.IMAP4.error:
        mic.say('A', "I'm sorry. I'm not authenticated " +
                "to work with your Gmail.")
        return
    if not senders:
        mic.say('A', "You have no unread emails.")
    elif len(senders) == 1:
        mic.say('I', "You have one unread email from " + senders[0] + ".")
    else:
        response = "You have %d unread emails" % len(
            senders)
        # NOTE(review): unique_senders is prepared (with an "and" prefix)
        # but the spoken list below joins *senders* -- possibly unintended.
        unique_senders = list(set(senders))
        if len(unique_senders) > 1:
            unique_senders[-1] = 'and ' + unique_senders[-1]
            response += ". Senders include: "
            response += '...'.join(senders)
        else:
            response += " from " + unique_senders[0]
        mic.say('I', response)
def isValid(text):
    """
    Returns True if the input is related to email.

    Arguments:
        text -- user-input, typically transcribed speech
    """
    # Whole-word, case-insensitive match on "email".
    return re.search(r'\bemail\b', text, re.IGNORECASE) is not None
|
ESultanik/ZoningMaps | intersect_maps.py | Python | gpl-3.0 | 10,096 | 0.00733 | import bisect
import json
import progress
import zoning
def calculate_stream_size(stream):
    """Return the total size of *stream* in bytes.

    The current position is saved, the stream is seeked to its end to read
    the size, and the position is restored before returning.
    """
    old_pos = stream.tell()
    stream.seek(0, 2)
    # BUG FIX: the size was read from an undefined name `f` instead of the
    # `stream` parameter, which raised NameError on every call.
    size = stream.tell()
    stream.seek(old_pos, 0)
    return size
class NullFeatures(object):
    """Compact record of (map1, map2) index pairs whose intersection is empty.

    Pairs are flattened to single integers and stored as sorted, inclusive
    (first, last) ranges so membership tests reduce to a binary search.
    """

    def __init__(self, map1_len, map2_len):
        self._mapping = map1_len * map2_len
        self._max_mapping = self._mapping * map1_len + 1
        self.regions = []

    def _flatten(self, n, i):
        # Collapse a (map1 index, map2 index) pair into one integer key.
        return n * self._mapping + i

    def add_null_region(self, fromn, fromi, ton, toi):
        """Record that every pair from (fromn, fromi) to (ton, toi) is null."""
        region = (self._flatten(fromn, fromi), self._flatten(ton, toi))
        bisect.insort_right(self.regions, region)

    def is_null(self, n, i):
        """Return True when (n, i) lies inside a recorded null region."""
        key = self._flatten(n, i)
        idx = bisect.bisect_right(self.regions, (key, self._max_mapping))
        if idx == 0:
            return False
        return self.regions[idx - 1][1] >= key
def load_save_file(stream, logger = None):
    """Reload intersection state written by StateSaver.

    Each line of *stream* is one JSON record: the first carries the two map
    lengths, `[null, [n,i], [n,i]]` records a null region, and other
    records map a (map1, map2) index pair to features (or None).

    Returns a dict keyed by (n, i) tuples, plus the NullFeatures object
    under key None and the last record's index under "LAST_INDEX".
    """
    if hasattr(stream, "name"):
        # Only seekable file-like streams get a progress estimator.
        estimator = progress.TimeEstimator(logger, 0, calculate_stream_size(stream), precision = 1)
    else:
        estimator = None
    save = {}
    map1_len = None
    map2_len = None
    null_features = None
    last_index = None
    for line in stream:
        if estimator is not None:
            estimator.increment(len(line))
        data = json.loads(line)
        if map1_len is None:
            # First record: the map sizes written by record_map_sizes().
            map1_len = data["MAP1_LEN"]
            map2_len = data["MAP2_LEN"]
            save[None] = null_features = NullFeatures(map1_len, map2_len)
            continue;
        elif data[0] is None:
            # Null-region record: a compressed run of empty intersections.
            fromn, fromi = data[1]
            ton, toi = data[2]
            last_index = data[2]
            null_features.add_null_region(fromn, fromi, ton, toi)
            #for n in range(fromn, ton + 1):
            #    for i in range(fromi, map2_len):
            #        if n == ton and i == toi:
            #            break
            #        save[(n, i)] = None
        elif data[2] is None:
            save[(data[0], data[1])] = None
            last_index = data[:2]
        else:
            # Feature record: parse each serialized geometry back into a
            # zoning feature, preserving embedded None placeholders.
            features = []
            for f in data[2:]:
                if f is None:
                    features.append(None)
                else:
                    features.append(zoning.parse_feature(f))
            save[(data[0], data[1])] = features
            last_index = data[:2]
    save["LAST_INDEX"] = last_index
    return save
class StateSaver(object):
    """Append-only writer of intersection state, one JSON record per line.

    Consecutive null results are run-length compressed: the start index is
    remembered in `nulls_start` and a single `[null, from, to]` record is
    emitted when the run ends.  The stream is flushed every
    `flush_interval` records (and whenever a null run is closed).
    """

    def __init__(self, save_state_to, flush_interval = 50000):
        # save_state_to may be None, which turns the saver into a no-op.
        self.stream = save_state_to
        self.flush_interval = flush_interval
        self.current_state_flush = 0
        self.last_state_flush = 0
        self.nulls_start = None

    def record_map_sizes(self, map1_len, map2_len):
        # Must be the first record; load_save_file() relies on it.
        if self.stream is not None:
            self.stream.write("%s\n" % json.dumps({"MAP1_LEN" : map1_len, "MAP2_LEN" : map2_len}))

    def record(self, n, i, *args):
        """Record the result for index pair (n, i).

        With no *args* the pair is null (buffered into the current run);
        otherwise each arg is a feature (or None) serialized via to_geo().
        """
        if self.stream is None:
            return
        self.current_state_flush += 1
        flush = (self.current_state_flush - self.last_state_flush) >= self.flush_interval
        if args:
            # Close any pending null run before writing the feature record.
            if self.nulls_start:
                line = "[null,[%d,%d],[%d,%d]]" % (self.nulls_start[0], self.nulls_start[1], n, i)
                self.stream.write("%s\n" % line)
                flush = True
                self.nulls_start = None
            a = []
            for arg in args:
                if arg is None:
                    a.append(None)
                else:
                    a.append(arg.to_geo())
            line = json.dumps([n, i] + a)
            self.stream.write("%s\n" % line)
        else:
            # Null result: start (or extend) the current run.
            if self.nulls_start is None:
                self.nulls_start = [n, i]
            #line = json.dumps([n, i, None])
        if flush:
            self.last_state_flush = self.current_state_flush
            self.stream.flush()
def intersect(map1, map2, logger = None, previous_save = None, save_state_to = None, incremental_save_path = None, incremental_save_time = 600):
if logger is None:
logger = lambda m : None
map1 = zoning.ModifiableMap(map1)
map2 = zoning.ModifiableMap(map2)
estimator = progress.TimeEstimator(logger, 0, len(map1) * len(map2), precision = 2, interval = 3.0)
saver = StateSaver(save_state_to)
last_incremental_save = 0
if previous_save is not None:
logger("\r%s\rFast-forwarding using saved state...\n" % (' ' * 40))
last_n, last_i = previous_save["LAST_INDEX"]
estimator.end_value = (last_n - 1) * len(map2) + last_i
else:
saver.record_map_sizes(len(map1), len(map2))
for n, f1 in enumerate(map1):
if f1.geometry.is_empty:
continue
for i, f2 in enumerate(map2):
if previous_save is not None and n <= last_n:
if (n, i) in previous_save:
state = previous_save[(n,i)]
if state is None:
continue
| map2[i] = state[0]
if state | [1] is not None:
map2.append(state[1])
map1[n] = state[2]
estimator.increment()
if map1[n].geometry.is_empty:
estimator.increment(len(map2) - i)
break
continue
elif previous_save[None].is_null(n, i):
estimator.increment()
continue
elif n < last_n or (n == last_n and i <= last_i):
estimator.increment()
continue
elif n == last_n and i == last_i:
estimator.end_value = len(map1) * len(map2)
logger("\r%s\rDone.\n" % (' ' * 40))
estimator.update(n * len(map2) + i)
if f2.geometry.is_empty:
saver.record(n, i)
continue
try:
isect = f1.geometry.intersection(f2.geometry)
except Exception as e:
logger("\r%s\rError: %s\n" % (' ' * 40, e))
estimator.force_next_refresh()
continue
if isect.is_empty:
saver.record(n, i)
continue
area_delta = 10.0 # square meters
new_feature = zoning.ZoningFeature("%s->%s" % (f1.objectid, f2.objectid), f2.zoning, isect, f2.old_zoning + f1.zoning)
new_state = [None, None, None]
if new_feature.area() < area_delta:
# The intersection is less than area_delta square meters, so it's probably just floating point error.
# Skip it!
saver.record(n, i)
continue
elif f2.area() - area_delta < new_feature.area():
# the intersection is almost covering the entire preexisting area, so just assume that they're identical.
new_feature = zoning.ZoningFeature("%s->%s" % (f1.objectid, f2.objectid), f2.zoning, f2.geometry, f2.old_zoning + f1.zoning)
else:
# add a new feature containing the portion of f2 that does not intersect with f1
new_geom = f2.geometry.difference(new_feature.geometry)
if not new_geom.is_empty:
map2.append(zoning.ZoningFeature("%s.2" % f2.objectid, f2.zoning, new_geom, f2.old_zoning))
estimator.end_value = len(map1) * len(map2)
new_state[1] = map2[-1]
map2[i] = new_feature
new_state[0] = map2[i]
logger("\r%s\rPlot %s (%.02f acres) -> %s (%.02f acres) went from %s to %s\n" % (' ' * 40, f1.objectid, zoning.square_meters_to_acres(f1.area()), f2.objectid, zoning.square_meters_to_acres(new_feature.area()), f1.zoning, f2.zoning))
estimator.force_next_refresh()
# Delete the portion of overlap in f1 to hopefully speed up further comparisons:
# (This is making the assumption that the zoning regions in map2 are non-overlapping)
map1[n] = zoning.ZoningFeature(f1.objectid, f1.zoning, f1.geometry.difference(isect))
new_state[2] = map1[n]
saver.record |
mlowen/Pyke | pyke/meta.py | Python | mit | 2,310 | 0.040693 | import json
import os.path
from hashlib import md5
class TargetFile:
def __init__(self, p | ath, data = None):
self.path = path
self.hash = None
self.dependencies = {}
if data is not None:
self.hash = data['hash']
self.dependencies = data['dependencies']
def raw(self):
return {
'hash': self.hash,
'dependencies': self.dependencies
}
def clean(self):
self.hash = None
for dependency in self.dependencies:
self.dependencies[dependency] = None
def changed(self):
changed = False
# File Hash
computed_hash = | md5(open(self.path, 'rb').read()).hexdigest()
if self.hash is None or self.hash != computed_hash:
changed = True
self.hash = computed_hash
# File Dependencies
for dependency in self.dependencies:
stored_hash = self.dependencies[dependency]
computed_hash = md5(open(dependency, 'rb').read()).hexdigest()
if stored_hash is None or stored_hash != computed_hash:
changed = True
self.dependencies[dependency] = computed_hash
return changed
def set_dependencies(self, dependencies):
for dependency in [ d for d in dependencies if d not in self.dependencies ]:
self.dependencies[dependency] = None
for dependency in [d for d in self.dependencies if d not in dependencies]:
del self.dependencies[dependency]
class Target(dict):
def __init__(self, fp, data = {}):
dict.__init__(self)
self._file = fp
for f in data:
self[f] = TargetFile(f, data[f])
def raw(self):
data = {}
for f in self:
data[f] = self[f].raw()
return data
def tidyup(self, files):
for f in [tf for tf in self if tf not in files]:
del self[f]
def clean(self):
for f in self:
self[f].clean()
def __getitem__(self, key):
if key not in self:
self[key] = TargetFile(key)
return dict.__getitem__(self, key)
class File(dict):
def __init__(self, path):
dict.__init__(self)
self._path = path
if os.path.exists(self._path):
data = json.load(open(self._path))
if isinstance(data, dict):
for t in data: self[t] = Target(self, data[t])
def write(self):
data = {}
for t in self:
data[t] = self[t].raw()
json.dump(data, open(self._path, 'w'), indent = 4)
def __getitem__(self, key):
if key not in self:
self[key] = Target(self)
return dict.__getitem__(self, key)
|
dieseldev/diesel | examples/newwait.py | Python | bsd-3-clause | 530 | 0.00566 | import random
from diesel import quickstart, first, sleep, fork
from diesel.util.queue import Queue
def fire_random(queues):
while True:
sleep(1)
random.choice(queues).put(None)
def m | ake_and_wait():
q1 = Queue()
q2 = Queue()
both = [q1, q2]
fork(fire_random, both)
while True:
q, v = first(waits=both)
assert v is None
if q == q1:
print 'q1'
elif q == q2:
print 'q2'
else:
ass | ert 0
quickstart(make_and_wait)
|
kbarbary/sncosmo | sncosmo/tests/test_magsystems.py | Python | bsd-3-clause | 2,223 | 0 | # Licensed under a 3-clause BSD style license - see LICENSES
import math
import numpy as np
import pytest
from astropy import units as u
from numpy.testing import assert_allclose, assert_almost_equal
import sncosmo
def test_abmagsystem():
magsys = sncosmo.ABMagSystem()
m = magsys.band_flux_to_mag(1.0, 'bessellb')
f = magsys.band_mag_to_flux(m, 'bessellb')
assert_almost_equal(f, 1.0)
def test_spectralmagsystem():
"""Check that SpectralMagSystem matches ABMagSystem when the spectrum is
the same as AB."""
# construct a spectrum with same flux as AB: 3631 x 10^{-23} erg/s/cm^2/Hz
# Use a fine grid to reduce linear interpolation errors when integrating
# in Spectrum.bandflux().
wave = np.linspace(1000., 20000., 100000) # fine grid
flux = 3631.e-23 * np.ones_like(wave)
unit = u.erg / u.s / u.cm**2 / u.Hz
s = sncosmo.Spectrum(wave, flux, unit=unit)
magsys1 = sncosmo.SpectralMagSystem(s)
magsys2 = sncosmo.ABMagSystem()
assert_allclose(magsys1.zpbandflux('bessellb'),
magsys2.zpbandflux('bessellb'))
@pytest.mark.might_download
def test_csp_magsystem():
csp = sncosmo.get_magsystem('csp')
# filter zeropoints (copied from
# http://csp.obs.carnegiescience.edu/data/filters
# on 13 April 2017)
zps = {"cspu": 12.986,
"cspg": 15.111,
"cspr": 14.902,
"cspi": 14.535,
"cspb": 14.328,
"cspv3014": 14.437,
"cspv3009": 14.388,
"cspv9844": 14.439,
"cspys": 13.921,
"cspjs": 13.836,
"csphs": 13.510,
"cspk": 11.968,
"cspyd": 13.770,
"cspjd": 13.866,
"cs | phd": 13.502}
# The "zero point bandflux" should be the flux that corresponds to
# magnitude zero. So, 0 = zp - 2.5 log(F)
for band, zp in zps.items():
assert abs(2.5 * math.l | og10(csp.zpbandflux(band)) - zp) < 0.015
@pytest.mark.might_download
def test_compositemagsystem_band_error():
"""Test that CompositeMagSystem raises an error when band is
not in system."""
csp = sncosmo.get_magsystem('csp')
with pytest.raises(ValueError):
csp.zpbandflux('desi')
|
mecforlove/oj-web | app/utils/__init__.py | Python | apache-2.0 | 61 | 0 | #!/usr/bin/env | python
# -*- coding: utf-8 -*-
# @Author: mec
| |
ArtemBernatskyy/FundExpert.NET | mutual_funds/registration/forms.py | Python | gpl-3.0 | 4,680 | 0.001709 | """
Forms and validation code for user registration.
Note that all of these forms assume Django's bundle default ``User``
model; since it's not possible for a form to anticipate in advance the
needs of custom user models, you will need to write your own forms if
you're using a custom model.
"""
from __future__ import unicode_literals
from captcha.fields import ReCaptchaField
from django import forms
from django.db.models import Q
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.forms import AuthenticationForm
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.forms import UserCreationForm
from .users import UserModel, UsernameField
User = UserModel()
class RegistrationForm(UserCreationForm):
"""
Form for registering a new user account.
Validates that the requested username is not already in use, and
requires the password to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they
need, but should avoid defining a ``save()`` method -- the actual
saving of collected user data is delegated to the active
registration backend.
"""
required_css_class = 'required'
email = forms.EmailField(label=_("E-mail"))
class Meta:
model = User
fields = (UsernameField(), "email")
class RegistrationFormTermsOfService(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which adds a required checkbox
for agreeing to a site's Terms of Service.
"""
tos = forms.BooleanField(widget=forms.CheckboxInput,
label=_('I have read and agree to the Terms of Service'),
error_messages={'required': _("You must agree to the terms to register")})
class RegistrationFormUniqueEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which enforces uniqueness of
email addresses.
"""
def clean_email(self):
"""
Validate that the supplied email address is unique for the
site.
"""
if User.objects.filter(email__iexact=self.cleaned_data['email']):
raise forms.ValidationError(_("This email address is already in use. Please supply a different email address."))
return self.cleaned_data['email']
class RegistrationFormNoFreeEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which disallows registration with
email addresses from popular free webmail services; mod | erately
useful for preventing automated spam registrati | ons.
To change the list of banned domains, subclass this form and
override the attribute ``bad_domains``.
"""
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
'googlemail.com', 'hotmail.com', 'hushmail.com',
'msn.com', 'mail.ru', 'mailinator.com', 'live.com',
'yahoo.com']
def clean_email(self):
"""
Check the supplied email address against a list of known free
webmail domains.
"""
email_domain = self.cleaned_data['email'].split('@')[1]
if email_domain in self.bad_domains:
raise forms.ValidationError(_("Registration using free email addresses is prohibited. Please supply a different email address."))
return self.cleaned_data['email']
class ResendActivationForm(forms.Form):
required_css_class = 'required'
email = forms.EmailField(label=_("E-mail"))
class EmailAuthenticationForm(AuthenticationForm):
def clean_username(self):
username = self.data['username']
try:
username = User.objects.get(Q(email=username) | Q(username=username)).username
except ObjectDoesNotExist:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name},
)
return username
class UserProfileRegistrationForm(RegistrationForm):
first_name = forms.CharField(label=_('First name'), max_length=30, min_length=3)
last_name = forms.CharField(label=_('Last name'), max_length=30, min_length=3)
captcha = ReCaptchaField(attrs={'theme': 'clean'})
def clean_email(self):
"""
Check the supplied email address against a list of known free
webmail domains.
"""
if User.objects.filter(email__iexact=self.cleaned_data['email']):
raise forms.ValidationError(_("This email address is already in use. Please supply a different email address."))
return self.cleaned_data['email']
|
ESS-LLP/erpnext | erpnext/healthcare/doctype/healthcare_service_order_priority/test_healthcare_service_order_priority.py | Python | gpl-3.0 | 232 | 0.008621 | # -*- coding: | utf-8 -*-
# Copyright (c) 20 | 20, earthians and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestHealthcareServiceOrderPriority(unittest.TestCase):
pass
|
nielsole/ycombinator_newsletter | code/crawl.py | Python | mit | 916 | 0.004367 | #!/usr/bin/env python3
import requests
from database import is_in_db
import database
__author__ = 'flshrmb'
def handle(some_story, conn):
cursor = conn.cursor()
database.insert(some_story, cursor)
cursor.close()
conn.commit()
def main():
top_list = requests.get('https://hacker-news.firebaseio.com/v0/topstories.json')
if top_list.status_code != 200:
return # Maybe a | dd exception?
top_json = top_list.json()
conn = database.get | _con()
cur = conn.cursor()
database.create_table(cur)
cur.close()
for i, id in enumerate(top_json):
story_request = requests.get('https://hacker-news.firebaseio.com/v0/item/{0}.json'.format(id))
if story_request.status_code != 200:
continue
handle(story_request.json(), conn)
print('Number: {0}'.format(i))
if i >= 30:
break
if __name__ == "__main__":
main()
|
suutari-ai/shoop | shuup_tests/default_reports/test_default_reports.py | Python | agpl-3.0 | 36,452 | 0.002826 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import json
import random
from datetime import date, datetime, timedelta
from decimal import Decimal
import pytest
import six
from django.utils.encoding import force_text
from shuup.apps.provides import override_provides
from shuup.core.models import (
CustomCarrier, FixedCostBehaviorComponent, get_person_contact, Order
)
from shuup.core.order_creator import OrderCreator
from shuup.default_reports.reports import (
CustomerSalesReport, NewCustomersReport, ProductSalesReport, SalesPerHour,
SalesReport, ShippingReport, TaxesReport, TotalSales, RefundedSalesReport
)
from shuup.reports.admin_module.views import ReportView
from shuup.reports.forms import DateRangeChoices
from shuup.reports.writer import get_writer_instance
from shuup.testing.factories import (
CompanyFactory, create_order_with_product, create_product,
create_random_order, create_random_person, get_address,
get_default_payment_method, get_default_product, get_default_shop,
get_default_supplier, get_default_tax_class, get_initial_order_status,
get_test_tax, OrderLineType, UserFactory, get_default_shipping_method
)
from shuup.testing.utils import apply_request_middleware
from shuup_tests.core.test_basic_order import create_order
from shuup_tests.reports.test_reports import initialize_report_test
from shuup_tests.utils.basketish_order_source import BasketishOrderSource
from shuup.utils.money import Money
class InfoTest(object):
def __init__(self, **kwargs):
for k, v in six.iteritems(kwargs):
setattr(self, k, v)
def initialize_simple_report(cls, data_overrides={}):
product_price = 100
product_count = 2
tax_rate = Decimal("0.10")
line_count = 1
expected_taxful_total, expected_taxless_total, shop, order = initialize_report_test(
product_price, product_count, tax_rate, line_count)
data = {
"report": cls.get_name(),
"shop": shop.pk,
"date_range": DateRangeChoices.THIS_YEAR,
"writer": "json",
"force_download": 1,
}
data.update(data_overrides)
report = cls(**data)
writer = get_writer_instance(data["writer"])
response = writer.get_response(report=report)
if hasattr(response, "render"):
response.render()
json_data = json.loads(response.content.decode("utf-8"))
return InfoTest(**{
"expected_taxful_total": expected_taxful_total,
"expected_taxless_total": expected_taxless_total,
"json_data": json_data,
"product_count": product_count,
"shop": shop,
"writer": writer,
"report": report,
"order": order,
})
@pytest.mark.django_db
def test_sales_report(rf):
test_info = initialize_simple_report(SalesReport)
assert force_text(SalesReport.title) in test_info.json_data.get("heading")
totals = test_info.json_data.get("tables")[0].get("totals")
return_data = test_info.json_data.get("tables")[0].get("data")[0]
assert int(totals.get("product_count", 0)) == test_in | fo.product_count
assert int(return_data.get("product_count", 0)) == test_info.product_count
assert int(totals.get("order_count", 0)) == 1
assert int(return_data.get("order_count", 0)) == 1
assert str(test_info.expected_taxless_total) in totals.get("taxless_total", "0")
assert str(test_info.expected_taxful_total) in totals.get | ("taxful_total", "0")
@pytest.mark.django_db
def test_total_sales_report(rf):
test_info = initialize_simple_report(TotalSales)
assert force_text(TotalSales.title) in test_info.json_data.get("heading")
return_data = test_info.json_data.get("tables")[0].get("data")[0]
assert return_data.get("currency") == test_info.shop.currency
assert return_data.get("name") == test_info.shop.name
assert int(return_data.get("order_amount")) == 1
assert str(test_info.expected_taxful_total) in return_data.get("total_sales")
@pytest.mark.django_db
def test_total_sales_customers_report(rf):
shop = get_default_shop()
supplier = get_default_supplier()
p1 = create_product("p1", shop=shop, supplier=supplier, default_price="5")
p2 = create_product("p2", shop=shop, supplier=supplier, default_price="20")
# orders for person 1
person1 = create_random_person()
order1 = create_random_order(customer=person1, completion_probability=1, products=[p1, p2])
order2 = create_random_order(customer=person1, completion_probability=1, products=[p1, p2])
# orders for person 2
person2 = create_random_person()
order3 = create_random_order(customer=person2, completion_probability=1, products=[p1, p2])
order4 = create_random_order(customer=person2, completion_probability=1, products=[p1, p2])
order5 = create_random_order(customer=person2, completion_probability=1, products=[p1, p2])
# pay orders
[o.create_payment(o.taxful_total_price) for o in Order.objects.all()]
data = {
"report": TotalSales.get_name(),
"shop": shop.pk,
"date_range": DateRangeChoices.ALL_TIME,
"writer": "json",
"force_download": 1,
}
report = TotalSales(**data)
writer = get_writer_instance(data["writer"])
response = writer.get_response(report=report)
if hasattr(response, "render"):
response.render()
json_data = json.loads(response.content.decode("utf-8"))
assert force_text(TotalSales.title) in json_data.get("heading")
data = json_data.get("tables")[0].get("data")[0]
avg_sales = (
order1.taxful_total_price
+ order2.taxful_total_price
+ order3.taxful_total_price
+ order4.taxful_total_price
+ order5.taxful_total_price
) / Decimal(5)
assert int(data["customers"]) == 2
assert int(data["order_amount"]) == 5
assert data["customer_avg_sale"] == str(avg_sales.value.quantize(Decimal('0.01')))
@pytest.mark.django_db
def test_total_sales_report_with_zero_total(rf):
new_customer = create_random_person() # This customer shouldn't have any sales
test_info = initialize_simple_report(TotalSales, data_overrides={"customer": [new_customer]})
assert force_text(TotalSales.title) in test_info.json_data.get("heading")
return_data = test_info.json_data.get("tables")[0].get("data")[0]
assert return_data.get("currency") == test_info.shop.currency
assert return_data.get("name") == test_info.shop.name
assert int(return_data.get("order_amount")) == 0
assert str(test_info.shop.create_price(0).as_rounded().value) in return_data.get("total_sales")
@pytest.mark.django_db
def test_total_sales_per_hour_report(rf):
test_info = initialize_simple_report(SalesPerHour)
assert force_text(SalesPerHour.title) in test_info.json_data.get("heading")
return_data = test_info.json_data.get("tables")[0].get("data")
order_hour = test_info.order.order_date.strftime("%H")
assert len(return_data) == 24 # all hours present
assert min([int(data_item.get("hour")) for data_item in return_data]) == 0
assert max([int(data_item.get("hour")) for data_item in return_data]) == 23
for hour_data in return_data:
if int(hour_data.get("hour")) == int(order_hour):
assert str(test_info.expected_taxful_total) in hour_data.get("total_sales")
else:
assert hour_data.get("total_sales") == "0"
@pytest.mark.django_db
def test_contact_filters(rf, admin_user):
shop = get_default_shop()
products_per_order = 5
request = rf.get('/')
request.shop = shop
apply_request_middleware(request)
product = get_default_product()
customer = get_person_contact(admin_user)
create_order(request, creator=admin_user, customer=customer, product=product)
order_one = Order.objects.first()
user = UserFactory()
second_customer = get_person_contact(user)
create_order(request, creator=admin_user, customer=second_customer, product=prod |
zde/librepo | tests/python/tests/test_yum_repo_downloading.py | Python | gpl-2.0 | 58,125 | 0.003802 | from tests.base import TestCaseWithFlask, MOCKURL, TEST_DATA
from tests.servermock.server import app
import tests.servermock.yum_mock.config as config
import os.path
import unittest
import tempfile
import shutil
import gpgme
import librepo
PUB_KEY = TEST_DATA+"/key.pub"
class TestCaseYumRepoDownloading(TestCaseWithFlask):
application = app
# @classmethod
# def setUpClass(cls):
# super(TestCaseYumRepoDownloading, cls).setUpClass()
def setUp(self):
self.tmpdir = tempfile.mkdtemp(prefix="librepotest-")
# Import public key into the temporary gpg keyring
self._gnupghome = os.environ.get('GNUPGHOME')
gpghome = os.path.join(self.tmpdir, "keyring")
os.mkdir(gpghome, 0o700)
os.environ['GNUPGHOME'] = gpghome
self.ctx = gpgme.Context()
self.ctx.import_(open(PUB_KEY, 'rb'))
def tearDown(self):
self.ctx.delete(self.ctx.get_key('22F2C4E9'))
if self._gnupghome is None:
os.environ.pop('GNUPGHOME')
else:
os.environ['GNUPGHOME'] = self._gnupghome
shutil.rmtree(self.tmpdir)
def test_download_repo_01(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.REPO_YUM_01_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
timestamp = r.getinfo(librepo.LRR_YUM_TIMESTAMP)
self.assertEqual(yum_repo,
{ #'deltainfo': None,
'destdir': self.tmpdir,
'filelists': self.tmpdir+'/repodata/aeca08fccd3c1ab831e1df1a62711a44ba1922c9-filelists.xml.gz',
'filelists_db': self.tmpdir+'/repodata/4034dcea76c94d3f7a9616779539a4ea8cac288f-filelists.sqlite.bz2',
#'group': None,
#'group_gz': None,
#'origin': None,
'other': self.tmpdir+'/repodata/a8977cdaa0b14321d9acfab81ce8a85e869eee32-other.xml.gz',
'other_db': self.tmpdir+'/repodata/fd96942c919628895187778633001cff61e872b8-other.sqlite.bz2',
#'prestodelta': None,
'primary': self.tmpdir+'/repodata/4543ad62e4d86337cd1949346f9aec976b847b58-primary.xml.gz',
'primary_db': self.tmpdir+'/repodata/735cd6294df08bdf28e2ba113915ca05a151118e-primary.sqlite.bz2',
'repomd': self.tmpdir+'/repodata/repomd.xml',
#'updateinfo': None,
'url': url,
'signature': None,
'mirrorlist': None,
'metalink': None}
)
self.assertEqual(yum_repomd,
{ 'content_tags': [],
#'deltainfo': None,
'distro_tags': [],
'filelists': {
'checksum': 'aeca08fccd3c1ab831e1df1a62711a44ba1922c9',
'checksum_open': '52d30ae3162ca863c63c345ffdb7f0e10c1414a5',
'checksum_open_type': 'sha1',
'checksum_type': 'sha1',
'db_version': 0,
'location_href': 'repodata/aeca08fccd3c1ab831e1df1a62711a44ba1922c9-filelists.xml.gz',
'size': 43310,
'size_open': 735088,
'timestamp': 1347459930},
'filelists_db': {
'checksum': '4034dcea76c94d3f7a9616779539a4ea8cac288f',
'checksum_open': '94 | 9c6b7b605b2bc66852630c841a5003603ca5b2',
'checksum_open_type': 'sha1',
'checksum_type': 'sha1',
'db_version': 10,
'location_href': 'repodata/4034dcea76c94d3f7a9616779539a4ea8cac288f-filelists.sqlite.b | z2',
'size': 22575,
'size_open': 201728,
'timestamp': 1347459931},
#'group': None,
#'group_gz': None,
#'origin': None,
'other': {
'checksum': 'a8977cdaa0b14321d9acfab81ce8a85e869eee32',
'checksum_open': '4b5b8874fb233a626b03b3260a1aa08dce90e81a',
'checksum_open_type': 'sha1',
'checksum_type': 'sha1',
'db_version': 0,
'location_href': 'repodata/a8977cdaa0b14321d9acfab81ce8a85e869eee32-other.xml.gz',
'size': 807,
'size_open': 1910,
'timestamp': 1347459930},
'other_db': {
'checksum': 'fd96942c919628895187778633001cff61e872b8',
'checksum_open': 'c5262f62b6b3360722b9b2fb5d0a9335d0a51112',
'checksum_open_type': 'sha1',
'checksum_type': 'sha1',
'db_version': 10,
'location_href': 'repodata/fd96942c919628895187778633001cff61e872b8-other.sqlite.bz2',
'size': 1407,
'size_open': 8192,
'timestamp': 1347459931},
#'prestodelta': None,
'primary': {
'checksum': '4543ad62e4d86337cd1949346f9aec976b847b58',
'checksum_open': '68457ceb8e20bda004d46e0a4dfa4a69ce71db48',
'checksum_open_type': 'sha1',
'checksum_type': 'sha1',
'db_version': 0,
'location_href': 'repodata/4543ad62e4d86337cd1949346f9aec976b847b58-primary.xml.gz',
'size': 936,
'size_open': 3385,
'timestamp': 1347459930},
'primary_db': {
'checksum': '735cd6294df08bdf28e2ba113915ca05a151118e',
'checksum_open': 'ba636386312e1b597fc4feb182d04c059b2a77d5',
'checksum_open_type': 'sha1',
'checksum_type': 'sha1',
'db_version': 10,
'location_href': 'repodata/735cd6294df08bdf28e2ba113915ca05a151118e-primary.sqlite.bz2',
'size': 2603,
'size_open': 23552,
'timestamp': 1347459931},
'repo_tags': [],
'revision': '1347459931',
#'updateinfo': None
}
)
self.assertEqual(timestamp, 1347459931)
# Test if all mentioned files really exist
self.assertTrue(os.path.isdir(yum_repo["destdir"]))
for key in yum_repo:
if yum_repo[key] and (key not in ("url", "destdir")):
self.assertTrue(os.path.isfile(yum_repo[key]))
self.assertFalse(h.mirrors)
self.assertFalse(h.metalink)
def test_download_repo_02(self):
h = librepo.Handle()
r = librepo.Result()
url = "%s%s" % (MOCKURL, config.REPO_YUM_02_PATH)
h.setopt(librepo.LRO_URLS, [url])
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_DESTDIR, self.tmpdir)
h.perform(r)
yum_repo = r.getinfo(librepo.LRR_YUM_REPO)
yum_repomd = r.getinfo(librepo.LRR_YUM_REPOMD)
self.assertEqual(yum_repo,
{'deltainfo': self.tmpdir+'/repodata/32d3307b672abf7356061912fa3dc9b54071c03a75c671111c1c8daf5ed1eb7e-deltainfo.xml.gz',
'destdir': self.tmpdir,
'filelists': self.tmpdir+'/repodata/2431efa18b5de6bfddb87da2a526362108226752d46ef3a298cd4bf39ba16b1d-filelists.xml.gz',
'filelists_db': self.tmpdir+'/repodata/5b37f89f9f4474801ec5f23dc30d3d6cf9cf663cb75a6656aaa864a041836ffe-filelists.sqlite.bz2',
'group': self.tmpdir+'/repodata/5b3b362d644e8fa3b359db57be0ff5de8a08365ce9a59cddc3205244a968231e-comps.xml',
'group_gz': self.tmpdir+'/repodata/c395ae7d8a9117f4e81aa23e37fb9da9865b50917f5f701b50d422875bb0cb14-comps.xml.gz',
'origin': self.tmpdir+'/repodata/c949d2b2371fab1a03d03b41057004caf1133a56e4c9236f63b3163ad358c941-pkgorigins.gz',
'other': self.tmpdir+'/repodata/76b2cf |
SciTools/iris | lib/iris/tests/unit/fileformats/pp/test__interpret_field.py | Python | lgpl-3.0 | 5,255 | 0 | # Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""Unit tests for the `iris.fileformats.pp._interpret_field` function."""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests # isort:skip
from copy import deepcopy
from unittest import mock
import numpy as np
import iris
import iris.fileformats.pp as pp
class Test__interpret_fields__land_packed_fields(tests.IrisTest):
def setUp(self):
return_value = ("dummy", 0, 0, np.dtype("f4"))
core_data = mock.MagicMock(return_value=return_value)
# A field packed using a land/sea mask.
self.pp_field = mock.Mock(
lblrec=1,
lbext=0,
lbuser=[0] * 7,
lbrow=0,
lbnpt=0,
raw_lbpack=21,
lbpack=mock.Mock(n1=0, n2=2, n3=1),
core_data=core_data,
)
# The field specifying the land/seamask.
lbuser = [None, None, None, 30, None, None, 1] # m01s00i030
self.land_mask_field = mock.Mock(
lblrec=1,
lbext=0,
lbuser=lbuser,
lbrow=3,
lbnpt=4,
raw_lbpack=0,
core_data=core_data,
)
def test_non_deferred_fix_lbrow_lbnpt(self):
# Checks the fix_lbrow_lbnpt is applied to fields which are not
# deferred.
f1, mask = self.pp_field, self.land_mask_field
self.assertEqual(f1.lbrow, 0)
self.assertEqual(f1.lbnpt, 0)
list(pp._interpret_fields([mask, f1]))
self.assertEqual(f1.lbrow, 3)
self.assertEqual(f1.lbnpt, 4)
# Check the data's shape has been updated too.
self.assertEqual(f1.data.shape, (3, 4))
def test_fix_lbrow_lbnpt_no_mask_available(self):
# Check a warning is issued when loading a land masked field
# without a land mask.
with mock.patch("warnings.warn") as warn:
list(pp._interpret_fields([self.pp_field]))
self.assertEqual(warn.call_count, 1)
warn_msg = warn.call_args[0][0]
self.assertTrue(
warn_msg.startswith(
"Landmask compressed fields " "existed without a landmask"
),
"Unexpected warning message: {!r}" | .format(warn_msg),
)
def test_deferred_mask_field(self):
# Check that the order of the load is yielded last if the mask
# hasn't yet been seen.
result = list(
pp._interpret_fields([self.pp_field, self | .land_mask_field])
)
self.assertEqual(result, [self.land_mask_field, self.pp_field])
def test_not_deferred_mask_field(self):
# Check that the order of the load is unchanged if a land mask
# has already been seen.
f1, mask = self.pp_field, self.land_mask_field
mask2 = deepcopy(mask)
result = list(pp._interpret_fields([mask, f1, mask2]))
self.assertEqual(result, [mask, f1, mask2])
def test_deferred_fix_lbrow_lbnpt(self):
# Check the fix is also applied to fields which are deferred.
f1, mask = self.pp_field, self.land_mask_field
list(pp._interpret_fields([f1, mask]))
self.assertEqual(f1.lbrow, 3)
self.assertEqual(f1.lbnpt, 4)
@tests.skip_data
def test_landsea_unpacking_uses_dask(self):
# Ensure that the graph of the (lazy) landsea-masked data contains an
# explicit reference to a (lazy) landsea-mask field.
# Otherwise its compute() will need to invoke another compute().
# See https://github.com/SciTools/iris/issues/3237
# This is too complex to explore in a mock-ist way, so let's load a
# tiny bit of real data ...
testfile_path = tests.get_data_path(
["FF", "landsea_masked", "testdata_mini_lsm.ff"]
)
landsea_mask, soil_temp = iris.load_cubes(
testfile_path, ("land_binary_mask", "soil_temperature")
)
# Now check that the soil-temp dask graph correctly references the
# landsea mask, in its dask graph.
lazy_mask_array = landsea_mask.core_data()
lazy_soildata_array = soil_temp.core_data()
# Work out the main dask key for the mask data, as used by 'compute()'.
mask_toplev_key = (lazy_mask_array.name,) + (0,) * lazy_mask_array.ndim
# Get the 'main' calculation entry.
mask_toplev_item = lazy_mask_array.dask[mask_toplev_key]
# This should be a task (a simple fetch).
self.assertTrue(callable(mask_toplev_item[0]))
# Get the key (name) of the array that it fetches.
mask_data_name = mask_toplev_item[1]
# Check that the item this refers to is a PPDataProxy.
self.assertIsInstance(
lazy_mask_array.dask[mask_data_name], pp.PPDataProxy
)
# Check that the soil-temp graph references the *same* lazy element,
# showing that the mask+data calculation is handled by dask.
self.assertIn(mask_data_name, lazy_soildata_array.dask.keys())
if __name__ == "__main__":
tests.main()
|
GoelDeepak/dcos | packages/dcos-integration-test/extra/test_meta.py | Python | apache-2.0 | 3,623 | 0.000552 | """
Tests for the integration test suite itself.
"""
import logging
import os
import subprocess
from collections import defaultdict
from pathlib import Path
from typing import Set
import yaml
from get_test_group import patterns_from_group
__maintainer__ = 'adam'
__contact__ = 'tools-infra-team@mesosphere.io'
log = logging.getLogger(__file__)
def _tests_from_pattern(ci_pattern: str) -> Set[str]:
"""
From a CI pattern, get all tests ``pytest`` would collect.
"""
tests = set([]) # type: Set[str]
args = [
'pytest',
'--disable-pytest-warnings',
'--collect-only',
ci_pattern,
'-q',
]
# Test names will not be in ``stderr`` so we ignore that.
result = subprocess.run(
args=args,
stdout=subprocess.PIPE,
env={**os.environ, **{'PYTHONIOENCODING': 'UTF-8'}},
)
output = result.stdout
for line in output.splitlines():
if b'error in' in line:
message = (
'Error collecting tests for pattern "{ci_pattern}". '
'Full output:\n'
'{output}'
).format(
ci_pattern=ci_pattern,
output=output,
)
raise Exception(message)
# Whitespace is important to avoid confusing pytest warning messages
# with test names. For example, the pytest output may contain '3 tests
# deselected' which would conflict with a test file called
# test_agent_deselected.py if we ignored whitespace.
if (
line and
# Some tests show warnings on collection.
b' warnings' not in line and
# Some tests are skipped on collection.
b'skipped in' not in line and
# Some tests are deselected by the ``pytest.ini`` configuration.
b' deselected' not in line and
not line.startswith(b'no tests ran in')
):
tests.add(line.decode())
return tests
def test_test_groups() -> None:
"""
The test suite is split into various "groups".
This test confirms that the groups together contain all tests, and each
test is collected only once.
"""
test_group_file = Path('test_groups.yaml')
test_group_file_contents = test_group_file.read_text()
test_groups = yaml.load(test_group_file_contents)['groups']
test_patterns = []
for group in test_groups:
test_patterns += patterns_from_group(group_name=group)
# Turn this into a list otherwise we can't cannonically state whether every test was collected _exactly_ once :-)
tests_to_patterns = defaultdict(list) # type: Mapping[str, List]
for pattern in test_patterns:
tests = _tests_from_pattern(ci_pattern=pattern)
for test in tests:
tests_to_patterns[test].append(pattern)
errs = []
for test_name, patterns in tests_to_patterns.items():
message = (
'Test "{test_name}" will be run once for each pattern in '
'{patterns}. '
'Each test should be run only once.'
).format(
test_name=test_name,
patterns=patterns,
)
if len(patterns) != 1:
| assert len(patterns) != 1, message
errs.append(message)
if errs:
for message in errs:
log.error(message)
| raise Exception("Some tests are not collected exactly once, see errors.")
all_tests = _tests_from_pattern(ci_pattern='')
assert tests_to_patterns.keys() - all_tests == set()
assert all_tests - tests_to_patterns.keys() == set()
|
anhstudios/swganh | data/scripts/templates/object/draft_schematic/armor/component/shared_heavy_armor_layer_environmental.py | Python | mit | 476 | 0.046218 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the Intangible template for the heavy armor environmental layer
    draft schematic and return it to the object factory.

    :param kernel: engine kernel handle (unused by this template).
    """
    result = Intangible()
    result.template = "object/draft_schematic/armor/component/shared_heavy_armor_layer_environmental.iff"
    result.attribute_template_id = -1
    result.stfName("string_id_table","")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
kashifiqb/Aspose.Email-for-Java | Plugins/Aspose.Email Java for Python/tests/ProgrammingOutlook/AddMapiJournalToPST/AddMapiJournalToPST.py | Python | mit | 672 | 0.00744 | # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the | template in the editor.
#if __name__ == "__main__":
# print "Hello World"
from ProgrammingOutlook import AddMapiJournalToPST
import jpype
import os.path
asposeapispath = os.path.join(os.path.abspath("./../../../"), "lib/")
dataDir = os.path.join(os.path.abspath("./"), "data/")
print "You need to put your Aspose.Email for Java APIs .jars in this folder:\n"+asposeapispath
#print dataDir
jpype.startJVM(jpype.getDefaultJVMPath(), "-Djava.ext.dirs=%s" % asposeapispath)
hw = AddMapiJournalToPST(dataDir)
hw.main() |
nejucomo/preconditions | tests.py | Python | mit | 7,012 | 0.000143 | from unittest import TestCase, main
from preconditions import PreconditionError, preconditions
class PreconditionTestBase (TestCase):
def assertPreconditionFails(self, target, *args, **kw):
self.assertRaises(PreconditionError, target, *args, **kw)
def assertPreconditionFailsRegexp(self, rgx, target, *args, **kw):
self.assertRaisesRegexp(PreconditionError, rgx, target, *args, **kw)
class InvalidPreconditionTests (PreconditionTestBase):
def test_varargs(self):
self.assertPreconditionFailsRegexp(
(r'^Invalid precondition must not accept \* nor \*\* args:\n' +
r' lambda \*a: True,\n$'),
preconditions,
lambda *a: True,
)
def test_kwargs(self):
self.assertPreconditionFailsRegexp(
(r'^Invalid precondition must not accept \* nor \*\* args:\n' +
r' lambda \*\*kw: True,\n$'),
preconditions,
lambda **kw: True,
)
def test_unknown_nondefault_param(self):
p = preconditions(lambda x: True)
self.assertPreconditionFailsRegexp(
(r"^Invalid precondition refers to unknown parameter 'x':\n" +
r" p = preconditions\(lambda x: True\)\n" +
r"Known parameters: \['a', 'b'\]\n$"),
p,
lambda a, b: a+b)
def test_default_masks_param(self):
p = preconditions(lambda a, b='a stored value': True)
self.assertPreconditionFailsRegexp(
(r"^Invalid precondition masks parameter 'b':\n" +
r" p = preconditions\(lambda a, b='a stored value': True\)\n" +
r"Known parameters: \['a', 'b'\]\n$"),
p,
lambda a, b: a+b)
class BasicPreconditionTests (PreconditionTestBase):
def test_basic_precondition(self):
@preconditions(lambda i: isinstance(i, int) and i > 0)
def uint_pred(i):
return i-1
# Not greater than 0:
self.assertPreconditionFails(uint_pred, 0)
# Not an int:
self.assertPreconditionFails(uint_pred, 1.0)
# Test a successful call:
self.assertEqual(0, uint_pred(1))
def test_relational_precondition(self):
@preconditions(lambda a, b: a < b)
def inc_range(a, b):
return range(a, b)
self.assertPreconditionFails(inc_range, 3, 3)
self.assertPreconditionFails(inc_range, 5, 3)
self.assertEqual([3, 4], inc_range(3, 5))
def test_multiple_preconditions(self):
@preconditions(
lambda a: isinstance(a, float),
lambda b: isinstance(b, int),
lambda b: b > 0,
lambda a, b: a < b,
)
def f(a, b):
return a ** b
self.assertPreconditionFails(f, 3, 5)
self.assertPreconditionFails(f, 3.0, 5.0)
self.assertPreconditionFails(f, 3.0, -2)
self.assertPreconditionFails(f, 3.0, 2)
self.assertEqual(0.25, f(0.5, 2))
def test_zero_preconditions(self):
p = preconditions()
def f():
return None
g = p(f)
self.assertIs(None, f())
self.assertIs(None, g())
self.assertIs(f, g)
def test_precondition_with_default(self):
@preconditions(lambda a, _s=[2, 3, 5]: a in _s)
def f(a):
return a
self.assertPreconditionFails(f, 4)
self.assertEqual(3, f(3))
class MethodPreconditionTests (PreconditionTestBase):
def test_invariant_precondition(self):
class C (object):
@preconditions(lambda self: self.key in self.items)
def get(self):
return self.items[self.key]
i = C()
i.items = {'a': 'apple', 'b': 'banana'}
i.key = 'X'
self.assertPreconditionFails(i.get)
i.key = 'b'
self.assertEqual('banana', i.get())
def test__init__(self):
class C (object):
@preconditions(lambda name: isinstance(name, unicode))
def __init__(self, name):
self.name = name
self.assertPreconditionFails(C, b'Not unicode!')
self.assertEqual(u'Alice', C(u'Alice').name)
def test_old_school__init__(self):
class C:
@preconditions(lambda name: isinstance(name, unicode))
def __init__(self, name):
self.name = name
self.assertPreconditionFails(C, b'Not unicode!')
self.assertEqual(u'Alice', C(u'Alice').name)
def test__new__(self):
class C (tuple):
@preconditions(lambda a, b: a < b)
def __new__(self, a, b):
return tuple.__new__(self, (a, b))
self.assertPreconditionFails(C, 5, 3)
self.assertEqual((3, 5), C(3, 5))
def test_old_school_method(self):
class OldSchool:
def __init__(self, x):
self.x = x
@preconditions(lambda self, x: self.x < x)
def increase_to(self, x):
self.x = x
obj = OldSchool(5)
self.assertPreconditionFails(obj.increase_to, 3)
obj.increase_to(7)
self.assertPreconditionFails(obj.increase_to, 6)
class PreconditionInterfaceTests (PreconditionTestBase):
def test__name__(self):
@preconditions(lambda x: True)
def f(x):
return x
self.assertEqual('f', f.__name__)
def test_zero_preconditions__name__(self):
@preconditions()
def f(x):
return x
self.assertEqual('f', f.__name__)
def test_nopre(self):
def assert_false():
assert False
p = preconditions(lambda x: assert_false())
def f(x):
return 2*x
g = p(f)
self.assertIs(f, g.nopre)
self.assertEqual(6, g.nopre(3))
def test_zero_preconditions_nopre(self):
p = preconditions()
def f(x):
return 2*x
g = p(f)
self.assertIs(f, g.nopre)
self.assertEqual(6, g.nopre(3))
class ErrorReportingTests (PreconditionTestBase):
    """Verify that a failed precondition reports the offending source line."""

    def test_single_predicate_single_line_failure_includes_source(self):
        @preconditions(lambda x: x != 7)
        def f(x):
            return x

        # The error message must include both the call arguments and the
        # source of the failing predicate.
        self.assertPreconditionFailsRegexp(
            (r'^Precondition failed in call ' +
             r'<function f at 0x[0-9a-fA-F]+>\(x=7\):\n' +
             r'    @preconditions\(lambda x: x != 7\)\n$'),
            f,
            7)

    def test_multiple_line_multiple_predicates_includes_specific_source(self):
        @preconditions(
            lambda x: x > 0,
            lambda x: isinstance(x, int),
        )
        def f(x):
            return x

        # Only the specific predicate that failed should be echoed back.
        self.assertPreconditionFailsRegexp(
            (r'Precondition failed in call ' +
             r'<function f at 0x[0-9a-fA-F]+>\(x=6\.5\):\n' +
             r'    lambda x: isinstance\(x, int\),\n$'),
            f,
            6.5)
if __name__ == '__main__':
main()
|
wbond/certvalidator | certvalidator/path.py | Python | mit | 6,777 | 0.000738 | # coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from asn1crypto import pem, x509
from ._errors import pretty_message
from ._types import byte_cls, type_name
from .errors import DuplicateCertificateError
class ValidationPath():
    """
    Represents a path going towards an end-entity certificate
    """

    # A list of asn1crypto.x509.Certificate objects, starting with a trust root
    # and chaining to an end-entity certificate
    _certs = None

    # A set of asn1crypto.x509.Certificate.issuer_serial byte strings of
    # certificates that are already in ._certs
    _cert_hashes = None

    def __init__(self, end_entity_cert=None):
        """
        :param end_entity_cert:
            An asn1crypto.x509.Certificate object for the end-entity certificate
        """

        self._certs = []
        self._cert_hashes = set()
        if end_entity_cert:
            self.prepend(end_entity_cert)

    @staticmethod
    def _coerce_cert(cert):
        """
        Normalizes a certificate argument into an asn1crypto.x509.Certificate,
        accepting an existing object or a DER/PEM byte string.

        :param cert:
            An asn1crypto.x509.Certificate object or a byte string

        :raises:
            TypeError - when cert is neither of the accepted types

        :return:
            An asn1crypto.x509.Certificate object
        """

        if isinstance(cert, x509.Certificate):
            return cert
        if not isinstance(cert, byte_cls):
            raise TypeError(pretty_message(
                '''
                cert must be a byte string or an
                asn1crypto.x509.Certificate object, not %s
                ''',
                type_name(cert)
            ))
        if pem.detect(cert):
            _, _, cert = pem.unarmor(cert)
        return x509.Certificate.load(cert)

    @property
    def first(self):
        """
        Returns the current beginning of the path - for a path to be complete,
        this certificate should be a trust root

        :return:
            The first asn1crypto.x509.Certificate object in the path
        """

        return self._certs[0]

    def find_issuer(self, cert):
        """
        Return the issuer of the cert specified, as defined by this path

        :param cert:
            An asn1crypto.x509.Certificate object to get the issuer of

        :raises:
            LookupError - when the issuer of the certificate could not be found

        :return:
            An asn1crypto.x509.Certificate object of the issuer
        """

        for entry in self:
            if entry.subject == cert.issuer:
                # When both key identifiers are available they must also
                # agree; a subject match alone is sufficient otherwise.
                if entry.key_identifier and cert.authority_key_identifier:
                    if entry.key_identifier == cert.authority_key_identifier:
                        return entry
                else:
                    return entry

        raise LookupError('Unable to find the issuer of the certificate specified')

    def truncate_to(self, cert):
        """
        Remove all certificates in the path after the cert specified

        :param cert:
            An asn1crypto.x509.Certificate object to find

        :raises:
            LookupError - when the certificate could not be found

        :return:
            The current ValidationPath object, for chaining
        """

        cert_index = None
        for index, entry in enumerate(self):
            if entry.issuer_serial == cert.issuer_serial:
                cert_index = index
                break

        if cert_index is None:
            raise LookupError('Unable to find the certificate specified')

        while len(self) > cert_index + 1:
            self.pop()

        return self

    def truncate_to_issuer(self, cert):
        """
        Remove all certificates in the path after the issuer of the cert
        specified, as defined by this path

        :param cert:
            An asn1crypto.x509.Certificate object to find the issuer of

        :raises:
            LookupError - when the issuer of the certificate could not be found

        :return:
            The current ValidationPath object, for chaining
        """

        issuer_index = None
        for index, entry in enumerate(self):
            if entry.subject == cert.issuer:
                # Same matching rule as find_issuer(): key identifiers must
                # agree when both sides provide one.
                if entry.key_identifier and cert.authority_key_identifier:
                    if entry.key_identifier == cert.authority_key_identifier:
                        issuer_index = index
                        break
                else:
                    issuer_index = index
                    break

        if issuer_index is None:
            raise LookupError('Unable to find the issuer of the certificate specified')

        while len(self) > issuer_index + 1:
            self.pop()

        return self

    def copy(self):
        """
        Creates a copy of this path

        :return:
            A ValidationPath object
        """

        copy = self.__class__()
        copy._certs = self._certs[:]
        copy._cert_hashes = self._cert_hashes.copy()
        return copy

    def pop(self):
        """
        Removes the last certificate from the path

        :return:
            The current ValidationPath object, for chaining
        """

        last_cert = self._certs.pop()
        self._cert_hashes.remove(last_cert.issuer_serial)
        return self

    def append(self, cert):
        """
        Appends a cert to the path. This should be a cert issued by the last
        cert in the path.

        :param cert:
            An asn1crypto.x509.Certificate object or a byte string

        :raises:
            DuplicateCertificateError - when the cert is already in the path

        :return:
            The current ValidationPath object, for chaining
        """

        cert = self._coerce_cert(cert)
        if cert.issuer_serial in self._cert_hashes:
            raise DuplicateCertificateError()
        self._cert_hashes.add(cert.issuer_serial)
        self._certs.append(cert)
        return self

    def prepend(self, cert):
        """
        Prepends a cert to the path. This should be the issuer of the previously
        prepended cert.

        :param cert:
            An asn1crypto.x509.Certificate object or a byte string

        :raises:
            DuplicateCertificateError - when the cert is already in the path

        :return:
            The current ValidationPath object, for chaining
        """

        cert = self._coerce_cert(cert)
        if cert.issuer_serial in self._cert_hashes:
            raise DuplicateCertificateError()
        self._cert_hashes.add(cert.issuer_serial)
        self._certs.insert(0, cert)
        return self

    def __len__(self):
        return len(self._certs)

    def __getitem__(self, key):
        return self._certs[key]

    def __iter__(self):
        return iter(self._certs)

    def __eq__(self, other):
        # Comparing against a non-path object cannot succeed; returning
        # NotImplemented lets Python fall back instead of raising
        # AttributeError on the foreign object.
        if not isinstance(other, ValidationPath):
            return NotImplemented
        return self._certs == other._certs
|
paradisessssspee/nlptools | tests/context.py | Python | gpl-3.0 | 174 | 0.017241 | """use __file__ to determine library path
"""
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import language_model |
jessicalucci/NovaOrc | nova/openstack/common/processutils.py | Python | apache-2.0 | 5,488 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import logging
import random
import shlex
from eventlet.green import subprocess
from eventlet import greenthread
from nova.openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class UnknownArgumentError(Exception):
def __init__(self, message=None):
super(UnknownArgumentError, self).__init__(message)
class ProcessExecutionError(Exception):
    """Raised when a spawned command exits abnormally.

    The formatted exception message bundles the command line, exit code and
    captured stdout/stderr so callers can log a single useful string.
    """

    def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
                 description=None):
        # Substitute a generic description / placeholder exit code when the
        # caller did not provide one.
        desc = "Unexpected error while running command." if description is None else description
        code = '-' if exit_code is None else exit_code
        message = "%s\nCommand: %s\nExit code: %s\nStdout: %r\nStderr: %r" % (
            desc, cmd, code, stdout, stderr)
        super(ProcessExecutionError, self).__init__(message)
def execute(*cmd, **kwargs):
    """
    Helper method to shell out and execute a command through subprocess with
    optional retry.

    :param cmd: Passed to subprocess.Popen.
    :type cmd: string
    :param process_input: Send to opened process.
    :type process_input: string
    :param check_exit_code: Defaults to 0. Will raise
                            :class:`ProcessExecutionError`
                            if the command exits without returning this value
                            as a returncode
    :type check_exit_code: int
    :param delay_on_retry: True | False. Defaults to True. If set to True,
                           wait a short amount of time before retrying.
    :type delay_on_retry: boolean
    :param attempts: How many times to retry cmd.
    :type attempts: int
    :param run_as_root: True | False. Defaults to False. If set to True,
                        the command is prefixed by the command specified
                        in the root_helper kwarg.
    :type run_as_root: boolean
    :param root_helper: command to prefix all cmd's with
    :type root_helper: string
    :returns: (stdout, stderr) from process execution
    :raises: :class:`UnknownArgumentError` on
             receiving unknown arguments
    :raises: :class:`ProcessExecutionError`
    """

    process_input = kwargs.pop('process_input', None)
    check_exit_code = kwargs.pop('check_exit_code', 0)
    delay_on_retry = kwargs.pop('delay_on_retry', True)
    attempts = kwargs.pop('attempts', 1)
    run_as_root = kwargs.pop('run_as_root', False)
    root_helper = kwargs.pop('root_helper', '')
    if len(kwargs):
        raise UnknownArgumentError(_('Got unknown keyword args '
                                     'to utils.execute: %r') % kwargs)
    if run_as_root:
        cmd = shlex.split(root_helper) + list(cmd)
    # A list comprehension (not map()) so cmd survives repeated use; under
    # Python 3 a map iterator would be exhausted by the first join/Popen.
    cmd = [str(c) for c in cmd]

    while attempts > 0:
        attempts -= 1
        try:
            LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
            _PIPE = subprocess.PIPE  # pylint: disable=E1101
            obj = subprocess.Popen(cmd,
                                   stdin=_PIPE,
                                   stdout=_PIPE,
                                   stderr=_PIPE,
                                   close_fds=True)
            result = None
            if process_input is not None:
                result = obj.communicate(process_input)
            else:
                result = obj.communicate()
            obj.stdin.close()  # pylint: disable=E1101
            _returncode = obj.returncode  # pylint: disable=E1101
            if _returncode:
                # Lazy %-args: formatting only happens if DEBUG is enabled.
                LOG.debug(_('Result was %s'), _returncode)
                if (isinstance(check_exit_code, int) and
                    not isinstance(check_exit_code, bool) and
                    _returncode != check_exit_code):
                    (stdout, stderr) = result
                    raise ProcessExecutionError(exit_code=_returncode,
                                                stdout=stdout,
                                                stderr=stderr,
                                                cmd=' '.join(cmd))
            return result
        except ProcessExecutionError:
            if not attempts:
                raise
            else:
                LOG.debug(_('%r failed. Retrying.'), cmd)
                if delay_on_retry:
                    greenthread.sleep(random.randint(20, 200) / 100.0)
        finally:
            # NOTE(termie): this appears to be necessary to let the subprocess
            #               call clean something up in between calls, without
            #               it two execute calls in a row hangs the second one
            greenthread.sleep(0)
esenti/ld-poznan | ldpoznan/core/urls.py | Python | mit | 160 | 0.0125 | from django.conf.urls import patterns, url
from django.conf import settings
import views
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
)
|
yamrock/confcentral | settings.py | Python | apache-2.0 | 495 | 0.00202 | #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '942213791788-amreps3i4mdhv6d646ufm82t1jhqgn8j.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID
|
LLNL/spack | var/spack/repos/builtin/packages/vpic/package.py | Python | lgpl-2.1 | 1,121 | 0.000892 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Vpic(CMakePackage):
"""VPIC is a general purpose particle-in-cell simulation code for modeling
kinetic plasmas in one, two, or three spatial dimensions. It employs a
    second-order, explicit, leapfrog algorithm to update charged particle
positions and velocities in order to solve the relativistic kinetic
equation for each species in the plasma, along with a full Maxwell
    description for the electric and magnetic fields evolved via a second-
order finite-difference-time-domain (FDTD) solve.
"""
homepage = "https://github.com/lanl/vpic"
git = "https://github.com/lanl/vpic.git"
version('develop', branch='master', submodules=True)
depends_on("cmake@3.1:", type='build')
depends_on('mpi')
def cmake_args(self):
options = ['-DENABLE_INTEGRATED_TESTS=ON', '-DENABLE_UNIT_TESTS=ON']
return options
|
Ins1ne/smyt | manage.py | Python | mit | 253 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "smyt.settings.local")
from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
mncoon/odoo-addons | syscoon_finance_export/wizard/move_export.py | Python | lgpl-3.0 | 748 | 0.004011 | from openerp import models, api, _
from openerp.exceptions import UserError
class ExportMoveExport(models.TransientModel):
    """Wizard that generates an export file for the selected account moves."""

    _name = 'export.move.export'
    _description = 'Export Moves'

    @api.multi
    def create_export_file(self):
        """Create the export file for every selected move in state 'created'.

        :raises UserError: when none of the selected moves is exportable.
        :return: an action dict closing the wizard window.
        """
        context = dict(self._context or {})
        moves = self.env['export.move'].browse(context.get('active_ids'))
        # Collect only the moves that are ready for export.
        export_to_create = self.env['export.move']
        for move in moves:
            if move.state == 'created':
                export_to_create += move
        if not export_to_create:
            raise UserError(_('There is no posted move item to create a Export-File.'))
        export_to_create.action_create_export_file()
        return {'type': 'ir.actions.act_window_close'}
|
andresgz/zshoes | zshoes/articles/views.py | Python | bsd-3-clause | 1,444 | 0 | from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.views.generic import ListView, CreateView, UpdateView
from .models import Article
class ArticleListView(ListView):
"""
View to list all the articles
"""
#: Article model
model = Article
template_name = "articles/list.html"
class ArticleCreateView(CreateView):
    """
    View to create a Article
    """
    model = Article
    fields = ['name', 'store', 'description', 'price',
              'total_in_shelf', 'total_in_vault']
    template_name = "articles/create.html"
    success_url = reverse_lazy('articles:list')

    def form_valid(self, form):
        # Translate the message template first, then interpolate, so the
        # translation catalog can match the untranslated string.
        messages.success(
            self.request,
            _('Article {0} created').format(form.cleaned_data['name']))
        return super(ArticleCreateView, self).form_valid(form)
class ArticleUpdateView(UpdateView):
    """
    View to update a Article
    """
    model = Article
    fields = ['name', 'store', 'description', 'price',
              'total_in_shelf', 'total_in_vault']
    template_name = "articles/update.html"
    success_url = reverse_lazy('articles:list')

    def form_valid(self, form):
        # Translate the message template first, then interpolate, so the
        # translation catalog can match the untranslated string.
        messages.success(
            self.request,
            _('Article {0} updated').format(form.cleaned_data['name']))
        return super(ArticleUpdateView, self).form_valid(form)
|
tellproject/helper_scripts | hive.py | Python | apache-2.0 | 3,067 | 0.013694 | #!/usr/bin/env python
import os
import sys
import time
from ServerConfig import General
from ServerConfig import Hadoop
from ServerConfig import Hive
xmlProp = lambda key, value: "<property><name>" + key +"</name><value>" + value + "</value></property>\n"
concatStr = lambda servers, sep: sep.join(servers)
def copyToHost(hosts, path):
for host in hosts:
os.system('scp {0} root@{1}:{0}'.format(path, host))
def confMaster():
# hive-env.sh
hiveEnv = "{0}/conf/hive-env.sh".format(Hive.hivedir)
with open (hiveEnv, 'w+') as f:
f.write("export HADOOP_HOME={0}\n".format(Hadoop.hadoopdir))
f.write("export HADOOP_USER_CLASSPATH_FIRST=true\n")
copyToHost([Hive.master], hiveEnv)
# hive-site.xml
hiveSiteXml = "{0}/conf/hive-site.xml".format(Hive.hivedir)
with open (hiveSiteXml, 'w+') as f:
f.write("<configuration>\n")
f.write(xmlProp("hive.metastore.warehouse.dir", "/usr/hive/warehouse"))
#f.write(xmlProp("hive.metastore.uris", "thrift://{0}:{1}".format(Hive.thriftbindhost, Hive.thriftport)))
f.write(xmlProp("javax.jdo.option.ConnectionURL", "jdbc:derby:;databaseName={0}/metastore_db;".format(Hive.hivedir)))
f.write(xmlProp("hive.server2.thrift.port", Hive.thriftport))
f.write(xmlProp("hive.server2.thrift.bind.host", Hive.thriftbindhost))
f.write("</configuration>\n")
copyToHost([Hive.master], hiveSiteXml)
def startHive():
# metastore
start_hivems_cmd = "JAVA_HOME={1} {0}/bin/hive --service metastore".format(Hive.hivedir, General.javahome)
print "{1} :{0}".format(start_hivems_cmd, Hive.master)
#os.system('ssh -A root@{0} {1}'.format(Hive.master, start_hivems_cmd))
os.system('ssh -n -f root@{0} "sh -c \'{1} > /dev/null 2>&1 &\'"'.format(Hive.master, start_hivems_cmd))
time.sleep(2)
# hiveserver
start_hiveserver_cmd = "JAVA_HOME={1} {0}/bin/hive --service hiveserver2".format(Hive.hivedir, General.javahome)
print "{1} : {0}".format(start_hiveserver_cmd, Hive.master)
#os.system('ssh -A root@{0} {1}'.format(Hive.master, start_hiveserver_cmd))
os.system('ssh -n -f root@{0} "sh -c \'{1} > /dev/null 2>&1 &\'"'.format(Hive.master, start_hiveserver_cmd))
time.sleep(2)
def stopHive():
stop_hivems_cmd = "ps -a | grep HiveMetaStore | grep -v grep | awk '{print $2}' | xargs kill -9"
print "{1} : {0}".format(s | top_hivems_cmd, Hive.master)
os.system('ssh -A root@{0} {1}'.format(Hive.master, stop_hivems_cmd))
# hiveserver
stop_hiveserver_cmd = "ps -a | grep HiverServer2 | grep -v grep | awk '{print $2}' | xargs kill -9"
os.system('ssh -A root@{0} {1}'.format(Hive.master, stop_hiveserver_cmd))
print "{1} : {0}".format(stop_hiveserver_cmd, Hive.master)
def main(argv):
if ((len(argv) == 0) or | (argv[0] == 'start')):
confMaster()
startHive()
elif ((len(argv) == 1) and (argv[0] == 'stop')):
stopHive()
else:
print "Usage: <start|stop> Default: start"
if __name__ == "__main__":
main(sys.argv[1:])
|
KevinHock/rtdpyt | profiling/test_projects/flaskbb_lite_3/flaskbb/utils/fields.py | Python | gpl-2.0 | 1,093 | 0 | # -*- coding: utf-8 -*-
"""
flaskbb.utils.fields
~~~~~~~~~~~~~~~~~~~~
Additional fields for wtforms
:copyright: (c) 2014 by the FlaskBB Team.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from wtforms.fields import DateField
class BirthdayField(DateField):
    """Same as DateField, except it allows ``None`` values in case a user
    wants to delete his birthday.
    """

    def __init__(self, label=None, validators=None, format='%Y-%m-%d',
                 **kwargs):
        DateField.__init__(self, label, validators, format, **kwargs)

    def process_formdata(self, valuelist):
        """Parse submitted values into a date, tolerating an all-'None' submission."""
        if valuelist:
            date_str = ' '.join(valuelist)
            try:
                self.data = datetime.strptime(date_str, self.format).date()
            except ValueError:
                self.data = None
                # Only accept the None value if all values are None.
                # A bit dirty though
                if valuelist != ["None", "None", "None"]:
                    raise ValueError("Not a valid date value")
|
dougnd/matplotlib2tikz | test/testfunctions/quadmesh.py | Python | mit | 840 | 0 | # -*- coding: utf-8 -*-
#
desc = 'Plot Taylor--Green Vortex using pcolormesh'
# phash = 'ff1a8578c9847b22'
phash = '7f1a8578c9857932'
def plot():
    """Render the u and v velocity components of the Taylor--Green vortex
    at t=0 as two pcolormesh panels and return the figure."""
    from matplotlib import pyplot as plt
    import numpy as np

    x = np.linspace(0*np.pi, 2*np.pi, 128)
    y = np.linspace(0*np.pi, 2*np.pi, 128)
    X, Y = np.meshgrid(x, y)
    nu = 1e-5

    def F(t):
        # Viscous decay factor of the vortex.
        return np.exp(-2*nu*t)

    def u(x, y, t):
        return np.sin(x)*np.cos(y)*F(t)

    def v(x, y, t):
        return -np.cos(x)*np.sin(y)*F(t)

    fig, axs = plt.subplots(2, figsize=(8, 12))
    axs[0].pcolormesh(X, Y, u(X, Y, 0))
    axs[1].pcolormesh(X, Y, v(X, Y, 0))
    for ax in axs:
        ax.set_xlim(x[0], x[-1])
        ax.set_ylim(y[0], y[-1])
        ax.set_xlabel('x')
        ax.set_ylabel('y')
    axs[0].set_title('Taylor--Green Vortex')
    return fig
|
jrbl/pygitplay | pygitplay.py | Python | gpl-3.0 | 87 | 0.011494 | #!/usr/bin/env pytho | n
# -*- coding: utf-8 -*-
"""Throwaway where I play with | pygit"""
|
aweber/test-helpers | tests/integration/test_mongo.py | Python | bsd-3-clause | 2,050 | 0.000488 | from __future__ import absolute_import
import os
from pymongo import MongoClient
from test_helpers import bases, mixins, mongo
class WhenCreatingTemporaryDatabase(bases.BaseTest):
@classmethod
def configure(cls):
super(WhenCreatingTemporaryDatabase, cls).configure()
cls.database = mongo.TemporaryDatabase(host='localhost', port='27017')
@classmethod
def execute(cls):
cls.database.create()
def should_create_database(self):
mongodb = MongoClient(host='localhost', port=27017)
self.assertIn(self.database.database_name, mongodb.database_names())
class WhenDroppingTemporaryDatabase(bases.BaseTest):
@classmethod
def configure(cls):
super(WhenDroppingTemporaryDatabase, cls).configure()
cls.database = mongo.TemporaryDatabase(host='localhost', port='27017')
cls.database.create()
@classmethod
def execute(cls):
cls.database.drop()
def should_drop_database(self):
mongodb = MongoClient(host='localhost', port=27017)
self.assertNotIn(self.database.database_name, mongodb.database_names())
class WhenCreatingTemporaryDatabaseAndExportingEnv(
        mixins.EnvironmentMixin, bases.BaseTest):
    """set_environment() must export the connection details as env vars."""

    @classmethod
    def configure(cls):
        super(WhenCreatingTemporaryDatabaseAndExportingEnv, cls).configure()
        # Clear any pre-existing values so the assertions below prove the
        # variables were set by set_environment(), not inherited.
        cls.unset_environment_variable('MONGOHOST')
        cls.unset_environment_variable('MONGOPORT')
        cls.unset_environment_variable('MONGODATABASE')
        cls.database = mongo.TemporaryDatabase(host='localhost', port='27017')
        cls.database.create()

    @classmethod
    def execute(cls):
        cls.database.set_environment()

    def should_export_mongohost(self):
        self.assertEqual(os.environ['MONGOHOST'], self.database.host)

    def should_export_mongoport(self):
        self.assertEqual(os.environ['MONGOPORT'], str(self.database.port))

    def should_export_mongodatabase(self):
        self.assertEqual(os.environ['MONGODATABASE'], self.database.database_name)
staranjeet/fjord | vendor/packages/translate-toolkit/translate/misc/autoencode.py | Python | bsd-3-clause | 2,252 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Supports a hybrid Unicode string that knows which encoding is preferable,
and uses this when converting to a string."""
class autoencode(unicode):
def __new__(newtype, string=u"", encoding=None, errors=None):
if isinstance(string, unicode):
if errors is None:
newstring = unicode.__new__(newtype, string)
| else:
newstring = unicode.__new__(newtype, string, errors=errors)
if encoding is None and isinstance(string, autoencode):
newstring.encoding = string.encoding
else:
newstring.encoding = encoding
else:
if errors is None and encoding is None:
newstring = unicode.__new__(newtype, string)
elif errors is None:
try:
newstring = unicode.__new__(newtype, string, encoding)
except LookupError, e:
raise ValueError(str(e))
elif encoding is None:
newstring = unicode.__new__(newtype, string, errors)
else:
newstring = unicode.__new__(newtype, string, encoding, errors)
newstring.encoding = encoding
return newstring
def join(self, seq):
return autoencode(super(autoencode, self).join(seq))
def __str__(self):
if self.encoding is None:
return super(autoencode, self).__str__()
else:
return self.encode(self.encoding)
|
Beauhurst/django | django/utils/timezone.py | Python | bsd-3-clause | 8,544 | 0 | """
Timezone-related classes and functions.
"""
import functools
from contextlib import ContextDecorator
from datetime import datetime, timedelta, tzinfo
from threading import local
import pytz
from django.conf import settings
__all__ = [
'utc', 'get_fixed_timezone',
'get_default_timezone', 'get_default_timezone_name',
'get_current_timezone', 'get_current_timezone_name',
'activate', 'deactivate', 'override',
'localtime', 'now',
'is_aware', 'is_naive', 'make_aware', 'make_naive',
]
# UTC and local time zones
ZERO = timedelta(0)
class FixedOffset(tzinfo):
    """
    Fixed offset in minutes east from UTC. Taken from Python's docs.

    Kept as close as possible to the reference version. __init__ was changed
    to make its arguments optional, according to Python's requirement that
    tzinfo subclasses can be instantiated without arguments.
    """

    def __init__(self, offset=None, name=None):
        # Both arguments optional (required for pickling support); the
        # accessors below assume they were provided at construction time.
        if offset is not None:
            self.__offset = timedelta(minutes=offset)
        if name is not None:
            self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        # A fixed-offset zone never observes daylight saving time.
        return ZERO
# UTC time zone as a tzinfo instance.
utc = pytz.utc
def get_fixed_timezone(offset):
    """Return a tzinfo instance with a fixed offset from UTC.

    ``offset`` is either a number of minutes east of UTC or a timedelta.
    """
    if isinstance(offset, timedelta):
        # Use total_seconds(): timedelta.seconds is always non-negative (the
        # sign lives in the days component), so `.seconds // 60` produced a
        # wrong, positive offset for negative timedeltas.
        offset = int(offset.total_seconds()) // 60
    sign = '-' if offset < 0 else '+'
    hhmm = '%02d%02d' % divmod(abs(offset), 60)
    name = sign + hhmm
    return FixedOffset(offset, name)
# In order to avoid accessing settings at compile time,
# wrap the logic in a function and cache the result.
@functools.lru_cache()
def get_default_timezone():
"""
Return the default time zone as a tzinfo instance.
This is the time zone defined by settings.TIME_ZONE.
"""
return pytz.timezone(settings.TIME_ZONE)
# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
"""Return the name of the default time zone."""
return _get_timezone_name(get_default_timezone())
_active = local()
def get_current_timezone():
"""Return the currently active time zone as a tzinfo instance."""
return getattr(_active, "value", get_default_timezone())
def get_current_timezone_name():
"""Return the name of the currently active time zone."""
return _get_timezone_name(get_current_timezone())
def _get_timezone_name(timezone):
"""Return the name of ``timezone``."""
try:
# for pytz timezones
return timezone.zone
except AttributeError:
# for regular tzinfo objects
return timezone.tzname(None)
# Timezone selection functions.
# These functions don't change os.environ['TZ'] and call time.tzset()
# because it isn't thread safe.
def activate(timezone):
    """
    Set the time zone for the current thread.

    The ``timezone`` argument must be an instance of a tzinfo subclass or a
    time zone name.
    """
    if isinstance(timezone, tzinfo):
        _active.value = timezone
        return
    if isinstance(timezone, str):
        # A name: resolve it through pytz.
        _active.value = pytz.timezone(timezone)
        return
    raise ValueError("Invalid timezone: %r" % timezone)
def deactivate():
    """
    Unset the time zone for the current thread.

    Django will then use the time zone defined by settings.TIME_ZONE.
    """
    try:
        del _active.value
    except AttributeError:
        # Nothing was activated for this thread; that's fine.
        pass
class override(ContextDecorator):
    """
    Temporarily set the time zone for the current thread.

    This context manager (and decorator) calls activate() on entry and
    restores whatever time zone was previously active on exit.

    The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
    time zone name, or ``None``. If it is ``None``, Django enables the default
    time zone.
    """

    def __init__(self, timezone):
        self.timezone = timezone

    def __enter__(self):
        # Remember what was active so __exit__ can restore it.
        self.old_timezone = getattr(_active, 'value', None)
        if self.timezone is not None:
            activate(self.timezone)
        else:
            deactivate()

    def __exit__(self, exc_type, exc_value, traceback):
        previous = self.old_timezone
        if previous is not None:
            _active.value = previous
        else:
            deactivate()
# Templates
def template_localtime(value, use_tz=None):
    """
    Check if value is a datetime and convert it to local time if necessary.

    If use_tz is provided and is not None, that will force the value to
    be converted (or not), overriding the value of settings.USE_TZ.

    This function is designed for use by the template engine.
    """
    if not isinstance(value, datetime):
        return value
    convert = settings.USE_TZ if use_tz is None else use_tz
    if not convert or is_naive(value):
        return value
    # Individual values may opt out of conversion.
    if not getattr(value, 'convert_to_local_time', True):
        return value
    return localtime(value)
# Utilities
def localtime(value=None, timezone=None):
    """
    Convert an aware datetime.datetime to local time.

    Only aware datetimes are allowed. When value is omitted, it defaults to
    now().

    Local time is defined by the current time zone, unless another time zone
    is specified.
    """
    value = now() if value is None else value
    tz = get_current_timezone() if timezone is None else timezone
    # Emulate the behavior of astimezone() on Python < 3.6.
    if is_naive(value):
        raise ValueError("localtime() cannot be applied to a naive datetime")
    converted = value.astimezone(tz)
    normalize = getattr(tz, 'normalize', None)
    if normalize is not None:
        # pytz time zones need normalize() to fix up DST after astimezone().
        converted = normalize(converted)
    return converted
def localdate(value=None, timezone=None):
    """
    Convert an aware datetime to local time and return the value's date.

    Only aware datetimes are allowed. When value is omitted, it defaults to
    now().

    Local time is defined by the current time zone, unless another time zone is
    specified.
    """
    local_value = localtime(value, timezone)
    return local_value.date()
def now():
    """
    Return an aware or naive datetime.datetime, depending on settings.USE_TZ.
    """
    if not settings.USE_TZ:
        return datetime.now()
    # timeit showed datetime.now(tz=utc) to be ~24% slower than this form.
    return datetime.utcnow().replace(tzinfo=utc)
# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.
def is_aware(value):
    """
    Determine if a given datetime.datetime is aware.

    The concept is defined in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo

    Assuming value.tzinfo is either None or a proper datetime.tzinfo,
    value.utcoffset() implements the appropriate logic.
    """
    offset = value.utcoffset()
    return offset is not None


def is_naive(value):
    """
    Determine if a given datetime.datetime is naive.

    The concept is defined in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo

    Assuming value.tzinfo is either None or a proper datetime.tzinfo,
    value.utcoffset() implements the appropriate logic.
    """
    offset = value.utcoffset()
    return offset is None
def make_aware(value, timezone=None, is_dst=None):
    """Make a naive datetime.datetime in a given time zone aware."""
    tz = get_current_timezone() if timezone is None else timezone
    localize = getattr(tz, 'localize', None)
    if localize is not None:
        # pytz path: localize() resolves ambiguous times via is_dst.
        return localize(value, is_dst=is_dst)
    # Check that we won't overwrite the timezone of an aware datetime.
    if is_aware(value):
        raise ValueError(
            "make_aware expects a naive datetime, got %s" % value)
    # This may be wrong around DST changes!
    return value.replace(tzinfo=tz)
def make_naive(value, timezone=None):
"""Make an aware datetime.datetime naive in a given time zone."""
if timezone is None:
timezone = get_current_timezone()
# Emulate |
VioletRed/script.module.urlresolver | lib/urlresolver/plugins/purevid.py | Python | gpl-2.0 | 5,907 | 0.00965 | #-*- coding: utf-8 -*-
"""
Purevid urlresolver XBMC Addon
Copyright (C) 2011 t0mm0, belese, JUL1EN094
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import random
import re
import urllib, urllib2
import ast
import xbmc,xbmcplugin,xbmcgui,xbmcaddon,time,datetime
import cookielib
import json
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import SiteAuth
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
from urlresolver import common
#SET ERROR_LOGO# THANKS TO VOINAGE, BSTRDMKR, ELDORADO
error_logo = os.path.join(common.addon_path, 'resources', 'images', 'redx.png')
class purevid(Plugin, UrlResolver, SiteAuth, PluginSettings):
implements = [UrlResolver, SiteAuth, PluginSettings]
name = "purevid"
domains = [ "purevid.com" ]
profile_path = common.profile_path
cookie_file = os.path.join(profile_path, '%s.cookies' % name)
def __init__(self):
p = self.get_setting('priority') or 1
self.priority = int(p)
self.net = Net()
try:
os.makedirs(os.path.dirname(self.cookie_file))
except OSError:
pass
#UrlResolver methods
def get_media_url(self, host, media_id):
try :
web_url = self.get_url(host, media_id)
try:
html = self.net.http_GET(web_url).content
except urllib2.URLError, e:
raise Exception ('got http error %d fetching %s' % (e.code, web_url))
data = json.loads(html)
if self.get_setting('quality') == '0' :
url = data['clip']['bitrates'][0]['url']
else :
url = data['clip']['bitrates'][-1]['url']
params = ''
for val in data['plugins']['lighttpd']['params'] :
params += val['name'] + '=' + val['value'] + '&'
url = url + '?' + params[:-1]
cookies = {}
for cookie in self.net._cj:
cookies[cookie.name] = cookie.value
url = url + '|' + urllib.urlencode({'Cookie' :urllib.urlencode(cookies)})
common.addon.log(url)
return url
except Exception, e:
common.addon.log('**** Purevid Error occured: %s' % e)
common.addon.show_small_popup(title='[B][COLOR white]PUREVID[/COLOR][/B]', msg='[COLOR red]%s[/COLOR]' % e, delay=5000, image=error_logo)
return self.unresolvable(code=0, msg=e)
def get_url(self, host, media_id):
return 'http://www.purevid.com/?m=video_info_embed_flv&id=%s' % media_id
def get_host_and_id(self, url):
r = re.search('//(.+?)/v/([0-9A-Za-z]+)', url)
if r:
return r.groups()
else:
return False
def valid_url(self, url, host):
if self.get_setting('login') == 'false':
return False
common.addon.log(url)
return 'purevid' in url
#SiteAuth methods
def needLogin(self):
url = 'http://www.purevid.com/?m=main'
if not os.path.exists(self.cookie_file):
return True
self.net.set_cookies(self.cookie_file)
| source = self.net.http_GET(url).content
common.addon.log_debug(source.encode('utf-8'))
if re.search("""<span>Welcome <strong>.*</strong></span>""", source) :
common.addon.log_debug('needLogin returning False')
r | eturn False
else :
common.addon.log_debug('needLogin returning True')
return True
def login(self):
if self.needLogin() :
common.addon.log('login to purevid')
url = 'http://www.purevid.com/?m=login'
data = {'username' : self.get_setting('username'), 'password' : self.get_setting('password')}
source = self.net.http_POST(url,data).content
if re.search(self.get_setting('username'), source):
self.net.save_cookies(self.cookie_file)
self.net.set_cookies(self.cookie_file)
return True
else:
return False
else :
return True
#PluginSettings methods
def get_settings_xml(self):
xml = PluginSettings.get_settings_xml(self)
xml += '<setting id="purevid_login" '
xml += 'type="bool" label="Login" default="false"/>\n'
xml += '<setting id="purevid_username" enable="eq(-1,true)" '
xml += 'type="text" label=" username" default=""/>\n'
xml += '<setting id="purevid_password" enable="eq(-2,true)" '
xml += 'type="text" label=" password" option="hidden" default=""/>\n'
xml += '<setting label="Video quality" id="%s_quality" ' % self.__class__.__name__
xml += 'type="enum" values="FLV|Maximum" default="0" />\n'
xml += '<setting label="This plugin calls the Purevid urlresolver - '
xml += 'change settings there." type="lsep" />\n'
return xml
|
elstupido/rpg | rooms/prolog/Room420-1.room.py | Python | mit | 1,463 | 0.006835 |
from room import Room

# Room definition consumed by the game engine: description, look/talk
# targets and the characters present in the room.
r = Room()
r.roomname = 'room 420'
r.exits = {'hallway': 'hallway'}
r.roomdesc = """
as the door opens smoke languidly rolls out. The lights are off and the curtain is pulled, which would normally make for a very dark room except for what appears to be a super nova sitting in the corner of the room. Anti political posters cover all the walls and some of the ceiling. a panel of six monitors sits on a desk, a few have news feeds and chat windows, the rest appear to be filled with code. the only other thing you can make out in the blinding light is the garret, sitting in his office chair, silhouetted like some kind of bizarre, techno angel.
"""
r.looktargets = {'blinding light': '(squinting)It apears to be some kind of computer, "whats that smell? burrned retninas you say"\n\n',
                 'light': '(squinting)It apears to be some kind of computer, "whats that smell? burrned retninas you say"\n\n',
                 'monitors': 'two of the monitors have news feeds detailing various horrors, one monitor has a few chat windows up where two people one named shifty and the other stupid seem to be conversing.\n\n',
                 'posters': 'various posters striking out at governments and "the new world order." one poster which seems different from the rest. it has no words and no slogans on it just a black masked man watching as countless people walk down a road into a pitch black tunnel.\n\n'}
r.talktargets = {'garrett' : 'garret_1'}
r.characters = ['garret_1']
coreycb/charms.openstack | charms_openstack/plugins/trilio.py | Python | apache-2.0 | 24,279 | 0 | # Copyright 2019 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import os
import re
from urllib.parse import urlparse
import charms_openstack.adapters
import charms_openstack.charm
import charmhelpers.core as ch_core
import charmhelpers.fetch as fetch
import charmhelpers.core.unitdata as unitdata
import charmhelpers.contrib.openstack.utils as os_utils
import charms.reactive as reactive
TV_MOUNTS = "/var/triliovault-mounts"
# Location of the certificate file to use when talking to S3 endpoint.
S3_SSL_CERT_FILE = '/usr/share/ca-certificates/charm-s3.cert'
# Used to store the discovered release version for caching between invocations
TRILIO_RELEASE_KEY = 'charmers.trilio-release-version'
# _trilio_releases{} is a dictionary of release -> class that is instantiated
# according to the release that is being requested. i.e. a charm can
# handle more than one release. The BaseOpenStackCharm() derived class sets the
# `release` variable to indicate which OpenStack release that the charm
# supports # and `trilio_release` to indicate which Trilio release the charm
# supports. # Any subsequent releases that need a different/specialised charm
# uses the # `release` and `trilio_release` class properties to indicate that
# it handles those releases onwards.
_trilio_releases = {}
@charms_openstack.adapters.config_property
def trilio_properties(cls):
    """Trilio properties additions for config adapter.

    Trilio 4.1+ uses a dedicated database and the dmapi transport;
    earlier releases use the legacy equivalents.

    :param cls: Configuration Adapter class
    :type cls: charms_openstack.adapters.DefaultConfigurationAdapter
    """
    cur_ver = cls.charm_instance.release_pkg_version()
    # version_compare() >= 0 means the installed version is >= 4.1.
    comp = fetch.apt_pkg.version_compare(cur_ver, '4.1')
    if comp >= 0:
        return {
            'db_type': 'dedicated',
            'transport_type': 'dmapi'}
    else:
        return {
            'db_type': 'legacy',
            'transport_type': 'legacy'}
@charms_openstack.adapters.config_property
def trilio_s3_cert_config(cls):
    """Trilio S3 certificate config.

    Returns the certificate path and decoded PEM data when the
    ``tv-s3-ssl-cert`` charm option is set, otherwise an empty dict.

    :param cls: Configuration Adapter class
    :type cls: charms_openstack.adapters.DefaultConfigurationAdapter
    """
    config = ch_core.hookenv.config('tv-s3-ssl-cert')
    if not config:
        return {}
    return {
        'cert_file': S3_SSL_CERT_FILE,
        'cert_data': base64.b64decode(config).decode('utf-8')}
class AptPkgVersion():
    """Wrap a Debian version string so instances can be compared."""

    def __init__(self, version):
        self.version = version

    def _cmp(self, other):
        # apt_pkg.version_compare acts like cmp(): -1, 0 or 1.
        return fetch.apt_pkg.version_compare(self.version, other.version)

    def __lt__(self, other):
        return self._cmp(other) == -1

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __gt__(self, other):
        return self._cmp(other) == 1

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return self.version

    def __hash__(self):
        return hash(repr(self))
class NFSShareNotMountedException(Exception):
    """Signal that the trilio nfs share is not mounted."""
    pass
class UnitNotLeaderException(Exception):
    """Signal that the unit is not the application leader."""
    pass
class GhostShareAlreadyMountedException(Exception):
    """Signal that a ghost share is already mounted."""
    pass
class MismatchedConfigurationException(Exception):
    """Signal that nfs-shares and ghost-shares are mismatched."""
    pass
def _configure_triliovault_source():
    """Configure triliovault specific package sources in addition to
    any general openstack package sources (via openstack-origin).

    Writes the ``triliovault-pkg-source`` charm option verbatim into a
    dedicated apt sources.list.d file.
    """
    sources_path = "/etc/apt/sources.list.d/trilio-gemfury-sources.list"
    with open(sources_path, "w") as tsources:
        tsources.write(ch_core.hookenv.config("triliovault-pkg-source"))
def _install_triliovault(charm):
    """Install packages dealing with Trilio nuances for upgrades as well

    Set the 'upgrade.triliovault' flag to ensure that any triliovault
    packages are upgraded.

    :param charm: the charm instance whose packages should be installed
    """
    packages = charm.all_packages
    if not reactive.is_flag_set("upgrade.triliovault"):
        # Not an upgrade: only install packages that are missing.
        packages = fetch.filter_installed_packages(
            charm.all_packages)
    if packages:
        ch_core.hookenv.status_set('maintenance',
                                   'Installing/upgrading packages')
        fetch.apt_install(packages, fatal=True)
    # AJK: we set this as charms can use it to detect installed state
    charm.set_state('{}-installed'.format(charm.name))
    charm.update_api_ports()
    # NOTE(jamespage): clear upgrade flag if set
    if reactive.is_flag_set("upgrade.triliovault"):
        reactive.clear_flag('upgrade.triliovault')
def get_trilio_codename_install_source(trilio_source):
    """Derive codename from trilio source string.

    Try and derive a trilio version from a deb string like:
        'deb [trusted=yes] https://apt.fury.io/triliodata-4-0/ /'

    :param trilio_source: Trilio source
    :type trilio_source: str
    :returns: Trilio version
    :rtype: str
    :raises: AssertionError
    """
    # The second-to-last whitespace-separated token is the repo URL.
    deb_url = trilio_source.split()[-2]
    # Look for a single '-<major>-<minor>' component in the URL path.
    code = re.findall(r'-(\d*-\d*)', urlparse(deb_url).path)
    assert len(code) == 1, "Cannot derive release from {}".format(deb_url)
    new_os_rel = code[0].replace('-', '.')
    return new_os_rel
def make_trilio_get_charm_instance_handler():
"""This handler sets the get_charm_instance function.
"""
@charms_openstack.charm.core.register_get_charm_instance
def get_trilio_charm_instance(release=None, package_type='deb', *args,
**kwargs):
"""Get an instance of the charm based on the release (or use the
default if release is None).
Note that it passes args and kwargs to the class __init__() method.
:param release: String representing release wanted. Should be of the
form '<openstack_release>_<trilio_release>'
eg 'queens_4.0'
:type release: str
:param package_type: The package type required
:type package_type: str
:returns: Charm class
:rtype: BaseOpenStackCharm() derived class according to cls.releases
"""
cls = None
known_os_releases = sorted(_trilio_releases.keys())
if release is None:
# If release is None then select the class(es) which supports the
# most recent OpenStack release, from within this set select the
# class that supports the most recent Trilio release.
os_release = known_os_releases[-1]
known_trilio_releases = sorted(_trilio_releases[os_release].keys())
trilio_release = known_trilio_releases[-1]
cls = _trilio_releases[os_release][trilio_release][package_type]
else:
os_release, trilio_release = release.split('_')
trilio_release = AptPkgVersion(trilio_release)
if os_release not in os_utils.OPENSTACK_RELEASES:
raise RuntimeError(
"Release {} is not a known OpenStack release?".format(
os_release))
os_release_index = os_utils.OPENSTACK_RELEASES.index(os_release)
if (os_release_index <
os_utils.OPENSTACK_RELEASES.index(known_os_releases[0])):
raise RuntimeError(
"Release {} is not supported by this charm. Earliest "
|
alexrudy/Cauldron | Cauldron/ext/__init__.py | Python | bsd-3-clause | 193 | 0.010363 | # -*- coding: utf- | 8 -*-
"""
Extensions to Cauldron are user-facing helpful features
which do not strictly obey the KTL API, but which a | re
compatible with the KTL API when used via Cauldron.
""" |
h5py/h5py | h5py/tests/test_h5p.py | Python | bsd-3-clause | 6,042 | 0.000662 | # This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
import unittest as ut
from h5py import h5p, h5f, version
from .common import TestCase
class TestLibver(TestCase):
    """
    Feature: Setting/getting lib ver bounds on a file access property list
    """
    def test_libver(self):
        """ Test libver bounds set/get """
        plist = h5p.create(h5p.FILE_ACCESS)
        plist.set_libver_bounds(h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST)
        # The bounds must round-trip unchanged.
        self.assertEqual((h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST),
                         plist.get_libver_bounds())
    @ut.skipIf(version.hdf5_version_tuple < (1, 10, 2),
               'Requires HDF5 1.10.2 or later')
    def test_libver_v18(self):
        """ Test libver bounds set/get for H5F_LIBVER_V18"""
        plist = h5p.create(h5p.FILE_ACCESS)
        plist.set_libver_bounds(h5f.LIBVER_EARLIEST, h5f.LIBVER_V18)
        self.assertEqual((h5f.LIBVER_EARLIEST, h5f.LIBVER_V18),
                         plist.get_libver_bounds())
    @ut.skipIf(version.hdf5_version_tuple < (1, 10, 2),
               'Requires HDF5 1.10.2 or later')
    def test_libver_v110(self):
        """ Test libver bounds set/get for H5F_LIBVER_V110"""
        plist = h5p.create(h5p.FILE_ACCESS)
        plist.set_libver_bounds(h5f.LIBVER_V18, h5f.LIBVER_V110)
        self.assertEqual((h5f.LIBVER_V18, h5f.LIBVER_V110),
                         plist.get_libver_bounds())
    @ut.skipIf(version.hdf5_version_tuple < (1, 11, 4),
               'Requires HDF5 1.11.4 or later')
    def test_libver_v112(self):
        """ Test libver bounds set/get for H5F_LIBVER_V112"""
        plist = h5p.create(h5p.FILE_ACCESS)
        plist.set_libver_bounds(h5f.LIBVER_V18, h5f.LIBVER_V112)
        self.assertEqual((h5f.LIBVER_V18, h5f.LIBVER_V112),
                         plist.get_libver_bounds())
class TestDA(TestCase):
    '''
    Feature: setting/getting chunk cache size on a dataset access property list
    '''
    def test_chunk_cache(self):
        '''test get/set chunk cache '''
        dalist = h5p.create(h5p.DATASET_ACCESS)
        nslots = 10000 # 40kb hash table
        nbytes = 1000000 # 1MB cache size
        w0 = .5 # even blend of eviction strategy
        dalist.set_chunk_cache(nslots, nbytes, w0)
        # All three parameters must round-trip unchanged.
        self.assertEqual((nslots, nbytes, w0),
                         dalist.get_chunk_cache())
class TestFA(TestCase):
    '''
    Feature: setting/getting mdc config on a file access property list
    '''
    def test_mdc_config(self):
        '''test get/set mdc config '''
        # Round-tripping the default config must not raise.
        falist = h5p.create(h5p.FILE_ACCESS)
        config = falist.get_mdc_config()
        falist.set_mdc_config(config)
    def test_set_alignment(self):
        '''test get/set alignment '''
        falist = h5p.create(h5p.FILE_ACCESS)
        threshold = 10 * 1024 # threshold of 10kiB
        alignment = 1024 * 1024 # alignment of 1MiB
        falist.set_alignment(threshold, alignment)
        self.assertEqual((threshold, alignment),
                         falist.get_alignment())
    @ut.skipUnless(
        version.hdf5_version_tuple >= (1, 12, 1) or
        (version.hdf5_version_tuple[:2] == (1, 10) and version.hdf5_version_tuple[2] >= 7),
        'Requires HDF5 1.12.1 or later or 1.10.x >= 1.10.7')
    def test_set_file_locking(self):
        '''test get/set file locking'''
        falist = h5p.create(h5p.FILE_ACCESS)
        use_file_locking = False
        ignore_when_disabled = False
        falist.set_file_locking(use_file_locking, ignore_when_disabled)
        self.assertEqual((use_file_locking, ignore_when_disabled),
                         falist.get_file_locking())
class TestPL(TestCase):
    """
    Feature: set/get round-trips for object/link creation properties
    """
    def test_obj_track_times(self):
        """
        tests if the object track times set/get
        """
        # test for groups
        gcid = h5p.create(h5p.GROUP_CREATE)
        gcid.set_obj_track_times(False)
        self.assertEqual(False, gcid.get_obj_track_times())
        gcid.set_obj_track_times(True)
        self.assertEqual(True, gcid.get_obj_track_times())
        # test for datasets
        dcid = h5p.create(h5p.DATASET_CREATE)
        dcid.set_obj_track_times(False)
        self.assertEqual(False, dcid.get_obj_track_times())
        dcid.set_obj_track_times(True)
        self.assertEqual(True, dcid.get_obj_track_times())
        # test for generic objects
        ocid = h5p.create(h5p.OBJECT_CREATE)
        ocid.set_obj_track_times(False)
        self.assertEqual(False, ocid.get_obj_track_times())
        ocid.set_obj_track_times(True)
        self.assertEqual(True, ocid.get_obj_track_times())
    def test_link_creation_tracking(self):
        """
        tests the link creation order set/get
        """
        gcid = h5p.create(h5p.GROUP_CREATE)
        gcid.set_link_creation_order(0)
        self.assertEqual(0, gcid.get_link_creation_order())
        flags = h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED
        gcid.set_link_creation_order(flags)
        self.assertEqual(flags, gcid.get_link_creation_order())
        # test for file creation
        fcpl = h5p.create(h5p.FILE_CREATE)
        fcpl.set_link_creation_order(flags)
        self.assertEqual(flags, fcpl.get_link_creation_order())
    def test_attr_phase_change(self):
        """
        test the attribute phase change
        """
        cid = h5p.create(h5p.OBJECT_CREATE)
        # test default value
        ret = cid.get_attr_phase_change()
        self.assertEqual((8,6), ret)
        # max_compact must < 65536 (64kb)
        with self.assertRaises(ValueError):
            cid.set_attr_phase_change(65536, 6)
        # Using dense attributes storage to avoid 64kb size limitation
        # for a single attribute in compact attribute storage.
        cid.set_attr_phase_change(0, 0)
        self.assertEqual((0,0), cid.get_attr_phase_change())
|
yzhuan/car | crawler/mycar168/mycar168/pipelines.py | Python | gpl-2.0 | 262 | 0 | # | Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class Mycar168Pipeline(object):
    """Pass-through item pipeline for the mycar168 crawler."""

    def process_item(self, item, spider):
        """Return the item unchanged (no processing is performed)."""
        return item
|
openstack/octavia | octavia/controller/worker/v2/flows/amphora_flows.py | Python | apache-2.0 | 27,873 | 0 | # Copyright 2015 Hewlett-Packard Development Company, L.P.
# Copyright 2020 Red Hat, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_config import cfg
from oslo_log import log as logging
from taskflow.patterns import linear_flow
from taskflow.patterns import unordered_flow
from octavia.common import constants
from octavia.common import utils
from octavia.controller.worker.v2.tasks import amphora_driver_tasks
from octavia.controller.worker.v2.tasks import cert_task
from octavia.controller.worker.v2.tasks import compute_tasks
from octavia.controller.worker.v2.tasks import database_tasks
from octavia.controller.worker.v2.tasks import lifecycle_tasks
from octavia.controller.worker.v2.tasks import network_tasks
from octavia.controller.worker.v2.tasks import retry_tasks
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class AmphoraFlows(object):
    def get_create_amphora_flow(self):
        """Creates a flow to create an amphora.

        The flow creates the DB record, generates a certificate, boots
        the compute instance, waits (with retries) for connectivity and
        finally marks the amphora ready.

        :returns: The flow for creating the amphora
        """
        create_amphora_flow = linear_flow.Flow(constants.CREATE_AMPHORA_FLOW)
        create_amphora_flow.add(database_tasks.CreateAmphoraInDB(
            provides=constants.AMPHORA_ID))
        create_amphora_flow.add(lifecycle_tasks.AmphoraIDToErrorOnRevertTask(
            requires=constants.AMPHORA_ID))
        create_amphora_flow.add(cert_task.GenerateServerPEMTask(
            provides=constants.SERVER_PEM))
        create_amphora_flow.add(
            database_tasks.UpdateAmphoraDBCertExpiration(
                requires=(constants.AMPHORA_ID, constants.SERVER_PEM)))
        create_amphora_flow.add(compute_tasks.CertComputeCreate(
            requires=(constants.AMPHORA_ID, constants.SERVER_PEM,
                      constants.SERVER_GROUP_ID,
                      constants.BUILD_TYPE_PRIORITY, constants.FLAVOR),
            provides=constants.COMPUTE_ID))
        create_amphora_flow.add(database_tasks.MarkAmphoraBootingInDB(
            requires=(constants.AMPHORA_ID, constants.COMPUTE_ID)))
        create_amphora_flow.add(compute_tasks.ComputeActiveWait(
            requires=(constants.COMPUTE_ID, constants.AMPHORA_ID),
            provides=constants.COMPUTE_OBJ))
        create_amphora_flow.add(database_tasks.UpdateAmphoraInfo(
            requires=(constants.AMPHORA_ID, constants.COMPUTE_OBJ),
            provides=constants.AMPHORA))
        # Connectivity may take a while after boot; wrap the wait task in
        # a retrying subflow.
        retry_subflow = linear_flow.Flow(
            constants.CREATE_AMPHORA_RETRY_SUBFLOW,
            retry=amphora_driver_tasks.AmpRetry())
        retry_subflow.add(
            amphora_driver_tasks.AmphoraComputeConnectivityWait(
                requires=constants.AMPHORA,
                inject={'raise_retry_exception': True}))
        create_amphora_flow.add(retry_subflow)
        create_amphora_flow.add(database_tasks.ReloadAmphora(
            requires=constants.AMPHORA,
            provides=constants.AMPHORA))
        create_amphora_flow.add(amphora_driver_tasks.AmphoraFinalize(
            requires=constants.AMPHORA))
        create_amphora_flow.add(database_tasks.MarkAmphoraReadyInDB(
            requires=constants.AMPHORA))
        return create_amphora_flow
def get_amphora_for_lb_subflow(self, prefix, role):
"""Create a new amphora for lb."""
sf_name = prefix + '-' + constants.CREATE_AMP_FOR_LB_SUBFLOW
create_amp_for_lb_subflow = linear_flow.Flow(sf_name)
create_amp_for_lb_subflow.add(database_tasks.CreateAmphoraInDB(
name=sf_name + '-' + constants.CREATE_AMPHORA_INDB,
requires=constants.LOADBALANCER_ID,
provides=constants.AMPHORA_ID))
create_amp_for_lb_subflow.add(cert_task.GenerateServerPEMTask(
name=sf_name + '-' + constants.GENERATE_SERVER_PEM,
provides=constants.SERVER_PEM))
create_amp_for_lb_subflow.add(
database_tasks.UpdateAmphoraDBCertExpiration(
name=sf_name + '-' + constants.UPDATE_CERT_EXPIRATION,
requires=(constants.AMPHORA_ID, constants.SERVER_PEM)))
create_amp_for_lb_subflow.a | dd(compute_tasks.CertComputeCreate(
name=sf_name + '-' + constants.CERT_COMPUTE_CREATE,
requires=(constants.AMPHORA_I | D, constants.SERVER_PEM,
constants.BUILD_TYPE_PRIORITY,
constants.SERVER_GROUP_ID,
constants.FLAVOR, constants.AVAILABILITY_ZONE),
provides=constants.COMPUTE_ID))
create_amp_for_lb_subflow.add(database_tasks.UpdateAmphoraComputeId(
name=sf_name + '-' + constants.UPDATE_AMPHORA_COMPUTEID,
requires=(constants.AMPHORA_ID, constants.COMPUTE_ID)))
create_amp_for_lb_subflow.add(database_tasks.MarkAmphoraBootingInDB(
name=sf_name + '-' + constants.MARK_AMPHORA_BOOTING_INDB,
requires=(constants.AMPHORA_ID, constants.COMPUTE_ID)))
create_amp_for_lb_subflow.add(compute_tasks.ComputeActiveWait(
name=sf_name + '-' + constants.COMPUTE_WAIT,
requires=(constants.COMPUTE_ID, constants.AMPHORA_ID,
constants.AVAILABILITY_ZONE),
provides=constants.COMPUTE_OBJ))
create_amp_for_lb_subflow.add(database_tasks.UpdateAmphoraInfo(
name=sf_name + '-' + constants.UPDATE_AMPHORA_INFO,
requires=(constants.AMPHORA_ID, constants.COMPUTE_OBJ),
provides=constants.AMPHORA))
create_amp_for_lb_subflow.add(self._retry_flow(sf_name))
create_amp_for_lb_subflow.add(amphora_driver_tasks.AmphoraFinalize(
name=sf_name + '-' + constants.AMPHORA_FINALIZE,
requires=constants.AMPHORA))
create_amp_for_lb_subflow.add(
database_tasks.MarkAmphoraAllocatedInDB(
name=sf_name + '-' + constants.MARK_AMPHORA_ALLOCATED_INDB,
requires=(constants.AMPHORA, constants.LOADBALANCER_ID)))
if role == constants.ROLE_MASTER:
create_amp_for_lb_subflow.add(database_tasks.MarkAmphoraMasterInDB(
name=sf_name + '-' + constants.MARK_AMP_MASTER_INDB,
requires=constants.AMPHORA))
elif role == constants.ROLE_BACKUP:
create_amp_for_lb_subflow.add(database_tasks.MarkAmphoraBackupInDB(
name=sf_name + '-' + constants.MARK_AMP_BACKUP_INDB,
requires=constants.AMPHORA))
elif role == constants.ROLE_STANDALONE:
create_amp_for_lb_subflow.add(
database_tasks.MarkAmphoraStandAloneInDB(
name=sf_name + '-' + constants.MARK_AMP_STANDALONE_INDB,
requires=constants.AMPHORA))
return create_amp_for_lb_subflow
    def _retry_flow(self, sf_name):
        """Build the retrying connectivity-wait subflow for an amphora.

        :param sf_name: Name prefix for the subflow's tasks
        :returns: Subflow waiting (with retries) for amphora connectivity
        """
        retry_task = sf_name + '-' + constants.AMP_COMPUTE_CONNECTIVITY_WAIT
        retry_subflow = linear_flow.Flow(
            sf_name + '-' + constants.CREATE_AMPHORA_RETRY_SUBFLOW,
            retry=amphora_driver_tasks.AmpRetry())
        retry_subflow.add(
            amphora_driver_tasks.AmphoraComputeConnectivityWait(
                name=retry_task, requires=constants.AMPHORA,
                inject={'raise_retry_exception': True}))
        return retry_subflow
def get_delete_amphora_flow(
self, amphora,
retry_attempts=CONF.controller_worker.amphora_delete_retries,
retry_interval=(
CONF.controller_worker.amphora_delete_retry_interval)):
"""Creates a subflow to delete an amphora and it's port.
This flow is idempotent and safe to retry.
:param amphora: An amphora |
rs2/bokeh | bokeh/models/annotations.py | Python | bsd-3-clause | 34,179 | 0.001141 | ''' Renderers for various kinds of annotations that can be added to
Bokeh plots
'''
from __future__ import absolute_import
from six import string_types
from ..core.enums import (AngleUnits, Dimension, FontStyle, LegendClickPolicy, LegendLocation,
Orientation, RenderMode, SpatialUni | ts, VerticalAlign, TextAlign)
from ..core.has_props import abstract
from ..core.properties import (Angle, AngleSpec, Auto, Bool, ColorSpec, Datetime, Dict, DistanceSpec, Either,
Enum, Float, FontSizeSpec, Include, Instance, Int, List, NumberSpec, Overrid | e,
Seq, String, StringSpec, Tuple, value)
from ..core.property_mixins import FillProps, LineProps, TextProps
from ..core.validation import error
from ..core.validation.errors import BAD_COLUMN_NAME, NON_MATCHING_DATA_SOURCES_ON_LEGEND_ITEM_RENDERERS
from ..model import Model
from ..util.serialization import convert_datetime_type
from .formatters import BasicTickFormatter, TickFormatter
from .mappers import ContinuousColorMapper
from .renderers import GlyphRenderer, Renderer
from .sources import ColumnDataSource, DataSource
from .tickers import BasicTicker, Ticker
@abstract
class Annotation(Renderer):
    ''' Base class for all annotation models.

    '''
    plot = Instance(".models.plots.Plot", help="""
    The plot to which this annotation is attached.
    """)
    # Annotations render in the "annotation" level by default.
    level = Override(default="annotation")
@abstract
class TextAnnotation(Annotation):
''' Base class for text annotation models such as labels and titles.
'''
class LegendItem(Model):
'''
'''
def __init__(self, *args, **kwargs):
super(LegendItem, self).__init__(*args, **kwargs)
if isinstance(self.label, string_types):
# Allow convenience of setting label as a string
self.label = value(self.label)
label = StringSpec(default=None, help="""
A label for this legend. Can be a string, or a column of a
ColumnDataSource. If ``label`` is a field, then it must
be in the renderers' data_source.
""")
renderers = List(Instance(GlyphRenderer), help="""
A list of the glyph renderers to draw in the legend. If ``label`` is a field,
then all data_sources of renderers must be the same.
""")
@error(NON_MATCHING_DATA_SOURCES_ON_LEGEND_ITEM_RENDERERS)
def _check_data_sources_on_renderers(self):
if self.label and 'field' in self.label:
if len({r.data_source for r in self.renderers}) != 1:
return str(self)
@error(BAD_COLUMN_NAME)
def _check_field_label_on_data_source(self):
if self.label and 'field' in self.label:
if len(self.renderers) < 1:
return str(self)
source = self.renderers[0].data_source
if self.label.get('field') not in source.column_names:
return str(self)
class Legend(Annotation):
''' Render informational legends for a plot.
'''
location = Either(Enum(LegendLocation), Tuple(Float, Float), default="top_right", help="""
The location where the legend should draw itself. It's either one of
``bokeh.core.enums.LegendLocation``'s enumerated values, or a ``(x, y)``
tuple indicating an absolute location absolute location in screen
coordinates (pixels from the bottom-left corner).
""")
orientation = Enum(Orientation, default="vertical", help="""
Whether the legend entries should be placed vertically or horizontally
when they are drawn.
""")
border_props = Include(LineProps, help="""
The %s for the legend border outline.
""")
border_line_color = Override(default="#e5e5e5")
border_line_alpha = Override(default=0.5)
background_props = Include(FillProps, help="""
The %s for the legend background style.
""")
inactive_props = Include(FillProps, help="""
The %s for the legend background style when inactive.
""")
click_policy = Enum(LegendClickPolicy, default="none", help="""
Defines what happens when a lengend's item is clicked.
""")
background_fill_color = Override(default="#ffffff")
background_fill_alpha = Override(default=0.95)
inactive_fill_color = Override(default="white")
inactive_fill_alpha = Override(default=0.9)
label_props = Include(TextProps, help="""
The %s for the legend labels.
""")
label_text_baseline = Override(default='middle')
label_text_font_size = Override(default={'value': '10pt'})
label_standoff = Int(5, help="""
The distance (in pixels) to separate the label from its associated glyph.
""")
label_height = Int(20, help="""
The minimum height (in pixels) of the area that legend labels should occupy.
""")
label_width = Int(20, help="""
The minimum width (in pixels) of the area that legend labels should occupy.
""")
glyph_height = Int(20, help="""
The height (in pixels) that the rendered legend glyph should occupy.
""")
glyph_width = Int(20, help="""
The width (in pixels) that the rendered legend glyph should occupy.
""")
margin = Int(10, help="""
Amount of margin around the legend.
""")
padding = Int(10, help="""
Amount of padding around the contents of the legend. Only applicable when
when border is visible, otherwise collapses to 0.
""")
spacing = Int(3, help="""
Amount of spacing (in pixles) between legend entries.
""")
items = List(Instance(LegendItem), help="""
A list of :class:`~bokeh.model.annotations.LegendItem` instances to be
rendered in the legend.
This can be specified explicitly, for instance:
.. code-block:: python
legend = Legend(items=[
LegendItem(label="sin(x)" , renderers=[r0, r1]),
LegendItem(label="2*sin(x)" , renderers=[r2]),
LegendItem(label="3*sin(x)" , renderers=[r3, r4])
])
But as a convenience, can also be given more compactly as a list of tuples:
.. code-block:: python
legend = Legend(items=[
("sin(x)" , [r0, r1]),
("2*sin(x)" , [r2]),
("3*sin(x)" , [r3, r4])
])
where each tuple is of the form: *(label, renderers)*.
""").accepts(List(Tuple(String, List(Instance(GlyphRenderer)))), lambda items: [LegendItem(label=item[0], renderers=item[1]) for item in items])
class ColorBar(Annotation):
''' Render a color bar based on a color mapper.
'''
location = Either(Enum(LegendLocation), Tuple(Float, Float),
default="top_right", help="""
The location where the color bar should draw itself. It's either one of
``bokeh.core.enums.LegendLocation``'s enumerated values, or a ``(x, y)``
tuple indicating an absolute location absolute location in screen
coordinates (pixels from the bottom-left corner).
.. warning::
If the color bar is placed in a side panel, the location will likely
have to be set to `(0,0)`.
""")
orientation = Enum(Orientation, default="vertical", help="""
Whether the color bar should be oriented vertically or horizontally.
""")
height = Either(Auto, Int(), help="""
The height (in pixels) that the color scale should occupy.
""")
width = Either(Auto, Int(), help="""
The width (in pixels) that the color scale should occupy.
""")
scale_alpha = Float(1.0, help="""
The alpha with which to render the color scale.
""")
title = String(help="""
The title text to render.
""")
title_props = Include(TextProps, help="""
The %s values for the title text.
""")
title_text_font_size = Override(default={'value': "10pt"})
title_text_font_style = Override(default="italic")
title_standoff = Int(2, help="""
The distance (in pixels) to separate the title from the color bar.
""")
ticker = Instance(Ticker, default=lambda: BasicTicker(), help="""
A Ticker to use for computing locations of axis components.
""")
formatter = Instance(TickFormatter, default=lambda: BasicTickFormatter(), help="""
A TickFormatter to use for formatting the visu |
pitunti/alfaPitunti | plugin.video.alfa/platformcode/logger.py | Python | gpl-3.0 | 1,925 | 0.000519 | # -*- coding: utf-8 -*-
# --------------------------------------------------------------------------------
# Logger (kodi)
# --------------------------------------------------------------------------------
import inspect
import xbmc
from platformcode import config
loggeractive = (config.get_setting("debug") == True)
def log_enable(active):
global loggeractive
logg | eractive = active
def encode_log(message=""):
# Unicode to utf8
if type(message) == unicode:
message = message.encode("utf8")
# All encodings to utf8
elif type(message) == str:
message = unicode(message, "utf8", errors="replace").encode("utf8")
# Objects to string
else:
message = str(message)
return message
def get_caller(message=None):
module = inspect.getmodule(inspect.currentfr | ame().f_back.f_back)
module = module.__name__
function = inspect.currentframe().f_back.f_back.f_code.co_name
if module == "__main__":
module = "alfa"
else:
module = "alfa." + module
if message:
if module not in message:
if function == "<module>":
return module + " " + message
else:
return module + " [" + function + "] " + message
else:
return message
else:
if function == "<module>":
return module
else:
return module + "." + function
def info(texto=""):
if loggeractive:
xbmc.log(get_caller(encode_log(texto)), xbmc.LOGNOTICE)
def debug(texto=""):
if loggeractive:
texto = " [" + get_caller() + "] " + encode_log(texto)
xbmc.log("######## DEBUG #########", xbmc.LOGNOTICE)
xbmc.log(texto, xbmc.LOGNOTICE)
def error(texto=""):
texto = " [" + get_caller() + "] " + encode_log(texto)
xbmc.log("######## ERROR #########", xbmc.LOGERROR)
xbmc.log(texto, xbmc.LOGERROR)
|
mvidalgarcia/indico | indico/modules/events/sessions/models/types.py | Python | mit | 1,690 | 0.000592 | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from sqlalchemy.ext.declarative import decl | ared_attr
from indico.core.db import db
from indico.util.locators import locator_property
from indico.util.string import format_repr, return_ascii
class SessionType(db.Model):
__tablename__ = 'session_types'
@declared_attr
def __table_args__(cls):
return (db.Index('ix_uq_session_types_event_id_name_lower', cls.event_id, db.func.lower(cls.name),
unique=True),
| {'schema': 'events'})
id = db.Column(
db.Integer,
primary_key=True
)
event_id = db.Column(
db.Integer,
db.ForeignKey('events.events.id'),
index=True,
nullable=False
)
name = db.Column(
db.String,
nullable=False
)
code = db.Column(
db.String,
nullable=False,
default=''
)
is_poster = db.Column(
db.Boolean,
nullable=False,
default=False
)
event = db.relationship(
'Event',
lazy=True,
backref=db.backref(
'session_types',
cascade='all, delete-orphan',
lazy=True
)
)
# relationship backrefs:
# - sessions (Session.type)
@return_ascii
def __repr__(self):
return format_repr(self, 'id', _text=self.name)
@locator_property
def locator(self):
return dict(self.event.locator, session_type_id=self.id)
|
IlyaGusev/PersonalPage | PersonalPage/apps/entries/views.py | Python | gpl-2.0 | 252 | 0.003968 | f | rom django.views.generic import DetailView
from entries.models import Entry
class EntryView(DetailView):
model = Entry
template_name = "entry.html"
context_object_name = 'entry'
slug_field = 'sname'
slug_url_kwarg = 'entry_snam | e' |
zouyapeng/horizon-newtouch | openstack_dashboard/dashboards/project/firewalls/tabs.py | Python | apache-2.0 | 5,342 | 0 | # Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http: | //www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specifi | c language governing permissions and limitations
# under the License.
#
# @author: KC Wang, Big Switch Networks
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.firewalls import tables
FirewallsTable = tables.FirewallsTable
PoliciesTable = tables.PoliciesTable
RulesTable = tables.RulesTable
class RulesTab(tabs.TableTab):
table_classes = (RulesTable,)
name = _("Firewall Rules")
slug = "rules"
template_name = "horizon/common/_detail_table.html"
def get_rulestable_data(self):
try:
tenant_id = self.request.user.tenant_id
rules = api.fwaas.rule_list(self.tab_group.request,
tenant_id=tenant_id)
except Exception:
rules = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve rules list.'))
for r in rules:
r.set_id_as_name_if_empty()
return rules
class PoliciesTab(tabs.TableTab):
table_classes = (PoliciesTable,)
name = _("Firewall Policies")
slug = "policies"
template_name = "horizon/common/_detail_table.html"
def get_policiestable_data(self):
try:
tenant_id = self.request.user.tenant_id
policies = api.fwaas.policy_list(self.tab_group.request,
tenant_id=tenant_id)
except Exception:
policies = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve policies list.'))
for p in policies:
p.set_id_as_name_if_empty()
return policies
class FirewallsTab(tabs.TableTab):
table_classes = (FirewallsTable,)
name = _("Firewalls")
slug = "firewalls"
template_name = "horizon/common/_detail_table.html"
def get_firewallstable_data(self):
try:
tenant_id = self.request.user.tenant_id
firewalls = api.fwaas.firewall_list(self.tab_group.request,
tenant_id=tenant_id)
except Exception:
firewalls = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve firewall list.'))
for f in firewalls:
f.set_id_as_name_if_empty()
return firewalls
class RuleDetailsTab(tabs.Tab):
name = _("Firewall Rule Details")
slug = "ruledetails"
template_name = "project/firewalls/_rule_details.html"
failure_url = reverse_lazy('horizon:project:firewalls:index')
def get_context_data(self, request):
rid = self.tab_group.kwargs['rule_id']
try:
rule = api.fwaas.rule_get(request, rid)
except Exception:
exceptions.handle(request,
_('Unable to retrieve rule details.'),
redirect=self.failure_url)
return {'rule': rule}
class PolicyDetailsTab(tabs.Tab):
name = _("Firewall Policy Details")
slug = "policydetails"
template_name = "project/firewalls/_policy_details.html"
failure_url = reverse_lazy('horizon:project:firewalls:index')
def get_context_data(self, request):
pid = self.tab_group.kwargs['policy_id']
try:
policy = api.fwaas.policy_get(request, pid)
except Exception:
exceptions.handle(request,
_('Unable to retrieve policy details.'),
redirect=self.failure_url)
return {'policy': policy}
class FirewallDetailsTab(tabs.Tab):
name = _("Firewall Details")
slug = "firewalldetails"
template_name = "project/firewalls/_firewall_details.html"
failure_url = reverse_lazy('horizon:project:firewalls:index')
def get_context_data(self, request):
fid = self.tab_group.kwargs['firewall_id']
try:
firewall = api.fwaas.firewall_get(request, fid)
except Exception:
exceptions.handle(request,
_('Unable to retrieve firewall details.'),
redirect=self.failure_url)
return {'firewall': firewall}
class FirewallTabs(tabs.TabGroup):
slug = "fwtabs"
tabs = (FirewallsTab, PoliciesTab, RulesTab)
sticky = True
class RuleDetailsTabs(tabs.TabGroup):
slug = "ruletabs"
tabs = (RuleDetailsTab,)
class PolicyDetailsTabs(tabs.TabGroup):
slug = "policytabs"
tabs = (PolicyDetailsTab,)
class FirewallDetailsTabs(tabs.TabGroup):
slug = "firewalltabs"
tabs = (FirewallDetailsTab,)
|
hortonworks/hortonworks-sandbox | desktop/core/ext-py/Twisted/doc/core/howto/listings/pb/exc_client.py | Python | apache-2.0 | 810 | 0.007407 | #! /usr/bin/python
from twisted.spread import pb
from twisted.internet import reactor
def main():
factory = pb.PBClientFactory()
reactor.connectTCP("localhost", 8800, factory)
d = factory.getRootObject()
d.addCallbacks(got_obj)
reactor.run()
def got_obj(obj):
# change "broken" into "broken2" to demonstrate an unhandled exception
d2 = obj.callRemote("broken")
d2.addCallback(working)
d2.addErrback(broken)
def working():
print "erm, it wasn't *supposed* to work.."
def broken(reason):
print "got remote Exception"
# reason should be a Failure (or subclass) holding the MyError exceptio | n
print " | .__class__ =", reason.__class__
print " .getErrorMessage() =", reason.getErrorMessage()
print " .type =", reason.type
reactor.stop()
main()
|
evook/mirall | doc/ocdoc/user_manual/conf.py | Python | gpl-2.0 | 9,650 | 0.007254 | # -*- coding: utf-8 -*-
#
# ownCloud Documentation documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 22 23:16:40 2012-2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'rst2pdf.pdfbuilder']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['../_shared_assets/templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General information about the project.
project = u'ownCloud User Manual'
copyright = u'2012-2014, The ownCloud developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '7.0'
# The full version, including alpha/beta/rc tags.
release = '7.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, s | ectionauthor and moduleauthor directives will be shown in | the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../_shared_assets/themes']
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'owncloud_org'
html_theme_options = {
# "rightsidebar": "true",
}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = "User Manual"
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['../_shared_assets/static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ownCloudUserManual'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('contents', 'ownCloudUserManual.tex', u'ownCloud User Manual',
u'The ownCloud developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = '../_shared_assets/static/logo-blue.pdf'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for pdf page output -----------------------------------------------
pdf_documents = [('contents', u'owncloudUserManual', u'ownCloud User Manual', u'The ownCloud developers'),]
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('contents', 'owncloudusermanual', u'ownCloud User Manual',
[u'The ownCloud developers'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('contents', 'ownCloudUserManual', u'ownCloud User Manual',
u'The ownCloud developers', 'ownCloud', 'The ownCloud User Manual.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footn |
hyperspy/hyperspy | hyperspy/tests/misc/test_utils.py | Python | gpl-3.0 | 4,362 | 0.000689 | # -*- coding: utf-8 -*-
# Copyright 2007-2022 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import pytest
from hyperspy import signals
from hyperspy.misc.utils import (
is_hyperspy_signal,
parse_quantity,
slugify,
strlist2enumeration,
str2num,
swapelem,
fsdict,
closest_power_of_two,
shorten_name,
is_binned,
)
from hyperspy.exceptions import VisibleDeprecationWarning
def test_slugify():
assert slugify("a") == "a"
assert slugify("1a") == "1a"
assert slugify("1") == "1"
assert slugify("a a") == "a_a"
assert slugify(42) == "42"
assert slugify(3.14159) == "314159"
assert slugify("├── Node1") == "Node1"
assert slugify("a", valid_variable_name=True) == "a"
assert slugify("1a", valid_variable_name=True) == "Number_1a"
assert slugify("1", valid_variable_name=True) == "Number_1"
assert slugify("a", valid_variable_name=False) == "a"
assert slugify("1a", valid_variable_name=False) == "1a"
assert slugify("1", valid_variable_name=False) == "1"
def test_parse_quantity():
# From the metadata specification, the quantity is defined as
# "name (units)" without backets in the name of the quantity
assert parse_quanti | ty("a (b)") == ("a", "b")
assert parse_quantity("a (b/(c))") == ("a", "b/(c)")
assert parse_quantity("a (c) (b/(c))") == ("a (c)", "b/(c)")
assert parse_quantity("a [ | b]") == ("a [b]", "")
assert parse_quantity("a [b]", opening="[", closing="]") == ("a", "b")
def test_is_hyperspy_signal():
s = signals.Signal1D(np.zeros((5, 5, 5)))
p = object()
assert is_hyperspy_signal(s) is True
assert is_hyperspy_signal(p) is False
def test_strlist2enumeration():
assert strlist2enumeration([]) == ""
assert strlist2enumeration("a") == "a"
assert strlist2enumeration(["a"]) == "a"
assert strlist2enumeration(["a", "b"]) == "a and b"
assert strlist2enumeration(["a", "b", "c"]) == "a, b and c"
def test_str2num():
assert (
str2num("2.17\t 3.14\t 42\n 1\t 2\t 3")
== np.array([[2.17, 3.14, 42.0], [1.0, 2.0, 3.0]])
).all()
def test_swapelem():
L = ["a", "b", "c"]
swapelem(L, 1, 2)
assert L == ["a", "c", "b"]
def test_fsdict():
parrot = {}
fsdict(
["This", "is", "a", "dead", "parrot"], "It has gone to meet its maker", parrot
)
fsdict(["This", "parrot", "is", "no", "more"], "It is an ex parrot", parrot)
fsdict(
["This", "parrot", "has", "seized", "to", "be"],
"It is pushing up the daisies",
parrot,
)
fsdict([""], "I recognize a dead parrot when I see one", parrot)
assert (
parrot["This"]["is"]["a"]["dead"]["parrot"] == "It has gone to meet its maker"
)
assert parrot["This"]["parrot"]["is"]["no"]["more"] == "It is an ex parrot"
assert (
parrot["This"]["parrot"]["has"]["seized"]["to"]["be"]
== "It is pushing up the daisies"
)
assert parrot[""] == "I recognize a dead parrot when I see one"
def test_closest_power_of_two():
assert closest_power_of_two(5) == 8
assert closest_power_of_two(13) == 16
assert closest_power_of_two(120) == 128
assert closest_power_of_two(973) == 1024
def test_shorten_name():
assert (
shorten_name("And now for soemthing completely different.", 16)
== "And now for so.."
)
# Can be removed in v2.0:
def test_is_binned():
s = signals.Signal1D(np.zeros((5, 5)))
assert is_binned(s) == s.axes_manager[-1].is_binned
with pytest.warns(VisibleDeprecationWarning, match="Use of the `binned`"):
s.metadata.set_item("Signal.binned", True)
assert is_binned(s) == s.metadata.Signal.binned
|
kelsa-pi/unodit | examples/embed dialog/src/Test_embed.py | Python | gpl-3.0 | 27,471 | 0.004405 | # -*- coding: utf-8 -*-
#!/usr/bin/env python
# =============================================================================
#
# Dialog implementation generated from a XDL file.
#
# Created: Sat Jul 9 15:14:39 2016
# by: unodit 0.5
#
# WARNING! All changes made in this file will be overwritten
# if the file is generated again!
#
# =============================================================================
import uno
import unohelper
from com.sun.star.awt import XActionListener
from com.sun.star.task import XJobExecutor
class Test_embed(unohelper.Base, XActionListener, XJobExecutor):
"""
Class documentation...
"""
def __init__(self):
self.LocalContext = uno.getComponentContext()
self.ServiceManager = self.LocalContext.ServiceManager
self.Toolkit = self.ServiceManager.createInstanceWithContext("com.sun.star.awt.ExtToolkit", self.LocalContext)
# -----------------------------------------------------------
# Create dialog and insert controls
# -----------------------------------------------------------
# --------------create dialog container and set model and properties
self.DialogContainer = self.ServiceManager.createInstanceWithContext("com.sun.star.awt.UnoControlDialog", self.LocalContext)
self.DialogModel = self.ServiceManager.createInstance("com.sun.star.awt.UnoControlDialogModel")
self.DialogContainer.setModel(self.DialogModel)
self.DialogModel.Moveable = True
self.DialogModel.Closeable = True
self.DialogModel.Name = "Default"
self.DialogModel.Width = 300
self.DialogModel.PositionX = "60"
self.DialogModel.Height = 220
self.DialogModel.PositionY = "60"
# --------- create an instance of ComboBox control, set properties ---
self.ComboBox1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlComboBoxModel")
self.ComboBox1.TabIndex = 10
self.ComboBox1.Dropdown = True
self.ComboBox1.StringItemList = ('one', 'two')
self.ComboBox1.Name = "ComboBox1"
self.ComboBox1.Width = 60
self.ComboBox1.PositionX = "83"
self.ComboBox1.Height = 20
self.ComboBox1.PositionY = "143"
# inserts the control model into the dialog model
self.DialogModel.insertByName("ComboBox1", self.ComboBox1)
# --------- create an instance of GroupBox control, set properties ---
self.FrameControl1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlGroupBoxModel")
self.FrameControl1.TabIndex = 36
self.FrameControl1.Label = "FrameControl1"
self.FrameControl1.Name = "FrameControl1"
self.FrameControl1.Width = 60
self.FrameControl1.PositionX = "9"
self.FrameControl1.Height = 65
self.FrameControl1.PositionY = "147"
# inserts the control model into the dialog model
self.DialogModel.insertByName("FrameControl1", self.FrameControl1)
# --------- create an instance of Button control, set properties ---
self.CommandButton1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlButtonModel")
self.CommandButton1.Align = 0
self.CommandButton1.TabIndex = 0
self.CommandButton1.Label = "CommandButton1"
self.CommandButton1.Toggle = 1
self.CommandButton1.Name = "CommandButton1"
self.CommandButton1.Width = 60
self.CommandButton1.PositionX = "9"
self.CommandButton1.Height = 20
self.CommandButton1.PositionY = "8"
# inserts the control model into the dialog model
self.DialogModel.insertByName("CommandButton1", self.CommandButton1)
# add the action listener
self.DialogContainer.getControl('CommandButton1').addActionListener(self)
self.DialogContainer.getControl('CommandButton1').setActionCommand('CommandButton1_OnClick')
# --------- create an instance of FixedText control, set properties ---
self.Label8 = self.DialogModel.createInstance("c | om.sun.star.awt.UnoControlFixedTextModel")
self.Label8.TabIndex = 29
self.Label8.Label = "ProgressBar"
self.Label8.Name = "Label8"
self.Label8.Width = 60
self.Label8.PositionX = "83"
self.Label8.Height = 10
self.Label8.PositionY = "170"
# inserts the control model into the dialog model
self.DialogModel.insertByName("Label8", self.Label8)
# --------- create an instance of FixedText | control, set properties ---
self.Label4 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
self.Label4.TabIndex = 25
self.Label4.Label = "NumericField"
self.Label4.Name = "Label4"
self.Label4.Width = 60
self.Label4.PositionX = "158"
self.Label4.Height = 10
self.Label4.PositionY = "76"
# inserts the control model into the dialog model
self.DialogModel.insertByName("Label4", self.Label4)
# --------- create an instance of FileControl control, set properties ---
self.FileControl1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFileControlModel")
self.FileControl1.TabIndex = 18
self.FileControl1.Name = "FileControl1"
self.FileControl1.Text = "/home/sasa"
self.FileControl1.Width = 60
self.FileControl1.PositionX = "235"
self.FileControl1.Height = 20
self.FileControl1.PositionY = "17"
# inserts the control model into the dialog model
self.DialogModel.insertByName("FileControl1", self.FileControl1)
# --------- create an instance of FixedText control, set properties ---
self.Label7 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
self.Label7.TabIndex = 28
self.Label7.Label = "PatternField"
self.Label7.Name = "Label7"
self.Label7.Width = 60
self.Label7.PositionX = "158"
self.Label7.Height = 10
self.Label7.PositionY = "185"
# inserts the control model into the dialog model
self.DialogModel.insertByName("Label7", self.Label7)
# --------- create an instance of FixedText control, set properties ---
self.Label5 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
self.Label5.TabIndex = 26
self.Label5.Label = "CurrencyField"
self.Label5.Name = "Label5"
self.Label5.Width = 60
self.Label5.PositionX = "158"
self.Label5.Height = 10
self.Label5.PositionY = "114"
# inserts the control model into the dialog model
self.DialogModel.insertByName("Label5", self.Label5)
# --------- create an instance of Button control, set properties ---
self.CommandButton2 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlButtonModel")
self.CommandButton2.TabIndex = 1
self.CommandButton2.Label = "CommandButton2"
self.CommandButton2.Enabled = True
self.CommandButton2.Name = "CommandButton2"
self.CommandButton2.Width = 29
self.CommandButton2.PositionY = "33"
self.CommandButton2.Height = 20
self.CommandButton2.PositionX = "9"
# inserts the control model into the dialog model
self.DialogModel.insertByName("CommandButton2", self.CommandButton2)
# add the action listener
self.DialogContainer.getControl('CommandButton2').addActionListener(self)
self.DialogContainer.getControl('CommandButton2').setActionCommand('CommandButton2_OnClick')
# --------- create an instance of SpinButton control, set properties ---
self.SpinButton1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlSpinButtonModel")
self.SpinButton1.TabIndex = 22
self.SpinButton1.Name = "SpinButton1"
self.SpinButton1.Width = 60
self.SpinButton1.PositionX = "235"
self.SpinButton1.Height = 20
self.SpinButton1.PositionY = "167"
# inserts the control model into the dialog model
self |
kylon/pacman-fakeroot | test/pacman/tests/sync702.py | Python | gpl-2.0 | 504 | 0 | self.description = "incoming package rep | laces symlink with directory (order 2)"
lp = pmpkg("pkg2")
lp.files = ["usr/lib/foo",
"lib -> usr/lib"]
self.addpkg2db("local", lp)
p1 = pmpkg("pkg1")
p1.files = ["lib/bar"]
self.addpkg2db("sync", p1)
p2 = pmpkg("pkg2", "1.0-2")
p2.files = ["usr/lib/foo"]
self.addpkg2db("sync", p2)
self.args = "-S pkg1 pkg2"
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_VERSION=pkg2|1.0-2")
self.addrule("PKG_EXIST=pkg1")
self. | addrule("FILE_TYPE=lib|dir")
|
lildadou/Flexget | flexget/utils/imdb.py | Python | mit | 12,292 | 0.001627 | from __future__ import unicode_literals, division, absolute_import
import difflib
import logging
import re
from bs4.element import Tag
from flexget.utils.soup import get_soup
from flexget.utils.requests import Session
from flexget.utils.tools import str_to_int
from flexget.plugin import get_plugin_by_name, PluginError
log = logging.getLogger('utils.imdb')
# IMDb delivers a version of the page which is unparsable to unknown (and some known) user agents, such as requests'
# Spoof the old urllib user agent to keep results consistent
requests = Session()
requests.headers.update({'User-Agent': 'Python-urllib/2.6'})
# requests.headers.update({'User-Agent': random.choice(USERAGENTS)})
# this makes most of the titles to be returned in english translation, but not all of them
requests.headers.update({'Accept-Language': 'en-US,en;q=0.8'})
# give imdb a little break between requests (see: http://flexget.com/ticket/129#comment:1)
requests.set_domain_delay('imdb.com', '3 seconds')
def is_imdb_url(url):
    """Return a truthy match object when *url* points at imdb.com, else None.

    Non-string inputs (e.g. None) also yield None.
    """
    if not isinstance(url, basestring):
        return None
    # Probably should use urlparse.
    imdb_pattern = r'https?://[^/]*imdb\.com/'
    return re.match(imdb_pattern, url)
def extract_id(url):
    """Pull the IMDb identifier (``nm…`` or ``tt…``) out of *url*.

    Returns None when *url* is not a string or contains no identifier.
    """
    if not isinstance(url, basestring):
        return None
    found = re.search(r'((?:nm|tt)[\d]{7})', url)
    return found.group(1) if found else None
def make_url(imdb_id):
    """Build the canonical IMDb title URL for *imdb_id*."""
    return u'http://www.imdb.com/title/{0}/'.format(imdb_id)
class ImdbSearch(object):
    """Searches IMDb's find endpoint and ranks candidate title matches.

    Matching uses difflib similarity between the query and each result's
    title (and quoted "aka" titles), weighted by result position.
    """

    def __init__(self):
        # de-prioritize aka matches a bit
        self.aka_weight = 0.95
        # prioritize first
        self.first_weight = 1.1
        # Candidates scoring below min_match are discarded outright.
        self.min_match = 0.7
        # Minimum score gap required between the top two hits to pick one.
        self.min_diff = 0.01
        self.debug = False
        self.max_results = 10

    def ireplace(self, text, old, new, count=0):
        """Case insensitive string replace"""
        pattern = re.compile(re.escape(old), re.I)
        return re.sub(pattern, new, text, count)

    def smart_match(self, raw_name):
        """Accepts messy name, cleans it and uses information available to make smartest and best match"""
        parser = get_plugin_by_name('parsing').instance.parse_movie(raw_name)
        name = parser.name
        year = parser.year
        if name == '':
            log.critical('Failed to parse name from %s' % raw_name)
            return None
        log.debug('smart_match name=%s year=%s' % (name, str(year)))
        return self.best_match(name, year)

    def best_match(self, name, year=None):
        """Return single movie that best matches name criteria or None"""
        movies = self.search(name)
        if not movies:
            log.debug('search did not return any movies')
            return None

        # remove all movies below min_match, and different year
        # (iterate over a copy so removal during iteration is safe)
        for movie in movies[:]:
            if year and movie.get('year'):
                if movie['year'] != str(year):
                    log.debug('best_match removing %s - %s (wrong year: %s)' % (
                        movie['name'],
                        movie['url'],
                        str(movie['year'])))
                    movies.remove(movie)
                    continue
            if movie['match'] < self.min_match:
                log.debug('best_match removing %s (min_match)' % movie['name'])
                movies.remove(movie)
                continue

        if not movies:
            log.debug('FAILURE: no movies remain')
            return None

        # if only one remains ..
        if len(movies) == 1:
            log.debug('SUCCESS: only one movie remains')
            return movies[0]

        # check min difference between best two hits
        diff = movies[0]['match'] - movies[1]['match']
        if diff < self.min_diff:
            log.debug('unable to determine correct movie, min_diff too small (`%s` <-?-> `%s`)' %
                      (movies[0], movies[1]))
            for m in movies:
                log.debug('remain: %s (match: %s) %s' % (m['name'], m['match'], m['url']))
            return None
        else:
            return movies[0]

    def search(self, name):
        """Return array of movie details (dict)"""
        log.debug('Searching: %s' % name)
        url = u'http://www.imdb.com/find'
        # This will only include movies searched by title in the results
        params = {'q': name, 's': 'tt', 'ttype': 'ft'}

        # NOTE(review): 'Serch' is a typo in this debug message.
        log.debug('Serch query: %s' % repr(url))
        page = requests.get(url, params=params)
        actual_url = page.url

        movies = []
        # in case we got redirected to movie page (perfect match)
        re_m = re.match(r'.*\.imdb\.com/title/tt\d+/', actual_url)
        if re_m:
            actual_url = re_m.group(0)
            log.debug('Perfect hit. Search got redirected to %s' % actual_url)
            movie = {}
            movie['match'] = 1.0
            movie['name'] = name
            movie['imdb_id'] = extract_id(actual_url)
            movie['url'] = make_url(movie['imdb_id'])
            movie['year'] = None  # skips year check
            movies.append(movie)
            return movies

        # the god damn page has declared a wrong encoding
        soup = get_soup(page.text)

        section_table = soup.find('table', 'findList')
        if not section_table:
            # Returns None (not []) in this branch; best_match treats both
            # the same via `if not movies`.
            log.debug('results table not found')
            return

        rows = section_table.find_all('td', 'result_text')
        if not rows:
            # NOTE(review): no early return here; the loop below simply
            # iterates zero rows and an empty list is returned.
            log.debug('Titles section does not have links')
        for count, row in enumerate(rows):
            # Title search gives a lot of results, only check the first ones
            # NOTE(review): `>` means max_results + 1 rows are examined.
            if count > self.max_results:
                break

            movie = {}
            additional = re.findall(r'\((.*?)\)', row.text)
            if len(additional) > 0:
                movie['year'] = additional[-1]

            link = row.find_next('a')
            movie['name'] = link.text
            movie['imdb_id'] = extract_id(link.get('href'))
            movie['url'] = make_url(movie['imdb_id'])
            log.debug('processing name: %s url: %s' % (movie['name'], movie['url']))

            # calc & set best matching ratio
            seq = difflib.SequenceMatcher(lambda x: x == ' ', movie['name'].title(), name.title())
            ratio = seq.ratio()

            # check if some of the akas have better ratio
            for aka in link.parent.find_all('i'):
                aka = aka.next.string
                match = re.search(r'".*"', aka)
                if not match:
                    log.debug('aka `%s` is invalid' % aka)
                    continue
                aka = match.group(0).replace('"', '')
                log.trace('processing aka %s' % aka)
                seq = difflib.SequenceMatcher(lambda x: x == ' ', aka.title(), name.title())
                aka_ratio = seq.ratio()
                if aka_ratio > ratio:
                    ratio = aka_ratio * self.aka_weight
                    log.debug('- aka `%s` matches better to `%s` ratio %s (weighted to %s)' %
                              (aka, name, aka_ratio, ratio))

            # prioritize items by position
            position_ratio = (self.first_weight - 1) / (count + 1) + 1
            log.debug('- prioritizing based on position %s `%s`: %s' % (count, movie['url'], position_ratio))
            ratio *= position_ratio

            # store ratio
            movie['match'] = ratio
            movies.append(movie)

        movies.sort(key=lambda x: x['match'], reverse=True)
        return movies
class ImdbParser(object):
"""Quick-hack to parse relevant imdb details"""
def __init__(self):
self.genres = []
self.languages = []
self.actors = {}
self.directors = {}
self.score = 0.0
self.votes = 0
self.year = 0
self.plot_outline = None
self.name = None
self.original_name = None
self.url = None
self.imdb_id = None
self.photo = None
self.mpaa_rating = ''
def __str__(self):
|
anhstudios/swganh | data/scripts/templates/object/tangible/mission/quest_item/shared_sayama_edosun_q2_needed.py | Python | mit | 481 | 0.045738 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the Tangible template for the sayama_edosun_q2_needed quest item.

    Autogenerated SWGANH template factory; only the marked MODIFICATIONS
    section is meant to be hand-edited.
    """
    result = Tangible()
    # Restored: this assignment was corrupted in the source dump
    # ('result.template = | "obje | ct/...'); path matches the filename
    # and the stfName entry below.
    result.template = "object/tangible/mission/quest_item/shared_sayama_edosun_q2_needed.iff"
    result.attribute_template_id = -1
    result.stfName("loot_nboo_n","sayama_edosun_q2_needed")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
wagnerand/zamboni | mkt/collections/filters.py | Python | bsd-3-clause | 6,380 | 0.000784 | from django import forms
from django.core.validators import EMPTY_VALUES
from django_filters.filters import ChoiceFilter, ModelChoiceFilter
from django_filters.filterset import FilterSet
import amo
import mkt
from addons.models import Category
from mkt.api.forms import SluggableModelChoiceField
from mkt.collections.models import Collection
class SlugChoiceFilter(ChoiceFilter):
    """ChoiceFilter that accepts a slug, a numeric id, or empty/None.

    ``choices_dict`` maps slug -> object; objects are assumed to expose an
    ``.id`` attribute (e.g. carrier/region constants).
    """

    def __init__(self, *args, **kwargs):
        self.choices_dict = kwargs.pop('choices_dict')
        # Create a choice dynamically to allow None, slugs and ids.
        slugs_choices = self.choices_dict.items()
        ids_choices = [(v.id, v) for v in self.choices_dict.values()]
        kwargs['choices'] = [(None, None)] + slugs_choices + ids_choices
        return super(SlugChoiceFilter, self).__init__(*args, **kwargs)

    def filter(self, qs, value):
        # Empty string and None both mean "filter on NULL".
        if value == '' or value is None:
            value = None
        elif not value.isdigit():
            # We are passed a slug, get the id by looking at the choices
            # dict, defaulting to None if no corresponding value is found.
            value = self.choices_dict.get(value, None)
            if value is not None:
                value = value.id
        # Digit strings fall through unchanged; the ORM coerces them.
        return qs.filter(**{self.name: value})
class SlugModelChoiceFilter(ModelChoiceFilter):
    """ModelChoiceFilter whose form field resolves either a slug or a pk."""
    field_class = SluggableModelChoiceField

    def filter(self, qs, value):
        # Filter with the configured lookup type (e.g. category__exact).
        return qs.filter(**{'%s__%s' % (self.name, self.lookup_type): value})
class CollectionFilterSet(FilterSet):
    """FilterSet for Collection supporting carrier/region slugs and category.

    Overrides queryset construction (copied from django-filter) so that
    'empty' values can still be filtered on (as NULL lookups).
    """
    # Note: the filter names must match what ApiSearchForm and CategoryViewSet
    # are using.
    carrier = SlugChoiceFilter(name='carrier',
        choices_dict=mkt.carriers.CARRIER_MAP)
    region = SlugChoiceFilter(name='region',
        choices_dict=mkt.regions.REGION_LOOKUP)
    cat = SlugModelChoiceFilter(name='category',
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP),
        sluggable_to_field_name='slug',)

    class Meta:
        model = Collection
        # All fields are provided above, but django-filter needs Meta.field to
        # exist.
        fields = []

    def get_queryset(self):
        """
        Return the queryset to use for the filterset.

        Copied from django-filter qs property, modified to support filtering on
        'empty' values, at the expense of multi-lookups like 'x < 4 and x > 2'.
        """
        valid = self.is_bound and self.form.is_valid()

        if self.strict and self.is_bound and not valid:
            # Attach the validation errors so callers can report them.
            qs = self.queryset.none()
            qs.filter_errors = self.form.errors
            return qs

        # Start with all the results and filter from there.
        qs = self.queryset.all()
        for name, filter_ in self.filters.items():
            if valid:
                if name in self.form.data:
                    value = self.form.cleaned_data[name]
                else:
                    # Key absent from the submitted data: skip this filter
                    # entirely (this is what enables "empty value" filtering
                    # for keys that ARE present).
                    continue
            else:
                raw_value = self.form[name].value()
                try:
                    value = self.form.fields[name].clean(raw_value)
                except forms.ValidationError:
                    if self.strict:
                        return self.queryset.none()
                    else:
                        continue

            # At this point we should have valid & clean data.
            qs = filter_.filter(qs, value)

        # Optional ordering.
        if self._meta.order_by:
            order_field = self.form.fields[self.order_by_field]
            data = self.form[self.order_by_field].data
            ordered = None
            try:
                ordered = order_field.clean(data)
            except forms.ValidationError:
                pass
            if ordered in EMPTY_VALUES and self.strict:
                ordered = self.form.fields[self.order_by_field].choices[0][0]
            if ordered:
                qs = qs.order_by(*self.get_order_by(ordered))

        return qs

    @property
    def qs(self):
        # Cache the filtered queryset on first access.
        if hasattr(self, '_qs'):
            return self._qs
        self._qs = self.get_queryset()
        return self._qs
class CollectionFilterSetWithFallback(CollectionFilterSet):
    """
    FilterSet with a fallback mechanism, dropping filters in a certain order
    if no results are found.
    """

    # Combinations of fields to try to set to NULL, in order, when no results
    # are found. See `next_fallback()`.
    fields_fallback_order = (
        ('region',),
        ('carrier',),
        ('region', 'carrier',)
    )

    def next_fallback(self):
        """
        Yield the next set of filters to set to NULL when refiltering the
        queryset to find results. See `refilter_queryset()`.
        """
        for f in self.fields_fallback_order:
            yield f

    def refilter_queryset(self):
        """
        Reset self.data, then override fields yield by the `fallback` generator
        to NULL. Then recall the `qs` property and return it.

        When we are using this FilterSet, we really want to return something,
        even if it's less relevant to the original query. When the `qs`
        property is evaluated, if no results are found, it will call this
        method to refilter the queryset in order to find /something/.

        Can raise StopIteration if the fallback generator is exhausted.
        """
        self.data = self.original_data.copy()
        self.fields_to_null = next(self.fallback)
        for field in self.fields_to_null:
            if field in self.data:
                self.data[field] = None
        # Drop the cached form so it is rebuilt from the new data.
        del self._form
        # Re-enters the qs property; since _qs is not yet cached this may
        # recurse through refilter_queryset() until results are found or
        # the fallback generator is exhausted.
        return self.qs

    def __init__(self, *args, **kwargs):
        super(CollectionFilterSetWithFallback, self).__init__(*args, **kwargs)
        # Keep a pristine copy of the query data so each fallback pass
        # starts from the original request.
        self.original_data = self.data.copy()
        self.fallback = self.next_fallback()
        self.fields_to_null = None

    @property
    def qs(self):
        if hasattr(self, '_qs'):
            return self._qs
        qs = self.get_queryset()
        if hasattr(qs, 'filter_errors'):
            # Immediately return if there was an error.
            self._qs = qs
            return self._qs
        elif not qs.exists():
            try:
                qs = self.refilter_queryset()
            except StopIteration:
                # All fallbacks exhausted; return the empty queryset.
                pass
        self._qs = qs
        # Expose which fields were nulled so the API can report the fallback.
        self._qs.filter_fallback = self.fields_to_null
        return self._qs
|
pgergov/belmis | config/wsgi.py | Python | mit | 1,443 | 0 | """
WSGI config for belmis project.
This module contains the WSGI application used by Django's development server
and any produ | ction WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For | example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

from django.core.wsgi import get_wsgi_application

# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
# NOTE: setdefault only applies when the variable is unset, so an explicit
# environment variable always wins over this production default.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
cnamejj/PyProc | regentest/self_sched.py | Python | gpl-2.0 | 3,267 | 0.003979 | #!/usr/bin/env python
"""Handle records from PID specific /proc/PID/sched data files"""
import regentest as RG
import ProcHandlers as PH
import ProcBaseRoutines as PBR
PFC = PH.ProcFieldConstants
# ---
# pylint: disable=R0914
def re_self_sched(inprecs):
    """Iterate through parsed records and re-generate data file"""
    # Python 2 code (print statements, has_key, long). Templates below are
    # width-padded to line values up with the file's horizontal rule.

    __headtemp = "{prog:s} ({pid:d}, #threads: {threads:d})\n\
{hrule:s}"
    # Outer templates: the {dlen} placeholder is filled in once the rule
    # width is known, producing the real per-type format strings.
    __strtemptemp = "{{desc:<{dlen:d}s}}:{{count:>21s}}"
    __longtemptemp = "{{desc:<{dlen:d}s}}:{{count:21d}}"
    __floattemptemp = "{{desc:<{dlen:d}s}}:{{count:21.6f}}"
    __hilotemp = "{hi:14d}.{low:06d}"
    __numamigtemp = "{desc:s} {count:d}"
    __numafaulttemp = "{label:s} {idx:d}, {node:d}, {cpu:d}, {home:d}, \
{flt:d}"
    __template = ()

    # __hilit itself is unused; iterating drives the parser, which exposes
    # the current record via inprecs.field / inprecs.hit_order.
    for __hilit in inprecs:
        __ff = inprecs.field
        __hits = inprecs.hit_order

        # One-time setup on the first record: size the templates from the
        # horizontal-rule width and build key->description/converter maps
        # from the parse rules.
        if len(__template) == 0:
            __dlen = len(__ff[PFC.F_HRULE]) - 22
            __strtemp = __strtemptemp.format(dlen=__dlen)
            __longtemp = __longtemptemp.format(dlen=__dlen)
            __floattemp = __floattemptemp.format(dlen=__dlen)
            __template = { str: __strtemp, long: __longtemp,
                    float: __floattemp }

            __keydesc = dict()
            __keyconv = dict()

            __rule_list = inprecs.parse_rule
            for __seq in __rule_list:
                __rule = __rule_list[__seq][0]
                try:
                    __key = __rule[PBR.FIELD_NAME]
                    __keydesc[__key] = __rule[PBR.PREFIX_VAL]
                    try:
                        __keyconv[__key] = __rule[PBR.CONVERSION]
                    except KeyError:
                        # No explicit converter: treat the field as a string.
                        __keyconv[__key] = str
                except KeyError:
                    pass

        # Fields stored as microsecond "two longs" are rendered as
        # seconds.microseconds strings.
        for __key in inprecs.two_longs:
            try:
                __keyconv[__key] = str
                __ff[__key] = __hilotemp.format(hi=__ff[__key] / 1000000,
                        low=__ff[__key] % 1000000)
            except KeyError:
                pass

        print __headtemp.format(prog=__ff[PFC.F_PROGRAM], pid=__ff[PFC.F_PID],
                threads=__ff[PFC.F_THREADS], hrule=__ff[PFC.F_HRULE])

        # Emit fields in the order they were encountered in the source file.
        for __seq in range(0, len(__hits)):
            __key = __hits[__seq]

            if __keydesc.has_key(__key):
                if __key == PFC.F_NUMA_MIGRATE:
                    print __numamigtemp.format(desc=__keydesc[__key],
                            count=__ff[__key])
                else:
                    # NOTE(review): __val is assigned but never used.
                    __val = __ff[__key]
                    print __template[__keyconv[__key]].format(
                            desc=__keydesc[__key], count=__ff[__key])

        if __ff.has_key(PFC.F_NUMA_FAULTS):
            __faultlist = __ff[PFC.F_NUMA_FAULTS]
            for __seq in range(0, len(__faultlist)):
                __fset = __faultlist[__seq]
                print __numafaulttemp.format(idx=__fset[PFC.F_INDEX],
                        node=__fset[PFC.F_NODE], cpu=__fset[PFC.F_CPU],
                        home=__fset[PFC.F_HOME], flt=__fset[PFC.F_FAULT],
                        label=__fset[PFC.F_NUMA_FAULTS_LAB])
# pylint: enable=R0914
#...+....1....+....2....+....3....+....4....+....5....+....6....+....7....+....8
# Register this re-generator in the global RECREATOR dispatch table, keyed by
# the handler for /proc/self/sched.
RG.RECREATOR[PH.GET_HANDLER("/proc/self/sched")] = re_self_sched
|
Willyham/tchannel-python | tchannel/testing/vcr/proxy/ttypes.py | Python | mit | 6,330 | 0.012638 | # Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Autogenerated by Thrift Compiler (0.9.2)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:tornado,dynamic,slots,dynexc=VCRThriftError,dynbase=VCRThriftBase,dynimport=from tchannel.testing.vcr.thrift import *
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from tchannel.testing.vcr.thrift import *
class StatusCode(VCRThriftBase):
    """Thrift enum: outcome code stored with a recorded response.

    Thrift-generated code (thrift 0.9.2) -- do not hand-edit.
    """
    SUCCESS = 0
    FAILURE = 1

    # Bidirectional value<->name maps emitted by the Thrift generator.
    _VALUES_TO_NAMES = {
        0: "SUCCESS",
        1: "FAILURE",
    }

    _NAMES_TO_VALUES = {
        "SUCCESS": 0,
        "FAILURE": 1,
    }
class ArgScheme(VCRThriftBase):
    """Thrift enum: TChannel argument scheme of a recorded call.

    Thrift-generated code (thrift 0.9.2) -- do not hand-edit.
    """
    RAW = 0
    JSON = 1
    THRIFT = 2

    _VALUES_TO_NAMES = {
        0: "RAW",
        1: "JSON",
        2: "THRIFT",
    }

    _NAMES_TO_VALUES = {
        "RAW": 0,
        "JSON": 1,
        "THRIFT": 2,
    }
class TransportHeader(VCRThriftBase):
    """A single transport-level header (key/value pair) on a request.

    Thrift-generated struct -- do not hand-edit.

    Attributes:
     - key
     - value
    """

    __slots__ = [
        'key',
        'value',
    ]

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'key', None, None, ),  # 1
        (2, TType.STRING, 'value', None, None, ),  # 2
    )

    def __init__(self, key=None, value=None,):
        self.key = key
        self.value = value

    def __hash__(self):
        # Generator-emitted 31-multiplier hash over all fields.
        value = 17
        value = (value * 31) ^ hash(self.key)
        value = (value * 31) ^ hash(self.value)
        return value
class Request(VCRThriftBase):
    """A recorded outgoing request, as persisted in a VCR cassette.

    Thrift-generated struct -- do not hand-edit.

    Attributes:
     - serviceName
     - endpoint
     - headers
     - body
     - hostPort
     - argScheme
     - transportHeaders
    """

    __slots__ = [
        'serviceName',
        'endpoint',
        'headers',
        'body',
        'hostPort',
        'argScheme',
        'transportHeaders',
    ]

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'serviceName', None, None, ),  # 1
        (2, TType.STRING, 'endpoint', None, None, ),  # 2
        (3, TType.STRING, 'headers', None, "", ),  # 3
        (4, TType.STRING, 'body', None, None, ),  # 4
        (5, TType.STRING, 'hostPort', None, "", ),  # 5
        (6, TType.I32, 'argScheme', None, 0, ),  # 6
        (7, TType.LIST, 'transportHeaders', (TType.STRUCT,(TransportHeader, TransportHeader.thrift_spec)), [
        ], ),  # 7
    )

    def __init__(self, serviceName=None, endpoint=None, headers=thrift_spec[3][4], body=None, hostPort=thrift_spec[5][4], argScheme=thrift_spec[6][4], transportHeaders=thrift_spec[7][4],):
        self.serviceName = serviceName
        self.endpoint = endpoint
        self.headers = headers
        self.body = body
        self.hostPort = hostPort
        self.argScheme = argScheme
        # The spec's list default is a shared sentinel; replace it with a
        # fresh list per instance (generator's guard against mutable default).
        if transportHeaders is self.thrift_spec[7][4]:
            transportHeaders = [
            ]
        self.transportHeaders = transportHeaders

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.serviceName)
        value = (value * 31) ^ hash(self.endpoint)
        value = (value * 31) ^ hash(self.headers)
        value = (value * 31) ^ hash(self.body)
        value = (value * 31) ^ hash(self.hostPort)
        value = (value * 31) ^ hash(self.argScheme)
        value = (value * 31) ^ hash(self.transportHeaders)
        return value
class Response(VCRThriftBase):
    """A recorded response, as persisted in a VCR cassette.

    Thrift-generated struct -- do not hand-edit.

    Attributes:
     - code
     - headers
     - body
    """

    __slots__ = [
        'code',
        'headers',
        'body',
    ]

    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'code', None, None, ),  # 1
        (2, TType.STRING, 'headers', None, "", ),  # 2
        (3, TType.STRING, 'body', None, None, ),  # 3
    )

    def __init__(self, code=None, headers=thrift_spec[2][4], body=None,):
        self.code = code
        self.headers = headers
        self.body = body

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.code)
        value = (value * 31) ^ hash(self.headers)
        value = (value * 31) ^ hash(self.body)
        return value
class CannotRecordInteractionsError(VCRThriftError):
    """
    Raised when the record mode for a cassette prevents recording new
    interactions for it.

    Thrift-generated exception -- do not hand-edit.

    Attributes:
     - message
    """

    __slots__ = [
        'message',
    ]

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'message', None, None, ),  # 1
    )

    def __init__(self, message=None,):
        self.message = message

    def __str__(self):
        return repr(self)

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.message)
        return value
class RemoteServiceError(VCRThriftError):
    """
    Raised when the remote service throws a protocol error.

    Thrift-generated exception -- do not hand-edit.

    Attributes:
     - code
     - message
    """

    __slots__ = [
        'code',
        'message',
    ]

    thrift_spec = (
        None,  # 0
        (1, TType.BYTE, 'code', None, None, ),  # 1
        (2, TType.STRING, 'message', None, None, ),  # 2
    )

    def __init__(self, code=None, message=None,):
        self.code = code
        self.message = message

    def __str__(self):
        return repr(self)

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.code)
        value = (value * 31) ^ hash(self.message)
        return value
class VCRServiceError(VCRThriftError):
    """
    A generic error for VCR exceptions not covered elsewhere.

    Thrift-generated exception -- do not hand-edit.

    Attributes:
     - message
    """

    __slots__ = [
        'message',
    ]

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'message', None, None, ),  # 1
    )

    def __init__(self, message=None,):
        self.message = message

    def __str__(self):
        return repr(self)

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.message)
        return value
|
rbracken/internbot | plugins/pick/choices.py | Python | bsd-2-clause | 218 | 0.013761 | # Add y | our own choices here!
# Candidate pools the "pick" plugin chooses from.
fruit = ["apples", "oranges", "pears", "grapes", "blueberries"]
lunch = ["pho", "timmies", "thai", "burgers", "buffet!", "indian", "montanas"]
# Maps a category name to its list of choices; add new categories here.
situations = {"fruit":fruit, "lunch":lunch}
| |
GbalsaC/bitnamiP | edx-val/edxval/migrations/0001_initial.py | Python | agpl-3.0 | 7,559 | 0.007408 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for edxval.

    Auto-generated (South `schemamigration`); creates the Profile, Video,
    CourseVideo, EncodedVideo and Subtitle tables. Regenerate rather than
    hand-editing.
    """

    def forwards(self, orm):
        # Adding model 'Profile'
        db.create_table('edxval_profile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('profile_name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
            ('extension', self.gf('django.db.models.fields.CharField')(max_length=10)),
            ('width', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('height', self.gf('django.db.models.fields.PositiveIntegerField')()),
        ))
        db.send_create_signal('edxval', ['Profile'])

        # Adding model 'Video'
        db.create_table('edxval_video', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('edx_video_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
            ('client_video_id', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
            ('duration', self.gf('django.db.models.fields.FloatField')()),
        ))
        db.send_create_signal('edxval', ['Video'])

        # Adding model 'CourseVideo'
        db.create_table('edxval_coursevideo', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('course_id', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('video', self.gf('django.db.models.fields.related.ForeignKey')(related_name='courses', to=orm['edxval.Video'])),
        ))
        db.send_create_signal('edxval', ['CourseVideo'])

        # Adding unique constraint on 'CourseVideo', fields ['course_id', 'video']
        db.create_unique('edxval_coursevideo', ['course_id', 'video_id'])

        # Adding model 'EncodedVideo'
        db.create_table('edxval_encodedvideo', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('file_size', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('bitrate', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('profile', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['edxval.Profile'])),
            ('video', self.gf('django.db.models.fields.related.ForeignKey')(related_name='encoded_videos', to=orm['edxval.Video'])),
        ))
        db.send_create_signal('edxval', ['EncodedVideo'])

        # Adding model 'Subtitle'
        db.create_table('edxval_subtitle', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('video', self.gf('django.db.models.fields.related.ForeignKey')(related_name='subtitles', to=orm['edxval.Video'])),
            ('fmt', self.gf('django.db.models.fields.CharField')(max_length=20, db_index=True)),
            ('language', self.gf('django.db.models.fields.CharField')(max_length=8, db_index=True)),
            ('content', self.gf('django.db.models.fields.TextField')(default='')),
        ))
        db.send_create_signal('edxval', ['Subtitle'])

    def backwards(self, orm):
        # Constraints must be dropped before the table that carries them.
        # Removing unique constraint on 'CourseVideo', fields ['course_id', 'video']
        db.delete_unique('edxval_coursevideo', ['course_id', 'video_id'])

        # Deleting model 'Profile'
        db.delete_table('edxval_profile')

        # Deleting model 'Video'
        db.delete_table('edxval_video')

        # Deleting model 'CourseVideo'
        db.delete_table('edxval_coursevideo')

        # Deleting model 'EncodedVideo'
        db.delete_table('edxval_encodedvideo')

        # Deleting model 'Subtitle'
        db.delete_table('edxval_subtitle')

    # Frozen ORM snapshot South uses to build the `orm` argument above.
    models = {
        'edxval.coursevideo': {
            'Meta': {'unique_together': "(('course_id', 'video'),)", 'object_name': 'CourseVideo'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'courses'", 'to': "orm['edxval.Video']"})
        },
        'edxval.encodedvideo': {
            'Meta': {'object_name': 'EncodedVideo'},
            'bitrate': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'file_size': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'profile': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['edxval.Profile']"}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'encoded_videos'", 'to': "orm['edxval.Video']"})
        },
        'edxval.profile': {
            'Meta': {'object_name': 'Profile'},
            'extension': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'height': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'profile_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'width': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        'edxval.subtitle': {
            'Meta': {'object_name': 'Subtitle'},
            'content': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'fmt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '8', 'db_index': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subtitles'", 'to': "orm['edxval.Video']"})
        },
        'edxval.video': {
            'Meta': {'object_name': 'Video'},
            'client_video_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'duration': ('django.db.models.fields.FloatField', [], {}),
            'edx_video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }

    complete_apps = ['edxval']
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.