| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
OpenTouch/night-watch | src/nw/providers/Ping.py | Python | apache-2.0 | 8,878 | 0.01194 |
# Copyright (c) 2014 Alcatel-Lucent Enterprise
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nw.providers.Provider import Provider
import subprocess
import re
from logging import getLogger
# /!\ Warning: this Provider uses the ping system command and has been designed for Linux (Debian Wheezy).
# List of data the Ping Provider can return (set in Provider's config field 'requested_data').
# If the Provider is configured with another requested_data, an exception is raised.
# If no requested_data is configured for Ping Provider, status is used by default.
_data_available = [
'status', # returns the status code (integer) of ping command execution: 0 = success, other = error occurred
'ping_response', # returns the whole std output of ping command (string)
'pkt_transmitted', # returns the number of packets transmitted (integer) (extracted from stdout of ping command using a regex)
'pkt_received', # returns the number of packets received (integer) (extracted from stdout of ping command using a regex)
'pkt_loss', # returns the packet loss percentage (float) (extracted from stdout of ping command using a regex)
'ping_avg', # returns the average ping time (in ms) (float) (extracted from stdout of ping command using a regex)
'ping_min', # returns the min ping time (in ms) (float) (extracted from stdout of ping command using a regex)
'ping_max' # returns the max ping time (in ms) (float) (extracted from stdout of ping command using a regex)
]
class Ping(Provider):
# Overload _mandatory_parameters and _optional_parameters to list the parameters required by the Ping provider
_mandatory_parameters = [
'ping_addr' # IP address or hostname of the machine to ping
]
_optional_parameters = [
'requested_data', # (string) Requested data (default is 'status' which returns the status code of ping command execution). See _data_available for available options.
'count', # (integer) -c option of ping: Stop after sending (and receiving) count ECHO_RESPONSE packets. If not defined, default value is 1.
'timeout' # (integer) -W option of ping: Time to wait for a response, in seconds. The option affects only the timeout in absence of any responses, otherwise ping waits for two RTTs.
]
def __init__(self, options):
Provider.__init__(self, options)
# Build ping command
self.ping_cmd = "ping"
# Add -c option
if not self._config.get('count'):
getLogger(__name__).info('Option "count" is not provided to provider Ping, use default value (1)')
self.count = 1
else:
self.count = self._config.get('count')
self.ping_cmd += " -c " + str(self.count)
# Add -W option if requested
if self._config.get('timeout'):
self.ping_cmd += " -W " + str(self._config.get('timeout'))
# Add ping address
self.ping_cmd += " " + self._config.get('ping_addr')
# Load requested data (default is 'status')
self.requested_data = self._config.get('requested_data') or "status"
def process(self):
if (self.requested_data == "status"):
return self._getPingStatus()
else:
# TODO: better management of ping errors
try:
ping_data = self._performPing()
except:
return None # Ping error
# Return the requested data
if (self.requested_data == "ping_response"):
return ping_data.ping_response
if (self.requested_data == "pkt_transmitted"):
return ping_data.pkt_transmitted
if (self.requested_data == "pkt_received"):
return ping_data.pkt_received
elif (self.requested_data == "pkt_loss"):
return ping_data.pkt_loss
if (self.requested_data == "ping_avg"):
return ping_data.ping_avg
if (self.requested_data == "ping_min"):
return ping_data.ping_min
if (self.requested_data == "ping_max"):
return ping_data.ping_max
# Simply execute the ping command to retrieve its return code
def _getPingStatus(self):
getLogger(__name__).debug('Call ping command with the following options: ' + self.ping_cmd)
returncode = subprocess.call(self.ping_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
getLogger(__name__).debug('Ping command returned status code: ' + str(returncode))
return returncode
# Execute the ping command and return a PingData object in case of success
def _performPing(self):
getLogger(__name__).debug('Call ping command with the following options: ' + self.ping_cmd)
(output, error) = subprocess.Popen(self.ping_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True).communicate()
if output:
getLogger(__name__).debug('Ping command returned: ' + output)
return PingData(output)
else:
getLogger(__name__).debug('Ping error: ' + error)
raise Exception(error)
# This function is called by __init__ of the abstract Provider class; it verifies during object initialization that the Provider's configuration is valid.
def _isConfigValid(self):
Provider._isConfigValid(self)
# If requested_data is provided, check if it is managed by Ping provider
if self._config.get('requested_data') and not (self._config.get('requested_data') in _data_available):
getLogger(__name__).error('Parameter requested_data "' + self._config.get('requested_data') + '" provided to provider Ping is not allowed. Allowed values are: ' + str(_data_available))
return False
return True
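# Usage sketch (not part of the original module): assuming the Provider base
# class simply hands the options dict through to self._config, a Ping provider
# could be driven roughly like this:
#
#   options = {'ping_addr': '127.0.0.1', 'count': 3, 'timeout': 2,
#              'requested_data': 'ping_avg'}
#   provider = Ping(options)
#   print(provider.process())  # average RTT in ms, or None on ping error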
class PingData:
"""
Class extracting ping statistics data using regexps on ping command response.
/!\ Warning: the regexps used to extract information apply to the string returned by the ping command on Linux (tested on Debian Wheezy).
Extracted data are:
- ping_response = the whole output of ping command
- pkt_transmitted = number of packets transmitted (integer)
- pkt_received = number of packets received (integer)
- pkt_loss = packet loss rate in percentage (float)
- ping_min = ping minimum response time in milliseconds (float)
- ping_avg = ping average response time in milliseconds (float)
- ping_max = ping maximum response time in milliseconds (float)
- ping_stdev = standard deviation of ping response time in milliseconds (float)
"""
def __init__(self, ping_response):
if not ping_response:
raise Exception("Can't create PingData object without ping response data")
self.ping_response = ping_response
# Extract packets data from statistics section of Ping response
result = re.search('(?P<pkt_transmitted>\d)\spackets\stransmitted,\s(?P<pkt_received>\d)?\s?\w*\sreceived,\s(?P<pkt_loss>[\d]*?\.?[\d]*)\%\spacket\slo
|
richardliaw/ray | rllib/agents/trainer_template.py | Python | apache-2.0 | 8,307 | 0 |
import logging
from typing import Callable, Iterable, List, Optional, Type
from ray.rllib.agents.trainer import Trainer, COMMON_CONFIG
from ray.rllib.env.env_context import EnvContext
from ray.rllib.evaluation.worker_set import WorkerSet
from ray.rllib.execution.rollout_ops import ParallelRollouts, ConcatBatches
from ray.rllib.execution.train_ops import TrainOneStep
from ray.rllib.execution.metric_ops import StandardMetricsReporting
from ray.rllib.policy import Policy
from ray.rllib.utils import add_mixins
from ray.rllib.utils.annotations import override, DeveloperAPI
from ray.rllib.utils.typing import EnvConfigDict, EnvType, ResultDict, \
TrainerConfigDict
logger = logging.getLogger(__name__)
def default_execution_plan(workers: WorkerSet, config: TrainerConfigDict):
# Collects experiences in parallel from multiple RolloutWorker actors.
rollouts = ParallelRollouts(workers, mode="bulk_sync")
# Combine experiences batches until we hit `train_batch_size` in size.
# Then, train the policy on those experiences and update the workers.
train_op = rollouts \
.combine(ConcatBatches(
min_batch_size=config["train_batch_size"])) \
.for_each(TrainOneStep(workers))
# Add on the standard episode reward, etc. metrics reporting. This returns
# a LocalIterator[metrics_dict] representing metrics for each train step.
return StandardMetricsReporting(train_op, workers, config)
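# Illustrative sketch (not part of the original file): the plan above is the
# default passed to build_trainer() below; a hypothetical trainer could be
# assembled like this (MyPolicyClass is a made-up Policy subclass):
#
#   MyTrainer = build_trainer(
#       name="MyAlgo",
#       default_policy=MyPolicyClass,
#       execution_plan=default_execution_plan)
#   trainer = MyTrainer(config={"num_workers": 1}, env="CartPole-v0")
#   print(trainer.train())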
@DeveloperAPI
def build_trainer(
name: str,
*,
default_config: Optional[TrainerConfigDict] = None,
validate_config: Optional[Callable[[TrainerConfigDict], None]] = None,
default_policy: Optional[Type[Policy]] = None,
get_policy_class: Optional[Callable[[TrainerConfigDict], Optional[Type[
Policy]]]] = None,
validate_env: Optional[Callable[[EnvType, EnvContext], None]] = None,
before_init: Optional[Callable[[Trainer], None]] = None,
after_init: Optional[Callable[[Trainer], None]] = None,
before_evaluate_fn: Optional[Callable[[Trainer], None]] = None,
mixins: Optional[List[type]] = None,
execution_plan: Optional[Callable[[
WorkerSet, TrainerConfigDict
], Iterable[ResultDict]]] = default_execution_plan) -> Type[Trainer]:
"""Helper function for defining a custom trainer.
Functions will be run in this order to initialize the trainer:
1. Config setup: validate_config, get_policy
2. Worker setup: before_init, execution_plan
3. Post setup: after_init
Args:
name (str): name of the trainer (e.g., "PPO")
default_config (Optional[TrainerConfigDict]): The default config dict
of the algorithm, otherwise uses the Trainer default config.
validate_config (Optional[Callable[[TrainerConfigDict], None]]):
Optional callable that takes the config to check for correctness.
It may mutate the config as needed.
default_policy (Optional[Type[Policy]]): The default Policy class to
use.
get_policy_class (Optional[Callable[
TrainerConfigDict, Optional[Type[Policy]]]]): Optional callable
that takes a config and returns the policy class or None. If None
is returned, will use `default_policy` (which must be provided then).
validate_env (Optional[Callable[[EnvType, EnvContext], None]]):
Optional callable to validate the generated environment (only
on worker=0).
before_init (Optional[Callable[[Trainer], None]]): Optional callable to
run before anything is constructed inside Trainer (Workers with
Policies, execution plan, etc..). Takes the Trainer instance as
argument.
after_init (Optional[Callable[[Trainer], None]]): Optional callable to
run at the end of trainer init (after all Workers and the exec.
plan have been constructed). Takes the Trainer instance as
argument.
before_evaluate_fn (Optional[Callable[[Trainer], None]]): Callback to
run before evaluation. This takes the trainer instance as argument.
mixins (list): list of any class mixins for the returned trainer class.
These mixins will be applied in order and will have higher
precedence than the Trainer class.
execution_plan (Optional[Callable[[WorkerSet, TrainerConfigDict],
Iterable[ResultDict]]]): Optional callable that sets up the
distributed execution workflow.
Returns:
Type[Trainer]: A Trainer sub-class configured by the specified args.
"""
original_kwargs = locals().copy()
base = add_mixins(Trainer, mixins)
class trainer_cls(base):
_name = name
_default_config = default_config or COMMON_CONFIG
_policy_class = default_policy
def __init__(self, config=None, env=None, logger_creator=None):
Trainer.__init__(self, config, env, logger_creator)
def _init(self, config: TrainerConfigDict,
env_creator: Callable[[EnvConfigDict], EnvType]):
# Validate config via custom validation function.
if validate_config:
validate_config(config)
# No `get_policy_class` function.
if get_policy_class is None:
# Default_policy must be provided (unless in multi-agent mode,
# where each policy can have its own default policy class).
if not config["multiagent"]["policies"]:
assert default_policy is not None
self._policy_class = default_policy
# Query the function for a class to use.
else:
self._policy_class = get_policy_class(config)
# If None returned, use default policy (must be provided).
if self._policy_class is None:
assert default_policy is not None
self._policy_class = default_policy
if before_init:
before_init(self)
# Creating all workers (excluding evaluation workers).
self.workers = self._make_workers(
env_creator=env_creator,
validate_env=validate_env,
policy_class=self._policy_class,
config=config,
num_workers=self.config["num_workers"])
self.execution_plan = execution_plan
self.train_exec_impl = execution_plan(self.workers, config)
if after_init:
after_init(self)
@override(Trainer)
def step(self):
res = next(self.train_exec_impl)
return res
@override(Trainer)
def _before_evaluate(self):
if before_evaluate_fn:
before_evaluate_fn(self)
@override(Trainer)
def __getstate__(self):
state = Trainer.__getstate__(self)
state["train_exec_impl"] = (
self.train_exec_impl.shared_metrics.get().save())
return state
@override(Trainer)
def __setstate__(self, state):
Trainer.__setstate__(self, state)
self.train_exec_impl.shared_metrics.get().restore(
state["train_exec_impl"])
@staticmethod
@override(Trainer)
def with_updates(**overrides) -> Type[Trainer]:
"""Build a copy of this trainer class with the specified overrides.
Keyword Args:
overrides (dict): use this to override any of the arguments
originally passed to build_trainer() for this policy.
Returns:
Type[Trainer]: The Trainer sub-class using `original_kwargs`
and `overrides`.
Examples:
>>> MyClass = SomeOtherClass.with_updates({"name": "Mine"})
>>> issubclass(MyClass, SomeOtherClass)
... False
>>> issubclass(MyClass, Trainer)
... True
"""
return build_trainer(**d
|
cuckoobox/cuckoo | cuckoo/data/analyzer/android/lib/core/config.py | Python | mit | 881 | 0.001135 |
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
# Originally contributed by Check Point Software Technologies, Ltd.
import ConfigParser
class Config:
def __init__(self, cfg):
"""@param cfg: configuration file."""
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(cfg)
for section in config.sections():
for name, raw_value in config.items(section):
try:
value = config.getboolean(section, name)
except ValueError:
try:
value = config.getint(section, name)
except ValueError:
value = config.get(section, name)
setattr(self, name, value)
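# Usage sketch (not part of the original file; the file name is hypothetical):
#
#   cfg = Config("analysis.conf")
#   print(cfg.some_option)  # each option becomes an attribute, coerced to bool/int when possible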
|
nagyv/python-api-library | kayako/core/lib.py | Python | bsd-2-clause | 5,597 | 0.001787 |
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2011, Evan Leis
#
# Distributed under the terms of the Lesser GNU General Public License (LGPL)
#-----------------------------------------------------------------------------
'''
Created on May 5, 2011
@author: evan
'''
from datetime import datetime
__all__ = [
'UnsetParameter',
'FOREVER',
'ParameterObject',
'NodeParser',
]
class _unsetparameter(object):
def __bool__(self):
return False
def __call__(self):
return self
def __repr__(self):
return 'UnsetParameter()'
def __str__(self):
return '??'
class _forever(object):
def __int__(self):
return 0
def __repr__(self):
return '0'
def __str__(self):
return '<Forever>'
UnsetParameter = _unsetparameter()
FOREVER = _forever()
class ParameterObject(object):
'''
An object used to build a dictionary around different parameter types.
'''
__parameters__ = []
''' Parameters that this ParameterObject can have. '''
def __init__(self, **parameters):
'''
Creates this parameter object setting parameter values as given by
keyword arguments.
'''
self._update_parameters(**parameters)
for parameter in self.__parameters__:
if parameter not in parameters:
setattr(self, parameter, UnsetParameter)
@property
def parameters(self):
return self._parameters_from_list(self.__parameters__)
def _parameters_from_list(self, list):
'''
Returns parameters based on a list.
'''
params = {}
for parameter in list:
attribute = getattr(self, parameter)
if attribute is not UnsetParameter:
params[parameter] = attribute
return params
def _update_parameters(self, **parameters):
for parameter, value in parameters.items():
if parameter not in self.__parameters__:
raise TypeError("'%s' is an invalid keyword argument for %s" % (parameter, self.__class__.__name__))
else:
setattr(self, parameter, value)
def __str__(self):
return '<ParameterObject at %s>' % (hex(id(self)))
class NodeParser(object):
''' Methods to parse text data from an lxml etree object. '''
@staticmethod
def _parse_int(data, required=True, strict=True):
''' Simply parses data as an int.
If it's required, invalid data will raise a ValueError or TypeError.
If it's not required, but is strict and there is data, invalid data will
raise a ValueError.
If it is not required and is not strict, invalid data returns None.
'''
if required:
return int(data)
else:
if data:
try:
return int(data)
except ValueError:
if strict:
raise
@staticmethod
def _parse_date(data, required=True, strict=True):
'''
Return an integer, or FOREVER. See _parse_int for information on
required and strict.
'''
value = NodeParser._parse_int(data, required=required, strict=strict)
if value is None:
return None
elif value == 0:
return FOREVER
else:
return datetime.fromtimestamp(value)
@staticmethod
def _get_int(node, required=True, strict=True):
'''
Pulls out an integer from the etree on the given node.
If it is required, it will assume everything is present, otherwise the
return value could be None.
If strict is True, it will parse any available text as an integer,
raising ValueError if it does not parse.
Otherwise, unparsable text is ignored. Required implies Strict.
'''
if required:
return int(node.text)
else:
if node is not None and node.text:
try:
return int(node.text)
except ValueError:
if strict:
raise
@staticmethod
def _get_string(node):
'''
Pulls out the text of a given node. Returns None if missing.
'''
if node is not None:
return node.text
@staticmethod
def _get_boolean(node, required=True, strict=True):
'''
Returns the boolean value of an integer node. See _get_int for details
about required and strict.
'''
value = NodeParser._get_int(node, required=required, strict=strict)
if value is None:
return None
else:
if not strict:
return bool(value)
else:
if value == 0:
return False
elif value == 1:
return True
else:
raise ValueError('Value for node not 1 or 0')
@staticmethod
def _get_date(node, required=True, strict=True):
'''
Return an integer, or FOREVER. See _get_int for details about required
and strict.
'''
value = NodeParser._get_int(node, required=required, strict=strict)
if value is None:
return None
elif value == 0:
return FOREVER
else:
return datetime.fromtimestamp(value)
def __str__(self):
return '<NodeParser at %s>' % (hex(id(self)))
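# Behaviour sketch for the parsing helpers above (not part of the original
# file; values are illustrative):
#
#   NodeParser._parse_int('5')                                # -> 5
#   NodeParser._parse_int('x', required=False, strict=False)  # -> None
#   NodeParser._parse_date('0')                               # -> FOREVER
#   NodeParser._parse_date('1304553600')                      # -> datetime.fromtimestamp(1304553600)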
|
beyond-content/python-pdf-paper-saver | src/pdfpapersaver/__init__.py | Python | bsd-2-clause | 2,516 | 0.001192 |
from PyPDF2 import PdfFileReader, PdfFileWriter
from rect import Rect
from rect.packer import pack
from reportlab.lib import pagesizes
from reportlab.lib.units import mm
__version__ = "0.1.0"
class PDFPagePacker(object):
def __init__(self, pdf_file, canvas_size=pagesizes.A4, padding=5 * mm):
super(PDFPagePacker, self).__init__()
self.pdf_file = pdf_file
self.canvas_size = canvas_size
self.inner_canvas_size = canvas_size[0] - 4 * padding, canvas_size[1] - 4 * padding
self.padding = padding
self.reader = PdfFileReader(self.pdf_file)
self.rects = list()
self.create_rect_page_dictionary()
@property
def page_count(self):
return self.reader.numPages
def create_rect_page_dictionary(self):
for page in self.reader.pages:
rect = Rect([page.mediaBox.getWidth(), page.mediaBox.getHeight()])
rect.page = page
self.rects.append(rect)
def pack(self):
def place_rects_and_append_to_pages(rects_to_place):
pages_to_place = [rect.page for rect in rects_to_place]
placed_rects = pack(self.inner_canvas_size, rects_to_place, self.padding)
for rect, page in zip(placed_rects, pages_to_place):
rect.page = page
if placed_rects:
pages.append(placed_rects)
items_to_place = list(self.rects)
rects_to_place = []
pages = []
while items_to_place:
try:
rect = items_to_place[0]
rects_to_place.append(rect)
pack(self.inner_canvas_size, rects_to_place, self.padding)
items_to_place.pop(0)
except ValueError, e:
if e.message == "Pack size too small.":
rects_to_place.pop()
place_rects_and_append_to_pages(rects_to_place)
rects_to_place = []
else:
raise
place_rects_and_append_to_pages(rects_to_place)
return pages
def get_packed_file(self, packed_file):
writer = PdfFileWriter()
scale = 1.0
for rects in self.pack():
page = writer.addBlankPage(*self.canvas_size)
for rect in rects:
y = self.canvas_size[1] - rect.top - 2 * self.padding
x = rect.left + 2 * self.padding
page.mergeScaledTranslatedPage(rect.page, scale, x, y)
writer.write(packed_file)
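# Usage sketch (not part of the original file; file names are hypothetical):
#
#   with open("input.pdf", "rb") as src, open("packed.pdf", "wb") as dst:
#       PDFPagePacker(src).get_packed_file(dst)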
|
graingert/sqlalchemy | lib/sqlalchemy/dialects/postgresql/__init__.py | Python | mit | 2,432 | 0 |
# postgresql/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import base
from . import pg8000 # noqa
from . import psycopg2 # noqa
from . import psycopg2cffi # noqa
from . import pygresql # noqa
from . import pypostgresql # noqa
from .array import All
from .array import Any
from .array import ARRAY
from .array import array
from .base import BIGINT
from .base import BIT
from .base import BOOLEAN
from .base import BYTEA
from .base import CHAR
from .base import CIDR
from .base import CreateEnumType
from .base import DATE
from .base import DOUBLE_PRECISION
from .base import DropEnumType
from .base import ENUM
from .base import FLOAT
from .base import INET
from .base import INTEGER
from .base import INTERVAL
from .base import MACADDR
from .base import MONEY
from .base import NUMERIC
from .base import OID
from .base import REAL
from .base import REGCLASS
from .base import SMALLINT
from .base import TEXT
from .base import TIME
from .base import TIMESTAMP
from .base import TSVECTOR
from .base import UUID
from .base import VARCHAR
from .dml import Insert
from .dml import insert
from .ext import aggregate_order_by
from .ext import array_agg
from .ext import ExcludeConstraint
from .hstore import HSTORE
from .hstore import hstore
from .json import JSON
from .json import JSONB
from .ranges import DATERANGE
from .ranges import INT4RANGE
from .ranges import INT8RANGE
from .ranges import NUMRANGE
from .ranges import TSRANGE
from .ranges import TSTZRANGE
base.dialect = dialect = psycopg2.dialect
__all__ = (
"INTEGER",
"BIGINT",
"SMALLINT",
"VARCHAR",
"CHAR",
"TEXT",
"NUMERIC",
"FLOAT",
"REAL",
"INET",
"CIDR",
"UUID",
"BIT",
"MACADDR",
"MONEY",
"OID",
"REGCLASS",
"DOUBLE_PRECISION",
"TIMESTAMP",
"TIME",
"DATE",
"BYTEA",
"BOOLEAN",
"INTERVAL",
"ARRAY",
"ENUM",
"dialect",
"array",
"HSTORE",
"hstore",
"INT4RANGE",
"INT8RANGE",
"NUMRANGE",
"DATERANGE",
"TSVECTOR",
"TSRANGE",
"TSTZRANGE",
"JSON",
"JSONB",
"Any",
"All",
"DropEnumType",
"CreateEnumType",
"ExcludeConstraint",
"aggregate_order_by",
"array_agg",
"insert",
"Insert",
)
|
dana-i2cat/felix | modules/resource/orchestrator/src/extensions/sfa/trust/speaksfor_util.py | Python | apache-2.0 | 18,996 | 0.005159 |
#----------------------------------------------------------------------
# Copyright (c) 2014 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
from __future__ import absolute_import
import datetime
from dateutil import parser as du_parser, tz as du_tz
import optparse
import os
import subprocess
import sys
import tempfile
from xml.dom.minidom import *
from StringIO import StringIO
from extensions.sfa.trust.abac_credential import ABACCredential, ABACElement
from extensions.sfa.trust.certificate import Certificate
from extensions.sfa.trust.credential import Credential, signature_template, HAVELXML
from extensions.sfa.trust.credential_factory import CredentialFactory
from extensions.sfa.trust.gid import GID
# Routine to validate that a speaks-for credential
# says what it claims to say:
# It is a signed credential wherein the signer S is attesting to the
# ABAC statement:
# S.speaks_for(S)<-T Or "S says that T speaks for S"
# Requires that openssl be installed and in the path
# create_speaks_for requires that xmlsec1 be on the path
# Simple XML helper functions
# Find the text associated with first child text node
def findTextChildValue(root):
child = findChildNamed(root, '#text')
if child: return str(child.nodeValue)
return None
# Find first child with given name
def findChildNamed(root, name):
for child in root.childNodes:
if child.nodeName == name:
return child
return None
# Write a string to a tempfile, returning name of tempfile
def write_to_tempfile(str):
str_fd, str_file = tempfile.mkstemp()
if str:
os.write(str_fd, str)
os.close(str_fd)
return str_file
# Run a subprocess and return output
def run_subprocess(cmd, stdout, stderr):
try:
proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
proc.wait()
if stdout:
output = proc.stdout.read()
else:
output = proc.returncode
return output
except Exception as e:
raise Exception("Failed call to subprocess '%s': %s" % (" ".join(cmd), e))
def get_cert_keyid(gid):
"""Extract the subject key identifier from the given certificate.
Return they key id as lowercase string with no colon separators
between pairs. The key id as shown in the text output of a
certificate are in uppercase with colon separators.
"""
raw_key_id = gid.get_extension('subjectKeyIdentifier')
# Raw has colons separating pairs, and all characters are upper case.
# Remove the colons and convert to lower case.
keyid = raw_key_id.replace(':', '').lower()
return keyid
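# Example of the transformation described above (value is illustrative):
#   'AB:CD:12' -> 'abcd12'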
# Pull the cert out of a list of certs in a PEM formatted cert string
def grab_toplevel_cert(cert):
start_label = '-----BEGIN CERTIFICATE-----'
if cert.find(start_label) > -1:
start_index = cert.find(start_label) + len(start_label)
else:
start_index = 0
end_label = '-----END CERTIFICATE-----'
end_index = cert.find(end_label)
first_cert = cert[start_index:end_index]
pieces = first_cert.split('\n')
first_cert = "".join(pieces)
return first_cert
# Validate that the given speaks-for credential represents the
# statement User.speaks_for(User)<-Tool for the given user and tool certs
# and was signed by the user
# Return:
# Boolean indicating whether the given credential
# is not expired
# is an ABAC credential
# was signed by the user associated with the speaking_for_urn
# is verified by xmlsec1
# asserts U.speaks_for(U)<-T ("user says that T may speak for user")
# If schema provided, validate against schema
# is trusted by given set of trusted roots (both user cert and tool cert)
# String user certificate of speaking_for user if the above tests succeed
# (None otherwise)
# Error message indicating why the speaks_for call failed ("" otherwise)
def verify_speaks_for(cred, tool_gid, speaking_for_urn, \
trusted_roots, schema=None, logger=None):
# Credential has not expired
if cred.expiration and cred.expiration < datetime.datetime.utcnow():
return False, None, "ABAC Credential expired at %s (%s)" % (cred.expiration.isoformat(), cred.get_summary_tostring())
# Must be ABAC
if cred.get_cred_type() != ABACCredential.ABAC_CREDENTIAL_TYPE:
return False, None, "Credential not of type ABAC but %s" % cred.get_cred_type
if cred.signature is None or cred.signature.gid is None:
return False, None, "Credential malformed: missing signature or signer cert. Cred: %s" % cred.get_summary_tostring()
user_gid = cred.signature.gid
user_urn = user_gid.get_urn()
# URN of signer from cert must match URN of 'speaking-for' argument
if user_urn != speaking_for_urn:
return False, None, "User URN from cred doesn't match speaking_for URN: %s != %s (cred %s)" % \
(user_urn, speaking_for_urn, cred.get_summary_tostring())
tails = cred.get_tails()
if len(tails) != 1:
return False, None, "Invalid ABAC-SF credential: Need exactly 1 tail element, got %d (%s)" % \
(len(tails), cred.get_summary_tostring())
user_keyid = get_cert_keyid(user_gid)
tool_keyid = get_cert_keyid(tool_gid)
subject_keyid = tails[0].get_principal_keyid()
head = cred.get_head()
principal_keyid = head.get_principal_keyid()
role = head.get_role()
# Credential must pass xmlsec1 verify
cred_file = write_to_tempfile(cred.save_to_string())
cert_args = []
if trusted_roots:
for x in trusted_roots:
cert_args += ['--trusted-pem', x.filename]
# FIXME: Why do we not need to specify the --node-id option as credential.py does?
xmlsec1_args = [cred.xmlsec_path, '--verify'] + cert_args + [ cred_file]
output = run_subprocess(xmlsec1_args, stdout=None, stderr=subprocess.PIPE)
os.unlink(cred_file)
if output != 0:
# FIXME
# xmlsec errors have a msg= which is the interesting bit.
# But does this go to stderr or stdout? Do we have it here?
verified = ""
mstart = verified.find("msg=")
msg = ""
if mstart > -1 and len(verified) > 4:
mstart = mstart + 4
mend = verified.find('\\', mstart)
msg = verified[mstart:mend]
if msg == "":
msg = output
return False, None, "ABAC credential failed to xmlsec1 verify: %s" % msg
# Must say U.speaks_for(U)<-T
if user_keyid != principal_keyid or \
tool_keyid != subject_keyid or \
role != ('speaks_for_%s' % user_keyid):
return False, None, "ABAC statement doesn't assert U.speaks_for(U)<-T (%s)" % cred.get_summary_tostring()
# If schema provided, validate against schema
if HAVELXML and schema and os.path.exists(schema):
from lxml import etree
tree = etree.parse(StringIO(cred.xml))
schema_doc = etree.parse(schema)
xmlschema = etree.XMLSchema(schema_doc)
if not xmlschema.validate(tree):
error = xmlschema.error_log.last_error
message =
|
zwChan/VATEC | ~/eb-virt/Lib/site-packages/werkzeug/local.py | Python | apache-2.0 | 14,275 | 0.003853 |
# -*- coding: utf-8 -*-
"""
werkzeug.local
~~~~~~~~~~~~~~
This module implements context-local objects.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import copy
from functools import update_wrapper
from werkzeug.wsgi import ClosingIterator
from werkzeug._compat import PY2, implements_bool
# since each thread has its own greenlet we can just use those as identifiers
# for the context. If greenlets are not available we fall back to the
# current thread ident depending on where it is.
try:
from greenlet import getcurrent as get_ident
except ImportError:
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
def release_local(local):
"""Releases the contents of the local for the current context.
This makes it possible to use locals without a manager.
Example::
>>> loc = Local()
>>> loc.foo = 42
>>> release_local(loc)
>>> hasattr(loc, 'foo')
False
With this function one can release :class:`Local` objects as well
as :class:`LocalStack` objects. However it is not possible to
release data held by proxies that way, one always has to retain
a reference to the underlying local object in order to be able
to release it.
.. versionadded:: 0.6.1
"""
local.__release_local__()
class Local(object):
__slots__ = ('__storage__', '__ident_func__')
def __init__(self):
object.__setattr__(self, '__storage__', {})
object.__setattr__(self, '__ident_func__', get_ident)
def __iter__(self):
return iter(self.__storage__.items())
def __call__(self, proxy):
"""Create a proxy for a name."""
return LocalProxy(self, proxy)
def __release_local__(self):
self.__storage__.pop(self.__ident_func__(), None)
def __getattr__(self, name):
try:
return self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
ident = self.__ident_func__()
storage = self.__storage__
try:
storage[ident][name] = value
except KeyError:
storage[ident] = {name: value}
def __delattr__(self, name):
try:
del self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
class LocalStack(object):
"""This class work
|
s similar to a :class:`Local` but keeps a stack
of objects instead. This is best explained with an example::
>>> ls = LocalStack()
>>> ls.push(42)
>>> ls.top
42
>>> ls.push(23)
>>> ls.top
23
>>> ls.pop()
23
>>> ls.top
42
They can be force released by using a :class:`LocalManager` or with
the :func:`release_local` function but the correct way is to pop the
item from the stack after using. When the stack is empty it will
no longer be bound to the current context (and as such released).
By calling the stack without arguments it returns a proxy that resolves to
the topmost item on the stack.
.. versionadded:: 0.6.1
"""
def __init__(self):
self._local = Local()
def __release_local__(self):
self._local.__release_local__()
def _get__ident_func__(self):
return self._local.__ident_func__
def _set__ident_func__(self, value):
object.__setattr__(self._local, '__ident_func__', value)
__ident_func__ = property(_get__ident_func__, _set__ident_func__)
del _get__ident_func__, _set__ident_func__
def __call__(self):
def _lookup():
rv = self.top
if rv is None:
raise RuntimeError('object unbound')
return rv
return LocalProxy(_lookup)
def push(self, obj):
"""Pushes a new item to the stack"""
rv = getattr(self._local, 'stack', None)
if rv is None:
self._local.stack = rv = []
rv.append(obj)
return rv
def pop(self):
"""Removes the topmost item from the stack, will return the
old value or `None` if the stack was already empty.
"""
stack = getattr(self._local, 'stack', None)
if stack is None:
return None
elif len(stack) == 1:
release_local(self._local)
return stack[-1]
else:
return stack.pop()
@property
def top(self):
"""The topmost item on the stack. If the stack is empty,
`None` is returned.
"""
try:
return self._local.stack[-1]
except (AttributeError, IndexError):
return None
class LocalManager(object):
"""Local objects cannot manage themselves. For that you need a local
manager. You can pass a local manager multiple locals or add them later
by appending them to `manager.locals`. Every time the manager cleans up,
it will clean up all the data left in the locals for this context.
The `ident_func` parameter can be added to override the default ident
function for the wrapped locals.
.. versionchanged:: 0.6.1
Instead of a manager the :func:`release_local` function can be used
as well.
.. versionchanged:: 0.7
`ident_func` was added.
"""
def __init__(self, locals=None, ident_func=None):
if locals is None:
self.locals = []
elif isinstance(locals, Local):
self.locals = [locals]
else:
self.locals = list(locals)
if ident_func is not None:
self.ident_func = ident_func
for local in self.locals:
object.__setattr__(local, '__ident_func__', ident_func)
else:
self.ident_func = get_ident
def get_ident(self):
"""Return the context identifier the local objects use internally for
this context. You cannot override this method to change the behavior
but use it to link other context local objects (such as SQLAlchemy's
scoped sessions) to the Werkzeug locals.
.. versionchanged:: 0.7
You can pass a different ident function to the local manager that
will then be propagated to all the locals passed to the
constructor.
"""
return self.ident_func()
def cleanup(self):
"""Manually clean up the data in the locals for this context. Call
this at the end of the request or use `make_middleware()`.
"""
for local in self.locals:
release_local(local)
def make_middleware(self, app):
"""Wrap a WSGI application so that cleaning up happens after
request end.
"""
def application(environ, start_response):
return ClosingIterator(app(environ, start_response), self.cleanup)
return application
def middleware(self, func):
"""Like `make_middleware` but for decorating functions.
Example usage::
@manager.middleware
def application(environ, start_response):
...
The difference to `make_middleware` is that the function passed
will have all the arguments copied from the inner application
(name, docstring, module).
"""
return update_wrapper(self.make_middleware(func), func)
def __repr__(self):
return '<%s storages: %d>' % (
self.__class__.__name__,
len(self.locals)
)
@implements_bool
class LocalProxy(object):
"""Acts as a proxy for a werkzeug local. Forwards all operations to
a proxied object. The only operations not supported for forwarding
are right handed operands and any kind of assignment.
Example usage::
from werkzeug.local import Local
l = Local()
# these are proxies
request = l('request')
user = l('user')
from werkzeug.local import LocalStack
_response_local = LocalStack()
#
|
firebitsbr/termineter | framework/core.py | Python | gpl-3.0 | 18,028 | 0.023408 |
# framework/core.py
#
# Copyright 2011 Spencer J. McIntyre <SMcIntyre [at] SecureState [dot] net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import logging
import logging.handlers
import os
import re
import serial
import sys
from c1218.connection import Connection
from c1218.errors import C1218IOError, C1218ReadTableError
from framework.errors import FrameworkConfigurationError, FrameworkRuntimeError
from framework.options import AdvancedOptions, Options
from framework.templates import TermineterModule, TermineterModuleOptical
from framework.utilities import FileWalker, Namespace, get_default_serial_settings
from serial.serialutil import SerialException
class Framework(object):
"""
This is the main instance of the framework. It contains and
manages the serial connection as well as all of the loaded
modules.
"""
def __init__(self, stdout=None):
self.modules = {}
self.__package__ = '.'.join(self.__module__.split('.')[:-1])
package_path = __import__(self.__package__, None, None, ['__path__']).__path__[0] # that's some python black magic trickery for you
if stdout == None:
stdout = sys.stdout
self.stdout = stdout
self.directories = Namespace()
self.directories.user_data = os.path.expanduser('~') + os.sep + '.termineter' + os.sep
self.directories.modules_path = package_path + os.sep + 'modules' + os.sep
self.directories.data_path = package_path + os.sep + 'data' + os.sep
if not os.path.isdir(self.directories.data_path):
self.logger.critical('path to data not found')
raise FrameworkConfigurationError('path to data not found')
if not os.path.isdir(self.directories.user_data):
os.mkdir(self.directories.user_data)
self.serial_connection = None
self.__serial_connected__ = False
# setup logging stuff
self.logger = logging.getLogger(self.__package__ + '.' + self.__class__.__name__.lower())
main_file_handler = logging.handlers.RotatingFileHandler(self.directories.user_data + self.__package__ + '.log', maxBytes=262144, backupCount=5)
main_file_handler.setLevel(logging.DEBUG)
main_file_handler.setFormatter(logging.Formatter("%(asctime)s %(name)-50s %(levelname)-10s %(message)s"))
logging.getLogger('').addHandler(main_file_handler)
# setup and configure options
# Whether or not these are 'required' is really enforced by the individual
# modules get_missing_options method and by which options they require based
# on their respective types. See framework/templates.py for more info.
self.options = Options(self.directories)
self.options.add_boolean('USECOLOR', 'enable color on the console interface', default=False)
self.options.add_string('CONNECTION', 'serial connection string')
self.options.add_string('USERNAME', 'serial username', default='0000')
self.options.add_integer('USERID', 'serial userid', default=0)
self.options.add_string('PASSWORD', 'serial c12.18 password', default='00000000000000000000')
self.options.add_boolean('PASSWORDHEX', 'if the password is in hex', default=True)
self.advanced_options = AdvancedOptions(self.directories)
self.advanced_options.add_integer('BAUDRATE', 'serial connection baud rate', default=9600)
self.advanced_options.add_integer('BYTESIZE', 'serial connection byte size', default=serial.EIGHTBITS)
self.advanced_options.add_boolean('CACHETBLS', 'cache certain read-only tables', default=True)
self.advanced_options.set_callback('CACHETBLS', self.__opt_callback_set_table_cache_policy)
self.advanced_options.add_integer('STOPBITS', 'serial connection stop bits', default=serial.STOPBITS_ONE)
self.advanced_options.add_integer('NBRPKTS', 'c12.18 maximum packets for reassembly', default=2)
self.advanced_options.add_integer('PKTSIZE', 'c12.18 maximum packet size', default=512)
if sys.platform.startswith('linux'):
self.options.set_option('USECOLOR', 'True')
# check and configure rfcat stuff
self.rfcat_available = False
try:
import rflib
self.logger.info('the rfcat library is available')
self.rfcat_available = True
except ImportError:
self.logger.info('the rfcat library is not available, it can be found at https://code.google.com/p/rfcat/')
pass
if self.rfcat_available:
# init the values to be used
self.rfcat_connection = None
self.__rfcat_connected__ = False
self.is_rfcat_connected = lambda: self.__rfcat_connected__
# self.options.add_integer('RFCATIDX', 'the rfcat device to use', default = 0)
# start loading modules
modules_path = self.directories.modules_path
self.logger.debug('searching for modules in: ' + modules_path)
self.current_module = None
if not os.path.isdir(modules_path):
self.logger.critical('path to modules not found')
raise FrameworkConfigurationError('path to modules not found')
for module_path in FileWalker(modules_path, absolute_path=True, skip_dirs=True):
module_path = module_path.replace(os.path.sep, '/')
if not module_path.endswith('.py'):
continue
module_path = module_path[len(modules_path):-3]
module_name = module_path.split(os.path.sep)[-1]
if module_name.startswith('__'):
continue
if module_name.lower() != module_name:
continue
if module_path.startswith('rfcat') and not self.rfcat_available:
self.logger.debug('skipping module: ' + module_path + ' because rfcat is not available')
continue
# looks good, proceed to load
self.logger.debug('loading module: ' + module_path)
try:
module_instance = self.import_module(module_path)
except FrameworkRuntimeError:
continue
if not isinstance(module_instance, TermineterModule):
self.logger.error('module: ' + module_path + ' is not derived from the TermineterModule class')
continue
# if isinstance(module_instance, TermineterModuleRfcat) and not self.rfcat_available:
# self.logger.debug('skipping module: ' + module_path + ' because rfcat is not available')
# continue
if not hasattr(module_instance, 'run'):
self.logger.critical('module: ' + module_path + ' has no run() method')
raise FrameworkRuntimeError('module: ' + module_path + ' has no run() method')
if not isinstance(module_instance.options, Options) or not isinstance(module_instance.advanced_options, Options):
self.logger.critical('module: ' + module_path + ' options and advanced_options must be Options instances')
raise FrameworkRuntimeError('options and advanced_options must be Options instances')
module_instance.name = module_name
module_instance.path = module_path
self.modules[module_path] = module_instance
self.logger.info('successfully loaded ' + str(len(self.modules)) + ' modules into the framework')
return
def __repr__(self):
return '<' + self.__class__.__name__ + ' Loaded Modules: ' + str(len(self.modules)) + ', Serial Connected: ' + str(self.is_serial_connected()) + ' >'
def reload_module(self, module_path=None):
"""
Reloads a module into the framework. If module_path is not
specified, then the current_module variable is used. Returns True
on success, False on error.
@type module_path: String
@param module_path: The name of the module to reload
"""
if module_path == None:
if self.current_module != None:
module_path = self.current_module.path
else:
self.logger.warning('must specify a module if no module is currently in use')
return False
if not module_path in self.modules.keys():
self.logger.error('invalid module requested for reload')
raise FrameworkRuntimeError('invalid module requested for reload
|
quiubas/quiubas-python | quiubas/quiubas.py | Python | mit | 1,137 | 0.05277 |
import re
import urllib
from network import network
from balance import balance
from sms import sms
class Quiubas:
def __init__( self ):
self.lib_version = '1.1.1'
self.api_key = None
self.api_private = None
self.base_url = 'https://api.quiubas.com'
self.version = '2.1'
self.network = network( self )
self.balance = balance( self )
self.sms = sms( self )
def setBaseURL( self, url ):
self.base_url = url
return self.base_url
def getBaseURL( self ):
return self.base_url
def setAuth( self, api_key, api_private ):
self.api_key = api_key
self.api_private = api_private
def getAuth( self ):
return {
'api_key': self.api_key,
'api_private': self.api_private
}
def format(self, path, vars = None):
if not vars:
vars = dict()
parsed_vars = dict()
for k in vars.keys():
if vars[k] is not None:
parsed_vars['{' + k + '}'] = urllib.quote_plus(vars[k])
regex = re.compile("(%s)" % "|".join(map(re.escape, parsed_vars.keys())))
if len(parsed_vars) != 0:
return regex.sub(lambda mo: str(parsed_vars[mo.string[mo.start():mo.end()]]), path)
else:
return path
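# Usage sketch of format() (not part of the original file; values are illustrative):
#
#   q = Quiubas()
#   q.format('/sms/{id}', {'id': '123'})  # -> '/sms/123'
#   q.format('/balance')                  # -> '/balance' (no vars, path returned unchanged)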
|
AfricaChess/lichesshub | lichesshub/settings.py | Python | mit | 4,754 | 0.001052 |
"""
Django settings for lichesshub project.
Generated by 'django-admin startproject' using Django 1.11.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$3+$70f7z6kyjb^=u26flklf^&%fso+)lrc27)i-_rzjf@@tt@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['africachess.everyday.com.ng', 'localhost', '127.0.0.1', '138.197.117.2']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'account.apps.AccountConfig',
'club.apps.ClubConfig',
'tournament.apps.TournamentConfig',
'grandprix.apps.GrandprixConfig',
'player.apps.PlayerConfig',
'pairing.apps.PairingConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'lichesshub.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'lichesshub.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'otherstatic'),
]
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
LOGIN_REDIRECT_URL = '/'
MEDIA_URL = '/static/media/'
MEDIA_ROOT = os.path.join(STATIC_ROOT, 'media')
LICHESS_API_URL = 'https://lichess.org/api/'
BEST_SCORE_COUNT = 10
BYE_SCORE = 1
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s %(asctime)s %(module)s %(message)s'
}
},
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(BASE_DIR, 'debug.log'),
'formatter': 'simple'
},
},
'loggers': {
'django.request': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True
},
'tournament': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True
},
'club': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True
},
'grandprix': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True
}
}
}
try:
from local_settings import *
except ImportError:
pass
|
makinacorpus/django | django/db/models/sql/compiler.py | Python | bsd-3-clause | 49,723 | 0.001589 |
import datetime
from django.conf import settings
from django.core.exceptions import FieldError
from django.db.backends.util import truncate_name
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import select_related_descend, QueryWrapper
from django.db.models.sql.constants import (SINGLE, MULTI, ORDER_DIR,
GET_ITERATOR_CHUNK_SIZE, SelectInfo)
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.sql.query import get_order_dir, Query
from django.db.utils import DatabaseError
from django.utils import six
from django.utils.six.moves import zip
from django.utils import timezone
class SQLCompiler(object):
def __init__(self, query, connection, using):
self.query = query
self.connection = connection
self.using = using
self.quote_cache = {}
# When ordering a queryset with distinct on a column not part of the
# select set, the ordering column needs to be added to the select
# clause. This information is needed both in SQL construction and
# masking away the ordering selects from the returned row.
self.ordering_aliases = []
self.ordering_params = []
def pre_sql_setup(self):
"""
Does any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
# TODO: after the query has been executed, the altered state should be
# cleaned. We are not using a clone() of the query here.
"""
if not self.query.tables:
self.query.join((None, self.query.get_meta().db_table, None))
if (not self.query.select and self.query.default_cols and not
self.query.included_inherited_models):
self.query.setup_inherited_models()
if self.query.select_related and not self.query.related_select_cols:
self.fill_related_selections()
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if ((name in self.query.alias_map and name not in self.query.table_map) or
name in self.query.extra_select):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Creates the SQL for this query. Returns the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
if with_limits and self.query.low_mark == self.query.high_mark:
return '', ()
self.pre_sql_setup()
# After executing the query, we must get rid of any joins the query
# setup created. So, take note of alias counts before the query ran.
# However we do not want to get rid of stuff done in pre_sql_setup(),
# as the pre_sql_setup will modify query state in a way that forbids
# another run of it.
self.refcounts_before = self.query.alias_refcount.copy()
out_cols, s_params = self.get_columns(with_col_aliases)
ordering, o_params, ordering_group_by = self.get_ordering()
distinct_fields = self.get_distinct()
# This must come after 'select', 'ordering' and 'distinct' -- see
# docstring of get_from_clause() for details.
from_, f_params = self.get_from_clause()
qn = self.quote_name_unless_alias
where, w_params = self.query.where.as_sql(qn=qn, connection=self.connection)
having, h_params = self.query.having.as_sql(qn=qn, connection=self.connection)
having_group_by = self.query.having.get_cols()
params = []
for val in six.itervalues(self.query.extra_select):
params.extend(val[1])
result = ['SELECT']
if self.query.distinct:
result.append(self.connection.ops.distinct_sql(distinct_fields))
params.extend(o_params)
result.append(', '.join(out_cols + self.ordering_aliases))
params.extend(s_params)
params.extend(self.ordering_params)
result.append('FROM')
result.extend(from_)
params.extend(f_params)
if where:
result.append('WHERE %s' % where)
params.extend(w_params)
grouping, gb_params = self.get_grouping(having_group_by, ordering_group_by)
if grouping:
if distinct_fields:
raise NotImplementedError(
"annotate() + distinct(fields) not implemented.")
if not ordering:
ordering = self.connection.ops.force_no_ordering()
result.append('GROUP BY %s' % ', '.join(grouping))
params.extend(gb_params)
if having:
result.append('HAVING %s' % having)
params.extend(h_params)
if ordering:
result.append('ORDER BY %s' % ', '.join(ordering))
if with_limits:
if self.query.high_mark is not None:
result.append('LIMIT %d' % (self.query.high_mark - self.query.low_mark))
if self.query.low_mark:
if self.query.high_mark is None:
val = self.connection.ops.no_limit_value()
if val:
result.append('LIMIT %d' % val)
result.append('OFFSET %d' % self.query.low_mark)
if self.query.select_for_update and self.connection.features.has_select_for_update:
# If we've been asked for a NOWAIT query but the backend does not support it,
# raise a DatabaseError otherwise we could get an unexpected deadlock.
nowait = self.query.select_for_update_nowait
if nowait and not self.connection.features.has_select_for_update_nowait:
raise DatabaseError('NOWAIT is not supported on this database backend.')
result.append(self.connection.ops.for_update_sql(nowait=nowait))
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(self.refcounts_before)
return ' '.join(result), tuple(params)
def as_nested_sql(self):
"""
Perform the same functionality as the as_sql() method, returning an
SQL string and parameters. However, the alias prefixes are bumped
beforehand (in a copy -- the current query isn't changed), and any
ordering is removed if the query is unsliced.
Used when nesting this query inside another.
"""
obj = self.query.clone()
if obj.low_mark == 0 and obj.high_mark is None:
# If there is no slicing in use, then we can safely drop all ordering
obj.clear_ordering(True)
obj.bump_prefix()
return obj.get_compiler(connection=self.connection).as_sql()
def get_columns(self, with_aliases=False):
"""
Returns the list of columns to use in the select statement, as well as
a list any extra parameters that need to be included. If no columns
have been specified, returns all columns relating to fields in the
model.
If 'with_aliases' is true, any column names that are duplicated
(without the table names) are given unique aliases. This is needed in
some cases to avoid ambiguity with nested queries.
"""
qn = self.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
result = ['(%s) AS %s' % (col[0], qn2(alias)) for alias, col in six.iteritems(self.query.extra_select)]
params = []
aliases = set(self.query.extra_select.keys())
if with_aliases:
fbradyirl/home-assistant | tests/components/homematicip_cloud/__init__.py | Python | apache-2.0 | 49 | 0
"""Tests
|
for the HomematicIP Cloud compo
|
nent."""
hzj123/56th | pombola/south_africa/bin/people-json/committees.py | Python | agpl-3.0 | 2,683 | 0.011182
import csv
import re
import unicodedata
from utils import *
def initialise(name):
return re.sub('[^A-Z]', '', name)
def asciify(name):
return unicodedata.normalize('NFKD', unicode(name)).encode('ascii', 'ignore')
def parse(data):
orgs_by_id = dict([ (x['id'], x) for x in data['organizations'].values() ])
# TODO: Perhaps check old/new committees, then stop using parl.py
# committees. Or just assume these new ones are accurate.
for row in csv.DictReader(open(data_path + 'committees.csv')):
if row['Name'] not in data['organizations']:
data['organizations'][row['Name']] = {
'id': idFactory.new('committee_pmg'),
'name': row['Name'],
'slug': row['Name'].lower().replace(' ','-'),
'classification': row['Type']
}
for row in csv.DictReader(open(data_path + 'committee-members.csv')):
row['Name'] = re.sub('^([^,]*) Mr, (.*)$', r'\1, Mr \2', row['Name'])
family_name, initials = row['Name'].split(',')
initials = re.sub('^\s*(Mr|Ms|Dr|Nkosi|Prof|Adv|Prince)\s+', '', initials)
        # TODO: Use the person's other_names field, and get these misspellings in there.
if family_name == 'Khorai': family_name = 'Khoarai'
if family_name == 'Hoosan': family_name = 'Hoosen'
if family_name == 'Jeffrey': family_name = 'Jeffery'
if family_name == 'Hill-Lews': family_name = 'Hill-Lewis'
if family_name == 'Koornhof' and initials == 'NC': initials = 'NJJVR'
matches = [ x for x in data['persons'].values() if asciify(x['family_name']) == family_name ]
if len(matches) > 1:
matches = [ x for x in data['persons'].values() if x['family_name'] == family_name and initialise(x['given_names']) == initials ]
if not matches:
matches = [ x for x in data['persons'].values() if x['family_name'] == family_name and initialise(x['given_names'])[0:len(initials)] == initials ]
# With the current data, we now always have one result
assert len(matches) == 1
person = matches[0]
party = [ x for x in person['memberships'] if 'party' in x['organization_id'] ][0]['organization_id']
        assert row['Party'] == orgs_by_id[party]['name'], row['Party'] + orgs_by_id[party]['name']
mship = {
'organization_id': data['organizations'][row['Committee']]['id']
}
if row['IsAlternative?'] == 'True':
mship['role'] = 'Alternate Member'
if row['IsChairperson?'] == 'True':
mship['role'] = 'Chairperson'
        add_membership(person, mship)
return data
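# Sketch of what the two helpers above do, on hypothetical inputs:
#   initialise('Nomvula Paula')  -> 'NP'         (keeps capital letters only)
#   asciify(u'Françoise')        -> 'Francoise'  (NFKD-decomposes, drops accents)
# The family-name/initials matching in parse() relies on these normalisations.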
GISPPU/GrenadaLandInformation | geonode/people/migrations/0003_link_users_to_account.py | Python | gpl-3.0 | 10,196 | 0.007846
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.auth.models import User
from account.models import Account
class Migration(DataMigration):
def forwards(self, orm):
# we need to associate each user to an account object
for user in User.objects.all():
a = Account()
a.user = user
a.language = 'en' # default language
a.save()
def backwards(self, orm):
# we need to delete all the accounts records
Account.objects.all().delete()
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 14, 4, 17, 6, 973224)'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 14, 4, 17, 6, 974570)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 14, 4, 17, 6, 974509)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'relationships': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_to'", 'symmetrical': 'False', 'through': "orm['relationships.Relationship']", 'to': "orm['auth.User']"}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'people.profile': {
'Meta': {'object_name': 'Profile'},
'area': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'delivery': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'profile'", 'unique': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'voice': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'people.role': {
'Meta': {'object_name': 'Role'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'relationships.relationship': {
'Meta': {'ordering': "('created',)", 'unique_together': "(('from_user', 'to_user', 'status', 'site'),)", 'object_name': 'Relationship'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_user': ('django.db.model
cirosantilli/python-utils | bin/find_path_sha1.py | Python | mit | 2,436 | 0.010673
#!/usr/bin/env python
#------------------------------------------------------------
#
# Ciro D. Santilli
#
# Prints a list of paths which are files followed by their inodes and sha1 sums.
#
# Useful to make a backup of paths names before mass renaming them,
# supposing your files are distinct by SHA1 and that SHA1 has not changed,
# or that the inodes have not changed.
#
#------------------------------------------------------------
import os
import os.path
import stat
import hashlib
import sys
SHA1_MAX_BYTES_READ_DEFAULT = float("inf") # defaults to read entire file
def sha1_hex_file(filepath, max_bytes=None):
"""
Returns the SHA1 of a given filepath in hexadecimal.
Opt-args:
* max_bytes. If given, reads at most max_bytes bytes from the file.
"""
sha1 = hashlib.sha1()
f = open(filepath, 'rb')
try:
if max_bytes:
data = f.read(max_bytes)
else:
data = f.read()
sha1.update(data)
finally:
f.close()
return sha1.hexdigest()
if __name__ == '__main__':
import argparse
    parser = argparse.ArgumentParser(description="""Finds files and creates a list of their paths, inodes and sha1 checksums.
Useful to make a backup of filepaths before renaming them, for example before a large number of renames by a script.
SAMPLE CALLS
find_path_sha1.py
#finds, calculates sha1 based on the entire files, and prints path\nsha1 to stdout.
find_path_sha1.py -n 100000
#finds, calculates sha1 based on 100000 bytes
""",
epilog="Report any bugs to ciro.santilli@gmail.com",
prog='Program')
parser.add_argument('-m', '--max-sha1-bytes',
action="store",
dest="sha1_max_bytes_read",
type=int,
default=SHA1_MAX_BYTES_READ_DEFAULT,
help='Maximum number of bytes to read to calculate SHA1 checksum.'+
'Reading the whole file might be too slow, and unnecessary for some applications.')
args = parser.parse_args(sys.argv[1:])
sha1_max_bytes_read = args.sha1_max_bytes_read
file_output = ""
print "sha1_max_bytes_read"
print sha1_max_bytes_read
print
paths = []
for root, dirs, files in os.walk('.'):
for bname in files:
paths.append(os.path.join(root,bname))
paths.sort()
for path in paths:
print path
print str(sha1_hex_file(path,sha1_max_bytes_read))
print
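# A minimal, self-contained variant of the same idea as sha1_hex_file above:
# hashing a file in bounded chunks instead of a single read. The file name in
# the commented call is an assumption, not part of the original script.
import hashlib
def sha1_hex_file_chunked(filepath, chunk_size=8192):
    sha1 = hashlib.sha1()
    f = open(filepath, 'rb')
    try:
        chunk = f.read(chunk_size)
        while chunk:
            sha1.update(chunk)
            chunk = f.read(chunk_size)
    finally:
        f.close()
    return sha1.hexdigest()
# print sha1_hex_file_chunked('example.txt')  # 'example.txt' is hypothetical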
ylcrow/poweron | src/thirdparty/Ping.py | Python | mit | 5,859 | 0.008875
'''
@author: lockrecv@gmail.com
A pure python ping implementation using raw socket.
Note that ICMP messages can only be sent from processes running as root.
Inspired by Matthew Dixon Cowles <http://www.visi.com/~mdc/>.
'''
import os
import select
import socket
import struct
import time
class Ping:
    ''' Power On State Ping Utility (3rdparty)'''
def __init__(self):
self.ICMP_ECHO_REQUEST = 8
def checksum(self, source_string):
summ = 0
count_to = (len(source_string)/2)*2
for count in xrange(0, count_to, 2):
this = ord(source_string[count+1]) * 256 + ord(source_string[count])
summ = summ + this
summ = summ & 0xffffffff
if count_to < len(source_string):
summ = summ + ord(source_string[len(source_string)-1])
summ = summ & 0xffffffff
summ = (summ >> 16) + (summ & 0xffff)
summ = summ + (summ >> 16)
answer = ~summ
answer = answer & 0xffff
# Swap bytes
answer = answer >> 8 | (answer << 8 & 0xff00)
return answer
def receive_one_ping(self, my_socket, idd, timeout):
'''Receive the ping from the socket'''
time_left = timeout
while True:
started_select = time.time()
what_ready = select.select([my_socket], [], [], time_left)
how_long_in_select = (time.time() - started_select)
if what_ready[0] == []: # Timeout
return
time_received = time.time()
received_packet, addr = my_socket.recvfrom(1024)
icmpHeader = received_packet[20:28]
type, code, checksum, packet_id, sequence = struct.unpack("bbHHh", icmpHeader)
if packet_id == idd:
bytess = struct.calcsize("d")
time_sent = struct.unpack("d", received_packet[28:28 + bytess])[0]
return time_received - time_sent
time_left = time_left - how_long_in_select
if time_left <= 0:
return
def send_one_ping(self, my_socket, dest_addr, idd, psize):
'''Send one ping to the given address'''
dest_addr = socket.gethostbyname(dest_addr)
# Remove header size from packet size
psize = psize - 8
# Header is type (8), code (8), checksum (16), id (16), sequence (16)
my_checksum = 0
# Make a dummy header with a 0 checksum
header = struct.pack("bbHHh", self.ICMP_ECHO_REQUEST, 0, my_checksum, idd, 1)
bytess = struct.calcsize("d")
data = (psize - bytess) * "Q"
data = struct.pack("d", time.time()) + data
# Calculate the checksum on the data and the dummy header
my_checksum = self.checksum(header+data)
# Now that we have the right checksum, we put that in. It's just easier
# to make up a new header than to stuff it into the dummy
header = struct.pack("bbHHh", self.ICMP_ECHO_REQUEST, 0, socket.htons(my_checksum), idd, 1)
packet = header + data
my_socket.sendto(packet, (dest_addr, 1)) # Don't know about the 1
def do_one(self, dest_addr, timeout, psize):
'''Returns either the delay (in seconds) or none on timeout'''
icmp = socket.getprotobyname("icmp")
try:
my_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, icmp)
except socket.errno, (errno, msg):
if errno == 1:
# Operation not permitted
msg = msg + (
" - Note that ICMP messages can only be sent from processes"
" running as root."
)
raise socket.error(msg)
my_id = os.getpid() & 0xFFFF
self.send_one_ping(my_socket, dest_addr, my_id, psize)
delay = self.receive_one_ping(my_socket, my_id, timeout)
my_socket.close()
return delay
def verbose_ping(self, dest_addr, timeout = 2, count = 4, psize = 64):
'''
Send 'count' ping with 'psize' size to 'dest_addr' with
the given 'timeout' and display the result
'''
for i in xrange(count):
print 'ping %s with ...' % dest_addr
try:
delay = self.do_one(dest_addr, timeout, psize)
except socket.gaierror, e:
print 'FAILED. (socket error: "%s")' % e[1]
break
if delay == None:
print 'FAILED. (timeout within %ssec.)' % timeout
else:
delay = delay * 1000
print 'get ping in %0.4fms' % delay
print
def quiet_ping(self, dest_addr, timeout = 2, count = 4, psize = 64):
'''
        Send 'count' ping with 'psize' size to 'dest_addr' with
the given 'timeout' and display the result.
Returns 'percent' lost packages, 'max' round trip time
and 'avg' round trip time.
'''
mrtt = None
artt = None
plist = []
for i in xrange(count):
try:
delay = self.do_one(dest_addr, timeout, psize)
except socket.gaierror, e:
print 'FAILED. (socket error: "%s")' % e[1]
break
if delay != None:
                delay = delay * 1000
plist.append(delay)
# Find lost package percent
percent_lost = 100 - (len(plist)*100/count)
# Find max and avg round trip time
if plist:
mrtt = max(plist)
artt = sum(plist)/len(plist)
return percent_lost, mrtt, artt
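# A minimal usage sketch of the Ping class above. Raw ICMP sockets need root
# privileges, and the target host below is an assumption:
#
#   p = Ping()
#   p.verbose_ping('127.0.0.1', timeout=2, count=3, psize=64)
#   lost, max_rtt, avg_rtt = p.quiet_ping('127.0.0.1', timeout=2, count=3)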
madAsket/levitin_algorithms | src/sieve_eratosphene.py | Python | gpl-3.0 | 1,323 | 0.004242
# -*- coding: utf-8 -*-
# Sieve of Eratosthenes algorithm: finds the sequence of primes not exceeding a given length.
from math import sqrt, floor
def sieve(len):
    # Generate the array of initial values from 2 to len
    init_array = [a for a in range(0, len+1)]
    # 1 is not a prime number!
    init_array[1] = 0
    # Iterate over the values up to the square root of len
    for z in range(2, int(floor(sqrt(len))) + 1):
        # The element has not yet been removed from the initial array
        if init_array[z] != 0:
            # The smallest value from which the array traversal can start
            j = z*z
            # Until we reach the end of the array,
            # remove every multiple from the array
            while j <= len:
                init_array[j] = 0
                j = j + z
    # Print all the primes once the computation is done
    for s in init_array:
        if s!=0:
            print s
sieve(27)
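# For len=27 the call above prints exactly the primes up to 27:
# 2, 3, 5, 7, 11, 13, 17, 19, 23.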
jorge-marques/wagtail | wagtail/wagtailadmin/tests/test_pages_views.py | Python | bsd-3-clause | 93,571 | 0.003377
from datetime import timedelta
import unittest
import mock
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.core import mail
from django.core.paginator import Paginator
from django.db.models.signals import pre_delete, post_delete
from django.utils import timezone
from wagtail.tests.testapp.models import (
SimplePage, EventPage, EventPageCarouselItem,
StandardIndex, StandardChild,
BusinessIndex, BusinessChild, BusinessSubIndex,
TaggedPage, Advert, AdvertPlacement)
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailcore.models import Page, PageRevision
from wagtail.wagtailcore.signals import page_published, page_unpublished
from wagtail.wagtailusers.models import UserProfile
def submittable_timestamp(timestamp):
"""
Helper function to translate a possibly-timezone-aware datetime into the format used in the
go_live_at / expire_at form fields - "YYYY-MM-DD hh:mm", with no timezone indicator.
This will be interpreted as being in the server's timezone (settings.TIME_ZONE), so we
need to pass it through timezone.localtime to ensure that the client and server are in
agreement about what the timestamp means.
"""
return str(timezone.localtime(timestamp)).split('.')[0]
class TestPageExplorer(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Add child page
self.child_page = SimplePage(
title="Hello world!",
slug="hello-world",
)
self.root_page.add_child(instance=self.child_page)
# Login
self.login()
def test_explore(self):
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(self.root_page, response.context['parent_page'])
self.assertTrue(response.context['pages'].paginator.object_list.filter(id=self.child_page.id).exists())
def test_explore_root(self):
response = self.client.get(reverse('wagtailadmin_explore_root'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(Page.objects.get(id=1), response.context['parent_page'])
self.assertTrue(response.context['pages'].paginator.object_list.filter(id=self.root_page.id).exists())
def test_ordering(self):
response = self.client.get(reverse('wagtailadmin_explore_root'), {'ordering': 'content_type'})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], 'content_type')
def test_invalid_ordering(self):
response = self.client.get(reverse('wagtailadmin_explore_root'), {'ordering': 'invalid_order'})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], '-latest_revision_created_at')
def test_reordering(self):
response = self.client.get(reverse('wagtailadmin_explore_root'), {'ordering': 'ord'})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
self.assertEqual(response.context['ordering'], 'ord')
# Pages must not be paginated
self.assertNotIsInstance(response.context['pages'], Paginator)
def make_pages(self):
for i in range(150):
self.root_page.add_child(instance=SimplePage(
title="Page " + str(i),
slug="page-" + str(i),
))
def test_pagination(self):
self.make_pages()
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )), {'p': 2})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
# Check that we got the correct page
self.assertEqual(response.context['pages'].number, 2)
def test_pagination_invalid(self):
self.make_pages()
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )), {'p': 'Hello World!'})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
# Check that we got page one
self.assertEqual(response.context['pages'].number, 1)
def test_pagination_out_of_range(self):
self.make_pages()
response = self.client.get(reverse('wagtailadmin_explore', args=(self.root_page.id, )), {'p': 99999})
# Check response
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/pages/index.html')
# Check that we got the last page
self.assertEqual(response.context['pages'].number, response.context['pages'].paginator.num_pages)
class TestPageCreation(TestCase, WagtailTestUtils):
def setUp(self):
# Find root page
self.root_page = Page.objects.get(id=2)
# Login
self.user = self.login()
def test_add_subpage(self):
response = self.client.get(reverse('wagtailadmin_pages_add_subpage', args=(self.root_page.id, )))
self.assertEqual(response.status_code, 200)
def test_add_subpage_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.user.save()
# Get add subpage page
response = self.client.get(reverse('wagtailadmin_pages_add_subpage', args=(self.root_page.id, )))
        # Check that the user received a 403 response
self.assertEqual(response.status_code, 403)
def test_add_subpage_nonexistantparent(self):
response = self.client.get(reverse('wagtailadmin_pages_add_subpage', args=(100000, )))
self.assertEqual(response.status_code, 404)
def test_create_simplepage(self):
response = self.client.get(reverse('wagtailadmin_pages_create', args=('tests', 'simplepage', self.root_page.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<a href="#content" class="active">Content</a>')
self.assertContains(response, '<a href="#promote" class="">Promote</a>')
def test_create_page_without_promote_tab(self):
"""
Test that the Promote tab is not rendered for page classes that define it as empty
"""
response = self.client.get(reverse('wagtailadmin_pages_create', args=('tests', 'standardindex', self.root_page.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<a href="#content" class="active">Conte
|
nt</a>')
self.assertNotContains(response, '<a href="#promote" class="">Promote</a>')
def test_create_page_with_custom_tabs(self):
"""
Test that custom edit handlers are rendered
"""
response = self.client.get(reverse('wagtailadmin_pages_create', args=('tests', 'standardchild', self.root_page.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<a href="#content" class="active">Content</a>')
self.assertContains(response, '<a href="#promote" class="">Promote</a>')
self.assertContains(response, '<a href="#dinosaurs" class="">Dinosaurs</a>')
def test_create_simplepage_bad_permissions(self):
# Remove privileges from user
self.user.is_superuser = False
self.user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
pastgift/object-checker-py | test/test.py | Python | mit | 14,685 | 0.009465
# -*- coding: utf-8 -*-
import unittest
from objectchecker import ObjectChecker
# Config
options = {
'messageTemplate': {
'invalid' : "Value of Field `{{fieldName}}` is not valid. Got `{{fieldValue}}`, but require {{checkerName}} = {{checkerOption}}",
'missing' : "Missing {{fieldName}}",
'unexpected': "Not support {{fieldName}}"
}
}
checker = ObjectChecker()
# Complicated objects
complicated_valid_obj = {
"users": [
{
"id" : 1,
"name": "a@a.com",
"additional": {
"age" : 20,
"height": 180,
"score" : [80, 90, 100]
}
},
{
"id" : 2,
"name": "123@b.com"
},
{
"id" : 3,
"name": "123@a.com",
"additional": {
"age" : 100,
"height": 200,
"score" : [60, 70, 80, 90]
}
}
]
}
complicated_invalid_obj = {
"users": [
{
"id" : "a1",
"name": "a@a.com",
"additional": {
"age" : 20,
"height": 180,
"score" : [80, 90, 100]
}
},
{
"id" : 2,
"name": "123@b.com"
},
{
"id" : 3,
"name": "123@a.com",
"additional": {
"age" : 500,
"height": 300,
"score" : [30]
}
}
]
}
complicated_options = {
"users": {
"$maxLength": 5,
"$": {
"id": {
"$matchRegExp": "^\\d$"
},
"name": {
"$isEmail" : True,
"$minLength": 6,
"$maxLength": 10
},
"additional": {
"$isOptional": True,
"$type": "json",
"age": {
"$minValue": 20,
"$maxValue": 100
},
"height": {
"$minValue": 100,
"$maxValue": 200
},
"score": {
"$minLength": 3,
"$type" : "array",
"$": {
"$minValue": 60,
"$maxValue": 100
}
}
}
}
}
}
# Simple objects
obj = None
opt = {
"username": {
"$minLength": 6,
"$maxLength": 10
},
"age": {
"$minValue": 1,
"$maxValue": 100
},
"email": {
"$isEmail" : True,
"$isOptional": True
},
"score1": {
"$isInteger": True
},
"score2": {
"$isPositiveZeroInteger": True
},
"score3": {
"$isPositiveInteger": True
},
"score4": {
"$isNegativeZeroInteger": True
},
"score5": {
"$isNegativeInteger": True
},
"fix1": {
"$isValue": 12345
},
"fix2": {
"$isLength": 5
},
"range1": {
"$in": [1, 2, 3]
},
"range2": {
"$notIn": [1, 2, 3]
}
}
class TestObjectChecker(unittest.TestCase):
def test_complicated_object_valid_object(self):
self.assertEqual(True, checker.is_valid(complicated_valid_obj, complicated_options))
def test_complicated_object_invalid_object(self):
self.assertEqual(False, checker.is_valid(complicated_invalid_obj, complicated_options))
# Valid objects
def test_valid_object_1(self):
obj = {
'username': 'abcdef',
'age' : 1,
'email' : 'a@e.com',
'score1' : 1,
'score2' : 0,
'score3' : 1,
'score4' : 0,
'score5' : -1,
'fix1' : 12345,
'fix2' : '11111',
'range1' : 1,
'range2' : 0
};
self.assertEqual(True, checker.is_valid(obj, opt))
def test_valid_object_2(self):
obj = {
'username': 'abcdef1234',
'age' : 100,
'score1' : 100,
'score2' : 1,
'score3' : 1,
'score4' : -1,
'score5' : -1,
'fix1' : 12345,
'fix2' : '12345',
'range1' : 2,
'range2' : 4
};
self.assertEqual(True, checker.is_valid(obj, opt))
# Invalid objects
def test_invalid_object_1(self):
opt = {
'foo': {
'$minLength': 3
}
};
obj = {
'foo': 'ab'
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_2(self):
opt = {
'foo': {
'$maxLength': 3
}
};
obj = {
'foo': 'abcd'
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_3(self):
opt = {
'foo': {
'$minValue': 3
}
};
obj = {
'foo': 2
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_4(self):
opt = {
'foo': {
'$maxValue': 3
}
};
obj = {
'foo': 4
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_5(self):
opt = {
'foo': {
'$isEmail': True
}
};
obj = {
'foo': 'a@@.com'
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_6(self):
opt = {
'foo': {
'$in': [1,2]
}
};
obj = {
'foo': 0
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_7(self):
opt = {
'foo': {
'$notIn': [1, 2]
}
};
obj = {
'foo': 1
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_8(self):
opt = {
'foo': {
                '$isValue': 9
            }
};
obj = {
'foo': 8
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_9(self):
opt = {
'foo': {
'$isInteger': True
}
};
        obj = {
            'foo': 'a'
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_10(self):
        opt = {
'foo': {
'$isPositiveZeroInteger': True
}
};
obj = {
'foo': -1
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_11(self):
opt = {
'foo': {
'$isPositiveInteger': True
}
};
obj = {
'foo': 0
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_12(self):
opt = {
'foo': {
'$isNegativeZeroInteger': True
}
};
obj = {
'foo': 1
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_13(self):
opt = {
'foo': {
'$isNegativeInteger': True
}
};
obj = {
'foo': 0
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_14(self):
opt = {
'foo': {
'$notEmptyString': True
}
};
obj = {
'foo': ''
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_15(self):
opt = {
'foo': {
'$assertTrue': lambda v: v == 'assertTrue'
}
};
obj = {
'foo': 'xxx'
};
self.assertEqual(False, checker.is_valid(obj, opt))
def test_invalid_object_16(self):
opt = {
'foo': {
'$assertFalse': lambda v: v == 'xxx'
lifemapper/LmQGIS | lifemapperTools/tools/newExperiment.py | Python | gpl-2.0 | 17,776 | 0.024415
# -*- coding: utf-8 -*-
"""
@author: Jeff Cavner
@contact: jcavner@ku.edu
@license: gpl2
@copyright: Copyright (C) 2014, University of Kansas Center for Research
Lifemapper Project, lifemapper [at] ku [dot] edu,
Biodiversity Institute,
1345 Jayhawk Boulevard, Lawrence, Kansas, 66045, USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
"""
import os
import types
import zipfile
import numpy as np
from collections import namedtuple
from PyQt4.QtGui import *
from PyQt4.QtCore import QSettings, Qt, SIGNAL, QUrl
from qgis.core import *
from qgis.gui import *
from lifemapperTools.tools.ui_newExperimentDialog import Ui_Dialog
from lifemapperTools.tools.listPALayers import ListPALayersDialog
from lifemapperTools.tools.constructGrid import ConstructGridDialog
from lifemapperTools.tools.uploadLayers import UploadDialog
from lifemapperTools.tools.listBuckets import ListBucketsDialog
from lifemapperTools.tools.addSDMLayer import UploadSDMDialog
from lifemapperTools.common.pluginconstants import ListExperiments, GENERIC_REQUEST
from lifemapperTools.common.pluginconstants import QGISProject
from lifemapperTools.common.workspace import Workspace
from lifemapperTools.tools.radTable import RADTable
from lifemapperTools.tools.uploadTreeOTL import UploadTreeDialog
from lifemapperTools.common.communicate import Communicate
class NewExperimentDialog(QDialog, Ui_Dialog):
# .............................................................................
# Constructor
# .............................................................................
def __init__(self, iface, RADids=None, inputs=None, client=None, email=None):
QDialog.__init__(self)
#self.setWindowFlags(self.windowFlags() & Qt.WindowMinimizeButtonHint)
self.interface = iface
self.workspace = Workspace(self.interface,client)
self.checkExperiments()
self.setupUi()
self.client = client
#cc = self.rejectBut
#bok = self.acceptBut
self.expId = None
self.mapunits = None
self.keyvalues = {}
if email is not None:
self.keyvalues['email'] = email
#_Controller.__init__(self, iface, BASE_URL=ListExperiments.BASE_URL,
# STATUS_URL=ListExperiments.STATUS_URL,
# REST_URL=ListExperiments.REST_URL,
# cancel_close=cc, okayButton=bok, ids=RADids,
# initializeWithData=False, client=client)
# ..............................................................................
def _checkQgisProjForKey(self):
project = QgsProject.instance()
filename = str(project.fileName())
found = False
s = QSettings()
for key in s.allKeys():
if 'RADExpProj' in key:
value = str(s.value(key))
if value == filename:
found = True
expId = key.split('_')[1]
s.setValue("currentExpID", int(expId))
return found
# ..............................................................................
def checkExperiments(self):
"""
@summary: gets the current expId, if there is one it gets the current
project path associated with that id. If there is a project path, it
triggers a save project. If there is no path, it asks a save as, and sets
the project path for the id. The last thing it does is to open a new
qgis project
"""
s = QSettings()
currentExpId = s.value("currentExpID",QGISProject.NOEXPID,type=int)
if currentExpId != QGISProject.NOEXPID:
currentpath = str(s.value("RADExpProj_"+str(currentExpId),
QGISProject.NOPROJECT))
if currentpath != QGISProject.NOPROJECT and currentpath != '':
self.interface.actionSaveProject().trigger()
else:
if len(QgsMapLayerRegistry.instance().mapLayers().items()) > 0:
#self.interface.actionSaveProjectAs().trigger()
self.workspace.saveQgsProjectAs(currentExpId)
# now actionNewProject
self.interface.actionNewProject().trigger()
s.setValue("currentExpID",QGISProject.NOEXPID)
else: # no experiment Id
# there is a case where a Qgis project can be opened but there is no
# current id, like after a sign out but that Qgis project belongs to an id, in that case it needs
# to start a new project
if len(QgsMapLayerRegistry.instance().mapLayers().items()) == 0 or self._checkQgisProjForKey():
self.interface.actionNewProject().trigger()
# ..............................................................................
#def accept(self):
#
#
# valid = self.validate()
# if self.expId is not None:
# self.openNewDialog()
# elif valid and self.expId is None:
# self.startThread(GENERIC_REQUEST,outputfunc = self.newExperimentCallBack,
# requestfunc=self.client.rad.postExperiment, client=self.client,
# inputs=self.keyvalues)
# elif not valid and self.expId is None:
# pass
# ..............................................................................
def postNewOpen(self,buttonValue):
valid = self.validate()
if self.expId is not None:
self.openNewDialog(buttonValue)
elif valid and self.expId is None:
try:
print self.keyvalues
exp = self.client.rad.postExperiment(**self.keyvalues)
except Exception, e:
message = "Error posting new experiment "+str(e)
msgBox = QMessageBox.information(self,
"Problem...",
message,
QMessageBox.Ok)
else:
self.newExperimentCallBack(exp,buttonValue)
elif not valid and self.expId is None:
pass
# ..............................................................................
def validate(self):
valid = True
message = ""
self.keyvalues['epsgCode'] = self.epsgEdit.text()
self.keyvalues['name'] = self.expNameEdit.text()
self.keyvalues['description'] = self.description.toPlainText()
epsg = self.epsgEdit.text()
#self.setMapUnitsFromEPSG(epsg=epsg)
experimentname = self.expNameEdit.text()
if len(experimentname) <= 0:
message = "Please supply a experiment name"
valid = False
elif len(epsg) <= 0:
message = "Please supply an
|
EPSG code"
valid = False
else:
self.setMapUnitsFromEPSG(epsg=epsg)
         if self.mapunits is None or self.mapunits == 'UnknownUnit':
message = "Invalid EPSG Code"
valid = False
if not valid:
msgBox = QMessageBox.information(self,
"Problem...",
message,
QMessageBox.Ok)
return valid
# ..............................................................................
def openProjSelectorSetEPSG(se
dexy/cashew | example/usage2.py | Python | mit | 581 | 0.018933
import pprint
### "import"
from example.classes import Data
### "plugins"
Data.plugins
import example.classes2
Data.plugins
### "example-data"
example_data = [{
"foo" : 123,
"bar" : 456
}]
### "csv-example"
csv_data = Data.create_instance('csv', example_data)
csv_data.present()
csv_data.update_settings({
'lineterminator' : '\n',
    'write_header' : False
})
csv_data.present()
csv_data.setting('lineterminator')
pprint.pprint(csv_data.setting_values())
### "json-example"
json_data = Data.create_instance('json', example_data)
json_data.present()
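# The point of the example above: importing example.classes2 registers extra
# plugins on Data as an import side effect, so the second Data.plugins lookup
# shows more entries than the first without any explicit registration call.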
JaredKotoff/100Doors | 100Doors.py | Python | mit | 3,198 | 0.002814
import random
# Simply picks the winning door at random
def door_picker():
winner = random.randrange(1, doors+1)
return winner
# This opens all the other doors and allows the user to switch or stay
def door_opener(choice, winner, switch, enable_auto):
if enable_auto == "n":
switch = None
if choice == winner:
closed_door = random.randrange(1, doors+1)
while closed_door == winner:
closed_door = random.randrange(1, doors+1)
else:
closed_door = choice
print("I have opened all but doors " + str(closed_door) + " and " + str(winner))
if enable_auto == "n":
while not (switch == "y" or switch == "n"):
switch = input("Would you like to switch?(y\\n): ").lower()
if switch == "y":
if choice == winner:
choice = closed_door
else:
choice = winner
return choice, switch
# This is the end game. Displays if the player won or lost
def show_winner(choice, winner, switch):
if switch == "n":
print("You did not switch and you ", end="")
else:
print("You switched and you ", end="")
if choice == winner:
print("won!")
return 1
else:
print("lost.")
return 0
# Calculates the amount of games won vs played and your % of wins
def show_rate(wins, games):
rate = wins / games
print("\n" + str(wins) + " wins of " + str(games) + " games")
print("You are winning " + str(rate*100) + "% of the time.\n\n")
# Sorry for the mess
# There are cleaner ways to write this main but I got tired
def main():
global doors
doors = "0"
wins = 0
games = 0
total_games = "0"
switch = "0"
enable_auto = None
keep_playing = "y"
while not (doors.isdigit() and 2 < int(doors)):
doors = input("How many doors would you like to play with? ")
doors = int(doors)
while not (enable_auto == "y" or enable_auto == "n"):
enable_auto = input("Would you like to see autoplay?(y\\n): ").lower()
if enable_auto == "y":
while not (switch == "y" or switch == "n"):
switch = input("Always switch doors?(y\\n): ")
while not (total_games.isdigit() and 0 < int(total_games)):
total_games = input("How many games?: ")
while keep_playing == "y":
choice = "0"
if enable_auto == "y":
choice = str(random.randrange(1, doors+1))
print("There are 10
|
0 doors in front of you.\nOne contains a prize.\n")
if enable_auto == "n":
while not (choice.isdigit() and 0 < int(choice) < doors+1):
choice = input("Pick one: ")
winner = door_picker()
choice, switch = door_opener(int(choice), winner, switch, enable_auto)
wins += show_winner(int(choice), winner, switch)
games += 1
show_rate(wins, games)
if enable_auto == "n":
keep_playing = None
while not (keep_playing == "y" or keep_playing == "n"):
keep_playing = input("Would you like to keep playing?(y\\n): ").lower()
elif int(total_games) == games:
keep_playing = "n"
if __name__ == '__main__':
main()
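# Sanity check on the expected outcome: with n doors and the host opening all
# but two of them, always switching wins whenever the first pick was wrong,
# i.e. with probability (n - 1) / n: about 99% for 100 doors, versus 1% for
# staying.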
homeworkprod/byceps | byceps/permissions/site.py | Python | bsd-3-clause | 334 | 0
"""
byceps.permissions.site
~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ..util.authorization import create_permission_enum
SitePermission = create_permission_enum(
'site',
[
'create',
'update',
'view',
],
)
thelabnyc/wagtail_blog | blog/utils.py | Python | apache-2.0 | 2,644 | 0.000378
# https://djangosnippets.org/snippets/690/
import re
from django.template.defaultfilters import slugify
def unique_slugify(instance, value, slug_field_name='slug', queryset=None,
slug_separator='-'):
"""
Calculates and stores a unique slug of ``value`` for an instance.
``slug_field_name`` should be a string matching the name of the field to
store the slug in (and the field to check against for uniqueness).
``queryset`` usually doesn't need to be explicitly provided - it'll default
to using the ``.all()`` queryset from the model's default manager.
"""
slug_field = instance._meta.get_field(slug_field_name)
slug = getattr(instance, slug_field.attname)
slug_len = slug_field.max_length
# Sort out the initial slug, limiting its length if necessary.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create the queryset if one wasn't explicitly provided and exclude the
# current instance from the queryset.
if queryset is None:
queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk)
    # Find a unique slug. If one matches, add '-2' to the end and try again
# (then '-3', etc).
next = 2
while not slug or queryset.filter(**{slug_field_name: slug}):
slug = original_slug
end = '%s%s' % (slug_separator, next)
if slug_len and len(slug) + len(end) > slug_len:
slug = slug[:slug_len-len(end)]
slug = _slug_strip(slug, slug_separator)
slug = '%s%s' % (slug, end)
next += 1
setattr(instance, slug_field.attname, slug)
def _slug_strip(value, separator='-'):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of
    the default '-' separator with the new separator.
"""
separator = separator or ''
if separator == '-' or not separator:
re_sep = '-'
else:
re_sep = '(?:-|%s)' % re.escape(separator)
    # Remove multiple instances and if an alternate separator is provided,
# replace the default '-' separator.
if separator != re_sep:
value = re.sub('%s+' % re_sep, separator, value)
# Remove separator from the beginning and end of the slug.
if separator:
if separator != '-':
re_sep = re.escape(separator)
value = re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
return value
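# A hypothetical usage sketch of unique_slugify above, e.g. from a model's
# save() method (the BlogPage model and its fields are assumptions):
#
#   def save(self, *args, **kwargs):
#       if not self.slug:
#           unique_slugify(self, self.title)  # sets self.slug, e.g. 'my-post-2'
#       super(BlogPage, self).save(*args, **kwargs)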
vecnet/om | website/apps/ts_om/tests/test_run_new.py | Python | mpl-2.0 | 1,914 | 0.000522
# -*- coding: utf-8 -*-
#
# This file is part of the VecNet OpenMalaria Portal.
# For copyright and licensing information about this package,
|
see the
# NOTICE.txt and LICENSE.txt files in its top-level directory; they are
# available at https://github.com/vecnet/om
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License (MPL), version 2.0. If a copy of the MPL was not distributed
# with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from django.test.testcases import TestCase
from django.conf import settings
import run
from website.apps.ts_om.models import Simulation
class RunNewTest(TestCase):
def test_failure(self):
simulation = Simulation.objects.create()
simulation.set_input_file("")
run.main(simulation.id)
simulation.refresh_from_db()
self.assertEqual(simulation.status, Simulation.FAILED)
self.assertEqual("Exit code: 66", simulation.last_error_message)
model_stdout = simulation.model_stdout.read().decode("utf-8")
self.assertIn("XSD error", model_stdout)
self.assertIn("invalid document structure", model_stdout)
def test_success(self):
simulation = Simulation.objects.create()
with open(os.path.join(settings.BASE_DIR, "website", "apps", "ts_om", "tests", "data", "default.xml")) as fp:
simulation.set_input_file(fp)
run.main(simulation.id)
simulation.refresh_from_db()
self.assertEqual(simulation.status, Simulation.COMPLETE)
self.assertEqual("", simulation.last_error_message)
model_stdout = simulation.model_stdout.read().decode("utf-8")
self.assertIn("100%", model_stdout)
output = simulation.output_file.read().decode("utf-8")
self.assertNotEqual(output, "")
ctsout = simulation.ctsout_file.read().decode("utf-8")
self.assertNotEqual(ctsout, "")
petersterling1/poke-qr-viewer | main/migrations/0005_auto_20161129_1044.py | Python | gpl-3.0 | 633 | 0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-29 10:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0004_auto_20161129_0947'),
]
operations = [
migrations.AlterField(
model_name='pokemon',
name='qr_code',
field=models.TextField(default=''),
),
migrations.AlterField(
model_name='pokemon',
name='qr_code_image',
field=models.ImageField(blank=True, null=True, upload_to='qr'),
),
]
paoloRais/lightfm | examples/movielens/data.py | Python | apache-2.0 | 3,559 | 0.000281
import itertools
import os
import zipfile
import numpy as np
import requests
import scipy.sparse as sp
def _get_movielens_path():
"""
Get path to the movielens dataset file.
"""
return os.path.join(os.path.dirname(os.path.abspath(__file__)),
'movielens.zip')
def _download_movielens(dest_path):
"""
Download the dataset.
"""
url = 'http://files.grouplens.org/datasets/movielens/ml-100k.zip'
req = requests.get(url, stream=True)
with open(dest_path, 'wb') as fd:
for chunk in req.iter_content():
fd.write(chunk)
def _get_raw_movielens_data():
"""
Return the raw lines of the train and test files.
"""
path = _get_movielens_path()
if not os.path.isfile(path):
_download_movielens(path)
with zipfile.ZipFile(path) as datafile:
return (datafile.read('ml-100k/ua.base').decode().split('\n'),
datafile.read('ml-100k/ua.test').decode().split('\n'))
def _parse(data):
"""
Parse movielens dataset lines.
"""
for line in data:
if not line:
continue
uid, iid, rating, timestamp = [int(x) for x in line.split('\t')]
yield uid, iid, rating, timestamp
def _build_interaction_matrix(rows, cols, data):
"""
Build the training matrix (no_users, no_items),
with ratings >= 4.0 being marked as positive and
the rest as negative.
"""
mat = sp.lil_matrix((rows, cols), dtype=np.int32)
for uid, iid, rating, timestamp in data:
if rating >= 4.0:
mat[uid, iid] = 1.0
else:
mat[uid, iid] = -1.0
return mat.tocoo()
def _get_movie_raw_metadata():
"""
Get raw lines of the genre file.
"""
path = _get_movielens_path()
if not os.path.isfile(path):
_download_movielens(path)
with zipfile.ZipFile(path) as datafile:
return datafile.read('ml-100k/u.item').decode(errors='ignore').split('\n')
def get_movielens_item_metadata(use_item_ids):
"""
Build a matrix of genre features (no_items, no_features).
If use_item_ids is True, per-item features will also be used.
"""
features = {}
genre_set = set()
for line in _get_movie_raw_metadata():
if not line:
continue
splt = line.split('|')
item_id = int(splt[0])
genres = [idx for idx, val in
zip(range(len(splt[5:])), splt[5:])
if int(val) > 0]
if use_item_ids:
# Add item-specific features too
genres.append(item_id)
for genre_id in genres:
genre_set.add(genre_id)
features[item_id] = genres
mat = sp.lil_matrix((len(features) + 1,
len(genre_set)),
dtype=np.int32)
for item_id, genre_ids in features.items():
for genre_id in genre_ids:
mat[item_id, genre_id] = 1
return mat
def get_movielens_data():
"""
Return (train_interactions, test_interactions).
"""
train_data, test_data = _get_raw_movielens_data()
uids = set()
iids = set()
for uid, iid, rating, timestamp in itertools.chain(_parse(train_data),
_parse(test_data)):
uids.add(uid)
iids.add(iid)
rows = max(uids) + 1
cols = max(iids) + 1
return (_build_interaction_matrix(rows, cols, _parse(train_data)),
_build_interaction_matrix(rows, cols, _parse(test_data)))
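# A minimal usage sketch of the helpers above (the first call downloads the
# ml-100k archive if it is not already cached):
#
#   train, test = get_movielens_data()          # sparse COO matrices of +1/-1
#   item_features = get_movielens_item_metadata(use_item_ids=False)
#   print(train.shape, test.shape, item_features.shape)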
rogst/drainomote | drainomote/views.py | Python | gpl-3.0 | 115 | 0
from django.shortcuts import render, redirect
def index(request):
return redirect('status/', permanent=True)
tweemeterjop/thug | thug/DOM/W3C/HTML/TAnimateColor.py | Python | gpl-2.0 | 948 | 0.005274
#!/usr/bin/env python
import string
import logging
from .HTMLElement import HTMLElement
log = logging.getLogger("Thug")
class TAnimateColor(HTMLElement):
def __init__(self, doc, tag):
self.doc = doc
self.tag = tag
self._values = ""
def get_values(self):
return self._values
def set_values(self, values):
if all(c in string.printable for c in values) is False:
log.ThugLogging.log_exploit_event(self.doc.window.url,
"Microsoft Internet Explorer",
"Microsoft Internet Explorer CButton Object Use-After-Free Vulnerability (CVE-2012-4792)",
cve = 'CVE-2012-4792',
forward = True)
log.DFT.check_shellcode(values)
self._values = values
values = property(get_values, set_values)
zstackio/zstack-woodpecker | integrationtest/vm/virtualrouter/scheduler/test_delete_local_backupstorage.py | Python | apache-2.0 | 5,456 | 0.003116
import zstackwoodpecker.operations.scheduler_operations as sch_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.tag_operations as tag_ops
import zstackwoodpecker.operations.backupstorage_operations as bs_ops
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import time
import os
vmBackup = 'vmBackup'
volumeBackup = 'volumeBackup'
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
job1 = None
job2 = None
job_group = None
trigger1 = None
trigger2 = None
def test():
global job1
global job2
global job_group
global trigger1
global trigger2
imagestore = test_lib.lib_get_image_store_backup_storage()
if imagestore == None:
test_util.test_skip('Required imagestore to test')
cond = res_ops.gen_query_conditions("tag", '=', "allowbackup")
allow_backup_tags = res_ops.query_resource(res_ops.SYSTEM_TAG, cond)
if not allow_backup_tags:
tag_ops.create_system_tag(resourceType="ImageStoreBackupStorageVO", resourceUuid=imagestore.uuid, tag="allowbackup")
cond = res_ops.gen_query_conditions("tag", '=', "remotebackup")
tags = res_ops.query_resource(res_ops.SYSTEM_TAG, cond)
if not tags:
cond = res_ops.gen_query_conditions("state", '=', "Enabled")
cond = res_ops.gen_query_conditions("status", '=', "Connected")
hosts = res_ops.query_resource(res_ops.HOST, cond)
if not hosts:
test_util.test_fail("No host available for adding imagestore for backup test")
host = hosts[0]
bs_option = test_util.ImageStoreBackupStorageOption()
bs_option.set_name("remote_bs")
bs_option.set_url("/home/sftpBackupStorage")
bs_option.set_hostname(host.managementIp)
bs_option.set_password('password')
bs_option.set_sshPort(host.sshPort)
bs_option.set_username(host.username)
bs_option.set_system_tags(["remotebackup"])
bs_inv = bs_ops.create_image_store_backup_storage(bs_option)
bs_ops.attach_backup_storage(bs_inv.uuid, host.zoneUuid)
remote_uuid = bs_inv.uuid
else:
remote_uuid = tags[0].resourceUuid
vm1 = test_stub.create_vlan_vm(os.environ.get('l3VlanNetworkName1'))
vm2 = test_stub.create_vlan_vm(os.environ.get('l3VlanNetworkName1'))
volume = test_stub.create_volume()
volume.attach(vm2)
test_obj_dict.add_vm(vm1)
test_obj_dict.add_vm(vm2)
test_obj_dict.add_volume(volume)
parameters = {'retentionType': 'Count',
'retentionValue': '10',
'backupStorageUuids': imagestore.uuid,
'remoteBackupStorageUuid': remote_uuid,
'networkWriteBandwidth': '',
'networkReadBandwidth': '',
'volumeReadBandwidth': '',
'fullBackupTriggerUuid': '',
'volumeWriteBandwidth': ''}
test_util.test_logger(parameters)
job1 = sch_ops.create_scheduler_job(name='vm1', description='vm1 backup', target_uuid=vm1.get_vm().rootVolumeUuid,
type=vmBackup, parameters=parameters)
job2 = sch_ops.create_scheduler_job(name='vm2', description='vm2 backup',
target_uuid=vm2.get_vm().rootVolumeUuid, type=vmBackup,
parameters=parameters)
name1 = 'job_group'
job_group = sch_ops.create_scheduler_job_group(name=name1, description='vmbackup', type=vmBackup,
parameters=parameters)
cond = res_ops.gen_query_conditions('uuid', '=', job_group.uuid)
sch_ops.add_jobs_to_job_group([job1.uuid], job_group.uuid)
job_group_inv = res_ops.query_resource(res_ops.SCHEDULERJOBGROUP, cond)[0]
assert len(job_group_inv.jobsUuid) == 1
sch_ops.add_jobs_to_job_group([job2.uuid], job_group.uuid)
job_group_inv = res_ops.query_resource(res_ops.SCHEDULERJOBGROUP, cond)[0]
assert len(job_group_inv.jobsUuid) == 2
trigger1 = sch_ops.create_scheduler_trigger('10min', start_time=int(time.time() + 5), type='cron',
cron='*0 0/10 * * * ?')
sch_ops.add_scheduler_job_group_to_trigger(trigger1.uuid, job_group.uuid)
job_group_inv = res_ops.query_resource(res_ops.SCHEDULERJOBGROUP, cond)[0]
assert len(job_group_inv.triggersUuid) == 1
tag_ops.delete_tag(allow_backup_tags[0].uuid)
cond = res_ops.gen_query_conditions('uuid', '=', job_group.uuid)
job_group_inv = res_ops.query_resource(res_ops.SCHEDULERJOBGROUP, cond)[0]
assert job_group_inv.state == "Disabled"
test_lib.lib_robot_cleanup(test_obj_dict)
sch_ops.del_scheduler_job(job1.uuid)
sch_ops.del_scheduler_job(job2.uuid)
sch_ops.del_scheduler_job_group(job_group.uuid)
sch_ops.del_scheduler_trigger(trigger1.uuid)
def error_cleanup():
global job1,job2,job_group,trigger1,trigger2
test_lib.lib_error_cleanup(test_obj_dict)
if job1:
sch_ops.del_scheduler_job(job1.uuid)
if job2:
sch_ops.del_scheduler_job(job2.uuid)
if job_group:
sch_ops.del_scheduler_job_group(job_group.uuid)
if trigger1:
        sch_ops.del_scheduler_trigger(trigger1.uuid)
if trigger2:
sch_ops.del_scheduler_trigger(trigger2.uuid)
ppwwyyxx/tensorflow | tensorflow/python/training/checkpoint_utils.py | Python | apache-2.0 | 19,434 | 0.005557
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tools to work with checkpoints."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import six
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import py_checkpoint_reader
from tensorflow.python.training.saving import saveable_object_util
from tensorflow.python.util.tf_export import tf_export
__all__ = [
"load_checkpoint", "load_variable", "list_variables",
"checkpoints_iterator", "init_from_checkpoint"
]
@tf_export("train.load_checkpoint")
def load_checkpoint(ckpt_dir_or_file):
"""Returns `CheckpointReader` for checkpoint found in `ckpt_dir_or_file`.
If `ckpt_dir_or_file` resolves to a directory with multiple checkpoints,
reader for the latest checkpoint is returned.
Args:
ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint
file.
Returns:
`CheckpointReader` object.
Raises:
ValueError: If `ckpt_dir_or_file` resolves to a directory with no
checkpoints.
"""
filename = _get_checkpoint_filename(ckpt_dir_or_file)
if filename is None:
raise ValueError("Couldn't find 'checkpoint' file or checkpoints in "
"given directory %s" % ckpt_dir_or_file)
return py_checkpoint_reader.NewCheckpointReader(filename)
@tf_export("train.load_variable")
def load_variable(ckpt_dir_or_file, name):
"""Returns the tensor value of the given variable in the checkpoint.
Args:
ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.
name: Name of the variable to return.
Returns:
A numpy `ndarray` with a copy of the value of this variable.
"""
# TODO(b/29227106): Fix this in the right place and remove this.
if name.endswith(":0"):
name = name[:-2]
reader = load_checkpoint(ckpt_dir_or_file)
return reader.get_tensor(name)
@tf_export("train.list_variables")
def list_variables(ckpt_dir_or_file):
"""Returns list of all variables in the checkpoint.
Args:
ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.
Returns:
List of tuples `(name, shape)`.
"""
reader = load_checkpoint(ckpt_dir_or_file)
variable_map = reader.get_variable_to_shape_map()
names = sorted(variable_map.keys())
result = []
for name in names:
result.append((name, variable_map[name]))
return result
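# Illustrative usage sketch (the directory "/tmp/model" and the variable name
# "dense/kernel" are made-up examples, not part of this module): the three helpers
# above compose as follows.
#
#   reader = load_checkpoint("/tmp/model")             # CheckpointReader for the latest checkpoint
#   for name, shape in list_variables("/tmp/model"):   # [(name, shape), ...]
#     print(name, shape)
#   value = load_variable("/tmp/model", "dense/kernel")  # numpy ndarray with the stored value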
def wait_for_new_checkpoint(checkpoint_dir,
last_checkpoint=None,
seconds_to_sleep=1,
timeout=None):
"""Waits until a new checkpoint file is found.
Args:
checkpoint_dir: The directory in which checkpoints are saved.
last_checkpoint: The last checkpoint path used or `None` if we're expecting
a checkpoint for the first time.
seconds_to_sleep: The number of seconds to sleep for before looking for a
new checkpoint.
timeout: The maximum number of seconds to wait. If left as `None`, then the
process will wait indefinitely.
Returns:
a new checkpoint path, or None if the timeout was reached.
"""
logging.info("Waiting for new checkpoint at %s", checkpoint_dir)
stop_time = time.time() + timeout if timeout is not None else None
while True:
checkpoint_path = checkpoint_management.latest_checkpoint(checkpoint_dir)
if checkpoint_path is None or checkpoint_path == last_checkpoint:
if stop_time is not None and time.time() + seconds_to_sleep > stop_time:
return None
time.sleep(seconds_to_sleep)
else:
logging.info("Found new checkpoint at %s", checkpoint_path)
return checkpoint_path
@tf_export("train.checkpoints_iterator")
def checkpoints_iterator(checkpoint_dir,
min_interval_secs=0,
timeout=None,
timeout_fn=None):
"""Continuously yield new checkpoint files as they appear.
The iterator only checks for new checkpoints when control flow has been
reverted to it. This means it can miss checkpoints if your code takes longer
to run between iterations than `min_interval_secs` or the interval at which
new checkpoints are written.
The `timeout` argument is the maximum number of seconds to block waiting for
a new checkpoint. It is used in combination with the `timeout_fn` as
follows:
* If the timeout expires and no `timeout_fn` was specified, the iterator
stops yielding.
* If a `timeout_fn` was specified, that function is called and if it returns
a true boolean value the iterator stops yielding.
* If the function returns a false boolean value then the iterator resumes the
wait for new checkpoints. At this point the timeout logic applies again.
This behavior gives control to callers on what to do if checkpoints do not
come fast enough or stop being generated. For example, if callers have a way
to detect that the training has stopped and know that no new checkpoints
will be generated, they can provide a `timeout_fn` that returns `True` when
the training has stopped. If they know that the training is still going on
they return `False` instead.
Args:
checkpoint_dir: The directory in which checkpoints are saved.
min_interval_secs: The minimum number of seconds between yielding
checkpoints.
timeout: The maximum number of seconds to wait between checkpoints. If left
as `None`, then the process will wait indefinitely.
timeout_fn: Optional function to call after a timeout. If the function
returns True, then it means that no new checkpoints will be generated and
the iterator will exit. The function is called with no arguments.
Yields:
String paths to latest checkpoint files as they arrive.
"""
checkpoint_path = None
while True:
new_checkpoint_path = wait_for_new_checkpoint(
checkpoint_dir, checkpoint_path, timeout=timeout)
if new_checkpoint_path is None:
if not timeout_fn:
# timed out
logging.info("Timed-out waiting for a checkpoint.")
return
if timeout_fn():
# The timeout_fn indicated that we are truly done.
return
else:
# The timeout_fn indicated that more checkpoints may come.
continue
start = time.time()
checkpoint_path = new_checkpoint_path
yield checkpoint_path
time_to_next_eval = start + min_interval_secs - time.time()
if time_to_next_eval > 0:
time.sleep(time_to_next_eval)
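# Illustrative usage sketch (the directory, the 600s timeout and the helpers
# training_finished() / run_evaluation() are assumptions for the example): evaluate
# every checkpoint as it appears, and stop once the timeout expires while training is
# known to be over.
#
#   for ckpt_path in checkpoints_iterator("/tmp/model", min_interval_secs=60,
#                                         timeout=600,
#                                         timeout_fn=training_finished):
#     run_evaluation(ckpt_path)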
@tf_export(v1=["train.init_from_checkpoint"])
def init_from_checkpoint(ckpt_dir_or_file, assignment_map):
"""Replaces `tf.Variable` initializers so they load from a checkpoint file.
Values are not loaded immediately, but when the initializer is run
(typically by running a `tf.compat.v1.global_variables_initializer` op).
Note: This overrides default initialization ops of specified variables and
redefines dtype.
Assignment map supports following syntax:
* `'checkpoint_scope_name/': 'scope_name/'` - will load all variables in
current `scope_name` from `ch
|
guillemborrell/Thermopy
|
test/test_iapws.py
|
Python
|
bsd-3-clause
| 9,460
| 0.00222
|
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 18 09:29:56 2015
@author: monteiro
"""
from thermopy.iapws import Water
from thermopy.units import Pressure, Temperature
def test_iapws():
"""
Tests are given inside the IAPWS document. See references for more details.
"""
#test Tsat given P
assert round(Water(
1e5, 373.15).temperature_saturation(0.1e6), 6) == 372.755919
assert round(Water(
1e5, 373.15).temperature_saturation(1e6), 6) == 453.035632
assert round(Water(
1e5, 373.15).temperature_saturation(10e6), 6) == 584.149488
#test Psat given T
assert round(Water(
1e5, 373.15).pressure_saturation(300).MPa, 11) == 0.00353658941
assert round(Water(
1e5, 373.15).pressure_saturation(500).MPa, 8) == 2.63889776
assert round(Water(
1e5, 373.15).pressure_saturation(600).MPa, 7) == 12.3443146
#test regions
# arbitrary points
point_in_region1 = (Pressure(20e6), Temperature(300))
point_in_region2 = (Pressure(1e5), Temperature(373.15))
point_in_region3 = (Pressure(40e6), Temperature(700))
point_in_region4 = (Pressure(1).unit('atm'), Temperature(373.1243))
point_in_region5 = (Pressure(20e6), Temperature(1500))
assert Water(*point_in_region1)._is_in_region() == 1
assert Water(*point_in_region2)._is_in_region() == 2
assert Water(*point_in_region3)._is_in_region() == 3
# region 4 does not exist as a region; it is rather the saturation line
assert Water(*point_in_region5)._is_in_region() == 5
#region 1
#assert specific volume
assert round(Water(3e6, 300, massic_basis=True).specific_volume(),
11) == 0.00100215168
assert round(Water(80e6, 300, massic_basis=True).specific_volume(),
12) == 0.000971180894
assert round(Water(3e6, 500, massic_basis=True).specific_volume(),
11) == 0.00120241800
#
# #assert internal energy
assert round(Water(3e6, 300, massic_basis=True).internal_energy(),
6) == 112.324818
assert round(Water(80e6, 300, massic_basis=True).internal_energy(),
6) == 106.448356
assert round(Water(3e6, 500, massic_basis=True).internal_energy(),
6) == 971.934985
#
    # #assert entropy
assert round(Water(3e6, 300, massic_basis=True).entropy(),
9) == 0.392294792
assert round(Water(80e6, 300, massic_basis=True).entropy(),
9) == 0.368563852
assert round(Water(3e6, 500, massic_basis=True).entropy(),
8) == 2.58041912
#assert enthalpy
assert round(Water(3e6, 300, massic_basis=True).enthalpy(),
6) == 115.331273
assert round(Water(80e6, 300, massic_basis=True).enthalpy(),
6) == 184.142828
assert round(Water(3e6, 500, massic_basis=True).enthalpy(),
6) == 975.542239
#assert cp
assert round(Water(3e6, 300, massic_basis=True).heat_capacity(),
8) == 4.17301218
assert round(Water(80e6, 300, massic_basis=True).heat_capacity(),
8) == 4.01008987
assert round(Water(3e6, 500, massic_basis=True).heat_capacity(),
8) == 4.65580682
# #assert cv
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
    # assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
#
#assert speed of sound
    assert round(Water(3e6, 300, massic_basis=True).speed_of_sound(),
                 5) == 1507.73921
assert round(Water(80e6, 300, massic_basis=True).speed_of_sound(),
5) == 1634.69054
assert round(Water(3e6, 500, massic_basis=True).speed_of_sound(),
5) == 1240.71337
#region 2
#assert specific volume
assert round(Water(3500, 300, massic_basis=True).specific_volume(),
7) == 39.4913866
assert round(Water(3500, 700, massic_basis=True).specific_volume(),
7) == 92.3015898
assert round(Water(30e6, 700, massic_basis=True).specific_volume(),
11) == 0.00542946619
#
# #assert internal energy
assert round(Water(3500, 300, massic_basis=True).internal_energy(),
5) == 2411.69160
assert round(Water(3500, 700, massic_basis=True).internal_energy(),
5) == 3012.62819
assert round(Water(30e6, 700, massic_basis=True).internal_energy(),
5) == 2468.61076
#
    # #assert entropy
assert round(Water(3500, 300, massic_basis=True).entropy(),
8) == 8.52238967
assert round(Water(3500, 700, massic_basis=True).entropy(),
7) == 10.1749996
assert round(Water(30e6, 700, massic_basis=True).entropy(),
8) == 5.17540298
#assert enthalpy
assert round(Water(3500, 300, massic_basis=True).enthalpy(),
5) == 2549.91145
assert round(Water(3500, 700, massic_basis=True).enthalpy(),
5) == 3335.68375
assert round(Water(30e6, 700, massic_basis=True).enthalpy(),
5) == 2631.49474
#assert cp
# assert round(Water(3e6, 300).heat_capacity(),8) == 4.17301218
# assert round(Water(80e6, 300).heat_capacity(),8) == 4.01008987
# assert round(Water(3e6, 500).heat_capacity(),8) == 4.65580682
# #assert enthalpy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
#
# #assert enthalpy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
#region 3
#assert specific volume
# assert round(Water(3500, 300).specific_volume(),7) == 39.4913866
# assert round(Water(3500, 700).specific_volume(),7) == 92.3015898
# assert round(Water(30e6, 700).specific_volume(),11) == 0.00542946619
#
# #assert internal energy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
#
    # #assert entropy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
#assert enthalpy
assert round(Water(25.5837018e6, 650,
massic_basis=True).enthalpy(), 5) == 1863.43019
assert round(Water(22.2930643e6, 650,
massic_basis=True).enthalpy(),
5) == round(2375.12401, 3)
assert round(Water(78.3095639e6, 750,
massic_basis=True).enthalpy(), 5) == 2258.68845
#assert cp
# assert round(Water(3e6, 300).heat_capacity(),8) == 4.17301218
# assert round(Water(80e6, 300).heat_capacity(),8) == 4.01008987
# assert round(Water(3e6, 500).heat_capacity(),8) == 4.65580682
# #assert enthalpy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
#
# #assert enthalpy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
# region 4
# There is no region 4; instead region 4 is the saturation line
# region 5
#assert specific volume
# assert round(Water(3500, 300).specific_volume(),7) == 39.4913866
# assert round(Water(3500, 700).specific_volume(),7) == 92.3015898
# assert round(Water(30e6, 700).specific_volume(),11) == 0.00542946619
#
# #assert internal energy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round(Water(3e6, 500).enthalpy(),6) == 975.542239
#
    # #assert entropy
# assert round(Water(3e6, 300).enthalpy(),6) == 115.331273
# assert round(Water(80e6, 300).enthalpy(),6) == 184.142828
# assert round
|
faust64/ansible
|
lib/ansible/modules/windows/win_robocopy.py
|
Python
|
gpl-3.0
| 4,902
| 0.00204
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Corwin Brown <blakfeld@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = r'''
---
module: win_robocopy
version_added: "2.2"
short_description: Synchronizes the contents of two directories using Robocopy.
description:
- Synchronizes the contents of two directories on the remote machine. Under the hood this just calls out to RoboCopy, since that should be available on most modern Windows Systems.
options:
src:
description:
- Source file/directory to sync.
required: true
dest:
description:
- Destination file/directory to sync (Will receive contents of src).
required: true
recurse:
description:
- Includes all subdirectories (Toggles the `/e` flag to RoboCopy). If "flags" is set, this will be ignored.
choices:
- true
- false
default: false
required: false
purge:
description:
- Deletes any files/directories found in the destination that do not exist in the source (Toggles the `/purge` flag to RoboCopy). If "flags" is set, this will be ignored.
choices:
- true
- false
default: false
required: false
flags:
description:
- Directly supply Robocopy flags. If set, purge and recurse will be ignored.
default: None
required: false
author: Corwin Brown (@blakfeld)
notes:
- This is not a complete port of the "synchronize" module. Unlike the "synchronize" module this only performs the sync/copy on the remote machine, not from the master to the remote machine.
- This module does not currently support all Robocopy flags.
- Works on Windows 7, Windows 8, Windows Server 2k8, and Windows Server 2k12
'''
EXAMPLES = r'''
- name: Sync the contents of one directory to another
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
- name: Sync the contents of one directory to another, including subdirectories
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
recurse: True
- name: Sync the contents of one directory to another, and remove any files/directories found in destination that do not exist in the source
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
purge: True
- name: Sync content in recursive mode, removing any files/directories found in destination that do not exist in the source
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
recurse: True
purge: True
- name: Sync Two Directories in recursive and purging mode, specifying additional special flags
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
flags: /E /PURGE /XD SOME_DIR /XF SOME_FILE /MT:32
'''
RETURN = r'''
src:
description: The Source file/directory of the sync.
returned: always
type: string
sample: c:\Some\Path
dest:
description: The Destination file/directory of the sync.
returned: always
type: string
sample: c:\Some\Path
recurse:
description: Whether or not the recurse flag was toggled.
returned: always
type: bool
sample: False
purge:
description: Whether or not the purge flag was toggled.
returned: always
type: bool
sample: False
flags:
description: Any flags passed in by the user.
returned: always
type: string
sample: "/e /purge"
rc:
    description: The return code returned by robocopy.
returned: success
type: int
sample: 1
output:
description: The output of running the robocopy command.
returned: success
type: string
sample: "-------------------------------------------------------------------------------\n ROBOCOPY :: Robust File Copy for Windows \n-------------------------------------------------------------------------------\n"
msg:
    description: Output interpreted into a concise message.
returned: always
type: string
sample: No files copied!
changed:
description: Whether or not any changes were made.
returned: always
type: bool
sample: False
'''
|
MPI-IS/bilateralNN
|
bilateralnn_code/examples/tile_segmentation/predict.py
|
Python
|
bsd-3-clause
| 710
| 0.002817
|
import sys
import numpy as np
from config import *  # assumed to also provide caffe and the NUM_DATA/RAND_SEED settings used below
from get_tile_data import get_tile_data
from compute_accuracy_iou import compute_accuracy_and_iou
[test_x, test_y] = get_tile_data(NUM_DATA['TEST'], RAND_SEED['TEST'])
def predict(prototxt, caffe_model):
net = caffe.Net(prototxt, caffe_model, caffe.TEST)
dinputs = {}
dinputs['data'] = test_x
    predictions = net.forward_all(**dinputs)['conv_result']
    [accuracy, iou] = compute_accuracy_and_iou(predictions, test_y)
print([accuracy, iou])
return [accuracy, iou]
if __name__ == '__main__':
if len(sys.argv) < 3:
print('Usage: ' + sys.argv[0] + ' <prototxt> <caffe_model>')
else:
predict(sys.argv[1], sys.argv[2])
|
timmyshen/Guide_To_Data_Mining
|
Chapter2/SharpenYourPencil/distance.py
|
Python
|
mit
| 4,916
| 0.003662
|
__author__ = 'Benqing'
users = {
"Angelica": {"Blues Traveler": 3.5, "Broken Bells": 2.0, "Norah Jones": 4.5, "Phoenix": 5.0,
"Slightly Stoopid": 1.5, "The Strokes": 2.5, "Vampire Weekend": 2.0},
"Bill": {"Blues Traveler": 2.0, "Broken Bells": 3.5, "Deadmau5": 4.0, "Phoenix": 2.0, "Slightly Stoopid": 3.5,
"Vampire Weekend": 3.0},
"Chan": {"Blues Traveler": 5.0, "Broken Bells": 1.0, "Deadmau5": 1.0, "Norah Jones": 3.0, "Phoenix": 5,
"Slightly Stoopid": 1.0},
"Dan": {"Blues Traveler": 3.0, "Broken Bells": 4.0, "Deadmau5": 4.5, "Phoenix": 3.0, "Slightly Stoopid": 4.5,
"The Strokes": 4.0, "Vampire Weekend": 2.0},
"Hailey": {"Broken Bells": 4.0, "Deadmau5": 1.0, "Norah Jones": 4.0, "The Strokes": 4.0, "Vampire Weekend": 1.0},
"Jordyn": {"Broken Bells": 4.5, "Deadmau5": 4.0, "Norah Jones": 5.0, "Phoenix": 5.0, "Slightly Stoopid": 4.5,
"The Strokes": 4.0, "Vampire Weekend": 4.0},
"Sam": {"Blues Traveler": 5.0, "Broken Bells": 2.0, "Norah Jones": 3.0, "Phoenix": 5.0, "Slightly Stoopid": 4.0,
"The Strokes": 5.0},
"Veronica": {"Blues Traveler": 3.0, "Norah Jones": 5.0, "Phoenix": 4.0, "Slightly Stoopid": 2.5, "The Strokes": 3.0}
}
# Compute the Euclidean Distance between Hailey and Veronica
import math
def minkowski_dist(user_ratings1, user_ratings2, r):
"""Minkowski Distance between two users"""
if not (isinstance(user_ratings1, dict) and isinstance(user_ratings2, dict)):
exit()
item_score_diff_r_sum = 0.0
for item_name in user_ratings1:
if item_name in user_ratings2:
# there is a matched item
item_score_diff_r_sum += abs(user_ratings1[item_name] - user_ratings2[item_name]) ** r
return math.pow(item_score_diff_r_sum, 1.0 / r)
def euclidean_dist(user_ratings1, user_ratings2):
"""Euclidean Distance between two users"""
if not (isinstance(user_ratings1, dict) and isinstance(user_ratings2, dict)):
exit()
item_score_diff_sqr_sum = 0.0
for item_name in user_ratings1:
if item_name in user_ratings2:
# there is a matched item
item_score_diff_sqr_sum += (user_ratings1[item_name] - user_ratings2[item_name]) ** 2
return math.sqrt(item_score_diff_sqr_sum)
def manhattan_dist(user_ratings1, user_ratings2):
"""Manhattan Distance between two users"""
if not (isinstance(user_ratings1, dict) and isinstance(user_ratings2, dict)):
exit()
item_score_diff_abs_sum = 0.0
for item_name in user_ratings1:
if item_name in user_ratings2:
# there is a matched item
item_score_diff_abs_sum += abs(user_ratings1[item_name] - user_ratings2[item_name])
return item_score_diff_abs_sum
def compute_nearest_neighbor(username, users_in):
"""creates a sorted list of users based on their distance to username"""
distances = []
for user in users_in:
if user != username:
distance = minkowski_dist(users_in[user], users_in[username], 2)
distances.append((distance, user))
# sort based on distance -- closest first
distances.sort()
return distances
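# Worked example (the numbers follow directly from the ratings above): Hailey and
# Veronica share two rated bands (Norah Jones, The Strokes), each differing by 1, so
#   euclidean_dist(users['Hailey'], users['Veronica']) == sqrt(1 + 1) ~= 1.414
# while Hailey and Jordyn share five bands with larger gaps, giving
#   euclidean_dist(users['Hailey'], users['Jordyn']) == sqrt(19.25) ~= 4.387
# so compute_nearest_neighbor('Hailey', users) ranks Veronica ahead of Jordyn.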
def pearson(user_ratings1, user_ratings2):
"""An approximation of Pearson Correlation"""
    n = 0
# This actually could happen
# if vals1_len != vals2_len:
# exit()
sum_of_products = 0.0
sum_of_user1 = 0.0
sum_of_user2 = 0.0
sum_of_user1_sqr = 0.0
sum_of_user2_sqr = 0.0
for k in user_ratings1:
if k in user_ratings2:
sum_of_products += user_ratings1[k] * user_ratings2[k]
sum_of_user1 += user_ratings1[k]
sum_of_user2 += user_ratings2[k]
sum_of_user1_sqr += user_ratings1[k] * user_ratings1[k]
sum_of_user2_sqr += user_ratings2[k] * user_ratings2[k]
n += 1
return (sum_of_products - sum_of_user1 * sum_of_user2 / n) / (
math.sqrt(sum_of_user1_sqr - sum_of_user1 * sum_of_user1 / n) *
math.sqrt(sum_of_user2_sqr - sum_of_user2 * sum_of_user2 / n))
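# The loop above is the single-pass approximation of Pearson's r over the items both
# users have rated:
#   r ~= (sum(xy) - sum(x)*sum(y)/n)
#        / (sqrt(sum(x^2) - sum(x)^2/n) * sqrt(sum(y^2) - sum(y)^2/n))
# Note (an observation, not a fix): it raises ZeroDivisionError when the two users
# share no rated items (n == 0).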
if __name__ == '__main__':
    print 'testing...'
# my_dict1 = {'a': 1, 'b': 2}
# print my_dict1
# for k in my_dict1:
# print k
# print type(my_dict1)
# print type(my_dict1) == dict
# print euclidean_dist(users['Hailey'], users['Veronica'])
# print euclidean_dist(users['Hailey'], users['Jordyn'])
# print manhattan_dist(users['Hailey'], users['Veronica'])
# print manhattan_dist(users['Hailey'], users['Jordyn'])
# print minkowski_dist(users['Hailey'], users['Veronica'], 4)
# print compute_nearest_neighbor('Hailey', users)
# print users['Hailey'].values()
# print type(users['Hailey'].values())
print pearson(users['Angelica'], users['Bill'])
print pearson(users['Angelica'], users['Hailey'])
print pearson(users['Angelica'], users['Jordyn'])
|
slashdd/sos
|
sos/report/plugins/mpt.py
|
Python
|
gpl-2.0
| 732
| 0
|
# Copyright (C) 2015 Red Hat, Inc., Bryn M. Reeves <bmr@redhat.com>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.report.plugins import Plugin, IndependentPlugin
class Mpt(Plugin, IndependentPlugin):
    short_desc = 'LSI Message Passing Technology'
files = ('/proc/mpt',)
profiles = ('storage', )
plugin_name = 'mpt'
def setup(self):
self.add_copy_spec("/proc/mpt")
# vim: set et ts=4 sw=4 :
|
shizeeg/pyicqt
|
src/debug.py
|
Python
|
gpl-2.0
| 3,058
| 0.039568
|
# Copyright 2004-2006 Daniel Henninger <jadestorm@nc.rr.com>
# Licensed for distribution under the GPL version 2, check COPYING for details
from twisted.python import log
import sys, time
import config
def observer(eventDict):
try:
observer2(eventDict)
except Exception, e:
printf("CRITICAL: Traceback in debug.observer2 - " + str(e))
def observer2(eventDict):
edm = eventDict['message']
if isinstance(edm, tuple): # LogEvent can be in tuple
edm = edm[0]
if isinstance(edm, LogEvent):
if edm.category == INFO and config.debugLevel < 3:
return
elif edm.category == WARN and config.debugLevel < 2:
return
elif edm.category == ERROR and config.debugLevel < 1:
return
text = str(edm)
elif edm:
if not eventDict['isError'] and config.debugLevel < 3: return # not error
text = str(edm)
else:
		if eventDict['isError'] and eventDict.has_key('failure'):
if config.debugLevel < 1: return
text = eventDict['failure'].getTraceback()
elif eventDict.has_key('format'):
if config.debugLevel < 3: return
text = eventDict['format'] % eventDict
else:
return
# Now log it!
timeStr = time.strftime("[%Y-%m-%d %H:%M:%S]", time.localtime(eventDict['time']))
text = text.replace("\n", "\n\t")
global debugFile
debugFile.write("%s %s\n" % (timeStr, text))
debugFile.flush()
def printf(text):
sys.__stdout__.write(text + "\n")
sys.__stdout__.flush()
debugFile = None
def reloadConfig():
global debugFile
if debugFile:
debugFile.close()
if config.debugLevel > 0:
if len(config.debugFile) > 0:
try:
debugFile = open(config.debugFile, "a")
log.msg("Reopened log file.")
except IOError:
log.discardLogs() # Give up
debugFile = sys.__stdout__
return
else:
debugFile = sys.__stdout__
log.startLoggingWithObserver(observer)
else:
log.discardLogs()
class INFO : pass # debugLevel == 3
class WARN : pass # debugLevel >= 2
class ERROR: pass # debuglevel >= 1
class LogEvent:
def __init__(self, category=INFO, ident="", msg="", log=True, skipargs=False):
self.category, self.ident, self.msg = category, ident, msg
frame = sys._getframe(1)
# Get the class name
s = str(frame.f_locals.get("self", frame.f_code.co_filename))
self.klass = s[s.find(".")+1:s.find(" ")]
if self.klass == "p": self.klass = ""
self.method = frame.f_code.co_name
if self.method == "?": self.method = ""
self.args = frame.f_locals
self.skipargs = skipargs
if log:
self.log()
def __str__(self):
args = {}
if not self.skipargs:
for key in self.args.keys():
if key == "self":
#args["self"] = "instance"
continue
val = self.args[key]
args[key] = val
try:
if len(val) > 128:
args[key] = "Oversize arg"
except:
# If its not an object with length, assume that it can't be too big. Hope that's a good assumption.
pass
category = str(self.category).split(".")[1]
return "%s :: %s :: %s :: %s :: %s :: %s" % (category, str(self.ident), str(self.klass), self.method, str(args), self.msg)
def log(self):
log.msg(self)
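# Illustrative usage sketch (the ident and message are made up): callers construct a
# LogEvent wherever a categorised line should reach the debug log; the constructor
# captures the calling class/method via sys._getframe and, unless log=False is passed,
# logs itself immediately through twisted's log.msg.
#
#   LogEvent(WARN, ident="session42", msg="connection lost, retrying")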
|
nkgilley/home-assistant
|
tests/components/atag/test_init.py
|
Python
|
apache-2.0
| 1,472
| 0.000679
|
"""Tests for the ATAG integration."""
import aiohttp
from homeassistant.components.atag import DOMAIN
from homeassistant.config_entries import ENTRY_STATE_SETUP_RETRY
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.components.atag import init_integration
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_config_entry_not_ready(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test configuration entry not ready on library error."""
aioclient_mock.get("http://127.0.0.1:10000/retrieve", exc=aiohttp.ClientError)
entry = await init_integration(hass, aioclient_mock)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_config_entry_empty_reply(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test configuration entry not ready when library returns False."""
with patch("pyatag.AtagOne.update", return_value=False):
entry = await init_integration(hass, aioclient_mock)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unload_config_entry(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the ATAG configuration entry unloading."""
entry = await init_integration(hass, aioclient_mock)
assert hass.data[DOMAIN]
await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert not hass.data.get(DOMAIN)
|
YannickJadoul/Parselmouth
|
tests/resource_fixtures.py
|
Python
|
gpl-3.0
| 1,735
| 0.006916
|
# Copyright (C) 2018-2022 Yannick Jadoul
#
# This file is part of Parselmouth.
#
# Parselmouth is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Parselmouth is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Parselmouth. If not, see <http://www.gnu.org/licenses/>
import pytest
import pytest_lazyfixture
import parselmouth
def combined_fixture(*args, **kwargs):
return pytest.fixture(params=map(pytest_lazyfixture.lazy_fixture, args), ids=args, **kwargs)
@pytest.fixture
def sound_path(resources):
yield resources["the_north_wind_and_the_sun.wav"]
@pytest.fixture
def sound(sound_path):
yield parselmouth.read(sound_path)
@pytest.fixture
def intensity(sound):
yield sound.to_intensity()
@pytest.fixture
def pitch(sound):
yield sound.to_pitch()
@pytest.fixture
def spectrogram(sound):
yield sound.to_spectrogram()
@combined_fixture('intensity', 'pitch', 'spectrogram', 'sound')
def sampled(request):
yield request.param
@combined_fixture('sampled')
def thing(request):
yield request.param
@pytest.fixture
def text_grid_path(resources):
yield resources["the_north_wind_and_the_sun.TextGrid"]
@pytest.fixture
def text_grid(text_grid_path):
yield parselmouth.read(text_grid_path)
@pytest.fixture
def script_path(resources):
yield resources["script.praat"]
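# Illustrative sketch (the test below is an assumption, not part of the real suite):
# the combined fixtures above let a single parametrised test run once per underlying
# Parselmouth object, e.g.
#
#   def test_time_domain_is_ordered(sampled):
#       assert sampled.xmin <= sampled.xmax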
|
otron/zenodo
|
zenodo/shell.py
|
Python
|
gpl-3.0
| 3,274
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2012, 2013, 2014, 2015 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Convenience module for importing utilities need in a shell."""
import os
from werkzeug.utils import secure_filename
from invenio.base.globals import cfg
from invenio.ext.cache import cache
from invenio.ext.login import UserInfo
from invenio.ext.sqlalchemy import db
from invenio.modules.accounts.models import User
from invenio.modules.deposit.models import Deposition, DepositionFile, \
DepositionStorage, DepositionType
from invenio.modules.formatter import format_record
from invenio.modules.pidstore.models import PersistentIdentifier
from invenio.modules.pidstore.tasks import datacite_delete, \
datacite_register, datacite_sync, datacite_update
from invenio.modules.records.api import get_record
from invenio.utils.serializers import ZlibPickle as Serializer
from zenodo.modules.deposit.workflows.upload import transfer_ownership
def ban_user(user_id):
"""Block user."""
u = User.query.get(user_id)
if u.note != '0':
u.note = '0'
db.session.commit()
remove_session(user_id)
def remove_session(user_id):
"""Remove session for a user."""
prefix = cache.cache.key_prefix + "session::"
for k in cache.cache._client.keys():
if k.startswith(prefix):
k = k[len(cache.cache.key_prefix):]
try:
data = Serializer.loads(cache.get(k))
if data['uid'] == user_id:
print k
cache.delete(k)
except TypeError:
pass
def deposition_users(depositions):
"""Iterate over deposition users."""
for d in depositions:
yield Deposition.get(d).user_id
def deposition_users_emails(depositions):
"""Get list of email addresses for depositions."""
for user_id in deposition_users(depositions):
yield User.query.get(user_id).email
def deposition_with_files(files, user_id=None, deposition_id=None):
"""Add very big files to a deposition."""
if deposition_id:
d = Deposition.get(deposition_id)
else:
d = Deposition.create(User.query.get(user_id))
for filepath in files:
with open(filepath, "rb") as fileobj:
filename = os.path.basename(filepath)
df = DepositionFile(backend=DepositionStorage(d.id))
df.save(fileobj, filename=filename)
d.add_file(df)
return d
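# Illustrative usage sketch (the file path and user id are made-up examples): attach a
# large local file to a brand new deposition owned by user 1.
#
#   d = deposition_with_files(["/data/big_dataset.tar.gz"], user_id=1)
#
# The returned Deposition already has the file attached via DepositionStorage and
# add_file().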
|
laborautonomo/opps
|
opps/core/__init__.py
|
Python
|
mit
| 4,511
| 0
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from appconf import AppConf
trans_app_label = _('Core')
class OppsCoreConf(AppConf):
DEFAULT_URLS = ('127.0.0.1', 'localhost',)
SHORT = 'googl'
SHORT_URL = 'googl.short.GooglUrlShort'
CHANNEL_CONF = {}
VIEWS_LIMIT = None
PAGINATE_BY = 10
PAGINATE_SUFFIX = u''
PAGINATE_NOT_APP = []
CHECK_MOBILE = False
DOMAIN_MOBILE = u''
PROTOCOL_MOBILE = u'http'
ADMIN_RULES = {}
RELATED_POSTS_PLACEHOLDER = "---related---"
CACHE_PREFIX = 'opps'
CACHE_EXPIRE = 300
CACHE_EXPIRE_LIST = 300
CACHE_EXPIRE_DETAIL = 300
RSS_LINK_TEMPLATE = '<a href="{}" class="ir ico ico-rss">RSS</a>'
LIST_MODELS = ('Post',)
RECOMMENDATION_RANGE_DAYS = 180
SMART_SLUG_ENABLED = True
MENU = True
MIRROR_CHANNEL = False
CONTAINERS_BLACKLIST = ['Entry']
CONTAINERS_SITE_ID = None
# default settings for tinymce
EDITOR = {
'editor': 'tinymce',
'height': 400,
'js': ('/static/tinymce/tinymce.min.js',),
"theme": "modern",
"plugins": [
"""advlist autolink lists link image charmap print preview hr
anchor pagebreak """,
"searchreplace wordcount visualblocks visualchars code fullscreen",
"""insertdatetime media nonbreaking save table contextmenu
directionality""",
"template paste textcolor opps"
],
"toolbar1": """insertfile undo redo | styleselect | bold italic |
alignleft aligncenter alignright alignjustify |
bullist numlist outdent indent | link image media |
print preview | forecolor backcolor | opps""",
"image_advtab": True,
"templates": [
{"title": 'Related', "content": RELATED_POSTS_PLACEHOLDER},
],
"file_browser_callback": 'CustomFileBrowser',
}
class Meta:
prefix = 'opps'
class GrapelliConf(AppConf):
ADMIN_TITLE = "Opps CMS Admin"
INDEX_DASHBOARD = 'opps.contrib.admin.dashboard.CustomIndexDashboard'
class Meta:
prefix = 'GRAPPELLI'
class AdminConf(AppConf):
SHORTCUTS = [
{
'shortcuts': [
{
'url_name': 'admin:articles_post_add',
'title': '+ Notícia',
'class': 'file3',
'help': 'Clique para adicionar uma nova notícia'
},
{
'url_name': 'admin:articles_post_changelist',
'title': 'Notícias',
'count': 'opps.contrib.admin.shortcuts.count_posts',
'class': 'file2',
'help': 'Clique para visualisar todas as notícias'
},
{
'url_name': 'admin:images_image_add',
'title': '+ Imagem',
'class': 'picture',
'help': 'Clique para adicionar uma nova imagem'
},
{
'url_name': 'admin:articles_album_changelist',
'title': 'Álbum',
'count': 'opps.contrib.admin.shortcuts.count_albums',
'class': 'camera',
'help': 'Clique para visualisar todos os álbuns'
},
{
'url': '/',
'open_new_window': True,
                'help': 'Clique para visualizar a home page do site'
},
]
}
]
SHORTCUTS_SETTINGS = {
'hide_app_list': True,
'open_new_window': False,
}
SHORTCUTS_CLASS_MAPPINGS_EXTRA = [
('blogs_blogpost', 'blog')
]
class Meta:
prefix = 'ADMIN'
class StaticSiteMapsConf(AppConf):
ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps'
class Meta:
prefix = 'staticsitemaps'
class HaystackConf(AppConf):
CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
}
}
class Meta:
prefix = 'haystack'
class ThumborConf(AppConf):
SERVER = 'http://localhost:8888'
MEDIA_URL = 'http://localhost:8000/media'
SECURITY_KEY = ''
ARGUMENTS = {}
ENABLED = False
class Meta:
prefix = 'thumbor'
class DjangoConf(AppConf):
CACHES = {'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}}
|
WING-NUS/corpSearch
|
mongo.py
|
Python
|
lgpl-3.0
| 56
| 0
|
from pymongo import MongoClient
client = MongoClient()
|
dillmann/rscs
|
lib/DeviceManager.py
|
Python
|
mit
| 1,683
| 0.026738
|
# author: brian dillmann
# for rscs
from Devices.Input import Input
from Devices.Timer import Timer
from Devices.AnalogInput import AnalogInput
from Devices.Output import Output
class DeviceManager:
def __init__(self):
		self.inputs = {}
self.outputs = {}
def addSimpleInput(self, name, location, invert = False):
if name in self.inputs:
raise KeyError('Cannot create device with name %s because input with that name already exists' % name)
self.inputs[name] = Input(name, location, invert)
def addTimer(self, name, interval = 's'):
if name in self.inputs:
raise KeyError('Cannot create device with name %s because input with that name already exists' % name)
self.inputs[name] = Timer(name, interval)
def addAnalogInput(self, name, location):
if name in self.inputs:
raise KeyError('Cannot create device with name %s because input with that name already exists' % name)
self.inputs[name] = AnalogInput(name, location)
def addOutput(self, name, location, invert = False):
if name in self.outputs:
raise KeyError('Cannot create device with name %s because output with that name already exists' % name)
self.outputs[name] = Output(name, location, invert)
def read(self, name):
if not name in self.inputs:
raise KeyError('Cannot find input with name %s, unable to read' % name)
return self.inputs[name].read()
def turnOn(self, name):
if not name in self.outputs:
raise KeyError('Cannot find output with name %s, unable to turn on' % name)
self.outputs[name].on()
def turnOff(self, name):
if not name in self.outputs:
raise KeyError('Cannot find output with name %s, unable to turn off' % name)
self.outputs[name].off()
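# Illustrative usage sketch (device names and pin locations are made-up examples):
#
#   dm = DeviceManager()
#   dm.addSimpleInput('door_switch', 17)
#   dm.addOutput('alarm', 22)
#   if dm.read('door_switch'):
#       dm.turnOn('alarm')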
|
openstack/murano
|
murano/policy/modify/actions/action_manager.py
|
Python
|
apache-2.0
| 3,403
| 0
|
# Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_utils import importutils
from stevedore import extension
import yaml
LOG = logging.getLogger(__name__)
class ModifyActionManager(object):
"""Manages modify actions
The manager encapsulates extensible plugin mechanism for
modify actions loading. Provides ability to apply action on
given object model based on action specification retrieved
from congress
"""
def __init__(self):
self._cache = {}
def load_action(self, name):
"""Loads action by its name
Loaded actions are cached. Plugin mechanism is based on
distutils entry points. Entry point namespace is
'murano_policy_modify_actions'
:param name: action name
:return:
"""
if name in self._cache:
return self._cache[name]
action = self._load_action(name)
self._cache[name] = action
return action
@staticmethod
def _load_action(name):
mgr = extension.ExtensionManager(
namespace='murano_policy_modify_actions',
invoke_on_load=False
)
for ext in mgr.extensions:
if name == ext.name:
target = ext.entry_point_target.replace(':', '.')
return importutils.import_class(target)
raise ValueError('No such action definition: {action_name}'
.format(action_name=name))
def apply_action(self, obj, action_spec):
"""Apply action on given model
Parse action and its parameters from action specification
retrieved from congress. Action specification is YAML format.
        E.g. remove-object: {object_id: abc123}
Action names are keys in top-level dictionary. Values are
dictionaries containing key/value parameters of the action
:param obj: subject of modification
:param action_spec: YAML action spec
:raise ValueError: in case of malformed action spec
"""
actions = yaml.safe_load(action_spec)
if not isinstance(actions, dict):
            raise ValueError('Expected action spec format is '
'"action-name: {{p1: v1, ...}}" '
'but got "{action_spec}"'
.format(action_spec=action_spec))
for name, kwargs in actions.items():
LOG.debug('Executing action {name}, params {params}'
.format(name=name, params=kwargs))
# loads action class
action_class = self.load_action(name)
# creates action instance
action_instance = action_class(**kwargs)
# apply action on object model
action_instance.modify(obj)
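# Illustrative usage sketch (the object model is made up; the spec reuses the example
# from the docstring, and 'remove-object' must be registered under the
# 'murano_policy_modify_actions' entry point namespace):
#
#   manager = ModifyActionManager()
#   manager.apply_action(model, 'remove-object: {object_id: abc123}')
#
# apply_action() parses the YAML spec, loads the matching action class and calls its
# modify() method on the model in place.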
|
namgivu/shared-model-FlaskSqlAlchemy-vs-SQLAlchemy
|
python-app/model/user.py
|
Python
|
gpl-3.0
| 1,292
| 0.026316
|
from base_model import BaseModel
import sqlalchemy as db
class User(BaseModel):
#table mapping
__tablename__ = "users"
##region column mapping
id = db.Column(db.Integer, primary_key=True)
    user_name = db.Column(db.Text)
primary_email_id = db.Column(db.Integer, db.ForeignKey('user_emails.id') )
    #Use model class instead of physical table name for db.ForeignKey() ref. http://stackoverflow.com/a/41633052/248616
from model.address import Address
billing_address_id = db.Column(db.Integer, db.ForeignKey(Address.__table__.c['id'] ))
shipping_address_id = db.Column(db.Integer, db.ForeignKey(Address.__table__.c['id'] ))
##endregion column mapping
##region relationship obj
emails = db.relationship('UserEmail',
primaryjoin='User.id==UserEmail.user_id',
back_populates='owner')
primaryEmail = db.relationship('UserEmail',
primaryjoin='User.primary_email_id==UserEmail.id')
billingAddress = db.relationship('Address',
primaryjoin='User.billing_address_id==Address.id')
shippingAddress = db.relationship('Address',
primaryjoin='User.shipping_address_id==Address.id')
##endregion relationship obj
|
ngokevin/cyder
|
cyder/core/cyuser/models.py
|
Python
|
bsd-3-clause
| 730
| 0.00274
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import signals
from cyder.core.cyuser import backends
from cyder.core.ctnr.models import Ctnr
class UserProfile(models.Model):
user = models.OneToOneField(User)
default_ctnr = models.ForeignKey(Ctnr, default=2)
    phone_number = models.IntegerField(null=True)
has_perm = backends.has_perm
class Meta:
db_table = 'auth_user_profile'
def create_user_profile(sender, **kwargs):
user = kwargs['instance']
if (kwargs.get('created', True) and not kwargs.get('raw', False)):
profile = UserProfile(user=user)
profile.save()
signals.post_save.connect(create_user_profile, sender=User)
|
astanin/python-tabulate
|
test/test_regression.py
|
Python
|
mit
| 16,317
| 0.000741
|
# -*- coding: utf-8 -*-
"""Regression tests."""
from __future__ import print_function
from __future__ import unicode_literals
from tabulate import tabulate, _text_type, _long_type, TableFormat, Line, DataRow
from common import assert_equal, assert_in, skip
def test_ansi_color_in_table_cells():
"Regression: ANSI color in table cells (issue #5)."
colortable = [("test", "\x1b[31mtest\x1b[0m", "\x1b[32mtest\x1b[0m")]
colorlessheaders = ("test", "test", "test")
formatted = tabulate(colortable, colorlessheaders, "pipe")
expected = "\n".join(
[
"| test | test | test |",
"|:-------|:-------|:-------|",
"| test | \x1b[31mtest\x1b[0m | \x1b[32mtest\x1b[0m |",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_alignment_of_colored_cells():
"Regression: Align ANSI-colored values as if they were colorless."
colortable = [
("test", 42, "\x1b[31m42\x1b[0m"),
("test", 101, "\x1b[32m101\x1b[0m"),
]
colorheaders = ("test", "\x1b[34mtest\x1b[0m", "test")
formatted = tabulate(colortable, colorheaders, "grid")
expected = "\n".join(
[
"+--------+--------+--------+",
"| test | \x1b[34mtest\x1b[0m | test |",
"+========+========+========+",
"| test | 42 | \x1b[31m42\x1b[0m |",
"+--------+--------+--------+",
"| test | 101 | \x1b[32m101\x1b[0m |",
"+--------+--------+--------+",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_alignment_of_link_cells():
"Regression: Align links as if they were colorless."
linktable = [
("test", 42, "\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\"),
("test", 101, "\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\"),
]
linkheaders = ("test", "\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\", "test")
    formatted = tabulate(linktable, linkheaders, "grid")
expected = "\n".join(
[
"+--------+--------+--------+",
"| test | \x1b]8;;target\x1b\\test\x1b]8;;\x1b\\ | test |",
"+========+========+========+",
"| test | 42 | \x1b]8;;target\x1b\\test\x1b]8;;\x1b\\ |",
"+--------+--------+--------+",
"| test | 101 | \x1b]8;;target\x1b\\test\x1b]8;;\x1b\\ |",
"+--------+--------+--------+",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_alignment_of_link_text_cells():
"Regression: Align links as if they were colorless."
linktable = [
("test", 42, "1\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\2"),
("test", 101, "3\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\4"),
]
linkheaders = ("test", "5\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\6", "test")
formatted = tabulate(linktable, linkheaders, "grid")
expected = "\n".join(
[
"+--------+----------+--------+",
"| test | 5\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\6 | test |",
"+========+==========+========+",
"| test | 42 | 1\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\2 |",
"+--------+----------+--------+",
"| test | 101 | 3\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\4 |",
"+--------+----------+--------+",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_iter_of_iters_with_headers():
"Regression: Generator of generators with a gen. of headers (issue #9)."
def mk_iter_of_iters():
def mk_iter():
for i in range(3):
yield i
for r in range(3):
yield mk_iter()
def mk_headers():
for h in ["a", "b", "c"]:
yield h
formatted = tabulate(mk_iter_of_iters(), headers=mk_headers())
expected = "\n".join(
[
" a b c",
"--- --- ---",
" 0 1 2",
" 0 1 2",
" 0 1 2",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_datetime_values():
"Regression: datetime, date, and time values in cells (issue #10)."
import datetime
dt = datetime.datetime(1991, 2, 19, 17, 35, 26)
d = datetime.date(1991, 2, 19)
t = datetime.time(17, 35, 26)
formatted = tabulate([[dt, d, t]])
expected = "\n".join(
[
"------------------- ---------- --------",
"1991-02-19 17:35:26 1991-02-19 17:35:26",
"------------------- ---------- --------",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_simple_separated_format():
"Regression: simple_separated_format() accepts any separator (issue #12)"
from tabulate import simple_separated_format
fmt = simple_separated_format("!")
expected = "spam!eggs"
formatted = tabulate([["spam", "eggs"]], tablefmt=fmt)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def py3test_require_py3():
"Regression: py33 tests should actually use Python 3 (issue #13)"
from platform import python_version_tuple
print("Expected Python version: 3.x.x")
print("Python version used for tests: %s.%s.%s" % python_version_tuple())
assert_equal(python_version_tuple()[0], "3")
def test_simple_separated_format_with_headers():
"Regression: simple_separated_format() on tables with headers (issue #15)"
from tabulate import simple_separated_format
expected = " a| b\n 1| 2"
formatted = tabulate(
[[1, 2]], headers=["a", "b"], tablefmt=simple_separated_format("|")
)
assert_equal(expected, formatted)
def test_column_type_of_bytestring_columns():
"Regression: column type for columns of bytestrings (issue #16)"
from tabulate import _column_type, _binary_type
result = _column_type([b"foo", b"bar"])
expected = _binary_type
assert_equal(result, expected)
def test_numeric_column_headers():
"Regression: numbers as column headers (issue #22)"
result = tabulate([[1], [2]], [42])
expected = " 42\n----\n 1\n 2"
assert_equal(result, expected)
lod = [dict((p, i) for p in range(5)) for i in range(5)]
result = tabulate(lod, "keys")
expected = "\n".join(
[
" 0 1 2 3 4",
"--- --- --- --- ---",
" 0 0 0 0 0",
" 1 1 1 1 1",
" 2 2 2 2 2",
" 3 3 3 3 3",
" 4 4 4 4 4",
]
)
assert_equal(result, expected)
def test_88_256_ANSI_color_codes():
"Regression: color codes for terminals with 88/256 colors (issue #26)"
colortable = [("\x1b[48;5;196mred\x1b[49m", "\x1b[38;5;196mred\x1b[39m")]
colorlessheaders = ("background", "foreground")
formatted = tabulate(colortable, colorlessheaders, "pipe")
expected = "\n".join(
[
"| background | foreground |",
"|:-------------|:-------------|",
"| \x1b[48;5;196mred\x1b[49m | \x1b[38;5;196mred\x1b[39m |",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_column_with_mixed_value_types():
"Regression: mixed value types in the same column (issue #31)"
expected = "\n".join(["-----", "", "a", "я", "0", "False", "-----"])
data = [[None], ["a"], ["\u044f"], [0], [False]]
table = tabulate(data)
assert_equal(table, expected)
def test_latex_escape_special_chars():
"Regression: escape special characters in LaTeX output (issue #32)"
expected = "\n".join(
[
r"\begin{tabular}{l}",
r"\hline",
r" foo\^{}bar
|
marchon/Debug-Dokku.alt-Mongodb-Flask-Python
|
todo.py
|
Python
|
mit
| 2,262
| 0.014147
|
from datetime import datetime
from flask import Flask, request, render_template, redirect, url_for
from flask.ext.mongokit import MongoKit, Document, Connection
import os
app = Flask(__name__)
class Task(Document):
__collection__ = 'tasks'
structure = {
'title': unicode,
'text': unicode,
'creation': datetime,
}
required_fields = ['title', 'creation']
default_values = {'creation': datetime.utcnow()}
use_dot_notation = True
db = MongoKit(app)
connection = Connection(os.environ['MONGODB_URL'])
db.register([Task])
@app.route('/')
def show_all():
try:
tasks = db.Task.find()
return render_template('list.html', tasks=tasks)
except Exception, e:
d = {}
d['Error'] = e.message
d['URL'] = os.environ['MONGODB_URL']
        return render_template('page_not_found.html',d=d)
"""
@app.route('/<ObjectId:task_id>')
def show_task(task_id):
    task = db.Task.get_from_id(task_id)
return render_template('task.html', task=task)
@app.route('/new', methods=["GET", "POST"])
def new_task():
if request.method == 'POST':
try:
task = db.Task()
asdf
except Exception, e:
error = {}
error['error'] = e.message
render_template('except.html', error = error)
try:
task.title = request.form['title']
task.text = request.form['text']
except Exception, e:
error = {}
error['error'] = e.message
render_template('except.html', error = error)
try:
task.save()
except Exception, e:
error = {}
error['error'] = e.message
render_template('except.html', error = error)
try:
return redirect(url_for('show_all'))
except Exception, e:
error = {}
error['error'] = e.message
render_template('except.html', error = error)
return render_template('new.html')
@app.route('/')
def show_all():
d = {}
d['MONGODB_URL'] = os.environ.get('MONGODB_URL')
#for item in os.environ:
# d[item] = os.environ[item]
return render_template('hello.html', tasks = d)
"""
if __name__ == '__main__':
app.run(debug=True)
|
dwinings/promptool
|
preferences.py
|
Python
|
gpl-3.0
| 3,585
| 0.006974
|
# promptool - A tool to create prompts for POSIX shells, written in python and GTK
# Copyright (C) 2011 - David Winings
#
# promptool is free software: you can redistribute it and/or modify it under the terms
# of the GNU General Public License as published by the Free Software Found-
# ation, either version 3 of the License, or (at your option) any later version.
#
# promptool is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with promptool.
# If not, see <http://www.gnu.org/licenses/>.
import pygtk
pygtk.require('2.0')
import gtk
class Preferences(object):
def __init__(self):
self.text_reset = True
self.shell = "bash"
self.textview_bg = gtk.gdk.Color(65535, 65535, 65535)
def set(self, pref): #def void set(self, Preferences pref)
self.text_reset = pref.text_reset #Python's lack of optional type specifications bothers me...
self.shell = pref.shell
self.textview_bg = pref.textview_bg
def __eq__(self, pref):
        if self.text_reset == pref.text_reset and \
self.shell == pref.shell and \
self.textview_bg == pref.textview_bg:
return True
else:
return False
class PrefWindow(gtk.Dialog):
def __init__(self, pref_in, parent=None, flags=0, buttons=None):
super(PrefWindow, self).__init__('Promptool: Preferences', parent, flags, buttons)
self.pref_global = pref_in
print id(self.pref_global)
self.pref_local = Preferences()
self.connect("destroy", self.destroy_handler)
self.add_button('Ok', 1)
self.add_button('Cancel', 2)
self.connect('response', self._response_handler)
self._pack_vbox()
def destroy_handler(self, widget, data=None):
return False
def _pack_vbox(self):
self.vbox.pack_start(self._init_textview_color_selector(), padding=5)
self.vbox.pack_start(self._init_text_reset_toggle(), padding=5)
self.vbox.pack_start(self._init_shell_combox(), padding=5)
def _init_textview_color_selector(self):
self.textview_color_selector = gtk.ColorSelection()
self.textview_color_selector.show()
return self.textview_color_selector
def _init_text_reset_toggle(self):
        self.fg_reset_toggle = gtk.CheckButton(label="Reset text color after prompt")
        self.fg_reset_toggle.active = self.pref_global.text_reset
def toggle_handler(widget, data=None):
self.pref_local.text_reset = self.fg_reset_toggle.get_active()
print self.pref_local.text_reset
self.fg_reset_toggle.connect('toggled', toggle_handler)
self.fg_reset_toggle.show()
return self.fg_reset_toggle
def _init_shell_combox(self):
self.shell_combox = gtk.combo_box_new_text()
self.shell_combox.append_text("Bash Only :(")
self.shell_combox.set_active(0)
self.shell_combox.show()
return self.shell_combox
def _response_handler(self, widget, response_id):
if response_id == 1:
self.pref_global.set(self.pref_local)
self.destroy()
elif response_id == 2:
self.destroy()
def main(self):
gtk.main()
if __name__ == '__main__':
window = PrefWindow()
window.main()
|
mburgs/asanorm
|
asana/entities/entity.py
|
Python
|
apache-2.0
| 6,721
| 0.031394
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import re
class EntityException(Exception):
"""Wrap entity specific errors"""
pass
class Entity(object):
"""Base implementation for an Asana entity containing
	common functionality"""
# Keys which are filtered as part of the HTTP request
_filter_keys = []
#fields this object has. This affects what is returned from the Asana API
#as well as serving as a lookup for lazy-loading
_fields = []
#define regex to match field names that should be wrapped with an instance
#of this object
_matchons = []
#items that are sub-items of the current one such that the API endpoint is
	#/api/parent/<id>/subitem
_children = {}
def __init__(self, data):
self._childrenValues = {}
self._init(data)
self._ready = True
def _init(self, data, merge=False):
"""Initializes this entity, either with entirely new data or with an
update to be merged with the current data
:param data: the data to use for the entity
:param merge: if true only set keys from data that aren't already set
internally
"""
if merge:
self._data.update(data)
else:
self._data = data
self._dirty = set()
#todo it would probably be better to subclass
# dict and implement this in there
for key in self._data:
if not self._data[key]:
continue
for regex, cls in self._matchons.items():
if re.search(regex, key):
if isinstance(self._data[key], list):
for idx, val in enumerate(self._data[key]):
if isinstance(val, dict):
self._data[key][idx] = cls(val)
else:
if isinstance(self._data[key], dict):
self._data[key] = cls(self._data[key])
break
@classmethod
def set_api(cls, api):
cls.api = api
@classmethod
def from_link(cls, link):
"""Builds an object from a link to it
This works by assuming the last section of the link is the ID"""
if not link:
return None
return cls({'id': link.split('/')[-1]})
@classmethod
def _get_api(cls):
if not cls.api:
raise EntityException('The api must be set using Entity.set_api()')
return cls.api
@classmethod
def _get_api_endpoint(cls):
"""By default use name of class for endpoint"""
return cls.__name__.lower() + 's'
def _get_item_url(self):
if not self.id:
raise EntityException('Cannot get item URL without id set')
return '/'.join([self._get_api_endpoint(), str(self.id)])
@classmethod
def find(cls, query={}):
"""Find objects of this type that fit query
:param query: dict of key/value pairs to match against. keys that the
API natively handles are sent as part of the request if they have
scalar values, other keys are filtered from the response.
filter values can be either absolute values or lambdas. for lambdas
the value of its key will be passed as the only argument and it
will be considered passing if the lambda returns true
"""
return cls._run_find(cls._get_api_endpoint(), query)
@classmethod
def _run_find(cls, target, query):
params = cls._get_default_params() #params that are part of the request
#todo handle lambdas that are passed in for filter keys
if cls._filter_keys:
for key in query.keys():
if key in cls._filter_keys:
params[key] = query[key]
del query[key]
data = cls._get_api().get(target, params=params)
return cls._build_result(query, data)
@classmethod
def _get_default_params(cls):
"""Hook to add params that will always be part of a find request
Default behavior checks for the 'fields' property and, if present,
joins it with commas and passes it as the opt_fields param
"""
if cls._fields:
return {
'opt_fields': ','.join(cls._fields)
}
return {}
@classmethod
def _build_result(cls, query, data):
"""Filters the result set based on a query returning the resulting
objects as instances of the current class"""
return [cls(ent) for ent in data if cls._filter_result_item(ent, query)]
@classmethod
def _filter_result_item(cls, entity, query):
"""Filters a single entity dict against a dict of allowed values
returning true if it passes
"""
for key, value in query.items():
if key not in entity:
raise EntityException('The key {0} is not a valid query for {1}'.format(key, cls.__name__))
if (
(callable(value) and not value(entity[key])) or
(isinstance(value, basestring) and value != entity[key])
):
return False
return True
def load(self):
"""Loads all of this items data using its ID"""
#TODO check if sending in empty opt_fields will make us lose all fields
self._init(self._get_api().get(self._get_item_url()), merge=True)
return self
def get_subitem(self, subitem_class, query={}):
target = '/'.join([self._get_item_url(), subitem_class._get_api_endpoint()])
return subitem_class._run_find(target, query)
def save(self):
"""Handles both creating and updating content
The assumption is if there is no ID set this is
a creation request
"""
if self.id:
return self._do_update()
else:
#performing create - post
return self._do_create()
def _do_update(self):
data = {}
for key in self._dirty:
data[key] = self._data[key]
if not data:
return
return self._get_api().put(self._get_item_url(), data=data)
def _do_create(self):
return self._init(self._get_api().post(self._get_api_endpoint(), data=self._data))
def delete(self):
"""Deletes the specified resource. The ID must be set"""
self._get_api().delete(self._get_item_url())
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
if attr in self.__dict__['_data']:
return self.__dict__['_data'][attr]
if attr in self._fields:
self.load()
return self.__dict__['_data'][attr]
if attr in self._children.keys():
if not attr in self._childrenValues.keys():
self._childrenValues[attr] = self.get_subitem(self._children[attr])
return self._childrenValues[attr]
if attr != 'id':
#todo throw standard exception for no property
raise Exception("Could not locate key " + attr)
def __setattr__(self, attr, value):
if attr[0] == '_':
self.__dict__[attr] = value
elif self._ready:
if attr in self._fields:
self._data[attr] = value
self._dirty.add(attr)
else:
raise Exception("Cannot set attribute {0} - unknown name".foramt(attr))
def __str__(self):
return vars(self).__repr__()
def __repr__(self):
return self.__str__()
def __hash__(self):
return hash(self.id if hasattr(self, 'id') else frozenset(self._data.items()))
def __eq__(self, other):
if type(self) is not type(other):
return False
if self.id:
return self.id == other.id
else:
return cmp(self._data, other._data) == 0
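# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# A minimal, hypothetical subclass showing how Entity.find() is meant to be used.
# The 'Task' name, its fields, the api client and the query values are all
# assumptions for illustration only.
#
# class Task(Entity):
#     _fields = ['name', 'completed', 'assignee']
#     _filter_keys = ['assignee']
#
# Entity.set_api(api_client)                  # assumed client exposing get/post/put/delete
# open_tasks = Task.find({
#     'assignee': '42',                       # in _filter_keys: sent as a request param
#     'completed': lambda done: not done,     # lambda: filtered locally against the response
# })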
mick-d/nipype_source | nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py | Python | bsd-3-clause | 2,420 | 0.032645
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.camino.convert import AnalyzeHeader
def test_AnalyzeHeader_inputs():
input_map = dict(args=dict(argstr='%s',
),
centre=dict(argstr='-centre %s',
units='mm',
),
data_dims=dict(argstr='-datadims %s',
units='voxels',
),
datatype=dict(argstr='-datatype %s',
mandatory=True,
),
description=dict(argstr='-description %s',
),
environ=dict(nohash=True,
usedefault=True,
),
greylevels=dict(argstr='-gl %s',
units='NA',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_file=dict(argstr='< %s',
mandatory=True,
position=1,
),
initfromheader=dict(argstr='-initfromheader %s',
position=3,
),
intelbyteorder=dict(argstr='-intelbyteorder',
),
networkbyteorder=dict(argstr='-networkbyteorder',
),
nimages=dict(argstr='-nimages %d',
units='NA',
),
offset=dict(argstr='-offset %d',
units='NA',
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
picoseed=dict(argstr='-picoseed %s',
units='mm',
),
printbigendian=dict(argstr='-printbigendian %s',
position=3,
),
printimagedims=dict(argstr='-printimagedims %s',
position=3,
),
printintelbyteorder=dict(argstr='-printintelbyteorder %s',
position=3,
),
printprogargs=dict(argstr='-printprogargs %s',
position=3,
),
readheader=dict(argstr='-readheader %s',
position=3,
),
scaleinter=dict(argstr='-scaleinter %d',
units='NA',
),
scaleslope=dict(argstr='-scaleslope %d',
units='NA',
),
scheme_file=dict(argstr='%s',
position=2,
),
terminal_output=dict(mandatory=True,
nohash=True,
),
voxel_dims=dict(argstr='-voxeldims %s',
units='mm',
),
)
inputs = AnalyzeHeader.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_AnalyzeHeader_outputs():
output_map = dict(header=dict(),
)
outputs = AnalyzeHeader.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
PhonologicalCorpusTools/PyAnnotationGraph | polyglotdb/utils.py | Python | mit | 1,770 | 0.001695
from contextlib import contextmanager
import sys
from . import CorpusContext
from .client.client import PGDBClient, ClientError, ConnectionError
def get_corpora_list(config):
with CorpusContext(config) as c:
statement = '''MATCH (n:Corpus) RETURN n.name as name ORDER BY name'''
results = c.execute_cypher(statement)
return [x['name'] for x in results]
@contextmanager
def ensure_local_database_running(database_name, port=None, token=None):
if port is None:
port = 8080
host = 'http://localhost:{}'.format(port)
client = PGDBClient(host, token=token)
databases = client.list_databases()
try:
response = client.create_database(database_name)
except (ClientError, ConnectionError):
pass
try:
client.start_database(database_name)
except (ClientError, ConnectionError):
pass
try:
db_info = client.get_ports(database_name)
db_info['data_dir'] = client.get_directory(database_name)
db_info['host'] = 'localhost'
pgdb = False
except ConnectionError:
print('Warning: no Polyglot server available locally, using default ports.')
db_info = {'graph_http_port': 7474, 'graph_bolt_port': 7687,
'acoustic_http_port': 8086, 'host': 'localhost'}
pgdb = True
try:
with CorpusContext('test', **db_info) as c:
c.execute_cypher('''MATCH (n) return n limit 1''')
except:
print('Could not connect to a local database. '
'Please check your set up and ensure that a local database is running.')
sys.exit(1)
try:
yield db_info
finally:
if not pgdb:
client.stop_database(database_name)
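# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# Typical use of ensure_local_database_running() as a context manager; the corpus
# name and query below are made up for the example.
#
# with ensure_local_database_running('my_corpus') as db_info:
#     with CorpusContext('my_corpus', **db_info) as c:
#         c.execute_cypher('MATCH (n) RETURN count(n) AS total')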
cdondrup/strands_qsr_lib | qsr_lib/src/qsrlib_qsrs/qsr_rcc8.py | Python | mit | 1,237 | 0.003234
# -*- coding: utf-8 -*-
from __future__ import print_function, division
from qsrlib_qsrs.qsr_rcc_abstractclass import QSR_RCC_Abstractclass
class QSR_RCC8(QSR_RCC_Abstractclass):
"""Symmetrical RCC5 relations.
Values of the abstract properties
* **_unique_id** = "rcc8"
* **_all_possible_relations** = ("dc", "ec", "po", "eq", "tpp", "ntpp", "tppi", "ntppi")
* **_dtype** = "bounding_boxes_2d"
QSR specific `dynamic_args`
* **'quantisation_factor'** (*float*) = 0.0: Threshold that determines whether two rectangle regions are disconnected.
.. seealso:: For further details about RCC8, refer to its :doc:`description. <../handwritten/qsrs/rcc8>`
"""
_unique_id = "rcc8"
"""str: Unique identifier name of the QSR."""
_all_possible_relations = ("dc", "ec", "po", "eq", "tpp", "ntpp", "tppi", "ntppi")
"""tuple: All
|
possible relations of the QSR."""
def __init__(self):
"""Constructor."""
super(QSR_RCC8, self).__init__()
def _convert_to_requested_rcc_type(self, qsr):
"""No need for remapping.
:param qsr: RCC8 value.
:type qsr: str
:return: RCC8 value.
:rtype: str
"""
return qsr
fuzhouch/amberalertcn | server/amberalertcn/api/__init__.py | Python | bsd-3-clause | 47 | 0.021277
#!/usr/bin/env python
# -*- coding: utf-8 -*-
asridharan/dcos | packages/dcos-history/extra/history/statebuffer.py | Python | apache-2.0 | 6,354 | 0.001731
import json
import logging
import os
import threading
from collections import deque
from datetime import datetime, timedelta
from typing import Optional
import requests
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARN)
FETCH_PERIOD = 2
FILE_EXT = '.state-summary.json'
STATE_SUMMARY_URI = os.getenv('STATE_SUMMARY_URI', 'http://leader.mesos:5050/state-summary')
TLS_VERIFY = True
# The verify arg to requests.get() can either
# be a boolean or the path to a CA_BUNDLE
if 'TLS_VERIFY' in os.environ:
if os.environ['TLS_VERIFY'] == 'false':
TLS_VERIFY = False
elif os.environ['TLS_VERIFY'] == 'true':
TLS_VERIFY = True
else:
TLS_VERIFY = os.environ['TLS_VERIFY']
def parse_log_time(fname):
return datetime.strptime(fname, '%Y-%m-%dT%H:%M:%S.%f{}'.format(FILE_EXT))
def fetch_state(headers_cb):
timestamp = datetime.now()
try:
# TODO(cmaloney): Access the mesos master redirect before requesting
# state-summary so that we always get the "authoritative"
# state-summary. leader.mesos isn't updated instantly.
# That requires mesos stop returning hostnames from `/master/redirect`.
# See: https://github.com/apache/mesos/blob/master/src/master/http.cpp#L746
resp = requests.get(STATE_SUMMARY_URI, timeout=FETCH_PERIOD * .9, headers=headers_cb(), verify=TLS_VERIFY)
resp.raise_for_status()
state = resp.text
except Exception as e:
logging.warning("Could not fetch state: %s" % e)
state = '{}'
return timestamp, state
class HistoryBuffer():
def __init__(self, time_window, update_period, path=None):
"""
:param time_window: how many seconds this buffer will span
:param update_period: the number of seconds between updates for this buffer
:param path: (str) path for the dir to write to disk in
"""
updates_per_window = int(time_window / update_period)
if time_window % update_period != 0:
raise ValueError(
'Invalid updates per window: {} '
'time_window/update_period must be an integer'.format(updates_per_window))
self.in_memory = deque([], updates_per_window)
self.update_period = timedelta(seconds=update_period)
if path:
try:
os.makedirs(path)
except FileExistsError:
logging.info('Using previously created buffer persistence dir: {}'.format(path))
self.path = path
self.disk_count = updates_per_window
old_files = [os.path.join(self.path, f) for f in os.listdir(self.path)]
filtered_old_files = [f for f in old_files if f.endswith(FILE_EXT)]
self.disk_files = list(sorted(filtered_old_files))
backup_files = self.disk_files[-1 * updates_per_window:]
backup_count = len(backup_files)
def update_and_ff(f_path, ff_end):
"""Accounts for gaps between data in memory with blank filler
"""
# Set timestamp to None for memory-only buffer updates
with open(f_path, 'r') as fh:
self._update_buffer(fh.read())
while (ff_end - self.update_period) >= self.next_update:
self._update_buffer('{}')
for idx, f in enumerate(backup_files):
if idx == 0:
# set the first update time to correspond to the oldest backup file
# before we attempt to do an update and fastforward
self.next_update = parse_log_time(f.split('/')[-1])
if idx == (backup_count - 1):
# Last backup file, fastforward to present
update_and_ff(f, datetime.now())
else:
# More backup files, only fastforward to the next one
next_filetime = parse_log_time(backup_files[idx + 1].split('/')[-1])
update_and_ff(f, next_filetime)
else:
self.disk_count = 0
# Guarantees first call after instantiation will cause update
self.next_update = datetime.now()
def _get_datafile_name(self, timestamp: datetime):
assert timestamp.tzinfo is None
return '{}/{}{}'.format(self.path, timestamp.isoformat(), FILE_EXT)
def _clean_excess_disk_files(self):
while len(self.disk_files) > self.disk_count:
os.remove(self.disk_files.pop(0))
def add_data(self, timestamp: datetime, state):
if timestamp >= self.next_update:
self._update_buffer(state, storage_time=timestamp)
def _update_buffer(self, state, storage_time: Optional[datetime]=None):
self.in_memory.append(state)
self.next_update += self.update_period
if storage_time and (self.disk_count > 0):
data_file = self._get_datafile_name(storage_time)
with open(data_file, 'w') as f:
json.dump(state, f)
self.disk_files.append(data_file)
self._clean_excess_disk_files()
def dump(self):
return self.in_memory
class BufferCollection():
"""Defines the buffers to be maintained"""
def __init__(self, buffer_dir):
self.buffers = {
'minute': HistoryBuffer(60, 2, path=buffer_dir + '/minute'),
'hour': HistoryBuffer(60 * 60, 60, path=buffer_dir + '/hour'),
'last': HistoryBuffer(FETCH_PERIOD, FETCH_PERIOD)}
def dump(self, name):
return self.buffers[name].dump()
def add_data(self, timestamp, data):
for buf in self.buffers.keys():
self.buffers[buf].add_data(timestamp, data)
class BufferUpdater():
"""Class that fetchs and pushes that fetched update to BufferCollection
Args:
headers_cb (method): a callback method that returns a dictionary
of headers to be used for mesos state-summary requests
"""
def __init__(self, buffer_collection, headers_cb):
self.buffer_collection = buffer_collection
self.headers_cb = headers_cb
def update(self):
self.buffer_collection.add_data(*fetch_state(self.headers_cb))
def run(self):
self.update()
t = threading.Timer(FETCH_PERIOD, self.run)
t.start()
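# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# Wiring the pieces together; the buffer directory and the empty headers callback
# below are assumptions for the example.
#
# buffers = BufferCollection('/tmp/dcos-history-buffers')
# updater = BufferUpdater(buffers, lambda: {})   # no extra request headers
# updater.run()                                  # re-fetches every FETCH_PERIOD seconds
# latest_snapshots = buffers.dump('last')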
pythonindia/junction | junction/conferences/migrations/0009_conferenceproposalreviewer_nick.py | Python | mit | 534 | 0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("conferences", "0008_auto_20150601_1436"),
]
operations = [
migrations.AddField(
model_name="confe
|
renceproposalreviewer",
|
name="nick",
field=models.CharField(
default="Reviewer", max_length=255, verbose_name="Nick Name"
),
preserve_default=True,
),
]
tomsilver/nupic | examples/network/network_api_demo.py | Python | gpl-3.0 | 7,557 | 0.009528
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import copy
import csv
import json
import os
from pkg_resources import resource_filename
from nupic.algorithms.anomaly import computeRawAnomalyScore
from nupic.data.file_record_stream import FileRecordStream
from nupic.engine import Network
from nupic.encoders import MultiEncoder, ScalarEncoder, DateEncoder
_VERBOSITY = 0 # how chatty the demo should be
_SEED = 1956 # the random seed used throughout
_INPUT_FILE_PATH = resource_filename(
"nupic.datafiles", "extra/hotgym/rec-center-hourly.csv"
)
_OUTPUT_PATH = "network-demo-output.csv"
_NUM_RECORDS = 2000
# Config field for SPRegion
SP_PARAMS = {
"spVerbosity": _VERBOSITY,
"spatialImp": "cpp",
"globalInhibition": 1,
"columnCount": 2048,
# This must be set before creating the SPRegion
"inputWidth": 0,
"numActiveColumnsPerInhArea": 40,
"seed": 1956,
"potentialPct": 0.8,
"synPermConnected": 0.1,
"synPermActiveInc": 0.0001,
"synPermInactiveDec": 0.0005,
"maxBoost": 1.0,
}
# Config field for TPRegion
TP_PARAMS = {
"verbosity": _VERBOSITY,
"columnCount": 2048,
"cellsPerColumn": 32,
"inputWidth": 2048,
"seed": 1960,
"temporalImp": "cpp",
"newSynapseCount": 20,
"maxSynapsesPerSegment": 32,
"maxSegmentsPerCell": 128,
"initialPerm": 0.21,
"permanenceInc": 0.1,
"permanenceDec": 0.1,
"globalDecay": 0.0,
"maxAge": 0,
"minThreshold": 9,
"activationThreshold": 12,
"outputType": "normal",
"pamLength": 3,
}
def createEncoder():
"""Create the encoder instance for our test and return it."""
consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name="consumption",
clipInput=True)
time_encoder = DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay")
encoder = MultiEncoder()
encoder.addEncoder("consumption", consumption_encoder)
encoder.addEncoder("timestamp", time_encoder)
return encoder
def createNetwork(dataSource):
"""Create the Network instance.
The network has a sensor region reading data from `dataSource` and passing
the encoded representation to an SPRegion. The SPRegion output is passed to
a TPRegion.
:param dataSource: a RecordStream instance to get data from
:returns: a Network instance ready to run
"""
network = Network()
# Our input is sensor data from the gym file. The RecordSensor region
# allows us to specify a file record stream as the input source via the
# dataSource attribute.
network.addRegion("sensor", "py.RecordSensor",
json.dumps({"verbosity": _VERBOSITY}))
sensor = network.regions["sensor"].getSelf()
# The RecordSensor needs to know how to encode the input values
sensor.encoder = createEncoder()
# Specify the dataSource as a file record stream instance
sensor.dataSource = dataSource
# Create the spatial pooler region
SP_PARAMS["inputWidth"] = sensor.encoder.getWidth()
network.addRegion("spatialPoolerRegion", "py.SPRegion", json.dumps(SP_PARAMS))
# Link the SP region to the sensor input
network.link("sensor", "spatialPoolerRegion", "UniformLink", "")
network.link("sensor", "spatialPoolerRegion", "UniformLink", "",
srcOutput="resetOut", destInput="resetIn")
network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
srcOutput="spatialTopDownOut", destInput="spatialTopDownIn")
network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
srcOutput="temporalTopDownOut", destInput="temporalTopDownIn")
# Add the TPRegion on top of the SPRegion
network.addRegion("temporalPoolerRegion", "py.TPRegion",
json.dumps(TP_PARAMS))
network.link("spatialPoolerRegion", "temporalPoolerRegion", "UniformLink", "")
network.link("temporalPoolerRegion", "spatialPoolerRegion", "UniformLink", "",
srcOutput="topDownOut", destInput="topDownIn")
# Add the AnomalyRegion on top of the TPRegion
network.addRegion("anomalyRegion", "py.AnomalyRegion", json.dumps({}))
network.link("spatialPoolerRegion", "anomalyRegion", "UniformLink", "",
srcOutput="bottomUpOut", destInput="activeColumns")
network.link("temporalPoolerRegion", "anomalyRegion", "UniformLink", "",
srcOutput="topDownOut", destInput="predictedColumns")
network.initialize()
spatialPoolerRegion = network.regions["spatialPoolerRegion"]
# Make sure learning is enabled
spatialPoolerRegion.setParameter("learningMode", True)
# We want temporal anomalies so disable anomalyMode in the SP. This mode is
# used for computing anomalies in a non-temporal model.
spatialPoolerRegion.setParameter("anomalyMode", False)
temporalPoolerRegion = network.regions["temporalPoolerRegion"]
# Enable topDownMode to get the predicted columns output
temporalPoolerRegion.setParameter("topDownMode", True)
# Make sure learning is enabled (this is the default)
temporalPoolerRegion.setParameter("learningMode", True)
# Enable inference mode so we get predictions
temporalPoolerRegion.setParameter("inferenceMode", True)
# Enable anomalyMode to compute the anomaly score. This actually doesn't work
# now so doesn't matter. We instead compute the anomaly score based on
# topDownOut (predicted columns) and SP bottomUpOut (active columns).
temporalPoolerRegion.setParameter("anomalyMode", True)
return network
def runNetwork(network, writer):
"""Run the network and write output to writer.
:param network: a Network instance to run
:param writer: a csv.writer instance to write output to
"""
sensorRegion = network.regions["sensor"]
spatialPoolerRegion = network.regions["spatialPoolerRegion"]
temporalPoolerRegion = network.regions["temporalPoolerRegion"]
anomalyRegion = network.regions["anomalyRegion"]
prevPredictedColumns = []
i = 0
for _ in xrange(_NUM_RECORDS):
# Run the network for a single iteration
network.run(1)
# Write out the anomaly score along with the record number and consumption
# value.
anomalyScore = anomalyRegion.getOutputData("rawAnomalyScore")[0]
consumption = sensorRegion.getOutputData("sourceOut")[0]
writer.writerow((i, consumption, anomalyScore))
i += 1
if __name__ == "__main__":
dataSource = FileRecordStream(streamID=_INPUT_FILE_PATH)
network = createNetwork(dataSource)
outputPath = os.path.join(os.path.dirname(__file__), _OUTPUT_PATH)
with open(outputPath, "w") as outputFile:
writer = csv.writer(outputFile)
print "Writing output to %s" % outputPath
runNetwork(network, writer)
nlloyd/SubliminalCollaborator | libs/twisted/test/test_randbytes.py | Python | apache-2.0 | 3,309 | 0.003022
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for L{twisted.python.randbytes}.
"""
import os
from twisted.trial import unittest
from twisted.python import randbytes
class SecureRandomTestCaseBase(object):
"""
Base class for secureRandom test cases.
"""
def _check(self, source):
"""
The given random bytes source should return the number of bytes
requested each time it is called and should probably not return the
same bytes on two consecutive calls (although this is a perfectly
legitimate occurrence and rejecting it may generate a spurious failure
-- maybe we'll get lucky and the heat death will come first).
"""
for nbytes in range(17, 25):
s = source(nbytes)
self.assertEqual(len(s), nbytes)
s2 = source(nbytes)
self.assertEqual(len(s2), nbytes)
# This is crude but hey
self.assertNotEquals(s2, s)
class SecureRandomTestCase(SecureRandomTestCaseBase, unittest.TestCase):
"""
Test secureRandom under normal conditions.
"""
def test_normal(self):
"""
L{randbytes.secureRandom} should return a string of the requested
length and make some effort to make its result otherwise unpredictable.
"""
self._check(randbytes.secureRandom)
class ConditionalSecureRandomTestCase(SecureRandomTestCaseBase,
unittest.TestCase):
"""
Test the available secure random sources one by one, then remove them to exercise the fallbacks.
"""
def setUp(self):
"""
Create a L{randbytes.RandomFactory} to use in the tests.
"""
self.factory = randbytes.RandomFactory()
def errorFactory(self, nbytes):
"""
A factory raising an error when a source is not available.
"""
raise randbytes.SourceNotAvailable()
def test_osUrandom(self):
"""
L{RandomFactory._osUrandom} should work as a random source whenever
L{os.urandom} is available.
"""
self._check(self.factory._osUrandom)
def test_withoutAnything(self):
"""
Remove all secure sources and assert it raises a failure. Then try the
fallback parameter.
"""
self.factory._osUrandom = self.errorFactory
self.assertRaises(randbytes.SecureRandomNotAvailable,
self.factory.secureRandom, 18)
def wrapper():
return self.factory.secureRandom(18, fallback=True)
s = self.assertWarns(
RuntimeWarning,
"urandom unavailable - "
"proceeding with non-cryptographically secure random source",
__file__,
wrapper)
self.assertEqual(len(s), 18)
class RandomTestCaseBase(SecureRandomTestCaseBase, unittest.TestCase):
"""
'Normal' random test cases.
"""
def test_normal(self):
"""
Test basic case.
"""
self._check(randbytes.insecureRandom)
def test_withoutGetrandbits(self):
"""
Test C{insecureRandom} without C{random.getrandbits}.
"""
factory = randbytes.RandomFactory()
factory.getrandbits = None
self._check(factory.insecureRandom)
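# --- Illustrative note (editor's addition, not part of the original test module) ---
# The API exercised above is simply randbytes.secureRandom(nbytes) and
# randbytes.insecureRandom(nbytes), each returning a byte string of the requested
# length, e.g.:
#
# from twisted.python import randbytes
# token = randbytes.secureRandom(16)   # 16 unpredictable bytes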
mykespb/pythoner | quicksorts.py | Python | apache-2.0 | 1,354 | 0.008124
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# quicksorts.py (C) myke, 2015
# 2015-11-08 1.1
# various versions of quicksort alogo
import random
TIMES = 10
SIZE = 10
RANGE = 10
# -----------------------------------------------
def qs1 (al):
""" Algo quicksort for a list
"""
if not al:
return []
return (qs1([x for x in al if x < al[0]])
+ [x for x in al if x == al[0]]
+ qs1([x for x in al if x > al[0]]))
# -----------------------------------------------
def qs2 (array):
""" another longer version"""
less = []
equal = []
greater = []
if len(array) > 1:
pivot = array[0]
for x in array:
if x < pivot:
less.append(x)
if x == pivot:
equal.append(x)
if x > pivot:
greater.append(x)
return qs2(less)+equal+qs2(greater)
else:
return array
# -----------------------------------------------
qs = qs1
# -----------------------------------------------
def main ():
""" dispatcher: tests make and sort """
for i in range(TIMES):
sa = [random.randint(1, RANGE) for e in range(SIZE)]
print (sa, "-->", qs (sa))
main()
# -----------------------------------------------
# used: http://stackoverflow.com/questions/18262306/quick-sort-with-python
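# --- Illustrative worked example (editor's addition, not part of the original script) ---
# Tracing qs1 on a small input shows the partition-around-the-first-element idea:
#
#   qs1([3, 1, 2]) = qs1([1, 2]) + [3] + qs1([])
#                  = (qs1([]) + [1] + qs1([2])) + [3] + []
#                  = [1, 2, 3]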
britny/djangocms-blog | djangocms_blog/admin.py | Python | bsd-3-clause | 5,791 | 0.000691
# -*- coding: utf-8 -*-
from copy import deepcopy
from cms.admin.placeholderadmin import FrontendEditableAdminMixin, \
PlaceholderAdminMixin
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import get_user_model
from parler.admin import TranslatableAdmin
from django.contrib.sites.models import Site
from .models import BlogCategory, Post
from .settings import get_setting
try:
from admin_enhancer.admin import EnhancedModelAdminMixin
except ImportError:
class EnhancedModelAdminMixin(object):
pass
class BlogCategoryAdmin(EnhancedModelAdminMixin, TranslatableAdmin):
exclude = ['parent']
_fieldsets = [
(None, {
'fields': [('name', 'slug')]
}),
('Info', {
'fields': ([], ),
'classes': ('collapse',)
}),
]
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('name',)}
def get_queryset(self, request):
current_site = Site.objects.get_current()
return BlogCategory.objects.filter(sites=current_site)
def get_fieldsets(self, request, obj=None):
fsets = deepcopy(self._fieldsets)
if get_setting('MULTISITE'):
fsets[1][1]['fields'][0].append('sites')
return fsets
def save_related(self, request, form, formsets, change):
if not form.cleaned_data['sites']:
form.cleaned_data['sites'] = [Site.objects.get_current()]
super(BlogCategoryAdmin, self).save_related(
request, form, formsets, change)
class Media:
css = {
'all': ('%sdjangocms_blog/css/%s' % (settings.STATIC_URL,
'djangocms_blog_admin.css'),)
}
# from django.contrib import admin
# from django.utils.translation import ugettext_lazy as _
# class SitesFilter(admin.SimpleListFilter):
# title = _('Site')
# parameter_name = 'sites'
#
# def lookups(self, request, model_admin):
# return (('current_site', _('Current Site')),)
#
# def queryset(self, request, queryset):
# if self.value() == 'current_site':
# return queryset.filter(sites__in=[Site.objects.get_current()])
# else:
# return queryset
class PostAdmin(EnhancedModelAdminMixin, FrontendEditableAdminMixin,
PlaceholderAdminMixin, TranslatableAdmin):
list_display = ['title', 'author', 'date_published', 'date_published_end']
# list_filter = (SitesFilter,)
date_hierarchy = 'date_published'
raw_id_fields = ['author']
frontend_editable_fields = ('title', 'abstract', 'post_text')
enhance_exclude = ('main_image', 'tags')
_fieldsets = [
(None, {
'fields': [('title', 'categories', 'publish')]
}),
('Info', {
'fields': (['slug', 'tags'],
('date_published', 'date_published_end', 'enable_comments')),
'classes': ('collapse',)
}),
('Images', {
'fields': (('main_image', 'main_image_thumbnail', 'main_image_full'),),
'classes': ('collapse',)
}),
('SEO', {
'fields': [('meta_description', 'meta_title', 'meta_keywords')],
'classes': ('collapse',)
}),
]
def formfield_for_dbfield(self, db_field, **kwargs):
field = super(PostAdmin, self).formfield_for_dbfield(db_field, **kwargs)
if db_field.name == 'meta_description':
original_attrs = field.widget.attrs
original_attrs['maxlength'] = 160
field.widget = forms.TextInput(original_attrs)
elif db_field.name == 'meta_title':
field.max_length = 70
return field
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
if db_field.name == "categories":
kwargs["queryset"] = BlogCategory.objects.filter(
sites=Site.objects.get_current())
return super(PostAdmin, self).formfield_for_manytomany(
db_field, request, **kwargs)
def get_fieldsets(self, request, obj=None):
fsets = deepcopy(self._fieldsets)
if get_setting('USE_ABSTRACT'):
fsets[0][1]['fields'].append('abstract')
if not get_setting('USE_PLACEHOLDER'):
fsets[0][1]['fields'].append('post_text')
if get_setting('MULTISITE'):
fsets[1][1]['fields'][0].append('sites')
if request.user.is_superuser:
fsets[1][1]['fields'][0].append('author')
return fsets
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('title',)}
def get_queryset(self, request):
current_site = Site.objects.get_current()
return Post.objects.filter(sites=current_site)
def save_model(self, request, obj, form, change):
if not obj.author_id and get_setting('AUTHOR_DEFAULT'):
if get_setting('AUTHOR_DEFAULT') is True:
user = request.user
else:
user = get_user_model().objects.get(username=get_setting('AUTHOR_DEFAULT'))
obj.author = user
super(PostAdmin, self).save_model(request, obj, form, change)
def save_related(self, request, form, formsets, change):
if not form.cleaned_data['sites']:
form.cleaned_data['sites'] = [Site.objects.get_current()]
super(PostAdmin, self).save_related(request, form, formsets, change)
class Media:
css = {
'all': ('%sdjangocms_blog/css/%s' % (settings.STATIC_URL,
'djangocms_blog_admin.css'),)
}
admin.site.register(BlogCategory, BlogCategoryAdmin)
admin.site.register(Post, PostAdmin)
Alexx-G/tastypie-example | src/config/urls.py | Python | mit | 771 | 0
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
apipatterns = [
url(r'^', include('cars.api.urls')),
]
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(
r'^manufacturer/',
include('manufacturers.urls', namespace='manufacturers')),
url(r'^cars/', include('cars.urls', namespace='cars')),
url(r'^api/', include(apipatterns, namespace='api')),
)
if settings.DEBUG:
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
freshplanet/AppEngine-Counter | counter/views.py | Python | apache-2.0 | 1,493 | 0.004019
# -*- coding: utf-8 -*-
'''
Copyright 2014 FreshPlanet (http://freshplanet.com | opensource@freshplanet.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import datetime
import random
from google.appengine.ext import ndb
import webapp2
from counter.models import Counter
class SampleHandler(webapp2.RequestHandler):
@ndb.toplevel
def get(self):
"""
Increments some Counters to play with the feature.
"""
# Fill datastore with data to show case in admin view
otherSliceId = (datetime.datetime.utcnow() - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
for client in ['iOS', 'Android', 'Windows']:
Counter.increment('newInstalls_' + client, random.randint(1, 5))
Counter.increment('newInstalls_' + client, random.randint(1, 5), sliceId=otherSliceId)
self.response.write("""
Counters updated!
Query for counters <a href="/admin/counters/?prefix=newInstalls">here</a>.
""")
akshayka/bft2f | start_client.py | Python | gpl-2.0 | 10,155 | 0.006302
import sys, glob
sys.path.append('gen-py')
from auth_service import Auth_Service
from auth_service.ttypes import *
from bft2f_pb2 import *
from argparse import ArgumentParser
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
from time import sleep, time
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from Crypto.Hash import SHA
from base64 import b64encode, b64decode
import threading
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
MULTICAST_ADDR = "228.0.0.5"
BFT2F_PORT = 8005
USER_PORT = 9090
F = 2
parser = ArgumentParser()
parser.add_argument('--client_id', '-ci',
type=long,
required=True)
args = parser.parse_args()
print "start client"
sys.stdout.flush()
# req_id -> [event, replies, done]; the event is triggered once 2f + 1 matching replies arrive
USER_REQUESTS = {}
class Auth_Service_Handler:
def sign_in(self, user_id, token):
req_id = user_id + token
USER_REQUESTS[req_id] = [threading.Event(), [], False]
# Send sign in to BFT2F
twisted_client.bft2f_sign_in(user_id, token)
# Wait for 2f + 1 rep
while(not USER_REQUESTS[req_id][0].wait(timeout=2)):
twisted_client.bft2f_sign_in(user_id, token)
reps = USER_REQUESTS[req_id][1]
if reps[0].res.type != BFT2f_OP_RES.SUCCESS:
return Auth_Service_Sign_In_Res(status=Auth_Service_Res_Status.Failed,
user_id=user_id)
# Extract sign_in_certs (from protobufs to thrift)
sign_in_certs = []
for rep in reps:
sign_in_certs.append(Sign_In_Cert(node_pub_key=rep.res.sign_in_cert.node_pub_key,
sig=rep.res.sign_in_cert.sig))
return Auth_Service_Sign_In_Res(status=Auth_Service_Res_Status.Success,
user_id=user_id,
user_pub_key=reps[0].res.user_pub_key,
user_priv_key_enc=reps[0].res.user_priv_key_enc,
sign_in_certs=sign_in_certs)
def sign_up(self, user_id, user_pub_key, user_priv_key_enc):
req_id = user_id
USER_REQUESTS[req_id] = [threading.Event(), [], False]
# Make a call to bft2f
twisted_client.bft2f_sign_up(user_id, user_pub_key, user_priv_key_enc)
# Wait until bft2f comes up with a response (2f + 1)
while(not USER_REQUESTS[req_id][0].wait(timeout=2)):
twisted_client.bft2f_sign_up(user_id, user_pub_key, user_priv_key_enc)
reps = USER_REQUESTS[req_id][1]
if reps[0].res.type != BFT2f_OP_RES.SUCCESS:
return Auth_Service_Sign_Up_Res(status=Auth_Service_Res_Status.Failed,
user_id=user_id)
return Auth_Service_Sign_Up_Res(status=Auth_Service_Res_Status.Success,
user_id=user_id,
user_pub_key=user_pub_key,
user_priv_key_enc=user_priv_key_enc)
def change_credentials(self, user_id, new_user_pub_key, new_user_priv_key_enc, sig):
req_id = user_id
USER_REQUESTS[req_id] = [threading.Event(), [], False]
# Make a call to bft2f
twisted_client.bft2f_change_credentials(user_id, new_user_pub_key, new_user_priv_key_enc,
sig)
# Wait until bft2f comes up with a response (2f + 1)
USER_REQUESTS[req_id][0].wait()
reps = USER_REQUESTS[req_id][1]
if reps[0].res.type != BFT2f_OP_RES.SUCCESS:
return Auth_Service_Change_Credentials_Res(status=Auth_Service_Res_Status.Failed,
user_id=user_id)
return Auth_Service_Change_Credentials_Res(status=Auth_Service_Res_Status.Success,
user_id=user_id,
new_user_pub_key=new_user_pub_key,
new_user_priv_key_enc=new_user_priv_key_enc)
class BFT2F_Client(DatagramProtocol):
def __init__(self, client_id):
self.client_id = client_id
# load private key
key = open("./certs/client%d.key"%self.client_id, "r").read()
self.private_key = PKCS1_v1_5.new(RSA.importKey(key))
key = open("./certs/rootCA_pub.pem", "r").read()
self.rootCA_pubkey = PKCS1_v1_5.new(RSA.importKey(key))
self.version = BFT2F_VERSION(node_id=0, view=0, n=0, hcd="")
self.ts = 0
#load public keys
self.server_pubkeys=[]
for i in xrange(0, 3 * F + 1):
key = open("./certs/server%d.pem"%i, "r").read()
self.server_pubkeys.append(PKCS1_v1_5.new(RSA.importKey(key)))
self.user_conn_mapping = {}
def startProtocol(self):
pass
def bft2f_sign_up(self, user_id, user_pub_key, user_priv_key_enc):
msg = BFT2F_MESSAGE(msg_type=BFT2F_MESSAGE.REQUEST,
op=BFT2F_OP(type=SIGN_UP,
user_id=user_id,
user_pub_key=user_pub_key,
user_priv_key_enc=user_priv_key_enc),
ts=self.make_ts(),
client_id=self.client_id,
version=self.version,
sig='')
msg.sig = self.sign_func(msg.SerializeToString())
self.transport.write(msg.SerializeToString(), (MULTICAST_ADDR, BFT2F_PORT))
def bft2f_change_credentials(self, user_id, new_user_pub_key, new_user_priv_key_enc, sig):
msg = BFT2F_MESSAGE(msg_type=BFT2F_MESSAGE.REQUEST,
op=BFT2F_OP(type=CHANGE_CRED,
user_id=user_id,
new_user_pub_key=new_user_pub_key,
new_user_priv_key_enc=new_user_priv_key_enc,
sig=sig),
ts=self.make_ts(),
client_id=self.client_id,
version=self.version,
sig='')
msg.sig = self.sign_func(msg.SerializeToString())
self.transport.write(msg.SerializeToString(), (MULTICAST_ADDR, BFT2F_PORT))
def bft2f_sign_in(self, user_id, token):
msg = BFT2F_MESSAGE(msg_type=BFT2F_MESSAGE.REQUEST,
op=BFT2F_OP(type=SIGN_IN, user_id=user_id, token=token),
ts=self.make_ts(),
client_id=self.client_id,
version=self.version,
sig='')
msg.sig = self.sign_func(msg.SerializeToString())
self.transport.write(msg.SerializeToString(), (MULTICAST_ADDR, BFT2F_PORT))
def datagramReceived(self, datagram, address):
msg = BFT2F_MESSAGE()
msg.ParseFromString(datagram)
signer = self.server_pubkeys[msg.node_id]
signature = msg.sig
msg.sig = ""
if not self.verify_func(signer,signature,msg.SerializeToString()):
print "wrong signature : %d :" % msg.node_id
sys.stdout.flush()
return
else:
print "valid signature from %d" % msg.node_id
sys.stdout.flush()
if msg.res.op_type == SIGN_UP or msg.res.op_type == CHANGE_CRED:
req_id = msg.res.user_id
elif msg.res.op_type == SIGN_IN:
req_id = msg.res.user_id + msg.res.token
# Added the new rep
if req_id in USER_REQUESTS and not USER_REQUESTS[req_id][2]:
USER_REQUESTS[req_id][1].append(msg)
# Check if there are 2F + 1 matching
matching_reps = self
kobtea/gof | strategy.py | Python | mit | 2,642 | 0
#!/usr/bin/env python
import random
class WinningStrategy:
def __init__(self):
self.won = False
self.prev_hand = 0
def next_hand(self):
if not self.won:
self.prev_hand = random.randint(0, 2)
return self.prev_hand
def study(self, is_win):
self.won = is_win
class ProbStrategy:
def __init__(self):
self.prev_hand = 0
self.curr_hand = 0
# history[previous_hand][current_hand] = won_size
self.history = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1]
]
def next_hand(self):
bet = random.randint(0, sum(self.history[self.prev_hand]))
if bet < self.history[self.prev_hand][0]:
return 0
elif bet < self.history[self.prev_hand][0] \
+ self.history[self.prev_hand][1]:
return 1
else:
return 2
def study(self, is_win):
if is_win:
self.history[self.prev_hand][self.curr_hand] += 1
else:
self.history[self.prev_hand][self.curr_hand + 1] += 1
self.history[self.prev_hand][self.curr_hand + 2] += 1
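# --- Illustrative note (editor's addition, not part of the original script) ---
# The history table above drives the probabilistic choice: next_hand() draws a
# value weighted by past results for the previous hand. For example, if
# history[0] == [3, 1, 1], then after a previous hand of 0 the strategy picks
# hand 0 most often, roughly in proportion to the 3:1:1 win counts.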
class Player:
def __init__(self, name, strategy):
self.name = name
self.strategy = strategy
self.count = {'win': 0, 'lose': 0, 'even': 0}
def next_hand(self):
return self.strategy.next_hand()
def win(self):
self.strategy.study(True)
self.count['win'] += 1
def lose(self):
self.strategy.study(False)
self.count['lose'] += 1
def even(self):
self.count['even'] += 1
class Game:
@classmethod
def fight(cls, hand1, hand2):
if hand1 == hand2:
return 0
elif (hand1 + 1) % 3 == hand2:
return 1
else:
return -1
if __name__ == '__main__':
player1 = Player('hogemi', WinningStrategy())
player2 = Player('foobar', ProbStrategy())
for _ in range(10000):
player1_hand = player1.next_hand()
player2_hand = player2.next_hand()
result = Game.fight(player1_hand, player2_hand)
if result == 0:
player1.even()
player2.even()
elif result == 1:
player1.win()
player2.lose()
elif result == -1:
player2.win()
player1.lose()
print('{} is score: {}'.format(player1.name, player1.count))
print('{} is score: {}'.format(player2.name, player2.count))
'''
hogemi is score: {'lose': 3353, 'even': 3333, 'win': 3314}
foobar is score: {'lose': 3314, 'even': 3333, 'win': 3353}
'''
elifesciences/builder | src/buildvars.py | Python | mit | 6,259 | 0.004474
from buildercore.bvars import encode_bvars, read_from_current_host
from buildercore.command import remote_sudo, upload
from io import StringIO
from decorators import requires_aws_stack
from buildercore.config import BOOTSTRAP_USER
from buildercore.core import stack_all_ec2_nodes, current_node_id
from buildercore.context_handler import load_context
from buildercore import utils as core_utils, trop, keypair
from buildercore.utils import ensure
from pprint import pprint
import utils
import logging
from json import JSONDecodeError
LOG = logging.getLogger(__name__)
OLD, ABBREV, FULL = 'old', 'abbrev', 'full'
def _retrieve_build_vars():
"""wrapper around `read_from_current_host` with integrity checks. returns buildvars for the current instance.
raises AssertionError on bad data."""
try:
buildvars = read_from_current_host()
LOG.debug('build vars: %s', buildvars)
# buildvars exist
ensure(isinstance(buildvars, dict), 'build vars not found (%s). use `./bldr buildvars.fix` to attempt to fix this.' % buildvars)
# nothing important is missing
missing_keys = core_utils.missingkeys(buildvars, ['stackname', 'instance_id', 'branch', 'revision'])
ensure(
len(missing_keys) == 0,
'build vars are not valid: missing keys %s. use `./bldr buildvars.fix` to attempt to fix this.' % missing_keys
)
return buildvars
except (ValueError, AssertionError, JSONDecodeError) as ex:
LOG.exception(ex)
raise
def _update_remote_bvars(stackname, buildvars):
LOG.info('updating %r with new vars %r', stackname, buildvars)
encoded = encode_bvars(buildvars)
fid = core_utils.ymd(fmt='%Y%m%d%H%M%S')
# make a backup
remote_sudo('if [ -f /etc/build-vars.json.b64 ]; then cp /etc/build-vars.json.b64 /tmp/build-vars.json.b64.%s; fi;' % fid)
upload(StringIO(encoded), "/etc/build-vars.json.b64", use_sudo=True)
LOG.info("%r updated. backup written to /tmp/build-vars.json.b64.%s", stackname, fid)
#
@requires_aws_stack
def switch_revision(stackname, revision=None, concurrency=None):
if revision is None:
revision = utils.uin('revision', None)
def _switch_revision_single_ec2_node():
buildvars = _retrieve_build_vars()
if 'revision' in buildvars and revision == buildvars['revision']:
print('FYI, the instance is already on that revision!')
return
new_data = buildvars
new_data['revision'] = revision
_update_remote_bvars(stackname, new_data)
stack_all_ec2_nodes(stackname, _switch_revision_single_ec2_node, username=BOOTSTRAP_USER, concurrency=concurrency)
@requires_aws_stack
def read(stackname):
"returns the unencoded build variables found on given instance"
return stack_all_ec2_nodes(stackname, lambda: pprint(read_from_current_host()), username=BOOTSTRAP_USER)
@requires_aws_stack
def valid(stackname):
return stack_all_ec2_nodes(stackname, lambda: pprint(_retrieve_build_vars()), username=BOOTSTRAP_USER)
@requires_aws_stack
def fix(stackname):
def _fix_single_ec2_node(stackname):
LOG.info("checking build vars on node %s", current_node_id())
try:
buildvars = _retrieve_build_vars()
LOG.info("valid bvars found, no fix necessary: %s", buildvars)
return
except AssertionError:
LOG.info("invalid build vars found, regenerating from context"
|
)
except (ValueError, JSONDecodeError):
LOG.info("bad JSON data found, regenerating from context")
context = load_context(stackname)
# some contexts are missing stackname
context['stackname'] = stackname
node_id = current_node_id()
new_vars = trop.build_vars(context, node_id)
_update_remote_bvars(stackname, new_vars)
stack_all_ec2_nodes(stackname, (_fix_single_ec2_node, {'stackname': stackname}), username=BOOTSTRAP_USER)
# TODO: deletion candidate. can only ever do a shallow update
@requires_aws_stack
def force(stackname, field, value):
"replace a specific key with a new value in the buildvars for all ec2 instances in stack"
def _force_single_ec2_node():
# do not validate build vars.
# this way it can be used to repair buildvars when they are missing some field.
#buildvars = _validate()
buildvars = read_from_current_host()
new_vars = buildvars.copy()
new_vars[field] = value
_update_remote_bvars(stackname, new_vars)
LOG.info("updated bvars %s", new_vars)
stack_all_ec2_nodes(stackname, _force_single_ec2_node, username=BOOTSTRAP_USER)
@requires_aws_stack
def refresh(stackname, context=None):
"(safely) replaces the buildvars file on the ec2 instance(s)"
context = context or load_context(stackname)
def _refresh_buildvars():
old_buildvars = _retrieve_build_vars()
node = old_buildvars.get('node')
if not node or not str(node).isdigit():
# (very) old buildvars. try parsing 'nodename'
nodename = old_buildvars.get('nodename')
if nodename: # ll: "elife-dashboard--prod--1"
node = nodename.split('--')[-1]
if not node.isdigit():
LOG.warning("nodename ends in a non-digit node: %s", nodename)
node = None
if not node:
# no 'node' and no (valid) 'nodename' present
# assume this stack was created before nodes were a thing
# and that there is only 1 in the 'cluster'.
node = 1
new_buildvars = trop.build_vars(context, int(node))
new_buildvars['revision'] = old_buildvars.get('revision') # TODO: is this still necessary?
_update_remote_bvars(stackname, new_buildvars)
# lsh@2019-06: cfn.update_infrastructure fails to run highstate on new (unvisited? not the instance author?)
# ec2 instance if keypair not present, it prompts for a password for the deploy user. prompts when executing
# in parallel cause operation to fail.
keypair.download_from_s3(stackname, die_if_exists=False)
stack_all_ec2_nodes(stackname, _refresh_buildvars, username=BOOTSTRAP_USER)
jmhal/elastichpc | beta/trials/evolving/System.py | Python | mit | 1,945 | 0.028792
#!/usr/bin/python
import ctypes
import sys
import logging
from multiprocessing import Process, Pipe, Value, Manager, Lock
from Platform import platform_unit as platform_unit
from Computation import computation_unit as computation_unit
# configure logging
logging.basicConfig(filename='computational_system.log',level=logging.DEBUG, format='%(created)f|%(message)s')
def log(msg):
logging.debug("SYSTEM: " + msg)
return
class ReconfigurationPort():
def __init__(self):
# self.actuator = Value(ctypes.c_char_p, "empty", lock = True)
manager = Manager()
# a lock for updating/reading the machine file
self.machine_file_lock = manager.Lock()
# Pipes for Communication
self.platform_conn, self.computation_conn = Pipe()
# Methods for Computation
def add_node(self):
self.computation_conn.send(["add_node"])
return self.computation_conn.recv()
def remove_node(self):
self.computation_conn.send(["remove_node"])
return self.computation_conn.recv()
def get_sensors(self):
self.computation_conn.send(["sensors"])
return self.computation_conn.recv()
if __name__ == "__main__":
# The information of the virtual cluster
url = sys.argv[1]
stack_name = sys.argv[2]
stack_id = sys.argv[3]
computation_input = sys.argv[4]
# A port for communication between components
reconfiguration_port = ReconfigurationPort()
log("Starting Platform.")
platform = Process(target = platform_unit, args=(reconfiguration_port, url, stack_name, stack_id))
platform.daemon = True
platform.start()
log("Starting Computation.")
computation = Process(target = computation_unit, args=(reconfiguration_port, computation_input))
computation.daemon = True
computation.start()
log("Waiting on Platform to finish.")
platform.join()
log("Waiting on Computation to finish.")
computation.join()
log("Good bye...")
schilduil/suapp | suapp/simple_json.py | Python | mit | 2,757 | 0.000363
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from pony.orm.core import Entity, SetInstance, Required, Optional
import suapp.orm
__all__ = ["to_json", "dumps"]
def to_json(object_to_serialize):
"""
Adding simple serialization for objects.
If standard json.dumps fails and it is a real object it will try to call
toJSON() on it. If that fails it will raise a TypeError.
"""
result = {}
if isinstance(object_to_serialize, Entity):
for attr in object_to_serialize._attrs_:
column = attr.name
result[column] = getattr(object_to_serialize, column)
elif isinstance(object_to_serialize, suapp.orm.UiOrmObject):
for column in object_to_serialize.ui_attributes:
result[column] = getattr(object_to_serialize, column)
else:
try:
return json.dumps(object_to_serialize)
except TypeError as te:
if isinstance(object_to_serialize, object):
try:
return getattr(object_to_serialize, "toJSON")()
except AttributeError:
raise TypeError(
repr(object_to_serialize) + " is not JSON serializable"
)
# Re-raising the TypeError
raise
return result
# Also putting out the primary key
result["_pk_"] = object_to_serialize._pk_
# result['__str__'] = "%s" % (object_to_serialize)
# Checking for foreign keys
for column, value in result.items():
if isinstance(value, Entity):
value = value._pk_
# Setting it
# If is a Set or tuple it will be set again below.
result[column] = value
if isinstance(value, SetInstance):
# An empty dictonary signals a Set.
result[column] = {}
elif isinstance(value, tuple):
# On json a tuple = list, so might as well use a list.
converted_tuple = []
for subvalue in value:
# Finding out the references to variables.
if isinstance(subvalue, Required) or isinstance(subvalue, Optional):
cur_obj = object_to_serialize
path = str(subvalue).split(".")[1:]
while len(path) > 0:
subvalue = getattr(cur_obj, path.pop(0))
cur_obj = subvalue
if isinstance(subvalue, Entity):
subvalue = subvalue._pk_
converted_tuple.append(subvalue)
result[column] = converted_tuple
return result
def dumps(object_to_serialize, **kwargs):
kwargs["default"] = to_json
return json.dumps(object_to_serialize, **kwargs)
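# --- Illustrative usage sketch (editor's addition, not part of the original module) ---
# dumps() is intended as a drop-in for json.dumps that understands Pony entities
# and UiOrmObject wrappers. The 'Person' entity and db_session use below are
# hypothetical.
#
# from pony.orm import db_session
# with db_session:
#     person = Person[1]
#     print(dumps(person, indent=2))   # entity references collapse to primary keys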
galbramc/gpkit | gpkit/posyarray.py | Python | mit | 7,110 | 0.000563
# -*- coding: utf-8 -*-
"""Module for creating PosyArray instances.
Example
-------
>>> x = gpkit.Monomial('x')
>>> px = gpkit.PosyArray([1, x, x**2])
"""
import numpy as np
from .small_classes import Numbers
from . import units as ureg
from . import DimensionalityError
Quantity = ureg.Quantity
class PosyArray(np.ndarray):
"""A Numpy array with elementwise inequalities and substitutions.
Arguments
---------
input_array : array-like
Example
-------
>>> px = gpkit.PosyArray([1, x, x**2])
"""
def __str__(self):
"Returns list-like string, but with str(el) instead of repr(el)."
if self.shape:
return "[" + ", ".join(str(p) for p in self) + "]"
else:
return str(self.flatten()[0])
def __repr__(self):
"Returns str(self) tagged with gpkit information."
if self.shape:
return "gpkit.%s(%s)" % (self.__class__.__name__, str(self))
else:
return str(self.flatten()[0])
def __hash__(self):
return getattr(self, "_hashvalue", hash(self.tostring()))
def __new__(cls, input_array):
"Constructor. Required for objects inheriting from np.ndarray."
# Input array is an already formed ndarray instance
# cast to be our class type
obj = np.asarray(input_array).view(cls)
return obj
def __array_finalize__(self, obj):
"Finalizer. Required for objects inheriting from np.ndarray."
pass
def __array_wrap__(self, out_arr, context=None):
"""Called by numpy ufuncs.
Special case to avoid creation of 0-dimensional arrays
See http://docs.scipy.org/doc/numpy/user/basics.subclassing.html"""
if out_arr.ndim:
return np.ndarray.__array_wrap__(self, out_arr, context)
try:
val = out_arr.item()
return np.float(val) if isinstance(val, np.generic) else val
except:
print("Something went wrong. I'd like to raise a RuntimeWarning,"
" but you wouldn't see it because numpy seems to catch all"
" Exceptions coming from __array_wrap__.")
raise
def latex(self, unused=None, matwrap=True):
"Returns 1D latex list of contents."
if len(self.shape) == 0:
return self.flatten()[0].latex()
if len(self.shape) == 1:
return (("\\begin{bmatrix}" if matwrap else "") +
" & ".join(el.latex() for el in self) +
("\\end{bmatrix}" if matwrap else ""))
elif len(self.shape) == 2:
return ("\\begin{bmatrix}" +
" \\\\\n".join(el.latex(matwrap=False) for el in self) +
"\\end{bmatrix}")
else:
return None
def _repr_latex_(self):
return "$$"+self.latex()+"$$"
def __nonzero__(self):
"Allows the use of PosyArrays as truth elements."
return all(p.__nonzero__() for p in self)
def __bool__(self):
"Allows the use of PosyArrays as truth elements in python3."
return all(p.__bool__() for p in self)
@property
def c(self):
try:
floatarray = np.array(self, dtype='float')
if not floatarray.shape:
return floatarray.flatten()[0]
else:
return floatarray
except TypeError:
raise ValueError("only a posyarray of numbers has a 'c'")
_eq = np.vectorize(lambda a, b: a == b)
def __eq__(self, other):
"Applies == in a vectorized fashion."
if isinstance(other, Quantity):
if isinstance(other.magnitude, np.ndarray):
l = []
for i, e in enumerate(self):
l.append(e == other[i])
return PosyArray(l)
else:
return PosyArray([e == other for e in self])
return PosyArray(self._eq(self, other))
def __ne__(self, other):
"Does type checking, then applies 'not ==' in a vectorized fashion."
return (not isinstance(other, self.__class__)
or not all(self._eq(self, other)))
# inequality constraints
_leq = np.vectorize(lambda a, b: a <= b)
def __le__(self, other):
"Applies '<=' in a vectorized fashion."
if isinstance(other, Quantity):
if isinstance(other.magnitude, np.ndarray):
l = []
for i, e in enumerate(self):
l.append(e <= other[i])
return PosyArray(l)
else:
return PosyArray([e <= other for e in self])
return PosyArray(self._leq(self, other))
_geq = np.vectorize(lambda a, b: a >= b)
def __ge__(self, other):
"Applies '>=' in a vectorized fashion."
if isinstance(other, Quantity):
if isinstance(other.magnitude, np.ndarray):
l = []
for i, e in enumerate(self):
l.append(e >= other[i])
return PosyArray(l)
else:
return PosyArray([e >= other for e in self])
return PosyArray(self._geq(self, other))
def outer(self, other):
"Returns the array and argument's outer product."
return PosyArray(np.outer(self, other))
def sub(self, subs, val=None, require_positive=True):
"Substitutes into the array"
return PosyArray([p.sub(subs, val, require_positive) for p in self])
@property
def units(self):
units = None
for el in self: # does this need to be done with np.iter?
if not isinstance(el, Numbers) or el != 0 and not np.isnan(el):
if units:
try:
(units/el.units).to("dimensionless")
except DimensionalityError:
raise ValueError("all elements of a PosyArray must"
" have the same units.")
else:
units = el.units
return units
def padleft(self, padding):
"Returns ({padding}, self[0], self[1] ... self[N])"
if self.ndim != 1:
raise NotImplementedError("not implemented for ndim = %s" %
self.ndim)
padded = PosyArray(np.hstack((padding, self)))
padded.units # check that the units are consistent
return padded
def padright(self, padding):
"Returns (self[0], self[1] ... self[N], {padding})"
if self.ndim != 1:
raise NotImplementedError("not implemented for ndim = %s" %
self.ndim)
padded = PosyArray(np.hstack((self, padding)))
padded.units # check that the units are consistent
return padded
@property
def left(self):
"Returns (0, self[0], self[1] ... self[N-1])"
return self.padleft(0)[:-1]
@property
def right(self):
"Returns (self[1], self[2] ... self[N], 0)"
return self.padright(0)[1:]
w01230/test | python/numpy/np_index2.py | Python | gpl-3.0 | 197 | 0
# -*- coding: utf-8 -*-
import numpy as np
x = np.array([3, 2, 1, 0])
print(x[[0, 1, 2]])
print(x[[-1, -2, -3]])
y = np.array([[1, 2], [3, 4], [5, 6]])
print(y[[0, 1]])
print(y[[0, 1], [0, 1]])
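# --- Illustrative note (editor's addition, not part of the original script) ---
# Expected results of the fancy indexing above: with x = [3, 2, 1, 0],
# x[[0, 1, 2]] selects by position -> [3 2 1] and x[[-1, -2, -3]] counts from the
# end -> [0 1 2]; for y, y[[0, 1]] picks rows 0 and 1, while y[[0, 1], [0, 1]]
# pairs row and column indices -> [1 4].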
OCA/partner-contact | partner_address_street3/hooks.py | Python | agpl-3.0 | 919 | 0
# Copyright 2016-2020 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
def post_init_hook(cr, registry):
""" Add street3 to address format """
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street2)s\n',
E'%(street2)s\n%(street3)s\n'
)
"""
cr.execute(query)
def uninstall_hook(cr, registry):
""" Remove street3 from address format """
# Remove %(street3)s\n from address_format
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street3)s\n',
''
)
"""
cr.execute(query)
# Remove %(street3)s from address_format
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street3)s',
''
)
"""
cr.execute(query)
|
atantet/transferPlasim
|
statistics/plotIndices.py
|
Python
|
gpl-2.0
| 4,841
| 0.005784
|
import os
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
from matplotlib import cm
import atmath
# Define the observable
srcDir = '../runPlasim/postprocessor/indices/'
# SRng = np.array([1260, 1360, 1380, 1400, 1415, 1425, 1430, 1433,
# 1263, 1265, 1270, 1280, 1300, 1330, 1360, 1435])
# restartStateRng = np.concatenate((['cold']*8, ['warm']*8), 0)
SRng = np.array([1263, 1265, 1270, 1280, 1300, 1330, 1360, 1435])
restartStateRng = ['warm']*8
#SRng = np.array([1263, 1265])
#restartStateRng = ['warm']*2
firstYear = 101
lastYear = 4200
yearsPerFile = 100
daysPerYear = 360
#indexChoice = ('globmst',)
#indexChoice = ('npolemst',)
#indexChoice = ('globdep',)
#indexChoice = ('eqdep',)
#indexChoice = ('MTG',)
#indexChoice = ('areabelowtf20nhemi',)
indexChoice = ('areabelowtfnhemi',)
# Case definition
spinupYears = 100 # Remove spinup period from time-series
spinup = spinupYears * daysPerYear
sampFreq = 1 # (days^{-1})
# Plot settings
fs_default = 'x-large'
fs_latex = 'xx-large'
fs_xlabel = fs_default
fs_ylabel = fs_default
fs_xticklabels = fs_default
fs_yticklabels = fs_default
fs_legend_title = fs_default
fs_legend_labels = fs_default
fs_cbar_label = fs_default
# figFormat = 'eps'
figFormat = 'png'
dpi = 300
varRng = np.empty((SRng.shape[0],))
skewRng = np.empty((SRng.shape[0],))
kurtRng = np.empty((SRng.shape[0],))
lagMax = 80
#lagMax = daysPerYear * 5
ccfRng = np.empty((SRng.shape[0], lagMax*2+1))
for k in np.arange(SRng.shape[0]):
S = SRng[k]
restartState = restartStateRng[k]
# Create directories
resDir = '%s_%s/' % (restartState, S)
dstDir = resDir
indicesPath = '%s/%s/' % (srcDir, resDir)
os.system('mkdir stats %s %s/seasonal %s/anom 2> /dev/null' % (dstDir, dstDir, dstDir))
# Read datasets
obsName = '%s_%d_%05d_%05d_anom' % (restartState, S, firstYear, lastYear)
indexFile = '%s_%s_%d_%05d_%05d.txt' \
% (indexChoice[0], restartState, S, firstYear, lastYear)
print 'Reading index file %s...' % indexFile
observable = np.loadtxt('%s/%s' % (indicesPath, indexFile))
ntFull = observable.shape[0]
obsName += '_%s' % indexChoice[0]
# Get time steps array
time = np.arange(spinup, ntFull)
nt = ntFull - spinup
observable = observable[spinup:]
seasonal = np.empty((daysPerYear,))
anom = np.empty((nt,))
for day in np.arange(daysPerYear):
seasonal[day] = observable[day::daysPerYear].mean()
anom[day::daysPerYear] = observable[day::daysPerYear] - seasonal[day]
varRng[k] = anom.var()
skewRng[k] = stats.skew(anom)
kurtRng[k] = stats.kurtosis(anom)
ccfRng[k] = atmath.ccf(anom, anom, lagMax=lagMax)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(np.arange(1, daysPerYear+1), seasonal)
ax.set_xlabel(r'days', fontsize=fs_latex)
ax.set_ylabel(indexChoice[0], fontsize=fs_latex)
plt.setp(ax.get_xticklabels(), fontsize=fs_xticklabels)
plt.setp(ax.get_yticklabels(), fontsize=fs_yticklabels)
plt.title('Seasonal cycle for case %s_%d\n\sigma = %.5f' % (restartState, S, seasonal.std()))
fig.savefig('%s/seasonal/seasonal_%s.%s' % (dstDir, obsName, figFormat),
bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(time[200*daysPerYear:203*daysPerYear], anom[200*daysPerYear:203*daysPerYear])
ax.set_xlabel(r'days', fontsize=fs_latex)
ax.set_ylabel(indexChoice[0], fontsize=fs_latex)
plt.setp(ax.get_xticklabels(), fontsize=fs_xticklabels)
plt.setp(ax.get_yticklabels(), fontsize=fs_yticklabels)
plt.title('Anomalies for case %s_%d\n\sigma = %.5f' % (restartState, S, anom.std()))
fig.savefig('%s/anom/anom_%s.%s' % (dstDir, obsName, figFormat),
bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(SRng, varRng)
fig.savefig('stats/variance_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(SRng, skewRng)
fig.savefig('stats/skewness_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(SRng, kurtRng)
fig.savefig('stats/kurtosis_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
fig = plt.figure()
ax = fig.add_subplot(111)
for k in np.arange(SRng.shape[0]/2):
S = SRng[k]
ax.plot(np.arange(-lagMax, lagMax+1), ccfRng[k], label=str(S), linestyle='-')
for k in np.arange(SRng.shape[0]/2, SRng.shape[0]):
S = SRng[k]
ax.plot(np.arange(-lagMax, lagMax+1), ccfRng[k], label=str(S), linestyle='--')
ax.legend(loc='upper right')
ax.set_xlim(0, lagMax)
ax.set_ylim(-0.05, 1.)
fig.savefig('stats/acf_%s.%s' % (indexChoice[0], figFormat), bbox_inches='tight', dpi=dpi)
|
V3ckt0r/Quickspin
|
Quickspin/quickspin.py
|
Python
|
lgpl-3.0
| 8,275
| 0.005317
|
#! /usr/bin/python
import boto3
import argparse
import sys
import inspect
import getpass
import os.path
import time
from os.path import expanduser
# Set up acceptable arguments
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument("-u","--up", nargs='+', help="List of EC2 ids to bring up", required=False)
parser.add_argument("-d","--down", nargs='+', help="List of EC2 ids to bring down", required=False)
parser.add_argument("-c","--create", nargs='+', help="Create an EC2 instance", required=False)
    parser.add_argument("-r","--remove", nargs='+', help="Remove an EC2 instance", required=False)
parser.add_argument("-k", "--config", help="Configure Quickspin with your AWS credentials", action="store_true")
parser.add_argument("-l", "--list", help="Show all EC2 instances running", action="store_true")
    parser.add_argument("-la", "--listall", help="Show all EC2 instances", action="store_true")
parser.add_argument("-v", "--dryrun", help="Perform a dry run of a command", action="store_true")
return parser
# Configure AWS credentials
def configaws():
# User's home
home = expanduser("~")
# create aws credentials file
if os.path.isfile(home+"/.aws/credentials"):
print "Your credentials are already setup"
else:
aws_key = raw_input("Enter your AWS key: ")
aws_secret = getpass.getpass(prompt='Enter your AWS secret: ')
file_name = os.path.join(home+"/.aws/", "credentials")
file = open(file_name, "w")
file.write("[default]")
file.write("\n")
file.write("aws_access_key_id = {}".format(aws_key))
file.write("\n")
file.write("aws_secret_access_key = {}".format(aws_secret))
file.write("\n")
file.close()
# create AWS config file
if os.path.isfile(home+"/.aws/config"):
print "Your config is already setup"
else:
aws_region = raw_input("What region do you want to connect to? (regions can be found here http://docs.aws.amazon.com/general/latest/gr/rande.html): ")
conf_file_name = os.path.join(home+"/.aws/", "config")
conf_file = open(conf_file_name, "w")
conf_file.write("[default]")
conf_file.write("\n")
conf_file.write("# AWS regions")
conf_file.write("\n")
conf_file.write("region = {}".format(aws_region))
conf_file.write("\n")
conf_file.close()
# Establish boto connections
def connect():
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
def createInstance(name, size, count=1):
client = boto3.client('ec2')
ec2 = boto3.resource('ec2')
user = getpass.getuser()
# create instance
instance = ec2.create_instances(
DryRun=False,
ImageId='ami-e4c63e8b',
MinCount=count,
MaxCount=count,
KeyName='BDA-graphana',
InstanceType=size,
SecurityGroups=[
'BDA-zen-dev',
],
)
instance_id = instance[0].id
# check state of new instance
response = ''
state = ''
info = 'Waiting for instance to start up..'
while state != "running":
info += '.'
print info
time.sleep(1)
response = client.describe_instances(InstanceIds=[instance_id])
state = response[u'Reservations'][0][u'Instances'][0][u'State'][u'Name']
# Tag new instance
tag = ec2.create_tags(Resources=[instance_id], Tags=[{'Key':'Name', 'Value': user+"-"+name}])
if state == "running":
        print "Instance {} created successfully, instance id is {}".format(user+"-"+name, instance_id)
return 0
else:
print "Something went wrong"
return 1
# Destroy instance
def deleteInstance(ids):
ec2 = boto3.resource('ec2')
try:
ec2.instances.filter(InstanceIds=ids).terminate()
for e in ids:
print "Instance {} terminated...".format(e)
except boto3.exceptions.botocore.exceptions.ClientError:
print "Invalid id given, check id is correct and try again"
sys.exit(1)
# List all instance in Region using client
def listAllRunning():
client = boto3.client('ec2')
response = client.describe_instances()
print "InstanceID Tags InstanceType PrivateIP LaunchTime State"
for i in response["Reservations"]:
for ins in i["Instances"]:
if ins[u'State'][u'Name'] == "terminated":
print(ins["InstanceId"], ins["Tags"][0]["Value"], ins["InstanceType"], " ", ins[
"LaunchTime"], ins["State"]["Name"]), "\n"
break
print(ins["InstanceId"], ins["Tags"][0]["Value"], ins["InstanceType"], ins["PrivateIpAddress"], ins["LaunchTime"], ins["State"]["Name"]), "\n"
return True
# List all running instances in Region
def listRunning():
ec2 = boto3.resource('ec2')
instances = ec2.instances.filter(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}])
try:
for instance in instances:
for tag in instance.tags:
if tag['Key'] == 'Name':
print(instance.id, tag['Value'], instance.instance_type, instance.public_ip_address)
return 0
except boto3.exceptions.botocore.exceptions.EndpointConnectionError:
print "Check that you have internet connection and the correct proxy settings"
sys.exit(1)
# Spin up from a list of instances ids
def upIt(instance_list, DryRun=False):
client = boto3.client('ec2')
try:
response = client.start_instances( InstanceIds=instance_list, AdditionalInfo='string', DryRun=DryRun)
responseCheck(response)
except boto3.exceptions.botocore.exceptions.ClientError:
        print "Instances would have started, however this was a Dry Run"
        return "DryRun"
# Bring down from a list of instances ids
def downIt(instance_list, DryRun=False):
client = boto3.client('ec2')
try:
response = client.stop_instances( InstanceIds=instance_list, Force=False, DryRun=DryRun)
responseCheck(response)
except boto3.exceptions.botocore.exceptions.ClientError:
        print "Instances would have stopped, however this was a Dry Run"
return "DryRun"
# Check the response for a given action and evaluate the calling function from the stack.
def responseCheck(response):
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
callingFrame = calframe[1][3]
if response['ResponseMetadata']['HTTPStatusCode'] == 200 and callingFrame == "upIt":
        print "Instances have all started successfully..."
return 0
elif response['ResponseMetadata']['HTTPStatusCode'] == 200 and callingFrame == "downIt":
        print "Instances have all been stopped successfully..."
return 0
else:
        error_response = response['ResponseMetadata']['HTTPStatusCode']
        print "Error code {} returned.".format(error_response)
return 1
def main():
parser = create_parser()
args = parser.parse_args()
if len(sys.argv) <= 1:
print "You must use a flag to tell quickspin what to do... use -h for help"
sys.exit(1)
if args.config:
configaws()
sys.exit(0)
if args.create:
exitSwitch = 0
#check for instance size specification
try:
size = args.create[1]
except IndexError:
message = 'You need to specify a size for this instance'
print message
exitSwitch = 1
if exitSwitch == 1:
sys.exit(1)
createInstance(args.create[0], args.create[1])
sys.exit(0)
if args.remove:
deleteInstance(args.remove)
sys.exit(0)
if args.list:
listRunning()
sys.exit(0)
if args.listall:
listAllRunning()
sys.exit(0)
if args.up:
if args.dryrun:
upIt(args.up, DryRun=True)
else:
upIt(args.up)
sys.exit(0)
if args.down:
if args.dryrun:
downIt(args.down, DryRun=True)
else:
downIt(args.down)
sys
|
CCS-Lab/hBayesDM
|
Python/tests/test_gng_m3.py
|
Python
|
gpl-3.0
| 198
| 0
|
import pytest
from hbayesdm.models import gng_m3
def test_gng_m3():
    _ = gng_m3(
        data="example", niter=10, nwarmup=5, nchain=1, ncore=1)
if __name__ == '__main__':
pytest.main()
|
ProjectALTAIR/Simulation
|
mdp/tm.py
|
Python
|
gpl-2.0
| 825
| 0.024242
|
"""
The Transition Model.
Init: State object with information about current location & ground velocity, the action
which will be taken, and an environment object containing wind information
and simulation parameters.
This is done as an object so that multiple models may be used at the same time for
reinforcement learning techniques. For example, learning the true environment given
an approximation. The AI could learn from a predicted weather environment, but the
tm could use the real data.
"""
class Tm:
def __init__(self,state,action,environment):
"""
update() computes the n+1 state and updates state location/velocity assigning nth state
to the associated object variables for previous states, and the new n+1 to be the current
state.
"""
    def update(self,state,action,environment):
return state
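# Illustrative usage sketch (not part of the original module, which only
# documents the constructor): the `state`, `action` and `environment` objects
# below are assumptions about instances built elsewhere in the simulation.
#
#   tm = Tm(state, action, environment)
#   next_state = tm.update(state, action, environment)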
|
grimfang/panda3d
|
direct/src/wxwidgets/WxPandaShell.py
|
Python
|
bsd-3-clause
| 9,624
| 0.010183
|
import wx
from wx.lib.agw import fourwaysplitter as FWS
from panda3d.core import *
from direct.showbase.ShowBase import *
from direct.directtools.DirectGlobals import *
try:
base
except NameError:
base = ShowBase(False, windowType = 'none')
from .WxAppShell import *
from .ViewPort import *
ID_FOUR_VIEW = 401
ID_TOP_VIEW = 402
ID_FRONT_VIEW = 403
ID_LEFT_VIEW = 404
ID_PERSP_VIEW = 405
class WxPandaShell(WxAppShell):
""" Class for Panda3D LevelEditor """
frameWidth = 800
frameHeight = 600
appversion = '1.0'
appname = 'Panda3D Generic WX Frame'
copyright = ('Copyright 2010 Disney Online Studios.' +
'\nAll Rights Reserved.')
MENU_TEXTS = {
ID_FOUR_VIEW : ("Four Views", None),
ID_TOP_VIEW : ("Top View", None),
ID_FRONT_VIEW : ("Front View", None),
ID_LEFT_VIEW : ("Left View", None),
ID_PERSP_VIEW : ("Persp View", None),
}
def __init__(self, fStartDirect = False):
fDirect = (base.config.GetBool('want-directtools', 0) or
(base.config.GetString("cluster-mode", '') != ''))
self.fStartDirect = fStartDirect or fDirect
# Create the Wx app
self.wxApp = wx.App(redirect = False)
self.wxApp.SetAppName(self.appname)
WxAppShell.__init__(self, size=wx.Size(self.frameWidth, self.frameHeight))
self.initialize()
def createMenu(self):
self.menuView = wx.Menu()
self.menuBar.Insert(self.menuBar.GetMenuCount() - 1, self.menuView, "&View")
menuItem = self.menuView.AppendRadioItem(ID_FOUR_VIEW, self.MENU_TEXTS[ID_FOUR_VIEW][0])
self.Bind(wx.EVT_MENU, lambda p0=None, p1=-1:self.onViewChange(p0, p1), menuItem)
menuItem = self.menuView.AppendRadioItem(ID_TOP_VIEW, self.MENU_TEXTS[ID_TOP_VIEW][0])
self.Bind(wx.EVT_MENU, lambda p0=None, p1=0:self.onViewChange(p0, p1), menuItem)
menuItem = self.menuView.AppendRadioItem(ID_FRONT_VIEW, self.MENU_TEXTS[ID_FRONT_VIEW][0])
self.Bind(wx.EVT_MENU, lambda p0=None, p1=1:self.onViewChange(p0, p1), menuItem)
menuItem = self.menuView.AppendRadioItem(ID_LEFT_VIEW, self.MENU_TEXTS[ID_LEFT_VIEW][0])
self.Bind(wx.EVT_MENU, lambda p0=None, p1=2:self.onViewChange(p0, p1), menuItem)
self.perspViewMenuItem = self.menuView.AppendRadioItem(ID_PERSP_VIEW, self.MENU_TEXTS[ID_PERSP_VIEW][0])
self.Bind(wx.EVT_MENU, lambda p0=None, p1=3:self.onViewChange(p0, p1), self.perspViewMenuItem)
def createInterface(self):
self.createMenu()
self.mainFrame = wx.SplitterWindow(self, style = wx.SP_3D | wx.SP_BORDER)
self.leftFrame = wx.SplitterWindow(self.mainFrame, style = wx.SP_3D | wx.SP_BORDER)
self.baseFrame = wx.SplitterWindow(self.mainFrame, style = wx.SP_3D | wx.SP_BORDER)
self.viewFrame = FWS.FourWaySplitter(self.baseFrame, style=wx.SP_LIVE_UPDATE)
self.rightFrame = wx.SplitterWindow(self.baseFrame, style = wx.SP_3D | wx.SP_BORDER)
self.topView = Viewport.makeTop(self.viewFrame)
self.viewFrame.AppendWindow(self.topView)
self.frontView = Viewport.makeFront(self.viewFrame)
self.viewFrame.AppendWindow(self.frontView)
self.leftView = Viewport.makeLeft(self.viewFrame)
self.viewFrame.AppendWindow(self.leftView)
self.perspView = Viewport.makePerspective(self.viewFrame)
self.viewFrame.AppendWindow(self.perspView)
self.leftBarUpPane = wx.Panel(self.leftFrame)
self.leftBarDownPane = wx.Panel(self.leftFrame)
self.rightBarUpPane = wx.Panel(self.rightFrame)
self.rightBarDownPane = wx.Panel(self.rightFrame)
self.leftFrame.SplitHorizontally(self.leftBarUpPane, self.leftBarDownPane)
self.rightFrame.SplitHorizontally(self.rightBarUpPane, self.rightBarDownPane)
self.mainFrame.SplitVertically(self.leftFrame, self.baseFrame, 200)
self.baseFrame.SplitVertically(self.viewFrame, self.rightFrame, 600)
self.leftFrame.SetSashGravity(0.5)
self.rightFrame.SetSashGravity(0.5)
self.baseFrame.SetSashGravity(1.0)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.mainFrame, 1, wx.EXPAND, 0)
self.SetSizer(sizer); self.Layout()
def initialize(self):
"""Initializes the viewports and editor."""
self.Update()
ViewportManager.updateAll()
self.wxStep()
ViewportManager.initializeAll()
# Position the camera
if base.trackball != None:
base.trackball.node().setPos(0, 30, 0)
base.trackball.node().setHpr(0, 15, 0)
# to make persp view as default
self.perspViewMenuItem.Toggle()
self.onViewChange(None, 3)
# initializing direct
if self.fStartDirect:
base.startDirect(fWantTk = 0, fWantWx = 0)
base.direct.disableMouseEvents()
newMouseEvents = ["_le_per_%s"%x for x in base.direct.mouseEvents] +\
["_le_fro_%s"%x for x in base.direct.mouseEvents] +\
["_le_lef_%s"%x for x in base.direct.mouseEvents] +\
["_le_top_%s"%x for x in base.direct.mouseEvents]
base.direct.mouseEvents = newMouseEvents
base.direct.enableMouseEvents()
base.direct.disableKeyEvents()
            keyEvents = ["_le_per_%s"%x for x in base.direct.keyEvents] +\
["_le_fro_%s"%x for x in base.direct.keyEvents] +\
["_le_lef_%s"%x for x in base.direct.keyEvents] +\
["_le_top_%s"%x for x in base.direct.keyEvents]
base.direct.keyEvents = keyEvents
base.direct.enableKeyEvents()
base.direct.disableModifierEvents()
modifierEvents = ["_le_per_%s"%x for x in base.direct.modifierEvents] +\
["_le_fro_%s"%x for x in base.direct.modifierEvents] +\
["_le_lef_%s"%x for x in base.direct.modifierEvents] +\
["_le_top_%s"%x for x in base.direct.modifierEvents]
base.direct.modifierEvents = modifierEvents
base.direct.enableModifierEvents()
base.direct.cameraControl.lockRoll = True
base.direct.setFScaleWidgetByCam(1)
unpickables = [
"z-guide",
"y-guide",
"x-guide",
"x-disc-geom",
"x-ring-line",
"x-post-line",
"y-disc-geom",
"y-ring-line",
"y-post-line",
"z-disc-geom",
"z-ring-line",
"z-post-line",
"centerLines",
"majorLines",
"minorLines",
"Sphere",]
for unpickable in unpickables:
base.direct.addUnpickable(unpickable)
base.direct.manipulationControl.optionalSkipFlags |= SKIP_UNPICKABLE
base.direct.manipulationControl.fAllowMarquee = 1
base.direct.manipulationControl.supportMultiView()
base.direct.cameraControl.useMayaCamControls = 1
base.direct.cameraControl.perspCollPlane = self.perspView.collPlane
base.direct.cameraControl.perspCollPlane2 = self.perspView.collPlane2
for widget in base.direct.manipulationControl.widgetList:
widget.setBin('gui-popup', 0)
widget.setDepthTest(0)
# [gjeon] to intercept messages here
base.direct.ignore('DIRECT-delete')
base.direct.ignore('DIRECT-select')
base.direct.ignore('DIRECT-preDeselectAll')
base.direct.ignore('DIRECT-toggleWidgetVis')
base.direct.fIgnoreDirectOnlyKeyMap = 1
# [gjeon] do not use the old way of finding current DR
base.direct.drList.tryToGetCurrentDr = False
else:
base.direct=None
#base.closeWindow(base.win)
base.win = base.winList[3]
def wxStep(self, task = None):
|
nebw/cgt
|
cgt/core.py
|
Python
|
mit
| 122,515
| 0.013329
|
import sys, numpy as np, hashlib, copy, cPickle, ctypes, os, os.path as osp
from collections import defaultdict,namedtuple
import __builtin__
import traceback
import cgt
from . import utils
# ================================================================
# Datatypes
# ================================================================
class Dtype: #pylint: disable=W0232
@staticmethod
def canon(dt):
"""
Return canonical string representation of dtype,
using the floating point type that CGT is currently configured for
The following string representations are used: i1,i2,i4,i8, f4,f8,f16 c8,c16,c32
So either we're using single (f4, c8) or double (f8, c16) or quad (f16, c32)
Note that quad precision is very useful for gradient checking
"""
dt = np.dtype(dt)
k = dt.kind
if k=='f':
return cgt.floatX
elif k in 'biu':
return 'i'+str(dt.itemsize)
elif k=='c':
return cgt.complexX
else:
raise ValueError("Invalid dtype %s"%dt)
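    # For instance (a sketch, not from the original source, assuming the
    # default cgt.floatX of 'f4'): Dtype.canon(np.float32) returns 'f4' and
    # Dtype.canon(np.int64) returns 'i8'.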
def as_valid_array(x, dtype=None):
"""
Converts to numpy array and dtype with valid precision
"""
x = np.asarray(x)
x = x.astype(Dtype.canon(x.dtype) if dtype is None else dtype)
return x
def as_valid_tuple(x):
return tuple(as_valid_array(a) for a in x)
# @TUPLES_OF_TENSORS
def as_valid_arg(x):
if isinstance(x, tuple):
return as_valid_tuple(x)
else:
return as_valid_array(x)
class Type(object):
"""
Represents a datatype for Nodes
"""
pass
class TensorType(Type):
"""
Type used to represent computation results (Nodes in the graph)
that are n-dimensional arrays.
Scalars are represented as zero-dimensional arrays
[though we may create a scalar type later for efficiency]
"""
def __init__(self, dtype, ndim):
self.dtype = Dtype.canon(dtype)
self.ndim = ndim
def __repr__(self):
return "Tensor(%s,%s)"%(self.dtype, self.ndim)
def __eq__(self, other):
return self.dtype == other.dtype and self.ndim == other.ndim
def __hash__(self):
return hash((self.dtype, self.ndim))
class TupleType(Type):
"""
A compound type consisting of a tuple of other types
Only tuples of tensors are currently supported
"""
def __init__(self, *eltypes):
assert all(isinstance(eltype, TensorType) for eltype in eltypes) # @TUPLES_OF_TENSORS
self.eltypes = eltypes
self.dtype = 'O'
def __len__(self):
return len(self.eltypes)
def __getitem__(self, i):
return self.eltypes[i]
def __iter__(self):
return iter(self.eltypes)
def __str__(self):
return "Tup(" + ",".join(map(str,self.eltypes))+")"
def __eq__(self, other):
return len(self.eltypes) == len(other.eltypes)\
and all(typ0 == typ1 for (typ0, typ1) in zip(self.eltypes, other.eltypes))
def __hash__(self):
return hash((self.eltypes, self.dtype))
class Device(object):
"""
Represents a location where a computation is performed
devtype: cpu vs gpu
idx: index of which device
"""
def __init__(self, devtype="cpu", idx=0):
assert isinstance(devtype,str) and isinstance(idx,int)
self.devtype = devtype
self.idx = idx
def __eq__(self, other):
return self.devtype == other.devtype and self.idx == other.idx
def __hash__(self):
return hash((self.devtype, self.idx))
def __repr__(self):
return "%s/%s"%(self.devtype,self.idx)
def _promote(typ1, typ2):
"""
Output type of a floating point operation involving these input types
"""
d1 = typ1[0]
s1 = typ1[1:]
d2 = typ2[0]
s2 = typ2[1:]
if d1 == 'c' or d2 == 'c':
return cgt.complexX
elif d1 == 'f' or d2 == 'f':
return cgt.floatX
    elif d1 == 'i' and d2 == 'i':
assert d1 == d2
return d1 + __builtin__.max(s1,s2)
else:
raise ValueError("Don't know what to do with dtypes %s,%s"%(typ1, typ2))
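# Example behaviour (a sketch, assuming cgt.floatX is 'f4'):
#   _promote('i4', 'f8') -> cgt.floatX   (any float operand promotes to floatX)
#   _promote('i2', 'i8') -> 'i8'         (integer operands keep the larger itemsize)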
def _promote_multi(xtypes):
"""
_promote with multiple operands
"""
return reduce(_promote, xtypes)
def dtype_kind(dtype):
"""
one of f,c,i
"""
assert isinstance(dtype, str)
return dtype[0]
def _dtype_itemsize(dtype):
"""
size in bytes
"""
return int(dtype[1:])
def _type_to_int(typ1):
"""
integer type of result of operation such as floor that converts to integer
"""
d1 = dtype_kind(typ1)
if d1 == 'f' or d1 == 'c':
return 'i8'
else:
return typ1
# ================================================================
# Computation Graph Nodes
# ================================================================
class Node(object):
"""
Node in the computation graph
"""
counter = 0 # allows unique identification of argument nodes
# Constants
# ----------------------------------------
def __init__(self, typ, op, parents, props=None, fixed_shape=None, name=None):
self.typ = typ
self.op = op
self.parents = parents
self.props = props or {}
self._fixed_shape = fixed_shape
self.name = name
self.counter = Node.counter
Node.counter += 1
def __repr__(self):
if self.op is None:
return "Argument{%s,name='%s'}"%(self.typ,self.name)
else:
return "Result{%s}"%(str(self.op))
# CGT-specific
# ----------------------------------------
def is_argument(self):
"""
Returns whether Node is an argument
"""
return self.op is None
def is_data(self):
"""
Returns whether Node's Op is data
"""
return self.op is not None and self.op.is_data_op
def is_input(self):
"""
Returns whether this node is either an argument or is data
"""
return self.is_argument() or self.is_data()
def get_diff(self):
"""
Returns a sequence of bool indicating whether output is differentiable wrt each input
"""
return [] if self.op is None else self.op.get_diff(len(self.parents))
def is_tensor(self):
"""
Returns whether this node's type (self.typ) is TensorType
"""
return isinstance(self.typ, TensorType)
def is_tuple(self):
"""
Returns whether this node's type (self.typ) is TupleType
"""
return isinstance(self.typ, TupleType)
def is_scalar(self):
return self.is_tensor() and self.ndim==0
def get_hash(self, node2hash):
"""
Return UNIQUE string identifying this Node
"""
if self.is_input():
return str(self.counter)
else:
hashobj = hashlib.md5(self.op.get_hash())
for p in self.parents:
hashobj.update(node2hash[p])
return hashobj.hexdigest()
def clone(self, newparents):
"""
Create a new Node that applies self.op to `newparents`
Preserve annotations on this node (.props)
"""
if self.is_input(): return self
else: return Node(self.typ, self.op, newparents, props = self.props)
def get_fixed_shape(self):
"""
Returns a tuple of int or None. You'll get ints if this is an argument or data node
with fixed shape provided
"""
if self.is_data():
return self.op.get_fixed_shape()
return (None,)*self.ndim if self._fixed_shape is None else self._fixed_shape
# Math Overloads
# ----------------------------------------
__array_priority__ = 1000 # precedence over numpy operators
def __neg__(self):
return Result(ElwiseUnary("neg"), [self])
def __add__(self, other):
return elwise_binary("+", self, other)
def __sub__(self, other):
return elwise_binary("-", self, other)
def __mul__(self, other):
return elwise_binary("*", self, other)
def __div__(self, other):
return elwise_binary("/", self, other)
def __
|
sotlampr/theano-wrapper
|
tests/test_layers.py
|
Python
|
mit
| 18,340
| 0
|
import unittest
import numpy as np
import theano
import theano.tensor as T
from tests.helpers import (SimpleTrainer, SimpleClf, SimpleTransformer,
simple_reg)
from theano_wrapper.layers import (BaseLayer, HiddenLayer, MultiLayerBase,
BaseEstimator, BaseTransformer,
LinearRegression, LogisticRegression,
MultiLayerPerceptron, MultiLayerRegression,
TiedAutoEncoder, AutoEncoder)
# BASE LAYERS ################################################################
class TestBaseLayer(unittest.TestCase):
""" Tests for layer.py module, which includes various types of layers
for theano-wrapper
"""
def test_base_layer_has_params(self):
base = BaseLayer(100, 10)
self.assertTrue(hasattr(base, 'params'),
msg="Class has no attribute 'parameters'")
def test_base_layer_params_not_empty(self):
base = BaseLayer(100, 10)
self.assertTrue(base.params, msg="Class 'parameters' are empty")
def test_base_layer_no_args(self):
# Test if BaseLayer initializes as expected when given no
        # extra arguments
try:
BaseLayer(100, 10)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_base_layer_params_are_theano_shared_variables(self):
base = BaseLayer(100, 10)
for p in base.params:
self.assertIsInstance(p, theano.compile.SharedVariable)
def test_base_layer_has_input(self):
base = BaseLayer(100, 10)
self.assertTrue(hasattr(base, 'X'))
def test_base_layer_input_is_theano_variable(self):
base = BaseLayer(100, 10)
self.assertIsInstance(base.X, theano.tensor.TensorVariable)
def test_base_layer_weights_shape(self):
base = BaseLayer(100, 10)
self.assertEqual(base.W.get_value().shape, (100, 10))
def test_base_layer_bias_shape(self):
base = BaseLayer(100, 10)
self.assertEqual(base.b.get_value().shape, (10,))
def test_base_layer_weights_shape_single_output(self):
base = BaseLayer(100, 1)
self.assertEqual(base.W.get_value().shape, (100,))
def test_base_layer_bias_shape_single_output(self):
base = BaseLayer(100, 1)
self.assertEqual(base.b.get_value().shape, ())
def test_base_layer_no_output(self):
base = BaseLayer(100, 10)
self.assertFalse(hasattr(base, 'y'))
def test_base_layer_int_output(self):
base = BaseLayer(100, 10, y='int')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'int32')
def test_base_layer_float_output(self):
base = BaseLayer(100, 10, y='float')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'float32')
def test_base_layer_custom_weights(self):
try:
BaseLayer(100, 10, weights=np.random.random_sample((100, 10)))
except TypeError:
self.fail("Class did not accept 'weights' arg")
class TestHiddenLayer(unittest.TestCase):
""" Tests for HiddenLayer class.
This class is used only by other classes, so mostly basic stuff here.
"""
def test_hidden_layer_has_params(self):
base = HiddenLayer(100, 10)
self.assertTrue(hasattr(base, 'params'),
msg="Class has no attribute 'parameters'")
def test_hidden_layer_params_not_empty(self):
base = HiddenLayer(100, 10)
self.assertTrue(base.params, msg="Class 'parameters' are empty")
def test_hidden_layer_no_args(self):
# Test if HiddenLayer initializes as expected when given no
        # extra arguments
try:
HiddenLayer(100, 10)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_hidden_layer_params_are_theano_shared_variables(self):
base = HiddenLayer(100, 10)
for p in base.params:
self.assertIsInstance(p, theano.compile.SharedVariable)
    def test_hidden_layer_has_input(self):
base = HiddenLayer(100, 10)
self.assertTrue(hasattr(base, 'X'))
def test_hidden_layer_input_is_theano_variable(self):
base = HiddenLayer(100, 10)
self.assertIsInstance(base.X, theano.tensor.TensorVariable)
def test_hidden_layer_weights_shape(self):
base = HiddenLayer(100, 10)
self.assertEqual(base.W.get_value().shape, (100, 10))
|
def test_hidden_layer_bias_shape(self):
base = HiddenLayer(100, 10)
self.assertEqual(base.b.get_value().shape, (10,))
def test_hidden_layer_weights_shape_single_output(self):
base = HiddenLayer(100, 1)
self.assertEqual(base.W.get_value().shape, (100,))
def test_hidden_layer_bias_shape_single_output(self):
base = HiddenLayer(100, 1)
self.assertEqual(base.b.get_value().shape, ())
def test_hidden_layer_no_output(self):
base = HiddenLayer(100, 10)
self.assertFalse(hasattr(base, 'y'))
def test_hidden_layer_int_output(self):
base = HiddenLayer(100, 10, y='int')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'int32')
def test_hidden_layer_float_output(self):
base = HiddenLayer(100, 10, y='float')
self.assertTrue(hasattr(base, 'y'))
self.assertTrue(hasattr(base.y, 'dtype'))
self.assertEqual(base.y.dtype, 'float32')
class TestMultiLayerBase(unittest.TestCase):
""" Tests for MultiLayerBase class """
def test_multi_layer_base_has_params(self):
base = MultiLayerBase(100, 50, 10, SimpleClf)
self.assertTrue(hasattr(base, 'params'),
msg="Class has no attribute 'parameters'")
def test_multi_layer_base_params_not_empty(self):
base = MultiLayerBase(100, 50, 10, SimpleClf)
self.assertTrue(base.params, msg="Class 'parameters' are empty")
def test_multi_layer_base_no_args(self):
# Test if MultiLayerBase initializes as expected when given no
        # extra arguments
try:
MultiLayerBase(100, 50, 10, SimpleClf)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_multi_layer_base_single_layer(self):
# Test if MultiLayerBase initializes as expected when given no
        # extra arguments
try:
MultiLayerBase(100, 50, 10, SimpleClf)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_multi_layer_base_multi_layer_single_activation(self):
# Test if MultiLayerBase initializes as expected when given no
        # extra arguments
try:
MultiLayerBase(100, [100, 30, 50], 10, SimpleClf, lambda x: x)
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
def test_multi_layer_base_multi_layer_multi_activation(self):
# Test if MultiLayerBase initializes as expected when given no
        # extra arguments
try:
MultiLayerBase(100, [100, 30, 50], 10, SimpleClf,
[lambda x: x for i in range(3)])
except Exception as e:
self.fail("Class initialization failed: %s" % str(e))
class BaseEstimatorTransformerTests:
def test_has_trainers(self):
clf = self.Clf()
for t in ['epoch', 'sgd']:
self.assertIn(t, clf.trainer_aliases)
def test_builtin_sgd_trainer(self):
clf = self.Clf()
try:
clf.fit(*self.fit_args, 'sgd', max_iter=1)
except Exception as e:
self.fail("Fitting failed: %s" % str(e))
def test_builtin_sgd_trainer_all_args(self):
clf = self.Clf()
try:
clf.fit(*self.fit_args, 'sgd', alpha=0.1, batch_size=20,
max_iter=1, patience=100
|
akeym/cyder
|
cyder/cydhcp/supernet/forms.py
|
Python
|
bsd-3-clause
| 417
| 0
|
from django import forms
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.supernet.models import Supernet
class SupernetForm(forms.ModelForm, UsabilityFormMixin):
class Meta:
        model = Supernet
exclude = ('start_lower', 'start_upper',
'end_lower', 'end_upper')
widgets = {'ip_type': forms.RadioSelect,
'description': forms.Textarea}
|
BrainTech/openbci
|
obci/gui/frontend/main_gui.py
|
Python
|
gpl-3.0
| 4,516
| 0.005316
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author:
# Łukasz Polak <l.polak@gmail.com>
#
"""This is main file for whole GUI part of OpenBCI - main window of whole
application, along with loading all needed modules GUIs"""
# We are using newer version of QVariant through our GUI, so we might as well
# set it here and let all hell loose on users of older versions of PyQT :)
import sip
sip.setapi('QVariant', 2)
import sys
from PyQt4 import QtCore, QtGui
from obci.gui.frontend.config.modules import MODULES_LIST
class BCIMainWindow(QtGui.QMainWindow):
"""Main window of the BCI application - shows list of available plugins and
enables configuration of them"""
def __init__(self, parent=None):
super(BCIMainWindow, self).__init__(parent)
# Holds all modules classes
self.modules = {}
        # Loads modules from config into dictionary
self.processModules(MODULES_LIST)
# TODO: main gui should be made in designer, and not in code here
self.pluginsList = QtGui.QTreeWidget()
        self.pluginsList.setMaximumWidth(200)
self.pluginsList.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
self.pluginsList.setHeaderLabels(["Nazwa"])
for i_plugin in self.modules.values():
l_item = QtGui.QTreeWidgetItem([i_plugin.name])
l_item.plugin = i_plugin
self.pluginsList.addTopLevelItem(l_item)
self.pluginsList.setCurrentItem(None)
self.connect(self.pluginsList, QtCore.SIGNAL("currentItemChanged(QTreeWidgetItem *, QTreeWidgetItem *)"), self.itemChanged)
# Dictionary for configuration widgets of modules
self.dockWidgets = {}
self.currentDockWidget = None
self.setCentralWidget(self.pluginsList)
def itemChanged(self, p_newItem):
"""Called, when selection on lists of plugins changes. Then it displays
configuration window for newly selected plugin, and closes old one,
unless it's floating.
p_newItem (QTreeWidgetItem) - contains newly selected plugin
p_oldItem (QTreeWidgetItem) - contains plugin that was selected
before"""
if self.currentDockWidget != None:
# We remove widget only if it's not floating
if not self.currentDockWidget.isFloating():
self.removeDockWidget(self.currentDockWidget)
else:
self.currentDockWidget.setAllowedAreas(QtCore.Qt.NoDockWidgetArea)
self.currentDockWidget = None
if p_newItem != None:
l_pluginName = p_newItem.plugin.name
# If we haven't configured this plugin yet, we need to create its GUI
if not self.dockWidgets.has_key(l_pluginName):
self.dockWidgets[l_pluginName] = p_newItem.plugin.buildGui(self)
self.dockWidgets[l_pluginName].setMinimumWidth(500)
self.dockWidgets[l_pluginName].setMinimumHeight(500)
p_pluginDock = self.dockWidgets[l_pluginName]
# We allow docking only on right side of window
p_pluginDock.setAllowedAreas(QtCore.Qt.RightDockWidgetArea)
# If dock was floating and closed before, we reset him into dock
if not p_pluginDock.isVisible() and p_pluginDock.isFloating():
p_pluginDock.setFloating(False)
self.restoreDockWidget(p_pluginDock)
self.currentDockWidget = p_pluginDock
self.addDockWidget(QtCore.Qt.RightDockWidgetArea, p_pluginDock)
def processModules(self, p_modulesList):
"""Processes list with module names, and loads appropriate modules into
program"""
for i_moduleName in p_modulesList:
self.processModule(i_moduleName)
    def processModule(self, p_moduleName):
        """Processes a single module with the given name and loads it into the program"""
# We are importing module from correct directory...
l_bciModule = __import__("obci.gui.frontend.modules.%s.%s_module" % (p_moduleName, p_moduleName), fromlist=["modules.%s" % (p_moduleName)])
# ...and then we create and save its main class into modules dictionary
self.modules[p_moduleName] = eval("bci_module.%sModule()" % (p_moduleName.title()), {'bci_module' : l_bciModule})
if __name__ == "__main__":
# We simply show main window
APPLICATION = QtGui.QApplication(sys.argv)
WINDOW = BCIMainWindow()
WINDOW.show()
sys.exit(APPLICATION.exec_())
|
schwa-lab/lsh
|
lsh/hashes.py
|
Python
|
mit
| 2,098
| 0.025262
|
#!/usr/bin/python3
import numpy, sys
#from test.generate_test_vectors import TestVectorGenerator
import pyximport; pyximport.install()
from lsh import bits
class Projection:
def __init__(self, n_bits, n_feats):
self.n_bits = n_bits
self.n_feats = n_feats
self.vectors = numpy.random.randn(self.n_bits, self.n_feats)
def hash(self, v):
h = numpy.dot(self.vectors, v)
# h = ''.join('1' if x > 0 else '0' for x in h)
# h = sum(1 << (len(h) - i - 1) for i, b in enumerate(h) if b > 0)
# return int(h, 2)
return h
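    # Hypothetical helper (a sketch, not part of the original class): the
    # commented-out lines above suggest packing the sign of each projected
    # component into an integer bit signature; one possible version follows.
    def hash_bits(self, v):
        h = numpy.dot(self.vectors, v)
        # set bit i when the i-th projected component is positive
        return sum(1 << i for i, x in enumerate(h) if x > 0)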
def main(n_vecs):
generator = TestVectorGenerator()
proj = Projection(100, 1000)
for n in range(n_vecs):
id, vec = generator.get()
signature = proj.hash(vec)
print(id, vec)
print(signature.bin)
def test_random_vecs(n_vecs):
generator = TestVectorGenerator()
for n in range(n_vecs):
id, vec = generator.get()
proj = Projection(100, 1000)
signature = proj.hash(vec)
print(id, vec)
print(signature.bin)
# Change half the bits
for i in range(500):
vec[i] = 1
signature2 = proj.hash(vec)
print(signature2.bin)
print(signature == signature2)
print(len((signature ^ signature2).bin.replace('0', '')))
import json
def test_json(f):
BITS = 128
f_space = set()
docs = []
for id, name, bow in json.load(open(f)):
docs.append((id, name, bow))
|
f_space |= set(bow.keys())
f_space = filter(lambda x: x, f_space) # remove empty strings
# vectors are sparse so we want to lookup into them directly
f_space = dict(((v, k) for k, v in enumerate(f_space)))
length = len(f_space.keys())
proj = Projection(BITS, length)
for id, name, bow in docs:
vec = [0 for x in range(length)]
for word, count in bow.items():
            if not word: # remove empty strings, again
continue
vec[f_space[word]] = count
print(id, "{0:064b}".format(proj.hash(vec)))
if __name__ == '__main__':
#main(int(sys.argv[1]))
test_json(sys.argv[1])
|
OpenACalendar/OpenACalendar-Tools-Social
|
example-facebook-post-weekly/facebook-post-weekly.py
|
Python
|
bsd-3-clause
| 2,695
| 0.007421
|
#!/usr/bin/env python
import logging
from pdb import set_trace
import requests
import simplejson
from time import time
import os
import facebook
# MY_API_URL
# MY_SITE_MSG
# MY_GROUP_NAME
# POST_TO_ID = None
def run():
data = get_from_cal_json()
msg = create_msg(data)
    post(msg)
def get_from_cal_json():
print "Getting data from OpenACalendar"
r = requests.get(MY_API_URL)
    if r.status_code != requests.codes.ok:
r.raise_for_status()
j = simplejson.loads(r.text)
now = time()
inaweek = now + 60 * 60 * 24 * 7
data = [
x for x in j['data']
if x['start']['timestamp'] > now
and x['start']['timestamp'] < inaweek
and not x['deleted']
]
print "Got Data From OpenACalendar"
return data
def create_msg(data):
for x in data:
x['displaystart'] = x['start']['displaytimezone']
msgbits = []
msgbits.append(MY_SITE_MSG + ':')
msgbits.append('')
for x in data:
msgbits.append(x['displaystart'])
msgbits.append(x['summaryDisplay'])
msgbits.append(x['url'])
msgbits.append('')
msg = '\n'.join(msgbits)
return msg
def get_group_ids(graph):
print "Getting Groups ID"
# need user_groups permission
# Why doesn't Facebook provide an API for getting the
# group id from the name?
my_groups = graph.get_connections('me', 'groups')['data']
print "Got Group ID"
# Add your group names here
group_names = [
MY_GROUP_NAME,
]
assert group_names, "Need to add some groups to post to"
return [x['id'] for x in my_groups if x['name'] in group_names]
def post(msg):
token = os.environ['FACEBOOK_ACCESS_TOKEN']
graph = facebook.GraphAPI(token)
profile = graph.get_object("me")
if POST_TO_ID is None:
group_ids = get_group_ids(graph)
else:
group_ids = [ POST_TO_ID, ]
print msg
return
for group_id in group_ids:
print "Posting to "+str(group_id)
graph.put_object(str(group_id), "feed", message=msg)
print "Posted!"
if __name__ == '__main__':
try:
MY_API_URL
except:
print "Set your MY_API_URL e.g. 'http://jazzcal.com/api1/events.json'"
exit (-1)
try:
MY_SITE_MSG
except:
print "Set your MY_SITE_MSG e.g. 'This week's jazz gigs on Jazzcal.com'"
exit (-1)
try:
MY_GROUP_NAME
except:
print "Set your MY_GROUP_NAME"
exit (-1)
try:
token = os.environ['FACEBOOK_ACCESS_TOKEN']
except:
print "Set the env var FACEBOOK_ACCESS_TOKEN"
exit (-1)
run()
# eof
|
enthought/etsproxy
|
enthought/envisage/developer/developer_plugin.py
|
Python
|
bsd-3-clause
| 104
| 0
|
# proxy module
from __future__ import absolute_import
from envisage.developer.developer_plugin import *
|
dchabot/ophyd
|
examples/scaler.py
|
Python
|
bsd-3-clause
| 1,039
| 0
|
import time
import config
from ophyd import scaler
from ophyd.utils import enum
ScalerMode = enum(ONE_SHOT=0, AUTO_COUNT=1)
loggers = ('ophyd.signal',
'ophyd.scaler',
)
config.setup_loggers(loggers)
logger = config.logger
sca = scaler.EpicsScaler(config.scalers[0])
sca.preset_time.put(5.2, wait=True)
logger.info('Counting in One-Shot mode for %f s...', sca.preset_time.get())
sca.trigger()
logger.info('Sleeping...')
time.sleep(3)
logger.info('Done sleeping. Stopping counter...')
sca.count.put(0)
logger.info('Set mode to AutoCount')
sca.count_mode.put(ScalerMode.AUTO_COUNT, wait=True)
sca.trigger()
logger.info('Begin auto-counting (aka "background counting")...')
time.sleep(2)
logger.info('Set mode to OneShot')
sca.count_mode.put(ScalerMode.ONE_SHOT, wait=True)
time.sleep(1)
logger.info('Stopping (aborting) auto-counting.')
sca.count.put(0)
logger.info('read() all channels in one-shot mode...')
vals = sca.read()
logger.info(vals)
logger.info('sca.channels.get() shows: %s', sca.channels.get())
|
oblalex/txinvoke
|
setup.py
|
Python
|
mit
| 1,351
| 0
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
__here__ = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(__here__, 'README.rst')).read()
REQUIREMENTS = [
i.strip()
    for i in open(os.path.join(__here__, 'requirements.txt')).readlines()
]
# Get VERSION
version_file = os.path.join('txinvoke', 'version.py')
# Use exec for compatibility with Python 3
exec(open(version_file).read())
setup(
name='txinvoke',
version=VERSION,
description="Run inline callbacks from Twisted as Invoke tasks",
long_description=README,
keywords=[
'twisted', 'invoke', 'task', 'callback', 'deferred', 'asynchronous',
],
license='MIT',
url='https://github.com/oblalex/txinvoke',
    author='Alexander Oblovatniy',
author_email='oblovatniy@gmail.com',
packages=[
'txinvoke',
],
install_requires=REQUIREMENTS,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'License :: Free for non-commercial use',
'Natural Language :: English',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
'Framework :: Twisted',
],
platforms=[
'any',
],
)
|
ufieeehw/IEEE2015
|
ros/ieee2015_vision/src/object_detection/test_functions/ss_find_button_test.py
|
Python
|
gpl-2.0
| 1,759
| 0.03809
|
import cv2
import ss_get_axis_points
import math
import numpy as np
import ss_get_lit_button
#now we don't care about what color the button is
#just care about location of bright center relative to major axis
def find_button(img, mean_cols, mean_rows):
#testing
cv2.circle(img, (mean_cols, mean_rows), 2, (255, 255, 255), 20)
imgf = img.copy()
cv2.imshow('img',imgf)
cv2.waitKey(0)
#get the lines of major and minor + angle or orientation for adjustments
angle, points, goodcircle = ss_get_axis_points.get_axis_points(img, .17)
#below is for testing requires a lot of uncommenting in get axis points too
#cv2.line(img, p1CentMinor, p2CentMinor, (0, 0, 0), 5)
x = goodcircle[0]
y = goodcircle[1]
#formating for testing
temp_point = (x, y)
cv2.line(img, temp_point, (mean_cols, mean_rows), (0, 0, 0), 5)
#get the angle from 0-360 that the point lies, counting minor axis as x axis
calc_angle = math.atan2((mean_rows - y), (mean_cols - x))
calc_angle %= 2 * np.pi
degs = math.degrees(calc_angle)
degs = int(360 - degs + angle)
print degs
#WHOOOT WHOOOT WE GOT ANGLES WORKING
color = 0
    #1 is blue/right
#2 is red/up
    #3 is green/left
#4 is yellow/down
if (degs > 0 and degs < 50) or degs > 315:
print "we have a blue thing"
color = 1
elif degs >= 50 and degs <= 130:
color = 2
print "we have a red thing"
elif degs >130 and degs <= 225:
color = 3
print "we have a green thing"
elif degs > 225 and degs <= 315:
color = 4
print "we have a yellow thing"
imgg = img.copy()
#imgg = cv2.resize(imgg, (0, 0), fx=0.2, fy=0.2)
cv2.imshow('final image for real', imgg)
cv2.waitKey(0)
return color
img = cv2.imread('heights/17cmss.jpg')
#img = cv2.resize(img, (0, 0), fx=0.2, fy=0.2)
find_button(img, 280, 300)
|
pyocd/pyOCD
|
pyocd/cache/register.py
|
Python
|
apache-2.0
| 7,288
| 0.001921
|
# pyOCD debugger
# Copyright (c) 2016-2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from ..core import exceptions
from ..coresight.cortex_m_core_registers import (CortexMCoreRegisterInfo, index_for_reg)
from .metrics import CacheMetrics
LOG = logging.getLogger(__name__)
class RegisterCache(object):
"""@brief Cache of a core's register values.
The only interesting part of this cache is how it handles the special registers: CONTROL,
FAULTMASK, BASEPRI, PRIMASK, and CFBP. The values of the first four registers are read and written
all at once as the CFBP register through the hardware DCRSR register. On reads of any of these
registers, or the combined CFBP, the cache will ask the underlying context to read CFBP. It will
then update the cache entries for all five registers. Writes to any of these registers just
invalidate all five.
Same logic applies for XPSR submasks.
"""
CFBP_INDEX = index_for_reg('cfbp')
XPSR_INDEX = index_for_reg('xpsr')
CFBP_REGS = [index_for_reg(name) for name in [
'cfbp',
'control',
'faultmask',
'basepri',
'primask',
]]
XPSR_REGS = [index_for_reg(name) for name in [
'xpsr',
'apsr',
'iapsr',
'eapsr',
'ipsr',
'epsr',
'iepsr',
]]
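    # Illustrative note (an assumption inferred from the unpacking code in
    # read_core_registers_raw below): a cached CFBP value is treated as a
    # 32-bit word holding CONTROL, FAULTMASK, BASEPRI and PRIMASK one byte
    # each, so each sub-register is recovered by shifting a single byte out of
    # the combined value, and a write to any of them drops all five entries.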
def __init__(self, context, core):
self._context = context
self._core = core
self._run_token = -1
self._reset_cache()
def _reset_cache(self):
self._cache = {}
self._metrics = CacheMetrics()
def _dump_metrics(self):
if self._metrics.total > 0:
LOG.debug("%d reads [%d%% hits, %d regs]", self._metrics.total, self._metrics.percent_hit, self._metrics.hits)
else:
LOG.debug("no accesses")
def _check_cache(self):
"""@brief Invalidates the cache if needed and returns whether the core is running."""
if self._core.is_running():
LOG.debug("core is running; invalidating cache")
self._reset_cache()
return True
elif self._run_token != self._core.run_token:
self._dump_metrics()
LOG.debug("out of date run token; invalidating cache")
self._reset_cache()
self._run_token = self._core.run_token
return False
def _convert_and_check_registers(self, reg_list):
# convert to index only
reg_list = [index_for_reg(reg) for reg in reg_list]
self._core.check_reg_list(reg_list)
return reg_list
def read_core_registers_raw(self, reg_list):
# Invalidate the cache. If the core is still running, just read directly from it.
        if self._check_cache():
return self._context.read_core_registers_raw(reg_list)
reg_list = self._convert_and_check_registers(reg_list)
reg_set = set(reg_list)
# Get list of values we have cached.
cached_set = set(r for r in reg_list if r in self._cache)
self._metrics.hits += len(cached_set)
# Read uncached registers from the target.
read_list = list(reg_set.difference(cached_set))
        reading_cfbp = any(r for r in read_list if r in self.CFBP_REGS)
reading_xpsr = any(r for r in read_list if r in self.XPSR_REGS)
if reading_cfbp:
if not self.CFBP_INDEX in read_list:
read_list.append(self.CFBP_INDEX)
cfbp_index = read_list.index(self.CFBP_INDEX)
if reading_xpsr:
if not self.XPSR_INDEX in read_list:
read_list.append(self.XPSR_INDEX)
xpsr_index = read_list.index(self.XPSR_INDEX)
self._metrics.misses += len(read_list)
# Read registers not in the cache from the target.
if read_list:
try:
values = self._context.read_core_registers_raw(read_list)
except exceptions.CoreRegisterAccessError:
# Invalidate cache on register read error just to be safe.
self._reset_cache()
raise
else:
values = []
# Update all CFBP based registers.
if reading_cfbp:
v = values[cfbp_index]
self._cache[self.CFBP_INDEX] = v
for r in self.CFBP_REGS:
if r == self.CFBP_INDEX:
continue
self._cache[r] = (v >> ((-r - 1) * 8)) & 0xff
# Update all XPSR based registers.
if reading_xpsr:
v = values[xpsr_index]
self._cache[self.XPSR_INDEX] = v
for r in self.XPSR_REGS:
if r == self.XPSR_INDEX:
continue
self._cache[r] = v & CortexMCoreRegisterInfo.get(r).psr_mask
# Build the results list in the same order as requested registers.
results = []
for r in reg_list:
if r in cached_set:
results.append(self._cache[r])
else:
i = read_list.index(r)
v = values[i]
results.append(v)
self._cache[r] = v
return results
# TODO only write dirty registers to target right before running.
def write_core_registers_raw(self, reg_list, data_list):
# Check and invalidate the cache. If the core is still running, just pass the writes
# to our context.
if self._check_cache():
self._context.write_core_registers_raw(reg_list, data_list)
return
reg_list = self._convert_and_check_registers(reg_list)
self._metrics.writes += len(reg_list)
writing_cfbp = any(r for r in reg_list if r in self.CFBP_REGS)
writing_xpsr = any(r for r in reg_list if r in self.XPSR_REGS)
# Update cached register values.
for i, r in enumerate(reg_list):
v = data_list[i]
self._cache[r] = v
# Just remove all cached CFBP and XPSR based register values.
if writing_cfbp:
for r in self.CFBP_REGS:
self._cache.pop(r, None)
if writing_xpsr:
for r in self.XPSR_REGS:
self._cache.pop(r, None)
# Write new register values to target.
try:
self._context.write_core_registers_raw(reg_list, data_list)
except exceptions.CoreRegisterAccessError:
# Invalidate cache on register write error just to be safe.
self._reset_cache()
raise
def invalidate(self):
self._reset_cache()
|
zhaw/Poker-Bot-Reformed
|
pokerstars/move_catcher.py
|
Python
|
mit
| 10,977
| 0.006013
|
from pokerstars.screen_scraper import ScreenScraper
from public import map_card_string_to_tuple
from public import change_terminal_color
import re
import time
import json
import copy
class MoveCatcher():
def __init__(self, to_act, game_driver):
self.to_act = to_act#{{{
self.game_driver = game_driver
self.old_stack = game_driver.stack
self.game_number = game_driver.game_number
self.cards = game_driver.cards
self.active = game_driver.active
self.betting = game_driver.betting
self.old_betting = copy.deepcopy(game_driver.betting)
self.old_stage = copy.deepcopy(game_driver.stage)
if game_driver.source == 'ps':
self.source = 'ps'
else:
self.source = game_driver.source.splitlines()[12:]
self.seat = game_driver.seat
self.made_my_move = 0
self.screen_scraper = game_driver.screen_scraper#}}}
def next_stage(self):
if len(self.cards) == 7:#{{{
return False
elif len(self.cards) == 6:
tmp_card = self.screen_scraper.get_card(6)
if tmp_card:
self.cards.append(tmp_card)
return self.cards
else:
return False
elif len(self.cards) == 5:
tmp_card = self.screen_scraper.get_card(5)
if tmp_card:
self.cards.append(tmp_card)
return self.cards
else:
return False
else:
tmp_card1, tmp_card2, tmp_card3 = \
[self.screen_scraper.get_card(i) for i in xrange(2, 5)]
if tmp_card1 and tmp_card2 and tmp_card3:
self.cards.append(tmp_card1)
self.cards.append(tmp_card2)
self.cards.append(tmp_card3)
return self.cards
else:
return False#}}}
def next_game(self):
new_game_number = self.screen_scraper.get_game_number()#{{{
all_fold = 1
c1, c2 = self.screen_scraper.get_card(0), self.screen_scraper.get_card(1)
c1, c2 = min([c1, c2]), max([c1, c2])
if c1 != self.cards[0] or c2 != self.cards[1]:
change_terminal_color('green')
print 'game over because my cards are changed'
print 'new card:', c1, c2
print 'old card:', self.cards[0], self.cards[1]
change_terminal_color()
return new_game_number
for i in xrange(1, 6):
if not self.screen_scraper.has_fold(i):
all_fold = 0
if new_game_number != self.game_number or all_fold:
change_terminal_color('green')
print 'game over because new game number is', new_game_number
change_terminal_color()
return new_game_number
else:
return False#}}}
def all_even(self):
amount = max(self.betting)#{{{
for i in xrange(6):
if self.active[i] == 1:
if self.betting[i] != amount:
|
return False
return True#}}}
def make_even(self):
        amount = max(self.betting)#{{{
actions = list()
for i in xrange(6):
player = (self.to_act+i) % 6
if self.active[player] < 1:
continue
if self.screen_scraper.has_fold(player):
actions.append([player, 'fold'])
elif self.betting[player] < max(self.betting):
actions.append([player, \
max(self.betting)-self.betting[player]])
self.betting[player] = max(self.betting)
else:
continue
return actions#}}}
def round_search(self):
actions = list()#{{{
shining_player = self.screen_scraper.shining()
while type(shining_player) != int:
self.screen_scraper.update()
shining_player = self.screen_scraper.shining()
for i in xrange(6):
player = (self.to_act+i) % 6
if player == shining_player:
break
if self.active[player] != 1:
continue
elif self.screen_scraper.has_fold(player):
self.active[player] = 0
actions.append([player, 'fold'])
elif self.stack[player] != self.old_stack[player]:
if self.stack[player] == 0:
self.active[player] = 0.5
if self.stack[player] == 'sitting out':
self.active[player] = 0
actions.append([player, 'fold'])
else:
self.betting[player] += self.old_stack[player] - self.stack[player]
self.betting[player] = round(self.betting[player], 2)
actions.append([player, self.old_stack[player]-self.stack[player]])
else:
if self.betting[player] != max(self.betting):
# actions.append([player, max(self.betting)-self.betting[player]])
# self.betting[player] = max(self.betting)
return actions
else:
actions.append([player, 'check'])
self.to_act = player
return actions#}}}
def get_action(self):
self.betting = self.game_driver.betting
if self.source == 'ps':
actions = list()#{{{
self.screen_scraper.update()
self.stack = [[]]*6
for i in xrange(6):
fail = 0
while self.stack[i] == []:
self.stack[i] = self.screen_scraper.get_stack(i)
if self.game_driver.stack[i] == 2.0001:
self.game_driver.stack[i] = self.stack[i]
self.old_stack[i] = self.stack[i]
if self.next_game() != False:
next_game_result = self.next_game()
actions = [['new game', next_game_result]]
return actions
if fail == 1:
self.screen_scraper.update()
fail = 1
next_game_result = self.next_game()
if next_game_result:
actions = [['new game', next_game_result]]
return actions
actions += self.round_search()
next_stage_result = self.next_stage()
if next_stage_result:
if not self.all_even():
actions += self.make_even()
actions.append(['new stage', next_stage_result])
if self.screen_scraper.shining(0):
if self.old_betting[1:] != self.betting[1:]\
or self.game_driver.stage != self.old_stage:
actions.append(['my move', 0])
self.old_betting = copy.deepcopy(self.betting)
self.old_stage = copy.deepcopy(self.game_driver.stage)
for action in actions:
if type(action[1]) == float:
action[1] = round(action[1], 2)#}}}
else:#{{{
while 'has timed out' in self.source[0]\
or 'from pot' in self.source[0]\
or 'said, "' in self.source[0]\
or 'show hand' in self.source[0]\
or 'posts big blind' in self.source[0]\
or 'posts small blind' in self.source[0]\
or 'is disconnect' in self.source[0]\
or 'is connect' in self.source[0]:
self.source = self.source[1:]
instr = self.source[0]
cards = self.cards
self.source = self.source[1:]
if ': ' in instr:
name = ':'.join(instr.split(':')[:-1])
player = self.seat[name]
if player == 0 and not self.made_my_move:
self.source.insert(0, instr)
self.made_my_move = 1
return [['my move', 0]]
OpenELEC/service.openelec.settings | src/service.py | Python | gpl-2.0 | 4,695 | 0.002343
################################################################################
# This file is part of OpenELEC - http://www.openelec.tv
# Copyright (C) 2009-2017 Stephan Raue (stephan@openelec.tv)
# Copyright (C) 2013 Lutz Fiebach (lufie@openelec.tv)
#
# This program is dual-licensed; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenELEC; see the file COPYING. If not, see
# <http://www.gnu.org/licenses/>.
#
# Alternatively, you can license this library under a commercial license,
# please contact OpenELEC Licensing for more information.
#
# For more information contact:
# OpenELEC Licensing <license@openelec.tv> http://www.openelec.tv
################################################################################
# -*- coding: utf-8 -*-
import oe
import xbmc
import xbmcgui
import time
import threading
import socket
import os
import xbmcaddon
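# service_thread below listens on a unix domain socket
# (/var/run/service.openelec.settings.sock) and reacts to two plain-text messages:
# 'openConfigurationWindow' and 'exit' (see run()).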
class service_thread(threading.Thread):
def __init__(self, oeMain):
try:
oeMain.dbg_log('_service_::__init__', 'enter_function', 0)
self.oe = oeMain
self.wait_evt = threading.Event()
self.socket_file = '/var/run/service.openelec.settings.sock'
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.setblocking(1)
if os.path.exists(self.socket_file):
os.remove(self.socket_file)
self.sock.bind(self.socket_file)
self.sock.listen(1)
self.stopped = False
threading.Thread.__init__(self)
self.daemon = True
self.oe.dbg_log('_service_::__init__', 'exit_function', 0)
except Exception, e:
self.oe.dbg_log('_service_::__init__', 'ERROR: (' + repr(e) + ')')
def stop(self):
try:
self.oe.dbg_log('_service_::stop', 'enter_function', 0)
self.stopped = True
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(self.socket_file)
sock.send('exit')
sock.close()
self.sock.close()
self.oe.dbg_log('_service_::stop', 'exit_function', 0)
except Exception, e:
self.oe.dbg_log('_service_::stop', 'ERROR: (' + repr(e) + ')')
def run(self):
try:
self.oe.dbg_log('_service_::run', 'enter_function', 0)
if self.oe.read_setting('openelec', 'wizard_completed') == None:
threading.Thread(target=self.oe.openWizard).start()
while self.stopped == False:
self.oe.dbg_log('_service_::run', 'WAITING:', 1)
(conn, addr) = self.sock.accept()
message = conn.recv(1024)
self.oe.dbg_log('_service_::run',
'MESSAGE:' + repr(message), 1)
conn.close()
if message == 'openConfigurationWindow':
if not hasattr(self.oe, 'winOeMain'):
threading.Thread(target=self.oe.openConfigurationWindow).start()
else:
if self.oe.winOeMain.visible != True:
threading.Thread(target=self.oe.openConfigurationWindow).start()
if message == 'exit':
self.stopped = True
self.oe.dbg_log('_service_::run', 'exit_function', 0)
except Exception, e:
self.oe.dbg_log('_service_::run', 'ERROR: (' + repr(e) + ')')
class cxbmcm(xbmc.Monitor):
def __init__(self, *args, **kwargs):
xbmc.Monitor.__init__(self)
def onScreensaverActivated(self):
oe.__oe__.dbg_log('c_xbmcm::onScreensaverActivated', 'enter_function', 0)
if oe.__oe__.read_setting('bluetooth', 'standby'):
threading.Thread(target=oe.__oe__.standby_devices).start()
oe.__oe__.dbg_log('c_xbmcm::onScreensaverActivated', 'exit_function', 0)
def onAbortRequested(self):
pass
xbmcm = cxbmcm()
oe.load_modules()
oe.start_service()
monitor = service_thread(oe.__oe__)
monitor.start()
xbmcm.waitForAbort()
if hasattr(oe, 'winOeMain'):
if oe.winOeMain.visible == True:
oe.winOeMain.close()
oe.stop_service()
monitor.stop()
dereneaton/ipyrad | ipyrad/analysis/raxml.py | Python | gpl-3.0 | 9,380 | 0.00565
#!/usr/bin/python
""" wrapper to make simple calls to raxml """
import os
import sys
import glob
import subprocess
from ipyrad.analysis.utils import Params
from ipyrad.assemble.utils import IPyradError
# alias
OPJ = os.path.join
class Raxml(object):
"""
RAxML analysis utility function. This tool makes it easy to build a
raxml command line string and submit it as a job. It also makes it easy
to access the resulting tree files. Set params on the raxml object and
print(<object>.command) to see raxml command string. Call .run() to
submit the job running in background, or .run(block=True) to wait until
it finishes.
Parameters:
-----------
data: str
        The phylip formatted sequence file (.phy from ipyrad). An alias for '-s'.
name: str
The name for this run. An alias for '-n'.
workdir: str
The output directory for results. An alias for '-w'.
Additional optional parameters
-------------------------------
f: str
(-f a) The raxml function. Default is 'a'.
T: str
(-T 4) The number of threads. Default is 4.
m: str
(-m GTRGAMMA) The model to use.
N: str
(-N 100) The number of distinct starting trees from which to run full
search, or number of bootstrap replicates to run if using -f a.
x: str
(-x 12345) The bootstrap random seed.
p: str
(-p 54321) The parsimony random seed.
n: str
(-n test) The prefix name for output files
w: str
(-w outdir) The output directory
s: str
(-s seq.phy) The .phy formatted sequence file.
o: str or list
(-o tax1,tax2) A list of outgroup sample names or a string.
Attributes:
-----------
params: dict
parameters for this raxml run
command:
returns the command string to run raxml
Functions:
----------
run()
        submits a raxml job to run locally or on an ipyparallel client cluster.
"""
# init object for params
def __init__(
self,
data,
name="test",
workdir="analysis-raxml",
*args,
**kwargs):
# path attributes
self._kwargs = {
"f": "a",
"T": 4, # <- change to zero !?
"m": "GTRGAMMA",
"N": 100,
"x": 12345,
"p": 54321,
"o": None,
"binary": "",
}
# update kwargs for user args and drop key if value is None
self._kwargs.update(kwargs)
self._kwargs = {i: j for (i, j) in self._kwargs.items() if j is not None}
# check workdir
if workdir:
workdir = os.path.abspath(os.path.expanduser(workdir))
else:
workdir = os.path.abspath(os.path.curdir)
if not os.path.exists(workdir):
os.makedirs(workdir)
# store entered args in params object
self.params = Params()
self.params.n = name
self.params.w = workdir
self.params.s = os.path.abspath(os.path.expanduser(data))
        # if a binary was passed as a kwarg, put it at the top of the list of binaries to search
binaries = _get_binary_paths()
if self._kwargs["binary"]:
binaries = [self._kwargs["binary"]] + binaries
        # find a binary from the list
self.params.binary = _check_binaries(binaries)
# set params
notparams = set(["workdir", "name", "data", "binary"])
for key in set(self._kwargs.keys()) - notparams:
self.params[key] = self._kwargs[key]
        # attributes
self.rasync = None
self.stdout = None
self.stderr = None
# results files
self.trees = Params()
self.trees.bestTree = OPJ(workdir, "RAxML_bestTree." + name)
self.trees.bipartitionsBranchLabels = OPJ(workdir, "RAxML_bipartitionsBranchLabels." + name)
self.trees.bipartitions = OPJ(workdir, "RAxML_bipartitions." + name)
self.trees.bootstrap = OPJ(workdir, "RAxML_bootstrap." + name)
self.trees.info = OPJ(workdir, "RAxML_info." + name)
@property
def _command_list(self):
""" build the command list """
cmd = [
self.params.binary,
"-f", str(self.params.f),
"-T", str(self.params.T),
"-m", str(self.params.m),
"-n", str(self.params.n),
"-w", str(self.params.w),
"-s", str(self.params.s),
"-p", str(self.params.p),
]
if 'N' in self.params:
cmd += ["-N", str(self.params.N)]
if "x" in self.params:
cmd += ["-x", str(self.params.x)]
        # ultrafast bootstrap and mapping with -f d
# If no bootstraps then run -f D not -f a, and drop -x and -N
# if "-f D":
        # add outgroups
if 'o' in self.params:
cmd += ["-o"]
cmd += [",".join(self.params.o)]
return cmd
@property
def command(self):
""" returns command as a string """
return " ".join(self._command_list)
def run(
self,
ipyclient=None,
quiet=False,
force=False,
block=False,
):
"""
Submits raxml job to run. If no ipyclient object is provided then
the function will block until the raxml run is finished. If an
ipyclient is provided then the job is sent to a remote engine and an
asynchronous result object is returned which can be queried or awaited
until it finishes.
Parameters
-----------
ipyclient:
Not yet supported...
quiet:
suppress print statements
force:
overwrite existing results files with this job name.
block:
will block progress in notebook until job finishes, even if job
is running on a remote ipyclient.
"""
# force removes old files, a bit risky here if names are subsets
if force:
opath = os.path.join(
self.params.w, "RAxML_*.{}".format(self.params.n))
oldfiles = glob.glob(opath)
for oldfile in oldfiles:
if os.path.exists(oldfile):
os.remove(oldfile)
if os.path.exists(self.trees.info):
print("Error Files Exist: set a new name or use Force flag.\n{}"
.format(self.trees.info))
return
## TODO: add a progress bar tracker here. It could even read it from
## the info file that is being written.
## submit it
if not ipyclient:
proc = _call_raxml(self._command_list)
self.stdout = proc[0]
self.stderr = proc[1]
else:
# find all hosts and submit job to the host with most available engines
lbview = ipyclient.load_balanced_view()
self.rasync = lbview.apply(_call_raxml, self._command_list)
# initiate random seed
if not quiet:
if not ipyclient:
# look for errors
if "Overall execution time" not in self.stdout.decode():
print("Error in raxml run\n" + self.stdout.decode())
else:
print("job {} finished successfully".format(self.params.n))
else:
if block:
print("job {} running".format(self.params.n))
ipyclient.wait()
if self.rasync.successful():
print(
"job {} finished successfully"
.format(self.params.n))
else:
raise IPyradError(self.rasync.get())
else:
print("job {} submitted to cluster".format(self.params.n))
def _get_binary_paths():
# check for binary
list_binaries = [
"raxmlHPC-PTHREADS-AVX2",
"raxmlHPC-PTHREADS-AVX",
"raxmlHPC-PTHREADS-SSE3",
"raxmlHPC-PTHREADS",
]
# expand for env path
jumoconnect/openjumo | jumodjango/etc/func.py | Python | mit | 1,840 | 0.004934
# -*- coding: utf8 -*-
import re
from unidecode import unidecode
import os, sys
from hashlib import md5 as hasher
import binascii
import settings
def gen_flattened_list(iterables):
for item in iterables:
if hasattr(item, '__iter__'):
for i in item:
yield i
else:
yield item
def crc32(val):
return binascii.crc32(val) & 0xffffffff
# brennan added this
def wrap(text, width):
"""
A word-wrap function that preserves existing line breaks
and most spaces in the text. Expects that existing line
breaks are posix newlines (\n).
"""
return reduce(lambda line, word, width=width: '%s%s%s' %
(line,
' \n'[(len(line)-line.rfind('\n')-1
+ len(word.split('\n',1)[0]
) >= width)],
word),
text.split(' ')
)
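# For example (assuming Python 2, where reduce is a builtin):
#   wrap('the quick brown fox', 10)  ->  'the quick\nbrown fox'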
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.;:]+')
htmlCodes = (
    ('&amp;', '&'),
    ('&lt;', '<'),
    ('&gt;', '>'),
    ('&quot;', '"'),
    ('&#39;', "'"),
)
def escape_html(s):
for bad, good in htmlCodes:
s = s.replace(bad, good)
return s
def slugify(text, delim='', lowercase=True):
"""ex: slugify(u'Шамиль Абетуллаев','')
returns u'shamilabetullaev'"""
text = escape_html(text)
result = []
if lowercase:
        text=text.lower()
for word in _punct_re.split(text):
decoded = _punct_re.split(unidecode(word))
result.extend(decoded)
result = unicode(delim.join(result))
return result.lower() if lowercase else result
def salted_hash(val):
hash = hasher(settings.CRYPTO_SECRET)
hash.update(unicode(val, 'utf-8') if isinstance(val, str) else unicode(val))
return hash.hexdigest()
Blimeo/Java | out/production/matthew/Contests/CodeForces/pset6/President's Office.py | Python | apache-2.0 | 684 | 0.023392
inp = input().split()
n = int(inp[0])
m = int(inp[1])
c = inp[2]
k = []
for i in range(n):
k.append(list(input()))
lt = (-1, -1)
rt = (-1, -1)
for i in range(n):
for j in range(m):
if k[i][j] == c:
rt = (i, j)
if lt[0] < 0:
lt = (i, j)
ans = set()
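# Scan the bounding box of colour c (the president's desk, per the problem name) and
# collect the distinct colours of the desks adjacent to it; '.' marks empty floor.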
for i in range(lt[0], rt[0] + 1):
for j in range(lt[1], rt[1] + 1):
if i > 0 and k[i - 1][j] != c and k[i - 1][j] != '.':
            ans.add(k[i - 1][j])
if i < n - 1 and k[i + 1][j] != c and k[i + 1][j] != '.':
ans.add(k[i + 1][j])
if j > 0 and k[i][j - 1] != c and k[i][j - 1] != '.':
ans.add(k[i][j - 1])
if j < m - 1 and k[i][j + 1] != c and k[i][j + 1] != '.':
            ans.add(k[i][j + 1])
print(len(ans))
mahabuber/erpnext | erpnext/patches/v4_0/split_email_settings.py | Python | agpl-3.0 | 2,172 | 0.02302
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
print "WARNING!!!! Email Settings not migrated. Please setup your email again."
# this will happen if you are migrating very old accounts
# comment out this line below and remember to create new Email Accounts
# for incoming and outgoing mails
raise Exception
return
frappe.reload_doc("core", "doctype", "outgoing_email_settings")
frappe.reload_doc("support", "doctype", "support_email_settings")
email_settings = get_email_settings()
map_outgoing_email_settings(email_settings)
map_support_email_settings(email_settings)
def map_outgoing_email_settings(email_settings):
outgoing_email_settings = frappe.get_doc("Outgoing Email Settings")
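	# tuple entries map (old_fieldname, new_fieldname); plain strings keep the same field name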
for fieldname in (("outgoing_mail_server", "mail_server"),
"use_ssl", "mail_port", "mail_login", "mail_password",
"always_use_login_id_as_sender", "auto_email_id"):
if isinstance(fieldname, tuple):
from_fieldname, to_fieldname = fieldname
else:
from_fieldname = to_fieldname = fieldname
outgoing_email_settings.set(to_fieldname, email_settings.get(from_fieldname))
outgoing_email_settings._fix_numeric_types()
outgoing_email_settings.save()
def map_support_email_settings(email_settings):
support_email_settings = frappe.get_doc("Support Email Settings")
for fieldname in ("sync_support_mails", "support_email",
("support_host", "mail_server"),
("support_use_ssl", "use_ssl"),
("support_username", "mail_login"),
("support_password", "mail_password"),
"support_signature", "send_
|
autoreply", "support_autoreply"):
if isinstance(fieldname, tuple):
			from_fieldname, to_fieldname = fieldname
else:
from_fieldname = to_fieldname = fieldname
support_email_settings.set(to_fieldname, email_settings.get(from_fieldname))
support_email_settings._fix_numeric_types()
support_email_settings.save()
def get_email_settings():
ret = {}
for field, value in frappe.db.sql("select field, value from tabSingles where doctype='Email Settings'"):
ret[field] = value
return ret
OCA/OpenUpgrade | openupgrade_framework/odoo_patch/odoo/addons/base/models/__init__.py | Python | agpl-3.0 | 48 | 0
from . import ir_model
from . import ir_ui_view
plaid/plaid-python | plaid/model/recurring_transaction_frequency.py | Python | mit | 7,155 | 0.000699
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class RecurringTransactionFrequency(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('value',): {
'UNKNOWN': "UNKNOWN",
'WEEKLY': "WEEKLY",
'BIWEEKLY': "BIWEEKLY",
'SEMI_MONTHLY': "SEMI_MONTHLY",
'MONTHLY': "MONTHLY",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'value': (str,),
}
@cached_property
def discriminator():
return None
attribute_map = {}
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""RecurringTransactionFrequency - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str): describes the frequency of the transaction stream.., must be one of ["UNKNOWN", "WEEKLY", "BIWEEKLY", "SEMI_MONTHLY", "MONTHLY", ] # noqa: E501
Keyword Args:
value (str): describes the frequency of the transaction stream.., must be one of ["UNKNOWN", "WEEKLY", "BIWEEKLY", "SEMI_MONTHLY", "MONTHLY", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/security/cmd/permissions/tasking_dsz.py | Python | unlicense | 507 | 0.009862
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: tasking_dsz.py
import mcl.framework
import mcl.tasking
class dsz:
INTERFACE = 16842801
PFAM = 4169
PROVIDER_ANY = 4169
PROVIDER = 16846921
RPC_INFO_QUERY = mcl.tasking.RpcInfo(mcl.framework.DSZ, [INTERFACE, PROVIDER_ANY, 0])
RPC_INFO_MODIFY = mcl.tasking.RpcInfo(mcl.framework.DSZ, [INTERFACE, PROVIDER_ANY, 1])
mlperf/inference_results_v0.7 | open/Inspur/code/rnnt/tensorrt/preprocessing/convert_rnnt_data.py | Python | apache-2.0 | 11,154 | 0.004931
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Script to preprocess .wav files and convert them to .npy format
RNNT harness reads in .npy files
Example command line:
python3 convert_rnnt_data.py --batch_size 1 --output_dir <path> --cudnn_benchmark --dataset_dir <path> --val_manifest <path>/<name>-wav.json --model_toml configs/rnnt.toml
'''
import argparse
import itertools
import os
import torch
import numpy as np
import torchvision
from tqdm import tqdm
import math
import random
import toml
import sys
sys.path.insert(0, os.path.dirname(__file__))
from helpers import Optimization, print_dict, add_blank_label
from dataset import AudioToTextDataLayer
from preprocessing import AudioPreprocessing
def parse_args():
parser = argparse.ArgumentParser(description='Jasper')
parser.add_argument("--dataset_dir", type=str, help='absolute path to dataset folder')
parser.add_argument("--output_dir", type=str, help='absolute path for generated .npy files folder')
parser.add_argument("--val_manifest", type=str, help='relative path to evaluation dataset manifest file')
parser.add_argument("--batch_size", default=1, type=int, help='data batch size')
parser.add_argument("--fp16", action='store_true', help='use half precision')
parser.add_argument("--fixed_seq_length", default=512, type=int, help="produce .npy files with fixed sequence length")
parser.add_argument("--generate_wav_npy", default=True, type=str, help="produce wav .npy files with MAX length")
parser.add_argument("--fixed_wav_file_length", default=240000, type=int, help="produce wav .npy files with MAX length")
parser.add_argument("--seed", default=42, type=int, help='seed')
parser.add_argument("--model_toml", type=str, help='relative model configuration path given dataset folder')
parser.add_argument("--max_duration", default=None, type=float, help='maximum duration of sequences. if None uses attribute from model configuration file')
parser.add_argument("--pad_to", default=No
|
ne, type=int, help="default is pad to value as specified in model configurations. if -1 pad to maximum duration. If > 0 pad batch to next multiple of value")
return parser.parse_args()
def eval(
data_layer,
audio_processor,
args):
    if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
if not os.path.exists(args.output_dir + 'fp16'):
os.makedirs(args.output_dir + "fp16")
if not os.path.exists(args.output_dir + 'fp32'):
os.makedirs(args.output_dir + "fp32")
if not os.path.exists(args.output_dir + 'int32'):
os.makedirs(args.output_dir + "int32")
if(args.generate_wav_npy):
if not os.path.exists(args.output_dir + 'wav_files'):
os.makedirs(args.output_dir + "wav_files")
if not os.path.exists(args.output_dir + 'wav_files' + '/int32'):
os.makedirs(args.output_dir + 'wav_files' + '/int32')
if not os.path.exists(args.output_dir + 'wav_files' + '/fp32'):
os.makedirs(args.output_dir + 'wav_files' + '/fp32')
if not os.path.exists(args.output_dir + 'wav_files' + '/fp16'):
os.makedirs(args.output_dir + 'wav_files' + '/fp16')
fixed_seq_length = args.fixed_seq_length
val_map_filename = args.output_dir + "val_map_" + str(fixed_seq_length) + ".txt"
file_handle = open(val_map_filename, "w")
max_seq_length = 0
for it, data in enumerate(tqdm(data_layer.data_iterator)):
tensors = []
for d in data:
tensors.append(d)
file_handle.write("RNNT_input_" + str(fixed_seq_length) + "_" + str(it) + "\n")
if(args.generate_wav_npy):
t_audio_signal_e, t_a_sig_length_e, t_transcript_e, t_transcript_len_e = tensors
print("Audio signal = {} dtype = {} shape {} ".format(t_audio_signal_e, t_audio_signal_e.dtype, torch.numel(t_audio_signal_e)))
print("{} Audio signal length = {}".format(it, t_a_sig_length_e))
t_audio_signal_e_fp16 = t_audio_signal_e.to(torch.float16)
if t_a_sig_length_e <= args.fixed_wav_file_length:
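                    # shorter clips are zero-padded up to fixed_wav_file_length before being
                    # saved (fp32 and fp16 copies of the data plus an int32 length file)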
target = torch.zeros(args.fixed_wav_file_length, dtype=torch.float32)
target[:t_a_sig_length_e] = t_audio_signal_e
#print("Target = {}".format(target))
#print("Target num elements = {}".format(torch.numel(target)))
target_np = target.cpu().numpy()
file_name = args.output_dir + "wav_files/fp32/" + "RNNT_input_" + str(fixed_seq_length) + "_" + str(it) + ".npy"
np.save(file_name, target_np)
target = torch.zeros(args.fixed_wav_file_length, dtype=torch.float16)
target[:t_a_sig_length_e] = t_audio_signal_e_fp16
#print("Target = {}".format(target))
#print("Target num elements = {}".format(torch.numel(target)))
target_np = target.cpu().numpy()
file_name = args.output_dir + "wav_files/fp16/" + "RNNT_input_" + str(fixed_seq_length) + "_" + str(it) + ".npy"
np.save(file_name, target_np)
t_a_sig_length_e_int32 = t_a_sig_length_e.to(torch.int32)
t_a_sig_length_e_int32_np = t_a_sig_length_e_int32.cpu().numpy()
print("Length tensor = {}".format(t_a_sig_length_e_int32_np))
file_name = args.output_dir + "wav_files/int32/" + "RNNT_input_" + str(fixed_seq_length) + "_" + str(it) + ".npy"
np.save(file_name, t_a_sig_length_e_int32_np)
else:
target = t_audio_signal_e_fp16[:args.fixed_wav_file_length]
target_np = target.cpu().numpy()
file_name = args.output_dir + "wav_files/fp32/" + "RNNT_input_" + str(fixed_seq_length) + "_" + str(it) + ".npy"
np.save(file_name, target_np)
length_tensor = torch.Tensor([args.fixed_wav_file_length])
#print("Length_tensor = {}".format(length_tensor))
t_a_sig_length_e_int32 = length_tensor.to(torch.int32)
t_a_sig_length_e_int32_np = t_a_sig_length_e_int32.cpu().numpy()
print("Length tensor = {}".format(t_a_sig_length_e_int32_np))
file_name = args.output_dir + "wav_files/int32/" + "RNNT_input_" + str(fixed_seq_length) + "_" + str(it) + ".npy"
np.save(file_name, t_a_sig_length_e_int32_np)
t_audio_signal_e, t_a_sig_length_e, t_transcript_e, t_transcript_len_e = audio_processor(data)
seq_length, batch_size, num_features = t_audio_signal_e.size()
print("Seq length = {} Batch size = {} Features = {}".format(seq_length, batch_size, num_features))
if seq_length > max_seq_length:
max_seq_length = seq_length
t_audio_signal_e_fp16 = t_audio_signal_e.to(torch.float16)
t_audio_signal_e_fp16 = t_audio_signal_e_fp16.reshape(seq_length, num_features)
t_audio_signal_e_fp16_np = t_audio_signal_e_fp16.cpu().numpy()
t_audio_signal_e = t_audio_signal_e.reshape(seq_length, num_features)
t_audio_signal_e_np = t_audio_signal_e.cpu().numpy()
t_a_sig_length_e_int32 = t_a_sig_length_e.to(torch.int32)
t_a_sig_length_e_int32_np = t_a_sig_length_e_int32.cpu().numpy()
target_np = t_a_sig_length_e_int32_np
file_name = args.output_dir + "int32/RNNT_input_" + str(fixed_seq_length) + "_" + str(it) + ".npy"
np.save(file_name, target_np)
# Generating Fixed size seq_length
if seq_length <= fixed_seq_le
PW-Sat2/PWSat2OBC | integration_tests/emulator/beacon_parser/scrubbing_telemetry_parser.py | Python | agpl-3.0 | 445 | 0.002247
from parser import CategoryParser
class ScrubbingTelemetryParser(CategoryParser):
def __init__(self, reader, store):
CategoryParser.__init__(self, '05: Scrubbing State', reader, store)
def get_bit_count(self):
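        # 3 + 3 bits for the two flash scrubbing pointers plus a 32-bit RAM scrubbing
        # pointer, matching the fields appended in parse() below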
return 3 + 3 + 32
def parse(self):
self.append("Primary Flash Scrubbing pointer", 3)
self.append("Secondary Flash Scrubbing point
|
er", 3)
self.app
|
end_dword("RAM Scrubbing pointer")
disqus/django-old | tests/regressiontests/forms/localflavor/co.py | Python | bsd-3-clause | 1,661 | 0.000602
from django.contrib.localflavor.co.forms import CODepartmentSelect
from utils import LocalFlavorTestCase
class COLocalFlavorTests(LocalFlavorTestCase):
    def test_CODepartmentSelect(self):
        d = CODepartmentSelect()
out = u"""<select name="department">
<option value="AMA">Amazonas</option>
<option value="ANT">Antioquia</option>
<option value="ARA">Arauca</option>
<option value="ATL">Atl\xe1ntico</option>
<option value="DC">Bogot\xe1</option>
<option value="BOL">Bol\xedvar</option>
<option value="BOY">Boyac\xe1</option>
<option value="CAL">Caldas</option>
<option value="CAQ">Caquet\xe1</option>
<option value="CAS">Casanare</option>
<option value="CAU">Cauca</option>
<option value="CES">Cesar</option>
<option value="CHO">Choc\xf3</option>
<option value="COR" selected="selected">C\xf3rdoba</option>
<option value="CUN">Cundinamarca</option>
<option value="GUA">Guain\xeda</option>
<option value="GUV">Guaviare</option>
<option value="HUI">Huila</option>
<option value="LAG">La Guajira</option>
<option value="MAG">Magdalena</option>
<option value="MET">Meta</option>
<option value="NAR">Nari\xf1o</option>
<option value="NSA">Norte de Santander</option>
<option value="PUT">Putumayo</option>
<option value="QUI">Quind\xedo</option>
<option value="RIS">Risaralda</option>
<option value="SAP">San Andr\xe9s and Providencia</option>
<option value="SAN">Santander</option>
<option value="SUC">Sucre</option>
<option value="TOL">Tolima</option>
<option value="VAC">Valle del Cauca</option>
<option value="VAU">Vaup\xe9s</option>
<option value="VID">Vichada</option>
</select>"""
self.assertEqual(d.render('department', 'COR'), out)
dev-coop/meancoach | meancoach_project/settings/test.py | Python | mit | 136 | 0
from base import *
DATABASES = {
    'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}