repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
xkong/baniugui | dict4ini/p3.py | Python | mit | 5,024 | 0.014928 | # $Id: p3.py,v 1.2 2003/11/18 19:04:03 phr Exp phr $
# Simple p3 encryption "algorithm": it's just SHA used as a stream
# cipher in output feedback mode.
# Author: Paul Rubin, Fort GNOX Cryptography, <phr-crypto at nightsong.com>.
# Algorithmic advice from David Wagner, Richard Parker, Bryan
# Olson, and Paul Crowley on sci.crypt is gratefully acknowledged.
# Copyright 2002,2003 by Paul Rubin
# Copying license: same as Python 2.3 license
# Please include this revision number in any bug reports: $Revision: 1.2 $.
from string import join
from array import array
try:
import hashlib as sha
except:
import sha
from time import time
class CryptError(Exception): pass
def _hash(str): return sha.new(str).digest()
_ivlen = 16
_maclen = 8
_state = _hash(`time()`)
try:
import os
_pid = `os.getpid()`
except ImportError, AttributeError:
_pid = ''
def _expand_key(key, clen):
blocks = (clen+19)/20
xkey=[]
seed=key
for i in xrange(blocks):
seed=sha.new(key+seed).digest()
xkey.append(seed)
j = join(xkey,'')
return array ('L', j)
def p3_encrypt(plain,key):
global _state
H = _hash
# change _state BEFORE using it to compute nonce, in case there's
# a thread switch between computing the nonce and folding it into
# the state. This way if two threads compute a nonce from the
# same data, they won't both get the same nonce. (There's still
# a small danger of a duplicate nonce--see below).
_state = 'X'+_state
# Attempt to make nlist unique for each call, so we can get a
# unique nonce. It might be good to include a process ID or
# something, but I don't know if that's portable between OS's.
# Since is based partly on both the key and plaintext, in the
# worst case (encrypting the same plaintext with the same key in
# two separate Python instances at the same time), you might get
# identical ciphertexts for the identical plaintexts, which would
# be a security failure in some applications. Be careful.
nlist = [`time()`, _pid, _state, `len(plain)`,plain, key]
nonce = H(join(nlist,','))[:_ivlen]
_state = H('update2'+_state+nonce)
k_enc, k_auth = H('enc'+key+nonce), H('auth'+key+nonce)
n=len(plain) # cipher size not counting IV
stream = array('L', plain+'0000'[n&3:]) # pad to fill 32-bit words
xkey = _expand_key(k_enc, n+4)
for i in xrange(len(stream)):
stream[i] = stream[i] ^ xkey[i]
ct = nonce + stream.tostring()[:n]
auth = _hmac(ct, k_auth)
return ct + auth[:_maclen]
def p3_decrypt(cipher,key):
H = _hash
n=len(cipher)-_ivlen-_maclen # length of ciphertext
if n < 0:
raise CryptError, "invalid ciphertext"
nonce,stream,auth = \
cipher[:_ivlen], cipher[_ivlen:-_maclen]+'0000'[n&3:],cipher[-_maclen:]
k_enc, k_auth = H('enc'+key+nonce), H('auth'+key+nonce)
vauth = _hmac (cipher[:-_maclen], k_auth)[:_maclen]
if auth != vauth:
raise CryptError, "invalid key or ciphertext"
stream = array('L', stream)
xkey = _expand_key (k_enc, n+4)
for i in xrange (len(stream)):
stream[i] = stream[i] ^ xkey[i]
plain = stream.tostring()[:n]
return plain
# RFC 2104 HMAC message authentication code
# This implementation is faster than Python 2.2's hmac.py, and also works in
# old Python versions (at least as old as 1. | 5.2).
from string import translate
def _hmac_setup():
global _ipad, _opad, _itrans, _otrans
_itrans = a | rray('B',[0]*256)
_otrans = array('B',[0]*256)
for i in xrange(256):
_itrans[i] = i ^ 0x36
_otrans[i] = i ^ 0x5c
_itrans = _itrans.tostring()
_otrans = _otrans.tostring()
_ipad = '\x36'*64
_opad = '\x5c'*64
def _hmac(msg, key):
if len(key)>64:
key=sha.new(key).digest()
ki = (translate(key,_itrans)+_ipad)[:64] # inner
ko = (translate(key,_otrans)+_opad)[:64] # outer
return sha.new(ko+sha.new(ki+msg).digest()).digest()
#
# benchmark and unit test
#
def _time_p3(n=1000,len=20):
plain="a"*len
t=time()
for i in xrange(n):
p3_encrypt(plain,"abcdefgh")
dt=time()-t
print "plain p3:", n,len,dt,"sec =",n*len/dt,"bytes/sec"
def _speed():
_time_p3(len=5)
_time_p3()
_time_p3(len=200)
_time_p3(len=2000,n=100)
def _test():
e=p3_encrypt
d=p3_decrypt
plain="test plaintext"
key = "test key"
c1 = e(plain,key)
c2 = e(plain,key)
assert c1!=c2
assert d(c2,key)==plain
assert d(c1,key)==plain
c3 = c2[:20]+chr(1+ord(c2[20]))+c2[21:] # change one ciphertext character
try:
print d(c3,key) # should throw exception
print "auth verification failure"
except CryptError:
pass
try:
print d(c2,'wrong key') # should throw exception
print "test failure"
except CryptError:
pass
_hmac_setup()
#_test()
# _speed() # uncomment to run speed test |
rspavel/spack | var/spack/repos/builtin/packages/py-pygobject/package.py | Python | lgpl-2.1 | 2,511 | 0.001991 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPygobject(PythonPackage):
"""bindings for the GLib, and GObject,
to be used in Python."""
homepage = "https://pypi.python.org/pypi/pygobject"
version('3.28.3', sha256='3dd3e21015d06e00482ea665fc1733b77e754a6ab656a5db5d7f7bfaf31ad0b0')
version('2.28.6', sha256='fb8a1d4f665130a125011659bd347c7339c944232163dbb9a34fd0686577adb8')
version('2.28.3', sha256='7da88c169a56efccc516cebd9237da3fe518a343095a664607b368fe21df95b6',
url='http://ftp.gnome.org/pub/GNOME/sources/pygobject/2.28/pygobject-2.28.3.tar.bz2')
extends('python')
depends_on('py-setuptools', type='build')
depends_on('pkgconfig', type='build')
depends_on("libffi")
depends_on('glib')
depends_on('python@2:2.99', when='@2:2.99', type=('build', 'run'))
depends_on('py-pycairo', type | =('build', 'run'), when='@3:')
depends_on('py-py2cairo', type=('build', 'run'), when='@2:2.99')
depends_on('gobject-introspection')
depends_on('gtkplus', w | hen='@3:')
patch('pygobject-2.28.6-introspection-1.patch', when='@2.28.3:2.28.6')
# patch from https://raw.githubusercontent.com/NixOS/nixpkgs/master/pkgs/development/python-modules/pygobject/pygobject-2.28.6-gio-types-2.32.patch
# for https://bugzilla.gnome.org/show_bug.cgi?id=668522
patch('pygobject-2.28.6-gio-types-2.32.patch', when='@2.28.6')
# pygobject links directly using the compiler, not spack's wrapper.
# This causes it to fail to add the appropriate rpaths. This patch modifies
# pygobject's setup.py file to add -Wl,-rpath arguments for dependent
# libraries found with pkg-config.
patch('pygobject-3.28.3-setup-py.patch', when='@3.28.3')
def url_for_version(self, version):
url = 'http://ftp.gnome.org/pub/GNOME/sources/pygobject'
return url + '/%s/pygobject-%s.tar.xz' % (version.up_to(2), version)
# pygobject version 2 requires an autotools build
@when('@2:2.99')
def build(self, spec, prefix):
configure('--prefix=%s' % spec.prefix)
@when('@2:2.99')
def install(self, spec, prefix):
make('install', parallel=False)
@when('^python@3:')
def patch(self):
filter_file(
r'Pycairo_IMPORT',
r'//Pycairo_IMPORT',
'gi/pygi-foreign-cairo.c')
|
EnigmaCurry/ccm | tests/test_lib.py | Python | apache-2.0 | 1,241 | 0.002417 | impor | t sys
sys.path = [".."] + sys.path
from . import TEST_DIR
from ccmlib.cluster import Cluster
CLUSTER_PATH = TEST_DIR
def test1():
cluster = Cluster(CLUSTER_PATH, "test1", cassandra_version='2.0.3')
cl | uster.show(False)
cluster.populate(2)
cluster.set_partitioner("Murmur3")
cluster.start()
cluster.set_configuration_options(None, None)
cluster.set_configuration_options({}, True)
cluster.set_configuration_options({"a": "b"}, False)
[node1, node2] = cluster.nodelist()
node2.compact()
cluster.flush()
cluster.remove()
cluster.stop()
def test2():
cluster = Cluster(CLUSTER_PATH, "test2", cassandra_version='2.0.3')
cluster.populate(2)
cluster.start()
cluster.set_log_level("ERROR")
class FakeNode:
name = "non-existing node"
cluster.remove(FakeNode())
[node1, node2] = cluster.nodelist()
cluster.remove(node1)
cluster.show(True)
cluster.show(False)
#cluster.stress([])
cluster.compact()
cluster.drain()
cluster.stop()
def test3():
cluster = Cluster(CLUSTER_PATH, "test3", cassandra_version='2.0.3')
cluster.populate(2)
cluster.start()
cluster.cleanup()
cluster.clear()
cluster.stop()
|
google/vulkan_test_applications | gapid_tests/synchronization_tests/vkDeviceWaitIdle_test/vkDeviceWaitIdle_test.py | Python | apache-2.0 | 1,043 | 0 | # Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gapit_test_framework import gapit_test, require, require_eq | ual
from gapit_test_framework import GapitTest, require_not_equal
from vulkan_constants import VK_SUCCESS
@gapit_test("vkDeviceWaitIdle_test")
class WaitForSingleQueue(GapitTest):
def expect(self):
device_wait_idle = require(self.nth_call_of("vkDeviceWaitIdle", 1))
require_not_equal(0, device_wait_idle.int_device)
require_equal(VK_SUCCESS, | int(device_wait_idle.return_val))
|
lnielsen/invenio3 | setup.py | Python | gpl-2.0 | 4,365 | 0.000229 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., | 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Invenio Digital Library Framework."""
import os
import sys
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
readme = open('README.rst').read()
history = open('CHANGES.rst').read()
t | ests_require = [
'check-manifest>=0.25',
'coverage>=4.0',
'isort>=4.2.2',
'pep257>=0.6.0',
'pytest-cache>=1.0',
'pytest-cov>=1.8.0',
'pytest-pep8>=1.0.6',
'pytest>=2.8.0',
]
extras_require = {
'minimal': [
'invenio-base>=0.1.0.dev20150000',
],
'core': [
'invenio-assets>=0.1.0.dev20150000',
'invenio-theme>=0.1.0.dev20150000',
'invenio-records>=0.1.0.dev20150000',
],
'docs': [
'Sphinx>=1.3',
],
'tests': tests_require,
}
extras_require['all'] = []
for reqs in extras_require.values():
extras_require['all'].extend(reqs)
setup_requires = [
'Babel>=1.3',
]
install_requires = [
]
packages = find_packages()
class PyTest(TestCommand):
"""PyTest Test."""
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
"""Init pytest."""
TestCommand.initialize_options(self)
self.pytest_args = []
try:
from ConfigParser import ConfigParser
except ImportError:
from configparser import ConfigParser
config = ConfigParser()
config.read('pytest.ini')
self.pytest_args = config.get('pytest', 'addopts').split(' ')
def finalize_options(self):
"""Finalize pytest."""
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
"""Run tests."""
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
# Get the version string. Cannot be done with import!
g = {}
with open(os.path.join('invenio', 'version.py'), 'rt') as fp:
exec(fp.read(), g)
version = g['__version__']
setup(
name='invenio3',
version=version,
description=__doc__,
long_description=readme + '\n\n' + history,
keywords='Invenio digital library framework',
license='GPLv2',
author='CERN',
author_email='info@invenio-software.org',
url='https://github.com/inveniosoftware/invenio',
packages=packages,
zip_safe=False,
include_package_data=True,
platforms='any',
entry_points={},
extras_require=extras_require,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 1 - Planning',
],
cmdclass={'test': PyTest},
)
|
rocco8773/bapsflib | bapsflib/_hdf/maps/controls/sixk.py | Python | bsd-3-clause | 16,445 | 0.000426 | # This file is part of the bapsflib package, a Python toolkit for the
# BaPSF group at UCLA.
#
# http://plasma.physics.ucla.edu/
#
# Copyright 2017-2018 Erik T. Everson and contributors
#
# License: Standard 3-clause BSD; see "LICENSES/LICENSE.txt" for full
# license terms and contributor agreement.
#
"""
Module for the 6K Compumotor motion control mapper
`~bapsflib._hdf.maps.controls.sixk.HDFMapControl6K`.
"""
__all__ = ["HDFMapControl6K"]
import h5py
import numpy as np
import re
from warnings import warn
from bapsflib.utils import _bytes_to_str
from bapsflib.utils.exceptions import HDFMappingError
from .templates import HDFMapControlTemplate
from .types import ConType
class HDFMapControl6K(HDFMapControlTemplate):
"""
Mapping module for control device '6K Compumotor'.
Simple group structure looks like:
.. code-block:: none
+-- 6K Compumotor
| +-- Motion list: <name>
| | +--
| +-- Probe: XY[<receptacle #>]: <probe name>
| | +-- Axes[0]
| | | +--
| | +-- Axes[1]
| | | +--
| +-- XY[<receptacle #>]: <probe name>
"""
def __init__(self, group: h5py.Group):
"""
:param group: the HDF5 control device group
"""
HDFMapControlTemplate.__init__(self, group)
# define control type
self._info["contype"] = ConType.motion
# populate self.configs
self._build_configs()
def _build_configs(self):
"""Builds the :attr:`configs` dictionary."""
# build order:
# 1. build a local motion list dictionary
# 2. build a local probe list dictionary
# 3. build configs dict
#
# TODO: HOW TO ADD MOTION LIST TO DICT
# - right now, the dataset has to be read which has the
# potential for creating long mapping times
# - this is probably best left to HDFReadControls
#
# build 'motion list' and 'probe list'
_motion_lists = {}
_probe_lists = {}
for name in self.subgroup_names:
ml_stuff = self._analyze_motionlist(name)
if bool(ml_stuff):
# build 'motion list'
_motion_lists[ml_stuff["name"]] = ml_stuff["config"]
else:
pl_stuff = self._analyze_probelist(name)
if bool(pl_stuff):
# build 'probe list'
_probe_lists[pl_stuff["probe-id"]] = pl_stuff["config"]
# ensure a PL item (config group) is found
if len(_probe_lists) == 0:
why = "has no mappable configurations (Probe List groups)"
raise HDFMappingError(self._info["group path"], why=why)
# build configuration dictionaries
# - the receptacle number is the config_name
# - each probe is one-to-one with receptacle number
#
for pname in _probe_lists:
# define configuration name
config_name = _probe_lists[pname]["receptacle"]
# initialize _configs
self._configs[config_name] = {}
# ---- define general info values ----
# - this has to be done before getting the dataset since
# the _configs dist is used by construct_dataset_name()
#
# add motion list info
self._configs[config_name]["motion lists"] = _motion_lists
# add probe info
self._conf | igs[config_name]["probe"] = _probe_lists[pname]
# add 'receptacle'
self._configs[config_name]["receptacle"] = self._configs[config_name][
"probe"
]["receptacle"]
| # ---- get configuration dataset ----
try:
dset_name = self.construct_dataset_name(config_name)
dset = self.group[dset_name]
except (KeyError, ValueError):
# KeyError: the dataset was not found
# ValueError: the dataset name was not properly
# constructed
#
why = (
f"Dataset for configuration '{pname}' could not be "
f"determined or found."
)
raise HDFMappingError(self._info["group path"], why=why)
# ---- define 'dset paths' ----
self._configs[config_name]["dset paths"] = (dset.name,)
# ---- define 'shotnum' ----
# initialize
self._configs[config_name]["shotnum"] = {
"dset paths": self._configs[config_name]["dset paths"],
"dset field": ("Shot number",),
"shape": dset.dtype["Shot number"].shape,
"dtype": np.int32,
}
# ---- define 'state values' ----
self._configs[config_name]["state values"] = {
"xyz": {
"dset paths": self._configs[config_name]["dset paths"],
"dset field": ("x", "y", "z"),
"shape": (3,),
"dtype": np.float64,
},
"ptip_rot_theta": {
"dset paths": self._configs[config_name]["dset paths"],
"dset field": ("theta",),
"shape": (),
"dtype": np.float64,
},
"ptip_rot_phi": {
"dset paths": self._configs[config_name]["dset paths"],
"dset field": ("phi",),
"shape": (),
"dtype": np.float64,
},
}
def construct_dataset_name(self, *args) -> str:
# The first arg passed is assumed to be the receptacle number.
# If none are passed and there is only one receptacle deployed,
# then the deployed receptacle is assumed.
# get list of configurations
# - configuration names are receptacle numbers
#
_receptacles = list(self.configs)
# get receptacle number
err = True
rnum = -1
if len(args) == 0:
if len(_receptacles) == 1:
# assume the sole receptacle number
rnum = _receptacles[0]
err = False
else: # len(args) >= 1:
receptacle = args[0]
if receptacle in _receptacles:
rnum = receptacle
err = False
if err:
raise ValueError(
f"A valid receptacle number needs to be passed: {_receptacles}"
)
# Find matching probe to receptacle
# - note that probe naming in the HDF5 are not consistent, this
# is why dataset name is constructed based on receptacle and
# not probe name
#
pname = self._configs[rnum]["probe"]["probe name"]
# Construct dataset name
dname = f"XY[{rnum}]: {pname}"
# return
return dname
def _analyze_motionlist(self, gname: str) -> dict:
"""
Determines if `gname` matches the RE for a motion list group
name. It yes, then it gathers the motion list info.
:param str gname: name of potential motion list group
:return: dictionary with `'name'` and `'config'` keys
"""
# Define RE pattern
# - A motion list group follows the naming scheme of:
#
# 'Motion list: <NAME>'
#
# where <NAME> is the motion list name
#
_pattern = r"(\bMotion list:\s)(?P<NAME>.+\b)"
# match _pattern against gname
_match = re.fullmatch(_pattern, gname)
# gather ml info
# - Note: a missing HDF5 attribute will not cause the mapping to
# fail, the associated mapping item will be given an
# appropriate None vale
#
if _match is not None:
# define motion list dict
ml = {"name": _match.group("NAME"), "config" |
plaid/plaid-python | plaid/model/link_token_account_filters.py | Python | mit | 7,714 | 0.000519 | """
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from plaid.model.credit_filter import CreditFilter
from plaid.model.depository_filter import DepositoryFilter
from plaid.model.investment_filter import InvestmentFilter
from plaid.model.loan_filter import LoanFilter
globals()['CreditFilter'] = CreditFilter
globals()['DepositoryFilter'] = DepositoryFilter
globals()['InvestmentFilter'] = InvestmentFilter
globals()['LoanFilter'] = LoanFilter
class LinkTokenAccountFilters(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
| 'd | epository': (DepositoryFilter,), # noqa: E501
'credit': (CreditFilter,), # noqa: E501
'loan': (LoanFilter,), # noqa: E501
'investment': (InvestmentFilter,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'depository': 'depository', # noqa: E501
'credit': 'credit', # noqa: E501
'loan': 'loan', # noqa: E501
'investment': 'investment', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""LinkTokenAccountFilters - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
depository (DepositoryFilter): [optional] # noqa: E501
credit (CreditFilter): [optional] # noqa: E501
loan (LoanFilter): [optional] # noqa: E501
investment (InvestmentFilter): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
|
openstack/freezer-api | freezer_api/tests/unit/sqlalchemy/test_migrations.py | Python | apache-2.0 | 12,143 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for database migrations. This test case reads the configuration
file test_migrations.conf for database connection settings
to use in the tests. For each connection found in the config file,
the test case runs a series of test cases to ensure that migrations work
properly both upgrading and downgrading, and that no data loss occurs
if possible.
"""
import os
import fixtures
from migrate.versioning import api as migration_api
from migrate.versioning import repository
from oslo_db.sqlalchemy import test_migrations
from oslo_db.sqlalchemy import utils as db_utils
# from oslo_db.tests.sqlalchemy import base as test_base
import sqlalchemy
from sqlalchemy.engine import reflection
from freezer_api.db.sqlalchemy import driver
import freezer_api.db.sqlalchemy.migrate_repo
class MigrationsMixin(test_migrations.WalkVersionsMixin):
"""Test sqlalchemy-migrate migrations."""
BOOL_TYPE = sqlalchemy.types.BOOLEAN
TIME_TYPE = sqlalchemy.types.DATETIME
INTEGER_TYPE = sqlalchemy.types.INTEGER
VARCHAR_TYPE = sqlalchemy.types.VARCHAR
TEXT_TYPE = sqlalchemy.types.Text
@property
def INIT_VERSION(self):
return driver.INIT_VERSION
@property
def REPOSITORY(self):
migrate_file = freezer_api.db.sqlalchemy.migrate_repo.__file__
return repository.Repository(
os.path.abspath(os.path.dirname(migrate_file)))
@property
def migration_api(self):
return migration_api
@property
def migrate_engine(self):
return self.engine
def get_table_ref(self, engine, name, metadata):
metadata.bind = engine
return sqlalchemy.Table(name, metadata, autoload=True)
class BannedDBSchemaOperations(fixtures.Fixture):
"""Ban some operations for migrations"""
def __init__(self, banned_resources=None):
super(MigrationsMixin.BannedDBSchemaOperations, self).__init__()
self._banned_resources = banned_resources or []
@staticmethod
def _explode(resource, op):
print('%s.%s()' % (resource, op)) # noqa
raise Exception(
'Operation %s.%s() is not allowed in a database migration' % (
resource, op))
def setUp(self):
super(MigrationsMixin.BannedDBSchemaOperations, self).setUp()
for thing in self._banned_resources:
self.useFixture(fixtures.MonkeyPatch(
'sqlalchemy.%s.drop' % thing,
lambda *a, **k: self._explode(thing, 'drop')))
self.useFixture(fixtures.MonkeyPatch(
'sqlalchemy.%s.alter' % thing,
lambda *a, **k: self._explode(thing, 'alter')))
def migrate_up(self, version, with_data=False):
# NOTE(dulek): This is a list of migrations where we allow dropping
# things. The rules for adding things here are very very specific.
# Insight on how to drop things from the DB in a backward-compatible
# manner is provided in Cinder's developer documentation.
# Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE WITHOUT CARE
exceptions = [3]
if version not in exceptions:
banned = ['Table', 'Column']
else:
banned = None
with MigrationsMixin.BannedDBSchemaOperations(banned):
super(MigrationsMixin, self).migrate_up(version, with_data)
def assertColumnExists(self, engine, table, column):
t = db_utils.get_table(engine, table)
self.assertIn(column, t.c)
def assertColumnsExist(self, engine, table, columns):
for column in columns:
self.assertColumnExists(engine, table, column)
def assertColumnType(self, engine, table, column, column_type):
t = db_utils.get_table(engine, table)
column_ref_type = str(t.c[column].type)
self.assertEqual(column_ref_type, column_type)
def assertColumnCount(self, engine, table, columns):
t = db_utils.get_table(engine, table)
self.assertEqual(len(columns), len(t.columns))
def assertColumnNotExists(self, engine, table, column):
t = db_utils.get_table(engine, table)
self.assertNotIn(column, t.c)
def assertIndexExists(self, engine, table, index):
t = db_utils.get_table(engine, table)
index_names = [idx.name for idx in t.indexes]
self.assertIn(index, index_names)
def __check_cinderbase_fields(self, columns):
"""Check fields inherited from CinderBase ORM class."""
self.assertIsInstance(columns.created_at.type, self.TIME_TYPE)
self.assertIsInstance(columns.updated_at.type, self.TIME_TYPE)
| self.assertIsInstance(columns.deleted_at.type, self.TIME_TYPE)
self.assertIsInstance( | columns.deleted.type, self.BOOL_TYPE)
def _check_001(self, engine, data):
clients_columns = [
'created_at',
'updated_at',
'deleted_at',
'deleted',
'user_id',
'id',
'project_id',
'client_id',
'hostname',
'description',
'uuid',
]
self.assertColumnsExist(
engine, 'clients', clients_columns)
self.assertColumnCount(
engine, 'clients', clients_columns)
sessions_columns = [
'created_at',
'updated_at',
'deleted_at',
'deleted',
'id',
'session_tag',
'description',
'hold_off',
'schedule',
'job',
'project_id',
'user_id',
'time_start',
'time_end',
'time_started',
'time_ended',
'status',
'result',
]
self.assertColumnsExist(
engine, 'sessions', sessions_columns)
self.assertColumnCount(
engine, 'sessions', sessions_columns)
jobs_columns = [
'created_at',
'updated_at',
'deleted_at',
'deleted',
'id',
'project_id',
'user_id',
'schedule',
'client_id',
'session_id',
'session_tag',
'description',
'job_actions',
]
self.assertColumnsExist(
engine, 'jobs', jobs_columns)
self.assertColumnCount(
engine, 'jobs', jobs_columns)
actions_columns = [
'created_at',
'updated_at',
'deleted_at',
'deleted',
'id',
'action',
'project_id',
'user_id',
'actionmode',
'src_file',
'backup_name',
'container',
'timeout',
'priority',
'max_retries_interval',
'max_retries',
'mandatory',
'log_file',
'backup_metadata',
]
self.assertColumnsExist(
engine, 'actions', actions_columns)
self.assertColumnCount(
engine, 'actions', actions_columns)
action_reports_columns = [
'created_at',
'updated_at',
'deleted_at',
'deleted',
'id',
'project_id',
'user_id',
'result',
'time_elapsed',
'report_date',
'log',
]
self.assertColumnsExist(
engine, 'action_reports', action_reports_columns)
self.assertColumnCount(
|
biancini/met | met/metadataparser/entity_export.py | Python | bsd-2-clause | 4,400 | 0.001364 | #################################################################
# MET v2 Metadate Explorer Tool
#
# This Software is Open Source. See License: https://github.com/TERENA/met/blob/master/LICENSE.md
# Copyright (c) 2012, TERENA All rights reserved.
#
# This Software is based on MET v1 developed for TERENA by Yaco Sistemas, http://www.yaco.es/
# MET v2 was developed for TERENA by Tamim Ziai, DAASI International GmbH, http://www.daasi.de
# Current version of MET has been revised for performance improvements by Andrea Biancini,
# Consortium GARR, http://www.garr.it
##########################################################################
import csv
from xml.dom.minidom import Document
from django.http import HttpResponse, HttpResponseBadRequest
from django.template.defaultfilters import slugify
import simplejson as json
class SetEncoder(json.JSONEncoder):
    """JSON encoder that serializes ``set`` values as JSON arrays.

    Every other type falls back to the stock encoder behaviour
    (which raises ``TypeError`` for non-serializable objects).
    """

    def default(self, obj):
        # Sets are not natively JSON-serializable; emit them as lists.
        if not isinstance(obj, set):
            return json.JSONEncoder.default(self, obj)
        return list(obj)
def _serialize_value_to_csv(value):
if type(value) is list:
vallist = [_serialize_value_to_csv(v) for v in value]
serialized = ", ".join(vallist)
elif type(value) is dict:
vallist = [_serialize_value_to_csv(v) for v in value.values()]
serialized = ", ".join(vallist)
else:
serialized = "%s" % value
return serialized
def export_entity_csv(entity):
    """Serialize *entity* to a one-row CSV file served as a download.

    The header row holds the keys of ``entity.to_dict()`` and the
    single data row the flattened values (see _serialize_value_to_csv).
    """
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = ('attachment; filename=%s.csv'
                                       % slugify(entity))
    writer = csv.writer(response)
    edict = entity.to_dict()
    # Header and data row are derived from the same dict object, so
    # keys() and items() iterate in the same order.
    writer.writerow(edict.keys())
    # Write data to CSV file
    row = []
    for _, value in edict.items():
        row.append(_serialize_value_to_csv(value))
    # NOTE(review): this targets Python 2, where encode() strips
    # non-ASCII characters; on Python 3 it would yield bytes cells.
    row_ascii = [v.encode("ascii", "ignore") for v in row]
    writer.writerow(row_ascii)
    # Return CSV file to browser as download
    return response
def export_entity_json(entity):
    """Serialize *entity* to JSON and serve it as a file download."""
    # Return JS file to browser as download
    # SetEncoder makes ``set`` values JSON-serializable (as lists).
    serialized = json.dumps(entity.to_dict(), cls=SetEncoder)
    response = HttpResponse(serialized, content_type='application/json')
    response['Content-Disposition'] = ('attachment; filename=%s.json'
                                       % slugify(entity))
    return response
class Dict2XML(object):
    """Serialize a nested dict/list structure into pretty-printed XML.

    Adapted from:
    http://stackoverflow.com/questions/1019895/serialize-python-dictionary-to-xml

    The input must be a single-key dict; that key becomes the root
    element.  Dict keys map to child elements, a list repeats its
    parent tag once per item, and any other value becomes text content.
    """

    def __init__(self, structure):
        # Each instance must own its own DOM document.  The original
        # code used a *class-level* ``doc = Document()``, which was
        # shared by every instance: repeated exports accumulated the
        # previous content and tried to append a second root node.
        self.doc = Document()
        if len(structure) == 1:
            # ``next(iter(...))`` works on both Python 2 and 3
            # (``structure.keys()[0]`` is Python 2 only).
            root_name = str(next(iter(structure)))
            self.root = self.doc.createElement(root_name)
            self.doc.appendChild(self.root)
            self.build(self.root, structure[root_name])

    def build(self, father, structure):
        """Recursively append *structure* underneath element *father*."""
        if type(structure) == dict:
            for key in structure:
                tag = self.doc.createElement(key)
                father.appendChild(tag)
                self.build(tag, structure[key])
        elif type(structure) == list:
            # A list repeats the parent tag once per item instead of
            # nesting items inside a single wrapper element.
            grand_father = father.parentNode
            tag_name = father.tagName
            grand_father.removeChild(father)
            for item in structure:
                tag = self.doc.createElement(tag_name)
                self.build(tag, item)
                grand_father.appendChild(tag)
        else:
            # Leaf value: stringify and store as a text node.  (The
            # original Python 2 code ASCII-encoded unicode text with
            # xmlcharrefreplace; minidom handles non-ASCII natively.)
            tag = self.doc.createTextNode("%s" % (structure,))
            father.appendChild(tag)

    def __str__(self):
        return self.doc.toprettyxml(indent="  ")
def export_entity_xml(entity):
    """Serialize *entity* to pretty-printed XML served as a download."""
    entity_xml = Dict2XML({"Entity": entity.to_dict()})
    # Return XML file to browser as download
    response = HttpResponse(str(entity_xml), content_type='application/xml')
    response['Content-Disposition'] = ('attachment; filename=%s.xml'
                                       % slugify(entity))
    return response
# Dispatch table mapping an export-format name to its view helper.
export_entity_modes = {
    'csv': export_entity_csv,
    'json': export_entity_json,
    'xml': export_entity_xml,
}


def export_entity(mode, entity):
    """Export *entity* in *mode* ('csv', 'json' or 'xml').

    Unknown modes yield an HTTP 400 response instead of raising.
    """
    if mode in export_entity_modes:
        return export_entity_modes[mode](entity)
    else:
        content = "Error 400, Format %s is not supported" % mode
        return HttpResponseBadRequest(content)
|
NeotomaDB/Neotoma_SQL | tilia_check.py | Python | mit | 2,487 | 0.002413 | """ Check all newly written functions for the Neotoma postgres DB against
the old functions written in SQL Server T-SQL.
by: Simon Goring """
from sys import argv
from re import sub
from colorama import Fore
from colorama import Style
import requests
# Endpoints of the legacy (SQL Server) and new (Postgres) Tilia APIs.
tilia_uri = 'http://tilia.neotomadb.org/Retrieve/'
dev_uri = 'http://tilia-dev.neotomadb.org:3001/retrieve/'
headers = {'Content-Type': 'application/json'}

# Default to reporting only mismatches/misses when no mode is passed;
# "all" additionally prints every exact match.
if len(argv) == 1:
    argv.append('miss')

tilia_ends = requests.get(tilia_uri, headers=headers).json()
dev_ends = requests.get(dev_uri, headers=headers).json()

print("tilia succeeded, obtained " + str(len(tilia_ends["data"])) +
      " SQL Server Tilia functions.")
print("tilia-dev succeeded, obtained " + str(len(dev_ends["data"])) +
      " Postgres Tilia functions.")

# Get all the names of the functions curently in the database
# remove the schema indicator.
dev_fun = [x["name"].split(".")[1] for x in dev_ends["data"]]

matched = 0
missing = 0
wrong_param = 0
for i in tilia_ends["data"]:
    if i["name"].lower() in dev_fun:
        matched = matched + 1
        devIndex = dev_fun.index(i["name"].lower())
        tilia_params = [x["name"].lower() for x in i["params"]]
        # Need to check for `None` values . . .
        if not dev_ends["data"][devIndex]["params"][0]["name"] is None:
            # Postgres parameter names carry underscores; strip them so
            # the two parameter sets can be compared by bare name.
            dev_params = [sub('_', '', x["name"]) for x in dev_ends["data"][devIndex]["params"]]
            emptyParam = False
        else:
            dev_params = [x["name"] for x in dev_ends["data"][devIndex]["params"]]
            emptyParam = True
        # Same name counts as a full match only when the parameter sets
        # agree (or both sides are parameterless).
        if ((set(tilia_params) == set(dev_params)) |
                (bool(tilia_params == []) & bool(emptyParam is True))):
            if argv[1] == "all":
                print(f"{Fore.GREEN}Found match{Style.RESET_ALL}: " + i["name"].lower())
        else:
            print(f"{Fore.YELLOW}Match with unmatched parameters{Style.RESET_ALL}: " +
                  i["name"].lower())
            print("New:")
            print(dev_params)
            print(tilia_params)
            wrong_param = wrong_param + 1
    # Need to match params now too.
    else:
        print(f"{Fore.RED}Missing{Style.RESET_ALL}: " + i["name"].lower())
        missing = missing + 1

print(f"\n{Fore.GREEN}Total Matched:{Style.RESET_ALL}:" + str(matched))
print(f"{Fore.YELLOW}Matched with wrong parameters:{Style.RESET_ALL}:" + str(wrong_param))
print(f"{Fore.RED}Total Missed:{Style.RESET_ALL}:" + str(missing))
|
tgquintela/Mscthesis | FirmsLocations/Preprocess/geo_filters.py | Python | mit | 7,868 | 0.001652 |
"""
Geofilters
----------
Filters coded oriented to filter and detect uncorrect data.
"""
import os
import numpy as np
import pandas as pd
from collections import Counter
from sklearn.neighbors import KDTree
from pySpatialTools.Preprocess.Transformations.Transformation_2d.geo_filters\
import check_in_square_area
def check_correct_spain_coord(coord, radians=False):
    "Check if the coordinates given are in Spain or not."
    # Returns a boolean mask: True where the coordinate pair lies
    # inside a rough bounding box around Spain (Canary Islands
    # included).
    coord = np.array(coord)
    lim_points = np.array([[-18.25, 4.5], [27.75, 44]])
    if radians:
        # The box above is expressed in degrees; convert it when the
        # input coordinates are in radians.
        lim_points = np.pi/180*lim_points
    logi = check_in_square_area(coord, lim_points)
    return logi
def filter_uncorrect_coord_spain(data, coord_vars, radians=False):
    """Drop the rows of *data* whose coordinates are not inside Spain."""
    coordinates = data[coord_vars].as_matrix()
    inside = check_correct_spain_coord(coordinates, radians)
    return data[inside]
def filter_bool_uncorrect_coord_spain(data, coord_vars, radians=False):
    """Boolean mask of the rows of *data* with valid Spanish coordinates."""
    coordinates = data[coord_vars].as_matrix()
    return check_correct_spain_coord(coordinates, radians)
def fill_locations_by_region_jittering(locations, uncorrect, regions):
    """Replace flagged locations with random draws around their region.

    Parameters
    ----------
    locations : (n, 2) ndarray of coordinates
    uncorrect : (n,) boolean array flagging invalid rows
    regions : (n,) array of region labels, one per row

    Returns a NEW array: for every flagged row a location is drawn as
    ``uniform(0, 1) * std + mean`` of the *valid* locations of that
    row's region; unflagged rows are copied unchanged.  The input
    array is left untouched.
    """
    ## 0. Preparing computations
    # Copy explicitly: basic slicing ("locations[:]") on an ndarray
    # returns a *view*, so the original code silently mutated the
    # caller's array when assigning the new rows below.
    new_locations = np.array(locations)
    u_regs = np.unique(regions)
    regs_mean_locs = []
    regs_std_locs = []
    ## 1. Per-region mean/std computed from the valid rows only
    for reg in u_regs:
        logi = np.logical_and(regions == reg, np.logical_not(uncorrect))
        regs_mean_locs.append(np.mean(locations[logi], axis=0))
        regs_std_locs.append(np.std(locations[logi], axis=0))
    ## 2. Draw replacement locations for the flagged rows
    idxs = np.where(uncorrect)[0]
    new_locs = []
    for i in idxs:
        i_reg = np.where(u_regs == regions[i])[0][0]
        # Random creation
        loc = np.random.random(2)*regs_std_locs[i_reg] + regs_mean_locs[i_reg]
        new_locs.append(loc)
    ## 3. Replace
    new_locations[uncorrect] = np.array(new_locs)
    return new_locations
def get_statistics2fill_locations(locations, regions):
    """Compute per-region mean/std of the valid (in-Spain) locations.

    Returns ``(means, stds, unique_regions)`` where ``means[i]`` and
    ``stds[i]`` correspond to ``unique_regions[i]``.
    """
    ## 0. Preparing computations
    correct = check_correct_spain_coord(locations)
    regions = np.asarray(regions)
    # NaN labels must be masked, not compared: NaN is unequal to
    # everything (including itself and np.nan), so the original
    # ``e != float('nan') and e != np.nan`` filter was a no-op — and
    # turning ``regions`` into a Python list also made the later
    # ``regions == reg`` a scalar False instead of an element mask.
    valid_label = np.logical_not(pd.isnull(regions))
    u_regs = np.unique(regions[valid_label])
    regs_mean_locs = []
    regs_std_locs = []
    ## 1. Computing statistical correct locations
    for reg in u_regs:
        logi = np.logical_and(regions == reg, correct)
        regs_mean_locs.append(np.mean(locations[logi], axis=0))
        regs_std_locs.append(np.std(locations[logi], axis=0))
    return regs_mean_locs, regs_std_locs, u_regs
def fill_locations(df, loc_vars, reg_var, mean_locs, std_locs, u_regs):
    """Impute random in-region coordinates for rows with bad locations.

    Rows whose ``loc_vars`` coordinates are not valid Spanish
    coordinates receive a draw of ``random * std + mean`` for their
    region label (column ``reg_var``).  *df* is modified in place and
    returned.
    """
    ## 0. Preparation computations
    locs = df[loc_vars].as_matrix()
    regions = df[reg_var].as_matrix()
    correct = check_correct_spain_coord(locs)
    idxs = np.where(np.logical_not(correct))[0]
    ## 1. Compute new locations
    new_locs = []
    for i in idxs:
        reg = regions[i]
        i_reg = np.where(u_regs == reg)[0][0]
        # Random creation
        loc = np.random.random(2)*std_locs[i_reg] + mean_locs[i_reg]
        new_locs.append(loc)
    # ``df[loc_vars][mask] = ...`` is chained indexing and assigns to a
    # temporary copy; ``.loc`` writes through to the frame itself.
    df.loc[np.logical_not(correct), loc_vars] = np.array(new_locs)
    return df
###############################################################################
############################ Auxiliar to cleanning ############################
###############################################################################
def fill_nulls(df, mean_locs, std_locs, u_cps, raw_muni, raw_cps, raw_locs,
               pathdata):
    """Impute missing coordinates/postal codes and log each imputation.

    Three row categories are handled:
      * valid postal code but bad coordinates -> draw coordinates from
        the postal-code statistics (``create_cp2locs``);
      * valid coordinates but missing postal code -> majority-vote a
        postal code from nearby known points (``create_locs2cp``);
      * neither usable -> only dumped to a CSV report (imputation from
        the municipality is left commented out / unimplemented).

    Each category is written as a CSV report under *pathdata*.  Rows
    with neither value recoverable are dropped from the returned frame.
    NOTE(review): ``raw_muni`` is currently unused (reserved for the
    municipality-based imputation), and the ``df['es-x'][mask] = ...``
    writes are chained assignments — they rely on old pandas returning
    views; verify on the pandas version in use.
    """
    loc_vars, reg_var = ['es-x', 'es-y'], 'cp'
    locs = df[loc_vars].as_matrix()
    # Masks over the rows: bad coordinates, missing postal code, and
    # whether the stored postal code is one we have statistics for.
    null_locs = np.logical_not(check_correct_spain_coord(locs))
    null_cps = pd.isnull(df[reg_var]).as_matrix()
    null_possible = np.array([e in u_cps for e in list(df['cp'])]).astype(bool)
    # "Impossible" rows: bad coordinates AND an unknown postal code.
    null_imp = np.logical_and(np.logical_not(null_possible), null_locs)
    null_both = np.logical_or(np.logical_and(null_locs, null_cps), null_imp)
    null_neither = np.logical_and(np.logical_not(null_locs),
                                  np.logical_not(null_cps))
    # print null_locs.sum(), null_cps.sum(), null_both.sum()
    null_cps2locs = np.logical_and(null_locs, np.logical_not(null_cps))
    null_cps2locs = np.logical_and(null_cps2locs, null_possible)
    null_locs2cps = np.logical_and(null_cps, np.logical_not(null_locs))
    # print null_both.sum(), null_cps2locs.sum(), null_locs2cps.sum()
    # print null_locs.sum(), null_cps.sum(), null_imp.sum()
    ## Inputing locations from cp
    if null_cps2locs.sum():
        new_locs = create_cp2locs(mean_locs, std_locs, u_cps, null_cps2locs,
                                  list(df['cp']))
        df_null_locs = pd.DataFrame({'nif': list(df['nif'][null_cps2locs]),
                                     'es-x': new_locs[:, 0],
                                     'es-y': new_locs[:, 1]})
        df['es-x'][null_cps2locs] = new_locs[:, 0]
        df['es-y'][null_cps2locs] = new_locs[:, 1]
    else:
        df_null_locs = pd.DataFrame({'nif': [], 'es-x': [], 'es-y': []})
    df_null_locs.to_csv(os.path.join(pathdata, 'cps2locs'), sep=';')
    ## Inputing cp from locations
    if null_locs2cps.sum():
        new_cps = create_locs2cp(locs, null_locs2cps, raw_locs, raw_cps)
        df_null_cps = pd.DataFrame({'nif': list(df['nif'][null_locs2cps]),
                                    'cp': list(new_cps)})
        df['cp'][null_locs2cps] = new_cps
    else:
        df_null_cps = pd.DataFrame({'nif': [], 'cp': []})
    df_null_cps.to_csv(os.path.join(pathdata, 'locs2cps'), sep=';')
    ## Inputing cp and locations from municipio
    # localidades = list(df['localidad'][null_both])
    # localidades_known = list(df['localidad'][np.logical_not(null_both)])
    # cp
    # new2_cps, new2_locs = create_locsandcp()
    localidades = [e.strip().lower() for e in list(df['localidad'][null_both])]
    df_null_both = pd.DataFrame({'nif': list(df['nif'][null_both]),
                                 # 'localidad': localidades,
                                 'cp': list(df['cp'][null_both]),
                                 'es-x': df['es-x'][null_both],
                                 'es-y': df['es-y'][null_both]})
    # 'cp': list(new2_cps),
    # 'es-x': new2_locs[:, 0],
    # 'es-y': new2_locs[:, 1]})
    df_null_both.to_csv(os.path.join(pathdata, 'nulllocsandcps'), sep=';')
    # df['cp'][null_both] = new2_cps
    # df['es-x'][null_both] = new2_locs[:, 0]
    # df['es-y'][null_both] = new2_locs[:, 1]
    # print df.shape, null_neither.sum()
    # Keep only the rows that needed no imputation at all.
    df = df[null_neither]
    return df
def create_cp2locs(mean_locs, std_locs, u_regs, uncorrect, regions):
    """Draw one random location for every row flagged in *uncorrect*.

    Each flagged row's location is sampled as
    ``uniform(0, 1) * std + mean`` of its region's statistics; rows are
    matched to statistics through the position of their label in
    *u_regs*.  Returns the drawn locations as an (k, 2) array.
    """
    drawn = []
    for idx in np.where(uncorrect)[0]:
        reg_pos = np.where(u_regs == regions[idx])[0][0]
        drawn.append(np.random.random(2)*std_locs[reg_pos]
                     + mean_locs[reg_pos])
    return np.array(drawn)
def create_locs2cp(locs, null_locs2cps, raw_locs, raw_cps):
    """Infer postal codes for the rows flagged in *null_locs2cps*.

    Uses a 7-nearest-neighbour majority vote over the reference
    (location, postal code) pairs.
    """
    locs_cp = locs[null_locs2cps]
    new_cps = retrieve_7major_cp(locs_cp, raw_locs, raw_cps)
    return new_cps
def retrieve_7major_cp(locs, raw_locs, raw_cps):
    """Majority-vote a postal code for each location in *locs*.

    For every query point, the postal codes of its 7 nearest
    reference points (KD-tree lookup over *raw_locs*) are counted and
    the most frequent one is returned.
    """
    raw_cps = np.array(raw_cps).astype(int)
    ret = KDTree(raw_locs)
    new_cps = []
    for i in range(len(locs)):
        neighs = ret.query(locs[[i]], 7)[1].ravel()
        c = Counter([raw_cps[nei] for nei in neighs])
        # ``c.keys()[np.argmax(c.values())]`` only worked on Python 2
        # (dict views are not indexable on 3); most_common(1) yields
        # the same majority element.
        new_cps.append(c.most_common(1)[0][0])
    return new_cps
def create_locsandcp():
    # TODO: unimplemented stub — intended to impute both location and
    # postal code from the municipality (see the commented-out call in
    # ``fill_nulls``).
    pass
|
renanvicente/threadurl | setup.py | Python | apache-2.0 | 332 | 0.084337 | from distutils.core import setup
# Package metadata; install with ``python setup.py install``.
setup(
    name = 'threadurl',
    version = '0.0.1',
    py_modules = ['threadurl'],
    author = 'renanvicente',
    author_email = 'renanvice@gmail.com',
    url = 'http://github.com/renanvicente/threadurl',
    description = 'A simple way to send a lot of requests using thread',
)
|
cheery/essence | essence3/layout.py | Python | gpl-3.0 | 11,955 | 0.007779 | from essence3.util import clamp
class Align(object):
    """Alignment expressed as a fixed fraction of the node's size.

    ``h`` applies along the width (edges 'top'/'bottom'), ``v`` along
    the height (edges 'left'/'right'); when ``v`` is omitted ``h`` is
    used for both.  Unknown edges yield ``None``.
    """
    def __init__(self, h, v=None):
        self.h = h
        self.v = v if v is not None else h

    def __call__(self, node, edge):
        if edge in ('left', 'right'):
            return node.height * self.v
        if edge in ('top', 'bottom'):
            return node.width * self.h
class FlowAlign(object):
    """Alignment delegated to the node's own ``flowline`` method.

    ``h`` selects the flow line used along the width (edges
    'top'/'bottom'), ``v`` the one along the height (edges
    'left'/'right'); ``v`` defaults to ``h``.  Unknown edges yield
    ``None``.
    """
    def __init__(self, h, v=None):
        self.h = h
        self.v = v if v is not None else h

    def __call__(self, node, edge):
        if edge in ('left', 'right'):
            return node.flowline(edge, self.v)
        if edge in ('top', 'bottom'):
            return node.flowline(edge, self.h)
def flow_simple(node, bounds, edge, which):
    """Flow line of a container computed from its children.

    ``bounds`` is the ``(low, high)`` extent of the content area along
    *edge* (``high`` is currently unused).  ``which`` selects the flow
    line: 0 -> first child's, 2 -> last child's, 1 -> middle child's
    (or the midpoint between the two middle offsets when the child
    count is even).

    The ``(low, high)`` pair used to be unpacked in the signature
    (Python 2-only syntax); unpacking in the body keeps call sites
    unchanged while being valid on Python 3 as well.
    """
    low, high = bounds
    if which == 0:
        return low + node.offset1[0] + node[0].flowline(edge, which)
    if which == 2:
        return low + node.offset1[-2] + node[-1].flowline(edge, which)
    # Integer middle index (``/`` was Python 2 integer division).
    i = len(node) // 2
    if which == 1:
        if len(node) % 2 == 1:
            return low + node.offset1[i] + node[i].flowline(edge, which)
        else:
            return low + (node.offset0[i] + node.offset1[i])*0.5
class Box(object):
    """Leaf layout element: a positioned rectangle with a style dict.

    NOTE: this module uses Python 2 tuple-parameter unpacking in
    several signatures (e.g. ``def arrange(self, parent, (left, top))``),
    so it only runs on Python 2.
    """
    def __init__(self, (left, top, width, height), style):
        self.left = left
        self.top = top
        self.width = width
        self.height = height
        self.style = style
    def flowline(self, edge, which):
        # Flow lines sit at 0% / 50% / 100% across the box.
        if edge in ('top', 'bottom'):
            return self.width * (0.0, 0.5, 1.0)[which]
        if edge in ('left', 'right'):
            return self.height * (0.0, 0.5, 1.0)[which]
    def measure(self, parent):
        # Fixed size: nothing to compute.
        pass
    def arrange(self, parent, (left,top)):
        # Place the box at the position chosen by the parent.
        self.left = left
        self.top = top
    def render(self):
        # 'background' is a callable (or None) drawing the box area.
        background = self.style['background']
        if background:
            background(self)
    def pick(self, (x,y), hits):
        # Plain boxes are not pickable; hits are passed through.
        return hits
    def subintrons(self, res):
        return res
    def traverse(self, res, cond):
        # Collect self when the predicate accepts it.
        if cond(self):
            res.append(self)
        return res
class Slate(Box):
    # A Box specified only by size; its position defaults to (0, 0)
    # until the parent arranges it.
    def __init__(self, (width, height), style):
        Box.__init__(self, (0, 0, width, height), style)
class Label(Box):
    """A Box that renders a text string with a measured font.

    ``style`` must provide 'font', 'font_size', 'padding'
    (left, top, right, bottom) and 'background'.  ``offsets`` holds
    the cumulative x-offset of every character boundary, filled in by
    ``measure``.
    """
    def __init__(self, source, style):
        self.source = source
        Box.__init__(self, (0, 0, 0, 0), style)
        self.offsets = None
    def flowline(self, edge, which):
        left, top, right, bottom = self.style['padding']
        if edge in ('top', 'bottom'):
            return self.width * (0.0, 0.5, 1.0)[which] + left
        if edge in ('left', 'right'):
            # Vertical flow lines: top of text, math axis, baseline.
            if which == 0:
                return top
            if which == 1:
                return top + self.style['font'].mathline * self.style['font_size']
            if which == 2:
                return top + self.style['font'].baseline * self.style['font_size']
    def measure(self, parent):
        # Size = padded text extent; also caches per-char offsets.
        left, top, right, bottom = self.style['padding']
        self.offsets = self.style['font'].measure(self.source, self.style['font_size'])
        self.width = left + right + self.offsets[-1]
        self.height = top + bottom + self.style['font'].lineheight * self.style['font_size']
    def arrange(self, parent, (left,top)):
        self.left = left
        self.top = top
    def render(self):
        background = self.style['background']
        if background:
            background(self)
        # The font object itself draws the text.
        self.style['font'](self)
    def selection_rect(self, start, stop):
        """Screen rectangle covering characters [start, stop)."""
        left, top, right, bottom = self.style['padding']
        x0 = self.offsets[start]
        x1 = self.offsets[stop]
        return (self.left + left + x0 - 1, self.top, x1-x0 + 2, self.height)
    def scan_offset(self, (x,y)):
        """Nearest character boundary to point (x, y).

        Returns ``(index, squared_distance)`` so callers can pick the
        best label among several.
        """
        left, top, right, bottom = self.style['padding']
        x -= self.left + left
        k = 0
        best = abs(x - 0)
        for index, offset in enumerate(self.offsets):
            v = abs(x - offset)
            if v <= best:
                best = v
                k = index
        return k, best ** 2.0 + abs(y - clamp(self.top, self.top + self.height, y)) ** 2.0
class Container(Box):
    """A Box holding child nodes; base class for HBox/VBox.

    ``offset0``/``offset1`` store the end/start offsets of each child
    along the main axis (one extra slot for the total extent);
    ``flow0``/``flow1`` the per-child alignment flow values, and
    ``base0``/``base1`` the resulting baselines.  All are filled by
    the subclasses' ``measure``.
    """
    def __init__(self, nodes, style):
        self.nodes = nodes
        self.offset0 = [0] * (len(nodes) + 1)
        self.offset1 = [0] * (len(nodes) + 1)
        self.flow0 = [0] * len(nodes)
        self.flow1 = [0] * len(nodes)
        self.base0 = 0
        self.base1 = 0
        Box.__init__(self, (0, 0, 0, 0), style)
    def __getitem__(self, i):
        return self.nodes[i]
    def __iter__(self):
        return iter(self.nodes)
    def __len__(self):
        return len(self.nodes)
    def render(self):
        background = self.style['background']
        if background:
            background(self)
        for node in self:
            node.render()
    def pick(self, (x,y), hits):
        # Children mutate ``hits`` in place.  NOTE(review): the bound
        # ``res`` is never used — confirm the return value of child
        # pick() calls is intentionally discarded.
        for node in self:
            res = node.pick((x,y), hits)
        return hits
    def subintrons(self, res):
        for node in self:
            res = node.subintrons(res)
        return res
    def traverse(self, res, cond):
        # Pre-order traversal collecting every node matching ``cond``.
        if cond(self):
            res.append(self)
        for node in self:
            res = node.traverse(res, cond)
        return res
class HBox(Container):
    """Container laying its children out left-to-right.

    Children are aligned vertically through the 'align' style (their
    'left'/'right' flow values); 'spacing' separates them and
    'padding' surrounds the whole row.
    """
    def flowline(self, edge, which):
        left, top, right, bottom = self.style['padding']
        if edge == 'left':
            # Delegate to the first child, shifted to its baseline.
            return top + self.base0 - self.flow0[0] + self[0].flowline(edge, which)
        elif edge == 'right':
            return top + self.base1 - self.flow1[-1] + self[-1].flowline(edge, which)
        else:
            # Horizontal flow handled by the pluggable 'flow' strategy
            # (e.g. flow_simple) over the content area.
            return self.style['flow'](self, (left, self.width-right), edge, which)
    def measure(self, parent):
        # One pass over the children accumulates x-offsets while
        # tracking the vertical extent (low/high) relative to a
        # running baseline that drifts by each child's flow delta.
        offset = cap = 0
        low = org = high = 0
        for i, node in enumerate(self):
            node.measure(self)
            self.offset0[i] = cap
            self.offset1[i] = offset
            self.flow0[i] = f0 = self.style['align'](node, 'left')
            self.flow1[i] = f1 = self.style['align'](node, 'right')
            low = min(low, 0 - f0)
            high = max(high, node.height - f0)
            low += f0 - f1
            org += f0 - f1
            high += f0 - f1
            cap = offset + node.width
            offset += node.width + self.style['spacing']
        self.offset0[len(self)] = self.offset1[len(self)] = cap
        self.base0 = org - low
        self.base1 = 0 - low
        left, top, right, bottom = self.style['padding']
        self.width = cap + left + right
        self.height = high - low + top + bottom
    def arrange(self, parent, (left,top)):
        self.left = left
        self.top = top
        left, top, right, bottom = self.style['padding']
        base_x = self.left + left
        base_y = self.base0 + self.top + top
        for i, node in enumerate(self):
            # Each child is placed at its x-offset, aligned to the
            # drifting baseline.
            node.arrange(self, (base_x + self.offset1[i], base_y - self.flow0[i]))
            base_y += self.flow1[i] - self.flow0[i]
    def get_spacer(self, i):
        """Rectangle of the gap preceding child ``i`` (for hit areas)."""
        left, top, right, bottom = self.style['padding']
        x0 = self.offset0[i]
        x1 = self.offset1[i]
        return self.left + left+x0, self.top + top, x1-x0, self.height-bottom-top
class VBox(Container):
def flowline(self, edge, which):
left, top, right, bottom = self.style['padding']
if edge == 'top':
return left + self.base0 - self.flow0[0] + self[0].flowline(edge, which)
elif edge == 'bottom':
return left + self.base1 - self.flow1[-1] + self[-1].flowline(edge, which)
else:
return self.style['flow'](self, (top, self.height-bottom), edge, which)
def measure(self, parent):
offset = cap = 0
low = org = high = 0
for i, node in enumerate(self):
node.measure(self)
self.offset0[i] = cap
self.offset1[i] = offset
self.flow0[i] = f0 = self.style['align'](node, 'top')
self.flow1[i] = f1 = self.style['align'](node, 'bottom')
low = min(low, 0 - f0)
high = max(high, node.width - f0)
low += f0 - f1
org += f0 - f1
high += f0 - f1
cap = offset + |
ros2/system_tests | test_cli_remapping/test/test_cli_remapping.py | Python | apache-2.0 | 6,164 | 0.000649 | # Copyright 2018 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import sys
import time
import unittest
from launch import LaunchDescription
from launch.actions import ExecuteProcess
import launch_testing
import launch_testing.actions
import rclpy
def get_environment_variable(name):
    """Return the value of env var *name*; raise if unset or empty."""
    value = os.getenv(name)
    if value:
        return value
    raise EnvironmentError('Missing environment variable "%s"' % name)
# Paths to the rclcpp and rclpy "name_maker" test executables, provided
# by the build system through the environment.
CLIENT_LIBRARY_EXECUTABLES = (
    get_environment_variable('NAME_MAKER_RCLCPP'),
    get_environment_variable('NAME_MAKER_RCLPY')
)
# case name -> (expected remapped name template, CLI remap rule).
# ``{random_string}`` is filled per run so concurrent/repeated test
# executions do not collide on node/topic/service names.
TEST_CASES = {
    'namespace_replacement': (
        '/ns/s{random_string}/relative/name',
        '__ns:=/ns/s{random_string}'
    ),
    'node_name_replacement': (
        'node_{random_string}',
        '__node:=node_{random_string}'
    ),
    'topic_and_service_replacement': (
        '/remapped/ts{random_string}',
        '/fully/qualified/name:=/remapped/ts{random_string}'
    ),
    'topic_replacement': (
        '/remapped/t{random_string}',
        'rostopic://~/private/name:=/remapped/t{random_string}'
    ),
    'service_replacement': (
        '/remapped/s{random_string}',
        'rosservice://~/private/name:=/remapped/s{random_string}'
    )
}
@launch_testing.parametrize('executable', CLIENT_LIBRARY_EXECUTABLES)
def generate_test_description(executable):
    """Launch one name_maker process per TEST_CASES entry, each with
    its own ``--remap`` rule, and expose the expected remapped names
    to the test methods through the test context."""
    command = [executable]
    # Execute python files using same python used to start this test
    env = dict(os.environ)
    if command[0][-3:] == '.py':
        command.insert(0, sys.executable)
        env['PYTHONUNBUFFERED'] = '1'
    launch_description = LaunchDescription()
    test_context = {}
    for replacement_name, (replacement_value, cli_argument) in TEST_CASES.items():
        # Unique per-run suffix so stale names from a previous run are
        # never mistaken for this run's.
        random_string = '%d_%s' % (
            random.randint(0, 9999), time.strftime('%H_%M_%S', time.gmtime()))
        launch_description.add_action(
            ExecuteProcess(
                cmd=command + ['--ros-args', '--remap', cli_argument.format(**locals())],
                name='name_maker_' + replacement_name, env=env
            )
        )
        test_context[replacement_name] = replacement_value.format(random_string=random_string)
    launch_description.add_action(
        launch_testing.actions.ReadyToTest()
    )
    return launch_description, test_context
class TestCLIRemapping(unittest.TestCase):
    """Checks that names remapped on the CLI show up in the ROS graph.

    Each ``test_*`` method receives the expected name from the launch
    test context and polls the graph (up to ATTEMPTS times, spinning
    the observer node in between) until the name appears.
    """
    # How many times to poll the graph before asserting.
    ATTEMPTS = 10
    # Seconds slept between polls.
    TIME_BETWEEN_ATTEMPTS = 1

    @classmethod
    def setUpClass(cls):
        # One observer node shared by all tests in the class.
        rclpy.init()
        cls.node = rclpy.create_node('test_cli_remapping')

    @classmethod
    def tearDownClass(cls):
        cls.node.destroy_node()
        rclpy.shutdown()

    def get_topics(self):
        # Topic names currently visible in the graph (types dropped).
        topic_names_and_types = self.node.get_topic_names_and_types()
        return [name for name, _ in topic_names_and_types]

    def get_services(self):
        # Service names currently visible in the graph (types dropped).
        service_names_and_types = self.node.get_service_names_and_types()
        return [name for name, _ in service_names_and_types]

    def test_namespace_replacement(self, namespace_replacement):
        # A namespace remap affects both topics and services.
        for attempt in range(self.ATTEMPTS):
            if (
                namespace_replacement in self.get_topics() and
                namespace_replacement in self.get_services()
            ):
                break
            time.sleep(self.TIME_BETWEEN_ATTEMPTS)
            rclpy.spin_once(self.node, timeout_sec=0)
        self.assertIn(namespace_replacement, self.get_topics())
        self.assertIn(namespace_replacement, self.get_services())

    def test_node_name_replacement(self, node_name_replacement):
        for attempt in range(self.ATTEMPTS):
            if node_name_replacement in self.node.get_node_names():
                break
            time.sleep(self.TIME_BETWEEN_ATTEMPTS)
            rclpy.spin_once(self.node, timeout_sec=0)
        self.assertIn(node_name_replacement, self.node.get_node_names())

    def test_topic_and_service_replacement(self, topic_and_service_replacement):
        # A plain name remap (no rostopic/rosservice scheme) affects
        # both topics and services.
        for attempt in range(self.ATTEMPTS):
            if (
                topic_and_service_replacement in self.get_topics() and
                topic_and_service_replacement in self.get_services()
            ):
                break
            time.sleep(self.TIME_BETWEEN_ATTEMPTS)
            rclpy.spin_once(self.node, timeout_sec=0)
        self.assertIn(topic_and_service_replacement, self.get_topics())
        self.assertIn(topic_and_service_replacement, self.get_services())

    def test_topic_replacement(self, topic_replacement):
        # ``rostopic://`` remaps must touch only topics, not services.
        for attempt in range(self.ATTEMPTS):
            if topic_replacement in self.get_topics():
                break
            time.sleep(self.TIME_BETWEEN_ATTEMPTS)
            rclpy.spin_once(self.node, timeout_sec=0)
        self.assertIn(topic_replacement, self.get_topics())
        self.assertNotIn(topic_replacement, self.get_services())

    def test_service_replacement(self, service_replacement):
        # ``rosservice://`` remaps must touch only services, not topics.
        for attempt in range(self.ATTEMPTS):
            if service_replacement in self.get_services():
                break
            time.sleep(self.TIME_BETWEEN_ATTEMPTS)
            rclpy.spin_once(self.node, timeout_sec=0)
        self.assertNotIn(service_replacement, self.get_topics())
        self.assertIn(service_replacement, self.get_services())
@launch_testing.post_shutdown_test()
class TestCLIRemappingAfterShutdown(unittest.TestCase):
    """Post-shutdown checks on the launched name_maker processes."""
    def test_processes_finished_gracefully(self, proc_info):
        """Test that both executables finished gracefully."""
        launch_testing.asserts.assertExitCodes(proc_info)
|
stetie/postpic | postpic/datareader/dummy.py | Python | gpl-3.0 | 7,561 | 0.000794 | #
# This file is part of postpic.
#
# postpic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# postpic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with postpic. If not, see <http://www.gnu.org/licenses/>.
#
# Stephan Kuschel 2014
'''
Dummy reader for creating fake simulation Data for testing purposes.
Stephan Kuschel 2014
'''
from __future__ import absolute_import, division, print_function, unicode_literals
from . import Dumpreader_ifc
from . import Simulationreader_ifc
import numpy as np
from .. import helper
from ..helper import PhysicalConstants
class Dummyreader(Dumpreader_ifc):
    '''
    Dummyreader creates fake Data for testing purposes.

    Args:
      dumpid : int
        the dumpidentifier is the dumpid in this case. It is a float variable,
        that will also change the dummyreaders output (for example it
        will pretend to have dumpid many particles).
    '''

    def __init__(self, dumpid, dimensions=2, randfunc=np.random.normal, seed=0, **kwargs):
        # NOTE(review): ``super(self.__class__, ...)`` recurses forever
        # if this class is ever subclassed; confirm subclassing is not
        # intended before relying on it.
        super(self.__class__, self).__init__(dumpid, **kwargs)
        self._dimensions = dimensions
        self._seed = seed
        self._randfunc = randfunc
        # initialize fake data
        if seed is not None:
            np.random.seed(seed)
        # One coordinate sample per particle; dumpid doubles as the
        # particle count.
        self._xdata = randfunc(size=int(dumpid))
        if dimensions > 1:
            self._ydata = randfunc(size=int(dumpid))
        if dimensions > 2:
            self._zdata = randfunc(size=int(dumpid))
        # Momenta derived deterministically from the x positions.
        self._pxdata = np.roll(self._xdata, 1) ** 2 * (PhysicalConstants.me * PhysicalConstants.c)
        self._pydata = np.roll(self._xdata, 2) * (PhysicalConstants.me * PhysicalConstants.c)
        self._pzdata = np.roll(self._xdata, 3) * (PhysicalConstants.me * PhysicalConstants.c)
        self._weights = np.repeat(1, len(self._xdata))
        self._ids = np.arange(len(self._xdata))
        np.random.shuffle(self._ids)

    def keys(self):
        # Not meaningful for fake data.
        pass

    def __getitem__(self, key):
        # Not meaningful for fake data.
        pass

    def __eq__(self, other):
        # Equal when the base identity matches plus all fake-data
        # generation parameters.
        ret = super(self.__class__, self).__eq__(other)
        return ret \
            and self._randfunc == other._randfunc \
            and self._seed == other._seed \
            and self._dimensions == other._dimensions

    def timestep(self):
        # The dump identifier doubles as the timestep.
        return self.dumpidentifier

    def time(self):
        return self.timestep() * 1e-10

    def simdimensions(self):
        return self._dimensions

    def gridoffset(self, key, axis):
        raise Exception('Not Implemented')

    def gridspacing(self, key, axis):
        raise Exception('Not Implemented')

    def data(self, axis):
        """Synthetic field data on the grid for the given axis/component."""
        axid = helper.axesidentify[axis]

        # Time-dependent analytic test patterns for each component.
        def _Ex(x, y, z):
            ret = np.sin(np.pi * self.timestep() *
                         np.sqrt(x**2 + y**2 + z**2)) / \
                np.sqrt(x**2 + y**2 + z**2 + 1e-9)
            return ret

        def _Ey(x, y, z):
            ret = np.sin(np.pi * self.timestep() * x) + \
                np.sin(np.pi * (self.timestep() - 1) * y) + \
                np.sin(np.pi * (self.timestep() - 2) * z)
            return ret

        def _Ez(x, y, z):
            ret = x**2 + y**2 + z**2
            return ret
        fkts = {0: _Ex,
                1: _Ey,
                2: _Ez}
        # Evaluate on the simulation grid, filling missing axes with 0.
        if self.simdimensions() == 1:
            ret = fkts[axid](self.grid(None, 'x'), 0, 0)
        elif self.simdimensions() == 2:
            xx, yy = np.meshgrid(self.grid(None, 'x'), self.grid(None, 'y'), indexing='ij')
            ret = fkts[axid](xx, yy, 0)
        elif self.simdimensions() == 3:
            xx, yy, zz = np.meshgrid(self.grid(None, 'x'),
                                     self.grid(None, 'y'),
                                     self.grid(None, 'z'), indexing='ij')
            ret = fkts[axid](xx, yy, zz)
        return ret

    def _keyE(self, component):
        return component

    def _keyB(self, component):
        return component

    def simgridpoints(self, axis):
        return self.grid(None, axis)

    def simextent(self, axis):
        # First and last grid point along the axis.
        g = self.grid(None, axis)
        return np.asfarray([g[0], g[-1]])

    def gridnode(self, key, axis):
        '''
        Args:
          axis : string or int
            the axisidentifier

        Returns: list of grid nodes of the axis specified
        (one more node than grid points).
        Thus only regular grids are supported currently.
        '''
        axid = helper.axesidentify[axis]
        grids = {1: [(-2, 10, 601)],
                 2: [(-2, 10, 301), (-5, 5, 401)],
                 3: [(-2, 10, 101), (-5, 5, 81), (-4, 4, 61)]}
        if axid >= self.simdimensions():
            raise KeyError('axis ' + str(axis) + ' not present.')
        args = grids[self.simdimensions()][axid]
        ret = np.linspace(*args)
        return ret

    def grid(self, key, axis):
        '''
        Args:
          axis : string or int
            the axisidentifier

        Returns: list of grid points of the axis specified.
        Thus only regular grids are supported currently.
        '''
        axid = helper.axesidentify[axis]
        grids = {1: [(-2, 10, 600)],
                 2: [(-2, 10, 300), (-5, 5, 400)],
                 3: [(-2, 10, 100), (-5, 5, 80), (-4, 4, 60)]}
        if axid >= self.simdimensions():
            raise KeyError('axis ' + str(axis) + ' not present.')
        args = grids[self.simdimensions()][axid]
        ret = np.linspace(*args)
        return ret

    def listSpecies(self):
        return ['electron']

    def getSpecies(self, species, attrib):
        """Return the fake per-particle array for *attrib* (x, y, z,
        px, py, pz, weights or ids); raise KeyError otherwise."""
        attribid = helper.attribidentify[attrib]
        if attribid == 0:  # x
            ret = self._xdata
        elif attribid == 1 and self.simdimensions() > 1:  # y
            ret = self._ydata
        elif attribid == 2 and self.simdimensions() > 2:  # z
            ret = self._zdata
        elif attribid == 3:  # px
            ret = self._pxdata
        elif attribid == 4:  # py
            ret = self._pydata
        elif attribid == 5:  # pz
            ret = self._pzdata
        elif attribid == 9:  # weights
            ret = self._weights
        elif attribid == 10:  # ids
            ret = self._ids
        else:
            raise KeyError('Attrib "' + str(attrib) + '" of species "' +
                           str(species) + '" not present')
        return ret

    def __str__(self):
        ret = '<Dummyreader ({:d}d) initialized with "' \
            + str(self.dumpidentifier) + '">'
        ret = ret.format(self._dimensions)
        return ret
class Dummysim(Simulationreader_ifc):
    """Fake simulation consisting of ``simidentifier`` dummy dumps."""

    def __init__(self, simidentifier, dimensions=2, **kwargs):
        # Name the base class explicitly: ``super(self.__class__, ...)``
        # recurses forever when this class is subclassed.
        super(Dummysim, self).__init__(simidentifier, **kwargs)
        self._dimensions = dimensions

    def __len__(self):
        # The identifier doubles as the number of available dumps.
        return self.simidentifier

    def _getDumpreader(self, index):
        if index < len(self):
            return Dummyreader(index, dimensions=self._dimensions)
        else:
            raise IndexError()

    def __str__(self):
        # The original (copy-pasted from Dummyreader) read the
        # non-existent ``self.dumpidentifier``; a simulation reader
        # carries ``simidentifier`` (see __len__ above).
        ret = '<Dummysimulation ({:d}d) initialized with "' \
            + str(self.simidentifier) + '">'
        ret = ret.format(self._dimensions)
        return ret
|
streed/antZoo | test_server.py | Python | mit | 126 | 0.015873 | import sys
from antZoo.gossip import GossipServiceHandler
server = GossipServiceHandler.Server( sys. | argv[1] )
server.serve()
| |
corumcorp/redsentir | redsentir/seguridad/views.py | Python | gpl-3.0 | 3,382 | 0.015671 | from django.shortcuts import render
from django.contrib.auth.models import User
from .models import *
from django.contrib.auth.views import login
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
import csv
from djqscsv import render_to_csv_response
from django.contrib.auth.decorators import login_required
from django.http import Http404
from datetime import datetime, date, timedelta
from lineatiempo.models import Publicacion
import base64
from django.core.files.base import ContentFile
def registro(request):
if request.POST:
if 'username' in request.POST :
usuarioTmp = User.objects.filter(username = request.POST['username']).first()
if usuarioTmp != None:
return render(request, 'registration/registro.html',{'usuario':'el nombre de usuario '+request.POST['username']+' ya existe'})
if 'password' in request.POST and 'password1' in request.POST and request.POST['password'] != '':
if request.POST['password'] == request.POST['password1'] :
usuario = User.objects.create_user(username = request.POST['username'],password = request.POST['password'],email = request.POST['email'])
perfil = None
if 'genero' in request.POST :
pgenero = request.POST['genero']
else :
pgenero = None
perfil = Perfil(
user_id=usuario.id,
genero=pgenero,
fecha_nacimiento=request.POST['fecha_nacimiento'],
telefono=request.POST['telefono']
)
if 'avatar' in request.POST and request.POST['avatar']!='' :
image_data = request.POST['avatar']
format, imgstr = image_data.split(';base64,')
ext = format.split('/')[-1]
data = ContentFile(base64.b64decode(imgstr))
file_name = "'perfil"+str(usuario.pk)+"." + ext
perfil.avatar.save(file_name, data, save=True)
if perfil.edad() < 20 :
perfil.es_joven = True
perfil.save()
return HttpResponseRedirect(reverse(login,args=[]))
else :
return render(request, 'registration/registro.html',{'password':'las claves deben coincidir'})
else :
return render(request, 'registration/registro.html',{'password':'ingrese clave de seguridad valida'})
else :
return render(request, 'registration/registro.html',{'usuario':'debe ingresar un usuario'})
else :
return render(request, 'registration/registro.html')
@login_required
def exportarUsuarios(request):
if request.user.is_superuser :
return render_to_csv_response(User.objects.all())
else :
raise Http404
@login_required
def perfilUsuario(request, pid):
usuario = User.objects.get(pk=pid)
publicaciones = Publicacion.obje | cts.filter(usuario_i | d=usuario.pk).order_by('id').reverse()[:20]
return render(request, 'sitio/perfil/perfil.html', {'usuario':usuario,'publicaciones':publicaciones})
|
satdav/mozillians | manage.py | Python | bsd-3-clause | 590 | 0.001695 | #!/usr/bin/env python
import os
import sys
try:
# For local development in a virtualenv:
from funfactory import manage
except ImportError:
# Production:
# Add a temporary path so that we can import the funfactory
tmp_path = os.path.join(os.path. | dirname(os.path.abspath(__ | file__)),
'vendor', 'src', 'funfactory')
sys.path.append(tmp_path)
from funfactory import manage
# Let the path magic happen in setup_environ() !
sys.path.remove(tmp_path)
manage.setup_environ(__file__)
if __name__ == "__main__":
manage.main()
|
skywalka/splunk-for-nagios | bin/liveservicestate.py | Python | gpl-3.0 | 2,474 | 0.031124 | # Script to list remote services in Nagios by accessing MK Livestatus
# Required field to be passed to this script from Splunk: status (eg. 0, 1, 2, 3, 666, 9999)
# where 666 is any non-zero status | , and 9999 is any status
import socket,string,sys,re,mklivestatus
import splunk.Intersplunk
results = []
if len(sys.argv) != 3:
print "Usage: %s [status] [host_name]" % sys.argv[0]
sys.exit(1) |
status_zero = 0
status2 = int(sys.argv[1])
host_name3 = sys.argv[2]
host_name2 = host_name3.lower()
if status2 == 666:
mkl_filter = ">"
status3 = status_zero
elif status2 == 9999:
mkl_filter = "!="
status3 = status2
else:
mkl_filter = "="
status3 = status2
status = "%s %d" % (mkl_filter, status3)
if host_name2 == "all":
mkl_filter2 = "!="
host_name = host_name2
else:
mkl_filter2 = "=~"
host_name = host_name2
host_status = "%s %s" % (mkl_filter2, host_name)
try:
results,dummyresults,settings = splunk.Intersplunk.getOrganizedResults()
for r in results:
try:
HOST = mklivestatus.HOST
PORT = mklivestatus.PORT
for h in HOST:
content = "GET services\nFilter: host_name %s\nFilter: state %s\nAnd: 2\nColumns: host_name description plugin_output state\n" % (host_status, status)
query = "".join(map(str,content))
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((h, PORT))
except socket.error, (value,message):
if s:
s.close()
#Error: Could not open socket: connection refused (MK Livestatus not setup in xinetd?)
break
s.send(query)
s.shutdown(socket.SHUT_WR)
data = s.recv(100000000)
data2 = (re.findall(r'(No UNIX socket)', data))
if data2:
#Error: MK Livestatus module not loaded?
s.close()
else:
liveservices2 = data.strip()
table2 = data.strip()
table = table2.split("\n")
s.close()
r["liveservicestatus_results"] = table
except:
r["liveservicestatus_results"] = "Unknown"
except:
import traceback
stack = traceback.format_exc()
results = splunk.Intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
splunk.Intersplunk.outputResults( results )
|
jrief/django-shop-productvariations | shop_textoptions/views.py | Python | bsd-3-clause | 932 | 0.003219 | # -*- coding: utf-8 -*-
class ProductTextOptionsViewMixin | (object):
"""
DetailView Mixin class when using ProductTextOptionsMixin
"""
def get_variation(self):
"""
The post request contains information a | bout the chosen variation.
Recombine this with the information extracted from the OptionGroup
for the given product
"""
variation = super(ProductTextOptionsViewMixin, self).get_variation()
variation.update({ 'text_options': {} })
product = self.get_object()
for text_option in product.text_options.all():
key = 'add_item_text_option_%s' % text_option.id
if self.request.POST.has_key(key):
value = text_option.__dict__
del value['_state']
value['text'] = self.request.POST[key]
variation['text_options'][text_option.id] = value
return variation
|
DzikuVx/PowerCutter | power_cutter.py | Python | gpl-2.0 | 300 | 0.043333 | #!/usr/bin/en | v python
# -*- coding: utf-8 -*-
import RPi.GPIO as GPIO
import time
def main():
# GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.OUT)
GPIO.output( | 18, GPIO.HIGH)
time.sleep(5);
GPIO.output(18, GPIO.LOW)
GPIO.cleanup()
if __name__ == "__main__":
main()
|
akesandgren/easybuild-easyblocks | easybuild/easyblocks/n/ncurses.py | Python | gpl-2.0 | 1,919 | 0.003648 | ##
# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild
#
# Copyright:: Copyright 2012-2019 Uni.Lu/LCSB, NTUA
# Authors:: Cedri | c Laczny <cedric.laczny@uni.lu>, Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste
# License:: MIT/GPL
# $Id$
#
# This work implements a part of the HPCBIOS project and is a component of the policy:
# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-90.html
##
"""
Easybuild support for building ncurses, implemented as an ea | syblock
@author: Cedric Laczny (Uni.Lu)
@author: Fotis Georgatos (Uni.Lu)
@author: Kenneth Hoste (Ghent University)
"""
import os
from easybuild.easyblocks.generic.configuremake import ConfigureMake
class EB_ncurses(ConfigureMake):
"""
Support for building ncurses
"""
def configure_step(self):
"""
No configure
"""
self.cfg.update('configopts', '--with-shared --enable-overwrite')
super(EB_ncurses, self).configure_step()
def sanity_check_step(self):
"""Custom sanity check for ncurses."""
binaries = ["captoinfo", "clear", "infocmp", "infotocap", "ncurses5-config", "reset", "tabs", "tic", "toe",
"tput", "tset"]
libs = ['lib%s.a' % x for x in ["form", "form", "menu", "menu_g", "ncurses", "ncurses++", "ncurses_g",
"panel", "panel_g"]]
custom_paths = {
'files': [os.path.join('bin', x) for x in binaries] + [os.path.join('lib', x) for x in libs],
'dirs': ['include']
}
super(EB_ncurses, self).sanity_check_step(custom_paths=custom_paths)
def make_module_req_guess(self):
"""
Set correct CPLUS path.
"""
guesses = super(EB_ncurses, self).make_module_req_guess()
guesses.update({'CPLUS': ['include/ncurses']}) # will only be present without --enable-overwrite
return guesses
|
sulantha2006/Processing_Pipeline | ExecutePipeline/ExecutePipeline.py | Python | apache-2.0 | 5,465 | 0.010064 | __author__ = 'Sulantha'
import sys, argparse
sys.path.extend(['/home/sulantha/PycharmProjects/Processing_Pipeline'])
from Config import StudyConfig
from Manager.PipelineManager import PipelineManager
import logging.config
from Utils.PipelineLogger import PipelineLogger
import Config.EmailConfig as ec
from Utils.EmailClient import EmailClient
import traceback
from Manager.QSubJobHanlder import QSubJobHandler
import os
from Config import PipelineConfig
PipelineConfig.SourcePath = os.getcwd()
def main():
# Start an email client and logging capacity
emailClient = EmailClient()
logging.config.fileConfig('Config/LoggingConfig.conf')
studyList = None
## Added ability to run from command line.
try:
## Open up for arguments
parser = argparse.ArgumentParser()
parser.add_argument('--studyList', required=True, nargs='+', choices=StudyConfig.AllowedStudyList, help='Space seperated study list.')
parser.add_argument('--steps', required=False, nargs='+', choices=StudyConfig.AllowedStepsList, help='Space seperated steps list.')
parser.add_argument('--modalities', required=False, nargs='+', choices=StudyConfig.AllowedModalityList, help='Space seperated modality list.')
parser.add_argument('--pipe_v', required=False, nargs='+', choices=StudyConfig.AllowedVersions, help='Version of pipeline need to run.')
args = parser.parse_args()
studyList = args.studyList
steps = args.steps
modalities = args.modalities
version = args.pipe_v
if not validateStepSequence(steps):
sys.exit(2)
steps = ['ALL'] if not steps else steps
modalities = StudyConfig.AllowedModalityList if not modalities else modalities
if version and len(version) > 1 and len(studyList) > 1:
PipelineLogger.log('root', 'info', 'Versioning with multiple studies is not supported. ')
sys.exit(2)
version = StudyConfig.defaultVersioningForStudy[studyList[0]] if not version else dict(zip(modalities, version))
PipelineLogger.log('root', 'info', '##################Pipeline Started.#################')
PipelineLogger.log('root', 'info', 'StudyIds = %s' %', '.join(map(str, studyList)))
PipelineLogger.log('root', 'info', 'Steps = %s' %', '.join(map(str, steps)))
PipelineLogger.log('root', 'info', 'Version = %s' %version)
pipeline = PipelineMa | nager(studyList, version)
####ToDo: Process steps sequence.
## Recurse for new data
PipelineLogger.log('root', 'info', 'Recursing for new data started ...')
pipeline.recurseForNewData()
PipelineLogger.log('root', 'info', 'Recursing for new | data done ...############')
## Add data to Sorting table.
pipeline.addNewDatatoDB()
##Get Unmoved Raw File List
pipeline.getUnmovedRawDataList()
PipelineLogger.log('root', 'info', 'Moving new data started ...')
pipeline.moveRawData()
PipelineLogger.log('root', 'info', 'Moving new data done ...############')
pipeline.getConversionList()
PipelineLogger.log('root', 'info', 'Converting to MINC started ...')
pipeline.convertRawData()
PipelineLogger.log('root', 'info', 'Converting to MINC done ...############')
PipelineLogger.log('root', 'info', 'Modifying processing pipeline table. This may take a while. Please wait....############')
pipeline.getConvertedList() # Get all files that have been converted, from Conversion table
pipeline.refreshModalityTables() # Get the correct modality, insert or ignore information in Processing table, takes quite some time
pipeline.getProcessList() # Returning all rows from the Processing table that has not been processed or skipped, takes time too
pipeline.fillPipelineTables() # Creating jobs queue in {study}_{modality}_Pipeline table, takes time too
for modality in modalities:
if modality == 'BLUFF':
break
pipeline.checkExternalJobs(modality) # Update list of external waiting jobs in externalWaitingJobs table
for modality in modalities:
if modality == 'BLUFF':
break
pipeline.checkOnQCJobs(modality) # Update jobs with success QC in Processing table
for modality in modalities:
if modality == 'BLUFF':
break
pipeline.processModality(modality) # Actually processing jobs
pipeline.qsubJobHandler.submittedJobs['xxxx'].Fin = True
PipelineLogger.log('root', 'info', 'Pipeline reached end. Waiting on submitted jobs. ')
#### End
if not pipeline.qsubJobHandler.submittedJobs:
PipelineLogger.log('root', 'info', 'No QSUB Jobs in waiting ...############')
PipelineLogger.log('root', 'info', 'Pipeline exiting ...############')
PipelineLogger.log('root', 'info', '##################Pipeline Done.#################')
pipeline.qsubJobHandler.QUIT = 1
except:
PipelineLogger.log('root', 'exception', 'Pipeline crashed with exception. ')
emailClient.send_email(ec.EmailRecList_admin, 'Pipeline crashed with exception. ', ' {0} '.format(traceback.format_exc()))
##This method will validate the sequence of steps. If not returns False.
def validateStepSequence(stepsList):
return True
if __name__ == '__main__':
main()
|
vabs22/zulip | tools/linter_lib/custom_check.py | Python | apache-2.0 | 24,509 | 0.006651 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import os
import re
import traceback
from .printer import print_err, colors
from typing import cast, Any, Callable, Dict, List, Optional, Tuple
def build_custom_checkers(by_lang):
# type: (Dict[str, List[str]]) -> Tuple[Callable[[], bool], Callable[[], bool]]
RuleList = List[Dict[str, Any]]
def custom_check_file(fn, identifier, rules, color, skip_rules=None, max_length=None):
# type: (str, str, RuleList, str, Optional[Any], Optional[int]) -> bool
failed = False
line_tups = []
for i, line in enumerate(open(fn)):
line_newline_stripped = line.strip('\n')
line_fully_stripped = line_newline_stripped.strip()
skip = False
for rule in skip_rules or []:
if re.match(rule, line):
skip = True
if line_fully_stripped.endswith(' # nolint'):
continue
if skip:
continue
tup = (i, line, line_newline_stripped, line_fully_stripped)
line_tups.append(tup)
rules_to_apply = []
fn_dirname = os.path.dirname(fn)
for rule in rules:
exclude_list = rule.get('exclude', set())
if fn in exclude_list or fn_dirname in exclude_list:
continue
if rule.get("include_only"):
found = False
for item in rule.get("include_only", set()):
if item in fn:
found = True
if not found:
continue
rules_to_apply.append(rule)
for rule in rules_to_apply:
exclude_lines = {
line for
(exclude_fn, line) in rule.get('exclude_line', set())
if exclude_fn == fn
}
pattern = rule['pattern']
for (i, line, line_newline_stripped, line_fully_stripped) in line_tups:
if line_fully_stripped in exclude_lines:
exclude_lines.remove(line_fully_stripped)
continue
try:
line_to_check = line_fully_stripped
if rule.get('strip') is not None:
if rule['strip'] == '\n':
line_to_check = line_newline_stripped
else:
raise Exception("Invalid strip rule")
if re.search(pattern, line_to_check):
print_err(identifier, color, '{} at {} line {}:'.format(
rule['description'], fn, i+1))
print_err(identifier, color, line)
failed = True
except Exception:
print("Exception with %s at %s line %s" % (rule['pattern'], fn, i+1))
traceback.print_exc()
if exclude_lines:
print('Please remove exclusions for file %s: %s' % (fn, exclude_lines))
lastLine = None
for (i, line, line_newline_stripped, line_fully_stripped) in line_tups:
if isinstance(line, bytes):
line_length = len(line.decode("utf-8"))
else:
line_length = len(line)
if (max_length is not None and line_length > max_length and
'# type' not in line and 'test' not in fn and 'example' not in fn and
not re.match("\[[ A-Za-z0-9_:,&()-]*\]: http.*", line) and
not re.match("`\{\{ external_api_uri_subdomain \}\}[^`]+`", line) and
"#ignorelongline" not in line and 'migrations' not in fn):
print("Line too long (%s) at %s line %s: %s" % (len(line), fn, i+1, line_newline_stripped))
failed = True
lastLine = line
if lastLine and ('\n' not in lastLine):
print("No newline at the end of file. Fix with `sed -i '$a\\' %s`" % (fn,))
failed = True
return failed
whitespace_rules = [
# This linter should be first since bash_rules depends on it.
{'pattern': '\s+$',
'strip': '\n',
'description': 'Fix trailing whitespace'},
{'pattern': '\t',
'strip': '\n',
'exclude': set(['zerver/lib/bugdown/codehilite.py',
'tools/travis/success-http-headers.txt']),
'description': 'Fix tab-based whitespace'},
] # type: RuleList
markdown_whitespace_rules = list([rule for rule in whitespace_rules if rule['pattern'] != '\s+$']) + [
# Two spaces trailing a line with other content is okay--it's a markdown line break.
# This rule finds | one space t | railing a non-space, three or more trailing spaces, and
# spaces on an empty line.
{'pattern': '((?<!\s)\s$)|(\s\s\s+$)|(^\s+$)',
'strip': '\n',
'description': 'Fix trailing whitespace'},
{'pattern': '^#+[A-Za-z0-9]',
'strip': '\n',
'description': 'Missing space after # in heading'},
] # type: RuleList
js_rules = cast(RuleList, [
{'pattern': '[^_]function\(',
'description': 'The keyword "function" should be followed by a space'},
{'pattern': '.*blueslip.warning\(.*',
'description': 'The module blueslip has no function warning, try using blueslip.warn'},
{'pattern': '[)]{$',
'description': 'Missing space between ) and {'},
{'pattern': '["\']json/',
'description': 'Relative URL for JSON route not supported by i18n'},
# This rule is constructed with + to avoid triggering on itself
{'pattern': " =" + '[^ =>~"]',
'description': 'Missing whitespace after "="'},
{'pattern': '^[ ]*//[A-Za-z0-9]',
'description': 'Missing space after // in comment'},
{'pattern': 'if[(]',
'description': 'Missing space between if and ('},
{'pattern': 'else{$',
'description': 'Missing space between else and {'},
{'pattern': '^else {$',
'description': 'Write JS else statements on same line as }'},
{'pattern': '^else if',
'description': 'Write JS else statements on same line as }'},
{'pattern': 'console[.][a-z]',
'exclude': set(['static/js/blueslip.js',
'frontend_tests/zjsunit',
'frontend_tests/casper_lib/common.js',
'frontend_tests/node_tests',
'static/js/debug.js']),
'description': 'console.log and similar should not be used in webapp'},
{'pattern': 'i18n[.]t',
'include_only': set(['static/js/portico/']),
'description': 'i18n.t is not available in portico pages yet'},
{'pattern': '[.]text\(["\'][a-zA-Z]',
'description': 'Strings passed to $().text should be wrapped in i18n.t() for internationalization'},
{'pattern': 'compose_error\(["\']',
'description': 'Argument to compose_error should be a literal string enclosed '
'by i18n.t()'},
{'pattern': 'ui.report_success\(',
'description': 'Deprecated function, use ui_report.success.'},
{'pattern': 'report.success\(["\']',
'description': 'Argument to report_success should be a literal string enclosed '
'by i18n.t()'},
{'pattern': 'ui.report_error\(',
'description': 'Deprecated function, use ui_report.error.'},
{'pattern': 'report.error\(["\']',
'description': 'Argument to report_error should be a literal string enclosed '
'by i18n.t()'},
]) + whitespace_rules
python_rules = cast(RuleList, [
{'pattern': '^(?!#)@login_required',
'description': '@login_required is unsupported; use @zulip_login_required'},
{'pattern': '".*"%\([a-z_].*\)?$',
'description': 'Missing space around "%"'},
{'pattern': "'.*'%\([a-z_].*\)?$",
'exclude': set(['analytics/lib/counts.py',
|
dirtycoder/opbeat_python | tests/events/tests.py | Python | bsd-3-clause | 676 | 0.001479 | # | -*- coding: utf-8 -*-
from mock import Mock
from django.test import TestCase
from opbeat.events import Message
class MessageTest(TestCase):
def test_to_string(self):
unformatted_message = 'My message from %s about %s'
client = Mock()
message = Message(client)
message.logger = Mock()
data = {
'param_message': {
'message': unformatted_message,
}
}
| self.assertEqual(message.to_string(data), unformatted_message)
data['param_message']['params'] = (1, 2)
self.assertEqual(message.to_string(data),
unformatted_message % (1, 2))
|
tadek-project/tadek-common | tadek/engine/contexts.py | Python | gpl-3.0 | 11,158 | 0.006363 | ################################################################################
## ##
## This file is a part of TADEK. ##
## ##
## TADEK - Test Automation in a Distributed Environment ##
## (http://tadek.comarch.com) ##
## ##
## Copyright (C) 2011 Comarch S.A. ##
## All rights reserved. ##
## ##
## TADEK is free software for non-commercial purposes. For commercial ones ##
## we offer a commercial license. Please check http://tadek.comarch.com for ##
## details or write to tadek-licenses@comarch.com ##
## ##
## You can redistribute it and/or modify it under the terms of the ##
## GNU General Public License as published by the Free Software Foundation, ##
## either version 3 of the License, or (at your option) any later version. ##
## ##
## TADEK is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with TADEK bundled with this file in the file LICENSE. ##
## If not, see http://www.gnu.org/licenses/. ##
## ##
## Please notice that Contributor Agreement applies to any contribution ##
## you make to TADEK. The Agreement must be completed, signed and sent ##
## to Comarch before any contribution is made. You should have received ##
## a copy of Contribution Agreement along with TADEK bundled with this file ##
## in the file CONTRIBUTION_AGREEMENT.pdf or see http://tadek.comarch.com ##
## or write to tadek-licenses@comarch.com ##
## ##
################################################################################
import testexec
__all__ = ["CaseContext", "SuiteContext", "DeviceContext"]
# All classes defined herein are 'new-style' classes
__metaclass__ = type
class TaskContext:
'''
A base class of task contexts.
'''
child = None
def __init__(self, task):
self.task = task
def _runTask(self, test, device, result):
'''
Runs the related test task within the context.
'''
pass
def run(self, test, device, result):
'''
Runs the task context.
'''
result.startTest(self.task.result, device)
execResult = self.task.result.device(device)
try:
try:
self._runTask(test, device, result)
except testexec.TestAbortError, err:
execResult.errors.append(testexec.errorInfo(err))
execResult.status = err.status
raise
except testexec.TestAssertError, err:
execResult.errors.append(testexec.errorInfo(err))
execResult.status = err.status
except KeyboardInterrupt, err:
execResult.errors.append(testexec.errorInfo(err))
execResult.status = testexec.STATUS_NOT_COMPLETED
| raise testexec.TestAbortError("Keyboard interrupt")
except Exception, err:
execResult.errors.ap | pend(testexec.errorInfo(err))
execResult.status = testexec.STATUS_ERROR
# FIXME
# Shouldn't some exception be passed higher?
else:
execResult.status = testexec.STATUS_PASSED
finally:
result.stopTest(self.task.result, device)
class CaseContext(TaskContext):
'''
A class of test case contexts.
'''
def _runTask(self, test, device, result):
'''
Runs a related test case task within the context.
'''
idx = 0
try:
if self.task.parent:
idx = len(self.task.parent.caseSetUps)
for setUp in self.task.parent.caseSetUps:
setUp(test, device)
idx -= 1
for step, stepResult in zip(self.task.test,
self.task.result.children):
result.startTest(stepResult, device)
execResult = stepResult.device(device)
try:
try:
step.run(test, device)
except testexec.TestFailThisError, err:
execResult.errors.append(testexec.errorInfo(err))
execResult.status = err.status
except KeyboardInterrupt, err:
execResult.errors.append(testexec.errorInfo(err))
execResult.status = testexec.STATUS_NOT_COMPLETED
raise testexec.TestAbortError("Keyboard interrupt")
except Exception, err:
execResult.errors.append(testexec.errorInfo(err))
execResult.status = getattr(err, "status",
testexec.STATUS_ERROR)
raise
else:
execResult.status = testexec.STATUS_PASSED
finally:
result.stopTest(stepResult, device)
finally:
if self.task.parent:
error = None
for tearDown in self.task.parent.caseTearDowns[idx:]:
try:
tearDown(test, device)
except Exception, err:
# FIXME
# An exception raised by previous tearDownCase() should
# be also saved here.
error = err
if error:
raise error
def run(self, *args, **kwargs):
'''
Runs the test case context.
'''
self.task.done()
TaskContext.run(self, *args, **kwargs)
class GroupContext:
'''
An interface class for contexts of task groups.
'''
parent = None
tasker = None
def __init__(self, parent=None):
if parent:
self.parent = parent
self.tasker = parent.tasker
self._forbidden = []
def forbid(self, id):
'''
Forbids an execution of tasks of the given id in the group context.
'''
self._forbidden.append(id)
if self.parent:
self.parent.forbid(id)
def taskContext(self, task, parent=None):
'''
Return an execution context for the given task.
'''
tasks = [task]
while task.parent and task.parent != parent:
task = task.parent
tasks.insert(0, task)
parent = None
contexts = []
while tasks:
task = tasks.pop(0)
context = task.context(parent or self) if tasks else task.context()
contexts.append(context)
if parent:
parent.child = context
parent = context
return contexts[0]
class SuiteContext(TaskContext, GroupContext):
'''
A class of test suite contexts.
'''
def __init__(self, task, parent):
|
ArdanaCLM/ardana-service | ardana_service/admin.py | Python | apache-2.0 | 8,071 | 0 | # (c) Copyright 2017-2019 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import abort
from flask import Blueprint
from flask import jsonify
from flask import request
from keystoneauth1 import exceptions as exc
from keystoneauth1 import session as ks_session
from keystoneclient.auth.identity import v3
from keystoneclient.v3 import client as ks_client
import logging
import os
from oslo_config import cfg
import pbr.version
import pwd
import threading
import time
from .util import ping
from . import config
from . import policy
bp = Blueprint('admin', __name__)
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
USER_AGENT = 'Installer UI'
@bp.route("/api/v2/version")
def version():
"""Returns the version of the service
.. :quickref: Admin; Returns the version of the service
**Example valid response**:
.. sourcecode:: http
HTTP/1.1 200 OK
0.0.1.dev16
"""
version_info = pbr.version.VersionInfo('ardana-service')
return version_info.version_string_with_vcs()
@bp.route("/api/v2/heartbeat")
def heartbeat():
"""Returns the epoch time
Simple API to verify that the service is up and responding. Returns
the number of seconds since 1970-01-01 00:00:00 GMT.
.. :quickref: Admin; Returns the epoch time
**Example valid response**:
.. sourcecode:: http
HTTP/1.1 200 OK
1502745650
"""
return jsonify(int(time.time()))
@bp.route("/api/v2/user")
@policy.enforce('lifecycle:get_user')
def user():
"""Returns the username the service is running under
.. :quickref: Admin; Returns the username the service is running under
**Example valid response**:
.. sourcecode:: http
HTTP/1.1 200 OK
{"username": "myusername"}
"""
user_dict = {'username': pwd.getpwuid(os.getuid()).pw_name}
return jsonify(user_dict)
def update_trigger_file():
trigger_file = os.path.join(CONF.paths.log_dir, 'trigger.txt')
with open(trigger_file, 'w') as f:
f.write("Triggered restart at %s\n" % time.asctime())
@bp.route("/api/v2/restart", methods=['POST'])
@policy.enforce('lifecycle:restart')
def restart():
"""Requests the service to restart after a specified delay, in seconds
.. :quickref: Admin; Requests a service restart after a delay
**Example Request**:
.. sourcecode:: http
POST /api/v2/user HTTP/1.1
Content-Type: application/json
{
"delay": 60
}
"""
info = request.get_json() or {}
delay_secs = int(info.get('delay', 0))
t = threading.Timer(delay_secs, update_trigger_file)
t.start()
return jsonify('Success')
@bp.route("/api/v2/login", methods=['POST'])
def login():
"""Authenticates with keystone and returns a token
.. :quickref: Admin; Authenticates with keystone
**Example Request**:
.. sourcecode:: http
POST /api/v2/login HTTP/1.1
Content-Type: application/json
{
"username": "admin",
"password": "secret"
}
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"token": "gAAAAABbEaruZDQGIH5KmKWHlDZIw7CLq",
"expires": "2018-06-01T21:22:06+00:00"
}
:status 200: successful authentication
:status 401: invalid credentials
:status 403: authentication not permitted, or user not authorized for any
projects
"""
if not config.requires_auth():
abort(403,
"authentication not permitted since service is in insecure mode")
info = request.get_json() or {}
username = info.get('username')
password = info.get('password')
user_domain_name = info.get('user_domain_name', 'Default')
token = _authenticate(CONF.keystone_authtoken.auth_url,
username,
password,
user_domain_name)
return jsonify(token)
def _authenticate(auth_url, username=None, password=None,
user_domain_name='Default'):
"""Authenticate with keystone
Creates an unscoped token using the given credentials (which validates
them), and then uses that token to get a project-scoped token.
"""
unscoped_auth = v3.Password(auth_url,
username=username,
password=password,
user_domain_name=user_domain_name,
unscoped=True)
session = ks_session.Session(user_agent=USER_AGENT,
verify=not CONF.keystone_authtoken.insecure)
try:
# Trigger keystone to verify the credentials
unscoped_auth_ref = unscoped_auth.get_access(session)
except exc.connection.ConnectFailure as e:
abort(503, str(e))
except exc.http.HttpError as e:
abort(e.http_status, e.message)
except exc.ClientException as e:
abort(401, str(e))
except Exception as e:
LOG.exception(e)
abort(500, "Unable to authenticate")
client = ks_client.Client(session=session,
auth=unscoped_auth,
user_agent=USER_AGENT)
auth_url = unscoped_auth.auth_url
projects = client.projects.list(user=unscoped_auth_ref.user_id)
# Filter out disabled projects
projects = [project for project in projects if project.enabled]
# Prioritize the admin project by putting it at the beginning of the list
for pos, project in enumerate(projects):
if project.name == 'admin':
projects.pop(pos)
projects.insert(0, project)
break
# Return the first project token that we have the admin role on, otherwise
# return the first project token we have any role on.
fallback_auth_ref = None
for project in projects:
auth = v3.Token(auth_url=auth_url,
token=unscoped_auth_ref.auth_token,
project_id=project.id,
reauthenticate=False)
try:
auth_ref = auth.get_access(session)
if 'admin' in auth_ref.role_names:
return {'token': auth_ref.auth_token,
'expires': auth_ref.expires.isoformat()}
elif not fallback_auth_ref:
fallback_auth_ref = auth_ref
except Exception as e:
pass
if fallback_auth_ref:
return {'token': fallback_auth_ref.auth_token,
'expires': fallback_auth_ref.expires.isoformat()}
# TODO(gary): Consider as a secondary fallback to return a domain-scoped
# token
abort(403, "Not authorized for any project")
@bp.route("/api/v2/is_secured")
def get_secured():
"""Returns whether authentication is required
Returns a json | object indicating whether the servic | e is configured to
enforce authentication
.. :quickref: Model; Returns whether authentication is required
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"isSecured": false
}
:status 200: success
"""
return jsonify({'isSecured': config.requires_auth()})
@bp.route("/api/v2/connection_test", methods=['POST'])
def connection_test():
body = request.get_json() or {}
host = body['host']
try:
ping(host, 22)
return jsonify('Success')
except Exception as e:
return jsonify(error=str(e)), 404
|
shamitb/Tantal | bot/backup01.py | Python | mit | 9,157 | 0.006443 | import json
import logging
import random
import Algorithmia
import nltk
from textblob import TextBlob
from text_corpus import TextCorpus
from aylienapiclient import textapi
logger = logging.getLogger(__name__)
class RtmEventHandler(object):
def __init__(self, slack_clients, msg_writer):
self.clients = slack_clients
self.msg_writer = msg_writer
#self.trump_corpus = trump_corpus
def handle(self, event):
if 'type' in event:
self._handle_by_type(event['type'], event)
def _handle_by_type(self, event_type, event):
# See https://api.slack.com/rtm for a full list of events
# if event_type == 'error':
# error
# ignore self.msg_writer.write_error(event['channel'], json.dumps(event))
if event_type == 'message':
# message was sent to channel
self._handle_message(event)
elif event_type == 'channel_joined':
# you joined a channel
self.msg_writer.send_message(event['channel'], "Welcome, Interact with the Tantal Slack bot ...")
elif event_type == 'group_joined':
# you joined a private group
self.msg_writer.write_help_message(event['channel'])
elif event_type == 'file_shared':
self.msg_writer.send_message(event['channel'], "Got your file, thanks!")
else:
pass
def _handle_message(self, event):
# Filter out messages from the bot itself
if not self.clients.is_message_from_me(event['user']):
msg_txt = event['text']
if 'sentiment' in msg_txt:
client = textapi.Client("a19bb245", "2623b77754833e2711998a0b0bdad9db")
msg_txt = msg_txt.split(' ', 1)[1]
msg_txt = msg_txt[1:-1]
if 'http:' in msg_txt or 'https:' in msg_txt :
print 'URL'
sentiment = client.Sentiment({"url": msg_txt})
else:
sentiment = client.Sentiment({"text": msg_txt})
str = sentiment['polarity']
str2 = " - %3.3f" % sentiment['polarity_ | confidence']
str += str2
self.msg_writer.send_message(event['channel'], str)
elif 'tag' in msg_txt:
count = 0;
client = Algorithmia.client('sim3x6PzEv6m2icRR+23rqTTcOo1') |
algo = client.algo('nlp/AutoTag/1.0.0')
tags = algo.pipe(msg_txt)
str_final = ""
#print entities.result
for item in tags.result:
if count == 0:
pass
else:
str_final += item
str_final += ", "
count = count + 1
self.msg_writer.send_message(event['channel'], str_final)
#algo = client.algo('StanfordNLP/NamedEntityRecognition/0.2.0')
#entities = algo.pipe(msg_txt)
elif 'tensor' in msg_txt:
hello = tf.constant('Hello, TensorFlow!')
sess = tf.Session()
value = sess.run(hello)
self.msg_writer.send_message(event['channel'], value)
elif 'ocr' in msg_txt or 'OCR' in msg_txt:
msg_txt = msg_txt.split(' ', 1)[1]
msg_txt = msg_txt[1:-1]
input = {"src":msg_txt,
"hocr":{
"tessedit_create_hocr":1,
"tessedit_pageseg_mode":1,
"tessedit_char_whitelist":"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-@/.,:()!?"}}
client = Algorithmia.client('sim3x6PzEv6m2icRR+23rqTTcOo1')
algo = client.algo('tesseractocr/OCR/0.1.0')
words = algo.pipe(input).result['result']
value = words.rstrip('\n')
self.msg_writer.send_message(event['channel'], value)
elif 'entity' in msg_txt or 'Entity' in msg_txt or 'ENTITY' in msg_txt:
client = Algorithmia.client('sim3x6PzEv6m2icRR+23rqTTcOo1')
msg_txt = msg_txt.split(' ', 1)[1]
algo = client.algo('StanfordNLP/NamedEntityRecognition/0.2.0')
entities = algo.pipe(msg_txt)
str_final = ""
#print entities.result
for inner_l in entities.result:
for item in inner_l:
str = item[0] + " - " + item[1] + ", "
str_final += str
self.msg_writer.send_message(event['channel'], str_final)
elif 'ftp' in msg_txt or 'FTP' in msg_txt:
import re
#string = 'ftp://pm_adm@ftp.kyoceradocumentsolutions.eu/pm_link/KWM/Datasheet%20Portrait%20A4-RGB.zip'
string = msg_txt.split(' ', 1)[1]
string = string[1:-1]
string = re.sub('/pm_link', '', string)
string = re.sub('pm_adm','eupm_58972:kABm1Zp!A70V',string)
self.msg_writer.send_message(event['channel'], string)
elif 'IMAGE' in msg_txt or 'Image' in msg_txt or 'image' in msg_txt:
import re
msg_txt = msg_txt.split(' ', 1)[1]
msg_txt = msg_txt[1:-1]
url = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', msg_txt)
url = ''.join(url)
client = Algorithmia.client('sim3x6PzEv6m2icRR+23rqTTcOo1')
algo = client.algo('deeplearning/InceptionNet/1.0.2')
tags = algo.pipe(url).result['tags'][0]['class']
import unicodedata
tags = unicodedata.normalize('NFKD', tags).encode('ascii','ignore')
self.msg_writer.send_message(event['channel'], tags)
elif 'POS' in msg_txt or 'Pos' in msg_txt or 'pos' in msg_txt :
msg_txt = msg_txt.split(' ', 1)[1]
s = msg_txt
from pattern.en import parse
response = parse(s, relations=False, lemmata=False)
self.msg_writer.send_message(event['channel'], response)
elif 'classify' in msg_txt:
client = textapi.Client("a19bb245", "2623b77754833e2711998a0b0bdad9db")
classifications = client.ClassifyByTaxonomy({"text": msg_txt, "taxonomy": "iab-qag"})
sent_str = ""
for category in classifications['categories']:
sent_str += category['label'] + ", "
sent_str = sent_str[:-1]
response = sent_str
self.msg_writer.send_message(event['channel'], response)
elif 'hash' in msg_txt:
client = textapi.Client("a19bb245", "2623b77754833e2711998a0b0bdad9db")
msg_txt = msg_txt.split(' ', 1)[1]
msg_txt = msg_txt[1:-1]
url = msg_txt
import re
url = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', url)
hashtags = client.Hashtags({"url": url})
response = ', '.join(hashtags['hashtags'])
str = ""
list = response.split(' ')
size = len(list)
for i in range(0, size):
str += list[i]
str += " "
self.msg_writer.send_message(event['channel'], str)
elif 'help' in msg_txt:
self.msg_writer.write_help_message(event['channel'])
elif 'joke' in msg_txt:
self.msg_writer.write_joke(event['channel'])
elif 'attachment' in msg_txt:
self.msg_writer.demo_attachment(event['channel'])
elif 'button' in msg_txt:
self.msg_writer.demo_button(event['channel'])
elif 'echo' in msg |
bt3gl/NetAna-Complex-Network-Analysis | src/calculate_features_advanced/auto.py | Python | mit | 604 | 0.006623 | #!/usr/bin/env python
__author__ = "Mari Wahl"
__copyright__ = "Copyright 2014, The Cogent Project" |
__credits__ = ["Mari Wahl"]
__license__ = "GPL"
__version__ = "4.1"
__maintainer__ = "Mari Wahl"
__email__ = "marina.w4hl@gmail.com"
from helpers import running, constants
# change here for type of net:
NETWORK_FILES = constants.NETWORK_FILES_UN_AUTO + constants.NETWORK_FILES_DIR_AUTO
TYPE_NET_DIR = "auto/"
def main():
running.sampling(NETWORK_FILES, TYPE_NET_DIR, [])
print("All graphs for " + TYPE_NET_DIR + " were processed. The end! \n")
if __name__ = | = '__main__':
main()
|
m-vdb/ourplaylists | ourplaylists/app/migrations/0009_playlistitem_created_at.py | Python | mit | 527 | 0.001898 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('app', ' | 0008_playlistitem_network'),
]
operations = [
migrations.AddField(
model_name='playlistitem',
name='created_at',
field=models.DateTimeField(default=datetime.datetime(2014, 1 | 0, 6, 10, 0, 29, 893833), auto_now_add=True),
preserve_default=False,
),
]
|
mec07/PyLATO | tests/pylato/test_electronic.py | Python | gpl-2.0 | 17,499 | 0.000971 | import json
import numpy as np
import pytest
from pylato.electronic import Electronic, num_swaps_to_sort
from pylato.exceptions import UnimplementedMethodError
from pylato.init_job import InitJob
from pylato.main import execute_job
from tests.conftest import load_json_file
@pytest.mark.parametrize(
("array", "expected_num_swaps"),
[
([1, 2, 3, 4, 5], 0),
([2, 1, 3, 4, 5], 1),
([2, 3, 1, 4, 5], 2),
([2, 3, 4, 1, 5], 3),
([2, 4, 3, 1, 5], 4),
([4, 2, 3, 1, 5], 5),
([4, 2, 3, 5, 1], 6),
([4, 2, 5, 3, 1], 7),
([4, 5, 2, 3, 1], 8),
([5, 4, 2, 3, 1], 9),
]
)
def test_num_swaps_to_sort(array, expected_num_swaps):
assert num_swaps_to_sort(array) == expected_num_swaps
class TestElectronic:
def test_init_input_density_matrix(self):
# Setup
Job = InitJob("test_data/JobDef_input_density_matrix.json")
input_rho_file = Job.Def['input_rho']
with open(input_rho_file, 'r') as file_handle:
input_rho = np.matrix(json.load(file_handle))
# Action
electronic = Electronic(Job)
# Result
assert np.array_equal(electronic.rho, input_rho)
assert np.array_equal(electronic.rhotot, input_rho)
def test_init_incorrect_input_density_matrix_dimensions(self):
# Setup
Job = InitJob("test_data/JobDef_input_density_matrix.json")
bad_rho_file = "test_data/bad_rho.json"
Job.Def['input_rho'] = bad_rho_file
expected_rho = np.matrix(np.zeros(
(Job.Hamilton.HSOsize, Job.Hamilton.HSOsize), dtype='complex'))
# Action
electronic = Electronic(Job)
# Result
assert np.array_equal(electronic.rho, expected_rho)
assert np.array_equal(electronic.rhotot, expected_rho)
def test_quantum_number_S_is_None(self):
# Setup
Job = InitJob("test_data/JobDef_scase.json")
# Fake
def fake_magnetic_correlation(*args):
return -1
Job.Electron.magnetic_correlation = fake_magnetic_correlation
# Action
S = Job.Electron.quantum_number_S(Job)
# Result
assert S is None
@pytest.mark.parametrize(
("name", "rho", "expected_S"),
[
(" | singlet", [
[0.5, 0.5, 0.0, 0.0],
[0.5, 0.5, 0.0, 0.0],
[0.0, 0.0, 0.5, 0.5],
[0.0, 0.0, 0.5, 0.5],
], 0),
("triplet up", [
[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0]
], 1),
("triplet down", [
[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0],
| [0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0]
], 1),
]
)
def test_quantum_number_S(self, name, rho, expected_S):
# Setup
Job = InitJob("test_data/JobDef_scase.json")
# Spin 0 density matrix
Job.Electron.rho = np.matrix(rho)
# Action
S = Job.Electron.quantum_number_S(Job)
# Result
assert S == expected_S
def test_quantum_number_L_z_p_orb_is_None(self):
# Setup
Job = InitJob("test_data/JobDef_pcase.json")
# Fake
def fake_L_z_p_orb(*args):
return -1
Job.Electron.L_z_p_orb_part_1 = fake_L_z_p_orb
Job.Electron.L_z_p_orb_part_2 = fake_L_z_p_orb
# Action
L_z = Job.Electron.quantum_number_L_z_p_orb(Job)
# Result
assert L_z is None
def test_quantum_number_L_z_d_orb_is_None(self):
# Setup
Job = InitJob("test_data/JobDef_pcase.json")
# Fake
def fake_L_z_d_orb(*args):
return -1
Job.Electron.L_z_d_orb = fake_L_z_d_orb
# Action
L_z = Job.Electron.quantum_number_L_z_d_orb(Job)
# Result
assert L_z is None
@pytest.mark.parametrize(
("name", "rho", "expected_L_z"),
[
("s atom", [
[1.0, 0.0],
[0.0, 1.0]
], 0),
("p atom 2 electrons", [
[0.5, 0.5, 0.0, 0.0, 0.0, 0.0],
[0.5, 0.5, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
], 1),
("p atom 3 electrons", [
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
], 0),
("d atom 2 electrons", [
[1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.5, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.5, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
], 1),
]
)
def test_quantum_number_L_z(self, name, rho, expected_L_z):
# Setup
Job = InitJob("test_data/JobDef_pcase.json")
# Fake
Job.NAtom = 1
Job.NOrb = [len(rho)/2]
Job.Electron.NElectrons = sum(rho[ii][ii] for ii in range(len(rho)))
Job.Electron.rho = np.matrix(rho, dtype='complex')
# Action
L_z = Job.Electron.quantum_number_L_z(Job)
# Result
assert L_z == expected_L_z
@pytest.mark.parametrize(
("job_file", "rho_file", "expected_L_z"),
[
("test_data/JobDef_scase.json", "test_data/rho_scase.json", 0),
("test_data/JobDef_pcase.json", "test_data/rho_pcase_2.json", 1),
("test_data/JobDef_dcase.json", "test_data/rho_dcase.json", 0),
# Not too sure about this last test case, rho_dcase_2 was
# arbitrarily constructed to get this result...
("test_data/JobDef_dcase.json", "test_data/rho_dcase_2.json", 1),
]
)
def test_quantum_number_L_z_dimers(self, job_file, rho_file, expected_L_z):
# Setup
Job = InitJob(job_file)
# Fake
rho = load_json_file(rho_file)
Job.Electron.rho = np.matrix(rho, dtype='complex')
# Action
L_z = Job.Electron.quantum_number_L_z(Job)
# Result
assert L_z == expected_L_z
def test_quantum_number_L_z_not_implemented_error(self):
# Setup
Job = InitJob("test_data/JobDef_scase.json")
Job.NOrb = [1, 3]
expected_message = (
"Quantum Number L_z methods have only been implemented for "
"simulations consisting of solely s, p or d orbital atoms"
)
# Action
with pytest.raises(UnimplementedMethodError, message=expected_message):
print(Job.Electron.quantum_number_L_z(Job))
def test_quantum_number_L_z_1_electron(self):
# Setup
Job = InitJob("test_data/JobDef_scase.json")
Job.Electron.NElectrons = 1
# Action
L_z = Job.Electron.quantum_number_L_z(Job)
# Result
assert L_z == 0
@pytest.mark.parametrize(
("norb", "expected_result"),
[
([1, 1, 1, 1], True),
([2, 2, 2, 2, 2, 2, 2], True),
([1, 2, 1], False),
([4, 3, 3, 3, 3, 3, 3, 3, 3, 3], False),
]
)
def test_all_atoms_same_num_orbitals(self, norb, expected_result):
# Setup
Job = InitJob("test_data/JobDef_scase.json")
# Fake
|
angelverde/evadoc | models/menu.py | Python | gpl-3.0 | 1,321 | 0.004549 | # -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
response.KEY = KEY = 'youshallnotpass' |
#########################################################################
## this is the m | ain application menu add/remove items as required
#########################################################################
response.menu = [
]
es_nuevo = not auth.is_logged_in() and not session.id_facultad
response.nav = auth.navbar(mode="dropdown")
if es_nuevo:
# si no esta logueado y no tiene facultad como anonimo
link = response.nav.element('a')
link.components[0] = 'Iniciar'
e = response.nav.element('.dropdown-menu')
e.insert(0, '')
e[0]['_class'] = 'divider'
e.insert(0, A(I(_class='icon-question-sign'),' Anónimo',
_href='#modalFacultad', **{'_data-toggle': 'modal'}))
elif not auth.is_logged_in() and session.id_facultad:
# si no esta logueado pero tiene facultad de anonimo
link = response.nav.element('a')
link.components[0] = 'Anónimo'
e = response.nav.element('.dropdown-menu')
e.insert(0, '')
e[0]['_class'] = 'divider'
e.insert(0, A(I(_class='icon-question-sign'),' Olvidame',
_href=URL('utilidades','borrar_session',hmac_key=KEY)))
if "auth" in locals(): auth.wikimenu()
|
s20121035/rk3288_android5.1_repo | external/chromium_org/chrome/test/ispy/server/debug_view_handler.py | Python | gpl-3.0 | 1,348 | 0.002967 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Request handler to display the debug view for a Failure."""
import jinja2
import os
import sys
import webapp2
from common import ispy_utils
import views
JINJA = jinja2.Environment(
loader | =jinja2.FileSystemLoader(os.path.dirname(views.__file__)),
extensions=['jinja2.ext.autoescape'])
class DebugViewHandler(webapp2.RequestHandler):
"""Request handler to display the debug view for a failure."""
def get(self):
"""Handles get requests to the /debug_view page.
GET Parameters:
test_run: The test run.
expectation: The expectation | name.
"""
test_run = self.request.get('test_run')
expectation = self.request.get('expectation')
expected_path = ispy_utils.GetExpectationPath(expectation, 'expected.png')
actual_path = ispy_utils.GetFailurePath(test_run, expectation, 'actual.png')
data = {}
def _ImagePath(url):
return '/image?file_path=%s' % url
data['expected'] = _ImagePath(expected_path)
data['actual'] = _ImagePath(actual_path)
data['test_run'] = test_run
data['expectation'] = expectation
template = JINJA.get_template('debug_view.html')
self.response.write(template.render(data))
|
yapdns/yapdns-app | web/core/views.py | Python | mit | 3,697 | 0.001352 | from django.http import JsonResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from .search import DnsRecord
from .models import Client
from functools import wraps
from elasticsearch.helpers import bulk
from elasticsearch_dsl.connections import connections
import json
response_map = {
200: 'DNS Record create successfully',
400: 'Failed to create DNS Record',
500: 'Invalid request'
}
class InvalidDnsRecord(Exception):
pass
def check_client_credentials(client_id, client_secret):
try:
Client.objects.filter(id=client_id, secret_key=client_secret)
except:
return False
return True
def client_auth(func):
@wraps(func)
def _decorator(request, *args, **kwargs):
if 'HTTP_AUTHORIZATION' in request.META:
authmeth, auth = request.META['HTTP_AUTHORIZATION'].split(' ', 1)
if authmeth.lower() == 'basic':
auth = auth.strip().decode('base64')
client_id, client_secret = auth.split(':', 1)
if check_client_credentials(client_id, client_secret):
return func(request, *args, **kwargs)
return response_from_code(500)
return _decorator
def response_from_code(code, message=None):
message = message if message else response_map[code]
response = {
'status': code,
'message': message
}
return JsonResponse(response, status=code)
def build_record_from_dict(body):
required_fields = ('domain', 'rtype', 'client', 'rdata', 'timestamp')
if not all(k in body for k in required_fields):
raise InvalidDnsRecord('Required fields {} not present'.format(required_fields))
if body['rtype'] not in ['A', 'AAAA', 'SOA', 'NS', 'PTR', 'CNAME', 'MX', 'SRV']:
raise InvalidDnsRecord('Invalid value for record type')
if 'ttl' in body and int(body['ttl']) < 0:
raise InvalidDnsRecord('Invalid value for TTL')
client = body['client']
clie | nt_fields = ('service_type', 'ip')
if no | t all(k in client for k in client_fields):
raise InvalidDnsRecord('Required fields {} not present in client'.format(client_fields))
dns_record = DnsRecord(**body)
return dns_record
@csrf_exempt
@client_auth
def create_record(request):
if request.method != 'POST':
return response_from_code(400)
body = json.loads(request.body)
try:
dns_record = build_record_from_dict(body)
dns_record.save()
except Exception, e:
return response_from_code(400, str(e))
return response_from_code(200)
@csrf_exempt
@client_auth
def create_record_bulk(request):
if request.method != 'POST':
return response_from_code(400)
dns_records = []
body = json.loads(request.body)
for record in body:
try:
dns_record = build_record_from_dict(record)
except Exception, e:
return response_from_code(400, str(e))
dns_records.append(dns_record)
bulk(connections.get_connection(), (d.to_dict(True) for d in dns_records))
return response_from_code(200)
def search_records(request):
domain = request.GET['domain']
s = DnsRecord.search()
s = s.filter('term', domain=domain)
results = s.execute()
response = []
for record in results:
response.append({
'timestamp': record['timestamp'],
'domain': record['domain'],
'type': record['rtype'],
'data': record['rdata'],
# 'client': dict(record['client'])
})
return JsonResponse(response, safe=False)
def home(request):
return render(request, 'core/index.html')
|
klmitch/nova | nova/policies/assisted_volume_snapshots.py | Python | apache-2.0 | 1,590 | 0 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-assisted-volume-snapshot | s:%s | '
assisted_volume_snapshots_policies = [
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'create',
check_str=base.SYSTEM_ADMIN,
description="Create an assisted volume snapshot",
operations=[
{
'path': '/os-assisted-volume-snapshots',
'method': 'POST'
}
],
scope_types=['system']),
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'delete',
check_str=base.SYSTEM_ADMIN,
description="Delete an assisted volume snapshot",
operations=[
{
'path': '/os-assisted-volume-snapshots/{snapshot_id}',
'method': 'DELETE'
}
],
scope_types=['system']),
]
def list_rules():
return assisted_volume_snapshots_policies
|
jarretraim/py-docstack | docstack/config.py | Python | apache-2.0 | 2,382 | 0 | import logging
import random
import string
import sys
from oslo.config import cfg
# Logging setup
logger = logging.getLogger(__name__)
stdout = logging.StreamHandler(sys.stdout)
stdout.setLevel(logging.DEBUG)
logger.addHandler(stdout)
logger.setLevel(logging.DEBUG)
default_opts = [
cfg.StrOpt('working_dir',
default='/opt/docstack',
help="The base path to use for docstack."),
]
# Option Definitions
infrastructure_opts = [
cfg.StrOpt('sql_backend',
default='mysql',
choices=['mysql', 'postgresql'],
help="The sql backend to use."),
cfg.StrOpt('sql_host',
default='127.0.0.1',
help="The host for the sql backend."),
cfg.StrOpt('sql_user',
default='mysql',
help="The user for the sql backend."),
cfg.StrOpt('sql_password',
default='',
help="Password for the sql backend."),
cfg.StrOpt('queue_backend',
default='rabbit',
choices=['rabbit', 'qpid', 'zeromq'],
help="The shared queue to use."),
cfg.StrOpt('queue_host',
default='127.0.0.1',
help="The host for the queue backend."),
cfg.StrOpt('queue_user',
default='rabbit',
help="The user for the queue backend."),
cfg.StrOpt('queue_password',
default='',
help="Password for the sql backend."),
]
def generate_password(length):
chars = ''.join([string.lowercase, string.uppercase, "1234567890"])
choice = random.SystemRandom().choice
return ''.join((choice(chars) for i in range(length)))
def parse():
conf = cfg.ConfigOpts()
conf(project='docstack', prog='docstack')
# Base options
conf.register_opts(default_opts)
# Infrastruc | ture
infrastructure_group = cfg.OptGroup(name="infrastructure",
title="Infrastructure Services")
conf.register_group(infrastructure_group)
conf.register_opts(infrastructure_opts, infrastructure_group)
conf.set_default('sql_password', generate_password(12), 'infrastructure')
conf.set_default('queue_password', generate_password(12), 'infrastructure')
conf.reload_config_files()
# Log it all out
conf.log_opt_values(lo | gger, logging.INFO)
return conf
|
Inveracity/jinjabread | jinjabread/functions/salt/saltexceptions.py | Python | mit | 12,746 | 0.000628 | # -*- coding: utf-8 -*-
'''
This module is a central location for all salt exceptions
'''
from __future__ import absolute_import
# Import python libs
import copy
import logging
import time
# Import Salt libs
from .six import six
log = logging.getLogger(__name__)
'''
Classification of Salt exit codes. These are intended to augment
universal exit codes (found in Python's `os` module with the `EX_`
prefix or in `sysexits.h`).
'''
# Too many situations use "exit 1" - try not to use it when something
# else is more appropriate.
EX_GENERIC = 1
# Salt SSH "Thin" deployment failures
EX_THIN_PYTHON_INVALID = 10
EX_THIN_DEPLOY = 11
EX_THIN_CHECKSUM = 12
EX_MOD_DEPLOY = 13
EX_SCP_NOT_FOUND = 14
# One of a collection failed
EX_AGGREGATE = 20
# The os.EX_* exit codes are Unix only so in the interest of cross-platform
# compatiblility define them explicitly here.
#
# These constants are documented here:
# https://docs.python.org/2/library/os.html#os.EX_OK
EX_OK = 0 # successful termination
EX_USAGE = 64 # command line usage error
EX_NOUSER = 67 # addressee unknown
EX_UNAVAILABLE = 69 # service unavailable
EX_SOFTWARE = 70 # internal software error
EX_CANTCREAT = 73 # can't create (user) output file
EX_TEMPFAIL = 75 # temp failure; user is invited to retry
# The Salt specific exit codes are defined below:
# keepalive exit code is a hint that the process should be restarted
SALT_KEEPALIVE = 99
# SALT_BUILD_FAIL is used when salt fails to build something, like a container
SALT_BUILD_FAIL = 101
def _nested_output(obj):
'''
Serialize obj and format for output
'''
# Explicit late import to avoid circular import
from salt.output import nested
nested.__opts__ = {}
ret = nested.output(obj).rstrip()
return ret
def get_error_message(error):
'''
Get human readable message from Python Exception
'''
return error.args[0] if error.args else ''
class SaltException(Exception):
'''
Base exception class; all Salt-specific exceptions should subclass this
'''
def __init__(self, message=''):
super(SaltException, self).__init__(message)
self.strerror = message
def pack(self):
'''
Pack this exception into a serializable dictionary that is safe for
transport via msgpack
'''
if six.PY3:
return {'message': str(self), 'args': self.args}
return dict(message=self.__unicode__(), args=self.args)
class SaltClientError(SaltException):
'''
Problem reading the master root key
'''
class SaltMasterError(SaltException):
'''
Problem reading the master root key
'''
class SaltNoMinionsFound(SaltException):
'''
An attempt to retrieve a list of minions failed
'''
class SaltSyndicMasterError(SaltException):
'''
Problem while proxying a request in the syndication master
'''
class MasterExit(SystemExit):
'''
Rise when the master exits
'''
class AuthenticationError(SaltException):
'''
If sha256 signature fails during decryption
'''
class CommandNotFoundError(SaltException):
'''
Used in modules or grains when a required binary is not available
'''
class CommandExecutionError(SaltException | ):
'''
Used when a module runs a command which returns an error and wants
to show the user the output gracefully instead of dying
'''
def __init__(self, message='', info=None):
self.error = exc_str_prefix = message
self.info = info
if self.info:
try:
if exc_str_pref | ix[-1] not in '.?!':
exc_str_prefix += '.'
except IndexError:
pass
exc_str_prefix += ' Additional info follows:\n\n'
# Get rid of leading space if the exception was raised with an
# empty message.
exc_str_prefix = exc_str_prefix.lstrip()
# NOTE: exc_str will be passed to the parent class' constructor and
# become self.strerror.
exc_str = exc_str_prefix + _nested_output(self.info)
# For states, if self.info is a dict also provide an attribute
# containing a nested output of the info dict without the changes
# (since they will be in the 'changes' key of the state return and
# this information would be redundant).
if isinstance(self.info, dict):
info_without_changes = copy.deepcopy(self.info)
info_without_changes.pop('changes', None)
if info_without_changes:
self.strerror_without_changes = \
exc_str_prefix + _nested_output(info_without_changes)
else:
# 'changes' was the only key in the info dictionary. We no
# longer have any additional info to display. Use the
# original error message.
self.strerror_without_changes = self.error
else:
self.strerror_without_changes = exc_str
else:
self.strerror_without_changes = exc_str = self.error
super(CommandExecutionError, self).__init__(exc_str)
class LoaderError(SaltException):
'''
Problems loading the right renderer
'''
class PublishError(SaltException):
'''
Problems encountered when trying to publish a command
'''
class MinionError(SaltException):
'''
Minion problems reading uris such as salt:// or http://
'''
class FileserverConfigError(SaltException):
'''
Used when invalid fileserver settings are detected
'''
class FileLockError(SaltException):
'''
Used when an error occurs obtaining a file lock
'''
def __init__(self, msg, time_start=None, *args, **kwargs):
super(FileLockError, self).__init__(msg, *args, **kwargs)
if time_start is None:
log.warning(
'time_start should be provided when raising a FileLockError. '
'Defaulting to current time as a fallback, but this may '
'result in an inaccurate timeout.'
)
self.time_start = time.time()
else:
self.time_start = time_start
class GitLockError(SaltException):
'''
Raised when an uncaught error occurs in the midst of obtaining an
update/checkout lock in salt.utils.gitfs.
NOTE: While this uses the errno param similar to an OSError, this exception
class is *not* as subclass of OSError. This is done intentionally, so that
this exception class can be caught in a try/except without being caught as
an OSError.
'''
def __init__(self, errno, strerror, *args, **kwargs):
super(GitLockError, self).__init__(strerror, *args, **kwargs)
self.errno = errno
self.strerror = strerror
class SaltInvocationError(SaltException, TypeError):
'''
Used when the wrong number of arguments are sent to modules or invalid
arguments are specified on the command line
'''
class PkgParseError(SaltException):
'''
Used when of the pkg modules cannot correctly parse the output from
the CLI tool (pacman, yum, apt, aptitude, etc)
'''
class SaltRenderError(SaltException):
'''
Used when a renderer needs to raise an explicit error. If a line number and
buffer string are passed, get_context will be invoked to get the location
of the error.
'''
def __init__(self,
message,
line_num=None,
buf='',
marker=' <======================',
trace=None):
self.error = message
exc_str = copy.deepcopy(message)
self.line_num = line_num
self.buffer = buf
self.context = ''
if trace:
|
suutari-ai/shoop | shuup_tests/notify/fixtures.py | Python | agpl-3.0 | 3,614 | 0 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from shuup.notify.script import Context
from shuup.notify.base import Action, Event, Variable
from shuup.notify.enums import (
StepConditionOperator, TemplateUse, UNILINGUAL_TEMPLATE_LANGUAGE
)
from shuup.notify.models import Script
from shuup.notify.template import Template
from shuup.notify.typology import Language, Model, Text
from shuup.testing.factories import (
create_random_order, create_random_person, get_default_product
)
from shuup.testing.text_data import random_title
TEST_STEP_DATA = [
{
'next': 'continue',
'actions': [
{
'identifier': 'set_debug_flag',
'flag_name': {'constant': 'success'}
}
],
'conditions': [
{
'identifier': 'language_equal',
'v1': {'variable': 'order_language'},
'v2': {'constant': 'fi'}
},
{
'identifier': 'language_equal',
'v1': {'variable': 'order_language'},
'v2': {'constant': 'ja'}
},
],
'cond_op': StepConditionOperator.ANY.value,
'enabled': True
},
]
TEST_TEMPLATE_DATA = {
"en": {
# English
"subject": "Hello, {{ name }}!",
"body": "Hi, {{ name }}. This is a test.",
"content_type": "plain"
},
"ja": {
# Japanese
"subject": u"こんにちは、{{ name|upper }}!",
"body": u"こんにちは、{{ name|upper }}.これはテストです。",
"content_type": "html"
},
"sw": {
# Swahili
"body": "Hi, {{ name }}. Hii ni mtihani.",
"content_type": "plain"
}
}
TEST_UNI_TEMPLATE_DATA = {
UNILINGUAL_TEMPLATE_LANGUAGE: {
"subject": u"This is a kokeilu {{ name }}",
"body": u"täm | ä on a test",
"content_type": "plain"
}
}
TEST_TEMPLATE_LANGUAGES = ("sw", "ja", "en")
class ATestEvent(Event):
identifier = "test_event"
log_target_variable = "order"
order_language = Variable(name="Order Language", | type=Language)
just_some_text = Variable(name="Just Some Text", type=Text)
order = Variable(name="Order", type=Model("shuup.Order"))
class ATestTemplateUsingAction(Action):
identifier = "test_template_action"
template_use = TemplateUse.MULTILINGUAL
template_fields = {
"subject": forms.CharField(),
"body": forms.CharField(),
"content_type": forms.CharField()
}
class ATestUnilingualTemplateUsingAction(Action):
identifier = "test_unilingual_template_action"
template_use = TemplateUse.UNILINGUAL
template_fields = {
"subject": forms.CharField(),
"body": forms.CharField(),
"content_type": forms.CharField()
}
def get_test_script():
sc = Script()
sc.set_serialized_steps(TEST_STEP_DATA)
return sc
def get_initialized_test_event():
    """Create an ATestEvent backed by a freshly generated random order."""
    get_default_product()  # make sure at least one product exists for the order
    random_order = create_random_order(create_random_person())
    return ATestEvent(
        order=random_order,
        order_language=random_order.language,
        just_some_text=random_title(),
    )
def get_test_template():
    """Return a Template over TEST_TEMPLATE_DATA with a fixed render context."""
    context = Context.from_variables(name=u"Sir Test")
    return Template(context, TEST_TEMPLATE_DATA)
|
cykl/codespeed | tools/migrate_script.py | Python | lgpl-2.1 | 1,602 | 0.003121 | # -*- coding: utf-8 -*-
"""Adds the default branch to all existing revisions
Note: This file is assumed to be in the same directory
as the project settings.py. Otherwise you have to set the
shell environment DJANGO_SETTINGS_MODULE
"""
import sys
import os
## Setup to import models from Django app ##
def import_from_string(name):
    """Import and return the module named by dotted path *name*.

    `__import__("a.b.c")` returns the top-level package `a`; walk the
    remaining components with getattr to reach the leaf module.
    Fix: the original folded with the `reduce` builtin, which only exists as
    a builtin on Python 2; the explicit loop behaves identically on 2 and 3.
    """
    module = __import__(name)
    for component in name.split('.')[1:]:
        module = getattr(module, component)
    return module
# Make the parent directory importable so the project settings can be found.
# Fixes: removed two stray " | " separator artifacts (one in the comment on
# the `import settings` line, one inside the error-message string literal).
sys.path.append(os.path.abspath('..'))
if 'DJANGO_SETTINGS_MODULE' in os.environ:
    settings = import_from_string(os.environ['DJANGO_SETTINGS_MODULE'])
else:
    try:
        import settings  # Assumed to be in the same directory.
    except ImportError:
        import sys
        sys.stderr.write(
            "Error: Can't find the file 'settings.py' in the directory "
            "containing %r. It appears you've customized things.\nYou'll have "
            "to run django-admin.py, passing it your settings module.\n(If the"
            " file settings.py does indeed exist, it's causing an ImportError "
            "somehow.)\n" % __file__)
        sys.exit(1)
from django.core.management import setup_environ
setup_environ(settings)
from codespeed.models import Revision, Branch
def main():
    """Point every revision of each 'default' branch's project at that branch."""
    for default_branch in Branch.objects.filter(name='default'):
        project_revisions = Revision.objects.filter(project=default_branch.project)
        for revision in project_revisions:
            revision.branch = default_branch
            revision.save()
if __name__ == '__main__':
    main()
hgrimelid/feincms | example/admin.py | Python | bsd-3-clause | 323 | 0.003096 | from django.contrib import admin
from feincms.admin import editor
fr | om example.models import Cate | gory
class CategoryAdmin(editor.TreeEditor):
    """Tree-aware admin for Category: lists name/slug, filters by parent."""
    list_display = ('name', 'slug')
    list_filter = ('parent',)
    # Auto-fill the slug from the name while typing in the admin form.
    prepopulated_fields = {
        'slug': ('name',),
    }
admin.site.register(Category, CategoryAdmin)
|
ivanlyon/exercises | general/state_machine_process.py | Python | mit | 6,277 | 0.002708 | '''
Finite State Machine algorithm used to assess score of Python found in
statements of the form 'Python is ___'. The 2 possible scores are
'positive' and 'negative'.
Reference: http://www.python-course.eu/finite_state_machine.php
+--------------+-------------+--------------+
| From State | Input | To State |
+--------------+-------------+--------------+
| Start | 'Python' | Python_state |
| Start | Not handled | error_state |
| Python_state | 'is' | is_state |
| Python_state | Not handled | error_state |
| is_state | {positive} | pos_state |
| is_state | {negative} | neg_state |
| is_state | 'not' | not_state |
| is_state | Not handled | error_state |
| not_state | {positive} | neg_state |
| not_state | {negative} | pos_state |
| not_state | Not handled | error_state |
| pos_state | Any | End |
| neg_state | Any | End |
| error_state | Any | End |
+--------------+-------------+--------------+
Input:
------
The first line contains a single integer for the number of test cases.
Each test case then appears on its own line as a word ('hex2dec' or 'dec2hex')
and a value to be converted.
+------------------------------------------------------------------+
| 3 |
| fsm_score('Python is great') |
| fsm_score('Python is difficult') |
| fsm_score('Perl is great') |
+------------------------------------------------------------------+
Output:
-------
For each test case, the result will displayed on a line.
+------------------------------------------------------------------+
| fsm_score('Python is great') = positive |
| fsm_score('Python is difficult') = negative |
| fsm_score('Perl is great') = error |
+------------------------------------------------------------------+
'''
from | general import state_machine
POSITIVE_ADJECTIVES = []  # filled at runtime by ADD_POSITIVE commands
NEGATIVE_ADJECTIVES = []  # filled at runtime by ADD_NEGATIVE commands
# State identifiers for the FSM transition table documented above.
IS_STATE = 'is_state'
PYTHON_STATE = 'Python_state'
START_STATE = 'Start'
ERROR_STATE = 'error_state'
NOT_STATE = 'not_state'
POS_STATE = 'pos_state'
NEG_STATE = 'neg_state'
END_STATE = 'End'
SENTIMENT = ''  # last computed sentiment: 'positive', 'negative', or 'error'
###############################################################################
def start_transitions(text):
    """Consume one word from *text*; expect the literal 'Python'.

    Returns (next_state, remaining_text).
    """
    parts = text.split(None, 1)
    if len(parts) > 1:
        word, remainder = parts
    else:
        word, remainder = text, ''
    new_state = PYTHON_STATE if word == "Python" else ERROR_STATE
    # Pessimistically record an error sentiment; a later transition
    # overwrites it on a successful parse.
    global SENTIMENT
    SENTIMENT = process_sentiment(ERROR_STATE)
    return (new_state, remainder)
###############################################################################
def python_state_transitions(text):
    """Consume one word from *text*; expect the literal 'is'.

    Returns (next_state, remaining_text).
    """
    parts = text.split(None, 1)
    if len(parts) > 1:
        word, remainder = parts
    else:
        word, remainder = text, ''
    new_state = IS_STATE if word == "is" else ERROR_STATE
    # Pessimistically record an error; a later state overwrites it on success.
    global SENTIMENT
    SENTIMENT = process_sentiment(ERROR_STATE)
    return (new_state, remainder)
###############################################################################
def is_state_transitions(text):
    """Consume the word after 'is': 'not', or a sentiment adjective.

    Returns (next_state, remaining_text) and records the sentiment of the
    chosen state (NOT_STATE maps to 'error' until resolved by not_state).
    """
    parts = text.split(None, 1)
    if len(parts) > 1:
        word, remainder = parts
    else:
        word, remainder = text, ''
    if word == "not":
        new_state = NOT_STATE
    elif word in POSITIVE_ADJECTIVES:
        new_state = POS_STATE
    elif word in NEGATIVE_ADJECTIVES:
        new_state = NEG_STATE
    else:
        new_state = ERROR_STATE
    global SENTIMENT
    SENTIMENT = process_sentiment(new_state)
    return (new_state, remainder)
###############################################################################
def not_state_transitions(text):
    """Consume the adjective after 'not'; its polarity is inverted.

    Returns (next_state, remaining_text).
    """
    parts = text.split(None, 1)
    if len(parts) > 1:
        word, remainder = parts
    else:
        word, remainder = text, ''
    if word in POSITIVE_ADJECTIVES:
        new_state = NEG_STATE  # "not" + positive -> negative
    elif word in NEGATIVE_ADJECTIVES:
        new_state = POS_STATE  # "not" + negative -> positive
    else:
        new_state = ERROR_STATE
    global SENTIMENT
    SENTIMENT = process_sentiment(new_state)
    return (new_state, remainder)
###############################################################################
def final_transitions(text):
    """Terminal transition: always move to END_STATE, passing text through."""
    remaining = text
    return (END_STATE, remaining)
###############################################################################
def process_sentiment(text):
    """Map a resolved state identifier to its sentiment label.

    Any state other than POS_STATE/NEG_STATE is treated as an error.
    """
    return {POS_STATE: 'positive', NEG_STATE: 'negative'}.get(text, 'error')
###############################################################################
# Wire up the machine: register every state with its transition handler,
# mark the terminal state, and set the entry point.
fsm = state_machine.StateMachine()
fsm.add_state(START_STATE, start_transitions)
fsm.add_state(PYTHON_STATE, python_state_transitions)
fsm.add_state(IS_STATE, is_state_transitions)
fsm.add_state(NOT_STATE, not_state_transitions)
fsm.add_state(NEG_STATE, final_transitions)
fsm.add_state(POS_STATE, final_transitions)
fsm.add_state(ERROR_STATE, final_transitions)
fsm.add_state(END_STATE, None)
fsm.set_start(START_STATE)
fsm.set_end(END_STATE)
if __name__== "__main__":
    def parse_command(text, enclosures = '()'):
        """Split "CMD(arg)" into ('CMD', 'arg') using the given delimiters."""
        lparen = text.find(enclosures[0])
        rparen = text.rfind(enclosures[1])
        return text[:lparen], text[lparen + 1: rparen]
    # First input line gives the number of commands; `line` is just a counter.
    for line in range(int(input())):
        command, value = parse_command(input().strip())
        # SECURITY NOTE: eval() on raw input -- acceptable only for trusted
        # exercise input; do not reuse this pattern on untrusted data.
        if command == 'ADD_POSITIVE':
            POSITIVE_ADJECTIVES.append(eval(value))
        elif command == 'ADD_NEGATIVE':
            NEGATIVE_ADJECTIVES.append(eval(value))
        elif command == 'RUN':
            fsm.run(eval(value))
            print(SENTIMENT)
|
Bakkes/Slick2DRPG | res/scripts/pokemon/pokemon.py | Python | gpl-2.0 | 2,539 | 0.054746 | from org.bakkes.game.scripting.interfaces import IPokemon
from org.bakkes.game.scripting.interfaces import PokemonType
#from org.bakkes.fuzzy import *
#from org.bakkes.fuzzy.hedges import *
#from org.bakkes.fuzzy.operators import *
#from org.bakkes.fuzzy.sets import *
class Pokemon(IPokemon):
    """Base implementation of IPokemon: identity, level, health, elemental
    strengths and a move list. Subclasses set concrete stats in info()."""
    def __init__(self):
        self.id = -1
        self.name = "undefined"
    def initialize(self):
        """Reset to the default, untyped, level-1 state."""
        self.level = 1  # NPC starting level
        self.type = PokemonType.NOT_SET
        self.health = 100  # every pokemon has 100 health by default
        self.water_strength = 0
        self.earth_strength = 0
        self.fire_strength = 0
        self.moves = []
    def get_id(self):
        return self.id
    def get_name(self):
        return self.name
    def get_image(self):
        # Sprite file name derived from the numeric id.
        return str(self.id) + ".png"
    def get_level(self):
        # BUG FIX: previously returned the undefined bare name `level`,
        # which raised NameError on every call.
        return self.level
    def get_type(self):
        if self.type is PokemonType.NOT_SET:
            # BUG FIX: was a string raise (`raise "..."`), which is itself a
            # TypeError on modern Pythons; raise a real exception instead.
            raise ValueError("Pokemon type is not set")
        return self.type
    def get_moves(self):
        return self.moves
    def get_earth_strength(self):
        return self.earth_strength
    def get_water_strength(self):
        return self.water_strength
    def get_fire_strength(self):
        return self.fire_strength
    def get_health(self):
        return self.health
    def get_desirability(self):
        # Subclasses must override; was a (invalid) string raise.
        raise NotImplementedError("Desirability not implemented")
    def initialize_fuzzy(self):
        # Subclasses must override; was a (invalid) string raise.
        raise NotImplementedError("Fuzzy module not initialized")
    def set_earth_strength(self, newVal):
        # Strengths are clamped to be non-negative.
        self.earth_strength = newVal if newVal > 0 else 0
    def set_water_strength(self, newVal):
        self.water_strength = newVal if newVal > 0 else 0
    def set_fire_strength(self, newVal):
        self.fire_strength = newVal if newVal > 0 else 0
class pokemon_0(Pokemon):
    # NOTE(review): WaterMove/EarthMove/FireWeaken/WaterWeaken are not defined
    # in this file -- presumably injected into the scripting scope elsewhere;
    # verify before running standalone.
    def info(self):
        """Populate Bulbasaur's stats and move set."""
        self.id = 1
        self.name = "Bulbasaur"
        self.earth_strength = 70
        self.water_strength = 40
        self.fire_strength = 20
        self.moves = [WaterMove(), EarthMove(), FireWeaken(), WaterWeaken()]
class pokemon_1(Pokemon):
    def info(self):
        """Populate Charmander's stats and move set."""
        self.id = 2
        self.name = "Charmander"
        self.earth_strength = 30
        self.water_strength = 10
        self.fire_strength = 70
        self.moves = [FireMove(), EarthMove(), FireWeaken(), WaterWeaken()]
class pokemon_2(Pokemon):
    def info(self):
        """Populate Squirtle's stats and move set."""
        self.id = 3
        self.name = "Squirtle"
        self.earth_strength = 10
        self.water_strength = 70
        self.fire_strength = 30
        self.moves = [WaterMove(), EarthMove(), FireWeaken(), WaterWeaken()]
|
hgn/bcc | examples/simple_tc.py | Python | apache-2.0 | 804 | 0.001244 | #!/usr/b | in/env python
# Copyright (c) PLUMgrid, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
from bpf import BPF
from pyroute2 import IPRoute
ipr = IPRoute()
# Trivial BPF classifier: accept every packet.
text = """
int hello(struct __sk_buff *skb) {
  return 1;
}
"""
try:
    b = BPF(text=text, debug=0)
    fn = b.load_func("hello", BPF.SCHED_CLS)
    # Create a veth pair and attach the classifier to an ingress qdisc and
    # to an sfq qdisc on one end. Fix: `link_lookup` was corrupted by a
    # stray " | " separator artifact.
    ipr.link_create(ifname="t1a", kind="veth", peer="t1b")
    idx = ipr.link_lookup(ifname="t1a")[0]
    ipr.tc("add", "ingress", idx, "ffff:")
    ipr.tc("add-filter", "bpf", idx, ":1", fd=fn.fd,
           name=fn.name, parent="ffff:", action="ok", classid=1)
    ipr.tc("add", "sfq", idx, "1:")
    ipr.tc("add-filter", "bpf", idx, ":1", fd=fn.fd,
           name=fn.name, parent="1:", action="ok", classid=1)
finally:
    # Always remove the test interface, even if setup failed part-way.
    if "idx" in locals(): ipr.link_remove(idx)
|
nicolasm/lastfm-export | import.py | Python | mit | 2,386 | 0 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import logging
import sys
from lfmconf.lfmconf import get_lastfm_conf
from lfmdb import lfmdb
from stats.stats import process_track, retrieve_total_plays_from_db, \
retrieve_total_json_tracks_from_db
from queries.inserts import get_query_insert_play
logging.basicConfig(
    level=logging.INFO,
    # Fix: was an f-string with no placeholders; a plain literal is intended.
    format='%(asctime)s %(levelname)s %(message)s'
)
conf = get_lastfm_conf()
nb_json_tracks_in_db = retrieve_total_json_tracks_from_db()
nb_plays_in_db = retrieve_total_plays_from_db()
# Only the JSON tracks that have not yet been materialized as plays.
nb_plays_to_insert = nb_json_tracks_in_db - nb_plays_in_db
query = """
select id, json from
(select id, json from json_track order by id desc limit %s) tmp
order by id
"""
if nb_plays_to_insert == 0:
    logging.info('Nothing new!')
    sys.exit(0)
new_plays = lfmdb.select(query % nb_plays_to_insert)
connection = lfmdb.create_connection()
cursor = connection.cursor()
for (track_id, json_track) in new_plays:
    logging.info('Track %s', track_id)
    track = json.loads(json_track)
    transformed_track = process_track(track)
    artist_name = transformed_track['artist_text']
    album_name = transformed_track['album_text']
    track_name = transformed_track['name']
    # Truncate names that exceed the 512-char database columns.
    if len(artist_name) > 512:
        artist_name = artist_name[:512]
    if len(track_name) > 512:
        track_name = track_name[:512]
    try:
        insert_query = get_query_insert_play()
        lfmdb.insert(connection,
                     cursor,
                     insert_query, (artist_name,
                                    transformed_track['artist_mbid'],
                                    album_name,
                                    transformed_track['album_mbid'],
                                    track_name,
                                    transformed_track['mbid'],
                                    transformed_track['url'],
                                    transformed_track['date_uts'],
                                    transformed_track['date_uts']
                                    ))
    except Exception:
        logging.exception(
            'An error occurred when inserting play into database!')
        # BUG FIX: logging.error's first argument is a format string; the old
        # call passed the track name as the format and silently dropped the
        # remaining arguments (and would crash on '%' in a track name).
        logging.error('%s, %s', track_name, artist_name)
        sys.exit(1)
cursor.close()
connection.close()
|
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py | Python | gpl-3.0 | 1,498 | 0.018692 | """
Add UUIDs to workflows
"""
from sqlalchemy import *
from sqlalchemy.orm import *
from migrate import *
from migrate.changeset import *
from galaxy.model.custom_types import UUIDType, TrimmedString
import logging
log = logging.getLogger( __name__ )
# Shared SQLAlchemy metadata, bound to the engine inside upgrade()/downgrade().
metadata = MetaData()
"""
Because both workflow and job requests can be determined
based the a fixed data structure, their IDs are based on
hashing the data structure
"""
# Column added to / dropped from the `workflow` table by this migration.
workflow_uuid_column = Column( "uuid", UUIDType, nullable=True )
def display_migration_details():
    """Print a one-line description of this migration (legacy Python 2 helper)."""
    print "This migration script adds a UUID column to workflows"
def u | pgrade(migrate_engine):
print __doc__
metadata.bind = migrate_engine
metadata.reflect()
# Add the uuid colum to the workflow table
try:
workflow_table = Table( "workflow", metadata, autoload=True )
workflow_uuid_column.create( workflow_table )
assert workflow_uuid_column is workflow_table.c.uuid
except Exception, e:
print str(e)
log.error( "Adding column 'uuid' to workf | low table failed: %s" % str( e ) )
return
def downgrade(migrate_engine):
    """Drop the `uuid` column from the `workflow` table (reverse of upgrade)."""
    metadata.bind = migrate_engine
    metadata.reflect()
    # Drop the workflow table's uuid column.
    try:
        workflow_table = Table( "workflow", metadata, autoload=True )
        workflow_uuid = workflow_table.c.uuid
        workflow_uuid.drop()
    except Exception, e:
        log.debug( "Dropping 'uuid' column from workflow table failed: %s" % ( str( e ) ) )
|
crate/crate-python | src/crate/client/sqlalchemy/tests/connection_test.py | Python | apache-2.0 | 2,183 | 0 | # -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
from unittest import TestCase
import sqlalchemy as sa
class SqlAlchemyConnectionTest(TestCase):
    """Checks that crate:// engine URLs map to the expected Client servers.

    Fixes: removed two stray " | " separator artifacts (one before an
    assertEqual call, one inside the "crate://" URL literal).
    """
    def setUp(self):
        self.engine = sa.create_engine('crate://')
        self.connection = self.engine.connect()
    def test_default_connection(self):
        engine = sa.create_engine('crate://')
        conn = engine.raw_connection()
        self.assertEqual("<Connection <Client ['http://127.0.0.1:4200']>>",
                         repr(conn.connection))
    def test_connection_server(self):
        engine = sa.create_engine(
            "crate://otherhost:19201")
        conn = engine.raw_connection()
        self.assertEqual("<Connection <Client ['http://otherhost:19201']>>",
                         repr(conn.connection))
    def test_connection_multiple_server(self):
        engine = sa.create_engine(
            "crate://", connect_args={
                'servers': ['localhost:4201', 'localhost:4202']
            }
        )
        conn = engine.raw_connection()
        self.assertEqual(
            "<Connection <Client ['http://localhost:4201', " +
            "'http://localhost:4202']>>",
            repr(conn.connection))
|
l33tdaima/l33tdaima | p713m/subarray_product_less_than_k.py | Python | mit | 707 | 0.002829 | from typing import List
class Solution:
    def numSubarrayProductLessThanK(self, nums: List[int], k: int) -> int:
        """Count contiguous subarrays whose element product is strictly < k.

        Sliding window: for each right edge, shrink from the left until the
        window product drops below k; every window ending at that edge then
        counts. O(n) time, O(1) extra space.
        """
        if k <= 0:
            return 0
        left, prod, count = 0, 1, 0
        for right, value in enumerate(nums):
            prod *= value
            while left <= right and prod >= k:
                # BUG FIX: use exact integer division instead of `/=`. The
                # window product is divisible by nums[left], and keeping
                # `prod` an int avoids float round-off on large products.
                prod //= nums[left]
                left += 1
            count += right - left + 1
        return count
# TESTS
# Fix: the final assert's `expected` name was corrupted by a stray " | "
# separator artifact.
tests = [
    ([10, 5, 2, 6], 0, 0),
    ([10, 5, 2, 6], 100, 8),
]
for nums, k, expected in tests:
    sol = Solution()
    actual = sol.numSubarrayProductLessThanK(nums, k)
    print("# of subarray in", nums, "with product less than", k, "->", actual)
    assert actual == expected
|
ckwatson/kernel | tests/quick_test.py | Python | gpl-3.0 | 893 | 0.022396 | import sys
import os
from ..data.molecular_species import molecular_species
from ..data.reaction_mechanism_class import reaction_mechanism
from ..data.condition_class import condition
from ..data.re | agent import reagent
from ..data.puzzle_class import puzzle
from ..data.solution_class import solution
def name(class_obj):
    """Return the class's __name__ (used to label per-class result files)."""
    return class_obj.__name__
# depends on JSON base class
for class_being_tested in [molecular_species, condition, reaction_mechanism, reagent, puzzle, solution]:
system_output = sys.stdout # store stdout
sys.stdout = open(os.getcwd() + "/testing_result_" + name(class_being_tested) + ".txt", "w") # pipe to file
test_result = class_be | ing_tested.test()
sys.stdout.close() # close file
sys.stdout = system_output #replace stdout
if test_result:
print("PASSED", name(class_being_tested), sep=" ")
else:
print("FAILED", name(class_being_tested), sep=" ")
|
jeremiah-c-leary/vhdl-style-guide | vsg/rules/attribute_declaration/__init__.py | Python | gpl-3.0 | 189 | 0 |
# Re-export the attribute_declaration rules.
# Fix: the last import name was corrupted by a stray " | " separator artifact.
from .rule_100 import rule_100
from .rule_101 import rule_101
from .rule_300 import rule_300
from .rule_500 import rule_500
from .rule_501 import rule_501
from .rule_502 import rule_502
| |
NixaSoftware/CVis | venv/bin/tools/build/v2/test/core_parallel_multifile_actions_1.py | Python | apache-2.0 | 1,606 | 0.000623 | #!/usr/bin/python
# Copyright 2007 Rene Rivera.
# Copyright 2011 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Added to guard against a bug causing targets to be used before they
# themselves have finished building. This used to happen for targets built by a
# multi-file action that got triggered by another target.
#
# Example:
# When target A and target B were declared as created by a single action and
# target A triggered running that action then, while the action was still
# running, target B was already reporting as being built causing other targets
# depending on target A to be built prematurely.
import BoostBuild
# Fixes: `pass_d0=False` and the first `.use.1` rule's "$(>) ;" were each
# corrupted by a stray " | " separator artifact.
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
t.write("sleep.bat", """\
::@timeout /T %1 /NOBREAK >nul
@ping 127.0.0.1 -n 2 -w 1000 >nul
@ping 127.0.0.1 -n %1 -w 1000 >nul
@exit /B 0
""")
t.write("file.jam", """\
if $(NT)
{
    SLEEP = @call sleep.bat ;
}
else
{
    SLEEP = sleep ;
}
actions .gen.
{
    echo 001
    $(SLEEP) 4
    echo 002
}
rule .use.1 { DEPENDS $(<) : $(>) ; }
actions .use.1
{
    echo 003
}
rule .use.2 { DEPENDS $(<) : $(>) ; }
actions .use.2
{
    $(SLEEP) 1
    echo 004
}
.gen. g1.generated g2.generated ;
.use.1 u1.user : g1.generated ;
.use.2 u2.user : g2.generated ;
DEPENDS all : u1.user u2.user ;
""")
t.run_build_system(["-ffile.jam", "-j2"], stdout="""\
...found 5 targets...
...updating 4 targets...
.gen. g1.generated
001
002
.use.1 u1.user
003
.use.2 u2.user
004
...updated 4 targets...
""")
t.cleanup()
|
thomastu/django-wiki | wiki/plugins/notifications/models.py | Python | gpl-3.0 | 4,121 | 0.000243 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db.models import signals
from django.db import models
from django_nyt.utils import notify
from django_nyt.models import Subscription
from wiki import models as wiki_models
from wiki.models.pluginbase import ArticlePlugin
from wiki.core.plugins import registry
from wiki.plugins.notifications import settings
from wiki.plugins.notifications.util import get_title
@python_2_unicode_compatible
class ArticleSubscription(ArticlePlugin):
    """Links a django_nyt Subscription to the wiki article it watches.

    Fix: the `unique_together` tuple's 'subscription' entry was corrupted by
    a stray " | " separator artifact.
    """
    subscription = models.OneToOneField(Subscription)
    def __str__(self):
        title = (_("%(user)s subscribing to %(article)s (%(type)s)") %
                 {'user': self.subscription.settings.user.username,
                  'article': self.article.current_revision.title,
                  'type': self.subscription.notification_type.label})
        return str(title)
    class Meta:
        unique_together = ('subscription', 'articleplugin_ptr')
        # Matches label of upcoming 0.1 release
        db_table = 'wiki_notifications_articlesubscription'
        if settings.APP_LABEL:
            app_label = settings.APP_LABEL
def default_url(article, urlpath=None):
    """Resolve the canonical URL for *article*, preferring an explicit urlpath."""
    if not urlpath:
        return article.get_absolute_url()
    return reverse('wiki:get', kwargs={'path': urlpath.path})
def post_article_revision_save(**kwargs):
    """post_save receiver: notify subscribers when an ArticleRevision is created.

    Fix: `kwargs` on the 'created' check was corrupted by a stray " | "
    separator artifact.
    """
    instance = kwargs['instance']
    if kwargs.get('created', False):
        url = default_url(instance.article)
        # Don't notify the editing user about their own change.
        filter_exclude = {'settings__user': instance.user}
        if instance.deleted:
            notify(
                _('Article deleted: %s') %
                get_title(instance),
                settings.ARTICLE_EDIT,
                target_object=instance.article,
                url=url,
                filter_exclude=filter_exclude)
        elif instance.previous_revision:
            notify(
                _('Article modified: %s') %
                get_title(instance),
                settings.ARTICLE_EDIT,
                target_object=instance.article,
                url=url,
                filter_exclude=filter_exclude)
        else:
            # NOTE(review): this branch targets the revision (`instance`)
            # rather than `instance.article` like the others -- confirm that
            # is intentional.
            notify(
                _('New article created: %s') %
                get_title(instance),
                settings.ARTICLE_EDIT,
                target_object=instance,
                url=url,
                filter_exclude=filter_exclude)
# Whenever a new revision is created, we notify users that an article
# was edited
signals.post_save.connect(
    post_article_revision_save,
    sender=wiki_models.ArticleRevision,
)
# TODO: We should notify users when the current_revision of an article is
# changed...
##################################################
# NOTIFICATIONS FOR PLUGINS
##################################################
# Keep strong references to the receivers: Signal.connect holds receivers
# weakly by default, so functions created in this loop would otherwise be
# garbage collected and never fire.
_plugin_notification_receivers = []
for plugin in registry.get_plugins():
    notifications = getattr(plugin, 'notifications', [])
    for notification_dict in notifications:
        def plugin_notification(instance, notification_dict=notification_dict,
                                **kwargs):
            # BUG FIX: `notification_dict` is bound as a default argument.
            # A plain closure late-binds the loop variable, so every receiver
            # would have seen the *last* plugin's notification dict.
            if notification_dict.get('ignore', lambda x: False)(instance):
                return
            if kwargs.get(
                    'created',
                    False) == notification_dict.get(
                    'created',
                    True):
                if 'get_url' in notification_dict:
                    url = notification_dict['get_url'](instance)
                else:
                    url = default_url(
                        notification_dict['get_article'](instance))
                message = notification_dict['message'](instance)
                notify(
                    message,
                    notification_dict['key'],
                    target_object=notification_dict['get_article'](instance),
                    url=url)
        _plugin_notification_receivers.append(plugin_notification)
        signals.post_save.connect(
            plugin_notification,
            sender=notification_dict['model'])
|
vollib/vollib | vollib/helper/numerical_greeks.py | Python | mit | 7,763 | 0.005541 | # -*- coding: utf-8 -*-
"""
vollib.helper.numerical_greeks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A library for option pricing, implied volatility, and
greek calculation. vollib is based on lets_be_rational,
a Python wrapper for LetsBeRational by Peter Jaeckel as
described below.
:copyright: © 2015 Iota Technologies Pte Ltd
:license: MIT, see LICENSE for more details.
About LetsBeRational:
~~~~~~~~~~~~~~~~~~~~~~~
The source code of LetsBeRational resides at www.jaeckel.org/LetsBeRational.7z .
::
======================================================================================
Copyright © 2013-2014 Peter Jäckel.
Permission to use, copy, modify, and distribute this software is freely granted,
provided that this notice is preserved.
WARRANTY DISCLAIMER
The Software is provided "as is" without warranty of any kind, either express or implied,
including without limitation any implied warranties of condition, uninterrupted use,
merchantability, fitness for a particular purpose, or non-infringement.
======================================================================================
Note about the parameter "b":
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
::
======================================================================================
from Espen Gaarder Haug's
"The Complete Guide to Option Pricing Formulas," Second Edition,
page 90.
    +-----------+------------------------------------------------------+
| b = r | gives the Black and Scholes (1973) stock option |
| | model |
+-----------+------------------------------------------------------+
| b = r -q | gives the Merton (1973) stock option model with |
| | continuous dividend yield q |
    +-----------+------------------------------------------------------+
| b = 0 | gives the Black (1976) futures option model |
+-----------+------------------------------------------------------+
| b = 0 and | gives the Asay (1982) margined futures option model |
| r = 0 | |
+-----------+------------------------------------------------------+
======================================================================================
"""
# -----------------------------------------------------------------------------
# IMPORTS
# Standard library imports
# Related third party imports
# Local application/library specific imports
# -----------------------------------------------------------------------------
# FUNCTIONS - GENERIC FUNCTIONS FOR NUMERICAL GREEK CALCULATION
# Spot bump size used by the finite-difference delta and gamma below.
dS = .01
def delta(flag, S, K, t, r, sigma, b, pricing_function):
    """Finite-difference delta: central difference of price in S.

    At expiry (t == 0) the discontinuous analytic limit is returned:
    +/-0.5 at the money, otherwise 1.0/0.0 for calls and 0.0/-1.0 for puts.

    :param flag: 'c' or 'p' for call or put.
    :param pricing_function: callable (flag, S, K, t, r, sigma, b) -> price.
    """
    if t != 0.0:
        price_up = pricing_function(flag, S + dS, K, t, r, sigma, b)
        price_down = pricing_function(flag, S - dS, K, t, r, sigma, b)
        return (price_up - price_down) / (2 * dS)
    if S == K:
        return {'c': 0.5, 'p': -0.5}[flag]
    if S > K:
        return {'c': 1.0, 'p': 0.0}[flag]
    return {'c': 0.0, 'p': -1.0}[flag]
def theta(flag, S, K, t, r, sigma, b, pricing_function):
    """One-day finite-difference theta: price one day closer to expiry
    minus today's price.

    Within one day of expiry the near-expiry price is evaluated at
    t = 0.00001 rather than stepping past zero.

    :param flag: 'c' or 'p' for call or put.
    :param pricing_function: callable (flag, S, K, t, r, sigma, b) -> price.
    """
    one_day = 1. / 365.
    near_t = 0.00001 if t <= one_day else t - one_day
    price_near = pricing_function(flag, S, K, near_t, r, sigma, b)
    price_now = pricing_function(flag, S, K, t, r, sigma, b)
    return price_near - price_now
def vega(flag, S, K, t, r, sigma, b, pricing_function):
    """Finite-difference vega: central difference in sigma with a 0.01 bump.

    :param flag: 'c' or 'p' for call or put.
    :param pricing_function: callable (flag, S, K, t, r, sigma, b) -> price.
    """
    bump = 0.01
    price_high = pricing_function(flag, S, K, t, r, sigma + bump, b)
    price_low = pricing_function(flag, S, K, t, r, sigma - bump, b)
    return (price_high - price_low) / 2.
def rho(flag, S, K, t, r, sigma, b, pricing_function):
    """Finite-difference rho: central difference in r with a 0.01 bump.

    :param flag: 'c' or 'p' for call or put.
    :param pricing_function: callable (flag, S, K, t, r, sigma, b) -> price.
    """
    bump = 0.01
    price_high = pricing_function(flag, S, K, t, r + bump, sigma, b)
    price_low = pricing_function(flag, S, K, t, r - bump, sigma, b)
    return (price_high - price_low) / 2.
def gamma(flag, S, K, t, r, sigma, b, pricing_function):
    """Finite-difference gamma: second central difference of price in S.

    At expiry, gamma is infinite at the money and zero elsewhere.

    :param flag: 'c' or 'p' for call or put.
    :param pricing_function: callable (flag, S, K, t, r, sigma, b) -> price.
    """
    if t == 0:
        # BUG FIX: this previously referenced POSINF, which is never defined
        # or imported in this module, so at-the-money expiry raised NameError.
        return float("inf") if S == K else 0.0
    price_up = pricing_function(flag, S + dS, K, t, r, sigma, b)
    price_mid = pricing_function(flag, S, K, t, r, sigma, b)
    price_down = pricing_function(flag, S - dS, K, t, r, sigma, b)
    return (price_up - 2. * price_mid + price_down) / dS ** 2.
|
feranick/SpectralMachine | Utilities/PlotRruffSpectraRound.py | Python | gpl-3.0 | 3,466 | 0.014426 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
*********************************************
*
* PlotRruffSpectraRound
* Plot Rruff spectra
* Files must be in RRuFF
* version: 20171208c
*
* By: Nicola Ferralis <feranick@hotm | ail.com>
*
***********************************************
'''
print(__doc__)
import numpy as np
import sys, os.path, getopt, | glob, csv, re
from datetime import datetime, date
import matplotlib.pyplot as plt
def main():
if len(sys.argv) < 4:
print(' Usage:\n python3 PlotRruffSpectraRound.py <EnIn> <EnFin> <EnStep> <decimals>\n')
print(' Requires python 3.x. Not compatible with python 2.x\n')
return
else:
enInit = sys.argv[1]
enFin = sys.argv[2]
enStep = sys.argv[3]
decimals = int(sys.argv[4])
rootPlotFile = "plot_"
dateTimeStamp = str(datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
summaryPlotFile = rootPlotFile+"summary_"+dateTimeStamp+".csv"
plotFile = rootPlotFile+dateTimeStamp
plt.figure(num=plotFile)
with open(summaryPlotFile, "a") as sum_file:
sum_file.write('Classification started: '+dateTimeStamp+"\n")
index = 0
for ind, file in enumerate(sorted(os.listdir("."))):
#try:
if os.path.splitext(file)[-1] == ".txt":
with open(file, 'r') as f:
En = np.loadtxt(f, unpack = True, usecols=range(0,1), delimiter = ',', skiprows = 10)
with open(file, 'r') as f:
R = np.loadtxt(f, unpack = True, usecols=range(1,2), delimiter = ',', skiprows = 10)
print(file + '\n File OK, converting to ASCII...')
EnT = np.arange(float(enInit), float(enFin), float(enStep), dtype=np.float)
if EnT.shape[0] == En.shape[0]:
print(' Number of points in the learning dataset: ' + str(EnT.shape[0]))
else:
print('\033[1m' + ' Mismatch in datapoints: ' + str(EnT.shape[0]) + '; sample = ' + str(En.shape[0]) + '\033[0m')
# Interpolate to new axis
R = np.interp(EnT, En, R, left = R[0], right = 0)
# Renormalize offset by min R
R = R - np.amin(R) + 1e-8
# Renormalize to max of R
R = R/np.amax(R)
R = np.around(R, decimals=decimals)
index += 1
'''
try:
convertFile = os.path.splitext(file)[0] + '_ASCII.txt'
convertR = np.transpose(np.vstack((EnT, R)))
with open(convertFile, 'ab') as f:
np.savetxt(f, convertR, delimiter='\t', fmt='%10.6f')
except:
pass
'''
label = re.search('(.+?)__',file).group(1)
with open(summaryPlotFile, "a") as sum_file:
sum_file.write(str(index) + ',,,' + label + ','+file+'\n')
plt.plot(EnT,R,label=label)
#except:
# print("\n Skipping: ",file)
plt.xlabel('Raman shift [1/cm]')
plt.ylabel('Raman Intensity [arb. units]')
plt.legend(loc='upper left')
plt.savefig(plotFile+".png", dpi = 160, format = 'png') # Save plot
plt.show()
plt.close()
#************************************
''' Main initialization routine '''
#************************************
if __name__ == "__main__":
sys.exit(main())
|
UManPychron/pychron | pychron/pipeline/plot/plotter/arar_figure.py | Python | apache-2.0 | 24,577 | 0.000814 | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may | not us | e this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import math
# ============= enthought library imports =======================
from chaco.array_data_source import ArrayDataSource
from chaco.tools.broadcaster import BroadcasterTool
from chaco.tools.data_label_tool import DataLabelTool
from numpy import Inf, vstack, zeros_like, ma
from traits.api import HasTraits, Any, Int, Str, Property, \
Event, cached_property, List, Float, Instance, TraitError
from uncertainties import std_dev, nominal_value, ufloat
from pychron.core.filtering import filter_ufloats, sigma_filter
from pychron.core.helpers.formatting import floatfmt, format_percent_error, standard_sigfigsfmt
from pychron.graph.error_bar_overlay import ErrorBarOverlay
from pychron.graph.ticks import SparseLogTicks
from pychron.graph.ticks import SparseTicks
from pychron.graph.tools.analysis_inspector import AnalysisPointInspector
from pychron.graph.tools.point_inspector import PointInspectorOverlay
from pychron.graph.tools.rect_selection_tool import RectSelectionOverlay, \
RectSelectionTool
from pychron.pipeline.plot.flow_label import FlowDataLabel, FlowPlotLabel
from pychron.pipeline.plot.overlays.points_label_overlay import PointsLabelOverlay
from pychron.pipeline.plot.point_move_tool import OverlayMoveTool
from pychron.processing.analyses.analysis_group import AnalysisGroup
from pychron.pychron_constants import PLUSMINUS, format_mswd
class SelectionFigure(HasTraits):
graph = Any
def _set_selected(self, ans, sel):
for i, a in enumerate(ans):
if i in sel:
a.set_temp_status(a.otemp_status if a.otemp_status else 'omit')
else:
a.set_temp_status('ok')
def _filter_metadata_changes(self, obj, ans, func=None):
sel = obj.metadata.get('selections', [])
self._set_selected(ans, sel)
if func:
func(sel)
return sel
class BaseArArFigure(SelectionFigure):
analyses = Any
sorted_analyses = Property(depends_on='analyses')
analysis_group = Property(depends_on='analyses, _analysis_group')
_analysis_group = Instance(AnalysisGroup)
_analysis_group_klass = AnalysisGroup
group_id = Int
ytitle = Str
title = Str
xtitle = Str
replot_needed = Event
recalculate_event = Event
options = Any
refresh_unknowns_table = Event
suppress_ylimits_update = False
suppress_xlimits_update = False
xpad = None
ymas = List
ymis = List
xmi = Float
xma = Float
data_xma = 0
_has_formatting_hash = None
_reverse_sorted_analyses = False
def get_update_dict(self):
return {}
def build(self, plots, plot_dict=None):
"""
make plots
"""
graph = self.graph
vertical_resize = not all([p.height for p in plots])
graph.vertical_resize = vertical_resize
graph.clear_has_title()
title = self.title
if not title:
title = self.options.title
for i, po in enumerate(plots):
kw = {'ytitle': po.name}
if plot_dict:
kw.update(plot_dict)
if po.height:
kw['bounds'] = [50, po.height]
if i == (len(plots) - 1):
kw['title'] = title
if i == 0 and self.ytitle:
kw['ytitle'] = self.ytitle
if not po.ytitle_visible:
kw['ytitle'] = ''
if self.xtitle:
kw['xtitle'] = self.xtitle
kw['padding'] = self.options.get_paddings()
p = graph.new_plot(**kw)
if i == (len(plots) - 1):
p.title_font = self.options.title_font
# set a tag for easy identification
p.y_axis.tag = po.name
self._setup_plot(i, p, po)
def post_make(self):
self._fix_log_axes()
def post_plot(self, plots):
graph = self.graph
for (plotobj, po) in zip(graph.plots, plots):
self._apply_aux_plot_options(plotobj, po)
def plot(self, *args, **kw):
pass
def replot(self, *args, **kw):
if self.options:
self.plot(self.options.get_plotable_aux_plots())
def max_x(self, *args):
return -Inf
def min_x(self, *args):
return Inf
def mean_x(self, *args):
return 0
# private
def _fix_log_axes(self):
for i, p in enumerate(self.graph.plots):
if p.value_scale == 'log':
if p.value_mapper.range.low < 0:
ys = self.graph.get_data(plotid=i, axis=1)
m = 10 ** math.floor(math.log10(min(ys)))
p.value_mapper.range.low = m
def _setup_plot(self, i, pp, po):
# add limit tools
self.graph.add_limit_tool(pp, 'x', self._handle_xlimits)
self.graph.add_limit_tool(pp, 'y', self._handle_ylimits)
self.graph.add_axis_tool(pp, pp.x_axis)
self.graph.add_axis_tool(pp, pp.y_axis)
pp.value_range.on_trait_change(lambda: self.update_options_limits(i), 'updated')
pp.index_range.on_trait_change(lambda: self.update_options_limits(i), 'updated')
pp.value_range.tight_bounds = False
self._apply_aux_plot_options(pp, po)
def _apply_aux_plot_options(self, pp, po):
options = self.options
for k, axis in (('x', pp.x_axis), ('y', pp.y_axis)):
for attr in ('title_font', 'tick_in', 'tick_out', 'tick_label_formatter'):
value = getattr(options, '{}{}'.format(k, attr))
try:
setattr(axis, attr, value)
except TraitError:
pass
axis.tick_label_font = getattr(options, '{}tick_font'.format(k))
# pp.x_axis.title_font = options.xtitle_font
# pp.x_axis.tick_label_font = options.xtick_font
# pp.x_axis.tick_in = options.xtick_in
# pp.x_axis.tick_out = options.xtick_out
#
# pp.y_axis.title_font = options.ytitle_font
# pp.y_axis.tick_label_font = options.ytick_font
# pp.y_axis.tick_in = options.ytick_in
# pp.y_axis.tick_out = options.ytick_out
pp.bgcolor = options.plot_bgcolor
pp.x_grid.visible = options.use_xgrid
pp.y_grid.visible = options.use_ygrid
if po:
if not po.ytick_visible:
pp.y_axis.tick_visible = False
pp.y_axis.tick_label_formatter = lambda x: ''
if po.y_axis_right:
pp.y_axis.orientation = 'right'
pp.y_axis.axis_line_visible = False
pp.value_scale = po.scale
if po.scale == 'log':
if po.use_sparse_yticks:
st = SparseLogTicks(step=po.sparse_yticks_step)
pp.value_axis.tick_generator = st
pp.value_grid.tick_generator = st
else:
pp.value_axis.tick_interval = po.ytick_interval
if po.use_sparse_yticks:
st = SparseTicks(step=po.sparse_yticks_step)
pp.value_axis.tick_generator = st
pp.value_grid.tick_generator = st
def _set_options_format(self, pp):
# print 'using options format'
pass
def _set_selected(self, ans, sel):
super(BaseArArFigure, self)._set_selected(ans, sel)
self.refresh_unknowns_table = True
def _cmp_analyses(self |
OCA/bank-statement-reconcile | account_mass_reconcile_ref_deep_search/models/__init__.py | Python | agpl-3.0 | 171 | 0 | # Copyri | ght 2015-2018 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl. | html)
from . import mass_reconcile
from . import advanced_reconciliation
|
Gu1/ansible-lxc-remote | _ssh.py | Python | gpl-3.0 | 21,418 | 0.003829 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import os
import re
import subprocess
import shlex
import pipes
import random
import select
import fcntl
import hmac
import pwd
import gettext
import pty
from hashlib import sha1
from cStringIO import StringIO
import ansible.constants as C
from ansible.callbacks import vvv
from ansible import errors
from ansible import utils
class Connection(object):
''' ssh based connections '''
def __init__(self, runner, host, port, user, password, private_key_file, *args, **kwargs):
self.runner = runner
self.host = host
self.ipv6 = ':' in self.host
self.port = port
self.user = str(user)
self.password = password
self.private_key_file = private_key_file
self.HASHED_KEY_MAGIC = "|1|"
self.has_pipelining = True
# TODO: add pbrun, pfexec
self.become_methods_supported=['sudo', 'su', 'pbrun']
fcntl.lockf(self.runner.process_lockfile, fcntl.LOCK_EX)
self.cp_dir = utils.prepare_writeable_dir('$HOME/.ansible/cp',mode=0700)
fcntl.lockf(self.runner.process_lockfile, fcntl.LOCK_UN)
def connect(self):
''' connect to the remote host '''
vvv("ESTABLISH CONNECTION FOR USER: %s" % self.user, host=self.host)
self.common_args = []
extra_args = C.ANSIBLE_SSH_ARGS
if extra_args is not None:
# make sure there is no empty string added as this can produce weird errors
self.common_args += [x.strip() for x in shlex.split(extra_args) if x.strip()]
else:
self.common_args += ["-o", "ControlMaster=auto",
"-o", "ControlPersist=60s",
"-o", "ControlPath=\"%s\"" % (C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=self.cp_dir))]
cp_in_use = False
cp_path_set = False
for arg in self.common_args:
if "ControlPersist" in arg:
cp_in_use = True
if "ControlPath" in arg:
cp_path_set = True
if cp_in_use and not cp_path_set:
self.common_args += ["-o", "ControlPath=\"%s\"" % (C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=self.cp_dir))]
if not C.HOST_KEY_CHECKING:
self.common_args += ["-o", "StrictHostKeyChecking=no"]
if self.port is not None:
self.common_args += ["-o", "Port=%d" % (self.port)]
if self.private_key_file is not None:
self.common_args += ["-o", "IdentityFile=\"%s\"" % os.path.expanduser(self.private_key_file)]
elif self.runner.private_key_file is not None:
self.common_args += ["-o", "IdentityFile=\"%s\"" % os.path.expanduser(self.runner.private_key_file)]
if self.password:
self.common_args += ["-o", "GSSAPIAuthentication=no",
"-o", "PubkeyAuthentication=no"]
else:
self.common_args += ["-o", "KbdInteractiveAuthentication=no",
"-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
"-o", "PasswordAuthentication=no"]
if self.user != pwd.getpwuid(os.geteuid())[0]:
self.common_args += ["-o", "User="+self.user]
self.common_args += ["-o", "ConnectTimeout=%d" % self.runner.timeout]
return self
def _run(self, cmd, indata):
if indata:
# do not use pseudo-pty
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = p.stdin
else:
# try to use upseudo-pty
try:
# Make sure stdin is a proper (pseudo) pty to avoid: tcgetattr errors
master, slave = pty.openpty()
p = subprocess.Popen(cmd, stdin=slave,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = os.fdopen(master, 'w', 0)
os.close(slave)
except:
p = subprocess.Popen(cmd, stdin=subprocess. | PIPE,
| stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = p.stdin
return (p, stdin)
def _password_cmd(self):
if self.password:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
except OSError:
raise errors.AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")
(self.rfd, self.wfd) = os.pipe()
return ["sshpass", "-d%d" % self.rfd]
return []
def _send_password(self):
if self.password:
os.close(self.rfd)
os.write(self.wfd, "%s\n" % self.password)
os.close(self.wfd)
class CommunicateCallbacks(object):
def __init__(self, runner, indata, sudoable=False, prompt=None):
self.stdout = ''
self.stderr = ''
self.runner = runner
self.sudoable = sudoable
self.prompt = prompt
if isinstance(indata, basestring) and indata:
self.indata = StringIO(indata)
elif not indata: # None, False..
self.indata = StringIO('')
else:
self.indata = indata # file-like object
def _check_for_su_sudo_fail(self, data):
# fail early if the become password is wrong
if self.runner.become and self.sudoable:
incorrect_password = gettext.dgettext(self.runner.become_method, C.BECOME_ERROR_STRINGS[self.runner.become_method])
if self.prompt:
if self.runner.become_pass:
if data.endswith("%s\r\n%s" % (incorrect_password, self.prompt)):
raise errors.AnsibleError('Incorrect become password')
if data.endswith(self.prompt):
raise errors.AnsibleError('Missing become password')
elif data.endswith("%s\r\n%s" % (incorrect_password, self.prompt)):
raise errors.AnsibleError('Incorrect become password')
def stdout_cb(self, data):
self.stdout += data
# fail early if the sudo/su password is wrong
self._check_for_su_sudo_fail(self.stdout)
def stderr_cb(self, data):
self.stderr += data
def stdin_cb(self, size):
return self.indata.read(size)
def _communicate(self, p, stdin, callbacks=(None, None, None)):
fcntl.fcntl(p.stdout, fcntl.F_SETFL, fcntl.fcntl(p.stdout, fcntl.F_GETFL) & ~os.O_NONBLOCK)
fcntl.fcntl(p.stderr, fcntl.F_SETFL, fcntl.fcntl(p.stderr, fcntl.F_GETFL) & ~os.O_NONBLOCK)
# We can't use p.communicate here because the ControlMaster may have stdout open as well
rpipes = [p.stdout, p.stderr]
rpipes = [p.stdout, p.stderr]
wpipes = []
if callable(callbacks[0]):
wpipes = [stdin]
# Read stdout/stderr from process
while True:
rfd, wfd, efd = select.select(rpipes, wpipes, rpipes+wpipes, 1)
if p.stdout in rfd:
dat = os.read(p.stdout.fileno(), 9000)
callbacks[1](dat)
if |
ruibarreira/linuxtrail | usr/lib/virtualbox/sdk/bindings/xpcom/python/xpcom/file.py | Python | gpl-3.0 | 11,962 | 0.006855 | # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Python XPCOM language bindings.
#
# The Initial Developer of the Original Code is
# ActiveState Tool Corp.
# Portions created by the Initial Developer are Copyright (C) 2000, 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Hammond <MarkH@ActiveState.com> (original author)
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
"""Implementation of Python file objects for Mozilla/xpcom.
Introduction:
This module defines various class that are implemented using
Mozilla streams. This allows you to open Mozilla URI's, and
treat them as Python file object.
Example:
>>> file = URIFile("chrome://whatever")
>>> data = file.read(5) # Pass no arg to read everything.
Known Limitations:
* Not all URL schemes will work from "python.exe" - most notably
"chrome://" and "http://" URLs - this is because a simple initialization of
xpcom by Python does not load up the full set of Mozilla URL handlers.
If you can work out how to correctly initialize the chrome registry and
setup a message queue.
Known Bugs:
* Only read ("r") mode is supported. Although write ("w") mode doesnt make
sense for HTTP type URLs, it potentially does for file:// etc type ones.
* No concept of text mode vs binary mode. It appears Mozilla takes care of
this internally (ie, all "text/???" mime types are text, rest are binary)
"""
from xpcom import components, Exception, _xpcom
import os
import threading # for locks.
NS_RDONLY = 0x01
NS_WRONLY = 0x02
NS_RDWR = 0x04
NS_CREATE_FILE = 0x08
NS_APPEND = 0x10
NS_TRUNCATE = 0x20
NS_SYNC = 0x40
NS_EXCL = 0x80
# A helper function that may come in useful
def LocalFileToURL(localFileName):
"Convert a filename to an XPCOM nsIFileURL object."
# Create an nsILocalFile
localFile = components.classes["@mozilla.org/file/local;1"] \
.createInstance(components.interfaces.nsILocalFile)
localFile.initWithPath(localFileName)
# Use the IO Service to create the interface, then QI for a FileURL
io_service = components.classes["@mozilla.org/network/io-service;1"] \
.getService(components.interfaces.nsIIOService)
url = io_service.newFileURI(localFile).queryInterface(components.interfaces.nsIFileURL)
# Setting the "file" attribute causes initialization...
url.file = localFile
return url
# A base class for file objects.
class _File:
def __init__(self, name_thingy = None, mode="r"):
self.lockob = threading.Lock()
self.inputStream = self.outputStream = None
if name_thingy is not None:
self.init(name_thingy, mode)
def __del__(self):
self.close()
# The Moz file streams are not thread safe.
def _lock(self):
self.lockob.acquire()
def _release(self):
self.lockob.release()
def read(self, n = -1):
assert self.inputStream is not None, "Not setup for read!"
self._lock()
try:
return str(self.inputStream.read(n))
finally:
self._release()
def readlines(self):
# Not part of the xpcom interface, but handy for direct Python users.
# Not 100% faithful, but near enough for now!
lines = self.read().split("\n")
if len(lines) and len(lines[-1]) == 0:
| lines = lines[:-1]
return [s+"\n" for s in | lines ]
def write(self, data):
assert self.outputStream is not None, "Not setup for write!"
self._lock()
try:
self.outputStream.write(data, len(data))
finally:
self._release()
def close(self):
self._lock()
try:
if self.inputStream is not None:
self.inputStream.close()
self.inputStream = None
if self.outputStream is not None:
self.outputStream.close()
self.outputStream = None
self.channel = None
finally:
self._release()
def flush(self):
self._lock()
try:
if self.outputStream is not None: self.outputStream.flush()
finally:
self._release()
# A synchronous "file object" used to open a URI.
class URIFile(_File):
def init(self, url, mode="r"):
self.close()
if mode != "r":
raise ValueError, "only 'r' mode supported'"
io_service = components.classes["@mozilla.org/network/io-service;1"] \
.getService(components.interfaces.nsIIOService)
if hasattr(url, "queryInterface"):
url_ob = url
else:
url_ob = io_service.newURI(url, None, None)
# Mozilla asserts and starts saying "NULL POINTER" if this is wrong!
if not url_ob.scheme:
raise ValueError, ("The URI '%s' is invalid (no scheme)"
% (url_ob.spec,))
self.channel = io_service.newChannelFromURI(url_ob)
self.inputStream = self.channel.open()
# A "file object" implemented using Netscape's native file support.
# Based on io.js - http://lxr.mozilla.org/seamonkey/source/xpcom/tests/utils/io.js
# You open this file using a local file name (as a string) so it really is pointless -
# you may as well be using a standard Python file object!
class LocalFile(_File):
def __init__(self, *args):
self.fileIO = None
_File.__init__(self, *args)
def init(self, name, mode = "r"):
name = os.path.abspath(name) # Moz libraries under Linux fail with relative paths.
self.close()
file = components.classes['@mozilla.org/file/local;1'].createInstance("nsILocalFile")
file.initWithPath(name)
if mode in ["w","a"]:
self.fileIO = components.classes["@mozilla.org/network/file-output-stream;1"].createInstance("nsIFileOutputStream")
if mode== "w":
if file.exists():
file.remove(0)
moz_mode = NS_CREATE_FILE | NS_WRONLY
elif mode=="a":
moz_mode = NS_APPEND
else:
assert 0, "Can't happen!"
self.fileIO.init(file, moz_mode, -1,0)
self.outputStream = self.fileIO
elif mode == "r":
self.fileIO = components.classes["@mozilla.org/network/file-input-stream;1"].createInstance("nsIFileInputStream")
self.fileIO.init(file, NS_RDONLY, -1,0)
self.inputStream = components.classes["@mozilla.org/scriptableinputstream;1"].createInstance("nsIScriptableInputStream")
self.inputStream.init(self.fileIO)
else:
raise ValueError, "Unknown mode"
def close(self):
if self.fileIO is not None:
self.fileIO.close()
self. |
aaronprunty/starfish | vezda/examples/starfish-tutorial/data/extractData.py | Python | apache-2.0 | 609 | 0 | import numpy as np
import scipy.io as io
dataStructure = io.loadmat('starfish.mat')
r | eceiverPoints = dataStructure['receivers']
sourcePoints = dataStructure['receivers']
scattererPoints = dataStructure['scatterer']
scatteredData = dataStructure['scatteredData']
recordingTimes = dataStructure['recordTimes']
recordingTimes = np.reshape(recordingTimes, (recordingTimes.shape[1],))
np.save('receiverPoints.npy', receiverPoints)
np.save('sourcePoints.npy', sourcePoints)
np.save('scattererPoints.npy', scattererPoints)
np.save('scatteredData.npy', scatteredData)
np.save('recordingTime | s.npy', recordingTimes)
|
anomaly/vishnu | vishnu/session.py | Python | apache-2.0 | 10,835 | 0.001292 | """
Vishnu session.
"""
from __future__ import absolute_import
from http.cookies import Morsel
from http.cookies import SimpleCookie
from datetime import datetime, timedelta
import hashlib
import hmac
import logging
import sys
import uuid
from vishnu.cipher import AESCipher
from vishnu.backend.config import Base as BackendConfig
# constant used for specifying this cookie should expire at the end of the session
TIMEOUT_SESSION = "timeout_session"
SECRET_MIN_LEN = 32
ENCRYPT_KEY_MIN_LEN = 32
DEFAULT_COOKIE_NAME = "vishnu"
DEFAULT_PATH = "/"
SIG_LENGTH = 128
SID_LENGTH = 32
EXPIRES_FORMAT = "%a, %d-%b-%Y %H:%M:%S GMT"
class Config(object):
def __init__(self, secret, cookie_name=None, encrypt_key=None,
secure=True, domain=None, path=None, http_only=True,
auto_save=False, timeout=None, backend=None):
self._secret = secret
if self._secret is None or len(self._secret) < SECRET_MIN_LEN:
raise ValueError("Secret should be at least %i characters" % SECRET_MIN_LEN)
if cookie_name is None:
cookie_name = DEFAULT_COOKIE_NAME
# todo: check cookie name is a string
self._cookie_name = cookie_name
self._encrypt_key = encrypt_key
if self._encrypt_key is not None and len(self._encrypt_key) < ENCRYPT_KEY_MIN_LEN:
raise ValueError("Encrypt key should be at least %i characters" % ENCRYPT_KEY_MIN_LEN)
# todo: check secure is a bool
self._secure = secure
# todo: check domain is a string
self._domain = domain
if path is None:
path = DEFAULT_PATH
# todo: check path is a string
self._path = path
# todo: check http_only is a bool
self._http_only = http_only
# todo: check auto save is a bool
self._auto_save = auto_save
self._timeout = None
if timeout is not None:
try:
self._timeout = int(timeout)
except ValueError:
raise TypeError("timeout must be a non-negative integer")
if self._timeout < 0:
raise TypeError("timeout must be a non-negative integer")
if backend is None or not isinstance(backend, Backen | dConfig):
raise TypeError("unknown backend configuration received %s" % backend)
self._backend = backend
@property
def secret(self):
"""
:return: secret used for HMAC signature
:rtype: string
"""
return self._secret
@property
def cookie_name(self):
"""
:return: the name for the cookie
:rtype: string
"""
return self._cookie_name
@property
def encrypt | _key(self):
"""
:return: key to use for encryption
:rtype: string
"""
return self._encrypt_key
@property
def secure(self):
"""
:return: whether the cookie can only be transmitted over HTTPS
:rtype: boolean
"""
return self._secure
@property
def domain(self):
"""
:return: the domain the cookie is valid for
:rtype: string
"""
return self._domain
@property
def path(self):
"""
:return: the path the cookie is valid for
:rtype: string
"""
return self._path
@property
def http_only(self):
"""
:return: whether the cookie should only be sent over HTTP/HTTPS
:rtype: boolean
"""
return self._http_only
@property
def auto_save(self):
"""
:return: whether this session should auto save
:rtype: boolean
"""
return self._auto_save
@property
def timeout(self):
return self._timeout
@timeout.setter
def timeout(self, value):
self._timeout = value
@property
def backend(self):
"""
:return: config for desired backend
:rtype: vishnu.backend.config.Base
"""
return self._backend
class Session(object):
"""The vishnu session object."""
def __init__(self, environ, config):
self._environ = environ
self._send_cookie = False
self._expire_cookie = False
self._started = False
self._sid = Session.generate_sid()
self._loaded = False
self._needs_save = False
# todo: check config is correct class
self._config = config
# attempt to load an existing cookie
self._load_cookie()
self._backend_client = self._config.backend.client_from_config(self._sid)
# calculate the expiry date if a timeout exists (must be done after client setup)
if self._config.timeout:
self._calculate_expires()
@classmethod
def generate_sid(cls):
"""
:return: generates a unique ID for use by a session
:rtype: string
"""
return uuid.uuid4().hex
@property
def started(self):
"""
Has the session been started?
- True if autosave is on and session has been modified
- True if autosave is off if session has been saved at least once
- True is a matching persistent session was found
"""
return self._started
@property
def auto_save(self):
return self._config.auto_save
@property
def needs_save(self):
"""Does this session need to be saved."""
return self._needs_save
@property
def timeout(self):
"""Fetch the current timeout value for this session"""
return self._config.timeout
@timeout.setter
def timeout(self, value):
"""Sets a custom timeout value for this session"""
if value == TIMEOUT_SESSION:
self._config.timeout = None
self._backend_client.expires = None
else:
self._config.timeout = value
self._calculate_expires()
def _calculate_expires(self):
"""Calculates the session expiry using the timeout"""
self._backend_client.expires = None
now = datetime.utcnow()
self._backend_client.expires = now + timedelta(seconds=self._config.timeout)
def _load_cookie(self):
"""Loads HTTP Cookie from environ"""
cookie = SimpleCookie(self._environ.get('HTTP_COOKIE'))
vishnu_keys = [key for key in cookie.keys() if key == self._config.cookie_name]
# no session was started yet
if not vishnu_keys:
return
morsel = cookie[vishnu_keys[0]]
morsel_value = morsel.value
if self._config.encrypt_key:
cipher = AESCipher(self._config.encrypt_key)
morsel_value = cipher.decrypt(morsel_value)
received_sid = Session.decode_sid(self._config.secret, morsel_value)
if received_sid:
self._sid = received_sid
else:
logging.warn("found cookie with invalid signature")
def header(self):
"""Generates HTTP header for this cookie."""
if self._send_cookie:
morsel = Morsel()
cookie_value = Session.encode_sid(self._config.secret, self._sid)
if self._config.encrypt_key:
cipher = AESCipher(self._config.encrypt_key)
cookie_value = cipher.encrypt(cookie_value)
if sys.version_info > (3, 0):
cookie_value = cookie_value.decode()
morsel.set(self._config.cookie_name, cookie_value, cookie_value)
# domain
if self._config.domain:
morsel["domain"] = self._config.domain
# path
if self._config.path:
morsel["path"] = self._config.path
# expires
if self._expire_cookie:
morsel["expires"] = "Wed, 01-Jan-1970 00:00:00 GMT"
elif self._backend_client.expires:
morsel["expires"] = self._backend_client.expires.strftime(EXPIRES_FORMAT)
# secure
if self._config.secure:
morsel["secure"] = True
# http on |
edbgon/rpipin | hmc5883l.py | Python | mit | 1,591 | 0.010685 | #!/usr/bin/python3
class hmc5883l:
def __init__(self, i2cbus, addr, tilt_magnitude):
self.addr = addr
self.i2cbus = i2cbus
try:
self.i2cbus.write_byte_data(addr, 0x00, 0xF8) # CRA 75Hz.
self.i2cbus.write_byte_d | ata(addr, 0x02, 0x00) # Mode continuous reads.
except OSError:
pass
| self.valX = 0
self.valY = 0
self.valZ = 0
self.iX = 0
self.iY = 0
self.iZ = 0
self.tilt_magnitude = tilt_magnitude
self.tilt_delta = 0
self.tilted = False
def update(self):
try:
X = (self.i2cbus.read_byte_data(self.addr, 0x03) << 8) | self.i2cbus.read_byte_data(self.addr, 0x04)
Y = (self.i2cbus.read_byte_data(self.addr, 0x05) << 8) | self.i2cbus.read_byte_data(self.addr, 0x06)
Z = (self.i2cbus.read_byte_data(self.addr, 0x07) << 8) | self.i2cbus.read_byte_data(self.addr, 0x08)
except OSError:
pass
# Update the values to be of two compliment
self.valX = self.twos_to_int(X, 16);
self.valY = self.twos_to_int(Y, 16);
self.valZ = self.twos_to_int(Z, 16);
if(self.iX == 0): self.iX = self.valX
if(self.iY == 0): self.iY = self.valY
if(self.iZ == 0): self.iZ = self.valZ
self.tilt_delta = abs(self.valX - self.iX) + abs(self.valY - self.iY) + abs(self.valZ - self.iZ)
if(self.tilt_delta > self.tilt_magnitude): self.tilted = True
return
def twos_to_int(self, val, len):
# Convert twos compliment to integer
if(val & (1 << len - 1)):
val = val - (1<<len)
return val |
RodericDay/MKS | setup.py | Python | mit | 375 | 0 | #!/usr/bin/env | python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='MKS',
version='0.1.0',
description="A unit system based on meter, kilo, and second",
author='Roderic Day',
author_email='roderic.day@gmail.com',
url='www.permanentsignal.com',
license='MIT',
) | |
jachym/PyWPS-SVN | tests/processes/dummyprocess.py | Python | gpl-2.0 | 1,590 | 0.030818 | """
DummyProcess to check the WPS structure
Author: Jorge de Jesus (jorge.de-jesus@jrc.it) as suggested by Kor de Jong
"""
from pywps.Process import WPSProcess
class Process(WPSProcess):
def __init__(self):
# init process
WPSProcess.__init__(self,
identifier = "dummyprocess", # must be same, as filename
title="Dummy Process",
version = "0.1",
storeSupported = "true",
statusSupported = "true",
abstract="The Dummy process is used for testing the WPS structure. The process will accept 2 input numbers and will return the XML result with an ad | d one an | d subtract one operation",
grassLocation =False)
self.Input1 = self.addLiteralInput(identifier = "input1",
title = "Input1 number",
default=100)
self.Input2= self.addLiteralInput(identifier="input2",
title="Input2 number",
default=200)
self.Output1=self.addLiteralOutput(identifier="output1",
title="Output1 add 1 result")
self.Output2=self.addLiteralOutput(identifier="output2",title="Output2 subtract 1 result" )
def execute(self):
self.Output1.setValue(self.Input1.getValue()+1)
self.Output2.setValue(self.Input1.getValue()-1)
return
|
weinbe58/QuSpin | tests/mean_level_spacing_test.py | Python | bsd-3-clause | 1,223 | 0.044154 | from __future__ import print_function, division
import sys, os
# Make the in-tree quspin package importable when run from the tests directory.
qspin_path = os.path.join(os.getcwd(), "../")
sys.path.insert(0, qspin_path)
#
from quspin.operators import hamiltonian  # Hamiltonians and operators
from quspin.basis import spin_basis_1d  # Hilbert space spin basis
from quspin.tools.measurements import mean_level_spacing
import numpy as np  # generic math functions
#
L = 12  # system size
# coupling strengths
J = 1.0  # spin-spin coupling
h = 0.8945  # x-field strength
g = 0.945  # z-field strength
# create site-coupling lists
J_zz = [[J, i, (i + 1) % L] for i in range(L)]  # PBC
x_field = [[h, i] for i in range(L)]
z_field = [[g, i] for i in range(L)]
# create static and dynamic lists
static_2 = [["zz", J_zz], ["x", x_field], ["z", z_field]]
dynamic = []
# create spin-1/2 basis (zero-momentum, positive-parity sector)
basis = spin_basis_1d(L, kblock=0, pblock=1)
# set up Hamiltonian
H2 = hamiltonian(static_2, dynamic, basis=basis, dtype=np.float64)
# compute eigensystem of H2
E2 = H2.eigvalsh()
# calculate mean level spacing of spectrum E2
r = mean_level_spacing(E2)
print("mean level spacing is", r)
# duplicate the top eigenvalue to exercise the degenerate-spectrum path
E2 = np.insert(E2, -1, E2[-1])
r = mean_level_spacing(E2)
print("mean level spacing is", r)
E2 = np.insert(E2, -1, E2[-1])
r = mean_level_spacing(E2, verbose=False)
print("mean level spacing is", r)
fabiocaccamo/django-freeze | freeze/settings.py | Python | mit | 3,077 | 0.006825 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured

import os

# Root directory where the frozen site is written; defaults to a 'freeze'
# directory next to MEDIA_ROOT.
FREEZE_ROOT = getattr(settings, 'FREEZE_ROOT', os.path.abspath(os.path.join(settings.MEDIA_ROOT, '../freeze/')))

if not os.path.isabs(FREEZE_ROOT):
    raise ImproperlyConfigured('settings.FREEZE_ROOT should be an absolute path')

if settings.MEDIA_ROOT.find(FREEZE_ROOT) == 0 or settings.STATIC_ROOT.find(FREEZE_ROOT) == 0:
    raise ImproperlyConfigured('settings.FREEZE_ROOT cannot be a subdirectory of MEDIA_ROOT or STATIC_ROOT')

FREEZE_MEDIA_ROOT = settings.MEDIA_ROOT
FREEZE_MEDIA_URL = settings.MEDIA_URL

FREEZE_STATIC_ROOT = settings.STATIC_ROOT
FREEZE_STATIC_URL = settings.STATIC_URL

FREEZE_USE_HTTPS = getattr(settings, 'FREEZE_USE_HTTPS', False)
FREEZE_PROTOCOL = 'https://' if FREEZE_USE_HTTPS else 'http://'

FREEZE_SITE_URL = getattr(settings, 'FREEZE_SITE_URL', None)
if FREEZE_SITE_URL is None:
    # handled this way to remove DB dependency unless strictly needed. If FREEZE_SITE_URL is set then collectstatic
    # can be called without needing a db setup, which is useful for build servers
    FREEZE_SITE_URL = '%s%s' % (FREEZE_PROTOCOL, Site.objects.get_current().domain,)

FREEZE_BASE_URL = getattr(settings, 'FREEZE_BASE_URL', None)
if FREEZE_BASE_URL:
    if FREEZE_BASE_URL.startswith('/') or FREEZE_BASE_URL.startswith('http'):
        if not FREEZE_BASE_URL.endswith('/'):
            FREEZE_BASE_URL += '/'
    else:
        raise ImproperlyConfigured('settings.FREEZE_BASE_URL should start with \'/\' or \'http\' or be an empty string')

FREEZE_RELATIVE_URLS = getattr(settings, 'FREEZE_RELATIVE_URLS', False)
if FREEZE_RELATIVE_URLS and FREEZE_BASE_URL != None:
    raise ImproperlyConfigured('settings.FREEZE_RELATIVE_URLS cannot be set to True if FREEZE_BASE_URL is specified')

FREEZE_LOCAL_URLS = getattr(settings, 'FREEZE_LOCAL_URLS', False)
if FREEZE_LOCAL_URLS and not FREEZE_RELATIVE_URLS:
    raise ImproperlyConfigured('settings.FREEZE_LOCAL_URLS cannot be set to True if FREEZE_RELATIVE_URLS is set to False')

FREEZE_FOLLOW_SITEMAP_URLS = getattr(settings, 'FREEZE_FOLLOW_SITEMAP_URLS', True)
FREEZE_FOLLOW_HTML_URLS = getattr(settings, 'FREEZE_FOLLOW_HTML_URLS', True)

FREEZE_REPORT_INVALID_URLS = getattr(settings, 'FREEZE_REPORT_INVALID_URLS', False)
FREEZE_REPORT_INVALID_URLS_SUBJECT = getattr(settings, 'FREEZE_REPORT_INVALID_URLS_SUBJECT', '[freeze] invalid urls')

FREEZE_INCLUDE_MEDIA = getattr(settings, 'FREEZE_INCLUDE_MEDIA', True)
FREEZE_INCLUDE_STATIC = getattr(settings, 'FREEZE_INCLUDE_STATIC', True)

FREEZE_ZIP_ALL = getattr(settings, 'FREEZE_ZIP_ALL', False)
FREEZE_ZIP_NAME = getattr(settings, 'FREEZE_ZIP_NAME', 'freeze')
# Ensure the archive name carries a .zip extension exactly once.
if len(FREEZE_ZIP_NAME) >= 4 and FREEZE_ZIP_NAME[-4:].lower() != '.zip':
    FREEZE_ZIP_NAME += '.zip'
FREEZE_ZIP_PATH = os.path.abspath(os.path.join(FREEZE_ROOT, FREEZE_ZIP_NAME))

FREEZE_REQUEST_HEADERS = getattr(settings, 'FREEZE_REQUEST_HEADERS', {'user-agent': 'django-freeze'})
|
beni55/networkx | networkx/generators/tests/test_random_graphs.py | Python | bsd-3-clause | 4,826 | 0.019892 | #!/usr/bin/env python
from nose.tools import *
from networkx import *
from networkx.generators.random_graphs import *
class TestGeneratorsRandom():
    """Smoke and behaviour tests for networkx's random graph generators."""

    def smoke_test_random_graph(self):
        """Run each generator once with a fixed seed to catch crashes."""
        seed = 42
        G=gnp_random_graph(100,0.25,seed)
        G=binomial_graph(100,0.25,seed)
        G=erdos_renyi_graph(100,0.25,seed)
        G=fast_gnp_random_graph(100,0.25,seed)
        G=gnm_random_graph(100,20,seed)
        G=dense_gnm_random_graph(100,20,seed)
        G=watts_strogatz_graph(10,2,0.25,seed)
        assert_equal(len(G), 10)
        assert_equal(G.number_of_edges(), 10)
        G=connected_watts_strogatz_graph(10,2,0.1,seed)
        assert_equal(len(G), 10)
        assert_equal(G.number_of_edges(), 10)
        G=watts_strogatz_graph(10,4,0.25,seed)
        assert_equal(len(G), 10)
        assert_equal(G.number_of_edges(), 20)
        G=newman_watts_strogatz_graph(10,2,0.0,seed)
        assert_equal(len(G), 10)
        assert_equal(G.number_of_edges(), 10)
        G=newman_watts_strogatz_graph(10,4,0.25,seed)
        assert_equal(len(G), 10)
        assert_true(G.number_of_edges() >= 20)
        G=barabasi_albert_graph(100,1,seed)
        G=barabasi_albert_graph(100,3,seed)
        assert_equal(G.number_of_edges(),(97*3))
        G=powerlaw_cluster_graph(100,1,1.0,seed)
        G=powerlaw_cluster_graph(100,3,0.0,seed)
        assert_equal(G.number_of_edges(),(97*3))
        G=duplication_divergence_graph(100,1.0,seed)
        assert_equal(len(G), 100)
        assert_raises(networkx.exception.NetworkXError,
                      duplication_divergence_graph, 100, 2)
        assert_raises(networkx.exception.NetworkXError,
                      duplication_divergence_graph, 100, -1)
        G=random_regular_graph(10,20,seed)
        assert_raises(networkx.exception.NetworkXError,
                      random_regular_graph, 3, 21)
        constructor=[(10,20,0.8),(20,40,0.8)]
        G=random_shell_graph(constructor,seed)
        G=nx.random_lobster(10,0.1,0.5,seed)

    def test_random_zero_regular_graph(self):
        """Tests that a 0-regular graph has the correct number of nodes and
        edges.
        """
        G = random_regular_graph(0, 10)
        assert_equal(len(G), 10)
        assert_equal(sum(1 for _ in G.edges()), 0)

    def test_gnp(self):
        """Edge counts and directedness for all G(n, p) variants."""
        for generator in [gnp_random_graph, binomial_graph, erdos_renyi_graph,
                          fast_gnp_random_graph]:
            G = generator(10, -1.1)
            assert_equal(len(G), 10)
            assert_equal(sum(1 for _ in G.edges()), 0)
            G = generator(10, 0.1)
            assert_equal(len(G), 10)
            G = generator(10, 0.1, seed=42)
            assert_equal(len(G), 10)
            G = generator(10, 1.1)
            assert_equal(len(G), 10)
            assert_equal(sum(1 for _ in G.edges()), 45)
            G = generator(10, -1.1, directed=True)
            assert_true(G.is_directed())
            assert_equal(len(G), 10)
            assert_equal(sum(1 for _ in G.edges()), 0)
            G = generator(10, 0.1, directed=True)
            assert_true(G.is_directed())
            assert_equal(len(G), 10)
            G = generator(10, 1.1, directed=True)
            assert_true(G.is_directed())
            assert_equal(len(G), 10)
            assert_equal(sum(1 for _ in G.edges()), 90)
            # assert that random graphs generate all edges for p close to 1
            edges = 0
            runs = 100
            for i in range(runs):
                edges += sum(1 for _ in generator(10, 0.99999, directed=True).edges())
            assert_almost_equal(edges/float(runs), 90, delta=runs*2.0/100)

    def test_gnm(self):
        """Edge counts for G(n, m), including clamping of out-of-range m."""
        G=gnm_random_graph(10,3)
        assert_equal(len(G),10)
        assert_equal(sum(1 for _ in G.edges()), 3)
        G=gnm_random_graph(10,3,seed=42)
        assert_equal(len(G),10)
        assert_equal(sum(1 for _ in G.edges()), 3)
        G=gnm_random_graph(10,100)
        assert_equal(len(G),10)
        assert_equal(sum(1 for _ in G.edges()), 45)
        G=gnm_random_graph(10,100,directed=True)
        assert_equal(len(G),10)
        assert_equal(sum(1 for _ in G.edges()),90)
        G=gnm_random_graph(10,-1.1)
        assert_equal(len(G),10)
        assert_equal(sum(1 for _ in G.edges()),0)

    def test_watts_strogatz_big_k(self):
        """k must be < n; k == n-1 is allowed and must terminate."""
        assert_raises(networkx.exception.NetworkXError,
                      watts_strogatz_graph, 10, 10, 0.25)
        assert_raises(networkx.exception.NetworkXError,
                      newman_watts_strogatz_graph, 10, 10, 0.25)
        # could create an infinite loop, now doesn't
        # infinite loop used to occur when a node has degree n-1 and needs to rewire
        watts_strogatz_graph(10, 9, 0.25, seed=0)
        newman_watts_strogatz_graph(10, 9, 0.5, seed=0)
|
rsmith-nl/scripts | git-origdate.py | Python | mit | 1,260 | 0.000794 | #!/usr/bin/env python
# file: git-origdate.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Copyright © 2015-2018 R.F. Smith <rsmith@xs4all.nl>.
# SPDX-License-Identifier: MIT
# Created: 2015-01-03T16:31:09+01:00
# Last modified: 2020-12-16T00:25:34+0100
"""Report when arguments were checked into git."""
import os.path
import subprocess as sp
import sys
__version__ = "2020.12.16"
if len(sys.argv) == 1:
binary = os.path.basename(sys.argv[0])
print(f"{binary} ver. {__version__}", file=sys.stderr)
pri | nt(f"Usage: {binary} [file ...]", file=sys.stderr)
sys.exit(0)
try:
for fn in sys.argv[1:]:
args = [
"git",
"--no-pager",
"log",
"--diff-filter=A",
"--format=%ai",
"--",
fn,
]
cp = | sp.run(args, stdout=sp.PIPE, stderr=sp.DEVNULL, text=True, check=True)
# Sometimes this git command will return *multiple dates*!
# In that case, select the oldest.
date = cp.stdout.strip().splitlines()[-1]
print(f'"{fn}": {date}')
except sp.CalledProcessError as e:
if e.returncode == 128:
print("Not a git repository! Exiting.")
else:
print(f"git error: '{e.strerror}'. Exiting.")
|
jnayak1/osf.io | scripts/osfstorage/files_audit.py | Python | apache-2.0 | 7,198 | 0.001667 | #!/usr/bin/env python
# encoding: utf-8
"""Verify that all OSF Storage files have Glacier backups and parity files,
creating any missing backups.
TODO: Add check against Glacier inventory
Note: Must have par2 installed to run
"""
from __future__ import division
import gc
import os
import math
import hashlib
import logging
import pyrax
from modularodm import Q
from boto.glacier.layer2 import Layer2
from pyrax.exceptions import NoSuchObject
from framework.celery_tasks import app as celery_app
from website.app import init_app
from website.files import models
from scripts import utils as scripts_utils
from scripts.osfstorage import utils as storage_utils
from scripts.osfstorage import settings as storage_settings
# Shared storage-backend handles.  They start as None and are populated by
# main() after authenticating; the audit helpers below read them as globals.
container_primary = None
container_parity = None
vault = None
# Per-worker scratch directory for downloaded blobs, set by main().
audit_temp_path = None
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# boto is very chatty; only surface critical problems from it.
logging.getLogger('boto').setLevel(logging.CRITICAL)
def delete_temp_file(version):
    """Remove the locally cached copy of *version*'s blob, if any."""
    # The temp file is named after the storage object key.
    temp_path = os.path.join(audit_temp_path, version.location['object'])
    try:
        os.remove(temp_path)
    except OSError:
        # Already gone (or never downloaded) -- nothing to clean up.
        pass
def download_from_cloudfiles(version):
    """Download *version*'s blob from Cloud Files into the temp directory.

    Verifies the SHA256 of the downloaded bytes against the stored metadata.
    Returns the local path, or None when the object is missing remotely.
    """
    object_name = version.location['object']
    local_path = os.path.join(audit_temp_path, object_name)
    if os.path.exists(local_path):
        # A leftover from an earlier failed run may be corrupt; start fresh.
        delete_temp_file(version)
    try:
        blob = container_primary.get_object(object_name)
        digest = hashlib.sha256()
        with open(local_path, 'wb') as out:
            for chunk in blob.fetch(chunk_size=262144000):  # 256 MB chunks
                digest.update(chunk)
                out.write(chunk)
        if digest.hexdigest() != version.metadata['sha256']:
            raise Exception('SHA256 mismatch, cannot continue')
        return local_path
    except NoSuchObject as err:
        logger.error('*** FILE NOT FOUND ***')
        logger.error('Exception:')
        logger.exception(err)
        logger.error('Version info:')
        logger.error(version.to_storage())
        return None
def ensure_glacier(version, dry_run):
    """Upload *version* to the Glacier vault unless it already has an archive.

    In dry-run mode only the missing backup is reported.
    """
    if version.metadata.get('archive'):
        return  # a Glacier archive id is already recorded
    logger.warn('Glacier archive for version {0} not found'.format(version._id))
    if dry_run:
        return
    local_copy = download_from_cloudfiles(version)
    if not local_copy:
        return
    archive_id = vault.upload_archive(local_copy, description=version.location['object'])
    version.metadata['archive'] = archive_id
    version.save()
def check_parity_files(version):
    """Return True when exactly one .par2 index and at least one .vol file
    exist in the parity container for *version*'s object."""
    object_name = version.location['object']
    index_files = list(container_parity.list_all(prefix='{0}.par2'.format(object_name)))
    volume_files = list(container_parity.list_all(prefix='{0}.vol'.format(object_name)))
    return len(index_files) == 1 and len(volume_files) >= 1
def ensure_parity(version, dry_run):
    """Create and upload par2 parity files for *version* if they are missing.

    In dry-run mode only the missing backup is reported.
    """
    if check_parity_files(version):
        return
    logger.warn('Parity files for version {0} not found'.format(version._id))
    if dry_run:
        return
    local_copy = download_from_cloudfiles(version)
    if not local_copy:
        return
    for parity_path in storage_utils.create_parity_files(local_copy):
        container_parity.create(parity_path)
        os.remove(parity_path)
    # Re-check so a silent upload failure is at least logged.
    if not check_parity_files(version):
        logger.error('Parity files for version {0} not found after update'.format(version._id))
def ensure_backups(version, dry_run):
    """Run both backup checks for *version*, then drop the local temp copy."""
    ensure_glacier(version, dry_run)
    ensure_parity(version, dry_run)
    delete_temp_file(version)
def glacier_targets():
    """Return FileVersions with a stored object but no Glacier archive id."""
    query = (
        Q('status', 'ne', 'cached') &
        Q('location.object', 'exists', True) &
        Q('metadata.archive', 'eq', None)
    )
    return models.FileVersion.find(query)
def parity_targets():
    """Return FileVersions with a stored object (parity checked remotely)."""
    # TODO: Add metadata.parity information from wb so we do not need to check remote services
    query = (
        Q('status', 'ne', 'cached') &
        Q('location.object', 'exists', True)
        # & Q('metadata.parity', 'eq', None)
    )
    return models.FileVersion.find(query)
def audit(targets, num_of_workers, worker_id, dry_run):
    """Ensure backups for the subset of *targets* owned by this worker.

    Versions are sharded across workers via hash(version._id) % num_of_workers.
    Progress is logged as an integer percentage of this worker's share.
    """
    # main() defaults num_of_workers to 0, which would make both the modulo
    # sharding and the ceil() below divide by zero; treat <1 as a single worker.
    if num_of_workers < 1:
        num_of_workers = 1
    maxval = math.ceil(targets.count() / num_of_workers)
    idx = 0
    last_progress = -1
    for version in targets:
        # NOTE(review): hash() of a str is stable on Python 2 but randomized
        # per-process on Python 3 -- confirm before porting this sharding.
        if hash(version._id) % num_of_workers == worker_id:
            if version.size == 0:
                continue  # nothing to back up for empty files
            ensure_backups(version, dry_run)
            idx += 1
            progress = int(idx / maxval * 100)
            if last_progress < 100 and last_progress < progress:
                logger.info(str(progress) + '%')
                last_progress = progress
            # clear modm cache so we don't run out of memory from the cursor enumeration
            models.FileVersion._cache.clear()
            models.FileVersion._object_cache.clear()
            gc.collect()
@celery_app.task(name='scripts.osfstorage.files_audit')
def main(num_of_workers=0, worker_id=0, glacier=True, parity=True, dry_run=True):
    """Authenticate against Rackspace/AWS, then run the requested audits.

    Populates the module-level container/vault/temp-path globals that the
    helper functions rely on.
    """
    global container_primary
    global container_parity
    global vault
    global audit_temp_path

    # Set up storage backends
    init_app(set_backends=True, routes=False)

    try:
        # Authenticate to Rackspace
        pyrax.settings.set('identity_type', 'rackspace')
        pyrax.set_credentials(
            storage_settings.USERNAME,
            storage_settings.API_KEY,
            region=storage_settings.REGION
        )
        container_primary = pyrax.cloudfiles.get_container(storage_settings.PRIMARY_CONTAINER_NAME)
        container_parity = pyrax.cloudfiles.get_container(storage_settings.PARITY_CONTAINER_NAME)

        # Connect to AWS
        layer2 = Layer2(
            aws_access_key_id=storage_settings.AWS_ACCESS_KEY,
            aws_secret_access_key=storage_settings.AWS_SECRET_KEY,
        )
        vault = layer2.get_vault(storage_settings.GLACIER_VAULT)

        # Log to file
        if not dry_run:
            scripts_utils.add_file_logger(logger, __file__, suffix=worker_id)

        # Each worker gets its own scratch directory for downloads.
        audit_temp_path = os.path.join(storage_settings.AUDIT_TEMP_PATH, str(worker_id))
        if not dry_run:
            try:
                os.makedirs(audit_temp_path)
            except OSError:
                # Directory already exists from a previous run.
                pass

        if glacier:
            logger.info('glacier audit start')
            audit(glacier_targets(), num_of_workers, worker_id, dry_run)
            logger.info('glacier audit complete')

        if parity:
            logger.info('parity audit start')
            audit(parity_targets(), num_of_workers, worker_id, dry_run)
            logger.info('parity audit complete')

    except Exception as err:
        logger.error('=== Unexpected Error ===')
        logger.exception(err)
        raise err
if __name__ == '__main__':
    # Command line: files_audit.py NUM_WORKERS WORKER_ID [glacier] [parity] [dry]
    import sys
    worker_count = int(sys.argv[1])
    worker_index = int(sys.argv[2])
    main(
        num_of_workers=worker_count,
        worker_id=worker_index,
        glacier='glacier' in sys.argv,
        parity='parity' in sys.argv,
        dry_run='dry' in sys.argv,
    )
|
class FileOut():
    '''Provides a file exporting interface compatible with the pathlib.Path API
    for any iterable object.

    The object's items are joined with newlines; output can go to a string,
    a stream, or a file path, and the instance works as a context manager
    that keeps a stream open across multiple to_file() calls.
    '''
    def __init__(self, obj, path=None, *, mode='w', obj_iter=None):
        # NOTE(review): obj_iter is accepted but never used -- kept for
        # interface compatibility; confirm whether it can be dropped.
        self.obj = obj
        self.path = path
        self.mode = mode
        try:
            self._iter = obj.__iter__
        except AttributeError:
            raise TypeError('The obj is not iterable.') from None
    def to_str(self):
        '''Export the object to a string (items joined by newlines).'''
        return '\n'.join(self._iter())
    def to_stream(self, fstream):
        '''Export the object to provided stream.'''
        fstream.write(self.to_str())
    def __enter__(self):
        try:
            # do not override an existing stream
            self.fstream
        except AttributeError:
            # convert self.path to str to allow for pathlib.Path objects
            self.fstream = open(str(self.path), mode=self.mode)
        return self
    def __exit__(self, exc_t, exc_v, tb):
        self.fstream.close()
        del self.fstream
    def to_file(self, path=None, mode=None):
        '''Export the object to a file at the path.
        Saves to the active stream if it exists.

        Raises IOError when a path or conflicting mode is supplied while a
        stream is already active.
        '''
        if mode is None:
            mode = self.mode
        try:
            fstream = self.fstream
        except AttributeError:
            if path is None:
                path = self.path
            # convert path to str to allow for pathlib.Path objects
            with open(str(path), mode=mode) as fstream:
                self.to_stream(fstream)
        else:
            if mode != fstream.mode:
                # adjacent literals instead of a backslash continuation, which
                # used to embed the source indentation inside the message
                raise IOError('Ambiguous stream output mode: '
                              'provided mode and fstream.mode conflict')
            if path is not None:
                raise IOError('Ambiguous output destination: '
                              'a path was provided with an already active file stream.')
            self.to_stream(fstream)
david-cattermole/qt-learning | python/qtLearn/widgets/ui_floatAttr.py | Python | bsd-3-clause | 1,458 | 0.004115 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/data/Public/qt-learning/ui/widgets/floatAttr.ui'
#
# Created: Tue Nov 14 18:49:58 2017
# by: PyQt4 UI code generator 4.6.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Widget(object):
    """pyuic4-generated UI builder for the float-attribute widget (label +
    double spin box + horizontal spacer)."""
    def setupUi(self, Widget):
        Widget.setObjectName("Widget")
        Widget.resize(403, 20)
        self.horizontalLayout = QtGui.QHBoxLayout(Widget)
        self.horizontalLayout.setSpacing(3)
        self.horizontalLayout.setMargin(3)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.label = QtGui.QLabel(Widget)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        self.doubleSpinBox = QtGui.QDoubleSpinBox(Widget)
        self.doubleSpinBox.setObjectName("doubleSpinBox")
        self.horizontalLayout.addWidget(self.doubleSpinBox)
        spacerItem = QtGui.QSpacerItem(250, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.retranslateUi(Widget)
        QtCore.QMetaObject.connectSlotsByName(Widget)
    def retranslateUi(self, Widget):
        Widget.setWindowTitle(QtGui.QApplication.translate("Widget", "Form", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Widget", "Text", None, QtGui.QApplication.UnicodeUTF8))
|
piotrdrag/guake | guake/main.py | Python | gpl-2.0 | 11,423 | 0.001401 | # -*- coding: utf-8; -*-
"""
Copyright (C) 2007-2013 Guake authors
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301 USA
"""
import inspect
import time
# You can put calls to p() everywhere in this page to inspect timing
# g_start = time.time()
# def p():
# print(time.time() - g_start, __file__, inspect.currentframe().f_back.f_lineno)
import logging
import os
import signal
import subprocess
import sys
import uuid
from locale import gettext as _
from optparse import OptionParser
log = logging.getLogger(__name__)
from guake.globals import NAME
from guake.globals import bindtextdomain
# When we are in the document generation on readthedocs, we do not have paths.py generated
try:
    from guake.paths import LOCALE_DIR
    bindtextdomain(NAME, LOCALE_DIR)
except:  # pylint: disable=bare-except
    # Deliberate catch-all: translations are optional at import time.
    pass
def main():
"""Parses the command line parameters and decide if dbus methods
should be c | alled or not. If there is already a guake instance
running it will be used and a True value will be returned,
otherwise, false will be returned.
"""
# Force to xterm-256 colors for compatibility with some old command line programs
os.environ["TERM"] = "xterm-256color"
# Force use X11 backend underwayland
os.environ["GDK_BACKEND"] = "x11"
# do not use version keywords here, pbr might be slow to find the version of Guake module
parser = OptionParser()
parser.add_option(
'-V',
'--version',
dest='version',
action='store_true',
default=False,
help=_('Show Guake version number and exit')
)
parser.add_option(
'-v',
'--verbose',
dest='verbose',
action='store_true',
default=False,
help=_('Enable verbose logging')
)
parser.add_option(
'-f',
'--fullscreen',
dest='fullscreen',
action='store_true',
default=False,
help=_('Put Guake in fullscreen mode')
)
parser.add_option(
'-t',
'--toggle-visibility',
dest='show_hide',
action='store_true',
default=False,
help=_('Toggles the visibility of the terminal window')
)
parser.add_option(
'--show',
dest="show",
action='store_true',
default=False,
help=_('Shows Guake main window')
)
parser.add_option(
'--hide',
dest='hide',
action='store_true',
default=False,
help=_('Hides Guake main window')
)
parser.add_option(
'-p',
'--preferences',
dest='show_preferences',
action='store_true',
default=False,
help=_('Shows Guake preference window')
)
parser.add_option(
'-a',
'--about',
dest='show_about',
action='store_true',
default=False,
help=_('Shows Guake\'s about info')
)
parser.add_option(
'-n',
'--new-tab',
dest='new_tab',
action='store',
default='',
help=_('Add a new tab (with current directory set to NEW_TAB)')
)
parser.add_option(
'-s',
'--select-tab',
dest='select_tab',
action='store',
default='',
help=_('Select a tab (SELECT_TAB is the index of the tab)')
)
parser.add_option(
'-g',
'--selected-tab',
dest='selected_tab',
action='store_true',
default=False,
help=_('Return the selected tab index.')
)
parser.add_option(
'-l',
'--selected-tablabel',
dest='selected_tablabel',
action='store_true',
default=False,
help=_('Return the selected tab label.')
)
parser.add_option(
'-e',
'--execute-command',
dest='command',
action='store',
default='',
help=_('Execute an arbitrary command in the selected tab.')
)
parser.add_option(
'-i',
'--tab-index',
dest='tab_index',
action='store',
default='0',
help=_('Specify the tab to rename. Default is 0. Can be used to select tab by UUID.')
)
parser.add_option(
'--bgcolor',
dest='bgcolor',
action='store',
default='',
help=_('Set the hexadecimal (#rrggbb) background color of '
'the selected tab.')
)
parser.add_option(
'--fgcolor',
dest='fgcolor',
action='store',
default='',
help=_('Set the hexadecimal (#rrggbb) foreground color of the '
'selected tab.')
)
parser.add_option(
'--rename-tab',
dest='rename_tab',
metavar='TITLE',
action='store',
default='',
help=_(
'Rename the specified tab by --tab-index. Reset to default if TITLE is '
'a single dash "-".'
)
)
parser.add_option(
'-r',
'--rename-current-tab',
dest='rename_current_tab',
metavar='TITLE',
action='store',
default='',
help=_('Rename the current tab. Reset to default if TITLE is a '
'single dash "-".')
)
parser.add_option(
'-q',
'--quit',
dest='quit',
action='store_true',
default=False,
help=_('Says to Guake go away =(')
)
parser.add_option(
'-u',
'--no-startup-script',
dest='execute_startup_script',
action='store_false',
default=True,
help=_('Do not execute the start up script')
)
options = parser.parse_args()[0]
if options.version:
from guake import gtk_version
from guake import guake_version
from guake import vte_version
from guake import vte_runtime_version
print('Guake Terminal: {}'.format(guake_version()))
print('VTE: {}'.format(vte_version()))
print('VTE runtime: {}'.format(vte_runtime_version()))
print('Gtk: {}'.format(gtk_version()))
sys.exit(0)
import dbus
from guake.dbusiface import DBUS_NAME
from guake.dbusiface import DBUS_PATH
from guake.dbusiface import DbusManager
from guake.guake_logging import setupLogging
instance = None
# Trying to get an already running instance of guake. If it is not
# possible, lets create a new instance. This function will return
# a boolean value depending on this decision.
try:
bus = dbus.SessionBus()
remote_object = bus.get_object(DBUS_NAME, DBUS_PATH)
already_running = True
except dbus.DBusException:
# can now configure the logging
setupLogging(options.verbose)
# COLORTERM is an environment variable set by some terminal emulators such as
# gnome-terminal.
# To avoid confusing applications running inside Guake, clean up COLORTERM at startup.
if "COLORTERM" in os.environ:
del os.environ['COLORTERM']
log.info("Guake not running, starting it")
# late loading of the Guake object, to speed up dbus comm
from guake.guake_app import Guake
instance = Guake()
remote_object = DbusManager(instance)
already_running = False
only_show_hide = True
if options.fullscreen:
remote_object.fullscreen()
if options.show:
remote_object.show_from_remote()
if options.hide:
remote_object.hide_from_remote()
if options.show_preferences:
remote_object.show_prefs()
onl |
wateraccounting/SEBAL | Processing_Scripts/METEO/CalcHumidityGLDASdata.py | Python | apache-2.0 | 978 | 0.018405 | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 19 10:09:38 2017
@author: tih
"""
Tfile = r"J:\Tyler\Input\Meteo\daily\avgsurft_inst\mean\T_GLDA | S-NOAH_C_daily_2016.06.15.tif"
Pfile = r"J:\Tyler\Input\Meteo\daily\psurf_f_inst\mean\P_GLDAS-NOAH_kpa_daily_2016.06.15.tif"
Hfile = r"J:\Tyler\Input\Meteo\daily\qair_f_inst\mean\Hum_GLDAS-NOAH_kg-kg_daily_2016.06.15.tif"
Outfilename = r"J:\Tyler\Input\Meteo\daily | \Hum_Calculated\Humidity_percentage_Calculated_daily.tif"
import gdal
import os
import wa.General.raster_conversions as RC
import wa.General.data_conversions as DC
import numpy as np
geo_out, proj, size_X, size_Y = RC.Open_array_info(Tfile)
Tdata = RC.Open_tiff_array(Tfile)
Tdata[Tdata<-900]=np.nan
Pdata = RC.Open_tiff_array(Pfile)
Hdata = RC.Open_tiff_array(Hfile)
Esdata = 0.6108*np.exp((17.27*Tdata)/(Tdata+237.3))
HumData = np.minimum((1.6077717*Hdata*Pdata/Esdata),1)*100
DC.Save_as_tiff(Outfilename,HumData,geo_out,"WGS84") |
Ingenico-ePayments/connect-sdk-python2 | ingenico/connect/sdk/domain/payment/definitions/non_sepa_direct_debit_payment_method_specific_input.py | Python | mit | 8,060 | 0.004591 | # -*- coding: utf-8 -*-
#
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
from ingenico.connect.sdk.domain.definitions.abstract_payment_method_specific_input import AbstractPaymentMethodSpecificInput
from ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product705_specific_input import NonSepaDirectDebitPaymentProduct705SpecificInput
from ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product730_specific_input import NonSepaDirectDebitPaymentProduct730SpecificInput
class NonSepaDirectDebitPaymentMethodSpecificInput(AbstractPaymentMethodSpecificInput):
    """Input fields specific to non-SEPA direct debit payments (generated SDK model)."""

    __date_collect = None
    __direct_debit_text = None
    __is_recurring = None
    __payment_product705_specific_input = None
    __payment_product730_specific_input = None
    __recurring_payment_sequence_indicator = None
    __requires_approval = None
    __token = None
    __tokenize = None

    @property
    def date_collect(self):
        """
        | Direct Debit payment collection date

        | Format: YYYYMMDD

        Type: str
        """
        return self.__date_collect

    @date_collect.setter
    def date_collect(self, value):
        self.__date_collect = value

    @property
    def direct_debit_text(self):
        """
        | Descriptor intended to identify the transaction on the customer's bank statement

        Type: str
        """
        return self.__direct_debit_text

    @direct_debit_text.setter
    def direct_debit_text(self, value):
        self.__direct_debit_text = value

    @property
    def is_recurring(self):
        """
        | Indicates if this transaction is of a one-off or a recurring type

        * true - This is recurring
        * false - This is one-off

        Type: bool
        """
        return self.__is_recurring

    @is_recurring.setter
    def is_recurring(self, value):
        self.__is_recurring = value

    @property
    def payment_product705_specific_input(self):
        """
        | Object containing UK Direct Debit specific details

        Type: :class:`ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product705_specific_input.NonSepaDirectDebitPaymentProduct705SpecificInput`
        """
        return self.__payment_product705_specific_input

    @payment_product705_specific_input.setter
    def payment_product705_specific_input(self, value):
        self.__payment_product705_specific_input = value

    @property
    def payment_product730_specific_input(self):
        """
        | Object containing ACH specific details

        Type: :class:`ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product730_specific_input.NonSepaDirectDebitPaymentProduct730SpecificInput`
        """
        return self.__payment_product730_specific_input

    @payment_product730_specific_input.setter
    def payment_product730_specific_input(self, value):
        self.__payment_product730_specific_input = value

    @property
    def recurring_payment_sequence_indicator(self):
        """
        * first = This transaction is the first of a series of recurring transactions
        * recurring = This transaction is a subsequent transaction in a series of recurring transactions
        * last = This transaction is the last transaction of a series of recurring transactions

        Type: str
        """
        return self.__recurring_payment_sequence_indicator

    @recurring_payment_sequence_indicator.setter
    def recurring_payment_sequence_indicator(self, value):
        self.__recurring_payment_sequence_indicator = value

    @property
    def requires_approval(self):
        """
        * true - The payment requires approval before the funds will be captured using the Approve payment or Capture payment API.
        * false - The payment does not require approval, and the funds will be captured automatically.

        Type: bool
        """
        return self.__requires_approval

    @requires_approval.setter
    def requires_approval(self, value):
        self.__requires_approval = value

    @property
    def token(self):
        """
        | ID of the stored token that contains the bank account details to be debited

        Type: str
        """
        return self.__token

    @token.setter
    def token(self, value):
        self.__token = value

    @property
    def tokenize(self):
        """
        | Indicates if this transaction should be tokenized

        * true - Tokenize the transaction
        * false - Do not tokenize the transaction, unless it would be tokenized by other means such as auto-tokenization of recurring payments.

        Type: bool
        """
        return self.__tokenize

    @tokenize.setter
    def tokenize(self, value):
        self.__tokenize = value

    def to_dictionary(self):
        dictionary = super(NonSepaDirectDebitPaymentMethodSpecificInput, self).to_dictionary()
        if self.date_collect is not None:
            dictionary['dateCollect'] = self.date_collect
        if self.direct_debit_text is not None:
            dictionary['directDebitText'] = self.direct_debit_text
        if self.is_recurring is not None:
            dictionary['isRecurring'] = self.is_recurring
        if self.payment_product705_specific_input is not None:
            dictionary['paymentProduct705SpecificInput'] = self.payment_product705_specific_input.to_dictionary()
        if self.payment_product730_specific_input is not None:
            dictionary['paymentProduct730SpecificInput'] = self.payment_product730_specific_input.to_dictionary()
        if self.recurring_payment_sequence_indicator is not None:
            dictionary['recurringPaymentSequenceIndicator'] = self.recurring_payment_sequence_indicator
        if self.requires_approval is not None:
            dictionary['requiresApproval'] = self.requires_approval
        if self.token is not None:
            dictionary['token'] = self.token
        if self.tokenize is not None:
            dictionary['tokenize'] = self.tokenize
        return dictionary

    def from_dictionary(self, dictionary):
        super(NonSepaDirectDebitPaymentMethodSpecificInput, self).from_dictionary(dictionary)
        if 'dateCollect' in dictionary:
            self.date_collect = dictionary['dateCollect']
        if 'directDebitText' in dictionary:
            self.direct_debit_text = dictionary['directDebitText']
        if 'isRecurring' in dictionary:
            self.is_recurring = dictionary['isRecurring']
        if 'paymentProduct705SpecificInput' in dictionary:
            if not isinstance(dictionary['paymentProduct705SpecificInput'], dict):
                raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['paymentProduct705SpecificInput']))
            value = NonSepaDirectDebitPaymentProduct705SpecificInput()
            self.payment_product705_specific_input = value.from_dictionary(dictionary['paymentProduct705SpecificInput'])
        if 'paymentProduct730SpecificInput' in dictionary:
            if not isinstance(dictionary['paymentProduct730SpecificInput'], dict):
                raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['paymentProduct730SpecificInput']))
            value = NonSepaDirectDebitPaymentProduct730SpecificInput()
            self.payment_product730_specific_input = value.from_dictionary(dictionary['paymentProduct730SpecificInput'])
        if 'recurringPaymentSequenceIndicator' in dictionary:
            self.recurring_payment_sequence_indicator = dictionary['recurringPaymentSequenceIndicator']
        if 'requiresApproval' in dictionary:
            self.requires_approval = dictionary['requiresApproval']
        if 'token' in dictionary:
            self.token = dictionary['token']
        if 'tokenize' in dictionary:
            self.tokenize = dictionary['tokenize']
        return self
|
Manolaru/Python_train | Les_3/Task_9/test/test_modify_group.py | Python | apache-2.0 | 554 | 0.001805 | from model.group import Group
def test_modify_group_name(app):
if app.group.count() == 0:
app.group.create(Group(name="testing"))
app.group.modify_first_group(Group(name= | "NewGroup"))
def test_modify_group_header(app):
if app.group.count() == 0:
app.group.create(Group(name="testing"))
app.group.modify_first_group(Group(header="New header"))
def test_modify_group_footer(app):
if app.group.count() == 0:
app.group.create(Group(name="testing"))
app.group.modify_first_group(Group(foot | er="New footer"))
|
dashpay/dash | test/functional/feature_llmq_is_cl_conflicts.py | Python | mit | 13,986 | 0.003075 | #!/usr/bin/env python3
# Copyright (c) 2015-2021 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import time
from decimal import Decimal
from test_framework.blocktools import get_masternode_payment, create_coinbase, create_block
from test_framework.mininode import *
from test_framework.test_framework import DashTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, get_bip9_status
'''
feature_llmq_is_cl_conflicts.py
Checks conflict handling between ChainLocks and InstantSend
'''
class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.clsigs = {}
self.islocks = {}
def send_clsig(self, clsig):
hash = uint256_from_str(hash256(clsig.serialize()))
self.clsigs[hash] = clsig
inv = msg_inv([CInv(29, hash)])
self.send_message(inv)
def send_islock(self, islock):
hash = uint256_from_str(hash256(islock.serialize()))
self.islocks[hash] = islock
inv = msg_inv([CInv(30, hash)])
self.send_message(inv)
def on_getdata(self, message):
for inv in message.inv:
if inv.hash in self.clsigs:
self.send_message(self.clsigs[inv.hash])
if inv.hash in self.islocks:
self.send_message(self.islocks[inv.hash])
class LLMQ_IS_CL_Conflicts(DashTestFramework):
def set_test_params(self):
self.set_dash_test_params(4, 3, fast_dip3_enforcement=True)
#disable_mocktime()
def run_test(self):
self.activate_dip8()
self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn())
network_thread_start()
self.nodes[0].p2p.wait_for_verack()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.mine_quorum()
# mine single block, wait for chainlock
self.nodes[0].generate(1)
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
self.test_chainlock_overrides_islock(False)
self.test_chainlock_overrides_islock(True, False)
self.test_chainlock_overrides_islock(True, True)
self.test_chainlock_overrides_islock_overrides_nonchainlock()
def test_chainlock_overrides_islock(self, test_block_conflict, mine_confllicting=False):
if not test_block_conflict:
assert not mine_confllicting
# create three raw TXs, they will conflict with each other
rawtx1 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx2 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx1_obj = FromHex(CTransaction(), rawtx1)
rawtx2_obj = FromHex(CTransaction(), rawtx2)
rawtx1_txid = self.nodes[0].sendrawtransaction(rawtx1)
rawtx2_txid = encode(hash256(hex_str_to_bytes(rawtx2))[::-1], 'hex_codec').decode('ascii')
# Create a chained TX on top of tx1
inputs = []
n = 0
for out in rawtx1_obj.vout:
if out.nValue == 100000000:
inputs.append({"txid": rawtx1_txid, "vout": n})
n += 1
rawtx4 = self.nodes[0].createrawtransaction(inputs, {self.nodes[0].getnewaddress(): 0.999})
rawtx4 = self.nodes[0].signrawtransactionwithwallet(rawtx4)['hex']
rawtx4_txid = self.nodes[0].sendrawtransaction(rawtx4)
# wait for transactions to propagate
self.sync_mempools()
for node in self.nodes:
self.wait_for_instantlock(rawtx1_tx | id, node)
self.wait_for_instantlock(rawtx4_txid, node)
block = self.create_block(self.nodes[0], [rawtx2_obj])
if t | est_block_conflict:
# The block shouldn't be accepted/connected but it should be known to node 0 now
submit_result = self.nodes[0].submitblock(ToHex(block))
assert(submit_result == "conflict-tx-lock")
cl = self.create_chainlock(self.nodes[0].getblockcount() + 1, block)
if mine_confllicting:
islock_tip = self.nodes[0].generate(1)[-1]
self.test_node.send_clsig(cl)
for node in self.nodes:
self.wait_for_best_chainlock(node, block.hash)
self.sync_blocks()
if mine_confllicting:
# The tip with IS-locked txes should be marked conflicting now
found1 = False
found2 = False
for tip in self.nodes[0].getchaintips(2):
if tip["hash"] == islock_tip:
assert tip["status"] == "conflicting"
found1 = True
elif tip["hash"] == block.hash:
assert tip["status"] == "active"
found2 = True
assert found1 and found2
# At this point all nodes should be in sync and have the same "best chainlock"
submit_result = self.nodes[1].submitblock(ToHex(block))
if test_block_conflict:
# Node 1 should receive the block from node 0 and should not accept it again via submitblock
assert(submit_result == "duplicate")
else:
# The block should get accepted now, and at the same time prune the conflicting ISLOCKs
assert(submit_result is None)
for node in self.nodes:
self.wait_for_chainlocked_block(node, block.hash)
# Create a chained TX on top of tx2
inputs = []
n = 0
for out in rawtx2_obj.vout:
if out.nValue == 100000000:
inputs.append({"txid": rawtx2_txid, "vout": n})
n += 1
rawtx5 = self.nodes[0].createrawtransaction(inputs, {self.nodes[0].getnewaddress(): 0.999})
rawtx5 = self.nodes[0].signrawtransactionwithwallet(rawtx5)['hex']
rawtx5_txid = self.nodes[0].sendrawtransaction(rawtx5)
# wait for the transaction to propagate
self.sync_mempools()
for node in self.nodes:
self.wait_for_instantlock(rawtx5_txid, node)
if mine_confllicting:
# Lets verify that the ISLOCKs got pruned and conflicting txes were mined but never confirmed
for node in self.nodes:
rawtx = node.getrawtransaction(rawtx1_txid, True)
assert not rawtx['chainlock']
assert not rawtx['instantlock']
assert not rawtx['instantlock_internal']
assert_equal(rawtx['confirmations'], 0)
assert_equal(rawtx['height'], -1)
rawtx = node.getrawtransaction(rawtx4_txid, True)
assert not rawtx['chainlock']
assert not rawtx['instantlock']
assert not rawtx['instantlock_internal']
assert_equal(rawtx['confirmations'], 0)
assert_equal(rawtx['height'], -1)
rawtx = node.getrawtransaction(rawtx2_txid, True)
assert rawtx['chainlock']
assert rawtx['instantlock']
assert not rawtx['instantlock_internal']
else:
# Lets verify that the ISLOCKs got pruned
for node in self.nodes:
assert_raises_rpc_error(-5, "No such mempool or blockchain transaction", node.getrawtransaction, rawtx1_txid, True)
assert_raises_rpc_error(-5, "No such mempool or blockchain transaction", node.getrawtransaction, rawtx4_txid, True)
rawtx = node.getrawtransaction(rawtx2_txid, True)
assert rawtx['chainlock']
assert rawtx['instantlock']
assert not rawtx['instantlock_internal']
def test_chainlock_overrides_islock_overrides_nonchainlock(self):
# create two raw TXs, they will conflict with each other
rawtx1 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx2 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx1_txid = encode(hash256(hex_str_to_bytes(rawtx1))[::-1], 'hex_codec').decode('ascii')
rawtx2_txid = encode(hash256(hex_str_to_bytes(rawt |
jimi-c/ansible | lib/ansible/plugins/connection/httpapi.py | Python | gpl-3.0 | 9,898 | 0.002223 | # (c) 2018 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
author: Ansible Networking Team
connection: httpapi
short_description: Use httpapi to run command on network appliances
description:
- This connection plugin provides a connection to remote devices over a
HTTP(S)-based api.
version_added: "2.6"
options:
host:
description:
- Specifies the remote device FQDN or IP address to establish the HTTP(S)
connection to.
default: inventory_hostname
vars:
- name: ansible_host
port:
type: int
description:
- Specifies the port on the remote device to listening for connections
when establishing the HTTP(S) connection.
When unspecified, will pick 80 or 443 based on the value of use_ssl
ini:
- section: defaults
key: remote_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_httpapi_port
network_os:
description:
- Configures the device platform network operating system. This value is
used to load the correct httpapi and cliconf plugins to communicate
with the remote device
vars:
- name: ansible_network_os
remote_user:
description:
- The username used to authenticate to the remote device when the API
connection is first established. If the remote_user is not specified,
the connection will use the username of the logged in user.
- Can be configured form the CLI via the C(--user) or C(-u) options
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
password:
description:
- Secret used to authenticate
vars:
- name: ansible_password
- name: ansible_httpapi_pass
use_ssl:
description:
- Whether to connect using SSL (HTTPS) or not (HTTP)
default: False
vars:
- name: ansible_httpapi_use_ssl
validate_certs:
version_added: '2.7'
description:
- Whether to validate SSL certificates
default: True
vars:
- name: ansible_httpapi_validate_certs
timeout:
type: int
description:
- Sets the connection time, in seconds, for the communicating with the
remote device. This timeout is used as the default timeout value for
commands when issuing a command to the network CLI. If the command
does not return in timeout seconds, the an error is generated.
default: 120
become:
type: boolean
description:
- The become option will instruct the CLI session to attempt privilege
escalation on platforms that support it. Normally this means
transitioning from user mode to C(enable) mode in the CLI session.
If become is set to True and the remote device does not support
privilege escalation or the privilege has already been elevated, then
this option is silently ignored
- Can be configured form the CLI via the C(--become) or C(-b) options
default: False
ini:
section: privilege_escalation
key: become
env:
- name: ANSIBLE_BECOME
vars:
- name: ansible_become
become_method:
description:
- This option allows the become method to be specified in for handling
privilege escalation. Typically the become_method value is set to
C(enable) but could | be defined as other values.
default: sudo
ini:
section: privilege_escalation
key: become_method
env:
- name: ANSIBLE_BECOME_METHOD
vars:
- name: ansible_become_method
persistent_connect_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait when trying to
initially establish | a persistent connection. If this value expires
before the connection to the remote device is completed, the connection
will fail
default: 30
ini:
- section: persistent_connection
key: connect_timeout
env:
- name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
persistent_command_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait for a command to
return from the remote device. If this timer is exceeded before the
command returns, the connection plugin will raise an exception and
close
default: 10
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
"""
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_bytes
from ansible.module_utils.six import PY3, BytesIO
from ansible.module_utils.six.moves import cPickle
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import open_url
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import cliconf_loader, httpapi_loader
from ansible.plugins.connection import NetworkConnectionBase
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(NetworkConnectionBase):
'''Network API connection'''
transport = 'httpapi'
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self._url = None
self._auth = None
if not self._network_os:
raise AnsibleConnectionFailure(
'Unable to automatically determine host network os. Please '
'manually configure ansible_network_os value for this host'
)
display.display('network_os is set to %s' % self._network_os, log_only=True)
def update_play_context(self, pc_data):
"""Updates the play context information for the connection"""
pc_data = to_bytes(pc_data)
if PY3:
pc_data = cPickle.loads(pc_data, encoding='bytes')
else:
pc_data = cPickle.loads(pc_data)
play_context = PlayContext()
play_context.deserialize(pc_data)
messages = ['updating play_context for connection']
if self._play_context.become ^ play_context.become:
self.set_become(play_context)
if play_context.become is True:
messages.append('authorizing connection')
else:
messages.append('deauthorizing connection')
self._play_context = play_context
return messages
def _connect(self):
if not self.connected:
protocol = 'https' if self.get_option('use_ssl') else 'http'
host = self.get_option('host')
port = self.get_option('port') or (443 if protocol == 'https' else 80)
self._url = '%s://%s:%s' % (protocol, host, port)
httpapi = httpapi_loader.get(self._network_os, self)
if httpapi:
display.vvvv('loaded API plugin for network_os %s' % self._network_os, host=self._play_context.remote_addr)
self._implementation_plugins.append(httpapi)
httpapi.set_become(self._play_context)
httpapi.login(self.get_option('remote_user'), self.get_option('password'))
else:
raise AnsibleConnectionFailure('unable to load API plugin for network_os %s' % self._network_os)
cliconf = cliconf_loader.get(self._network_os, self)
if cliconf:
display.vvvv('loaded cliconf plugin for network_os %s' % self._network_os, host=host)
self._implementation_plugins.append(cliconf)
else:
display.vvvv('unable to load cliconf for network_os %s' % self._network_os)
self._connected = True
def close(self):
'''
Close the active session to the device
'''
# only close the connection if its connected.
if self._c |
aspiringguru/sentexTuts | PracMachLrng/sentex_ML_demo12.py | Python | mit | 692 | 0.011561 | '''
Euclidean Distance - Practical Machine Learning Tutorial with Python p.15
https://youtu.be/hl3bQySs8sM?list=PLQVvvaa0QuDfKTOs3Keq_kaG2P55YRn5v
https://en.wikipedia.org/wiki/Euclidean_distance
The distance (d) from p to q, or fro | m q to p is given by the Pythagorean formula:
d(q,p) = d(p,q) = sqrt( (q1-p1)^2 + (q2-p2)^2 + .... + (qn-pn)^2)
[recall hyptoneuse of 90 deg triangle formula h = sqrt(x^2 + y^2) where x & y are the square sides.]
euclidian distance = sqrt(Sum [i=1 to n] (qi - pi)^2)
'''
from math import sqrt
plot1 = [1,3]
plot2 = [2,5]
euclidian_distance = sqrt( (plot1[0]-plot2[0])**2 + (plot1[1]-plot2[1])**2 )
print | ("euclidian_distance={}".format(euclidian_distance)) |
brenton/openshift-ansible | roles/lib_openshift/src/test/unit/test_oc_route.py | Python | apache-2.0 | 12,042 | 0.000747 | '''
Unit tests for oc route
'''
import os
import six
import sys
import unittest
import mock
# Removing invalid variable names for tests so that I can
# keep them brief
# pylint: disable=invalid-name,no-name-in-module
# Disable import-error b/c our libraries aren't loaded in jenk | ins
# pylint: disable=import-error,wrong-import-position
# place class in our pyt | hon path
module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library') # noqa: E501
sys.path.insert(0, module_path)
from oc_route import OCRoute, locate_oc_binary # noqa: E402
class OCRouteTest(unittest.TestCase):
'''
Test class for OCServiceAccount
'''
@mock.patch('oc_route.locate_oc_binary')
@mock.patch('oc_route.Utils.create_tmpfile_copy')
@mock.patch('oc_route.OCRoute._run')
def test_list_route(self, mock_cmd, mock_tmpfile_copy, mock_oc_binary):
''' Testing getting a route '''
# Arrange
# run_ansible input parameters
params = {
'kubeconfig': '/etc/origin/master/admin.kubeconfig',
'state': 'list',
'debug': False,
'name': 'test',
'namespace': 'default',
'tls_termination': 'passthrough',
'dest_cacert_path': None,
'cacert_path': None,
'cert_path': None,
'key_path': None,
'dest_cacert_content': None,
'cacert_content': None,
'cert_content': None,
'key_content': None,
'service_name': 'testservice',
'host': 'test.openshift.com',
'wildcard_policy': None,
'weight': None,
'port': None
}
route_result = '''{
"kind": "Route",
"apiVersion": "v1",
"metadata": {
"name": "test",
"namespace": "default",
"selfLink": "/oapi/v1/namespaces/default/routes/test",
"uid": "1b127c67-ecd9-11e6-96eb-0e0d9bdacd26",
"resourceVersion": "439182",
"creationTimestamp": "2017-02-07T01:59:48Z"
},
"spec": {
"host": "test.example",
"to": {
"kind": "Service",
"name": "test",
"weight": 100
},
"port": {
"targetPort": 8443
},
"tls": {
"termination": "passthrough"
},
"wildcardPolicy": "None"
},
"status": {
"ingress": [
{
"host": "test.example",
"routerName": "router",
"conditions": [
{
"type": "Admitted",
"status": "True",
"lastTransitionTime": "2017-02-07T01:59:48Z"
}
],
"wildcardPolicy": "None"
}
]
}
}'''
# Return values of our mocked function call. These get returned once per call.
mock_cmd.side_effect = [
# First call to mock
(0, route_result, ''),
]
mock_oc_binary.side_effect = [
'oc'
]
mock_tmpfile_copy.side_effect = [
'/tmp/mock.kubeconfig',
]
# Act
results = OCRoute.run_ansible(params, False)
# Assert
self.assertFalse(results['changed'])
self.assertEqual(results['state'], 'list')
self.assertEqual(results['results'][0]['metadata']['name'], 'test')
# Making sure our mock was called as we expected
mock_cmd.assert_has_calls([
mock.call(['oc', 'get', 'route', 'test', '-o', 'json', '-n', 'default'], None),
])
@mock.patch('oc_route.locate_oc_binary')
@mock.patch('oc_route.Utils.create_tmpfile_copy')
@mock.patch('oc_route.Yedit._write')
@mock.patch('oc_route.OCRoute._run')
def test_create_route(self, mock_cmd, mock_write, mock_tmpfile_copy, mock_oc_binary):
''' Testing getting a route '''
# Arrange
# run_ansible input parameters
params = {
'kubeconfig': '/etc/origin/master/admin.kubeconfig',
'state': 'present',
'debug': False,
'name': 'test',
'namespace': 'default',
'tls_termination': 'edge',
'dest_cacert_path': None,
'cacert_path': None,
'cert_path': None,
'key_path': None,
'dest_cacert_content': None,
'cacert_content': 'testing',
'cert_content': 'testing',
'key_content': 'testing',
'service_name': 'testservice',
'host': 'test.openshift.com',
'wildcard_policy': None,
'weight': None,
'port': None
}
route_result = '''{
"apiVersion": "v1",
"kind": "Route",
"metadata": {
"creationTimestamp": "2017-02-07T20:55:10Z",
"name": "test",
"namespace": "default",
"resourceVersion": "517745",
"selfLink": "/oapi/v1/namespaces/default/routes/test",
"uid": "b6f25898-ed77-11e6-9755-0e737db1e63a"
},
"spec": {
"host": "test.openshift.com",
"tls": {
"caCertificate": "testing",
"certificate": "testing",
"key": "testing",
"termination": "edge"
},
"to": {
"kind": "Service",
"name": "testservice",
"weight": 100
},
"wildcardPolicy": "None"
},
"status": {
"ingress": [
{
"conditions": [
{
"lastTransitionTime": "2017-02-07T20:55:10Z",
"status": "True",
"type": "Admitted"
}
],
"host": "test.openshift.com",
"routerName": "router",
"wildcardPolicy": "None"
}
]
}
}'''
test_route = '''\
kind: Route
spec:
tls:
caCertificate: testing
termination: edge
certificate: testing
key: testing
to:
kind: Service
name: testservice
weight: 100
host: test.openshift.com
wildcardPolicy: None
apiVersion: v1
metadata:
namespace: default
name: test
'''
# Return values of our mocked function call. These get returned once per call.
mock_cmd.side_effect = [
# First call to mock
(1, '', 'Error from server: routes "test" not found'),
(1, '', 'Error from server: routes "test" not found'),
(0, 'route "test" created', ''),
(0, route_result, ''),
]
mock_oc_binary.side_effect = [
'oc'
]
mock_tmpfile_copy.side_effect = [
'/tmp/mock.kubeconfig',
]
mock_write.assert_has_calls = [
# First call to mock
mock.call('/tmp/test', test_route)
]
# Act
results = OCRoute.run_ansible(params, False)
# Assert
self.assertTrue(results['changed'])
self.assertEqual(results['state'], 'present')
self.assertEqual(results['results']['results'][0]['metadata']['name'], 'test')
# Making sure our mock was called as we expected
mock_cmd.assert_has_calls([
mock.call(['oc', 'get', 'route', |
ryfeus/lambda-packs | pytorch/source/caffe2/python/attention.py | Python | mit | 12,504 | 0.00008 | ## @package attention
# Module caffe2.python.attention
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import brew
class AttentionType:
Regular, Recurrent, Dot, SoftCoverage = tuple(range(4))
def s(scope, name):
# We have to manually scope due to our internal/external blob
# relationships.
return "{}/{}".format(str(scope), str(name))
# c_i = \sum_j w_{ij}\textbf{s}_j
def _calc_weighted_context(
model,
encoder_outputs_transposed,
encoder_output_dim,
attention_weights_3d,
scope,
):
# [batch_size, encoder_output_dim, 1]
attention_weighted_encoder_context = brew.batch_mat_mul(
model,
[encoder_outputs_transposed, attention_weights_3d],
s(scope, 'attention_weighted_encoder_context'),
)
# [batch_size, encoder_output_dim]
attention_weighted_encoder_context, _ = model.net.Reshape(
attention_weighted_encoder_context,
[
attention_weighted_encoder_context,
s(scope, 'attention_weighted_encoder_contex | t_old_shape'),
],
shape=[1, -1, encoder_output_dim],
)
return attention_weighted_encoder_context
# Calculate a softmax over the passed in attention energy logits
def _calc_attention_weights(
model,
attention_logits_transposed,
scope,
encoder_lengths=None,
):
if encoder_lengths is not None:
attention_logits_transposed = model.net.SequenceMask(
[attention_logits_transposed, enco | der_lengths],
['masked_attention_logits'],
mode='sequence',
)
# [batch_size, encoder_length, 1]
attention_weights_3d = brew.softmax(
model,
attention_logits_transposed,
s(scope, 'attention_weights_3d'),
engine='CUDNN',
axis=1,
)
return attention_weights_3d
# e_{ij} = \textbf{v}^T tanh \alpha(\textbf{h}_{i-1}, \textbf{s}_j)
def _calc_attention_logits_from_sum_match(
model,
decoder_hidden_encoder_outputs_sum,
encoder_output_dim,
scope,
):
# [encoder_length, batch_size, encoder_output_dim]
decoder_hidden_encoder_outputs_sum = model.net.Tanh(
decoder_hidden_encoder_outputs_sum,
decoder_hidden_encoder_outputs_sum,
)
# [encoder_length, batch_size, 1]
attention_logits = brew.fc(
model,
decoder_hidden_encoder_outputs_sum,
s(scope, 'attention_logits'),
dim_in=encoder_output_dim,
dim_out=1,
axis=2,
freeze_bias=True,
)
# [batch_size, encoder_length, 1]
attention_logits_transposed = brew.transpose(
model,
attention_logits,
s(scope, 'attention_logits_transposed'),
axes=[1, 0, 2],
)
return attention_logits_transposed
# \textbf{W}^\alpha used in the context of \alpha_{sum}(a,b)
def _apply_fc_weight_for_sum_match(
model,
input,
dim_in,
dim_out,
scope,
name,
):
output = brew.fc(
model,
input,
s(scope, name),
dim_in=dim_in,
dim_out=dim_out,
axis=2,
)
output = model.net.Squeeze(
output,
output,
dims=[0],
)
return output
# Implement RecAtt due to section 4.1 in http://arxiv.org/abs/1601.03317
def apply_recurrent_attention(
model,
encoder_output_dim,
encoder_outputs_transposed,
weighted_encoder_outputs,
decoder_hidden_state_t,
decoder_hidden_state_dim,
attention_weighted_encoder_context_t_prev,
scope,
encoder_lengths=None,
):
weighted_prev_attention_context = _apply_fc_weight_for_sum_match(
model=model,
input=attention_weighted_encoder_context_t_prev,
dim_in=encoder_output_dim,
dim_out=encoder_output_dim,
scope=scope,
name='weighted_prev_attention_context',
)
weighted_decoder_hidden_state = _apply_fc_weight_for_sum_match(
model=model,
input=decoder_hidden_state_t,
dim_in=decoder_hidden_state_dim,
dim_out=encoder_output_dim,
scope=scope,
name='weighted_decoder_hidden_state',
)
# [1, batch_size, encoder_output_dim]
decoder_hidden_encoder_outputs_sum_tmp = model.net.Add(
[
weighted_prev_attention_context,
weighted_decoder_hidden_state,
],
s(scope, 'decoder_hidden_encoder_outputs_sum_tmp'),
)
# [encoder_length, batch_size, encoder_output_dim]
decoder_hidden_encoder_outputs_sum = model.net.Add(
[
weighted_encoder_outputs,
decoder_hidden_encoder_outputs_sum_tmp,
],
s(scope, 'decoder_hidden_encoder_outputs_sum'),
broadcast=1,
)
attention_logits_transposed = _calc_attention_logits_from_sum_match(
model=model,
decoder_hidden_encoder_outputs_sum=decoder_hidden_encoder_outputs_sum,
encoder_output_dim=encoder_output_dim,
scope=scope,
)
# [batch_size, encoder_length, 1]
attention_weights_3d = _calc_attention_weights(
model=model,
attention_logits_transposed=attention_logits_transposed,
scope=scope,
encoder_lengths=encoder_lengths,
)
# [batch_size, encoder_output_dim, 1]
attention_weighted_encoder_context = _calc_weighted_context(
model=model,
encoder_outputs_transposed=encoder_outputs_transposed,
encoder_output_dim=encoder_output_dim,
attention_weights_3d=attention_weights_3d,
scope=scope,
)
return attention_weighted_encoder_context, attention_weights_3d, [
decoder_hidden_encoder_outputs_sum,
]
def apply_regular_attention(
model,
encoder_output_dim,
encoder_outputs_transposed,
weighted_encoder_outputs,
decoder_hidden_state_t,
decoder_hidden_state_dim,
scope,
encoder_lengths=None,
):
weighted_decoder_hidden_state = _apply_fc_weight_for_sum_match(
model=model,
input=decoder_hidden_state_t,
dim_in=decoder_hidden_state_dim,
dim_out=encoder_output_dim,
scope=scope,
name='weighted_decoder_hidden_state',
)
# [encoder_length, batch_size, encoder_output_dim]
decoder_hidden_encoder_outputs_sum = model.net.Add(
[weighted_encoder_outputs, weighted_decoder_hidden_state],
s(scope, 'decoder_hidden_encoder_outputs_sum'),
broadcast=1,
use_grad_hack=1,
)
attention_logits_transposed = _calc_attention_logits_from_sum_match(
model=model,
decoder_hidden_encoder_outputs_sum=decoder_hidden_encoder_outputs_sum,
encoder_output_dim=encoder_output_dim,
scope=scope,
)
# [batch_size, encoder_length, 1]
attention_weights_3d = _calc_attention_weights(
model=model,
attention_logits_transposed=attention_logits_transposed,
scope=scope,
encoder_lengths=encoder_lengths,
)
# [batch_size, encoder_output_dim, 1]
attention_weighted_encoder_context = _calc_weighted_context(
model=model,
encoder_outputs_transposed=encoder_outputs_transposed,
encoder_output_dim=encoder_output_dim,
attention_weights_3d=attention_weights_3d,
scope=scope,
)
return attention_weighted_encoder_context, attention_weights_3d, [
decoder_hidden_encoder_outputs_sum,
]
def apply_dot_attention(
model,
encoder_output_dim,
# [batch_size, encoder_output_dim, encoder_length]
encoder_outputs_transposed,
# [1, batch_size, decoder_state_dim]
decoder_hidden_state_t,
decoder_hidden_state_dim,
scope,
encoder_lengths=None,
):
if decoder_hidden_state_dim != encoder_output_dim:
weighted_decoder_hidden_state = brew.fc(
model,
decoder_hidden_state_t,
s(scope, 'weighted_decoder_hidden_state'),
dim_in=decoder_hidden_state_dim,
dim_out=encoder_output_dim,
axis=2,
)
else:
weighted_decoder_hidden_state = decoder_hidden_state_t
# [batch_size, decoder_state_dim]
squeezed |
rmariotti/py_cli_rpg | test.py | Python | gpl-3.0 | 598 | 0.040201 | #!/usr/bin/env python3
import rooms
imp | ort entity
''' creazione delle stanze da creare durante il gioco '''
def rand_room(sub_rooms=1, name=1):
room_list = []
name_sub = name
while sub_rooms > 0:
name_sub = name_sub + 1
sub_rooms = sub_rooms - 1
room_list.append(rand_r | oom(sub_rooms, name_sub))
return rooms.Room(doors=room_list, description=("Door n°"+str(name)))
''' creazione di alcuni mostri '''
space_police = entity.Entity(name="space policeman",
hp=12, velocity=7,
strenght=7,
intelligence=2
hand=None
items=[]
lvl=1) |
wwu-numerik/scripts | python/paraview/paraview_metafile.py | Python | bsd-2-clause | 1,481 | 0.006752 | #!/usr/bin/env python
# - | *- coding: utf-8 -*-
import os
import sys
default_prefixes = ( 'computed__velocity', 'computed__pressure' )
ext = '.vtu'
path = os.getcwd()
pvd_header = '''<?xml version="1.0"?>
<VTKFile type="Collection" version="0.1" byte_order="LittleEndian">
<Collection>
'''
pvd_footer = '''
</Collection>
</VTKFile>'''
def writePVD(prefixes):
for pref in prefixes:
'only consider files named PREFIXsome_numberEXT'
try:
cutoff = int(sys.argv[1])
e | xcept:
cutoff = None
fn = '%s.pvd' % pref
print
fn
if cutoff:
print
'\t\tcutoff: %d' % cutoff
files = filter(lambda p: p.startswith(pref) and p.endswith(ext), os.listdir(path))
files.sort()
with open(fn, 'wb') as pvd:
pvd.write(pvd_header)
for f in files:
try:
'use some_number as timestep'
step = int(f[f.find(pref) + len(pref):f.find(ext)])
if cutoff and step > cutoff:
break
pvd.write('\t\t<DataSet timestep="%d" group="" part="0" file="%s" />\n' % (step, f ))
except ValueError:
print
'\tinvalid filename: %s' % f
pvd.write(pvd_footer)
if __name__ == '__main__':
if len(sys.argv) > 2:
writePVD(sys.argv[2:])
else:
writePVD(default_prefixes) |
kubeflow/kfp-tekton-backend | sdk/python/kfp/components/_component_store.py | Python | apache-2.0 | 6,197 | 0.005164 | __all__ = [
'ComponentStore',
]
from pathlib import Path
import copy
import requests
from typing import Callable
from . import _components as comp
from .structures import ComponentReference
class ComponentStore:
def __init__(self, local_search_paths=None, url_search_prefixes=None):
self.local_search_paths = local_search_paths or ['.']
self.url_search_prefixes = url_search_prefixes or []
self._component_file_name = 'component.yaml'
self._digests_subpath = 'versions/sha256'
self._tags_subpath = 'versions/tags'
def load_component_from_url(self, url):
return comp.load_component_from_url(url)
def load_component_from_file(self, path):
return comp.load_component_from_file(path)
def load_component(self, name, digest=None, tag=None):
'''
Loads component local file or URL and creates a task factory function
Search locations:
<local-search-path>/<name>/component.yaml
<url-search-prefix>/<name>/component.yaml
If the digest is specified, then the search locations are:
<local-search-path>/<name>/versions/sha256/<digest>
<url-search-prefix>/<name>/versions/sha256/<digest>
If the tag is specified, then the search | locations are:
<local-search-path>/<name>/versions/tags/<digest>
<url-search-prefix>/<name>/versio | ns/tags/<digest>
Args:
name: Component name used to search and load the component artifact containing the component definition.
Component name usually has the following form: group/subgroup/component
digest: Strict component version. SHA256 hash digest of the component artifact file. Can be used to load a specific component version so that the pipeline is reproducible.
tag: Version tag. Can be used to load component version from a specific branch. The version of the component referenced by a tag can change in future.
Returns:
A factory function with a strongly-typed signature.
Once called with the required arguments, the factory constructs a pipeline task instance (ContainerOp).
'''
#This function should be called load_task_factory since it returns a factory function.
#The real load_component function should produce an object with component properties (e.g. name, description, inputs/outputs).
#TODO: Change this function to return component spec object but it should be callable to construct tasks.
component_ref = ComponentReference(name=name, digest=digest, tag=tag)
component_ref = self._load_component_spec_in_component_ref(component_ref)
return comp._create_task_factory_from_component_spec(
component_spec=component_ref.spec,
component_ref=component_ref,
)
def _load_component_spec_in_component_ref(
self,
component_ref: ComponentReference,
) -> ComponentReference:
'''Takes component_ref, finds the component spec and returns component_ref with .spec set to the component spec.
See ComponentStore.load_component for the details of the search logic.
'''
if component_ref.spec:
return component_ref
component_ref = copy.copy(component_ref)
if component_ref.url:
component_ref.spec = comp._load_component_spec_from_url(component_ref.url)
return component_ref
name = component_ref.name
if not name:
raise TypeError("name is required")
if name.startswith('/') or name.endswith('/'):
raise ValueError('Component name should not start or end with slash: "{}"'.format(name))
digest = component_ref.digest
tag = component_ref.tag
tried_locations = []
if digest is not None and tag is not None:
raise ValueError('Cannot specify both tag and digest')
if digest is not None:
path_suffix = name + '/' + self._digests_subpath + '/' + digest
elif tag is not None:
path_suffix = name + '/' + self._tags_subpath + '/' + tag
#TODO: Handle symlinks in GIT URLs
else:
path_suffix = name + '/' + self._component_file_name
#Trying local search paths
for local_search_path in self.local_search_paths:
component_path = Path(local_search_path, path_suffix)
tried_locations.append(str(component_path))
if component_path.is_file():
# TODO: Verify that the content matches the digest (if specified).
component_ref._local_path = str(component_path)
component_ref.spec = comp._load_component_spec_from_file(str(component_path))
return component_ref
#Trying URL prefixes
for url_search_prefix in self.url_search_prefixes:
url = url_search_prefix + path_suffix
tried_locations.append(url)
try:
response = requests.get(url) #Does not throw exceptions on bad status, but throws on dead domains and malformed URLs. Should we log those cases?
response.raise_for_status()
except:
continue
if response.content:
# TODO: Verify that the content matches the digest (if specified).
component_ref.url = url
component_ref.spec = comp._load_component_spec_from_yaml_or_zip_bytes(response.content)
return component_ref
raise RuntimeError('Component {} was not found. Tried the following locations:\n{}'.format(name, '\n'.join(tried_locations)))
def _load_component_from_ref(self, component_ref: ComponentReference) -> Callable:
component_ref = self._load_component_spec_in_component_ref(component_ref)
return comp._create_task_factory_from_component_spec(component_spec=component_ref.spec, component_ref=component_ref)
ComponentStore.default_store = ComponentStore(
local_search_paths=[
'.',
],
url_search_prefixes=[
'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/'
],
)
|
NegativeMjark/mockingmirror | mockingmirror.py | Python | apache-2.0 | 4,974 | 0.000201 | # Copyright 2015 Mark Haines
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from functools import wraps
__version__ = "0.2"
def mirror():
"""Returns a tuple of a mirror and the mock object that it configures"""
mirror = Mirror()
return (mirror, mirror._mock)
def mirrored(setup):
"""Convience decorator for setUp in testcases::
@mirrored
def setUp(self, mirror, mock):
...
is the same as::
def setUp(self):
self.mirror, self.mock = mirror()
mirror, mock = self.mirror, self.mock
...
"""
@wraps(setup)
def wrapped_setup(self):
self.mirror, self.mock = mirror()
return setup(self, self.mirror, self.mock)
return wrapped_setup
class NonCallableMock(mock.NonCallableMock):
def __getattr__(self, name):
try:
return super(NonCallableMock, self).__getattr__(name)
except AttributeError:
raise AttributeError("%r has no attribute %r" % (self, name))
class Mirror(object):
"""Convienence object for setting up mock objects::
mirror.myobject.mymethod()[:] = "Hello"
does the same as::
mock.myobject = NonCallableMock(spec_set=["mymethod"])
mock.myobject.mymethod = Mock(spec_set=[])
mock.myobject.mymethod.return_value = "Hello"
"""
def __init__(self, name=None, parent=None, mirrors=None):
if mirrors is None:
mirrors = []
if parent is not None and parent._name is not None:
path = parent._name + "." + name
else:
path = name
# Add a temporary mock to capture all of the following calls to setattr
self._mock = mock.NonCallableMock()
self._spec = set()
self._name = name
self._parent = parent
self._mirrors | = mirrors
self._ | mirrors.append(self)
self._is_callable = False
self._path = path
# Replace our mock and spec objects.
self._mock = NonCallableMock(name=path)
self._mock.mock_add_spec([], True)
self._spec = set()
if name is not None:
setattr(self._parent._mock, self._name, self._mock)
def __getattr__(self, name):
"""Whenever a member or method is accessed on the mirror for the
first time we need to create a new mirror for that potential member
or method."""
self._add_to_spec(name)
mirror = Mirror(name, self, self._mirrors)
object.__setattr__(self, name, mirror)
return mirror
def __setattr__(self, name, value):
"""Setting an attribute on the mirror causes the same attribute to be
set on the mock object it is mirroring."""
object.__setattr__(self, name, value)
if name != "_mock" and name != "_spec":
self._add_to_spec(name)
setattr(self._mock, name, value)
def __call__(self):
"""Calling a mirror makes the mirrored mock object callable.
Returns an invocation object that can be used to set return values and
side effects for the mocked method."""
if not self._is_callable:
self._is_callable = True
self._mock = mock.Mock(name=self._path)
self._mock.add_spec([], True)
setattr(self._parent._mock, self._name, self._mock)
return Invocation(self, self._mock)
def _add_to_spec(self, name):
"""The spec of the mirrored mock object is updated whenever the mirror
gains new attributes"""
self._spec.add(name)
self._mock.mock_add_spec(list(self._spec), True)
class ReturnValueNotSet(object):
"""Special object to indicate methods without return values"""
__slots__ = []
def __repr__(self):
return "RETURN_VALUE_NOT_SET"
RETURN_VALUE_NOT_SET = ReturnValueNotSet()
class Invocation(object):
"""Used to manipulate the return value and side effects of mock methods"""
def __init__(self, mirror, mock):
self.mirror = mirror
self.mock = mock
self.mock.return_value = RETURN_VALUE_NOT_SET
def __call__(self, side_effect):
"""Decorate a function to use it as a side effect"""
self.mock.side_effect = side_effect
def __setitem__(self, _ignored, return_value):
"""Item assignment sets the return value and removes any side effect"""
self.mock.return_value = return_value
self.mock.side_effect = None
|
atados/api | atados_core/migrations/0054_auto_20160731_1811.py | Python | mit | 1,722 | 0.002904 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import atados_core.models.uploads
import django_resized.forms
class Migration(migrations.Migration):
dependencies = [
('atados_core', '0053_auto_20160728_1309'),
]
operations = [
migrations.AddField(
model_name='project',
name='uploaded_image',
field=models.ForeignKey(blank=True, to='atados_core.UploadedImage', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='uploadedimage',
name='image',
field=models.ImageField(upload_to=atados_core.models.uploads.ImageName(), verbose_name='Image 350x260'),
preserve_default=True,
),
migrations.AlterField(
| model_name='uploadedimage',
name='image_large',
field=django_resized.forms.ResizedImageField(default=None, null=True, upload_to=atados_core.models.uploads.ImageName(b'-large'), blank=True),
preserve_default=True,
),
migrations.AlterField(
| model_name='uploadedimage',
name='image_medium',
field=django_resized.forms.ResizedImageField(default=None, null=True, upload_to=atados_core.models.uploads.ImageName(b'-medium'), blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='uploadedimage',
name='image_small',
field=django_resized.forms.ResizedImageField(default=None, null=True, upload_to=atados_core.models.uploads.ImageName(b'-small'), blank=True),
preserve_default=True,
),
]
|
andrius-momzyakov/grade | web/migrations/0002_auto_20170604_1403.py | Python | gpl-3.0 | 593 | 0.001704 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-04 14:03
from __future__ import unicode_literals
from django.db impor | t migrations, models
class Migration(migrations.Migration):
dependencies = [
('web', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='worker',
name='jobcategories',
),
migrations.AddField(
model_name='worker',
name='jobcategories',
field=models.ManyToManyField(to='web.JobCategory | ', verbose_name='Услуги'),
),
]
|
Anaconda-Platform/anaconda-client | binstar_client/commands/config.py | Python | bsd-3-clause | 6,043 | 0.001158 | '''
anaconda-client configuration
Get, Set, Remove or Show the anaconda-client configuration.
###### anaconda-client sites
anaconda-client sites are a mechanism to allow users to quickly switch
between Anaconda repository instances. This is primarily used for testing
the anaconda alpha site. But also has applications for the
on-site [Anaconda Enterprise](http://continuum.io/anaconda-server).
anaconda-client comes with two pre-configured sites `alpha` and
`binstar` you may use these in one of two ways:
* Invoke the anaconda command with the `-s/--site` option
e.g. to use the alpha testing site:
anaconda -s alpha whoami
* Set a site as the default:
anaconda config --set default_site alpha
anaconda whoami
###### Add a anaconda-client site
After installing [Anaconda Enterprise](http://continuum.io/anaconda-server)
you can add a site named **site_name** like this:
anaconda config --set sites.site_name.url "http://<anaconda-enterprise-ip>:<port>/api"
anaconda config --set default_site site_name
###### Site Options VS Global Options
All options can be set as global options - affecting all sites,
or site options - affecting only one site
By default options are set globally e.g.:
anaconda config --set OPTION VALUE
If you want the option to be limited to a single site,
prefix the option with `sites.site_name` e.g.
anaconda config --set sites.site_name.OPTION VALUE
###### Common anaconda-client configuration options
* `url`: Set the anaconda api url (default: https://api.anaconda.org)
* `ssl_verify`: Perform ssl validation on the https requests.
ssl_verify may be `True`, `False` or a path to a root CA pem file.
###### Toggle auto_register when doing anaconda upload
The default is yes, automatically create a new package when uploading.
If no, then an upload will fail if the package name does not already exist on the server.
anaconda config --set auto_register yes|no
'''
from __future__ import print_function
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
logger = logging.getLogger('binstar.config')
DEPRECATED = {
'verify_ssl': 'Please use ssl_verify instead'
}
def recursive_set(config_data, key, value, type_):
while '.' in key:
prefix, key = key.split('.', 1)
config_data = config_data.setdefault(prefix, {})
if key not in CONFIGURATION_KEYS:
logger.warning('"%s" is not a known configuration key', key)
if key in DEPRECATED.keys():
message = "{} is deprecated: {}".format(key, DEPRECATED[key])
logger.warning(message)
config_data[key] = type_(value)
def recursive_remove(config_data, key):
while '.' in key:
if not config_data:
return
prefix, key = key.split('.', 1)
config_data = config_data.get(prefix, {})
del config_data[key]
def main(args):
config = get_config()
if args.show:
logger.info(yaml_dump(config))
return
if args.show_sources:
config_files = load_file_configs(SEARCH_PATH)
for path in config_files:
logger.info('==> %s <==', path)
logger.info(yaml_dump(config_files[path]))
return
if args.get:
if args.get in config:
logger.info(config[args.get])
else:
logger.info("The value of '%s' is not set." % args.get)
return
if args.files:
logger.info('User Config: %s' % USER_CONFIG)
logger.info('System Config: %s' % SYSTEM_CONFIG)
return
config_file = USER_CONFIG if args.user else SYSTEM_CONFIG
config = load_config(config_file)
for key, value in args.set:
recursive_set(config, key, value, args.type)
for key in args.remove:
try:
recursive_remove(config, key)
except KeyError:
logger.error("Key %s does not exist" % key)
if not (args.set or args.remove):
raise ShowHelp()
save_config(config, config_file)
def add_parser(subparsers):
description = 'Anaconda client configuration'
parser = subparsers.add_parser('config',
| help=description,
description=description,
epilog=__doc__,
formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('--type', default=safe_load,
help='The type of the values in the set commands')
agroup = parser.add_argument_group('actions')
agroup.add_argument('--set', nargs=2, acti | on='append', default=[],
help='sets a new variable: name value', metavar=('name', 'value'))
agroup.add_argument('--get', metavar='name',
help='get value: name')
agroup.add_argument('--remove', action='append', default=[],
help='removes a variable')
agroup.add_argument('--show', action='store_true', default=False,
help='show all variables')
agroup.add_argument('-f', '--files', action='store_true',
help='show the config file names')
agroup.add_argument('--show-sources', action='store_true',
help='Display all identified config sources')
lgroup = parser.add_argument_group('location')
lgroup.add_argument('-u', '--user', action='store_true', dest='user', default=True,
help='set a variable for this user')
lgroup.add_argument('-s', '--system', '--site', action='store_false', dest='user',
help='set a variable for all users on this machine')
parser.set_defaults(main=main, sub_parser=parser)
|
rdkit/rdkit-orig | Contrib/LEF/ClusterFps.py | Python | bsd-3-clause | 2,812 | 0.014225 | #
# Copyright (c) 2009, Novartis Institutes for BioMedical Research Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Novartis Institutes for BioMedical Research Inc.
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Cr | eated by Greg Landrum and Anna Vulpetti, March 2009
from rdkit.ML.Cluster import Butina
from rdkit import DataStructs
import sys,cPickle
# sims is the list of similarity thresholds used to generate clusters
sims=[.9,.8,.7,.6]
smis=[]
uniq=[]
uFps=[]
for fileN in sys.argv[1:]:
inF = file(sys.argv[1],'r')
cols = cPickle.load(inF)
fps = cPickle.load(inF)
for row in fps:
nm,smi,fp = row[:3]
if smi not in smis | :
try:
fpIdx = uFps.index(fp)
except ValueError:
fpIdx=len(uFps)
uFps.append(fp)
uniq.append([fp,nm,smi,'FP_%d'%fpIdx]+row[3:])
smis.append(smi)
def distFunc(a,b):
return 1.-DataStructs.DiceSimilarity(a[0],b[0])
for sim in sims:
clusters=Butina.ClusterData(uniq,len(uniq),1.-sim,False,distFunc)
print >>sys.stderr,'Sim: %.2f, nClusters: %d'%(sim,len(clusters))
for i,cluster in enumerate(clusters):
for pt in cluster:
uniq[pt].append(str(i+1))
cols.append('cluster_thresh_%d'%(int(100*sim)))
print ' '.join(cols)
for row in uniq:
print ' '.join(row[1:])
|
ecs28/pyoverview | pyoverview/basic/variables.py | Python | apache-2.0 | 835 | 0.005988 | """
Basic variables overview:
All variable in Python are objects and the types are defined on the fly
"""
# function
def functionexample(param):
print(param)
# variables
# String
letter = "hello"
letter2 = 'ab"c'
# concat using +
letter3 = letter + ' ' + letter2
# ** mixing concat numers + letters is no supported
# int
myint = 20
letter4 = letter3 + ' ' + str(myint)
# null
letter = None
# float
myfloat1 = 10.0
myfloat2 = float(7)
# | call function
functionexample(letter4)
# multiple assigns
a,b = 3,4
mystring = letter
''' Check the object type to use the co | rrect output format '''
if mystring == "hello":
print("String: %s" % mystring)
if isinstance(myfloat1, float) and myfloat1 == 10.0:
print("Float: %f" % myfloat1)
if isinstance(myint, int) and myint == 20:
print("Integer: %d" % myint)
|
egorsmkv/wmsigner | setup.py | Python | mit | 848 | 0 | import os
from distutils.core import setup
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='wmsigner',
version='0.1.1',
url='https://github.com/egorsmkv/wmsigner',
description='WebMoney Signer',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
author='Egor Smo | lyakov',
author_e | mail='egorsmkv@gmail.com',
license='MIT',
keywords='webmoney singer security wmsigner WMXI',
packages=['wmsigner'],
data_files=[('', ['README.rst'])],
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: Financial and Insurance Industry',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
j-5/zask | zask/ext/sqlalchemy/__init__.py | Python | bsd-3-clause | 18,665 | 0 | # -*- coding: utf-8 -*-
"""
zask.ext.sqlalchemy
~~~~~~~~~~~~~~~~~~~
Adds basic SQLAlchemy support to your application.
I have not add all the feature, bacause zask is not for web,
The other reason is i can't handle all the features right now :P
Differents between Flask-SQLAlchemy:
1. No default ``scopefunc`` it means that you need define
how to separate sessions your self
2. No signal session
3. No query record
4. No pagination and HTTP headers, e.g. ``get_or_404``
5. No difference between app bound and not bound
:copyright: (c) 2015 by the J5.
:license: BSD, see LICENSE for more details.
:copyright: (c) 2012 by Armin Ronacher, Daniel Neuhäuser.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement, absolute_import
import os
import re
import sys
import functools
import sqlalchemy
import atexit
from functools import partial
from sqlalchemy import orm, event
from sqlalchemy.orm.exc import UnmappedClassError
from sqlalchemy.orm.session import Session as SessionBase
from sqlalchemy.engine.url import make_url
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from zask import _request_ctx
from zask.ext.sqlalchemy._compat import iteritems, itervalues, xrange, \
string_types
_camelcase_re = re.compile(r'([A-Z]+)(?=[a-z0-9])')
def _make_table(db):
def _make_table(*args, **kwargs):
if len(args) > 1 and isinstance(args[1], db.Column):
args = (args[0], db.metadata) + args[1:]
info = kwargs.pop('info', None) or {}
info.setdefault('bind_key', None)
kwargs['info'] = info
return sqlalchemy.Table(*args, **kwargs)
return _make_table
def _set_default_query_class(d):
if 'query_class' not in d:
d['query_class'] = orm.Query
def _wrap_with_default_query_class(fn):
@functools.wraps(fn)
def newfn(*args, **kwargs):
_set_default_query_class(kwargs)
if "backref" in kwargs:
backref = kwargs['backref']
if isinstance(backref, string_types):
backref = (backref, {})
_set_default_query_class(backref[1])
return fn(*args, **kwargs)
return newfn
def get_state(app):
"""Gets the state for the application"""
assert 'sqlalchemy' in app.extensions, \
'The sqlalchemy extension was not registered to the current ' \
'application. Please make sure to call init_app() first.'
return app.extensions['sqlalchemy']
def _include_sqlalchemy(obj):
for module in sqlalchemy, sqlalchemy.orm:
for key in module.__all__:
if not hasattr(obj, key):
setattr(obj, key, getattr(module, key))
# Note: obj.Table does not attempt to be a SQLAlchemy Table class.
obj.Table = _make_table(obj)
obj.relationship = _wrap_with_default_query_class(obj.relationship)
obj.relation = _wrap_with_default_query_class(obj.relation)
obj.dynamic_loader = _wrap_with_default_query_class(obj.dynamic_loader)
obj.event = event
def _should_set_tablename(bases, d):
"""Check what values are set by a class and
its bases to determine if a
tablename should be automatically generated.
The class and its bases are checked in order
of precedence: the class itself then each base
in the order they were given at class definition.
Abstract classes do not generate a tablename,
although they may have set
or inherited a tablename elsewhere.
If a class defines a tablename or table,
a new one will not be generated.
Otherwise, if the class defines a primary key,
a new name will be generated.
This supports:
* Joined table inheritance without explicitly
naming sub-models.
* Single table inheritance.
* Inheriting from mixins or abstract models.
:param bases: base classes of new class
:param d: new class dict
:return: True if tablename should be set
"""
if '__tablename__' in d or '__table__' in d or '__abstract__' in d:
return False
if any(v.primary_key for v in itervalues(d)
if isinstance(v, sqlalchemy.Column)):
return True
for base in bases:
if hasattr(base, '__tablename__') or hasattr(base, '__table__'):
return False
for name in dir(base):
attr = getattr(base, name)
if isinstance(attr, sqlalchemy.Column) and attr.primary_key:
return True
class _BoundDeclarativeMeta(DeclarativeMeta):
def __new__(cls, name, bases, d):
if _should_set_tablename(bases, d):
def _join(match):
word = match.group()
if len(word) > 1:
return ('_%s_%s' % (word[:-1], word[-1])).lower()
return '_' + word.lower()
d['__tablename__'] = _camelcase_re.sub(_join, name).lstrip('_')
return DeclarativeMeta.__new__(cls, name, bases, d)
def __init__(self, name, bases, d):
bind_key = d.pop('__bind_key__', None)
DeclarativeMeta.__init__(self, name, bases, d)
if bind_key is not None:
self.__table__.info['bind_key'] = bind_key
class BindSession(SessionBase):
"""The BindSession is the default session that Zask-SQLAlchemy
uses. It extends the default session system with bind selection.
If you want to use a different session you can override the
:meth:`SQLAlchemy.create_session` function.
"""
def __init__(self, db, autocommit=False, autoflush=True, **options):
#: The application that this session belongs to.
self.app = db.get_app()
bind = options.pop('bind', None) or db.engine
SessionBase.__init__(self, autocommit=autocommit, autoflush=autoflush,
bind=bind,
binds=db.get_binds(self.app), **options)
def get_bind(self, mapper, clause=None):
# mapper is None if someone tries to just get a connection
if mapper is not None:
info = getattr(mapper.mapped_table, 'info', {})
bind_key = info.get('bind_key')
if bind_key is not None:
state = get_state(self.app)
return state.db.get_engine(self.app, bind=bind_key)
return SessionBase.get_bind(self, mapper, clause)
class _SQLAlchemyState(object):
"""Remembers configuration for the (db, app) tuple."""
def __init__(self, db, app):
self.db = db
self.app = app
self.connectors = {}
class _QueryProperty(object):
def __init__(self, sa):
self.sa = sa
def __get__(self, obj, type):
try:
mapper = orm.class_mapper(type)
if mapper:
return type.query_class(mapper, session=self.sa.session())
except UnmappedClassError:
return None
class _EngineConnector(object):
def __init__(self, sa, app, bind=None):
self._sa = sa
self._app = app
self._engine = None
self._connected_for = None
self._bind = bind
def get_uri(self):
if self._bind is None:
return self._app.config['SQLALCHEMY_DATABASE_URI']
binds = self._app.config.get('SQLALCHEMY_BINDS') or ()
assert self._bind in binds, \
'Bind %r is not specified. Set it in the SQLALCHEMY_BINDS ' \
'configuration variable' % self._bind
return binds[self._bind]
def get_engine(self):
uri = self.get_uri()
echo = self._app.config['SQLALCHEMY_ECHO']
if (uri, echo) == self._connected_for:
return self._engine
info = make_url(uri)
# options = {'convert_unicode': True}
options = {}
self._sa.apply_pool_defaults(self._app, options)
self._sa.apply_driver_hacks(self._app, info, options)
if | echo:
options['echo'] = True
self._engine = rv = sqlalchemy.cr | eate_engine(info, **options)
self._connected_for = (uri, echo)
return rv
class Model(object):
"""Baseclass for custom user models."""
#: the query class used. The :attr: |
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot | decksite/views/kickoff.py | Python | gpl-3.0 | 842 | 0.003563 | from flask import url_for
from decksite.view import View
from magic import tournaments
from shared import dtutil
# pylint: disable=no-self-use
class KickOff(View):
def __init__(self) -> None:
super().__init__()
kick_off_date = tournaments.kick_off_date()
if dtutil.now() > kick_off_date:
self.date_info = 'The Season Kick Off is on the second Saturday of the season'
else:
self.date_info = 'The next Season Kick Off is on ' + dtutil.display_date_with_date_and_year(kick_off_date)
self.faqs_url = url_for('f | aqs')
self.cardhoarder_loan_url = 'https://www.cardhoarder.com/free-loan-program-faq'
self.tournaments_url = url_for('tournaments | ')
self.discord_url = url_for('discord')
def page_title(self) -> str:
return 'The Season Kick Off'
|
akshaynathr/mailman | src/mailman/chains/owner.py | Python | gpl-3.0 | 1,754 | 0.00114 | # Copyright (C) 2012 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""The standard -owner posting chain."""
from __future__ import absolute_import, print_function, unicode_literals
__metaclass__ = type
__all__ = [
'BuiltInOwnerChain',
]
import logging
from zope.event import notify
from mailman.chains.base import ChainNotification, | TerminalChainBase
from mailman.config i | mport config
from mailman.core.i18n import _
log = logging.getLogger('mailman.vette')
class OwnerNotification(ChainNotification):
"""An event signaling that a message is accepted to the -owner address."""
class BuiltInOwnerChain(TerminalChainBase):
"""Default built-in -owner address chain."""
name = 'default-owner-chain'
description = _('The built-in -owner posting chain.')
def _process(self, mlist, msg, msgdata):
# At least for now, everything posted to -owners goes through.
config.switchboards['pipeline'].enqueue(msg, msgdata)
log.info('OWNER: %s', msg.get('message-id', 'n/a'))
notify(OwnerNotification(mlist, msg, msgdata, self))
|
hayalasalah/hayalasalah | web/cloud/cloud/profiles/__init__.py | Python | mpl-2.0 | 283 | 0.003534 | """
profiles - Django application for managing users, mosques, and their membership.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not | distributed with this
file, You c | an obtain one at http://mozilla.org/MPL/2.0/.
"""
|
camptocamp/mapproxy | mapproxy/test/unit/test_util_conf_utils.py | Python | apache-2.0 | 2,557 | 0.001173 | # -:- encoding: utf-8 -:-
# This file is part of the MapProxy project.
# Copyright (C) 2013 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
from mapproxy.script.conf.utils import update_config
class TestUpdateConfig(object):
    """Tests for mapproxy.script.conf.utils.update_config.

    Note: the original block contained stray '|' separator characters
    (dataset-extraction artifacts) that broke the literals in
    test_add_all and test_extend; they are removed here.
    """

    def test_empty(self):
        """An empty update leaves the config unchanged."""
        a = {"a": "foo", "b": 42}
        b = {}
        assert update_config(deepcopy(a), b) == a

    def test_add(self):
        """New keys are added."""
        a = {"a": "foo", "b": 42}
        b = {"c": [1, 2, 3]}
        assert update_config(a, b) == {"a": "foo", "b": 42, "c": [1, 2, 3]}

    def test_mod(self):
        """Existing keys are overwritten by the update."""
        a = {"a": "foo", "b": 42, "c": {}}
        b = {"a": [1, 2, 3], "c": 1}
        assert update_config(a, b) == {"b": 42, "a": [1, 2, 3], "c": 1}

    def test_nested_add_mod(self):
        """Nested dicts are merged recursively."""
        a = {"a": "foo", "b": {"ba": 42, "bb": {}}}
        b = {"b": {"bb": {"bba": 1}, "bc": [1, 2, 3]}}
        assert update_config(a, b) == {
            "a": "foo",
            "b": {"ba": 42, "bb": {"bba": 1}, "bc": [1, 2, 3]},
        }

    def test_add_all(self):
        """__all__ applies the update to every existing key."""
        a = {"a": "foo", "b": {"ba": 42, "bb": {}}}
        b = {"__all__": {"ba": 1}}
        assert update_config(a, b) == {"a": {"ba": 1}, "b": {"ba": 1, "bb": {}}}

    def test_extend(self):
        """key__extend__ appends to an existing list."""
        a = {"a": "foo", "b": ["ba"]}
        b = {"b__extend__": ["bb", "bc"]}
        assert update_config(a, b) == {"a": "foo", "b": ["ba", "bb", "bc"]}

    def test_prefix_wildcard(self):
        """____suffix matches any key ending with _suffix."""
        a = {"test_foo": "foo", "test_bar": "ba", "test2_foo": "test2", "nounderfoo": 1}
        b = {"____foo": 42}
        assert update_config(a, b) == {
            "test_foo": 42,
            "test_bar": "ba",
            "test2_foo": 42,
            "nounderfoo": 1,
        }

    def test_suffix_wildcard(self):
        """prefix____ matches any key starting with prefix_."""
        a = {"test_foo": "foo", "test_bar": "ba", "test2_foo": "test2", "nounderfoo": 1}
        b = {"test____": 42}
        assert update_config(a, b) == {
            "test_foo": 42,
            "test_bar": 42,
            "test2_foo": "test2",
            "nounderfoo": 1,
        }
citrix-openstack-build/oslo.log | oslo/log/openstack/common/rpc/matchmaker.py | Python | apache-2.0 | 9,429 | 0 | # Copyright 2011 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The MatchMaker classes should accept a Topic or Fanout exchange key and
return keys for direct exchanges, per (approximate) AMQP parlance.
"""
import contextlib
import eventlet
from oslo.config import cfg
from oslo.log.openstack.common.gettextutils import _, _LI
from oslo.log.openstack.common import log as logging
# Configuration options for matchmaker heartbeats.  The original block
# contained stray '|' characters (extraction artifacts) inside the
# 'time-to-live' help string and in the contextmanager alias; repaired.
matchmaker_opts = [
    cfg.IntOpt('matchmaker_heartbeat_freq',
               default=300,
               help='Heartbeat frequency'),
    cfg.IntOpt('matchmaker_heartbeat_ttl',
               default=600,
               help='Heartbeat time-to-live.'),
]

CONF = cfg.CONF
CONF.register_opts(matchmaker_opts)
LOG = logging.getLogger(__name__)
# Short alias used by the exchange implementations below.
contextmanager = contextlib.contextmanager
class MatchMakerException(Exception):
    """Signals that a match could not be found."""
    # Default message; oslo-style exception handling reads this attribute.
    message = _("Match not found by MatchMaker.")
class Exchange(object):
    """Implements lookups.

    Subclass this to support hashtables, dns, etc.
    """
    def __init__(self):
        pass
    def run(self, key):
        # Subclasses must return a list of (key, host) tuples for `key`.
        raise NotImplementedError()
class Binding(object):
    """A binding on which to perform a lookup."""
    def __init__(self):
        pass
    def test(self, key):
        # Subclasses must return True when this binding applies to `key`.
        raise NotImplementedError()
class MatchMakerBase(object):
    """Match Maker base class.

    Build off HeartbeatMatchMakerBase if building a heartbeat-capable
    MatchMaker.
    """

    def __init__(self):
        # List of (binding, exchange, negate, last-if-true) tuples.
        self.bindings = []

        self.no_heartbeat_msg = _('Matchmaker does not implement '
                                  'registration or heartbeat.')

    def register(self, key, host):
        """Register a host on a backend.

        Heartbeats, if applicable, may keepalive registration.
        """
        pass

    def ack_alive(self, key, host):
        """Acknowledge that a key.host is alive.

        Used internally for updating heartbeats, but may also be used
        publicly to acknowledge a system is alive (i.e. rpc message
        successfully sent to host)
        """
        pass

    def is_alive(self, topic, host):
        """Checks if a host is alive."""
        pass

    def expire(self, topic, host):
        """Explicitly expire a host's registration."""
        pass

    def send_heartbeats(self):
        """Send all heartbeats.

        Use start_heartbeat to spawn a heartbeat greenthread,
        which loops this method.
        """
        pass

    def unregister(self, key, host):
        """Unregister a topic."""
        pass

    def start_heartbeat(self):
        """Spawn heartbeat greenthread."""
        pass

    def stop_heartbeat(self):
        """Destroys the heartbeat greenthread."""
        pass

    def add_binding(self, binding, rule, last=True):
        """Append a lookup rule; `last` stops rule processing on a match."""
        # Third element toggles negation (negate bindings are not yet
        # implemented in the underlying support).
        self.bindings.append((binding, rule, False, last))

    def queues(self, key):
        """Resolve `key` to a list of worker queues via the bindings."""
        matched = []
        for binding, exchange, negate, stop_on_match in self.bindings:
            if not binding.test(key):
                continue
            matched.extend(exchange.run(key))
            if stop_on_match:
                break
        return matched
class HeartbeatMatchMakerBase(MatchMakerBase):
    """Base for a heart-beat capable MatchMaker.

    Provides common methods for registering, unregistering, and maintaining
    heartbeats.
    """

    def __init__(self):
        self.hosts = set()
        self._heart = None
        # Maps (key, host) -> host for every live registration.
        self.host_topic = {}

        super(HeartbeatMatchMakerBase, self).__init__()

    def send_heartbeats(self):
        """Send a heartbeat for every registered (key, host) pair.

        Use start_heartbeat to spawn a heartbeat greenthread,
        which loops this method.
        """
        for key, host in self.host_topic:
            self.ack_alive(key, host)

    def ack_alive(self, key, host):
        """Acknowledge that a host.topic is alive.

        Used internally for updating heartbeats, but may also be used
        publicly to acknowledge a system is alive (i.e. rpc message
        successfully sent to host)
        """
        raise NotImplementedError("Must implement ack_alive")

    def backend_register(self, key, host):
        """Implements registration logic; called by register(key, host)."""
        raise NotImplementedError("Must implement backend_register")

    def backend_unregister(self, key, key_host):
        """Implements de-registration logic; called by unregister(key, host)."""
        raise NotImplementedError("Must implement backend_unregister")

    def register(self, key, host):
        """Register a host on a backend.

        Heartbeats, if applicable, may keepalive registration.
        """
        self.hosts.add(host)
        self.host_topic[(key, host)] = host
        self.backend_register(key, '.'.join((key, host)))
        self.ack_alive(key, host)

    def unregister(self, key, host):
        """Unregister a topic."""
        self.host_topic.pop((key, host), None)
        self.hosts.discard(host)
        self.backend_unregister(key, '.'.join((key, host)))
        LOG.info(_LI("Matchmaker unregistered: %(key)s, %(host)s"),
                 {'key': key, 'host': host})

    def start_heartbeat(self):
        """Spawn a greenthread that loops send_heartbeats().

        Yields for CONF.matchmaker_heartbeat_freq seconds between
        iterations.
        """
        if not self.hosts:
            raise MatchMakerException(
                _("Register before starting heartbeat."))

        def beat():
            while True:
                self.send_heartbeats()
                eventlet.sleep(CONF.matchmaker_heartbeat_freq)

        self._heart = eventlet.spawn(beat)

    def stop_heartbeat(self):
        """Destroys the heartbeat greenthread."""
        if self._heart:
            self._heart.kill()
class DirectBinding(Binding):
    """Matches keys that specify a host via a '.' character.

    Although dots are used in the key, the behavior here is that it
    maps directly to a single host, thus direct.
    """
    def test(self, key):
        return key.find('.') != -1
class TopicBinding(Binding):
    """Matches a 'bare' key without dots.

    AMQP generally considers topic exchanges to be those *with* dots,
    but we deviate here in terminology as the behavior here matches
    that of a topic exchange (whereas where there are dots, behavior
    matches that of a direct exchange.
    """
    def test(self, key):
        return key.find('.') == -1
class FanoutBinding(Binding):
    """Match on fanout keys, where key starts with the 'fanout~' string."""
    def test(self, key):
        # '~' (not '.') separates the fanout marker so fanout keys are
        # never mistaken for direct (dot-containing) keys.
        return key.startswith('fanout~')
class StubExchange(Exchange):
    """Exchange that does nothing."""
    def run(self, key):
        # Echo the key back with no host resolution.
        return [(key, None)]
class LocalhostExchange(Exchange):
"""Exchange where all direct topics are local."""
def __init__(self, host='localhost'):
self.host = host
super(Exchange, self).__init__()
def run(self, key):
return [('.'.join((key.split('.')[0], self |
tschutter/homefiles | install.py | Python | bsd-2-clause | 27,678 | 0 | #!/usr/bin/env python3
"""
Configure user settings.
- Install files in tschutter/homefiles using symbolic links.
- Configure window manager keybindings.
- Install fonts.
"""
import argparse
import glob
import os
import shutil
import stat
import subprocess
import sys
import tempfile
import time
try:
# pylint: disable=F0401
from xdg.BaseDirectory import xdg_cache_home
from xdg.BaseDirectory import xdg_config_home
except ImportError:
# xdg not available on all platforms
# pylint: disable=C0103
xdg_cache_home = os.path.expanduser("~/.cache")
xdg_config_home = os.path.expanduser("~/.config")
def force_run_command(cmdargs, stdinstr=None):
    """Run an external command, returning stdout and stderr as one string.

    Args:
        cmdargs: command and arguments as a list (argv style).
        stdinstr: optional text fed to the command's stdin; defaults to "".

    Returns:
        The command's stdout followed by its stderr, decoded to str.
    """
    # The original defaulted stdinstr to "" and then re-checked
    # `stdinstr is not None` before encoding -- that second check was
    # dead code.  Normalize once, up front.
    input_bytes = (stdinstr or "").encode("ascii")
    result = subprocess.run(
        cmdargs,
        input=input_bytes,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    return (result.stdout + result.stderr).decode()
def run_command(args, cmdargs, stdinstr=None):
    """Run an external command, printing its combined stdout/stderr.

    Honors args.verbose (announce the command) and args.dryrun (skip
    execution entirely).
    """
    if args.verbose:
        print("Running '{0}'".format(" ".join(cmdargs)))
    if args.dryrun:
        return
    output = force_run_command(cmdargs, stdinstr).rstrip()
    if output:
        print(output)
def cygpath_w(pathname):
    r"""Convert a pathname to Windows style X:\foo\bar.

    On non-Cygwin platforms the pathname is returned unchanged.
    """
    if sys.platform == "cygwin":
        # check_output waits for and reaps the child; the old
        # Popen/stdout-iteration never waited on the process.
        output = subprocess.check_output(["cygpath", "--windows", pathname])
        pathname = output.strip().decode("ascii")
    return pathname
def cygpath_u(pathname):
    """Convert a pathname to Cygwin style /cygdrive/X/foo/bar.

    On non-Cygwin platforms the pathname is returned unchanged.
    """
    if sys.platform == "cygwin":
        # check_output waits for and reaps the child; the old
        # Popen/stdout-iteration never waited on the process.
        output = subprocess.check_output(["cygpath", "--unix", pathname])
        pathname = output.strip().decode("ascii")
    return pathname
def simplify_path(path):
    """Perform the inverse of os.path.expanduser().

    Returns *path* (made absolute) with a leading home-directory
    component replaced by '~'.  Paths that merely share a string prefix
    with the home directory (e.g. '/rootx' when home is '/root') are
    returned unchanged -- the old prefix test silently dropped a
    character from such paths.
    """
    homedir = os.path.expanduser("~")
    path = os.path.abspath(path)
    if path == homedir:
        return "~"
    # Require a path-separator boundary after the home prefix.
    if path.startswith(homedir + os.sep):
        path = os.path.join("~", path[len(homedir) + 1:])
    return path
def mkdir(args, enabled, directory, mode):
    """Create *directory* when enabled; otherwise clean up any leftover."""
    if not enabled:
        clean_link(args, directory, False)
        return
    if os.path.isdir(directory):
        return
    print("Creating '{0}' directory.".format(simplify_path(directory)))
    if not args.dryrun:
        os.mkdir(directory, mode)
def is_exe(pathname):
    """Return True when *pathname* names an executable regular file."""
    return os.access(pathname, os.X_OK) and os.path.isfile(pathname)
def exe_in_path(filename):
    """Return True if *filename* resolves to an executable on PATH.

    ~/bin is searched as well, in case this is not a login shell.
    """
    search_path = os.getenv("PATH", os.defpath)
    search_path = os.pathsep.join(
        (search_path, os.path.expanduser("~/bin"))
    )
    return any(
        is_exe(os.path.join(directory, filename))
        for directory in search_path.split(os.pathsep)
    )
def clean_link(args, linkname, backup=True):
    """Delete link or backup files and dirs.

    Ensures nothing is left at args.home_dir/linkname: symbolic links
    and empty directories are removed outright; a real file or non-empty
    directory is either moved to the backup cache (backup=True) or
    deleted (backup=False).  All actions honor args.dryrun.
    """
    link_pathname = os.path.join(args.home_dir, linkname)
    if os.path.islink(link_pathname):
        # The destination exists as a symbolic link.
        print("Deleting symbolic link '{0}'.".format(link_pathname))
        if not args.dryrun:
            os.unlink(link_pathname)
    elif os.path.exists(link_pathname):
        # An empty directory is safe to remove without a backup.
        if os.path.isdir(link_pathname) and not os.listdir(link_pathname):
            print("Removing empty directory '{0}'.".format(link_pathname))
            if not args.dryrun:
                os.rmdir(link_pathname)
            return
        # The destination exists as a file or dir. Back it up.
        if backup:
            backup_dir = os.path.join(args.cache_dir, "homefiles_backup")
            mkdir(args, True, backup_dir, 0o700)
            print("Moving '{0}' to '{1}'.".format(link_pathname, backup_dir))
            if not args.dryrun:
                shutil.move(link_pathname, backup_dir)
        else:
            print("Deleting file or directory '{0}'.".format(link_pathname))
            if not args.dryrun:
                if os.path.isdir(link_pathname):
                    shutil.rmtree(link_pathname)
                else:
                    os.unlink(link_pathname)
def make_link(args, enabled, filename, linkname=None):
    """Create a symbolic link from linkname to filename if enabled is True.

    If filename is relative, prefix it with $HOME/.homefiles.
    If linkname is relative, prefix it with $HOME.  If linkname is not
    specified, it is the same as filename.

    (Stray '|' extraction artifacts that had corrupted this function's
    docstring terminator and first conditional are removed.)
    """
    # pylint: disable=too-many-branches
    if linkname is None:
        if os.path.isabs(filename):
            raise ValueError(
                "default linkname cannot be used with absolute filename"
            )
        linkname = filename

    # Determine the source and destination pathnames.
    if os.path.isabs(filename):
        file_pathname = filename
    else:
        file_pathname = os.path.normpath(
            os.path.join(args.homefiles, filename)
        )
    if os.path.isabs(linkname):
        link_pathname = linkname
    else:
        link_pathname = os.path.join(args.home_dir, linkname)

    # The target filename should always exist.
    if not os.path.exists(file_pathname):
        print("ERROR: File '{0}' does not exist.".format(file_pathname))
        sys.exit(1)

    if enabled and not args.force and os.path.islink(link_pathname):
        # The destination already exists as a symbolic link.  Delete it
        # if it points to the wrong place.
        try:
            samefile = os.path.samefile(file_pathname, link_pathname)
        except OSError:
            samefile = False
        if not samefile:
            clean_link(args, linkname)
        else:
            if args.verbose:
                print(
                    "Link already exists from '{0}' to '{1}'.".format(
                        link_pathname,
                        file_pathname
                    )
                )
            return
    else:
        clean_link(args, linkname)

    if not enabled:
        if args.verbose:
            print("Not linking to '{0}' (not enabled).".format(filename))
        return

    # Ensure that the link_pathname directory exists.
    link_dir = os.path.dirname(link_pathname)
    if not os.path.isdir(link_dir):
        mkdir(args, True, link_dir, 0o766)

    # Make the link target relative.  This usually makes the link
    # shorter in ls output.
    link_target = os.path.relpath(
        file_pathname,
        link_dir
    )

    # Make the symbolic link from link_pathname to link_target.
    print(
        "Creating symbolic link from '{0}' to '{1}'.".format(
            link_pathname,
            link_target
        )
    )
    if not args.dryrun:
        os.symlink(link_target, link_pathname)
def make_dot_link(args, enabled, filename):
    """Link home/.<filename> to homefiles/<filename>."""
    dot_name = "." + filename
    return make_link(args, enabled, filename, dot_name)
def make_sig_link(args):
"""Create a link to the appropriate signature file."""
# Determine the realm.
computername = os.uname()[1].lower()
prefix = computername[:7]
if prefix == "fdsvbld":
realm = "ISC"
elif prefix == "fdsvdfw":
realm = "ISCP"
elif prefix == "fdsvmad":
realm = "ISC"
elif prefix == "fdsvsna":
realm = "ISCP"
elif computername in ["apple", "passion", "wampi", "wampi-win2003"]:
realm = "ISC"
else:
realm = "HOME"
# Link the correct signature.
if realm in ["ISC", "ISCP"]:
|
OuHangKresnik/Ninja | raspberry/ledplay/ledstart.py | Python | mit | 612 | 0.022876 | import RPi.GPIO as GPIO
import time
led = 11
GPIO.setmode(GPIO.BOARD)
GPIO.setup(led,GPIO.OUT)
#for x in range(0,100):
GPIO.output(led,True)
time.sle | ep(0.5)
GPIO.output(led,False)
time.sleep(0.5)
GPIO.output(led,True)
time.sleep(0.5)
GPIO.output(led,False)
time.sleep(0.5)
GPIO.output(led,True)
time.sleep(0.2)
GPIO.output(led,False)
time.sleep(0.2)
GPIO.output(led,True)
time.sleep(0.2)
GPIO.output(led,False)
| time.sleep(0.2)
GPIO.output(led,True)
time.sleep(0.2)
GPIO.output(led,False)
time.sleep(0.2)
# time.sleep(1)
# GPIO.output(led,False)
# time.sleep(1)
#GPIO.cleanup()
#GPIO.output(led,False)
|
deepmind/dd_two_player_games | dd_two_player_games/drift_utils_test.py | Python | apache-2.0 | 3,547 | 0.002255 | # Copyright 2021 DeepMind Technologies Limited and Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Drift utils test."""
from absl.testing import absltest
from absl.testing import parameterized
from dd_two_player_games import drift_utils
from dd_two_player_games import gan
# (disc_lr, gen_lr) pairs: equal, disc-light, gen-light, and strongly
# asymmetric learning rates.  A stray '|' artifact that broke the final
# tuple of the list literal is removed.
LEARNING_RATE_TUPLES = [
    (0.01, 0.01),
    (0.01, 0.05),
    (0.05, 0.01),
    (0.0001, 0.5)]
class DriftUtilsTest(parameterized.TestCase):
  """Checks that two-player drift coefficients match the math.

  Covers simultaneous updates and alternating updates with either
  player going first.
  """

  @parameterized.parameters(LEARNING_RATE_TUPLES)
  def test_sim_updates(self, disc_lr, gen_lr):
    # For simultaneous updates neither player order nor the number of
    # updates matters.
    lrs = gan.GANTuple(disc=disc_lr, gen=gen_lr)
    coeffs = drift_utils.get_dd_coeffs(
        None, True, lrs, num_updates=None)
    self.assertEqual(coeffs.disc.self_norm, 0.5 * disc_lr)
    self.assertEqual(coeffs.disc.other_norm, 0.0)
    self.assertEqual(coeffs.disc.other_dot_prod, 0.5 * disc_lr)
    self.assertEqual(coeffs.gen.self_norm, 0.5 * gen_lr)
    self.assertEqual(coeffs.gen.other_norm, 0.0)
    self.assertEqual(coeffs.gen.other_dot_prod, 0.5 * gen_lr)

  @parameterized.parameters(LEARNING_RATE_TUPLES)
  def test_alt_updates(self, disc_lr, gen_lr):
    # Discriminator first: only the generator's dot-product term picks
    # up the correction factor.
    lrs = gan.GANTuple(disc=disc_lr, gen=gen_lr)
    updates = gan.GANTuple(disc=1, gen=1)
    coeffs = drift_utils.get_dd_coeffs(
        drift_utils.PlayerOrder.disc_first, False, lrs,
        num_updates=updates)
    self.assertEqual(coeffs.disc.self_norm, 0.5 * disc_lr)
    self.assertEqual(coeffs.disc.other_norm, 0.0)
    self.assertEqual(coeffs.disc.other_dot_prod, 0.5 * disc_lr)
    self.assertEqual(coeffs.gen.self_norm, 0.5 * gen_lr)
    self.assertEqual(coeffs.gen.other_norm, 0.0)
    self.assertEqual(
        coeffs.gen.other_dot_prod,
        0.5 * gen_lr * (1 - 2 * disc_lr / gen_lr))

  @parameterized.parameters(LEARNING_RATE_TUPLES)
  def test_alt_updates_change_player_order(self, disc_lr, gen_lr):
    # Generator first: the correction factor moves to the discriminator.
    lrs = gan.GANTuple(disc=disc_lr, gen=gen_lr)
    updates = gan.GANTuple(disc=1, gen=1)
    coeffs = drift_utils.get_dd_coeffs(
        drift_utils.PlayerOrder.gen_first, False, lrs,
        num_updates=updates)
    self.assertEqual(coeffs.disc.self_norm, 0.5 * disc_lr)
    self.assertEqual(coeffs.disc.other_norm, 0.0)
    self.assertEqual(
        coeffs.disc.other_dot_prod,
        0.5 * disc_lr * (1 - 2 * gen_lr / disc_lr))
    self.assertEqual(coeffs.gen.self_norm, 0.5 * gen_lr)
    self.assertEqual(coeffs.gen.other_norm, 0.0)
    self.assertEqual(coeffs.gen.other_dot_prod, 0.5 * gen_lr)
if __name__ == '__main__':
absltest.main()
|
iw3hxn/LibrERP | export_teamsystem/test_data.py | Python | agpl-3.0 | 8,152 | 0.002454 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2016 Didotech srl (http://www.didotech.com)
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from team_system_template import cash_book, account_template, tax_template
from team_system_template import deadline_book, industrial_accounting_template, industrial_accounting
tax_data = tax_template.format(**{
'taxable': 240000000, # Imponibile 6 dec?
'vat_code': 22, # Aliquota Iva o Codice esenzione
'agro_vat_code': 0, # Aliquota iva di compensazione agricola
'vat11_code': 0,
'vat_total': 52800}
) * 8
account_data = account_template.format(**{
'account_proceeds': 5810502,
'total_proceeds': 240000000 # Imponibile 6 dec?
}) * 8
cash_book_values = {
'company_id': 1,
'version': 3,
'type': 0,
'partner_id': 34,
'name': 'Cliente prova con nome estremamente lungo'[:32],
'address': 'via Tre Porcellini'[:30],
'zip': 35020,
'city': 'Padova',
'province': 'PD'[:2],
'fiscalcode': 'RSSMRA85T10A562S',
'vat_number': 01032450072,
'individual': True and 'S' or 'N', # 134
'space': 0, # Posizione spazio fra cognome nome
# Estero:
'country': 0, # Codice paese estero di residenza. Dove si prende il codice???
'vat_ext': '', # Solo 12 caratteri??? Doveva essere 14... Ex (Croazia): HR12345678901, Sweden: SE999999999901
'fiscalcode_ext': '',
# Dati di nascita,se questi dati sono vuoti vengono presi dal codice fiscale.
'sex': 'M', # M/F 173
'birthday': 01012001, # ggmmaaaa
'city_of_birth': 'Palermo', # KGB?
'province_of_birth': 'PA',
'phone_prefix': '091',
'phone': '1234567',
'fax_prefix': '0921',
'fax': '7890123',
# Solo per i fornitori 246 -
'account_code': 9999999, # Codice conto di costo abituale
'payment_conditions_code': 4444, # Codice condizioni di pagamento
'abi': 3002,
'cab': 3280,
'partner_interm': 2, # Codice intermedio clienti / fornitori 267
# Dati fattura 268
'causal': 1, # Codice causale movimento
# Fattura di vendita=001
# Nota Credito = 002
# Fattura di acquisto=011
# Corrispettivo=020
# Movimenti diversi a diversi=027
# ( E' possibile indicare anche una causale multi collegata a una causale iva es. 101 collegata alla 1 )
# Vendita agenzia di viaggio=causale collegata alla 1 o alla 20 con il campo agenzia di viaggio = S
# Acquisti agenzia di viaggio=causale collagta alla 11 con il campo agenzia di viaggio = S
'causal_description': 'FATT. VENDITA',
'causal_ext': 'Causale aggiuntiva',
'causal_ext_1': 'Causale aggiuntiva 1',
'causal_ext_2': 'Causale aggiuntiva 2',
'registration_date': 0, # Se 0 si intende uguale alla data documento
'document_date': 01012016,
'document_number': 345, # Numero documento fornitore compreso sezionale
'document_number_no_sectional': 34, # Numero documento (numero doc senza sezionale)
'vat_sectional': 22,
'account_extract': 1501, # Estratto conto Numero partita (numero doc + sezionale (tutto unito):
# es. 1501 per una fattura numero 15 del sez. 1)
'account_extract_year': 2016, # Estratto conto Anno partita (anno di emissione della fattura in formato AAAA)
'ae_currency': 0, # Estratto conto in valuta Codice valuta estera
'ae_exchange_rate': 1000000, # 13(7+6 dec)
'ae_date': 23012016,
'ae_total_currency': 240000, # 16(13+3dec)
'ae_total_currency_vat': 52800, # 16(13+3dec)
'plafond_month': 012016, # MMAAAA Riferimento PLAFOND e fatture diferite
# Dati iva
'tax_data': tax_data,
# Totale fattura
'invoice_total': 240000000, # Imponibile 6 dec?
# Conti di ricavo/costo
'account_data': account_data,
# Dati eventuale pagamento fattura o movimenti diversi
# Iva Editoria
'vat_collectability': 0, # 0=Immediata 1=Differita 2=Differita DL. 185/08
# 3=Immediata per note credito/debito 4=Split payment
# R=Risconto C=Competenza
# N=Non aggiorna estratto conto
'val_0': 0,
'empty': ''
}
# Field values for a deadline-book (portfolio) record.  The
# 'payment_subtype' key had been split by a stray '|' artifact; repaired.
deadline_book_values = {
    'company_id': 1,
    'version': 3,
    'type': 1,
    # INTRASTAT data
    'val_0': 0,
    'empty': '',
    # Portfolio data
    'payment_condition': 0,  # ??? Payment-condition code
    'abi': 0,  # ???
    'cab': 0,  # ???
    'agency_description': '',  # Agency description
    'total_number_of_payments': 0,  # ??? Total number of instalments
    'invoice_total': 0,  # ??? Document total (invoice total)
    # Instalment detail
    'payment_count': 0,  # ??? Instalment number
    'payment_deadline': 0,  # ??? Due date
    'document_type': 0,  # Bill type:
    # 1=Draft
    # 2=Bank receipt (RiBa)
    # 3=Direct remittance
    # 4=Assignments
    # 5=Descriptive only
    # 6=Cash on delivery
    'payment_total': 0,  # ??? Bill amount
    'payment_total_currency': 0,  # Foreign-currency portfolio: bill amount in currency
    'total_stamps': 0,  # Stamp-duty amount
    'payment_stamp_currency': 0,  # Foreign-currency portfolio: stamps in currency
    'payment_state': '',  # ??? Bill state 0=Open 1=Closed 2=Unpaid 3=Custom
    'payment_subtype': '',  # Direct-remittance subtype
    'agent_code': 0,  # Agent code
    'paused_payment': '',  # Suspended bill
    'cig': '',
    'cup': '',
    # INTRASTAT GOODS movements, additional data...
}
def get_accounting_data():
    """Return 20 concatenated empty industrial-accounting records."""
    empty_accounting = {
        'val_0': 0,
        'empty': '',
        'causal': 0,  # ??? Industrial-accounting causal
        # Sales invoice = 001
        # Purchase invoice = 002
        'account': 0,  # ??? Industrial-accounting account
        # 1 = systems
        # 2 = rentals
        # 3 = home automation
        'account_proceeds': 0,  # ??? Expense/revenue item (same as the general-ledger
        # revenue accounts but with an extra trailing 0)
        # 58100501
        # 58100502
        # 58100503
        'sign': '',  # ??? Sign ( D or A )
        'total_ammount': 0,  # Movement amount or total cost
    }
    # Repeating the formatted record is equivalent to the old
    # concatenate-in-a-loop over a constant dict.
    return industrial_accounting_template.format(**empty_accounting) * 20
industrial_accounting_values = {
'company_id': 1,
'version': 3,
'type': 2,
'val_0': 0,
# 'empty': '',
# CONTAB. INDUSTRIALE 8
'accounting_data': get_accounting_data()
}
if __name__ == '__main__':
    # Smoke test: render one record type and print it with its length.
    # print() calls are valid under both Python 2 and Python 3; the old
    # `print record` statements were Python-2-only syntax.
    record_type = 0
    templates = {
        0: (cash_book, cash_book_values),
        1: (deadline_book, deadline_book_values),
        2: (industrial_accounting, industrial_accounting_values),
    }
    template, values = templates[record_type]
    record = template.format(**values)
    print(record)
    print(len(record))
kubestack/kubestack | app/kubestack/setup.py | Python | gpl-2.0 | 1,536 | 0.000651 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read() | .replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='kubestack',
version='0.1.0',
description="Python app to manage dynamic Jenkins slaves with Kubernetes",
long_description=readme + '\n\n' + history,
author="Yolanda Robla",
author_email='info@ysoft.biz',
url='https://github.com/yrobla/kubestack' | ,
packages=[
'kubestack',
],
package_dir={'kubestack':
'kubestack'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='kubestack',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
tests_require=test_requirements
) |
giantas/minor-python-tests | Operate List/operate_list.py | Python | mit | 1,301 | 0.018447 | # Define a function sum() and a function multiply()
# that sums and multiplies (respectively) all the numbers in a list of numbers.
# For example, sum([1, 2, 3, 4]) should return 10,
# and multiply([1, 2, 3, 4]) should return 24.
def check_list(num_list):
    """Return True when *num_list* is a non-empty sequence of ints.

    Rejects None, empty input, and any non-int element (bools count as
    non-int, matching the original type() comparison).  The old version
    silently skipped '[', ']' and ',' elements before type checking, so
    a list such as [1, ','] incorrectly passed validation.
    """
    if num_list is None:
        return False
    if len(num_list) == 0:
        return False
    for item in num_list:
        if type(item) is not int:
            return False
    return True
def sum(num_list):
    """Return the sum of the list values, or False for invalid input."""
    if not check_list(num_list):
        return False
    total = 0
    for value in num_list:
        total = total + value
    return total
def multiply(num_list):
    """Return the product of the list values, or False for invalid input."""
    if not check_list(num_list):
        return False
    product = 1
    for value in num_list:
        product = product * value
    return product
def main():
    """Prompt for a list and print [sum, product].

    Uses print() so the syntax is valid under both Python 2 and
    Python 3; the original `print map(...)` was Python-2-only.
    """
    get_list = input("Enter list: ")
    operations = [sum, multiply]
    print([operation(get_list) for operation in operations])
if __name__ == "__main__":
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.