repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
xrg/openerp-server | bin/service/web_services.py | Python | agpl-3.0 | 41,970 | 0.005742 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import addons
import base64
import ir
import locale
import logging
import netsvc
import os
import platform
import pooler
import release
import security
import sql_db
import sys
import threading
import time
import tools
from tools.translate import _
from cStringIO import StringIO
#.apidoc title: Exported Service methods
#.apidoc module-mods: member-order: bysource
""" This python module defines the RPC methods available to remote clients.
Each 'Export Service' is a group of 'methods', which in turn are RPC
procedures to be called. Each method has its own arguments footprint.
"""
logging.basicConfig()
class baseExportService(netsvc.ExportService):
""" base class for the objects that implement the standardized
xmlrpc2 dispatch
"""
_auth_commands = { 'pub': [] , 'root': [], 'db': [] }
def new_dispatch(self, method, auth, params, auth_domain=None):
# Double check, that we have the correct authentication:
if not auth:
domain='pub'
else:
domain=auth.provider.domain
if method not in self._auth_commands[domain]:
raise Exception("Method not found: %s" % method)
fn = getattr(self, 'exp_'+method)
if domain == 'db':
u, p, db, uid = auth.auth_creds[auth.last_auth]
cr = pooler.get_db(db).cursor()
try:
res = fn(cr, uid, *params)
cr.commit()
return res
finally:
cr.close()
else:
return fn(*params)
class db(baseExportService):
_auth_commands = { 'root': [ 'create', 'get_progress', 'drop', 'dump',
'restore', 'rename',
'change_admin_password', 'migrate_databases' ],
'pub': [ 'db_exist', 'list', 'list_lang', 'server_version' ],
}
def __init__(self, name="db"):
netsvc.ExportService.__init__(self, name)
self.joinGroup("web-services")
self.actions = {}
self.id = 0
self.id_protect = threading.Semaphore()
self._pg_psw_env_var_is_set = False # on win32, pg_dump need the PGPASSWORD env var
def dispatch(self, method, auth, params):
if method in [ 'create', 'get_progress', 'drop', 'dump',
'restore', 'rename',
'change_admin_password', 'migrate_databases' ]:
passwd = params[0]
params = params[1:]
security.check_super(passwd)
elif method in [ 'db_exist', 'list', 'list_lang', 'server_version' ]:
# params = params
# No security check for these methods
pass
else:
raise KeyError("Method not found: %s" % method)
fn = getattr(self, 'exp_'+method)
return fn(*params)
def _create_empty_database(self, name):
db = sql_db.db_connect('template1')
cr = db.cursor()
try:
cr.autocommit(True) # avoid transaction block
cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % name)
finally:
cr.close()
def exp_create(self, db_name, demo, lang, user_password='admin'):
self.id_protect.acquire()
self.id += 1
id = self.id
self.id_protect.release()
self.actions[id] = {'clean': False}
self._create_empty_database(db_name)
class DBInitialize(object):
def __call__(self, serv, id, db_name, demo, lang, user_password='admin'):
cr = None
try:
serv.actions[id]['progress'] = 0
cr = sql_db.db_connect(db_name).cursor()
tools.init_db(cr)
cr.commit()
cr.close()
cr = None
_langs = []
if lang:
_langs.append(lang)
pool = pooler.restart_pool(db_name, demo, serv.actions[id],
update_module=True, languages=_langs)[1]
cr = sql_db.db_connect(db_name).cursor()
if lang:
modobj = pool.get('ir.module.module')
mids = modobj.search(cr, 1, [('state', '=', 'installed')])
modobj.update_translations(cr, 1, mids, lang)
cr | .execute('UPDATE res_users SE | T password=%s, context_lang=%s, active=True WHERE login=%s', (
user_password, lang, 'admin'))
cr.execute('SELECT login, password, name ' \
' FROM res_users ' \
' ORDER BY login')
serv.actions[id]['users'] = cr.dictfetchall()
serv.actions[id]['clean'] = True
cr.commit()
cr.close()
except Exception, e:
serv.actions[id]['clean'] = False
serv.actions[id]['exception'] = e
import traceback
e_str = StringIO()
traceback.print_exc(file=e_str)
traceback_str = e_str.getvalue()
e_str.close()
logging.getLogger('web-services').error('CREATE DATABASE\n%s' % (traceback_str))
serv.actions[id]['traceback'] = traceback_str
if cr:
cr.close()
logger = logging.getLogger('web-services')
logger.info('CREATE DATABASE: %s' % (db_name.lower()))
dbi = DBInitialize()
create_thread = threading.Thread(target=dbi,
args=(self, id, db_name, demo, lang, user_password))
create_thread.start()
self.actions[id]['thread'] = create_thread
return id
def exp_get_progress(self, id):
if self.actions[id]['thread'].isAlive():
# return addons.init_progress[db_name]
return (min(self.actions[id].get('progress', 0),0.95), [])
else:
clean = self.actions[id]['clean']
if clean:
users = self.actions[id]['users']
self.actions.pop(id)
return (1.0, users)
else:
e = self.actions[id]['exception']
self.actions.pop(id)
raise Exception, e
def exp_drop(self, db_name):
sql_db.close_db(db_name)
logger = logging.getLogger()
db = sql_db.db_connect('template1')
cr = db.cursor()
cr.autocommit(True) # avoid transaction block
if tools.config.get_misc('debug', 'drop_guard', False):
raise Exception("Not dropping database %s because guard is set!" % db_name)
try:
cr.execute('DROP DATABASE "%s"' % db_name)
logger.info('DROP DB: %s' % (db_name))
except Exception, e:
logger.exception('DROP DB: %s failed:' % (db_name,))
raise Exception("Couldn't drop database %s: %s" % (db_name, e))
finally:
cr.close()
return True
def _set_pg_psw_env_var(self):
if os.name == 'nt' |
chrisjsewell/ipypublish | ipypublish/sphinx/tests/conftest.py | Python | bsd-3-clause | 2,432 | 0.000411 | """
Uses sphinx's pytest fixture to run builds
usage:
.. code-block:: python
from ipypublish.sphinx.tests import get_test_source_dir
@pytest.mark.sphinx(
buildername='html',
srcdir=get_test_source_dir('notebook'))
def test_basic(app, status, warning, get_sphinx_app_output):
app.b | uild()
assert 'build succeeded' in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, buildername='html')
parameters available to parse to ``@pytest.mark. | sphinx``:
- buildername='html'
- srcdir=None
- testroot='root' (only used if srcdir not set)
- freshenv=False
- confoverrides=None
- status=None
- warning=None
- tags=None
- docutilsconf=None
"""
import os
import shutil
import re
import pytest
from sphinx.testing.path import path
try:
import pathlib
except ImportError:
import pathlib2 as pathlib
from ipypublish.sphinx.tests import get_test_source_dir
@pytest.fixture(scope="session", autouse=True)
def remove_sphinx_builds():
""" remove all build directories from the test folder
"""
srcdirs = pathlib.Path(get_test_source_dir())
for entry in srcdirs.iterdir(): # type: pathlib.Path
if entry.is_dir() and entry.joinpath("_build").exists():
shutil.rmtree(str(entry.joinpath("_build")))
@pytest.fixture
def get_sphinx_app_output():
def read(
app,
buildername="html",
filename="contents.html",
encoding="utf-8",
extract_body=False,
remove_scripts=False,
):
outpath = path(os.path.join(str(app.srcdir), "_build", buildername, filename))
if not outpath.exists():
raise IOError("no output file exists: {}".format(outpath))
content = outpath.text(encoding=encoding)
if extract_body:
body_rgx = re.compile("\\<body\\>(.*)\\</body\\>", re.DOTALL)
body_search = body_rgx.search(content)
if not body_search:
raise IOError("could not find body content of {}".format(path))
content = body_search.group(1)
if remove_scripts:
# remove script environments which can change
script_rgx = re.compile("\\<script\\>(.*)\\</script\\>", re.DOTALL)
content = script_rgx.sub("<script></script>", content)
return content
return read
|
MediaSapiens/wavesf | djangoappengine/boot.py | Python | bsd-3-clause | 7,691 | 0.00091 | import logging
import os
import sys
PROJECT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# Overrides for os.environ
env_ext = {'DJANGO_SETTINGS_MODULE': 'settings'}
def setup_env():
"""Configures app engine environment for command-line apps."""
# Try to import the appengine code from the system path.
try:
from google.appengine.api import apiproxy_stub_map
except ImportError:
for k in [k for k in sys.modules if k.startswith('google')]:
del sys.modules[k]
# Not on the system path. Build a list of alternative paths where it
# may be. First look within the project for a local copy, then look for
# where the Mac OS SDK installs it.
paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
os.environ.get('APP_ENGINE_SDK'),
'/usr/local/google_appengine',
'/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
for path in os.environ.get('PATH', '').split(os.pathsep):
path = path.rstrip(os.sep)
if path.endswith('google_appengine'):
paths.append(path)
if os.name in ('nt', 'dos'):
path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
paths.append(path)
# Loop through all possible paths and look for the SDK dir.
sdk_path = None
for path in paths:
if not path:
| continue
path = os.path.expanduser(path)
path = os.path.realpath(path)
if os.path.exists(path):
sdk_path = path
break
if sdk_path is None:
# The SDK could not be found in any known location.
sys.stderr.write('The Google App Engine SDK could not be found!\n'
"Make sure it's accessible via your PATH "
"environment and called google_appengine | .\n")
sys.exit(1)
# Add the SDK and the libraries within it to the system path.
extra_paths = [sdk_path]
lib = os.path.join(sdk_path, 'lib')
# Automatically add all packages in the SDK's lib folder:
for dir in os.listdir(lib):
path = os.path.join(lib, dir)
# Package can be under 'lib/<pkg>/<pkg>/' or 'lib/<pkg>/lib/<pkg>/'
detect = (os.path.join(path, dir), os.path.join(path, 'lib', dir))
for path in detect:
if os.path.isdir(path) and not dir == 'django':
extra_paths.append(os.path.dirname(path))
break
sys.path = extra_paths + sys.path
from google.appengine.api import apiproxy_stub_map
setup_project()
from .utils import have_appserver
if have_appserver:
# App Engine's threading.local is broken
setup_threading()
setup_logging()
if not have_appserver:
# Patch Django to support loading management commands from zip files
from django.core import management
management.find_commands = find_commands
def find_commands(management_dir):
"""
Given a path to a management directory, returns a list of all the command
names that are available.
This version works for django deployments which are file based or
contained in a ZIP (in sys.path).
Returns an empty list if no commands are defined.
"""
import pkgutil
return [modname for importer, modname, ispkg in pkgutil.iter_modules(
[os.path.join(management_dir, 'commands')]) if not ispkg]
def setup_threading():
# XXX: GAE's threading.local doesn't work correctly with subclassing
try:
from django.utils._threading_local import local
import threading
threading.local = local
except ImportError:
pass
def setup_logging():
# Fix Python 2.6 logging module
logging.logMultiprocessing = 0
# Enable logging
level = logging.DEBUG
from .utils import have_appserver
if have_appserver:
# We can't import settings at this point when running a normal
# manage.py command because this module gets imported from settings.py
from django.conf import settings
if not settings.DEBUG:
level = logging.INFO
logging.getLogger().setLevel(level)
def setup_project():
from .utils import have_appserver, on_production_server
if have_appserver:
# This fixes a pwd import bug for os.path.expanduser()
env_ext['HOME'] = PROJECT_DIR
# The dev_appserver creates a sandbox which restricts access to certain
# modules and builtins in order to emulate the production environment.
# Here we get the subprocess module back into the dev_appserver sandbox.
# This module is just too important for development.
# Also we add the compiler/parser module back and enable https connections
# (seem to be broken on Windows because the _ssl module is disallowed).
if not have_appserver:
from google.appengine.tools import dev_appserver
try:
# Backup os.environ. It gets overwritten by the dev_appserver,
# but it's needed by the subprocess module.
env = dev_appserver.DEFAULT_ENV
dev_appserver.DEFAULT_ENV = os.environ.copy()
dev_appserver.DEFAULT_ENV.update(env)
# Backup the buffer() builtin. The subprocess in Python 2.5 on
# Linux and OS X uses needs it, but the dev_appserver removes it.
dev_appserver.buffer = buffer
except AttributeError:
logging.warn('Could not patch the default environment. '
'The subprocess module will not work correctly.')
try:
# Allow importing compiler/parser and _ssl modules (for https)
dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
('parser', '_ssl'))
except AttributeError:
logging.warn('Could not patch modules whitelist. '
'The compiler and parser modules will not work and '
'SSL support is disabled.')
elif not on_production_server:
try:
# Restore the real subprocess module
from google.appengine.api.mail_stub import subprocess
sys.modules['subprocess'] = subprocess
# Re-inject the buffer() builtin into the subprocess module
from google.appengine.tools import dev_appserver
subprocess.buffer = dev_appserver.buffer
except Exception, e:
logging.warn('Could not add the subprocess module to the sandbox: %s' % e)
os.environ.update(env_ext)
extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
# We support zipped packages in the common and project folders.
if os.path.isdir(zip_packages_dir):
for zip_package in os.listdir(zip_packages_dir):
extra_paths.append(os.path.join(zip_packages_dir, zip_package))
# App Engine causes main.py to be reloaded if an exception gets raised
# on the first request of a main.py instance, so don't call setup_project()
# multiple times. We ensure this indirectly by checking if we've already
# modified sys.path, already.
if len(sys.path) < len(extra_paths) or \
sys.path[:len(extra_paths)] != extra_paths:
for path in extra_paths:
while path in sys.path:
sys.path.remove(path)
sys.path = extra_paths + sys.path
|
MiroK/dolfin | test/unit/python/parameter/test_parameters.py | Python | gpl-3.0 | 4,019 | 0.002488 | #!/usr/bin/env py.test
"""Unit tests for parameter library"""
# Copyright (C) 2011 Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import pytest
import os
from dolfin import *
from dolfin_utils.test import *
@skip_in_parallel
def test_simple(tempdir):
# Create some parameters
p0 = Parameters("test")
p0.add("filename", "foo.txt")
p0.add("maxiter", 100)
p0.add("tolerance", 0.001)
p0.add("monitor_convergence", True)
# Save to file
f0 = File(os.path.join(tempdir, "test_parameters.xml"))
f0 << p0
# Read from file
p1 = Parameters()
f1 = File(os.path.join(tempdir, "test_parameters.xml"))
f1 >> p1
# Check values
assert p1.name() == "test"
assert p1["filename"] == "foo.txt"
assert p1["maxiter"] == 100
assert p1["tolerance"] == 0.001
assert p1["monitor_convergence"] == True
@skip_in_parallel
def test_gzipped_simple(tempdir):
# Create some parameters
p0 = Parameters("test")
p0.add("filename", "foo.txt")
p0.add("maxiter", 100)
p0.add("tolerance", 0.001)
p0.add("monitor_convergence", True)
# Save to file
f0 = File(os.path.join(tempdir, "test_parameters.xml.gz"))
f0 << p0
# Read from file
p1 = Parameters()
f1 = File(os.path.join(tempdir, "test_parameters.xml.gz"))
f1 >> p1
# Check values
assert p1.name() == "test"
assert p1["filename"] == "foo.txt"
assert p1["maxiter"] == 100
assert p1["tolerance"] == 0.001
assert p1["monitor_convergence"] == True
@skip_in_parallel
def test_nested(tempdir):
# Create some nested parameters
p0 = Parameters("test")
p00 = Parameters("sub0")
p00.add("filename", "foo.txt")
p00.add("maxiter", 100)
p00.add("tolerance", 0.001)
p00.add("monitor_convergence", True)
p0.add("foo", "bar")
p01 = Parameters(p00);
p01.rename("sub1");
p0.add(p00)
p0.add(p01)
# Save to file
f0 = File(os.path.join(tempdir, "test_parameters.xml"))
f0 << p0
# Read from file
p1 = Parameters()
f1 = File(os.path.join(tempdir, "test_parameters.xml"))
f1 >> p1
# Check values
assert p1.name() == "test"
assert p1["foo"] == "bar"
assert p1["sub0"]["filename"] == "foo.txt"
assert p1["sub0"]["maxiter"] == 100 |
assert p1["sub0"]["tolerance"] == 0.001
assert p1["sub0"]["monitor_convergence"] == True
@skip_in_parallel
def test_nested_read_existing(tempdir):
"""Test that we can read in a nested parameter database into
an existing (and matching) parameter database"""
file = | File(os.path.join(tempdir, "test_parameters.xml"))
file << parameters
p = Parameters("test")
file >> p
file >> p
@skip_in_parallel
def test_solver_parameters():
"Test that global parameters are propagated to solvers"
# Record default values so we can change back
absolute_tolerance = parameters["krylov_solver"]["absolute_tolerance"]
# Set global parameters
parameters["krylov_solver"]["absolute_tolerance"] = 1.23456
# Create solvers
krylov_solver = KrylovSolver()
# Check that parameters propagate to solvers
assert krylov_solver.parameters["absolute_tolerance"] == 1.23456
# Reset parameters so that other tests will continue to work
parameters["krylov_solver"]["absolute_tolerance"] = absolute_tolerance
|
nguyentran/openviber | tools/swtoolkit/site_scons/site_tools/atlmfc_vc80.py | Python | mit | 2,627 | 0.004187 | #!/usr/bin/python2.4
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the abo | ve copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * N | either the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Windows ATL MFC for VC80 (Visual Studio 2005) tool for SCons.
Note that ATL MFC requires the commercial (non-free) version of Visual Studio
2005. Using this in an open-source project thus limits the size of the
developer community to those with the commercial version.
"""
import os
def _FindLocalInstall():
"""Returns the directory containing the local install of the tool.
Returns:
Path to tool (as a string), or None if not found.
"""
# TODO: Should use a better search. Probably needs to be based on msvs tool,
# as msvc detection is.
default_dir = 'C:/Program Files/Microsoft Visual Studio 8/VC/atlmfc'
if os.path.exists(default_dir):
return default_dir
else:
return None
def generate(env):
# NOTE: SCons requires the use of this name, which fails gpylint.
"""SCons entry point for this tool."""
if not env.get('ATLMFC_VC80_DIR'):
env['ATLMFC_VC80_DIR'] = _FindLocalInstall()
env.AppendENVPath('INCLUDE', env.Dir('$ATLMFC_VC80_DIR/include').abspath)
env.AppendENVPath('LIB', env.Dir('$ATLMFC_VC80_DIR/lib').abspath)
|
thisisshi/cloud-custodian | tools/c7n_mailer/c7n_mailer/azure_mailer/deploy.py | Python | apache-2.0 | 5,823 | 0.003091 | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import copy
import json
import logging
import os
import jmespath
from c7n_mailer.deploy import CORE_DEPS
try:
from c7n.mu import generate_requirements
from c7n_azure.constants import AUTH_TYPE_EMBED
from c7n_azure.function_package import FunctionPackage
from c7n_azure.functionapp_utils import FunctionAppUtilities
from c7n_azure.policy import AzureFunctionMode
from c7n_azure.session import Session
from c7n_azure.utils import StringUtils
from c7n.utils import local_session
except ImportError:
FunctionPackage = None
pass
def cache_path():
return os.path.join(os.path.dirname(__file__), 'cache')
def get_mailer_requirements():
deps = ['azure-mgmt-managementgroups', 'azure-mgmt-web',
'azure-graphrbac', 'azure-keyvault', 'azure-storage-queue',
'azure-storage-blob', 'netaddr', 'sendgrid', 'pyyaml'] + list(CORE_DEPS)
requirements = generate_requirements(
deps, ignore=['boto3', 'botocore', 'pywin32'],
exclude=['pkg_resources'],
include_self=True)
return requirements
def build_function_package(config, function_name, sub_id):
schedule = config.get('function_schedule', '0 */10 * * * *')
cache_override_path = cache_path()
function_path = function_name + "_" + sub_id
# Build package
package = FunctionPackage(
function_name,
os.path.join(os.path.dirname(__file__), 'function.py'),
target_sub_ids=[sub_id],
cache_override_path=cache_override_path)
identity = jmespath.search('function_properties.identity', config)
package.build(None,
modules=['c7n', 'c7n_azure', 'c7n_mailer'],
requirements=get_mailer_requirements(),
identity=identity)
package.pkg.add_contents(
function_path + '/function.json',
contents=package.get_function_config({'mode':
{'type': 'azure-periodic',
'schedule': schedule}}))
# Add mail templates
for d in set(config['templates_folders']):
if not os.path.exists(d):
continue
for t in [f for f in os.listdir(d) if os.path.splitext(f)[1] == '.j2']:
with open(os.path.join(d, t)) as fh:
package.pkg.add_contents(function_path + '/msg-templates/%s' % t, fh.read())
function_config = copy.deepcopy(config)
functions_full_template_path = '/home/site/wwwroot/' + function_path + '/msg-templates/'
function_config['templates_folders'] = [functions_full_template_path]
package.pkg.add_contents(
function_path + '/config.json',
contents=json.dumps(function_config))
package.close()
return package
def provision(config):
log = logging.getLogger('c7n_mailer.azure.deploy')
function_name = config.get('function_name', 'mailer')
function_properties = config.get('function_properties', {})
# service plan is parse first, because its location might be shared with storage & insights
service_plan = AzureFunctionMode.extract_properties(function_properties,
'servicePlan',
{
'name': 'cloud-custodian',
'location': 'eastus',
'resource_group_name': 'cloud-custodian',
'sku_tier': 'Dynamic', # consumption plan
'sku_name': 'Y1'
})
location = service_plan.get('location', 'eastus')
rg_name = service_plan['resource_group_name']
sub_id = local_session(Session).get_subscription_id()
suffix = StringUtils.naming_hash(rg_name + sub_id)
storage_account = AzureFunctionMode.extract_properties(function_properties,
'storageAccount',
{'name': 'mailerstorage' + suffix,
'location': location,
'resource_group_na | me': rg_name})
app_insights = AzureFunctionMode.extract_properties(function_properties,
'appInsights',
{'na | me': service_plan['name'],
'location': location,
'resource_group_name': rg_name})
function_app_name = FunctionAppUtilities.get_function_name(
'-'.join([service_plan['name'], function_name]), suffix)
FunctionAppUtilities.validate_function_name(function_app_name)
identity = jmespath.search('function_properties.identity', config) or {
'type': AUTH_TYPE_EMBED}
params = FunctionAppUtilities.FunctionAppInfrastructureParameters(
app_insights=app_insights,
service_plan=service_plan,
storage_account=storage_account,
function_app={'resource_group_name': service_plan['resource_group_name'],
'identity': identity,
'name': function_app_name})
FunctionAppUtilities.deploy_function_app(params)
log.info("Building function package for %s" % function_app_name)
package = build_function_package(config, function_name, sub_id)
log.info("Function package built, size is %0.2f MB" % (package.pkg.size / (1024 * 1024.0)))
FunctionAppUtilities.publish_functions_package(params, package)
|
Erotemic/hotspotter | _graveyard/oldhotspotter/__init__.py | Python | apache-2.0 | 46 | 0.021739 | __all__ | = ['H | otSpotterAPI', 'Facade', 'tpl']
|
GeoDaCenter/CAST | stars/visualization/plots/SigTrendGraphLocalG.py | Python | gpl-3.0 | 28,985 | 0.015146 | """
"""
__author__ = "Xun Li <xunli@asu.edu> "
__all__ = ['SigTrendGraphLocalG', 'SigTrendGraphLocalGQueryDialog','ShowSigTrendGraphLocalG']
import os
import wx
import numpy as np
from scipy.spatial import cKDTree
import pysal
import stars
from stars.visualization.maps.DynamicLisaMap import DynamicLISAQueryDialog
from stars.visualization.maps.BaseMap import PolygonLayer
from stars.visualization.EventHandler import AbstractData
from stars.visualization.PlotWidget import PlotWidget,PlottingCanvas
from stars.visualization.utils import PaintCollection, View2ScreenTransform, GetRandomColor, FilterShapeList,GetDateTimeIntervals
from stars.visualization.utils.PaintCollection import DrawLines
from stars.visualization.SpaceTimeQueryDialog import SpaceTimeQueryDialog
class SigTrendGraphLocalG(PlottingCanvas):
"""
"""
def __init__(self,parent, layer, data, **kwargs):
PlottingCanvas.__init__(self,parent,data)
try:
self.layer = layer
self.layer_name = layer.name
#self.weight_file = kwargs["weight"]
self.cs_data_dict = kwargs["query_data"]
self.step, self.step_by = kwargs["step"] ,kwargs["step_by"]
self.start_date, self.end_date = kwargs["start"],kwargs["end"]
self.lbls = kwargs['lbls']
self.parent = parent
self.data_sel_keys = sorted(self.cs_data_dict.keys())
self.data_sel_values = [self.cs_data_dict[i] for i in self.data_sel_keys]
#self.weight = pysal.open(self.weight_file).read()
self.t = len(self.cs_data_dict) # number of data slices
self.n = len(self.data_sel_values[0]) # number of shape objects
self.datetime_intervals, self.interval_labels = GetDateTimeIntervals(self.start_date, self.end_date,self.t, self.step, self.step_by)
# promote for time weights
from stars.visualization.dialogs import TimeWeightsDlg
tw_dlg = TimeWeightsDlg(self.main, self.t, self.layer.name)
tw_path = tw_dlg.Show()
if tw_path == False:
raise Exception("no time weights")
tweights = pysal.open(tw_path).read()
# G settings
from stars.visualization.dialogs import choose_local_g_settings
b_gstar, b_binary = choose_local_g_settings(self)
map_type = 'Gi*' if b_gstar else 'Gi'
add_type = 'binary' if b_binary else 'row-standardized'
self.title = 'Local G (%s,%s) Trend Graph -%s' % (map_type,add_type,layer.name)
self.parentFrame.SetTitle(self.title)
# calculate Gi using time weights
time_gstar = dict()
time_gstar_z = dict()
tseries_data = []
for pid in range(self.n):
tseries = []
for tid in range(self.t):
tseries.append(self.cs_data_dict[tid][pid])
tseries_data.append(tseries)
for pid in range(self.n):
tseries = tseries_data[pid]
y = np.array(tseries)
#lg = pysal.esda.getisord.G_Local(y,tweights,transform='B',star=True)
if b_binary == False:
lg = pysal.esda.getisord.G_Local(y,tweights,star=b_gstar)
else:
lg = pysal.esda.getisord.G_Local(y,tweights,star=b_gstar,transform='B')
time_gstar[pid] = lg.p_sim
time_gstar_z[pid] = lg.Zs
trendgraph_data = dict()
for i in range(self.n):
data = []
for j in range(self.t):
data.append(self.cs_data_dict[j][i])
trendgraph_data[i] = data
self.trendgraph_data = trendgraph_data
self.tweights = tweights
self.time_gstar = time_gstar
self.time_gstar_z = time_gstar_z
self.t_neighbors = tweights.neighbors
data = [self.trendgraph_data, [], self.interval_labels, 0, self.time_gstar,self.time_gstar_z, self.t_neighbors]
self.data = data[0]
self.highlight_ids= data[1]
self.labels = data[2]
self.tick = data[3]
self.time_gstar_p = data[4]
self.time_gstar_z = data[5]
self.time_neighbors = data[6]
self.n = len(self.data)
self.selected_path_ids = []
self.line_marker = []
self.selected_line = None
self.margin_right = 50
self.margin_bottom = 140
self.margin_left = 100
self.enable_axis = False
self.enable_axis_x = False
self.enable_axis_y = False
self.x_label = ""
self.y_label = "# of observations"
self.font_size_title = 8
if os.name == "posix":
self.font_size_title= 10
self.font_size_x_axis = 8
self.font_size_y_axis = 8
self.font_size_xy_label = 8
if os.name == "posix":
self.font_size_xy_label = 10
all_values = self.data.values()
self.x_min = 1
self.x_max = len(all_values[0])
self.x_max = self.x_max if self.x_max > self.x_min else self.x_max*1.5
all_values = np.array(all_values)
self.y_min = np.min(all_values)
self.y_min = self.y_min if self.y_min > 0 else 0
self.y_max = np.max(all_values)
self.local_paths = []
self.extent = (self.x_min, self.y_min, self.x_max,self.y_max)
except Exception as err:
self.ShowMsgBox('Local G Trend Graph could not be created. ' + str(err.message))
self.isValidPlot = False
self.parentFrame.Close(True)
return None
# linking-brushing events
self.Register(stars.EVT_OBJS_SELECT, self.OnPathsSelected)
self.Register(stars.EVT_OBJS_UNSELECT, self.OnNoPathSelect)
def OnClose(self, event):
self.Unregister(stars.EVT_OBJS_SELECT, self.OnPathsSelected)
self.Unregister(stars.EVT_OBJS_UNSELECT, self.OnNoPathSelect)
event.Skip()
def DoDraw(self,dc): |
super(SigTrendGraphLocalG, self).DoDraw(dc)
# draw y axis at ea | ch time interval
dc.SetFont(wx.Font(self.font_size_y_axis,wx.NORMAL,wx.NORMAL,wx.NORMAL))
dc.SetPen(wx.Pen(wx.Color(200,200,200)))
for i in range(self.x_min, self.x_max+1):
if i == self.x_min or i == self.x_max:
self.enable_axis_labels = True
else:
self.enable_axis_labels = False
self.draw_axis_y(dc, start_x=i, isRotate=False, lblFormat='%d')
def test_line_at_rect_liang(self, line_seg, rect):
t_min = 0
t_max = 1
x1,y1 = line_seg[0]
x2,y2 = line_seg[1]
left,upper = rect[0]
right,bottom = rect[1]
if max(x1,x2) < left or min(x1,x2) > right or max(y1,y2) < bottom or min(y1,y2) > upper:
return False
dx = float(x2-x1)
dy = float(y2-y1)
P1 = -dx
q1 = x1 - left
r1 = q1 / P1
P2 = dx
q2 = right - x1
r2 = q2/P2
P3 = -dy
q3 = y1- bottom
r3 = q3/P3
P4 = dy
q4 = upper - y1
r4 = q4/P4
P_set = (P1, P2, P3, P4)
r_set = (r1, r2, r3, r4)
t1_set = [0]
t2_set = [1]
for i in range(4):
if P_set[i] < 0:
t1_set.append(r_set[i])
if P_set[i] > 0:
t2_set.append(r_set[i])
|
avanzosc/avanzosc6.1 | nan_stock_purchase_price/__openerp__.py | Python | agpl-3.0 | 486 | 0.063786 | {
"name" : "Purchase Price on Input Material",
"versi | on" : "0.1",
"description" : """Include facilities to enter purchase price in pickings of incomming products.""",
"author" : "NaN Projectes de Programari Lliure, S.L.",
"website" : "http://www.NaN-tic.com",
"depends" : [
'stock',
'purchase_discount',
],
"category" : "Cust | om Modules",
"init_xml" : [],
"demo_xml" : [],
"update_xml" : [ 'product_view.xml','stock_view.xml' ],
"active": False,
"installable": True
}
|
arthurprs/aerospike-client-python | test/test_scan.py | Python | apache-2.0 | 8,729 | 0.001833 | # -*- coding: utf-8 -*-
import pytest
import sys
from test_base_class import TestBaseClass
aerospike = pytest.importorskip("aerospike")
try:
from aerospike.exception import *
except:
print "Please install aerospike python client."
sys.exit(1)
class TestScan(TestBaseClass):
    def setup_method(self, method):
        """
        Setup method: connect to the Aerospike cluster (anonymously when no
        credentials are configured) and seed 20 records into test/demo.
        """
        hostlist, user, password = TestBaseClass.get_hosts()
        config = {'hosts': hostlist}
        # NOTE(review): `== None` kept as-is for byte-compatibility; `is None`
        # is the preferred idiom.
        if user == None and password == None:
            self.client = aerospike.client(config).connect()
        else:
            self.client = aerospike.client(config).connect(user, password)
        # Records are keyed ('test', 'demo', 0..19); teardown_method removes them.
        for i in xrange(20):
            key = ('test', u'demo', i)
            rec = {'name': 'name%s' % (str(i)), 'age': i}
            self.client.put(key, rec)
def teardown_method(self, method):
"""
Teardown method
"""
for i in xrange(20):
key = ('test', u'demo', i)
self.client.remove(key)
self.client.close()
def test_scan_without_any_parameter(self):
scan_obj = None
with pytest.raises(TypeError) as typeError:
scan_obj = self.client.scan()
scan_obj.foreach()
assert "Required argument 'callback' (pos 1) not found" in typeError.value
def test_scan_with_non_existent_ns_and_set(self):
ns = 'namespace'
st = 'set'
records = []
scan_obj = None
scan_obj = self.client.scan(ns, st)
def callback((key, meta, bins)):
records.append(bins)
try:
scan_obj.foreach(callback)
except NamespaceNotFound as exception:
assert exception.code == 20L
except ServerError as exception:
assert exception.code == 1L
def test_scan_with_none_ns_and_set(self):
ns = None
st = None
try:
scan_obj = self.client.scan( ns, st )
except ParamError as exception:
assert exception.code == -2L
assert exception.msg == 'Parameters are incorrect'
def test_scan_with_existent_ns_and_set(self):
ns = 'test'
st = 'demo'
records = []
scan_obj = None
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.foreach(callback)
assert len(records) != 0
def test_scan_with_existent_ns_and_none_set(self):
ns = 'test'
st = 'demo'
records = []
scan_obj = None
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, None)
scan_obj.foreach(callback)
assert len(records) != 0
def test_scan_with_timeout_policy(self):
ns = 'test'
st = 'demo'
records = []
scan_obj = None
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.foreach(callback, {'timeout': 2000})
assert len(records) != 0
def test_scan_with_callback_contains_error(self):
pytest.xfail("segfault!")
ns = 'test'
st = 'demo'
records = []
scan_obj = None
val = 1
def callback( (key, meta, bins) ):
val += 1
records.append(bins)
scan_obj = self.client.scan(ns, st)
try:
scan_obj.foreach(callback, { 'timeout' : 1000 })
exce | pt ParamError as exception:
assert exception.code == -2L
assert exception.msg == "Callb | ack function contains an error"
def test_scan_with_callback_returning_false(self):
"""
Invoke scan() with callback function returns false
"""
ns = 'test'
st = 'demo'
records = []
scan_obj = None
def callback((key, meta, bins)):
if len(records) == 10:
return False
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.foreach(callback, {'timeout': 1000})
assert len(records) == 10
def test_scan_with_unicode_set(self):
ns = 'test'
st = u'demo'
records = []
scan_obj = None
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.foreach(callback)
assert len(records) != 0
def test_scan_with_select_clause(self):
ns = 'test'
st = 'demo'
records = []
scan_obj = None
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.select('name')
scan_obj.foreach(callback)
assert len(records) != 0
def test_scan_with_results_method(self):
ns = 'test'
st = 'demo'
records = []
scan_obj = None
scan_obj = self.client.scan(ns, st)
scan_obj.select(u'name', u'age')
records = scan_obj.results()
assert len(records) != 0
def test_scan_with_select_bin_integer(self):
"""
Invoke scan() with select bin is of type integer.
"""
scan_obj = None
scan_obj = self.client.scan('test', 'demo')
try:
scan_obj.select(22, 'test_age')
except ParamError as exception:
assert exception.code == -2L
assert exception.msg == 'Bin name should be of type string'
def test_scan_with_options_positive(self):
"""
Invoke scan() with options positive
"""
ns = 'test'
st = 'demo'
records = []
scan_obj = None
options = {
"percent": 100,
"concurrent": True,
"priority": aerospike.SCAN_PRIORITY_HIGH
}
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.foreach(callback, {}, options)
assert len(records) != 0
def test_scan_with_options_percent_negative(self):
"""
Invoke scan() with options negative
"""
ns = 'test'
st = 'demo'
records = []
scan_obj = None
options = {
"percent": 80,
"concurrent": True,
"priority": aerospike.SCAN_PRIORITY_HIGH
}
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.foreach(callback, {}, options)
assert records == []
def test_scan_with_options_nobins(self):
"""
Invoke scan() with nobins
"""
ns = 'test'
st = 'demo'
records = []
scan_obj = None
options = {"priority": aerospike.SCAN_PRIORITY_HIGH, "nobins": True}
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.foreach(callback, {}, options)
assert len(records) != 0
def test_scan_with_options_nobins_false(self):
"""
Invoke scan() with nobins
"""
ns = 'test'
st = 'demo'
records = []
scan_obj = None
options = {"priority": aerospike.SCAN_PRIORITY_HIGH, "nobins": "true"}
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
try:
scan_obj.foreach(callback, { 'timeout' : 1000 }, options)
except ParamError as exception:
assert exception.code == -2L
assert exception.msg == 'Invalid value(type) for nobins'
def test_scan_with_multiple_foreach_on_same_scan_object(self):
"""
Invoke multiple foreach on same scan object.
"""
ns = 'test'
st = 'demo'
records = []
scan_obj = None
def callback((key, meta, bins)):
records.append(bins)
scan_obj = self.client.scan(ns, st)
scan_obj.for |
ikoz/mitmproxy | pathod/pathod.py | Python | mit | 16,060 | 0.000747 | import copy
import logging
import os
import sys
import threading
import urllib
from netlib import tcp, http, certutils, websockets
from netlib.exceptions import HttpException, HttpReadDisconnect, TcpTimeout, TcpDisconnect, \
TlsException
from . import version, app, language, utils, log, protocols
import language.http
import language.actions
import language.exceptions
import language.websockets
DEFAULT_CERT_DOMAIN = "pathod.net"
CONFDIR = "~/.mitmproxy"
CERTSTORE_BASENAME = "mitmproxy"
CA_CERT_NAME = "mitmproxy-ca.pem"
DEFAULT_CRAFT_ANCHOR = "/p/"
logger = logging.getLogger('pathod')
class PathodError(Exception):
    """Base exception for errors raised by the pathod server."""
    pass
class SSLOptions(object):
    """TLS configuration for a pathod server.

    Bundles the on-disk certificate store (rooted at *confdir*) with the
    protocol version, cipher list and ALPN settings used when client
    connections are upgraded to SSL/TLS.
    """
    def __init__(
        self,
        confdir=CONFDIR,
        cn=None,
        sans=(),
        not_after_connect=None,
        request_client_cert=False,
        ssl_version=tcp.SSL_DEFAULT_METHOD,
        ssl_options=tcp.SSL_DEFAULT_OPTIONS,
        ciphers=None,
        certs=None,
        alpn_select=b'h2',
    ):
        self.confdir = confdir
        # Optional common-name override; takes precedence in get_cert().
        self.cn = cn
        # Subject alternative names passed along when fetching certificates.
        self.sans = sans
        self.not_after_connect = not_after_connect
        self.request_client_cert = request_client_cert
        self.ssl_version = ssl_version
        self.ssl_options = ssl_options
        self.ciphers = ciphers
        self.alpn_select = alpn_select
        # Certificate store persisted under confdir (expanded for "~").
        self.certstore = certutils.CertStore.from_store(
            os.path.expanduser(confdir),
            CERTSTORE_BASENAME
        )
        # Each entry of *certs* is unpacked as arguments to add_cert_file.
        for i in certs or []:
            self.certstore.add_cert_file(*i)
    def get_cert(self, name):
        """Return a certificate for *name* from the store.

        The configured common name (if any) overrides *name*; an empty or
        missing name falls back to DEFAULT_CERT_DOMAIN.
        """
        if self.cn:
            name = self.cn
        elif not name:
            name = DEFAULT_CERT_DOMAIN
        return self.certstore.get_cert(name, self.sans)
class PathodHandler(tcp.BaseHandler):
wbufsize = 0
sni = None
def __init__(
self,
connection,
address,
server,
logfp,
settings,
http2_framedump=False
):
tcp.BaseHandler.__init__(self, connection, address, server)
self.logfp = logfp
self.settings = copy.copy(settings)
self.protocol = None
self.use_http2 = False
self.http2_framedump = http2_framedump
def handle_sni(self, connection):
self.sni = connection.get_servername()
    def http_serve_crafted(self, crafted, logctx):
        """Serve a crafted response spec, enforcing server policy first.

        Returns a (next_handler, log) tuple: next_handler handles the next
        request on this connection, or None when it must be closed; log is a
        dict describing what was served.
        """
        error, crafted = self.server.check_policy(
            crafted, self.settings
        )
        if error:
            # Policy rejected the spec: serve an error page and disconnect.
            err = self.make_http_error_response(error)
            language.serve(err, self.wfile, self.settings)
            return None, dict(
                type="error",
                msg=error
            )
        if self.server.explain and not hasattr(crafted, 'is_error_response'):
            # Explain mode: freeze the spec so the logged form matches
            # exactly what gets served.
            crafted = crafted.freeze(self.settings)
            logctx(">> Spec: %s" % crafted.spec())
        response_log = language.serve(
            crafted,
            self.wfile,
            self.settings
        )
        if response_log["disconnect"]:
            # The spec itself requested a disconnect after serving.
            return None, response_log
        return self.handle_http_request, response_log
def handle_http_request(self, logger):
"""
Returns a (handler, log) tuple.
handler: Handler for the next request, or None to disconnect
log: A dictionary, or None
"""
with logger.ctx() as lg:
try:
req = self.protocol.read_request(self.rfile)
except HttpReadDisconnect:
return None, None
except HttpException as s:
s = str(s)
lg(s)
return None, dict(type="error", msg=s)
if req.method == 'CONNECT':
return self.protocol.handle_http_connect([req.host, req.port, req.http_version], lg)
method = req.method
path = req.path
http_version = req.http_version
headers = req.headers
body = req.content
clientcert = None
if self.clientcert:
clientcert = dict(
cn=self.clientcert.cn,
subject=self.clientcert.subject,
serial=self.clientcert.serial,
notbefore=self.clientcert.notbefore.isoformat(),
notafter=self.clientcert.notafter.isoformat(),
keyinfo=self.clientcert.keyinfo,
)
retlog = dict(
type="crafted",
protocol="http",
request=dict(
path=path,
method=method,
headers=headers.fields,
http_version=http_version,
sni=self.sni,
remote_address=self.address(),
clientcert=clientcert,
),
cipher=None,
)
if self.ssl_established:
retlog["cipher"] = self.get_current_cipher()
m = utils.MemBool()
websocket_key = websockets.WebsocketsProtocol.check_client_handshake(headers)
self.settings.websocket_key = websocket_key
# If this is a websocket initiation, we respond with a proper
# server response, unless over-ridden.
if websocket_key:
anchor_gen = language.parse_pathod("ws")
else:
anchor_gen = None
for regex, spec in self.server.anchors:
if regex.match(path):
anchor_gen = language.parse_pathod(spec, self.use_http2)
break
else:
if m(path.startswith(self.server.craftanchor)):
spec = urllib.unquote(path)[len(self.server.craftanchor):]
if spec:
try:
anchor_gen = language.parse_pathod(spec, self.use_http2)
except language.ParseException as v:
lg("Parse error: %s" % v.msg)
anchor_gen = iter([self.make_http_error_response(
"Parse Error",
"Error parsing response spec: %s\n" % (
v.msg + v.marked()
)
)])
else:
if self.use_http2:
anchor_gen = iter([self.make_http_error_response(
"Spec Error",
"HTTP/2 only supports request/response with the craft anchor point: %s" %
s | elf.server.craftanchor
)])
if anchor_gen:
spec = anchor_gen.next()
if self.use_http2 and isinstance(spec, language.http2.Response):
spec.stream_id = req.stream_id
lg("crafting spec: %s" % spec)
nexthandler, retlog["response"] = self.http_serve_crafted(
spec,
| lg
)
if nexthandler and websocket_key:
self.protocol = protocols.websockets.WebsocketsProtocol(self)
return self.protocol.handle_websocket, retlog
else:
return nexthandler, retlog
else:
return self.protocol.handle_http_app(method, path, headers, body, lg)
    def make_http_error_response(self, reason, body=None):
        """Build an error response, tagged so explain mode skips freezing it."""
        resp = self.protocol.make_error_response(reason, body)
        # Marker attribute checked via hasattr() in http_serve_crafted.
        resp.is_error_response = True
        return resp
def handle(self):
self.settimeout(self.server.timeout)
if self.server.ssl:
try:
cert, key, _ = self.server.ssloptions.get_cert(None)
self.convert_to_ssl(
cert,
key,
handle_sni=self.handle_sni,
request_client_cert=self.server.ssloptions.request_client_cert,
cipher_list=self.server.ssloptions.ciphers,
method=self.server. |
LudumDareProject/CppGameEngine | build/bakefiles/bakefile_gen.py | Python | apache-2.0 | 4,899 | 0.047561 | import os, configparser, sys, string, re
from fnmatch import fnmatch
def writeMakefile(type):
    """Generate a bakefile for the given project *type* ('editor' or 'game').

    Reads settings from 'bakefile.ini' in the current directory and writes
    the generated target description to 'generated-bakefile.bkl'.

    NOTE(review): the parameter shadows the builtin `type`; name kept for
    interface compatibility.
    """
    config = Config('bakefile.ini')
    settings = ProjectSettings(config, type)
    bakefile = Bakefile(settings)
    bf = open('generated-bakefile.bkl', 'w', newline='')
    bf.write(bakefile.generateProgram(type))
    bf.close()
def runBakefile():
    """Run the external `bakefile` tool on the generated file.

    Not implemented yet; the intended platform-specific behaviour is
    sketched in the comments below.
    """
    # todo:
    # if windows:
    #     bkl bakefile.bkl
    # else if osx:
    #     bakefile bakefile.bkl
    #
    # - check if bakefile is installed
    #
    # - use subprocess to run bakefile
    #   https://docs.python.org/2/library/subprocess.html
    return
class Generator:
    """Collects C/C++ source and header file paths beneath a directory tree.

    All methods are pure file-system lookups; they were previously defined
    without `self` and only callable via the class object, so they are now
    explicit @staticmethods (backward compatible).
    """

    @staticmethod
    def findFiletypes(root, filetypes):
        """Return a string of files under *root* matching glob *filetypes*.

        Each match is prefixed with a newline, matching the bakefile list
        layout expected by Bakefile._writeHeaders/_writeSources.
        """
        matchedFiles = ''
        for path, subdirs, files in os.walk(root):
            for name in files:
                if fnmatch(name, filetypes):
                    matchedFiles += '\n' + os.path.join(path, name)
        return matchedFiles

    @staticmethod
    def genHeaders(root):
        """Return all header files (*.hpp, *.h, *.hxx) under *root*."""
        headers = Generator.findFiletypes(root, '*.hpp')
        headers += Generator.findFiletypes(root, '*.h')
        headers += Generator.findFiletypes(root, '*.hxx')
        return headers

    @staticmethod
    def genSources(root):
        """Return all source files (*.cpp, *.c) under *root*."""
        sources = Generator.findFiletypes(root, '*.cpp')
        sources += Generator.findFiletypes(root, '*.c')
        return sources
class Bakefile:
    """Incrementally builds the textual contents of a bakefile (.bkl) target."""

    def generateLib(self):
        """Render the project as a library target and return the buffer.

        NOTE(review): `_writeLib` is not defined anywhere in this file, so
        calling this currently raises AttributeError.
        """
        self._writeLib()
        return self._buffer

    def generateProgram(self, type):
        """Render the project as a *type* target (e.g. 'program') and return it."""
        self._writeBody(type)
        return self._buffer

    def includeDir(self, path):
        """Register an additional include directory."""
        self._includeDirs.append(path)

    def libs(self, lib):
        """Register a library to link against."""
        # BUG FIX: previously appended the undefined name `path` (NameError).
        self._libs.append(lib)

    def libDir(self, path):
        """Register a library search directory."""
        # BUG FIX: previously appended to non-existent `self._libDir`
        # (__init__ creates `self._libDirs`); also repairs a garbled header.
        self._libDirs.append(path)

    def _writeBody(self, type):
        """Emit the full target block: header, dependencies, file lists."""
        self._write(type + ' ' + self._settings.config.projectName)
        self._writeDependencies()
        self._write(' {\n')
        self._indent(1)
        self._writeIncludes()
        self._writeHeaders()
        self._writeSources()
        self._indent(-1)
        self._write('\n}')
        return self._buffer

    def _format(self, s):
        """Prefix *s* (and its embedded newlines) with the current indent.

        NOTE(review): currently unused -- _write() appends text unformatted.
        """
        ind = ''
        for i in range(0, self._indents):
            ind += '\t'
        newlines = [m.start() for m in re.finditer('\n', s)]
        if (not newlines):
            return ind + s
        for n in range(0, len(newlines)):
            s = s[:newlines[n]] + ind + s[newlines[n]:]
        return ind + s

    def _write(self, s):
        self._buffer += s  # self._format(s)

    def _indent(self, indents):
        """Adjust the (currently cosmetic) indentation level by *indents*."""
        self._indents += indents

    def _writeIncludes(self):
        """Copy the include directories from the project settings."""
        for i in range(0, len(self._settings.includeDirs)):
            self.includeDir(self._settings.includeDirs[i])

    def _writeDependencies(self):
        """Emit ' : dep1, dep2' after the target header, if any deps exist."""
        if not self._dependencies:
            return
        self._write(' : ')
        for i in range(0, len(self._dependencies)):
            self._write(self._dependencies[i])
            # BUG FIX: off-by-one -- `i != len(...)` was always true, so a
            # trailing ", " followed the last dependency.
            if i != len(self._dependencies) - 1:
                self._write(', ')

    def _writeLibs(self):
        for i in range(0, len(self._libs)):
            self._write('libs += ' + self._libs[i])

    def _writeHeaders(self):
        """Emit the headers { ... } block from all include directories."""
        self._write('headers {')
        for i in range(0, len(self._includeDirs)):
            self._write(Generator.genHeaders(self._includeDirs[i]))
        self._write('\n}\n')

    def _writeSources(self):
        """Emit the sources { ... } block from all include directories."""
        self._write('sources {')
        for i in range(0, len(self._includeDirs)):
            self._write(Generator.genSources(self._includeDirs[i]))
        self._write('\n}\n')

    def __init__(self, settings):
        self._buffer = ''
        self._settings = settings
        self._indents = 0
        self._includeDirs = []
        self._libs = []
        self._libDirs = []
        self._headers = []
        self._sources = []
        self._dependencies = []
class Config:
    """Reads project configuration from an INI file.

    Exposes the parsed values as attributes (projectName, rootdir,
    enginesrc, editorsrc, gameName, gamesrc).  Directory values are
    normalised to backslash-separated paths with a trailing backslash.
    """

    def _loadConfig(self):
        """Populate the public attributes from the parsed INI data."""
        self._readProjectProperties()

    def _getPath(self, section, option):
        """Fetch *option* from *section* as a backslash-terminated path."""
        raw = self.parser.get(section, option).replace('/', '\\')
        return raw if raw.endswith('\\') else raw + '\\'

    def _readProjectProperties(self):
        """Read the [Project], [Engine], [Editor] and [Game] sections."""
        get_path = self._getPath
        self.projectName = self.parser.get('Project', 'projectname')
        self.rootdir = get_path('Project', 'rootdir')
        # Source directories are all resolved relative to rootdir.
        self.enginesrc = self.rootdir + get_path('Engine', 'enginesrc')
        self.editorsrc = self.rootdir + get_path('Editor', 'editorsrc')
        self.gameName = self.parser.get('Game', 'gamename')
        self.gamesrc = self.rootdir + get_path('Game', 'gamesrc')

    def __init__(self, path):
        self.path = path
        self.parser = configparser.ConfigParser()
        self.parser.read(path)
        self._loadConfig()
class ProjectSettings:
    """Derives the include-directory list for one or more project types.

    Supported types are 'editor' and 'game'; unrecognised types are
    reported on stdout and otherwise ignored.
    """

    def _editorSetup(settings):
        # Editor builds compile against the engine and editor sources.
        settings.includeDirs.append(settings.config.enginesrc)
        settings.includeDirs.append(settings.config.editorsrc)

    def _gameSetup(settings):
        # Game builds compile against the engine and game sources.
        settings.includeDirs.append(settings.config.enginesrc)
        settings.includeDirs.append(settings.config.gamesrc)

    # Dispatch table mapping a project-type name to its setup routine.
    projectTypes = {
        'editor': _editorSetup,
        'game': _gameSetup,
    }

    def __init__(self, config, *types):
        self.includeDirs = []
        self.config = config
        for kind in types:
            setup = self.projectTypes.get(kind)
            if setup is not None:
                setup(self)
            else:
                print('Unknown type: ' + str(kind))
if __name__ == "__main__":
    # CLI entry point: each command-line argument is a project type
    # ('editor' or 'game') for which a bakefile is generated.
    if not sys.argv[1:]:
        print("Missing arguments")
    else:
        for i in range(1, len(sys.argv)):
            writeMakefile(sys.argv[i])
fiete201/qutebrowser | qutebrowser/misc/editor.py | Python | gpl-3.0 | 10,070 | 0 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Launcher for an external editor."""
import os
import tempfile
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QObject, QProcess,
QFileSystemWatcher)
from qutebrowser.config import config
from qutebrowser.utils import message, log
from qutebrowser.misc import guiprocess
from qutebrowser.qt import sip
class ExternalEditor(QObject):
"""Class to simplify editing a text in an external editor.
Attributes:
_text: The current text before the editor is opened.
_filename: The name of the file to be edited.
_remove_file: Whether the file should be removed when the editor is
closed.
_proc: The GUIProcess of the editor.
_watcher: A QFileSystemWatcher to watch the edited file for changes.
Only set if watch=True.
_content: The last-saved text of the editor.
Signals:
file_updated: The text in the edited file was updated.
arg: The new text.
editing_finished: The editor process was closed.
"""
file_updated = pyqtSignal(str)
editing_finished = pyqtSignal()
def __init__(self, parent=None, watch=False):
super().__init__(parent)
self._filename = None
self._proc = None
self._remove_file = None
self._watcher = QFileSystemWatcher(parent=self) if watch else None
self._content = None
def _cleanup(self):
"""Clean up temporary files after the editor closed."""
assert self._remove_file is not None
if (self._watcher is not None and
not sip.isdeleted(self._watcher) and
self._watcher.files()):
failed = self._watcher.removePaths(self._watcher.files())
if failed:
log.procs.error("Failed to unwatch paths: {}".format(failed))
if self._filename is None or not self._remove_file:
# Could not create initial file.
return
assert self._proc is not None
try:
if self._proc.exit_status() != QProcess.CrashExit:
os.remove(self._filename)
| except OSError as e:
# NOTE: Do not replace this with "raise CommandError" as it's
# executed async.
message.error("Failed to delete tempfile... ({})".format(e))
@pyqtSlot(int, QProcess.ExitStatus)
def _on_proc_closed(self, _exitcode, exitstatus):
"""Write the editor text into the form field and clean up tempfile.
Callback for QProcess when the editor was closed.
| """
if sip.isdeleted(self): # pragma: no cover
log.procs.debug("Ignoring _on_proc_closed for deleted editor")
return
log.procs.debug("Editor closed")
if exitstatus != QProcess.NormalExit:
# No error/cleanup here, since we already handle this in
# on_proc_error.
return
# do a final read to make sure we don't miss the last signal
self._on_file_changed(self._filename)
self.editing_finished.emit()
self._cleanup()
@pyqtSlot(QProcess.ProcessError)
def _on_proc_error(self, _err):
self._cleanup()
    def edit(self, text, caret_position=None):
        """Edit a given text in a newly created temporary file.

        Args:
            text: The initial text to edit.
            caret_position: The position of the caret in the text.

        Raises:
            ValueError: If an edit is already in progress.
        """
        if self._filename is not None:
            raise ValueError("Already editing a file!")
        try:
            self._filename = self._create_tempfile(text, 'qutebrowser-editor-')
        except OSError as e:
            message.error("Failed to create initial file: {}".format(e))
            return
        # We own this tempfile, so it is deleted when the editor closes.
        self._remove_file = True
        line, column = self._calc_line_and_column(text, caret_position)
        self._start_editor(line=line, column=column)
def backup(self):
"""Create a backup if the content has changed from the original."""
if not self._content:
return
try:
fname = self._create_tempfile(self._content,
'qutebrowser-editor-backup-')
message.info('Editor backup at {}'.format(fname))
except OSError as e:
message.error('Failed to create editor backup: {}'.format(e))
    def _create_tempfile(self, text, prefix):
        """Write *text* to a new temporary file and return its path.

        Raises:
            OSError: If the file cannot be created or written.
        """
        # Close while the external process is running, as otherwise systems
        # with exclusive write access (e.g. Windows) may fail to update
        # the file from the external editor, see
        # https://github.com/qutebrowser/qutebrowser/issues/1767
        with tempfile.NamedTemporaryFile(
                mode='w', prefix=prefix,
                encoding=config.val.editor.encoding,
                delete=False) as fobj:
            if text:
                fobj.write(text)
            return fobj.name
    @pyqtSlot(str)
    def _on_file_changed(self, path):
        """Read back the edited file and emit file_updated if it changed.

        Slot for the QFileSystemWatcher; also invoked once more when the
        editor process exits to catch a final save.  Read errors are
        reported to the user instead of raised, since this runs async.
        """
        try:
            with open(path, 'r', encoding=config.val.editor.encoding) as f:
                text = f.read()
        except OSError as e:
            # NOTE: Do not replace this with "raise CommandError" as it's
            # executed async.
            message.error("Failed to read back edited file: {}".format(e))
            return
        log.procs.debug("Read back: {}".format(text))
        # Only propagate real changes to avoid redundant updates.
        if self._content != text:
            self._content = text
            self.file_updated.emit(text)
def edit_file(self, filename):
"""Edit the file with the given filename."""
if not os.path.exists(filename):
with open(filename, 'w', encoding='utf-8'):
pass
self._filename = filename
self._remove_file = False
self._start_editor()
def _start_editor(self, line=1, column=1):
"""Start the editor with the file opened as self._filename.
Args:
line: the line number to pass to the editor
column: the column number to pass to the editor
"""
self._proc = guiprocess.GUIProcess(what='editor', parent=self)
self._proc.finished.connect(self._on_proc_closed)
self._proc.error.connect(self._on_proc_error)
editor = config.val.editor.command
executable = editor[0]
if self._watcher:
assert self._filename is not None
ok = self._watcher.addPath(self._filename)
if not ok:
log.procs.error("Failed to watch path: {}"
.format(self._filename))
self._watcher.fileChanged.connect( # type: ignore[attr-defined]
self._on_file_changed)
args = [self._sub_placeholder(arg, line, column) for arg in editor[1:]]
log.procs.debug("Calling \"{}\" with args {}".format(executable, args))
self._proc.start(executable, args)
def _calc_line_and_column(self, text, caret_position):
r"""Calculate line and column numbers given a text and caret position.
Both line and column are 1-based indexes, because that's what most
editors use as line and column starting index. By "most" we mean at
least vim, nvim, gvim, emacs, atom, sublimetext, notepad++, brackets,
visual studio, QtCreator and so on.
To find the line we just count how many newlines there are bef |
gajim/gajim | gajim/gtk/message_input.py | Python | gpl-3.0 | 10,223 | 0 | # Copyright (C) 2003-2014 Yann Leboulanger <asterix AT lagaule.org>
# Copyright (C) 2005-2007 Nikos Kouremenos <kourem AT gmail.com>
# Copyright (C) 2006 Dimitur Kirov <dkirov AT gmail.com>
# Copyright (C) 2008-2009 Julien Pivotto <roidelapluie AT gmail.com>
#
# This file is part of Gajim.
#
# Gajim is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; version 3 only.
#
# Gajim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gajim. If not, see <http://www.gnu.org/licenses/>.
from typing import Any
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkPixbuf
from gi.repository import GLib
from gi.repository import | Pango
from gajim.common import app
from gajim.common.styling import process
from gajim.common.styling import PlainBlock
from .util import scroll_to_end
if app.is_installed('GSPELL | '):
from gi.repository import Gspell # pylint: disable=ungrouped-imports
UNDO_LIMIT: int = 20
FORMAT_CHARS: dict[str, str] = {
'bold': '*',
'italic': '_',
'strike': '~',
'pre': '`',
}
class MessageInputTextView(Gtk.TextView):
"""
A Gtk.Textview for chat message input
"""
def __init__(self) -> None:
Gtk.TextView.__init__(self)
self.set_border_width(3)
self.set_accepts_tab(True)
self.set_editable(True)
self.set_cursor_visible(True)
self.set_wrap_mode(Gtk.WrapMode.WORD_CHAR)
self.set_left_margin(2)
self.set_right_margin(2)
self.set_pixels_above_lines(2)
self.set_pixels_below_lines(2)
self.get_style_context().add_class('gajim-conversation-text')
self.drag_dest_unset()
self._undo_list: list[str] = []
self.undo_pressed: bool = False
self.get_buffer().create_tag('strong', weight=Pango.Weight.BOLD)
self.get_buffer().create_tag('emphasis', style=Pango.Style.ITALIC)
self.get_buffer().create_tag('strike', strikethrough=True)
self.get_buffer().create_tag('pre', family='monospace')
self.get_buffer().connect('changed', self._on_text_changed)
self.connect_after('paste-clipboard', self._after_paste_clipboard)
self.connect('focus-in-event', self._on_focus_in)
self.connect('focus-out-event', self._on_focus_out)
self.connect('destroy', self._on_destroy)
def _on_destroy(self, _widget: Gtk.Widget) -> None:
# We restore the TextView’s drag destination to avoid a GTK warning
# when closing the control. BaseControl.shutdown() calls destroy()
# on the control’s main box, causing GTK to recursively destroy the
# child widgets. GTK then tries to set a target list on the TextView,
# resulting in a warning because the Widget has no drag destination.
self.drag_dest_set(
Gtk.DestDefaults.ALL,
None,
Gdk.DragAction.DEFAULT)
def _on_focus_in(self,
_widget: Gtk.Widget,
_event: Gdk.EventFocus
) -> bool:
self.toggle_speller(True)
scrolled = self.get_parent()
assert scrolled
scrolled.get_style_context().add_class('message-input-focus')
return False
def _on_focus_out(self,
_widget: Gtk.Widget,
_event: Gdk.EventFocus
) -> bool:
scrolled = self.get_parent()
assert scrolled
scrolled.get_style_context().remove_class('message-input-focus')
if not self.has_text():
self.toggle_speller(False)
return False
def _clear_tags(self) -> None:
buf = self.get_buffer()
start, end = buf.get_bounds()
buf.remove_all_tags(start, end)
def _on_text_changed(self, buf: Gtk.TextBuffer) -> None:
text = self.get_text()
if not text:
return
self._clear_tags()
result = process(text)
for block in result.blocks:
if isinstance(block, PlainBlock):
for span in block.spans:
start_iter = buf.get_iter_at_offset(span.start)
end_iter = buf.get_iter_at_offset(span.end)
buf.apply_tag_by_name(span.name, start_iter, end_iter)
def insert_text(self, text: str) -> None:
self.get_buffer().insert_at_cursor(text)
def insert_newline(self) -> None:
buf = self.get_buffer()
buf.insert_at_cursor('\n')
mark = buf.get_insert()
iter_ = buf.get_iter_at_mark(mark)
if buf.get_end_iter().equal(iter_):
GLib.idle_add(scroll_to_end, self.get_parent())
def has_text(self) -> bool:
buf = self.get_buffer()
start, end = buf.get_bounds()
text = buf.get_text(start, end, True)
return text != ''
def get_text(self) -> str:
buf = self.get_buffer()
start, end = buf.get_bounds()
text = self.get_buffer().get_text(start, end, True)
return text
def toggle_speller(self, activate: bool) -> None:
if app.is_installed('GSPELL') and app.settings.get('use_speller'):
spell_view = Gspell.TextView.get_from_gtk_text_view(self)
spell_view.set_inline_spell_checking(activate)
@staticmethod
def _after_paste_clipboard(textview: Gtk.TextView) -> None:
buf = textview.get_buffer()
mark = buf.get_insert()
iter_ = buf.get_iter_at_mark(mark)
if iter_.get_offset() == buf.get_end_iter().get_offset():
GLib.idle_add(scroll_to_end, textview.get_parent())
def _get_active_iters(self) -> tuple[Gtk.TextIter, Gtk.TextIter]:
buf = self.get_buffer()
return_val = buf.get_selection_bounds()
if return_val: # if something is selected
start, end = return_val[0], return_val[1]
else:
start, end = buf.get_bounds()
return (start, end)
    def apply_formatting(self, formatting: str) -> None:
        """Toggle *formatting* markers around the current selection.

        *formatting* is a key of FORMAT_CHARS ('bold', 'italic', 'strike',
        'pre').  When nothing is selected, the whole buffer is used.  If the
        marker character already wraps the text (inside or immediately
        outside the selection) it is removed; otherwise it is inserted and
        the selection is extended to include the new markers.
        """
        format_char = FORMAT_CHARS[formatting]
        buf = self.get_buffer()
        start, end = self._get_active_iters()
        # Work with integer offsets: iterators are invalidated by edits.
        start_offset = start.get_offset()
        end_offset = end.get_offset()
        text = buf.get_text(start, end, True)
        if text.startswith(format_char) and text.endswith(format_char):
            # (Selected) text begins and ends with formatting chars
            # -> remove them
            buf.delete(
                start,
                buf.get_iter_at_offset(start_offset + 1))
            # After the first delete everything shifted left by one, hence
            # the -2/-1 offsets for the trailing marker.
            buf.delete(
                buf.get_iter_at_offset(end_offset - 2),
                buf.get_iter_at_offset(end_offset - 1))
            return
        # Look one character beyond the selection on both sides.
        ext_start = buf.get_iter_at_offset(start_offset - 1)
        ext_end = buf.get_iter_at_offset(end_offset + 1)
        ext_text = buf.get_text(ext_start, ext_end, True)
        if ext_text.startswith(format_char) and ext_text.endswith(format_char):
            # (Selected) text is surrounded by formatting chars -> remove them
            buf.delete(
                ext_start,
                buf.get_iter_at_offset(start_offset))
            buf.delete(
                buf.get_iter_at_offset(end_offset - 1),
                buf.get_iter_at_offset(end_offset))
            return
        # No formatting chars found at start/end or surrounding -> add them
        buf.insert(start, format_char, -1)
        buf.insert(
            buf.get_iter_at_offset(end_offset + 1),
            format_char,
            -1)
        # Re-select the text including the two inserted marker characters.
        buf.select_range(
            buf.get_iter_at_offset(start_offset),
            buf.get_iter_at_offset(end_offset + 2))
def replace_emojis(self) -> None:
theme = app.settings.get('emoticons_theme')
if not theme or theme == 'font':
return
def _replace(anchor: Gtk.TextChildAnchor) -> |
BeeeOn/server | t/xmlui/t2007-parameter-create-update-delete.py | Python | bsd-3-clause | 8,043 | 0.036802 | #! /usr/bin/env python3
import config
config.import_libs()
import unittest
from xmlui import Connector, Logout
from xmlui import GatewayRegister, GatewayUnregister
from xmlui import DeviceParameterGet
from xmlui import DeviceParameterCreate, DeviceParameterUpdate, DeviceParameterDelete
class TestCRUDeviceParameter(unittest.TestCase):
"""
Create a session, register a well-known gateway with
assigned devices.
"""
    def setUp(self):
        """Log in over XML-UI and register the testing gateway.

        Stores the open connector in self.c and the session id in
        self.session for use by the test cases; tearDown undoes both.
        """
        self.c = Connector(config.xmlui_host, config.xmlui_port, config.xmlui_ssl)
        response = self.c.request(config.PERMIT_LOGIN)
        self.assertTrue(response.is_data())
        self.session = response.sessionid()
        response = self.c.request(GatewayRegister(
            config.gateway_id,
            self.session,
            name = "Gateway with devices"
        ))
        self.assertTrue(response.is_ok())
"""
Unregister the gateway and destroy the session.
"""
def tearDown(self):
response = self.c.request(GatewayUnregister(
config.gateway_id,
self.session
))
self.assertTrue(response.is_ok())
response = self.c.request(Logout(self.session))
self.assertTrue(response.is_ok())
"""
Parameter 'invalid' (or any other garbage) cannot be created.
"""
def test1_create_invalid(self):
response = self.c.request(DeviceParameterCreate(
config.gateway_id,
"0xa335d00019f5234e",
"invalid",
"value",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("998", response.error_code())
response = self.c.request(DeviceParameterCreate(
config.gateway_id,
"0xa335d00019f5234e",
"garbage",
"value",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("998", response.error_code())
"""
Create IP address parameter for device 0xa335d00019f5234e.
There is no such parameter defined thus it can be created.
Finally, delete it to return to the previous state.
"""
def test2_create_delete_ip_address(self):
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"ip-address",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("13", response.error_code())
response = self.c.request(DeviceParameterCreate(
config.gateway_id,
"0xa335d00019f5234e",
"ip-address",
"10.0.0.6",
self.session
))
self.assertTrue(response.is_ok())
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"ip-address",
self.session
))
self.assertTrue(response.is_data())
self.assertEqual("10.0.0.6", response.root[0].get("parametervalue"))
response = self.c.request(DeviceParameterDelete(
config.gateway_id,
"0xa335d00019f5234e",
"ip-address",
self.session
))
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"ip-address",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("13", response.error_code())
"""
Create password parameter for device 0xa335d00019f5234e.
There is no such parameter defined thus it can be created.
Finally, delete it to return to the previous state.
"""
def test3_create_password(self):
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"password",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("13", response.error_code())
response = self.c.request(DeviceParameterCreate(
config.gateway_id,
"0xa335d00019f5234e",
"password",
"top secret",
sel | f.session
))
self.assertTrue(response.is_ok())
response = self.c.request(Devic | eParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"password",
self.session
))
self.assertTrue(response.is_data())
self.assertEqual("*****", response.root[0].get("parametervalue"))
response = self.c.request(DeviceParameterDelete(
config.gateway_id,
"0xa335d00019f5234e",
"password",
self.session
))
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"password",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("13", response.error_code())
"""
Check value of IP address parameter of device 0xa32d27aa5e94ecfd.
Update it to a new value and check it was successful. Finally,
revert the change back.
"""
def test4_update_ip_address(self):
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
self.session
))
self.assertTrue(response.is_data())
self.assertEqual("10.0.0.1", response.root[0].get("parametervalue"))
response = self.c.request(DeviceParameterUpdate(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
"192.168.1.2",
self.session
))
self.assertTrue(response.is_ok())
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
self.session
))
self.assertTrue(response.is_data())
self.assertEqual("192.168.1.2", response.root[0].get("parametervalue"))
response = self.c.request(DeviceParameterUpdate(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
"10.0.0.1",
self.session
))
self.assertTrue(response.is_ok())
"""
Check value of IP address parameter of device 0xa32d27aa5e94ecfd.
Delete it and then re-created it again.
"""
def test5_delete_ip_address(self):
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
self.session
))
self.assertTrue(response.is_data())
self.assertEqual("10.0.0.1", response.root[0].get("parametervalue"))
response = self.c.request(DeviceParameterDelete(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
self.session
))
self.assertTrue(response.is_ok())
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("13", response.error_code())
response = self.c.request(DeviceParameterCreate(
config.gateway_id,
"0xa32d27aa5e94ecfd",
"ip-address",
"10.0.0.1",
self.session
))
self.assertTrue(response.is_ok())
"""
Device 0xa371959aad24618e does not contain firmware but we cannot
create it via this interface anyway.
"""
def test6_create_undefined_firmware_fails(self):
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa371959aad24618e",
"firmware",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("13", response.error_code())
response = self.c.request(DeviceParameterCreate(
config.gateway_id,
"0xa371959aad24618e",
"firmware",
"v2000",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("998", response.error_code())
"""
Device 0xa335d00019f5234e contains firmware but we cannot
update it via this interface anyway.
"""
def test7_update_firmware_fails(self):
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"firmware",
self.session
))
self.assertTrue(response.is_data())
response = self.c.request(DeviceParameterUpdate(
config.gateway_id,
"0xa335d00019f5234e",
"firmware",
"v2001",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("998", response.error_code())
"""
Device 0xa335d00019f5234e contains firmware but we cannot
delete it via this interface anyway.
"""
def test8_update_firmware_fails(self):
response = self.c.request(DeviceParameterGet(
config.gateway_id,
"0xa335d00019f5234e",
"firmware",
self.session
))
self.assertTrue(response.is_data())
response = self.c.request(DeviceParameterDelete(
config.gateway_id,
"0xa335d00019f5234e",
"firmware",
self.session
))
self.assertTrue(response.is_error())
self.assertEqual("998", response.error_code())
if __name__ == '__main__':
import sys
import taprunner
unittest.main(testRunner=taprunner.TAPTestRunner(stream = sys.stdout))
|
Labgoo/google-analytics-for-python | gap/tracker.py | Python | mit | 2,493 | 0.001604 | __author__ = 'minhtule'
from request import *
class Tracker(object):
    """Google Analytics tracker bound to one tracking id and one visitor.

    Builds and sends measurement requests (page views, transactions,
    items) on behalf of the wrapped visitor object.
    """

    def __init__(self, tracking_id, visitor):
        self.__tracking_id = tracking_id
        self.__visitor = visitor
        self.__debug_enabled = False

    @property
    def tracking_id(self):
        """The analytics property id this tracker reports to."""
        return self.__tracking_id

    @property
    def client_id(self):
        """Client id; delegates to the visitor's ``id``."""
        return self.__visitor.id

    @property
    def visitor(self):
        return self.__visitor

    @property
    def debug_enabled(self):
        return self.__debug_enabled

    @debug_enabled.setter
    def debug_enabled(self, value):
        self.__debug_enabled = value

    @property
    def original_request_ip(self):
        return self.visitor.ip_address

    @property
    def original_request_user_agent(self):
        return self.visitor.user_agent

    @property
    def original_request_language(self):
        return self.visitor.language

    # Public methods

    def send_page(self, hostname=None, path=None, title=None):
        """Send a page-view hit; hostname/path default to the visitor's."""
        PageTrackingRequest(
            self,
            document_hostname=hostname if hostname else self.visitor.document_host,
            document_path=path if path else self.visitor.document_path,
            document_title=title
        ).send()

    def send_transaction(self, transaction_id, transaction_affiliation=None,
                         transaction_revenue=None, transaction_shipping=None,
                         transaction_tax=None, currency_code=None):
        """Send an e-commerce transaction hit."""
        TransactionTrackingRequest(
            self,
            transaction_id,
            transaction_affiliation=transaction_affiliation,
            transaction_revenue=transaction_revenue,
            transaction_shipping=transaction_shipping,
            transaction_tax=transaction_tax,
            currency_code=currency_code
        ).send()

    def send_item(self, transaction_id, item_name, item_price=None,
                  item_quantity=None, item_code=None, item_category=None,
                  currency_code=None):
        """Send an e-commerce item hit belonging to *transaction_id*.

        NOTE(review): restored ``item_price=item_price`` -- the original
        line carried a stray dataset split marker ("item_price=i | tem_price").
        """
        ItemTrackingRequest(
            self,
            transaction_id,
            item_name,
            item_price=item_price,
            item_quantity=item_quantity,
            item_code=item_code,
            item_category=item_category,
            currency_code=currency_code
        ).send()
class CustomVariable(object):
    """Index/value pair describing one analytics custom variable."""

    def __init__(self, index, value):
        # NOTE(review): restored the parameter name "index" -- the original
        # signature carried a stray dataset split marker ("i | ndex").
        self.__index = index
        self.__value = value

    @property
    def index(self):
        """Slot number the variable occupies."""
        return self.__index

    @property
    def value(self):
        """Payload stored in the slot."""
        return self.__value
alazanman/py_epg_tests | utils/file_util.py | Python | apache-2.0 | 535 | 0.005607 | # -*- coding: utf-8 -*-
import os
import base64
from | random import choice
def random_file_from_dir(relative_path):
    """Return the absolute path of a randomly chosen file in *relative_path*.

    *relative_path* is resolved against the current working directory.
    NOTE(review): restored "relative_path" -- the original line carried a
    stray dataset split marker ("relativ | e_path").
    """
    random_file = choice(os.listdir(os.path.join(os.getcwd(), relative_path)))
    return abs_path_to_file(os.path.join(relative_path, random_file))
def abs_path_to_file(relative_path):
    """Resolve *relative_path* against the current working directory.

    Returns a normalised absolute path.
    """
    anchored = os.path.join(os.getcwd(), relative_path)
    return os.path.abspath(anchored)
def encode_base64(abs_path):
    """Return the base64 encoding of the file at *abs_path*.

    The file is read in binary mode; the result is a ``bytes`` object.
    FIX(review): the debug print used the Python-2-only statement form
    (``print "abs_path", abs_path``), a syntax error under Python 3 even
    though the rest of the module is version-neutral.
    """
    print("abs_path", abs_path)  # debug trace of the file being encoded
    with open(abs_path, 'rb') as f:
        return base64.b64encode(f.read())
yamt/neutron | quantum/plugins/cisco/l2device_plugin_base.py | Python | apache-2.0 | 5,260 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
from abc import ABCMeta, abstractmethod
import inspect
class L2DevicePluginBase(object):
"""Base class for a device-specific plugin.
An example of a device-specific plugin is a Nexus switch plugin.
The network model relies on device-category-specific plugins to perform
the configuration on each device.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_all_networks(self, tenant_id, **kwargs):
"""Get newtorks.
:returns:
:raises:
"""
pass
@abstractmethod
def create_network(self, tenant_id, net_name, net_id, vlan_name, vlan_id,
**kwargs):
"""Create network.
:returns:
:raises:
"""
pass
@abstractmethod
def delete_network(self, tenant_id, net_id, **kwargs):
"""Delete network.
:returns:
:raises:
"""
pass
@abstractmethod
def get_network_details(self, tenant_id, net_id, **kwargs):
"""Get network details.
:returns:
:raises:
"""
pass
@abstractmethod
def update_network(self, tenant_id, net_id, name, **kwargs):
"""Update network.
:returns:
:raises:
"""
pass
@abstractmethod
def get_all_ports(self, tenant_id, net_id, **kwargs):
"""Get ports.
:returns:
:raises:
"""
pass
@abstractmethod
def create_port(self, tenant_id, net_id, port_state, port_id, **kwargs):
"""Create port.
:returns:
:raises:
"""
pass
@abstractmethod
def delete_port(self, tenant_id, net_id, port_id, **kwargs):
"""Delete port.
:returns:
:raises:
"""
pass
@abstractmethod
def update_port(self, tenant_id, net_id, port_id, **kwargs):
"""Update port.
:returns:
:raises:
"""
pass
@abstractmethod
def get_port_details(self, tenant_id, net_id, port_id, **kwargs):
"""Get port details.
:returns:
:raises:
"""
pass
@abstractmethod
def plug_interface(self, tenant_id, net_id, port_id, remote_interface_id,
**kwargs):
"""Plug interface.
:returns:
:raises:
"""
pass
@abstractmethod
def unplug_interface(self, tenant_id, net_id, port_id, **kwargs):
"""Unplug interface.
:returns:
:raises:
"""
pass
def create_subnet(self, tenant_id, net_id, ip_version,
subnet_cidr, **kwargs):
"""Create subnet.
:returns:
:raises:
"""
pass
def get_subnets(self, tenant_id, net_id, **kwargs):
"""Get subnets.
:returns:
:raises:
"""
pass
def get_subnet(self, tenant_id, net_id, subnet_id, **kwargs):
| """Get subnet.
:returns:
:raises:
"""
pass
def | update_subnet(self, tenant_id, net_id, subnet_id, **kwargs):
"""Update subnet.
:returns:
:raises:
"""
pass
def delete_subnet(self, tenant_id, net_id, subnet_id, **kwargs):
"""Delete subnet.
:returns:
:raises:
"""
pass
@classmethod
def __subclasshook__(cls, klass):
"""Check plugin class.
The __subclasshook__ method is a class method
that will be called everytime a class is tested
using issubclass(klass, Plugin).
In that case, it will check that every method
marked with the abstractmethod decorator is
provided by the plugin class.
"""
if cls is L2DevicePluginBase:
for method in cls.__abstractmethods__:
method_ok = False
for base in klass.__mro__:
if method in base.__dict__:
fn_obj = base.__dict__[method]
if inspect.isfunction(fn_obj):
abstract_fn_obj = cls.__dict__[method]
arg_count = fn_obj.func_code.co_argcount
expected_arg_count = \
abstract_fn_obj.func_code.co_argcount
method_ok = arg_count == expected_arg_count
if method_ok:
continue
return NotImplemented
return True
return NotImplemented
|
yamstudio/Codeforces | 000/1A - Theatre Square.py | Python | gpl-3.0 | 177 | 0.00565 | from math import *
inp = raw | _input()
spl = inp.split()
n = int(spl[0])
m = | int(spl[1])
a = int(spl[2])
i = int(ceil(n * 1.0 / a))
j = int(ceil(m * 1.0 / a))
print max(1, i * j) |
jasonwee/asus-rt-n14uhp-mrtg | src/lesson_runtime_features/os_system_example.py | Python | apache-2.0 | 45 | 0 | import os
# Simple command: run `pwd` through the shell; its output goes to stdout.
# NOTE(review): restored 'pwd' -- the original read "os.system('pw | d')",
# a stray dataset split marker.
os.system('pwd')
| |
ebbkrdrcn/rexp | test/compiler.py | Python | apache-2.0 | 790 | 0.002532 | import unittest, re
from rexp.compiler import PatternCompiler
class | CompilerTestMethods(unittest.TestCase):
def test_compile_1(self):
compiler = PatternCompiler(pattern_set=dict(
TEST=r'\w+'
))
try:
c1 = compiler.compile('$1{TEST}')
except Exception as exc:
self.assertTrue(1)
c1 = compiler.compile( | '$1{TEST}', ['test'])
self.assertEqual(c1, r'(?:(?P<test>(\w+)))')
def test_compile_2(self):
compiler = PatternCompiler(pattern_set=dict(
TEST=r'\w+'
))
try:
c1 = compiler.compile('$1{TEST}')
except:
self.assertTrue(1)
c1 = compiler.compile('$1{TEST}', ['test'])
self.assertEqual(c1, r'(?:(?P<test>(\w+)))')
|
JulienMcJay/eclock | windows/kivy/kivy/adapters/listadapter.py | Python | gpl-2.0 | 17,276 | 0.001679 | '''
ListAdapter
=================
.. versionadded:: 1.5
.. warning::
This code is still experimental, and its API is subject to change in a
future version.
A :class:`ListAdapter` is an adapter around a python list.
Selection operations are a main concern for the class.
From an :class:`Adapter`, a :class:`ListAdapter` gets cls, template, and
args_converter properties and adds others that control selection behaviour:
* *selection*, a list of selected items.
* *selection_mode*, 'single', 'multiple', 'none'
* *allow_empty_selection*, a boolean -- If False, a selection is forced. If
True, and only user or programmatic action will change selection, it can
be empty.
If you wish to have a bare-bones list adapter, without selection, use a
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter`.
A :class:`~kivy.adapters.dictadapter.DictAdapter` is a subclass of a
:class:`~kivy.adapters.listadapter.ListAdapter`. They both dispatch the
*on_selection_change* event.
:Events:
`on_selection_change`: (view, view list )
Fired when selection changes
.. versionchanged:: 1.6.0
Added data = ListProperty([]), which was proably inadvertently deleted at
some point. This means that whenever data changes an update will fire,
instead of having to reset the data object (Adapter has data defined as
an ObjectProperty, so we need to reset it here to ListProperty). See also
DictAdapter and its set of data = DictProperty().
'''
__all__ = ('ListAdapter', )
import inspect
from kivy.event import EventDispatcher
from kivy.adapters.adapter import Adapter
from kivy.adapters.models import SelectableDataItem
from kivy.properties import ListProperty
from kivy.properties import DictProperty
from kivy.properties import BooleanProperty
from kivy.properties import OptionProperty
from kivy.properties import NumericProperty
from kivy.lang import Builder
class ListAdapter(Adapter, EventDispatcher):
'''
A base class for adapters interfacing with lists, dictionaries or other
collection type data, adding selection, view creation and management
functonality.
'''
data = ListProperty([])
'''The data list property is redefined here, overriding its definition as
an ObjectProperty in the Adapter class. We bind to data so that any
changes will trigger updates. See also how the
:class:`~kivy.adapters.DictAdapter` redefines data as a
:class:`~kivy.properties.DictProperty`.
:attr:`data` is a :class:`~kivy.properties.ListProperty` and defaults
to [].
'''
selection = ListProperty([])
'''The selection list property is the container for selected items.
:attr:`selection` is a :class:`~kivy.properties.ListProperty` and defaults
to [].
'''
selection_mode = OptionProperty('single',
options=('none', 'single', 'multiple'))
'''Selection modes:
* *none*, use the list as a simple list (no select action). This option
is here so that selection can be turned off, momentarily or
permanently, for an existing list adapter.
A :class:`~kivy.adapters.listadapter.ListAdapter` is not meant to be
used as a primary no-selection list adapter. Use a
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter` for that.
* *single*, multi-touch/click ignored. Single item selection only.
* *multiple*, multi-touch / incremental addition to selection allowed;
may be limited to a count by selection_limit
:attr:`selection_mode` is an :class:`~kivy.properties.OptionProperty` and
defaults to 'single'.
'''
propagate_selection_to_data = BooleanProperty(False)
'''Normally, data items are not selected/deselected because the data items
might not have an is_selected boolean property -- only the item view for a
given data item is selected/deselected as part of the maintained selection
list. However, if the data items do have an is_selected property, or if
they mix in :class:`~kivy.adapters.models.SelectableDataItem`, the
selection machinery can propagate selection to data items. This can be
useful for storing selection state in a local database or backend database
for maintaining state in game play or other similar scenarios. It is a
convenience function.
To propagate selection or not?
Consider a shopping list application for shopping for fruits at the
market. The app allows for the selection of fruits to buy for each day of
the week, presenting seven lists: one for each day of the week. Each list is
loaded with all the available fruits, but the selection for each is a
subset. There is only one set of fruit data shared between the lists, so
it would not make sense to propagate selection to the data because
selection in any of the seven lists would clash and mix with that of the
others.
However, consider a game that uses the same fruits data for selecting
fruits available for fruit-tossing. A given round of play could have a
full fruits list, with fruits available for tossing shown selected. If the
game is saved and rerun, the full fruits list, with selection marked on
each item, would be reloaded correctly if selection is always propagated to
the data. You could accomplish the same functionality by writing code to
operate on list selection, but having selection stored in the data
ListProperty might prove convenient in some cases.
:attr:`propagate_selection_to_data` is a
:class:`~kivy.properties.BooleanProperty` and defaults to False.
'''
allow_empty_selection = BooleanProperty(True)
'''The allow_empty_selection may be used for cascading selection between
several list views, or between a list view and an observing view. Such
automatic maintenance of the selection is important for all but simple
list displays. Set allow_empty_selection to False and the selection is
auto-initialized and always maintained, so any observing views
may likewise be updated to stay in sync.
:attr:`allow_empty_selection` is a
:class:`~kivy.properties.BooleanProperty` and defaults to True.
'''
selection_limit = NumericProperty(-1)
'''When the selection_mode is multiple and the selection_limit is
non-negative, this number will limit the number of selected items. It can
be set to 1, which is equivalent to single selection. If selection_limit is
not set, the default value is -1, meaning that no limit will | be enforced.
:attr:`selection_limit` is a :class:`~kivy.properties.NumericProperty` and
defaults to -1 (no limit).
'''
cached_views = DictProperty({})
'''View instances for data items are instantiated and managed by the
adapter. Here we maintain a dictionary containing the view
instances keyed to the indices in the data.
This dictionary works as a cache. get_view() only asks for a view from
| the adapter if one is not already stored for the requested index.
:attr:`cached_views` is a :class:`~kivy.properties.DictProperty` and
defaults to {}.
'''
__events__ = ('on_selection_change', )
def __init__(self, **kwargs):
super(ListAdapter, self).__init__(**kwargs)
self.bind(selection_mode=self.selection_mode_changed,
allow_empty_selection=self.check_for_empty_selection,
data=self.update_for_new_data)
self.update_for_new_data()
def delete_cache(self, *args):
self.cached_views = {}
def get_count(self):
return len(self.data)
def get_data_item(self, index):
if index < 0 or index >= len(self.data):
return None
return self.data[index]
def selection_mode_changed(self, *args):
if self.selection_mode == 'none':
for selected_view in self.selection:
self.deselect_item_view(selected_view)
else:
self.check_for_empty_selection()
def get_view(self, index):
if index in self.cached_views:
return self.cached_views[index]
item_view = self.create_view(index)
i |
elmadjian/mac0499 | coletas/user_5_OK/detector.py | Python | mit | 4,312 | 0.009511 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#----------------------
#Criado por Cadu
#----------------------
import re, sys, time, threading
#Detector de leitura (Campbell, 2001)
#------------------------------------
class Detector(threading.Thread):
    """Reading detector (Campbell, 2001) fed with averaged gaze windows.

    A producer thread buffers raw (x, y) gaze samples through
    ``storeValues`` and wakes this thread via the shared condition
    variable; every wake-up collapses the buffer into one averaged point
    and scores the eye-movement pattern as reading evidence.
    """

    def __init__(self, thresh, cv):
        threading.Thread.__init__(self)
        self.x_coord = []          # buffered raw x samples
        self.y_coord = []          # buffered raw y samples
        self.thresh = thresh       # score at/above which "reading" is reported
        self.state = 0             # accumulated reading evidence
        self.ant_saccade = False   # last vertical move was a short "down"
        self.stop = False          # set by the producer to end detect()
        self.cv = cv               # condition variable shared with the producer
        self.time = time.time()    # start timestamp, used for log output
        self.cnt_yes = 0           # windows classified as reading
        self.cnt_no = 0            # windows classified as not reading
        self.cnt_total = 0         # all windows processed

    def run(self):
        self.detect(self.cv)

    def storeValues(self, x, y):
        """Buffer one raw sample; values may arrive as strings."""
        self.x_coord.append(float(x))
        self.y_coord.append(float(y))

    def average(self):
        """Average the buffered samples, clear the buffers, return (x, y)."""
        sum_x = 0
        sum_y = 0
        length = len(self.x_coord)
        for i in range(length):
            sum_x += self.x_coord[i]
            sum_y += self.y_coord[i]
        avg_x, avg_y = sum_x/length, sum_y/length
        self.x_coord[:] = []
        self.y_coord[:] = []
        return avg_x, avg_y

    def analyze_x(self, prev_x, avg_x):
        """Score the horizontal displacement of the latest window."""
        x = avg_x - prev_x
        #short right:
        if 0.015 <= x < 0.05:
            self.state += 10
        #medium right:
        elif 0.05 < x <= 0.1:
            self.state += 5
        #long right:
        elif 0.1 < x:
            self.state = 0
        #short left:
        elif -0.05 <= x < -0.015:
            self.state -= 10
        #medium left:
        elif -0.1 <= x < -0.05:
            if self.ant_saccade:
                self.state += 5
            else:
                self.state -= 5
        #long left:
        elif x < -0.1:
            # NOTE(review): restored "if" -- the original line carried a
            # stray dataset split marker ("| if self.ant_saccade:").
            if self.ant_saccade:
                self.state += 5
            else:
                self.state = 0
        # A short-down saccade only influences the very next horizontal move.
        # NOTE(review): placement reconstructed at method level (reset after
        # every call) -- confirm against the upstream detector.py.
        self.ant_saccade = False

    def analyze_y(self, prev_y, avg_y):
        """Score the vertical displacement of the latest window.

        NOTE(review): restored "def" -- the original header carried a
        stray dataset split marker ("de | f analyze_y").
        """
        y = prev_y - avg_y
        #short up:
        if -0.05 <= y < -0.015:
            self.state -= 5
        #long and medium up:
        elif y < -0.05:
            self.state = 0
        #short down:
        if 0.015 <= y < 0.05:
            self.ant_saccade = True
        #medium down:
        elif 0.05 <= y < 0.1:
            self.state -= 5
        #long down:
        elif 0.1 <= y:
            self.state = 0

    def detect(self, cv):
        """Consume averaged windows until ``stop`` is set."""
        prev_x = 0
        prev_y = 0
        while True:
            with cv:
                cv.wait()
                if self.stop:
                    break
                avg_x, avg_y = self.average()
                # Vertical first: it may arm ant_saccade for analyze_x.
                self.analyze_y(prev_y, avg_y)
                self.analyze_x(prev_x, avg_x)
                prev_x = avg_x
                prev_y = avg_y
                if self.state >= self.thresh:
                    print("\rleitura detectada em", time.time() - self.time)
                    self.cnt_yes += 3
                else:
                    # (disabled) progress trace:
                    # print("\raguardando...", self.state)
                    self.cnt_no += 3
                self.cnt_total += 3
#Lê o arquivo de entrada
#-----------------------
class FileReader:
    """Parses a gaze-sample file into parallel x/y coordinate lists.

    Each line containing at least two numeric tokens contributes its
    first token to ``x_coord`` and its second to ``y_coord`` (values are
    kept as strings).
    """

    def __init__(self, filename):
        self.x_coord = []
        self.y_coord = []
        self.readFile(filename)

    def readFile(self, filename):
        """Scan *filename* line by line, collecting coordinate pairs."""
        number = re.compile("\d+.?\d+")
        with open(filename, 'r') as sample:
            for line in sample:
                matches = number.findall(line)
                if not matches:
                    continue
                self.x_coord.append(matches[0])
                self.y_coord.append(matches[1])
#------------------------
if __name__ == '__main__':
    # Feed the recorded gaze samples to the detector in batches of three,
    # waking it through the shared condition variable after each batch.
    fr = FileReader(sys.argv[1])
    cv = threading.Condition()
    detector = Detector(30, cv)
    detector.start()
    for i in range(len(fr.x_coord)//3):
        detector.storeValues(fr.x_coord.pop(0), fr.y_coord.pop(0))
        detector.storeValues(fr.x_coord.pop(0), fr.y_coord.pop(0))
        detector.storeValues(fr.x_coord.pop(0), fr.y_coord.pop(0))
        with cv:
            cv.notify_all()
        # Brief pause so the detector thread gets a chance to run.
        time.sleep(0.0001)
    detector.stop = True
    print("total:", detector.cnt_total)
    print("found:", detector.cnt_yes)
    print("not found:", detector.cnt_no)
    # Wake the detector one last time so it observes stop and exits.
    with cv:
        cv.notify_all()
    detector.join()
|
amal029/DataStructuresAndAlgorithmsInPython | Tree/binaryTree.py | Python | mit | 6,263 | 0 | class BinaryTree:
"""This is the binary tree ADT
"""
def __init__(self, root=None):
self._root = root
    def element(self):
        # BUG(review): there is no ``element`` data attribute on BinaryTree,
        # so this returns the bound method object itself; probably meant
        # ``self._root.getElement()`` -- left as-is pending confirmation.
        return self.element
def root(self):
return self._root
def setRoot(self, r):
self._root = r
    def parent(self, v):
        """Return the parent Node of ``v``; raise if ``v`` is the root."""
        if v == self._root:
            raise RuntimeError('Root has no parent')
        else:
            # Search the children to get the node
            v = BinaryTree._getNode(self._root, v)
            # NOTE(review): _getNode returns None when v is absent, which
            # makes the next line raise AttributeError.
            return v.getParent()
    def children(self, v):
        """Return ``[left, right]`` subtrees (BinaryTree or None) of node v."""
        if v == self._root:
            return [self._root.getLeftSubTree(), self._root.getRightSubTree()]
        else:
            r = BinaryTree._getNode(self._root, v)
            # NOTE(review): r is None when v is not in the tree, which
            # makes the next line raise AttributeError.
            return [r.getLeftSubTree(), r.getRightSubTree()]
def __str__(self):
return str(BinaryTree.__getAllNodes(self._root))
# The tree traversal methods
    @classmethod
    def _getNode(cls, root, v):
        """Depth-first search for the node equal to ``v``; None if absent.

        ``root`` is a Node; subtrees are BinaryTree wrappers, hence the
        ``.root()`` hops during recursion.
        """
        ret = None
        # print('PIPI:', root, v)
        if root == v:
            ret = root
        if root.getLeftSubTree() is not None and ret is None:
            ret = BinaryTree._getNode(root.getLeftSubTree().root(), v)
        if root.getRightSubTree() is not None and ret is None:
            ret = BinaryTree._getNode(root.getRightSubTree().root(), v)
        return ret
@classmethod
def __getAllNodes(cls, root):
"""
Does DFS traversal
"""
ll = []
BinaryTree.dfs(ll, root, func='__str__')
return ll
    @staticmethod
    def dfs(ll, root, func=None):
        """Pre-order traversal appending results into the list ``ll``.

        ``func`` is the *name* of a zero-argument method looked up on
        each node (e.g. '__str__'); when None the node itself is stored.
        """
        res = root
        if func is not None:
            res = getattr(root, func)()
        ll.append(res)
        if root.getLeftSubTree() is not None:
            BinaryTree.dfs(ll, root.getLeftSubTree().root(), func)
        if root.getRightSubTree() is not None:
            BinaryTree.dfs(ll, root.getRightSubTree().root(), func)
class Node:
    """A single node of the binary tree.

    Holds a payload plus links to its parent node and to its left/right
    subtrees (each subtree link is a BinaryTree instance or None).
    """

    def __init__(self, element, parent=None, lc=None, rc=None):
        self._element = element
        self._parent = parent
        self._lc = lc
        self._rc = rc
        # Reserved for height bookkeeping; not maintained yet.
        self._height = None

    # --- payload ------------------------------------------------------
    def getElement(self):
        return self._element

    def setElement(self, e):
        self._element = e

    # --- structure links ----------------------------------------------
    def getParent(self):
        return self._parent

    def setParent(self, parent):
        self._parent = parent

    def getLeftSubTree(self):
        return self._lc

    def setLeftSubTree(self, lc):
        self._lc = lc

    def getRightSubTree(self):
        return self._rc

    def setRightSubTree(self, rc):
        self._rc = rc

    def __str__(self):
        return str(self._element)
class manipulateBinaryTree(object):
    """Mutating operations (insert/remove/attach) over a BinaryTree."""

    def __init__(self, tree):
        self._tree = tree

    def addRoot(self, e):
        """Install element *e* as the root of an empty tree."""
        if self._tree.root() is None:
            self._tree.setRoot(Node(e))
        else:
            raise RuntimeError('Tree is not empty')

    def insertLeft(self, v, e):
        """Hang *e* as the left child of node *v*; return the new Node."""
        vv = BinaryTree._getNode(self._tree.root(), v)
        if vv is not None:
            if vv.getLeftSubTree() is None:
                # Add
                w = Node(e, parent=vv)
                vv.setLeftSubTree(BinaryTree(w))
                return w
            else:
                # NOTE(review): restored "raise" -- the original line carried
                # stray dataset split markers ("| rais | e ...").
                raise RuntimeError('Node:', v, 'already has a left sub-tree')
        else:
            raise RuntimeError('Node:', v, 'not in the tree')

    def insertRight(self, v, e):
        """Hang *e* as the right child of node *v*; return the new Node."""
        vv = BinaryTree._getNode(self._tree.root(), v)
        if vv is not None:
            if vv.getRightSubTree() is None:
                # Add
                w = Node(e, parent=vv)
                vv.setRightSubTree(BinaryTree(w))
                return w
            else:
                raise RuntimeError('Node:', v.__str__(),
                                   'already has a right sub-tree')
        else:
            raise RuntimeError('Node:', v.__str__(), 'not in the tree')

    def remove(self, v):
        """Splice out node *v* (it must have at most one child); return its element.

        NOTE(review): removing a childless root leaves the root in place
        (no branch handles that case) -- preserved as-is.
        """
        vv = BinaryTree._getNode(self._tree.root(), v)
        if vv is not None:
            element = vv.getElement()
            lst = vv.getLeftSubTree()
            rst = vv.getRightSubTree()
            if vv == self._tree.root():
                if lst is not None and rst is not None:
                    raise RuntimeError('Node', v, 'has two children')
                elif lst is not None:
                    lst.root().setParent(None)
                    self._tree.setRoot(lst.root())
                elif rst is not None:
                    rst.root().setParent(None)
                    self._tree.setRoot(rst.root())
            else:
                # Get the parent
                vp = vv.getParent()
                # FIX(review): each branch below now checks that vp actually
                # has a left sub-tree before dereferencing it; the original
                # called vp.getLeftSubTree().root() unconditionally and could
                # raise AttributeError when vv was a right-only child.
                if lst is None and rst is None:
                    if vp.getLeftSubTree() is not None and \
                            vp.getLeftSubTree().root() == vv:
                        vp.setLeftSubTree(lst)
                    else:
                        vp.setRightSubTree(rst)
                elif lst is not None and rst is None:
                    lst.root().setParent(vp)
                    if vp.getLeftSubTree() is not None and \
                            vp.getLeftSubTree().root() == vv:
                        vp.setLeftSubTree(lst)
                    else:
                        vp.setRightSubTree(lst)
                elif lst is None and rst is not None:
                    rst.root().setParent(vp)
                    if vp.getLeftSubTree() is not None and \
                            vp.getLeftSubTree().root() == vv:
                        vp.setLeftSubTree(rst)
                    else:
                        vp.setRightSubTree(rst)
                else:
                    raise RuntimeError('Node', v, 'has two children')
            return element
        else:
            raise RuntimeError('Node', v, 'is not in the tree')

    def attach(self, v, t1, t2):
        """Attach trees *t1*/*t2* as the left/right children of leaf *v*."""
        vv = BinaryTree._getNode(self._tree.root(), v)
        if vv is not None:
            if vv.getLeftSubTree() is None and vv.getRightSubTree() is None:
                vv.setLeftSubTree(t1)
                vv.setRightSubTree(t2)
                # Now set the parent of t1's root and t2's root
                t1.root().setParent(vv)
                t2.root().setParent(vv)
            else:
                raise RuntimeError('Node:', v, 'is not an external node')
        else:
            raise RuntimeError('Node', v, 'is not in the tree')
|
csxeba/brainforge | brainforge/gradientcheck/raw_gradients.py | Python | gpl-3.0 | 1,211 | 0.000826 | import numpy as np
def analytical_gradients(network, X, Y):
    """Backpropagate once on (X, Y) and return the network's gradients.

    Runs a forward pass, feeds the cost derivative backward, and returns
    ``network.get_gradients(unfold=True)`` (flattened layout).
    """
    print("Calculating analytical gradients...")
    print("Forward pass:", end=" ")
    preds = network.predict(X)
    print("done! Backward pass:", end=" ")
    delta = network.cost.derivative(preds, Y)
    network.backpropagate(delta)
    print("done!")
    # NOTE(review): this line carried a stray dataset split marker
    # ("| return ...") in the original; restored.
    return network.get_gradients(unfold=True)
def numerical_gradients(network, X, Y, epsilon):
    """Central-difference estimate of d(cost)/d(weights).

    Perturbs every weight by +/- epsilon, re-evaluates the cost on
    (X, Y) and returns the gradient vector in the same flattened layout
    as the weights.  The network's weights are restored before returning.
    """
    ws = network.layers.get_weights(unfold=True)
    # NOTE(review): restored "numgrads = ..." -- the original line carried
    # a stray dataset split marker ("numgrads | = ...").
    numgrads = np.zeros_like(ws)
    perturb = np.zeros_like(ws)
    nparams = ws.size
    lstr = len(str(nparams))  # field width for the progress counter
    print("Calculating numerical gradients...")
    for i in range(nparams):
        print("\r{0:>{1}} / {2}".format(i + 1, lstr, nparams), end=" ")
        perturb[i] += epsilon
        network.layers.set_weights(ws + perturb, fold=True)
        pred1 = network.predict(X)
        cost1 = network.cost(pred1, Y)
        network.layers.set_weights(ws - perturb, fold=True)
        pred2 = network.predict(X)
        cost2 = network.cost(pred2, Y)
        numgrads[i] = (cost1 - cost2)
        perturb[i] = 0.
    numgrads /= 2. * epsilon
    # Restore the unperturbed weights.
    network.layers.set_weights(ws, fold=True)
    print("Done!")
    return numgrads
|
CINPLA/expipe-dev | python-neo/neo/test/__init__.py | Python | gpl-3.0 | 137 | 0 | # -*- coding: utf-8 -*-
"""
The subdirectory coretest contains tests for neo.core
The subdirectory iotest contains tests for neo.io
"""
|
StephanDollberg/stephandollberg.github.io | transform.py | Python | apache-2.0 | 441 | 0.002268 | #!/usr/bin/env python
# Transforms a jekyll dir structure into a directly servable structure:
# each leaf directory holding only index.html is replaced by a plain file
# of the same name containing that page's content.
# NOTE(review): stray dataset split markers removed ("len | (files)" and a
# lone "|" line before the final print).
import os
import sys
import shutil

for root, subdirs, files in os.walk('.'):
    if len(subdirs) == 0 and len(files) == 1 and files[0] == 'index.html':
        with open(root + '/' + files[0]) as f:
            data = f.read()
        shutil.rmtree(root)
        with open(root, 'w') as f:
            f.write(data)

        print('replaced', root)
khs26/pele | playground/native_code/tests/__init__.py | Python | gpl-3.0 | 206 | 0 | from _test_lbfgs_cpp import *
from _test_lj_cpp import *
from _test_lj_interaction_list import *
from _test_frozen_atoms import *
# NOTE(review): restored "from" -- stray dataset split marker removed.
from _test_bljcut import *

# NOTE(review): restored "==" (stray split marker removed).  ``unittest``
# is never imported here directly; presumably the star imports above drag
# it into the namespace -- verify against the _test_* modules.
if __name__ == "__main__":
    unittest.main()
|
qilicun/python | python2/PyMOTW-1.132/PyMOTW/smtpd/smtpd_debug.py | Python | gpl-3.0 | 255 | 0.003922 | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 Doug Hellmann All rights reserved.
#
"""Run a debugging SMTP server that dumps incoming messages to stdout."""
__version__ = "$Id$"
#end_pymotw_header

import smtpd
import asyncore

# NOTE(review): restored "1025" and "None" -- the original line carried
# stray dataset split markers ("1 | 025", "No | ne").
server = smtpd.DebuggingServer(('127.0.0.1', 1025), None)
asyncore.loop()
|
ptone/BirdFish | examples/pulse_duo.py | Python | bsd-3-clause | 2,578 | 0.008146 | import sys
from birdfish.input.midi import MidiDispatcher
from birdfish.lights import RGBLight, PulseChase, LightShow
from birdfish.output.lumos_network import LumosNetwork
from birdfish import tween

# create a light show - manages the updating of all lights
show = LightShow()

# Create a network - in this case, universe 3
dmx3 = LumosNetwork(3)

# add the network to the show
show.networks.append(dmx3)

# create an input interface
dispatcher = MidiDispatcher("MidiKeys")


def _add_rgb_elements(pulse, network, hue):
    """Create one RGBLight per DMX channel triple (1, 4, ..., 358) with the
    given hue, registering each light with both the network and the pulse."""
    element_id = 0
    for channel in range(1, 360, 3):
        element_id += 1
        light = RGBLight(
            start_channel=channel,
            name="pulse_%s" % element_id,
            attack_duration=0,
            release_duration=0,
            sustain_value=1,
        )
        light.hue = hue
        light.saturation = 1
        light.update_rgb()
        # add the light to the network
        network.add_element(light)
        pulse.elements.append(light)


# First pulse: blue-ish hue, narrow shoulders.
p = PulseChase(name="greenpulse",
        start_pos=12,
        end_pos=65,
        speed=3,
        move_tween=tween.IN_OUT_CUBIC,
        )
_add_rgb_elements(p, dmx3, .74)
p.start_pos = 12
p.left_width = 10
p.right_width = 10
p.left_shape = p.right_shape = tween.OUT_CIRC
p.speed = 3
p.moveto = p.end_pos = 65
p.trigger_toggle = True
show.add_element(p)

# Second pulse: different hue, wider shoulders, faster construction speed.
pp = PulseChase(name="otherpulse",
        start_pos=12,
        end_pos=65,
        speed=5,
        )
_add_rgb_elements(pp, dmx3, .24)
pp.start_pos = 12
pp.left_width = 15
pp.right_width = 15
pp.left_shape = pp.right_shape = tween.OUT_CIRC
pp.moveto = pp.end_pos = 65
pp.trigger_toggle = True
show.add_element(pp)

# set the input interface to trigger both elements
# midi code 70 is the "J" key on the qwerty keyboard for the midikeys app
dispatcher.add_observer((0, 70), p)
dispatcher.add_observer((0, 70), pp)

# startup the midi communication - runs in its own thread
dispatcher.start()

# start the show in a try block so that we can catch ^C and stop the midi
# dispatcher thread
try:
    show.run_live()
except KeyboardInterrupt:
    # cleanup
    dispatcher.stop()
    sys.exit(0)
|
Tattiuss/python_training | test/test_add_group.py | Python | apache-2.0 | 1,283 | 0.006235 | # -*- coding: utf-8 -*-
from model.group import Group
def test_add_group(app, db, json_groups, check_ui):
    """Create a group through the UI and verify it lands in the database.

    Compares the DB group list before and after creation (sorted by
    Group.id_or_max, since the new group has no id until persisted). When
    the check_ui fixture is set, additionally cross-checks the DB list
    against the list scraped from the UI.
    """
    group = json_groups
    old_groups = db.get_group_list()
    app.group.create(group)
    new_groups = db.get_group_list()
    old_groups.append(group)
    assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
    if check_ui:
        assert sorted(new_groups, key=Group.id_or_max) == sorted(app.group.get_group_list(), key=Group.id_or_max)
|
stormi/tsunami | src/primaires/scripting/fonctions/memoires.py | Python | bsd-3-clause | 5,094 | 0.000396 | # -*-coding:Utf-8 -*
# Copyright (c) 2014 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# | modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright n | otice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la fonction memoires."""
import re
from primaires.format.fonctions import supprimer_accents
from primaires.scripting.fonction import Fonction
class ClasseFonction(Fonction):
    """Renvoie les noms des mémoires de script."""
    # NOTE(review): the French docstrings below appear to be the in-game help
    # text of this scripting function (the whole scripting package is
    # French-facing) -- confirm before translating them.
    @classmethod
    def init_types(cls):
        # Register the accepted argument signatures: each target kind
        # (Salle/Personnage/Objet) with an optional regex string.
        cls.ajouter_types(cls.memoires_salle, "Salle")
        cls.ajouter_types(cls.memoires_salle, "Salle", "str")
        cls.ajouter_types(cls.memoires_perso, "Personnage")
        cls.ajouter_types(cls.memoires_perso, "Personnage", "str")
        cls.ajouter_types(cls.memoires_objet, "Objet")
        cls.ajouter_types(cls.memoires_objet, "Objet", "str")
    @staticmethod
    def memoires_salle(salle, expression=""):
        """Renvoie les mémoires contenues dans la salle précisée.
        Paramètres à entrer :
          * salle : la salle dans laquelle on va chercher les mémoires
          * expression (optionnel) : l'expression qui correspond aux mémoires
        Si une expression est précisée, ne renvoie que les mémoires
        qui correspondent à cette expression. L'expression doit être
        sous la forme d'une expression régulière. Voici un exemple :
          memoires = memoires(salle, "^[a-z]+$")
        """
        # Rooms with no script memory at all simply yield an empty list.
        if salle in importeur.scripting.memoires:
            return ClasseFonction.filtrer(
                    importeur.scripting.memoires[salle], expression)
        else:
            return []
    @staticmethod
    def memoires_perso(personnage, expression=""):
        """Renvoie les mémoires contenues dans le personnage précisé.
        Paramètres à entrer :
          * personnage : le personnage dans lequel on va chercher les mémoires
          * expression (optionnel) : l'expression qui correspond aux mémoires
        Si une expression est précisée, ne renvoie que les mémoires
        qui correspondent à cette expression. L'expression doit
        être sous la forme d'une expression régulière. Voici un exemple :
          memoires = memoires(personnage, "^[a-z]+$")
        """
        # NPCs store their memories on the prototype; fall back to the
        # character itself when no prototype attribute exists (players).
        personnage = hasattr(personnage, "prototype") and \
                personnage.prototype or personnage
        if personnage in importeur.scripting.memoires:
            return ClasseFonction.filtrer(
                    importeur.scripting.memoires[personnage], expression)
        else:
            return []
    @staticmethod
    def memoires_objet(objet, expression=""):
        """Renvoie les mémoires contenues dans l'objet précisé.
        Paramètres à entrer :
          * objet : l'objet dans lequel on va chercher les mémoires
          * expression (optionnel) : l'expression qui correspond aux mémoires
        Si une expression est précisée, ne renvoie que les mémoires
        qui correspondent à cette expression. L'expression doit
        être sous la forme d'une expression régulière. Voici un exemple :
          memoires = memoires(objet, "^[a-z]+$")
        """
        if objet in importeur.scripting.memoires:
            return ClasseFonction.filtrer(
                    importeur.scripting.memoires[objet], expression)
        else:
            return []
    @staticmethod
    def filtrer(noms, expression):
        """Filtre les noms de mémoires."""
        # Keep only names matching the (case-insensitive) regex; matching is
        # done on the accent-stripped form of each name.
        noms = list(noms)
        expression = re.compile(expression, re.I)
        for nom in list(noms):
            t_nom = supprimer_accents(nom)
            if expression.search(t_nom) is None:
                noms.remove(nom)
        return noms
|
mixdesign/radioice-track | workflow/workflow.py | Python | mit | 75,352 | 0.000187 | # encoding: utf-8
#
# Copyright (c) 2014 Dean Jackson <deanishe@deanishe.net>
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2014-02-15
#
"""
The :class:`Workflow` object is the main interface to this library.
See :ref:`setup` in the :ref:`user-manual` for an example of how to set
up your Python script to best utilise the :class:`Workflow` object.
"""
from __future__ import print_function, unicode_literals
import os
import sys
import string
import re
import plistlib
import subprocess
import unicodedata
import shutil
import json
import cPickle
import pickle
import time
import logging
import logging.handlers
try:
import xml.etree.cElementTree as ET
except ImportError: # pragma: no cover
import xml.etree.ElementTree as ET
####################################################################
# Standard system icons
####################################################################
# These icons are default OS X icons. They are super-high quality, and
# will be familiar to users.
# This library uses `ICON_ERROR` when a workflow dies in flames, so
# in my own workflows, I use `ICON_WARNING` for less fatal errors
# (e.g. bad user input, no results etc.)
# The system icons are all in this directory. There are many more than
# are listed here
ICON_ROOT = '/System/Library/CoreServices/CoreTypes.bundle/Contents/Resources'
ICON_ACCOUNT = os.path.join(ICON_ROOT, 'Accounts.icns')
ICON_BURN = os.path.join(ICON_ROOT, 'BurningIcon.icns')
ICON_CLOCK = os.path.join(ICON_ROOT, 'Clock.icns')
ICON_COLOR = os.path.join(ICON_ROOT, 'ProfileBackgroundColor.icns')
ICON_COLOUR = ICON_COLOR # Queen's English, if you please
ICON_EJECT = os.path.join(ICON_ROOT, 'EjectMediaIcon.icns')
# Shown when a workflow throws an error
ICON_ERROR = os.path.join(ICON_ROOT, 'AlertStopIcon.icns')
ICON_FAVORITE = os.path.join(ICON_ROOT, 'ToolbarFavoritesIcon.icns')
ICON_FAVOURITE = ICON_FAVORITE
ICON_GROUP = os.path.join(ICON_ROOT, 'GroupIcon.icns')
ICON_HELP = os.path.join(ICON_ROOT, 'HelpIcon.icns')
ICON_HOME = os.path.join(ICON_ROOT, 'HomeFolderIcon.icns')
ICON_INFO = os.path.join(ICON_ROOT, 'ToolbarInfo.icns')
ICON_NETWORK = os.path.join(ICON_ROOT, 'GenericNetworkIcon.icns')
ICON_NOTE = os.path.join(ICON_ROOT, 'AlertNoteIcon.icns')
ICON_SETTINGS = os.path.join(ICON_ROOT, 'ToolbarAdvanced.icns')
ICON_SWIRL = os.path.join(ICON_ROOT, 'ErasingIcon.icns')
ICON_SWITCH = os.path.join(ICON_ROOT, 'General.icns')
ICON_SYNC = os.path.join(ICON_ROOT, 'Sync.icns')
ICON_TRASH = os.path.join(ICON_ROOT, 'TrashIcon.icns')
ICON_USER = os.path.join(ICON_ROOT, 'UserIcon.icns')
ICON_WARNING = os.path.join(ICON_ROOT, 'AlertCautionIcon.icns')
ICON_WEB = os.path.join(ICON_ROOT, 'BookmarkIcon.icns')
####################################################################
# non-ASCII to ASCII diacritic folding.
# Used by `fold_to_ascii` method
####################################################################
ASCII_REPLACEMENTS = {
'À': 'A',
'Á': 'A',
'Â': 'A',
'Ã': 'A',
'Ä': 'A',
'Å': 'A',
'Æ': 'AE',
'Ç': 'C',
'È': 'E',
'É': 'E',
'Ê': 'E',
'Ë': 'E',
'Ì': 'I',
'Í': 'I',
'Î': 'I',
'Ï': 'I',
'Ð': 'D',
'Ñ': 'N',
'Ò': 'O',
'Ó': 'O',
'Ô': 'O',
'Õ': 'O',
'Ö': 'O',
'Ø': 'O',
'Ù': 'U',
'Ú': 'U',
'Û': 'U',
'Ü': 'U',
'Ý': 'Y',
'Þ': 'Th',
'ß': 'ss',
'à': 'a',
'á': 'a',
'â': 'a',
'ã': 'a',
'ä': 'a',
'å': 'a',
'æ': 'ae',
'ç': 'c',
'è': 'e',
'é': 'e',
'ê': 'e',
'ë': 'e',
'ì': 'i',
'í': 'i',
'î': 'i',
'ï': 'i',
'ð': 'd',
'ñ': 'n',
'ò': 'o',
'ó': 'o',
'ô': 'o',
'õ': 'o',
'ö': 'o',
'ø': 'o',
'ù': 'u',
'ú': 'u',
'û': 'u',
'ü': 'u',
'ý': 'y',
'þ': 'th',
'ÿ': 'y',
'Ł': 'L',
'ł': 'l',
'Ń': 'N',
'ń': 'n',
'Ņ': 'N',
'ņ': 'n',
'Ň': 'N',
'ň': 'n',
'Ŋ': 'ng',
'ŋ': 'NG',
'Ō': 'O',
'ō': 'o',
'Ŏ': 'O',
'ŏ': 'o',
'Ő': 'O',
'ő': 'o',
'Œ': 'OE',
'œ': 'oe',
'Ŕ': 'R',
'ŕ': 'r',
'Ŗ': 'R',
'ŗ': 'r',
'Ř': 'R',
'ř': 'r',
'Ś': 'S',
'ś': 's',
'Ŝ': 'S',
'ŝ': 's',
'Ş': 'S',
'ş': 's',
'Š': 'S',
'š': 's',
'Ţ': 'T',
'ţ': 't',
'Ť': 'T',
'ť': 't',
'Ŧ': 'T',
'ŧ': 't',
'Ũ': 'U',
'ũ': 'u',
'Ū': 'U',
'ū': 'u',
'Ŭ': 'U',
'ŭ': 'u',
'Ů': 'U',
'ů': 'u',
'Ű': 'U',
'ű': 'u',
'Ŵ': 'W',
'ŵ': 'w',
'Ŷ': 'Y',
'ŷ': 'y',
'Ÿ': 'Y',
'Ź': 'Z',
'ź': 'z',
'Ż': 'Z',
'ż': 'z',
'Ž': 'Z',
'ž': 'z',
'ſ': 's',
'Α': 'A',
'Β': 'B',
'Γ': 'G',
'Δ': 'D',
'Ε': 'E',
'Ζ': 'Z',
'Η': 'E',
'Θ': 'Th',
'Ι': 'I',
'Κ': 'K',
'Λ': 'L',
'Μ': 'M',
'Ν': 'N',
'Ξ': 'Ks',
'Ο': 'O',
'Π': 'P',
'Ρ': 'R',
'Σ': 'S',
'Τ': 'T',
'Υ': 'U',
'Φ': 'Ph',
'Χ': 'Kh',
'Ψ': 'Ps',
'Ω': 'O',
'α': 'a',
'β': 'b',
'γ': 'g',
'δ': 'd',
'ε': 'e',
'ζ': 'z',
'η': 'e',
'θ': 'th',
'ι': 'i',
'κ': 'k',
'λ': 'l',
'μ': 'm',
'ν': 'n',
'ξ': 'x',
'ο': 'o',
'π': 'p',
'ρ': 'r',
'ς': 's',
'σ': 's',
'τ': 't',
'υ': 'u',
'φ': 'ph',
'χ': 'kh',
'ψ': 'ps',
'ω': 'o',
'А': 'A',
'Б': 'B',
'В': 'V',
'Г': 'G',
'Д': 'D',
'Е': 'E',
'Ж': 'Zh',
'З': 'Z',
'И': 'I',
'Й': 'I',
'К': 'K',
'Л': 'L',
'М': 'M',
'Н': 'N',
'О': 'O',
'П': 'P',
'Р': 'R',
'С': 'S',
'Т': 'T',
'У': 'U',
'Ф': 'F',
'Х': 'Kh',
'Ц': 'Ts',
'Ч': 'Ch',
'Ш': 'Sh',
'Щ': 'Shch',
'Ъ': "'",
'Ы': 'Y',
'Ь': "'",
'Э': 'E',
'Ю': 'Iu',
'Я': 'Ia',
'а': 'a',
'б': 'b',
'в': 'v',
'г': 'g',
'д': 'd',
'е': 'e',
'ж': 'zh',
'з': 'z',
'и': 'i',
'й': 'i',
'к': 'k',
'л': 'l',
'м': 'm',
'н': 'n',
'о': 'o',
'п': 'p',
'р': 'r',
'с': 's',
'т': 't',
'у': 'u',
'ф': 'f',
'х': 'kh',
'ц': 'ts',
'ч': 'ch',
'ш': 'sh',
'щ': 'shch',
'ъ': "'",
'ы': 'y',
'ь': "'",
'э': 'e',
'ю': 'iu',
'я': 'ia',
# 'ᴀ': '',
# 'ᴁ': '',
# 'ᴂ': '',
# 'ᴃ': '',
# 'ᴄ': '',
# 'ᴅ': '',
# 'ᴆ': '',
# 'ᴇ': '',
# 'ᴈ': '',
# 'ᴉ': '',
# 'ᴊ': '',
# 'ᴋ': '',
# 'ᴌ': '',
| # 'ᴍ': '',
# 'ᴎ': '',
# 'ᴏ': '',
| # 'ᴐ': '',
# 'ᴑ': '',
# 'ᴒ': '',
# 'ᴓ': '',
# 'ᴔ': '',
# 'ᴕ': '',
# 'ᴖ': '',
# 'ᴗ': '',
# 'ᴘ': '',
# 'ᴙ': '',
# 'ᴚ': '',
# 'ᴛ': '',
# 'ᴜ': '',
# 'ᴝ': '',
# 'ᴞ': '',
# 'ᴟ': '',
# 'ᴠ': '',
# 'ᴡ': '',
# 'ᴢ': '',
# 'ᴣ': '',
# 'ᴤ': '',
# 'ᴥ': '',
'ᴦ': 'G',
'ᴧ': 'L',
'ᴨ': 'P',
'ᴩ': 'R',
'ᴪ': 'PS',
'ẞ': 'Ss',
'Ỳ': 'Y',
'ỳ': 'y',
'Ỵ': 'Y',
'ỵ': 'y',
'Ỹ': 'Y',
'ỹ': 'y',
}
####################################################################
# Used by `Workflow.filter`
####################################################################
# Anchor characters in a name
#: Characters that indicate the beginning of a "word" in CamelCase
INITIALS = string.ascii_uppercase + string.digits
#: Split on non-letters, numbers
split_on_delimiters = re.compile('[^a-zA-Z0-9]').split
# Match filter flags
#: Match items that start with ``query``
MATCH_STARTSWITH = 1
#: Match items whose capital letters start with ``query``
MATCH_CAPITALS = 2
#: Match items with a component "word" that matches ``query``
MATCH_ATOM = 4
#: Match items whose initials (based on atoms) start with ``query``
MATCH_INITIALS_STARTSWITH = 8
#: Match items whose initials (based on atoms) contain ``query``
MATCH_INITIALS_CONTAIN = 16
#: Combination of :const:`MATCH_INITIALS_STARTSWITH` and
#: :const:`MATCH_INITIALS_CONTAIN`
MATCH_INITIALS = 24
#: Match items if ``query`` is a substring
MATCH_SUBSTRING = 32
#: Match items if all characters in ``query`` appear in the item in order
MATCH_ALLCHARS = 64
#: Combination of all other ``MATCH_*`` const |
Zorro666/renderdoc | docs/python_api/examples/renderdoc_intro.py | Python | mit | 853 | 0.001172 | import renderdoc as rd
rd.InitialiseReplay(rd.GlobalEnvironment(), [])
# Open a capture file handle
cap = rd.OpenCaptureFile()
# Open a particular file - see also OpenBuffer to load from memory
status = cap.OpenFile('test.rdc', '', None)
# Make sure the file opened successfully
if status != rd.ReplayStatus.Succeeded:
    raise RuntimeError("Couldn't open file: " + str(status))
# Make sure we can replay
if not cap.LocalReplaySupport():
    raise RuntimeError("Capture cannot be replayed")
# Initialise the replay
status,controller = cap.OpenCapture(rd.ReplayOptions(), None)
if status != rd.ReplayStatus.Succeeded:
    raise RuntimeError("Couldn't initialise replay: " + str(status))
# Now we can use the controller!
print("%d top-level actions" % len(controller.GetRootActions()))
# Shut down in reverse order of creation.
# NOTE(review): the error paths above skip these shutdown calls -- fine for a
# sample script that exits immediately, but real code should use try/finally.
controller.Shutdown()
cap.Shutdown()
rd.ShutdownReplay()
|
FireClaw/HackerRank | Python/py-collections-namedtuple.py | Python | mit | 382 | 0.002618 | count, index = int(input()), input().split().index('MARKS')
# Average the MARKS column (located in the header row by the line above) over
# `count` input records, printed with two decimal places.
print('%.2f' % (sum([float(input().split()[index]) for _ in range(count)]) / count))
# Equivalent namedtuple-based solution (the exercise's intended approach):
# from collections import namedtuple
# count, Student = int(input()), namedtuple('Student', input())
# students = (Student(*input().split()) for _ in range(count))
# print('%.2f' % (sum(map(lambda s: float(s.MARKS), students)) / count))
|
npuichigo/voicenet | src/models/tf_model.py | Python | apache-2.0 | 13,402 | 0.001418 | # Copyright 2016 ASLP@NPU. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: npuichigo@gmail.com (zhangyuchao)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import sys
import tensorflow as tf
class TfModel(object):
"""A deep RNN model, for use of acoustic or duration modeling."""
def __init__(self, rnn_cell, dnn_depth, dnn_num_hidden, rnn_depth, rnn_num_hidden,
output_size, bidirectional=False, rnn_output=False, cnn_output=False,
look_ahead=5, mdn_output=False, mix_num=1, name="acoustic_model"):
"""Constructs a TfModel.
Args:
rnn_cell: Type of rnn cell including rnn, gru and lstm
dnn_depth: Number of DNN layers.
dnn_num_hidden: Number of hidden units in each DNN layer.
rnn_depth: Number of RNN layers.
rnn_num_hidden: Number of hidden units in each RNN layer.
output_size: Size of the output layer on top of the DeepRNN.
bidirectional: Whether to use bidirectional rnn.
rnn_output: Whether to use ROL(Rnn Output Layer).
cnn_output: Whether to use COL(Cnn Output Layer).
look_ahead: Look ahead window size, used together with cnn_output.
mdn_output: Whether to interpret last layer as mixture density layer.
mix_num: Number of gaussian mixes in mdn layer.
name: Name of the module.
"""
super(TfModel, self).__init__()
if rnn_cell == "rnn":
self._cell_fn = tf.contrib.rnn.BasicRNNCell
elif rnn_cell == "gru":
self._cell_fn = tf.contrib.rnn.GRUBlockCellV2
elif rnn_cell == "lstm":
self._cell_fn = tf.contrib.rnn.LSTMBlockCell
elif rnn_cell == "fused_lstm":
self._cell_fn = tf.contrib.rnn.LSTMBlockFusedCell
else:
raise ValueError("model type not supported: {}".format(rnn_cell))
self._rnn_cell = rnn_cell
self._dnn_depth = dnn_depth
self._dnn_num_hidden = dnn_num_hidden
self._rnn_depth = rnn_depth
self._rnn_num_hidden = rnn_num_hidden
self._output_size = output_size
self._bidirectional = bidirectional
self._rnn_output = rnn_output
self._cnn_output = cnn_output
self._look_ahead = look_ahead
self._mdn_output = mdn_output
self._mix_num = mix_num
self._input_module = [
tf.layers.Dense(units=self._dnn_num_hidden,
activation=tf.nn.relu,
name="linear_input_{}".format(i))
for i in range(self._dnn_depth)
]
if not self._bidirectional:
if rnn_cell == "fused_lstm":
self._rnns = [
self._cell_fn(self._rnn_num_hidden,
name="{0}_{1}".format(rnn_cell, i))
for i in range(self._rnn_depth)
]
else:
self._rnns = tf.nn.rnn_cell.MultiRNNCell([
self._cell_fn(self._rnn_num_hidden,
name="{0}_{1}".format(rnn_cell, i))
for i in range(self._rnn_depth)
])
else:
if rnn_cell == "fused_lstm":
self._rnns = {
"fw": [
self._cell_fn(self._rnn_num_hidden,
name="{0}_fw_{1}".format(rnn_cell, i))
for i in range(self._rnn_depth)
],
"bw": [
tf.contrib.rnn.TimeReversedFusedRNN(
self._cell_fn(self._rnn_num_hidden,
name="{0}_bw_{1}".format(rnn_cell, i)))
for i in range(self._rnn_depth)
],
}
else:
self._rnns = {
"fw": tf.nn.rnn_cell.MultiRNNCell([
self._cell_fn(self._rnn_num_hidden,
name="{0}_fw_{1}".format(rnn_cell, i))
for i in range(self._rnn_depth)
]),
"bw": tf.nn.rnn_cell.MultiRNNCell([
self._cell_fn(self._rnn_num_hidden,
name="{0}_bw_{1}".format(rnn_cell, i))
for i in range(self._rnn_depth)
]),
}
# If mdn output is used, output size should be mix_num * (2 * output_dim + 1).
if self._mdn_output:
output_size = self._mdn_output_size = self._mix_num * (2 * self._output_size + 1)
else:
output_size = self._output_size
if self._rnn_output and self._cnn_output:
raise ValueError("rnn_output and cnn_output cannot be "
"specified at the same time.")
if self._rnn_output:
self._output_module = tf.contrib.rnn.BasicRNNCell(
output_size, activation=tf.identity)
elif self._cnn_output:
self._output_module = {
"linear": tf.layers.Dense(output_size, name="linear"),
"cnn": tf.layers.Conv2D(
filters=1,
kernel_size=(self._look_ahead, 1),
padding="VALID",
name="cnn_output")
}
else:
self._output_module = tf.layers.Dense(output_size, name="linear_output")
def __call__(self, input_sequence, input_length):
"""Builds the deep LSTM model sub-graph.
Args:
input_sequence: A 3D Tensor with padded input sequence data.
input_length. Actual length of each sequence in padded input data.
Returns:
Tuple of the Tensor of output logits for the batch, with dimensions
`[truncation_length, batch_size, output_size]`, and the
final state of the unrolled core,.
"""
output_sequence = input_sequence
for layer in self._input_module:
output_sequence = layer(output_sequence)
if not self._bidirectional:
| if self._rnn_cell == 'fused_lstm':
output_sequence = tf.transpose(output_sequence, [1, 0, 2])
| new_states = []
for cell in self._rnns:
output_sequence, new_state = cell(
inputs=output_sequence,
sequence_length=input_length,
dtype=tf.float32)
new_states.append(new_state)
output_sequence = tf.transpose(output_sequence, [1, 0, 2])
final_state = tuple(new_states)
else:
output_sequence, final_state = tf.nn.dynamic_rnn(
cell=self._rnns,
inputs=output_sequence,
sequence_length=input_length,
dtype=tf.float32)
else:
if self._rnn_cell == 'fused_lstm':
output_sequence = tf.transpose(output_sequence, [1, 0, 2])
fw_new_states, bw_new_states = [], []
for i in range(self._rnn_depth):
fw_output, fw_new_state = self._rnns["fw"][i](
inputs=output_sequence,
sequence_length=input_length,
dtype=tf.float32)
fw_new_states.append(fw_new_state)
bw_output, b |
showerst/openstates | openstates/nc/__init__.py | Python | gpl-3.0 | 5,650 | 0.007965 | import datetime
import lxml.html
from billy.utils.fulltext import text_after_line_numbers
from .bills import NCBillScraper
from .legislators import NCLegislatorScraper
from .committees import NCCommitteeScraper
from .votes import NCVoteScraper
metadata = dict(
name='North Carolina',
abbreviation='nc',
capitol_timezone='America/New_York',
legislature_name='North Carolina General Assembly',
legislature_url='http://www.ncleg.net/',
chambers = {
'upper': {'name': 'Senate', 'title': 'Senator'},
'lower': {'name': 'House', 'title': 'Representative'},
},
terms=[
#{'name': '1985-1986',
# 'sessions': ['1985', '1985E1'],
# 'start_year': 1985, 'end_year': 1986},
#{'name': '1987-1988',
# 'sessions': ['1987'],
# 'start_year': 1987, 'end_year': 1988},
#{'name': '1989-1990',
# 'sessions': ['1989', '1989E1', '1989E2'],
# 'start_year': 1989, 'end_year': 1990},
#{'name': '1991-1992',
# 'sessions': ['1991', '1991E1'],
# 'start_year': 1991, 'end_year': 1992},
#{'name': '1993-1994',
# 'sessions': ['1993', '1993E1'],
# 'start_year': 1993, 'end_year': 1994},
#{'name': '1995-1996',
# 'sessions': ['1995', '1995E1', '1995E2'],
# 'start_year': 1995, 'end_year': 1996},
#{'name': '1997-1998',
# 'sessions': ['1997', '1997E1'],
# 'start_year': 1997, 'end_year': 1998},
#{'name': '1999-2000',
# 'sessions': ['1999', '1999E1', '1999E2'],
# 'start_year': 1999, 'end_year': 2000},
#{'name': '2001-2002',
# 'sessions': ['2001', '2001E1'],
# 'start_year': 2001, 'end_year': 2002},
#{'name': '2003-2004',
# 'sessions': ['2003', '2003E1', '2003E2', '2003E3'],
# 'start_year': 2003, 'end_year': 2004},
#{'name': '2005-2006',
# 'sessions': ['2005'],
# 'start_year': 2005, 'end_year': 2006},
#{'name': '2007-2008',
# 'sessions': ['2007', '2007E1', '2007E3'],
# 'start_year': 2007, 'end_year': 2008},
{'name': '2009-2010',
'sessions': ['2009'],
'start_year': 2009, 'end_year': 2010},
{'name': '2011-2012',
'sessions': ['2011'],
'start_year': 2011, 'end_year': 2012},
{'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013, 'end_year': 2014},
{'name': '2015-2016',
'sessions': ['2015','2015E1','2015E2', '2015E3', '2015E4'],
'start_year': 2015, 'end_year': 2016},
],
session_details={
'2009': {'start_date': datetime.date(2009,1,28), 'type': 'primary',
'display_name': '2009-2010 Session',
'_scraped_name': '2009-2010 Session',
},
'2011': {'start_date': datetime.date(2011,1,26), 'type': 'primary',
'display_name': '2011-2012 Session',
'_scraped_name': '2011-2012 Session',
},
'2013': {'start_date': datetime.date(2013,1,30), 'type': 'primary',
'display_name': '2013-2014 Session',
'_scraped_name': '2013-2014 Session',
},
'2015': {'start_date': datetime.date(2015,1,30), 'type': 'primary',
'display_name': '2015-2016 Session',
'_scraped_name': '2015-2016 Session',
},
'2015E1': {'type': 'special',
| 'display_name': '2016 Extra Session 1',
'_scraped_name': '2016 Extra Session 1',
},
'2015E2': {'type': ' | special',
'display_name': '2016 Extra Session 2',
'_scraped_name': '2016 Extra Session 2',
},
'2015E3': {'type': 'special',
'display_name': '2016 Extra Session 3',
'_scraped_name': '2016 Extra Session 3',
},
'2015E4': {'type': 'special',
'display_name': '2016 Extra Session 4',
'_scraped_name': '2016 Extra Session 4',
},
},
_ignored_scraped_sessions=[
'2008 Extra Session', '2007-2008 Session',
'2007 Extra Session', '2005-2006 Session',
'2004 Extra Session', '2003-2004 Session',
'2003 Extra Session 1', '2003 Extra Session 2',
'2002 Extra Session', '2001-2002 Session',
'2000 Special Session', '1999-2000 Session',
'1999 Special Session', '1998 Special Session',
'1997-1998 Session', '1996 2nd Special Session',
'1996 1st Special Session', '1995-1996 Session',
'1994 Special Session', '1993-1994 Session',
'1991-1992 Session', '1991 Special Session',
'1990 Special Session', '1989-1990 Session',
'1989 Special Session', '1987-1988 Session',
'1986 Special Session', '1985-1986 Session'],
feature_flags=['subjects', 'influenceexplorer'],
)
def session_list():
    """Scrape the available session names from the ncleg.net session dropdown.

    Presumably compared by billy against the metadata's _scraped_name values
    to detect sessions missing from the metadata above -- TODO confirm.
    """
    from billy.scrape.utils import url_xpath
    return url_xpath('http://www.ncleg.net',
                     '//select[@name="sessionToSearch"]/option/text()')
def extract_text(doc, data):
    """Return the plain text of a bill document.

    The incoming ``doc`` value is never read (the original implementation
    immediately rebound it); ``data`` is the raw HTML. Text is taken from
    paragraphs whose class attribute starts with "a", joined by spaces.
    """
    tree = lxml.html.fromstring(data)
    paragraphs = tree.xpath('//p[starts-with(@class, "a")]')
    return ' '.join(node.text_content() for node in paragraphs)
|
vgrem/Office365-REST-Python-Client | office365/directory/applications/public_client_application.py | Python | mit | 110 | 0 | from office365.runtime.client_val | ue import ClientValue
class PublicClientApplication(ClientValue):
    """Empty ClientValue subclass; currently declares no fields of its own.

    Fixed: a stray "|" separator had corrupted the `pass` line.
    """
    pass
|
janelia-flyem/neuroglancer | python/neuroglancer/google_credentials.py | Python | apache-2.0 | 3,476 | 0.002014 | # @license
# Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import concurrent.futures
import logging
import threading
from . import credentials_provider
from .futures import run_on_new_thread
class GoogleOAuth2FlowCredentialsProvider(credentials_provider.CredentialsProvider):
    """Obtains Google OAuth2 access tokens via apitools' interactive flow.

    scopes/client_id/client_secret identify the OAuth2 client; get_new()
    returns the token dict expected by the credentials_provider base.
    """
    def __init__(self, scopes, client_id, client_secret):
        super(GoogleOAuth2FlowCredentialsProvider, self).__init__()
        self.scopes = scopes
        self.client_id = client_id
        self.client_secret = client_secret
        # Make sure logging is initialized. Does nothing if logging has already
        # been initialized.
        logging.basicConfig()
    def get_new(self):
        def func():
            # Imported lazily so apitools is only required when credentials
            # are actually requested.
            import apitools.base.py.credentials_lib
            result = apitools.base.py.credentials_lib.GetCredentials(
                package_name='',
                scopes=self.scopes,
                client_id=self.client_id,
                client_secret=self.client_secret,
                user_agent=u'python-neuroglancer',
            )
            return dict(tokenType=u'Bearer', accessToken=result.get_access_token().access_token)
        # run_on_new_thread returns a future -- presumably to keep the
        # potentially blocking OAuth flow off the caller's thread; confirm.
        return run_on_new_thread(func)
class GoogleApplicationDefaultCredentialsProvider(credentials_provider.CredentialsProvider):
    """Provides access tokens from Google Application Default Credentials.

    The underlying google.auth credentials object is created once (under a
    lock) and refreshed whenever it is no longer valid.
    """
    def __init__(self):
        super(GoogleApplicationDefaultCredentialsProvider, self).__init__()
        # Make sure logging is initialized. Does nothing if logging has already
        # been initialized.
        logging.basicConfig()
        self._lock = threading.Lock()
        self._credentials = None
    def get_new(self):
        def func():
            with self._lock:
                if self._credentials is None:
                    # Lazy import: google.auth is only needed on first use.
                    import google.auth
                    credentials, project = google.auth.default()
                    del project
                    self._credentials = credentials
                if not self._credentials.valid:
                    import google.auth.transport.requests
                    # NOTE(review): `requests` itself looks unused here --
                    # presumably imported to fail fast when it is missing.
                    import requests
                    request = google.auth.transport.requests.Request()
                    self._credentials.refresh(request)
                # fixed: a stray "|" separator had split `self` on this line.
                return dict(tokenType=u'Bearer', accessToken=self._credentials.token)
        return run_on_new_thread(func)
# Lazily-constructed process-wide singleton, guarded by its companion lock.
_global_google_application_default_credentials_provider = None
# fixed: a stray "|" separator had split `threading` on this line.
_global_google_application_default_credentials_provider_lock = threading.Lock()


def get_google_application_default_credentials_provider():
    """Return the shared ADC credentials provider, creating it on first use."""
    global _global_google_application_default_credentials_provider
    with _global_google_application_default_credentials_provider_lock:
        if _global_google_application_default_credentials_provider is None:
            _global_google_application_default_credentials_provider = GoogleApplicationDefaultCredentialsProvider(
            )
        return _global_google_application_default_credentials_provider
|
akhilman/cement | examples/load_extensions_via_config/myapp.py | Python | bsd-3-clause | 1,189 | 0.002523 |
from cement.core.foundation import CementApp
class MyApp(CementApp):
    class Meta:
        label = 'myapp'
        config_files = [
            './myapp.conf',
        ]

    def validate_config(self):
        """Normalize the 'extensions' config key to a list and load each one."""
        if 'extensions' in self.config.keys('myapp'):
            exts = self.config.get('myapp', 'extensions')

            # convert a comma-separated string to a list
            if isinstance(exts, str):
                ext_list = exts.split(',')

                # clean up extra space if they had it inbetween commas;
                # fixed: this was a generator expression, but the value is
                # stored back into the config and iterated again below, and a
                # generator can only be consumed once
                ext_list = [x.strip() for x in ext_list]

                # set the new extensions value in the config
                self.config.set('myapp', 'extensions', ext_list)

            # otherwise, if it's a list (ConfigObj?)
            elif isinstance(exts, list):
                ext_list = ext_list = exts

            # NOTE(review): if exts is neither str nor list, ext_list is
            # unbound and the loop below raises NameError -- confirm whether
            # other config backends can produce such values.
            for ext in ext_list:
                # load the extension
                self.ext.load_extension(ext)

                # add to meta data
                self._meta.extensions.append(ext)
def main():
    """Create, set up, and run the application, always closing it on exit."""
    app = MyApp()
    try:
        app.setup()
        app.run()
    finally:
        # close() releases handlers/hooks even when setup/run raised.
        app.close()


if __name__ == '__main__':
    main()
|
geospatial-services-framework/gsfpyarc | gsfarc/test/test_datatype_stringarray.py | Python | mit | 803 | 0.008717 | """
"""
import unittest
import arcpy
from gsfarc.test import config
class TestDataTypeStringArray(unittest.TestCase):
    """Tests the string array task datatype."""
    @classmethod
    def setUpClass(cls):
        # Register the IDL toolbox exposing the QA task exercised below.
        config.setup_idl_toolbox('test_datatype_stringarray', 'qa_idltaskengine_datatype_stringarray')
    @classmethod
    def tearDownClass(cls):
        pass
    def test_datatype_stringarray_one_dimension(self):
        """Verify a one dimensional array of strings returns a semicolon separated string list."""
        values = ['foo', 'bar', 'baz']  # renamed from `input`, which shadowed the builtin
        expect_dims = [len(values)]
        result = arcpy.QA_IDLTaskEngine_DataType_StringArray_GSF(values, expect_dims)
        self.assertEqual(result[0], ';'.join(str(i) for i in values))
if __name__ == '__main__':
unittest.main() |
jackTheRipper/iotrussia | web_server/src/server/server/wsgi.py | Python | gpl-2.0 | 1,419 | 0.000705 | """
WSGI config for server project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WS | GI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites | in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "server.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
ctogle/dilapidator | test/geometry/tform_tests.py | Python | mit | 1,264 | 0.051424 | from dilap.geometry.vec3 import vec3
from dilap.geometry.quat import quat
from dilap.geometry.tform import tform
import dilap.geometry.tools as dpr
import matplotlib.pyplot as plt
import unittest,numpy,math
import pdb
#python3 -m unittest discover -v ./ "*tests.py"
class test_tform(unittest.TestCase):
def setUp(self):
a1 = dpr.PI2
v1,v2,v3 = vec3(1,0,0),vec3(0,1,0),vec3(0,0,1)
q0 = quat(0,0,0,0).av( 0,v3)
q1 = quat(0,0,0,0).av(a1,v1)
q2 = quat(0,0,0,0).av(a1,v2)
q3 = quat(0,0,0,0).av(a1,v3)
self.tf1 = tform(vec3(1,1,0),q3.cp(),vec3(1,2,1))
self.tf2 = tform(vec3(1,1,0),q3.cp(),vec3(1,2,1))
self.tf3 = tform(vec3(0,1,0),q1,vec3(1,1,1))
a2 = dpr.PI
q4 = quat(0,0,0,0).av(a2,v3)
self.tf4 = tform(vec3(0,2,0),q4,vec3(1,4,1))
def test_cp(self):
self.assertTrue(self.tf1 is self.tf1)
self.assertFalse(self.tf1 is self.tf1.cp())
self.assertTrue | (self.tf1 == self.tf1.cp | ())
self.assertFalse(self.tf1 is self.tf2)
self.assertTrue(self.tf1 == self.tf2)
def test_true(self):
tf4 = self.tf1.true(self.tf2)
self.assertEqual(self.tf4,tf4)
if __name__ == '__main__':
unittest.main()
|
jokajak/itweb | data/env/lib/python2.6/site-packages/ToscaWidgets-0.9.11-py2.6.egg/tw/core/command.py | Python | gpl-3.0 | 26,579 | 0.002257 | import errno
import re
import operator
import shutil
import sys
import os
import tempfile
import subprocess
try:
from hashlib import md5 as hash_constructor
except ImportError:
from md5 import new as hash_constructor
import mimetypes
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import pkg_resources
from setuptools import Command
from distutils import log
from tw.core.resources import registry, merge_resources, _JavascriptFileIter
from tw.core.util import OrderedSet
class archive_tw_resources(Command):
"""
Setuptools command to copy and optionally compress all static resources
from a series of distributions and their dependencies into a directory
where they can be served by a fast web server.
To enable compression of CSS and JS files you will need to have installed a
Java Runtime Environment and YUICompressor
(http://www.julienlecomte.net/yuicompressor)
In order for resources from widget eggs to be properly collected these
need to have a 'toscawidgets.widgets' 'widgets' entry-point which points
to a module which, when imported, instantiates all needed JS and CSS Links.
The result is laid out in the output directory in such a way that when
a a web server such as Apache or Nginx is configured to map URLS that
begin with /toscawidgets to that directory static files will be served
from there bypassing python completely.
To integrate this command into your build process you can add these lines
to ``setup.cfg``::
[archive_tw_resources]
output = /home/someuser/public_html/toscawidgets/
compresslevel = 2
distributions = MyProject
yuicompressor = /home/someuser/bin/yuicompressor.jar
onepass = true
[aliases]
| deploy = archive_tw_resources --force install
This way you can run::
$ python setup.py deploy
To install a new version of your app and copy/compress resources.
"""
descr | iption = "Copies ToscaWidgets static resources into a directory where"\
" a fast web-server can serve them."
user_options = [
("output=", "o",
"Output directory. If it doesn't exist it will be created."),
("force", "f", "If output dir exists, it will be ovewritten"),
("onepass", None, "If given, yuicompressor will only be called once "
"for each kind of file with a all files "
"together and then separated back into smaller "
"files"),
("compresslevel=", "c",
"Compression level: 0) for no compression (default). "
"1) for js-minification. "
"2) for js & css compression"),
("yuicompressor=", None, "Name of the yuicompressor jar."),
("distributions=", "d",
"List of widget dists. to include resources from "
"(dependencies will be handled recursively). Note that "
"these distributions need to define a 'toscawidgets.widgets' "
"'widgets' entrypoint pointing to a a module where "
"resources are located."),
("requireonce", "r",
"Surround the gathered Javascript with a require_once-guard."
)
]
IGNORED_NAMES = [".svn",]
"""
A list of names to ignore, used to prevent collecting
subversion control data.
"""
NO_VARIANT = object()
def initialize_options(self):
self.output = ''
self.force = False
self.onepass = False
self.compresslevel = 0
self.distributions = []
self.yuicompressor = 'yuicompressor.jar'
self.requireonce = False
def finalize_options(self):
self.ensure_string("output")
self.ensure_string("yuicompressor")
self.ensure_string_list("distributions")
self.compresslevel = int(self.compresslevel)
self.yuicompressor = os.path.abspath(self.yuicompressor)
def run(self):
if not self.output:
print >> sys.stderr, "Need to specify an output directory"
return
if not self.distributions:
print >> sys.stderr, "Need to specify at least one distribution"
return
if os.path.exists(self.output) and not self.force:
print >> sys.stderr, ("Destination dir %s exists. " % self.output)+\
"Use -f to ovewrite"
return
if self.compresslevel > 0 and not os.path.exists(self.yuicompressor):
print >> sys.stderr, "Could not find YUICompressor at " + \
self.yuicompressor
return
self.tempdir = tempdir = tempfile.mktemp()
self.execute(os.makedirs, (tempdir,), "Creating temp dir %s" % tempdir)
if self.compresslevel > 0:
if self.onepass:
self.writer = OnePassCompressingWriter(self, tempdir)
else:
self.writer = CompressingWriter(self, tempdir)
else:
self.writer = FileWriter(self, tempdir)
self.execute(self._copy_resources, tuple(), "Extracting resources")
self.writer.finalize()
if os.path.exists(self.output):
self.execute(shutil.rmtree, (self.output,),
"Deleting old output dir %s" % self.output)
self.execute(os.makedirs, (self.output,), "Creating output dir")
final_dest = os.path.join(self.output, registry.prefix.strip('/'))
self.execute(shutil.move, (tempdir, final_dest),
"Moving build to %s" % final_dest)
def _load_widgets(self, distribution):
try:
requires = [r.project_name for r in
pkg_resources.get_distribution(distribution).requires()]
map(self._load_widgets, requires)
mod = pkg_resources.load_entry_point(distribution,
'toscawidgets.widgets',
'widgets')
self.announce("Loaded %s" % mod.__name__)
except ImportError, e:
self.announce("%s has no widgets entrypoint" % distribution)
def _copy_resources(self):
map(self._load_widgets, self.distributions)
for webdir, dirname in registry:
parts = filter(None, webdir.split('/'))
modname = parts[0]
fname = '/'.join(parts[1:])
self.execute(self._copy_resource_tree, (modname, fname),
"Copying %s recursively into %s" %
(dirname, self.writer.base))
def _copy_resource_tree(self, modname, fname):
try:
for name in pkg_resources.resource_listdir(modname, fname):
if name in self.IGNORED_NAMES:
continue
name = '/'.join((fname, name))
rel_name = '/'.join((modname, name))
if pkg_resources.resource_isdir(modname, name):
self.execute(self._copy_resource_tree, (modname, name),
"Recursing into " + rel_name)
else:
full_name = pkg_resources.resource_filename(modname, name)
ct, _ = mimetypes.guess_type(full_name)
require_once = None
if self.requireonce and ct == "application/javascript":
require_once = _JavascriptFileIter._marker_name(modname, name)
stream = pkg_resources.resource_stream(modname, name)
filename = '/'.join((modname, name))
self.execute(self.writer.write_file, (stream, filename),
"Processing " + filename)
if require_once is not None:
filename = os.path.join(self.tempdir, filename)
inf = open(filename)
outname = tempfile.mktemp()
outf = open(outname, "w")
outf.write(_JavascriptFileIter.START_TEM |
okfn/spendingstories | webapp/context_processors.py | Python | gpl-3.0 | 471 | 0.006369 | from django.conf import settings
def i18n(request):
| from django.utils import translation
def get_language_code():
base_code = translation.get_language()
sub_codes = base_code.split('-')
if len(sub_codes) > 1:
sub_codes[1] = sub_codes[1].upper()
return "_".join(sub_codes)
else:
return base_code
context_extras = {}
context_extras['LANG'] = get_language_co | de()
return context_extras |
seckcoder/lang-learn | python/sklearn/sklearn/covariance/shrunk_covariance_.py | Python | unlicense | 17,960 | 0.000445 | """
Covariance estimators using shrinkage.
Shrinkage corresponds to regularising `cov` using a convex combination:
shrunk_cov = (1-shrinkage)*cov + shrinkage*structured_estimate.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Virgile Fritsch <virgile.fritsch@inria.fr>
#
# License: BSD Style.
# avoid division truncation
from __future__ import division
import warnings
import numpy as np
from .empirical_covariance_ import empirical_covariance, EmpiricalCovariance
from ..utils import array2d
###############################################################################
# ShrunkCovariance estimator
def shrunk_covariance(emp_cov, shrinkage=0.1):
"""Calculates a covariance matrix shrunk on the diagonal
Parameters
----------
emp_cov: array-like, shape (n_features, n_features)
Covariance matrix to be shrunk
shrinkage: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Returns
-------
shrunk_cov: array-like
shrunk covariance
Notes
-----
The regularized (shrunk) covariance is given by
(1 - shrinkage)*cov
+ shrinkage*mu*np.id | entity(n_features)
where mu = trace(cov) / n_features
"""
emp_cov = array2d(emp_c | ov)
n_features = emp_cov.shape[0]
mu = np.trace(emp_cov) / n_features
shrunk_cov = (1. - shrinkage) * emp_cov
shrunk_cov.flat[::n_features + 1] += shrinkage * mu
return shrunk_cov
class ShrunkCovariance(EmpiricalCovariance):
"""Covariance estimator with shrinkage
Parameters
----------
store_precision : bool
Specify if the estimated precision is stored
shrinkage: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Attributes
----------
`covariance_` : array-like, shape (n_features, n_features)
Estimated covariance matrix
`precision_` : array-like, shape (n_features, n_features)
Estimated pseudo inverse matrix.
(stored only if store_precision is True)
`shrinkage`: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularized covariance is given by
(1 - shrinkage)*cov
+ shrinkage*mu*np.identity(n_features)
where mu = trace(cov) / n_features
"""
def __init__(self, store_precision=True, assume_centered=False,
shrinkage=0.1):
EmpiricalCovariance.__init__(self, store_precision=store_precision,
assume_centered=assume_centered)
self.shrinkage = shrinkage
def fit(self, X, y=None):
""" Fits the shrunk covariance model
according to the given training data and parameters.
Parameters
----------
X: array-like, shape = [n_samples, n_features]
Training data, where n_samples is the number of samples
and n_features is the number of features.
y: not used, present for API consistence purpose.
assume_centered: Boolean
If True, data are not centered before computation.
Useful to work with data whose mean is significantly equal to
zero but is not exactly zero.
If False, data are centered before computation.
Returns
-------
self: object
Returns self.
"""
# Not calling the parent object to fit, to avoid a potential
# matrix inversion when setting the precision
if self.assume_centered:
self.location_ = np.zeros(X.shape[1])
else:
self.location_ = X.mean(0)
covariance = empirical_covariance(X,
assume_centered=self.assume_centered)
covariance = shrunk_covariance(covariance, self.shrinkage)
self._set_covariance(covariance)
return self
###############################################################################
# Ledoit-Wolf estimator
def ledoit_wolf_shrinkage(X, assume_centered=False, block_size=1000):
"""Estimates the shrunk Ledoit-Wolf covariance matrix.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Data from which to compute the Ledoit-Wolf shrunk covariance shrinkage
assume_centered: Boolean
If True, data are not centered before computation.
Useful to work with data whose mean is significantly equal to
zero but is not exactly zero.
If False, data are centered before computation.
block_size: int,
Size of the blocks into which the covariance matrix will be split.
Returns
-------
shrinkage: float
Coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularized (shrunk) covariance is:
(1 - shrinkage)*cov
+ shrinkage * mu * np.identity(n_features)
where mu = trace(cov) / n_features
"""
X = np.asarray(X)
# for only one feature, the result is the same whatever the shrinkage
if len(X.shape) == 2 and X.shape[1] == 1:
return 0.
if X.ndim == 1:
X = np.reshape(X, (1, -1))
warnings.warn("Only one sample available. " \
"You may want to reshape your data array")
n_samples = 1
n_features = X.size
else:
n_samples, n_features = X.shape
# optionaly center data
if not assume_centered:
X = X - X.mean(0)
# number of blocks to split the covariance matrix into
n_splits = int(n_features / block_size)
X2 = X ** 2
emp_cov_trace = np.sum(X2, axis=0) / n_samples
mu = np.sum(emp_cov_trace) / n_features
beta_ = 0. # sum of the coefficients of <X2.T, X2>
delta_ = 0. # sum of the *squared* coefficients of <X.T, X>
# starting block computation
for i in xrange(n_splits):
for j in xrange(n_splits):
rows = slice(block_size * i, block_size * (i + 1))
cols = slice(block_size * j, block_size * (j + 1))
beta_ += np.sum(np.dot(X2.T[rows], X2[:, cols]))
delta_ += np.sum(np.dot(X.T[rows], X[:, cols]) ** 2)
rows = slice(block_size * i, block_size * (i + 1))
beta_ += np.sum(np.dot(X2.T[rows], X2[:, block_size * n_splits:]))
delta_ += np.sum(
np.dot(X.T[rows], X[:, block_size * n_splits:]) ** 2)
for j in xrange(n_splits):
cols = slice(block_size * j, block_size * (j + 1))
beta_ += np.sum(np.dot(X2.T[block_size * n_splits:], X2[:, cols]))
delta_ += np.sum(
np.dot(X.T[block_size * n_splits:], X[:, cols]) ** 2)
delta_ += np.sum(np.dot(X.T[block_size * n_splits:],
X[:, block_size * n_splits:]) ** 2)
delta_ /= n_samples ** 2
beta_ += np.sum(np.dot(
X2.T[block_size * n_splits:], X2[:, block_size * n_splits:]))
# use delta_ to compute beta
beta = 1. / (n_features * n_samples) * (beta_ / n_samples - delta_)
# delta is the sum of the squared coefficients of (<X.T,X> - mu*Id) / p
delta = delta_ - 2. * mu * emp_cov_trace.sum() + n_features * mu ** 2
delta /= n_features
# get final beta as the min between beta and delta
beta = min(beta, delta)
# finally get shrinkage
shrinkage = beta / delta
return shrinkage
def ledoit_wolf(X, assume_centered=False, block_size=1000):
"""Estimates the shrunk Ledoit-Wolf covariance matrix.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Data from which to compute the covariance estimate
assume_centered: Boolean
If True, data are not centered before computation.
Usefull to work with data whose mean is significantly equal to
zero but is not exactly zero.
If False, data are centered before computation.
block_size: int,
Size of the blocks into which the covariance matrix will be split.
If n_features > `block_size`, an error will be raised since the
|
aevum/moonstone | src/moonstone/ilsa/plugins/measure/gui/qt/widget/rulerproperties_ui.py | Python | lgpl-3.0 | 6,860 | 0.003061 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/igor/Desenvolvimento/neppo/moonstone/src/moonstone/ilsa/plugins/measure/resources/ui/qt/rulerproperties.ui'
#
# Created: Fri Feb 21 10:22:36 2014
# by: pyside-uic 0.2.14 running on PySide 1.1.2
#
# WARNING! All changes made in this file will be lost!
from PySide import Q | tCore, QtGui
class Ui_RulerProperties(object):
def setupUi(self, RulerProperties):
RulerProperties.setObjectName("RulerProperties")
RulerProperties.resize(212, 284)
RulerProperties.setMinimumSize(QtCore.QSize(0, 0))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/edit-select.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
RulerProperties.setWindowIcon(icon)
self.ve | rticalLayout = QtGui.QVBoxLayout(RulerProperties)
self.verticalLayout.setObjectName("verticalLayout")
self.widget = QtGui.QWidget(RulerProperties)
self.widget.setMinimumSize(QtCore.QSize(0, 0))
self.widget.setObjectName("widget")
self.gridLayout_4 = QtGui.QGridLayout(self.widget)
self.gridLayout_4.setContentsMargins(0, 0, 0, 0)
self.gridLayout_4.setObjectName("gridLayout_4")
self.label_2 = QtGui.QLabel(self.widget)
self.label_2.setObjectName("label_2")
self.gridLayout_4.addWidget(self.label_2, 4, 0, 1, 1)
self.label = QtGui.QLabel(self.widget)
self.label.setObjectName("label")
self.gridLayout_4.addWidget(self.label, 2, 0, 1, 1)
self.measureLabel = QtGui.QLabel(self.widget)
self.measureLabel.setLayoutDirection(QtCore.Qt.LeftToRight)
self.measureLabel.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.measureLabel.setObjectName("measureLabel")
self.gridLayout_4.addWidget(self.measureLabel, 2, 1, 1, 1)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.widget1 = QtGui.QWidget(self.widget)
self.widget1.setMinimumSize(QtCore.QSize(81, 28))
self.widget1.setMaximumSize(QtCore.QSize(16777215, 28))
self.widget1.setObjectName("widget1")
self.gridLayout_2 = QtGui.QGridLayout(self.widget1)
self.gridLayout_2.setContentsMargins(-1, 5, -1, -1)
self.gridLayout_2.setObjectName("gridLayout_2")
self.lineColorFrame = QtGui.QFrame(self.widget1)
self.lineColorFrame.setMinimumSize(QtCore.QSize(60, 0))
self.lineColorFrame.setMaximumSize(QtCore.QSize(16777215, 14))
self.lineColorFrame.setStyleSheet("background-color: rgb(0, 255, 0);")
self.lineColorFrame.setFrameShape(QtGui.QFrame.StyledPanel)
self.lineColorFrame.setFrameShadow(QtGui.QFrame.Raised)
self.lineColorFrame.setObjectName("lineColorFrame")
self.gridLayout_2.addWidget(self.lineColorFrame, 0, 0, 1, 1)
self.horizontalLayout_4.addWidget(self.widget1)
self.gridLayout_4.addLayout(self.horizontalLayout_4, 3, 1, 1, 1)
self.label_3 = QtGui.QLabel(self.widget)
self.label_3.setObjectName("label_3")
self.gridLayout_4.addWidget(self.label_3, 3, 0, 1, 1)
self.widget_3 = QtGui.QWidget(self.widget)
self.widget_3.setMinimumSize(QtCore.QSize(81, 28))
self.widget_3.setMaximumSize(QtCore.QSize(16777215, 28))
self.widget_3.setObjectName("widget_3")
self.gridLayout_5 = QtGui.QGridLayout(self.widget_3)
self.gridLayout_5.setContentsMargins(-1, 5, -1, -1)
self.gridLayout_5.setObjectName("gridLayout_5")
self.fontColorFrame = QtGui.QFrame(self.widget_3)
self.fontColorFrame.setMaximumSize(QtCore.QSize(16777215, 14))
self.fontColorFrame.setStyleSheet("background-color: rgb(255, 255, 255);")
self.fontColorFrame.setFrameShape(QtGui.QFrame.StyledPanel)
self.fontColorFrame.setFrameShadow(QtGui.QFrame.Raised)
self.fontColorFrame.setObjectName("fontColorFrame")
self.gridLayout_5.addWidget(self.fontColorFrame, 0, 0, 1, 1)
self.gridLayout_4.addWidget(self.widget_3, 4, 1, 1, 1)
self.newRulerButton = QtGui.QPushButton(self.widget)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/document-new.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.newRulerButton.setIcon(icon1)
self.newRulerButton.setObjectName("newRulerButton")
self.gridLayout_4.addWidget(self.newRulerButton, 1, 0, 1, 1)
self.deleteRulerButton = QtGui.QPushButton(self.widget)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/edit-delete.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.deleteRulerButton.setIcon(icon2)
self.deleteRulerButton.setObjectName("deleteRulerButton")
self.gridLayout_4.addWidget(self.deleteRulerButton, 1, 1, 1, 1)
self.verticalLayout.addWidget(self.widget)
self.rulerGroup = QtGui.QGroupBox(RulerProperties)
self.rulerGroup.setMinimumSize(QtCore.QSize(0, 80))
self.rulerGroup.setObjectName("rulerGroup")
self.verticalLayout.addWidget(self.rulerGroup)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.retranslateUi(RulerProperties)
QtCore.QMetaObject.connectSlotsByName(RulerProperties)
def retranslateUi(self, RulerProperties):
RulerProperties.setWindowTitle(QtGui.QApplication.translate("RulerProperties", "Ruler Plugin", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("RulerProperties", "Font color", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("RulerProperties", "Mensure:", None, QtGui.QApplication.UnicodeUTF8))
self.measureLabel.setText(QtGui.QApplication.translate("RulerProperties", "0 mm", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("RulerProperties", "Line color:", None, QtGui.QApplication.UnicodeUTF8))
self.newRulerButton.setText(QtGui.QApplication.translate("RulerProperties", "New", None, QtGui.QApplication.UnicodeUTF8))
self.deleteRulerButton.setText(QtGui.QApplication.translate("RulerProperties", "Delete", None, QtGui.QApplication.UnicodeUTF8))
self.rulerGroup.setTitle(QtGui.QApplication.translate("RulerProperties", "Rulers", None, QtGui.QApplication.UnicodeUTF8))
import resources_rc
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
RulerProperties = QtGui.QWidget()
ui = Ui_RulerProperties()
ui.setupUi(RulerProperties)
RulerProperties.show()
sys.exit(app.exec_())
|
KelvinLu/octo-marklog | marklog/marklog_ext/__init__.py | Python | agpl-3.0 | 39 | 0 | from extension import M | arklogExte | nsion
|
Johnetordoff/osf.io | addons/wiki/migrations/0011_auto_20180415_1649.py | Python | apache-2.0 | 590 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04- | 15 21:49
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('addons_wiki', '0010_migrate_node_wiki_pages'),
]
operations = [
migrations.RemoveField(
model_name='nodewikipage',
name='node',
),
migrations.RemoveField(
model_name='nodewikipage',
name='user',
| ),
migrations.DeleteModel(
name='NodeWikiPage',
),
]
|
pspacek/freeipa | ipatests/test_ipaserver/test_otptoken_import.py | Python | gpl-3.0 | 5,751 | 0.001043 | # Authors:
# Nathaniel McCallum <npmccallum@redhat.com>
#
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import nose
import pytest
from nss import nss
from ipalib.x509 import initialize_nss_database
from ipaserver.install.ipa_otptoken_import import PSKCDocument, ValidationError
basename = os.path.join(os.path.dirname(__file__), "data")
@pytest.mark.skipif(True, reason="Causes NSS errors. Ticket 5192")
class test_otptoken_import(object):
def teardown(self):
initialize_nss_database()
def test_figure3(self):
doc = PSKCDocument(os.path.join(basename, "pskc-figure3.xml"))
assert doc.keyname is None
assert [(t.id, t.options) for t in doc.getKeyPackages()] == \
[(u'12345678', {
'ipatokenotpkey': u'GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ',
'ipatokenvendor': u'Manufacturer',
'ipatokenserial': u'987654321',
'ipatokenhotpcounter': 0,
'ipatokenotpdigits': 8,
'type': u'hotp',
})]
def test_figure4(self):
doc = PSKCDocument(os.path.join(basename, "pskc-figure4.xml"))
assert doc.keyname is None
try:
[(t.id, t.options) for t in doc.getKeyPackages()]
except ValidationError: # Referenced keys are not supported.
pass
else:
assert False
def test_figure5(self):
doc = PSKCDocument(os.path.join(basename, "pskc-figure5.xml"))
assert doc.keyname is None
try:
[(t.id, t.options) for t in doc.getKeyPackages()]
except ValidationError: # PIN Policy is not supported.
pass
else:
assert False
def test_figure6(self):
nss.nss_init_nodb()
try:
doc = PSKCDocument(os.path.join(basename, "pskc-figure6.xml"))
assert doc.keyname == 'Pre-shared-key'
doc.setKey('12345678901234567890123456789012'.decode('hex'))
assert [(t.id, t.options) for t in doc.getKeyPackages()] == \
[(u'12345678', {
'ipatokenotpkey': u'GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ',
'ipatokenvendor': u'Manufacturer',
'ipatokenserial': u'987654321',
'ipatokenhotpcounter': 0,
'ipatokenotpdigits': 8,
'type': u'hotp'})]
finally:
nss.nss_shutdown()
def test_figure7(self):
nss.nss_init_nodb()
try:
doc = PSKCDocument(os.path.join(basename, "pskc-figure7.xml"))
assert doc.keyname == 'My Password 1'
doc.setKey('qwerty')
assert [(t.id, t.options) for t in doc.getKeyPackages()] == \
[(u'123456', {
'ipatokenotpkey': u'GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ',
'ipatokenvendor': u'TokenVendorAcme',
'ipatokenserial': u'987654321',
'ipatokenotpdigits': 8,
'type': u'hotp'})]
finally:
nss.nss_shutdown()
def test_figure8(self):
nss.nss_init_nodb()
try:
doc = PSKCDocument(os.path.join(basename, "pskc-figure8.xml"))
except NotImplementedError: # X.509 is not supported.
pass
else:
assert False
finally:
nss.nss_shutdown()
def test_invalid(self):
nss.nss_init_nodb()
try:
doc = PSKCDocument(os.path.join(basename, "pskc-invalid.xml"))
except ValueError: # File is invalid.
pass
else:
assert False
finally:
nss.nss_shutdown()
def test_mini(self):
nss.nss_init_nodb()
try:
doc = PSKCDocument(os.path.join(basename, "pskc-mini.xml"))
[(t.id, t.options) for t in doc.getKeyPackages()]
except ValidationError: # Unsupported token type.
pass
else:
assert False
finally:
nss.nss_shutdown()
def test_full(self):
nss.nss_init_nodb()
try:
doc = PSKCDocument(os.path.join(basename, "full.xml"))
assert [(t.id, t.options) for t in doc.getKeyPackages()] == \
[(u'KID1', {
'ipatokenotpkey': u'GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ',
'ipatokennotafter': u'20060531000000Z',
'ipatokennotbefore': u'20060501000000Z',
'ipatokenserial': u'SerialNo-IssueNo',
'ipatokentotpclockoffset': 60000,
'ipatokenotpalgorit | hm': u'sha1',
'ipatokenvendor': u'iana.dummy',
'description': u'FriendlyName',
'ipatokentotptimestep': 200,
'ipatokenhotpcounter': 0,
'ipatokenmodel': u'Model',
'ipatokenotpdigits': 8,
| 'type': u'hotp',
})]
finally:
nss.nss_shutdown()
|
dcorbacho/libcloud | docs/examples/compute/openstack/force_base_url.py | Python | apache-2.0 | 680 | 0 | from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security
# This assumes you don't have SSL set up.
# Note: Code | like this poses a security risk (MITM attack) and
# that's the reason why you should never us | e it for anything else
# besides testing. You have been warned.
libcloud.security.VERIFY_SSL_CERT = False
OpenStack = get_driver(Provider.OPENSTACK)
driver = OpenStack('your_auth_username', 'your_auth_password',
ex_force_auth_url='http://192.168.1.101:5000',
ex_force_auth_version='2.0_password',
ex_force_base_url='http://192.168.1.101:3000/v1/12345')
|
dan4ik95dv/housemanagement | tsj/admin.py | Python | mit | 317 | 0 | from django.contrib import admin
from tsj.models import *
admi | n.site.register(Company)
admin.site.register(Resident)
admin.site.register(House)
admin.site.register(ServiceCom | pany)
admin.site.register(MeterType)
admin.site.register(MeterReadingHistory)
admin.site.register(Employer)
admin.site.register(Notification)
|
ff0000/red-fab-deploy | fab_deploy/joyent/postgres.py | Python | mit | 5,514 | 0.004534 | import os
import sys
import tempfile
from fabric.api import run, sudo, env, local, hide, settings
from fabric.contrib.files import append, sed, exists, contains
from fabric.context_managers import prefix
from fabric.operations import get, put
from fabric.context_managers import cd
from fabric.tasks import Task
from fab_deploy.functions import random_password
from fab_deploy.base import postgres as base_postgres
class JoyentMixin(object):
version_directory_join = ''
def _get_data_dir(self, db_version):
# Try to get from svc first
output = run('svcprop -p config/data postgresql')
if output.stdout and exists(output.stdout, use_sudo=True):
return output.stdout
return base_postgres.PostgresInstall._get_data_dir(self, db_version)
def _install_package(self, db_version):
sudo("pkg_add postgresql%s-server" %db_version)
sudo("pkg_add postgresql%s-replicationtools" %db_version)
sudo("svcadm enable postgresql")
def _restart_db_server(self, db_version):
sudo('svcadm restart postgresql')
def _stop_db_server(self, db_version):
sudo('svcadm disable postgresql')
def _start_db_server(self, db_version):
sudo('svcadm enable postgresql')
class PostgresInstall(JoyentMixin, base_postgres.PostgresInstall):
"""
Install postgresql on server
install postgresql package;
enable postgres access from localhost without password;
enable all other user access from other machines with password;
setup a few parameters related with streaming replication;
database server listen to all machines '*';
create a user for database with password.
"""
name = 'master_setup'
db_version = '9.1'
class SlaveSetup(JoyentMixin, base_postgres.SlaveSetup):
"""
Set up master-slave streaming replication: slave node
"""
name = 'slave_setup'
class PGBouncerInstall(Task):
"""
Set up PGBouncer on a database server
"""
name = 'setup_pgbouncer'
pgbouncer_src = 'http://pkgsrc.smartos.org/packages/SmartOS/2012Q2/databases/pgbouncer-1.4.2.tgz'
pkg_name = 'pgbouncer-1.4.2.tgz'
config_dir = '/etc/opt/pkg'
config = {
'*': 'host=127.0.0.1',
'logfile': '/var/log/pgbouncer/pgbouncer.log',
'listen_addr': '*',
'listen_port': '6432',
'unix_socket_dir': '/tmp',
'auth_type': 'md5',
'auth_file': '%s/pgbouncer.userlist' %config_dir,
'pool_mode': 'session',
'admin_users': 'postgres',
'stats_users': 'postgres',
}
def install_package(self):
sudo('pkg_add libevent')
with cd('/tmp'):
run('wget %s' %self.pgbouncer_src)
sudo('pkg_add %s' %self.pkg_name)
def _setup_parameter(self, file_name, **kwargs):
for key, value in kwargs.items():
origin = "%s =" %key
new = "%s = %s" %(key, value)
sudo('sed -i "/%s/ c\%s" %s' %(origin, new, file_name))
def _get_passwd(self, username):
with hide('output'):
string = run('echo "select usename, passwd from pg_shadow where '
'usename=\'%s\' order by 1" | sudo su postgres -c '
'"psql"' %username)
user, passwd = string.split('\n')[2].split('|')
user = user.strip()
passwd = passwd.strip()
__, tmp_name = tempfile.mkstemp()
fn = open(tmp_name, 'w')
fn.write('"%s" "%s" ""\n' %(user, passwd))
fn.close()
put(tmp_name, '%s/pgbouncer.userlist'%self.config_dir, use_sudo=True)
local('rm %s' %tmp_name)
def _get | _username(self, section=None):
try:
names = env.config_object.get_list(section, env.config_object.USERNAME)
u | sername = names[0]
except:
print ('You must first set up a database server on this machine, '
'and create a database user')
raise
return username
def run(self, section=None):
"""
"""
sudo('mkdir -p /opt/pkg/bin')
sudo("ln -sf /opt/local/bin/awk /opt/pkg/bin/nawk")
sudo("ln -sf /opt/local/bin/sed /opt/pkg/bin/nbsed")
self.install_package()
svc_method = os.path.join(env.configs_dir, 'pgbouncer.xml')
put(svc_method, self.config_dir, use_sudo=True)
home = run('bash -c "echo ~postgres"')
bounce_home = os.path.join(home, 'pgbouncer')
pidfile = os.path.join(bounce_home, 'pgbouncer.pid')
self._setup_parameter('%s/pgbouncer.ini' %self.config_dir,
pidfile=pidfile, **self.config)
if not section:
section = 'db-server'
username = self._get_username(section)
self._get_passwd(username)
# postgres should be the owner of these config files
sudo('chown -R postgres:postgres %s' %self.config_dir)
sudo('mkdir -p %s' % bounce_home)
sudo('chown postgres:postgres %s' % bounce_home)
sudo('mkdir -p /var/log/pgbouncer')
sudo('chown postgres:postgres /var/log/pgbouncer')
# set up log
sudo('logadm -C 3 -p1d -c -w /var/log/pgbouncer/pgbouncer.log -z 1')
run('svccfg import %s/pgbouncer.xml' %self.config_dir)
# start pgbouncer
sudo('svcadm enable pgbouncer')
setup = PostgresInstall()
slave_setup = SlaveSetup()
setup_pgbouncer = PGBouncerInstall()
|
denfromufa/mipt-course | demos/python/2_variables_and_types.py | Python | bsd-3-clause | 694 | 0 | #!/usr/bin/python
# Code is executed top-to-bottom on load.
# Variables are defined at the first assignment
a = | 2 # defines `a`
b = 2
# 'print' operator, simple form: just prints out human-readable representation
# of the argument. NOTE: no \n!
print a + b
# Types in Python are dynamic!
v = 42 # `v` is an integer
print v
v = 0.42 # now it's a float
print v
v = 2**76 # NEW: Loooong integers are supported!
print v
v = 4 + 0.2j # NEW: complex numbers!
print v
v = "almost but not quite entirely unlike tea" # now it's a string
print v
# 'print' operator, full form.
print "%d %.1f %s" % (42, 4.2, "forty t | wo")
# non-optimal equivalent:
print str(42) + " " + str(4.2) + " forty two"
|
dmlc/tvm | tests/python/contrib/test_ethosu/cascader/test_ethosu_part_performance.py | Python | apache-2.0 | 6,531 | 0.001837 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
pytest.importorskip("ethosu.vela")
from functools import reduce
import numpy as np
import math
import tvm.contrib.ethosu.cascader as cs
from tvm.contrib.ethosu.cascader.device_config import _Shape
from .infra import make_matrices
@pytest.mark.parametrize(
"acc_config, expected",
[
("ethos-u55-256", (1, 0.125, 0.75, 0.375, 0.75)),
("ethos-u55-128", (1, 0.25, 1.5, 0.75, 0.75)),
("ethos-u55-64", (1, 0.5, 3, 1.5, 1.5)),
("ethos- | u55-32", (2, 1, 6, 3, 3)),
],
)
def test_device_config_cycles(acc_config, expected):
device | _config = cs.EthosuDeviceConfig(acc_config)
conv_type = "ethosu_conv2d"
conv_str = None
conv_ifm_dtype = "int8"
conv_ofm_dtype = "int8"
conv_activation = "LUT"
conv_cycles = device_config._get_output_cycles(
conv_type, conv_str, conv_ifm_dtype, conv_ofm_dtype, conv_activation
)
assert conv_cycles == expected[0]
pool_type = "ethosu_pooling"
pool_str = "MAX"
pool_ifm_dtype = "int8"
pool_ofm_dtype = "int8"
pool_activation = "NONE"
pool_cycles = device_config._get_output_cycles(
pool_type, pool_str, pool_ifm_dtype, pool_ofm_dtype, pool_activation
)
assert pool_cycles == expected[1]
add_type = "ethosu_binary_elementwise"
add_str = "ADD"
add_ifm_dtype = "int8"
add_ofm_dtype = "int8"
add_activation = "NONE"
add_cycles = device_config._get_output_cycles(
add_type, add_str, add_ifm_dtype, add_ofm_dtype, add_activation
)
assert add_cycles == expected[2]
mul_type = "ethosu_binary_elementwise"
mul_str = "MUL"
mul_ifm_dtype = "int8"
mul_ofm_dtype = "int8"
mul_activation = "NONE"
mul_cycles = device_config._get_output_cycles(
mul_type, mul_str, mul_ifm_dtype, mul_ofm_dtype, mul_activation
)
assert mul_cycles == expected[3]
mul_32_type = "ethosu_binary_elementwise"
mul_32_str = "MUL"
mul_32_ifm_dtype = "int8"
mul_32_ofm_dtype = "int32"
mul_32_activation = "NONE"
mul_32_cycles = device_config._get_output_cycles(
mul_32_type, mul_32_str, mul_32_ifm_dtype, mul_32_ofm_dtype, mul_32_activation
)
assert mul_32_cycles == expected[4]
@pytest.mark.parametrize(
"accelerator, op_type, activation, kernel, stride, dilation, padding, in_shape, out_shape, block_shape, input_block_shape, expected",
[
(
"ethos-u55-128",
"ethosu_conv2d",
"NONE",
(3, 3),
(1, 1),
(1, 1),
(0, 0, 0, 0),
(1, 16, 16, 96),
(1, 16, 16, 96),
(1, 8, 8, 16),
(1, 10, 10, 32),
167733,
),
(
"ethos-u55-128",
"ethosu_conv2d",
"NONE",
(10, 4),
(2, 1),
(1, 1),
(0, 0, 0, 0),
(1, 58, 13, 1),
(1, 25, 10, 276),
(1, 6, 10, 32),
(1, 18, 14, 8),
174105,
),
(
"ethos-u55-128",
"ethosu_depthwise_conv2d",
"NONE",
(3, 3),
(2, 2),
(1, 1),
(1, 1, 1, 1),
(1, 25, 10, 276),
(1, 13, 5, 276),
(1, 7, 6, 16),
(1, 15, 14, 16),
17590,
),
(
"ethos-u55-128",
"ethosu_depthwise_conv2d",
"NONE",
(4, 9),
(1, 1),
(1, 1),
(0, 0, 0, 0),
(1, 28, 81, 42),
(1, 25, 73, 41),
(1, 4, 16, 16),
(1, 7, 24, 16),
173414,
),
],
)
def test_conv_performance(
accelerator,
op_type,
activation,
kernel,
stride,
dilation,
padding,
in_shape,
out_shape,
block_shape,
input_block_shape,
expected,
):
ifm_channels = in_shape[3]
ifm_matrix, ifm_offset, weight_matrix, weight_offset, _, _ = make_matrices(
op_type,
kernel,
stride,
padding,
"NHWC",
"NHWC",
dilation,
ifm_channels,
)
propagator = cs.Propagator(ifm_matrix, ifm_offset)
weight_propagator = cs.Propagator(weight_matrix, weight_offset)
subkernels = ((kernel[0] + 7) // 8) * ((kernel[1] + 7) // 8)
device_config = cs.EthosuDeviceConfig(accelerator)
output_cycles = device_config._get_output_cycles(op_type, "", "int8", "int8", activation)
output_cycles *= reduce(lambda a, b: a * b, block_shape, 1)
is_partkernel = device_config.is_partkernel(
op_type, ifm_channels, "int8", kernel[0] * kernel[1]
)
compute_cycles = device_config._estimate_compute_cycles_per_block(
op_type,
_Shape(block_shape),
_Shape(input_block_shape),
kernel[0],
kernel[1],
ifm_channels,
"int8",
is_partkernel,
)
block_configs = [cs.BlockConfig(block_shape, compute_cycles, int(output_cycles))]
output_quantum = [1, 1, 2, 8]
te_subgraph = cs.TESubgraph([], None)
part = cs.EthosuPart(
te_subgraph,
[propagator, weight_propagator],
output_quantum,
subkernels,
block_configs,
1,
)
stripes = [1] * len(output_quantum)
offset = [0] * len(output_quantum)
order = [1, 2, 3, 4]
stripe_config = cs.StripeConfig(out_shape, out_shape, out_shape, order, stripes, offset)
compute_cycles = part.get_performance_info(stripe_config, cs.BufferMode.ROLLING).compute_cycles
tolerance = expected * 0.1
assert expected - tolerance <= compute_cycles <= expected + tolerance
if __name__ == "__main__":
pytest.main([__file__])
|
mbedmicro/pyOCD | pyocd/target/builtin/target_lpc800.py | Python | apache-2.0 | 4,050 | 0.008395 | # pyOCD debugger
# Copyright (c) 2006-2013 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ...flash.flash import Flash
from ...coresight.coresight_target import CoreSightTarget
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from ...debug.svd.loader import SVDFile
FLASH_ALGO = { 'load_address' : 0x10000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x47700a80, 0x484e494f, 0x60084449, 0x2100484e, 0x22016301, 0x63416342, 0x6b416342, 0xd0fc07c9,
0x49496382, 0x39402002, 0x20007008, 0x20004770, 0xb5f84770, 0x20324c45, 0x2500444c, 0x4622260f,
0x4621c261, 0x4f423114, 0x91004620, 0x696047b8, 0xd10c2800, 0x46212034, 0x483ac161, 0x68004448,
0x462060e0, 0x47b89900, 0x28006960, 0x2001d000, 0xb5f8bdf8, 0x0a844d35, 0x2032444d, 0x4629606c,
0x311460ac, 0x4e326028, 0x4628460f, 0x696847b0, 0xd10d2800, 0x2034606c, 0x602860ac, 0x46394829,
| 0x68004448, 0x462860e8, 0x696847b0, 0xd00028 | 00, 0xbdf82001, 0x4614b5f8, 0xd11e0006, 0x0180200b,
0x6bc11820, 0x42814823, 0x4823d038, 0xd0354281, 0x42814822, 0x4822d032, 0xd02f4281, 0x68206861,
0x184068e2, 0x188968a1, 0x69211840, 0x69611840, 0x69a11840, 0x42401840, 0x4d1461e0, 0x444d0ab0,
0x60682132, 0x60a86029, 0x31144629, 0x46284f10, 0x47b89100, 0x28006968, 0x606ed110, 0x60ac2033,
0x20016028, 0x60e80280, 0x44484806, 0x61286800, 0x99004628, 0x696847b8, 0xd0002800, 0xbdf82001,
0x00002ee0, 0x00000004, 0x40048040, 0x00000008, 0x1fff1ff1, 0x4e697370, 0x12345678, 0x87654321,
0x43218765
],
'pc_init' : 0x10000024,
'pc_eraseAll' : 0x10000052,
'pc_erase_sector' : 0x10000092,
'pc_program_page' : 0x100000d4,
'begin_data' : 0x10000400, # Analyzer uses a max of 128 B data (32 pages * 4 bytes / page)
# Double buffering is not supported since there is not enough ram
'begin_stack' : 0x10001000,
'static_base' : 0x10000300,
'min_program_length' : 64,
'analyzer_supported' : True,
'analyzer_address' : 0x10000800 # Analyzer 0x10000800..0x10000e00
}
class LPC800(CoreSightTarget):
VENDOR = "NXP"
MEMORY_MAP = MemoryMap(
FlashRegion( start=0, length=0x4000, blocksize=0x400, is_boot_memory=True, algo=FLASH_ALGO),
RamRegion( start=0x10000000, length=0x1000)
)
def __init__(self, session):
super(LPC800, self).__init__(session, self.MEMORY_MAP)
self._svd_location = SVDFile.from_builtin("LPC800_v0.3.svd")
def reset_and_halt(self, reset_type=None, map_to_user=True):
super(LPC800, self).reset_and_halt(reset_type)
# Remap to use flash and set SP and SP accordingly
if map_to_user:
self.write_memory(0x40048000, 0x2, 32)
sp = self.read_memory(0x0)
pc = self.read_memory(0x4)
self.write_core_register('sp', sp)
self.write_core_register('pc', pc)
|
SuperTux/flexlay | flexlay/color.py | Python | gpl-3.0 | 2,408 | 0.000831 | # Flexlay - A Generic 2D Game Editor
# Copyright (C) 2014 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General | Public License as | published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from PyQt5.QtGui import QColor
class Color:
def __init__(self, r: int = 255, g: int = 255, b: int = 255, a: int = 255) -> None:
self.r = r
self.g = g
self.b = b
self.a = a
def copy(self):
return Color(self.r, self.g, self.b, self.a)
def get_red(self):
return self.r
def get_green(self):
return self.g
def get_blue(self):
return self.b
def get_alpha(self):
return self.a
def __eq__(self, rhs):
return (self.r == rhs.r and
self.g == rhs.g and
self.b == rhs.b and
self.a == rhs.a)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def to_qt(self):
return QColor(self.r, self.g, self.b, self.a)
def to_list(self):
return [self.r, self.g, self.b, self.a]
def to_hex(self):
return "#%02x%02x%02x" % (self.r, self.g, self.b)
class Colorf:
def __init__(self, r: float = 1.0, g: float = 1.0, b: float = 1.0, a: float = 1.0) -> None:
self.r = r
self.g = g
self.b = b
self.a = a
def to_list(self):
return [self.r, self.g, self.b, self.a]
def to_color(self):
return Color(int(255 * self.r),
int(255 * self.g),
int(255 * self.b),
int(255 * self.a))
def __eq__(self, other: object) -> bool:
if isinstance(other, Colorf):
return (self.r == other.r and
self.b == other.b and
self.g == other.g and
self.a == other.a)
else:
return False
# EOF #
|
EVEprosper/ProsperDatareader | prosper/datareader/__init__.py | Python | mit | 72 | 0 | import prosper.datare | ader.exceptions
import prosper.datare | ader._version
|
joshwatson/binaryninja-api | python/examples/cli_lift.py | Python | mit | 2,801 | 0.017494 | #!/usr/bin/env python
#
# command-line BinaryNinja lifter
#
# BinaryNinja multiplatform version of Z0MBIE's PE_STAT for opcode frequency
# statistics http://z0mbie.dreamhosters.com/opcodes.html
#
# Copyright (c) 2020-2021 Vector 35 Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE | SOFTWARE.
import sys
import binaryninja
from binaryninja import core
from binaryninja import binaryview
from binaryninja import lowlevelil
RED = '\x1B[31m'
NORMAL = '\x1B[0m'
def traverse_IL(il, indent):
if isinstance(il, lowlevelil.LowLevelILInstruction):
print('\t'*indent + il.operation.name)
for o in il.operands:
traverse_IL(o, indent+1)
else:
print('\t'*indent + str( | il))
if __name__ == '__main__':
if not sys.argv[2:]:
print('usage: %s <platform> <bytes>' % sys.argv[0])
print('')
print('examples:')
print(' eg: %s linux-armv7 14 d0 4d e2 01 20 a0 e1 00 30 a0 e1 00 c0 a0 e3' % sys.argv[0])
print('')
print('platforms:')
print('\t' + '\n\t'.join(map(str, list(binaryninja.Platform))))
sys.exit(-1)
# divide arguments
platName = sys.argv[1]
archName = platName.split('-')[1]
bytesList = sys.argv[2:]
# parse byte arguments
data = b''.join(list(map(lambda x: int(x,16).to_bytes(1,'big'), bytesList)))
plat = binaryninja.Platform[platName]
bv = binaryview.BinaryView.new(data)
bv.platform = plat
bv.add_function(0, plat=plat)
# print('print all the functions, their basic blocks, and their mc instructions')
# for func in bv.functions:
# print(repr(func))
# for block in func:
# print("\t{0}".format(block))
# for insn in block:
# print("\t\t{0}".format(insn))
print(RED)
for func in bv.functions:
#print(repr(func))
for block in func.low_level_il:
#print("\t{0}".format(block))
for insn in block:
traverse_IL(insn, 0)
print(NORMAL)
|
googleapis/python-spanner | google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py | Python | apache-2.0 | 765 | 0 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIO | NS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import DatabaseAdminClient
from .async_client import Data | baseAdminAsyncClient
__all__ = (
"DatabaseAdminClient",
"DatabaseAdminAsyncClient",
)
|
ChyauAng/DNN-Composer | src/Preprocess/globalConstant.py | Python | mit | 1,770 | 0.003955 | #global declaration
global index0
global index1
global index2
global index3
global index4
global index5
global index6
global index7
global index8
global index9
global index10
global index11
global index12
global index13
global index14
global index15
global index16
global index17
global index18
global index19
global index20
global index21
global index22
global nextNoteDurationBase
global note
global pitch_train
global pitch_test
global duration_train
global duration_test
#used for getting duration information
index0 = 1/4
index1 = 1/3
index2 = 1/2
index3 = 3/4
index4 = 3/8
index5 = 5/8
index6 = 2/3
index7 = 2/5
index8 = 4/9
index9 = 4/5
index10 = 1.0
index11 = 3/2
index12 = 4/3
index13 = 9/8
index14 = 2.0
index15 = 8/3
index16 = 9/4
index17 = 7/2
index18 = 3.0
index19 = 4.0
index20 = 9/2
index21 = 6.0
index22 = 8.0
#used for gettiing pitch information
note = ['G,', '^G,', '_G,', '=G,',
'A,', '^A,', '_A,', '=A,',
'B,', '^B,', '_B,', '=B,',
'C', '^C', '_C', '=C',
'D', '^D', '_D', '=D',
'E', '^E', '_E', '=E',
'F', '^F', '_F', '=F',
'G', '^G', '_G', '=G',
'A', '^A', '_A', '=A',
'B', '^B', '_B', '=B',
'c', '^c', '_c', '=c',
'd', '^d', '_d', '=d',
'e', '^e', '_e', '=e',
'f', '^f', '_f', '=f',
'g', '^g', '_g', '=g',
'a', '^a', '_a', '=a',
'b', '^b', '_b', '=b',
'c\'', '^c\'', '_c\'', '=c\'',
'd\'', '^d\'', '_d\'', '=d\'',
'%ending']
#nextNoteDurationBase is the base value of duration
nextNoteDu | rat | ionBase = 1.0
pitch_train = []
pitch_test = []
duration_train = []
duration_test = []
|
Horta/limix | limix/qc/test/test_qc.py | Python | apache-2.0 | 922 | 0.001085 | from __future__ import division
from dask.array import from_array
from numpy.random import RandomState
from numpy.testing import assert_allclose, assert_equal
from limix.qc import compute_maf, indep_pairwise
def test_qc_indep_p | airwise():
random = RandomState(0)
X = random.randn(3, | 100)
head = [True, True, False, True, False]
tail = [True, True, False, False]
assert_equal(indep_pairwise(X, 4, 2, 0.5, verbose=False)[:5], head)
assert_equal(indep_pairwise(X, 4, 2, 0.5, verbose=False)[-4:], tail)
X = from_array(X, chunks=(2, 10))
assert_equal(indep_pairwise(X, 4, 2, 0.5, verbose=False)[:5], head)
assert_equal(indep_pairwise(X, 4, 2, 0.5, verbose=False)[-4:], tail)
def test_qc_maf():
random = RandomState(0)
X = random.randint(0, 3, size=(100, 10))
assert_allclose(
compute_maf(X), [0.49, 0.49, 0.445, 0.495, 0.5, 0.45, 0.48, 0.48, 0.47, 0.435]
)
|
twonds/healthmonger | healthmonger/log.py | Python | mit | 1,181 | 0 | """Simple log functions
"""
import sys
import traceback
import config
def debug(msg): |
"""Display messages to stderr only when L{config.debug} is True
@param msg: The message to write to stderr. It can be anything that
can be turned into a string.
| """
if config.debug:
error(msg)
def error(msg):
"""Write a message to stderr
@param msg: The message to write to stderr. It can be anything that
can be turned into a string.
"""
sys.stderr.write(str(msg))
sys.stderr.write("\n")
def info(msg):
"""Write a message to stdout
@param msg: The message to write to stdout. It can be anything that
can be turned into a string.
"""
print(msg)
def log_traceback(ex):
"""Log an exception and traceback without raising the exception.
@param ex: The exception to log.
@type ex: L{Exception}
"""
error(ex)
_, _, ex_traceback = sys.exc_info()
if ex_traceback is None:
ex_traceback = ex.__traceback__
tb_lines = [line.rstrip('\n') for line in
traceback.format_exception(ex.__class__, ex, ex_traceback)]
error(tb_lines)
|
KFPA/ScrapyNews | IPProxyPool-master/test/testsql.py | Python | apache-2.0 | 339 | 0.006042 | # coding:utf- | 8
from db.SqlHelper import SqlHelper
from util.exception import Con_DB_Fail
try:
sqlhelper = SqlHelper()
| sqlhelper.init_db()
except Exception:
raise Con_DB_Fail
proxy = {'ip': '192.168.1.1', 'port': int('80'), 'type': 0, 'protocol': 0, 'country': u'中国', 'area': u'四川', 'speed': 0}
sqlhelper.insert(proxy) |
Xicnet/burnstation | decoder.py | Python | gpl-3.0 | 3,770 | 0.010875 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from ErrorsHandler import *
from string import strip, find, split, lower, rjust, atoi, atof, replace, digits, zfill, join
from tools import cleanlist, which, filecopy, mkdirtree, touch, listmatch, rm, escapedfilename
from too | ls import get | _username, get_tempdir, cmdexec
import os, os.path
import mp3info, ogg.vorbis
logfile = 'decoder.log'
logger.InitAll(logfile, '')
sys.stdout.save = False
class Decoder:
def __init__(self):
""" Converts mp3/ogg-files to wav-files. """
#logger.debug2("Decoder class instantiated")
pass
def convert2wav(self, files, targetPath):
"""walk files list and apply decode() to each"""
i = 0
for source in files:
target = targetPath + "/" + str(i) + ".wav"
#logger.info("Decoding %s to %s ..." % (source, target))
logger.info("Decoding %s ..." % os.path.basename(source))
self.decode(source, target)
i += 1
logger.info("Decoding finished")
def decode(self, filename, target):
"""decode a file to wav"""
if not os.path.isfile(filename):
logger.error("Decoding failed: %s not found" % filename)
return False
mp3count = 0
oggcount = 0
if (lower(filename[-4:]) == ".mp3"):
mp3count = mp3count + 1
if (lower(filename[-4:]) == ".ogg"):
oggcount = oggcount + 1
# Check whether mpg123 and oggdec exists
mpg123_command = which("mpg123")
oggdec_command = which("oggdec")
if ((mp3count > 0) and (mpg123_command == "")):
logger.warn( "mpg123 not found for converting mp3 files" )
if ((oggcount > 0) and (oggdec_command == "")):
logger.warn( "oggdec not found for converting ogg files" )
#logger.info( "Converting %d file(s) now" % (mp3count + oggcount) )
if ((mp3count > 0) or (oggcount > 0)):
#if (lower(filename[-4:]) == ".mp3") or (lower(filename[-4:]) == ".ogg"):
#wavfilename = "%s/%s.wav" % (targetPath, os.path.basename(filename)[:-4])
#logger.info( "target = " + target )
if (lower(filename[-4:]) == ".mp3"):
# Make sure that conversion is done with the correct sample rate
file = open(filename, "rb")
mpeg3info = mp3info.MP3Info(file)
file.close()
samplerate = mpeg3info.mpeg.samplerate
command = "(%s --stereo -s \"%s\" | sox -t raw -r %d -w -s -c 2 - -r 44100 -t wav \"%s\") 2>&1" % (mpg123_command, escapedfilename(filename), samplerate, escapedfilename(target))
elif (lower(filename[-4:]) == ".ogg"):
# get OGG samplerate
vf = ogg.vorbis.VorbisFile(filename)
vi = vf.info()
samplerate = vi.rate
channels = vi.channels
#logger.info( 'OGG info: samplerate = %s , channels = %s' % (samplerate, channels) )
if ( samplerate != 44100) or (channels != 2):
#logger.warn( 'samplerate not 44100, using sox to resample' )
command = "(sox \"%s\" -r 44100 -c 2 -t wav \"%s\") 2>&1" % (escapedfilename(filename), escapedfilename(target))
else:
command = "%s -Q -o \"%s\" \"%s\" 2>&1" % (oggdec_command, escapedfilename(target), escapedfilename(filename))
#logger.info( "Executing: %s" % command )
(result, (stdout_output, stderr_output)) = cmdexec(command)
if (result != 0):
if (lower(filename[-4:]) == ".mp3"):
result = listmatch(output, "Playing")
output = output[result[0]:]
return False
else: return True
|
whitehorse-io/encarnia | pyenv/lib/python2.7/site-packages/PIL/PSDraw.py | Python | mit | 6,841 | 0.000146 | #
# The Python Imaging Library
# $Id$
#
# simple postscript graphics interface
#
# History:
# 1996-04-20 fl Created
# 1999-01-10 fl Added gsave/grestore to image method
# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge)
#
# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved.
# Copyright (c) 1996 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
from PIL import EpsImagePlugin
##
# Simple Postscript graphics interface.
class PSDraw(object):
"""
Sets up printing to the given file. If **file** is omitted,
:py:attr:`sys.stdout` is assumed.
"""
def __init__(self, fp=None):
if not fp:
import sys
fp = sys.stdout
self.fp = fp
def _fp_write(self, to_write):
if bytes is str:
self.fp.write(to_write)
else:
self.fp.write(bytes(to_write, 'UTF-8'))
def begin_document(self, id=None):
"""Set up printing of a document. (Write Postscript DSC header.)"""
# FIXME: incomplete
self._fp_write("%!PS-Adobe-3.0\n"
"save\n"
"/showpage { } def\n"
"%%EndComments\n"
"%%BeginDocument\n")
# self.fp_write(ERROR_PS) # debugging!
self._fp_write(EDROFF_PS)
self._fp_write(VDI_PS)
self._fp_write("%%EndProlog\n")
self.isofont = {}
def end_document(self):
"""Ends printing. (Write Postscript DSC footer.)"""
self._fp_write("%%EndDocument\n"
"restore showpage\n"
"%%End\n")
if hasattr(self.fp, "flush"):
self.fp.flush()
def setfont(self, font, size):
"""
Selects which font to use.
:param font: A Postscript font name
:param size: Size in points.
"""
if font not in self.isofont:
# reencode font
self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" %
(font, font))
self.isofont[font] = 1
# rough
self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font))
def line(self, xy0, xy1):
"""
Draws a line between the two points. Coordinates are given in
Postscript point coordinates (72 points per inch, (0, 0) is the lower
left corner of the page).
"""
xy = xy0 + xy1
self._fp_write("%d %d %d %d Vl\n" % xy)
def rectangle(self, box):
"""
Draws a rectangle.
:param box: A 4-tuple of integers whose order and function is currently
undocumented.
Hint: the tuple is passed into this format string:
.. code-block:: python
%d %d M %d %d 0 Vr\n
"""
self._fp_write("%d %d M %d %d 0 Vr\n" % box)
def text(self, xy, text):
"""
Draws text at the given position. You must use
:py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method.
"""
text = "\\(".join(text.split("("))
text = "\\)".join(text.split(")"))
xy = xy + (text,)
self._fp_write("%d %d M (%s) S\n" % xy)
def image(self, box, im, dpi=None):
"""Draw a PIL image, centered in the given box." | ""
# default resolution depends on mode
if not dpi:
if im.mode == "1":
dpi = 200 # fax
else:
dpi = 100 # greyscale
| # image size (on paper)
x = float(im.size[0] * 72) / dpi
y = float(im.size[1] * 72) / dpi
# max allowed size
xmax = float(box[2] - box[0])
ymax = float(box[3] - box[1])
if x > xmax:
y = y * xmax / x
x = xmax
if y > ymax:
x = x * ymax / y
y = ymax
dx = (xmax - x) / 2 + box[0]
dy = (ymax - y) / 2 + box[1]
self._fp_write("gsave\n%f %f translate\n" % (dx, dy))
if (x, y) != im.size:
# EpsImagePlugin._save prints the image at (0,0,xsize,ysize)
sx = x / im.size[0]
sy = y / im.size[1]
self._fp_write("%f %f scale\n" % (sx, sy))
EpsImagePlugin._save(im, self.fp, None, 0)
self._fp_write("\ngrestore\n")
# --------------------------------------------------------------------
# Postscript driver
#
# EDROFF.PS -- Postscript driver for Edroff 2
#
# History:
# 94-01-25 fl: created (edroff 2.04)
#
# Copyright (c) Fredrik Lundh 1994.
#
EDROFF_PS = """\
/S { show } bind def
/P { moveto show } bind def
/M { moveto } bind def
/X { 0 rmoveto } bind def
/Y { 0 exch rmoveto } bind def
/E { findfont
dup maxlength dict begin
{
1 index /FID ne { def } { pop pop } ifelse
} forall
/Encoding exch def
dup /FontName exch def
currentdict end definefont pop
} bind def
/F { findfont exch scalefont dup setfont
[ exch /setfont cvx ] cvx bind def
} bind def
"""
#
# VDI.PS -- Postscript driver for VDI meta commands
#
# History:
# 94-01-25 fl: created (edroff 2.04)
#
# Copyright (c) Fredrik Lundh 1994.
#
VDI_PS = """\
/Vm { moveto } bind def
/Va { newpath arcn stroke } bind def
/Vl { moveto lineto stroke } bind def
/Vc { newpath 0 360 arc closepath } bind def
/Vr { exch dup 0 rlineto
exch dup neg 0 exch rlineto
exch neg 0 rlineto
0 exch rlineto
100 div setgray fill 0 setgray } bind def
/Tm matrix def
/Ve { Tm currentmatrix pop
translate scale newpath 0 0 .5 0 360 arc closepath
Tm setmatrix
} bind def
/Vf { currentgray exch setgray fill setgray } bind def
"""
#
# ERROR.PS -- Error handler
#
# History:
# 89-11-21 fl: created (pslist 1.10)
#
ERROR_PS = """\
/landscape false def
/errorBUF 200 string def
/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def
errordict begin /handleerror {
initmatrix /Courier findfont 10 scalefont setfont
newpath 72 720 moveto $error begin /newerror false def
(PostScript Error) show errorNL errorNL
(Error: ) show
/errorname load errorBUF cvs show errorNL errorNL
(Command: ) show
/command load dup type /stringtype ne { errorBUF cvs } if show
errorNL errorNL
(VMstatus: ) show
vmstatus errorBUF cvs show ( bytes available, ) show
errorBUF cvs show ( bytes used at level ) show
errorBUF cvs show errorNL errorNL
(Operand stargck: ) show errorNL /ostargck load {
dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
} forall errorNL
(Execution stargck: ) show errorNL /estargck load {
dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
} forall
end showpage
} def end
"""
|
PyThaiNLP/pythainlp | pythainlp/tag/__init__.py | Python | apache-2.0 | 573 | 0 | # -*- coding: utf-8 -*-
"""
Linguistic and other taggers.
Tagging each token in a sentence with supplementary information,
such as its part-of-speech (POS) tag, and named entity (NE) tag.
"""
__all__ = [
"PerceptronTagger",
"pos_tag",
"pos_tag_sents",
"tag_provinces",
"chunk_parse",
"NER",
]
from pythainlp.tag.locations import tag_provinces
from pythainlp.tag.pos_tag import pos_tag, pos_tag_sents
from pyt | hainlp.tag._tag_perceptron import PerceptronTagger
from pythainlp.tag.chunk import chunk_parse
from pythainlp.tag.named_entity import N | ER
|
zsulocal/pycoin | pycoin/message/make_parser_and_packer.py | Python | mit | 8,636 | 0.0022 | import io
import struct
from pycoin.encoding import double_sha256
from pycoin.serialize import b2h_rev, bitcoin_streamer
from .InvItem import InvItem
from .PeerAddress import PeerAddress
# definitions of message structures and types
# L: 4 byte long integer
# Q: 8 byte long integer
# S: unicode string encoded using utf-8
# [v]: array of InvItem objects
# [LA]: array of (L, PeerAddress) tuples
# b: boolean
# A: PeerAddress object
# B: Block object
# T: Tx object
STANDARD_P2P_MESSAGES = {
'version': (
"version:L services:Q timestamp:Q remote_address:A local_address:A"
" nonce:Q subversion:S last_block_index:L"
),
'verack': "",
'addr': "date_address_tuples:[LA]",
'inv': "items:[v]",
'getdata': "items:[v]",
'notfound': "items:[v]",
'getblocks': "version:L hashes:[#] hash_stop:#",
'getheaders': "version:L hashes:[#] hash_stop:#",
'tx': "tx:T",
'block': "block:B",
'headers': "headers:[zI]",
'getaddr': "",
'mempool': "",
# 'checkorder': obsolete
# 'submitorder': obsolete
# 'reply': obsolete
'ping': "nonce:Q",
'pong': "nonce:Q",
'filterload': "filter:[1] hash_function_count:L tweak:L flags:b",
'filteradd': "data:[1]",
'filterclear': "",
'merkleblock': (
"header:z total_transactions:L hashes:[#] flags:[1]"
),
'alert': "payload:S signature:S",
}
def standard_messages():
    """Return a fresh, mutable copy of the standard P2P message definitions."""
    return STANDARD_P2P_MESSAGES.copy()
def _recurse(level_widths, level_index, node_index, hashes, flags, flag_index, tx_acc):
    """Walk one node of a partial-merkle-tree proof.

    Consumes flag bits (one per visited node) and hashes (popped from the
    end of *hashes*) as it descends; matched transaction hashes at the
    leaf level are appended to *tx_acc* as a side effect.

    Returns a tuple (node_hash, next_flag_index).

    Note: the calls to _recurse below were garbled in the original text
    ("_recu | rse(" / "level_wid | ths"); reconstructed here.
    """
    # Consume one flag bit; a 0 bit means this subtree is pruned and its
    # hash is supplied directly rather than being computed from children.
    idx, r = divmod(flag_index, 8)
    mask = (1 << r)
    flag_index += 1
    if flags[idx] & mask == 0:
        h = hashes.pop()
        return h, flag_index
    if level_index == len(level_widths) - 1:
        # Leaf level with the flag bit set: this is a matched transaction.
        h = hashes.pop()
        tx_acc.append(h)
        return h, flag_index
    # Traverse the left child.
    left_hash, flag_index = _recurse(
        level_widths, level_index+1, node_index*2, hashes, flags, flag_index, tx_acc)
    # Is there a right child, or is this node the odd one out on its level?
    if node_index*2+1 < level_widths[level_index+1]:
        right_hash, flag_index = _recurse(
            level_widths, level_index+1, node_index*2+1, hashes, flags, flag_index, tx_acc)
        if left_hash == right_hash:
            # Identical siblings would let two different proofs hash the same;
            # reject as malformed.
            raise ValueError("merkle hash has same left and right value at node %d" % node_index)
    else:
        # Odd node count on the level below: the last hash pairs with itself.
        right_hash = left_hash
    return double_sha256(left_hash + right_hash), flag_index
def post_unpack_merkleblock(d, f):
    """
    A post-processing "post_unpack" to merkleblock messages.
    It validates the merkle proofs (throwing an exception if there's
    an error), and returns the list of transaction hashes in "tx_hashes".
    The transactions are supposed to be sent immediately after the merkleblock message.
    """
    # Compute the node count of each merkle-tree level, root (width 1)
    # first after the reverse, leaves (total_transactions) last.
    level_widths = []
    count = d["total_transactions"]
    while count > 1:
        level_widths.append(count)
        # (count + 1) // 2: each level up has half the nodes, rounded up.
        count += 1
        count //= 2
    level_widths.append(1)
    level_widths.reverse()
    tx_acc = []
    flags = d["flags"]
    # _recurse pops from the end of the list, so reverse to consume the
    # hashes in wire order.
    hashes = list(reversed(d["hashes"]))
    left_hash, flag_index = _recurse(level_widths, 0, 0, hashes, flags, 0, tx_acc)
    if len(hashes) > 0:
        raise ValueError("extra hashes: %s" % hashes)
    # All flag bytes must be consumed, and any bits past the last consumed
    # one must be zero, or the proof carried unused data.
    idx, r = divmod(flag_index-1, 8)
    if idx != len(flags) - 1:
        raise ValueError("not enough flags consumed")
    if flags[idx] > (1 << (r+1))-1:
        raise ValueError("unconsumed 1 flag bits set")
    # The reconstructed root must match the block header's merkle root.
    if left_hash != d["header"].merkle_root:
        raise ValueError(
            "merkle root %s does not match calculated hash %s" % (
                b2h_rev(d["header"].merkle_root), b2h_rev(left_hash)))
    d["tx_hashes"] = tx_acc
    return d
def post_unpack_version(d, f):
    """
    Post-processor for the "version" message: protocol 70001+ may carry a
    trailing one-byte "relay" flag, parsed here as a boolean when present.
    """
    if d["version"] < 70001:
        return d
    flag_byte = f.read(1)
    if flag_byte:
        d["relay"] = bool(ord(flag_byte))
    return d
def _make_parser(streamer, the_struct):
    """Return a function that parses *the_struct*'s fields out of a stream into a dict.

    *the_struct* is a space-separated list of "name:type" entries; the field
    names and concatenated type codes are split out once, up front.
    """
    pairs = [field.split(":") for field in the_struct.split()]
    names = [p[0] for p in pairs]
    types = "".join(p[1] for p in pairs)

    def parse(message_stream):
        return streamer.parse_as_dict(names, types, message_stream)
    return parse
def make_post_unpack_alert(streamer):
    """
    Build a post-processor for "alert" messages that parses the signed
    payload and attaches the result as an "alert_info" dictionary.
    """
    alert_struct = (
        "version:L relayUntil:Q expiration:Q id:L cancel:L setCancel:[L] minVer:L "
        "maxVer:L setSubVer:[S] priority:L comment:S statusBar:S reserved:S")
    parse_alert_payload = _make_parser(streamer, alert_struct)

    def post_unpack_alert(d, f):
        d["alert_info"] = parse_alert_payload(io.BytesIO(d["payload"]))
        return d
    return post_unpack_alert
def standard_parsing_functions(Block, Tx):
    """
    Return the standard (type_code, (parse, stream)) pairs for the given
    Block and Tx classes.  The result is meant to be fed to
    standard_streamer.
    """
    def stream_block(f, block):
        assert isinstance(block, Block)
        block.stream(f)

    def stream_blockheader(f, blockheader):
        assert isinstance(blockheader, Block)
        blockheader.stream_as_header(f)

    def stream_tx(f, tx):
        assert isinstance(tx, Tx)
        tx.stream(f)

    def parse_byte(f):
        return struct.unpack("B", f.read(1))[0]

    def stream_byte(f, b):
        f.write(struct.pack("B", b))

    extra_pairs = [
        ("A", (PeerAddress.parse, lambda f, peer_addr: peer_addr.stream(f))),
        ("v", (InvItem.parse, lambda f, inv_item: inv_item.stream(f))),
        ("T", (Tx.parse, stream_tx)),
        ("B", (Block.parse, stream_block)),
        ("z", (Block.parse_as_header, stream_blockheader)),
        ("1", (parse_byte, stream_byte)),
    ]
    return list(bitcoin_streamer.STREAMER_FUNCTIONS.items()) + extra_pairs
def standard_streamer(parsing_functions, parse_bc_int=bitcoin_streamer.parse_bc_int):
    """
    Create a bitcoin_streamer, which parses and packs using the bitcoin protocol
    (mostly the custom way arrays and integers are parsed and packed).

    :param parsing_functions: (type_code, (parse, stream)) pairs, e.g. from
        standard_parsing_functions.
    :param parse_bc_int: function used to parse array-count prefixes.
    """
    streamer = bitcoin_streamer.Streamer()
    # Fix: use the caller-supplied parser.  The original hard-coded
    # bitcoin_streamer.parse_bc_int here, silently ignoring the parameter.
    streamer.register_array_count_parse(parse_bc_int)
    streamer.register_functions(parsing_functions)
    return streamer
def standard_message_post_unpacks(streamer):
    """
    The standard message post-processors: one for the version message,
    one for the alert message, and one for the merkleblock message.
    """
    return {
        "version": post_unpack_version,
        "alert": make_post_unpack_alert(streamer),
        "merkleblock": post_unpack_merkleblock,
    }
def make_parser_and_packer(streamer, message_dict, message_post_unpacks):
"""
Create a parser and a packer for a peer's network messages.
streamer:
used in conjunction with the message_dict. The message_dict turns a message into
a string specifying the fields, and this dictionary specifies how to pack or unpack
fields to or from bytes
message_dict:
a dictionary specifying how to pack or unpack the various messages like "version"
message_post_unpacks:
a dictionary specifying functions to call to postprocess message to, for example
extract submessages, like in "alert"
"""
message_parsers = dict((k, _make_parser(streamer, v)) for k, v in message_dict.items())
def parse_from_data(message_name, data):
message_stream = io.BytesIO(data)
parser = message_parsers.get(message_name)
if parser is None:
raise LookupError("unknown message: %s" % message_name)
d = parser(message_stream)
post_unpack = message_post_unpacks.get(message_name)
if post_unpack:
d = post_unpack(d, message_stream)
return d
def pack_from_data(message_name, **kwargs):
the_struct = message_dict[message_name]
if not the_struct:
return b''
f = io.BytesIO()
the_fields = the_struct.split(" ")
pairs = [t.split(":") for t in the_fields]
for name |
lehmannro/pootle | local_apps/pootle_app/urls.py | Python | gpl-2.0 | 1,109 | 0.003607 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from django.conf.urls.defaults import *
# URL dispatch for the Pootle app: admin screens are matched first, then
# the index and per-language views, both mounted at the root prefix.
urlpatterns = patterns('',
    (r'^admin', include('pootle_app.views.admin.urls')),
    (r'', include('pootle_app.views.index.urls')),
    (r'', include('pootle_app.views.language.urls')),
)
|
undergroundtheater/gameheart | gameheart/urls.py | Python | mit | 12,257 | 0.011096 | # gameheart.urls
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from django.contrib import admin
#from paypay.standard.ipn import urls
from gameheart.entities import views
from gameheart.entities import forms
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gameheart.views.home', name='home'),
# url(r'^gameheart/', include('gameheart.foo.urls')),
url(r'^test/', views.test, name='test'),
# Home page redirect
url(r'^$', RedirectView.as_view(url='/portal/')),
# ADMIN
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
# USER VIEWS
url(r'^login/', views.UserLoginView, name='Login'),
url(r'^logout/', views.UserLogoutView, name='Logout'),
url(r'^terms/', views.UserTermsView, name='Terms'),
url(r'^users/new/', views.UserCreateView, name='UserCreate'),
url(r'^users/index/', views.UserIndexView, kwargs=dict(nform=forms.UserForm), name='UserIndex'),
url(r'^users/detail/(?P<pkid>\d+)/$', views.UserDetailView, name='UserDetail'),
url(r'^users/reset/(?P<pkid>\d+)/$', vie | ws.UserResetView, name='UserReset'),
url(r'^account/detail/', views.UserAccountView, name='Account'),
url(r'^account/password/', views.UserPasswordView, name='UserPassword | '),
# PORTAL
url(r'^portal/', views.Portal, name='Portal'),
url(r'^account/favorites/index/', views.UserFavoriteView, name='UserFavorites'),
### ##Create Views## ###
# Chapter Type
url(r'^types/chapters/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.ChapterTypeForm), name='ChapterTypeCreateView'),
# Chapter
url(r'^chapters/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.ChapterForm), name='ChapterCreateView'),
# Event
url(r'^chapters/events/new', views.FlexFormCreateView, kwargs=dict(nform=forms.EventForm), name='EventCreateView'),
# Sign-in
url(r'^signin/$', views.AttendanceCreateView, name='AttendanceCreateView'),
url(r'^signin/(?P<pkid>\d+)/$', views.AttendanceGameCreateView, name='AttendanceGameCreateView'),
# Chapter Address
url(r'^chapters/addresses/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.ChapterAddressForm), name='ChapterAddressCreateView'),
# Staff Type
url(r'^types/staff/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.StaffTypeForm), name='StaffTypeCreateView'),
# Event
url(r'^chapters/events/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.EventForm), name='EventCreateView'),
# Character Type
url(r'^types/characters/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.CharacterTypeForm), name='CharacterTypeCreateView'),
# Character
url(r'^characters/new/', views.CharacterCreateView, name='CharacterCreateView'),
# Trait Type
url(r'^types/traits/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.TraitTypeForm), name='TraitTypeCreateView'),
# Trait
url(r'^traits/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.TraitForm), name='TraitCreateView'),
# Character Owner
url(r'^characterowners/new/', views.FlexFormCreateView, kwargs=dict(nform=forms.CharacterOwnerForm), name='CharacterOwnerCreateView'),
# Note
url(r'^notes/new/', views.NoteCreateView, name='NoteCreateView'),
### ## Indexes ## ###
# ChapterTypes
url(r'^types/chapters/index/', views.FlexFormIndexView, kwargs=dict(nform=forms.ChapterTypeForm), name='ChapterTypeIndex'),
# Chapters
url(r'^chapters/index/', views.FlexFormIndexView, kwargs=dict(nform=forms.ChapterForm), name='ChapterIndex'),
# Chapters ST Index
url(r'^chapters/stindex/', views.STChapterIndexView, kwargs=dict(nform=forms.ChapterForm), name='ChapterIndex'),
# Chapter Addresses
url(r'^chapters/addresses/index/', views.FlexFormIndexView, kwargs=dict(nform=forms.ChapterAddressForm), name='ChapterAddressIndex'),
# Event
url(r'^chapters/events/index/', views.FlexFormIndexView, kwargs=dict(nform=forms.EventForm), name='EventIndex'),
url(r'^events/upcoming/', views.UpcomingEventsView, kwargs=dict(), name='UpcomingEvents'),
url(r'^events/recent/', views.RecentEventsView, kwargs=dict(), name='RecentEvents'),
# Staff Type
url(r'^types/staff/index/', views.FlexFormIndexView, kwargs=dict(nform=forms.StaffTypeForm), name='StaffTypeIndex'),
# Character Types
url(r'^types/characters/index/', views.FlexFormIndexView, kwargs=dict(nform=forms.CharacterTypeForm), name='CharacterTypeIndex'),
# Character Index
url(r'^characters/index/', views.CharacterIndexView, kwargs=dict(nviewtype='owner'), name='CharacterIndex'),
#url(r'^notes/index/(?P<ntagname>\w+)/', views.NoteIndexView, name='NoteIndex'),
# Admin Character Index
url(r'^director/characters/index/', views.CharacterIndexView, kwargs=dict(nviewtype='director'), name='CharacterIndex'),
# ST Characters
url(r'^characters/stindex/', views.CharacterIndexView, kwargs=dict(nviewtype='st'), name='CharacterIndex'),
# User Favorites
url(r'^account/favorites/users/index/', views.FavoriteIndexView, kwargs=dict(nform=forms.FavoriteUserForm), name='FavoriteUserIndex'),
# Chapter Favorites
url(r'^account/favorites/chapters/index/', views.FavoriteIndexView, kwargs=dict(nform=forms.FavoriteChapterForm), name='FavoriteChapterIndex'),
# Character Favorites
url(r'^account/favorites/characters/index/', views.FavoriteIndexView, kwargs=dict(nform=forms.FavoriteCharacterForm), name='FavoriteCharacterIndex'),
# Trait Types
url(r'^types/traits/index/', views.FlexFormIndexView, kwargs=dict(nform=forms.TraitTypeForm), name='TraitTypeIndex'),
# Traits
url(r'^traits/index/', views.TraitIndexView, kwargs=dict(nform=forms.TraitForm), name='TraitIndex'),
# Notes
url(r'^notes/index/(?P<ntagname>\w+)/', views.NoteIndexView, name='NoteIndex'),
# Vocabulary
url(r'^vocabulary/index/', views.VocabularyIndexView, name='VocabularyIndex'),
### ## Detail Views ## ###
# Chapter Type Detail
url(r'^types/chapters/(?P<pkid>\d+)/$', views.FlexFormDetailView, kwargs=dict(nform=forms.ChapterTypeForm), name='ChapterTypeDetail'),
# Chapter Detail
url(r'^chapters/(?P<pkid>\d+)/$', views.FlexFormDetailViewLinked, kwargs=dict(nform=forms.ChapterForm, ntemplate='flexdetailviewlinked.html'), name='ChapterDetail'),
# Chapter Address Detail
url(r'^chapters/addresses/(?P<pkid>\d+)/$', views.FlexFormDetailView, kwargs=dict(nform=forms.ChapterAddressForm), name='ChapterAddressDetail'),
# Event
url(r'^chapters/events/(?P<pkid>\d+)/$', views.EventDetailView, name='EventDetail'),
# Staff Events
url(r'^staff/events/$', views.ChapterEventsView, name='ChapterEventIndex'),
# Staff Type Detail
url(r'^types/staff/(?P<pkid>\d+)/$', views.FlexFormDetailView, kwargs=dict(nform=forms.StaffTypeForm), name='StaffTypeDetail'),
# Character Type Detail
url(r'^types/characters/(?P<pkid>\d+)/$', views.FlexFormDetailView, kwargs=dict(nform=forms.CharacterTypeForm), name='CharacterTypeDetail'),
# Character Trait Detail
url(r'^characters/traits/(?P<pkid>\d+)/$', views.CharacterTraitDetailView, name='CharacterTraitDetail'),
# Character Trait Rename
url(r'^characters/(?P<pkid>\d+)/traits/labels/', views.CharacterTraitLabelView, name='CharacterTraitLabel'),
# Character Detail
url(r'^characters/(?P<pkid>\d+)/$', views.CharacterDetailView, kwargs=dict(nform=forms.CharacterForm), name='CharacterDetail'),
# Character Favorites
url(r'^account/favorites/characters/(?P<pkid>\d+)/', views.FlexFormDetailView, kwargs=dict(nform=forms.CharacterForm), name='FavoriteCharacter'),
# Trait Type Detail
url(r'^types/traits/(?P<pkid>\d+)/$', views.FlexFormDetailView, kwargs=dict(nform=forms.TraitTypeForm), name='TraitTypeDetail'),
# Trait Detail
url(r'^traits/(?P<pkid>\d+)/$', views.FlexFormDetailView, kwargs=dict(nform=forms.TraitForm), name='TraitDetail'),
# Note Detail
url |
lavjain/incubator-hawq | tools/bin/pythonSrc/pychecker-0.8.18/test_input/test96.py | Python | apache-2.0 | 387 | 0.020672 | '''test using string.find() in if s | tatement as a boolean. it returns an int'''
class X:
'''check string.find() usage'''
def foo(self):
s = 'abc'
if s.find('ab'):
print 'this is a bug'
if not s.find('ab'):
print 'this is also a bug'
if s.find('ab' | ) >= 0:
print 'this is not a bug'
if s.find('ab') < 0:
print 'this also is not a bug'
|
ENCODE-DCC/snovault | src/snovault/schema_graph.py | Python | mit | 3,775 | 0.001854 | import logging
from subprocess import (
Popen,
PIPE,
call as subprocess_call
)
from collections import defaultdict
from past.builtins import basestring
from pyramid.response import Response
from pyramid.view import view_config
from xml.sax.saxutils import quoteattr, escape
from snovault import TYPES
log = logging.getLogger(__name__)
def includeme(config):
    """Pyramid entry point: register the schema-graph routes and scan this module's views."""
    config.add_route('graph_svg', '/profiles/graph.svg')
    config.add_route('graph_dot', '/profiles/graph.dot')
    config.scan(__name__)
def node(type_name, props):
    """Yield graphviz HTML-table label lines describing one item type.

    Emits a header row (which doubles as the "uuid" port and links to the
    type's profile), one row per submittable property — link properties
    bolded, array properties suffixed with " []" — and a closing line.
    """
    yield (
        '{type_name} [shape=plaintext label=<\n'
        '  <table border="1" cellborder="0" cellspacing="0" align="left">\n'
        '  <tr><td PORT="uuid" border="1" sides="B" bgcolor="lavender" href="/profiles/{type_name}.json">{type_name}</td></tr>'
    ).format(type_name=type_name)
    for prop_name, prop in sorted(props.items()):
        if prop_name == 'uuid' or prop.get('notSubmittable'):
            continue
        label = escape(prop_name)
        if 'items' in prop:
            # Array property: mark the label and describe the item sub-schema.
            label += ' []'
            prop = prop['items']
        if 'linkTo' in prop:
            label = '<b>' + label + '</b>'
        yield '  <tr><td PORT={name}>{label}</td></tr>'.format(
            name=quoteattr(prop_name), label=label)
    yield '  </table>>];'
def edges(source, name, linkTo, exclude, subclasses):
    """Return graphviz edge statements from *source*:*name* to each link target.

    *linkTo* may be a single type name or a list of names; a single name is
    expanded through *subclasses* when it is a known abstract base.  Targets
    listed in *exclude* (or the source type itself) produce no edge.
    """
    if isinstance(linkTo, basestring):
        linkTo = subclasses.get(linkTo, [linkTo])
    skip = [source] + exclude
    qname = quoteattr(name)
    return [
        '{source}:{name} -> {target}:uuid;'.format(source=source, name=qname, target=target)
        for target in linkTo
        if target not in skip
    ]
def digraph(types, exclude=None):
    """Build graphviz "dot" source for all registered item types.

    :param types: mapping whose values carry .name, .base_types, .schema and
        .item_type attributes (the registry's TypeInfo objects).
    :param exclude: property names to omit as edges; defaults to the noisy
        ownership links (submitted_by/lab/award).
    :return: the complete digraph source as a single string.
    """
    if not exclude:
        exclude = ['submitted_by', 'lab', 'award']
    out = [
        'digraph schema {',
        'rankdir=LR',
    ]
    # Map each non-terminal base type to its concrete subclasses so that an
    # edge pointing at an abstract type can fan out to real targets.
    subclasses = defaultdict(list)
    for type_info in sorted(types.values(), key=lambda ti: ti.name):
        for base in type_info.base_types[:-1]:
            subclasses[base].append(type_info.name)
    for type_info in sorted(types.values(), key=lambda ti: ti.name):
        if type_info.schema is None:
            continue
        # Skip the synthetic testing_* fixture types.
        if type_info.item_type.startswith('testing_'):
            continue
        out.extend(node(type_info.name, type_info.schema['properties']))
        for name, prop in type_info.schema['properties'].items():
            if name in exclude or prop.get('notSubmittable'):
                continue
            # For array properties the linkTo lives on the item sub-schema.
            prop = prop.get('items', prop)
            if 'linkTo' in prop:
                out.extend(edges(type_info.name, name, prop['linkTo'], exclude, subclasses))
    out.append('}')
    return '\n'.join(out)
@view_config(route_name='graph_dot', request_method='GET')
def schema_dot(request):
    """Serve the schema graph as graphviz "dot" source text.

    ?exclude=<prop> may be repeated to drop additional link properties.
    (Fix: the Response keyword had been garbled to "char | set="; restored
    to charset='utf-8'.)
    """
    dot = digraph(request.registry[TYPES].by_item_type, request.params.getall('exclude'))
    return Response(dot, content_type='text/vnd.graphviz', charset='utf-8')
@view_config(route_name='graph_svg', request_method='GET')
def schema_svg(request):
    """Serve the schema graph rendered to SVG via the graphviz "dot" binary.

    Falls back to a 404-style dict when graphviz is missing or rendering fails.
    """
    try:
        # Only attempt rendering when the "dot" binary is on PATH.
        return_code = subprocess_call(['which', 'dot'])
        if return_code == 0:
            dot = digraph(request.registry[TYPES].by_item_type, request.params.getall('exclude'))
            p = Popen(['dot', '-Tsvg'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
            svg, err = p.communicate(dot.encode('utf-8'))
            assert p.returncode == 0, err.decode('utf-8')
            return Response(svg, content_type='image/svg+xml', charset='utf-8')
    except Exception as excpt:
        # Fix: the original logged the literal string "{repr(excpt)}" (missing
        # f-prefix), so the exception was never interpolated.  Use lazy
        # %-style logging so formatting only happens when the record is emitted.
        log.warning("graph.svg is not available exception: %r", excpt)
    msg = 'graph.svg is not available'
    log.warning(msg)
    return {'status_code': 404, 'message': msg}
|
silverbulleters/vanessa-behavoir | tools/Sikuli/FeatureLoadMany.sikuli/FeatureLoadMany.py | Python | apache-2.0 | 77 | 0.012987 | cli | ck("Sarnvzsm.png")
sleep(1.5)
hover("iarpvambqmai.png")
sleep(1.5)
ex | it(0) |
SPlanzer/AIMS | ElectoralAddress/Upload.py | Python | bsd-3-clause | 4,556 | 0.016023 |
from getpass import getuser
from Job import Job
from datetime import datetime
import os.path
import time
import re
import Database
class Upload( object ):
    """One bulk street-address upload, backed by the elc_* stored procedures.

    Wraps a row of upload metadata and can emit the production SQL script
    (plus a matching operator-instruction .txt file) via writeSql().

    Fix: the Database.execute call in list() had been garbled by stray "|"
    characters ("Database. | execute", "| uploads.append"); restored.
    """

    @classmethod
    def CreateUpload( cls, filename=None ):
        """Create a new upload record owned by the current OS user and return it."""
        if not filename:
            filename = Upload.defaultNewFilename()
        id = Database.executeScalar('elc_CreateUpload',getuser(),filename)
        return Upload(id)

    @classmethod
    def list( cls ):
        """Return a summary dict for every upload on record."""
        uploads=[]
        for r in Database.execute('select upl_id, created_by, creation_time, filename, n_insert, n_delete from elc_GetUploadDetails(NULL)'):
            created_date = r[2].strftime('%d-%b-%Y')
            uploads.append(dict(
                upl_id=r[0],
                created_by=r[1],
                creation_time=r[2],
                created_date=created_date,
                filename=r[3],
                n_insert=r[4],
                n_delete=r[5]
                ))
        return uploads

    def __init__( self, id ):
        """Load the metadata row for upload *id*."""
        r = Database.executeRow('select created_by, creation_time, filename, n_insert, n_delete from elc_GetUploadDetails(%s)',id)
        self._id = id
        self._created_by = r[0]
        self._creation_time = r[1]
        self._filename = r[2]
        self._n_insert = r[3]
        self._n_delete = r[4]

    @classmethod
    def defaultNewFilename( cls, upload_date=None ):
        """Default script name, e.g. "sad_01Jan20.sql" (date defaults to now)."""
        if not isinstance(upload_date,datetime):
            upload_date = datetime.now()
        return 'sad_'+upload_date.strftime('%d%b%y')+'.sql'

    # Simple read-only accessors over the loaded metadata row.
    def id( self ): return self._id
    def created_by( self ): return self._created_by
    def creation_time( self ): return self._creation_time
    def filename( self ): return self._filename
    def n_insert( self ): return self._n_insert
    def n_delete( self ): return self._n_delete

    def defaultFilename( self ):
        """Default script name derived from this upload's creation time."""
        return Upload.defaultNewFilename( self._creation_time )

    def addJob( self, job ):
        """Attach a Job (instance or numeric id) to this upload and persist it."""
        if type(job) == int:
            job = Job(job)
        job.addToUpload( self )
        job.save()

    def writeSql( self, filename ):
        """Write the production SQL script to *filename* plus a sibling .txt
        instruction file, and record the script basename on the upload."""
        sqlfile = open(filename,'w')
        basename = os.path.splitext(os.path.basename(filename))[0]
        txtfilename = os.path.splitext(filename)[0] + '.txt'
        # Guard against the .sql file itself already ending in ".txt".
        if txtfilename == filename:
            txtfilename = txtfilename + '.txt'
        txtfile = open(txtfilename,'w')
        # Header
        sqlfile.write("-- Bulk update of crs_street_address\n")
        sqlfile.write("-- Upload id: %d\n" % (self._id,))
        sqlfile.write("-- Created by: %s\n" % (self._created_by,))
        sqlfile.write("-- Created on: %s\n" %
            (self._creation_time.strftime('%d %B %Y at %H:%M'),))
        sqlfile.write("\n")
        # Insertions
        sqlfile.write("\n")
        nins = 0
        for r in Database.execute('SELECT housenumber, range_low, range_high, status, rcl_id, rna_id, wkt, sufi from elc_UploadNewAddresses(%s)',self._id):
            # Pull "x y" out of the WKT point; x is shifted by -160 and the
            # value is prefixed with "1 " — presumably an SRID/longitude
            # offset convention of the target database; TODO confirm.
            m = re.search(r"(\d+)(\.?\d*)\s+(\-\d+\.?\d*)",r[6])
            wkt = '1 POINT(%d%s %s)'%(int(m.group(1))-160,m.group(2),m.group(3))
            range_high = r[2] if r[2] != None else 'null'
            # DELE rows are written as historic; everything else is current.
            if r[3] == "DELE": status = "HIST"
            else: status = "CURR"
            # Newly assigned addresses have no sufi yet.
            if r[3] == 'NEWA': sufi = 'null'
            else: sufi = r[7]
            unofficial_flag = "N"
            sqlfile.write('''
INSERT INTO crs_street_address_stage(house_number, range_low, range_high, status, unofficial_flag, rcl_id, rna_id, shape, sufi) VALUES
('%s',%s,%s,'%s','%s',%d,%d,'%s', %s);''' % (r[0],r[1], range_high,status,unofficial_flag,r[4],r[5],wkt, sufi))
            nins += 1
        sqlfile.write("\n")
        sqlfile.write("\n")
        sqlfile.write(" EXECUTE PROCEDURE cp_cel_AddressStageUpdate();\n")
        sqlfile.write("\n")
        sqlfile.close()
        txtfile.write('''
FTP the attached "%s" file to the production database server (crsprd1).
As the user "crsprd" run the script as follows:
sqf %s
The expected output is:
Database selected.
(constant)
Bulk insert of street addresses: id %d
1 row(s) retrieved.
1 row(s) inserted. ... repeated %d times
(constant)
Bulk update completed: id %d
1 row(s) retrieved.
Database closed.
''' % (basename,basename,self._id,nins,self._id))
        txtfile.close()
        Database.execute('elc_SetUploadFilename',self._id,basename)
|
DenL/pogom-webhook | pogom/pgoapi/protos/POGOProtos/Inventory/AppliedItem_pb2.py | Python | mit | 3,946 | 0.007603 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Inventory/AppliedItem.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Inventory.Item import ItemId_pb2 as POGOProtos_dot_Inventory_dot_Item_dot_ItemId__pb2
from POGOProtos.Inventory.Item import ItemType_pb2 as POGOProtos_dot_Inventory_dot_Item_dot_ItemType__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Inventory/AppliedItem.proto',
package='POGOProtos.Inventory',
syntax='proto3',
serialized_pb=_b('\n&POGOProtos/Inventory/AppliedItem.proto\x12\x14POGOProtos.Inventory\x1a&POGOProtos/Inventory/Item/ItemId.proto\x1a(POGOProtos/Inventory/Item/ItemType.proto\"\xa0\x01\n\x0b\x41ppliedItem\x12\x32\n\x07item_id\x18\x01 \x01(\x0e\x32!.POGOProtos.Inventory.Item.ItemId\x12\x36\n\titem_type\x18\x02 \x01(\x0e\x32#.POGOProtos.Inventory.Item.ItemType\x12\x11\n\texpire_ms\x18\x03 \x01(\x03\x12\x12\n\napplied_ms\x18\x04 \x01(\x03\x62\x06proto3')
,
dependencies=[POGOProtos_dot_Inventory_dot_Item_dot_ItemId__pb2.DESCRIPTOR,POGOProtos_dot_Inventory_dot_Item_dot_ItemType__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_APPLIEDITEM = _descriptor.Descriptor(
name='AppliedItem',
full_name='POGOProtos.Inventory.AppliedItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='item_id', full_name='POGOProtos.Inventory.AppliedItem.item_id', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='item_type', full_name='POGOProtos.Inventory.AppliedItem.item_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='expire_ms', full_name='POGOProtos.Inventory.AppliedItem.expire_ms', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='applied_ms', full_name='POGOProtos.Inventory.AppliedItem.applied_ms', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=147,
serialized_end=307,
)
| _APPLIEDITEM.fields_by_name['item_id'].enum_type = POGOProtos_dot_Inventory_dot_Item_dot_ItemId__pb2._ITEMID
_APPLIEDITEM.fields_by_name['item_type'].enum_type = POGOProtos_dot_Inventory_d | ot_Item_dot_ItemType__pb2._ITEMTYPE
DESCRIPTOR.message_types_by_name['AppliedItem'] = _APPLIEDITEM
AppliedItem = _reflection.GeneratedProtocolMessageType('AppliedItem', (_message.Message,), dict(
DESCRIPTOR = _APPLIEDITEM,
__module__ = 'POGOProtos.Inventory.AppliedItem_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Inventory.AppliedItem)
))
_sym_db.RegisterMessage(AppliedItem)
# @@protoc_insertion_point(module_scope)
|
wuhaochen/multinet | multinet/tests/test_classes.py | Python | mit | 11,570 | 0.001642 | """
Test the Multinet Class.
"""
import multinet as mn
import networkx as nx
class TestMultinet(object):
    """Unit tests for the undirected mn.Multinet multiplex-graph class."""

    def test_build_multinet(self):
        """
        Test building Multinet objects.
        """
        mg = mn.Multinet()
        assert mg.is_directed() == False
        mg.add_edge(0, 1, 'L1')
        mg.add_edge(0, 1, 'L2')
        # Undirected: (1, 0) is the same edge as (0, 1) in layer L2.
        mg.add_edge(1, 0, 'L2')
        mg.add_edge(1, 2, 'L2')
        assert 'L1' in mg.layers()
        assert 'L2' in mg.layers()
        assert len(mg.edgelets) == 3
        assert mg.number_of_nodes() == 3
        assert mg.number_of_edges() == 2
        assert mg.number_of_layers() == 2
        assert mg.number_of_edgelets() == 3
        # Remove non-existed edge.
        mg.remove_edgelet(2, 3, 'L3')
        mg.remove_edgelet(0, 1, 'L2')
        assert mg.number_of_nodes() == 3
        assert mg.number_of_edges() == 2
        assert mg.number_of_layers() == 2
        assert mg.number_of_edgelets() == 2
        mg.remove_edgelet(0, 1, 'L1')
        assert mg.number_of_nodes() == 3
        assert mg.number_of_edges() == 1
        assert mg.number_of_layers() == 2
        assert mg.number_of_edgelets() == 1
        assert len(mg.empty_layers()) == 1
        mg.remove_empty_layers()
        assert mg.number_of_layers() == 1

    def test_aggregate_edge(self):
        """add_edge overwrites a layer weight; aggregate_edge adds onto it."""
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        assert mg[0][1][mg.cid]['L1'] == 5
        assert mg[1][2][mg.cid]['L2'] == 6
        mg.add_edge(0, 1, 'L1', weight=10)
        assert mg[0][1][mg.cid]['L1'] == 10
        mg.aggregate_edge(0, 1, 'L1', weight=5)
        assert mg[0][1][mg.cid]['L1'] == 15
        # Aggregating onto a missing edge creates it.
        mg.aggregate_edge(2, 3, 'L2', weight=7)
        assert mg[2][3][mg.cid]['L2'] == 7

    def test_sub_layer(self):
        """sub_layer extracts one layer as a plain nx.Graph."""
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        sg = mg.sub_layer('L1')
        assert type(sg) == nx.Graph
        assert sg.number_of_nodes() == 3
        assert sg.number_of_edges() == 1
        sg = mg.sub_layer('L2', remove_isolates=True)
        assert type(sg) == nx.Graph
        assert sg.number_of_nodes() == 2
        assert sg.number_of_edges() == 1

    def test_sub_layers(self):
        """sub_layers extracts several layers as a new Multinet."""
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        mg.add_edge(1, 2, 'L3', weight=2)
        sg = mg.sub_layers(['L1', 'L2'])
        assert type(sg) == mn.Multinet
        assert sg.number_of_nodes() == 3
        assert sg.number_of_edges() == 2
        assert sg.number_of_layers() == 2
        sg = mg.sub_layers(['L2', 'L3'], remove_isolates=True)
        assert type(sg) == mn.Multinet
        assert sg.number_of_nodes() == 2
        assert sg.number_of_edges() == 1
        assert sg.number_of_layers() == 2

    def test_aggregated(self):
        """aggregated() flattens layers, summing weights and counting layers."""
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        mg.add_edge(1, 2, 'L3', weight=2)
        ag = mg.aggregated()
        assert type(ag) == nx.Graph
        assert ag.number_of_nodes() == 3
        assert ag.number_of_edges() == 2
        assert ag[1][2]['weight'] == 8
        assert ag[1][2]['nlayer'] == 2

    def test_merge_layers(self):
        """merge_layers combines layers (default name joins with '_')."""
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        mg.add_edge(1, 2, 'L3', weight=2)
        mg.merge_layers(['L1', 'L2'])
        assert 'L1' not in mg.layers()
        assert 'L2' not in mg.layers()
        assert 'L1_L2' in mg.layers()
        assert mg.number_of_layers() == 2
        assert mg.number_of_nodes() == 3
        assert mg.number_of_edges() == 2
        assert mg[0][1][mg.cid]['L1_L2'] == 5
        assert mg[1][2][mg.cid]['L1_L2'] == 6
        # With an explicit new_name, overlapping edge weights are summed.
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        mg.add_edge(1, 2, 'L3', weight=2)
        mg.merge_layers(['L2', 'L3'], new_name='LN')
        assert 'L2' not in mg.layers()
        assert 'L3' not in mg.layers()
        assert 'LN' in mg.layers()
        assert mg.number_of_layers() == 2
        assert mg.number_of_nodes() == 3
        assert mg.number_of_edges() == 2
        assert mg[0][1][mg.cid]['L1'] == 5
        assert mg[1][2][mg.cid]['LN'] == 8

    def test_add_layer(self):
        """add_layer imports an nx.Graph as a new layer (default weight 1)."""
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        sg = nx.Graph()
        sg.add_edge(1, 2, weight=7)
        sg.add_edge(2, 3)
        mg.add_layer(sg, 'L3')
        assert mg.number_of_nodes() == 4
        assert mg.number_of_edges() == 3
        assert mg.number_of_layers() == 3
        assert mg[1][2][mg.cid]['L2'] == 6
        assert mg[1][2][mg.cid]['L3'] == 7
        assert mg[2][3][mg.cid]['L3'] == 1

    def test_remove_layer(self):
        """remove_layer drops a layer; edges left with no layers disappear."""
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        mg.add_edge(1, 2, 'L3', weight=2)
        mg.remove_layer('L3')
        assert mg.number_of_nodes() == 3
        assert mg.number_of_edges() == 2
        assert mg.number_of_layers() == 2
        mg = mn.Multinet()
        mg.add_edge(0, 1, 'L1', weight=5)
        mg.add_edge(1, 2, 'L2', weight=6)
        mg.add_edge(1, 2, 'L3', weight=2)
        mg.remove_layer('L1')
        assert mg.number_of_nodes() == 3
        assert mg.number_of_edges() == 1
        assert mg.number_of_layers() == 2
class TestDiMultinet(object):
def test_build_dimultinet(self):
"""
Test building Multinet objects.
"""
mg = mn.DiMultinet()
assert mg.is_directed() == True
mg.add_edge(0, 1, 'L1')
mg.add_edge(0, 1, 'L2')
mg.add_edge(1, 0, 'L2')
mg.add_edge(1, 2, 'L2')
assert 'L1' in mg.layers()
assert 'L2' in mg.layers()
assert len(mg.edgelets) == 4
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 4
# Remove non-existed edge.
mg.remove_edgelet(2, 3, 'L3')
mg.remove_edgelet(0, 1, 'L2')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 3
mg.remove_edgelet(0, 1, 'L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 2
assert len(mg.empty_layers()) == 1
mg.remove_empty_layers()
assert mg.number_of_layers() == 1
def test_aggregate_edge(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['L2'] == 6
mg.add_edge(0, 1, 'L1', weight=10)
assert mg[0][1][mg.cid]['L1'] == 10
mg.aggregate_edge(0, 1, 'L1', weight=5)
assert mg[0][1][mg.ci | d]['L1'] == 15
mg.aggregate_edge(2, 3, 'L2', weight=7)
assert mg[2][3][mg.cid]['L2'] == 7
def test_sub_layer(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = mg.sub_layer('L1')
| assert type(sg) == nx.DiGraph
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 1
sg = mg.sub_layer('L2', remove_isolates=True)
assert type(sg) == nx.DiGraph
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
def test_sub_layers(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
sg = mg.sub_layers(['L1', 'L2'])
assert type(sg) == mn.DiMultinet
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 2
assert sg.number_of_layers() == 2
s |
hhauer/myinfo | CustomUser/admin.py | Python | mit | 1,737 | 0.003454 | from django import forms
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin
from CustomUser.models import PSUCustomUser
# Register your models here.
class UserChangeForm(forms.ModelForm):
class Meta:
model = PSUCustomUser
fields = ('username', 'is_active', 'is_admin')
class UserCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.HiddenInput, initial='!')
password2 = forms.CharField(label='Password confirmation', widget=forms.HiddenInput, initial='!')
class Meta:
model = PSUCustomUser
fields = ('username', 'is_active', 'is_admin')
def save(self, commit=True):
user = super(UserCreationForm, self).save(commit=False)
user.set_unusable_password()
if commit:
user.save()
return user
class MyUserAdmin(UserAdmin):
# The forms to add and change user instances
form = UserChangeForm
add_form = UserCreationForm
# The fields to be used in displaying the User model.
# These override the definitions on the base UserAdmin
# that reference specific fields on auth.User.
list_display = ('username', 'is_active', 'is_admin')
list_filter = ('is_admin',)
fieldsets = (
(None, {'fields': (' | username', 'is_active')}),
('Permissions', {'fields': ('is_admin',)}),
)
search_fields = ('username',)
ordering = ('username',)
filter_horizontal = ()
# Now register the new UserAdmin...
admin.site.register(PSUCustomUser, MyUserAdmin)
# ... and, since we're not using Django's built-in permissions,
# unregister the Group | model from admin.
admin.site.unregister(Group) |
Quva/sparkplug | sparkplug/helpers/helpers.py | Python | apache-2.0 | 339 | 0.020649 |
import sy | s
def dictContains(D, key):
if sys.version_info[0] == 2:
return D.has_key(key)
elif sys.version_info[0] == 3:
return key in D
else:
raise Exception("No support for self.__dictContains for python major " +
| "version: {}".format(sys.version_info[0]))
|
jtoppins/beaker | LabController/src/bkr/labcontroller/provision.py | Python | gpl-2.0 | 14,901 | 0.003624 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import sys
import os, os.path
import errno
import logging
import time
import random
import signal
import daemon
import datetime
import pkg_resources
import subprocess
import xmlrpclib
from daemon import pidfile
from optparse import OptionParser
import gevent, gevent.hub, gevent.socket, gevent.event, gevent.monkey
from bkr.labcontroller.exceptions import ShutdownException
from bkr.log import log_to_stream, log_to_syslog
from bkr.common.helpers import SensitiveUnicode, total_seconds
from bkr.labcontroller.config import load_conf, get_conf
from bkr.labcontroller.proxy import ProxyHelper
from bkr.labcontroller import netboot
logger = logging.getLogger(__name__)
class CommandQueuePoller(ProxyHelper):
def __init__(self, *args, **kwargs):
super(CommandQueuePoller, self).__init__(*args, **kwargs)
self.commands = {} #: dict of (id -> command info) for running commands
self.greenlets = {} #: dict of (command id -> greenlet which is running it)
self.last_command_datetime = {} # Last time a command was run against a system.
def get_queued_commands(self):
try:
commands = self.hub.labcontrollers.get_queued_command_details()
except xmlrpclib.Fault as fault:
if 'Anonymous access denied' in fault.faultString:
logger.debug('Session expired, re-authenticating')
self.hub._login()
commands = self.hub.labcontrollers.get_queued_command_details()
else:
raise
for command in commands:
# The 'is not None' check is important as we do not want to
# stringify the None type
if 'power' in command and 'passwd' in command['power'] and \
command['power']['passwd'] is not None:
command['power']['passwd'] = SensitiveUnicode(command['power']['passwd'])
return commands
def get_running_command_ids(self):
try:
ids = self.hub.labcontrollers.get_running_command_ids()
except xmlrpclib.Fault as fault:
if 'Anonymous access denied' in fault.faultString:
logger.debug('Session expired, re-authenticating')
self.hub._login()
ids = self.hub.labcontrollers.get_running_command_ids()
else:
raise
return ids
def mark_command_running(self, id):
self.hub.labcontrollers.mark_command_running(id)
def mark_command_completed(self, id):
self.hub.labcontrollers.mark_command_completed(id)
def mark_command_failed(self, id, message, system_broken):
self.hub.labcontrollers.mark_command_failed(id, message, system_broken)
def mark_command_aborted(self, id, message):
self.hub.labcontrollers.mark_command_aborted(id, message)
def clear_running_commands(self, message):
self.hub.labcontrollers.clear_running_commands(message)
def clear_orphaned_commands(self):
running_command_ids = self.get_running_command_ids()
orphaned_command_ids = set(running_command_ids).difference(self.commands.keys())
for id in orphaned_command_ids:
self.mark_command_aborted(id, "Command orphaned, aborting")
def poll(self):
logger.debug('Clearing orphaned commands')
self.clear_orphaned_commands()
logger.debug('Polling for queued commands')
for command in self.get_queued_commands():
if command['id'] in self.commands:
# We've already seen it, ignore
continue
# This command has to wait for any other existing commands against the
# same system, to prevent collisions
predecessors = [self.greenlets[c['id']]
for c in self.commands.itervalues()
if c['fqdn'] == command['fqdn']]
if 'power' in command and command['power'].get('address'):
# Also wait for other commands running against the same power address
predecessors.extend(self.greenlets[c['id']]
for c in self.commands.itervalues()
if 'power' in c and c['power'].get('address')
== command['power']['address'])
self.spawn_handler(command, predecessors)
def spawn_handler(self, command, predecessors):
self.commands[command['id']] = command
greenlet = gevent.spawn(self.handle, command, predecessors)
self.greenlets[command['id']] = greenlet
def completion_callback(greenlet):
if greenlet.exception:
logger.error('Command handler %r had unhandled exception: %r',
greenlet, greenlet.exception)
del self.commands[command['id']]
del self.greenlets[command['id']]
greenlet.link(completion_callback)
def handle(self, command, predecessors):
if command.get('delay'):
# Before anything else, we need to wait for our delay period.
# Instead of just doing time.sleep we do a timed wait on
# shutting_down, so that our delay doesn't hold up the shutdown.
logger.debug('Delaying %s seconds for command %s',
command['delay'], command['id'])
if shutting_down.wait(timeout=command['delay']):
return
gevent.joinall(predecessors)
if shutting_down.is_set():
return
quiescent_period = command.get('quiescent_period')
if quiescent_period:
system_fqdn = command. | get('fqdn')
last_command_finished_at = self.last_command_datetime.get(system_fqdn)
if last_command_fin | ished_at:
# Get the difference between the time now and the number of
# seconds until we can run another command
seconds_to_wait = total_seconds((last_command_finished_at +
datetime.timedelta(seconds=quiescent_period)) -
datetime.datetime.utcnow())
else:
# Play it safe, wait for the whole period.
seconds_to_wait = quiescent_period
if seconds_to_wait > 0:
logger.debug('Entering quiescent period, delaying %s seconds for'
' command %s' % (seconds_to_wait, command['id']))
if shutting_down.wait(timeout=seconds_to_wait):
return
logger.debug('Handling command %r', command)
self.mark_command_running(command['id'])
try:
if command['action'] in (u'on', u'off', 'interrupt'):
handle_power(self.conf, command)
elif command['action'] == u'reboot':
# For backwards compatibility only. The server now splits
# reboots into 'off' followed by 'on'.
handle_power(self.conf, dict(command.items() + [('action', u'off')]))
time.sleep(5)
handle_power(self.conf, dict(command.items() + [('action', u'on')]))
elif command['action'] == u'clear_logs':
handle_clear_logs(self.conf, command)
elif command['action'] == u'configure_netboot':
handle_configure_netboot(command)
elif command['action'] == u'clear_netboot':
handle_clear_netboot(command)
else:
raise ValueError('Unrecognised action %s' % command['action'])
# XXX or should we just ignore it and leave it queued?
except netboot.ImageFetchingError as e:
logger.exception('Error processing command %s', command['id'])
# It's not the system's fault so don't mark it as broken
self.mark_command_failed(command['id'], unicode(e), False)
except Exception, e:
logger.exception('Error processing command %s', comma |
dilworm/pytest | base/client.py | Python | gpl-2.0 | 3,292 | 0.027992 | # -*- coding=utf8 -*-
import os, sys
import select, threading, Queue
import configparser as cf
from mysocket import MySocket
g_messageQueues = {} #每个scoket分配一个队列用于读写
g_messageQueues['main'] = Queue.Queue() #主队列用于接收服务器的返回消息
svrSockets = [] #MySocket 列表,对应每一个服务器
class MsgItem:
def __init__(self, msgType, socket, data):
self.msgType = msgType
self.socket = socket
self.data = data
def userInputThreadHandler(mainQueue, svrSockets):
# print 'user input q', mainQueue
# print 'user input socket size', len(svrSockets)
while True:
print '\nplease input command: \n'
command = raw_input()
if command == 'exit':
#sys.exit()
os._exit(0)
#post command to every connected server's Queue
for s in svrSockets:
if s not in mainQueue:
mainQueue[s] = Queue.Queue()
mainQueue[s].put(MsgItem('command', s, command))
print 'put' + command + 'into ', s.getpeername()
def networkThreadHandler(msgQueues, svrSockets):
# print 'network socket size', len(svrSockets)
# print 'network msgQueue', msgQueues
rl, wl, xl = [], svrSockets, []
for s in svrSockets:
s.setnoblock() # 设置为非阻塞
while True:
readable, writable, exceptional = select.select(rl, wl, xl, 30)
if not (readable or writable or exceptional):
print 'not'
continue
for s in readable:
data = s.recv()
if data == '':
print u'Error: 连接断开', s.getpeername()
print 'remove'
rl.remove(s)
#del msgQueues[s]
else:
#print 'recv data ', data
item = MsgItem('read', s, data)
msgQueues['main'].put(item) # 接收到的消息投递到主队列进行处理
for s in writable:
try:
if s not in msgQueues:
msgQueues[s] = Queue.Queue()
item = msgQueues[s].get_nowait()
if item and item.socket:
s.send(item.data)
print u'send commomd \"', item.data , '\" to ', s.getpeername()
if s not in rl:
rl.append(s) # 准备接收服务器返回的消息
except Queue.Empty:
continue
for s in exceptional:
print u'select 异常,断开 ', s.getpeername()
xl.removes(s)
#del msgQueues[s]
def OnQueueMsg(msgItem):
if msgItem.msgType == 'read':
print '[%s]: %s' % (msgItem.socket.getpeername(), msgItem.data)
def main():
serverList = cf.GetServerList('./serverlist.txt')
print u'找到', | len(serverList), u'个服务器地址:'
for addr in serverList:
print addr
print ''
for addr in serverList:
try:
s = MySocket()
s.connect(addr[0], addr[1])
svrSockets.append(s)
except:
print u'Error: 连接', addr, u'失败'
# print 'golbal queue:', g_messageQueues
# print 'gl | obal socket list size', len(svrSockets)
# print 'svrsocklist ', svrSockets
network_thread = threading.Thread(target = networkThreadHandler, args = (g_messageQueues, svrSockets))
userInput_thread = threading.Thread(target = userInputThreadHandler, args =(g_messageQueues, svrSockets))
network_thread.start()
userInput_thread.start()
while True:
OnQueueMsg(g_messageQueues['main'].get())
if __name__ == '__main__':
main()
|
jcberquist/sublimetext-cfml | src/component_parser/regex.py | Python | mit | 2,805 | 0.000713 | import re
from collections import namedtuple
component = r"""
(?:/\*\*((?:\*(?!/)|[^*])*)\*/\s+)?
(
(?:<cf)?
component\b
)
([^>{]*)
"""
component = re.compile(component, re.I | re.X | re.S)
Component = namedtuple('Component', 'script docblock attributes')
script_function = r"""
(?:/\*\*((?:\*(?!/)|[^*])*)\*/\s+)?
(?:\b(private|package|public|remote|static|final|abstract)\s+)?
(?:\b(private|package|public|remote|static|final|abstract)\s+)?
(?:\b([A-Za-z0-9_\.$]+)\s+)?
function\s+
([_$a-zA-Z][$\w]*)\s*
(?=\()
"""
script_function = re.compile(script_function, re.I | re.X | re.S)
ScriptFunction = namedtuple(
'ScriptFunction',
'docblock storage_slot_1 storage_slot_2 returntype name parameters attributes'
)
script_parameter = r"""
(?:(required)\s+)?
(?:\b([\w.]+)\b\s+)?
(\b\w+\b)
(?:\s*=\s*(\{[^\}]*\}|\[[^\]]*\]|\([^\)]*\)|(?:(?!\b\w+\s*=).)+))?
(.*)?
"""
script_parameter = re.compile(script_parameter, re.I | re.S | re.X)
ScriptParameter = namedtuple(
'ScriptParameter',
'required type name default attributes'
)
attribute = r"""
\b(\w+)\b(?:\s*=\s*(?:(['"])(.*?)(\2)|([a-z0-9:.]+)))?
"""
attribute = re.compile(attribute, re.I | re.X | re.S)
Attribute = namedtuple('Attribute', 'key quote_start value quote_end unquoted_value')
|
docblock = r"""
\n\s*(?:\*\s*)?(?:@(\w+)(?:\.(\w+))?)?\s*(\S.*)
"""
docblock = re.compile(docblock, re.I | re.X)
Docblock = namedtuple('Dockblock', 'key subkey value')
strings = r"""
"[^"]*"|'[^']*'
"""
strings = re.compile(strings, re.X)
function_attributes = r"""
\)([^)]*)$
"""
function_attributes = re.compile(function_attributes, re.X | re.S)
function_block = r"""
<cffunction\b.*?</cffunction>
"""
function_block = re.compile(function_block, re. | X | re.I | re.S)
function_start_tag = r"""
<cffunction([^>]*)>
"""
function_start_tag = re.compile(function_start_tag, re.X | re.I)
function_end_tag = r"""
</cffunction>
"""
function_end_tag = re.compile(function_end_tag, re.X | re.I)
argument_tag = r"""
<cfargument([^>]*)>
"""
argument_tag = re.compile(argument_tag, re.X | re.I)
cfml_property = r"""
^\s*
(?:<cf)?
property\b
([^;>]*)
"""
cfml_property = re.compile(cfml_property, re.X | re.I | re.M)
property_type_name = r"""
\A[\s\n]*
(?!\b\w+\s*=)
(?:(\w+)\s+)?
\b(\w+)\b
"""
property_type_name = re.compile(property_type_name, re.X)
string_quoted_single = r"""
'[^']*'
"""
string_quoted_single = re.compile(string_quoted_single, re.X)
string_quoted_double = r"""
"[^"]*"
"""
string_quoted_double = re.compile(string_quoted_double, re.X)
line_comment = r"""
//[^\r\n]\r?\n
"""
line_comment = re.compile(line_comment, re.X)
multiline_comment = r"""
/\*.*?\*\/
"""
multiline_comment = re.compile(multiline_comment, re.X | re.S)
tag_comment = r"""
<!---.*?--->
"""
tag_comment = re.compile(tag_comment, re.X | re.S)
|
atmark-techno/atmark-dist | user/python/Lib/encodings/cp866.py | Python | gpl-2.0 | 7,002 | 0.040703 | """ Python Character Mapping Codec generated from 'CP866.TXT'.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry( | ):
return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = {
0x0080: 0x0410, # CYRILLIC CAPITAL LETTER A
0x0081: 0x0411, # CYRILLIC CAPITAL LETTER BE
0x0082: 0x0412, # CYRILLIC CAPITAL LETTER VE
0x0083: 0x0413, # CYRILLIC CAPITAL LETTER GHE
0x0084: 0x0414, # CYRILLIC CAPITAL LETTER DE
0x0085: 0x0415, # CYRILLIC CAPITA | L LETTER IE
0x0086: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
0x0087: 0x0417, # CYRILLIC CAPITAL LETTER ZE
0x0088: 0x0418, # CYRILLIC CAPITAL LETTER I
0x0089: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
0x008a: 0x041a, # CYRILLIC CAPITAL LETTER KA
0x008b: 0x041b, # CYRILLIC CAPITAL LETTER EL
0x008c: 0x041c, # CYRILLIC CAPITAL LETTER EM
0x008d: 0x041d, # CYRILLIC CAPITAL LETTER EN
0x008e: 0x041e, # CYRILLIC CAPITAL LETTER O
0x008f: 0x041f, # CYRILLIC CAPITAL LETTER PE
0x0090: 0x0420, # CYRILLIC CAPITAL LETTER ER
0x0091: 0x0421, # CYRILLIC CAPITAL LETTER ES
0x0092: 0x0422, # CYRILLIC CAPITAL LETTER TE
0x0093: 0x0423, # CYRILLIC CAPITAL LETTER U
0x0094: 0x0424, # CYRILLIC CAPITAL LETTER EF
0x0095: 0x0425, # CYRILLIC CAPITAL LETTER HA
0x0096: 0x0426, # CYRILLIC CAPITAL LETTER TSE
0x0097: 0x0427, # CYRILLIC CAPITAL LETTER CHE
0x0098: 0x0428, # CYRILLIC CAPITAL LETTER SHA
0x0099: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
0x009a: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
0x009b: 0x042b, # CYRILLIC CAPITAL LETTER YERU
0x009c: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
0x009d: 0x042d, # CYRILLIC CAPITAL LETTER E
0x009e: 0x042e, # CYRILLIC CAPITAL LETTER YU
0x009f: 0x042f, # CYRILLIC CAPITAL LETTER YA
0x00a0: 0x0430, # CYRILLIC SMALL LETTER A
0x00a1: 0x0431, # CYRILLIC SMALL LETTER BE
0x00a2: 0x0432, # CYRILLIC SMALL LETTER VE
0x00a3: 0x0433, # CYRILLIC SMALL LETTER GHE
0x00a4: 0x0434, # CYRILLIC SMALL LETTER DE
0x00a5: 0x0435, # CYRILLIC SMALL LETTER IE
0x00a6: 0x0436, # CYRILLIC SMALL LETTER ZHE
0x00a7: 0x0437, # CYRILLIC SMALL LETTER ZE
0x00a8: 0x0438, # CYRILLIC SMALL LETTER I
0x00a9: 0x0439, # CYRILLIC SMALL LETTER SHORT I
0x00aa: 0x043a, # CYRILLIC SMALL LETTER KA
0x00ab: 0x043b, # CYRILLIC SMALL LETTER EL
0x00ac: 0x043c, # CYRILLIC SMALL LETTER EM
0x00ad: 0x043d, # CYRILLIC SMALL LETTER EN
0x00ae: 0x043e, # CYRILLIC SMALL LETTER O
0x00af: 0x043f, # CYRILLIC SMALL LETTER PE
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x0440, # CYRILLIC SMALL LETTER ER
0x00e1: 0x0441, # CYRILLIC SMALL LETTER ES
0x00e2: 0x0442, # CYRILLIC SMALL LETTER TE
0x00e3: 0x0443, # CYRILLIC SMALL LETTER U
0x00e4: 0x0444, # CYRILLIC SMALL LETTER EF
0x00e5: 0x0445, # CYRILLIC SMALL LETTER HA
0x00e6: 0x0446, # CYRILLIC SMALL LETTER TSE
0x00e7: 0x0447, # CYRILLIC SMALL LETTER CHE
0x00e8: 0x0448, # CYRILLIC SMALL LETTER SHA
0x00e9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
0x00ea: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
0x00eb: 0x044b, # CYRILLIC SMALL LETTER YERU
0x00ec: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
0x00ed: 0x044d, # CYRILLIC SMALL LETTER E
0x00ee: 0x044e, # CYRILLIC SMALL LETTER YU
0x00ef: 0x044f, # CYRILLIC SMALL LETTER YA
0x00f0: 0x0401, # CYRILLIC CAPITAL LETTER IO
0x00f1: 0x0451, # CYRILLIC SMALL LETTER IO
0x00f2: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
0x00f3: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
0x00f4: 0x0407, # CYRILLIC CAPITAL LETTER YI
0x00f5: 0x0457, # CYRILLIC SMALL LETTER YI
0x00f6: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
0x00f7: 0x045e, # CYRILLIC SMALL LETTER SHORT U
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x221a, # SQUARE ROOT
0x00fc: 0x2116, # NUMERO SIGN
0x00fd: 0x00a4, # CURRENCY SIGN
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
}
### Encoding Map
encoding_map = {}
for k,v in decoding_map.items():
encoding_map[v] = k
|
sven-hm/pythonocc-generator | src/Modules.py | Python | gpl-3.0 | 30,110 | 0.001627 | ##Copyright 2008-2015 Thomas Paviot (tpaviot@gmail.com)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# OpenCASCADE Toolkits: each ToolKit is a list of modules
#
TOOLKIT_Foundation = {
'TKernel': ['FSD', 'MMgt', 'OSD', 'Plugin', 'Quantity', 'Resource',
'SortTools',
'Standard', 'StdFail', 'Storage', 'TColStd',
'TCollection', 'TShort', 'Units', 'UnitsAPI',
'IncludeLibrary', 'Dico', 'NCollection', 'Message'],
'TKMath': ['math', 'ElCLib', 'ElSLib', 'BSplCLib', 'BSplSLib',
'PLib', 'Precision', 'GeomAbs', 'Poly', 'CSLib',
'Convert', 'Bnd', 'gp', 'TColgp', 'TopLoc'],
'TKAdvTools': ['Dynamic', 'Materials', 'Expr', 'ExprIntrp',
'GraphDS', 'GraphTools']}
TOOLKIT_Modeling = {
'TKG2d': ['Geom2d', 'LProp', 'TColGeom2d', 'Adaptor2d',
'Geom2dLProp', 'Geom2dAdaptor', 'GProp'],
'TKG3d': ['Geom', 'TColGeom', 'GeomAdaptor', 'AdvApprox',
'GeomLProp', 'Adaptor3d', 'LProp3d', 'TopAbs'],
'TKGeomBase': ['ProjLib', 'GeomProjLib', 'GCPnts', 'CPnts',
'Approx', 'AppParCurves', 'FEmTool', 'AppCont',
'Extrema', 'IntAna', 'IntAna2d', 'GeomConvert',
'AdvApp2Var', 'GeomLib', 'Geom2dConvert', 'Hermit',
'BndLib', 'AppDef', 'GeomTools', 'GC', 'GCE2d',
'gce'],
'TKBRep': ['TopoDS', 'TopExp', 'TopTools', 'BRep', 'BRepLProp',
'BRepAdaptor', 'BRepTools'],
'TKGeomAlgo': ['Hatch', 'GeomInt', 'IntStart', 'IntWalk',
'IntImp', 'IntCurveSurface', 'IntSurf', 'IntPatch',
'Geom2dInt', 'IntImpParGen', 'IntRes2d', 'IntCurve',
'TopTrans', 'Intf', 'ApproxInt', 'TopTrans', 'Intf',
'ApproxInt', 'GccAna', 'GccEnt', 'GccInt', 'GccIter',
'GccGeo', 'HatchGen', 'Geom2dHatch', 'Law', 'TopTrans',
'Intf', 'ApproxInt', 'GccAna', 'GccEnt', 'GccInt',
'GccIter', 'GccGeo', 'HatchGen', 'Geom2dHatch',
'Law', 'AppBlend', 'Plate', 'GeomPlate',
'LocalAnalysis', 'GeomAPI', 'GeomFill', 'Geom2dAPI',
'Geom2dGcc', 'FairCurve', 'NLPlate', 'IntPolyh',
'TopClass'],
'TKTopAlgo': ['IntCurvesFace', 'MAT', 'MAT2d', 'Bisector',
'BRepMAT2d', 'BRepCheck', 'BRepBndLib', 'BRepExtrema',
'BRepClass', 'BRepClass3d', 'BRepLib', 'BRepGProp',
'BRepIntCurveSurface', 'BRepTopAdaptor',
'BRepBuilderAPI', 'BRepApprox'],
'TKPrim': ['BRepPrim', 'Primitives', 'BRepSweep', 'Sweep',
'BRepPrimAPI'],
'TKBO': ['IntTools', 'BRepAlgoAPI', 'BOPCol', 'BOPInt', 'BOPDS',
'BOPAlgo', 'BOPTools'],
'TKHLR': ['HLRTopoBRep', 'HLRBRep', 'HLRAlgo', 'HLRAppli',
'Intrv', 'TopBas', 'TopCnx', 'Contap'],
'TKMesh': ['BRepMesh', 'IntPoly'],
'TKShHealing': ['ShapeBuild', 'ShapeExtend', 'ShapeConstruct',
'ShapeCustom', 'ShapeAnalysis', 'ShapeFix',
'ShapeUpgrade', 'ShapeAlgo', 'ShapeProcess',
'ShapeProcessAPI'],
'TKXMesh': ['XBRepMesh'],
'TKBool': ['TopOpeBRep', 'TopOpeBRepDS', 'TopOpeBRepBuild',
'TopOpeBRepTool', 'BRepAlgo', 'BRepFill', 'BRepProj'],
'TKFillet': ['ChFiDS', 'ChFi2d', 'ChFi3d', 'ChFiKPart', 'Blend',
'BRepBlend', 'BlendFunc', 'BRepFilletAPI',
'FilletSurf'],
'TKFeat': ['LocOpe', 'BRepFeat'],
'TKOffset': ['BRepOffsetAPI', 'Draft', 'BRepOffset', 'BiTgte'],
}
TOOLKIT_Visualisation = {
'TKService': ['Aspect', 'SelectBasics', 'Image',
'InterfaceGraphic', 'TColQuantity'],
'TKV3d': ['V3d', 'Graphic3d', 'Visual3d', 'Select3D',
'Prs3d', 'StdPrs', 'SelectMgr', 'PrsMgr',
'AIS', 'DsgPrs', 'StdSelect'],
'TKMeshVS': ['MeshVS'],
}
TOOLKIT_DataExchange = {
'TKSTL': ['StlMesh', 'StlAPI', 'StlTransfer', 'RWStl'],
'TKSTEP': ['STEPControl'],
'TKIGES': ['IGESControl'],
'TKXSBase': ['Interface', 'IFSelect'],
}
TOOLKIT_OCAF = {
'TKCDF': [],
'PTKernel': [],
'TKLCAF': ['TDF', 'TDataStd', 'TFunction', 'TDocStd', 'AppStdL'],
'FWOSPlugin': [],
'TKPShape': [],
'TKBinL': [],
'TKXmlL': [],
'TKPLCAF': [],
'TKTObj': [],
'TKShapeSchema': [],
'TKStdLSchema': [],
'TKXCAF': ['XCAFApp', 'XCAFDoc', 'XCAFPrs'],
'TKXDESTEP': ['STEPCAFControl'],
'TKXDEIGES': ['IGESCAFControl'],
}
TOOLKIT_SMesh = {
'SMESH': ['SMDSAbs', 'SMDS', 'SMESH', 'SMESHDS', 'StdMeshers', 'NETGENPlugin']
}
# List of modules to export
#
# (string module_name, list additional headers, list classes_to_exclude,
# dict member_functions to exclude)
OCE_MODULES = [
###
### FOUNDATION
###
| ###
### TKernel
('Dico', [], []),
('FSD', [], ['*']),
('MMgt', [], []),
('Message', [], ['Message_Msg']),
('NCollection', [], ['*']),
('OSD', [], ['*']),
('Plugin', [], ['*']),
('Quantity', [], []),
('Resource', [], ['*']),
('SortTools', [], []),
| ('Standard', [], ['Standard_AncestorIterator',
'Standard_Persistent', 'Standard_Static_Assert',
'Standard_CLocaleSentry',
'Standard_Failure'],
{'Standard_MMgrOpt': 'SetCallBackFunction',
'Standard': 'Free'}),
('StdFail', [], ['*']),
('Storage', [], []),
('TColStd', [], ['TColStd_PackedMapOfInteger']),
('TCollection', [], []),
('TShort', [], []),
('Units', [], ['Units_Quantity', 'Units_Dimensions']),
('UnitsAPI', [], []),
('IncludeLibrary', [], ['*']),
### TKMath
('math', [], ['*']),
('ElCLib', [], []),
('ElSLib', [], []),
('BSplCLib', [], ['BSplCLib_EvaluatorFunction'], {'BSplCLib': ['DN']}),
('BSplSLib', [], ['BSplSLib_EvaluatorFunction']),
('PLib', [], []),
('Precision', [], []),
('GeomAbs', [], []),
('Poly', ['NCollection'],
['Poly_CoherentTriPtr', 'Poly_CoherentTriangulation',
'Poly_MakeLoops', 'Poly_MakeLoops3D', 'Poly_MakeLoops2D']),
('CSLib', [], []),
('Convert', [], []),
('Bnd', [], []),
('gp', [], [],
{'gp_Torus': 'Coefficients'}),
('TColgp', [], []),
('TopLoc', [], [],
{'TopLoc_SListNodeOfSListOfItemLocation': 'Count'}),
### TKAdvTools
('Dynamic', [], []),
('Materials', [], []),
('E |
sysbot/cloudenvy | cloudenvy/commands/dotfiles.py | Python | apache-2.0 | 1,933 | 0 | import logging
import tarfile
import tempfile
import os
import fabric.api
import fabric.operations
import cloudenvy.envy
class Dotfiles(cloudenvy.envy.Command):
def _build_subparser(self, subparsers):
help_str = 'Upload dotfiles from your local machine to an Envy.'
subparser = subparsers.add_parser('dotfiles', help=help_str,
description=help_str)
subparser.set_defaults(func=self.run)
subparser.add_argument('-n', '--name', action='store', default='',
help='Specify custom name for an Envy.')
subparser.add_argument('-f', '--files', action='store',
help='Limit operation to a specific list of '
'comma-separated files.')
return subparser
def run(self, config, args):
envy = cloudenvy.envy.Envy(config)
if envy.ip():
host_string = '%s@%s' % (envy.remote_user, envy.ip())
temp_tar = tempfile.NamedTemporaryFile(delete=True)
with fabric.api.settings(host_string=host_string):
if args.files:
dotfiles = args.files.split(',') |
else:
dotfiles = config['defaults']['dotfiles'].split(',')
dotfiles = [dotfile.strip() for dotfile in dotfiles]
with tarfile.open(temp_tar.n | ame, 'w') as archive:
for dotfile in dotfiles:
path = os.path.expanduser('~/%s' % dotfile)
if os.path.exists(path):
if not os.path.islink(path):
archive.add(path, arcname=dotfile)
fabric.operations.put(temp_tar, '~/dotfiles.tar')
fabric.operations.run('tar -xvf ~/dotfiles.tar')
else:
logging.error('Could not determine IP.')
|
Cahersan/SexySignOn | multilogger/multilogger/api/utils.py | Python | bsd-3-clause | 622 | 0.003215 | from datetime import datetime
from redis import Redis
from redis_sessions.session import SessionStore
from multilogger.users.models import User
def get_logged_users():
# Query all non-expired sessions
redconn = Redis()
store = SessionStore()
s | essions = [redconn.get(key) for key in | redconn.keys()]
uid_list = []
# Build a list of user ids from that query
for session_data in sessions:
data = store.decode(session_data)
uid_list.append(data.get('_auth_user_id', None))
# Query all logged in users based on id list
return User.objects.filter(id__in=uid_list)
|
jyejare/robottelo | tests/foreman/ui/test_hostcollection.py | Python | gpl-3.0 | 28,395 | 0.001585 | """Test class for Host Collection UI
:Requirement: Hostcollection
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: HostCollections
:Assignee: swadeley
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import time
import pytest
from broker import VMBroker
from nailgun import entities
from robottelo.api.utils import promote
from robottelo.api.utils import update_vm_host_location
from robottelo.config import settings
from robottelo.constants import DISTRO_DEFAULT
from robottelo.constants import DISTRO_RHEL8
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_GROUP
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_GROUP_NAME
from robottelo.constants import FAKE_0_CUSTOM_PACKAGE_NAME
from robottelo.constants import FAKE_0_MODULAR_ERRATA_ID
from robottelo.constants import FAKE_1_CUSTOM_PACKAGE
from robottelo.constants import FAKE_1_CUSTOM_PACKAGE_NAME
from robottelo.constants import FAKE_2_CUSTOM_PACKAGE
from robottelo.constants import FAKE_2_ERRATA_ID
from robottelo.constants import FAKE_3_CUSTOM_PACKAGE
from robottelo.constants import FAKE_3_CUSTOM_PACKAGE_NAME
from robottelo.constants import FAKE_4_CUSTOM_PACKAGE_NAME
from robottelo.constants import FAKE_5_CUSTOM_PACKAGE
from robottelo.constants.repos import CUSTOM_MODULE_STREAM_REPO_2
from robottelo.constants.repos import FAKE_1_YUM_REPO
from robottelo.constants.repos import FAKE_6_YUM_REPO
from robottelo.datafactory import gen_string
from robottelo.helpers import add_remote_execution_ssh_key
from robottelo.hosts import ContentHost
from robottelo.products import RepositoryCollection
from robottelo.products import SatelliteToolsRepository
from robottelo.products import YumRepository
@pytest.fixture(scope='module')
def module_org():
    """Module-scoped Organization with remote execution configured to
    connect to hosts by IP address rather than hostname."""
    organization = entities.Organization().create()
    # Org-level parameter: REX jobs reach hosts via IP.
    entities.Parameter(
        name='remote_execution_connect_by_ip', value='Yes', organization=organization.id
    ).create()
    return organization
@pytest.fixture(scope='module')
def module_loc():
    """Module-scoped Location used by all tests in this module."""
    location = entities.Location().create()
    return location
@pytest.fixture(scope='module')
def module_lce(module_org):
    """Module-scoped Lifecycle Environment inside ``module_org``."""
    lifecycle_env = entities.LifecycleEnvironment(organization=module_org).create()
    return lifecycle_env
@pytest.fixture(scope='module')
def module_repos_collection(module_org, module_lce):
    """Content collection (Satellite tools + two custom YUM repos) for the
    default distro, published into the module lifecycle environment."""
    repositories = [
        SatelliteToolsRepository(),
        YumRepository(url=FAKE_1_YUM_REPO),
        YumRepository(url=FAKE_6_YUM_REPO),
    ]
    collection = RepositoryCollection(distro=DISTRO_DEFAULT, repositories=repositories)
    # Sync, publish and promote the content; a manifest is required for
    # the Red Hat repository in the collection.
    collection.setup_content(module_org.id, module_lce.id, upload_manifest=True)
    return collection
@pytest.fixture(scope='module')
def module_repos_collection_module_stream(module_org, module_lce):
    """RHEL8 content collection exposing a module-stream YUM repository."""
    collection = RepositoryCollection(
        distro=DISTRO_RHEL8, repositories=[YumRepository(url=CUSTOM_MODULE_STREAM_REPO_2)]
    )
    # Publish/promote into the module lifecycle environment.
    collection.setup_content(module_org.id, module_lce.id, upload_manifest=True)
    return collection
@pytest.fixture
def vm_content_hosts(request, module_loc, module_repos_collection):
    """Provision two content hosts registered to ``module_repos_collection``.

    Hosts are prepared for remote execution (SSH key deployed) and placed
    in the module location; the Satellite's smart proxy is added to that
    location so REX jobs can be dispatched.
    """
    distro = module_repos_collection.distro
    with VMBroker(nick=distro, host_classes={'host': ContentHost}, _count=2) as clients:
        for client in clients:
            # No katello-agent: package/errata actions go through REX.
            module_repos_collection.setup_virtual_machine(client, install_katello_agent=False)
            add_remote_execution_ssh_key(client.ip_addr)
            update_vm_host_location(client, module_loc.id)
        proxy = (
            entities.SmartProxy()
            .search(query={'search': f'name={settings.server.hostname}'})[0]
            .read()
        )
        proxy.location.append(entities.Location(id=module_loc.id))
        proxy.update(['location'])
        yield clients
@pytest.fixture
def vm_content_hosts_module_stream(module_loc, module_repos_collection_module_stream):
    """Provision two RHEL8 content hosts for module-stream tests.

    Mirrors ``vm_content_hosts``: hosts are registered without
    katello-agent, prepared for remote execution, and placed in the module
    location together with the Satellite's smart proxy.
    """
    distro = module_repos_collection_module_stream.distro
    with VMBroker(nick=distro, host_classes={'host': ContentHost}, _count=2) as clients:
        for client in clients:
            module_repos_collection_module_stream.setup_virtual_machine(
                client, install_katello_agent=False
            )
            add_remote_execution_ssh_key(client.ip_addr)
            update_vm_host_location(client, module_loc.id)
        # Fix: a stray ' | ' artifact previously corrupted this chained call.
        smart_proxy = (
            entities.SmartProxy()
            .search(query={'search': f'name={settings.server.hostname}'})[0]
            .read()
        )
        smart_proxy.location.append(entities.Location(id=module_loc.id))
        smart_proxy.update(['location'])
        yield clients
@pytest.fixture
def vm_host_collection(module_org, vm_content_hosts):
    """Host collection containing the two provisioned content hosts."""
    host_ids = [
        entities.Host().search(query={'search': f'name={host.hostname}'})[0].id
        for host in vm_content_hosts
    ]
    # Fix: the constructor name was corrupted ('HostCollect | ion').
    host_collection = entities.HostCollection(host=host_ids, organization=module_org).create()
    return host_collection
@pytest.fixture
def vm_host_collection_module_stream(module_org, vm_content_hosts_module_stream):
    """Host collection containing the module-stream content hosts."""
    host_ids = []
    for host in vm_content_hosts_module_stream:
        # Resolve each client VM to its Satellite host record.
        matches = entities.Host().search(query={'search': f'name={host.hostname}'})
        host_ids.append(matches[0].id)
    return entities.HostCollection(host=host_ids, organization=module_org).create()
def _run_remote_command_on_content_hosts(command, vm_clients):
"""run remote command on content hosts"""
for vm_client in vm_clients:
result = vm_client.run(command)
assert result.status == 0
def _is_package_installed(
    vm_clients, package_name, expect_installed=True, retries=10, iteration_sleep=15
):
    """Check whether package name was installed on the list of Virtual Machines
    clients.

    Polls each client with ``rpm -q`` up to ``retries`` times, sleeping
    ``iteration_sleep`` seconds between attempts.

    Return semantics differ by mode:
    - expect_installed=True: True only if *every* client reports the
      package installed.
    - expect_installed=False: True if *at least one* client still reports
      the package installed (the counter starts at len(vm_clients) and is
      decremented once per client confirmed absent).
    """
    assert len(vm_clients) > 0
    installed = 0
    if not expect_installed:
        # Start from the client count so each confirmed-absent client
        # decrements toward zero.
        installed = len(vm_clients)
    for vm_client in vm_clients:
        for ind in range(retries):
            result = vm_client.run(f'rpm -q {package_name}')
            if result.status == 0 and expect_installed:
                installed += 1
                break
            elif result.status != 0 and not expect_installed:
                installed -= 1
                break
            # Neither condition matched yet: wait and retry, except after
            # the final attempt.
            if ind < retries - 1:
                time.sleep(iteration_sleep)
            else:
                break
    if expect_installed:
        return installed == len(vm_clients)
    else:
        return bool(installed)
def _install_package_with_assertion(vm_clients, package_name):
    """yum-install ``package_name`` on every client and verify it landed."""
    for vm_client in vm_clients:
        install_result = vm_client.run(f'yum install -y {package_name}')
        assert install_result.status == 0
    assert _is_package_installed(vm_clients, package_name)
def _get_content_repository_urls(repos_collection, lce, content_view):
"""Returns a list of the content repository urls"""
custom_url_template = (
'https://{hostname}/pulp/repos/{org_label}/{lce.name}'
'/{content_view.name}/custom/{product_label}/{repository_name}'
)
rh_sat_tools_url_template = (
'https://{hostname}/pulp/repos/{org_label}/{lce.name}'
'/{content_view.name}/content/dist/rhel/server/{major_version}'
'/{major_version}Server/$basearch/sat-tools/{product_version}/os'
)
repos_urls = [
custom_url_template.format(
hostname=settings.server.hostname,
org_label=repos_collection.organization['label'],
lce=lce,
content_view=content_view,
product_label=repos_collection.custom_product['label'],
repository_name=repository['name'],
)
for repository in repos_collection.custom_repos_info
]
# add sat-tool rh repo
# Note: if sat-tools is not cdn it must be already in repos_urls
for repo in repos_collection:
if isinstance(repo, SatelliteToolsRepository) and repo.cdn:
repos_urls.append(
rh_sat_tools_url_template.format(
hostname=settings.server.hostname,
org_label=repos_ |
alimony/django | django/db/backends/oracle/base.py | Python | bsd-3-clause | 23,451 | 0.001109 | """
Oracle database backend for Django.
Requires cx_Oracle: http://cx-oracle.sourceforge.net/
"""
import datetime
import decimal
import os
import platform
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils.encoding import force_bytes, force_text
from django.utils.functional import cached_property
def _setup_environment(environ):
# Cygwin requires some special voodoo to set the environment variables
# properly so that Oracle will see them.
if platform.system().upper().startswith('CYGWIN'):
try:
import ctypes
except ImportError as e:
raise ImproperlyConfigured("Error loading ctypes: %s; "
"the Oracle backend requires ctypes to "
"operate correctly under Cygwin." % e)
kernel32 = ctypes.CDLL('kernel32')
for name, value in environ:
kernel32.SetEnvironmentVariableA(name, value)
else:
os.environ.update(environ)
_setup_environment([
# Oracle takes client-side character set encoding from the environment.
('NLS_LANG', '.AL32UTF8'),
# This prevents unicode from getting mangled by getting encoded into the
# potentially non-unicode database character set.
('ORA_NCHAR_LITERAL_REPLACE', 'TRUE'),
])
try:
import cx_Oracle as Database
except ImportError as e:
raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e)
# Some of these import cx_Oracle, so import them after checking if it's installed.
from .client import DatabaseClient  # NOQA isort:skip
from .creation import DatabaseCreation  # NOQA isort:skip
from .features import DatabaseFeatures  # NOQA isort:skip
from .introspection import DatabaseIntrospection  # NOQA isort:skip
from .operations import DatabaseOperations  # NOQA isort:skip
from .schema import DatabaseSchemaEditor  # NOQA isort:skip
from .utils import Oracle_datetime  # NOQA isort:skip
class _UninitializedOperatorsDescriptor:
def __get__(self, instance, cls=None):
# If connection.operators is looked up before a connection has been
# created, transparently initialize connection.operators to avert an
# AttributeError.
if instance is None:
raise AttributeError("operators not available as class attribute")
# Creating a cursor will initialize the operators.
instance.cursor().close()
return instance.__dict__['operators']
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'oracle'
# This dictionary maps Field objects to their associated Oracle column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
#
# Any format strings starting with "qn_" are quoted before being used in the
# output (the "qn_" prefix is stripped before the lookup is performed.
data_types = {
'AutoField': 'NUMBER(11)',
'BigAutoField': 'NUMBER(19)',
'BinaryField': 'BLOB',
'BooleanField': 'NUMBER(1)',
'CharField': 'NVARCHAR2(%(max_length)s)',
'DateField': 'DATE',
'DateTimeField': 'TIMESTAMP',
'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
'DurationField': 'INTERVAL DAY(9) TO SECOND(6)',
'FileField': 'NVARCHAR2(%(max_length)s)',
'FilePathField': 'NVARCHAR2(%(max_length)s)',
'FloatField': 'DOUBLE PRECISION',
'IntegerField': 'NUMBER(11)',
'BigIntegerField': 'NUMBER(19)',
'IPAddressField': 'VARCHAR2(15)',
'GenericIPAddressField': 'VARCHAR2(39)',
'NullBooleanField': 'NUMBER(1)',
'OneToOneField': 'NUMBER(11)',
'PositiveIntegerField': 'NUMBER(11)',
'PositiveSmallIntegerField': 'NUMBER(11)',
'SlugField': 'NVARCHAR2(%(max_length)s)',
'SmallIntegerField': 'NUMBER(11)',
'TextField': 'NCLOB',
'TimeField': 'TIMESTAMP',
'URLField': 'VARCHAR2(%(max_length)s)',
'UUIDField': 'VARCHAR2(32)',
}
data_type_check_constraints = {
'BooleanField': '%(qn_column)s IN (0,1)',
'NullBooleanField': '(%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL)',
'PositiveIntegerField': '%(qn_column)s >= 0',
'PositiveSmallIntegerField': '%(qn_column)s >= 0',
}
operators = _UninitializedOperatorsDescriptor()
_standard_operators = {
'exact': '= %s',
'iexact': '= UPPER(%s)',
'contains': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'icontains': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'endswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'istartswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'iendswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
}
_likec_operators = _standard_operators.copy()
_likec_operators.update({
'contains': "LIKEC %s ESCAPE '\\'",
'icontains': "LIKEC UPPER(%s) ESCAPE '\\'",
'startswith': "LIKEC %s ESCAPE '\\'",
'endswith': "LIKEC %s ESCAPE '\\'",
'istartswith': "LIKEC UPPER(%s) ESCAPE '\\'",
'iendswith': "LIKEC UPPER(%s) ESCAPE '\\'",
})
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, *, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
_pattern_ops = {
'contains': "'%%' || {} || '%%'",
'icontains': "'%%' || UPPER({}) || '%%'",
'startswith': "{} || '%%'",
'istartswith': "UPPER({}) || '%%'",
'endswith': "'%%' || {}",
'iendswith': "'%%' || UPPER({})",
}
_standard_pattern_ops = {k: "LIKE TRANSLATE( " + v + " USING NCHAR_CS)"
" ESCAPE TRANSLATE('\\' USING NCHAR_CS)"
for k, v in _pattern_ops.items()}
_likec_pattern_ops = {k: "LIKEC " + v + " ESCAPE '\\'"
for k, v in _pattern_ops.items()}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True)
self.features.can_return_id_from_insert = use_returning_into
def _connect_string(self):
settings_dict = self.settings_dict
if not settings_dict['HOST'].strip():
settings_dict['HOST'] = 'localhost'
if settings_dict['PORT']:
dsn = Database.makedsn(settings_dict['HOST'],
int(settings_dict['PORT']),
settings_dict['NAME'])
else:
dsn = settings_dict['NAME']
return "%s/%s@%s" % (settings_dict['USER'],
settings_dict['PASSWORD'], dsn)
def get_connection_p | |
watson-developer-cloud/discovery-starter-kit | notebooks/scripts/upload_training_data.py | Python | mit | 3,238 | 0.000309 | import json
import os
import glob
import sys
import logging
from watson_developer_cloud import WatsonException
if '__file__' in globals():
sys.path.insert(0, os.path.join(os.path.abspath(__file__), 'scripts'))
else:
sys.path.insert(0, os.path.join(os.path.abspath(os.getcwd()), 'scripts'))
from discovery_setup_utils import ( # noqa
discovery,
curdir,
get_constants,
write_progress
)
# set the DATA_TYPE the same to what was downloaded
DATA_TYPE = 'travel'
# set the TRAINING_PATH to the location of the training data relative
# to the 'data' directory
# by default, evaluates to <DATA_TYPE>/training
TRAINING_PATH = os.path.join(DATA_TYPE, 'training')
DATA_DIRECTORY = os.path.abspath(os.path.join(curdir, '..', 'data'))
TRAINING_DIRECTORY = os.path.join(DATA_DIRECTORY, TRAINING_PATH)
LOG_FILE_PATH = os.path.join(DATA_DIRECTORY, 'training_upload.log')
logging.basicConfig(filename=LOG_FILE_PATH,
filemode='w',
format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def upload_training_doc(training_json, environment_id, collection_id):
    """Upload one training query (with its examples) to Watson Discovery.

    Service errors are logged rather than raised so a single bad document
    does not abort the whole upload run.
    """
    try:
        r = discovery.add_training_data_query(
            environment_id=environment_id,
            collection_id=collection_id,
            natural_language_query=training_json['natural_language_query'],
            examples=training_json['examples'])
        logging.info("Response:\n%s", json.dumps(r, indent=4))
    except WatsonException as exception:
        # Fix: the identifier was corrupted ('e | xception').
        logging.error(exception)
def upload_training_data(training_directory):
    """Upload every ``*.json`` training file found in *training_directory*.

    Progress is echoed to the terminal via ``write_progress`` while
    per-file details go to the log file configured at import time.
    Relies on the module-level ``discovery_constants`` being populated
    before this is called.
    """
    print("Training directory: %s" % training_directory)
    files = glob.glob(os.path.join(training_directory, '*.json'))
    total_files = len(files)
    print("Number of files to process: %d" % total_files)
    training_data_uploaded = 0
    done_percent = 0
    write_progress(training_data_uploaded, total_files)
    for file in files:
        # Fix: the variable name was corrupted ('fil | e').
        with open(file, 'rb') as file_object:
            logging.info("Processing file: %s", file_object.name)
            upload_training_doc(
                json.loads(file_object.read()),
                discovery_constants['environment_id'],
                discovery_constants['collection_id']['trained']
            )
            training_data_uploaded += 1
            done_percent = write_progress(training_data_uploaded,
                                          total_files,
                                          done_percent)
    logging.info("Finished uploading %d files", total_files)
    print("\nFinished uploading %d files" % total_files)
print('Retrieving environment and collection constants...')
"""
retrieve the following:
{
environment_id: env_id,
collection_id: {
trained: trained_id
}
}
"""
discovery_constants = get_constants(
discovery,
trained_name=os.getenv(
'DISCOVERY_TRAINED_COLLECTION_NAME',
'knowledge_base_trained'
)
)
print('Constants retrieved!')
print(discovery_constants)
print("Log file located at: %s" % LOG_FILE_PATH)
upload_training_data(TRAINING_DIRECTORY)
|
martin-ejdestig/sork | sork/tests/test_case_with_tmp_dir.py | Python | gpl-3.0 | 3,027 | 0.000661 | # This file is part of Sork.
#
# Copyright (C) 2017-2019 Martin Ejdestig <marejde@gmail.com>
#
# Sork is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sork is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sork. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: GPL-3.0-or-later
import contextlib
import json
import os
import tempfile
import unittest
from typing import Any, Dict, Iterator, List, Optional, Union
from .. import paths
from ..config import Config
class TestCaseWithTmpDir(unittest.TestCase):
    """Base test case providing helpers rooted in a per-test temp directory."""

    def setUp(self) -> None:
        # The directory is removed automatically when the test finishes.
        self.tmp_dir = tempfile.TemporaryDirectory()
        self.addCleanup(self.tmp_dir.cleanup)

    def tmp_path(self, path: str) -> str:
        """Return *path* anchored inside the temp directory (idempotent)."""
        if path.startswith(self.tmp_dir.name):
            return path
        return os.path.join(self.tmp_dir.name, path)

    def create_tmp_dir(self, dir_path: str) -> None:
        os.makedirs(self.tmp_path(dir_path), exist_ok=True)

    def create_tmp_file(self,
                        file_path: str,
                        content: Optional[Union[str, List[str]]] = None) -> None:
        """Write *content* (a string, or list of lines joined with '\\n')."""
        dir_path = os.path.dirname(file_path)
        if dir_path:
            self.create_tmp_dir(dir_path)
        if isinstance(content, list):
            content = '\n'.join(content)
        with open(self.tmp_path(file_path), 'w') as file:
            file.write(content if content else '')

    def create_tmp_config(self, project_path: str, config: Config) -> None:
        self.create_tmp_file(os.path.join(project_path, paths.DOT_SORK_PATH),
                             json.dumps(config))

    @staticmethod
    def comp_db_path(build_path: str) -> str:
        return os.path.join(build_path, paths.COMPILE_COMMANDS_JSON_PATH)

    def create_tmp_comp_db(self,
                           build_path: str,
                           content: Union[str, List[Dict[str, Any]]]) -> None:
        if not isinstance(content, str):
            content = json.dumps(content)
        # Fix: this call was corrupted by a stray artifact ('create_t | mp_file').
        self.create_tmp_file(self.comp_db_path(build_path), content)

    def create_tmp_build_dir(self,
                             build_path: str,
                             comp_db: Optional[List[Dict[str, Any]]] = None) -> None:
        # Fix: 'self' was corrupted by a stray artifact ('se | lf').
        self.create_tmp_comp_db(build_path, comp_db or '[]')

    @contextlib.contextmanager
    def cd_tmp_dir(self, sub_dir: Optional[str] = None) -> Iterator[None]:
        """Temporarily chdir into the temp dir (or *sub_dir* within it)."""
        orig_work_dir = os.getcwd()
        os.chdir(self.tmp_path(sub_dir) if sub_dir else self.tmp_dir.name)
        try:
            yield
        finally:
            os.chdir(orig_work_dir)
|
danielstp/gestionAgua | aguaEmbotellada/clientes/models.py | Python | gpl-3.0 | 842 | 0.010689 | from django.db import models
class Cliente(models.Model):
    """A water-delivery customer."""
    # Fix: the field definition was corrupted ('name | = ...').
    nombre = models.CharField(max_length=200)
    apellidos = models.CharField(max_length=200)
    primerServicio = models.DateField('fecha primer Servicio')
class Venta(models.Model):
    """A bottled-water sale, paid either in cash or on credit."""
    CONTADO = 'CONTADO'
    # Fix: the literal was corrupted (' | CREDITO').
    CREDITO = 'CREDITO'
    OPCIONES_VENTA = ((CONTADO, 'Contado'), (CREDITO, 'Credito'))
    cliente = models.ForeignKey(Cliente)
    Fecha = models.DateField('fecha venta')
    contado = models.CharField(max_length=20,
                               choices=OPCIONES_VENTA,
                               default=CONTADO)
    # Fix: the keyword was corrupted ('deci | mal_places').
    precio = models.DecimalField(max_digits=5, decimal_places=2)
    cantidad = models.PositiveSmallIntegerField()
class Cobro(models.Model):
    """A payment/collection record linked to a customer."""
    # NOTE(review): the field is capitalized 'Cliente', shadowing the model
    # name inside this class body; Django convention would be 'cliente'.
    # Renaming changes the DB column, so confirm before touching it.
    Cliente = models.ForeignKey(Cliente)
|
miltonruelas/cursotecnico | branch/pos_report/__init__.py | Python | agpl-3.0 | 1,405 | 0.000712 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2013 Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import point_of_sale |
napjon/moocs_solution | ml-udacity/feature_selection/poi_flag_email.py | Python | mit | 1,941 | 0.015456 |
#!/usr/bin/python
###
### in poiFlagEmail() below, write code that returns a boolean
### indicating if a given emails is from a POI
###
import sys
import reader
import poi_emails
def getToFromStrings(f):
    """Parse the open email file *f* and return (to, from, cc) address lists."""
    f.seek(0)
    to_string, from_string, cc_string = reader.getAddresses(f)
    # Fix: stray ' | ' artifacts had split these identifiers.
    to_emails = reader.parseAddresses(to_string)
    from_emails = reader.parseAddresses(from_string)
    cc_emails = reader.parseAddresses(cc_string)
    return to_emails, from_emails, cc_emails
### POI flag an email
def poiFlagEmail(f):
    """ given an email file f,
        return a trio of booleans for whether that email is
        to, from, or cc'ing a poi """
    to_emails, from_emails, cc_emails = getToFromStrings(f)

    ### list of email addresses of all the POIs
    poi_email_list = poi_emails.poiEmails()

    to_poi = False
    from_poi = False
    cc_poi = False

    # Flag if any "to" recipient is a POI (there can be many "to" addresses).
    if to_emails:
        to_poi = any(addr in poi_email_list for addr in to_emails)

    # Bug fix: the original loop tested 'not to_poi' here, so the cc check
    # was silently skipped whenever the email was already flagged "to" a POI.
    if cc_emails:
        cc_poi = any(addr in poi_email_list for addr in cc_emails)

    # There is only one "from" address.
    if from_emails and from_emails[0] in poi_email_list:
        from_poi = True

    return to_poi, from_poi, cc_poi
futureneer/simple_ur | simple_ur_driver/src/simple_ur_driver/ur_driver_study.py | Python | bsd-2-clause | 19,251 | 0.005506 | #!/usr/bin/env python
# ROS IMPORTS
import roslib; roslib.load_manifest('simple_ur_driver')
import rospy
import tf; import tf_conversions as tf_c
import PyKDL
# MSGS and SERVICES
from simple_ur_msgs.srv import *
from sensor_msgs.msg import JointState
from geometry_msgs.msg import PoseStamped
from predicator_msgs.msg import *
from std_msgs.msg import *
import time
import threading
import socket
# URX Universal Robot Driver
import urx
# OTHER
import logging
import numpy as np
from pid import PID
class URDriver():
MAX_ACC = .5
MAX_VEL = 1.8
JOINT_NAMES = ['shoulder_pan_joint', 'shoulder_lift_joint', 'elbow_joint',
'wrist_1_joint', 'wrist_2_joint', 'wrist_3_joint']
MSG_QUIT = 2
MSG_TEST = 3
MSG_SETPOINT = 4
MULT_jointstate = 10000.0
MULT_time = 1000000.0
MULT_blend = 1000.0
PID_PROG = '''def pidProg():
textmsg("PID Follow Program Started")
MSG_QUIT = 3
MSG_TEST = 4
MSG_SETPOINT = 5
Socket_Closed = True
### PID VALUES ###
Kp = [2.0,2.0,2.0,5.0,5.0,5.0]
Ki = [0.0,0.0,0.0,0.0,0.0,0.0]
Kd = [0.0,0.0,0.0,0.0,0.0,0.0]
p_val = [0,0,0,0,0,0]
i_val = [0,0,0,0,0,0]
d_val = [0,0,0,0,0,0]
pid_deriv = [0.0,0.0,0.0,0.0,0.0,0.0]
pid_integ = [0.0,0.0,0.0,0.0,0.0,0.0]
pid_integ_max = 500
pid_integ_min = -500
set_point = [0.0,0.0,0.0,0.0,0.0,0.0]
set_pose = p[0.0,0.0,0.0,0.0,0.0,0.0]
current_point = [0.0,0.0,0.0,0.0,0.0,0.0]
current_pose = p[0.0,0.0,0.0,0.0,0.0,0.0]
cmd_vel = [0.0,0.0,0.0,0.0,0.0,0.0]
limit_vel = [0.0,0.0,0.0,0.0,0.0,0.0]
saved_vel = [0.0,0.0,0.0,0.0,0.0,0.0]
pid_error = [0.0,0.0,0.0,0.0,0.0,0.0]
max_vel = .75
max_vel_diff = [0.024,0.024,0.024,0.024,0.024,0.024]
D = 0
# Limit the Velocities to max_vel
def clamp_velocities():
# Impose velocity limits
limit_vel = cmd_vel
i = 0
while i < 6:
if cmd_vel[i] > max_vel:
limit_vel[i] = max_vel
end
if cmd_vel[i] < -max_vel:
limit_vel[i] = -max_vel
end
i = i + 1
end
end
def clamp_accelerations():
i = 0
vel_diff = 0
while i < 6:
vel_diff = saved_vel[i] - limit_vel[i]
if vel_diff > max_vel_diff[i]:
limit_vel[i] = saved_vel[i] - .1 * max_vel_diff[i]
end
if vel_diff < -max_vel_diff[i]:
limit_vel[i] = saved_vel[i] + .1 * max_vel_diff[i]
end
i = i + 1
end
# Update saved Velocities
saved_vel = limit_vel
end
# Set the PID setpoint from a packet
def set_pid_setpoint(data):
enter_critical
point = 0
while point < data[0]:
set_point[point] = data[point+1]
point = point + 1
end
set_pose = p[data[1],data[2],data[3],data[4],data[5],data[6]]
pid_integ = [0.0,0.0,0.0,0.0,0.0,0.0]
pid_deriv = [0.0,0.0,0.0,0.0,0.0,0.0]
exit_critical
end
# Set the PID setpoint from a pose
def set_pid_setpoint_from_pose(pose):
enter_critical
set_pose = pose
point = 0
while point < 6:
set_point[point] = pose[point]
point = point + 1
end
pid_integ = [0.0,0.0,0.0,0.0,0.0,0.0]
pid_deriv = [0.0,0.0,0.0,0.0,0.0,0.0]
exit_critical
end
# update the current pose of the robot in pose and list form
def get_current_point():
enter_critical
current_pose = get_actual_tcp_pose()
i = 0
while i < 6:
current_point[i] = current_pose[i]
i = i + 1
end
exit_critical
end
### PID UPDATE ###
def update_pid(i):
pid_error[i] = set_point[i] - current_point[i]
p_val[i] = Kp[i] * pid_error[i]
d_val[i] = Kd[i] * ( pid_error[i] - pid_deriv[i])
pid_deriv[i] = pid_error[i]
pid_integ[i] = pid_integ[i] + pid_error[i]
if pid_integ[i] > pid_integ_max:
pid_integ[i] = pid_integ_max
end
if pid_integ[i] < pid_integ_min:
pid_integ[i] = pid_integ_min
end
i_val[i] = pid_integ[i] * Ki[i]
upd = p_val[i] + i_val[i] + d_val[i]
return upd
end
### PID UPDATE THREAD ###
thread pid_update_thread():
while True:
get_current_point()
D = pose_dist(set_pose,current_pose)
if D > .001:
enter_critical
i = 0
while i < 6:
cmd_vel[i] = update_pid(i)
i = i + 1
end
clamp_velocities()
clamp_accelerations()
exit_critical
else:
enter_critical
limit_vel = [0.0,0.0,0.0,0.0,0.0,0.0]
exit_critical
end
sync()
end
end
thread move_thread():
while True:
speedl(limit_vel,1.0,.015)
end
end
#### RUN ####
# Set initial set point to robot position
textmsg("Setting Initial PID Set Point")
set_pid_setpoint_from_pose( get_actual_tcp_pose() )
textmsg(set_point)
thread_pid_h = run pid_update_thread()
thread_move_h = run move_thread()
## MAIN LOOP
| while (True):
if (Socket_Closed == True):
# Keep Checking socket to see if opening it failed
r = socket_open("192.168.1.5", 30000)
if r == True:
global Socket_Closed = False
else:
textmsg("Socket Failed to Open")
end
end
data = socket_read_asc | ii_float(6)
if data[0] == 1:
textmsg("Got Command")
if data[1] == MSG_QUIT:
textmsg("Recieved Quit Command ... DONE")
break
elif data[1] == MSG_TEST:
textmsg("Recieved Test Message")
end
elif data[0] == 6:
set_pid_setpoint(data)
# else:
# textmsg("Got a Bad Packet")
end
sleep(.1)
end
# When finished kill pid thread
kill thread_pid_h
kill thread_move_h
textmsg("Finished PID Thread")
end
pidProg()
'''
def __init__(self):
rospy.init_node('ur_driver',anonymous=True)
rospy.logwarn('SIMPLE_UR DRIVER LOADING')
# Set State First
self.robot_state = 'POWER OFF'
# TF
self.broadcaster_ = tf.TransformBroadcaster()
self.listener_ = tf.TransformListener()
# SERVICES
self.servo_to_pose_service = rospy.Service('simple_ur_msgs/ServoToPose', ServoToPose, self.servo_to_pose_call)
self.set_stop_service = rospy.Service('simple_ur_msgs/SetStop', SetStop, self.set_stop_call)
self.set_teach_mode_service = rospy.Service('simple_ur_msgs/SetTeachMode', SetTeachMode, self.set_teach_mode_call)
self.set_servo_mode_service = rospy.Service('simple_ur_msgs/SetServoMode', SetServoMode, self.set_servo_mode_call)
# PUBLISHERS AND SUBSCRIBERS
self.driver_status_publisher = rospy.Publisher('/ur_robot/driver_status',String)
self.robot_state_publisher = rospy.Publisher('/ur_robot/robot_state',String)
self.joint_state_publisher = rospy.Publisher('joint_states',JointState)
self.follow_pose_subscriber = rospy.Subscriber('/ur_robot/follow_goal',PoseStamped,self.follow_goal_cb)
self.sound_pub = rospy.Publisher('/audri/sound/sound_player', String)
# PREDICATOR INTERFACE
self.pub_list = rospy.Publisher('/predicator/input', PredicateList)
self.pub_valid = rospy.Publisher('/predicator/valid_input', ValidPredicates)
self.exceed_notify = False
rospy.sleep(.5)
pval = ValidPredicates()
pval.pheader.source = rospy.get_name()
pval.predicates = ['soft_force_exceeded', 'hard_force_exceeded']
pval.assignments = ['robot']
self.pub_valid.publish(pval)
# Rate
self.run_rate = rospy.Rate(30)
### Set Up Robot ###
self.rob = urx.Robot("192.168.1.155", logLevel=logging.INFO)
if not self.rob:
rospy.logwarn('SIMPLE UR - ROBOT NOT CONNECTED')
self.driver_status = 'DISCONNECTED'
self.robot_state = 'POWER OFF'
else:
rospy.logwarn('SIMPLE UR - ROBOT CONNECTED SUCCESSFULLY')
rospy.logwarn('SIMPLE UR - GOT REAL TIME <WRITE> INTERFACE TO ROBOT')
# self.rt_socket = socket.create_connection(('192.168.1.155', 30003), timeout=0.5)
rospy.logwarn('SIMPLE UR - GOT REAL TIME <READ> INTERFACE TO ROBOT')
self.rtm = self.rob.get_realtime_monitor()
self.driver_status = 'IDLE'
### Set Up PID |
rockwyc992/monkey-pdns | monkey_pdns/app/models.py | Python | mit | 1,285 | 0.018677 | from django.db import models
from django.contrib.auth.models import User
class Zone(models.Model):
    """A top-level DNS zone."""
    # Fix: the field definition was corrupted ('name | = ...').
    name = models.CharField(max_length=64)
    created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.name
class Sub_Zone(models.Model):
    """A user-owned delegation (prefix) within a parent Zone."""
    owner = models.ForeignKey(User)
    prefix = models.CharField(max_length=32)
    super_zone = models.ForeignKey(Zone)
    created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        # '@' denotes the zone apex: render as the parent zone itself.
        parent = str(self.super_zone)
        if self.prefix == '@':
            return parent
        return self.prefix + '.' + parent
class Record_Type(models.Model):
    """A DNS record type (e.g. A, AAAA, CNAME, MX)."""
    name = models.CharField(max_length = 8)
    created = models.DateTimeField(auto_now_add = True)
    def __str__(self):
        return self.name
class Record(models.Model):
    """A DNS record inside a Sub_Zone; rendered as 'name TYPE value'."""
    prefix = models.CharField(max_length = 32)
    type = models.ForeignKey(Record_Type)
    zone = models.ForeignKey(Sub_Zone)
    context = models.CharField(max_length = 128)
    created = models.DateTimeField(auto_now_add = True)
    def __str__(self):
        # '@' means the record sits at the sub-zone apex (no extra prefix).
        if self.prefix == '@':
            return str(self.zone) + ' ' + str(self.type) + ' ' + self.context
        return self.prefix + '.' + str(self.zone) + ' ' + str(self.type) + ' ' + self.context
|
SB-Technology-Holdings-International/WateringWebClient | regen.py | Python | bsd-3-clause | 5,942 | 0.001178 | #!/usr/bin/python -u
# coding=utf-8
"""
Generate certificates via Let's Encrypt
"""
import re
from subprocess import check_output, check_call
from os import path
import click
from colorama import Fore
import pexpect
# Extract the file/challenge from the LetsEncrypt output e.g.
CREX = re.compile(
".well-known\/acme-challenge\/(\S+) before continuing:\s+(\S+)",
re.MULTILINE
)
MODULE_CONFIG = 'module.yaml' # The file in our project root
APPENGINE_URL = ("https://console.cloud.google.com/" +
"appengine/settings/certificates")
def get_default_email():
    """Read the user's email address from their git configuration."""
    output = check_output(['git', 'config', 'user.email'])
    return output.strip()
@click.command()
@click.option('--appid', '-A', prompt=True)
@click.option('--test/--no-test', default=True)
@click.option('--domains', '-d', multiple=True)
@click.option('--app-path', default=path.abspath(path.dirname(__file__)))
@click.option('--acme-path', required=True)
@click.option('--email', default=get_default_email)
def gen(test, appid, domains, acme_path, app_path, email):
"""Regenerate the keys.
Run all the steps, being:
1. Call Let's Encrypt
2. Capture the challenges from the LE output
3. Deploy the AppEngine module
4. Print Cert. to terminal
"""
common_name = domains[0] # noqa
sans = " ".join(domains) # noqa
click.echo("""
APPID: {appid}
Test: {test}
Common Name: {common_name}
Domain(s): {sans}
App Path: {app_path}
ACME path: {acme_path}
User Email: {email}
""".format(**{
k: Fore.YELLOW + s | tr(v) + Fore.RESET
for k, v in locals().items()
}))
CERT_PATH = acm | e_path
KEY_PATH = acme_path
CHAIN_PATH = acme_path
FULLCHAIN_PATH = acme_path
CONFIG_DIR = acme_path
WORK_DIR = path.join(acme_path, 'tmp')
LOG_DIR = path.join(acme_path, 'logs')
cmd = [
'letsencrypt',
'certonly',
'--rsa-key-size',
'2048',
'--manual',
'--agree-tos',
'--manual-public-ip-logging-ok',
'--text',
'--cert-path', CERT_PATH,
'--key-path', KEY_PATH,
'--chain-path', CHAIN_PATH,
'--fullchain-path', FULLCHAIN_PATH,
'--config-dir', CONFIG_DIR,
'--work-dir', WORK_DIR,
'--logs-dir', LOG_DIR,
'--email', email,
'--domain', ",".join(domains),
]
if test:
cmd.append('--staging')
print("$ " + Fore.MAGENTA + " ".join(cmd) + Fore.RESET)
le = pexpect.spawn(" ".join(cmd))
out = ''
idx = le.expect(["Press ENTER", "Select the appropriate number"])
if idx == 1:
# 1: Keep the existing certificate for now
# 2: Renew & replace the cert (limit ~5 per 7 days)
print le.before + le.after
le.interact("\r")
print "..."
le.sendline("")
if le.expect(["Press ENTER", pexpect.EOF]) == 1:
# EOF - User chose to not update certs.
return
out += le.before
# Hit "Enter" for each domain; we extract all challenges at the end;
# We stop just at the last "Enter to continue" so we can publish
# our challenges on AppEngine.
for i in xrange(len(domains) - 1):
le.sendline("")
le.expect("Press ENTER")
out += le.before
# The challenges will be in `out` in the form of CREX
challenges = CREX.findall(out)
if not challenges:
raise Exception("Expected challenges from the output")
for filename, challenge in challenges:
filepath = path.join(app_path, "challenges", filename)
print "[%s]\n\t%s\n\t=> %s" % (
Fore.BLUE + filepath + Fore.RESET,
Fore.GREEN + filename + Fore.RESET,
Fore.YELLOW + challenge + Fore.RESET
)
with open(filepath, 'w') as f:
f.write(challenge)
# Deploy to AppEngine
cmd = [
'appcfg.py',
'update',
'-A', appid,
path.join(app_path, MODULE_CONFIG)
]
print("$ " + Fore.MAGENTA + " ".join(cmd) + Fore.RESET)
check_call(cmd)
# After deployment, continue the Let's Encrypt (which has been waiting
# on the last domain)
le.sendline("")
le.expect(pexpect.EOF)
le.close()
if le.exitstatus:
print Fore.RED + "\nletsencrypt failure: " + Fore.RESET + le.before
return
print "\nletsencrypt complete.", le.before
# Convert the key to a format AppEngine can use
# LE seems to choose the domain at random, so we have to pluck it.
CPATH_REX = (
"Your certificate and chain have been saved at (.+)fullchain\.pem\."
)
outstr = le.before.replace("\n", "").replace('\r', '')
results = re.search(CPATH_REX, outstr, re.MULTILINE)
LIVE_PATH = "".join(results.group(1).split())
CHAIN_PATH = path.join(LIVE_PATH, "fullchain.pem")
PRIVKEY_PATH = path.join(LIVE_PATH, "privkey.pem")
cmd = [
'openssl', 'rsa',
'-in', PRIVKEY_PATH,
'-outform', 'pem',
'-inform', 'pem'
]
print "$ " + Fore.MAGENTA + " ".join(cmd) + Fore.RESET
priv_text = check_output(cmd)
with open(CHAIN_PATH, 'r') as cp:
pub_text = cp.read()
print """
--- Private Key ---
at {PRIVKEY_PATH}
(the above file must be converted with {cmd} to a format usable by
AppEngine, the result of which will be as follows)
{priv_text}
--- Public Key Chain ---
at {CHAIN_PATH}
{pub_text}
✄ Copy the above into the respective fields of AppEngine at
https://console.cloud.google.com/appengine/settings/certificates
""".format(
PRIVKEY_PATH=PRIVKEY_PATH,
priv_text=Fore.RED + priv_text + Fore.RESET,
CHAIN_PATH=CHAIN_PATH,
pub_text=Fore.BLUE + pub_text + Fore.RESET,
cmd=Fore.MAGENTA + " ".join(cmd) + Fore.RESET,
)
if __name__ == '__main__':
gen()
|
Wooble/devnullunique | app/main.py | Python | mit | 5,223 | 0.001149 | """devnull nethack tournament unique deaths"""
from google.appengine.api import urlfetch
from google.appengine.ext import ndb
import collections
import csv
import re
import os
from cStringIO import StringIO
import logging
import webapp2
import jinja2
BLOCK_SIZE = 1024 * 400
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__),
'templates')),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
class LogMetadata(ndb.Model):
    """Singleton datastore entity tracking when the logfile was last refreshed."""

    # auto_now=True: updated on every put(), i.e. each time savescores() runs.
    datadate = ndb.DateTimeProperty(auto_now=True)

    @classmethod
    def singleton(cls):
        """Return the single shared instance, creating it on first use."""
        return cls.get_or_insert('SINGLE')
class LogfileSection(ndb.Model):
    """One BLOCK_SIZE chunk of the downloaded xlogfile."""

    data = ndb.BlobProperty()                       # raw chunk bytes
    datadate = ndb.DateTimeProperty(auto_now=True)  # last write time

    @classmethod
    def singleton(cls):
        """Return the single shared instance, creating it on first use."""
        # NOTE(review): savescores() stores chunks under BLOCK-nnn keys, so
        # this singleton accessor looks unused in practice — confirm.
        return cls.get_or_insert('SINGLE')
class MainPage(webapp2.RequestHandler):
    """Render the index page, showing when the score data was last refreshed."""

    def get(self):
        bks = LogMetadata.singleton()
        template_values = {'data_date': bks.datadate,
                           }
        template = JINJA_ENVIRONMENT.get_template('index.html')
        self.response.write(template.render(template_values))
class ReloadLogfile(webapp2.RequestHandler):
    """Fetch the tournament xlogfile and persist it into the datastore."""

    def get(self):
        # Explicit 120 s deadline: the scores file is large, and the default
        # urlfetch deadline is much shorter.
        data = urlfetch.fetch(
            'https://nethack.devnull.net/tournament/scores.xlogfile',
            deadline=120).content
        savescores(data)
class UniqueDeaths(webapp2.RequestHandler):
    """Render the unique-deaths checklist for one tournament player."""

    def get(self, username):
        mydeaths = []
        possibledeaths = []
        done = collections.Counter()
        # death_yes.txt holds one regex per line, one per countable death;
        # each is anchored at the end of the death string.
        with open('death_yes.txt') as deathyes:
            for line in deathyes:
                possibledeaths.append(re.compile(line.rstrip() + '$'))
        scores = readscores()
        reader = csv.reader(scores, delimiter=':')
        for i, line in enumerate(reader):
            try:
                # xlogfile fields are 'key=value'; field 15 carries the player
                # name and field 16 the death reason — TODO confirm indices.
                if username == line[15].split('=')[1]:
                    mydeaths.append(line[16]
                                    .split('=')[1].decode('unicode-escape'))
            except IndexError:
                logging.error("failed for line %s [%s]", i, line)
                raise
        posstmp = possibledeaths[:]
        for death in mydeaths:
            # the tournament seems to do this; if so it's a bug...
            #death = death.replace('(with the Amulet)', '')
            for i, exp in enumerate(possibledeaths):
                if exp and exp.search(death.replace('\\', '').replace(' *', '')):
                    done[exp] += 1
                    # NOTE(review): the matched pattern is removed here, so
                    # done[exp] can never exceed 1 even though the display
                    # below renders '(x%d)' — confirm whether multi-counting
                    # was intended.
                    possibledeaths[i] = None
                    break
        deaths = []
        for d in posstmp:
            if d not in done:
                deaths.append(('red', d.pattern))   # not yet achieved
            else:
                deaths.append(('green', d.pattern + ('(x%d)' % done[d])))
        template_values = {'deaths': deaths,
                           'count': len(done),
                           'player': username,
                           }
        template = JINJA_ENVIRONMENT.get_template('deaths.html')
        self.response.write(template.render(template_values))
class UniqueRedir(webapp2.RequestHandler):
    """Redirect the username form POST to the canonical /unique/<name> URL."""

    def post(self):
        """Handle the form POST; fall back to the index when no username is given."""
        username = self.request.get('username')
        # webapp2's request.get() returns '' (not None) when the parameter is
        # missing, so the previous `is not None` check always took the first
        # branch and redirected to the broken URL "/unique/". Test truthiness
        # instead so an absent/empty username goes back to the index.
        if username:
            self.redirect("/unique/" + username)
        else:
            self.redirect("/")
class Explodes(webapp2.RequestHandler):
    """Render the static explodes.html page (no dynamic data)."""

    def get(self):
        template_values = {}
        template = JINJA_ENVIRONMENT.get_template('explodes.html')
        self.response.write(template.render(template_values))
class PlayerList(webapp2.RequestHandler):
    """Render the sorted list of every player name seen in the logfile."""

    def get(self):
        template = JINJA_ENVIRONMENT.get_template('playerlist.html')
        players = set()
        scores = readscores()
        reader = csv.reader(scores, delimiter=':')
        for i, line in enumerate(reader):
            try:
                # Field 15 of each colon-separated xlogfile line is
                # 'name=<player>' — TODO confirm index against the feed.
                username = line[15].split('=')[1]
                players.add(username)
            except IndexError:
                logging.error("failed for line %s [%s]", i, line)
                raise
        template_values = {'players': sorted(players)}
        self.response.write(template.render(template_values))
application = webapp2.WSGIApplication(
[
('/', MainPage),
('/itexplodes', Explodes),
('/playerlist', PlayerList),
(r'/unique/(.*)', UniqueDeaths),
(r'/unique', UniqueRedir),
(r'/reload', ReloadLogfile),
], debug=True)
def readscores():
    """Reassemble the logfile from its datastore chunks.

    Returns a file-like object (StringIO) suitable for csv.reader.
    """
    sections = LogfileSection.query().fetch(200)
    return StringIO(''.join(section.data for section in sections))
def savescores(data):
    """write scores back to datastore, from string"""
    logging.debug("Saving scores with data of length %s", len(data))
    # Touch the metadata singleton so its auto_now timestamp records this
    # refresh; the write can complete asynchronously.
    md = LogMetadata.singleton()
    md.put_async()
    # Datastore entities are size-limited, so split the logfile into
    # BLOCK_SIZE chunks stored under sequential keys BLOCK-000, BLOCK-001, ...
    # (readscores() concatenates them back in key order).
    for ind, offset in enumerate(range(0, len(data), BLOCK_SIZE)):
        block = data[offset:offset + BLOCK_SIZE]
        key = "BLOCK-%03d" % ind
        logging.debug("made %s with length %s", key, len(block))
        bks = LogfileSection.get_or_insert(key)
        bks.data = block
        bks.put()
|
rolando-contrib/scrapy | scrapy/shell.py | Python | bsd-3-clause | 7,530 | 0.000531 | """Scrapy Shell
See documentation in docs/topics/shell.rst
"""
from __future__ import print_function
import os
import signal
import warnings
from twisted.internet import reactor, threads, defer
from twisted.python import threadable
from w3lib.url import any_to_uri
from scrapy.crawler import Crawler
from scrapy.exceptions import IgnoreRequest, ScrapyDeprecationWarning
from scrapy.http import Request, Response
from scrapy.item import BaseItem
from scrapy.settings import Settings
from scrapy.spiders import Spider
from scrapy.utils.console import start_python_console
from scrapy.utils.datatypes import SequenceExclude
from scrapy.utils.misc import load_object
from scrapy.utils.response import open_in_browser
from scrapy.utils.conf import get_config
from scrapy.utils.console import DEFAULT_PYTHON_SHELLS
class Shell(object):
relevant_classes = (Crawler, Spider, Request, Response, BaseItem,
Settings)
def __init__(self, crawler, update_vars=None, code=None):
self.crawler = crawler
self.update_vars = update_vars or (lambda x: None)
self.item_class = load_object(crawler.settings['DEFAULT_ITEM_CLASS'])
self.spider = None
self.inthread = not threadable.isInIOThread()
self.code = code
self.vars = {}
def start(self, url=None, request=None, response=None, spider=None, redirect=True):
# disable accidental Ctrl-C key press from shutting down the engine
signal.signal(signal.SIGINT, signal.SIG_IGN)
if url:
self.fetch(url, spider, redirect=redirect)
elif request:
self.fetch(request, spider)
elif response:
request = response.request
self.populate_vars(response, request, spider)
else:
self.populate_vars()
if self.code:
print(eval(self.code, globals(), self.vars))
else:
"""
Detect interactive shell setting in scrapy.cfg
e.g.: ~/.config/scrapy.cfg or ~/.scrapy.cfg
[settings]
# shell can be one of ipython, bpython or python;
# to be used as the interactive python console, if available.
# (default is ipython, fallbacks in the order listed above)
shell = python
"""
cfg = get_config()
section, option = 'settings', 'shell'
env = os.environ.get('SCRAPY_PYTHON_SHELL')
shells = []
if env:
shells += env.strip().lower().split(',')
elif cfg.has_option(section, option):
shells += [cfg.get(section, option).strip().lower()]
else: # try all by default
shells += DEFAULT_PYTHON_SHELLS.keys()
# always add standard shell as fallback
shells += ['python']
start_python_console(self.vars, shells=shells,
banner=self.vars.pop('banner', ''))
def _schedule(self, request, spider):
spider = self._open_spider(request, spider)
d = _request_deferred(request)
d.addCallback(lambda x: (x, spider))
self.crawler.engine.crawl(request, spider)
return d
def _open_spider(self, request, spider):
if self.spider:
return self.spider
if spider is None:
spider = self.crawler.spider or self.crawler._create_spider()
self.crawler.spider = spider
self.crawler.engine.open_spider(spider, close_if_idle=False)
self.spider = spider
return spider
def fetch(self, request_or_url, spider=None, redirect=True, **kwargs):
if isinstance(request_or_url, Request):
request = request_or_url
else:
url = any_to_uri(request_or_url)
request = Request(url, dont_filter=True, **kwargs)
if redirect:
request.meta['handle_httpstatus_list'] = SequenceExclude(range(300, 400))
else:
request.meta['handle_httpstatus_all'] = True
response = None
try:
response, spider = threads.blockingCallFromThread(
reactor, self._schedule, request, spider)
except IgnoreRequest:
pass
self.populate_vars(response, request, spider)
def populate_vars(self, response=None, request=None, spider=None):
import scrapy
self.vars['scrapy'] = scrapy
self.vars['crawler'] = self.crawler
self.vars['item'] = self.item_class()
self.vars['settings'] = self.crawler.settings
self.vars['spider'] = spider
self.vars['request'] = request
self.vars['response'] = response
self.vars['sel'] = _SelectorProxy(response)
if self.inthread:
self.vars['fetch'] = self.fetch
self.vars['view'] = open_in_browser
self.vars['shelp'] = self.print_help
self.update_vars(self.vars)
if not self.code:
self.vars['banner'] = self.get_help()
def print_help(self):
print(self.get_help())
def get_help(self):
b = []
b.append("Available Scrapy objects:")
b.append(" scrapy scrapy module (contains scrapy.Request, scrapy.Selector, etc)")
for k, v in sorted(self.vars.items()):
if self._is_relevant(v):
b.append(" %-10s %s" % (k, v))
b.append("Useful shortcuts:")
if self.inthread:
b.append(" fetch(url[, redirect=True]) "
"Fetch URL and update local objects "
"(by default, redirects are followed)")
b.append(" fetch(req) "
"Fetch a scrapy.Request and update local objects ")
b.append(" shelp() Shell help (print this help)")
b.append(" view(response) View response in a browser")
return "\n".join("[s] %s" % l for l in b)
def _is_relevant(self, value):
return isinstance(value, self.relevant_classes)
def inspect_response(response, spider):
    """Open a shell to inspect the given response"""
    # Reuses the spider's crawler so project settings and state apply.
    Shell(spider.crawler).start(response=response)
def _request_deferred(request):
    """Wrap a request inside a Deferred.

    This function is harmful, do not use it until you know what you are doing.

    This returns a Deferred whose first pair of callbacks are the request
    callback and errback. The Deferred also triggers when the request
    callback/errback is executed (ie. when the request is downloaded)

    WARNING: Do not call request.replace() until after the deferred is called.
    """
    request_callback = request.callback
    request_errback = request.errback

    def _restore_callbacks(result):
        # Restore the original callbacks so the request object is left
        # exactly as it was before wrapping.
        request.callback = request_callback
        request.errback = request_errback
        return result

    d = defer.Deferred()
    d.addBoth(_restore_callbacks)
    if request.callback:
        d.addCallbacks(request.callback, request.errback)

    # Hijack the request's callbacks: firing them now fires the deferred.
    request.callback, request.errback = d.callback, d.errback
    return d
class _SelectorProxy(object):
    """Deprecation shim for the old ``sel`` shell shortcut.

    Forwards every attribute access to ``response.selector``, emitting a
    deprecation warning on each use.
    """

    def __init__(self, response):
        self._proxiedresponse = response

    def __getattr__(self, name):
        warnings.warn('"sel" shortcut is deprecated. Use "response.xpath()", '
                      '"response.css()" or "response.selector" instead',
                      category=ScrapyDeprecationWarning, stacklevel=2)
        return getattr(self._proxiedresponse.selector, name)
|
playingaround2017/test123 | gamera/confidence.py | Python | gpl-2.0 | 7,363 | 0.030151 | # -*- mode: python; indent-tabs-mode: nil; tab-width: 3 -*-
# vim: set tabstop=3 shiftwidth=3 expandtab:
#
# Copyright (C) 2003 Karl MacMillan
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from gamera.core import *
from gamera import knn, gamera_xml, cluster, stats
import math
init_gamera()
def max_distance(glyphs, k):
    """Return the largest unique pairwise distance among *glyphs*,
    as measured by classifier *k*."""
    distances = sorted(k.unique_distances(glyphs, 0))
    return distances[-1]
def local_max_distance(glyphs, x, k):
    """Return the distance from image *x* to its farthest member of *glyphs*,
    as measured by classifier *k*."""
    ranked = sorted(k.distance_from_images(glyphs, x))
    return ranked[-1][0]
def get_graph_stats(glyphs, k):
    """Compute a per-class distance statistic from minimum spanning trees.

    For each glyph category, builds a spanning tree over the class members
    (using classifier *k*'s distance) and records the mean edge cost;
    singleton classes get the neutral value 1.0.

    Returns a dict mapping class id -> mean spanning-tree edge cost.
    """
    s = {}
    gc = knn.glyphs_by_category(glyphs)
    for x in gc.itervalues():
        if len(x) == 1:
            # A single-member class has no edges to average over.
            s[x[0].get_main_id()] = 1.0
            continue
        graph = cluster.make_spanning_tree(x, k)
        edges = []
        for edge in graph.get_edges():
            edges.append(edge.cost)
        id = x[0].get_main_id()
        s[id] = stats.mean(edges)
        #cluster.make_subtrees_stddev(graph, 1, 2, 0)
        #print graph.nedges, len(x)
    return s
def strip_small_categories(glyphs):
    """Return only the glyphs whose category has more than three members."""
    return [glyph
            for members in knn.glyphs_by_category(glyphs).itervalues()
            if len(members) > 3
            for glyph in members]
def test():
glyphs = gamera_xml.glyphs_from_xml(r"C:\Documents and Settings\Karl MacMillan\Desktop\test\prod.xml")
glyphs = strip_small_categories(glyphs)
from gamera.plugins import features
k | = knn.kNN()
print k.features
features.generate_features_list(glyphs, k.feature_functions)
print "Getting gstats"
graph_stats = get_graph_stats(glyphs, k)
gstats = knn.get_glyphs_stats(glyphs)
max_dist = max_distance(glyphs, k)
print max_dist
file = open("results.txt", "w")
global_max = [[],[]]
local_max = [[],[]]
all = [[],[]]
graph = [[],[]]
gr_ccorrect = 0
gr_icorrect = 0
for x in glyphs:
local_max_dist = local_max_distance(glyphs, x, k)
ans = k.classify_with_images(glyphs, x, 1)
file.write(ans[0][1] + ",")# + str(ans[0][0]) + ",")
correct = 0
if x.get_main_id() == ans[0][1]:
file.write("1,")
correct = 1
else:
file.write("0,")
g = 1.0 - (ans[0][0] / max_dist)
global_max[correct].append(g)
file.write(str(g) + ",")
l = 1.0 - (ans[0][0] / local_max_dist)
local_max[correct].append(l)
file.write(str(l) + ",")
a = stats.samplestdev([ans[0][0],gstats[ans[0][1]][1]])
all[correct].append(a)
file.write(str(a) + ",")
gr = stats.samplestdev([ans[0][0],graph_stats[ans[0][1]]])
if (gr <= 1 and correct):
gr_ccorrect += 1
if (gr > 1 and not correct):
gr_icorrect += 1
graph[correct].append(gr)
file.write(str(gr))
file.write("\n")
print "num correct: %d num incorrect: %d" % (len(global_max[1]), len(global_max[0]))
print "confidence %f %f %f" % (((gr_ccorrect + gr_icorrect) / float(len(glyphs))),
gr_ccorrect / float(len(glyphs) - len(global_max[0])),
gr_icorrect / float(len(glyphs) - len(global_max[1])))
cgm = -1
igm = -1
cgs = -1
igs = -1
if (len(global_max[0])):
igm = stats.mean(global_max[0])
igs = stats.samplestdev(global_max[0])
if (len(global_max[1])):
cgm = stats.mean(global_max[1])
cgs = stats.samplestdev(global_max[1])
clm = -1
ilm = -1
cls = -1
ils = -1
if (len(local_max[0])):
ilm = stats.mean(local_max[0])
ils = stats.samplestdev(local_max[0])
if (len(local_max[1])):
clm = stats.mean(local_max[1])
cls = stats.samplestdev(local_max[1])
cam = -1
iam = -1
cas = -1
ias = -1
if (len(all[0])):
iam = stats.mean(all[0])
ias = stats.samplestdev(all[0])
if (len(all[1])):
cam = stats.mean(all[1])
cas = stats.samplestdev(all[1])
cgraphm = -1
igraphm = -1
cgraphs = -1
igraphs = -1
if (len(graph[0])):
igraphm = stats.mean(graph[0])
igraphs = stats.samplestdev(graph[0])
if (len(graph[1])):
cgraphm = stats.mean(graph[1])
cgraphs = stats.samplestdev(graph[1])
print "global correct avg: %f stdev: %f incorrect avg: %f stddev: %f" % (cgm, cgs, igm, igs)
print "local correct avg: %f stdev: %f incorrect avg: %f stddev: %f" % (clm, cls, ilm, ils)
print "all correct avg: %f stdev: %f incorrect avg: %f stddev: %f" % (cam, cas, iam, ias)
print "graph correct avg: %f stdev: %f incorrect avg: %f stddev: %f" % (cgraphm, cgraphs, igraphm, igraphs)
def otsu_threshold(p):
    """Return the Otsu threshold index for the normalized histogram *p*.

    *p* is a sequence of bin probabilities (summing to ~1). The threshold
    maximizes the between-class variance ratio sigma_b/sigma_T.

    Fixes over the previous version:
      * the loop trimming empty bins from the HIGH end incorrectly advanced
        k_low as well, skipping valid low bins;
      * degenerate splits (omega_k of 0 or 1) divided by zero; they are now
        skipped, since they cannot be a meaningful threshold anyway.
    """
    l = len(p)
    # Global mean and variance of the histogram.
    mu_T = 0.0
    for i in range(l):
        mu_T += i * p[i]
    sigma_T = 0.0
    for i in range(l):
        sigma_T += (i - mu_T) * (i - mu_T) * p[i]
    # Trim empty bins from both ends.
    k_low = 0
    while (p[k_low] == 0) and (k_low < (l - 1)):
        k_low += 1
    k_high = l - 1
    while (p[k_high] == 0) and (k_high > 0):
        k_high -= 1  # fixed: previously also advanced k_low here
    criterion = 0.0
    thresh = 127  # fallback when no valid split exists
    omega_k = 0.0
    mu_k = 0.0
    for k in range(k_low, k_high + 1):
        omega_k += p[k]
        mu_k += k * p[k]
        denom = omega_k * (1.0 - omega_k)
        if denom <= 0.0 or sigma_T == 0.0:
            # All mass on one side of the split: not a usable threshold.
            continue
        expr_1 = mu_T * omega_k - mu_k
        sigma_b_k = expr_1 * expr_1 / denom
        if criterion < sigma_b_k / sigma_T:
            criterion = sigma_b_k / sigma_T
            thresh = k
    return thresh
graph_l = graph[0][:]
graph_l.extend(graph[1])
graph_l.sort()
threshold = stats.mean(graph_l)
print "threshold: " + str(threshold)
num_wrong = 0
for x in graph[0]:
if x < threshold:
num_wrong += 1
print num_wrong, num_wrong / float(len(graph[0])) * 100
num_wrong = 0
for x in graph[1]:
if x >= threshold:
num_wrong += 1
print num_wrong, num_wrong / float(len(graph[1])) * 100
graph_l = all[0][:]
graph_l.extend(all[1])
graph_l.sort()
threshold = stats.mean(graph_l)
print "threshold: " + str(threshold)
num_wrong = 0
for x in graph[0]:
if x < threshold:
num_wrong += 1
print num_wrong, num_wrong / float(len(graph[0])) * 100
num_wrong = 0
for x in graph[1]:
if x >= threshold:
num_wrong += 1
print num_wrong, num_wrong / float(len(graph[1])) * 100
graph_l = local_max[0][:]
graph_l.extend(local_max[1])
graph_l.sort()
threshold = stats.mean(graph_l)
print "threshold: " + str(threshold)
num_wrong = 0
for x in graph[0]:
if x < threshold:
num_wrong += 1
print num_wrong, num_wrong / float(len(graph[0])) * 100
num_wrong = 0
for x in graph[1]:
if x >= threshold:
num_wrong += 1
print num_wrong, num_wrong / float(len(graph[1])) * 100
#test()
|
erangre/Dioptas | dioptas/controller/integration/OptionsController.py | Python | gpl-3.0 | 5,126 | 0.003316 | # -*- coding: utf-8 -*-
# Dioptas - GUI program for fast processing of 2D X-ray diffraction data
# Principal author: Clemens Prescher (clemens.prescher@gmail.com)
# Copyright (C) 2014-2019 GSECARS, University of Chicago, USA
# Copyright (C) 2015-2018 Institute for Geology and Mineralogy, University of Cologne, Germany
# Copyright (C) 2019 DESY, Hamburg, Germany
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# imports for type hinting in PyCharm -- DO NOT DELETE
from ...widgets.integration import IntegrationWidget
from ...model.DioptasModel import DioptasModel
class OptionsController(object):
    """Synchronizes the integration-options widget with the DioptasModel.

    Handles the solid-angle correction checkbox and the cake azimuth
    bin-count / range controls, in both directions (widget -> model and
    model -> widget).
    """

    def __init__(self, widget, dioptas_model):
        """
        :param widget: Reference to an IntegrationWidget
        :param dioptas_model: reference to DioptasModel object

        :type widget: IntegrationWidget
        :type dioptas_model: DioptasModel
        """
        self.integration_widget = widget
        self.options_widget = self.integration_widget.integration_control_widget.integration_options_widget
        self.model = dioptas_model

        self.connect_signals()

    def connect_signals(self):
        """Wire widget and model signals to the handlers below."""
        self.options_widget.correct_solid_angle_cb.stateChanged.connect(self.correct_solid_angle_cb_clicked)
        self.model.configuration_selected.connect(self.update_gui)
        self.model.pattern_changed.connect(self.update_gui)
        self.options_widget.cake_azimuth_points_sb.valueChanged.connect(self.cake_azimuth_points_changed)
        self.options_widget.cake_azimuth_min_txt.editingFinished.connect(self.cake_azimuth_range_changed)
        self.options_widget.cake_azimuth_max_txt.editingFinished.connect(self.cake_azimuth_range_changed)
        self.options_widget.cake_full_toggle_btn.toggled.connect(self.cake_full_toggled_btn_changed)

    def correct_solid_angle_cb_clicked(self):
        """Push the solid-angle-correction checkbox state into the model."""
        self.model.current_configuration.correct_solid_angle = self.options_widget.correct_solid_angle_cb.isChecked()

    def update_gui(self):
        """Refresh all option widgets from the currently selected configuration."""
        # Block signals so programmatic updates don't re-trigger the handlers.
        self.options_widget.blockSignals(True)
        self.options_widget.correct_solid_angle_cb.setChecked(self.model.current_configuration.correct_solid_angle)
        self.options_widget.bin_count_txt.setText("{:1.0f}".format(self.model.calibration_model.num_points))
        self.options_widget.cake_azimuth_points_sb.setValue(self.model.current_configuration.cake_azimuth_points)
        # A range of None means "integrate the full azimuth circle".
        if self.model.current_configuration.cake_azimuth_range is None:
            self.enable_full_cake_range()
        else:
            self.options_widget.cake_azimuth_min_txt.setText(
                '{}'.format(self.model.current_configuration.cake_azimuth_range[0]))
            self.options_widget.cake_azimuth_max_txt.setText(
                '{}'.format(self.model.current_configuration.cake_azimuth_range[1]))
        self.options_widget.blockSignals(False)
        # NOTE(review): as ordered here this call runs unconditionally and
        # undoes enable_full_cake_range() above — it likely belongs inside
        # the else branch. Confirm against upstream before changing.
        self.disable_full_cake_range()

    def cake_azimuth_range_changed(self):
        """Propagate the min/max azimuth text fields to the model as floats."""
        range_min = float(self.options_widget.cake_azimuth_min_txt.text())
        range_max = float(self.options_widget.cake_azimuth_max_txt.text())
        self.model.current_configuration.cake_azimuth_range = (range_min, range_max)

    def cake_azimuth_points_changed(self):
        """Propagate the azimuth bin-count spinbox value to the model."""
        self.model.current_configuration.cake_azimuth_points = int(
            self.options_widget.cake_azimuth_points_sb.value())

    def cake_full_toggled_btn_changed(self):
        """Switch between full-circle and explicit azimuth-range modes."""
        if self.options_widget.cake_full_toggle_btn.isChecked():
            self.enable_full_cake_range()
            self.model.current_configuration.cake_azimuth_range = None
        elif not self.options_widget.cake_full_toggle_btn.isChecked():
            self.disable_full_cake_range()
            self.cake_azimuth_range_changed()

    def enable_full_cake_range(self):
        """Grey out the range inputs; allow azimuth-shifting of the cake."""
        self.options_widget.cake_azimuth_min_txt.setDisabled(True)
        self.options_widget.cake_azimuth_max_txt.setDisabled(True)
        self.integration_widget.cake_shift_azimuth_sl.setDisabled(False)

    def disable_full_cake_range(self):
        """Enable the range inputs; reset and lock the azimuth-shift slider."""
        self.options_widget.cake_azimuth_min_txt.setDisabled(False)
        self.options_widget.cake_azimuth_max_txt.setDisabled(False)
        self.integration_widget.cake_shift_azimuth_sl.setDisabled(True)
        self.integration_widget.cake_shift_azimuth_sl.setValue(0)
|
Kupoman/blendergltf | blendergltf/extension_exporters/khr_technique_webgl.py | Python | apache-2.0 | 14,656 | 0.000887 | import base64
import os
import re
import bpy
import gpu
LAMP_TYPES = [
gpu.GPU_DYNAMIC_LAMP_DYNVEC,
gpu.GPU_DYNAMIC_LAMP_DYNCO,
gpu.GPU_DYNAMIC_LAMP_DYNIMAT,
gpu.GPU_DYNAMIC_LAMP_DYNPERSMAT,
gpu.GPU_DYNAMIC_LAMP_DYNENERGY,
gpu.GPU_DYNAMIC_LAMP_DYNENERGY,
gpu.GPU_DYNAMIC_LAMP_DYNCOL,
gpu.GPU_DYNAMIC_LAMP_DISTANCE,
gpu.GPU_DYNAMIC_LAMP_ATT1,
gpu.GPU_DYNAMIC_LAMP_ATT2,
gpu.GPU_DYNAMIC_LAMP_SPOTSIZE,
gpu.GPU_DYNAMIC_LAMP_SPOTBLEND,
]
MIST_TYPES = [
gpu.GPU_DYNAMIC_MIST_ENABLE,
gpu.GPU_DYNAMIC_MIST_START,
gpu.GPU_DYNAMIC_MIST_DISTANCE,
gpu.GPU_DYNAMIC_MIST_INTENSITY,
gpu.GPU_DYNAMIC_MIST_TYPE,
gpu.GPU_DYNAMIC_MIST_COLOR,
]
WORLD_TYPES = [
gpu.GPU_DYNAMIC_HORIZON_COLOR,
gpu.GPU_DYNAMIC_AMBIENT_COLOR,
]
MATERIAL_TYPES = [
gpu.GPU_DYNAMIC_MAT_DIFFRGB,
gpu.GPU_DYNAMIC_MAT_REF,
gpu.GPU_DYNAMIC_MAT_SPECRGB,
gpu.GPU_DYNAMIC_MAT_SPEC,
gpu.GPU_DYNAMIC_MAT_HARD,
gpu.GPU_DYNAMIC_MAT_EMIT,
gpu.GPU_DYNAMIC_MAT_AMB,
gpu.GPU_DYNAMIC_MAT_ALPHA,
]
TYPE_TO_NAME = {
gpu.GPU_DYNAMIC_OBJECT_VIEWMAT: 'view_mat',
gpu.GPU_DYNAMIC_OBJECT_MAT: 'model_mat',
gpu.GPU_DYNAMIC_OBJECT_VIEWIMAT: 'inv_view_mat',
gpu.GPU_DYNAMIC_OBJECT_IMAT: 'inv_model_mat',
gpu.GPU_DYNAMIC_OBJECT_COLOR: 'color',
gpu.GPU_DYNAMIC_OBJECT_AUTOBUMPSCALE: 'auto_bump_scale',
gpu.GPU_DYNAMIC_MIST_ENABLE: 'use_mist',
gpu.GPU_DYNAMIC_MIST_START: 'start',
gpu.GPU_DYNAMIC_MIST_DISTANCE: 'depth',
gpu.GPU_DYNAMIC_MIST_INTENSITY: 'intensity',
gpu.GPU_DYNAMIC_MIST_TYPE: 'falloff',
gpu.GPU_DYNAMIC_MIST_COLOR: 'color',
gpu.GPU_DYNAMIC_HORIZON_COLOR: 'horizon_color',
gpu.GPU_DYNAMIC_AMBIENT_COLOR: 'ambient_color',
gpu.GPU_DYNAMIC_LAMP_DYNVEC: 'dynvec',
gpu.GPU_DYNAMIC_LAMP_DYNCO: 'dynco',
gpu.GPU_DYNAMIC_LAMP_DYNIMAT: 'dynimat',
gpu.GPU_DYNAMIC_LAMP_DYNPERSMAT: 'dynpersmat',
gpu.GPU_DYNAMIC_LAMP_DYNENERGY: 'energy',
gpu.GPU_DYNAMIC_LAMP_DYNCOL: 'color',
gpu.GPU_DYNAMIC_LAMP_DISTANCE: 'distance',
gpu.GPU_DYNAMIC_LAMP_ATT1: 'linear_attenuation',
gpu.GPU_DYNAMIC_LAMP_ATT2: 'quadratic_attenuation',
gpu.GPU_DYNAMIC_LAMP_SPOTSIZE: 'spot_size',
gpu.GPU_DYNAMIC_LAMP_SPOTBLEND: 'spot_blend',
gpu.GPU_DYNAMIC_MAT_DIFFRGB: 'diffuse_color',
gpu.GPU_DYNAMIC_MAT_REF: 'diffuse_intensity',
gpu.GPU_DYNAMIC_MAT_SPECRGB: 'specular_color',
gpu.GPU_DYNAMIC_MAT_SPEC: 'specular_intensity',
gpu.GPU_DYNAMIC_MAT_HARD: 'specular_hardness',
gpu.GPU_DYNAMIC_MAT_EMIT: 'emit',
gpu.GPU_DYNAMIC_MAT_AMB: 'ambient',
gpu.GPU_DYNAMIC_MAT_ALPHA: 'alpha',
}
TYPE_TO_SEMANTIC = {
gpu.GPU_DYNAMIC_LAMP_DYNVEC: 'BL_DYNVEC',
gpu.GPU_DYNAMIC_LAMP_DYNCO: 'MODELVIEW', # dynco gets extracted from the matrix
gpu.GPU_DYNAMIC_LAMP_DYNIMAT: 'BL_DYNIMAT',
gpu.GPU_DYNAMIC_LAMP_DYNPERSMAT: 'BL_DYNPERSMAT',
gpu.CD_ORCO: 'POSITION',
gpu.CD_MTFACE: 'TEXCOORD_0',
-1: 'NORMAL' # Hack until the gpu module has something for normals
}
DATATYPE_TO_CONVERTER = {
gpu.GPU_DATA_1I: lambda x: x,
gpu.GPU_DATA_1F: lambda x: x,
gpu.GPU_DATA_2F: list,
gpu.GPU_DATA_3F: list,
gpu.GPU_DATA_4F: list,
}
DATATYPE_TO_GLTF_TYPE = {
gpu.GPU_DATA_1I: 5124, # INT
gpu.GPU_DATA_1F: 5126, # FLOAT
gpu.GPU_DATA_2F: 35664, # FLOAT_VEC2
gpu.GPU_DATA_3F: 35665, # FLOAT_VEC3
gpu.GPU_DATA_4F: 35666, # FLOAT_VEC4
gpu.GPU_DATA_9F: 35675, # FLOAT_MAT3
gpu.GPU_DATA_16F: 35676, # FLOAT_MAT4
}
def vs_to_130(data):
data['attributes'].append({
'varname': 'bl_Vertex',
'type': gpu.CD_ORCO,
'datatype': gpu.GPU_DATA_4F
})
data['attributes'].append({
'varname': 'bl_Normal',
'type': -1,
'datatype': gpu.GPU_DATA_3F
})
data['uniforms'].append({
'varname': 'bl_ModelViewMatrix',
'type': 0,
'datatype': gpu.GPU_DATA_16F,
})
data['uniforms'].append({
'varname': 'bl_ProjectionMatrix',
'type': 0,
'datatype': gpu.GPU_DATA_16F,
})
data['uniforms'].append({
'varname': 'bl_NormalMatrix',
'type': 0,
'datatype': gpu.GPU_DATA_9F,
})
src = '#version 130\n'
src += 'in vec4 bl_Vertex;\n'
src += 'in vec3 bl_Normal;\n'
src += 'uniform mat4 bl_ModelViewMatrix;\n'
src += 'uniform mat4 bl_ProjectionMatrix;\n'
src += 'uniform mat3 bl_NormalMatrix;\n'
src += data['vertex']
src = re.sub(r'#ifdef USE_OPENSUBDIV([^#]*)#endif', '', src)
src = re.sub(r'#ifndef USE_OPENSUBDIV([^#]*)#endif', r'\1', src)
src = re.sub(r'#ifdef CLIP_WORKAROUND(.*?)#endif', '', src, 0, re.DOTALL)
src = re.sub(r'\bvarying\b', 'out', src)
src = re.sub(r'\bgl_(?!Position)(.*?)\b', r'bl_\1', src)
data['vertex'] = src
def fs_to_130(data):
src = '#version 130\n'
src += 'out vec4 frag_color;\n'
src += 'uniform mat4 bl_ProjectionMatrix;\n'
src += 'uniform mat4 bl_ModelViewMatrix;\n'
src += 'uniform mat4 bl_ModelViewMatrixInverse;\n'
src += 'uniform mat3 bl_NormalMatrix;\n'
src += 'uniform mat4 bl_ProjectionMatrixInverse;\n'
src += data['fragment']
src = re.sub(r'\bvarying\b', 'in', src)
src = re.sub(r'\bgl_FragColor\b', 'frag_color', src)
src = re.sub(r'\bgl_(?!FrontFacing)(.*?)\b', r'bl_\1', src)
# Cannot support node_bsdf functions without resolving use of gl_Light
src = re.sub(r'void node_((bsdf)|(subsurface))_.*?^}', '', src, 0, re.DOTALL | re.MULTILINE)
# Need to gather light data from more general uniforms
light_count = 0
light_map = {}
decl_start_str = 'void main()\n{\n'
for uniform in data['uniforms']:
if uniform['type'] == gpu.GPU_DYNAMIC_LAMP_DYNCO:
lamp_name = uniform['lamp'].name
if lamp_name not in light_map:
light_map[lamp_name] = light_count
light_count += 1
light_index = light_map[lamp_name]
varname = 'light{}_transform'.format(light_index)
uniform['datatype'] = gpu.GPU_DATA_16F
src = src.replace(
'uniform vec3 {};'.format(uniform['varname']),
'uniform mat4 {};'.format(varname)
)
var_decl_start = src.find(decl_start_str) + len(decl_start_str)
decl_str = '\tvec3 {} = {}[3].xyz;\n'.format(uniform['varname'], varname)
src = src[:var_decl_start] + decl_str + src[var_decl_start:]
uniform['varname'] = varname
data['fragment'] = src.replace('\r\r\n', '')
def vs_to_web(data):
    """Downgrade the GLSL 1.30 vertex shader in *data* to WebGL (GLSL ES 1.00),
    in place."""
    precision_block = '\n' + ''.join(
        'precision mediump {};\n'.format(scalar) for scalar in ('float', 'int'))
    shader = data['vertex'].replace('#version 130',
                                    '#version 100\n' + precision_block)
    # GLSL ES 1.00 vertex shaders use attribute/varying instead of in/out.
    shader = re.sub(r'\bin\b', 'attribute', shader)
    shader = re.sub(r'\bout\b', 'varying', shader)
    data['vertex'] = shader
def fs_to_web(data):
    """Downgrade the GLSL 1.30 fragment shader in *data* to WebGL
    (GLSL ES 1.00), in place."""
    header = '#version 100\n'
    header += '#extension GL_OES_standard_derivatives: enable\n'
    header += '\n'
    for scalar in ('float', 'int'):
        header += 'precision mediump {};\n'.format(scalar)

    shader = data['fragment'].replace('#version 130', header)
    # GLSL ES 1.00 fragment shaders take varyings and write gl_FragColor.
    shader = re.sub(r'\bin\b', 'varying', shader)
    shader = shader.replace('out vec4 frag_color;\n', '')
    shader = re.sub(r'\bfrag_color\b', 'gl_FragColor', shader)
    # TODO: This should be fixed in Blender
    shader = shader.replace('blend = (normalize(vec).z + 1)',
                            'blend = (normalize(vec).z + 1.0)')
    # TODO: This likely breaks shadows
    shader = shader.replace('sampler2DShadow', 'sampler2D')
    shader = shader.replace('shadow2DProj', 'texture2DProj')
    data['fragment'] = shader
def to_130(data):
    """Upgrade both shader stages in *data* to GLSL 1.30, in place."""
    vs_to_130(data)
    fs_to_130(data)
def to_web(data):
    """Convert both shader stages in *data* to WebGL-compatible GLSL ES 1.00,
    in place (via the intermediate 1.30 form)."""
    to_130(data)
    vs_to_web(data)
    fs_to_web(data)
class KhrTechniqueWebgl:
ext_meta = {
'name': 'KHR_technique_webgl',
'url': (
'https://github.com/KhronosGroup/glTF/tree/master/extensions/'
'Khronos/KHR_technique_webgl'
),
'isDraft': True,
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.