| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
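The rows below are fill-in-the-middle style samples: each source file is stored as three spans, `prefix`, `middle`, and `suffix`, whose concatenation reconstructs the original file. A minimal sketch of reassembling the files, assuming the rows have been exported to a JSONL file (the file name is hypothetical; the field names mirror the column schema above):

```python
import json

# Hypothetical export of the rows shown below, one JSON object per line.
with open("samples.jsonl", encoding="utf-8") as fh:
    for line in fh:
        row = json.loads(line)
        # Concatenating the three spans yields the full source file.
        full_source = row["prefix"] + row["middle"] + row["suffix"]
        print(row["repo_name"], row["path"], len(full_source), "chars")
```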
pythonfoo/pythonfooLite
|
Level_02/passwort.py
|
Python
|
gpl-3.0
| 222
| 0.009009
|
#!/usr/bin/env python3
from getpass import getpass
PWD = "123456" # type: str
eingabe = getpass() # type: str
if eingabe == PWD:
print("Richtig.")
elif eingabe in PWD:
print("Fast.")
else:
print("Falsc
|
h.")
|
arenadata/ambari
|
ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin.py
|
Python
|
apache-2.0
| 4,473
| 0.014979
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ["setup_ranger_plugin"]
import os
from datetime import datetime
from resource_management.libraries.functions.ranger_functions import Rangeradmin
from resource_management.core.resources import File, Execute
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.core.logger import Logger
from resource_management.core.source import DownloadSource
from resource_management.libraries.resources import ModifyPropertiesFile
from resource_management.core.exceptions import Fail
from resource_management.libraries.functions.ranger_functions_v2 import RangeradminV2
from resource_management.libraries.script.script import Script
def setup_ranger_plugin(component_select_name, service_name, previous_jdbc_jar,
downloaded_custom_connector, driver_curl_source,
driver_curl_target, java_home,
repo_name, plugin_repo_dict,
ranger_env_properties, plugin_properties,
policy_user, policymgr_mgr_url,
plugin_enabled, component_user, component_group, api_version=None, skip_if_rangeradmin_down = True, **kwargs):
if driver_curl_source and not driver_curl_source.endswith("/None"):
if previous_jdbc_jar and os.path.isfile(previous_jdbc_jar):
File(previous_jdbc_jar, action='delete')
File(downloaded_custom_connector,
content = DownloadSource(driver_curl_source),
mode = 0644
)
Execute(('cp', '--remove-destination', downloaded_custom_connector, driver_curl_target),
path=["/bin", "/usr/bin/"],
sudo=True
)
File(driver_curl_target, mode=0644)
if policymgr_mgr_url.endswith('/'):
policymgr_mgr_url = policymgr_mgr_url.rstrip('/')
stack_root = Script.get_stack_root()
stack_version = get_stack_version(component_select_name)
file_path = format('{stack_root}/{stack_version}/ranger-{service_name}-plugin/install.properties')
if not os.path.isfile(file_path):
raise Fail(format('Ranger {service_name} plugin install.properties file does not exist at {file_path}'))
ModifyPropertiesFile(file_path,
properties = plugin_properties
)
custom_plugin_properties = dict()
custom_plugin_properties['CUSTOM_USER'] = component_user
custom_plugin_properties['CUSTOM_GROUP'] = component_group
ModifyPropertiesFile(file_path,properties = custom_plugin_properties)
if plugin_enabled:
cmd = (format('enable-{service_name}-plugin.sh'),)
if api_version == 'v2' and api_version is not None:
ranger_adm_obj = RangeradminV2(url=policymgr_mgr_url, skip_if_rangeradmin_down = skip_if_rangeradmin_down)
else:
ranger_adm_obj = Rangeradmin(url=policymgr_mgr_url, skip_if_rangeradmin_down = skip_if_rangeradmin_down)
ranger_adm_obj.create_ranger_repository(service_name, repo_name, plugin_repo_dict,
ranger_env_properties['ranger_admin_username'], ranger_env_properties['ranger_admin_password'],
ranger_env_properties['admin_username'], ranger_env_properties['admin_password'],
policy_user)
else:
cmd = (format('disable-{service_name}-plugin.sh'),)
cmd_env = {'JAVA_HOME': java_home,
'PWD': format('{stack_root}/{stack_version}/ranger-{service_name}-plugin'),
'PATH': format('{stack_root}/{stack_version}/ranger-{service_name}-plugin')}
Execute(cmd,
environment=cmd_env,
logoutput=True,
sudo=True,
)
|
dhruvaldarji/InternetProgramming
|
Assignment_6/Assignment_6/settings.py
|
Python
|
mit
| 3,248
| 0.001539
|
"""
Django settings for Assignment_6 project.
Generated by 'django-admin startproject' using Django 1.9.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ky31$91zy3!avtsc8=#1t962dvj#j^a+omn92c_5m0-z+-3j6$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'measurements',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Assignment_6.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Assignment_6.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
wengzhilai/family
|
iSoft/model/framework/PostBaseModel.py
|
Python
|
bsd-3-clause
| 130
| 0.055556
|
class PostBaseModel(object):
#主键
Key=None
Token=None
def __init__(self,jsonObj):
        self.__dict__=jsonObj
|
luotao1/Paddle
|
python/paddle/fluid/tests/unittests/test_space_to_depth_op.py
|
Python
|
apache-2.0
| 5,176
| 0
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
from op_test import OpTest
class TestSpaceToDepthOp(OpTest):
@staticmethod
def helper(in_, width, height, channel, batch, blocksize, forward, out_):
channel_out = channel // (blocksize * blocksize)
for b in range(batch):
for k in range(channel):
for j in range(height):
for i in range(width):
in_index = i + width * (j + height * (k + channel * b))
channel2 = k % channel_out
offset = k // channel_out
width2 = i * blocksize + offset % blocksize
height2 = j * blocksize + offset // blocksize
out_index = width2 + width * blocksize * (
                            height2 + height * blocksize *
(channel2 + channel_out * b))
if forward:
out_[out_index] = in_[in_index]
else:
out_[in_index] = in_[out_index]
def setUp(self):
self.init_data()
self.op_type = "space_to_depth"
self.inputs = {"X": self.x}
        self.helper(self.x_1d, self.x.shape[3], self.x.shape[2],
self.x.shape[1], self.x.shape[0], self.blocksize,
self.forward, self.out_1d)
self.out = np.reshape(self.out_1d, self.infered_shape)
self.attrs = {"blocksize": self.blocksize}
self.outputs = {"Out": self.out}
def init_data(self):
self.ori_shape = (32, 12, 6, 6)
self.infered_shape = (32, 48, 3, 3)
self.one_d_len = 32 * 48 * 3 * 3
self.blocksize = 2
self.x = np.random.random(self.ori_shape).astype('float64')
self.x_1d = np.reshape(self.x, self.one_d_len)
self.out = np.zeros(self.infered_shape).astype('float64')
self.out_1d = np.reshape(self.out, self.one_d_len)
self.forward = 1
def test_check_output(self):
place = fluid.core.CUDAPlace(0) if fluid.core.is_compiled_with_cuda(
) else fluid.core.CPUPlace()
self.check_output_with_place(place, 1e-5, None, False)
def test_check_grad(self):
place = fluid.core.CUDAPlace(0) if fluid.core.is_compiled_with_cuda(
) else fluid.core.CPUPlace()
self.check_grad_with_place(place, ['X'], 'Out')
class TestSpaceToDepthOpBasic(TestSpaceToDepthOp):
def init_data(self):
self.ori_shape = (32, 8, 6, 6)
self.infered_shape = (32, 32, 3, 3)
self.one_d_len = 32 * 32 * 3 * 3
self.blocksize = 2
self.x = np.random.random(self.ori_shape).astype('float64')
self.x_1d = np.reshape(self.x, self.one_d_len)
self.out = np.zeros(self.infered_shape).astype('float64')
self.out_1d = np.reshape(self.out, self.one_d_len)
self.forward = 1
class TestSpaceToDepthOpDoubleBasic(TestSpaceToDepthOp):
def init_data(self):
self.ori_shape = (32, 8, 6, 6)
self.infered_shape = (32, 32, 3, 3)
self.one_d_len = 32 * 32 * 3 * 3
self.blocksize = 2
self.x = np.random.random(self.ori_shape).astype('float64')
self.x_1d = np.reshape(self.x, self.one_d_len)
self.out = np.zeros(self.infered_shape).astype('float64')
self.out_1d = np.reshape(self.out, self.one_d_len)
self.forward = 1
class TestSpaceToDepthOpWithStride3(TestSpaceToDepthOp):
def init_data(self):
self.ori_shape = (32, 9, 6, 6)
self.infered_shape = (32, 81, 2, 2)
self.one_d_len = 32 * 81 * 2 * 2
self.blocksize = 3
self.x = np.random.random(self.ori_shape).astype('float64')
self.x_1d = np.reshape(self.x, self.one_d_len)
self.out = np.zeros(self.infered_shape).astype('float64')
self.out_1d = np.reshape(self.out, self.one_d_len)
self.forward = 1
class TestSpaceToDepthOpWithNotSquare(TestSpaceToDepthOp):
def init_data(self):
self.ori_shape = (32, 9, 9, 6)
self.infered_shape = (32, 81, 3, 2)
self.one_d_len = 32 * 81 * 3 * 2
self.blocksize = 3
self.x = np.random.random(self.ori_shape).astype('float64')
self.x_1d = np.reshape(self.x, self.one_d_len)
self.out = np.zeros(self.infered_shape).astype('float64')
self.out_1d = np.reshape(self.out, self.one_d_len)
self.forward = 1
if __name__ == '__main__':
unittest.main()
|
craws/OpenAtlas-Python
|
openatlas/database/connect.py
|
Python
|
gpl-2.0
| 946
| 0
|
from typing import Any, Dict
from flask import g
from psycopg2 import connect, extras
def open_connection(config: Dict[str, Any]) -> None:
try:
g.db = connect(
database=config['DATABASE_NAME'],
user=config['DATABASE_USER'],
password=config['DATABASE_PASS'],
port=config['DATABASE_PORT'],
host=config['DATABASE_HOST'])
g.db.autocommit = True
    except Exception as e:  # pragma: no cover
print("Database connection failed")
raise Exception(e)
g.cursor = g.db.cursor(cursor_factory=extras.DictCursor)
def close_connection() -> None:
if hasattr(g, 'db'):
g.db.close()
class Transaction:
@staticmethod
def begin() -> None:
        g.cursor.execute('BEGIN')
@staticmethod
def commit() -> None:
g.cursor.execute('COMMIT')
@staticmethod
def rollback() -> None:
g.cursor.execute('ROLLBACK')
|
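Illustrative only: a sketch of how the helpers in the OpenAtlas sample above might be exercised inside a Flask application context. It assumes `open_connection`, `close_connection`, and `Transaction` are importable from that module; the configuration values are assumptions, not taken from the sample, and a reachable PostgreSQL instance is required.

```python
from flask import Flask, g

app = Flask(__name__)
config = {
    'DATABASE_NAME': 'openatlas',   # assumed credentials/host, for illustration only
    'DATABASE_USER': 'openatlas',
    'DATABASE_PASS': 'secret',
    'DATABASE_HOST': 'localhost',
    'DATABASE_PORT': 5432,
}

with app.app_context():
    open_connection(config)          # sets g.db and g.cursor as shown above
    Transaction.begin()
    try:
        g.cursor.execute('SELECT 1')
        Transaction.commit()
    except Exception:
        Transaction.rollback()
        raise
    finally:
        close_connection()
```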
kerneltask/micropython
|
tests/basics/io_stringio1.py
|
Python
|
mit
| 894
| 0
|
try:
import uio as io
except ImportError:
import io
a = io.StringIO()
print('io.StringIO' in repr(a))
print(a.getvalue())
print(a.read())
a = io.StringIO("foobar")
print(a.getvalue())
print(a.read())
print(a.read())
a = io.StringIO()
a.write("foo")
print(a.getvalue())
a = io.StringIO("foo")
a.write("12")
print(a.getvalue())
a = io.StringIO("foo")
a.write("123")
print(a.getvalue())
a = io.StringIO("foo")
a.write("1234")
print(a.getvalue())
a = io.StringIO()
a.write("foo")
print(a.read())
a = io.StringIO()
print(a.tell())
a.write("foo")
print(a.tell())
a = io.StringIO()
a.close()
for f in [a.read, a.getvalue, lambda:a.write("")]:
    # CPython throws for operations on closed I/O, MicroPython makes
# the underlying string empty unless MICROPY_CPYTHON_COMPAT defined
try:
f()
print("ValueError")
except ValueError:
print("ValueError")
|
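The comment in the sample above notes that CPython raises for operations on a closed stream, while MicroPython (without MICROPY_CPYTHON_COMPAT) returns empty results instead. A minimal CPython-only sketch of the behaviour being relied on:

```python
import io

s = io.StringIO("abc")
s.close()
try:
    s.read()
except ValueError as exc:
    # CPython raises: "I/O operation on closed file"
    print("ValueError:", exc)
```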
tomviner/pytest
|
testing/test_tmpdir.py
|
Python
|
mit
| 12,284
| 0.00057
|
import os
import stat
import sys
import attr
import pytest
from _pytest import pathlib
from _pytest.pathlib import Path
def test_tmpdir_fixture(testdir):
p = testdir.copy_example("tmpdir/tmpdir_fixture.py")
results = testdir.runpytest(p)
results.stdout.fnmatch_lines(["*1 passed*"])
@attr.s
class FakeConfig:
basetemp = attr.ib()
@property
def trace(self):
return self
def get(self, key):
return lambda *k: None
@property
def option(self):
return self
class TestTempdirHandler:
def test_mktemp(self, tmp_path):
from _pytest.tmpdir import TempdirFactory, TempPathFactory
config = FakeConfig(tmp_path)
t = TempdirFactory(TempPathFactory.from_config(config))
tmp = t.mktemp("world")
assert tmp.relto(t.getbasetemp()) == "world0"
tmp = t.mktemp("this")
assert tmp.relto(t.getbasetemp()).startswith("this")
tmp2 = t.mktemp("this")
        assert tmp2.relto(t.getbasetemp()).startswith("this")
assert tmp2 != tmp
def test_tmppath_relative_basetemp_absolute(self, tmp_path, monkeypatch):
"""#4425"""
from _pytest.tmpdir import TempPathFactory
monkeypatch.chdir(tmp_path)
config = FakeConfig("hello")
t = TempPathFactory.from_config(config)
assert t.getbasetemp().resolve() == (tmp_path / "hello").resolve()
class TestConfigTmpdir:
def test_getbasetemp_custom_removes_old(self, testdir):
mytemp = testdir.tmpdir.join("xyz")
p = testdir.makepyfile(
"""
def test_1(tmpdir):
pass
"""
)
testdir.runpytest(p, "--basetemp=%s" % mytemp)
mytemp.check()
mytemp.ensure("hello")
testdir.runpytest(p, "--basetemp=%s" % mytemp)
mytemp.check()
assert not mytemp.join("hello").check()
def test_basetemp(testdir):
mytemp = testdir.tmpdir.mkdir("mytemp")
p = testdir.makepyfile(
"""
import pytest
def test_1(tmpdir_factory):
tmpdir_factory.mktemp('hello', numbered=False)
"""
)
result = testdir.runpytest(p, "--basetemp=%s" % mytemp)
assert result.ret == 0
print(mytemp)
assert mytemp.join("hello").check()
def test_tmpdir_always_is_realpath(testdir):
# the reason why tmpdir should be a realpath is that
# when you cd to it and do "os.getcwd()" you will anyway
# get the realpath. Using the symlinked path can thus
# easily result in path-inequality
# XXX if that proves to be a problem, consider using
# os.environ["PWD"]
realtemp = testdir.tmpdir.mkdir("myrealtemp")
linktemp = testdir.tmpdir.join("symlinktemp")
attempt_symlink_to(linktemp, str(realtemp))
p = testdir.makepyfile(
"""
def test_1(tmpdir):
import os
assert os.path.realpath(str(tmpdir)) == str(tmpdir)
"""
)
result = testdir.runpytest("-s", p, "--basetemp=%s/bt" % linktemp)
assert not result.ret
def test_tmp_path_always_is_realpath(testdir, monkeypatch):
# for reasoning see: test_tmpdir_always_is_realpath test-case
realtemp = testdir.tmpdir.mkdir("myrealtemp")
linktemp = testdir.tmpdir.join("symlinktemp")
attempt_symlink_to(linktemp, str(realtemp))
monkeypatch.setenv("PYTEST_DEBUG_TEMPROOT", str(linktemp))
testdir.makepyfile(
"""
def test_1(tmp_path):
assert tmp_path.resolve() == tmp_path
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_tmpdir_too_long_on_parametrization(testdir):
testdir.makepyfile(
"""
import pytest
@pytest.mark.parametrize("arg", ["1"*1000])
def test_some(arg, tmpdir):
tmpdir.ensure("hello")
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_tmpdir_factory(testdir):
testdir.makepyfile(
"""
import pytest
@pytest.fixture(scope='session')
def session_dir(tmpdir_factory):
return tmpdir_factory.mktemp('data', numbered=False)
def test_some(session_dir):
assert session_dir.isdir()
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_tmpdir_fallback_tox_env(testdir, monkeypatch):
"""Test that tmpdir works even if environment variables required by getpass
module are missing (#1010).
"""
monkeypatch.delenv("USER", raising=False)
monkeypatch.delenv("USERNAME", raising=False)
testdir.makepyfile(
"""
import pytest
def test_some(tmpdir):
assert tmpdir.isdir()
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
@pytest.fixture
def break_getuser(monkeypatch):
monkeypatch.setattr("os.getuid", lambda: -1)
# taken from python 2.7/3.4
for envvar in ("LOGNAME", "USER", "LNAME", "USERNAME"):
monkeypatch.delenv(envvar, raising=False)
@pytest.mark.usefixtures("break_getuser")
@pytest.mark.skipif(sys.platform.startswith("win"), reason="no os.getuid on windows")
def test_tmpdir_fallback_uid_not_found(testdir):
"""Test that tmpdir works even if the current process's user id does not
correspond to a valid user.
"""
testdir.makepyfile(
"""
import pytest
def test_some(tmpdir):
assert tmpdir.isdir()
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
@pytest.mark.usefixtures("break_getuser")
@pytest.mark.skipif(sys.platform.startswith("win"), reason="no os.getuid on windows")
def test_get_user_uid_not_found():
"""Test that get_user() function works even if the current process's
user id does not correspond to a valid user (e.g. running pytest in a
Docker container with 'docker run -u'.
"""
from _pytest.tmpdir import get_user
assert get_user() is None
@pytest.mark.skipif(not sys.platform.startswith("win"), reason="win only")
def test_get_user(monkeypatch):
"""Test that get_user() function works even if environment variables
required by getpass module are missing from the environment on Windows
(#1010).
"""
from _pytest.tmpdir import get_user
monkeypatch.delenv("USER", raising=False)
monkeypatch.delenv("USERNAME", raising=False)
assert get_user() is None
class TestNumberedDir:
PREFIX = "fun-"
def test_make(self, tmp_path):
from _pytest.pathlib import make_numbered_dir
for i in range(10):
d = make_numbered_dir(root=tmp_path, prefix=self.PREFIX)
assert d.name.startswith(self.PREFIX)
assert d.name.endswith(str(i))
symlink = tmp_path.joinpath(self.PREFIX + "current")
if symlink.exists():
# unix
assert symlink.is_symlink()
assert symlink.resolve() == d.resolve()
def test_cleanup_lock_create(self, tmp_path):
d = tmp_path.joinpath("test")
d.mkdir()
from _pytest.pathlib import create_cleanup_lock
lockfile = create_cleanup_lock(d)
with pytest.raises(EnvironmentError, match="cannot create lockfile in .*"):
create_cleanup_lock(d)
lockfile.unlink()
def test_lock_register_cleanup_removal(self, tmp_path):
from _pytest.pathlib import create_cleanup_lock, register_cleanup_lock_removal
lock = create_cleanup_lock(tmp_path)
registry = []
register_cleanup_lock_removal(lock, register=registry.append)
cleanup_func, = registry
assert lock.is_file()
cleanup_func(original_pid="intentionally_different")
assert lock.is_file()
cleanup_func()
assert not lock.exists()
cleanup_func()
assert not lock.exists()
def _do_cleanup(self, tmp_path):
self.test_make(tmp_path)
from _pytest.pathlib import cleanup_numbered_dir
cleanup_numbered_dir(
root=tmp_path,
prefix=self.PREFIX,
keep=2,
consider_lock_dead_if_created_before=0,
)
def test_cleanup_keep(self, tmp_pat
|
BhallaLab/moose
|
moose-core/python/moose/neuroml/NetworkML.py
|
Python
|
gpl-3.0
| 25,761
| 0.010947
|
# -*- coding: utf-8 -*-
## Description: class NetworkML for loading NetworkML from file or xml element into MOOSE
## Version 1.0 by Aditya Gilra, NCBS, Bangalore, India, 2011 for serial MOOSE
## Version 1.5 by Niraj Dudani, NCBS, Bangalore, India, 2012, ported to parallel MOOSE
## Version 1.6 by Aditya Gilra, NCBS, Bangalore, India, 2012, further changes for parallel MOOSE
## Version 1.7 by Aditya Gilra, NCBS, Bangalore, India, 2013, further support for NeuroML 1.8.1
## Version 1.8 by Aditya Gilra, NCBS, Bangalore, India, 2013, changes for new IntFire and SynHandler classes
"""
NeuroML.py is the preferred interface. Use this only if NeuroML L1,L2,L3 files are misnamed/scattered.
Instantiate NetworkML class, and thence use method:
readNetworkMLFromFile(...) to load a standalone NetworkML file, OR
readNetworkML(...) to load from an xml.etree xml element (could be part of a larger NeuroML file).
"""
from __future__ import print_function
from xml.etree import ElementTree as ET
import string
import os
from math import cos, sin
from moose.neuroml.MorphML import MorphML
from moose.neuroml.ChannelML import ChannelML, make_new_synapse
import moose
from moose.neuroml.utils import meta_ns, nml_ns, find_first_file, tweak_model
from moose import utils
import logging
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('moose.nml.networkml').addHandler(console)
_logger = logging.getLogger('moose.nml.networkml')
class NetworkML():
def __init__(self, nml_params):
self.nml_params = nml_params
self.model_dir = nml_params['model_dir']
def readNetworkMLFromFile(self,filename,cellSegmentDict,params={}):
"""
specify tweak params = {'excludePopulations':[popname1,...], 'excludeProjections':[projname1,...], \
'onlyInclude':{'includePopulation':(popname,[id1,...]),'includeProjections':(projname1,...)} }
If excludePopulations is present, then excludeProjections must also be present:
Thus if you exclude some populations,
ensure that you exclude projections that refer to those populations also!
Though for onlyInclude, you may specify only included cells and this reader will
also keep cells connected to those in onlyInclude.
This reader first prunes the exclude-s,
then keeps the onlyInclude-s and those that are connected.
Use 'includeProjections' if you want to keep some projections not connected to
the primary 'includePopulation' cells
but connected to secondary cells that connected to the primary ones:
e.g. baseline synapses on granule cells connected to 'includePopulation' mitrals;
these synapses receive file based pre-synaptic events,
not presynaptically connected to a cell.
In params, you further specify:
'createPotentialSynapses' : True (False by default)
to create synapses at all potential locations/compartments specified in the MorphML cell file
even before Projections tag is parsed.
'combineSegments' : True (False by default)
to ask neuroml to combine segments belonging to a cable
(Neuron generates multiple segments per section).
"""
        _logger.info("Reading file %s " % filename)
tree = ET.parse(filename)
root_element = tree.getroot()
_logger.info("Tweaking model ... ")
tweak_model(root_element, params)
_logger.info("Loading model into MOOSE ... ")
return self.readNetworkML(root_element,cellSegmentDict,params,root_element.attrib['lengthUnits'])
def readNetworkML(self,network,cellSegmentDict,params={},lengthUnits="micrometer"):
"""
This returns populationDict = { 'populationname1':(cellname,{int(instanceid1):moosecell, ... }) , ... }
and projectionDict = { 'projectionname1':(source,target,[(syn_name1,pre_seg_path,post_seg_path),...]) , ... }
"""
if lengthUnits in ['micrometer','micron']:
self.length_factor = 1e-6
else:
self.length_factor = 1.0
self.network = network
self.cellSegmentDict = cellSegmentDict
self.params = params
self.populationDict = {}
[ self.createPopulation(pop) for pop in
self.network.findall(".//{"+nml_ns+"}population")
]
self.projectionDict={}
projections = self.network.find(".//{"+nml_ns+"}projections")
if projections:
# see pg 219 (sec 13.2) of Book of Genesis
if projections.attrib["units"] == 'Physiological Units':
Efactor = 1e-3 # V from mV
Tfactor = 1e-3 # s from ms
else:
Efactor = 1.0
Tfactor = 1.0
[ self.createProjection(proj, Efactor, Tfactor) for proj in projections ]
allinputs = self.network.findall(".//{"+nml_ns+"}inputs")
for inputs in allinputs:
_logger.info("Creating input under /elec ")
units = inputs.attrib['units']
# see pg 219 (sec 13.2) of Book of Genesis
if units == 'Physiological Units':
Vfactor, Tfactor, Ifactor = 1e-3, 1e-3, 1e-6
else:
Vfactor, Tfactor, Ifactor = 1.0, 1.0, 1.0
[ self.createInput(inputelem, Vfactor, Tfactor, Ifactor) for
inputelem in self.network.findall(".//{"+nml_ns+"}input")
]
return (self.populationDict,self.projectionDict)
def createInput(self, inputelem, Vfactor, Tfactor, Ifactor):
"""Create input """
inputname = inputelem.attrib['name']
pulseinput = inputelem.find(".//{"+nml_ns+"}pulse_input")
if pulseinput is not None:
## If /elec doesn't exists it creates /elec
## and returns a reference to it. If it does,
## it just returns its reference.
moose.Neutral('/elec')
pulsegen = moose.PulseGen('/elec/pulsegen_'+inputname)
iclamp = moose.DiffAmp('/elec/iclamp_'+inputname)
iclamp.saturation = 1e6
iclamp.gain = 1.0
pulsegen.trigMode = 0 # free run
pulsegen.baseLevel = 0.0
_logger.debug("Tfactor, Ifactor: %s, %s" % (Tfactor, Ifactor))
_logger.debug("Pulsegen attributes: %s" % str(pulseinput.attrib))
pulsegen.firstDelay = float(pulseinput.attrib['delay'])*Tfactor
pulsegen.firstWidth = float(pulseinput.attrib['duration'])*Tfactor
pulsegen.firstLevel = float(pulseinput.attrib['amplitude'])*Ifactor
pulsegen.secondDelay = 1e6 # to avoid repeat
pulsegen.secondLevel = 0.0
pulsegen.secondWidth = 0.0
## do not set count to 1, let it be at 2 by default
## else it will set secondDelay to 0.0 and repeat the first pulse!
#pulsegen.count = 1
moose.connect(pulsegen,'output',iclamp,'plusIn')
target = inputelem.find(".//{"+nml_ns+"}target")
population = target.attrib['population']
for site in target.findall(".//{"+nml_ns+"}site"):
cell_id = site.attrib['cell_id']
if 'segment_id' in site.attrib: segment_id = site.attrib['segment_id']
else: segment_id = 0 # default segment_id is specified to be 0
## population is populationname, self.populationDict[population][0] is cellname
cell_name = self.populationDict[population][0]
segment_path = self.populationDict[population][1][int(cell_id)].path+'/'+\
self.cellSegmentDict[cell_name][0][segment_id][0]
compartment = moose.element(segment_path)
_logger.debug("Adding pulse at {0}: {1}".format(
|
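The readNetworkMLFromFile docstring in the sample above describes the shape of the tweak/params dictionary in prose. A minimal sketch of such a call, with hypothetical population, projection, directory, and file names (only the keys follow the docstring):

```python
nml_params = {'model_dir': '/path/to/model'}           # hypothetical directory
netml = NetworkML(nml_params)
params = {
    'excludePopulations': ['aux_pop'],                 # hypothetical names
    'excludeProjections': ['aux_proj'],
    'onlyInclude': {'includePopulation': ('mitrals', ['0', '1']),
                    'includeProjections': ('granule_baseline',)},
    'createPotentialSynapses': True,                   # False by default
    'combineSegments': True,                           # False by default
}
populationDict, projectionDict = netml.readNetworkMLFromFile(
    'network.xml', cellSegmentDict={}, params=params)  # hypothetical file
```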
deepmind/graph_nets
|
graph_nets/tests/blocks_test.py
|
Python
|
apache-2.0
| 43,860
| 0.003694
|
# Copyright 2018 The GraphNets Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for blocks.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from absl.testing import parameterized
from graph_nets import blocks
from graph_nets import graphs
from graph_nets import utils_np
from graph_nets import utils_tf
import numpy as np
import sonnet as snt
import tensorflow as tf
SMALL_GRAPH_1 = {
"globals": [1.1, 1.2, 1.3, 1.4],
"nodes": [[10.1, 10.2], [20.1, 20.2], [30.1, 30.2]],
"edges": [[101., 102., 103., 104.], [201., 202., 203., 204.]],
"senders": [0, 1],
"receivers": [1, 2],
}
SMALL_GRAPH_2 = {
"globals": [-1.1, -1.2, -1.3, -1.4],
"nodes": [[-10.1, -10.2], [-20.1, -20.2], [-30.1, -30.2]],
"edges": [[-101., -102., -103., -104.]],
"senders": [1,],
"receivers": [2,],
}
SMALL_GRAPH_3 = {
"globals": [1.1, 1.2, 1.3, 1.4],
"nodes": [[10.1, 10.2], [20.1, 20.2], [30.1, 30.2]],
"edges": [[101., 102., 103., 104.], [201., 202., 203., 204.]],
"senders": [1, 1],
"receivers": [0, 2],
}
SMALL_GRAPH_4 = {
"globals": [1.1, 1.2, 1.3, 1.4],
"nodes": [[10.1, 10.2], [20.1, 20.2], [30.1, 30.2]],
"edges": [[101., 102., 103., 104.], [201., 202., 203., 204.]],
"senders": [0, 2],
"receivers": [1, 1],
}
class GraphModuleTest(tf.test.TestCase, parameterized.TestCase):
"""Base class for all the tests in this file."""
def setUp(self):
super(GraphModuleTest, self).setUp()
tf.set_random_seed(0)
def _get_input_graph(self, none_fields=None):
if none_fields is None:
none_fields = []
input_graph = utils_tf.data_dicts_to_graphs_tuple(
[SMALL_GRAPH_1, SMALL_GRAPH_2, SMALL_GRAPH_3, SMALL_GRAPH_4])
input_graph = input_graph.map(lambda _: None, none_fields)
return input_graph
def _get_shaped_input_graph(self):
return graphs.GraphsTuple(
nodes=tf.zeros([3, 4, 5, 11], dtype=tf.float32),
edges=tf.zeros([5, 4, 5, 12], dtype=tf.float32),
globals=tf.zeros([2, 4, 5, 13], dtype=tf.float32),
receivers=tf.range(5, dtype=tf.int32) // 3,
senders=tf.range(5, dtype=tf.int32) % 3,
n_node=tf.constant([2, 1], dtype=tf.int32),
n_edge=tf.constant([3, 2], dtype=tf.int32),
)
def _assert_build_and_run(self, network, input_graph):
# No error at construction time.
output = network(input_graph)
# No error at runtime.
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
sess.run(output)
BROADCAST_GLOBAL_TO_EDGES = [
[1.1, 1.2, 1.3, 1.4],
[1.1, 1.2, 1.3, 1.4],
[-1.1, -1.2, -1.3, -1.4],
]
BROADCAST_GLOBAL_TO_NODES = [
[1.1, 1.2, 1.3, 1.4],
[1.1, 1.2, 1.3, 1.4],
[1.1, 1.2, 1.3, 1.4],
[-1.1, -1.2, -1.3, -1.4],
[-1.1, -1.2, -1.3, -1.4],
[-1.1, -1.2, -1.3, -1.4],
]
SENDER_NODES_TO_EDGES = [
[10.1, 10.2],
[20.1, 20.2],
[-20.1, -20.2],
]
RECEIVER_NODES_TO_EDGES = [
[20.1, 20.2],
[30.1, 30.2],
[-30.1, -30.2],
]
class BroadcastersTest(GraphModuleTest):
"""Tests for the broadcasters."""
@parameterized.named_parameters(
("globals_to_edges",
blocks.broadcast_globals_to_edges, BROADCAST_GLOBAL_TO_EDGES),
("globals_to_nodes",
blocks.broadcast_globals_to_nodes, BROADCAST_GLOBAL_TO_NODES),
("sender_nodes_to_edges",
blocks.broadcast_sender_nodes_to_edges, SENDER_NODES_TO_EDGES),
("receiver_nodes_to_edges",
blocks.broadcast_receiver_nodes_to_edges, RECEIVER_NODES_TO_EDGES),
)
def test_output_values(self, broadcaster, expected):
"""Test the broadcasted output value."""
input_graph = utils_tf.data_dicts_to_graphs_tuple(
[SMALL_GRAPH_1, SMALL_GRAPH_2])
broadcasted = broadcaster(input_graph)
with tf.Session() as sess:
broadcasted_out = sess.run(broadcasted)
self.assertNDArrayNear(
np.array(expected, dtype=np.float32), broadcasted_out, err=1e-4)
@parameterized.named_parameters(
("globals_to_edges",
blocks.broadcast_globals_to_edges, BROADCAST_GLOBAL_TO_EDGES),
("globals_to_nodes",
blocks.broadcast_globals_to_nodes, BROADCAST_GLOBAL_TO_NODES),
("sender_nodes_to_edges",
blocks.broadcast_sender_nodes_to_edges, SENDER_NODES_TO_EDGES),
("receiver_nodes_to_edges",
blocks.broadcast_receiver_nodes_to_edges, RECEIVER_NODES_TO_EDGES),
)
def test_output_values_larger_rank(self, broadcaster, expected):
"""Test the broadcasted output value."""
input_graph = utils_tf.data_dicts_to_graphs_tuple(
[SMALL_GRAPH_1, SMALL_GRAPH_2])
input_graph = input_graph.map(
lambda v: tf.reshape(v, [v.get_shape().as_list()[0]] + [2, -1]))
broadcasted = broadcaster(input_graph)
with tf.Session() as sess:
broadcasted_out = sess.run(broadcasted)
self.assertNDArrayNear(
np.reshape(np.array(expected, dtype=np.float32),
[len(expected)] + [2, -1]),
broadcasted_out,
err=1e-4)
@parameterized.named_parameters(
("globals_to_edges_no_globals",
blocks.broadcast_globals_to_edges, ("globals",)),
("globals_to_nodes_no_globals",
blocks.broadcast_globals_to_nodes, ("globals",)),
("sender_nodes_to_edges_none_nodes",
blocks.broadcast_sender_nodes_to_edges, ("nodes",)),
("sender_nodes_to_edges_none_senders",
blocks.broadcast_sender_nodes_to_edges,
("edges", "senders", "receivers")),
("receiver_nodes_to_edges_none_nodes",
blocks.broadcast_receiver_nodes_to_edges, ("nodes",)),
)
def test_missing_field_raises_exception(self, broadcaster, none_fields):
"""Test that an error is raised if a required field is `None`."""
input_graph = self._get_input_graph(none_fields)
with self.assertRaisesRegexp(
ValueError, "field cannot be None when broadcasting"):
broadcaster(input_graph)
class ReducersTest(GraphModuleTest):
"""Tests for the reducers."""
@parameterized.parameters(
(blocks.unsorted_segment_min_or_zero,
[[0., 0.],
[0.1, -0.1],
[0.2, -0.3],
[0.4, -0.6],
[0.7, -1.],
[0.9, -0.9],
[0., 0.]]),
(blocks.unsorted_segment_max_or_zero,
[[0., 0.],
[0.1, -0.1],
[0.3, -0.2],
[0.6, -0.4],
[1., -0.7],
[0.9, -0.9],
[0., 0.]]),
)
  def test_output_values(self, reducer, expected_values):
input_values_np = np.array([[0.1, -0.1],
[0.2, -0.2],
[0.3, -0.3],
[0.4, -0.4],
[0.5, -0.5],
[0.6, -0.6],
[0.7, -0.7],
[0.8, -0.8],
[0.9, -0.9],
[1., -1.]], dtype=np.float32)
input_indices_np = np.array([1, 2, 2, 3, 3, 3, 4, 4, 5, 4], dtype=np.int32)
num_groups_np = np.array(7, dtype=np.int32)
input_indices = tf.constant(input_indices_np, dtype=tf.int32)
input_values = tf.constant(input_values_np, dtype=tf.float32)
num_groups = tf.constant(num_groups_np, dtype=tf.int32)
reduced = reducer(input_values, input_indices, num_groups)
with tf.Session() as sess:
reduced_out = sess.run(reduced)
self.assertNDArrayNear(
np.array(expected_values, dtype=np.float32), reduced_out, err=1e-4)
SEGMENT_SUM_EDGES_TO_GLO
|
mozilla/verbatim
|
vendor/lib/python/translate/storage/test_dtd.py
|
Python
|
gpl-2.0
| 9,204
| 0.003151
|
#!/usr/bin/env python
import warnings
from py import test
from py.test import mark
from translate.misc import wStringIO
from translate.storage import dtd
from translate.storage import test_monolingual
def test_roundtrip_quoting():
specials = ['Fish & chips', 'five < six', 'six > five',
                'Use &nbsp;', 'Use &amp;nbsp;'
'A "solution"', "skop 'n bal", '"""', "'''",
'\n', '\t', '\r',
'Escape at end \\',
'',
'\\n', '\\t', '\\r', '\\"', '\r\n', '\\r\\n', '\\']
for special in specials:
quoted_special = dtd.quotefordtd(special)
unquoted_special = dtd.unquotefromdtd(quoted_special)
print "special: %r\nquoted: %r\nunquoted: %r\n" % (special, quoted_special, unquoted_special)
assert special == unquoted_special
def test_quotefordtd():
"""Test quoting and unqouting dtd definitions"""
def tester(raw_original, dtd_ready_result):
#print dtd.quotefordtd(raw_original)
assert dtd.quotefordtd(raw_original) == dtd_ready_result
#print dtd.unquotefromdtd(dtd_ready_result)
assert dtd.unquotefromdtd(dtd_ready_result) == raw_original
tester("Unintentional variable %S", '"Unintentional variable %S"')
def test_quoteforandroid():
assert dtd.quoteforandroid("don't") == r'"don\'t"'
assert dtd.quoteforandroid('the "thing"') == r'"the \"thing\""'
def test_removeinvalidamp(recwarn):
"""tests the the removeinvalidamps function"""
def tester(actual, expected):
assert dtd.removeinvalidamps("test.name", actual) == expected
tester("Valid &entity; included", "Valid &entity; included")
tester("Valid &entity.name; included", "Valid &entity.name; included")
tester("Valid Ӓ included", "Valid Ӓ included")
tester("This & is broken", "This amp is broken")
tester("Mad & & &", "Mad amp &")
dtd.removeinvalidamps("simple.warningtest", "Dimpled &Ring")
assert recwarn.pop(UserWarning)
class TestDTDUnit(test_monolingual.TestMonolingualUnit):
UnitClass = dtd.dtdunit
def test_rich_get(self):
pass
def test_rich_set(self):
pass
class TestDTD(test_monolingual.TestMonolingualStore):
StoreClass = dtd.dtdfile
def dtdparse(self, dtdsource):
"""helper that parses dtd source without requiring files"""
dummyfile = wStringIO.StringIO(dtdsource)
dtdfile = dtd.dtdfile(dummyfile)
return dtdfile
def dtdregen(self, dtdsource):
"""helper that converts dtd source to dtdfile object and back"""
return str(self.dtdparse(dtdsource))
def test_simpleentity(self):
"""checks that a simple dtd entity definition is parsed correctly"""
dtdsource = '<!ENTITY test.me "bananas for sale">\n'
dtdfile = self.dtdparse(dtdsource)
assert len(dtdfile.units) == 1
dtdunit = dtdfile.units[0]
assert dtdunit.entity == "test.me"
assert dtdunit.definition == '"bananas for sale"'
def test_blanklines(self):
"""checks that blank lines don't break the parsing or regeneration"""
dtdsource = '<!ENTITY test.me "bananas for sale">\n\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_simpleentity_source(self):
"""checks that a simple dtd entity definition can be regenerated as source"""
dtdsource = '<!ENTITY test.me "">\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
dtdsource = '<!ENTITY test.me "bananas for sale">\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_hashcomment_source(self):
"""checks that a #expand comment is retained in the source"""
dtdsource = '#expand <!ENTITY lang.version "__MOZILLA_LOCALE_VERSION__">\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_commentclosing(self):
"""tests that comment closes with trailing space aren't duplicated"""
dtdsource = '<!-- little comment --> \n<!ENTITY pane.title "Notifications">\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_commententity(self):
"""check that we don't process messages in <!-- comments -->: bug 102"""
dtdsource = '''<!-- commenting out until bug 38906 is fixed
<!ENTITY messagesHeader.label "Messages"> -->'''
dtdfile = self.dtdparse(dtdsource)
assert len(dtdfile.units) == 1
dtdunit = dtdfile.units[0]
print dtdunit
assert dtdunit.isnull()
def test_newlines_in_entity(self):
"""tests that we can handle newlines in the entity itself"""
dtdsource = '''<!ENTITY fileNotFound.longDesc "
<ul>
<li>Check the file name for capitalisation or other typing errors.</li>
<li>Check to see if the file was moved, renamed or deleted.</li>
</ul>
">
'''
dtdregen = self.dtdregen(dtdsource)
print dtdregen
print dtdsource
assert dtdsource == dtdregen
def test_conflate_comments(self):
"""Tests that comments don't run onto the same line"""
dtdsource = '<!-- test comments -->\n<!-- getting conflated -->\n<!ENTITY sample.txt "hello">\n'
dtdregen = self.dtdregen(dtdsource)
print dtdsource
print dtdregen
assert dtdsource == dtdregen
def test_localisation_notes(self):
"""test to ensure that we retain the localisation note correctly"""
dtdsource = '''<!--LOCALIZATION NOTE (publishFtp.label): Edit box appears beside this label -->
<!ENTITY publishFtp.label "If publishing to a FTP site, enter the HTTP address to browse to:">
'''
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_entitityreference_in_source(self):
"""checks that an &entity; in the source is retained"""
dtdsource = '<!ENTITY % realBrandDTD SYSTEM "chrome://branding/locale/brand.dtd">\n%realBrandDTD;\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
    #test for bug #610
def test_entitityreference_order_in_source(self):
"""checks that an &entity; in the source is retained"""
        dtdsource = '<!ENTITY % realBrandDTD SYSTEM "chrome://branding/locale/brand.dtd">\n%realBrandDTD;\n<!-- some comment -->\n'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
# The following test is identical to the one above, except that the entity is split over two lines.
# This is to ensure that a recent bug fixed in dtdunit.parse() is at least partly documented.
# The essence of the bug was that after it had read "realBrandDTD", the line index is not reset
# before starting to parse the next line. It would then read the next available word (sequence of
# alphanum characters) in stead of SYSTEM and then get very confused by not finding an opening ' or
# " in the entity, borking the parsing for threst of the file.
dtdsource = '<!ENTITY % realBrandDTD\n SYSTEM "chrome://branding/locale/brand.dtd">\n%realBrandDTD;\n'
# FIXME: The following line is necessary, because of dtdfile's inability to remember the spacing of
# the source DTD file when converting back to DTD.
dtdregen = self.dtdregen(dtdsource).replace('realBrandDTD SYSTEM', 'realBrandDTD\n SYSTEM')
print dtdsource
print dtdregen
assert dtdsource == dtdregen
@mark.xfail(reason="Not Implemented")
def test_comment_following(self):
"""check that comments that appear after and entity are not pushed onto another line"""
dtdsource = '<!ENTITY textZoomEnlargeCmd.commandkey2 "="> <!-- + is above this key on many keyboards -->'
dtdregen = self.dtdregen(dtdsource)
assert dtdsource == dtdregen
def test_comment_newline_space_closing(self):
"""check that comments that are closed by a newline then space then --> don't break the following entries"""
dtdsource = '<!-- Comment\n -->\n<!ENTI
|
akretion/logistics-center
|
stef_logistics/__manifest__.py
|
Python
|
agpl-3.0
| 807
| 0
|
# © 2019 David BEAL @ Akretion
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Stef logistics center",
"version": "12.0.1.0.0",
"category": "Warehouse",
"summary": "Stef logistics center",
"author": "Akretion",
"license": "AGPL-3",
"website": "https://
|
www.akretion.com",
"depends": ["stock", "logistics_center"],
"external_dependencies": {"Python": []},
"data": [
"data/delivery_data.xml",
"data/warehouse_data.xml",
"data/logistics_flow_data.xml",
"views/partner_view.xml",
# 'data/sale_data.xml',
# 'data/repository_data.xml',
# 'data/repository.task.csv',
# 'data/backend_data.xml',
# 'data/cron_data.xml',
],
"demo": [],
"installable": True,
}
|
impallari/Impallari-Fontlab-Macros
|
IMP Kerning/20 ---.py
|
Python
|
apache-2.0
| 27
| 0.037037
|
#FLM: ---------
pass
|
xiaozhu36/terraform-provider
|
examples/fc/hello.py
|
Python
|
apache-2.0
| 130
| 0.038462
|
import logging
def handler(event, context):
    logger = logging.getLogger()
logger.info('hello world')
return 'hello world'
|
briancline/softlayer-python
|
SoftLayer/CLI/loadbal/service_delete.py
|
Python
|
mit
| 860
| 0
|
"""Deletes an existing load balancer service."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import loadbal
import click
@click.command()
@click.argument('identifier')
@environment.pass_env
def cli(env, identifier):
"""Deletes an existing load balancer service."""
mgr = SoftLayer.LoadBalancerManager(env.client)
_, service_id = loadbal.parse_id(identifier)
if not (env.skip_confirmations or
formatting.confirm("This action will cancel a service from your "
"load balancer. Continue?")):
raise exceptions.CLIAbort('Aborted.')
mgr.delete_service(service_id)
env.fout('Load balancer service %s is being cancelled!' % service_id)
|
Fabfm4/Sita-BackEnd
|
src/sita/authentication/serializers.py
|
Python
|
apache-2.0
| 6,676
| 0.003895
|
# -*- coding: utf-8 -*-
import hashlib
import random
from rest_framework import serializers
from sita.users.models import User
from sita.subscriptions.models import Subscription
from sita.utils.refresh_token import create_token
from hashlib import md5
from datetime import datetime, timedelta
import pytz
class LoginSerializer(serializers.Serializer):
"""
Serializer for user login
"""
email = serializers.EmailField(
required=True
)
password = serializers.CharField(
required=True
)
device_os= serializers.ChoiceField(
required=False,
choices=['ANDROID', 'IOS']
)
device_token= serializers.CharField(
        required=False,
max_length=254
)
def validate(self, data):
"""
Validation email, password and active status
"""
try:
user = User.objects.get(email__exact=data.get('email'))
except User.DoesNotExist:
raise serializers.ValidationError({"email":"invalid credentials"})
if not user.check_password(data.get('password')):
raise serializers.ValidationError({"email":"invalid credentials"})
if data.get("device_os") or data.get("device_token"):
if not data.get("device_os") or not data.get("device_token"):
raise serializers.ValidationError(
{"device_token":"Don`t send device OS or device token"})
if not user.is_active:
raise serializers.ValidationError(
{"email":"The user is not actived"}
)
return data
def get_user(self, data):
"""
return user object
"""
return User.objects.get(email__exact=data.get('email'))
class SignUpSerializer(serializers.Serializer):
""""""
TYPE_OS = (
('1', 'IOS'),
('2', 'ANDROID')
)
email = serializers.EmailField(
max_length=254,
required=True
)
password = serializers.CharField(
max_length=100,
required=True
)
time_zone = serializers.CharField(
max_length=100,
required=True
)
name = serializers.CharField(
required=False,
max_length = 100
)
phone = serializers.CharField(
required=False,
max_length=10
)
device_os= serializers.ChoiceField(
required=False,
choices=['ANDROID', 'IOS']
)
device_token= serializers.CharField(
required=False,
max_length=254
)
conekta_card = serializers.CharField(
max_length=254,
required=False
)
subscription_id= serializers.IntegerField(
required=False
)
def validate(self, data):
if data.get("device_os") or data.get("device_token"):
if not data.get("device_os") or not data.get("device_token"):
raise serializers.ValidationError(
{"device_token":"Don`t send device OS or device token"})
if data.get("conekta_card"):
if not data.get("phone") or not data.get("name") or not data.get("subscription_id"):
raise serializers.ValidationError(
{"conekta_card":
"If send conektaCard you should send phone and name"})
try:
subscription = Subscription.objects.get(id=data.get('subscription_id'))
except Subscription.DoesNotExist:
raise serializers.ValidationError(
{"subscription_id":"That subscription don't exists"}
)
try:
user = User.objects.get(email__exact=data.get('email'))
raise serializers.ValidationError(
{"email":"The user is not actived"}
)
except User.DoesNotExist:
pass
try:
datetime.now(pytz.timezone(data.get("time_zone")))
except pytz.UnknownTimeZoneError:
raise serializers.ValidationError(
{"time_zone":"The time zone is not correct"}
)
return data
class LoginResponseSerializer(object):
"""
Serializer used to return the proper token, when the user was succesfully
logged in.
"""
def __init__(self):
pass
def get_token(self,obj):
"""
Create token.
"""
return create_token(obj)
class RecoveryPasswordSerializer(serializers.Serializer):
"""
Serializer for user recovery password
"""
email = serializers.EmailField(
required=True
)
def validate(self, data):
"""
Validation email and active status
"""
try:
user = User.objects.get(email__exact=data.get('email'))
except User.DoesNotExist:
raise serializers.ValidationError("invalid credentials")
if not user.is_active:
raise serializers.ValidationError(
{"email":"The user is not actived"}
)
return data
def generate_recovery_token(self, data):
""" Generate code to recovery password. """
user = User.objects.get(email__exact=data.get('email'))
email = user.email
salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
if isinstance(email, unicode):
email = email.encode('utf-8')
key = hashlib.sha1(salt + email).hexdigest()
user.reset_pass_code = key
user.save()
return True
class ResetPasswordWithCodeSerializer(serializers.Serializer):
"""
Serializer for user login
"""
password = serializers.CharField(
required=True
)
password_confim = serializers.CharField(
required=True
)
recovery_code = serializers.CharField(
required=True
)
def validate(self, data):
"""
Validation email, password and active status
"""
try:
user = User.objects.get(reset_pass_code=data.get('recovery_code'))
except User.DoesNotExist:
raise serializers.ValidationError(
{"recovery_code":"Don't exits code"})
if not data.get('password') == data.get('password_confim'):
raise serializers.ValidationError(
{"password_confim":
"Password is not equals to Confirm Password"})
return data
def update_password(self, data):
"""
Change password
"""
user = User.objects.get(reset_pass_code=data.get('recovery_code'))
user.reset_pass_code = None
user.set_password(data.get('password'))
user.save()
return True
|
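Illustrative only: how LoginSerializer from the sample above is typically driven through the standard Django REST Framework calls. The credentials are placeholders, and an existing, active user matching them is assumed.

```python
serializer = LoginSerializer(data={'email': 'user@example.com',   # placeholder credentials
                                   'password': 'secret'})
if serializer.is_valid():                       # runs validate() shown above
    user = serializer.get_user(serializer.validated_data)
    token = LoginResponseSerializer().get_token(user)
else:
    print(serializer.errors)
```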
geolovic/TProfiler
|
test/06_TProfiler_test.py
|
Python
|
gpl-3.0
| 12,892
| 0.000388
|
# -*- coding: utf-8 -*-
"""
José Vicente Pérez
Granada University (Spain)
March, 2017
Testing suite for profiler.py
Last modified: 19 June 2017
"""
import time
import profiler as p
import praster as pr
import numpy as np
import matplotlib.pyplot as plt
print("Tests for TProfiler methods")
def test01():
"""
Creates a TProfiler from an array with profile_data
Test for get_x, get_y
"""
inicio = time.time()
print("=" * 40)
print("Test 01 para TProfiler")
print("Testing functions get_x(), get_y()")
print("Test in progress...")
# Test parameters
pf_data = np.load("data/in/darro_pfdata.npy")
dem = "data/in/darro25.tif"
demraster = pr.open_raster(dem)
srs = demraster.proj
cellsize = demraster.cellsize
# Creates the profile
perfil = p.TProfile(pf_data, cellsize, srs=srs)
# Test 01 get and print x and y arrays
fig = plt.figure()
ax1 = fig.add_subplot(121)
ax2 = fig.add_subplot(122)
xi1 = perfil.get_x(True)
yi1 = perfil.get_y(True)
xi2 = perfil.get_x(False)
yi2 = perfil.get_y(False)
ax1.plot(xi1, yi1)
ax2.plot(xi2, yi2)
ax1.set_title("head = True")
ax2.set_title("head = False")
fig.tight_layout()
plt.show()
fin = time.time()
print("Test finalizado en " + str(fin - inicio) + " segundos")
print("=" * 40)
def test02():
"""
Creates a TProfiler from an array with profile_data
Test for get_l, get_z
"""
inicio = time.time()
print("=" * 40)
print("Test 02 para TProfiler")
print("Testing functions get_l(), get_z()")
print("Test in progress...")
# Test parameters
pf_data = np.load("data/in/darro_pfdata.npy")
dem = "data/in/darro25.tif"
demraster = pr.open_raster(dem)
srs = demraster.proj
cellsize = demraster.cellsize
# Creates the profile
perfil = p.TProfile(pf_data, cellsize, srs=srs)
# Test 01 get and print x and y arrays
fig = plt.figure()
ax1 = fig.add_subplot(221)
ax2 = fig.add_subplot(222)
ax3 = fig.add_subplot(223)
ax4 = fig.add_subplot(224)
li1 = perfil.get_l(True)
zi1 = perfil.get_z(True)
ax1.plot(li1, zi1)
ax1.set_title("head = True")
li2 = perfil.get_l(False)
zi2 = perfil.get_z(False)
ax2.plot(li2, zi2)
ax2.set_title("head = False")
zi3 = perfil.get_z(True, True)
ax3.plot(li1, zi3)
ax3.set_title("Relative elevations, head = True")
zi4 = perfil.get_z(False, True)
ax4.plot(li2, zi4)
ax4.set_title("Relative elevations, head = False")
fig.tight_layout()
plt.show()
fin = time.time()
print("Test finalizado en " + str(fin - inicio) + " segundos")
print("=" * 40)
def test03():
"""
Creates a TProfiler from an array with profile_data
Test for raw_elevations and smooth
"""
inicio = time.time()
print("=" * 40)
print("Test 03 para TProfiler")
print("Testing functions smooth() and get_raw_z()")
print("Test in progress...")
# Test parameters
pf_data = np.load("data/in/darro_pfdata.npy")
dem = "data/in/darro25.tif"
demraster = pr.open_raster(dem)
srs = demraster.proj
cellsize = demraster.cellsize
# Creates the profile
perfil = p.TProfile(pf_data, cellsize, srs=srs)
# Print raw elevations vs peaks removed elevations
fig = plt.figure(figsize=(12, 6))
ax1 = fig.add_subplot(121)
ax2 = fig.add_subplot(122)
li = perfil.get_l(True)
zi = perfil.get_z(True)
raw_zi = perfil.get_raw_z(True)
ax1.plot(li, zi, label="Peaks removed")
ax1.plot(li, raw_zi, label="Raw elevations")
ax1.set_title("Raw elevations vs peak removed")
ax1.legend()
ax1.set_xlim((6850, 8950))
ax1.set_ylim((950, 1050))
# Test for smooth function
distance = 0
for n in range(5):
li = perfil.get_l(True)
zi = perfil.get_z(True)
perfil.smooth(distance)
ax2.plot(li, zi, label=str(distance) + " m")
distance += 50
ax2.set_title("Smooth with different distances")
ax2.legend()
ax2.set_xlim((8000, 9000))
ax2.set_ylim((950, 1000))
fig.tight_layout()
plt.show()
fin = time.time()
print("Test finalizado en " + str(fin - inicio) + " segundos")
print("=" * 40)
def test04():
"""
Creates a TProfiler from an array with profile_data
Test for get_area and get_slopes
"""
inicio = time.time()
print("=" * 40)
print("Test 04 para TProfiler")
print("Testing functions get_area() and get_slopes()")
print("Test in progress...")
# Test parameters
pf_data = np.load("data/in/darro_pfdata.npy")
dem = "data/in/darro25.tif"
demraster = pr.open_raster(dem)
srs = demraster.proj
cellsize = demraster.cellsize
# Creates the profile
perfil = p.TProfile(pf_data, cellsize, srs=srs)
# Get slope area and plot in log scale
fig = plt.figure(figsize=(12, 6))
ax1 = fig.add_subplot(221)
ax2 = fig.add_subplot(222)
ax3 = fig.add_subplot(223)
ax4 = fig.add_subplot(224)
for ax in (ax1, ax2, ax3, ax4):
ax.set_xscale("log")
ax.set_yscale("log")
ax.set_xlim((1000000, 100000000))
ax.set_ylim((0.001, 1))
ai = perfil.get_area(True)
s1 = perfil.get_slope()
ax1.plot(ai, s1, "b+")
ax1.set_title("Raw slopes (all)")
s2 = perfil.get_slope(threshold=0.9)
ax2.plot(ai, s2, "b+")
ax2.set_title("Slopes with threshold >= 0.9")
s3, lq3 = perfil.get_slope(threshold=0.9, lq=True)
ax3.plot(ai, lq3, "r+")
ax3.plot(ai, s3, "b+")
ax3.set_title("Slopes and low quality slopes (threshold 0.9)")
s4, lq4 = perfil.get_slope(threshold=0.9, lq=True, head=True)
a2 = perfil.get_area(head=True)
ax4.plot(a2, lq4, "r+")
ax4.plot(a2, s4, "b+")
ax4.set_title("Example 3 with head=True")
    fig.tight_layout(pad=1)
plt.show()
fin = time.time()
print("Test finalizado en " + str(fin - inicio) + " segundos")
print("=" * 40)
def test05():
"""
Creates a TProfiler from an array with profile_data
Test for calculate slopes
"""
inicio = time.time()
print("=" * 40)
print("Test 05 para TProfiler")
print("Test
|
ing functions calculate slopes")
print("Test in progress...")
# Test parameters
pf_data = np.load("data/in/darro_pfdata.npy")
dem = "data/in/darro25.tif"
demraster = pr.open_raster(dem)
srs = demraster.proj
cellsize = demraster.cellsize
# Creates the profile
perfil = p.TProfile(pf_data, cellsize, srs=srs)
reg_points = 4
# Get slope area and plot in log scale
fig = plt.figure(figsize=(12, 6))
for n in range(1, 9, 2):
ax1 = fig.add_subplot(4, 2, n)
ax2 = fig.add_subplot(4, 2, n+1)
perfil.calculate_slope(reg_points)
si = perfil.get_slope()
ai = perfil.get_area()
ax1.plot(ai, si, "b+")
ax1.set_xscale("log")
ax1.set_yscale("log")
ax1.set_xlim((1000000, 100000000))
ax1.set_ylim((0.001, 1))
ax1.set_title("reg_points = " + str(reg_points) + " (normal elevations)")
perfil.calculate_slope(reg_points, True)
si = perfil.get_slope(0.9)
ax2.plot(ai, si, "b+")
ax2.set_xscale("log")
ax2.set_yscale("log")
ax2.set_xlim((1000000, 100000000))
ax2.set_ylim((0.001, 1))
ax2.set_title("reg_points = " + str(reg_points) + " (raw elevations)")
reg_points += 4
fig.tight_layout(pad=1)
plt.show()
fin = time.time()
print("Test finalizado en " + str(fin - inicio) + " segundos")
print("=" * 40)
def test06():
"""
Creates a TProfiler from an array with profile_data
Test for calculate_chi() and get_chi()
"""
inicio = time.time()
print("=" * 40)
print("Test 06 para TProfiler")
print("Testing functions get_chi() and calculate_chi()")
print("Test in progress...")
# Test parameters
pf_data = np.load("data/in/darro_pfdata.npy")
dem = "data/in/darro25.tif"
demraster = pr.open_raster(dem)
srs = demraster.proj
cellsize = demraster.cellsize
# Creates the prof
|
emoronayuso/beeton
|
asterisk-bee/asteriskbee/api_status/scripts_graficas/script_crontab.py
|
Python
|
gpl-3.0
| 1,417
| 0.016231
|
from crontab import CronTab
from django.conf import settings
#####################################################
### For more info on using python-crontab       ######
### https://pypi.python.org/pypi/python-crontab ######
#####################################################
## Application directory
### STATIC_ROOT = '/var/www/asterisk-bee/asteriskbee/'
#directorio = settings.STATIC_ROOT+"api_status/scripts_graficas/"
directorio = '/var/www/asterisk-bee/asteriskbee/api_status/scripts_graficas/'
tab = CronTab(user='root')
#cmd = 'python '+directorio+'recoge_marcas_graficas.py > /root/peeeeeee'
#cmd = '/bin/bash top.sh'
#cmd = '/bin/bash /home/asterisk-bee/asteriskbee/api_status/scripts_graficas/top.sh'
cmd = 'python '+directorio+'recoge_marcas_graficas.py >/dev/null 2>&1'
############# ADD A TASK TO CRON ##################
cron_job = tab.new(cmd)
## A task will be launched every time the PBX starts up
#cron_job.every_reboot()
#### Another task every 2 minutes #
cron_job.minute.every(2)
# Write the contents to the cron file
tab.write()
## Show the new line that will be added to the cron file
print tab.render()
##############################################
## TO DELETE A TASK #############
#cron_job = tab.find_command(cmd)
#tab.remove_all(cmd)
# Write the contents to the cron file
#tab.write()
#print tab.render()
####################################
|
akx/shoop
|
shoop/core/pricing/default_pricing.py
|
Python
|
agpl-3.0
| 1,139
| 0
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.utils.translation import ugettext_lazy as _
from shoop.core.models import ShopProduct
from shoop.core.pricing import PriceInfo, PricingModule
class DefaultPricingModule(PricingModule):
identifier = "default_pricing"
name = _("Default Pricing")
def get_price_info(self, context, product, quantity=1):
"""
Return a `PriceInfo` calculated from `ShopProduct.default_price`
Since `ShopProduct.default_price` can be `None` it will
be set to zero (0) if `None`.
"""
        shop = context.shop
shop_product = ShopProduct.objects.get(product=product, shop=shop)
default_price = (shop_product.default_price_value or 0)
return PriceInfo(
price=shop.create_price(default_price * quantity),
            base_price=shop.create_price(default_price * quantity),
quantity=quantity,
)
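        # Illustrative note (added here, not part of the original module): because the
        # default price falls back to 0 when ShopProduct.default_price_value is None,
        # a call such as get_price_info(context, product, quantity=3) returns a PriceInfo
        # whose price and base_price are both shop.create_price(default_price * 3).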
|
yausern/stlab
|
devices/Tektronics_Sequencer/element.py
|
Python
|
gpl-3.0
| 10,638
| 0.037413
|
# Implementation of sequence elements that are composed out of pulses
# Author : W. Pfaff
# modified by: Sarwan Peiter
import numpy as np
from copy import deepcopy
import pprint
import logging
class Element:
"""
Implementation of a sequence element.
Basic idea: add different pulses, and compose the actual numeric
arrays that form the amplitudes for the hardware (typically an AWG) (includes
discretization using clock)
"""
def __init__(self,name, **kw):
self.name = name
        self.clock = kw.pop('clock', 1e9)
self.granularity = kw.pop('granularity', 4)
self.min_samples = kw.pop('min_samples', 960)
self.pulsar = kw.pop('pulsar', None)
self.ignore_offset_correction = kw.pop('ignore_offset_correction',False)
        self.global_time = kw.pop('global_time', True)
self.time_offset = kw.pop('time_offset', 0)
self.ignore_delays = kw.pop('ignore_delays',False)
# Default fixed point, used for aligning RO elements. Aligns first RO
self.readout_fixed_point = kw.pop('readout_fixed_point', 1e-6)
# used to track if a correction has been applied
self.fixed_point_applied = False
self.pulses = {}
self._channels ={}
self._last_added_pulse = None
# Take clock of AWG
if self.pulsar is not None:
self.clock = self.pulsar.clock
for c in self.pulsar.channels:
chan = self.pulsar.channels[c]
delay = chan['delay'] if not (self.ignore_delays) else 0.
self.define_channel(name = c, type = chan['type'], high = chan['high'],
low = chan['low'], offset = chan['offset'], delay = delay, id = chan['id'])
    # Channel management
def define_channel(self, name, type = 'analog', high = 1, low = 1,
offset = 0, delay = 0, id = 'ch'):
self._channels[name]= {'type': type,
'delay': delay,
'offset': offset,
'high': high,
'low': low,
'id': id,
'distorted': False}
def get_channel_delay(self, cname):
return self._channels[cname]['delay']
    # Auxiliary functions for discretization
def _time2sample(self,t):
return int(t * self.clock + 0.5)
def _sample2time(self,s):
return s / self.clock
def offset(self):
"""
Returns the smallest t0 of all pulses/channels after correcting delay
"""
if self.ignore_offset_correction:
return 0
else:
t0s = []
for p in self.pulses:
for c in self.pulses[p].channels:
t0s.append(self.pulses[p].t0() - self._channels[c]['delay'])
return min(t0s)
def ideal_length(self):
"""
        Returns the nominal length of the element before taking into account the
discretization using clock
"""
ts = []
for p in self.pulses:
for c in self.pulses[p].channels:
ts.append(self.pulse_end_time(p,c))
return max (ts)
def pulse_end_time(self, pname, cname):
return self.pulses[pname].end() - self._channels[cname]['delay']- \
self.offset()
def pulse_length(self, pname):
return self.pulses[pname].length
def length(self):
"""
Returns the actual length of the sequence, including all corrections.
"""
return self.samples()/self.clock
def samples(self):
"""
Returns the number of samples the elements occupies
"""
ends = []
for p in self.pulses:
for c in self.pulses[p].channels:
ends.append(self.pulse_end_sample(p,c))
samples = max(ends) + 1
if samples < self.min_samples:
samples = self.min_samples
else:
while(samples % self.granularity > 0):
samples += 1
return samples
def pulse_end_sample(self,pname,cname):
return self.pulse_start_sample(pname,cname) + self.pulse_samples(pname) - 1
def pulse_start_sample(self,pname,cname):
return self._time2sample(self.pulse_start_time(pname,cname))
def pulse_start_time(self,pname,cname):
return self.pulses[pname].t0() - self._channels[cname]['delay'] - self.offset()
def pulse_samples(self,pname):
return self._time2sample(self.pulses[pname].length)
def shift_all_pulses(self, dt):
'''
Shifts all pulses by a time dt, this is used for correcting the phase
of a fixed reference.
'''
self.ignore_offset_correction = True
for name, pulse in self.pulses.items():
pulse._t0 += dt
def _auto_pulse_name(self, base='pulse'):
i = 0
while base+'-'+str(i) in self.pulses:
i += 1
return base+'-'+str(i)
def add(self, pulse, name=None, start=0,
refpulse=None, refpoint='end', refpoint_new='start',
operation_type='other',
fixed_point_freq=None):
'''
Function adds a pulse to the element, there are several options to set
where in the element the pulse is added.
name (str) : name used for referencing the pulse in the
element, if not specified generates one based on
the default pulse name
start (float) : time between refpoint and refpoint_new used to
define the start of the pulse
refpulse (str) : name of pulse used as reference for timing
refpoint ('start'|'end'|'center') : reference point in reference
pulse used
refpoint_new ('start'|'end'|'center'): reference point in added
pulse used
fixed_point_freq (float): if not None shifts all pulses so that
this pulse is at a multiple of 1/fixed_point_freq
'''
pulse = deepcopy(pulse)
pulse.operation_type = operation_type
if name is None:
name = self._auto_pulse_name(pulse.name)
t0 = start - pulse.start_offset
if refpoint not in ['start', 'center', 'end']:
raise ValueError('refpoint not recognized')
if refpoint_new not in ['start', 'center', 'end']:
raise ValueError('refpoint not recognized')
if refpulse is not None:
if refpoint is None:
refpoint = 'end'
if refpoint_new == 'start':
t0 += self.pulses[refpulse].effective_stop()
if refpoint == 'start':
t0 -= self.pulses[refpulse].effective_length()
elif refpoint == 'center':
t0 -= self.pulses[refpulse].effective_length()/2.
elif refpoint_new == 'end':
t0 += (self.pulses[refpulse].effective_stop() -
pulse.effective_length())
if refpoint == 'start':
t0 -= self.pulses[refpulse].effective_length()
elif refpoint == 'center':
t0 -= self.pulses[refpulse].effective_length()/2.
elif refpoint_new == 'center':
t0 += (self.pulses[refpulse].effective_stop() -
pulse.effective_length()/2.)
if refpoint == 'start':
t0 -= self.pulses[refpulse].effective_length()
elif refpoint == 'center':
t0 -= self.pulses[refpulse].effective_length()/2.
pulse._t0 = t0
self.pulses[name] = pulse
self._last_added_pulse = name
# Shift all pulses to the fixed point for the first RO pulse encountered
if operation_type == 'RO' and self.fixed_point_applied is False:
time_corr = calculate_time_correction(t0, self.readout_fixed_point)
self.shift_all_pulses(time_corr)
self.fixed_point_applied = True
return name
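        # Usage sketch (added for clarity; the pulse objects and pulsar below are
        # illustrative only, they are defined elsewhere in the codebase):
        #   elt = Element('readout_element', pulsar=my_pulsar)
        #   elt.add(pi_pulse, name='pi')
        #   elt.add(ro_pulse, refpulse='pi', refpoint='end', operation_type='RO')
        # The second add() starts the RO pulse where 'pi' ends and, being the first RO
        # pulse, shifts the whole element onto the readout_fixed_point grid.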
# computing the numerical waveform
def ideal_waveforms(self):
wfs = {}
tvals = np.arange(self.samples())/self.clock
for c in self._channels:
wfs[c] = np.zeros(self.samples()) + self._channels[c]['offset']
# we first compute the ideal function values
for p in self.pulses:
psamples = self.pulse_samples(p)
if not self.global_time:
pulse_tvals = tvals.copy()[:psamples]
pulsewfs = self.pulses[p].get_wfs(pulse_tvals)
else:
chan_tvals = {}
for c in self.pulses[p].channels:
idx0 = self.pulse_start_sample(p, c)
idx1 = self.pulse_end_sample(p, c) + 1
c_tvals = np.round(tvals.copy()[idx0:idx1] +
self.get_channel_delay(c) +
self.time_offset,
11) # significant digits 11
chan_tvals[c] = c_tvals
pulsewfs = self.pulses[p].get_wfs(chan_tvals)
for c in self.pulses[p].channels:
idx0 = self.pulse_start_sample(p, c)
idx1 = self.pulse_end_sample(p, c) + 1
wfs[c][idx0:idx1] += pulsewfs[c]
return tvals, wfs
def waveforms(self):
"""
return:
tvals, wfs
Returns the waveforms for all used channels.
        Truncates/clips (channel-imposed) all values
that are out of bounds
"""
tvals, wfs = self.ideal_waveforms()
for wf in wfs:
hi = self._channels[wf]['high']
lo = self._channels[wf]['low']
if self._cha
|
ayouwei/minivpn
|
server/server.py
|
Python
|
apache-2.0
| 857
| 0.009335
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import multiprocessing, Queue
import signal, time
import setting
from socketserver import SocketServer
from tunserver import TunServer
def sigHandler(signum, frame):
print "signal %s received, client going to shutdown" % signum
setting.running = False
if __name__ == "__main__":
signal.signal(signal.SIGINT, sigHandler)
inqueue = multiprocessing.Queue(maxsize = 10)
    outqueue = multiprocessing.Queue(maxsize = 10)
server_addr = "0.0.0.0"
server_port = 8080
processes = []
processes.append(TunServer("tun Process", inqueue, outqueue))
processes.append(SocketServer("server Process", inqueue, outqueue, server_addr, server_port))
for t in processes:
t.start()
while setting.running:
time.sleep(1)
for t in processes:
t.join()
|
pombredanne/bokeh
|
examples/plotting/file/markers.py
|
Python
|
bsd-3-clause
| 1,583
| 0.001895
|
from numpy.random import random
from bokeh.plotting import figure, show, output_file
def mscatter(p, x, y, marker):
p.scatter(x, y, marker=marker, size=15,
line_color="navy", fill_color="orange", alpha=0.5)
def mtext(p, x, y, text):
p.text(x, y, text=[text],
text_color="firebrick", text_align="center", text_font_size="10pt")
p = figure(title="Bokeh Markers", toolbar_location=None)
p.grid.grid_line_color = None
p.background_fill_color = "#eeeeee"
N = 10
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
mtext(p, 2.5, 0.5, "circle / o")
mtext(p, 4.5, 0.5, "square")
mtext(p, 6.5, 0.5, "triangle")
mtext(p, 8.5, 0.5, "asterisk / *")
mtext(p, 2.5, 3.5, "circle_x / ox")
mtext(p, 4.5, 3.5, "square_x")
mtext(p, 6.5, 3.5, "inverted_triangle")
mtext(p, 8.5, 3.5, "x")
mtext(p, 2.5, 6.5, "circle_cross / o+")
mtext(p, 4.5, 6.5, "square_cross")
mtext(p, 6.5, 6.5, "diamond")
mtext(p, 8.5, 6.5, "cross / +")
output_file("markers.html", title="markers.py example")
show(p) # open a browser
|
huntcsg/slackly
|
src/slackly/oauth_utils.py
|
Python
|
mit
| 1,841
| 0.000543
|
#!/usr/bin/python3
from .compat import BaseHTTPRequestHandler, HTTPServer
import urllib
import json
import sys
import time
import warnings
from slackly import SlackClient
warnings.warn("This part of slackly (oauth_utils) is highly experimental and will likely see api breaking changes")
class CodeServer(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
        host, query = urllib.parse.splitquery(self.path)
query_values = urllib.parse.parse_qs(query)
if 'code' in query_values:
query_values['code'] = query_values['code'][0]
if 'state' in query_values:
query_values['state'] = query_values['state'][0]
if query_values['state'] != self.state_validate:
print("Not a valid request")
return
print(json.dumps(query_values, indent=4))
client = SlackClient()
response = client.api.oauth.access(
client_id=client_id,
client_secret=client_secret,
code=query_values['code'],
redirect_uri=redirect_uri,
)
print(json.dumps(response.data, indent=4))
return
def main(host, port, state, client_id, client_secret, redirect_uri):
CodeServer.state_validate = state
server = HTTPServer((host, port), CodeServer)
print(time.asctime(), "Server Starts - %s:%s" % (host, port))
try:
server.serve_forever()
except KeyboardInterrupt:
pass
server.server_close()
print(time.asctime(), "Server Stops - %s:%s" % (host, port))
if __name__ == '__main__':
host = sys.argv[1]
port = int(sys.argv[2])
state = sys.argv[3]
client_id = sys.argv[4]
client_secret = sys.argv[5]
redirect_uri = sys.argv[6]
main(host, port, state, client_id, client_secret, redirect_uri)
|
kfsone/tinker
|
python/packedstruct.py
|
Python
|
mit
| 9,560
| 0.001046
|
"""
Tool for converting simple 'C' struct and #define lists into Python classes.
The subset of C that is understood is limited to:
#define <group>_<name> <value>
struct X
{
<type> <name>;
<type> <name>[dimension];
};
Blank lines and single-line comments are ignored.
defines are expected to be in contiguous groups with a pattern of naming that
has each define start with a grouping, e.g.
#define GL_FOG_DENSITY 0x01
#define GL_FOG_START 0x02
#define GL_FOG_END 0x03
#define GL_SAMPLE_ALPHA ...
Optionally, you can specify a prefix that is taken and turned into a title.
Given a prefix of 'GL_', the above would create
class Gl_Fog:
DENSITY = 0x01
START = 0x02
END = 0x03
class Gl_SAMPLE:
ALPHA = ...
structs are converted into PackedStruct objects, which can then be accessed
by taking the field name, prefixing it with 'm' and using it as an attribute
struct myHeader
{
char name[8];
int value;
};
if parsed with a prefix of 'my' would produce 'class MyHeader' with members
'mName' and 'mValue'.
You can then use these to consume data from a stream by using
fh = open('file_with_my_header', 'rb')
struct = MyHeader(fh)
print(struct.mName, struct.mValue)
"""
from struct import calcsize, pack, unpack
import logging
import re
""" Map C types to the 'struct' representations. """
TYPE_MAP = {
'char': 's',
'int': 'i',
'short': 'h',
'float': 'f',
'double': 'd',
'uint16': 'H',
'uint32': 'I',
'sint16': 'h',
'sint32': 'i',
'uint16_t': 'H',
'uint32_t': 'I',
'sint16_t': 'h',
'sint32_t': 'i',
}
""" Pattern for removing single-line comments. """
CMT_REMOVE = re.compile(r'\s*//.*').sub
""" For matching the array dimensions of a field. """
ARRAY_MATCH = re.compile(r'(\S+)\[(\d+)\]').match
""" Format the definition of a struct. """
STRUCT_FMT = (
"{struct} = PackedStruct.create("
"'{struct}',"
" {fields},"
" net_endian={end}"
")\n"
"\n"
)
class PackedStruct(object):
"""
BaseClass for implementing pack/unpack struct representations, created
by calling `PackedStruct.create().
See the `Converter` class for generating this automatically from C/C++
structure definitions.
"""
def __init__(self, fh, *args):
self._values = unpack(self.STRUCT, fh.read(self.SIZE))
def __str__(self):
return '<'+self.__class__.__name__+'>'
def __repr__(self):
return "{}({})".format(self.__class__.__name__,
','.join('{}={}'.format(self.FIELDS[i], self._values[i])
for i in range(len(self.FIELDS))
if not self.FIELDS[i].startswith('mReserved'))
)
@staticmethod
def create(struct_name, fields, net_endian=False):
"""
Create a PackedStruct class describing the representation of a binary
data structure as might be specified via a C 'struct'.
The field list is an iterable of (defn, fieldname), where defn is the
`struct` field representation.
Fields can be accessed as attributes by using the name prefixed with
'm' and parsed as title(), e.g. "('2h', 'shorts')" would be mShorts.
:param struct_name: The name of the struct and class.
:param fields: An iterable of field descriptions where each entry is
a tuple of (`struct` representation, fieldname)
:param net_endian: True or False whether the data is stored in
network endian order.
:return: A class definition.
"""
struct_defs, field_names, members = [], [], {}
# build a list of lambdas to implement the members.
for defn, field_name in fields:
field_name = "m" + field_name.title()
field_names.append(field_name)
member_num = len(members)
members[field_name] = property(
lambda self, n=member_num: self._values[int(n)]
)
struct_defs.append(defn)
struct_def = "".join(struct_defs)
if net_endian: struct_def = '!' + struct_def
cls_members = {
'STRUCT': struct_def,
'SIZE': calcsize(struct_def),
'FIELDS': tuple(field_names),
}
cls_members.update(members)
return type(struct_name, (PackedStruct,), cls_members)
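# Example sketch (added; it mirrors the myHeader struct shown in the module docstring,
# and the '8s'/'i' field codes follow the `struct` module formats referenced in TYPE_MAP):
#   MyHeader = PackedStruct.create('MyHeader', [('8s', 'name'), ('i', 'value')])
#   with open('file_with_my_header', 'rb') as fh:
#       hdr = MyHeader(fh)
#       print(hdr.mName, hdr.mValue)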
class Converter(object):
""" Helper for parsing .h file-like definitions to generate PackedStructs. """
def __init__(self, indent=" ", logger=None):
self.indent = indent
self.logger = logger or logging
self.types = set()
self._define_group = None
self._struct_name = None
def _read_define(self, line, prefix):
"""
Internal: Read a #define definition line.
:param line: the line to process
:param prefix: prefix being used
:return: text to add to the current definition.
"""
# #define <name> <value>
_, name, value = line.split()
# If there's a prefix, strip it.
if prefix:
assert name.startswith(prefix)
name = name[len(prefix):]
text = ""
# take the value up to the first _ as the grouping,
# everything after it is the name.
grouping, name = name.split('_', 1)
if grouping != self._define_group:
if self._define_group:
text += "\n"
self._define_group = grouping
self._struct_name = None
typename = prefix.title() + grouping.upper()
if typename in self.types:
raise ValueError("Duplicate type: %s" % typename)
self.logger.info("enum %s", typename)
self.types.add(typename)
text += "class %s:\n" % typename
text += self.indent + "# Enums\n"
self.logger.debug("enum %s.%s = %s", self._define_group,
name, value)
return text + self.indent + "%s = %s\n" % (name, value)
def parse(self, iterable, prefix="", net_endian=False):
"""
        Process c-like #defines, structs and members from an iterable of lines
of text, generating text to produce equivalent PackedStruct classes
in Python.
:param iterable: iterable of lines to parse
:param prefix: [optional] prefix to require infront of structs/defines.
:param net_endian: Set to True for the '!' prefix on struct definitions.
:return: Python text string that would generate the supplied structures.
"""
logger = self.logger
struct_fields, text = None, ""
for line in iterable:
# Get rid of cruft.
line = CMT_REMOVE('', line.rstrip().replace(';', ''))
line = line.replace('\t', ' ')
if not line: continue
# '#define' statements get converted into blocks.
if line.startswith('#define'):
                text += self._read_define(line, prefix)
continue
# nop the open brace of a structure definition.
if line.startswith('{'):
assert self._struct_name
assert struct_fields is None
struct_fields = []
continue
# end of a struct
if line.startswith('}'):
assert self._struct_name
text += STRUCT_FMT.format(struct=self._struct_name,
fields=tuple(struct_fields), end=net_endian)
self._struct_name, fields = None, None
continue
# The remaining lines we understand are 'struct X' and 'type Name'.
try:
typename, field = line.split()
except Exception as e:
# Anything else we just ignore.
logger.debug("Ignoring %s: %s", line, str(e))
continue
# struct definition.
if typename == 'struct':
if self._define_group:
|
probcomp/bdbcontrib
|
tests/test_draw_cc_state.py
|
Python
|
apache-2.0
| 2,579
| 0.005041
|
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2016, MIT Probabilistic Computing Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import bayeslite
import os
import pandas as pd
import random
import cStringIO as StringIO
from bayeslite.read_pandas import bayesdb_read_pandas_df
from bdbcontrib.crosscat_utils import draw_state
from crosscat.utils import data_utils as du
def draw_a_cc_state(filename):
rng_seed = random.randrange(10000)
num_rows = 100
num_cols = 50
num_splits = 5
num_clusters = 5
nan_prop = .25
table_name = 'plottest'
generator_name = 'plottest_cc'
# generate some clustered data
ccmd = du.generate_clean_state(rng_seed, num_clusters, num_cols, num_rows,
num_splits)
T, _M_c, _M_r, _X_L, _X_D = ccmd
for row in range(num_rows):
for col in range(num_cols):
if random.random() < nan_prop:
T[row][col] = float('nan')
input_df = pd.DataFrame(T, columns=['col_%i' % i for i in range(num_cols)])
os.environ['BAYESDB_WIZARD_MODE']='1'
bdb = bayeslite.bayesdb_open()
bayesdb_read_pandas_df(bdb, table_name, input_df, create=True)
bdb.execute('''
create generator {} for {} using crosscat(guess(*))
'''.format(generator_name, table_name))
bdb.execute('initialize 4 models for {}'.format(generator_name))
bdb.execute('analyze {} for 10 iterations wait'.format(generator_name))
plt.figure(facecolor='white', tight_layout=False)
draw_state(bdb, 'plottest', 'plottest_cc', 0,
separator_width=1, separator_color=(0., 0., 1., 1.),
short_names=False, nan_color=(1, .15, .25, 1.))
plt.savefig(filename)
def test_draw_cc_smoke():
f = StringIO.StringIO()
draw_a_cc_state(f)
assert len(f.getvalue()) > 1000
# For manually inspecting the generated figure.
if __name__ == '__main__':
draw_a_cc_state('state.png')
print "Figure saved to 'state.png'"
|
kjchalup/dtit
|
setup.py
|
Python
|
mit
| 2,610
| 0.002682
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='fcit',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='1.2.0',
    description='A decision-tree based conditional independence test',
long_description=long_description,
# The project's main homepage.
url = 'https://github.com/kjchalup/fcit',
# Author details
author = 'Krzysztof Chalupka',
author_email = 'janchatko@gmail.com',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
        # How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='machine learning statistics decision trees',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['numpy', 'sklearn', 'scipy', 'joblib'],
)
|
ZachGangwer/Exercism
|
python/pangram/pangram.py
|
Python
|
gpl-3.0
| 273
| 0.007326
|
def is_pangram(word):
word = sorted(word)
i = 1
    count = 0
while i < len(word):
if (word[i] != word[i-1]) & (word[i].isalpha()):
count += 1
i += 1
if count == 26:
return True
else:
return False
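# Usage sketch (added): for a sentence that uses every letter at least once, e.g.
#   is_pangram("the quick brown fox jumps over the lazy dog")
# the count of distinct alphabetic characters above reaches 26 and the function
# returns True; a sentence missing any letter falls through to False.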
| |
driesdesmet/django-cms
|
cms/plugin_rendering.py
|
Python
|
bsd-3-clause
| 6,339
| 0.005048
|
# -*- coding: utf-8 -*-
from cms.models.placeholdermodel import Placeholder
from cms.plugin_processors import (plugin_meta_context_processor,
mark_safe_plugin_processor)
from cms.utils import get_language_from_request
from cms.utils.django_load import iterload_objects
from cms.utils.placeholder import (get_page_from_placeholder_if_exists,
get_placeholder_conf)
from django.conf import settings
from django.template import Template, Context
from django.template.defaultfilters import title
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
# these are always called before all other plugin context processors
DEFAULT_PLUGIN_CONTEXT_PROCESSORS = (
plugin_meta_context_processor,
)
# these are always called after all other plugin processors
DEFAULT_PLUGIN_PROCESSORS = (
mark_safe_plugin_processor,
)
class PluginContext(Context):
"""
This subclass of template.Context automatically populates itself using
the processors defined in CMS_PLUGIN_CONTEXT_PROCESSORS.
Additional processors can be specified as a list of callables
using the "processors" keyword argument.
"""
def __init__(self, dict, instance, placeholder, processors=None, current_app=None):
super(PluginContext, self).__init__(dict, current_app=current_app)
if not processors:
processors = []
for processor in DEFAULT_PLUGIN_CONTEXT_PROCESSORS:
self.update(processor(instance, placeholder))
for processor in iterload_objects(settings.CMS_PLUGIN_CONTEXT_PROCESSORS):
self.update(processor(instance, placeholder))
for processor in processors:
self.update(processor(instance, placeholder))
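# Usage sketch (added; purely illustrative, the variable names are assumptions): the
# context class and the renderer below are meant to be used together, roughly
#   context = PluginContext({'object': instance}, instance, placeholder)
#   html = render_plugin(context, instance, placeholder, plugin.render_template)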
def render_plugin(context, instance, placeholder, template, processors=None,
current_app=None):
"""
    Renders a single plugin and applies the post processors to its rendered
content.
"""
if not processors:
processors = []
if isinstance(template, basestring):
content = render_to_string(template, context)
elif isinstance(template, Template):
content = template.render(context)
else:
content = ''
for processor in iterload_objects(settings.CMS_PLUGIN_PROCESSORS):
content = processor(instance, placeholder, content, context)
for processor in processors:
content = processor(instance, placeholder, content, context)
for processor in DEFAULT_PLUGIN_PROCESSORS:
content = processor(instance, placeholder, content, context)
return content
def render_plugins(plugins, context, placeholder, processors=None):
"""
Renders a collection of plugins with the given context, using the appropriate processors
for a given placeholder name, and returns a list containing a "rendered content" string
for each plugin.
This is the main plugin rendering utility function, use this function rather than
Plugin.render_plugin().
"""
c = []
total = len(plugins)
for index, plugin in enumerate(plugins):
plugin._render_meta.total = total
plugin._render_meta.index = index
context.push()
c.append(plugin.render_plugin(context, placeholder, processors=processors))
context.pop()
return c
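# Usage sketch (added; names are illustrative): render_placeholder() below is the main
# caller of this helper, roughly
#   rendered_bits = render_plugins(plugins, context, placeholder, processors)
#   content = "".join(rendered_bits)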
def render_placeholder(placeholder, context_to_copy, name_fallback="Placeholder"):
"""
Renders plugins for a placeholder on the given page using shallow copies of the
given context, and returns a string containing the rendered output.
"""
from cms.plugins.utils import get_plugins
context = context_to_copy
context.push()
request = context['request']
plugins = [plugin for plugin in get_plugins(request, placeholder)]
page = get_page_from_placeholder_if_exists(placeholder)
if page:
template = page.template
else:
template = None
# Add extra context as defined in settings, but do not overwrite existing context variables,
# since settings are general and database/template are specific
# TODO this should actually happen as a plugin context processor, but these currently overwrite
# existing context -- maybe change this order?
slot = getattr(placeholder, 'slot', None)
extra_context = {}
if slot:
extra_context = get_placeholder_conf("extra_context", slot, template, {})
for key, value in extra_context.items():
if not key in context:
context[key] = value
c = []
# Prepend frontedit toolbar output if applicable
edit = False
toolbar = getattr(request, 'toolbar', None)
if (getattr(toolbar, 'edit_mode', False) and
(not page or page.has_change_permission(request))):
edit = True
if edit:
from cms.middleware.toolbar import toolbar_plugin_processor
processors = (toolbar_plugin_processor,)
else:
processors = None
c.extend(render_plugins(plugins, context, placeholder, processors))
content = "".join(c)
if edit:
content = render_placeholder_toolbar(placeholder, context, content, name_fallback)
context.pop()
return content
def render_placeholder_toolbar(placeholder, context, content, name_fallback=None):
from cms.plugin_pool import plugin_pool
request = context['request']
page = get_page_from_placeholder_if_exists(placeholder)
if not page:
page = getattr(request, 'current_page', None)
if page:
template = page.template
if name_fallback and not placeholder:
placeholder = Placeholder.objects.create(slot=name_fallback)
page.placeholders.add(placeholder)
else:
template = None
if placeholder:
slot = placeholder.slot
else:
slot = None
installed_plugins = plugin_pool.get_all_plugins(slot, page)
name = get_placeholder_conf(slot, template, "name", title(slot))
name = _(name)
context.push()
context.update({
'installed_plugins': installed_plugins,
'language': get_language_from_request(request),
'placeholder_label': name,
'placeholder': placeholder,
'page': page,
})
toolbar = render_to_string("cms/toolbar/placeholder.html", context)
context.pop()
return "".join([toolbar, content])
|
tinloaf/home-assistant
|
homeassistant/components/switch/aqualogic.py
|
Python
|
apache-2.0
| 3,306
| 0
|
"""
Support for AquaLogic switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.aqualogic/
"""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.core import callback
import homeassistant.components.aqualogic as aq
from homeassistant.components.switch import SwitchDevice, PLATFORM_SCHEMA
from homeassistant.const import (CONF_MONITORED_CONDITIONS)
DEPENDENCIES = ['aqualogic']
_LOGGER = logging.getLogger(__name__)
SWITCH_TYPES = {
    'lights': 'Lights',
'filter': 'Filter',
    'filter_low_speed': 'Filter Low Speed',
'aux_1': 'Aux 1',
'aux_2': 'Aux 2',
'aux_3': 'Aux 3',
'aux_4': 'Aux 4',
'aux_5': 'Aux 5',
'aux_6': 'Aux 6',
'aux_7': 'Aux 7',
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SWITCH_TYPES)):
vol.All(cv.ensure_list, [vol.In(SWITCH_TYPES)]),
})
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Set up the switch platform."""
switches = []
processor = hass.data[aq.DOMAIN]
for switch_type in config.get(CONF_MONITORED_CONDITIONS):
switches.append(AquaLogicSwitch(processor, switch_type))
async_add_entities(switches)
class AquaLogicSwitch(SwitchDevice):
"""Switch implementation for the AquaLogic component."""
def __init__(self, processor, switch_type):
"""Initialize switch."""
from aqualogic.core import States
self._processor = processor
self._type = switch_type
self._state_name = {
'lights': States.LIGHTS,
'filter': States.FILTER,
'filter_low_speed': States.FILTER_LOW_SPEED,
'aux_1': States.AUX_1,
'aux_2': States.AUX_2,
'aux_3': States.AUX_3,
'aux_4': States.AUX_4,
'aux_5': States.AUX_5,
'aux_6': States.AUX_6,
'aux_7': States.AUX_7
}[switch_type]
@property
def name(self):
"""Return the name of the switch."""
return "AquaLogic {}".format(SWITCH_TYPES[self._type])
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def is_on(self):
"""Return true if device is on."""
panel = self._processor.panel
if panel is None:
return False
state = panel.get_state(self._state_name)
return state
def turn_on(self, **kwargs):
"""Turn the device on."""
panel = self._processor.panel
if panel is None:
return
panel.set_state(self._state_name, True)
def turn_off(self, **kwargs):
"""Turn the device off."""
panel = self._processor.panel
if panel is None:
return
panel.set_state(self._state_name, False)
async def async_added_to_hass(self):
"""Register callbacks."""
self.hass.helpers.dispatcher.async_dispatcher_connect(
aq.UPDATE_TOPIC, self.async_update_callback)
@callback
def async_update_callback(self):
"""Update callback."""
self.async_schedule_update_ha_state()
|
samvarankashyap/linch-pin
|
linchpin/provision/roles/azure/filter_plugins/map_results.py
|
Python
|
gpl-3.0
| 292
| 0
|
#!/usr/bin/env python
from __future__ import print_function
import linchpin.FilterUtils.FilterUtils as filter_utils
class FilterModule(object):
''' A filter to fix network format '''
def filters(self):
return {
'map_results': filter_utils.map_results
}
|
AABoyles/Tabular.ui
|
Scripts/dumpStatesToDB.py
|
Python
|
gpl-2.0
| 1,190
| 0.004202
|
#!/usr/bin/python
import time, sqlite3, sys, urllib, csv
begin = time.time()
url = "http://www.correlatesofwar.org/COW2%20Data/SystemMembership/2011/states2011.csv"
print "Downloading from", url
response = urllib.urlretrieve(url, '../Data/states2011.csv')
print "Opening Database"
con = sqlite3.connect('../Data/PyRBD.db')
cur = con.cursor()
rows = 0
with open(response[0], 'Ur') as csvFile:
reader = csv.reader(csvFile)
query = "INSERT INTO stateMembership("
for row in reader:
if rows == 0:
headers = ",".join(row)
query += headers + ") VALUES "
cur.execute("create table if not exists stateMembership(" + headers + ");")
else:
query += "(\"" + "\",\"".join(row) + "\"),"
cur.execute("INSERT INTO stateMembership(" + headers + ") VALUES (\"" + "\",\"".join(row) + "\");")
rows += 1
if rows % 1000 == 0:
query = "INSERT INTO stateMembership("
if rows % 10000 == 0:
print rows, "rows processed."
con.commit()
con.commit()
con.close()
end = time.time()
print rows, "rows processed in", end - begin, "seconds"
sys.exit()
|
j5shi/Thruster
|
pylibs/test/test_sys_settrace.py
|
Python
|
gpl-2.0
| 24,801
| 0.004798
|
# Testing the line trace facility.
from test import test_support
import unittest
import sys
import difflib
import gc
# A very basic example. If this fails, we're in deep trouble.
def basic():
return 1
basic.events = [(0, 'call'),
(1, 'line'),
(1, 'return')]
# Many of the tests below are tricky because they involve pass statements.
# If there is implicit control flow around a pass statement (in an except
# clause or else clause) under what conditions do you set a line number
# following that clause?
# The entire "while 0:" statement is optimized away. No code
# exists for it, so the line numbers skip directly from "del x"
# to "x = 1".
def arigo_example():
x = 1
del x
while 0:
pass
x = 1
arigo_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(5, 'line'),
(5, 'return')]
# check that lines consisting of just one instruction get traced:
def one_instr_line():
x = 1
del x
x = 1
one_instr_line.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(3, 'return')]
def no_pop_tops(): # 0
x = 1 # 1
for a in range(2): # 2
if a: # 3
x = 1 # 4
else: # 5
x = 1 # 6
no_pop_tops.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(6, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(2, 'line'),
(2, 'return')]
def no_pop_blocks():
y = 1
while not y:
bla
x = 1
no_pop_blocks.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(4, 'line'),
(4, 'return')]
def called(): # line -3
x = 1
def call(): # line 0
called()
call.events = [(0, 'call'),
(1, 'line'),
(-3, 'call'),
(-2, 'line'),
(-2, 'return'),
(1, 'return')]
def raises():
raise Exception
def test_raise():
try:
raises()
except Exception, exc:
x = 1
test_raise.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(-3, 'call'),
(-2, 'line'),
(-2, 'exception'),
(-2, 'return'),
(2, 'exception'),
(3, 'line'),
(4, 'line'),
(4, 'return')]
def _settrace_and_return(tracefunc):
sys.settrace(tracefunc)
sys._getframe().f_back.f_trace = tracefunc
def settrace_and_return(tracefunc):
_settrace_and_return(tracefunc)
settrace_and_return.events = [(1, 'return')]
def _settrace_and_raise(tracefunc):
sys.settrace(tracefunc)
sys._getframe().f_back.f_trace = tracefunc
raise RuntimeError
def settrace_and_raise(tracefunc):
try:
_settrace_and_raise(tracefunc)
except RuntimeError, exc:
pass
settrace_and_raise.events = [(2, 'exception'),
(3, 'line'),
(4, 'line'),
(4, 'return')]
# implicit return example
# This test is interesting because of the else: pass
# part of the code. The code generated for the true
# part of the if contains a jump past the else branch.
# The compiler then generates an implicit "return None"
# Internally, the compiler visits the pass statement
# and stores its line number for use on the next instruction.
# The next instruction is the implicit return None.
def ireturn_example():
a = 5
b = 5
if a == b:
b = a+1
else:
pass
ireturn_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(6, 'line'),
(6, 'return')]
# Tight loop with while(1) example (SF #765624)
def tightloop_example():
items = range(0, 3)
try:
i = 0
while 1:
b = items[i]; i+=1
except IndexError:
pass
tightloop_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(5, 'line'),
(5, 'line'),
(5, 'line'),
(5, 'line'),
(5, 'exception'),
(6, 'line'),
(7, 'line'),
(7, 'return')]
def tighterloop_example():
items = range(1, 4)
try:
i = 0
while 1: i = items[i]
except IndexError:
pass
tighterloop_example.events = [(0, 'call'),
(1, 'line'),
(2, 'line'),
(3, 'line'),
(4, 'line'),
(4, 'line'),
(4, 'line'),
(4, 'line'),
(4, 'exception'),
(5, 'line'),
(6, 'line'),
(6, 'return')]
def generator_function():
try:
yield True
"continued"
finally:
"finally"
def generator_example():
# any() will leave the generator before its end
x = any(generator_function())
# the following lines were not traced
for x in range(10):
y = x
generator_example.events = ([(0, 'call'),
(2, 'line'),
(-6, 'call'),
(-5, 'line'),
(-4, 'line'),
(-4, 'return'),
(-4, 'call'),
(-4, 'exception'),
(-1, 'line'),
(-1, 'return')] +
[(5, 'line'), (6, 'line')] * 10 +
[(5, 'line'), (5, 'return')])
class Tracer:
def __init__(self):
self.events = []
def trace(self, frame, event, arg):
self.events.append((frame.f_lineno, event))
return self.trace
def traceWithGenexp(self, frame, event, arg):
(o for o in [1])
self.events.append((frame.f_lineno, event))
return self.trace
class TraceTestCase(unittest.TestCase):
# Disable gc collection when tracing, otherwise the
# deallocators may be traced as well.
def setUp(self):
self.using_gc = gc.isenabled()
gc.disable()
def tearDown(self):
if self.using_gc:
gc.enable()
def compare_events(self, line_offset, events, expected_events):
events = [(l - line_offset, e) for (l, e) in events]
if events != expected_events:
self.fail(
"events did not match expectation:\n" +
"\n".join(difflib.ndiff([str(x) for x in expected_events],
[str(x) for x in events])))
def run_and_compare(self, func, events):
tracer = Tracer()
sys.settrace(tracer.trace)
func()
sys.settrace(None)
self.compare_events(func.func_code.co_firstlineno,
tracer.events, events)
def run_test(self, func):
self.run_and_compare(func, func.events)
def r
|
sony/nnabla
|
python/src/nnabla/backward_function/tanh_shrink.py
|
Python
|
apache-2.0
| 1,106
| 0.002712
|
# Copyright 2019,2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nnabla.functions as F
def tanh_shrink_backward(inputs):
"""
Args:
inputs (list of nn.Variable): Incomming grads/inputs to/of the forward function.
kwargs (dict of arguments): Dictionary of the corresponding function arguments.
Return:
list of Variable: Return the gradients wrt inputs of the corresponding function.
"""
dy = inputs[0]
x0 = inputs[1]
t0 = F.tanh(x0)
dx0 = dy * (1 - (1 - t0 ** 2))
return dx0
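# Derivation note (added for clarity): tanh_shrink(x) = x - tanh(x), so its derivative
# is 1 - (1 - tanh(x)^2) = tanh(x)^2; that factor is what multiplies the incoming grad
# dy above, just written out verbosely as dy * (1 - (1 - t0 ** 2)).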
|
splotz90/urh
|
src/urh/models/SimulatorMessageTableModel.py
|
Python
|
gpl-3.0
| 3,129
| 0.003196
|
from collections import defaultdict
from PyQt5.QtCore import QModelIndex, Qt
from urh.models.TableModel import TableModel
from urh.signalprocessing.ProtocolAnalyzer import ProtocolAnalyzer
class SimulatorMessageTableModel(TableModel):
def __init__(self, compare_frame_controller, generator_tab_controller, parent=None):
super().__init__(None, parent)
self.protocol = ProtocolAnalyzer(None)
self.compare_frame_controller = compare_frame_controller
self.generator_tab_controller = generator_tab_controller
self.decode = False
self.is_writeable = True
self.label_mask = defaultdict(lambda: False)
def update(self):
self.refresh_label_mask()
super().update()
def refresh_label_mask(self):
self.label_mask.clear()
for i, message in enumerate(self.protocol.messages):
for lbl in message.message_type:
if lbl.value_type_index == 0:
continue
start, end = message.get_label_range(lbl, self.proto_view, self.decode)
for j in range(start, end):
self.label_mask[i, j] = True
def refresh_vertical_header(self):
self.vertical_header_text.clear()
for i, msg in enumerate(self.protocol.messages):
participant_name = msg.participant.shortname if msg.participant else "?"
destination_name = msg.destination.shortname if msg.destination else "?"
self.vertical_header_text[i] = "{0} ({1} -> {2})".format(msg.index(), participant_name, destination_name)
def delete_range(self, msg_start: int, msg_end: int, index_start: int, index_end: int):
removable_messages = []
if msg_start > msg_end:
msg_start, msg_end = msg_end, msg_start
if index_start > index_end:
index_start, index_end = index_end, index_start
for i in range(msg_start, msg_end + 1):
try:
bs, be = self.protocol.convert_range(index_start, index_end, self.proto_view, 0, self.decode, message_indx=i)
self.protocol.messages[i].clear_decoded_bits()
del self.protocol.messages[i][bs:be + 1]
if len(self.protocol.messages[i]) == 0:
removable_messages.append(self.protocol.messages[i])
except IndexError:
continue
self.parent().sim_proto_manager.delete_items(removable_messages)
def data(self, index: QModelIndex, role=Qt.DisplayRole):
if not index.isValid():
return None
i = index.row()
j = index.column()
if role == Qt.DisplayRole and self.display_data:
if self.label_mask[i, j]:
return "."
return super().data(index, role)
def flags(self, index: QModelIndex):
if index.isValid():
if self.is_writeable:
return Qt.ItemIsEnabled | Qt.ItemIsEditable | Qt.ItemIsSelectable
else:
return Qt.ItemIsEnabled | Qt.ItemIsSelectable
else:
return Qt.NoItemFlags
|
balajikris/autorest
|
src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/StorageManagementClient/storagemanagementclient/operations/__init__.py
|
Python
|
mit
| 658
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .storage_accounts_operations import StorageAccountsOperations
from .usage_operations import UsageOperations
__all__ = [
'StorageAccountsOperations',
'UsageOperations',
]
|
mkawalec/masters
|
contrib/plot_decay/plot2.py
|
Python
|
gpl-3.0
| 1,103
| 0.000907
|
#!/usr/bin/env python2
from glob import glob
import re
import matplotlib.pyplot as plt
import numpy as np
from sys import argv
def get_a1(pattern):
a1 = {}
for fit_file in glob(pattern):
with open(fit_file) as f:
line = f.readline()
coeffs = line.split(' ')
fit_params = fit_file.split('-')
if fit_params[0] not in a1:
a1[fit_params[0]] = []
a1[fit_params[0]].append((float(fit_params[1]), float(coeffs[1])))
    # Sort and remove the sorting hints
    for key in a1.keys():
a1[key] = sorted(a1[key], key=lambda x: x[0])
a1[key] = dict(y=map(lambda x: float(x[1]), a1[key]),
x=map(lambda x: float(x[0]), a1[key]))
return a1
def plot_a1():
a1 = get_a1(argv[1])
fig, ax = plt.subplots()
for domain in sorted(a1.keys(), key=lambda x: float(x)):
ax.plot(a1[domain]['x'], a1[domain]['y'],
label='%s pi' % (domain))
ax.legend(loc=0)
fig.savefig('a1.png', dpi=300)
plt.show()
if __name__ == '__main__':
plot_a1()
|
DirectXMan12/nova-hacking
|
nova/tests/api/openstack/compute/contrib/test_flavor_swap.py
|
Python
|
apache-2.0
| 3,130
| 0.000639
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import webob
from nova.compute import flavors
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
FAKE_FLAVORS = {
'flavor 1': {
"flavorid": '1',
"name": 'flavor 1',
"memory_mb": '256',
"root_gb": '10',
"swap": 512,
},
'flavor 2': {
"flavorid": '2',
"name": 'flavor 2',
"memory_mb": '512',
"root_gb": '10',
"swap": None,
},
}
#TODO(jogo) dedup these across nova.api.openstack.contrib.test_flavor*
def fake_flavor_get_by_flavor_id(flavorid):
return FAKE_FLAVORS['flavor %s' % flavorid]
def fake_flavor_get_all(*args, **kwargs):
return FAKE_FLAVORS
class FlavorSwapTest(test.TestCase):
content_type = 'application/json'
prefix = ''
def setUp(self):
super(FlavorSwapTest, self).setUp()
ext = ('nova.api.openstack.compute.contrib'
'.flavor_swap.Flavor_swap')
self.flags(osapi_compute_extension=[ext])
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(flavors, "get_all_flavors",
fake_flavor_get_all)
self.stubs.Set(flavors,
"get_flavor_by_flavor_id",
fake_flavor_get_by_flavor_id)
def _make_request(self, url):
req = webob.Request.blank(url)
req.headers['Accept'] = self.content_type
res = req.get_response(fakes.wsgi_app())
return res
def _get_flavor(self, body):
return jsonutils.loads(body).get('flavor')
def _get_flavors(self, body):
return jsonutils.loads(body).get('flavors')
def assertFlavorSwap(self, flavor, swap):
self.assertEqual(str(flavor.get('%sswap' % self.prefix)), swap)
def test_show(self):
url = '/v2/fake/flavors/1'
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
self.assertFlavorSwap(self._get_flavor(res.body), '512')
def test_detail(self):
url = '/v2/fake/flavors/detail'
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
flavors = self._get_flavors(res.body)
self.assertFlavorSwap(flavors[0], '512')
self.assertFlavorSwap(flavors[1], '')
class FlavorSwapXmlTest(FlavorSwapTest):
    content_type = 'application/xml'
def _get_flavor(self, body):
return etree.XML(body)
def _get_flavors(self, body):
return etree.XML(body).getchildren()
|
Kagee/youtube-dl
|
youtube_dl/extractor/teamcoco.py
|
Python
|
unlicense
| 3,246
| 0.001848
|
from __future__ import unicode_literals
import base64
import re
from .common import InfoExtractor
from ..utils import qualities
class TeamcocoIE(InfoExtractor):
_VALID_URL = r'http://teamcoco\.com/video/(?P<video_id>[0-9]+)?/?(?P<display_id>.*)'
_TESTS = [
{
'url': 'http://teamcoco.com/video/80187/conan-becomes-a-mary-kay-beauty-consultant',
'md5': '3f7746aa0dc86de18df7539903d399ea',
'info_dict': {
'id': '80187',
'ext': 'mp4',
'title': 'Conan Becomes A Mary Kay Beauty Consultant',
'description': 'Mary Kay is perhaps the most trusted name in female beauty, so of course Conan is a natural choice to sell their products.',
'age_limit': 0,
}
}, {
'url': 'http://teamcoco.com/video/louis-ck-interview-george-w-bush',
'md5': 'cde9ba0fa3506f5f017ce11ead928f9a',
'info_dict': {
'id': '19705',
'ext': 'mp4',
'description': 'Louis C.K. got starstruck by George W. Bush, so what? Part one.',
'title': 'Louis C.K. Interview Pt. 1 11/3/11',
'age_limit': 0,
}
}
]
_VIDEO_ID_REGEXES = (
r'"eVar42"\s*:\s*(\d+)',
r'Ginger\.TeamCoco\.openInApp\("video",\s*"([^"]+)"',
r'"id_not"\s*:\s*(\d+)'
)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, display_id)
video_id = mobj.group('video_id')
if not video_id:
video_id = self._html_search_regex(
self._VIDEO_ID_REGEXES, webpage, 'video id')
        embed_url = 'http://teamcoco.com/embed/v/%s' % video_id
embed = self._download_webpage(
embed_url, video_id, 'Downloading embed page')
encoded_data = self._search_regex(
r'"preload"\s*:\s*"([^"]+)"', embed, 'encoded data')
data = self._parse_json(
            base64.b64decode(encoded_data.encode('ascii')).decode('utf-8'), video_id)
formats = []
get_quality = qualities(['500k', '480p', '1000k', '720p', '1080p'])
for filed in data['files']:
m_format = re.search(r'(\d+(k|p))\.mp4', filed['url'])
if m_format is not None:
format_id = m_format.group(1)
else:
format_id = filed['bitrate']
tbr = (
int(filed['bitrate'])
if filed['bitrate'].isdigit()
else None)
formats.append({
'url': filed['url'],
'ext': 'mp4',
'tbr': tbr,
'format_id': format_id,
'quality': get_quality(format_id),
})
self._sort_formats(formats)
return {
'id': video_id,
'display_id': display_id,
'formats': formats,
'title': data['title'],
'thumbnail': data.get('thumb', {}).get('href'),
'description': data.get('teaser'),
'age_limit': self._family_friendly_search(webpage),
}
|
tensorflow/tensorflow
|
tensorflow/python/framework/c_api_util.py
|
Python
|
apache-2.0
| 7,542
| 0.009281
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for using the TensorFlow C API."""
from tensorflow.core.framework import api_def_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.python.client import pywrap_tf_session as c_api
from tensorflow.python.util import compat
from tensorflow.python.util import tf_contextlib
class ScopedTFStatus(object):
"""Wrapper around TF_Status that handles deletion."""
__slots__ = ["status"]
def __init__(self):
self.status = c_api.TF_NewStatus()
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteStatus is not None:
c_api.TF_DeleteStatus(self.status)
class ScopedTFGraph(object):
"""Wrapper around TF_Graph that handles deletion."""
__slots__ = ["graph", "deleter"]
def __init__(self):
self.graph = c_api.TF_NewGraph()
# Note: when we're destructing the global context (i.e when the process is
# terminating) we may have already deleted other modules. By capturing the
# DeleteGraph function here, we retain the ability to cleanly destroy the
# graph at shutdown, which satisfies leak checkers.
self.deleter = c_api.TF_DeleteGraph
def __del__(self):
self.deleter(self.graph)
class ScopedTFImportGraphDefOptions(object):
"""Wrapper around TF_ImportGraphDefOptions that handles deletion."""
__slots__ = ["options"]
def __init__(self):
self.options = c_api.TF_NewImportGraphDefOptions()
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteImportGraphDefOptions is not None:
c_api.TF_DeleteImportGraphDefOptions(self.options)
class ScopedTFImportGraphDefResults(object):
"""Wrapper around TF_ImportGraphDefOptions that handles deletion."""
__slots__ = ["results"]
def __init__(self, results):
self.results = results
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteImportGraphDefResults is not None:
c_api.TF_DeleteImportGraphDefResults(self.results)
class ScopedTFFunction(object):
"""Wrapper around TF_Function that handles deletion."""
__slots__ = ["func", "deleter"]
def __init__(self, func):
self.func = func
# Note: when we're destructing the global context (i.e when the process is
# terminating) we may have already deleted other modules. By capturing the
# DeleteFunction function here, we retain the ability to cleanly destroy the
# Function at shutdown, which satisfies leak checkers.
self.deleter = c_api.TF_DeleteFunction
@property
def has_been_garbage_collected(self):
return self.func is None
def __del__(self):
if not self.has_been_garbage_collected:
self.deleter(self.func)
self.func = None
class ScopedTFBuffer(object):
"""An internal class to help manage the TF_Buffer lifetime."""
__slots__ = ["buffer"]
def __init__(self, buf_string):
self.buffer = c_api.TF_NewBufferFromString(compat.as_bytes(buf_string))
def __del__(self):
c_api.TF_DeleteBuffer(self.buffer)
class ApiDefMap(object):
"""Wrapper around Tf_ApiDefMap that handles querying and deletion.
The OpDef protos are also stored in this class so that they could
be queried by op name.
"""
__slots__ = ["_api_def_map", "_op_per_name"]
def __init__(self):
op_def_proto = op_def_pb2.OpList()
buf = c_api.TF_GetAllOpList()
try:
op_def_proto.ParseFromString(c_api.TF_GetBuffer(buf))
self._api_def_map = c_api.TF_NewApiDefMap(buf)
finally:
c_api.TF_DeleteBuffer(buf)
self._op_per_name = {}
for op in op_def_proto.op:
self._op_per_name[op.name] = op
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteApiDefMap is not None:
c_api.TF_DeleteApiDefMap(self._api_def_map)
def put_api_def(self, text):
c_api.TF_ApiDefMapPut(self._api_def_map, text, len(text))
def get_api_def(self, op_name):
api_def_proto = api_def_pb2.ApiDef()
buf = c_api.TF_ApiDefMapGet(self._api_def_map, op_name, len(op_name))
try:
api_def_proto.ParseFromString(c_api.TF_GetBuffer(buf))
finally:
c_api.TF_DeleteBuffer(buf)
return api_def_proto
def get_op_def(self, op_name):
if op_name in self._op_per_name:
return self._op_per_name[op_name]
raise ValueError(f"No op_def found for op name {op_name}.")
  def op_names(self):
return self._op_per_name.keys()
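# --- Editor's note: illustrative usage sketch, not part of the original file. ---
# ApiDefMap parses TF_GetAllOpList() once in __init__ and keeps the OpDef
# protos in a plain dict, so lookups after construction are cheap Python
# operations:
#
#   api_defs = ApiDefMap()
#   "MatMul" in api_defs.op_names()       # -> True on a stock build
#   api_defs.get_op_def("MatMul").name    # -> "MatMul" (an OpDef proto)
#
# get_api_def()/put_api_def() go back through the C API with serialized
# ApiDef data; the exact str/bytes type they accept follows the c_api
# bindings, so treat the names above as assumptions.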
@tf_contextlib.contextmanager
def tf_buffer(data=None):
"""Context manager that creates and deletes TF_Buffer.
Example usage:
with tf_buffer() as buf:
# get serialized graph def into buf
...
    proto_data = c_api.TF_GetBuffer(buf)
graph_def.ParseFromString(compat.as_bytes(proto_data))
# buf has been deleted
with tf_buffer(some_string) as buf:
c_api.TF_SomeFunction(buf)
# buf has been deleted
Args:
data: An optional `bytes`, `str`, or `unicode` object. If not None, the
yielded buffer will contain this data.
Yields:
Created TF_Buffer
"""
if data:
buf = c_api.TF_NewBufferFromString(compat.as_bytes(data))
else:
buf = c_api.TF_NewBuffer()
try:
yield buf
finally:
c_api.TF_DeleteBuffer(buf)
def tf_output(c_op, index):
"""Returns a wrapped TF_Output with specified operation and index.
Args:
c_op: wrapped TF_Operation
index: integer
Returns:
Wrapped TF_Output
"""
ret = c_api.TF_Output()
ret.oper = c_op
ret.index = index
return ret
def tf_operations(graph):
"""Generator that yields every TF_Operation in `graph`.
Args:
graph: Graph
Yields:
wrapped TF_Operation
"""
# pylint: disable=protected-access
pos = 0
c_op, pos = c_api.TF_GraphNextOperation(graph._c_graph, pos)
while c_op is not None:
yield c_op
c_op, pos = c_api.TF_GraphNextOperation(graph._c_graph, pos)
# pylint: enable=protected-access
def new_tf_operations(graph):
"""Generator that yields newly-added TF_Operations in `graph`.
Specifically, yields TF_Operations that don't have associated Operations in
`graph`. This is useful for processing nodes added by the C API.
Args:
graph: Graph
Yields:
wrapped TF_Operation
"""
# TODO(b/69679162): do this more efficiently
for c_op in tf_operations(graph):
try:
graph._get_operation_by_tf_operation(c_op) # pylint: disable=protected-access
except KeyError:
yield c_op
|
bestK1ngArthur/IU5
|
Term 5/Development of Internet applications/Lab6/Lab6/settings.py
|
Python
|
mit
| 3,298
| 0.001819
|
"""
Django settings for Lab6 project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7==2hz#hwkd)m@auv!=a8#4rzx3c2gm+y@un!2k^7yf()&__vm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'lab_app.apps.LabAppConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Lab6.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.static',
],
},
},
]
WSGI_APPLICATION = 'Lab6.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'productsDatabase',
'USER': 'root',
'PASSWORD': 'root',
}
}
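# Editor's note (illustrative, not part of the original project): with the
# mysql backend, omitting 'HOST' and 'PORT' connects to a local server over
# the default socket; pointing at a remote database would add, for example,
#
#   'HOST': '127.0.0.1',
#   'PORT': '3306',
#
# alongside NAME/USER/PASSWORD in the 'default' entry above.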
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
|
zblz/naima
|
src/naima/models.py
|
Python
|
bsd-3-clause
| 15,997
| 0
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import astropy.units as u
import numpy as np
from astropy.table import Table
from astropy.utils.data import get_pkg_data_filename
from .extern.validator import (
validate_array,
validate_physical_type,
validate_scalar,
)
from .model_utils import memoize
from .radiative import Bremsstrahlung, InverseCompton, PionDecay, Synchrotron
__all__ = [
"Synchrotron",
"InverseCompton",
"PionDecay",
"Bremsstrahlung",
"BrokenPowerLaw",
"ExponentialCutoffPowerLaw",
"PowerLaw",
"LogParabola",
"ExponentialCutoffBrokenPowerLaw",
"TableModel",
"EblAbsorptionModel",
]
def _validate_ene(ene):
if isinstance(ene, dict) or isinstance(ene, Table):
try:
ene = validate_array(
"energy", u.Quantity(ene["energy"]), physical_type="energy"
)
except KeyError:
raise TypeError("Table or dict does not have 'ene' column")
else:
if not isinstance(ene, u.Quantity):
ene = u.Quantity(ene)
validate_physical_type("energy", ene, physical_type="energy")
return ene
class PowerLaw:
"""
One dimensional power law model.
Parameters
----------
amplitude : float
Model amplitude.
e_0 : `~astropy.units.Quantity` float
Reference energy
alpha : float
Power law index
See Also
--------
PowerLaw, BrokenPowerLaw, LogParabola
Notes
-----
Model formula (with :math:`A` for ``amplitude``, :math:`\\alpha` for
``alpha``):
.. math:: f(E) = A (E / E_0) ^ {-\\alpha}
"""
param_names = ["amplitude", "e_0", "alpha"]
_memoize = False
_cache = {}
_queue = []
def __init__(self, amplitude, e_0, alpha):
self.amplitude = amplitude
self.e_0 = validate_scalar(
"e_0", e_0, domain="positive", physical_type="energy"
)
self.alpha = alpha
@staticmethod
def eval(e, amplitude, e_0, alpha):
"""One dimensional power law model function"""
xx = e / e_0
return amplitude * xx ** (-alpha)
@memoize
def _calc(self, e):
return self.eval(
e.to("eV").value,
self.amplitude,
self.e_0.to("eV").value,
self.alpha,
)
def __call__(self, e):
"""One dimensional power law model function"""
e = _validate_ene(e)
return self._calc(e)
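# --- Editor's note: illustrative usage sketch, not part of the original file. ---
# The hypothetical values below only show the calling convention: amplitude
# carries the flux/particle-distribution units, while e_0 and the evaluation
# energies are astropy Quantities with physical_type "energy".
#
#   pl = PowerLaw(amplitude=1e36 / u.eV, e_0=1 * u.TeV, alpha=2.1)
#   pl([0.1, 1, 10] * u.TeV)   # == amplitude * (E / e_0) ** -2.1 for each E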
class ExponentialCutoffPowerLaw:
"""
One dimensional power law model with an exponential cutoff.
Parameters
----------
amplitude : float
Model amplitude
e_0 : `~astropy.units.Quantity` float
Reference point
alpha : float
Power law index
e_cutoff : `~astropy.units.Quantity` float
Cutoff point
beta : float
Cutoff exponent
See Also
--------
PowerLaw, BrokenPowerLaw, LogParabola
Notes
-----
Model formula (with :math:`A` for ``amplitude``, :math:`\\alpha` for
``alpha``, and :math:`\\beta` for ``beta``):
.. math:: f(E) = A (E / E_0) ^ {-\\alpha}
\\exp (- (E / E_{cutoff}) ^ \\beta)
"""
param_names = ["amplitude", "e_0", "alpha", "e_cutoff", "beta"]
_memoize = False
_cache = {}
_queue = []
def __init__(self, amplitude, e_0, alpha, e_cutoff, beta=1.0):
self.amplitude = amplitude
self.e_0 = validate_scalar(
"e_0", e_0, domain="positive", physical_type="energy"
)
self.alpha = alpha
self.e_cutoff = validate_scalar(
"e_cutoff", e_cutoff, domain="positive", physical_type="energy"
)
self.beta = beta
@staticmethod
def eval(e, amplitude, e_0, alpha, e_cutoff, beta):
"One dimensional power law with an exponential cutoff model function"
xx = e / e_0
return amplitude * xx ** (-alpha) * np.exp(-((e / e_cutoff) ** beta))
@memoize
def _calc(self, e):
return self.eval(
e.to("eV").value,
self.amplitude,
self.e_0.to("eV").value,
self.alpha,
self.e_cutoff.to("eV").value,
self.beta,
)
def __call__(self, e):
"One dimensional power law with an exponential cutoff model function"
e = _validate_ene(e)
return self._calc(e)
class BrokenPowerLaw:
"""
One dimensional power law model with a break.
Parameters
----------
amplitude : float
Model amplitude at the break energy
e_0 : `~astropy.units.Quantity` float
Reference point
e_break : `~astropy.units.Quantity` float
Break energy
alpha_1 : float
Power law index for x < x_break
alpha_2 : float
Power law index for x > x_break
See Also
--------
PowerLaw, ExponentialCutoffPowerLaw, LogParabola
Notes
-----
Model formula (with :math:`A` for ``amplitude``, :math:`E_0` for ``e_0``,
:math:`\\alpha_1` for ``alpha_1`` and :math:`\\alpha_2` for ``alpha_2``):
.. math::
f(E) = \\left \\{
\\begin{array}{ll}
A (E / E_0) ^ {-\\alpha_1} & : E < E_{break} \\\\
A (E_{break}/E_0) ^ {\\alpha_2-\\alpha_1}
(E / E_0) ^ {-\\alpha_2} & : E > E_{break} \\\\
\\end{array}
\\right.
"""
param_names = ["amplitude", "e_0", "e_break", "alpha_1", "alpha_2"]
_memoize = False
_cache = {}
_queue = []
def __init__(self, amplitude, e_0, e_break, alpha_1, alpha_2):
self.amplitude = amplitude
self.e_0 = validate_scalar(
"e_0", e_0, domain="positive", physical_type="energy"
)
self.e_break = validate_scalar(
"e_break", e_break, domain="positive", physical_type="energy"
)
self.alpha_1 = alpha_1
self.alpha_2 = alpha_2
@staticmethod
def eval(e, amplitude, e_0, e_break, alpha_1, alpha_2):
"""One dimensional broken power law model function"""
K = np.where(e < e_break, 1, (e_break / e_0) ** (alpha_2 - alpha_1))
alpha = np.where(e < e_break, alpha_1, alpha_2)
return amplitude * K * (e / e_0) ** -alpha
@memoize
def _calc(self, e):
return self.eval(
e.to("eV").value,
self.amplitude,
self.e_0.to("eV").value,
self.e_break.to("eV").value,
self.alpha_1,
self.alpha_2,
)
def __call__(self, e):
"""One dimensional broken power law model functio
|
n"""
e = _validate_ene(e)
return self._calc(e)
class ExponentialCutoffBrokenPowerLaw:
"""
    One dimensional power law model with a break.
Parameters
----------
amplitude : float
Model amplitude at the break point
e_0 : `~astropy.units.Quantity` float
Reference point
e_break : `~astropy.units.Quantity` float
Break energy
alpha_1 : float
Power law index for x < x_break
alpha_2 : float
Power law index for x > x_break
e_cutoff : `~astropy.units.Quantity` float
Exponential Cutoff energy
beta : float, optional
Exponential cutoff rapidity. Default is 1.
See Also
--------
PowerLaw, ExponentialCutoffPowerLaw, LogParabola
Notes
-----
Model formula (with :math:`A` for ``amplitude``, :math:`E_0` for ``e_0``,
:math:`\\alpha_1` for ``alpha_1``, :math:`\\alpha_2` for ``alpha_2``,
:math:`E_{cutoff}` for ``e_cutoff``, and :math:`\\beta` for ``beta``):
.. math::
f(E) = \\exp(-(E / E_{cutoff})^\\beta)\\left \\{
\\begin{array}{ll}
A (E / E_0) ^ {-\\alpha_1} & : E < E_{break} \\\\
A (E_{break}/E_0) ^ {\\alpha_2-\\alpha_1}
(E / E_0) ^ {-\\alpha_2} & : E > E_{break} \\\\
\\end{array}
\\right.
"""
param_names = [
"amplitude",
"e_0",
|
Jet-Streaming/gyp
|
test/win/gyptest-link-nodefaultlib.py
|
Python
|
bsd-3-clause
| 596
| 0.010067
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure nodefaultlib setting is extracted properly.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('nodefaultlib.gyp', chdir=CHDIR)
test.build('nodefaultlib.gyp', 'test_ok', chdir=CHDIR)
  test.build('nodefaultlib.gyp', 'test_fail', chdir=CHDIR, status=1)
test.pass_test()
|
fengjz1/eloipool-litecoin
|
eloipool.py
|
Python
|
agpl-3.0
| 26,968
| 0.034634
|
#!/usr/bin/python3
# Eloipool - Python Bitcoin pool server
# Copyright (C) 2011-2013 Luke Dashjr <luke-jr+eloipool@utopios.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
if not hasattr(config, 'ServerName'):
config.ServerName = 'Unnamed Eloipool'
if not hasattr(config, 'ShareTarget'):
config.ShareTarget = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff
import logging
import logging.handlers
rootlogger = logging.getLogger(None)
logformat = getattr(config, 'LogFormat', '%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s')
logformatter = logging.Formatter(logformat)
if len(rootlogger.handlers) == 0:
logging.basicConfig(
format=logformat,
level=logging.DEBUG,
)
for infoOnly in (
'checkShare',
'getTarget',
'JSONRPCHandler',
'JSONRPCServer',
'merkleMaker',
'StratumServer',
'Waker for JSONRPCServer',
'Waker for StratumServer',
'WorkLogPruner'
):
logging.getLogger(infoOnly).setLevel(logging.INFO)
if getattr(config, 'LogToSysLog', False):
sysloghandler = logging.handlers.SysLogHandler(address = '/dev/log')
rootlogger.addHandler(sysloghandler)
if hasattr(config, 'LogFile'):
if isinstance(config.LogFile, str):
filehandler = logging.FileHandler(config.LogFile)
else:
filehandler = logging.handlers.TimedRotatingFileHandler(**config.LogFile)
filehandler.setFormatter(logformatter)
rootlogger.addHandler(filehandler)
def RaiseRedFlags(reason):
logging.getLogger('redflag').critical(reason)
return reason
from bitcoin.node import BitcoinLink, BitcoinNode
bcnode = BitcoinNode(config.UpstreamNetworkId)
bcnode.userAgent += b'Eloipool:0.1/'
import jsonrpc
try:
import jsonrpc.authproxy
jsonrpc.authproxy.USER_AGENT = 'Eloipool/0.1'
except:
pass
from bitcoin.script import BitcoinScript
from bitcoin.txn import Txn
from base58 import b58decode
from struct import pack
import subprocess
from time import time
def makeCoinbaseTxn(coinbaseValue, useCoinbaser = True):
txn = Txn.new()
if useCoinbaser and hasattr(config, 'CoinbaserCmd') and config.CoinbaserCmd:
coinbased = 0
try:
cmd = config.CoinbaserCmd
cmd = cmd.replace('%d', str(coinbaseValue))
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
nout = int(p.stdout.readline())
for i in range(nout):
amount = int(p.stdout.readline())
addr = p.stdout.readline().rstrip(b'\n').decode('utf8')
pkScript = BitcoinScript.toAddress(addr)
txn.addOutput(amount, pkScript)
coinbased += amount
except:
coinbased = coinbaseValue + 1
if coinbased >= coinbaseValue:
logging.getLogger('makeCoinbaseTxn').error('Coinbaser failed!')
txn.outputs = []
else:
coinbaseValue -= coinbased
pkScript = BitcoinScript.toAddress(config.TrackerAddr)
txn.addOutput(coinbaseValue, pkScript)
# TODO
# TODO: red flag on dupe coinbase
return txn
import jsonrpc_getwork
from util import Bits2Target
workLog = {}
userStatus = {}
networkTarget = None
DupeShareHACK = {}
server = None
stratumsrv = None
def updateBlocks():
server.wakeLongpoll()
stratumsrv.updateJob()
def blockChanged():
global MM, networkTarget, server
bits = MM.currentBlock[2]
if bits is None:
networkTarget = None
else:
networkTarget = Bits2Target(bits)
if MM.lastBlock != (None, None, None):
global DupeShareHACK
DupeShareHACK = {}
jsonrpc_getwork._CheckForDupesHACK = {}
workLog.clear()
server.wakeLongpoll(wantClear=True)
stratumsrv.updateJob(wantClear=True)
from time import sleep, time
import traceback
def _WorkLogPruner_I(wl):
now = time()
pruned = 0
for username in wl:
userwork = wl[username]
for wli in tuple(userwork.keys()):
if now > userwork[wli][1] + 120:
del userwork[wli]
pruned += 1
WorkLogPruner.logger.debug('Pruned %d jobs' % (pruned,))
def WorkLogPruner(wl):
while True:
try:
sleep(60)
_WorkLogPruner_I(wl)
except:
WorkLogPruner.logger.error(traceback.format_exc())
WorkLogPruner.logger = logging.getLogger('WorkLogPruner')
from merklemaker import merkleMaker
MM = merkleMaker()
MM.__dict__.update(config.__dict__)
MM.makeCoinbaseTxn = makeCoinbaseTxn
MM.onBlockChange = blockChanged
MM.onBlockUpdate = updateBlocks
from binascii import b2a_hex
from copy import deepcopy
from math import ceil, log
from merklemaker import MakeBlockHeader
from struct import pack, unpack
import threading
from time import time
from util import PendingUpstream, RejectedShare, bdiff1target, scrypt, LEhash2int, swap32, target2bdiff, target2pdiff
import jsonrpc
import traceback
gotwork = None
if hasattr(config, 'GotWorkURI'):
gotwork = jsonrpc.ServiceProxy(config.GotWorkURI)
if not hasattr(config, 'DelayLogForUpstream'):
config.DelayLogForUpstream = False
if not hasattr(config, 'DynamicTargetting'):
config.DynamicTargetting = 0
else:
if not hasattr(config, 'DynamicTargetWindow'):
config.DynamicTargetWindow = 120
config.DynamicTargetGoal *= config.DynamicTargetWindow / 60
def submitGotwork(info):
try:
gotwork.gotwork(info)
except:
checkShare.logger.warning('Failed to submit gotwork\n' + traceback.format_exc())
def clampTarget(target, DTMode):
# ShareTarget is the minimum
if target is None or target > config.ShareTarget:
target = config.ShareTarget
# Never target above the network, as we'd lose blocks
if target < networkTarget:
target = networkTarget
if DTMode == 2:
# Ceil target to a power of two :)
truebits = log(target, 2)
if target <= 2**int(truebits):
# Workaround for bug in Python's math.log function
truebits = int(truebits)
target = 2**ceil(truebits) - 1
elif DTMode == 3:
# Round target to multiple of bdiff 1
target = bdiff1target / int(round(target2bdiff(target)))
# Return None for ShareTarget to save memory
if target == config.ShareTarget:
return None
return target
def getTarget(username, now, DTMode = None, RequestedTarget = None):
if DTMode is None:
DTMode = config.DynamicTargetting
if not DTMode:
return None
if username in userStatus:
status = userStatus[username]
else:
# No record, use default target
RequestedTarget = clampTarget(RequestedTarget, DTMode)
userStatus[username] = [RequestedTarget, now, 0]
return RequestedTarget
(targetIn, lastUpdate, work) = status
if work <= config.DynamicTargetGoal:
if now < lastUpdate + config.DynamicTargetWindow and (targetIn is None or targetIn >= networkTarget):
# No reason to change it just yet
return clampTarget(targetIn, DTMode)
if not work:
# No shares received, reset to minimum
if targetIn:
getTarget.logger.debug("No shares from %s, resetting to minimum target" % (repr(username),))
userStatus[username] = [None, now, 0]
return clampTarget(None, DTMode)
deltaSec = now - lastUpdate
target = targetIn or config.ShareTarget
target = int(target * config.DynamicTargetGoal * deltaSec / config.DynamicTargetWindow / work)
target = clampTarget(target, DTMode)
if target != targetIn:
pfx = 'Retargetting %s' % (repr(username),)
tin = targetIn or config.ShareTarget
getTarget.logger.debug("%s from: %064x (pdiff %s)" % (pfx, tin, target2pdiff(tin)))
tgt = target or config.ShareTarget
		getTarget.logger.debug("%s to: %064x (pdiff %s)" % (pfx, tgt, target2pdiff(tgt)))
userStatus[username] = [target, now, 0]
return target
getTarget.logger = logging.getLogger('getTarget')
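# --- Editor's note: illustrative worked example, not part of the original file. ---
# The retarget step above scales the share target by
#   DynamicTargetGoal * deltaSec / DynamicTargetWindow / work
# With hypothetical numbers: an (already window-scaled) DynamicTargetGoal of 16,
# DynamicTargetWindow == 120, and a miner who submitted work == 64 shares over
# deltaSec == 120 s gives a factor of 16 * 120 / 120 / 64 = 0.25, i.e. a target
# a quarter of its previous value (4x the difficulty). The result is then
# clamped by clampTarget() between networkTarget and config.ShareTarget.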
def TopTargets(n = 0x10):
tmp = list(k for k, v in userStatus.items() if not v[0] is None)
	tmp.sort(key=lambda k: -userStatus[k][0])
tmp2 = {}
def t2d(t):
if t not in tmp2:
tmp2[t] = target2pdiff(t)
return tmp2[t]
for k in tmp[-n:]:
tgt = userS
|
affo/nova
|
nova/virt/vmwareapi/driver.py
|
Python
|
apache-2.0
| 27,905
| 0.000824
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A connection to the VMware vCenter platform.
"""
import re
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_vmware import api
from oslo_vmware import exceptions as vexc
from oslo_vmware import pbm
from oslo_vmware import vim
from oslo_vmware import vim_util
from nova import exception
from nova.i18n import _, _LI, _LW
from nova.virt import driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import host
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import volumeops
LOG = logging.getLogger(__name__)
vmwareapi_opts = [
cfg.StrOpt('host_ip',
help='Hostname or IP address for connection to VMware VC '
'host.'),
cfg.IntOpt('host_port',
default=443,
help='Port for connection to VMware VC host.'),
cfg.StrOpt('host_username',
help='Username for connection to VMware VC host.'),
cfg.StrOpt('host_password',
help='Password for connection to VMware VC host.',
secret=True),
cfg.MultiStrOpt('cluster_name',
help='Name of a VMware Cluster ComputeResource.'),
cfg.StrOpt('datastore_regex',
help='Regex to match the name of a datastore.'),
cfg.FloatOpt('task_poll_interval',
default=0.5,
help='The interval used for polling of remote tasks.'),
cfg.IntOpt('api_retry_count',
default=10,
help='The number of times we retry on failures, e.g., '
'socket error, etc.'),
cfg.IntOpt('vnc_port',
default=5900,
help='VNC starting port'),
cfg.IntOpt('vnc_port_total',
default=10000,
help='Total number of VNC ports'),
cfg.BoolOpt('use_linked_clone',
default=True,
help='Whether to use linked clone'),
cfg.StrOpt('wsdl_location',
help='Optional VIM Service WSDL Location '
'e.g http://<server>/vimService.wsdl. '
'Optional over-ride to default location for bug '
'work-arounds')
]
spbm_opts = [
cfg.BoolOpt('pbm_enabled',
default=False,
help='The PBM status.'),
cfg.StrOpt('pbm_wsdl_location',
help='PBM service WSDL file location URL. '
'e.g. file:///opt/SDK/spbm/wsdl/pbmService.wsdl '
'Not setting this will disable storage policy based '
'placement of instances.'),
cfg.StrOpt('pbm_default_policy',
help='The PBM default policy. If pbm_wsdl_location is set and '
|
'there is no defined storage policy for the specific '
'request then this policy will be used.'),
]
CONF = cfg.CONF
CONF.register_opts(vmwareapi_opts, 'vmware')
CONF.register_opts(spbm_opts, 'vmware')
TIME_BETWEEN_API_CALL_RETRIES = 1.0
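# --- Editor's note: illustrative configuration sketch, not part of the original file. ---
# The options registered above live under the [vmware] group of nova.conf;
# the host names and credentials below are placeholders:
#
#   [vmware]
#   host_ip = vcenter.example.com
#   host_username = administrator@vsphere.local
#   host_password = secret
#   cluster_name = cluster1
#   datastore_regex = nfs.*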
class VMwareVCDriver(driver.ComputeDriver):
"""The VC host connection object."""
capabilities = {
"has_imagecache": True,
"supports_recreate": False,
}
# The vCenter driver includes API that acts on ESX hosts or groups
# of ESX hosts in clusters or non-cluster logical-groupings.
#
# vCenter is not a hypervisor itself, it works with multiple
# hypervisor host machines and their guests. This fact can
# subtly alter how vSphere and OpenStack interoperate.
def __init__(self, virtapi, scheme="https"):
super(VMwareVCDriver, self).__init__(virtapi)
if (CONF.vmware.host_ip is None or
CONF.vmware.host_username is None or
CONF.vmware.host_password is None):
raise Exception(_("Must specify host_ip, host_username and "
"host_password to use vmwareapi.VMwareVCDriver"))
self._datastore_regex = None
if CONF.vmware.datastore_regex:
try:
self._datastore_regex = re.compile(CONF.vmware.datastore_regex)
except re.error:
raise exception.InvalidInput(reason=
_("Invalid Regular Expression %s")
% CONF.vmware.datastore_regex)
self._session = VMwareAPISession(scheme=scheme)
# Update the PBM location if necessary
if CONF.vmware.pbm_enabled:
self._update_pbm_location()
self._validate_configuration()
# Get the list of clusters to be used
self._cluster_names = CONF.vmware.cluster_name
self.dict_mors = vm_util.get_all_cluster_refs_by_name(self._session,
self._cluster_names)
if not self.dict_mors:
raise exception.NotFound(_("All clusters specified %s were not"
" found in the vCenter")
% self._cluster_names)
# Check if there are any clusters that were specified in the nova.conf
# but are not in the vCenter, for missing clusters log a warning.
clusters_found = [v.get('name') for k, v in self.dict_mors.iteritems()]
missing_clusters = set(self._cluster_names) - set(clusters_found)
if missing_clusters:
LOG.warning(_LW("The following clusters could not be found in the "
"vCenter %s"), list(missing_clusters))
# The _resources is used to maintain the vmops, volumeops and vcstate
# objects per cluster
self._resources = {}
self._resource_keys = set()
self._virtapi = virtapi
self._update_resources()
# The following initialization is necessary since the base class does
# not use VC state.
first_cluster = self._resources.keys()[0]
self._vmops = self._resources.get(first_cluster).get('vmops')
self._volumeops = self._resources.get(first_cluster).get('volumeops')
self._vc_state = self._resources.get(first_cluster).get('vcstate')
# Register the OpenStack extension
self._register_openstack_extension()
@property
def need_legacy_block_device_info(self):
return False
def _update_pbm_location(self):
if CONF.vmware.pbm_wsdl_location:
pbm_wsdl_loc = CONF.vmware.pbm_wsdl_location
else:
version = vim_util.get_vc_version(self._session)
pbm_wsdl_loc = pbm.get_pbm_wsdl_location(version)
self._session.pbm_wsdl_loc_set(pbm_wsdl_loc)
def _validate_configuration(self):
if CONF.vmware.use_linked_clone is None:
raise vexc.UseLinkedCloneConfigurationFault()
if CONF.vmware.pbm_enabled:
if not CONF.vmware.pbm_default_policy:
raise error_util.PbmDefaultPolicyUnspecified()
if not pbm.get_profile_id_by_name(
self._session,
CONF.vmware.pbm_default_policy):
raise error_util.PbmDefaultPolicyDoesNotExist()
if CONF.vmware.datastore_regex:
LOG.warning(_LW(
"datastore_regex is ignored when PBM is enabled"))
self._datastore_regex = None
def init_host(self, host):
|
Neuvoo/legacy-portage
|
pym/portage/package/ebuild/fetch.py
|
Python
|
gpl-2.0
| 36,523
| 0.032254
|
# Copyright 2010 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import print_function
__all__ = ['fetch']
import codecs
import errno
import logging
import random
import re
import shutil
import stat
import sys
import tempfile
import portage
portage.proxy.lazyimport.lazyimport(globals(),
'portage.package.ebuild.config:check_config_instance,config',
'portage.package.ebuild.doebuild:doebuild_environment,' + \
'_doebuild_spawn',
'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
)
from portage import OrderedDict, os, selinux, _encodings, \
_shell_quote, _unicode_encode
from portage.checksum import perform_md5, verify_all
from portage.const import BASH_BINARY, CUSTOM_MIRRORS_FILE, \
GLOBAL_CONFIG_PATH
from portage.data import portage_gid, portage_uid, secpass, userpriv_groups
from portage.exception import FileNotFound, OperationNotPermitted, \
PermissionDenied, PortageException, TryAgain
from portage.localization import _
from portage.locks import lockfile, unlockfile
from portage.manifest import Manifest
from portage.output import colorize, EOutput
from portage.util import apply_recursive_permissions, \
apply_secpass_permissions, ensure_dirs, grabdict, shlex_split, \
varexpand, writemsg, writemsg_level, writemsg_stdout
from portage.process import spawn
_userpriv_spawn_kwargs = (
("uid", portage_uid),
("gid", portage_gid),
("groups", userpriv_groups),
("umask", 0o02),
)
def _spawn_fetch(settings, args, **kwargs):
"""
Spawn a process with appropriate settings for fetching, including
userfetch and selinux support.
"""
global _userpriv_spawn_kwargs
# Redirect all output to stdout since some fetchers like
# wget pollute stderr (if portage detects a problem then it
	# can send its own message to stderr).
if "fd_pipes" not in kwargs:
kwargs["fd_pipes"] = {
0 : sys.stdin.fileno(),
1 : sys.stdout.fileno(),
2 : sys.stdout.fileno(),
}
if "userfetch" in settings.features and \
os.getuid() == 0 and portage_gid and portage_uid:
kwargs.update(_userpriv_spawn_kwargs)
spawn_func = spawn
if settings.selinux_enabled():
spawn_func = selinux.spawn_wrapper(spawn_func,
settings["PORTAGE_FETCH_T"])
# bash is an allowed entrypoint, while most binaries are not
if args[0] != BASH_BINARY:
args = [BASH_BINARY, "-c", "exec \"$@\"", args[0]] + args
rval = spawn_func(args, env=settings.environ(), **kwargs)
return rval
_userpriv_test_write_file_cache = {}
_userpriv_test_write_cmd_script = "touch %(file_path)s 2>/dev/null ; rval=$? ; " + \
"rm -f %(file_path)s ; exit $rval"
def _userpriv_test_write_file(settings, file_path):
"""
Drop privileges and try to open a file for writing. The file may or
may not exist, and the parent directory is assumed to exist. The file
is removed before returning.
@param settings: A config instance which is passed to _spawn_fetch()
@param file_path: A file path to open and write.
@return: True if write succeeds, False otherwise.
"""
global _userpriv_test_write_file_cache, _userpriv_test_write_cmd_script
rval = _userpriv_test_write_file_cache.get(file_path)
if rval is not None:
return rval
args = [BASH_BINARY, "-c", _userpriv_test_write_cmd_script % \
{"file_path" : _shell_quote(file_path)}]
returncode = _spawn_fetch(settings, args)
rval = returncode == os.EX_OK
_userpriv_test_write_file_cache[file_path] = rval
return rval
def _checksum_failure_temp_file(distdir, basename):
"""
First try to find a duplicate temp file with the same checksum and return
that filename if available. Otherwise, use mkstemp to create a new unique
filename._checksum_failure_.$RANDOM, rename the given file, and return the
new filename. In any case, filename will be renamed or removed before this
function returns a temp filename.
"""
filename = os.path.join(distdir, basename)
size = os.stat(filename).st_size
checksum = None
tempfile_re = re.compile(re.escape(basename) + r'\._checksum_failure_\..*')
for temp_filename in os.listdir(distdir):
if not tempfile_re.match(temp_filename):
continue
temp_filename = os.path.join(distdir, temp_filename)
try:
if size != os.stat(temp_filename).st_size:
continue
except OSError:
continue
try:
temp_checksum = perform_md5(temp_filename)
except FileNotFound:
# Apparently the temp file disappeared. Let it go.
continue
if checksum is None:
checksum = perform_md5(filename)
if checksum == temp_checksum:
os.unlink(filename)
return temp_filename
fd, temp_filename = \
tempfile.mkstemp("", basename + "._checksum_failure_.", distdir)
os.close(fd)
os.rename(filename, temp_filename)
return temp_filename
def _check_digests(filename, digests, show_errors=1):
"""
Check digests and display a message if an error occurs.
@return True if all digests match, False otherwise.
"""
verified_ok, reason = verify_all(filename, digests)
if not verified_ok:
if show_errors:
writemsg(_("!!! Previously fetched"
" file: '%s'\n") % filename, noiselevel=-1)
writemsg(_("!!! Reason: %s\n") % reason[0],
noiselevel=-1)
writemsg(_("!!! Got: %s\n"
"!!! Expected: %s\n") % \
(reason[1], reason[2]), noiselevel=-1)
return False
return True
def _check_distfile(filename, digests, eout, show_errors=1):
"""
@return a tuple of (match, stat_obj) where match is True if filename
matches all given digests (if any) and stat_obj is a stat result, or
None if the file does not exist.
"""
if digests is None:
digests = {}
size = digests.get("size")
if size is not None and len(digests) == 1:
digests = None
try:
st = os.stat(filename)
except OSError:
return (False, None)
if size is not None and size != st.st_size:
return (False, st)
if not digests:
if size is not None:
eout.ebegin(_("%s size ;-)") % os.path.basename(filename))
eout.eend(0)
elif st.st_size == 0:
# Zero-byte distfiles are always invalid.
return (False, st)
else:
if _check_digests(filename, digests, show_errors=show_errors):
eout.ebegin("%s %s ;-)" % (os.path.basename(filename),
" ".join(sorted(digests))))
eout.eend(0)
else:
return (False, st)
return (True, st)
_fetch_resume_size_re = re.compile('(^[\d]+)([KMGTPEZY]?$)')
_size_suffix_map = {
'' : 0,
'K' : 10,
'M' : 20,
'G' : 30,
'T' : 40,
'P' : 50,
'E' : 60,
'Z' : 70,
'Y' : 80,
}
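# --- Editor's note: illustrative worked example, not part of the original file. ---
# _fetch_resume_size_re splits a size limit such as "350K" into ("350", "K"),
# and _size_suffix_map supplies a power-of-two exponent for the suffix, so a
# hypothetical PORTAGE_FETCH_RESUME_MIN_SIZE of "350K" would be read as
# 350 * 2**10 = 358400 bytes by the resume logic in fetch() below.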
def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",use_locks=1, try_mirrors=1):
"fetch files. Will use digest file if available."
if not myuris:
return 1
features = mysettings.features
restrict = mysettings.get("PORTAGE_RESTRICT","").split()
userfetch = secpass >= 2 and "userfetch" in features
userpriv = secpass >= 2 and "userpriv" in features
# 'nomirror' is bad/negative logic. You Restrict mirroring, not no-mirroring.
if "mirror" in restrict or \
"nomirror" in restrict:
if ("mirror" in features) and ("lmirror" not in features):
# lmirror should allow you to bypass mirror restrictions.
# XXX: This is not a good thing, and is temporary at best.
print(_(">>>
|
\"mirror\" mode desired and \"mirror\" restriction found; skipping fetch."))
return 1
# Generally, downloading the same file repeatedly from
# every single available mirror is a waste of bandwidth
# and time, so there needs to be a cap.
checksum_failure_max_tries = 5
v = checksum_failure_max_tries
try:
v = int(mysettings.get("PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS",
checksum_failure_max_tries))
except (ValueError, OverflowError):
writemsg(_("!!! Variable PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS"
" contains non-integer value: '%s'\n") % \
mysettings["PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS"], noiselevel=-1)
writemsg(_("!!! Using PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS "
"default value: %s\n") % checksum_failure_max_tries,
noiselevel=-1)
v = checksum_failure_max_tries
if v < 1:
writemsg(_("!!! Variable PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS"
" contains value less than 1: '%s'\n") % v, noiselevel=-1)
writemsg(_("!!! Using PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS "
"default value: %s\n") % checksum_failure_max_tries,
noiselev
|
square/pants
|
tests/python/pants_test/tasks/test_markdown_to_html.py
|
Python
|
apache-2.0
| 2,784
| 0.003951
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
import unittest2 as unittest
from pants.backend.core.tasks import markdown_to_html
ABC = """able
baker
charlie"""
class ChooseLinesTest(unittest.TestCase):
def test_include_no_params(self):
self.assertEquals(
markdown_to_html.choose_include_text(ABC, '', 'fake.md'),
'\n'.join(['able', 'baker', 'charlie']))
def test_include_start_at(self):
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-at=abl', 'fake.md'),
      '\n'.join(['able', 'baker', 'charlie']))
    self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-at=bak', 'fake.md'),
'\n'.join(['baker', 'charlie']))
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-at=xxx', 'fake.md'),
'')
def test_include_start_after(self):
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-after=bak', 'fake.md'),
'charlie')
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-after=cha', 'fake.md'),
'')
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-after=xxx', 'fake.md'),
'')
def test_include_end_at(self):
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'end-at=abl', 'fake.md'),
'able')
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'end-at=bak', 'fake.md'),
'\n'.join(['able', 'baker']))
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'end-at=xxx', 'fake.md'),
'')
def test_include_end_before(self):
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'end-before=abl', 'fake.md'),
'')
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'end-before=xxx', 'fake.md'),
'')
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'end-before=bak', 'fake.md'),
'able')
def test_include_start_at_end_at(self):
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-at=abl&end-at=abl', 'fake.md'),
'able')
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-at=cha&end-at=cha', 'fake.md'),
'charlie')
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-at=abl&end-at=bak', 'fake.md'),
'\n'.join(['able', 'baker']))
self.assertEquals(
markdown_to_html.choose_include_text(ABC, 'start-at=bak&end-at=abl', 'fake.md'),
'')
|
platformio/platformio-core
|
platformio/util.py
|
Python
|
apache-2.0
| 9,050
| 0.000994
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import math
import os
import platform
import re
import shutil
import time
from functools import wraps
from glob import glob
import click
import zeroconf
from platformio import __version__, exception, proc
from platformio.compat import IS_MACOS, IS_WINDOWS
from platformio.fs import cd, load_json # pylint: disable=unused-import
from platformio.proc import exec_command # pylint: disable=unused-import
class memoized(object):
def __init__(self, expire=0):
expire = str(expire)
if expire.isdigit():
expire = "%ss" % int((int(expire) / 1000))
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert expire.endswith(tuple(tdmap))
self.expire = int(tdmap[expire[-1]] * int(expire[:-1]))
self.cache = {}
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in self.cache or (
self.expire > 0 and self.cache[key][0] < time.time() - self.expire
):
self.cache[key] = (time.time(), func(*args, **kwargs))
return self.cache[key][1]
wrapper.reset = self._reset
return wrapper
def _reset(self):
self.cache.clear()
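# --- Editor's note: illustrative usage sketch, not part of the original file. ---
# memoized is a decorator factory: expire accepts either milliseconds or a
# number with an s/m/h/d suffix, and wrapper.reset() drops the cached entries.
#
#   @memoized(expire="10s")
#   def get_remote_index(url):
#       ...
#
#   get_remote_index("https://example.com")   # computed once, cached for 10 s
#   get_remote_index.reset()                  # clear the cache manually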
class throttle(object):
def __init__(self, threshhold):
self.threshhold = threshhold # milliseconds
self.last = 0
def __call__(self, func):
@wraps(func)
def wrapper(*args, **kwargs):
diff = int(round((time.time() - self.last) * 1000))
if diff < self.threshhold:
time.sleep((self.threshhold - diff) * 0.001)
self.last = time.time()
return func(*args, **kwargs)
return wrapper
def singleton(cls):
"""From PEP-318 http://www.python.org/dev/peps/pep-0318/#examples"""
_instances = {}
def get_instance(*args, **kwargs):
if cls not in _instances:
_instances[cls] = cls(*args, **kwargs)
return _instances[cls]
return get_instance
def get_systype():
type_ = platform.system().lower()
arch = platform.machine().lower()
if type_ == "windows" and "x86" in arch:
arch = "amd64" if "64" in arch else "x86"
return "%s_%s" % (type_, arch) if arch else type_
def get_serial_ports(filter_hwid=False):
try:
# pylint: disable=import-outside-toplevel
from serial.tools.list_ports import comports
except ImportError:
raise exception.GetSerialPortsError(os.name)
result = []
for p, d, h in comports():
if not p:
continue
if not filter_hwid or "VID:PID" in h:
result.append({"port": p, "description": d, "hwid": h})
if filter_hwid:
return result
# fix for PySerial
if not result and IS_MACOS:
for p in glob("/dev/tty.*"):
result.append({"port": p, "description": "n/a", "hwid": "n/a"})
return result
# Backward compatibility for PIO Core <3.5
get_serialports = get_serial_ports
def get_logical_devices():
items = []
if IS_WINDOWS:
try:
result = proc.exec_command(
["wmic", "logicaldisk", "get", "name,VolumeName"]
).get("out", "")
devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
for line in result.split("\n"):
match = devicenamere.match(line.strip())
if not match:
continue
items.append({"path": match.group(1) + "\\", "name": match.group(2)})
return items
except WindowsError: # pylint: disable=undefined-variable
pass
# try "fsutil"
result = proc.exec_command(["fsutil", "fsinfo", "drives"]).get("out", "")
for device in re.findall(r"[A-Z]:\\", result):
items.append({"path": device, "name": None})
return items
result = proc.exec_command(["df"]).get("out")
devicenamere = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
for line in result.split("\n"):
match = devicenamere.match(line.strip())
if not match:
continue
items.append({"path": match.group(1), "name": os.path.basename(match.group(1))})
return items
def get_mdns_services():
class mDNSListener(object):
def __init__(self):
self._zc = zeroconf.Zeroconf(interfaces=zeroconf.InterfaceChoice.All)
self._found_types = []
self._found_services = []
def __enter__(self):
zeroconf.ServiceBrowser(
self._zc,
[
"_http._tcp.local.",
"_hap._tcp.local.",
"_services._dns-sd._udp.local.",
],
self,
)
return self
def __exit__(self, etype, value, traceback):
self._zc.close()
def add_service(self, zc, type_, name):
try:
assert zeroconf.service_type_name(name)
assert str(name)
except (AssertionError, UnicodeError, zeroconf.BadTypeInNameException):
return
if name not in self._found_types:
self._found_types.append(name)
zeroconf.ServiceBrowser(self._zc, name, self)
if type_ in self._found_types:
s = zc.get_service_info(type_, name)
if s:
self._found_services.append(s)
def remove_service(self, zc, type_, name):
pass
def update_service(self, zc, type_, name):
pass
def get_services(self):
return self._found_services
items = []
with mDNSListener() as mdns:
time.sleep(3)
for service in mdns.get_services():
properties = None
if service.properties:
try:
                    properties = {
                        k.decode("utf8"): v.decode("utf8")
                        if isinstance(v, bytes)
else v
for k, v in service.properties.items()
}
json.dumps(properties)
except UnicodeDecodeError:
properties = None
items.append(
{
"type": service.type,
"name": service.name,
"ip": ", ".join(service.parsed_addresses()),
"port": service.port,
"properties": properties,
}
)
return items
def pioversion_to_intstr():
"""Legacy for framework-zephyr/scripts/platformio/platformio-build-pre.py"""
vermatch = re.match(r"^([\d\.]+)", __version__)
assert vermatch
return [int(i) for i in vermatch.group(1).split(".")[:3]]
def items_to_list(items):
if isinstance(items, list):
return items
return [i.strip() for i in items.split(",") if i.strip()]
def items_in_list(needle, haystack):
needle = items_to_list(needle)
haystack = items_to_list(haystack)
if "*" in needle or "*" in haystack:
return True
return set(needle) & set(haystack)
def parse_date(datestr):
if "T" in datestr and "Z" in datestr:
return time.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
return time.strptime(datestr)
def merge_dicts(d1, d2, path=None):
if path is None:
path = []
for key in d2:
if key in d1 and isinsta
|
nicproulx/mne-python
|
mne/commands/tests/test_commands.py
|
Python
|
bsd-3-clause
| 9,165
| 0
|
# -*- coding: utf-8 -*-
import os
from os import path as op
import shutil
import glob
import warnings
from nose.tools import assert_true, assert_raises
from numpy.testing import assert_equal, assert_allclose
from mne import concatenate_raws, read_bem_surfaces
from mne.commands import (mne_browse_raw, mne_bti2fiff, mne_clean_eog_ecg,
mne_compute_proj_ecg, mne_compute_proj_eog,
mne_coreg, mne_kit2fiff,
mne_make_scalp_surfaces, mne_maxfilter,
mne_report, mne_surf2bem, mne_watershed_bem,
mne_compare_fiff, mne_flash_bem, mne_show_fiff,
mne_show_info)
from mne.datasets import testing, sample
from mne.io import read_raw_fif
from mne.utils import (run_tests_if_main, _TempDir, requires_mne, requires_PIL,
requires_mayavi, requires_tvtk, requires_freesurfer,
ArgvSetter, slow_test, ultra_slow_test)
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
warnings.simplefilter('always')
def check_usage(module, force_help=False):
"""Helper to ensure we print usage"""
args = ('--help',) if force_help else ()
with ArgvSetter(args) as out:
try:
module.run()
except SystemExit:
pass
assert_true('Usage: ' in out.stdout.getvalue())
@slow_test
def test_browse_raw():
"""Test mne browse_raw."""
check_usage(mne_browse_raw)
def test_bti2fiff():
"""Test mne bti2fiff."""
check_usage(mne_bti2fiff)
def test_compare_fiff():
"""Test mne compare_fiff."""
check_usage(mne_compare_fiff)
def test_show_fiff():
"""Test mne compare_fiff."""
check_usage(mne_show_fiff)
with ArgvSetter((raw_fname,)):
mne_show_fiff.run()
@requires_mne
def test_clean_eog_ecg():
"""Test mne clean_eog_ecg."""
check_usage(mne_clean_eog_ecg)
tempdir = _TempDir()
raw = concatenate_raws([read_raw_fif(f)
for f in [raw_fname, raw_fname, raw_fname]])
raw.info['bads'] = ['MEG 2443']
use_fname = op.join(tempdir, op.basename(raw_fname))
raw.save(use_fname)
with ArgvSetter(('-i', use_fname, '--quiet')):
mne_clean_eog_ecg.run()
fnames = glob.glob(op.join(tempdir, '*proj.fif'))
assert_true(len(fnames) == 2) # two projs
fnames = glob.glob(op.join(tempdir, '*-eve.fif'))
assert_true(len(fnames) == 3) # raw plus two projs
@slow_test
def test_compute_proj_ecg_eog():
"""Test mne compute_proj_ecg/eog."""
for fun in (mne_compute_proj_ecg, mne_compute_proj_eog):
check_usage(fun)
tempdir = _TempDir()
use_fname = op.join(tempdir, op.basename(raw_fname))
bad_fname = op.join(tempdir, 'bads.txt')
with open(bad_fname, 'w') as fid:
fid.write('MEG 2443\n')
shutil.copyfile(raw_fname, use_fname)
with ArgvSetter(('-i', use_fname, '--bad=' + bad_fname,
'--rej-eeg', '150')):
fun.run()
fnames = glob.glob(op.join(tempdir, '*proj.fif'))
assert_true(len(fnames) == 1)
fnames = glob.glob(op.join(tempdir, '*-eve.fif'))
assert_true(len(fnames) == 1)
def test_coreg():
"""Test mne coreg."""
assert_true(hasattr(mne_coreg, 'run'))
def test_kit2fiff():
"""Test mne kit2fiff."""
# Can't check
check_usage(mne_kit2fiff, force_help=True)
@requires_tvtk
@testing.requires_testing_data
def test_make_scalp_surfaces():
"""Test mne make_scalp_surfaces."""
check_usage(mne_make_scalp_surfaces)
# Copy necessary files to avoid FreeSurfer call
tempdir = _TempDir()
surf_path = op.join(subjects_dir, 'sample', 'surf')
surf_path_new = op.join(tempdir, 'sample', 'surf')
os.mkdir(op.join(tempdir, 'sample'))
os.mkdir(surf_path_new)
subj_dir = op.join(tempdir, 'sample', 'bem')
os.mkdir(subj_dir)
shutil.copy(op.join(surf_path, 'lh.seghead'), surf_path_new)
orig_fs = os.getenv('FREESURFER_HOME', None)
if orig_fs is not None:
del os.environ['FREESURFER_HOME']
cmd = ('-s', 'sample', '--subjects-dir', tempdir)
os.environ['_MNE_TESTING_SCALP'] = 'true'
dense_fname = op.join(subj_dir, 'sample-head-dense.fif')
    medium_fname = op.join(subj_dir, 'sample-head-medium.fif')
try:
with ArgvSetter(cmd, disable_stdout=False, disable_stderr=False):
assert_raises(RuntimeError, mne_make_scalp_surfaces.run)
os.environ['FREESURFER_HOME'] = tempdir # don't actually use it
mne_make_scalp_surfaces.run()
assert_true(op.isfile(dense_fname))
assert_true(op.isfile(medium_fname))
            assert_raises(IOError, mne_make_scalp_surfaces.run)  # no overwrite
finally:
if orig_fs is not None:
os.environ['FREESURFER_HOME'] = orig_fs
else:
del os.environ['FREESURFER_HOME']
del os.environ['_MNE_TESTING_SCALP']
# actually check the outputs
head_py = read_bem_surfaces(dense_fname)
assert_equal(len(head_py), 1)
head_py = head_py[0]
head_c = read_bem_surfaces(op.join(subjects_dir, 'sample', 'bem',
'sample-head-dense.fif'))[0]
assert_allclose(head_py['rr'], head_c['rr'])
def test_maxfilter():
"""Test mne maxfilter."""
check_usage(mne_maxfilter)
with ArgvSetter(('-i', raw_fname, '--st', '--movecomp', '--linefreq', '60',
'--trans', raw_fname)) as out:
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
os.environ['_MNE_MAXFILTER_TEST'] = 'true'
try:
mne_maxfilter.run()
finally:
del os.environ['_MNE_MAXFILTER_TEST']
assert_true(len(w) == 1)
for check in ('maxfilter', '-trans', '-movecomp'):
assert_true(check in out.stdout.getvalue(), check)
@slow_test
@requires_mayavi
@requires_PIL
@testing.requires_testing_data
def test_report():
"""Test mne report."""
check_usage(mne_report)
tempdir = _TempDir()
use_fname = op.join(tempdir, op.basename(raw_fname))
shutil.copyfile(raw_fname, use_fname)
with ArgvSetter(('-p', tempdir, '-i', use_fname, '-d', subjects_dir,
'-s', 'sample', '--no-browser', '-m', '30')):
mne_report.run()
fnames = glob.glob(op.join(tempdir, '*.html'))
assert_true(len(fnames) == 1)
def test_surf2bem():
"""Test mne surf2bem."""
check_usage(mne_surf2bem)
@ultra_slow_test
@requires_freesurfer
@testing.requires_testing_data
def test_watershed_bem():
"""Test mne watershed bem."""
check_usage(mne_watershed_bem)
# Copy necessary files to tempdir
tempdir = _TempDir()
mridata_path = op.join(subjects_dir, 'sample', 'mri')
mridata_path_new = op.join(tempdir, 'sample', 'mri')
os.mkdir(op.join(tempdir, 'sample'))
os.mkdir(mridata_path_new)
if op.exists(op.join(mridata_path, 'T1')):
shutil.copytree(op.join(mridata_path, 'T1'), op.join(mridata_path_new,
'T1'))
if op.exists(op.join(mridata_path, 'T1.mgz')):
shutil.copyfile(op.join(mridata_path, 'T1.mgz'),
op.join(mridata_path_new, 'T1.mgz'))
with ArgvSetter(('-d', tempdir, '-s', 'sample', '-o'),
disable_stdout=False, disable_stderr=False):
mne_watershed_bem.run()
@ultra_slow_test
@requires_freesurfer
@sample.requires_sample_data
def test_flash_bem():
"""Test mne flash_bem."""
check_usage(mne_flash_bem, force_help=True)
# Using the sample dataset
subjects_dir = op.join(sample.data_path(download=False), 'subjects')
# Copy necessary files to tempdir
tempdir = _TempDir()
mridata_path = op.join(subjects_dir, 'sample', 'mri')
mridata_path_new = op.join(tempdir, 'sample', 'mri')
os.makedirs(op.join(mridata_path_new, 'flash'))
os.makedirs(op.join(tempdir, 'sample', 'bem'))
|
motherjones/mirrors
|
mirrors/migrations/0004_auto_20140609_1943.py
|
Python
|
mit
| 444
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mirrors', '0003_componentrevision_version'),
]
operations = [
migrations.AlterField(
model_name='componentrevision',
name='data',
            field=models.BinaryField(null=True, editable=False, blank=True),
),
]
|
beiko-lab/gengis
|
bin/Lib/site-packages/numpy/polynomial/tests/test_hermite.py
|
Python
|
gpl-3.0
| 17,416
| 0.006833
|
"""Tests for hermite module.
"""
from __future__ import division
import numpy as np
import numpy.polynomial.hermite as herm
from numpy.polynomial.polynomial import polyval
from numpy.testing import (
TestCase, assert_almost_equal, assert_raises,
assert_equal, assert_, run_module_suite)
H0 = np.array([ 1])
H1 = np.array([0, 2])
H2 = np.array([ -2, 0, 4])
H3 = np.array([0, -12, 0, 8])
H4 = np.array([ 12, 0, -48, 0, 16])
H5 = np.array([0, 120, 0, -160, 0, 32])
H6 = np.array([-120, 0, 720, 0, -480, 0, 64])
H7 = np.array([0, -1680, 0, 3360, 0, -1344, 0, 128])
H8 = np.array([1680, 0, -13440, 0, 13440, 0, -3584, 0, 256])
H9 = np.array([0, 30240, 0, -80640, 0, 48384, 0, -9216, 0, 512])
Hlist = [H0, H1, H2, H3, H4, H5, H6, H7, H8, H9]
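# --- Editor's note: illustrative check, not part of the original file. ---
# The coefficient arrays above are the physicists' Hermite polynomials in the
# ordinary power basis; they satisfy the recurrence
#   H_{n+1}(x) = 2*x*H_n(x) - 2*n*H_{n-1}(x),
# e.g. H3 = 2*x*H2 - 4*H1 gives [0, -12, 0, 8] from [-2, 0, 4] and [0, 2].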
def trim(x) :
return herm.hermtrim(x, tol=1e-6)
class TestConstants(TestCase) :
def test_hermdomain(self) :
assert_equal(herm.hermdomain, [-1, 1])
def test_hermzero(self) :
assert_equal(herm.hermzero, [0])
def test_hermone(self) :
assert_equal(herm.hermone, [1])
def test_hermx(self) :
assert_equal(herm.hermx, [0, .5])
class TestArithmetic(TestCase) :
x = np.linspace(-3, 3, 100)
def test_hermadd(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] += 1
res = herm.hermadd([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_hermsub(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] -= 1
res = herm.hermsub([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_hermmulx(self):
assert_equal(herm.hermmulx([0]), [0])
assert_equal(herm.hermmulx([1]), [0,.5])
for i in range(1, 5):
ser = [0]*i + [1]
tgt = [0]*(i - 1) + [i, 0, .5]
assert_equal(herm.hermmulx(ser), tgt)
def test_hermmul(self) :
# check values of result
for i in range(5) :
pol1 = [0]*i + [1]
val1 = herm.hermval(self.x, pol1)
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
pol2 = [0]*j + [1]
val2 = herm.hermval(self.x, pol2)
pol3 = herm.hermmul(pol1, pol2)
val3 = herm.hermval(self.x, pol3)
assert_(len(pol3) == i + j + 1, msg)
assert_almost_equal(val3, val1*val2, err_msg=msg)
def test_hermdiv(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
ci = [0]*i + [1]
cj = [0]*j + [1]
tgt = herm.hermadd(ci, cj)
quo, rem = herm.hermdiv(tgt, ci)
res = herm.hermadd(herm.hermmul(quo, ci), rem)
assert_equal(trim(res), trim(tgt), err_msg=msg)
class TestEvaluation(TestCase) :
# coefficients of 1 + 2*x + 3*x**2
c1d = np.array([2.5, 1., .75])
c2d = np.einsum('i,j->ij', c1d, c1d)
c3d = np.einsum('i,j,k->ijk', c1d, c1d, c1d)
# some random values in [-1, 1)
x = np.random.random((3, 5))*2 - 1
y = polyval(x, [1., 2., 3.])
def test_hermval(self) :
#check empty input
assert_equal(herm.hermval([], [1]).size, 0)
        #check normal input
x = np.linspace(-1,1)
y = [polyval(x, c) for c in Hlist]
for i in range(10) :
msg = "At i=%d" % i
ser = np.zeros
tgt = y[i]
res = herm.hermval(x, [0]*i + [1])
assert_almost_equal(res, tgt, err_msg=msg)
#check that shape is preserved
for i in range(3) :
dims = [2]*i
x = np.zeros(dims)
assert_equal(herm.hermval(x, [1]).shape, dims)
assert_equal(herm.hermval(x, [1,0]).shape, dims)
assert_equal(herm.hermval(x, [1,0,0]).shape, dims)
def test_hermval2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test exceptions
assert_raises(ValueError, herm.hermval2d, x1, x2[:2], self.c2d)
#test values
tgt = y1*y2
res = herm.hermval2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2,3))
res = herm.hermval2d(z, z, self.c2d)
assert_(res.shape == (2,3))
def test_hermval3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test exceptions
assert_raises(ValueError, herm.hermval3d, x1, x2, x3[:2], self.c3d)
#test values
tgt = y1*y2*y3
res = herm.hermval3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2,3))
res = herm.hermval3d(z, z, z, self.c3d)
assert_(res.shape == (2,3))
def test_hermgrid2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test values
tgt = np.einsum('i,j->ij', y1, y2)
res = herm.hermgrid2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2,3))
res = herm.hermgrid2d(z, z, self.c2d)
assert_(res.shape == (2, 3)*2)
def test_hermgrid3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test values
tgt = np.einsum('i,j,k->ijk', y1, y2, y3)
res = herm.hermgrid3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2,3))
res = herm.hermgrid3d(z, z, z, self.c3d)
assert_(res.shape == (2, 3)*3)
class TestIntegral(TestCase) :
def test_hermint(self) :
# check exceptions
assert_raises(ValueError, herm.hermint, [0], .5)
assert_raises(ValueError, herm.hermint, [0], -1)
        assert_raises(ValueError, herm.hermint, [0], 1, [0,0])
# test integration of zero polynomial
for i in range(2, 5):
k = [0]*(i - 2) + [1]
res = herm.hermint([0], m=i, k=k)
assert_almost_equal(res, [0, .5])
# check single integration with integration constant
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [1/scl]
hermpol = herm.poly2herm(pol)
hermint = herm.hermint(hermpol, m=1, k=[i])
res = herm.herm2poly(hermint)
assert_almost_equal(trim(res), trim(tgt))
# check single integration with integration constant and lbnd
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
hermpol = herm.poly2herm(pol)
hermint = herm.hermint(hermpol, m=1, k=[i], lbnd=-1)
assert_almost_equal(herm.hermval(-1, hermint), i)
# check single integration with integration constant and scaling
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [2/scl]
hermpol = herm.poly2herm(pol)
hermint = herm.hermint(hermpol, m=1, k=[i], scl=2)
res = herm.herm2poly(hermint)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with default k
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = herm.hermint(tgt, m=1)
res = herm.hermint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with defined k
for i in range(5) :
|
sitture/trade-motors
|
src/vehicles/views.py
|
Python
|
mit
| 3,897
| 0.000257
|
from django.shortcuts import render, get_object_or_404
# import the custom context processor
from vehicles.context_processor import global_context_processor
from vehicles.models import Vehicle, VehicleMake, Category
from settings.models import SliderImage
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from dynamic_preferences.registries import global_preferences_registry
def home_page(request):
# instanciate a manager for global preferences
global_preferences = global_preferences_registry.manager()
MAX_VEHICLES_TO_SHOW = global_preferences['homepage__number_of_vehicles']
MAX_CATEGORIES_TO_SHOW = 4
# get list of slider objects
sliders = SliderImage.objects.all()
# get categories to show on homepage
top_categories = Category.objects.get_home_page_categories()
if top_categories:
top_categories = top_categories[:MAX_CATEGORIES_TO_SHOW]
# get recently added vehicles
top_vehicles = Vehicle.objects.all().order_by(
'-timestamp').prefetch_related('images')
if top_vehicles:
top_vehicles = top_vehicles[:MAX_VEHICLES_TO_SHOW]
context = global_context_processor(locals())
return render(request, "home_page.html", context)
def exports_page(request):
context = global_context_processor(locals())
return render(request, "exports_page.html", context)
def how_to_buy(request):
context = global_context_processor(locals())
return render(request, "how_to_buy.html", context)
def category_page(request, slug):
# check if make slug parameter is passed into the url
vehicle_make_slug = request.GET.get('make', None)
# get category by slug
category = Category.objects.get_category_by_slug(slug)
# get all the vehicles by the category and make (if provided)
if vehicle_make_slug:
# get make by slug
make = VehicleMake.objects.get_make_by_slug(vehicle_make_slug)
if category:
vehicles_list = Vehicle.objects.get_vehicles_by_category_and_make(
category, make
).prefetch_related('images')
else:
            vehicles_list = Vehicle.objects.get_vehicles_by_make(
make
).prefetch_related('images')
else:
# if category is not found then get all of the vehicles
if category:
vehicles_list = Vehicle.objects.get_vehicles_by_category(
category
).prefetch_related('images')
else:
vehicles_list = Vehicle.objects.all().prefetch_related('images')
    # paginate vehicle list for 16 items per page
paginator = Paginator(vehicles_list, 16)
try:
page = int(request.GET.get("page", '1'))
except ValueError:
page = 1
try:
vehicles = paginator.page(page)
except (InvalidPage, EmptyPage):
vehicles = paginator.page(paginator.num_pages)
makes = get_makes_in_category(category)
context = global_context_processor(locals())
return render(request, "categories_page.html", context)
def vehicle_detail_page(request, category_slug, vehicle_id, vehicle_slug):
# get vehicle details by vehicle_id
vehicle = get_object_or_404(Vehicle, id=vehicle_id)
related_vehicles = Vehicle.objects.get_vehicles_by_category(
vehicle.category)
return render(request, "detail_page.html", global_context_processor(locals()))
def get_makes_in_category(category):
makes_in_category = []
# get all the vehicle objects by category
vehicles_in_category = Vehicle.objects.get_vehicles_by_category(
category=category)
for vehicle in vehicles_in_category:
makes_in_category.append(vehicle.make)
# remove duplicate makes from the list
makes_in_category = list(set(makes_in_category))
makes_in_category = sorted(makes_in_category, key=lambda x: x.v_make)
return makes_in_category
|
3dfxsoftware/cbss-addons
|
sale_multicompany_report/order.py
|
Python
|
gpl-2.0
| 1,880
| 0.005319
|
# -*- encoding: utf-8 -*-
from openerp.osv import fields, osv
from openerp.tools.translate import _
class sale_order_line(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order.line'
_columns = {
'att_bro': fields.boolean('Attach Brochure', required=False, help="""If you check this
option, the first attachment related to the product_id marked as brochure will be printed
as extra info with sale order"""),
}
class sale_order(osv.Model):
"""
OpenERP Model : sale_order_line
"""
_inherit = 'sale.order'
def print_with_attachment(self, cr, user, ids, context={}):
for o in self.browse(cr, user, ids, context):
for ol in o.order_line:
if ol.att_bro:
print "Im Here i will go to print %s " % ol.name
return True
def __get_company_object(self, cr, uid):
user = self.pool.get('res.users').browse(cr, uid, uid)
print user
if not user.company_id:
            raise osv.except_osv(_('ERROR !'), _(
                'There is no company configured for this user'))
return user.company_id
def _get_report_name(self, cr, uid, context):
report = self.__get_company_object(cr, uid).sale_report_id
if not report:
rep_id = self.pool.get("ir.actions.report.xml").search(
cr, uid, [('model', '=', 'sale.order'), ], order="id")[0]
report = self.pool.get(
"ir.actions.report.xml").browse(cr, uid, rep_id)
return report.report_name
def print_quotation(self, cr, uid, ids, context=None):
pq = super(sale_order, self).print_quotation(cr,uid,ids, context)
return {'type': 'ir.actions.report.xml', 'report_name': self._get_report_name(cr, uid,
context), 'datas': pq['datas'], 'nodestroy': True}
|
trivigy/aiologin
|
aiologin/__init__.py
|
Python
|
mit
| 7,901
| 0.000506
|
import asyncio
from abc import ABCMeta
from collections.abc import MutableMapping
from aiohttp import web
from aiohttp.web_request import Request
from aiohttp_session import get_session
from collections.abc import Sequence
AIOLOGIN_KEY = '__aiologin__'
ON_LOGIN = 1
ON_LOGOUT = 2
ON_AUTHENTICATED = 3
ON_FORBIDDEN = 4
ON_UNAUTHORIZED = 5
class AbstractUser(MutableMapping, metaclass=ABCMeta):
def __iter__(self):
return self.__dict__.__iter__()
def __len__(self):
return len(self.__dict__)
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value):
setattr(self, key, value)
def __delitem__(self, key):
delattr(self, key)
@property
def authenticated(self):
        raise NotImplementedError()
@property
def forbidden(self):
        raise NotImplementedError()
class AnonymousUser(AbstractUser):
@property
def authenticated(self):
return False
@property
def forbidden(self):
return False
# noinspection PyUnusedLocal
@asyncio.coroutine
def _unauthorized(*args, **kwargs):
raise web.HTTPUnauthorized()
# noinspection PyUnusedLocal
@asyncio.coroutine
def _forbidden(*args, **kwargs):
raise web.HTTPForbidden()
# noinspection PyUnusedLocal
@asyncio.coroutine
def _void(*args, **kwargs):
    raise NotImplementedError()
class AioLogin:
def __init__(self, request, session_name=AIOLOGIN_KEY, disabled=False,
auth_by_form=_void, auth_by_header=_void,
auth_by_session=_void, forbidden=_forbidden,
unauthorized=_unauthorized, anonymous_user=AnonymousUser,
session=get_session, signals=None):
self._request = request
self._disabled = disabled
self._session_name = session_name
self._anonymous_user = anonymous_user
self._session = session
self._auth_by_form = auth_by_form
self._auth_by_header = auth_by_header
self._auth_by_session = auth_by_session
self._unauthorized = unauthorized
self._forbidden = forbidden
self._on_login = []
self._on_logout = []
self._on_authenticated = []
self._on_forbidden = []
self._on_unauthorized = []
assert isinstance(signals, (type(None), Sequence)), \
"Excepted {!r} but received {!r}".format(Sequence, signals)
signals = [] if signals is None else signals
for sig in signals:
assert isinstance(sig, Sequence), \
"Excepted {!r} but received {!r}".format(Sequence, signals)
is_coro = asyncio.iscoroutinefunction(sig[1])
assert len(sig) == 2 and 1 <= sig[0] <= 7 and is_coro, \
"Incorrectly formatted signal argument {}".format(sig)
if sig[0] == 1:
self._on_login.append(sig[1])
elif sig[0] == 2:
self._on_logout.append(sig[1])
elif sig[0] == 3:
self._on_authenticated.append(sig[1])
elif sig[0] == 4:
self._on_forbidden.append(sig[1])
elif sig[0] == 5:
self._on_unauthorized.append(sig[1])
@asyncio.coroutine
def authenticate(self, *args, remember=False, **kwargs):
assert isinstance(remember, bool), \
"Expected {!r} but received {!r}".format(type(bool), type(remember))
user = yield from self._auth_by_form(self._request, *args, **kwargs)
if user is None:
for coro in self._on_unauthorized:
yield from coro(self._request)
raise web.HTTPUnauthorized
for coro in self._on_authenticated:
yield from coro(self._request)
yield from self.login(user, remember=remember)
@asyncio.coroutine
def login(self, user, remember):
assert isinstance(user, AbstractUser), \
"Expected {} but received {}".format(type(AbstractUser), type(user))
assert isinstance(remember, bool), \
"Expected {!r} but received {!r}".format(type(bool), type(remember))
session = yield from self._session(self._request)
try:
session.remember = remember
except:
session['_remember'] = remember
session[self._session_name] = dict(user)
for coro in self._on_login:
yield from coro(self._request)
@asyncio.coroutine
def logout(self):
session = yield from self._session(self._request)
session.invalidate()
for coro in self._on_logout:
yield from coro(self._request)
@asyncio.coroutine
def auth_by_header(self):
key = self._request.headers.get('AUTHORIZATION', None)
if key is None:
return None
return (yield from self._auth_by_header(self._request, key))
@asyncio.coroutine
def auth_by_session(self):
session = yield from self._session(self._request)
profile = session.get(self._session_name, None)
if profile is None:
return None
user = yield from self._auth_by_session(self._request, profile)
if user is None:
return None
return user
@property
def on_login(self):
return self._on_login
@property
def disabled(self):
return self._disabled
@property
def unauthorized(self):
return self._unauthorized
@property
def forbidden(self):
return self._forbidden
@property
    def anonymous_user(self):
return self._anonymous_user
def setup(app, **kwargs):
app.middlewares.append(middleware_factory(**kwargs))
def middleware_factory(**options):
# noinspection PyUnusedLocal
@asyncio.coroutine
def aiologin_middleware(app, handler):
@asyncio.coroutine
def aiologin_handler(*args, **kwargs):
request = kwargs['request'] if 'request' in kwargs else args[0]
kwargs = {k: v for (k, v) in kwargs.items() if k != 'request'}
# noinspection PyTypeChecker
manager = options.get('manager', AioLogin)
request.aiologin = manager(request=request, **options)
return (yield from handler(request=request, **kwargs))
return aiologin_handler
return aiologin_middleware
def secured(func):
@asyncio.coroutine
def wrapper(*args, **kwargs):
request = kwargs['request'] if 'request' in kwargs else args[0]
kwargs = {k: v for (k, v) in kwargs.items() if k != 'request'}
if not isinstance(request, Request):
request = args[0].request
elif request not in args:
args = (request,) + args
if request.aiologin.disabled:
return (yield from func(*args, **kwargs))
user = yield from request.aiologin.auth_by_header()
if user is None:
user = yield from request.aiologin.auth_by_session()
if user is None:
user = request.aiologin.anonymous_user()
assert isinstance(user, AbstractUser), \
"Expected 'user' of type AbstractUser by got {}".format(type(user))
if not user.authenticated:
# noinspection PyProtectedMember
for coro in request.aiologin._on_unauthorized:
yield from coro(request)
return (yield from request.aiologin.unauthorized(*args, **kwargs))
if user.forbidden:
# noinspection PyProtectedMember
for coro in request.aiologin._on_forbidden:
yield from coro(request)
return (yield from request.aiologin.forbidden(*args, **kwargs))
request.current_user = user
# noinspection PyProtectedMember
for coro in request.aiologin._on_authenticated:
yield from coro(request)
return (yield from func(*args, **kwargs))
return wrapper
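# Illustrative sketch (editorial addition, not part of the original module): a
# minimal aiohttp wiring of setup() and the @secured decorator defined above.
# The handler name and route are assumptions; a real application must also
# install aiohttp_session middleware and pass working auth_by_* coroutines to
# setup(), since the _void defaults above only raise.
def _example_app():
    @secured
    @asyncio.coroutine
    def whoami(request):
        # current_user is attached by @secured before the handler runs.
        return web.Response(text=str(request.current_user.authenticated))

    app = web.Application()
    setup(app)  # appends the aiologin middleware with default options
    app.router.add_route('GET', '/whoami', whoami)
    return app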
|
TomMinor/MayaPerforce
|
Perforce/GUI.py
|
Python
|
mit
| 75,889
| 0.001146
|
import os
import re
import traceback
import logging
import platform
from distutils.version import StrictVersion
from PySide import QtCore
from PySide import QtGui
from P4 import P4, P4Exception, Progress, OutputHandler
import Utils
import AppUtils
import GlobalVars
import Callbacks
reload(Utils)
reload(AppUtils)
reload(GlobalVars)
reload(Callbacks)
version = '1.1.3'
mainParent = AppUtils.main_parent_window()
iconPath = GlobalVars.iconPath
tempPath = GlobalVars.tempPath
P4Icon = GlobalVars.P4Icon
sceneFiles = GlobalVars.sceneFiles
p4_logger = logging.getLogger("Perforce")
def displayErrorUI(e):
error_ui = QtGui.QMessageBox()
error_ui.setWindowFlags(QtCore.Qt.WA_DeleteOnClose)
eMsg, type = Utils.parsePerforceError(e)
if type == "warning":
error_ui.warning(mainParent, "Perforce Warning", eMsg)
elif type == "error":
error_ui.critical(mainParent, "Perforce Error", eMsg)
else:
error_ui.information(mainParent, "Perforce Error", eMsg)
error_ui.deleteLater()
class TestOutputAndProgress(Progress, OutputHandler):
def __init__(self, ui):
Progress.__init__(self)
OutputHandler.__init__(self)
self.totalFiles = 0
self.totalSizes = 0
self.ui = ui
self.ui.setMinimum(0)
self.ui.setHandler(self)
self.shouldCancel = False
def setCancel(self, val):
self.shouldCancel = val
def outputStat(self, stat):
if 'totalFileCount' in stat:
self.totalFileCount = int(stat['totalFileCount'])
print "TOTAL FILE COUNT: ", self.totalFileCount
if 'totalFileSize' in stat:
self.totalFileSize = int(stat['totalFileSize'])
print "TOTAL FILE SIZE: ", self.totalFileSize
if self.shouldCancel:
return OutputHandler.REPORT | OutputHandler.CANCEL
else:
return OutputHandler.HANDLED
def outputInfo(self, info):
AppUtils.refresh()
print "INFO :", info
if self.shouldCancel:
return OutputHandler.REPORT | OutputHandler.CANCEL
else:
return OutputHandler.HANDLED
def outputMessage(self, msg):
AppUtils.refresh()
print "Msg :", msg
if self.shouldCancel:
return OutputHandler.REPORT | OutputHandler.CANCEL
else:
return OutputHandler.HANDLED
def init(self, type):
AppUtils.refresh()
print "Begin :", type
self.type = type
self.ui.incrementCurrent()
def setDescription(self, description, unit):
AppUtils.refresh()
print "Desc :", description, unit
pass
def setTotal(self, total):
AppUtils.refresh()
print "Total :", total
self.ui.setMaximum(total)
pass
def update(self, position):
AppUtils.refresh()
print "Update : ", position
self.ui.setValue(position)
self.position = position
def done(self, fail):
AppUtils.refresh()
print "Failed :", fail
self.fail = fail
class SubmitProgressUI(QtGui.QDialog):
def __init__(self, totalFiles, parent=mainParent):
super(SubmitProgressUI, self).__init__(parent)
self.handler = None
self.totalFiles = totalFiles
self.currentFile = 0
def setHandler(self, handler):
self.handler = handler
def setMaximum(self, val):
self.fileProgressBar.setMaximum(val)
def setMinimum(self, val):
self.fileProgressBar.setMinimum(val)
    def setValue(self, val):
self.fileProgressBar.setValue(val)
def incrementCurrent(self):
self.currentFile += 1
self.overallProgressBar.setValue(self.currentFile)
print self.totalFiles, self.currentFile
if self.currentFile >= self.totalFiles:
            self.setComplete(True)
def setComplete(self, success):
if not success:
self.overallProgressBar.setTextVisible(True)
self.overallProgressBar.setFormat("Cancelled/Error")
self.fileProgressBar.setTextVisible(True)
self.fileProgressBar.setFormat("Cancelled/Error")
self.quitBtn.setText("Quit")
def create(self, title, files=[]):
path = iconPath + "p4.png"
icon = QtGui.QIcon(path)
self.setWindowTitle(title)
self.setWindowIcon(icon)
self.setWindowFlags(QtCore.Qt.Dialog)
self.create_controls()
self.create_layout()
self.create_connections()
def create_controls(self):
'''
Create the widgets for the dialog
'''
self.overallProgressBar = QtGui.QProgressBar()
self.overallProgressBar.setMinimum(0)
self.overallProgressBar.setMaximum(self.totalFiles)
self.overallProgressBar.setValue(0)
self.fileProgressBar = QtGui.QProgressBar()
self.fileProgressBar.setMinimum(0)
self.fileProgressBar.setMaximum(100)
self.fileProgressBar.setValue(0)
self.quitBtn = QtGui.QPushButton("Cancel")
def create_layout(self):
'''
Create the layouts and add widgets
'''
main_layout = QtGui.QVBoxLayout()
main_layout.setContentsMargins(6, 6, 6, 6)
formlayout1 = QtGui.QFormLayout()
formlayout1.addRow("Total Progress:", self.overallProgressBar)
formlayout1.addRow("File Progress:", self.fileProgressBar)
main_layout.addLayout(formlayout1)
main_layout.addWidget(self.quitBtn)
self.setLayout(main_layout)
def create_connections(self):
'''
Create the signal/slot connections
'''
# self.fileTree.clicked.connect( self.loadFileLog )
self.quitBtn.clicked.connect(self.cancelProgress)
#--------------------------------------------------------------------------
# SLOTS
#--------------------------------------------------------------------------
def cancelProgress(self, *args):
self.quitBtn.setText("Cancelling...")
self.handler.setCancel(True)
#
class SubmitChangeUi(QtGui.QDialog):
def __init__(self, parent=mainParent):
super(SubmitChangeUi, self).__init__(parent)
def create(self, p4, files=[]):
self.p4 = p4
path = iconPath + P4Icon.iconName
icon = QtGui.QIcon(path)
self.setWindowTitle("Submit Change")
self.setWindowIcon(icon)
self.setWindowFlags(QtCore.Qt.Window)
self.fileList = files
self.create_controls()
self.create_layout()
self.create_connections()
self.validateText()
def create_controls(self):
'''
Create the widgets for the dialog
'''
self.submitBtn = QtGui.QPushButton("Submit")
self.descriptionWidget = QtGui.QPlainTextEdit("<Enter Description>")
self.descriptionLabel = QtGui.QLabel("Change Description:")
self.chkboxLockedWidget = QtGui.QCheckBox("Keep files checked out?")
headers = [" ", "File", "Type", "Action", "Folder"]
self.tableWidget = QtGui.QTableWidget(len(self.fileList), len(headers))
self.tableWidget.setMaximumHeight(200)
self.tableWidget.setMinimumWidth(500)
self.tableWidget.setHorizontalHeaderLabels(headers)
for i, file in enumerate(self.fileList):
# Saves us manually keeping track of the current column
column = 0
# Create checkbox in first column
widget = QtGui.QWidget()
layout = QtGui.QHBoxLayout()
chkbox = QtGui.QCheckBox()
chkbox.setCheckState(QtCore.Qt.Checked)
layout.addWidget(chkbox)
layout.setAlignment(QtCore.Qt.AlignCenter)
layout.setContentsMargins(0, 0, 0, 0)
widget.setLayout(layout)
self.tableWidget.setCellWidget(i, column, widget)
column += 1
# Fill in the rest of the data
# File
fileName = file['File']
newItem = QtGui.QTableWidgetItem(os.path.basename(fileName))
newItem.setFlags(newItem.flags() ^ QtCore.Qt.ItemIsEditable)
self
|
TakesxiSximada/syaml
|
src/syaml/commands/__init__.py
|
Python
|
apache-2.0
| 53
| 0
|
import jumon
def main():
jumon.entry(__name__)
| ||
IQSS/miniverse
|
dv_apps/metrics/stats_views_dataverses.py
|
Python
|
mit
| 6,085
| 0.004108
|
from .stats_view_base import StatsViewSwagger, StatsViewSwaggerKeyRequired
from .stats_util_dataverses import StatsMakerDataverses
class DataverseCountByMonthView(StatsViewSwaggerKeyRequired):
"""API View - Dataverse counts by Month."""
# Define the swagger attributes
# Note: api_path must match the path in urls.py
#
api_path = '/dataverses/count/monthly'
summary = ('Number of published Dataverses by'
' the month they were created*. (*'
' Not month published)')
description = ('Returns a list of counts and'
' cumulative counts of all Dataverses added in a month')
description_200 = 'A list of Dataverse counts by month'
param_names = StatsViewSwagger.PARAM_DV_API_KEY +\
StatsViewSwagger.BASIC_DATE_PARAMS +\
StatsViewSwagger.PUBLISH_PARAMS +\
StatsViewSwagger.PRETTY_JSON_PARAM +\
StatsViewSwagger.PARAM_AS_CSV
tags = [StatsViewSwagger.TAG_DATAVERSES]
def get_stats_result(self, request):
"""Return the StatsResult object for this statistic"""
stats_datasets = StatsMakerDataverses(**request.GET.dict())
pub_state = self.get_pub_state(request)
if pub_state == self.PUB_STATE_ALL:
stats_result = stats_datasets.get_dataverse_counts_by_month()
elif pub_state == self.PUB_STATE_UNPUBLISHED:
stats_result = stats_datasets.get_dataverse_counts_by_month_unpublished()
else:
stats_result = stats_datasets.get_dataverse_counts_by_month_published()
return stats_result
class DataverseTotalCounts(StatsViewSwaggerKeyRequired):
"""API View - Total count of all Dataverses"""
# Define the swagger attributes
# Note: api_path must match the path in urls.py
#
api_path = '/dataverses/count'
summary = ('Simple count of published Dataverses')
description = ('Returns number of published Dataverses')
description_200 = 'Number of published Dataverses'
param_names = StatsViewSwagger.PARAM_DV_API_KEY + StatsViewSwagger.PUBLISH_PARAMS + StatsViewSwagger.PRETTY_JSON_PARAM
tags = [StatsViewSwagger.TAG_DATAVERSES]
result_name = StatsViewSwagger.RESULT_NAME_TOTAL_COUNT
def get_stats_result(self, request):
"""Return the StatsResult object for this statistic"""
stats_datasets = StatsMakerDataverses(**request.GET.dict())
pub_state = self.get_pub_state(request)
if pub_state == self.PUB_STATE_ALL:
stats_result = stats_datasets.get_dataverse_count()
elif pub_state == self.PUB_STATE_UNPUBLISHED:
stats_result = stats_datasets.get_dataverse_count_unpublished()
else:
stats_result = stats_datasets.get_dataverse_count_published()
return stats_result
class DataverseAffiliationCounts(StatsViewSwaggerKeyRequired):
"""API View - Number of Dataverses by Affiliation"""
# Define the swagger attributes
# Note: api_path must match the path in urls.py
#
api_path = '/dataverses/count/by-affiliation'
summary = ('Number of Dataverses by Affiliation')
description = ('Number of Dataverses by Affiliation.')
description_200 = 'Number of published Dataverses by Affiliation.'
param_names = StatsViewSwagger.PARAM_DV_API_KEY\
+ StatsViewSwagger.PUBLISH_PARAMS\
+ StatsViewSwagger.PRETTY_JSON_PARAM\
+ StatsViewSwagger.PARAM_AS_CSV
result_name = StatsViewSwagger.RESULT_NAME_AFFILIATION_COUNTS
tags = [StatsViewSwagger.TAG_DATAVERSES]
def get_stats_result(self, request):
"""Return the StatsResult object for this statistic"""
stats_datasets = StatsMakerDataverses(**request.GET.dict())
pub_state = self.get_pub_state(request)
if pub_state == self.PUB_STATE_ALL:
stats_result = stats_datasets.get_dataverse_affiliation_counts()
elif pub_state == self.PUB_STATE_UNPUBLISHED:
stats_result = stats_datasets.get_dataverse_affiliation_counts_unpublished()
else:
stats_result = stats_datasets.get_dataverse_affiliation_counts_published()
return stats_result
class DataverseTypeCounts(StatsViewSwaggerKeyRequired):
# Define the swagger attributes
# Note: api_path must match the path in urls.py
#
api_path = '/dataverses/count/by-type'
summary = ('Number of Dataverses by Type')
description = ('Number of Dataverses by Type.')
description_200 = 'Number of published Dataverses by Type.'
param_names = StatsViewSwagger.PARAM_DV_API_KEY + StatsViewSwagger.PUBLISH_PARAMS +\
StatsViewSwagger.PRETTY_JSON_PARAM +\
StatsViewSwagger.DV_TYPE_UNCATEGORIZED_PARAM +\
StatsViewSwagger.PARAM_AS_CSV
result_name = StatsViewSwagger.RESULT_NAME_DATAVERSE_TYPE_COUNTS
tags = [StatsViewSwagger.TAG_DATAVERSES]
def is_show_uncategorized(self, request):
"""Return the result of the "?show_uncategorized" query string param"""
show_uncategorized = request.GET.get('show_uncategorized', False)
if show_uncategorized is True or show_uncategorized == 'true':
return True
return False
def get_stats_result(self, request):
"""Return the StatsResult object for this statistic"""
stats_datasets = StatsMakerDataverses(**request.GET.dict())
if self.is_show_uncategorized(request):
exclude_uncategorized = False
else:
exclude_uncategorized = True
        pub_state = self.get_pub_state(request)
if pub_state == self.PUB_STATE_ALL:
stats_result = stats_datasets.get_dataverse_counts_by_type(exclude_uncategorized)
elif pub_state == self.PUB_STATE_UNPUBLISHED:
stats_result = stats_datasets.get_dataverse_counts_by_type_unpublished(exclude_uncategorized)
else:
stats_result = stats_datasets.get_dataverse_counts_by_type_published(exclude_uncategorized)
return stats_result
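# Editorial sketch (assumptions only, not part of the original module): since
# each view's api_path must match an entry in urls.py, a matching URL pattern
# would look roughly like the following; the import path and the assumption
# that the base class exposes the usual Django .as_view() are hypothetical.
#
#   from django.conf.urls import url
#   from .stats_views_dataverses import DataverseCountByMonthView
#
#   urlpatterns = [
#       url(r'^dataverses/count/monthly$', DataverseCountByMonthView.as_view()),
#   ]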
|
rickypc/dotfiles
|
.rflint.d/order.py
|
Python
|
mit
| 3,013
| 0.000664
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Robot Lint Rules - Lint rules for Robot Framework data files.
# Copyright (c) 2014, 2015, 2016 Richard Huang <rickypc@users.noreply.github.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Robot Lint Rules - Lint rules for Robot Framework data files.
"""
from rflint.common import GeneralRule, WARNING
from rflint.parser import Keyword, KeywordTable, SettingTable, VariableTable
def _get_diffs(items, reporter, is_keyword=False):
"""Returns order differences if any."""
last = {'line': -1, 'name': ''}
for item in items:
name = item.name if is_keyword else item[0].strip() or ''
if name !='' and name != '...' and not name.startswith('#'):
if ((name > last['name']) - (name < last['name'])) < 0:
if len(name) <= len(last['name']):
reporter('"%s" comes before "%s".' % (last['name'], name),
last['line'])
last['line'] = item.linenumber
last['name'] = name
class KeywordNameOrder(GeneralRule):
"""Warn about un-ordered keyword name.
"""
severity = WARNING
def apply(self, robot_file):
"""Apply the rule to given robot file."""
# pylint: disable=expression-not-assigned
[_get_diffs(table.keywords, lambda message, line:
self.report(robot_file, message, line), True)
for table in robot_file.tables if isinstance(table, KeywordTable)]
class SettingNameOrder(GeneralRule):
"""Warn about un-ordered setting name.
"""
severity = WARNING
def apply(self, robot_file):
"""Apply the rule to given robot file."""
# pylint: disable=expression-not-assigned
[_get_diffs(table.rows, lambda message, line:
self.report(robot_file, message, line))
for table in robot_file.tables if isinstance(table, SettingTable)]
class VariableNameOrder(GeneralRule):
"""Warn about un-ordered variable name.
"""
severity = WARNING
def apply(self, robot_file):
"""Apply the rule to given robot file."""
# pylint: disable=expression-not-assigned
[_get_diffs(table.rows, lambda message, line:
self.report(robot_file, message, line))
for table in robot_file.tables if isinstance(table, VariableTable)]
|
kaedroho/wagtail
|
wagtail/core/signals.py
|
Python
|
bsd-3-clause
| 923
| 0.002167
|
from django.dispatch import Signal
page_published = Signal(providing_args=['instance', 'revision'])
page_unpublished = Signal(providing_args=['instance'])
pre_page_move = Signal(providing_args=['instance', 'parent_page_before', 'parent_page_after', 'url_path_before', 'url_path_after'])
post_page_move = Signal(providing_args=['instance', 'parent_page_before', 'parent_page_after', 'url_path_before', 'url_path_after'])
workflow_approved = Signal(providing_args=['instance', 'user'])
workflow_rejected = Signal(providing_args=['instance', 'user'])
workflow_cancelled = Signal(providing_args=['instance', 'user'])
workflow_submitted = Signal(providing_args=['instance', 'user'])
task_approved = Signal(providing_args=['instance', 'user'])
task_rejected = Signal(providing_args=['instance', 'user'])
task_submitted = Signal(providing_args=['instance', 'user'])
task_cancelled = Signal(providing_args=['instance', 'user'])
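# Illustrative sketch (editorial addition, not part of the original module):
# how project code typically listens for one of the signals defined above. The
# receiver name and the logging behaviour are assumptions for the example only.
import logging

_example_logger = logging.getLogger(__name__)

def _example_log_page_published(sender, instance, revision, **kwargs):
    # 'instance' is the page being published, 'revision' the revision used.
    _example_logger.info("page_published fired for %s", instance)

# page_published.connect(_example_log_page_published)  # uncomment to activate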
|
readhub/readhub
|
config.py
|
Python
|
mit
| 665
| 0.001504
|
import os
class Config(object):
DEBUG = False
# If using a DB do something like this:
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL',
'postgresql+pg8000://readhub_user@localhost:5432/readhub_db')
    # if using WTF forms you'll want something like this below
# CSRF_SESSION_KEY = os.environ.get('SESSION_KEY')
WTF_CSRF_ENABLED = True
SECRET_KEY = 'you-will-never-guess'
class DevelopmentConfig(Config):
DEBUG = True
class TestConfig(DevelopmentConfig):
DEBUG = True
TESTING = True
WTF_CSRF_ENABLED = False
SQLALCHEMY_DATABASE_URI = 'sqlite:///:memory:test.db'
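# Illustrative sketch (editorial addition, not part of the original file): how
# a Flask app would typically consume one of the config classes above. The
# dotted path assumes this module is importable as 'config'.
def create_example_app(config_object='config.DevelopmentConfig'):
    from flask import Flask

    app = Flask(__name__)
    # from_object also accepts the class itself, e.g. DevelopmentConfig.
    app.config.from_object(config_object)
    return app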
|
pdorrell/melody_scripter
|
setup.py
|
Python
|
mit
| 3,795
| 0.001845
|
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='melody_scripter',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='0.0.8',
description='Melody Scripter, for parsing melodies from a simple textual format',
long_description=long_description,
# The project's main homepage.
url='https://github.com/pdorrell/melody_scripter',
# Author details
author='Philip Dorrell',
author_email='http://thinkinghard.com/email.html',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
        'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: Musicians',
'Topic :: Software Development :: Music',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# What does your project relate to?
keywords='music parsing',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['regex>=2015.09.15',
'midi>=0.2.3'],
dependency_links=[
"https://github.com/vishnubob/python-midi/archive
|
/v0.2.3.zip#egg=midi-0.2.3"
],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': ['check-manifest'],
        'test': ['nose'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'song2midi=melody_scripter.song2midi:main',
'play_song=melody_scripter.play_song:main',
],
},
)
|
mycodeday/crm-platform
|
stock_account/wizard/stock_valuation_history.py
|
Python
|
gpl-3.0
| 8,050
| 0.00323
|
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
class wizard_valuation_history(osv.osv_memory):
_name = 'wizard.valuation.history'
_description = 'Wizard that opens the stock valuation history table'
_columns = {
'choose_date': fields.boolean('Choose a Particular Date'),
'date': fields.datetime('Date', required=True),
}
_defaults = {
'choose_date': False,
'date': fields.datetime.now,
}
def open_table(self, cr, uid, ids, context=None):
if context is None:
context = {}
data = self.read(cr, uid, ids, context=context)[0]
ctx = context.copy()
ctx['history_date'] = data['date']
ctx['search_default_group_by_product'] = True
ctx['search_default_group_by_location'] = True
return {
'domain': "[('date', '<=', '" + data['date'] + "')]",
'name': _('Stock Value At Date'),
'view_type': 'form',
'view_mode': 'tree,graph',
'res_model': 'stock.history',
'type': 'ir.actions.act_window',
'context': ctx,
}
class stock_history(osv.osv):
_name = 'stock.history'
_auto = False
_order = 'date asc'
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True):
res = super(stock_history, self).read_group(cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby, lazy=lazy)
if context is None:
context = {}
date = context.get('history_date')
prod_dict = {}
if 'inventory_value' in fields:
for line in res:
lines = self.search(cr, uid, line.get('__domain', []), context=context)
inv_value = 0.0
product_tmpl_obj = self.pool.get("product.template")
lines_rec = self.browse(cr, uid, lines, context=context)
for line_rec in lines_rec:
if line_rec.product_id.cost_method == 'real':
price = line_rec.price_unit_on_quant
else:
if not line_rec.product_id.id in prod_dict:
prod_dict[line_rec.product_id.id] = product_tmpl_obj.get_history_price(cr, uid, line_rec.product_id.product_tmpl_id.id, line_rec.company_id.id, date=date, context=context)
price = prod_dict[line_rec.product_id.id]
inv_value += price * line_rec.quantity
line['inventory_value'] = inv_value
return res
def _get_inventory_value(self, cr, uid, ids, name, attr, context=None):
if context is None:
context = {}
date = context.get('history_date')
product_tmpl_obj = self.pool.get("product.template")
res = {}
for line in self.browse(cr, uid, ids, context=context):
if line.product_id.cost_method == 'real':
res[line.id] = line.quantity * line.price_unit_on_quant
else:
res[line.id] = line.quantity * product_tmpl_obj.get_history_price(cr, uid, line.product_id.product_tmpl_id.id, line.company_id.id, date=date, context=context)
return res
_columns = {
'move_id': fields.many2one('stock.move', 'Stock Move', required=True),
'location_id': fields.many2one('stock.location', 'Location', required=True),
'company_id': fields.many2one('res.company', 'Company'),
'product_id': fields.many2one('product.product', 'Product', required=True),
'product_categ_id': fields.many2one('product.category', 'Product Category', required=True),
'quantity': fields.float('Product Quantity'),
'date': fields.datetime('Operation Date'),
'price_unit_on_quant': fields.float('Value'),
'inventory_value': fields.function(_get_inventory_value, string="Inventory Value", type='float', readonly=True),
'source': fields.char('Source')
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'stock_history')
cr.execute("""
CREATE OR REPLACE VIEW stock_history AS (
SELECT MIN(id) as id,
move_id,
location_id,
company_id,
product_id,
product_categ_id,
SUM(quantity) as quantity,
date,
price_unit_on_quant,
source
FROM
((SELECT
stock_move.id::text || '-' || quant.id::text AS id,
quant.id AS quant_id,
stock_move.id AS move_id,
dest_location.id AS location_id,
dest_location.company_id AS company_id,
stock_move.product_id AS product_id,
product_template.categ_id AS product_categ_id,
quant.qty AS quantity,
stock_move.date AS date,
quant.cost as price_unit_on_quant,
stock_move.origin AS source
FROM
stock_quant as quant, stock_quant_move_rel, stock_move
LEFT JOIN
stock_location dest_location ON stock_move.location_dest_id = dest_location.id
LEFT JOIN
stock_location source_location ON stock_move.location_id = source_location.id
LEFT JOIN
product_product ON product_product.id = stock_move.product_id
LEFT JOIN
product_template ON product_template.id = product_product.product_tmpl_id
WHERE stock_move.state = 'done' AND dest_location.usage in ('internal', 'transit') AND stock_quant_move_rel.quant_id = quant.id
AND stock_quant_move_rel.move_id = stock_move.id AND ((source_location.company_id is null and dest_location.company_id is not null) or
(source_location.company_id is not null and dest_location.company_id is null) or source_location.company_id != dest_location.company_id)
) UNION
(SELECT
'-' || stock_move.id::text || '-' || quant.id::text AS id,
quant.id AS quant_id,
stock_move.id AS move_id,
source_location.id AS location_id,
source_location.company_id AS company_id,
stock_move.product_id AS product_id,
product_template.categ_id AS product_categ_id,
- quant.qty AS quantity,
stock_move.date AS date,
quant.cost as price_unit_on_quant,
stock_move.origin AS source
FROM
stock_quant as quant, stock_quant_move_rel, stock_move
LEFT JOIN
stock_location source_location ON stock_move.location_id = source_location.id
LEFT JOIN
stock_location dest_location ON stock_move.location_dest_id = dest_location.id
LEFT JOIN
product_product ON product_product.id = stock_move.product_id
LEFT JOIN
product_template ON product_template.id = product_product.product_tmpl_id
WHERE stock_move.state = 'done' AND source_location.usage in ('internal', 'transit') AND stock_quant_move_rel.quant_id = quant.id
AND stock_quant_move_rel.move_id = stock_move.id AND ((dest_location.company_id is null and source_location.company_id is not null) or
(dest_location.company_id is not null and source_location.company_id is null) or dest_location.company_id != source_location.company_id)
))
AS foo
GROUP BY move_id, location_id, company_id, product_id, product_categ_id, date, price_unit_on_quant, source
)""")
|
arulalant/txt2ipa
|
kannada2ipa/kannada2ipaMap.py
|
Python
|
gpl-3.0
| 588,062
| 0.000005
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Author : Arulalan.T <arulalant@gmail.com>
# Thanks to Mr. Govardhan Balaji <govigov03@gmail.com>
# who contributed to create kannada2ipaMap
#
#
from orddic import OrderedDict
kan2ipa = OrderedDict([
("ಳ್ಳಃ", "ɭɭəɦə"),
("ಳ್ಳಂ", "ɭɭəm"),
("ಳ್ಳೌ", "ɭɭəʋ"),
("ಳ್ಳೋ", "ɭɭo:"),
("ಳ್ಳೊ", "ɭɭo"),
("ಳ್ಳೈ", "ɭɭaj"),
("ಳ್ಳೇ", "ɭɭe:"),
("ಳ್ಳೆ", "ɭɭe"),
("ಳ್ಳೃ", "ɭɭɻ̩"),
("ಳ್ಳೂ", "ɭɭu:"),
("ಳ್ಳು", "ɭɭʊ"),
("ಳ್ಳೀ", "ɭɭi:"),
("ಳ್ಳಿ", "ɭɭi"),
("ಳ್ಳಾ", "ɭɭa:"),
("ಳ್ಳ", "ɭɭʌ"),
("ಳ್ಹಃ", "ɭɦəɦə"),
("ಳ್ಹಂ", "ɭɦəm"),
("ಳ್ಹೌ", "ɭɦəʋ"),
("ಳ್ಹೋ", "ɭɦo:"),
("ಳ್ಹೊ", "ɭɦo"),
("ಳ್ಹೈ", "ɭɦaj"),
("ಳ್ಹೇ", "ɭɦe:"),
("ಳ್ಹೆ", "ɭɦe"),
("ಳ್ಹೃ", "ɭɦɻ̩"),
("ಳ್ಹೂ", "ɭɦu:"),
("ಳ್ಹು", "ɭɦʊ"),
("ಳ್ಹೀ", "ɭɦi:"),
("ಳ್ಹಿ", "ɭɦi"),
("ಳ್ಹಾ", "ɭɦa:"),
("ಳ್ಹ", "ɭɦʌ"),
("ಳ್ಸಃ", "ɭsəɦə"),
("ಳ್ಸಂ", "ɭsəm"),
("ಳ್ಸೌ", "ɭsəʋ"),
("ಳ್ಸೋ", "ɭso:"),
("ಳ್ಸೊ", "ɭso"),
("ಳ್ಸೈ", "ɭsaj"),
("ಳ್ಸೇ", "ɭse:"),
("ಳ್ಸೆ", "ɭse"),
("ಳ್ಸೃ", "ɭsɻ̩"),
("ಳ್ಸೂ", "ɭsu:"),
("ಳ್ಸು", "ɭsʊ"),
("ಳ್ಸೀ", "ɭsi:"),
("ಳ್ಸಿ", "ɭsi"),
("ಳ್ಸಾ", "ɭsa:"),
("ಳ್ಸ", "ɭsʌ"),
("ಳ್ಷಃ", "ɭʂəɦə"),
("ಳ್ಷಂ", "ɭʂəm"),
("ಳ್ಷೌ", "ɭʂəʋ"),
("ಳ್ಷೋ", "ɭʂo:"),
("ಳ್ಷೊ", "ɭʂo"),
("ಳ್ಷೈ", "ɭʂaj"),
("ಳ್ಷೇ", "ɭʂe:"),
("ಳ್ಷೆ", "ɭʂe"),
("ಳ್ಷೃ", "ɭʂɻ̩"),
("ಳ್ಷೂ", "ɭʂu:"),
("ಳ್ಷು", "ɭʂʊ"),
("ಳ್ಷೀ", "ɭʂi:"),
("ಳ್ಷಿ", "ɭʂi"),
("ಳ್ಷಾ", "ɭʂa:"),
("ಳ್ಷ", "ɭʂʌ"),
("ಳ್ಶಃ", "ɭʃəɦə"),
("ಳ್ಶಂ", "ɭʃəm"),
("ಳ್ಶೌ", "ɭʃəʋ"),
("ಳ್ಶೋ", "ɭʃo:"),
("ಳ್ಶೊ", "ɭʃo"),
("ಳ್ಶೈ", "ɭʃaj"),
("ಳ್ಶೇ", "ɭʃe:"),
("ಳ್ಶೆ", "ɭʃe"),
("ಳ್ಶೃ", "ɭʃɻ̩"),
("ಳ್ಶೂ", "ɭʃu:"),
("ಳ್ಶು", "ɭʃʊ"),
("ಳ್ಶೀ", "ɭʃi:"),
("ಳ್ಶಿ", "ɭʃi"),
("ಳ್ಶಾ", "ɭʃa:"),
("ಳ್ಶ", "ɭʃʌ"),
("ಳ್ವಃ", "ɭʋəɦə"),
("ಳ್ವಂ", "ɭʋəm"),
("ಳ್ವೌ", "ɭʋəʋ"),
("ಳ್ವೋ", "ɭʋo:"),
("ಳ್ವೊ", "ɭʋo"),
("ಳ್ವೈ", "ɭʋaj"),
("ಳ್ವೇ", "ɭʋe:"),
("ಳ್ವೆ", "ɭʋe"),
("ಳ್ವೃ", "ɭʋɻ̩"),
("ಳ್ವೂ", "ɭʋu:"),
("ಳ್ವು", "ɭʋʊ"),
("ಳ್ವೀ", "ɭʋi:"),
("ಳ್ವಿ", "ɭʋi"),
("ಳ್ವಾ", "ɭʋa:"),
("ಳ್ವ", "ɭʋʌ"),
("ಳ್ಲಃ", "ɭləɦə"),
("ಳ್ಲಂ", "ɭləm"),
("ಳ್ಲೌ", "ɭləʋ"),
("ಳ್ಲೋ", "ɭlo:"),
("ಳ್ಲೊ", "ɭlo"),
("ಳ್ಲೈ", "ɭlaj"),
("ಳ್ಲೇ", "ɭle:"),
("ಳ್ಲೆ", "ɭle"),
("ಳ್ಲೃ", "ɭlɻ̩"),
("ಳ್ಲೂ", "ɭlu:"),
("ಳ್ಲು", "ɭlʊ"),
("ಳ್ಲೀ", "ɭli:"),
("ಳ್ಲಿ", "ɭli"),
("ಳ್ಲಾ", "ɭla:"),
("ಳ್ಲ", "ɭlʌ"),
("ಳ್ರಃ", "ɭɾəɦə"),
("ಳ್ರಂ", "ɭɾəm"),
("ಳ್ರೌ", "ɭɾəʋ"),
("ಳ್ರೋ", "ɭɾo:"),
("ಳ್ರೊ", "ɭɾo"),
("ಳ್ರೈ", "ɭɾaj"),
("ಳ್ರೇ", "ɭɾe:"),
("ಳ್ರೆ", "ɭɾe"),
("ಳ್ರೃ", "ɭɾɻ̩"),
("ಳ್ರೂ", "ɭɾu:"),
("ಳ್ರು", "ɭɾʊ"),
("ಳ್ರೀ", "ɭɾi:"),
("ಳ್ರಿ", "ɭɾi"),
("ಳ್ರಾ", "ɭɾa:"),
("ಳ್ರ", "ɭɾʌ"),
("ಳ್ಯಃ", "ɭjəɦə"),
("ಳ್ಯಂ", "ɭjəm"),
("ಳ್ಯೌ", "ɭjəʋ"),
("ಳ್ಯೋ", "ɭjo:"),
("ಳ್ಯೊ", "ɭjo"),
("ಳ್ಯೈ", "ɭjaj"),
("ಳ್ಯೇ", "ɭje:"),
("ಳ್ಯೆ", "ɭje"),
("ಳ್ಯೃ", "ɭjɻ̩"),
("ಳ್ಯೂ", "ɭju:"),
("ಳ್ಯು", "ɭjʊ"),
("ಳ್ಯೀ", "ɭji:"),
("ಳ್ಯಿ", "ɭji"),
("ಳ್ಯಾ", "ɭja:"),
("ಳ್ಯ", "ɭjʌ"),
("ಳ್ಮಃ", "ɭməɦə"),
("ಳ್ಮಂ", "ɭməm"),
("ಳ್ಮೌ", "ɭməʋ"),
("ಳ್ಮೋ", "ɭmo:"),
("ಳ್ಮೊ", "ɭmo"),
("ಳ್ಮೈ", "ɭmaj"),
("ಳ್ಮೇ", "ɭme:"),
("ಳ್ಮೆ", "ɭme"),
("ಳ್ಮೃ", "ɭmɻ̩"),
("ಳ್ಮೂ", "ɭmu:"),
("ಳ್ಮು", "ɭmʊ"),
("ಳ್ಮೀ", "ɭmi:"),
("ಳ್ಮಿ", "ɭmi"),
("ಳ್ಮಾ", "ɭma:"),
("ಳ್ಮ", "ɭmʌ"),
("ಳ್ಭಃ", "ɭbʰəɦə"),
("ಳ್ಭಂ", "ɭbʰəm"),
("ಳ್ಭೌ", "ɭbʰəʋ"),
("ಳ್ಭೋ", "ɭbʰo:"),
("ಳ್ಭೊ", "ɭbʰo"),
("ಳ್ಭೈ", "ɭbʰaj"),
("ಳ್ಭೇ", "ɭbʰe:"),
("ಳ್ಭೆ", "ɭbʰe"),
("ಳ್ಭೃ", "ɭbʰɻ̩"),
("ಳ್ಭೂ", "ɭbʰu:"),
("ಳ್ಭು", "ɭbʰʊ"),
("ಳ್ಭೀ", "ɭbʰi:"),
("ಳ್ಭಿ", "ɭbʰi"),
("ಳ್ಭಾ", "ɭbʰa:"),
("ಳ್ಭ", "ɭbʰʌ"),
("ಳ್ಬಃ", "ɭbəɦə"),
("ಳ್ಬಂ", "ɭbəm"),
("ಳ್ಬೌ", "ɭbəʋ"),
("ಳ್ಬೋ", "ɭbo:"),
("ಳ್ಬೊ", "ɭbo"),
("ಳ್ಬೈ", "ɭbaj"),
("ಳ್ಬೇ", "ɭbe:"),
("ಳ್ಬೆ", "ɭbe"),
("ಳ್ಬೃ", "ɭbɻ̩"),
("ಳ್ಬೂ", "ɭbu:"),
("ಳ್ಬು", "ɭbʊ"),
("ಳ್ಬೀ", "ɭbi:"),
("ಳ್ಬಿ", "ɭbi"),
("ಳ್ಬಾ", "ɭba:"),
("ಳ್ಬ", "ɭbʌ"),
("ಳ್ಫಃ", "ɭpʰəɦə"),
("ಳ್ಫಂ", "ɭpʰəm"),
("ಳ್ಫೌ", "ɭpʰəʋ"),
("ಳ್ಫೋ", "ɭpʰo:"),
("ಳ್ಫೊ", "ɭpʰo"),
("ಳ್ಫೈ", "ɭpʰaj"),
("ಳ್ಫೇ", "ɭpʰe:"),
("ಳ್ಫೆ", "ɭpʰe"),
("ಳ್ಫೃ", "ɭpʰɻ̩"),
("ಳ್ಫೂ", "ɭpʰu:"),
("ಳ್ಫು", "ɭpʰʊ"),
("ಳ್ಫೀ", "ɭpʰi:"),
("ಳ್ಫಿ", "ɭpʰi"),
("ಳ್ಫಾ", "ɭpʰa:"),
("ಳ್ಫ", "ɭpʰʌ"),
("ಳ್ಪಃ", "ɭpəɦə"),
("ಳ್ಪಂ", "ɭpəm"),
("ಳ್ಪೌ", "ɭpəʋ"),
("ಳ್ಪೋ", "ɭpo:"),
("ಳ್ಪೊ", "ɭpo"),
("ಳ್ಪೈ", "ɭpaj"),
("ಳ್ಪೇ", "ɭpe:"),
("ಳ್ಪೆ", "ɭpe"),
("ಳ್ಪೃ", "ɭpɻ̩"),
("ಳ್ಪೂ", "ɭpu:"),
("ಳ್ಪು", "ɭpʊ"),
("ಳ್ಪೀ", "ɭpi:"),
("ಳ್ಪಿ", "ɭpi"),
("ಳ್ಪಾ", "ɭpa:"),
("ಳ್ಪ", "ɭpʌ"),
("ಳ್ನಃ", "ɭnəɦə"),
("ಳ್ನಂ", "ɭnəm"),
("ಳ್ನೌ", "ɭnəʋ"),
("ಳ್ನೋ", "ɭno:"),
("ಳ್ನೊ", "ɭno"),
("ಳ್ನೈ", "ɭnaj"),
("ಳ್ನೇ", "ɭne:"),
("ಳ್ನೆ", "ɭne"),
("ಳ್ನೃ", "ɭnɻ̩"),
("ಳ್ನೂ", "ɭnu:"),
("ಳ್ನು", "ɭnʊ"),
("ಳ್ನೀ", "ɭni:"),
("ಳ್ನಿ", "ɭni"),
("ಳ್ನಾ", "ɭna:"),
("ಳ್ನ", "ɭnʌ"),
("ಳ್ಧಃ", "ɭd̪ʰəɦə"),
("ಳ್ಧಂ", "ɭd̪ʰəm"),
("ಳ್ಧೌ", "ɭd̪ʰəʋ"),
("ಳ್ಧೋ", "ɭd̪ʰo:"),
("ಳ್ಧೊ", "ɭd̪ʰo"),
("ಳ್ಧೈ", "ɭd̪ʰaj"),
("ಳ್ಧೇ", "ɭd̪ʰe:"),
("ಳ್ಧೆ", "ɭd̪ʰe"),
("ಳ್ಧೃ", "ɭd̪ʰɻ̩"),
("ಳ್ಧೂ", "ɭd̪ʰu:"),
("ಳ್ಧು", "ɭd̪ʰʊ"),
("ಳ್ಧೀ", "ɭd̪ʰi:"),
("ಳ್ಧಿ", "ɭd̪ʰi"),
("ಳ್ಧಾ", "ɭd̪ʰa:"),
("ಳ್ಧ", "ɭd̪ʰʌ"),
("ಳ್ದಃ", "ɭd̪əɦə"),
("ಳ್ದಂ", "ɭd̪əm"),
("ಳ್ದೌ", "ɭd̪əʋ"),
("ಳ್ದೋ", "ɭd̪o:"),
("ಳ್ದೊ", "ɭd̪o"),
("ಳ್ದೈ", "ɭd̪aj"),
("ಳ್ದೇ", "ɭd̪e:"),
("ಳ್ದೆ", "ɭd̪e"),
("ಳ್ದೃ", "ɭd̪ɻ̩"),
("ಳ್ದೂ", "ɭd̪u:"),
("ಳ್ದು", "ɭd̪ʊ"),
("ಳ್ದೀ", "ɭd̪i:"),
("ಳ್ದಿ", "ɭd̪i"),
("ಳ್ದಾ", "ɭd̪a:"),
("ಳ್ದ", "ɭd̪ʌ"),
("ಳ್ಥಃ", "ɭt̪ʰəɦə"),
("ಳ್ಥಂ", "ɭt̪ʰəm"),
("ಳ್ಥೌ", "ɭt̪ʰəʋ"),
("ಳ್ಥೋ", "ɭt̪ʰo:"),
("ಳ್ಥೊ", "ɭt̪ʰo"),
("ಳ್ಥೈ", "ɭt̪ʰaj"),
("ಳ್ಥೇ", "ɭt̪ʰe:"),
("ಳ್ಥೆ", "ɭt̪ʰe"),
("ಳ್ಥೃ", "ɭt̪ʰɻ̩"),
("ಳ್ಥೂ", "ɭt̪ʰu:"),
("ಳ್ಥು", "ɭt̪ʰʊ"),
("ಳ್ಥೀ", "ɭt̪ʰi:"),
("ಳ್ಥಿ", "ɭt̪ʰi"),
("ಳ್ಥಾ", "ɭt̪ʰa:"),
("ಳ್ಥ", "ɭt̪ʰʌ"),
("ಳ್ತಃ", "ɭt̪əɦə"),
("ಳ್ತಂ", "ɭt̪əm"),
("ಳ್ತೌ", "ɭt̪əʋ"),
("ಳ್ತೋ", "ɭt̪o:"),
("ಳ್ತೊ", "ɭt̪o"),
("ಳ್ತೈ", "ɭt̪aj"),
("ಳ್ತೇ", "ɭt̪e:"),
("ಳ್ತೆ", "ɭt̪e"),
("ಳ್ತೃ", "ɭt̪ɻ̩"),
("ಳ್ತೂ", "ɭt̪u:"),
("ಳ್ತು", "ɭt̪ʊ"),
("ಳ್ತೀ", "ɭt̪i:"),
("ಳ್ತಿ", "ɭt̪i"),
("ಳ್ತಾ", "ɭt̪a:"),
("ಳ್ತ", "ɭt̪ʌ"),
("ಳ್ಣಃ", "ɭɳəɦə"),
("ಳ್ಣಂ", "ɭɳəm"),
("ಳ್ಣೌ", "ɭɳəʋ"),
("ಳ್ಣೋ", "ɭɳo:"),
("ಳ್ಣೊ", "ɭɳo"),
("ಳ್ಣೈ", "ɭɳaj"),
("ಳ್ಣೇ", "ɭɳe:"),
("ಳ್ಣೆ", "ɭɳe"),
("ಳ್ಣೃ", "ɭɳɻ̩"),
("ಳ್ಣೂ", "ɭɳu:"),
("ಳ್ಣು", "ɭɳʊ"),
("ಳ್ಣೀ", "ɭɳi:"),
("ಳ್ಣಿ", "ɭɳi"),
("ಳ್ಣಾ", "ɭɳa:"),
("ಳ್ಣ", "ɭɳʌ"),
("ಳ್ಢಃ", "ɭɖʰəɦə"),
("ಳ್ಢಂ", "ɭɖʰəm"),
("ಳ್ಢೌ", "ɭɖʰəʋ"),
("ಳ್ಢೋ", "ɭɖʰo:"),
("ಳ್ಢೊ", "ɭɖʰo"),
("ಳ್ಢೈ", "ɭɖʰaj"),
("ಳ್ಢೇ", "ɭɖʰe:"),
("ಳ್ಢೆ", "ɭɖʰe"),
("ಳ್ಢೃ", "ɭɖʰɻ̩"),
("ಳ್ಢೂ", "ɭɖʰu:"),
("ಳ್ಢು", "ɭɖʰʊ"),
("ಳ್ಢೀ", "ɭɖʰi:"),
("ಳ್ಢಿ", "ɭɖʰi"),
("ಳ್ಢಾ", "ɭɖʰa:"),
("ಳ್ಢ", "ɭɖʰʌ"),
("ಳ್ಡಃ", "ɭɖəɦə"),
("ಳ್ಡಂ", "ɭɖəm"),
("ಳ್ಡೌ", "ɭɖəʋ"),
("ಳ್ಡೋ", "ɭɖo:"),
("ಳ್ಡೊ", "ɭɖo"),
("ಳ್ಡೈ", "ɭɖaj"),
("ಳ್ಡೇ", "ɭɖe:"),
("ಳ್ಡೆ", "ɭɖe"),
("ಳ್ಡೃ", "ɭɖɻ̩"),
("ಳ್ಡೂ", "ɭɖu:"),
("ಳ್ಡು", "ɭɖʊ"),
("ಳ್ಡೀ", "ɭɖi:"),
("ಳ್ಡಿ", "ɭɖi"),
("ಳ್ಡಾ", "ɭɖa:"),
("ಳ್ಡ", "ɭɖʌ"),
("ಳ್ಠಃ", "ɭʈʰəɦə"),
("ಳ್ಠಂ", "ɭʈʰəm"),
("ಳ್ಠೌ", "ɭʈʰəʋ"),
("ಳ್ಠೋ", "ɭʈʰo:"),
("ಳ್ಠೊ", "ɭʈʰo"),
("ಳ್ಠೈ", "ɭʈʰaj"),
("ಳ್ಠೇ", "ɭʈʰe:"),
("ಳ್ಠೆ", "ɭʈʰe"),
("ಳ್ಠೃ", "ɭʈʰɻ̩"),
("ಳ್ಠೂ", "ɭʈʰu:"),
("ಳ್ಠು", "ɭʈʰʊ"),
("ಳ್ಠೀ", "ɭʈʰi:"),
("ಳ್ಠಿ", "ɭʈʰi"),
("ಳ್ಠಾ", "ɭʈʰa:"),
("ಳ್ಠ", "ɭʈʰʌ"),
("ಳ್ಟಃ", "ɭʈəɦə"),
("ಳ್ಟಂ", "ɭʈəm"),
("ಳ್ಟೌ", "ɭʈəʋ"),
("ಳ್ಟೋ", "ɭʈo:"),
("ಳ್ಟೊ", "ɭʈo"),
("ಳ್ಟೈ", "ɭʈaj"),
("ಳ್ಟೇ", "ɭʈe:"),
("ಳ್ಟೆ", "ɭʈe"),
("ಳ್ಟೃ", "ɭʈɻ̩"),
("ಳ್ಟೂ", "ɭʈu:"),
("ಳ್ಟು", "ɭʈʊ"),
("ಳ್ಟೀ", "ɭʈi:"),
("ಳ್ಟಿ", "ɭʈi"),
("ಳ್ಟಾ", "ɭʈa
|
google/ashier
|
ashierlib/test/utils_test.py
|
Python
|
apache-2.0
| 2,733
| 0.006952
|
#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Ashier: Template-based scripting for terminal interactions.
Ashier is a program that serves the same purpose as expect(1): it helps
users script terminal interactions. However, unlike expect, Ashier is
programming language agnostic and provides a readable template language
for terminal output matching. These features make scripted terminal
interactions simpler to create and easier to maintain.
This module contains unit tests for the utils module.
"""
__author__ = 'cklin@google.com (Chuan-kai Lin)'
import unittest
from .. import utils
class TestSplitNone(unittest.TestCase):
"""Unit tests for utils.SplitNone()."""
def DoTest(self, arg, expected):
self.assertEqual(
utils.SplitNone(arg), expected)
def testEmpty(self):
self.DoTest([], [])
def testOnlyNone(self):
self.DoTest([None], [])
def testOnlyNones(self):
self.DoTest([None, None, None], [])
def testStartNone(self):
self.DoTest([None, 3, 5], [[3, 5]])
def testEndNone(self):
self.DoTest([4, 2, None, None], [[4, 2]])
def testStartEndNone(self):
self.DoTest([None, 5, 0, None, None], [[5, 0]])
def testSplitInTwo(self):
self.DoTest([7, None, None, 6, 2], [[7], [6, 2]])
def testSplitInThree(self):
self.DoTest([2, None, 5, 3, None, 4], [[2], [5, 3], [4]])
class TestRemoveRegexBindingGroups(unittest.TestCase):
"""Unit tests for utils.RemoveRegexBindingGroups()."""
def DoTest(self, arg, expected):
self.assertEqual(
utils.RemoveRegexBindingGroups(arg), expected)
def testNoBindingGroup(self):
self.DoTest(r'abc', r'abc')
def testBindingGroup(self):
self.DoTest(r'a(bc)', r'a(?:bc)')
def testBindingGroups(self):
self.DoTest(r'a(bc)(def)', r'a(?:bc)(?:def)')
def testNestedBindingGroups(self):
self.DoTest(r'a((bc))', r'a(?:(?:bc))')
def testEscapedParens(self):
self.DoTest(r'a\(b\)', r'a\(b\)')
def testEscapedBackSlashes(self):
self.DoTest(r'a\\(b\\)', r'a\\(?:b\\)')
self.DoTest(r'a\\\(b\\)', r'a\\\(b\\)')
self.DoTest(r'a\\\\(b\\)', r'a\\\\(?:b\\)')
if __name__ == '__main__':
unittest.main()
|
whitehorse-io/encarnia
|
pyenv/lib/python2.7/site-packages/twisted/test/test_sob.py
|
Python
|
mit
| 5,632
| 0.003374
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
import os
import sys
from textwrap import dedent
from twisted.trial import unittest
from twisted.persisted import sob
from twisted.python import components
from twisted.persisted.styles import Ephemeral
class Dummy(components.Componentized):
pass
objects = [
1,
"hello",
(1, "hello"),
[1, "hello"],
{1:"hello"},
]
class FakeModule(object):
pass
class PersistTests(unittest.TestCase):
def testStyles(self):
for o in objects:
p = sob.Persistent(o, '')
for style in 'source pickle'.split():
p.setStyle(style)
p.save(filename='persisttest.'+style)
o1 = sob.load('persisttest.'+style, style)
self.assertEqual(o, o1)
def testStylesBeingSet(self):
o = Dummy()
o.foo = 5
o.setComponent(sob.IPersistable, sob.Persistent(o, 'lala'))
for style in 'source pickle'.split():
sob.IPersistable(o).setStyle(style)
sob.IPersistable(o).save(filename='lala.'+style)
o1 = sob.load('lala.'+style, style)
self.assertEqual(o.foo, o1.foo)
self.assertEqual(sob.IPersistable(o1).style, style)
def testPassphraseError(self):
"""
Calling save() with a passphrase is an error.
"""
p = sob.Persistant(None, 'object')
self.assertRaises(
TypeError, p.save, 'filename.pickle', passphrase='abc')
def testNames(self):
o = [1,2,3]
p = sob.Persistent(o, 'object')
for style in 'source pickle'.split():
p.setStyle(style)
p.save()
o1 = sob.load('object.ta'+style[0], style)
self.assertEqual(o, o1)
for tag in 'lala lolo'.split():
p.save(tag)
o1 = sob.load('object-'+tag+'.ta'+style[0], style)
self.assertEqual(o, o1)
def testPython(self):
with open("persisttest.python", 'w') as f:
f.write('foo=[1,2,3] ')
o = sob.loadValueFromFile('persisttest.python', 'foo')
self.assertEqual(o, [1,2,3])
def testTypeGuesser(self):
self.assertRaises(KeyError, sob.guessType, "file.blah")
self.assertEqual('python', sob.guessType("file.py"))
self.assertEqual('python', sob.guessType("file.tac"))
self.assertEqual('python', sob.guessType("file.etac"))
self.assertEqual('pickle', sob.guessType("file.tap"))
self.assertEqual('pickle', sob.guessType("file.etap"))
self.assertEqual('source', sob.guessType("file.tas"))
self.assertEqual('source', sob.guessType("file.etas"))
    def testEverythingEphemeralGetattr(self):
"""
        L{_EverythingEphemeral.__getattr__} will proxy the __main__ module as an
        L{Ephemeral} object, and during load will be transparent, but after
load will return L{Ephemeral} objects from any accessed attributes.
"""
self.fakeMain.testMainModGetattr = 1
dirname = self.mktemp()
os.mkdir(dirname)
filename = os.path.join(dirname, 'persisttest.ee_getattr')
global mainWhileLoading
mainWhileLoading = None
with open(filename, "w") as f:
f.write(dedent("""
app = []
import __main__
app.append(__main__.testMainModGetattr == 1)
try:
__main__.somethingElse
except AttributeError:
app.append(True)
else:
app.append(False)
from twisted.test import test_sob
test_sob.mainWhileLoading = __main__
"""))
loaded = sob.load(filename, 'source')
self.assertIsInstance(loaded, list)
self.assertTrue(loaded[0], "Expected attribute not set.")
self.assertTrue(loaded[1], "Unexpected attribute set.")
self.assertIsInstance(mainWhileLoading, Ephemeral)
self.assertIsInstance(mainWhileLoading.somethingElse, Ephemeral)
del mainWhileLoading
def testEverythingEphemeralSetattr(self):
"""
Verify that _EverythingEphemeral.__setattr__ won't affect __main__.
"""
self.fakeMain.testMainModSetattr = 1
dirname = self.mktemp()
os.mkdir(dirname)
filename = os.path.join(dirname, 'persisttest.ee_setattr')
with open(filename, 'w') as f:
f.write('import __main__\n')
f.write('__main__.testMainModSetattr = 2\n')
f.write('app = None\n')
sob.load(filename, 'source')
self.assertEqual(self.fakeMain.testMainModSetattr, 1)
def testEverythingEphemeralException(self):
"""
Test that an exception during load() won't cause _EE to mask __main__
"""
dirname = self.mktemp()
os.mkdir(dirname)
filename = os.path.join(dirname, 'persisttest.ee_exception')
with open(filename, 'w') as f:
f.write('raise ValueError\n')
self.assertRaises(ValueError, sob.load, filename, 'source')
self.assertEqual(type(sys.modules['__main__']), FakeModule)
def setUp(self):
"""
Replace the __main__ module with a fake one, so that it can be mutated
in tests
"""
self.realMain = sys.modules['__main__']
self.fakeMain = sys.modules['__main__'] = FakeModule()
def tearDown(self):
"""
Restore __main__ to its original value
"""
sys.modules['__main__'] = self.realMain
|
jpajuelo/wirecloud
|
src/wirecloud/platform/core/models.py
|
Python
|
agpl-3.0
| 3,337
| 0.000899
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Conwet Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from uuid import uuid4
from django.contrib.auth.models import User, Group
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from markdown.extensions.toc import slugify
__all__ = ('Organization', 'Team')
class OrganizationManager(models.Manager):
def is_available(self, name):
return not User.objects.filter(username=name).exists() and not Group.objects.filter(name=name).exists()
def search_available_name(self, username):
max_length = 30
uuid_length = 8
short_username = slugify(username, '-')[:max_length - uuid_length]
final_username = slugify(username, '-')[:max_length]
while not self.is_available(final_username):
final_username = short_username + uuid4().hex[:uuid_length]
return final_username
def create_organization(self, name, owners=[]):
user = User.objects.create(username=name)
group = Group.objects.create(name=name)
org = self.create(user=user, group=group)
team = Team.objects.create(organization=org, name='owners')
for owner in owners:
team.users.add(owner)
return org
@python_2_unicode_compatible
class Organization(models.Model):
user = models.OneToOneField(User)
group = models.OneToOneField(Group)
objects = OrganizationManager()
class Meta:
app_label = "platform"
def __str__(self):
return self.user.username
class TeamManager(models.Manager):
"""
The manager for the auth's Team model.
"""
def get_by_natural_key(self, organization, name):
return self.get(organization=organization, name=name)
@python_2_unicode_compatible
class Team(models.Model):
"""
Teams are a generic way of categorizing users to apply permissions, or
some other label, to those users. A user can belong to any number of
teams.
"""
organization = models.ForeignKey(Organization, on_delete=models.CASCADE)
name = models.CharField(_('name'), max_length=80)
users = models.ManyToManyField(User, verbose_name=_('users'), blank=True, related_name="teams")
objects = TeamManager()
class Meta:
app_label = "platform"
unique_together = ('organization', 'name')
verbose_name = _('team')
verbose_name_plural = _('teams')
def __str__(self):
return self.name
def natural_key(self):
return (self.organization, self.name)
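# Illustrative sketch (editorial addition, not part of the original module):
# exercising the managers defined above. The username and organization name
# are assumptions; this would run inside Django (a shell or a test case),
# never at import time, hence the function wrapper.
def _example_create_organization():
    from django.contrib.auth.models import User

    owner = User.objects.create_user('alice')                    # assumed user
    org = Organization.objects.create_organization('acme-lab', owners=[owner])
    owners_team = Team.objects.get_by_natural_key(org, 'owners')
    return owner in owners_team.users.all()                      # expected True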
|
WikiWatershed/model-my-watershed
|
src/mmw/apps/user/management/commands/drbusers.py
|
Python
|
apache-2.0
| 9,850
| 0
|
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from apps.user.models import UserProfile
# Thanks to @ajrobbins for generating this
DRB_ZIPS = ['07416', '07438', '07461', '07820', '07821', '07822', '07823',
'07825', '07826', '07827', '07828', '07832', '07833', '07836',
'07838', '07840', '07843', '07846', '07848', '07849', '07850',
'07851', '07852', '07853', '07856', '07857', '07860', '07863',
'07865', '07870', '07871', '07874', '07880', '07881', '07882',
'07885', '08001', '08002', '08003', '08007', '08009', '08010',
'08011', '08012', '08014', '08015', '08016', '08019', '08020',
'08021', '08022', '08023', '08026', '08027', '08028', '08029',
'08030', '08031', '08033', '08034', '08035', '08036', '08038',
'08039', '08041', '08042', '08043', '08045', '08046', '08048',
'08049', '08051', '08052', '08053', '08054', '08055', '08056',
'08057', '08059', '08060', '08061', '08062', '08063', '08064',
'08065', '08066', '08067', '08068', '08069', '08070', '08071',
'08072', '08073', '08074', '08075', '08077', '08078', '08079',
'08080', '08081', '08083', '08084', '08085', '08086', '08088',
'08090', '08091', '08093', '08094', '08096', '08097', '08098',
'08102', '08103', '08104', '08105', '08106', '08107', '08108',
'08109', '08110', '08204', '08210', '08212', '08230', '08242',
'08246', '08251', '08270', '08302', '08310', '08311', '08312',
'08314', '08316', '08318', '08320', '08321', '08322', '08323',
'08324', '08326', '08327', '08328', '08329', '08332', '08340',
'08341', '08343', '08344', '08345', '08348', '08349', '08350',
'08352', '08353', '08360', '08361', '08501', '08505', '08510',
'08511', '08514', '08515', '08518', '08520', '08525', '08527',
'08530', '08533', '08534', '08535', '08540', '08550', '08551',
'08554', '08555', '08559', '08560', '08561', '08562', '08608',
'08609', '08610', '08611', '08618', '08619', '08620', '08628',
            '08629', '08638', '08640', '08641', '08648', '08690', '08691',
            '08759', '08802', '08804', '08808', '08822', '08825', '08826',
'08827', '08848', '08865', '08867', '08886', '10940', '10963',
'12093', '12167', '12406', '12410', '12421', '12430', '12434',
'12438', '12441', '12455', '12459', '12464', '12465', '12468',
'12474', '12492', '12701', '12719', '12720', '12721', '12723',
'12724', '12725', '12726', '12729', '12732', '12733', '12734',
'12736', '12737', '12738', '12740', '12741', '12742', '12743',
'12745', '12746', '12747', '12748', '12749', '12750', '12751',
'12752', '12754', '12758', '12759', '12760', '12762', '12763',
'12764', '12765', '12766', '12767', '12768', '12769', '12770',
'12771', '12775', '12776', '12777', '12778', '12779', '12780',
'12781', '12783', '12784', '12785', '12786', '12787', '12788',
'12789', '12790', '12791', '12792', '13730', '13731', '13733',
'13739', '13740', '13752', '13753', '13754', '13755', '13756',
'13757', '13774', '13775', '13782', '13783', '13786', '13788',
'13804', '13806', '13813', '13839', '13842', '13847', '13849',
'13856', '13865', '17039', '17042', '17046', '17067', '17073',
'17087', '17088', '17517', '17527', '17555', '17569', '17901',
'17921', '17922', '17923', '17925', '17929', '17930', '17931',
'17933', '17944', '17948', '17951', '17952', '17953', '17954',
'17959', '17960', '17961', '17963', '17965', '17970', '17972',
'17974', '17979', '17981', '17982', '18011', '18013', '18014',
'18015', '18016', '18017', '18018', '18020', '18030', '18031',
'18032', '18034', '18035', '18036', '18037', '18038', '18040',
'18041', '18042', '18045', '18046', '18049', '18051', '18052',
'18053', '18054', '18055', '18056', '18058', '18059', '18062',
'18063', '18064', '18066', '18067', '18068', '18069', '18070',
'18071', '18072', '18073', '18074', '18076', '18077', '18078',
'18079', '18080', '18081', '18083', '18085', '18086', '18087',
'18088', '18091', '18092', '18101', '18102', '18103', '18104',
'18105', '18106', '18109', '18195', '18201', '18210', '18211',
'18212', '18214', '18216', '18218', '18220', '18222', '18224',
'18229', '18230', '18232', '18235', '18237', '18240', '18244',
'18245', '18250', '18252', '18254', '18255', '18301', '18302',
'18321', '18322', '18323', '18324', '18325', '18326', '18327',
'18328', '18330', '18331', '18332', '18333', '18334', '18335',
'18336', '18337', '18340', '18342', '18343', '18344', '18346',
'18347', '18349', '18350', '18351', '18352', '18353', '18354',
'18355', '18356', '18357', '18360', '18370', '18371', '18372',
'18403', '18405', '18407', '18415', '18417', '18421', '18424',
'18425', '18426', '18427', '18428', '18431', '18435', '18436',
'18437', '18438', '18439', '18443', '18444', '18445', '18451',
'18453', '18454', '18455', '18456', '18457', '18458', '18459',
'18460', '18461', '18462', '18463', '18464', '18466', '18469',
'18470', '18472', '18473', '18602', '18610', '18624', '18640',
'18661', '18702', '18847', '18901', '18902', '18912', '18913',
'18914', '18915', '18917', '18920', '18923', '18925', '18929',
'18930', '18932', '18935', '18936', '18938', '18940', '18942',
'18944', '18947', '18950', '18951', '18954', '18955', '18960',
'18962', '18964', '18966', '18969', '18970', '18972', '18974',
'18976', '18977', '18980', '19001', '19002', '19003', '19004',
'19006', '19007', '19008', '19009', '19010', '19012', '19013',
'19014', '19015', '19017', '19018', '19020', '19021', '19022',
'19023', '19025', '19026', '19027', '19029', '19030', '19031',
'19032', '19033', '19034', '19035', '19036', '19038', '19040',
'19041', '19043', '19044', '19046', '19047', '19050', '19053',
'19054', '19055', '19056', '19057', '19060', '19061', '19063',
'19064', '19066', '19067', '19070', '19072', '19073', '19074',
'19075', '19076', '19078', '19079', '19081', '19082', '19083',
'19085', '19086', '19087', '19090', '19094', '19095', '19096',
'19102', '19103', '19104', '19106', '19107', '19109', '19111',
'19112', '19113', '19114', '19115', '19116', '19118', '19119',
'19120', '19121', '19122', '19123', '19124', '19125', '19126',
'19127', '19128', '19129', '19130', '19131', '19132', '19133',
'19134', '19135', '19136', '19137', '19138', '19139', '19140',
'19141', '19142', '19143', '19144', '19145', '19146', '19147',
'19148', '19149', '19150', '19151', '19152', '19153', '19154',
'19301', '19311', '19312', '19316', '19317', '19319', '19320',
'19330', '19333', '19335', '19341', '19342', '19343', '19344',
'19345', '19348', '19350', '19352', '19355', '19358', '19365',
'19367', '19372', '19373', '19374', '19375', '19380', '19382',
'19383', '19390', '19401', '19403', '19405', '19406', '19422',
'19425', '19426', '19428', '19435', '19436', '19437', '19438',
'19440', '19442', '19444', '19446', '19453', '19454', '19456',
'19457', '19460', '19462', '19464', '19465', '19468', '19472',
'19473', '19474', '19475', '19477', '19492', '19503', '19504',
'19505', '19506', '19507', '19508', '19510', '19511', '19512',
'19518', '19519', '19520', '19522', '19523', '19525', '19526',
'19529', '19530', '19533', '19534', '19535', '19536', '1
|
stack-of-tasks/sot-pattern-generator
|
src/dynamic_graph/sot/pattern_generator/__init__.py
|
Python
|
isc
| 136
| 0
|
from . import meta_selector # noqa
from .pg import PatternGenerator
from .selector import Selector
PatternGenerator('')
Selector('')
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_ddos_protection_plans_operations.py
|
Python
|
mit
| 23,738
| 0.005055
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DdosProtectionPlansOperations:
"""DdosProtectionPlansOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
ddos_protection_plan_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
ddos_protection_plan_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
ddos_protection_plan_name=ddos_protection_plan_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
async def get(
self,
resource_group_name: str,
ddos_protection_plan_name: str,
**kwargs: Any
) -> "_models.DdosProtectionPlan":
"""Gets information about the specified DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DdosProtectionPlan, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_06_01.models.DdosProtectionPlan
|
dawran6/project-euler
|
14-longest-collatz-sequence.py
|
Python
|
mit
| 588
| 0.008503
|
from functools import lru_cache
def sequence(n):
    'bad idea'
    while n != 1:
        yield n
        n = 3 * n + 1 if n % 2 else n // 2
    yield n
def next_num(n):
    if n % 2:
        return 3 * n + 1
    else:
        return n // 2
@lru_cache(None)
def collatz_length(n):
if n == 1:
return 1
else:
return 1 + collatz_length(next_num(n))
if __name__ == '__main__':
i = 0
largest = 0
for n in range(1, 1_000_001):
length = collatz_length(n)
if length > largest:
largest = length
i = n
print(i, largest)
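# Quick sanity check (from the well-known problem statement): the chain that
# starts at 13 is 13 -> 40 -> 20 -> 10 -> 5 -> 16 -> 8 -> 4 -> 2 -> 1, i.e.
# 10 terms, so collatz_length(13) should return 10. Uncomment to verify:
# assert collatz_length(13) == 10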
|
openstack/neutron
|
tools/files_in_patch.py
|
Python
|
apache-2.0
| 2,508
| 0
|
#!/usr/bin/env python3
# Copyright 2020 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import sys
file_names = set()
def parse_input(input_file):
global file_names
while True:
        line_buffer = input_file.readline()
if not line_buffer:
break
line_match = re.search(r"^\s*---\s+([^\s@]+)[\s@]+", line_buffer)
if not line_match:
line_match = re.search(r"^\s*\+\+\+\s+([^\s@]+)[\s@]+",
line_buffer)
if line_match:
curr_file_name = line_match.group(1)
# trim off 'a/' and 'b/' that you will normally see in git output
#
if len(curr_file_name) > 2 and curr_file_name[1] == '/' and (
curr_file_name[0] == 'a' or curr_file_name[0] == 'b'):
curr_file_name = curr_file_name[2:]
file_names.add(curr_file_name)
def prune_unwanted_names():
global file_names
unwanted_names = set(['/dev/null'])
for curr_file_name in file_names:
# ignore files that end in '.orig' as long as non-.orig exists
line_match = re.search(r"^(.+)\.[oO][Rr][iI][gG]$", curr_file_name)
if line_match and line_match.group(1) in file_names:
unwanted_names.add(curr_file_name)
continue
file_names -= unwanted_names
def print_file_names():
for name in sorted(file_names):
print(name)
if __name__ == '__main__':
if len(sys.argv) == 1:
parse_input(sys.stdin)
else:
for curr_input_name in sys.argv[1:]:
try:
with open(curr_input_name, 'r') as curr_input_file:
parse_input(curr_input_file)
except IOError as e_str:
sys.stderr.write(
"Cannot open {}: {}\n".format(curr_input_name, e_str))
sys.exit(255)
prune_unwanted_names()
print_file_names()
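# Illustrative example (not part of the original script): fed a git-style patch
# containing headers such as
#   --- a/neutron/agent/l3/agent.py
#   +++ b/neutron/agent/l3/agent.py
# the script strips the 'a/' / 'b/' prefixes and prints the path exactly once:
#   neutron/agent/l3/agent.py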
|
elpaxoudis/pattern-recognition
|
trainers.py
|
Python
|
gpl-2.0
| 1,725
| 0.031884
|
""" A class for training our perceptro
|
ns """
class Trainers:
""" Constructor """
def __init__(self, data, r=1):
self.data_vectors = data # training dataset
self.r = r
self.wrong_classified_vectors = ["dummy"]
    def gradientDescent(self, perceptron):
print "Begin training..."
last = len(self.data_vectors[0])-1
t = 0
        # As long as we have wrongly classified vectors, training is not complete
while self.wrong_classified_vectors != []:
self.wrong_classified_vectors = []
# For each vector in training dataset we check the classifier
for i in range(len(self.data_vectors)):
if self.data_vectors[i][last]*perceptron.fire(self.data_vectors[i][:last]) < 0: # Wrong classification
self.wrong_classified_vectors = self.wrong_classified_vectors + [self.data_vectors[i]]
# Now that we have all the wrong classified vectors (or none)
# lets calculate the new weight vector
if self.wrong_classified_vectors == []:
break
new_weight_vector = perceptron.getWeightVector()
# For each wrong vector
error = [ self.wrong_classified_vectors[0][last]*x for x in self.wrong_classified_vectors[0][:last]]
for wr_v in self.wrong_classified_vectors[1:]:
#print wr_v
#print error
#raw_input()
for index in range(len(wr_v[:last])):
error[index] = error[index] + wr_v[last]*wr_v[index]
new_weight_vector = [y + self.r*x for y,x in zip(new_weight_vector,error)]
perceptron.setWeightVector(new_weight_vector)
#print "Data "+str(self.data_vectors)
#print "Wrong Classified Vectors "+str(self.wrong_classified_vectors)
#print "Current weight vector "+str(new_weight_vector)
#print "Itteration "+str(t)
t += 1
#raw_input("Press Enter:>")
return perceptron
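    # Update rule implemented above (batch perceptron / gradient descent on the
    # perceptron criterion): every pass collects the misclassified samples and
    # moves the weights by w <- w + r * sum_i y_i * x_i (sum over misclassified
    # vectors), repeating until no sample has y_i * f(x_i) < 0.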
|
atantet/transferCZ
|
tau/get_tau_norm_red.py
|
Python
|
gpl-2.0
| 13,933
| 0.002799
|
import numpy as np
from netCDF4 import Dataset
import matplotlib.pyplot as plt
from matplotlib import cm, colors
from mpl_toolkits.basemap import Basemap, addcyclic
from scipy.io import FortranFile
# Amplification factor for the mean wind stress
ampMean = 3.0
initDir = '../init/'
nlat = 31
nlon = 30
year0 = 1961
yearf = 1994
gridName = "%dx%d" % (nlat, nlon)
periodName = "%d%d" % (year0, yearf)
postfix = '%s_%s' % (gridName, periodName)
dstPostfix = ''
noiseType = 'Red'
sstFile = "ersst.%s.nc" % postfix
psFile = "pac.%s.nc" % postfix
T0 = 30.
rhoAir = 1.2
CD = 1.2e-3 # Large & Pond 1981 for w < 11 m/s
L = 1.5e7
c0 = 2.
rho = 1024
H = 200.
toStress = rhoAir * CD
toND = L / (c0**2 * rho * H) # = F0 / tau0
# Read cartesian coordinates
x = np.loadtxt('%s/x_%s.txt' % (initDir, gridName))
y = np.loadtxt('%s/y_%s.txt' % (initDir, gridName))
(X2, Y2) = np.meshgrid(x, y)
# Read sst
dset = Dataset(sstFile, "r")
sst = dset.variables["sst"][:]
lat = dset.variables["lat"][:]
lon = dset.variables["lon"][:]
(LON, LAT) = np.meshgrid(lon, lat)
nt = sst.shape[0]
dset.close()
# Map definition
llcrnrlon = lon.min()
llcrnrlat = lat.min()
urcrnrlon = lon.max()
urcrnrlat = lat.max()
nlev = 10
map = Basemap(projection='merc', llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat, urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat, resolution='c')
(x, y) = map(LON, LAT)
# Read zonal pseudo wind-stress
dset = Dataset(psFile, "r")
Wu = dset.variables["Wu"][:]
dset.close()
# Get mask
N = nlat * nlon
sst = sst.reshape(nt, N)
Wu = Wu.reshape(nt, N)
mask = np.any(sst.mask, 0) | np.any(Wu.mask, 0)
sstMasked = np.array(sst[:, ~mask])
WuMasked = np.array(Wu[:, ~mask])
lonlMasked = LON.flatten()[~mask]
latlMasked = LAT.flatten()[~mask]
nValid = N - mask.sum()
# Remove radiative equilibrium temperature : Ta = T - T0
sstMasked -= T0
# Remove mean, remove seasonal cycle, put back mean
print 'Getting anomalies...'
sstMean = sstMasked.mean(0)
WuMean = WuMasked.mean(0)
sstMaskedAnom = sstMasked - np.tile(np.expand_dims(sstMean, 0), (nt, 1))
WuMaskedAnom = WuMasked - np.tile(np.expand_dims(WuMean, 0), (nt, 1))
ssta = np.copy(sstMaskedAnom,)
Wua = np.copy(WuMaskedAnom)
for k in np.arange(12):
ssta[k::12] -= np.tile(np.expand_dims(sstMaskedAnom[k::12].mean(0), 0),
(sstMaskedAnom[k::12].shape[0], 1))
Wua[k::12] -= np.tile(np.expand_dims(WuMaskedAnom[k::12].mean(0), 0),
(WuMaskedAnom[k::12].shape[0], 1))
ssta += np.tile(np.expand_dims(sstMean, 0), (nt, 1))
Wua += np.tile(np.expand_dims(WuMean, 0), (nt, 1))
# Regressions
print 'Getting wind stress residuals...'
WuResDim = np.copy(Wua)
for ij in np.arange(nValid):
X = np.matrix(ssta[:, ij]).T
Y = np.matrix(Wua[:, ij]).T
A = (X.T * X)**(-1) * (X.T * Y)
WuResDim[:, ij] = np.squeeze(np.array(Y - X * A))
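# The loop above is, per grid point, an ordinary least-squares fit of wind
# stress on SST (A = (X'X)^-1 X'Y); WuResDim keeps the residual, i.e. the part
# of the wind-stress anomaly not linearly explained by SST.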
# Adimensionalize ! F0=L tau0/(co^2 rho H)
print 'Get adimensional residual wind stress...'
WuRes = WuResDim * toStress * toND
# Decompose residual wind-stress
WuResMean = WuRes.mean(0)
WuResAnom = WuRes - np.tile(np.expand_dims(WuResMean, 0), (nt, 1))
# Plot WuResMean
fig = plt.figure()
field = np.ma.masked_all((nlat*nlon,))
field[~mask] = WuResMean
field = field.reshape(nlat, nlon)
vmax = np.max(field)
vmin = np.min(field)
levels = np.linspace(vmin, vmax, nlev)
cs = map.contourf(x, y, field, levels, cmap=cm.RdBu_r)
plt.title('Mean of the residual wind-stress')
map.drawcoastlines()
# draw parallels and meridians.
map.drawparallels(np.arange(0, 81.,10.))
map.drawmeridians(np.arange(-180.,181.,30.))
plt.colorbar(orientation='horizontal')
fig.savefig('tauResMean%s.png' % dstPostfix, bbox_inches='tight')
# Plot std of WuRes
fig = plt.figure()
field = np.ma.masked_all((nlat*nlon,))
field[~mask] = WuResAnom.std(0)
field = field.reshape(nlat, nlon)
vmax = np.max(field)
vmin = 0.
levels = np.linspace(vmin, vmax, nlev)
cs = map.contourf(x, y, field, levels, cmap=cm.hot_r)
plt.title('Std of residual wind-stress')
map.drawcoastlines()
# draw parallels and meridians.
map.drawparallels(np.arange(0, 81.,10.))
map.drawmeridians(np.arange(-180.,181.,30.))
plt.colorbar(orientation='horizontal')
fig.savefig('tauResStd%s.png' % dstPostfix, bbox_inches='tight')
# EOF Decomposition
print 'EOF decomposition...'
# Get covariance matrix
sim = np.cov(WuResAnom, rowvar=False)
# Eigenvector decomposition
(w, v) = np.linalg.eigh(sim)
# Get principal component
pc = np.dot(WuResAnom, v)
isort = np.argsort(w)
w = w[isort][::-1]
v = v[:, isort][:, ::-1]
pc = pc[:, isort][:, ::-1]
wn = w / w.sum()
# Plot first EOFs
nEOF = 1
#nEOF = 3
print 'First %d EOFs explained variance: ' % nEOF, (wn[:nEOF] * 100).astype(int)
print 'First %d EOFs cumulated explained variance: ' % nEOF, (wn[:nEOF].cumsum() * 100).astype(int)
for k in np.arange(nEOF):
fig = plt.figure()
eof = np.ma.masked_all((nlat*nlon,))
eof[~mask] = v[:, k]
eof = eof.reshape(nlat, nlon)
vmax = np.max(np.abs(eof))
vmin = -vmax
levels = np.linspace(vmin, vmax, nlev)
cs = map.contourf(x, y, eof, levels, cmap=cm.RdBu_r)
plt.title('EOF #' + str(k) + " explaining %2d%% of variance" % (wn[k] * 100,))
map.drawcoastlines()
# draw parallels and meridians.
map.drawparallels(np.arange(0, 81.,10.))
map.drawmeridians(np.arange(-180.,181.,30.))
plt.colorbar(orientation='horizontal')
fig.savefig('tau_eof%d%s.png' % (k, dstPostfix), bbox_inches='tight')
# Get Periodograms
T = 100000
sampPeriod = 1.
window = np.hamming(nt)
# Get nearest larger power of 2
if np.log2(nt) != int(np.log2(nt)):
nfft = 2**(int(np.log2(nt)) + 1)
else:
nfft = nt
# Get frequencies and shift zero frequency to center
freq = np.fft.fftfreq(nfft, d=sampPeriod)
freq = np.fft.fftshift(freq)
freqYear = freq * 12
windowrn = np.hamming(T)
if np.log2(T) != int(np.log2(T)):
nfftrn = 2**(int(np.log2(T)) + 1)
else:
nfftrn = T
# Get frequencies and shift zero frequency to center
#freqrn = np.fft.fftfreq(nfftrn, d=sampPeriod)
freqrn = np.fft.fftfreq(nfftrn, d=sampPeriod)
freqrn = np.fft.fftshift(freqrn)
freqrnYear = freqrn * 12
nRAVG = 5
nRAVGrn = int(nRAVG * nfftrn * 1. / nfft * 0.1)
# Get NINO4
print 'Getting NINO4 index...'
nino4slat = -5.
nino4nlat = 5.
nino4wlon = 160.
nino4elon = 210.
nino4 = WuResAnom[:, (lonlMasked >= nino4wlon) & (lonlMasked <= nino4elon)
& (latlMasked >= nino4slat) & (latlMasked <= nino4nlat)].mean(1) / toND
# Get periodogram of zonal wind stress averaged over nino4
ts = nino4# / nt
# Apply window
tsWindowed = ts * window
# Fourier transform and shift zero frequency to center
fts = np.fft.fft(tsWindowed, nfft, 0)
fts = np.fft.fftshift(fts)
# Get periodogram
perio = np.abs(fts / nt)#**2
# Apply running average
perioRAVG = perio.copy()
for k in np.arange(nRAVG/2, nfft-nRAVG/2):
perioRAVG[k] = perio[k-nRAVG/2:k+nRAVG/2 + 1].mean() / nRAVG
# Plot
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(freqYear, np.log10(perioRAVG))
# ax.set_xscale('log')
# ax.set_yscale('log')
ax.set_xlim(0, 4)
#ax.set_ylim(0, vmax)
plt.title('Periodogram of residual wind stress averaged over Nino4')
fig.savefig('nino4_perio%s.png' % dstPostfix, bbox_inches='tight')
# Get red-noise parameters
print 'Getting red-noise parameters of principal components...'
rn = np.empty((2, nEOF))
for k in np.arange(nEOF):
ts = pc[:, k]
# Get autocorrelation
rn[0, k] = np.corrcoef(ts[1:], ts[:-1])[0, 1]
rn[1, k] = ts.std() * np.sqrt(1. - rn[0, k]**2)
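# Interpretation note: rn[0, k] is the lag-1 autocorrelation (AR(1) coefficient
# phi) of the k-th principal component and rn[1, k] the innovation standard
# deviation sigma * sqrt(1 - phi**2), so the synthetic AR(1) series generated
# below has approximately the same variance and decorrelation time as the PC.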
# Generate the red noise time series and plot the FFTs
print 'Generating red noise principal components...'
pcrn = np.empty((T, nEOF))
for k in np.arange(nEOF):
pcrn[0, k] = np.random.normal(0, rn[1, k])
for t in np.arange(1, T):
pcrn[t, k] = rn[0, k] * pcrn[t-1, k] + np.random.normal(0, rn[1, k])
# Plot FFTs
print 'Plotting periodograms...'
for k in np.arange(nEOF):
ts = pc[:, k]
# FFT
# Apply window
tsWindowed = ts * window
# Fourier transform and shift zero frequency to center
fts = np.fft.fft(tsWindowed, nfft, 0)
fts = np.fft.fftshift(fts)
# Get periodogram
perio = np.abs(fts / nt)**2
# Apply running average
perioRAVG = perio.copy()
for i in np.aran
|
ModernMT/MMT
|
src/textprocessing/script/pyflex.py
|
Python
|
apache-2.0
| 6,428
| 0.001557
|
import os
import sys
__author__ = 'Davide Caroselli'
def escape(string):
escaped = ''
for c in string:
if ('0' <= c <= '9') or ('A' <= c <= 'Z') or ('a' <= c <= 'z'):
escaped += c
else:
escaped += '\\' + c
return escaped
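# Example (illustrative): escape("U.S.") returns the string U\.S\. -- every
# character outside [0-9A-Za-z] gets a backslash so it stays literal in the
# generated JFlex regular expressions.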
def _abspath(root, path):
if not os.path.isabs(path):
path = os.path.abspath(os.path.join(root, path))
return path
def _class(classname, super_class=None):
lines = ['%public',
'%class ' + classname]
if super_class is not None:
lines.append('%extends ' + super_class)
lines += ['%unicode',
'%integer',
'%function next',
'%pack',
'%char',
'%{',
'\tprotected int getStartRead() { return zzStartRead; }',
'\tprotected int getMarkedPosition() { return zzMarkedPos; }',
'\tprotected int yychar() { return yychar; }',
'%}']
return '\n'.join(lines)
def _include(path):
with open(path, 'r', encoding='utf-8') as content:
        return content.read()
def _process_prefix(line, caseless, patterns):
# Match any case only if caseless has been specified and line is not a single char
match_anycase = False
if caseless and len(line) > 1 and line[0].isalpha():
line = line.lower()
match_anycase = True
# No duplicates
    if line in patterns:
return None
if match_anycase:
string = ''
for c in line:
string += '(' + escape(c.upper()) + '|' + escape(c.lower()) + ')'
line = string + '\\.'
else:
line = escape(line + '.')
return '(' + line + ')'
def _prefixes(path, caseless=None):
if caseless is not None and caseless != 'caseless':
raise Exception('Unrecognized argument ' + caseless)
caseless = False if caseless is None else True
regular_patterns = []
numeric_only_patterns = []
with open(path, 'r', encoding='utf-8') as source:
for line in source:
line = line.strip()
if len(line) == 0 or line.startswith('#'):
continue
if '#NUMERIC_ONLY#' in line:
patterns = numeric_only_patterns
line = line.replace('#NUMERIC_ONLY#', '').strip()
else:
patterns = regular_patterns
line = _process_prefix(line, caseless, patterns)
if line is not None:
patterns.append(line)
return regular_patterns, numeric_only_patterns
def _contractions(path):
result = []
with open(path, 'r', encoding='utf-8') as source:
for line in source:
line = line.strip()
if len(line) == 0 or line.startswith('#'):
continue
for token in (line.lower(), line.upper(), line[:1].upper() + line[1:].lower()):
pattern = token.replace('\'', '" "?{apos}" "?')
if pattern.endswith('" "?'):
pattern = pattern[:-4]
result.append(pattern)
return result
def _encode_prefixes(regular_patterns, numeric_only_patterns):
lines = []
if len(regular_patterns) > 0:
lines.append('ProtectedPatterns = (' + '|'.join(regular_patterns) + ')')
if len(numeric_only_patterns) > 0:
lines.append('NumericProtectedPatters = (' + '|'.join(numeric_only_patterns) + ')')
return '\n'.join(lines)
def generate_jflex(parent_dir, template_file, target_dir):
rel_path = os.path.dirname(template_file)
include_root = os.path.join(parent_dir, rel_path)
classname = os.path.splitext(os.path.basename(template_file))[0]
template_file = os.path.join(parent_dir, template_file)
parent_target = os.path.join(target_dir, rel_path)
target_file = os.path.join(parent_target, classname + '.jflex')
with open(template_file, 'r', encoding='utf-8') as stream:
content = [line.strip() for line in stream.readlines()]
has_regular_patterns = False
has_numeric_only_patterns = False
for i, line in enumerate(content):
if line.startswith('//pyflex '):
line = line.split()[1:]
args = line[1:]
command = line[0]
if command == 'class':
content[i] = _class(classname, super_class=(args[0] if len(args) > 0 else None))
elif command == 'include':
content[i] = _include(_abspath(include_root, args[0]))
elif command == 'prefixes':
regular_patterns, numeric_only_patterns = _prefixes(_abspath(include_root, args[0]), *(args[1:]))
has_regular_patterns = len(regular_patterns) > 0
has_numeric_only_patterns = len(numeric_only_patterns) > 0
content[i] = _encode_prefixes(regular_patterns, numeric_only_patterns)
elif command == 'contractions':
contractions = _contractions(_abspath(include_root, args[0]))
content[i] = 'Contractions = (%s)' % ('|'.join(contractions))
else:
raise Exception("Unknown command " + command)
if has_regular_patterns:
content.append('[ !¡\\"#$%&\'*+,\\-./:;<=>?¿@\\[\\]\\^_`{|}~()]{ProtectedPatterns} '
'{ zzStartReadOffset = 1; return PROTECT; }')
if has_numeric_only_patterns:
content.append('[ !¡\\"#$%&\'*+,\\-./:;<=>?¿@\\[\\]\\^_`{|}~()]{NumericProtectedPatters}" "[:digit:] '
'{ zzStartReadOffset = 1; yypushback(2); return PROTECT; }')
if not os.path.isdir(parent_target):
os.makedirs(parent_target)
with open(target_file, 'w', encoding='utf-8') as output:
for line in content:
output.write(line)
output.write('\n')
def main():
if len(sys.argv) != 3:
print('Usage: pyflex.py SOURCE_DIRECTORY TARGET_DIRECTORY')
exit(1)
source_dir = sys.argv[1]
target_dir = sys.argv[2]
source_files = []
for root, directories, filenames in os.walk(source_dir):
for filename in filenames:
if filename.endswith('.pyflex'):
rel_path = root.replace(source_dir, '').lstrip(os.path.sep)
source_files.append(os.path.join(rel_path, filename))
for f in source_files:
generate_jflex(source_dir, f, target_dir)
if __name__ == "__main__":
main()
|
ak2703/edx-platform
|
common/djangoapps/student/views.py
|
Python
|
agpl-3.0
| 92,551
| 0.002636
|
"""
Student Views
"""
import datetime
import logging
import uuid
import json
import warnings
from collections import defaultdict
from pytz import UTC
from requests import HTTPError
from ipware.ip import get_ip
from django.conf import settings
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm
from django.contrib import messages
from django.core.context_processors import csrf
from django.core import mail
from django.core.urlresolvers import reverse
from django.core.validators import validate_email, ValidationError
from django.db import IntegrityError, transaction
from django.http import (HttpResponse, HttpResponseBadRequest, HttpResponseForbidden,
HttpResponseServerError, Http404)
from django.shortcuts import redirect
from django.utils.translation import ungettext
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _, get_language
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_POST, require_GET
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.template.response import TemplateResponse
from ratelimitbackend.exceptions import RateLimitException
from social.apps.django_app import utils as social_utils
from social.backends import oauth as social_oauth
from social.exceptions import AuthException, AuthAlreadyAssociated
from edxmako.shortcuts import render_to_response, render_to_string
from course_modes.models import CourseMode
from shoppingcart.api import order_history
from student.models import (
Registration, UserProfile,
PendingEmailChange, CourseEnrollment, CourseEnrollmentAttribute, unique_id_for_user,
CourseEnrollmentAllowed, UserStanding, LoginFailures,
create_comments_service_user, PasswordHistory, UserSignupSource,
DashboardConfiguration, LinkedInAddToProfileConfiguration, ManualEnrollmentAudit, ALLOWEDTOENROLL_TO_ENROLLED)
from student.forms import AccountCreationForm, PasswordResetFormNoActive
from verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
from certificates.models import CertificateStatuses, certificate_status_for_student
from certificates.api import ( # pylint: disable=import-error
get_certificate_url,
has_html_certificates_enabled,
)
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore import ModuleStoreEnum
from collections import namedtuple
from courseware.courses import get_courses, sort_by_announcement, sort_by_start_date # pylint: disable=import-error
from courseware.access import has_access
from django_comment_common.models import Role
from external_auth.models import ExternalAuthMap
import external_auth.views
from external_auth.login_and_register import (
login as external_auth_login,
register as external_auth_register
)
from bulk_email.models import Optout, CourseAuthorization
from lang_pref import LANGUAGE_KEY
import track.views
import dogstats_wrapper as dog_stats_api
from util.db import commit_on_success_with_read_committed
from util.json_request import JsonResponse
from util.bad_request_rate_limiter import BadRequestRateLimiter
from util.milestones_helpers import (
get_pre_requisite_courses_not_completed,
)
from microsite_configuration import microsite
from util.password_policy_validators import (
validate_password_length, validate_password_complexity,
validate_password_dictionary
)
import third_party_auth
from third_party_auth import pipeline, provider
from student.helpers import (
check_verify_status_by_course,
auth_pipeline_urls, get_next_url_for_login_page
)
from student.cookies import set_logged_in_cookies, delete_logged_in_cookies
from student.models import anonymous_id_for_user
from shoppingcart.models import DonationConfiguration, CourseRegistrationCode
from embargo import api as embargo_api
import analytics
from eventtracking import tracker
# Note that this lives in LMS, so this dependency should be refactored.
from notification_prefs.views import enable_notifications
# Note that this lives in openedx, so this dependency should be refactored.
from openedx.core.djangoapps.user_api.preferences import api as preferences_api
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
ReverifyInfo = namedtuple('ReverifyInfo', 'course_id course_name course_number date status display') # pylint: disable=invalid-name
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
def csrf_token(context):
"""A csrf token that can be included in a form."""
token = context.get('csrf_token', '')
if token == 'NOTPROVIDED':
return ''
return (u'<div style="display:none"><input type="hidden"'
' name="csrfmiddlewaretoken" value="%s" /></div>' % (token))
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context=None, user=AnonymousUser()):
"""
Render the edX main page.
extra_context is used to allow immediate display of certain modal windows, eg signup,
as used by external_auth.
"""
if extra_context is None:
extra_context = {}
# The course selection work is done in courseware.courses.
domain = settings.FEATURES.get('FORCE_UNIVERSITY_DOMAIN') # normally False
# do explicit check, because domain=None is valid
if domain is False:
domain = request.META.get('HTTP_HOST')
courses = get_courses(user, domain=domain)
if microsite.get_value("ENABLE_COURSE_SORTING_BY_START_DATE",
settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"]):
courses = sort_by_start_date(courses)
else:
courses = sort_by_announcement(courses)
context = {'courses': courses}
context.update(extra_context)
return render_to_response('index.html', context)
def process_survey_link(survey_link, user):
"""
If {UNIQUE_ID} appears in the link, replace it with a unique id for the user.
Currently, this is sha1(user.username). Otherwise, return survey_link.
"""
return survey_link.format(UNIQUE_ID=unique_id_for_user(user))
def cert_info(user, course_overview, course_mode):
"""
Get the certificate info needed to render the dashboard section for the given
student and course.
Arguments:
user (User): A user.
course_overview (CourseOverview): A course.
course_mode (str): The enrollment mode (honor, verified, audit, etc.)
Returns:
dict: A dictionary with keys:
'status': one of 'generating', 'ready', 'notpassing', 'processing', 'restricted'
'show_download_url': bool
'download_url': url, only present if show_download_url is True
'show_disabled_download_button': bool -- true if state is 'generating'
'show_survey_button': bool
'survey_url': url, only if show_survey_button is True
'grade': if status is not 'processing'
"""
if not course_overview.may_certify():
return {}
return _cert_info(
user,
course_overview,
certificate_status_for_student(user, course_overview.id),
course_mode
)
def reverification_info(statuses):
"""
Returns reverification-related information for *all* of user's enrollments whose
reverification status is in statuses.
Args:
statuses (list): a list of reverification statuses we want information for
example: ["must_reverify"
|
KunihikoKido/sublime-elasticsearch-client
|
commands/put_search_template.py
|
Python
|
mit
| 474
| 0
|
from .base import CreateBaseCommand
class PutSearchTemplateCommand(CreateBaseCommand):
command_name = "elasticsearch:put-search
|
-template"
def run_request(self, template_id=None):
if not template_id:
self.show_input_panel(
'Search Template Id: ', '', self.run)
return
        options = dict(
            id=template_id,
body=self.get_text()
)
return self.client.put_template(**options)
|
cmouse/buildbot
|
worker/buildbot_worker/util/_hangcheck.py
|
Python
|
gpl-2.0
| 4,189
| 0.000239
|
"""
Protocol wrapper that will detect hung connections.
In particular, since PB expects the server to talk first and HTTP
expects the client to talk first, when a PB client talks to an HTTP
server, neither side will talk, leading to a hung connection. This
wrapper will disconnect in that case, and inform the caller.
"""
from __future__ import absolute_import
from __future__ import print_function
from twisted.internet.interfaces import IProtocol
from twisted.internet.interfaces import IProtocolFactory
from twisted.python.components import proxyForInterface
def _noop():
pass
class HangCheckProtocol(
proxyForInterface(IProtocol, '_wrapped_protocol'), object,
):
"""
Wrap a protocol, so the underlying connection will disconnect if
the other end doesn't send data within a given timeout.
"""
transport = None
_hungConnectionTimer = None
# hung connections wait for a relatively long time, since a busy master may
# take a while to get back to us.
_HUNG_CONNECTION_TIMEOUT = 120
def __init__(self, wrapped_protocol, hung_callback=_noop, reactor=None):
"""
:param IProtocol wrapped_protocol: The protocol to wrap.
:param hung_callback: Called when the connection has hung.
:type hung_callback: callable taking no arguments.
:param IReactorTime reactor: The reactor to use to schedule
the hang check.
"""
if reactor is None:
from twisted.internet import reactor
self._wrapped_protocol = wrapped_protocol
self._reactor = reactor
self._hung_callback = hung_callback
def makeConnection(self, transport):
# Note that we don't wrap the transport for the protocol,
# because we only care about noticing data received, not
# sent.
self.transport = transport
super(HangCheckProtocol, self).makeConnection(transport)
self._startHungConnectionTimer()
def dataReceived(self, data):
self._stopHungConnectionTimer()
        super(HangCheckProtocol, self).dataReceived(data)
def connectionLost(self, reason):
self._stopHungConnectionTimer()
super(HangCheckProtocol, self).connectionLost(reason)
def _startHungConnectionTimer(self):
"""
Start a timer to detect if the connection is hung.
"""
def hungConnection():
self._hung_callback()
self._hungConnectionTimer = None
self.transport.loseConnection()
        self._hungConnectionTimer = self._reactor.callLater(
self._HUNG_CONNECTION_TIMEOUT, hungConnection)
def _stopHungConnectionTimer(self):
"""
Cancel the hang check timer, since we have received data or
been closed.
"""
if self._hungConnectionTimer:
self._hungConnectionTimer.cancel()
self._hungConnectionTimer = None
class HangCheckFactory(
proxyForInterface(IProtocolFactory, '_wrapped_factory'), object,
):
"""
Wrap a protocol factory, so the underlying connection will
disconnect if the other end doesn't send data within a given
timeout.
"""
def __init__(self, wrapped_factory, hung_callback):
"""
:param IProtocolFactory wrapped_factory: The factory to wrap.
:param hung_callback: Called when the connection has hung.
:type hung_callback: callable taking no arguments.
"""
self._wrapped_factory = wrapped_factory
self._hung_callback = hung_callback
def buildProtocol(self, addr):
protocol = self._wrapped_factory.buildProtocol(addr)
return HangCheckProtocol(protocol, hung_callback=self._hung_callback)
# This is used as a ClientFactory, which doesn't have a specific interface, so forward the
# additional methods.
def startedConnecting(self, connector):
self._wrapped_factory.startedConnecting(connector)
def clientConnectionFailed(self, connector, reason):
self._wrapped_factory.clientConnectionFailed(connector, reason)
def clientConnectionLost(self, connector, reason):
self._wrapped_factory.clientConnectionLost(connector, reason)
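# Hypothetical usage sketch (names and endpoint below are examples only, not
# taken from buildbot itself):
#
#   from twisted.internet import reactor
#   from twisted.spread.pb import PBClientFactory
#
#   def on_hang():
#       print("peer never spoke; HangCheckProtocol dropped the connection")
#
#   factory = HangCheckFactory(PBClientFactory(), hung_callback=on_hang)
#   reactor.connectTCP("master.example.com", 9989, factory)
#
# The wrapped connection is torn down if the peer sends no data within
# _HUNG_CONNECTION_TIMEOUT (120) seconds, and hung_callback is invoked.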
|
hdknr/paloma
|
src/paloma/models.py
|
Python
|
bsd-2-clause
| 33,915
| 0.000295
|
# -*- coding: utf-8 -*-
from django.db.models import Q
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now
from django.conf import settings
from django import template # import Template,Context
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.utils.deconstruct import deconstructible
from email import message_from_string
from celery.result import AsyncResult
from bs4 import BeautifulSoup as Soup
import traceback
import re
from utils import (
create_auto_secret,
create_auto_short_secret,
expire,
get_template_source,
)
import json
import logging
logger = logging.getLogger('paloma')
DEFAULT_RETURN_PATH_RE = r"bcmsg-(?P<message_id>\d+)@(?P<domain>.+)"
DEFAULT_RETURN_PATH_FORMAT = "bcmsg-%(message_id)s@%(domain)s"
RETURN_PATH_RE = r"^(?P<commnad>.+)-(?P<message_id>\d+)@(?P<domain>.+)"
RETURN_PATH_FORMAT = "%(command)s-%(message_id)s@%(domain)s"
def return_path_from_address(address):
return re.search(
DEFAULT_RETURN_PATH_RE,
address).groupdict()
def default_return_path(param):
return DEFAULT_RETURN_PATH_FORMAT % param
def read_return_path(address):
return re.search(
RETURN_PATH_RE,
address).groupdict()
def make_return_path(param):
return RETURN_PATH_FORMAT % param
def MDT(t=None):
return (t or now()).strftime('%m%d%H%M%S')
@deconstructible
class Domain(models.Model):
''' Domain
- virtual_transport_maps.cf
'''
domain = models.CharField(
_(u'Domain'),
unique=True, max_length=100, db_index=True, )
''' Domain
- key for virtual_transport_maps.cf
- key and return value for virtual_domains_maps.cf
'''
description = models.CharField(
_(u'Description'),
max_length=200, default='')
maxquota = models.BigIntegerField(null=True, blank=True, default=None)
quota = models.BigIntegerField(null=True, blank=True, default=None)
transport = models.CharField(max_length=765)
'''
- virtual_transport_maps.cf looks this for specified **domain**.
'''
backupmx = models.IntegerField(null=True, blank=True, default=None)
active = models.BooleanField(default=True)
class Meta:
verbose_name = _(u'Domain')
verbose_name_plural = _(u'Domains')
@deconstructible
class Alias(models.Model):
''' Alias
- local user - maildir
- remote user - alias
- for virtual_alias_maps.cf
'''
address = models.CharField(
_('Alias Address'), max_length=100)
'''
- key for virtual_alias_maps.cf
'''
alias = models.CharField(
_('Alias Forward'), max_length=100)
'''
- value for virtual_alias_maps.cf
'''
mailbox = models.CharField(
_(u'Mailbox'),
max_length=100, null=True, default=None, blank=True,
help_text=u'specify Maildir path if address is local user ')
'''
- for local usr
- value for virtual_alias_maps.cf
'''
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = _('Alias')
verbose_name_plural = _('Alias')
unique_together = (('address', 'alias', ), )
##
class AbstractProfile(models.Model):
''' Profile meta class'''
def target_context(self, member):
""" override this to return context dict for template rendering """
raise NotImplementedError
@classmethod
def target(cls, obj, *args, **kwargs):
        context = {}
subclasses = cls.__subclasses__()
for ref in obj._meta.get_all_related_objects():
if ref.model in subclasses:
try:
context.update(
getattr(obj, ref.var_name
).target_context(*args, **kwargs)
)
except Exception:
pass
return context
    class Meta:
        abstract = True
@deconstructible
class Site(models.Model):
''' Site
'''
name = models.CharField(
_(u'Owner Site Name'), help_text=_(u'Owner Site Name'),
max_length=100, db_index=True, unique=True)
''' Site Name '''
domain = models.CharField(
_(u'@Domain'), help_text=_(u'@Domain'),
max_length=100, default='localhost',
db_index=True, unique=True, null=False, blank=False, )
''' @Domain'''
url = models.CharField(
_(u'URL'), help_text=_(u'URL'),
max_length=150, db_index=True, unique=True, default="/",)
''' URL path '''
operators = models.ManyToManyField(
User, help_text=_('User'), verbose_name=_(u'Site Operators'))
''' Site Operators '''
class Meta:
verbose_name = _('Site')
verbose_name_plural = _('Site')
unique_together = (('name', 'domain'), )
@property
def authority_address(self):
return "{0}@{1}".format(self.name, self.domain)
@property
def default_circle(self):
try:
return self.circle_set.get(is_default=True,)
except:
#: if no, get default:
name = getattr(settings, 'PALOMA_NAME', 'all')
return self.circle_set.get_or_create(
site=self, name=name, symbol=name,)[0]
def __unicode__(self):
return self.domain
@classmethod
def app_site(cls):
name = getattr(settings, 'PALOMA_NAME', 'paloma')
domain = getattr(settings, 'PALOMA_DEFAULT_DOMAIN', 'example.com')
return Site.objects.get_or_create(name=name, domain=domain)[0]
# Mesage Tempalte
class TemplateManager(models.Manager):
def get_template(self, name, site=None):
site = site or Site.app_site()
ret, created = self.get_or_create(site=site, name=name)
if created or not ret.subject or not ret.text:
try:
path = 'paloma/mails/default_%s.html' % name.lower()
source = Soup(get_template_source(path))
ret.subject = source.select('subject')[0].text
ret.subject = ret.subject.replace('\n', '').replace('\r', '')
ret.text = source.select('text')[0].text
ret.save()
except Exception:
logger.debug(traceback.format_exc())
return ret
@deconstructible
class Template(models.Model):
''' Site Notice Text '''
site = models.ForeignKey(Site, verbose_name=_(u'Owner Site'))
''' Owner Site'''
name = models.CharField(
_(u'Template Name'),
max_length=200, db_index=True,)
''' Notice Name'''
subject = models.CharField(
_(u'Template Subject'),
max_length=100, default='',)
''' Subject '''
text = models.TextField(
_(u'Template Text'), default='',)
''' Text '''
objects = TemplateManager()
@classmethod
def get_default_template(cls, name='DEFAULT_TEMPLATE', site=None):
site = site or Site.app_site()
return Template.objects.get_or_create(site=site, name=name,)[0]
def render(self, *args, **kwargs):
'''
:param kwargs: Context dictionary
'''
return tuple([template.Template(t).render(template.Context(kwargs))
for t in [self.subject, self.text]])
def __unicode__(self):
return self.name
class Meta:
unique_together = (('site', 'name'),)
verbose_name = _(u'Template')
verbose_name_plural = _(u'Templates')
@deconstructible
class Targetting(models.Model):
''' '''
site = models.ForeignKey(Site, verbose_name=_(u'Owner Site'))
''' Owner Site'''
targetter_content_type = models.ForeignKey(
ContentType,
related_name="targetter")
''' targetter model class'''
targetter_object_id = models.PositiveIntegerField()
''' tragetter object id '''
targetter = generic.GenericForeignKey(
'targetter_content_type',
'targetter_object_id')
''' ta
|
matthewoliver/swift
|
test/unit/test_locale/test_locale.py
|
Python
|
apache-2.0
| 2,743
| 0
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import eventlet
import os
import unittest
import sys
threading = eventlet.patcher.original('threading')
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
class TestTranslations(unittest.TestCase):
def setUp(self):
self.orig_env = {}
for var in 'LC_ALL', 'SWIFT_LOCALEDIR', 'LANGUAGE':
self.orig_env[var] = os.environ.get(var)
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
os.environ['LANGUAGE'] = ''
self.orig_stop = threading._DummyThread._Thread__stop
# See http://stackoverflow.com/questions/13193278/\
# understand-python-threading-bug
threading._DummyThread._Thread__stop = lambda x: 42
def tearDown(self):
        for var, val in self.orig_env.items():
if val is not None:
os.environ[var] = val
else:
del os.environ[var]
threading._DummyThread._Thread__stop = self.orig_stop
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
        self.assertEqual(translated_message, 'prova mesaĝo\n')
if __name__ == "__main__":
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
sys.path = sys.argv[1].split(':')
from swift import gettext_ as _
print(_('test message'))
|
monouno/site
|
judge/views/stats.py
|
Python
|
agpl-3.0
| 3,624
| 0.003311
|
from itertools import repeat, chain
from operator import itemgetter
from django.db.models import Count, Sum, Case, When, IntegerField, Value, FloatField
from django.db.models.expressions import CombinedExpression
from django.http import JsonResponse
from django.shortcuts import render
from django.utils.translation import ugettext as _
from judge.models import Language, Submission
chart_colors = [0x3366CC, 0xDC3912, 0xFF9900, 0x109618, 0x990099, 0x3B3EAC, 0x0099C6, 0xDD4477, 0x66AA00, 0xB82E2E,
                0x316395, 0x994499, 0x22AA99, 0xAAAA11, 0x6633CC, 0xE67300, 0x8B0707, 0x329262, 0x5574A6, 0x3B3EAC]
highlight_colors = []
def _highlight_colors():
for color in chart_colors:
r, g, b = color >> 16, (color >> 8) & 0xFF, color & 0xFF
highlight_colors.append('#%02X%02X%02X' % (min(int(r * 1.2), 255),
min(int(g * 1.2), 255),
min(int(b * 1.2), 255)))
_highlight_colors()
del _highlight_colors
chart_colors = map('#%06X'.__mod__, chart_colors)
ac_count = Count(Case(When(submission__result='AC', then=Value(1)), output_field=IntegerField()))
def repeat_chain(iterable):
return chain.from_iterable(repeat(iterable))
def language_data(request, language_count=Language.objects.annotate(count=Count('submission'))):
languages = language_count.filter(count__gte=1000).values('key', 'name', 'short_name', 'count').order_by('-count')
data = []
for language, color, highlight in zip(languages, chart_colors, highlight_colors):
data.append({
'value': language['count'], 'label': language['name'],
'color': color, 'highlight': highlight,
})
data.append({
'value': language_count.filter(count__lt=1000).aggregate(total=Sum('count'))['total'],
'label': 'Other', 'color': '#FDB45C', 'highlight': '#FFC870',
})
return JsonResponse(data, safe=False)
def ac_language_data(request):
return language_data(request, Language.objects.annotate(count=ac_count))
def status_data(request, statuses=None):
if not statuses:
statuses = (Submission.objects.values('result').annotate(count=Count('result'))
.values('result', 'count').order_by('-count'))
data = []
total_count = 0
for status, color, highlight in zip(statuses, chart_colors, highlight_colors):
res = status['result']
if not res:
continue
count = status['count']
total_count += count
data.append({
'value': count, 'label': str(Submission.USER_DISPLAY_CODES[res]),
'color': color, 'highlight': highlight
})
return JsonResponse(data, safe=False)
def ac_rate(request):
rate = CombinedExpression(ac_count / Count('submission'), '*', Value(100.0), output_field=FloatField())
data = Language.objects.annotate(total=Count('submission'), ac_rate=rate).filter(total__gt=0) \
.values('key', 'name', 'short_name', 'ac_rate').order_by('total')
return JsonResponse({
'labels': map(itemgetter('name'), data),
'datasets': [
{
'fillColor': 'rgba(151,187,205,0.5)',
'strokeColor': 'rgba(151,187,205,0.8)',
'highlightFill': 'rgba(151,187,205,0.75)',
'highlightStroke': 'rgba(151,187,205,1)',
'data': map(itemgetter('ac_rate'), data),
}
]
})
def language(request):
return render(request, 'stats/language.html', {
'title': _('Language statistics'), 'tab': 'language'
})
|
Gabriel-p/UBV_move
|
modules/zams_solutions.py
|
Python
|
gpl-3.0
| 4,495
| 0
|
from .ext_solutions import intrsc_values
def main(id_star, x_star, y_star, extin_list, zams_indxs, zams_inter, M_abs,
sp_type, m_obs, bv_obsrv, e_bv, ub_obsrv, e_ub):
"""
For each solution assigned to each observed star, find its absolute
magnitude, intrinsic colors, spectral types, and distances (in kpc).
"""
# Generate final lists for writing to file.
M_abs_final = [[] for _ in range(len(id_star))]
bv_final = [[] for _ in range(len(id_star))]
ub_final = [[] for _ in range(len(id_star))]
sp_type_final = [[] for _ in range(len(id_star))]
dist = [[] for _ in range(len(id_star))]
# Store *all* extinction and distance values. Used to generate the
# extinction-distance density maps.
ext_dist_all = [[], []]
# Store *unique* extinction and distance values. Used to generate the
# extinction-distance density maps.
ext_unq = []
# Store unique solutions.
x_uniq, y_uniq, m_uniq, d_uniq = [], [], [], []
id_uniq, bv_obs_uniq, ub_obs_uniq, bv_int_uniq, ub_int_uniq =\
[], [[], [], [], []], [[], [], [], []], [], []
for indx, star_indxs in enumerate(zams_indxs):
if star_indxs and len(star_indxs) <= 4:
for indx2, ind in enumerate(star_indxs):
# Store absolute magnitudes.
M_abs_final[indx].append(round(zams_inter[2][ind], 3))
# Store intrinsic colors
bv_final[indx].append(round(zams_inter[0][ind], 3))
ub_final[indx].append(round(zams_inter[1][ind], 3))
# Get spectral type.
sp_in = min(list(range(len(M_abs))),
key=lambda i: abs(
M_abs[i] - zams_inter[2][ind]))
sp_type_final[indx].append(sp_type[sp_in])
# Calculate distance.
E_BV = extin_list[indx][indx2]
A_v = 3.1 * E_BV
dist_mod = m_obs[indx] - zams_inter[2][ind]
d_kpc = round((10 ** (0.2 * (dist_mod + 5 - A_v))) / 1000., 3)
dist[indx].append(d_kpc)
# Store all extinction and dist values.
ext_dist_all[0].append(E_BV)
ext_dist_all[1].append(d_kpc)
# Identify stars with a unique solution for plotting.
if len(star_indxs) == 1:
id_uniq.append(id_star[indx])
x_uniq.append(x_star[indx])
y_uniq.append(y_star[indx])
|
m_uniq.append(m_obs[indx])
                bv_obs_uniq[0].append(bv_obsrv[indx])
bv_obs_uniq[1].append(e_bv[indx])
ub_obs_uniq[0].append(ub_obsrv[indx])
ub_obs_uniq[1].append(e_ub[indx])
# Distances.
E_BV = extin_list[indx][0]
A_v = 3.1 * E_BV
dist_mod = m_obs[indx] - zams_inter[2][star_indxs[0]]
d_kpc = round((10 ** (0.2 * (dist_mod + 5 - A_v))) / 1000., 3)
d_uniq.append(d_kpc)
ext_unq.append(E_BV)
# Corrected values.
bv_intrsc, ub_intrsc = intrsc_values(
bv_obsrv[indx], ub_obsrv[indx], extin_list[indx][0])
bv_int_uniq.append(bv_intrsc)
ub_int_uniq.append(ub_intrsc)
# Fill empty solutions with '--'.
if len(star_indxs) < 4:
for _ in range(4 - len(star_indxs)):
M_abs_final[indx].append('--')
bv_final[indx].append('--')
ub_final[indx].append('--')
sp_type_final[indx].append('--')
dist[indx].append('--')
extin_list[indx].append('--')
if len(star_indxs) > 4:
print(
'Star with too many solutions (>4):\n', int(id_star[indx]),
x_star[indx], y_star[indx], extin_list[indx])
for i in range(4):
M_abs_final[indx].append('--')
bv_final[indx].append('--')
ub_final[indx].append('--')
sp_type_final[indx].append('--')
dist[indx].append('--')
extin_list[indx][i] = 'ERR'
print('Distances, spectral types and intrinsic mags/colors obtained.')
print("N (stars w/ unique solutions) = {}".format(len(bv_int_uniq)))
return extin_list, ext_dist_all, M_abs_final, bv_final, ub_final, dist,\
sp_type_final, id_uniq, x_uniq, y_uniq, m_uniq, d_uniq, ext_unq,\
bv_obs_uniq, ub_obs_uniq, bv_int_uniq, ub_int_uniq
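The distance computed in the loops above is the standard distance-modulus relation m - M = 5 log10(d / 10 pc) + A_V with A_V = 3.1 E(B-V). A small self-contained check of the same arithmetic, with made-up values:
m_obs, M_abs, E_BV = 12.0, 0.5, 0.3              # hypothetical apparent mag, absolute mag, reddening
A_v = 3.1 * E_BV                                  # total V-band extinction
dist_mod = m_obs - M_abs                          # apparent distance modulus
d_kpc = round((10 ** (0.2 * (dist_mod + 5 - A_v))) / 1000., 3)
print(d_kpc)                                      # -> 1.3 (kpc)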
|
Alex-Chizhov/python_training
|
home_works/test/test_del_contact.py
|
Python
|
apache-2.0
| 595
| 0.005042
|
from model.info_contact import Infos
import random
def test_delete_some_contact(app, db, check_ui):
if app.contact.count() == 0:
app.contact.create(Infos(firstname="AAAAA"))
old_contacts = db.get_contact_list()
    contact = random.choice(old_contacts)
app.contact.delete_contact_by_id(contact.id)
new_contacts = db.get_contact_list()
old_contacts.remove(contact)
    assert old_contacts == new_contacts
if check_ui:
assert sorted(map(app.contact.clean, new_contacts), key=Infos.id_or_max) == sorted(app.contact.get_contact_list(), key=Infos.id_or_max)
|
germs-lab/RefSoil
|
script_to_clean_list/remove_comma.py
|
Python
|
gpl-2.0
| 134
| 0.014925
|
#!/usr/bin/python
import sys
for line in open(sys.argv[1],'r'):
spl = line.strip().split(',')
for x in spl:
print x
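The script above is Python 2 (bare print statement); an equivalent Python 3 sketch with the same behaviour, printing one comma-separated field per output line:
#!/usr/bin/env python3
import sys
with open(sys.argv[1]) as handle:
    for line in handle:
        for field in line.strip().split(','):
            print(field)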
|
mlvfx/vfxAssetBox
|
assetbox/plugins/nuke/host.py
|
Python
|
cc0-1.0
| 458
| 0
|
"""
Host app for Nuke; checks whether we are running inside Nuke.
"""
from assetbox.base.plugins.host import BaseHost
import sys
class HostApp(BaseHost):
"""
The host application class, which is used to determine context.
"""
    ID = 'Nuke'
filetypes = ['abc', 'exr']
def get_host(self):
"""Return True if we are in Nuke."""
return 'Nuke' in sys.executable
def start_QApp(self):
"""Create the QApplication."""
pass
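A minimal usage sketch for the class above, assuming BaseHost can be instantiated without extra arguments; outside a Nuke-embedded interpreter get_host() simply returns False:
app = HostApp()                     # assumes BaseHost() needs no required constructor arguments
if app.get_host():
    print('Inside Nuke; accepted filetypes: ' + ', '.join(app.filetypes))
else:
    print('Not running inside Nuke')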
|
AMorporkian/tagprostats
|
db.py
|
Python
|
mit
| 6,957
| 0.000862
|
from pony.orm import *
from datetime import datetime
db = Database('sqlite', 'players.sqlite', create_db=False)
import pony.options  # the star import above does not bind the 'pony' name itself
pony.options.MAX_FETCH_COUNT = 50000
class Players(db.Entity):
_table_ = "profile_stats"
id = PrimaryKey(int, auto=True)
last_updated = Required(datetime)
name = Required(unicode, 50)
server = Optional(unicode, 25)
profile_string = Required(unicode, 30)
captures = Required(int)
disconnects = Required(int)
drops = Required(int)
games = Required(int)
grabs = Required(int)
hold = Required(int)
hours = Required(float)
losses = Required(int)
non_return_tags = Required(int)
    popped = Required(int)
prevent = Required(int)
returns = Required(int)
support = Required(int)
tags = Required(int)
wins = Required(int)
captures_per_hour = Optional(float)
disconnects_per_hour = Optional(float)
drops_per_hour = Optional(float)
games_per_hour = Optional(float)
grabs_per_hour = Optional(float)
hold_per_hour = Optional(float)
losses_per_hour = Optional(float)
non_return_tags_per_hour = Optional(float)
popped_per_hour = Optional(float)
prevent_per_hour = Optional(float)
returns_per_hour = Optional(float)
support_per_hour = Optional(float)
tags_per_hour = Optional(float)
wins_per_hour = Optional(float)
captures_per_game = Optional(float)
disconnects_per_game = Optional(float)
drops_per_game = Optional(float)
grabs_per_game = Optional(float)
hold_per_game = Optional(float)
losses_per_game = Optional(float)
non_return_tags_per_game = Optional(float)
popped_per_game = Optional(float)
prevent_per_game = Optional(float)
returns_per_game = Optional(float)
support_per_game = Optional(float)
tags_per_game = Optional(float)
wins_per_game = Optional(float)
ranks = Optional("Ranks")
monthly = Optional("Monthly")
weekly = Optional("Weekly")
daily = Optional("Daily")
class Ranks(db.Entity):
player_id = Required(Players)
captures = Optional(int)
disconnects = Optional(int)
drops = Optional(int)
games = Optional(int)
grabs = Optional(int)
hold = Optional(int)
hours = Optional(int)
losses = Optional(int)
non_return_tags = Optional(int)
popped = Optional(int)
prevent = Optional(int)
returns = Optional(int)
support = Optional(int)
tags = Optional(int)
wins = Optional(int)
class Monthly(db.Entity):
player = Required(Players)
captures = Required(int)
disconnects = Required(int)
drops = Required(int)
games = Required(int)
grabs = Required(int)
hold = Required(int)
hours = Required(float)
losses = Required(int)
popped = Required(int)
prevent = Required(int)
returns = Required(int)
support = Required(int)
tags = Required(int)
wins = Required(int)
captures_per_hour = Optional(float)
disconnects_per_hour = Optional(float)
drops_per_hour = Optional(float)
games_per_hour = Optional(float)
grabs_per_hour = Optional(float)
hold_per_hour = Optional(float)
losses_per_hour = Optional(float)
non_return_tags_per_hour = Optional(float)
popped_per_hour = Optional(float)
prevent_per_hour = Optional(float)
returns_per_hour = Optional(float)
support_per_hour = Optional(float)
tags_per_hour = Optional(float)
wins_per_hour = Optional(float)
captures_per_game = Optional(float)
disconnects_per_game = Optional(float)
drops_per_game = Optional(float)
grabs_per_game = Optional(float)
hold_per_game = Optional(float)
losses_per_game = Optional(float)
non_return_tags_per_game = Optional(float)
popped_per_game = Optional(float)
prevent_per_game = Optional(float)
returns_per_game = Optional(float)
support_per_game = Optional(float)
tags_per_game = Optional(float)
wins_per_game = Optional(float)
class Weekly(db.Entity):
player = Required(Players)
captures = Required(int)
disconnects = Required(int)
drops = Required(int)
games = Required(int)
grabs = Required(int)
hold = Required(int)
hours = Required(float)
losses = Required(int)
popped = Required(int)
prevent = Required(int)
returns = Required(int)
support = Required(int)
tags = Required(int)
wins = Required(int)
captures_per_hour = Optional(float)
disconnects_per_hour = Optional(float)
drops_per_hour = Optional(float)
games_per_hour = Optional(float)
grabs_per_hour = Optional(float)
hold_per_hour = Optional(float)
losses_per_hour = Optional(float)
non_return_tags_per_hour = Optional(float)
popped_per_hour = Optional(float)
prevent_per_hour = Optional(float)
returns_per_hour = Optional(float)
support_per_hour = Optional(float)
tags_per_hour = Optional(float)
wins_per_hour = Optional(float)
captures_per_game = Optional(float)
disconnects_per_game = Optional(float)
drops_per_game = Optional(float)
grabs_per_game = Optional(float)
hold_per_game = Optional(float)
losses_per_game = Optional(float)
non_return_tags_per_game = Optional(float)
popped_per_game = Optional(float)
prevent_per_game = Optional(float)
returns_per_game = Optional(float)
support_per_game = Optional(float)
tags_per_game = Optional(float)
wins_per_game = Optional(float)
class Daily(db.Entity):
player = Required(Players)
captures = Required(int)
disconnects = Required(int)
drops = Required(int)
games = Required(int)
grabs = Required(int)
hold = Required(int)
hours = Required(float)
losses = Required(int)
popped = Required(int)
prevent = Required(int)
returns = Required(int)
support = Required(int)
tags = Required(int)
wins = Required(int)
captures_per_hour = Optional(float)
disconnects_per_hour = Optional(float)
drops_per_hour = Optional(float)
games_per_hour = Optional(float)
grabs_per_hour = Optional(float)
hold_per_hour = Optional(float)
losses_per_hour = Optional(float)
non_return_tags_per_hour = Optional(float)
popped_per_hour = Optional(float)
prevent_per_hour = Optional(float)
returns_per_hour = Optional(float)
support_per_hour = Optional(float)
tags_per_hour = Optional(float)
wins_per_hour = Optional(float)
captures_per_game = Optional(float)
disconnects_per_game = Optional(float)
drops_per_game = Optional(float)
grabs_per_game = Optional(float)
hold_per_game = Optional(float)
losses_per_game = Optional(float)
non_return_tags_per_game = Optional(float)
popped_per_game = Optional(float)
prevent_per_game = Optional(float)
returns_per_game = Optional(float)
support_per_game = Optional(float)
tags_per_game = Optional(float)
wins_per_game = Optional(float)
db.generate_mapping(create_tables=False)
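A minimal query sketch against the mapping above, assuming players.sqlite exists and is populated; db_session, select and desc all come from the star import of pony.orm:
with db_session:
    top = select(p for p in Players).order_by(desc(Players.captures))[:10]
    leaders = [(p.name, p.captures, p.wins) for p in top]   # ten most prolific cappers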
|
larsks/python-ftn
|
fidonet/nodelist.py
|
Python
|
gpl-3.0
| 5,703
| 0.002806
|
import re
import logging
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, backref
re_ip_in_phone = re.compile('000*-(\d+-\d+-\d+-\d+)')
re_phone_all_zero = re.compile('000*-0+-0+-0+-0+')
re_hostname = re.compile('[\w-]+\.[\w-]+')
fields = (
'kw',
'node',
'name',
'location',
'sysop',
'phone',
'speed'
)
metadata = None
engine = None
broker = None
Base = declarative_base()
class Flag(Base):
__tablename__ = 'flags'
id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('nodes.id'))
flag_name = Column(String)
flag_val = Column(String)
class Raw(Base):
__tablename__ = 'raw'
id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('nodes.id'))
entry = Column(String)
class Node(Base):
__tablename__ = 'nodes'
transform = {
'kw': lambda x: x.lower()
}
    id = Column(Integer, primary_key=True)
kw = Column(String, index=True)
name = Column(String)
location = Column(String)
sysop = Column(String)
phone = Column(String)
speed = Column(String)
zone = Column(Integer, index=True)
region = Column(Integer, index=True)
    net = Column(Integer, index=True)
node = Column(Integer)
address = Column(String, index=True, unique=True)
hub_id = Column(Integer, ForeignKey('nodes.id'))
flags = relationship(Flag, backref='node')
raw = relationship(Raw, backref='node')
def __repr__ (self):
return '<Node %s (%s)>' % (self.address, self.name)
def __str__ (self):
return self.__repr__()
def inet(self, for_flag=None):
'''Attempt to return the IP address or hostname for this
node. If you specify for_flag, look for a service specific address
first. Returns address or address:port if successful; returns None
if unable to determine an address from the nodelist.'''
ip = None
port = None
for flag in self.flags:
            # If there is an address attached to the requested flag,
# prefer it over anything else. Note that unlike
# binkd_nodelister, we stop at the first instance
# of the flag right now.
if flag.flag_name == for_flag and flag.flag_val is not None:
if '.' in flag.flag_val:
if ':' in flag.flag_val:
ip, port = flag.flag_val.split(':')
else:
ip = flag.flag_val
break
else:
port = flag.flag_val
if ip is None:
# If the system name looks like an address, use it.
mo = re_hostname.match(self.name)
if mo:
ip = self.name
if ip is None:
# Use address from IP or INA flags.
for flag in self.flags:
if flag.flag_name == 'IP' and flag.flag_val:
ip = flag.flag_val
elif flag.flag_name == 'INA' and flag.flag_val:
ip = flag.flag_val
if ip is None:
# Extract IP address from phone number field. This
# is apparently a Thing That is Done, but I'm not
# sure it's FTSC kosher.
mo = re_ip_in_phone.match(self.phone)
if mo and not re_phone_all_zero.match(self.phone):
ip = mo.group(1).replace('-', '.')
if ip is not None and ':' in ip:
# Split an ip:port specification.
ip = ip.split(':')[0]
if ip:
return port and '%s:%s' % (ip, port) or ip
def to_nodelist(self):
return ','.join([str(getattr(self, x)) for x in fields])
def from_nodelist(self, line, addr):
self.raw.append(Raw(entry=line))
cols = line.rstrip().split(',')
if len(cols) < len(fields):
logging.debug('skipping invalid line: %s', line)
return
for k,v in (zip(fields, cols[:len(fields)])):
if k in self.transform:
v = self.transform[k](v)
setattr(self, k, v)
if self.kw == 'zone':
logging.debug('start zone %s' % self.node)
addr.zone = self.node
addr.region = self.node
addr.net = self.node
addr.node = 0
elif self.kw == 'region':
logging.debug('start region %s' % self.node)
addr.region = self.node
addr.net = self.node
addr.node = 0
elif self.kw == 'host':
logging.debug('start net %s' % self.node)
addr.net = self.node
addr.node = 0
else:
addr.node = self.node
self.zone = addr.zone
self.region = addr.region
self.net = addr.net
self.node = addr.node
self.address = addr.ftn
logging.debug('parsed node: %s' % self)
flags = cols[len(fields):]
for flag in flags:
if ':' in flag:
flag_name, flag_val = flag.split(':', 1)
else:
flag_name = flag
flag_val = None
self.flags.append(Flag(flag_name=flag_name, flag_val=flag_val))
class Nodelist (object):
def __init__ (self, dburi):
self.dburi = dburi
def setup(self, create=False):
self.metadata = Base.metadata
self.engine = create_engine(self.dburi)
if create:
self.metadata.create_all(self.engine)
self.broker = sessionmaker(bind=self.engine)
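A minimal usage sketch, assuming a nodelist database has already been created with setup(create=True) and populated through Node.from_nodelist(); the database URI and FTN address below are made up:
nl = Nodelist('sqlite:///nodelist.db')            # hypothetical database URI
nl.setup()
session = nl.broker()
node = session.query(Node).filter_by(address='2:280/464').first()   # hypothetical FTN address
if node is not None:
    host = node.inet(for_flag='IBN')              # prefer a binkp-specific (IBN) address if present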
|
tkaitchuck/nupic
|
build_system/autobuild/deploy.py
|
Python
|
gpl-3.0
| 7,654
| 0.013457
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Deploys autobuild engineering release to shona and neo.
- Log basic info to ~/autobuild/status/deploy.syslog
- Grab deployment lock (~/autobuild/status/deploy.lock) but only with 2 minute
timeout. If can't grab it, then exit (this prevents us from doing
the disk-intensive searches for latest passed/deployed build)
- Find the latest passed build (unless --revision)
- Find the latest deployed build (~/autobuild/status/deploy.latest)
- If the latest deployed build is >= latest passed build, then exit (other
deploy scripts are only locked out for two minutes)
- Re-grab deployment lock with a timeout of 6 hours, so deployment happens at most every 6 hours.
- Write latest deployed build file (~/autobuild/status/deploy.latest)
- Deploy on neo and shona
(note: if deployment on neo/shona fails, that build will not be recopied
because deployed build file is already written)
Has options for:
- specifying a specific tarball to deploy (--tarball)
- specifying a specific revision number to deploy (--revision)
- deploying even if there is an unexpired lock (--force)
- using a link name other than "current"
"""
import sys
import os
import time
import getopt
import autobuild
mydir = sys.path[0]
|
buildSystemDi
|
r = os.path.abspath(os.path.normpath(os.path.join(mydir)))
sys.path.append(buildSystemDir)
import pybuild.utils as utils
import pybuild.test_release as test
testOnly = False
# Initially grab the lock only for two minutes
initialLockTime = 120
# If we decide to deploy, grab it for 6 hours to reduce frequency of copying
deployLockTime = 6 * 3600
def getLatestPassedRevision(releasesdir):
dirs = [f for f in os.listdir(releasesdir) if f.startswith("r")]
dirs = sorted(dirs, reverse=True)
found = False
for dir in dirs:
# strip off "r"
revision = int(dir[1:])
build = getTarballFromRevision(revision)
if os.path.exists(os.path.join(releasesdir, dir, build)):
found = True
break
if found == False:
raise Exception("No passed builds found")
return revision
def getLatestDeployedRevision(filename):
if not os.path.exists(filename):
return 0
lines = open(filename).readlines()
if len(lines) == 0:
try:
os.remove(filename)
except:
pass
raise Exception("getLatestDeployedRevision: filename %s is empty - deleting" % filename)
revision = int(lines[0].strip())
return revision
def setLatestDeployedRevision(filename, revision):
print "Setting latest deployed revision to %s" % revision
open(filename, "w").write(str(revision))
def getTarballFromRevision(revision):
return os.path.join(releasesdir,
"r%s" % revision,
"nupic-npp-r%s-linux64.tgz" % revision)
def deploy(tarball, host, label):
print "Deploying tarball %s to host %s with label %s" % (tarball, host, label)
if testOnly:
return
tarballFilename = os.path.basename(tarball)
tarballBasename, ext = os.path.splitext(tarballFilename)
print "Copying build %s to %s" % (tarballBasename, host)
utils.copyFilesToDirOnRemoteHost(tarball, host, "/tmp", "buildaccount")
command = 'ssh %s "cd /neo/nta; ' % host
command = command + "rm -rf %s; " % tarballBasename
command = command + "tar xzf /tmp/%s; " % tarballFilename
command = command + "rm -f %s; " % label
command = command + "ln -s %s %s; " % (tarballBasename, label)
command = command + "rm -f /tmp/%s; " % tarballFilename
command = command + '"'
print "Extracting tarball on host %s" % host
print "Running command: %s" % command
utils.runCommand(command)
def syslog(filename, message) :
"""
Append a single date-stamped message to the given file.
Used for build system startup/shutdown messages.
Heavyweight, because it opens and closes the file each time.
  All other messages go into the build logs with the
logger methods (INFO, DEBUG, WARN, ERROR).
"""
file = open(filename, "a")
out = "%s %s\n" % (time.strftime("%m/%d-%H:%M:%S "), message)
file.write(out)
print out,
file.close()
def usage():
print "usage: %s [--force] [[--revision <revision>] | [--tarball <tarball>]] [--label <label>]" % sys.argv[0]
sys.exit(1)
options = ["force", "revision=", "tarball=", "label="]
if __name__ == "__main__":
try:
opts, args = getopt.getopt(sys.argv[1:], "", options)
except getopt.GetoptError:
usage()
if len(args) > 0:
usage()
force = False
revision = None
tarball = None
label = "current"
for o, a in opts:
if o == "--force":
force = True
elif o == "--revision":
revision = int(a)
elif o == "--tarball":
if revision is not None:
print "Both --revision and --tarball specified. Only one allowed"
usage()
tarball = a
elif o == "--label":
label = a
rootdir = os.path.expanduser("~/autobuild")
statusdir = os.path.join(rootdir, "status")
releasesdir = os.path.join(rootdir, "releases")
latestDeployFile = os.path.join(statusdir, "deploy.latest")
utils.createDir(statusdir, True)
syslogFile = os.path.join(statusdir, "deploylog")
syslog(syslogFile, "Deploying")
deploylockfile = os.path.join(rootdir, "status", "deploy.lock")
try:
lock = utils.getLock(deploylockfile, initialLockTime, processFree=True, force=force)
if not lock:
raise Exception("Unable to get deployment lock %s. Use --force to override" % deploylockfile)
if tarball is None:
if revision is None:
revision = getLatestPassedRevision(releasesdir)
if revision is None:
raise Exception("Unable to get latest passed revision")
deployedRevision = getLatestDeployedRevision(latestDeployFile)
if revision <= deployedRevision:
raise Exception("Latest passed revision %d is not greater than latest deployed revision %d" % (revision, deployedRevision))
tarball = getTarballFromRevision(revision)
lock = utils.getLock(deploylockfile, deployLockTime, processFree=True, force=True)
if revision is not None:
setLatestDeployedRevision(latestDeployFile, revision)
# deploy(tarball, "shona1", label)
# syslog(syslogFile, "Deployed %s with label %s to shona1" % (tarball, label))
deploy(tarball, "matrix.numenta.com", label)
syslog(syslogFile, "Deployed %s with label %s to neo" % (tarball, label))
except Exception, e:
tb = sys.exc_info()[2]
import traceback
lineNumber = traceback.extract_tb(tb)[-1][1]
syslog(syslogFile, "Exception (line %d): %s" % (lineNumber, e))
# sys.exc_info()[2] is traceback object
# traceback.extract_tb(traceback,limit=1)[0] -> [filename, line number, function name, text]
|
SasView/sasview
|
src/sas/qtgui/Plotting/Slicers/Arc.py
|
Python
|
bsd-3-clause
| 4,351
| 0.002068
|
"""
Arc slicer for 2D data
"""
import numpy as np
from sas.qtgui.Plotting.Slicers.BaseInteractor import BaseInteractor
class ArcInteractor(BaseInteractor):
"""
Select an annulus through a 2D plot
"""
def __init__(self, base, axes, color='black', zorder=5, r=1.0,
theta1=np.pi / 8, theta2=np.pi / 4):
BaseInteractor.__init__(self, base, axes, color=color)
self.markers = []
self.axes = axes
self._mouse_x = r
self._mouse_y = 0
self._save_x = r
self._save_y = 0
self.scale = 10.0
self.theta1 = theta1
self.theta2 = theta2
self.radius = r
[self.arc] = self.axes.plot([], [], linestyle='-', marker='', color=self.color)
self.npts = 20
self.has_move = False
self.connect_markers([self.arc])
self.update()
def set_layer(self, n):
"""
Allow adding plot to the same panel
:param n: the number of layer
"""
self.layernum = n
self.update()
def clear(self):
"""
Clear this slicer and its markers
"""
self.clear_markers()
try:
for item in self.markers:
item.remove()
self.arc.remove()
except:
# Old version of matplotlib
for item in range(len(self.axes.lines)):
del self.axes.lines[0]
def get_radius(self):
"""
Return arc radius
"""
radius = np.sqrt(np.power(self._mouse_x, 2) + \
np.power(self._mouse_y, 2))
return radius
def update(self, theta1=None, theta2=None, nbins=None, r=None):
"""
Update the plotted arc
:param theta1: starting angle of the arc
:param theta2: ending angle of the arc
:param nbins: number of points along the arc
:param r: radius of the arc
"""
# Plot inner circle
x = []
y = []
if theta1 is not None:
self.theta1 = theta1
if theta2 is not None:
self.theta2 = theta2
while self.theta2 < self.theta1:
self.theta2 += (2 * np.pi)
while self.theta2 >= (self.theta1 + 2 * np.pi):
self.theta2 -= (2 * np.pi)
self.npts = int((self.theta2 - self.theta1) / (np.pi / 120))
if r is None:
self.radius = np.sqrt(np.power(self._mouse_x, 2) + \
np.power(self._mouse_y, 2))
else:
self.radius = r
for i in range(self.npts):
phi = (self.theta2 - self.theta1) / (self.npts - 1) * i + self.theta1
xval = 1.0 * self.radius * np.cos(phi)
yval = 1.0 * self.radius * np.sin(phi)
x.append(xval)
y.append(yval)
self.arc.set_data(x, y)
def save(self, ev):
"""
Remember the roughness for this layer and the next so that we
can restore on Esc.
"""
self._save_x = self._mouse_x
self._save_y = self._mouse_y
self.base.freeze_axes()
def moveend(self, ev):
"""
After a dragging motion reset the flag self.has_move to False
:param ev: event
"""
self.has_move = False
self.base.moveend(ev)
def restore(self):
"""
Restore the roughness for this layer.
"""
self._mouse_x = self._save_x
self._mouse_y = self._save_y
def move(self, x, y, ev):
"""
Process move to a new position, making sure that the move is allowed.
"""
self._mouse_x = x
self._mouse_y = y
self.has_move = True
self.base.base.update()
def set_cursor(self, radius, phi_min, phi_max, nbins):
"""
"""
self.theta1 = phi_min
self.theta2 = phi_max
self.update(nbins=nbins, r=radius)
def get_params(self):
"""
"""
|
params = {}
params["radius"] = self.radius
params["theta1"] = self.theta1
|
params["theta2"] = self.theta2
return params
    def set_params(self, params):
        """
        Restore the arc from a params dict produced by get_params.
        """
        x = params["radius"]
        phi_max = self.theta2
        nbins = self.npts
        # Keep the current start angle; the radius is the only value updated here.
        self.set_cursor(x, self.theta1, phi_max, nbins)
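A self-contained sketch of the arc parameterisation used in update() above: npts points spread uniformly in angle between theta1 and theta2 at a fixed radius (example values only):
import numpy as np
radius, theta1, theta2 = 1.5, np.pi / 8, np.pi / 4   # example values
npts = int((theta2 - theta1) / (np.pi / 120))        # same point density as update()
phi = theta1 + (theta2 - theta1) * np.arange(npts) / (npts - 1)
x, y = radius * np.cos(phi), radius * np.sin(phi)    # arc coordinates, as plotted by self.arc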
|
kasemir/org.csstudio.display.builder
|
org.csstudio.display.builder.runtime/scripts/test-script.py
|
Python
|
epl-1.0
| 348
| 0.005747
|
import sys
from connect2j import connectToJava
if (len(sys.argv) > 1):
gateway = None
try:
gateway = connectToJava(sys.argv[1])
        map = gateway.getMap()
map['1'] = 1
gateway.setMap(map)
map["obj"].setValue("Hello")
finally:
        if gateway is not None:
            gateway.shutdown()
|
pradyunsg/pip
|
src/pip/_vendor/rich/bar.py
|
Python
|
mit
| 3,264
| 0.001856
|
from typing import Optional, Union
from .color import Color
from .console import Console, ConsoleOptions, RenderResult
from .jupyter import JupyterMixin
from .measure import Measurement
from .segment import Segment
from .style import Style
# There are left-aligned characters for 1/8 to 7/8, but
# the right-aligned characters exist only for 1/8 and 4/8.
BEGIN_BLOCK_ELEMENTS = ["█", "█", "█", "▐", "▐", "▐", "▕", "▕"]
END_BLOCK_ELEMENTS = [" ", "▏", "▎", "▍", "▌", "▋", "▊", "▉"]
FULL_BLOCK = "█"
class Bar(JupyterMixin):
"""Renders a solid block bar.
Args:
size (float): Value for the end of the bar.
begin (float): Begin point (between 0 and size, inclusive).
end (float): End point (between 0 and size, inclusive).
width (int, optional): Width of the bar, or ``None`` for maximum width. Defaults to None.
color (Union[Color, str], optional): Color of the bar. Defaults to "default".
bgcolor (Union[Color, str], optional): Color of bar background. Defaults to "default".
"""
def __init__(
self,
size: float,
begin: float,
end: float,
*,
width: Optional[int] = None,
color: Union[Color, str] = "default",
bgcolor: Union[Color, str] = "default",
):
self.size = size
self.begin = max(begin, 0)
self.end = min(end, size)
self.width = width
self.style = Style(color=color, bgcolor=bgcolor)
def __repr__(self) -> str:
return f"Bar({self.size}, {self.begin}, {self.end})"
def __rich_console__(
self, console: Console, options: ConsoleOptions
) -> RenderResult:
width = min(
self.width if self.width is not None else options.max_width,
options.max_width,
)
if self.begin >= self.end:
yield Segment(" " * width, self.style)
yield Segment.line()
return
prefix_complete_eights = int(width * 8 * self.begin / self.size)
prefix_bar_count = prefix_complete_eights // 8
prefix_eights_count = prefix_complete_eights % 8
body_complete_eights = int(width * 8 * self.end / self.size)
body_bar_count = body_complete_eights // 8
body_eights_count = body_complete_eights % 8
        # When start and end fall into the same cell, we ideally should render
# a symbol that's "center-aligned", but there is no good symbol in Unicode.
# In this case, we fall back to right-aligned block symbol for simplicity.
prefix = " " * prefix_bar_count
if prefix_eights_count:
prefix += BEGIN_BLOCK_ELEMENTS[prefix_eights_count]
body = FULL_BLOCK * body_bar_count
if body_eights_count:
body += END_BLOCK_ELEMENTS[body_eights_count]
suffix = " " * (width - len(body))
yield Segment(prefix + body[len(prefix) :] + suffix, self.style)
yield Segment.line()
def __rich_measure__(
self, console: Console, options: ConsoleOptions
) -> Measurement:
return (
Measurement(self.width, self.width)
if self.width is not None
else Measurement(4, options.max_width)
)
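A minimal usage sketch; inside pip this module is vendored as pip._vendor.rich.bar, but with a standalone rich install the import path is simply rich.bar (assumed here):
from rich.console import Console
from rich.bar import Bar
console = Console()
# A bar covering 25-75 on a 0-100 scale, rendered 40 cells wide.
console.print(Bar(size=100, begin=25, end=75, width=40, color="magenta"))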
|
dedoogong/asrada
|
FaceLandmark_Detector_DAN/generate_hd5.py
|
Python
|
apache-2.0
| 1,140
| 0.005263
|
import numpy as np
import cv2
import h5py
min_img_size = 12
label_path = './label.txt'
landmark_path = './landmark.txt'
regression_box_path = './regression_box.txt'
crop_image_path = './crop_image.txt'
train_file_path = './train_12.hd5'
label = np.loadtxt(label_path, int)
landmark = np.loadtxt(landmark_path, float)
regression_box = np.loadtxt(regression_box_path, float)
label = np.transpose([label])
#landmark = np.transpose(landmark)
labels = np.concatenate((label, regression_box, landmark), axis = 1)
img_array = []
for line in open(crop_image_path):
img = cv2.imread(line.strip())
img = cv2.resize(img, (min_img_size,min_img_size))
#img = cv2.convertTo(img, cv2.CV_32FC3, 0.0078125,-127.5*0.0078125)
img = cv2.transpose(img)
img_forward = np.array(img, dtype=np.float32)
img_forward = np.transpose(img_forward, (2, 0, 1))
img_forward = (img_forward - 127.5) * 0.0078125
img_array.append(img_forward)
a = np.array(img_array, dtype=np.float32)
with h5py.File(train_file_path, 'w') as f:
f['data'] = a
f['labels'] = labels
f['regression'] = regression_box
f['landmark'] = landmark
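A small read-back sketch for sanity-checking the file written above; the shapes assume the 12x12, channels-first crops produced by the script, while the landmark width depends on landmark.txt:
import h5py
with h5py.File('./train_12.hd5', 'r') as f:
    data = f['data'][:]          # (N, 3, 12, 12) float32 crops
    labels = f['labels'][:]      # (N, 1 + 4 + landmark_width) targets
    print(data.shape, labels.shape, data.dtype)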
|
alexoneill/15-love
|
game/test.py
|
Python
|
mit
| 1,989
| 0.011061
|
#!/usr/bin/python2
# test.py
# nroberts 04/10/2017
# Instead of lighting up a bridge, we light up the terminal
from tennis_show import TennisShow
import current_bridge
from threading import Thread
import Queue
from colors import Colors
thread_continuing = True
class OutQueue:
def put(self, event):
print "Put in outqueue: %s" % str(event)
def main(bridge):
global thread_continuing
print "Usage: Press 1 for player 1 swing, 2 for player 2 swing (followed by Enter)"
print "To quit, press Ctrl+C and then Enter"
inqueue = Queue.Queue()
outqueue = OutQueue()
show = TennisShow(bridge(), inqueue=inqueue, outqueue=outqueue)
def cause_problems():
global thread_continuing
while thread_continuing:
inp = raw_input()
if inp == "r":
inqueue.put(("game_reset", None))
continue
try:
x = int(inp[0])
if len(inp) > 1:
if inp[1] == "s":
inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.RED }))
elif inp[1] == "t":
inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.GREEN }))
elif inp[1] == "c":
inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.PURPLE }))
elif inp[1] == "x":
inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.SKY_BLUE }))
else:
inqueue.put(("game_swing", { "player_num": x, "hand": 1, "strength": 1.0 }))
except:
pass
    # Read stdin on a background thread and translate key presses into inqueue events.
thread = Thread(target = cause_problems)
thread.start()
# run the show
try:
show.run(framerate=40)
finally:
thread_continuing = False
if __name__ == "__main__":
main(current_bridge.bridge)
|
|
RossBrunton/BMAT
|
bmat/context_processors.py
|
Python
|
mit
| 786
| 0.007634
|
"""Context processors, these get called and add things to template contexts"""
from django.conf import settings
def analytics_and_ads(request):
""" Adds the google analytics code to the context """
out = {}
    if request.user.is_authenticated() and request.user.settings.no_analytics:
out["analytics_code"] = ""
else:
out["analytics_code"] = settings.ANALYTICS_CODE
if request.user.is_authenticated() and request.user.settings.no_ads:
out["ad_clie
|
nt"] = ""
else:
out["ad_client"] = settings.AD_CLIENT
out["ad_slot_top"] = settings.AD_SLOT_TOP
out["ad_slot_bottom"] = settings.AD_SLOT_BOTTOM
return out
def add_webstore_url(request):
return {"webstore_url":settings.CHROME_EXTENSION_WEBSTORE}
|
Kickflip/python-kickflip
|
kickflip/kickflip.py
|
Python
|
apache-2.0
| 9,824
| 0
|
#! /usr/bin/env python
import envoy
import boto
import requests
import os
import sys
import time
import random
import string
from oauthlib.oauth2 import MobileApplicationClient
from requests_oauthlib import OAuth2Session
from boto.s3.connection import Location
from boto.s3.lifecycle import Lifecycle, Transition, Rule
from boto.s3.key import Key
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from m3u8 import M3U8
###################
# Globals
###################
connected = False
connected_aws = False
kickflip_session = None
# URLs
KICKFLIP_BASE_URL = 'https://funkcity.ngrok.com/'
# KICKFLIP_BASE_URL = 'https://api.kickflip.io'
KICKFLIP_API_URL = KICKFLIP_BASE_URL + '/api/'
# Kickflip Keys
KICKFLIP_CLIENT_ID = ''
KICKFLIP_CLIENT_SECRET = ''
KICKFLIP_APP_NAME = ''
KICKFLIP_USER_NAME = ''
KICKFLIP_UUID = ''
KICKFLIP_ACCESS_TOKEN = ''
KICKFLIP_SECRET_ACCESS_TOKEN = ''
# Amazon
AWS_ACCESS_KEY = ''
AWS_SECRET_ACCESS_KEY = ''
s3 = None
# Video settings
VIDEO_BITRATE = '2000k'
AUDIO_BITRATE = '128k'
playlist = M3U8()
####################
# AWS
####################
def set_aws_keys(USERNAME, AWS_ACCESS_KEY_VAR, AWS_SECRET_ACCESS_KEY_VAR):
global AWS_ACCESS_KEY
global AWS_SECRET_ACCESS_KEY
global KICKFLIP_USER_NAME
AWS_ACCESS_KEY = AWS_ACCESS_KEY_VAR
AWS_SECRET_ACCESS_KEY = AWS_SECRET_ACCESS_KEY_VAR
KICKFLIP_USER_NAME = USERNAME
return True
def connect_aws():
global connected_aws
global AWS_ACCESS_KEY
global AWS_SECRET_ACCESS_KEY
global s3
if not connected_aws:
s3 = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)
connected_aws = True
return connected_aws
def upload_file(filename):
return True
###################
# Kickflip Auth
###################
def connect():
global connected
global kickflip_session
global KICKFLIP_CLIENT_ID
global KICKFLIP_CLIENT_SECRET
global KICKFLIP_API_URL
if not connected:
endpoint = KICKFLIP_BASE_URL + '/o/token/'
payload = ({
'client_secret': KICKFLIP_CLIENT_SECRET,
'grant_type': 'client_credentials',
'client_id': KICKFLIP_CLIENT_ID,
})
response = requests.post(endpoint, payload)
if response.status_code != 200:
raise Exception("Error: Couldn't connect to Kickflip...")
token = response.json()
client = MobileApplicationClient(KICKFLIP_CLIENT_ID)
kickflip_session = OAuth2Session(
KICKFLIP_CLIENT_ID,
client=client,
token=token
)
connected = True
print "CONNECTED"
return connected
def auth_required(f):
global connected
global kickflip_session
def g(*args, **kwargs):
if not connected:
raise Exception("No session connected. connect() first?")
return f(*args, **kwargs)
return g
def set_keys(client_id, client_secret):
global KICKFLIP_CLIENT_ID
global KICKFLIP_CLIENT_SECRET
KICKFLIP_CLIENT_ID = client_id
KICKFLIP_CLIENT_SECRET = client_secret
def set_uuid(uuid):
global KICKFLIP_UUID
KICKFLIP_UUID = uuid
return True
def set_access_tokens():
global KICKFLIP_ACCESS_TOKEN
global KICKFLIP_SECRET_ACCESS_TOKEN
# requests-oauth.get_tokens()
KICKFLIP_ACCESS_TOKEN = key
KICKFLIP_SECRET_ACCESS_TOKEN = secret_key
return ''
####################
# Kickflip.io API
#####################
def get_account_status(username):
return ''
@auth_required
def create_user(username, password=""):
"""
Uses the `/user/new` endpoint taking the username as a parameter.
TODO: What happens when you specify no password?
e.g. username="banana1"
"""
endpoint = KICKFLIP_API_URL + '/user/new'
payload = {'username': username}
if password:
payload['password'] = password
user_response = kickflip_session.post(endpoint, payload)
return user_response.json()
@auth_required
def get_user_info(username):
"""
Uses the `/user/info` endpoint taking the username as a parameter.
e.g. username="banana1"
"""
endpoint = KICKFLIP_API_URL + '/user/info/'
payload = {'username': username}
user_response = kickflip_session.post(endpoint, payload)
return user_response.json()
def get_user(username):
return ''
def start_stream(file_path, stream_name=None, private=False, username=''):
"""
Uses the `/stream/start` endpoint taking the username as a parameter.
If you specify no username, it will fallback to the default
`KICKFLIP_USER_NAME` set in the set_aws_keys() function.
e.g. username="banana1"
"""
endpoint = KICKFLIP_API_URL + '/stream/start/'
payload = {'username': KICKFLIP_USER_NAME}
if username:
payload['username'] = username
user_response = kickflip_session.post(endpoint, payload)
import pdb
pdb.set_trace()
stream_video(file_path)
return ''
def pause_stream(stream_name):
return ''
def stop_stream():
return ''
@auth_required
def get_stream_info(stream_id):
"""
Uses the `/stream/info` endpoint taking the stream_id as a parameter.
e.g. stream_id="e83a515e-fe69-4b19-afba-20f30d56b719"
"""
endpoint = KICKFLIP_API_URL + '/stream/info/'
payload = {'stream_id': stream_id}
response = kickflip_session.post(endpoint, payload)
return response.json()
@auth_required
def get_stream_by_location(uuid, lat, lon, radius=0):
"""
Uses the `/search/location` endpoint.
takes the stream_id as a parameter.
e.g. uuid="", username="bej48snvvthy"
"""
endpoint = KICKFLIP_API_URL + '/search/location/'
payload = ({
'uuid': uuid,
'lat': lat,
'lon': lon
})
if radius != 0:
payload['radius'] = radius
response = kickflip_session.post(endpoint, payload)
return response.json()
@auth_required
def get_stream_credentials(username, password):
"""
Uses the `/user/uuid` endpoint.
takes a valid username and password as parameter.
It will return all the necessary credentials to use the API
and the upload endpoints.
e.g. username="bej48snvvthy", password=""
"""
endpoint = KICKFLIP_API_URL + '/user/uuid/'
payload = {'username': username, 'password': password}
response = kickflip_session.post(endpoint, payload)
return response.json()
@auth_required
def search_by_keyword(keyword="", uuid=""):
"""
Uses the `/search` endpoint.
takes a user uuid and a keyword.
If you specify no `uuid`, the search will not show private streams?
If the keyword is empty, it will return all the streams from the app.
e.g. uuid="e9c3d27e-406b-4f4a-9b87-6d3460c60ca6", keyword=""
reply: {
u'total_items': 3,
u'next_page_available': False,
u'success': True, u'page_number': 1,
u'streams': [...],
u'results_per_page': 25
}
"""
endpoint = KICKFLIP_API_URL + '/search/'
payload = {'uuid': uuid, 'keyword': keyword}
response = kickflip_session.post(endpoint, payload)
return response.json()
####################
# FFMPEG
####################
class SegmentHandler(PatternMatchingEventHandler):
patterns = ["*.ts", "*.m3u8"]
def process(self, event):
"""
event.event_type
'modified' | 'created' | 'moved' | 'deleted'
event.is_directory
True | False
event.src_path
path/to/observed/file
"""
# Process the file there
        print event.src_path, event.event_type  # Print for debug
if '.m3u8' not in event.src_path:
upload_file(event.src_path)
def on_modified(self, event):
global playlist
if '.m3u8' in event.src_path:
playlist.add_from_file(event.src_path)
            playlist.dump_to_file(event.src_path + '.complete.m3u8')
upload_file(event.src_path + '.complete.m3u8')
def on_created(self, event):
self.process(event)
def stream_video(video_path):
global VIDEO_BITRATE
g
|
laslabs/odoo-connector-carepoint
|
connector_carepoint/tests/test_related_action.py
|
Python
|
agpl-3.0
| 4,249
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import mock
from contextlib import contextmanager
from odoo import _
from odoo.addons.connector_carepoint import related_action
from .common import SetUpCarepointBase
mk_file = 'odoo.addons.connector_carepoint.related_action'
@contextmanager
def mock_connector_env():
with mock.patch('%s.ConnectorEnvironment' % mk_file) as env:
yield env
class StopTestException(Exception):
pass
class TestRelatedAction(SetUpCarepointBase):
def setUp(self):
super(TestRelatedAction, self).setUp()
self.model = 'carepoint.carepoint.store'
self.binding_id = self._new_record()
self.job = mock.MagicMock()
self.job.args = [self.model, self.binding_id.id]
def _new_record(self):
return self.env[self.model].create({
'name': 'Test Pharm',
'carepoint_id': 1234567,
'backend_id': self.backend.id,
'warehouse_id': self.env.ref('stock.warehouse0').id,
})
def test_unwrap_binding_no_binding(self):
""" It should return None when no binding available """
self.binding_id.unlink()
res = related_action.unwrap_binding(self.session, self.job)
self.assertEqual(None, res)
def test_unwrap_binding_gets_correct_env(self):
""" It should init the ConnectorEnv w/ proper args """
with mock_connector_env() as env:
env.side_effect = StopTestException
with self.assertRaises(StopTestException):
related_action.unwrap_binding(self.session, self.job)
env.assert_called_once_with(
self.binding_id.backend_id, self.session, self.model,
)
def test_unwrap_binding_gets_connector_unit(self):
""" It should get the connector_unit w/ proper args """
expect = 'expect'
with mock_connector_env() as env:
env().get_connector_unit.side_effect = StopTestException
with self.assertRaises(StopTestException):
related_action.unwrap_binding(
self.session, self.job, binder_class=expect
)
env().get_connector_unit.assert_called_once_with(expect)
def test_unwrap_binding_unwraps_model(self):
""" It should unwrap model from binder """
with mock_connector_env() as env:
binder = env().get_connector_unit()
binder.unwrap_model.side_effect = StopTestException
with self.assertRaises(StopTestException):
related_action.unwrap_binding(self.session, self.job)
def test_unwrap_binding_unwraps_binding(self):
""" It should call unwrap_binding on binder w/ proper args """
with mock_connector_env() as env:
binder = env().get_connector_unit()
binder.unwrap_binding.side_effect = StopTestException
with self.assertRaises(StopTestException):
related_action.unwrap_binding(self.session, self.job)
binder.unwrap_binding.assert_called_once_with(self.binding_id.id)
def test_unwrap_binding_guards_value_error(self):
""" It should use binding record when value error on wrap """
with mock_connector_env() as env:
binder = env().get_connector_unit()
binder.unwrap_model.side_effect = ValueError
res = related_action.unwrap_binding(self.session, self.job)
self.assertEqual(self.model, res['res_model'])
self.assertEqual(self.binding_id.id, res['res_id'])
def test_unwrap_binding_return(self):
""" It should return proper action """
with mock_connector_env() as env:
binder = env().get_connector_unit()
res = related_action.unwrap_binding(self.session, self.job)
expect = {
'name': _('Related Record'),
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': binder.unwrap_model(),
'res_id': binder.unwrap_binding(),
}
self.assertDictEqual(expect, res)
|
komlenic/drubs
|
drubs/drubs.py
|
Python
|
gpl-2.0
| 9,647
| 0.014823
|
import yaml
import tasks
from os.path import isfile, isdir, dirname, abspath, join, basename, normpath, realpath
from os import getcwd
from fabric.state import env, output
from fabric.tasks import execute
from fabric.colors import red, yellow, green, cyan
from fabric.contrib.console import confirm
from fabric.api import lcd
from fabric.operations import local, prompt
from pprint import pprint
def load_config_file(config_file):
'''
Returns yaml file contents as an object.
Also sets the following fabric/global env vars:
env.config_file - the supplied path to the project config file. Under typical
                    usage without the -f parameter, this will be 'project.yml'
env.config_dir - the absolute path to the project config directory
env.config - the actual contents of the config file
Accepts one parameter 'config_file': the relative or absolute path to a drubs
project config file.
'''
if isfile(config_file):
env.config_file = config_file
env.config_dir = dirname(abspath(config_file))
with open(config_file, 'r') as stream:
env.config = yaml.load(stream)
# If env.config evaluates to false, nothing parseable existed in the file.
if not env.config:
print(red("The project config file '%s' does not contain anything or is not valid. Exiting..." % (config_file)))
exit(1)
if 'nodes' not in env.config:
print(red("The project config file '%s' does not contain a 'nodes' section. Exiting..." % (config_file)))
exit(1)
return env.config
else:
if config_file == 'project.yml':
print(red("No project config file found in current working directory. Either run drubs from a directory containing a valid project config file (named 'project.yml'), or use '-f'."))
exit(1)
else:
print(red("The project config file '%s' does not exist or could not be read." % (config_file)))
exit(1)
def check_config_requirements_per_node(nodes):
'''
Checks for required values per nodes supplied.
'''
for node in nodes:
if node not in env.config['nodes']:
print(red("No node named '%s' found in drubs project config file '%s'. Exiting..." % (node, env.config_file)))
exit(1)
required_node_keys = [
'account_mail',
'account_name',
'account_pass',
'backup_directory',
'backup_lifetime_days',
'backup_minimum_count',
'db_host',
'db_name',
'db_pass',
'db_user',
'destructive_action_protection',
'make_file',
'py_file',
'server_host',
'server_port',
'server_user',
'site_mail',
'site_name',
'site_root',
]
for key in required_node_keys:
if key not in env.config['nodes'][node]:
print(red("No key named '%s' for node '%s' found. Exiting..." % (key, node)))
exit(1)
elif env.config['nodes'][node][key].strip() == '':
print(red("No value for '%s' for node '%s' found. Exiting..." % (key, node)))
exit(1)
def get_fabric_hosts(nodes):
'''
Gets fabric hosts from associated node names.
Returns a list of fabric host strings (user@host:port) for the supplied nodes.
Passing 'all' for nodes, returns a list of fabric host strings for all nodes
found in the project's config file.
'''
hosts = []
for node in nodes:
user = env.config['nodes'][node]['server_user'].strip()
host = env.config['nodes'][node]['server_host'].strip()
port = env.config['nodes'][node]['server_port'].strip()
host_string = '%s@%s:%s' % (user, host, port)
hosts.append(host_string)
return hosts
def set_flags(args):
env.verbose = args.verbose
env.debug = args.debug
env.cache = args.cache
env.no_backup = args.no_backup
env.no_restore = args.no_restore
env.yes = args.yes
# If --no-backup is set, also always set --no-restore.
if env.no_backup:
env.no_restore = True
if args.fab_debug:
output.debug = True
def drubs(args):
'''
Main entry point from __init__.py and argparser.
'''
env.drubs_dir = dirname(abspath(__file__))
env.drubs_data_dir = join(env.drubs_dir, 'data')
set_flags(args)
if args.action == 'init':
drubs_init(args)
else:
# Return error if more than one node is specified.
if len(args.nodes) > 1:
if args.action == 'status':
print(red("More than one node parameter specified. Please specify exactly one node name (or the keyword 'all' to get the status of all nodes). Exiting..."))
else:
print(red("More than one node parameter specified. Please specify exactly one node name. Exiting..."))
exit(1)
# Return error if 'all' keyword is being attempted to be used on any action
# other than 'status'.
if args.action != 'status' and args.nodes[0] == 'all':
print(red("Cannot use the keyword 'all' with the action '%s' Exiting..." % (
args.action,
)
))
exit(1)
load_config_file(args.file)
# If 'all' has been supplied for the 'nodes' parameter, set 'nodes' to a
# list of all nodes found in the project config file.
if args.nodes[0] == 'all':
args.nodes = env.config['nodes'].keys()
check_config_requirements_per_node(args.nodes)
# Build/set fabric host strings.
hosts = get_fabric_hosts(args.nodes)
# Execute the requested task on the specified hosts. For passing variable
# task/action names to execute(), getattr() is used to load the tasks from
    # tasks.py. See: http://stackoverflow.com/questions/23605418/in-fabric-how-can-i-execute-tasks-from-another-python-file
execute(getattr(tasks, args.action), hosts=hosts)
def drubs_init(args):
'''
Stubs out project configuration files.
@todo Make this work with -f argument. Presently it is only designed to work
if pwd = the project config directory. With -f pwd should be able to be
anything.
'''
project = dict()
if args.file == 'project.yml':
# No -f option supplied (or 'project.yml' supplied to -f).
project['location'] = realpath(normpath(getcwd()))
project['config_filename'] = 'project.yml'
else:
# -f option supplied and not 'project.yml'.
project['location'] = dirname(realpath(normpath(args.file)))
project['config_filename'] = basename(realpath(normpath(args.file)))
project['name'] = basename(normpath(project['location']))
project['config_file_abs_path'] = join(project['location'], project['config_filename'])
# If file exists, ask for confirmation before overwriting.
if isfile(args.file):
if not confirm(yellow("STOP! A project config file named '%s' already exists. Overwrite?" % args.file), default=False):
print(yellow('Exiting...'))
exit(0)
if not isdir(project['location']):
if confirm(yellow("'%s' location does not already exist. Create it and proceed?") % project['location'], default=True):
print(cyan("Creating '%s'...") % (project['location']))
local('mkdir -p %s' % (project['location']))
# Ask which drupal core version this project will use.
prompt(yellow("What major version of Drupal will this project use? (6,7,8)"), key="drupal_core_version", validate=r'^[6,7,8]{1}$', default="7")
# Create config file.
print(cyan("Creating a new project config file named '%s' file in '%s' with node(s) %s..." % (
project['config_filename'],
project['location'],
args.nodes
)))
node_output = dict()
for node in args.nodes:
node_output[node] = dict(
db_host = 'localhost',
db_name = project['name'],
db_user = '',
db_pass = '',
destructive_action_protection = 'off',
backup_directory = "",
backup_lifetime_days = "30",
backup_minimum_count = "3",
server_host = '',
site_root = '',
server_user = '',
server_port = '22',
site_name = '',
site_mail = '',
account_name = 'admin',
account_pass = '',
account_mail = '',
make_file = '%s.make' % (node),
py_file = '%s.py' % (node),
)
data = dict(
nodes = node_output,
project_settings = dict (
project_name = project['name'],
drupal_core_version = env.drupal_core_ve
|
layus/pylti
|
pylti/__init__.py
|
Python
|
bsd-2-clause
| 185
| 0
|
# -*- coding: utf-8 -*-
"""
PyLTI is module that implements IMS LTI in python
The API uses decorators to wrap function with LTI functionality.
"""
VERSION = "0.3.2" # pragma: no cover
|
logpai/logparser
|
benchmark/LogMine_benchmark.py
|
Python
|
mit
| 5,994
| 0.009343
|
#!/usr/bin/env python
import sys
sys.path.append('../')
from logparser import LogMine, evaluator
import os
import pandas as pd
input_dir = '../logs/' # The input directory of log file
output_dir = 'LogMine_result/' # The output directory of parsing results
benchmark_settings = {
'HDFS': {
'log_file': 'HDFS/HDFS_2k.log',
'log_format': '<Date> <Time> <Pid> <Level> <Component>: <Content>',
'regex': [r'blk_-?\d+', r'(\d+\.){3}\d+(:\d+)?'],
'max_dist': 0.005,
'k': 1,
        'levels': 2
},
'Hadoop': {
'log_file': 'Hadoop/Hadoop_2k.log',
'log_format': '<Date> <Time> <Level> \[<Process>\] <Component>: <Content>',
'regex': [r'(\d+\.){3}\d+'],
'max_dist': 0.005,
'k': 1,
'levels': 2
},
'Spark': {
'log_file': 'Spark/Spark_2k.log',
'log_format': '<Date> <Time> <Level> <Component>: <Content>',
'regex': [r'(\d+\.){3}\d+', r'\b[KGTM]?B\b', r'([\w-]+\.){2,}[\w-]+'],
'max_dist': 0.01,
'k': 1,
'levels': 2
},
'Zookeeper': {
'log_file': 'Zookeeper/Zookeeper_2k.log',
'log_format': '<Date> <Time> - <Level> \[<Node>:<Component>@<Id>\] - <Content>',
'regex': [r'(/|)(\d+\.){3}\d+(:\d+)?'],
'max_dist': 0.001,
'k': 1,
'levels': 2
},
'BGL': {
'log_file': 'BGL/BGL_2k.log',
'log_format': '<Label> <Timestamp> <Date> <Node> <Time> <NodeRepeat> <Type> <Component> <Level> <Content>',
'regex': [r'core\.\d+'],
'max_dist': 0.01,
'k': 2,
'levels': 2
},
'HPC': {
'log_file': 'HPC/HPC_2k.log',
'log_format': '<LogId> <Node> <Component> <State> <Time> <Flag> <Content>',
'regex': [r'=\d+'],
'max_dist': 0.0001,
'k': 0.8,
'levels': 2
},
'Thunderbird': {
'log_file': 'Thunderbird/Thunderbird_2k.log',
'log_format': '<Label> <Timestamp> <Date> <User> <Month> <Day> <Time> <Location> <Component>(\[<PID>\])?: <Content>',
'regex': [r'(\d+\.){3}\d+'],
'max_dist': 0.005,
'k': 1,
'levels': 2
},
'Windows': {
'log_file': 'Windows/Windows_2k.log',
'log_format': '<Date> <Time>, <Level> <Component> <Content>',
'regex': [r'0x.*?\s'],
'max_dist': 0.003,
'k': 1,
'levels': 2
},
'Linux': {
'log_file': 'Linux/Linux_2k.log',
'log_format': '<Month> <Date> <Time> <Level> <Component>(\[<PID>\])?: <Content>',
'regex': [r'(\d+\.){3}\d+', r'\d{2}:\d{2}:\d{2}'],
'max_dist': 0.006,
'k': 1,
'levels': 2
},
'Andriod': {
'log_file': 'Andriod/Andriod_2k.log',
'log_format': '<Date> <Time> <Pid> <Tid> <Level> <Component>: <Content>',
'regex': [r'(/[\w-]+)+', r'([\w-]+\.){2,}[\w-]+', r'\b(\-?\+?\d+)\b|\b0[Xx][a-fA-F\d]+\b|\b[a-fA-F\d]{4,}\b'],
'max_dist': 0.01,
'k': 1 ,
'levels': 2
},
'HealthApp': {
'log_file': 'HealthApp/HealthApp_2k.log',
'log_format': '<Time>\|<Component>\|<Pid>\|<Content>',
'regex': [],
'max_dist': 0.008,
'k': 1,
'levels': 2
},
'Apache': {
'log_file': 'Apache/Apache_2k.log',
'log_format': '\[<Time>\] \[<Level>\] <Content>',
'regex': [r'(\d+\.){3}\d+'],
'max_dist': 0.005,
'k': 1,
'levels': 2
},
'Proxifier': {
'log_file': 'Proxifier/Proxifier_2k.log',
'log_format': '\[<Time>\] <Program> - <Content>',
'regex': [r'<\d+\ssec', r'([\w-]+\.)+[\w-]+(:\d+)?', r'\d{2}:\d{2}(:\d{2})*', r'[KGTM]B'],
'max_dist': 0.002,
'k': 1,
'levels': 2
},
'OpenSSH': {
'log_file': 'OpenSSH/OpenSSH_2k.log',
'log_format': '<Date> <Day> <Time> <Component> sshd\[<Pid>\]: <Content>',
'regex': [r'(\d+\.){3}\d+', r'([\w-]+\.){2,}[\w-]+'],
'max_dist': 0.001,
'k': 1,
'levels': 2
},
'OpenStack': {
'log_file': 'OpenStack/OpenStack_2k.log',
'log_format': '<Logrecord> <Date> <Time> <Pid> <Level> <Component> \[<ADDR>\] <Content>',
'regex': [r'((\d+\.){3}\d+,?)+', r'/.+?\s', r'\d+'],
'max_dist': 0.001,
'k': 0.1,
'levels': 2
},
'Mac': {
'log_file': 'Mac/Mac_2k.log',
'log_format': '<Month> <Date> <Time> <User> <Component>\[<PID>\]( \(<Address>\))?: <Content>',
'regex': [r'([\w-]+\.){2,}[\w-]+'],
'max_dist': 0.004,
'k': 1,
'levels': 2
},
}
bechmark_result = []
for dataset, setting in benchmark_settings.iteritems():
print('\n=== Evaluation on %s ==='%dataset)
indir = os.path.join(input_dir, os.path.dirname(setting['log_file']))
log_file = os.path.basename(setting['log_file'])
parser = LogMine.LogParser(log_format=setting['log_format'], indir=indir, outdir=output_dir,
rex=setting['regex'], max_dist=setting['max_dist'], k=setting['k'],
levels=setting['levels'])
parser.parse(log_file)
F1_measure, accuracy = evaluator.evaluate(
groundtruth=os.path.join(indir, log_file + '_structured.csv'),
parsedresult=os.path.join(output_dir, log_file + '_structured.csv')
)
bechmark_result.append([dataset, F1_measure, accuracy])
print('\n=== Overall evaluation results ===')
df_result = pd.DataFrame(bechmark_result, columns=['Dataset', 'F1_measure', 'Accuracy'])
df_result.set_index('Dataset', inplace=True)
print(df_result)
df_result.T.to_csv('LogMine_bechmark_result.csv')
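Note that benchmark_settings.iteritems() ties the loop above to Python 2; a self-contained sketch of the same loop header using .items(), which works on both Python 2 and 3 (settings trimmed to two entries for illustration):
benchmark_settings = {'HDFS': {'max_dist': 0.005}, 'Hadoop': {'max_dist': 0.005}}
for dataset, setting in sorted(benchmark_settings.items()):
    print('=== Evaluation on %s (max_dist=%s) ===' % (dataset, setting['max_dist']))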
|
groveco/django-sql-explorer
|
explorer/tests/test_csrf_cookie_name.py
|
Python
|
mit
| 1,142
| 0.003503
|
from django.test import TestCase, override_settings
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.conf import settings
class TestCsrfCookieName(TestCase):
def test_csrf_cookie_name_in_context(self):
self.user = User.objects.create_superuser('admin', 'admin@admin-fake.com', 'pwd')
self.client.login(username='admin', password='pwd')
        resp = self.client.get(reverse('explorer_index'))
self.assertTrue('csrf_cookie_name' in resp.context)
self.assertEqual(resp.context['csrf_cookie_name'], settings.CSRF_COOKIE_NAME)
@override_settings(CSRF_COOKIE_NAME='TEST_CSRF_COOKIE_NAME')
def test_custom_csrf_cookie_name(self):
self.user = User.objects.create_superuser('admin', 'admin@admin-fake.com', 'pwd')
self.client.login(username='admin', password='pwd')
resp = self.client.get(reverse('explorer_index'))
self.assertTrue('csrf_cookie_name' in resp.context)
self.assertEqual(resp.context['csrf_cookie_name'], 'TEST_CSRF_COOKIE_NAME')
|
westernx/mayatools
|
mayatools/fluids/retime.py
|
Python
|
bsd-3-clause
| 6,890
| 0.003483
|
import math
import os
from optparse import OptionParser
import qbfutures
from .core import Cache, Frame, Shape, Channel
def frange(a, b, step):
v = float(a)
b = float(b)
step = float(step)
while v <= b:
yield v
v += step
def iter_ticks(src_start, src_end, dst_start, dst_end, sampling_rate):
for dst_time in frange(dst_start, dst_end, sampling_rate):
src_time = src_start + (src_end - src_start) * (dst_time - dst_start) / (dst_end - dst_start)
yield src_time, dst_time
def main():
option_parser = OptionParser(usage='%prog [options] input.xml, output.xml')
    option_parser.add_option('-s', '--start', type='float')
option_parser.add_option('-e', '--end', type='float')
option_parser.add_option('--src-start', '--os', type='float')
option_parser.add_option('--src-end', '--oe', type='float')
option_parser.add_option('-r', '--rate', type='float', default=1.0)
option_parser.add_option('-v', '--verbose', action='count', default=0)
    option_parser.add_option('-f', '--farm', action='store_true')
option_parser.add_option('-w', '--workers', type='int', default=20)
option_parser.add_option('-a', '--advect', type='float', default=0.0)
opts, args = option_parser.parse_args()
if len(args) != 2:
option_parser.print_usage()
exit(1)
res = schedule_retime(*args,
src_start=opts.src_start,
src_end=opts.src_end,
dst_start=opts.start,
dst_end=opts.end,
sampling_rate=opts.rate,
verbose=opts.verbose,
farm=opts.farm,
workers=opts.workers,
advect=opts.advect
)
if opts.farm:
print 'Qube job ID', res
def schedule_retime(
src_path, dst_path,
src_start=None, src_end=None,
dst_start=None, dst_end=None,
sampling_rate=1.0,
farm=True,
workers=20,
verbose=0,
advect=0.0,
):
dst_path = os.path.abspath(dst_path)
src_path = os.path.abspath(src_path)
dst_base_name, dst_ext = os.path.splitext(dst_path)
if dst_ext != '.xml':
        raise ValueError('destination path must end in .xml: %r' % dst_path)
dst_directory, dst_base_name = os.path.split(dst_base_name)
if not os.path.exists(dst_directory):
os.makedirs(dst_directory)
src_cache = Cache(src_path)
if verbose >= 2:
src_cache.pprint()
# Load the headers for all the frames, and sort them by time.
frame_times = []
for frame in src_cache.frames:
frame_times.append((frame.start_time, frame.path))
frame.free()
frame_times.sort()
if not frame_times:
print 'No frames in src_cache.'
exit(2)
# Reclaim the file handles.
src_cache.free()
# Construct the new src_cache that our frames will go into.
dst_cache = src_cache.clone()
dst_base_path = os.path.join(dst_directory, dst_base_name)
# Convert all time options into an integer of ticks.
if dst_start is None:
dst_start = dst_cache.frames[0].start_time
else:
dst_start = int(dst_start * dst_cache.time_per_frame)
if dst_end is None:
dst_end = dst_cache.frames[-1].end_time
else:
dst_end = int(dst_end * dst_cache.time_per_frame)
if src_start is None:
src_start = dst_start
else:
src_start = int(src_start * src_cache.time_per_frame)
if src_end is None:
src_end = dst_end
else:
src_end = int(src_end * src_cache.time_per_frame)
# This one remains a float.
sampling_rate = sampling_rate * src_cache.time_per_frame
# Isolate the frames requested via src-*.
frames = [f for f in frame_times if f[0] >= dst_start and f[0] <= dst_end]
# Write the new XML.
dst_cache.update_xml(dst_start, dst_end)
dst_cache.write_xml(dst_path)
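    # Helper: find the cached frames that bracket src_time (the latest frame
    # at or before it, and the earliest frame at or after it).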
def get_frames_for(src_time):
try:
frame_a_path = [f[1] for f in frame_times if f[0] <= src_time][-1]
frame_b_path = next(f[1] for f in frame_times if f[0] >= src_time)
except (IndexError, StopIteration):
def format_time(time):
frames, ticks = divmod(time, 250)
return '%d:%d' % (frames, ticks)
raise ValueError('Cannot find data for time %s; have from %s to %s' % (
format_time(src_time),
format_time(frames[0][0]),
format_time(frames[-1][0]),
))
return frame_a_path, frame_b_path
if farm:
executor = qbfutures.Executor(cpus=workers, groups='farm', reservations='host.processors=1')
with executor.batch(name='Retime Fluid:%s:%s' % (os.path.basename(src_cache.directory), src_cache.shape_specs.keys()[0])) as batch:
for src_time, dst_time in iter_ticks(src_start, src_end, dst_start, dst_end, sampling_rate):
frame_a_path, frame_b_path = get_frames_for(src_time)
batch.submit_ext(
func='mayatools.fluids.retime:blend_one_on_farm',
args=[src_cache.xml_path, src_time, dst_time, frame_a_path, frame_b_path, dst_base_path, advect],
name='Blend %d from %d' % (dst_time, src_time),
)
return batch.futures[0].job_id
# Iterate over the requested ticks.
for src_time, dst_time in iter_ticks(src_start, src_end, dst_start, dst_end, sampling_rate):
frame_a_path, frame_b_path = get_frames_for(src_time)
blend_one_on_farm(src_cache.xml_path, src_time, dst_time, frame_a_path, frame_b_path, dst_base_path, advect)
def blend_one_on_farm(cache, src_time, dst_time, frame_a, frame_b, dst_base_path, advect):
if isinstance(cache, basestring):
cache = Cache(cache)
if isinstance(frame_a, basestring):
frame_a = Frame(cache, frame_a)
if isinstance(frame_b, basestring):
frame_b = Frame(cache, frame_b)
dst_frame = Frame(cache)
dst_frame.set_times(dst_time, dst_time)
if frame_a.path == frame_b.path:
dst_frame.shapes.update(frame_a.shapes)
dst_frame.channels.update(frame_a.channels)
else:
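        # Blend factor is src_time's normalized position between the two
        # bracketing frames (0.0 at frame_a, 1.0 at frame_b).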
blend_factor = float(src_time - frame_a.start_time) / float(frame_b.start_time - frame_a.start_time)
for shape_name, shape_a in sorted(frame_a.shapes.iteritems()):
dst_shape = Shape.setup_blend(dst_frame, shape_name, frame_a, frame_b)
dst_shape.blend(blend_factor, advect)
frame_no, tick = divmod(dst_time, cache.time_per_frame)
if tick:
dst_path = '%sFrame%dTick%d.mc' % (dst_base_path, frame_no, tick)
else:
dst_path = '%sFrame%d.mc' % (dst_base_path, frame_no)
print 'Saving to', dst_path
try:
os.makedirs(os.path.dirname(dst_path))
except OSError:
pass
with open(dst_path, 'wb') as fh:
for chunk in dst_frame.dumps_iter():
fh.write(chunk)
if __name__ == '__main__':
main()
|
zhaochl/python-utils
|
agrith_util/page_rank/page_rank_test.py
|
Python
|
apache-2.0
| 2,180
| 0.050781
|
#!/usr/bin/env python
# coding=utf-8
#-*- coding:utf-8 -*-
import random
N = 8  # eight web pages
d = 0.85  # damping factor of 0.85
delt = 0.00001  # iteration control variable (convergence threshold)
# Multiply two matrices
def matrix_multi(A,B):
result = [[0]*len(B[0]) for i in range(len(A))]
for i in range(len(A)):
for j in range(len(B[0])):
for k in range(len(B)):
result[i][j] += A[i][k]*B[k][j]
return result
# Multiply every element of matrix A by n
def matrix_multiN(n,A):
result = [[1]*len(A[0]) for i in range(len(A))]
for i in range(len(A)):
for j in range(len(A[0])):
result[i][j] = n*A[i][j]
return result
# Add two matrices
def matrix_add(A,B):
if len(A[0])!=len(B[0]) and len(A)!=len(B):
return
result = [[0]*len(A[0]) for i in range(len(A))]
for i in range(len(A)):
for j in range(len(A[0])):
result[i][j] = A[i][j]+B[i][j]
return result
def pageRank(A):
e = []
for i in range(N):
e.append(1)
norm = 100
New_P = []
for i in range(N):
New_P.append([random.random()])
r = [ [(1-d)*i*1/N] for i in e]
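    # Power iteration: repeat P = (1-d)*e/N + d*M'P until the L1 change
    # between successive PageRank vectors drops below delt.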
while norm > delt:
P = New_P
        New_P = matrix_add(r, matrix_multiN(d, matrix_multi(A, P)))  # P = (1-d)*e/N + d*M'P, the core of the PageRank algorithm
norm = 0
        # Compute the L1 (first-order) norm of the change between iterations
for i in range(N):
norm += abs(New_P[i][0]-P[i][0])
print New_P
# Derive the transition probability matrix from the adjacency matrix and transpose it
def tran_and_convert(A):
result = [[0]*len(A[0]) for i in range(len(A))]
result_convert = [[0]*len(A[0]) for i in range(len(A))]
    for i in range(len(A)):
for j in range(len(A[0])):
result[i][j] = A[i][j]*1.0/sum(A[i])
for i in range(len(result)):
for j in range(len(result[0])):
result_convert[i][j]=result[j][i]
return result_convert
def main():
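    # Adjacency matrix of the eight-page link graph (A[i][j] = 1 when page i links to page j).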
A = [[0,1,1,0,0,1,0,0],\
[0,0,0,1,1,0,0,0],\
[0,0,0,1,0,1,0,0],\
[0,0,0,0,0,1,0,0],\
[1,0,0,1,0,0,1,1],\
[0,0,0,1,0,0,0,0],\
[0,0,1,0,0,0,0,0],\
[0,0,0,1,0,0,1,0]]
M = tran_and_convert(A)
pageRank(M)
if __name__ == '__main__':
main()
|