| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int32 2–1.05M |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('order', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='message',
options={'ordering': ['-create_date']},
),
migrations.AlterModelOptions(
name='order',
options={'ordering': ['-purchase_date']},
),
]
|
sorz/isi
|
store/order/migrations/0002_auto_20150120_2243.py
|
Python
|
mit
| 494
|
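The migration above only rewrites model Meta options. For reference, a minimal sketch of what the corresponding `Message` and `Order` models could look like inside the `order` app; the field names are assumptions for illustration, and only the `ordering` values come from the migration itself.

```python
# Hypothetical models.py sketch for the 'order' app above. Field names are
# assumptions; only the Meta.ordering values are taken from the migration.
from django.db import models


class Message(models.Model):
    create_date = models.DateTimeField(auto_now_add=True)

    class Meta:
        ordering = ['-create_date']    # newest messages first


class Order(models.Model):
    purchase_date = models.DateTimeField()

    class Meta:
        ordering = ['-purchase_date']  # most recent purchases first
```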
import os
from six.moves.configparser import ConfigParser, NoSectionError
from six.moves import urllib
from conans.errors import ConanException
from conans.model.env_info import unquote
from conans.paths import conan_expand_user, DEFAULT_PROFILE_NAME
from conans.util.env_reader import get_env
from conans.util.files import load
MIN_SERVER_COMPATIBLE_VERSION = '0.12.0'
default_settings_yml = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS]
arch_build: [x86, x86_64]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, Arduino]
arch_target: [x86, x86_64, ppc64le, ppc64, armv6, armv7, armv7hf, armv8, sparc, sparcv9, mips, mips64, avr, armv7s, armv7k]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
Linux:
Macos:
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0"]
watchOS:
version: ["4.0"]
tvOS:
version: ["11.0"]
FreeBSD:
SunOS:
Arduino:
board: ANY
arch: [x86, x86_64, ppc64le, ppc64, armv6, armv7, armv7hf, armv8, sparc, sparcv9, mips, mips64, avr, armv7s, armv7k]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp, v140, v140_xp, v140_clang_c2, LLVM-vs2014, LLVM-vs2014_xp, v141, v141_xp, v141_clang_c2]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0", "5.0"]
libcxx: [libstdc++, libstdc++11, libc++]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0"]
libcxx: [libstdc++, libc++]
build_type: [None, Debug, Release]
"""
default_client_conf = """
[log]
run_to_output = True # environment CONAN_LOG_RUN_TO_OUTPUT
run_to_file = False # environment CONAN_LOG_RUN_TO_FILE
level = 50 # environment CONAN_LOGGING_LEVEL
# trace_file = # environment CONAN_TRACE_FILE
print_run_commands = False # environment CONAN_PRINT_RUN_COMMANDS
[general]
default_profile = %s
compression_level = 9 # environment CONAN_COMPRESSION_LEVEL
sysrequires_sudo = True # environment CONAN_SYSREQUIRES_SUDO
# verbose_traceback = False # environment CONAN_VERBOSE_TRACEBACK
# bash_path = "" # environment CONAN_BASH_PATH (only windows)
# recipe_linter = False # environment CONAN_RECIPE_LINTER
# read_only_cache = True # environment CONAN_READ_ONLY_CACHE
# pylintrc = path/to/pylintrc_file # environment CONAN_PYLINTRC
# cache_no_locks = True
# user_home_short = your_path # environment CONAN_USER_HOME_SHORT
# conan_make_program = make # environment CONAN_MAKE_PROGRAM (overrides the make program used in AutoToolsBuildEnvironment.make)
# cmake_generator # environment CONAN_CMAKE_GENERATOR
# http://www.vtk.org/Wiki/CMake_Cross_Compiling
# cmake_toolchain_file # environment CONAN_CMAKE_TOOLCHAIN_FILE
# cmake_system_name # environment CONAN_CMAKE_SYSTEM_NAME
# cmake_system_version # environment CONAN_CMAKE_SYSTEM_VERSION
# cmake_system_processor # environment CONAN_CMAKE_SYSTEM_PROCESSOR
# cmake_find_root_path # environment CONAN_CMAKE_FIND_ROOT_PATH
# cmake_find_root_path_mode_program # environment CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM
# cmake_find_root_path_mode_library # environment CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY
# cmake_find_root_path_mode_include # environment CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE
# cpu_count = 1 # environment CONAN_CPU_COUNT
[storage]
# This is the default path, but you can write your own. It must be an absolute path or a
# path beginning with "~" (if the environment var CONAN_USER_HOME is specified, this directory, even
# with "~/", will be relative to the conan user home, not to the system user home)
path = ~/.conan/data
[proxies]
# An empty section will try to use the system proxies.
# If you do not want to use a proxy at all, remove the [proxies] section.
# As documented in http://docs.python-requests.org/en/latest/user/advanced/#proxies
# http = http://user:pass@10.10.1.10:3128/
# http = http://10.10.1.10:3128
# https = http://10.10.1.10:1080
# Default settings now declared in the default profile
""" % DEFAULT_PROFILE_NAME
class ConanClientConfigParser(ConfigParser, object):
def __init__(self, filename):
ConfigParser.__init__(self)
self.read(filename)
self.filename = filename
    # Override the default optionxform so that keys are not converted to lowercase
optionxform = str
@property
def env_vars(self):
ret = {"CONAN_LOG_RUN_TO_OUTPUT": self._env_c("log.run_to_output", "CONAN_LOG_RUN_TO_OUTPUT", "True"),
"CONAN_LOG_RUN_TO_FILE": self._env_c("log.run_to_file", "CONAN_LOG_RUN_TO_FILE", "False"),
"CONAN_LOGGING_LEVEL": self._env_c("log.level", "CONAN_LOGGING_LEVEL", "50"),
"CONAN_TRACE_FILE": self._env_c("log.trace_file", "CONAN_TRACE_FILE", None),
"CONAN_PRINT_RUN_COMMANDS": self._env_c("log.print_run_commands", "CONAN_PRINT_RUN_COMMANDS", "False"),
"CONAN_COMPRESSION_LEVEL": self._env_c("general.compression_level", "CONAN_COMPRESSION_LEVEL", "9"),
"CONAN_PYLINTRC": self._env_c("general.pylintrc", "CONAN_PYLINTRC", None),
"CONAN_PYLINT_WERR": self._env_c("general.pylint_werr", "CONAN_PYLINT_WERR", None),
"CONAN_SYSREQUIRES_SUDO": self._env_c("general.sysrequires_sudo", "CONAN_SYSREQUIRES_SUDO", "False"),
"CONAN_RECIPE_LINTER": self._env_c("general.recipe_linter", "CONAN_RECIPE_LINTER", "True"),
"CONAN_CPU_COUNT": self._env_c("general.cpu_count", "CONAN_CPU_COUNT", None),
"CONAN_READ_ONLY_CACHE": self._env_c("general.read_only_cache", "CONAN_READ_ONLY_CACHE", None),
"CONAN_USER_HOME_SHORT": self._env_c("general.user_home_short", "CONAN_USER_HOME_SHORT", None),
"CONAN_VERBOSE_TRACEBACK": self._env_c("general.verbose_traceback", "CONAN_VERBOSE_TRACEBACK", None),
# http://www.vtk.org/Wiki/CMake_Cross_Compiling
"CONAN_CMAKE_GENERATOR": self._env_c("general.cmake_generator", "CONAN_CMAKE_GENERATOR", None),
"CONAN_CMAKE_TOOLCHAIN_FILE": self._env_c("general.cmake_toolchain_file", "CONAN_CMAKE_TOOLCHAIN_FILE", None),
"CONAN_CMAKE_SYSTEM_NAME": self._env_c("general.cmake_system_name", "CONAN_CMAKE_SYSTEM_NAME", None),
"CONAN_CMAKE_SYSTEM_VERSION": self._env_c("general.cmake_system_version", "CONAN_CMAKE_SYSTEM_VERSION", None),
"CONAN_CMAKE_SYSTEM_PROCESSOR": self._env_c("general.cmake_system_processor",
"CONAN_CMAKE_SYSTEM_PROCESSOR",
None),
"CONAN_CMAKE_FIND_ROOT_PATH": self._env_c("general.cmake_find_root_path",
"CONAN_CMAKE_FIND_ROOT_PATH",
None),
"CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM": self._env_c("general.cmake_find_root_path_mode_program",
"CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM",
None),
"CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY": self._env_c("general.cmake_find_root_path_mode_library",
"CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY",
None),
"CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE": self._env_c("general.cmake_find_root_path_mode_include",
"CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE",
None),
"CONAN_BASH_PATH": self._env_c("general.bash_path", "CONAN_BASH_PATH", None),
"CONAN_MAKE_PROGRAM": self._env_c("general.conan_make_program", "CONAN_MAKE_PROGRAM", None),
}
# Filter None values
return {name: value for name, value in ret.items() if value is not None}
def _env_c(self, var_name, env_var_name, default_value):
env = os.environ.get(env_var_name, None)
if env is not None:
return env
try:
return unquote(self.get_item(var_name))
except ConanException:
return default_value
def get_item(self, item):
if not item:
return load(self.filename)
tokens = item.split(".", 1)
section_name = tokens[0]
try:
section = self.items(section_name)
except NoSectionError:
raise ConanException("'%s' is not a section of conan.conf" % section_name)
if len(tokens) == 1:
result = []
for item in section:
result.append(" = ".join(item))
return "\n".join(result)
else:
key = tokens[1]
try:
value = dict(section)[key]
if " #" in value: # Comments
value = value[:value.find(" #")].strip()
except KeyError:
raise ConanException("'%s' doesn't exist in [%s]" % (key, section_name))
return value
def set_item(self, key, value):
tokens = key.split(".", 1)
section_name = tokens[0]
if not self.has_section(section_name):
self.add_section(section_name)
if len(tokens) == 1: # defining full section
raise ConanException("You can't set a full section, please specify a key=value")
key = tokens[1]
super(ConanClientConfigParser, self).set(section_name, key, value)
with open(self.filename, "w") as f:
self.write(f)
def rm_item(self, item):
tokens = item.split(".", 1)
section_name = tokens[0]
if not self.has_section(section_name):
raise ConanException("'%s' is not a section of conan.conf" % section_name)
if len(tokens) == 1:
self.remove_section(tokens[0])
else:
key = tokens[1]
if not self.has_option(section_name, key):
raise ConanException("'%s' doesn't exist in [%s]" % (key, section_name))
self.remove_option(section_name, key)
with open(self.filename, "w") as f:
self.write(f)
def get_conf(self, varname):
"""Gets the section from config file or raises an exception"""
try:
return self.items(varname)
except NoSectionError:
raise ConanException("Invalid configuration, missing %s" % varname)
@property
def default_profile(self):
try:
return self.get_item("general.default_profile")
except ConanException:
return DEFAULT_PROFILE_NAME
@property
def cache_no_locks(self):
try:
return self.get_item("general.cache_no_locks")
except ConanException:
return False
@property
def storage(self):
return dict(self.get_conf("storage"))
@property
def storage_path(self):
# Try with CONAN_STORAGE_PATH
result = get_env('CONAN_STORAGE_PATH', None)
# Try with conan.conf "path"
if not result:
try:
env_conan_user_home = os.getenv("CONAN_USER_HOME")
# if env var is declared, any specified path will be relative to CONAN_USER_HOME
# even with the ~/
if env_conan_user_home:
storage = self.storage["path"]
if storage[:2] == "~/":
storage = storage[2:]
result = os.path.join(env_conan_user_home, storage)
else:
result = self.storage["path"]
except KeyError:
pass
# expand the result and check if absolute
if result:
result = conan_expand_user(result)
if not os.path.isabs(result):
raise ConanException("Conan storage path has to be an absolute path")
return result
@property
def proxies(self):
""" optional field, might not exist
"""
try:
proxies = self.get_conf("proxies")
            # If the [proxies] section exists but is empty, try to use the system proxies
            if not proxies:
                # We have no evidence that the following line is necessary:
                # if proxies are configured at the system level, conan will use them,
                # so it should not be necessary to return the system proxies here.
                # Furthermore, URLs excluded from the system-level proxy rules are not
                # honored in this case, so the only workaround is to remove the [proxies]
                # section with conan config remote proxies; this method then returns None
                # and the proxies dict passed to requests stays empty.
                # We keep the line because removing it might break something; it may only
                # have worked so far because nobody uses system-wide proxies, or because
                # those proxy rules contain no excluded URLs. See #1777
return urllib.request.getproxies()
result = {k: (None if v == "None" else v) for k, v in proxies}
return result
except:
return None
|
tivek/conan
|
conans/client/conf/__init__.py
|
Python
|
mit
| 14,958
|
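`get_item()` and `set_item()` above address configuration values with dotted `section.key` strings and strip trailing ` #` comments from values. A minimal standalone sketch of that lookup convention using only the Python 3 standard library; the config text is made up, and the class above is mimicked rather than imported.

```python
# Standalone sketch of the dotted "section.key" lookup used by get_item() above.
# The config text below is a made-up example, not a real conan.conf.
from configparser import ConfigParser
from io import StringIO

CONF = """
[general]
compression_level = 9 # environment CONAN_COMPRESSION_LEVEL

[storage]
path = ~/.conan/data
"""

def get_item(parser, item):
    """Resolve a dotted 'section.key', stripping trailing ' #' comments like get_item() above."""
    section_name, key = item.split(".", 1)
    value = dict(parser.items(section_name))[key]
    if " #" in value:
        value = value[:value.find(" #")].strip()
    return value

parser = ConfigParser()
parser.optionxform = str          # keep key case, as the class above does
parser.read_file(StringIO(CONF))
print(get_item(parser, "general.compression_level"))  # -> 9
print(get_item(parser, "storage.path"))               # -> ~/.conan/data
```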
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Run the python or pytorch profiler and print the results.
## Examples
To profile a short training run on bAbI task 1 (1k exs) with a seq2seq model:
```shell
parlai profile_train --task babi:task1k:1 --model seq2seq --dict-file /tmp/dict
```
"""
from parlai.core.params import ParlaiParser
from parlai.core.script import ParlaiScript, register_script
from parlai.scripts.train_model import setup_args as train_args
from parlai.scripts.train_model import TrainLoop
import parlai.utils.logging as logging
import cProfile
import io
import pstats
try:
import torch
except ImportError:
logging.error('Torch not found -- only cProfile allowed with this tool.')
def setup_args(parser=None):
if parser is None:
parser = ParlaiParser(True, True, 'cProfile a training run')
parser = train_args(parser)
profile = parser.add_argument_group('Profiler Arguments')
profile.add_argument(
'--torch',
type='bool',
default=False,
help='If true, use the torch profiler. Otherwise use cProfile.',
)
profile.add_argument(
'--torch-cuda',
type='bool',
default=False,
help='If true, use the torch cuda profiler. Otherwise use cProfile.',
)
profile.add_argument(
'--debug',
type='bool',
default=False,
help='If true, enter debugger at end of run.',
)
profile.set_defaults(num_epochs=1)
return parser
def profile(opt):
if opt['torch'] or opt['torch_cuda']:
with torch.autograd.profiler.profile(use_cuda=opt['torch_cuda']) as prof:
TrainLoop(opt).train()
key = 'cpu_time_total' if opt['torch'] else 'cuda_time_total'
print(prof.key_averages().table(sort_by=key, row_limit=25))
return prof
else:
pr = cProfile.Profile()
pr.enable()
TrainLoop(opt).train()
pr.disable()
s = io.StringIO()
sortby = 'cumulative'
ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
ps.print_stats()
print(s.getvalue())
@register_script('profile_train', hidden=True)
class ProfileTrain(ParlaiScript):
@classmethod
def setup_args(cls):
return setup_args()
def run(self):
return profile(self.opt)
if __name__ == '__main__':
ProfileTrain.main()
|
facebookresearch/ParlAI
|
parlai/scripts/profile_train.py
|
Python
|
mit
| 2,526
|
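The non-torch branch of `profile()` above follows the standard `cProfile` enable/run/disable pattern and renders the stats through `pstats`. A minimal standalone sketch of that pattern; `busy_work()` is a made-up stand-in for `TrainLoop(opt).train()`.

```python
# Standalone sketch of the cProfile/pstats pattern used in profile() above.
# busy_work() is a made-up stand-in for the real training loop.
import cProfile
import io
import pstats


def busy_work(n=200000):
    return sum(i * i for i in range(n))


pr = cProfile.Profile()
pr.enable()
busy_work()
pr.disable()

s = io.StringIO()
pstats.Stats(pr, stream=s).sort_stats('cumulative').print_stats(10)
print(s.getvalue())
```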
__author__ = '@abhinavbom a.k.a darkl0rd'
import urllib2
import urlparse
import re
import os
import time
import requests  # used by gather() below; imported explicitly rather than relying on the star imports
from lib.feeds import *
from lib.parse import *
def gather():
if not os.path.exists('intel'):
os.mkdir('intel')
os.chdir('.\\intel')
#print os.getcwd()
print "Starting feed update process"
counter = 0
ioc_list = []
timestr = time.strftime("%Y%m%d-%H%M%S")
for source in OSINT_IP.iteritems():
if not os.path.exists(str(source[0])):
os.mkdir(str(source[0]))
print os.getcwd()
os.chdir(str(source[0]))
print source[0]
name = str(source[0]) +"_" + timestr + ".txt"
print name
print "Building database"
file = open(name, 'a+')
r = requests.get(str(source[1]),
headers=create_basic_headers(),
proxies={'http': HTTP_PROXY, 'https': HTTPS_PROXY})
        for line in r.text.splitlines():  # iterate over response lines, not raw byte chunks
if line.startswith("/") or line.startswith('\n') or line.startswith("#"):
pass
else:
file.write(line+'\n')
os.chdir("..")
|
abhinavbom/Threat-Intelligence-Hunter
|
lib/updatefeed.py
|
Python
|
mit
| 1,127
|
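`gather()` above downloads each OSINT feed and skips comment and blank lines before writing indicators to a timestamped file. A minimal standalone sketch of just that filtering step; the feed text is inlined because `lib.feeds`, the proxies, and the real feed URLs are project-specific.

```python
# Standalone sketch of the comment/blank-line filtering done in gather() above.
# FEED_TEXT stands in for the body returned by the HTTP request to a feed URL.
FEED_TEXT = """# sample feed header
198.51.100.7
// another comment style
203.0.113.9

"""

indicators = [
    line for line in FEED_TEXT.splitlines()
    if line.strip() and not line.startswith(("#", "/"))
]
print(indicators)  # -> ['198.51.100.7', '203.0.113.9']
```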
from lbutils import as_callable
from lbworkflow import settings
# wf_send_sms(users, mail_type, event, ext_ctx)
# wf_send_mail(users, mail_type, event, ext_ctx)
def wf_send_msg(users, msg_type, event=None, ext_ctx=None):
if not users:
return
users = set(users)
if event: # ignore operator
if event.user in users:
            users.remove(event.user)  # set.remove() returns None, so keep the set and do not reassign
for send_msg in settings.WF_SEND_MSG_FUNCS:
as_callable(send_msg)(users, msg_type, event, ext_ctx)
def wf_print(users, msg_type, event=None, ext_ctx=None):
print("wf_print: %s, %s, %s" % (users, msg_type, event))
|
vicalloy/django-lb-workflow
|
lbworkflow/core/sendmsg.py
|
Python
|
mit
| 624
|
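`wf_send_msg()` above fans a message out to every callable listed in `settings.WF_SEND_MSG_FUNCS`, skipping the user who triggered the event. A minimal standalone sketch of that dispatch pattern; the sender functions and the plain-string operator are made up for illustration (in the real code the operator is `event.user`).

```python
# Standalone sketch of the "list of sender callables" dispatch used above.
# The senders, SEND_FUNCS list, and the string operator are made up.
def send_mail(users, msg_type, event=None, ext_ctx=None):
    print("mail -> %s (%s)" % (sorted(users), msg_type))


def send_sms(users, msg_type, event=None, ext_ctx=None):
    print("sms  -> %s (%s)" % (sorted(users), msg_type))


SEND_FUNCS = [send_mail, send_sms]


def send_msg(users, msg_type, operator=None, ext_ctx=None):
    if not users:
        return
    users = set(users)
    users.discard(operator)        # ignore the operator, as wf_send_msg() does
    for func in SEND_FUNCS:
        func(users, msg_type, operator, ext_ctx)


send_msg(["alice", "bob"], "submitted", operator="bob")
```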
# -*- coding: utf-8 -*-
# MIT Licensed, Copyright (c) 2016 Ryan Scott Brown <sb@ryansb.com>
import os
from placebo.utils import placebo_session
import serverless_helpers
def test_unset_environment():
os.environ.pop('SERVERLESS_PROJECT_NAME', None)
os.environ.pop('SERVERLESS_STAGE', None)
stack_name = serverless_helpers.cfn_detect.stack_name()
assert stack_name == ''
class TestCfnCalls(object):
@placebo_session
def test_normal_outputs(self, session):
os.environ['SERVERLESS_STAGE'] = 'dev'
os.environ['SERVERLESS_PROJECT_NAME'] = 'mws'
out = serverless_helpers.load_cfn_outputs(session)
assert len(out) == 2
assert 'Description' in out['IamRoleArnLambda']
assert 'Value' in out['IamRoleArnLambda']
assert out['IamRoleArnLambda']['Value'].startswith('arn:aws:iam::123456789012')
assert out['DynamoTable']['Description'] == 'Name of DDB table'
assert os.getenv('SERVERLESS_CF_IamRoleArnLambda').startswith('arn:aws:iam::123456789012')
@placebo_session
def test_notfound(self, session):
os.environ['SERVERLESS_STAGE'] = 'dev'
os.environ['SERVERLESS_PROJECT_NAME'] = 'nonexistent'
out = serverless_helpers.load_cfn_outputs(session)
assert out == {}
@placebo_session
def test_no_outputs(self, session):
os.environ['SERVERLESS_STAGE'] = 'dev'
os.environ['SERVERLESS_PROJECT_NAME'] = 'no_outputs'
out = serverless_helpers.load_cfn_outputs(session)
assert out == {}
|
serverless/serverless-helpers-py
|
tests/test_cfn.py
|
Python
|
mit
| 1,544
|
class GreedySearch:
default_configuration = None
option_iterator = None
best_performances = None
best_configurations = None
current_option_key = None
def __init__(self, settings):
self.default_configuration = self.get_default_configuration(settings)
self.option_iterator = self.get_option_iterator(settings)
self.best_performances = {}
self.best_configurations = {}
self.current_option_key = None
def next(self, previous_score):
option = self.option_iterator.__next__()
if option is None:
self.update_best_performance(previous_score)
if self.current_option_key is not None:
self.default_configuration[self.current_option_key[0]][self.current_option_key[1]] = self.best_configurations[self.current_option_key]
return None
option_key = (option[0], option[1])
if option_key not in self.best_performances:
self.initialize_best_performance(option, option_key)
self.update_best_performance(previous_score)
if self.current_option_key != option_key:
if self.current_option_key is not None:
self.default_configuration[self.current_option_key[0]][self.current_option_key[1]] = self.best_configurations[self.current_option_key]
self.current_option_key = option_key
self.default_configuration[option[0]][option[1]] = option[2]
return self.default_configuration
def initialize_best_performance(self, option, option_key):
if self.current_option_key is not None:
self.best_performances[option_key] = self.best_performances[self.current_option_key]
self.best_configurations[option_key] = self.default_configuration[option[0]][option[1]]
else:
self.best_performances[option_key] = None
def update_best_performance(self, previous_score):
if self.current_option_key is not None \
and (self.best_performances[self.current_option_key] is None \
or previous_score > self.best_performances[self.current_option_key]):
self.best_configurations[self.current_option_key] = \
self.default_configuration[self.current_option_key[0]][self.current_option_key[1]]
self.best_performances[self.current_option_key] = previous_score
def get_default_configuration(self, settings):
d = {}
for header, options in settings.items():
d[header] = {}
for k, v in options.items():
d[header][k] = v.split(",")[0]
return d
def get_option_iterator(self, settings):
first = True
all_options = []
stored_default = None
for header, options in settings.items():
for k, v in options.items():
options = [(header, k, option) for option in v.split(",")]
if first and len(options) > 1:
first = False
all_options.append(options[0])
all_options.extend(options[1:])
stored_default = options[0]
for option in all_options:
yield option
if len(all_options) == 0:
yield stored_default
yield None
|
MichSchli/QuestionAnsweringGCN
|
old_version/experiment_construction/search/greedy.py
|
Python
|
mit
| 3,307
|
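`GreedySearch` above sweeps one option at a time, freezing the best value it has seen for each key before moving on to the next. A minimal standalone sketch of that coordinate-wise greedy idea over a made-up settings dict and scoring function; it mirrors the strategy, not the class's API.

```python
# Standalone sketch of greedy, one-key-at-a-time search over a settings dict.
# The settings values and score() are made up for illustration.
settings = {"optimizer": ["sgd", "adam"], "lr": [0.1, 0.01, 0.001]}

def score(cfg):
    # Pretend "adam" with lr=0.01 is the best combination.
    return (cfg["optimizer"] == "adam") + (cfg["lr"] == 0.01)

config = {k: v[0] for k, v in settings.items()}    # defaults: first value of each key
for key, values in settings.items():
    best_value, best_score = config[key], score(config)
    for value in values[1:]:
        trial = dict(config, **{key: value})
        s = score(trial)
        if s > best_score:
            best_value, best_score = value, s
    config[key] = best_value                       # freeze the winner before the next key

print(config)  # -> {'optimizer': 'adam', 'lr': 0.01}
```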
# coding=utf-8
import re
from blackhole import ben
__all__ = ['cronwalk']
class Entry(object):
__slots__ = ('minute', 'hour', 'day', 'month', 'isoweekday')
def __getitem__(self, index):
return getattr(self, self.__slots__[index], None)
def __setitem__(self, index, value):
return setattr(self, self.__slots__[index], value)
def __str__(self):
return str({attr: getattr(self, attr, None) for attr in self.__slots__})
class cronwalk(object):
field_range = (
('minute', 0, 59),
('hour', 0, 23),
('day', 1, 31),
('month', 1, 12),
('isoweekday', 0, 6),
)
def __init__(self, expr, base=None):
"""
้่ฟexpr็ๆEntryๅฎไพ
:param expr:
:param base:
:return:
"""
self.base = ben(base) if base else ben()
if self.base.second or self.base.microsecond: # minute is min unit
self.base = self.base.shift(minute=1).floor('minute') # discard second and microsecond
self.cur = self.base.clone()
self.entry = Entry()
self.last_fit_year = self.cur.year
self.range_len = {
'month': 12,
'day': getattr(self.cur, 'days_in_month'),
'isoweekday': 7,
'hour': 24,
'minute': 60,
}
        # Parse the cron expression
tokens = expr.strip().split()
if len(tokens) != 5:
raise Exception('invalid expr')
for position, token in enumerate(tokens):
item_list = token.split(',')
res = []
for item in item_list:
# 0-59/5 => start-end/step
mat = re.search(r'^(\d+)-(\d+)/?(.*)$',
str(item).replace('*', '{0[1]}-{0[2]}'.format(self.field_range[position])))
if mat:
start, end, step = mat.groups()
step = step or 1
start, end, step = map(int, (start, end, step))
_range_start, _range_end = self.field_range[position][1:3]
if not _range_start <= start <= _range_end:
raise Exception('invalid expr')
if not _range_start <= end <= _range_end:
raise Exception('invalid expr')
if start > end:
_rotate = range(_range_start, _range_end + 1)
_rotate = _rotate[_rotate.index(start):] + _rotate[:_rotate.index(end) + 1]
_list = _rotate[::step]
else:
_list = range(start, end + 1, step)
res.extend(_list)
else:
res.append(int(item))
self.entry[position] = sorted(res)
self.has_day = (tokens[2] != '*')
self.has_isoweekday = (tokens[4] != '*')
if not self.has_isoweekday:
self.entry.isoweekday = []
else:
if not self.has_day:
self.entry.day = []
def __iter__(self):
cur = self.cur
entry = self.entry
while 1:
if cur.year - self.last_fit_year >= 2:
raise Exception('invalid expr')
# fields = ('month', 'day', 'isoweekday', 'hour', 'minute')
fields = ('month', 'day', 'hour', 'minute')
for field in fields:
                if field in ['day', 'isoweekday']:  # day and isoweekday are combined as a union (either may match)
_diff = min(self.get_diff(cur.day, entry.day, 'day'), self.get_diff(cur.isoweekday, entry.isoweekday, 'isoweekday'))
else:
_diff = self.get_diff(getattr(cur, field), getattr(entry, field), field)
if _diff:
old = cur.clone()
cur = cur.shifted(**{field: _diff})
changed = field
for m in ('month', 'day', 'hour'):
if getattr(cur, m) != getattr(old, m):
changed = m
break
cur = cur.floor(changed)
carry = (changed != field)
                    if carry:  # loop again: the carry changes an earlier field, which may no longer satisfy the entry
break
ok = True
for k in ('month', 'hour', 'minute'):
if getattr(cur, k) not in getattr(entry, k):
ok = False
break
if (cur.day not in entry.day) and (cur.isoweekday not in entry.isoweekday):
ok = False
if ok:
yield cur
self.last_fit_year = cur.year
cur = cur.shifted(**{'minute': 1})
def get_diff(self, x, seq, unit):
if not seq:
return float('inf')
temp = seq[0]
for y in seq:
if y >= x:
seq = seq[1:]
seq.append(temp)
return y - x
range_len = self.range_len[unit]
_diff = (seq[0] - x) % range_len
return _diff
|
liuhao1024/black-hole
|
blackhole/cronwalk.py
|
Python
|
mit
| 5,177
|
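The constructor above expands every cron field through the same `start-end/step` pattern, first rewriting `*` to the field's full range. A minimal standalone sketch of that expansion for a single field, reusing the same regex; the field range tuple matches the `minute` entry of `field_range` above.

```python
# Standalone sketch of the "start-end/step" expansion used in cronwalk.__init__ above.
import re

FIELD_RANGE = ('minute', 0, 59)   # same shape as the entries in cronwalk.field_range

def expand(token, field_range=FIELD_RANGE):
    result = []
    for item in token.split(','):
        # '*' becomes the field's full range, e.g. '0-59'
        item = str(item).replace('*', '{0[1]}-{0[2]}'.format(field_range))
        mat = re.search(r'^(\d+)-(\d+)/?(.*)$', item)
        if mat:
            start, end, step = mat.groups()
            start, end, step = int(start), int(end), int(step or 1)
            result.extend(range(start, end + 1, step))
        else:
            result.append(int(item))
    return sorted(result)

print(expand('*/15'))       # -> [0, 15, 30, 45]
print(expand('5,10-20/5'))  # -> [5, 10, 15, 20]
```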
#!/usr/bin/env python
import os
import re
import struct
import socket
import select
import platform
"""
NAT-PMP client library
Provides functions to interact with NAT-PMP gateways implementing version 0
of the NAT-PMP draft specification.
This version does not completely implement the draft standard.
* It does not provide functionality to listen for address change packets.
* It does not have a proper request queuing system, meaning that
multiple requests may be issued in parallel, against spec recommendations.
For more information on NAT-PMP, see the NAT-PMP draft specification:
http://files.dns-sd.org/draft-cheshire-nat-pmp.txt
Requires Python 2.3 or later.
Tested on Python 2.5, 2.6 against Apple AirPort Express.
Coinbend change log:
* Changed gateway auto-detection to use netifaces which makes
this library more cross-platform.
* Added an easy to use port forwarding function with the same
interface as that seen in the UPnP module.
"""
__version__ = "0.2.3"
__license__ = """Copyright (c) 2008-2014, Yiming Liu, All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The names of the author and contributors may not be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE."""
__author__ = "Yiming Liu <http://www.yimingliu.com/>"
NATPMP_PORT = 5351
NATPMP_RESERVED_VAL = 0
NATPMP_PROTOCOL_UDP = 1
NATPMP_PROTOCOL_TCP = 2
NATPMP_GATEWAY_NO_VALID_GATEWAY = -10
NATPMP_GATEWAY_NO_SUPPORT = -11
NATPMP_GATEWAY_CANNOT_FIND = -12
NATPMP_RESULT_SUCCESS = 0 # Success
NATPMP_RESULT_UNSUPPORTED_VERSION = 1 # Unsupported Version
NATPMP_RESULT_NOT_AUTHORIZED = 2 # Not Authorized/Refused/NATPMP turned off
NATPMP_RESULT_NETWORK_FAILURE = 3 # Network Failure
NATPMP_RESULT_OUT_OF_RESOURCES = 4 # can not create more mappings
NATPMP_RESULT_UNSUPPORTED_OPERATION = 5 # not a supported opcode
# all remaining results are fatal errors
NATPMP_ERROR_DICT = {
NATPMP_RESULT_SUCCESS: "No error.",
NATPMP_RESULT_UNSUPPORTED_VERSION: "The protocol version "
"specified is unsupported.",
NATPMP_RESULT_NOT_AUTHORIZED: "The operation was refused. "
"NAT-PMP may be turned off on "
"gateway.",
# network failure
NATPMP_RESULT_NETWORK_FAILURE: "There was a network failure. "
"The gateway may not have an IP "
"address.",
# can not create more mappings
NATPMP_RESULT_OUT_OF_RESOURCES: "The NAT-PMP gateway is out of "
"resources and cannot create "
"more mappings.",
# not a supported opcode
NATPMP_RESULT_UNSUPPORTED_OPERATION: "The NAT-PMP gateway does "
"not support this "
"operation",
NATPMP_GATEWAY_NO_SUPPORT: "The gateway does not support "
"NAT-PMP",
NATPMP_GATEWAY_NO_VALID_GATEWAY: "No valid gateway address was "
"specified.",
NATPMP_GATEWAY_CANNOT_FIND: "Cannot automatically determine "
"gateway address. Must specify "
"manually."
}
class NATPMPRequest(object):
"""Represents a basic NAT-PMP request. This currently consists of the
1-byte fields version and opcode.
Other requests are derived from NATPMPRequest.
"""
retry_increment = 0.250 # seconds
def __init__(self, version, opcode):
self.version = version
self.opcode = opcode
def toBytes(self):
"""Converts the request object to a byte string."""
return struct.pack('!BB', self.version, self.opcode)
class PublicAddressRequest(NATPMPRequest):
"""Represents a NAT-PMP request to the local gateway for a public address.
As per the specification, this is a generic request with the opcode = 0.
"""
def __init__(self, version=0):
NATPMPRequest.__init__(self, version, 0)
class PortMapRequest(NATPMPRequest):
"""Represents a NAT-PMP request to the local gateway for a port mapping.
As per the specification, this request extends NATPMPRequest with
the fields private_port, public_port, and lifetime. The first two
are 2-byte unsigned shorts, and the last is a 4-byte unsigned integer.
"""
def __init__(self, protocol, private_port, public_port, lifetime=3600,
version=0):
NATPMPRequest.__init__(self, version, protocol)
self.private_port = private_port
self.public_port = public_port
self.lifetime = lifetime
def toBytes(self):
s = NATPMPRequest.toBytes(self) +\
struct.pack('!HHHI', NATPMP_RESERVED_VAL, self.private_port
, self.public_port, self.lifetime)
return s
class NATPMPResponse(object):
"""Represents a generic NAT-PMP response from the local gateway. The
generic response has fields for version, opcode, result, and secs
since last epoch (last boot of the NAT gateway). As per the
specification, the opcode is offset by 128 from the opcode of
the original request.
"""
def __init__(self, version, opcode, result, sec_since_epoch):
self.version = version
self.opcode = opcode
self.result = result
self.sec_since_epoch = sec_since_epoch
def __str__(self):
return "NATPMPResponse(%d, %d, %d, $d)".format(self.version,
self.opcode,
self.result,
self.sec_since_epoch)
class PublicAddressResponse(NATPMPResponse):
"""Represents a NAT-PMP response from the local gateway to a
public-address request. It has one additional 4-byte field
containing the IP returned.
The member variable ip contains the Python-friendly string form, while
ip_int contains the same in the original 4-byte unsigned int.
"""
def __init__(self, data):
if len(data) > 12:
data = data[:12]
version, opcode, result, sec_since_epoch, self.ip_int =\
struct.unpack("!BBHII", data)
NATPMPResponse.__init__(self, version, opcode, result, sec_since_epoch)
self.ip = socket.inet_ntoa(data[8:8+4])
# self.ip = socket.inet_ntoa(self.ip_bytes)
def __str__(self):
return "PublicAddressResponse: version %d, opcode %d (%d)," \
" result %d, ssec %d, ip %s".format(self.version, self.opcode,
self.result,
self.sec_since_epoch,
self.ip)
class PortMapResponse(NATPMPResponse):
"""Represents a NAT-PMP response from the local gateway to a
public-address request. The response contains the private port,
public port, and the lifetime of the mapping in addition to typical
NAT-PMP headers. Note that the port mapping assigned is
NOT NECESSARILY the port requested (see the specification
for details).
"""
def __init__(self, data):
if len(data) > 16:
data = data[:16]
version, opcode, result, sec_since_epoch, self.private_port,\
self.public_port, self.lifetime = struct.unpack('!BBHIHHI', data)
NATPMPResponse.__init__(self, version, opcode, result, sec_since_epoch)
def __str__(self):
msg = "PortMapResponse: version %d, opcode %d (%d),"
msg += " result %d, ssec %d, private_port %d, public port %d,"
msg += " lifetime %d"
return msg % (self.version, self.opcode, self.opcode, self.result,
self.sec_since_epoch, self.private_port, self.public_port,
self.lifetime)
class NATPMPError(Exception):
"""Generic exception state. May be used to represent unknown errors."""
pass
class NATPMPResultError(NATPMPError):
"""Used when a NAT gateway responds with an error-state response."""
pass
class NATPMPNetworkError(NATPMPError):
"""Used when a network error occurred while communicating
with the NAT gateway."""
pass
class NATPMPUnsupportedError(NATPMPError):
"""Used when a NAT gateway does not support NAT-PMP."""
pass
def get_gateway_addr():
"""Use netifaces to get the gateway address, if we can't import it then
fall back to a hack to obtain the current gateway automatically, since
Python has no interface to sysctl().
This may or may not be the gateway we should be contacting.
It does not guarantee correct results.
This function requires the presence of netstat on the path on POSIX
and NT.
"""
try:
import netifaces
return netifaces.gateways()["default"][netifaces.AF_INET][0]
except ImportError:
shell_command = 'netstat -rn'
if os.name == "posix":
pattern = \
re.compile('(?:default|0\.0\.0\.0|::/0)\s+([\w\.:]+)\s+.*UG')
elif os.name == "nt":
if platform.version().startswith("6.1"):
pattern = re.compile(".*?0.0.0.0[ ]+0.0.0.0[ ]+(.*?)[ ]+?.*?\n")
else:
pattern = re.compile(".*?Default Gateway:[ ]+(.*?)\n")
system_out = os.popen(shell_command, 'r').read()
if not system_out:
raise NATPMPNetworkError(NATPMP_GATEWAY_CANNOT_FIND,
error_str(NATPMP_GATEWAY_CANNOT_FIND))
match = pattern.search(system_out)
if not match:
raise NATPMPNetworkError(NATPMP_GATEWAY_CANNOT_FIND,
error_str(NATPMP_GATEWAY_CANNOT_FIND))
addr = match.groups()[0].strip()
return addr
def error_str(result_code):
"""Takes a numerical error code and returns a human-readable
error string.
"""
result = NATPMP_ERROR_DICT.get(result_code)
if not result:
result = "Unknown fatal error."
return result
def get_gateway_socket(gateway):
"""Takes a gateway address string and returns a non-blocking UDP
socket to communicate with its NAT-PMP implementation on
NATPMP_PORT.
e.g. addr = get_gateway_socket('10.0.1.1')
"""
if not gateway:
raise NATPMPNetworkError(NATPMP_GATEWAY_NO_VALID_GATEWAY,
error_str(NATPMP_GATEWAY_NO_VALID_GATEWAY))
response_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
response_socket.setblocking(0)
response_socket.connect((gateway, NATPMP_PORT))
return response_socket
def get_public_address(gateway_ip=None, retry=9):
"""A high-level function that returns the public interface IP of
the current host by querying the NAT-PMP gateway. IP is
returned as string.
Takes two possible keyword arguments:
gateway_ip - the IP to the NAT-PMP compatible gateway.
Defaults to using auto-detection function
get_gateway_addr()
retry - the number of times to retry the request if unsuccessful.
Defaults to 9 as per specification.
"""
if gateway_ip is None:
gateway_ip = get_gateway_addr()
addr_request = PublicAddressRequest()
addr_response = send_request_with_retry(gateway_ip, addr_request,
response_data_class=
PublicAddressResponse,
retry=retry, response_size=12)
if addr_response.result != 0:
# sys.stderr.write("NAT-PMP error %d: %s\n" %
# (addr_response.result,
# error_str(addr_response.result)))
# sys.stderr.flush()
raise NATPMPResultError(addr_response.result,
error_str(addr_response.result), addr_response)
addr = addr_response.ip
return addr
def map_tcp_port(public_port, private_port, lifetime=3600, gateway_ip=None,
retry=9, use_exception=True):
"""A high-level wrapper to map_port() that requests a mapping
for a public TCP port on the NAT to a private TCP port on this host.
Returns the complete response on success.
public_port - the public port of the mapping requested
private_port - the private port of the mapping requested
lifetime - the duration of the mapping in seconds.
Defaults to 3600, per specification.
gateway_ip - the IP to the NAT-PMP compatible gateway.
Defaults to using auto-detection function
get_gateway_addr()
retry - the number of times to retry the request if unsuccessful.
Defaults to 9 as per specification.
use_exception - throw an exception if an error result is
received from the gateway. Defaults to True.
"""
return map_port(NATPMP_PROTOCOL_TCP, public_port, private_port, lifetime,
gateway_ip=gateway_ip, retry=retry,
use_exception=use_exception)
def map_udp_port(public_port, private_port, lifetime=3600, gateway_ip=None,
retry=9, use_exception=True):
"""A high-level wrapper to map_port() that requests a mapping for
a public UDP port on the NAT to a private UDP port on this host.
Returns the complete response on success.
public_port - the public port of the mapping requested
private_port - the private port of the mapping requested
lifetime - the duration of the mapping in seconds.
Defaults to 3600, per specification.
gateway_ip - the IP to the NAT-PMP compatible gateway.
Defaults to using auto-detection function
get_gateway_addr()
retry - the number of times to retry the request if unsuccessful.
Defaults to 9 as per specification.
use_exception - throw an exception if an error result is
received from the gateway. Defaults to True.
"""
return map_port(NATPMP_PROTOCOL_UDP, public_port, private_port, lifetime,
gateway_ip=gateway_ip, retry=retry,
use_exception=use_exception)
def map_port(protocol, public_port, private_port, lifetime=3600,
gateway_ip=None, retry=9, use_exception=True):
"""A function to map public_port to private_port of protocol.
Returns the complete response on success.
protocol - NATPMP_PROTOCOL_UDP or NATPMP_PROTOCOL_TCP
public_port - the public port of the mapping requested
private_port - the private port of the mapping requested
lifetime - the duration of the mapping in seconds.
Defaults to 3600, per specification.
gateway_ip - the IP to the NAT-PMP compatible gateway.
Defaults to using auto-detection function
get_gateway_addr()
retry - the number of times to retry the request if unsuccessful.
Defaults to 9 as per specification.
use_exception - throw an exception if an error result
is received from the gateway. Defaults to True.
"""
if protocol not in [NATPMP_PROTOCOL_UDP, NATPMP_PROTOCOL_TCP]:
raise ValueError("Must be either NATPMP_PROTOCOL_UDP or "
"NATPMP_PROTOCOL_TCP")
if gateway_ip is None:
gateway_ip = get_gateway_addr()
response = None
port_mapping_request = PortMapRequest(protocol, private_port,
public_port, lifetime)
port_mapping_response = \
send_request_with_retry(gateway_ip, port_mapping_request,
response_data_class=PortMapResponse,
retry=retry)
if port_mapping_response.result != 0 and use_exception:
raise NATPMPResultError(port_mapping_response.result,
error_str(port_mapping_response.result),
port_mapping_response)
return port_mapping_response
def send_request(gateway_socket, request):
gateway_socket.sendall(request.toBytes())
def read_response(gateway_socket, timeout, response_size=16):
data = ""
source_addr = ("", "")
rlist, wlist, xlist = select.select([gateway_socket], [], [], timeout)
if rlist:
resp_socket = rlist[0]
try:
data, source_addr = resp_socket.recvfrom(response_size)
except Exception:
return None, None
return data, source_addr
def send_request_with_retry(gateway_ip, request, response_data_class=None,
retry=9, response_size=16):
gateway_socket = get_gateway_socket(gateway_ip)
n = 1
data = ""
while n <= retry and not data:
send_request(gateway_socket, request)
data, source_addr = read_response(gateway_socket,
n * request.retry_increment,
response_size=response_size)
if data is None or source_addr[0] != gateway_ip or\
source_addr[1] != NATPMP_PORT:
data = "" # discard data if source mismatch, as per specification
n += 1
if n >= retry and not data:
raise NATPMPUnsupportedError(NATPMP_GATEWAY_NO_SUPPORT,
error_str(NATPMP_GATEWAY_NO_SUPPORT))
if data and response_data_class:
data = response_data_class(data)
return data
class NatPMP:
def __init__(self, interface="default"):
self.interface = interface
def forward_port(self, proto, src_port, dest_ip, dest_port=None):
proto = proto.upper()
valid_protos = ["TCP", "UDP"]
if proto not in valid_protos:
raise Exception("Invalid protocol for forwarding.")
valid_ports = range(1, 65535)
if src_port not in valid_ports:
raise Exception("Invalid port for forwarding.")
# Source port is forwarded to same destination port number.
if dest_port is None:
dest_port = src_port
if proto == "TCP":
proto = NATPMP_PROTOCOL_UDP
else:
proto = NATPMP_PROTOCOL_TCP
return map_port(proto, src_port, dest_port)
if __name__ == "__main__":
"""
#
addr = get_public_address()
map_resp = map_tcp_port(62001, 62001)
print (addr)
print (map_resp.__dict__)
#xxxxxx = NatPMP()
#print(xxxxxx.forward_port("TCP", 12156, "192.168.0.4"))
#print(n.is_port_forwarded(12156, "tcp"))
"""
|
Storj/pyp2p
|
pyp2p/nat_pmp.py
|
Python
|
mit
| 21,188
|
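The request and response classes above are thin wrappers over fixed-size `struct` layouts. A minimal standalone sketch that packs a port-mapping request and unpacks a fabricated public-address response using the same formats as `PortMapRequest.toBytes()` and `PublicAddressResponse`; the byte values are made up and nothing touches the network.

```python
# Standalone sketch of the NAT-PMP wire formats used above. No network I/O;
# the response bytes below are fabricated for illustration.
import socket
import struct

# PortMapRequest.toBytes(): version, opcode, then reserved, private port, public port, lifetime
request = struct.pack('!BB', 0, 2) + struct.pack('!HHHI', 0, 8080, 8080, 3600)
print(len(request))                          # -> 12 bytes on the wire

# PublicAddressResponse layout: version, opcode (request opcode + 128), result,
# seconds since the gateway booted, and the public IPv4 address.
response = struct.pack('!BBHII', 0, 128, 0, 12345, 0xC0A80001)
version, opcode, result, ssec, ip_int = struct.unpack('!BBHII', response)
print(socket.inet_ntoa(response[8:12]))      # -> 192.168.0.1
```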
import timeit
import math
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
from colour import Color
X, Y = 0, 1
class Resolutions:
VGA = (640 , 480)
SVGA = (800 , 600)
WSVGA = (1024, 600)
XGA = (1024, 768)
XGAP = (1152, 864)
    WXGA = (1280, 720)  # note: WXGA is redefined twice below, so only (1280, 800) survives
    WXGA = (1280, 768)
    WXGA = (1280, 800)
SXGA = (1280, 1024)
HD = (1360, 768)
HD6 = (1366, 768)
WXGAP = (1440, 900)
HDP = (1600, 900)
UXGA = (1600, 1200)
WSXGAP = (1680, 1050)
FHD = (1920, 1080)
WUXGA = (1920, 1200)
QHD = (2560, 1440)
WQXGA = (2560, 1600)
FOURK = (3840, 2160)
class Utils(object):
def log(self, msg):
print('{} - {}'.format(
self.__class__.__name__,
msg
))
class Screen:
def __init__(self, title='razr engine - OpenGL', width=640, height=480, resolution=Resolutions.VGA):
self.title = title
self.width = width
self.height = height
self.scenes = []
self.scene = None
self.fps = 30
def set_default_scene(self, index):
'''
        Most of the time this will be used to point to the "menu" Scene
'''
self.scene = self.scenes[index]
def show(self):
self.run()
def run(self): # initialization
# initialize glut
glutInit()
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_ALPHA | GLUT_DEPTH)
# set window size
glutInitWindowSize(self.width, self.height)
# glutFullScreen()
# set window position
glutInitWindowPosition(0, 0)
# create window with title
self.window = glutCreateWindow(self.title)
# set draw function callback
glutDisplayFunc(self.draw)
# draw all the time
glutIdleFunc(self.draw)
# Keyboard Callbacks
glutKeyboardFunc(self.keyboard_callback)
glutSpecialFunc(self.keyboard_special_callback)
# start everything
glutMainLoop()
def keyboard_callback(self, code, x, y):
'''
        Every Scene already has a callback like this one; no need to register
        it manually.
'''
if self.scene:
self.scene.keyboard_callback(code.decode(encoding='ascii'), x, y)
def keyboard_special_callback(self, code, x, y):
'''
        Every Scene already has a callback like this one; no need to register
        it manually.
'''
if self.scene:
self.scene.keyboard_special_callback(code, x, y)
def refresh2d(self):
glViewport(0, 0, self.width, self.height)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
# Upper-left origin
# glOrtho(0.0, self.width, 0.0, self.height, 0.0, 1.0)
glOrtho(0.0, self.width, 0.0, self.height, 0.0, 1.0)
glMatrixMode (GL_MODELVIEW)
glLoadIdentity()
def draw(self):
# clear the screen
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# reset position
glLoadIdentity()
self.refresh2d()
# ToDo draw rectangle
if self.scene:
self.scene.draw()
# important for double buffering
glutSwapBuffers()
def quit(self):
try:
glutDestroyWindow(self.window)
except NameError:
print('looks like we never rendered squat!')
exit()
class Scene(object):
def __init__(self):
self.actors = []
def draw(self):
for actor in self.actors:
actor.draw()
def keyboard_callback(self, key, x, y):
pass
def keyboard_special_callback(self, key, x, y):
pass
def keyboard_modifier(self):
return glutGetModifiers()
class Actor(Utils):
def __init__(self, gforce=0):
self.gforce = gforce
def draw(self):
raise NotImplementedError('draw not defined')
class CommonPhysic(object):
def is_colliding(self, ):
pass
class Point(object):
def __init__(self, x=0, y=0, t=None):
if t:
self.x, self.y = t
else:
self.x = x
self.y = y
def __sub__(self, k):
if type(k) == tuple:
self.x -= k[X]
self.y -= k[Y]
elif type(k) == int:
self.x -= k
self.y -= k
return self
def __add__(self, k):
if type(k) == tuple:
self.x += k[X]
self.y += k[Y]
elif type(k) == int:
self.x += k
self.y += k
return self
def __mul__(self, k):
if type(k) == tuple:
self.x *= k[X]
self.y *= k[Y]
elif type(k) == int:
self.x *= k
self.y *= k
return self
def __iter__(self):
return iter([self.x, self.y])
def __str__(self):
return 'point({:0.2f}, {:0.2f})'.format(self.x, self.y)
class Polygon(Actor):
'''
Base Helper for geomtric based actor
'''
def __init__(self, origin, vertices, radius, degree=45, color="#000000"):
super(Polygon, self).__init__()
self.origin = origin
self.radius = radius
self.vertices = vertices
self.matrix = []
self.radians = math.radians(degree)
self.color = Color(color)
        self.log('processing {} vertices'.format(self.vertices))
for vert in range(1,self.vertices+1):
theta = 2 * math.pi * vert / self.vertices
# there is a specific length for every point
if type(self.radius) == list:
r = radius[vert - 1]
# equal length to every point
else:
r = radius
# plotting the coordinates with a initial degree rotation
x = r * math.cos(theta - self.radians)
y = r * math.sin(theta - self.radians)
self.matrix.append([x + self.origin[0], y + self.origin[1]])
# self.matrix.append([x, y])
def draw(self):
# coloring
glColor3f(self.color.red, self.color.green, self.color.blue)
# filled object
glBegin(GL_TRIANGLE_FAN)
# drawing coordinates
for point in self.matrix:
glVertex2f(point[X], point[Y])
glEnd()
def move(self, x=0, y=0):
if x != 0 or y != 0:
self.origin = (self.origin[X] + x, self.origin[Y] + y)
for point in self.matrix:
self.log("({:0.2f}, {:0.2f}) -> ({:0.2f}, {:0.2f})'".format(point[X], point[Y], point[X]+x, point[Y]+y))
point[X] += x
point[Y] += y
def rotate(self, degree):
radians = math.radians(-1 * degree)
cos = math.cos(radians)
sin = math.sin(radians)
for point in list(self.matrix):
# move the coordinate to the origin
x, y = point[0] - self.origin[0], point[1] - self.origin[1]
# make the translation and than move the points to the real origin
point[0] = ((x * cos) - (y * sin)) + self.origin[0]
point[1] = ((x * sin) + (y * cos)) + self.origin[1]
# self.radians += radians
def show(self):
for c in self.matrix:
print(c)
class Triangle(Polygon):
def __init__(self, origin, size, color="#000000"):
super(Triangle, self).__init__(origin, 3, size/2, degree=30, color=color)
class Square(Polygon):
def __init__(self, origin, size, color="#000000"):
super(Square, self).__init__(origin, 4, size/2, color=color)
# class Rectangle(Polygon):
#
# def __init__(self, origin, width, height, color="#000000"):
# super(Rectangle, self).__init__(origin, 4, [height/2, width, height/2, width], degree=10, color=color)
|
vyscond/razr
|
razr.py
|
Python
|
mit
| 7,788
|
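`Polygon.__init__` above plots its vertices by stepping `theta` around the circle and applying an initial rotation before translating to the origin. A minimal standalone sketch of that vertex math with no OpenGL; the origin, radius, and vertex count are made-up example values.

```python
# Standalone sketch of the vertex math in Polygon.__init__ above; no OpenGL needed.
# Origin, radius, and vertex count are made-up example values.
import math


def polygon_vertices(origin, n, radius, degree=45):
    offset = math.radians(degree)
    points = []
    for i in range(1, n + 1):
        theta = 2 * math.pi * i / n
        x = radius * math.cos(theta - offset)
        y = radius * math.sin(theta - offset)
        points.append((x + origin[0], y + origin[1]))
    return points


for x, y in polygon_vertices((100, 100), 4, 50):
    print('({:0.2f}, {:0.2f})'.format(x, y))
```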
__version__ = '0.16.1'
|
python-poetry/poetry-core
|
src/poetry/core/_vendor/_pyrsistent_version.py
|
Python
|
mit
| 23
|
# run_downscale -- CMIP5 / CRU TS323 / CRU TS40
# for variables: tas, pr, vap, hur, clt
# *tasmin/tasmax require tas to be run first so we
# perform the computation in a second run.
# # # # # # # # # # # # # # # # # # # # # # # # # # #
import os, subprocess
base_dir = '/workspace/Shared/Tech_Projects/DeltaDownscaling/project_data'
# change the CCSM4 naming to NCAR-CCSM4 (if needed)
ccsm4_path = os.path.join( base_dir, 'downscaled', 'CCSM4' )
if os.path.exists( ccsm4_path ):
_ = subprocess.call([ 'mv', ccsm4_path, ccsm4_path.replace( 'CCSM4', 'NCAR-CCSM4' ) ])
print( 'changed CCSM4 error to NCAR-CCSM4 for proper min/max handling' )
del ccsm4_path # end naming change
# change dir to the scripts directory
os.chdir( '/workspace/UA/malindgren/repos/downscale/snap_scripts/downscaling_v2' )
run_scripts = [ 'wrap_downscaler_cmip5_slurm_minmax.py',
'wrap_downscaler_cru_40_slurm_minmax.py',
'wrap_downscaler_cru_slurm_minmax.py',
'wrap_downscaler_cru_324_01_slurm_minmax.py' ]
for script in run_scripts:
os.system( 'ipython {}'.format( script ) )
|
ua-snap/downscale
|
snap_scripts/downscaling_v2/OLD_downscaling_v2/run_downscale_wrappers_minmax.py
|
Python
|
mit
| 1,072
|
#!/usr/bin/env python
import datetime
import os
import sys
import log
import logging
import argparse
import math
from optimization_weight import *
from san_att_twolayer_theano import *
from data_provision_att_vqa import *
from data_processing_vqa import *
##################
# initialization #
##################
options = OrderedDict()
# data related
options['data_path'] = '../data_vqa'
options['feature_file'] = 'trainval_feat.h5'
options['expt_folder'] = '../expt'
options['model_name'] = 'imageqa'
options['train_split'] = 'trainval1'
options['val_split'] = 'val2'
options['shuffle'] = True
options['reverse'] = False
options['sample_answer'] = True
options['num_region'] = 196
options['region_dim'] = 512
options['n_words'] = 13746
options['n_output'] = 1000
# structure options
options['combined_num_mlp'] = 1
options['combined_mlp_drop_0'] = True
options['combined_mlp_act_0'] = 'linear'
options['sent_drop'] = False
options['use_tanh'] = False
options['use_unigram_conv'] = True
options['use_bigram_conv'] = True
options['use_trigram_conv'] = True
options['use_attention_drop'] = False
options['use_before_attention_drop'] = False
# dimensions
options['n_emb'] = 500
options['n_dim'] = 500
options['n_image_feat'] = options['region_dim']
options['n_common_feat'] = 500
options['num_filter_unigram'] = 256
options['num_filter_bigram'] = 512
options['num_filter_trigram'] = 512
options['n_attention'] = 512
# initialization
options['init_type'] = 'uniform'
options['range'] = 0.01
options['std'] = 0.01
options['init_lstm_svd'] = False
# learning parameters
options['optimization'] = 'sgd' # choices
options['batch_size'] = 100
options['lr'] = numpy.float32(0.1)
options['w_emb_lr'] = numpy.float32(80)
options['momentum'] = numpy.float32(0.9)
options['gamma'] = 1
options['step'] = 10
options['step_start'] = 100
options['max_epochs'] = 50
options['weight_decay'] = 0.0005
options['decay_rate'] = numpy.float32(0.999)
options['drop_ratio'] = numpy.float32(0.5)
options['smooth'] = numpy.float32(1e-8)
options['grad_clip'] = numpy.float32(0.1)
# log params
options['disp_interval'] = 10
options['eval_interval'] = 1000
options['save_interval'] = 500
def get_lr(options, curr_epoch):
if options['optimization'] == 'sgd':
power = max((curr_epoch - options['step_start']) / options['step'], 0)
power = math.ceil(power)
return options['lr'] * (options['gamma'] ** power) #
else:
return options['lr']
def train(options):
logger = logging.getLogger('root')
logger.info(options)
logger.info('start training')
data_provision_att_vqa = DataProvisionAttVqa(options['data_path'],
options['feature_file'])
batch_size = options['batch_size']
max_epochs = options['max_epochs']
###############
# build model #
###############
params = init_params(options)
shared_params = init_shared_params(params)
image_feat, input_idx, input_mask, \
label, dropout, cost, accu, pred_label, \
prob_attention_1, prob_attention_2 \
= build_model(shared_params, options)
logger.info('finished building model')
####################
# add weight decay #
####################
weight_decay = theano.shared(numpy.float32(options['weight_decay']),\
name = 'weight_decay')
reg_cost = 0
for k in shared_params.iterkeys():
if k != 'w_emb':
reg_cost += (shared_params[k]**2).sum()
reg_cost *= weight_decay
reg_cost = cost + reg_cost
###############
# # gradients #
###############
grads = T.grad(reg_cost, wrt = shared_params.values())
grad_buf = [theano.shared(p.get_value() * 0, name='%s_grad_buf' % k )
for k, p in shared_params.iteritems()]
# accumulate the gradients within one batch
update_grad = [(g_b, g) for g_b, g in zip(grad_buf, grads)]
# need to declare a share variable ??
grad_clip = options['grad_clip']
grad_norm = [T.sqrt(T.sum(g_b**2)) for g_b in grad_buf]
update_clip = [(g_b, T.switch(T.gt(g_norm, grad_clip),
g_b*grad_clip/g_norm, g_b))
for (g_norm, g_b) in zip(grad_norm, grad_buf)]
# corresponding update function
f_grad_clip = theano.function(inputs = [],
updates = update_clip)
f_output_grad_norm = theano.function(inputs = [],
outputs = grad_norm)
f_train = theano.function(inputs = [image_feat, input_idx, input_mask, label],
outputs = [cost, accu],
updates = update_grad,
on_unused_input='warn')
# validation function no gradient updates
f_val = theano.function(inputs = [image_feat, input_idx, input_mask, label],
outputs = [cost, accu],
on_unused_input='warn')
f_grad_cache_update, f_param_update \
= eval(options['optimization'])(shared_params, grad_buf, options)
logger.info('finished building function')
# calculate how many iterations we need
num_iters_one_epoch = data_provision_att_vqa.get_size(options['train_split']) / batch_size
max_iters = max_epochs * num_iters_one_epoch
eval_interval_in_iters = options['eval_interval']
save_interval_in_iters = options['save_interval']
disp_interval = options['disp_interval']
best_val_accu = 0.0
best_param = dict()
for itr in xrange(max_iters + 1):
if (itr % eval_interval_in_iters) == 0 or (itr == max_iters):
val_cost_list = []
val_accu_list = []
val_count = 0
dropout.set_value(numpy.float32(0.))
for batch_image_feat, batch_question, batch_answer_label \
in data_provision_att_vqa.iterate_batch(options['val_split'],
batch_size):
input_idx, input_mask \
= process_batch(batch_question,
reverse=options['reverse'])
batch_image_feat = reshape_image_feat(batch_image_feat,
options['num_region'],
options['region_dim'])
[cost, accu] = f_val(batch_image_feat, np.transpose(input_idx),
np.transpose(input_mask),
batch_answer_label.astype('int32').flatten())
val_count += batch_image_feat.shape[0]
val_cost_list.append(cost * batch_image_feat.shape[0])
val_accu_list.append(accu * batch_image_feat.shape[0])
ave_val_cost = sum(val_cost_list) / float(val_count)
ave_val_accu = sum(val_accu_list) / float(val_count)
if best_val_accu < ave_val_accu:
best_val_accu = ave_val_accu
shared_to_cpu(shared_params, best_param)
logger.info('validation cost: %f accu: %f' %(ave_val_cost, ave_val_accu))
dropout.set_value(numpy.float32(1.))
if options['sample_answer']:
batch_image_feat, batch_question, batch_answer_label \
= data_provision_att_vqa.next_batch_sample(options['train_split'],
batch_size)
else:
batch_image_feat, batch_question, batch_answer_label \
= data_provision_att_vqa.next_batch(options['train_split'], batch_size)
input_idx, input_mask \
= process_batch(batch_question, reverse=options['reverse'])
batch_image_feat = reshape_image_feat(batch_image_feat,
options['num_region'],
options['region_dim'])
[cost, accu] = f_train(batch_image_feat, np.transpose(input_idx),
np.transpose(input_mask),
batch_answer_label.astype('int32').flatten())
# output_norm = f_output_grad_norm()
# logger.info(output_norm)
# pdb.set_trace()
f_grad_clip()
f_grad_cache_update()
lr_t = get_lr(options, itr / float(num_iters_one_epoch))
f_param_update(lr_t)
if options['shuffle'] and itr > 0 and itr % num_iters_one_epoch == 0:
data_provision_att_vqa.random_shuffle()
if (itr % disp_interval) == 0 or (itr == max_iters):
logger.info('iteration %d/%d epoch %f/%d cost %f accu %f, lr %f' \
% (itr, max_iters,
itr / float(num_iters_one_epoch), max_epochs,
cost, accu, lr_t))
if np.isnan(cost):
logger.info('nan detected')
file_name = options['model_name'] + '_nan_debug.model'
logger.info('saving the debug model to %s' %(file_name))
save_model(os.path.join(options['expt_folder'], file_name), options,
best_param)
return 0
logger.info('best validation accu: %f', best_val_accu)
file_name = options['model_name'] + '_best_' + '%.3f' %(best_val_accu) + '.model'
logger.info('saving the best model to %s' %(file_name))
save_model(os.path.join(options['expt_folder'], file_name), options,
best_param)
return best_val_accu
if __name__ == '__main__':
logger = log.setup_custom_logger('root')
parser = argparse.ArgumentParser()
parser.add_argument('changes', nargs='*',
help='Changes to default values',
default = '')
args = parser.parse_args()
for change in args.changes:
logger.info('dict({%s})'%(change))
options.update(eval('dict({%s})'%(change)))
train(options)
|
codedecde/ImageQA
|
Src/TheanoModel/Code/san_att_conv_twolayer.py
|
Python
|
mit
| 9,975
|
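The training loop above clips each parameter's accumulated gradient to a maximum L2 norm (`grad_clip`) before the optimizer update. A minimal NumPy sketch of that per-parameter clipping rule; Theano is not required, and the gradient values are made up.

```python
# Standalone NumPy sketch of the per-parameter gradient-norm clipping done above:
# g <- g * clip / ||g|| whenever ||g|| > clip. Values are made up for illustration.
import numpy as np


def clip_by_norm(grad, clip=0.1):
    norm = np.sqrt(np.sum(grad ** 2))
    return grad * clip / norm if norm > clip else grad


g = np.array([0.3, -0.4])                     # ||g|| = 0.5, above the 0.1 threshold
print(clip_by_norm(g))                        # rescaled to norm 0.1
print(clip_by_norm(np.array([0.01, 0.02])))   # already below the threshold, unchanged
```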
# VMeter Python demos
# VMeter.net
# ver 1. 1/26/13
import pypm
import array
import time
from collections import deque
INPUT=0
OUTPUT=1
def PrintDevices(InOrOut):
for loop in range(pypm.CountDevices()):
interf,name,inp,outp,opened = pypm.GetDeviceInfo(loop)
if ((InOrOut == INPUT) & (inp == 1) |
(InOrOut == OUTPUT) & (outp ==1)):
print loop, name," ",
if (inp == 1): print "(input) ",
else: print "(output) ",
if (opened == 1): print "(opened)"
else: print "(unopened)"
print
# Using the psutil library, displays current activity. A top sided
# envelope follower is used to smooth out the display.
# The envelope follower will immediately jump to a higher level,
# but falls slowly.
def MonitorCpu(MidiOut):
import psutil # http://code.google.com/p/psutil/
cpu_percent = 0.0
while True:
new_cpu_percent = psutil.cpu_percent(interval=.05)
if new_cpu_percent > cpu_percent:
cpu_percent = new_cpu_percent
else:
cpu_percent = cpu_percent * .8;
output = int(cpu_percent * 1.27)
SendColumn(MidiOut,output)
def SendArray(array, MidiOut):
# assuming 38 length array
# need to split array into (6) 7bit chunks
# Individual LED control is sent to the aftertouch MIDI command and channels 14, 15 and 16.
# Each of the data bytes transmit 7 LED states.
bytes = [0,0,0,0,0,0]
bytes[0] = array[0] | array[1]<<1 | array[2]<<2 | array[3]<<3 | array[4]<<4 | array[5]<<5 | array[6]<<6
bytes[1] = array[7] | array[8]<<1 | array[9]<<2 | array[10]<<3 | array[11]<<4 | array[12]<<5 | array[13]<<6
bytes[2] = array[14] | array[15]<<1 | array[16]<<2 | array[17]<<3 | array[18]<<4 | array[19]<<5 | array[20]<<6
bytes[3] = array[21] | array[22]<<1 | array[23]<<2 | array[24]<<3 | array[25]<<4 | array[26]<<5 | array[27]<<6
bytes[4] = array[28] | array[29]<<1 | array[30]<<2 | array[31]<<3 | array[32]<<4 | array[33]<<5 | array[34]<<6
bytes[5] = array[35] | array[36]<<1 | array[37]<<2
MidiOut.WriteShort(0xAD,bytes[0],bytes[1])
MidiOut.WriteShort(0xAE,bytes[2],bytes[3])
MidiOut.WriteShort(0xAF,bytes[4],bytes[5])
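# (Added sketch) The unrolled packing in SendArray can equivalently be written as a loop.
# pack_led_states is a hypothetical helper added here for illustration only; it is not
# part of the original demo and is never called.
def pack_led_states(states):
    # pack 38 on/off values into six 7-bit bytes, LSB first, exactly as SendArray does
    packed = [0, 0, 0, 0, 0, 0]
    for i, state in enumerate(states[:38]):
        packed[i // 7] |= (state & 1) << (i % 7)
    return packed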
def SetLEDsIgnoreTouch(MidiOut):
    MidiOut.WriteShort(0xB0,119,107) # this causes the LEDs to not respond to touch, only to MIDI input.
def EnableOnOffOutput(MidiOut):
MidiOut.WriteShort(0xB0,119,120) # now the VMeter will send 127 via ctrl #17 when touched, and 0 when released. 119 disables.
def SendColumn(MidiOut,height):
MidiOut.WriteShort(0xB0,20,height)
def EnablePressureOutput(MidiOut):
MidiOut.WriteShort(0xB0,119,122)
led_array = [1,0,1,0,1,0,1,0,1,0,
1,0,1,0,1,0,1,0,1,0,
1,0,1,0,1,0,1,0,1,0,
1,0,1,0,1,0,1,0]
led_array_deque = deque(led_array)
# Binary clock display.
# Each digit is displayed over 4 LEDs.
# Marker LEDs blink every half second to indicate the position of the digits.
# It displays hours, minutes and seconds, where hours are 24 hour format.
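# Worked example (added for illustration): at 09:37:42 the seconds digits are 4 and 2;
# LEDs 0-3 show 2 in binary, LSB first (0,1,0,0), and LEDs 4-7 show 4 (0,0,1,0).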
def BinaryClock(MidiOut):
from datetime import datetime
last_cycle_time = 0
led_array = [0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0]
update_time = 0
while True:
if pypm.Time() - last_cycle_time > 500:
last_cycle_time = pypm.Time()
led_array[11] = update_time # marker for minutes, just blinks with seconds
led_array[16] = update_time # marker for minutes, just blinks with seconds
led_array[26] = update_time # marker for hours, just blinks with seconds
led_array[31] = update_time # marker for hours, just blinks with seconds
if update_time == 0:
update_time = 1
else:
update_time = 0
## print "cycle"
seconds = datetime.now().strftime('%S')
seconds_first_digit = int(seconds[0])
seconds_second_digit = int(seconds[1])
minutes = datetime.now().strftime('%M')
minutes_first_digit = int(minutes[0])
minutes_second_digit = int(minutes[1])
hours = datetime.now().strftime('%H')
hours_first_digit = int(hours[0])
hours_seconds_digit = int(hours[1])
temp_counter = seconds_second_digit
for i in range(4):
led_array[i] = 0x01 & temp_counter
temp_counter = temp_counter >> 1
temp_counter = seconds_first_digit
for i in range(4):
led_array[i+4] = 0x01 & temp_counter
temp_counter = temp_counter >> 1
temp_counter = minutes_second_digit
for i in range(4):
led_array[i+12] = 0x01 & temp_counter
temp_counter = temp_counter >> 1
temp_counter = minutes_first_digit
for i in range(4):
led_array[i+17] = 0x01 & temp_counter
temp_counter = temp_counter >> 1
temp_counter = hours_seconds_digit
for i in range(4):
led_array[i+27] = 0x01 & temp_counter
temp_counter = temp_counter >> 1
temp_counter = hours_first_digit
for i in range(4):
led_array[i+32] = 0x01 & temp_counter
temp_counter = temp_counter >> 1
print hours, minutes, seconds
SendArray(led_array, MidiOut)
# A simple binary counter display.
def BinaryCounter(MidiOut):
last_cycle_time = 0
counter = 0
led_array = [0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0]
while True:
if pypm.Time() - last_cycle_time > 30:
## print "cycle"
last_cycle_time = pypm.Time()
temp_counter = counter
counter = counter + 1
for i in range(20):
led_array[i] = 0x01 & temp_counter
temp_counter = temp_counter >> 1
SendArray(led_array, MidiOut)
# A Game of Life simulation is usually performed on a 2D matrix, but here
# we apply similar rules to the 1D VMeter array of LEDs.
# Each cycle, a given LED is turned on or off based on how many of its neighbors
# are on or off.
# Different starting configurations will result in different patterns,
# some die off, some enter into a repeating cycle, and others continue to
# evolve.
# Touching the VMeter will cause the LEDs touched to switch states, which can restart
# a simulation that has died off.
def GameOfLife(MidiOut, MidiIn):
led_array = [1,1,1,1,1,1,1,0,0,0,
0,0,0,0,0,0,1,1,1,1,
0,1,1,0,0,1,1,1,0,0,
0,0,0,1,0,0,0,0]
# led_array = [1,0,1,1,1,1,1,0,0,0,
# 0,0,0,0,0,0,1,1,1,1,
# 0,1,1,0,0,1,1,1,0,0,
# 0,0,0,1,0,0,1,0]
# led_array = [1,0,0,0,0,0,0,0,0,0,
# 0,0,0,0,0,0,1,0,1,0,
# 0,1,0,0,0,1,0,0,0,0,
# 0,0,0,1,0,0,0,0]
last_cycle_time = 0
i = 0
while True:
while MidiIn.Poll(): # invert LEDs where touched
MidiData = MidiIn.Read(1)
if MidiData[0][0][0] == 0xB0:
if MidiData[0][0][1] == 20:
pos = MidiData[0][0][2]
index_pos = int(float(pos) / 127.0 * 37.0)
# print "index pos: ", index_pos
if led_array[index_pos] == 1:
led_array[index_pos] = 0
else:
led_array[index_pos] = 1
if pypm.Time() - last_cycle_time > 100:
last_cycle_time = pypm.Time()
index_array = range(2,35)
new_array = list(led_array)
# copy over 4 edge LEDs since they don't have 4 neighbors.
new_array[0] = led_array[0]
new_array[1] = led_array[1]
new_array[36] = led_array[36]
new_array[37] = led_array[37]
for i in index_array:
sum =led_array[i-2]+led_array[i-1]+led_array[i+1]+led_array[i+2]
if led_array[i] == 1: # live cell
if sum < 1:
new_array[i] = 0 # under population
elif sum < 3:
new_array[i] = 1 # just right
else:
new_array[i] = 0 # overcrowding
else: # dead cell
if sum == 2 or sum == 3:
new_array[i] = 1
else:
new_array[i] = 0
led_array = list(new_array)
SendArray(led_array, MidiOut)
def adjust_speed(new_speed,speed):
# here average the new_speed with the old speed
speed = new_speed * .2 + speed * .8
return speed
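# Note (added): adjust_speed is an exponential moving average with smoothing factor 0.2,
# i.e. speed_new = 0.2 * sample + 0.8 * speed_old. Repeatedly feeding it 0.0, as the
# braking code below does, makes the speed decay geometrically towards zero.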
# this causes the LEDs to act like a scrolled page on a tablet.
# Simulated acceleration provides a smooth start and stop effect.
def ChaseDemoWithSpeedInput(MidiOut, MidiIn):
x = 1
speed = 500
last_time = 0
last_speed_calc_time = 0
prev_pos = 0
pos = 0
prev_last_input_time = 0
last_input_time = 0
speed = 0.0
new_speed = 0.0
pos_array = [0, 0, 0, 0, 0]
pos_array = deque(pos_array)
time_array = deque([0, 0, 0, 0, 0])
print_time = 0
led_shift_time = 0
touch_state = 0
brake_time = 0
led_deque = deque([1,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0])
SendArray(led_deque, MidiOut)
EnableOnOffOutput(MidiOut)
while True:
while MidiIn.Poll(): # throw out all but the latest input
MidiData = MidiIn.Read(1)
if MidiData[0][0][0] == 0xB0:
if MidiData[0][0][1] == 20:
pos = MidiData[0][0][2]
pos_array.appendleft(pos)
# pos_array.pop()
last_input_time = MidiData[0][1]
time_array.appendleft(last_input_time)
# time_array.pop()
# print(last_input_time)
elif MidiData[0][0][1] == 17: # on / off output. 127 is touch, 0 is release
if MidiData[0][0][2] == 127:
# print "touch"
touch_state = 1
else:
# print "release"
touch_state = 0
if last_input_time > last_speed_calc_time: # calc speed
last_speed_calc_time = pypm.Time()
pos_delta = pos_array[0] - pos_array[4]
time_delta = time_array[0] - time_array[4]
if time_delta > 0:
new_speed = float(pos_delta) / float(time_delta)
speed = adjust_speed(new_speed,speed)
# handle case where VMeter is being touched, but position isn't moving
if touch_state == 1 and pypm.Time() - last_input_time > 100:
# reduce speed to 0
if pypm.Time() - brake_time > 17:
brake_time = pypm.Time()
# print "braking"
speed = adjust_speed(0.0,speed)
if pypm.Time() - print_time > 150:
print_time = pypm.Time()
# if abs(speed) > .01:
# print "speed: ", speed, ", per: ", 1.0 / speed
if pypm.Time() - last_input_time > 100:
# friction braking
speed = adjust_speed(0.0,speed)
if abs(speed) > .001 and pypm.Time() - led_shift_time > int(2.5/abs(speed)):
led_shift_time = pypm.Time()
if speed > 0.0:
led_deque.rotate(1)
else:
led_deque.rotate(-1)
SendArray(led_deque, MidiOut)
# uses the rotate method of a deque to cause the LEDs to chase and wrap around.
def ChaseDemo2():
# led_array = [1,0,1,0,1,0,1,0,1,0,
# 1,0,1,0,1,0,1,0,1,0,
# 1,0,1,0,1,0,1,0,1,0,
# 1,0,1,0,1,0,1,0]
# led_array_deque = deque(led_array)
print(led_array_deque)
SendArray(led_array_deque, MidiOut)
timer_marker = pypm.Time()
while True:
timer_marker = pypm.Time()
while pypm.Time() < timer_marker + 500:
pass
SendArray(led_array_deque, MidiOut)
led_array_deque.rotate(1)
# another LED chasing demo utilizing bit shifting through 2x multiplication
def ChaseDemo(MidiOut):
x = 1;
while True:
MidiTime = pypm.Time()
MidiOut.WriteShort(0xAD,x,x)
MidiOut.WriteShort(0xAE,x,x)
MidiOut.WriteShort(0xAF,x,x)
x = x * 2;
if x == 128:
x = 1;
while pypm.Time() < MidiTime + 100:
pass
# Draws a single LED cursor under the finger position.
def DrawCursor(MidiOut,height): # uses global led_array_deque
# clear the deque - set all LEDs to off
for i in range(38):
led_array_deque[i] = 0
cursor_pos = int(float(height) / 127.0 * 38.0)
if cursor_pos > 37:
cursor_pos = 37
led_array_deque[cursor_pos] = 1 # turn on one LED
SendArray(led_array_deque, MidiOut)
# draws a bar centered at height position with a given size.
# Kinda like a fat cursor.
def DrawBar(MidiOut,height,size):
# clear the deque - set all LEDs to off
for i in range(38):
led_array_deque[i] = 0
cursor_pos = int(float(height) / 127.0 * 37.0)
lower_limit = cursor_pos - size / 2
if lower_limit < 0:
lower_limit = 0
upper_limit = cursor_pos + size / 2
if upper_limit > 37:
upper_limit = 37
i = lower_limit
while i <= upper_limit:
led_array_deque[i] = 1
i = i + 1
SendArray(led_array_deque, MidiOut)
# this draws a bar where touched instead of a cursor or column.
def DrawBarDemo(size):
# size == 0 --> pressure adj
bar_size = 1
input_pos = 64
while True:
if MidiIn.Poll():
MidiData = MidiIn.Read(1)
#print MidiData[0][0][0]," ",MidiData[0][0][1]," ",MidiData[0][0][2]
if MidiData[0][0][0] == 0xB0:
if MidiData[0][0][1] == 20:
input_pos = MidiData[0][0][2]
# SendColumn(MidiOut,input_pos)
# DrawCursor(MidiOut,input_pos)
if size == 0:
if MidiData[0][0][1] == 18:
bar_size = MidiData[0][0][2] / 6
else:
bar_size = size
DrawBar(MidiOut,input_pos,bar_size)
# main code begins here
pypm.Initialize() # always call this first, or OS may crash when you try to open a stream
PrintDevices(OUTPUT)
dev = int(raw_input("Type output number: "))
MidiOut = pypm.Output(dev, 0)
PrintDevices(INPUT)
dev = int(raw_input("Type input number: "))
MidiIn = pypm.Input(dev)
# turn off internal LED finger tracking
SetLEDsIgnoreTouch(MidiOut)
# set initial column
SendColumn(MidiOut,45)
# turn on pressure output
EnablePressureOutput(MidiOut)
demo_choice = int(raw_input("""
Choose a demo:
1) Cursor tracks finger position
2) Cursor size adjusts based on pressure
3) Monitor CPU level
4) Binary Counter
5) Binary Clock
6) Chase
7) Scrollable treadmill
8) Game of Life
"""))
if demo_choice == 1:
DrawBarDemo(1)
elif demo_choice == 2:
DrawBarDemo(0) # input 0 to adjust cursor size with pressure
elif demo_choice == 3:
MonitorCpu(MidiOut)
elif demo_choice == 4:
BinaryCounter(MidiOut)
elif demo_choice == 5:
BinaryClock(MidiOut)
elif demo_choice == 6:
ChaseDemo(MidiOut)
elif demo_choice == 7:
ChaseDemoWithSpeedInput(MidiOut,MidiIn)
elif demo_choice == 8:
GameOfLife(MidiOut, MidiIn)
# be sure to try out different starting patterns
dummy = raw_input("ready to close and terminate... (type RETURN):")
del MidiOut
del MidiIn
pypm.Terminate()
|
curiousinventor/VMeter
|
Software/python/VMeter_python_demos.py
|
Python
|
mit
| 16,528
|
import pathlib
from .._exceptions import ReadError
from .._helpers import register_format
from . import _vtk_42, _vtk_51
def read(filename):
filename = pathlib.Path(filename)
with open(filename.as_posix(), "rb") as f:
mesh = read_buffer(f)
return mesh
def read_buffer(f):
# The first line specifies the version
line = f.readline().decode().strip()
if not line.startswith("# vtk DataFile Version"):
raise ReadError("Illegal VTK header")
version = line[23:]
if version == "5.1":
return _vtk_51.read(f)
# this also works for older format versions
return _vtk_42.read(f)
def write(filename, mesh, fmt_version: str = "5.1", **kwargs):
if fmt_version == "4.2":
return _vtk_42.write(filename, mesh, **kwargs)
assert fmt_version == "5.1"
_vtk_51.write(filename, mesh, **kwargs)
register_format(
"vtk",
[".vtk"],
read,
{
"vtk42": _vtk_42.write,
"vtk51": _vtk_42.write,
"vtk": _vtk_51.write,
},
)
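# A minimal usage sketch (added for illustration; assumes a legacy-VTK file "mesh.vtk"
# exists and that meshio's top-level read/write helpers dispatch through the formats
# registered above):
#
#     import meshio
#     mesh = meshio.read("mesh.vtk")   # detects the header version and parses the file
#     meshio.write("copy.vtk", mesh)   # the default "vtk" writer emits the 5.1 format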
|
nschloe/meshio
|
src/meshio/vtk/_main.py
|
Python
|
mit
| 1,027
|
from sso import models
def test_user_get_username():
user = models.BusinessSSOUser(email='test@example.com')
assert user.get_username() == 'test@example.com'
def test_user_save():
user = models.BusinessSSOUser(email='test@example.com')
assert user.save() is None
|
uktrade/navigator
|
app/sso/tests/test_models.py
|
Python
|
mit
| 285
|
from synthetic.consts import (DEFAULT_SAMPLE_RATE, DEFAULT_NODES,
DEFAULT_EDGES, DEFAULT_GEN_TYPE)
from synthetic.generator import load_generator
from synthetic.commands.command import Command, arg_with_default
class EvalDistance(Command):
def __init__(self, cli_name):
Command.__init__(self, cli_name)
self.name = 'eval_distance'
self.description = 'compute behavioral distance between two generators'
self.mandatory_args = ['prg', 'prg2']
self.optional_args = ['undir', 'sr', 'nodes', 'edges', 'gentype']
def run(self, args):
self.error_msg = None
prog1 = args['prg']
prog2 = args['prg2']
sr = arg_with_default(args, 'sr', DEFAULT_SAMPLE_RATE)
directed = not args['undir']
nodes = arg_with_default(args, 'nodes', DEFAULT_NODES)
edges = arg_with_default(args, 'edges', DEFAULT_EDGES)
gentype = arg_with_default(args, 'gentype', DEFAULT_GEN_TYPE)
gen1 = load_generator(prog1, directed, gentype)
gen2 = load_generator(prog2, directed, gentype)
gen1.run(nodes, edges, sr, shadow=gen2)
dist1 = gen1.eval_distance
gen2.run(nodes, edges, sr, shadow=gen1)
dist2 = gen2.eval_distance
dist = (dist1 + dist2) / 2
print('eval distance: {}'.format(dist))
return True
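# Note (added): the reported distance is symmetric; each generator is run with the other
# as its shadow and the two eval_distance values are averaged.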
|
telmomenezes/synthetic
|
synthetic/commands/eval_distance.py
|
Python
|
mit
| 1,380
|
from twisted.trial import unittest
from vertex import q2qclient
import sys
from StringIO import StringIO
class TimeoutTestCase(unittest.TestCase):
def testNoUsage(self):
"""
When the vertex Q2QClientProgram is run without any arguments, it
should print a usage error and exit.
"""
cp = q2qclient.Q2QClientProgram()
# smash stdout for the duration of the test.
sys.stdout, realout = StringIO(), sys.stdout
try:
# the act of showing the help will cause a sys.exit(0), catch that
# exception.
self.assertRaises(SystemExit, cp.parseOptions, [])
# check that the usage string was (roughly) output.
output = sys.stdout.getvalue()
self.assertIn('Usage:', output)
self.assertIn('Options:', output)
self.assertIn('Commands:', output)
finally:
# always restore stdout.
sys.stdout = realout
|
glyph/vertex
|
vertex/test/test_client.py
|
Python
|
mit
| 980
|
import config
import directorymonitor
import rsync
# project = { 'name': 'nvbio-internal', 'root_path': '/Users/nsubtil/nvbio-internal',
# 'remotes': [ { 'ssh_connection': connobj,
# 'remote_root': '/home/nsubtil/nvbio-internal' }, ... ]
class SynchronizedProjectDB (config.ConfigDB):
__config_db_key = "synchronized_project_database"
def __init__(self):
config.ConfigDB.__init__(self, self.__config_db_key)
def create_project(self, name, root):
project = SynchronizedProject(name, root)
self.set(name, project)
return project
class _SynchronizedProjectMonitor (directorymonitor.EventListener):
def __init__(self, project):
self.__project = project
def on_any_event(self, event):
print "Update on project %s: event [%s] path [%s] is_directory [%s]" % (self.__project.name, event.event_type, event.src_path, event.is_directory)
self.__project.main.update_queue.put(self.__project.name)
class SynchronizedProject:
def __init__(self, name, root):
self.__config = config
self.name = name
self.root = root
self.remotes = []
self.__file_event_handler = _SynchronizedProjectMonitor(self)
self.__monitor = directorymonitor.DirectoryMonitor(self.root, self.__file_event_handler)
def __getstate__(self):
return {'name': self.name, 'root': self.root, 'remotes': self.remotes}
    def __setstate__(self, state):
        self.__init__(state['name'], state['root'])
        self.remotes = state['remotes']
def add_remote_build(self, remote_name, remote_root, auto_update=True):
remote = {'remote_name': remote_name, 'remote_root': remote_root, 'auto_update': auto_update}
print "remote build for %s at %s:%s" % (self.name, remote['remote_name'], remote['remote_root'])
# the magic of python pass-by-object-reference makes this work
self.remotes.append(remote)
SynchronizedProjectDB().set(self.name, self)
def update_remotes(self, main):
print "updating project %s" % self.name
for r in self.remotes:
if r['auto_update']:
print "--> remote %s" % r
rsync.run_rsync(self.root, r, main)
def start_monitor(self, main):
self.main = main
self.__monitor.run()
def stop_monitor(self):
self.__monitor.stop()
def sync_monitor(self):
self.__monitor.join()
|
nsubtil/remoter
|
project.py
|
Python
|
mit
| 2,456
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
#
# Objective of this small program is to report memory usage using
# useful memory metrics for Linux, by default for user postgres.
#
# It will group processes according to their URES (unique resident set size)
# and also do reports based on per-username, per-program name and per-cpu
# statistics.
#
# Inspired by work of Aleksandr Koltsoff (czr@iki.fi) meminfo.py 2006 released
# under GPL Version 2.
# https://github.com/majava3000/meminfo
# URES explanation: http://koltsoff.com/pub/ures/
#
# Current version is not compatible with Python 3
# Modified by Bricklen Anderson, 2017
#
'''
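# Note (added): "unique resident set size" here is URES = RES - SHR, i.e. resident pages
# minus pages shared with other processes. For example (illustrative figures), a process
# with 500 MB resident of which 450 MB is shared libraries has a URES of 50 MB.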
import os
import sys
import pwd
import time
import argparse
import csv
# set this to 1 for debugging
DEBUG = 0
# we need to get the pagesize at this point
PAGE_SIZE = os.sysconf("SC_PAGESIZE")
# a map from /proc/PID/status memory-related fields into column headers
# other fields that start with Vm will use lower-case columns
VM_STATUS_MAP = {
'Peak': 'VIRT-P',
'Lck': 'LCKD',
'HWM': 'HWRES',
'Data': 'DATA',
'Stk': 'STACK',
'Exe': 'EXE',
'Lib': 'LIB',
'PTE': 'PTE'}
class UsernameCache:
'''
Utility class to act as a cache for UID lookups
Since UID lookups will cause possible NSS activity
over the network, it's better to cache all lookups.
'''
def __init__(self):
self.uid_map = {}
self.gid_map = {}
def get_uid(self, uid):
if uid in self.uid_map:
return self.uid_map[uid]
name_data = None
try:
name_data = pwd.getpwuid(uid)
except Exception:
pass
if name_data is not None:
name = name_data.pw_name
else:
# default the name to numeric representation in case it's not found
name = "%s" % uid
self.uid_map[uid] = name
return name
# use a global variable to hold the cache so that we don't
# need a separate context-object/dict
NAME_CACHE = UsernameCache()
# utility class to aid in formatting
# will calculate the necessary amount of left-justification for each
# column based on the width of entries
# last entry will be unjustified
class JustifiedTable:
def __init__(self):
# this will keep the row data in string format
self.rows = []
# this will keep the maximum width of each column so far
self.column_widths = []
def add_row(self, row):
# start by converting all entries into strings
# (we keep data in string format internally)
row = map(str, row)
# check if we have enough column_widths for this row
if len(self.column_widths) < len(row):
self.column_widths += [0] * (len(row) - len(self.column_widths))
# update column_widths if necessary
for idx in range(len(row)):
if self.column_widths[idx] < len(row[idx]):
self.column_widths[idx] = len(row[idx])
# add the row into data
self.rows.append(row)
def output_row(self, idx):
row = self.rows[idx]
for idx in range(len(row) - 1):
            if row[idx] != "None":
print "%*s" % (self.column_widths[idx], row[idx]),
print row[-1]
    # optionally repeat the header every max_lines rows,
    # preceded by an empty line
def output(self, max_lines=None):
# always start with the header
self.output_row(0)
for idx in range(1, len(self.rows)):
self.output_row(idx)
if max_lines is not None:
if idx % max_lines == 0:
print
self.output_row(0)
# parse parameters
def cli():
parser = argparse.ArgumentParser(description='pg_meminfo')
parser.add_argument('-u', '--user', type=str,
help='Retrieve mem info for processes owned by a'
' specific user. Omit to calculate for all user procs.')
parser.add_argument('-c', '--csv', action="store_true", default=None,
dest='csv_output',
help='Convert the output to CSV. Default is to STDOUT, '
'otherwise supply a file that the script can write to.')
parser.add_argument('-o', '--output', type=str, dest='output_file',
default='stdout', help='Output results to this file. '
'Default is to STDOUT.')
parser.add_argument('-s', '--sum-only', action="store_true", default=False,
dest='sum_only',
help='Emit the sum of the unique resident memory only. '
'If "-u" is supplied, sum will be only for that user.')
parser.add_argument('-p', '--postgres-query', action="store_true",
default=False, dest='postgres_query',
help='Retrieve the query currently executing for each '
'process. Be aware there is overhead from querying '
'the database for every pid, which adds time and '
'resource overhead to the execution of this script. '
'Getting the query per pid is more accurate than passing '
'in a list of pids and returning the queries because '
'transient queries might have completed by the time the '
'call to pg_stat_activity is done, so queries that '
'started after the pid was pulled from /proc but before '
'the call to pg_stat_activity will not be the ones that '
'are actually using the memory.')
parser.add_argument('-n', '--lines', default=None, dest='lines_of_output',
help='Retrieve only n lines of output. Default is all.')
return parser.parse_args()
# utility to read the first line of a file, truncated at the first NUL byte (e.g. /proc/PID/cmdline)
def parse_file(filename):
f = open(filename, "rb")
line = f.readline()
del f
i = line.find(b'\x00')
if i == -1:
return line
return line[:i]
# utility to read and parse a whitespace-delimited file (e.g. /proc/PID/status)
def parse_split_file(filename):
f = open(filename, "rb")
lines = f.readlines()
del f
lines = map(lambda x: x.strip().split(), lines)
return lines
# utility to parse a file which contains one line with delim entries
def parse_delim_file(filename):
f = open(filename, "rb")
line = f.readline()
del f
return line.split()
# utility to parse a file which contains one line with delim numbers
def parse_number_file(filename):
f = open(filename, "rb")
line = f.readline()
del f
return map(int, line.split())
# return a hash of 'COLUMN-NAME': value -entries for
# process specific memory info
def get_process_mem_from_status(pid):
ret = {}
lines = parse_split_file("/proc/%d/status" % pid)
for line in lines:
if line[0][:2] == 'Vm':
vm_label = line[0][2:-1]
if vm_label in VM_STATUS_MAP:
v = int(line[1])
if v > 4 * 1024 * 1024:
v = -1
ret[VM_STATUS_MAP[vm_label]] = v
if len(ret) == 0:
return None
return ret
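# Example (added): a /proc/PID/status line such as "VmPeak:   123456 kB" splits into
# ['VmPeak:', '123456', 'kB']; the 'Peak' label maps to column 'VIRT-P' with value 123456 (kB).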
def get_postgres_query(pid):
'''This function will only return results if "track_activities" is enabled'''
import psycopg2
conn = None
try:
qry_version = "SELECT current_setting('server_version_num')"
qry_pre_96 = "select state as qry_state, coalesce(waiting::text,'') as waiting_state, query from pg_catalog.pg_stat_activity WHERE pid = %s"
qry_96_up = "select state as qry_state, (case when wait_event_type is not null then wait_event_type || ':' || coalesce(wait_event,'') else '' end) as waiting_state, query from pg_catalog.pg_stat_activity where pid = %s"
conn = psycopg2.connect("dbname='postgres' user='postgres'")
conn.set_session(readonly=True)
cur = conn.cursor()
cur.execute(qry_version)
ver = cur.fetchone()
        if int(ver[0]) >= 90200 and int(ver[0]) < 90600:
qry = qry_pre_96
elif int(ver[0]) >= 90600:
qry = qry_96_up
else:
if conn:
conn.close()
return
# get the stats from the db
cur.execute(qry,(pid,))
row = cur.fetchone()
if row:
qry_state = row[0]
waiting_state = row[1]
query = row[2]
return qry_state, waiting_state, query
except psycopg2.DatabaseError, e:
print 'Error %s' % e
sys.exit(1)
finally:
if conn:
conn.close()
# utility to return info for given pid (int)
# will return None if process doesn't exist anymore
# otherwise a hash:
# "pid" -> int(pid)
# "uid" -> int(uid)
# "gid" -> int(gid)
# "vmsize" -> int(vmsize in kilobytes)
# "res" -> int(res in kilobytes)
# "shared" -> int(shared in kilobytes)
# "ures" -> int(unique res in kilobytes)
# "cmd" -> string(command)
# "minflt" -> int(number of minor faults)
# "majflt" -> int(number of major faults)
# "state" -> string(state-char)
# "threads" -> int(number of threads, including main thread)
# "utime" -> int(ticks (0.01 secs) spent in user)
# "stime" -> int(ticks spent in kernel)
# "cpu" -> int(last cpu which executed code for this process)
# "status_mem" -> hash of additional fields
def get_process_info(pid, kernel_boot_ticks=0, uid=None):
global PAGE_SIZE
page_conv = PAGE_SIZE / 1024
ret = None
try:
pinfo = {}
# get process owner and group owner using stat
stats = os.stat("/proc/%d" % pid)
if uid is not None:
if uid != stats.st_uid:
return None
pinfo["uid"] = stats.st_uid
pinfo["gid"] = stats.st_gid
pmem = parse_number_file("/proc/%d/statm" % pid)
# size: total (VMSIZE)
# resident: rss (total RES)
# share: shared pages (SHARED)
# we don't need the other entries
del pmem[3:]
pmem = map(lambda x: x * page_conv, pmem)
# we ignore processes which seem to have zero vmsize (kernel threads)
if pmem[0] == 0:
return None
pinfo["vmsize"] = pmem[0]
pinfo["res"] = pmem[1]
pinfo["shared"] = pmem[2]
pinfo["ures"] = pmem[1] - pmem[2]
# get status (this changes between kernel releases)
psmem = get_process_mem_from_status(pid)
pinfo["status_mem"] = psmem
pstat = parse_delim_file("/proc/%d/stat" % pid)
pcmd = parse_file("/proc/%d/cmdline" % pid)
# 1: filename of the executable in parentheses
# 2: state
# 9: minflt %lu: minor faults (completed without disk access)
# 11: majflt %lu: major faults
pinfo["cmd"] = pcmd
pinfo["state"] = pstat[2]
pinfo["minflt"] = int(pstat[9])
pinfo["majflt"] = int(pstat[11])
pinfo["utime"] = int(pstat[13])
pinfo["stime"] = int(pstat[14])
pinfo["cpu"] = int(pstat[38])
pinfo["exists_for"] = kernel_boot_ticks - int(pstat[21])
# 13 = usertime (jiff)
# 14 = kernel time (jiff)
# 21 = start time (jiff)
# 38 = last CPU
# hah. these aren't actually in jiffies, but in USER_HZ
# which has been defined as 100 always
pinfo["pid"] = pid
pinfo["ppid"] = int(pstat[3])
# attempt to count the number of threads
# note than on older linuxen there is no /proc/X/task/
thread_count = 0
try:
if os.access("/proc/%d/task/" % pid, os.X_OK):
thread_count = len(os.listdir("/proc/%d/task" % pid))
except Exception:
pass
pinfo["threads"] = thread_count
try:
# Get the Postgresql query details, if any
qry_state, qry_waiting, query = get_postgres_query(pid)
if query:
pinfo['qry_state'] = qry_state
pinfo['waiting_state'] = qry_waiting
pinfo['query'] = query
except:
pass
ret = pinfo
except Exception:
pass
return ret
# utility to return process information (for all processes)
# this is basically where most of the work starts from
def get_process_infos():
# this will be the return structure
# the key will be the pid
pinfos = {}
args = cli()
filter_process_by_uid = None
if args.user:
try:
filter_process_by_uid = pwd.getpwnam(args.user).pw_uid
except KeyError:
print '[ERROR] User does not exist.'
sys.exit(1)
# start by getting kernel uptime
kernel_uptime, kernel_idle_time = parse_delim_file("/proc/uptime")
kernel_uptime = int(float(kernel_uptime) * 100)
# we need to iterate over the names under /proc at first
for n in os.listdir("/proc"):
# we shortcut the process by attempting a PID conversion first
# and statting only after that
# (based on the fact that the only entries in /proc which are
# integers are the process entries). so we don't do extra
# open/read/closes on proc when not necessary
try:
pid = int(n)
except Exception:
continue
# at this point we know that n is a number
# note that it might be so that the process doesn't exist anymore
# this is why we just ignore it if it has gone AWOL.
pinfo = get_process_info(pid, kernel_uptime, filter_process_by_uid)
if pinfo is not None:
pinfos[pid] = pinfo
return pinfos
# utility to return human readable time
# three return formats:
#   < minute: Xs
#   < hour:   XmSSs
#   rest:     XhMMmSSs
def get_time(ticks):
secs_total = ticks / 100.0
if secs_total < 60:
return "%ds" % secs_total
secs = secs_total % 60
secs_total -= secs
minutes = secs_total / 60
if minutes < 60:
return "%dm%.2ds" % (minutes, secs)
hours = minutes / 60
minutes = minutes % 60
return "%dh%.2dm%.2ds" % (hours, minutes, secs)
# routine that tells when something started, given its age in ticks
# (i.e. the process was started "ticks" ticks ago)
# returns an absolute timestamp: the time of day if it started within
# the last day, otherwise the date
def get_elapsed(ticks, now=time.time()):
ticks /= 100 # conv to seconds
if ticks < 60 * 60 * 24:
return time.strftime("%H:%M:%S", time.localtime(now - ticks))
return time.strftime("%Y-%m-%d", time.localtime(now - ticks))
# utility to turn process info into a row suitable for the table
# note that this gets a bit hairy wrt the extra memory fields:
# we need to preserve order and insert "" for any fields that are
# missing for this process.
#
# stat_map:
# ordered list of field-names that we want to output
def get_process_row(pinfo, stat_map, with_cpu=0, args=None, get_current_time=False):
# PID UID URES SHR VIRT MINFLT MAJFLT S CMD"
username = NAME_CACHE.get_uid(pinfo["uid"])
currentTime = []
if get_current_time:
currentTime = [time.time()]
cpu = None
if with_cpu:
cpu = pinfo["cpu"]
mainInfo = [
pinfo["pid"],
username,
pinfo["ures"],
pinfo["shared"],
pinfo["vmsize"]]
restInfo = [pinfo["minflt"],
pinfo["majflt"],
cpu,
pinfo["threads"],
get_elapsed(pinfo["exists_for"]),
pinfo["state"],
pinfo["cmd"]]
queryInfo = []
if args.postgres_query:
queryInfo = [pinfo.get('qry_state',''), pinfo.get('waiting_state',''), pinfo.get('query','')]
# generate the status_mem entries
status_mem = pinfo["status_mem"]
status_mem_entries = []
for label in stat_map:
if label in status_mem:
status_mem_entries.append(status_mem[label])
else:
status_mem_entries.append("")
return currentTime + mainInfo + status_mem_entries + restInfo + queryInfo
# utility to print a label:
# - print empty line
# - print text
# - print underscore for the line
def print_label(s):
print
print s
print '-' * len(s)
# main routine that gathers and outputs the reports
def run_it():
args = cli()
if args.output_file != 'stdout':
if args.csv_output is None:
print '[WARNING] Cannot emit process table to file unless it is in CSV format.'
return
    # stat_map is created as follows:
    # - iterate over all process data and their status_mem-hash
    # - insert the keys into the stat_map hash
    # - convert stat_map into a list
    # - sort it
stat_map = {}
pinfos = get_process_infos()
# we now need to organize the list of entries according to their ures
# for this we'll create a list with two entries:
# [ures, pid]
# (since pid can be used to access the process from the pinfos-hash)
plist = []
max_cpu = 0
ures_sum = 0
for pid, v in pinfos.items():
max_cpu = max(max_cpu, v["cpu"])
plist.append((v["ures"], pid))
status_mem = v["status_mem"]
ures_sum += int(v["ures"])
# add the keys from this process status_mem
if status_mem:
for k in status_mem.keys():
stat_map[k] = None
# If user only wants the sum, print that and exit
if args.sum_only:
msg = 'Unique Resident Memory sum: ' + str(ures_sum) + ' Kilobytes'
if args.user:
msg += ', for user ' + str(args.user)
print msg
return
# use two steps in order to work on older pythons (newer ones
# can use reverse=True keyparam)
plist.sort()
plist.reverse()
# prepare the stat_map
stat_map = stat_map.keys()
stat_map.sort()
time_header = ["epoch_time"]
cpu_header = "CPU"
main_header = ["PID", "UID", "URES", "SHR", "VIRT"]
post_header = ["MINFLT", "MAJFLT", cpu_header, "threads", "started", "S", "CMD"]
stat_header = map(lambda x: x.lower(), stat_map)
query_header = []
result_rows_limit = None
if args.lines_of_output and args.lines_of_output > 0:
result_rows_limit = args.lines_of_output
get_current_time=False
if args.postgres_query:
i=0
j=0
for dummy, pid in plist:
# Only iterate result_rows_limit, if args.lines_of_output was supplied.
if not result_rows_limit or (result_rows_limit and j < int(result_rows_limit)):
row = get_process_row(pinfos[pid], stat_map, max_cpu > 0, args, get_current_time=get_current_time)
if len(row[-1]) > 0:
i += 1
if i > 0:
query_header = ['qry_state','qry_waiting','query']
j += 1
if args.csv_output is None:
process_table = JustifiedTable()
process_table.add_row(main_header + stat_header + post_header + query_header)
# Clear the file if one was supplied
try:
if args.output_file != 'stdout':
with open(args.output_file, 'w') as fout:
fout.truncate()
except IOError as err:
print 'Attempted to write to ' + args.output_file + '. Error: ' + err.strerror
sys.exit(1)
i = 0
j=0
if args.csv_output is not None:
get_current_time=True
for dummy, pid in plist:
if not result_rows_limit or (result_rows_limit and j < int(result_rows_limit)):
row = get_process_row(pinfos[pid], stat_map, max_cpu > 0, args, get_current_time=get_current_time)
if args.csv_output is not None:
# Write to file if one was defined.
if args.output_file != 'stdout':
with open(args.output_file, 'ab') as csvfile:
csvwriter = csv.writer(csvfile)
if i == 0:
csvwriter.writerow(time_header + main_header + stat_header + post_header + query_header)
csvwriter.writerow(map(str, row))
else:
csvstdout = csv.writer(sys.stdout)
if i == 0:
csvstdout.writerow(time_header + main_header + stat_header + post_header + query_header)
csvstdout.writerow(map(str, row))
else:
process_table.add_row(row)
i += 1
j += 1
# Write to stdout or to file
if args.output_file == 'stdout':
if args.csv_output is None:
process_table.output(None)
msg = 'Unique Resident Memory sum: ' + str(ures_sum) + ' Kilobytes'
if args.user:
msg += ', for user ' + str(args.user)
print msg
if __name__ == '__main__':
# If piping to head/tail, add "2>/dev/null"
# eg. python pg_meminfo.py 2>/dev/null | head -5
run_it()
'''Tests
# Fail because of permission denied if run by non-root user
python /tmp/pg_meminfo.py -c --output /root/foo.csv
# Return only the sum of the URES
python /tmp/pg_meminfo.py -s
# Return the Unique Resident Memory for all users
python /tmp/pg_meminfo.py
# Return the Unique Resident Memory for a specific user
python /tmp/pg_meminfo.py -u postgres
# Return only the sum of the URES, all other flags should be ignored except "user"
python /tmp/pg_meminfo.py -c -o /root/foo.csv --sum-only -u postgres
# output csv to stdout
python /tmp/pg_meminfo.py -c
# Output csv to file and include the queries (if any)
python /tmp/pg_meminfo.py -c -o /tmp/foo.csv --postgres-query
# Should fail because output file can only be used with csv
python /tmp/pg_meminfo.py --output /tmp/foo_a.csv
# Output 10 rows of csv (including header) without error
python /tmp/pg_meminfo.py -c --postgres-query -u postgres -n 10
'''
|
bricklen/pg-scripts
|
pg_meminfo.py
|
Python
|
mit
| 21,999
|
import platform
import select
import socket
import ssl
from typing import TYPE_CHECKING, Callable, Optional, Tuple, Union, overload
from unittest import mock
import pytest
from dummyserver.server import DEFAULT_CA, DEFAULT_CERTS
from dummyserver.testcase import SocketDummyServerTestCase, consume_socket
from urllib3.util import ssl_
from urllib3.util.ssltransport import SSLTransport
if TYPE_CHECKING:
from typing_extensions import Literal
# consume_socket can iterate forever, so we add timeouts to prevent halting.
PER_TEST_TIMEOUT = 60
def server_client_ssl_contexts() -> Tuple[ssl.SSLContext, ssl.SSLContext]:
if hasattr(ssl, "PROTOCOL_TLS_SERVER"):
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
server_context.load_cert_chain(DEFAULT_CERTS["certfile"], DEFAULT_CERTS["keyfile"])
if hasattr(ssl, "PROTOCOL_TLS_CLIENT"):
client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
client_context.load_verify_locations(DEFAULT_CA)
return server_context, client_context
@overload
def sample_request(binary: "Literal[True]" = ...) -> bytes:
...
@overload
def sample_request(binary: "Literal[False]") -> str:
...
def sample_request(binary: bool = True) -> Union[bytes, str]:
request = (
b"GET http://www.testing.com/ HTTP/1.1\r\n"
b"Host: www.testing.com\r\n"
b"User-Agent: awesome-test\r\n"
b"\r\n"
)
return request if binary else request.decode("utf-8")
def validate_request(
provided_request: bytearray, binary: "Literal[False, True]" = True
) -> None:
assert provided_request is not None
expected_request = sample_request(binary)
assert provided_request == expected_request
@overload
def sample_response(binary: "Literal[True]" = ...) -> bytes:
...
@overload
def sample_response(binary: "Literal[False]") -> str:
...
@overload
def sample_response(binary: bool = ...) -> Union[bytes, str]:
...
def sample_response(binary: bool = True) -> Union[bytes, str]:
response = b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"
return response if binary else response.decode("utf-8")
def validate_response(
provided_response: Union[bytes, bytearray, str], binary: bool = True
) -> None:
assert provided_response is not None
expected_response = sample_response(binary)
assert provided_response == expected_response
def validate_peercert(ssl_socket: SSLTransport) -> None:
binary_cert = ssl_socket.getpeercert(binary_form=True)
assert type(binary_cert) == bytes
assert len(binary_cert) > 0
cert = ssl_socket.getpeercert()
assert type(cert) == dict
assert "serialNumber" in cert
assert cert["serialNumber"] != ""
class SingleTLSLayerTestCase(SocketDummyServerTestCase):
"""
Uses the SocketDummyServer to validate a single TLS layer can be
established through the SSLTransport.
"""
@classmethod
def setup_class(cls) -> None:
cls.server_context, cls.client_context = server_client_ssl_contexts()
def start_dummy_server(
self, handler: Optional[Callable[[socket.socket], None]] = None
) -> None:
def socket_handler(listener: socket.socket) -> None:
sock = listener.accept()[0]
with self.server_context.wrap_socket(sock, server_side=True) as ssock:
request = consume_socket(ssock)
validate_request(request)
ssock.send(sample_response())
chosen_handler = handler if handler else socket_handler
self._start_server(chosen_handler)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_start_closed_socket(self) -> None:
"""Errors generated from an unconnected socket should bubble up."""
sock = socket.socket(socket.AF_INET)
context = ssl.create_default_context()
sock.close()
with pytest.raises(OSError):
SSLTransport(sock, context)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_close_after_handshake(self) -> None:
"""Socket errors should be bubbled up"""
self.start_dummy_server()
sock = socket.create_connection((self.host, self.port))
with SSLTransport(
sock, self.client_context, server_hostname="localhost"
) as ssock:
ssock.close()
with pytest.raises(OSError):
ssock.send(b"blaaargh")
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_wrap_existing_socket(self) -> None:
"""Validates a single TLS layer can be established."""
self.start_dummy_server()
sock = socket.create_connection((self.host, self.port))
with SSLTransport(
sock, self.client_context, server_hostname="localhost"
) as ssock:
assert ssock.version() is not None
ssock.send(sample_request())
response = consume_socket(ssock)
validate_response(response)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_unbuffered_text_makefile(self) -> None:
self.start_dummy_server()
sock = socket.create_connection((self.host, self.port))
with SSLTransport(
sock, self.client_context, server_hostname="localhost"
) as ssock:
with pytest.raises(ValueError):
ssock.makefile("r", buffering=0)
ssock.send(sample_request())
response = consume_socket(ssock)
validate_response(response)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_unwrap_existing_socket(self) -> None:
"""
Validates we can break up the TLS layer
A full request/response is sent over TLS, and later over plain text.
"""
def shutdown_handler(listener: socket.socket) -> None:
sock = listener.accept()[0]
ssl_sock = self.server_context.wrap_socket(sock, server_side=True)
request = consume_socket(ssl_sock)
validate_request(request)
ssl_sock.sendall(sample_response())
unwrapped_sock = ssl_sock.unwrap()
request = consume_socket(unwrapped_sock)
validate_request(request)
unwrapped_sock.sendall(sample_response())
self.start_dummy_server(shutdown_handler)
sock = socket.create_connection((self.host, self.port))
ssock = SSLTransport(sock, self.client_context, server_hostname="localhost")
# request/response over TLS.
ssock.sendall(sample_request())
response = consume_socket(ssock)
validate_response(response)
# request/response over plaintext after unwrap.
ssock.unwrap()
sock.sendall(sample_request())
response = consume_socket(sock)
validate_response(response)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_ssl_object_attributes(self) -> None:
"""Ensures common ssl attributes are exposed"""
self.start_dummy_server()
sock = socket.create_connection((self.host, self.port))
with SSLTransport(
sock, self.client_context, server_hostname="localhost"
) as ssock:
cipher = ssock.cipher()
assert type(cipher) == tuple
# No chosen protocol through ALPN or NPN.
assert ssock.selected_alpn_protocol() is None
assert ssock.selected_npn_protocol() is None
shared_ciphers = ssock.shared_ciphers()
assert type(shared_ciphers) == list
assert len(shared_ciphers) > 0
assert ssock.compression() is None
validate_peercert(ssock)
ssock.send(sample_request())
response = consume_socket(ssock)
validate_response(response)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_socket_object_attributes(self) -> None:
"""Ensures common socket attributes are exposed"""
self.start_dummy_server()
sock = socket.create_connection((self.host, self.port))
with SSLTransport(
sock, self.client_context, server_hostname="localhost"
) as ssock:
assert ssock.fileno() is not None
test_timeout = 10
ssock.settimeout(test_timeout)
assert ssock.gettimeout() == test_timeout
assert ssock.socket.gettimeout() == test_timeout
ssock.send(sample_request())
response = consume_socket(ssock)
validate_response(response)
class SocketProxyDummyServer(SocketDummyServerTestCase):
"""
Simulates a proxy that performs a simple I/O loop on client/server
socket.
"""
def __init__(
self, destination_server_host: str, destination_server_port: int
) -> None:
self.destination_server_host = destination_server_host
self.destination_server_port = destination_server_port
self.server_ctx, _ = server_client_ssl_contexts()
def start_proxy_handler(self) -> None:
"""
Socket handler for the proxy. Terminates the first TLS layer and tunnels
        any bytes needed for client <-> server communication.
"""
def proxy_handler(listener: socket.socket) -> None:
sock = listener.accept()[0]
with self.server_ctx.wrap_socket(sock, server_side=True) as client_sock:
upstream_sock = socket.create_connection(
(self.destination_server_host, self.destination_server_port)
)
self._read_write_loop(client_sock, upstream_sock)
upstream_sock.close()
client_sock.close()
self._start_server(proxy_handler)
def _read_write_loop(
self,
client_sock: socket.socket,
server_sock: socket.socket,
chunks: int = 65536,
) -> None:
inputs = [client_sock, server_sock]
output = [client_sock, server_sock]
while inputs:
readable, writable, exception = select.select(inputs, output, inputs)
if exception:
# Error occurred with either of the sockets, time to
# wrap up, parent func will close sockets.
break
for s in readable:
read_socket, write_socket = None, None
if s == client_sock:
read_socket = client_sock
write_socket = server_sock
else:
read_socket = server_sock
write_socket = client_sock
# Ensure buffer is not full before writing
if write_socket in writable:
try:
b = read_socket.recv(chunks)
if len(b) == 0:
# One of the sockets has EOFed, we return to close
# both.
return
write_socket.send(b)
except ssl.SSLEOFError:
# It's possible, depending on shutdown order, that we'll
# try to use a socket that was closed between select
# calls.
return
class TlsInTlsTestCase(SocketDummyServerTestCase):
"""
Creates a TLS in TLS tunnel by chaining a 'SocketProxyDummyServer' and a
`SocketDummyServerTestCase`.
    The client first connects to the proxy, which then forwards any bytes sent
    to the destination server. The first TLS layer terminates at the proxy, the
    second TLS layer terminates at the destination server.
"""
@classmethod
def setup_class(cls) -> None:
cls.server_context, cls.client_context = server_client_ssl_contexts()
@classmethod
def start_proxy_server(cls) -> None:
# Proxy server will handle the first TLS connection and create a
# connection to the destination server.
cls.proxy_server = SocketProxyDummyServer(cls.host, cls.port)
cls.proxy_server.start_proxy_handler()
@classmethod
def teardown_class(cls) -> None:
if hasattr(cls, "proxy_server"):
cls.proxy_server.teardown_class()
super().teardown_class()
@classmethod
def start_destination_server(cls) -> None:
"""
Socket handler for the destination_server. Terminates the second TLS
        layer and sends a basic HTTP response.
"""
def socket_handler(listener: socket.socket) -> None:
sock = listener.accept()[0]
with cls.server_context.wrap_socket(sock, server_side=True) as ssock:
request = consume_socket(ssock)
validate_request(request)
ssock.send(sample_response())
sock.close()
cls._start_server(socket_handler)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_tls_in_tls_tunnel(self) -> None:
"""
Basic communication over the TLS in TLS tunnel.
"""
self.start_destination_server()
self.start_proxy_server()
sock = socket.create_connection(
(self.proxy_server.host, self.proxy_server.port)
)
with self.client_context.wrap_socket(
sock, server_hostname="localhost"
) as proxy_sock:
with SSLTransport(
proxy_sock, self.client_context, server_hostname="localhost"
) as destination_sock:
assert destination_sock.version() is not None
destination_sock.send(sample_request())
response = consume_socket(destination_sock)
validate_response(response)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_wrong_sni_hint(self) -> None:
"""
Provides a wrong sni hint to validate an exception is thrown.
"""
self.start_destination_server()
self.start_proxy_server()
sock = socket.create_connection(
(self.proxy_server.host, self.proxy_server.port)
)
with self.client_context.wrap_socket(
sock, server_hostname="localhost"
) as proxy_sock:
with pytest.raises(ssl.SSLCertVerificationError):
SSLTransport(
proxy_sock, self.client_context, server_hostname="veryverywrong"
)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
@pytest.mark.parametrize("buffering", [None, 0])
def test_tls_in_tls_makefile_raw_rw_binary(self, buffering: Optional[int]) -> None:
"""
Uses makefile with read, write and binary modes without buffering.
"""
self.start_destination_server()
self.start_proxy_server()
sock = socket.create_connection(
(self.proxy_server.host, self.proxy_server.port)
)
with self.client_context.wrap_socket(
sock, server_hostname="localhost"
) as proxy_sock:
with SSLTransport(
proxy_sock, self.client_context, server_hostname="localhost"
) as destination_sock:
file = destination_sock.makefile("rwb", buffering)
file.write(sample_request()) # type: ignore[arg-type]
file.flush()
response = bytearray(65536)
wrote = file.readinto(response) # type: ignore[union-attr]
assert wrote is not None
                # The allocated buffer is bigger than the actual response, so we
                # strip the trailing \x00 padding bytes.
str_response = response.decode("utf-8").rstrip("\x00")
validate_response(str_response, binary=False)
file.close()
@pytest.mark.skipif(
platform.system() == "Windows",
reason="Skipping windows due to text makefile support",
)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_tls_in_tls_makefile_rw_text(self) -> None:
"""
Creates a separate buffer for reading and writing using text mode and
utf-8 encoding.
"""
self.start_destination_server()
self.start_proxy_server()
sock = socket.create_connection(
(self.proxy_server.host, self.proxy_server.port)
)
with self.client_context.wrap_socket(
sock, server_hostname="localhost"
) as proxy_sock:
with SSLTransport(
proxy_sock, self.client_context, server_hostname="localhost"
) as destination_sock:
read = destination_sock.makefile("r", encoding="utf-8")
write = destination_sock.makefile("w", encoding="utf-8")
write.write(sample_request(binary=False)) # type: ignore[arg-type]
write.flush()
response = read.read()
if "\r" not in response:
# Carriage return will be removed when reading as a file on
# some platforms. We add it before the comparison.
assert isinstance(response, str)
response = response.replace("\n", "\r\n")
validate_response(response, binary=False)
@pytest.mark.timeout(PER_TEST_TIMEOUT)
def test_tls_in_tls_recv_into_sendall(self) -> None:
"""
        Validates that recv_into and sendall also work as expected. Other tests
        use recv/send.
"""
self.start_destination_server()
self.start_proxy_server()
sock = socket.create_connection(
(self.proxy_server.host, self.proxy_server.port)
)
with self.client_context.wrap_socket(
sock, server_hostname="localhost"
) as proxy_sock:
with SSLTransport(
proxy_sock, self.client_context, server_hostname="localhost"
) as destination_sock:
destination_sock.sendall(sample_request())
response = bytearray(65536)
destination_sock.recv_into(response)
str_response = response.decode("utf-8").rstrip("\x00")
validate_response(str_response, binary=False)
class TestSSLTransportWithMock:
def test_constructor_params(self) -> None:
server_hostname = "example-domain.com"
sock = mock.Mock()
context = mock.create_autospec(ssl_.SSLContext)
ssl_transport = SSLTransport(
sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False
)
context.wrap_bio.assert_called_with(
mock.ANY, mock.ANY, server_hostname=server_hostname
)
assert not ssl_transport.suppress_ragged_eofs
def test_various_flags_errors(self) -> None:
server_hostname = "example-domain.com"
sock = mock.Mock()
context = mock.create_autospec(ssl_.SSLContext)
ssl_transport = SSLTransport(
sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False
)
with pytest.raises(ValueError):
ssl_transport.recv(flags=1)
with pytest.raises(ValueError):
ssl_transport.recv_into(bytearray(), flags=1)
with pytest.raises(ValueError):
ssl_transport.sendall(bytearray(), flags=1)
with pytest.raises(ValueError):
ssl_transport.send(None, flags=1) # type: ignore[arg-type]
def test_makefile_wrong_mode_error(self) -> None:
server_hostname = "example-domain.com"
sock = mock.Mock()
context = mock.create_autospec(ssl_.SSLContext)
ssl_transport = SSLTransport(
sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False
)
with pytest.raises(ValueError):
ssl_transport.makefile(mode="x")
def test_wrap_ssl_read_error(self) -> None:
server_hostname = "example-domain.com"
sock = mock.Mock()
context = mock.create_autospec(ssl_.SSLContext)
ssl_transport = SSLTransport(
sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False
)
with mock.patch.object(ssl_transport, "_ssl_io_loop") as _ssl_io_loop:
_ssl_io_loop.side_effect = ssl.SSLError()
with pytest.raises(ssl.SSLError):
ssl_transport._wrap_ssl_read(1)
|
sigmavirus24/urllib3
|
test/test_ssltransport.py
|
Python
|
mit
| 20,253
|
# pylint:disable=too-many-branches,too-many-statements
from __future__ import absolute_import
from __future__ import unicode_literals
import collections
import os.path
import re
class Status(object):
ADDED = object()
DELETED = object()
ALREADY_EXISTING = object()
class SpecialFileType(object):
SUBMODULE = object()
SYMLINK = object()
BINARY = object()
SpecialFile = collections.namedtuple(
'SpecialFile',
['file_type', 'added', 'removed'],
)
class FileDiffStat(collections.namedtuple(
'FileDiffStat',
['path', 'lines_added', 'lines_removed', 'status', 'special_file'],
)):
__slots__ = ()
def __new__(cls, *args, **kwargs):
# Default special_file to None in the case it is not provided
# (mostly for backwards compatibility)
kwargs.setdefault('special_file', None)
return super(FileDiffStat, cls).__new__(cls, *args, **kwargs)
@property
def extension(self):
return os.path.splitext(self.path)[1]
@property
def filename(self):
return os.path.split(self.path)[1]
SUBMODULE_MODE = b'160000'
SYMLINK_MODE = b'120000'
def _to_file_diff_stat(file_diff):
lines = file_diff.split(b'\n')
diff_line_filename = lines[0].split()[-1].lstrip(b'b').lstrip(b'/')
is_binary = False
in_diff = False
mode = None
status = None
lines_added = []
lines_removed = []
for line in lines[1:]:
# Mode will be indicated somewhere between diff --git line
# and the file added / removed lines
# It has these forms:
# 1. 'new file mode 100644'
# 2. 'deleted file mode 100644'
# 3. 'index dc7827c..7b8b995 100644'
# 4. 'old mode 100755'
# 'new mode 100644'
if line.startswith(b'new file mode '):
assert status is None
assert mode is None
status = Status.ADDED
mode = line.split()[-1]
elif line.startswith(b'deleted file mode '):
assert status is None
assert mode is None
status = Status.DELETED
mode = line.split()[-1]
elif line.startswith(b'new mode '):
assert status is None
assert mode is None
status = Status.ALREADY_EXISTING
mode = line.split()[-1]
elif line.startswith(b'index') and len(line.split()) == 3:
assert status is None
assert mode is None
status = Status.ALREADY_EXISTING
mode = line.split()[-1]
elif line.startswith(b'Binary files'):
is_binary = True
# A diff contains lines that look like:
# --- foo/bar
# +++ foo/bar
# Which kind of look like diff lines but are definitely not
elif line.startswith(b'--- ') and not in_diff:
pass
elif line.startswith(b'+++ ') and not in_diff:
in_diff = True
elif in_diff and line.startswith(b'+'):
lines_added.append(line[1:])
elif in_diff and line.startswith(b'-'):
lines_removed.append(line[1:])
assert mode is not None
assert status is not None
# Process symlinks and submodules
special_file = None
if mode == SUBMODULE_MODE:
special_file = SpecialFile(
file_type=SpecialFileType.SUBMODULE,
added=lines_added[0].split()[-1] if lines_added else None,
removed=lines_removed[0].split()[-1] if lines_removed else None,
)
lines_added = []
lines_removed = []
elif mode == SYMLINK_MODE:
special_file = SpecialFile(
file_type=SpecialFileType.SYMLINK,
added=lines_added[0] if lines_added else None,
removed=lines_removed[0] if lines_removed else None,
)
lines_added = []
lines_removed = []
elif is_binary:
special_file = SpecialFile(
file_type=SpecialFileType.BINARY,
added=diff_line_filename if status is not Status.DELETED else None,
removed=diff_line_filename if status is not Status.ADDED else None,
)
return FileDiffStat(
diff_line_filename,
lines_added,
lines_removed,
status,
special_file=special_file,
)
GIT_DIFF_RE = re.compile(b'^diff --git', flags=re.MULTILINE)
def get_file_diff_stats_from_output(output):
assert type(output) is bytes, (type(output), output)
files = GIT_DIFF_RE.split(output)
assert not files[0].strip() or files[0].startswith(b'commit ')
return [_to_file_diff_stat(file_diff) for file_diff in files[1:]]
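# A minimal usage sketch (added for illustration; the diff below is a made-up example):
#
#     diff = (
#         b'diff --git a/foo.txt b/foo.txt\n'
#         b'new file mode 100644\n'
#         b'--- /dev/null\n'
#         b'+++ b/foo.txt\n'
#         b'+hello\n'
#     )
#     [stat] = get_file_diff_stats_from_output(diff)
#     # stat.path == b'foo.txt', stat.status is Status.ADDED, stat.lines_added == [b'hello']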
|
ucarion/git-code-debt
|
git_code_debt/file_diff_stat.py
|
Python
|
mit
| 4,630
|
#!/usr/bin/env python
"""Frequency manager."""
from mic import Mic
import numpy as np
import pylab as pl
class FrequencyStream(object):
"""Frequency stream."""
def __init__(self):
"""Construct FrequencyStream object."""
self.mic = Mic('Blue Snowball')
def __enter__(self):
"""Open and return frequency stream."""
self.mic.open()
return self
def __exit__(self, type, value, traceback):
"""Close stream."""
self.mic.close()
def fft(self, data, jump):
"""Return data in frequency domain."""
# cut up data
# start frame
start = 0
# go until interval reaches final frame
while start + 8192 <= len(data):
# get fft of interval
freq = np.absolute(np.fft.rfft(data[start:start+8192]))
# send out fft
yield freq
# move to next interval
start += jump
def read(self, jump=1024, frames=None):
"""Read a number of frames of data into the stream."""
# read all frames
self.mic.read(frames)
# iterate through buffers
for buff in self.fft(self.mic.stream.channel_1, jump):
yield buff
def main():
    """Plot one second of data in the frequency domain."""
    with FrequencyStream() as stream:
        # read() is a generator, so it has to be consumed for any audio to be captured
        spectra = list(stream.read())
        pl.subplot(2, 1, 1)
        # raw samples live on the underlying mic stream (FrequencyStream has no channel_1)
        pl.plot(stream.mic.stream.channel_1)
        pl.subplot(2, 1, 2)
        # plot the first FFT window (frequency domain); assumes at least 8192 frames were read
        pl.plot(spectra[0])
        pl.show()
if __name__ == '__main__':
main()
|
anassinator/beethoven
|
src/web/frequency.py
|
Python
|
mit
| 1,545
|
import _plotly_utils.basevalidators
class HoverinfoValidator(_plotly_utils.basevalidators.FlaglistValidator):
def __init__(self, plotly_name="hoverinfo", parent_name="table", **kwargs):
super(HoverinfoValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
extras=kwargs.pop("extras", ["all", "none", "skip"]),
flags=kwargs.pop("flags", ["x", "y", "z", "text", "name"]),
role=kwargs.pop("role", "info"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/table/_hoverinfo.py
|
Python
|
mit
| 636
|
from django.shortcuts import render
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect
#
# Show all cluster results and running tasks
#
@method_decorator(login_required, name='dispatch')
class IndexView(View):
template_name = 'statistics/index.html'
def get(self, request):
return render(request, self.template_name, {
})
|
mstrehse/geneclust-server
|
app/statistics/views.py
|
Python
|
mit
| 484
|
import logging
from functools import total_ordering
from typing import Tuple, NamedTuple, Dict, Callable, Set, Any, List
import numpy
import itertools
from decimal import Decimal
from datetime import datetime
import json
import pandas
class QuoteEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Decimal):
return str(o)
elif isinstance(o, datetime):
return o.isoformat()
        return super(QuoteEncoder, self).default(o)
class PriceVolume(object):
price: Decimal
volume: Decimal
def __init__(self, price, volume):
self._price = price
self._volume = volume
@property
def price(self):
return self._price
@property
def volume(self):
return self._volume
def __repr__(self):
return '{}@{}'.format(self.volume, self.price)
def __hash__(self):
return hash(repr(self))
def __eq__(self, other):
return (self.price == other.price) and (self.volume == other.volume)
def __ne__(self, other):
return not self == other
class CurrencyTrade(object):
"""
Models a currency trade.
"""
def __init__(self, direction: str, pair: str, quantity: Decimal, price: Decimal, fill_ratio: float):
self._direction = direction
self._pair = pair
self._quantity = quantity
self._price = price
self._fill_ratio = fill_ratio
@property
def direction(self) -> str:
return self._direction
@property
def pair(self) -> str:
return self._pair
@property
def quantity(self) -> Decimal:
return self._quantity
@property
def price(self) -> Decimal:
return self._price
@property
def fill_ratio(self) -> float:
return self._fill_ratio
def scale(self, factor: float):
self._quantity *= factor
self._fill_ratio *= factor
def as_dict(self):
return {
'direction': self.direction,
'pair': self.pair,
'quantity': self.quantity,
'price': self.price,
'fill_ratio': self.fill_ratio,
}
def __repr__(self):
        return '[{}, {}, {}, {}, {}]'.format(self.direction, self.pair, self.quantity, self.price, self.fill_ratio)
class CurrencyBalance(object):
"""
Models a currency balance.
"""
def __init__(self, currency: str, amount: Decimal):
self._currency = currency
self._amount = amount
@property
def currency(self) -> str:
return self._currency
@property
def amount(self) -> Decimal:
return self._amount
def scale(self, factor: float):
self._amount *= factor
def __repr__(self):
return '[{} {}]'.format(self.currency, self.amount)
def __hash__(self):
return hash(self.currency + str(self.amount))
def __eq__(self, other):
return (self.currency == other.currency) and (self.amount == other.amount)
def __ne__(self, other):
return not self == other
class CurrencyBalanceAggregate(object):
def __init__(self):
self._balances = dict()
def add_balance(self, currency: str, amount: Decimal):
self._balances[currency] = CurrencyBalance(currency, amount)
def scale(self, factor: float):
for currency in self._balances:
self._balances[currency].scale(factor)
def amount(self, currency) -> Decimal:
return self._balances[currency].amount
def as_dict(self):
return {currency: self._balances[currency].amount for currency in self._balances}
def assets(self):
return self._balances.keys()
class ForexQuote(object):
"""
Models a forex quote.
"""
def __init__(self, timestamp: datetime = None, bid: PriceVolume = None, ask: PriceVolume = None,
source: str = None):
if not timestamp:
self._timestamp = datetime.now()
else:
self._timestamp = timestamp
self._bid = bid
self._ask = ask
self._source = source
@property
def timestamp(self) -> datetime:
return self._timestamp
@property
def bid(self) -> PriceVolume:
return self._bid
@property
def ask(self) -> PriceVolume:
return self._ask
@property
def source(self) -> str:
return self._source
def is_complete(self) -> bool:
return self.bid is not None and self.ask is not None
def to_dict(self):
quote_data = {'timestamp': self.timestamp,
'bid': {'price': self.bid.price, 'amount': self.bid.volume},
'ask': {'price': self.ask.price, 'amount': self.ask.volume},
'source': self.source}
return quote_data
def to_json(self):
return json.dumps(self.to_dict(), cls=QuoteEncoder)
def __repr__(self):
return '[{}:{}/{}]'.format(self.timestamp, self.bid, self.ask)
@total_ordering
class CurrencyPair(object):
"""
Models a currency pair.
"""
def __init__(self, base_currency_code: str, quote_currency_code: str):
"""
The quotation EUR/USD 1.2500 means that one euro is exchanged for 1.2500 US dollars.
Here, EUR is the base currency and USD is the quote currency(counter currency).
:param base_currency_code: currency that is quoted
:param quote_currency_code: currency that is used as the reference
"""
self._base_currency_code = base_currency_code.upper()
self._quote_currency_code = quote_currency_code.upper()
def buy(self, quote: ForexQuote, volume: Decimal, illimited_volume: bool = False) -> Tuple[CurrencyBalanceAggregate, CurrencyTrade]:
"""
Computes the balance after the buy has taken place.
Example, provided volume is sufficient:
quote = EUR/USD <1.15, 1.16>, volume = +1 ---> Balances: EUR = +1, USD = -1.16
:param quote: ForexQuote instance
:param volume:
:param illimited_volume: emulates infinite liquidity
:return:
"""
price = quote.ask.price
if illimited_volume:
allowed_volume = volume
else:
allowed_volume = min(volume, quote.ask.volume)
fill_ratio = allowed_volume / volume
balances = CurrencyBalanceAggregate()
balances.add_balance(self.base, allowed_volume)
balances.add_balance(self.quote, Decimal(allowed_volume * price * -1))
trade = CurrencyTrade('buy', repr(self), allowed_volume, price, fill_ratio)
return balances, trade
def sell(self, quote: ForexQuote, volume: Decimal, illimited_volume: bool = False) -> Tuple[CurrencyBalanceAggregate, CurrencyTrade]:
"""
Computes the balance after the sell has taken place.
Example, provided volume is sufficient:
quote = EUR/USD <1.15, 1.16>, volume = 1 ---> Balances: EUR = -1, USD = +1.15
:param quote: ForexQuote instance
:param volume:
:param illimited_volume: emulates infinite liquidity
:return:
"""
volume = abs(volume)
price = quote.bid.price
if illimited_volume:
allowed_volume = volume
else:
allowed_volume = min(volume, quote.bid.volume)
fill_ratio = allowed_volume / volume
balances = CurrencyBalanceAggregate()
balances.add_balance(self.base, Decimal(allowed_volume * -1))
balances.add_balance(self.quote, Decimal(allowed_volume * price))
trade = CurrencyTrade('sell', repr(self), Decimal(allowed_volume * -1), price, fill_ratio)
return balances, trade
def buy_currency(self, currency: str, volume: Decimal, quote: ForexQuote, illimited_volume: bool = False) -> Tuple[
CurrencyBalanceAggregate, CurrencyTrade]:
"""
:param currency: currency to buy
:param volume: amount to buy denominated in currency
:param quote: current quote (ForexQuote instance)
:param illimited_volume: emulates infinite liquidity
:return: resulting balance and performed trades (balance, performed_trade)
"""
assert currency in self.assets, 'currency {} not in pair {}'.format(currency, self)
assert volume >= 0
logging.debug('buying {} {} using pair {}'.format(volume, currency, self))
if currency == self.base:
# Direct quotation
balances, performed_trade = self.buy(quote, volume, illimited_volume)
else:
# Indirect quotation
target_volume = Decimal(volume) / quote.bid.price
balances, performed_trade = self.sell(quote, target_volume, illimited_volume)
return balances, performed_trade
def sell_currency(self, currency: str, volume: Decimal, quote: ForexQuote, illimited_volume: bool = False) -> Tuple[
CurrencyBalanceAggregate, CurrencyTrade]:
"""
:param currency:
:param volume: amount to buy denominated in currency
:param quote: current quote (ForexQuote instance)
:param illimited_volume: emulates infinite liquidity
:return: resulting balance and performed trades (balance, performed_trade)
"""
assert currency in self.assets, 'currency {} not in pair {}'.format(currency, self)
assert volume >= 0
logging.debug('selling {} {} using pair {}'.format(volume, currency, self))
if currency == self.base:
# Direct quotation
balance, performed_trade = self.sell(quote, volume, illimited_volume)
else:
# Indirect quotation
target_volume = Decimal(volume) / quote.ask.price
balance, performed_trade = self.buy(quote, target_volume, illimited_volume)
return balance, performed_trade
def convert(self, currency: str, amount: Decimal, quote: ForexQuote):
if currency == self.base:
destination_currency = self.quote
else:
destination_currency = self.base
if amount >= 0:
balances, trade = self.sell_currency(currency, amount, quote, illimited_volume=True)
amount = balances.amount(destination_currency)
return abs(amount)
else:
balances, trade = self.buy_currency(currency, abs(amount), quote, illimited_volume=True)
amount = balances.amount(destination_currency)
return abs(amount) * -1
@property
def assets(self) -> Set[str]:
return {self._base_currency_code, self._quote_currency_code}
@property
def quote(self) -> str:
return self._quote_currency_code
@property
def base(self) -> str:
return self._base_currency_code
def to_direct(self, separator: str = '/') -> str:
return '{}{}{}'.format(self.base, separator, self.quote)
def to_indirect(self, separator: str = '/') -> str:
return '{}{}{}'.format(self.quote, separator, self.base)
def __repr__(self):
return '<{}/{}>'.format(self.base, self.quote)
def __hash__(self):
return hash(self.base + ',' + self.quote)
def __eq__(self, other):
return (self.base == other.base) and (self.quote == other.quote)
def __ne__(self, other):
return not self == other
def __le__(self, other):
return repr(self) <= repr(other)
@total_ordering
class ArbitrageStrategy(object):
"""
Models an arbitrage strategy.
"""
def __init__(self, pair1: CurrencyPair, pair2: CurrencyPair, pair3: CurrencyPair):
"""
:param pair1: CurrencyPair instance
:param pair2: CurrencyPair instance
:param pair3: CurrencyPair instance
"""
pairs = {pair1, pair2, pair3}
bases = [pair.base for pair in pairs]
bases_count = dict((base, len(list(group))) for base, group in itertools.groupby(sorted(bases)))
unique_bases = sorted([base for base in bases_count if bases_count[base] == 1])
if len(unique_bases) == 0:
raise SystemError('cannot arbitrage pairs: {}'.format(pairs))
common_currency = unique_bases[0]
direct_pair = None
for pair in pairs:
if common_currency not in pair.assets:
direct_pair = pair
break
if direct_pair is None:
raise SystemError('cannot arbitrage pairs: {}'.format(pairs))
self._pair1 = direct_pair
indirect_pairs = list(pairs.difference({direct_pair}))
if common_currency == indirect_pairs[0].quote:
self._pair2, self._pair3 = indirect_pairs[0], indirect_pairs[1]
else:
self._pair3, self._pair2 = indirect_pairs[0], indirect_pairs[1]
self._quotes = {
self._pair1: ForexQuote(),
self._pair2: ForexQuote(),
self._pair3: ForexQuote()
}
def __repr__(self):
return '[{},{}]'.format(self.indirect_pairs, self.direct_pair)
def __hash__(self):
return hash(repr(self))
def __eq__(self, other):
return self.indirect_pairs == other.indirect_pairs and self.direct_pair == other.direct_pair
def __ne__(self, other):
return not self == other
def __le__(self, other):
return repr(self) <= repr(other)
@property
def direct_pair(self) -> CurrencyPair:
return self._pair1
@property
def indirect_pairs(self) -> Tuple[CurrencyPair, CurrencyPair]:
return self._pair2, self._pair3
@property
def pairs(self) -> Tuple[CurrencyPair, CurrencyPair, CurrencyPair]:
sorted_pairs = sorted([self._pair1, self._pair2, self._pair3])
return sorted_pairs[0], sorted_pairs[1], sorted_pairs[2]
def update_quote(self, pair: CurrencyPair, quote: ForexQuote) -> None:
"""
:param pair:
:param quote:
:return:
"""
self._quotes[pair] = quote
@property
def quotes(self) -> Dict[CurrencyPair, ForexQuote]:
"""
:return:
"""
return self._quotes
@property
def quotes_valid(self) -> bool:
"""
:return:
"""
is_valid = True
for pair, quote in self.quotes.items():
is_valid = is_valid and quote.is_complete()
return is_valid
def find_opportunity(self, illimited_volume: bool) -> Tuple[Any, Any]:
"""
        :param illimited_volume: emulates infinite liquidity
:return:
"""
opportunity = None, None
if self.quotes_valid:
logging.info('strategy book: {}'.format(self.quotes))
balances_df, trades_df = self.apply_arbitrage(illimited_volume=illimited_volume)
balances_by_currency = balances_df.sum(axis=1)
logging.info('adding new opportunity:\n{}'.format(trades_df))
logging.info('resulting balances:\n{}'.format(balances_by_currency))
opportunity = trades_df.to_dict(orient='records'), balances_by_currency.to_dict()
else:
logging.info('incomplete quotes')
return opportunity
def apply_arbitrage(self, illimited_volume: bool) -> Tuple[Any, Any]:
"""
Determines arbitrage operations:
- selling indirect pair 1
- selling indirect pair 2
- offsetting remaining balance
:param illimited_volume:
:return:
"""
logging.debug('accumulating currency: {}'.format(self.direct_pair.quote))
initial_amount = self.quotes[self.indirect_pairs[0]].bid.volume
balance_initial, trade_initial = self.indirect_pairs[0].sell(self.quotes[self.indirect_pairs[0]],
initial_amount, illimited_volume)
logging.debug('balance step 1: {}'.format(balance_initial))
balance_next, trade_next = self.indirect_pairs[1].sell(self.quotes[self.indirect_pairs[1]],
balance_initial.amount(self.indirect_pairs[0].quote),
illimited_volume)
volume_adjustment = trade_next.fill_ratio
balance_initial.scale(volume_adjustment)
trade_initial.scale(volume_adjustment)
logging.debug('balance step 2: {}'.format(balance_next))
if self.direct_pair.base in balance_initial.assets():
settling_amount = balance_initial.amount(self.direct_pair.base)
balance_final, trade_final = self.direct_pair.buy_currency(self.direct_pair.base, abs(settling_amount),
self.quotes[self.direct_pair], illimited_volume)
else:
settling_amount = balance_initial.amount(self.direct_pair.quote)
balance_final, trade_final = self.direct_pair.buy_currency(self.direct_pair.quote, abs(settling_amount),
self.quotes[self.direct_pair], illimited_volume)
volume_adjustment = trade_final.fill_ratio
balance_initial.scale(volume_adjustment)
trade_initial.scale(volume_adjustment)
balance_next.scale(volume_adjustment)
trade_next.scale(volume_adjustment)
logging.debug('balance step 3: {}'.format(balance_final))
balance1_series = pandas.Series(balance_initial.as_dict(), name='initial')
balance2_series = pandas.Series(balance_next.as_dict(), name='next')
balance3_series = pandas.Series(balance_final.as_dict(), name='final')
balance_series = [balance1_series, balance2_series, balance3_series]
balances_df = pandas.concat(balance_series, axis=1)
trades_df = pandas.DataFrame([trade_initial.as_dict(), trade_next.as_dict(), trade_final.as_dict()])
return balances_df, trades_df
class CurrencyConverter(object):
"""
Forex conversion.
"""
def __init__(self, market: Tuple[str, str], order_book_callback: Callable[[CurrencyPair], ForexQuote],
direct: bool = True):
"""
        :param market: the currency pair as a tuple, for example ('USD', 'EUR')
:param order_book_callback: function returning a quote for a given CurrencyPair
:param direct: when foreign currency comes first in market name
"""
if direct:
self._domestic_currency, self._foreign_currency = market[0].upper(), market[1].upper()
else:
self._foreign_currency, self._domestic_currency = market[0].upper(), market[1].upper()
self._order_book_callback = order_book_callback
@property
def domestic_currency(self) -> str:
return self._domestic_currency
@property
def foreign_currency(self) -> str:
return self._foreign_currency
def exchange(self, currency: str, amount: Decimal) -> Decimal:
"""
:param currency:
:param amount:
:return:
"""
if currency == self.domestic_currency:
return amount
elif currency == self.foreign_currency:
target_pair = CurrencyPair(self.domestic_currency, currency)
else:
raise LookupError('unable to convert {}'.format(currency))
quote = self._order_book_callback(target_pair)
return target_pair.convert(currency, amount, quote)
def sell(self, currency: str, amount: Decimal) -> Decimal:
assert amount >= 0
return self.exchange(currency.upper(), amount)
def buy(self, currency: str, amount: Decimal) -> Decimal:
assert amount >= 0
return self.exchange(currency.upper(), -amount)
def order_entries(quotes: Dict[Decimal, Tuple[datetime, Decimal, int]], reverse=False) -> List[Dict[str, Any]]:
ordered_quotes = list()
for price in sorted(quotes, reverse=reverse):
ordered_quotes.append(quotes[price])
return ordered_quotes
class OrderBook(object):
"""
Models an order book.
"""
def __init__(self, pair: CurrencyPair, source: str):
self._quotes_bid_by_price = dict()
self._quotes_ask_by_price = dict()
self._pair = pair
self._source = source
@property
def source(self) -> str:
return self._source
@property
def pair(self) -> CurrencyPair:
return self._pair
@property
def quotes_bid(self) -> List[Dict[str, Any]]:
quotes_bid = order_entries(self._quotes_bid_by_price, reverse=True)
return quotes_bid
@property
def quotes_ask(self) -> List[Dict[str, Any]]:
quotes_ask = order_entries(self._quotes_ask_by_price, reverse=False)
return quotes_ask
def load_snapshot(self, snapshot) -> None:
"""
:param snapshot:
:return:
"""
channel_id, book_data = snapshot
for price, count, amount in book_data:
timestamp = datetime.utcnow()
if Decimal(amount) > 0:
self._quotes_bid_by_price[Decimal(price)] = {'timestamp': timestamp, 'price': Decimal(price),
'amount': Decimal(amount)}
else:
self._quotes_ask_by_price[Decimal(price)] = {'timestamp': timestamp, 'price': Decimal(price) * -1,
'amount': Decimal(amount)}
def remove_quote(self, price: Decimal, quotes_by_price: Dict[Decimal, ForexQuote]) -> bool:
"""
:param price:
:param quotes_by_price:
:return:
"""
if price in quotes_by_price:
quotes_by_price.pop(price)
return True
return False
def remove_bid(self, price: Decimal) -> bool:
"""
:param price:
:return:
"""
return self.remove_quote(price, self._quotes_bid_by_price)
def remove_ask(self, price: Decimal) -> bool:
"""
:param price:
:return:
"""
return self.remove_quote(price, self._quotes_ask_by_price)
def update_quote(self, quotes_by_price: Dict[Decimal, Dict[str, Any]], price: Decimal, amount: Decimal) -> bool:
"""
:param quotes_by_price:
:param price:
:param amount:
:return:
"""
timestamp = datetime.utcnow()
        if price in quotes_by_price:
quotes_by_price[price]['timestamp'] = timestamp
quotes_by_price[price]['amount'] = amount
else:
quotes_by_price[price] = {'timestamp': timestamp, 'price': price, 'amount': amount}
return True
def update_bid(self, price: Decimal, amount: Decimal) -> bool:
"""
:param price:
:param amount:
:return:
"""
return self.update_quote(self._quotes_bid_by_price, price, amount)
def update_ask(self, price: Decimal, amount: Decimal) -> bool:
"""
:param price:
:param amount:
:return:
"""
        return self.update_quote(self._quotes_ask_by_price, price, amount)
def level_one(self) -> ForexQuote:
"""
:return:
"""
if len(self.quotes_bid) == 0 or len(self.quotes_ask) == 0:
logging.error('invalid state for quote: {} / {} for pair {}'.format(self.quotes_bid, self.quotes_ask, self.pair))
return ForexQuote(datetime.now(), source=self.source)
best_bid = self.quotes_bid[0]
best_ask = self.quotes_ask[0]
timestamp = max(best_bid['timestamp'], best_ask['timestamp'])
bid_side = PriceVolume(abs(best_bid['price']), abs(best_bid['amount']))
ask_side = PriceVolume(abs(best_ask['price']), abs(best_ask['amount']))
return ForexQuote(timestamp, bid_side, ask_side, source=self.source)
def to_json(self) -> str:
return json.dumps({'bid': self.quotes_bid, 'ask': self.quotes_ask}, cls=QuoteEncoder)
    def __repr__(self) -> str:
return self.to_json()
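# --- Added usage sketch (not part of the original module) ---
# Mirrors the docstring example on CurrencyPair.buy: with EUR/USD quoted
# <1.15, 1.16>, buying 1 EUR debits 1.16 USD.
if __name__ == '__main__':
    pair = CurrencyPair('EUR', 'USD')
    quote = ForexQuote(bid=PriceVolume(Decimal('1.15'), Decimal('100')),
                       ask=PriceVolume(Decimal('1.16'), Decimal('100')))
    balances, trade = pair.buy(quote, Decimal('1'))
    print(balances.as_dict())  # {'EUR': Decimal('1'), 'USD': Decimal('-1.16')}
    print(trade)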
|
chris-ch/coinarb
|
bf-arb-pylab/src/arbitrage/entities.py
|
Python
|
mit
| 24,192
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import external_documentation
except ImportError:
external_documentation = sys.modules[
"onshape_client.oas.models.external_documentation"
]
class Tag(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"description": (str,), # noqa: E501
"extensions": (
{str: (bool, date, datetime, dict, float, int, list, str,)},
), # noqa: E501
"external_docs": (
external_documentation.ExternalDocumentation,
), # noqa: E501
"name": (str,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"description": "description", # noqa: E501
"extensions": "extensions", # noqa: E501
"external_docs": "externalDocs", # noqa: E501
"name": "name", # noqa: E501
}
@staticmethod
def _composed_schemas():
return None
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""tag.Tag - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
description (str): [optional] # noqa: E501
extensions ({str: (bool, date, datetime, dict, float, int, list, str,)}): [optional] # noqa: E501
external_docs (external_documentation.ExternalDocumentation): [optional] # noqa: E501
name (str): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
for var_name, var_value in six.iteritems(kwargs):
setattr(self, var_name, var_value)
|
onshape-public/onshape-clients
|
python/onshape_client/oas/models/tag.py
|
Python
|
mit
| 5,101
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-08 01:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('round', '0015_auto_20161105_2340'),
]
operations = [
migrations.AddField(
model_name='plot',
name='non_stationary_seq',
field=models.PositiveSmallIntegerField(null=True),
),
migrations.AddField(
model_name='plot',
name='seq',
field=models.PositiveSmallIntegerField(null=True),
),
migrations.AddField(
model_name='plot',
name='stationary_seq',
field=models.PositiveSmallIntegerField(null=True),
),
migrations.AlterField(
model_name='plot',
name='plot',
field=models.URLField(unique=True),
),
]
|
adminq80/Interactive_estimation
|
game/round/migrations/0016_auto_20161208_0154.py
|
Python
|
mit
| 942
|
from abc import ABCMeta, abstractmethod
from data_store import JSONDataStore
import os
import os.path
import json
from datetime import datetime
class Link:
__metaclass__ = ABCMeta
def __init__(self, settings_file):
self.settings = JSONDataStore(settings_file)
self.settings.load()
self.create_default_settings({"log_dir":"log/","received_log_file":"received.log","sent_log_file":"sent.log"})
@abstractmethod
def on_message_received(self, message):
pass
@abstractmethod
def send_message(self, message):
pass
def get_settings(self):
return self.settings
def create_default_settings(self, default_settings):
if self.get_settings().get_data() is None:
self.get_settings().set_data({})
for key in default_settings:
if key not in self.get_settings().get_data():
update = {key:default_settings[key]}
self.get_settings().get_data().update(update)
def write_to_log(self, logfile, message):
if os.path.isfile(logfile) == False:
data = self.settings.get_data()
logdir = data["log_dir"]
os.system("mkdir -p " + logdir)
os.system("touch " + logfile)
with open(logfile, "a") as f:
log_dict = {}
timestamp_dict = self.get_timestamp()
log_dict.update(timestamp_dict)
log_dict.update(message)
f.write(json.dumps(log_dict) + "\r\n")
def write_to_sent_log(self, message):
data = self.settings.get_data()
logfile = data["log_dir"] + "/" + data["sent_log_file"]
self.write_to_log(logfile, message)
def write_to_received_log(self, message):
data = self.settings.get_data()
logfile = data["log_dir"] + "/" + data["received_log_file"]
self.write_to_log(logfile, message)
def get_timestamp(self):
time_dict = {"timestamp":{"utc":datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")}}
return time_dict
class URLLink(Link):
__metaclass__ = ABCMeta
def __init__(self, settings_file):
super(URLLink, self).__init__(settings_file)
self.create_default_settings({"protocol":"http", "host":"localhost", "port":8000})
def get_url(self):
data = self.settings.get_data()
url = data["protocol"] + "://" + data["host"] + ":" + str(data["port"])
return url
class HTTPLink(URLLink):
__metaclass__ = ABCMeta
def __init__(self, settings_file):
super(HTTPLink, self).__init__(settings_file)
self.create_default_settings({"page":"index.html"})
def get_url(self):
url = super(HTTPLink, self).get_url()
data = self.settings.get_data()
url = url + "/" + data["page"]
return url
|
linusluotsinen/RPiAntiTheft
|
util/link.py
|
Python
|
mit
| 2,866
|
#!/usr/bin/env python
import binascii
import socket
import struct
import argparse
import sys
import logging
import os
import traceback
import socket
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import Ether, IP, IPv6, TCP, sendp, conf, sniff
from random import randint
from capability import *
from colorama import *
from interface import *
def args_error():
print BAD + "Invalid parameters."
def validate_ips(ips):
clean = []
if ips is None or not isinstance(ips, list):
return []
for ip in ips:
if "," in ip:
ips += filter(None, ip.split(","))
else:
try:
socket.inet_aton(ip)
except Exception as e:
print e
print("error: invalid ip address \"%s\", exiting." % ip)
return None
clean.append(ip)
return clean
def is_int(s):
try:
int(s)
return True
except ValueError:
return False
def validate_ports(ports):
clean = []
if ports is not None:
for port in ports:
if "," in port:
ports += port.split(",")
elif "-" in port:
low, high = port.split("-")
if not is_int(low) or not is_int(high):
print("error: invalid port range \"%s\", exiting." % port)
return None
elif not is_int(port):
return None
clean.append(port)
return clean
return []
def validate_args(args):
for arg in ["allow", "allow_source", "allow_destination", "target", "target_source", "target_destination"]:
if arg in args and args[arg] is not None and not validate_ips(args[arg]):
args_error()
for arg in ["allow_port", "allow_source_port", "allow_destination_port", "target_port", "target_source_port", "target_destination_port"]:
if arg in args and args[arg] is not None and not validate_ports(args[arg]):
args_error()
class TCPKiller(Capability):
def __init__(self, core):
super(TCPKiller, self).__init__(core)
self.name = "TCPKiller"
self.core = core
self.options = {
"interface": Option("interface", "eth0", "interface to act upon", True),
"allow": Option("allow", self.core.localIP, "do not attack this ip address's connections, whether it's the source or destination of a packet", False),
"allow_source": Option("allow-source", False, "do not attack this ip address's connections, but only if it's the source of a packet", False),
"allow_destination": Option("destination-source", False, "do not attack this ip address's connections, but only if it's the destination of a packet", False),
"target": Option("target", "0.0.0.0", "actively target given ip address, whether it is the source or destination of a packet", False),
"target_source": Option("target-source", "0.0.0.0", "actively target this ip address, but only if it's the source of a packet", False),
"target_destination": Option("target-destination", "0.0.0.0", "actively target this ip address, but only if it's the destination of a packet", False),
"allow_port": Option("allow-port", None, "do not attack any connections involving this port, whether it's the source or destination of a packet", False),
"allow_source_port": Option("allow-source-port", None, "do not attack any connections involving this port, but only if it's the source of a packet", False),
"allow_destination_port": Option("allow-source-port", None, "do not attack any connections involving this port, but only if it's the destination of a packet", False),
"target_port": Option("target-port", None, "actively target any connections involving these ports whether it is the source or destination of a packet", False),
"target_source_port": Option("target-source-port", None, "actively target any connections involving this port, but only if it's the source", False),
"target_destination_port": Option("target-source-port", None, "actively target any connections involving this port, but only if it's the destination", False),
"noisy": Option("noisy", False, "sends many more packets to attempt connection resets to increase effectiveness", False),
"randomize": Option("randomize", "all", "target only SOME of the matching packets for increased stealthiness", False),
"verbose": Option("verbose", "True", "verbose output", False)
}
self.help_text = INFO + "Forges TCP reset packets to hangup all ipv4 tcp connections"
def setup(self):
args = {}
for opt in self.options:
args[opt] = self.get_value(opt)
self.iface = args["interface"]
self.verbose = args["verbose"]
self.noisy = args["noisy"]
self.randomize = args["randomize"]
self.VERBOSE = False
self.allow = self.allow_source = self.allow_destination = []
self.target = self.target_source = self.target_destination = []
self.aports = self.allow_sport = self.allow_dport = []
self.tports = self.target_sport = self.target_dport = []
self.ranges = {}
self.allow = validate_ips(args["allow"])
self.allow_src = validate_ips(args["allow_source"])
self.allow_dst = validate_ips(args["allow_destination"])
self.target = validate_ips(args["target"])
self.target_src = validate_ips(args["target_source"])
self.target_dst = validate_ips(args["target_destination"])
self.allow_ports = validate_ports(args["allow_port"])
self.allow_sport = validate_ports(args["allow_source_port"])
self.allow_dport = validate_ports(args["allow_destination_port"])
self.target_ports = validate_ports(args["target_port"])
self.target_sport = validate_ports(args["target_source_port"])
self.target_dport = validate_ports(args["target_destination_port"])
self.args = args
self.stop_sniffing = False
try:
self.s = socket.socket(socket.PF_PACKET, socket.SOCK_RAW)
except:
print BAD + "Raw sockets are unavailable on this platform. Exiting."
return None
print("[*] Initialized tcpkiller on %s in %s mode, targeting %s%s. Press Ctrl-C to exit." %(self.iface, ("noisy" if self.noisy else "quiet"), (args["randomize"]), (" with verbosity enabled" if self.verbose else "")))
if self.allow:
print("[*] Allowing all connections involving " + ", ".join(self.allow))
if self.allow_src:
print("[*] Allowing all connections originating from " + ", ".join(self.allow_src))
if self.allow_dst:
print("[*] Allowing all connections coming from " + ", ".join(self.allow_dst))
if self.target:
print("[*] Targeting all connections involving " + ", ".join(self.target))
if self.target_src:
print("[*] Targeting all connections originating from " + ", ".join(self.target_src))
if self.target_dst:
print("[*] Targeting all connections coming from " + ", ".join(self.target_dst))
if self.allow_ports:
print("[*] Allowing all connections involving " + ", ".join(self.allow_ports))
if self.allow_sport:
print("[*] Allowing all connections originating from " + ", ".join(self.allow_sport))
if self.allow_dport:
print("[*] Allowing all connections coming from " + ", ".join(self.allow_dport))
if self.target_ports:
print("[*] Targeting all connections involving " + ", ".join(self.target_ports))
if self.target_sport:
print("[*] Targeting all connections originating from " + ", ".join(self.target_sport))
if self.target_dport:
print("[*] Targeting all connections coming from " + ", ".join(self.target_dport))
return True
###############################################################
# Packet Handling #
###############################################################
# Given command line arguements, method determines if this packet should be responded to
def ignore_packet(self, packet, proto):
src_ip = packet[proto].src
dst_ip = packet[proto].dst
src_port = packet[TCP].sport
dst_port = packet[TCP].dport
# Target or allow by IP
if (src_ip in self.allow or dst_ip in self.allow) or (src_ip in self.allow_src) or (dst_ip in self.allow_dst):
return True
        elif (self.target and (src_ip not in self.target and dst_ip not in self.target)) or (self.target_src and not src_ip in self.target_src) or (self.target_dst and not dst_ip in self.target_dst):
return True
# Target or allow by Port
if (src_port in self.allow_ports or dst_port in self.allow_ports) or (src_port in self.allow_sport) or (dst_port in self.allow_dport):
return True
elif (self.target_ports and (not src_port in self.target_ports and not dst_port in self.target_ports)) or (self.target_sport and not src_port in self.target_sport) or (self.target_dport and not dst_port in self.target_dport):
return True
# Target randomly
if self.randomize == "often" and randint(1, 10) < 2:
return True
elif self.randomize == "half" and randint(1, 10) < 5:
return True
elif self.randomize == "seldom" and randint(1, 10) < 8:
return True
else:
return False
###############################################################
# Scapy #
###############################################################
def send(self, packet):
self.s.send(packet)
def build_packet(self, src_mac, dst_mac, src_ip, dst_ip, src_port, dst_port, seq, proto):
eth = Ether(src=src_mac, dst=dst_mac, type=0x800)
if proto == IP:
ip = IP(src=src_ip, dst=dst_ip)
elif proto == IPv6:
ip = IPv6(src=src_ip, dst=dst_ip)
else:
return str(eth) #if unknown L2 protocol, send back dud ether packet
tcp = TCP(sport=src_port, dport=dst_port, seq=seq, flags="R")
return str(eth/ip/tcp)
def callback(self, packet):
flags = packet.sprintf("%TCP.flags%")
proto = IP
if IPv6 in packet:
proto = IPv6
if flags == "A" and not self.ignore_packet(packet, proto):
src_mac = packet[Ether].src
dst_mac = packet[Ether].dst
src_ip = packet[proto].src
dst_ip = packet[proto].dst
src_port = packet[TCP].sport
dst_port = packet[TCP].dport
seq = packet[TCP].seq
ack = packet[TCP].ack
if self.verbose:
print("RST from %s:%s (%s) --> %s:%s (%s) w/ %s" % (src_ip, src_port, src_mac, dst_ip, dst_port, dst_mac, ack))
if self.noisy:
self.send(self.build_packet(src_mac, dst_mac, src_ip, dst_ip, src_port, dst_port, seq, proto))
self.send(self.build_packet(dst_mac, src_mac, dst_ip, src_ip, dst_port, src_port, ack, proto))
def stop_cond(self, _):
return self.stop_sniffing
def launch(self):
success = self.setup()
if not success:
return
self.s.bind((self.iface, 0))
conf.sniff_promisc = True
sniff(filter='tcp', prn=self.callback, store=0, stop_filter=self.stop_cond)
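# --- Added illustration (not part of the original module) ---
# build_packet() forges a bare TCP RST at layer 2; the same packet can be built
# directly with scapy (sketch only, addresses and ports are placeholders):
#
#     rst = Ether(src='aa:bb:cc:dd:ee:01', dst='aa:bb:cc:dd:ee:02', type=0x800) / \
#           IP(src='10.0.0.1', dst='10.0.0.2') / \
#           TCP(sport=12345, dport=80, seq=1000, flags="R")
#     sendp(rst, iface='eth0')  # or push str(rst) through the raw socket as above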
|
ecthros/pina-colada
|
capabilities/dos/tcpkiller.py
|
Python
|
mit
| 11,912
|
"""Chapter 23 Practice Questions
Answers Chapter 23 Practice Questions via Python code.
"""
def main():
# 1. What is the difference between a symmetric cipher and an asymmetric
# cipher?
# Hint: Check page 336
message = ".noitpyrced dna noitpyrcne rof yek emas eht esu taht srehpiC :cirtemmyS"
message2 = ".noitpyrced rof rehtona dna noitpyrcne rof yek eno esu taht srehpiC :cirtemmysA"
#print(blank[::-1]) # Fill in the blank
#print(blank[::-1])
# 2. Alice generates a public key and a private key. Unfortunately, she later
# loses her private key.
# a. Will other people be able to send her encrypted messages?
# b. Will she be able to decrypt messages previously sent to her?
# c. Will she be able to digitally sign documents?
# d. Will other people be able to verify her previously signed documents?
# Hint: Check pages 336 and 338 - 339
yesno = ["Yes", "No"]
print("a.: %s" % yesno[8 * 0 + 4 * 5 * 0])
print("b.: %s" % yesno[3 + 7 - 6 - 3])
print("c.: %s" % yesno[10 * 10 // 50 - 1])
print("d.: %s" % yesno[100 // 25 + 6 - 5 * 2])
# 3. What are authentication and confidentiality? How are they different?
# Hint: Check page 338
# Don't do this - imports should be at the top of the file
import pythontutorials.books.CrackingCodes.Ch01.caesarCipher
message = "L65spy5tnl5tzy:H13zzqH5sl5H8szH0z6'3pHnzxx6ytnl5tyrH8t5sHt4H8szH5sp0H4l0H5sp0Hl3pK" # Key 11
message2 = "O1zrupqz6umxu6 :Iwqq2uzsI6tqIyq55msqImI5qo4q6L" # Key 12
diff = "X99Tz6?52ABT6 TC52Ty!!8T?A!E612Tz! 3612 C6x96CH,TyDCTxDC52 C6zxC6! T6BT3A2.D2 C9HTyxB21T! TF5!T5xBTC52TA645CT82HW" # Key 23
#print(books.CrackingCodes.Ch01.caesarCipher.decryptMessage(blank, blank)) # Fill in the blanks
#print(books.CrackingCodes.Ch01.caesarCipher.decryptMessage(blank, blank))
#print(books.CrackingCodes.Ch01.caesarCipher.decryptMessage(blank, blank))
# 4. What is non-repudiation?
# Hint: Check page 339
# Don't do this - imports should be at the top of the file
import pythontutorials.books.CrackingCodes.Ch20.vigenereDictionaryHacker
message = "Klt axirtvhrv xwuw aofmcav awi kis tchufvtx d uelaotv adh'w je tjzr ks syqg anbvbimca wpam usfjevy db a eihri xxgh."
#print(books.CrackingCodes.Ch20.vigenereDictionaryHacker.hackVigenereDictionary(blank)) # Fill in the blank
# If PracticeQuestions.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
|
JoseALermaIII/python-tutorials
|
pythontutorials/books/CrackingCodes/Ch23/PracticeQuestions.py
|
Python
|
mit
| 2,535
|
# -*- coding: utf-8 -*-
# Copyright
DB_USER = 'kungsliljans'
|
jimbao/johanochida
|
backend/__init__.py
|
Python
|
mit
| 61
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewayBackendHealth(Model):
"""List of ApplicationGatewayBackendHealthPool resources.
:param backend_address_pools:
:type backend_address_pools:
list[~azure.mgmt.network.v2018_01_01.models.ApplicationGatewayBackendHealthPool]
"""
_attribute_map = {
'backend_address_pools': {'key': 'backendAddressPools', 'type': '[ApplicationGatewayBackendHealthPool]'},
}
def __init__(self, *, backend_address_pools=None, **kwargs) -> None:
super(ApplicationGatewayBackendHealth, self).__init__(**kwargs)
self.backend_address_pools = backend_address_pools
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/application_gateway_backend_health_py3.py
|
Python
|
mit
| 1,134
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def SessionManagerLocalTicket(vim, *args, **kwargs):
'''This data object type contains the user name and location of the file
containing the password that clients can use for one-time logon to a server.'''
obj = vim.client.factory.create('ns0:SessionManagerLocalTicket')
# do some validation checking...
if (len(args) + len(kwargs)) < 2:
        raise IndexError('Expected at least 2 arguments, got: %d' % (len(args) + len(kwargs)))
required = [ 'passwordFilePath', 'userName' ]
optional = [ 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
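# --- Added usage sketch (not part of the original module) ---
# The two required arguments can be passed positionally or by keyword; the
# `vim` service instance and the file path below are assumed placeholders.
#
#     ticket = SessionManagerLocalTicket(vim,
#                                        passwordFilePath='/tmp/vmware-ticket',
#                                        userName='root')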
|
xuru/pyvisdk
|
pyvisdk/do/session_manager_local_ticket.py
|
Python
|
mit
| 1,133
|
import re
import json
from csv import writer
from parsel import Selector
import os.path
import fnmatch
import glob2
from multiprocessing import Pool
from django.core.management.base import BaseCommand
def _parse_me(base_fname):
json_fname = "{}.json".format(base_fname)
html_fname = "{}.html".format(base_fname)
base_fname = os.path.basename(base_fname)
guid = base_fname[-36:]
name = base_fname[:-36].replace("_", " ").strip()
try:
with open(json_fname, "r") as fp:
data = json.load(fp)
with open(html_fname, "r") as fp:
raw_html = fp.read()
html = Selector(raw_html)
except ValueError:
print(
"File {} or it's HTML counterpart cannot be parsed".format(
json_fname))
return (name, guid)
except FileNotFoundError:
print(
"File {} or it's HTML counterpart cannot be found".format(
json_fname))
return (name, guid)
try:
data = data["data"]
except KeyError:
print("API brainfart: {}, {}".format(guid, base_fname))
return (name, guid)
if "step_0" not in data:
print("Bad header format: {}, {}".format(guid, base_fname))
return (name, guid)
return None
class Command(BaseCommand):
number_of_processes = 8
help = ('Checks the file storage for broken files')
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def add_arguments(self, parser):
parser.add_argument('file_path')
parser.add_argument('csv_out')
def handle(self, *args, **options):
base_dir = options['file_path']
missing_files = options['csv_out']
self.stdout.write("Gathering JSON documents from {}".format(base_dir))
jsons = []
for root, _, filenames in os.walk(base_dir):
for filename in fnmatch.filter(filenames, '*.json'):
jsons.append(os.path.join(root, filename))
htmls = []
for root, _, filenames in os.walk(base_dir):
for filename in fnmatch.filter(filenames, '*.html'):
htmls.append(os.path.join(root, filename))
self.stdout.write("Gathered {} JSON documents, {} HTML documents".format(
len(jsons), len(htmls)))
docs_to_check = (
set(j.replace(".json", "").lower() for j in jsons) |
set(h.replace(".html", "").lower() for h in htmls)
)
my_tiny_pool = Pool(self.number_of_processes)
result = list(
filter(
None,
my_tiny_pool.map(_parse_me, docs_to_check)
)
)
with open(missing_files, "w") as fp:
w = writer(fp)
for r in result:
w.writerow(r)
self.stdout.write(
'Found {} inconsistent or broken items'.format(len(result)))
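# --- Added usage note (not part of the original module) ---
# Invoked as a Django management command; the two positional arguments map to
# add_arguments() above (paths below are placeholders):
#
#     python manage.py find_missing /path/to/declaration/files missing.csv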
|
dchaplinsky/declarations.com.ua
|
declarations_site/catalog/management/commands/find_missing.py
|
Python
|
mit
| 2,935
|
#!/usr/bin/python3 -S
# -*- coding: utf-8 -*-
"""
`Unit tests for cargo.builder.create_user`
--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--
   2016 Jared Lunde © The MIT License (MIT)
http://github.com/jaredlunde
"""
import unittest
import random
from cargo import fields
from cargo.builder import create_cast
from cargo.builder.fields import Column, find_column
from vital.debug import line
from unit_tests import configure
class TestColumn(unittest.TestCase):
'''def test_init(self):
for field in dir(fields):
field = getattr(fields, field)
if hasattr(field, 'OID') and field is not fields.Field:
line('-')
print(field.__name__, '|', field.OID)
try:
field_ = field()
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
if hasattr(field, 'maxlen'):
try:
field_ = field(maxlen=250)
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
if hasattr(field, 'minlen'):
try:
field_ = field(maxlen=250, minlen=3)
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
if hasattr(field, 'minval'):
try:
field_ = field(minval=14)
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
if hasattr(field, 'maxval'):
try:
field_ = field(minval=14, maxval=32)
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
if hasattr(field, 'length'):
try:
field_ = field(length=15)
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
if hasattr(field, 'types'):
field_ = field(types=('foo', 'bar', 'baz'))
field_.field_name = 'foo'
c = find_column(field_)
print(c)
def test_bit(self):
pass
def test_varbit(self):
pass
def test_text(self):
pass
def test_numeric(self):
for field in [fields.Decimal, fields.Currency] * 16:
decimal_places = random.choice([-1, 0, 5])
precision = random.choice([-1, 5, 14, 18])
field_ = field(decimal_places=decimal_places, digits=precision)
field_.field_name = 'foo'
c = find_column(field_)
print(c)
for field in [fields.Float, fields.Double] * 10:
precision = random.choice([-1, 5, 14, 18, 21])
field_ = field(decimal_places=precision)
field_.field_name = 'foo'
c = find_column(field_)
print(c)
def test_int(self):
pass
def test_enum(self):
pass
def test_varchar(self):
pass
def test_char(self):
pass
def test_array(self):
for field in dir(fields):
field = getattr(fields, field)
if hasattr(field, 'OID') and field is not fields.Field and \
field is not fields.Array:
try:
line('-')
print(field.__name__, 'Array |', field.OID)
field_ = fields.Array(field(),
dimensions=random.randint(1, 3),
maxlen=random.randint(-1, 2),
minlen=random.randint(0, 4))
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
def test_encrypted(self):
for field in dir(fields):
field = getattr(fields, field)
if hasattr(field, 'OID') and field is not fields.Field and \
field is not fields.Encrypted:
try:
line('-')
print(field.__name__, 'Encrypted |', field.OID)
field_ = fields.Encrypted(fields.Encrypted.generate_secret(),
field())
field_.field_name = 'foo'
c = find_column(field_)
print(c)
except TypeError:
pass
def test_encrypted_array(self):
pass'''
if __name__ == '__main__':
# Unit test
configure.run_tests(TestColumn, failfast=True, verbosity=2)
|
jaredlunde/cargo-orm
|
unit_tests/builders/ColumnBuilder.py
|
Python
|
mit
| 5,201
|
import global_data as g
from util import *
import sys
import time
import select
import socket
import Queue
import os
from input_handler import HandleUserInput
import random
import hashlib
import json
import threading
import atexit, signal
class Connection:
def __init__(self, g_data):
self.auth = False
self.queue = Queue.Queue()
self.g_data = g_data
self.attached = False
def GDatas():
abs_prefix = os.path.join(os.path.dirname(__file__), "../data")
with open(abs_prefix + '/config.json','r') as f:
conf = json.load(f)
host = conf['host'] if 'host' in conf else 'localhost'
port = conf['port'] if 'port' in conf else 10001
g_datas = []
for i in xrange(len(conf["bots"])):
if "skip" in conf["bots"][i] and conf["bots"][i]:
continue
g_datas.append(g.GlobalData(g_data = None if len(g_datas) == 0 else g_datas[0], conf=conf["bots"][i], dbHost=conf["dbHost"]))
g_datas[-1].TraceInfo("Initialized!")
for i in xrange(len(g_datas)):
if g_datas[i].invalid:
g_datas[-1].TraceWarn("g_data INVALID!")
assert False
assert len(g_datas) != 0
return g_datas
def StartServer():
abs_prefix = os.path.join(os.path.dirname(__file__), "../data")
with open(abs_prefix + '/config.json','r') as f:
conf = json.load(f)
host = conf['host'] if 'host' in conf else 'localhost'
port = conf['port'] if 'port' in conf else 10001
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.setblocking(0)
g_datas = GDatas()
sys.excepthook = exceptionTrace
server_address = (host, port)
server.bind(server_address)
server.listen(5)
g_datas[0].TraceInfo("Starting up Tiara Boom Server on %s, port %s!" % server_address)
inputs = [server]
outputs = []
cons = {}
while inputs:
for g_data in g_datas:
g_data.SocialLogic().Act()
############# Begin server stuff ##################
readable, writable, exceptional = select.select(inputs, outputs, inputs, 60)
for s in readable:
if s in exceptional:
continue
if s is server:
connection, client_address = s.accept()
g_datas[0].TraceInfo('new connection from %s:%s' % client_address)
connection.setblocking(0)
inputs.append(connection)
cons[connection] = Connection(g_datas[0])
else:
data = s.recv(1024)
data = data.strip()
if data and not data in ["quit","_upgrade","_kill"]:
if cons[s].auth:
cons[s].g_data.TraceInfo('received "%s" from %s' % (data, s.getpeername()))
cons[s].queue.put(HandleUserInput(cons[s].g_data, data))
elif not cons[s].attached: # we haven't yet decided who to assign you to
if data in [g_data.myName for g_data in g_datas]:
this_bot = [g_data for g_data in g_datas if data == g_data.myName][0]
this_bot.TraceDebug("received hi message")
cons[s].attached = True
cons[s].g_data = this_bot
cons[s].pad = random.randrange(0,2**32)
cons[s].queue.put(str(cons[s].pad))
else: # you're assigned a bot but not authorized
h = hashlib.sha256(str(cons[s].pad))
h.update(cons[s].g_data.password)
if data == h.hexdigest():
cons[s].auth = True
cons[s].queue.put("welcome")
cons[s].g_data.TraceInfo('%s:%s entered the password' % s.getpeername())
else:
cons[s].queue.put("password denied")
cons[s].g_data.TraceInfo('%s:%s failed the password' % s.getpeername())
if s not in outputs:
outputs.append(s)
elif not data or data == "quit" or not cons[s].auth:
try:
cons[s].g_data.TraceInfo('closing %s:%s' % s.getpeername())
except Exception as e:
cons[s].g_data.TraceInfo("closing, cant get peer name")
if s in outputs:
outputs.remove(s)
inputs.remove(s)
s.close()
del cons[s]
elif data == "_upgrade":
s.close()
server.close()
os.system("git pull --rebase origin master")
os.execl(sys.executable, sys.executable, * sys.argv)
else:
assert data == "_kill"
s.close()
server.close()
cons[s].g_data.TraceInfo("Recieved _kill, going down NOW")
assert False
for s in writable:
if s in exceptional:
continue
try:
next_msg = cons[s].queue.get_nowait()
except Exception as e:
if s in outputs:
outputs.remove(s)
else:
cons[s].g_data.TraceInfo('sending "%s" to %s' % (next_msg[:min(50,len(next_msg))], s.getpeername()))
s.send(next_msg)
for s in exceptional:
try:
cons[s].g_data.TraceInfo('closing %s:%s' % s.getpeername())
except Exception as e:
pass
inputs.remove(s)
if s in outputs:
outputs.remove(s)
s.close()
del cons[s]
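# --- Added client-side sketch (not part of the original module) ---
# The authentication handshake implemented above: the client sends the bot
# name, receives a random pad, and answers with sha256(str(pad) + password).
# Host, port, bot name and password below are placeholders.
#
#     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#     s.connect(('localhost', 10001))
#     s.send('my_bot_name')
#     pad = s.recv(1024).strip()
#     s.send(hashlib.sha256(pad + 'my_password').hexdigest())
#     assert s.recv(1024).strip() == 'welcome'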
|
jvictor0/TiaraBoom
|
tiara/server.py
|
Python
|
mit
| 6,107
|
#!/home/pi/.venv/jns/bin/python
#
# last modified 2019/05/26
#
# Python helper script to download Julia 1.1.0 binaries
# not meant to be executed manually
# https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url
#
FILE_ID = '1fj6pNAJgmUD7bsSXqh8ocC1wESx8jkRh'
DESTINATION = './julia-1.1.0-arm32bit.zip'
import requests
def download_file_from_google_drive(id, destination):
URL = "https://docs.google.com/uc?export=download"
session = requests.Session()
response = session.get(URL, params = { 'id' : id }, stream = True)
token = get_confirm_token(response)
if token:
params = { 'id' : id, 'confirm' : token }
response = session.get(URL, params = params, stream = True)
save_response_content(response, destination)
def get_confirm_token(response):
for key, value in response.cookies.items():
if key.startswith('download_warning'):
return value
return None
def save_response_content(response, destination):
CHUNK_SIZE = 32768
with open(destination, "wb") as f:
for chunk in response.iter_content(CHUNK_SIZE):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
if __name__ == "__main__":
file_id = FILE_ID
destination = DESTINATION
download_file_from_google_drive(file_id, destination)
|
kleinee/jns
|
scripts/dnld_julia-1.1.0-arm32bit.py
|
Python
|
mit
| 1,372
|
from test_fena.test_common import test_cmd
def test_simple_cmds():
test_cmd("tag @s + _lol", "tag @s add fena.lol")
test_cmd("tag @s - _lol", "tag @s remove fena.lol")
test_cmd(r"tag @s + _lol {Invulnerable:1b}", expect_error=True)
test_cmd(r"tag @s - _lol {Invulnerable:1b}", expect_error=True)
test_cmd("tag target + _lol", expect_error=True)
test_cmd("tag target - _lol", expect_error=True)
test_cmd("particle happy_villager ~ ~ ~")
test_cmd("particle minecraft:happy_villager ~ ~ ~")
# TODO: rework of the entire damn parser (must create whitespace tokens with replacement=' ')
# test_cmd(r'give @s diamond_pickaxe{display:{Name:"\"test\""}}')
# test_cmd(r'give @s minecraft:diamond_pickaxe{display:{Name:"\"test\""}}')
test_cmd(r'give @s minecraft:diamond_pickaxe')
|
Aquafina-water-bottle/Command-Compiler-Unlimited
|
test_fena/v1_13/test_simple_cmds.py
|
Python
|
mit
| 890
|
import merger
class Analyzer():
def analyze(self, path):
print 'Analyzing ' + path + '...'
return merger.main(path, 10, 4)
|
h2oloopan/easymerge
|
EasyMerge/merger/analyzer.py
|
Python
|
mit
| 145
|
import six
from grab import Grab
from grab.spider import Spider, Task
from grab.spider.error import SpiderError, FatalError
from tests.util import BaseGrabTestCase, build_spider
class SimpleSpider(Spider):
def task_baz(self, grab, unused_task):
self.stat.collect('SAVED_ITEM', grab.doc.body)
class BasicSpiderTestCase(BaseGrabTestCase):
def setUp(self):
self.server.reset()
def test_spider(self):
self.server.response['get.data'] = 'Hello spider!'
self.server.response['sleep'] = 0
bot = build_spider(SimpleSpider)
bot.setup_queue()
bot.add_task(Task('baz', self.server.get_url()))
bot.run()
self.assertEqual(b'Hello spider!',
bot.stat.collections['SAVED_ITEM'][0])
def test_network_limit(self):
class CustomSimpleSpider(SimpleSpider):
def create_grab_instance(self, **kwargs):
return Grab(connect_timeout=1, timeout=1)
self.server.response['get.data'] = 'Hello spider!'
self.server.response['sleep'] = 1.1
bot = build_spider(CustomSimpleSpider, network_try_limit=1)
bot.setup_queue()
#bot.setup_grab(connect_timeout=1, timeout=1)
bot.add_task(Task('baz', self.server.get_url()))
bot.run()
self.assertEqual(bot.stat.counters['spider:request-network'], 1)
bot = build_spider(CustomSimpleSpider, network_try_limit=2)
bot.setup_queue()
#bot.setup_grab(connect_timeout=1, timeout=1)
bot.add_task(Task('baz', self.server.get_url()))
bot.run()
self.assertEqual(bot.stat.counters['spider:request-network'], 2)
def test_task_limit(self):
class CustomSimpleSpider(SimpleSpider):
def create_grab_instance(self, **kwargs):
return Grab(connect_timeout=1, timeout=1)
self.server.response['get.data'] = 'Hello spider!'
self.server.response['sleep'] = 1.1
bot = build_spider(CustomSimpleSpider, network_try_limit=1)
#bot.setup_grab(connect_timeout=1, timeout=1)
bot.setup_queue()
bot.add_task(Task('baz', self.server.get_url()))
bot.run()
self.assertEqual(bot.stat.counters['spider:task-baz'], 1)
bot = build_spider(SimpleSpider, task_try_limit=2)
bot.setup_queue()
bot.add_task(Task('baz', self.server.get_url(), task_try_count=3))
bot.run()
self.assertEqual(bot.stat.counters['spider:request-network'], 0)
def test_task_retry(self):
self.server.response['get.data'] = 'xxx'
self.server.response_once['code'] = 403
bot = build_spider(SimpleSpider)
bot.setup_queue()
bot.add_task(Task('baz', self.server.get_url()))
bot.run()
self.assertEqual(b'xxx', bot.stat.collections['SAVED_ITEM'][0])
def test_generator(self):
server = self.server
class TestSpider(Spider):
def task_generator(self):
for _ in six.moves.range(1111):
yield Task('page', url=server.get_url())
def task_page(self, unused_grab, unused_task):
self.stat.inc('count')
bot = build_spider(TestSpider)
bot.run()
self.assertEqual(bot.stat.counters['count'], 1111)
def test_get_spider_name(self):
class TestSpider(Spider):
pass
self.assertEqual('test_spider', TestSpider.get_spider_name())
class TestSpider2(Spider):
spider_name = 'foo_bar'
self.assertEqual('foo_bar', TestSpider2.get_spider_name())
def test_handler_result_none(self):
class TestSpider(Spider):
def prepare(self):
# pylint: disable=attribute-defined-outside-init
self.points = []
def task_page(self, unused_grab, unused_task):
yield None
bot = build_spider(TestSpider)
bot.setup_queue()
bot.add_task(Task('page', url=self.server.get_url()))
bot.run()
def test_fallback_handler_by_default_name(self):
class TestSpider(Spider):
def prepare(self):
# pylint: disable=attribute-defined-outside-init
self.points = []
def task_page(self, grab, task):
pass
def task_page_fallback(self, unused_task):
self.points.append(1)
self.server.response['code'] = 403
bot = build_spider(TestSpider, network_try_limit=1)
bot.setup_queue()
bot.add_task(Task('page', url=self.server.get_url()))
bot.run()
self.assertEqual(bot.points, [1])
def test_fallback_handler_by_fallback_name(self):
class TestSpider(Spider):
def prepare(self):
# pylint: disable=attribute-defined-outside-init
self.points = []
def task_page(self, grab, task):
pass
def fallback_zz(self, unused_task):
self.points.append(1)
self.server.response['code'] = 403
bot = build_spider(TestSpider, network_try_limit=1)
bot.setup_queue()
bot.add_task(Task('page', url=self.server.get_url(),
fallback_name='fallback_zz'))
bot.run()
self.assertEqual(bot.points, [1])
def test_check_task_limits_invalid_value(self):
class TestSpider(Spider):
def task_page(self, grab, task):
pass
def check_task_limits(self, task):
return False, 'zz'
bot = build_spider(TestSpider)
bot.setup_queue()
bot.add_task(Task('page', url=self.server.get_url(),
fallback_name='fallback_zz'))
self.assertRaises(SpiderError, bot.run)
def test_handler_result_invalid(self):
class TestSpider(Spider):
def prepare(self):
# pylint: disable=attribute-defined-outside-init
self.points = []
def task_page(self, unused_grab, unused_task):
yield 1
bot = build_spider(TestSpider)
bot.setup_queue()
bot.add_task(Task('page', url=self.server.get_url()))
#bot.run()
#self.assertEqual(1, bot.stat.counters['spider:error-spidererror'])
self.assertRaises(SpiderError, bot.run)
def test_task_queue_clear(self):
class TestSpider(Spider):
def task_page(self, unused_grab, unused_task):
self.stop()
bot = build_spider(TestSpider)
bot.setup_queue()
for _ in six.moves.range(5):
bot.add_task(Task('page', url=self.server.get_url()))
self.assertEqual(5, bot.task_queue.size())
bot.run()
self.assertEqual(0, bot.task_queue.size())
def test_fatal_error(self):
class TestSpider(Spider):
def task_page(self, unused_grab, unused_task):
raise FatalError
bot = build_spider(TestSpider)
bot.setup_queue()
bot.add_task(Task('page', url=self.server.get_url()))
self.assertRaises(FatalError, bot.run)
|
istinspring/grab
|
tests/spider.py
|
Python
|
mit
| 7,171
|
# Generated by Django 2.2.4 on 2019-10-06 23:28
from django.db import migrations, models
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('text_search', '0009_annotatedtoken_speech_cat'),
]
operations = [
migrations.CreateModel(
name='SearchFacet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('key', models.CharField(choices=[('manuscript_number', 'manuscript_number'), ('lemma', 'lemma'), ('token', 'token'), ('section_number', 'section_number'), ('pos', 'pos'), ('is_rubric', 'is_rubric'), ('verse_cat', 'verse_cat'), ('speech_cat', 'speech_cat')], max_length=32, unique=True)),
('label', models.CharField(max_length=32)),
('tooltip', models.CharField(blank=True, max_length=255)),
('description', wagtail.core.fields.RichTextField(blank=True)),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
]
|
kingsdigitallab/tvof-django
|
tvof/text_search/migrations/0010_searchfacet.py
|
Python
|
mit
| 1,225
|
'''Write a function find_longest_word() that takes a list of words and returns the length of the longest one.'''
def maps(x):
k=[]
for i in x:
k.append(len(i))
print max(k)
maps(['apple','orange','cat'])
|
garg10may/Python-for-Beginners-Solve-50-Exercises-Live
|
15.py
|
Python
|
mit
| 238
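The exercise above asks for a function that returns the length of the longest word, while the sample solution prints the maximum instead and uses Python 2 print syntax. A minimal Python 3 sketch that returns the value, with the function name taken from the exercise statement:

def find_longest_word(words):
    # length of the longest word in the list
    return max(len(word) for word in words)

print(find_longest_word(['apple', 'orange', 'cat']))  # 6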
|
from .settings import *
from .secrets_test import *
DATABASES['default']['NAME'] = ':memory:'
|
tomchuk/meetup
|
meetup/meetup/settings_test.py
|
Python
|
mit
| 95
|
#!/usr/bin/env python
import unittest
import random
import string
from vredis import VRedis
from mockredis import MockConnectionPool
class VRedisTest(unittest.TestCase):
def setUp(self):
# setup VRedis
self.vr = VRedis(
hosts=[
('1', 1, 85),
('2', 2, 170),
('3', 3, 255)
],
cls=MockConnectionPool
)
def tearDown(self):
self.vr.flushall()
def testhash(self):
results = {}
for i in range(256):
results[i] = 0
for i in range(256 * 1000):
# hash a random string
hash = self.vr.hash(''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(10)))
results[hash] += 1
for i in range(256):
self.assertIn(results[i], range(800, 1200))
def test_get_server(self):
s = self.vr.get_server("1") # hash starts with c4 - 196
self.assertEquals(s, "3:3")
s = self.vr.get_server("2") # hash starts with c8 - 200
self.assertEquals(s, "3:3")
s = self.vr.get_server("3") # hash starts with ec - 236
self.assertEquals(s, "3:3")
s = self.vr.get_server("4") # hash starts with a8 - 168
self.assertEquals(s, "2:2")
s = self.vr.get_server("6") # hash starts with 16 - 22
self.assertEquals(s, "1:1")
def test_get(self):
self.vr.set("test", "1")
self.assertEquals(self.vr.get("test"), "1")
self.assertEquals(self.vr.dbsize(), 1)
def test_mget(self):
data = {
"1": 1, # server 3
"2": 2, # server 3
"3": 3, # server 3
"4": 4, # server 2
"6": 6 # server 1
}
self.assertEquals(self.vr.mset(data), True)
self.assertEquals(self.vr.dbsize(), len(data))
self.assertEquals(self.vr.mget(data.keys()), data.values())
for i in data:
self.assertEquals(self.vr.get(i), data[i])
def test_mget_individual(self):
data = {
"1": 1, # server 3
"2": 2, # server 3
"3": 3, # server 3
"4": 4, # server 2
"6": 6 # server 1
}
for i in data:
self.assertEquals(self.vr.set(i, data[i]), True)
self.assertEquals(self.vr.mget(data.keys()), data.values())
for i in data:
self.assertEquals(self.vr.get(i), data[i])
self.assertEquals(self.vr.dbsize(), len(data))
def test_keys(self):
data = {
"1": 1, # server 3
"2": 2, # server 3
"3": 3, # server 3
"4": 4, # server 2
"6": 6 # server 1
}
self.assertEquals(self.vr.mset(data), True)
self.assertEquals(self.vr.dbsize(), len(data))
keys = self.vr.keys("*")
self.assertEquals(len(keys), len(data.keys()))
for key in data.keys():
self.assertTrue(key in keys)
if __name__ == '__main__':
unittest.main()
|
50onRed/vredis
|
tests/vredistest.py
|
Python
|
mit
| 3,062
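The fixture above registers three hosts with upper bounds 85, 170 and 255, and test_get_server expects keys whose first hash byte is 196, 200 or 236 to land on host 3, 168 on host 2, and 22 on host 1. A hedged sketch of the selection rule those expectations imply (not the actual vredis implementation): pick the first host, in ascending bound order, whose bound covers the hash value.

def pick_server(hosts, hash_value):
    # hosts are (name, db, upper_bound) tuples, as in setUp() above;
    # hash_value is assumed to be in the range 0-255
    for name, db, upper_bound in sorted(hosts, key=lambda h: h[2]):
        if hash_value <= upper_bound:
            return "{0}:{1}".format(name, db)
    raise ValueError("hash value outside the configured bounds")

hosts = [('1', 1, 85), ('2', 2, 170), ('3', 3, 255)]
print(pick_server(hosts, 196))  # 3:3
print(pick_server(hosts, 168))  # 2:2
print(pick_server(hosts, 22))   # 1:1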
|
import sys
import pytest
import numpy as np
import plotly.graph_objs as go
import plotly.io as pio
from plotly.io._utils import plotly_cdn_url
if sys.version_info >= (3, 3):
import unittest.mock as mock
from unittest.mock import MagicMock
else:
import mock
from mock import MagicMock
# fixtures
# --------
@pytest.fixture
def fig1(request):
return go.Figure(
data=[
{
"type": "scatter",
"y": np.array([2, 1, 3, 2, 4, 2]),
"marker": {"color": "green"},
}
],
layout={"title": {"text": "Figure title"}},
)
# HTML
# ----
def test_versioned_cdn_included(fig1):
assert plotly_cdn_url() in pio.to_html(fig1, include_plotlyjs="cdn")
def test_html_deterministic(fig1):
div_id = "plotly-root"
assert pio.to_html(fig1, include_plotlyjs="cdn", div_id=div_id) == pio.to_html(
fig1, include_plotlyjs="cdn", div_id=div_id
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/tests/test_io/test_html.py
|
Python
|
mit
| 965
|
# David Millar - July 17, 2016
# dave.millar@uwyo.edu
# NOTES: - ggplot code at the end is just used for evaluation and plotting, and can be omitted
# or commented out when integrating this module into TREES_Py_R
# - Non-linear least squares regression is currently used to determine empirical model
# parameter estimates.
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::#
# #
# Module for estimating the Gs_ref parameter using stomatal conductance #
# and vapor pressure deficit data. #
# #
#:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::;;:::::::#
# clear everything out of memory
#rm(list=ls())
# call to ggplot package
library(ggplot2)
#-----------------------------------------------------------------------------------------------------
# set the current working directory - make sure to change this as needed
#
# read in the vapor pressure deficit (VPD) and non-water-stressed, non-photosynthesis-limited
# canopy conductance (Gs) calculated from sap flux measurements.
# D = atmospheric vapor pressure deficit (kPa)
# Gs = non-water-stressed, non-photosynthesis-limited stomatal conductance (mol m^-2 s^-1)
Gs_data = read.csv("PICO_atm_demand_data.csv")
names(Gs_data)=c("number","D_obs", "Gs_obs")
D_obs = Gs_data["D_obs"]
Gs_obs = Gs_data["Gs_obs"]
#-----------------------------------------------------------------------------------------------------
#::::::::::::::::::::::::::::::::#
# Gs_ref estimation function #
#::::::::::::::::::::::::::::::::#
def Gs_ref_func(D_obs,Gs_obs):
# fit Gs_ref parameter to observed Gs and D data
Gs_ref.fit = nls(Gs_obs ~ Gs_ref - (0.6*Gs_ref)*log(D_obs), start = list(Gs_ref = 0.1))
Gs_ref.paras = coef(Gs_ref.fit)
Gs_ref = Gs_ref.paras[1]
return(Gs_ref)
#-----------------------------------------------------------------------------------------------------
#::::::::::::::::::::::::::::::::::::::::::::::::#
# create timeseries plot of obs and sim sfd #
#::::::::::::::::::::::::::::::::::::::::::::::::#
#
# Gs_ref <- Gs_ref_func(D_obs,Gs_obs)
#
# # simulate Gs in absence of water supply and/or photosynthetic limitation
# Gs_sim <- Gs_ref - (0.6*Gs_ref)*log(D_obs)
#
# Gs_obs <- Gs_obs
# D_obs <- D_obs
#
# #calculate R^2
# eval = summary(lm(Gs_sim~Gs_obs))
# R2 <- eval$r.squared
# R2
#
# ggdata <- cbind.data.frame(D_obs,Gs_sim,Gs_obs)
#
# Gs_test_plot <- ggplot(ggdata) +
# geom_point(aes(x=D_obs, y=Gs_obs, shape ='observed', linetype = 'observed', color ='observed',size ='observed')) +
# geom_line(aes(x=D_obs, y=Gs_sim, shape ='simulated', linetype = 'simulated', color ='simulated',size ='simulated')) +
# scale_shape_manual(values=c(19, NA)) +
# scale_linetype_manual(values=c(0, 1)) +
# scale_size_manual(values=c(4,1.5)) +
# scale_color_manual(values=c("blue","springgreen3")) +
# xlab("vapor pressure deficit (kPa)") +
# ylab("canopy conductance") +
# ylab(expression(paste("canopy conductance (mol ", m^-2," ",s^-1,")"))) +
# ggtitle(expression(paste("fitting ", Gs[ref]))) +
# theme(axis.text=element_text(size=18),
# strip.text=element_text(size=18),
# title=element_text(size=18),
# text=element_text(size=18),
# legend.text=element_text(size=18),
# legend.title=element_blank(),
# legend.key = element_blank())
#
# Gs_test_plot
#
# #-----------#
# # save plot #
# #-----------#
#
# #ggsave("CP_sf_decline_obs_and_sim_timeseries.png",width=10,height=4,units='in',dpi=500)
# #dev.off()
#
#
|
mcook42/Py_R_Converters
|
r2py-testfile.py
|
Python
|
mit
| 3,815
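The script above is R code kept in a .py test file for the converter; its core is a one-parameter non-linear least squares fit of Gs_obs = Gs_ref - 0.6*Gs_ref*log(D_obs). A hedged Python sketch of the same fit using scipy.optimize.curve_fit (my choice of tool, not the repo's), assuming the same CSV layout (number, D_obs, Gs_obs) and the same starting value of 0.1:

import numpy as np
import pandas as pd
from scipy.optimize import curve_fit

def gs_model(d, gs_ref):
    # Gs = Gs_ref - 0.6 * Gs_ref * ln(D)
    return gs_ref - 0.6 * gs_ref * np.log(d)

gs_data = pd.read_csv("PICO_atm_demand_data.csv")
gs_data.columns = ["number", "D_obs", "Gs_obs"]

popt, _ = curve_fit(gs_model, gs_data["D_obs"], gs_data["Gs_obs"], p0=[0.1])
gs_ref = popt[0]
print(gs_ref)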
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest import Configuration
from .version import VERSION
class AutoSuggestClientConfiguration(Configuration):
"""Configuration for AutoSuggestClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param endpoint: Supported Cognitive Services endpoints (protocol and
hostname, for example: "https://westus.api.cognitive.microsoft.com",
"https://api.cognitive.microsoft.com").
:type endpoint: str
:param credentials: Subscription credentials which uniquely identify
client subscription.
:type credentials: None
"""
def __init__(
self, endpoint, credentials):
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
if credentials is None:
raise ValueError("Parameter 'credentials' must not be None.")
base_url = '{Endpoint}/bing/v7.0'
super(AutoSuggestClientConfiguration, self).__init__(base_url)
# Starting Autorest.Python 4.0.64, make connection pool activated by default
self.keep_alive = True
self.add_user_agent('azure-cognitiveservices-search-autosuggest/{}'.format(VERSION))
self.endpoint = endpoint
self.credentials = credentials
|
Azure/azure-sdk-for-python
|
sdk/cognitiveservices/azure-cognitiveservices-search-autosuggest/azure/cognitiveservices/search/autosuggest/_configuration.py
|
Python
|
mit
| 1,750
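Per the docstring above, the configuration only needs a Cognitive Services endpoint and a non-None credentials object; both are validated in __init__ and the Bing path suffix is kept as a base URL template. A minimal usage sketch (the credentials object here is a stand-in; real code would pass an SDK credentials instance):

credentials = object()  # stand-in; any non-None credentials object passes the check
config = AutoSuggestClientConfiguration(
    endpoint="https://api.cognitive.microsoft.com",
    credentials=credentials,
)
# config.base_url holds the '{Endpoint}/bing/v7.0' template shown in __init__ above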
|
# Display file for Gravity
from pylab import *
output = atleast_2d(loadtxt('sample.txt'))
num_particles = (output.shape[1] - 3)//6
print(num_particles)
figure(figsize=(13, 6))
ion()
hold(False)
for i in xrange(0, output.shape[0]):
model = output[i, 3:-1]
x, y = model[0:num_particles], model[num_particles:2*num_particles]
vx, vy = model[3*num_particles:4*num_particles], model[4*num_particles:5*num_particles]
subplot(1,2,1)
plot(x, y, 'k.')
axis([-10, 10, -10, 10])
subplot(1,2,2)
plot(vx, vy, 'k.')
axis([-10, 10, -10, 10])
title(i+1)
draw()
ioff()
show()
|
eggplantbren/TwinPeaks3
|
Code/C++/display_gravity.py
|
Python
|
mit
| 574
|
'''
Translate a .strings file from one language to another.
'''
from __future__ import with_statement
import re, sys, time
import codecs, locale
import PyGlang
k_langPathRegEx = re.compile('.*/([^\.]+)\.lproj.+$')
k_valueRegEx = re.compile('"([^"]*)"(\s*=\s*)"([^"]*)";', re.UNICODE)
def DetectEncoding(filepath):
'''
Try to detect the file's encoding.
    If it's not utf-16, assume it's utf-8; this should work for ascii
    files because the first 128 characters are the same...
'''
with open(filepath, 'r') as f:
firstBytes = f.read(2)
if firstBytes == codecs.BOM_UTF16_BE:
return 'utf_16_be'
elif firstBytes == codecs.BOM_UTF16_LE:
return 'utf_16_le'
    #use utf_8_sig in case there is a BOM in the file
return 'utf_8_sig'
def LangFromPath(filepath):
'''Get the languages from a filepath'''
pathMatch = k_langPathRegEx.match(filepath)
if pathMatch:
return pathMatch.group(1)
def Translate(fromFilepath, toFilepath, utf8=False):
'''
Read a .strings file and localize it for the language of another .strings file.
The language of each file is determined by the what 'lproj' directory they reside in.
'''
#detect encoding of output for printing
language, output_encoding = locale.getdefaultlocale()
    #detect the encoding of the file
fromFileEncoding = 'utf_8' if utf8 else DetectEncoding(fromFilepath)
#get the languages
fromLang = LangFromPath(fromFilepath)
toLang = LangFromPath(toFilepath)
#regular expression
def transValue(regExMatch):
value = regExMatch.group(3)
transText = PyGlang.Translate(value, fromLang=fromLang, toLang=toLang, encoding=fromFileEncoding)
#TODO: only write this in command line mode
print '%s > %s' % (value.encode(output_encoding), transText.encode(output_encoding))
return '"%s"%s"%s";' % (regExMatch.group(1), regExMatch.group(2), transText)
#read the file
with codecs.open(fromFilepath, 'r', fromFileEncoding) as fromFile:
with codecs.open(toFilepath, 'w', fromFileEncoding) as toFile:
for eachLine in fromFile:
toFile.write(k_valueRegEx.sub(transValue, eachLine))
if __name__ == '__main__':
#TODO: add more robust options
startTime = time.time()
Translate(sys.argv[1], sys.argv[2], True)
print 'Translated in %.2f seconds' % (time.time()-startTime)
|
kgn/pyglang
|
PyGlang/TranslateDotStrings.py
|
Python
|
mit
| 2,467
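The heavy lifting above is k_valueRegEx, which splits each `"key" = "value";` line into key, separator and value so that only the value gets translated. A small illustration of what the three groups capture (the sample line is made up):

import re

k_valueRegEx = re.compile(r'"([^"]*)"(\s*=\s*)"([^"]*)";', re.UNICODE)

match = k_valueRegEx.search('"greeting" = "Hello";')
print(match.group(1))  # greeting
print(match.group(2))  # ' = ' (the separator, kept verbatim)
print(match.group(3))  # Hello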
|
# -*- coding: utf-8 -*-
# Copyright © 2012-2019 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Compilers for Nikola."""
|
okin/nikola
|
nikola/plugins/compile/__init__.py
|
Python
|
mit
| 1,170
|
N = int(input())
R = [int(x) for x in input().split()]
g = [R[0]]
i = 1
while i < N and g[0] == R[i]: i += 1
if i != N:
g.append(R[i])
i += 1
while i < N:
if g[-2] < g[-1] < R[i] or g[-2] > g[-1] > R[i]:
g[-1] = R[i]
elif g[-2] < g[-1] > R[i] or g[-2] > g[-1] < R[i]:
g.append(R[i])
i += 1
G = len(g)
print(G if G >= 3 else 0)
|
knuu/competitive-programming
|
atcoder/corp/codefes2014f_e.py
|
Python
|
mit
| 364
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test raw transaction RPCs (createrawtransaction, signrawtransaction,
# sendrawtransaction, decoderawtransaction), including multisig handling
# and sequence-number validation.
#
from test_framework.test_framework import DankcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(DankcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
#connect to a local machine for debugging
#url = "http://dankcoinrpc:DP6DvqZtqXarpeNWyN3LZTFchCCyCUuHwNF7E8pX99x1@%s:%d" % ('127.0.0.1', 18332)
#proxy = AuthServiceProxy(url)
#proxy.url = url # store URL on proxy for info
#self.nodes.append(proxy)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test(self):
#prepare some coins for multiple *rawtransaction commands
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
self.sync_all()
self.nodes[0].generate(5)
self.sync_all()
#########################################
# sendrawtransaction with missing input #
#########################################
        inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] #won't exist
outputs = { self.nodes[0].getnewaddress() : 4.998 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
rawtx = self.nodes[2].signrawtransaction(rawtx)
try:
rawtx = self.nodes[2].sendrawtransaction(rawtx['hex'])
except JSONRPCException as e:
assert("Missing inputs" in e.error['message'])
else:
assert(False)
#########################
# RAW TX MULTISIG TESTS #
#########################
# 2of2 test
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
mSigObjValid = self.nodes[2].validateaddress(mSigObj)
#use balance deltas instead of absolute values
bal = self.nodes[2].getbalance()
# send 1.2 PEPE to msig adr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
# 2of3 test from different nodes
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
addr3Obj = self.nodes[2].validateaddress(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
mSigObjValid = self.nodes[2].validateaddress(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
sPK = rawTx['vout'][0]['scriptPubKey']['hex']
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
        #THIS IS AN INCOMPLETE FEATURE
        #NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND COUNT IN THE BALANCE CALCULATION
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = False
for outpoint in rawTx['vout']:
if outpoint['value'] == Decimal('2.20000000'):
vout = outpoint
break
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises(JSONRPCException, self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises(JSONRPCException, self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
if __name__ == '__main__':
RawTransactionsTest().main()
|
dankcoin/dankcoin
|
qa/rpc-tests/rawtransactions.py
|
Python
|
mit
| 7,193
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Subscription.frequency_duration'
db.add_column('billing_subscription', 'frequency_duration',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Subscription.frequency_duration'
db.delete_column('billing_subscription', 'frequency_duration')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'billing.subscription': {
'Meta': {'object_name': 'Subscription'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'MONTHLY'", 'max_length': '10'}),
'frequency_duration': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'billing.usersubscription': {
'Meta': {'object_name': 'UserSubscription'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['billing.Subscription']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['billing']
|
artminster/artminster
|
contrib/billing/migrations/0003_auto__add_field_subscription_frequency_duration.py
|
Python
|
mit
| 5,697
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'QandaProfile'
db.delete_table('qanda_app_qandaprofile')
def backwards(self, orm):
# Adding model 'QandaProfile'
db.create_table('qanda_app_qandaprofile', (
('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='qanda_profile', unique=True, to=orm['auth.User'])),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('mugshot', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)),
('privacy', self.gf('django.db.models.fields.CharField')(default='registered', max_length=15)),
))
db.send_create_signal('qanda_app', ['QandaProfile'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'django_notify.notification': {
'Meta': {'ordering': "('-id',)", 'object_name': 'Notification', 'db_table': "'notify_notification'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_emailed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_viewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'message': ('django.db.models.fields.TextField', [], {}),
'occurrences': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_notify.Subscription']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'django_notify.notificationtype': {
'Meta': {'object_name': 'NotificationType', 'db_table': "'notify_notificationtype'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'django_notify.settings': {
'Meta': {'object_name': 'Settings', 'db_table': "'notify_settings'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'django_notify.subscription': {
'Meta': {'object_name': 'Subscription', 'db_table': "'notify_subscription'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'latest_for'", 'null': 'True', 'to': "orm['django_notify.Notification']"}),
'notification_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_notify.NotificationType']"}),
'object_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'send_emails': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'settings': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_notify.Settings']"})
},
'qanda_app.answer': {
'Meta': {'object_name': 'Answer'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['qanda_app.QandaUser']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'editDate': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'postDate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['qanda_app.Question']"}),
'relatedUsers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_answers'", 'symmetrical': 'False', 'through': "orm['qanda_app.AnswerRelatedUsers']", 'to': "orm['qanda_app.QandaUser']"}),
'text': ('django.db.models.fields.TextField', [], {})
},
'qanda_app.answerrelatedusers': {
'Meta': {'object_name': 'AnswerRelatedUsers'},
'downvote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notUseful': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'relatedAnswer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_relation'", 'to': "orm['qanda_app.Answer']"}),
'relatedUser': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answer_relation'", 'to': "orm['qanda_app.QandaUser']"}),
'star': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'upvote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'useful': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'qanda_app.answersubscription': {
'Meta': {'object_name': 'AnswerSubscription', '_ormbases': ['django_notify.Subscription']},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptions'", 'to': "orm['qanda_app.Answer']"}),
'subscription_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['django_notify.Subscription']", 'unique': 'True', 'primary_key': 'True'})
},
'qanda_app.category': {
'Meta': {'object_name': 'Category'},
'about': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'qanda_app.qandauser': {
'Meta': {'object_name': 'QandaUser'},
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'djangoUser': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'QandaUser'", 'unique': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relatedUsers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'relaterUsers'", 'symmetrical': 'False', 'through': "orm['qanda_app.UserRelations']", 'to': "orm['qanda_app.QandaUser']"})
},
'qanda_app.question': {
'Meta': {'object_name': 'Question'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['qanda_app.QandaUser']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['qanda_app.Category']", 'null': 'True'}),
'closeDate': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'closeMessage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'editDate': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'postDate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'relatedUsers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_questions'", 'symmetrical': 'False', 'through': "orm['qanda_app.QuestionRelatedUsers']", 'to': "orm['qanda_app.QandaUser']"}),
'text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'viewCount': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'qanda_app.questionrelatedusers': {
'Meta': {'object_name': 'QuestionRelatedUsers'},
'downvote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notUseful': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'relatedQuestion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_relation'", 'to': "orm['qanda_app.Question']"}),
'relatedUser': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_relation'", 'to': "orm['qanda_app.QandaUser']"}),
'star': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'upvote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'useful': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'qanda_app.questionsubscription': {
'Meta': {'object_name': 'QuestionSubscription', '_ormbases': ['django_notify.Subscription']},
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptions'", 'to': "orm['qanda_app.Question']"}),
'subscription_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['django_notify.Subscription']", 'unique': 'True', 'primary_key': 'True'})
},
'qanda_app.reply': {
'Meta': {'object_name': 'Reply'},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replies'", 'null': 'True', 'to': "orm['qanda_app.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replies'", 'to': "orm['qanda_app.QandaUser']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'editDate': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'postDate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'default': "''"})
},
'qanda_app.userrelations': {
'Meta': {'object_name': 'UserRelations'},
'flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'related': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relatedRelation'", 'to': "orm['qanda_app.QandaUser']"}),
'relater': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relaterRelation'", 'to': "orm['qanda_app.QandaUser']"}),
'star': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['qanda_app']
|
alpsayin/django-qanda
|
qanda/qanda_app/migrations/0020_auto__del_qandaprofile.py
|
Python
|
mit
| 16,080
|
#!/usr/bin/python
import sys, os, time, socket
import ConfigParser
from web_request.handlers import wsgi, mod_python, cgi
from web_request.response import Response
from FeatureFilter.Service.Service import Service
from FeatureFilter.Algorithms.Clustering.MarkerCluster import MarkerCluster
from lxml import etree
from lxml import objectify
import json
# First, check explicit FS_CONFIG env var
if 'FS_CONFIG' in os.environ:
cfgfiles = os.environ['FS_CONFIG'].split(",")
# Otherwise, make some guesses.
else:
# Windows doesn't always do the 'working directory' check correctly.
if sys.platform == 'win32':
workingdir = os.path.abspath(os.path.join(os.getcwd(), os.path.dirname(sys.argv[0])))
cfgfiles = (os.path.join(workingdir, "featurefilter.cfg"), os.path.join(workingdir,"..","featurefilter.cfg"))
else:
cfgfiles = ("featurefilter.cfg", os.path.join("..", "featurefilter.cfg"), "/etc/featurefilter.cfg")
class Server (object):
''' '''
def __init__ (self, services, config = {}, processes = {}):
self.services = services
self.config = config
self.processes = processes
def _loadFromSection (cls, config, section, **objargs):
for opt in config.options(section):
if opt != 'script' and opt != 'host' and opt != 'port' and opt != 'protocol':
objargs[opt] = config.get(section, opt)
return Service(config.get(section, 'script'),
config.get(section, 'host'),
config.get(section, 'port'),
config.get(section, 'protocol'),
**objargs)
loadFromSection = classmethod(_loadFromSection)
def _load (cls, *files):
parser = ConfigParser.ConfigParser()
parser.read(files)
config = {}
if parser.has_section("configuration"):
for key in parser.options("configuration"):
config[key] = parser.get("configuration", key)
processes = {}
services = {}
for section in parser.sections():
if section == "configuration": continue
else:
services[section] = cls.loadFromSection(parser, section)
return cls(services, config, processes)
load = classmethod(_load)
def dispatchRequest (self, base_path="", path_info="/", params={}, request_method = "GET", post_data = None, accepts = ""):
"""Read in request data, and return a (content-type, response string) tuple. May
raise an exception, which should be returned as a 500 error to the user."""
response_code = "200 OK"
host = base_path
path = path_info.split("/")
service = None
if params.has_key('server'):
service = self.services[params['server']]
else:
service = self.services[path[len(path)-1]]
# send data to harvester
if self.config.has_key('harvester_host') and self.config.has_key('harvester_port'):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect((self.config['harvester_host'], int(self.config['harvester_port'])))
sock.sendall("{\"params\":[" + str(json.dumps(params)) + "], \"post_data\":" + json.dumps(post_data) + "}")
#received = sock.recv(1024)
finally:
sock.close()
service.request(params=params, post_data=post_data, method=request_method)
if params.has_key('clustering'):
if params['clustering'].lower() == "false":
return Response(data=service.getData(), content_type=service.getContentType(), headers=None, status_code=service.getStatusCode(), encoding='')
bbox = []
if params.has_key('bbox'):
bbox = params['bbox'].split(',')
else:
if len(post_data) > 0:
parser = objectify.makeparser(remove_blank_text=True, ns_clean=True)
dom = etree.XML(post_data, parser=parser)
query = dom.xpath("//*[local-name() = 'BBOX']")
if len(query) > 0:
bbox.extend(str(query[0].xpath("//*[local-name() = 'lowerCorner']")[0]).split(" "))
bbox.extend(str(query[0].xpath("//*[local-name() = 'upperCorner']")[0]).split(" "))
size = params['size'].split(',')
clusterList = MarkerCluster.cluster(service.convertToPoints(params), bbox, size, 15)
for cluster in clusterList:
if isinstance(cluster, dict) == False:
service.appendCentroid(MarkerCluster.getCentroid(cluster), cluster)
        response = Response(data=service.getContent(), content_type=service.getContentType(), headers=None, status_code=service.getStatusCode(), encoding='utf-8')
        return response
theServer = None
lastRead = 0
def handler (apacheReq):
global theServer
if not theServer:
options = apacheReq.get_options()
cfgs = cfgfiles
if options.has_key("FeatureFilterConfig"):
cfgs = (options["FeatureFilterConfig"],) + cfgs
theServer = Server.load(*cfgs)
return mod_python(theServer.dispatchRequest, apacheReq)
def wsgi_app (environ, start_response):
global theServer, lastRead
last = 0
for cfg in cfgfiles:
try:
cfgTime = os.stat(cfg)[8]
if cfgTime > last:
last = cfgTime
except:
pass
if not theServer or last > lastRead:
cfgs = cfgfiles
theServer = Server.load(*cfgs)
lastRead = time.time()
return wsgi(theServer.dispatchRequest, environ, start_response)
if __name__ == '__main__':
service = Server.load(*cfgfiles)
cgi(service)
|
iocast/featurefilter
|
FeatureFilter/Server.py
|
Python
|
mit
| 5,950
|
# coding=utf-8
from django.core.context_processors import csrf
from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import View
from reportlab.lib.styles import getSampleStyleSheet
from export import report_as_pdf, report_as_csv, get_filename
import datetime
import copy
import forms
import models
import remotesyc.models
import utils
styleSheet = getSampleStyleSheet()
class ContractView(View):
def get(self, request, *args, **kwargs):
return render(request, "contracts/contracts.html", {
'form': forms.CompanyForm(),
'form_step': 1
})
@staticmethod
def post_changed(request):
_post = copy.deepcopy(request.POST)
if 'status' not in _post: # Force default
_post['status'] = remotesyc.models.Ticket.STATUS.CLOSED
return _post
@staticmethod
def get_filename(context, fmt):
dtstr = datetime.date.today().strftime('%d-%m-%Y')
return "{0[contract].company}_{1!s}.{2!s}".format(context, dtstr, fmt)
@classmethod
def export_as_pdf(cls, context):
contract = context['contract']
intervals = context['intervals']
spent_hours = context['spent_hours']
remainder_hours = context['remainder_hours']
# Create the HttpResponse object with the appropriate PDF headers.
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="{0!s}"'.format(
get_filename(contract.company.name, 'pdf')
)
response.write(report_as_pdf(contract, intervals, spent_hours, remainder_hours))
return response
@classmethod
def export_as_csv(cls, context):
contract = context['contract']
intervals = context['intervals']
spent_hours = context['spent_hours']
remainder_hours = context['remainder_hours']
# Create the HttpResponse object with the appropriate CSV headers.
response = HttpResponse(
report_as_csv(contract, intervals, spent_hours, remainder_hours),
content_type='text/csv'
)
response['Content-Disposition'] = 'attachment; filename="{0:s}"'.format(
get_filename(contract.company.name, 'csv')
)
return response
# noinspection DjangoOrm
def post(self, request):
context = {
'form': forms.CompanyForm(self.post_changed(request)),
'form_step': 1
}
context.update(csrf(request))
if context['form'].is_valid():
company = models.Company.objects.get(pk=request.POST['name'])
context['form_step'] = 2
context_changed = context['form'].cleaned_data['context_changed']
if int(request.POST['form_step']) == context['form_step'] and not context_changed:
context['related_form'] = forms.ContractForm(request.POST, params={
'contracts': company.contract_set.filter(archive=False)
})
if context['related_form'].is_valid():
contract = models.Contract.objects.get(pk=request.POST['contracts'])
context['period_form'] = forms.PeriodForm(request.POST, params={
'period': contract.period_set,
})
if context['period_form'].is_valid():
periods = context['period_form'].cleaned_data['period']
periods = periods if len(periods) > 0 else contract.period_set.all()
context.update(self.extra_context(request, contract, periods))
else:
context['related_form'] = forms.ContractForm(params={
'contracts': company.contract_set.filter(archive=False)
})
if '_export_as' in request.POST and request.POST['_export_as']:
return getattr(self, 'export_as_' + request.POST['_export_as'])(context)
return render(request, "contracts/contracts.html", context)
@staticmethod
def extra_context(request, contract, periods):
tickets = remotesyc.models.Ticket.objects.filter(organization_id=contract.company.organization_external)
if not request.POST['status'] == remotesyc.models.Ticket.STATUS.ALL:
tickets = tickets.filter(status=request.POST['status'])
extra_context = {
'contract': contract,
'intervals': {}
}
for period in periods:
extra_context['intervals'][period] = tickets.filter(created_at__range=[period.dt_start, period.dt_end])
        # total ticket hours across the selected periods
extra_context['spent_hours'] = utils.calc_spent_hours(contract, extra_context['intervals'].values())
if len(periods) == 1:
spent_credits = utils.calc_spent_credits(contract, periods[0], request.POST['status'])
            # total valid hours
extra_context['valid_hours'] = contract.average_hours + spent_credits
            # outstanding balance
extra_context['spent_credits'] = spent_credits
extra_context['remainder_hours'] = extra_context['valid_hours'] - extra_context['spent_hours']
else:
extra_context['remainder_hours'] = utils.calc_remainder_hours(contract, extra_context['spent_hours'])
return extra_context
|
alexsilva/zendeskspent
|
contracts/views.py
|
Python
|
mit
| 5,444
|
"""
QUESTION:
Given numRows, generate the first numRows of Pascal's triangle.
For example, given numRows = 5,
Return
[
[1],
[1,1],
[1,2,1],
[1,3,3,1],
[1,4,6,4,1]
]
ANSWER:
Build each row from the previous one by summing adjacent pairs (a DP-style recurrence).
"""
class Solution:
# @param {integer} numRows
# @return {integer[][]}
def generate(self, numRows):
if numRows == 0:
return []
res = [[1]]
for i in range(1,numRows):
tmp = [1]
for j in range(1,len(res[-1])):
tmp.append(res[-1][j-1] + res[-1][j])
tmp.append(1)
res.append(tmp)
return res
if __name__ == '__main__':
for i in Solution().generate(10):
print i
|
tktrungna/leetcode
|
Python/pascals-triangle.py
|
Python
|
mit
| 698
|
"""Run occasionally via cron for maintenance tasks."""
from datetime import datetime, timedelta
import praw
from models import cfg_file, Log, session, Subreddit
def main():
r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
r.login(cfg_file.get('reddit', 'username'),
cfg_file.get('reddit', 'password'))
# update exclude_banned_modqueue values for subreddits
subreddits = (session.query(Subreddit)
.filter(Subreddit.enabled == True)
.all())
for sr in subreddits:
try:
settings = r.get_subreddit(sr.name).get_settings()
sr.exclude_banned_modqueue = settings['exclude_banned_modqueue']
except Exception as e:
sr.exclude_banned_modqueue = False
session.commit()
# delete old log entries
log_retention_days = int(cfg_file.get('database', 'log_retention_days'))
log_cutoff = datetime.utcnow() - timedelta(days=log_retention_days)
deleted = session.query(Log).filter(Log.datetime < log_cutoff).delete()
session.commit()
print 'Deleted {0} log rows'.format(deleted)
if __name__ == '__main__':
main()
|
sfwpn/AutoModerator
|
maintenance.py
|
Python
|
mit
| 1,181
|
"""mybook URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^cms/', include('cms.urls', namespace='cms')),
]
|
kyon-bll/django_mybook
|
mybook/mybook/urls.py
|
Python
|
mit
| 828
|
import os
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument("corpusdir", help = "Path to the directory containing corpus directories")
parser.add_argument("script", help = "name of the script to be run")
args = parser.parse_args()
## lists of corpora to skip
## and failed to run
skipped = ['spade-Penn-Neighborhood']
failed = []
## first check that the script exists
assert os.path.isfile(args.script), "{} should be a script that exists".format(args.script)
## get the corpora from the directory
corpora = [f for f in sorted(os.listdir(args.corpusdir)) if f.startswith("spade-")]
## loop through corpus files
for corpus in corpora:
## check if the file is actually a directory since that is the expected format for the
## analysis scripts
if os.path.isdir(corpus):
if corpus in skipped:
print("Skipping {}".format(corpus))
continue
try:
print("Processing {}".format(corpus))
## first reset the corpus
subprocess.call(['python', 'reset_database.py', corpus])
## run the script on the corpus
subprocess.call(['python', args.script, corpus, "-s"])
## reset corpus afterwards to save memory
try:
subprocess.call(['python', 'reset_database.py', corpus])
except:
continue
except:
failed.append(corpus)
continue
print("Complete!")
print("Following corpora were not run: {}".format(failed))
|
MontrealCorpusTools/SPADE
|
run_all_corpora.py
|
Python
|
mit
| 1,538
|
import change_case
import csv
import datetime
import iso8601
import json
import os
import random
import sys
import tempfile
import warnings
class JSONSchemaToDatabase:
'''JSONSchemaToDatabase is the mother class for everything
:param schema: The JSON schema, as a native Python dict
:param database_flavor: Either "postgres" or "redshift"
:param postgres_schema: (optional) A string denoting a postgres schema (namespace) under which all tables will be created
:param debug: (optional) Set this to True if you want all queries to be printed to stderr
:param item_col_name: (optional) The name of the main object key (default is 'item_id')
:param item_col_type: (optional) Type of the main object key (uses the type identifiers from JSON Schema). Default is 'integer'
:param prefix_col_name: (optional) Postgres column name identifying the subpaths in the object (default is 'prefix')
:param abbreviations: (optional) A string to string mapping containing replacements applied to each part of the path
:param extra_columns: (optional) A list of pairs representing extra columns in the root table. The format is ('column_name', 'type')
:param root_table: (optional) Name of the root table
:param s3_client: (optional, Redshift only) A boto3 client object used for copying data through S3 (if not provided then it will use INSERT statements, which can be very slow)
:param s3_bucket: (optional, Redshift only) Required with s3_client
:param s3_prefix: (optional, Redshift only) Optional subdirectory within the S3 bucket
:param s3_iam_arn: (optional, Redshift only) Extra IAM argument
Typically you want to instantiate a `JSONSchemaToPostgres` object, and run :func:`create_tables` to create all the tables. After that, insert all data using :func:`insert_items`. Once you're done inserting, run :func:`create_links` to populate all references properly and add foreign keys between tables. Optionally you can run :func:`analyze` finally which optimizes the tables.
'''
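    # A minimal usage sketch, assuming `con` is an open psycopg2 connection and
    # `schema` is a JSON schema already loaded as a Python dict (the field names
    # below are illustrative only):
    #
    #   translator = JSONSchemaToPostgres(schema, postgres_schema='data')
    #   translator.create_tables(con)
    #   translator.insert_items(con, items=[(1, {'loan': {'amount': 500000}})])
    #   translator.create_links(con)
    #   translator.analyze(con)
    #   con.commit()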
def __init__(self, schema, database_flavor, postgres_schema=None, debug=False,
item_col_name='item_id', item_col_type='integer', prefix_col_name='prefix',
abbreviations={}, extra_columns=[], root_table='root',
s3_client=None, s3_bucket=None, s3_prefix='jsonschema2db', s3_iam_arn=None):
self._database_flavor = database_flavor
self._debug = debug
self._table_definitions = {}
self._links = {}
self._backlinks = {}
self._postgres_schema = postgres_schema
self._item_col_name = item_col_name
self._item_col_type = item_col_type
self._prefix_col_name = prefix_col_name
self._abbreviations = abbreviations
self._extra_columns = extra_columns
self._table_comments = {}
self._column_comments = {}
self._root_table = root_table
# Redshift-specific properties
self._s3_client = s3_client
self._s3_bucket = s3_bucket
self._s3_prefix = s3_prefix
self._s3_iam_arn = s3_iam_arn
# Various counters used for diagnostics during insertions
self.failure_count = {} # path -> count
self.json_path_count = {} # json path -> count
# Walk the schema and build up the translation tables
self._translation_tree = self._traverse(schema, schema, table=self._root_table, comment=schema.get('comment'))
# Need to compile all the backlinks that uniquely identify a parent and add columns for them
for child_table in self._backlinks:
if len(self._backlinks[child_table]) != 1:
# Need a unique path on the parent table for this to make sense
continue
parent_table, ref_col_name, _ = list(self._backlinks[child_table])[0]
self._backlinks[child_table] = (parent_table, ref_col_name)
self._table_definitions[child_table][ref_col_name] = 'link'
self._links.setdefault(child_table, {})[ref_col_name] = (None, parent_table)
# Construct tables and columns
self._table_columns = {}
max_column_length = {'postgres': 63, 'redshift': 127}[self._database_flavor]
for col, type in self._extra_columns:
if 0 < len(col) <= max_column_length:
self._table_definitions[self._root_table][col] = type
for table, column_types in self._table_definitions.items():
for column in column_types.keys():
if len(column) > max_column_length:
                    warnings.warn('Ignoring column because it is too long: %s.%s' % (table, column))
columns = sorted(col for col in column_types.keys() if 0 < len(col) <= max_column_length)
self._table_columns[table] = columns
def _table_name(self, path):
return '__'.join(change_case.ChangeCase.camel_to_snake(self._abbreviations.get(p, p)) for p in path)
def _column_name(self, path):
return self._table_name(path) # same
def _execute(self, cursor, query, args=None, query_ok_to_print=True):
if self._debug and query_ok_to_print:
print(query, file=sys.stderr)
cursor.execute(query, args)
def _traverse(self, schema, tree, path=tuple(), table='root', parent=None, comment=None, json_path=tuple()):
# Computes a bunch of stuff
# 1. A list of tables and columns (used to create tables dynamically)
# 2. A tree (dicts of dicts) with a mapping for each fact into tables (used to map data)
# 3. Links between entities
if type(tree) != dict:
warnings.warn('%s.%s: Broken subtree' % (table, self._column_name(path)))
return
if parent is not None:
self._backlinks.setdefault(table, set()).add(parent)
if table not in self._table_definitions:
self._table_definitions[table] = {}
if comment:
self._table_comments[table] = comment
definition = None
new_json_path = json_path
while '$ref' in tree:
ref = tree['$ref']
p = ref.lstrip('#').lstrip('/').split('/')
tree = schema
for elem in p:
if elem not in tree:
warnings.warn('%s.%s: Broken definition: %s' % (table, self._column_name(path), ref))
return
tree = tree[elem]
new_json_path = ('#',) + tuple(p)
definition = p[-1] # TODO(erikbern): we should just make this a boolean variable
special_keys = set(tree.keys()).intersection(['oneOf', 'allOf', 'anyOf'])
if special_keys:
res = {}
for p in special_keys:
for q in tree[p]:
res.update(self._traverse(schema, q, path, table, json_path=new_json_path))
return res # This is a special node, don't store any more information
elif 'enum' in tree:
self._table_definitions[table][self._column_name(path)] = 'enum'
if 'comment' in tree:
self._column_comments.setdefault(table, {})[self._column_name(path)] = tree['comment']
res = {'_column': self._column_name(path), '_type': 'enum'}
elif 'type' not in tree:
res = {}
warnings.warn('%s.%s: Type info missing' % (table, self._column_name(path)))
elif tree['type'] == 'object':
            if self._debug:
                print('object:', tree, file=sys.stderr)
res = {}
if 'patternProperties' in tree:
# Always create a new table for the pattern properties
if len(tree['patternProperties']) > 1:
warnings.warn('%s.%s: Multiple patternProperties, will ignore all except first' % (table, self._column_name(path)))
for p in tree['patternProperties']:
ref_col_name = table + '_id'
res['*'] = self._traverse(schema, tree['patternProperties'][p], tuple(), self._table_name(path), (table, ref_col_name, self._column_name(path)), tree.get('comment'), new_json_path + (p,))
break
elif 'properties' in tree:
if definition:
# This is a shared definition, so create a new table (if not already exists)
if path == tuple():
ref_col_name = self._table_name([definition]) + '_id'
else:
ref_col_name = self._column_name(path) + '_id'
for p in tree['properties']:
res[p] = self._traverse(schema, tree['properties'][p], (p, ), self._table_name([definition]), (table, ref_col_name, self._column_name(path)), tree.get('comment'), new_json_path + (p,))
self._table_definitions[table][ref_col_name] = 'link'
self._links.setdefault(table, {})[ref_col_name] = ('/'.join(path), self._table_name([definition]))
else:
# Standard object, just traverse recursively
for p in tree['properties']:
res[p] = self._traverse(schema, tree['properties'][p], path + (p,), table, parent, tree.get('comment'), new_json_path + (p,))
else:
warnings.warn('%s.%s: Object with neither properties nor patternProperties' % (table, self._column_name(path)))
else:
if tree['type'] == 'null':
res = {}
elif tree['type'] not in ['string', 'boolean', 'number', 'integer']:
warnings.warn('%s.%s: Type error: %s' % (table, self._column_name(path), tree['type']))
res = {}
else:
if definition in ['date', 'timestamp']:
t = definition
else:
t = tree['type']
self._table_definitions[table][self._column_name(path)] = t
if 'comment' in tree:
self._column_comments.setdefault(table, {})[self._column_name(path)] = tree['comment']
res = {'_column': self._column_name(path), '_type': t}
res['_table'] = table
res['_suffix'] = '/'.join(path)
res['_json_path'] = '/'.join(json_path)
self.json_path_count['/'.join(json_path)] = 0
return res
def _coerce_type(self, t, value):
''' Returns a two-tuple (is_valid, new_value) where new_value is properly coerced. '''
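        # Illustrative results, derived from the branches below:
        #   _coerce_type('integer', '5')   -> (True, 5)
        #   _coerce_type('integer', True)  -> (False, 1)     # bools rejected for numeric types
        #   _coerce_type('number', 'abc')  -> (False, None)  # conversion errors are swallowed
        #   _coerce_type('string', 42)     -> (True, '42')   # ints/floats may coerce to str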
try:
if t == 'number':
return type(value) != bool, float(value)
elif t == 'integer':
return type(value) != bool, int(value)
elif t == 'boolean':
return type(value) == bool, value
elif t == 'timestamp':
if type(value) == datetime.datetime:
return True, value
return True, iso8601.parse_date(value)
elif t == 'date':
if type(value) == datetime.date:
return True, value
return True, datetime.date(*(int(z) for z in value.split('-')))
elif t == 'string':
# Allow coercing ints/floats, but nothing else
return type(value) in [str, int, float], str(value)
elif t == 'enum':
return type(value) == str, str(value)
except:
pass
return False, None
def _flatten_dict(self, data, res=None, path=tuple()):
if res is None:
res = []
if type(data) == dict:
for k, v in data.items():
self._flatten_dict(v, res, path+(k,))
else:
res.append((path, data))
return res
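        # Illustrative behaviour (dict iteration order is preserved):
        #   self._flatten_dict({'loan': {'amount': 500000, 'currency': 'USD'}})
        #   -> [(('loan', 'amount'), 500000), (('loan', 'currency'), 'USD')]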
def _postgres_table_name(self, table):
if self._postgres_schema is None:
return '"%s"' % table
else:
return '"%s"."%s"' % (self._postgres_schema, table)
def create_tables(self, con):
'''Creates tables
:param con: psycopg2 connection object
'''
postgres_types = {'boolean': 'bool', 'number': 'float', 'string': 'text', 'enum': 'text', 'integer': 'bigint', 'timestamp': 'timestamptz', 'date': 'date', 'link': 'integer'}
with con.cursor() as cursor:
if self._postgres_schema is not None:
self._execute(cursor, 'drop schema if exists %s cascade' % self._postgres_schema)
self._execute(cursor, 'create schema %s' % self._postgres_schema)
for table, columns in self._table_columns.items():
types = [self._table_definitions[table][column] for column in columns]
id_data_type = {'postgres': 'serial', 'redshift': 'int identity(1, 1) not null'}[self._database_flavor]
create_q = 'create table %s (id %s, "%s" %s not null, "%s" text not null, %s unique ("%s", "%s"), unique (id))' % \
(self._postgres_table_name(table), id_data_type, self._item_col_name, postgres_types[self._item_col_type], self._prefix_col_name,
''.join('"%s" %s, ' % (c, postgres_types[t]) for c, t in zip(columns, types)),
self._item_col_name, self._prefix_col_name)
self._execute(cursor, create_q)
if table in self._table_comments:
self._execute(cursor, 'comment on table %s is %%s' % self._postgres_table_name(table), (self._table_comments[table],))
for c in columns:
if c in self._column_comments.get(table, {}):
self._execute(cursor, 'comment on column %s."%s" is %%s' % (self._postgres_table_name(table), c), (self._column_comments[table][c],))
def _insert_items_generate_rows(self, items, extra_items, count):
# Helper function to generate data row by row for insertion
for item_id, data in items:
if type(data) == dict:
data = self._flatten_dict(data)
res = {}
for path, value in data:
if value is None:
continue
subtree = self._translation_tree
res.setdefault(subtree['_table'], {}).setdefault('', {})
if count:
self.json_path_count[subtree['_json_path']] += 1
for index, path_part in enumerate(path):
if '*' in subtree:
subtree = subtree['*']
elif not subtree.get(path_part):
if count:
self.failure_count[path] = self.failure_count.get(path, 0) + 1
break
else:
subtree = subtree[path_part]
                # Compute the prefix, add an empty entry (TODO: should make the prefix customizable)
table, suffix = subtree['_table'], subtree['_suffix']
prefix_suffix = '/' + '/'.join(path[:(index+1)])
assert prefix_suffix.endswith(suffix)
prefix = prefix_suffix[:len(prefix_suffix)-len(suffix)].rstrip('/')
res.setdefault(table, {}).setdefault(prefix, {})
if count:
self.json_path_count[subtree['_json_path']] += 1
# Leaf node with value, validate and prepare for insertion
if '_column' not in subtree:
if count:
self.failure_count[path] = self.failure_count.get(path, 0) + 1
continue
col, t = subtree['_column'], subtree['_type']
if table not in self._table_columns:
if count:
self.failure_count[path] = self.failure_count.get(path, 0) + 1
continue
is_valid, new_value = self._coerce_type(t, value)
if not is_valid:
if count:
self.failure_count[path] = self.failure_count.get(path, 0) + 1
continue
res.setdefault(table, {}).setdefault(prefix, {})[col] = new_value
for table, table_values in res.items():
if table == self._root_table and item_id in extra_items:
res[table][''].update(extra_items[item_id])
# Compile table rows for this item
for table, table_values in res.items():
for prefix, row_values in table_values.items():
row_array = [item_id, prefix] + [row_values.get(t) for t in self._table_columns[table]]
yield (table, row_array)
def insert_items(self, con, items, extra_items={}, mutate=True, count=False):
''' Inserts data into database.
:param con: psycopg2 connection object
:param items: is an iterable of tuples `(item id, values)` where `values` is either:
- A nested dict conforming to the JSON spec
- A list (or iterator) of pairs where the first item in the pair is a tuple specifying the path, and the second value in the pair is the value.
:param extra_items: A dictionary containing values for extra columns, where key is an extra column name.
:param mutate: If this is set to `False`, nothing is actually inserted. This might be useful if you just want to validate data.
:param count: if set to `True`, it will count some things. Defaults to `False`.
Updates `self.failure_count`, a dict counting the number of failures for paths (keys are tuples, values are integers).
This function has an optimized strategy for Redshift, where it writes the data to temporary files, copies those to S3, and uses the `COPY`
command to ingest the data into Redshift. However this strategy is only used if the `s3_client` is provided to the constructor.
Otherwise, it will fall back to the Postgres-based method of running batched insertions.
Note that the Postgres-based insertion builds up huge intermediary datastructures, so it will take a lot more memory.
'''
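        # Sketch of the two accepted `items` shapes described above (ids and
        # field names are purely illustrative):
        #   items = [
        #       (1, {'loan': {'amount': 500000}}),        # nested dict form
        #       (2, [(('loan', 'amount'), 250000)]),      # (path, value) pair form
        #   ]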
rows = self._insert_items_generate_rows(items=items, extra_items=extra_items, count=count)
if not mutate:
for table, row in rows:
# Just exhaust the iterator
pass
elif self._database_flavor == 'redshift' and self._s3_client:
with tempfile.TemporaryDirectory() as tmpdirname, con.cursor() as cursor:
# Flush the iterator to temporary files on disk
temp_files, writers, file_objs = {}, {}, []
for table, row in rows:
if table not in temp_files:
fn = temp_files[table] = os.path.join(tmpdirname, table + '.csv')
f = open(fn, 'wt')
writer = csv.writer(f)
if self._debug:
print('Creating temp file for table', table, 'at', fn, file=sys.stderr)
writers[table] = writer
file_objs.append(f)
writers[table].writerow(row)
# Close local temp files so all data gets flushed to disk
for f in file_objs:
f.close()
# Upload all files to S3 and load into Redshift
# TODO: might want to use a thread pool for this
batch_random = '%012d' % random.randint(0, 999999999999)
for table, fn in temp_files.items():
s3_path = '/%s/%s/%s.csv' % (self._s3_prefix, batch_random, table)
if self._debug:
print('Uploading data for table %s from %s (%d bytes) to %s' % (table, fn, os.path.getsize(fn), s3_path), file=sys.stderr)
self._s3_client.upload_file(Filename=fn, Bucket=self._s3_bucket, Key=s3_path)
query = 'copy %s from \'s3://%s/%s\' csv %s truncatecolumns compupdate off statupdate off' % (
self._postgres_table_name(table),
self._s3_bucket, s3_path, self._s3_iam_arn and 'iam_role \'%s\'' % self._s3_iam_arn or '')
self._execute(cursor, query)
else:
# Postgres-based insertion
with con.cursor() as cursor:
data_by_table = {}
for table, row in rows:
# Note that this flushes the iterator into an in-memory datastructure, so it will be far less memory efficient than the Redshift strategy
data_by_table.setdefault(table, []).append(row)
for table, data in data_by_table.items():
cols = '("%s","%s"%s)' % (self._item_col_name, self._prefix_col_name, ''.join(',"%s"' % c for c in self._table_columns[table]))
pattern = '(' + ','.join(['%s'] * len(data[0])) + ')'
args = b','.join(cursor.mogrify(pattern, tup) for tup in data)
self._execute(cursor, b'insert into %s %s values %s' % (self._postgres_table_name(table).encode(), cols.encode(), args), query_ok_to_print=False)
def create_links(self, con):
'''Adds foreign keys between tables.'''
for from_table, cols in self._links.items():
for ref_col_name, (prefix, to_table) in cols.items():
if from_table not in self._table_columns or to_table not in self._table_columns:
continue
args = {
'from_table': self._postgres_table_name(from_table),
'to_table': self._postgres_table_name(to_table),
'ref_col': ref_col_name,
'item_col': self._item_col_name,
'prefix_col': self._prefix_col_name,
'prefix': prefix,
}
update_q = 'update %(from_table)s set "%(ref_col)s" = to_table.id from (select "%(item_col)s", "%(prefix_col)s", id from %(to_table)s) to_table' % args
if prefix:
# Forward reference from table to a definition
update_q += ' where %(from_table)s."%(item_col)s" = to_table."%(item_col)s" and %(from_table)s."%(prefix_col)s" || \'/%(prefix)s\' = to_table."%(prefix_col)s"' % args
else:
# Backward definition from a table to its patternProperty parent
update_q += ' where %(from_table)s."%(item_col)s" = to_table."%(item_col)s" and strpos(%(from_table)s."%(prefix_col)s", to_table."%(prefix_col)s") = 1' % args
alter_q = 'alter table %(from_table)s add constraint fk_%(ref_col)s foreign key ("%(ref_col)s") references %(to_table)s (id)' % args
with con.cursor() as cursor:
self._execute(cursor, update_q)
self._execute(cursor, alter_q)
def analyze(self, con):
'''Runs `analyze` on each table. This improves performance.
See the `Postgres documentation for Analyze <https://www.postgresql.org/docs/9.1/static/sql-analyze.html>`_
'''
with con.cursor() as cursor:
for table in self._table_columns.keys():
self._execute(cursor, 'analyze %s' % self._postgres_table_name(table))
class JSONSchemaToPostgres(JSONSchemaToDatabase):
'''Shorthand for JSONSchemaToDatabase(..., database_flavor='postgres')'''
def __init__(self, *args, **kwargs):
kwargs['database_flavor'] = 'postgres'
return super(JSONSchemaToPostgres, self).__init__(*args, **kwargs)
class JSONSchemaToRedshift(JSONSchemaToDatabase):
'''Shorthand for JSONSchemaToDatabase(..., database_flavor='redshift')'''
def __init__(self, *args, **kwargs):
kwargs['database_flavor'] = 'redshift'
return super(JSONSchemaToRedshift, self).__init__(*args, **kwargs)
|
better/jsonschema2db
|
jsonschema2db.py
|
Python
|
mit
| 23,995
|
import unittest
import json
import time
from constellations import node
from constellations import message
from constellations import peer_node
class TestPeerNode(unittest.TestCase):
# TODO Cleanup the server and ports properly in order to run multiple independent tests
def setUp(self):
# Creates nodes in ports 4500 to 4500+N-1
N = 3
self.nodes = []
for i in range(4500, 4500+N):
address = ["localhost", i]
self.nodes.append(node.Node(host=address[0], port=address[1]))
# Creates peer nodes in ports 5000 to 5000+N-1
N = 3
self.peer_nodes = []
for i in range(5000, 5000+N):
address = ["localhost", i]
self.peer_nodes.append(peer_node.PeerNode(host=address[0], port=address[1]))
addr = self.peer_nodes[0].data.me['address']
for i in range(1, len(self.peer_nodes)):
self.peer_nodes[i].data.peers["1"] = {}
self.peer_nodes[i].data.peers["1"]["address"] = addr
def tearDown(self):
for n in self.nodes:
n.stop()
for pn in self.peer_nodes:
pn.stop()
def test_peer_nodes_address_sharing(self):
for i in range(0, 10):
for pn in self.peer_nodes:
print(str(pn.data.me) + "\n" + str(pn.data.peers))
print("----")
print("--------------")
time.sleep(2)
# TODO Add relevant assertions
# TODO assert that no exceptions were raised during running this test
# TODO get rid of sleeps
def test_new_peer_nodes_start_stop(self):
addr = self.peer_nodes[0].data.me['address']
# Creates new peer nodes in ports 5500 to 5500+N-1
N = 3
new_peer_nodes = []
for i in range(5500, 5500+N):
address = ["localhost", i]
new_peer_nodes.append(peer_node.PeerNode(host=address[0], port=address[1]))
for pn in new_peer_nodes:
pn.data.peers["1"] = {}
pn.data.peers["1"]["address"] = addr
for i in range(0, 3):
for pn in self.peer_nodes:
print(str(pn.data.me) + "\n" + str(pn.data.peers))
print("----")
print("--------------")
time.sleep(2)
for pn in new_peer_nodes:
pn.stop()
for i in range(0, 7):
for pn in self.peer_nodes:
print(str(pn.data.me) + "\n" + str(pn.data.peers))
print("----")
print("--------------")
time.sleep(2)
# TODO Add relevant assertions
# TODO assert that no exceptions were raised during running this test
# TODO get rid of sleeps
if __name__ == "__main__":
unittest.main()
|
pthomaid/constellations
|
tests/test_peer_node.py
|
Python
|
mit
| 2,850
|
import aws_curioh
from setuptools import find_packages, setup
version = aws_curioh.__version__
setup(
name='aws-curioh',
version=version,
description=('Simple Amazon AWS requests.'),
url='https://github.com/w2srobinho/aws-curioh',
author='Willian de Souza',
author_email='willianstosouza@gmail.com',
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=[
'boto3>=1.2.6'
],
)
|
w2srobinho/aws-curioh
|
setup.py
|
Python
|
mit
| 448
|
#!/usr/bin/env python
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
import sys
import setuptools
import os
__version__ = '0.0.1'
FASTTEXT_SRC = "../src"
# Based on https://github.com/pybind/python_example
class get_pybind_include(object):
"""Helper class to determine the pybind11 include path
The purpose of this class is to postpone importing pybind11
until it is actually installed, so that the ``get_include()``
method can be invoked. """
def __init__(self, user=False):
self.user = user
def __str__(self):
import pybind11
return pybind11.get_include(self.user)
fasttext_src_files = os.listdir(FASTTEXT_SRC)
fasttext_src_cc = list(filter(lambda x: x.endswith('.cc'), fasttext_src_files))
fasttext_src_cc = list(
map(lambda x: str(os.path.join(FASTTEXT_SRC, x)), fasttext_src_cc)
)
ext_modules = [
Extension(
str('fasttext_pybind'),
[
str('fastText/pybind/fasttext_pybind.cc'),
] + fasttext_src_cc,
include_dirs=[
# Path to pybind11 headers
get_pybind_include(),
get_pybind_include(user=True),
# Path to fasttext source code
FASTTEXT_SRC,
],
language='c++',
extra_compile_args=["-O3"],
),
]
# As of Python 3.6, CCompiler has a `has_flag` method.
# cf http://bugs.python.org/issue26689
def has_flag(compiler, flagname):
"""Return a boolean indicating whether a flag name is supported on
the specified compiler.
"""
import tempfile
with tempfile.NamedTemporaryFile('w', suffix='.cpp') as f:
f.write('int main (int argc, char **argv) { return 0; }')
try:
compiler.compile([f.name], extra_postargs=[flagname])
except setuptools.distutils.errors.CompileError:
return False
return True
def cpp_flag(compiler):
"""Return the -std=c++[11/14] compiler flag.
The c++14 is preferred over c++11 (when it is available).
"""
if has_flag(compiler, '-std=c++14'):
return '-std=c++14'
elif has_flag(compiler, '-std=c++11'):
return '-std=c++11'
else:
raise RuntimeError(
'Unsupported compiler -- at least C++11 support '
'is needed!'
)
class BuildExt(build_ext):
"""A custom build extension for adding compiler-specific options."""
c_opts = {
'msvc': ['/EHsc'],
'unix': [],
}
if sys.platform == 'darwin':
c_opts['unix'] += ['-stdlib=libc++', '-mmacosx-version-min=10.7']
def build_extensions(self):
ct = self.compiler.compiler_type
opts = self.c_opts.get(ct, [])
if ct == 'unix':
opts.append('-DVERSION_INFO="%s"' % self.distribution.get_version())
opts.append(cpp_flag(self.compiler))
if has_flag(self.compiler, '-fvisibility=hidden'):
opts.append('-fvisibility=hidden')
elif ct == 'msvc':
opts.append(
'/DVERSION_INFO=\\"%s\\"' % self.distribution.get_version()
)
for ext in self.extensions:
ext.extra_compile_args = opts
build_ext.build_extensions(self)
setup(
name='fastTextpy',
version=__version__,
author='Christian Puhrsch',
author_email='cpuhrsch@fb.com',
description='fastText Python bindings',
long_description='',
ext_modules=ext_modules,
url='https://github.com/facebookresearch/fastText',
license='BSD',
install_requires=['pybind11>=2.2'],
cmdclass={'build_ext': BuildExt},
packages=[str('fastText')],
zip_safe=False
)
|
walkowiak/mips
|
fastText/python/setup.py
|
Python
|
mit
| 4,116
|
from __future__ import annotations
from typing import Any, Callable, Dict, List, Optional
from typing_extensions import Literal, TypedDict
class CommandDefinition(TypedDict):
function: Callable
description: str
is_alias: bool
aliases: List[str]
MyfitnesspalUserId = str
class GoalDisplayDict(TypedDict):
id: str
display_type: str
nutrients: List[str]
class UnitPreferenceDict(TypedDict):
energy: str
weight: str
distance: str
height: str
water: str
class DiaryPreferencesDict(TypedDict):
default_foot_view: str
meal_names: List[str]
tracked_nutrients: List[str]
class UnitValueContainer(TypedDict):
unit: str
value: float
class GoalPreferencesDict(TypedDict):
workouts_per_week: int
weekly_workout_duration: int
weekly_exercise_energy: UnitValueContainer
weight_change_goal: UnitValueContainer
weight_goal: UnitValueContainer
diary_goal_display: str
home_goal_display: str
macro_goal_format: str
class LocationPreferencesDict(TypedDict):
time_zone: str
country_code: str
locale: str
postal_code: str
state: str
city: str
IsoDateStr = str
class AdminFlagDict(TypedDict):
status: str
has_changed_username: bool
forgot_password_or_username: bool
warnings: int
strikes: int
revoked_privileges: List
class AccountDict(TypedDict):
created_at: IsoDateStr
updated_at: IsoDateStr
last_login: IsoDateStr
valid_email: bool
registration_source: str
roles: List[str]
admin_flags: AdminFlagDict
class SystemDataDict(TypedDict):
login_streak: int
unseen_notifications: int
Unknown = Any
class UserProfile(TypedDict):
type: str
starting_weight_date: str
starting_weight: UnitValueContainer
main_image_url: str
main_image_id: Optional[Unknown]
birthdate: str
height: UnitValueContainer
first_name: Optional[str]
last_name: Optional[str]
sex: Literal["M", "F"]
activity_factor: str
headline: Optional[str]
about: Optional[str]
why: Optional[str]
inspirations: List
class UserMetadata(TypedDict):
id: MyfitnesspalUserId
username: str
email: str
goal_displays: List[GoalDisplayDict]
unit_preferences: UnitPreferenceDict
diary_preferences: DiaryPreferencesDict
goal_preferences: GoalPreferencesDict
location_preferences: LocationPreferencesDict
account: AccountDict
system_data: SystemDataDict
step_sources: List
profiles: List[UserProfile]
class AuthData(TypedDict):
token_type: str
access_token: str
expires_in: int
refresh_token: str
user_id: MyfitnesspalUserId
NutritionDict = Dict[str, float]
class MealEntry(TypedDict):
name: str
nutrition_information: NutritionDict
class NoteDataDict(TypedDict):
body: str
type: str
date: str
class FoodItemNutritionDict(TypedDict):
calcium: float
carbohydrates: float
cholesterol: float
fat: float
fiber: float
iron: float
monounsaturated_fat: float
polyunsaturated_fat: float
potassium: float
protein: float
saturated_fat: float
sodium: float
sugar: float
trans_fat: float
vitamin_a: float
vitamin_c: float
class ServingSizeDict(TypedDict):
id: str
nutrition_multiplier: float
value: float
unit: str
index: int
class FoodItemDetailsResponse(TypedDict):
description: str
brand_name: Optional[str]
verified: bool
nutrition: FoodItemNutritionDict
calories: float
confirmations: int
serving_sizes: List[ServingSizeDict]
|
coddingtonbear/python-myfitnesspal
|
myfitnesspal/types.py
|
Python
|
mit
| 3,628
|
from contentbase import upgrade_step
from .shared import ENCODE2_AWARDS, REFERENCES_UUID
from past.builtins import long
import re
from pyramid.traversal import find_root
def number(value):
if isinstance(value, (int, long, float, complex)):
return value
value = value.lower().replace(' ', '')
value = value.replace('x10^', 'e')
if value in ('', 'unknown'):
return None
try:
return int(value)
except ValueError:
return float(value)
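    # Illustrative conversions, derived from the rules above (not exhaustive):
    #   number(20000)     -> 20000
    #   number('2 X10^7') -> 20000000.0   # 'x10^' is rewritten to 'e', then float()
    #   number('unknown') -> None
    #   number('1.5')     -> 1.5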
@upgrade_step('biosample', '', '2')
def biosample_0_2(value, system):
# http://redmine.encodedcc.org/issues/794
if 'starting_amount' in value:
new_value = number(value['starting_amount'])
if new_value is None:
del value['starting_amount']
else:
value['starting_amount'] = new_value
@upgrade_step('biosample', '2', '3')
def biosample_2_3(value, system):
# http://redmine.encodedcc.org/issues/940
go_mapping = {
"nucleus": "GO:0005634",
"cytosol": "GO:0005829",
"chromatin": "GO:0000785",
"membrane": "GO:0016020",
"membrane fraction": "GO:0016020",
"mitochondria": "GO:0005739",
"nuclear matrix": "GO:0016363",
"nucleolus": "GO:0005730",
"nucleoplasm": "GO:0005654",
"polysome": "GO:0005844"
}
if 'subcellular_fraction' in value:
value['subcellular_fraction_term_id'] = go_mapping[value['subcellular_fraction']]
if value['subcellular_fraction'] == "membrane fraction":
value['subcellular_fraction'] = "membrane"
value['subcellular_fraction_term_name'] = value['subcellular_fraction']
del value['subcellular_fraction']
@upgrade_step('biosample', '3', '4')
def biosample_3_4(value, system):
# http://redmine.encodedcc.org/issues/575
if 'derived_from' in value:
if type(value['derived_from']) is list and value['derived_from']:
new_value = value['derived_from'][0]
value['derived_from'] = new_value
else:
del value['derived_from']
if 'part_of' in value:
if type(value['part_of']) is list and value['part_of']:
new_value = value['part_of'][0]
value['part_of'] = new_value
else:
del value['part_of']
# http://redmine.encodedcc.org/issues/817
value['dbxrefs'] = []
if 'encode2_dbxrefs' in value:
for encode2_dbxref in value['encode2_dbxrefs']:
new_dbxref = 'UCSC-ENCODE-cv:' + encode2_dbxref
value['dbxrefs'].append(new_dbxref)
del value['encode2_dbxrefs']
@upgrade_step('biosample', '4', '5')
def biosample_4_5(value, system):
# http://redmine.encodedcc.org/issues/1305
if 'status' in value:
if value['status'] == 'DELETED':
value['status'] = 'deleted'
elif value['status'] == 'CURRENT' and value['award'] in ENCODE2_AWARDS:
value['status'] = 'released'
elif value['status'] == 'CURRENT' and value['award'] not in ENCODE2_AWARDS:
value['status'] = 'in progress'
@upgrade_step('biosample', '5', '6')
def biosample_5_6(value, system):
# http://redmine.encodedcc.org/issues/1393
if value.get('biosample_type') == 'primary cell line':
value['biosample_type'] = 'primary cell'
@upgrade_step('biosample', '6', '7')
def biosample_6_7(value, system):
# http://encode.stanford.edu/issues/1131
update_properties = {
"sex": "model_organism_sex",
"age": "model_organism_age",
"age_units": "model_organism_age_units",
"health_status": "model_organism_health_status",
"life_stage": "mouse_life_stage"
}
for key, val in update_properties.items():
if key in value:
if value["organism"] != "7745b647-ff15-4ff3-9ced-b897d4e2983c":
if key == "life_stage" and value[key] == "newborn":
value[val] = "postnatal"
else:
value[val] = value[key]
del value[key]
@upgrade_step('biosample', '7', '8')
def biosample_7_8(value, system):
# http://redmine.encodedcc.org/issues/2456
if value.get('worm_life_stage') == 'embryonic':
value['worm_life_stage'] = 'mixed stage (embryonic)'
@upgrade_step('biosample', '8', '9')
def biosample_8_9(value, system):
# http://encode.stanford.edu/issues/1596
if 'model_organism_age' in value:
age = value['model_organism_age']
        if re.match(r'\d+\.0(-\d+\.0)?', age):
new_age = age.replace('.0', '')
value['model_organism_age'] = new_age
@upgrade_step('biosample', '9', '10')
def biosample_9_10(value, system):
# http://redmine.encodedcc.org/issues/2591
context = system['context']
root = find_root(context)
publications = root['publications']
if 'references' in value:
new_references = []
for ref in value['references']:
if re.match('doi', ref):
new_references.append(REFERENCES_UUID[ref])
else:
item = publications[ref]
new_references.append(str(item.uuid))
value['references'] = new_references
@upgrade_step('biosample', '10', '11')
def biosample_10_11(value, system):
# http://redmine.encodedcc.org/issues/2905
if value.get('worm_synchronization_stage') == 'starved L1 larva':
value['worm_synchronization_stage'] = 'L1 larva starved after bleaching'
|
kidaa/encoded
|
src/encoded/upgrade/biosample.py
|
Python
|
mit
| 5,471
|
#!/usr/bin/env python
"""
Script for predicting TF binding with a trained model.
Use `predict.py -h` to see an auto-generated description of advanced options.
"""
import numpy as np
import pylab
import matplotlib
import pandas
import utils
import pickle
# Standard library imports
import sys
import os
import errno
import argparse
def make_argument_parser():
"""
Creates an ArgumentParser to read the options for this script from
sys.argv
"""
parser = argparse.ArgumentParser(
description="Generate predictions from a trained model.",
epilog='\n'.join(__doc__.strip().split('\n')[1:]).strip(),
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--inputdir', '-i', type=str, required=True,
help='Folder containing input data')
parser.add_argument('--modeldir', '-m', type=str, required=True,
help='Folder containing trained model generated by train.py.')
parser.add_argument('--factor', '-f', type=str, required=True,
help='The transcription factor to evaluate.')
parser.add_argument('--bed', '-b', type=str, required=True,
help='Sorted BED file containing intervals to predict on.')
parser.add_argument('--outputfile', '-o', type=str, required=True,
help='The output filename.')
return parser
def main():
"""
The main executable function
"""
parser = make_argument_parser()
args = parser.parse_args()
input_dir = args.inputdir
model_dir = args.modeldir
tf = args.factor
bed_file = args.bed
output_file = args.outputfile
print 'Loading genome'
genome = utils.load_genome()
print 'Loading model'
model_tfs, model_bigwig_names, features, model = utils.load_model(model_dir)
L = model.input_shape[0][1]
utils.L = L
assert tf in model_tfs
assert 'bigwig' in features
use_meta = 'meta' in features
use_gencode = 'gencode' in features
print 'Loading test data'
is_sorted = True
bigwig_names, meta_names, datagen_bed, nonblacklist_bools = utils.load_beddata(genome, bed_file, use_meta, use_gencode, input_dir, is_sorted)
assert bigwig_names == model_bigwig_names
if use_meta:
model_meta_file = model_dir + '/meta.txt'
assert os.path.isfile(model_meta_file)
model_meta_names = np.loadtxt(model_meta_file, dtype=str)
if len(model_meta_names.shape) == 0:
model_meta_names = [str(model_meta_names)]
else:
model_meta_names = list(model_meta_names)
assert meta_names == model_meta_names
print 'Generating predictions'
model_tf_index = model_tfs.index(tf)
model_predicts = model.predict_generator(datagen_bed, val_samples=len(datagen_bed), pickle_safe=True)
if len(model_tfs) > 1:
model_tf_predicts = model_predicts[:, model_tf_index]
else:
model_tf_predicts = model_predicts
final_scores = np.zeros(len(nonblacklist_bools))
final_scores[nonblacklist_bools] = model_tf_predicts
print 'Saving predictions'
df = pandas.read_csv(bed_file, sep='\t', header=None)
df[3] = final_scores
df.to_csv(output_file, sep='\t', compression='gzip', float_format='%.3e', header=False, index=False)
if __name__ == '__main__':
"""
See module-level docstring for a description of the script.
"""
main()
|
uci-cbcl/FactorNet
|
predict.py
|
Python
|
mit
| 3,431
|
#!/usr/bin/env python3
import argparse
import scipy
import matplotlib.pyplot as plot
import wells.publisher as publisher
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--interactive",
help="Interactive mode",
action="store_true")
parser.add_argument("-e", "--ext",
help="Output image extension",
type=str,
default="png")
parser.add_argument("-s", "--figsize",
help="Figure size",
type=str,
default=("2.8, 2.8"))
parser.add_argument("-m", "--mark",
help="Mark largest growth factor eigenvalue",
action="store_true")
parser.add_argument("--nx", "--xn",
help="Number of x ticks",
type=int,
default=5)
parser.add_argument("--ny", "--yn",
help="Number of y ticks",
type=int,
default=6)
parser.add_argument("--minz", "--zmin", "--minx", "--xmin",
help="Minimum x coordinate",
type=float)
parser.add_argument("--maxz", "--zmax", "--maxx", "--xmax",
help="Maximum x coordinate",
type=float)
parser.add_argument("--miny", "--ymin",
help="Minimum y coordinate",
type=float)
parser.add_argument("--maxy", "--ymax",
help="Maximum y coordinate",
type=float)
parser.add_argument("input",
help="Input file",
type=str)
args = parser.parse_args()
workspace = scipy.load(args.input)
delta = workspace["delta"]
es = workspace["stability_eigenvalues"]
n = es.imag.argmax()
e = es[n]
es = scipy.delete(es, n)
minx = args.minz if args.minz is not None else es.real.min()
maxx = args.maxz if args.maxz is not None else es.real.max()
miny = args.miny if args.miny is not None else es.imag.min()
maxy = args.maxy if args.maxy is not None else es.imag.max()
xticks = scipy.linspace(minx, maxx, args.nx)
yticks = scipy.linspace(miny, maxy, args.ny)
if not args.interactive:
figsize = [float(x) for x in args.figsize.split(",")]
filename = args.input.replace(".npz", "")
filename = filename + "_stev"
publisher.init({"figure.figsize": figsize})
plot.title(r"$\delta_p=%.1f$" % delta, y=1.05)
plot.scatter(es.real, es.imag, s=1, color="black")
if args.mark:
plot.scatter([e.real], [e.imag], s=6, color="red", marker="x")
else:
plot.scatter([e.real], [e.imag], s=1, color="black")
plot.xlim(minx, maxx)
plot.ylim(miny, maxy)
plot.xticks(xticks)
plot.yticks(yticks)
plot.xlabel(r"$\mathrm{Re}\,\lambda$")
plot.ylabel(r"$\mathrm{Im}\,\lambda$")
plot.show()
if args.interactive:
plot.show()
else:
publisher.publish(filename, args.ext)
|
ioreshnikov/wells
|
stability_eigenvalue.py
|
Python
|
mit
| 2,880
|
"""
All webhook types
:see https://developers.facebook.com/docs/messenger-platform/webhook-reference
"""
from __future__ import unicode_literals
MESSAGE_RECEIVED = 'message_received'
POSTBACK_RECEIVED = 'postback_received'
AUTHENTICATION = 'authentication'
ACCOUNT_LINKING = 'account_linking'
MESSAGE_DELIVERED = 'message_delivered'
MESSAGE_READ = 'message_read'
MESSAGE_ECHO = 'message_echo'
ALL_WEBHOOKS = (
MESSAGE_RECEIVED,
POSTBACK_RECEIVED,
AUTHENTICATION,
ACCOUNT_LINKING,
MESSAGE_DELIVERED,
MESSAGE_READ,
MESSAGE_ECHO,
)
|
shananin/fb_messenger
|
fb_messenger/types/webhook_types.py
|
Python
|
mit
| 559
|
"""
Django settings for ssbwp2 project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7wn87l+sc*py@nodx5wt)gzitgddv*znu9@h^bv_xei5pzalgb'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'service',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ssbwp2.urls'
WSGI_APPLICATION = 'ssbwp2.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'es-es'
TIME_ZONE = 'Europe/Madrid'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
antorof/django-simple
|
ssbwp2/settings.py
|
Python
|
mit
| 2,067
|
from django.contrib.auth.models import AbstractUser
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from findyour3d.utils.views import add_months, check_full_discount_for_premium
@python_2_unicode_compatible
class User(AbstractUser):
TYPE_CHOICES = (
(1, 'Customer'),
(2, 'Company')
)
PLAN_CHOICES = (
(1, 'Starter'),
(2, 'Premium - 3 month'),
(3, 'Premium - 12 month')
)
# First Name and Last Name do not cover name patterns
# around the globe.
name = models.CharField(_('Name of User'), blank=True, max_length=255)
user_type = models.IntegerField(choices=TYPE_CHOICES, default=1)
default_card = models.CharField(max_length=255, blank=True, null=True,
verbose_name='Default Card ID')
card_last = models.CharField(max_length=255, blank=True, null=True)
card_type = models.CharField(max_length=255, blank=True, null=True)
stripe_id = models.CharField(max_length=255, blank=True, null=True)
card_expiry = models.CharField(blank=True, null=True, max_length=255)
payment_active = models.BooleanField(default=False)
payment_issue = models.CharField(max_length=255, blank=True, null=True)
plan = models.IntegerField(choices=PLAN_CHOICES, blank=True, null=True)
is_cancelled = models.BooleanField(default=False)
paid_at = models.DateTimeField(blank=True, null=True)
def __str__(self):
return self.username
def get_absolute_url(self):
return reverse('users:detail', kwargs={'username': self.username})
def next_pay_day_on_three_month_plan(self):
if self.paid_at:
next_pay_day = add_months(self.paid_at, 3)
return next_pay_day
def next_pay_day_on_one_year_plan(self):
if self.paid_at:
next_pay_day = add_months(self.paid_at, 12)
return next_pay_day
def is_payment_active_or_free_coupon(self):
if self.payment_active:
return True
else:
return check_full_discount_for_premium(self)
|
hqpr/findyour3d
|
findyour3d/users/models.py
|
Python
|
mit
| 2,216
|
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.created_date_v30_rc1 import CreatedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30_rc1 import LastModifiedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.source_v30_rc1 import SourceV30Rc1 # noqa: F401,E501
class EmailV30Rc1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'created_date': 'CreatedDateV30Rc1',
'last_modified_date': 'LastModifiedDateV30Rc1',
'source': 'SourceV30Rc1',
'email': 'str',
'path': 'str',
'visibility': 'str',
'verified': 'bool',
'primary': 'bool',
'put_code': 'int'
}
attribute_map = {
'created_date': 'created-date',
'last_modified_date': 'last-modified-date',
'source': 'source',
'email': 'email',
'path': 'path',
'visibility': 'visibility',
'verified': 'verified',
'primary': 'primary',
'put_code': 'put-code'
}
def __init__(self, created_date=None, last_modified_date=None, source=None, email=None, path=None, visibility=None, verified=None, primary=None, put_code=None): # noqa: E501
"""EmailV30Rc1 - a model defined in Swagger""" # noqa: E501
self._created_date = None
self._last_modified_date = None
self._source = None
self._email = None
self._path = None
self._visibility = None
self._verified = None
self._primary = None
self._put_code = None
self.discriminator = None
if created_date is not None:
self.created_date = created_date
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if source is not None:
self.source = source
if email is not None:
self.email = email
if path is not None:
self.path = path
if visibility is not None:
self.visibility = visibility
if verified is not None:
self.verified = verified
if primary is not None:
self.primary = primary
if put_code is not None:
self.put_code = put_code
@property
def created_date(self):
"""Gets the created_date of this EmailV30Rc1. # noqa: E501
:return: The created_date of this EmailV30Rc1. # noqa: E501
:rtype: CreatedDateV30Rc1
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""Sets the created_date of this EmailV30Rc1.
:param created_date: The created_date of this EmailV30Rc1. # noqa: E501
:type: CreatedDateV30Rc1
"""
self._created_date = created_date
@property
def last_modified_date(self):
"""Gets the last_modified_date of this EmailV30Rc1. # noqa: E501
:return: The last_modified_date of this EmailV30Rc1. # noqa: E501
:rtype: LastModifiedDateV30Rc1
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this EmailV30Rc1.
:param last_modified_date: The last_modified_date of this EmailV30Rc1. # noqa: E501
:type: LastModifiedDateV30Rc1
"""
self._last_modified_date = last_modified_date
@property
def source(self):
"""Gets the source of this EmailV30Rc1. # noqa: E501
:return: The source of this EmailV30Rc1. # noqa: E501
:rtype: SourceV30Rc1
"""
return self._source
@source.setter
def source(self, source):
"""Sets the source of this EmailV30Rc1.
:param source: The source of this EmailV30Rc1. # noqa: E501
:type: SourceV30Rc1
"""
self._source = source
@property
def email(self):
"""Gets the email of this EmailV30Rc1. # noqa: E501
:return: The email of this EmailV30Rc1. # noqa: E501
:rtype: str
"""
return self._email
@email.setter
def email(self, email):
"""Sets the email of this EmailV30Rc1.
:param email: The email of this EmailV30Rc1. # noqa: E501
:type: str
"""
self._email = email
@property
def path(self):
"""Gets the path of this EmailV30Rc1. # noqa: E501
:return: The path of this EmailV30Rc1. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this EmailV30Rc1.
:param path: The path of this EmailV30Rc1. # noqa: E501
:type: str
"""
self._path = path
@property
def visibility(self):
"""Gets the visibility of this EmailV30Rc1. # noqa: E501
:return: The visibility of this EmailV30Rc1. # noqa: E501
:rtype: str
"""
return self._visibility
@visibility.setter
def visibility(self, visibility):
"""Sets the visibility of this EmailV30Rc1.
:param visibility: The visibility of this EmailV30Rc1. # noqa: E501
:type: str
"""
allowed_values = ["LIMITED", "REGISTERED_ONLY", "PUBLIC", "PRIVATE"] # noqa: E501
if visibility not in allowed_values:
raise ValueError(
"Invalid value for `visibility` ({0}), must be one of {1}" # noqa: E501
.format(visibility, allowed_values)
)
self._visibility = visibility
@property
def verified(self):
"""Gets the verified of this EmailV30Rc1. # noqa: E501
:return: The verified of this EmailV30Rc1. # noqa: E501
:rtype: bool
"""
return self._verified
@verified.setter
def verified(self, verified):
"""Sets the verified of this EmailV30Rc1.
:param verified: The verified of this EmailV30Rc1. # noqa: E501
:type: bool
"""
self._verified = verified
@property
def primary(self):
"""Gets the primary of this EmailV30Rc1. # noqa: E501
:return: The primary of this EmailV30Rc1. # noqa: E501
:rtype: bool
"""
return self._primary
@primary.setter
def primary(self, primary):
"""Sets the primary of this EmailV30Rc1.
:param primary: The primary of this EmailV30Rc1. # noqa: E501
:type: bool
"""
self._primary = primary
@property
def put_code(self):
"""Gets the put_code of this EmailV30Rc1. # noqa: E501
:return: The put_code of this EmailV30Rc1. # noqa: E501
:rtype: int
"""
return self._put_code
@put_code.setter
def put_code(self, put_code):
"""Sets the put_code of this EmailV30Rc1.
:param put_code: The put_code of this EmailV30Rc1. # noqa: E501
:type: int
"""
self._put_code = put_code
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(EmailV30Rc1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, EmailV30Rc1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
Royal-Society-of-New-Zealand/NZ-ORCID-Hub
|
orcid_api_v3/models/email_v30_rc1.py
|
Python
|
mit
| 9,158
|
from compass.compass import get_bearing
|
piecakes/compass
|
compass/__init__.py
|
Python
|
mit
| 40
|
# -*- coding: utf-8 -*-
import os, sys
from django.conf import settings
from django.core.management import call_command
DIRNAME = os.path.dirname(__file__)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(DIRNAME, 'database.db'),
}
}
settings.configure(DEBUG = True,
DATABASES=DATABASES,
USE_TZ=True,
SNOWSHOESTAMP_KEY='12345678910111213141516171819201234567891011121314151617181920',
SNOWSHOESTAMP_SECRET='bananamonkeygorillachimp',
ROOT_URLCONF='dj_sendgrid.urls',
PASSWORD_HASHERS=('django.contrib.auth.hashers.MD5PasswordHasher',), # simple fast hasher but not secure
INSTALLED_APPS = ('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'dj_sendgrid',))
from django.test.simple import DjangoTestSuiteRunner
call_command('syncdb', interactive=False)
failures = DjangoTestSuiteRunner().run_tests(['dj_sendgrid',], verbosity=1)
if failures:
sys.exit(failures)
|
rosscdh/django-sendgrid
|
runtests.py
|
Python
|
mit
| 1,270
|
#!/usr/bin/env python
from mock import MagicMock
from importlib import import_module
class FixtureClient(object):
def __init__(self):
# Keep track of Service instances in order to do future assertions
self.loaded_services = {}
def __call__(self, *args, **kwargs):
return self
def __getitem__(self, service_name):
if service_name in self.loaded_services:
return self.loaded_services[service_name]
service = FixtureService(service_name)
self.loaded_services[service_name] = service
return service
def reset_mock(self):
self.loaded_services = {}
Client = FixtureClient()
class FixtureService(object):
def __init__(self, service_name):
self.service_name = service_name
try:
self.module = import_module('slos.test.fixtures.%s'
% service_name)
except ImportError:
raise NotImplementedError('%s fixture is not implemented'
% service_name)
# Keep track of MagicMock instances in order to do future assertions
self.loaded_methods = {}
def __getattr__(self, name):
if name in self.loaded_methods:
return self.loaded_methods[name]
call_handler = MagicMock()
fixture = getattr(self.module, name, None)
if fixture is not None:
call_handler.return_value = fixture
else:
raise NotImplementedError('%s::%s fixture is not implemented'
% (self.service_name, name))
self.loaded_methods[name] = call_handler
return call_handler
|
softlayer/softlayer-cinder-driver
|
slos/test/mocks/SoftLayer/__init__.py
|
Python
|
mit
| 1,697
|
#coding:utf-8
from scipy import stats
import numpy as np
from pandas import Series,DataFrame
from openpyxl import load_workbook
import math
import uuid
import os
def chart(data_ws,result_ws):
pass
def _produc_random_value(mean,stdrange):
b = np.random.uniform(*stdrange)
a = b/math.sqrt(2)
x1,x2 = mean-a, mean+a
return x1,x2,b
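    # Note: b is drawn uniformly from `stdrange` and a = b/sqrt(2), so the returned
    # pair (mean - a, mean + a) averages to `mean` and its sample standard deviation
    # (ddof=1) is sqrt(a**2 + a**2) = a*sqrt(2) = b; the two values therefore
    # reproduce the requested mean with a stdev drawn from the requested range.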
def _set_od_value(ws,row,x1,x2):
if row % 2 == 1:
ws['F'+str(row)]=x1
ws['F'+str(row+1)]=x2
def _get_mean_value(ws,row):
if row % 2 == 1:
return ws['G'+str(row)].value
else:
return ws['G'+str(row-1)].value
def _get_stdev_value(ws,row):
if row % 2 == 1:
return ws['H'+str(row)].value
else:
return ws['H'+str(row-1)].value
def _set_stdev_value(ws,row,stdev):
if row % 2 == 1:
ws['H'+str(row)] = stdev
def _get_one_row(ws,row):
time = ws['A'+str(row)].value
organ = ws['B'+str(row)].value
sp = ws['C'+str(row)].value
c = ws['D'+str(row)].value
rep = ws['E'+str(row)].value
od = ws['F'+str(row)].value
mean = _get_mean_value(ws,row)
stdev = _get_stdev_value(ws,row)
return Series([time,organ,sp,c,rep,float(od),float(mean),stdev],\
index=['time','organ','sp','c','rep','od','mean','stdev'])
def get_whole_dataframe(ws):
data={}
for i in range(3,ws.max_row+1):
data[i]=_get_one_row(ws,i)
return DataFrame(data).T
def _fill_data_ws(ws,stdrange):
for i in range(3,ws.max_row+1,2):
mean = _get_mean_value(ws,i)
x1,x2,b=_produc_random_value(mean,stdrange)
_set_od_value(ws,i,x1,x2)
_set_stdev_value(ws,i,b)
def _set_p_table_header(ws,result_ws):
for i in range(3,ws.max_row+1,10):
group = []
for j in range(i,i+10,2):
gname=ws['A'+str(j)].value+'_'+\
ws['B'+str(j)].value+'_'+\
ws['C'+str(j)].value+'_'+\
str(ws['D'+str(j)].value)
group.append(gname)
for k in range(5):
result_ws['B'+str(i+k+1)]=group[k]
result_ws[chr(ord('C')+k)+str(i)]=group[k]
# for i in range(3,ws.max_row+1,20):
# group = []
# for j in range(i,i+10,2):
# gname=ws['A'+str(j)].value+'_'+\
# ws['B'+str(j)].value+'_'+\
# ws['C'+str(j)].value+'_'+\
# ws['C'+str(j+10)].value+'_'+\
# str(ws['D'+str(j)].value)
# group.append(gname)
# for k in range(5):
# result_ws['J'+str(i+2*k+6)] = group[k]
def produce_p_table(ws,result_ws):
df = get_whole_dataframe(ws)
    _set_p_table_header(ws,result_ws)
for (time,organ,sp),group_l1 in df.groupby(['time','organ','sp']):
group_l2 = [g for c,g in group_l1.groupby(['c'])]
i = group_l2[0].index[0]
for m in range(5):
for n in range(m+1,5):
g1 = group_l2[m]
g2 = group_l2[n]
f,p = stats.f_oneway(g1['od'],g2['od'])
result_ws[chr(ord('C')+m)+str(i+1+n)]=p
# for (time,organ,c),group_l1 in df.groupby(['time','organ','c']):
# group_l2 = [g for c,g in group_l1.groupby(['sp'])]
# i = group_l2[0].index[0]
# g1 = group_l2[0]
# g2 = group_l2[1]
# f,p = stats.f_oneway(g1['od'],g2['od'])
# result_ws['K'+str(i+6)]=p
def calc(data_ws,result_ws):
_fill_data_ws(data_ws,(0.1,0.6))
for i in range(3,data_ws.max_row+1,10):
group=[]
for j in range(i,i+10,2):
gname=data_ws['A'+str(j)].value+'_'+\
data_ws['B'+str(j)].value+'_'+\
data_ws['C'+str(j)].value+'_'+\
str(data_ws['D'+str(j)].value)
group.append([gname,Series([data_ws['F'+str(j)].value,\
data_ws['F'+str(j+1)].value])])
for k in range(5):
result_ws['B'+str(i+k+1)]=group[k][0]
result_ws[chr(ord('C')+k)+str(i)]=group[k][0]
for m in range(5):
for n in range(m,5):
args = [group[m][1],group[n][1]]
f,p = stats.f_oneway(*args)
result_ws[chr(ord('C')+m)+str(i+1+n)]=p
def main():
wb = load_workbook(filename = 'data/PODz.xlsx')
salt = wb.get_sheet_by_name('salt')
alkali = wb.get_sheet_by_name('alkali')
salt_result = wb.create_sheet(title="salt_result")
alkali_result = wb.create_sheet(title="alkali_result")
calc(salt,salt_result)
calc(alkali,alkali_result)
wb.save(filename = 'data/PODz_result.xlsx')
    print('Processing complete!')
def test(data_file,result_file):
wb = load_workbook(data_file)
sheetnames = wb.get_sheet_names()
for name in sheetnames:
sheet = wb.get_sheet_by_name(name)
result_sheet = wb.create_sheet(title='result_'+name)
        r = input(name+'->Please enter the standard deviation range (two numbers separated by a comma): ')
x,y = r.split(',')
x,y = float(x),float(y)
_fill_data_ws(sheet, (x,y))
        print(name+"->Filling in random values complete!")
produce_p_table(sheet, result_sheet)
        print(name+"->P-value calculation complete!")
# salt = wb.get_sheet_by_name('salt')
# alkali = wb.get_sheet_by_name('alkali')
# salt_result = wb.create_sheet(title='salt_result')
# alkali_result = wb.create_sheet(title="alkali_result")
# _fill_data_ws(salt, stdrange)
# _fill_data_ws(alkali, stdrange)
# produce_p_table(salt, salt_result)
# produce_p_table(alkali, alkali_result)
wb.save(result_file)
def add_tags(result_file):
wb = load_workbook(result_file)
if __name__ == "__main__":
# main()
data_file = 'data2/ggb (copy).xlsx'
result_file = data_file.split('.')[0]+'_result('\
+str(uuid.uuid1())[:8]+').xlsx'
test(data_file,result_file)
    print(data_file+': processing complete!')
|
ppmm/python-bits
|
yj_anova_test.py
|
Python
|
mit
| 6,003
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-11 03:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0003_post'),
]
operations = [
migrations.DeleteModel(
name='Post',
),
migrations.AddField(
model_name='user',
name='bio',
field=models.CharField(default='', max_length=255),
),
]
|
mijkal/DjStarter
|
apps/user/migrations/0004_auto_20161211_0309.py
|
Python
|
mit
| 508
|
import sys
verbose = sys.argv[1] == "true"
for line in sys.stdin:
if line.startswith('c("'):
features = line.rstrip("\n").replace('c("', '').replace(")", "").split(", ")
features = [x.replace('"', '') for x in features]
print(",".join(features).rstrip(","))
# if verbose:
# sys.stderr.write(line)
|
srp33/ShinyLearner
|
AlgorithmScripts/Helper/ReformatMlrFeatureSelectionOutput.py
|
Python
|
mit
| 346
|
from nextgen4b.analyze.to_csv import get_pos_stats, write_all_pos_stats, \
write_all_simple_misinc, get_stats
from nextgen4b.analyze.analyze import analyze_all_experiments
import nextgen4b.analyze.likelihood
import nextgen4b.analyze.words
__all__ = ['get_pos_stats', 'write_all_pos_stats', 'write_all_simple_misinc',
'analyze_all_experiments', 'likelihood', 'words']
|
tcyb/nextgen4b
|
nextgen4b/analyze/__init__.py
|
Python
|
mit
| 397
|
#
# This file is part of PySkiplist. PySkiplist is Copyright (c) 2012-2015 by
# the PySkiplist authors.
#
# PySkiplist is free software available under the MIT license. See the file
# named LICENSE distributed with this file for the exact licensing terms.
from __future__ import absolute_import, print_function
import unittest
from pyskiplist import dllist, Node
from pyskiplist.dllist import getsize
from support import MemoryTest
class TestDllist(MemoryTest):
def mem_node(self):
self.add_result(getsize(Node()))
def mem_dllist(self):
self.add_result(getsize(dllist()))
if __name__ == '__main__':
TestDllist.setup_loader()
unittest.main()
|
geertj/pyskiplist
|
tests/mem_dllist.py
|
Python
|
mit
| 682
|
""" smashlib.plugins.prompt
"""
#from IPython.utils.traitlets import Bool, Unicode
from smashlib import get_smash
from smashlib.plugins import Plugin
from smashlib.config import SmashConfig
from smashlib.prompt.component import PromptComponent
from smashlib._logging import smash_log
DEFAULT_IN_TEMPLATE = u'In [\\#]: '
DEFAULT_PROMPT = [
PromptComponent(
type='env',
value='$USER'),
PromptComponent(
type='literal', value=':'),
PromptComponent(
type='python',
value="smashlib.prompt.working_dir"),
PromptComponent(
type='literal', value=' ',),
PromptComponent(
type='python',
value='smashlib.prompt.git_branch',
color='blue',),
PromptComponent(
type='python',
value='smashlib.prompt.user_symbol',
color='red',),
PromptComponent(
type='python',
value='smashlib.prompt.venv',
color='yellow',),
]
class SmashPrompt(Plugin):
""" this extension requires ipy_cd_hook """
def uninstall(self):
super(SmashPrompt, self).uninstall()
self.shell.prompt_manager.in_template = DEFAULT_IN_TEMPLATE
def _load_prompt_config(self):
c = SmashConfig()
components = c.load_from_etc('prompt.json')
if not components:
components = DEFAULT_PROMPT
out = []
for component in components:
out.append(PromptComponent(**component))
components = out
self.prompt_components = components
def init(self):
smash_log.info("initializing")
self._load_prompt_config()
self.update_prompt()
self.contribute_hook('pre_prompt_hook', self.update_prompt)
def update_prompt(self, himself=None):
tmp = self.get_prompt()
self.shell.prompt_manager.update_prompt('in', tmp)
def get_prompt(self):
prompt = '\n'
for component in self.prompt_components:
smash_log.debug("calling prompt component: " + str(component))
assert callable(component), str(
"bad prompt component: " + str(component))
prompt += component()
prompt = prompt.replace(' ', ' ')
return prompt
def load_ipython_extension(ip):
""" called by %load_ext magic"""
ip = get_ipython()
smash = get_smash()
sp = SmashPrompt(ip)
smash.prompt_manager = sp
return sp
|
mattvonrocketstein/smash
|
smashlib/plugins/prompt.py
|
Python
|
mit
| 2,411
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class Application(Resource):
"""Application entity for Intune MAM.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id
:vartype id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:param tags: Resource Tags
:type tags: dict
:param location: Resource Location
:type location: str
:param friendly_name:
:type friendly_name: str
:param platform: Possible values include: 'ios', 'android', 'windows'.
Default value: "ios" .
:type platform: str
:param app_id:
:type app_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'friendly_name': {'required': True},
'platform': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
'platform': {'key': 'properties.platform', 'type': 'str'},
'app_id': {'key': 'properties.appId', 'type': 'str'},
}
def __init__(self, friendly_name, tags=None, location=None, platform="ios", app_id=None):
super(Application, self).__init__(tags=tags, location=location)
self.friendly_name = friendly_name
self.platform = platform
self.app_id = app_id
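# Hedged usage sketch (added editorially, not part of the generated SDK): friendly_name
# is the only required constructor argument and platform defaults to "ios"; the values
# below are purely illustrative.
def _example_application():
    return Application(friendly_name='Sample App', platform='android', app_id='12345')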
|
rjschwei/azure-sdk-for-python
|
unreleased/azure-mgmt-intune/azure/mgmt/intune/models/application.py
|
Python
|
mit
| 2,147
|
# Weka Normalization
# Normalizes all numeric values in the given dataset
# (apart from the class attribute, if set).
# The resulting values are by default in [0,1] for the data used to compute
# the normalization intervals. But with the scale and translation parameters one can change that,
# e.g., with scale = 2.0 and translation = -1.0 you get values in the range [-1,+1].
# http://weka.sourceforge.net/doc.dev/weka/filters/unsupervised/attribute/Normalize.html
#------------------------------------------------
# Cristian R Munteanu
# University of A Coruna
# muntisa@gmail.com
#------------------------------------------------
import os, sys
#--------------------------------------
def WekaNormalize(sInput,sOutput):
# we consider that input is ARFF
# sOutput=sInput[:-5]+"_Norm.arff"
print "-> Weka Normalization for "+sOutput+" ..."
cmd = 'java weka.filters.unsupervised.attribute.Normalize -i %s -o %s' % (sInput, sOutput)
print cmd
os.system(cmd)
return
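#--------------------------------------
# Hedged variant (added editorially): the header above mentions the scale and
# translation parameters of the Normalize filter. Assuming the filter's -S (scale)
# and -T (translation) options, a [-1,+1] output range could be requested as below;
# the option names are an assumption, so check the filter's help output first.
def WekaNormalizeScaled(sInput, sOutput, scale=2.0, translation=-1.0):
    cmd = 'java weka.filters.unsupervised.attribute.Normalize -S %s -T %s -i %s -o %s' % (scale, translation, sInput, sOutput)
    print cmd
    os.system(cmd)
    return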
#--------------------------------------
#############################
## MAIN
sOrig=sys.argv[1] # ARFF file with original data
sNormal=sys.argv[2] # ARFF file with normalized data
WekaNormalize(sOrig,sNormal)
|
muntisa/pyWeka
|
PyScripts/wNormaliz.py
|
Python
|
mit
| 1,219
|
from django.core.management import call_command
from freezegun import freeze_time
import pytest
@pytest.mark.django_db
def test_postlinks_command_friday(share, mocker):
mocked = mocker.patch('amweekly.slack.jobs.process_incoming_webhook.delay')
with freeze_time("2017-06-23"):
call_command('postlinks')
assert mocked.called
@pytest.mark.django_db
def test_postlinks_command_not_friday(share, mocker):
mocked = mocker.patch('amweekly.slack.jobs.process_incoming_webhook.delay')
with freeze_time("2017-06-22"):
call_command('postlinks')
assert not mocked.called
|
akrawchyk/amweekly
|
amweekly/tests/test_management.py
|
Python
|
mit
| 605
|
import numpy as np
from scipy.special import *
import scipy.linalg as lin
import FE
from Tkinter import *
import triangle
import matplotlib as mpl
mpl.use('TkAgg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import sys
# GUI for a triangle explorer
class TriGUI:
def __init__(self,root,canvas_width,canvas_height):
self.root=root
self.canvas_width = canvas_width
self.canvas_height = canvas_height
self.moduli_space = {'vertices':np.array([[0.,0.],[1.,0.],[0.5,np.sqrt(3.)/2.]]),'triangles':[0,1,2]}
self.fig, self.ax = mpl.pyplot.subplots()
self.ax.clear()
self.ax.autoscale(enable=False)
self.ax.axis('off')
self.canvas = FigureCanvasTkAgg(self.fig,master=root)
self.canvas.show()
self.canvas.get_tk_widget().pack()
X = self.moduli_space['vertices'][:,0]
Y = self.moduli_space['vertices'][:,1]
outline = tri.Triangulation(X,Y)
self.ax.triplot(outline)
#self.canvas.mpl_connect("button_press_event", self.setVertex)
    def quit(self):
self.root.destroy()
class EigenfunctionGUI:
def __init__(self,root,canvas_width,canvas_height,a,b,c):
self.root=root
self.canvas_width = canvas_width
if __name__ == '__main__':
root = Tk()
T = TriGUI(root,600,600)
root.mainloop()
T.quit()
|
necoleman/fepy
|
Applications/triangle_survey_gui.py
|
Python
|
mit
| 1,353
|
# Generated by Django 2.2.10 on 2020-03-27 00:40
import autoslug.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("monitorings", "0014_auto_20180917_0846"),
]
operations = [
migrations.AlterField(
model_name="monitoring",
name="name",
field=models.CharField(max_length=100, verbose_name="Name"),
),
migrations.AlterField(
model_name="monitoring",
name="slug",
field=autoslug.fields.AutoSlugField(
editable=False,
max_length=110,
populate_from="name",
unique=True,
verbose_name="Slug",
),
),
]
|
watchdogpolska/feder
|
feder/monitorings/migrations/0015_auto_20200327_0040.py
|
Python
|
mit
| 774
|
#!/usr/bin/env python
import sys
sys.path.append('/usr/share/inkscape/extensions') # or another path, as necessary
sys.path.append('/Applications/Inkscape.app/Contents/Resources/extensions')
sys.path.append('C:\Program Files\Inkscape\share\extensions')
#import xml.etree.ElementTree as ET
#ET.register_namespace('figurefirst', 'http://www.figurefirst.com')
# We will use the inkex module with the predefined Effect base class.
import inkex
# The simplestyle module provides functions for style parsing.
from simplestyle import *
class FigureFirstFigureTagEffect(inkex.Effect):
"""
Modified from example Inkscape effect extension. Tags object with axis tag.
"""
def __init__(self):
"""
Constructor.
Defines the "--name" option of a script.
"""
# Call the base class constructor.
inkex.Effect.__init__(self)
#import matplotlib
        #Define string option "--name" with "-n" shortcut and default value 'none'.
self.OptionParser.add_option('-n', '--name', action = 'store',
type = 'string', dest = 'name', default = 'none',
help = 'Name figure')
self.OptionParser.add_option('-t', '--template', action = 'store',
type = 'string', dest = 'template', default = 'none',
help = 'Name template (optional)')
inkex.NSS[u"figurefirst"] = u"http://flyranch.github.io/figurefirst/"
try:
inkex.etree.register_namespace("figurefirst","http://flyranch.github.io/figurefirst/")
except AttributeError:
#inkex.etree._NamespaceRegistry.update(inkex.addNS("name", "figurefirst"))
#This happens on windows version of inkscape - it might be good to check
#and see if the namespace has been correctly added to the document
pass
def effect(self):
"""
Effect behaviour.
        Overrides the base class method and tags the selected object with a figurefirst figure element.
"""
        # Get script's "--name" option value.
name = self.options.name
template = self.options.template
# Get access to main SVG document element and get its dimensions.
svg = self.document.getroot()
# or alternatively
# Create text element
if len(self.selected.values())>1:
raise Exception('too many items')
else:
el = self.selected.values()[0]
newElm = inkex.etree.Element(inkex.addNS("figure", "figurefirst"))
newElm.attrib[inkex.addNS("name", "figurefirst")] = name
if template != 'none':
newElm.attrib[inkex.addNS("template", "figurefirst")] = template
#print inkex.NSS
el.append(newElm)
# Create effect instance and apply it.
effect = FigureFirstFigureTagEffect()
effect.affect()
|
FlyRanch/figurefirst
|
inkscape_extensions/0.x/tag_figure.py
|
Python
|
mit
| 2,810
|
# -*- encoding: utf-8 -*-
import logging
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
import re
import time
from datetime import datetime
import requests
log = logging.getLogger('facebook')
log.setLevel(logging.WARN)
#MESSAGE_URL = 'https://m.facebook.com/messages/send/?icm=1&refid=12'
MESSAGE_URL = 'https://www.facebook.com/ajax/mercury/send_messages.php?dpr=1'
UPLOAD_URL = 'https://upload.facebook.com/ajax/mercury/upload.php?'
TYP_URL = 'https://www.facebook.com/ajax/messaging/typ.php'
READ_URL = 'https://www.facebook.com/ajax/mercury/change_read_status.php'
# define like sticker id
LIKE_STICKER = {
'l': '369239383222810',
'm': '369239343222814',
's': '369239263222822'
}
def send_group(fb, thread, body, pic=None, sticker=None, like=None):
data = {
"message_batch[0][action_type]": "ma-type:user-generated-message",
"message_batch[0][author]": "fbid:{}".format(fb.user_id),
"message_batch[0][source]": "source:chat:web",
"message_batch[0][body]": body,
"message_batch[0][signatureID]": "3c132b09",
"message_batch[0][ui_push_phase]": "V3",
"message_batch[0][status]": "0",
"message_batch[0][thread_fbid]": thread,
"client": "mercury",
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
}
if pic:
# upload the picture and get picture form data
pic_data = upload_picture(fb, pic)
if pic_data:
# merge together to send message with picture
data.update(pic_data)
# add sticker if sticker is available
if any([sticker, like]):
# if like is not None, find the corresponding sticker id
if like:
try:
sticker = LIKE_STICKER[like.lower()]
except KeyError:
# if user doesn't enter l or m or s, then use the large one
sticker = LIKE_STICKER['l']
data["message_batch[0][sticker_id]"] = sticker
fb.session.post(MESSAGE_URL, data)
def send_person(fb, person, body, pic=None, sticker=None, like=None):
'''data = {
"message_batch[0][action_type]": "ma-type:user-generated-message",
"message_batch[0][author]": "fbid:{}".format(fb.user_id),
"message_batch[0][source]": "source:chat:web",
"message_batch[0][body]": body,
"message_batch[0][signatureID]": "3c132b09",
"message_batch[0][ui_push_phase]": "V3",
"message_batch[0][status]": "0",
"message_batch[0][specific_to_list][0]": "fbid:{}".format(person),
"message_batch[0][specific_to_list][1]": "fbid:{}".format(fb.user_id),
"client": "mercury",
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
}'''
data = {
"__a": "1",
"__dyn": "",
"__pc": "EXP1:DEFAULT",
"__req": "1d",
"__rev": "2274481",
"__user": fb.user_id,
"client": "mercury",
"fb_dtsg": fb.dtsg,
"message_batch[0][action_type]": "ma - type:user - generated - message",
"message_batch[0][author]": "fbid:{}".format(fb.user_id),
"message_batch[0][author_email]": "",
"message_batch[0][body]": body,
"message_batch[0][ephemeral_ttl_mode]": "0",
"message_batch[0][has_attachment]": "false",
"message_batch[0][html_body]": "false",
"message_batch[0][is_filtered_content]": "false",
"message_batch[0][is_filtered_content_account]": "false",
"message_batch[0][is_filtered_content_bh]": "false",
"message_batch[0][is_filtered_content_invalid_app]": "false",
"message_batch[0][is_filtered_content_quasar]": "false",
"message_batch[0][is_forward]": "false",
"message_batch[0][is_spoof_warning]": "false",
"message_batch[0][is_unread]": "false",
"message_batch[0][manual_retry_cnt]": "0",
"message_batch[0][other_user_fbid]": person,
"message_batch[0][source]": "source:chat:web",
"message_batch[0][source_tags][0]": "source:chat",
"message_batch[0][specific_to_list][0]": "fbid:{}".format(person),
"message_batch[0][specific_to_list][1]": "fbid:{}".format(fb.user_id),
"message_batch[0][status]": "0",
"message_batch[0][thread_id]": "",
"message_batch[0][timestamp]": str(round(time.mktime(datetime.now().timetuple()) * 1000)),
"message_batch[0][timestamp_absolute]": "Today",
"message_batch[0][timestamp_relative]": datetime.now().strftime("%I:%M%P"),
"message_batch[0][timestamp_time_passed]": "0",
"message_batch[0][ui_push_phase]": "V3",
"ttstamp": generate_ttstamp(fb.dtsg)
}
if pic:
# upload the picture and get picture form data
pic_data = upload_picture(fb, pic)
if pic_data:
# merge together to send message with picture
data.update(pic_data)
# add sticker if sticker is available
if any([sticker, like]):
# if like is not None, find the corresponding sticker id
if like:
try:
sticker = LIKE_STICKER[like.lower()]
except KeyError:
# if user doesn't enter l or m or s, then use the large one
sticker = LIKE_STICKER['l']
data["message_batch[0][sticker_id]"] = sticker
fb.session.post(MESSAGE_URL, data)
def upload_picture(fb, pic):
params = {
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
'ft[tn]': '+M',
}
# upload the image to facebook server, filename should be unique
res = fb.session.post(UPLOAD_URL + urlencode(params), files={
'images_only': 'true',
'upload_1024': (str(time.time()), requests.get(pic).content, 'image/jpeg')
})
# check status code
if res.status_code != 200:
return
# check image_id is valid
m = re.search(r'"image_id":(\d+),', res.text)
if not m:
return
image_id = m.group(1)
return {
"message_batch[0][has_attachment]": "true",
"message_batch[0][preview_attachments][0][upload_id]": "upload_1024",
"message_batch[0][preview_attachments][0][attach_type]": "photo",
"message_batch[0][preview_attachments][0][preview_uploading]": "true",
"message_batch[0][preview_attachments][0][preview_width]": "540",
"message_batch[0][preview_attachments][0][preview_height]": "720",
"message_batch[0][image_ids][0]": image_id,
}
def group_typing(fb, thread):
data = {
"typ": "1",
"to": "",
"source": "web-messenger",
"thread": thread,
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
}
fb.session.post(TYP_URL, data)
def person_typing(fb, thread):
data = {
"typ": "1",
"to": thread,
"source": "web-messenger",
"thread": thread,
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
}
fb.session.post(TYP_URL, data)
def read(fb, thread):
data = {
"ids[{}]".format(thread): "true",
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
}
fb.session.post(READ_URL, data)
def generate_ttstamp(dtsg):
u = ''
v = 0
while v < len(dtsg):
u += str(ord(dtsg[v]))
v += 1
ttstamp = '2%s' % u
return ttstamp
|
spartak1547/Facebot
|
facebot/message.py
|
Python
|
mit
| 8,619
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class PublicIPAddress(Resource):
"""Public IP address resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param public_ip_allocation_method: The public IP allocation method.
Possible values are: 'Static' and 'Dynamic'. Possible values include:
'Static', 'Dynamic'
:type public_ip_allocation_method: str or
~azure.mgmt.network.v2017_03_01.models.IPAllocationMethod
:param public_ip_address_version: The public IP address version. Possible
values are: 'IPv4' and 'IPv6'. Possible values include: 'IPv4', 'IPv6'
:type public_ip_address_version: str or
~azure.mgmt.network.v2017_03_01.models.IPVersion
:ivar ip_configuration:
:vartype ip_configuration:
~azure.mgmt.network.v2017_03_01.models.IPConfiguration
:param dns_settings: The FQDN of the DNS record associated with the public
IP address.
:type dns_settings:
~azure.mgmt.network.v2017_03_01.models.PublicIPAddressDnsSettings
:param ip_address:
:type ip_address: str
:param idle_timeout_in_minutes: The idle timeout of the public IP address.
:type idle_timeout_in_minutes: int
:param resource_guid: The resource GUID property of the public IP
resource.
:type resource_guid: str
:param provisioning_state: The provisioning state of the PublicIP
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'ip_configuration': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'public_ip_allocation_method': {'key': 'properties.publicIPAllocationMethod', 'type': 'str'},
'public_ip_address_version': {'key': 'properties.publicIPAddressVersion', 'type': 'str'},
'ip_configuration': {'key': 'properties.ipConfiguration', 'type': 'IPConfiguration'},
'dns_settings': {'key': 'properties.dnsSettings', 'type': 'PublicIPAddressDnsSettings'},
'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(PublicIPAddress, self).__init__(**kwargs)
self.public_ip_allocation_method = kwargs.get('public_ip_allocation_method', None)
self.public_ip_address_version = kwargs.get('public_ip_address_version', None)
self.ip_configuration = None
self.dns_settings = kwargs.get('dns_settings', None)
self.ip_address = kwargs.get('ip_address', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.etag = kwargs.get('etag', None)
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_03_01/models/public_ip_address.py
|
Python
|
mit
| 4,256
|
def climbing_stairs(n):
"""
Returns a list of each way to climb n steps, where we can take either 1 step
or 2 steps at a time.
Intuition:
The last leap can either be 1 step or 2 steps. This yields a recurrence
relation that yields the Fibonacci numbers.
"""
prev, curr = [[]], [[1]]
if n == 0:
return prev
if n == 1:
return curr
for i in range(n - 1):
prev, curr = curr, take_step(prev, 2) + take_step(curr, 1)
return curr
def take_step(solutions, step):
"""
Takes a step for each solution.
"""
return [solution + [step] for solution in solutions]
def main():
test_n = [
0, 1, 2, 3, 4, 5, 6
]
for i, n in enumerate(test_n):
print("Test Case #{}:".format(i + 1))
text = [str(n)]
for solution in climbing_stairs(n):
text.append(" + ".join(map(str, solution)))
print("\n= ".join(text))
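# Hedged illustration (added editorially): the Fibonacci claim in the docstring can be
# checked by counting solutions; for n = 0..6 the counts should be 1, 1, 2, 3, 5, 8, 13.
def _check_fibonacci_counts():
    expected = [1, 1, 2, 3, 5, 8, 13]
    counts = [len(climbing_stairs(n)) for n in range(7)]
    assert counts == expected, counts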
if __name__ == "__main__":
main()
|
adriano-arce/Interview-Problems
|
Recursion-Problems/Climbing-Stairs/Climbing-Stairs.py
|
Python
|
mit
| 985
|
####
# Figure S4
# needs:
# - varykernel*.npz produced by runvarykernel.py
# - fattailed*.npz produced by runfattailed.py
# - rdf_*.npz produced by calc-rdf.py
# - sq_*.npz produced by calc-rdf.py
####
import glob
import sys
sys.path.append('..')
from lib.mppaper import *
import lib.mpsetup as mpsetup
import lib.immune as immune
import matplotlib.gridspec as gridspec
fig = plt.figure(figsize=(figsize_double[0]*0.8, figsize_double[1]))
gridargs = dict(left=0.05, right=0.98, top=0.95, bottom=0.08,
wspace=0.4, hspace=0.65)
grid = gridspec.GridSpec(4, 4, **gridargs)
columns = [('varykernel_gammaa1', lambda x: np.exp(-np.abs(x))),
('varykernel_gammaa2', lambda x: np.exp(-np.abs(x)**2)),
('varykernel_gammaa4', lambda x: np.exp(-np.abs(x)**4)),
('fattailed', lambda x: 1.0 / (1.0 + np.abs(x)**2))]
import runvarykernel
eta = runvarykernel.eta
files = glob.glob('data/varykernel*.npz')
files.sort(key=lambda f: immune.parse_value(f, 'gammaa'))
gamma_as = sorted(list(set(immune.parse_value(f, 'gammaa') for f in files)))
## create nested list of axes
axes = []
for i in range(len(columns)):
axes.append([])
for j in range(4):
ax = plt.Subplot(fig, grid[j, i])
axes[i].append(ax)
for i, (name, kernel) in enumerate(columns):
## plot kernel
ax = axes[i][0]
x = np.linspace(-2, 2, 1000)
ax.plot(x, kernel(x))
ax.set_ylim(0.0, 1.05)
ax.set_yticks([0.0, 1.0])
ax.set_xticks([-1.0, 0.0, 1.0])
ax.set_ylabel('f(x)')
ax.set_xlabel('$x \; / \; \eta$')
## plot popt
ax = axes[i][1]
npz = np.load(glob.glob('data/%s*.npz' % name)[0])
popt = npz['P']
N = len(npz['x'])
vmin = 0
vmax = 100
im = ax.imshow(popt.reshape(N, N)*N**2, extent=(0, 1.0/eta, 0, 1.0/eta),
# same as nearest, but better for svg export
interpolation = 'none',
vmin = vmin, vmax = vmax)
im.set_cmap('gray_r')
ax.set_xlim(0.0, 10.0)
ax.set_ylim(0.0, 10.0)
ax.locator_params(axis='x', nbins=5)
ax.locator_params(axis='y', nbins=5)
ax.set_ylabel('$x \; / \; \eta$')
ax.set_xlabel('$y \; / \; \eta$')
## plot radial distribution function
ax = axes[i][2]
npz = np.load('data/rdf_%s.npz' % name)
binmid, rdfsa = npz['binmid'], npz['rdfs']
mean = np.mean(rdfsa, axis=1)
ax.plot(binmid/eta, mean)
ax.axhline(1.0, color=almostblack)
ax.set_xlabel('$R \; / \; \eta$')
ax.set_ylabel('g(R)')
ax.set_xlim(0, 7)
ax.set_ylim(0, 1.75)
ax.set_yticks([0.0, 1.0])
## plot structure factor
ax = axes[i][3]
npz = np.load('data/sq_%s.npz' % name)
bins, avpowers = npz['bins'], npz['avpowers']
ax.plot(2*np.pi*bins[1:]*eta, avpowers[1:], '-')
ax.axhline(1.0, color=almostblack)
ax.set_xlim(0, 30)
ax.set_ylim(0, 4)
ax.set_xlabel("$q \; \eta$")
ax.set_ylabel("$S(q)$")
ax.locator_params(axis='x', nbins=5)
ax.locator_params(axis='y', nbins=5)
#### finalize figure ####
for ax in [item for sublist in axes for item in sublist]:
ax.xaxis.labelpad = axis_labelpad
ax.yaxis.labelpad = axis_labelpad
fig.add_axes(ax)
for i, axl in enumerate(axes):
for j, ax in enumerate(axl):
if not j == 1:
mpsetup.despine(ax)
# label different plot types
mpsetup.label_axes([axes[0][i] for i in range(4)], xy=(0.01, 0.9),
xycoords=('figure fraction', 'axes fraction'),
fontsize = 'medium')
# label different gamma_as
for i, axl in enumerate(axes[:-1]):
axl[0].annotate(r'\textbf{exponential}, $\boldsymbol{\gamma = %g}$' % gamma_as[i],
xy=(0.5, 0.99),
xycoords=('axes fraction', 'figure fraction'),
ha = 'center', va = 'top',
fontsize = 'medium')
axes[-1][0].annotate(r'\textbf{longtailed}', xy=(0.5, 0.99),
xycoords=('axes fraction', 'figure fraction'),
ha='center', va='top',
fontsize='medium')
fig.savefig('figS4.svg')
plt.show()
|
andim/optimmune
|
figS4/figS4.py
|
Python
|
mit
| 4,120
|
import pandas as pd
import pandasql
def min_temperature_on_rainy_days(filename):
'''
This function should run a SQL query on a dataframe of
weather data. More specifically you want to find the average
minimum temperature on rainy days where the minimum temperature
is greater than 55 degrees.
You might also find that interpreting numbers as integers or floats may not
    work initially. In order to get around this issue, it may be useful to cast
these numbers as integers. This can be done by writing cast(column as integer).
So for example, if we wanted to cast the maxtempi column as an integer, we would actually
write something like where cast(maxtempi as integer) = 76, as opposed to simply
where maxtempi = 76.
You can see the weather data that we are passing in below:
https://www.dropbox.com/s/7sf0yqc9ykpq3w8/weather_underground.csv
'''
weather_data = pd.read_csv(filename)
q = """
select
avg(cast (mintempi as integer))
from
weather_data
where
rain == 1 and cast (mintempi as integer) > 55;
"""
#Execute your SQL command against the pandas frame
mean_temp_weekends = pandasql.sqldf(q.lower(), locals())
return mean_temp_weekends
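# Hedged sketch (added editorially): the cast(column as integer) advice from the
# docstring, shown on a tiny in-memory frame; the column values here are illustrative.
def _cast_example():
    frame = pd.DataFrame({'mintempi': ['60', '50'], 'rain': [1, 1]})
    q = "select avg(cast(mintempi as integer)) from frame where rain = 1 and cast(mintempi as integer) > 55;"
    return pandasql.sqldf(q, locals())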
if __name__ == "__main__":
input_filename = "weather_underground.csv"
output_filename = "output.csv"
student_df = min_temperature_on_rainy_days(input_filename)
student_df.to_csv(output_filename)
|
davidbroadwater/nyc-subway-datascience-project
|
project_2/mean_temp_on_rainy_days/mean_temp_on_rainy_days.py
|
Python
|
mit
| 1,511
|
import numpy as np
import math as math
import word2vec
from utilities import Sentence, Product, AspectPattern
class AspectPatterns(object):
def __init__(self, pattern_name_list):
#possible pattern_name: adj_nn, nn, adj, adv
self.aspectPatterns_list = []
for pattern_name in pattern_name_list:
if pattern_name == 'adj_nn':
aspectPatterns_1 = []
pattern_structure ="""adj_nn:{<JJ><NN.?>}"""
aspectTagIndices = [0,1]
aspectPattern = AspectPattern(name='adj_nn', structure=pattern_structure, aspectTagIndices=aspectTagIndices)
aspectPatterns_1.append(aspectPattern)
pattern_structure ="""nn_nn:{<NN.?><NN.?>}"""
aspectPattern = AspectPattern(name='nn_nn', structure=pattern_structure, aspectTagIndices=aspectTagIndices)
aspectPatterns_1.append(aspectPattern)
self.aspectPatterns_list.append(aspectPatterns_1)
elif pattern_name == 'nn':
aspectPatterns_2 = []
pattern_structure ="""nn:{<NN.?>}"""
aspectTagIndices = [0]
aspectPattern = AspectPattern(name='nn', structure=pattern_structure, aspectTagIndices=aspectTagIndices)
aspectPatterns_2.append(aspectPattern)
self.aspectPatterns_list.append(aspectPatterns_2)
elif pattern_name == 'adj':
aspectPatterns_3 = []
pattern_structure ="""adj:{<JJ.?>}"""
aspectTagIndices = [0]
aspectPattern = AspectPattern(name='adj', structure=pattern_structure, aspectTagIndices=aspectTagIndices)
aspectPatterns_3.append(aspectPattern)
self.aspectPatterns_list.append(aspectPatterns_3)
elif pattern_name == 'adv':
aspectPatterns_4 = []
pattern_structure ="""adv_compara:{<RBR>}"""
aspectTagIndices = [0]
aspectPattern = AspectPattern(name='adv_compara', structure=pattern_structure, aspectTagIndices=aspectTagIndices)
aspectPatterns_4.append(aspectPattern)
pattern_structure ="""adv_super:{<RBS>}"""
aspectTagIndices = [0]
aspectPattern = AspectPattern(name='adv_super', structure=pattern_structure, aspectTagIndices=aspectTagIndices)
aspectPatterns_4.append(aspectPattern)
self.aspectPatterns_list.append(aspectPatterns_4)
def distill_dynamic(sentence, aspectPatterns):
if not sentence.pos_tagged_tokens:
sentence.pos_tag(stem=False)
sentence.word2vec_matchDaynamicAspectPatterns(aspectPatterns.aspectPatterns_list)
def distill_dynamic_sentencelist(sentence_list, aspectPatterns):
for sentence in sentence_list:
distill_dynamic(sentence, aspectPatterns)
def static_aspect_to_vec(wordlist_dict, model):
static_seedwords_vec={}
for static_aspect, seedwords in wordlist_dict.iteritems():
for word in seedwords:
if word in model:
if static_aspect in static_seedwords_vec:
static_seedwords_vec[static_aspect].append(model[word])
else:
static_seedwords_vec[static_aspect] = [model[word]]
return static_seedwords_vec
def getPhraseWordVectorsList(sentence, aspectPatterns, word2vecModel):
# match aspect patterns
distill_dynamic(sentence, aspectPatterns)
phrases = []
for item in sentence.word2vec_features_list:
phrases = phrases + item
phrasewordVecs_list=[]
for phrase in phrases:
phraseWords = phrase.split(' ')
phraseWordVecs=[]
for phraseWord in phraseWords:
if phraseWord in word2vecModel:
phrasewordVec = word2vecModel[phraseWord]
phraseWordVecs.append(phrasewordVec)
if phraseWordVecs:
phrasewordVecs_list.append(phraseWordVecs)
return phrasewordVecs_list
def getPhraseAspectCosineSimilarityMatrix(phrasewordVecs_list, static_seedwords_vec, similarity_measure='max'):
num_static_aspect = len(static_seedwords_vec)
static_aspects = sorted(static_seedwords_vec.keys())
num_phrase = len(phrasewordVecs_list)
similarity_matrix=np.zeros([num_phrase, num_static_aspect]) if num_phrase>=1 else np.zeros([1, num_static_aspect])
for i, phrasewordVecs in enumerate(phrasewordVecs_list):
for j, static_aspect in enumerate(static_aspects):
seedwordVecs = static_seedwords_vec[static_aspect]
similarity_inner_matrix=np.zeros([len(phrasewordVecs),len(seedwordVecs)])
for ii, phrasewordVec in enumerate(phrasewordVecs):
for jj, seedwordVec in enumerate(seedwordVecs):
similarity_inner_matrix[ii][jj]=np.dot(phrasewordVec,seedwordVec)
if similarity_measure == 'max':
similarity_inner_row=np.max(similarity_inner_matrix, axis=1)
similarity_inner=np.sum(similarity_inner_row)
elif similarity_measure == 'sum_row_max':
similarity_inner_row=np.max(similarity_inner_matrix,axis=1)
similarity_inner=np.sum(similarity_inner_row)
elif similarity_measure == 'max_column_mean':
similarity_inner_column=np.mean(similarity_inner_matrix,axis=0)
similarity_inner=np.max(similarity_inner_column)
elif similarity_measure == 'mean':
similarity_inner=np.mean(similarity_inner_matrix)
similarity_matrix[i][j]=similarity_inner
return similarity_matrix
def getSimilarityVectorFromSimilarityMatrix(similarity_matrix, criteria='max'):
num_static_aspect = similarity_matrix.shape[1]
similarity_vec=[0 for j in range(num_static_aspect)]
if criteria == 'max':
for j in range(num_static_aspect):
similarity_vec[j]=np.max(similarity_matrix[:,j])
elif criteria == 'sum':
for j in range(num_static_aspect):
similarity_vec[j]=np.sum(similarity_matrix[:,j])
elif criteria == 'sum_max':
        num_phrase = similarity_matrix.shape[0]
        similarity_maxmatching_matrix=np.zeros([num_phrase,num_static_aspect])
        for i in range(num_phrase):
k=0
max_k=0
for j in range(num_static_aspect):
if similarity_matrix[i,j]>max_k:
k=j
max_k=similarity_matrix[i,j]
similarity_maxmatching_matrix[i,k]=max_k
for j in range(num_static_aspect):
similarity_vec[j]=np.sum(similarity_maxmatching_matrix[:,j])
return np.array(similarity_vec)
def getCosineSimilarityVector(sentence, aspectPatterns, word2vecModel, static_seedwords_vec):
phrasewordVecs_list = getPhraseWordVectorsList(sentence, aspectPatterns, word2vecModel)
if phrasewordVecs_list:
similarity_matrix = getPhraseAspectCosineSimilarityMatrix(phrasewordVecs_list, static_seedwords_vec)
similarity_vec = getSimilarityVectorFromSimilarityMatrix(similarity_matrix, criteria='max')
else:
similarity_vec = np.zeros(len(static_seedwords_vec))
return similarity_vec
def predict_aspect_word2vec(sentence, word2vecModel, aspectPatterns, static_seedwords_vec, cp_threshold = 0.85, ratio_threshold = 0):
# aspectPattern_namelist: 'adj_nn': all adj+nn and nn+nn; 'nn': all nn; adj: all adj; adv: all adv
# cp_threshold: the threshold for the top class's similarity, for a sentence to be classified as useful
# ratio_threshold: the threshold for the ratio between top and second class's similarity, for a sentence to be classified as useful
phrasewordVecs_list = getPhraseWordVectorsList(sentence, aspectPatterns, word2vecModel)
if phrasewordVecs_list:
similarity_matrix = getPhraseAspectCosineSimilarityMatrix(phrasewordVecs_list, static_seedwords_vec)
similarity_vec = getSimilarityVectorFromSimilarityMatrix(similarity_matrix)
# Sort the sentence's similarity to each class
static_aspects = sorted(static_seedwords_vec.keys())
aspect_similarity_tups_sorted = sorted(zip(static_aspects, similarity_vec),
key=lambda tup: tup[1], reverse=True)
if aspect_similarity_tups_sorted[0][1]>cp_threshold:
if aspect_similarity_tups_sorted[1][1]!=0 and aspect_similarity_tups_sorted[0][1]/aspect_similarity_tups_sorted[1][1]>=ratio_threshold:
prediction=aspect_similarity_tups_sorted[0][0]
elif aspect_similarity_tups_sorted[1][1]==0 and aspect_similarity_tups_sorted[0][1]!=0:
prediction=aspect_similarity_tups_sorted[0][0]
else: prediction='no feature'
else: prediction='no feature'
return (prediction,aspect_similarity_tups_sorted)
else:
return ('no feature',[])
|
MachineLearningStudyGroup/Smart_Review_Summarization
|
srs/word2VecModel.py
|
Python
|
mit
| 7,782
|
'''
Client.py module. This module defines the Client class, which defines a Client type object.
A client is an object that needs to be managed by the application. Each client has an unique id,
an alphanumeric string for a name, and a 13-digit long integer which is the CNP. (personal numeric code)
It imports the ClientException module to throw any exceptions related to the Client class and its controller
and repository.
'''
from domain.ClientException import ClientException
class Client:
'''
Client class. A client is one of the main objects that a library manager needs to manage. This class has the
private properties ID, Name, CNP.
0 1 2
0 - ID is a positive number unique for each client, used as an identifier.
1 - Name is a string that can only contain alphanumeric characters and spaces.
2 - CNP is a positive 13 digit number that the romanian government uses to identify citizens.
'''
def __init__(self, _id, name, cnp):
'''
Constructor for the Client class. Takes three parameters and initializes each property with their values.
Validation is done using the ClientException class, any wrong constructor parameter type will throw a
ClientException.
Input:
self - object defined by this class
_id - an integer containing an unique value for each client
name - alphanumeric string defining the client's name
cnp - 13 digit long integer
'''
if type(_id) != type(0) or ( type(_id) == type(0) and _id < 0 ):
raise ClientException("Error! Initialized with bad id.")
valid = True
for i in range(len(name)):
if not (name[i] == ' ' or (name[i] >= 'a' and name[i] <= 'z') or (name[i] >= 'A' and name[i] <= 'Z')):
valid = False
break
if not valid:
raise ClientException("Error! Initialized with bad name.")
if len(str(cnp)) != 13:
raise ClientException("Error! Initialized with bad CNP.")
self._id = _id # used _id instead of id because name id is reserved
self._name = name
self._cnp = cnp
# <GETTERS AND SETTERS>
def getId(self):
'''
Getter for the id property of the Client class.
There is no setter for id.
Input:
self - object defined by this class
Output:
_id - integer value
'''
return self._id
def getName(self):
'''
Getter for the name property of the Client class.
Input:
self - object defined by this class
Output:
_name - string containing alphanumeric characters
'''
return self._name
def setName(self, name):
'''
Setter for the name property. Modifies the _name property of this class.
This method validates the name parameter and raises exception if it is not a string of alphanumeric
characters.
Input:
self - object defined by this class
name - string containing the new name
'''
for i in range(len(name)):
if not (name[i] >= 'a' and name[i] <= 'z') and not(name[i] >= 'A' and name[i] <= 'Z') and not name[i] == ' ':
raise ClientException("Error! Please only use letters of the alphabet!")
self._name = name
def getCnp(self):
'''
Getter for the CNP property of the Client class.
Input:
self - object defined by this class
Output:
_cnp - 13 digit long integer value
'''
return self._cnp
def setCnp(self, cnp):
'''
Setter for the cnp property of the Client class.
A CNP - cod numeric personal ( PNC - Personal numeric code ) is a code used by the romainan government
for identification of any citizen. Every romanian citizen has an unique personal numeric code.
For this reason, in case the program will find a user with the same CNP as the one that we're trying to
insert, a warning will be displayed.
Input:
self - object defined by this class
_cnp - integer of 13 digits
'''
if len(str(cnp)) != 13: # check that CNP has 13 digits
raise ClientException("CNP must be 13 digits long!")
self._cnp = cnp
# </GETTERS AND SETTERS>
def __repr__(self): # representation
return str(self._id) + " , name = " + self._name + ", cnp = " + str(self._cnp)
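# Hedged usage sketch (added editorially): constructing a Client with the validation
# rules described in the docstrings above; the values are illustrative only.
def _client_example():
    try:
        client = Client(1, "Ion Popescu", 1960101123456)
        return repr(client)
    except ClientException as error:
        return str(error)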
|
p0licat/university
|
FP - Fundamentals Of Programming/text-library/domain/Client.py
|
Python
|
mit
| 4,918
|
''' Image module '''
from karmaserver.data.models import db
class Image(db.Model):
''' Image class '''
_id = db.Column(db.String(64), primary_key=True)
observations = db.relationship('Observation', backref=db.backref('image', lazy='joined'))
x_size = db.Column(db.Integer)
y_size = db.Column(db.Integer)
probability = db.Column(db.Integer)
fwhm = db.Column(db.Integer)
def __init__(self, image_info):
self.observations = []
self._id = image_info['_id']
self.x_size = image_info['x_size']
self.y_size = image_info['y_size']
self.probability = image_info['probability']
self.fwhm = image_info['fwhm']
def __eq__(self, image):
return image == self._id
|
mnunezdm/cazasteroides
|
karmaserver/data/models/image.py
|
Python
|
mit
| 753
|
from .get import GetProductSchema # noqa
from .list import ProductListSchema # noqa
|
fastmonkeys/netvisor.py
|
netvisor/schemas/products/__init__.py
|
Python
|
mit
| 86
|
# coding: utf-8
from flask import Flask, render_template
from flask_googlemaps import GoogleMaps
from flask_googlemaps import Map, icons
app = Flask(__name__, template_folder="templates")
# you can set key as config
app.config['GOOGLEMAPS_KEY'] = "XXXXX"
# you can also pass key here
GoogleMaps(app, key="XXXX")
@app.route("/")
def mapview():
mymap = Map(
identifier="view-side", # for DOM element
varname="mymap", # for JS object name
lat=37.4419,
lng=-122.1419,
markers=[(37.4419, -122.1419)]
)
sndmap = Map(
identifier="sndmap",
varname="sndmap",
lat=37.4419,
lng=-122.1419,
markers={
icons.dots.green: [(37.4419, -122.1419), (37.4500, -122.1350)],
icons.dots.blue: [(37.4300, -122.1400, "Hello World")]
}
)
trdmap = Map(
identifier="trdmap",
varname="trdmap",
lat=37.4419,
lng=-122.1419,
markers=[
{
'icon': icons.alpha.B,
'lat': 37.4419,
'lng': -122.1419,
'infobox': "Hello I am <b style='color:green;'>GREEN</b>!"
},
{
'icon': icons.dots.blue,
'lat': 37.4300,
'lng': -122.1400,
'infobox': "Hello I am <b style='color:blue;'>BLUE</b>!"
},
{
'icon': '//maps.google.com/mapfiles/ms/icons/yellow-dot.png',
'lat': 37.4500,
'lng': -122.1350,
'infobox': (
"Hello I am <b style='color:#ffcc00;'>YELLOW</b>!"
"<h2>It is HTML title</h2>"
"<img src='//placehold.it/50'>"
"<br>Images allowed!"
)
}
]
)
clustermap = Map(
identifier="clustermap",
varname="clustermap",
lat=37.4419,
lng=-122.1419,
markers=[
{
'lat': 37.4500,
'lng': -122.1350
},
{
'lat': 37.4400,
'lng': -122.1350
},
{
'lat': 37.4300,
'lng': -122.1350
},
{
'lat': 36.4200,
'lng': -122.1350
},
{
'lat': 36.4100,
'lng': -121.1350
}
],
zoom=12,
cluster=True
)
movingmap = Map(
identifier="movingmap",
varname="movingmap",
lat=37.4419,
lng=-122.1419,
markers=[
{
'lat': 37.4500,
'lng': -122.1350
}
],
zoom=12
)
movingmarkers = [
{
'lat': 37.4400,
'lng': -122.1350
},
{
'lat': 37.4430,
'lng': -122.1350
},
{
'lat': 37.4450,
'lng': -122.1350
},
{
'lat': 37.4490,
'lng': -122.1350
}
]
rectangle = {
'stroke_color': '#0000FF',
'stroke_opacity': .8,
'stroke_weight': 5,
'fill_color': '#FFFFFF',
'fill_opacity': .1,
'bounds': {
'north': 33.685,
'south': 33.671,
'east': -116.234,
'west': -116.251
}
}
rectmap = Map(
identifier="rectmap",
varname="rectmap",
lat=33.678,
lng=-116.243,
rectangles=[
rectangle,
[33.678, -116.243, 33.671, -116.234],
(33.685, -116.251, 33.678, -116.243),
[(33.679, -116.254), (33.678, -116.243)],
([33.689, -116.260], [33.685, -116.250]),
]
)
circle = {
'stroke_color': '#FF00FF',
'stroke_opacity': 1.0,
'stroke_weight': 7,
'fill_color': '#FFFFFF',
'fill_opacity': .8,
'center': {
'lat': 33.685,
'lng': -116.251
},
'radius': 2000,
}
circlemap = Map(
identifier="circlemap",
varname="circlemap",
lat=33.678,
lng=-116.243,
circles=[
circle,
[33.685, -116.251, 1000],
(33.685, -116.251, 1500),
]
)
polyline = {
'stroke_color': '#0AB0DE',
'stroke_opacity': 1.0,
'stroke_weight': 3,
'path': [{'lat': 33.678, 'lng': -116.243},
{'lat': 33.679, 'lng': -116.244},
{'lat': 33.680, 'lng': -116.250},
{'lat': 33.681, 'lng': -116.239},
{'lat': 33.678, 'lng': -116.243}]
}
path1 = [(33.665, -116.235), (33.666, -116.256),
(33.667, -116.250), (33.668, -116.229)]
path2 = ((33.659, -116.243), (33.660, -116.244),
(33.649, -116.250), (33.644, -116.239))
path3 = ([33.688, -116.243], [33.680, -116.244],
[33.682, -116.250], [33.690, -116.239])
path4 = [[33.690, -116.243], [33.691, -116.244],
[33.692, -116.250], [33.693, -116.239]]
plinemap = Map(
identifier="plinemap",
varname="plinemap",
lat=33.678,
lng=-116.243,
polylines=[polyline, path1, path2, path3, path4]
)
polygon = {
'stroke_color': '#0AB0DE',
'stroke_opacity': 1.0,
'stroke_weight': 3,
'fill_color': '#ABC321',
'fill_opacity': .5,
'path': [{'lat': 33.678, 'lng': -116.243},
{'lat': 33.679, 'lng': -116.244},
{'lat': 33.680, 'lng': -116.250},
{'lat': 33.681, 'lng': -116.239},
{'lat': 33.678, 'lng': -116.243}]
}
pgonmap = Map(
identifier="pgonmap",
varname="pgonmap",
lat=33.678,
lng=-116.243,
polygons=[polygon, path1, path2, path3, path4]
)
collapsible = Map(
identifier="collapsible",
varname="collapsible",
lat=60.000025,
lng=30.288809,
zoom=13,
collapsible=True
)
infoboxmap = Map(
identifier="infoboxmap",
zoom=12,
lat=59.939012,
lng=30.315707,
markers=[{
'lat': 59.939,
'lng': 30.315,
'infobox': 'This is a marker'
}],
circles=[{
'stroke_color': '#FF00FF',
'stroke_opacity': 1.0,
'stroke_weight': 7,
'fill_color': '#FF00FF',
'fill_opacity': 0.2,
'center': {
'lat': 59.939,
'lng': 30.3
},
'radius': 200,
'infobox': "This is a circle"
}],
rectangles=[{
'stroke_color': '#0000FF',
'stroke_opacity': .8,
'stroke_weight': 5,
'fill_color': '#FFFFFF',
'fill_opacity': .1,
'bounds': {
'north': 59.935,
'south': 59.93,
'east': 30.325,
'west': 30.3,
},
'infobox': "This is a rectangle"
}],
polygons=[{
'stroke_color': '#0AB0DE',
'stroke_opacity': 1.0,
'stroke_weight': 3,
'path': [
[59.94, 30.318],
[59.946, 30.325],
[59.946, 30.34],
[59.941, 30.35],
[59.938, 30.33]
],
'infobox': 'This is a polygon'
}],
polylines=[{
'stroke_color': '#0AB0DE',
'stroke_opacity': 1.0,
'stroke_weight': 10,
'path': [
(59.941, 30.285),
(59.951, 30.31),
(59.95, 30.36),
(59.938, 30.358)
],
'infobox': 'This is a polyline'
}]
)
return render_template(
'example.html',
mymap=mymap,
sndmap=sndmap,
trdmap=trdmap,
rectmap=rectmap,
circlemap=circlemap,
plinemap=plinemap,
pgonmap=pgonmap,
clustermap=clustermap,
movingmap=movingmap,
movingmarkers=movingmarkers,
collapsible=collapsible,
infoboxmap=infoboxmap
)
@app.route('/fullmap')
def fullmap():
fullmap = Map(
identifier="fullmap",
varname="fullmap",
style=(
"height:100%;"
"width:100%;"
"top:0;"
"left:0;"
"position:absolute;"
"z-index:200;"
),
lat=37.4419,
lng=-122.1419,
markers=[
{
'icon': '//maps.google.com/mapfiles/ms/icons/green-dot.png',
'lat': 37.4419,
'lng': -122.1419,
'infobox': "Hello I am <b style='color:green;'>GREEN</b>!"
},
{
'icon': '//maps.google.com/mapfiles/ms/icons/blue-dot.png',
'lat': 37.4300,
'lng': -122.1400,
'infobox': "Hello I am <b style='color:blue;'>BLUE</b>!"
},
{
'icon': icons.dots.yellow,
'title': 'Click Here',
'lat': 37.4500,
'lng': -122.1350,
'infobox': (
"Hello I am <b style='color:#ffcc00;'>YELLOW</b>!"
"<h2>It is HTML title</h2>"
"<img src='//placehold.it/50'>"
"<br>Images allowed!"
)
}
],
# maptype = "TERRAIN",
# zoom="5"
)
return render_template('example_fullmap.html', fullmap=fullmap)
if __name__ == "__main__":
app.run(debug=True, use_reloader=True)
|
sharkwheels/Independet_study_2017
|
week8-google_maps/flask-maps/example.py
|
Python
|
mit
| 9,848
|
from django.contrib.messages import constants as messages
from .common import *
ALLOWED_HOSTS = SECRETS.get('allowed_hosts', ['localhost'])
DEBUG = True
MESSAGE_LEVEL = messages.DEBUG if DEBUG else messages.INFO
CSRF_COOKIE_SECURE = False
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_DOMAIN = next(iter(ALLOWED_HOSTS))
DEFAULT_FROM_EMAIL = 'noreply@' + EMAIL_DOMAIN
SERVER_EMAIL = 'root@' + EMAIL_DOMAIN
MEDIA_ROOT = str(DATA_DIR.joinpath('media_root'))
SESSION_COOKIE_SECURE = False
STATIC_ROOT = str(DATA_DIR.joinpath('static_root'))
|
randomic/aniauth-tdd
|
aniauth/settings/dev.py
|
Python
|
mit
| 573
|
# -*- coding: utf-8 -*-
RABBIT = '''
(\__/) ||
(•ㅅ•) ||
/ 　 づ
'''
def rabbitsay(spacing, message):
"""
    ┌───────────┐
    | rabbitsay |
    └───────────┘
     (\__/) ||
     (•ㅅ•) ||
     / 　 づ
Function to generate rabbit and sign with custom content
"""
lines = message.split()
width = max(map(len, lines)) + spacing
    edge = '─' * width
innards = '\n'.join(
' |{:^{width}}|'.format(line, width=width)
for line in lines
)
    sign = ' ┌{edge}┐\n{innards}\n └{edge}┘{rabbit}'.format(
innards=innards,
edge=edge,
rabbit=RABBIT,
)
return sign
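# Hedged usage sketch (added editorially): message.split() puts each word on its own
# line of the sign, so a two-word message produces a two-line sign.
if __name__ == '__main__':
    print(rabbitsay(4, 'hello world'))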
|
Cbeck527/rabbitsay
|
rabbitsay/rabbitsay.py
|
Python
|
mit
| 720
|