content stringlengths 5 1.05M |
|---|
"""add three time filed
Revision ID: 9299cbcf6d8b
Revises: ed535bd21f09
Create Date: 2019-12-12 15:35:20.075155
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9299cbcf6d8b'
down_revision = 'ed535bd21f09'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the auto-generated id indexes on questions, user and verficode."""
    # ### commands auto generated by Alembic - please adjust! ###
    for index_name, table in (('ix_questions_id', 'questions'),
                              ('ix_user_id', 'user'),
                              ('ix_verficode_id', 'verficode')):
        op.drop_index(index_name, table_name=table)
    # ### end Alembic commands ###
def downgrade():
    """Recreate the non-unique id indexes removed by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    for index_name, table in (('ix_verficode_id', 'verficode'),
                              ('ix_user_id', 'user'),
                              ('ix_questions_id', 'questions')):
        op.create_index(index_name, table, ['id'], unique=False)
    # ### end Alembic commands ###
|
import numpy as np
def clean_data(df, out_df_dir=""):
    """Drop every column of *df* that contains a missing value.

    The frame is mutated in place and also returned. When *out_df_dir*
    is a non-empty path, the cleaned frame is additionally written to
    that location as CSV.
    """
    df.dropna(axis=1, inplace=True)
    if out_df_dir:
        df.to_csv(out_df_dir)
    return df
# Calculate log change of daily price
def log_change(series):
    """Log-return between the two entries of a 2-sample rolling window."""
    previous, current = series[0], series[1]
    return np.log(current / previous)
# Calculate correaltion
def calculate_cor(df, start, end):
    """Pearson correlation matrix of daily log changes over df[start:end].

    Each column is turned into a series of 2-sample rolling log returns
    before correlating.
    """
    window = df[start:end]
    log_returns = window.rolling(window=2, min_periods=2).apply(log_change, raw=True)
    return log_returns.corr(method="pearson")
# Calculate profit
def take_profit(price, start, end):
    """Fractional profit from the *start* to the *end* position of *price*."""
    buy = price.iloc[start]
    sell = price.iloc[end]
    return sell / buy - 1
from re import search
import jsonpickle
COMMAND_RESULT_PREFIX = "command_json_result="
COMMAND_RESULT_POSTFIX = "=command_json_result_end"
def get_result_from_command_output(output):
    """
    Extract and deserialize the wrapped JSON result from console output.

    :param output: Console output as returned from command execution
    :return: Deserialized object (or dict), or None when no result
        marker is present in the output
    """
    match = _extract_result_from_output(output)
    if match is None:
        return None
    return jsonpickle.decode(match.group('result'))
def set_command_result(result, unpicklable=False):
    """
    Serializes output as JSON and writes it to console output wrapped with special prefix and suffix

    :param result: Result to return
    :param unpicklable: If True adds JSON can be deserialized as real object.
                        When False will be deserialized as dictionary
    :return: The exact string that was written to stdout
    """
    json = jsonpickle.encode(result, unpicklable=unpicklable)
    result_for_output = COMMAND_RESULT_PREFIX + str(json) + COMMAND_RESULT_POSTFIX
    # Parenthesized single-argument print is valid in both Python 2 and 3;
    # the bare print statement used previously is a syntax error on Python 3.
    print(result_for_output)
    return result_for_output
def transfer_command_result(output):
    """Re-emit the wrapped result found in *output* to stdout, if any.

    :param output: Console output possibly containing a wrapped result
    """
    match = _extract_result_from_output(output)
    if match:
        # print(...) with a single argument works on Python 2 and 3 alike;
        # the previous bare print statement broke Python 3 compatibility.
        print(COMMAND_RESULT_PREFIX + match.group('result') + COMMAND_RESULT_POSTFIX)
def _extract_result_from_output(output):
    """Return the regex match for the wrapped result section, or None."""
    pattern = COMMAND_RESULT_PREFIX + '(?P<result>.*)' + COMMAND_RESULT_POSTFIX
    return search(pattern, output)
|
# coding: utf-8
from collections import OrderedDict
from pymemcache.client.base import _readline, _readvalue, Client
from pymemcache.exceptions import MemcacheUnknownError
class MockMcClient(Client):
    # pymemcache Client subclass whose only change is an order-preserving
    # multi-key fetch (see get_many docstring for the motivation).
    def get_many(self, keys):
        """
        Mock function for get_many to fix uq pop bug temporarily.
        The uq pop processing in memcache protocol must receive the line-name
        as the first key, but pymemcache get_many command may rearrange the
        keys sequence, which leads uq library pop processing with error.
        Args:
            keys: list(str), see class docs for details.
        Returns:
            A dict in which the keys are elements of the "keys" argument list
            and the values are values from the cache. The dict may contain all,
            some or none of the given keys.
        """
        if not keys:
            return {}
        return self._mock_fetch_cmd(b'get', keys, False)
    def _mock_fetch_cmd(self, name, keys, expect_cas):
        # Same as pymemcache's _fetch_cmd, except an OrderedDict preserves
        # the caller's key order in the outgoing command line.
        # Maps validated/encoded key -> original key for the response phase.
        checked_keys = OrderedDict()
        for k in keys:
            checked_keys[self.check_key(k)] = k
        cmd = name + b' ' + b' '.join(checked_keys) + b'\r\n'
        try:
            if not self.sock:
                self._connect()
            self.sock.sendall(cmd)
            buf = b''
            result = {}
            # memcached text protocol: a sequence of VALUE blocks terminated
            # by a bare END line.
            while True:
                buf, line = _readline(self.sock, buf)
                self._raise_errors(line, name)
                if line == b'END':
                    return result
                elif line.startswith(b'VALUE'):
                    if expect_cas:
                        _, key, flags, size, cas = line.split()
                    else:
                        try:
                            _, key, flags, size = line.split()
                        except Exception as e:
                            raise ValueError("Unable to parse line %s: %s"
                                             % (line, str(e)))
                    buf, value = _readvalue(self.sock, buf, int(size))
                    # Translate the wire key back to the caller's original key.
                    key = checked_keys[key]
                    if self.deserializer:
                        value = self.deserializer(key, value, int(flags))
                    if expect_cas:
                        result[key] = (value, cas)
                    else:
                        result[key] = value
                elif name == b'stats' and line.startswith(b'STAT'):
                    _, key, value = line.split()
                    result[key] = value
                else:
                    raise MemcacheUnknownError(line[:32])
        except Exception:
            # Drop the (possibly desynchronized) connection on any failure;
            # ignore_exc turns errors into an empty result instead of raising.
            self.close()
            if self.ignore_exc:
                return {}
            raise
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Train Resnet50_quant on Cifar10"""
import pytest
import numpy as np
from easydict import EasyDict as ed
from mindspore import context
from mindspore import Tensor
from mindspore.nn.optim.momentum import Momentum
from mindspore.train.model import Model
from mindspore.compression.quant import QuantizationAwareTraining
from mindspore import set_seed
from resnet_quant_manual import resnet50_quant
from dataset import create_dataset
from lr_generator import get_lr
from utils import Monitor, CrossEntropy
# Hyper-parameters for the one-epoch quantization-aware ResNet-50 smoke test.
# NOTE(review): image_height/width of 224 with CIFAR-10 implies create_dataset
# resizes the 32x32 images — confirm against dataset.py.
config_quant = ed({
    "class_num": 10,
    "batch_size": 128,
    "step_threshold": 20,
    "loss_scale": 1024,
    "momentum": 0.9,
    "weight_decay": 1e-4,
    "epoch_size": 1,
    "pretrained_epoch_size": 90,
    "buffer_size": 1000,
    "image_height": 224,
    "image_width": 224,
    "data_load_mode": "original",
    "save_checkpoint": True,
    "save_checkpoint_epochs": 1,
    "keep_checkpoint_max": 50,
    "save_checkpoint_path": "./",
    "warmup_epochs": 0,
    "lr_decay_mode": "cosine",
    "use_label_smooth": True,
    "label_smooth_factor": 0.1,
    "lr_init": 0,
    "lr_max": 0.005,
})
# Fixed CI location of the CIFAR-10 binary dataset.
dataset_path = "/home/workspace/mindspore_dataset/cifar-10-batches-bin/"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_resnet50_quant():
    """Smoke-train quantization-aware ResNet-50 on CIFAR-10 for one epoch
    and assert the average per-step loss stays below 2.60."""
    set_seed(1)
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    config = config_quant
    print("training configure: {}".format(config))
    epoch_size = config.epoch_size
    # define network
    net = resnet50_quant(class_num=config.class_num)
    net.set_train(True)
    # define loss
    if not config.use_label_smooth:
        config.label_smooth_factor = 0.0
    loss = CrossEntropy(
        smooth_factor=config.label_smooth_factor, num_classes=config.class_num)
    #loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False)
    # define dataset
    dataset = create_dataset(dataset_path=dataset_path,
                             config=config,
                             repeat_num=1,
                             batch_size=config.batch_size)
    step_size = dataset.get_dataset_size()
    # convert fusion network to quantization aware network
    quantizer = QuantizationAwareTraining(bn_fold=True,
                                          per_channel=[True, False],
                                          symmetric=[True, False])
    net = quantizer.quantize(net)
    # get learning rate (cosine decay schedule over the single epoch)
    lr = Tensor(get_lr(lr_init=config.lr_init,
                       lr_end=0.0,
                       lr_max=config.lr_max,
                       warmup_epochs=config.warmup_epochs,
                       total_epochs=config.epoch_size,
                       steps_per_epoch=step_size,
                       lr_decay_mode='cosine'))
    # define optimization
    opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), lr, config.momentum,
                   config.weight_decay, config.loss_scale)
    # define model
    #model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'})
    model = Model(net, loss_fn=loss, optimizer=opt)
    print("============== Starting Training ==============")
    monitor = Monitor(lr_init=lr.asnumpy(),
                      step_threshold=config.step_threshold)
    callbacks = [monitor]
    # dataset_sink_mode=False keeps per-step losses visible to the monitor.
    model.train(epoch_size, dataset, callbacks=callbacks,
                dataset_sink_mode=False)
    print("============== End Training ==============")
    expect_avg_step_loss = 2.60
    avg_step_loss = np.mean(np.array(monitor.losses))
    print("average step loss:{}".format(avg_step_loss))
    assert avg_step_loss < expect_avg_step_loss
# Allow running the test directly without pytest.
if __name__ == '__main__':
    test_resnet50_quant()
|
# Advent of Code 2016
#
# From https://adventofcode.com/2016/day/2
#
import numpy as np
# (row, col) deltas for each move letter; row 0 is the top of the keypad,
# so 'U' decreases the row index.
MOVES = {'L': [0, -1],
         'R': [0, 1],
         'U': [-1, 0],
         'D': [1, 0],
         }
# Part 1: standard 3x3 keypad.
pad1 = np.array([['1', '2', '3'],
                 ['4', '5', '6'],
                 ['7', '8', '9']])
# Part 2: diamond keypad; None marks positions with no key.
pad2 = np.array([[None, None, '1', None, None],
                 [None, '2', '3', '4', None],
                 ['5', '6', '7', '8', '9'],
                 [None, 'A', 'B', 'C', None],
                 [None, None, 'D', None, None]])
def get_pin(pad, start_key='5', filename=''):
    """Follow the move instructions from the input file over *pad* and
    return the resulting PIN string.

    Parameters
    ----------
    pad : np.ndarray
        2-D keypad of key labels; None marks a hole (no key).
    start_key : str
        Label of the key the finger starts on.
    filename : str
        Path to the instructions file. Bug fix: this parameter was
        previously ignored; an empty value still falls back to the
        original hard-coded default, so existing callers are unaffected.
    """
    path = filename or '../inputs2016/Advent2016_02(unknown).txt'
    pin = ''
    a, b = pad.shape
    # Surround the keypad with a border of None so that stepping off the
    # edge lands on a falsy cell and the move is simply rejected.
    keymap = np.full((a + 2, b + 2), None)
    keymap[1:1 + a, 1:1 + b] = pad
    position = np.array([np.where(keymap == start_key)[0][0],
                         np.where(keymap == start_key)[1][0]])
    # Bug fix: the file handle was previously never closed.
    with open(path, 'r') as fh:
        for moves in (row.strip() for row in fh):
            for move in moves:
                if keymap[tuple(position + MOVES[move])]:
                    position += MOVES[move]
            # One key per instruction line.
            pin += keymap[tuple(position)]
    return pin
# Both parts consume the same input file; only the keypad layout differs.
print(f"""AoC 2016 Day 2 Part 1 answer is: {get_pin(pad1)}""")
print(f"""AoC 2016 Day 2 Part 2 answer is: {get_pin(pad2)}""")
|
from classlist_get import Classlist_get
from a10 import A10
# NOTE(review): credentials and the device IP are hard-coded — presumably a
# throwaway lab script; confirm before any reuse.
a10 = A10(username='admin',password='a10',ipaddr='192.168.201.15')
classlist_get = Classlist_get(username='admin',password='a10',ipaddr='192.168.201.15')
a10.login()
#sign = (a10.signature)
#print (a10)
print (a10.signature)
#a10 = A10(username='admin',password='a10',ipaddr='192.168.201.15')
# NOTE(review): this constructs a second Classlist_get with default arguments
# and discards it — confirm whether a method call on `classlist_get` was meant.
Classlist_get()
|
"""
This script runs the miller optimal control problem with a given set of parameters and save the results.
The main function is used in main_comparison.py and main_convergence.py. to run the different Miller optimal control problem.
"""
import numpy as np
from bioptim import OdeSolver, CostType
from bioptim import Solver, Shooting
from miller_ocp import MillerOcp
import pickle
from time import time
from custom_dynamics.enums import MillerDynamics
from IPython import embed
def main(args: list = None):
    """
    Main function for the miller_run.py script.
    It runs the optimization and saves the results of a Miller Optimal Control Problem.
    Parameters
    ----------
    args : list
        List of arguments containing the following:
        args[0] : date
            Date of the optimization.
        args[1] : i_rand
            Random seed.
        args[2] : n_shooting
            Number of shooting nodes.
        args[3] : dynamics_type
            Type of dynamics to use such as MillerDynamics.EXPLICIT, MillerDynamics.IMPLICIT, ...
        args[4] : ode_solver
            Type of ode solver to use such as OdeSolver.RK4, OdeSolver.RK2, ...
        args[5] : nstep
            Number of steps for the ode solver.
        args[6] : n_threads
            Number of threads to use.
        args[7] : out_path_raw
            Path to the raw results.
        args[8] : biorbd_model_path
            Path to the biorbd model.
        args[9] : extra_obj
            Extra objective to add to the cost function mainly for implicit formulations
    """
    if args:
        Date = args[0]
        i_rand = args[1]
        n_shooting = args[2]
        dynamics_type = args[3]
        ode_solver = args[4]
        nstep = args[5]
        n_threads = args[6]
        out_path_raw = args[7]
        biorbd_model_path = args[8]
        extra_obj = args[9]
    else:
        # Defaults used when the script is run stand-alone.
        Date = "11fev2022"
        i_rand = 0
        n_shooting = (125, 25)
        dynamics_type = MillerDynamics.IMPLICIT_TAU_DRIVEN_QDDDOT
        ode_solver = OdeSolver.RK4
        nstep = 5
        n_threads = 3
        out_path_raw = "../OnDynamicsForSommersaults_results/test"
        biorbd_model_path = "Model_JeCh_15DoFs.bioMod"
        extra_obj = True
    # to handle the random multi-start of the ocp
    np.random.seed(i_rand)
    # --- Solve the program --- #
    # NOTE(review): the model path is hard-coded here instead of using
    # biorbd_model_path — confirm which one is intended.
    miller = MillerOcp(
        biorbd_model_path="Model_JeCh_15DoFs.bioMod",
        n_shooting=n_shooting,
        ode_solver=ode_solver(n_integration_steps=nstep),
        dynamics_type=dynamics_type,
        n_threads=n_threads,
        somersaults=4 * np.pi,
        twists=6 * np.pi,
        extra_obj=extra_obj,
    )
    filename = f"miller_{dynamics_type}_irand{i_rand}_extraobj{extra_obj}_{n_shooting[0]}_{n_shooting[1]}"
    outpath = f"{out_path_raw}/" + filename
    # --- Solve the program --- #
    solver = Solver.IPOPT(show_online_optim=False, show_options=dict(show_bounds=True))
    solver.set_maximum_iterations(10000)
    solver.set_print_level(5)
    solver.set_linear_solver("ma57")
    print(f"##########################################################")
    print(
        f"Solving dynamics_type={dynamics_type}, i_rand={i_rand}," f"n_shooting={n_shooting}, extra_obj={extra_obj}\n"
    )
    print(f"##########################################################")
    # --- time to solve --- #
    tic = time()
    sol = miller.ocp.solve(solver)
    toc = time() - tic
    states = sol.states[0]["all"]
    controls = sol.controls[0]["all"]
    parameters = sol.parameters["all"]
    # Duplicate consecutive state columns pairwise (debug/inspection aid).
    states_2 = states[:, :2]
    for i in range(1, np.shape(states)[1] - 1):
        states_2 = np.hstack((states_2, states[:, i : i + 2]))
    # NOTE(review): the two statements below compute values that are never
    # used or stored — apparently leftover debugging; confirm before removal.
    vals = miller.ocp.nlp[0].J[3].weighted_function(states_2, [], [], 10, [], parameters[0] / 125)
    np.sum(vals)
    # En Mayer avec vrai norme comme val : toujours -1.6674162217789566e-18
    # NOTE(review): q_modifs is also unused; with shape (15, 126) the slice
    # [13:16] only covers rows 13-14.
    q_modifs = np.zeros((15, 126))
    q_modifs[13:16] = states[13:16, :]
    sol.print(cost_type=CostType.OBJECTIVES, to_console=False)
    print(f"##########################################################")
    print(
        f"Time to solve dynamics_type={dynamics_type}, i_rand={i_rand}, extra_obj={extra_obj}"
        f"n_shooting={n_shooting}, extra_obj={extra_obj}\n:\n {toc}sec\n"
    )
    print(f"##########################################################")
    # --- Save the results --- #
    sol_integrated = sol.integrate(
        shooting_type=Shooting.MULTIPLE, keep_intermediate_points=True, merge_phases=True, continuous=False
    )
    q_integrated = sol_integrated.states["q"]
    qdot_integrated = sol_integrated.states["qdot"]
    # qddot states only exist for the jerk-driven (QDDDOT) formulations.
    if (
        dynamics_type == MillerDynamics.IMPLICIT_TAU_DRIVEN_QDDDOT
        or dynamics_type == MillerDynamics.ROOT_IMPLICIT_QDDDOT
    ):
        qddot_integrated = sol_integrated.states["qddot"]
    else:
        qddot_integrated = np.nan
    f = open(f"{outpath}.pckl", "wb")
    data = {
        "model_path": biorbd_model_path,
        "irand": i_rand,
        "extra_obj": extra_obj,
        "computation_time": toc,
        "cost": sol.cost,
        "detailed_cost": sol.detailed_cost,
        "iterations": sol.iterations,
        "status": sol.status,
        "states": sol.states,
        "controls": sol.controls,
        "parameters": sol.parameters,
        "dynamics_type": dynamics_type,
        "q_integrated": q_integrated,
        "qdot_integrated": qdot_integrated,
        "qddot_integrated": qddot_integrated,
        "n_shooting": n_shooting,
        "n_theads": n_threads,
    }
    pickle.dump(data, f)
    f.close()
    miller.ocp.save(sol, f"{outpath}.bo")
# Run a single optimization with the built-in defaults when invoked directly.
if __name__ == "__main__":
    main()
|
import sys,re,os,numpy
# Regex fragments used to assemble one parser per gEDA/gaf record type.
# Each X_re template contains %s placeholders for named groups; the matching
# X_para tuple supplies the group names (gaf_format.__init__ performs the
# substitution — the trailing '#%X_para' comments record the pairing).
startspace='\s*'
oneint='(?P<%s>[-\d]+)\s+'
endint='(?P<%s>[-\d]+)\s*'
onechar='(?P<%s>\S+)\s+'
endchar='(?P<%s>\S+)\s*'
twointpair='(?P<%s>[-\d]+),(?P<%s>[-\d]+)\s*'
# Captures everything after the current record for the recursive parser.
next_re='\s*(?P<next>[\s\S]*)';
v_para=('version','fileformat_version');
v_re=(startspace+oneint+endint+next_re)#%v_para;
L_para=('x1','y1','x2','y2','color','width','capstyle','dashstyle','dashlength','dashspace')
L_re=(startspace+9*oneint+endint+next_re)#%L_para;
G_para=('x','y','width','height','angle','mirrored','embedded')
G_re=(startspace+5*oneint+2*onechar+next_re)#%G_para;
B_para=('x','y','width','height','color','linewidth','capstyle','dashstyle','dashlength','dashspace','filltype','fillwidth','angle1','pitch1','angle2','pitch2');
B_re=(startspace+15*oneint+endint+next_re)#%B_para
V_para=('x','y','radius','color','width','capstyle','dashstyle','dash_length','dashspace','filltype','fillwidth','angle1','pitch1','angle2','pitch2')
V_re=(startspace+14*oneint+endint+next_re)#%V_para;
A_para=('x','y','radius','startangle','sweepangle','color','width','capstyle','dashstyle','dashlength','dashspace')
A_re=(startspace+10*oneint+endint+next_re)#%A_para;
T_para=('x','y','color','size','visibility','show_name_value','angle','alignment','num_lines')
T_re=(startspace+8*oneint+endint+next_re)#%T_para
N_para=('x1','y1','x2','y2','color')
N_re=(startspace+4*oneint+endint+next_re)#%N_para
U_para=('x1','y1','x2','y2','color','ripperdir')
U_re=(startspace+5*oneint+endint+next_re)#%U_para
P_para=('x1','y1','x2','y2','color','pintype','whichend')
P_re=(startspace+6*oneint+endint+next_re)#%P_para;
C_para=('x','y','selectable','angle','mirror','basename')
C_re=(startspace+5*oneint+endchar+next_re)#%C_para;
H_para=('color','width','capstyle','dashstyle','dashlength','dashspace','filltype','fillwidth','angle1','pitch1','angle2','pitch2','num_lines')
H_re=(startspace+12*oneint+endint+next_re)#%H_para;
F_para=('character','width','flag')
F_re=(startspace+onechar+oneint+endint+next_re)#%F_para
a_para=('attributes')  # NOTE(review): parentheses without a comma — this is a str, not a 1-tuple; works with a single %s but is fragile
a_re=(startspace+'(?P<%s>[\s\S]*?)}'+next_re)#%a_para;
M_para=('x','y','contents','endz')
M_re=(startspace+twointpair+'(?P<%s>[\s\S]*?)'+'(?P<%s>\n[zZ])\s*'+next_re)#%M_para;
type_para=('type')  # NOTE(review): also a plain str, not a tuple
type_re=(startspace+'(?P<%s>[vLGBVATNMUPCHF{])'+next_re)#%type_para;
np_para=('netname','pinstxt')
np_re=(startspace+'(?P<%s>\S+):(?P<%s>.*)')
class gaf_format:
    """Base record parser/serializer for gEDA ("gaf") schematic objects.

    Subclasses pass their record-type letter, a regex template and the
    matching group-name tuple; parse_gaf fills self.groupdict from one
    chunk of input (which always ends with a 'next' group holding the
    unparsed remainder of the stream).
    """
    def __init__(self,type_str,f_re_string,f_para):
        # Substitute the group names into the regex template up front.
        self.f_re_string=(f_re_string%f_para);
        self.f_para=f_para
        self.groupdict={};
        self.type_str=type_str
        self.attrdict={};
    def textline_re(self,linenums):
        # Regex matching `linenums` literal lines, then the rest of the stream.
        return linenums*r'(.*)\n'+'\s*(?P<next>[\s\S]*)';
    def setattrdict(self,attrdict):
        self.attrdict.update(attrdict)
    def parse_gaf(self,line):
        # On a failed match groupdict stays empty; attribute lookups in the
        # subclass constructors will then raise KeyError.
        m=re.match(self.f_re_string,line)
        if (m):
            self.groupdict=m.groupdict();
    def next_str(self):
        # Unparsed remainder of the input stream.
        return self.groupdict['next']
    def output_mainline(self):
        # Serialize as "<type> field1 field2 ..." in f_para order.
        mainline=[str(self.groupdict[item]) for item in self.f_para]
        mainline.insert(0,self.type_str)
        mainline_str= ' '.join(mainline)
        return mainline_str
    def output_attrline(self):
        # NOTE(review): gaf_string stores *lists* of text objects in
        # attrdict, but output_str() is called on the value directly —
        # confirm the intended value type before relying on this.
        attrline=''
        if (len(self.attrdict)!=0):#groupdict.has_key('attributes'):
            # print str(self.groupdict['attributes'])
            attrline='\n{\n'
            for attr in self.attrdict.keys():#groupdict['attributes']:
                attrline+=(self.attrdict[attr].output_str()+'\n')
            attrline+='}'
        return attrline
    def output_str(self):
        return self.output_mainline()+self.output_attrline();
    def findsym(self,symbolname):
        # Walk a fixed list of directories for a file named *symbolname*;
        # last match wins. Returns the path or None (after printing a note).
        dirlist=['.']# ?? parse gafrc for dirlists
        dirlist.append('../txgls/submodule/hardware/')
        symbolpath=None
        for dirs in dirlist:
            for dirpath,dirnames,filenames in os.walk(dirs):
                for filename in [f for f in filenames if f==symbolname]:
                    symbolpath=os.path.join(dirpath,filename)
        if (symbolpath==None):
            print '%s not found'%symbolname
        return symbolpath;
class version(gaf_format):
    """File-format version record ('v')."""
    def __init__(self,line):
        gaf_format.__init__(self,'v',v_re,v_para)
        self.parse_gaf(line)
        # Mirror every parsed field as an instance attribute.
        for field in v_para:
            setattr(self,field,self.groupdict[field])
class line(gaf_format):
    """Graphical line record ('L')."""
    def __init__(self,line):
        gaf_format.__init__(self,'L',L_re,L_para)
        self.parse_gaf(line)
        # Expose every parsed field (x1..dashspace) as an attribute,
        # in the same order the original assigned them.
        for field in L_para:
            setattr(self,field,self.groupdict[field])
class picture(gaf_format):
    """Picture record ('G').

    Bug fix: the original read self.groupdict['embeded'], but the regex
    group is named 'embedded' (see G_para), so constructing a picture
    always raised KeyError. The field is now read from the correct
    group; the old misspelled attribute is kept as an alias.
    """
    def __init__(self,line):
        gaf_format.__init__(self,'G',G_re,G_para)
        self.parse_gaf(line)
        for field in G_para:
            setattr(self,field,self.groupdict[field])
        # Backward-compatible alias for code that used the old misspelling.
        self.embeded=self.embedded
class box(gaf_format):
    """Box record ('B').

    Bug fix: the original read self.groupdict['linwidth'], but the regex
    group is named 'linewidth' (see B_para), so constructing a box from
    a successfully parsed line always raised KeyError.
    """
    def __init__(self,line):
        gaf_format.__init__(self,'B',B_re,B_para)
        self.parse_gaf(line)
        # Expose every parsed field as an attribute, keyed exactly by B_para.
        for field in B_para:
            setattr(self,field,self.groupdict[field])
class circle(gaf_format):
    """Circle record ('V')."""
    def __init__(self,line):
        gaf_format.__init__(self,'V',V_re,V_para)
        self.parse_gaf(line)
        # Mirror every parsed field (x..pitch2) as an instance attribute.
        for field in V_para:
            setattr(self,field,self.groupdict[field])
class arc(gaf_format):
    """Arc record ('A')."""
    def __init__(self,line):
        gaf_format.__init__(self,'A',A_re,A_para)
        self.parse_gaf(line)
        # Mirror every parsed field (x..dashspace) as an instance attribute.
        for field in A_para:
            setattr(self,field,self.groupdict[field])
class text(gaf_format):
    """Text record ('T'): a header line followed by num_lines literal
    text lines.  A single-line "name=value" body makes it an attribute.
    """
    def __init__(self,line):
        gaf_format.__init__(self,'T',T_re,T_para)
        gaf_format.parse_gaf(self,line)   # parse the T header fields
        self.parse_gaf(line)              # then consume the literal text lines
        # Mirror the header fields as attributes (num_lines stays a string,
        # matching the original assignment order).
        for field in T_para:
            setattr(self,field,self.groupdict[field])
        self.attr_name=None
        self.attr_value=None
    def parse_gaf(self,line):
        # Pull num_lines literal lines out of the remaining stream.
        self.num_lines=int(self.groupdict['num_lines'])
        m=re.match(self.textline_re(self.num_lines),self.groupdict['next'])
        self.groupdict['string_lines']=m.groups()[0:-1]
        self.groupdict['next']=m.group('next')
    def output_str(self):
        # Bug fix: a second, duplicate output_str definition shadowed this
        # one and read groupdict['attr_name']/['attr_value'] — keys that are
        # never set — so serializing any text object raised KeyError.
        # Joining the stored string lines is correct for plain text and for
        # attributes alike (an attribute's line is already "name=value").
        mainline=self.output_mainline()
        textline='\n'.join(self.groupdict['string_lines'])
        return mainline+'\n'+textline
    def isattr(self):
        """True when the first body line parses as name=value."""
        self.parse_oneline_attribute_text()
        return self.attr
    def parse_oneline_attribute_text(self):
        m=re.match('\s*(?P<attr_name>.+)\s*=\s*(?P<attr_value>.+)\s*',self.groupdict['string_lines'][0]);
        self.attr=True if m else False
        if (m):
            self.attr_name=m.group('attr_name')
            self.attr_value=m.group('attr_value')
        return m
    def getattrname(self):
        return self.attr_name
    def getattrvalue(self):
        return self.attr_value
    def setvalue(self,value):
        # Bug fix: the original evaluated self.attr_value without assigning,
        # making setvalue() a silent no-op.
        self.attr_value=value
        if self.attr_name is not None:
            # Keep the serialized body in sync so output_str reflects the change.
            lines=list(self.groupdict['string_lines'])
            lines[0]='%s=%s'%(self.attr_name,value)
            self.groupdict['string_lines']=tuple(lines)
class net(gaf_format):
    """Net segment record ('N')."""
    def __init__(self,line):
        gaf_format.__init__(self,'N',N_re,N_para)
        self.parse_gaf(line)
        self.x1=self.groupdict['x1']
        self.y1=self.groupdict['y1']
        self.x2=self.groupdict['x2']
        self.y2=self.groupdict['y2']
        self.color=self.groupdict['color']
        # Integer endpoint tuples for geometry queries.
        x1=int(self.groupdict['x1'])
        y1=int(self.groupdict['y1'])
        x2=int(self.groupdict['x2'])
        y2=int(self.groupdict['y2'])
        self.p1=(x1,y1)
        self.p2=(x2,y2)
    def get_netxy(self):
        """Both endpoints as [(x1,y1),(x2,y2)] integer tuples."""
        xylist=[(int(self.groupdict['x1']),int(self.groupdict['y1'])),(int(self.groupdict['x2']),int(self.groupdict['y2']))]
        return xylist
    def point_on_net(self,xy):
        """True when *xy* lies on this segment (collinear and within the bbox).

        Bug fix: the original computed self.p2-self.p1 directly on plain
        tuples, which raises TypeError; the points are now converted to
        numpy arrays before the cross product.
        """
        online=False
        p1=numpy.array(self.p1)
        p2=numpy.array(self.p2)
        c=numpy.cross(p2-p1,p1-numpy.array(xy))
        xin=min(self.p1[0],self.p2[0])<=xy[0] and max(self.p1[0],self.p2[0])>=xy[0]
        yin=min(self.p1[1],self.p2[1])<=xy[1] and max(self.p1[1],self.p2[1])>=xy[1]
        if (c==0 and xin and yin):
            online=True
        return online
class bus(gaf_format):
    """Bus segment record ('U')."""
    def __init__(self,line):
        gaf_format.__init__(self,'U',U_re,U_para)
        self.parse_gaf(line)
        # Mirror every parsed field (x1..ripperdir) as an instance attribute.
        for field in U_para:
            setattr(self,field,self.groupdict[field])
class pin(gaf_format):
    """Pin record ('P'); tracks its endpoint location and owning component."""
    def __init__(self,line,netpin=False):
        self.init_para()
        gaf_format.__init__(self,'P',P_re,P_para)
        self.parse_gaf(line)
        self.x1	=self.groupdict['x1']
        self.y1	=self.groupdict['y1']
        self.x2	=self.groupdict['x2']
        self.y2	=self.groupdict['y2']
        self.color	=self.groupdict['color']
        self.pintype	=self.groupdict['pintype']
        self.whichend	=self.groupdict['whichend']
        self.location()
    def init_para(self):
        # Shared defaults; also used by the netpin subclass, which skips
        # gaf_format.__init__.
        self.x=None;
        self.y=None;
        self.component=None;
        self.net={}
        self.whichend=None;
        #self.net=None
    def getpinseq(self):
        # attrdict values are lists of attribute text objects; the last one wins.
        return self.attrdict['pinseq'][-1].attr_value
    def set_comp(self,comp):
        self.component=comp;
        for value in self.net.values():
            value['comp']=comp
    def getxy(self):
        return (self.x,self.y)
    def location(self):
        # Pick the active endpoint according to whichend (0 -> p1, 1 -> p2,
        # anything else -> origin).  has_key() makes this Python-2-only.
        x=None
        y=None
        if (self.groupdict.has_key('whichend') and self.groupdict.has_key('x1') and self.groupdict.has_key('y1') and self.groupdict.has_key('x2') and self.groupdict.has_key('y2')):
            if (int(self.groupdict['whichend'])==0):
                x=int(self.groupdict['x1'])
                y=int(self.groupdict['y1'])
            elif (int(self.groupdict['whichend'])==1):
                x=int(self.groupdict['x2'])
                y=int(self.groupdict['y2'])
            else:
                x=0
                y=0
        self.x=x
        self.y=y
        #print int(obj.groupdict['whichend'])
        return (x,y)
    def setseq(self,newseq,label=None,number=None):
        # NOTE(review): label/number parameters are ignored and all three
        # attributes receive newseq; also attrdict values are lists here
        # (see gaf_string), which have no setvalue() — confirm intent.
        self.attrdict['pinseq'].setvalue(newseq)
        self.attrdict['pinlabel'].setvalue(newseq)
        self.attrdict['pinnumber'].setvalue(newseq)
        #print len(self.attrdict),self.attrdict.keys()
        #print len(self.groupdict),self.groupdict.keys()
class netpin(pin):
    """Pseudo-pin synthesized from a symbol-level net= attribute (type 'np').

    Unlike a real pin there is no 'P' line to parse, so gaf_format.__init__
    is deliberately skipped and only the attribute bookkeeping is set up.
    """
    def __init__(self,net,pinseq):
        self.attrdict={}
        self.attrdict['net']=net
        self.attrdict['pinseq']=pinseq
        self.type_str='np'
        #gaf_format.__init__(self,'np',np_re,np_para)
        #self.parse_gaf(line)
        #for pinseq in self.groupdict['pinstxt'].split(','):
        #    self.attrdict['pinseq']=pinseq
        self.init_para()
    def getpinseq(self):
        # Unlike pin.getpinseq, the stored value here is already the raw string.
        return self.attrdict['pinseq']
    #self.setseq(pinseq)
class component(gaf_format):
    """Placed component record ('C'); resolves pin coordinates via its symbol."""
    def __init__(self,line):
        gaf_format.__init__(self,'C',C_re,C_para)
        self.parse_gaf(line)
        self.x	=self.groupdict['x']
        self.y	=self.groupdict['y']
        self.selectable	=self.groupdict['selectable']
        self.angle	=self.groupdict['angle']
        self.mirror	=self.groupdict['mirror']
        self.basename	=self.groupdict['basename']
        self.pinxy={}
        self.update_attr()
    def update_attr(self):
        # refdes comes from an attached refdes= attribute (list; last wins).
        # NOTE(review): text objects store attr_value as an instance
        # attribute, not in groupdict — this lookup may KeyError; confirm.
        if self.attrdict.has_key('refdes'):
            self.refdes=self.attrdict['refdes'][-1].groupdict['attr_value']
        else:
            self.refdes='UNKNOWN?'
    def get_refdes(self):
        return self.refdes
    def get_pinxy(self):
        # Translate the symbol's pin coordinates by the component position.
        # NOTE(review): `key` is a dict and dicts are unhashable, so
        # self.pinxy.update({key:...}) will raise TypeError — a tuple
        # ('comp', 'pin') was probably intended; confirm.
        symbol_path=self.findsym(self.basename)
        if symbol_path:
            self.symbol=geda_symbol(symbol_path)
            sympinxy=self.symbol.getpinxy()
            for pin in sympinxy.keys():
                key={'comp':self,'pin':pin}
                x=int(sympinxy[pin][0])+int(self.x)
                y=int(sympinxy[pin][1])+int(self.y)
                self.pinxy.update({key:(x,y)})
                print key
                print self.pinxy
class path(gaf_format):
    """Path record ('H'); the path data itself follows as 'M' records."""
    def __init__(self,line):
        gaf_format.__init__(self,'H',H_re,H_para)
        self.parse_gaf(line)
        # Mirror every parsed field (color..num_lines) as an instance attribute.
        for field in H_para:
            setattr(self,field,self.groupdict[field])
class font(gaf_format):
    """Font record ('F')."""
    def __init__(self,line):
        gaf_format.__init__(self,'F',F_re,F_para)
        self.parse_gaf(line)
        # Mirror every parsed field (character, width, flag) as an attribute.
        for field in F_para:
            setattr(self,field,self.groupdict[field])
class attributes(gaf_format):
    """'{ ... }' attribute block attached to the preceding object."""
    def __init__(self,line):
        gaf_format.__init__(self,'{',a_re,a_para)
        self.parse_gaf(line)
        self.attributes	=self.groupdict['attributes']
        self.attrdict={}
        self.parse_attributes()
    def getattrdict(self):
        return self.attrdict
    def parse_attributes(self):
        # Re-parse the block contents as its own gaf stream of text objects
        # and absorb the resulting attribute dictionary.
        attrsobj=gaf_string(self.groupdict['attributes']);
        self.attrdict.update(attrsobj.attrdict)
        return attrsobj.getattrdict()
    # attrs=attrsobj.parse_string()
    # attrtextobjs=[text(attr) for attr in attrsobj.objects]
    # for attr in attrtextobjs:
    #     self.attrdict[attr.groupdict['attr_name']]=attr
    # self.attrdict.update(attrsobj.getattrdict()
    # return attrtextobjs
class pathdata(gaf_format):
    """Path-data record ('M'): "x,y" start point, free-form contents, 'z'/'Z' end."""
    def __init__(self,line):
        gaf_format.__init__(self,'M',M_re,M_para)
        self.parse_gaf(line)
        # Mirror every parsed field (x, y, contents, endz) as an attribute.
        for field in M_para:
            setattr(self,field,self.groupdict[field])
    def output_str(self):
        #this is a temporary solution before I understand the detail structure
        gd=self.groupdict
        return "%s %s,%s\n%s%s"%(self.type_str,gd['x'],gd['y'],gd['contents'],gd['endz'])
class gaf_string:
    """Recursively parse a whole gaf file/string into typed record objects.

    self.objects collects the records in file order; floating name=value
    text records are instead gathered into self.attrdict (name -> list of
    text objects), and '{...}' blocks are attached to the previous object.
    """
    def __init__(self,string):
        self.s_gaf=string
        self.objects=[]
        self.attrdict={}
        self.parse_string()
        self.nets=[]
    def getattrdict(self):
        return self.attrdict
    def parse_string(self,string=None):
        # Recursive descent: peel one record's type letter off the front,
        # dispatch to the matching class, then recurse on the remainder.
        if (string==None):
            string=self.s_gaf;
        m=re.match(type_re%type_para,string);
        gaf_object=None;
        if (m):
            type_str=m.group('type');
            next_str=m.group('next');
            if type_str=='v':
                gaf_object=version(next_str);
            elif type_str=='M':
                gaf_object=pathdata(next_str);
            elif type_str=='L':
                gaf_object=line(next_str);
            elif type_str=='G':
                gaf_object=picture(next_str);
            elif type_str=='B':
                gaf_object=box(next_str);
            elif type_str=='V':
                gaf_object=circle(next_str);
            elif type_str=='A':
                gaf_object=arc(next_str);
            elif type_str=='T':
                gaf_object=text(next_str);
            elif type_str=='N':
                gaf_object=net(next_str);
            elif type_str=='U':
                gaf_object=bus(next_str);
            elif type_str=='P':
                gaf_object=pin(next_str);
            elif type_str=='C':
                gaf_object=component(next_str);
            elif type_str=='H':
                gaf_object=path(next_str);
            elif type_str=='F':
                gaf_object=font(next_str);
            elif type_str=='{':
                gaf_object=attributes(next_str);
            else:
                print "don't know ----------",next_str;
        if (gaf_object):
            if (gaf_object.type_str=='{'):#__class__.__name__=='attributes'):
                # '{' block: attach its attributes to the previous object.
                self.objects[-1].setattrdict(gaf_object.getattrdict());
            elif (gaf_object.type_str=='T' and gaf_object.isattr()):
                # Floating attribute text: collect into attrdict lists.
                attrobj=[gaf_object]
                attrname=gaf_object.getattrname()
                attrvalue=gaf_object.getattrvalue()
                newattr=attrobj
                if self.attrdict.has_key(attrname):
                    print 'has_key',attrname,self.attrdict[attrname],newattr
                    self.attrdict[attrname].extend(newattr)
                else:
                    self.attrdict[attrname]=newattr
                    print 'new',attrname,self.attrdict[attrname],newattr
            else:
                self.objects.append(gaf_object);
            next_str=gaf_object.next_str();
            self.parse_string(next_str);
        else:
            next_str=None
        return self.objects
class geda_schematic(gaf_string):
    """A parsed .sch file: objects partitioned into components/nets/buses."""
    def __init__(self,filename):
        string=open(filename).read()
        gaf_string.__init__(self,string)
        self.components=[]
        self.nets=[];
        self.buses=[];
        self.node=[];
        for obj in self.objects:
            if (obj.type_str=='C'):
                self.components.append(obj)
            elif (obj.type_str=='N'):
                self.nets.append(obj)
            elif (obj.type_str=='U'):
                self.buses.append(obj)
            else:
                pass
    # for obj in self.components:
    #     self.node.update(obj.get_pinxy())
    # for obj in self.nets:
    #     self.node.
    def merge_nets(self):
        # NOTE(review): unfinished work in progress — self.netlists is never
        # assigned, and `net`/`newnet`/`connected_net` are undefined, so
        # calling this raises AttributeError/NameError.  Kept verbatim.
        processed=[];
        #self.netlists=[{net:None} for net in self.nets]
        for net1 in self.netlists:
            for net2 in self.netlists:
                if (net1 != net2):
                    print net1.get_netxy()
                    print net2.get_netxy()
                    if net1.connected_net(net2):
                        net.append(newnet)
                        processed.append(newnet)
        unprocessed=[net for net in self.nets]
        for newcomponent in self.components:
            print newcomponent.get_pinxy()
        #netlist.update(component=component)
        #xy= net.get_netxy():
        #for netlist in self.netlists:
        #    if xy[0] in netlist or xy[1] in netlist:
        #self.netlists=[{((x0,y0),(x1,y1),(x2,y2)):name},....]
        #    print xy
        #    self.netlist.append(xy)
    def available_part_list(self):
        pass
    def apply_parts(self):
        pass
    def RF_sim(self):
        pass
    def netlist(self):
        pass
    def generate_symbol_from_sch(self):
        pass
class geda_symbol(gaf_string):
    """A parsed .sym file with pin bookkeeping (pins, pindict, pinxy)."""
    def __init__(self,filename):
        self.pins=[]
        string=open(filename).read()
        gaf_string.__init__(self,string)
        self.gennetpin()
        self.update_pins()
        self.genpindict()
        self.genpinxy()
    def gennetpin(self):
        # Expand symbol-level net=name:pin1,pin2,... attributes into netpin
        # objects appended to self.objects.
        if self.attrdict.has_key('net'):
            print self.attrdict['net']
            for np in self.attrdict['net']:
                print np
                nptxt=np.attr_value
                print 'nptxt is ',nptxt
                m=re.match(np_re%np_para,nptxt)
                if (m):
                    npdict=m.groupdict();
                    print npdict['pinstxt'].split(',')
                    for pinseq in npdict['pinstxt'].split(','):
                        newpin=netpin(npdict['netname'],pinseq)
                        self.objects.append(newpin)
    # newpin=netpin(np.groupdict['attr_value'])#,netpin=True)
    # self.objects.append(newpin)
    # print np.groupdict['attr_value']#['attrvalue']
    # if net not in self.nets:
    #     self.nets.append(net)
    # else:
    #     pass # should add pin to net?
    def derive_vendor_part_symbol(self):
        pass
    def add_attr(self,attrdict):
        pass
    def pin_map(self,oldnewdict):
        # Renumber pins according to {old_seq: new_seq}.
        for pin in oldnewdict.keys():
            if self.pindict.has_key(pin):
                self.pindict[pin].setseq(oldnewdict[pin])
        pass
    def RF_sim(self):
        pass
    def generate_from_verilog(self,verilogfile):
        pass
    def update_pins(self):
        # Collect real pins ('P') and synthesized netpins ('np').
        print 'update_pins',self.pins
        for obj in self.objects:
            if (obj.type_str=='P' or obj.type_str=='np'):
                self.pins.append(obj)
        if self.attrdict.has_key('net'):
            print [net.__class__.__name__ for net in self.attrdict['net']]
    def genpindict(self):
        # Map pinseq -> pin object (later pins overwrite duplicate seqs).
        self.pindict={}
        for obj in self.objects:
            if (obj.type_str=='P' or obj.type_str=='np'):
                print obj.type_str
                print obj.getpinseq()
                self.pindict.update({obj.getpinseq():obj})
    def getpindict(self):
        return self.pindict
    def genpinxy(self):
        # Map pinseq -> (x, y) active endpoint of each pin.
        self.pinxy={}
        for p in self.pindict.keys():
            self.pinxy.update({p:self.pindict[p].getxy()})
    def getpinxy(self):
        return self.pinxy
if __name__=="__main__":
    # Debug branch, deliberately disabled with if (0); kept for reference.
    if (0):
        sym=geda_symbol(sys.argv[1])
        print sym.getpindict()
        # for o in sym.objects:
        #     print o.output_str()
        #sym.pin_map({'1':'2','2':'1'})
        for o in sym.objects:
            print o.output_str()
        print sym.getpindict()
        print sym.getpinxy()
    # Dispatch on the file extension of the single command-line argument.
    filename=sys.argv[1]
    ext=os.path.basename(filename).split('.')[-1]
    if ext=='sym':
        sym=geda_symbol(filename)
        print '''usage:
-x symbol filename apply vendor specification in json file'''
        print 'obj class name',[obj.__class__.__name__ for obj in sym.objects]
        print 'key,length for attrdict',[(key,len(sym.attrdict[key])) for key in sym.attrdict.keys()]
        print '# of pins: ',len(sym.pins)
        print filename,len(sym.pins),'pins'
        for pin in sym.pins:
            print 'pin',pin.attrdict,pin.getxy()
    elif ext=='sch':
        sch=geda_schematic(sys.argv[1])
        print sch.components
        print 'usage'
        # sch.merge_nets()
        # print [o.groupdict['basename'] if o.__class__.__name__=='component' else o.__class__.__name__ for o in sgaf.objects]
        # for o in sgaf.objects:
        #     print o.output_str()
# http://wiki.geda-project.org/geda:file_format_spec#path_data
|
# you can write to stdout for debugging purposes, e.g.
# print("this is a debug message")
def solution(N, M):
    """Return how many chocolates are eaten before hitting an empty wrapper.

    Chocolates 0, M, 2M, ... (mod N) are eaten until a position repeats;
    the number of distinct positions visited is exactly N // gcd(N, M),
    so the O(N) simulation loop of the original is replaced by an O(log)
    closed form with identical results (including M == 0, where
    gcd(N, 0) == N gives 1, matching the original's immediate repeat of 0).

    :param N: total number of chocolates arranged in a circle (N >= 1)
    :param M: step size between eaten chocolates
    :return: count of chocolates eaten before the first repeat
    """
    import math  # local import keeps this self-contained script block runnable

    if N == 1:
        # Single chocolate: the first (and only) one is eaten.
        return 1
    return N // math.gcd(N, M)
import io
import os
import sys
import time
from portalocker import LOCK_EX, lock, unlock
from contextlib import contextmanager
from secrets import randbits
from logging.handlers import BaseRotatingHandler, TimedRotatingFileHandler
from logging import FileHandler
_MIDNIGHT = 24 * 60 * 60 # number of seconds in a day
PY2 = False
if sys.version_info[0] == 2:
PY2 = True
# File-lock handler abstracted from the open-source ConcurrentRotatingFileHandler package.
class ConcurrentLock(FileHandler):
    """FileHandler variant that serializes writes across processes with a
    portalocker file lock. The log stream is opened fresh for every write
    and closed again, so multiple processes can share one log file.
    """
    def __init__(self, filename, mode='a', encoding=None, delay=False, umask=None):
        """
        Use the specified filename for streamed logging
        """
        self.mode = mode
        self.encoding = encoding
        FileHandler.__init__(self, filename, mode, encoding, delay)
        self.terminator = "\n"
        self.lockFilename = self.getLockFilename()
        # is_locked guards against recursive locking; stream_lock is the open lock file.
        self.is_locked = False
        self.stream_lock = None
        self.umask = umask
        # How undecodable characters are handled when re-encoding ('ignore' drops them).
        self.unicode_error_policy = 'ignore'
    def getLockFilename(self):
        """
        Decide the lock filename. If the logfile is file.log, then we use `.__file.lock` and
        not `file.log.lock`. This only removes the extension if it's `*.log`.
        :return: the path to the lock file.
        """
        if self.baseFilename.endswith(".log"):
            lock_file = self.baseFilename[:-4]
        else:
            lock_file = self.baseFilename
        lock_file += ".lock"
        lock_path, lock_name = os.path.split(lock_file)
        # hide the file on Unix and generally from file completion
        lock_name = ".__" + lock_name
        return os.path.join(lock_path, lock_name)
    def _open_lockfile(self):
        # Open (or reuse) the unbuffered lock file used by portalocker.
        if self.stream_lock and not self.stream_lock.closed:
            return
        lock_file = self.lockFilename
        with self._alter_umask():
            self.stream_lock = open(lock_file, "wb", buffering=0)
    def _open(self, mode=None):
        # Normally we don't hold the stream open. Only do_open does that
        # which is called from do_write().
        return None
    def do_open(self, mode=None):
        """
        Open the current base file with the (original) mode and encoding.
        Return the resulting stream.
        Note: Copied from stdlib. Added option to override 'mode'.
        """
        if mode is None:
            mode = self.mode
        with self._alter_umask():
            stream = io.open(self.baseFilename, mode=mode)
        return stream
    @contextmanager
    def _alter_umask(self):
        """Temporarily alter umask to custom setting, if applicable."""
        if self.umask is None:
            yield  # nothing to do
        else:
            prev_umask = os.umask(self.umask)
            try:
                yield
            finally:
                # Always restore the previous umask, even if the body raises.
                os.umask(prev_umask)
    def _close(self):
        """ Close file stream. Unlike close(), we don't tear anything down, we
        expect the log to be re-opened after rotation."""
        if self.stream:
            try:
                if not self.stream.closed:
                    # Flushing probably isn't technically necessary, but it feels right
                    self.stream.flush()
                    self.stream.close()
            finally:
                self.stream = None
    def flush(self):
        """Does nothing; stream is flushed on each write."""
        return
    def do_write(self, msg):
        """Handling writing an individual record; we do a fresh open every time.
        This assumes emit() has already locked the file."""
        self.stream = self.do_open()
        stream = self.stream
        if PY2:
            self.do_write_py2(msg)
        else:
            msg = msg + self.terminator
            try:
                stream.write(msg)
            except UnicodeError:
                # Try to emit in a form acceptable to the output encoding
                # The unicode_error_policy determines whether this is lossy.
                try:
                    encoding = getattr(stream, 'encoding', self.encoding or 'us-ascii')
                    msg_bin = msg.encode(encoding, self.unicode_error_policy)
                    msg = msg_bin.decode(encoding, self.unicode_error_policy)
                    stream.write(msg)
                except UnicodeError:
                    # Give up: re-raise so emit()'s handler can report it.
                    raise
        stream.flush()
        self._close()
        return
    # noinspection PyCompatibility,PyUnresolvedReferences
    def do_write_py2(self, msg):
        # Python-2-only write path: coerce message and terminator to unicode first.
        stream = self.stream
        term = self.terminator
        policy = self.unicode_error_policy
        encoding = getattr(stream, 'encoding', None)
        # as far as I can tell, this should always be set from io.open, but just in case...
        if not encoding:
            encoding = self.encoding or 'utf-8'
        if not isinstance(msg, unicode):
            msg = unicode(msg, encoding, policy)
        # Add in the terminator.
        if not isinstance(term, unicode):
            term = unicode(term, encoding, policy)
        msg = msg + term
        stream.write(msg)
    def _do_lock(self):
        # Acquire the exclusive inter-process lock; retries up to 10 times.
        if self.is_locked:
            # NOTE(review): bare `raise` with no active exception raises
            # RuntimeError("No active exception...") — probably meant to be an
            # explicit error about recursive locking; confirm before changing.
            raise  # already locked... recursive?
        self._open_lockfile()
        if self.stream_lock:
            for i in range(10):
                # noinspection PyBroadException
                try:
                    lock(self.stream_lock, LOCK_EX)
                    self.is_locked = True
                    break
                except Exception:
                    continue
            else:
                # for/else: ran out of attempts without breaking.
                raise RuntimeError("Cannot acquire lock after 10 attempts")
    def _do_unlock(self):
        # Release the inter-process lock and close the lock file.
        if self.stream_lock:
            if self.is_locked:
                unlock(self.stream_lock)
                self.is_locked = False
            self.stream_lock.close()
            self.stream_lock = None
# Subclass of TimedRotatingFileHandler with doRollover and emit overridden
# so rotation and writes are safe across multiple processes.
class MyTimedRotatingFileHandler(TimedRotatingFileHandler, ConcurrentLock):
    """Time-based rotating handler whose writes/rollovers are guarded by
    ConcurrentLock's inter-process file lock."""
    def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False, atTime=None):
        TimedRotatingFileHandler.__init__(self, filename, when, interval, backupCount, encoding, delay, utc, atTime)
        ConcurrentLock.__init__(self, filename, 'a', encoding, delay)
        # NOTE(review): the template hard-codes "(unknown)" and ignores the
        # `filename` argument passed to .format() in doRollover — confirm the
        # intended rotated-file naming scheme.
        self.nameFormat = "{basePath}/(unknown).{suffix}"
    def shouldRollover(self, record):
        # The record itself is irrelevant for time-based rotation.
        del record
        return self._shouldRollover()
    def _shouldRollover(self):
        # Opens the stream so emit()'s do_write can use it when rolling over.
        self.stream = self.do_open()
        t = int(time.time())
        if t >= self.rolloverAt:
            return 1
        self._close()
        return 0
    def doRollover(self):
        """Rename the current log to its time-stamped name and schedule the
        next rollover. Mirrors the stdlib implementation, with the rename
        skipped when the destination already exists (another process may
        have rotated first)."""
        self._close()
        currentTime = int(time.time())
        dstNow = time.localtime(currentTime)[-1]
        # The suffix is computed from the *start* of the interval just ended.
        t = self.rolloverAt - self.interval
        if self.utc:
            timeTuple = time.gmtime(t)
        else:
            timeTuple = time.localtime(t)
            dstThen = timeTuple[-1]
            # Compensate if DST changed between then and now.
            if dstNow != dstThen:
                if dstNow:
                    addend = 3600
                else:
                    addend = -3600
                timeTuple = time.localtime(t + addend)
        # dfn = "%s.%s" % (self.baseFilename, time.strftime(self.suffix, timeTuple))
        basePath = os.path.dirname(self.baseFilename)
        filename = os.path.basename(self.baseFilename)
        dfn = self.nameFormat.format(basePath=basePath, filename=filename, suffix=time.strftime(self.suffix, timeTuple))
        # if os.path.exists(dfn):
        #     os.remove(dfn)
        # Only rename if no other process has already rotated to dfn.
        if not os.path.exists(dfn) and os.path.exists(self.baseFilename):
            os.rename(self.baseFilename, dfn)
        if self.backupCount > 0:
            for s in self.getFilesToDelete():
                os.remove(s)
        if not self.delay:
            self.stream = self.do_open()
        # Advance rolloverAt past "now", adjusting for DST at the target time.
        newRolloverAt = self.computeRollover(currentTime)
        while newRolloverAt <= currentTime:
            newRolloverAt = newRolloverAt + self.interval
        if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
            dstAtRollover = time.localtime(newRolloverAt)[-1]
            if dstNow != dstAtRollover:
                if not dstNow:
                    addend = -3600
                else:
                    addend = 3600
                newRolloverAt += addend
        self.rolloverAt = newRolloverAt
    def emit(self, record):
        """
        Emit a record. Overridden from the parent to handle file locking
        around rollover and write. The record is formatted *before* the lock
        is acquired, in case formatting itself makes logging calls. Rollover
        also happens while locked.
        """
        # noinspection PyBroadException
        try:
            msg = self.format(record)
            try:
                self._do_lock()
                try:
                    if self.shouldRollover(record):
                        self.doRollover()
                except Exception as e:
                    # NOTE(review): rollover failures are silently swallowed so
                    # logging never crashes the app — the record is still written.
                    pass
                self.do_write(msg)
            finally:
                self._do_unlock()
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)
# Rewritten RotatingFileHandler (size-based) with multi-process-safe rotation.
class MyRotatingFileHandler(BaseRotatingHandler, ConcurrentLock):
    """
    Handler for logging to a set of files, which switches from one file to the
    next when the current file reaches a certain size. Multiple processes can
    write to the log file concurrently, but this may mean that the file will
    exceed the given size.
    """
    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=None):
        self.maxBytes = maxBytes
        self.backupCount = backupCount
        # Construct the handler with the given arguments in "delayed" mode
        # because we will handle opening the file as needed. File name
        # handling is done by FileHandler since Python 2.5.
        BaseRotatingHandler.__init__(self, filename, mode, encoding=encoding, delay=True)
        ConcurrentLock.__init__(self, filename, mode, encoding=encoding, delay=True)
    def doRollover(self):
        """
        Do a rollover, as described in __init__().
        """
        self._close()
        if self.backupCount <= 0:
            # No backups kept: just truncate the current file in place.
            self.stream = self.do_open("w")
            self._close()
            return
        # Pick a temp name that does not exist yet (random 64-bit suffix).
        tmpname = None
        while not tmpname or os.path.exists(tmpname):
            tmpname = "%s.rotate.%08d" % (self.baseFilename, randbits(64))
        try:
            # Do a rename test to determine if we can successfully rename the log file
            # (another process may have rotated already, or the file may be locked).
            os.rename(self.baseFilename, tmpname)
        except (IOError, OSError):
            return
        def do_rename(source_fn, dest_fn):
            # Overwriting rename: remove the destination first if present.
            if os.path.exists(dest_fn):
                os.remove(dest_fn)
            if os.path.exists(source_fn):
                os.rename(source_fn, dest_fn)
        # Shift file.N -> file.N+1 from the oldest backup downwards.
        for i in range(self.backupCount - 1, 0, -1):
            sfn = "%s.%d" % (self.baseFilename, i)
            dfn = "%s.%d" % (self.baseFilename, i + 1)
            if os.path.exists(sfn):
                do_rename(sfn, dfn)
        dfn = self.baseFilename + ".1"
        do_rename(tmpname, dfn)
    def shouldRollover(self, record):
        """
        Determine if rollover should occur.
        For those that are keeping track. This differs from the standard
        library's RotatingLogHandler class. Because there is no promise to keep
        the file size under maxBytes we ignore the length of the current record.
        """
        del record  # avoid pychecker warnings
        return self._shouldRollover()
    def _shouldRollover(self):
        if self.maxBytes > 0:  # are we rolling over?
            self.stream = self.do_open()
            try:
                self.stream.seek(0, 2)  # due to non-posix-compliant Windows feature
                if self.stream.tell() >= self.maxBytes:
                    return True
            finally:
                self._close()
        return False
    def emit(self, record):
        """
        Emit a record. Overridden from the parent to handle file locking
        around rollover and write. The record is formatted *before* the lock
        is acquired, in case formatting itself makes logging calls. Rollover
        also happens while locked.
        """
        # noinspection PyBroadException
        try:
            msg = self.format(record)
            try:
                self._do_lock()
                try:
                    if self.shouldRollover(record):
                        self.doRollover()
                except Exception as e:
                    # NOTE(review): rollover failures are deliberately ignored;
                    # the record is still written to the current file.
                    pass
                self.do_write(msg)
            finally:
                self._do_unlock()
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)
import os, sys, pygame, random, array, gamemode, FlappyBird
import MAIN,spacein
import direction, bounds, timeout, menu
from pygame.locals import *
# Import game modules.
from loader import load_image
import player, maps, traffic, camera, tracks
# Module-level pygame initialization shared by all functions below.
mainClock = pygame.time.Clock()
from pygame.locals import *
# NOTE(review): backslash in 'images\gm.jpg' is a Windows-style path;
# os.path.join would be portable — confirm the target platform before changing.
bg = pygame.image.load('images\gm.jpg')
pygame.init()
pygame.display.set_caption('game base')
# 500x400 window, default flags, 32-bit color depth.
screen = pygame.display.set_mode((500, 400), 0, 32)
font = pygame.font.SysFont('Helvetica', 20)
def draw_text(text, font, color, surface, x, y):
    """Render *text* with *font* in *color* and blit it onto *surface*
    with its top-left corner at (x, y)."""
    rendered = font.render(text, 1, color)
    rect = rendered.get_rect()
    rect.topleft = (x, y)
    surface.blit(rendered, rect)
def addText(piu, x, y):
    """Render *piu* in black with the module font, blit it onto the global
    screen at (x, y) and refresh the display."""
    rendered = font.render(piu, True, (0, 0, 0))
    screen.blit(rendered, (x, y))
    pygame.display.update()
# Latched mouse-click flag shared between the event loop and the button checks.
click = False

def main_menu():
    """Draw the four-game launcher menu and dispatch to the chosen game.

    Runs forever at 60 FPS; exits the process on window close or ESC.
    """
    global click
    while True:
        screen.fill((0, 0, 0))
        screen.blit(bg,(0, 0))
        draw_text('PLAY !!', font, (255, 0, 0), screen, 220, 180)
        mx, my = pygame.mouse.get_pos()
        # One clickable rectangle per game.
        button_1 = pygame.Rect(70, 80, 150, 50)
        button_2 = pygame.Rect(70, 250, 150, 50)
        button_3 = pygame.Rect(290, 80, 150, 50)
        button_4 = pygame.Rect(290, 250, 150, 50)
        if button_1.collidepoint((mx, my))== True:
            if click:
                FlappyBird.main()
        if button_2.collidepoint((mx, my)):
            if click:
                # NOTE(review): importing runs the game once as a side effect;
                # a second click will not re-import — confirm this is intended.
                import angry
        if button_3.collidepoint((mx, my)):
            if click:
                MAIN.main()
        if button_4.collidepoint((mx, my)):
            if click:
                spacein.main()
        pygame.draw.rect(screen, (255, 203, 0), button_1)
        addText('Flappy Bird', 80, 90)
        pygame.draw.rect(screen, (255, 203, 0), button_2)
        addText('Angry Bird', 80, 260)
        pygame.draw.rect(screen, (255, 203, 0), button_3)
        addText('Apple Race', 300, 90)
        pygame.draw.rect(screen, (255, 203, 0), button_4)
        addText('Space Invader', 300, 260)
        # Reset the latch; it is set again below when a click event arrives.
        click = False
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        pygame.display.update()
        mainClock.tick(60)

main_menu()
|
from datetime import date

# Ask for 7 birth years and count how many people are of age (>= 21)
# versus minors. Prompts and output are in Portuguese.
atual = date.today().year  # current year
totmaior = 0  # people aged 21 or older
totmenor = 0  # people under 21
for p in range(1, 8):
    nasc = int(input('Em que ano a {}° pessoa nasceu? '.format(p)))
    # NOTE(review): age is approximated from the birth year only
    # (month/day ignored), so it can be off by one.
    idade = atual - nasc
    print('Sua idade é {} anos'.format(idade))
    if idade < 21:
        totmenor += 1
    else:
        totmaior += 1
# ANSI color codes: yellow for minors, green for adults.
print('\033[33m{} pessoas são menores de idade.\033[m'.format(totmenor))
print('\033[32m{} pessoas são maiores de idade.\033[m'.format(totmaior))
from enum import IntEnum
from sdl2._sdl2 import lib
class KeyCode(IntEnum):
    """SDL2 key codes (SDLK_*) exposed as an IntEnum.

    Member values come directly from the SDL2 C library bindings; names
    mirror the SDLK_ constants in lowercase (digits are prefixed with 'n',
    'return' becomes 'return_' to avoid the keyword).
    """
    # Digit keys (top row).
    n0 = lib.SDLK_0
    n1 = lib.SDLK_1
    n2 = lib.SDLK_2
    n3 = lib.SDLK_3
    n4 = lib.SDLK_4
    n5 = lib.SDLK_5
    n6 = lib.SDLK_6
    n7 = lib.SDLK_7
    n8 = lib.SDLK_8
    n9 = lib.SDLK_9
    # Application-control (browser/media) keys.
    ac_back = lib.SDLK_AC_BACK
    ac_bookmarks = lib.SDLK_AC_BOOKMARKS
    ac_forward = lib.SDLK_AC_FORWARD
    ac_home = lib.SDLK_AC_HOME
    ac_refresh = lib.SDLK_AC_REFRESH
    ac_search = lib.SDLK_AC_SEARCH
    ac_stop = lib.SDLK_AC_STOP
    again = lib.SDLK_AGAIN
    alterase = lib.SDLK_ALTERASE
    ampersand = lib.SDLK_AMPERSAND
    application = lib.SDLK_APPLICATION
    asterisk = lib.SDLK_ASTERISK
    at = lib.SDLK_AT
    audiomute = lib.SDLK_AUDIOMUTE
    audionext = lib.SDLK_AUDIONEXT
    audioplay = lib.SDLK_AUDIOPLAY
    audioprev = lib.SDLK_AUDIOPREV
    audiostop = lib.SDLK_AUDIOSTOP
    backquote = lib.SDLK_BACKQUOTE
    backslash = lib.SDLK_BACKSLASH
    backspace = lib.SDLK_BACKSPACE
    brightnessdown = lib.SDLK_BRIGHTNESSDOWN
    brightnessup = lib.SDLK_BRIGHTNESSUP
    calculator = lib.SDLK_CALCULATOR
    cancel = lib.SDLK_CANCEL
    capslock = lib.SDLK_CAPSLOCK
    caret = lib.SDLK_CARET
    clear = lib.SDLK_CLEAR
    clearagain = lib.SDLK_CLEARAGAIN
    colon = lib.SDLK_COLON
    comma = lib.SDLK_COMMA
    computer = lib.SDLK_COMPUTER
    copy = lib.SDLK_COPY
    crsel = lib.SDLK_CRSEL
    currencysubunit = lib.SDLK_CURRENCYSUBUNIT
    currencyunit = lib.SDLK_CURRENCYUNIT
    cut = lib.SDLK_CUT
    decimalseparator = lib.SDLK_DECIMALSEPARATOR
    delete = lib.SDLK_DELETE
    displayswitch = lib.SDLK_DISPLAYSWITCH
    dollar = lib.SDLK_DOLLAR
    down = lib.SDLK_DOWN
    eject = lib.SDLK_EJECT
    end = lib.SDLK_END
    equals = lib.SDLK_EQUALS
    escape = lib.SDLK_ESCAPE
    exclaim = lib.SDLK_EXCLAIM
    execute = lib.SDLK_EXECUTE
    exsel = lib.SDLK_EXSEL
    # Function keys.
    f1 = lib.SDLK_F1
    f10 = lib.SDLK_F10
    f11 = lib.SDLK_F11
    f12 = lib.SDLK_F12
    f13 = lib.SDLK_F13
    f14 = lib.SDLK_F14
    f15 = lib.SDLK_F15
    f16 = lib.SDLK_F16
    f17 = lib.SDLK_F17
    f18 = lib.SDLK_F18
    f19 = lib.SDLK_F19
    f2 = lib.SDLK_F2
    f20 = lib.SDLK_F20
    f21 = lib.SDLK_F21
    f22 = lib.SDLK_F22
    f23 = lib.SDLK_F23
    f24 = lib.SDLK_F24
    f3 = lib.SDLK_F3
    f4 = lib.SDLK_F4
    f5 = lib.SDLK_F5
    f6 = lib.SDLK_F6
    f7 = lib.SDLK_F7
    f8 = lib.SDLK_F8
    f9 = lib.SDLK_F9
    find = lib.SDLK_FIND
    greater = lib.SDLK_GREATER
    hash = lib.SDLK_HASH
    help = lib.SDLK_HELP
    home = lib.SDLK_HOME
    insert = lib.SDLK_INSERT
    kbdillumdown = lib.SDLK_KBDILLUMDOWN
    kbdillumtoggle = lib.SDLK_KBDILLUMTOGGLE
    kbdillumup = lib.SDLK_KBDILLUMUP
    # Keypad (numpad) keys.
    kp_0 = lib.SDLK_KP_0
    kp_00 = lib.SDLK_KP_00
    kp_000 = lib.SDLK_KP_000
    kp_1 = lib.SDLK_KP_1
    kp_2 = lib.SDLK_KP_2
    kp_3 = lib.SDLK_KP_3
    kp_4 = lib.SDLK_KP_4
    kp_5 = lib.SDLK_KP_5
    kp_6 = lib.SDLK_KP_6
    kp_7 = lib.SDLK_KP_7
    kp_8 = lib.SDLK_KP_8
    kp_9 = lib.SDLK_KP_9
    kp_a = lib.SDLK_KP_A
    kp_ampersand = lib.SDLK_KP_AMPERSAND
    kp_at = lib.SDLK_KP_AT
    kp_b = lib.SDLK_KP_B
    kp_backspace = lib.SDLK_KP_BACKSPACE
    kp_binary = lib.SDLK_KP_BINARY
    kp_c = lib.SDLK_KP_C
    kp_clear = lib.SDLK_KP_CLEAR
    kp_clearentry = lib.SDLK_KP_CLEARENTRY
    kp_colon = lib.SDLK_KP_COLON
    kp_comma = lib.SDLK_KP_COMMA
    kp_d = lib.SDLK_KP_D
    kp_dblampersand = lib.SDLK_KP_DBLAMPERSAND
    kp_dblverticalbar = lib.SDLK_KP_DBLVERTICALBAR
    kp_decimal = lib.SDLK_KP_DECIMAL
    kp_divide = lib.SDLK_KP_DIVIDE
    kp_e = lib.SDLK_KP_E
    kp_enter = lib.SDLK_KP_ENTER
    kp_equals = lib.SDLK_KP_EQUALS
    kp_equalsas400 = lib.SDLK_KP_EQUALSAS400
    kp_exclam = lib.SDLK_KP_EXCLAM
    kp_f = lib.SDLK_KP_F
    kp_greater = lib.SDLK_KP_GREATER
    kp_hash = lib.SDLK_KP_HASH
    kp_hexadecimal = lib.SDLK_KP_HEXADECIMAL
    kp_leftbrace = lib.SDLK_KP_LEFTBRACE
    kp_leftparen = lib.SDLK_KP_LEFTPAREN
    kp_less = lib.SDLK_KP_LESS
    kp_memadd = lib.SDLK_KP_MEMADD
    kp_memclear = lib.SDLK_KP_MEMCLEAR
    kp_memdivide = lib.SDLK_KP_MEMDIVIDE
    kp_memmultiply = lib.SDLK_KP_MEMMULTIPLY
    kp_memrecall = lib.SDLK_KP_MEMRECALL
    kp_memstore = lib.SDLK_KP_MEMSTORE
    kp_memsubtract = lib.SDLK_KP_MEMSUBTRACT
    kp_minus = lib.SDLK_KP_MINUS
    kp_multiply = lib.SDLK_KP_MULTIPLY
    kp_octal = lib.SDLK_KP_OCTAL
    kp_percent = lib.SDLK_KP_PERCENT
    kp_period = lib.SDLK_KP_PERIOD
    kp_plus = lib.SDLK_KP_PLUS
    kp_plusminus = lib.SDLK_KP_PLUSMINUS
    kp_power = lib.SDLK_KP_POWER
    kp_rightbrace = lib.SDLK_KP_RIGHTBRACE
    kp_rightparen = lib.SDLK_KP_RIGHTPAREN
    kp_space = lib.SDLK_KP_SPACE
    kp_tab = lib.SDLK_KP_TAB
    kp_verticalbar = lib.SDLK_KP_VERTICALBAR
    kp_xor = lib.SDLK_KP_XOR
    # Modifier and navigation keys.
    lalt = lib.SDLK_LALT
    lctrl = lib.SDLK_LCTRL
    left = lib.SDLK_LEFT
    leftbracket = lib.SDLK_LEFTBRACKET
    leftparen = lib.SDLK_LEFTPAREN
    less = lib.SDLK_LESS
    lgui = lib.SDLK_LGUI
    lshift = lib.SDLK_LSHIFT
    mail = lib.SDLK_MAIL
    mediaselect = lib.SDLK_MEDIASELECT
    menu = lib.SDLK_MENU
    minus = lib.SDLK_MINUS
    mode = lib.SDLK_MODE
    mute = lib.SDLK_MUTE
    numlockclear = lib.SDLK_NUMLOCKCLEAR
    oper = lib.SDLK_OPER
    out = lib.SDLK_OUT
    pagedown = lib.SDLK_PAGEDOWN
    pageup = lib.SDLK_PAGEUP
    paste = lib.SDLK_PASTE
    pause = lib.SDLK_PAUSE
    percent = lib.SDLK_PERCENT
    period = lib.SDLK_PERIOD
    plus = lib.SDLK_PLUS
    power = lib.SDLK_POWER
    printscreen = lib.SDLK_PRINTSCREEN
    prior = lib.SDLK_PRIOR
    question = lib.SDLK_QUESTION
    quote = lib.SDLK_QUOTE
    quotedbl = lib.SDLK_QUOTEDBL
    ralt = lib.SDLK_RALT
    rctrl = lib.SDLK_RCTRL
    # 'return' is a Python keyword, hence the trailing underscore.
    return_ = lib.SDLK_RETURN
    return2 = lib.SDLK_RETURN2
    rgui = lib.SDLK_RGUI
    right = lib.SDLK_RIGHT
    rightbracket = lib.SDLK_RIGHTBRACKET
    rightparen = lib.SDLK_RIGHTPAREN
    rshift = lib.SDLK_RSHIFT
    scancode_mask = lib.SDLK_SCANCODE_MASK
    scrolllock = lib.SDLK_SCROLLLOCK
    select = lib.SDLK_SELECT
    semicolon = lib.SDLK_SEMICOLON
    separator = lib.SDLK_SEPARATOR
    slash = lib.SDLK_SLASH
    sleep = lib.SDLK_SLEEP
    space = lib.SDLK_SPACE
    stop = lib.SDLK_STOP
    sysreq = lib.SDLK_SYSREQ
    tab = lib.SDLK_TAB
    thousandsseparator = lib.SDLK_THOUSANDSSEPARATOR
    underscore = lib.SDLK_UNDERSCORE
    undo = lib.SDLK_UNDO
    unknown = lib.SDLK_UNKNOWN
    up = lib.SDLK_UP
    volumedown = lib.SDLK_VOLUMEDOWN
    volumeup = lib.SDLK_VOLUMEUP
    www = lib.SDLK_WWW
    # Letter keys.
    a = lib.SDLK_a
    b = lib.SDLK_b
    c = lib.SDLK_c
    d = lib.SDLK_d
    e = lib.SDLK_e
    f = lib.SDLK_f
    g = lib.SDLK_g
    h = lib.SDLK_h
    i = lib.SDLK_i
    j = lib.SDLK_j
    k = lib.SDLK_k
    l = lib.SDLK_l
    m = lib.SDLK_m
    n = lib.SDLK_n
    o = lib.SDLK_o
    p = lib.SDLK_p
    q = lib.SDLK_q
    r = lib.SDLK_r
    s = lib.SDLK_s
    t = lib.SDLK_t
    u = lib.SDLK_u
    v = lib.SDLK_v
    w = lib.SDLK_w
    x = lib.SDLK_x
    y = lib.SDLK_y
    z = lib.SDLK_z
class KeyMod(IntEnum):
    """SDL2 key-modifier flags (KMOD_*) exposed as an IntEnum.

    Values are bit flags; combined left+right variants (alt, ctrl, gui,
    shift) are ORed pairs of the corresponding l*/r* members.
    """
    alt = lib.KMOD_ALT
    caps = lib.KMOD_CAPS
    ctrl = lib.KMOD_CTRL
    gui = lib.KMOD_GUI
    lalt = lib.KMOD_LALT
    lctrl = lib.KMOD_LCTRL
    lgui = lib.KMOD_LGUI
    lshift = lib.KMOD_LSHIFT
    mode = lib.KMOD_MODE
    none = lib.KMOD_NONE
    num = lib.KMOD_NUM
    ralt = lib.KMOD_RALT
    rctrl = lib.KMOD_RCTRL
    reserved = lib.KMOD_RESERVED
    rgui = lib.KMOD_RGUI
    rshift = lib.KMOD_RSHIFT
    shift = lib.KMOD_SHIFT
|
def main():
    """Entry point: uncomment the exercise(s) you want to run.

    Bug fix: the original body contained only comments, and a ``def`` whose
    body has no statements is a SyntaxError — an explicit ``pass`` makes the
    file importable while keeping main() a deliberate no-op.
    """
    #problem1()
    #problem2()
    #problem3()
    #problem4()
    #challenge()
    pass
#PROBLEM1
#Create a variable named favoriteTeacher and assign it "Kevin".
# Send the favoriteTeacher variable to a function that will display the item when something is sent to it.
def problem1():
    """Create the favorite-teacher value and hand it to the display helper."""
    teacher_name = "Kevin"
    favoriteTeacherFunction(teacher_name)
def favoriteTeacherFunction(teacher):
    """Display whatever value is passed to this helper."""
    value_to_show = teacher
    print(value_to_show)
################################################################################################################
#PROBLEM2
#Create a function that has a loop that quits with ‘q’.
# If the user doesn't enter 'q', ask them to input another string.
def problem2():
    """Keep prompting for a string until the user enters 'q'."""
    user_input = input("Put a string: ")
    while user_input != "q":
        user_input = input("Put a string: ")
################################################################################################################
#PROBLEM3
#Create a sumOf3Numbers function that will print out the sum of the three numbers using the starting code below:
#sumOf3Numbers(number1, number2, number3)
def problem3():
    """Print the sum of three sample numbers via sumOfNumbers."""
    total = sumOfNumbers(1, 2, 3)
    print(total)
def sumOfNumbers(number1, number2, number3):
    """Return number1 + number2 + number3 (left-associative addition)."""
    partial = number1 + number2
    return partial + number3
################################################################################################################
#PROBLEM4
#Create a function that’s passed a name and the number of times a user wants to print Hello [NAME].
# Print Hello [NAME] that many times in the function.
def problem4():
    """Greet a sample name a fixed number of times via greetingAndName."""
    sample_name, repeat_count = "Didier", 3
    greetingAndName(sample_name, repeat_count)
def greetingAndName(name, number):
    """Print ``Hello <name>`` exactly *number* times.

    Bug fix: the original printed the loop index (``Hello0``, ``Hello1``, ...)
    instead of the name, contradicting the stated requirement to print
    "Hello [NAME]" the requested number of times.

    :param name: the name to greet
    :param number: how many times to print the greeting
    """
    for _ in range(number):
        print(f"Hello {name}")
################################################################################################################
#CHALLENGE
#Create a function that’s passed an array and a string and returns the new updated array.
# Create another function that’s passed two integers and returns the difference.
# Create a third function that’s passed an integer array and prints it.
def challenge():
    """Placeholder for the challenge exercise (array update, integer
    difference, and array printing functions are not yet written).

    Bug fix: the original ``def challenge():`` had no body at all, which is
    a SyntaxError; an explicit no-op body makes the file importable.
    """
    pass


if __name__ == '__main__':
    main()
from aries_cloudagent.config.injection_context import InjectionContext
from aries_cloudagent.config.provider import ClassProvider
from aries_cloudagent.core.protocol_registry import ProtocolRegistry
from aries_cloudagent.core.plugin_registry import PluginRegistry
from .v1_0.message_types import MESSAGE_TYPES
from .definition import versions
from .patched_protocols.issue_credential.v1_0.message_types import MESSAGE_TYPES as ISSUE_CREDENTIAL_MESSAGE_TYPES
from .patched_protocols.present_proof.v1_0.message_types import MESSAGE_TYPES as PRESENT_PROOF_MESSAGE_TYPES
async def setup(context: InjectionContext):
    """Plugin entry point: register MyData DID message types and the patched
    issue-credential / present-proof protocol plugins, replacing the stock
    aries_cloudagent versions.

    :param context: agent injection context used to resolve the registries
    """
    # Register patched message types.
    protocol_registry: ProtocolRegistry = await context.inject(ProtocolRegistry)
    protocol_registry.register_message_types(MESSAGE_TYPES, version_definition=versions[0])
    protocol_registry.register_message_types(ISSUE_CREDENTIAL_MESSAGE_TYPES, version_definition=versions[0])
    protocol_registry.register_message_types(PRESENT_PROOF_MESSAGE_TYPES, version_definition=versions[0])
    # Register patched protocol plugins
    plugin_registry: PluginRegistry = await context.inject(PluginRegistry)
    plugin_registry.register_plugin("mydata_did.patched_protocols.issue_credential.v1_0")
    plugin_registry.register_plugin("mydata_did.patched_protocols.present_proof.v1_0")
    # Unregister superseded protocols
    # NOTE(review): reaches into the private _plugins dict, and pop() without
    # a default raises KeyError if the stock plugins are not loaded — confirm
    # they are always registered before this setup runs.
    plugin_registry._plugins.pop("aries_cloudagent.protocols.issue_credential")
    plugin_registry._plugins.pop("aries_cloudagent.protocols.present_proof")
#!/usr/bin/env python
# coding=utf-8
from BBScan.tmp import *
class Args(object):
    """Configuration container mimicking BBScan's command-line arguments.

    Only one of *host* / *f* needs to be supplied; everything else gets the
    same defaults the CLI would use.
    """

    def __init__(self, host=None, f=None, network=None):
        self.f = f
        self.d = ""
        self.crawler = ""
        # `host` is a list of target hosts.
        self.host = host
        # Full scan is enabled by default.
        self.full_scan = True
        self.timeout = 20
        # Default netmask is /32 (single host); pass e.g. 28 to sweep a range.
        self.network = network if network else 32
        # Eight worker processes, five threads each.
        self.p = 8
        self.t = 5
        # Markdown report output disabled by default.
        self.md = False
        self.no_check404 = False
        self.no_crawl = False
        self.browser = False
def save_report_thread(q_results, file, lock):
    """Background thread: drain scan results from `q_results` and rewrite the
    HTML report until the global STOP_ME flag is set.

    NOTE(review): Python 2 code (print statement, `except Exception, e`);
    indentation reconstructed from syntax.
    """
    start_time = time.time()
    a_template = template['html']
    # if args.md:
    #     a_template = template['markdown']
    # else:
    #     a_template = template['html']
    t_general = Template(a_template['general'])
    t_host = Template(a_template['host'])
    t_list_item = Template(a_template['list_item'])
    output_file_suffix = a_template['suffix']
    all_results = []
    #if not os.path.exists(os.path.basename(file).lower()):
    # Report name: input file basename + timestamp + template suffix.
    report_name = os.path.basename(file).lower().replace('.txt', '') \
        + '_' + time.strftime('%Y%m%d_%H%M%S',
                              time.localtime()) + output_file_suffix
    global STOP_ME
    try:
        while not STOP_ME:
            if q_results.qsize() == 0:
                time.sleep(0.1)
                continue
            # Rebuild the whole report from all results seen so far.
            html_doc = ""
            while q_results.qsize() > 0:
                all_results.append(q_results.get())
            for item in all_results:
                host, results = item
                _str = ""
                for key in results.keys():
                    for _ in results[key]:
                        _str += t_list_item.substitute(
                            {'status': _['status'], 'url': _[
                                'url'], 'title': _['title']}
                        )
                _str = t_host.substitute({'host': host, 'list': _str})
                html_doc += _str
            cost_time = time.time() - start_time
            cost_min = int(cost_time / 60)
            cost_seconds = '%.2f' % (cost_time % 60)
            html_doc = t_general.substitute(
                {'cost_min': cost_min, 'cost_seconds': cost_seconds, 'content': html_doc}
            )
            if not os.path.exists("report/"):
                os.mkdir("report")
            with codecs.open('report/%s' % report_name, 'w', encoding='utf-8') as outFile:
                outFile.write(html_doc)
        if all_results:
            print '[%s] Scan report saved to report/%s' % (get_time(), report_name)
            #if args.browser:
            #    webbrowser.open_new_tab(
            #        os.path.abspath('report/%s' % report_name))
        else:
            lock.acquire()
            print '[%s] No vulnerabilities found on sites in %s.' % (get_time(), file)
            lock.release()
    except Exception, e:
        print '[save_report_thread Exception] %s %s' % (type(e), str(e))
        sys.exit(-1)
def BBscanApi(args):
    """Programmatic BBScan entry point: build the target queue from `args`
    (host list / input file / directory / crawler logs), start the report
    thread and the scan worker processes, and wait for completion.

    NOTE(review): Python 2 code; indentation reconstructed from syntax.
    """
    # Putting everything here should be fine; mainly the `host` attribute is
    # used, but args must also provide f / d / crawler (defaulting to "").
    if args.f:
        input_files = [args.f]
    elif args.d:
        input_files = glob.glob(args.d + '/*.txt')
    elif args.crawler:
        input_files = ['crawler']
    elif args.host:
        input_files = ['hosts']  # several hosts on command line
    #print "f:{}\nd:{}\ncrawler:{}\nhost:{}\t".format(args.f, args.d, args.crawler, args.host)
    ips_to_scan = []  # all IPs to be scanned during current scan
    for file in input_files:
        if args.host:
            lines = [' '.join(args.host)]
            print "lines=>{}".format(lines)
        # NOTE(review): `lines` is only initialized when args.host is set, so
        # the extend() below raises NameError for pure -f/-d runs — confirm.
        if args.f or args.d:
            with open(file) as inFile:
                lines.extend(inFile.readlines())
        print "[-] len(lines):\t{}".format(len(lines))
        try:
            print '[%s] Batch web scan start.' % get_time()
            q_results = multiprocessing.Manager().Queue()
            q_targets = multiprocessing.Manager().Queue()
            lock = multiprocessing.Manager().Lock()
            global STOP_ME
            STOP_ME = False
            # Report writer runs until STOP_ME is set at the end of this scan.
            threading.Thread(target=save_report_thread,
                             args=(q_results, file, lock)).start()
            print '[%s] Report thread created, prepare target Queue...' % get_time()
            if args.crawler:
                _input_files = glob.glob(args.crawler + '/*.log')
                for _file in _input_files:
                    q_targets.put({'file': _file, 'url': ''})
            if args.host or args.f or args.d:
                q_hosts = Queue.Queue()
                for line in lines:
                    if line.strip():
                        # Works with https://github.com/lijiejie/subDomainsBrute
                        # delimiter "," is acceptable
                        hosts = line.replace(',', ' ').strip().split()
                        #print "[-]hosts: \t{}".format(hosts)
                        for host in hosts:
                            q_hosts.put(host)
                # In this version DNS lookups may be redundant, since the
                # hosts that needed resolving were already scanned.
                all_threads = []
                for _ in range(20):
                    t = threading.Thread(target=domain_lookup, args=(q_targets, q_hosts, lock, ips_to_scan))
                    all_threads.append(t)
                    t.start()
                for t in all_threads:
                    t.join()
                # Expand each resolved IP into its /N network when requested.
                if args.network != 32:
                    for ip in ips_to_scan:
                        if ip.find('/') > 0:
                            continue
                        _network = u'%s/%s' % ('.'.join(ip.split('.')[:3]), args.network)
                        if _network in ips_to_scan:
                            continue
                        ips_to_scan.append(_network)
                        _ips = ipaddress.IPv4Network(
                            u'%s/%s' % (ip, args.network), strict=False).hosts()
                        for _ip in _ips:
                            _ip = str(_ip)
                            if _ip not in ips_to_scan:
                                ips_to_scan.append(_ip)
                                q_targets.put({'file': '', 'url': _ip})
            print '[%s] %s targets entered Queue.' % (get_time(), q_targets.qsize())
            print '[%s] Create %s sub Processes...' % (get_time(), args.p)
            scan_process = []
            for _ in range(args.p):
                p = multiprocessing.Process(
                    target=batch_scan, args=(q_targets, q_results, lock, args))
                p.daemon = True
                p.start()
                scan_process.append(p)
            print '[%s] %s sub process successfully created.' % (get_time(), args.p)
            for p in scan_process:
                p.join()
        except KeyboardInterrupt, e:
            print '[+] [%s] User aborted, running tasks crashed.' % get_time()
            # Drain the target queue so worker processes can exit promptly.
            try:
                while True:
                    q_targets.get_nowait()
            except:
                pass
        except Exception, e:
            print '[__main__.exception] %s %s' % (type(e), str(e))
            traceback.print_exc()
        # Signal the report thread to finish for this input file.
        STOP_ME = True
if __name__ == '__main__':
    # Example invocation: scan the hosts listed in the given file,
    # expanding each resolved IP into its /28 network.
    args = Args(f="/root/tools/inforgather/security-relate-script/SubDomainsResultDeal/toBBscan.txt", network=28)
    print args.f
    BBscanApi(args)
|
# Copyright 2018-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
"""
Common infrastructure for managing Python flavors and versions in third-party2.
"""
load("@fbcode_macros//build_defs/lib:target_utils.bzl", "target_utils")
load("@fbcode_macros//build_defs/lib:third_party.bzl", "third_party")
load("@fbcode_macros//build_defs:platform_utils.bzl", "platform_utils")
_TP2_PYTHON_PROJECT = third_party.get_tp2_project_target("python")
def _get_tp2_project_versions(project, platform):
    """
    Return a list of configured versions for given `project` on `platform`.
    Multiple versions of a TP2 project is only allowed for a small subset of
    projects (see `WHITELISTED_VERSIONED_PROJECTS` in `buckify_tp2.py`).
    """
    tp2_conf = third_party.get_third_party_config_for_platform(platform)
    vers = tp2_conf["build"]["projects"][project]

    # A single configured version may be given as a bare string.
    if type(vers) == type(""):
        return [vers]

    # Otherwise each entry is either a plain string or a pair of the form
    # (ORIGINAL_TP2_VERSION, ACTUAL_VERSION); keep the actual version.
    return [v if type(v) == type("") else v[1] for v in vers]
def _get_all_versions_for_platform(platform):
    """
    Return a list of all configured Python versions for `platform`.
    """
    # Delegates to the generic TP2 helper with the fixed project name "python".
    return _get_tp2_project_versions("python", platform)
def _add_flavored_versions(versioned_resources):
    """
    For each resource entry in `versioned_resources` that declares a Python
    version, add a corresponding entry for every configured TP2 Python version
    that subsumes the declared Python version.
    Args:
        versioned_resources: A list of versioned resource entries accepted by
                             Buck. Each entry in the list should be a pair of
                             the form
                             (
                               {
                                 LABEL1 : VERSION1,
                                 ...
                                 LABELn : VERSIONn
                               },
                               {
                                 SRC_FILE1 : DST_FILE1,
                                 ...
                                 SRC_FILEm : DST_FILEm
                               }
                             )
                             where LABELs are strings that identify dependency
                             targets, and VERSIONs are strings that specify the
                             required version of LABEL. Can be None.
    Returns:
        If `versioned_resources` is a list, then a copy of `versioned_resources`
        extended with entries for Python flavors. Otherwise,
        `versioned_resources` itself.
    """
    # Non-list values (e.g. None) pass through untouched.
    if type(versioned_resources) != type([]):
        return versioned_resources
    platforms = platform_utils.get_platforms_for_host_architecture()
    res = list(versioned_resources)
    for p in platforms:
        # The label under which an entry would declare its Python version
        # requirement for this platform.
        label = target_utils.target_to_label(_TP2_PYTHON_PROJECT, fbcode_platform = p)
        for version_spec, resource_spec in versioned_resources:
            if label in version_spec:
                pyver = version_spec[label]
                for cver in _get_all_versions_for_platform(p):
                    # Simple flavor subsumption rule -- version A subsumes
                    # version B if B is a proper suffix of A:
                    if cver != pyver and cver.endswith(pyver):
                        # Duplicate the entry with the subsuming version so the
                        # same resources are selected under the flavored version.
                        new_spec = dict(version_spec)
                        new_spec[label] = cver
                        res.append([new_spec, resource_spec])
    return res
# TODO(T38084046): We would rather this to be a provider, but the structs returned from
# `provider()` are not hashable. This is likely just a bug in how buck
# implements `provider()` in skylark
#_PythonVersion = provider(fields = [
# "version_string",
# "flavor",
# "major",
# "minor",
# "patchlevel",
#])
def _PythonVersion(version_string, flavor, major, minor, patchlevel):
    # Plain-struct stand-in for a provider: provider() structs are not hashable
    # in Buck's skylark (see the TODO above), and these values are used as dict
    # keys elsewhere in this file.
    return struct(
        version_string = version_string,
        flavor = flavor,
        major = major,
        minor = minor,
        patchlevel = patchlevel,
    )
def _parse_python_version(version_string):
    """
    Parse a (possibly flavor-prefixed) version string into a _PythonVersion.

    Accepts strings such as "3.6", "2.7.1" or "cinder.3.8". Missing minor and
    patchlevel components default to 0. Fails on empty strings and on a bare
    flavor with no numeric component.
    """
    if not version_string:
        fail("Empty version string provided")
    pieces = version_string.split(".")
    flavor = ""
    idx = 0
    if not pieces[0].isdigit():
        # A leading non-numeric component is the flavor prefix; the numeric
        # version must follow it.
        flavor = pieces[0]
        idx = 1
        if len(pieces) == 1:
            fail("Invalid version string {} provided".format(version_string))
    major = int(pieces[idx])
    minor = int(pieces[idx + 1]) if idx + 1 < len(pieces) else 0
    patchlevel = int(pieces[idx + 2]) if idx + 2 < len(pieces) else 0
    return _PythonVersion(
        version_string = version_string,
        flavor = flavor,
        major = major,
        minor = minor,
        patchlevel = patchlevel,
    )
# Versions selected based on most commonly specified version strings
# Pre-parsed cache: _python_version() returns these shared structs instead of
# re-parsing the most common version strings on every call.
_INTERNED_PYTHON_VERSIONS = {
    "2": _parse_python_version("2"),
    "2.6": _parse_python_version("2.6"),
    "2.7": _parse_python_version("2.7"),
    "3": _parse_python_version("3"),
    "3.0": _parse_python_version("3.0"),
    "3.2": _parse_python_version("3.2"),
    "3.3": _parse_python_version("3.3"),
    "3.4": _parse_python_version("3.4"),
    "3.5": _parse_python_version("3.5"),
    "3.6": _parse_python_version("3.6"),
    "3.7": _parse_python_version("3.7"),
}

# Major version assumed when a caller passes an empty/None version string.
_DEFAULT_PYTHON_MAJOR_VERSION = "3"
def _python_version(version_string):
    """
    An abstraction of tp2/python version strings that supports flavor prefixes.
    See `get_python_platforms_config()` in `tools/build/buck/gen_modes.py` for
    the format of flavored version strings.
    Because these are immutable objects, they may also be cached instances
    Args:
        version_string: The aforementioned version string
    Returns:
        A struct with the 'version_string' (the raw string), 'flavor', 'major',
        'minor', and 'patchlevel'. Minor and patchlevel are 0 if they were not
        provided, though the 0 will not appear in the version string
    """
    # Empty/None falls back to the default major version.
    key = version_string or _DEFAULT_PYTHON_MAJOR_VERSION

    # Serve common versions from the interned cache when possible.
    cached = _INTERNED_PYTHON_VERSIONS.get(key)
    if cached != None:
        return cached
    return _parse_python_version(key)
def _version_supports_flavor(python_version, flavor):
    """
    Whether a `python_version` is compatible with a flavor

    Compatibility is suffix-based, so an empty required `flavor` is satisfied
    by every version.
    """
    return python_version.flavor.endswith(flavor)
_PythonVersionConstraint = provider(fields = ["op", "version"])

# Comparison operators stored in _PythonVersionConstraint.op. Each takes two
# _PythonVersion structs plus a `check_minor` flag. The ordered comparisons
# work on (major, minor, patchlevel) triples and ignore check_minor.
def _constraint_lt(left, right, _check_minor):
    return (left.major, left.minor, left.patchlevel) < (right.major, right.minor, right.patchlevel)

def _constraint_lte(left, right, _check_minor):
    return (left.major, left.minor, left.patchlevel) <= (right.major, right.minor, right.patchlevel)

def _constraint_gt(left, right, _check_minor):
    return (left.major, left.minor, left.patchlevel) > (right.major, right.minor, right.patchlevel)

def _constraint_gte(left, right, _check_minor):
    return (left.major, left.minor, left.patchlevel) >= (right.major, right.minor, right.patchlevel)

def _constraint_eq(left, right, check_minor):
    # Exact equality; when check_minor is set, patchlevels are zeroed out so
    # only major.minor must match exactly.
    return (
        (left.major, left.minor, 0 if check_minor else left.patchlevel) ==
        (right.major, right.minor, 0 if check_minor else right.patchlevel)
    )

def _constraint_partial_match(left, right, check_minor):
    # Prefix match: majors must agree, and minors too when check_minor is set.
    return (left.major == right.major and (not check_minor or left.minor == right.minor))
def _parse_python_version_constraint(constraint_string):
    """
    Parse strings like "<3", ">=2.7", "=3.6", or a bare version string into a
    _PythonVersionConstraint. A bare version means exact equality.
    """

    # Two-character operators must be tried before their one-character
    # prefixes so "<=" is not parsed as "<" applied to "=...".
    prefix_ops = [
        ("<=", _constraint_lte),
        (">=", _constraint_gte),
        ("<", _constraint_lt),
        ("=", _constraint_eq),
        (">", _constraint_gt),
    ]
    version_string = constraint_string
    op = _constraint_eq
    for prefix, candidate in prefix_ops:
        if constraint_string.startswith(prefix):
            version_string = constraint_string[len(prefix):].lstrip()
            op = candidate
            break
    return _PythonVersionConstraint(
        version = _python_version(version_string),
        op = op,
    )
def _intern_constraints():
    """ Create a map of our most common constraints so that we can pull from the cache more often """
    result = {
        operator + version: _parse_python_version_constraint(operator + version)
        for version in ["2", "2.7", "3", "3.6"]
        for operator in ["", "<", "<=", "=", ">=", ">"]
    }
    # Bare major versions -- passed either as ints or strings -- mean "any
    # version with this major", so they override the exact-equality entries
    # that the comprehension above created for "2" and "3".
    result.update({
        2: _PythonVersionConstraint(
            version = _python_version("2"),
            op = _constraint_partial_match,
        ),
        3: _PythonVersionConstraint(
            version = _python_version("3"),
            op = _constraint_partial_match,
        ),
        "2": _PythonVersionConstraint(
            version = _python_version("2"),
            op = _constraint_partial_match,
        ),
        "3": _PythonVersionConstraint(
            version = _python_version("3"),
            op = _constraint_partial_match,
        ),
    })
    return result

# Module-level cache consulted by _python_version_constraint().
_INTERNED_VERSION_CONSTRAINTS = _intern_constraints()
def _python_version_constraint(constraint_string):
    """
    Parses and creates a struct that represents a 'version constraint'
    This implements the semantics of the `py_version` and `versioned_srcs`
    parameters of the 'python_xxx' rule types.
    Note that this method may make use of internal caches of immutable objects
    Args:
        constraint_string: A string like '<3', '=2.7', or '3'
    Returns:
        A `PythonVersionConstraint` with a comparison `op` and a `version` set
    """
    if not constraint_string:
        key = _DEFAULT_PYTHON_MAJOR_VERSION
    else:
        # Some callers pass bare integers (2, 3); normalize to a string.
        key = str(constraint_string)
    cached = _INTERNED_VERSION_CONSTRAINTS.get(key)
    if cached != None:
        return cached
    return _parse_python_version_constraint(key)
def _constraint_matches(constraint, version, check_minor = False):
    """
    Whether or not a constraint matches a version
    Args:
        constraint: The result of a `python_version_constraint()` call
        version: The result of a `python_version()` call
        check_minor: If true, partial checks look at the minor in addition to the major
                     version. For raw constraints (e.g. '2.7'), only the first major
                     and minor versions will be checked. That is, '2.7.1' will match
                     the '2.7' constraint if check_minor is True
    Returns:
        Whether the version matches the constraint. Note that the matching effectively
        checks against triples in most cases, and does not behave identically to
        python distutils' LooseVersion
    """
    # The flavor must be compatible before the numeric comparison matters.
    return (
        _version_supports_flavor(version, constraint.version.flavor) and
        constraint.op(version, constraint.version, check_minor)
    )
def _normalize_constraint(constraint):
    """
    Normalizes `constraint` to be a `PythonVersionConstraint` object
    Returns:
        Either `constraint` if it is a `PythonVersionConstraint` struct, or parses
        the string/int into a constraint
    """
    # Already-parsed constraints expose both `version` and `op`.
    already_parsed = hasattr(constraint, "version") and hasattr(constraint, "op")
    return constraint if already_parsed else _python_version_constraint(constraint)
# Per-platform list of parsed PythonVersion structs, computed once at load
# time from the tp2 configuration.
_ALL_PYTHON_VERSIONS = {
    platform: [
        _python_version(version_string)
        for version_string in _get_all_versions_for_platform(platform)
    ]
    for platform in platform_utils.get_all_platforms()
}
def _get_all_versions(fbcode_platform = None):
    """
    Returns a list of `PythonVersion` instances corresponding to the active
    Python versions for the given `platform`. If `platform` is not
    specified, then return versions for all platforms.
    """
    # A dict is used as an ordered set to de-duplicate versions shared by
    # multiple platforms.
    seen = {}
    for plat in platform_utils.get_platforms_for_host_architecture():
        if fbcode_platform == None or fbcode_platform == plat:
            for version in _ALL_PYTHON_VERSIONS[plat]:
                seen[version] = None
    return seen.keys()
def _get_default_version(platform, constraint, flavor = ""):
    """
    Returns a `PythonVersion` instance corresponding to the first Python
    version that satisfies `constraint` and `flavor` for the given
    `platform`, or None when no configured version qualifies.
    """
    normalized = _normalize_constraint(constraint)
    for candidate in _ALL_PYTHON_VERSIONS[platform]:
        if not _constraint_matches(normalized, candidate):
            continue
        if _version_supports_flavor(candidate, flavor):
            return candidate
    return None
def _constraint_matches_major(constraint, version):
    """
    True if `constraint` can be satisfied by a Python version that is of major `version` on some active platform.
    Args:
        constraint: A constraint that should be satified (`PythonVersionConstraint` or str)
        version: An integer major version that must be met in addition to the constraint
    """

    # Consistency fix: call the module-private helper directly instead of going
    # through the exported `python_versioning` struct. The struct is defined at
    # the bottom of this file, so routing through it only worked because the
    # lookup is deferred until call time, and it diverged from how every other
    # helper here is invoked.
    constraint = _normalize_constraint(constraint)
    for platform_version in _get_all_versions():
        if platform_version.major == version and _constraint_matches(constraint, platform_version):
            return True
    return False
def _platform_has_version(platform, version):
    """
    Whether Python `version` is configured for `platform`.
    Args:
        platform: The fbcode platform to investigate
        version: The `PythonVersion` to inspect
    Returns:
        Whether `version` is configured for `platform`
    """
    # Compare by version string: the structs themselves may be distinct
    # instances for the same version.
    configured = [pv.version_string for pv in _ALL_PYTHON_VERSIONS[platform]]
    return version.version_string in configured
# Public API of this module; callers should load() this struct rather than
# reaching for the underscore-prefixed helpers.
python_versioning = struct(
    add_flavored_versions = _add_flavored_versions,
    constraint_matches_major = _constraint_matches_major,
    get_all_versions = _get_all_versions,
    get_default_version = _get_default_version,
    python_version = _python_version,
    version_supports_flavor = _version_supports_flavor,
    platform_has_version = _platform_has_version,
    python_version_constraint = _python_version_constraint,
    constraint_matches = _constraint_matches,
    normalize_constraint = _normalize_constraint,
)
|
import numpy as np
from config import *
def ensure_folder(folder):
    """Create *folder* (including parents) if it does not already exist."""
    # exist_ok avoids the check-then-create race of the original
    # `if not exists: makedirs` pattern when several processes start together.
    os.makedirs(folder, exist_ok=True)
class AverageMeter(object):
    """Tracks a scalar metric: most recent value, running sum, count, and mean."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running mean."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        self.avg = self.sum / self.count
class ExpoAverageMeter(object):
    """Exponentially weighted moving average of a scalar metric.

    On each update: avg <- beta * avg + (1 - beta) * val.
    """

    def __init__(self, beta=0.9):
        # Bug fix: reset() used to hard-code `self.beta = 0.9`, silently
        # discarding the constructor argument. Store the configured beta once
        # here; reset() only clears the running state.
        self.beta = beta
        self.reset()

    def reset(self):
        """Clear the running state, keeping the configured beta."""
        self.val = 0
        self.avg = 0

    def update(self, val):
        """Fold *val* into the exponential moving average."""
        self.avg = self.beta * self.avg + (1 - self.beta) * val
        self.val = val
def KNN(mat, k):
    """Return the k smallest diagonal entries of mat @ mat.T and their row indices.

    The diagonal of the Gram matrix holds each row's squared L2 norm, so this
    picks the k rows of `mat` closest to the origin.
    """
    mat = mat.float()
    gram = torch.mm(mat, mat.t())
    sq_norms = torch.diagonal(gram)
    return sq_norms.topk(k, largest=False, sorted=True)
def batched_KNN(query, k, attributes):
    """Run KNN on (query[i] - attributes) for every row of `query`.

    Args:
        query: 2-D tensor of query vectors, one per batch element.
        k: number of nearest rows requested from KNN.
           NOTE(review): KNN returns k values/indices but they are stored into
           scalar slots of val_list/index_list, so this only works for k == 1 —
           confirm callers always pass k=1.
        attributes: matrix broadcast-subtracted from each query vector.

    Returns:
        (val_list, index_list): per-sample KNN distance values and row indices.
    """
    batch_size = query.size()[0]
    val_list = torch.zeros(batch_size, dtype=torch.float, device=device)
    index_list = torch.zeros(batch_size, dtype=torch.int, device=device)
    # Perf fix: the original moved the whole `attributes` matrix to the device
    # once per batch element; hoist the transfer out of the loop.
    attributes = attributes.to(device)
    for i in range(batch_size):
        diff = query[i].to(device) - attributes
        # Bug fix: the original wrapped `diff` in torch.tensor(...), which is
        # deprecated for tensor inputs and makes a needless copy; `diff` is
        # already a tensor on the right device.
        val, index = KNN(diff, k)
        val_list[i] = val
        index_list[i] = index
    return val_list, index_list
def accuracy(scores, targets):
    """Percentage of positions where `scores` equals `targets`."""
    n = targets.size(0)
    n_correct = scores.eq(targets).view(-1).float().sum()  # 0-D tensor
    return n_correct.item() * (100.0 / n)
def ensure_folder(folder):
    """Create *folder* (including parents) if it does not already exist.

    NOTE(review): this is a duplicate of the ensure_folder defined earlier in
    this module; consider removing one of them.
    """
    # exist_ok avoids the check-then-create race of `if not exists: makedirs`.
    os.makedirs(folder, exist_ok=True)
def save_checkpoint(epoch, model, W, optimizer, val_acc, is_best, superclass):
    # Persist the model, attribute matrix W, and optimizer state under
    # `save_folder` (from config) — but only when this epoch is the best so far.
    # NOTE(review): non-best epochs write nothing at all; confirm that is
    # intentional (the per-epoch filename below suggests periodic saves may
    # have been planned too).
    ensure_folder(save_folder)
    state = {'model': model,
             'W': W,
             'optimizer': optimizer}
    if is_best:
        filename = '{0}/checkpoint_{1}_{2}_{3:.3f}.tar'.format(save_folder, superclass, epoch, val_acc)
        torch.save(state, filename)
        # If this checkpoint is the best so far, store a copy so it doesn't get overwritten by a worse checkpoint
        torch.save(state, '{}/BEST_{}_checkpoint.tar'.format(save_folder, superclass))
def adjust_learning_rate(optimizer, shrink_factor):
    """Multiply every param group's learning rate by `shrink_factor`."""
    print("\nDECAYING learning rate.")
    for group in optimizer.param_groups:
        group['lr'] *= shrink_factor
    print("The new learning rate is %f\n" % (optimizer.param_groups[0]['lr'],))
def get_label_list():
    """Read the label-list file and return the Chinese category names.

    NOTE(review): the path is hard-coded to the Animals split of the
    AI-Challenger ZSL 2018 dataset — confirm this is only used for Animals.
    """
    import pandas as pd
    labels = pd.read_csv(
        'data/ai_challenger_zsl2018_train_test_a_20180321/zsl_a_animals_train_20180321/zsl_a_animals_train_annotations_label_list_20180321.txt',
        header=None)
    labels.columns = ['label_name', 'cat_name_en', 'cat_name_zh']
    # Strip stray whitespace left by the comma-separated source file.
    labels['label_name'] = labels['label_name'].str.strip()
    labels['cat_name_zh'] = labels['cat_name_zh'].str.strip()
    label_list = []
    for i in range(len(labels)):
        label_list.append(labels['cat_name_zh'][i])
    print('len(label_list): ' + str(len(label_list)))
    return label_list
def get_annotations_by_superclass(superclass):
    """Resolve training-data paths for `superclass`.

    All *_folder / *_annotations_* names come from config's star import.

    Returns:
        (image_folder, annotations_labels, annotations_attributes_per_class,
         annotations_attribute_list)
    """
    if superclass == 'Animals':
        image_folder = zsl_a_animals_train_image_folder
        annotations_labels = zsl_a_animals_train_annotations_labels
        annotations_attributes_per_class = zsl_a_animals_train_annotations_attributes_per_class
        annotations_attribute_list = zsl_a_animals_train_annotations_attribute_list
    elif superclass == 'Fruits':
        image_folder = zsl_a_fruits_train_image_folder
        annotations_labels = zsl_a_fruits_train_annotations_labels
        annotations_attributes_per_class = zsl_a_fruits_train_annotations_attributes_per_class
        annotations_attribute_list = zsl_a_fruits_train_annotations_attribute_list
    elif superclass == 'Vehicles':
        image_folder = zsl_b_vehicles_train_image_folder
        annotations_labels = zsl_b_vehicles_train_annotations_labels
        annotations_attributes_per_class = zsl_b_vehicles_train_annotations_attributes_per_class
        annotations_attribute_list = zsl_b_vehicles_train_annotations_attribute_list
    elif superclass == 'Electronics':
        image_folder = zsl_b_electronics_train_image_folder
        annotations_labels = zsl_b_electronics_train_annotations_labels
        annotations_attributes_per_class = zsl_b_electronics_train_annotations_attributes_per_class
        annotations_attribute_list = zsl_b_electronics_train_annotations_attribute_list
    else:  # 'Hairstyles'
        # Any unrecognised value falls back to the hairstyles paths.
        image_folder = zsl_b_hairstyles_train_image_folder
        annotations_labels = zsl_b_hairstyles_train_annotations_labels
        annotations_attributes_per_class = zsl_b_hairstyles_train_annotations_attributes_per_class
        annotations_attribute_list = zsl_b_hairstyles_train_annotations_attribute_list
    return image_folder, annotations_labels, annotations_attributes_per_class, annotations_attribute_list
def get_label_name2idx_by_superclass(superclass):
    """Map each label_name in the superclass's attributes file to its row index."""
    # Bug fix: this function used `pd` without importing pandas — in this
    # module pandas is only imported locally inside get_label_list, so the
    # name was undefined here. Import it locally, matching that style.
    import pandas as pd
    _, _, annotations_attributes_per_class, _ = get_annotations_by_superclass(superclass)
    attributes = pd.read_csv(annotations_attributes_per_class, header=None)
    attributes.columns = ['label_name', 'attributes']
    attributes['attributes'] = attributes['attributes'].str.strip()
    label_name2idx = dict()
    for i in range(len(attributes)):
        label_name2idx[attributes['label_name'][i]] = i
    return label_name2idx
def get_embedding_size_by_superclass(superclass):
    """Attribute-vector length: number of non-blank lines in the attribute list."""
    _, _, _, annotations_attribute_list = get_annotations_by_superclass(superclass)
    with open(annotations_attribute_list, 'r') as file:
        nonblank = [ln for ln in file.readlines() if len(ln.strip()) > 0]
    return len(nonblank)
def get_attributes_per_class_by_superclass(superclass):
    """Load every class's attribute vector for `superclass` into one tensor."""
    # Bug fix 1: `pd` was used without importing pandas (only get_label_list
    # imported it, and only locally). Import it here the same way.
    import pandas as pd
    _, _, annotations_attributes_per_class, _ = get_annotations_by_superclass(superclass)
    attributes = pd.read_csv(annotations_attributes_per_class, header=None)
    attributes.columns = ['label_name', 'attributes']
    attributes['attributes'] = attributes['attributes'].str.strip()
    attributes_per_class = []
    for i in range(len(attributes)):
        # parse_attributes comes from config's star import.
        attributes_per_class.append(parse_attributes(attributes['attributes'][i]))
    attributes_per_class = torch.tensor(attributes_per_class)
    # Bug fix 2: Tensor.to() is not in-place — the original discarded its
    # result, returning a CPU tensor regardless of `device`. Assign it back.
    attributes_per_class = attributes_per_class.to(device)
    return attributes_per_class
def get_test_folder_by_superclass(superclass):
    """Return the test-image folder (a config global) for `superclass`.

    Any unrecognised value falls back to the hairstyles folder.
    """
    if superclass == 'Animals':
        return zsl_a_animals_test_folder
    if superclass == 'Fruits':
        return zsl_a_fruits_test_folder
    if superclass == 'Vehicles':
        return zsl_b_vehicles_test_folder
    if superclass == 'Electronics':
        return zsl_b_electronics_test_folder
    return zsl_b_hairstyles_test_folder
def get_attribute_names_by_superclass(superclass):
    """Parse the attribute-list file and return attribute names as a numpy array.

    Each row's third column looks like "<prefix>: <name>"; only <name> is kept.
    """
    # Bug fix: `pd` was used here without importing pandas (pandas is only
    # imported locally in get_label_list). Import it locally, matching that style.
    import pandas as pd
    _, _, _, annotations_attribute_list = get_annotations_by_superclass(superclass)
    attribute_list = pd.read_csv(annotations_attribute_list, header=None, usecols=[2])
    attribute_list.columns = ['attribute_name']
    attribute_list['attribute_name'] = attribute_list['attribute_name'].str.strip()
    attribute_names = [
        attribute_list['attribute_name'][i].split(': ')[1]
        for i in range(len(attribute_list))
    ]
    return np.array(attribute_names)
|
import numpy as np
import math as m
# Write a function that takes as input a list of numbers, and returns
# the list of values given by the softmax function.
def softmax(L):
    """Return the softmax of a list of numbers as a new list.

    Shifts all inputs by max(L) before exponentiating so that large values do
    not overflow; the shift cancels mathematically, leaving the result
    unchanged. Returns [] for an empty input (matching the original behavior).
    """
    if not L:
        return []
    shift = max(L)  # exp(x - shift) <= 1, so no overflow
    exps = [m.exp(x - shift) for x in L]
    total = sum(exps)
    return [e / total for e in exps]
def softmax2(L):
    """Softmax via numpy exponentials, returned as a plain Python list."""
    exps = np.exp(L)
    denom = sum(exps)
    return [e * 1.0 / denom for e in exps]
# Note: The function np.divide can also be used here, as follows:
# def softmax(L):
# expL = np.exp(L)
# return np.divide (expL, expL.sum()) |
"""
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : __init__.py
# Abstract :
# Current Version: 1.0.0
# Date : 2021-05-31
##################################################################################################
"""
from .post_mango import PostMango
from .post_mask_rcnn_spot import PostMaskRCNNSpot
from .post_spotter_base import BasePostSpotter
__all__ = ['PostMango', 'PostMaskRCNNSpot', 'BasePostSpotter']
|
from django.contrib import admin
from scrape import views
from django.conf import settings
from django.conf.urls import patterns, include, url
# URL routes: the Django admin, plus the licensee-certificate list at the
# site root.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^$', views.LicenseeCertListView.as_view(), name='licenseecert-list'),
]

# Mount django-debug-toolbar only when running with DEBUG=True; the import is
# deferred so production deployments need not install it.
if settings.DEBUG:
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
|
# -*- encoding: utf-8 -*-
'''
@Time : 2018-3月26
@Author : EvilRecluse
@Contact : https://github.com/RecluseXU
@Desc : 圆检测
'''
# here put the import lib
import cv2 as cv
import numpy as np
def detect_circle_demo(image):
    """Detect circles in `image` with the Hough transform and draw them.

    Each detected circle's outline is drawn in red and its centre in blue
    (BGR order), then the annotated image is shown in a window.
    """
    # Mean-shift filtering smooths texture while preserving edges, reducing
    # false circle detections.
    dst = cv.pyrMeanShiftFiltering(image, 10, 100)
    gray = cv.cvtColor(dst, cv.COLOR_BGR2GRAY)
    # HOUGH_GRADIENT is the fast gradient-based variant. The fourth argument
    # (20) is the minimum distance between circle centres: two detections
    # closer than that are merged into a single circle.
    circles = cv.HoughCircles(
        gray, cv.HOUGH_GRADIENT, 1, 20, param1=50, param2=30, minRadius=0, maxRadius=0)
    # Bug fix: HoughCircles returns None when no circle is found; the original
    # then crashed in np.uint16(np.around(None)). Show the unannotated image
    # instead.
    if circles is None:
        cv.imshow('detect_circle_demo', image)
        return
    circles = np.uint16(np.around(circles))
    for c in circles[0, :]:
        cv.circle(image, (c[0], c[1]), c[2], (0, 0, 255), 2)  # outline (red)
        cv.circle(image, (c[0], c[1]), 2, (255, 0, 0), 2)     # centre (blue)
    cv.imshow('detect_circle_demo', image)
# Demo driver: load the sample image, run circle detection, and block until a
# key is pressed before closing all windows.
src = cv.imread(
    'example/0_Basic_usage_of_the_library/openCV/picture/goodmancard.jpg')
cv.imshow('src', src)
detect_circle_demo(src)
cv.waitKey(0)
cv.destroyAllWindows()
|
""" Cisco_IOS_XR_man_xml_ttyagent_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR man\-xml\-ttyagent package configuration.
This module contains definitions
for the following management objects\:
xr\-xml\: XML
netconf\: netconf
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class XrXml(_Entity_):
"""
XML
.. attribute:: agent
XML agent
**type**\: :py:class:`Agent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent>`
"""
_prefix = 'man-xml-ttyagent-cfg'
_revision = '2017-05-01'
    def __init__(self):
        # Auto-generated YDK entity setup for the top-level "xr-xml" container.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(XrXml, self).__init__()
        self._top_entity = None

        self.yang_name = "xr-xml"
        self.yang_parent_name = "Cisco-IOS-XR-man-xml-ttyagent-cfg"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("agent", ("agent", XrXml.Agent))])
        self._leafs = OrderedDict()

        self.agent = XrXml.Agent()
        self.agent.parent = self
        self._children_name_map["agent"] = "agent"
        self._segment_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK validation (no configurable
        # leafs at this level).
        self._perform_setattr(XrXml, [], name, value)
class Agent(_Entity_):
"""
XML agent
.. attribute:: default
XML default dedicated agent
**type**\: :py:class:`Default <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Default>`
.. attribute:: tty
XML TTY agent
**type**\: :py:class:`Tty <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Tty>`
.. attribute:: ssl
XML SSL agent
**type**\: :py:class:`Ssl <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Ssl>`
"""
_prefix = 'man-xml-ttyagent-cfg'
_revision = '2017-05-01'
        def __init__(self):
            # Auto-generated initializer for the "agent" container holding the
            # default, tty, and ssl XML agents.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(XrXml.Agent, self).__init__()

            self.yang_name = "agent"
            self.yang_parent_name = "xr-xml"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("default", ("default", XrXml.Agent.Default)), ("tty", ("tty", XrXml.Agent.Tty)), ("ssl", ("ssl", XrXml.Agent.Ssl))])
            self._leafs = OrderedDict()

            self.default = XrXml.Agent.Default()
            self.default.parent = self
            self._children_name_map["default"] = "default"

            self.tty = XrXml.Agent.Tty()
            self.tty.parent = self
            self._children_name_map["tty"] = "tty"

            self.ssl = XrXml.Agent.Ssl()
            self.ssl.parent = self
            self._children_name_map["ssl"] = "ssl"
            self._segment_path = lambda: "agent"
            self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route attribute writes through YDK validation (no configurable
            # leafs at this level).
            self._perform_setattr(XrXml.Agent, [], name, value)
class Default(_Entity_):
"""
XML default dedicated agent
.. attribute:: ipv6_enable
IPv6 Transport State
**type**\: bool
.. attribute:: ipv4_disable
TRUE to disable IPV4
**type**\: bool
.. attribute:: session
Session attributes
**type**\: :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Default.Session>`
.. attribute:: iteration_size
Iterator size, in KBytes, of the XML response. Specify 0 to turn off the XML response iterator
**type**\: int
**range:** 0..100000
**units**\: kilobyte
**default value**\: 48
.. attribute:: throttle
XML agent throttling
**type**\: :py:class:`Throttle <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Default.Throttle>`
.. attribute:: enable
Enable specified XML agent
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: streaming_size
Streaming size, in KBytes, of the XML response
**type**\: int
**range:** 1..100000
**units**\: kilobyte
.. attribute:: vrfs
List of VRFs
**type**\: :py:class:`Vrfs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Default.Vrfs>`
"""
_prefix = 'man-xml-ttyagent-cfg'
_revision = '2017-05-01'
            def __init__(self):
                # Auto-generated initializer for the "default" dedicated XML agent.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(XrXml.Agent.Default, self).__init__()

                self.yang_name = "default"
                self.yang_parent_name = "agent"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("session", ("session", XrXml.Agent.Default.Session)), ("throttle", ("throttle", XrXml.Agent.Default.Throttle)), ("vrfs", ("vrfs", XrXml.Agent.Default.Vrfs))])
                self._leafs = OrderedDict([
                    ('ipv6_enable', (YLeaf(YType.boolean, 'ipv6-enable'), ['bool'])),
                    ('ipv4_disable', (YLeaf(YType.boolean, 'ipv4-disable'), ['bool'])),
                    ('iteration_size', (YLeaf(YType.uint32, 'iteration-size'), ['int'])),
                    ('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
                    ('streaming_size', (YLeaf(YType.uint32, 'streaming-size'), ['int'])),
                ])
                # Leaf values start unset (None); see the class docstring for
                # ranges, units, and YANG defaults.
                self.ipv6_enable = None
                self.ipv4_disable = None
                self.iteration_size = None
                self.enable = None
                self.streaming_size = None

                self.session = XrXml.Agent.Default.Session()
                self.session.parent = self
                self._children_name_map["session"] = "session"

                self.throttle = XrXml.Agent.Default.Throttle()
                self.throttle.parent = self
                self._children_name_map["throttle"] = "throttle"

                self.vrfs = XrXml.Agent.Default.Vrfs()
                self.vrfs.parent = self
                self._children_name_map["vrfs"] = "vrfs"
                self._segment_path = lambda: "default"
                self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                # Only the listed leaf names are settable after freezing.
                self._perform_setattr(XrXml.Agent.Default, ['ipv6_enable', 'ipv4_disable', 'iteration_size', 'enable', 'streaming_size'], name, value)
            class Session(_Entity_):
                """
                Session attributes
                .. attribute:: timeout
                	Timeout in minutes
                	**type**\: int
                	**range:** 1..1440
                	**units**\: minute
                """

                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'

                def __init__(self):
                    # Auto-generated initializer: a single optional "timeout" leaf.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Default.Session, self).__init__()

                    self.yang_name = "session"
                    self.yang_parent_name = "default"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('timeout', (YLeaf(YType.uint32, 'timeout'), ['int'])),
                    ])
                    self.timeout = None
                    self._segment_path = lambda: "session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/default/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Default.Session, ['timeout'], name, value)

                @staticmethod
                def _meta_info():
                    # Deferred import keeps the large generated meta module off
                    # the normal import path.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Default.Session']['meta_info']
            class Throttle(_Entity_):
                """
                XML agent throttling
                .. attribute:: process_rate
                	Process rate in number of XML tags per second
                	**type**\: int
                	**range:** 1000..30000
                .. attribute:: memory
                	Size of memory usage, in MBytes, per session
                	**type**\: int
                	**range:** 100..1024
                	**units**\: megabyte
                	**default value**\: 300
                """

                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'

                def __init__(self):
                    # Auto-generated initializer: optional process-rate and
                    # per-session memory leafs.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Default.Throttle, self).__init__()

                    self.yang_name = "throttle"
                    self.yang_parent_name = "default"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('process_rate', (YLeaf(YType.uint32, 'process-rate'), ['int'])),
                        ('memory', (YLeaf(YType.uint32, 'memory'), ['int'])),
                    ])
                    self.process_rate = None
                    self.memory = None
                    self._segment_path = lambda: "throttle"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/default/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Default.Throttle, ['process_rate', 'memory'], name, value)

                @staticmethod
                def _meta_info():
                    # Deferred import keeps the large generated meta module off
                    # the normal import path.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Default.Throttle']['meta_info']
class Vrfs(_Entity_):
"""
List of VRFs
.. attribute:: vrf
A specific VRF
**type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Default.Vrfs.Vrf>`
"""
_prefix = 'man-xml-ttyagent-cfg'
_revision = '2017-05-01'
                def __init__(self):
                    # Auto-generated initializer: "vrfs" is a pure container
                    # whose only child is the keyed "vrf" list.
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Default.Vrfs, self).__init__()

                    self.yang_name = "vrfs"
                    self.yang_parent_name = "default"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("vrf", ("vrf", XrXml.Agent.Default.Vrfs.Vrf))])
                    self._leafs = OrderedDict()

                    self.vrf = YList(self)
                    self._segment_path = lambda: "vrfs"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/default/%s" % self._segment_path()
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Default.Vrfs, [], name, value)
                class Vrf(_Entity_):
                    """
                    A specific VRF
                    .. attribute:: vrf_name  (key)
                    	VRF name
                    	**type**\: str
                    	**length:** 1..32
                    .. attribute:: ipv6_access_list
                    	IPv6 Transport Access list for VRF
                    	**type**\: str
                    	**length:** 1..32
                    .. attribute:: ipv4_access_list
                    	IPv4 Transport Access list for VRF
                    	**type**\: str
                    	**length:** 1..32
                    .. attribute:: access_list
                    	Access list for XML agent
                    	**type**\: str
                    	**length:** 1..32
                    .. attribute:: shutdown
                    	Shutdown default VRF. This is applicable only for VRF default
                    	**type**\: :py:class:`Empty<ydk.types.Empty>`
                    """

                    _prefix = 'man-xml-ttyagent-cfg'
                    _revision = '2017-05-01'

                    def __init__(self):
                        # Auto-generated initializer for one keyed "vrf" list entry.
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(XrXml.Agent.Default.Vrfs.Vrf, self).__init__()

                        self.yang_name = "vrf"
                        self.yang_parent_name = "vrfs"
                        self.is_top_level_class = False
                        self.has_list_ancestor = False
                        # vrf_name is the YANG list key and is embedded in the
                        # segment path below.
                        self.ylist_key_names = ['vrf_name']
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
                            ('ipv6_access_list', (YLeaf(YType.str, 'ipv6-access-list'), ['str'])),
                            ('ipv4_access_list', (YLeaf(YType.str, 'ipv4-access-list'), ['str'])),
                            ('access_list', (YLeaf(YType.str, 'access-list'), ['str'])),
                            ('shutdown', (YLeaf(YType.empty, 'shutdown'), ['Empty'])),
                        ])
                        self.vrf_name = None
                        self.ipv6_access_list = None
                        self.ipv4_access_list = None
                        self.access_list = None
                        self.shutdown = None
                        self._segment_path = lambda: "vrf" + "[vrf-name='" + str(self.vrf_name) + "']"
                        self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/default/vrfs/%s" % self._segment_path()
                        self._is_frozen = True

                    def __setattr__(self, name, value):
                        self._perform_setattr(XrXml.Agent.Default.Vrfs.Vrf, ['vrf_name', 'ipv6_access_list', 'ipv4_access_list', 'access_list', 'shutdown'], name, value)

                    @staticmethod
                    def _meta_info():
                        # Deferred import keeps the large generated meta module
                        # off the normal import path.
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                        return meta._meta_table['XrXml.Agent.Default.Vrfs.Vrf']['meta_info']
                @staticmethod
                def _meta_info():
                    # Generated metadata accessor for the enclosing Vrfs container.
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Default.Vrfs']['meta_info']
            @staticmethod
            def _meta_info():
                # Generated metadata accessor for the enclosing Default container.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                return meta._meta_table['XrXml.Agent.Default']['meta_info']
        # NOTE(review): ydk-gen generated YANG binding — regenerate rather than hand-edit.
        class Tty(_Entity_):
            """
            XML TTY agent
            .. attribute:: session
            Session attributes
            **type**\: :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Tty.Session>`
            .. attribute:: iteration_size
            Iterator size, in KBytes, of the XML response. Specify 0 to turn off the XML response iterator
            **type**\: int
            **range:** 0..100000
            **units**\: kilobyte
            **default value**\: 48
            .. attribute:: throttle
            XML agent throttling
            **type**\: :py:class:`Throttle <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Tty.Throttle>`
            .. attribute:: enable
            Enable specified XML agent
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: streaming_size
            Streaming size, in KBytes, of the XML response
            **type**\: int
            **range:** 1..100000
            **units**\: kilobyte
            """
            _prefix = 'man-xml-ttyagent-cfg'
            _revision = '2017-05-01'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(XrXml.Agent.Tty, self).__init__()
                self.yang_name = "tty"
                self.yang_parent_name = "agent"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("session", ("session", XrXml.Agent.Tty.Session)), ("throttle", ("throttle", XrXml.Agent.Tty.Throttle))])
                self._leafs = OrderedDict([
                    ('iteration_size', (YLeaf(YType.uint32, 'iteration-size'), ['int'])),
                    ('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
                    ('streaming_size', (YLeaf(YType.uint32, 'streaming-size'), ['int'])),
                ])
                self.iteration_size = None
                self.enable = None
                self.streaming_size = None
                self.session = XrXml.Agent.Tty.Session()
                self.session.parent = self
                self._children_name_map["session"] = "session"
                self.throttle = XrXml.Agent.Tty.Throttle()
                self.throttle.parent = self
                self._children_name_map["throttle"] = "throttle"
                self._segment_path = lambda: "tty"
                self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/%s" % self._segment_path()
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(XrXml.Agent.Tty, ['iteration_size', 'enable', 'streaming_size'], name, value)
            class Session(_Entity_):
                """
                Session attributes
                .. attribute:: timeout
                Timeout in minutes
                **type**\: int
                **range:** 1..1440
                **units**\: minute
                """
                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'
                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Tty.Session, self).__init__()
                    self.yang_name = "session"
                    self.yang_parent_name = "tty"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('timeout', (YLeaf(YType.uint32, 'timeout'), ['int'])),
                    ])
                    self.timeout = None
                    self._segment_path = lambda: "session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/tty/%s" % self._segment_path()
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Tty.Session, ['timeout'], name, value)
                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Tty.Session']['meta_info']
            class Throttle(_Entity_):
                """
                XML agent throttling
                .. attribute:: process_rate
                Process rate in number of XML tags per second
                **type**\: int
                **range:** 1000..30000
                .. attribute:: memory
                Size of memory usage, in MBytes, per session
                **type**\: int
                **range:** 100..1024
                **units**\: megabyte
                **default value**\: 300
                """
                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'
                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Tty.Throttle, self).__init__()
                    self.yang_name = "throttle"
                    self.yang_parent_name = "tty"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('process_rate', (YLeaf(YType.uint32, 'process-rate'), ['int'])),
                        ('memory', (YLeaf(YType.uint32, 'memory'), ['int'])),
                    ])
                    self.process_rate = None
                    self.memory = None
                    self._segment_path = lambda: "throttle"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/tty/%s" % self._segment_path()
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Tty.Throttle, ['process_rate', 'memory'], name, value)
                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Tty.Throttle']['meta_info']
            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                return meta._meta_table['XrXml.Agent.Tty']['meta_info']
        # NOTE(review): ydk-gen generated YANG binding — regenerate rather than hand-edit.
        class Ssl(_Entity_):
            """
            XML SSL agent
            .. attribute:: session
            Session attributes
            **type**\: :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Ssl.Session>`
            .. attribute:: iteration_size
            Iterator size, in KBytes, of the XML response. Specify 0 to turn off the XML response iterator
            **type**\: int
            **range:** 0..100000
            **units**\: kilobyte
            **default value**\: 48
            .. attribute:: throttle
            XML agent throttling
            **type**\: :py:class:`Throttle <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Ssl.Throttle>`
            .. attribute:: enable
            Enable specified XML agent
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            .. attribute:: streaming_size
            Streaming size, in KBytes, of the XML response
            **type**\: int
            **range:** 1..100000
            **units**\: kilobyte
            .. attribute:: vrfs
            List of VRFs
            **type**\: :py:class:`Vrfs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Ssl.Vrfs>`
            """
            _prefix = 'man-xml-ttyagent-cfg'
            _revision = '2017-05-01'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(XrXml.Agent.Ssl, self).__init__()
                self.yang_name = "ssl"
                self.yang_parent_name = "agent"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("session", ("session", XrXml.Agent.Ssl.Session)), ("throttle", ("throttle", XrXml.Agent.Ssl.Throttle)), ("vrfs", ("vrfs", XrXml.Agent.Ssl.Vrfs))])
                self._leafs = OrderedDict([
                    ('iteration_size', (YLeaf(YType.uint32, 'iteration-size'), ['int'])),
                    ('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
                    ('streaming_size', (YLeaf(YType.uint32, 'streaming-size'), ['int'])),
                ])
                self.iteration_size = None
                self.enable = None
                self.streaming_size = None
                self.session = XrXml.Agent.Ssl.Session()
                self.session.parent = self
                self._children_name_map["session"] = "session"
                self.throttle = XrXml.Agent.Ssl.Throttle()
                self.throttle.parent = self
                self._children_name_map["throttle"] = "throttle"
                self.vrfs = XrXml.Agent.Ssl.Vrfs()
                self.vrfs.parent = self
                self._children_name_map["vrfs"] = "vrfs"
                self._segment_path = lambda: "ssl"
                self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/%s" % self._segment_path()
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(XrXml.Agent.Ssl, ['iteration_size', 'enable', 'streaming_size'], name, value)
            class Session(_Entity_):
                """
                Session attributes
                .. attribute:: timeout
                Timeout in minutes
                **type**\: int
                **range:** 1..1440
                **units**\: minute
                """
                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'
                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Ssl.Session, self).__init__()
                    self.yang_name = "session"
                    self.yang_parent_name = "ssl"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('timeout', (YLeaf(YType.uint32, 'timeout'), ['int'])),
                    ])
                    self.timeout = None
                    self._segment_path = lambda: "session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/ssl/%s" % self._segment_path()
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Ssl.Session, ['timeout'], name, value)
                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Ssl.Session']['meta_info']
            class Throttle(_Entity_):
                """
                XML agent throttling
                .. attribute:: process_rate
                Process rate in number of XML tags per second
                **type**\: int
                **range:** 1000..30000
                .. attribute:: memory
                Size of memory usage, in MBytes, per session
                **type**\: int
                **range:** 100..1024
                **units**\: megabyte
                **default value**\: 300
                """
                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'
                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Ssl.Throttle, self).__init__()
                    self.yang_name = "throttle"
                    self.yang_parent_name = "ssl"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('process_rate', (YLeaf(YType.uint32, 'process-rate'), ['int'])),
                        ('memory', (YLeaf(YType.uint32, 'memory'), ['int'])),
                    ])
                    self.process_rate = None
                    self.memory = None
                    self._segment_path = lambda: "throttle"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/ssl/%s" % self._segment_path()
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Ssl.Throttle, ['process_rate', 'memory'], name, value)
                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Ssl.Throttle']['meta_info']
            class Vrfs(_Entity_):
                """
                List of VRFs
                .. attribute:: vrf
                A specific VRF
                **type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.XrXml.Agent.Ssl.Vrfs.Vrf>`
                """
                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'
                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(XrXml.Agent.Ssl.Vrfs, self).__init__()
                    self.yang_name = "vrfs"
                    self.yang_parent_name = "ssl"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([("vrf", ("vrf", XrXml.Agent.Ssl.Vrfs.Vrf))])
                    self._leafs = OrderedDict()
                    self.vrf = YList(self)
                    self._segment_path = lambda: "vrfs"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/ssl/%s" % self._segment_path()
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(XrXml.Agent.Ssl.Vrfs, [], name, value)
                class Vrf(_Entity_):
                    """
                    A specific VRF
                    .. attribute:: vrf_name (key)
                    VRF name
                    **type**\: str
                    **length:** 1..32
                    .. attribute:: ipv6_access_list
                    IPv6 Transport Access list for VRF
                    **type**\: str
                    **length:** 1..32
                    .. attribute:: ipv4_access_list
                    IPv4 Transport Access list for VRF
                    **type**\: str
                    **length:** 1..32
                    .. attribute:: access_list
                    Access list for XML agent
                    **type**\: str
                    **length:** 1..32
                    .. attribute:: shutdown
                    Shutdown default VRF. This is applicable only for VRF default
                    **type**\: :py:class:`Empty<ydk.types.Empty>`
                    """
                    _prefix = 'man-xml-ttyagent-cfg'
                    _revision = '2017-05-01'
                    def __init__(self):
                        if sys.version_info > (3,):
                            super().__init__()
                        else:
                            super(XrXml.Agent.Ssl.Vrfs.Vrf, self).__init__()
                        self.yang_name = "vrf"
                        self.yang_parent_name = "vrfs"
                        self.is_top_level_class = False
                        self.has_list_ancestor = False
                        self.ylist_key_names = ['vrf_name']
                        self._child_classes = OrderedDict([])
                        self._leafs = OrderedDict([
                            ('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
                            ('ipv6_access_list', (YLeaf(YType.str, 'ipv6-access-list'), ['str'])),
                            ('ipv4_access_list', (YLeaf(YType.str, 'ipv4-access-list'), ['str'])),
                            ('access_list', (YLeaf(YType.str, 'access-list'), ['str'])),
                            ('shutdown', (YLeaf(YType.empty, 'shutdown'), ['Empty'])),
                        ])
                        self.vrf_name = None
                        self.ipv6_access_list = None
                        self.ipv4_access_list = None
                        self.access_list = None
                        self.shutdown = None
                        # XPath segment embeds the list key, e.g. vrf[vrf-name='foo']
                        self._segment_path = lambda: "vrf" + "[vrf-name='" + str(self.vrf_name) + "']"
                        self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:xr-xml/agent/ssl/vrfs/%s" % self._segment_path()
                        self._is_frozen = True
                    def __setattr__(self, name, value):
                        self._perform_setattr(XrXml.Agent.Ssl.Vrfs.Vrf, ['vrf_name', 'ipv6_access_list', 'ipv4_access_list', 'access_list', 'shutdown'], name, value)
                    @staticmethod
                    def _meta_info():
                        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                        return meta._meta_table['XrXml.Agent.Ssl.Vrfs.Vrf']['meta_info']
                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['XrXml.Agent.Ssl.Vrfs']['meta_info']
            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                return meta._meta_table['XrXml.Agent.Ssl']['meta_info']
        @staticmethod
        def _meta_info():
            # Generated metadata accessor for the enclosing Agent container.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
            return meta._meta_table['XrXml.Agent']['meta_info']
    def clone_ptr(self):
        # Return a fresh top-level entity instance (YDK read/CRUD plumbing).
        self._top_entity = XrXml()
        return self._top_entity
    @staticmethod
    def _meta_info():
        # Generated metadata accessor for the top-level XrXml class.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
        return meta._meta_table['XrXml']['meta_info']
# NOTE(review): ydk-gen generated YANG binding — regenerate rather than hand-edit.
class Netconf(_Entity_):
    """
    netconf
    .. attribute:: agent
    XML agent
    **type**\: :py:class:`Agent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.Netconf.Agent>`
    """
    _prefix = 'man-xml-ttyagent-cfg'
    _revision = '2017-05-01'
    def __init__(self):
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Netconf, self).__init__()
        self._top_entity = None
        self.yang_name = "netconf"
        self.yang_parent_name = "Cisco-IOS-XR-man-xml-ttyagent-cfg"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("agent", ("agent", Netconf.Agent))])
        self._leafs = OrderedDict()
        self.agent = Netconf.Agent()
        self.agent.parent = self
        self._children_name_map["agent"] = "agent"
        self._segment_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:netconf"
        self._is_frozen = True
    def __setattr__(self, name, value):
        self._perform_setattr(Netconf, [], name, value)
    class Agent(_Entity_):
        """
        XML agent
        .. attribute:: tty
        NETCONF agent over TTY
        **type**\: :py:class:`Tty <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.Netconf.Agent.Tty>`
        """
        _prefix = 'man-xml-ttyagent-cfg'
        _revision = '2017-05-01'
        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Netconf.Agent, self).__init__()
            self.yang_name = "agent"
            self.yang_parent_name = "netconf"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("tty", ("tty", Netconf.Agent.Tty))])
            self._leafs = OrderedDict()
            self.tty = Netconf.Agent.Tty()
            self.tty.parent = self
            self._children_name_map["tty"] = "tty"
            self._segment_path = lambda: "agent"
            self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:netconf/%s" % self._segment_path()
            self._is_frozen = True
        def __setattr__(self, name, value):
            self._perform_setattr(Netconf.Agent, [], name, value)
        class Tty(_Entity_):
            """
            NETCONF agent over TTY
            .. attribute:: throttle
            NETCONF agent throttling
            **type**\: :py:class:`Throttle <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.Netconf.Agent.Tty.Throttle>`
            .. attribute:: session
            Session attributes
            **type**\: :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_man_xml_ttyagent_cfg.Netconf.Agent.Tty.Session>`
            .. attribute:: enable
            Enable specified NETCONF agent
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            """
            _prefix = 'man-xml-ttyagent-cfg'
            _revision = '2017-05-01'
            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Netconf.Agent.Tty, self).__init__()
                self.yang_name = "tty"
                self.yang_parent_name = "agent"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("throttle", ("throttle", Netconf.Agent.Tty.Throttle)), ("session", ("session", Netconf.Agent.Tty.Session))])
                self._leafs = OrderedDict([
                    ('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
                ])
                self.enable = None
                self.throttle = Netconf.Agent.Tty.Throttle()
                self.throttle.parent = self
                self._children_name_map["throttle"] = "throttle"
                self.session = Netconf.Agent.Tty.Session()
                self.session.parent = self
                self._children_name_map["session"] = "session"
                self._segment_path = lambda: "tty"
                self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:netconf/agent/%s" % self._segment_path()
                self._is_frozen = True
            def __setattr__(self, name, value):
                self._perform_setattr(Netconf.Agent.Tty, ['enable'], name, value)
            class Throttle(_Entity_):
                """
                NETCONF agent throttling
                .. attribute:: memory
                Size of memory usage, in MBytes, per session
                **type**\: int
                **range:** 100..1024
                **units**\: megabyte
                **default value**\: 300
                .. attribute:: offload_memory
                Size of memory usage, in MBytes, per session
                **type**\: int
                **range:** 0..12000
                **units**\: megabyte
                **default value**\: 0
                .. attribute:: process_rate
                Process rate in number of XML tags per second
                **type**\: int
                **range:** 1000..30000
                """
                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'
                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Netconf.Agent.Tty.Throttle, self).__init__()
                    self.yang_name = "throttle"
                    self.yang_parent_name = "tty"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('memory', (YLeaf(YType.uint32, 'memory'), ['int'])),
                        ('offload_memory', (YLeaf(YType.uint32, 'offload-memory'), ['int'])),
                        ('process_rate', (YLeaf(YType.uint32, 'process-rate'), ['int'])),
                    ])
                    self.memory = None
                    self.offload_memory = None
                    self.process_rate = None
                    self._segment_path = lambda: "throttle"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:netconf/agent/tty/%s" % self._segment_path()
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(Netconf.Agent.Tty.Throttle, ['memory', 'offload_memory', 'process_rate'], name, value)
                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['Netconf.Agent.Tty.Throttle']['meta_info']
            class Session(_Entity_):
                """
                Session attributes
                .. attribute:: timeout
                Timeout in minutes
                **type**\: int
                **range:** 1..1440
                **units**\: minute
                """
                _prefix = 'man-xml-ttyagent-cfg'
                _revision = '2017-05-01'
                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(Netconf.Agent.Tty.Session, self).__init__()
                    self.yang_name = "session"
                    self.yang_parent_name = "tty"
                    self.is_top_level_class = False
                    self.has_list_ancestor = False
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('timeout', (YLeaf(YType.uint32, 'timeout'), ['int'])),
                    ])
                    self.timeout = None
                    self._segment_path = lambda: "session"
                    self._absolute_path = lambda: "Cisco-IOS-XR-man-xml-ttyagent-cfg:netconf/agent/tty/%s" % self._segment_path()
                    self._is_frozen = True
                def __setattr__(self, name, value):
                    self._perform_setattr(Netconf.Agent.Tty.Session, ['timeout'], name, value)
                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                    return meta._meta_table['Netconf.Agent.Tty.Session']['meta_info']
            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
                return meta._meta_table['Netconf.Agent.Tty']['meta_info']
        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
            return meta._meta_table['Netconf.Agent']['meta_info']
    def clone_ptr(self):
        # Return a fresh top-level entity instance (YDK read/CRUD plumbing).
        self._top_entity = Netconf()
        return self._top_entity
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_cfg as meta
        return meta._meta_table['Netconf']['meta_info']
|
from PIL import Image, ImageDraw, ImageFont # pip install pillow
import numpy
import cv2
import sys, os
FONT_MARGIN = 2  # px of vertical padding around the "Round: N" caption
# RGB fill colour per cell symbol. Data files are mapped C->'1', D->'0' before
# rendering; literal 'C'/'D' can still appear via layout pass-through characters.
COLOR_MAP = {
    '1': (255, 35, 11),
    '0': (16, 194, 20),
    'C': (136, 0, 21),
    'D': (7, 92, 10)}
OUTPATH = 'output'  # root directory for generated images/GIFs/videos
def get_attr(info, default=None, isBoolean=False):
    """Prompt the user for a value.

    An empty answer falls back to `default`. With `isBoolean`, the answer is
    interpreted as yes/no: a leading 'y'/'Y' means True, anything else False.
    """
    value = input(info + ('' if default is None
                          else '(default: {})'.format(default)) + ': ').strip()
    if value == '' and default is not None:
        value = default
    if isBoolean:
        # BUG FIX: an empty answer with no default used to raise IndexError
        # on value[0]; slicing yields '' and safely evaluates to False.
        value = str(value)[:1] in ('y', 'Y')
    return value
def get_cd_matrix(datafile, layoutfile, W, H):
    """Build one W*H character grid per round from a data file and a layout.

    The layout file uses '_' as a player slot; any other character is copied
    through verbatim. Data lines are consumed one per slot per round, with
    'C' mapped to '1' and 'D' mapped to '0'. Returns a list of numpy arrays
    (one per round), each transposed before use by the renderer.
    """
    with open(layoutfile) as f:
        layout_str = f.read().replace('\n', '').replace('\t', '')
    with open(datafile) as f:
        # BUG FIX: strip() guards against a trailing newline producing a
        # spurious empty entry that corrupted the round count / grid size.
        player_str = f.read().strip().replace('C', '1').replace('D', '0').split('\n')
    data_str = ''
    n_players = layout_str.count('_')
    cnt = 0
    # BUG FIX: also reject a layout with zero '_' slots, which previously
    # crashed with ZeroDivisionError further down.
    if n_players == 0 or len(player_str) % n_players != 0:
        print('It seems that you have choosen a wrong layout file, please check it.')
        exit(1)
    for i in range(len(player_str) // n_players):
        for p in layout_str:
            if p == '_':
                data_str += player_str[cnt]
                cnt += 1
            else:
                data_str += p
    # NOTE(review): reshape uses (W, H) then transposes each round; callers
    # index [x//psize][y//psize], which is only symmetric when W == H — confirm
    # intended axis order for non-square grids.
    matrix = list(numpy.array(list(data_str)).reshape((-1, W, H)))
    for i in range(len(matrix)):
        matrix[i] = matrix[i].T
    return matrix
def get_image_list(matrix_data, width, height, psize, title_height):
    """Render one RGB Pillow image per round.

    Each cell is painted psize*psize pixels via COLOR_MAP; when title_height
    is non-zero a "Round: N" caption strip is drawn at the bottom.
    """
    length = len(matrix_data)
    print('Generate rounds images (0/{})...'.format(length), end='')
    if title_height:
        # Grow the font until "Round: 999" just fits inside the title strip.
        fontsize = 1
        font = ImageFont.truetype("arial.ttf", fontsize)
        while font.getsize('Round: 999')[1] < title_height - FONT_MARGIN * 2:
            fontsize += 1
            font = ImageFont.truetype("arial.ttf", fontsize)
    iml = []
    for i, m in enumerate(matrix_data):
        image = Image.new('RGB', (width, height), (255, 255, 255))
        draw = ImageDraw.Draw(image)
        for x in range(width):
            for y in range(height - title_height):
                draw.point((x, y), fill=get_color(m, psize, x, y))
        if title_height > 0:
            draw.text((10, height - title_height + FONT_MARGIN), 'Round: ' + str(i + 1),
                      font=font, fill='#000000')
        # BUG FIX: the progress line was only printed when a title strip was
        # requested (it sat inside the `if title_height > 0` branch).
        print('\rGenerate rounds images ({}/{})...'.format(i + 1, length), end='')
        iml.append(image)
    print('Done')
    return iml
def get_color(m, psize, x, y):
    """Return the RGB colour of the grid cell containing pixel (x, y)."""
    cell_symbol = m[x // psize][y // psize]
    return COLOR_MAP[cell_symbol]
def save_iml(name, iml):
    """Save every round image as OUTPATH/<name>/NN.jpg."""
    # exist_ok avoids the check-then-create race of the old exists()/makedirs() pair.
    os.makedirs(OUTPATH + '/' + name, exist_ok=True)
    print('Saving Images (0/{})...'.format(len(iml)), end='')
    for i, image in enumerate(iml):
        print('\rSaving Images ({}/{})...'.format(i + 1, len(iml)), end='')
        image.save('{}/{}/{:02d}.jpg'.format(OUTPATH, name, i + 1), 'jpeg')
    print('Done')
def save_gif(name, iml, duration):
    """Write the image list as an animated GIF at OUTPATH/<name>.gif."""
    print('Saving GIF...', end='')
    target = OUTPATH + '/' + name + '.gif'
    # First frame is the anchor; the full list is appended after it.
    iml[0].save(target, save_all=True, append_images=iml,
                duration=duration)
    print('Done')
def save_video(name, iml, width, height, fps=24, vtype='mp4'):
    """Encode the image list as a video; each image is held for `fps` frames
    (one second of footage per round)."""
    if vtype.lower() not in ['mp4', 'avi']:
        print('Error video type.')
        return None
    print('Saving {} (0/{})...'.format(vtype, len(iml)), end='')
    codecs = {
        'mp4': cv2.VideoWriter_fourcc(*'MP4V'),
        'avi': cv2.VideoWriter_fourcc(*'XVID'),
    }
    # BUG FIX: frames are colour (BGR), so the writer must be opened with
    # isColor=True; isColor=False silently produced a broken/empty video.
    video = cv2.VideoWriter(OUTPATH + '/' + name + '.' + vtype,
                            codecs[vtype], float(fps),
                            (width, height), isColor=True)
    for i, im in enumerate(iml):
        print('\rSaving {} ({}/{})...'.format(vtype, i + 1, len(iml)), end='')
        frame = cv2.cvtColor(numpy.array(im), cv2.COLOR_RGB2BGR)
        # BUG FIX: the repeat counter previously shadowed the enumerate
        # index `i`, corrupting the progress display.
        for _ in range(int(fps)):
            video.write(frame)
    video.release()
    cv2.destroyAllWindows()
    print('Done')
def process():
    """Interactively collect all rendering parameters and run a single job."""
    name = get_attr('Data file (without ".txt")')
    layout = get_attr('Layout file (without ".txt")')
    W = int(get_attr('Grid width'))
    H = int(get_attr('Grid height'))
    psize = int(get_attr('Pixels per person', 40))
    title_height = int(get_attr('Height of title for round (0 for no title)', 0))
    isSaveImage = get_attr('Save Images (y/n)', 'n', True)
    isSaveGif = get_attr('Save GIF (y/n)', 'n', True)
    isSaveAvi = get_attr('Save Avi (y/n)', 'n', True)
    isSaveMp4 = get_attr('Save Mp4 (y/n)', 'y', True)
    # Nothing selected — nothing to render.
    if not any((isSaveImage, isSaveGif, isSaveAvi, isSaveMp4)):
        return None
    width = W * psize
    height = H * psize + title_height
    print('[Processing:', name + '.txt]')
    matrix_data = get_cd_matrix(name + '.txt', layout + '.txt', W, H)
    iml = get_image_list(matrix_data, width, height, psize, title_height)
    if isSaveImage:
        save_iml(name, iml)
    if isSaveGif:
        save_gif(name, iml, 1000)
    if isSaveAvi:
        save_video(name, iml, width, height, vtype='avi')
    if isSaveMp4:
        save_video(name, iml, width, height, vtype='mp4')
def process_sequence(fname):
    """Run rendering jobs described one-per-line in a CSV-style batch file.

    The first line (header) is skipped; columns are:
    name, layout, W, H, psize, title_height, images?, gif?, avi?, mp4?
    """
    with open(fname) as f:
        f.readline()  # skip the header row
        lines = f.read().strip().split('\n')
    for line in lines:
        fields = line.split(',')
        name, layout = fields[0], fields[1]
        W, H = int(fields[2]), int(fields[3])
        psize = int(fields[4])
        title_height = int(fields[5])
        flags = [fld[0] in 'yY' for fld in fields[6:10]]
        isSaveImage, isSaveGif, isSaveAvi, isSaveMp4 = flags
        if not any(flags):
            continue
        width = W * psize
        height = H * psize + title_height
        print('[Processing:', name + '.txt]')
        matrix_data = get_cd_matrix(name + '.txt', layout + '.txt', W, H)
        iml = get_image_list(matrix_data, width, height, psize, title_height)
        if isSaveImage:
            save_iml(name, iml)
        if isSaveGif:
            save_gif(name, iml, 1000)
        if isSaveAvi:
            save_video(name, iml, width, height, vtype='avi')
        if isSaveMp4:
            save_video(name, iml, width, height, vtype='mp4')
if __name__ == '__main__':
    # Batch mode when a sequence file path is given; interactive mode otherwise.
    if len(sys.argv) > 1:
        process_sequence(sys.argv[1])
    else:
        process()
|
from flask import Flask
import pymongo
app = Flask(__name__)

@app.route("/")
def hello():
    """The root path has no short code to resolve — answer with a real 404."""
    # BUG FIX: the body said "404" but the HTTP status was 200; return the
    # matching status code so clients see an actual Not Found response.
    return "404 Page Not Found", 404
@app.route("/<sht>", methods=["GET"])
def retr(sht):
client = pymongo.MongoClient("mongodb+srv://username:password@cluster0-fvejx.gcp.mongodb.net/test?retryWrites=true&w=majority")
collection = client.get_database('Shortner')
db= collection.URL
for i in db.find({"Short":"http://127.0.0.1:60000/"+str(sht)}):
return "<script type=\"text/javascript\">window.open(\'"+i['URL']+"\',\'_self\');</script>"
if __name__ == '__main__':
    # NOTE(review): the port (60000) is also baked into the lookup key inside
    # retr(); keep the two in sync. debug=True must not ship to production.
    app.run(port= 60000, debug=True)
|
#!/usr/bin/env python
# make-example.py
import latexpages
if __name__ == '__main__':
    # Build the example project described by the given latexpages INI config.
    latexpages.make('example/latexpages.ini')
|
#! /usr/bin/env python
from setuptools import setup, find_packages
# PyPI trove classifiers advertising the supported environments/Python versions.
CLASSIFIERS = [
    'Environment :: Web Environment',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'Programming Language :: Python :: 3.7',
    'Topic :: Software Development :: Libraries :: Application Frameworks',
    'Topic :: Software Development :: Libraries :: Python Modules']
# Packaging metadata for the satchless e-commerce framework.
setup(
    name='satchless',
    author='Mirumee Software',
    author_email='hello@mirumee.com',
    description='An e-commerce framework',
    license='BSD',
    version='1.2.0',
    url='http://satchless.com/',
    packages=find_packages(),
    classifiers=CLASSIFIERS,
    platforms=['any'],
    install_requires=['prices>=1.0,<1.1'],
    include_package_data=True,
    zip_safe=True)
|
import websocket
import ssl
import json
import time
import threading
import logging
import sys
# Module-level logger: timestamped records to stdout, DEBUG level and up.
logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s")
log = logging.getLogger(__name__)
consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setFormatter(logFormatter)
log.addHandler(consoleHandler)
log.setLevel("DEBUG")
class Receiver(threading.Thread):
    """Background thread holding a websocket subscription for a set of ticker
    symbols; every incoming message is forwarded to the symbol manager."""

    def __init__(self, symbol_manager, url):
        """Wire up the WebSocketApp callbacks and snapshot the symbols to subscribe."""
        log.debug("Initializing Poller service.")
        super(Receiver, self).__init__()
        self.url = url
        self.ws = websocket.WebSocketApp(
            self.url, on_message=self.on_message, on_error=self.on_error, on_close=self.on_close
        )
        self.ws.on_open = self.on_open
        self.symbols = set(symbol_manager.get_symbols())
        self.symbol_manager = symbol_manager
        log.debug("Successfully initialized Poller Service.")

    def on_connect(self, *args, **kwargs):
        log.debug("Establish connection to server")

    def on_message(self, message):
        # Parsing/state updates are delegated to the symbol manager.
        self.symbol_manager.update(message)

    def on_error(self, error):
        # NOTE(review): reconnecting by re-entering run_forever from the error
        # callback recurses and can grow the stack on repeated failures; a retry
        # loop with backoff would be safer.
        log.error("Error ={}, will attemp rerun".format(error))
        self._run()

    def on_open(self):
        """Subscribe to the ticker channel for every known symbol."""
        messages = self.create_message()
        for pair in self.symbols:
            try:
                messages["params"] = dict(symbol=pair)
                self.ws.send(json.dumps(messages))
                time.sleep(0.5)  # throttle subscription requests
            except Exception as e:
                # BUG FIX: Logger.warn is a deprecated alias of warning().
                log.warning(e)

    def create_message(self):
        """Return the base subscribeTicker request; 'params' is filled per symbol."""
        message = dict()
        message["method"] = "subscribeTicker"
        message["id"] = 1
        return message

    def run(self):
        log.debug("Starting Receiver service.")
        self._run()

    def _run(self):
        # Certificate verification is deliberately disabled (CERT_NONE).
        self.ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})

    def on_close(self):
        log.debug("Closing the connection")

    def add_symbol(self, symbol):
        """Track an extra symbol and push a raw subscription over the live socket."""
        self.symbols.add(symbol)
        try:
            self.ws.send(symbol)
        except Exception as e:
            # BUG FIX: Exception has no .message attribute on Python 3 — the old
            # format(e.message) raised AttributeError inside the handler itself.
            log.error("error while subscribing for symbol={}".format(e))

    def stop(self):
        log.debug("Shutting Down Receiver service.")
        self.ws.close()
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import sys
def main():
    """Take a headless-Chrome screenshot of google.co.jp into screen.png."""
    import platform
    # Print interpreter/platform details for debugging driver mismatches.
    print(platform.architecture())
    print(sys.version)
    options = Options()
    options.add_argument('--headless')
    options.add_argument('--no-sandbox')
    options.add_argument('--disable-gpu')
    # NOTE(review): chrome_options/executable_path are deprecated in newer
    # Selenium releases (use options=/Service) — confirm the pinned version.
    driver = webdriver.Chrome(chrome_options=options, executable_path=r"C:\selenium\chromedriver.exe")
    driver.get('https://www.google.co.jp')
    driver.save_screenshot('screen.png')
if __name__ == "__main__":
    main()
|
# Bracket-balance checker: scans the expression with a stack (list); any
# unmatched '(' or ')' leaves the stack non-empty, marking the input invalid.
print('\033[30m-'*35)
print('ANÁLISE DE EXPRESSÕES')
print('-'*35)
expr = str(input('Digite uma expressão: ')).strip()
lista = []
# Dead code from an earlier (incorrect) attempt — a no-op string literal.
'''for c in range(0, len(expr)):
lista.append(expr[c])
if lista.count('(') == lista.count(')') and '(' in lista[0] and ')' in lista[-1]:
print('Sua expressão está válida')
else:
print('Sua expressão está errada!')'''
print('-'*35)
for símb in expr:
    if símb == '(':
        lista.append('(')
    elif símb == ')':
        if len(lista) > 0:
            lista.pop()  # matched an earlier '('
        else:
            # Unmatched ')': force a non-empty stack and stop scanning.
            lista.append(')')
            break
# An empty stack after the scan means every '(' found its ')'.
if len(lista) == 0:
    print('Sua expressão está válida!')
else:
    print('Sua expressão está errada!')
|
from setuptools import setup
# Packaging metadata for the pyoxr openexchangerates.org client.
setup(
    name='pyoxr',
    packages=['pyoxr'],
    version='1.6',
    description='Python client for openexchangerates.org',
    author='RetargetApp',
    url='https://github.com/retargetapp/pyoxr',
    keywords=['openexchangerate', 'client', 'api'],
    classifiers=[]
)
|
from time import sleep
# Simple menu-driven two-number calculator; loops until option 5 is chosen.
v1 = int(input('Primeiro valor: '))
v2 = int(input('Segundo valor: '))
opção = 0
while opção != 5:
    print(''' [ 1 ] somar
[ 2 ] multiplicar
[ 3 ] maior
[ 4 ] novos números
[ 5 ] sair do programa''')
    sleep(1)
    opção = int(input('>>>>>> Qual é a sua opção? '))
    sleep(1)
    if opção == 1:
        # Option 1: sum.
        r1 = v1 + v2
        print(f'O resultado de {v1} + {v2} é {r1}.')
    elif opção == 2:
        # Option 2: product.
        r2 = v1 * v2
        print(f'O resultado de {v1} x {v2} é {r2}.')
    elif opção == 3:
        # Option 3: report the larger value (or a tie).
        if v1 == v2:
            print('Os números são iguais.')
        elif v1 > v2:
            print(f'O número {v1} é maior.')
        else:
            print(f'O número {v2} é maior.')
    elif opção == 4:
        # Option 4: read a fresh pair of values.
        print('Informe os números novamente: ')
        v1 = int(input('Primeiro valor: '))
        v2 = int(input('Segundo valor: '))
    elif opção == 5:
        print('Finalizando...')
        sleep(2)
    else:
        print('Opção Inválida. Tente novamente.')
print('Fim do Programa. Volte Sempre!')
from django.urls import path
from . import views
# Task CRUD routes: list, detail, then create/update/delete keyed by primary key.
urlpatterns = [
    path('', views.TaskListView.as_view(), name='index'),
    path('<int:pk>', views.TaskDetailView.as_view(), name='task-detail'),
    path('create/', views.TaskCreate.as_view(), name='task_create'),
    path('<int:pk>/update/', views.TaskUpdate.as_view(), name='task_update'),
    path('<int:pk>/delete/', views.TaskDelete.as_view(), name='task_delete'),
]
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
#################postprocess########################
"""
import os
import argparse
import numpy as np
from mindspore import Tensor
from src.config import config as cfg
from src.score import eval_pre_rec_f1
def parse_args(cloud_args=None):
    """Parse command-line options and merge in static config values.

    ``cloud_args`` is accepted for interface compatibility but unused.
    Returns the argparse namespace augmented with dataset paths taken
    from ``cfg`` and a fixed batch size of 1.
    """
    arg_parser = argparse.ArgumentParser('postprocess')
    arg_parser.add_argument('--rst_path', type=str, default='./result_Files/',
                            help='infer result path.')
    parsed = arg_parser.parse_args()
    parsed.data_dir = cfg.data_dir
    parsed.train_image_dir_name = os.path.join(cfg.data_dir, cfg.train_image_dir_name)
    parsed.val_fname = cfg.val_fname
    parsed.train_label_dir_name = os.path.join(cfg.data_dir, cfg.train_label_dir_name)
    parsed.batch_size = 1
    return parsed
if __name__ == '__main__':
    arg = parse_args()
    obj = eval_pre_rec_f1()  # precision/recall/F1 accumulator
    # Validation manifest: one record per line; the first comma-separated
    # field is the image filename.
    with open(os.path.join(arg.data_dir, arg.val_fname), 'r') as f_val:
        f_list = f_val.readlines()
    batch_list = np.arange(0, len(f_list), arg.batch_size)
    for idx in batch_list:
        gt_list = []
        for i in range(idx, min(idx + arg.batch_size, len(f_list))):
            item = f_list[i]
            img_filename = str(item).strip().split(',')[0]
            # ground-truth geometry saved as <name>.npy next to the labels
            gt_list.append(np.load(os.path.join(arg.train_label_dir_name, img_filename[:-4]) + '.npy'))
        # NOTE(review): `img_filename` leaks out of the inner loop, so the
        # prediction/ground-truth pairing is only correct because
        # parse_args() pins batch_size to 1 — confirm before changing it.
        y = np.fromfile(os.path.join(arg.rst_path, img_filename + '_0.bin'), np.float32)
        y = Tensor(y.reshape(1, 7, 112, 112))  # raw inference output blob
        obj.add(y, gt_list)
    print(obj.val())
|
# -*- coding: utf-8 -*-
from django.db import models
class TimeStampedModel(models.Model):
    """
    An abstract base model providing self-updating ``created`` and
    ``modified`` timestamp fields.
    """
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    modified = models.DateTimeField(auto_now=True)  # refreshed on every save()
    class Meta:
        abstract = True  # no table; inherit to get the two fields
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import collections
import io
import json
import librosa
import numpy as np
import soundfile as sf
import time
import torch
from scipy.io.wavfile import read
from .text import SOS_TOK, EOS_TOK
def get_mask_from_lengths(lengths):
    """Build a boolean padding mask from a batch of sequence lengths.

    Args:
        lengths: 1-D integer tensor of per-sequence lengths.

    Returns:
        Bool tensor of shape ``(len(lengths), max(lengths))`` where row i
        has ``lengths[i]`` leading True values and False elsewhere.
    """
    max_len = torch.max(lengths).item()
    # BUG FIX: the original used `out=torch.cuda.LongTensor(max_len)`,
    # which hard-required CUDA and crashed on CPU-only hosts. Allocating
    # on `lengths.device` keeps the mask on the same device as the input.
    ids = torch.arange(0, max_len, device=lengths.device, dtype=lengths.dtype)
    mask = ids < lengths.unsqueeze(1)
    return mask
def load_wav_to_torch(full_path, sr=None):
    """Load an audio file via librosa and return ``(FloatTensor, sample_rate)``.

    Samples are clipped to [-1, 1] (resampling can overshoot) and scaled
    by 32768 so the values match what scipy.io.wavfile.read would return
    for 16-bit audio.
    """
    samples, rate = librosa.load(full_path, sr=sr)
    scaled = np.clip(samples, -1, 1) * 32768.0
    return torch.FloatTensor(scaled.astype(np.float32)), rate
def read_binary_audio(bin_data, tar_sr=None):
    """
    read binary audio (`bytes` or `uint8` `numpy.ndarray`) to `float32`
    `numpy.ndarray`
    RETURNS:
        data (np.ndarray) : audio of shape (n,) or (2, n)
        tar_sr (int) : sample rate
    """
    samples, src_sr = sf.read(io.BytesIO(bin_data), dtype='float32')
    samples = samples.T  # soundfile gives (n, channels); we want channels first
    if tar_sr is None or src_sr == tar_sr:
        tar_sr = src_sr
    else:
        samples = librosa.resample(samples, src_sr, tar_sr)
    # clip resampling overshoot, then scale to 16-bit magnitude range
    samples = np.clip(samples, -1, 1) * 32768.0
    return torch.FloatTensor(samples.astype(np.float32)), tar_sr
def load_filepaths_and_text(filename):
    """Read a JSON-lines manifest; return one decoded object per line."""
    entries = []
    with open(filename, encoding='utf-8') as handle:
        for raw_line in handle:
            entries.append(json.loads(raw_line.rstrip()))
    return entries
def to_gpu(x):
    """Return ``x`` as a contiguous Variable, moved to the GPU when one exists."""
    contiguous = x.contiguous()
    if torch.cuda.is_available():
        contiguous = contiguous.cuda(non_blocking=True)
    return torch.autograd.Variable(contiguous)
def load_code_dict(path, add_sos=False, add_eos=False):
    """Map each code (one per line in ``path``) to a contiguous integer id.

    Id 0 is reserved for the pad symbol '_'; SOS/EOS tokens are appended
    on request. Returns an empty dict when ``path`` is falsy.
    """
    if not path:
        return {}
    with open(path, 'r') as handle:
        symbols = ['_']  # '_' for pad
        symbols.extend(line.rstrip() for line in handle)
    code_dict = {sym: idx for idx, sym in enumerate(symbols)}
    if add_sos:
        code_dict[SOS_TOK] = len(code_dict)
    if add_eos:
        code_dict[EOS_TOK] = len(code_dict)
    # sanity: ids must form 0..len-1 with no gaps or duplicates
    assert set(code_dict.values()) == set(range(len(code_dict)))
    return code_dict
def load_obs_label_dict(path):
    """Map each observation label (one per line) to its line index; {} if no path."""
    if not path:
        return {}
    with open(path, 'r') as handle:
        return {line.rstrip(): idx for idx, line in enumerate(handle)}
# A simple timer class inspired from `tnt.TimeMeter`
class CudaTimer:
    """Average GPU-side durations per key, measured with CUDA events.

    Wrap GPU work in start(key)/stop(key); value() synchronizes the device
    and returns the mean elapsed seconds per key, then clears the recorded
    events (running totals keep accumulating until reset()).
    """

    def __init__(self, keys):
        self.keys = keys
        self.reset()

    def start(self, key):
        event = torch.cuda.Event(enable_timing=True)
        event.record()
        self.start_events[key].append(event)
        return self

    def stop(self, key):
        event = torch.cuda.Event(enable_timing=True)
        event.record()
        self.end_events[key].append(event)
        return self

    def reset(self):
        self.start_events = collections.defaultdict(list)
        self.end_events = collections.defaultdict(list)
        self.running_times = collections.defaultdict(float)
        self.n = collections.defaultdict(int)
        return self

    def value(self):
        self._synchronize()
        return {key: self.running_times[key] / self.n[key] for key in self.keys}

    def _synchronize(self):
        # events only carry valid timings after the device has caught up
        torch.cuda.synchronize()
        for key in self.keys:
            begins = self.start_events[key]
            finishes = self.end_events[key]
            if not begins:
                raise ValueError("Trying to divide by zero in TimeMeter")
            if len(finishes) != len(begins):
                raise ValueError("Call stop before checking value!")
            elapsed_ms = sum(b.elapsed_time(f) for b, f in zip(begins, finishes))
            self.running_times[key] += elapsed_ms * 1e-3  # ms -> s
            self.n[key] += len(begins)
        self.start_events = collections.defaultdict(list)
        self.end_events = collections.defaultdict(list)
# Used to measure the time taken for multiple events
class Timer:
    """Wall-clock average-duration tracker for a fixed set of keys.

    Wrap timed work in start(key)/stop(key); value() returns the mean
    elapsed seconds per key over all completed intervals since reset().
    """

    def __init__(self, keys):
        self.keys = keys
        self.n = {}              # completed intervals per key
        self.running_time = {}   # start timestamp of the open interval (or None)
        self.total_time = {}     # accumulated elapsed seconds per key
        self.reset()

    def start(self, key):
        self.running_time[key] = time.time()
        return self

    def stop(self, key):
        # BUG FIX: the original assigned (=) instead of accumulating (+=),
        # so value() divided only the *last* interval by the number of
        # intervals instead of returning a true mean.
        self.total_time[key] += time.time() - self.running_time[key]
        self.n[key] += 1
        self.running_time[key] = None
        return self

    def reset(self):
        for k in self.keys:
            self.total_time[k] = 0
            self.running_time[k] = None
            self.n[k] = 0
        return self

    def value(self):
        """Return {key: mean seconds}; raises ValueError if a key has no data."""
        vals = {}
        for k in self.keys:
            if self.n[k] == 0:
                raise ValueError("Trying to divide by zero in TimeMeter")
            vals[k] = self.total_time[k] / self.n[k]
        return vals
|
# validated: 2017-09-28 DS cedbafeb286d cpp/NetworkConnection.cpp cpp/NetworkConnection.h cpp/INetworkConnection.h
"""----------------------------------------------------------------------------"""
""" Copyright (c) FIRST 2017. All Rights Reserved. """
""" Open Source Software - may be modified and shared by FRC teams. The code """
""" must be accompanied by the FIRST BSD license file in the root directory of """
""" the project. """
"""----------------------------------------------------------------------------"""
import threading
from queue import Queue, Empty
from time import monotonic
from .constants import (
kEntryAssign,
kEntryUpdate,
kFlagsUpdate,
kEntryDelete,
kClearEntries,
msgtype_str,
)
from .message import Message
from .structs import ConnectionInfo
from .wire import WireCodec
from .support.lists import Pair
from .support.safe_thread import SafeThread
from .tcpsockets.tcp_stream import StreamEOF
import logging
logger = logging.getLogger("nt")
# Sentinel pair used to blank out a pending-update slot after a delete.
_empty_pair = Pair(0, 0)
# Human-readable names for NetworkConnection.State values (debug logging).
_state_map = {
    0: "created",
    1: "init",
    2: "handshake",
    3: "synchronized",
    4: "active",
    5: "dead",
}
class NetworkConnection(object):
    """One NetworkTables TCP connection with dedicated read/write threads.

    Incoming messages are decoded on the read thread and handed to the
    callback installed via set_process_incoming(); outgoing messages are
    coalesced by queueOutgoing()/postOutgoing() and flushed by the write
    thread. State transitions are serialized by m_state_mutex and
    announced through `notifier`.
    """

    class State(object):
        # Lifecycle states; printable names live in module-level _state_map.
        kCreated = 0
        kInit = 1
        kHandshake = 2
        kSynchronized = 3
        kActive = 4
        kDead = 5

    def __init__(self, uid, stream, notifier, handshake, get_entry_type, verbose=False):
        """
        :param uid: unique identifier for this connection
        :param stream: TCP stream this connection owns (closed on stop)
        :param notifier: emits connect/disconnect notifications
        :param handshake: callable(conn, get_msg, send_msgs) -> bool
        :param get_entry_type: maps entry id -> type, used while decoding
        :param verbose: when True, log every message sent/received
        """
        # logging debugging
        self.m_verbose = verbose
        self.m_uid = uid
        self.m_stream = stream
        self.m_notifier = notifier
        self.m_handshake = handshake
        self.m_get_entry_type = get_entry_type
        self.m_active = False
        self.m_proto_rev = 0x0300
        self.m_state = self.State.kCreated
        self.m_state_mutex = threading.Lock()
        self.m_last_update = 0
        self.m_outgoing = Queue()
        self.m_process_incoming = None
        self.m_read_thread = None
        self.m_write_thread = None
        self.m_remote_id_mutex = threading.Lock()
        self.m_remote_id = None
        self.m_last_post = 0
        self.m_pending_mutex = threading.Lock()
        self.m_pending_outgoing = []
        # entry id -> Pair(assign/update slot + 1, flags slot + 1); 0 = none
        self.m_pending_update = {}
        # Condition variables for shutdown
        self.m_shutdown_mutex = threading.Lock()
        # Not needed in python
        # self.m_read_shutdown_cv = threading.Condition()
        # self.m_write_shutdown_cv = threading.Condition()
        self.m_read_shutdown = False
        self.m_write_shutdown = False
        # turn off Nagle algorithm; we bundle packets for transmission
        try:
            self.m_stream.setNoDelay()
        except IOError as e:
            logger.warning("Setting TCP_NODELAY: %s", e)

    def start(self):
        """Activate the connection and spawn the worker threads (idempotent)."""
        if self.m_active:
            return
        self.m_active = True
        self.set_state(self.State.kInit)
        # clear queue
        try:
            while True:
                self.m_outgoing.get_nowait()
        except Empty:
            pass
        # reset shutdown flags
        with self.m_shutdown_mutex:
            self.m_read_shutdown = False
            self.m_write_shutdown = False
        # start threads (NOTE(review): no explicit .start() call here —
        # presumably SafeThread starts itself on construction; verify)
        self.m_write_thread = SafeThread(
            target=self._writeThreadMain, name="nt-net-write"
        )
        self.m_read_thread = SafeThread(target=self._readThreadMain, name="nt-net-read")

    def __repr__(self):
        try:
            return "<NetworkConnection 0x%x %s>" % (id(self), self.info())
        except Exception:
            return "<NetworkConnection 0x%x ???>" % id(self)

    def stop(self):
        """Mark the connection dead, unblock both threads, and join them."""
        logger.debug("NetworkConnection stopping (%s)", self)
        if not self.m_active:
            return
        self.set_state(self.State.kDead)
        self.m_active = False
        # closing the stream so the read thread terminates
        self.m_stream.close()
        # send an empty outgoing message set so the write thread terminates
        self.m_outgoing.put([])
        # wait for threads to terminate, timeout
        self.m_write_thread.join(1)
        if self.m_write_thread.is_alive():
            logger.warning("%s did not die", self.m_write_thread.name)
        self.m_read_thread.join(1)
        if self.m_read_thread.is_alive():
            # BUG FIX: the original logged the *write* thread's name here
            logger.warning("%s did not die", self.m_read_thread.name)
        # clear queue
        try:
            while True:
                self.m_outgoing.get_nowait()
        except Empty:
            pass

    def get_proto_rev(self):
        return self.m_proto_rev

    def get_stream(self):
        return self.m_stream

    def info(self):
        """Return a ConnectionInfo snapshot for notifications/diagnostics."""
        return ConnectionInfo(
            self.remote_id(),
            self.m_stream.getPeerIP(),
            self.m_stream.getPeerPort(),
            self.m_last_update,
            self.m_proto_rev,
        )

    def is_connected(self):
        return self.m_state == self.State.kActive

    def last_update(self):
        # timestamp (monotonic) of the last message received
        return self.m_last_update

    def set_process_incoming(self, func):
        self.m_process_incoming = func

    def set_proto_rev(self, proto_rev):
        self.m_proto_rev = proto_rev

    def set_state(self, state):
        """Transition the connection state, emitting one-shot notifications."""
        with self.m_state_mutex:
            State = self.State
            # Don't update state any more once we've died
            if self.m_state == State.kDead:
                return
            # One-shot notify state changes
            if self.m_state != State.kActive and state == State.kActive:
                info = self.info()
                self.m_notifier.notifyConnection(True, info)
                logger.info(
                    "CONNECTED %s port %s (%s)",
                    info.remote_ip,
                    info.remote_port,
                    info.remote_id,
                )
            elif self.m_state != State.kDead and state == State.kDead:
                info = self.info()
                self.m_notifier.notifyConnection(False, info)
                logger.info(
                    "DISCONNECTED %s port %s (%s)",
                    info.remote_ip,
                    info.remote_port,
                    info.remote_id,
                )
            if self.m_verbose:
                logger.debug(
                    "%s: %s -> %s", self, _state_map[self.m_state], _state_map[state]
                )
            self.m_state = state

    def state(self):
        return self.m_state

    def remote_id(self):
        with self.m_remote_id_mutex:
            return self.m_remote_id

    def set_remote_id(self, remote_id):
        with self.m_remote_id_mutex:
            self.m_remote_id = remote_id

    def uid(self):
        return self.m_uid

    def _sendMessages(self, msgs):
        # bypasses coalescing; used directly by the handshake
        self.m_outgoing.put(msgs)

    def _readThreadMain(self):
        """Read thread: run the handshake, then decode and dispatch messages."""
        decoder = WireCodec(self.m_proto_rev)
        verbose = self.m_verbose

        def _getMessage():
            decoder.set_proto_rev(self.m_proto_rev)
            try:
                return Message.read(self.m_stream, decoder, self.m_get_entry_type)
            except IOError as e:
                logger.warning("read error in handshake: %s", e)
                # terminate connection on bad message
                self.m_stream.close()
                return None

        self.set_state(self.State.kHandshake)
        try:
            handshake_success = self.m_handshake(self, _getMessage, self._sendMessages)
        except Exception:
            logger.exception("Unhandled exception during handshake")
            handshake_success = False
        if not handshake_success:
            self.set_state(self.State.kDead)
            self.m_active = False
        else:
            self.set_state(self.State.kActive)
            try:
                while self.m_active:
                    if not self.m_stream:
                        break
                    decoder.set_proto_rev(self.m_proto_rev)
                    try:
                        msg = Message.read(
                            self.m_stream, decoder, self.m_get_entry_type
                        )
                    except Exception as e:
                        if not isinstance(e, StreamEOF):
                            if verbose:
                                logger.exception("read error")
                            else:
                                logger.warning("read error: %s", e)
                        # terminate connection on bad message
                        self.m_stream.close()
                        break
                    if verbose:
                        logger.debug(
                            "%s received type=%s with str=%s id=%s seq_num=%s value=%s",
                            self.m_stream.sock_type,
                            msgtype_str(msg.type),
                            msg.str,
                            msg.id,
                            msg.seq_num_uid,
                            msg.value,
                        )
                    self.m_last_update = monotonic()
                    self.m_process_incoming(msg, self)
            except IOError as e:
                # connection died probably
                logger.debug("IOError in read thread: %s", e)
            except Exception:
                logger.warning("Unhandled exception in read thread", exc_info=True)
        self.set_state(self.State.kDead)
        self.m_active = False
        # also kill write thread
        self.m_outgoing.put([])
        with self.m_shutdown_mutex:
            self.m_read_shutdown = True

    def _writeThreadMain(self):
        """Write thread: drain the outgoing queue, encode, and send in batches."""
        encoder = WireCodec(self.m_proto_rev)
        verbose = self.m_verbose
        out = []
        try:
            while self.m_active:
                msgs = self.m_outgoing.get()
                if verbose:
                    logger.debug("write thread woke up")
                if not msgs:
                    # empty batch is the shutdown signal
                    continue
                encoder.set_proto_rev(self.m_proto_rev)
                if verbose:
                    logger.debug(
                        "%s sending %s messages", self.m_stream.sock_type, len(msgs)
                    )
                for msg in msgs:
                    if msg:
                        if verbose:
                            logger.debug(
                                "%s sending type=%s with str=%s id=%s seq_num=%s value=%s",
                                self.m_stream.sock_type,
                                msgtype_str(msg.type),
                                msg.str,
                                msg.id,
                                msg.seq_num_uid,
                                msg.value,
                            )
                        Message.write(msg, out, encoder)
                if not self.m_stream:
                    break
                if not out:
                    continue
                self.m_stream.send(b"".join(out))
                del out[:]
                # if verbose:
                #    logger.debug('send %s bytes', encoder.size())
        except IOError as e:
            # connection died probably
            if not isinstance(e, StreamEOF):
                logger.debug("IOError in write thread: %s", e)
        except Exception:
            logger.warning("Unhandled exception in write thread", exc_info=True)
        self.set_state(self.State.kDead)
        self.m_active = False
        self.m_stream.close()  # also kill read thread
        with self.m_shutdown_mutex:
            self.m_write_shutdown = True

    def queueOutgoing(self, msg):
        """Queue `msg`, coalescing redundant updates for the same entry id."""
        with self.m_pending_mutex:
            # Merge with previous. One case we don't combine: delete/assign loop.
            msgtype = msg.type
            if msgtype in [kEntryAssign, kEntryUpdate]:
                # don't do this for unassigned id's
                msg_id = msg.id
                if msg_id == 0xFFFF:
                    self.m_pending_outgoing.append(msg)
                    return
                mpend = self.m_pending_update.get(msg_id)
                if mpend is not None and mpend.first != 0:
                    # overwrite the previous one for this id
                    oldidx = mpend.first - 1
                    oldmsg = self.m_pending_outgoing[oldidx]
                    if (
                        oldmsg
                        and oldmsg.type == kEntryAssign
                        and msgtype == kEntryUpdate
                    ):
                        # need to update assignment with seq_num and value
                        oldmsg = Message.entryAssign(
                            oldmsg.str, msg_id, msg.seq_num_uid, msg.value, oldmsg.flags
                        )
                    else:
                        oldmsg = msg  # easy update
                    self.m_pending_outgoing[oldidx] = oldmsg
                else:
                    # new, remember it
                    pos = len(self.m_pending_outgoing)
                    self.m_pending_outgoing.append(msg)
                    self.m_pending_update[msg_id] = Pair(pos + 1, 0)
            elif msgtype == kEntryDelete:
                # don't do this for unassigned id's
                msg_id = msg.id
                if msg_id == 0xFFFF:
                    self.m_pending_outgoing.append(msg)
                    return
                # clear previous updates
                mpend = self.m_pending_update.get(msg_id)
                if mpend is not None:
                    if mpend.first != 0:
                        self.m_pending_outgoing[mpend.first - 1] = None
                    if mpend.second != 0:
                        self.m_pending_outgoing[mpend.second - 1] = None
                    self.m_pending_update[msg_id] = _empty_pair
                # add deletion
                self.m_pending_outgoing.append(msg)
            elif msgtype == kFlagsUpdate:
                # don't do this for unassigned id's
                msg_id = msg.id
                # BUG FIX: the original compared the builtin `id` function
                # against 0xFFFF (always False), so unassigned flag updates
                # were wrongly fed into the coalescing logic below.
                if msg_id == 0xFFFF:
                    self.m_pending_outgoing.append(msg)
                    return
                mpend = self.m_pending_update.get(msg_id)
                if mpend is not None and mpend.second != 0:
                    # overwrite the previous one for this id
                    self.m_pending_outgoing[mpend.second - 1] = msg
                else:
                    # new, remember it
                    pos = len(self.m_pending_outgoing)
                    self.m_pending_outgoing.append(msg)
                    self.m_pending_update[msg_id] = Pair(0, pos + 1)
            elif msgtype == kClearEntries:
                # knock out all previous assigns/updates!
                for i, m in enumerate(self.m_pending_outgoing):
                    if not m:
                        continue
                    t = m.type
                    if t in [
                        kEntryAssign,
                        kEntryUpdate,
                        kFlagsUpdate,
                        kEntryDelete,
                        kClearEntries,
                    ]:
                        self.m_pending_outgoing[i] = None
                self.m_pending_update.clear()
                self.m_pending_outgoing.append(msg)
            else:
                self.m_pending_outgoing.append(msg)

    def postOutgoing(self, keep_alive):
        """Flush pending messages to the write thread; send keep-alives when idle."""
        with self.m_pending_mutex:
            now = monotonic()
            if not self.m_pending_outgoing:
                if not keep_alive:
                    return
                # send keep-alives once a second (if no other messages have been sent)
                if (now - self.m_last_post) < 1.0:
                    return
                self.m_outgoing.put((Message.keepAlive(),))
            else:
                self.m_outgoing.put(self.m_pending_outgoing)
                self.m_pending_outgoing = []
                self.m_pending_update.clear()
            self.m_last_post = now
|
from dotenv import load_dotenv
load_dotenv()  # pull forum_url / account_password from a local .env file
import os
from pyflarum.client import FlarumUser
from pyflarum.client.flarum.core.posts import PostFromNotification
from pyflarum.client.extensions import absolutely_all
# Mixins applied to FlarumUser; AbsolutelyAllExtension provides the
# absolutely_all_notifications() pagination helper used below.
EXTENSIONS = [
    absolutely_all.AbsolutelyAllExtension
]
# NOTE: constructing the user logs into the forum at import time.
USER = FlarumUser(forum_url=os.environ['forum_url'], username_or_email='test', password=os.environ['account_password'], extensions=EXTENSIONS) # type: absolutely_all.AbsolutelyAllFlarumUserMixin
def all_notifications():
    """Print every notification id; for unread post notifications, also print the post body."""
    for page in USER.absolutely_all_notifications():
        for notification in page:
            print(notification.id)
            if notification.isRead:
                continue
            subject = notification.get_subject()
            if isinstance(subject, PostFromNotification):
                print(subject.content)
if __name__ == "__main__":
    # Script entry point (network access already happened when USER was built).
    print(f"{USER.username}'s notification IDs:")
    all_notifications()
|
#!/usr/bin/python
"""
get sensor date from GLT export File
sort of power consumption, temperature, and climate data
there is a big utf-8 issue, because the keys are utf-8 encoded
decode early
unicode everywhere
encode late
"""
import requests
import datetime
import os
def get_data(url, headers):
    """Fetch a semicolon-separated CSV from `url` and return a list of dicts.

    The first response line (CSV header) is skipped. Column 0 is a local
    datetime string ('%d.%m.%Y %H:%M') and is rewritten as a Unix
    timestamp string before zipping each row with `headers`.
    """
    res = requests.request(u"GET", url)
    data = []
    # FIX: dropped the unused `result = res.text` local and iterate the
    # response text directly.
    for line in res.text.split(u"\n")[1:]:
        if len(line) == 0:
            continue
        values = line.strip().split(u";")
        # convert custom datetime <str> to datetime object
        dt = datetime.datetime.strptime(values[0], '%d.%m.%Y %H:%M')
        # seconds since 1970-01-01 (naive local time — no TZ handling)
        values[0] = u"%d" % (dt - datetime.datetime(1970, 1, 1)).total_seconds()
        row_dict = dict(zip(headers, values))
        data.append(row_dict)
    return data
def main():
    """Download the GLT energy-monitor export and append rows to per-day raw CSVs.

    Python 2 module (uses the print statement and late utf-8 encoding).
    """
    basedir = u"/var/rrd"
    project = u"glt"
    tablename = u"energiemonitor"
    url = u"http://srvebien01.tilak.cc/energiemonitor_export.csv"
    raw_basedir = os.path.join(basedir, project, u"raw")
    if not os.path.isdir(raw_basedir):
        print u"creating directory for raw data"
        os.makedirs(raw_basedir)
    headers = [u'ts', u'standort', u'bezeichnung', u'wert', u'einheit']
    # NOTE(review): index_keys, value_keys, ts_keyname and outbuffer are
    # never used below — presumably leftovers from an earlier design.
    index_keys = [u'standort', u'bezeichnung']
    value_keys = [u'wert', ]
    ts_keyname = u'ts'
    delimiter = u"\t"
    outbuffer = {}
    for row_dict in get_data(url, headers):
        # one output file per calendar day, derived from the row timestamp
        datestring = datetime.datetime.fromtimestamp(float(row_dict[u"ts"])).date().isoformat()
        outfilename = os.path.join(raw_basedir, u"%s_%s.csv" % (tablename, datestring))
        fh = None
        # NOTE(review): outfilename is already absolute, so the extra
        # os.path.join(raw_basedir, outfilename) below is a no-op.
        if not os.path.isfile(outfilename):
            # if this file will be created, write header in first line
            fh = open(os.path.join(raw_basedir, outfilename), "w")
            # encode late
            fh.write(delimiter.join(headers).encode("utf-8") + "\n")
        else:
            fh = open(os.path.join(raw_basedir, outfilename), "a")
        # encode late
        fh.write(delimiter.join([row_dict[key] for key in headers]).encode("utf-8") + "\n")
        fh.close()
if __name__ == "__main__":
    # script entry point
    main()
|
# Generated by Django 2.0 on 2018-09-21 11:19
import base.models
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated: converts Key.g/p/x/y to the custom BigBigField type
    # (Key.x additionally becomes optional: blank/null allowed).
    dependencies = [
        ('base', '0002_auto_20180921_1056'),
    ]
    operations = [
        migrations.AlterField(
            model_name='key',
            name='g',
            field=base.models.BigBigField(),
        ),
        migrations.AlterField(
            model_name='key',
            name='p',
            field=base.models.BigBigField(),
        ),
        migrations.AlterField(
            model_name='key',
            name='x',
            field=base.models.BigBigField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='key',
            name='y',
            field=base.models.BigBigField(),
        ),
    ]
|
"""
Tests for games.
"""
import unittest
from unit_tests import ModuleTest
from copy import deepcopy
from gym.envs.classic_control import cartpole, mountain_car
from spikey import Key
from spikey import games
from spikey.meta import metagames
class FakeTrainingLoop:
    """Stand-in training loop for meta-game tests.

    copy() returns the shared instance, reset() is a no-op, and calling
    the loop yields a fixed [network, game, results, info] placeholder.
    """

    def __init__(self):
        pass

    def copy(self):
        # sharing one instance is fine for these tests
        return self

    def reset(self, *args, **kwargs):
        pass

    def __call__(self, *args, **kwargs):
        return [None, None, {}, {}]
def fitness_getter(*_unused):
    """Constant-zero fitness function used in meta-game test configs."""
    return 0
class TestGame(unittest.TestCase, ModuleTest):
    """
    Tests for games.Game.
    """
    # Only the abstract RL template is exercised; no extra config needed.
    TYPES = [games.template.RL]
    BASE_CONFIG = {}
class TestRL(unittest.TestCase, ModuleTest):
    """
    Tests for games.RL.
    """
    # Concrete RL game types exercised by each @run_all_types test,
    # including gym environments adapted through games.gym_wrapper.
    TYPES = [
        games.Logic,
        games.CartPole,
        games.gym_wrapper(cartpole.CartPoleEnv),
        games.gym_wrapper(mountain_car.MountainCarEnv),
    ]
    BASE_CONFIG = {}
    @ModuleTest.run_all_types
    def test_init(self):
        # Subclassing a game type with extended NECESSARY_KEYS should let
        # explicit kwargs override values while untouched keys fall back
        # to the class-level config.
        game_type = type(self.get_obj())
        class game_template(game_type):
            NECESSARY_KEYS = game_type.extend_keys(
                [
                    Key("a", "a"),
                    Key("b", "b"),
                ]
            )
        config = deepcopy(self.BASE_CONFIG)
        config.update({"a": 10, "b": 20})
        # NOTE(review): `config` is never passed to the game below —
        # presumably game_template.config is populated by the framework;
        # confirm this local is intentionally unused.
        a = 11
        game = game_template(a=a)
        self.assertEqual(game.params["a"], a)
        self.assertEqual(game.params["b"], game_template.config["b"])
    @ModuleTest.run_all_types
    def test_usage(self):
        # Smoke test: reset, then step with a constant action until done
        # or 100 steps elapse.
        game = self.get_obj()
        state = game.reset()
        for _ in range(100):
            state, reward, done, info = game.step(0)
            if done:
                break
        game.close()
class TestMetaRL(unittest.TestCase, ModuleTest):
    """
    Tests for games.MetaRL.
    """
    TYPES = [
        metagames.MetaNQueens,
        metagames.EvolveNetwork,
    ]
    # Minimal viable meta-game config: a fake training loop and a
    # constant fitness function (defined above in this module).
    BASE_CONFIG = {
        "training_loop": FakeTrainingLoop(),
        "genotype_constraints": {"a": [1, 2, 3]},
        "win_fitness": 1,
        "fitness_getter": fitness_getter,
    }
    @ModuleTest.run_all_types
    def test_init(self):
        # Same override-vs-default contract as TestRL.test_init, applied
        # to the meta-game types.
        game_type = type(self.get_obj())
        class game_template(game_type):
            NECESSARY_KEYS = game_type.extend_keys(
                [
                    Key("a", "a"),
                    Key("b", "b"),
                ]
            )
        config = deepcopy(self.BASE_CONFIG)
        config.update({"a": 10, "b": 20})
        a = 11
        game = game_template(a=a)
        self.assertEqual(game.params["a"], a)
        self.assertEqual(game.params["b"], game_template.config["b"])
    @ModuleTest.run_all_types
    def test_usage(self):
        # Smoke test: evaluate and step an all-zero genotype; AssertionError
        # from constraint checks is tolerated (some types reject zeros).
        game = self.get_obj()
        state = game.reset()
        for _ in range(100):
            genotype = {key: 0 for key in game.GENOTYPE_CONSTRAINTS.keys()}
            try:
                fitness = game.get_fitness(genotype)
            except AssertionError as e:
                pass
            try:
                state, fitness, done, info = game.step(genotype)
                if done:
                    break
            except AssertionError as e:
                pass
        game.close()
if __name__ == "__main__":
    # Run this module's tests directly.
    unittest.main()
|
import os
import os.path as osp
import glob
from setuptools import setup, find_packages
import torch
from torch.utils.cpp_extension import BuildExtension
from torch.utils.cpp_extension import CppExtension, CUDAExtension, CUDA_HOME
WITH_CUDA = torch.cuda.is_available() and CUDA_HOME is not None
# Build-flavor overrides from the environment; FORCE_CPU is checked last,
# so it wins when both variables are set.
if os.getenv('FORCE_CUDA', '0') == '1':
    WITH_CUDA = True
if os.getenv('FORCE_CPU', '0') == '1':
    WITH_CUDA = False
# Documentation builds skip compiling the native extensions entirely.
BUILD_DOCS = os.getenv('BUILD_DOCS', '0') == '1'
def get_extensions():
    """Build one C++/CUDA extension spec per top-level csrc/*.cpp file.

    Each main source optionally pulls in matching cpu/<name>_cpu.cpp and
    (when building with CUDA) cuda/<name>_cuda.cu companions.
    """
    use_cuda = WITH_CUDA
    ext_cls = CUDAExtension if use_cuda else CppExtension
    define_macros = [('WITH_CUDA', None)] if use_cuda else []
    extra_compile_args = {'cxx': []}
    if use_cuda:
        nvcc_flags = os.getenv('NVCC_FLAGS', '')
        nvcc_flags = nvcc_flags.split(' ') if nvcc_flags else []
        nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr']
        extra_compile_args['nvcc'] = nvcc_flags
    extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc')
    extensions = []
    for main in glob.glob(osp.join(extensions_dir, '*.cpp')):
        name = main.split(os.sep)[-1][:-4]  # strip directory and '.cpp'
        sources = [main]
        cpu_path = osp.join(extensions_dir, 'cpu', f'{name}_cpu.cpp')
        if osp.exists(cpu_path):
            sources.append(cpu_path)
        cuda_path = osp.join(extensions_dir, 'cuda', f'{name}_cuda.cu')
        if use_cuda and osp.exists(cuda_path):
            sources.append(cuda_path)
        extensions.append(ext_cls(
            'torch_cluster._' + name,
            sources,
            include_dirs=[extensions_dir],
            define_macros=define_macros,
            extra_compile_args=extra_compile_args,
        ))
    return extensions
install_requires = ['scipy']
setup_requires = ['pytest-runner']
tests_require = ['pytest', 'pytest-cov']
# Package metadata; native extensions are omitted for documentation builds.
setup(
    name='torch_cluster',
    version='1.5.4',
    author='Matthias Fey',
    author_email='matthias.fey@tu-dortmund.de',
    url='https://github.com/rusty1s/pytorch_cluster',
    description=('PyTorch Extension Library of Optimized Graph Cluster '
                 'Algorithms'),
    keywords=[
        'pytorch',
        'geometric-deep-learning',
        'graph-neural-networks',
        'cluster-algorithms',
    ],
    license='MIT',
    python_requires='>=3.6',
    install_requires=install_requires,
    setup_requires=setup_requires,
    tests_require=tests_require,
    ext_modules=get_extensions() if not BUILD_DOCS else [],
    cmdclass={
        'build_ext':
        BuildExtension.with_options(no_python_abi_suffix=True, use_ninja=False)
    },
    packages=find_packages(),
)
|
def print_formatted(number):
    """Print 1..number as decimal, octal, uppercase hex and binary columns,
    each right-aligned to the width of `number`'s binary representation."""
    width = len(bin(number)) - 2  # drop the '0b' prefix
    for value in range(1, number + 1):
        print(
            str(value).rjust(width),
            oct(value)[2:].rjust(width),
            hex(value)[2:].upper().rjust(width),
            bin(value)[2:].rjust(width),
        )
|
'''
Created on Jun 4, 2017
@author: Asus-PC
'''
|
import os
import pickle
import numpy as np
import pandas as pd
import feature_impact
from feature_generation import add_features
# Expected payload schema: integer target/flag columns, a region code,
# balance-sheet / P&L line items for years 0, -1 and -2 (float32), and
# okved industry-code strings. dict_to_df() keeps only the keys that are
# actually present in the incoming payload.
RESULT_DTYPES = {'BusinessModelRisk': np.int32,
                 'DesireToInvest': np.int32,
                 'ManagementShareholdersConflict': np.int32,
                 'NegativeShareholders': np.int32,
                 'OwnFundsTransaction': np.int32,
                 'OwnershipConflict': np.int32,
                 'PositiveShareholders': np.int32,
                 'RelevantRepayment': np.int32,
                 'WithdrawalFunds': np.int32,
                 'region': np.uint32,
                 'year_-1_1100': np.float32,
                 'year_-1_1150': np.float32,
                 'year_-1_1200': np.float32,
                 'year_-1_1210': np.float32,
                 'year_-1_1230': np.float32,
                 'year_-1_1250': np.float32,
                 'year_-1_1300': np.float32,
                 'year_-1_1310': np.float32,
                 'year_-1_1400': np.float32,
                 'year_-1_1500': np.float32,
                 'year_-1_1510': np.float32,
                 'year_-1_1520': np.float32,
                 'year_-1_1600': np.float32,
                 'year_-1_1700': np.float32,
                 'year_-1_2100': np.float32,
                 'year_-1_2110': np.float32,
                 'year_-1_2120': np.float32,
                 'year_-1_2200': np.float32,
                 'year_-1_2300': np.float32,
                 'year_-1_2400': np.float32,
                 'year_-1_okved': str,
                 'year_-2_1100': np.float32,
                 'year_-2_1150': np.float32,
                 'year_-2_1200': np.float32,
                 'year_-2_1210': np.float32,
                 'year_-2_1230': np.float32,
                 'year_-2_1250': np.float32,
                 'year_-2_1300': np.float32,
                 'year_-2_1310': np.float32,
                 'year_-2_1400': np.float32,
                 'year_-2_1500': np.float32,
                 'year_-2_1510': np.float32,
                 'year_-2_1520': np.float32,
                 'year_-2_1600': np.float32,
                 'year_-2_1700': np.float32,
                 'year_-2_2100': np.float32,
                 'year_-2_2110': np.float32,
                 'year_-2_2120': np.float32,
                 'year_-2_2200': np.float32,
                 'year_-2_2300': np.float32,
                 'year_-2_2400': np.float32,
                 'year_0_1100': np.float32,
                 'year_0_1150': np.float32,
                 'year_0_1200': np.float32,
                 'year_0_1210': np.float32,
                 'year_0_1230': np.float32,
                 'year_0_1250': np.float32,
                 'year_0_1300': np.float32,
                 'year_0_1310': np.float32,
                 'year_0_1400': np.float32,
                 'year_0_1500': np.float32,
                 'year_0_1510': np.float32,
                 'year_0_1520': np.float32,
                 'year_0_1600': np.float32,
                 'year_0_1700': np.float32,
                 'year_0_2100': np.float32,
                 'year_0_2110': np.float32,
                 'year_0_2120': np.float32,
                 'year_0_2200': np.float32,
                 'year_0_2300': np.float32,
                 'year_0_2400': np.float32,
                 'year_0_okved': str}
def get_models(path=''):
    """Unpickle every ``*.pkl`` model in `path`; return ``{model.name: model}``."""
    models = {}
    for filename in os.listdir(path):
        if filename.endswith('.pkl'):
            with open(os.path.join(path, filename), 'rb') as handle:
                model = pickle.load(handle)
            models[model.name] = model
    return models
def dict_to_df(data):
    """Convert a flat payload dict into a single-row DataFrame typed per
    RESULT_DTYPES.

    Only keys present in RESULT_DTYPES are kept. Float columns may arrive
    as strings with a decimal comma ('1,5'); the comma is normalized to a
    dot before casting so astype() does not fail.
    """
    df = pd.DataFrame.from_dict(data, orient='index').T
    res_types = {}
    for key, dtype in RESULT_DTYPES.items():
        if key in df.columns:
            res_types[key] = dtype
            # BUG FIX: the original compared against np.float (alias of the
            # builtin float, removed in NumPy 1.24) which never equals
            # np.float32 — so the decimal-comma normalization never ran.
            # Replace per value so non-string cells pass through untouched.
            if dtype == np.float32:
                df[key] = df[key].apply(
                    lambda v: v.replace(',', '.') if isinstance(v, str) else v)
    return df[res_types.keys()].astype(dtype=res_types)
class CalcHandler:
    """Facade over a directory of pickled models: loads them once and
    exposes probability scoring, stored plots, and feature-impact graphs
    keyed by model name."""
    def __init__(self, path):
        # path: directory holding one .pkl file per trained model
        self.models = get_models(path)
    def calc_probability(self, data):
        """Return {model_name: predict_proba result} for payload `data`.

        A model that fails to score yields NaN instead of raising, so one
        broken model cannot take down the whole response.
        """
        df = dict_to_df(data)
        add_features(df)
        result = {}
        for name, model in self.models.items():
            value = None
            try:
                value = model.predict_proba(df)
            except Exception as e:
                # deliberate best-effort: degrade this model's score to NaN
                value = float('nan')
            finally:
                result[name] = value
        return result
    def get_plots(self, name):
        # raises KeyError for unknown model names
        return self.models[name].plots
    def get_impact(self, name, data, feature, head, tail):
        """Render the feature-impact graph for `feature` as a base64 image."""
        df = dict_to_df(data)
        add_features(df)
        return feature_impact.plt_graph_to_base64(
            model=self.models[name],
            data=df,
            feature=feature,
            head=head,
            tail=tail
        )
|
import logging
import re
import sys

from django import template
from django.conf import settings
from django.template import (Node, Variable, TemplateSyntaxError, Library)
from django.template.base import Parser as TokenParser
from django.template.defaulttags import token_kwargs
from django.templatetags.i18n import BlockTranslateNode, TranslateNode
from django.utils import translation
from django.utils.html import mark_safe
import six
from six import reraise

from phrase import settings as phrase_settings
from phrase.compat import TOKEN_TEXT, TOKEN_VAR
from phrase.nodes import PhraseBlockTranslateNode, PhraseTranslateNode
register = template.Library()
@register.tag("translate")
@register.tag("trans")
def do_translate(parser, token):
    """
    This will mark a string for translation and will
    translate the string for the current language.
    Usage::
        {% trans "this is a test" %}
    This will mark the string for translation so it will
    be pulled out by mark-messages.py into the .po files
    and will run the string through the translation engine.
    There is a second form::
        {% trans "this is a test" noop %}
    This will only mark for translation, but will return
    the string unchanged. Use it when you need to store
    values into forms that should be translated later on.
    You can use variables instead of constant strings
    to translate stuff you marked somewhere else::
        {% trans variable %}
    This will just try to translate the contents of
    the variable ``variable``. Make sure that the string
    in there is something that is in the .po file.
    It is possible to store the translated string into a variable::
        {% trans "this is a test" as var %}
        {{ var }}
    Contextual translations are also supported::
        {% trans "this is a test" context "greeting" %}
    This is equivalent to calling pgettext instead of (u)gettext.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    remaining = bits[2:]
    noop = False
    asvar = None
    message_context = None
    seen = set()  # options already consumed; duplicates are an error
    # option keywords that cannot double as a context argument value
    invalid_context = {'as', 'noop'}
    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option,
            )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the context option." % bits[0]
                # re-raise as a template error while keeping the traceback
                # (`sys` must be imported at module level)
                reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]),
                )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the as option." % bits[0]
                reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", and 'as VAR'." % (
                    bits[0], option,
                )
            )
        seen.add(option)
    # Use the Phrase in-context-editing node when the integration is on;
    # otherwise fall back to Django's stock TranslateNode.
    if phrase_settings.PHRASE_ENABLED:
        return PhraseTranslateNode(message_string, noop, asvar, message_context)
    else:
        return TranslateNode(message_string, noop, asvar, message_context)
@register.tag("blocktrans")
def do_block_translate(parser, token):
    """Parse a ``{% blocktrans %}`` block tag.

    Supports the ``with``, ``count``, ``context`` and ``trimmed`` options and
    an optional ``{% plural %}`` section.  Returns a PhraseBlockTranslateNode
    when Phrase in-context editing is enabled, otherwise Django's regular
    BlockTranslateNode.
    """
    bits = token.split_contents()
    options = {}
    remaining_bits = bits[1:]
    # Consume tag options one token at a time; each may appear only once.
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more than once.' % option)
        if option == 'with':
            # "with" introduces extra context variables (legacy syntax supported).
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least one keyword argument.' % bits[0])
        elif option == 'count':
            # "count" must bind exactly one counter variable for pluralization.
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if len(value) != 1:
                raise TemplateSyntaxError('"count" in %r tag expected exactly one keyword argument.' % bits[0])
        elif option == "context":
            try:
                value = remaining_bits.pop(0)
                value = parser.compile_filter(value)
            except Exception:
                msg = ('"context" in %r tag expected exactly one argument.') % bits[0]
                reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
        elif option == "trimmed":
            value = True
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' % (bits[0], option))
        options[option] = value
    trimmed = options.get("trimmed", False)
    if 'count' in options:
        countervar, counter = list(six.iteritems(options['count']))[0]
    else:
        countervar, counter = None, None
    if 'context' in options:
        message_context = options['context']
    else:
        message_context = None
    extra_context = options.get('with', {})
    singular = []
    plural = []
    # Collect the singular form: text/variable tokens up to the first block tag
    # ({% plural %} or {% endblocktrans %}).
    while parser.tokens:
        token = parser.next_token()
        if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
            singular.append(token)
        else:
            break
    if countervar and counter:
        # A counter was given, so a {% plural %} section must follow.
        if token.contents.strip() != 'plural':
            raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it")
        while parser.tokens:
            token = parser.next_token()
            if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
                plural.append(token)
            else:
                break
    if token.contents.strip() != 'endblocktrans':
        raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents)
    if phrase_settings.PHRASE_ENABLED:
        node = PhraseBlockTranslateNode(extra_context, singular, plural, countervar, counter, message_context, trimmed)
    else:
        node = BlockTranslateNode(extra_context, singular, plural, countervar, counter, message_context, trimmed=trimmed)
    return node
@register.simple_tag
def phrase_javascript():
    """Render the Phrase in-context-editor bootstrap ``<script>`` tag.

    Returns an empty string when Phrase is disabled; otherwise the script
    snippet, filled in from the Phrase settings and marked safe for templates.
    """
    if not phrase_settings.PHRASE_ENABLED:
        return ''
    html = """<script>
window.PHRASEAPP_CONFIG = {
projectId: '%(project_id)s',
autoLowercase :false,
};
(function() {
var phrasejs = document.createElement('script');
phrasejs.type = 'text/javascript';
phrasejs.async = true;
phrasejs.src = ['%(protocol)s', '%(host)s/assets/in-context-editor/2.0/app.js?', new Date().getTime()].join('');
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(phrasejs, s); \
})();
</script>"""
    # Interpolate project id, protocol (per PHRASE_JS_USE_SSL) and host.
    formatted_html = html % dict(
        project_id=phrase_settings.PHRASE_PROJECT_ID,
        protocol='https://' if phrase_settings.PHRASE_JS_USE_SSL else 'http://',
        host=phrase_settings.PHRASE_JS_HOST,
    )
    return mark_safe(formatted_html)
|
import os
import sys
# NOTE(review): the joined path is computed and immediately discarded — this
# statement has no effect.  It was presumably meant to feed sys.path
# (e.g. sys.path.append(...)); confirm the original intent.
os.path.join(os.path.abspath(os.path.dirname(__file__)))
|
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 28 10:15:29 2017
@author: Kjell
"""
import time
import math
from AirSimClient import *
# connect to the AirSim simulator
# NOTE: module-level side effect — importing this script immediately connects
# to the simulator, takes API control, and arms the drone.
client = MultirotorClient()
client.confirmConnection()
client.enableApiControl(True)
client.armDisarm(True)
def straight(duration, speed):
    """Fly forward at ``speed`` along the current yaw heading at z = -6.

    Returns the wall-clock start time together with the requested duration
    so the caller can poll for completion.
    """
    _, _, yaw = client.getPitchRollYaw()
    forward_x = math.cos(yaw) * speed
    forward_y = math.sin(yaw) * speed
    client.moveByVelocityZ(forward_x, forward_y, -6, duration, DrivetrainType.ForwardOnly)
    return time.time(), duration
def take_action():
    """Fly straight for up to 5 s at speed 4, watching for collisions.

    Returns:
        bool: True if a collision was detected (the drone is also halted
        with a zero-velocity command), False if the manoeuvre finished.
    """
    # straight() already returns its own start timestamp and duration; the
    # previous placeholder assignments (start/duration/collided) were dead code.
    start, duration = straight(5, 4)  # for 5 sec with "speed" 4 or until it collides
    while duration > time.time() - start:
        if client.getCollisionInfo().has_collided:
            client.moveByVelocity(0, 0, 0, 1)  # stop the drone
            return True
    return False
def reset():
    """Reset the simulation, re-arm, and climb back to working altitude (z = -6)."""
    client.reset()
    client.enableApiControl(True)
    client.armDisarm(True)
    client.moveToZ(-6, 3)
    # Give the drone time to reach altitude before the next action.
    time.sleep(3)
if __name__ == "__main__":
    reset()
    # Run a long series of straight-flight episodes, resetting on collision.
    for idx in range(250000):  # 250k
        collided = take_action()
        if collided == True:
            reset()
        print("%d" % idx)
    # that's enough fun for now. let's quit cleanly
    client.enableApiControl(False)
|
import os
import unittest
try:
from unittest.mock import patch, call
except ImportError:
from mock import patch, call
from botocore.exceptions import ClientError as BotocoreClientError
from datadog.api.exceptions import ClientError
from datadog_lambda.metric import (
decrypt_kms_api_key,
lambda_metric,
ThreadStatsWriter,
KMS_ENCRYPTION_CONTEXT_KEY,
)
from datadog_lambda.tags import _format_dd_lambda_layer_tag
class TestLambdaMetric(unittest.TestCase):
    """Tests for lambda_metric() tagging and flush-to-log behaviour."""

    def setUp(self):
        # Patch the module-level stats client so no metrics are actually sent.
        patcher = patch("datadog_lambda.metric.lambda_stats")
        self.mock_metric_lambda_stats = patcher.start()
        self.addCleanup(patcher.stop)

    def test_lambda_metric_tagged_with_dd_lambda_layer(self):
        # Every call must gain the Datadog lambda-layer tag, appended last.
        lambda_metric("test", 1)
        lambda_metric("test", 1, 123, [])
        lambda_metric("test", 1, tags=["tag1:test"])
        expected_tag = _format_dd_lambda_layer_tag()
        self.mock_metric_lambda_stats.distribution.assert_has_calls(
            [
                call("test", 1, timestamp=None, tags=[expected_tag]),
                call("test", 1, timestamp=123, tags=[expected_tag]),
                call("test", 1, timestamp=None, tags=["tag1:test", expected_tag]),
            ]
        )

    def test_lambda_metric_flush_to_log(self):
        # With DD_FLUSH_TO_LOG set, metrics must not go through the stats client.
        os.environ["DD_FLUSH_TO_LOG"] = "True"
        lambda_metric("test", 1)
        self.mock_metric_lambda_stats.distribution.assert_not_called()
        del os.environ["DD_FLUSH_TO_LOG"]
class TestFlushThreadStats(unittest.TestCase):
    """Tests for ThreadStatsWriter flush retry behaviour."""

    def setUp(self):
        # Patch the HTTP reporter so no distributions are actually sent.
        patcher = patch(
            "datadog.threadstats.reporters.HttpReporter.flush_distributions"
        )
        self.mock_threadstats_flush_distributions = patcher.start()
        self.addCleanup(patcher.stop)

    def test_retry_on_remote_disconnected(self):
        # Raise the RemoteDisconnected error
        lambda_stats = ThreadStatsWriter(True)
        self.mock_threadstats_flush_distributions.side_effect = ClientError(
            "POST",
            "https://api.datadoghq.com/api/v1/distribution_points",
            "RemoteDisconnected('Remote end closed connection without response')",
        )
        lambda_stats.flush()
        # One original attempt plus exactly one retry.
        self.assertEqual(self.mock_threadstats_flush_distributions.call_count, 2)
# Lambda function name used as the KMS encryption-context value in the tests.
MOCK_FUNCTION_NAME = "myFunction"
# An API key encrypted with KMS and encoded as a base64 string
MOCK_ENCRYPTED_API_KEY_BASE64 = "MjIyMjIyMjIyMjIyMjIyMg=="
# The encrypted API key after it has been decoded from base64
MOCK_ENCRYPTED_API_KEY = "2222222222222222"
# The true value of the API key after decryption by KMS
EXPECTED_DECRYPTED_API_KEY = "1111111111111111"
class TestDecryptKMSApiKey(unittest.TestCase):
    """Tests for decrypt_kms_api_key() with and without an encryption context."""

    def test_key_encrypted_with_encryption_context(self):
        os.environ["AWS_LAMBDA_FUNCTION_NAME"] = MOCK_FUNCTION_NAME

        class MockKMSClient:
            # Accepts decryption only when the expected encryption context
            # (the lambda function name) is supplied.
            def decrypt(self, CiphertextBlob=None, EncryptionContext=None):
                # None default instead of a shared mutable {} default.
                EncryptionContext = EncryptionContext or {}
                if (
                    EncryptionContext.get(KMS_ENCRYPTION_CONTEXT_KEY)
                    != MOCK_FUNCTION_NAME
                ):
                    raise BotocoreClientError({}, "Decrypt")
                if CiphertextBlob == MOCK_ENCRYPTED_API_KEY.encode("utf-8"):
                    return {
                        "Plaintext": EXPECTED_DECRYPTED_API_KEY.encode("utf-8"),
                    }

        mock_kms_client = MockKMSClient()
        decrypted_key = decrypt_kms_api_key(
            mock_kms_client, MOCK_ENCRYPTED_API_KEY_BASE64
        )
        self.assertEqual(decrypted_key, EXPECTED_DECRYPTED_API_KEY)
        del os.environ["AWS_LAMBDA_FUNCTION_NAME"]

    def test_key_encrypted_without_encryption_context(self):
        class MockKMSClient:
            # Accepts decryption only when NO encryption context is supplied.
            def decrypt(self, CiphertextBlob=None, EncryptionContext=None):
                EncryptionContext = EncryptionContext or {}
                # "is not None" instead of "!= None" (identity check for None).
                if EncryptionContext.get(KMS_ENCRYPTION_CONTEXT_KEY) is not None:
                    raise BotocoreClientError({}, "Decrypt")
                if CiphertextBlob == MOCK_ENCRYPTED_API_KEY.encode("utf-8"):
                    return {
                        "Plaintext": EXPECTED_DECRYPTED_API_KEY.encode("utf-8"),
                    }

        mock_kms_client = MockKMSClient()
        decrypted_key = decrypt_kms_api_key(
            mock_kms_client, MOCK_ENCRYPTED_API_KEY_BASE64
        )
        self.assertEqual(decrypted_key, EXPECTED_DECRYPTED_API_KEY)
|
#!/usr/bin/python
import os, sys # low level handling, such as command line stuff
import string # string methods available
import re # regular expressions
import getopt # comand line argument handling
from low import * # custom functions, written by myself
from collections import defaultdict
import glob
import gff3
# =============================================================================
def show_help( ):
    """ displays the program parameter list and usage information """
    stdout( "usage: " + sys.argv[0] + " -f <path> ..." )
    stdout( " " )
    stdout( " option description" )
    stdout( " -h help (this text here)" )
    stdout( " -f flat file" )
    stdout( " -s separator of flat file (default: tab)" )
    stdout( " -a action [INSERT|UPDATE]" )
    stdout( " -t sql table name" )
    stdout( " " )
    stdout( " Field names are extracted from the header line (first line, must start with #)." )
    # user-facing help-text typo fixed: "irgnored" -> "ignored"
    stdout( " NULL named fields are ignored, the rest gets imported." )
    stdout( " UPDATES are only possible with a given ID. Thus, the header must contain " )
    stdout( " a column named ID which will be used to generate an UPDATE ... WHERE id='ID'." )
    stdout( " " )
    sys.exit(1)
# =============================================================================
def handle_arguments():
    """ verifies the presence of all necessary arguments and returns the data dir """
    if len ( sys.argv ) == 1:
        stderr( "no arguments provided." )
        show_help()
    try: # check for the right arguments
        keys, values = getopt.getopt( sys.argv[1:], "hf:a:s:t:" )
    except getopt.GetoptError:
        stderr( "invalid arguments provided." )
        show_help()
    # defaults: tab separator, INSERT mode
    args = {'separator':"\t", 'action':"INSERT"}
    for key, value in keys:
        if key == '-f': args['flatfile'] = value
        if key == '-s': args['separator'] = value
        if key == '-a': args['action'] = value.upper()
        if key == '-t': args['table'] = value
    # Validate required arguments: *file keys must exist on disk, *dir keys
    # must be directories, everything else must simply be present.
    # (Python 2 code: dict.has_key is intentional here.)
    for key in ['flatfile', 'separator', 'action', 'table']:
        if key.endswith("file"):
            if not args_file_exists(args, key): show_help()
        elif key.endswith("dir"):
            if not args_dir_exists(args, key): show_help()
        elif not args.has_key(key): show_help()
    return args
# =============================================================================
def get_blastout_hash(file):
    """Map each query id (first tab-separated field) of a BLAST tabular
    output file to 1."""
    seen = defaultdict(int)
    handle = open(file)
    for row in handle:
        seen[row.split("\t")[0]] = 1
    handle.close()
    return seen
# =============================================================================
def gather_blast_output(bdir):
    """Collect query-id hashes for every *.blastout file in ``bdir``,
    keyed by the first four characters of each file name."""
    results = {}
    for filepath in glob.glob(bdir + '/*.blastout'):
        species_key = os.path.split(filepath)[1][:4]
        results[species_key] = get_blastout_hash(filepath)
    return results
# =============================================================================
def get_scaffolds(file):
    """Parse a GFF3 file and return, per scaffold (seqid), the list of mRNA
    feature IDs ordered by start coordinate.  (Python 2 code: has_key /
    iteritems / iterkeys are intentional.)"""
    def add_feature(hash, gf):
        # Register the feature ID under its scaffold, keyed by start position.
        if not hash.has_key(gf.seqid): hash[gf.seqid] = {}
        hash[gf.seqid][gf.start] = gf.get_attributes()['ID']
        return hash
    hash = {}
    fo = open(file)
    for line in fo:
        if line.startswith("#"): continue
        gf = gff3.GeneFeature(line)
        # Only mRNA features are indexed.
        if gf.ftype != "mRNA": continue
        hash = add_feature(hash, gf)
    fo.close()
    # Flatten: scaffold -> feature IDs sorted by start coordinate.
    outhash = {}
    for scaffold, h in hash.iteritems():
        outhash[scaffold] = [h[key] for key in sorted(h.iterkeys())]
    return outhash
# =============================================================================
def gather_genes_on_scaffolds(gffdir):
    """Map four-letter species codes to their scaffold/gene-order hashes,
    one entry per *.gff file found in ``gffdir``."""
    per_species = {}
    for filepath in glob.glob(gffdir + '/*.gff'):
        species_key = os.path.split(filepath)[1][:4]
        per_species[species_key] = get_scaffolds(filepath)
    return per_species
# =============================================================================
def get_neighbors(pid, geneids):
    """Return up to three gene ids to the left and to the right of ``pid``
    within the ordered list ``geneids``, as a (left, right) tuple."""
    pos = geneids.index(pid)
    lower = max(pos - 3, 0)
    upper = min(pos + 4, len(geneids))
    return (geneids[lower:pos], geneids[pos + 1:upper])
# =============================================================================
def escape4sql(string):
    """Return ``string`` with single quotes backslash-escaped for SQL."""
    # str.replace is already a no-op when no quote is present, so the former
    # count() pre-check was redundant.
    return string.replace("'", "\\'")
# =============================================================================
# === MAIN ====================================================================
# =============================================================================
def main( args ):
    """Stream the flat file and print one SQL INSERT/UPDATE statement per data
    line, taking column names from the leading #-header line.  (Python 2.)"""
    fo = open(args['flatfile'])
    action, table, fieldnames = args['action'], args['table'], ""
    print "SET autocommit=0;"
    for line in fo:
        if line.startswith("#"):
            # First header line defines field names; later comment lines are skipped.
            if fieldnames == "": fieldnames = [e.strip().upper() for e in line[1:].split(args['separator'])]
            else: continue
        else:
            values = line.strip().split(args['separator'])
            sql = "%s " % action
            if action == "INSERT": sql += "INTO "
            sql += "`%s` SET " % table
            for i in range(len(fieldnames)):
                # NULL-named columns are skipped; ID is skipped for UPDATEs
                # (it is used in the WHERE clause below instead).
                if fieldnames[i] == "NULL": continue
                if fieldnames[i] == "ID" and action == "UPDATE": continue
                if not sql.endswith(" "): sql += ", "
                sql += "%s='%s'" %(fieldnames[i], escape4sql(values[i]))
            if action == "UPDATE": sql += " WHERE ID='%s'" % values[fieldnames.index("ID")]
            print sql + ";"
    print "COMMIT;"
# =============================================================================
# Script entry point: parse the command line, then emit the SQL dump.
args = handle_arguments()
main( args )
|
from django.db import models
# Create your models here.
# each class -- a table in db
# each var in class -- column in that table
class Post(models.Model):
    """A simple blog entry: a short title plus free-form body text."""

    title = models.CharField(max_length=240)
    body = models.TextField()

    def __str__(self):
        """Use the title as the human-readable representation."""
        return self.title
|
from django.conf import settings
from django.utils.module_loading import autodiscover_modules
# Django app configuration entry point for this package.
default_app_config = 'djangae.contrib.search.apps.SearchConfig'
# Name of the settings key that selects the task queue used for search
# indexing; falls back to the "default" queue when the setting is absent.
DJANGAE_SEARCH_QUEUE_SETTING = "DJANGAE_SEARCH_QUEUE"
_SEARCH_QUEUE = getattr(settings, DJANGAE_SEARCH_QUEUE_SETTING, "default")
def autodiscover():
    """Import every installed app's ``search`` module so its registrations run."""
    # This will find all the search.py modules and add them to model_document._registry
    # The act of importing them will call any register(model, document) statements
    # in that file.
    from djangae.contrib.search import model_document
    autodiscover_modules('search', register_to=model_document)
|
from flask import Flask, render_template
from bokeh.embed import server_document
app = Flask(__name__)

@app.route('/', methods=['GET'])
def bkapp_page():
    """Serve the page embedding the Bokeh app served at localhost:5006."""
    # server_document() returns a <script> tag that loads the Bokeh session.
    script = server_document('http://localhost:5006/bkapp')
    return render_template("embed.html", script=script, template="Flask")
if __name__ == '__main__':
    # Bind to all interfaces so the embedding page is reachable externally.
    app.run(host='0.0.0.0', port=5000)
#! /usr/bin/env python
# Viewer for archives packaged by archive.py
# Copyright (C) 2005-2011, Giovanni Bajo
# Based on previous work under copyright (c) 2002 McMillan Enterprises, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
try:
import PyInstaller
except ImportError:
# if importing PyInstaller fails, try to load from parent
# directory to support running without installation
import imp, os
if not hasattr(os, "getuid") or os.getuid() != 0:
imp.load_module('PyInstaller', *imp.find_module('PyInstaller',
[os.path.dirname(os.path.dirname(__file__))]))
from PyInstaller.loader import archive, carchive
import PyInstaller.log
import tempfile, os
try:
import zlib
except ImportError:
zlib = archive.DummyZlib()
import pprint
import optparse
# Shared interactive-session state (set by main()).
stack = []      # (name, archive) pairs; last element is the archive being browsed
cleanup = []    # temp files extracted from embedded archives, removed on exit
name = None     # path of the root archive
debug = False   # -l: one-shot log mode
rec_debug = False  # -r: recurse into embedded archives
brief = False   # -b: names only
def main(opts, args):
    """Interactive (or one-shot with -l/-b) browser for a PyInstaller archive.

    args[0] is the archive path.  In log/brief mode the table of contents is
    printed and the process exits; otherwise a small command loop (U/O/X/Q)
    lets the user walk into embedded archives and extract entries.
    (Python 2 code.)
    """
    global stack
    global debug
    global rec_debug
    global name
    global brief
    name = args[0]
    debug = opts.log
    rec_debug = opts.rec
    brief = opts.brief
    if not os.path.isfile(name):
        print "%s is an invalid file name!" % name
        return 1
    arch = getArchive(name)
    stack.append((name, arch))
    if debug or brief:
        # Non-interactive mode: dump the log and quit immediately.
        show_log(name, arch)
        raise SystemExit(0)
    else:
        show(name, arch)
    # Interactive command loop.
    while 1:
        try:
            toks = raw_input('? ').split(None, 1)
        except EOFError:
            # Ctrl-D
            print # clear line
            break
        if not toks:
            usage()
            continue
        if len(toks) == 1:
            cmd = toks[0]
            arg = ''
        else:
            cmd, arg = toks
        cmd = cmd.upper()
        if cmd == 'U':
            # Go up one level: close and pop the current archive (never the root).
            if len(stack) > 1:
                arch = stack[-1][1]
                arch.lib.close()
                del stack[-1]
            nm, arch = stack[-1]
            show(nm, arch)
        elif cmd == 'O':
            # Open an embedded archive and descend into it.
            if not arg:
                arg = raw_input('open name? ')
            arg = arg.strip()
            arch = getArchive(arg)
            if arch is None:
                print arg, "not found"
                continue
            stack.append((arg, arch))
            show(arg, arch)
        elif cmd == 'X':
            # Extract an entry to a file, or dump its repr to the console.
            if not arg:
                arg = raw_input('extract name? ')
            arg = arg.strip()
            data = getData(arg, arch)
            if data is None:
                print "Not found"
                continue
            fnm = raw_input('to filename? ')
            if not fnm:
                print `data`
            else:
                open(fnm, 'wb').write(data)
        elif cmd == 'Q':
            break
        else:
            usage()
    # Clean up: close every archive still open and remove temp files.
    for (nm, arch) in stack:
        arch.lib.close()
    stack = []
    for fnm in cleanup:
        try:
            os.remove(fnm)
        except Exception, e:
            print "couldn't delete", fnm, e.args
def usage():
    """Print the interactive command summary."""
    print "U: go Up one level"
    print "O <nm>: open embedded archive nm"
    print "X <nm>: extract nm"
    print "Q: quit"
def getArchive(nm):
    """Open archive ``nm``.

    At the top level this opens a file from disk (.pyz -> ZlibArchive,
    anything else -> CArchive); at deeper levels it opens an archive embedded
    in the current one, extracting to a temp file when it cannot be opened
    in place.  Returns None when the name is not found.
    """
    if not stack:
        if nm[-4:].lower() == '.pyz':
            return ZlibArchive(nm)
        return carchive.CArchive(nm)
    parent = stack[-1][1]
    try:
        return parent.openEmbedded(nm)
    except KeyError, e:
        return None
    except (ValueError, RuntimeError):
        # Cannot be opened in place: extract to a temp file and open that.
        ndx = parent.toc.find(nm)
        dpos, dlen, ulen, flag, typcd, nm = parent.toc[ndx]
        x, data = parent.extract(ndx)
        tfnm = tempfile.mktemp()
        cleanup.append(tfnm)
        open(tfnm, 'wb').write(data)
        if typcd == 'z':
            return ZlibArchive(tfnm)
        else:
            return carchive.CArchive(tfnm)
def getData(nm, arch):
    """Return the raw contents of entry ``nm`` in ``arch``, or None if absent."""
    if type(arch.toc) is type({}):
        # ZlibArchive: toc is a dict of name -> (ispkg, pos, len).
        (ispkg, pos, lngth) = arch.toc.get(nm, (0, None, 0))
        if pos is None:
            return None
        arch.lib.seek(arch.start + pos)
        return zlib.decompress(arch.lib.read(lngth))
    # CArchive: toc is a table addressed by index.
    ndx = arch.toc.find(nm)
    dpos, dlen, ulen, flag, typcd, nm = arch.toc[ndx]
    x, data = arch.extract(ndx)
    return data
def show(nm, arch):
    """Pretty-print the table of contents of ``arch``."""
    if type(arch.toc) == type({}):
        # ZlibArchive: dict-shaped toc.
        print " Name: (ispkg, pos, len)"
        toc = arch.toc
    else:
        # CArchive: tabular toc.
        print " pos, length, uncompressed, iscompressed, type, name"
        toc = arch.toc.data
    pprint.pprint(toc)
def show_log(nm, arch, output=[]):
    """Print the archive log, optionally recursing into embedded archives.

    NOTE(review): ``output=[]`` is a mutable default shared across calls; the
    recursion deliberately accumulates into the same list, but a second
    top-level call in the same process would reuse (and re-print) it.
    """
    if type(arch.toc) == type({}):
        # ZlibArchive: dict-shaped toc.
        toc = arch.toc
        if brief:
            for name,_ in toc.items():
                output.append(name)
        else:
            pprint.pprint(toc)
    else:
        toc = arch.toc.data
        for el in toc:
            if brief:
                output.append(el[5])
            else:
                output.append(el)
            if rec_debug:
                # 'z'/'a' entries are nested archives: recurse into them.
                if el[4] in ('z', 'a'):
                    show_log(el[5], getArchive(el[5]), output)
                    stack.pop()
    pprint.pprint(output)
class ZlibArchive(archive.ZlibArchive):
    """ZlibArchive variant that only warns (instead of failing) when the pyz
    was produced by a different Python version."""

    def checkmagic(self):
        """ Overridable.
        Check to see if the file object self.lib actually has a file
        we understand.
        """
        self.lib.seek(self.start) #default - magic is at start of file
        if self.lib.read(len(self.MAGIC)) != self.MAGIC:
            raise RuntimeError("%s is not a valid %s archive file"
                               % (self.path, self.__class__.__name__))
        if self.lib.read(len(self.pymagic)) != self.pymagic:
            # Tolerate a version mismatch; contents may still be inspectable.
            print "Warning: pyz is from a different Python version"
        self.lib.read(4)
# Command-line interface.
parser = optparse.OptionParser('%prog [options] pyi_archive')
parser.add_option('-l', '--log',
                  default=False,
                  action='store_true',
                  dest='log',
                  help='Print an archive log (default: %default)')
parser.add_option('-r', '--recursive',
                  default=False,
                  action='store_true',
                  dest='rec',
                  # help-text typo fixed: "Recusively" -> "Recursively"
                  help='Recursively print an archive log (default: %default). Can be combined with -r')
parser.add_option('-b', '--brief',
                  default=False,
                  action='store_true',
                  dest='brief',
                  help='Print only file name. (default: %default). Can be combined with -r')
PyInstaller.log.__add_options(parser)
opts, args = parser.parse_args()
PyInstaller.log.__process_options(parser, opts)

if len(args) != 1:
    parser.error('Requires exactly one pyinstaller archive')

try:
    # main() returns an exit status; propagate it via SystemExit.
    raise SystemExit(main(opts, args))
except KeyboardInterrupt:
    raise SystemExit("Aborted by user request.")
|
import json
import os
import re
from flask import current_app
import src.flag
import src.md.md
from .cache import get_file_cache
from .const import index_url_key, index_title_key, index_parent_key, index_id_key, index_highlight_key, \
index_top_key, index_notags_key, index_fixed_key, index_tags_key, index_date_key, index_path_key, \
articles_url_name, attachments_url_name, index_bereferenced_key, index_noheader_key, index_nofooter_key, \
index_update_key
from .util import regexp_join, get_articles_dir_abspath, compute_digest_by_abspath, compute_digest_by_data, \
update_config_ignore_file_list, get_unique_find_dict, get_tag_parents, get_date_parents, clean_link
# Load the index file data.
def get_index_data():
    """Read and deserialize the JSON index file from the articles directory."""
    # Build the absolute path of the index file.
    index_file_path = os.path.join(get_articles_dir_abspath(), current_app.config["INDEX_FILE_NAME"])
    return json.loads(get_file_cache(index_file_path))
def get_tags_parents():
    """Return the tag-parents block (third element) of the index data."""
    return get_index_data()[2]
# Look up an index entry by its relative path.
def get_item_by_path(path):
    """Return the article/attachment index entry for ``path``, or {}."""
    return next(
        (block[path] for block in get_index_data()[:2] if path in block),
        {},
    )
# Look up an index entry by its URL.
def get_item_by_url(url):
    """Return the article/attachment index entry whose URL equals ``url``, or {}."""
    for block in get_index_data()[:2]:
        for entry in block.values():
            if entry[index_url_key] == url:
                return entry
    return {}
# Return the list of articles with a fixed index entry.
def get_fixed_articles():
    """Return fixed articles (all articles when not freezing), pinned ones
    first, each group sorted newest-first by update time (fallback: date)."""
    fixed_articles = []
    top_articles = []
    index_data = get_index_data()
    if index_data:
        articles_block = index_data[0]
        # Walk the articles block.
        for article_path in articles_block:
            article = articles_block[article_path]
            # Collect fixed-index articles (all of them when not freezing).
            if not current_app.config["IS_FREEZE"] or article[index_fixed_key]:
                if article[index_top_key]:
                    top_articles.append(article)
                else:
                    fixed_articles.append(article)
        # Sort newest-first by update time, falling back to creation date.
        fixed_articles.sort(key=lambda o: o[index_update_key] if o[index_update_key] else o[index_date_key],
                            reverse=True)
        top_articles.sort(key=lambda o: o[index_update_key] if o[index_update_key] else o[index_date_key], reverse=True)
        # Pinned ("top") articles come before the rest.
        fixed_articles = top_articles + fixed_articles
    return fixed_articles
# Search filter over the index: takes a list of (index key, search term)
# pairs and matches them in AND mode over articles and attachments.
def index_data_filter(searches):
    """Return ``[articles, attachments]`` whose index entries match every
    (key, term) pair in ``searches`` (case-insensitive substring match)."""
    articles = []
    attachments = []
    index_data = get_index_data()
    if index_data:
        articles_block = index_data[0]
        attachments_block = index_data[1]
        # First pass: the articles block.
        for article_path in articles_block:
            article = articles_block[article_path]
            is_find = True
            for index, search in searches:
                # Build a case-insensitive regex from the search term.
                pattern = re.compile(regexp_join(".*%s.*", search), re.I)
                # When searching tags, try every tag of the article.
                if index == index_tags_key:
                    is_tag_find = False
                    # One matching tag is enough; stop looking.
                    for key in article[index]:
                        if re.search(pattern, article[index][key]):
                            is_tag_find = True
                            break
                    # No tag matched: this article fails the AND filter.
                    if not is_tag_find:
                        is_find = False
                        break
                else:
                    # Plain field: no match means the article fails.
                    if not re.search(pattern, str(article[index])):
                        is_find = False
                        break
            # All pairs matched: keep the article.
            if is_find:
                articles.append(article)
        # Second pass: the attachments block.
        for attachment_path in attachments_block:
            attachment = attachments_block[attachment_path]
            is_find = True
            for index, search in searches:
                # Attachments have no date or tags; such searches never match.
                if index in (index_date_key, index_tags_key):
                    is_find = False
                    break
                # Build a case-insensitive regex from the search term.
                pattern = re.compile(regexp_join(".*%s.*", search), re.I)
                # No match means the attachment fails.
                if not re.search(pattern, str(attachment[index])):
                    is_find = False
                    break
            # All pairs matched: keep the attachment.
            if is_find:
                attachments.append(attachment)
    return [articles, attachments]
# Rebuild the index.
def reindex():
    """Walk the articles directory and rewrite the JSON index file.

    The index holds three blocks: articles, attachments and tag/date parent
    entries.  Article URLs are derived from content digests (kept stable for
    articles flagged "fixed"), and each entry records which files reference it.
    """
    articles_dir_abspath = get_articles_dir_abspath()
    articles_block = {}
    attachments_block = {}
    reference_dict = {}
    tag_parents = {}
    # Walk all files and subdirectories under the articles directory.
    for root, dirs, files in os.walk(articles_dir_abspath):
        # Slice out the path relative to the articles directory.
        path = root.split(articles_dir_abspath)[-1].lstrip(os.path.sep).replace("\\", "/")
        # Skip ignored directories.
        is_ignore = False
        for ignore_dir in current_app.config["IGNORE_DIR_LIST"]:
            if path.startswith(ignore_dir):
                is_ignore = True
                break
        if is_ignore:
            continue
        index = 1
        for file in sorted(files):
            # Compose the file's relative and absolute paths.
            file_path = "/".join([path, file]).lstrip("/")
            file_abspath = os.path.join(root, file)
            if not path.startswith(current_app.config["ATTACHMENTS_DIR_NAME"]):
                # Skip files that cannot be decoded as UTF-8 text.
                try:
                    with open(file_abspath, encoding='utf-8') as file_data:
                        data = file_data.read()
                except UnicodeDecodeError:
                    continue
                # Honour the in-article "ignore file" flag by updating the ignore list.
                if src.flag.get_ignore_flag(data):
                    update_config_ignore_file_list(file_path, True)
                # Honour the in-article "unignore file" flag likewise.
                if src.flag.get_unignore_flag(data):
                    update_config_ignore_file_list(file_path, False)
            # Skip files on the ignore list.
            if file_path in current_app.config["IGNORE_FILE_LIST"]:
                continue
            parent, title = os.path.split(file_path)
            if not path.startswith(current_app.config["ATTACHMENTS_DIR_NAME"]):
                # Prefer an explicit title flag over the file name.
                title_match = re.match(src.flag.get_title_regexp(), data)
                if title_match:
                    title = title_match.group(1)
                # Collect tags into a digest -> tag dictionary.
                tags = {compute_digest_by_data(tag): tag for tag in src.flag.get_tags_flag(data)}
                tag_parents.update({compute_digest_by_data(tag_parent): tag_parent for key in tags
                                    for tag_parent in get_tag_parents(tags[key])})
                # Extract the date flag and register its parent entries.
                date = src.flag.get_date_flag(data)
                tag_parents.update({compute_digest_by_data(date_parent): date_parent
                                    for date_parent in get_date_parents(date)})
                # The URL is derived from the article's content digest.
                url = "/%s/%s" % (articles_url_name, compute_digest_by_data(data))
                # A "fixed index" flag decides whether the old hash is kept.
                fixed = src.flag.get_fixed_flag(data)
                if fixed:
                    # Reuse the hash from the old index entry when one exists.
                    item = get_item_by_path(file_path)
                    if item:
                        url = "/%s/%s" % (articles_url_name, item[index_url_key].split("/")[-1])
                # Collect referenced files (inline images, custom CSS/JS, snippet links).
                reference_dict[file_path] = src.md.md.get_reference(data) + src.flag.get_custom_css_flag(data, True) + \
                    src.flag.get_custom_js_flag(data, True)
                for value in get_unique_find_dict(src.md.md.get_template("(.*?)"), data).values():
                    value = clean_link(value).split("|")[0]
                    if get_item_by_path(value):
                        reference_dict[file_path].append(value)
                # Assemble one article index entry.
                articles_block[file_path] = {index_id_key: index, index_parent_key: parent, index_title_key: title,
                                             index_path_key: file_path, index_url_key: url, index_date_key: date,
                                             index_update_key: src.flag.get_update_flag(data),
                                             index_tags_key: tags, index_fixed_key: fixed,
                                             index_notags_key: src.flag.get_notags_flag(data),
                                             index_top_key: src.flag.get_top_flag(data),
                                             index_highlight_key: src.flag.get_highlight_flag(data),
                                             index_bereferenced_key: [],
                                             index_noheader_key: src.flag.get_noheader_flag(data),
                                             index_nofooter_key: src.flag.get_nofooter_flag(data)}
            else:
                # Assemble one attachment index entry (URL from the file digest).
                url = "/%s/%s" % (attachments_url_name, compute_digest_by_abspath(file_abspath))
                attachments_block[file_path] = {index_id_key: index, index_parent_key: parent, index_title_key: title,
                                                index_path_key: file_path, index_url_key: url,
                                                index_bereferenced_key: []}
            index += 1
    # Invert the reference map into a back-reference map.
    be_referenced_dict = {}
    for key in reference_dict:
        for value in reference_dict[key]:
            if value not in be_referenced_dict:
                be_referenced_dict[value] = []
            be_referenced_dict[value].append(key)
    # Attach the back-reference lists to the matching index entries.
    for key in be_referenced_dict:
        for block in [articles_block, attachments_block]:
            if key in block:
                block[key][index_bereferenced_key] = be_referenced_dict[key]
                break
    # Write the index file (compact by default, pretty-printed on request).
    separators = (',', ':')
    sort_keys = None
    indent = None
    if current_app.config["INDEX_PRETTY_PRINT"]:
        separators = None
        sort_keys = True
        indent = 2
    index_data = json.dumps([articles_block, attachments_block, tag_parents], separators=separators,
                            sort_keys=sort_keys, indent=indent, ensure_ascii=False)
    with open(os.path.join(articles_dir_abspath, current_app.config["INDEX_FILE_NAME"]), 'w',
              encoding='utf-8') as index_file:
        index_file.write(index_data)
|
# Generated by Django 2.2.8 on 2020-01-20 17:40
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Rename EveInvoice.pending to paid and repair the UUID primary-key default."""

    dependencies = [
        ('django_eveonline_service_manager', '0003_auto_20200120_1740'),
    ]

    operations = [
        migrations.RenameField(
            model_name='eveinvoice',
            old_name='pending',
            new_name='paid',
        ),
        migrations.AlterField(
            model_name='eveinvoice',
            name='id',
            # The auto-generated migration had baked in a single literal UUID
            # (uuid.uuid4 was *called* at makemigrations time), which would make
            # every new row collide on the primary key.  Pass the callable so a
            # fresh UUID is generated per row.
            field=models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False, unique=True),
        ),
    ]
|
# -*- coding: utf-8 -*-
"""
Written by Daniel Moseguí González
GitHub: user:mosegui
LinkedIn: https://www.linkedin.com/in/daniel-moseguí-gonzález-5aa02849/
"""
import logging
import numpy as np
import matplotlib.style
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import mpl_finance as fnc
from pandas.plotting import register_matplotlib_converters
from YahooFinanceDashboard._plots import _plots_axes
logger = logging.getLogger(__name__)

# Global plot configuration: ggplot look plus pandas datetime support in mpl.
matplotlib.style.use('ggplot')
register_matplotlib_converters()
def plot_prices(input_data, plot_type='candlestick'):  # TODO: improve plot presentation (title, metadata, etc...)
    """Plot day-wise stock prices and the traded volume from the inbound data.

    Parameters
    ----------
    input_data : pd.DataFrame
        stock price historical data with open, high, low and adj_close values
    plot_type : str {'candlestick', 'ohlc', 'close'}

    Returns
    -------
    price_axes : matplotlib.pyplot.axes object
        the axes in which the historical prices are plotted
    volume_axes : matplotlib.pyplot.axes object
        the axes in which the historical traded volumes are plotted
    """
    df = input_data.copy()
    df.reset_index(inplace=True)
    # Matplotlib's finance plots want dates as floats.
    df['Date'] = df['Date'].map(mdates.date2num)

    ohlc_columns = ['Date', 'Open', 'High', 'Low', 'Adj_Close']
    price_axes, volume_axes = _plots_axes.get_pricevolume_axes()

    kind = plot_type.lower()
    if kind == 'candlestick':
        fnc.candlestick_ohlc(price_axes, np.array(df[ohlc_columns]), width=0.75, colorup='darkgreen')
    elif kind == 'ohlc':
        fnc.plot_day_summary_ohlc(price_axes, np.array(df[ohlc_columns]), colorup='darkgreen')
    elif kind == 'close':
        price_axes.plot(df.Date, df.Adj_Close, color='k', linewidth=1)
    else:
        logger.error('invalid plot_type')
        raise ValueError('invalid plot_type')

    volume_axes.bar(df.Date, df.Volume, color='royalblue')
    plt.show()
    return price_axes, volume_axes
|
# Copyright (c) 2010 Doug Hellmann. All rights reserved.
#
"""Look up port numbers for a service by name.
"""
# end_pymotw_header
import socket
from urllib.parse import urlparse

# Example URLs covering a range of well-known service schemes.
URLS = [
    "http://www.python.org",
    "https://www.mybank.com",
    "ftp://prep.ai.mit.edu",
    "gopher://gopher.micro.umn.edu",
    "smtp://mail.example.com",
    "imap://mail.example.com",
    "imaps://mail.example.com",
    "pop3://pop.example.com",
    "pop3s://pop.example.com",
]

# Resolve each scheme to its standard port via the system services database.
# NOTE(review): getservbyname raises OSError for schemes absent from
# /etc/services; the script assumes all of the above are registered.
for url in URLS:
    parsed_url = urlparse(url)
    port = socket.getservbyname(parsed_url.scheme)
    print("{:>6} : {}".format(parsed_url.scheme, port))
|
from instruction import *

# Convenience alias for a list of source lines.
StringList = List[str]
# Prefix marking a metadata line in program source.
metaPrefix = "#meta"
class Program:
    """An unlimited-register-machine program parsed from source lines.

    Lines starting with ``#meta`` carry metadata (currently the argument
    count); every other line is parsed into an Instruction.
    """

    def __init__(self, lines: StringList):
        self.instructions = list()
        # metadata
        meta = ""
        registers = set()
        for line in lines:
            if line.startswith(metaPrefix):
                meta = line[len(metaPrefix):]
                continue
            instr = common.parse_line(line)
            self.instructions.append(instr)
            registers.update(set(instr.registers()))
        # Guard: max() on an empty set raises ValueError for a program that
        # references no registers at all; default to 0 in that case.
        self.max_register = max(registers) if registers else 0
        if meta != "":
            self.arg_count = common.nat(meta)
        else:
            self.arg_count = 0
        self.offset_instructions = []
        self.register_offset = 0
        self.instruction_offset = 0

    def set_arg_count(self, n: int):
        """Set the number of input arguments the program expects."""
        self.arg_count = n

    def get_arg_count(self) -> int:
        """Return the number of input arguments the program expects."""
        return self.arg_count

    def instruction_count(self) -> int:
        """Total instruction count, including the generated transfer prologue."""
        return len(self.instructions) + len(self.offset_instructions)

    def lines(self) -> StringList:
        """Render the program (prologue first) back to command strings."""
        rendered = list()
        for o in self.offset_instructions:
            rendered.append(o.to_command())
        for i in self.instructions:
            rendered.append(i.to_command())
        return rendered

    def __repr__(self):
        # Concatenate the rendered command strings.
        return "".join(self.lines())

    def offset_registers_with_copy(self, n: int):
        """Shift every register by ``n``, prepending T-instructions that copy
        the original argument registers 1..arg_count into their new slots."""
        self.register_offset = n
        self.offset_instructions = [Instruction("T", [i, i + n]) for i in range(1, self.arg_count + 1)]
        for i in self.instructions:
            i.offset_registers(n)

    def offset_registers_from_list(self, n: int, reg_from: List):
        """Shift every register by ``n``, copying arguments from the explicit
        source registers in ``reg_from`` instead of 1..arg_count."""
        new = [i + n for i in range(1, self.arg_count + 1)]
        transpose_args = list(zip(reg_from, new))
        self.register_offset = n
        self.offset_instructions = [Instruction("T", list(i)) for i in transpose_args]
        for i in self.instructions:
            i.offset_registers(n)

    def offset_commands(self, n: int):
        """Shift every jump target / command index by ``n``."""
        self.instruction_offset = n
        for i in self.instructions:
            i.offset_command(n)

    def set_exit_point(self, n: int):
        """Redirect every exit-point instruction to jump to index ``n``."""
        for i in self.instructions:
            if i.exit_point():
                i.set_exit_point(n)
        # if no_exit_point:
        #     self.instructions.append(Instruction("J", [0, 0, n]))
|
from ..utils.constants import *
from ..utils.vector3 import vec3
import numpy as np
class Primitive:
    """Base class for renderable scene objects.

    Binds a material (with a back-reference), keeps the collider list, and
    stores per-primitive render options.
    """

    def __init__(self, center, material, max_ray_depth=1, shadow=True, mc=False):
        # center: primitive position; material gets a back-reference to us.
        self.center = center
        self.material = material
        self.material.assigned_primitive = self
        self.shadow = shadow
        self.collider_list = []
        self.max_ray_depth = max_ray_depth
        # mc: Monte-Carlo sampling flag — TODO confirm exact semantics upstream.
        self.mc = mc

    def rotate(self, θ, u):
        """Rotate all colliders by ``θ`` degrees about axis ``u`` through the
        primitive's center, using the axis-angle rotation matrix."""
        u = u.normalize()
        θ = θ / 180 * np.pi
        cosθ = np.cos(θ)
        # NOTE(review): reconstructing sin from cos with sign(θ) is only
        # correct for |θ| <= 180°; confirm callers never exceed that.
        sinθ = np.sqrt(1 - cosθ ** 2) * np.sign(θ)
        # rotation matrix along u axis
        M = np.array(
            [
                [
                    cosθ + u.x * u.x * (1 - cosθ),
                    u.x * u.y * (1 - cosθ) - u.z * sinθ,
                    u.x * u.z * (1 - cosθ) + u.y * sinθ,
                ],
                [
                    u.y * u.x * (1 - cosθ) + u.z * sinθ,
                    cosθ + u.y ** 2 * (1 - cosθ),
                    u.y * u.z * (1 - cosθ) - u.x * sinθ,
                ],
                [
                    u.z * u.x * (1 - cosθ) - u.y * sinθ,
                    u.z * u.y * (1 - cosθ) + u.x * sinθ,
                    cosθ + u.z * u.z * (1 - cosθ),
                ],
            ]
        )
        for c in self.collider_list:
            c.rotate(M, self.center)
|
#!/usr/bin/python
import json
import subprocess
import docker
import sys
import time
from communication import Communication
from data import Data
from message import ComplexEncoder
from message import Message
from observation import Observation
def run(cmd):
    """Run *cmd* in a shell and return its captured stdout as bytes."""
    return subprocess.check_output(cmd, shell=True)
# is container alive
def is_container_alive(container_name, url, communication):
    """Return True when the named Docker container is in the 'running' state.

    Any Docker API error (e.g. no such container) yields False. The *url*
    and *communication* parameters are unused here but kept for interface
    compatibility with callers.
    """
    # connect to docker
    try:
        # try connect to container
        client = docker.APIClient(base_url='unix://var/run/docker.sock')
        inspect_result = client.inspect_container(container_name)
        # Fix: '.get(...) or {}' avoids an AttributeError when 'State' is
        # missing — that exception was not covered by the APIError handler.
        status = (inspect_result.get('State') or {}).get('Status')
        print(status)
        if status == 'running':
            return True
    except docker.errors.APIError:
        pass
    return False
def format(metric_value, messageId):
    """Build the JSON payload expected by the monitor API.

    NOTE(review): this shadows the built-in ``format``; callers depend on
    the name, so it is kept.
    """
    # envelope following the server's JSON schema
    msg = Message(probeId=201, resourceId=202, messageId=messageId,
                  sentTime=int(time.time()), data=None)
    # attach the single measurement observation
    measurement = Data(type="measurement", descriptionId=203, metricId=10,
                       observations=None)
    measurement.add_observation(
        observation=Observation(time=int(time.time()), value=metric_value))
    msg.add_data(data=measurement)
    return json.dumps(msg.reprJSON(), cls=ComplexEncoder)
# send metrics to API server
def send_metrics(metric_value, url, communication, messageId):
    """Format *metric_value* and push it to the monitor via *communication*."""
    payload = format(metric_value, messageId)
    print(f'---Sending message to monitor: {payload}')
    communication.send_message(payload)
def verify_container_activity(container_name, url, communication):
    """Poll the container forever, reporting an escalating metric while it is down.

    The metric is 0.0 while the container runs, and 0.1 x (consecutive
    failure count) while it is down. Polls every 5 s, backing off an extra
    20 s once the metric reaches 0.3.
    """
    messageId = 1
    failures = 1
    while True:
        metric_value = 0.0
        if not is_container_alive(container_name, url, communication):
            # escalate: 0.1, 0.2, 0.3, ... on consecutive failures
            metric_value = 0.1 * failures
            failures += 1
        send_metrics(metric_value, url, communication, messageId)
        messageId += 1
        # back off longer once the alert threshold is reached
        if metric_value >= 0.3:
            time.sleep(20)
        time.sleep(5)
if __name__ == "__main__":
    # argv: [1] container name to watch, [2] monitor API url
    container_name = sys.argv[1]
    url = sys.argv[2]
    communication = Communication(url)
    verify_container_activity(container_name, url, communication)
|
from flask import Flask, flash, render_template, redirect, send_from_directory, url_for, request
import os, random, copy
from werkzeug.useragents import UserAgent
# from flask_bootstrap import Bootstrap
from flask_login import (
LoginManager,
current_user,
login_required,
login_user,
logout_user,
)
import requests
from oauthlib.oauth2 import WebApplicationClient
# from db import init_db_command
from user import User
import json
import logging
# import sqlite3
app = Flask(__name__)
# Bootstrap(app)
# NOTE(review): secret key and OAuth client id are hard-coded in source —
# consider moving them to environment variables / config.
app.secret_key = 'x4thHzLCyrLUpznsy1wKXSXW'
client_id = '360742249219-992pv8f1bsh7or9h9b5tpg3g7q62ve60'
# Route application logs through gunicorn's error logger when run under gunicorn.
gunicorn_logger = logging.getLogger('gunicorn.error')
app.logger.handlers = gunicorn_logger.handlers
app.logger.setLevel(gunicorn_logger.level)
app.config['UPLOAD_FOLDER'] = os.path.join('static', 'photos')
login_manager = LoginManager()
login_manager.init_app(app)
# Naive database setup
# try:
# init_db_command()
# except sqlite3.OperationalError:
# # Assume it's already been created
# pass
client = WebApplicationClient(client_id=client_id)
# Quiz bank: question image path -> answer options. The FIRST option is the
# correct answer (see the grading in result()/result_authenticated()).
original_questions = {
os.path.join(app.config['UPLOAD_FOLDER'], 'a.PNG') : ['O(N)','O(N^2)','O(Log(N))','O(1)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'b.PNG') : ['O(N)','O(Log(N))','O(N*Log(N))','O(1)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'c.PNG') : ['O(N)','O(N^2)','O(Log(N))','O(N*Log(N))'],
os.path.join(app.config['UPLOAD_FOLDER'], 'd.PNG') : ['O(N^2)','O(N)','O(Log(N))','O(1)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'e.PNG') : ['O(N)','O(N^2)','O(Log(N))','O(1)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'f.PNG') : ['O(2^N)','O(N^2)','O(Log(N))','O(N!)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'g.PNG') : ['O(N)','O(N^3)','O(Log(N))','O(N^2)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'h.PNG') : ['O(Log(N))','O(1)','O(N^2)','O(N)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'i.PNG') : ['O(Log(N))','O(N^3)','O(1)','O(N)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'j.PNG') : ['O(N)','O(N^2)','O(Log(N))','O(N!)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'k.PNG') : ['O(N)','O(N^2)','O(Log(N))','O(1)'],
os.path.join(app.config['UPLOAD_FOLDER'], 'l.PNG') : ['O(Log(N))','O(N^3)','O(N*Log(N))','O(N)'],
# NOTE(review): 'g.PNG' is a DUPLICATE key (also listed above) — this entry
# overwrites the earlier one, leaving 12 distinct questions. Probably a
# different filename was intended; confirm before changing.
os.path.join(app.config['UPLOAD_FOLDER'], 'g.PNG') : ['O(N)','O(N^3)','O(N*Log(N))','O(Log(N))']
}
# Module-level quiz state, rebound by shuffle() on each /problems-page visit.
selected_questions = {}
questions = {}
question_max = 6
def log(data):
    """Log *data* at INFO level via the app's (gunicorn-wired) logger."""
    app.logger.info(data)
def get_google_provider_cfg():
    """Fetch and return Google's OpenID Connect discovery document as a dict."""
    discovery_url = "https://accounts.google.com/.well-known/openid-configuration"
    return requests.get(discovery_url).json()
def shuffle(q):
    """Pick ``question_max`` distinct questions at random from *q*.

    Side effects: rebinds the module-level ``selected_questions`` (the chosen
    subset) and ``questions`` (a deep copy whose answer lists callers may
    reshuffle for display). Returns ``selected_questions``.
    """
    global selected_questions
    global questions
    selected_questions = {}
    # Guard: if q has fewer entries than question_max the original
    # rejection-sampling loop would spin forever.
    target = min(question_max, len(q))
    keys = list(q.keys())
    while len(selected_questions) < target:
        pick = random.choice(keys)
        if pick not in selected_questions:
            selected_questions[pick] = q[pick]
    questions = copy.deepcopy(selected_questions)
    return selected_questions
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: load a User by id when restoring a session."""
    return User.get(user_id)
@app.route('/favicon.ico')
def favicon():
    """Serve the site favicon from the static folder."""
    return send_from_directory(os.path.join(app.root_path, 'static'), 'favicon.ico', mimetype='image/vnd.microsoft.icon')
@app.route('/')
def main():
    """Landing route: clear stale flash state and go to the welcome page."""
    flash("")
    return redirect('/welcome-page')
@app.route('/welcome-page')
def welcome_page():
    """Public welcome page; signed-in users are redirected to the authenticated view."""
    agent = UserAgent(request.headers.get('User-Agent'))
    # mobile platforms are unsupported — bail out with a plain HTML message
    if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
        message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
        return message
    if (current_user.is_authenticated):
        name = current_user.name
        # email = current_user.email
        profile_pic = current_user.profile_pic
        log(f"Name: {name}")
        log(f"Email: {current_user.email}")
        # return f"<h1>Authenticated - {current_user.name}</h1><br><a href='/logout'>Sign Out</a>"
        # return render_template('SignedInWelcomePage.html', name=name, profile_pic=profile_pic)
        # NOTE(review): name/profile picture travel via the URL query string —
        # confirm that exposing them there is acceptable.
        data = json.dumps({'name':name, 'profile_pic':profile_pic})
        return redirect(url_for('.welcome_authenticated', data=data))
    else:
        return render_template('welcome.html')
@app.route('/welcome-authenticated')
@login_required
def welcome_authenticated():
    """Render the signed-in welcome page using name/picture from the query string."""
    payload = json.loads(request.args['data'])
    return render_template('welcome-authenticated.html',
                           name=payload['name'],
                           profile_pic=payload['profile_pic'])
# @app.route('/welcome')
# def welcome():
# agent = UserAgent(request.headers.get('User-Agent'))
# if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
# message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
# return message
# return render_template('welcome.html')
@app.route('/fundamentals-page')
def fundamentals_page():
    """Fundamentals lesson; signed-in users get the authenticated variant."""
    agent = UserAgent(request.headers.get('User-Agent'))
    # mobile platforms are unsupported — bail out with a plain HTML message
    if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
        message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
        return message
    if (current_user.is_authenticated):
        name = current_user.name
        profile_pic = current_user.profile_pic
        log(f"Name: {name}")
        log(f"Email: {current_user.email}")
        data = json.dumps({'name':name, 'profile_pic':profile_pic})
        return redirect(url_for('.fundamentals_authenticated', data=data))
    else:
        return render_template('fundamentals.html')
@app.route('/fundamentals-authenticated')
@login_required
def fundamentals_authenticated():
    """Render the signed-in fundamentals page using name/picture from the query string."""
    payload = json.loads(request.args['data'])
    return render_template('fundamentals-authenticated.html',
                           name=payload['name'],
                           profile_pic=payload['profile_pic'])
# @app.route('/fundamentals')
# def fundamentals():
# agent = UserAgent(request.headers.get('User-Agent'))
# if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
# message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
# return message
# return render_template('fundamentals.html')
@app.route('/time-complexity-page')
def time_complexity_page():
    """Time-complexity lesson; signed-in users get the authenticated variant."""
    agent = UserAgent(request.headers.get('User-Agent'))
    # mobile platforms are unsupported — bail out with a plain HTML message
    if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
        message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
        return message
    if (current_user.is_authenticated):
        name = current_user.name
        profile_pic = current_user.profile_pic
        log(f"Name: {name}")
        log(f"Email: {current_user.email}")
        data = json.dumps({'name':name, 'profile_pic':profile_pic})
        return redirect(url_for('.time_complexity_authenticated', data=data))
    else:
        return render_template('time-complexity.html')
@app.route('/time-complexity-authenticated')
@login_required
def time_complexity_authenticated():
    """Render the signed-in time-complexity page using name/picture from the query string."""
    payload = json.loads(request.args['data'])
    return render_template('time-complexity-authenticated.html',
                           name=payload['name'],
                           profile_pic=payload['profile_pic'])
# @app.route('/time-complexity')
# def time():
# agent = UserAgent(request.headers.get('User-Agent'))
# if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
# message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
# return message
# return render_template('time-complexity.html')
@app.route('/space-complexity-page')
def space_complexity_page():
    """Space-complexity lesson; signed-in users get the authenticated variant."""
    agent = UserAgent(request.headers.get('User-Agent'))
    # mobile platforms are unsupported — bail out with a plain HTML message
    if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
        message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
        return message
    if (current_user.is_authenticated):
        name = current_user.name
        profile_pic = current_user.profile_pic
        log(f"Name: {name}")
        log(f"Email: {current_user.email}")
        data = json.dumps({'name':name, 'profile_pic':profile_pic})
        return redirect(url_for('.space_complexity_authenticated', data=data))
    else:
        return render_template('space-complexity.html')
@app.route('/space-complexity-authenticated')
@login_required
def space_complexity_authenticated():
    """Render the signed-in space-complexity page using name/picture from the query string."""
    payload = json.loads(request.args['data'])
    return render_template('space-complexity-authenticated.html',
                           name=payload['name'],
                           profile_pic=payload['profile_pic'])
# @app.route('/space-complexity')
# def space():
# agent = UserAgent(request.headers.get('User-Agent'))
# if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
# message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
# return message
# return render_template('space-complexity.html')
@app.route('/problems-page')
def problems_page():
    """Quiz page: pick a fresh random question set and shuffle the answer order."""
    agent = UserAgent(request.headers.get('User-Agent'))
    # mobile platforms are unsupported — bail out with a plain HTML message
    if (agent.platform in ['blackberry', 'android', 'iphone', 'ipad']):
        message = f'<h1>Your {agent.platform} device is currently unsupported⏰<br> Please access LearnComplexity.io from a computer 🖥️</h1>'
        return message
    # shuffle() also rebinds the module-level selected_questions/questions,
    # so the grading routes see the same selection
    selected_questions = shuffle(original_questions)
    for key in questions: random.shuffle(questions[key])
    if (current_user.is_authenticated):
        name = current_user.name
        profile_pic = current_user.profile_pic
        log(f"Name: {name}")
        log(f"Email: {current_user.email}")
        data = json.dumps({'name':name, 'profile_pic':profile_pic, 'selected_questions':selected_questions, 'questions': questions})
        return redirect(url_for('.problems_authenticated', data=data))
    else:
        # print(questions)
        return render_template('problems.html', q=selected_questions, o=questions)
@app.route('/problems-authenticated')
@login_required
def problems_authenticated():
    """Render the signed-in quiz page using the module-level question selection."""
    payload = json.loads(request.args['data'])
    return render_template('problems-authenticated.html',
                           q=selected_questions,
                           o=questions,
                           name=payload['name'],
                           profile_pic=payload['profile_pic'])
@app.route('/earn')
def earn():
    """Easter egg: the 'earn' page just redirects to a music video."""
    return redirect('https://youtu.be/dQw4w9WgXcQ?t=42')
@app.route('/result-authenticated', methods=['POST'])
@login_required
def result_authenticated():
    """Grade a signed-in user's submission: success only on a perfect score."""
    if len(request.form) != question_max:
        # incomplete submission — send the user back to a fresh quiz
        return redirect("/problems-page")
    # the first option of each question is the correct answer
    correct = sum(1 for key in selected_questions
                  if request.form[key] == selected_questions[key][0])
    template = 'success.html' if correct == question_max else 'failure.html'
    return render_template(template)
@app.route('/result', methods=['POST'])
def result():
    """Grade an anonymous submission: success only on a perfect score."""
    if len(request.form) != question_max:
        # incomplete submission — send the user back to a fresh quiz
        return redirect("/problems-page")
    # the first option of each question is the correct answer
    correct = sum(1 for key in selected_questions
                  if request.form[key] == selected_questions[key][0])
    template = 'success.html' if correct == question_max else 'failure.html'
    return render_template(template)
@app.route('/login')
def login():
    """Start the Google OAuth2 flow by redirecting to the authorization endpoint."""
    google_provider_cfg = get_google_provider_cfg()
    authorization_endpoint = google_provider_cfg["authorization_endpoint"]
    # Google will redirect back to <this URL>/callback with an auth code
    request_uri = client.prepare_request_uri(
        authorization_endpoint,
        redirect_uri=request.base_url + "/callback",
        scope=["openid", "email", "profile"],
    )
    return redirect(request_uri)
@app.route("/login/callback")
def callback():
code = request.args.get("code")
google_provider_cfg = get_google_provider_cfg()
token_endpoint = google_provider_cfg["token_endpoint"]
token_url, headers, body = client.prepare_token_request(
token_endpoint,
authorization_response=request.url,
redirect_url=request.base_url,
code=code
)
token_response = requests.post(
token_url,
headers=headers,
data=body,
auth=(client_id, app.secret_key)
)
client.parse_request_body_response(json.dumps(token_response.json()))
userinfo_endpoint = google_provider_cfg["userinfo_endpoint"]
uri, headers, body = client.add_token(userinfo_endpoint)
userinfo_response = requests.get(uri, headers=headers, data=body)
if userinfo_response.json().get("email_verified"):
unique_id = userinfo_response.json()["sub"]
users_email = userinfo_response.json()["email"]
picture = userinfo_response.json()["picture"]
users_name = userinfo_response.json()["given_name"]
else:
return "User email not available or not verified by Google.", 400
user = User(
id_=unique_id, name=users_name, email=users_email, profile_pic=picture
)
if not User.get(unique_id):
User.create(unique_id, users_name, users_email, picture)
login_user(user)
return redirect(url_for("main"))
@app.route("/logout")
@login_required
def logout():
logout_user()
return redirect(url_for("welcome_page"))
if __name__ == '__main__':
    # Development entry point only — debug=True must not be used in production
    # (the production path runs under gunicorn, see the logger wiring above).
    app.run(debug=True)
import subprocess
import pexpect
import re
import os
from PySide2 import QtWidgets, QtGui, QtCore
from pygears.conf import Inject, MayInject, bind, inject
# X11 keysym code -> Qt key code, for keys whose numeric values differ
# between the two systems (keys below 127 pass through unchanged).
native_key_map = {
    0xff08: QtCore.Qt.Key_Backspace,
    0xff09: QtCore.Qt.Key_Tab,
    0xff0b: QtCore.Qt.Key_Clear,
    0xff0d: QtCore.Qt.Key_Return,
    0xff13: QtCore.Qt.Key_Pause,
    0xff14: QtCore.Qt.Key_ScrollLock,
    0xff15: QtCore.Qt.Key_SysReq,
    0xff1b: QtCore.Qt.Key_Escape,
    0xffff: QtCore.Qt.Key_Delete,
    0xff50: QtCore.Qt.Key_Home,
    0xff51: QtCore.Qt.Key_Left,
    0xff52: QtCore.Qt.Key_Up,
    0xff53: QtCore.Qt.Key_Right,
    0xff54: QtCore.Qt.Key_Down,
    0xff55: QtCore.Qt.Key_PageUp,
    0xff56: QtCore.Qt.Key_PageDown,
    0xff57: QtCore.Qt.Key_End,
}
def convert_native_key(state, key):
    """Translate an X11 (modifier state, keysym) pair into Qt terms.

    Returns ``(key, modifiers, text)`` where *key* is a Qt key code,
    *modifiers* the summed Qt modifier flags and *text* the printable
    character for ASCII keys (empty otherwise).
    """
    key = native_key_map.get(key, key)
    text = chr(key) if key < 127 else ''
    modifiers = 0
    if state & 0x4:
        modifiers += QtCore.Qt.CTRL
    # shift only matters for non-ASCII keys or alphabetic characters;
    # short-circuit keeps chr() away from large Qt key codes
    if (state & 0x1) and (key > 127 or chr(key).isalpha()):
        modifiers += QtCore.Qt.SHIFT
    if state & 0x8:
        modifiers += QtCore.Qt.ALT
    return key, modifiers, text
class ForeignProc(QtCore.QObject):
    """Runs an external program on a dedicated worker QThread.

    With a *prompt*, the child is driven interactively through pexpect and
    commands are sent via :meth:`command` (results come back on the
    ``response`` signal). Without one, the child is a plain subprocess whose
    stdout is drained. After launch, ``window_up`` is emitted with the id of
    the X11 window that is active at that moment.
    """
    key_press = QtCore.Signal(int, int, str)
    window_up = QtCore.Signal(int)
    response = QtCore.Signal(str, int)
    def __init__(self, cmd, prompt=None, parent=None):
        super().__init__(parent)
        self.cmd = cmd
        self.prompt = prompt
        # run() executes on this worker thread, not on the caller's thread
        self.thrd = QtCore.QThread()
        self.moveToThread(self.thrd)
        self.exiting = False
        # id of the command currently in flight via command(); None when idle
        self.cmd_id = None
        self.thrd.started.connect(self.run)
        self.thrd.finished.connect(self.quit)
        self.thrd.start()
    def key_press_slot(self, key, modifiers, text):
        """Re-emit a key event coming from the (disabled) xev watcher."""
        self.key_press.emit(key, modifiers, text)
    def run(self):
        """Thread body: spawn the child, announce its window, pump its output."""
        # import os
        # print(os.environ['PATH'])
        # import pdb
        # pdb.set_trace()
        if self.prompt:
            self.p = pexpect.spawnu(self.cmd)
            self.p.setecho(False)
            self.p.expect(self.prompt)
        else:
            self.p = subprocess.Popen(
                self.cmd, stdout=subprocess.PIPE, shell=True)
        window_id_pid = None
        print(f'xdotool search --maxdepth 1 --pid {self.p.pid}')
        self.thrd.msleep(1000)
        # while (not window_id_pid):
        #     try:
        #         window_id_pid = subprocess.check_output(
        #             f'xdotool search --maxdepth 1 --pid {self.p.pid}',
        #             shell=True)
        #     except subprocess.CalledProcessError as e:
        #         print(f'Xdotool failed: {e.output}')
        #         self.thrd.msleep(1000)
        # # window_id_str = window_id_pid.decode().strip().split('\n')[0]
        # # window_id = int(window_id_str)
        # self.thrd.msleep(200)
        # NOTE(review): assumes the child's window is the active one 1s after
        # launch — inherently racy; confirm acceptable.
        window_act_id = subprocess.check_output(
            'xdotool getactivewindow', shell=True).decode().strip()
        print(
            f'Active window id: {int(window_act_id)}: {hex(int(window_act_id))}'
        )
        self.window_up.emit(int(window_act_id))
        # self.xev_proc = ForeignXevProc(int(window_act_id))
        # self.xev_proc.key_press.connect(self.key_press_slot)
        if self.prompt:
            # interactive mode: keep processing queued command() calls and
            # drain any unsolicited output between them
            while (1):
                self.thrd.eventDispatcher().processEvents(
                    QtCore.QEventLoop.AllEvents)
                if self.cmd_id is not None:
                    continue
                try:
                    data = ''
                    while True:
                        data += self.p.read_nonblocking(
                            size=4096, timeout=0.01)
                except pexpect.TIMEOUT:
                    pass
                for d in data.strip().split('\n'):
                    print(f'Unsollicited: {data}')
                # continue
                # self.thrd.eventDispatcher().processEvents(
                #     QtCore.QEventLoop.AllEvents)
        else:
            # non-interactive mode: just echo the child's stdout
            for line in self.p.stdout:
                self.thrd.eventDispatcher().processEvents(
                    QtCore.QEventLoop.AllEvents)
                line = line.decode()
                print(f'Unsollicited: {line}')
    def command(self, cmd, cmd_id):
        """Send *cmd* to the interactive child and emit `response` with its output."""
        self.cmd_id = cmd_id
        self.p.send(cmd + '\n')
        # print(f'GtkWave: {cmd}')
        self.p.expect('%')
        # print(f'GtkWave: {self.p.before.strip()}')
        self.response.emit(self.p.before.strip(), cmd_id)
        self.cmd_id = None
    def quit(self):
        """Close the child and stop the worker thread."""
        # NOTE(review): subprocess.Popen has no close(); this only works in
        # the pexpect (prompt) case — confirm the non-interactive path.
        self.p.close()
        self.thrd.quit()
class ForeignXevProc(QtCore.QObject):
    """Watches an X11 window with ``xev`` and re-emits key releases as a Qt signal."""
    key_press = QtCore.Signal(int, int, str)
    def __init__(self, window_id, parent=None):
        super().__init__(parent)
        self.window_id = window_id
        # run() executes on its own thread so the blocking xev read never
        # stalls the GUI
        self.xev_thread = QtCore.QThread()
        self.moveToThread(self.xev_thread)
        self.xev_thread.started.connect(self.run)
        self.xev_thread.start()
    def run(self):
        """Thread body: parse xev keyboard output, emitting key_press per event."""
        # grep keeps only KeyRelease blocks (plus the two detail lines after each)
        cmd = (f'xev -id {self.window_id} -event keyboard'
               ' | grep -A2 --line-buffered "^KeyRelease"')
        # cmd = (f'xev -id {self.window_id}')
        print(f"Running xev with: {cmd}")
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
        for line in proc.stdout:
            line = line.decode()
            print(line, end='')
            if line.strip().startswith("state"):
                # print(line)
                # pull the modifier state and keysym out of the xev detail line
                res = re.search(
                    r"state 0x([0-9a-fA-F]+).*keysym 0x([0-9a-fA-F]+).*",
                    line.strip())
                key, modifiers, text = convert_native_key(
                    int(res.group(1), 16), int(res.group(2), 16))
                self.key_press.emit(key, modifiers, text)
    def quit(self):
        """Stop the watcher thread."""
        self.xev_thread.quit()
class ForeignWindow(QtCore.QObject):
    """Embeds a foreign X11 application's window and forwards key events to it."""
    initialized = QtCore.Signal()
    def __init__(self, cmd, parent=None):
        super().__init__(parent)
        self.proc = ForeignProc(cmd)
        self.proc.key_press.connect(self.key_press)
        self.proc.window_up.connect(self.window_up)
        # make sure the child process is torn down when the app exits
        QtWidgets.QApplication.instance().aboutToQuit.connect(self.proc.quit)
    @inject
    def key_press(self, key, modifiers, text, main=Inject('gearbox/main/inst')):
        """Replay a foreign key event into the currently focused Qt widget.

        Posts ShortcutOverride, KeyPress and KeyRelease in sequence, pumping
        the event loop after each so handlers run in order.
        """
        app = QtWidgets.QApplication.instance()
        # app.postEvent(
        #     graph, QtGui.QKeyEvent(QtGui.QKeyEvent.KeyPress, key, modifiers))
        # print(f'key: {(key, modifiers, text)} -> {app.focusWidget()}')
        app.postEvent(
            app.focusWidget(),
            QtGui.QKeyEvent(QtGui.QKeyEvent.ShortcutOverride, key, modifiers,
                            text))
        # print(f'key: {(key, modifiers, text)} -> {app.focusWidget()}')
        app.processEvents(QtCore.QEventLoop.AllEvents)
        app.postEvent(
            app.focusWidget(),
            QtGui.QKeyEvent(QtGui.QKeyEvent.KeyPress, key, modifiers, text))
        app.processEvents(QtCore.QEventLoop.AllEvents)
        # app.postEvent(
        #     graph, QtGui.QKeyEvent(QtGui.QKeyEvent.KeyRelease, key, modifiers))
        app.postEvent(
            # main.centralWidget(),
            app.focusWidget(),
            QtGui.QKeyEvent(QtGui.QKeyEvent.KeyRelease, key, modifiers, text))
        app.processEvents(QtCore.QEventLoop.AllEvents)
    def eventFilter(self, obj, event):
        # NOTE(review): leftover debug print; the filter currently passes
        # every event straight through.
        print(f'bla: {event.type()}')
        if event.type() == QtCore.QEvent.KeyPress:
            pass
        return QtCore.QObject.eventFilter(self, obj, event)
    @inject
    def window_up(self, window_id, graph=Inject('gearbox/graph')):
        """Wrap the foreign window in a QWidget container and give it focus."""
        self.window_id = window_id
        print(f'Window id: {window_id}: {hex(self.window_id)}')
        self.window = QtGui.QWindow.fromWinId(window_id)
        self.widget = QtWidgets.QWidget.createWindowContainer(self.window)
        # self.widget.setFocusPolicy(QtCore.Qt.StrongFocus)
        # graph.clearFocus()
        self.widget.activateWindow()
        self.widget.setFocus()
        # self.widget.installEventFilter(self)
        # bypass window-manager decoration/placement so the window embeds cleanly
        self.widget.setWindowFlag(QtCore.Qt.X11BypassWindowManagerHint)
        self.widget.setWindowFlag(QtCore.Qt.BypassGraphicsProxyWidget)
        self.widget.setWindowFlag(QtCore.Qt.BypassWindowManagerHint)
        self.initialized.emit()
|
from math import sqrt, pi
from flavio.physics.bdecays.common import meson_ff
from flavio.physics.bdecays.formfactors import hqet
from flavio.physics.bdecays.formfactors import common
from flavio.classes import AuxiliaryQuantity
from flavio.physics.running import running
# Supported processes: meson names of the B and V states plus the
# quark-level transition, keyed by process label.
process_dict = {}
process_dict['B->D*'] = {'B': 'B0', 'V': 'D*+', 'q': 'b->c'}
def h_to_A(mB, mV, h, q2):
    """Convert HQET form factors *h* to the standard form factor basis.

    See e.g. arXiv:1309.0301, eqs. (38), (39). The returned dict uses the
    helicity form factors A_12 and T_23 in place of A_2 and T_3.
    """
    mp = mB + mV
    mm = mB - mV
    norm = 1 / 2 / sqrt(mB * mV)
    ff = {}
    ff['V'] = norm * mp * h['V']
    ff['A1'] = norm * (mp**2 - q2) / mp * h['A1']
    ff['A2'] = norm * mp * (h['A3'] + mV / mB * h['A2'])
    ff['A0'] = norm * ((mp**2 - q2) / (2 * mV) * h['A1']
                       - (mB**2 - mV**2 + q2) / (2 * mB) * h['A2']
                       - (mB**2 - mV**2 - q2) / (2 * mV) * h['A3'])
    ff['T1'] = norm * (mp * h['T1'] - mm * h['T2'])
    ff['T2'] = norm * ((mp**2 - q2) / mp * h['T1']
                       - (mm**2 - q2) / mm * h['T2'])
    ff['T3'] = norm * (mm * h['T1'] - mp * h['T2']
                       - 2 * (mB**2 - mV**2) / mB * h['T3'])
    # replace A_1, A_2 by the helicity form factor A_12
    ff['A12'] = ((ff['A1'] * mp**2 * (mB**2 - mV**2 - q2)
                  - ff['A2'] * (mB**4 + (mV**2 - q2)**2
                                - 2 * mB**2 * (mV**2 + q2)))
                 / (16. * mB * mV**2 * mp))
    del ff['A2']
    # replace T_2, T_3 by the helicity form factor T_23
    ff['T23'] = ((mB**2 - mV**2) * (mB**2 + 3 * mV**2 - q2) * ff['T2']
                 - (mB**4 + (mV**2 - q2)**2
                    - 2 * mB**2 * (mV**2 + q2)) * ff['T3']
                 ) / (8 * mB * mm * mV**2)
    del ff['T3']
    return ff
def ff(process, q2, par, scale):
    r"""Central value of $B\to V$ form factors in the lattice convention
    CLN parametrization.
    See arXiv:1703.05330.

    :param process: key into ``process_dict`` (e.g. ``'B->D*'``)
    :param q2: momentum transfer squared
    :param par: parameter dictionary (masses, CLN and HQET parameters)
    :param scale: renormalization scale passed to the HQET parameters
    """
    pd = process_dict[process]
    mB = par['m_' + pd['B']]
    mV = par['m_' + pd['V']]
    # recoil variable, clamped at zero recoil (w = 1)
    w = max((mB**2 + mV**2 - q2) / (2 * mB * mV), 1)
    phqet = hqet.get_hqet_parameters(par, scale)
    ash = phqet['ash']
    epsc = phqet['epsc']
    epsb = phqet['epsb']
    zc = phqet['zc']
    # eq. (22) of arXiv:0809.0222
    CV1 = hqet.CV1(w, zc)
    CV2 = hqet.CV2(w, zc)
    CV3 = hqet.CV3(w, zc)
    CA1 = hqet.CA1(w, zc)
    CA2 = hqet.CA2(w, zc)
    CA3 = hqet.CA3(w, zc)
    CT1 = hqet.CT1(w, zc)
    CT2 = hqet.CT2(w, zc)
    CT3 = hqet.CT3(w, zc)
    L = hqet.L(par, w)
    # leading, universal Isgur-Wise function
    rho2 = par['CLN rho2_xi']
    c = par['CLN c_xi']
    z = common.z(mB, mV, q2, t0='tm')
    xi = 1 - 8 * rho2 * z + (64 * c - 16 * rho2) * z**2
    # HQET form factors h_i including alpha_s and 1/m corrections
    h = {}
    h['V'] = xi * (1 + ash * CV1
                   + epsc * (L[2] - L[5])
                   + epsb * (L[1] - L[4]))
    h['A1'] = xi * (1 + ash * CA1
                    + epsc * (L[2] - L[5] * (w - 1)/(w + 1))
                    + epsb * (L[1] - L[4] * (w - 1)/(w + 1))
                    + epsc**2 * par['B->D* CLN deltac_hA1'])
    h['A2'] = xi * (ash * CA2 + epsc * (L[3] + L[6]))
    h['A3'] = xi * (1 + ash * (CA1 + CA3)
                    + epsc * (L[2] - L[3] + L[6] - L[5])
                    + epsb * (L[1] - L[4]))
    h['T1'] = xi * (1 + ash * (CT1 + (w - 1)/2 * (CT2 - CT3))
                    + epsc * L[2]
                    + epsb * L[1]
                    + epsc**2 * par['B->D* CLN deltac_hT1'])
    h['T2'] = xi * (ash * (w + 1)/2 * (CT2 + CT3)
                    + epsc * L[5]
                    - epsb * L[4])
    h['T3'] = xi * (ash * CT2
                    + epsc * (L[6] - L[3]))
    # convert from the HQET basis to the standard form factor basis
    return h_to_A(mB, mV, h, q2)
|
"""
List generation unit tests.
Copyright (c) 2018 Qualcomm Technologies, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the
limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Qualcomm Technologies, Inc. nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY
THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os
import csv
import datetime
import fnmatch
import glob
import zipfile
import re
import pytest
from click.testing import CliRunner
import luhn
from dirbs.cli.listgen import cli as dirbs_listgen_cli
from dirbs.cli.classify import cli as dirbs_classify_cli
from dirbs.importer.operator_data_importer import OperatorDataImporter
from dirbs.config import ConditionConfig
from _helpers import job_metadata_importer, expect_success
from _importer_params import OperatorDataParams, PairListParams, GoldenListParams,\
StolenListParams, RegistrationListParams
from _helpers import get_importer, from_cond_dict_list_to_cond_list, find_file_in_dir, find_subdirectory_in_dir, \
import_data, invoke_cli_classify_with_conditions_helper
from _fixtures import * # noqa: F403, F401
from dirbs.metadata import query_for_command_runs
def _verify_per_operator_lists_generated(dir_path, type_list):
    """Assert that a list of the given type exists for each of operators 1-4."""
    for operator_id in [1, 2, 3, 4]:
        file_pattern = '*_{0}_operator{1}.csv'.format(type_list, operator_id)
        assert find_file_in_dir(file_pattern, dir_path)
def _cli_listgen_helper(db_conn, tmpdir, sub_temp_dir, mocked_config, date=None, base_run_id=None, no_full_list=None,
                        no_clean_up=None, unzip_files=True, combine_deltas=True):
    """Helper function for CLI list-gen.

    Invokes dirbs-listgen into a fresh subdirectory of *tmpdir*, asserts it
    succeeded, and returns ``(run_id, output_dir)``.

    :param unzip_files: extract all generated .zip archives in place, since
        many tests rely on the .csv files being present
    :param combine_deltas: merge the per-change-type delta CSVs into a single
        file per list with an extra ``change_type`` column, matching the
        previous output spec the tests were written against
    """
    options_list = []
    if date:
        options_list.extend(['--curr-date', date])
    if base_run_id:
        options_list.extend(['--base', base_run_id])
    if no_full_list:
        options_list.extend(['--no-full-lists'])
    if no_clean_up:
        options_list.extend(['--no-cleanup'])
    output_dir = str(tmpdir.mkdir(sub_temp_dir))
    options_list.append(output_dir)
    runner = CliRunner()
    result = runner.invoke(dirbs_listgen_cli, options_list, obj={'APP_CONFIG': mocked_config},
                           catch_exceptions=False)
    assert result.exit_code == 0
    job_record_list = query_for_command_runs(db_conn, 'dirbs-listgen')
    db_conn.commit()
    assert job_record_list
    run_id = [x.run_id for x in job_record_list][0]
    # Fix: resolve the listgen output directory up-front — both branches
    # below need it. Previously it was only assigned inside the unzip branch,
    # so unzip_files=False with combine_deltas=True raised UnboundLocalError.
    listgen_path = find_subdirectory_in_dir('listgen*', output_dir)
    # If requested, auto unzip all the files as well as many test rely on the .csv files being there
    if unzip_files:
        for zip_path in glob.glob(os.path.join(listgen_path, '*.zip')):
            with zipfile.ZipFile(zip_path, 'r') as zf:
                zf.extractall(path=listgen_path)
    # If requested, combine all delta files into a single file containing an extra change_type column to match
    # the previous spec that the tests were based off
    if combine_deltas:
        for delta_csv_path in glob.glob(os.path.join(listgen_path, '*delta*.csv')):
            fn = os.path.basename(delta_csv_path)
            # filename shape: <date>_<type>[_<operator>]_delta_<from>_<to>_<change_type>.csv
            file_type = re.sub(r'^\d+_\d+_([a-z]+)_.*$', r'\1', fn)
            date_str = re.sub(r'^(.*)_{0}.*$'.format(file_type), r'\1', fn)
            run_id_range = re.sub(r'^.*_delta_([-0-9]+_[0-9]+)_.*$', r'\1', fn)
            change_type = re.sub(r'^.*{0}_(.*)\.csv$'.format(run_id_range), r'\1', fn)
            if file_type == 'blacklist':
                combined_fn = os.path.join(listgen_path,
                                           '{0}_blacklist_delta_{1}.csv'.format(date_str, run_id_range))
            else:
                operator_id = re.sub(r'^.*_{0}_(.*)_delta.*csv$'.format(file_type), r'\1', fn)
                combined_fn = os.path.join(listgen_path,
                                           '{0}_{1}_{2}_delta_{3}.csv'.format(date_str,
                                                                              file_type,
                                                                              operator_id,
                                                                              run_id_range))
            # only the first chunk appended to a combined file writes the header
            write_header = not os.path.exists(combined_fn)
            with open(delta_csv_path, 'r') as input_file, open(combined_fn, 'a') as output_file:
                input_lines = input_file.read().splitlines()
                if write_header:
                    output_file.write(input_lines[0] + ',change_type\n')
                for input_line in input_lines[1:]:
                    output_file.write(input_line + ',{0}\n'.format(change_type))
            os.remove(delta_csv_path)
    return run_id, output_dir
def _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, sub_temp_dir, delta_fn='exceptions_operator1_delta',
                              date=None, base_run_id=None, no_full_list=None, no_clean_up=None):
    """Run list-gen (optionally against a base run) and return ``(rows, run_id)``.

    *rows* are the lines of the generated delta file whose name matches
    *delta_fn*, or None when no such file was produced.
    """
    run_id, output_dir = _cli_listgen_helper(db_conn, tmpdir, sub_temp_dir, mocked_config, date=date,
                                             base_run_id=base_run_id, no_full_list=no_full_list,
                                             no_clean_up=no_clean_up)
    rows = _read_rows_from_file(delta_fn, tmpdir, output_dir=output_dir)
    return rows, run_id
def _read_rows_from_file(listgen_file_name, tmpdir, output_dir=None, dir_name=None):
    """Return the lines of the single listgen output file matching a name fragment.

    Useful for tests that run list-gen once and need to check multiple files.
    Exactly one file is expected to match *listgen_file_name*; more than one
    match is an error, and no match returns None.
    """
    if not output_dir:
        output_dir = os.path.join(str(tmpdir), dir_name)
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    candidates = [entry for entry in os.listdir(dir_path)
                  if os.path.isfile(os.path.join(dir_path, entry))]
    matches = fnmatch.filter(candidates, '*{0}*'.format(listgen_file_name))
    if not matches:
        return None
    assert len(matches) == 1
    with open(os.path.join(dir_path, matches[0]), 'r') as match_file:
        return match_file.readlines()
def import_operator_data(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                         mocked_statsd, imported_rows, content, operator_id):
    """Helper function to populate operator data tables.

    Imports the CSV text in *content* as operator data for *operator_id* and
    asserts the import succeeds with exactly *imported_rows* rows imported.
    """
    # All validation checks are disabled so the fixture data imports verbatim,
    # regardless of region/home-network/historic constraints.
    with get_importer(OperatorDataImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      OperatorDataParams(
                          content=content,
                          extract=False,
                          perform_unclean_checks=False,
                          perform_leading_zero_check=False,
                          perform_region_checks=False,
                          perform_home_network_check=False,
                          perform_historic_checks=False,
                          operator=operator_id
                      )) as new_imp:
        expect_success(new_imp, imported_rows, db_conn, logger)
def _notification_list_classification_state_common_code(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                                                        mocked_statsd):
    """Helper function to remove boilerplate in notification delta tests.

    i.e. notifications_list_delta_changed, notifications_list_delta_blacklisted, notifications_list_delta_resolved.

    Imports operator data for four operators, runs dirbs-listgen once with a
    curr_date inside the grace period, and verifies the initial delta
    notification lists for each operator.
    """
    # Populate monthly_network_triplets tables including some triplets with the IMEI in classification_state table
    # (but not all IMEIs)
    # Do it for multiple operators (can be same triplets)
    # Some of the triplets associated with the IMEI should have an IMSI starting with the MCC-MNC of
    # one of the operators (12345678901230, 12345678901231)
    # Other triplets associated with the IMEI should have an IMSI starting with none of the configured
    # networks (12345678901228, 12345678901229)
    # Have some other triplets with some other IMEI not meeting any condition(12345678901227)
    # classification data:
    # imei_norm,cond_name,start_date,end_date,block_date
    # 12345678901227,duplicate_mk1,'2016-01-01',,'2016-04-01'
    # 12345678901230,duplicate_mk1,'2016-01-01',,'2016-04-01'
    # 12345678901233,duplicate_mk1,'2016-01-01',,'2016-04-01'
    for i in range(1, 3):
        import_operator_data(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                             mocked_statsd, 5 * i, content='date,imei,imsi,msisdn\n'
                                                           '20160222,12345678901227,11106678901234,1\n'
                                                           '20160222,12345678901228,11106678901234,1\n'
                                                           '20160222,12345678901229,11106678901234,1\n'
                                                           '20160222,12345678901230,11101678901234,1\n'
                                                           '20160221,12345678901231,11101678901234,1',
                             operator_id='operator{0}'.format(i))
    # add MCC-MNC from operator 4 '20161122,12345678901233,11104678901234,1\n'
    # and check it is on the list even if we don't import it.
    import_operator_data(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                         mocked_statsd, 16, content='date,imei,imsi,msisdn\n'
                                                    '20160222,12345678901227,11106678901234,1\n'
                                                    '20160222,12345678901228,11106678901234,1\n'
                                                    '20160222,12345678901229,11106678901234,1\n'
                                                    '20160222,12345678901230,11101678901234,1\n'
                                                    '20160222,12345678901233,11104678901234,1\n'
                                                    '20160221,12345678901231,11101678901234,1',
                         operator_id='operator3')
    # Run dirbs-listgen once, with curr_date set to ensure that at the one IMEI meeting a blocking condition is still
    # in the grace period.
    # Triplets whose IMSI start with the MCC-MNC of the operator should be in that operator's delta notifications
    # with change_type new. They should not be in the other operator's delta list.
    # Triplets whose IMSI does not start with the MCC-MNC of any operator should be in
    # both operators' delta list with change_type new.
    # Triplets with a different IMEI complete should not be on any list
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0',
                                               delta_fn='notifications_operator1_delta', date='20160301')
    assert len(rows_op_one) == 3
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons,change_type\n'
    # BUG FIX: this comparison previously lacked an 'assert', so its result was
    # silently discarded and the check never ran.
    assert set(rows_op_one[1:]) == {'12345678901227,11106678901234,1,20160401,Duplicate IMEI detected,new\n',
                                    '12345678901230,11101678901234,1,20160401,Duplicate IMEI detected,new\n'}
    rows_op_two = _read_rows_from_file('notifications_operator2.csv', tmpdir, dir_name='run0')
    rows_op_three = _read_rows_from_file('notifications_operator3.csv', tmpdir, dir_name='run0')
    assert len(rows_op_two) == 2
    assert len(rows_op_three) == 2
    assert rows_op_two[0] == rows_op_three[0] == 'imei,imsi,msisdn,block_date,reasons\n'
    expected_rows = {'12345678901227,11106678901234,1,20160401,Duplicate IMEI detected\n'}
    assert set(rows_op_two[1:]) == set(rows_op_three[1:]) == expected_rows
    # should have also this entry 20161122,12345678901233,11104678901234
    rows_op_four = _read_rows_from_file('notifications_operator4.csv', tmpdir, dir_name='run0')
    assert len(rows_op_four) == 2
    assert rows_op_four[0] == 'imei,imsi,msisdn,block_date,reasons\n'
    assert set(rows_op_four[1:]) == {'12345678901233,11104678901234,1,20160401,Duplicate IMEI detected\n'}
def _sql_func_gen_delta_list_common_code(db_conn, name_proc):
    """Helper function to remove boilerplate in sql function delta list tests."""
    # Valid forms: explicit run_id, and -1 (latest) combined with a base run id
    with db_conn, db_conn.cursor() as cur:
        cur.callproc(name_proc, ['operator_1', 1])
        cur.callproc(name_proc, ['operator_1', -1, 2])
        with pytest.raises(Exception) as excinfo:
            cur.callproc(name_proc, ['operator_1', -1, 'a'])
        assert 'invalid input syntax for integer: "a"' in str(excinfo)
    # A failed call aborts the transaction, so each failure case gets a fresh one
    with db_conn, db_conn.cursor() as cur:
        with pytest.raises(Exception) as excinfo:
            cur.callproc(name_proc, ['operator_1', 'a'])
        assert 'invalid input syntax for integer: "a"' in str(excinfo)
    with db_conn, db_conn.cursor() as cur:
        with pytest.raises(Exception) as excinfo:
            cur.callproc(name_proc, ['operator_1', 8, 2])
        assert 'Parameter base_run_id 8 greater than run_id 2' in str(excinfo)
def _sql_func_gen_list_common_code(db_conn, name_proc):
    """Helper function to remove boilerplate in sql function gen list tests."""
    with db_conn, db_conn.cursor() as cur:
        # Both the single-argument and explicit run_id forms must be callable
        cur.callproc(name_proc, ['operator_1'])
        cur.callproc(name_proc, ['operator_1', -1])
        with pytest.raises(Exception) as excinfo:
            cur.callproc(name_proc, ['operator_1', 'a'])
        assert 'invalid input syntax for integer: "a"' in str(excinfo)
def test_cli_arg_no_full_lists(tmpdir, db_conn, mocked_config):
    """Test that the --no-full-lists CLI option works (doesn't produce CSV full lists)."""
    # First run with --no-full-lists: no full-list CSVs should exist, but deltas should
    blacklist_rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0', date=None,
                                                  base_run_id=None, no_full_list=True, delta_fn='blacklist.csv')
    exc_rows_op_two = _read_rows_from_file('exceptions_operator2.csv', tmpdir, dir_name='run0')
    not_rows_op_two = _read_rows_from_file('notifications_operator2.csv', tmpdir, dir_name='run0')
    not_rows_op_three = _read_rows_from_file('notifications_operator3.csv', tmpdir, dir_name='run0')
    assert all([x is None for x in [blacklist_rows, exc_rows_op_two, not_rows_op_two, not_rows_op_three]])
    assert _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run0')
    # Second run without the flag: full-list CSVs are produced (header rows present)
    blacklist_rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date=None,
                                                  base_run_id=None, delta_fn='blacklist.csv')
    exc_rows_op_two = _read_rows_from_file('exceptions_operator2.csv', tmpdir, dir_name='run1')
    exc_delta_rows_op_two = _read_rows_from_file('exceptions_operator2_delta', tmpdir, dir_name='run1')
    not_rows_op_two = _read_rows_from_file('notifications_operator2.csv', tmpdir, dir_name='run1')
    assert blacklist_rows == ['imei,block_date,reasons\n']
    assert exc_rows_op_two == ['imei,imsi\n']
    assert not_rows_op_two == ['imei,imsi,msisdn,block_date,reasons\n']
    assert exc_delta_rows_op_two == ['imei,imsi,change_type\n']
def test_cli_invalid_arg_base_test(tmpdir, db_conn, mocked_config):
    """Test invalid input handling for base CLI arg."""
    # A base run id that never ran must be rejected
    with pytest.raises(Exception) as excinfo:
        _cli_listgen_helper(db_conn, tmpdir, 'run_4', mocked_config, base_run_id=1)
    assert 'Specified base run id 1 not found in list of successful dirbs-listgen runs' in str(excinfo)
    # A genuine previous run id is accepted as a base
    successful_run_id, _ = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config)
    _cli_listgen_helper(db_conn, tmpdir, 'run_2', mocked_config, base_run_id=successful_run_id)
    # Non-integer base run ids must also be rejected
    with pytest.raises(Exception):
        _cli_listgen_helper(db_conn, tmpdir, 'run_3', mocked_config, base_run_id='a')
def test_sql_func_overall_delta_reason(db_conn):
    """Test the SQL function, both with the run_id == -1 and run_id set to a real run_id.

    This is already tested in aggregate_notifications_list_changes test, so just need to check that the function
    signature hasn't changed.
    """
    def overall_delta_reason_helper(reasons_list_input):
        """Run the overall_delta_reason SQL aggregate over the given reasons list."""
        with db_conn, db_conn.cursor() as cursor:
            cursor.execute("""SELECT overall_delta_reason(reason)
                                FROM (SELECT UNNEST(%s) AS reason) foo;""", [reasons_list_input])
            return cursor.fetchone().overall_delta_reason

    # If net_adds is non-zero, the most recent add or remove reason wins
    for reasons_list, expected in [(['changed', 'new', 'removed', 'new'], 'new'),
                                   (['changed', 'new'], 'new'),
                                   (['removed', 'new', 'removed', 'changed'], 'removed')]:
        assert overall_delta_reason_helper(reasons_list) == expected
    # If adds/removes balance out and no change reason was seen, result is NULL
    for reasons_list in (['new', 'removed'],
                         ['removed', 'new', 'removed', 'new']):
        assert overall_delta_reason_helper(reasons_list) is None
    # If there was a change and the add/remove history balances, result is 'changed'
    for reasons_list in (['changed', 'new', 'removed'],
                         ['new', 'removed', 'changed']):
        assert overall_delta_reason_helper(reasons_list) == 'changed'
    # Invalid combinations raise descriptive errors
    for reasons_list, message in [(['new', 'new'], 'Multiple add reasons in a row - should not happen!'),
                                  (['unblocked', 'unblocked'], 'Multiple remove reasons in a row - should not happen!'),
                                  (['foo', 'bar'], 'Unknown reason "foo" - not add, remove or change type!')]:
        with pytest.raises(Exception) as excinfo:
            overall_delta_reason_helper(reasons_list)
        assert message in str(excinfo)
def test_sql_func_gen_blacklist(db_conn):
    """Test the SQL function, both with the run_id == -1 and run_id set to a real run_id.

    This is already tested in aggregate_notifications_list_changes test, so just need to check that the function
    signature hasn't changed.
    """
    with db_conn, db_conn.cursor() as cur:
        # Both an explicit run_id and the no-argument (latest) form must be callable
        cur.callproc('gen_blacklist', [1])
        cur.callproc('gen_blacklist', [])
        with pytest.raises(Exception) as excinfo:
            cur.callproc('gen_blacklist', ['a'])
        assert 'invalid input syntax for integer: "a"' in str(excinfo)
def test_sql_func_gen_notifications_list(db_conn):
    """Test the SQL function, both with the run_id == -1 and run_id set to a real run_id.

    Behavior is already covered by the aggregate_notifications_list_changes test;
    this only verifies that the gen_notifications_list signature hasn't changed.
    """
    _sql_func_gen_list_common_code(db_conn, 'gen_notifications_list')
def test_sql_func_gen_exceptions_list(db_conn):
    """Test the SQL function, both with the run_id == -1 and run_id set to a real run_id.

    Behavior is already covered by the aggregate_notifications_list_changes test;
    this only verifies that the gen_exceptions_list signature hasn't changed.
    """
    _sql_func_gen_list_common_code(db_conn, 'gen_exceptions_list')
def test_sql_func_gen_delta_blacklist(db_conn):
    """Test the SQL function, both with the run_id == -1 and run_id set to a real run_id.

    This is already tested in aggregate_notifications_list_changes test, so just need to check that the function
    signature hasn't changed.
    """
    with db_conn, db_conn.cursor() as cur:
        # Valid forms: explicit run_id, and -1 (latest) combined with a base run id
        cur.callproc('gen_delta_blacklist', [1])
        cur.callproc('gen_delta_blacklist', [-1, 2])
        with pytest.raises(Exception) as excinfo:
            cur.callproc('gen_delta_blacklist', [-1, 'a'])
        assert 'invalid input syntax for integer: "a"' in str(excinfo)
    # A failed call aborts the transaction, so open a fresh one for the next case
    with db_conn, db_conn.cursor() as cur:
        with pytest.raises(Exception) as excinfo:
            cur.callproc('gen_delta_blacklist', ['a'])
        assert 'invalid input syntax for integer: "a"' in str(excinfo)
def test_sql_func_gen_delta_notifications_list(db_conn):
    """Test the SQL function, both with the run_id == -1 and run_id set to a real run_id.

    Behavior is already covered by the aggregate_notifications_list_changes test;
    this only verifies that the gen_delta_notifications_list signature hasn't changed.
    """
    _sql_func_gen_delta_list_common_code(db_conn, 'gen_delta_notifications_list')
def test_sql_func_gen_delta_exceptions_list(db_conn):
    """Test the SQL function, both with the run_id == -1 and run_id set to a real run_id.

    Behavior is already covered by the aggregate_notifications_list_changes test;
    this only verifies that the gen_delta_exceptions_list signature hasn't changed.
    """
    _sql_func_gen_delta_list_common_code(db_conn, 'gen_delta_exceptions_list')
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v7.csv'],
                         indirect=True)
def test_blacklist_delta_blocked(postgres, db_conn, tmpdir, mocked_config, logger, classification_data):
    """Test blacklist_delta_blocked.

    Test that the delta between two listgen runs is OK when a new IMEI is added into the
    classification_state table for a blocking condition: the second delta blacklist must
    contain only the newly added IMEI with change_type 'blocked'.
    """
    # Populate the classification_state table with some initial data
    # imei_norm, cond_name, start_date, end_date, block_date
    # 12345678901227, duplicate_mk1, '2016-01-01',, '2016-06-01'
    # 12345678901230, duplicate_mk1, '2016-01-01',, '2016-04-01'
    # 12345678901233, duplicate_mk1, '2016-01-01',, '2016-12-01'
    # Run dirbs-listgen once, with curr_date set to ensure that at least one IMEI is blacklisted
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0', date='20160501',
                                               delta_fn='blacklist_delta')
    assert rows_op_one == ['imei,block_date,reasons,change_type\n',
                           '12345678901230,20160401,Duplicate IMEI detected,blocked\n']
    # Add a different IMEI into classification_state
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""INSERT INTO classification_state (run_id, imei_norm, cond_name, start_date, end_date,
                                                            block_date, virt_imei_shard)
                               VALUES('1','12345678901231','gsma_not_found','2016-01-01',NULL,'2016-02-01',
                                      calc_virt_imei_shard('12345678901231'))""")
    # Run dirbs-listgen again, with curr_date set to ensure that the new IMEI is blacklisted
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160501',
                                               delta_fn='blacklist_delta')
    # Assert that delta blacklist contains one row with the IMEI and change_type == 'blocked'
    assert rows_op_one == ['imei,block_date,reasons,change_type\n',
                           '12345678901231,20160201,TAC not found in GSMA TAC database,blocked\n']
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v7.csv'],
                         indirect=True)
def test_blacklist_delta_unblocked(postgres, db_conn, tmpdir, mocked_config, logger, classification_data):
    """Test blacklist_delta_unblocked.

    Test that the delta between two listgen runs is OK when an IMEI is removed from the
    classification_state table: the second delta blacklist must contain only that IMEI
    with change_type 'unblocked'.
    """
    # Populate the classification_state table with some initial data
    # imei_norm, cond_name, start_date, end_date, block_date
    # 12345678901227, duplicate_mk1, '2016-01-01',, '2016-06-01'
    # 12345678901230, duplicate_mk1, '2016-01-01',, '2016-04-01'
    # 12345678901233, duplicate_mk1, '2016-01-01',, '2016-12-01'
    # Run dirbs-listgen once, with curr_date set to ensure that at least two IMEIs blacklisted
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0', date='20160701',
                                               delta_fn='blacklist_delta')
    assert rows_op_one == ['imei,block_date,reasons,change_type\n',
                           '12345678901227,20160601,Duplicate IMEI detected,blocked\n',
                           '12345678901230,20160401,Duplicate IMEI detected,blocked\n']
    # Delete one IMEI from classification_state
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""DELETE FROM classification_state
                                WHERE imei_norm = '12345678901227'""")
    # Run dirbs-listgen again, with curr_date set to ensure that the removed IMEI is unblocked
    # Assert that delta blacklist contains one row with the IMEI and change_type == 'unblocked'
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160501',
                                               delta_fn='blacklist_delta')
    assert rows_op_one == ['imei,block_date,reasons,change_type\n',
                           '12345678901227,20160601,Duplicate IMEI detected,unblocked\n']
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v6.csv'],
                         indirect=True)
def test_blacklist_delta_changed(postgres, db_conn, tmpdir, mocked_config, logger, classification_data):
    """Test blacklist_delta_changed.

    Test that the delta between two listgen runs is OK when an already-blacklisted IMEI
    gains an additional blocking condition: the delta blacklist must show the IMEI with
    change_type 'changed' and the combined (pipe-delimited) reasons.
    """
    # Populate the classification_state table with some initial data
    # imei_norm,cond_name,start_date,end_date,block_date
    # 12345678901227,duplicate_mk1,'2016-01-01',,'2016-04-01'
    # 12345678901230,duplicate_mk1,'2016-01-01',,'2016-02-01'
    # 12345678901233,duplicate_mk1,'2016-01-01',,'2016-04-01'
    # Run dirbs-listgen once, with curr_date set to ensure that at least one IMEI is blacklisted
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0', date='20160201',
                                               delta_fn='blacklist_delta')
    assert rows_op_one == ['imei,block_date,reasons,change_type\n',
                           '12345678901230,20160201,Duplicate IMEI detected,blocked\n']
    # Add the same IMEI into classification_state with a different cond_name (blocking)
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""INSERT INTO classification_state (run_id, imei_norm, cond_name, start_date, end_date,
                                                            block_date, virt_imei_shard)
                               VALUES('1','12345678901230','gsma_not_found','2016-01-01',NULL,'2016-02-01',
                                      calc_virt_imei_shard('12345678901230'))""")
    # Run dirbs-listgen again, with curr_date set to ensure that the new condition is blacklisted
    # Assert that delta blacklist contains one row with the IMEI and change_type == 'changed'
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160301',
                                               delta_fn='blacklist_delta')
    assert rows_op_one == ['imei,block_date,reasons,change_type\n',
                           '12345678901230,20160201,Duplicate IMEI detected|'
                           'TAC not found in GSMA TAC database,changed\n']
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v5.csv'],
                         indirect=True)
def test_notifications_list_delta_new(postgres, db_conn, tmpdir, mocked_config, logger,
                                      metadata_db_conn, mocked_statsd, classification_data):
    """Test notifications_list_delta_new.

    After the common setup, importing one new triplet whose IMEI meets a blocking
    condition must yield exactly that triplet on the operator's delta notification
    list with change_type 'new'.
    """
    # run common code for notification_list_delta tests to test classification_state and notification tables.
    _notification_list_classification_state_common_code(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                                                        mocked_statsd)
    # Add a new triplet with the IMEI meeting a blocking condition
    import_operator_data(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                         mocked_statsd, 17, content='date,imei,imsi,msisdn\n'
                                                    '20160222,12345678901233,11102678901234,1',
                         operator_id='operator2')
    rows_op_two, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160301',
                                               delta_fn='notifications_operator2_delta')
    assert rows_op_two == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                           '12345678901233,11102678901234,1,20160401,Duplicate IMEI detected,new\n']
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v5.csv'],
                         indirect=True)
@pytest.mark.parametrize('pairing_list_importer',
                         [PairListParams(content='imei,imsi\n'
                                                 '12345678901230,11101678901234')],
                         indirect=True)
@pytest.mark.parametrize('golden_list_importer',
                         [GoldenListParams(content='GOLDEN_IMEI\n'
                                                   '12345678901227')],
                         indirect=True)
def test_notifications_list_delta_resolved(postgres, db_conn, tmpdir, mocked_config, logger, pairing_list_importer,
                                           metadata_db_conn, mocked_statsd, classification_data, golden_list_importer):
    """Test notifications_list_delta_resolved.

    Exercises the three ways a notification can become 'resolved': pairing the
    IMEI-IMSI, adding the IMEI to the golden list, and end-dating the IMEI's
    classification_state row. Also checks that removing the golden-list entry
    brings the triplet back with change_type 'new'.
    """
    # run common code for notification_list_delta tests to test classification_state and notification tables.
    _notification_list_classification_state_common_code(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                                                        mocked_statsd)
    # Add a pairing to the pairing list for one IMEI-IMSI and re-run dirbs-listgen ('12345678901230,11101678901234')
    # IMEIs in notification_list before pairing (12345678901227, 12345678901230). Expect to remove 12345678901230.
    import_data(pairing_list_importer, 'pairing_list', 1, db_conn, logger)
    # Run dirbs-listgen, triplets with that IMEI-IMSI should now have a 'resolved' change_type
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons\n',
                           '12345678901227,11106678901234,1,20160401,Duplicate IMEI detected\n']
    rows_op_one = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run1')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                           '12345678901230,11101678901234,1,20160401,Duplicate IMEI detected,resolved\n']
    # Add the IMEI to the golden list(12345678901227)
    import_data(golden_list_importer, 'golden_list', 1, db_conn, logger)
    # Run dirbs-listgen, remaining triplets with that IMEI should now have a 'resolved' change_type
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run2', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons\n']
    rows_op_one = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run2')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                           '12345678901227,11106678901234,1,20160401,Duplicate IMEI detected,resolved\n']
    # Remove IMEI from the golden list
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""DELETE FROM golden_list""")
    # Run dirbs-listgen, remaining triplets should be added back with 'new' change_type
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run3', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons\n',
                           '12345678901227,11106678901234,1,20160401,Duplicate IMEI detected\n']
    rows_op_one = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run3')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                           '12345678901227,11106678901234,1,20160401,Duplicate IMEI detected,new\n']
    # Remove IMEI from the classification_state table by setting end_date to non-NULL
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""UPDATE classification_state
                             SET end_date = '20160201'
                           WHERE imei_norm = '12345678901227'""")
    # Run dirbs-listgen, remaining triplets with that IMEI should now have a 'resolved' change_type
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run4', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons\n']
    rows_op_one_delta = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run4')
    assert rows_op_one_delta == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                                 '12345678901227,11106678901234,1,20160401,Duplicate IMEI detected,resolved\n']
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v5.csv'],
                         indirect=True)
@pytest.mark.parametrize('pairing_list_importer',
                         [PairListParams(content='imei,imsi\n'
                                                 '12345678901230,11101678901234')],
                         indirect=True)
@pytest.mark.parametrize('golden_list_importer',
                         [GoldenListParams(content='GOLDEN_IMEI\n'
                                                   '12345678901233')],
                         indirect=True)
def test_notifications_list_delta_no_longer_seen(postgres, db_conn, tmpdir, mocked_config, logger,
                                                 pairing_list_importer, golden_list_importer,
                                                 metadata_db_conn, mocked_statsd, classification_data, monkeypatch):
    """Test notifications_list_delta_no_longer_seen.

    With lookback_days patched to 0, previously-notified triplets fall outside the
    lookback window: unpaired, non-golden triplets get change_type 'no_longer_seen',
    while paired or golden IMEIs must be 'resolved' instead.
    """
    # run common code for notification_list_delta tests to test classification_state and notification tables.
    _notification_list_classification_state_common_code(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                                                        mocked_statsd)
    # If we reduce the lookback days to 0, we expect to see a whole bunch of no_longer_seen
    monkeypatch.setattr(mocked_config.listgen_config, 'lookback_days', 0)
    # Add a pairing to the pairing list for one IMEI-IMSI and re-run dirbs-listgen ('12345678901230,11101678901234')
    # IMEIs in notification_list before pairing (12345678901227, 12345678901230). Expect to remove 12345678901230.
    import_data(pairing_list_importer, 'pairing_list', 1, db_conn, logger)
    # Add the IMEI to the golden list(12345678901233)
    import_data(golden_list_importer, 'golden_list', 1, db_conn, logger)
    # Run dirbs-listgen, all triplets should now have a 'no_longer_seen' change_type
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons\n']
    rows_op_one = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run1')
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons,change_type\n'
    # 12345678901227 should be no_longer_seen since it is not paired. 12345678901230 should be resolved since it
    # was paired in the pairing list import
    assert set(rows_op_one[1:]) == \
        {'12345678901227,11106678901234,1,20160401,Duplicate IMEI detected,no_longer_seen\n',
         '12345678901230,11101678901234,1,20160401,Duplicate IMEI detected,resolved\n'}
    rows_op_one = _read_rows_from_file('notifications_operator4.csv', tmpdir, dir_name='run1')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons\n']
    rows_op_one = _read_rows_from_file('notifications_operator4_delta', tmpdir, dir_name='run1')
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons,change_type\n'
    # 12345678901233 should be resolved since is it is on the golden_list -- should not be no_longer_seen
    assert set(rows_op_one[1:]) == {'12345678901233,11104678901234,1,20160401,Duplicate IMEI detected,resolved\n'}
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v5.csv'],
                         indirect=True)
def test_notifications_list_delta_blacklisted(postgres, db_conn, tmpdir, mocked_config, logger,
                                              metadata_db_conn, mocked_statsd, classification_data):
    """Test notifications_list_delta_blacklisted.

    After the grace period expires (curr_date beyond block_date), previously
    notified triplets must move off the notifications list and appear on the
    delta list with change_type 'blacklisted'.
    """
    # run common code for notification_list_delta tests to test classification_state and notification tables.
    _notification_list_classification_state_common_code(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                                                        mocked_statsd)
    # Run dirbs-listgen again with a different curr_date after the block_date of that IMEI so that it is on
    # the blacklist.
    # block date 20160401 - curr-date for list-gen 20160601
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160601',
                                               delta_fn='notifications_operator1.csv')
    # All triplets should be on the delta notifications list with a change_type of 'blacklisted'
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons\n']
    rows_op_one = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run1')
    assert len(rows_op_one) == 3
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons,change_type\n'
    assert set(rows_op_one[1:]) == {'12345678901227,11106678901234,1,20160401,Duplicate IMEI detected,blacklisted\n',
                                    '12345678901230,11101678901234,1,20160401,Duplicate IMEI detected,blacklisted\n'}
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v5.csv'],
                         indirect=True)
def test_notifications_list_delta_changed(postgres, db_conn, tmpdir, mocked_config, logger,
                                          metadata_db_conn, mocked_statsd, classification_data):
    """Test notifications_list_delta_changed.

    A 'changed' delta must be produced when a notified IMEI's reasons change:
    adding a second blocking condition, removing the original condition, and
    changing the block_date are each exercised in turn.
    """
    # run common code for notification_list_delta tests to test classification_state and notification tables.
    _notification_list_classification_state_common_code(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                                                        mocked_statsd)
    # Insert new row in classification_state with another blocking condition's cond_name for the same
    # IMEI(12345678901230)
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""INSERT INTO classification_state (run_id, imei_norm, cond_name, start_date, end_date,
                                                            block_date, virt_imei_shard)
                               VALUES('1','12345678901230','gsma_not_found','2016-01-01',NULL,'2016-04-01',
                                      calc_virt_imei_shard('12345678901230'))""")
    # Run dirbs-listgen again, should see change_type of 'changed' for this IMEI and the delta list should
    # contain the new reasons (2) (pipe-delimited on one row)
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    assert len(rows_op_one) == 3
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons\n'
    assert set(rows_op_one[1:]) == {'12345678901227,11106678901234,1,20160401,Duplicate IMEI detected\n',
                                    '12345678901230,11101678901234,1,20160401,Duplicate IMEI detected|TAC '
                                    'not found in GSMA TAC database\n'}
    rows_op_one = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run1')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                           '12345678901230,11101678901234,1,20160401,'
                           'Duplicate IMEI detected|TAC not found in GSMA TAC database,changed\n']
    with db_conn, db_conn.cursor() as cursor:
        # Remove first row from classification_state table for that IMEI
        cursor.execute("""DELETE FROM classification_state
                                WHERE imei_norm = '12345678901230'
                                  AND cond_name = 'duplicate_mk1'""")
    # Run dirbs-listgen again, should see change_type of 'changed' for this IMEI and the delta
    # list should contain the new reasons (the new one only)
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run2', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    assert len(rows_op_one) == 3
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons\n'
    assert set(rows_op_one[1:]) == {'12345678901227,11106678901234,1,20160401,Duplicate IMEI detected\n',
                                    '12345678901230,11101678901234,1,20160401,TAC not found in GSMA TAC database\n'}
    rows_op_one = _read_rows_from_file('notifications_operator1_delta', tmpdir, dir_name='run2')
    assert rows_op_one == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                           '12345678901230,11101678901234,1,20160401,TAC not found in GSMA TAC database,changed\n']
    with db_conn, db_conn.cursor() as cursor:
        # Change the block_date of the row in the classification_state table
        cursor.execute("""UPDATE classification_state
                             SET block_date = '20160403'
                           WHERE imei_norm IN ('12345678901230', '12345678901227')""")
    # Run dirbs-listgen again, should see change_type of 'changed' for this IMEI
    # and the delta list should contain the new reasons (the new one only) and the new block date
    rows_op_one, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run3', date='20160301',
                                               delta_fn='notifications_operator1.csv')
    rows_op_one = _read_rows_from_file('notifications_operator1.csv', tmpdir, dir_name='run3')
    assert len(rows_op_one) == 3
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons\n'
    assert set(rows_op_one[1:]) == {'12345678901227,11106678901234,1,20160403,Duplicate IMEI detected\n',
                                    '12345678901230,11101678901234,1,20160403,TAC not found in GSMA TAC database\n'}
    delta_fn = 'notifications_operator1_delta'
    rows_op_one = _read_rows_from_file(delta_fn, tmpdir, dir_name='run3')
    assert len(rows_op_one) == 3
    assert rows_op_one[0] == 'imei,imsi,msisdn,block_date,reasons,change_type\n'
    assert set(rows_op_one[1:]) == {'12345678901227,11106678901234,1,20160403,Duplicate IMEI detected,changed\n',
                                    '12345678901230,11101678901234,1,20160403,TAC not found in '
                                    'GSMA TAC database,changed\n'}
@pytest.mark.parametrize('pairing_list_importer',
                         [PairListParams(content='imei,imsi\n'
                                                 '12345678901228,11105678901234\n'
                                                 '12345678901229,11106678901234\n'
                                                 '12345678901230,11107678901234\n'
                                                 '12345678901231,11108678901234\n'
                                                 '12345678901232,11109678901234\n'
                                                 '12345678901227,11110678901234\n'
                                                 '12345678901233,11101678901234\n'
                                                 '12345678901234,11101678901234\n'
                                                 '12345678901235,11102678901234\n'
                                                 '12345678901236,11102678901234')],
                         indirect=True)
@pytest.mark.parametrize('operator_data_importer',
                         [OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20161122,12345678901228,11105678901234,1\n'
                                     '20161122,12345678901229,11106678901234,1\n'
                                     '20161122,12345678901230,11107678901234,1',
                             extract=False,
                             perform_unclean_checks=False,
                             perform_leading_zero_check=False,
                             perform_region_checks=False,
                             perform_home_network_check=False,
                             operator='operator1'
                         )],
                         indirect=True)
def test_exceptions_list_delta_added(postgres, db_conn, tmpdir, mocked_config, pairing_list_importer, logger,
                                     operator_data_importer, metadata_db_conn, mocked_statsd):
    """Test exceptions_list_delta_added.

    On a first listgen run, every relevant pairing should appear on the delta
    exceptions lists with change_type 'added': pairings whose IMSI prefix
    matches an operator's MCC-MNC go on that operator's list, and "unknown"
    pairings (IMSI matches no operator) go on every operator's list.
    """
    # Populate pairing list with some IMEI-IMSI pairs
    # Have at least some IMSIs (at least 3) where the MCC-MNC does not start with the prefix for any operator
    # '12345678901228,11105678901234\n'
    # '12345678901229,11106678901234\n'
    # '12345678901230,11107678901234\n'
    # '12345678901231,11108678901234\n'
    # '12345678901232,11109678901234\n'
    # '12345678901227,11110678901234\n'
    # Have some IMSIs that start with operator 1's prefix
    # '12345678901233,11101678901234\n'
    # '12345678901234,11101678901234\n'
    # Have some IMSIs that start with operator 2's prefix
    # '12345678901235,11102678901234\n'
    # '12345678901236,11102678901234\n'
    with db_conn:
        expect_success(pairing_list_importer, 10, db_conn, logger)
        # Populate monthly_network_triplets table
        # Put some of the "unknown" IMSIs into operator 1's operator data
        # '12345678901228,11105678901234\n'
        # '20161122,12345678901230,11107678901234,1\n',
        # Put some of the "unknown" IMSIs into operator 2's operator data
        # '20161121,12345678901231,11108678901234,1\n',
        # Put some of the "unknown" IMSIs into both operator's data
        # '20161122,12345678901229,11106678901234,1\n'
        # Some of the "unknown" IMSIs should not be in any operator's data
        # '12345678901232,11109678901234\n'
        # '12345678901227,11110678901234\n'
        expect_success(operator_data_importer, 3, db_conn, logger)
        import_operator_data(db_conn, metadata_db_conn, mocked_config, tmpdir, logger,
                             mocked_statsd, 5, content='date,imei,imsi,msisdn\n'
                                                       '20161122,12345678901229,11106678901234,1\n'
                                                       '20161121,12345678901231,11108678901234,1', operator_id='operator2')
        # Run dirbs-listgen
        rows_op_one_list, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0', date='20160301',
                                                        delta_fn='exceptions_operator1_delta')
        # verify that:
        # - Delta exceptions list for operator 1 contains all the IMEI-IMSIs
        # where the prefix started with Operator 1's MCC-MNC (12345678901233, 12345678901234)
        # or where the IMSI did not match any operator
        # The change_type should be 'added'.
        assert len(rows_op_one_list) == 9
        assert rows_op_one_list[0] == 'imei,imsi,change_type\n'
        assert set(rows_op_one_list[1:]) == {'12345678901228,11105678901234,added\n',
                                             '12345678901229,11106678901234,added\n',
                                             '12345678901230,11107678901234,added\n',
                                             '12345678901233,11101678901234,added\n',
                                             '12345678901234,11101678901234,added\n',
                                             '12345678901231,11108678901234,added\n',
                                             '12345678901227,11110678901234,added\n',
                                             '12345678901232,11109678901234,added\n'}
        # - Delta exceptions list for operator 2 contains all the IMEI-IMSIs where the prefix started with
        # Operator 2's MCC-MNC or where the IMSI did not match any operator. The change_type should be 'added'.
        rows_op_two_list = _read_rows_from_file('exceptions_operator2_delta', tmpdir, dir_name='run0')
        assert len(rows_op_two_list) == 9
        assert rows_op_two_list[0] == 'imei,imsi,change_type\n'
        assert set(rows_op_two_list[1:]) == {'12345678901229,11106678901234,added\n',
                                             '12345678901231,11108678901234,added\n',
                                             '12345678901235,11102678901234,added\n',
                                             '12345678901236,11102678901234,added\n',
                                             '12345678901227,11110678901234,added\n',
                                             '12345678901232,11109678901234,added\n',
                                             '12345678901230,11107678901234,added\n',
                                             '12345678901228,11105678901234,added\n'}
        # - Delta exceptions list for all operators contain any "unknown" IMEIs that were not seen with any operator
        # rows_op_one is a list of str
        # i.e. ['imei,imsi,change_type\n','12345678901228,11105678901234,added\n','12345678901229,11106678901234,added\n']
        for rows_op_list in [rows_op_one_list, rows_op_two_list]:
            assert len(set([r[:14] for r in rows_op_list]) & {'12345678901232', '12345678901227'}) == 2
        # IMSIs that were "unknown" and seen with both operator 1 and operator 2 should be in both operators'
        # delta exceptions lists with a change_type of added.
        for rows_op_list in [rows_op_one_list, rows_op_two_list]:
            assert len(set([r[:14] for r in rows_op_list]) & {'12345678901229'}) == 1
@pytest.mark.parametrize('pairing_list_importer',
                         [PairListParams(content='imei,imsi\n'
                                                 '12345678901230,11107678901234\n'
                                                 '12345678901231,11108678901234\n'
                                                 '12345678901232,11109678901234\n'
                                                 '12345678901233,11101678901234\n'
                                                 '12345678901234,11101678901234\n'
                                                 '12345678901235,11102678901234\n'
                                                 '12345678901236,11102678901234\n')],
                         indirect=True)
def test_exceptions_list_delta_removed(postgres, db_conn, tmpdir, mocked_config, pairing_list_importer, logger):
    """Test exceptions_list_delta_removed.

    After pairings are deleted from the pairing list, a subsequent listgen run
    should emit exactly those pairings on each affected operator's delta
    exceptions list with change_type 'removed'.
    """
    # Populate pairing list with some IMEI-IMSI pairs
    # Have at least some IMSIs (at least 3) where the MCC-MNC does not start with the prefix for any operator
    # '12345678901230,11107678901234\n'
    # '12345678901231,11108678901234\n'
    # '12345678901232,11109678901234\n'
    # Have some IMSIs that start with operator 1's prefix
    # '12345678901233,11101678901234\n'
    # '12345678901234,11101678901234\n'
    # Have some IMSIs that start with operator 2's prefix
    # '12345678901235,11102678901234\n'
    # '12345678901236,11102678901234\n'
    with db_conn:
        expect_success(pairing_list_importer, 7, db_conn, logger)
        # Run dirbs-listgen once
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0',
                                            delta_fn='exceptions_operator1_delta')
        # Verify that rows have been added
        assert len(rows) == 6
        assert rows[0] == 'imei,imsi,change_type\n'
        assert set(rows[1:]) == {'12345678901233,11101678901234,added\n',
                                 '12345678901234,11101678901234,added\n',
                                 '12345678901230,11107678901234,added\n',
                                 '12345678901231,11108678901234,added\n',
                                 '12345678901232,11109678901234,added\n'}
        rows = _read_rows_from_file('exceptions_operator2_delta', tmpdir, dir_name='run0')
        assert len(rows) == 6
        assert rows[0] == 'imei,imsi,change_type\n'
        assert set(rows[1:]) == {'12345678901235,11102678901234,added\n',
                                 '12345678901236,11102678901234,added\n',
                                 '12345678901230,11107678901234,added\n',
                                 '12345678901231,11108678901234,added\n',
                                 '12345678901232,11109678901234,added\n'}
        # Operator 3 has no matching IMSI prefixes, so it only gets the "unknown" pairings
        rows = _read_rows_from_file('exceptions_operator3_delta', tmpdir, dir_name='run0')
        assert len(rows) == 4
        assert rows[0] == 'imei,imsi,change_type\n'
        assert set(rows[1:]) == {'12345678901230,11107678901234,added\n',
                                 '12345678901231,11108678901234,added\n',
                                 '12345678901232,11109678901234,added\n'}
    # Remove a pairing per each operator delta list
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""DELETE
                            FROM pairing_list
                           WHERE imei_norm IN ('12345678901230', '12345678901233', '12345678901236')""")
        # Run dirbs-listgen, verify that:
        # All exception list deltas are empty except for the one pairing with a change_type of 'removed'
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1',
                                            delta_fn='exceptions_operator1_delta')
        assert len(rows) == 3
        assert rows[0] == 'imei,imsi,change_type\n'
        assert set(rows[1:]) == {'12345678901233,11101678901234,removed\n',
                                 '12345678901230,11107678901234,removed\n'}
        rows = _read_rows_from_file('exceptions_operator2_delta', tmpdir, dir_name='run1')
        assert len(rows) == 3
        assert rows[0] == 'imei,imsi,change_type\n'
        assert set(rows[1:]) == {'12345678901236,11102678901234,removed\n',
                                 '12345678901230,11107678901234,removed\n'}
        rows = _read_rows_from_file('exceptions_operator3_delta', tmpdir, dir_name='run1')
        assert len(rows) == 2
        assert set(rows[1:]) == {'12345678901230,11107678901234,removed\n'}
def test_aggregate_blacklist_changes(postgres, db_conn, tmpdir, mocked_config):
    """Test aggregate_blacklist_changes.

    Manually inserts a blacklist delta history for a single IMEI (rows keyed
    by start_run_id/end_run_id), then runs listgen repeatedly with different
    --base run ids to verify the delta is correctly aggregated across the
    intervening history rows.
    """
    # Run dirbs-listgen once to create empty partitions
    _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0', date='20160301')
    # Manually populate the blacklist with a delta history for one IMEI
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""INSERT into blacklist(imei_norm, block_date, reasons, start_run_id, end_run_id, delta_reason,
                                                virt_imei_shard)
                          VALUES('12345678901234', '20170201', ARRAY['condition2', 'condition3'],
                                 1116, NULL, 'unblocked', calc_virt_imei_shard('12345678901234')),
                                ('12345678901234', '20170201', ARRAY['condition2', 'condition3'], 1113, 1116,
                                 'changed', calc_virt_imei_shard('12345678901234')),
                                ('12345678901234', '20170301', ARRAY['condition2'], 1112, 1113, 'blocked',
                                 calc_virt_imei_shard('12345678901234')),
                                ('12345678901234', '20170301', ARRAY['condition1'], 1004, 1112, 'unblocked',
                                 calc_virt_imei_shard('12345678901234')),
                                ('12345678901234', '20170301', ARRAY['condition1'], 1000, 1004, 'blocked',
                                 calc_virt_imei_shard('12345678901234'))""")
        cursor.execute("""SELECT COUNT(*) AS count_bl FROM blacklist""")
        assert cursor.fetchone().count_bl == 5
        # Register each run id referenced above as a successful listgen job
        for i in [900, 1000, 1003, 1004, 1112, 1113, 1116]:
            job_metadata_importer(db_conn=db_conn, command='dirbs-listgen', run_id=i,
                                  status='success', extra_metadata={})
        delta_fn = 'blacklist_delta'
        # Run dirbs-listgen with --base 900, should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', delta_fn=delta_fn, base_run_id=900)
        assert rows == ['imei,block_date,reasons,change_type\n']
        # Run dirbs-listgen with --base 1000, should get change_type of unblocked
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run2', delta_fn=delta_fn,
                                            base_run_id=1000)
        assert rows == ['imei,block_date,reasons,change_type\n',
                        '12345678901234,20170201,condition2|condition3,unblocked\n']
        # Run dirbs-listgen with --base 1003, should get change_type of unblocked
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run3', delta_fn=delta_fn,
                                            base_run_id=1003)
        assert rows == ['imei,block_date,reasons,change_type\n',
                        '12345678901234,20170201,condition2|condition3,unblocked\n']
        # Run dirbs-listgen with --base 1004 should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run4', delta_fn=delta_fn,
                                            base_run_id=1004)
        assert rows == ['imei,block_date,reasons,change_type\n']
        # Run dirbs-listgen with --base 1112 should get change_type of unblocked
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run5', delta_fn=delta_fn,
                                            base_run_id=1112)
        assert rows == ['imei,block_date,reasons,change_type\n',
                        '12345678901234,20170201,condition2|condition3,unblocked\n']
        # Run dirbs-listgen with --base 1113 should get change_type of unblocked
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run6', delta_fn=delta_fn,
                                            base_run_id=1113)
        assert rows == ['imei,block_date,reasons,change_type\n',
                        '12345678901234,20170201,condition2|condition3,unblocked\n']
        # Run dirbs-listgen with --base 1116 should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run7', delta_fn=delta_fn,
                                            base_run_id=1116)
        assert rows == ['imei,block_date,reasons,change_type\n']
def test_aggregate_notifications_list_changes(postgres, db_conn, tmpdir, mocked_config):
    """Test aggregate_notifications_list_changes.

    Manually inserts a notifications-list delta history for a single IMEI
    (rows keyed by start_run_id/end_run_id), then runs listgen with different
    --base run ids to verify the aggregated change_type for each base point.
    """
    # Run dirbs-listgen once to create empty partitions
    _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0')
    # Manually populate one operator's notifications_list
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""INSERT INTO notifications_lists_operator1 (operator_id, imei_norm, imsi, msisdn, block_date,
                                                                     reasons, start_run_id, end_run_id, delta_reason,
                                                                     virt_imei_shard)
                          VALUES ('operator1', '12345678901234', '12345678901234', '1', '20170110',
                                  ARRAY['condition1'], 1125, NULL, 'new', calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170110',
                                  ARRAY['condition1'], 1122, 1125, 'blacklisted',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170110',
                                  ARRAY['condition1'], 1121, 1122, 'changed',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170105',
                                  ARRAY['condition1', 'condition2'], 1120, 1121, 'changed',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170101',
                                  ARRAY['condition1', 'condition2', 'condition3'], 1116, 1120, 'new',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170101',
                                  ARRAY['condition1', 'condition2', 'condition3'], 1113, 1116, 'resolved',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170101',
                                  ARRAY['condition1', 'condition2', 'condition3'], 1112, 1113, 'changed',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170201',
                                  ARRAY['condition1', 'condition2'], 1004, 1112, 'changed',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', '1', '20170301',
                                  ARRAY['condition1'], 1000, 1004, 'new', calc_virt_imei_shard('12345678901234'))
                       """)
        cursor.execute("""SELECT COUNT(*) AS count_nl FROM notifications_lists_operator1""")
        assert cursor.fetchone().count_nl == 9
        # Register each run id referenced above as a successful listgen job
        for i in [900, 1000, 1003, 1004, 1112, 1113, 1116, 1120, 1121, 1122, 1125]:
            job_metadata_importer(db_conn=db_conn, command='dirbs-listgen', run_id=i,
                                  status='success', extra_metadata={})
        # delta filename
        delta_fn = 'notifications_operator1_delta'
        # Run dirbs-listgen with --base 900, should be "new"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', delta_fn=delta_fn, base_run_id=900)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,new\n']
        # Run dirbs-listgen with --base 1000, should get change_type of "changed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run2', delta_fn=delta_fn,
                                            base_run_id=1000)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,changed\n']
        # Run dirbs-listgen with --base 1003, should get change_type of "changed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run3', delta_fn=delta_fn,
                                            base_run_id=1003)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,changed\n']
        # Run dirbs-listgen with --base 1004, should get change_type of "changed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run4', delta_fn=delta_fn,
                                            base_run_id=1004)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,changed\n']
        # Run dirbs-listgen with --base 1112, should get change_type of "changed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run5', delta_fn=delta_fn,
                                            base_run_id=1112)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,changed\n']
        # Run dirbs-listgen with --base 1113 should get change_type of "new"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run6', delta_fn=delta_fn,
                                            base_run_id=1113)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,new\n']
        # Run dirbs-listgen with --base 1116 should get change_type of "changed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run7', delta_fn=delta_fn,
                                            base_run_id=1116)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,changed\n']
        # Run dirbs-listgen with --base 1120 should get change_type of "changed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run8', delta_fn=delta_fn,
                                            base_run_id=1120)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,changed\n']
        # Run dirbs-listgen with --base 1121 should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run9', delta_fn=delta_fn,
                                            base_run_id=1121)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n']
        # Run dirbs-listgen with --base 1122 should get change_type of "new"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run10', delta_fn=delta_fn,
                                            base_run_id=1122)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n',
                        '12345678901234,12345678901234,1,20170110,condition1,new\n']
        # Run dirbs-listgen with --base 1125 should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run11', delta_fn=delta_fn,
                                            base_run_id=1125)
        assert rows == ['imei,imsi,msisdn,block_date,reasons,change_type\n']
def test_aggregate_exceptions_list_changes(postgres, db_conn, tmpdir, mocked_config):
    """Test aggregate_exceptions_list_changes.

    Manually inserts an exceptions-list delta history for a single IMEI-IMSI
    pair, then runs listgen with different --base run ids to verify the
    aggregated change_type matches the net effect of the history rows.
    """
    # Run dirbs-listgen once to create empty partitions
    _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run0')
    # Manually populate one operator's exceptions_list
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""INSERT INTO exceptions_lists_operator1 (operator_id, imei_norm, imsi, start_run_id,
                                                                  end_run_id, delta_reason, virt_imei_shard)
                          VALUES ('operator1', '12345678901234', '12345678901234', 1116, NULL, 'removed',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', 1113, 1116, 'added',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', 1112, 1113, 'removed',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', 1004, 1112, 'added',
                                  calc_virt_imei_shard('12345678901234')),
                                 ('operator1', '12345678901234', '12345678901234', 1000, 1004, 'removed',
                                  calc_virt_imei_shard('12345678901234'))""")
        cursor.execute('SELECT COUNT(*) AS count_ex FROM exceptions_lists_operator1')
        assert cursor.fetchone().count_ex == 5
        # Register each run id referenced above as a successful listgen job
        for i in [900, 1000, 1003, 1116, 1113, 1112, 1004]:
            job_metadata_importer(db_conn=db_conn, command='dirbs-listgen', run_id=i,
                                  status='success', extra_metadata={})
        # Run dirbs-listgen with --base 900, should get change_type of "removed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run1', base_run_id=900)
        assert rows == ['imei,imsi,change_type\n', '12345678901234,12345678901234,removed\n']
        # Run dirbs-listgen with --base 1000, should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run2', base_run_id=1000)
        assert rows == ['imei,imsi,change_type\n']
        # Run dirbs-listgen with --base 1003, should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run3', base_run_id=1003)
        assert rows == ['imei,imsi,change_type\n']
        # Run dirbs-listgen with --base 1004, should get change_type of "removed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run4', base_run_id=1004)
        assert rows == ['imei,imsi,change_type\n', '12345678901234,12345678901234,removed\n']
        # Run dirbs-listgen with --base 1112, should be no change
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run5', base_run_id=1112)
        assert rows == ['imei,imsi,change_type\n']
        # Run dirbs-listgen with --base 1113 should be "removed"
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run6', base_run_id=1113)
        assert rows == ['imei,imsi,change_type\n', '12345678901234,12345678901234,removed\n']
        # Run dirbs-listgen with --base 1116 should be no change.
        rows, _ = _run_list_gen_rows_run_id(db_conn, tmpdir, mocked_config, 'run7', base_run_id=1116)
        assert rows == ['imei,imsi,change_type\n']
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state.csv'],
                         indirect=True)
def test_store_list_in_db_blacklist(classification_data, db_conn, tmpdir, mocked_config):
    """Test blacklist table.

    Verify that dirbs-listgen stores the blacklist in the database with run_id and operator_id
    columns where appropriate.
    """
    _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config)
    with db_conn, db_conn.cursor() as cursor:
        # gen_blacklist() returns the current blacklist as stored by listgen
        cursor.execute('SELECT * FROM gen_blacklist() ORDER BY imei_norm')
        blacklist_entries = [(x.imei_norm, x.block_date) for x in cursor.fetchall()]
        assert blacklist_entries == [('35000000000000', datetime.date(2016, 4, 1)),
                                     ('35111111111110', datetime.date(2016, 4, 1)),
                                     ('35900000000000', datetime.date(2016, 4, 1)),
                                     ('86222222222226', datetime.date(2016, 4, 1))]
@pytest.mark.parametrize('operator_data_importer, pairing_list_importer',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,811111013136460,111018001111111,223338000000\n'
                                     '20160203,311111060451100,111025111111111,223355000000\n'
                                     '20160203,411111013659809,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '111', 'mnc': '02'},
                                            {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           PairListParams(
                               content='imei,imsi\n'
                                       '811111013136460,111018001111111\n'
                                       '311111060451100,111025111111111\n'
                                       '411111013659809,310035111111111'))],
                         indirect=True)
def test_store_list_in_db_exception_list(operator_data_importer, mocked_config,
                                         pairing_list_importer, logger, db_conn, tmpdir):
    """Test exception-list table.

    Test that dirbs-listgen stores exception list in the database with run_id and operator_id
    columns where appropriate. Pairings whose IMSI prefix matches no operator
    (MCC-MNC 310 03 here) appear on every operator's exception list.
    """
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    import_data(pairing_list_importer, 'pairing_list', 3, db_conn, logger)
    _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""SELECT imei_norm, imsi
                            FROM gen_exceptions_list('operator1')
                        ORDER BY imei_norm""")
        exception_list_entries = {(x.imei_norm, x.imsi) for x in cursor.fetchall()}
        assert exception_list_entries == {('41111101365980', '310035111111111'),
                                          ('81111101313646', '111018001111111')}
        cursor.execute("""SELECT imei_norm, imsi
                            FROM gen_exceptions_list('operator2')
                        ORDER BY imei_norm""")
        exception_list_entries = {(x.imei_norm, x.imsi) for x in cursor.fetchall()}
        assert exception_list_entries == {('31111106045110', '111025111111111'),
                                          ('41111101365980', '310035111111111')}
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,86222222222226,111018001111111,223338000000\n'
                                     '20160203,35111111111110,111015111111111,223355000000\n'
                                     '20160203,35900000000000,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_store_list_in_db_notification_list(operator_data_importer, mocked_config,
                                            classification_data, logger, db_conn, tmpdir):
    """Test notification-list table.

    Verify that dirbs-listgen stores notification list in the database with run_id and operator_id
    columns where appropriate.
    """
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    # Classify first so there are blacklisted IMEIs to notify about, then run dirbs-listgen
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                           obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    with db_conn, db_conn.cursor() as cursor:
        cursor.execute("""SELECT imei_norm, imsi FROM gen_notifications_list('operator1') ORDER BY imei_norm""")
        notif_list_entries = {(x.imei_norm, x.imsi) for x in cursor.fetchall()}
        assert notif_list_entries == {('35111111111110', '111015111111111'),
                                      ('35900000000000', '310035111111111'),
                                      ('86222222222226', '111018001111111')}
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state.csv'],
                         indirect=True)
def test_basic_cli_listgen(postgres, classification_data, db_conn, tmpdir, mocked_config):
    """Smoke test: dirbs-listgen runs to completion and emits per-operator lists."""
    _, listgen_output_dir = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config)
    listgen_dir = find_subdirectory_in_dir('listgen*', listgen_output_dir)
    # Both list families must be generated for every configured operator
    for list_kind in ('exceptions', 'notifications'):
        _verify_per_operator_lists_generated(listgen_dir, list_kind)
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state.csv'],
                         indirect=True)
def test_basic_cli_listgen_zip_files(postgres, classification_data, db_conn, tmpdir, mocked_config):
    """Test that the dirbs-listgen instance runs without an error and generates zip files with the right members.

    Checks the blacklist zip plus, for each of the 4 configured operators, the
    notifications and exceptions zips: each archive must contain the full list
    CSV and one delta CSV per change type.
    """
    run_id, output_dir = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config)
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    zip_fn = find_file_in_dir('*blacklist.zip', dir_path)
    with zipfile.ZipFile(zip_fn, mode='r') as zf:
        members = zf.namelist()
        # Full blacklist CSV + one delta CSV per change type
        assert len(members) == 4
        # NOTE: raw strings are required here -- '\.' in a non-raw string is an
        # invalid escape sequence (SyntaxWarning on modern Python)
        assert any(re.match(r'^.*blacklist\.csv$', m) is not None for m in members)
        for change_type in ['blocked', 'unblocked', 'changed']:
            assert any(re.match(r'^.*blacklist_.*_{0}\.csv$'.format(change_type), m) is not None for m in members)
    for op_id in range(1, 5):
        zip_fn = find_file_in_dir('*notifications_operator{0}.zip'.format(op_id), dir_path)
        with zipfile.ZipFile(zip_fn, mode='r') as zf:
            members = zf.namelist()
            # Full notifications CSV + one delta CSV per change type
            assert len(members) == 6
            assert any(re.match(r'^.*notifications_operator{0}\.csv$'.format(op_id), m) is not None
                       for m in members)
            for change_type in ['new', 'resolved', 'blacklisted', 'no_longer_seen', 'changed']:
                assert any(re.match(r'^.*notifications_operator{0}_delta_.*_{1}\.csv$'.format(op_id, change_type), m)
                           is not None for m in members)
        zip_fn = find_file_in_dir('*exceptions_operator{0}.zip'.format(op_id), dir_path)
        with zipfile.ZipFile(zip_fn, mode='r') as zf:
            members = zf.namelist()
            # Full exceptions CSV + one delta CSV per change type
            assert len(members) == 3
            assert any(re.match(r'^.*exceptions_operator{0}\.csv$'.format(op_id), m) is not None
                       for m in members)
            for change_type in ['added', 'removed']:
                assert any(re.match(r'^.*exceptions_operator{0}_delta_.*_{1}\.csv$'.format(op_id, change_type), m)
                           is not None for m in members)
@pytest.mark.parametrize('operator_data_importer, pairing_list_importer',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,811111013136460,111018001111111,223338000000\n'
                                     '20160203,311111060451100,111025111111111,223355000000\n'
                                     '20160203,411111013659809,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '111', 'mnc': '02'},
                                            {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           PairListParams(
                               content='imei,imsi\n'
                                       '811111013136460,111018001111111\n'
                                       '311111060451100,111025111111111\n'
                                       '411111013659809,310035111111111'))],
                         indirect=True)
def test_exception_listgen_no_home_network(postgres, operator_data_importer, mocked_config,
                                           pairing_list_importer, logger, db_conn, tmpdir):
    """Test that dirbs-listgen puts pairings with no MCC-MNC match on every MNO's exception list."""
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    import_data(pairing_list_importer, 'pairing_list', 3, db_conn, logger)
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # Check to make sure exception list is generated for all operators in config file
    _verify_per_operator_lists_generated(dir_path, 'exceptions')
    fn = find_file_in_dir('*exceptions_operator1.csv', dir_path)
    # Check IMSI matching operator1 prefix is added to correct exception list
    with open(fn, 'r') as file:
        rows = file.readlines()
        assert len(rows) == 3
        assert ('81111101313646,111018001111111\n') in rows
        assert ('41111101365980,310035111111111\n') in rows
    fn = find_file_in_dir('*exceptions_operator2.csv', dir_path)
    # Check IMSI matching operator2 prefix is added to correct exception list
    with open(fn, 'r') as file:
        rows = file.readlines()
        assert len(rows) == 3
        assert ('31111106045110,111025111111111\n') in rows
        assert ('41111101365980,310035111111111\n') in rows
    fn = find_file_in_dir('*exceptions_operator3.csv', dir_path)
    # Operator3 has no matching pairings of its own; it still gets the no-home-network pairing
    with open(fn, 'r') as file:
        rows = file.readlines()
        assert len(rows) == 2
        assert ('41111101365980,310035111111111\n') in rows
@pytest.mark.parametrize('operator_data_importer, pairing_list_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,811111013136460,111018001111111,223338000000\n'
                                     '20160203,359000000000000,111015113222222,223355000000\n'
                                     '20160203,357756065985824,111015113333333,223355111111',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}],
                             operator='operator1',
                             extract=False),
                           PairListParams(
                               content='imei,imsi\n'
                                       '811111013136460,111018001111111\n'
                                       '359000000000000,111015113222222\n'
                                       '357756065985824,111015113333333'),
                           'classification_state/imei_api_class_state_v2.csv')],
                         indirect=True)
def test_exception_listgen_with_only_blacklisted_imeis_for_valid_conditions(postgres, operator_data_importer,
                                                                            pairing_list_importer, monkeypatch,
                                                                            classification_data, mocked_config,
                                                                            logger, db_conn, tmpdir):
    """Test that dirbs-listgen generates exception lists without the blacklisted IMEIs.

    All the IMEIs in the exception list need to have valid conditions to not be ignored.
    With restrict_exceptions_list enabled, only pairings whose IMEI is blacklisted
    under a configured condition appear on the exception list.
    """
    # IMEI 35900000000000 in the exception list is not ignored because local_stolen condition is valid.
    # valid condition = [local_stolen]
    # imei_api_class_state_v2.csv contains the following row:
    # imei_norm, cond_name, start_date, end_date, block_date
    # '35900000000000,111015113222222, local_stolen
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    import_data(pairing_list_importer, 'pairing_list', 3, db_conn, logger)
    # Configure a single blocking condition (local_stolen) and restrict the exceptions list to it
    cond = {
        'label': 'local_stolen',
        'reason': 'IMEI found on local stolen list',
        'blocking': True,
        'dimensions': [{
            'module': 'stolen_list'}]
    }
    monkeypatch.setattr(mocked_config, 'conditions', [ConditionConfig(ignore_env=True, **cond)])
    monkeypatch.setattr(mocked_config.listgen_config, 'restrict_exceptions_list', True)
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160401')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    fn = find_file_in_dir('*exceptions_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        rows = file.readlines()
        # Assert two rows in file; with one being the header.
        assert len(rows) == 2
        # Check non-blacklisted IMEI on the pairing list is not on the exception list
        assert ('81111101313646,111018001111111\n') not in rows
        assert ('35775606598582,111015113333333\n') not in rows
        # Check IMEI on the blacklist is on the exception list
        assert ('35900000000000,111015113222222\n') in rows
@pytest.mark.parametrize('operator_data_importer, pairing_list_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,811111013136460,111018001111111,223338000000\n'
                                     '20160203,359000000000000,111015113222222,223355000000\n'
                                     '20160203,357756065985824,111015113333333,223355111111',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}],
                             operator='operator1',
                             extract=False),
                           PairListParams(
                               content='imei,imsi\n'
                                       '811111013136460,111018001111111\n'
                                       '359000000000000,111015113222222\n'
                                       '357756065985824,111015113333333'),
                           'classification_state/imei_api_class_state_v1.csv')],
                         indirect=True)
def test_exception_listgen_ignores_invalid_conditions(postgres, operator_data_importer, pairing_list_importer,
                                                      classification_data, logger, db_conn, tmpdir,
                                                      mocked_config, monkeypatch):
    """Verify that all entries in the exception list with invalid conditions are ignored."""
    # IMEI '35900000000000' on the blacklist should be on the exception list but is ignored
    # because the condition names are not valid (duplicate_mk1, crazy_name)
    # valid condition = ['local_stolen']
    # imei_api_class_state_v1.csv significant rows:
    # imei_norm, cond_name, start_date, end_date, block_date
    # 35900000000000,duplicate_mk1,'2016-01-01',,'2016-04-01'
    # 35900000000000,crazy_name,'2016-01-01',,'2016-04-01'
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    import_data(pairing_list_importer, 'pairing_list', 3, db_conn, logger)
    # Configure a single blocking condition (local_stolen) and restrict the exceptions list to it
    cond = {
        'label': 'local_stolen',
        'reason': 'IMEI found on local stolen list',
        'blocking': True,
        'dimensions': [{
            'module': 'stolen_list'}]
    }
    monkeypatch.setattr(mocked_config, 'conditions', [ConditionConfig(ignore_env=True, **cond)])
    monkeypatch.setattr(mocked_config.listgen_config, 'restrict_exceptions_list', True)
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160401')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    fn = find_file_in_dir('*exceptions_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        rows = file.readlines()
        # IMEI '35900000000000' is ignored because its classification conditions
        # (duplicate_mk1, crazy_name) are not among the configured ones (local_stolen)
        assert ('35900000000000,111015113222222\n') not in rows
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,86222222222226,111018001111111,223338000000\n'
                                     '20160203,35111111111110,111015111111111,223355000000\n'
                                     '20160203,35900000000000,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_notification_listgen_with_no_fallback_records(postgres, operator_data_importer, mocked_config,
                                                       classification_data, logger, db_conn, tmpdir):
    """Test that dirbs-listgen generates per-operator notification lists and no operator_undetermined lists."""
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    # Classify first so that listgen has classification state to notify on
    cli_runner = CliRunner()
    classify_result = cli_runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                                        obj={'APP_CONFIG': mocked_config})
    assert classify_result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    listgen_dir = find_subdirectory_in_dir('listgen*', output_dir)
    # Every operator configured in the config file should get its own notifications list
    _verify_per_operator_lists_generated(listgen_dir, 'notifications')
    # Triplets whose IMSI matches the operator1 prefix must land on operator1's list
    notifications_csv = find_file_in_dir('*notifications_operator1.csv', listgen_dir)
    with open(notifications_csv) as f:
        triplets = [tuple(line.split(',')[:4]) for line in f]
    assert ('86222222222226', '111018001111111', '223338000000', '20160401') in triplets
    assert ('35111111111110', '111015111111111', '223355000000', '20160401') in triplets
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,86222222222226,111118001111111,223338000000\n'
                                     '20160203,35111111111110,111115111111111,223355000000\n'
                                     '20160203,35900000000000,111111111111111,743614000000',
                             operator='operator1',
                             perform_region_checks=False,
                             perform_home_network_check=False,
                             extract=False),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_notification_lists_with_seen_networks(postgres, operator_data_importer, mocked_config,
                                               classification_data, logger, db_conn, metadata_db_conn,
                                               tmpdir, mocked_statsd):
    """Test Depot ID Unknown.
    Test that dirbs-listgen put subscribers on every network they were seen with if IMSI does not match the home
    network for any configured operator.
    The IMSI prefix 11111 is used as it does not match any operator MCC-MNC configured in the config file used
    for unit testing. This means that we will fall back to the fallback method of looking at which operators
    that triplet was seen on.
    The same data is imported for 2 operators to simulate a roaming situation where the same triplet is seen
    with multiple operators with no clear home network identified. In this case, we expect the triplets to be
    output on both operator notification lists.
    """
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    # Import the same data set again as operator 2 so that the same triplets appear in 2 operator data sets
    with get_importer(OperatorDataImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      OperatorDataParams(
                          content='date,imei,imsi,msisdn\n'
                                  '20160203,86222222222226,111118001111111,223338000000\n'
                                  '20160203,35111111111110,111115111111111,223355000000\n'
                                  '20160203,35900000000000,111111111111111,743614000000',
                          operator='operator2',
                          perform_region_checks=False,
                          perform_home_network_check=False,
                          extract=False)) as operator_data_importer:
        # Expected row count is now 6: 3 triplets per operator
        import_data(operator_data_importer, 'operator_data', 6, db_conn, logger)
    # Classify first so that listgen has classification state to notify on
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                           obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # Check to make sure notification list is generated for all operators in config file
    _verify_per_operator_lists_generated(dir_path, 'notifications')
    expected_header_cols = ['imei', 'imsi', 'msisdn', 'block_date', 'reasons']
    expected_rows = [['86222222222226', '111118001111111', '223338000000', '20160401'],
                     ['35900000000000', '111111111111111', '743614000000', '20160401'],
                     ['35111111111110', '111115111111111', '223355000000', '20160401']]
    # Make sure all triplets were seen on the operator 1 notifications list
    fn = find_file_in_dir('*notifications_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        csvreader = csv.reader(file)
        rows = list(csvreader)
        # 4 rows = CSV header + the 3 expected triplets
        assert len(rows) == 4
        assert rows[0] == expected_header_cols
        for er in expected_rows:
            assert er in [x[:4] for x in rows]
    # Make sure all triplets were seen on the operator 2 notifications list
    fn = find_file_in_dir('*notifications_operator2.csv', dir_path)
    with open(fn, 'r') as file:
        csvreader = csv.reader(file)
        rows = list(csvreader)
        # 4 rows = CSV header + the 3 expected triplets
        assert len(rows) == 4
        assert rows[0] == expected_header_cols
        for er in expected_rows:
            assert er in [x[:4] for x in rows]
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,86222222222226,111018001111111,223338000000\n'
                                     '20160203,35111111111110,111015111111111,223355000000\n'
                                     '20160203,35900000000000,111011111111111,743614000000',
                             operator='operator1',
                             perform_region_checks=False,
                             perform_home_network_check=False,
                             extract=False),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_notification_lists_with_home_network_multiple_seen(postgres, operator_data_importer, mocked_config,
                                                            classification_data, logger, db_conn, metadata_db_conn,
                                                            tmpdir, mocked_statsd):
    """Test Depot ID Unknown.
    Test that dirbs-listgen only puts a notification on the home network even if a triplet is seen with
    multiple operators.
    The IMSI prefix 11101 is used as it matches the operator1 MCC-MNC prefix in the unit test config file for
    operator 1.
    The same data is imported for 2 operators to simulate a roaming situation where the same triplet is seen
    with multiple operators with a clear home network identified. In this case, we expect the triplets to be
    output on only operator 1, since it is the home network.
    """
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    # Import the same data set again as operator 2 so that the same triplets appear in 2 operator data sets
    with get_importer(OperatorDataImporter,
                      db_conn,
                      metadata_db_conn,
                      mocked_config.db_config,
                      tmpdir,
                      logger,
                      mocked_statsd,
                      OperatorDataParams(
                          content='date,imei,imsi,msisdn\n'
                                  '20160203,86222222222226,111018001111111,223338000000\n'
                                  '20160203,35111111111110,111015111111111,223355000000\n'
                                  '20160203,35900000000000,111011111111111,743614000000',
                          operator='operator2',
                          perform_region_checks=False,
                          perform_home_network_check=False,
                          extract=False)) as operator_data_importer:
        # Expected row count is now 6: 3 triplets per operator
        import_data(operator_data_importer, 'operator_data', 6, db_conn, logger)
    # Classify first so that listgen has classification state to notify on
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                           obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # Snapshot of directory contents, used below to assert no undetermined lists exist
    file_list = os.listdir(dir_path)
    # Check to make sure notification list is generated for all operators in config file
    _verify_per_operator_lists_generated(dir_path, 'notifications')
    expected_header_cols = ['imei', 'imsi', 'msisdn', 'block_date', 'reasons']
    expected_rows = [['86222222222226', '111018001111111', '223338000000', '20160401'],
                     ['35900000000000', '111011111111111', '743614000000', '20160401'],
                     ['35111111111110', '111015111111111', '223355000000', '20160401']]
    # Make sure all triplets were seen on the operator 1 notifications list
    fn = find_file_in_dir('*notifications_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        csvreader = csv.reader(file)
        rows = list(csvreader)
        # 4 rows = CSV header + the 3 expected triplets
        assert len(rows) == 4
        assert rows[0] == expected_header_cols
        for er in expected_rows:
            assert er in [x[:4] for x in rows]
    fn = find_file_in_dir('*notifications_operator2.csv', dir_path)
    # No triplets should appear on the operator 2 list
    with open(fn, 'r') as file:
        csvreader = csv.reader(file)
        rows = list(csvreader)
        # Header-only file: operator 2 is not the home network for any triplet
        assert len(rows) == 1
        assert rows[0] == expected_header_cols
    # Every record should have been seen on a network, so there should be no undetermined records
    assert not fnmatch.filter(file_list, '*exceptions_operator_undetermined.csv')
    assert not fnmatch.filter(file_list, '*notifications_operator_undetermined.csv')
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,86222222222226,,223338000000\n'
                                     '20160203,35111111111110,111015111111111,\n'
                                     '20160203,35900000000000,310035111111111,743614000000',
                             cc=['%'],
                             mcc_mnc_pairs=[{'mcc': '%', 'mnc': '%'}],
                             operator='operator1',
                             extract=False,
                             perform_null_checks=False,
                             perform_unclean_checks=False),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_notification_listgen_with_null_imsi_msisdn(postgres, operator_data_importer, mocked_config,
                                                    classification_data, logger, db_conn, tmpdir):
    """Test that dirbs-listgen generates per-operator notification lists and no operator_undetermined lists.
    Triplets with a NULL IMSI or NULL MSISDN must be excluded from the notification lists.
    """
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    # Classify first so that listgen has classification state to notify on
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                           obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    # Run dirbs-listgen using db args from the temp postgres instance
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # Check IMSI matching operator1 prefix is added to correct notification list
    fn = find_file_in_dir('*notifications_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        rows = [tuple(map(str, i.split(',')))[:1] for i in file]
    # Check IMEI with NULL IMSI does not end up on the notification list
    assert ('86222222222226',) not in rows
    # Check IMEI with NULL MSISDN does not end up on the notification list
    assert ('35111111111110',) not in rows
    # Check IMEI with valid IMSI/MSISDN does end up on the notification list
    assert ('35900000000000',) in rows
    file_list = os.listdir(dir_path)
    # Every record has an identifiable operator, so no undetermined lists are expected
    assert not fnmatch.filter(file_list, '*exceptions_operator_undetermined.csv')
    assert not fnmatch.filter(file_list, '*notifications_operator_undetermined.csv')
    assert len(file_list) > 0
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state.csv'],
                         indirect=True)
def test_blacklist_listgen(postgres, classification_data, db_conn, tmpdir, mocked_config):
    """Test that dirbs-listgen generates a single blacklist."""
    # Run dirbs-classify using db args from the temp postgres instance
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--curr-date=20170101'], obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    # Test empty blacklist: no block dates have passed as of 20160101
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160101')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    file_list = os.listdir(dir_path)
    assert fnmatch.filter(file_list, '*blacklist.csv')
    fn = find_file_in_dir('*blacklist.csv', dir_path)
    with open(fn, 'r') as file:
        rows = file.readlines()
    # Check if the file has a single row
    assert len(rows) == 1
    # Check that single row is the header
    assert 'imei,block_date,reasons\n' in rows
    # Test non-empty blacklist: block dates have passed as of 20170101
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_2', mocked_config, date='20170101')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    file_list = os.listdir(dir_path)
    assert fnmatch.filter(file_list, '*blacklist.csv')
    fn = find_file_in_dir('*blacklist.csv', dir_path)
    with open(fn, 'r') as file:
        rows = [tuple(map(str, i.split(',')))[:1] for i in file]
    # Check IMEIs with block date less than current date end up on the blacklist
    assert ('86222222222226',) in rows
    assert ('35000000000000',) in rows
    assert ('35900000000000',) in rows
    # Check IMEIs with a non-NULL end date do not end up on the blacklist
    assert ('35111111111110',) not in rows
@pytest.mark.parametrize('operator_data_importer, pairing_list_importer',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,811111013136460,111018001111111,223338000000\n'
                                     '20160203,311111060451100,111025111111111,223355000000\n'
                                     '20160203,411111013659809,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '111', 'mnc': '02'},
                                            {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           PairListParams(
                               content='imei,imsi\n'
                                       '811111013136460,111018001111111\n'
                                       '311111060451100,111025111111111\n'
                                       '411111013659809,310035111111111'))],
                         indirect=True)
def test_exception_listgen_with_luhn_check_digit(postgres, operator_data_importer, mocked_config, monkeypatch,
                                                 pairing_list_importer, logger, db_conn, tmpdir):
    """Test luhn check digit for exception list.
    Verify that dirbs-listgen generates per-operator exception lists containing IMEIs with luhn
    check digit for IMEI.
    """
    # The config setting to generate the Luhn digits is turned on in this test
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    import_data(pairing_list_importer, 'pairing_list', 3, db_conn, logger)
    monkeypatch.setattr(mocked_config.listgen_config, 'generate_check_digit', True)
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # Check luhn check digit is in IMEI in exception list.
    # The 14-digit strings below are the normalized forms of the imported 15-digit IMEIs.
    imei_luhn_one = luhn.append('81111101313646')
    imei_luhn_two = luhn.append('31111106045110')
    imei_luhn_three = luhn.append('41111101365980')
    # Check IMSI matching operator1 prefix is added to correct exception list
    fn = find_file_in_dir('*exceptions_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        rows = file.readlines()
        assert (imei_luhn_one + ',111018001111111\n') in rows
        # Check IMSI not conforming to any operator prefix within config file,
        # but associated IMEI seen in operator data; is added to correct exception list
        assert (imei_luhn_three + ',310035111111111\n') in rows
    # Check IMSI matching operator2 prefix is added to correct exception list
    fn = find_file_in_dir('*exceptions_operator2.csv', dir_path)
    with open(fn, 'r') as file:
        rows = file.readlines()
        assert (imei_luhn_two + ',111025111111111\n') in rows
def test_luhn_check_digit_function(db_conn):
    """Test luhn check digit function.
    The SQL functions luhn_check_digit_append and luhn_check_digit_verify must agree
    with the Python luhn module for a sample of 14-digit IMEIs.
    """
    sample_imeis = ['81111101313646', '31111106045110', '41111101365980']
    expected_from_module = [luhn.append(imei) for imei in sample_imeis]
    appended_by_sql = []
    with db_conn, db_conn.cursor() as cursor:
        for imei in sample_imeis:
            cursor.callproc('luhn_check_digit_append', [imei])
            sql_imei_with_check_digit = cursor.fetchone().luhn_check_digit_append
            appended_by_sql.append(sql_imei_with_check_digit)
            # The appended digit must round-trip through the SQL verifier
            cursor.callproc('luhn_check_digit_verify', [sql_imei_with_check_digit])
            assert cursor.fetchone().luhn_check_digit_verify is True
    assert expected_from_module == appended_by_sql
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_restrict_valid_imeis.csv'],
                         indirect=True)
def test_blacklist_listgen_output_invalid_imeis_and_luhn(postgres, mocked_config, monkeypatch,
                                                         classification_data, db_conn, tmpdir, logger):
    """Test luhn check digit for black list.
    Verify that list-gen is restricted to valid IMEIs if output_invalid_imeis is false.
    Verify that dirbs-listgen generates a single blacklist containing IMEIs with luhn check digit for IMEI
    if generate_check_digit is true and only if IMEIs are valid.
    """
    # The config setting to generate the Luhn digits is turned on in this test
    # restrict_valid_imeis.csv rows:
    # imei_norm, cond_name, start_date, end_date, block_date
    # 119300000000001, duplicate_mk1, '2016-01-01',, '2016-04-01' -- IMEI too long
    # 21AA0000000000, duplicate_mk1, '2016-01-01',, '2016-04-01' -- IMEI hex
    # 312222222222BB, duplicate_mk1, '2016-01-01',, '2016-04-01' -- IMEI hex
    # 41222222222226, duplicate_mk1, '2016-01-01',, '2016-04-01' -- valid
    monkeypatch.setattr(mocked_config.listgen_config, 'generate_check_digit', True)
    monkeypatch.setattr(mocked_config.listgen_config, 'output_invalid_imeis', False)
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--curr-date=20170101'], obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    # Test empty blacklist: no block dates have passed as of 20160101
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160101')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    fn = find_file_in_dir('*blacklist.csv', dir_path)
    with open(fn, 'r') as file:
        rows = file.readlines()
    # Check if the file has a single row
    assert len(rows) == 1
    # Check that single row is the header
    assert 'imei,block_date,reasons\n' in rows
    # Test non-empty blacklist: block dates have passed as of 20170101
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_2', mocked_config, date='20170101')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    fn = find_file_in_dir('*blacklist.csv', dir_path)
    with open(fn, 'r') as file:
        rows = [tuple(map(str, i.split(',')))[:1] for i in file]
    # Only the single valid IMEI appears, with its Luhn check digit appended
    imei_luhn_one = luhn.append('41222222222226')
    assert rows == [('imei',), (imei_luhn_one,)]
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state.csv'],
                         indirect=True)
def test_blacklist_listgen_with_luhn_check_digit(postgres, mocked_config, monkeypatch,
                                                 classification_data, db_conn, tmpdir):
    """Test luhn check digit for black list.
    Verify that dirbs-listgen generates a single blacklist containing IMEIs with luhn check digit for IMEI
    if requested in yaml file using generate_check_digit param.
    """
    # Turn on check-digit generation in the listgen config
    monkeypatch.setattr(mocked_config.listgen_config, 'generate_check_digit', True)
    cli_runner = CliRunner()
    classify_result = cli_runner.invoke(dirbs_classify_cli, ['--curr-date=20170101'],
                                        obj={'APP_CONFIG': mocked_config})
    assert classify_result.exit_code == 0
    # First run: no block dates have passed yet, so the blacklist is header-only
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160101')
    listgen_dir = find_subdirectory_in_dir('listgen*', output_dir)
    blacklist_csv = find_file_in_dir('*blacklist.csv', listgen_dir)
    with open(blacklist_csv) as f:
        blacklist_rows = f.readlines()
    assert len(blacklist_rows) == 1
    assert 'imei,block_date,reasons\n' in blacklist_rows
    # Second run: block dates have passed, so blocked IMEIs appear with check digits
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_2', mocked_config, date='20170101')
    listgen_dir = find_subdirectory_in_dir('listgen*', output_dir)
    blacklist_csv = find_file_in_dir('*blacklist.csv', listgen_dir)
    with open(blacklist_csv) as f:
        imei_column = [tuple(line.split(',')[:1]) for line in f]
    # Each blocked IMEI should be emitted with its Luhn check digit appended
    for blocked_imei in ('86222222222226', '35000000000000', '35900000000000'):
        assert (luhn.append(blocked_imei),) in imei_column
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v4.csv'],
                         indirect=True)
def test_listgen_with_only_informational_conditions(postgres, mocked_config, monkeypatch,
                                                    classification_data, db_conn, tmpdir):
    """Test that dirbs-listgen handles empty blocking_conditions."""
    # All conditions in the yaml have blocking set to False
    cond_dict_list = [{'label': 'local_stolen',
                       'reason': 'IMEI found on local stolen list',
                       'grace_period_days': 0,
                       'blocking': False,
                       'max_allowed_matching_ratio': 1.0,
                       'dimensions': [{'module': 'stolen_list'}]
                       },
                      {'label': 'malformed_imei',
                       'reason': 'Invalid characters detected in IMEI',
                       'grace_period_days': 30,
                       'blocking': False,
                       'dimensions': [{'module': 'inconsistent_rat'}]
                       }]
    # Run dirbs-listgen using db args from the temp postgres instance
    runner = CliRunner()
    monkeypatch.setattr(mocked_config, 'conditions', from_cond_dict_list_to_cond_list(cond_dict_list))
    result = runner.invoke(dirbs_classify_cli, ['--curr-date=20170101'], obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20170101')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # glob.glob already returns paths joined with dir_path; the previous
    # os.path.join(dir_path, f) re-join only worked because the paths were absolute.
    for csv_path in glob.glob(os.path.join(dir_path, '*.csv')):
        with open(csv_path, 'r') as file:
            rows = [tuple(map(str, i.split(',')))[:1] for i in file]
            # With no blocking conditions, every generated list is header-only
            assert rows == [('imei',)]
# Module-level condition config shared by the "ignores invalid conditions" tests below:
# every condition except gsma_not_found, so classification rows tagged with
# gsma_not_found are treated as invalid and ignored by listgen.
# NOTE(review): the 'malformed_imei' condition uses the 'inconsistent_rat' dimension
# module and vice versa — this looks swapped; confirm it is intentional for these tests.
all_cond_except_gsma_dict_list = [{'label': 'local_stolen',
                                   'reason': 'IMEI found on local stolen list',
                                   'grace_period_days': 0,
                                   'blocking': True,
                                   'max_allowed_matching_ratio': 1.0,
                                   'dimensions': [{'module': 'stolen_list'}]
                                   },
                                  {'label': 'malformed_imei',
                                   'reason': 'Invalid characters detected in IMEI',
                                   'grace_period_days': 30,
                                   'blocking': False,
                                   'dimensions': [{'module': 'inconsistent_rat'}]
                                   },
                                  {'label': 'inconsistent_rat',
                                   'reason': 'IMEI RAT inconsistent with device capability',
                                   'grace_period_days': 0,
                                   'blocking': False,
                                   'max_allowed_matching_ratio': 1.0,
                                   'dimensions': [{'module': 'malformed_imei'}]
                                   },
                                  {'label': 'duplicate_mk1',
                                   'reason': 'Duplicate IMEI detected',
                                   'grace_period_days': 60,
                                   'blocking': True,
                                   'sticky': True,
                                   'dimensions': [{'module': 'duplicate_threshold',
                                                   'parameters': {'threshold': 10, 'period_days': 120}},
                                                  {'module': 'duplicate_daily_avg',
                                                   'parameters': {'threshold': 4.0,
                                                                  'period_days': 30,
                                                                  'min_seen_days': 5}}]
                                   },
                                  {'label': 'not_on_registration_list',
                                   'reason': 'IMEI not found on local registration list',
                                   'grace_period_days': 0,
                                   'blocking': True,
                                   'max_allowed_matching_ratio': 1.0,
                                   'dimensions': [{'module': 'not_on_registration_list'}]
                                   }]
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,86222222222226,111018001111111,223338000000\n'
                                     '20160203,35111111111110,111015111111111,223355000000\n'
                                     '20160203,35900000000000,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_notification_listgen_ignores_invalid_conditions(postgres, classification_data, monkeypatch,
                                                         operator_data_importer, mocked_config, logger, db_conn,
                                                         tmpdir):
    """Test that dirbs-listgen ignores IMEIs with invalid conditions in notification list."""
    # Only 86222222222226 is expected on the notification list: its duplicate_mk1
    # classification is a configured condition, whereas gsma_not_found is not
    # configured (see all_cond_except_gsma_dict_list) and is therefore ignored.
    # imei_api_class_state.csv relevant rows:
    #   imei_norm,cond_name,start_date,end_date,block_date
    #   86222222222226,gsma_not_found,'2016-01-01',,'2016-04-01'
    #   86222222222226,duplicate_mk1,'2016-01-01',,'2016-04-01'
    #   35111111111110,gsma_not_found,'2016-01-01',,'2016-04-01'
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    monkeypatch.setattr(mocked_config, 'conditions', from_cond_dict_list_to_cond_list(all_cond_except_gsma_dict_list))
    # Classify against the temp postgres instance, then generate the lists
    cli_runner = CliRunner()
    classify_result = cli_runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                                        obj={'APP_CONFIG': mocked_config})
    assert classify_result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    listgen_dir = find_subdirectory_in_dir('listgen*', output_dir)
    notifications_csv = find_file_in_dir('*notifications_operator1.csv', listgen_dir)
    with open(notifications_csv) as f:
        triplets = [tuple(line.split(',')[:4]) for line in f]
    assert ('86222222222226', '111018001111111', '223338000000', '20160401') in triplets
    assert ('35111111111110', '111015111111111', '223355000000', '20160401') not in triplets
@pytest.mark.parametrize('classification_data',
                         ['classification_state/imei_api_class_state_v4.csv'],
                         indirect=True)
def test_blacklist_listgen_ignores_invalid_condition(postgres, monkeypatch, mocked_config,
                                                     classification_data, db_conn, tmpdir):
    """Test that dirbs-listgen ignores IMEIs with invalid conditions in black-list."""
    # Only IMEI 86222222222226 ends up on the black-list, via its valid duplicate_mk1
    # condition; IMEIs classified solely under gsma_not_found are ignored because
    # gsma_not_found is not a valid (configured) condition.
    # valid conditions : all except gsma_not_found
    # imei_api_class_state_v4.csv relevant rows:
    # imei_norm, cond_name, start_date, end_date, block_date
    # 86222222222226, gsma_not_found, '2016-01-01',, '2016-04-01'
    # 86222222222226, duplicate_mk1, '2016-01-01',, '2016-04-01'
    # 35111111111110, gsma_not_found, '2016-01-01',, '2016-04-01'
    # 35000000000000, gsma_not_found, '2016-01-01',, '2016-04-01'
    # 35900000000000, gsma_not_found, '2016-01-01',, '2016-04-01'
    # Run dirbs-listgen using db args from the temp postgres instance
    monkeypatch.setattr(mocked_config, 'conditions', from_cond_dict_list_to_cond_list(all_cond_except_gsma_dict_list))
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--curr-date=20170101'], obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20170101')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    fn = find_file_in_dir('*blacklist.csv', dir_path)
    with open(fn, 'r') as file:
        rows = [tuple(map(str, i.split(',')))[:1] for i in file]
    assert ('35000000000000',) not in rows
    assert ('35900000000000',) not in rows
    assert ('86222222222226',) in rows
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,86222222222226,111018001111111,223338000000\n'
                                     '20160203,35111111111110,111015111111111,223355000000\n'
                                     '20160203,35900000000000,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_notification_listgen_with_luhn_check_digit(postgres, operator_data_importer, mocked_config, monkeypatch,
                                                    classification_data, logger, db_conn, tmpdir):
    """Test luhn check digit in notification list.
    Verify that dirbs-listgen generates IMEIs with Luhn check digit in notification list if specified in
    config settings.
    """
    # The config setting to generate the Luhn digits is turned on in this test
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    # Run dirbs-classify using db args from the temp postgres instance
    runner = CliRunner()
    monkeypatch.setattr(mocked_config.listgen_config, 'generate_check_digit', True)
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                           obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # Check luhn check digit is in IMEI for notification list
    imei_luhn_one = luhn.append('86222222222226')
    imei_luhn_two = luhn.append('35111111111110')
    # Check IMSI matching operator1 prefix is added to correct notification list
    fn = find_file_in_dir('*notifications_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        rows = [tuple(map(str, i.split(',')))[:4] for i in file]
    assert (imei_luhn_one, '111018001111111', '223338000000', '20160401') in rows
    assert (imei_luhn_two, '111015111111111', '223355000000', '20160401') in rows
@pytest.mark.parametrize('operator_data_importer, classification_data',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20160203,3511AAB1111110,111015111111111,223355000000\n'
                                     '20160203,3BAA0000000000,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '310', 'mnc': '03'}],
                             perform_unclean_checks=False,
                             extract=False),
                           'classification_state/imei_api_class_state_hex_char.csv')],
                         indirect=True)
def test_listgen_luhn_check_hex_character(postgres, operator_data_importer, mocked_config, monkeypatch,
                                          classification_data, logger, db_conn, tmpdir):
    """Test that dirbs-listgen does not add check digit to hexadecimal IMEIs even if generate_check_digit is true."""
    # The config setting to generate the Luhn digits is turned on in this test;
    # verify that hex IMEIs don't have a 15th Luhn check digit added
    import_data(operator_data_importer, 'operator_data', 2, db_conn, logger)
    monkeypatch.setattr(mocked_config.listgen_config, 'generate_check_digit', True)
    # Run dirbs-classify using db args from the temp postgres instance
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--no-safety-check', '--curr-date=20170101'],
                           obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20160203')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    # Check IMSI matching operator1 prefix is added to correct notification list without Luhn check digit
    fn = find_file_in_dir('*notifications_operator1.csv', dir_path)
    with open(fn, 'r') as file:
        rows = [tuple(map(str, i.split(',')))[:4] for i in file]
    assert ('3511AAB1111110', '111015111111111', '223355000000', '20160401') in rows
    assert ('3BAA0000000000', '310035111111111', '743614000000', '20160401') in rows
@pytest.mark.parametrize('pairing_list_importer, classification_data',
                         [(PairListParams(
                             content='imei,imsi\n'
                                     '86222222222226,111018001111111\n'
                                     '35000000000000,310035111111111'),
                           'classification_state/imei_api_class_state.csv')],
                         indirect=True)
def test_blacklist_with_pairing_list(postgres, pairing_list_importer, classification_data, logger, db_conn, tmpdir,
                                     mocked_config):
    """Test that dirbs-listgen generates a single blacklist and includes IMEIs on pairing list."""
    import_data(pairing_list_importer, 'pairing_list', 2, db_conn, logger)
    # Run dirbs-classify using db args from the temp postgres instance
    runner = CliRunner()
    result = runner.invoke(dirbs_classify_cli, ['--curr-date=20170101'], obj={'APP_CONFIG': mocked_config})
    assert result.exit_code == 0
    _, output_dir, = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20170101')
    dir_path = find_subdirectory_in_dir('listgen*', output_dir)
    fn = find_file_in_dir('*blacklist.csv', dir_path)
    with open(fn, 'r') as file:
        rows = [tuple(map(str, i.split(',')))[:1] for i in file]
    # Check IMEIs with block date less than current date end up on the blacklist,
    # even when the IMEI is also on the pairing list
    assert ('86222222222226',) in rows
    assert ('35000000000000',) in rows
    assert ('35900000000000',) in rows
    # Check IMEIs with a non-NULL end date do not end up on the blacklist
    assert ('35111111111110',) not in rows
def test_cli_arg_no_cleanup(tmpdir, db_conn, mocked_config):
    """Test cleanup option for list-gen.

    That all tables are cleaned up when --no-cleanup is not used
    That tables are not cleaned up when --no-cleanup is used
    """
    with db_conn, db_conn.cursor() as cursor:
        # First run keeps its intermediate tables alive (--no-cleanup).
        run_id, _ = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, no_clean_up=True)
        cursor.execute("""SELECT TABLE_NAME
                            FROM information_schema.tables
                           WHERE TABLE_NAME LIKE 'listgen_temp_%'
                        ORDER BY TABLE_NAME""")
        res = [x.table_name for x in cursor.fetchall()]
        assert len(res) == 158

        # Build the expected temp-table names from the actual run id.
        # BUGFIX: the original hard-coded run id 1 in every literal and then
        # called .format(run_id) on strings containing no '{}' placeholder,
        # which is a no-op -- the check silently depended on this being the
        # first run recorded in the database.
        shard_suffixes = ['', '_0_24', '_25_49', '_50_74', '_75_99']
        operators = ['operator1', 'operator2', 'operator3', 'operator4']

        def _sharded(base):
            """Parent table name plus its four hash-sharded partitions."""
            return [base + suffix for suffix in shard_suffixes]

        def _per_operator(base):
            """Parent table plus a sharded table set for each operator."""
            tables = [base]
            for op in operators:
                tables.extend(_sharded('{0}_{1}'.format(base, op)))
            return tables

        prefix = 'listgen_temp_{0}'.format(run_id)
        expected_res = []
        expected_res.extend(_sharded(prefix + '_delta_blacklist'))
        expected_res.extend(_per_operator(prefix + '_delta_exceptions_lists'))
        expected_res.extend(_per_operator(prefix + '_delta_notifications_lists'))
        expected_res.extend(_sharded(prefix + '_new_blacklist'))
        expected_res.append(prefix + '_new_blocking_conditions_table')
        expected_res.extend(_per_operator(prefix + '_new_exceptions_lists'))
        expected_res.append(prefix + '_new_mcc_mnc_table')
        expected_res.extend(_sharded(prefix + '_new_notifications_imeis'))
        expected_res.extend(_per_operator(prefix + '_new_notifications_lists'))
        expected_res.extend(_sharded(prefix + '_new_notifications_triplets'))
        expected_res.extend(_sharded(prefix + '_new_pairings_imei_imsis'))
        expected_res.extend(_sharded(prefix + '_old_blacklist'))
        expected_res.extend(_per_operator(prefix + '_old_exceptions_lists'))
        expected_res.extend(_per_operator(prefix + '_old_notifications_lists'))
        # Sanity check the generator reproduces the documented table count.
        assert len(expected_res) == 158
        assert set(res) == set(expected_res)

        # Second run cleans up after itself (default behaviour), so only the
        # first run's 158 tables remain.
        _cli_listgen_helper(db_conn, tmpdir, 'run_2', mocked_config)
        cursor.execute("""SELECT TABLE_NAME
                            FROM information_schema.tables
                           WHERE TABLE_NAME LIKE 'listgen_temp_%'""")
        res = [x.table_name for x in cursor.fetchall()]
        assert len(res) == 158
@pytest.mark.parametrize('operator_data_importer, registration_list_importer, stolen_list_importer',
                         [(OperatorDataParams(
                             content='date,imei,imsi,msisdn\n'
                                     '20161203,86222222222226,111018001111111,223338000000\n'
                                     '20161203,35111111111110,111015111111111,223355000000\n'
                                     '20161203,35900000000000,310035111111111,743614000000',
                             operator='operator1',
                             cc=['22', '74'],
                             mcc_mnc_pairs=[{'mcc': '111', 'mnc': '01'}, {'mcc': '310', 'mnc': '03'}],
                             extract=False),
                           RegistrationListParams(content='approved_imei,make,model,status,model_number,'
                                                          'brand_name,device_type,radio_interface\n'
                                                          '35900000000000, , ,whitelist,,,,'),
                           StolenListParams(content='IMEI,reporting_date,status\n'
                                                    '35111111111110,20160930,blacklist\n'
                                                    '35900000000000,20160930,blacklist\n'))],
                         indirect=True)
def test_amnesty_enabled_listgen(postgres, operator_data_importer, stolen_list_importer, registration_list_importer,
                                 mocked_config, monkeypatch, logger, db_conn, tmpdir):
    """Test notification list and blacklist are correctly generated when amnesty is enabled."""
    import_data(operator_data_importer, 'operator_data', 3, db_conn, logger)
    import_data(stolen_list_importer, 'stolen_list', 2, db_conn, logger)
    import_data(registration_list_importer, 'registration_list', 1, db_conn, logger)
    # Set amnesty config parameters: eval period ends 20170101, amnesty period ends 20170201.
    monkeypatch.setattr(mocked_config.amnesty_config, 'amnesty_enabled', True)
    monkeypatch.setattr(mocked_config.amnesty_config, 'evaluation_period_end_date', datetime.date(2017, 1, 1))
    monkeypatch.setattr(mocked_config.amnesty_config, 'amnesty_period_end_date', datetime.date(2017, 2, 1))
    # Two blocking conditions; only 'not_on_registration_list' is amnesty-eligible.
    cond_list = [{
        'label': 'not_on_registration_list',
        'reason': 'not_registered',
        'max_allowed_matching_ratio': 1.0,
        'grace_period_days': 10,
        'blocking': True,
        'amnesty_eligible': True,
        'dimensions': [{'module': 'not_on_registration_list'}]},
        {
        'label': 'local_stolen',
        'reason': 'stolen',
        'max_allowed_matching_ratio': 1.0,
        'grace_period_days': 20,
        'blocking': True,
        'amnesty_eligible': False,
        'dimensions': [{'module': 'stolen_list'}]}]
    # Step 1: Test listgen works fine when in eval period.
    invoke_cli_classify_with_conditions_helper(cond_list, mocked_config, monkeypatch, db_conn=db_conn,
                                               curr_date='20170101')
    _, output_dir = _cli_listgen_helper(db_conn, tmpdir, 'run_1', mocked_config, date='20170101')
    # Verify non-amnesty triplets are in notification list when in eval period.
    # IMEI '86222222222226' is eligible for amnesty and should not be in notification list.
    rows = _read_rows_from_file('notifications_operator1.csv', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:6] for i in rows]
    assert len(rows) == 3
    # Verify the correct block_date is stored for IMEI meeting only non-amnesty condition
    assert ('35900000000000', '310035111111111', '743614000000', '20170121', 'stolen', 'False\n') in rows
    assert ('35111111111110', '111015111111111', '223355000000', '20170121', 'stolen', 'False\n') in rows
    # Verify delta notification list is generated correctly.
    rows = _read_rows_from_file('notifications_operator1_delta', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:7] for i in rows]
    assert len(rows) == 3
    assert ('35900000000000', '310035111111111', '743614000000', '20170121', 'stolen', 'False', 'new\n') in rows
    assert ('35111111111110', '111015111111111', '223355000000', '20170121', 'stolen', 'False', 'new\n') in rows
    # Verify blacklist is empty (header row only).
    rows = _read_rows_from_file('blacklist.csv', tmpdir, output_dir=output_dir)
    assert len(rows) == 1
    # Step 2: Test listgen works fine when in amnesty period.
    invoke_cli_classify_with_conditions_helper(cond_list, mocked_config, monkeypatch, db_conn=db_conn,
                                               curr_date='20170102')
    _, output_dir = _cli_listgen_helper(db_conn, tmpdir, 'run_2', mocked_config, date='20170102')
    # Verify amnesty triplets are in notification list also when in amnesty period.
    # IMEI '86222222222226' is eligible for amnesty and should be in notification list.
    rows = _read_rows_from_file('notifications_operator1.csv', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:6] for i in rows]
    assert len(rows) == 4
    assert ('35900000000000', '310035111111111', '743614000000', '20170121', 'stolen', 'False\n') in rows
    assert ('35111111111110', '111015111111111', '223355000000', '20170121',
            'not_registered|stolen', 'False\n') in rows
    # Amnesty-eligible IMEI gets the amnesty end date (20170201) as its block date.
    assert ('86222222222226', '111018001111111', '223338000000', '20170201', 'not_registered', 'True\n') in rows
    # Verify delta notification list is generated correctly.
    # Verify changed notification is generated as new reason is added for the IMEI.
    rows = _read_rows_from_file('notifications_operator1_delta', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:7] for i in rows]
    assert len(rows) == 3
    assert ('86222222222226', '111018001111111', '223338000000', '20170201', 'not_registered', 'True', 'new\n') in rows
    assert ('35111111111110', '111015111111111', '223355000000', '20170121',
            'not_registered|stolen', 'False', 'changed\n') in rows
    # Verify blacklist is empty (header row only).
    rows = _read_rows_from_file('blacklist.csv', tmpdir, output_dir=output_dir)
    assert len(rows) == 1
    # Step 3: Verify non-amnesty IMEIs are added to blacklist after they are past the block_date.
    _, output_dir = _cli_listgen_helper(db_conn, tmpdir, 'run_3', mocked_config, date='20170122')
    # Verify amnesty triplets are in notification list when in amnesty period.
    rows = _read_rows_from_file('notifications_operator1.csv', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:6] for i in rows]
    assert len(rows) == 2
    assert ('86222222222226', '111018001111111', '223338000000', '20170201', 'not_registered', 'True\n') in rows
    # Verify delta notification list is generated correctly.
    rows = _read_rows_from_file('notifications_operator1_delta', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:7] for i in rows]
    assert len(rows) == 3
    assert ('35900000000000', '310035111111111', '743614000000', '20170121',
            'stolen', 'False', 'blacklisted\n') in rows
    assert ('35111111111110', '111015111111111', '223355000000', '20170121',
            'not_registered|stolen', 'False', 'blacklisted\n') in rows
    # Verify blacklist is non-empty.
    # IMEIs '35900000000000' and '35111111111110' are stolen and past block date so they should be in blacklist.
    rows = _read_rows_from_file('blacklist.csv', tmpdir, output_dir=output_dir)
    assert len(rows) == 3
    rows = [tuple(map(str, i.split(',')))[:3] for i in rows]
    assert ('35900000000000', '20170121', 'stolen\n') in rows
    assert ('35111111111110', '20170121', 'stolen\n') in rows
    # Verify delta blacklist is generated correctly.
    rows = _read_rows_from_file('blacklist_delta', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:4] for i in rows]
    assert len(rows) == 3
    assert ('35900000000000', '20170121', 'stolen', 'blocked\n') in rows
    assert ('35111111111110', '20170121', 'stolen', 'blocked\n') in rows
    # Step 4: Verify amnesty IMEIs are added to blacklist after amnesty period has ended.
    _, output_dir = _cli_listgen_helper(db_conn, tmpdir, 'run_4', mocked_config, date='20170202')
    # Verify notification list is empty (header row only).
    rows = _read_rows_from_file('notifications_operator1.csv', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:6] for i in rows]
    assert len(rows) == 1
    # Verify delta notification list is generated correctly.
    rows = _read_rows_from_file('notifications_operator1_delta', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:7] for i in rows]
    assert len(rows) == 2
    assert ('86222222222226', '111018001111111', '223338000000', '20170201', 'not_registered',
            'True', 'blacklisted\n') in rows
    # Verify blacklist is non-empty.
    # Amnesty IMEI '86222222222226' is now past block_date and should be in blacklist too.
    rows = _read_rows_from_file('blacklist.csv', tmpdir, output_dir=output_dir)
    assert len(rows) == 4
    rows = [tuple(map(str, i.split(',')))[:3] for i in rows]
    assert ('35900000000000', '20170121', 'stolen\n') in rows
    assert ('35111111111110', '20170121', 'not_registered|stolen\n') in rows
    assert ('86222222222226', '20170201', 'not_registered\n') in rows
    # Verify delta blacklist is generated correctly.
    # Note changed notification is generated as IMEI '35111111111110' was also eligible for amnesty
    # and is now past amnesty_end_date, so new reason gets added for it.
    rows = _read_rows_from_file('blacklist_delta', tmpdir, output_dir=output_dir)
    rows = [tuple(map(str, i.split(',')))[:4] for i in rows]
    assert len(rows) == 3
    assert ('86222222222226', '20170201', 'not_registered', 'blocked\n') in rows
    assert ('35111111111110', '20170121', 'not_registered|stolen', 'changed\n') in rows
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def change_chinese_plurals(apps, schema_editor):
    """Collapse Chinese locales to a single plural form.

    Sets nplurals/plural_rule/cldr_plurals on every Locale named "Chinese"
    and deletes its plural-form (plural_form=1) translations, which are
    meaningless once only one plural form exists.
    """
    Locale = apps.get_model("base", "Locale")
    Translation = apps.get_model("base", "Translation")

    chinese_locales = Locale.objects.filter(name="Chinese")
    for chinese in chinese_locales:
        chinese.nplurals = 1
        chinese.plural_rule = "0"
        chinese.cldr_plurals = "5"
        chinese.save()
        # Delete plural translations
        Translation.objects.filter(locale=chinese, plural_form=1).delete()
class Migration(migrations.Migration):
    # Must run after the migration that converted entity source to JSONField.
    dependencies = [
        ("base", "0017_entity_source_jsonfield"),
    ]

    # Forward-only data migration: no reverse_code is supplied, so this
    # migration cannot be unapplied.
    operations = [migrations.RunPython(change_chinese_plurals)]
|
__all__ = ['all_warnings']
from contextlib import contextmanager
import sys
import warnings
import inspect
@contextmanager
def all_warnings():
    """Context manager that makes every warning visible, for use in tests.

    Python records warnings already shown in per-module
    ``__warningregistry__`` dicts, so a warning raised once may be
    suppressed for the rest of the process even under custom filters.
    This manager wipes those registries (in calling frames and in all
    loaded modules) and installs an ``"always"`` filter, recording the
    warnings raised inside the ``with`` block.

    Yields
    ------
    list of warnings.WarningMessage
        The warnings captured while the context is active.
    """
    # Clear the breadcrumb registries left in every calling frame, so
    # previously-raised warnings become visible again.
    current = inspect.currentframe()
    if current is not None:
        for frame_record in inspect.getouterframes(current):
            frame_record[0].f_locals['__warningregistry__'] = {}
    del current

    # Do the same for every loaded module (needed for full test suites).
    for module_name, module in list(sys.modules.items()):
        # six.moves lazily materialises attributes; touching it here can
        # have side effects, so it is deliberately skipped.
        if 'six.moves' in module_name:
            continue
        try:
            module.__warningregistry__.clear()
        except AttributeError:
            pass

    with warnings.catch_warnings(record=True) as captured:
        warnings.simplefilter("always")
        yield captured
|
# Clean non-numeric values from the 'width' column: anything that cannot be
# parsed as a number becomes NaN (errors='coerce').
# NOTE(review): assumes `df` is a pandas DataFrame and `pd` is imported
# elsewhere -- this snippet is not self-contained.
df.loc[:, 'width'] = pd.to_numeric(df['width'], errors='coerce')
import sys
#---#
print("Hello")
#---#
# Terminates the interpreter immediately with exit status 0.
sys.exit(0)
#---#
# NOTE(review): unreachable -- sys.exit(0) above ends the process before
# this line can run.
print("Past the end")
|
import tensorflow as tf
# Schema of the serialized tf.train.Example records: a JPEG-encoded image plus
# variable-length box coordinates and class labels.
# NOTE(review): labels are stored as float32 and later reshaped to [-1, 1];
# whether coordinates are pixel or normalized values is not visible here --
# confirm against the TFRecord writer.
feature_description = {
    'image': tf.io.FixedLenFeature([], tf.string),
    'xmins': tf.io.VarLenFeature(tf.float32),
    'ymins': tf.io.VarLenFeature(tf.float32),
    'xmaxs': tf.io.VarLenFeature(tf.float32),
    'ymaxs': tf.io.VarLenFeature(tf.float32),
    'labels': tf.io.VarLenFeature(tf.float32)
}
@tf.function
def parse_example(example_proto):
    """Decode one serialized tf.train.Example into (image, bboxes, class_ids).

    Returns the decoded HxWx3 image tensor, an [N, 4] tensor of
    (xmin, ymin, xmax, ymax) boxes, and an [N, 1] tensor of class ids.
    """
    features = tf.io.parse_single_example(example_proto, feature_description)
    image = tf.image.decode_jpeg(features['image'], channels=3)
    # Densify each sparse coordinate vector, then stack column-wise.
    xmins = tf.sparse.to_dense(features['xmins'])
    ymins = tf.sparse.to_dense(features['ymins'])
    xmaxs = tf.sparse.to_dense(features['xmaxs'])
    ymaxs = tf.sparse.to_dense(features['ymaxs'])
    bboxes = tf.stack([xmins, ymins, xmaxs, ymaxs], axis=-1)
    class_ids = tf.reshape(tf.sparse.to_dense(features['labels']), [-1, 1])
    return image, bboxes, class_ids
|
# quart_cors.py
from quart import Quart
from quart_cors import cors, route_cors
app = Quart(__name__)
# Wrap the whole app so CORS headers are emitted; only this origin may call it.
app = cors(app, allow_origin="https://quart.com")
# app = cors(app, allow_origin="*")


@app.route("/api")
# @route_cors(allow_origin=["https://quart.com"])
async def my_microservice():
    # Quart serializes a returned dict as a JSON response body.
    return {"Hello": "World!"}


if __name__ == "__main__":
    # app.config["QUART_CORS_ALLOW_ORIGIN"] = ["http://quart.com"]
    # app = cors(app, allow_origin=["http://befuddle.flummox.org:5200"])
    print(app.config)
    app.run(port=5200)
|
import src.livetracking.model.AbstractModel as AM
import cv2
import numpy as np
import src.data.utilities as u
class KerasModel(AM.AbstractModel):
    def predict(self, x):
        """Run the wrapped Keras model on a fixed crop of the input frame.

        :param x: full input frame as an HxWx3 array; presumably in BGR
            channel order (OpenCV capture convention) -- TODO confirm.
        :return: raw prediction from ``self.model`` for a batch of one.
        """
        # Fixed region of interest: rows 140..339, cols 220..419 (200x200).
        res = x[140:340, 220:420, :]
        # Convert BGR -> RGB; the model presumably expects RGB input.
        res = cv2.cvtColor(res, cv2.COLOR_BGR2RGB)
        # Add a leading batch dimension before predicting.
        return self.model.predict(np.expand_dims(res, axis=0))
|
'''
给定一个无序的数组,找出数组在排序之后,相邻元素之间最大的差值。
如果数组元素个数小于 2,则返回 0。
示例 1:
输入: [3,6,9,1]
输出: 3
解释: 排序后的数组是 [1,3,6,9], 其中相邻元素 (3,6) 和 (6,9) 之间都存在最大差值 3。
示例 2:
输入: [10]
输出: 0
解释: 数组元素个数小于 2,因此返回 0。
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/maximum-gap
'''
class Solution:
    def maximumGap(self, nums: list) -> int:
        """Return the maximum gap between successive elements of sorted nums.

        Uses bucket sort / the pigeonhole principle in O(n) time and space:
        with n numbers spread over (max - min), some pair of adjacent sorted
        elements must differ by at least (max - min) / (n - 1), so the answer
        never lies inside one bucket of that width -- only between the max of
        one non-empty bucket and the min of a later non-empty bucket.

        Returns 0 when nums has fewer than two elements.

        BUGFIX: the original annotated nums as ``List[int]`` without importing
        ``typing.List``, raising NameError when the class was defined; the
        builtin ``list`` is used instead.
        """
        if not nums or len(nums) < 2:
            return 0
        lo, hi = min(nums), max(nums)
        # Bucket width; at least 1 so the bucket count stays finite when all
        # elements are equal (hi == lo).
        width = max(1, (hi - lo) // (len(nums) - 1))
        buckets = [[] for _ in range((hi - lo) // width + 1)]
        for value in nums:
            buckets[(value - lo) // width].append(value)
        max_gap = 0
        prev_max = None  # max of the previous non-empty bucket
        for bucket in buckets:
            if not bucket:
                continue
            if prev_max is not None:
                max_gap = max(max_gap, min(bucket) - prev_max)
            prev_max = max(bucket)
        return max_gap
|
import logging
import os
import textwrap
import threading
import time
import pytest
import salt.loader
import salt.utils.atomicfile
import salt.utils.files
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
log = logging.getLogger(__name__)

# Marks applied to every test in this module.
pytestmark = [
    pytest.mark.windows_whitelisted,
]
def test_show_highstate(state, state_testfile_dest_path):
    """Verify state.show_highstate returns a dict containing the test file."""
    highstate = state.show_highstate()
    assert isinstance(highstate, dict)
    dest = str(state_testfile_dest_path)
    assert dest in highstate
    assert highstate[dest]["__env__"] == "base"
def test_show_lowstate(state):
    """Verify state.show_lowstate returns a list whose entries are dicts."""
    lowstate = state.show_lowstate()
    assert isinstance(lowstate, list)
    assert all(isinstance(item, dict) for item in lowstate)
def test_show_states(state):
    """Verify state.show_states returns the list of targeted sls names."""
    sls_names = state.show_states()
    assert isinstance(sls_names, list)
    assert all(isinstance(name, str) for name in sls_names)
    assert sls_names == ["core"]
def test_show_states_missing_sls(state, state_tree):
    """
    Test state.show_states with a sls file
    defined in a top file is missing
    """
    # The top file references 'does-not-exist', which has no matching sls file.
    top_sls_contents = """
    base:
      '*':
        - core
        - does-not-exist
    """
    with pytest.helpers.temp_file("top.sls", top_sls_contents, state_tree):
        states = state.show_states()
        assert isinstance(states, list)
        # The missing sls is reported as a single error string, not raised.
        assert states == ["No matching sls found for 'does-not-exist' in env 'base'"]
def test_catch_recurse(state, state_tree):
    """
    state.show_sls used to catch a recursive ref
    """
    # The service requires the file and the file requires the service: a cycle.
    sls_contents = """
    mysql:
      service:
        - running
        - require:
          - file: /etc/mysql/my.cnf
    /etc/mysql/my.cnf:
      file:
        - managed
        - source: salt://master.cnf
        - require:
          - service: mysql
    """
    with pytest.helpers.temp_file("recurse-fail.sls", sls_contents, state_tree):
        ret = state.sls("recurse-fail")
        assert ret.failed
        assert (
            'A recursive requisite was found, SLS "recurse-fail" ID "/etc/mysql/my.cnf" ID "mysql"'
            in ret.errors
        )
RECURSE_SLS_ONE = """
snmpd:
pkg:
- installed
service:
- running
- require:
- pkg: snmpd
- watch:
- file: /etc/snmp/snmpd.conf
/etc/snmp/snmpd.conf:
file:
- managed
- source: salt://snmpd/snmpd.conf.jinja
- template: jinja
- user: root
- group: root
- mode: "0600"
- require:
- pkg: snmpd
"""
RECURSE_SLS_TWO = """
nagios-nrpe-server:
pkg:
- installed
service:
- running
- watch:
- file: /etc/nagios/nrpe.cfg
/etc/nagios/nrpe.cfg:
file:
- managed
- source: salt://baseserver/nrpe.cfg
- require:
- pkg: nagios-nrpe-server
"""
@pytest.mark.parametrize(
    "sls_contents, expected_in_output",
    [(RECURSE_SLS_ONE, "snmpd"), (RECURSE_SLS_TWO, "/etc/nagios/nrpe.cfg")],
    ids=("recurse-scenario-1", "recurse-scenario-2"),
)
def test_no_recurse(state, state_tree, sls_contents, expected_in_output):
    """
    verify that a sls structure is NOT a recursive ref
    """
    with pytest.helpers.temp_file("recurse-ok.sls", sls_contents, state_tree):
        # show_sls would return an error instead if a cycle were detected.
        ret = state.show_sls("recurse-ok")
        assert expected_in_output in ret
def test_running_dictionary_consistency(state):
    """Guard the shape of the running dictionary so it is not changed
    without being deprecated/documented first."""
    required_fields = {
        "__id__",
        "__run_num__",
        "__sls__",
        "changes",
        "comment",
        "duration",
        "name",
        "result",
        "start_time",
    }
    ret = state.single("test.succeed_without_changes", name="gndn")
    returned_fields = set(ret.full_return)
    assert required_fields.issubset(returned_fields)
def test_running_dictionary_key_sls(state, state_tree):
    """
    Ensure the __sls__ key is either null or a string
    """
    # state.single runs outside any sls file, so __sls__ must be None.
    sls1 = state.single("test.succeed_with_changes", name="gndn")
    assert "__sls__" in sls1.full_return
    assert sls1.full_return["__sls__"] is None

    # When run from an sls file, __sls__ must name that file as a string.
    sls_contents = """
    gndn:
      test.succeed_with_changes
    """
    with pytest.helpers.temp_file("gndn.sls", sls_contents, state_tree):
        sls2 = state.sls(mods="gndn")
        for state_return in sls2:
            assert "__sls__" in state_return.full_return
            assert isinstance(state_return.full_return["__sls__"], str)
@pytest.fixture
def requested_sls_key(minion_opts, state_tree):
    """Write a platform-appropriate 'requested.sls' and yield its running-dict key.

    The state counts the entries of the filesystem root; Windows gets a
    PowerShell equivalent. The minion's cached state-request file is removed
    afterwards so one test's staged request cannot leak into the next.
    """
    if not salt.utils.platform.is_windows():
        sls_contents = """
        count_root_dir_contents:
          cmd.run:
            - name: 'ls -a / | wc -l'
        """
        sls_key = "cmd_|-count_root_dir_contents_|-ls -a / | wc -l_|-run"
    else:
        sls_contents = r"""
        count_root_dir_contents:
          cmd.run:
            - name: 'Get-ChildItem C:\ | Measure-Object | %{$_.Count}'
            - shell: powershell
        """
        sls_key = (
            r"cmd_|-count_root_dir_contents_|-Get-ChildItem C:\ | Measure-Object |"
            r" %{$_.Count}_|-run"
        )
    try:
        with pytest.helpers.temp_file(
            "requested.sls", sls_contents, state_tree
        ) as sls_path:
            yield sls_key
    finally:
        # state.request stores the pending request in this cache file.
        cache_file = os.path.join(minion_opts["cachedir"], "req_state.p")
        if os.path.exists(cache_file):
            os.remove(cache_file)
def test_request(state, requested_sls_key):
    """Verify that staging a state request reports a pending (None) result."""
    staged = state.request("requested")
    assert staged[requested_sls_key]["result"] is None
def test_check_request(state, requested_sls_key):
    """Verify that a staged state request can be inspected via check_request."""
    staged = state.request("requested")
    assert staged[requested_sls_key]["result"] is None
    checked = state.check_request()
    assert checked["default"]["test_run"][requested_sls_key]["result"] is None
def test_clear_request(state, requested_sls_key):
    """Verify that a staged state request can be cleared."""
    staged = state.request("requested")
    assert staged[requested_sls_key]["result"] is None
    assert state.clear_request() is True
def test_run_request_succeeded(state, requested_sls_key):
    """Verify that running a staged state request executes it successfully."""
    staged = state.request("requested")
    assert staged[requested_sls_key]["result"] is None
    executed = state.run_request()
    assert executed[requested_sls_key]["result"] is True
def test_run_request_failed_no_request_staged(state, requested_sls_key):
    """Verify run_request returns nothing once the staged request is cleared."""
    staged = state.request("requested")
    assert staged[requested_sls_key]["result"] is None
    assert state.clear_request() is True
    assert state.run_request() == {}
def test_issue_1876_syntax_error(state, state_tree, tmp_path):
    """
    verify that we catch the following syntax error::

        /tmp/salttest/issue-1876:
          file:
            - managed
            - source: salt://testfile
          file.append:
            - text: foo
    """
    testfile = tmp_path / "issue-1876.txt"
    # One ID declares the 'file' state twice (file + file.append): invalid.
    sls_contents = """
    {}:
      file:
        - managed
        - source: salt://testfile
      file.append:
        - text: foo
    """.format(
        testfile
    )
    with pytest.helpers.temp_file("issue-1876.sls", sls_contents, state_tree):
        ret = state.sls("issue-1876")
        assert ret.failed
        errmsg = (
            "ID '{}' in SLS 'issue-1876' contains multiple state declarations of the"
            " same type".format(testfile)
        )
        assert errmsg in ret.errors
def test_issue_1879_too_simple_contains_check(state, state_tree, tmp_path):
    """Verify file.append does not re-append text it already appended.

    Issue 1879: the containment check used to be too simple, so re-running
    the same append states duplicated the appended blocks.
    """
    testfile = tmp_path / "issue-1979.txt"
    init_sls_contents = """
    {}:
      file:
        - touch
    """.format(
        testfile
    )
    step1_sls_contents = """
    {}:
      file.append:
        - text: |
            # set variable identifying the chroot you work in (used in the prompt below)
            if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
                debian_chroot=$(cat /etc/debian_chroot)
            fi
    """.format(
        testfile
    )
    step2_sls_contents = """
    {}:
      file.append:
        - text: |
            # enable bash completion in interactive shells
            if [ -f /etc/bash_completion ] && ! shopt -oq posix; then
                . /etc/bash_completion
            fi
    """.format(
        testfile
    )
    expected = textwrap.dedent(
        """\
        # set variable identifying the chroot you work in (used in the prompt below)
        if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
            debian_chroot=$(cat /etc/debian_chroot)
        fi
        # enable bash completion in interactive shells
        if [ -f /etc/bash_completion ] && ! shopt -oq posix; then
            . /etc/bash_completion
        fi
        """
    )
    issue_1879_dir = state_tree / "issue-1879"
    with pytest.helpers.temp_file(
        "init.sls", init_sls_contents, issue_1879_dir
    ), pytest.helpers.temp_file(
        "step-1.sls", step1_sls_contents, issue_1879_dir
    ), pytest.helpers.temp_file(
        "step-2.sls", step2_sls_contents, issue_1879_dir
    ):
        # Create the file
        ret = state.sls("issue-1879")
        for staterun in ret:
            assert staterun.result is True
        # The first append
        ret = state.sls("issue-1879.step-1")
        for staterun in ret:
            assert staterun.result is True
        # The second append
        ret = state.sls("issue-1879.step-2")
        for staterun in ret:
            assert staterun.result is True
        # Does it match?
        contents = testfile.read_text()
        assert contents == expected
        # Make sure we don't re-append existing text
        ret = state.sls("issue-1879.step-1")
        for staterun in ret:
            assert staterun.result is True
        ret = state.sls("issue-1879.step-2")
        for staterun in ret:
            assert staterun.result is True
        # Does it match?
        contents = testfile.read_text()
        assert contents == expected
def test_include(state, state_tree, tmp_path):
    """
    Running an SLS that ``include``s another SLS must apply the states of
    both files; a file managed only by an unrelated SLS must not appear.
    """
    testfile_path = tmp_path / "testfile"
    testfile_path.write_text("foo")
    include_test_path = tmp_path / "include-test.txt"
    to_include_test_path = tmp_path / "to-include-test.txt"
    exclude_test_path = tmp_path / "exclude-test.txt"
    to_include_sls_contents = """
    {}:
      file.managed:
        - source: salt://testfile
    """.format(
        to_include_test_path
    )
    include_sls_contents = """
    include:
      - to-include-test
    {}:
      file.managed:
        - source: salt://testfile
    """.format(
        include_test_path
    )
    with pytest.helpers.temp_file(
        "testfile", "foo", state_tree
    ), pytest.helpers.temp_file(
        "to-include-test.sls", to_include_sls_contents, state_tree
    ), pytest.helpers.temp_file(
        "include-test.sls", include_sls_contents, state_tree
    ):
        ret = state.sls("include-test")
        for staterun in ret:
            assert staterun.result is True
    # Both the including and the included SLS ran; nothing else did.
    assert include_test_path.exists()
    assert to_include_test_path.exists()
    assert exclude_test_path.exists() is False
def test_exclude(state, state_tree, tmp_path):
    """
    An ``exclude`` declaration must prevent the excluded SLS from being
    applied, even when it is pulled in transitively through an ``include``.
    """
    testfile_path = tmp_path / "testfile"
    testfile_path.write_text("foo")
    include_test_path = tmp_path / "include-test.txt"
    to_include_test_path = tmp_path / "to-include-test.txt"
    exclude_test_path = tmp_path / "exclude-test.txt"
    to_include_sls_contents = """
    {}:
      file.managed:
        - source: salt://testfile
    """.format(
        to_include_test_path
    )
    include_sls_contents = """
    include:
      - to-include-test
    {}:
      file.managed:
        - source: salt://testfile
    """.format(
        include_test_path
    )
    exclude_sls_contents = """
    exclude:
      - to-include-test
    include:
      - include-test
    {}:
      file.managed:
        - source: salt://testfile
    """.format(
        exclude_test_path
    )
    with pytest.helpers.temp_file(
        "testfile", "foo", state_tree
    ), pytest.helpers.temp_file(
        "to-include-test.sls", to_include_sls_contents, state_tree
    ), pytest.helpers.temp_file(
        "include-test.sls", include_sls_contents, state_tree
    ), pytest.helpers.temp_file(
        "exclude-test.sls", exclude_sls_contents, state_tree
    ):
        ret = state.sls("exclude-test")
        for staterun in ret:
            assert staterun.result is True
    # include-test ran (via include), but to-include-test was excluded.
    assert include_test_path.exists()
    assert exclude_test_path.exists()
    assert to_include_test_path.exists() is False
def test_issue_2068_template_str(state, state_tree):
    """
    state.template_str and state.template must render the same SLS data
    as state.sls, both for the long form (``test: - succeed...``) and the
    dotted short form (``test.succeed_without_changes``) from issue #2068.
    """
    template_str_no_dot_sls_contents = """
    required_state:
      test:
        - succeed_without_changes
    requiring_state:
      test:
        - succeed_without_changes
        - require:
          - test: required_state
    """
    template_str_sls_contents = """
    required_state: test.succeed_without_changes
    requiring_state:
      test.succeed_without_changes:
        - require:
          - test: required_state
    """
    with pytest.helpers.temp_file(
        "issue-2068-no-dot.sls", template_str_no_dot_sls_contents, state_tree
    ) as template_str_no_dot_path, pytest.helpers.temp_file(
        "issue-2068.sls", template_str_sls_contents, state_tree
    ) as template_str_path:
        # If running this state with state.sls works, so should using state.template_str
        ret = state.sls("issue-2068-no-dot")
        for staterun in ret:
            assert staterun.result is True
        template_str_no_dot_contents = template_str_no_dot_path.read_text()
        ret = state.template_str(template_str_no_dot_contents)
        for staterun in ret:
            assert staterun.result is True
        # Now using state.template
        ret = state.template(str(template_str_no_dot_path))
        for staterun in ret:
            assert staterun.result is True
        # Now the problematic #2068 including dot's
        ret = state.sls("issue-2068")
        for staterun in ret:
            assert staterun.result is True
        template_str_contents = template_str_path.read_text()
        ret = state.template_str(template_str_contents)
        for staterun in ret:
            assert staterun.result is True
        # Now using state.template
        ret = state.template(str(template_str_path))
        for staterun in ret:
            assert staterun.result is True
@pytest.mark.parametrize("item", ("include", "exclude", "extends"))
def test_template_str_invalid_items(state, item):
    """
    Declarations that only make sense in a full highstate render
    (``include``/``exclude``/``extends``) must be rejected by
    state.template_str with a clear error message.
    """
    # Format the template exactly once.  The original code called
    # ``.format(item)`` both here and again at the ``template_str`` call
    # site; the second call was a no-op only because the rendered SLS
    # happened to contain no braces, and would raise otherwise.
    template = textwrap.dedent(
        """\
        {}:
          - existing-state
        /tmp/test-template-invalid-items:
          file:
            - managed
            - source: salt://testfile
        """
    ).format(item)
    ret = state.template_str(template)
    assert ret.failed
    errmsg = (
        "The '{}' declaration found on '<template-str>' is invalid when "
        "rendering single templates".format(item)
    )
    assert errmsg in ret.errors
@pytest.mark.skip_on_windows(
    reason=(
        "Functional testing this on windows raises unicode errors. "
        "Tested in tests/pytests/integration/modules/state/test_state.py"
    )
)
def test_pydsl(state, state_tree, tmp_path):
    """
    Test the basics of the pydsl
    """
    testfile = tmp_path / "testfile"
    # The #!pydsl shebang selects the pydsl renderer for this SLS file.
    sls_contents = """
    #!pydsl
    state("{}").file("touch")
    """.format(
        testfile
    )
    with pytest.helpers.temp_file("pydsl.sls", sls_contents, state_tree):
        ret = state.sls("pydsl")
        for staterun in ret:
            assert staterun.result is True
        assert testfile.exists()
def test_issues_7905_and_8174_sls_syntax_error(state, state_tree):
    """
    Call sls file with yaml syntax error.

    Ensure theses errors are detected and presented to the user without
    stack traces.
    """
    badlist_1_sls_contents = """
    # Missing " " between "-" and "foo" or "name"
    A:
      cmd.run:
        -name: echo foo
        -foo:
          - bar
    """
    badlist_2_sls_contents = """
    # C should fail with bad list error message
    B:
      # ok
      file.exist:
        - name: /foo/bar/foobar
    # ok
    /foo/bar/foobar:
      file.exist
    # nok
    C:
      /foo/bar/foobar:
        file.exist
    """
    with pytest.helpers.temp_file(
        "badlist1.sls", badlist_1_sls_contents, state_tree
    ), pytest.helpers.temp_file("badlist2.sls", badlist_2_sls_contents, state_tree):
        # The run must fail with exactly one friendly error, no traceback.
        ret = state.sls("badlist1")
        assert ret.failed
        assert ret.errors == ["State 'A' in SLS 'badlist1' is not formed as a list"]
        ret = state.sls("badlist2")
        assert ret.failed
        assert ret.errors == ["State 'C' in SLS 'badlist2' is not formed as a list"]
@pytest.mark.slow_test
def test_retry_option(state, state_tree):
    """
    test the retry option on a simple state with defaults
    ensure comment is as expected
    ensure state duration is greater than configured the passed (interval * attempts)
    """
    sls_contents = """
    file_test:
      file.exists:
        - name: /path/to/a/non-existent/file.txt
        - retry:
            until: True
            attempts: 3
            interval: 1
            splay: 0
    """
    expected_comment = (
        'Attempt 1: Returned a result of "False", with the following '
        'comment: "Specified path /path/to/a/non-existent/file.txt does not exist"'
    )
    with pytest.helpers.temp_file("retry.sls", sls_contents, state_tree):
        ret = state.sls("retry")
        for state_return in ret:
            assert state_return.result is False
            assert expected_comment in state_return.comment
            # 3 attempts at a 1 second interval -> at least ~3s of runtime.
            assert state_return.full_return["duration"] >= 3
def test_retry_option_success(state, state_tree, tmp_path):
    """
    test a state with the retry option that should return True immediately (i.e. no retries)
    """
    testfile = tmp_path / "testfile"
    testfile.touch()
    sls_contents = """
    file_test:
      file.exists:
        - name: {}
        - retry:
            until: True
            attempts: 5
            interval: 2
            splay: 0
    """.format(
        testfile
    )
    # Upper bound on how long the single (successful) attempt may take.
    duration = 4
    if salt.utils.platform.is_windows():
        # Windows state runs are slower; allow a larger ceiling.
        duration = 16
    with pytest.helpers.temp_file("retry.sls", sls_contents, state_tree):
        ret = state.sls("retry")
        for state_return in ret:
            assert state_return.result is True
            assert state_return.full_return["duration"] < duration
            # It should not take 2 attempts
            assert "Attempt 2" not in state_return.comment
@pytest.mark.slow_test
def test_retry_option_eventual_success(state, state_tree, tmp_path):
    """
    test a state with the retry option that should return True, eventually

    A helper thread waits for the first state to create ``testfile1`` and
    only then creates ``testfile2``, so the retrying ``file.exists`` state
    fails at least once before eventually succeeding.
    """
    testfile1 = tmp_path / "testfile-1"
    testfile2 = tmp_path / "testfile-2"

    def create_testfile(testfile1, testfile2):
        while True:
            if testfile1.exists():
                break
            time.sleep(2)
        testfile2.touch()

    # daemon=True: if the state run fails before testfile1 is created the
    # helper would otherwise loop forever and hang the test process at
    # interpreter shutdown.
    thread = threading.Thread(
        target=create_testfile, args=(testfile1, testfile2), daemon=True
    )
    sls_contents = """
    file_test_a:
      file.managed:
        - name: {}
        - content: 'a'
    file_test:
      file.exists:
        - name: {}
        - retry:
            until: True
            attempts: 5
            interval: 2
            splay: 0
        - require:
          - file_test_a
    """.format(
        testfile1, testfile2
    )
    with pytest.helpers.temp_file("retry.sls", sls_contents, state_tree):
        thread.start()
        ret = state.sls("retry")
        for state_return in ret:
            assert state_return.result is True
            # At least one retry happened (2s interval), so > 4ms duration.
            assert state_return.full_return["duration"] > 4
            # It should not take 5 attempts
            assert "Attempt 5" not in state_return.comment
@pytest.mark.slow_test
def test_state_non_base_environment(state, state_tree_prod, tmp_path):
    """
    test state.sls with saltenv using a nonbase environment
    with a salt source
    """
    testfile = tmp_path / "testfile"
    sls_contents = """
    {}:
      file.managed:
        - content: foo
    """.format(
        testfile
    )
    # The SLS lives only in the "prod" file tree, so it must be addressed
    # with an explicit saltenv.
    with pytest.helpers.temp_file("non-base-env.sls", sls_contents, state_tree_prod):
        ret = state.sls("non-base-env", saltenv="prod")
        for state_return in ret:
            assert state_return.result is True
        assert testfile.exists()
@pytest.mark.skip_on_windows(
    reason="Skipped until parallel states can be fixed on Windows"
)
def test_parallel_state_with_long_tag(state, state_tree):
    """
    This tests the case where the state being executed has a long ID dec or
    name and states are being run in parallel. The filenames used for the
    parallel state cache were previously based on the tag for each chunk,
    and longer ID decs or name params can cause the cache file to be longer
    than the operating system's max file name length. To counter this we
    instead generate a SHA1 hash of the chunk's tag to use as the cache
    filename. This test will ensure that long tags don't cause caching
    failures.

    See https://github.com/saltstack/salt/issues/49738 for more info.
    """
    short_command = "helloworld"
    # 250 characters — longer than most filesystems' 255-byte name limit
    # once the rest of the tag is added.
    long_command = short_command * 25
    sls_contents = """
    test_cmd_short:
      cmd.run:
        - name: {}
        - parallel: True
    test_cmd_long:
      cmd.run:
        - name: {}
        - parallel: True
    """.format(
        short_command, long_command
    )
    with pytest.helpers.temp_file("issue-49738.sls", sls_contents, state_tree):
        ret = state.sls(
            "issue-49738",
            __pub_jid="1",  # Because these run in parallel we need a fake JID
        )
    comments = sorted(x.comment for x in ret)
    expected = sorted(
        'Command "{}" run'.format(x) for x in (short_command, long_command)
    )
    assert comments == expected, "{} != {}".format(comments, expected)
@pytest.mark.skip_on_darwin(reason="Test is broken on macosx")
@pytest.mark.skip_on_windows(
    reason=(
        "Functional testing this on windows raises unicode errors. "
        "Tested in tests/pytests/integration/modules/state/test_state.py"
    )
)
def test_state_sls_unicode_characters(state, state_tree):
    """
    test state.sls when state file contains non-ascii characters
    """
    sls_contents = """
    echo1:
      cmd.run:
        - name: "echo 'This is Æ test!'"
    """
    with pytest.helpers.temp_file("issue-46672.sls", sls_contents, state_tree):
        ret = state.sls("issue-46672")
        # The non-ascii name must round-trip intact into the state tag.
        expected = "cmd_|-echo1_|-echo 'This is Æ test!'_|-run"
        assert expected in ret
def test_state_sls_integer_name(state, state_tree):
    """
    This tests the case where the state file is named
    only with integers
    """
    sls_contents = """
    always-passes:
      test.succeed_without_changes
    """
    state_id = "test_|-always-passes_|-always-passes_|-succeed_without_changes"
    with pytest.helpers.temp_file("12345.sls", sls_contents, state_tree):
        # Addressed as a string mod name.
        ret = state.sls("12345")
        assert state_id in ret
        for state_return in ret:
            assert state_return.result is True
            assert "Success!" in state_return.comment
        # Addressed as a bare integer — must be coerced, not crash.
        ret = state.sls(mods=12345)
        assert state_id in ret
        for state_return in ret:
            assert state_return.result is True
            assert "Success!" in state_return.comment
def test_state_sls_lazyloader_allows_recursion(state, state_tree):
    """
    This tests that referencing dunders like __salt__ work
    context: https://github.com/saltstack/salt/pull/51499
    """
    # The Jinja membership test on ``salt`` exercises the LazyLoader's
    # __contains__ during rendering — that recursion used to deadlock.
    sls_contents = """
    {% if 'nonexistent_module.function' in salt %}
    {% do salt.log.warning("Module is available") %}
    {% endif %}
    always-passes:
      test.succeed_without_changes:
        - name: foo
    """
    state_id = "test_|-always-passes_|-foo_|-succeed_without_changes"
    with pytest.helpers.temp_file("issue-51499.sls", sls_contents, state_tree):
        ret = state.sls("issue-51499")
        assert state_id in ret
        for state_return in ret:
            assert state_return.result is True
            assert "Success!" in state_return.comment
|
try:
import smbus
except ImportError:
smbus = None
from .register_state import RegisterState
__all__ = ['FileRegisterState']
class FileRegisterState(RegisterState):
    """
    A `RegisterState` that is recorded in a file.

    The file format is one register per line: ``<address> <value>``.

    Parameters
    ----------
    registers
        Register map, forwarded to `RegisterState`.
    infn : str
        Readable file to use as the input
    outfn : str
        Writeable file to use as the output, if None, means use the same as ``infn``.
    update : bool
        If True, read/write the file whenever the register is accessed, if
        False waits for an explicit `read_file` `write_file` call.
    hex : bool
        If True, the file is in hex, if False, assume decimal
    register_size : int
        Register width in bits, forwarded to `RegisterState`.
    """
    def __init__(self, registers, infn, outfn=None, update=False, hex=True,
                 register_size=8):
        super().__init__(registers, register_size)
        self.infn = infn
        self.outfn = outfn
        self.update = update
        # BUG FIX: this was hard-coded to ``True``, silently ignoring the
        # ``hex`` argument and making decimal files unusable.
        self.hex = hex
        self._file_data = None  # lazily populated {address: value} map

    def read_file(self):
        """Parse ``infn`` into the in-memory ``{address: value}`` map."""
        base = 16 if self.hex else 10
        self._file_data = {}
        with open(self.infn, 'r') as f:
            for l in f:
                if l.strip() == '':
                    continue
                ls = l.split()
                assert len(ls) == 2, 'Input file has row {} which is not two-element'.format(l)
                addr, val = ls
                self._file_data[int(addr, base=base)] = int(val, base=base)

    def write_file(self):
        """Write the in-memory map to ``outfn`` (or ``infn``), sorted by address."""
        if self._file_data is None:
            # Nothing has been read or written yet — nothing to persist.
            return
        outfn = self.infn if self.outfn is None else self.outfn
        with open(outfn, 'w') as f:
            msg = '{:x} {:x}\n' if self.hex else '{} {}\n'
            for addr in sorted(self._file_data):
                f.write(msg.format(addr, self._file_data[addr]))

    def _read_register(self, address, ntimes=None):
        """Return the value at ``address`` (or ``ntimes`` consecutive values)."""
        if self.update or self._file_data is None:
            self.read_file()
        if ntimes is None:
            return self._file_data[address]
        return [self._file_data[address + i] for i in range(ntimes)]

    def _write_register(self, address, value):
        """Store ``value`` at ``address``; persists immediately when ``update``."""
        # BUG FIX: writing before any read used to raise TypeError because
        # ``_file_data`` was still None.
        if self._file_data is None:
            self._file_data = {}
        self._file_data[address] = value
        # Honor the documented ``update`` contract for writes as well as reads.
        if self.update:
            self.write_file()
|
from __future__ import annotations
import glob
import logging
import os
import sys
from types import MethodType
from typing import TYPE_CHECKING
from matchengine.internals import query_transform
from matchengine.internals.database_connectivity.mongo_connection import MongoDBConnection
from matchengine.internals.plugin_helpers.plugin_stub import (
QueryTransformerContainer,
TrialMatchDocumentCreator,
DBSecrets,
QueryNodeTransformer,
QueryNodeClinicalIDsSubsetter,
QueryNodeContainerTransformer
)
if TYPE_CHECKING:
from typing import Dict, List
from matchengine.internals.engine import MatchEngine
from matchengine.internals.typing.matchengine_types import MongoQuery
# Module-level logging setup: configure the root logger once at import
# time and use a package-scoped named logger everywhere below.
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('matchengine')
async def perform_db_call(matchengine: MatchEngine, collection: str, query: MongoQuery, projection: Dict) -> List:
    """
    Asynchronously executes a find query on the database, with specified query and projection and a collection

    Used to parallelize DB calls, with asyncio.gather
    """
    cursor = matchengine.async_db_ro[collection].find(query, projection)
    return await cursor.to_list(None)
def find_plugins(matchengine: MatchEngine):
    """
    Plugins are *.py files located in the ./plugins directory. They must be python classes which inherit either from
    QueryTransformerContainer or TrialMatchDocumentCreator.

    For more information on how the plugins function, see the README.
    """
    log.info(f"Checking for plugins in {matchengine.plugin_dir}")
    potential_files = glob.glob(os.path.join(matchengine.plugin_dir, "*.py"))
    # The built-in query transformers are always loaded first; plugin
    # modules discovered on disk are appended after them.
    to_load = [(None, 'matchengine.internals.query_transform')]
    for potential_file_path in potential_files:
        dir_path = os.path.dirname(potential_file_path)
        module_name = ''.join(os.path.basename(potential_file_path).split('.')[0:-1])
        to_load.append((dir_path, module_name))
    for dir_path, module_name in to_load:
        # Temporarily put the plugin directory on sys.path so __import__
        # can resolve the module; it is popped again right after import.
        if dir_path is not None:
            sys.path.append(dir_path)
        module = __import__(module_name)
        # __import__ returns the top-level package for dotted names; walk
        # the attribute chain down to the leaf module.
        module_path = module_name.split('.')
        if len(module_path) > 1:
            for sub_item in module_path[1::]:
                module = getattr(module, sub_item)
        if dir_path is not None:
            sys.path.pop()
        # Names listed in a module's __shared__ are attached directly to
        # the match-criteria transform object.
        for item_name in getattr(module, '__shared__', list()):
            log.info(f"Found shared plugin resource {item_name} in module {module_name}, path {dir_path}")
            setattr(matchengine.match_criteria_transform.transform, item_name, getattr(module, item_name))
        # Names listed in __export__ are dispatched on their base class.
        for item_name in module.__export__:
            item = getattr(module, item_name)
            log.info(f"Found exported plugin item {item_name} in module {module_name}, path {dir_path}")
            if issubclass(item, QueryTransformerContainer):
                log.info(f"Loading QueryTransformerContainer {item_name} type: {item}")
                query_transform.attach_transformers_to_match_criteria_transform(matchengine.match_criteria_transform,
                                                                                item)
            elif issubclass(item, TrialMatchDocumentCreator):
                # Only the configured class is bound; its methods are
                # rebound onto the engine instance (falling back to the
                # engine's own methods when the plugin omits one).
                if item_name == matchengine.match_document_creator_class:
                    log.info(f"Loading TrialMatchDocumentCreator {item_name} type: {item}")
                    setattr(matchengine,
                            'create_trial_matches',
                            MethodType(getattr(item,
                                               'create_trial_matches',
                                               matchengine.create_trial_matches),
                                       matchengine))
                    setattr(matchengine,
                            'results_transformer',
                            MethodType(getattr(item,
                                               'results_transformer',
                                               matchengine.results_transformer),
                                       matchengine))
            elif issubclass(item, DBSecrets):
                if item_name == matchengine.db_secrets_class:
                    log.info(f"Loading DBSecrets {item_name} type: {item}")
                    secrets = item().get_secrets()
                    # Stored on the class so every connection sees them.
                    setattr(MongoDBConnection, 'secrets', secrets)
            elif issubclass(item, QueryNodeTransformer):
                if item_name == matchengine.query_node_transformer_class:
                    log.info(f"Loading QueryNodeTransformer {item_name} type: {item}")
                    setattr(matchengine,
                            "query_node_transform",
                            MethodType(getattr(item,
                                               "query_node_transform"),
                                       matchengine))
            elif issubclass(item, QueryNodeClinicalIDsSubsetter):
                if item_name == matchengine.query_node_subsetter_class:
                    log.info(f"Loading QueryNodeClinicalIDsSubsetter {item_name} type: {item}")
                    setattr(matchengine,
                            "genomic_query_node_clinical_ids_subsetter",
                            MethodType(getattr(item,
                                               "genomic_query_node_clinical_ids_subsetter"),
                                       matchengine))
                    setattr(matchengine,
                            "clinical_query_node_clinical_ids_subsetter",
                            MethodType(getattr(item,
                                               "clinical_query_node_clinical_ids_subsetter"),
                                       matchengine))
            elif issubclass(item, QueryNodeContainerTransformer):
                if item_name == matchengine.query_node_container_transformer_class:
                    log.info(f"Loading QueryNodeContainerTransformer {item_name} type: {item}")
                    setattr(matchengine,
                            "query_node_container_transform",
                            MethodType(getattr(item,
                                               "query_node_container_transform"),
                                       matchengine))
def get_sort_order(sort_map: Dict, match_document: Dict) -> list:
    """
    Sort trial matches based on sorting order specified in config.json under the key 'trial_match_sorting'.

    The function will iterate over the objects in the 'trial_match_sorting', and then look for that value
    in the trial_match document, placing it in an array.

    If being displayed, the matchminerAPI filters the array to output a single sort number.

    The sorting is currently organized as follows:
    1. MMR status
    2. Tumor Mutational Burden
    3. UVA/POLE/APOBEC/Tobacco Status
    4. Tier 1
    5. Tier 2
    6. CNV
    7. Tier 3
    8. Tier 4
    9. wild type
    10. Variant Level
    11. Gene-level
    12. Exact cancer match
    13. General cancer match (all solid/liquid)
    14. DFCI Coordinating Center
    15. All other Coordinating centers
    16. Protocol Number
    """
    sort_array = []
    for sort_dimension in sort_map:
        # 99 is the sentinel "unranked" value for this dimension.
        sort_index = 99
        for sort_key, sorting_vals in sort_dimension.items():
            if sort_key not in match_document:
                continue
            # "ANY_VALUE" in the config means any document value ranks.
            is_any = sorting_vals.get("ANY_VALUE", None)
            lookup = "ANY_VALUE" if is_any is not None else str(match_document[sort_key])
            if is_any is not None or lookup in sorting_vals:
                sort_index = min(sort_index, sorting_vals[lookup])
        sort_array.append(sort_index)
    # Numeric protocol number is always the final tie-breaker.
    sort_array.append(int(match_document['protocol_no'].replace("-", "")))
    return sort_array
|
"""
Herança múltipla - Python Orientado a Objetos
"""
from smartphone import Smartphone
smartphone = Smartphone('Pocophone F1 ')
smartphone.conectar()
smartphone.desligar()
smartphone.ligar()
smartphone.conectar()
smartphone.conectar()
smartphone.conectar()
smartphone.desligar()
smartphone.conectar()
smartphone.desconectar()
|
import asyncio
import asyncio.sslproto
import gc
import os
import select
import socket
import unittest.mock
import uvloop
import ssl
import sys
import threading
import time
import weakref
from OpenSSL import SSL as openssl_ssl
from uvloop import _testbase as tb
class MyBaseProto(asyncio.Protocol):
    """Test protocol that tracks its lifecycle and counts received bytes.

    State machine: INITIAL -> CONNECTED -> (EOF ->) CLOSED.  When built
    with a loop, ``connected`` and ``done`` futures resolve on connect
    and close respectively; otherwise both stay None.
    """

    connected = None
    done = None

    def __init__(self, loop=None):
        self.transport = None
        self.state = 'INITIAL'
        self.nbytes = 0
        if loop is not None:
            self.connected = asyncio.Future(loop=loop)
            self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'
        if self.connected is not None:
            self.connected.set_result(None)

    def data_received(self, data):
        assert self.state == 'CONNECTED', self.state
        self.nbytes = self.nbytes + len(data)

    def eof_received(self):
        assert self.state == 'CONNECTED', self.state
        self.state = 'EOF'

    def connection_lost(self, exc):
        assert self.state in ('CONNECTED', 'EOF'), self.state
        self.state = 'CLOSED'
        if self.done is not None:
            self.done.set_result(None)
class _TestTCP:
    def test_create_server_1(self):
        """Full echo exchange against asyncio.start_server, once with
        host/port arguments and once with a pre-bound socket."""
        if self.is_asyncio_loop() and sys.version_info[:3] == (3, 5, 2):
            # See https://github.com/python/asyncio/pull/366 for details.
            raise unittest.SkipTest()

        CNT = 0           # number of clients that were successful
        TOTAL_CNT = 25    # total number of clients that test will create
        TIMEOUT = 5.0     # timeout for this test

        A_DATA = b'A' * 1024 * 1024
        B_DATA = b'B' * 1024 * 1024

        async def handle_client(reader, writer):
            nonlocal CNT
            data = await reader.readexactly(len(A_DATA))
            self.assertEqual(data, A_DATA)
            writer.write(b'OK')
            data = await reader.readexactly(len(B_DATA))
            self.assertEqual(data, B_DATA)
            # Exercise every write flavor: list, bytearray, memoryview.
            writer.writelines([b'S', b'P'])
            writer.write(bytearray(b'A'))
            writer.write(memoryview(b'M'))
            if self.implementation == 'uvloop':
                tr = writer.transport
                sock = tr.get_extra_info('socket')
                self.assertTrue(
                    sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))
            await writer.drain()
            writer.close()
            CNT += 1

        async def test_client(addr):
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                await self.loop.sock_connect(sock, addr)
                await self.loop.sock_sendall(sock, A_DATA)
                buf = b''
                while len(buf) != 2:
                    buf += await self.loop.sock_recv(sock, 1)
                self.assertEqual(buf, b'OK')
                await self.loop.sock_sendall(sock, B_DATA)
                buf = b''
                while len(buf) != 4:
                    buf += await self.loop.sock_recv(sock, 1)
                self.assertEqual(buf, b'SPAM')
            # Leaving the `with` block must have fully closed the socket.
            self.assertEqual(sock.fileno(), -1)
            self.assertEqual(sock._io_refs, 0)
            self.assertTrue(sock._closed)

        async def start_server():
            nonlocal CNT
            CNT = 0
            addrs = ('127.0.0.1', 'localhost')
            if not isinstance(self.loop, uvloop.Loop):
                # Hack to let tests run on Python 3.5.0
                # (asyncio doesn't support multiple hosts in 3.5.0)
                addrs = '127.0.0.1'
            srv = await asyncio.start_server(
                handle_client,
                addrs, 0,
                family=socket.AF_INET)
            srv_socks = srv.sockets
            self.assertTrue(srv_socks)
            if self.has_start_serving():
                self.assertTrue(srv.is_serving())
            addr = srv_socks[0].getsockname()
            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(test_client(addr))
            await asyncio.wait_for(asyncio.gather(*tasks), TIMEOUT)
            self.loop.call_soon(srv.close)
            await srv.wait_closed()
            # Check that the server cleaned-up proxy-sockets
            for srv_sock in srv_socks:
                self.assertEqual(srv_sock.fileno(), -1)
            if self.has_start_serving():
                self.assertFalse(srv.is_serving())

        async def start_server_sock():
            nonlocal CNT
            CNT = 0
            sock = socket.socket()
            sock.bind(('127.0.0.1', 0))
            addr = sock.getsockname()
            srv = await asyncio.start_server(
                handle_client,
                None, None,
                family=socket.AF_INET,
                sock=sock)
            if self.PY37:
                self.assertIs(srv.get_loop(), self.loop)
            srv_socks = srv.sockets
            self.assertTrue(srv_socks)
            if self.has_start_serving():
                self.assertTrue(srv.is_serving())
            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(test_client(addr))
            await asyncio.wait_for(asyncio.gather(*tasks), TIMEOUT)
            srv.close()
            await srv.wait_closed()
            # Check that the server cleaned-up proxy-sockets
            for srv_sock in srv_socks:
                self.assertEqual(srv_sock.fileno(), -1)
            if self.has_start_serving():
                self.assertFalse(srv.is_serving())

        self.loop.run_until_complete(start_server())
        self.assertEqual(CNT, TOTAL_CNT)
        self.loop.run_until_complete(start_server_sock())
        self.assertEqual(CNT, TOTAL_CNT)
    def test_create_server_2(self):
        """create_server() without host, port, or sock must raise ValueError."""
        with self.assertRaisesRegex(ValueError, 'nor sock were specified'):
            self.loop.run_until_complete(self.loop.create_server(object))
    def test_create_server_3(self):
        ''' check ephemeral port can be used '''

        async def start_server_ephemeral_ports():
            # Both 0 and None must request an OS-assigned ephemeral port.
            for port_sentinel in [0, None]:
                srv = await self.loop.create_server(
                    asyncio.Protocol,
                    '127.0.0.1', port_sentinel,
                    family=socket.AF_INET)
                srv_socks = srv.sockets
                self.assertTrue(srv_socks)
                if self.has_start_serving():
                    self.assertTrue(srv.is_serving())
                host, port = srv_socks[0].getsockname()
                self.assertNotEqual(0, port)
                self.loop.call_soon(srv.close)
                await srv.wait_closed()
                # Check that the server cleaned-up proxy-sockets
                for srv_sock in srv_socks:
                    self.assertEqual(srv_sock.fileno(), -1)
                if self.has_start_serving():
                    self.assertFalse(srv.is_serving())

        self.loop.run_until_complete(start_server_ephemeral_ports())
    def test_create_server_4(self):
        """Binding to an already-bound address must surface EADDRINUSE."""
        sock = socket.socket()
        sock.bind(('127.0.0.1', 0))
        with sock:
            addr = sock.getsockname()
            with self.assertRaisesRegex(OSError,
                                        r"error while attempting.*\('127.*: "
                                        r"address already in use"):
                self.loop.run_until_complete(
                    self.loop.create_server(object, *addr))
    def test_create_server_5(self):
        # Test that create_server sets the TCP_IPV6ONLY flag,
        # so it can bind to ipv4 and ipv6 addresses
        # simultaneously.
        port = tb.find_free_port()

        async def runner():
            # host=None binds both address families on the same port.
            srv = await self.loop.create_server(
                asyncio.Protocol,
                None, port)
            srv.close()
            await srv.wait_closed()

        self.loop.run_until_complete(runner())
    def test_create_server_6(self):
        """Two servers may share one port when reuse_port=True."""
        if not hasattr(socket, 'SO_REUSEPORT'):
            raise unittest.SkipTest(
                'The system does not support SO_REUSEPORT')
        if sys.version_info[:3] < (3, 5, 1):
            raise unittest.SkipTest(
                'asyncio in CPython 3.5.0 does not have the '
                'reuse_port argument')
        port = tb.find_free_port()

        async def runner():
            srv1 = await self.loop.create_server(
                asyncio.Protocol,
                None, port,
                reuse_port=True)
            # Second bind to the same port must succeed, not EADDRINUSE.
            srv2 = await self.loop.create_server(
                asyncio.Protocol,
                None, port,
                reuse_port=True)
            srv1.close()
            srv2.close()
            await srv1.wait_closed()
            await srv2.wait_closed()

        self.loop.run_until_complete(runner())
    def test_create_server_7(self):
        # Test that create_server() stores a hard ref to the server object
        # somewhere in the loop. In asyncio it so happens that
        # loop.sock_accept() has a reference to the server object so it
        # never gets GCed.

        class Proto(asyncio.Protocol):
            def connection_made(self, tr):
                self.tr = tr
                self.tr.write(b'hello')

        async def test():
            port = tb.find_free_port()
            srv = await self.loop.create_server(Proto, '127.0.0.1', port)
            wsrv = weakref.ref(srv)
            del srv
            # Only the weakref remains; the loop must keep the server alive.
            gc.collect()
            gc.collect()
            gc.collect()
            s = socket.socket(socket.AF_INET)
            with s:
                s.setblocking(False)
                await self.loop.sock_connect(s, ('127.0.0.1', port))
                d = await self.loop.sock_recv(s, 100)
                self.assertEqual(d, b'hello')
            srv = wsrv()
            srv.close()
            await srv.wait_closed()
            del srv
            # Let all transports shutdown.
            await asyncio.sleep(0.1)
            # After close the loop must drop its reference as well.
            gc.collect()
            gc.collect()
            gc.collect()
            self.assertIsNone(wsrv())

        self.loop.run_until_complete(test())
    def test_create_server_8(self):
        """ssl_handshake_timeout without an ssl context must be rejected."""
        if self.implementation == 'asyncio' and not self.PY37:
            # The argument only exists in asyncio starting with 3.7.
            raise unittest.SkipTest()
        with self.assertRaisesRegex(
                ValueError, 'ssl_handshake_timeout is only meaningful'):
            self.loop.run_until_complete(
                self.loop.create_server(
                    lambda: None, host='::', port=0, ssl_handshake_timeout=10))
    def test_create_server_9(self):
        """start_serving() after start_serving=False works and is idempotent."""
        if not self.has_start_serving():
            raise unittest.SkipTest()

        async def handle_client(reader, writer):
            pass

        async def start_server():
            srv = await asyncio.start_server(
                handle_client,
                '127.0.0.1', 0,
                family=socket.AF_INET,
                start_serving=False)
            await srv.start_serving()
            self.assertTrue(srv.is_serving())
            # call start_serving again
            await srv.start_serving()
            self.assertTrue(srv.is_serving())
            srv.close()
            await srv.wait_closed()
            self.assertFalse(srv.is_serving())

        self.loop.run_until_complete(start_server())
    def test_create_server_10(self):
        """serve_forever() starts serving and stops cleanly on cancellation."""
        if not self.has_start_serving():
            raise unittest.SkipTest()

        async def handle_client(reader, writer):
            pass

        async def start_server():
            srv = await asyncio.start_server(
                handle_client,
                '127.0.0.1', 0,
                family=socket.AF_INET,
                start_serving=False)
            async with srv:
                fut = asyncio.ensure_future(srv.serve_forever())
                # Yield once so serve_forever actually begins serving.
                await asyncio.sleep(0)
                self.assertTrue(srv.is_serving())
                fut.cancel()
                with self.assertRaises(asyncio.CancelledError):
                    await fut
                self.assertFalse(srv.is_serving())

        self.loop.run_until_complete(start_server())
    def test_create_connection_open_con_addr(self):
        """open_connection() by (host, port); also checks str writes are rejected."""
        async def client(addr):
            reader, writer = await asyncio.open_connection(*addr)
            writer.write(b'AAAA')
            self.assertEqual(await reader.readexactly(2), b'OK')
            # Error wording differs between asyncio and uvloop transports.
            re = r'(a bytes-like object)|(must be byte-ish)'
            with self.assertRaisesRegex(TypeError, re):
                writer.write('AAAA')
            writer.write(b'BBBB')
            self.assertEqual(await reader.readexactly(4), b'SPAM')
            if self.implementation == 'uvloop':
                tr = writer.transport
                sock = tr.get_extra_info('socket')
                self.assertTrue(
                    sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))
            writer.close()
            await self.wait_closed(writer)

        self._test_create_connection_1(client)
    def test_create_connection_open_con_sock(self):
        """open_connection() with a pre-connected socket."""
        async def client(addr):
            sock = socket.socket()
            sock.connect(addr)
            reader, writer = await asyncio.open_connection(sock=sock)
            writer.write(b'AAAA')
            self.assertEqual(await reader.readexactly(2), b'OK')
            writer.write(b'BBBB')
            self.assertEqual(await reader.readexactly(4), b'SPAM')
            if self.implementation == 'uvloop':
                tr = writer.transport
                sock = tr.get_extra_info('socket')
                self.assertTrue(
                    sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))
            writer.close()
            await self.wait_closed(writer)

        self._test_create_connection_1(client)
    def _test_create_connection_1(self, client):
        """Drive ``client`` against a threaded echo server, 100 connections."""
        CNT = 0
        TOTAL_CNT = 100

        def server(sock):
            data = sock.recv_all(4)
            self.assertEqual(data, b'AAAA')
            sock.send(b'OK')
            data = sock.recv_all(4)
            self.assertEqual(data, b'BBBB')
            sock.send(b'SPAM')

        async def client_wrapper(addr):
            await client(addr)
            nonlocal CNT
            CNT += 1

        def run(coro):
            nonlocal CNT
            CNT = 0
            with self.tcp_server(server,
                                 max_clients=TOTAL_CNT,
                                 backlog=TOTAL_CNT) as srv:
                tasks = []
                for _ in range(TOTAL_CNT):
                    tasks.append(coro(srv.addr))
                self.loop.run_until_complete(asyncio.gather(*tasks))
            # every client coroutine must have completed successfully
            self.assertEqual(CNT, TOTAL_CNT)

        run(client_wrapper)
    def test_create_connection_2(self):
        """Connecting to a bound-but-not-listening port must be refused."""
        sock = socket.socket()
        with sock:
            sock.bind(('127.0.0.1', 0))
            addr = sock.getsockname()
        # The socket is closed here, so nothing is listening on ``addr``.

        async def client():
            reader, writer = await asyncio.open_connection(*addr)
            writer.close()
            await self.wait_closed(writer)

        async def runner():
            with self.assertRaises(ConnectionRefusedError):
                await client()

        self.loop.run_until_complete(runner())
    def test_create_connection_3(self):
        """Server closing mid-read must raise IncompleteReadError client-side."""
        CNT = 0
        TOTAL_CNT = 100

        def server(sock):
            data = sock.recv_all(4)
            self.assertEqual(data, b'AAAA')
            # Close without replying — the client expects 10 more bytes.
            sock.close()

        async def client(addr):
            reader, writer = await asyncio.open_connection(*addr)
            writer.write(b'AAAA')
            with self.assertRaises(asyncio.IncompleteReadError):
                await reader.readexactly(10)
            writer.close()
            await self.wait_closed(writer)
            nonlocal CNT
            CNT += 1

        def run(coro):
            nonlocal CNT
            CNT = 0
            with self.tcp_server(server,
                                 max_clients=TOTAL_CNT,
                                 backlog=TOTAL_CNT) as srv:
                tasks = []
                for _ in range(TOTAL_CNT):
                    tasks.append(coro(srv.addr))
                self.loop.run_until_complete(asyncio.gather(*tasks))
            self.assertEqual(CNT, TOTAL_CNT)

        run(client)
    def test_create_connection_4(self):
        """open_connection() with an already-closed socket must raise OSError."""
        sock = socket.socket()
        sock.close()

        async def client():
            reader, writer = await asyncio.open_connection(sock=sock)
            writer.close()
            await self.wait_closed(writer)

        async def runner():
            with self.assertRaisesRegex(OSError, 'Bad file'):
                await client()

        self.loop.run_until_complete(runner())
    def test_create_connection_5(self):
        """Cancelling an in-flight create_connection() must raise CancelledError."""
        def server(sock):
            # Client may cancel before sending anything; tolerate the reset.
            try:
                data = sock.recv_all(4)
            except ConnectionError:
                return
            self.assertEqual(data, b'AAAA')
            sock.send(b'OK')

        async def client(addr):
            fut = asyncio.ensure_future(
                self.loop.create_connection(asyncio.Protocol, *addr))
            # Yield once so the connection attempt actually starts.
            await asyncio.sleep(0)
            fut.cancel()
            with self.assertRaises(asyncio.CancelledError):
                await fut

        with self.tcp_server(server,
                             max_clients=1,
                             backlog=1) as srv:
            self.loop.run_until_complete(client(srv.addr))
def test_create_connection_6(self):
    """ssl_handshake_timeout without ssl= must be rejected with ValueError."""
    # Plain asyncio only grew this validation in 3.7.
    if self.implementation == 'asyncio' and not self.PY37:
        raise unittest.SkipTest()

    # The coroutine does nothing until it is driven by the loop, so it
    # is safe to build it up front; the ValueError surfaces on await.
    connect = self.loop.create_connection(
        lambda: None, host='::', port=0, ssl_handshake_timeout=10)
    with self.assertRaisesRegex(
            ValueError, 'ssl_handshake_timeout is only meaningful'):
        self.loop.run_until_complete(connect)
def test_transport_shutdown(self):
    """write_eof() may be called twice; the transport must shut down cleanly."""
    CNT = 0  # number of clients that were successful
    TOTAL_CNT = 100  # total number of clients that test will create
    TIMEOUT = 5.0  # timeout for this test

    async def handle_client(reader, writer):
        nonlocal CNT
        data = await reader.readexactly(4)
        self.assertEqual(data, b'AAAA')
        writer.write(b'OK')
        writer.write_eof()
        # Intentional double call: the second write_eof() must not raise.
        writer.write_eof()
        await writer.drain()
        writer.close()
        CNT += 1

    async def test_client(addr):
        reader, writer = await asyncio.open_connection(*addr)
        writer.write(b'AAAA')
        data = await reader.readexactly(2)
        self.assertEqual(data, b'OK')
        writer.close()
        await self.wait_closed(writer)

    async def start_server():
        nonlocal CNT
        CNT = 0
        srv = await asyncio.start_server(
            handle_client,
            '127.0.0.1', 0,
            family=socket.AF_INET)
        srv_socks = srv.sockets
        self.assertTrue(srv_socks)
        addr = srv_socks[0].getsockname()
        tasks = []
        for _ in range(TOTAL_CNT):
            tasks.append(test_client(addr))
        await asyncio.wait_for(asyncio.gather(*tasks), TIMEOUT)
        srv.close()
        await srv.wait_closed()

    self.loop.run_until_complete(start_server())
    self.assertEqual(CNT, TOTAL_CNT)
def test_tcp_handle_exception_in_connection_made(self):
    # Test that if connection_made raises an exception,
    # 'create_connection' still returns.

    # Silence error logging
    self.loop.set_exception_handler(lambda *args: None)

    fut = asyncio.Future()
    connection_lost_called = asyncio.Future()

    async def server(reader, writer):
        try:
            await reader.read()
        finally:
            writer.close()

    class Proto(asyncio.Protocol):
        def connection_made(self, tr):
            # Deliberately blow up inside the callback.
            1 / 0

        def connection_lost(self, exc):
            connection_lost_called.set_result(exc)

    srv = self.loop.run_until_complete(asyncio.start_server(
        server,
        '127.0.0.1', 0,
        family=socket.AF_INET))

    async def runner():
        # create_connection must still resolve despite the ZeroDivisionError.
        tr, pr = await asyncio.wait_for(
            self.loop.create_connection(
                Proto, *srv.sockets[0].getsockname()),
            timeout=1.0)
        fut.set_result(None)
        tr.close()

    self.loop.run_until_complete(runner())
    srv.close()
    self.loop.run_until_complete(srv.wait_closed())
    self.loop.run_until_complete(fut)

    # connection_lost must fire with exc=None (orderly close, not the
    # exception from connection_made).
    self.assertIsNone(
        self.loop.run_until_complete(connection_lost_called))
class Test_UV_TCP(_TestTCP, tb.UVTestCase):
    """TCP tests that run on the uvloop implementation.

    Inherits the shared suite from _TestTCP and adds uvloop-specific
    checks (buffered protocols, transport extra-info, GC of unclosed
    handles, accepted-socket handling).
    """

    def test_create_server_buffered_1(self):
        """BufferedProtocol round-trip: receive SIZE bytes, echo b'hello'."""
        SIZE = 123123
        eof = False
        fut = asyncio.Future()

        class Proto(asyncio.BaseProtocol):
            def connection_made(self, tr):
                self.tr = tr
                self.recvd = b''
                # Fixed 50-byte receive buffer exposed via get_buffer().
                self.data = bytearray(50)
                self.buf = memoryview(self.data)

            def get_buffer(self, sizehint):
                return self.buf

            def buffer_updated(self, nbytes):
                self.recvd += self.buf[:nbytes]
                if self.recvd == b'a' * SIZE:
                    self.tr.write(b'hello')

            def eof_received(self):
                nonlocal eof
                eof = True

            def connection_lost(self, exc):
                fut.set_result(exc)

        async def test():
            port = tb.find_free_port()
            srv = await self.loop.create_server(Proto, '127.0.0.1', port)
            s = socket.socket(socket.AF_INET)
            with s:
                s.setblocking(False)
                await self.loop.sock_connect(s, ('127.0.0.1', port))
                await self.loop.sock_sendall(s, b'a' * SIZE)
                d = await self.loop.sock_recv(s, 100)
                self.assertEqual(d, b'hello')
            srv.close()
            await srv.wait_closed()

        self.loop.run_until_complete(test())
        self.loop.run_until_complete(fut)
        self.assertTrue(eof)
        # Clean close: connection_lost must have received None.
        self.assertIsNone(fut.result())

    def test_create_server_buffered_2(self):
        """Misbehaving BufferedProtocols must abort the connection.

        Four failure modes: get_buffer() raising, get_buffer() returning
        a zero-length bytearray / memoryview, and buffer_updated() raising.
        In each case connection_lost() must receive the error.
        """
        class ProtoExc(asyncio.BaseProtocol):
            def __init__(self):
                self._lost_exc = None

            def get_buffer(self, sizehint):
                # Raise inside get_buffer().
                1 / 0

            def buffer_updated(self, nbytes):
                pass

            def connection_lost(self, exc):
                self._lost_exc = exc

            def eof_received(self):
                pass

        class ProtoZeroBuf1(asyncio.BaseProtocol):
            def __init__(self):
                self._lost_exc = None

            def get_buffer(self, sizehint):
                # Zero-length buffer is invalid.
                return bytearray(0)

            def buffer_updated(self, nbytes):
                pass

            def connection_lost(self, exc):
                self._lost_exc = exc

            def eof_received(self):
                pass

        class ProtoZeroBuf2(asyncio.BaseProtocol):
            def __init__(self):
                self._lost_exc = None

            def get_buffer(self, sizehint):
                # Zero-length memoryview is equally invalid.
                return memoryview(bytearray(0))

            def buffer_updated(self, nbytes):
                pass

            def connection_lost(self, exc):
                self._lost_exc = exc

            def eof_received(self):
                pass

        class ProtoUpdatedError(asyncio.BaseProtocol):
            def __init__(self):
                self._lost_exc = None

            def get_buffer(self, sizehint):
                return memoryview(bytearray(100))

            def buffer_updated(self, nbytes):
                # Raise inside buffer_updated().
                raise RuntimeError('oups')

            def connection_lost(self, exc):
                self._lost_exc = exc

            def eof_received(self):
                pass

        async def test(proto_factory, exc_type, exc_re):
            port = tb.find_free_port()
            proto = proto_factory()
            srv = await self.loop.create_server(
                lambda: proto, '127.0.0.1', port)
            try:
                s = socket.socket(socket.AF_INET)
                with s:
                    s.setblocking(False)
                    await self.loop.sock_connect(s, ('127.0.0.1', port))
                    await self.loop.sock_sendall(s, b'a')
                    d = await self.loop.sock_recv(s, 100)
                    if not d:
                        # Empty read == peer closed; normalize to the
                        # exception the test expects.
                        raise ConnectionResetError
            except ConnectionResetError:
                pass
            else:
                self.fail("server didn't abort the connection")
                return
            finally:
                srv.close()
                await srv.wait_closed()

            if proto._lost_exc is None:
                self.fail("connection_lost() was not called")
                return

            with self.assertRaisesRegex(exc_type, exc_re):
                raise proto._lost_exc

        # Silence the loop's error logging for the provoked failures.
        self.loop.set_exception_handler(lambda loop, ctx: None)

        self.loop.run_until_complete(
            test(ProtoExc, RuntimeError, 'unhandled error .* get_buffer'))

        self.loop.run_until_complete(
            test(ProtoZeroBuf1, RuntimeError, 'unhandled error .* get_buffer'))

        self.loop.run_until_complete(
            test(ProtoZeroBuf2, RuntimeError, 'unhandled error .* get_buffer'))

        self.loop.run_until_complete(
            test(ProtoUpdatedError, RuntimeError, r'^oups$'))

    def test_transport_get_extra_info(self):
        # This tests is only for uvloop.  asyncio should pass it
        # too in Python 3.6.

        fut = asyncio.Future()

        async def handle_client(reader, writer):
            with self.assertRaises(asyncio.IncompleteReadError):
                await reader.readexactly(4)
            writer.close()

            # Previously, when we used socket.fromfd to create a socket
            # for UVTransports (to make get_extra_info() work), a duplicate
            # of the socket was created, preventing UVTransport from being
            # properly closed.
            # This test ensures that server handle will receive an EOF
            # and finish the request.
            fut.set_result(None)

        async def test_client(addr):
            t, p = await self.loop.create_connection(
                lambda: asyncio.Protocol(), *addr)

            if hasattr(t, 'get_protocol'):
                # Exercise protocol swapping, then restore the original.
                p2 = asyncio.Protocol()
                self.assertIs(t.get_protocol(), p)
                t.set_protocol(p2)
                self.assertIs(t.get_protocol(), p2)
                t.set_protocol(p)

            self.assertFalse(t._paused)
            self.assertTrue(t.is_reading())
            t.pause_reading()
            t.pause_reading()  # Check that it's OK to call it 2nd time.
            self.assertTrue(t._paused)
            self.assertFalse(t.is_reading())
            t.resume_reading()
            t.resume_reading()  # Check that it's OK to call it 2nd time.
            self.assertFalse(t._paused)
            self.assertTrue(t.is_reading())

            sock = t.get_extra_info('socket')
            # Same object must be returned on every call.
            self.assertIs(sock, t.get_extra_info('socket'))
            sockname = sock.getsockname()
            peername = sock.getpeername()

            # The transport owns the fd; manual reader/writer registration
            # on it must be refused by the loop.
            with self.assertRaisesRegex(RuntimeError, 'is used by transport'):
                self.loop.add_writer(sock.fileno(), lambda: None)
            with self.assertRaisesRegex(RuntimeError, 'is used by transport'):
                self.loop.remove_writer(sock.fileno())
            with self.assertRaisesRegex(RuntimeError, 'is used by transport'):
                self.loop.add_reader(sock.fileno(), lambda: None)
            with self.assertRaisesRegex(RuntimeError, 'is used by transport'):
                self.loop.remove_reader(sock.fileno())

            self.assertEqual(t.get_extra_info('sockname'),
                             sockname)
            self.assertEqual(t.get_extra_info('peername'),
                             peername)

            t.write(b'OK')  # We want server to fail.

            self.assertFalse(t._closing)
            t.abort()
            self.assertTrue(t._closing)

            self.assertFalse(t.is_reading())
            # Check that pause_reading and resume_reading don't raise
            # errors if called after the transport is closed.
            t.pause_reading()
            t.resume_reading()

            await fut

            # Test that peername and sockname are available after
            # the transport is closed.
            self.assertEqual(t.get_extra_info('peername'),
                             peername)
            self.assertEqual(t.get_extra_info('sockname'),
                             sockname)

        async def start_server():
            srv = await asyncio.start_server(
                handle_client,
                '127.0.0.1', 0,
                family=socket.AF_INET)

            addr = srv.sockets[0].getsockname()
            await test_client(addr)

            srv.close()
            await srv.wait_closed()

        self.loop.run_until_complete(start_server())

    def test_create_server_float_backlog(self):
        """Non-integer backlog values must raise TypeError."""
        # asyncio spits out a warning we cannot suppress

        async def runner(bl):
            await self.loop.create_server(
                asyncio.Protocol,
                None, 0, backlog=bl)

        for bl in (1.1, '1'):
            with self.subTest(backlog=bl):
                with self.assertRaisesRegex(TypeError, 'integer'):
                    self.loop.run_until_complete(runner(bl))

    def test_many_small_writes(self):
        """Stress the write path with 2N+14 chunked writes and verify totals."""
        N = 10000
        TOTAL = 0

        fut = self.loop.create_future()

        async def server(reader, writer):
            nonlocal TOTAL
            while True:
                d = await reader.read(10000)
                if not d:
                    break
                TOTAL += len(d)
            fut.set_result(True)
            writer.close()

        async def run():
            srv = await asyncio.start_server(
                server,
                '127.0.0.1', 0,
                family=socket.AF_INET)

            addr = srv.sockets[0].getsockname()
            r, w = await asyncio.open_connection(*addr)

            DATA = b'x' * 102400

            # Test _StreamWriteContext with short sequences of writes
            w.write(DATA)
            await w.drain()

            for _ in range(3):
                w.write(DATA)
            await w.drain()

            for _ in range(10):
                w.write(DATA)
            await w.drain()

            for _ in range(N):
                w.write(DATA)

            try:
                # A str is not a valid payload; the TypeError must not
                # corrupt the pending byte writes queued above.
                w.write('a')
            except TypeError:
                pass

            await w.drain()

            for _ in range(N):
                w.write(DATA)

            await w.drain()

            w.close()
            await fut

            await self.wait_closed(w)

            srv.close()
            await srv.wait_closed()

            # 1 + 3 + 10 + N + N writes of DATA == (2N + 14) * len(DATA).
            self.assertEqual(TOTAL, N * 2 * len(DATA) + 14 * len(DATA))

        self.loop.run_until_complete(run())

    @unittest.skipIf(sys.version_info[:3] >= (3, 8, 0),
                     "3.8 has a different method of GCing unclosed streams")
    def test_tcp_handle_unclosed_gc(self):
        """A TCP handle left unclosed must emit a ResourceWarning when GC'd."""
        fut = self.loop.create_future()

        async def server(reader, writer):
            writer.transport.abort()
            fut.set_result(True)

        async def run():
            addr = srv.sockets[0].getsockname()
            # Deliberately drop the (reader, writer) pair unclosed.
            await asyncio.open_connection(*addr)
            await fut
            srv.close()
            await srv.wait_closed()

        srv = self.loop.run_until_complete(asyncio.start_server(
            server,
            '127.0.0.1', 0,
            family=socket.AF_INET))

        if self.loop.get_debug():
            rx = r'unclosed resource <TCP.*; ' \
                 r'object created at(.|\n)*test_tcp_handle_unclosed_gc'
        else:
            rx = r'unclosed resource <TCP.*'

        with self.assertWarnsRegex(ResourceWarning, rx):
            self.loop.create_task(run())
            self.loop.run_until_complete(srv.wait_closed())
            self.loop.run_until_complete(asyncio.sleep(0.1))
            # Drop the last strong reference, then force collection so
            # the unclosed handle is finalized and warns.
            srv = None
            gc.collect()
            gc.collect()
            gc.collect()
            self.loop.run_until_complete(asyncio.sleep(0.1))

        # Since one TCPTransport handle wasn't closed correctly,
        # we need to disable this check:
        self.skip_unclosed_handles_check()

    def test_tcp_handle_abort_in_connection_made(self):
        """abort() inside connection_made() must leave no socket behind."""
        async def server(reader, writer):
            try:
                await reader.read()
            finally:
                writer.close()

        class Proto(asyncio.Protocol):
            def connection_made(self, tr):
                tr.abort()

        srv = self.loop.run_until_complete(asyncio.start_server(
            server,
            '127.0.0.1', 0,
            family=socket.AF_INET))

        async def runner():
            tr, pr = await asyncio.wait_for(
                self.loop.create_connection(
                    Proto, *srv.sockets[0].getsockname()),
                timeout=1.0)

            # Asyncio would return a closed socket, which we
            # can't do: the transport was aborted, hence there
            # is no FD to attach a socket to (to make
            # get_extra_info() work).
            self.assertIsNone(tr.get_extra_info('socket'))
            tr.close()

        self.loop.run_until_complete(runner())
        srv.close()
        self.loop.run_until_complete(srv.wait_closed())

    def test_connect_accepted_socket_ssl_args(self):
        """ssl_handshake_timeout without ssl= is rejected here too."""
        if self.implementation == 'asyncio' and not self.PY37:
            raise unittest.SkipTest()

        with self.assertRaisesRegex(
                ValueError, 'ssl_handshake_timeout is only meaningful'):
            with socket.socket() as s:
                self.loop.run_until_complete(
                    self.loop.connect_accepted_socket(
                        (lambda: None), s, ssl_handshake_timeout=10.0))

    def test_connect_accepted_socket(self, server_ssl=None, client_ssl=None):
        """Full request/response round-trip over connect_accepted_socket().

        Also reused by the SSL suite, which passes server/client contexts.
        """
        loop = self.loop

        class MyProto(MyBaseProto):

            def connection_lost(self, exc):
                super().connection_lost(exc)
                loop.call_soon(loop.stop)

            def data_received(self, data):
                super().data_received(data)
                self.transport.write(expected_response)

        lsock = socket.socket(socket.AF_INET)
        lsock.bind(('127.0.0.1', 0))
        lsock.listen(1)
        addr = lsock.getsockname()

        message = b'test data'
        response = None
        expected_response = b'roger'

        def client():
            nonlocal response
            try:
                csock = socket.socket(socket.AF_INET)
                if client_ssl is not None:
                    csock = client_ssl.wrap_socket(csock)
                csock.connect(addr)
                csock.sendall(message)
                response = csock.recv(99)
                csock.close()
            except Exception as exc:
                # Runs in a worker thread — surface failures on stdout
                # rather than crashing the thread silently.
                print(
                    "Failure in client thread in test_connect_accepted_socket",
                    exc)

        thread = threading.Thread(target=client, daemon=True)
        thread.start()

        conn, _ = lsock.accept()
        proto = MyProto(loop=loop)
        proto.loop = loop

        extras = {}
        if server_ssl and (self.implementation != 'asyncio' or self.PY37):
            extras = dict(ssl_handshake_timeout=10.0)

        f = loop.create_task(
            loop.connect_accepted_socket(
                (lambda: proto), conn, ssl=server_ssl,
                **extras))
        # MyProto.connection_lost stops the loop when the exchange is done.
        loop.run_forever()
        conn.close()
        lsock.close()

        thread.join(1)
        self.assertFalse(thread.is_alive())
        self.assertEqual(proto.state, 'CLOSED')
        self.assertEqual(proto.nbytes, len(message))
        self.assertEqual(response, expected_response)
        tr, _ = f.result()

        if server_ssl:
            self.assertIn('SSL', tr.__class__.__name__)

        tr.close()
        # let it close
        self.loop.run_until_complete(asyncio.sleep(0.1))

    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'no Unix sockets')
    def test_create_connection_wrong_sock(self):
        """A datagram socket must be rejected by create_connection()."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        with sock:
            coro = self.loop.create_connection(MyBaseProto, sock=sock)
            with self.assertRaisesRegex(ValueError,
                                        'A Stream Socket was expected'):
                self.loop.run_until_complete(coro)

    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'no Unix sockets')
    def test_create_server_wrong_sock(self):
        """A datagram socket must be rejected by create_server()."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        with sock:
            coro = self.loop.create_server(MyBaseProto, sock=sock)
            with self.assertRaisesRegex(ValueError,
                                        'A Stream Socket was expected'):
                self.loop.run_until_complete(coro)

    @unittest.skipUnless(hasattr(socket, 'SOCK_NONBLOCK'),
                         'no socket.SOCK_NONBLOCK (linux only)')
    def test_create_server_stream_bittype(self):
        """SOCK_STREAM OR'ed with SOCK_NONBLOCK still counts as a stream."""
        sock = socket.socket(
            socket.AF_INET, socket.SOCK_STREAM | socket.SOCK_NONBLOCK)
        with sock:
            coro = self.loop.create_server(lambda: None, sock=sock)
            srv = self.loop.run_until_complete(coro)
            srv.close()
            self.loop.run_until_complete(srv.wait_closed())

    def test_flowcontrol_mixin_set_write_limits(self):
        """set_write_buffer_limits() validation and pause/resume semantics."""
        async def client(addr):
            paused = False

            class Protocol(asyncio.Protocol):
                def pause_writing(self):
                    nonlocal paused
                    paused = True

                def resume_writing(self):
                    nonlocal paused
                    paused = False

            t, p = await self.loop.create_connection(Protocol, *addr)

            t.write(b'q' * 512)
            self.assertEqual(t.get_write_buffer_size(), 512)

            t.set_write_buffer_limits(low=16385)
            self.assertFalse(paused)
            self.assertEqual(t.get_write_buffer_limits(), (16385, 65540))

            with self.assertRaisesRegex(ValueError, 'high.*must be >= low'):
                t.set_write_buffer_limits(high=0, low=1)

            t.set_write_buffer_limits(high=1024, low=128)
            self.assertFalse(paused)
            self.assertEqual(t.get_write_buffer_limits(), (128, 1024))

            # Buffered 512 bytes now exceed high=256 -> must pause.
            t.set_write_buffer_limits(high=256, low=128)
            self.assertTrue(paused)
            self.assertEqual(t.get_write_buffer_limits(), (128, 256))

            t.close()

        with self.tcp_server(lambda sock: sock.recv_all(1),
                             max_clients=1,
                             backlog=1) as srv:
            self.loop.run_until_complete(client(srv.addr))
class Test_AIO_TCP(_TestTCP, tb.AIOTestCase):
    """Run the shared _TestTCP suite against the stock asyncio event loop."""
    pass
class _TestSSL(tb.SSLTestCase):
    """Shared SSL/TLS tests, parameterized over loop implementations
    by the concrete subclasses (uvloop and stock asyncio)."""

    ONLYCERT = tb._cert_fullname(__file__, 'ssl_cert.pem')  # server certificate
    ONLYKEY = tb._cert_fullname(__file__, 'ssl_key.pem')  # matching private key

    PAYLOAD_SIZE = 1024 * 100  # bytes per bulk payload in transfer tests
    TIMEOUT = 60  # generous per-test timeout in seconds
def test_create_server_ssl_1(self):
    """TLS server handles TOTAL_CNT concurrent clients exchanging 1 MiB blobs."""
    CNT = 0  # number of clients that were successful
    TOTAL_CNT = 25  # total number of clients that test will create
    TIMEOUT = 10.0  # timeout for this test

    A_DATA = b'A' * 1024 * 1024
    B_DATA = b'B' * 1024 * 1024

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx = self._create_client_ssl_context()

    clients = []

    async def handle_client(reader, writer):
        nonlocal CNT

        data = await reader.readexactly(len(A_DATA))
        self.assertEqual(data, A_DATA)
        writer.write(b'OK')

        data = await reader.readexactly(len(B_DATA))
        self.assertEqual(data, B_DATA)
        # Mixed buffer types (bytes / bytearray / memoryview) on purpose.
        writer.writelines([b'SP', bytearray(b'A'), memoryview(b'M')])

        await writer.drain()
        writer.close()

        CNT += 1

    async def test_client(addr):
        fut = asyncio.Future()

        def prog(sock):
            # Runs in a client thread; report outcome via `fut`.
            try:
                sock.starttls(client_sslctx)
                sock.connect(addr)
                sock.send(A_DATA)

                data = sock.recv_all(2)
                self.assertEqual(data, b'OK')

                sock.send(B_DATA)
                data = sock.recv_all(4)
                self.assertEqual(data, b'SPAM')

                sock.close()
            except Exception as ex:
                self.loop.call_soon_threadsafe(fut.set_exception, ex)
            else:
                self.loop.call_soon_threadsafe(fut.set_result, None)

        client = self.tcp_client(prog)
        client.start()
        clients.append(client)

        await fut

    async def start_server():
        extras = {}
        if self.implementation != 'asyncio' or self.PY37:
            extras = dict(ssl_handshake_timeout=10.0)

        srv = await asyncio.start_server(
            handle_client,
            '127.0.0.1', 0,
            family=socket.AF_INET,
            ssl=sslctx,
            **extras)

        try:
            srv_socks = srv.sockets
            self.assertTrue(srv_socks)

            addr = srv_socks[0].getsockname()

            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(test_client(addr))

            await asyncio.wait_for(asyncio.gather(*tasks), TIMEOUT)

        finally:
            self.loop.call_soon(srv.close)
            await srv.wait_closed()

    with self._silence_eof_received_warning():
        self.loop.run_until_complete(start_server())

    self.assertEqual(CNT, TOTAL_CNT)

    for client in clients:
        client.stop()
def test_create_connection_ssl_1(self):
    """TLS client transfers, both via host/port and via a pre-connected sock."""
    if self.implementation == 'asyncio':
        # Don't crash on asyncio errors
        self.loop.set_exception_handler(None)

    CNT = 0
    TOTAL_CNT = 25

    A_DATA = b'A' * 1024 * 1024
    B_DATA = b'B' * 1024 * 1024

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx = self._create_client_ssl_context()

    def server(sock):
        sock.starttls(
            sslctx,
            server_side=True)

        data = sock.recv_all(len(A_DATA))
        self.assertEqual(data, A_DATA)
        sock.send(b'OK')

        data = sock.recv_all(len(B_DATA))
        self.assertEqual(data, B_DATA)
        sock.send(b'SPAM')

        sock.close()

    async def client(addr):
        extras = {}
        if self.implementation != 'asyncio' or self.PY37:
            extras = dict(ssl_handshake_timeout=10.0)

        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='',
            **extras)

        writer.write(A_DATA)
        self.assertEqual(await reader.readexactly(2), b'OK')

        writer.write(B_DATA)
        self.assertEqual(await reader.readexactly(4), b'SPAM')

        nonlocal CNT
        CNT += 1

        writer.close()
        await self.wait_closed(writer)

    async def client_sock(addr):
        # Same exchange, but hand open_connection() an already
        # connected socket instead of an address.
        sock = socket.socket()
        sock.connect(addr)
        reader, writer = await asyncio.open_connection(
            sock=sock,
            ssl=client_sslctx,
            server_hostname='')

        writer.write(A_DATA)
        self.assertEqual(await reader.readexactly(2), b'OK')

        writer.write(B_DATA)
        self.assertEqual(await reader.readexactly(4), b'SPAM')

        nonlocal CNT
        CNT += 1

        writer.close()
        await self.wait_closed(writer)
        sock.close()

    def run(coro):
        nonlocal CNT
        CNT = 0

        with self.tcp_server(server,
                             max_clients=TOTAL_CNT,
                             backlog=TOTAL_CNT) as srv:
            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(coro(srv.addr))

            self.loop.run_until_complete(asyncio.gather(*tasks))
        self.assertEqual(CNT, TOTAL_CNT)

    with self._silence_eof_received_warning():
        run(client)

    with self._silence_eof_received_warning():
        run(client_sock)
def test_create_connection_ssl_slow_handshake(self):
    """A server that never completes the TLS handshake triggers the timeout."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    client_sslctx = self._create_client_ssl_context()

    # silence error logger
    self.loop.set_exception_handler(lambda *args: None)

    def server(sock):
        # Read forever without ever doing a TLS handshake.
        try:
            sock.recv_all(1024 * 1024)
        except ConnectionAbortedError:
            pass
        finally:
            sock.close()

    async def client(addr):
        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='',
            ssl_handshake_timeout=1.0)
        writer.close()
        await self.wait_closed(writer)

    with self.tcp_server(server,
                         max_clients=1,
                         backlog=1) as srv:
        with self.assertRaisesRegex(
                ConnectionAbortedError,
                r'SSL handshake.*is taking longer'):

            self.loop.run_until_complete(client(srv.addr))
def test_create_connection_ssl_failed_certificate(self):
    """Certificate verification failure surfaces as an SSL error to the client."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    # silence error logger
    self.loop.set_exception_handler(lambda *args: None)

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    # Verification left ON so the self-signed cert is rejected.
    client_sslctx = self._create_client_ssl_context(disable_verify=False)

    def server(sock):
        try:
            sock.starttls(
                sslctx,
                server_side=True)
            # NOTE(review): connect() with no arguments looks odd for a
            # server-side socket — presumably any resulting error lands
            # in the except below; confirm against the tb socket helper.
            sock.connect()
        except (ssl.SSLError, OSError):
            pass
        finally:
            sock.close()

    async def client(addr):
        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='',
            ssl_handshake_timeout=1.0)
        writer.close()
        await self.wait_closed(writer)

    with self.tcp_server(server,
                         max_clients=1,
                         backlog=1) as srv:
        exc_type = ssl.SSLError
        if self.PY37:
            # 3.7 raises the more specific subclass.
            exc_type = ssl.SSLCertVerificationError
        with self.assertRaises(exc_type):
            self.loop.run_until_complete(client(srv.addr))
def test_start_tls_wrong_args(self):
    """start_tls() rejects a non-SSLContext and a non-transport argument."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    async def main():
        # Third argument must be an SSLContext.
        with self.assertRaisesRegex(TypeError, 'SSLContext, got'):
            await self.loop.start_tls(None, None, None)

        sslctx = self._create_server_ssl_context(
            self.ONLYCERT, self.ONLYKEY)
        # First argument must be a supported transport.
        with self.assertRaisesRegex(TypeError, 'is not supported'):
            await self.loop.start_tls(None, None, sslctx)

    self.loop.run_until_complete(main())
def test_ssl_handshake_timeout(self):
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    # bpo-29970: Check that a connection is aborted if handshake is not
    # completed in timeout period, instead of remaining open indefinitely

    client_sslctx = self._create_client_ssl_context()

    # silence error logger
    messages = []
    self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))

    server_side_aborted = False

    def server(sock):
        nonlocal server_side_aborted
        try:
            # Never respond to the handshake; just sit on the socket.
            sock.recv_all(1024 * 1024)
        except ConnectionAbortedError:
            server_side_aborted = True
        finally:
            sock.close()

    async def client(addr):
        # wait_for() cancels the connect before ssl_handshake_timeout
        # itself could expire (0.5 < 10.0).
        await asyncio.wait_for(
            self.loop.create_connection(
                asyncio.Protocol,
                *addr,
                ssl=client_sslctx,
                server_hostname='',
                ssl_handshake_timeout=10.0),
            0.5)

    with self.tcp_server(server,
                         max_clients=1,
                         backlog=1) as srv:
        with self.assertRaises(asyncio.TimeoutError):
            self.loop.run_until_complete(client(srv.addr))

    self.assertTrue(server_side_aborted)

    # Python issue #23197: cancelling a handshake must not raise an
    # exception or log an error, even if the handshake failed
    self.assertEqual(messages, [])
def test_ssl_handshake_connection_lost(self):
    """A handshake broken by the peer must not invoke protocol callbacks.

    Regression test for uvloop issue #246: connection_lost() must never be
    called before connection_made(), and since the handshake never
    completes here, neither callback should fire at all.

    Fixes over the original: the second failure message used implicit
    string concatenation without a separating space ("withoutcalling"),
    and the awaited create_connection() call carried a stray trailing
    comma that wrapped its (discarded) result in a one-element tuple.
    """
    # #246: make sure that no connection_lost() is called before
    # connection_made() is called first

    client_sslctx = self._create_client_ssl_context()

    # silence error logger
    self.loop.set_exception_handler(lambda loop, ctx: None)

    connection_made_called = False
    connection_lost_called = False

    def server(sock):
        sock.recv(1024)
        # break the connection during handshake
        sock.close()

    class ClientProto(asyncio.Protocol):
        def connection_made(self, transport):
            nonlocal connection_made_called
            connection_made_called = True

        def connection_lost(self, exc):
            nonlocal connection_lost_called
            connection_lost_called = True

    async def client(addr):
        await self.loop.create_connection(
            ClientProto,
            *addr,
            ssl=client_sslctx,
            server_hostname='')

    with self.tcp_server(server,
                         max_clients=1,
                         backlog=1) as srv:
        with self.assertRaises(ConnectionResetError):
            self.loop.run_until_complete(client(srv.addr))

    if connection_lost_called:
        if connection_made_called:
            self.fail("unexpected call to connection_lost()")
        else:
            self.fail("unexpected call to connection_lost() without "
                      "calling connection_made()")
    elif connection_made_called:
        self.fail("unexpected call to connection_made()")
def test_ssl_connect_accepted_socket(self):
    """Run the accepted-socket round-trip with TLS on both ends.

    Builds matching server and client SSLContexts (verification disabled —
    the test cert is self-signed) and delegates to
    Test_UV_TCP.test_connect_accepted_socket.

    Fix over the original: the client-context guard tested
    ``hasattr(server_context, ...)`` (copy-paste); it now checks the
    object it actually configures.  Behavior is unchanged since both are
    plain ssl.SSLContext instances.
    """
    if hasattr(ssl, 'PROTOCOL_TLS'):
        proto = ssl.PROTOCOL_TLS  # auto-negotiating protocol (3.6+)
    else:
        proto = ssl.PROTOCOL_SSLv23  # legacy alias on older Pythons
    server_context = ssl.SSLContext(proto)
    server_context.load_cert_chain(self.ONLYCERT, self.ONLYKEY)
    if hasattr(server_context, 'check_hostname'):
        server_context.check_hostname = False
    server_context.verify_mode = ssl.CERT_NONE

    client_context = ssl.SSLContext(proto)
    if hasattr(client_context, 'check_hostname'):
        client_context.check_hostname = False
    client_context.verify_mode = ssl.CERT_NONE

    Test_UV_TCP.test_connect_accepted_socket(
        self, server_context, client_context)
def test_start_tls_client_corrupted_ssl(self):
    """Plaintext injected into a TLS stream must raise SSLError on read."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    self.loop.set_exception_handler(lambda loop, ctx: None)

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx = self._create_client_ssl_context()

    def server(sock):
        # Keep a raw duplicate of the fd so we can write plaintext
        # underneath the TLS layer to corrupt the stream.
        orig_sock = sock.dup()
        try:
            sock.starttls(
                sslctx,
                server_side=True)
            sock.sendall(b'A\n')
            sock.recv_all(1)
            orig_sock.send(b'please corrupt the SSL connection')
        except ssl.SSLError:
            pass
        finally:
            sock.close()
            orig_sock.close()

    async def client(addr):
        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='')

        self.assertEqual(await reader.readline(), b'A\n')
        writer.write(b'B')
        # The injected plaintext breaks the record layer.
        with self.assertRaises(ssl.SSLError):
            await reader.readline()

        writer.close()
        try:
            await self.wait_closed(writer)
        except ssl.SSLError:
            pass
        return 'OK'

    with self.tcp_server(server,
                         max_clients=1,
                         backlog=1) as srv:

        res = self.loop.run_until_complete(client(srv.addr))

    self.assertEqual(res, 'OK')
def test_start_tls_client_reg_proto_1(self):
    """Upgrade a plaintext connection with start_tls(); protocol survives."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    HELLO_MSG = b'1' * self.PAYLOAD_SIZE

    server_context = self._create_server_ssl_context(
        self.ONLYCERT, self.ONLYKEY)
    client_context = self._create_client_ssl_context()

    def serve(sock):
        sock.settimeout(self.TIMEOUT)

        # First exchange happens in the clear, then upgrade to TLS.
        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        sock.starttls(server_context, server_side=True)

        sock.sendall(b'O')
        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        sock.unwrap()
        sock.close()

    class ClientProto(asyncio.Protocol):
        def __init__(self, on_data, on_eof):
            self.on_data = on_data
            self.on_eof = on_eof
            self.con_made_cnt = 0

        # `proto` is this instance; the test's `self` is captured from
        # the enclosing method for assertions.
        def connection_made(proto, tr):
            proto.con_made_cnt += 1
            # Ensure connection_made gets called only once.
            self.assertEqual(proto.con_made_cnt, 1)

        def data_received(self, data):
            self.on_data.set_result(data)

        def eof_received(self):
            self.on_eof.set_result(True)

    async def client(addr):
        await asyncio.sleep(0.5)

        on_data = self.loop.create_future()
        on_eof = self.loop.create_future()

        tr, proto = await self.loop.create_connection(
            lambda: ClientProto(on_data, on_eof), *addr)

        tr.write(HELLO_MSG)
        new_tr = await self.loop.start_tls(tr, proto, client_context)

        self.assertEqual(await on_data, b'O')
        new_tr.write(HELLO_MSG)
        await on_eof

        new_tr.close()

    with self.tcp_server(serve, timeout=self.TIMEOUT) as srv:
        self.loop.run_until_complete(
            asyncio.wait_for(client(srv.addr), timeout=10))
def test_create_connection_memory_leak(self):
    """The SSL client context must be collectable after the connection ends."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    HELLO_MSG = b'1' * self.PAYLOAD_SIZE

    server_context = self._create_server_ssl_context(
        self.ONLYCERT, self.ONLYKEY)
    client_context = self._create_client_ssl_context()

    def serve(sock):
        sock.settimeout(self.TIMEOUT)

        sock.starttls(server_context, server_side=True)

        sock.sendall(b'O')
        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        sock.unwrap()
        sock.close()

    class ClientProto(asyncio.Protocol):
        def __init__(self, on_data, on_eof):
            self.on_data = on_data
            self.on_eof = on_eof
            self.con_made_cnt = 0

        def connection_made(proto, tr):
            # XXX: We assume user stores the transport in protocol
            proto.tr = tr
            proto.con_made_cnt += 1
            # Ensure connection_made gets called only once.
            self.assertEqual(proto.con_made_cnt, 1)

        def data_received(self, data):
            self.on_data.set_result(data)

        def eof_received(self):
            self.on_eof.set_result(True)

    async def client(addr):
        await asyncio.sleep(0.5)

        on_data = self.loop.create_future()
        on_eof = self.loop.create_future()

        tr, proto = await self.loop.create_connection(
            lambda: ClientProto(on_data, on_eof), *addr,
            ssl=client_context)

        self.assertEqual(await on_data, b'O')
        tr.write(HELLO_MSG)
        await on_eof

        tr.close()

    with self.tcp_server(serve, timeout=self.TIMEOUT) as srv:
        self.loop.run_until_complete(
            asyncio.wait_for(client(srv.addr), timeout=10))

    # No garbage is left for SSL client from loop.create_connection, even
    # if user stores the SSLTransport in corresponding protocol instance
    client_context = weakref.ref(client_context)
    self.assertIsNone(client_context())
def test_start_tls_client_buf_proto_1(self):
    """start_tls() with a BufferedProtocol, then a set_protocol() swap.

    Also verifies that connection_made() fires exactly once on the
    application protocol — start_tls() must not call it again.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    HELLO_MSG = b'1' * self.PAYLOAD_SIZE

    server_context = self._create_server_ssl_context(
        self.ONLYCERT, self.ONLYKEY)
    client_context = self._create_client_ssl_context()
    client_con_made_calls = 0

    def serve(sock):
        sock.settimeout(self.TIMEOUT)

        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        sock.starttls(server_context, server_side=True)

        sock.sendall(b'O')
        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        sock.sendall(b'2')
        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        sock.unwrap()
        sock.close()

    class ClientProtoFirst(asyncio.BaseProtocol):
        def __init__(self, on_data):
            self.on_data = on_data
            # One-byte buffer: each server byte arrives individually.
            self.buf = bytearray(1)

        def connection_made(self, tr):
            nonlocal client_con_made_calls
            client_con_made_calls += 1

        def get_buffer(self, sizehint):
            return self.buf

        def buffer_updated(self, nsize):
            assert nsize == 1
            self.on_data.set_result(bytes(self.buf[:nsize]))

        def eof_received(self):
            pass

    class ClientProtoSecond(asyncio.Protocol):
        def __init__(self, on_data, on_eof):
            self.on_data = on_data
            self.on_eof = on_eof
            self.con_made_cnt = 0

        def connection_made(self, tr):
            nonlocal client_con_made_calls
            client_con_made_calls += 1

        def data_received(self, data):
            self.on_data.set_result(data)

        def eof_received(self):
            self.on_eof.set_result(True)

    async def client(addr):
        await asyncio.sleep(0.5)

        on_data1 = self.loop.create_future()
        on_data2 = self.loop.create_future()
        on_eof = self.loop.create_future()

        tr, proto = await self.loop.create_connection(
            lambda: ClientProtoFirst(on_data1), *addr)

        tr.write(HELLO_MSG)
        new_tr = await self.loop.start_tls(tr, proto, client_context)

        self.assertEqual(await on_data1, b'O')
        new_tr.write(HELLO_MSG)

        # Swap to a streaming protocol on the upgraded transport.
        new_tr.set_protocol(ClientProtoSecond(on_data2, on_eof))
        self.assertEqual(await on_data2, b'2')
        new_tr.write(HELLO_MSG)
        await on_eof

        new_tr.close()

        # connection_made() should be called only once -- when
        # we establish connection for the first time. Start TLS
        # doesn't call connection_made() on application protocols.
        self.assertEqual(client_con_made_calls, 1)

    with self.tcp_server(serve, timeout=self.TIMEOUT) as srv:
        self.loop.run_until_complete(
            asyncio.wait_for(client(srv.addr),
                             timeout=self.TIMEOUT))
def test_start_tls_slow_client_cancel(self):
    """Cancelling start_tls() mid-handshake raises TimeoutError cleanly."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    HELLO_MSG = b'1' * self.PAYLOAD_SIZE

    client_context = self._create_client_ssl_context()
    server_waits_on_handshake = self.loop.create_future()

    def serve(sock):
        sock.settimeout(self.TIMEOUT)

        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        try:
            # Signal the client, then stall instead of answering the
            # TLS handshake so the client's wait_for() times out.
            self.loop.call_soon_threadsafe(
                server_waits_on_handshake.set_result, None)
            data = sock.recv_all(1024 * 1024)
        except ConnectionAbortedError:
            pass
        finally:
            sock.close()

    class ClientProto(asyncio.Protocol):
        def __init__(self, on_data, on_eof):
            self.on_data = on_data
            self.on_eof = on_eof
            self.con_made_cnt = 0

        # `proto` is this instance; outer `self` is the test case.
        def connection_made(proto, tr):
            proto.con_made_cnt += 1
            # Ensure connection_made gets called only once.
            self.assertEqual(proto.con_made_cnt, 1)

        def data_received(self, data):
            self.on_data.set_result(data)

        def eof_received(self):
            self.on_eof.set_result(True)

    async def client(addr):
        await asyncio.sleep(0.5)

        on_data = self.loop.create_future()
        on_eof = self.loop.create_future()

        tr, proto = await self.loop.create_connection(
            lambda: ClientProto(on_data, on_eof), *addr)

        tr.write(HELLO_MSG)

        await server_waits_on_handshake

        with self.assertRaises(asyncio.TimeoutError):
            await asyncio.wait_for(
                self.loop.start_tls(tr, proto, client_context),
                0.5)

    with self.tcp_server(serve, timeout=self.TIMEOUT) as srv:
        self.loop.run_until_complete(
            asyncio.wait_for(client(srv.addr), timeout=10))
def test_start_tls_server_1(self):
    """Server-side start_tls(): upgrade an accepted plain connection to TLS.

    The (threaded, blocking-socket) client reads a plaintext hello,
    upgrades with starttls(), echoes the hello over TLS, and the test
    verifies the server protocol received it through the new transport.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    HELLO_MSG = b'1' * self.PAYLOAD_SIZE

    server_context = self._create_server_ssl_context(
        self.ONLYCERT, self.ONLYKEY)
    client_context = self._create_client_ssl_context()

    def client(sock, addr):
        # Runs in a thread with a blocking socket.
        sock.settimeout(self.TIMEOUT)

        sock.connect(addr)
        data = sock.recv_all(len(HELLO_MSG))
        self.assertEqual(len(data), len(HELLO_MSG))

        sock.starttls(client_context)
        sock.sendall(HELLO_MSG)

        sock.unwrap()
        sock.close()

    class ServerProto(asyncio.Protocol):
        def __init__(self, on_con, on_eof, on_con_lost):
            self.on_con = on_con
            self.on_eof = on_eof
            self.on_con_lost = on_con_lost
            self.data = b''

        def connection_made(self, tr):
            self.on_con.set_result(tr)

        def data_received(self, data):
            self.data += data

        def eof_received(self):
            self.on_eof.set_result(1)

        def connection_lost(self, exc):
            if exc is None:
                self.on_con_lost.set_result(None)
            else:
                self.on_con_lost.set_exception(exc)

    async def main(proto, on_con, on_eof, on_con_lost):
        tr = await on_con
        tr.write(HELLO_MSG)

        # Nothing should arrive before the TLS upgrade completes.
        self.assertEqual(proto.data, b'')

        new_tr = await self.loop.start_tls(
            tr, proto, server_context,
            server_side=True,
            ssl_handshake_timeout=self.TIMEOUT)

        await on_eof
        await on_con_lost
        self.assertEqual(proto.data, HELLO_MSG)
        new_tr.close()

    async def run_main():
        on_con = self.loop.create_future()
        on_eof = self.loop.create_future()
        on_con_lost = self.loop.create_future()
        proto = ServerProto(on_con, on_eof, on_con_lost)

        server = await self.loop.create_server(
            lambda: proto, '127.0.0.1', 0)
        addr = server.sockets[0].getsockname()

        with self.tcp_client(lambda sock: client(sock, addr),
                             timeout=self.TIMEOUT):
            await asyncio.wait_for(
                main(proto, on_con, on_eof, on_con_lost),
                timeout=self.TIMEOUT)

        server.close()
        await server.wait_closed()

    self.loop.run_until_complete(run_main())
def test_create_server_ssl_over_ssl(self):
    """Nested TLS: a second TLS session tunnelled inside an outer one.

    The server accepts an outer-TLS connection (``sslctx_1``) and then
    immediately starts a second, inner TLS handshake (``sslctx_2``) via
    start_tls().  Clients drive the inner session manually with
    ``ssl.MemoryBIO`` because wrapping an SSLSocket again is unreliable.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest('asyncio does not support SSL over SSL')

    CNT = 0           # number of clients that were successful
    TOTAL_CNT = 25    # total number of clients that test will create
    TIMEOUT = 10.0    # timeout for this test

    A_DATA = b'A' * 1024 * 1024
    B_DATA = b'B' * 1024 * 1024

    sslctx_1 = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx_1 = self._create_client_ssl_context()
    sslctx_2 = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx_2 = self._create_client_ssl_context()

    clients = []

    async def handle_client(reader, writer):
        nonlocal CNT
        data = await reader.readexactly(len(A_DATA))
        self.assertEqual(data, A_DATA)
        writer.write(b'OK')
        data = await reader.readexactly(len(B_DATA))
        self.assertEqual(data, B_DATA)
        # Mixed buffer types exercise writelines() handling.
        writer.writelines([b'SP', bytearray(b'A'), memoryview(b'M')])
        await writer.drain()
        writer.close()
        CNT += 1

    class ServerProtocol(asyncio.StreamReaderProtocol):
        def connection_made(self, transport):
            # Defer the stream protocol's connection_made until the
            # inner TLS handshake has produced the new transport.
            super_ = super()
            transport.pause_reading()
            fut = self._loop.create_task(self._loop.start_tls(
                transport, self, sslctx_2, server_side=True))

            def cb(_):
                try:
                    tr = fut.result()
                except Exception as ex:
                    super_.connection_lost(ex)
                else:
                    super_.connection_made(tr)
            fut.add_done_callback(cb)

    def server_protocol_factory():
        reader = asyncio.StreamReader()
        protocol = ServerProtocol(reader, handle_client)
        return protocol

    async def test_client(addr):
        fut = asyncio.Future()

        def prog(sock):
            try:
                sock.connect(addr)
                sock.starttls(client_sslctx_1)

                # because wrap_socket() doesn't work correctly on
                # SSLSocket, we have to do the 2nd level SSL manually
                incoming = ssl.MemoryBIO()
                outgoing = ssl.MemoryBIO()
                sslobj = client_sslctx_2.wrap_bio(incoming, outgoing)

                def do(func, *args):
                    # Drive a non-blocking SSLObject call to completion,
                    # shuttling bytes between the BIOs and the socket.
                    while True:
                        try:
                            rv = func(*args)
                            break
                        except ssl.SSLWantReadError:
                            if outgoing.pending:
                                sock.send(outgoing.read())
                            incoming.write(sock.recv(65536))
                    if outgoing.pending:
                        sock.send(outgoing.read())
                    return rv

                do(sslobj.do_handshake)
                do(sslobj.write, A_DATA)
                data = do(sslobj.read, 2)
                self.assertEqual(data, b'OK')
                do(sslobj.write, B_DATA)
                data = b''
                while True:
                    chunk = do(sslobj.read, 4)
                    if not chunk:
                        break
                    data += chunk
                self.assertEqual(data, b'SPAM')
                do(sslobj.unwrap)
                sock.close()

            except Exception as ex:
                self.loop.call_soon_threadsafe(fut.set_exception, ex)
                sock.close()
            else:
                self.loop.call_soon_threadsafe(fut.set_result, None)

        client = self.tcp_client(prog)
        client.start()
        clients.append(client)

        await fut

    async def start_server():
        extras = {}
        if self.implementation != 'asyncio' or self.PY37:
            extras = dict(ssl_handshake_timeout=10.0)

        srv = await self.loop.create_server(
            server_protocol_factory,
            '127.0.0.1', 0,
            family=socket.AF_INET,
            ssl=sslctx_1,
            **extras)

        try:
            srv_socks = srv.sockets
            self.assertTrue(srv_socks)

            addr = srv_socks[0].getsockname()

            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(test_client(addr))

            await asyncio.wait_for(asyncio.gather(*tasks), TIMEOUT)

        finally:
            self.loop.call_soon(srv.close)
            await srv.wait_closed()

    with self._silence_eof_received_warning():
        self.loop.run_until_complete(start_server())

    self.assertEqual(CNT, TOTAL_CNT)

    for client in clients:
        client.stop()
def test_renegotiation(self):
    """TLS renegotiation mid-stream must not corrupt the data flow.

    Uses pyOpenSSL on the server side (the stdlib ssl module cannot
    initiate renegotiation) and pins the client to <= TLS 1.2, since
    TLS 1.3 removed renegotiation entirely.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest('asyncio does not support renegotiation')

    CNT = 0
    TOTAL_CNT = 25

    A_DATA = b'A' * 1024 * 1024
    B_DATA = b'B' * 1024 * 1024

    sslctx = openssl_ssl.Context(openssl_ssl.TLSv1_2_METHOD)
    if hasattr(openssl_ssl, 'OP_NO_SSLV2'):
        sslctx.set_options(openssl_ssl.OP_NO_SSLV2)
    sslctx.use_privatekey_file(self.ONLYKEY)
    sslctx.use_certificate_chain_file(self.ONLYCERT)
    client_sslctx = self._create_client_ssl_context()
    if hasattr(ssl, 'OP_NO_TLSv1_3'):
        # Renegotiation does not exist in TLS 1.3.
        client_sslctx.options |= ssl.OP_NO_TLSv1_3

    def server(sock):
        conn = openssl_ssl.Connection(sslctx, sock)
        conn.set_accept_state()

        data = b''
        while len(data) < len(A_DATA):
            try:
                chunk = conn.recv(len(A_DATA) - len(data))
                if not chunk:
                    break
                data += chunk
            except openssl_ssl.WantReadError:
                pass
        self.assertEqual(data, A_DATA)

        # Kick off renegotiation, then confirm it is pending.
        conn.renegotiate()
        if conn.renegotiate_pending():
            conn.send(b'OK')
        else:
            conn.send(b'ER')

        data = b''
        while len(data) < len(B_DATA):
            try:
                chunk = conn.recv(len(B_DATA) - len(data))
                if not chunk:
                    break
                data += chunk
            except openssl_ssl.WantReadError:
                pass
        self.assertEqual(data, B_DATA)

        # By now the renegotiation must have completed.
        if conn.renegotiate_pending():
            conn.send(b'ERRO')
        else:
            conn.send(b'SPAM')

        conn.shutdown()

    async def client(addr):
        extras = {}
        if self.implementation != 'asyncio' or self.PY37:
            extras = dict(ssl_handshake_timeout=10.0)

        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='',
            **extras)

        writer.write(A_DATA)
        self.assertEqual(await reader.readexactly(2), b'OK')
        writer.write(B_DATA)
        self.assertEqual(await reader.readexactly(4), b'SPAM')

        nonlocal CNT
        CNT += 1

        writer.close()
        await self.wait_closed(writer)

    async def client_sock(addr):
        # Same flow, but over a pre-connected socket passed via sock=.
        sock = socket.socket()
        sock.connect(addr)
        reader, writer = await asyncio.open_connection(
            sock=sock,
            ssl=client_sslctx,
            server_hostname='')

        writer.write(A_DATA)
        self.assertEqual(await reader.readexactly(2), b'OK')
        writer.write(B_DATA)
        self.assertEqual(await reader.readexactly(4), b'SPAM')

        nonlocal CNT
        CNT += 1

        writer.close()
        await self.wait_closed(writer)
        sock.close()

    def run(coro):
        nonlocal CNT
        CNT = 0

        with self.tcp_server(server,
                             max_clients=TOTAL_CNT,
                             backlog=TOTAL_CNT) as srv:
            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(coro(srv.addr))

            self.loop.run_until_complete(
                asyncio.gather(*tasks))
        self.assertEqual(CNT, TOTAL_CNT)

    with self._silence_eof_received_warning():
        run(client)

    with self._silence_eof_received_warning():
        run(client_sock)
def test_shutdown_timeout(self):
    """A peer that never answers close_notify must trip ssl_shutdown_timeout.

    The client deliberately skips unwrap(); the server's read should fail
    with 'SSL shutdown timed out' after the 0.5s shutdown timeout.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    CNT = 0           # number of clients that were successful
    TOTAL_CNT = 25    # total number of clients that test will create
    TIMEOUT = 10.0    # timeout for this test

    A_DATA = b'A' * 1024 * 1024

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx = self._create_client_ssl_context()

    clients = []

    async def handle_client(reader, writer):
        nonlocal CNT
        data = await reader.readexactly(len(A_DATA))
        self.assertEqual(data, A_DATA)
        writer.write(b'OK')
        await writer.drain()
        writer.close()
        with self.assertRaisesRegex(asyncio.TimeoutError,
                                    'SSL shutdown timed out'):
            await reader.read()
        CNT += 1

    async def test_client(addr):
        fut = asyncio.Future()

        def prog(sock):
            try:
                sock.starttls(client_sslctx)
                sock.connect(addr)
                sock.send(A_DATA)

                data = sock.recv_all(2)
                self.assertEqual(data, b'OK')

                data = sock.recv(1024)
                self.assertEqual(data, b'')

                # Hold the raw fd open (without unwrapping TLS) so the
                # server is left waiting for our close_notify.
                fd = sock.detach()
                try:
                    select.select([fd], [], [], 3)
                finally:
                    os.close(fd)

            except Exception as ex:
                self.loop.call_soon_threadsafe(fut.set_exception, ex)
            else:
                self.loop.call_soon_threadsafe(fut.set_result, None)

        client = self.tcp_client(prog)
        client.start()
        clients.append(client)

        await fut

    async def start_server():
        extras = {}
        if self.implementation != 'asyncio' or self.PY37:
            extras['ssl_handshake_timeout'] = 10.0
        if self.implementation != 'asyncio':  # or self.PY38
            extras['ssl_shutdown_timeout'] = 0.5

        srv = await asyncio.start_server(
            handle_client,
            '127.0.0.1', 0,
            family=socket.AF_INET,
            ssl=sslctx,
            **extras)

        try:
            srv_socks = srv.sockets
            self.assertTrue(srv_socks)

            addr = srv_socks[0].getsockname()

            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(test_client(addr))

            await asyncio.wait_for(
                asyncio.gather(*tasks),
                TIMEOUT)

        finally:
            self.loop.call_soon(srv.close)
            await srv.wait_closed()

    with self._silence_eof_received_warning():
        self.loop.run_until_complete(start_server())

    self.assertEqual(CNT, TOTAL_CNT)

    for client in clients:
        client.stop()
def test_shutdown_cleanly(self):
    """A well-behaved peer that unwraps TLS yields a clean EOF on read()."""
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    CNT = 0
    TOTAL_CNT = 25

    A_DATA = b'A' * 1024 * 1024

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx = self._create_client_ssl_context()

    def server(sock):
        sock.starttls(
            sslctx,
            server_side=True)

        data = sock.recv_all(len(A_DATA))
        self.assertEqual(data, A_DATA)

        sock.send(b'OK')

        # Proper TLS shutdown: exchange close_notify before closing.
        sock.unwrap()

        sock.close()

    async def client(addr):
        extras = {}
        if self.implementation != 'asyncio' or self.PY37:
            extras = dict(ssl_handshake_timeout=10.0)

        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='',
            **extras)

        writer.write(A_DATA)
        self.assertEqual(await reader.readexactly(2), b'OK')

        # Clean shutdown surfaces as EOF (b'') rather than an error.
        self.assertEqual(await reader.read(), b'')

        nonlocal CNT
        CNT += 1

        writer.close()
        await self.wait_closed(writer)

    def run(coro):
        nonlocal CNT
        CNT = 0

        with self.tcp_server(server,
                             max_clients=TOTAL_CNT,
                             backlog=TOTAL_CNT) as srv:
            tasks = []
            for _ in range(TOTAL_CNT):
                tasks.append(coro(srv.addr))

            self.loop.run_until_complete(
                asyncio.gather(*tasks))
        self.assertEqual(CNT, TOTAL_CNT)

    with self._silence_eof_received_warning():
        run(client)
def test_write_to_closed_transport(self):
    """Writes after the peer closed must be dropped, not buffered.

    Covers both an abrupt shutdown and a proper unwrap() on the server
    side; in either case the transport's write buffer must stay empty.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx = self._create_client_ssl_context()
    future = None

    def server(sock):
        # Close immediately without a TLS shutdown.
        sock.starttls(sslctx, server_side=True)
        sock.shutdown(socket.SHUT_RDWR)
        sock.close()

    def unwrap_server(sock):
        # Close after a proper close_notify exchange.
        sock.starttls(sslctx, server_side=True)
        while True:
            try:
                sock.unwrap()
                break
            except ssl.SSLError as ex:
                # Since OpenSSL 1.1.1, it raises "application data after
                # close notify"
                if ex.reason == 'KRB5_S_INIT':
                    break
            except OSError as ex:
                # OpenSSL < 1.1.1
                if ex.errno != 0:
                    raise
        sock.close()

    async def client(addr):
        nonlocal future
        future = self.loop.create_future()

        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='')
        writer.write(b'I AM WRITING NOWHERE1' * 100)

        try:
            data = await reader.read()
            self.assertEqual(data, b'')
        except (ConnectionResetError, BrokenPipeError):
            pass

        # The connection is gone: every write must be discarded
        # immediately and never accumulate in the buffer.
        for i in range(25):
            writer.write(b'I AM WRITING NOWHERE2' * 100)

        self.assertEqual(
            writer.transport.get_write_buffer_size(), 0)

        await future

        writer.close()
        await self.wait_closed(writer)

    def run(meth):
        # Bridge server-thread success/failure into the client's future.
        def wrapper(sock):
            try:
                meth(sock)
            except Exception as ex:
                self.loop.call_soon_threadsafe(future.set_exception, ex)
            else:
                self.loop.call_soon_threadsafe(future.set_result, None)
        return wrapper

    with self._silence_eof_received_warning():
        with self.tcp_server(run(server)) as srv:
            self.loop.run_until_complete(client(srv.addr))

        with self.tcp_server(run(unwrap_server)) as srv:
            self.loop.run_until_complete(client(srv.addr))
def test_flush_before_shutdown(self):
    """Data buffered while writing is paused must be flushed before TLS shutdown.

    The client pauses the SSL protocol, queues CHUNK*SIZE bytes, calls
    close(), then resumes; the server must still receive every byte.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    CHUNK = 1024 * 128
    SIZE = 32

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    # pyOpenSSL context prepared alongside the stdlib one.
    # NOTE(review): sslctx_openssl is set up but not used below — possibly
    # leftover from a renegotiation variant; confirm before removing.
    sslctx_openssl = openssl_ssl.Context(openssl_ssl.TLSv1_2_METHOD)
    if hasattr(openssl_ssl, 'OP_NO_SSLV2'):
        sslctx_openssl.set_options(openssl_ssl.OP_NO_SSLV2)
    sslctx_openssl.use_privatekey_file(self.ONLYKEY)
    sslctx_openssl.use_certificate_chain_file(self.ONLYCERT)
    client_sslctx = self._create_client_ssl_context()
    if hasattr(ssl, 'OP_NO_TLSv1_3'):
        client_sslctx.options |= ssl.OP_NO_TLSv1_3

    future = None

    def server(sock):
        sock.starttls(sslctx, server_side=True)
        self.assertEqual(sock.recv_all(4), b'ping')
        sock.send(b'pong')
        time.sleep(0.5)  # hopefully stuck the TCP buffer
        data = sock.recv_all(CHUNK * SIZE)
        self.assertEqual(len(data), CHUNK * SIZE)
        sock.close()

    def run(meth):
        # Bridge server-thread success/failure into the client's future.
        def wrapper(sock):
            try:
                meth(sock)
            except Exception as ex:
                self.loop.call_soon_threadsafe(future.set_exception, ex)
            else:
                self.loop.call_soon_threadsafe(future.set_result, None)
        return wrapper

    async def client(addr):
        nonlocal future
        future = self.loop.create_future()
        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='')
        sslprotocol = writer.get_extra_info('uvloop.sslproto')
        writer.write(b'ping')
        data = await reader.readexactly(4)
        self.assertEqual(data, b'pong')

        # Queue a large payload while the protocol cannot write, then
        # close before resuming — the flush must still happen.
        sslprotocol.pause_writing()
        for _ in range(SIZE):
            writer.write(b'x' * CHUNK)

        writer.close()
        sslprotocol.resume_writing()

        await self.wait_closed(writer)
        try:
            data = await reader.read()
            self.assertEqual(data, b'')
        except ConnectionResetError:
            pass
        await future

    with self.tcp_server(run(server)) as srv:
        self.loop.run_until_complete(client(srv.addr))
def test_remote_shutdown_receives_trailing_data(self):
    """After the peer starts shutdown, buffered outgoing data must still arrive.

    Two server variants: one sends close_notify (TLS shutdown), the other
    sends a raw TCP EOF.  In both cases the client's queued write backlog
    must be fully delivered.
    """
    if self.implementation == 'asyncio':
        raise unittest.SkipTest()

    CHUNK = 1024 * 128
    SIZE = 32

    sslctx = self._create_server_ssl_context(self.ONLYCERT, self.ONLYKEY)
    client_sslctx = self._create_client_ssl_context()
    future = None

    def server(sock):
        # Drive TLS manually with MemoryBIOs so we can send close_notify
        # without waiting for the peer's response.
        incoming = ssl.MemoryBIO()
        outgoing = ssl.MemoryBIO()
        sslobj = sslctx.wrap_bio(incoming, outgoing, server_side=True)

        while True:
            try:
                sslobj.do_handshake()
            except ssl.SSLWantReadError:
                if outgoing.pending:
                    sock.send(outgoing.read())
                incoming.write(sock.recv(16384))
            else:
                if outgoing.pending:
                    sock.send(outgoing.read())
                break

        while True:
            try:
                data = sslobj.read(4)
            except ssl.SSLWantReadError:
                incoming.write(sock.recv(16384))
            else:
                break

        self.assertEqual(data, b'ping')
        sslobj.write(b'pong')
        sock.send(outgoing.read())

        time.sleep(0.2)  # wait for the peer to fill its backlog

        # send close_notify but don't wait for response
        with self.assertRaises(ssl.SSLWantReadError):
            sslobj.unwrap()
        sock.send(outgoing.read())

        # should receive all data
        data_len = 0
        while True:
            try:
                chunk = len(sslobj.read(16384))
                data_len += chunk
            except ssl.SSLWantReadError:
                incoming.write(sock.recv(16384))
            except ssl.SSLZeroReturnError:
                break

        self.assertEqual(data_len, CHUNK * SIZE)

        # verify that close_notify is received
        sslobj.unwrap()

        sock.close()

    def eof_server(sock):
        sock.starttls(sslctx, server_side=True)
        self.assertEqual(sock.recv_all(4), b'ping')
        sock.send(b'pong')

        time.sleep(0.2)  # wait for the peer to fill its backlog

        # send EOF
        sock.shutdown(socket.SHUT_WR)

        # should receive all data
        data = sock.recv_all(CHUNK * SIZE)
        self.assertEqual(len(data), CHUNK * SIZE)

        sock.close()

    async def client(addr):
        nonlocal future
        future = self.loop.create_future()

        reader, writer = await asyncio.open_connection(
            *addr,
            ssl=client_sslctx,
            server_hostname='')
        writer.write(b'ping')
        data = await reader.readexactly(4)
        self.assertEqual(data, b'pong')

        # fill write backlog in a hacky way - renegotiation won't help
        for _ in range(SIZE):
            writer.transport._test__append_write_backlog(b'x' * CHUNK)

        try:
            data = await reader.read()
            self.assertEqual(data, b'')
        except (BrokenPipeError, ConnectionResetError):
            pass

        await future

        writer.close()
        await self.wait_closed(writer)

    def run(meth):
        # Bridge server-thread success/failure into the client's future.
        def wrapper(sock):
            try:
                meth(sock)
            except Exception as ex:
                self.loop.call_soon_threadsafe(future.set_exception, ex)
            else:
                self.loop.call_soon_threadsafe(future.set_result, None)
        return wrapper

    with self.tcp_server(run(server)) as srv:
        self.loop.run_until_complete(client(srv.addr))

    with self.tcp_server(run(eof_server)) as srv:
        self.loop.run_until_complete(client(srv.addr))
def test_connect_timeout_warning(self):
    """A timed-out SSL connect must not leak a transport (ResourceWarning).

    A bound-but-not-listening socket guarantees the connect stalls; the
    test then asserts that cancelling it does NOT trigger a
    ResourceWarning after garbage collection.
    """
    s = socket.socket(socket.AF_INET)
    s.bind(('127.0.0.1', 0))
    addr = s.getsockname()

    async def test():
        try:
            await asyncio.wait_for(
                self.loop.create_connection(asyncio.Protocol,
                                            *addr, ssl=True),
                0.1)
        except (ConnectionRefusedError, asyncio.TimeoutError):
            pass
        else:
            self.fail('TimeoutError is not raised')

    with s:
        try:
            # Expect assertWarns to FAIL: no ResourceWarning should fire.
            with self.assertWarns(ResourceWarning) as cm:
                self.loop.run_until_complete(test())
                gc.collect()
                gc.collect()
                gc.collect()
        except AssertionError as e:
            self.assertEqual(str(e), 'ResourceWarning not triggered')
        else:
            self.fail('Unexpected ResourceWarning: {}'.format(cm.warning))
def test_handshake_timeout_handler_leak(self):
    """The handshake-timeout handler must not keep the SSL context alive.

    A listening-but-never-accepting socket stalls the handshake; after
    the timeout, a weakref to the context must resolve to None.
    """
    if self.implementation == 'asyncio':
        # Okay this turns out to be an issue for asyncio.sslproto too
        raise unittest.SkipTest()

    s = socket.socket(socket.AF_INET)
    s.bind(('127.0.0.1', 0))
    s.listen(1)
    addr = s.getsockname()

    async def test(ctx):
        try:
            await asyncio.wait_for(
                self.loop.create_connection(asyncio.Protocol, *addr,
                                            ssl=ctx),
                0.1)
        except (ConnectionRefusedError, asyncio.TimeoutError):
            pass
        else:
            self.fail('TimeoutError is not raised')

    with s:
        ctx = ssl.create_default_context()
        self.loop.run_until_complete(test(ctx))
        ctx = weakref.ref(ctx)

    # SSLProtocol should be DECREF to 0
    self.assertIsNone(ctx())
def test_shutdown_timeout_handler_leak(self):
    """The shutdown-timeout handler must not keep the SSL context alive."""
    loop = self.loop

    def server(sock):
        sslctx = self._create_server_ssl_context(self.ONLYCERT,
                                                 self.ONLYKEY)
        sock = sslctx.wrap_socket(sock, server_side=True)
        sock.recv(32)
        sock.close()

    class Protocol(asyncio.Protocol):
        def __init__(self):
            self.fut = asyncio.Future(loop=loop)

        def connection_lost(self, exc):
            self.fut.set_result(None)

    async def client(addr, ctx):
        tr, pr = await loop.create_connection(Protocol, *addr, ssl=ctx)
        tr.close()
        await pr.fut

    with self.tcp_server(server) as srv:
        ctx = self._create_client_ssl_context()
        loop.run_until_complete(client(srv.addr, ctx))
        ctx = weakref.ref(ctx)

    if self.implementation == 'asyncio':
        # asyncio has no shutdown timeout, but it ends up with a circular
        # reference loop - not ideal (introduces gc glitches), but at least
        # not leaking
        gc.collect()
        gc.collect()
        gc.collect()

    # SSLProtocol should be DECREF to 0
    self.assertIsNone(ctx())
def test_shutdown_timeout_handler_not_set(self):
    """Data held in the SSL object while reading is paused must be delivered.

    The client pauses reading so 'extra bytes' stay inside the sslobj,
    the server sends EOF, and after resume_reading() the bytes must
    still reach the protocol before the connection is lost.
    """
    loop = self.loop
    eof = asyncio.Event()
    extra = None

    def server(sock):
        sslctx = self._create_server_ssl_context(self.ONLYCERT,
                                                 self.ONLYKEY)
        sock = sslctx.wrap_socket(sock, server_side=True)
        sock.send(b'hello')
        assert sock.recv(1024) == b'world'
        sock.send(b'extra bytes')
        # sending EOF here
        sock.shutdown(socket.SHUT_WR)
        loop.call_soon_threadsafe(eof.set)
        # make sure we have enough time to reproduce the issue
        assert sock.recv(1024) == b''
        sock.close()

    class Protocol(asyncio.Protocol):
        def __init__(self):
            self.fut = asyncio.Future(loop=loop)
            self.transport = None

        def connection_made(self, transport):
            self.transport = transport

        def data_received(self, data):
            if data == b'hello':
                self.transport.write(b'world')
                # pause reading would make incoming data stay in the sslobj
                self.transport.pause_reading()
            else:
                nonlocal extra
                extra = data

        def connection_lost(self, exc):
            if exc is None:
                self.fut.set_result(None)
            else:
                self.fut.set_exception(exc)

    async def client(addr):
        ctx = self._create_client_ssl_context()
        tr, pr = await loop.create_connection(Protocol, *addr, ssl=ctx)
        await eof.wait()
        tr.resume_reading()
        await pr.fut
        tr.close()
        assert extra == b'extra bytes'

    with self.tcp_server(server) as srv:
        loop.run_until_complete(client(srv.addr))
# Run the shared _TestSSL suite on the uvloop event loop.
class Test_UV_TCPSSL(_TestSSL, tb.UVTestCase):
    pass
# Run the shared _TestSSL suite on the stock asyncio event loop.
class Test_AIO_TCPSSL(_TestSSL, tb.AIOTestCase):
    pass
|
import json
from apps.Utils.formresponse import formScssResp
from apps.Utils.message_constants import LOGGEDOUT_SCSS_MSG


def create_user(userObj):
    # Build a success response envelope with code "000".
    # NOTE(review): `userObj` is never used and the response is built from
    # logout constants (LOGGEDOUT_SCSS_MSG / "logoutResp") — this looks like
    # a copy-paste from a logout handler; confirm the intended payload.
    res = formScssResp("000",LOGGEDOUT_SCSS_MSG,"logoutResp",{})
    return res
|
# Minimal growattServer usage example.
# NOTE(review): `<username>` / `<password>` are documentation placeholders,
# not valid Python — substitute real credential strings before running.
import growattServer

api = growattServer.GrowattApi()
login_response = api.login(<username>, <password>)
# Get a list of growatt plants for the logged-in user.
print(api.plant_list(login_response['userId']))
|
from pathlib import Path

from fhir.resources.valueset import ValueSet as _ValueSet

from oops_fhir.utils import ValueSet

__all__ = ["SpecimenCollection"]

# Parse the FHIR ValueSet resource stored in the sibling .json file
# (same stem as this module, .json suffix).
_resource = _ValueSet.parse_file(Path(__file__).with_suffix(".json"))


class SpecimenCollection(ValueSet):
    """
    Specimen collection methods

    Actions that can be taken for the collection of specimen from a subject.

    Status: draft - Version: 4.0.1

    http://hl7.org/fhir/ValueSet/specimen-collection
    """

    # TODO: fix this template issue1
    pass

    class Meta:
        # Parsed resource attached for the oops_fhir ValueSet machinery.
        resource = _resource
|
###############################################################################
# Copyright 2015 The University of Texas at Austin #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
###############################################################################
import copy
import getpass
import json
import os
import socket
import subprocess
import threading
import urllib.request
import urllib.error
import urllib.parse
import time
import sysconfig
#######################################################################################################################
def configure():
    """Interactively configure an IPF installation.

    Walks the admin through resource/scheduler selection, then writes the
    compute, activity (optional), extended-modules, abstract-services and
    IPF-info workflows plus their init scripts.  Existing configuration
    files are backed up by the write* helpers.
    """
    print()
    print("This script asks you for information and configures your IPF installation.")
    print("  This script backs up your existing configuration, by renaming the existing configuration files with .backup-TIMESTAMP")

    resource_name = getResourceName()

    sched_name = getSchedulerName()
    compute_json = getComputeJsonForScheduler(sched_name)

    # Compute workflow: name, location, publish paths, optional AMQP.
    setResourceName(resource_name, compute_json)
    setLocation(compute_json)
    updateFilePublishPaths(resource_name, compute_json)
    publish_to_xsede = addXsedeAmqpToCompute(compute_json)
    writeComputeWorkflow(resource_name, compute_json)
    writePeriodicComputeWorkflow(resource_name)

    print()
    print("You may need to modify the default environment in your init scripts so that the information gathering works correctly. For example:")
    print("  * batch scheduler commands need to be in PATH")
    print("  * scheduler-related environment variables may need to be set")
    module_names = getModules()
    env_vars = getEnvironmentVariables()
    writeComputeInit(resource_name, module_names, env_vars)

    # Job-update (activity) workflow is optional: it requires readable
    # scheduler log files, which Condor does not provide.
    answer = options("Do you want to publish job updates? Your scheduler log files must be readable. Condor users should answer 'no'.",
                     ["yes", "no"], "yes")
    if answer == "yes":
        activity_json = getActivityJsonForScheduler(sched_name)
        setResourceName(resource_name, activity_json)
        updateActivityLogFile(resource_name, activity_json)
        updateFilePublishPaths(resource_name, activity_json)
        if (publish_to_xsede):
            addXsedeAmqpToActivity(activity_json, compute_json)
        writeActivityWorkflow(resource_name, activity_json)
        writeActivityInit(resource_name, module_names, env_vars)

    # Legacy plain-modules workflow kept for reference (disabled).
    #modules_type = getModulesType()
    # if modules_type == "modules":
    #    modules_json = getModulesJson()
    # elif modules_type == "lmod":
    #    modules_json = getLModJson()
    extmodules_json = getExtModulesJson()
    setSupportContact(extmodules_json)
    services_json = getAbstractServicesJson()
    # setResourceName(resource_name,modules_json)
    setResourceName(resource_name, extmodules_json)
    setResourceName(resource_name, services_json)
    # updateFilePublishPaths(resource_name,modules_json)
    updateFilePublishPaths(resource_name, extmodules_json)
    updateFilePublishPaths(resource_name, services_json)
    # addXsedeAmqpToModules(modules_json,compute_json)
    if (publish_to_xsede):
        addXsedeAmqpToExtModules(extmodules_json, compute_json)
        addXsedeAmqpToAbstractServices(services_json, compute_json)
    # writeModulesWorkflow(resource_name,modules_json)
    writeExtModulesWorkflow(resource_name, extmodules_json)
    writeAbstractServicesWorkflow(resource_name, services_json)
    # writePeriodicModulesWorkflow(resource_name)
    writePeriodicExtModulesWorkflow(resource_name)
    writePeriodicAbstractServicesWorkflow(resource_name)
    # writeModulesInit(resource_name,module_names,env_vars)
    writeExtModulesInit(resource_name, module_names, env_vars)
    writeAbstractServicesInit(resource_name, module_names, env_vars)

    ipfinfo_json = getIPFInfoJson()
    if (publish_to_xsede):
        addXsedeAmqpToIPFInfo(ipfinfo_json, compute_json)
    writeIPFInfoWorkflow(ipfinfo_json)
    writeIPFInfoInit(resource_name, module_names, env_vars)
#######################################################################################################################
# need to test this with an xdresourceid program
def getResourceName():
    """Ask for the XSEDE resource name, defaulting to `xdresourceid` output.

    Returns the string entered by the user (or the xdresourceid default).
    If the xdresourceid command is unavailable or fails, no default is
    offered.
    """
    try:
        process = subprocess.Popen(["xdresourceid"], stdout=subprocess.PIPE)
        out, err = process.communicate()
    except Exception as e:
        print("Failed to use xdresourceid to get resource name: %s" % e)
        xdresid_name = None
    else:
        # communicate() returns bytes; decode to str for use as the
        # question() default.  (The original called .encode() on the
        # bytes, which raises AttributeError on Python 3.)
        xdresid_name = out.decode('utf-8').rstrip()
    resource_name = question("Enter the XSEDE resource name", xdresid_name)
    return resource_name
def getComputeJsonForScheduler(sched_name):
    """Load the <scheduler>_compute.json workflow template."""
    template = os.path.join(
        getGlueWorkflowDir(), "templates", "%s_compute.json" % sched_name)
    return readWorkflowFile(template)
def getActivityJsonForScheduler(sched_name):
    """Load the <scheduler>_activity.json workflow template.

    A two-part scheduler name (e.g. "pbs_torque") maps to the second
    part's activity template; one part is used as-is.
    """
    parts = sched_name.split("_")
    if len(parts) == 2:
        sched_name = parts[1]
    elif len(parts) != 1:
        print("Warning: expected one or two parts in scheduler name - may not find _activity workflow file")
    template = os.path.join(
        getGlueWorkflowDir(), "templates", "%s_activity.json" % sched_name)
    return readWorkflowFile(template)
def getModulesJson():
    """Load the modules.json workflow template."""
    template = os.path.join(getGlueWorkflowDir(), "templates", "modules.json")
    return readWorkflowFile(template)
def getExtModulesJson():
    """Load the extmodules.json workflow template."""
    template = os.path.join(
        getGlueWorkflowDir(), "templates", "extmodules.json")
    return readWorkflowFile(template)
def setSupportContact(extmodules_json):
    """Set the default support contact on the ExtendedModApplicationsStep.

    Raises Exception if the workflow has no such step.
    """
    for step in extmodules_json["steps"]:
        if step["name"] != "ipf.glue2.modules.ExtendedModApplicationsStep":
            continue
        step["params"] = {"default_support_contact": getSupportContact()}
        return
    raise Exception("didn't find an ExtendedModApplicationsStep to modify")
def getAbstractServicesJson():
    """Load the abstractservice.json workflow template."""
    template = os.path.join(
        getGlueWorkflowDir(), "templates", "abstractservice.json")
    return readWorkflowFile(template)
def getLModJson():
    """Load the lmod.json workflow template."""
    template = os.path.join(getGlueWorkflowDir(), "templates", "lmod.json")
    return readWorkflowFile(template)
def getIPFInfoJson():
    """Load the ipfinfo_publish.json workflow template."""
    template = os.path.join(
        getGlueWorkflowDir(), "templates", "ipfinfo_publish.json")
    return readWorkflowFile(template)
def getSchedulerName():
    """Ask which scheduler to use, offering every *_compute.json template.

    Template file names with two underscore-separated parts contribute
    their first part; longer names contribute the first two parts.
    """
    templates_dir = os.path.join(getGlueWorkflowDir(), "templates")
    names = []
    for file_name in os.listdir(templates_dir):
        if not file_name.endswith("_compute.json"):
            continue
        parts = file_name.split("_")
        if len(parts) == 2:
            names.append(parts[0])
        else:
            names.append(parts[0]+"_"+parts[1])
    return options(
        "Which scheduler/resource manager does this resource use?",
        sorted(names))
def setResourceName(resource_name, workflow_json):
    """Prefix the workflow name with the short resource name and point the
    ResourceNameStep at the full resource name.

    Raises Exception if the workflow has no ResourceNameStep.
    """
    short_name = resource_name.split(".")[0]
    workflow_json["name"] = "%s_%s" % (short_name, workflow_json["name"])
    for step in workflow_json["steps"]:
        if step["name"] != "ipf.sysinfo.ResourceNameStep":
            continue
        step["params"] = {"resource_name": resource_name}
        return
    raise Exception("didn't find a ResourceNameStep to modify")
def setLocation(compute_json):
    """Interactively update the LocationStep of the compute workflow.

    Raises Exception if the workflow has no LocationStep.
    """
    for step in compute_json["steps"]:
        if step["name"] != "ipf.glue2.location.LocationStep":
            continue
        updateLocationStep(step["params"]["location"])
        return
    raise Exception("didn't find a LocationStep to modify")
def updateLocationStep(params):
    """Prompt for the location fields, keeping existing values as defaults."""
    prompts = (
        ("Name", "Enter your organization"),
        ("Place", "Enter your city"),
        ("Country", "Enter your country"),
        ("Latitude", "Enter your latitude"),
        ("Longitude", "Enter your longitude"),
    )
    for key, prompt in prompts:
        params[key] = question(prompt, params.get(key, None))
def updateFromFreeGeoIp(params):
    """Fill location params from the freegeoip service; None on failure.

    Retries once (with a warning message enabled) before giving up.
    """
    text = getFreeGeoIp()
    if text is None:
        text = getFreeGeoIp(True)
    if text is None:
        return None
    doc = json.loads(text)
    params["Place"] = doc["city"]
    params["Country"] = doc["country_code"]
    params["Latitude"] = float(doc["latitude"])
    params["Longitude"] = float(doc["longitude"])
def getFreeGeoIp(print_message=False):
    """Query freegeoip in a background thread; return the raw JSON text.

    Returns None if the query does not complete within 60 seconds,
    optionally printing a warning when print_message is True.
    """
    print("Querying for physical location...")
    thread = FreeGeoIp()
    thread.start()
    thread.join(60)
    # Thread.isAlive() was removed in Python 3.9; is_alive() is the
    # supported spelling on all Python 3 versions.
    if thread.is_alive():
        if print_message:
            print("Warning: Query to http:/freegeoip.net didn't complete")
            print(
                "  Enter location information manually or re-run this configuration program")
        return None
    return thread.output
class FreeGeoIp(threading.Thread):
    """Daemon thread that fetches this host's geolocation from freegeoip.

    The raw HTTP response body (bytes) is left in ``self.output``; it
    stays None if the request fails or has not completed.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        # Daemon so a hung HTTP request cannot block interpreter exit.
        self.daemon = True
        self.output = None

    def run(self):
        host_name = socket.getfqdn()
        self.output = urllib.request.urlopen(
            "http://freegeoip.net/json/"+host_name).read()
def updateFilePublishPaths(resource_name, workflow_json):
    """Prefix every FileStep publish path with the short resource name."""
    short_name = resource_name.split(".")[0]
    for step in workflow_json["steps"]:
        if step["name"] != "ipf.publish.FileStep":
            continue
        step["params"]["path"] = "%s_%s" % (short_name,
                                            step["params"]["path"])
def addXsedeAmqpToCompute(compute_json, ask=True):
    """Optionally append XSEDE AMQP publish steps to the compute workflow.

    Asks the admin whether to publish, then for X.509 or username/password
    credentials, and appends two AmqpStep entries (public compute
    description and private job descriptions).  Returns True when
    publishing was configured, False otherwise.

    NOTE(review): the ``ask`` parameter is currently unused — the prompt
    is always shown; confirm whether it was meant to suppress it.
    """
    answer = options("Do you wish to publish to the XSEDE AMQP service?", [
                     "yes", "no"], "yes")
    if answer == "no":
        return False

    answer = options("Will you authenticate using an X.509 certificate and key or a username and password?",
                     ["X.509", "username/password"], "X.509")
    if answer == "X.509":
        # Re-prompt until the certificate and key files are readable.
        cert_path = question("Where is your certificate?",
                             "/etc/grid-security/xdinfo-hostcert.pem")
        while not testReadFile(cert_path):
            cert_path = question("Where is your certificate?",
                                 "/etc/grid-security/xdinfo-hostcert.pem")
        key_path = question("Where is your key?",
                            "/etc/grid-security/xdinfo-hostkey.pem")
        while not testReadFile(key_path):
            key_path = question("Where is your key?",
                                "/etc/grid-security/xdinfo-hostkey.pem")
        username = None
        password = None
    else:
        cert_path = None
        key_path = None
        username = question("What is your username?")
        password = question("What is your password?")

    # Public compute-description publish step.
    amqp_step = {}
    amqp_step["name"] = "ipf.publish.AmqpStep"
    amqp_step["description"] = "Publish compute resource description to XSEDE"
    amqp_step["params"] = {}
    amqp_step["params"]["publish"] = ["ipf.glue2.compute.PublicOgfJson"]
    amqp_step["params"]["services"] = [
        "infopub.xsede.org", "infopub-alt.xsede.org"]
    amqp_step["params"]["vhost"] = "xsede"
    amqp_step["params"]["exchange"] = "glue2.compute"
    amqp_step["params"]["ssl_options"] = {}
    amqp_step["params"]["ssl_options"]["ca_certs"] = "xsede/ca_certs.pem"
    if cert_path is not None:
        amqp_step["params"]["ssl_options"]["certfile"] = cert_path
        amqp_step["params"]["ssl_options"]["keyfile"] = key_path
    else:
        amqp_step["params"]["username"] = username
        amqp_step["params"]["password"] = password
    compute_json["steps"].append(amqp_step)

    # Private job-description publish step: same credentials, different
    # payload and exchange.
    amqp_step = copy.deepcopy(amqp_step)
    amqp_step["description"] = "Publish description of current jobs to XSEDE"
    amqp_step["params"]["publish"] = ["ipf.glue2.compute.PrivateOgfJson"]
    amqp_step["params"]["exchange"] = "glue2.computing_activities"
    compute_json["steps"].append(amqp_step)

    return True
def updateActivityLogFile(resource_name, activity_json):
    """Interactively configure the scheduler log location for the ActivityUpdateStep.

    Locates the first ActivityUpdateStep in ``activity_json["steps"]``, points
    its position file at ``<resource>_activity.pos``, and asks the user for the
    scheduler-specific log path (PBS server_logs directory, SGE reporting
    file, or slurmctl.log).  Recurses until a readable path is provided.

    Raises:
        Exception: if the ActivityUpdateStep is not pbs, sge, or slurm.
    """
    res_name = resource_name.split(".")[0]
    for step in activity_json["steps"]:
        if not "ActivityUpdateStep" in step["name"]:
            continue
        step["params"]["position_file"] = res_name+"_activity.pos"
        if "pbs" in step["name"]:
            if "PBS_HOME" not in os.environ:
                print(
                    " Warning: PBS_HOME environment variable not set - can't check for server_logs directory")
                log_dir = None
            else:
                log_dir = os.path.join(
                    os.environ["PBS_HOME"], "spool", "server_logs")
                # Probe only when we actually have a candidate path; passing
                # None to os.path.exists() would raise TypeError.
                testReadDirectory(log_dir)
            log_dir = question("Where is your server_logs directory?", log_dir)
            if not testReadDirectory(log_dir):
                return updateActivityLogFile(resource_name, activity_json)
            step["params"]["server_logs_dir"] = log_dir
        elif "sge" in step["name"]:
            if "SGE_ROOT" not in os.environ:
                print(
                    " Warning: SGE_ROOT environment variable not set - can't check for reporting file")
                log_file = None
            else:
                log_file = os.path.join(
                    os.environ["SGE_ROOT"], "default", "common", "reporting")
                # Same guard as above: never probe a None default.
                testReadFile(log_file)
            log_file = question("Where is your reporting file?", log_file)
            if not testReadFile(log_file):
                return updateActivityLogFile(resource_name, activity_json)
            step["params"]["reporting_file"] = log_file
        elif "slurm" in step["name"]:
            if os.path.exists("/usr/local/slurm/var/slurmctl.log"):
                default = "/usr/local/slurm/var/slurmctl.log"
            else:
                default = None
            log_file = question(
                "What is the full path (including filename) for your slurmctl.log file?", default)
            if not testReadFile(log_file):
                return updateActivityLogFile(resource_name, activity_json)
            step["params"]["slurmctl_log_file"] = log_file
        else:
            raise Exception("ActivityUpdateStep isn't pbs, sge, or slurm")
        # Only the first ActivityUpdateStep is configured.
        break
def addXsedeAmqpToActivity(activity_json, compute_json):
    """Clone the XSEDE AmqpStep from the compute workflow into the activity workflow.

    The copied step is retargeted to publish per-job updates on the
    ``glue2.computing_activity`` exchange.

    Raises:
        Exception: if the compute workflow has no XSEDE AmqpStep.
    """
    for candidate in compute_json["steps"]:
        is_amqp = candidate["name"] == "ipf.publish.AmqpStep"
        if is_amqp and "xsede.org" in candidate["params"]["services"][0]:
            amqp_step = copy.deepcopy(candidate)
            amqp_step["description"] = "Publish job updates to XSEDE"
            amqp_step["params"]["publish"] = [
                "ipf.glue2.computing_activity.ComputingActivityOgfJson"]
            amqp_step["params"]["exchange"] = "glue2.computing_activity"
            activity_json["steps"].append(amqp_step)
            return
    raise Exception("didn't find AmqpStep in compute workflow")
def addXsedeAmqpToModules(modules_json, compute_json):
    """Clone the XSEDE AmqpStep from the compute workflow into the modules workflow.

    The copied step publishes the applications document on the
    ``glue2.applications`` exchange.

    Raises:
        Exception: if the compute workflow has no XSEDE AmqpStep.
    """
    for candidate in compute_json["steps"]:
        is_amqp = candidate["name"] == "ipf.publish.AmqpStep"
        if is_amqp and "xsede.org" in candidate["params"]["services"][0]:
            amqp_step = copy.deepcopy(candidate)
            amqp_step["description"] = "Publish modules to XSEDE"
            amqp_step["params"]["publish"] = [
                "ipf.glue2.application.ApplicationsOgfJson"]
            amqp_step["params"]["exchange"] = "glue2.applications"
            modules_json["steps"].append(amqp_step)
            return
    raise Exception("didn't find AmqpStep in compute workflow")
def addXsedeAmqpToExtModules(modules_json, compute_json):
    """Clone the XSEDE AmqpStep into the extended-modules workflow.

    Same publication target as the plain modules workflow
    (``glue2.applications`` exchange, ApplicationsOgfJson document).

    Raises:
        Exception: if the compute workflow has no XSEDE AmqpStep.
    """
    for candidate in compute_json["steps"]:
        is_amqp = candidate["name"] == "ipf.publish.AmqpStep"
        if is_amqp and "xsede.org" in candidate["params"]["services"][0]:
            amqp_step = copy.deepcopy(candidate)
            amqp_step["description"] = "Publish modules to XSEDE"
            amqp_step["params"]["publish"] = [
                "ipf.glue2.application.ApplicationsOgfJson"]
            amqp_step["params"]["exchange"] = "glue2.applications"
            modules_json["steps"].append(amqp_step)
            return
    raise Exception("didn't find AmqpStep in compute workflow")
def addXsedeAmqpToAbstractServices(modules_json, compute_json):
    """Clone the XSEDE AmqpStep into the abstract-services workflow.

    Publishes the ASOgfJson document on the ``glue2.compute`` exchange.
    (The description string mirrors the modules variant; kept as-is since it
    is written into the generated workflow file.)

    Raises:
        Exception: if the compute workflow has no XSEDE AmqpStep.
    """
    for candidate in compute_json["steps"]:
        is_amqp = candidate["name"] == "ipf.publish.AmqpStep"
        if is_amqp and "xsede.org" in candidate["params"]["services"][0]:
            amqp_step = copy.deepcopy(candidate)
            amqp_step["description"] = "Publish modules to XSEDE"
            amqp_step["params"]["publish"] = [
                "ipf.glue2.abstractservice.ASOgfJson"]
            amqp_step["params"]["exchange"] = "glue2.compute"
            modules_json["steps"].append(amqp_step)
            return
    raise Exception("didn't find AmqpStep in compute workflow")
def addXsedeAmqpToIPFInfo(modules_json, compute_json):
    """Clone the XSEDE AmqpStep into the IPFInfo workflow.

    Publishes the IPFInformationJson document on the ``glue2.compute``
    exchange.

    Raises:
        Exception: if the compute workflow has no XSEDE AmqpStep.
    """
    for candidate in compute_json["steps"]:
        is_amqp = candidate["name"] == "ipf.publish.AmqpStep"
        if is_amqp and "xsede.org" in candidate["params"]["services"][0]:
            amqp_step = copy.deepcopy(candidate)
            amqp_step["description"] = "Publish IPFInfo to XSEDE"
            amqp_step["params"]["publish"] = ["ipf.ipfinfo.IPFInformationJson"]
            amqp_step["params"]["exchange"] = "glue2.compute"
            modules_json["steps"].append(amqp_step)
            return
    raise Exception("didn't find AmqpStep in compute workflow")
#######################################################################################################################
def getModules():
    """Ask which environment modules to load.

    Returns a list of module names, or None when the user declines.
    """
    if options("Do you want to load any modules?", ["yes", "no"], "no") == "no":
        return None
    return question("Enter a comma-separated list of modules to load").split(",")
def getEnvironmentVariables():
    """Interactively collect environment variables for the workflows.

    MODULEPATH and SERVICEPATH are offered from the current environment
    first; afterwards the user may add arbitrary name/value pairs.

    Returns:
        dict: variable name -> value.
    """
    collected = {}

    def _inherit(var_name, workflow):
        # Offer the inherited value of var_name; the user may keep it,
        # override it, or drop it entirely.
        if var_name not in os.environ:
            return
        value = os.environ[var_name]
        print("%s=%s" % (var_name, value))
        answer = options("is set in your environment. Do you want to use this value in the %s workflow?" % workflow, [
                         "yes", "no"], "yes")
        if answer == "no":
            answer = options("do you want to set a different value for %s for use in the %s workflow?" % (var_name, workflow), [
                             "yes", "no"], "yes")
            if answer == "yes":
                value = question("Enter the value for %s" % var_name)
            else:
                value = None
        if value is not None:
            collected[var_name] = value

    _inherit("MODULEPATH", "Modules")
    _inherit("SERVICEPATH", "Services")
    while True:
        if len(collected) > 0:
            print("current variables:")
            for key in sorted(collected.keys()):
                print(" %s = %s" % (key, collected[key]))
        if options("Do you want to set an environment variable?", ["yes", "no"], "no") == "no":
            return collected
        var_name = question("Enter the environment variable name")
        collected[var_name] = question("Enter the environment variable value")
#######################################################################################################################
def writeComputeWorkflow(resource_name, compute_json):
    """Write the compute workflow JSON to <resource>_compute.json.

    An existing file is preserved as a timestamped ``.backup-*`` copy.
    """
    res_name = resource_name.split(".")[0]
    path = os.path.join(getGlueWorkflowDir(), res_name+"_compute.json")
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), so backup names carry the date.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    print(" -> writing compute workflow to %s" % path)
    with open(path, "w") as f:
        f.write(json.dumps(compute_json, indent=4, sort_keys=True))
def writePeriodicComputeWorkflow(resource_name):
    """Write a periodic wrapper workflow that re-runs the compute workflow.

    Asks the user for the gathering interval in seconds.
    """
    res_name = resource_name.split(".")[0]
    periodic_json = {
        "name": res_name+"_compute_periodic",
        "description": "Gather GLUE2 compute information periodically",
        "steps": [],
    }
    step_json = {
        "name": "ipf.step.WorkflowStep",
        "params": {"workflow": "glue2/"+res_name+"_compute.json"},
    }
    interval_str = question(
        "How often should compute information be gathered (seconds)?", "60")
    step_json["params"]["maximum_interval"] = int(interval_str)
    periodic_json["steps"].append(step_json)
    path = os.path.join(getGlueWorkflowDir(), res_name +
                        "_compute_periodic.json")
    print(" -> writing periodic compute workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(periodic_json, indent=4, sort_keys=True))
def writeActivityWorkflow(resource_name, activity_json):
    """Write the activity workflow JSON to <resource>_activity.json.

    An existing file is preserved as a timestamped ``.backup-*`` copy.
    """
    res_name = resource_name.split(".")[0]
    path = os.path.join(getGlueWorkflowDir(), res_name+"_activity.json")
    print(" -> writing activity workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(activity_json, indent=4, sort_keys=True))
def writeModulesWorkflow(resource_name, modules_json):
    """Write the modules workflow JSON to <resource>_modules.json.

    An existing file is preserved as a timestamped ``.backup-*`` copy.
    """
    res_name = resource_name.split(".")[0]
    path = os.path.join(getGlueWorkflowDir(), res_name+"_modules.json")
    print(" -> writing modules workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(modules_json, indent=4, sort_keys=True))
def writeExtModulesWorkflow(resource_name, extmodules_json):
    """Write the extended-modules workflow JSON to <resource>_extmodules.json.

    An existing file is preserved as a timestamped ``.backup-*`` copy.
    """
    res_name = resource_name.split(".")[0]
    path = os.path.join(getGlueWorkflowDir(), res_name+"_extmodules.json")
    print(" -> writing extended modules workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(extmodules_json, indent=4, sort_keys=True))
def writeAbstractServicesWorkflow(resource_name, services_json):
    """Write the abstract-services workflow JSON to <resource>_services.json.

    An existing file is preserved as a timestamped ``.backup-*`` copy.
    """
    res_name = resource_name.split(".")[0]
    path = os.path.join(getGlueWorkflowDir(), res_name+"_services.json")
    print(" -> writing abstract services workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(services_json, indent=4, sort_keys=True))
def writeIPFInfoWorkflow(ipfinfo_json):
    """Write the IPFInfo publish workflow JSON to ipfinfo_publish.json.

    An existing file is preserved as a timestamped ``.backup-*`` copy.
    """
    path = os.path.join(getWorkflowDir(), "ipfinfo_publish.json")
    print(" -> writing ipfinfo publish workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(ipfinfo_json, indent=4, sort_keys=True))
def writePeriodicModulesWorkflow(resource_name):
    """Write a periodic wrapper workflow that re-runs the modules workflow.

    Asks the user for the gathering interval in hours (stored as seconds).
    """
    res_name = resource_name.split(".")[0]
    periodic_json = {
        "name": res_name+"_modules_periodic",
        "description": "Gather GLUE2 module information periodically",
        "steps": [],
    }
    step_json = {
        "name": "ipf.step.WorkflowStep",
        "params": {"workflow": "glue2/"+res_name+"_modules.json"},
    }
    interval_str = question(
        "How often should module information be gathered (hours)?", "1")
    # Hours -> seconds for the workflow engine.
    step_json["params"]["maximum_interval"] = int(interval_str) * 60 * 60
    periodic_json["steps"].append(step_json)
    path = os.path.join(getGlueWorkflowDir(), res_name +
                        "_modules_periodic.json")
    print(" -> writing periodic modules workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(periodic_json, indent=4, sort_keys=True))
def writePeriodicExtModulesWorkflow(resource_name):
    """Write a periodic wrapper workflow for the extended-modules workflow.

    Asks the user for the gathering interval in hours (stored as seconds).
    """
    res_name = resource_name.split(".")[0]
    periodic_json = {
        "name": res_name+"_extmodules_periodic",
        "description": "Gather GLUE2 Extended module (Software) information periodically",
        "steps": [],
    }
    step_json = {
        "name": "ipf.step.WorkflowStep",
        "params": {"workflow": "glue2/"+res_name+"_extmodules.json"},
    }
    interval_str = question(
        "How often should extended module information (XSEDE software) be gathered (hours)?", "1")
    # Hours -> seconds for the workflow engine.
    step_json["params"]["maximum_interval"] = int(interval_str) * 60 * 60
    periodic_json["steps"].append(step_json)
    path = os.path.join(getGlueWorkflowDir(), res_name +
                        "_extmodules_periodic.json")
    print(" -> writing periodic extended modules (software) workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(periodic_json, indent=4, sort_keys=True))
def writePeriodicAbstractServicesWorkflow(resource_name):
    """Write a periodic wrapper workflow for the abstract-services workflow.

    Asks the user for the gathering interval in hours (stored as seconds).
    """
    res_name = resource_name.split(".")[0]
    periodic_json = {
        "name": res_name+"_services_periodic",
        "description": "Gather GLUE2 AbstractService information periodically",
        "steps": [],
    }
    step_json = {
        "name": "ipf.step.WorkflowStep",
        "params": {"workflow": "glue2/"+res_name+"_services.json"},
    }
    interval_str = question(
        "How often should AbstractService (XSEDE Services) information be gathered (hours)?", "1")
    # Hours -> seconds for the workflow engine.
    step_json["params"]["maximum_interval"] = int(interval_str) * 60 * 60
    periodic_json["steps"].append(step_json)
    path = os.path.join(getGlueWorkflowDir(), res_name +
                        "_services_periodic.json")
    print(" -> writing periodic Abstract Services workflow to %s" % path)
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    with open(path, "w") as f:
        f.write(json.dumps(periodic_json, indent=4, sort_keys=True))
#######################################################################################################################
def writeComputeInit(resource_name, module_names, env_vars):
    """Write the init.d script that runs the periodic compute workflow."""
    res_name = resource_name.split(".")[0]
    path = os.path.join(getBaseDir(), "etc", "ipf", "init.d",
                        "ipf-"+res_name+"-glue2-compute")
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    name = "%s_compute_periodic\n" % res_name
    writeInit(resource_name, module_names, env_vars, name, path)
def writeActivityInit(resource_name, module_names, env_vars):
    """Write the init.d script that runs the activity workflow."""
    res_name = resource_name.split(".")[0]
    path = os.path.join(getBaseDir(), "etc", "ipf", "init.d",
                        "ipf-"+res_name+"-glue2-activity")
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    name = "%s_activity\n" % res_name
    writeInit(resource_name, module_names, env_vars, name, path)
def writeModulesInit(resource_name, module_names, env_vars):
    """Write the init.d script that runs the periodic modules workflow."""
    res_name = resource_name.split(".")[0]
    path = os.path.join(getBaseDir(), "etc", "ipf", "init.d",
                        "ipf-"+res_name+"-glue2-modules")
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    name = "%s_modules_periodic\n" % res_name
    writeInit(resource_name, module_names, env_vars, name, path)
def writeExtModulesInit(resource_name, module_names, env_vars):
    """Write the init.d script that runs the periodic extended-modules workflow."""
    res_name = resource_name.split(".")[0]
    path = os.path.join(getBaseDir(), "etc", "ipf", "init.d",
                        "ipf-"+res_name+"-glue2-extmodules")
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    name = "%s_extmodules_periodic\n" % res_name
    writeInit(resource_name, module_names, env_vars, name, path)
def writeAbstractServicesInit(resource_name, module_names, env_vars):
    """Write the init.d script that runs the periodic abstract-services workflow."""
    res_name = resource_name.split(".")[0]
    path = os.path.join(getBaseDir(), "etc", "ipf", "init.d",
                        "ipf-"+res_name+"-glue2-services")
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    name = "%s_services_periodic\n" % res_name
    writeInit(resource_name, module_names, env_vars, name, path)
def writeIPFInfoInit(resource_name, module_names, env_vars):
    """Write the init.d script that runs the periodic IPFInfo publish workflow.

    ``resource_name`` is passed through to writeInit for interface
    consistency; the script path itself is fixed ("ipfinfo").
    """
    path = os.path.join(getBaseDir(), "etc", "ipf", "init.d", "ipfinfo")
    if os.path.isfile(path):
        # Fixed: %m (month), not %M (minute), in the backup timestamp.
        os.rename(path, path+".backup-" +
                  time.strftime('%Y-%m-%d-%X', time.localtime()))
    name = "ipfinfo_publish_periodic\n"
    writeInit(resource_name, module_names, env_vars, name, path)
def writeInit(resource_name, module_names, env_vars, name, path):
    """Instantiate the ipf-WORKFLOW init-script template at ``path``.

    Rewrites the NAME=, IPF_USER= and IPF_*_PATH lines, and injects module
    loads and environment-variable exports at the template's marker lines.

    Args:
        resource_name: full resource name (only kept for interface parity).
        module_names: list of modules to ``module load``, or None.
        env_vars: dict of environment variables to export.
        name: workflow name; callers pass it with a trailing newline, which
            is stripped here so NAME= is written on a single line.
        path: destination file for the generated script.
    """
    # Callers append "\n" to name; writing "NAME=%s\n" % name verbatim would
    # produce a doubled newline, so normalize first.
    workflow_name = name.rstrip("\n")
    with open(os.path.join(getBaseDir(), "etc",
                           "ipf", "init.d", "ipf-WORKFLOW"), "r") as in_file, \
            open(path, "w") as out_file:
        for line in in_file:
            if line.startswith("NAME="):
                out_file.write("NAME=%s\n" % workflow_name)
            elif line.startswith("WORKFLOW="):
                # NOTE(review): no caller passes "ipf_publish_periodic" - the
                # ipfinfo caller passes "ipfinfo_publish_periodic", so this
                # override never fires; confirm which name was intended.
                if workflow_name == "ipf_publish_periodic":
                    out_file.write("WORKFLOW=${NAME}.json\n")
                else:
                    out_file.write(line)
            elif line.startswith("IPF_USER="):
                out_file.write("IPF_USER=%s\n" % getpass.getuser())
            elif line.startswith("export IPF_ETC_PATH="):
                out_file.write("export IPF_ETC_PATH=%s\n" %
                               os.path.join(getBaseDir(), "etc/ipf"))
            elif line.startswith("export IPF_VAR_PATH="):
                out_file.write("export IPF_VAR_PATH=%s\n" %
                               os.path.join(getBaseDir(), "var/ipf"))
            elif "modules" in line and module_names is not None:
                out_file.write(line)
                out_file.write("source %s\n" % os.path.join(
                    os.environ["MODULESHOME"], "init", "bash"))
                for module_name in module_names:
                    out_file.write("module load %s\n" % module_name)
            elif "environment variables" in line and len(env_vars) > 0:
                out_file.write(line)
                # Fixed: loop variable renamed so it no longer shadows the
                # ``name`` parameter.
                for var_name in env_vars:
                    out_file.write("export %s=%s\n" %
                                   (var_name, env_vars[var_name]))
            else:
                out_file.write(line)
#######################################################################################################################
def getModulesType():
    """Ask which modules system (lmod or classic modules) the resource uses."""
    choices = ["lmod", "modules"]
    return options("What modules system is used on this resource?", choices)
def getSupportContact():
    """Prompt for the default SupportContact URL, offering the XCSR default."""
    return question("What is your default SupportContact URL?",
                    "https://software.xsede.org/xcsr-db/v1/support-contacts/1553")
def getGlueWorkflowDir():
    """Return the glue2 subdirectory of the workflow directory."""
    return os.path.join(getWorkflowDir(), "glue2")
def getWorkflowDir():
    """Return $BASE/etc/ipf/workflow under the configured base directory."""
    return os.path.join(getBaseDir(), "etc", "ipf", "workflow")
# Cached answer of getBaseDir(); asked at most once per run.
_base_dir = None


def getBaseDir():
    """Ask for (and cache) the base directory for etc/ipf and var/ipf."""
    global _base_dir
    if _base_dir is None:
        candidates = []
        if os.path.exists(os.path.join("/etc", "ipf")):
            candidates.append("/")
        candidates.append(os.path.dirname(
            os.path.dirname(os.path.abspath(__file__))))
        candidates.append(sysconfig.get_paths()["purelib"])
        candidates.append("other")
        _base_dir = options("Select base directory (files will be read/written to $BASE/etc/ipf, $BASE/var/ipf - " +
                            " RPM install should use '/')" +
                            " pip install should use '("+(str(len(candidates)-1))+")'",
                            candidates)
        if _base_dir == "other":
            _base_dir = question("Enter base directory")
    return _base_dir
def readWorkflowFile(path):
    """Load and return the JSON workflow definition stored at ``path``.

    Uses a context manager so the file handle is released even when parsing
    raises, and json.load to stream straight from the file object.
    """
    with open(path) as f:
        return json.load(f)
#######################################################################################################################
def question(text, default=None):
    """Prompt for a free-form answer; empty input falls back to the default.

    Raises:
        Exception: when there is no default and the user enters nothing.
    """
    print()
    if default is None:
        reply = input("%s: " % text)
        if reply == "":
            raise Exception("no input provided")
        return reply
    reply = input("%s (%s): " % (text, default))
    return default if reply == "" else reply
def options(text, opts, default=None):
    """Present a numbered menu and return the chosen option string.

    Empty input returns the default when one is given; invalid input
    re-displays the menu (loop instead of the original recursion - same
    prompts, same output).
    """
    while True:
        print()
        if default is None:
            print("%s:" % text)
        else:
            print("%s (%s):" % (text, default))
        for i in range(len(opts)):
            print("  (%d) %s" % ((i+1), opts[i]))
        answer = input(": ")
        if answer == "":
            if default is not None:
                return default
            print("no options selected - pick a number")
            continue
        try:
            index = int(answer)
        except ValueError:
            print("enter a number")
            continue
        if 1 <= index <= len(opts):
            return opts[index-1]
        print("select an option between 1 and %d" % len(opts))
#######################################################################################################################
def testReadFile(path, print_warnings=True):
if not os.path.exists(path):
if print_warnings:
print(" Warning: file %s doesn't exist" % path)
return False
if not os.access(path, os.R_OK):
if print_warnings:
print(" Warning: file %s can't be read by current user" % path)
return False
return True
def testReadDirectory(path, print_warnings=True):
if not os.path.exists(path):
if print_warnings:
print(" Warning: directory %s doesn't exist" % path)
return False
if not os.path.isdir(path):
if print_warnings:
print(" Warning: %s is not a directory" % path)
return False
if not os.access(path, os.R_OK):
if print_warnings:
print(" Warning: directory %s can't be read by current user" % path)
return False
return True
#######################################################################################################################
# Script entry point: run the interactive IPF configuration wizard
# (configure() is defined earlier in this file).
if __name__ == "__main__":
    configure()
|
"""
The `nntm.model_selection._split` module includes classes and
functions to split the data based on a preset strategy.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Olivier Grisel <olivier.grisel@ensta.org>
# Raghav RV <rvraghav93@gmail.com>
# Leandro Hermida <hermidal@cs.umd.edu>
# Rodion Martynov <marrodon@gmail.com>
# License: BSD 3 clause
# Author: Timo Sutterer <hi@timo-sutterer.de>
# License: MIT
import logging
import numbers
from collections.abc import Iterable
import numpy as np
import pandas as pd
from sklearn.model_selection import BaseCrossValidator
from sklearn.utils import indexable
from ..utils.validation import _num_samples
logger = logging.getLogger(__name__)
__all__ = ["PurgedKFold", "check_cv"]
class PurgedKFold(BaseCrossValidator):
    """Purged K-Folds cross-validator

    Provides train/test indices to split data in train/test sets. Split
    dataset into k consecutive folds. Training observations overlapping
    in time with test observations are purged.

    Optionally, the eras that immediately follow the test set can be
    eliminated using the `embargo` argument.

    Data is assumed to be contiguous (shuffle=False).

    Parameters
    ----------
    n_splits : int, default=5
        Number of folds. Must be at least 2.

    target_days : int, default=20
        Days between the observation of samples and the target.

    embargo : float between 0.0 and 1.0, default=None
        Relative number of eras to be purged after every test set.
        (`embargo` * `total_era_count`) eras are embargoed.

    References
    ----------
    .. [1] `Marcos Lopez de Prado (2018). Advances in Financial Machine
        Learning. Chapter 7 (Cross-Validation in Finance).`_

    .. [2] `Super Massive Data Release: Deep Dive
        <https://forum.numer.ai/t/super-massive-data-release-deep-dive/4053>`_
    """

    def __init__(self, n_splits=5, target_days=20, embargo=None):
        # Validate eagerly so misconfiguration fails at construction time
        # rather than on the first call to split().
        if not isinstance(n_splits, numbers.Integral):
            raise ValueError(
                "The number of folds must be of Integral type. "
                f"`n_splits={n_splits}` of type {type(n_splits)} was passed."
            )
        n_splits = int(n_splits)

        if n_splits <= 1:
            raise ValueError(
                "k-fold cross-validation requires at least one "
                "train/test split by setting `n_splits=2` or more, "
                f"got `n_splits={n_splits}`."
            )

        if not isinstance(target_days, numbers.Integral):
            raise ValueError(
                "The number of target days must be of Integral type. "
                f"`target_days={target_days}` of type {type(target_days)} was passed."
            )
        target_days = int(target_days)

        # Eras are weekly, so target_days must convert cleanly to whole
        # weeks (target_days // 5 below).
        if target_days % 5 != 0:
            raise ValueError(
                "The number of target days has to be a multiple of 5. "
                f"`target_days={target_days}` was passed."
            )

        if embargo:
            if not isinstance(embargo, float):
                raise ValueError(
                    "Embargo must be of float type. "
                    f"`embargo={embargo}` of type {type(embargo)} was passed."
                )
            if not 0.0 < embargo < 1.0:
                raise ValueError(
                    "Embargo must be between 0.0 and 1.0. "
                    f"`embargo={embargo}` was passed."
                )

        self.n_splits = n_splits
        self.target_days = target_days
        self.embargo = embargo

    def split(self, X, y=None, groups=None):
        """Generate indices to split data into training and test set.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data, where `n_samples` is the number of samples
            and `n_features` is the number of features.

        y : array-like of shape (n_samples,), default=None
            The target variable for supervised learning problems.

        groups : array-like of shape (n_samples,), default=None
            Eras for the samples used while splitting the dataset into
            train/test set. This parameter is not required when X is
            a pandas DataFrame containing an `era` column.

        Yields
        ------
        train : ndarray
            The training set indices for that split.

        test : ndarray
            The testing set indices for that split.
        """
        # Era labels must come from `groups` or from an `era` column of a
        # DataFrame; fail fast when neither is available.
        if isinstance(X, np.ndarray) and groups is None:
            raise ValueError("`groups` parameter is required when X is a numpy array")
        if isinstance(X, pd.DataFrame) and groups is None and "era" not in X.columns:
            raise ValueError("`groups` parameter is required when X has no era column")

        X, y, groups = indexable(X, y, groups)
        n_samples = _num_samples(X)
        if self.n_splits > n_samples:
            raise ValueError(
                (
                    f"Cannot have number of splits n_splits={self.n_splits} greater "
                    f"than the number of samples: n_samples={n_samples}."
                )
            )

        eras = np.fromiter(self._get_eras(X, groups=groups), dtype=int)
        # Era when each sample's target becomes known: with weekly eras,
        # target_days/5 weeks after observation (inclusive of the start era).
        target_weeks = self.target_days // 5
        eras_target_release = np.array([era + target_weeks - 1 for era in eras])

        embargo_era_count = 0
        if self.embargo:
            # Number of whole eras to drop after each test block.
            era_count = len(set(eras))
            embargo_era_count = int(round(era_count * self.embargo))

        indices = np.arange(_num_samples(X))
        # _iter_test_masks is inherited from BaseCrossValidator and is built
        # on _iter_test_indices below (consecutive folds, no shuffling).
        for test_index_mask in self._iter_test_masks(X, y, groups):
            test_index = indices[test_index_mask]
            test_era_min = min(eras[test_index])
            test_era_max = max(eras[test_index])
            test_era_target_release_max = max(eras_target_release[test_index])

            train_index = indices[np.logical_not(test_index_mask)]
            # zip() snapshots these arrays here, so shrinking train_index
            # inside the loop does not affect the iteration itself.
            for idx, train_era, train_era_target_release in zip(
                train_index, eras[train_index], eras_target_release[train_index]
            ):
                # Purge: drop training samples whose target window overlaps
                # the test window in either direction.
                purge = not (
                    train_era_target_release < test_era_min
                    or train_era > test_era_target_release_max
                )
                # Embargo: additionally drop the embargo_era_count eras right
                # after the test block.  (With <=, the train_era == test_era_max
                # case is already covered by the purge condition above.)
                embargo = test_era_max <= train_era <= test_era_max + embargo_era_count
                if purge or embargo:
                    train_index = train_index[train_index != idx]

            yield train_index, test_index

    def _iter_test_indices(self, X=None, y=None, groups=None):
        """Generates integer indices corresponding to test sets."""
        n_samples = _num_samples(X)
        indices = np.arange(n_samples)

        n_splits = self.n_splits
        # Fold sizes depend on n_samples (not n_eras)
        fold_sizes = np.full(n_splits, n_samples // n_splits, dtype=int)
        # Distribute the remainder one sample at a time over the first folds.
        fold_sizes[: n_samples % n_splits] += 1
        current = 0
        for fold_size in fold_sizes:
            start, stop = current, current + fold_size
            yield indices[start:stop]
            current = stop

    def _get_eras(self, X, groups=None):
        """Generates integer eras."""
        # Prefer explicit groups; otherwise read the DataFrame's era column.
        eras = groups if groups is not None else X["era"].tolist()
        for era in eras:
            yield int(era)

    def get_n_splits(self, X=None, y=None, groups=None):
        """Returns the number of splitting iterations in the cross-validator

        Parameters
        ----------
        X : object
            Always ignored, exists for sklearn compatibility.

        y : object
            Always ignored, exists for sklearn compatibility.

        groups : object
            Always ignored, exists for sklearn compatibility.

        Returns
        -------
        n_splits : int
            Returns the number of splitting iterations in the cross-validator.
        """
        return self.n_splits
class _CVIterableWrapper(BaseCrossValidator):
    """Wrapper class for old style cv objects and iterables."""

    def __init__(self, cv):
        # Materialize the iterable so it can be traversed more than once.
        self.cv = list(cv)

    def get_n_splits(self, X=None, y=None, groups=None):
        """Return the number of splitting iterations.

        Parameters
        ----------
        X : object
            Always ignored, exists for compatibility.

        y : object
            Always ignored, exists for compatibility.

        groups : object
            Always ignored, exists for compatibility.

        Returns
        -------
        n_splits : int
            The number of (train, test) pairs the wrapped iterable holds.
        """
        return len(self.cv)

    def split(self, X=None, y=None, groups=None):
        """Yield the stored (train, test) index pairs.

        Parameters
        ----------
        X : object
            Always ignored, exists for compatibility.

        y : object
            Always ignored, exists for compatibility.

        groups : object
            Always ignored, exists for compatibility.

        Yields
        ------
        train : ndarray
            The training set indices for that split.

        test : ndarray
            The testing set indices for that split.
        """
        yield from self.cv
def check_cv(cv=5, *, target_days=20, embargo=None):
    """Input checker utility for building a cross-validator.

    Parameters
    ----------
    cv : int, cross-validation generator or an iterable, default=None
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:
        - None, to use the default 5-fold purged cross validation,
        - integer, to specify the number of folds for purged cross
          validation,
        - An iterable yielding (train, test) splits as arrays of
          indices.

    target_days : int, default=20
        Days between the observation of samples and the target.

    embargo : float between 0.0 and 1.0, default=None
        Relative number of eras to be purged after every test set.
        (`embargo` * `total_era_count`) eras are embargoed.

    Returns
    -------
    checked_cv : a cross-validator instance.
        The return value is a cross-validator which generates the
        train/test splits via the ``split`` method.
    """
    if cv is None:
        cv = 5
    if isinstance(cv, numbers.Integral):
        return PurgedKFold(n_splits=cv, target_days=target_days, embargo=embargo)
    # New style cv objects (anything with a split method) pass through.
    if hasattr(cv, "split") and not isinstance(cv, str):
        return cv
    # Plain iterables of (train, test) pairs get wrapped.
    if isinstance(cv, Iterable) and not isinstance(cv, str):
        return _CVIterableWrapper(cv)
    raise ValueError(
        "Expected cv as an integer, cross-validation "
        "object (from nntm.model_selection) "
        f"or an iterable. Got {cv}."
    )
|
import sys
import os
def main():
    """Demonstrate basic string operations on a Lorem Ipsum sample.

    Prints the length of the sample, the number of 'it' substrings,
    the lowercased text, and the text with 'i' replaced by '*'.

    Returns
    -------
    int
        0 on success, suitable for passing to ``sys.exit``.
    """
    s = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Curabitur rhoncus tincidunt sem nec gravida. Sed id convallis quam. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus dui purus, ornare vitae metus vitae, feugiat ultricies ipsum. Etiam id ante quis purus rhoncus egestas. Donec vel ligula lacinia, iaculis ipsum vitae, vulputate nunc. Aliquam tellus libero, sagittis quis massa elementum, semper sodales lacus. Sed justo arcu, lobortis quis bibendum ac, fermentum ut mi. Quisque faucibus ex vitae sapien aliquam blandit. Donec fringilla sit amet augue id venenatis. Integer at nibh quis justo consectetur posuere. Nulla facilisis mi massa, sed congue lectus vehicula sed. Quisque placerat ultrices est congue vestibulum. Maecenas fermentum purus et ipsum volutpat, eu convallis risus interdum. Donec vel nunc suscipit, venenatis mi at, convallis sapien. Integer ut metus id neque rhoncus commodo."
    x = len(s)
    y = s.count('it')
    z = s.lower()
    print(f"How many characters does the string have? {x}")
    print(f"How many times does substring 'it' occur on it? {y}")
    print(f"Convert whole text into lowercase. {z}")
    print(s.replace("i", "*"))
    # Return 0 (success). The original returned os.X_OK, which is 1, so
    # ``sys.exit(main())`` reported failure even though everything succeeded.
    return 0
if __name__ == "__main__":
    sys.exit(main())
|
#!/usr/bin/env python3
# .. _`schema_loader`:
#
# #####################################################################
# Schema Loader Module -- Load Embedded or External Schema
# #####################################################################
#
# .. py:module:: schema.loader
#
# A *Schema Loader* loads the attributes of a schema from a source document.
# There are a variety of sources.
#
# - The first row of a sheet within a workbook.
# This version has to be injected into workbook processing
# so that the first row is separated from the data rows.
#
# - A separate sheet of a workbook.
# This version requires a sheet name.
#
# - A separate workbook. This, too, requires a named sheet.
#
# - COBOL Code. We'll set this aside as a subclass
# so complex it requires its own module.
#
# A schema loader is paired with a specific kind of :py:class:`sheet.Sheet`.
#
# A workbook requires a schema, which requires a schema loader.
# A schema loader depends on a meta-workbook. Ideally that meta-workbook has
# an embedded schema, but it may have an external schema, meaning we could have a
# meta-schema required to load the schema for the application data. Sheesh.
#
# First, let's hope that doesn't happen. Second, the circularity is resolved by making it the responsibility of
# the application to handle schema loading.
#
# Embedded Schema Use Case
# ===============================
#
# A :py:class:`sheet.EmbeddedSchemaSheet` requires a loader class.
# The loader will
#
# 1. Be built with the sheet as an argument.
#
# 2. Be interrogated for the schema.
#
# 3. Be interrogated for the rows.
#
# The most typical case is the single-header-row case.
#
# In some cases, the loader is actually
# a rather sophisticated parser that partitions the data into the embedded schema
# and the data rows.
#
# .. parsed-literal::
#
# with Workbook( name ) as wb:
# sheet = self.wb.sheet( 'Sheet2',
# stingray.sheet.EmbeddedSchemaSheet,
# loader_class= stingray.schema.loader.HeadingRowSchemaLoader )
#
# for row in sheet.rows():
# *process the row*
#
# External Schema Use Case
# ===============================
#
# A :py:class:`sheet.ExternalSchemaSheet` requires a schema.
#
# In the typical case, the external schema file has an embedded meta-schema.
# The first row has appropriate column names.
# This requires a subclass of :py:class:`schema.loader.ExternalSchemaLoader` to properly map the names that were found onto the attributes of the :py:class:`schema.Attribute` class.
#
# When the embedded meta-schema has unusual names, then a builder must be defined
# to map the names that are found in the schema and build an :py:class:`schema.Attribute` instance.
#
# .. parsed-literal::
#
# with open_workbook( schema_name ) as schema_wb:
# esl= stingray.schema.loader.ExternalSchemaLoader( schema_wb, "Schema" )
# schema= esl.schema()
# with Workbook( name, schema=schema ) as wb:
# sheet = self.wb.sheet( 'Sheet2',
# stingray.sheet.ExternalSchemaSheet,
# schema= schema )
# counts= process_sheet( sheet )
# pprint.pprint( counts )
#
# Manual Schema Use Case
# ===============================
#
# Also, a manually-defined :py:class:`schema.Schema` can be built rather than being loaded.
#
# .. parsed-literal::
#
# schema= stingray.schema.Schema(
# stingray.schema.Attribute( name='Column #1' ),
# stingray.schema.Attribute( name='Key' ),
# stingray.schema.Attribute( name='Value' ),
# stingray.schema.Attribute( name='Etc.' ),
# )
#
# Model
# ======
#
# .. code-block:: none
#
# http://yuml.me/diagram/scruffy;/class/
# #schema-loader,
# [Schema]<>-[Attribute],
# [SchemaLoader]-builds->[Schema],
# [SchemaLoader]^[HeadingRowSchemaLoader],
# [SchemaLoader]^[ExternalSchemaLoader],
# [ExternalSchemaLoader]-reads->[Workbook],
# [HeadingRowSchemaLoader]-reads->[Sheet].
#
#
# .. image:: schema_loader.png
# :width: 6in
#
# Overheads
# ===============
#
# We depend on :py:mod:`schema`, :py:mod:`cell` and :py:mod:`sheet`.
#
# ::
"""stingray.schema.loader -- Loads a Schema from a row of a Sheet or
from a separate Sheet. This is extended to load COBOL schema
from DDE files.
"""
from stingray.schema import Schema, Attribute
import stingray.cell
import stingray.sheet
import warnings
# No Schema Exception
# ====================
#
# In some circumstances, we can't load a schema. The most common situation
# is a :py:class:`HeadingRowSchemaLoader` which is applied to an empty workbook sheet.
# No rows means no schema.
#
# ::
class NoSchemaFound( Exception ):
    """Raised when no schema can be located for a sheet."""
# The default behavior is to simply write a warning for an empty sheet.
# The lack of a schema means there's no data, also, and 99% of the time, silently ignoring
# an empty sheet is desirable.
#
# Schema Loader
# =================
#
# .. py:class:: SchemaLoader
#
# A Schema Loader has one mandatory contract: It must load the schema.
#
# A subclass may add a second contract, For example,
# an embedded schema loader will also return the non-schema rows.
#
# .. py:attribute:: sheet
#
# The :py:class:`Sheet` associated with this schema.
#
# .. py:attribute:: row_iter
#
# An iterator over the rows of this sheet; used to pick rows that
# belong to the header, separate from the rows that belong to data.
#
# ::
class SchemaLoader:
    """Locate schema information. Subclasses handle
    all of the variations on schema representation.

    :ivar sheet: the :py:class:`Sheet` this loader reads.
    :ivar row_iter: iterator over the sheet's rows; header rows are
        consumed from it, leaving only the data rows.
    """
    def __init__( self, sheet ):
        """A simple :py:class:`Sheet` instance."""
        self.sheet= sheet
        self.row_iter= iter( self.sheet.rows() )
    def schema( self ):
        """Scan the sheet to get the schema.
        :return: a :py:class:`Schema` object.
        :raises NotImplementedError: always; subclasses must override.
        """
        # The original returned the ``NotImplemented`` singleton, which is
        # meant for binary dunder methods; callers would silently receive
        # it as a "schema".  Raising is the correct abstract-method idiom.
        raise NotImplementedError
    def rows( self ):
        """Iterate all (or remaining) rows."""
        return self.row_iter
# Embedded Schema Loader
# ===========================
#
# .. py:class:: HeadingRowSchemaLoader
#
# In many cases, the schema is first-row column titles or something similar.
# As we noted above, :py:class:`csv.DictReader` supports this simple case.
#
# All other cases have to be handled with something a bit more sophisticated.
# The :py:class:`schema.loader.SchemaLoader` can be further subclassed to provide for more
# complex schema definitions buried in the rows of a sheet.
#
# This means that we must make the schema parsing an application-provided
# plug-in that the Workbook uses when instantiating each Sheet.
#
# ::
class HeadingRowSchemaLoader( SchemaLoader ):
    """Build a schema from the first row of a sheet; every remaining
    row is treated as data."""
    def schema( self ):
        """Derive the schema from row one.  Remaining rows are data.
        If the sheet is empty, emit a warning and return ``None``.
        """
        try:
            header = next( self.row_iter )
        except StopIteration:
            warnings.warn( "Empty sheet: no schema present" )
            return None
        # One Attribute per heading cell, named by the cell's text.
        return Schema(
            *(Attribute(name=cell.to_str()) for cell in header)
        )
# We'll open a :py:class:`sheet.Sheet` with a specific loader.
#
# .. parsed-literal::
#
# sheet= stingray.sheet.EmbeddedSchemaSheet(
# self.wb, 'The_Name',
# loader_class=HeadingRowSchemaLoader )
#
# .. py:class:: NonBlankHeadingRowSchemaLoader
#
# In many cases, we'd like to suppress the empty rows that are an inevitable feature of workbook sheets.
#
# Note that this doesn't work well for COBOL
# or Fixed format files, since an "empty" row may be difficult to discern.
#
# ::
class NonBlankHeadingRowSchemaLoader( HeadingRowSchemaLoader ):
    """Heading-row loader that silently skips rows in which every cell
    is empty."""
    def __init__( self, sheet ):
        """A simple :py:class:`Sheet` instance, wrapped so blank rows
        never reach the schema or data iteration."""
        self.sheet= sheet
        self.row_iter= self.non_blank( self.sheet.rows() )
    def non_blank( self, rows ):
        """Yield only rows with at least one non-empty cell."""
        for row in rows:
            if any( not cell.is_empty() for cell in row ):
                yield row
# External Schema Loader
# ==========================
#
# .. py:class:: ExternalSchemaLoader
#
# In some cases, the data workbook is described by a separate schema workbook, or a separate
# sheet within the data workbook. In these cases, the other sheet (or file) must be
# parsed to locate schema information.
#
# In the case of a fixed format file, we must examine a separate
# file to load schema information. This additional schema file may be in
# COBOL notation, leading to a more complex parser. See :ref:`cobol_loader`.
#
# The layout of the schema, of course, will be highly variable,
# so the "meta-schema" must be adjusted to the actual file.
#
# Note, also, that the schema loader is -- itself -- typical of a schema-based reader. It has a number of common features.
#
# 1. A dictionary-based "builder", :py:meth:`schema.loader.ExternalSchemaLoader.build_attr`, to handle Logical Layout.
# This transforms the input "raw" dictionary of :py:class:`cell.Cell` instances to an application dictionary of proper Python objects.
# See :ref:`developer`.
#
# 2. An iterator, :py:meth:`schema.loader.ExternalSchemaLoader.attr_dict_iter`,
# that provides "raw" dictionaries from each row (based on the schema) to the
# builder to create application dictionaries.
#
# 3. The overall function,
# :py:meth:`schema.loader.ExternalSchemaLoader.schema`,
# that iterates over application objects built from application dictionaries.
#
# .. py:attribute:: workbook
#
# The overall Workbook that we're parsing to locate schema information.
#
# .. py:attribute:: Sheet
#
# A specific sheet within that workbook.
#
# ::
class ExternalSchemaLoader( SchemaLoader ):
    """Open a workbook file in a well-known format.
    Build a schema with attribute name, offset, size and type
    information. The type is a string that names the
    type of cell to create.

    The meta-schema must be embedded as the first line of the schema sheet.
    The assumed meta-schema is the following::

        Schema(
            Attribute("name",create="TextCell"),
            Attribute("offset",create="NumberCell"),
            Attribute("size",create="NumberCell"),
            Attribute("type",create="TextCell"),
        )

    If the meta-schema has different names, then a subclass with
    a different :py:meth:`build_attr` is required to map the actual
    source columns to the attributes of a :py:class:`Attribute`.
    Offsets are typically 1-based.
    """
    def __init__( self, workbook, sheet_name='Sheet1' ):
        # The schema sheet itself carries its meta-schema in row one,
        # so it is opened as an EmbeddedSchemaSheet.
        self.workbook, self.sheet_name = workbook, sheet_name
        self.sheet= self.workbook.sheet( self.sheet_name, stingray.sheet.EmbeddedSchemaSheet,
            loader_class= HeadingRowSchemaLoader )
# .. py:method:: ExternalSchemaLoader.build_attr( row )
#
# There's potential for a great deal of variability in schema definition.
# Consequently, this ``build_attr`` method is merely a sample that
# covers one common case.
#
# ::
    # 1-based offsets in the schema sheet are converted to 0-based.
    base= 1
    # Maps a lowercase ``type`` value from the schema sheet to the name
    # of the Cell class to instantiate for data in that column.
    type_to_cell = {
        'text': "TextCell",
        'number': "NumberCell",
        'date': "DateCell",
        'boolean': "BooleanCell",
    }
    @staticmethod
    def build_attr( row ):
        """Build application dictionary from raw dictionary.

        ``row`` maps meta-schema column names ("name", "offset", "size",
        "type") to Cell instances.  Missing "offset" or "size" columns
        yield ``None``; a missing or unrecognized "type" falls back to a
        text cell.
        """
        try:
            offset= row['offset'].to_int()-ExternalSchemaLoader.base
        except KeyError:
            offset= None
        try:
            size= row['size'].to_int()
        except KeyError:
            size= None
        try:
            type_name= row['type'].to_str()
            create= ExternalSchemaLoader.type_to_cell[type_name]
        except KeyError:
            # NOTE(review): this fallback is the class object while
            # ``type_to_cell`` maps to class *names* (strings) -- the two
            # branches produce different kinds of ``create`` values.
            # Confirm that Attribute accepts both before changing.
            create= stingray.cell.TextCell
        return dict(
            name= row['name'].to_str(),
            offset= offset,
            size= size,
            create= create,
        )
# Schema loading involves a process of
#
# 1. Iterating through the source rows as dictionaries.
#
#    - Build each raw row as a source dictionary.
#
#    - Build a standardized attr dictionary from the source dictionary.
#      This mapping, implemented by :py:meth:`schema.loader.ExternalSchemaLoader.build_attr`
#      is subject to a great deal of change without notice.
#
# 2. Building each :py:class:`schema.Attribute` from the dictionary.
#
# .. py:method:: ExternalSchemaLoader.attr_dict_iter( sheet )
#
#     Iterate over application dicts based on raw dicts built by the schema of the sheet.
#
# ::
    def attr_dict_iter( self, sheet ):
        """Iterate over application dicts based on raw dicts
        built by the schema of the sheet."""
        return (
            ExternalSchemaLoader.build_attr(r)
            for r in sheet.schema.rows_as_dict_iter(sheet)
        )
# .. py:method:: ExternalSchemaLoader.schema( )
#
#     Scan a file to get the schema.
#
#     :return: a :py:class:`Schema` object
#
# ::
    def schema( self ):
        """Scan a file to get the schema.
        :return: a :py:class:`Schema` object."""
        # All rows of the schema sheet are consumed building the schema,
        # so this loader has no remaining data rows to offer.
        self.row_iter= iter( [] )
        source_dict = self.attr_dict_iter( self.sheet )
        schema= Schema(
            *(Attribute(**row) for row in source_dict)
        )
        return schema
# Worst-Case Loader
# ====================
#
# .. py:class:: BareExternalSchemaLoader
#
# This is a degenerate case loader where the schema sheet (or file) doesn't have
# an embedded schema on line one of the sheet.
#
# ::
class BareExternalSchemaLoader( SchemaLoader ):
    """Worst-case loader: the given schema sheet (or file) carries no
    header row, so its meta-schema cannot be discovered and is instead
    hard-coded as a class attribute here.
    """
    # Fixed meta-schema describing the columns of the bare schema sheet.
    schema= Schema(
        Attribute("name",create="TextCell"),
        Attribute("offset",create="NumberCell"),
        Attribute("size",create="NumberCell"),
        Attribute("type",create="TextCell"),
    )
    def __init__( self, workbook, sheet_name='Sheet1' ):
        self.workbook= workbook
        self.sheet_name= sheet_name
        # The sheet has no embedded schema, so open it as an
        # ExternalSchemaSheet driven by the hard-coded meta-schema.
        self.sheet= self.workbook.sheet(
            self.sheet_name,
            stingray.sheet.ExternalSchemaSheet,
            schema= self.schema )
# Parsing and Loading a COBOL Schema
# =====================================
#
# One logical extension to this is to parse COBOL DDE's to create
# a schema that allows us to process a COBOL file (in EBCDIC) directly
# as if it were a simple workbook.
#
# We'll delegate that to :ref:`cobol_loader`, since it's considerably
# more complex than simply loading rows from a sheet of a workbook.
|
from dataclasses import dataclass
from homeassistant.components.sensor import (
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
SensorEntityDescription,
)
from homeassistant.const import (
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_VOLTAGE,
ELECTRIC_CURRENT_AMPERE,
ELECTRIC_CURRENT_MILLIAMPERE,
ELECTRIC_POTENTIAL_VOLT,
ENERGY_KILO_WATT_HOUR,
FREQUENCY_HERTZ,
POWER_WATT,
TEMP_CELSIUS,
TIME_HOURS,
)
DOMAIN = "saj_modbus"
DEFAULT_NAME = "SAJ"
DEFAULT_SCAN_INTERVAL = 60
DEFAULT_PORT = 502
CONF_SAJ_HUB = "saj_hub"
ATTR_MANUFACTURER = "SAJ Electric"
@dataclass
class SajModbusSensorEntityDescription(SensorEntityDescription):
    """A class that describes SAJ Modbus sensor entities."""
# Descriptions of every sensor exposed by the SAJ Modbus integration,
# keyed by the register/field name read from the inverter.
# Each value is a single description (the former list[...] annotation
# did not match the actual values).
SENSOR_TYPES: dict[str, SajModbusSensorEntityDescription] = {
    "DevType": SajModbusSensorEntityDescription(
        name="Device Type",
        key="devtype",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "SubType": SajModbusSensorEntityDescription(
        name="Sub Type",
        key="subtype",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "CommVer": SajModbusSensorEntityDescription(
        name="Comms Protocol Version",
        key="commver",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "SN": SajModbusSensorEntityDescription(
        name="Serial Number",
        key="sn",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "PC": SajModbusSensorEntityDescription(
        name="Product Code",
        key="pc",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "DV": SajModbusSensorEntityDescription(
        name="Display Software Version",
        key="dv",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "MCV": SajModbusSensorEntityDescription(
        name="Master Ctrl Software Version",
        key="mcv",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "SCV": SajModbusSensorEntityDescription(
        name="Slave Ctrl Software Version",
        key="scv",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "DispHWVersion": SajModbusSensorEntityDescription(
        name="Display Board Hardware Version",
        key="disphwversion",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "CtrlHWVersion": SajModbusSensorEntityDescription(
        name="Control Board Hardware Version",
        key="ctrlhwversion",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    "PowerHWVersion": SajModbusSensorEntityDescription(
        name="Power Board Hardware Version",
        key="powerhwversion",
        icon="mdi:information-outline",
        entity_registry_enabled_default=False,
    ),
    # Status / mode / fault sensors -- values decoded via
    # DEVICE_STATUSSES and FAULT_MESSAGES below.
    "MPVStatus": SajModbusSensorEntityDescription(
        name="Inverter status",
        key="mpvstatus",
        icon="mdi:information-outline",
    ),
    "MPVMode": SajModbusSensorEntityDescription(
        name="Inverter working mode",
        key="mpvmode",
        icon="mdi:information-outline",
    ),
    "FaultMSG": SajModbusSensorEntityDescription(
        name="Inverter error message",
        key="faultmsg",
        icon="mdi:message-alert-outline",
    ),
    # Per-string PV measurements (voltage / current / power).
    "PV1Volt": SajModbusSensorEntityDescription(
        name="PV1 voltage",
        key="pv1volt",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "PV1Curr": SajModbusSensorEntityDescription(
        name="PV1 total current",
        key="pv1curr",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        icon="mdi:current-ac",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "PV1Power": SajModbusSensorEntityDescription(
        name="PV1 power",
        key="pv1power",
        native_unit_of_measurement=POWER_WATT,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_POWER,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "PV2Volt": SajModbusSensorEntityDescription(
        name="PV2 voltage",
        key="pv2volt",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "PV2Curr": SajModbusSensorEntityDescription(
        name="PV2 total current",
        key="pv2curr",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        icon="mdi:current-ac",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "PV2Power": SajModbusSensorEntityDescription(
        name="PV2 power",
        key="pv2power",
        native_unit_of_measurement=POWER_WATT,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_POWER,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "PV3Volt": SajModbusSensorEntityDescription(
        name="PV3 voltage",
        key="pv3volt",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "PV3Curr": SajModbusSensorEntityDescription(
        name="PV3 total current",
        key="pv3curr",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        icon="mdi:current-ac",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "PV3Power": SajModbusSensorEntityDescription(
        name="PV3 power",
        key="pv3power",
        native_unit_of_measurement=POWER_WATT,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_POWER,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    # Internal inverter measurements.
    "BusVolt": SajModbusSensorEntityDescription(
        name="BUS voltage",
        key="busvolt",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "InvTempC": SajModbusSensorEntityDescription(
        name="Inverter temperature",
        key="invtempc",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "GFCI": SajModbusSensorEntityDescription(
        name="GFCI",
        key="gfci",
        native_unit_of_measurement=ELECTRIC_CURRENT_MILLIAMPERE,
        icon="mdi:current-dc",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "Power": SajModbusSensorEntityDescription(
        name="Active power of inverter total output",
        key="power",
        native_unit_of_measurement=POWER_WATT,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_POWER,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "QPower": SajModbusSensorEntityDescription(
        name="Reactive power of inverter total output",
        key="qpower",
        native_unit_of_measurement="VAR",
        icon="mdi:flash",
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "PF": SajModbusSensorEntityDescription(
        name="Total power factor of inverter",
        key="pf",
        device_class=DEVICE_CLASS_POWER_FACTOR,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    # Per-phase AC output measurements (L1 / L2 / L3).
    "L1Volt": SajModbusSensorEntityDescription(
        name="L1 voltage",
        key="l1volt",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L1Curr": SajModbusSensorEntityDescription(
        name="L1 current",
        key="l1curr",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        icon="mdi:current-ac",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L1Freq": SajModbusSensorEntityDescription(
        name="L1 frequency",
        key="l1freq",
        native_unit_of_measurement=FREQUENCY_HERTZ,
        icon="mdi:sine-wave",
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L1DCI": SajModbusSensorEntityDescription(
        name="L1 DC component",
        key="l1dci",
        native_unit_of_measurement=ELECTRIC_CURRENT_MILLIAMPERE,
        icon="mdi:current-dc",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L1Power": SajModbusSensorEntityDescription(
        name="L1 power",
        key="l1power",
        native_unit_of_measurement=POWER_WATT,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_POWER,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "L1PF": SajModbusSensorEntityDescription(
        name="L1 power factor",
        key="l1pf",
        device_class=DEVICE_CLASS_POWER_FACTOR,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "L2Volt": SajModbusSensorEntityDescription(
        name="L2 voltage",
        key="l2volt",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L2Curr": SajModbusSensorEntityDescription(
        name="L2 current",
        key="l2curr",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        icon="mdi:current-ac",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L2Freq": SajModbusSensorEntityDescription(
        name="L2 frequency",
        key="l2freq",
        native_unit_of_measurement=FREQUENCY_HERTZ,
        icon="mdi:sine-wave",
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L2DCI": SajModbusSensorEntityDescription(
        name="L2 DC component",
        key="l2dci",
        native_unit_of_measurement=ELECTRIC_CURRENT_MILLIAMPERE,
        icon="mdi:current-dc",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L2Power": SajModbusSensorEntityDescription(
        name="L2 power",
        key="l2power",
        native_unit_of_measurement=POWER_WATT,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_POWER,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "L2PF": SajModbusSensorEntityDescription(
        name="L2 power factor",
        key="l2pf",
        device_class=DEVICE_CLASS_POWER_FACTOR,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "L3Volt": SajModbusSensorEntityDescription(
        name="L3 voltage",
        key="l3volt",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L3Curr": SajModbusSensorEntityDescription(
        name="L3 current",
        key="l3curr",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        icon="mdi:current-ac",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L3Freq": SajModbusSensorEntityDescription(
        name="L3 frequency",
        key="l3freq",
        native_unit_of_measurement=FREQUENCY_HERTZ,
        icon="mdi:sine-wave",
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L3DCI": SajModbusSensorEntityDescription(
        name="L3 DC component",
        key="l3dci",
        native_unit_of_measurement=ELECTRIC_CURRENT_MILLIAMPERE,
        icon="mdi:current-dc",
        device_class=DEVICE_CLASS_CURRENT,
        state_class=STATE_CLASS_MEASUREMENT,
        entity_registry_enabled_default=False,
    ),
    "L3Power": SajModbusSensorEntityDescription(
        name="L3 power",
        key="l3power",
        native_unit_of_measurement=POWER_WATT,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_POWER,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    "L3PF": SajModbusSensorEntityDescription(
        name="L3 power factor",
        key="l3pf",
        device_class=DEVICE_CLASS_POWER_FACTOR,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    # Isolation resistance measurements.
    "ISO1": SajModbusSensorEntityDescription(
        name="PV1+_ISO",
        key="iso1",
        native_unit_of_measurement="kΩ",
        icon="mdi:omega",
        entity_registry_enabled_default=False,
    ),
    "ISO2": SajModbusSensorEntityDescription(
        name="PV2+_ISO",
        key="iso2",
        native_unit_of_measurement="kΩ",
        icon="mdi:omega",
        entity_registry_enabled_default=False,
    ),
    "ISO3": SajModbusSensorEntityDescription(
        name="PV3+_ISO",
        key="iso3",
        native_unit_of_measurement="kΩ",
        icon="mdi:omega",
        entity_registry_enabled_default=False,
    ),
    "ISO4": SajModbusSensorEntityDescription(
        name="PV__ISO",
        key="iso4",
        native_unit_of_measurement="kΩ",
        icon="mdi:omega",
        entity_registry_enabled_default=False,
    ),
    # Cumulative energy / runtime counters.
    "TodayEnergy": SajModbusSensorEntityDescription(
        name="Power generation on current day",
        key="todayenergy",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL_INCREASING,
    ),
    "MonthEnergy": SajModbusSensorEntityDescription(
        name="Power generation in current month",
        key="monthenergy",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL_INCREASING,
        entity_registry_enabled_default=False,
    ),
    "YearEnergy": SajModbusSensorEntityDescription(
        name="Power generation in current year",
        key="yearenergy",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL_INCREASING,
        entity_registry_enabled_default=False,
    ),
    "TotalEnergy": SajModbusSensorEntityDescription(
        name="Total power generation",
        key="totalenergy",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        icon="mdi:solar-power",
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL_INCREASING,
    ),
    "TodayHour": SajModbusSensorEntityDescription(
        name="Daily working hours",
        key="todayhour",
        native_unit_of_measurement=TIME_HOURS,
        icon="mdi:progress-clock",
        state_class=STATE_CLASS_TOTAL_INCREASING,
    ),
    "TotalHour": SajModbusSensorEntityDescription(
        name="Total working hours",
        key="totalhour",
        native_unit_of_measurement=TIME_HOURS,
        icon="mdi:progress-clock",
        state_class=STATE_CLASS_TOTAL_INCREASING,
    ),
    "ErrorCount": SajModbusSensorEntityDescription(
        name="Error count",
        key="errorcount",
        icon="mdi:counter",
        state_class=STATE_CLASS_TOTAL_INCREASING,
    ),
}
# Mapping of the MPVStatus register value to a human-readable state.
# (Name spelling kept as-is for compatibility with importing modules.)
DEVICE_STATUSSES = {
    0: "Not Connected",
    1: "Waiting",
    2: "Normal",
    3: "Error",
    4: "Upgrading",
}
# Fault-message lookup: outer key is the fault register index (0-2),
# inner key is the bitmask within that 32-bit register, value is the
# decoded human-readable message.
FAULT_MESSAGES = {
    0: {
        0x80000000: "Code 81: Lost Communication D<->C",
        0x00080000: "Code 48: Master Fan4 Error",
        0x00040000: "Code 47: Master Fan3 Error",
        0x00020000: "Code 46: Master Fan2 Error",
        0x00010000: "Code 45: Master Fan1 Error",
        0x00002000: "Code 43: Master HW Phase3 Current High",
        0x00001000: "Code 42: Master HW Phase2 Current High",
        0x00000800: "Code 41: Master HW Phase1 Current High",
        0x00000400: "Code 40: Master HWPV2 Current High",
        0x00000200: "Code 39: Master HWPV1 Current High",
        0x00000100: "Code 38: Master HWBus Voltage High",
        0x00000010: "Code 37: Master Phase3 Current High",
        0x00000008: "Code 36: Master Phase2 Current High",
        0x00000004: "Code 35: Master Phase1 Current High",
        0x00000002: "Code 34: Master Bus Voltage Low",
        0x00000001: "Code 33: Master Bus Voltage High",
    },
    1: {
        0x80000000: "Code 32: Master Bus Voltage Balance Error",
        0x40000000: "Code 31: Master ISO Error",
        0x20000000: "Code 30: Master Phase3 DCI Error",
        0x10000000: "Code 29: Master Phase2 DCI Error",
        0x08000000: "Code 28: Master Phase1 DCI Error",
        0x04000000: "Code 27: Master GFCI Error",
        0x02000000: "Code 26: Master Phase3 No Grid Error",
        0x01000000: "Code 25: Master Phase2 No Grid Error",
        0x00800000: "Code 24: Master Phase1 No Grid Error",
        0x00400000: "Code 23: Master Phase3 Frequency Low",
        0x00200000: "Code 22: Master Phase3 Frequency High",
        0x00100000: "Code 21: Master Phase2 Frequency Low",
        0x00080000: "Code 20: Master Phase2 Frequency High",
        0x00040000: "Code 19: Master Phase1 Frequency Low",
        0x00020000: "Code 18: Master Phase1 Frequency High",
        0x00010000: "Code 17: Master Phase3 Voltage 10Min High",
        0x00008000: "Code 16: Master Phase2 Voltage 10Min High",
        0x00004000: "Code 15: Master Phase1 Voltage 10Min High",
        0x00002000: "Code 14: Master Phase3 Voltage Low",
        0x00001000: "Code 13: Master Phase3 Voltage High",
        0x00000800: "Code 12: Master Phase2 Voltage Low",
        0x00000400: "Code 11: Master Phase2 Voltage High",
        0x00000200: "Code 10: Master Phase1 Voltage Low",
        0x00000100: "Code 09: Master Phase1 Voltage High",
        0x00000080: "Code 08: Master Current Sensor Error",
        0x00000040: "Code 07: Master DCI Device Error",
        0x00000020: "Code 06: Master GFCI Device Error",
        0x00000010: "Code 05: Master Lost Communication M<->S",
        0x00000008: "Code 04: Master Temperature Low Error",
        0x00000004: "Code 03: Master Temperature High Error",
        0x00000002: "Code 02: Master EEPROM Error",
        0x00000001: "Code 01: Master Relay Error",
    },
    2: {
        0x40000000: "Code 80: Slave PV Voltage High Error",
        0x20000000: "Code 79: Slave PV2 Current High Error",
        0x10000000: "Code 78: Slave PV1 Current High Error",
        0x08000000: "Code 77: Slave PV2 Voltage High Error",
        0x04000000: "Code 76: Slave PV1 Voltage High Error",
        0x02000000: "Code 75: Slave Phase3 No Grid Error",
        0x01000000: "Code 74: Slave Phase2 No Grid Error",
        0x00800000: "Code 73: Slave Phase1 No Grid Error",
        0x00400000: "Code 72: Slave Phase3 Frequency Low",
        0x00200000: "Code 71: Slave Phase3 Frequency High",
        0x00100000: "Code 70: Slave Phase2 Frequency Low",
        0x00080000: "Code 69: Slave Phase2 Frequency High",
        0x00040000: "Code 68: Slave Phase1 Frequency Low",
        0x00020000: "Code 67: Slave Phase1 Frequency High",
        0x00010000: "Code 66: Slave Phase3 Voltage Low",
        0x00008000: "Code 65: Slave Phase3 Voltage High",
        0x00004000: "Code 64: Slave Phase2 Voltage Low",
        0x00002000: "Code 63: Slave Phase2 Voltage High",
        0x00001000: "Code 62: Slave Phase1 Voltage Low",
        0x00000800: "Code 61: Slave Phase1 Voltage High",
        0x00000400: "Code 60: Slave Phase3 DCI Consis Error",
        0x00000200: "Code 59: Slave Phase2 DCI Consis Error",
        0x00000100: "Code 58: Slave Phase1 DCI Consis Error",
        0x00000080: "Code 57: Slave GFCI Consis Error",
        0x00000040: "Code 56: Slave Phase3 Frequency Consis Error",
        0x00000020: "Code 55: Slave Phase2 Frequency Consis Error",
        0x00000010: "Code 54: Slave Phase1 Frequency Consis Error",
        0x00000008: "Code 53: Slave Phase3 Voltage Consis Error",
        0x00000004: "Code 52: Slave Phase2 Voltage Consis Error",
        0x00000002: "Code 51: Slave Phase1 Voltage Consis Error",
        0x00000001: "Code 50: Slave Lost Communication between M<->S",
    },
}
|
# -*- coding: utf-8 -*-
import random
import string
import tarfile
from contextlib import contextmanager
import os
from easybackup.adapters.ftp import FtpRepositoryAdapter, LocalToFtp, FtpToLocal
from easybackup.adapters.local import (LocalBackupCreator,
LocalRepositoryAdapter)
from easybackup.core.backup_supervisor import BackupSupervisor
from easybackup.policy.backup import TimeIntervalBackupPolicy
from easybackup.core.repository import Repository
from easybackup.policy.cleanup import ClearAllCleanupPolicy
from easybackup.core.volume import Volume
from tests_core.mock import clock
# pytest fixture
from .utils import temp_directory
__author__ = "sne3ks"
__copyright__ = "sne3ks"
__license__ = "mit"

# Load FTP test credentials (FTP_HOST, FTP_USER, FTP_PASSWORD,
# FTP_BACKUP_DIRECTORY) from a local .env file into the module globals.
# NOTE(review): exec() runs arbitrary code from .env — acceptable only for a
# trusted, developer-local file; consider a dotenv parser instead.
exec(open('./.env').read(), globals())

# Connection settings consumed by FtpRepositoryAdapter(**FTP_CONF).
FTP_CONF = {
    'host': FTP_HOST,
    'user': FTP_USER,
    'password': FTP_PASSWORD,
    'directory' :FTP_BACKUP_DIRECTORY,
}
def randomfile(path):
    """Write 10000 random lowercase letters to ``path`` and return them.

    :param path: destination file path (created or truncated).
    :return: the generated content, so callers can later verify a restore.
    """
    content = ''.join(random.choice(string.ascii_lowercase) for i in range(10000))
    # Context manager guarantees the handle is closed even if write() raises
    # (the original only closed on the happy path).
    with open(path, 'w+') as file:
        file.write(content)
    return content
@contextmanager
def assert_backup_and_restore(source_file, archive_file):
    """Context manager asserting a backup/restore round trip.

    Writes random content to ``source_file``, yields so the caller can run
    the backup/restore machinery, then extracts ``archive_file`` into its own
    directory and asserts the restored file matches the original content.

    :param source_file: path of the file to create and later compare against.
    :param archive_file: path of the tar archive expected after restore.
    """
    file_content = randomfile(source_file)
    yield True
    # Close the archive deterministically — the original leaked the TarFile
    # handle by never calling close().
    with tarfile.open(archive_file) as tar:
        tar.extractall(os.path.dirname(archive_file))
    with open(source_file, "r") as extract:
        assert extract.read() == file_content
@clock('20200102_120000')
def test_backup_then_restore_local_file_to_ftp_repository(temp_directory):
    """End-to-end: back up a local file, push it to FTP, restore and verify."""
    with assert_backup_and_restore(
        source_file=temp_directory('production.txt'),
        archive_file=temp_directory('restore/easybackup-myproject-db-20200102_120000.tar')
    ):
        # Create a local backup of the source file.
        local_creator = LocalBackupCreator(
            source=temp_directory('production.txt'),
            backup_directory=temp_directory('backups')
        )
        ftp_adapter = FtpRepositoryAdapter(**FTP_CONF)
        volume = Volume(name='db', project='myproject')
        composer = BackupSupervisor(
            project='myproject',
            volume='db',
            creator=local_creator,
            repository=local_creator.target_repository,
            cleanup_policy=False,
            backup_policy=TimeIntervalBackupPolicy(10)
        )
        composer.run()
        # Push the freshly created backup to the FTP repository.
        link = LocalToFtp(
            source=local_creator.target_adapter(),
            target=ftp_adapter,
            volume=volume
        )
        link.synchronize()
        target = link.target_adapter
        backups = target.fetch_backups()
        backup = volume.match(backups)[0]
        # Pull the backup back from FTP into a local 'restore' directory; the
        # enclosing context manager verifies the extracted content on exit.
        local_restore = LocalRepositoryAdapter(directory=temp_directory('restore'))
        link = FtpToLocal(
            source=ftp_adapter,
            target=local_restore,
            volume=Volume(name='db', project='myproject')
        )
        link.copy_backup(backup)
        # Cleanup: empty the FTP repository used by this test.
        backups = link.source_repository.fetch()
        assert len(backups) > 0
        link.source_repository.cleanup(policy=ClearAllCleanupPolicy())
        backups = link.source_repository.fetch()
        assert len(backups) == 0
@clock('20200102_120000')
def test_delete_backup_on_ftp_repository(temp_directory):
    """Cleaning one project's backups on FTP must not touch another project's."""
    randomfile(temp_directory('production-A.txt'))
    randomfile(temp_directory('production-B.txt'))
    ftp_adapter = FtpRepositoryAdapter(**FTP_CONF)
    local_creator_A = LocalBackupCreator(
        source=temp_directory('production-A.txt'),
        backup_directory=temp_directory('backups')
    )
    local_creator_B = LocalBackupCreator(
        source=temp_directory('production-B.txt'),
        backup_directory=temp_directory('backups-twine')
    )
    composerA = BackupSupervisor(
        project='myprojectA',
        volume='db',
        creator=local_creator_A,
        repository=local_creator_A.target_repository,
        cleanup_policy=False,
        backup_policy=TimeIntervalBackupPolicy(10)
    )
    composerB = BackupSupervisor(
        project='myprojectB',
        volume='db',
        creator=local_creator_B,
        repository=local_creator_B.target_repository,
        cleanup_policy=ClearAllCleanupPolicy(),
        backup_policy=TimeIntervalBackupPolicy(10)
    )
    # Backup
    composerA.run()
    composerB.run()
    assert len(composerA.fetch()) == 1
    assert len(composerB.fetch()) == 1
    # Push both projects' backups to the same FTP repository.
    link = LocalToFtp(
        source=local_creator_A.target_adapter(),
        target=ftp_adapter
    )
    link.synchronize()
    link = LocalToFtp(
        source=local_creator_B.target_adapter(),
        target=ftp_adapter
    )
    link.synchronize()
    # Clear: remove only project A's backups; project B's must survive.
    rep = Repository(adapter=ftp_adapter)
    rep.cleanup_backups(local_creator_A.target_adapter().fetch_backups())
    backups = rep.fetch()
    assert len(Volume('db', 'myprojectA').match(backups)) == 0
    assert len(Volume('db', 'myprojectB').match(backups)) == 1
|
from ticket import Ticket
import conn
import shared
import ticket
import dialog
import gi
import os
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GObject
def main() -> None:
    """Build the GTK point-of-sale UI from the Glade file, wire handlers, run."""
    # NOTE(review): `dir` shadows the builtin; kept unchanged here.
    dir = os.path.dirname(os.path.abspath(__file__))
    builder = Gtk.Builder()
    builder.add_from_file(dir + '/app.glade')
    shared.builder = builder
    win = builder.get_object('win')
    stack = builder.get_object('stack')
    pages = builder.get_object('pages')
    # Refresh the selected ticket page's total whenever the tab changes.
    pages.connect('switch-page', lambda x, t, z: t.refresh_total())
    conn.BASE_URL = 'http://localhost:8080'
    # NOTE(review): hard-coded credentials — presumably development-only; verify.
    conn.login(1, 'password')
    conn.auto_refresh()
    #dialog.login()

    def set_visible(x, name):
        """Show the stack page called `name` (used by the navigation buttons)."""
        stack.set_visible_child_name(name)

    def add_product(x):
        """Add the product code typed in the entry to the current ticket page."""
        entry = builder.get_object('product_code')
        code = entry.get_text()
        pages.get_nth_page(pages.get_current_page()).add_product(code)
        entry.set_text('')

    def delete_ticket(x):
        """Delete the current ticket server-side and remove its tab."""
        curr = pages.get_current_page()
        t = pages.get_nth_page(curr)
        conn.tickets.delete(t.id)
        pages.remove_page(curr)

    def process_ticket(x):
        """Mark the current ticket paid; open a fresh one if none remain pending."""
        builder.get_object('charge').hide()
        curr = pages.get_current_page()
        t = pages.get_nth_page(curr)
        conn.tickets.update(t.id, {'status': 'paid'})
        # True when no pending ticket remains after this one was paid.
        nothing = conn.tickets.list(data={'status': 'pending', 'limit':1}).get('to') == None
        print(nothing)
        if nothing:
            x = conn.tickets.create({'name': 'Nuevo Ticket'})
            id = x.get('id')
            name = 'Nuevo Ticket'
            pages.append_page_menu(Ticket(id), Gtk.Label(label=name))
        pages.remove_page(curr)
        pages.show_all()

    def update_search(x):
        """Refresh the product-search dialog results."""
        dialog.update_search()

    def qty_change(x):
        """Recompute the total price when the quantity entry is edited."""
        if x.has_focus() == False:
            return
        val = x.get_text()
        other = builder.get_object('total_entry')
        try:
            num = float(val)
            fin = num*float(shared.curr_product.get('price'))
            other.set_text(f'{fin}')
        except:
            # Ignore non-numeric input while the user is still typing.
            pass

    def total_change(x):
        """Recompute the quantity when the total entry is edited."""
        if x.has_focus() == False:
            return
        val = x.get_text()
        other = builder.get_object('qty_entry')
        try:
            num = float(val)
            fin = num/float(shared.curr_product.get('price'))
            other.set_text(f'{fin}')
        except:
            # Ignore non-numeric input (and bad prices) while typing.
            pass

    # Columns for the product-search results view.
    treeview = builder.get_object('search_treeview')
    for i, x in enumerate(['Código', 'Descripcion', 'Precio', 'Existencia']):
        renderer = Gtk.CellRendererText()
        column = Gtk.TreeViewColumn(x, cell_renderer=renderer, text=i)
        treeview.append_column(column)

    # Load pending tickets; ensure at least one ticket tab exists.
    tickets = conn.tickets.list(data={'status': 'pending'}).get('data')
    if len(tickets) == 0:
        tickets = [conn.tickets.create({'name': 'Nuevo Ticket'})]
        tickets[0]['name'] = 'Nuevo Ticket'

    # Map Glade signal names to handler callables (tuples carry extra args).
    handlers = {
        'sales': (set_visible, 'sales'),
        'clients': (set_visible, 'clients'),
        'stock': (set_visible, 'stock'),
        'products': (set_visible, 'products'),
        'config': (set_visible, 'config'),
        'summary': (set_visible, 'summary'),
        'exit': Gtk.main_quit,
        'addProduct': add_product,
        'search': lambda x: dialog.search(),
        'transaction': lambda x: dialog.transaction(),
        'delete_ticket': delete_ticket,
        'process': process_ticket,
        'charge': ticket.charge,
        'update_search': update_search,
        'qty_change': qty_change,
        'total_change': total_change
    }
    # One notebook page per pending ticket.
    for x in tickets:
        id = x.get('id')
        name = x.get('name')
        pages.append_page_menu(Ticket(id), Gtk.Label(label=name))
    builder.connect_signals(handlers)
    win.show_all()
    win.maximize()
    Gtk.main()


if __name__ == '__main__':
    main()
|
from __future__ import print_function
import argparse
import os
import time
from datetime import datetime
import json
from platform import system
import logging
# Disable verbose TensorFlow looging...
# See https://github.com/LucaCappelletti94/silence_tensorflow
os.environ["KMP_AFFINITY"] = "noverbose"
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
tf.get_logger().setLevel('ERROR')
tf.autograph.set_verbosity(3)
import numpy as np
import librosa
from natsort import natsorted
from samplernn import SampleRNN
from dataset import (get_dataset, get_dataset_filenames_split)
from callbacks import (TrainingStepCallback, ModelCheckpointCallback)
# https://github.com/ibab/tensorflow-wavenet/issues/255
LOGDIR_ROOT = 'logdir' if system()=='Windows' else './logdir'
OUTDIR = './generated'
CONFIG_FILE = './default.config.json'

# Default training hyper-parameters (all overridable on the command line).
NUM_EPOCHS = 100
BATCH_SIZE = 64
LEARNING_RATE = 0.001
MOMENTUM = 0.9
SILENCE_THRESHOLD = None
OUTPUT_DUR = 3 # Duration of generated audio in seconds
CHECKPOINT_EVERY = 1
CHECKPOINT_POLICY = 'Always' # 'Always' or 'Best'
MAX_CHECKPOINTS = 5
RESUME = True
TRACKED_METRIC = 'val_loss'
EARLY_STOPPING_PATIENCE = 3
GENERATE = True
SAMPLE_RATE = 16000 # Sample rate of generated audio
SAMPLING_TEMPERATURE = [0.95]
SEED_OFFSET = 0
MAX_GENERATE_PER_EPOCH = 1
VAL_FRAC = 0.1
def get_arguments():
    """Parse and return the command-line arguments for a training session."""

    def check_bool(value):
        # Accepts any case-insensitive prefix of 'True'/'False' ('t', 'FA', ...).
        # NOTE(review): an empty string matches 'TRUE'.startswith('') and so
        # parses as True — confirm this is intended.
        val = str(value).upper()
        if 'TRUE'.startswith(val):
            return True
        elif 'FALSE'.startswith(val):
            return False
        else:
            raise ValueError('Argument is neither `True` nor `False`')

    def check_positive(value):
        # Validator for strictly positive integer arguments.
        val = int(value)
        if val < 1:
            raise argparse.ArgumentTypeError("%s is not positive" % value)
        return val

    def check_max_checkpoints(value):
        # 'None' (any case) disables the checkpoint cap; otherwise must be >= 1.
        if str(value).upper() != 'NONE':
            return check_positive(value)
        else:
            return None

    parser = argparse.ArgumentParser(description='PRiSM TensorFlow SampleRNN')
    parser.add_argument('--data_dir', type=str, required=True,
                        help='Path to the directory containing the training data')
    parser.add_argument('--id', type=str, default='default', help='Id for the current training session')
    parser.add_argument('--verbose', type=check_bool,
                        help='Whether to print training step output to a new line each time (the default), or overwrite the last output', default=True)
    parser.add_argument('--batch_size', type=check_positive, default=BATCH_SIZE, help='Size of the mini-batch')
    parser.add_argument('--logdir_root', type=str, default=LOGDIR_ROOT,
                        help='Root directory for training log files')
    parser.add_argument('--config_file', type=str, default=CONFIG_FILE,
                        help='Path to the JSON config for the model')
    parser.add_argument('--output_dir', type=str, default=OUTDIR,
                        help='Path to the directory for audio generated during training')
    parser.add_argument('--output_file_dur', type=check_positive, default=OUTPUT_DUR,
                        help='Duration of generated audio files (in seconds)')
    parser.add_argument('--sample_rate', type=check_positive, default=SAMPLE_RATE,
                        help='Sample rate of the generated audio')
    parser.add_argument('--num_epochs', type=check_positive, default=NUM_EPOCHS,
                        help='Number of training epochs')
    parser.add_argument('--optimizer', type=str, default='adam', choices=optimizer_factory.keys(),
                        help='Type of training optimizer to use')
    parser.add_argument('--learning_rate', type=float, default=LEARNING_RATE,
                        help='Learning rate of training')
    parser.add_argument('--reduce_learning_rate_after', type=check_positive, help='Exponentially reduce learning rate after this many epochs')
    parser.add_argument('--momentum', type=float, default=MOMENTUM,
                        help='Optimizer momentum')
    parser.add_argument('--monitor', type=str, default=TRACKED_METRIC, choices=['loss', 'accuracy', 'val_loss', 'val_accuracy'],
                        help='Metric to track during training')
    parser.add_argument('--checkpoint_every', type=check_positive, default=CHECKPOINT_EVERY,
                        help='Interval (in epochs) at which to generate a checkpoint file')
    parser.add_argument('--checkpoint_policy', type=str, default=CHECKPOINT_POLICY, choices=['Always', 'Best'],
                        help='Policy for saving checkpoints')
    parser.add_argument('--max_checkpoints', type=check_max_checkpoints, default=MAX_CHECKPOINTS,
                        help='Number of checkpoints to keep on disk while training. Defaults to 5. Pass None to keep all checkpoints.')
    parser.add_argument('--resume', type=check_bool, default=RESUME,
                        help='Whether to resume training. When True the latest checkpoint from any previous runs will be used, unless a specific checkpoint is passed using the resume_from parameter.')
    parser.add_argument('--resume_from', type=str, help='Checkpoint from which to resume training. Ignored when resume is False.')
    parser.add_argument('--early_stopping_patience', type=check_positive, default=EARLY_STOPPING_PATIENCE,
                        help='Number of epochs with no improvement after which training will be stopped.')
    parser.add_argument('--generate', type=check_bool, default=GENERATE,
                        help='Whether to generate audio output during training. Generation is aligned with checkpoints, meaning that audio is only generated after a new checkpoint has been created.')
    parser.add_argument('--max_generate_per_epoch', type=check_positive, default=MAX_GENERATE_PER_EPOCH,
                        help='Maximum number of output files to generate at the end of each epoch')
    parser.add_argument('--temperature', type=float, default=SAMPLING_TEMPERATURE, nargs='+',
                        help='Sampling temperature for generated audio')
    parser.add_argument('--seed', type=str, help='Path to audio for seeding')
    parser.add_argument('--seed_offset', type=int, default=SEED_OFFSET,
                        help='Starting offset of the seed audio')
    parser.add_argument('--num_val_batches', type=int, default=1,
                        help='Number of batches to reserve for validation. DEPRECATED: This parameter now has no effect, it is retained for backward-compatibility only and will be removed in a future release. Use val_frac instead.')
    # We use a '%' sign in the help string, which argparse complains about if not escaped with another '%' sign. See: https://stackoverflow.com/a/21168121/795131.
    parser.add_argument('--val_frac', type=float, default=VAL_FRAC,
                        help='Fraction of the dataset to be set aside for validation, rounded to the nearest multiple of the batch size. Defaults to 0.1, or 10%%.')
    return parser.parse_args()
# Optimizer factory adapted from WaveNet
# https://github.com/ibab/tensorflow-wavenet/blob/master/wavenet/ops.py
def create_adam_optimizer(learning_rate, momentum):
    """Create an Adam optimizer. `momentum` is accepted only to keep a uniform
    factory signature and is deliberately unused here."""
    return tf.optimizers.Adam(learning_rate=learning_rate,
                              epsilon=1e-4)


def create_sgd_optimizer(learning_rate, momentum):
    """Create a momentum SGD optimizer."""
    return tf.optimizers.SGD(learning_rate=learning_rate,
                             momentum=momentum)


def create_rmsprop_optimizer(learning_rate, momentum):
    """Create an RMSprop optimizer."""
    return tf.optimizers.RMSprop(learning_rate=learning_rate,
                                 momentum=momentum,
                                 epsilon=1e-5)

# Maps the --optimizer CLI choice to the matching factory function.
optimizer_factory = {'adam': create_adam_optimizer,
                     'sgd': create_sgd_optimizer,
                     'rmsprop': create_rmsprop_optimizer}
def create_model(batch_size, config):
    """Instantiate a SampleRNN model from a JSON-derived config dict.

    :param batch_size: training mini-batch size baked into the model.
    :param config: dict with the model hyper-parameters.
    :raises AssertionError: when the frame-size / seq_len constraints of the
        original SampleRNN implementation are violated.
    """
    seq_len = config.get('seq_len')
    frame_sizes = config.get('frame_sizes')
    q_type = config.get('q_type')
    # Mu-law quantization always uses 256 levels; otherwise read the config.
    q_levels = 256 if q_type == 'mu-law' else config.get('q_levels')

    # The following model configuration interdependencies are sourced from the
    # original implementation:
    # https://github.com/soroushmehr/sampleRNN_ICLR2017/blob/master/models/three_tier/three_tier.py
    assert frame_sizes[0] < frame_sizes[1], 'Frame sizes should be specified in ascending order'
    assert seq_len % frame_sizes[1] == 0, 'seq_len should be evenly divisible by tier 2 frame size'
    assert frame_sizes[1] % frame_sizes[0] == 0, 'Tier 2 frame size should be evenly divisible by tier 1 frame size'

    model_kwargs = dict(
        batch_size=batch_size,
        frame_sizes=frame_sizes,
        seq_len=seq_len,
        q_type=q_type,
        q_levels=q_levels,
        dim=config.get('dim'),
        rnn_type=config.get('rnn_type'),
        num_rnn_layers=config.get('num_rnn_layers'),
        emb_size=config.get('emb_size'),
        skip_conn=config.get('skip_conn'),
        rnn_dropout=config.get('rnn_dropout'),
    )
    return SampleRNN(**model_kwargs)
def get_latest_checkpoint(logdir):
    """Return the path of the most recent checkpoint under ``logdir``, or None.

    Run directories are named with a '%d.%m.%Y_%H.%M.%S' timestamp; they are
    scanned newest-first until one actually contains a checkpoint.

    :param logdir: session log directory containing timestamped run directories.
    """
    rundir_datetimes = []
    for f in os.listdir(logdir):
        if os.path.isdir(os.path.join(logdir, f)):
            # Skip entries that don't follow the run-directory naming scheme.
            # Bugfix: the original wrapped the whole loop in try/except, so a
            # single stray directory aborted the scan and hid all later runs.
            try:
                dt = datetime.strptime(f, '%d.%m.%Y_%H.%M.%S')
            except ValueError as err:
                print(err)
                continue
            rundir_datetimes.append(dt)
    latest_checkpoint = None
    if len(rundir_datetimes) > 0:
        rundir_datetimes = natsorted(rundir_datetimes, reverse=True)
        i = 0
        # Newest run directories first; stop at the first with a checkpoint.
        while (i < len(rundir_datetimes)) and (latest_checkpoint is None):
            rundir = rundir_datetimes[i].strftime('%d.%m.%Y_%H.%M.%S')
            latest_checkpoint = tf.train.latest_checkpoint(os.path.join(logdir, rundir))
            i += 1
    return latest_checkpoint
def get_initial_epoch(ckpt_path):
    """Derive the epoch number encoded in a checkpoint filename.

    Checkpoints are named 'model.ckpt-<epoch>' (see ModelCheckpointCallback's
    filepath in main()).

    :param ckpt_path: checkpoint path, or None/'' when starting from scratch.
    :return: the epoch to resume from, 0 when there is no checkpoint.
    """
    if ckpt_path:
        # Bugfix: use basename() instead of splitting on '/', which broke on
        # Windows-style paths with backslash separators.
        epoch = int(os.path.basename(ckpt_path).split('-')[-1])
    else:
        epoch = 0
    return epoch
def main():
    """Entry point: parse args, build and compile the model, then train."""
    args = get_arguments()
    train_split, val_split = get_dataset_filenames_split(
        args.data_dir, args.val_frac, args.batch_size)
    # Create training session directories
    logdir = os.path.join(args.logdir_root, args.id)
    if not os.path.exists(logdir):
        os.makedirs(logdir)
    generate_dir = os.path.join(args.output_dir, args.id)
    if not os.path.exists(generate_dir):
        os.makedirs(generate_dir)
    # Time-stamped directory for the current run, which will be used to store
    # checkpoints and summary files. We don't need to explicitly create it as we
    # pass the name to the TensorBoard callback, which creates it for us.
    rundir = '{}/{}'.format(logdir, datetime.now().strftime('%d.%m.%Y_%H.%M.%S'))
    latest_checkpoint = get_latest_checkpoint(logdir)
    # Load model configuration
    with open(args.config_file, 'r') as config_file:
        config = json.load(config_file)
    # Create the model
    model = create_model(args.batch_size, config)
    seq_len = model.seq_len
    overlap = model.big_frame_size
    q_type = model.q_type
    q_levels = model.q_levels
    # Optimizer
    opt = optimizer_factory[args.optimizer](
        learning_rate=args.learning_rate,
        momentum=args.momentum,
    )
    # Compile the model
    compute_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
    train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy')
    model.compile(optimizer=opt, loss=compute_loss, metrics=[train_accuracy])
    # Resume from an explicit checkpoint if given, else the latest one found.
    resume_from = (args.resume_from or latest_checkpoint) if args.resume==True else None
    initial_epoch = get_initial_epoch(resume_from)
    # Datasets (training and validation)
    num_epochs = args.num_epochs-initial_epoch
    #val_batch_size = min(args.batch_size, len(val_split))
    train_dataset = get_dataset(train_split, num_epochs, args.batch_size, seq_len, overlap,
                                drop_remainder=True, q_type=q_type, q_levels=q_levels)
    val_dataset = get_dataset(val_split, 1, args.batch_size, seq_len, overlap, shuffle=False,
                              drop_remainder=True, q_type=q_type, q_levels=q_levels)
    # This computes subseqs per batch...
    # NOTE(review): step count assumes every training file has roughly the
    # length of the first one — confirm for heterogeneous datasets.
    samples0, _ = librosa.load(train_split[0], sr=None, mono=True)
    steps_per_batch = int(np.floor(len(samples0) / float(seq_len)))
    steps_per_epoch = len(train_split) // args.batch_size * steps_per_batch
    # Arguments passed to the generate function called
    # by the ModelCheckpointCallback...
    generation_args = {
        'generate_dir' : generate_dir,
        'id' : args.id,
        'config' : config,
        'num_seqs' : args.max_generate_per_epoch,
        'dur' : args.output_file_dur,
        'sample_rate' : args.sample_rate,
        'temperature' : args.temperature,
        'seed' : args.seed,
        'seed_offset' : args.seed_offset
    }
    # Callbacks
    callbacks = [
        TrainingStepCallback(
            model = model,
            num_epochs = args.num_epochs,
            steps_per_epoch = steps_per_epoch,
            steps_per_batch = steps_per_batch,
            resume_from = resume_from,
            verbose = args.verbose),
        ModelCheckpointCallback(
            dir = rundir,
            max_to_keep = args.max_checkpoints,
            generate = args.generate,
            generation_args = generation_args,
            filepath = '{0}/model.ckpt-{{epoch}}'.format(rundir),
            monitor = args.monitor,
            save_weights_only = True,
            save_best_only = args.checkpoint_policy.lower()=='best',
            save_freq = args.checkpoint_every * steps_per_epoch),
        tf.keras.callbacks.EarlyStopping(
            monitor = args.monitor,
            patience = args.early_stopping_patience),
        tf.keras.callbacks.TensorBoard(
            log_dir = rundir, update_freq = 50)
    ]
    # Optional exponential learning-rate decay after a fixed number of epochs.
    reduce_lr_after = args.reduce_learning_rate_after
    if reduce_lr_after and reduce_lr_after > 0:
        def scheduler(epoch, learning_rate):
            if epoch < reduce_lr_after:
                return learning_rate
            else:
                return learning_rate * tf.math.exp(-0.1)
        callbacks.append(
            tf.keras.callbacks.LearningRateScheduler(scheduler)
        )
    # Train
    # One forward pass on random data builds the model's variables before
    # weights are restored / training starts.
    init_data = np.random.randint(0, model.q_levels, (model.batch_size, overlap + model.seq_len, 1))
    model(init_data)
    try:
        model.fit(
            train_dataset,
            epochs=args.num_epochs,
            initial_epoch=initial_epoch,
            steps_per_epoch=steps_per_epoch,
            shuffle=False,
            callbacks=callbacks,
            validation_data=val_dataset,
            verbose=0,
        )
    except KeyboardInterrupt:
        # Allow clean manual interruption of long training runs.
        print('\n')
        print('Keyboard interrupt')
        print()


if __name__ == '__main__':
    main()
|
""" Module for credential based GUI elements """
import pyperclip
from PyQt5.Qt import QHBoxLayout, QVBoxLayout
from PyQt5.QtWidgets import QMessageBox, QPlainTextEdit, QPushButton
from incubus import IncubusFactory
from data.database import Account, System
from gui.toolkit import WidgetState
class CredentialWidget: # pylint: disable=R0902
    """Widget group for viewing, masking, copying and saving a credential."""

    def __init__(self, state: WidgetState):
        self._state = state
        self._credential = QPlainTextEdit(self._state.main_widget)
        self._selected_account = Account()
        self._selected_system = System()
        self._internal_value = ""
        self._displayed_value = ""
        self._masked = True

        # Action buttons: unmask/mask, copy to clipboard, persist to database.
        btn_toggle = QPushButton("Toggle")
        btn_toggle.clicked.connect(self._toggle_clicked)
        btn_copy = QPushButton("Copy")
        btn_copy.clicked.connect(self._copy_clicked)
        btn_save = QPushButton("Save")
        btn_save.clicked.connect(self._credential_save_clicked)

        button_row = QHBoxLayout()
        for button in (btn_toggle, btn_copy, btn_save):
            button_row.addWidget(button)

        outer = QVBoxLayout()
        outer.addWidget(self._credential)
        outer.addLayout(button_row)
        self._state.main_layout.addLayout(outer)

        self._incubus = IncubusFactory.get_instance()

    @property
    def masked(self) -> bool:
        """Whether the credential text is currently hidden."""
        return self._masked

    @masked.setter
    def masked(self, new_masked: bool):
        """Set masking and refresh the displayed text."""
        self._masked = new_masked
        self._build_displayed_value()

    @property
    def selected_account(self) -> Account:
        """Currently selected account."""
        return self._selected_account

    @selected_account.setter
    def selected_account(self, account: Account):
        """Select an account and display its stored credential."""
        self._selected_account = account
        self._repaint_credential()

    @property
    def selected_system(self) -> System:
        """Currently selected system."""
        return self._selected_system

    @selected_system.setter
    def selected_system(self, system: System):
        """Select a system; resets the account selection."""
        self._selected_system = system
        self.selected_account = Account()

    @property
    def value(self) -> str:
        """Plain-text credential value."""
        return self._internal_value

    @value.setter
    def value(self, val: str):
        """Store the credential and refresh the displayed text."""
        self._internal_value = val
        self._build_displayed_value()

    def _copy_clicked(self):
        """Copy the (unmasked) credential to the system clipboard."""
        self._incubus.user_event()
        pyperclip.copy(self.value)

    def _build_displayed_value(self):
        """Recompute on-screen text: one asterisk per character when masked."""
        if self.masked:
            self._displayed_value = "*" * len(self._internal_value)
        else:
            self._displayed_value = self._internal_value
        self._credential.setPlainText(self._displayed_value)

    def _credential_save_clicked(self):
        """Persist the edited credential; refuses to save while masked."""
        self._incubus.user_event()
        if self.masked:
            QMessageBox(QMessageBox.Critical, "Error", "Unmask before saving").exec_()
            return
        self.value = self._credential.toPlainText()
        self._selected_account.credential = self.value
        self._state.database.update_account(self._selected_system.name, self._selected_account)

    def _repaint_credential(self):
        """Show the selected account's stored credential."""
        self.value = self._selected_account.credential

    def _toggle_clicked(self):
        """Flip the masked state."""
        self._incubus.user_event()
        self.masked = not self.masked
|
import logging
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Event, Output
from metaswitch_tinder.app import app
from metaswitch_tinder.app_structure import href
from metaswitch_tinder.components import session
from metaswitch_tinder.components.about import about_div
from metaswitch_tinder.components.grid import create_equal_row
log = logging.getLogger(__name__)

# Module name with dots replaced, so it can be embedded in component ids.
NAME = __name__.replace(".", "_")

# Navigation targets and the page-unique id of the sign-in button.
im_a_mentee = "im_a_mentee"
im_a_mentor = "im_a_mentor"
signin = f"signin-{NAME}"
def layout():
    """Landing page: title, blurb, mentor/mentee signup links and sign-in."""
    return html.Div(
        [
            html.H1("Metaswitch Tinder", className="cover-heading"),
            html.P(
                "Metaswitch Tinder is a match-making service for informal mentoring "
                "and unofficial pastoral support "
                "at Metaswitch.",
                className="lead",
            ),
            # Two side-by-side signup entry points.
            create_equal_row(
                [
                    dcc.Link(
                        "Become a mentor!",
                        href=href(__name__, im_a_mentor),
                        className="btn btn-lg btn-info",
                    ),
                    dcc.Link(
                        "Become a mentee!",
                        href=href(__name__, im_a_mentee),
                        className="btn btn-lg btn-primary",
                    ),
                ]
            ),
            html.Br(),
            html.Br(),
            html.A(
                "I have an account - sign in.",
                href="/login",
                className="btn btn-primary",
                id=signin,
            ),
            html.Br(),
            html.Br(),
            about_div(),
            # Hidden target for the sign-in click callback below.
            html.Div(id="dummy", hidden=True),
        ],
        className="container text-center",
    )
@app.callback(Output("dummy", "children"), [], [], [Event(signin, "click")])
def submit_signup_information():
    """On sign-in click, remember this page as the post-login redirect target.

    NOTE(review): dash.dependencies.Event was removed in Dash 1.0 — confirm
    the pinned Dash version still supports Event-based callbacks.
    """
    log.debug("%s - Signin clicked", NAME)
    session.set_post_login_redirect(href(__name__, signin))
|
from conans import ConanFile, CMake, tools
import os, shutil
class JsoncppConan(ConanFile):
    """Conan recipe for the jsoncpp JSON parser library."""
    name = "jsoncpp"
    license = "MIT"
    homepage = "https://github.com/open-source-parsers/jsoncpp"
    url = "https://github.com/conan-io/conan-center-index"
    topics = ("conan", "json", "parser", "config")
    description = "A C++ library for interacting with JSON."
    settings = "os", "compiler", "arch", "build_type"
    options = {"shared": [True, False], "fPIC": [True, False]}
    default_options = {"shared": False, "fPIC": True}
    exports_sources = "CMakeLists.txt",  # trailing comma: one-element tuple
    generators = "cmake",
    _source_subfolder = "source_subfolder"

    def configure(self):
        # fPIC is meaningless on Windows and implied for shared builds.
        if self.settings.os == "Windows" or self.options.shared:
            del self.options.fPIC

    def source(self):
        # Download/unpack the release tarball and normalize the folder name.
        tools.get(**self.conan_data["sources"][self.version])
        extracted_dir = "jsoncpp-{}".format(self.version)
        os.rename(extracted_dir, self._source_subfolder)

    def _patch_sources(self):
        """Adjust upstream CMake/headers before building."""
        tools.replace_in_file(os.path.join(self._source_subfolder, "CMakeLists.txt"),
                              "${jsoncpp_SOURCE_DIR}",
                              "${JSONCPP_SOURCE_DIR}")
        # MSVC 2012 (v11) does not support explicit conversion operators.
        if self.settings.compiler == "Visual Studio" and self.settings.compiler.version == "11":
            tools.replace_in_file(os.path.join(self._source_subfolder, "include", "json", "value.h"),
                                  "explicit operator bool()",
                                  "operator bool()")
        # Honour fPIC=False for static non-Windows builds.
        if self.settings.os != "Windows" and not self.options.shared and not self.options.fPIC:
            tools.replace_in_file(os.path.join(self._source_subfolder, "src", "lib_json", "CMakeLists.txt"),
                                  "set_target_properties( jsoncpp_lib PROPERTIES POSITION_INDEPENDENT_CODE ON)",
                                  "set_target_properties( jsoncpp_lib PROPERTIES POSITION_INDEPENDENT_CODE OFF)")
        tools.replace_in_file(os.path.join(self._source_subfolder, "src", "lib_json", "CMakeLists.txt"),
                              "$<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>",
                              "")

    def _configure_cmake(self):
        cmake = CMake(self)
        cmake.definitions["JSONCPP_WITH_TESTS"] = False
        cmake.definitions["JSONCPP_WITH_CMAKE_PACKAGE"] = False
        cmake.definitions["JSONCPP_WITH_STRICT_ISO"] = False
        cmake.definitions["JSONCPP_WITH_PKGCONFIG_SUPPORT"] = False
        cmake.configure()
        return cmake

    def build(self):
        self._patch_sources()
        cmake = self._configure_cmake()
        cmake.build()

    def package(self):
        self.copy("LICENSE", src=self._source_subfolder, dst="licenses")
        cmake = self._configure_cmake()
        cmake.install()

    def package_info(self):
        self.cpp_info.name = "JsonCpp"
        self.cpp_info.libs = tools.collect_libs(self)
|
class MetricDetailsList:
    """Collection of MetricDetails, readable sorted by severity."""

    def __init__(self, ascending=True):
        """
        Create MetricDetailsList object
        :param ascending: False if "the larger, the more important" applies to the sorting attribute. Otherwise True.
        """
        self._details_list = []
        self.ascending = ascending
        # How many items get_readable_list() returns; -1 means "all".
        self.readable_list_length = 0

    def add(self, details):
        """
        Add new MetricDetails into the list.
        :param details: MetricDetails
        :raises TypeError: if details is not a MetricDetails instance
        """
        if not isinstance(details, MetricDetails):
            raise TypeError("Cannot add the object to the MetricDetailsList - must be a MetricDetails object.")
        self._details_list.append(details)

    def length(self):
        """
        Get a number of elements in the list
        :return: a number of elements in the list
        """
        return len(self._details_list)

    def set_readable_list_length(self, length):
        """Set how many items get_readable_list() should return (-1 for all)."""
        self.readable_list_length = length

    def get_readable_list(self):
        """
        Get the most important MetricDetails, sorted by severity. The number of
        items returned is controlled by readable_list_length (-1 for all).
        :return: list of most important MetricDetails
        :raises ValueError: if readable_list_length is negative (other than -1)
        """
        # Bugfix: the original built (but never raised) a ValueError in an
        # unreachable branch, so lengths like -2 silently mis-sliced the list.
        if self.readable_list_length < -1:
            raise ValueError(f"Invalid MetricDetailsList length: {self.readable_list_length}.")
        sorted_list = sorted(self._details_list, key=lambda md: md.sort_attr, reverse=self.ascending)
        if self.readable_list_length == -1 or self.readable_list_length >= len(sorted_list):
            return sorted_list
        return sorted_list[0:self.readable_list_length]
class MetricDetails:
    def __init__(self, entity_id=None, detail_string="", sort_attr=0, subdetails=None):
        """
        Class holding details of a metric
        :param entity_id: id of an entity described by the metric (e.g. Stage or Executor)
        :param detail_string: String containing details about the metric (a one liner)
        :param sort_attr: Attribute determination what contributes the most to the severity
        :param subdetails: List of strings with additional details (may occupy multiple lines)
        """
        self.id = entity_id
        self.detail_string = detail_string
        self.sort_attr = sort_attr
        # Bugfix: the original used a mutable default argument (subdetails=[]),
        # so every default-constructed instance shared one list and appends
        # leaked across instances. Create a fresh list per instance instead.
        self.subdetails = [] if subdetails is None else subdetails
|
from collections import defaultdict
from conda_build.config import Config
from conda_build.metadata import MetaData
def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0,
                       dependencies=(), home=None, license_name=None, summary=None, config=None):
    """Build a metapackage (a package with metadata and run dependencies only).

    :param name: package name.
    :param version: package version string.
    :param entry_points: iterable of entry-point specs for the build section.
    :param build_string: explicit build string; MetaData does the auto stuff
        when it is None.
    :param build_number: build number for the build section.
    :param dependencies: iterable of run requirements.
    :param home/license_name/summary: 'about' section metadata.
    :param config: conda-build Config; a default one is created when falsy.
    :return: the result of conda_build.api.build for the assembled recipe.
    """
    # local import to avoid circular import, we provide create_metapackage in api
    from conda_build.api import build

    if not config:
        config = Config()

    meta_dict = {
        'package': {
            'name': name,
            'version': version,
        },
        'build': {
            'number': build_number,
            'entry_points': entry_points,
            # MetaData does the auto stuff if the build string is None
            'string': build_string,
        },
        'requirements': {
            'run': dependencies,
        },
        'about': {
            'home': home,
            'license': license_name,
            'summary': summary,
        },
    }
    metadata = MetaData.fromdict(meta_dict, config=config)
    config.compute_build_id(metadata.name())
    return build(metadata, config=config, need_source_download=False)
|
from django.contrib import admin

from currency_exchanger.users.models import User

# Bugfix: the original called `admin.register(User)` as a bare statement.
# `admin.register(...)` is a decorator *factory* meant to wrap a ModelAdmin
# class; invoking it without applying the returned decorator registers
# nothing. Register the model with the default admin site instead.
admin.site.register(User)
|
# -*- coding: utf-8 -*-
"""This module contains enough knowledge about `struct` format strings to
figure out size and position of fields.
"""
import re
import struct
from struct import Struct
from .utils import split_fields
class Field(object):
    """Representation of a field defined in a `struct` format string.
    (should only be created from :class:`Layout`).
    """
    def __init__(self, **kw):
        # defaults
        self.name = ""        # optional field name
        self.index = 0        # position of the field within the record
        self.position = 0     # byte offset of the field within the record
        self.layout = ""      # this field's struct format, e.g. '12x'
        self.count = 0        # repeat/length prefix of the format (0 if absent)
        self.format = 'x'     # single struct format character
        self.type = ""        # human-readable C type name
        self.size = 0         # total size of the field in bytes
        # should padding bytes be stripped when pretty printing
        self.strip = None
        self.__dict__.update(kw)
        if self.strip is None:
            # By default only 's' (char[]) fields are NUL-stripped.
            self.strip = self.format == 's'

    def get_value(self, data):
        """Get this field's value from `data`.

        :param data: any object supporting the buffer protocol (bytes,
            bytearray, mmap, ...).

        Bugfix: the buffer was previously passed through str(), which corrupts
        binary input on Python 3 (str(b'..') yields the literal "b'..'").
        unpack_from accepts buffers directly, so no conversion is needed.
        """
        rec = struct.unpack_from(
            '=' + self.layout,
            data, self.position
        )
        if self.format == 's' and self.strip:
            value = rec[0]
            # 's' unpacks to bytes on Python 3 (str on Python 2); strip the
            # matching kind of NUL padding.
            return value.rstrip(b'\0' if isinstance(value, bytes) else '\0')
        return rec[0]

    def set_value(self, data, value):
        "Set this field to `value` in `data` (a writable buffer, e.g. bytearray)."
        struct.pack_into('=' + self.layout, data, self.position, value)
        return value

    def __repr__(self):
        return '<Field:%s [%d] "%s" count=%d format=%r type="%s" size=%d>' % (
            self.name,
            self.index,
            self.layout,
            self.count,
            self.format,
            self.type,
            self.size
        )

    def __str__(self):
        return "#%-2d %14s %5s = %d" % (self.index, self.name, self.layout, self.size)

    def __len__(self):
        # A field's length is its size in bytes.
        return self.size
class Layout(object):
    """Record layout.
    Usage::
        record_layout = Layout(
            '=12x?3sQ16s16s68s128sHcc',
            'pad',
            'local',
            'rectype',
            'timestamp',
            'salt',
            'digest',
            'key',
            'data',
            'chksum',
            'cr',
            'nl',
            name="Record"
        )
    """
    #: types corresponding to struct character codes
    struct_field_types = {
        'x': "pad byte",
        'c': "char",
        'b': "signed char",
        'B': "unsigned char",
        '?': "_Bool",
        'h': "short",
        'H': "unsigned short",
        'i': "int",
        'I': "unsigned int",
        'l': "long",
        'L': "unsigned long",
        'q': "long long",
        'Q': "unsigned long long",
        'n': "ssize_t",
        'N': "size_t",
        'f': "float",
        'd': "double",
        's': "char[]",
        'p': "char[]",
        'P': "void *",
    }
    #: byte sizes corresponding to struct character codes
    # NOTE(review): 'n' is listed as 8 while 'N' is 4, and 'P' as 4 — these
    # sizes are platform-dependent and look inconsistent; confirm against the
    # target platform before relying on them.
    struct_field_sizes = {
        'x': 1,
        'c': 1,
        'b': 1,
        'B': 1,
        '?': 1,
        'h': 2,
        'H': 2,
        'i': 4,
        'I': 4,
        'l': 4,
        'L': 4,
        'q': 8,
        'Q': 8,
        'n': 8,
        'N': 4,
        'f': 4,
        'd': 8,
        's': 1,
        'p': 1,
        'P': 4,
    }
    #: legal struct format string record prefixes
    record_prefix = {
        '@': "native aligned",
        '=': "native",
        '<': "little-endian",
        '>': "big-endian",
        '!': "network-endian",
    }
    #: regex matching one field in a struct format string
    layoutre = re.compile(r'''
        (?P<field>\d*[xcbB?hHiIlLqQnNfdspP])
    ''', re.VERBOSE)

    def __init__(self, layout, *names, **kw):
        """Parse `layout` (a struct format string) into Field objects.

        :param layout: struct format string, optionally starting with a
            byte-order/alignment prefix character.
        :param names: optional field names, one per format field.
        :param kw: supports `name` -- a name for the whole record layout.
        :raises ValueError: if `layout` is empty.
        """
        self.name = kw.get('name')
        self.names = names
        self.layout = layout
        self.struct = Struct(layout)
        if not layout:
            raise ValueError("illegal layout")
        # Split off the optional byte-order/alignment prefix; '@' is assumed
        # when none is given (matching struct's own default).
        if layout[0] in self.record_prefix:
            prefix = layout[0]
            layout = layout[1:]
        else:
            prefix = '@'
        fields = []  # get field by position
        self._field = {}  # get field by name
        pos = 0
        for i, field in enumerate(self.layoutre.findall(layout)):
            # `field` is e.g. '12x' or 'Q': optional decimal count + one
            # format character.
            count = int(field[:-1] or '0', 10)
            fmtch = field[-1]
            f = Field(
                name=names[i] if names else "",
                index=i,
                layout=field,
                count=count,
                format=fmtch,
                type=self.struct_field_types[fmtch],
                size=max(1, count) * self.struct_field_sizes[fmtch],
                position=pos
            )
            pos += len(f)
            fields.append(f)
            if f.name:
                self._field[f.name] = f
        self.prefix = prefix
        self.fields = fields

    def __getitem__(self, key):
        """Get field `key`, where key is either the position of the field
        or its name.
        """
        if isinstance(key, int):
            return self.fields[key]
        else:
            return self._field[key]

    def __contains__(self, fieldname):
        """Is there a field named `fieldname`?
        """
        return fieldname in self._field

    def __len__(self):
        """The length of the record is the sum of all fields.
        .. todo: This is obviously not correct for any prefix other than '='
                 since it doesn't take padding into account.
        """
        return sum(f.size for f in self.fields)

    def __repr__(self):
        # Debug representation: total length, prefix and all parsed fields.
        return '{Layout(%d) "%s" %r}' % (len(self), self.prefix, self.fields)

    def __str__(self):
        # res = self.name + "[%d] (%s):\n" % (self.index, self.layout)
        # Human-readable listing: one line per field with its byte offset.
        res = 'Layout for %r: (len=%d)\n' % (self.name, len(self))
        for f in self.fields:
            res += '  @%4d:' % f.position + str(f) + '\n'
        return res

    def split(self, data):
        """Split the byte string `data` into a list of substrings holding the
        data for each field.
        :raises ValueError: if `data` length does not match the layout length.
        """
        llen = len(self)
        dlen = len(data)
        if dlen != llen:
            msg = "Data (%d) doesn't match layout (%s) length (%d): %r" % (
                dlen, self.layout, llen, data)
            raise ValueError(msg)
        return split_fields(data, [f.size for f in self.fields])
|
# coding: utf-8
# Copyright (c) Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department
# Distributed under the terms of "New BSD License", see the LICENSE file.
import unittest
import numpy as np
import time
from pyiron_atomistics.atomistics.structure.atoms import Atoms
from pyiron_atomistics.atomistics.structure.periodic_table import element, PeriodicTable
class TestAtoms(unittest.TestCase):
    def test_cached_speed(self):
        """
        Creating atoms should be faster after the first time, due to caches in periodictable/mendeleev.
        """
        pos, cell = generate_fcc_lattice()
        # required mean speedup factor of cached vs. uncached creation
        expected_speedup_factor = 15
        # repeat the timing several times and average to reduce noise
        n_timing_loop = 5
        t1, t2, t3, t4, t5, t6, t7 = [np.array([0.0]*n_timing_loop) for _ in range(7)]
        for i in range(n_timing_loop):
            # drop the element/periodic-table caches so the first Atoms()
            # call in each iteration is guaranteed to run uncached
            element.cache_clear()
            PeriodicTable._get_periodic_table_df.cache_clear()
            t1[i] = time.perf_counter()
            Atoms(symbols="Al", positions=pos, cell=cell)  # uncached, one element
            t2[i] = time.perf_counter()
            Atoms(symbols="Al", positions=pos, cell=cell)  # cached, same element
            t3[i] = time.perf_counter()
            Atoms(symbols="Cu", positions=pos, cell=cell)  # uncached, new element
            t4[i] = time.perf_counter()
            Atoms(symbols="CuAl", positions=[[0., 0., 0.], [0.5, 0.5, 0.5]], cell=cell)  # cached, both elements seen
            t5[i] = time.perf_counter()
            Atoms(symbols="MgO", positions=[[0., 0., 0.], [0.5, 0.5, 0.5]], cell=cell)  # uncached, two new elements
            t6[i] = time.perf_counter()
            Atoms(symbols="AlMgO", positions=[[0., 0., 0.], [0.5, 0.5, 0.5], [0.5, 0.5, 0.]], cell=cell)  # cached, three known elements
            t7[i] = time.perf_counter()
        dt21 = np.mean(t2 - t1)
        dt32 = np.mean(t3 - t2)
        # check the simple case of structures with one element type
        self.assertGreater(dt21, dt32, "Atom creation not speed up by caches!")
        self.assertGreater(dt21 / dt32, expected_speedup_factor,
                           "Atom creation not speed up to the required level by caches!")
        dt43 = np.mean(t4 - t3)
        dt54 = np.mean(t5 - t4)
        # check that speed up also holds when creating structures with multiple elements, but all the elements have been
        # seen before
        self.assertGreater(dt43 / dt54, expected_speedup_factor,
                           "Atom creation not speed up to the required level by caches!")
        dt65 = np.mean(t6 - t5)
        dt76 = np.mean(t7 - t6)
        # check that again with three elements
        self.assertGreater(dt65 / dt76, expected_speedup_factor,
                           "Atom creation not speed up to the required level by caches!")
def generate_fcc_lattice(a=4.2):
    """Return (positions, cell) of a primitive fcc lattice with lattice constant `a`."""
    basis = [[0, 0, 0]]
    # fcc primitive vectors: all off-diagonal entries are a/2, diagonal is 0
    lattice = (np.ones((3, 3)) - np.eye(3)) * 0.5 * a
    return basis, lattice
if __name__ == "__main__":
    # Run the test suite when this file is executed as a script.
    unittest.main()
|
import argparse
import signal
import sys
from typing import Optional
from determined import ipc
def read_action(opt: str, val: str) -> Optional[signal.Signals]:
    """Translate a CLI action string into a signal, or None for 'wait'.

    :param opt: name of the CLI option, used only in the error message.
    :param val: the option's value; a signal name (case-insensitive) or 'WAIT'.
    :raises ValueError: if `val` is neither 'wait' nor a known signal name.
    """
    lowered = val.lower()
    if lowered == "wait":
        return None
    by_name = {sig.name.lower(): sig for sig in signal.Signals}
    action = by_name.get(lowered)
    if action is None:
        raise ValueError(
            f"{opt} argument '{val}' is not valid; it should be a signal name ('SIGTERM', "
            "'SIGKILL', etc) or 'WAIT'"
        )
    return action
if __name__ == "__main__":
    # Launcher: runs `cmd` under a PID server so its worker processes can be
    # tracked, then exits with the subprocess's return code.
    parser = argparse.ArgumentParser()
    # Action taken when a worker fails (signal name or 'WAIT'); default SIGTERM.
    parser.add_argument("-x", "--on-fail", dest="on_fail", action="store", default="SIGTERM")
    # Action taken when workers exit cleanly; default is to just wait.
    parser.add_argument("-e", "--on-exit", dest="on_exit", action="store", default="WAIT")
    parser.add_argument("addr")
    parser.add_argument("num_workers", type=int)
    parser.add_argument("cmd")
    parser.add_argument("cmd_args", nargs="*")
    args = parser.parse_args()
    on_fail = read_action("--on-fail", args.on_fail)
    on_exit = read_action("--on-exit", args.on_exit)
    addr = ipc.read_pid_server_addr(args.addr)
    with ipc.PIDServer(addr, args.num_workers) as pid_server:
        sys.exit(
            pid_server.run_subprocess(
                cmd=[args.cmd] + args.cmd_args,
                on_fail=on_fail,
                on_exit=on_exit,
            ),
        )
|
#!/usr/bin/env python3
import numpy as np
import os
def loadmesh(fn, skiprows=0, strip=True):
    """Load a mesh file, dispatching on the filename extension.

    :raises ValueError: for an unrecognized extension.
    """
    # OFF files carry connectivity and take no skiprows/strip options.
    if fn.endswith('.off'):
        return loadoff(fn)
    if fn.endswith('.txt'):
        loader = loadtxt
    elif fn.endswith('.asc'):
        loader = loadasc
    elif fn.endswith('.xyz'):
        loader = loadxyz
    else:
        raise ValueError('{}: Unknown filetype'.format(fn))
    return loader(fn, skiprows=skiprows, strip=strip)
def savemesh(fn, *data):
    """Save mesh data, dispatching on the filename extension.

    For '.off', `data` must be (vertices, faces); for the other formats,
    only `data[0]` is written.

    :raises ValueError: for an unrecognized extension.
    """
    if fn.endswith('.off'):
        saveoff(fn, data[0], data[1])
    elif fn.endswith('.txt'):
        savetxt(fn, data[0])
    elif fn.endswith('.asc'):
        saveasc(fn, data[0])
    elif fn.endswith('.xyz'):
        savexyz(fn, data[0])
    else:
        raise ValueError('{}: Unknown filetype'.format(fn))
def loadtxt(fn, delimiter=',', comments='#', skiprows=0, strip=True):
    """Load a delimited text file into a float32 array.

    Skips the first `skiprows` lines and any line starting with `comments`
    (after stripping, when `strip` is true).

    :param fn: path to the file.
    :param delimiter: value separator within a line.
    :returns: np.ndarray with one row per data line.
    :raises ValueError: if a token cannot be parsed as a float.
    """
    result = []
    with open(fn, 'r') as fid:
        for row, line in enumerate(fid):
            if row < skiprows:
                continue
            if strip:
                line = line.strip()
            if line.startswith(comments):
                continue
            # np.fromstring(text, sep=...) is deprecated; parse tokens
            # explicitly (also fails loudly on malformed input instead of
            # silently truncating the row).
            tokens = line.replace(delimiter, ' ').split()
            values = np.array([float(tok) for tok in tokens], dtype=np.float32)
            result.append(values)
    return np.array(result)
def savetxt(fn, data, delimiter=','):
    """Write `data` to `fn` as comma-delimited text (thin np.savetxt wrapper)."""
    np.savetxt(fn, data, delimiter=delimiter)
def loadasc(fn, delimiter=',', comments='#', skiprows=0, strip=True):
    """Load a delimited .asc text file into a float32 array.

    Skips the first `skiprows` lines and any line starting with `comments`
    (after stripping, when `strip` is true).

    :param fn: path to the file.
    :param delimiter: value separator within a line.
    :returns: np.ndarray with one row per data line.
    :raises ValueError: if a token cannot be parsed as a float.
    """
    result = []
    with open(fn, 'r') as fid:
        for row, line in enumerate(fid):
            if row < skiprows:
                continue
            if strip:
                line = line.strip()
            if line.startswith(comments):
                continue
            # np.fromstring(text, sep=...) is deprecated; parse tokens
            # explicitly (also fails loudly on malformed input instead of
            # silently truncating the row).
            tokens = line.replace(delimiter, ' ').split()
            values = np.array([float(tok) for tok in tokens], dtype=np.float32)
            result.append(values)
    return np.array(result)
def saveasc(fn, data, delimiter=','):
    """Write `data` to `fn` as comma-delimited text (thin np.savetxt wrapper)."""
    np.savetxt(fn, data, delimiter=delimiter)
def loadxyz(fn, delimiter=' ', comments='#', skiprows=0, strip=True):
    """Load a whitespace-delimited .xyz file into a float32 array.

    Skips the first `skiprows` lines and any line starting with `comments`
    (after stripping, when `strip` is true).

    :param fn: path to the file.
    :param delimiter: value separator within a line.
    :returns: np.ndarray with one row per data line.
    :raises ValueError: if a token cannot be parsed as a float.
    """
    result = []
    with open(fn, 'r') as fid:
        for row, line in enumerate(fid):
            if row < skiprows:
                continue
            if strip:
                line = line.strip()
            if line.startswith(comments):
                continue
            # np.fromstring(text, sep=...) is deprecated; parse tokens
            # explicitly (also fails loudly on malformed input instead of
            # silently truncating the row).
            tokens = line.replace(delimiter, ' ').split()
            values = np.array([float(tok) for tok in tokens], dtype=np.float32)
            result.append(values)
    return np.array(result)
def savexyz(fn, data, delimiter=' '):
    """Write `data` to `fn` as space-delimited text (thin np.savetxt wrapper)."""
    np.savetxt(fn, data, delimiter=delimiter)
def loadoff(fn, delimiter=' '):
    """Load an OFF mesh file.

    :param fn: path to the file; the first line must be exactly 'OFF'.
    :param delimiter: unused, kept for interface compatibility; OFF fields
        are whitespace separated, so any run of whitespace is accepted.
    :returns: (vertices, faces) as np.ndarrays; only the first three vertex
        indices of each face are kept (triangles assumed).
    :raises ValueError: if the header line is not 'OFF'.
    """
    vertices = []
    faces = []
    with open(fn, 'r') as fid:
        line = fid.readline().strip()
        if line != 'OFF':
            raise ValueError('{}: {} is not a valid OFF header'.format(fn, line))
        # Find the counts line, skipping comments and blank lines.
        for line in fid:
            line = line.strip()
            if line.startswith('#'):
                continue
            if len(line) <= 0:
                continue
            # split() (not split(' ')) tolerates runs of spaces/tabs,
            # which split(' ') would turn into int('') errors.
            num_vertices, num_faces, num_edges = [int(v) for v in line.split()]
            break
        for vi in range(num_vertices):
            line = fid.readline().strip()
            vertex = [float(v) for v in line.split()[:3]]
            vertices.append(vertex)
        for fi in range(num_faces):
            line = fid.readline().strip()
            # first token is the vertex count of the polygon; keep 3 indices
            face = [int(v) for v in line.split()[1:4]]
            faces.append(face)
    vertices = np.array(vertices)
    faces = np.array(faces)
    return vertices, faces
def saveoff(fn, vertices, faces):
    """Write `vertices` and triangular `faces` to `fn` in OFF format."""
    lines = ['OFF\n']
    # counts line: num_vertices num_faces num_edges (edges always 0 here)
    lines.append('{} {} {}\n'.format(vertices.shape[0], faces.shape[0], 0))
    for vert in vertices:
        lines.append('{} {} {}\n'.format(vert[0], vert[1], vert[2]))
    for face in faces:
        lines.append('{} {} {} {}\n'.format(3, face[0], face[1], face[2]))
    with open(fn, 'w') as fid:
        fid.writelines(lines)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.