Dataset schema (column name, type, value range; nullable columns marked):

| column | type | range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64, nullable | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string, nullable | length 24 |
| max_stars_repo_stars_event_max_datetime | string, nullable | length 24 |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64, nullable | 1 to 116k |
| max_issues_repo_issues_event_min_datetime | string, nullable | length 24 |
| max_issues_repo_issues_event_max_datetime | string, nullable | length 24 |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64, nullable | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string, nullable | length 24 |
| max_forks_repo_forks_event_max_datetime | string, nullable | length 24 |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |
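
The schema can be explored with ordinary dataframe tooling. A minimal sketch, assuming the rows have been exported to Parquet (the file name code_rows.parquet is hypothetical; only the column names come from the schema above):

```python
# Sketch: filter rows of a code dataset with the schema above.
# "code_rows.parquet" is a hypothetical export; the column names
# (ext, size, max_stars_repo_name, ...) are taken from the schema.
import pandas as pd

df = pd.read_parquet("code_rows.parquet")
small_py = df[(df["ext"] == "py") & (df["size"] < 10_000)]
print(small_py[["max_stars_repo_name", "max_stars_repo_path", "size"]].head())
```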
hexsha: 217d631aee6a0e46f63c306242136b06ed113ba7 | size: 1,458 | ext: py | lang: Python
repo path (stars/issues/forks): scripts/nodes/filter/setupSensors.py
repo name: AutonomousFieldRoboticsLab/jetyak_uav_utils | head hexsha: 7926df2cf34b0be2647b62896c98af82ca6f1e53 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (fork events: 2021-12-09T01:30:58.000Z to 2021-12-09T01:30:58.000Z)
content:
import numpy as np
from sensor import Sensor

def setupSensors(n):
    # Critical chi-squared values for P = 0.001 and different degrees of freedom
    chiSquared_1 = 10.828
    chiSquared_3 = 16.266

    # Measurement Matrix for Tag measurements
    Htag = np.matrix(np.zeros((4, n)))
    Htag[0:3, 0:3] = np.matrix(-1 * np.eye(3))
    Htag[0:3, 6:9] = np.matrix(np.eye(3))
    Htag[0:3, 11:14] = np.matrix(-1 * np.eye(3))
    Htag[3, 14] = 1
    # Covariance Matrix for Tag measurements
    Rtag = np.asmatrix(1.0e-3 * np.eye(4))

    # Measurement Matrix for Drone velocity measurements
    HvelD = np.matrix(np.zeros((3, n)))
    HvelD[0:3, 3:6] = np.matrix(np.eye(3))
    # Covariance Matrix for Drone velocity measurements
    RvelD = np.asmatrix(1.0e-3 * np.eye(3))

    # Measurement Matrix for Drone GPS measurements
    HgpsD = np.matrix(np.zeros((3, n)))
    HgpsD[0:3, 0:3] = np.matrix(np.eye(3))
    # Covariance Matrix for Drone GPS measurements
    RgpsD = np.asmatrix(1.0e-1 * np.eye(3))

    # Measurement Matrix for Jetyak GPS measurements
    HgpsJ = np.matrix(np.zeros((3, n)))
    HgpsJ[0:3, 6:9] = np.matrix(np.eye(3))
    # Covariance Matrix for Jetyak GPS measurements
    RgpsJ = np.asmatrix(5.0e0 * np.eye(3))

    # Set up sensors
    tagS = Sensor('tag', Rtag, Htag, 39.0, chiSquared_3)
    velDS = Sensor('dvel', RvelD, HvelD, 150.0, chiSquared_1)
    gpsDS = Sensor('dgps', RgpsD, HgpsD, 150.0, chiSquared_1)
    gpsJS = Sensor('jgps', RgpsJ, HgpsJ, 10.0, chiSquared_1)

    return (tagS, velDS, gpsDS, gpsJS)
avg_line_length: 31.695652 | max_line_length: 77 | alphanum_fraction: 0.678326
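
The chi-squared constants in setupSensors are gating thresholds: a measurement is accepted only when its squared Mahalanobis distance under the innovation covariance is below the critical value for the measurement's degrees of freedom (10.828 for 1 DoF, 16.266 for 3 DoF, both at P = 0.001). The Sensor class itself is not shown in this dump, so the sketch below uses a standalone helper (gate_measurement is a hypothetical name) to illustrate the test:

```python
# Minimal sketch of chi-squared innovation gating, assuming a linear
# measurement model z = H x + v with measurement noise covariance R.
# gate_measurement is a hypothetical helper, not the Sensor class above.
import numpy as np

def gate_measurement(z, x, P, H, R, chi2_critical):
    """Accept z only if its normalized innovation is statistically plausible."""
    y = z - H @ x                              # innovation (residual)
    S = H @ P @ H.T + R                        # innovation covariance
    d2 = (y.T @ np.linalg.solve(S, y)).item()  # squared Mahalanobis distance
    return d2 <= chi2_critical

# Example: a 3-DoF GPS-style position measurement gated at P = 0.001.
n = 15
x = np.zeros((n, 1))                           # state estimate
P = np.eye(n)                                  # state covariance
H = np.zeros((3, n))
H[0:3, 0:3] = np.eye(3)                        # observe the first three states
R = 1.0e-1 * np.eye(3)
z = np.array([[0.5], [0.2], [-0.1]])
print(gate_measurement(z, x, P, H, R, 16.266))  # True: well inside the gate
```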
hexsha: 0341ee0a3f23cb6a01cd879ea1c2351af4b1b924 | size: 6,356 | ext: py | lang: Python
repo path (stars/issues/forks): oneflow/python/test/ops/test_argwhere.py
repo name: 666DZY666/oneflow | head hexsha: 2062cb211dd1e0619d610659e6d41598d5f73e17 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (fork events: 2021-11-10T07:57:01.000Z to 2021-11-10T07:57:01.000Z)
content:
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
import unittest
from collections import OrderedDict
import oneflow as flow
from test_util import GenArgDict
def _np_dtype_to_of_dtype(np_dtype):
if np_dtype == np.float32:
return flow.float32
elif np_dtype == np.int32:
return flow.int32
elif np_dtype == np.int64:
return flow.int64
elif np_dtype == np.int8:
return flow.int8
else:
raise NotImplementedError
def _random_input(shape, dtype):
if dtype == np.float32:
rand_ = np.random.random_sample(shape).astype(np.float32)
rand_[np.nonzero(rand_ < 0.5)] = 0.0
return rand_
elif dtype == np.int32:
return np.random.randint(low=0, high=2, size=shape).astype(np.int32)
elif dtype == np.int8:
return np.random.randint(low=0, high=2, size=shape).astype(np.int8)
else:
raise NotImplementedError
def _of_argwhere(x, index_dtype, device_type="gpu", device_num=1, dynamic=False):
data_type = _np_dtype_to_of_dtype(x.dtype)
out_data_type = _np_dtype_to_of_dtype(index_dtype)
flow.clear_default_session()
if device_type == "gpu":
flow.config.gpu_device_num(device_num)
elif device_type == "cpu":
flow.config.cpu_device_num(device_num)
else:
raise ValueError
assert device_num > 0
func_config = flow.FunctionConfig()
func_config.default_data_type(data_type)
func_config.default_placement_scope(
flow.scope.placement(device_type, "0:0-{}".format(device_num - 1))
)
if dynamic is True:
func_config.default_logical_view(flow.scope.mirrored_view())
@flow.global_function("predict", function_config=func_config)
def argwhere_fn(
x: flow.typing.ListNumpy.Placeholder(x.shape, dtype=data_type)
) -> flow.typing.ListNumpy:
return flow.argwhere(x, dtype=out_data_type)
return argwhere_fn([x] * device_num)[0]
else:
func_config.default_logical_view(flow.scope.consistent_view())
@flow.global_function("predict", function_config=func_config)
def argwhere_fn(
x: flow.typing.Numpy.Placeholder(x.shape, dtype=data_type)
) -> flow.typing.ListNumpy:
return flow.argwhere(x, dtype=out_data_type)
return argwhere_fn(x)[0]
def _compare_with_np(
test_case,
shape,
value_dtype,
index_dtype,
device_type="gpu",
device_num=1,
dynamic=False,
verbose=False,
):
if verbose:
print("shape:", shape)
print("value_dtype:", value_dtype)
print("index_dtype:", index_dtype)
print("device_type:", device_type)
print("device_num:", device_num)
print("dynamic:", dynamic)
x = _random_input(shape, value_dtype)
y = np.argwhere(x)
of_y = _of_argwhere(
x, index_dtype, device_type=device_type, device_num=device_num, dynamic=dynamic
)
if verbose is True:
print("input:", x)
print("np result:", y)
print("of result:", of_y)
test_case.assertTrue(np.array_equal(y, of_y))
def _dynamic_multi_iter_compare(
test_case,
iter_num,
shape,
value_dtype,
index_dtype,
device_type="gpu",
verbose=False,
):
x = [_random_input(shape, value_dtype) for _ in range(iter_num)]
y = [np.argwhere(x_) for x_ in x]
data_type = _np_dtype_to_of_dtype(value_dtype)
out_data_type = _np_dtype_to_of_dtype(index_dtype)
flow.clear_default_session()
func_config = flow.FunctionConfig()
func_config.default_data_type(data_type)
func_config.default_placement_scope(flow.scope.placement(device_type, "0:0"))
func_config.default_logical_view(flow.scope.mirrored_view())
@flow.global_function("predict", function_config=func_config)
def argwhere_fn(
x: flow.typing.Numpy.Placeholder(tuple(shape), dtype=data_type)
) -> flow.typing.ListNumpy:
return flow.argwhere(x, dtype=out_data_type)
results = []
for x_ in x:
y_ = argwhere_fn(x_)[0]
results.append(y_)
for i, result in enumerate(results):
test_case.assertTrue(np.array_equal(result, y[i]))
@flow.unittest.skip_unless_1n1d()
class TestArgwhere(flow.unittest.TestCase):
def test_argwhere(test_case):
arg_dict = OrderedDict()
arg_dict["shape"] = [(10), (30, 4), (8, 256, 20)]
arg_dict["value_dtype"] = [np.float32, np.int32, np.int8]
arg_dict["index_dtype"] = [np.int32, np.int64]
arg_dict["device_type"] = ["cpu", "gpu"]
arg_dict["dynamic"] = [True, False]
arg_dict["verbose"] = [False]
for arg in GenArgDict(arg_dict):
_compare_with_np(test_case, **arg)
def test_argwhere_multi_iter(test_case):
arg_dict = OrderedDict()
arg_dict["iter_num"] = [2]
arg_dict["shape"] = [(20, 4)]
arg_dict["value_dtype"] = [np.float32, np.int32, np.int8]
arg_dict["index_dtype"] = [np.int32, np.int64]
arg_dict["device_type"] = ["cpu", "gpu"]
arg_dict["verbose"] = [False]
for arg in GenArgDict(arg_dict):
_dynamic_multi_iter_compare(test_case, **arg)
@flow.unittest.skip_unless_1n4d()
class TestArgwhere4D(flow.unittest.TestCase):
def test_argwhere(test_case):
arg_dict = OrderedDict()
arg_dict["shape"] = [(10, 5)]
arg_dict["value_dtype"] = [np.float32, np.int32, np.int8]
arg_dict["index_dtype"] = [np.int32, np.int64]
arg_dict["device_type"] = ["cpu", "gpu"]
arg_dict["device_num"] = [4]
arg_dict["dynamic"] = [True]
arg_dict["verbose"] = [False]
for arg in GenArgDict(arg_dict):
_compare_with_np(test_case, **arg)
if __name__ == "__main__":
unittest.main()
avg_line_length: 31.621891 | max_line_length: 87 | alphanum_fraction: 0.66394
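
The tests above treat np.argwhere as the reference implementation for flow.argwhere. Its contract is easy to see in a NumPy-only example: one row per nonzero element, one column per input dimension, in row-major order:

```python
# NumPy-only illustration of the semantics the tests above check.
import numpy as np

x = np.array([[0, 1, 0],
              [2, 0, 3]])
idx = np.argwhere(x)
print(idx)
# [[0 1]
#  [1 0]
#  [1 2]]

# Round-trip: the recovered coordinates index exactly the nonzero values.
print(x[idx[:, 0], idx[:, 1]])  # [1 2 3]
```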
hexsha: 0f956d6ee6c26c231168507e7127ae034d5d9743 | size: 7,834 | ext: py | lang: Python
repo path (stars/issues/forks): eval_sem.py
repo name: arassadin/sgpn | head hexsha: d9bb6e2df0cc255051a7a97950bbb51950642e0c | licenses: ["MIT"]
max_stars_count: 1 (star events: 2020-05-12T15:53:46.000Z to 2020-05-12T15:53:46.000Z) | max_issues_count: null | max_forks_count: null
content:
import argparse
import tensorflow as tf
import numpy as np
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"  # see issue #152
import sys
import torch

from utils import pc_util
from models import model
import torchnet as tnt
import scannet_dataset
import suncg_dataset
from utils import provider

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# need to be changed
# 0. input/output folder
# 1. filelist
# 2. batch size
# 3. is_training
# 4. '''''' part
# 5. loss
# ----------------------------------------------------------
# set

# Parsing Arguments
parser = argparse.ArgumentParser()
# Experiment Settings
parser.add_argument('--gpu', type=str, default="3", help='GPU to use [default: GPU 1]')
parser.add_argument('--wd', type=float, default=0.9, help='Weight Decay [Default: 0.0]')
parser.add_argument('--epoch', type=int, default=200, help='Number of epochs [default: 50]')
parser.add_argument('--pretrain', type=bool, default=True, help='pretrain semantic segmentation')
parser.add_argument('--point_num', type=int, default=4096, help='Point Number')
parser.add_argument('--group_num', type=int, default=150, help='Maximum Group Number in one pc')
parser.add_argument('--cate_num', type=int, default=21, help='Number of categories')
parser.add_argument('--margin_same', type=float, default=1., help='Double hinge loss margin: same semantic')
parser.add_argument('--margin_diff', type=float, default=2., help='Double hinge loss margin: different semantic')

# Input & Output Settings
parser.add_argument('--output_dir', type=str, default='checkpoint/scannet_sem_seg2', help='Directory that stores all training logs and trained models')
parser.add_argument('--restore_model', type=str, default='checkpoint/scannet_sem_seg2', help='Pretrained model')

FLAGS = parser.parse_args()

os.environ["CUDA_VISIBLE_DEVICES"] = FLAGS.gpu

# dataloader
#DATA_ROOT = 'data/scannet_data/annotation'
#TEST_DATASET = scannet_dataset.ScannetDatasetWholeScene(root=DATA_ROOT, npoints=FLAGS.point_num, split='data/scannet_data/meta/scannet_test.txt')
DATA_ROOT = 'data/suncg_data/annotation'
TEST_DATASET = suncg_dataset.SUNCGDatasetWholeScene(root=DATA_ROOT, npoints=FLAGS.point_num, split='data/suncg_data/meta/suncg_test.txt')

PRETRAINED_MODEL_PATH = os.path.join(FLAGS.restore_model, 'trained_models/')
PRETRAIN = FLAGS.pretrain
POINT_NUM = FLAGS.point_num
OUTPUT_DIR = FLAGS.output_dir
if not os.path.exists(OUTPUT_DIR):
    os.makedirs(OUTPUT_DIR)
NUM_GROUPS = FLAGS.group_num
NUM_CATEGORY = FLAGS.cate_num

print('#### Point Number: {0}'.format(POINT_NUM))
print('#### Training using GPU: {0}'.format(FLAGS.gpu))

DECAY_STEP = 200000.
DECAY_RATE = 0.7

BN_INIT_DECAY = 0.5
BN_DECAY_DECAY_RATE = 0.5
BN_DECAY_DECAY_STEP = float(DECAY_STEP)
BN_DECAY_CLIP = 0.99

LEARNING_RATE_CLIP = 1e-5
BASE_LEARNING_RATE = 5e-4

TRAINING_EPOCHES = FLAGS.epoch
MARGINS = [FLAGS.margin_same, FLAGS.margin_diff]
print('### Training epoch: {0}'.format(TRAINING_EPOCHES))

MODEL_STORAGE_PATH = os.path.join(OUTPUT_DIR, 'trained_models')
if not os.path.exists(MODEL_STORAGE_PATH):
    os.mkdir(MODEL_STORAGE_PATH)

LOG_STORAGE_PATH = os.path.join(OUTPUT_DIR, 'logs')
if not os.path.exists(LOG_STORAGE_PATH):
    os.mkdir(LOG_STORAGE_PATH)

SUMMARIES_FOLDER = os.path.join(OUTPUT_DIR, 'summaries')
if not os.path.exists(SUMMARIES_FOLDER):
    os.mkdir(SUMMARIES_FOLDER)

LOG_DIR = FLAGS.output_dir
if not os.path.exists(LOG_DIR): os.mkdir(LOG_DIR)
os.system('cp %s %s' % (os.path.join(BASE_DIR, 'models/model.py'), LOG_DIR))  # bkp of model def
os.system('cp %s %s' % (os.path.join(BASE_DIR, 'train.py'), LOG_DIR))  # bkp of train procedure


def printout(flog, data):
    print(data)
    flog.write(data + '\n')


def get_test_batch(dataset, i):
    batch_data = []
    batch_label = []
    batch_group = []
    batch_smpw = []
    ps, seg, group, smpw = dataset[i]
    batch_data.append(ps)
    batch_label.append(seg)
    batch_group.append(group)
    batch_smpw.append(smpw)
    batch_data = np.concatenate(batch_data, 0)
    batch_label = np.concatenate(batch_label, 0)
    batch_group = np.concatenate(batch_group, 0)
    batch_smpw = np.concatenate(batch_smpw, 0)
    return batch_data, batch_label, batch_group, batch_smpw


def eval():
    with tf.Graph().as_default():
        with tf.device('/gpu:' + str(FLAGS.gpu)):
            batch = tf.Variable(0, trainable=False, name='batch')

            pointclouds_ph, ptsseglabel_ph, ptsseglabel_onehot_ph, ptsgroup_label_ph, pts_seglabel_mask_ph, pts_group_mask_ph, alpha_ph = \
                model.placeholder_inputs(1, POINT_NUM, NUM_GROUPS, NUM_CATEGORY)
            is_training_ph = tf.placeholder(tf.bool, shape=())

            labels = {'ptsgroup': ptsgroup_label_ph,
                      'semseg': ptsseglabel_ph,
                      'semseg_onehot': ptsseglabel_onehot_ph,
                      'semseg_mask': pts_seglabel_mask_ph,
                      'group_mask': pts_group_mask_ph}
            net_output = model.get_model(pointclouds_ph, is_training_ph, group_cate_num=NUM_CATEGORY, m=MARGINS[0])
            ptsseg_loss, simmat_loss, loss, grouperr, same, same_cnt, diff, diff_cnt, pos, pos_cnt = model.get_loss(net_output, labels, alpha_ph, MARGINS)

            # Restore everything except the variables that are trained from scratch
            loader = tf.train.Saver([v for v in tf.all_variables()
                                     if ('conf_logits' not in v.name) and
                                        ('Fsim' not in v.name) and
                                        ('Fsconf' not in v.name) and
                                        ('batch' not in v.name)])
            saver = tf.train.Saver([v for v in tf.all_variables()], max_to_keep=200)

        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
        config.allow_soft_placement = True
        sess = tf.Session(config=config)

        init = tf.global_variables_initializer()
        sess.run(init)

        ckptstate = tf.train.get_checkpoint_state(PRETRAINED_MODEL_PATH)
        if ckptstate is not None:
            LOAD_MODEL_FILE = os.path.join(PRETRAINED_MODEL_PATH, os.path.basename(ckptstate.model_checkpoint_path))
            loader.restore(sess, LOAD_MODEL_FILE)

        ## load test data into memory
        is_training = False
        for i in range(len(TEST_DATASET)):
            print('{}/{}'.format(i, len(TEST_DATASET)))
            cur_data, cur_seg, cur_group, cur_smpw = get_test_batch(TEST_DATASET, i)
            data_res = []
            seg_res = []
            for j in range(cur_data.shape[0]):
                pts_label_one_hot = model.convert_seg_to_one_hot(cur_seg[j:j+1])
                feed_dict = {
                    pointclouds_ph: cur_data[j:j+1],
                    ptsseglabel_ph: cur_seg[j:j+1],
                    ptsseglabel_onehot_ph: pts_label_one_hot,
                    pts_seglabel_mask_ph: cur_smpw[j:j+1],
                    is_training_ph: is_training,
                    alpha_ph: min(10., (float(1.0) / 5.) * 2. + 2.),
                }
                ptsclassification_val0 = sess.run([net_output['semseg']], feed_dict=feed_dict)
                ptsclassification_val = torch.from_numpy(ptsclassification_val0[0]).view(-1, NUM_CATEGORY)
                ptsclassification_gt = torch.from_numpy(pts_label_one_hot).view(-1, NUM_CATEGORY)

                data_res.append(cur_data[j:j+1])
                seg_res.append(np.argmax(ptsclassification_val.numpy(), 1))

            write_data = np.reshape(np.concatenate(data_res, 0)[:, :, :3], [-1, 3])
            write_seg = np.concatenate(seg_res, 0)
            pc_util.write_ply_res(write_data, write_seg, '{}.ply'.format(TEST_DATASET.get_filename(i)))
            pc_util.write_obj_color(write_data, write_seg, '{}.obj'.format(TEST_DATASET.get_filename(i)))


if __name__ == '__main__':
    eval()
avg_line_length: 40.381443 | max_line_length: 154 | alphanum_fraction: 0.667347
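
One detail worth noting in eval_sem.py is the pair of Saver objects: loader restores only the pretrained variables (excluding conf_logits, Fsim, Fsconf, and batch), while saver handles the full variable set. A minimal sketch of that pattern against the TF1 compat API, with a hypothetical new_head scope standing in for the excluded variables:

```python
# Sketch of selective checkpoint restore with two Savers (TF1 compat API).
# "backbone" and "new_head" are hypothetical variable scopes.
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

backbone_w = tf.get_variable("backbone/w", shape=[3, 3])
new_head_w = tf.get_variable("new_head/w", shape=[3, 2])

# Restore pretrained weights only: filter out variables trained from scratch.
restore_vars = [v for v in tf.global_variables() if "new_head" not in v.name]
loader = tf.train.Saver(restore_vars)

# Checkpoint the full variable set during/after evaluation or fine-tuning.
saver = tf.train.Saver(tf.global_variables(), max_to_keep=200)
```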
hexsha: 497c1180223ac2abc2c76c5329403d4a2797380c | size: 229,713 | ext: py | lang: Python
repo path (stars/issues/forks): cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_infra_sla_oper.py
repo name: tkamata-test/ydk-py | head hexsha: b637e7853a8edbbd31fbc05afa3aa4110b31c5f9 | licenses: ["ECL-2.0", "Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions',
False,
[
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of the profile used by the operation
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'configured-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions',
False,
[
_MetaInfoClassMember('ondemand-operation-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' ID of the ondemand operation
''',
'ondemand_operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-count', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Total number of probes sent during the operation
''',
'probe_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'ondemand-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions',
False,
[
_MetaInfoClassMember('configured-operation-options', REFERENCE_CLASS, 'ConfiguredOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions',
[], [],
''' Parameters for a configured operation
''',
'configured_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ondemand-operation-options', REFERENCE_CLASS, 'OndemandOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions',
[], [],
''' Parameters for an ondemand operation
''',
'ondemand_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('oper-type', REFERENCE_ENUM_CLASS, 'SlaOperOperationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperationEnum',
[], [],
''' OperType
''',
'oper_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'specific-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule',
False,
[
_MetaInfoClassMember('schedule-duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Duration of a probe for the operation in seconds
''',
'schedule_duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('schedule-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between the start times of consecutive
probes, in seconds.
''',
'schedule_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Start time of the first probe, in seconds since
the Unix Epoch
''',
'start_time',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time-configured', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether or not the operation start time was
explicitly configured
''',
'start_time_configured',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-schedule',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config',
False,
[
_MetaInfoClassMember('bins-count', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Total number of bins into which to aggregate. 0
if no aggregation.
''',
'bins_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bins-width', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Width of each bin into which to aggregate. 0 if
no aggregation. For SLM, the units of this value
are in single units of percent; for LMM they are
in tenths of percent; for other measurements
they are in milliseconds.
''',
'bins_width',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Size of buckets into which measurements are
collected
''',
'bucket_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size-unit', REFERENCE_ENUM_CLASS, 'SlaBucketSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSizeEnum',
[], [],
''' Whether bucket size is 'per-probe' or 'probes'
''',
'bucket_size_unit',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum number of buckets to store in memory
''',
'buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-type', REFERENCE_ENUM_CLASS, 'SlaRecordableMetricEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetricEnum',
[], [],
''' Type of metric to which this configuration
applies
''',
'metric_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'config',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins',
False,
[
_MetaInfoClassMember('count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The total number of results in the bin
''',
'count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'lower_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'lower_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sum', ATTRIBUTE, 'int' , None, None,
[('-9223372036854775808', '9223372036854775807')], [],
''' The sum of the results in the bin, in
microseconds or millionths of a percent
''',
'sum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'upper_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'upper_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bins',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated',
False,
[
_MetaInfoClassMember('bins', REFERENCE_LIST, 'Bins' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins',
[], [],
''' The bins of an SLA metric bucket
''',
'bins',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'aggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample',
False,
[
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was corrupt
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames lost,
if available
''',
'frames_lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames sent,
if available
''',
'frames_sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('no-data-packets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether a measurement could not be made because
no data packets were sent in the sample period.
Only applicable for LMM measurements
''',
'no_data_packets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was received
out-of-order
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' The result (in microseconds or millionths of a
percent) of the sample, if available
''',
'result',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was successfully sent
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The time (in milliseconds relative to the start
time of the bucket) that the sample was sent at
''',
'sent_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('timed-out', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet timed out
''',
'timed_out',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'sample',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated',
False,
[
_MetaInfoClassMember('sample', REFERENCE_LIST, 'Sample' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample',
[], [],
''' The samples of an SLA metric bucket
''',
'sample',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'unaggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents',
False,
[
_MetaInfoClassMember('aggregated', REFERENCE_CLASS, 'Aggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated',
[], [],
''' Result bins in an SLA metric bucket
''',
'aggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-type', REFERENCE_ENUM_CLASS, 'SlaOperBucketEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucketEnum',
[], [],
''' BucketType
''',
'bucket_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('unaggregated', REFERENCE_CLASS, 'Unaggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated',
[], [],
''' Result samples in an SLA metric bucket
''',
'unaggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'contents',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket',
False,
[
_MetaInfoClassMember('average', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Mean of the results in the probe, in
microseconds or millionths of a percent
''',
'average',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('contents', REFERENCE_CLASS, 'Contents' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents',
[], [],
''' The contents of the bucket; bins or samples
''',
'contents',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of corrupt packets in the probe
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-lost-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets lost across the
bucket, used in the calculation of overall FLR.
''',
'data_lost_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-sent-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets sent across the
bucket, used in the calculation of overall FLR.
''',
'data_sent_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duplicates', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of duplicate packets received in the
probe
''',
'duplicates',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Length of time for which the bucket is being
filled in seconds
''',
'duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of lost packets in the probe
''',
'lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('maximum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Overall maximum result in the probe, in
microseconds or millionths of a percent
''',
'maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('minimum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Overall minimum result in the probe, in
microseconds or millionths of a percent
''',
'minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of packets received out-of-order in the
probe
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('overall-flr', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Frame Loss Ratio across the whole bucket, in
millionths of a percent
''',
'overall_flr',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' If the probe ended prematurely, the error that
caused a probe to end
''',
'premature_reason',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason-string', ATTRIBUTE, 'str' , None, None,
[], [],
''' Description of the error code that caused the
probe to end prematurely. For informational
purposes only
''',
'premature_reason_string',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The count of samples collected in the bucket.
''',
'result_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of packets sent in the probe
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('standard-deviation', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Standard deviation of the results in the probe,
in microseconds or millionths of a percent
''',
'standard_deviation',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the bucket started being
filled at
''',
'start_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-cleared-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as bucket was cleared mid-way
through being filled
''',
'suspect_cleared_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-clock-drift', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as more than 10 seconds time
drift detected
''',
'suspect_clock_drift',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-flr-low-packet-count', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as FLR calculated based on a low
packet count
''',
'suspect_flr_low_packet_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-management-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as processing of results has
been delayed
''',
'suspect_management_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-memory-allocation-failed', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a memory allocation
failure
''',
'suspect_memory_allocation_failed',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-misordering', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as misordering has been detected,
affecting results
''',
'suspect_misordering',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-multiple-buckets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as the probe has been configured
across multiple buckets
''',
'suspect_multiple_buckets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-premature-end', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe ending
prematurely
''',
'suspect_premature_end',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-probe-restarted', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as probe restarted mid-way
through the bucket
''',
'suspect_probe_restarted',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-schedule-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to scheduling latency
causing one or more packets to not be sent
''',
'suspect_schedule_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-send-fail', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to failure to send one or
more packets
''',
'suspect_send_fail',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-start-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe starting mid-way
through a bucket
''',
'suspect_start_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-maximum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the maximum value was
recorded
''',
'time_of_maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-minimum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the minimum value was
recorded
''',
'time_of_minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bucket',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric',
False,
[
_MetaInfoClassMember('bucket', REFERENCE_LIST, 'Bucket' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket',
[], [],
''' Buckets stored for the metric
''',
'bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('config', REFERENCE_CLASS, 'Config' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config',
[], [],
''' Configuration of the metric
''',
'config',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-metric',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent',
False,
[
_MetaInfoClassMember('display-long', ATTRIBUTE, 'str' , None, None,
[], [],
''' Long display name used by the operation
''',
'display_long',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('display-short', ATTRIBUTE, 'str' , None, None,
[], [],
''' Short display name used by the operation
''',
'display_short',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('domain-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain name
''',
'domain_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('flr-calculation-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between FLR calculations for SLM, in
milliseconds
''',
'flr_calculation_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Unicast MAC Address in xxxx.xxxx.xxxx format.
Either MEP ID or MAC address must be
specified.
''',
'mac_address',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mep-id', ATTRIBUTE, 'int' , None, None,
[('1', '8191')], [],
''' MEP ID in the range 1 to 8191. Either MEP ID
or MAC address must be specified.
''',
'mep_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Operation ID
''',
'operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-metric', REFERENCE_LIST, 'OperationMetric' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric',
[], [],
''' Metrics gathered for the operation
''',
'operation_metric',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-schedule', REFERENCE_CLASS, 'OperationSchedule' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule',
[], [],
''' Operation schedule
''',
'operation_schedule',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-type', ATTRIBUTE, 'str' , None, None,
[], [],
''' Type of probe used by the operation
''',
'probe_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('specific-options', REFERENCE_CLASS, 'SpecificOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions',
[], [],
''' Options specific to the type of operation
''',
'specific_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-on-demand-current',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandCurrents',
False,
[
_MetaInfoClassMember('statistics-on-demand-current', REFERENCE_LIST, 'StatisticsOnDemandCurrent' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent',
[], [],
''' Current statistics data for an SLA on-demand
operation
''',
'statistics_on_demand_current',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-on-demand-currents',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding',
False,
[
_MetaInfoClassMember('packet-pad-size', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Size that packets are being padded to
''',
'packet_pad_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('test-pattern-pad-hex-string', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Hex string that is used in the packet padding
''',
'test_pattern_pad_hex_string',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('test-pattern-pad-scheme', REFERENCE_ENUM_CLASS, 'SlaOperTestPatternSchemeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperTestPatternSchemeEnum',
[], [],
''' Test pattern scheme that is used in the packet
padding
''',
'test_pattern_pad_scheme',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'packet-padding',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority',
False,
[
_MetaInfoClassMember('cos', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' 3-bit COS priority value applied to packets
''',
'cos',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('priority-type', REFERENCE_ENUM_CLASS, 'SlaOperPacketPriorityEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperPacketPriorityEnum',
[], [],
''' PriorityType
''',
'priority_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'priority',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule',
False,
[
_MetaInfoClassMember('schedule-duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Duration of a probe for the operation in seconds
''',
'schedule_duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('schedule-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between the start times of consecutive
probes, in seconds.
''',
'schedule_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Start time of the first probe, in seconds since
the Unix Epoch
''',
'start_time',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time-configured', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether or not the operation start time was
explicitly configured
''',
'start_time_configured',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-schedule',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig',
False,
[
_MetaInfoClassMember('bins-count', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Total number of bins into which to aggregate. 0
if no aggregation.
''',
'bins_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bins-width', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Width of each bin into which to aggregate. 0 if
no aggregation. For SLM, the units of this value
are in single units of percent; for LMM they are
in tenths of percent; for other measurements
they are in milliseconds.
''',
'bins_width',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Size of buckets into which measurements are
collected
''',
'bucket_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size-unit', REFERENCE_ENUM_CLASS, 'SlaBucketSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSizeEnum',
[], [],
''' Whether bucket size is 'per-probe' or 'probes'
''',
'bucket_size_unit',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum number of buckets to store in memory
''',
'buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-type', REFERENCE_ENUM_CLASS, 'SlaRecordableMetricEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetricEnum',
[], [],
''' Type of metric to which this configuration
applies
''',
'metric_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'metric-config',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric',
False,
[
_MetaInfoClassMember('current-buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of valid buckets currently in the buckets
archive
''',
'current_buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-config', REFERENCE_CLASS, 'MetricConfig' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig',
[], [],
''' Configuration of the metric
''',
'metric_config',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-metric',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions',
False,
[
_MetaInfoClassMember('bursts-per-probe', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of bursts sent per probe
''',
'bursts_per_probe',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('flr-calculation-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between FLR calculations for SLM, in
milliseconds
''',
'flr_calculation_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('inter-burst-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between bursts within a probe in
milliseconds
''',
'inter_burst_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('inter-packet-interval', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Interval between packets within a burst in
milliseconds
''',
'inter_packet_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-metric', REFERENCE_LIST, 'OperationMetric' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric',
[], [],
''' Array of the metrics that are measured by the
operation
''',
'operation_metric',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-schedule', REFERENCE_CLASS, 'OperationSchedule' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule',
[], [],
''' Operation schedule
''',
'operation_schedule',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('packet-padding', REFERENCE_CLASS, 'PacketPadding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding',
[], [],
''' Configuration of the packet padding
''',
'packet_padding',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('packets-per-burst', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Number of packets sent per burst
''',
'packets_per_burst',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('priority', REFERENCE_CLASS, 'Priority' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority',
[], [],
''' Priority at which to send the packet, if
configured
''',
'priority',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-type', ATTRIBUTE, 'str' , None, None,
[], [],
''' Type of probe used by the operation
''',
'probe_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'profile-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions',
False,
[
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of the profile used by the operation
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'configured-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions',
False,
[
_MetaInfoClassMember('ondemand-operation-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' ID of the ondemand operation
''',
'ondemand_operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-count', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Total number of probes sent during the operation
''',
'probe_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'ondemand-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions',
False,
[
_MetaInfoClassMember('configured-operation-options', REFERENCE_CLASS, 'ConfiguredOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions',
[], [],
''' Parameters for a configured operation
''',
'configured_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ondemand-operation-options', REFERENCE_CLASS, 'OndemandOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions',
[], [],
''' Parameters for an ondemand operation
''',
'ondemand_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('oper-type', REFERENCE_ENUM_CLASS, 'SlaOperOperationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperationEnum',
[], [],
''' OperType
''',
'oper_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'specific-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations.Operation' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations.Operation',
False,
[
_MetaInfoClassMember('display-long', ATTRIBUTE, 'str' , None, None,
[], [],
''' Long display name used by the operation
''',
'display_long',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('display-short', ATTRIBUTE, 'str' , None, None,
[], [],
''' Short display name used by the operation
''',
'display_short',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('domain-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain name
''',
'domain_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('last-run', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Time that the last probe for the operation was
run, NULL if never run.
''',
'last_run',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Unicast MAC Address in xxxx.xxxx.xxxx format.
Either MEP ID or MAC address must be
specified.
''',
'mac_address',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mep-id', ATTRIBUTE, 'int' , None, None,
[('1', '8191')], [],
''' MEP ID in the range 1 to 8191. Either MEP ID
or MAC address must be specified.
''',
'mep_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Profile Name
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-options', REFERENCE_CLASS, 'ProfileOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions',
[], [],
''' Options that are only valid if the operation has
a profile
''',
'profile_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('specific-options', REFERENCE_CLASS, 'SpecificOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions',
[], [],
''' Options specific to the type of operation
''',
'specific_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.Operations' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.Operations',
False,
[
_MetaInfoClassMember('operation', REFERENCE_LIST, 'Operation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations.Operation',
[], [],
''' SLA operation to get operation data for
''',
'operation',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operations',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
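    # Illustrative sketch, not part of the generated metadata: the entries above
    # register the operations subtree, which is typically read back with YDK's
    # CRUD service. The device address and credentials below are assumptions;
    # the class and module names mirror the meta-info registered in this table.
    #
    #   from ydk.services import CRUDService
    #   from ydk.providers import NetconfServiceProvider
    #   from ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper import Sla
    #
    #   provider = NetconfServiceProvider(address='192.0.2.1',
    #                                     username='admin', password='admin')
    #   sla = CRUDService().read(provider, Sla())
    #   for op in sla.protocols.ethernet.operations.operation:
    #       print(op.display_short, op.probe_type)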
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions',
False,
[
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of the profile used by the operation
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'configured-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions',
False,
[
_MetaInfoClassMember('ondemand-operation-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' ID of the ondemand operation
''',
'ondemand_operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-count', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Total number of probes sent during the operation
''',
'probe_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'ondemand-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions',
False,
[
_MetaInfoClassMember('configured-operation-options', REFERENCE_CLASS, 'ConfiguredOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions',
[], [],
''' Parameters for a configured operation
''',
'configured_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ondemand-operation-options', REFERENCE_CLASS, 'OndemandOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions',
[], [],
''' Parameters for an ondemand operation
''',
'ondemand_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('oper-type', REFERENCE_ENUM_CLASS, 'SlaOperOperationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperationEnum',
[], [],
''' OperType
''',
'oper_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'specific-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule',
False,
[
_MetaInfoClassMember('schedule-duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Duration of a probe for the operation in seconds
''',
'schedule_duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('schedule-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between the start times of consecutive
probes, in seconds.
''',
'schedule_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Start time of the first probe, in seconds since
the Unix Epoch
''',
'start_time',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time-configured', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether or not the operation start time was
explicitly configured
''',
'start_time_configured',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-schedule',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
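    # Worked sketch of the schedule fields registered above (illustrative values,
    # not from any device): start-time is seconds since the Unix epoch,
    # schedule-interval is the gap between consecutive probe start times, and
    # schedule-duration is how long each probe runs.
    #
    #   start_time = 1500000000          # first probe start (Unix seconds)
    #   schedule_interval = 60           # a new probe starts every 60 s
    #   schedule_duration = 50           # each probe runs for 50 s
    #   probe_n_start = start_time + n * schedule_interval
    #   probe_n_end = probe_n_start + schedule_duration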
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config',
False,
[
_MetaInfoClassMember('bins-count', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Total number of bins into which to aggregate. 0
if no aggregation.
''',
'bins_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bins-width', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Width of each bin into which to aggregate. 0 if
no aggregation. For SLM, the units of this value
are in single units of percent; for LMM they are
in tenths of percent; for other measurements
they are in milliseconds.
''',
'bins_width',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Size of buckets into which measurements are
collected
''',
'bucket_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size-unit', REFERENCE_ENUM_CLASS, 'SlaBucketSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSizeEnum',
[], [],
''' Whether bucket size is 'per-probe' or 'probes'
''',
'bucket_size_unit',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum number of buckets to store in memory
''',
'buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-type', REFERENCE_ENUM_CLASS, 'SlaRecordableMetricEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetricEnum',
[], [],
''' Type of metric to which this configuration
applies
''',
'metric_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'config',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
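    # Illustrative reading of the metric configuration above (all values assumed):
    # bins-count/bins-width control aggregation into bins, with bins-width in
    # milliseconds for delay/jitter, single percent for SLM and tenths of a
    # percent for LMM; bins-count 0 means samples are kept unaggregated.
    # bucket-size plus bucket-size-unit ('per-probe' or 'probes') sets how
    # measurements map onto buckets; the exact mapping is defined by the device,
    # so treat this sketch as an assumption.
    #
    #   bins_count = 10                  # aggregate into 10 bins per bucket
    #   bins_width = 5                   # each delay bin spans 5 ms
    #   bucket_size, bucket_size_unit = 1, 'probes'
    #   buckets_archive = 100            # keep at most 100 buckets in memory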
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins',
False,
[
_MetaInfoClassMember('count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The total number of results in the bin
''',
'count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'lower_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'lower_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sum', ATTRIBUTE, 'int' , None, None,
[('-9223372036854775808', '9223372036854775807')], [],
''' The sum of the results in the bin, in
microseconds or millionths of a percent
''',
'sum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'upper_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'upper_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bins',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
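    # Sketch of how the bin fields above combine (values are assumptions): a
    # result r lands in the bin with lower-bound <= r < upper-bound, and the
    # per-bin mean is sum / count. Note the unit split: the bounds are in
    # milliseconds (or percent) while the sum is in microseconds (or millionths
    # of a percent).
    #
    #   lower_bound, upper_bound = 5, 10       # bin covers 5 ms <= r < 10 ms
    #   count, bin_sum = 42, 294000            # 42 results totalling 294000 us
    #   mean_us = bin_sum / count              # 7000 us = 7 ms, inside the bin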
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated',
False,
[
_MetaInfoClassMember('bins', REFERENCE_LIST, 'Bins' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins',
[], [],
''' The bins of an SLA metric bucket
''',
'bins',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'aggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample',
False,
[
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was corrupt
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames lost,
if available
''',
'frames_lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames sent,
if available
''',
'frames_sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('no-data-packets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether a measurement could not be made because
no data packets were sent in the sample period.
Only applicable for LMM measurements
''',
'no_data_packets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was received
out-of-order
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' The result (in microseconds or millionths of a
percent) of the sample, if available
''',
'result',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Whether the sample packet was successfully sent
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The time (in milliseconds relative to the start
time of the bucket) that the sample was sent at
''',
'sent_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('timed-out', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet timed out
''',
'timed_out',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'sample',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
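    # Illustrative combination of a sample with its enclosing bucket (values are
    # assumptions): sent-at is milliseconds relative to the bucket's start-at,
    # which is an absolute time in seconds.
    #
    #   bucket_start_at = 1500000000           # bucket start (Unix seconds)
    #   sample_sent_at = 2500                  # sent 2.5 s into the bucket
    #   absolute_send_time = bucket_start_at + sample_sent_at / 1000.0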
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated',
False,
[
_MetaInfoClassMember('sample', REFERENCE_LIST, 'Sample' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample',
[], [],
''' The samples of an SLA metric bucket
''',
'sample',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'unaggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents',
False,
[
_MetaInfoClassMember('aggregated', REFERENCE_CLASS, 'Aggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated',
[], [],
''' Result bins in an SLA metric bucket
''',
'aggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-type', REFERENCE_ENUM_CLASS, 'SlaOperBucketEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucketEnum',
[], [],
''' BucketType
''',
'bucket_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('unaggregated', REFERENCE_CLASS, 'Unaggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated',
[], [],
''' Result samples in an SLA metric bucket
''',
'unaggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'contents',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket',
False,
[
_MetaInfoClassMember('average', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Mean of the results in the probe, in
microseconds or millionths of a percent
''',
'average',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('contents', REFERENCE_CLASS, 'Contents' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents',
[], [],
''' The contents of the bucket; bins or samples
''',
'contents',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of corrupt packets in the probe
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-lost-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets lost across the
bucket, used in the calculation of overall FLR.
''',
'data_lost_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-sent-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets sent across the
bucket, used in the calculation of overall FLR.
''',
'data_sent_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duplicates', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of duplicate packets received in the
probe
''',
'duplicates',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Length of time for which the bucket is being
filled in seconds
''',
'duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of lost packets in the probe
''',
'lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('maximum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
                    ''' Overall maximum result in the probe, in
                    microseconds or millionths of a percent
''',
'maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('minimum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Overall minimum result in the probe, in
microseconds or millionths of a percent
''',
'minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
                    ''' Number of packets received out-of-order in the
                    probe
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('overall-flr', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Frame Loss Ratio across the whole bucket, in
millionths of a percent
''',
'overall_flr',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' If the probe ended prematurely, the error that
caused a probe to end
''',
'premature_reason',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason-string', ATTRIBUTE, 'str' , None, None,
[], [],
''' Description of the error code that caused the
probe to end prematurely. For informational
purposes only
''',
'premature_reason_string',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The count of samples collected in the bucket.
''',
'result_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of packets sent in the probe
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('standard-deviation', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Standard deviation of the results in the probe,
in microseconds or millionths of a percent
''',
'standard_deviation',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the bucket started being
filled at
''',
'start_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-cleared-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as bucket was cleared mid-way
through being filled
''',
'suspect_cleared_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-clock-drift', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Results suspect as more than 10 seconds of
                    time drift was detected
''',
'suspect_clock_drift',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-flr-low-packet-count', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Results suspect as FLR was calculated from a
                    low packet count
''',
'suspect_flr_low_packet_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-management-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as processing of results has
been delayed
''',
'suspect_management_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-memory-allocation-failed', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a memory allocation
failure
''',
'suspect_memory_allocation_failed',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-misordering', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Results suspect as misordering has been
                    detected, affecting results
''',
'suspect_misordering',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-multiple-buckets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as the probe has been configured
across multiple buckets
''',
'suspect_multiple_buckets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-premature-end', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe ending
prematurely
''',
'suspect_premature_end',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-probe-restarted', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as probe restarted mid-way
through the bucket
''',
'suspect_probe_restarted',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-schedule-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to scheduling latency
causing one or more packets to not be sent
''',
'suspect_schedule_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-send-fail', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to failure to send one or
more packets
''',
'suspect_send_fail',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-start-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe starting mid-way
through a bucket
''',
'suspect_start_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-maximum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the maximum value was
recorded
''',
'time_of_maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-minimum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the minimum value was
recorded
''',
'time_of_minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bucket',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
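    # Worked sketch of the overall FLR fields above (values are assumptions):
    # data-lost-count and data-sent-count feed the overall Frame Loss Ratio,
    # which is reported in millionths of a percent.
    #
    #   data_lost_count = 7
    #   data_sent_count = 10000
    #   overall_flr = int(data_lost_count / data_sent_count * 100 * 1e6)
    #   # 7 / 10000 = 0.07% loss -> overall_flr == 70000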
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric',
False,
[
_MetaInfoClassMember('bucket', REFERENCE_LIST, 'Bucket' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket',
[], [],
''' Buckets stored for the metric
''',
'bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('config', REFERENCE_CLASS, 'Config' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config',
[], [],
''' Configuration of the metric
''',
'config',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-metric',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical',
False,
[
_MetaInfoClassMember('display-long', ATTRIBUTE, 'str' , None, None,
[], [],
''' Long display name used by the operation
''',
'display_long',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('display-short', ATTRIBUTE, 'str' , None, None,
[], [],
''' Short display name used by the operation
''',
'display_short',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('domain-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain name
''',
'domain_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('flr-calculation-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between FLR calculations for SLM, in
milliseconds
''',
'flr_calculation_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Unicast MAC Address in xxxx.xxxx.xxxx format.
Either MEP ID or MAC address must be
specified.
''',
'mac_address',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mep-id', ATTRIBUTE, 'int' , None, None,
[('1', '8191')], [],
''' MEP ID in the range 1 to 8191. Either MEP ID
or MAC address must be specified.
''',
'mep_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-metric', REFERENCE_LIST, 'OperationMetric' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric',
[], [],
''' Metrics gathered for the operation
''',
'operation_metric',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-schedule', REFERENCE_CLASS, 'OperationSchedule' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule',
[], [],
''' Operation schedule
''',
'operation_schedule',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-type', ATTRIBUTE, 'str' , None, None,
[], [],
''' Type of probe used by the operation
''',
'probe_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Profile Name
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('specific-options', REFERENCE_CLASS, 'SpecificOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions',
[], [],
''' Options specific to the type of operation
''',
'specific_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-historical',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsHistoricals' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsHistoricals',
False,
[
_MetaInfoClassMember('statistics-historical', REFERENCE_LIST, 'StatisticsHistorical' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical',
[], [],
''' Historical statistics data for an SLA
configured operation
''',
'statistics_historical',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-historicals',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions',
False,
[
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of the profile used by the operation
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'configured-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions',
False,
[
_MetaInfoClassMember('ondemand-operation-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' ID of the ondemand operation
''',
'ondemand_operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-count', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Total number of probes sent during the operation
''',
'probe_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'ondemand-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions',
False,
[
_MetaInfoClassMember('configured-operation-options', REFERENCE_CLASS, 'ConfiguredOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions',
[], [],
''' Parameters for a configured operation
''',
'configured_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ondemand-operation-options', REFERENCE_CLASS, 'OndemandOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions',
[], [],
''' Parameters for an ondemand operation
''',
'ondemand_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('oper-type', REFERENCE_ENUM_CLASS, 'SlaOperOperationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperationEnum',
[], [],
''' OperType
''',
'oper_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'specific-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule',
False,
[
_MetaInfoClassMember('schedule-duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Duration of a probe for the operation in seconds
''',
'schedule_duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('schedule-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between the start times of consecutive
probes, in seconds.
''',
'schedule_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Start time of the first probe, in seconds since
the Unix Epoch
''',
'start_time',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time-configured', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether or not the operation start time was
explicitly configured
''',
'start_time_configured',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-schedule',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config',
False,
[
_MetaInfoClassMember('bins-count', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Total number of bins into which to aggregate. 0
if no aggregation.
''',
'bins_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bins-width', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Width of each bin into which to aggregate. 0 if
no aggregation. For SLM, the units of this value
are in single units of percent; for LMM they are
in tenths of percent; for other measurements
they are in milliseconds.
''',
'bins_width',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Size of buckets into which measurements are
collected
''',
'bucket_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size-unit', REFERENCE_ENUM_CLASS, 'SlaBucketSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSizeEnum',
[], [],
''' Whether bucket size is 'per-probe' or 'probes'
''',
'bucket_size_unit',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum number of buckets to store in memory
''',
'buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-type', REFERENCE_ENUM_CLASS, 'SlaRecordableMetricEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetricEnum',
[], [],
''' Type of metric to which this configuration
applies
''',
'metric_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'config',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins',
False,
[
_MetaInfoClassMember('count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The total number of results in the bin
''',
'count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'lower_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'lower_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sum', ATTRIBUTE, 'int' , None, None,
[('-9223372036854775808', '9223372036854775807')], [],
''' The sum of the results in the bin, in
microseconds or millionths of a percent
''',
'sum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'upper_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'upper_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bins',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated',
False,
[
_MetaInfoClassMember('bins', REFERENCE_LIST, 'Bins' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins',
[], [],
''' The bins of an SLA metric bucket
''',
'bins',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'aggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample',
False,
[
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was corrupt
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames lost,
if available
''',
'frames_lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames sent,
if available
''',
'frames_sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('no-data-packets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether a measurement could not be made because
no data packets were sent in the sample period.
Only applicable for LMM measurements
''',
'no_data_packets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was received
out-of-order
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' The result (in microseconds or millionths of a
percent) of the sample, if available
''',
'result',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Whether the sample packet was successfully sent
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The time (in milliseconds relative to the start
time of the bucket) that the sample was sent at
''',
'sent_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('timed-out', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet timed out
''',
'timed_out',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'sample',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated',
False,
[
_MetaInfoClassMember('sample', REFERENCE_LIST, 'Sample' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample',
[], [],
''' The samples of an SLA metric bucket
''',
'sample',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'unaggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents',
False,
[
_MetaInfoClassMember('aggregated', REFERENCE_CLASS, 'Aggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated',
[], [],
''' Result bins in an SLA metric bucket
''',
'aggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-type', REFERENCE_ENUM_CLASS, 'SlaOperBucketEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucketEnum',
[], [],
''' BucketType
''',
'bucket_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('unaggregated', REFERENCE_CLASS, 'Unaggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated',
[], [],
''' Result samples in an SLA metric bucket
''',
'unaggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'contents',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket',
False,
[
_MetaInfoClassMember('average', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Mean of the results in the probe, in
microseconds or millionths of a percent
''',
'average',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('contents', REFERENCE_CLASS, 'Contents' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents',
[], [],
''' The contents of the bucket; bins or samples
''',
'contents',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of corrupt packets in the probe
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-lost-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets lost across the
bucket, used in the calculation of overall FLR.
''',
'data_lost_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-sent-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets sent across the
bucket, used in the calculation of overall FLR.
''',
'data_sent_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duplicates', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of duplicate packets received in the
probe
''',
'duplicates',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Length of time for which the bucket is being
filled in seconds
''',
'duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of lost packets in the probe
''',
'lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('maximum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
                    ''' Overall maximum result in the probe, in
                    microseconds or millionths of a percent
''',
'maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('minimum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Overall minimum result in the probe, in
microseconds or millionths of a percent
''',
'minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
                    ''' Number of packets received out-of-order in the
                    probe
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('overall-flr', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Frame Loss Ratio across the whole bucket, in
millionths of a percent
''',
'overall_flr',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' If the probe ended prematurely, the error that
caused a probe to end
''',
'premature_reason',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason-string', ATTRIBUTE, 'str' , None, None,
[], [],
''' Description of the error code that caused the
probe to end prematurely. For informational
purposes only
''',
'premature_reason_string',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The count of samples collected in the bucket.
''',
'result_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of packets sent in the probe
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('standard-deviation', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Standard deviation of the results in the probe,
in microseconds or millionths of a percent
''',
'standard_deviation',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the bucket started being
filled at
''',
'start_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-cleared-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as bucket was cleared mid-way
through being filled
''',
'suspect_cleared_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-clock-drift', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Results suspect as more than 10 seconds of
                    time drift was detected
''',
'suspect_clock_drift',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-flr-low-packet-count', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Results suspect as FLR was calculated from a
                    low packet count
''',
'suspect_flr_low_packet_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-management-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as processing of results has
been delayed
''',
'suspect_management_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-memory-allocation-failed', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a memory allocation
failure
''',
'suspect_memory_allocation_failed',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-misordering', ATTRIBUTE, 'bool' , None, None,
[], [],
                    ''' Results suspect as misordering has been
                    detected, affecting results
''',
'suspect_misordering',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-multiple-buckets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as the probe has been configured
across multiple buckets
''',
'suspect_multiple_buckets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-premature-end', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe ending
prematurely
''',
'suspect_premature_end',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-probe-restarted', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as probe restarted mid-way
through the bucket
''',
'suspect_probe_restarted',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-schedule-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to scheduling latency
causing one or more packets to not be sent
''',
'suspect_schedule_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-send-fail', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to failure to send one or
more packets
''',
'suspect_send_fail',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-start-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe starting mid-way
through a bucket
''',
'suspect_start_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-maximum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the maximum value was
recorded
''',
'time_of_maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-minimum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the minimum value was
recorded
''',
'time_of_minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bucket',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric',
False,
[
_MetaInfoClassMember('bucket', REFERENCE_LIST, 'Bucket' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket',
[], [],
''' Buckets stored for the metric
''',
'bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('config', REFERENCE_CLASS, 'Config' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config',
[], [],
''' Configuration of the metric
''',
'config',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-metric',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical',
False,
[
_MetaInfoClassMember('display-long', ATTRIBUTE, 'str' , None, None,
[], [],
''' Long display name used by the operation
''',
'display_long',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('display-short', ATTRIBUTE, 'str' , None, None,
[], [],
''' Short display name used by the operation
''',
'display_short',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('domain-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain name
''',
'domain_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('flr-calculation-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between FLR calculations for SLM, in
milliseconds
''',
'flr_calculation_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Unicast MAC Address in xxxx.xxxx.xxxx format.
Either MEP ID or MAC address must be
specified.
''',
'mac_address',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mep-id', ATTRIBUTE, 'int' , None, None,
[('1', '8191')], [],
''' MEP ID in the range 1 to 8191. Either MEP ID
or MAC address must be specified.
''',
'mep_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Operation ID
''',
'operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-metric', REFERENCE_LIST, 'OperationMetric' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric',
[], [],
''' Metrics gathered for the operation
''',
'operation_metric',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-schedule', REFERENCE_CLASS, 'OperationSchedule' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule',
[], [],
''' Operation schedule
''',
'operation_schedule',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-type', ATTRIBUTE, 'str' , None, None,
[], [],
''' Type of probe used by the operation
''',
'probe_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('specific-options', REFERENCE_CLASS, 'SpecificOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions',
[], [],
''' Options specific to the type of operation
''',
'specific_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-on-demand-historical',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals',
False,
[
_MetaInfoClassMember('statistics-on-demand-historical', REFERENCE_LIST, 'StatisticsOnDemandHistorical' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical',
[], [],
''' Historical statistics data for an SLA
on-demand operation
''',
'statistics_on_demand_historical',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-on-demand-historicals',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
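    # Illustrative sketch (session details assumed): on-demand historical
    # statistics are keyed by operation-id, so a read can be filtered on it.
    # Names mirror the meta-info above; 'provider' is a NetconfServiceProvider
    # created as in the earlier sketch.
    #
    #   sla = CRUDService().read(provider, Sla())
    #   hist = sla.protocols.ethernet.statistics_on_demand_historicals
    #   for stats in hist.statistics_on_demand_historical:
    #       print(stats.operation_id, stats.probe_type, stats.display_long)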
'Sla.Protocols.Ethernet.ConfigErrors.ConfigError' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.ConfigErrors.ConfigError',
False,
[
_MetaInfoClassMember('display-short', ATTRIBUTE, 'str' , None, None,
[], [],
''' Short display name used by the operation
''',
'display_short',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('domain-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain name
''',
'domain_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('error-string', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
                    ''' Displays other issues not indicated by the
                    flags above, for example MIB incompatibility
                    issues.
''',
'error_string',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Unicast MAC Address in xxxx.xxxx.xxxx format
''',
'mac_address',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mep-id', ATTRIBUTE, 'int' , None, None,
[('1', '8191')], [],
''' MEP ID in the range 1 to 8191
''',
'mep_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('min-packet-interval-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to send packets more
frequently than the protocol allows?
''',
'min_packet_interval_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ow-delay-ds-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to collect OW Delay
(DS) but the packet type doesn't support it?
''',
'ow_delay_ds_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ow-delay-sd-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to collect OW Delay
(SD) but the packet type doesn't support it?
''',
'ow_delay_sd_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ow-jitter-ds-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' Is the profile configured to collect OW Jitter
                (DS) but the packet type doesn't support it?
''',
'ow_jitter_ds_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ow-jitter-sd-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to collect OW Jitter
(SD) but the packet type doesn't support it?
''',
'ow_jitter_sd_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ow-loss-ds-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' Is the profile configured to collect OW Frame
                Loss (DS) but the packet type doesn't support it?
''',
'ow_loss_ds_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ow-loss-sd-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' Is the profile configured to collect OW Frame
                Loss (SD) but the packet type doesn't support it?
''',
'ow_loss_sd_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('packet-pad-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to pad packets but the
packet type doesn't support it?
''',
'packet_pad_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('packet-rand-pad-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to pad packets with a
pseudo-random string but the packet type doesn't
support it?
''',
'packet_rand_pad_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('packet-type-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to use a packet type
that isn't supported by any protocols?
''',
'packet_type_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('priority-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to use a packet
priority scheme that the protocol does not
support?
''',
'priority_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-too-big', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' The profile is configured to use a packet type
                which does not allow more than 72000 packets per
                probe, but more than 72000 packets per probe have
                been configured
''',
'probe_too_big',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-doesnt-exist', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the operation configured to use a profile
that is not currently defined for the protocol?
''',
'profile_doesnt_exist',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Profile Name
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-name-xr', ATTRIBUTE, 'str' , None, None,
[], [],
''' The name of the operation profile.
''',
'profile_name_xr',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('rt-delay-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to collect RT Delay
but the packet type doesn't support it?
''',
'rt_delay_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('rt-jitter-inconsistent', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is the profile configured to collect RT Jitter
but the packet type doesn't support it?
''',
'rt_jitter_inconsistent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('synthetic-loss-not-supported', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' The profile is configured to use a packet type
                which doesn't support synthetic loss measurement,
                but the number of packets per FLR calculation has
                been configured
''',
'synthetic_loss_not_supported',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'config-error',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.ConfigErrors' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.ConfigErrors',
False,
[
_MetaInfoClassMember('config-error', REFERENCE_LIST, 'ConfigError' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.ConfigErrors.ConfigError',
[], [],
''' SLA operation to get configuration errors data
for
''',
'config_error',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'config-errors',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding',
False,
[
_MetaInfoClassMember('packet-pad-size', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Size that packets are being padded to
''',
'packet_pad_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('test-pattern-pad-hex-string', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Hex string that is used in the packet padding
''',
'test_pattern_pad_hex_string',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('test-pattern-pad-scheme', REFERENCE_ENUM_CLASS, 'SlaOperTestPatternSchemeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperTestPatternSchemeEnum',
[], [],
''' Test pattern scheme that is used in the packet
padding
''',
'test_pattern_pad_scheme',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'packet-padding',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority',
False,
[
_MetaInfoClassMember('cos', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' 3-bit COS priority value applied to packets
''',
'cos',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('priority-type', REFERENCE_ENUM_CLASS, 'SlaOperPacketPriorityEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperPacketPriorityEnum',
[], [],
''' PriorityType
''',
'priority_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'priority',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule',
False,
[
_MetaInfoClassMember('schedule-duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Duration of a probe for the operation in seconds
''',
'schedule_duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('schedule-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between the start times of consecutive
probes, in seconds.
''',
'schedule_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Start time of the first probe, in seconds since
the Unix Epoch
''',
'start_time',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time-configured', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether or not the operation start time was
explicitly configured
''',
'start_time_configured',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-schedule',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig',
False,
[
_MetaInfoClassMember('bins-count', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Total number of bins into which to aggregate. 0
if no aggregation.
''',
'bins_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bins-width', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Width of each bin into which to aggregate. 0 if
no aggregation. For SLM, the units of this value
are in single units of percent; for LMM they are
in tenths of percent; for other measurements
they are in milliseconds.
''',
'bins_width',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Size of buckets into which measurements are
collected
''',
'bucket_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size-unit', REFERENCE_ENUM_CLASS, 'SlaBucketSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSizeEnum',
[], [],
''' Whether bucket size is 'per-probe' or 'probes'
''',
'bucket_size_unit',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum number of buckets to store in memory
''',
'buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-type', REFERENCE_ENUM_CLASS, 'SlaRecordableMetricEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetricEnum',
[], [],
''' Type of metric to which this configuration
applies
''',
'metric_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'metric-config',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric',
False,
[
_MetaInfoClassMember('current-buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of valid buckets currently in the buckets
archive
''',
'current_buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-config', REFERENCE_CLASS, 'MetricConfig' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig',
[], [],
''' Configuration of the metric
''',
'metric_config',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-metric',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions',
False,
[
_MetaInfoClassMember('bursts-per-probe', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of bursts sent per probe
''',
'bursts_per_probe',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('flr-calculation-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between FLR calculations for SLM, in
milliseconds
''',
'flr_calculation_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('inter-burst-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between bursts within a probe in
milliseconds
''',
'inter_burst_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('inter-packet-interval', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Interval between packets within a burst in
milliseconds
''',
'inter_packet_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-metric', REFERENCE_LIST, 'OperationMetric' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric',
[], [],
''' Array of the metrics that are measured by the
operation
''',
'operation_metric',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-schedule', REFERENCE_CLASS, 'OperationSchedule' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule',
[], [],
''' Operation schedule
''',
'operation_schedule',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('packet-padding', REFERENCE_CLASS, 'PacketPadding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding',
[], [],
''' Configuration of the packet padding
''',
'packet_padding',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('packets-per-burst', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Number of packets sent per burst
''',
'packets_per_burst',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('priority', REFERENCE_CLASS, 'Priority' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority',
[], [],
''' Priority at which to send the packet, if
configured
''',
'priority',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-type', ATTRIBUTE, 'str' , None, None,
[], [],
''' Type of probe used by the operation
''',
'probe_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'profile-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions',
False,
[
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of the profile used by the operation
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'configured-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions',
False,
[
_MetaInfoClassMember('ondemand-operation-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' ID of the ondemand operation
''',
'ondemand_operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-count', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Total number of probes sent during the operation
''',
'probe_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'ondemand-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions',
False,
[
_MetaInfoClassMember('configured-operation-options', REFERENCE_CLASS, 'ConfiguredOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions',
[], [],
''' Parameters for a configured operation
''',
'configured_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ondemand-operation-options', REFERENCE_CLASS, 'OndemandOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions',
[], [],
''' Parameters for an ondemand operation
''',
'ondemand_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('oper-type', REFERENCE_ENUM_CLASS, 'SlaOperOperationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperationEnum',
[], [],
''' OperType
''',
'oper_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'specific-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation',
False,
[
_MetaInfoClassMember('display-long', ATTRIBUTE, 'str' , None, None,
[], [],
''' Long display name used by the operation
''',
'display_long',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('display-short', ATTRIBUTE, 'str' , None, None,
[], [],
''' Short display name used by the operation
''',
'display_short',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('domain-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain name
''',
'domain_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('last-run', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Time that the last probe for the operation was
run, NULL if never run.
''',
'last_run',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Unicast MAC Address in xxxx.xxxx.xxxx format.
Either MEP ID or MAC address must be
specified.
''',
'mac_address',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mep-id', ATTRIBUTE, 'int' , None, None,
[('1', '8191')], [],
''' MEP ID in the range 1 to 8191. Either MEP ID
or MAC address must be specified.
''',
'mep_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Operation ID
''',
'operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-options', REFERENCE_CLASS, 'ProfileOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions',
[], [],
''' Options that are only valid if the operation has
a profile
''',
'profile_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('specific-options', REFERENCE_CLASS, 'SpecificOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions',
[], [],
''' Options specific to the type of operation
''',
'specific_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'on-demand-operation',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.OnDemandOperations' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.OnDemandOperations',
False,
[
_MetaInfoClassMember('on-demand-operation', REFERENCE_LIST, 'OnDemandOperation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation',
[], [],
''' SLA on-demand operation to get operation data
for
''',
'on_demand_operation',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'on-demand-operations',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions',
False,
[
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of the profile used by the operation
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'configured-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions',
False,
[
_MetaInfoClassMember('ondemand-operation-id', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' ID of the ondemand operation
''',
'ondemand_operation_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-count', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Total number of probes sent during the operation
''',
'probe_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'ondemand-operation-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions',
False,
[
_MetaInfoClassMember('configured-operation-options', REFERENCE_CLASS, 'ConfiguredOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions',
[], [],
''' Parameters for a configured operation
''',
'configured_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('ondemand-operation-options', REFERENCE_CLASS, 'OndemandOperationOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions',
[], [],
''' Parameters for an ondemand operation
''',
'ondemand_operation_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('oper-type', REFERENCE_ENUM_CLASS, 'SlaOperOperationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperationEnum',
[], [],
''' OperType
''',
'oper_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'specific-options',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule',
False,
[
_MetaInfoClassMember('schedule-duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Duration of a probe for the operation in seconds
''',
'schedule_duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('schedule-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between the start times of consecutive
probes, in seconds.
''',
'schedule_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Start time of the first probe, in seconds since
the Unix Epoch
''',
'start_time',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-time-configured', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether or not the operation start time was
explicitly configured
''',
'start_time_configured',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-schedule',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config',
False,
[
_MetaInfoClassMember('bins-count', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Total number of bins into which to aggregate. 0
if no aggregation.
''',
'bins_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bins-width', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Width of each bin into which to aggregate. 0 if
no aggregation. For SLM, the units of this value
are in single units of percent; for LMM they are
in tenths of percent; for other measurements
they are in milliseconds.
''',
'bins_width',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Size of buckets into which measurements are
collected
''',
'bucket_size',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-size-unit', REFERENCE_ENUM_CLASS, 'SlaBucketSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSizeEnum',
[], [],
''' Whether bucket size is 'per-probe' or 'probes'
''',
'bucket_size_unit',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('buckets-archive', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum number of buckets to store in memory
''',
'buckets_archive',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('metric-type', REFERENCE_ENUM_CLASS, 'SlaRecordableMetricEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetricEnum',
[], [],
''' Type of metric to which this configuration
applies
''',
'metric_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'config',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins',
False,
[
_MetaInfoClassMember('count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The total number of results in the bin
''',
'count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'lower_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lower-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Lower bound (inclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'lower_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sum', ATTRIBUTE, 'int' , None, None,
[('-9223372036854775808', '9223372036854775807')], [],
''' The sum of the results in the bin, in
microseconds or millionths of a percent
''',
'sum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in
milliseconds or single units of percent. This
field is not used for LMM measurements
''',
'upper_bound',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('upper-bound-tenths', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Upper bound (exclusive) of the bin, in tenths of
percent. This field is only used for LMM
measurements
''',
'upper_bound_tenths',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bins',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated',
False,
[
_MetaInfoClassMember('bins', REFERENCE_LIST, 'Bins' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins',
[], [],
''' The bins of an SLA metric bucket
''',
'bins',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'aggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample',
False,
[
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was corrupt
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames lost,
if available
''',
'frames_lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('frames-sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' For FLR measurements, the number of frames sent,
if available
''',
'frames_sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('no-data-packets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether a measurement could not be made because
no data packets were sent in the sample period.
Only applicable for LMM measurements
''',
'no_data_packets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet was received
out-of-order
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' The result (in microseconds or millionths of a
percent) of the sample, if available
''',
'result',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' Whether the sample packet was successfully sent
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The time (in milliseconds relative to the start
time of the bucket) that the sample was sent at
''',
'sent_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('timed-out', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the sample packet timed out
''',
'timed_out',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'sample',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated',
False,
[
_MetaInfoClassMember('sample', REFERENCE_LIST, 'Sample' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample',
[], [],
''' The samples of an SLA metric bucket
''',
'sample',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'unaggregated',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents',
False,
[
_MetaInfoClassMember('aggregated', REFERENCE_CLASS, 'Aggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated',
[], [],
''' Result bins in an SLA metric bucket
''',
'aggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('bucket-type', REFERENCE_ENUM_CLASS, 'SlaOperBucketEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucketEnum',
[], [],
''' BucketType
''',
'bucket_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('unaggregated', REFERENCE_CLASS, 'Unaggregated' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated',
[], [],
''' Result samples in an SLA metric bucket
''',
'unaggregated',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'contents',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket',
False,
[
_MetaInfoClassMember('average', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Mean of the results in the probe, in
microseconds or millionths of a percent
''',
'average',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('contents', REFERENCE_CLASS, 'Contents' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents',
[], [],
''' The contents of the bucket; bins or samples
''',
'contents',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('corrupt', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of corrupt packets in the probe
''',
'corrupt',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-lost-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets lost across the
bucket, used in the calculation of overall FLR.
''',
'data_lost_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('data-sent-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The number of data packets sent across the
bucket, used in the calculation of overall FLR.
''',
'data_sent_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duplicates', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of duplicate packets received in the
probe
''',
'duplicates',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('duration', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Length of time for which the bucket is being
filled in seconds
''',
'duration',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('lost', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of lost packets in the probe
''',
'lost',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('maximum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
                ''' Overall maximum result in the probe, in
microseconds or millionths of a percent
''',
'maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('minimum', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Overall minimum result in the probe, in
microseconds or millionths of a percent
''',
'minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('out-of-order', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
                ''' Number of packets received out-of-order in the
probe
''',
'out_of_order',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('overall-flr', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Frame Loss Ratio across the whole bucket, in
millionths of a percent
''',
'overall_flr',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' If the probe ended prematurely, the error that
                caused the probe to end
''',
'premature_reason',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('premature-reason-string', ATTRIBUTE, 'str' , None, None,
[], [],
''' Description of the error code that caused the
probe to end prematurely. For informational
purposes only
''',
'premature_reason_string',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('result-count', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The count of samples collected in the bucket.
''',
'result_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('sent', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of packets sent in the probe
''',
'sent',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('standard-deviation', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Standard deviation of the results in the probe,
in microseconds or millionths of a percent
''',
'standard_deviation',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('start-at', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the bucket started being
filled at
''',
'start_at',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-cleared-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' Results suspect as the bucket was cleared mid-way
through being filled
''',
'suspect_cleared_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-clock-drift', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as more than 10 seconds time
drift detected
''',
'suspect_clock_drift',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-flr-low-packet-count', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as FLR calculated based on a low
packet count
''',
'suspect_flr_low_packet_count',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-management-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as processing of results has
been delayed
''',
'suspect_management_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-memory-allocation-failed', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a memory allocation
failure
''',
'suspect_memory_allocation_failed',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-misordering', ATTRIBUTE, 'bool' , None, None,
[], [],
                ''' Results suspect as misordering has been detected,
                affecting results
''',
'suspect_misordering',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-multiple-buckets', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as the probe has been configured
across multiple buckets
''',
'suspect_multiple_buckets',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-premature-end', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe ending
prematurely
''',
'suspect_premature_end',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-probe-restarted', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect as probe restarted mid-way
through the bucket
''',
'suspect_probe_restarted',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-schedule-latency', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to scheduling latency
causing one or more packets to not be sent
''',
'suspect_schedule_latency',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-send-fail', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to failure to send one or
more packets
''',
'suspect_send_fail',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('suspect-start-mid-bucket', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Results suspect due to a probe starting mid-way
through a bucket
''',
'suspect_start_mid_bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-maximum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the maximum value was
recorded
''',
'time_of_maximum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('time-of-minimum', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Absolute time that the minimum value was
recorded
''',
'time_of_minimum',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'bucket',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric',
False,
[
_MetaInfoClassMember('bucket', REFERENCE_LIST, 'Bucket' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket',
[], [],
''' Buckets stored for the metric
''',
'bucket',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('config', REFERENCE_CLASS, 'Config' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config',
[], [],
''' Configuration of the metric
''',
'config',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'operation-metric',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent',
False,
[
_MetaInfoClassMember('display-long', ATTRIBUTE, 'str' , None, None,
[], [],
''' Long display name used by the operation
''',
'display_long',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('display-short', ATTRIBUTE, 'str' , None, None,
[], [],
''' Short display name used by the operation
''',
'display_short',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('domain-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain name
''',
'domain_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('flr-calculation-interval', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interval between FLR calculations for SLM, in
milliseconds
''',
'flr_calculation_interval',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Unicast MAC Address in xxxx.xxxx.xxxx format.
Either MEP ID or MAC address must be
specified.
''',
'mac_address',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('mep-id', ATTRIBUTE, 'int' , None, None,
[('1', '8191')], [],
''' MEP ID in the range 1 to 8191. Either MEP ID
or MAC address must be specified.
''',
'mep_id',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-metric', REFERENCE_LIST, 'OperationMetric' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric',
[], [],
''' Metrics gathered for the operation
''',
'operation_metric',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operation-schedule', REFERENCE_CLASS, 'OperationSchedule' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule',
[], [],
''' Operation schedule
''',
'operation_schedule',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('probe-type', ATTRIBUTE, 'str' , None, None,
[], [],
''' Type of probe used by the operation
''',
'probe_type',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('profile-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Profile Name
''',
'profile_name',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('specific-options', REFERENCE_CLASS, 'SpecificOptions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions',
[], [],
''' Options specific to the type of operation
''',
'specific_options',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-current',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet.StatisticsCurrents' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet.StatisticsCurrents',
False,
[
_MetaInfoClassMember('statistics-current', REFERENCE_LIST, 'StatisticsCurrent' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent',
[], [],
''' Current statistics data for an SLA configured
operation
''',
'statistics_current',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'statistics-currents',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla.Protocols.Ethernet' : {
'meta_info' : _MetaInfoClass('Sla.Protocols.Ethernet',
False,
[
_MetaInfoClassMember('config-errors', REFERENCE_CLASS, 'ConfigErrors' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.ConfigErrors',
[], [],
''' Table of SLA configuration errors on configured
operations
''',
'config_errors',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('on-demand-operations', REFERENCE_CLASS, 'OnDemandOperations' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.OnDemandOperations',
[], [],
''' Table of SLA on-demand operations
''',
'on_demand_operations',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('operations', REFERENCE_CLASS, 'Operations' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.Operations',
[], [],
''' Table of SLA operations
''',
'operations',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('statistics-currents', REFERENCE_CLASS, 'StatisticsCurrents' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsCurrents',
[], [],
''' Table of current statistics for SLA operations
''',
'statistics_currents',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('statistics-historicals', REFERENCE_CLASS, 'StatisticsHistoricals' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsHistoricals',
[], [],
''' Table of historical statistics for SLA
operations
''',
'statistics_historicals',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('statistics-on-demand-currents', REFERENCE_CLASS, 'StatisticsOnDemandCurrents' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandCurrents',
[], [],
''' Table of current statistics for SLA on-demand
operations
''',
'statistics_on_demand_currents',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
_MetaInfoClassMember('statistics-on-demand-historicals', REFERENCE_CLASS, 'StatisticsOnDemandHistoricals' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals',
[], [],
''' Table of historical statistics for SLA
on-demand operations
''',
'statistics_on_demand_historicals',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-ethernet-cfm-oper',
'ethernet',
_yang_ns._namespaces['Cisco-IOS-XR-ethernet-cfm-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
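        # 'Sla.Protocols.Ethernet' is the container that ties together the
        # seven per-Ethernet tables registered above: operations, on-demand
        # operations, configuration errors, and the current/historical
        # statistics tables for both configured and on-demand operations.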
'Sla.Protocols' : {
'meta_info' : _MetaInfoClass('Sla.Protocols',
False,
[
_MetaInfoClassMember('ethernet', REFERENCE_CLASS, 'Ethernet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols.Ethernet',
[], [],
''' The Ethernet SLA protocol
''',
'ethernet',
'Cisco-IOS-XR-ethernet-cfm-oper', False),
],
'Cisco-IOS-XR-infra-sla-oper',
'protocols',
_yang_ns._namespaces['Cisco-IOS-XR-infra-sla-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'Sla' : {
'meta_info' : _MetaInfoClass('Sla',
False,
[
_MetaInfoClassMember('protocols', REFERENCE_CLASS, 'Protocols' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper', 'Sla.Protocols',
[], [],
''' Table of all SLA protocols
''',
'protocols',
'Cisco-IOS-XR-infra-sla-oper', False),
],
'Cisco-IOS-XR-infra-sla-oper',
'sla',
_yang_ns._namespaces['Cisco-IOS-XR-infra-sla-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
'SlaNodes' : {
'meta_info' : _MetaInfoClass('SlaNodes',
False,
[
],
'Cisco-IOS-XR-infra-sla-oper',
'sla-nodes',
_yang_ns._namespaces['Cisco-IOS-XR-infra-sla-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper'
),
},
}
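# The assignments below wire up parent pointers so that each nested class's
# meta-info can be walked back to its enclosing container's meta-info, all
# the way up to 'Sla'. A minimal sketch of a lookup against this table
# (illustrative only; how this generated module is imported may vary between
# YDK releases):
#
#     bins = _meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents'
#                        '.StatisticsOnDemandCurrent.OperationMetric.Bucket'
#                        '.Contents.Aggregated.Bins']['meta_info']
#     agg = _meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents'
#                       '.StatisticsOnDemandCurrent.OperationMetric.Bucket'
#                       '.Contents.Aggregated']['meta_info']
#     assert bins.parent is agg   # holds once the wiring below has run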
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations.Operation']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations.Operation']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.Operations']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals']['meta_info']
_meta_table['Sla.Protocols.Ethernet.ConfigErrors.ConfigError']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.ConfigErrors']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.OnDemandOperations']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandCurrents']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet']['meta_info']
_meta_table['Sla.Protocols.Ethernet.Operations']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsHistoricals']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet']['meta_info']
_meta_table['Sla.Protocols.Ethernet.ConfigErrors']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet']['meta_info']
_meta_table['Sla.Protocols.Ethernet.OnDemandOperations']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet']['meta_info']
_meta_table['Sla.Protocols.Ethernet.StatisticsCurrents']['meta_info'].parent =_meta_table['Sla.Protocols.Ethernet']['meta_info']
_meta_table['Sla.Protocols.Ethernet']['meta_info'].parent =_meta_table['Sla.Protocols']['meta_info']
_meta_table['Sla.Protocols']['meta_info'].parent =_meta_table['Sla']['meta_info']
| 56.579557
| 324
| 0.548223
|
32705d3d0392df580342584c2bd9765216b0a3c2
| 8,667
|
py
|
Python
|
resurces/docking/prepare_dpf42.py
|
jRicciL/Taller_Simulacion_Molecular
|
d8141e7197bf5c6b19f7446b72a0765966d06982
|
[
"MIT"
] | null | null | null |
resurces/docking/prepare_dpf42.py
|
jRicciL/Taller_Simulacion_Molecular
|
d8141e7197bf5c6b19f7446b72a0765966d06982
|
[
"MIT"
] | null | null | null |
resurces/docking/prepare_dpf42.py
|
jRicciL/Taller_Simulacion_Molecular
|
d8141e7197bf5c6b19f7446b72a0765966d06982
|
[
"MIT"
] | 1
|
2022-01-03T20:01:03.000Z
|
2022-01-03T20:01:03.000Z
|
#!/usr/bin/env python
#
#
#
# $Header: /mnt/raid/services/cvs/python/packages/share1.5/AutoDockTools/Utilities24/prepare_dpf42.py,v 1.13.2.3 2016/03/11 00:52:51 annao Exp $
#
import string
import os.path
from MolKit import Read
from AutoDockTools.DockingParameters import DockingParameters, genetic_algorithm_list4_2, \
genetic_algorithm_local_search_list4_2, local_search_list4_2,\
simulated_annealing_list4_2, epdb_list4_2,\
DockingParameter42FileMaker
from AutoDockTools.atomTypeTools import AutoDock4_AtomTyper
import numpy
def usage():
print "Usage: prepare_dpf42.py -l pdbqt_file -r pdbqt_file"
print "Description of command..."
print "Prepare a docking parameter file (DPF) for AutoDock42: <ligand>_<receptor>.dpf"
print " containing genetic_algorithm_local_search_list4_2 parameters (GALS)"
print " -l ligand_filename"
print " -r receptor_filename"
print
print "Optional parameters:"
print " [-o output dpf_filename]"
print " [-i template dpf_filename]"
print " [-x flexres_filename]"
print " [-p parameter_name=new_value]"
print " [-k list of parameters to write]"
print " [-e write epdb dpf ]"
print " [-v] verbose output"
print " [-L] use local search parameters"
print " [-S] use simulated annealing search parameters"
print " [-s] seed population using ligand's present conformation"
print " [-A] use only root atom coordinates to calculate about"
print
if __name__ == '__main__':
import getopt
import sys
try:
opt_list, args = getopt.getopt(sys.argv[1:], 'sLShveAl:r:i:o:x:p:k:')
except getopt.GetoptError, msg:
print 'prepare_dpf42.py: %s' % msg
usage()
sys.exit(2)
receptor_filename = ligand_filename = None
dpf_filename = None
template_filename = None
flexres_filename = None
parameters = []
parameter_list = genetic_algorithm_local_search_list4_2
pop_seed = False
local_search = False
verbose = None
epdb_output = False
about_root_atoms_only = False
for o, a in opt_list:
if verbose: print "o=", o, ' a=', a
if o in ('-v', '--v'):
verbose = 1
if verbose: print 'verbose output'
if o in ('-l', '--l'): #ligand filename
ligand_filename = a
if verbose: print 'ligand_filename =', ligand_filename
if o in ('-r', '--r'): #receptor filename
receptor_filename = a
if verbose: print 'receptor_filename =', receptor_filename
if o in ('-x', '--x'): #flexres_filename
flexres_filename = a
if verbose: print 'flexres_filename =', flexres_filename
if o in ('-i', '--i'): #input reference
template_filename = a
if verbose: print 'template_filename =', template_filename
if o in ('-o', '--o'): #output filename
dpf_filename = a
if verbose: print 'output dpf_filename =', dpf_filename
if o in ('-p', '--p'): #parameter
parameters.append(a)
if verbose: print 'parameters =', parameters
if o in ('-e', '--e'):
epdb_output = True
if verbose: print 'output epdb file'
parameter_list = epdb_list4_2
if o in ('-k', '--k'): #parameter_list_to_write
parameter_list = a
if verbose: print 'parameter_list =', parameter_list
if o in ('-L', '--L'): #parameter_list_to_write
local_search = 1
parameter_list = local_search_list4_2
if verbose: print 'parameter_list =', parameter_list
if o in ('-S', '--S'): #parameter_list_to_write
parameter_list = simulated_annealing_list4_2
if verbose: print 'parameter_list =', parameter_list
if o in ('-A', '--A'): #set about to average of coords of root atoms
about_root_atoms_only = True
if verbose: print 'about_root_atoms_only =', about_root_atoms_only
if o in ('-h', '--'):
usage()
sys.exit()
if o in ('-s', '--s'):
pop_seed = True
if (not receptor_filename) or (not ligand_filename):
print "prepare_dpf42.py: ligand and receptor filenames"
print " must be specified."
usage()
sys.exit()
#11/2011: fixing local_search bugs:
# specifically:
# 1. quaternion0 0 0 0 0
# 2. dihe0 0 0 0 0 0 <one per rotatable bond>
# 3. about == tran0
# 4. remove tstep qstep and dstep
# 5. remove ls_search_freq
#local_search = local_search_list4_2
#parameter_list =local_search_list4_2
dm = DockingParameter42FileMaker(verbose=verbose, pop_seed=pop_seed)
if template_filename is not None: #setup values by reading dpf
dm.dpo.read(template_filename)
dm.set_ligand(ligand_filename)
dm.set_receptor(receptor_filename)
if flexres_filename is not None:
flexmol = Read(flexres_filename)[0]
flexres_types = flexmol.allAtoms.autodock_element
lig_types = dm.dpo['ligand_types']['value'].split()
all_types = lig_types
for t in flexres_types:
if t not in all_types:
all_types.append(t)
all_types_string = all_types[0]
if len(all_types)>1:
for t in all_types[1:]:
all_types_string = all_types_string + " " + t
if verbose: print "adding ", t, " to all_types->", all_types_string
dm.dpo['ligand_types']['value'] = all_types_string
dm.dpo['flexres']['value'] = flexres_filename
dm.dpo['flexres_flag']['value'] = True
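# Illustrative example of the merge above (the atom-type strings are made up):
# if the ligand types were "A C HD" and the flexible residues contribute "N"
# and "OA", ligand_types becomes "A C HD N OA" -- flexres types are appended
# only when not already present in the ligand's list.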
#dm.set_docking_parameters( ga_num_evals=1750000,ga_pop_size=150, ga_run=20, rmstol=2.0)
kw = {}
for p in parameters:
key,newvalue = string.split(p, '=')
#detect string reps of lists: eg "[1.,1.,1.]"
if key=='parameter_file':
if key in parameter_list:
print "removing parameter_file keyword"
parameter_list.remove('parameter_file')
parameter_list.insert(1, key)
dm.dpo['custom_parameter_file']['value']=1
if newvalue[0]=='[':
nv = []
for item in newvalue[1:-1].split(','):
nv.append(float(item))
#print "nv=", nv
newvalue = nv
kw[key] = nv
if verbose: print "newvalue=", nv, " kw=", kw
elif key=='epdb_flag':
if verbose: print "setting epdb_flag to", newvalue
kw['epdb_flag'] = 1
elif 'flag' in key:
if verbose: print "key=", key, ' newvalue=', newvalue
if newvalue in ['1','0']:
newvalue = int(newvalue)
if newvalue =='False':
newvalue = False
if newvalue =='True':
newvalue = True
elif local_search and 'about' in key:
kw['about'] = newvalue
kw['tran0'] = newvalue
else:
kw[key] = newvalue
if verbose: print "set ", key, " to ", newvalue
if verbose: print "calling set_docking_parameters with kw=", kw
apply(dm.set_docking_parameters, (), kw)
if key not in parameter_list:
#special hacks for output_pop_file,set_sw1,...
if key=='output_pop_file':
parameter_list.insert(parameter_list.index('set_ga'), key)
elif key=='set_sw1':
parameter_list.insert(parameter_list.index('ls_search_freq')+1, key)
else:
parameter_list.append(key)
if about_root_atoms_only:
lines = dm.ligand.parser.allLines
for ix, lll in enumerate(lines):
if lll.find("ROOT")==0:
root_ix = ix
if lll.find("ENDROOT")==0:
endroot_ix = ix
break
last_ix = endroot_ix - (root_ix + 1) #47-(18+1)
crds = dm.ligand.allAtoms[0:last_ix].coords
about = (numpy.add.reduce(crds)/float(len(crds))).tolist()
dm.dpo['about']['value'] = (round(about[0],3), round(about[1],3), round(about[2],3))
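# i.e. 'about' becomes the centroid of the atom records between ROOT and
# ENDROOT in the ligand PDBQT, rounded to three decimal places.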
if epdb_output:
dm.dpo['epdb_flag']['value'] = 1
dm.write_dpf(dpf_filename, parm_list=epdb_list4_2)
else:
if verbose: print "not epdb_output"
dm.write_dpf(dpf_filename, parameter_list, pop_seed=pop_seed)
#prepare_dpf42.py -l indinavir.pdbqt -r 1hsg.pdbqt -p ga_num_evals=25000000 -p ga_pop_size=150 -p ga_run=17 -i ref.dpf -o testing.dpf
| 39.940092
| 144
| 0.591785
|
74b0d37ca70ddaf563de68175a17473a70d3ee69
| 686
|
py
|
Python
|
exatomic/adf/dirac/dirac.py
|
tjduigna/exatomic
|
3e27233084588bc6a58b63fc81aaf5a6b67a968d
|
[
"Apache-2.0"
] | null | null | null |
exatomic/adf/dirac/dirac.py
|
tjduigna/exatomic
|
3e27233084588bc6a58b63fc81aaf5a6b67a968d
|
[
"Apache-2.0"
] | 1
|
2017-05-25T21:05:40.000Z
|
2017-05-25T23:54:15.000Z
|
exatomic/adf/dirac/dirac.py
|
tjduigna/exatomic
|
3e27233084588bc6a58b63fc81aaf5a6b67a968d
|
[
"Apache-2.0"
] | 1
|
2017-05-25T20:48:33.000Z
|
2017-05-25T20:48:33.000Z
|
## -*- coding: utf-8 -*-
## Copyright (c) 2015-2018, Exa Analytics Development Team
## Distributed under the terms of the Apache License 2.0
#"""
#ADF Dirac Parser
##########################
#Parser for the output of the DIRAC program (part of the ADF suite).
#"""
#from exa import Parser, Matches
#
#
#class DIRAC(Parser):
# """
# """
# _start = "* | D I R A C | *"
# _stop = -1
#
# def _parse_stops_1(self, starts):
# """Find the end of the section."""
# key = "Hash table lookups:"
# matches = [self.find_next(key, cursor=s[0]) for s in starts]
# return Matches(key, *matches)
| 29.826087
| 96
| 0.521866
|
30bc77b44ad2e719566d27e5eb87e6dbd6545a39
| 412
|
py
|
Python
|
Python32/ED/quicksort.py
|
andersonsilvade/python_C
|
ffc00184883089f1c2d9b8a6c32503b2c8b8d035
|
[
"MIT"
] | null | null | null |
Python32/ED/quicksort.py
|
andersonsilvade/python_C
|
ffc00184883089f1c2d9b8a6c32503b2c8b8d035
|
[
"MIT"
] | null | null | null |
Python32/ED/quicksort.py
|
andersonsilvade/python_C
|
ffc00184883089f1c2d9b8a6c32503b2c8b8d035
|
[
"MIT"
] | 1
|
2020-11-04T08:36:28.000Z
|
2020-11-04T08:36:28.000Z
|
import random

def quicksort(lista):
    if len(lista) <= 1:
        return lista
    pivô = lista[0]
    iguais = [x for x in lista if x == pivô]
    menores = [x for x in lista if x < pivô]
    maiores = [x for x in lista if x > pivô]
    return quicksort(menores) + iguais + quicksort(maiores)

lista = list(range(8))
random.shuffle(lista)
print(quicksort(lista))
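# Worked partition step: if lista shuffles to [3, 0, 5, 1, 7, 2, 4, 6], then
# pivô = 3, menores = [0, 1, 2], iguais = [3], maiores = [5, 7, 4, 6], and the
# result is quicksort(menores) + iguais + quicksort(maiores) == [0, 1, ..., 7].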
| 7.357143
| 59
| 0.538835
|
37eda6e7d3567d25fa9ebb1229e4abda54d34713
| 2,077
|
py
|
Python
|
pi/perma-proto-organized-hw-test.py
|
CrazyJ36/python
|
4cff6e7240672a273d978521bb511065f45d4312
|
[
"MIT"
] | null | null | null |
pi/perma-proto-organized-hw-test.py
|
CrazyJ36/python
|
4cff6e7240672a273d978521bb511065f45d4312
|
[
"MIT"
] | null | null | null |
pi/perma-proto-organized-hw-test.py
|
CrazyJ36/python
|
4cff6e7240672a273d978521bb511065f45d4312
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# This test my "organized" hardware setup on perma-proto
print("Loading.. Press Ctrl-c to exit A test.")
import sys
from time import sleep
from signal import pause
from gpiozero import Button, ButtonBoard, LEDBoard, Buzzer
usage = "Usage: ./perma-proto-org... [leds, buttons, buzzer, all]"
try:
cmd = sys.argv[1]
except IndexError:
print(usage)
exit(0)
try:
leds = LEDBoard(4, 18, 24, 11, 12, 7, 16, 6)
btns = ButtonBoard(23, 27, 22, 25, 9, 5, 8)
buzz = Buzzer(2)
except Exception:
print("gpiozero or pin didn't load correctly, Exiting..")
exit(0)
def test_leds():
try:
print("Total desired LEDs: ", end='' )
print(len(leds))
print("Flashing All LEDs in order 3 times, watch PI..")
sleep(1)
i0 = 0
while i0 < 3:
i1 = 0
while i1 < len(leds):
leds[i1].on()
sleep(0.1)
leds[i1].off()
i1 = i1 + 1
i0 = i0 + 1
except KeyboardInterrupt:
print("\rExiting..\n")
return
def test_btns():
print("Testing Buttons...")
print("Total desired button devices: ", end='')
print(len(btns))
print("Press any push button on the board, you'll see it active in A list of all.")
while True:
try:
if btns.is_pressed:
print(btns.value)
sleep(0.2)
except KeyboardInterrupt:
print("\rExiting\n")
return
def test_buzz():
print("Testing Buzzer.. Beep(repeated on/off) for A short time...")
buzz.beep(0.005, 0.005, 50, False)
return
# Switch Method Reference
'''
def test_switch():
print("Toggle the switch(the one on the right of the two)")
try:
while True:
if switch.is_pressed:
print("Switch is on")
switch.wait_for_release()
else:
print("Switch is off")
switch.wait_for_press()
except KeyboardInterrupt:
print("\nExiting..\n")
return
'''
if cmd == "leds":
test_leds()
elif cmd == "buttons":
test_btns()
elif cmd == "buzzer":
test_buzz()
elif cmd == "all":
test_leds()
test_btns()
test_buzz()
else:
print(usage)
exit(0)
exit(0)
| 20.362745
| 85
| 0.616273
|
44cbec44471edbf7fabcf10a3a6888eb36f362b0
| 3,435
|
py
|
Python
|
examples/OSPFv2/8400/ospfv2_network_lsa_count_monitor.1.0.py
|
nishanthprakash-hpe/nae-scripts
|
bf14e5155308683d59e7a95d21436a767d9132a8
|
[
"Apache-2.0"
] | null | null | null |
examples/OSPFv2/8400/ospfv2_network_lsa_count_monitor.1.0.py
|
nishanthprakash-hpe/nae-scripts
|
bf14e5155308683d59e7a95d21436a767d9132a8
|
[
"Apache-2.0"
] | null | null | null |
examples/OSPFv2/8400/ospfv2_network_lsa_count_monitor.1.0.py
|
nishanthprakash-hpe/nae-scripts
|
bf14e5155308683d59e7a95d21436a767d9132a8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
Manifest = {
'Name': 'ospfv2_network_lsa_count_monitor',
'Description': 'OSPFv2 Network LSA count Monitor Policy',
'Version': '1.0',
'Author': 'Aruba Networks'
}
ParameterDefinitions = {
'vrf_name': {
'Name': 'Vrf Name',
'Description': 'Vrf to be Monitor',
'Type': 'String',
'Default': 'default'
},
'ospf_process_id': {
'Name': 'OSPFv2 process id',
'Description': 'OSPFv2 process id to be Monitor',
'Type': 'integer',
'Default': 1
},
'ospf_area_id': {
'Name': 'OSPFv2 area id',
'Description': 'OSPFv2 area id to be Monitor',
'Type': 'String',
'Default': '0.0.0.0'
}
}
class Policy(NAE):
def __init__(self):
uri1 = '/rest/v1/system/vrfs/{}/ospf_routers/{}/areas/{}?' \
'attributes=lsa_counts.network_lsa'
self.m1 = Monitor(
uri1,
'Network LSA Count Monitor (count)',
[self.params['vrf_name'],
self.params['ospf_process_id'],
self.params['ospf_area_id']])
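# With the default parameters above (and assuming the NAE framework fills the
# {} placeholders from the params list in order), uri1 resolves to:
# /rest/v1/system/vrfs/default/ospf_routers/1/areas/0.0.0.0?attributes=lsa_counts.network_lsa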
uri2 = '/rest/v1/system/vrfs/{}/ospf_routers/{}/areas/{}?' \
'attributes=lsa_counts.network_summary_asbr_lsa'
self.m2 = Monitor(
uri2,
'Network Summary ASBR LSA Count Monitor (count)',
[self.params['vrf_name'],
self.params['ospf_process_id'],
self.params['ospf_area_id']])
uri3 = '/rest/v1/system/vrfs/{}/ospf_routers/{}/areas/{}?' \
'attributes=lsa_counts.network_summary_lsa'
self.m3 = Monitor(
uri3,
'Network Summary LSA Count Monitor (count)',
[self.params['vrf_name'],
self.params['ospf_process_id'],
self.params['ospf_area_id']])
uri4 = '/rest/v1/system/vrfs/{}/ospf_routers/{}/areas/{}?' \
'attributes=lsa_counts.nssa_lsa'
self.m4 = Monitor(
uri4,
'Network NSSA LSA Count Monitor (count)',
[self.params['vrf_name'],
self.params['ospf_process_id'],
self.params['ospf_area_id']])
uri5 = '/rest/v1/system/vrfs/{}/ospf_routers/{}/areas/{}?' \
'attributes=lsa_counts.router_lsa'
self.m5 = Monitor(
uri5,
'Network Router LSA Count Monitor (count)',
[self.params['vrf_name'],
self.params['ospf_process_id'],
self.params['ospf_area_id']])
uri6 = '/rest/v1/system/vrfs/{}/ospf_routers/{}?' \
'attributes=lsa_counts.external_lsa'
self.m6 = Monitor(
uri6,
'External LSA Count Monitor (count)',
[self.params['vrf_name'],
self.params['ospf_process_id']])
| 33.349515
| 68
| 0.572344
|
8757270c227802e35ac150eec2a474f5c4e99af7
| 9,986
|
py
|
Python
|
cky/cky.py
|
dqd/heap
|
6247eebfc87ea0447d199f3c2ef044727b7d3ad6
|
[
"WTFPL"
] | 1
|
2017-07-03T19:58:08.000Z
|
2017-07-03T19:58:08.000Z
|
cky/cky.py
|
dqd/heap
|
6247eebfc87ea0447d199f3c2ef044727b7d3ad6
|
[
"WTFPL"
] | null | null | null |
cky/cky.py
|
dqd/heap
|
6247eebfc87ea0447d199f3c2ef044727b7d3ad6
|
[
"WTFPL"
] | null | null | null |
#! /usr/bin/python
# coding=utf-8
"""
The Cocke–Kasami–Younger algorithm
2010 Pavel Dvořák <id@dqd.cz>
"""
# Feel free to edit.
ENCODING = 'latin2'
AJKA_CMD = '/nlp/projekty/ajka/bin/ajka -b'
GRAMMAR_FILE = 'cky.data'
GRAMMAR_LIMIT = 100000
import re
import sys
import nltk
import getopt
import subprocess
import unicodedata
from os import path
sys.setrecursionlimit(10000) # Uh, oh.
def utfize(string):
"""
Convert to the Unicode string when it is necessary.
@param string: a string.
@type string: String
@return: a string in Unicode.
@rtype: String
"""
if isinstance(string, str):
return string.decode('utf-8')
return string
def tr(string, table):
"""
Replace parts of the string according to the replacement table.
@param string: a string.
@type string: String
@param table: a dictionary of replacements (keys to values).
@type table: {a}
@return: a string with applied replacements.
@rtype: String
"""
return re.compile('|'.join(map(re.escape, table))).sub(lambda x: table[x.group(0)], string)
def morph(word):
"""
Perform the morphological analysis of the Czech word. It uses
the external morphological analyser Ajka.
@param word: any Czech word.
@type word: String
@return: A list of word classes. If the word is not known, an empty
list is returned.
@rtype: [String]
"""
if re.match(r'\W+', word, re.UNICODE):
return [word]
ajka = subprocess.Popen(AJKA_CMD, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
output = ajka.communicate(utfize(word).encode(ENCODING))[0].decode(ENCODING)
if '--not found' in output or output[:5] != 'ajka>':
return []
return [wc[3:] for wc in filter(lambda x: x[:3] == '<c>', output.split())]
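# e.g. morph(u'hradu') returns the Ajka word classes tagged <c> for the token,
# [] when Ajka does not recognise the word, and a punctuation-only token is
# passed through unchanged.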
def cky(sentence, verbose):
"""
Perform the syntactic analysis on the sentence. The function uses
the Cocke–Kasami–Younger algorithm. If a word in the sentence is
not recognized, the function terminate the program with an error
message.
@param sentence: any correctly formed Czech sentence.
@type sentence: String
@param verbose: verbose output.
@type verbose: Bool
"""
if verbose:
print 'Lexical analysis...'
lexical = []
for word in nltk.tokenize.WordPunctTokenizer().tokenize(utfize(sentence)):
classes = morph(word)
if not classes:
print 'Error: the word \'%s\' is not recognized by the morphological analyser Ajka.' % word
sys.exit(1)
lexical.append((word, classes))
if verbose:
for l in lexical:
print '%s: %s' % (l[0], u', '.join(l[1]))
print '\nLoading the grammar...'
try:
f = open(GRAMMAR_FILE, 'r')
grammar = nltk.parse_cfg(f.read())
f.close()
except IOError, err:
print 'Error: %s.' % err
sys.exit(1)
def rhs(x):
terminals = grammar.productions(lhs=x)
if not terminals:
print 'Error: a correct terminal for the nonterminal \'%s\' cannot be found.' % x
sys.exit(1)
return grammar.productions(rhs=terminals[0].rhs()[0])
def abc(x, y):
return filter(lambda x: re.match(r'.* %s$' % y, str(x)), rhs(x))
if verbose:
print 'Performing the analysis...'
chart = []
for ls in lexical:
chart.append([set([g.lhs() for l in ls[1] for g in grammar.productions(rhs=l)])])
if verbose:
print ls[0], ' ',
for j in range(1, len(lexical)):
for i in range(len(lexical) - j):
chart[i].insert(j, set([]))
for k in range(j):
chart[i][j].update([a.lhs() for b in chart[i][k] for c in chart[i + k + 1][j - k - 1] for a in abc(b, c)])
if verbose:
print
for j in range(len(lexical)):
for i in range(len(lexical) - j):
print list(chart[i][j]), ' ',
print
print 'The sentence is',
if not nltk.grammar.Nonterminal('S') in [b.lhs() for c in chart[0][-1] for b in grammar.productions(rhs=c)]:
print 'NOT',
print 'OK according to our Czech grammar.'
def generate(corpus, verbose):
"""
Generate a grammar file of the given corpus. The corpus contains
a set of sentences having their words tagged.
The grammar is going to be in the Chomsky normal form (CNF), so
the sentences should not be more complex than the context-free
language.
CNF is defined by the following rules:\n
A → BC or\n
A → α or\n
S → ε.
@param corpus: a filepath to the corpus (e.g. /nlp/corpora/vertical/desam/source).
@type corpus: String
@param verbose: verbose output.
@type verbose: Bool
"""
if not path.isfile(corpus):
print 'Error: the corpus file \'%s\' does not exist.' % corpus
sys.exit(1)
try:
f = open(corpus, 'r')
except IOError, err:
print 'Error: %s.' % err
sys.exit(1)
t = nltk.Tree('(S)')
processing = False
opening = False
if verbose:
print 'Generation of the sentences...'
for line in f.readlines()[:GRAMMAR_LIMIT]:
line = unicode(line, ENCODING)
tag = re.match(r'^<.+>', line)
word = unicodedata.normalize('NFKD', line.split()[-1]).encode('ascii', 'ignore')
if not processing and opening and not tag and not re.match(r'^\d+\)', line):
sentence = [word]
processing = True
elif processing and not tag:
sentence.append(word)
elif processing and tag:
t = add(t, sentence)
processing = False
opening = re.match(r'^<\w+.*>', line)
f.close()
if verbose:
print 'Done. Generated %d sentences.' % len(t)
print 'Normalization to CNF...'
t.chomsky_normal_form()
if verbose:
print 'Done. The tree now reaches a height of %d nodes.' % t.height()
try:
f = open(GRAMMAR_FILE, 'w')
if verbose:
print 'Transformation into the grammar...'
i = 0
for rule in set(t.productions()):
rule = re.sub(r'<([\w-]+)>', r'\1', '%s\n' % rule)
rule = re.sub(r'(\w+)-', r'\1_', rule) # this is not needed in the newer versions of NLTK
rule = re.sub(r' S\|(.+)', r' \1', rule)
rule = re.sub(r'^S\|([\w-]+) (.+)', r'S \2 \n\1 \2', rule)
f.write(rule)
if verbose:
i += rule.count('\n')
f.close()
except IOError, err:
print 'Error: %s.' % err
sys.exit(1)
if verbose:
print 'Done. The grammar contains %d rules.' % i
def add(t, l):
"""
Recursively add the list to the tree.
@param t: a tree; defined in the nltk.Tree module.
@type t: Tree
@param l: a list of items -- in this case, it is a list of words.
@type l: [a]
"""
if not l:
return t
a = l.pop(0)
table = {'[': 'LPAREN',
']': 'RPAREN',
'+': 'PLUS',
'=': 'EQUALS',
'.': 'DOT',
':': 'COLON',
';': 'SEMIC',
',': 'COMMA',
'`': 'BACKT',
'!': 'EXCLAM',
'?': 'QUEST',
'|': 'PIPE',
'"': 'QUOTE',
"'": 'APOST',
'-': 'DASH',
'&': 'AMPER',
'/': 'SLASH',
'%': 'PERCEN',
'*': 'ASTER'}
a = tr(a, {'(': '[', ')': ']'}) # parenthesis are reserved
b = tr(a, table)
for x in t:
if isinstance(x, nltk.Tree) and x.node == a:
x = add(x, l)
return t
t.append(add(nltk.Tree('(X%s %s)' % (b, a)), l))
return t
def usage(prog_name):
"""
Usage: _name_ [OPTIONS]
The Cocke–Kasami–Younger algorithm performs the syntactic analysis
of a sentence using the grammar in a Chomsky normal form (CNF) and
outputs a parsing table.
The analysed sentence is expected to be entered by the standard input
(e.g. echo "Model hradu v použitelném stavu." | _name_).
OPTIONS:
_tab_-h, --help display this help and exit
_tab_-g, --generate=CORPUS generate a grammar file of the CORPUS
_tab_-v, --verbose verbose output
The CORPUS file (e.g. /nlp/corpora/vertical/desam/source) should be
saved in the _encoding_ encoding.
"""
output = '\n'.join([line[4:] for line in usage.__doc__.splitlines()][1:-1])
print tr(output, {'_name_': path.basename(prog_name), '_encoding_': ENCODING, '_tab_': '\t'})
def main(argv):
"""
Handle the input parameters and run the program. If anything goes wrong,
report an error.
@param argv: input arguments.
@type argv: [String]
"""
try:
opts, args = getopt.getopt(argv[1:], 'hg:v', ['help', 'generate=', 'verbose'])
except getopt.GetoptError, err:
print 'Error: %s.' % err
sys.exit(1)
g = ''
v = False
for opt, arg in opts:
if opt in ['-h', '--help']:
usage(argv[0])
sys.exit()
elif opt in ['-g', '--generate']:
g = arg
elif opt in ['-v', '--verbose']:
v = True
if g:
generate(g, v)
sys.exit()
try:
input = sys.stdin.readlines()
except KeyboardInterrupt:
print 'Error: enter the text and press the Ctrl + D key combination, not the Ctrl + C one.'
sys.exit(1)
if not input or not input[0].strip():
print 'Error: you have to enter the analysed sentence by the standard input.'
sys.exit(1)
elif not path.isfile(GRAMMAR_FILE) or path.getsize(GRAMMAR_FILE) < 127: # Let's say 128 is a reasonably small grammar.
print 'Error: the grammar file is empty. You have to generate it first.'
sys.exit(1)
cky(input[0], v)
if __name__ == "__main__":
main(sys.argv)
| 27.66205
| 122
| 0.558983
|
9c42304aa0998e62a7e7d7dcec114f7bd5b89ce6
| 8,408
|
py
|
Python
|
docs/conf.py
|
PyXRD/pyxrd
|
26bacdf64f3153fa74b8caa62e219b76d91a55c1
|
[
"BSD-2-Clause"
] | 27
|
2018-06-15T15:28:18.000Z
|
2022-03-10T12:23:50.000Z
|
docs/conf.py
|
PyXRD/pyxrd
|
26bacdf64f3153fa74b8caa62e219b76d91a55c1
|
[
"BSD-2-Clause"
] | 22
|
2018-06-14T08:29:16.000Z
|
2021-07-05T13:33:44.000Z
|
docs/conf.py
|
PyXRD/pyxrd
|
26bacdf64f3153fa74b8caa62e219b76d91a55c1
|
[
"BSD-2-Clause"
] | 8
|
2019-04-13T13:03:51.000Z
|
2021-06-19T09:29:11.000Z
|
# -*- coding: utf-8 -*-
#
# PyXRD documentation build configuration file, created by
# sphinx-quickstart on Wed Feb 12 10:27:39 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
top_dir = os.path.dirname(os.path.abspath("."))
if os.path.exists(os.path.join(top_dir, "pyxrd")):
sys.path.insert(0, top_dir)
pass
import pyxrd # @UnusedImport
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autosummary',
'sphinx.ext.pngmath',
'sphinx.ext.intersphinx'
]
intersphinx_mapping = {'python': ('http://docs.python.org/2.7', None)}
pngmath_latex_preamble = r"""
\usepackage{mathtools}
\usepackage{units}
\usepackage[document]{ragged2e}
\usepackage[fontsize=8pt]{scrextend}
"""
# User __init__ docstrings:
autoclass_content = 'both'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PyXRD'
copyright = u'2014, Mathijs Dumon'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(map(str, pyxrd.__version__.split('.')[0:2]))
# The full version, including alpha/beta/rc tags.
release = pyxrd.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyxrddoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'PyXRD.tex', u'PyXRD Documentation',
u'Mathijs Dumon', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyxrd', u'PyXRD Documentation',
[u'Mathijs Dumon'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'PyXRD', u'PyXRD Documentation',
u'Mathijs Dumon', 'PyXRD', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
autodoc_member_order = 'bysource'
| 31.373134
| 80
| 0.7098
|
817636fb7de1c2e46351604b0691300599cf8823
| 7,411
|
py
|
Python
|
shiiba/regression3a.py
|
yaukwankiu/armor
|
6c57df82fe3e7761f43f9fbfe4f3b21882c91436
|
[
"CC0-1.0"
] | 1
|
2015-11-06T06:41:33.000Z
|
2015-11-06T06:41:33.000Z
|
shiiba/regression3a.py
|
yaukwankiu/armor
|
6c57df82fe3e7761f43f9fbfe4f3b21882c91436
|
[
"CC0-1.0"
] | null | null | null |
shiiba/regression3a.py
|
yaukwankiu/armor
|
6c57df82fe3e7761f43f9fbfe4f3b21882c91436
|
[
"CC0-1.0"
] | null | null | null |
# test script for regression3
# regression3 : a more streamlined version of the shiiba regression and advection function/module
###########
#imports
import numpy as np
import numpy.ma as ma
from .. import pattern
from . import regression2 as regression
from ..advection import semiLagrangian
sl = semiLagrangian
from imp import reload
import time
lsq = np.linalg.lstsq
time0= time.time()
def tic():
global timeStart
timeStart = time.time()
def toc():
print "time spent:", time.time()-timeStart
dbz=pattern.DBZ
################
# set up
def regress(a, b):
phi0 = a.matrix
phi1 = b.matrix
phi0
phi1
phi1-phi0
##################
# test
phi0.sharedmask #check
phi0.unshare_mask()
phi1.unshare_mask()
phi0.sharedmask #check
phi0.up = np.roll(phi0, 1, axis=0)
phi0.down = np.roll(phi0,-1, axis=0)
phi0.right = np.roll(phi0, 1, axis=1)
phi0.left = np.roll(phi0,-1, axis=1)
phi0.sharedmask
phi0.left.sharedmask #check
phi0.up.sharedmask
#masking the four edges
for v in [phi0.up, phi0.down, phi0.left, phi0.right]:
v.mask[ :, 0] = True
v.mask[ :,-1] = True
v.mask[ 0, :] = True
v.mask[-1, :] = True
phi0.sharedmask
phi0.left.sharedmask #check
phi0.up.sharedmask
phi0.up
phi0.left
#########################################################################
# CENTRAL DIFFERENCE SCHEME
# preparing for the regression: defining the X and Y
# advection equation:
# phi1-phi0 = -dt [ (u,v). ((phidown-phiup)/2dI, (phileft-phiright)/2dJ) - q]
# shiiba assumption: u = c1*I+c2*J+c3, v=c4*I+c5*J+c6, q=c7*I+c8*J+c9
# for simplicity we let dt=dI=dJ=1
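# Writing A = -(phidown-phiup)/2 and B = -(phileft-phiright)/2, the model above
# expands to the nine-column linear system fitted below:
#   phi1 - phi0 ~= c1*A*I + c2*A*J + c3*A + c4*B*I + c5*B*J + c6*B + c7*I + c8*J + c9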
print "\n=================================\nCentral difference scheme"
#xxx= raw_input('press enter:')
height, width = phi0.shape
X, Y = np.meshgrid(range(width), range(height))
I, J = Y, X
I = I.view(ma.MaskedArray)
J = J.view(ma.MaskedArray)
I.mask = None
J.mask = None
imageList = [phi0, phi1, phi0.up, phi0.down, phi0.left, phi0.right, I, J]
mask = phi0.mask
#
# get the union of masks...
#
for v in imageList:
mask += v.mask
# ... and share it
for v in imageList:
v.mask = mask
phi0, phi1, phi0.up, phi0.down, phi0.left, phi0.right, I, J
#
#
######################################################################
#################################################################################
#
# and compress the data into one dim before we do further computation.
# the two-dimensional structure is no longer needed.
#
phi0.sharedmask # check
phi1.sharedmask
phi =phi0.compressed() # compressing phi0 into 1-dimensional phi
phi_next =phi1.compressed() # same
phiup =phi0.up.compressed()
phidown =phi0.down.compressed()
phileft =phi0.left.compressed()
phiright =phi0.right.compressed()
I = I.compressed()
J = J.compressed()
xxx = np.vstack([phi, phi_next, phiup, phidown, phileft, phiright, I, J]) #test
xxx[:,:10]
xxx[:,10:20]
xxx[:,20:]
regressand = phi_next - phi
A = -(phidown-phiup)/2
B = -(phileft-phiright)/2
regressor = np.zeros((9, len(regressand))) # c1; c2; ... c9 one row for each coeff
regressor[0,:] = A*I
regressor[1,:] = A*J
regressor[2,:] = A
regressor[3,:] = B*I
regressor[4,:] = B*J
regressor[5,:] = B
regressor[6,:] = I
regressor[7,:] = J
regressor[8,:] = 1
regressor[:,:10]
regressor[:,10:20]
regressor[:,20:]
C, residual, rank, s = lsq(regressor.T, regressand)
residual = residual[0]
SStotal = regressand.var() * len(regressand)
Rsquared = 1 - residual/SStotal
print "For the central difference scheme, C, Rsquared =" , C, Rsquared
#
# the above - central difference scheme, 22 Feb 2013
######################################################################
#########################################################################
# UPWIND SCHEME - 23 Feb 2013
# preparing for the regression: defining the X and Y
# advection equation:
# phi1-phi0 = -dt [ (u,v) . ((phidown-phiup)/2dI, (phileft-phiright)/2dJ) - q +
# upWindCorrectionTerm],
# where upWindCorrectionTerm
# = (|u|,|v|). ((2phi-phidown-phiup)/2dI, (2phi-phileft-phiright)/2dJ)
#
# shiiba assumption: u = c1*I+c2*J+c3, v=c4*I+c5*J+c6, q=c7*I+c8*J+c9
# for simplicity we let dt=dI=dJ=1
print "\n=================================\nupWInd scheme"
#xxx= raw_input('press enter:')
###############################
# upWind scheme parameters
convergenceMark = 0.000000001
C_ = np.arange(9)*999 #just some random initialisation
loopCount = 0
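# Fixed-point iteration: each pass rebuilds (u, v) from the previous estimate
# of C, recomputes the upwind correction term, and re-solves the least squares,
# stopping once successive C vectors agree to within convergenceMark.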
while abs(C_-C).sum() > convergenceMark:
loopCount +=1
c1, c2, c3, c4, c5, c6, c7, c8, c9 = C
C_ = C # keeping the old C
U0 = c1*I + c2*J + c3
V0 = c4*I + c5*J + c6
# q = c7*I + c8*J + c9 # just in case it's needed
# In the following, dI=dJ=dt = 1 for simplicity
upWindCorrectionTerm = abs(U0)*(2*phi-phidown-phiup)/2 + abs(V0)*(2*phi-phileft-phiright)/2
regressand = phi_next - phi + upWindCorrectionTerm
# regressor unchanged - see equation (4.3) on p.32, annual report, december 2012
C, residual, rank, s = lsq(regressor.T, regressand)
residual = residual[0]
SStotal = regressand.var() * len(regressand)
Rsquared = 1 - residual/SStotal
print "\n-------------------------------\n"
print "Loop:", loopCount
print "abs(C_-C).sum():", abs(C_-C).sum()
print "Rsquared:", Rsquared
print "shiiba coeffs:", C
#print "upWindCorrectionTerm: (", len(upWindCorrectionTerm), ")",upWindCorrectionTerm,
##################################
# Shiiba regression results
print "\n\n\n=============== Shiiba regression results for", a.name, "and", b.name
print "Rsquared = ", Rsquared
print "C = ", C
return C, Rsquared
def interpolate(C, a):
#####################################
# interpolation
scalar1 = regression.convert2(C, a)
scalar1.name = "source term for" + a.name + "and" + b.name
scalar1.show2()
scalar1.show3()
vect1 = regression.convert(C, a)
vect1.show()
tic()
a1 = sl.interpolate2(a, vect1)
toc()
a1.show()
return a1, vect1, scalar1
def corr(a,b):
phi0 = a.matrix.flatten()
phi1 = b.matrix.flatten()
return ma.corrcoef(phi0,phi1)
def main(a, b):
C, Rsquared = regress(a, b)
a1, vect1, scalar1 = interpolate(C,a)
diff = b-a1
diff.cmap = 'hsv'
diff.show()
corr_a_b = corr(a,b)
corr_a1_b = corr(a1,b)
print diff.matrix.max()
print diff.matrix.min()
print diff.matrix.mean()
print diff.matrix.var()
return {'a1':a1, 'vect1':vect1, 'scalar1':scalar1, 'C':C, 'Rsquared':Rsquared,
'corr_a_b':corr_a_b, 'corr_a1_b': corr_a1_b}
if __name__ == "__main__":
a = dbz('20120612.0200')
b = dbz('20120612.0210')
a.load()
b.load()
main()
"""
| 27.14652
| 99
| 0.532722
|
7b030e72ee242d8cf5253784fcbe02d5089814d2
| 88
|
py
|
Python
|
main.py
|
Ruiii-w/ImageRetrieval
|
76fde37de3dc7fc4401315c26513f82cadf582cd
|
[
"MIT"
] | null | null | null |
main.py
|
Ruiii-w/ImageRetrieval
|
76fde37de3dc7fc4401315c26513f82cadf582cd
|
[
"MIT"
] | null | null | null |
main.py
|
Ruiii-w/ImageRetrieval
|
76fde37de3dc7fc4401315c26513f82cadf582cd
|
[
"MIT"
] | null | null | null |
import model.WindowModel as WM
if __name__ == '__main__':
WM.SearchingWindowModel()
| 22
| 30
| 0.75
|
03553bb166dd3a23cb9249888664551a938f62e2
| 23,844
|
py
|
Python
|
tensorlayerx/nn/core/common.py
|
tensorlayer/TensorLayerX
|
4e3e6f13687309dda7787f0b86e35a62bb3adbad
|
[
"Apache-2.0"
] | 34
|
2021-12-03T08:19:23.000Z
|
2022-03-13T08:34:34.000Z
|
tensorlayerx/nn/core/common.py
|
tensorlayer/TensorLayerX
|
4e3e6f13687309dda7787f0b86e35a62bb3adbad
|
[
"Apache-2.0"
] | null | null | null |
tensorlayerx/nn/core/common.py
|
tensorlayer/TensorLayerX
|
4e3e6f13687309dda7787f0b86e35a62bb3adbad
|
[
"Apache-2.0"
] | 3
|
2021-12-28T16:57:20.000Z
|
2022-03-18T02:23:14.000Z
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
import os
import tensorlayerx as tlx
from tensorlayerx.files import utils
from tensorlayerx import logging
import numpy as np
from queue import Queue
from tensorlayerx.nn.initializers import *
if tlx.BACKEND == 'mindspore':
from mindspore.ops.operations import Assign
from mindspore.nn import Cell
from mindspore import Tensor
import mindspore as ms
_act_dict = {
"relu": tlx.ops.ReLU,
"relu6": tlx.ops.ReLU6,
"leaky_relu": tlx.ops.LeakyReLU,
"lrelu": tlx.ops.LeakyReLU,
"softplus": tlx.ops.Softplus,
"tanh": tlx.ops.Tanh,
"sigmoid": tlx.ops.Sigmoid,
"softmax": tlx.ops.Softmax
}
_initializers_dict = {
"ones": ones(),
"zeros": zeros(),
"constant": constant(value=0.0),
"random_uniform": random_uniform(minval=-1.0, maxval=1.0),
"random_normal": random_normal(mean=0.0, stddev=0.05),
"truncated_normal": truncated_normal(stddev=0.02),
"he_normal": he_normal(),
"xavier_uniform": XavierUniform(),
"xavier_normal": XavierNormal()
}
def check_parameter(parameter, dim='2d'):
if dim == '2d':
if isinstance(parameter, int):
out = (parameter, parameter)
else:
out = parameter
elif dim == '3d':
if isinstance(parameter, int):
out = (parameter, parameter, parameter)
else:
out = parameter
else:
raise ("dim must be 2d or 3d.")
return out
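# e.g. check_parameter(3, '2d') -> (3, 3); check_parameter((1, 2), '2d') -> (1, 2);
# check_parameter(3, '3d') -> (3, 3, 3).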
def str2init(initializer):
if isinstance(initializer, str):
if initializer not in _initializers_dict.keys():
raise Exception(
"Unsupported string initialization: {}".format(initializer),
"String initialization supports these methods: {}".format(_initializers_dict.keys())
)
return _initializers_dict[initializer]
else:
return initializer
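# e.g. str2init("he_normal") returns the he_normal() initializer registered in
# _initializers_dict; a non-string argument (an initializer instance) is
# returned unchanged.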
def str2act(act):
if len(act) > 5 and act[0:5] == "lrelu":
try:
alpha = float(act[5:])
return tlx.ops.LeakyReLU(negative_slope=alpha)
except Exception as e:
raise Exception("{} can not be parsed as a float".format(act[5:]))
if len(act) > 10 and act[0:10] == "leaky_relu":
try:
alpha = float(act[10:])
return tlx.ops.LeakyReLU(negative_slope=alpha)
except Exception as e:
raise Exception("{} can not be parsed as a float".format(act[10:]))
if act not in _act_dict.keys():
raise Exception("Unsupported act: {}".format(act))
return _act_dict[act]
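# e.g. str2act("sigmoid") returns the tlx.ops.Sigmoid class (instantiated later
# by processing_act), while str2act("lrelu0.2") / str2act("leaky_relu0.2")
# already return a tlx.ops.LeakyReLU(negative_slope=0.2) instance.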
def processing_act(act):
# Processing strings as input, activation functions without parameters.
if isinstance(act, str):
str_act = str2act(act)
if act:
# Processing strings as input, activation functions with parameters.
if isinstance(act, str) and (len(act) > 5 and act[0:5] == "lrelu" or
len(act) > 10 and act[0:10] == "leaky_relu"):
out_act = str_act
elif isinstance(act, str):
out_act = str_act()
else:
# Processing classes or functions as input, activation functions without parameters
try:
out_act = act()
# Processing class or function as input, activation function with parameters
except:
out_act = act
else:
# Processing act is None
out_act = act
return out_act
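# e.g. processing_act("relu") -> a tlx.ops.ReLU() instance,
# processing_act(tlx.ops.Tanh) -> a tlx.ops.Tanh() instance, and
# processing_act(None) -> None.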
def _save_weights(net, file_path, format=None):
"""Input file_path, save model weights into a file of given format.
Use net.load_weights() to restore.
Parameters
----------
file_path : str
Filename to which the model weights will be saved.
format : str or None
Saved file format.
Value should be None, 'hdf5', 'npz', 'npz_dict' or 'ckpt'. Other format is not supported now.
1) If this is set to None, then the postfix of file_path will be used to decide saved format.
If the postfix is not in ['h5', 'hdf5', 'npz', 'ckpt'], then file will be saved in hdf5 format by default.
2) 'hdf5' will save model weights name in a list and each layer has its weights stored in a group of
the hdf5 file.
3) 'npz' will save model weights sequentially into a npz file.
4) 'npz_dict' will save model weights along with its name as a dict into a npz file.
5) 'ckpt' will save model weights into a tensorflow ckpt file.
Default None.
Examples
--------
1) Save model weights in hdf5 format by default.
>>> net = vgg16()
>>> optimizer = tlx.optimizers.Adam(learning_rate=0.001)
>>> metrics = tlx.metrics.Accuracy()
>>> model = tlx.model.Model(network=net, loss_fn=tlx.losses.cross_entropy, optimizer=optimizer, metrics=metrics)
>>> model.save_weights('./model.h5')
...
>>> model.load_weights('./model.h5')
2) Save model weights in npz/npz_dict format
>>> model.save_weights('./model.npz')
>>> model.save_weights('./model.npz', format='npz_dict')
"""
    if tlx.BACKEND != 'torch' and (net.all_weights is None or len(net.all_weights) == 0):
logging.warning("Model contains no weights or layers haven't been built, nothing will be saved")
return
if format is None:
postfix = file_path.split('.')[-1]
if postfix in ['h5', 'hdf5', 'npz', 'ckpt']:
format = postfix
else:
format = 'hdf5'
if format == 'hdf5' or format == 'h5':
raise NotImplementedError("hdf5 load/save is not supported now.")
# utils.save_weights_to_hdf5(file_path, net)
elif format == 'npz':
utils.save_npz(net.all_weights, file_path)
elif format == 'npz_dict':
if tlx.BACKEND == 'torch':
utils.save_npz_dict(net.named_parameters(), file_path)
else:
utils.save_npz_dict(net.all_weights, file_path)
elif format == 'ckpt':
# TODO: enable this when tf save ckpt is enabled
raise NotImplementedError("ckpt load/save is not supported now.")
else:
raise ValueError(
"Save format must be 'hdf5', 'npz', 'npz_dict' or 'ckpt'."
"Other format is not supported now."
)
def _load_weights(net, file_path, format=None, in_order=True, skip=False):
"""Load model weights from a given file, which should be previously saved by net.save_weights().
Parameters
----------
file_path : str
Filename from which the model weights will be loaded.
format : str or None
If not specified (None), the postfix of the file_path will be used to decide its format. If specified,
        value should be 'hdf5', 'npz', 'npz_dict' or 'ckpt'. Other formats are not supported yet.
In addition, it should be the same format when you saved the file using net.save_weights().
Default is None.
in_order : bool
Allow loading weights into model in a sequential way or by name. Only useful when 'format' is 'hdf5'.
If 'in_order' is True, weights from the file will be loaded into model in a sequential way.
If 'in_order' is False, weights from the file will be loaded into model by matching the name
with the weights of the model, particularly useful when trying to restore model in eager(graph) mode from
a weights file which is saved in graph(eager) mode.
Default is True.
skip : bool
Allow skipping weights whose name is mismatched between the file and model. Only useful when 'format' is
'hdf5' or 'npz_dict'. If 'skip' is True, 'in_order' argument will be ignored and those loaded weights
whose name is not found in model weights (net.all_weights) will be skipped. If 'skip' is False, error will
occur when mismatch is found.
Default is False.
Examples
--------
1) load model from a hdf5 file.
>>> net = vgg16()
>>> optimizer = tlx.optimizers.Adam(learning_rate=0.001)
>>> metrics = tlx.metrics.Accuracy()
>>> model = tlx.model.Model(network=net, loss_fn=tlx.losses.cross_entropy, optimizer=optimizer, metrics=metrics)
>>> model.load_weights('./model_graph.h5', in_order=False, skip=True) # load weights by name, skipping mismatch
>>> model.load_weights('./model_eager.h5') # load sequentially
2) load model from a npz file
>>> model.load_weights('./model.npz')
3) load model from a npz file, which is saved as npz_dict previously
>>> model.load_weights('./model.npz', format='npz_dict')
Notes
-------
1) 'in_order' is only useful when 'format' is 'hdf5'. If you are trying to load a weights file which is
saved in a different mode, it is recommended to set 'in_order' be True.
2) 'skip' is useful when 'format' is 'hdf5' or 'npz_dict'. If 'skip' is True,
'in_order' argument will be ignored.
"""
if not os.path.exists(file_path):
raise FileNotFoundError("file {} doesn't exist.".format(file_path))
if format is None:
format = file_path.split('.')[-1]
if format == 'hdf5' or format == 'h5':
raise NotImplementedError("hdf5 load/save is not supported now.")
# if skip ==True or in_order == False:
# # load by weights name
# utils.load_hdf5_to_weights(file_path, net, skip)
# else:
# # load in order
# utils.load_hdf5_to_weights_in_order(file_path, net)
elif format == 'npz':
utils.load_and_assign_npz(file_path, net)
elif format == 'npz_dict':
utils.load_and_assign_npz_dict(file_path, net, skip)
elif format == 'ckpt':
# TODO: enable this when tf save ckpt is enabled
raise NotImplementedError("ckpt load/save is not supported now.")
else:
raise ValueError(
"File format must be 'hdf5', 'npz', 'npz_dict' or 'ckpt'. "
"Other format is not supported now."
)
def _save_standard_weights_dict(net, file_path):
# Eliminate parameter naming differences between frameworks.
if tlx.BACKEND == 'torch':
save_standard_npz_dict(net.named_parameters(), file_path)
else:
save_standard_npz_dict(net.all_weights, file_path)
def encode_list_name(list_name):
# TensorFlow weights format: conv1.weight:0, conv1.bias:0
# Paddle weights format: conv1.weight, conv1.bias
    # PyTorch weights format: conv1.W, conv1.b
# MindSpore weights format: conv1.weights, conv1.bias
# standard weights format: conv1.weights, conv1.bias
for i in range(len(list_name)):
if tlx.BACKEND == 'tensorflow':
list_name[i] = list_name[i][:-2]
if tlx.BACKEND == 'torch':
if list_name[i][-1] == 'W' and 'conv' not in list_name[i]:
list_name[i] = list_name[i][:-2] + str('/weights')
elif list_name[i][-1] == 'W' and 'conv' in list_name[i]:
list_name[i] = list_name[i][:-2] + str('/filters')
elif list_name[i][-1] == 'b':
list_name[i] = list_name[i][:-2] + str('/biases')
elif list_name[i].split('.')[-1] in ['beta', 'gamma', 'moving_mean', 'moving_var']:
pass
else:
                raise NotImplementedError('These weights cannot be converted.')
return list_name
def decode_key_name(key_name):
if tlx.BACKEND == 'tensorflow':
key_name = key_name + str(':0')
if tlx.BACKEND == 'torch':
if key_name.split('/')[-1] in ['weights', 'filters']:
key_name = key_name[:-8] + str('.W')
elif key_name.split('/')[-1] == 'biases':
key_name = key_name[:-7] + str('.b')
else:
            raise NotImplementedError('These weights cannot be converted.')
return key_name
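# Round-trip sketch of the name translation for a PyTorch backend (hypothetical
# layer name 'fc1'):
#
#   >>> encode_list_name(['fc1.W'])     # -> ['fc1/weights']  (standard name)
#   >>> decode_key_name('fc1/weights')  # -> 'fc1.W'          (back to torch)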
def save_standard_npz_dict(save_list=None, name='model.npz'):
"""Input parameters and the file name, save parameters as a dictionary into standard npz_dict file.
Use ``tlx.files.load_and_assign_npz_dict()`` to restore.
Parameters
----------
save_list : list of parameters
A list of parameters (tensor) to be saved.
name : str
The name of the `.npz` file.
"""
if save_list is None:
save_list = []
if tlx.BACKEND != 'torch':
save_list_names = [tensor.name for tensor in save_list]
if tlx.BACKEND == 'tensorflow':
save_list_var = utils.tf_variables_to_numpy(save_list)
elif tlx.BACKEND == 'mindspore':
save_list_var = utils.ms_variables_to_numpy(save_list)
elif tlx.BACKEND == 'paddle':
save_list_var = utils.pd_variables_to_numpy(save_list)
elif tlx.BACKEND == 'torch':
save_list_names = []
save_list_var = []
for named, values in save_list:
save_list_names.append(named)
save_list_var.append(values.cpu().detach().numpy())
else:
raise NotImplementedError('Not implemented')
save_list_names = encode_list_name(save_list_names)
save_var_dict = {save_list_names[idx]: val for idx, val in enumerate(save_list_var)}
np.savez(name, **save_var_dict)
save_list_var = None
save_var_dict = None
del save_list_var
del save_var_dict
logging.info("[*] Model saved in npz_dict %s" % name)
def _load_standard_weights_dict(net, file_path, skip=False, reshape=False, format='npz_dict'):
if format == 'npz_dict':
load_and_assign_standard_npz_dict(net, file_path, skip, reshape)
elif format == 'npz':
load_and_assign_standard_npz(file_path, net, reshape)
def load_and_assign_standard_npz_dict(net, file_path, skip=False, reshape=False):
if not os.path.exists(file_path):
logging.error("file {} doesn't exist.".format(file_path))
return False
weights = np.load(file_path, allow_pickle=True)
if len(weights.keys()) != len(set(weights.keys())):
raise Exception("Duplication in model npz_dict %s" % file_path)
if tlx.BACKEND == 'torch':
net_weights_name = [n for n, v in net.named_parameters()]
torch_weights_dict = {n: v for n, v in net.named_parameters()}
else:
net_weights_name = [w.name for w in net.all_weights]
for key in weights.keys():
de_key = decode_key_name(key)
if de_key not in net_weights_name:
if skip:
logging.warning("Weights named '%s' not found in network. Skip it." % key)
else:
raise RuntimeError(
"Weights named '%s' not found in network. Hint: set argument skip=Ture "
"if you want to skip redundant or mismatch weights." % key
)
else:
if tlx.BACKEND == 'tensorflow':
reshape_weights = weight_reshape(weights[key], reshape)
check_reshape(reshape_weights, net.all_weights[net_weights_name.index(de_key)])
utils.assign_tf_variable(net.all_weights[net_weights_name.index(de_key)], reshape_weights)
elif tlx.BACKEND == 'mindspore':
reshape_weights = weight_reshape(weights[key], reshape)
import mindspore as ms
assign_param = ms.Tensor(reshape_weights, dtype=ms.float32)
check_reshape(assign_param, net.all_weights[net_weights_name.index(de_key)])
utils.assign_ms_variable(net.all_weights[net_weights_name.index(de_key)], assign_param)
elif tlx.BACKEND == 'paddle':
reshape_weights = weight_reshape(weights[key], reshape)
check_reshape(reshape_weights, net.all_weights[net_weights_name.index(de_key)])
utils.assign_pd_variable(net.all_weights[net_weights_name.index(de_key)], reshape_weights)
elif tlx.BACKEND == 'torch':
reshape_weights = weight_reshape(weights[key], reshape)
check_reshape(reshape_weights, net.all_weights[net_weights_name.index(de_key)])
utils.assign_th_variable(torch_weights_dict[de_key], reshape_weights)
else:
raise NotImplementedError('Not implemented')
logging.info("[*] Model restored from npz_dict %s" % file_path)
def load_and_assign_standard_npz(file_path=None, network=None, reshape=False):
if network is None:
raise ValueError("network is None.")
if not os.path.exists(file_path):
logging.error("file {} doesn't exist.".format(file_path))
return False
else:
weights = utils.load_npz(name=file_path)
ops = []
if tlx.BACKEND == 'tensorflow':
for idx, param in enumerate(weights):
param = weight_reshape(param, reshape)
check_reshape(param, network.all_weights[idx])
ops.append(network.all_weights[idx].assign(param))
elif tlx.BACKEND == 'mindspore':
class Assign_net(Cell):
def __init__(self, y):
super(Assign_net, self).__init__()
self.y = y
def construct(self, x):
Assign()(self.y, x)
for idx, param in enumerate(weights):
assign_param = Tensor(param, dtype=ms.float32)
assign_param = weight_reshape(assign_param, reshape)
check_reshape(assign_param, network.all_weights[idx])
Assign()(network.all_weights[idx], assign_param)
elif tlx.BACKEND == 'paddle':
for idx, param in enumerate(weights):
param = weight_reshape(param, reshape)
check_reshape(param, network.all_weights[idx])
utils.assign_pd_variable(network.all_weights[idx], param)
elif tlx.BACKEND == 'torch':
for idx, param in enumerate(weights):
param = weight_reshape(param, reshape)
check_reshape(param, network.all_weights[idx])
utils.assign_th_variable(network.all_weights[idx], param)
else:
raise NotImplementedError("This backend is not supported")
        logging.info("[*] Load {} SUCCESS!".format(file_path))
        return ops
def check_reshape(weight, shape_weights):
if len(weight.shape) >= 4 and weight.shape[::-1] == tuple(shape_weights.shape):
if tlx.BACKEND == 'tensorflow':
raise Warning(
'Set reshape to True only when importing weights from MindSpore/PyTorch/PaddlePaddle to TensorFlow.'
)
if tlx.BACKEND == 'torch':
raise Warning('Set reshape to True only when importing weights from TensorFlow to PyTorch.')
if tlx.BACKEND == 'paddle':
raise Warning('Set reshape to True only when importing weights from TensorFlow to PaddlePaddle.')
if tlx.BACKEND == 'mindspore':
raise Warning('Set reshape to True only when importing weights from TensorFlow to MindSpore.')
def weight_reshape(weight, reshape=False):
# TODO In this case only 2D convolution is considered. 3D convolution tests need to be supplemented.
if reshape:
if len(weight.shape) == 4:
weight = np.moveaxis(weight, (2, 3), (1, 0))
if len(weight.shape) == 5:
weight = np.moveaxis(weight, (3, 4), (1, 0))
return weight
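# Shape sketch: a TensorFlow 2D conv kernel laid out as (H, W, in, out), e.g.
# (3, 3, 64, 128), becomes the (out, in, H, W) layout used by PyTorch:
#
#   >>> w = np.zeros((3, 3, 64, 128))
#   >>> weight_reshape(w, reshape=True).shape   # -> (128, 64, 3, 3)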
def tolist(tensors):
if isinstance(tensors, list) or isinstance(tensors, tuple):
ntensors = list()
for t in tensors:
ntensors += tolist(t)
return ntensors
else:
return [tensors]
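# tolist flattens arbitrarily nested lists/tuples into a flat list, e.g.:
#
#   >>> tolist([1, (2, [3, 4])])   # -> [1, 2, 3, 4]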
def construct_graph(inputs, outputs):
"""construct computation graph for model using ModuleNode object"""
all_layers = []
node_by_depth = []
    input_tensors_list = inputs if isinstance(inputs, list) else [inputs]
queue_node = Queue()
# BFS to visit all nodes that should be involved in the computation graph
output_tensors_list = outputs if isinstance(outputs, list) else [outputs]
output_nodes = [tensor._info[0] for tensor in output_tensors_list]
visited_node_names = set()
for out_node in output_nodes:
if out_node.visited:
continue
queue_node.put(out_node)
while not queue_node.empty():
cur_node = queue_node.get()
in_nodes = cur_node.in_nodes
for node in in_nodes:
node.out_nodes.append(cur_node)
if not node.visited:
queue_node.put(node)
node.visited = True
if node.node_name not in visited_node_names:
visited_node_names.add(node.node_name)
# else have multiple layers with the same name
else:
raise ValueError(
'Layer name \'%s\' has already been used by another layer. Please change the layer name.'
% node.layer.name
)
# construct the computation graph in top-sort order
cur_depth = [tensor._info[0] for tensor in input_tensors_list]
next_depth = []
indegrees = {}
visited_layer_names = []
while not len(cur_depth) == 0:
node_by_depth.append(cur_depth)
for node in cur_depth:
if node.layer.name not in visited_layer_names:
all_layers.append(node.layer)
visited_layer_names.append(node.layer.name)
for out_node in node.out_nodes:
if out_node.node_name not in indegrees.keys():
indegrees[out_node.node_name] = len(out_node.in_nodes)
indegrees[out_node.node_name] -= 1
if indegrees[out_node.node_name] == 0:
next_depth.append(out_node)
cur_depth = next_depth
next_depth = []
return node_by_depth, all_layers
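# For a simple chain input -> dense1 -> dense2 the traversal yields, roughly
# (a sketch; node objects abbreviated to their layer names):
#   node_by_depth = [[input_node], [dense1_node], [dense2_node]]
#   all_layers    = [input_layer, dense1_layer, dense2_layer]
# i.e. one list of nodes per topological depth, plus the de-duplicated layers.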
def select_attrs(obj):
attrs_dict = obj.__dict__
attrs = {}
_select_key = ['kernel_size', 'stride', 'act', 'padding', 'data_format', 'concat_dim', 'dilation', 'bias']
for k in _select_key:
if k in attrs_dict:
if k == 'act':
attrs[k] = attrs_dict[k].__class__.__name__
else:
attrs[k] = attrs_dict[k]
return attrs
class ModuleNode(object):
"""
The class :class:`ModuleNode` class represents a conceptional node for a layer.
ModuleNode is used for building topology and it is actually a light weighted
wrapper over Layer.
Parameters
----------
layer : tl.layers.Layer
A tl layer that wants to create a node.
node_index : int
Index of this node in layer._nodes.
in_nodes :a list of ModuleNode
Father nodes to this node.
in_tensors : a list of tensors
Input tensors to this node.
out_tensors : a list of tensors
Output tensors to this node.
in_tensor_idxes : a list of int
Indexes of each input tensor in its corresponding node's out_tensors.
Methods
---------
__init__()
Initializing the ModuleNode.
__call__()
(1) Forwarding through the layer. (2) Update its input/output tensors.
"""
def __init__(self, layer, node_index, in_nodes, in_tensors, out_tensors, in_tensor_idxes, attr):
self.layer = layer
self.node_index = node_index
self.in_nodes = in_nodes
self.out_nodes = []
self.in_tensors = in_tensors
self.out_tensors = out_tensors
self.node_name = layer.name + "_node_{}".format(node_index)
self.in_tensors_idxes = in_tensor_idxes
self.attr = attr
self.visited = False
def __call__(self, inputs, **kwargs):
"""(1) Forwarding through the layer. (2) Update its input/output tensors."""
outputs = self.layer(inputs, **kwargs)
self.in_tensors = tolist(inputs)
self.out_tensors = tolist(outputs)
return self.out_tensors
avg_line_length: 39.02455 | max_line_length: 117 | alphanum_fraction: 0.62305
hexsha: abafb95f761dec82ce3fb36e979308e247b2fe15 | size: 26639 | ext: py | lang: Python
path: async_upnp_client/client.py | repo: Consolatis/async_upnp_client @ 6303ae832e3184915a120772e5c4bc559e33ceaa | licenses: ["Apache-2.0"]
# -*- coding: utf-8 -*-
"""UPnP client module."""
import logging
import urllib.parse
from datetime import datetime, timezone
from typing import (
Any,
Callable,
Generic,
List,
Mapping,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
from xml.etree import ElementTree as ET
from xml.sax.saxutils import escape, unescape
import defusedxml.ElementTree as DET
import voluptuous as vol
from async_upnp_client.const import (
NS,
ActionArgumentInfo,
ActionInfo,
DeviceInfo,
ServiceInfo,
StateVariableInfo,
)
_LOGGER = logging.getLogger(__name__)
_LOGGER_TRAFFIC = logging.getLogger("async_upnp_client.traffic")
EventCallbackType = Callable[["UpnpService", Sequence["UpnpStateVariable"]], None]
class UpnpRequester:
"""
Abstract base class used for performing async HTTP requests.
Implement method async_do_http_request() in your concrete class.
"""
async def async_http_request(
self,
method: str,
url: str,
headers: Optional[Mapping[str, str]] = None,
body: Optional[str] = None,
body_type: str = "text",
) -> Tuple[int, Mapping, Union[str, bytes, None]]:
"""
        Do an HTTP request.
        :param method: HTTP method
        :param url: URL to call
        :param headers: headers to send
        :param body: body to send
        :param body_type: how to interpret body: 'text', 'raw', 'ignore'
        :return: status code, headers, body
"""
# pylint: disable=too-many-arguments
_LOGGER_TRAFFIC.debug(
"Sending request:\n%s %s\n%s\n%s\n",
method,
url,
"\n".join([key + ": " + value for key, value in (headers or {}).items()]),
body or "",
)
(
response_status,
response_headers,
response_body,
) = await self.async_do_http_request(
method, url, headers=headers, body=body, body_type=body_type
)
log_response_body = (
response_body if body_type == "text" else "async_upnp_client: OMITTING BODY"
)
_LOGGER_TRAFFIC.debug(
"Got response:\n%s\n%s\n\n%s",
response_status,
"\n".join([key + ": " + value for key, value in response_headers.items()]),
log_response_body,
)
return response_status, response_headers, response_body
async def async_do_http_request(
self,
method: str,
url: str,
headers: Optional[Mapping[str, str]] = None,
body: Optional[str] = None,
body_type: str = "text",
) -> Tuple[int, Mapping, Union[str, bytes, None]]:
"""
        Actually do an HTTP request.
        :param method: HTTP method
        :param url: URL to call
        :param headers: headers to send
        :param body: body to send
        :param body_type: how to interpret body: 'text', 'raw', 'ignore'
        :return: status code, headers, body
"""
# pylint: disable=too-many-arguments
raise NotImplementedError()
class UpnpError(Exception):
"""UpnpError."""
class UpnpValueError(UpnpError):
"""Invalid value error."""
def __init__(self, name: str, value: Any) -> None:
"""Initialize."""
super().__init__("Invalid value for %s: '%s'" % (name, value))
class UpnpDevice:
"""UPnP Device representation."""
def __init__(
self,
requester: UpnpRequester,
device_info: DeviceInfo,
services: Sequence["UpnpService"],
) -> None:
"""Initialize."""
self.requester = requester
self._device_info = device_info
self.services = {service.service_type: service for service in services}
# bind services to ourselves
for service in services:
service.device = self
@property
def name(self) -> str:
"""Get the name of this device."""
return self._device_info.friendly_name
@property
def friendly_name(self) -> str:
"""Get the friendly name of this device, alias for name."""
return self._device_info.friendly_name
@property
def manufacturer(self) -> str:
"""Get the manufacturer of this device."""
return self._device_info.manufacturer
@property
def model_description(self) -> Optional[str]:
"""Get the model description of this device."""
return self._device_info.model_description
@property
def model_name(self) -> str:
"""Get the model name of this device."""
return self._device_info.model_name
@property
def model_number(self) -> Optional[str]:
"""Get the model number of this device."""
return self._device_info.model_number
@property
def serial_number(self) -> Optional[str]:
"""Get the serial number of this device."""
return self._device_info.serial_number
@property
def udn(self) -> str:
"""Get UDN of this device."""
return self._device_info.udn
@property
def device_url(self) -> str:
"""Get the URL of this device."""
return self._device_info.url
@property
def device_type(self) -> str:
"""Get the device type of this device."""
return self._device_info.device_type
@property
def xml(self) -> ET.Element:
"""Get the XML description for this device."""
return self._device_info.xml
def has_service(self, service_type: str) -> bool:
"""Check if service by service_type is available."""
return service_type in self.services
def service(self, service_type: str) -> "UpnpService":
"""Get service by service_type."""
return self.services[service_type]
async def async_ping(self) -> None:
"""Ping the device."""
await self.requester.async_http_request("GET", self.device_url)
def __str__(self) -> str:
"""To string."""
return "<UpnpDevice({0})>".format(self.udn)
class UpnpService:
"""UPnP Service representation."""
# pylint: disable=too-many-instance-attributes
def __init__(
self,
requester: UpnpRequester,
service_info: ServiceInfo,
state_variables: Sequence["UpnpStateVariable"],
actions: Sequence["UpnpAction"],
) -> None:
"""Initialize."""
self.requester = requester
self._service_info = service_info
self.state_variables = {sv.name: sv for sv in state_variables}
self.actions = {ac.name: ac for ac in actions}
self.on_event: Optional[EventCallbackType] = None
self._device: Optional[UpnpDevice] = None
# bind state variables to ourselves
for state_var in state_variables:
state_var.service = self
# bind actions to ourselves
for action in actions:
action.service = self
@property
def device(self) -> UpnpDevice:
"""Get parent UpnpDevice."""
if not self._device:
raise UpnpError("UpnpService not bound to UpnpDevice")
return self._device
@device.setter
def device(self, device: UpnpDevice) -> None:
"""Set parent UpnpDevice."""
if self._device:
raise UpnpError("UpnpService already bound to UpnpDevice")
self._device = device
@property
def service_type(self) -> str:
"""Get service type for this UpnpService."""
return self._service_info.service_type
@property
def service_id(self) -> str:
"""Get service ID for this UpnpService."""
return self._service_info.service_id
@property
def scpd_url(self) -> str:
"""Get full SCPD-url for this UpnpService."""
url: str = urllib.parse.urljoin(
self.device.device_url, self._service_info.scpd_url
)
return url
@property
def control_url(self) -> str:
"""Get full control-url for this UpnpService."""
url: str = urllib.parse.urljoin(
self.device.device_url, self._service_info.control_url
)
return url
@property
def event_sub_url(self) -> str:
"""Get full event sub-url for this UpnpService."""
url: str = urllib.parse.urljoin(
self.device.device_url, self._service_info.event_sub_url
)
return url
@property
def xml(self) -> ET.Element:
"""Get the XML description for this service."""
return self._service_info.xml
def has_state_variable(self, name: str) -> bool:
"""Check if self has state variable called name."""
if name not in self.state_variables and "}" in name:
# possibly messed up namespaces, try again without namespace
name = name.split("}")[1]
return name in self.state_variables
def state_variable(self, name: str) -> "UpnpStateVariable":
"""Get UPnpStateVariable by name."""
state_var = self.state_variables.get(name, None)
# possibly messed up namespaces, try again without namespace
if not state_var and "}" in name:
name = name.split("}")[1]
state_var = self.state_variables.get(name, None)
if state_var is None:
raise KeyError(name)
return state_var
def has_action(self, name: str) -> bool:
"""Check if self has action called name."""
return name in self.actions
def action(self, name: str) -> "UpnpAction":
"""Get UPnpAction by name."""
return self.actions[name]
async def async_call_action(
self, action: "UpnpAction", **kwargs: Any
) -> Mapping[str, Any]:
"""
Call a UpnpAction.
        Parameters are Python values and are coerced automatically to UPnP values.
"""
if isinstance(action, str):
action = self.actions[action]
result = await action.async_call(**kwargs)
return result
def notify_changed_state_variables(self, changes: Mapping[str, str]) -> None:
"""Do callback on UpnpStateVariable.value changes."""
changed_state_variables = []
for name, value in changes.items():
if not self.has_state_variable(name):
_LOGGER.debug("State variable %s does not exist, ignoring", name)
continue
state_var = self.state_variable(name)
try:
state_var.upnp_value = value
changed_state_variables.append(state_var)
except UpnpValueError:
_LOGGER.error("Got invalid value for %s: %s", state_var, value)
if self.on_event:
# pylint: disable=not-callable
self.on_event(self, changed_state_variables)
def __str__(self) -> str:
"""To string."""
udn = "unbound"
if self._device:
udn = self._device.udn
return "<UpnpService({}, {})>".format(self.service_id, udn)
def __repr__(self) -> str:
"""To repr."""
udn = "unbound"
if self._device:
udn = self._device.udn
return "<UpnpService({}, {})>".format(self.service_id, udn)
class UpnpAction:
"""Representation of an Action."""
class Argument:
"""Representation of an Argument of an Action."""
def __init__(
self, argument_info: ActionArgumentInfo, state_variable: "UpnpStateVariable"
) -> None:
"""Initialize."""
self._argument_info = argument_info
self._related_state_variable = state_variable
self._value = None
self.raw_upnp_value: Optional[str] = None
def validate_value(self, value: Any) -> None:
"""Validate value against related UpnpStateVariable."""
self.related_state_variable.validate_value(value)
@property
def name(self) -> str:
"""Get the name."""
return self._argument_info.name
@property
def direction(self) -> str:
"""Get the direction."""
return self._argument_info.direction
@property
def related_state_variable(self) -> "UpnpStateVariable":
"""Get the related state variable."""
return self._related_state_variable
@property
def xml(self) -> ET.Element:
"""Get the XML description for this device."""
return self._argument_info.xml
@property
def value(self) -> Any:
"""Get Python value for this argument."""
return self._value
@value.setter
def value(self, value: Any) -> None:
"""Set Python value for this argument."""
self.validate_value(value)
self._value = value
@property
def upnp_value(self) -> str:
"""Get UPnP value for this argument."""
return self.coerce_upnp(self.value)
@upnp_value.setter
def upnp_value(self, upnp_value: str) -> None:
"""Set UPnP value for this argument."""
self._value = self.coerce_python(upnp_value)
def coerce_python(self, upnp_value: str) -> Any:
"""Coerce UPnP value to Python."""
return self.related_state_variable.coerce_python(upnp_value)
def coerce_upnp(self, value: Any) -> str:
"""Coerce Python value to UPnP value."""
return self.related_state_variable.coerce_upnp(value)
def __repr__(self) -> str:
"""To repr."""
return "<UpnpAction.Argument({}, {})>".format(self.name, self.direction)
def __init__(
self,
action_info: ActionInfo,
arguments: List["UpnpAction.Argument"],
disable_unknown_out_argument_error: bool = False,
) -> None:
"""Initialize."""
self._action_info = action_info
self._arguments = arguments
self._service: Optional[UpnpService] = None
self._properties = {
"disable_unknown_out_argument_error": disable_unknown_out_argument_error,
}
@property
def name(self) -> str:
"""Get the name."""
return self._action_info.name
@property
def arguments(self) -> List["UpnpAction.Argument"]:
"""Get the arguments."""
return self._arguments
@property
def xml(self) -> ET.Element:
"""Get the XML for this action."""
return self._action_info.xml
@property
def service(self) -> UpnpService:
"""Get parent UpnpService."""
if not self._service:
raise UpnpError("UpnpAction not bound to UpnpService")
return self._service
@service.setter
def service(self, service: UpnpService) -> None:
"""Set parent UpnpService."""
if self._service:
raise UpnpError("UpnpAction already bound to UpnpService")
self._service = service
def __str__(self) -> str:
"""To string."""
return "<UpnpAction({0})>".format(self.name)
def __repr__(self) -> str:
"""To repr."""
return "<UpnpAction({0})({1}) -> {2}>".format(
self.name, self.in_arguments(), self.out_arguments()
)
def validate_arguments(self, **kwargs: Any) -> None:
"""
Validate arguments against in-arguments of self.
The python type is expected.
"""
for arg in self.in_arguments():
if arg.name not in kwargs:
raise UpnpError(f"Missing argument: {arg.name}")
value = kwargs[arg.name]
arg.validate_value(value)
def in_arguments(self) -> List["UpnpAction.Argument"]:
"""Get all in-arguments."""
return [arg for arg in self.arguments if arg.direction == "in"]
def out_arguments(self) -> List["UpnpAction.Argument"]:
"""Get all out-arguments."""
return [arg for arg in self.arguments if arg.direction == "out"]
def argument(
self, name: str, direction: Optional[str] = None
) -> Optional["UpnpAction.Argument"]:
"""Get an UpnpAction.Argument by name (and possibliy direction)."""
for arg in self.arguments:
if arg.name != name:
continue
if direction is not None and arg.direction != direction:
continue
return arg
return None
async def async_call(self, **kwargs: Any) -> Mapping[str, Any]:
"""Call an action with arguments."""
# do request
url, headers, body = self.create_request(**kwargs)
(
status_code,
response_headers,
response_body,
) = await self.service.requester.async_http_request("POST", url, headers, body)
if not isinstance(response_body, str):
raise UpnpError("Did not receive a body")
if status_code != 200:
raise UpnpError(
"Error during async_call(), status: %s, body: %s"
% (status_code, response_body)
)
# parse body
response_args = self.parse_response(
self.service.service_type, response_headers, response_body
)
return response_args
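    # Usage sketch (hypothetical service/action names; needs a concrete
    # UpnpRequester and a running event loop):
    #
    #   action = service.action('GetVolume')
    #   result = await action.async_call(InstanceID=0, Channel='Master')
    #   volume = result['CurrentVolume']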
def create_request(self, **kwargs: Any) -> Tuple[str, Mapping[str, str], str]:
"""Create headers and headers for this to-be-called UpnpAction."""
# build URL
control_url = self.service.control_url
# construct SOAP body
service_type = self.service.service_type
soap_args = self._format_request_args(**kwargs)
body = (
'<?xml version="1.0"?>'
'<s:Envelope s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/"'
' xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">'
"<s:Body>"
'<u:{1} xmlns:u="{0}">'
"{2}"
"</u:{1}>"
"</s:Body>"
"</s:Envelope>".format(service_type, self.name, soap_args)
)
# construct SOAP header
soap_action = "{0}#{1}".format(service_type, self.name)
headers = {
"SOAPAction": '"{0}"'.format(soap_action),
"Host": urllib.parse.urlparse(control_url).netloc,
"Content-Type": 'text/xml; charset="utf-8"',
"Content-Length": str(len(body)),
}
return control_url, headers, body
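    # The generated SOAP request looks roughly like this (illustrative
    # service type and argument values):
    #
    #   SOAPAction: "urn:schemas-upnp-org:service:RenderingControl:1#GetVolume"
    #   <s:Envelope ...><s:Body>
    #     <u:GetVolume xmlns:u="urn:schemas-upnp-org:service:RenderingControl:1">
    #       <InstanceID>0</InstanceID>
    #       <Channel>Master</Channel>
    #     </u:GetVolume>
    #   </s:Body></s:Envelope>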
def _format_request_args(self, **kwargs: Any) -> str:
self.validate_arguments(**kwargs)
arg_strs = [
"<{0}>{1}</{0}>".format(arg.name, escape(arg.coerce_upnp(kwargs[arg.name])))
for arg in self.in_arguments()
]
return "\n".join(arg_strs)
def parse_response(
self, service_type: str, response_headers: Mapping, response_body: str
) -> Mapping[str, Any]:
"""Parse response from called Action."""
# pylint: disable=unused-argument
xml = DET.fromstring(response_body)
query = ".//soap_envelope:Body/soap_envelope:Fault"
        if xml.find(query, NS) is not None:
error_code = xml.findtext(".//control:errorCode", None, NS)
error_description = xml.findtext(".//control:errorDescription", None, NS)
raise UpnpError(
"Error during call_action, error_code: %s, error_description: %s"
% (error_code, error_description)
)
try:
return self._parse_response_args(service_type, xml)
except AttributeError:
_LOGGER.debug("Error during unescape of: %s", response_body)
raise
def _parse_response_args(
self, service_type: str, xml: ET.Element
) -> Mapping[str, Any]:
"""Parse response arguments."""
args = {}
query = ".//{{{0}}}{1}Response".format(service_type, self.name)
response = xml.find(query, NS)
if response is None:
raise UpnpError(
"Invalid response: %s" % (ET.tostring(xml, encoding="unicode"),)
)
for arg_xml in response.findall("./"):
name = arg_xml.tag
arg = self.argument(name, "out")
if not arg:
if self._properties["disable_unknown_out_argument_error"]:
continue
raise UpnpError(
"Invalid response, unknown argument: %s, %s"
% (name, ET.tostring(xml, encoding="unicode"))
)
arg.raw_upnp_value = arg_xml.text
try:
arg.upnp_value = unescape(arg_xml.text or "")
except AttributeError:
_LOGGER.debug("Error during unescape of: %s", arg_xml.text)
raise
args[name] = arg.value
return args
T = TypeVar("T") # pylint: disable=invalid-name
class UpnpStateVariable(Generic[T]):
"""Representation of a State Variable."""
UPNP_VALUE_ERROR = object()
def __init__(
self, state_variable_info: StateVariableInfo, schema: vol.Schema
) -> None:
"""Initialize."""
self._state_variable_info = state_variable_info
self._schema = schema
self._service: Optional[UpnpService] = None
self._value: Optional[Any] = None # None, T or UPNP_VALUE_ERROR
self._updated_at: Optional[datetime] = None
@property
def service(self) -> UpnpService:
"""Get parent UpnpService."""
if not self._service:
raise UpnpError("UpnpStateVariable not bound to UpnpService")
return self._service
@service.setter
def service(self, service: UpnpService) -> None:
"""Set parent UpnpService."""
if self._service:
raise UpnpError("UpnpStateVariable already bound to UpnpService")
self._service = service
@property
def xml(self) -> ET.Element:
"""Get the XML for this State Variable."""
return self._state_variable_info.xml
@property
def data_type_mapping(self) -> Mapping[str, Callable]:
"""Get the data type (coercer) for this State Variable."""
type_info = self._state_variable_info.type_info
return type_info.data_type_mapping
@property
def data_type_python(self) -> Callable[[str], Any]:
"""Get the Python data type for this State Variable."""
return self.data_type_mapping["type"]
@property
def min_value(self) -> Optional[T]:
"""Min value for this UpnpStateVariable, if defined."""
type_info = self._state_variable_info.type_info
min_ = type_info.allowed_value_range.get("min")
if min_ is not None:
value: T = self.coerce_python(min_)
return value
return None
@property
def max_value(self) -> Optional[T]:
"""Max value for this UpnpStateVariable, if defined."""
type_info = self._state_variable_info.type_info
max_ = type_info.allowed_value_range.get("max")
if max_ is not None:
value: T = self.coerce_python(max_)
return value
return None
@property
def allowed_values(self) -> List[T]:
"""List with allowed values for this UpnpStateVariable, if defined."""
type_info = self._state_variable_info.type_info
allowed_values = type_info.allowed_values or []
return [self.coerce_python(allowed_value) for allowed_value in allowed_values]
@property
def send_events(self) -> bool:
"""Check if this UpnpStatevariable send events."""
send_events = self._state_variable_info.send_events
return send_events
@property
def name(self) -> str:
"""Name of the UpnpStatevariable."""
name: str = self._state_variable_info.name
return name
@property
def data_type(self) -> str:
"""UPNP data type of UpnpStateVariable."""
return self._state_variable_info.type_info.data_type
@property
def default_value(self) -> Optional[T]:
"""Get default value for UpnpStateVariable, if defined."""
type_info = self._state_variable_info.type_info
default_value = type_info.default_value
if default_value is not None:
value: T = self.coerce_python(default_value)
return value
return None
def validate_value(self, value: T) -> None:
"""Validate value."""
try:
self._schema(value)
except vol.error.MultipleInvalid as ex:
raise UpnpValueError(self.name, value) from ex
@property
def value(self) -> Optional[T]:
"""
Get the value, python typed.
Invalid values are returned as None.
"""
if self._value is UpnpStateVariable.UPNP_VALUE_ERROR:
return None
return self._value
@value.setter
def value(self, value: Any) -> None:
"""Set value, python typed."""
self.validate_value(value)
self._value = value
self._updated_at = datetime.now(timezone.utc)
@property
def value_unchecked(self) -> Optional[T]:
"""
Get the value, python typed.
If an event was received with an invalid value for this StateVariable
(e.g., 'abc' for a 'ui4' StateVariable), then this will return
UpnpStateVariable.UPNP_VALUE_ERROR instead of None.
"""
return self._value
@property
def upnp_value(self) -> str:
"""Get the value, UPnP typed."""
return self.coerce_upnp(self.value)
@upnp_value.setter
def upnp_value(self, upnp_value: str) -> None:
"""Set the value, UPnP typed."""
try:
self.value = self.coerce_python(upnp_value)
except ValueError as err:
_LOGGER.debug('Error setting upnp_value "%s", error: %s', upnp_value, err)
self._value = UpnpStateVariable.UPNP_VALUE_ERROR
def coerce_python(self, upnp_value: str) -> Any:
"""Coerce value from UPNP to python."""
coercer = self.data_type_mapping["in"]
return coercer(upnp_value)
def coerce_upnp(self, value: Any) -> str:
"""Coerce value from python to UPNP."""
coercer = self.data_type_mapping["out"]
coerced_value: str = coercer(value)
return coerced_value
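    # Coercion sketch for a 'ui4' state variable (the exact mapping comes from
    # the data_type_mapping supplied via StateVariableInfo):
    #
    #   sv.coerce_python('42')   # -> 42   (Python int)
    #   sv.coerce_upnp(42)       # -> '42' (UPnP string)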
@property
def updated_at(self) -> Optional[datetime]:
"""
Get timestamp at which this UpnpStateVariable was updated.
Return time in UTC.
"""
return self._updated_at
def __str__(self) -> str:
"""To string."""
return "<UpnpStateVariable({0}, {1})>".format(self.name, self.data_type)
avg_line_length: 31.48818 | max_line_length: 88 | alphanum_fraction: 0.596118
hexsha: 7c1fe26e9f7975a5c3f58fa68165fd1815843e40 | size: 12885 | ext: py | lang: Python
path: scipy/sparse/construct.py | repo: lesserwhirls/scipy-cwt @ ee673656d879d9356892621e23ed0ced3d358621 | licenses: ["BSD-3-Clause"] | stars: 8 | forks: 8
"""Functions to construct sparse matrices
"""
__docformat__ = "restructuredtext en"
__all__ = [ 'spdiags', 'eye', 'identity', 'kron', 'kronsum',
'hstack', 'vstack', 'bmat', 'rand']
from warnings import warn
import numpy as np
from .sputils import upcast
from .csr import csr_matrix
from .csc import csc_matrix
from .bsr import bsr_matrix
from .coo import coo_matrix
from .lil import lil_matrix
from .dia import dia_matrix
def spdiags(data, diags, m, n, format=None):
"""
Return a sparse matrix from diagonals.
Parameters
----------
data : array_like
matrix diagonals stored row-wise
diags : diagonals to set
- k = 0 the main diagonal
- k > 0 the k-th upper diagonal
- k < 0 the k-th lower diagonal
m, n : int
shape of the result
format : format of the result (e.g. "csr")
By default (format=None) an appropriate sparse matrix
format is returned. This choice is subject to change.
See Also
--------
dia_matrix : the sparse DIAgonal format.
Examples
--------
>>> data = array([[1,2,3,4],[1,2,3,4],[1,2,3,4]])
>>> diags = array([0,-1,2])
>>> spdiags(data, diags, 4, 4).todense()
matrix([[1, 0, 3, 0],
[1, 2, 0, 4],
[0, 2, 3, 0],
[0, 0, 3, 4]])
"""
return dia_matrix((data, diags), shape=(m,n)).asformat(format)
def identity(n, dtype='d', format=None):
"""Identity matrix in sparse format
Returns an identity matrix with shape (n,n) using a given
sparse format and dtype.
Parameters
----------
n : integer
Shape of the identity matrix.
dtype :
Data type of the matrix
format : string
Sparse format of the result, e.g. format="csr", etc.
Examples
--------
>>> identity(3).todense()
matrix([[ 1., 0., 0.],
[ 0., 1., 0.],
[ 0., 0., 1.]])
>>> identity(3, dtype='int8', format='dia')
<3x3 sparse matrix of type '<type 'numpy.int8'>'
with 3 stored elements (1 diagonals) in DIAgonal format>
"""
    if format in ['csr','csc']:
        indptr = np.arange(n+1, dtype=np.intc)
        indices = np.arange(n, dtype=np.intc)
        data = np.ones(n, dtype=dtype)
        cls = csr_matrix if format == 'csr' else csc_matrix
        return cls((data,indices,indptr),(n,n))
elif format == 'coo':
row = np.arange(n, dtype=np.intc)
col = np.arange(n, dtype=np.intc)
data = np.ones(n, dtype=dtype)
return coo_matrix((data,(row,col)),(n,n))
elif format == 'dia':
data = np.ones(n, dtype=dtype)
diags = [0]
return dia_matrix((data,diags), shape=(n,n))
else:
return identity(n, dtype=dtype, format='csr').asformat(format)
def eye(m, n, k=0, dtype='d', format=None):
"""eye(m, n) returns a sparse (m x n) matrix where the k-th diagonal
is all ones and everything else is zeros.
"""
m,n = int(m),int(n)
diags = np.ones((1, max(0, min(m + k, n))), dtype=dtype)
return spdiags(diags, k, m, n).asformat(format)
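# Illustrative pattern for the k-th diagonal (a sketch, default dtype 'd'):
#
#   >>> eye(3, 4, k=1).todense()
#   matrix([[ 0.,  1.,  0.,  0.],
#           [ 0.,  0.,  1.,  0.],
#           [ 0.,  0.,  0.,  1.]])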
def kron(A, B, format=None):
"""kronecker product of sparse matrices A and B
Parameters
----------
A : sparse or dense matrix
first matrix of the product
B : sparse or dense matrix
second matrix of the product
format : string
format of the result (e.g. "csr")
Returns
-------
kronecker product in a sparse matrix format
Examples
--------
>>> A = csr_matrix(array([[0,2],[5,0]]))
>>> B = csr_matrix(array([[1,2],[3,4]]))
>>> kron(A,B).todense()
matrix([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
>>> kron(A,[[1,2],[3,4]]).todense()
matrix([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
"""
B = coo_matrix(B)
if (format is None or format == "bsr") and 2*B.nnz >= B.shape[0] * B.shape[1]:
#B is fairly dense, use BSR
A = csr_matrix(A,copy=True)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix( output_shape )
B = B.toarray()
data = A.data.repeat(B.size).reshape(-1,B.shape[0],B.shape[1])
data = data * B
return bsr_matrix((data,A.indices,A.indptr), shape=output_shape)
else:
#use COO
A = coo_matrix(A)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix( output_shape )
# expand entries of a into blocks
row = A.row.repeat(B.nnz)
col = A.col.repeat(B.nnz)
data = A.data.repeat(B.nnz)
row *= B.shape[0]
col *= B.shape[1]
# increment block indices
row,col = row.reshape(-1,B.nnz),col.reshape(-1,B.nnz)
row += B.row
col += B.col
row,col = row.reshape(-1),col.reshape(-1)
# compute block entries
data = data.reshape(-1,B.nnz) * B.data
data = data.reshape(-1)
return coo_matrix((data,(row,col)), shape=output_shape).asformat(format)
def kronsum(A, B, format=None):
"""kronecker sum of sparse matrices A and B
Kronecker sum of two sparse matrices is a sum of two Kronecker
products kron(I_n,A) + kron(B,I_m) where A has shape (m,m)
and B has shape (n,n) and I_m and I_n are identity matrices
of shape (m,m) and (n,n) respectively.
Parameters
----------
A
square matrix
B
square matrix
format : string
format of the result (e.g. "csr")
Returns
-------
kronecker sum in a sparse matrix format
"""
A = coo_matrix(A)
B = coo_matrix(B)
if A.shape[0] != A.shape[1]:
raise ValueError('A is not square')
if B.shape[0] != B.shape[1]:
raise ValueError('B is not square')
dtype = upcast(A.dtype, B.dtype)
L = kron(identity(B.shape[0],dtype=dtype), A, format=format)
R = kron(B, identity(A.shape[0],dtype=dtype), format=format)
return (L+R).asformat(format) #since L + R is not always same format
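# Small worked example (a sketch; A and B must be square):
#
#   >>> A = coo_matrix([[0, 1], [1, 0]])
#   >>> B = coo_matrix([[2]])
#   >>> kronsum(A, B).todense()   # kron(I_1, A) + kron(B, I_2)
#   matrix([[2, 1],
#           [1, 2]])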
def hstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices horizontally (column wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : string
sparse format of the result (e.g. "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
See Also
--------
vstack : stack sparse matrices vertically (row wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, vstack
>>> A = coo_matrix([[1,2],[3,4]])
>>> B = coo_matrix([[5],[6]])
>>> hstack( [A,B] ).todense()
matrix([[1, 2, 5],
[3, 4, 6]])
"""
return bmat([blocks], format=format, dtype=dtype)
def vstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices vertically (row wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : string
sparse format of the result (e.g. "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
See Also
--------
hstack : stack sparse matrices horizontally (column wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, vstack
>>> A = coo_matrix([[1,2],[3,4]])
>>> B = coo_matrix([[5,6]])
>>> vstack( [A,B] ).todense()
matrix([[1, 2],
[3, 4],
[5, 6]])
"""
return bmat([ [b] for b in blocks ], format=format, dtype=dtype)
def bmat(blocks, format=None, dtype=None):
"""
Build a sparse matrix from sparse sub-blocks
Parameters
----------
blocks
grid of sparse matrices with compatible shapes
an entry of None implies an all-zero matrix
format : sparse format of the result (e.g. "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
Examples
--------
>>> from scipy.sparse import coo_matrix, bmat
>>> A = coo_matrix([[1,2],[3,4]])
>>> B = coo_matrix([[5],[6]])
>>> C = coo_matrix([[7]])
>>> bmat( [[A,B],[None,C]] ).todense()
matrix([[1, 2, 5],
[3, 4, 6],
[0, 0, 7]])
>>> bmat( [[A,None],[None,C]] ).todense()
matrix([[1, 2, 0],
[3, 4, 0],
[0, 0, 7]])
"""
blocks = np.asarray(blocks, dtype='object')
    if blocks.ndim != 2:
raise ValueError('blocks must have rank 2')
M,N = blocks.shape
    block_mask = np.zeros(blocks.shape, dtype=bool)
brow_lengths = np.zeros(blocks.shape[0], dtype=np.intc)
bcol_lengths = np.zeros(blocks.shape[1], dtype=np.intc)
# convert everything to COO format
for i in range(M):
for j in range(N):
if blocks[i,j] is not None:
A = coo_matrix(blocks[i,j])
blocks[i,j] = A
block_mask[i,j] = True
if brow_lengths[i] == 0:
brow_lengths[i] = A.shape[0]
else:
if brow_lengths[i] != A.shape[0]:
raise ValueError('blocks[%d,:] has incompatible row dimensions' % i)
if bcol_lengths[j] == 0:
bcol_lengths[j] = A.shape[1]
else:
if bcol_lengths[j] != A.shape[1]:
raise ValueError('blocks[:,%d] has incompatible column dimensions' % j)
# ensure that at least one value in each row and col is not None
if brow_lengths.min() == 0:
raise ValueError('blocks[%d,:] is all None' % brow_lengths.argmin() )
if bcol_lengths.min() == 0:
raise ValueError('blocks[:,%d] is all None' % bcol_lengths.argmin() )
nnz = sum([ A.nnz for A in blocks[block_mask] ])
if dtype is None:
dtype = upcast( *tuple([A.dtype for A in blocks[block_mask]]) )
row_offsets = np.concatenate(([0], np.cumsum(brow_lengths)))
col_offsets = np.concatenate(([0], np.cumsum(bcol_lengths)))
data = np.empty(nnz, dtype=dtype)
row = np.empty(nnz, dtype=np.intc)
col = np.empty(nnz, dtype=np.intc)
nnz = 0
for i in range(M):
for j in range(N):
if blocks[i,j] is not None:
A = blocks[i,j]
data[nnz:nnz + A.nnz] = A.data
row[nnz:nnz + A.nnz] = A.row
col[nnz:nnz + A.nnz] = A.col
row[nnz:nnz + A.nnz] += row_offsets[i]
col[nnz:nnz + A.nnz] += col_offsets[j]
nnz += A.nnz
shape = (np.sum(brow_lengths), np.sum(bcol_lengths))
return coo_matrix((data, (row, col)), shape=shape).asformat(format)
def rand(m, n, density=0.01, format="coo", dtype=None):
"""Generate a sparse matrix of the given shape and density with uniformely
distributed values.
Parameters
----------
m, n: int
shape of the matrix
density: real
density of the generated matrix: density equal to one means a full
matrix, density of 0 means a matrix with no non-zero items.
format: str
sparse matrix format.
dtype: dtype
type of the returned matrix values.
Notes
-----
Only float types are supported for now.
"""
if density < 0 or density > 1:
raise ValueError("density expected to be 0 <= density <= 1")
    if dtype and dtype not in [np.float32, np.float64, np.longdouble]:
raise NotImplementedError("type %s not supported" % dtype)
mn = m * n
# XXX: sparse uses intc instead of intp...
tp = np.intp
if mn > np.iinfo(tp).max:
msg = """\
Trying to generate a random sparse matrix such that the product of dimensions is
greater than %d - this is not supported on this machine
"""
raise ValueError(msg % np.iinfo(tp).max)
# Number of non zero values
    k = int(density * m * n)
# Generate a few more values than k so that we can get unique values
# afterwards.
# XXX: one could be smarter here
mlow = 5
fac = 1.02
gk = min(k + mlow, fac * k)
    def _gen_unique_rand(_gk):
        # rand() needs an int count; gk may be a float after scaling
        ind = np.random.rand(int(_gk))
        return np.unique(np.floor(ind * mn))[:k]
    ind = _gen_unique_rand(gk)
    while ind.size < k:
        gk *= 1.05
        ind = _gen_unique_rand(gk)
    j = np.floor(ind * 1. / m).astype(tp)
    i = (ind - j * m).astype(tp)
vals = np.random.rand(k).astype(dtype)
return coo_matrix((vals, (i, j)), shape=(m, n)).asformat(format)
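# Usage sketch: a 5% dense 100x100 CSR matrix of uniform values in [0, 1):
#
#   >>> S = rand(100, 100, density=0.05, format='csr')
#   >>> S.nnz   # -> 500 stored entries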
avg_line_length: 28.506637 | max_line_length: 95 | alphanum_fraction: 0.554366
hexsha: cfd7c8397ae9dddeb154e4ef45c1529ebaec7ef9 | size: 464 | ext: py | lang: Python
path: redis_helpers/tests/test_redis_helpers.py | repo: ChristopherHaydenTodd/ctodd-python-lib-redis @ d59018937c0f4d786af291e8cf82b901af9fcc53 | licenses: ["MIT"] | stars: 1
#!/usr/bin/env python3
"""
Purpose:
Test File for redis_helpers.py
"""
# Python Library Imports
import os
import sys
import fakeredis
import pytest
import redis
from unittest import mock
# Import File to Test
from redis_helpers import redis_helpers
###
# Fixtures
###
# None at the Moment (Empty Test Suite)
###
# Mocked Functions
###
# None at the Moment (Empty Test Suite)
###
# Test Payload
###
# None at the Moment (Empty Test Suite)
avg_line_length: 11.317073 | max_line_length: 39 | alphanum_fraction: 0.6875
hexsha: 1f991022c01781b3b5d89ba160d05a83ef29db94 | size: 31598 | ext: py | lang: Python
path: statsmodels/graphics/gofplots.py | repo: ADI10HERO/statsmodels @ d932507dc71c93b5d162a678042fb0701ee4bf57 | licenses: ["BSD-3-Clause"] | stars: 76 | forks: 35
from statsmodels.compat.python import lzip
import numpy as np
from scipy import stats
from statsmodels.regression.linear_model import OLS
from statsmodels.tools.tools import add_constant
from statsmodels.tools.decorators import cache_readonly
from statsmodels.distributions import ECDF
from . import utils
__all__ = ['qqplot', 'qqplot_2samples', 'qqline', 'ProbPlot']
class ProbPlot(object):
"""
Q-Q and P-P Probability Plots
Can take arguments specifying the parameters for dist or fit them
automatically. (See fit under kwargs.)
Parameters
----------
data : array_like
1d data array
dist : A scipy.stats or statsmodels distribution
Compare x against dist. The default is
scipy.stats.distributions.norm (a standard normal).
distargs : tuple
A tuple of arguments passed to dist to specify it fully
so dist.ppf may be called. distargs must not contain loc
or scale. These values must be passed using the loc or
scale inputs.
a : float
Offset for the plotting position of an expected order
statistic, for example. The plotting positions are given
by (i - a)/(nobs - 2*a + 1) for i in range(0,nobs+1)
loc : float
Location parameter for dist
scale : float
Scale parameter for dist
fit : bool
If fit is false, loc, scale, and distargs are passed to the
distribution. If fit is True then the parameters for dist
are fit automatically using dist.fit. The quantiles are formed
from the standardized data, after subtracting the fitted loc
and dividing by the fitted scale.
See Also
--------
scipy.stats.probplot
Notes
-----
1) Depends on matplotlib.
2) If `fit` is True then the parameters are fit using the
distribution's `fit()` method.
3) The call signatures for the `qqplot`, `ppplot`, and `probplot`
methods are similar, so examples 1 through 4 apply to all
three methods.
4) The three plotting methods are summarized below:
ppplot : Probability-Probability plot
Compares the sample and theoretical probabilities (percentiles).
qqplot : Quantile-Quantile plot
Compares the sample and theoretical quantiles
probplot : Probability plot
Same as a Q-Q plot, however probabilities are shown in the scale of
the theoretical distribution (x-axis) and the y-axis contains
unscaled quantiles of the sample data.
Examples
--------
The first example shows a Q-Q plot for regression residuals
>>> # example 1
>>> import statsmodels.api as sm
>>> from matplotlib import pyplot as plt
>>> data = sm.datasets.longley.load(as_pandas=False)
>>> data.exog = sm.add_constant(data.exog)
>>> model = sm.OLS(data.endog, data.exog)
>>> mod_fit = model.fit()
>>> res = mod_fit.resid # residuals
>>> probplot = sm.ProbPlot(res)
>>> fig = probplot.qqplot()
>>> h = plt.title('Ex. 1 - qqplot - residuals of OLS fit')
>>> plt.show()
qqplot of the residuals against quantiles of t-distribution with 4
degrees of freedom:
>>> # example 2
>>> import scipy.stats as stats
>>> probplot = sm.ProbPlot(res, stats.t, distargs=(4,))
>>> fig = probplot.qqplot()
>>> h = plt.title('Ex. 2 - qqplot - residuals against quantiles of t-dist')
>>> plt.show()
    The same qqplot as above, but with mean 3 and std 10:
>>> # example 3
>>> probplot = sm.ProbPlot(res, stats.t, distargs=(4,), loc=3, scale=10)
>>> fig = probplot.qqplot()
>>> h = plt.title('Ex. 3 - qqplot - resids vs quantiles of t-dist')
>>> plt.show()
Automatically determine parameters for t distribution including the
loc and scale:
>>> # example 4
>>> probplot = sm.ProbPlot(res, stats.t, fit=True)
>>> fig = probplot.qqplot(line='45')
>>> h = plt.title('Ex. 4 - qqplot - resids vs. quantiles of fitted t-dist')
>>> plt.show()
A second `ProbPlot` object can be used to compare two separate sample
sets by using the `other` kwarg in the `qqplot` and `ppplot` methods.
>>> # example 5
>>> import numpy as np
>>> x = np.random.normal(loc=8.25, scale=2.75, size=37)
>>> y = np.random.normal(loc=8.75, scale=3.25, size=37)
>>> pp_x = sm.ProbPlot(x, fit=True)
>>> pp_y = sm.ProbPlot(y, fit=True)
>>> fig = pp_x.qqplot(line='45', other=pp_y)
>>> h = plt.title('Ex. 5 - qqplot - compare two sample sets')
>>> plt.show()
    In qqplot, the sample size of `other` can be equal to or larger than the
    first. If it is larger, the `other` samples are reduced to the size of the
    first by interpolation.
>>> # example 6
>>> x = np.random.normal(loc=8.25, scale=2.75, size=37)
>>> y = np.random.normal(loc=8.75, scale=3.25, size=57)
>>> pp_x = sm.ProbPlot(x, fit=True)
>>> pp_y = sm.ProbPlot(y, fit=True)
>>> fig = pp_x.qqplot(line='45', other=pp_y)
>>> title = 'Ex. 6 - qqplot - compare different sample sizes'
>>> h = plt.title(title)
>>> plt.show()
In ppplot, sample size of `other` and the first can be different. `other`
will be used to estimate an empirical cumulative distribution function
(ECDF). ECDF(x) will be plotted against p(x)=0.5/n, 1.5/n, ..., (n-0.5)/n
where x are sorted samples from the first.
>>> # example 7
>>> x = np.random.normal(loc=8.25, scale=2.75, size=37)
>>> y = np.random.normal(loc=8.75, scale=3.25, size=57)
>>> pp_x = sm.ProbPlot(x, fit=True)
>>> pp_y = sm.ProbPlot(y, fit=True)
>>> fig = pp_y.ppplot(line='45', other=pp_x)
>>> h = plt.title('Ex. 7A- ppplot - compare two sample sets, other=pp_x')
>>> fig = pp_x.ppplot(line='45', other=pp_y)
>>> h = plt.title('Ex. 7B- ppplot - compare two sample sets, other=pp_y')
>>> plt.show()
    The following plot displays some options; follow the link to see the
    code.
.. plot:: plots/graphics_gofplots_qqplot.py
"""
def __init__(self, data, dist=stats.norm, fit=False,
distargs=(), a=0, loc=0, scale=1):
self.data = data
self.a = a
self.nobs = data.shape[0]
self.distargs = distargs
self.fit = fit
if isinstance(dist, str):
dist = getattr(stats, dist)
if fit:
self.fit_params = dist.fit(data)
self.loc = self.fit_params[-2]
self.scale = self.fit_params[-1]
if len(self.fit_params) > 2:
self.dist = dist(*self.fit_params[:-2],
**dict(loc=0, scale=1))
else:
self.dist = dist(loc=0, scale=1)
elif distargs or loc != 0 or scale != 1:
try:
self.dist = dist(*distargs, **dict(loc=loc, scale=scale))
except Exception:
distargs = ', '.join([str(da) for da in distargs])
cmd = 'dist({distargs}, loc={loc}, scale={scale})'
cmd = cmd.format(distargs=distargs, loc=loc, scale=scale)
raise TypeError('Initializing the distribution failed. This '
'can occur if distargs contains loc or scale. '
'The distribution initialization command '
'is:\n{cmd}'.format(cmd=cmd))
self.loc = loc
self.scale = scale
self.fit_params = np.r_[distargs, loc, scale]
else:
self.dist = dist
self.loc = loc
self.scale = scale
self.fit_params = np.r_[loc, scale]
        # properties
self._cache = {}
@cache_readonly
def theoretical_percentiles(self):
"""Theoretical percentiles"""
return plotting_pos(self.nobs, self.a)
@cache_readonly
def theoretical_quantiles(self):
"""Theoretical quantiles"""
        try:
            return self.dist.ppf(self.theoretical_percentiles)
        except TypeError:
            msg = '{0} requires more parameters to ' \
                  'compute ppf'.format(self.dist.name)
            raise TypeError(msg)
        except Exception:
            msg = 'failed to compute the ppf of {0}'.format(self.dist.name)
            raise
@cache_readonly
def sorted_data(self):
"""sorted data"""
sorted_data = np.array(self.data, copy=True)
sorted_data.sort()
return sorted_data
@cache_readonly
def sample_quantiles(self):
"""sample quantiles"""
if self.fit and self.loc != 0 and self.scale != 1:
return (self.sorted_data-self.loc)/self.scale
else:
return self.sorted_data
@cache_readonly
def sample_percentiles(self):
"""Sample percentiles"""
quantiles = \
(self.sorted_data - self.fit_params[-2])/self.fit_params[-1]
return self.dist.cdf(quantiles)
def ppplot(self, xlabel=None, ylabel=None, line=None, other=None,
ax=None, **plotkwargs):
"""
P-P plot of the percentiles (probabilities) of x versus the
probabilities (percentiles) of a distribution.
Parameters
----------
xlabel : str or None, optional
User-provided labels for the x-axis. If None (default),
other values are used depending on the status of the kwarg `other`.
ylabel : str or None, optional
User-provided labels for the y-axis. If None (default),
other values are used depending on the status of the kwarg `other`.
        line : str {'45', 's', 'r', 'q'} or None, optional
Options for the reference line to which the data is compared:
- '45': 45-degree line
- 's': standardized line, the expected order statistics are
scaled by the standard deviation of the given sample and have
the mean added to them
- 'r': A regression line is fit
- 'q': A line is fit through the quartiles.
- None: by default no reference line is added to the plot.
other : ProbPlot, array_like, or None, optional
If provided, ECDF(x) will be plotted against p(x) where x are
sorted samples from `self`. ECDF is an empirical cumulative
distribution function estimated from `other` and
p(x) = 0.5/n, 1.5/n, ..., (n-0.5)/n where n is the number of
            samples in `self`. If an array-like object is provided, it will be
            turned into a `ProbPlot` instance using default parameters. If not
            provided (default), `self.dist(x)` is plotted against p(x).
ax : Matplotlib AxesSubplot instance, optional
If given, this subplot is used to plot in instead of a new figure
being created.
**plotkwargs : additional matplotlib arguments to be passed to the
`plot` command.
Returns
-------
fig : Matplotlib figure instance
If `ax` is None, the created figure. Otherwise the figure to which
`ax` is connected.
"""
if other is not None:
check_other = isinstance(other, ProbPlot)
if not check_other:
other = ProbPlot(other)
p_x = self.theoretical_percentiles
ecdf_x = ECDF(other.sample_quantiles)(self.sample_quantiles)
fig, ax = _do_plot(p_x, ecdf_x, self.dist, ax=ax, line=line,
**plotkwargs)
if xlabel is None:
xlabel = 'Probabilities of 2nd Sample'
if ylabel is None:
ylabel = 'Probabilities of 1st Sample'
else:
fig, ax = _do_plot(self.theoretical_percentiles,
self.sample_percentiles,
self.dist, ax=ax, line=line,
**plotkwargs)
if xlabel is None:
xlabel = "Theoretical Probabilities"
if ylabel is None:
ylabel = "Sample Probabilities"
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.set_xlim([0.0, 1.0])
ax.set_ylim([0.0, 1.0])
return fig
def qqplot(self, xlabel=None, ylabel=None, line=None, other=None,
ax=None, **plotkwargs):
"""
Q-Q plot of the quantiles of x versus the quantiles/ppf of a
distribution or the quantiles of another `ProbPlot` instance.
Parameters
----------
xlabel, ylabel : str or None, optional
User-provided labels for the x-axis and y-axis. If None (default),
other values are used depending on the status of the kwarg `other`.
        line : str {'45', 's', 'r', 'q'} or None, optional
Options for the reference line to which the data is compared:
- '45' - 45-degree line
- 's' - standardized line, the expected order statistics are scaled
by the standard deviation of the given sample and have the mean
added to them
- 'r' - A regression line is fit
- 'q' - A line is fit through the quartiles.
- None - by default no reference line is added to the plot.
other : `ProbPlot` instance, array_like, or None, optional
If provided, the sample quantiles of this `ProbPlot` instance are
plotted against the sample quantiles of the `other` `ProbPlot`
instance. Sample size of `other` must be equal or larger than
this `ProbPlot` instance. If the sample size is larger, sample
quantiles of `other` will be interpolated to match the sample size
of this `ProbPlot` instance. If an array-like object is provided,
it will be turned into a `ProbPlot` instance using default
parameters. If not provided (default), the theoretical quantiles
are used.
ax : Matplotlib AxesSubplot instance, optional
If given, this subplot is used to plot in instead of a new figure
being created.
**plotkwargs : additional matplotlib arguments to be passed to the
`plot` command.
Returns
-------
fig : Matplotlib figure instance
If `ax` is None, the created figure. Otherwise the figure to which
`ax` is connected.
"""
if other is not None:
check_other = isinstance(other, ProbPlot)
if not check_other:
other = ProbPlot(other)
s_self = self.sample_quantiles
s_other = other.sample_quantiles
if len(s_self) > len(s_other):
raise ValueError("Sample size of `other` must be equal or " +
"larger than this `ProbPlot` instance")
elif len(s_self) < len(s_other):
# Use quantiles of the smaller set and interpolate quantiles of
# the larger data set
p = plotting_pos(self.nobs, self.a)
s_other = stats.mstats.mquantiles(s_other, p)
fig, ax = _do_plot(s_other, s_self, self.dist, ax=ax, line=line,
**plotkwargs)
if xlabel is None:
xlabel = 'Quantiles of 2nd Sample'
if ylabel is None:
ylabel = 'Quantiles of 1st Sample'
else:
fig, ax = _do_plot(self.theoretical_quantiles,
self.sample_quantiles,
self.dist, ax=ax, line=line,
**plotkwargs)
if xlabel is None:
xlabel = "Theoretical Quantiles"
if ylabel is None:
ylabel = "Sample Quantiles"
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
return fig
def probplot(self, xlabel=None, ylabel=None, line=None,
exceed=False, ax=None, **plotkwargs):
"""
Probability plot of the unscaled quantiles of x versus the
probabilities of a distribution (not to be confused with a P-P plot).
The x-axis is scaled linearly with the quantiles, but the probabilities
are used to label the axis.
Parameters
----------
xlabel, ylabel : str or None, optional
User-provided labels for the x-axis and y-axis. If None (default),
other values are used depending on the status of the kwarg `other`.
        line : str {'45', 's', 'r', 'q'} or None, optional
Options for the reference line to which the data is compared:
- '45' - 45-degree line
- 's' - standardized line, the expected order statistics are scaled
by the standard deviation of the given sample and have the mean
added to them
- 'r' - A regression line is fit
- 'q' - A line is fit through the quartiles.
- None - by default no reference line is added to the plot.
exceed : bool, optional
            - If False (default) the raw sample quantiles are plotted against
              the theoretical quantiles, showing the probability that a sample
              will not exceed a given value
- If True, the theoretical quantiles are flipped such that the
figure displays the probability that a sample will exceed a
given value.
ax : Matplotlib AxesSubplot instance, optional
If given, this subplot is used to plot in instead of a new figure
being created.
**plotkwargs : additional matplotlib arguments to be passed to the
`plot` command.
Returns
-------
fig : Matplotlib figure instance
If `ax` is None, the created figure. Otherwise the figure to which
`ax` is connected.
"""
if exceed:
fig, ax = _do_plot(self.theoretical_quantiles[::-1],
self.sorted_data,
self.dist, ax=ax, line=line,
**plotkwargs)
if xlabel is None:
xlabel = 'Probability of Exceedance (%)'
else:
fig, ax = _do_plot(self.theoretical_quantiles,
self.sorted_data,
self.dist, ax=ax, line=line,
**plotkwargs)
if xlabel is None:
xlabel = 'Non-exceedance Probability (%)'
if ylabel is None:
ylabel = "Sample Quantiles"
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
_fmt_probplot_axis(ax, self.dist, self.nobs)
return fig
def qqplot(data, dist=stats.norm, distargs=(), a=0, loc=0, scale=1, fit=False,
line=None, ax=None, **plotkwargs):
"""
Q-Q plot of the quantiles of x versus the quantiles/ppf of a distribution.
Can take arguments specifying the parameters for dist or fit them
automatically. (See fit under Parameters.)
Parameters
----------
data : array_like
1d data array
dist : A scipy.stats or statsmodels distribution
Compare x against dist. The default
is scipy.stats.distributions.norm (a standard normal).
distargs : tuple
A tuple of arguments passed to dist to specify it fully
so dist.ppf may be called.
loc : float
Location parameter for dist
a : float
Offset for the plotting position of an expected order statistic, for
example. The plotting positions are given by (i - a)/(nobs - 2*a + 1)
        for i in range(1, nobs + 1)
scale : float
Scale parameter for dist
fit : bool
If fit is false, loc, scale, and distargs are passed to the
distribution. If fit is True then the parameters for dist
are fit automatically using dist.fit. The quantiles are formed
from the standardized data, after subtracting the fitted loc
and dividing by the fitted scale.
    line : str {'45', 's', 'r', 'q'} or None
Options for the reference line to which the data is compared:
- '45' - 45-degree line
- 's' - standardized line, the expected order statistics are scaled
by the standard deviation of the given sample and have the mean
added to them
- 'r' - A regression line is fit
- 'q' - A line is fit through the quartiles.
- None - by default no reference line is added to the plot.
ax : Matplotlib AxesSubplot instance, optional
If given, this subplot is used to plot in instead of a new figure being
created.
**plotkwargs : additional matplotlib arguments to be passed to the
`plot` command.
Returns
-------
fig : Matplotlib figure instance
If `ax` is None, the created figure. Otherwise the figure to which
`ax` is connected.
See Also
--------
scipy.stats.probplot
Examples
--------
>>> import statsmodels.api as sm
>>> from matplotlib import pyplot as plt
>>> data = sm.datasets.longley.load(as_pandas=False)
>>> data.exog = sm.add_constant(data.exog)
>>> mod_fit = sm.OLS(data.endog, data.exog).fit()
>>> res = mod_fit.resid # residuals
>>> fig = sm.qqplot(res)
>>> plt.show()
qqplot of the residuals against quantiles of t-distribution with 4 degrees
of freedom:
>>> import scipy.stats as stats
>>> fig = sm.qqplot(res, stats.t, distargs=(4,))
>>> plt.show()
    qqplot against the same distribution as above, but with mean 3 and std 10:
>>> fig = sm.qqplot(res, stats.t, distargs=(4,), loc=3, scale=10)
>>> plt.show()
Automatically determine parameters for t distribution including the
loc and scale:
>>> fig = sm.qqplot(res, stats.t, fit=True, line='45')
>>> plt.show()
    The following plot displays some options; follow the link to see the code.
.. plot:: plots/graphics_gofplots_qqplot.py
Notes
-----
Depends on matplotlib. If `fit` is True then the parameters are fit using
the distribution's fit() method.
"""
probplot = ProbPlot(data, dist=dist, distargs=distargs,
fit=fit, a=a, loc=loc, scale=scale)
fig = probplot.qqplot(ax=ax, line=line, **plotkwargs)
return fig
def qqplot_2samples(data1, data2, xlabel=None, ylabel=None, line=None,
ax=None):
"""
Q-Q Plot of two samples' quantiles.
Can take either two `ProbPlot` instances or two array-like objects. In the
case of the latter, both inputs will be converted to `ProbPlot` instances
using only the default values - so use `ProbPlot` instances if
finer-grained control of the quantile computations is required.
Parameters
----------
data1, data2 : array_like (1d) or `ProbPlot` instances
xlabel, ylabel : str or None
User-provided labels for the x-axis and y-axis. If None (default),
other values are used.
    line : str {'45', 's', 'r', 'q'} or None
Options for the reference line to which the data is compared:
- '45' - 45-degree line
- 's' - standardized line, the expected order statistics are scaled
by the standard deviation of the given sample and have the mean
added to them
- 'r' - A regression line is fit
- 'q' - A line is fit through the quartiles.
- None - by default no reference line is added to the plot.
ax : Matplotlib AxesSubplot instance, optional
If given, this subplot is used to plot in instead of a new figure being
created.
Returns
-------
fig : Matplotlib figure instance
If `ax` is None, the created figure. Otherwise the figure to which
`ax` is connected.
See Also
--------
scipy.stats.probplot
Examples
--------
>>> import statsmodels.api as sm
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> from statsmodels.graphics.gofplots import qqplot_2samples
>>> x = np.random.normal(loc=8.5, scale=2.5, size=37)
>>> y = np.random.normal(loc=8.0, scale=3.0, size=37)
>>> pp_x = sm.ProbPlot(x)
>>> pp_y = sm.ProbPlot(y)
>>> qqplot_2samples(pp_x, pp_y)
>>> plt.show()
.. plot:: plots/graphics_gofplots_qqplot_2samples.py
>>> fig = qqplot_2samples(pp_x, pp_y, xlabel=None, ylabel=None, \
... line=None, ax=None)
Notes
-----
1) Depends on matplotlib.
2) If `data1` and `data2` are not `ProbPlot` instances, instances will be
created using the default parameters. Therefore, it is recommended to use
`ProbPlot` instance if fine-grained control is needed in the computation
of the quantiles.
"""
if not isinstance(data1, ProbPlot):
data1 = ProbPlot(data1)
if not isinstance(data2, ProbPlot):
data2 = ProbPlot(data2)
fig = data1.qqplot(xlabel=xlabel, ylabel=ylabel,
line=line, other=data2, ax=ax)
return fig
def qqline(ax, line, x=None, y=None, dist=None, fmt='r-'):
"""
Plot a reference line for a qqplot.
Parameters
----------
ax : matplotlib axes instance
The axes on which to plot the line
    line : str {'45', 'r', 's', 'q'}
        Options for the reference line to which the data is compared:
- '45' - 45-degree line
- 's' - standardized line, the expected order statistics are scaled by
the standard deviation of the given sample and have the mean
added to them
- 'r' - A regression line is fit
- 'q' - A line is fit through the quartiles.
- None - By default no reference line is added to the plot.
x : array
X data for plot. Not needed if line is '45'.
y : array
Y data for plot. Not needed if line is '45'.
dist : scipy.stats.distribution
A scipy.stats distribution, needed if line is 'q'.
Notes
-----
There is no return value. The line is plotted on the given `ax`.
Examples
--------
Import the food expenditure dataset. Plot annual food expenditure on x-axis
and household income on y-axis. Use qqline to add regression line into the
plot.
>>> import statsmodels.api as sm
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> from statsmodels.graphics.gofplots import qqline
>>> foodexp = sm.datasets.engel.load(as_pandas=False)
>>> x = foodexp.exog
>>> y = foodexp.endog
>>> ax = plt.subplot(111)
>>> plt.scatter(x, y)
>>> ax.set_xlabel(foodexp.exog_name[0])
>>> ax.set_ylabel(foodexp.endog_name)
>>> qqline(ax, 'r', x, y)
>>> plt.show()
.. plot:: plots/graphics_gofplots_qqplot_qqline.py
"""
if line == '45':
end_pts = lzip(ax.get_xlim(), ax.get_ylim())
end_pts[0] = min(end_pts[0])
end_pts[1] = max(end_pts[1])
ax.plot(end_pts, end_pts, fmt)
ax.set_xlim(end_pts)
ax.set_ylim(end_pts)
return # does this have any side effects?
if x is None and y is None:
raise ValueError("If line is not 45, x and y cannot be None.")
elif line == 'r':
# could use ax.lines[0].get_xdata(), get_ydata(),
# but do not know axes are 'clean'
y = OLS(y, add_constant(x)).fit().fittedvalues
        ax.plot(x, y, fmt)
    elif line == 's':
        m, b = y.std(), y.mean()
        ref_line = x * m + b
ax.plot(x, ref_line, fmt)
elif line == 'q':
_check_for_ppf(dist)
q25 = stats.scoreatpercentile(y, 25)
q75 = stats.scoreatpercentile(y, 75)
theoretical_quartiles = dist.ppf([0.25, 0.75])
m = (q75 - q25) / np.diff(theoretical_quartiles)
        b = q25 - m * theoretical_quartiles[0]
        ax.plot(x, m * x + b, fmt)
# about 10x faster than plotting_position in sandbox and mstats
def plotting_pos(nobs, a):
"""
Generates sequence of plotting positions
Parameters
----------
nobs : int
Number of probability points to plot
a : float
        Offset for the plotting position of an expected order statistic.
Returns
-------
plotting_positions : array
The plotting positions
Notes
-----
The plotting positions are given by (i - a)/(nobs - 2*a + 1) for i in
    range(1, nobs + 1)
See Also
--------
scipy.stats.mstats.plotting_positions
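    Examples
    --------
    A quick numeric check against the formula in the Notes section
    (illustrative doctest; exact float formatting may vary by numpy version):
    >>> plotting_pos(5, 0.5)
    array([0.1, 0.3, 0.5, 0.7, 0.9])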
"""
return (np.arange(1., nobs + 1) - a)/(nobs - 2 * a + 1)
def _fmt_probplot_axis(ax, dist, nobs):
"""
Formats a theoretical quantile axis to display the corresponding
probabilities on the quantiles' scale.
    Parameters
    ----------
    ax : Matplotlib AxesSubplot instance
        The axis to be formatted
    nobs : scalar
        Number of observations in the sample
    dist : scipy.stats.distribution
        A scipy.stats distribution sufficiently specified to implement its
        ppf() method.
Returns
-------
There is no return value. This operates on `ax` in place
"""
_check_for_ppf(dist)
if nobs < 50:
axis_probs = np.array([1, 2, 5, 10, 20, 30, 40, 50, 60,
70, 80, 90, 95, 98, 99, ]) / 100.0
elif nobs < 500:
axis_probs = np.array([0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 30, 40, 50, 60,
70, 80, 90, 95, 98, 99, 99.5, 99.8,
99.9]) / 100.0
else:
axis_probs = np.array([0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10,
20, 30, 40, 50, 60, 70, 80, 90, 95, 98, 99,
99.5, 99.8, 99.9, 99.95, 99.98, 99.99]) / 100.0
axis_qntls = dist.ppf(axis_probs)
ax.set_xticks(axis_qntls)
ax.set_xticklabels(axis_probs*100, rotation=45,
rotation_mode='anchor',
horizontalalignment='right',
verticalalignment='center')
ax.set_xlim([axis_qntls.min(), axis_qntls.max()])
def _do_plot(x, y, dist=None, line=False, ax=None, fmt='bo', **kwargs):
"""
    Boilerplate plotting function for the `ppplot`, `qqplot`, and
    `probplot` methods of the `ProbPlot` class
    Parameters
    ----------
x, y : array_like
Data to be plotted
dist : scipy.stats.distribution
A scipy.stats distribution, needed if `line` is 'q'.
    line : str {'45', 's', 'r', 'q'} or None
Options for the reference line to which the data is compared.
ax : Matplotlib AxesSubplot instance, optional
If given, this subplot is used to plot in instead of a new figure being
created.
fmt : str, optional
matplotlib-compatible formatting string for the data markers
kwargs : keywords
These are passed to matplotlib.plot
Returns
-------
fig : Matplotlib Figure instance
ax : Matplotlib AxesSubplot instance (see Parameters)
"""
fig, ax = utils.create_mpl_ax(ax)
ax.set_xmargin(0.02)
ax.plot(x, y, fmt, **kwargs)
if line:
        if line not in ['r', 'q', '45', 's']:
msg = "%s option for line not understood" % line
raise ValueError(msg)
qqline(ax, line, x=x, y=y, dist=dist)
return fig, ax
def _check_for_ppf(dist):
if not hasattr(dist, 'ppf'):
raise ValueError("distribution must have a ppf method")
nostrint/redat.py | CiKu370/no-strint | 4f4fbe71517b84c00abd4fbdee15bacba01c0079 | ["MIT"]
__version__ = '1.4.9'
BANNER = ''' _
(o)
(_|_) <no strint> {0} @ zvtyrdt.id
||| (https://github.com/zevtyardt)
'''.format(__version__)
src/osmo/config_osmo.py | 0xChief/staketaxcsv | 3122736c4044e9a22237fffacee80ca1d7604be1 | ["MIT"]
from common import ExporterTypes as et
class localconfig:
job = None
debug = False
limit = 10000 # max txs
# Treat LP deposits/withdrawals as "transfers"/"omit"/"trades" (ignored for koinly)
lp_treatment = et.LP_TREATMENT_DEFAULT
cache = False
ibc_addresses = {}
exponents = {}
wardrobe/use_cases/user_detail.py | wardrobe-auth/wardrobe | fd2f95e50c1d035a2a60e0fdc68685ce45b6e653 | ["MIT"]
from ca_util import ResponseFailure, ResponseSuccess
from wardrobe.request_objects.user_id import UserIdRequestObject
from wardrobe.request_objects.user_paginate import UserPaginateRequestObject
class UserDetailUseCase:
def __init__(self, user_repo):
self.user_repo = user_repo
def execute(self, request_object: UserIdRequestObject):
if not request_object:
return ResponseFailure.build_from_invalid_request_object(request_object)
user_id = request_object.user_id
user = self.user_repo.get(user_id)
if user is None:
return ResponseFailure.build_resource_error(f'User:{user_id} not found.')
return ResponseSuccess(user)
src/benchmarks/gc/src/analysis/types.py | BruceForstall/performance | 47524b9060aa7cb13205fa281a6be6e2d404995c | ["MIT"]
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the MIT license.
# See the LICENSE file in the project root for more information.
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
from functools import reduce
from math import isnan
from pathlib import Path
from statistics import mean, median, stdev
from typing import (
Any,
Callable,
cast,
Dict,
FrozenSet,
Iterable,
Mapping,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
from result import Err, Ok, Result
from ..commonlib.bench_file import GCPerfSimResult, ProcessQuery, TestResult, TestRunStatus
from ..commonlib.collection_util import count, empty_mapping, is_empty, map_to_mapping
from ..commonlib.document import Cell
from ..commonlib.frozen_dict import FrozenDict
from ..commonlib.option import map_option, non_null
from ..commonlib.result_utils import (
all_non_err,
fn_to_ok,
flat_map_ok,
map_ok,
map_ok_2,
option_to_result,
unwrap,
)
from ..commonlib.score_spec import ScoreElement, ScoreSpec
from ..commonlib.type_utils import check_cast, enum_value, E, T, U, with_slots
from ..commonlib.util import (
bytes_to_gb,
bytes_to_mb,
float_to_str_smaller,
get_95th_percentile,
get_or_did_you_mean,
get_percent,
mb_to_gb,
msec_to_seconds,
remove_extension,
stdev_frac,
)
from .clr import Clr
from .clr_types import (
AbstractGCPerHeapHistory,
AbstractGCPerHeapHistoryGenData,
AbstractGCStats,
AbstractIProcessIDToProcessName,
AbstractIThreadIDToProcessID,
AbstractJoinInfoForProcess,
AbstractJoinInfoForGC,
AbstractJoinInfoForHeap,
AbstractServerGcHistory,
AbstractTimeSpan,
AbstractTraceGC,
AbstractTraceProcess,
AbstractTraceLoadedDotNetRuntime,
)
from .enums import (
EMPTY_GC_GLOBAL_MECHANISMS,
GCGlobalMechanisms,
gc_heap_compact_reason,
gc_heap_expand_mechanism,
gc_reason,
GCType,
Gens,
invert_gc_global_mechanisms,
MarkRootType,
StartupFlags,
union_gc_global_mechanisms,
)
@with_slots
@dataclass(frozen=True)
class ValueAndErrors:
value: float
err_neg: float
err_pos: float
def to_tuple(self) -> Tuple[float, float, float]:
return self.value, self.err_neg, self.err_pos
class SpecialSampleKind(Enum):
mean = 0
median = 1
min = 2
max = 3
SampleKind = Union[int, SpecialSampleKind]
SAMPLE_KIND_DOC = """
When multiple iterations of a test were run, which statistic to use as the representative sample.
If an integer, uses the nth iteration.
""".strip()
RUN_METRICS_DOC = """
Metrics applying to entire test run.
See `metrics.md` for a list.
"""
SINGLE_GC_METRICS_DOC = """
Metrics applying to each individual GC.
See `metrics.md` for a list.
"""
SINGLE_HEAP_METRICS_DOC = """
Metrics applying to each individual heap within each individual GC.
See `metrics.md` for a list.
"""
@with_slots
@dataclass(frozen=True)
class MetricValue:
all_samples: Sequence[float]
n_samples: float
# Determined by sample_kind
sample: float
stdev: float
median: float
min: float
max: float
def __post_init__(self) -> None:
assert self.min in self.all_samples and self.max in self.all_samples
def sample_and_errors(self) -> ValueAndErrors:
return ValueAndErrors(self.sample, self.sample - self.min, self.max - self.sample)
@property
def stdev_frac(self) -> float:
return stdev_frac(self.stdev, self.sample)
def cells_sample_stdev_pct(self) -> Sequence[Cell]:
return (
Cell(self.sample),
Cell(
float_to_str_smaller(self.stdev_frac * 100),
color="red" if self.stdev_frac > 0.1 else None,
),
)
# When we fail to get a metric, we use Err.
# This is because we want to display the failure but continue to show other metrics,
# rather than exiting the program immediately with an exception.
Failable = Result[str, T]
# Allowed kinds of metric values. Note all of these can convert to float.
AnyValue = Union[bool, int, float]
FailableValue = Failable[AnyValue]
FailableBool = Failable[bool]
FailableInt = Failable[int]
FailableFloat = Failable[float]
FailableValues = Failable[Sequence[FailableValue]]
FailableMetricValue = Failable[MetricValue]
def _take_sample(values: Sequence[AnyValue], sample_kind: SampleKind) -> AnyValue:
if isinstance(sample_kind, int):
return values[sample_kind]
else:
fn: Callable[[Sequence[float]], float]
if sample_kind == SpecialSampleKind.mean:
fn = mean
elif sample_kind == SpecialSampleKind.median:
fn = median
elif sample_kind == SpecialSampleKind.min:
fn = min
elif sample_kind == SpecialSampleKind.max:
fn = max
else:
raise Exception(sample_kind)
return fn(values)
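# Illustrative examples (not executed): _take_sample([3.0, 1.0, 2.0], 0) returns
# the first iteration's value, 3.0, while
# _take_sample([3.0, 1.0, 2.0], SpecialSampleKind.median) returns the median, 2.0.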
def metric_value_of(
values: Sequence[FailableValue], sample_kind: SampleKind
) -> FailableMetricValue:
assert not is_empty(values)
return flat_map_ok(
all_non_err(values),
lambda vs: Ok(
MetricValue(
all_samples=vs,
n_samples=len(vs),
sample=_take_sample(vs, sample_kind),
stdev=0 if len(vs) == 1 else stdev(vs),
median=median(vs),
min=min(vs),
max=max(vs),
)
),
)
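# Sketch of intended usage (hypothetical values, not part of the module):
#     vs = [Ok(1.0), Ok(2.0), Ok(4.0)]
#     mv = metric_value_of(vs, SpecialSampleKind.median).unwrap()
#     assert mv.sample == 2.0 and mv.min == 1.0 and mv.max == 4.0
# Any Err among the inputs short-circuits the whole result to that Err.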
# Maps event name to its frequency
EventNames = Mapping[str, int]
@with_slots
@dataclass(frozen=True)
class ProcessInfo:
event_names: Optional[EventNames]
# Not necessarily the name of the process,
# since we may run two tests with the same process name and want to tell them apart
name: str
trace_path: Path
process: AbstractTraceProcess
mang: AbstractTraceLoadedDotNetRuntime
all_gcs_including_incomplete: Sequence[AbstractTraceGC]
gcs: Sequence[AbstractTraceGC]
stats: AbstractGCStats
# Note: can't use process.StartTime and EndTime properties
# as those are just 1/1/0001 12:00:00 AM
events_time_span: Optional[AbstractTimeSpan]
per_heap_history_times: Optional[Sequence[float]]
@property
def id(self) -> int:
return self.process.ProcessID
@property
def process_name(self) -> str:
return self.process.Name
@property
def startup_flags(self) -> StartupFlags:
return StartupFlags(self.mang.StartupFlags)
@property
def uses_server_gc(self) -> Optional[bool]:
i = self.stats.IsServerGCUsed
if i == -1:
return None
elif i == 0:
return False
else:
return True
class MetricType(Enum):
bool = 0
float = 1
class Better(Enum):
"""Is it better for a metric to be lower or higher?"""
less = 0
greater = 1
class MetricBase(ABC):
@property
@abstractmethod
def name(self) -> str:
raise NotImplementedError()
@property
@abstractmethod
def type(self) -> MetricType:
raise NotImplementedError()
@property
@abstractmethod
def is_from_test_status(self) -> bool:
raise NotImplementedError()
@property
@abstractmethod
def doc(self) -> Optional[str]:
raise NotImplementedError()
@property
@abstractmethod
def is_aggregate(self) -> bool:
raise NotImplementedError
@property
@abstractmethod
def do_not_use_scientific_notation(self) -> bool:
raise NotImplementedError()
@property
@abstractmethod
def better(self) -> Better:
raise NotImplementedError()
@with_slots
@dataclass(frozen=True)
class NamedMetricBase(MetricBase):
name: str
type: MetricType = MetricType.float
doc: Optional[str] = None
# True for a metric that aggregates others, e.g. a run metric aggregating single GCs
is_aggregate: bool = False
do_not_use_scientific_notation: bool = False
is_from_test_status: bool = False
better: Better = Better.less
def __post_init__(self) -> None:
# Apparently the base class must have this or it won't be called on subclasses
pass
def __eq__(self, other: object) -> bool:
assert self is other or other is None or self.name != check_cast(self.__class__, other).name
return self is other
def __lt__(self, other: object) -> bool:
return self.name < check_cast(self.__class__, other).name
def __hash__(self) -> int:
return hash(self.name)
TMetric = TypeVar("TMetric", bound=MetricBase)
TMetricB = TypeVar("TMetricB", bound=MetricBase)
TNamedMetric = TypeVar("TNamedMetric", bound=NamedMetricBase)
class NamedRunMetric(NamedMetricBase):
"""
Statistic that applies to a test run as a whole.
Contrast with SingleGcStat.
"""
def __post_init__(self) -> None:
super().__post_init__()
assert self.name not in NAME_TO_RUN_METRIC, f"Already exists a metric {self.name}"
NAME_TO_RUN_METRIC[self.name] = self
@with_slots
@dataclass(frozen=True)
class ScoreRunMetric(MetricBase):
name: str
spec: ScoreSpec = empty_mapping() # TODO: shouldn't have a default
@property
# @overrides
def type(self) -> MetricType:
return MetricType.float
@property
# @overrides
def is_from_test_status(self) -> bool:
return False
@property
# @overrides
def doc(self) -> Optional[str]:
return None
@property
# @overrides
def is_aggregate(self) -> bool:
return True
@property
# @overrides
def do_not_use_scientific_notation(self) -> bool:
return False
@property
# @overrides
def better(self) -> Better:
return Better.less
RunMetric = Union[NamedRunMetric, ScoreRunMetric]
# Can't use ScoreRunMetric or ScoreElement as those are frozen. (Must be frozen to be hashable.)
# Tuples are (weight, par) pairs.
SerializedRunMetric = Union[str, Tuple[str, Mapping[str, Tuple[float, Optional[float]]]]]
NAME_TO_RUN_METRIC: Dict[str, NamedRunMetric] = {}
def run_metric_must_exist_for_name(name: str) -> NamedRunMetric:
return get_or_did_you_mean(NAME_TO_RUN_METRIC, name, "run metric")
def serialize_run_metric(r: RunMetric) -> SerializedRunMetric:
if isinstance(r, NamedRunMetric):
return r.name
else:
return r.name, {k: (v.weight, v.par) for k, v in r.spec.items()}
def deserialize_run_metric(m: SerializedRunMetric) -> RunMetric:
if isinstance(m, str):
return run_metric_must_exist_for_name(m)
else:
return ScoreRunMetric(m[0], FrozenDict((k, ScoreElement(*v)) for k, v in m[1].items()))
class SingleGCMetric(NamedMetricBase):
"""Statistic that applies to a single invocation of the GC."""
def __post_init__(self) -> None:
super().__post_init__()
assert self.name not in NAME_TO_SINGLE_GC_METRIC, f"Duplicate SingleGcMetric {self.name}"
NAME_TO_SINGLE_GC_METRIC[self.name] = self
NAME_TO_SINGLE_GC_METRIC: Dict[str, SingleGCMetric] = {}
def single_gc_metric_must_exist_for_name(name: str) -> SingleGCMetric:
return get_or_did_you_mean(NAME_TO_SINGLE_GC_METRIC, name, "single-gc metric")
class SingleHeapMetric(NamedMetricBase):
"""Statistic for a single heap from a single GC"""
def __post_init__(self) -> None:
super().__post_init__()
assert (
self.name not in NAME_TO_SINGLE_HEAP_METRIC
), f"Duplicate SingleHeapMetric {self.name}"
NAME_TO_SINGLE_HEAP_METRIC[self.name] = self
NAME_TO_SINGLE_HEAP_METRIC: Dict[str, SingleHeapMetric] = {}
def single_heap_metric_must_exist_for_name(name: str) -> SingleHeapMetric:
return get_or_did_you_mean(NAME_TO_SINGLE_HEAP_METRIC, name, "single-heap metric")
# Using sequence to preserve ordering. FrozenSet has non-deterministic ordering.
RunMetrics = Sequence[RunMetric]
SingleGCMetrics = Sequence[SingleGCMetric]
SingleHeapMetrics = Sequence[SingleHeapMetric]
MetricValuesForSingleIteration = Mapping[RunMetric, FailableValue]
# Will be a single CouldNotGetValue if the test as a whole failed or has no GCs.
# Else will have CouldNotGetValue for each individual metric that failed.
MaybeMetricValuesForSingleIteration = Failable[MetricValuesForSingleIteration]
MetricStatisticsFromAllIterations = Mapping[RunMetric, FailableMetricValue]
MaybeMetricStatisticsFromAllIterations = Failable[MetricStatisticsFromAllIterations]
@with_slots
@dataclass(frozen=True)
class ProcessedGenData:
_g: AbstractGCPerHeapHistoryGenData
@property
def surv_rate(self) -> int:
return self._g.SurvRate
@property
def pinned_surv(self) -> int:
return self._g.PinnedSurv
@property
def non_pinned_surv(self) -> int:
return self._g.NonePinnedSurv
@property
def non_free_size_before(self) -> int:
return self.size_before - self.free_list_space_before - self.free_obj_space_before
@property
def non_free_size_after(self) -> int:
return self.size_after - self.free_list_space_after - self.free_obj_space_after
@property
def size_before(self) -> int:
return self._g.SizeBefore
@property
def size_after(self) -> int:
return self._g.SizeAfter
@property
def obj_space_before(self) -> int:
return self._g.ObjSpaceBefore
@property
def fragmentation(self) -> int:
return self._g.Fragmentation
@property
def obj_size_after(self) -> int:
return self._g.ObjSizeAfter
@property
def free_list_space_before(self) -> int:
return self._g.FreeListSpaceBefore
@property
def free_obj_space_before(self) -> int:
return self._g.FreeObjSpaceBefore
@property
def free_list_space_after(self) -> int:
return self._g.FreeListSpaceAfter
@property
def free_obj_space_after(self) -> int:
return self._g.FreeObjSpaceAfter
@property
def in_bytes(self) -> int:
return self._g.In
@property
def in_mb(self) -> float:
return bytes_to_mb(self.in_bytes)
@property
def out_bytes(self) -> int:
return self._g.Out
@property
def out_mb(self) -> float:
return bytes_to_mb(self.out_bytes)
@property
def budget(self) -> int:
return self._g.Budget
@with_slots
@dataclass(frozen=True)
class ProcessedHeap:
gc: "ProcessedGC"
index: int
per_heap_history: Result[str, AbstractGCPerHeapHistory]
# Missing for BGCs (don't know why...)
server_gc_history: Result[str, AbstractServerGcHistory]
    # Index is a member of the MarkRootType enum
_mark_times: Failable[Sequence[float]]
_mark_promoted: Failable[Sequence[float]]
join_info: Result[str, AbstractJoinInfoForHeap]
@property
def clr(self) -> Clr:
return self.gc.clr
def metric(self, metric: SingleHeapMetric) -> FailableValue:
# pylint:disable=import-outside-toplevel
from .single_heap_metrics import get_single_heap_stat
return get_single_heap_stat(self, metric)
def gen(self, gen: Gens) -> ProcessedGenData:
return unwrap(self.gen_result(gen))
def gen_result(self, gen: Gens) -> Result[str, ProcessedGenData]:
return map_ok(
self.per_heap_history, lambda phh: ProcessedGenData(phh.GenData[enum_value(gen)])
)
@property
def gens(self) -> Result[str, Sequence[ProcessedGenData]]:
return map_ok(
self.per_heap_history,
lambda phh: [ProcessedGenData(phh.GenData[enum_value(gen)]) for gen in Gens],
)
@property
def FreeListAllocated(self) -> Result[str, int]:
return map_ok(self.per_heap_history, lambda phh: phh.FreeListAllocated)
@property
def FreeListRejected(self) -> Result[str, int]:
return map_ok(self.per_heap_history, lambda phh: phh.FreeListRejected)
def mark_time(self, mark_type: MarkRootType) -> FailableFloat:
return map_ok(self._mark_times, lambda m: m[enum_value(mark_type)])
def mark_promoted(self, mark_type: MarkRootType) -> FailableFloat:
return map_ok(self._mark_promoted, lambda m: m[enum_value(mark_type)])
@property
def TotalMarkMSec(self) -> FailableFloat:
return map_ok(self._mark_times, sum)
@property
def TotalMarkPromoted(self) -> FailableFloat:
return map_ok(self._mark_promoted, sum)
# TODO: better return type
@property
def compact_mechanisms(self) -> Result[str, int]:
return map_ok(self.per_heap_history, lambda phh: phh.CompactMechanisms)
# TODO: better return type
@property
def expand_mechanisms(self) -> Result[str, int]:
return map_ok(self.per_heap_history, lambda phh: phh.ExpandMechanisms)
def _fixup_mb(fake_mb: float) -> float:
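    # The incoming value counts 10**6-byte "MB"; re-express it via bytes_to_mb
    # so sizes share this module's MB convention.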
return bytes_to_mb(fake_mb * 1_000_000)
@with_slots
@dataclass(frozen=True)
class GenInfoGetter:
_gc: "ProcessedGC"
_gen: Gens
@property
def _trace_gc(self) -> AbstractTraceGC:
return self._gc.trace_gc
@property
def _gen_value(self) -> int:
return enum_value(self._gen)
@property
def UserAllocatedMB(self) -> float:
# Amount is already in MB
return self._trace_gc.UserAllocated[self._gen_value]
@property
def SizeBeforeMB(self) -> float:
return _fixup_mb(self._trace_gc.GenSizeBeforeMB[self._gen_value])
@property
def SizeAfterMB(self) -> float:
return _fixup_mb(self._trace_gc.GenSizeAfterMB(self._gen_value))
@property
def SurvivalPct(self) -> FailableFloat:
pct = self._trace_gc.SurvivalPercent(self._gen_value)
if isnan(pct):
return Err(f"{Gens(self._gen_value).name} not collected?")
else:
assert 0 <= pct <= 100
return Ok(pct)
@property
def FragmentationMB(self) -> float:
return _fixup_mb(self._trace_gc.GenFragmentationMB(self._gen_value))
@property
def FragmentationPct(self) -> float:
return self._trace_gc.GenFragmentationPercent(self._gen_value)
@property
def InMB(self) -> float:
return _fixup_mb(self._trace_gc.GenInMB(self._gen_value))
@property
def PromotedMB(self) -> float:
return _fixup_mb(self._trace_gc.GenPromotedMB(self._gen_value))
@property
def BudgetMB(self) -> float:
return _fixup_mb(self._trace_gc.GenBudgetMB(self._gen_value))
@property
def ObjSizeAfterMB(self) -> float:
return _fixup_mb(self._trace_gc.GenObjSizeAfterMB(self._gen_value))
@property
def FreeListSpaceBeforeMB(self) -> float:
return bytes_to_mb(sum(hp.gen(self._gen).free_list_space_before for hp in self._gc.heaps))
@property
def FreeListSpaceAfterMB(self) -> float:
return bytes_to_mb(sum(hp.gen(self._gen).free_list_space_after for hp in self._gc.heaps))
@property
def FreeObjSpaceBeforeMB(self) -> float:
return bytes_to_mb(sum(hp.gen(self._gen).free_obj_space_before for hp in self._gc.heaps))
@property
def FreeObjSpaceAfterMB(self) -> float:
return bytes_to_mb(sum(hp.gen(self._gen).free_obj_space_after for hp in self._gc.heaps))
@with_slots
# frozen=False so heaps can be set lazily
@dataclass(frozen=False)
class ProcessedGC:
proc: "ProcessedTrace"
index: int
trace_gc: AbstractTraceGC
join_info: Result[str, AbstractJoinInfoForGC]
heaps: Sequence[ProcessedHeap]
@property
def prev_gc(self) -> Optional["ProcessedGC"]:
if self.index == 0:
return None
else:
res = self.proc.gcs[self.index - 1]
assert res.index == self.index - 1
return res
@property
def clr(self) -> Clr:
return self.proc.clr
@property
def SuspendDurationMSec(self) -> float:
return self.trace_gc.SuspendDurationMSec
def metric(self, single_gc_metric: SingleGCMetric) -> FailableValue:
from .single_gc_metrics import get_single_gc_stat # pylint:disable=import-outside-toplevel
return get_single_gc_stat(self.proc, self.proc.gcs, self.index, single_gc_metric)
def metric_from_name(self, name: str) -> FailableValue:
# pylint:disable=import-outside-toplevel
from .parse_metrics import parse_single_gc_metric_arg
return self.metric(parse_single_gc_metric_arg(name))
def unwrap_metric_from_name(self, name: str) -> AnyValue:
return unwrap(self.metric_from_name(name))
@property
def Number(self) -> int:
return self.trace_gc.Number
@property
def Generation(self) -> Gens:
return Gens(self.trace_gc.Generation)
def collects_generation(self, gen: Gens) -> bool:
return {
Gens.Gen0: True,
Gens.Gen1: self.Generation != Gens.Gen0,
Gens.Gen2: self.Generation == Gens.Gen2,
Gens.GenLargeObj: self.Generation == Gens.Gen2,
}[gen]
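    # E.g. a Gen1 GC collects gen0 and gen1 but not gen2 or the large object
    # heap, while a Gen2 GC collects every generation including the LOH.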
@property
def AllocedSinceLastGCMB(self) -> float:
return self.trace_gc.AllocedSinceLastGCMB
_alloced_mb_accumulated: Optional[float] = None
@property
def AllocedMBAccumulated(self) -> float:
if self._alloced_mb_accumulated is None:
prev = 0 if self.prev_gc is None else self.prev_gc.AllocedMBAccumulated
self._alloced_mb_accumulated = prev + self.AllocedSinceLastGCMB
return self._alloced_mb_accumulated
else:
return self._alloced_mb_accumulated
@property
def AllocRateMBSec(self) -> float:
return self.trace_gc.AllocRateMBSec
@property
def BGCFinalPauseMSec(self) -> float:
return self.trace_gc.BGCFinalPauseMSec
@property
def Type(self) -> GCType:
return GCType(self.trace_gc.Type)
@property
def DurationMSec(self) -> float:
return self.trace_gc.DurationMSec
@property
def DurationSeconds(self) -> float:
return msec_to_seconds(self.DurationMSec)
@property
def DurationSinceLastRestartMSec(self) -> float:
return self.trace_gc.DurationSinceLastRestartMSec
@property
def GCCpuMSec(self) -> float:
return self.trace_gc.GCCpuMSec
def gen_info(self, gen: Gens) -> GenInfoGetter:
return GenInfoGetter(self, gen)
@property
def gen0(self) -> GenInfoGetter:
return self.gen_info(Gens.Gen0)
@property
def gen1(self) -> GenInfoGetter:
return self.gen_info(Gens.Gen1)
@property
def gen2(self) -> GenInfoGetter:
return self.gen_info(Gens.Gen2)
@property
def loh(self) -> GenInfoGetter:
return self.gen_info(Gens.GenLargeObj)
@property
def Gen0UserAllocatedMB(self) -> float:
return self.gen0.UserAllocatedMB
# User can't allocate directly to gen1 or gen2
@property
def LOHUserAllocatedMB(self) -> float:
return self.loh.UserAllocatedMB
@property
def Gen0SizeBeforeMB(self) -> float:
return self.gen0.SizeBeforeMB
@property
def Gen1SizeBeforeMB(self) -> float:
return self.gen1.SizeBeforeMB
@property
def Gen2SizeBeforeMB(self) -> float:
return self.gen2.SizeBeforeMB
@property
def LOHSizeBeforeMB(self) -> float:
return self.loh.SizeBeforeMB
@property
def Gen0BudgetMB(self) -> float:
return self.gen0.BudgetMB
@property
def Gen1BudgetMB(self) -> float:
return self.gen1.BudgetMB
@property
def Gen2BudgetMB(self) -> float:
return self.gen2.BudgetMB
@property
def LOHBudgetMB(self) -> float:
return self.loh.BudgetMB
@property
def Gen0SizeAfterMB(self) -> float:
return self.gen0.SizeAfterMB
@property
def Gen1SizeAfterMB(self) -> float:
return self.gen1.SizeAfterMB
@property
def Gen2SizeAfterMB(self) -> float:
return self.gen2.SizeAfterMB
@property
def LOHSizeAfterMB(self) -> float:
return self.loh.SizeAfterMB
@property
def Gen0FreeListSpaceBeforeMB(self) -> float:
return self.gen0.FreeListSpaceBeforeMB
@property
def Gen1FreeListSpaceBeforeMB(self) -> float:
return self.gen1.FreeListSpaceBeforeMB
@property
def Gen2FreeListSpaceBeforeMB(self) -> float:
return self.gen2.FreeListSpaceBeforeMB
@property
def LOHFreeListSpaceBeforeMB(self) -> float:
return self.loh.FreeListSpaceBeforeMB
@property
def Gen0FreeListSpaceAfterMB(self) -> float:
return self.gen0.FreeListSpaceAfterMB
@property
def Gen1FreeListSpaceAfterMB(self) -> float:
return self.gen1.FreeListSpaceAfterMB
@property
def Gen2FreeListSpaceAfterMB(self) -> float:
return self.gen2.FreeListSpaceAfterMB
@property
def LOHFreeListSpaceAfterMB(self) -> float:
return self.loh.FreeListSpaceAfterMB
@property
def Gen0FreeObjSpaceBeforeMB(self) -> float:
return self.gen0.FreeObjSpaceBeforeMB
@property
def Gen1FreeObjSpaceBeforeMB(self) -> float:
return self.gen1.FreeObjSpaceBeforeMB
@property
def Gen2FreeObjSpaceBeforeMB(self) -> float:
return self.gen2.FreeObjSpaceBeforeMB
@property
def LOHFreeObjSpaceBeforeMB(self) -> float:
return self.loh.FreeObjSpaceBeforeMB
@property
def Gen0FreeObjSpaceAfterMB(self) -> float:
return self.gen0.FreeObjSpaceAfterMB
@property
def Gen1FreeObjSpaceAfterMB(self) -> float:
return self.gen1.FreeObjSpaceAfterMB
@property
def Gen2FreeObjSpaceAfterMB(self) -> float:
return self.gen2.FreeObjSpaceAfterMB
@property
def LOHFreeObjSpaceAfterMB(self) -> float:
return self.loh.FreeObjSpaceAfterMB
@property
def HeapSizeBeforeMB(self) -> float:
return self.trace_gc.HeapSizeBeforeMB
@property
def HeapSizeAfterMB(self) -> float:
return self.trace_gc.HeapSizeAfterMB
@property
def HeapSizePeakMB(self) -> float:
return self.trace_gc.HeapSizePeakMB
@property
def PinnedObjectSizes(self) -> int:
return self.trace_gc.GetPinnedObjectSizes()
@property
def PinnedObjectPercentage(self) -> Optional[float]:
pct = self.trace_gc.GetPinnedObjectPercentage()
return None if pct == -1 else pct
@property
def TotalGCTime(self) -> Optional[float]:
t = self.trace_gc.GetTotalGCTime()
return None if t == 0 else t
@property
def PromotedMB(self) -> float:
return self.trace_gc.PromotedMB
@property
def RatioPeakAfter(self) -> float:
return self.trace_gc.RatioPeakAfter
@property
def suspend_duration_msec(self) -> float:
return self.trace_gc.SuspendDurationMSec
@property
def PauseStartRelativeMSec(self) -> float:
return self.trace_gc.PauseStartRelativeMSec
@property
def SuspendToGCStartMSec(self) -> float:
return self.trace_gc.StartRelativeMSec - self.trace_gc.PauseStartRelativeMSec
@property
def PauseDurationMSec(self) -> float:
return self.trace_gc.PauseDurationMSec
@property
def PromotedMBPerSec(self) -> float:
return self.PromotedMB / self.DurationSeconds
@property
def PromotedGBPerSec(self) -> float:
return mb_to_gb(self.PromotedMB) / self.DurationSeconds
@property
def PauseTimePercentageSinceLastGC(self) -> float:
return self.trace_gc.PauseTimePercentageSinceLastGC
@property
def ProcessCpuMSec(self) -> float:
return self.trace_gc.ProcessCpuMSec
@property
def StartRelativeMSec(self) -> float:
return self.trace_gc.StartRelativeMSec
@property
def EndRelativeMSec(self) -> float:
return self.StartRelativeMSec + self.DurationMSec
@property
def reason(self) -> gc_reason:
return gc_reason(self.trace_gc.Reason)
@property
def PercentTimeInGC(self) -> float:
return self.trace_gc.PercentTimeInGC
@property
def IsEphemeral(self) -> bool:
return self.Generation in (Gens.Gen0, Gens.Gen1)
@property
def IsGen0(self) -> bool:
return self.Generation == Gens.Gen0
@property
def IsGen1(self) -> bool:
return self.Generation == Gens.Gen1
@property
def IsGen2(self) -> bool:
return self.Generation == Gens.Gen2
@property
def IsBlockingGen2(self) -> bool:
return self.IsGen2 and self.IsNonConcurrent
@property
def PctReductionInHeapSize(self) -> float:
return get_percent(1.0 - (self.HeapSizeAfterMB / self.HeapSizeBeforeMB))
@property
def IsBackground(self) -> bool:
res = self.Type == GCType.BackgroundGC
if res:
assert self.Generation == Gens.Gen2
return res
@property
def IsForeground(self) -> bool:
return self.Type == GCType.ForegroundGC
@property
def IsNonConcurrent(self) -> bool:
# TODO: is this just not is_concurrent?
return self.Type == GCType.NonConcurrentGC
# TODO: is this just is_background?
@property
def IsConcurrent(self) -> FailableBool:
res = self.has_mechanisms(lambda m: m.concurrent)
if res.is_ok():
is_c = res.unwrap()
assert is_c == self.IsBackground
if is_c:
# Only gen2 gcs are concurrent
assert self.Generation == Gens.Gen2
return res
def has_mechanisms(self, cb: Callable[[GCGlobalMechanisms], bool]) -> FailableBool:
if self.trace_gc.GlobalHeapHistory is None:
return Err("null GlobalHeapHistory")
else:
return Ok(cb(GCGlobalMechanisms(self.trace_gc.GlobalHeapHistory.GlobalMechanisms)))
# WARN: Does *NOT* include LOH compaction!
@property
def UsesCompaction(self) -> FailableBool:
return self.has_mechanisms(lambda m: m.compaction)
@property
def UsesPromotion(self) -> FailableBool:
return self.has_mechanisms(lambda m: m.promotion)
@property
def UsesDemotion(self) -> FailableBool:
return self.has_mechanisms(lambda m: m.demotion)
@property
def UsesCardBundles(self) -> FailableBool:
return self.has_mechanisms(lambda m: m.cardbundles)
@property
def UsesElevation(self) -> FailableBool:
return self.has_mechanisms(lambda m: m.elevation)
@property
def UsesLOHCompaction(self) -> FailableBool:
# Not implemented on the GC side
return Err("<not implemented>")
# return has_mechanisms(gc, lambda m: m.loh_compaction)
@property
def MemoryPressure(self) -> FailableFloat:
ghh = self.trace_gc.GlobalHeapHistory
if ghh is None:
return Err("No GlobalHeapHistory")
elif ghh.HasMemoryPressure:
return Ok(ghh.MemoryPressure)
else:
return Err("GlobalHeapHistory#HasMemoryPressure was false")
@property
def HeapCount(self) -> int:
return len(self.heaps)
def total_bytes_before(self, gen: Gens) -> Result[str, int]:
return map_ok(
all_non_err([hp.gen_result(gen) for hp in self.heaps]),
lambda gen_datas: sum(gd.non_free_size_before for gd in gen_datas),
)
def total_bytes_after(self, gen: Gens) -> Result[str, int]:
return map_ok(
all_non_err([hp.gen_result(gen) for hp in self.heaps]),
lambda gen_datas: sum(gd.non_free_size_after for gd in gen_datas),
)
# TODO: revisit info for BGCs
# TODO: condemned reason
# TODO: suspend_duration_msec
# TODO: GlobalHeapHistory
PerHeapGetter = Callable[[ProcessedHeap], FailableValue]
@with_slots
@dataclass(frozen=True)
class ThreadToProcessToName:
# Maps process ID to name
thread_id_to_process_id: AbstractIThreadIDToProcessID
process_id_to_name: AbstractIProcessIDToProcessName
def get_process_id_for_thread_id(self, thread_id: int, time_msec: float) -> Optional[int]:
pid = self.thread_id_to_process_id.ThreadIDToProcessID(thread_id, _msec_to_qpc(time_msec))
return None if pid == -1 else pid
def get_process_name_for_process_id(self, process_id: int, time_msec: float) -> Optional[str]:
# TODO: Use a HistDict, then use time_msec
res = self.process_id_to_name.ProcessIDToProcessName(process_id, _msec_to_qpc(time_msec))
assert res != ""
if res is None:
# TODO: only on windows
return {0: "Idle", 4: "System"}.get(process_id)
else:
return res
def _msec_to_qpc(time_msec: float) -> int:
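    # 10000 ticks per millisecond, i.e. the QPC clock is assumed here to run
    # at 100 ns resolution.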
return int(time_msec * 10000)
# Used to analyze the *kinds* of gcs we see. Don't care how much.
@with_slots
@dataclass(frozen=False) # Must be unfrozen to serialize
class MechanismsAndReasons:
types: FrozenSet[GCType]
mechanisms: GCGlobalMechanisms
reasons: FrozenSet[gc_reason]
heap_expand: FrozenSet[gc_heap_expand_mechanism]
heap_compact: FrozenSet[gc_heap_compact_reason]
def is_empty(self) -> bool:
return (
is_empty(self.types)
and self.mechanisms == EMPTY_GC_GLOBAL_MECHANISMS
and is_empty(self.reasons)
and is_empty(self.heap_expand)
and is_empty(self.heap_compact)
)
def to_strs(self) -> Sequence[str]:
return (
*(str(t) for t in sorted(self.types)),
*self.mechanisms.names(),
*(str(r) for r in sorted(self.reasons)),
*(str(e) for e in sorted(self.heap_expand)),
*(str(c) for c in sorted(self.heap_compact)),
)
EMPTY_MECHANISMS_AND_REASONS = MechanismsAndReasons(
types=frozenset(),
mechanisms=EMPTY_GC_GLOBAL_MECHANISMS,
reasons=frozenset(),
heap_expand=frozenset(),
heap_compact=frozenset(),
)
def invert_mechanisms(m: MechanismsAndReasons) -> MechanismsAndReasons:
return MechanismsAndReasons(
types=frozenset(GCType) - m.types,
mechanisms=invert_gc_global_mechanisms(m.mechanisms),
reasons=frozenset(gc_reason) - m.reasons,
heap_expand=frozenset(gc_heap_expand_mechanism) - m.heap_expand,
heap_compact=frozenset(gc_heap_compact_reason) - m.heap_compact,
)
def union_all_mechanisms(i: Iterable[MechanismsAndReasons]) -> MechanismsAndReasons:
return reduce(union_mechanisms, i, EMPTY_MECHANISMS_AND_REASONS)
def union_mechanisms(a: MechanismsAndReasons, b: MechanismsAndReasons) -> MechanismsAndReasons:
return MechanismsAndReasons(
types=a.types | b.types,
mechanisms=union_gc_global_mechanisms(a.mechanisms, b.mechanisms),
reasons=a.reasons | b.reasons,
heap_expand=a.heap_expand | b.heap_expand,
heap_compact=a.heap_compact | b.heap_compact,
)
@with_slots
# frozen=False so we can set GCs lazily
@dataclass(frozen=False)
class ProcessedTrace:
clr: Clr
test_result: TestResult
test_status: Failable[TestRunStatus]
process_info: Optional[ProcessInfo]
process_names: ThreadToProcessToName
# '--process' that was used to get this
process_query: ProcessQuery
gcs_result: Result[str, Sequence[ProcessedGC]]
mechanisms_and_reasons: Optional[MechanismsAndReasons]
join_info: Result[str, AbstractJoinInfoForProcess]
def Aggregate(
self,
cb_gc: Callable[[ProcessedGC], FailableFloat],
cb_aggregate: Callable[[Iterable[float]], float],
) -> FailableFloat:
return flat_map_ok(
self.gcs_result,
lambda gcs: Err("<no gcs>")
if is_empty(gcs)
else map_ok(all_non_err(cb_gc(gc) for gc in gcs), cb_aggregate),
)
def Max(self, cb: Callable[[ProcessedGC], FailableFloat]) -> FailableFloat:
return self.Aggregate(cb, max)
def Sum(self, cb: Callable[[ProcessedGC], FailableFloat]) -> FailableFloat:
return self.Aggregate(cb, sum)
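    # Sketch (hypothetical usage): these helpers thread Result values through,
    # so one failing per-GC metric yields an Err instead of raising, e.g.
    #     total_promoted = proc.Sum(lambda gc: Ok(gc.PromotedMB))  # FailableFloat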
@property
def HeapSizePeakMB_Max(self) -> FailableFloat:
return self.Max(lambda gc: Ok(gc.HeapSizePeakMB))
def Get95P(self, cb: Callable[[ProcessedGC], float]) -> FailableFloat:
return get_95th_percentile([cb(gc) for gc in self.gcs])
@property
def gcperfsim_result(self) -> Failable[GCPerfSimResult]:
return flat_map_ok(
self.test_status,
lambda ts: option_to_result(
ts.gcperfsim_result, lambda: "This metric only available for GCPerfSim"
),
)
@property
def TotalSecondsTaken(self) -> FailableFloat:
return flat_map_ok(
self.test_status,
lambda ts: option_to_result(
ts.seconds_taken, lambda: "Test status file does not contain seconds_taken"
),
)
@property
def Gen0Size(self) -> FailableFloat:
return flat_map_ok(
self.test_status,
lambda ts: option_to_result(
None if ts.test is None else ts.test.config.config.complus_gcgen0size,
lambda: "Gen0size not specified in config",
),
)
@property
def ThreadCount(self) -> FailableFloat:
return flat_map_ok(
self.test_status,
lambda ts: option_to_result(
map_option(
None if ts.test is None else ts.test.benchmark.benchmark.get_argument("-tc"),
int,
),
lambda: "tc not specified in benchmark",
),
)
@property
def InternalSecondsTaken(self) -> FailableFloat:
return map_ok(self.gcperfsim_result, lambda g: g.seconds_taken)
@property
def FinalHeapSizeGB(self) -> FailableFloat:
return flat_map_ok(
self.gcperfsim_result,
lambda g: Err(
"final_heap_size_bytes was not in test result\n"
+ "this can happen on runtimes < 3.0"
)
if g.final_heap_size_bytes is None
else Ok(bytes_to_gb(g.final_heap_size_bytes)),
)
@property
def FinalFragmentationGB(self) -> FailableFloat:
return flat_map_ok(
self.gcperfsim_result,
lambda g: Err(
"final_fragmentation_bytes was not in test result\n"
+ "this can happen on runtimes < 3.0"
)
if g.final_fragmentation_bytes is None
else Ok(bytes_to_gb(g.final_fragmentation_bytes)),
)
@property
def FinalTotalMemoryGB(self) -> FailableFloat:
return map_ok(self.gcperfsim_result, lambda g: bytes_to_gb(g.final_total_memory_bytes))
@property
def NumCreatedWithFinalizers(self) -> FailableValue:
return map_ok(self.gcperfsim_result, lambda g: g.num_created_with_finalizers)
@property
def NumFinalized(self) -> FailableValue:
return map_ok(self.gcperfsim_result, lambda g: g.num_finalized)
@property
def Gen0CollectionCount(self) -> FailableValue:
return map_ok(self.gcperfsim_result, lambda g: g.collection_counts[0])
@property
def Gen1CollectionCount(self) -> FailableValue:
return map_ok(self.gcperfsim_result, lambda g: g.collection_counts[1])
@property
def Gen2CollectionCount(self) -> FailableValue:
return map_ok(self.gcperfsim_result, lambda g: g.collection_counts[2])
@property
def gcs(self) -> Sequence[ProcessedGC]:
return unwrap(self.gcs_result)
def metric(self, run_metric: RunMetric) -> FailableValue:
from .run_metrics import stat_for_proc # pylint:disable=import-outside-toplevel
return stat_for_proc(self, run_metric)
def metric_from_name(self, name: str) -> FailableValue:
from .parse_metrics import parse_run_metric_arg # pylint:disable=import-outside-toplevel
return self.metric(parse_run_metric_arg(name))
def unwrap_metric_from_name(self, name: str) -> AnyValue:
return unwrap(self.metric_from_name(name))
@property
def name(self) -> str:
if self.test_result.test_status_path is not None:
return self.test_result.test_status_path.name
else:
return remove_extension(non_null(self.test_result.trace_path)).name
@property
def process_id(self) -> Optional[int]:
return map_option(self.process_info, lambda p: p.id)
@property
def process_name(self) -> Optional[str]:
return map_option(self.process_info, lambda p: p.process_name)
@property
def UsesServerGC(self) -> Optional[bool]:
"""None if this is unknown"""
return map_option(self.process_info, lambda p: p.uses_server_gc)
@property
def event_names(self) -> Optional[Mapping[str, int]]:
return map_option(self.process_info, lambda p: p.event_names)
@property
def has_mechanisms_and_reasons(self) -> bool:
return self.mechanisms_and_reasons is not None
@property
def has_join_info(self) -> bool:
return self.join_info is not None
@property
def FirstToLastGCSeconds(self) -> FailableFloat:
if self.process_info is None:
return Err("Need a trace")
gcs = self.process_info.all_gcs_including_incomplete
if len(gcs) < 2:
return Err("Need at least 2 gcs")
else:
return Ok(msec_to_seconds(gcs[-1].StartRelativeMSec - gcs[0].StartRelativeMSec))
@property
def FirstEventToFirstGCSeconds(self) -> FailableFloat:
if self.process_info is None:
return Err("Need a trace")
ts = self.process_info.events_time_span
if ts is None:
return Err("Did not specify to collect events")
else:
return Ok(
msec_to_seconds(
self.process_info.all_gcs_including_incomplete[0].StartRelativeMSec
- ts.StartMSec
)
)
@property
def TotalNonGCSeconds(self) -> FailableFloat:
return map_ok_2(
self.TotalSecondsTaken,
self.gcs_result,
lambda t, gcs: t - msec_to_seconds(sum(gc.PauseDurationMSec for gc in gcs)),
)
@property
def TotalAllocatedMB(self) -> FailableFloat:
return self.Sum(lambda gc: Ok(gc.AllocedSinceLastGCMB))
@property
def HeapCount(self) -> int:
return unwrap(self.HeapCountResult)
@property
def HeapCountResult(self) -> FailableInt:
def f(gcs: Sequence[ProcessedGC]) -> int:
n_heaps = gcs[0].trace_gc.HeapCount
for i, gc in enumerate(gcs):
assert gc.trace_gc.HeapCount == n_heaps
if gc.trace_gc.GlobalHeapHistory is None:
print(f"WARN: GC{i} has null GlobalHeapHistory. It's a {gc.Type}")
phh_count = gc.HeapCount
if n_heaps != phh_count:
print(
f"WARN: GC{i} has {phh_count} PerHeapHistories but {n_heaps} heaps. "
+ f"It's a {gc.Type}"
)
return n_heaps
return map_ok(self.gcs_result, f)
@property
def NumberGCs(self) -> int:
return len(self.gcs)
def number_gcs_per_generation(self, gen: Gens) -> int:
return count(() for gc in self.gcs if gc.Generation == gen)
@property
def number_gcs_in_each_generation(self) -> Mapping[Gens, int]:
return map_to_mapping(Gens, self.number_gcs_per_generation)
# WARN: `prop` type is deliberately *not* the correct type --
# mypy checks a property access on a class `C.x` as being of the property's callable type
# when it is actually a `property` instance. See https://github.com/python/mypy/issues/6192
def fn_of_property(prop: Callable[[T], U]) -> Callable[[T], U]:
res = check_cast(property, prop).__get__
assert callable(res)
return res
def ok_of_property(prop: Callable[[T], U]) -> Callable[[T], Result[E, U]]:
return fn_to_ok(fn_of_property(prop))
class RegressionKind(Enum):
# Note: the order of these determines the order we'll print them in.
LARGE_REGRESSION = 1
LARGE_IMPROVEMENT = 2
REGRESSION = 3
IMPROVEMENT = 4
STALE = 5
def __lt__(self, other: Any) -> bool: # other: RegressionKind
sv: int = self.value
ov: int = other.value
return sv < ov
def title(self) -> str:
return {
RegressionKind.LARGE_REGRESSION: "Large Regressions (Regression of >20%)",
RegressionKind.REGRESSION: "Regressions (Regression of 5% - 20%)",
RegressionKind.LARGE_IMPROVEMENT: "Large Improvements (Improvement of >20%)",
        RegressionKind.IMPROVEMENT: "Improvements (Improvement of 5% - 20%)",
RegressionKind.STALE: "Stale (Same, or percent difference within 5% margin)",
}[self]
def text_color(self) -> Optional[str]:
return {
RegressionKind.LARGE_REGRESSION: "red",
RegressionKind.REGRESSION: "red",
RegressionKind.LARGE_IMPROVEMENT: "green",
RegressionKind.IMPROVEMENT: "green",
RegressionKind.STALE: None,
}[self]
def get_regression_kind(factor_diff: float, better: Better) -> RegressionKind:
if better == Better.greater:
factor_diff *= -1
if factor_diff > 0.20:
return RegressionKind.LARGE_REGRESSION
elif factor_diff > 0.05:
return RegressionKind.REGRESSION
elif factor_diff < -0.20:
return RegressionKind.LARGE_IMPROVEMENT
elif factor_diff < -0.05:
return RegressionKind.IMPROVEMENT
else:
return RegressionKind.STALE
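# A quick sketch of the banding above (illustrative only, not part of the
# report pipeline). For a greater-is-better metric the sign flips first, so a
# +10% change lands in the improvement band while a -30% change is a large
# regression:
# >>> get_regression_kind(0.10, Better.greater)
# <RegressionKind.IMPROVEMENT: 4>
# >>> get_regression_kind(-0.30, Better.greater)
# <RegressionKind.LARGE_REGRESSION: 1>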
@with_slots
@dataclass(frozen=True)
class FloatsOrStrs:
is_floats: bool
_values: Union[Sequence[float], Sequence[str]]
@property
def as_floats(self) -> Sequence[float]:
assert self.is_floats
return cast(Sequence[float], self._values)
@property
def as_strs(self) -> Sequence[str]:
assert not self.is_floats
return cast(Sequence[str], self._values)
def __len__(self) -> int:
return len(self._values)
def floats(s: Sequence[float]) -> FloatsOrStrs:
return FloatsOrStrs(True, s)
def strs(s: Sequence[str]) -> FloatsOrStrs:
return FloatsOrStrs(False, s)
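# Usage sketch: the two constructors tag the payload so consumers can branch
# on `is_floats` instead of doing isinstance checks on the sequence.
# >>> floats([1.0, 2.0]).as_floats
# [1.0, 2.0]
# >>> strs(["a", "b"]).is_floats
# False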
class GCKind(Enum):
NGC0 = 0
NGC1 = 1
BGC = 2
NGC2 = 3
def get_gc_kind(gc: ProcessedGC) -> GCKind:
return get_gc_kind_for_abstract_trace_gc(gc.trace_gc)
def get_gc_kind_for_abstract_trace_gc(gc: AbstractTraceGC) -> GCKind:
return {
Gens.Gen0: GCKind.NGC0,
Gens.Gen1: GCKind.NGC1,
Gens.Gen2: GCKind.BGC if GCType(gc.Type) == GCType.BackgroundGC else GCKind.NGC2,
}[Gens(gc.Generation)]
| 29.517654
| 100
| 0.67049
|
e9367ed77eb2a2219ddf1398fa36142efc89937d
| 1,470
|
py
|
Python
|
script.py
|
konay1122/Auto-Filter-Bot-V2
|
436940c83bd4c8943b089f6b1b02df7656dad70f
|
[
"MIT"
] | 1
|
2022-03-29T09:34:12.000Z
|
2022-03-29T09:34:12.000Z
|
script.py
|
konay1122/Auto-Filter-Bot-V2
|
436940c83bd4c8943b089f6b1b02df7656dad70f
|
[
"MIT"
] | null | null | null |
script.py
|
konay1122/Auto-Filter-Bot-V2
|
436940c83bd4c8943b089f6b1b02df7656dad70f
|
[
"MIT"
] | 1
|
2021-12-24T04:46:32.000Z
|
2021-12-24T04:46:32.000Z
|
class script(object):
START_MSG = """ <b>ဟဲဟဲ😊 {}
ဒါကို ဒီလိုခေါ်ပါတယ်😁😁 :D
ဟဲဟဲ😊
ဟဲဟဲ😊 !
တခြားဖေတာတွေမထည့်ရတော့ဘူး!
For more click <i>help</i></b>"""
HELP_MSG = """<b>How to use the bot??</b>
<i>
* Removed 😂
* Removed 😂 <b>all admin rights</b>!
</i>
<b>Bot Commands - Works in Group only</b>
(You need to be an Auth User in order to use these commands)
* <code>/add channelid</code> - Links channel to your group.
or
* <code>/add @channelusername</code> - Links channel to your group.
<i>NOTE : You can get your channel ID from @nas0055😆😆 </i>
* <code>/del channelid</code> - Delinks channel from group
or
* <code>/del @channelusername</code> - Delinks channel from group
<i>NOTE : You can get connected channel details by <code>/filterstats</code> </i>
* <code>/delall</code> - Removes all connected channels and filters from group!
<i>Note : Don't add command-deleting bots to the group! Otherwise, the delall command won't work</i>
* <code>/filterstats</code> - Check connected channels and number of filters.
No need to add each filter again!
Bot will automatically search for your files and give links to them!
<b>© @nas</b>"""
ABOUT_MSG = """⭕️<b>My Name : ဟဲဟဲ😊</b>
⭕️<b>Creater :</b> @ဟဲဟဲ😊
⭕️<b>Language :</b> <code>unicode</code>
⭕️<b>Library :</b> <a href='https://docs.pyrogram.org/'>Pyrogram 1.0.7</a>
⭕️<b>Tutorial Video :</b> <a href='https://www.youtube.com/watch?v=XHr7vaHJvq4'>Video Link</a>
"""
| 21
| 95
| 0.626531
|
f250ba4e8b27a55e521c87c613e60a85dec828bd
| 3,160
|
py
|
Python
|
drivers.py
|
AvalZ/RevOK
|
4a697e448d0130d2757dad0467298bb05fdc5fb6
|
[
"Apache-2.0"
] | 24
|
2020-06-18T09:55:18.000Z
|
2021-12-19T17:54:16.000Z
|
drivers.py
|
watchmen-coder/RevOK
|
855b7b1f871c72b6a4965ab700ed6ee945fe9fa5
|
[
"Apache-2.0"
] | 2
|
2021-06-08T22:33:09.000Z
|
2022-03-12T00:49:43.000Z
|
drivers.py
|
watchmen-coder/RevOK
|
855b7b1f871c72b6a4965ab700ed6ee945fe9fa5
|
[
"Apache-2.0"
] | 4
|
2021-03-12T03:03:40.000Z
|
2021-10-22T01:27:33.000Z
|
import asyncio
import time
from abc import abstractmethod, ABC
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from loguru import logger
class Driver(ABC):
@abstractmethod
def start_scan(self):
pass
@abstractmethod
def check_for_flag(self, flag):
pass
@abstractmethod
def is_scan_done(self):
pass
class MetasploitDriver(Driver):
def __init__(self, baseurl):
options = webdriver.FirefoxOptions()
# options.add_argument('-headless')
self.driver = webdriver.Firefox(options=options)
self.baseurl = baseurl
def __del__(self):
self.driver.close()
def start_scan(self):
self.driver.get("{}/login".format(self.baseurl))
self.driver.set_window_size(640, 1080)
self.driver.find_element(By.ID, "user_session_username").clear()
self.driver.find_element(By.ID, "user_session_username").send_keys("avalz")
self.driver.find_element(By.ID, "user_session_password").clear()
self.driver.find_element(By.ID, "user_session_password").send_keys("~Nv+=:K26S7CeUzE")
self.driver.find_element(By.NAME, "commit").click()
self.driver.find_element(By.LINK_TEXT, "Fuzzer").click()
self.driver.find_element(By.LINK_TEXT, "Scan...").click()
self.driver.find_element(By.ID, "scan_task_address_string").click()
self.driver.find_element(By.ID, "scan_task_address_string").click()
element = self.driver.find_element(By.ID, "scan_task_address_string")
actions = ActionChains(self.driver)
actions.double_click(element).perform()
self.driver.find_element(By.ID, "scan_task_address_string").click()
self.driver.find_element(By.ID, "scan_task_address_string").clear()
self.driver.find_element(By.ID, "scan_task_address_string").send_keys("localhost")
self.driver.find_element(By.ID, "popup_submit").click()
def check_for_flag(self, flag):
self.driver.get("https://localhost:3790/")
self.driver.set_window_size(640, 1080)
self.driver.find_element(By.LINK_TEXT, "Fuzzer").click()
# FIXME: change number of services
self.driver.find_element(By.LINK_TEXT, "4 services").click()
print(self.driver.page_source)
        return flag in self.driver.page_source
    def is_scan_done(self):
        # Declared @abstractmethod on Driver but never implemented here, which
        # made MetasploitDriver impossible to instantiate; scan-completion
        # polling is only modelled by MockDriver below.
        raise NotImplementedError
class MockDriver(Driver):
def __init__(self, running_file='running'):
self.running_file = running_file
def start_scan(self):
with open(self.running_file, 'w') as f:
f.write(str(1))
def check_for_flag(self, flag):
pass
def is_scan_done(self):
with open(self.running_file, 'r') as f:
running = bool(int(f.read().strip('\n')))
return not running
def wait_for_scan_done(self):
        while not self.is_scan_done():
time.sleep(1)
if __name__ == "__main__":
md = MockDriver()
logger.info('Starting scan')
md.start_scan()
md.wait_for_scan_done()
logger.success('Scan done')
# print(md.check_for_flag("4f6a142e-56ff-f229-4d1d-aa055c000de7"))
| 31.287129
| 94
| 0.673101
|
43e7a78f6e7f2f3bb659df06b6a0063e3ff99a16
| 6,011
|
py
|
Python
|
src/NUPathCourseScraper.py
|
chauhankaranraj/EquivalentCourseFinder
|
b0eef6bc3e8533f79ea6526d4e17ef5b977b9b98
|
[
"MIT"
] | null | null | null |
src/NUPathCourseScraper.py
|
chauhankaranraj/EquivalentCourseFinder
|
b0eef6bc3e8533f79ea6526d4e17ef5b977b9b98
|
[
"MIT"
] | null | null | null |
src/NUPathCourseScraper.py
|
chauhankaranraj/EquivalentCourseFinder
|
b0eef6bc3e8533f79ea6526d4e17ef5b977b9b98
|
[
"MIT"
] | null | null | null |
import argparse
import pickle as pkl
import os.path as osp
import datetime as dt
from tqdm import tqdm
import pandas as pd
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support import expected_conditions as EC
def parse_scrape_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--chromedriver-path',
default='/home/kachau/Downloads/chromedriver',
type=str,
required=False,
help="path to chromedriver for selenium",
)
parser.add_argument(
'--start-url',
default='https://ugadmissions.northeastern.edu/transfercredit/TransferCreditEvaluatedStudent2.asp',
type=str,
required=False,
help="URL which has the list of colleges, i.e. the first page from where the scraping should begin",
)
parser.add_argument(
'--nupath-names',
default=['NUpath Difference/Diversity', 'NUpath Interpreting Culture'],
type=str,
nargs='+',
required=False,
help="NUPath name(s) which is to be satisfied. CASE SENSITIVE",
)
parser.add_argument(
'--save-fname',
default=f'../reports/results_{dt.datetime.now().strftime("%b-%d-%Y-%H-%M-%S")}.pkl',
type=str,
required=False,
help='Path + filename where results pickle should be stored',
)
parser.add_argument(
'--college-keyword',
type=str,
required=False,
help="Will search only colleges which contain the keyword specified in this arg. CASE SENSITIVE",
)
return parser.parse_args()
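# Example invocation (paths and keyword below are illustrative):
#   python NUPathCourseScraper.py \
#       --chromedriver-path /usr/local/bin/chromedriver \
#       --nupath-names "NUpath Interpreting Culture" \
#       --college-keyword "Boston"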
# get args
args = parse_scrape_args()
# xpaths for various elements on the page to be scraped
# NOTE: these can be easily obtained using google chrome plugins
PROCEED_BTN_XP = '//*[(@id = "button1")]'
INSTITUTES_DRPDWN_XP = '//*[(@id = "FICE")]'
DEPARTMENT_DRPDWN_XP = '//*[(@id = "tseg")]'
NUPATH_CSS_SEL = 'tr+ tr td:nth-child(5)'
NUCORE_CSS_SEL = 'tr+ tr td:nth-child(4)'
EFF_DATE_CSS_SEL = 'tr+ tr td:nth-child(3)'
HOME_COURSE_CSS_SEL = 'table+ table tr+ tr td:nth-child(2)'
AWAY_COURSE_CSS_SEL = 'table+ table tr+ tr td:nth-child(1)'
# init chrome driver
driver = webdriver.Chrome(args.chromedriver_path)
driver.get(args.start_url)
# click on proceed to rules search
driver.find_element_by_xpath(PROCEED_BTN_XP).click()
# get a list of available colleges
colleges_dropdown = driver.find_element_by_xpath(INSTITUTES_DRPDWN_XP)
college_dropdown_opts = colleges_dropdown.find_elements_by_tag_name("option")
college_names = []
for c in tqdm(college_dropdown_opts):
college_names.append(c.text)
# with open('../reports/college_names.pkl', 'wb') as f:
# pkl.dump(college_names, f)
# with open('../reports/college_names.pkl', 'rb') as f:
# college_names = pkl.load(f)
# split save fname into head and tail
head_fname, tail_fname = osp.split(args.save_fname)
# list of dictionaries, each dictionary contains details of a match
results = []
# iterate through all colleges and see if NUPath contains *all* of the matching paths
# NOTE: skip the first one since it's an empty string
for ci in range(1, len(college_names)):
# if college_keyword argument is specified then search only those colleges which
# match the input keyword
if args.college_keyword is not None and args.college_keyword not in college_names[ci]:
continue
colleges_dropdown = Select(driver.find_element_by_xpath(INSTITUTES_DRPDWN_XP))
# click on college
colleges_dropdown.select_by_index(ci)
    # get all departments in this college
departments = [
i.text for i in driver.find_element_by_xpath(DEPARTMENT_DRPDWN_XP).find_elements_by_tag_name("option")
]
# iterate through all departments
    # NOTE: skip the first one since it's an empty string
for di in range(1, len(departments)):
# get the list of courses in this department
departments_dropdown = Select(driver.find_element_by_xpath(DEPARTMENT_DRPDWN_XP))
departments_dropdown.select_by_index(di)
        # see if any course matches the conditions
courses_nupaths = driver.find_elements_by_css_selector(NUPATH_CSS_SEL)
if len(courses_nupaths) > 0:
# get other course metadata
courses_away_names = driver.find_elements_by_css_selector(AWAY_COURSE_CSS_SEL)
courses_home_names = driver.find_elements_by_css_selector(HOME_COURSE_CSS_SEL)
courses_effective_dates = driver.find_elements_by_css_selector(EFF_DATE_CSS_SEL)
courses_nu_cores = driver.find_elements_by_css_selector(NUCORE_CSS_SEL)
for course_i, course_nupath in enumerate(courses_nupaths):
if all(to_match in course_nupath.text for to_match in args.nupath_names):
                    # add course name, NEU name, effective dates, NU Core
results.append(
{
'college': college_names[ci],
'department': departments[di],
'transfer_course': courses_away_names[course_i].text,
'neu_course': courses_home_names[course_i].text,
'effective_dates': courses_effective_dates[course_i].text,
'nu_core': courses_nu_cores[course_i].text,
'nupath': course_nupath.text,
}
)
# save data every iteration, in case it gets stopped in the middle
with open(args.save_fname, "wb") as f:
pkl.dump(results, f)
# save as excel, so users can see results directly
pd.DataFrame(results).to_excel(
osp.join(head_fname, tail_fname.rsplit('.', 1)[0] + '.xlsx')
)
# exit
driver.close()
| 38.286624
| 110
| 0.679754
|
3121f6d54690f58bccaeaeef9d09947ba23f2cf9
| 1,716
|
py
|
Python
|
Day 12/day12.py
|
neethan/adventofcode2021
|
862d4062cdfc22a6e2f03378db6755e20e5f8f81
|
[
"WTFPL"
] | 1
|
2021-12-04T20:25:48.000Z
|
2021-12-04T20:25:48.000Z
|
Day 12/day12.py
|
neethan/adventofcode2021
|
862d4062cdfc22a6e2f03378db6755e20e5f8f81
|
[
"WTFPL"
] | null | null | null |
Day 12/day12.py
|
neethan/adventofcode2021
|
862d4062cdfc22a6e2f03378db6755e20e5f8f81
|
[
"WTFPL"
] | null | null | null |
import re
import numpy as np
from pprint import pprint as pp
from collections import defaultdict
def getInput():
with open("input.txt") as fp:
lines = [x.strip() for x in fp.readlines()]
return lines
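# Input format sketch: one cave connection per line, e.g.
#   start-A
#   A-b
#   A-end
#   b-end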
def findRoutes(idx, caves, currentRoute, routes):
#print(" " * len(currentRoute), "Now on:", currentRoute, " - ", idx, " - ", caves[idx])
if idx == "end":
#print(" " * len(currentRoute), "Added")
routes.append(currentRoute)
return
# Check all these children for their paths
for cave in caves[idx]:
#print(" " * len(currentRoute), "-> Attempting", cave)
# Try this path on the current path
newRoute = currentRoute.copy()
newRoute.append(cave)
# If we visit a small twice, don't do this path
checkDupes = [x for x in newRoute if x == x.lower() and len(x) < 3]
# Part 2 on this line
if len(checkDupes) != len(set(checkDupes)) and abs(len(checkDupes) - len(set(checkDupes))) == 2:
#print(" " * len(newRoute), "-> Dupe! Continue... (", cave, ")")
continue
#print(" " * len(newRoute), "-> Looking for routes under", cave)
        findRoutes(cave, caves.copy(), newRoute, routes)  # recursion appends completed paths to `routes` in place
cavesConnections = np.array(getInput())
caves = {}
for connection in cavesConnections:
start, end = connection.split("-")
if not start in caves:
caves[start] = []
if not end in caves:
caves[end] = []
if start != "end" or end != "start":
caves[start].append(end)
if (start != "start" and end != "end"):
caves[end].append(start)
if "start" in caves[start]: caves[start].remove("start")
if "start" in caves[end]: caves[end].remove("start")
routes = []
findRoutes("start", caves.copy(), [], routes)
print(len(routes))
| 30.105263
| 99
| 0.629953
|
7274ab275bb4909264b8377115af76cf3b650b54
| 285
|
py
|
Python
|
imagefit/urls.py
|
serdardurbaris/django-imagefit
|
5d2a8f7e99eb165dbabc455a1e69654f74887542
|
[
"BSD-3-Clause"
] | null | null | null |
imagefit/urls.py
|
serdardurbaris/django-imagefit
|
5d2a8f7e99eb165dbabc455a1e69654f74887542
|
[
"BSD-3-Clause"
] | null | null | null |
imagefit/urls.py
|
serdardurbaris/django-imagefit
|
5d2a8f7e99eb165dbabc455a1e69654f74887542
|
[
"BSD-3-Clause"
] | null | null | null |
from django.urls import re_path
from . import views
"""
Master URL using
path('image', include('imagefit.urls')),
"""
urlpatterns = [
    re_path(
r'^(?P<url>.*)/(?P<format>[,\w-]+)/(?P<path_name>[\w_-]*)/?$',
views.resize,
name="imagefit_resize"),
]
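# Example of a matching request (values are illustrative):
#   GET /media/photo.jpg/320x240/thumbnail
# resolves to views.resize(url="media/photo.jpg", format="320x240",
# path_name="thumbnail").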
| 19
| 70
| 0.561404
|
62a097df40eb174792bc7dcc97ae5d2d3f27ef6a
| 2,140
|
py
|
Python
|
setup.py
|
bhumikapahariapuresoftware/visions
|
8838d89b4f02e401112378b4662a779227ead9f8
|
[
"BSD-4-Clause"
] | null | null | null |
setup.py
|
bhumikapahariapuresoftware/visions
|
8838d89b4f02e401112378b4662a779227ead9f8
|
[
"BSD-4-Clause"
] | null | null | null |
setup.py
|
bhumikapahariapuresoftware/visions
|
8838d89b4f02e401112378b4662a779227ead9f8
|
[
"BSD-4-Clause"
] | null | null | null |
from pathlib import Path
from setuptools import find_packages, setup
# Read the contents of README file
source_root = Path(".")
with (source_root / "README.md").open(encoding="utf-8") as f:
long_description = f.read()
# Read the requirements
with (source_root / "requirements.txt").open(encoding="utf8") as f:
requirements = f.readlines()
with (source_root / "requirements_dev.txt").open(encoding="utf8") as f:
dev_requirements = f.readlines()
with (source_root / "requirements_test.txt").open(encoding="utf8") as f:
test_requirements = f.readlines()
type_geometry_requires = ["shapely"]
type_image_path_requires = ["imagehash", "Pillow"]
extras_requires = {
"type_geometry": type_geometry_requires,
"type_image_path": type_image_path_requires,
"plotting": ["pydot", "pygraphviz", "matplotlib"],
"dev": dev_requirements,
"test": test_requirements,
}
extras_requires["all"] = requirements + [
dependency
for name, dependencies in extras_requires.items()
if name.startswith("type_") or name == "plotting"
for dependency in dependencies
]
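# With the extras above, feature sets are opt-in at install time, e.g.:
#   pip install visions[type_geometry]
#   pip install visions[all]   # core requirements plus every type_* and plotting extra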
__version__ = None
with (source_root / "src/visions/version.py").open(encoding="utf8") as f:
exec(f.read())
setup(
name="visions",
version=__version__,
url="https://github.com/dylan-profiler/visions",
description="Visions",
license="BSD License",
author="Dylan Profiler",
author_email="visions@ictopzee.nl",
package_data={"vision": ["py.typed"]},
packages=find_packages("src"),
package_dir={"": "src"},
install_requires=requirements,
include_package_data=True,
extras_require=extras_requires,
tests_require=test_requirements,
python_requires=">=3.6",
long_description=long_description,
    long_description_content_type="text/markdown",
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
)
| 30.571429
| 73
| 0.684112
|
832486bd35787328483693ffe42ccbb083fc169b
| 32,197
|
py
|
Python
|
transformers/data/processors/factcheck_old.py
|
HLTCHKUST/Perplexity-FactChecking
|
aec341410d7c66273ecdb52daa7d39abd786edc3
|
[
"MIT"
] | 7
|
2021-04-22T06:42:36.000Z
|
2021-09-28T11:43:06.000Z
|
transformers/data/processors/factcheck_old.py
|
HLTCHKUST/Perplexity-FactChecking
|
aec341410d7c66273ecdb52daa7d39abd786edc3
|
[
"MIT"
] | null | null | null |
transformers/data/processors/factcheck_old.py
|
HLTCHKUST/Perplexity-FactChecking
|
aec341410d7c66273ecdb52daa7d39abd786edc3
|
[
"MIT"
] | 1
|
2021-08-03T04:18:43.000Z
|
2021-08-03T04:18:43.000Z
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" GLUE processors and helpers """
import numpy as np
import logging
import os
import json
import jsonlines
from ...file_utils import is_tf_available
from .utils import DataProcessor, InputExample, InputFeatures
# glue_processors/glue_output_modes are referenced in
# factcheck_convert_examples_to_features below but were never imported.
from .glue import glue_processors, glue_output_modes
import random
if is_tf_available():
import tensorflow as tf
logger = logging.getLogger(__name__)
logger.disabled = True
CLASSIFICATION_TO_REGRESSION = {
"true" : '0.0',
"mostly-true": '0.2',
"half-true": '0.4',
"barely-true": '0.6',
"false": '0.8',
"pants-fire": '1.0'
}
def factcheck_convert_examples_to_features(
examples,
tokenizer,
max_length=512,
task=None,
label_list=None,
output_mode=None,
pad_on_left=False,
pad_token=0,
pad_token_segment_id=0,
mask_padding_with_zero=True,
):
"""
Loads a data file into a list of ``InputFeatures``
Args:
examples: List of ``InputExamples`` or ``tf.data.Dataset`` containing the examples.
tokenizer: Instance of a tokenizer that will tokenize the examples
max_length: Maximum example length
task: GLUE task
label_list: List of labels. Can be obtained from the processor using the ``processor.get_labels()`` method
output_mode: String indicating the output mode. Either ``regression`` or ``classification``
pad_on_left: If set to ``True``, the examples will be padded on the left rather than on the right (default)
pad_token: Padding token
pad_token_segment_id: The segment ID for the padding token (It is usually 0, but can vary such as for XLNet where it is 4)
mask_padding_with_zero: If set to ``True``, the attention mask will be filled by ``1`` for actual values
and by ``0`` for padded values. If set to ``False``, inverts it (``1`` for padded values, ``0`` for
actual values)
Returns:
If the ``examples`` input is a ``tf.data.Dataset``, will return a ``tf.data.Dataset``
containing the task-specific features. If the input is a list of ``InputExamples``, will return
a list of task-specific ``InputFeatures`` which can be fed to the model.
"""
is_tf_dataset = False
if is_tf_available() and isinstance(examples, tf.data.Dataset):
is_tf_dataset = True
if task is not None:
processor = glue_processors[task]()
if label_list is None:
label_list = processor.get_labels()
logger.info("Using label list %s for task %s" % (label_list, task))
if output_mode is None:
output_mode = glue_output_modes[task]
logger.info("Using output mode %s for task %s" % (output_mode, task))
label_map = {label: i for i, label in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
len_examples = 0
if is_tf_dataset:
example = processor.get_example_from_tensor_dict(example)
example = processor.tfds_map(example)
len_examples = tf.data.experimental.cardinality(examples)
else:
len_examples = len(examples)
if ex_index % 10000 == 0:
logger.info("Writing example %d/%d" % (ex_index, len_examples))
inputs = tokenizer.encode_plus(
example.text_a, example.text_b, add_special_tokens=True, max_length=max_length, return_token_type_ids=True,
)
input_ids, token_type_ids = inputs["input_ids"], inputs["token_type_ids"]
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
attention_mask = [1 if mask_padding_with_zero else 0] * len(input_ids)
# Zero-pad up to the sequence length.
padding_length = max_length - len(input_ids)
if pad_on_left:
input_ids = ([pad_token] * padding_length) + input_ids
attention_mask = ([0 if mask_padding_with_zero else 1] * padding_length) + attention_mask
token_type_ids = ([pad_token_segment_id] * padding_length) + token_type_ids
else:
input_ids = input_ids + ([pad_token] * padding_length)
attention_mask = attention_mask + ([0 if mask_padding_with_zero else 1] * padding_length)
token_type_ids = token_type_ids + ([pad_token_segment_id] * padding_length)
assert len(input_ids) == max_length, "Error with input length {} vs {}".format(len(input_ids), max_length)
assert len(attention_mask) == max_length, "Error with input length {} vs {}".format(
len(attention_mask), max_length
)
assert len(token_type_ids) == max_length, "Error with input length {} vs {}".format(
len(token_type_ids), max_length
)
if output_mode == "classification":
label = label_map[example.label]
elif output_mode == "regression":
label = float(example.label)
else:
raise KeyError(output_mode)
if ex_index < 5:
logger.info("*** Example ***")
logger.info("guid: %s" % (example.guid))
logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
logger.info("attention_mask: %s" % " ".join([str(x) for x in attention_mask]))
logger.info("token_type_ids: %s" % " ".join([str(x) for x in token_type_ids]))
logger.info("label: %s (id = %d)" % (example.label, label))
features.append(
InputFeatures(
input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, label=label
)
)
if is_tf_available() and is_tf_dataset:
def gen():
for ex in features:
yield (
{
"input_ids": ex.input_ids,
"attention_mask": ex.attention_mask,
"token_type_ids": ex.token_type_ids,
},
ex.label,
)
return tf.data.Dataset.from_generator(
gen,
({"input_ids": tf.int32, "attention_mask": tf.int32, "token_type_ids": tf.int32}, tf.int64),
(
{
"input_ids": tf.TensorShape([None]),
"attention_mask": tf.TensorShape([None]),
"token_type_ids": tf.TensorShape([None]),
},
tf.TensorShape([]),
),
)
return features
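# Hedged usage sketch (kept commented so importing this module stays
# side-effect free). Any tokenizer exposing `encode_plus` works; the model
# name below is only an example.
# from transformers import BertTokenizer
# tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
# examples = [InputExample(guid="t-0", text_a="a claim", text_b="its evidence", label="true")]
# features = factcheck_convert_examples_to_features(
#     examples, tokenizer, max_length=128,
#     label_list=["true", "false"], output_mode="classification",
# )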
class RteProcessor(DataProcessor):
"""Processor for the RTE data set (GLUE version)."""
def __init__(self, args):
self.rte2misinfo_map = {
'entailment': 'true',
'not_entailment': 'false'
}
self.output_mode = args.output_mode
if self.output_mode == 'regression':
self.labels = [None]
else:
self.labels = ["true", "false"]
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence1"].numpy().decode("utf-8"),
tensor_dict["sentence2"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir, data_source=""):
"""See base class."""
path = "{}/RTE/train{}.tsv".format(data_dir, data_source)
print("loading from {}".format(path))
return self._create_examples(self._read_tsv(path), "train")
# return self._create_examples(self._read_tsv(os.path.join(data_dir, "train{}.tsv".format(self.data_source))), "train")
def get_dev_examples(self, data_dir, data_source=""):
"""See base class."""
path = "{}/RTE/dev{}.tsv".format(data_dir, data_source)
print("loading from dev {}".format(path))
return self._create_examples(self._read_tsv(path), "dev")
# return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev{}.tsv".format(self.data_source))), "dev")
def get_test_examples(self, data_dir, data_source=""):
"""See base class."""
path = "{}/RTE/test{}.tsv".format(data_dir, data_source)
print("loading from {}".format(path))
return self._create_examples(self._read_tsv(path), "test")
def get_labels(self):
"""See base class."""
# return ["entailment", "not_entailment"]
return self.labels
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
text_a = line[1]
text_b = line[2]
label = self.rte2misinfo_map[line[-1]]
if self.output_mode == 'regression':
label = CLASSIFICATION_TO_REGRESSION[label]
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class SciTailProcessor(DataProcessor):
"""Processor for the RTE data set (GLUE version)."""
def get_example_from_tensor_dict(self, tensor_dict):
"""See base class."""
return InputExample(
tensor_dict["idx"].numpy(),
tensor_dict["sentence1"].numpy().decode("utf-8"),
tensor_dict["sentence2"].numpy().decode("utf-8"),
str(tensor_dict["label"].numpy()),
)
def get_train_examples(self, data_dir, data_source=""):
"""See base class."""
path = "{}/SciTail/train{}.tsv".format(data_dir, data_source)
print("loading from {}".format(path))
return self._create_examples(self._read_tsv(path), "train")
# return self._create_examples(self._read_tsv(os.path.join(data_dir, "train{}.tsv".format(self.data_source))), "train")
def get_dev_examples(self, data_dir, data_source=""):
"""See base class."""
path = "{}/SciTail/dev{}.tsv".format(data_dir, data_source)
print("loading from {}".format(path))
return self._create_examples(self._read_tsv(path), "dev")
# return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev{}.tsv".format(self.data_source))), "dev")
def get_test_examples(self, data_dir, data_source=""):
"""See base class."""
path = "{}/SciTail/test{}.tsv".format(data_dir, data_source)
print("loading from {}".format(path))
return self._create_examples(self._read_tsv(path), "test")
def get_labels(self):
"""See base class."""
return ["entails", "neutral"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "" #"%s-%s" % (set_type, line[0])
text_a = line[1]
text_b = line[2]
label = line[-1]
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class PolifactProcessor(DataProcessor):
def __init__(self, args):
self.is_binary = args.is_binary # binary or multi
self.has_evidence = args.has_evidence #False
self.subtask = args.politifact_subtask #'liar' # liar, covid
self.output_mode = args.output_mode
self.filter_middle_classes = args.filter_middle_classes
        # These flags are read later (e.g. in get_test_examples and
        # _create_fever_examples), so leaving them commented out raised
        # AttributeError at runtime; restoring them assumes the corresponding
        # fields exist on `args`.
        self.use_credit = args.use_credit
        self.use_metainfo = args.use_metainfo
        self.use_creditscore = args.use_creditscore
        self.use_ppl_vector = args.use_ppl_vector
        self.use_ppl = args.use_ppl
        self.few_shot = args.few_shot
        self.claim_only = args.claim_only
self.myth = args.myth
self.fever = args.fever
self.liar = args.liar
        self.cross_validation = args.cross_validation  # read by get_test_examples below
self.seed_ = args.seed
self.covidpoli = args.covidpoli
self.multi2binary = {
"true" : "true",
"mostly-true": "true",
"half-true": "true",
"barely-true": "false",
"false": "false",
"pants-fire": "false",
"NOT ENOUGH INFO": "false",
"REFUTES": "_",
"SUPPORTS": "true"
}
if self.output_mode == 'regression':
self.labels = [None]
elif self.is_binary:
# classification binary
self.labels = ["true", "false"]
else:
# classification full
if self.fever:
self.labels = ["REFUTES", "SUPPORTS", "NOT ENOUGH INFO"]
else:
self.labels = ["true", "mostly-true", "half-true", "barely-true", "false", "pants-fire"]
def get_train_examples(self, data_dir, data_source=""):
if self.has_evidence:
# if self.fever:
# path_ = '/home/yejin/fever/data/fever_train_for_bert.jsonl'
# # path_ = "{}/naacl/fever_train_for_bert_w_ppl.jsonl".format(data_dir)
# elif self.myth:
# path_ = '/home/yejin/covid19_factcheck/data/covid_myth_test_v3.jsonl'
# elif self.liar:
# path_ = "{}/politifact/{}/liar-plus_train_v3.jsonl".format(data_dir, self.subtask)
# # path_ ='/home/nayeon/covid19_factcheck/data/liar-plus_train_v3_justification_top1_naacl.jsonl'
# elif self.covidpoli:
# path_='/home/yejin/covid19_factcheck/data/factcheck_data/politifact/liar/test_covid19_justification_naacl.jsonl'
# else:
# # using FEVER-based evidences
# if any([self.use_credit, self.use_metainfo, self.use_creditscore]):
# path_ = "{}/politifact/{}/train_evidence_meta_fever_v4a.jsonl".format(data_dir, self.subtask)
# else:
# print("reading data")
# path_ = "{}/politifact/{}/train_evidence_meta_fever_v4a.jsonl".format(data_dir, self.subtask)
# # ============ PATH DONE ============
# print("loading from {}".format(path_))
# with jsonlines.open(path_) as reader:
# obj_list = [obj for obj in reader]
# if self.filter_middle_classes:
# obj_list = [obj for obj in obj_list if obj['label'] not in ['half-true','barely-true']]
if self.few_shot:
if self.fever:
path_ = '/home/yejin/fever/data/fever_train_for_bert_s.jsonl'
eval_file ='/home/nayeon/covid19_factcheck/ppl_results/naacl.gpt2.uni.fever_train_small.npy'
elif self.liar:
path_ = "/home/nayeon/covid19_factcheck/data/liar-plus_train_v3_justification_top1_naacl.jsonl".format(data_dir, self.subtask)
eval_file ='/home/nayeon/covid19_factcheck/ppl_results/naacl.gpt2.uni.liar_train_justification_top1.npy'
elif self.covidpoli:
path_ = '/home/yejin/covid19_factcheck/data/factcheck_data/politifact/liar/test_covid19_justification_naacl.jsonl'
eval_file ='/home/nayeon/covid19_factcheck/ppl_results/naacl.gpt2.uni.naacl_covid_politifact_justification.npy'
elif self.myth:
path_ = '/home/yejin/covid19_factcheck/data/covid_myth_test_v3.jsonl'
eval_file ='/home/nayeon/covid19_factcheck/ppl_results/naacl.gpt2.uni.naacl_covid_myth_v3.npy'
all_objs = self.load_full_liar_with_ppl(path_, eval_file)
combined_all_objs = all_objs['true'] + all_objs['false']
random.shuffle(combined_all_objs)
random.seed(self.seed_)
obj_list = combined_all_objs[:self.few_shot]
print("Using few shot!!!! LEN: ", len(obj_list))
return self._create_examples_with_evidences(obj_list, "train")
else:
if self.fever:
path_ = "{}/naacl/fever_train_for_bert_w_ppl.jsonl".format(data_dir)
with jsonlines.open(path_) as reader:
obj_list = [obj for obj in reader if obj['evidences'] != [] and obj['evidences'][0][0] != 0]
if self.few_shot:
new_obj_list = obj_list[:self.few_shot]
obj_list = new_obj_list
print("Using few shot!!!! LEN: ", len(obj_list))
return self._create_fever_examples(obj_list, "train")
else:
path_ = "{}/politifact/{}/train{}.tsv".format(data_dir, self.subtask, data_source)
print("loading from {}".format(path_))
return self._create_examples(self._read_tsv(path_), "train")
# return self._create_examples(self._read_tsv(os.path.join(data_dir, "train{}.tsv".format(self.data_source))), "train")
def get_dev_examples(self, data_dir, data_source=""):
if self.has_evidence:
# if self.fever:
# path_ = "{}/naacl/fever_test_for_bert_w_ppl.jsonl".format(data_dir)
# elif self.liar:
# path_ ='/home/nayeon/covid19_factcheck/data/liar-plus_test_v3_justification_top1_naacl.jsonl'
# with jsonlines.open(path_) as reader:
# obj_list = [obj for obj in reader if obj['label'] != 'REFUTES']
if self.few_shot:
if self.fever:
path_ = "{}/naacl/fever_test_for_bert_w_ppl.jsonl".format(data_dir)
with jsonlines.open(path_) as reader:
obj_list = [obj for obj in reader if obj['label'] != 'REFUTES']
elif self.liar:
path_ ='/home/nayeon/covid19_factcheck/data/liar-plus_test_v3_justification_top1_naacl.jsonl'
with jsonlines.open(path_) as reader:
obj_list = [obj for obj in reader if obj['label'] != 'REFUTES']
if self.myth:
path_ = '/home/yejin/covid19_factcheck/data/covid_myth_test_v3.jsonl'
eval_file = '/home/nayeon/covid19_factcheck/ppl_results/naacl.gpt2.uni.naacl_covid_myth_v3.npy'
all_objs = self.load_full_liar_with_ppl(path_, eval_file)
combined_all_objs = all_objs['true'] + all_objs['false']
random.shuffle(combined_all_objs)
# random.seed(self.seed_)
obj_list = combined_all_objs[self.few_shot + 1:]
elif self.covidpoli:
path_ = '/home/yejin/covid19_factcheck/data/factcheck_data/politifact/liar/test_covid19_justification_naacl.jsonl'
eval_file = '/home/nayeon/covid19_factcheck/ppl_results/naacl.gpt2.uni.naacl_covid_politifact_justification.npy'
all_objs = self.load_full_liar_with_ppl(path_, eval_file)
combined_all_objs = all_objs['true'] + all_objs['false']
random.shuffle(combined_all_objs)
# random.seed(self.seed_)
print(len(combined_all_objs))
obj_list = combined_all_objs[self.few_shot+1:]
# random.seed(self.seed_)
# obj_list = obj_list[:self.few_shot]
print("Using few dev shot!!!! LEN: ", len(obj_list))
print("loading from dev !! {}".format(path_))
return self._create_examples_with_evidences(obj_list, "dev")
else:
if self.fever:
path_ = "{}/naacl/fever_valid_for_bert_w_ppl_s.jsonl".format(data_dir)
# path_ = "{}/naacl/fever_test_for_bert_w_ppl_{}_test.jsonl".format(data_dir, self.cross_validation)
with jsonlines.open(path_) as reader:
obj_list = [obj for obj in reader if obj['evidences'] != [] and obj['evidences'][0][0] != 0]
return self._create_fever_examples(obj_list, "dev")
else:
path_ = "{}/politifact/{}/valid{}.tsv".format(data_dir, self.subtask, data_source)
print("loading from {}".format(path_))
            return self._create_examples(self._read_tsv(path_), "dev")  # path_ already includes data_dir
# return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev{}.tsv".format(self.data_source))), "dev")
def get_test_examples(self, data_dir, data_source=""):
"""See base class."""
if self.has_evidence:
if self.cross_validation:
if self.use_ppl:
if self.myth:
path_="{}/naacl/covid_myth_test_w_{}_test.jsonl".format(data_dir, self.cross_validation)
elif self.fever:
path_ = "{}/naacl/fever_test_for_bert_w_ppl_{}_test.jsonl".format(data_dir,
self.cross_validation)
else:
path_="{}/naacl/test_covid19_justification_w_{}_test.jsonl".format(data_dir, self.cross_validation)
# path_="{}/naacl/covid_myth_test_w_{}_test.jsonl".format(data_dir, self.cross_validation)
else:
if self.fever:
path_ = "{}/naacl/fever_test_for_bert_w_ppl_{}_test.jsonl".format(data_dir, self.cross_validation)
elif self.myth:
path_ = "{}/naacl/covid_myth_test_w_{}_test.jsonl".format(data_dir, self.cross_validation)
else:
path_ = "{}/naacl/test_covid19_justification_w_{}_test.jsonl".format(data_dir,
self.cross_validation)
# else:
# path_ = "{}/politifact/{}/cross_validation/{}_test.jsonl".format(data_dir, self.subtask, self.cross_validation)
else:
if self.fever:
# if self.claim_only:
# path_ = "{}/naacl/fever_test_for_bert_w_claimonly_ppl.jsonl".format(data_dir)
# else:
path_ = "{}/naacl/fever_test_for_bert.jsonl".format(data_dir)
elif self.liar:
path_ = "{}/politifact/{}/liar-plus_test_v3.jsonl".format(data_dir, self.subtask)
# path_ = '/home/nayeon/covid19_factcheck/data/liar-plus_test_v3_justification_top1_naacl.jsonl'
else:
if any([self.use_credit, self.use_metainfo, self.use_creditscore, self.use_ppl]):
path_ = "{}/politifact/{}/test_evidence_meta_fever_v4a.jsonl".format(data_dir, self.subtask)
else:
path_ = "{}/politifact/{}/test_evidence_meta_fever_v4a.jsonl".format(data_dir, self.subtask)
print("loading from {}".format(path_))
if self.few_shot and self.fever:
with jsonlines.open(path_) as reader:
obj_list = [obj for obj in reader if obj['label'] != 'NOT ENOUGH INFO']
else:
with jsonlines.open(path_) as reader:
obj_list = [obj for obj in reader]
# if self.fever and self.use_ppl:
# if self.claim_only:
# RESULT_PATH = '/home/nayeon/covid19_factcheck/ppl_results/naacl_fever_test_claim_only.npy'
# else:
# RESULT_PATH = '/home/nayeon/covid19_factcheck/ppl_results/naacl_fever_test_for_bert_cleaned.npy'
# new_obj_list = []
# ppl_results = np.load(RESULT_PATH, allow_pickle=True)
# ppls = [ppl['perplexity'] for ppl in ppl_results]
#
# for obj, ppl in zip(obj_list, ppl_results):
# obj['ppl'] = ppl['perplexity']
# obj['ppl_avg'] = ppl['perplexity'] / np.mean(ppls)
# new_obj_list.append(obj)
# obj_list = new_obj_list
#
# if self.liar and self.use_ppl:
# new_obj_list = []
# RESULT_PATH = '/home/nayeon/covid19_factcheck/ppl_results/liar_test_justification_top1_ppl.npy'
# ppl_results = np.load(RESULT_PATH, allow_pickle=True)
# ppls = [ppl['perplexity'] for ppl in ppl_results]
#
# for obj, ppl in zip(obj_list, ppl_results):
# obj['ppl'] = ppl['perplexity']
# obj['ppl_avg'] = ppl['perplexity'] / np.mean(ppls)
# new_obj_list.append(obj)
# obj_list = new_obj_list
return self._create_examples_with_evidences(obj_list, "test")
else:
if self.fever:
path_ = "{}/naacl/fever_test_for_bert_w_ppl.jsonl".format(data_dir)
# path_ = "{}/naacl/fever_test_for_bert_w_ppl_{}_test.jsonl".format(data_dir, self.cross_validation)
with jsonlines.open(path_) as reader:
obj_list = [obj for obj in reader if obj['evidences'] != [] and obj['evidences'][0][0] != 0]
return self._create_fever_examples(obj_list, "test")
else:
path_ = "{}/politifact/{}/test{}.tsv".format(data_dir, self.subtask, data_source)
print("loading from {}".format(path_))
            return self._create_examples(self._read_tsv(path_), "test")  # path_ already includes data_dir
def get_labels(self):
"""See base class."""
return self.labels
def _create_examples_with_evidences(self, obj_list, set_type, evidence_option='concat'):
examples = []
for (i, obj) in enumerate(obj_list):
try:
guid = "%s-%s" % (set_type, obj['claim_id'])
except:
guid = "%s-%s" % (set_type, obj['id'])
text_a = obj['claim']
if evidence_option == 'concat':
# concat all evidence sentences into one "context"
self.is_t3 = True
if self.is_t3:
text_b = " ".join([e_tuple[0] for e_tuple in obj['evidences'][:3]])
# e_tuple e.g. = ['"building a wall" on the border "will take literally years.', 28.547153555348473]
else:
text_b = obj['evidences'][0][0]
elif evidence_option == 'use_body':
raise NotImplementedError
elif evidence_option == 'separate_evidences':
# create multiple claim-evidence pair from one obj.
# e.g. {claim, [evidence1, evidence2, evidence3]} => {claim, evidence1}, {claim, evidence2}, {claim, evidence3}
# evidence_list = obj['evidences']
raise NotImplementedError
label = obj['label']
if self.is_binary:
# map to 6 label to binary label
label = self.multi2binary[label]
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def _create_fever_examples(self, obj_list, set_type, evidence_option='concat'):
examples = []
if self.use_ppl:
print("using cap")
ppls = [ppl['ppl'] for ppl in obj_list]
cap = np.mean(ppls)+np.std(ppls)
for (i, obj) in enumerate(obj_list):
guid = "%s-%s" % (set_type, obj['id'])
text_a = obj['claim']
label = obj['label']
text_b = None
if self.use_ppl:
# ppl = [obj['ppl']]
# ppl = [obj['big_ppl']]
# ppl = obj['ppl']
# ppl = 1 if ppl >= 500 else (ppl / 500)
# ppl = [float(ppl)]
# if self.use_ppl == 'avg':
# ppl = [float(obj['ppl_avg'])]
# elif self.use_ppl == 'cap':
# ppl = 1 if ppl >= cap else (ppl / cap)
# ppl = [ppl]
# else:
print("using cap")
cap = 192
ppl = 1 if ppl >= cap else (ppl / cap)
ppl = [ppl]
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label, ppl=ppl))
else:
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def _create_examples(self, lines, set_type):
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, line[0])
text_a = line[2]
text_b = None
label = line[1]
if self.is_binary:
# map to 6 label to binary label
label = self.multi2binary[label]
if self.output_mode == 'regression':
label = CLASSIFICATION_TO_REGRESSION[label]
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def load_full_liar_with_ppl(self, data_path, ppl_result_path):
with jsonlines.open(data_path) as reader:
og_objs = [obj for obj in reader]
ppl_results = np.load(ppl_result_path, allow_pickle=True)
all_objs = {
'true': [],
'false': [],
'_': []
}
for obj, ppl in zip(og_objs, ppl_results):
label = self.multi2binary[obj['label']]
if 'fever' in data_path:
claim_id = obj['id']
else:
claim_id = obj['claim_id']
claim = obj['claim']
evs = obj['evidences'][:3]
ppl = ppl['perplexity']
new_objs = {'ppl': ppl, 'label': label, 'claim': claim, 'evidences': evs, 'claim_id': claim_id}
all_objs[label].append(new_objs)
return all_objs
class FusionProcessor(DataProcessor):
def __init__(self, args):
self.politifact_dataset = PolifactProcessor(args)
self.rte_dataset = RteProcessor(args)
self.labels = self.politifact_dataset.get_labels()
def get_train_examples(self, data_dir, data_source=""):
politifact = self.politifact_dataset.get_train_examples(data_dir, data_source)
rte = self.rte_dataset.get_train_examples(data_dir, data_source)
fusion = politifact + rte
return fusion
def get_dev_examples(self, data_dir, data_source=""):
politifact = self.politifact_dataset.get_dev_examples(data_dir, data_source)
rte = self.rte_dataset.get_dev_examples(data_dir, data_source)
fusion = politifact + rte
return fusion
def get_test_examples(self, data_dir, data_source=""):
"""Since we only care about politifact performance, only use politifact-test"""
politifact = self.politifact_dataset.get_test_examples(data_dir, data_source)
# rte = self.rte_dataset.get_test_examples(data_dir, data_source)
# fusion = politifact + rte
return politifact
def get_labels(self):
"""See base class."""
return self.labels
factcheck_processors = {
"rte": RteProcessor,
"scitail": SciTailProcessor,
'gpt2baseline': PolifactProcessor,
'politifact': PolifactProcessor,
'fusion': FusionProcessor
}
| 44.594183
| 146
| 0.580178
|
38ccec60b7cd795d3982ec33d9e5c5f33790a2b9
| 859
|
py
|
Python
|
tests/test_feed.py
|
nacknime-official/freelancehunt-api
|
fddbb45470e644be7fe12766177e05ba6889bf71
|
[
"MIT"
] | 3
|
2020-10-19T06:41:19.000Z
|
2022-01-09T03:33:54.000Z
|
tests/test_feed.py
|
nacknime-official/freelancehunt-api
|
fddbb45470e644be7fe12766177e05ba6889bf71
|
[
"MIT"
] | 5
|
2020-10-20T06:16:58.000Z
|
2021-06-19T00:24:59.000Z
|
tests/test_feed.py
|
nacknime-official/freelancehunt-api
|
fddbb45470e644be7fe12766177e05ba6889bf71
|
[
"MIT"
] | 1
|
2020-10-20T09:21:57.000Z
|
2020-10-20T09:21:57.000Z
|
#!usr/bin/python3
"""#TODO: Write comments."""
from freelancehunt import Feed
from freelancehunt.models.feed import FeedMessage
class Feed:
def __init__(self, token=None, **kwargs):
pass
def update(self):
pass
def read(self):
pass
    @property
def projects(self):
pass
    @property
def contests(self):
pass
def get_new(self):
pass
    @property
def list(self):
pass
class FeedEntity:
def __init__(self, id, message_from, message, created_at, is_new,
project=None, contest=None, **kwargs):
pass
    @property
def project(self):
pass
    @property
def contest(self):
pass
    @classmethod
def de_json(cls, **data):
pass
def __str__(self):
pass
def __repr__(self):
pass
| 15.070175
| 69
| 0.571595
|
f3b38c1828817382df2118db56f53472a342a91e
| 1,305
|
py
|
Python
|
stack/stack.py
|
komratovaanastasiia/programming-2021-19fpl
|
a3a71f760ff27fac35e81f9b692bd930349650c2
|
[
"MIT"
] | null | null | null |
stack/stack.py
|
komratovaanastasiia/programming-2021-19fpl
|
a3a71f760ff27fac35e81f9b692bd930349650c2
|
[
"MIT"
] | null | null | null |
stack/stack.py
|
komratovaanastasiia/programming-2021-19fpl
|
a3a71f760ff27fac35e81f9b692bd930349650c2
|
[
"MIT"
] | null | null | null |
"""
Implementation of the data structure "Stack"
"""
from typing import Iterable
class Stack:
"""
Stack Data Structure
"""
def __init__(self, data: Iterable = None):
if isinstance(data, Iterable):
self.data = list(data)
else:
self.data = []
def push(self, element):
"""
Add the element ‘element’ at the top of stack
:param element: element to add to stack
"""
self.data.append(element)
def pop(self):
"""
Delete the element on the top of stack
"""
if self.empty():
raise ValueError
self.data.pop()
def top(self):
"""
Return the element on the top of stack
:return: the element that is on the top of stack
"""
if self.empty():
raise ValueError
return self.data[-1]
def size(self) -> int:
"""
Return the number of elements in stack
:return: Number of elements in stack
"""
return len(self.data)
def empty(self) -> bool:
"""
Return whether stack is empty or not
:return: True if stack does not contain any elements
False if stack contains elements
"""
return not self.data
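# Usage sketch:
# >>> s = Stack([1, 2])
# >>> s.push(3)
# >>> s.top()
# 3
# >>> s.pop()
# >>> s.size()
# 2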
| 22.5
| 60
| 0.535632
|
6bddf00505cb768c45ad38bd5d989415e1c77307
| 647
|
py
|
Python
|
lib/pylint/test/input/func_w0112.py
|
willemneal/Docky
|
d3504e1671b4a6557468234c263950bfab461ce4
|
[
"MIT"
] | 3
|
2018-11-25T01:09:55.000Z
|
2021-08-24T01:56:36.000Z
|
lib/pylint/test/input/func_w0112.py
|
willemneal/Docky
|
d3504e1671b4a6557468234c263950bfab461ce4
|
[
"MIT"
] | null | null | null |
lib/pylint/test/input/func_w0112.py
|
willemneal/Docky
|
d3504e1671b4a6557468234c263950bfab461ce4
|
[
"MIT"
] | 3
|
2018-11-09T03:38:09.000Z
|
2020-02-24T06:26:10.000Z
|
"""test max branch
"""
# pylint: disable=print-statement
__revision__ = ''
def stupid_function(arg):
"""reallly stupid function"""
if arg == 1:
print 1
elif arg == 2:
print 2
elif arg == 3:
print 3
elif arg == 4:
print 4
elif arg == 5:
print 5
elif arg == 6:
print 6
elif arg == 7:
print 7
elif arg == 8:
print 8
elif arg == 9:
print 9
elif arg == 10:
print 10
else:
if arg < 1:
print 0
else:
print 100
arg = 0
if arg:
print None
else:
print arg
| 17.026316
| 33
| 0.451314
|
57a3900e29b7546a8c6ebdad0a6e0ecdc19a7466
| 1,298
|
py
|
Python
|
openmdao.gui/src/openmdao/gui/test/functional/pageobjects/login.py
|
Kenneth-T-Moore/OpenMDAO-Framework
|
76e0ebbd6f424a03b547ff7b6039dea73d8d44dc
|
[
"Apache-2.0"
] | null | null | null |
openmdao.gui/src/openmdao/gui/test/functional/pageobjects/login.py
|
Kenneth-T-Moore/OpenMDAO-Framework
|
76e0ebbd6f424a03b547ff7b6039dea73d8d44dc
|
[
"Apache-2.0"
] | null | null | null |
openmdao.gui/src/openmdao/gui/test/functional/pageobjects/login.py
|
Kenneth-T-Moore/OpenMDAO-Framework
|
76e0ebbd6f424a03b547ff7b6039dea73d8d44dc
|
[
"Apache-2.0"
] | null | null | null |
from selenium.webdriver.common.by import By
from basepageobject import BasePageObject
from elements import ButtonElement, InputElement
class LoginPage(BasePageObject):
""" There doesn't seem to be a 'login' page anymore... """
# url = '/accounts/login/?next=/'
url = '/'
username = InputElement((By.ID, 'id_username'))
password = InputElement((By.ID, 'id_password'))
submit_button = ButtonElement((By.XPATH,
'/html/body/div/div[2]/form/input'))
def login_successfully(self, username, password):
""" Login using valid parameters. """
self.username = username
self.password = password
self.submit()
from project import ProjectsListPage
return ProjectsListPage(self.browser, self.port)
def login_unsuccessfully(self, username, password):
""" Login using invalid parameters. """
self.username = username
self.password = password
self.submit()
return LoginPage(self.browser, self.port)
def magic_login(self, username, password):
'''Need a way to login to the app directly,
not manually via the GUI'''
pass
def submit(self):
""" Clicks the login button. """
self('submit_button').click()
| 30.904762
| 71
| 0.631741
|
6dd564c7f678c695c5d1ec79f33c0b1b4b8e7a67
| 2,031
|
py
|
Python
|
dataactcore/utils/jsonResponse.py
|
COEJKnight/one
|
6a5f8cd9468ab368019eb2597821b7837f74d9e2
|
[
"CC0-1.0"
] | 1
|
2018-10-29T12:54:44.000Z
|
2018-10-29T12:54:44.000Z
|
dataactcore/utils/jsonResponse.py
|
COEJKnight/one
|
6a5f8cd9468ab368019eb2597821b7837f74d9e2
|
[
"CC0-1.0"
] | null | null | null |
dataactcore/utils/jsonResponse.py
|
COEJKnight/one
|
6a5f8cd9468ab368019eb2597821b7837f74d9e2
|
[
"CC0-1.0"
] | null | null | null |
import json
import logging
import traceback
import flask
from dataactcore.utils.responseException import ResponseException
logger = logging.getLogger(__name__)
class JsonResponse:
""" Used to create an http response object containing JSON """
debugMode = True
@staticmethod
def create(code, dictionary_data):
"""
Creates a JSON response object
if debugMode is enabled errors are added
"""
jsondata = flask.Response()
jsondata.headers["Content-Type"] = "application/json"
jsondata.status_code = code
jsondata.set_data(json.dumps(dictionary_data))
return jsondata
@staticmethod
def error(exception, error_code, **kwargs):
""" Create an http response object for specified error. We assume
we're in an exception context
Args:
exception: Exception to be represented by response object
error_code: Status code to be used in response
kwargs: Extra fields and values to be included in response
Returns:
Http response object containing specified error
"""
response_dict = {}
for key in kwargs:
response_dict[key] = kwargs[key]
trace = traceback.extract_tb(exception.__traceback__, 10)
logger.exception('Route Error')
if JsonResponse.debugMode:
response_dict["message"] = str(exception)
response_dict["errorType"] = str(type(exception))
if isinstance(exception, ResponseException) and exception.wrappedException:
response_dict["wrappedType"] = str(type(exception.wrappedException))
response_dict["wrappedMessage"] = str(exception.wrappedException)
response_dict["trace"] = [str(entry) for entry in trace]
return JsonResponse.create(error_code, response_dict)
else:
response_dict["message"] = "An error has occurred"
return JsonResponse.create(error_code, response_dict)
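# Usage sketch inside a Flask view (route and payload are illustrative):
# @app.route("/status")
# def status():
#     try:
#         return JsonResponse.create(200, {"status": "ok"})
#     except ResponseException as e:
#         return JsonResponse.error(e, 500)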
| 34.423729
| 87
| 0.656819
|
1e27fd99b9b7966939c6cd2029add6d453606285
| 14,679
|
py
|
Python
|
poolings/SAGPool.py
|
PangYunsheng8/CGIPool
|
2cf22019bad510804021f768c6a0d76bf79b62f6
|
[
"MIT"
] | 1
|
2022-01-28T02:57:38.000Z
|
2022-01-28T02:57:38.000Z
|
poolings/SAGPool.py
|
LiuChuang0059/CGIPool
|
2cf22019bad510804021f768c6a0d76bf79b62f6
|
[
"MIT"
] | null | null | null |
poolings/SAGPool.py
|
LiuChuang0059/CGIPool
|
2cf22019bad510804021f768c6a0d76bf79b62f6
|
[
"MIT"
] | 5
|
2021-06-14T04:42:39.000Z
|
2021-12-21T03:39:19.000Z
|
from torch_geometric.nn import GCNConv, GATConv, LEConv, SAGEConv, GraphConv
from torch_geometric.data import Data
from torch_geometric.nn.pool.topk_pool import topk,filter_adj
from torch.nn import Parameter
import torch
from torch_geometric.nn.conv import MessagePassing
from torch_geometric.utils import add_remaining_self_loops
from torch_scatter import scatter
from torch_geometric.utils import softmax, dense_to_sparse, add_remaining_self_loops, remove_self_loops
import torch.nn as nn
import torch.nn.functional as F
from torch_sparse import spspmm, coalesce
from torch_scatter import scatter_add, scatter
from torch_geometric.nn import global_mean_pool as gap, global_max_pool as gmp
class TwoHopNeighborhood(object):
def __call__(self, data, include_self=False):
edge_index = data.edge_index
n = data.num_nodes
value = edge_index.new_ones((edge_index.size(1), ), dtype=torch.float)
index, value = spspmm(edge_index, value, edge_index, value, n, n, n)
value.fill_(0)
if include_self:
edge_index = torch.cat([edge_index, index], dim=1)
else:
edge_index = index
data.edge_index, _ = coalesce(edge_index, None, n, n)
return data
def __repr__(self):
return '{}()'.format(self.__class__.__name__)
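# Usage sketch on a 4-node path graph 0-1-2-3 (indices are illustrative):
# data = Data(edge_index=torch.tensor([[0, 1, 1, 2, 2, 3],
#                                      [1, 0, 2, 1, 3, 2]]), num_nodes=4)
# data = TwoHopNeighborhood()(data)  # edge_index is replaced by the pairs
# reachable in exactly two hops; pass include_self=True to keep the originals.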
# class CalStructureAtt(object):
# def __call__(self, original_x, edge_index, original_batch, new_edge_index):
# sorted_topk_trans = self.cal_transition_matrix(original_x, edge_index, original_batch)
# row, col = new_edge_index
# weights_structure = self.js_divergence(sorted_topk_trans[row], sorted_topk_trans[col])
# return weights_structure
# def compute_rwr(self, adj, c=0.1):
# d = torch.diag(torch.sum(adj, 1))
# d_inv = d.pow(-0.5)
# d_inv[d_inv == float('inf')] = 0
# w_tilda = torch.matmul(d_inv, adj)
# w_tilda = torch.matmul(w_tilda, d_inv)
# q = torch.eye(w_tilda.size(0)) - c * w_tilda
# q_inv = torch.inverse(q)
# e = torch.eye(w_tilda.size(0))
# r = (1 - c) * q_inv
# r = torch.matmul(r, e)
# return r
# def cal_transition_matrix(self, x, edge_index, batch, k=8):
# """
# Compute random walk with restart score
# """
# num_nodes = scatter_add(batch.new_ones(x.size(0)), batch, dim=0)
# shift_cum_num_nodes = torch.cat([num_nodes.new_zeros(1), num_nodes.cumsum(dim=0)[:-1]], dim=0)
# cum_num_nodes = num_nodes.cumsum(dim=0)
# transition_matrix = torch.zeros((x.size(0), torch.max(num_nodes)), dtype=torch.float, device=x.device)
# adj_graph = torch.zeros((x.size(0), x.size(0)))
# row, col = edge_index
# adj_graph[row, col] = 1
# rwr_out = self.compute_rwr(adj_graph)
# for idx_i, idx_j in zip(shift_cum_num_nodes, cum_num_nodes):
# transition_matrix[idx_i: idx_j, :(idx_j - idx_i)] = rwr_out[idx_i: idx_j, idx_i: idx_j]
# sorted_trans, _ = torch.sort(transition_matrix, descending=True)
# sorted_topk_trans = sorted_trans[:, :k]
# return sorted_topk_trans
# def js_divergence(self, P, Q):
# def kl_divergence(P, Q):
# return (P * torch.log2(P / Q)).sum(dim=1)
# P[P == 0] = 1e-15
# Q[Q == 0] = 1e-15
# P = P / P.sum(dim=1)[:, None]
# Q = Q / Q.sum(dim=1)[:, None]
# M = 0.5 * (P + Q)
# js = 0.5 * kl_divergence(P, M) + 0.5 * kl_divergence(Q, M)
# js[js < 0] = 0
# return js
class GraphEncoder(torch.nn.Module):
def __init__(self, in_channels, out_channels):
super(GraphEncoder, self).__init__()
self.in_channels = in_channels
self.conv1 = SAGEConv(in_channels, out_channels)
self.conv2 = SAGEConv(out_channels, in_channels)
def forward(self, x, edge_index, edge_attr=None, batch=None):
x = F.relu(self.conv1(x, edge_index))
x = F.relu(self.conv2(x, edge_index))
return x
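# Note (a hedged sketch, not in the original file): GraphEncoder expands to
# out_channels and maps back to in_channels, so its output is shape-compatible
# with the input features:
#
#   enc = GraphEncoder(in_channels=16, out_channels=32)
#   x_rec = enc(torch.randn(5, 16), torch.tensor([[0, 1], [1, 0]]))  # -> [5, 16]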
class Discriminator(torch.nn.Module):
def __init__(self, in_channels):
super(Discriminator, self).__init__()
self.fc1 = nn.Linear(in_channels * 2, in_channels)
self.fc2 = nn.Linear(in_channels, 1)
def forward(self, x):
x = F.leaky_relu(self.fc1(x), 0.2)
x = F.sigmoid(self.fc2(x))
return x
class FGPool1(torch.nn.Module):
def __init__(self, in_channels, ratio=0.8, non_lin=torch.tanh):
super(FGPool1, self).__init__()
self.in_channels = in_channels
self.ratio = ratio
self.non_lin = non_lin
self.hidden_dim = in_channels
self.transform = GraphConv(in_channels, self.hidden_dim)
self.pp_conv = GraphConv(self.hidden_dim, self.hidden_dim)
self.np_conv = GraphConv(self.hidden_dim, self.hidden_dim)
self.positive_pooling = GraphConv(self.hidden_dim, 1)
self.negative_pooling = GraphConv(self.hidden_dim, 1)
self.discriminator = Discriminator(self.hidden_dim)
self.loss_fn = nn.BCELoss()
def forward(self, x, edge_index, edge_attr=None, batch=None):
if batch is None:
batch = edge_index.new_zeros(x.size(0))
x_transform = F.leaky_relu(self.transform(x, edge_index), 0.2)
x_tp = F.leaky_relu(self.pp_conv(x, edge_index), 0.2)
x_tn = F.leaky_relu(self.np_conv(x, edge_index), 0.2)
s_pp = self.positive_pooling(x_tp, edge_index).squeeze()
s_np = self.negative_pooling(x_tn, edge_index).squeeze()
        # ratio=1 keeps every node (sorted by score): the positive/negative
        # views re-weight the whole graph rather than selecting a subset
        perm_positive = topk(s_pp, 1, batch)
        perm_negative = topk(s_np, 1, batch)
x_pp = x_transform[perm_positive] * self.non_lin(s_pp[perm_positive]).view(-1, 1)
x_np = x_transform[perm_negative] * self.non_lin(s_np[perm_negative]).view(-1, 1)
#f_pp, _ = filter_adj(edge_index, edge_attr, perm_positive, num_nodes=s_pp.size(0))
#f_np, _ = filter_adj(edge_index, edge_attr, perm_negative, num_nodes=s_np.size(0))
#x_pp = F.leaky_relu(self.pp_conv(x_pp, f_pp), 0.2)
#x_np = F.leaky_relu(self.np_conv(x_np, f_np), 0.2)
x_pp_readout = gap(x_pp, batch[perm_positive])
x_np_readout = gap(x_np, batch[perm_negative])
x_readout = gap(x_transform, batch)
positive_pair = torch.cat([x_pp_readout, x_readout], dim=1)
negative_pair = torch.cat([x_np_readout, x_readout], dim=1)
        # real/fake labels for the discriminator; use x's device rather than
        # hard-coding CUDA so the module also runs on CPU
        real = torch.ones(positive_pair.shape[0], device=x.device)
        fake = torch.zeros(negative_pair.shape[0], device=x.device)
real_loss = self.loss_fn(self.discriminator(positive_pair), real)
fake_loss = self.loss_fn(self.discriminator(negative_pair), fake)
discrimination_loss = (real_loss + fake_loss) / 2
score = (s_pp - s_np)#.squeeze()
perm = topk(score, self.ratio, batch)
x = x_transform[perm] * self.non_lin(score[perm]).view(-1, 1)
batch = batch[perm]
filter_edge_index, filter_edge_attr = filter_adj(edge_index, edge_attr, perm, num_nodes=score.size(0))
return x, filter_edge_index, filter_edge_attr, batch, perm, discrimination_loss
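# Hedged usage sketch (illustrative; `alpha` is an assumed loss weight, not a
# name from this file): FGPool1 returns an auxiliary GAN-style discrimination
# loss next to the pooled graph, which a training loop would add to the task
# loss:
#
#   pool = FGPool1(in_channels=64, ratio=0.5)
#   x, edge_index, edge_attr, batch, perm, disc_loss = pool(x, edge_index, batch=batch)
#   loss = task_loss + alpha * disc_loss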
class RedConv(torch.nn.Module):
def __init__(self, in_channels, ratio, negative_slope=0.2, lamb=0.1):
super(RedConv, self).__init__()
self.in_channels = in_channels
self.ratio = ratio
self.negative_slope = negative_slope
self.lamb = lamb
self.hidden_channels = in_channels
self.att = nn.Linear(2 * in_channels, 1)
self.gnn_score = GraphConv(self.in_channels, 1)
self.transform_neighbor = GCNConv(self.in_channels, self.hidden_channels)
self.key_linear = torch.nn.Linear(self.hidden_channels, 1)
self.query_linear = torch.nn.Linear(self.hidden_channels, 1)
def reset_parameters(self):
self.att.reset_parameters()
self.gnn_score.reset_parameters()
def forward(self, x, edge_index, edge_attr=None, batch=None):
if batch is None:
batch = edge_index.new_zeros(x.size(0))
x = x.unsqueeze(-1) if x.dim() == 1 else x
N = x.size(0)
edge_index, _ = add_remaining_self_loops(edge_index, None, fill_value=1, num_nodes=N)
x_transform = self.transform_neighbor(x=x, edge_index=edge_index, edge_weight=edge_attr)
x_neighbor = x_transform[edge_index[1]]
# build neighbor
x_key_score = F.leaky_relu(self.key_linear(x_neighbor))
x_key_score = softmax(x_key_score, edge_index[0])
x_reweight_key = x_neighbor * x_key_score
x_reweight_key = scatter(x_reweight_key, edge_index[0], dim=0, reduce='add')
x_query_score = F.leaky_relu(self.query_linear(x_neighbor))
x_query_score = softmax(x_query_score, edge_index[0])
x_reweight_query = x_neighbor * x_query_score
x_reweight_query = scatter(x_reweight_query, edge_index[0], dim=0, reduce='add')
        # L1 reconstruction errors of the attention-reweighted neighbourhood
        # against the node itself (key) and its neighbours (query)
        key_error = torch.sum(torch.abs(x_reweight_key[edge_index[0]] - x_transform[edge_index[0]]), dim=1)
        query_error = torch.sum(torch.abs(x_reweight_query[edge_index[0]] - x_transform[edge_index[1]]), dim=1)
        red_score = key_error - query_error
# score = self.att(torch.cat([x[edge_index[0]], x[edge_index[1]]], dim=-1)).view(-1)
# score = F.leaky_relu(score, self.negative_slope)
fitness = self.gnn_score(x, edge_index).sigmoid().view(-1)
        # zero out the entries for the self-loops appended by
        # add_remaining_self_loops (assuming they sit at the end of edge_index)
        red_score[-N:] = 0
score_sum = scatter(red_score, edge_index[0], dim=0, reduce='add')
final_score = fitness - self.lamb * score_sum
return final_score
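# Hedged note (illustrative, not in the original file): RedConv scores node i as
#     final_score[i] = fitness[i] - lamb * sum_j red_score[i, j]
# i.e. a GraphConv-based fitness penalised by the aggregated redundancy of
# i's neighbourhood, so the subsequent topk() prefers informative,
# non-redundant nodes.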
class SAGPool(torch.nn.Module):
def __init__(self, in_channels, ratio=0.8, Conv=RedConv, non_lin=torch.tanh):
super(SAGPool, self).__init__()
self.in_channels = in_channels
self.ratio = ratio
        # Conv defaults to RedConv, whose __init__ takes (in_channels, ratio);
        # RedConv.forward never reads self.ratio, so 1 here is a placeholder
        self.score_layer = Conv(in_channels, 1)
self.non_lin = non_lin
# refining layer
# self.att_sl = Parameter(torch.Tensor(1, self.in_channels * 2))
# nn.init.xavier_uniform_(self.att_sl.data)
# self.neighbor_augment = TwoHopNeighborhood()
# self.cal_struc_att = CalStructureAtt()
def forward(self, x, edge_index, edge_attr=None, batch=None):
if batch is None:
batch = edge_index.new_zeros(x.size(0))
original_x, original_edge_index, original_batch = x, edge_index, batch
score = self.score_layer(x, edge_index, edge_attr, batch).squeeze()
perm = topk(score, self.ratio, batch)
x = x[perm] * self.non_lin(score[perm]).view(-1, 1)
batch = batch[perm]
filter_edge_index, filter_edge_attr = filter_adj(edge_index, edge_attr, perm, num_nodes=score.size(0))
return x, filter_edge_index, filter_edge_attr, batch, perm
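    # Hedged note (illustrative): topk keeps ceil(ratio * N) nodes per graph
    # in `batch`, and filter_adj relabels the surviving edges onto the kept
    # nodes; `perm` indexes the kept nodes in the original node ordering.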
# def cal_nhop_neighbor(self, x, edge_index, edge_attr, perm, n_hop=2):
# if edge_attr is None:
# edge_attr = torch.ones((edge_index.size(1), ), dtype=torch.float, device=edge_index.device)
# hop_data = Data(x=x, edge_index=edge_index, edge_attr=edge_attr)
# hop_data = self.neighbor_augment(hop_data, include_self=False)
# for _ in range(n_hop - 2):
# hop_data = self.neighbor_augment(hop_data, include_self=True)
# hop_edge_index, hop_edge_attr = hop_data.edge_index, hop_data.edge_attr
# new_edge_index, new_edge_attr = self.remove_adj(hop_edge_index, None, perm, num_nodes=x.size(0))
# new_edge_index, new_edge_attr = remove_self_loops(new_edge_index, new_edge_attr)
# return new_edge_index, new_edge_attr
# def remove_adj(self, edge_index, edge_attr, perm, num_nodes=None):
# mask = perm.new_full((num_nodes, ), -1)
# mask[perm] = perm
# row, col = edge_index
# row, col = mask[row], mask[col]
# mask = (row >= 0) & (col >= 0)
# row, col = row[mask], col[mask]
# if edge_attr is not None:
# edge_attr = edge_attr[mask]
# return torch.stack([row, col], dim=0), edge_attr
# else:
# # get n-hop neighbor exclude 1-hop
# new_edge_index, new_edge_attr = self.cal_nhop_neighbor(original_x, original_edge_index, edge_attr, perm)
# new_edge_index, _ = torch.sort(new_edge_index, dim=0)
# new_edge_index, _ = coalesce(new_edge_index, None, perm.size(0), perm.size(0))
# row, col = new_edge_index
# new_batch = original_batch[row]
# if new_batch.size(0) == 0:
# filter_edge_index, filter_edge_attr = filter_adj(edge_index, edge_attr, perm, num_nodes=score.size(0))
# return x, filter_edge_index, filter_edge_attr, batch, perm
# # attentive edge score
# weights_feature = (torch.cat([original_x[row], original_x[col]], dim=1) * self.att_sl).sum(dim=-1)
# weights_feature = F.leaky_relu(weights_feature, 0.2)
# weights_structure = 1 - self.cal_struc_att(original_x, edge_index, original_batch, new_edge_index)
# weights = weights_feature + 1 * weights_structure
# # sign = LBSign.apply
# # weights = sign(weights)
# # select topk edges
# print(weights)
# print(new_batch)
# perm_edge = topk(weights, 0.7, new_batch)
# print(perm_edge)
# refine_edge_index = new_edge_index[:, perm_edge]
# index = torch.LongTensor([1, 0])
# refine_reverse_index = torch.zeros_like(refine_edge_index)
# refine_reverse_index[index] = refine_edge_index
# refine_edge_index = torch.cat([refine_edge_index, refine_reverse_index], dim=1)
# # combine 1-hop edges and topk edges
# final_edge_index = torch.cat([original_edge_index, refine_edge_index], dim=1)
# final_edge_index, _ = filter_adj(final_edge_index, _, perm, num_nodes=original_x.size(0))
# final_edge_index, _ = coalesce(final_edge_index, None, perm.size(0), perm.size(0))
# return x, final_edge_index, None, batch, perm
# class LBSign(torch.autograd.Function):
# @staticmethod
# def forward(ctx, input):
# return torch.where(input > 0, 1, 0)
# @staticmethod
# def backward(ctx, grad_output):
# return grad_output.clamp_(0, 1)
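# --- Hedged, runnable sketch added for illustration (not in the original
# file): pool a random 10-node graph with SAGPool.
if __name__ == "__main__":
    x = torch.randn(10, 8)
    edge_index = torch.randint(0, 10, (2, 40))
    pool = SAGPool(in_channels=8, ratio=0.5)
    x_p, ei_p, ea_p, batch_p, perm = pool(x, edge_index)
    print(x_p.shape)  # roughly (5, 8): about half of the nodes survive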
| 42.921053
| 121
| 0.630356
|
74b6d993ec38fe9856920e124776427a09fb070f
| 3,530
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/chlamydiasprubis.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/chlamydiasprubis.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/chlamydiasprubis.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Chlamydia sp. Rubis.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def ChlamydiaSpRubis(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Chlamydia sp. Rubis graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
    directed: bool = False
        Whether to load the graph as directed or undirected.
        By default false.
    preprocess: bool = True
        Whether to preprocess the graph to be loaded in
        optimal time and memory.
    load_nodes: bool = True
        Whether to load the nodes vocabulary or treat the nodes
        simply as a numeric range.
    verbose: int = 2
        Whether to show loading bars during the retrieval and building
        of the graph.
    cache: bool = True
        Whether to use cache, i.e. download files only once
        and preprocess them only once.
    cache_path: str = "graphs/string"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.0
- homology.v11.5
- physical.links.v11.0
- physical.links.v11.5
- links.v11.0
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of the Chlamydia sp. Rubis graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="ChlamydiaSpRubis",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
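# Hedged usage sketch (illustrative; not part of the generated module). The
# call below triggers the download on first use and caches it afterwards:
#
#   from ensmallen.datasets.string import ChlamydiaSpRubis
#   graph = ChlamydiaSpRubis(version="links.v11.5")
#   print(graph)  # an ensmallen Graph object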
| 32.685185
| 223
| 0.674221
|
e7aa9ba91b88d8e09a3b52be4296da2cdd04d142
| 1,192
|
py
|
Python
|
trash/really weird bug when separating files/corpus.py
|
mounir4023/MSA_POS_Tagger
|
d34cace777504a4121b675048ddbaa12297cc20b
|
[
"MIT"
] | 3
|
2020-02-04T19:01:30.000Z
|
2021-07-05T18:09:36.000Z
|
trash/really weird bug when separating files/corpus.py
|
mounir4023/MSA_POS_Tagger
|
d34cace777504a4121b675048ddbaa12297cc20b
|
[
"MIT"
] | null | null | null |
trash/really weird bug when separating files/corpus.py
|
mounir4023/MSA_POS_Tagger
|
d34cace777504a4121b675048ddbaa12297cc20b
|
[
"MIT"
] | null | null | null |
from lxml import etree
import random
import re
import nltk
##### preparing data into dict
def get_data(path):
    # parse the XML corpus at the given path
    root = etree.parse(path)
sents = [ ]
# xml to dict for each sentence
for s in root.xpath("/CORPUS/Phrase"):
tokens = re.sub(r'\s+',' ',s[2].text)
tags = re.sub(r'\s+',' ',s[3].text)
sents.append({
'num':s[0].text,
'len':s[5].text,
'raw':s[1].text,
'tokens':tokens.split(" "),
'tags':tags.split(" "),
})
    # cleaning empty tokens and tags (build filtered lists rather than
    # removing items from a list while iterating over it)
    for s in sents:
        s["tokens"] = [t for t in s["tokens"] if t not in ('', ' ')]
        s["tags"] = [t for t in s["tags"] if t not in ('', ' ')]
    # removing the beginning NULL tag
for s in sents:
s["tags"][0:2] = s["tags"][1:2]
if len(s["tags"]) == len(s["tokens"]):
s["len"] = len(s["tags"])
else:
print("LENGTH ERROR IN SENTENCE: ",s["num"])
return sents
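# Hedged usage sketch (illustrative): load the corpus and inspect the first
# sentence's token/tag pairs.
#
#   sents = get_data("corpus.xml")
#   print(sents[0]["raw"])
#   print(list(zip(sents[0]["tokens"], sents[0]["tags"])))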
##### preparing lexicon into dict
# def get_lexicon( path ):
| 21.285714
| 56
| 0.453859
|
f43d41773d077e59a3d34f3e24a48f313c9ef4b2
| 68,735
|
py
|
Python
|
gitee/api/gists_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
gitee/api/gists_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
gitee/api/gists_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from gitee.api_client import ApiClient
class GistsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
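    # Hedged usage sketch (illustrative, not generated code; the token value
    # is a placeholder):
    #
    #   api = GistsApi()
    #   gists = api.get_v5_gists(access_token="<token>", per_page=20)
    #   thread = api.get_v5_gists(async_req=True)  # asynchronous variant
    #   gists = thread.get()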
def delete_v5_gists_gist_id_comments_id(self, gist_id, id, **kwargs): # noqa: E501
"""删除代码片段的评论 # noqa: E501
删除代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_v5_gists_gist_id_comments_id(gist_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str gist_id: ID of the gist (required)
        :param int id: ID of the comment (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_v5_gists_gist_id_comments_id_with_http_info(gist_id, id, **kwargs) # noqa: E501
else:
(data) = self.delete_v5_gists_gist_id_comments_id_with_http_info(gist_id, id, **kwargs) # noqa: E501
return data
def delete_v5_gists_gist_id_comments_id_with_http_info(self, gist_id, id, **kwargs): # noqa: E501
"""删除代码片段的评论 # noqa: E501
删除代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_v5_gists_gist_id_comments_id_with_http_info(gist_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str gist_id: ID of the gist (required)
        :param int id: ID of the comment (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gist_id', 'id', 'access_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_v5_gists_gist_id_comments_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gist_id' is set
if ('gist_id' not in params or
params['gist_id'] is None):
raise ValueError(
"Missing the required parameter `gist_id` when calling `delete_v5_gists_gist_id_comments_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError(
"Missing the required parameter `id` when calling `delete_v5_gists_gist_id_comments_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gist_id' in params:
path_params['gist_id'] = params['gist_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{gist_id}/comments/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_v5_gists_id(self, id, **kwargs): # noqa: E501
"""删除指定代码片段 # noqa: E501
删除指定代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_v5_gists_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_v5_gists_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_v5_gists_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_v5_gists_id_with_http_info(self, id, **kwargs): # noqa: E501
"""删除指定代码片段 # noqa: E501
删除指定代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_v5_gists_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'access_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_v5_gists_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_v5_gists_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_v5_gists_id_star(self, id, **kwargs): # noqa: E501
"""取消Star代码片段 # noqa: E501
取消Star代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_v5_gists_id_star(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_v5_gists_id_star_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_v5_gists_id_star_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_v5_gists_id_star_with_http_info(self, id, **kwargs): # noqa: E501
"""取消Star代码片段 # noqa: E501
取消Star代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_v5_gists_id_star_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'access_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_v5_gists_id_star" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_v5_gists_id_star`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}/star', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists(self, **kwargs): # noqa: E501
"""获取代码片段 # noqa: E501
获取代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists(async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str access_token: user access token
        :param str since: starting update time, in ISO 8601 format
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: list[Code]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_with_http_info(**kwargs) # noqa: E501
return data
def get_v5_gists_with_http_info(self, **kwargs): # noqa: E501
"""获取代码片段 # noqa: E501
获取代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str access_token: user access token
        :param str since: starting update time, in ISO 8601 format
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: list[Code]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'since', 'page', 'per_page'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
if 'since' in params:
query_params.append(('since', params['since'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Code]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists_gist_id_comments(self, gist_id, **kwargs): # noqa: E501
"""获取代码片段的评论 # noqa: E501
获取代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_gist_id_comments(gist_id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str gist_id: ID of the gist (required)
        :param str access_token: user access token
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: list[CodeComment]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_gist_id_comments_with_http_info(gist_id, **kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_gist_id_comments_with_http_info(gist_id, **kwargs) # noqa: E501
return data
def get_v5_gists_gist_id_comments_with_http_info(self, gist_id, **kwargs): # noqa: E501
"""获取代码片段的评论 # noqa: E501
获取代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_gist_id_comments_with_http_info(gist_id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str gist_id: ID of the gist (required)
        :param str access_token: user access token
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: list[CodeComment]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gist_id', 'access_token', 'page', 'per_page'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists_gist_id_comments" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gist_id' is set
if ('gist_id' not in params or
params['gist_id'] is None):
raise ValueError(
"Missing the required parameter `gist_id` when calling `get_v5_gists_gist_id_comments`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gist_id' in params:
path_params['gist_id'] = params['gist_id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{gist_id}/comments', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[CodeComment]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists_gist_id_comments_id(self, gist_id, id, **kwargs): # noqa: E501
"""获取单条代码片段的评论 # noqa: E501
获取单条代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_gist_id_comments_id(gist_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str gist_id: ID of the gist (required)
        :param int id: ID of the comment (required)
        :param str access_token: user access token
:return: CodeComment
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_gist_id_comments_id_with_http_info(gist_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_gist_id_comments_id_with_http_info(gist_id, id, **kwargs) # noqa: E501
return data
def get_v5_gists_gist_id_comments_id_with_http_info(self, gist_id, id, **kwargs): # noqa: E501
"""获取单条代码片段的评论 # noqa: E501
获取单条代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_gist_id_comments_id_with_http_info(gist_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str gist_id: ID of the gist (required)
        :param int id: ID of the comment (required)
        :param str access_token: user access token
:return: CodeComment
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gist_id', 'id', 'access_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists_gist_id_comments_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gist_id' is set
if ('gist_id' not in params or
params['gist_id'] is None):
raise ValueError(
"Missing the required parameter `gist_id` when calling `get_v5_gists_gist_id_comments_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError(
"Missing the required parameter `id` when calling `get_v5_gists_gist_id_comments_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gist_id' in params:
path_params['gist_id'] = params['gist_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{gist_id}/comments/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CodeComment', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists_id(self, id, **kwargs): # noqa: E501
"""获取单条代码片段 # noqa: E501
获取单条代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: CodeForksHistory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_v5_gists_id_with_http_info(self, id, **kwargs): # noqa: E501
"""获取单条代码片段 # noqa: E501
获取单条代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: CodeForksHistory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'access_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_v5_gists_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CodeForksHistory', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists_id_commits(self, id, **kwargs): # noqa: E501
"""获取代码片段的commit # noqa: E501
获取代码片段的commit # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id_commits(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: CodeForksHistory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_id_commits_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_id_commits_with_http_info(id, **kwargs) # noqa: E501
return data
def get_v5_gists_id_commits_with_http_info(self, id, **kwargs): # noqa: E501
"""获取代码片段的commit # noqa: E501
获取代码片段的commit # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id_commits_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: CodeForksHistory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'access_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists_id_commits" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_v5_gists_id_commits`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}/commits', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CodeForksHistory', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists_id_forks(self, id, **kwargs): # noqa: E501
"""获取 Fork 了指定代码片段的列表 # noqa: E501
获取 Fork 了指定代码片段的列表 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id_forks(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: CodeForks
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_id_forks_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_id_forks_with_http_info(id, **kwargs) # noqa: E501
return data
def get_v5_gists_id_forks_with_http_info(self, id, **kwargs): # noqa: E501
"""获取 Fork 了指定代码片段的列表 # noqa: E501
获取 Fork 了指定代码片段的列表 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id_forks_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: CodeForks
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'access_token', 'page', 'per_page'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists_id_forks" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_v5_gists_id_forks`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}/forks', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CodeForks', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists_id_star(self, id, **kwargs): # noqa: E501
"""判断代码片段是否已Star # noqa: E501
判断代码片段是否已Star # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id_star(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_id_star_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_id_star_with_http_info(id, **kwargs) # noqa: E501
return data
def get_v5_gists_id_star_with_http_info(self, id, **kwargs): # noqa: E501
"""判断代码片段是否已Star # noqa: E501
判断代码片段是否已Star # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_id_star_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
        :param str access_token: user access token
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'access_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists_id_star" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_v5_gists_id_star`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}/star', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_gists_starred(self, **kwargs): # noqa: E501
"""获取用户Star的代码片段 # noqa: E501
获取用户Star的代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_starred(async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str access_token: user access token
        :param str since: starting update time, in ISO 8601 format
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: list[Code]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_gists_starred_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_v5_gists_starred_with_http_info(**kwargs) # noqa: E501
return data
def get_v5_gists_starred_with_http_info(self, **kwargs): # noqa: E501
"""获取用户Star的代码片段 # noqa: E501
获取用户Star的代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_gists_starred_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str access_token: user access token
        :param str since: starting update time, in ISO 8601 format
        :param int page: current page number
        :param int per_page: number of results per page (maximum 100)
:return: list[Code]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'since', 'page', 'per_page'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_gists_starred" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
if 'since' in params:
query_params.append(('since', params['since'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/starred', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Code]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_v5_gists_gist_id_comments_id(self, access_token, body, gist_id, id, **kwargs): # noqa: E501
"""修改代码片段的评论 # noqa: E501
修改代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_v5_gists_gist_id_comments_id(access_token, body, gist_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str access_token: (required)
:param str body: (required)
        :param str gist_id: ID of the gist (required)
        :param int id: ID of the comment (required)
:return: CodeComment
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_v5_gists_gist_id_comments_id_with_http_info(access_token, body, gist_id, id,
**kwargs) # noqa: E501
else:
(data) = self.patch_v5_gists_gist_id_comments_id_with_http_info(access_token, body, gist_id, id,
**kwargs) # noqa: E501
return data
def patch_v5_gists_gist_id_comments_id_with_http_info(self, access_token, body, gist_id, id,
**kwargs): # noqa: E501
"""修改代码片段的评论 # noqa: E501
修改代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_v5_gists_gist_id_comments_id_with_http_info(access_token, body, gist_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str access_token: (required)
:param str body: (required)
        :param str gist_id: ID of the gist (required)
        :param int id: ID of the comment (required)
:return: CodeComment
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'body', 'gist_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_v5_gists_gist_id_comments_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params or
params['access_token'] is None):
raise ValueError(
"Missing the required parameter `access_token` when calling `patch_v5_gists_gist_id_comments_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError(
"Missing the required parameter `body` when calling `patch_v5_gists_gist_id_comments_id`") # noqa: E501
# verify the required parameter 'gist_id' is set
if ('gist_id' not in params or
params['gist_id'] is None):
raise ValueError(
"Missing the required parameter `gist_id` when calling `patch_v5_gists_gist_id_comments_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError(
"Missing the required parameter `id` when calling `patch_v5_gists_gist_id_comments_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gist_id' in params:
path_params['gist_id'] = params['gist_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'access_token' in params:
form_params.append(('access_token', params['access_token'])) # noqa: E501
if 'body' in params:
form_params.append(('body', params['body'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{gist_id}/comments/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CodeComment', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_v5_gists_id(self, id, **kwargs): # noqa: E501
"""修改代码片段 # noqa: E501
修改代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_v5_gists_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
:param str access_token:
:param object files:
:param str description:
:param bool public:
:return: CodeForksHistory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_v5_gists_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.patch_v5_gists_id_with_http_info(id, **kwargs) # noqa: E501
return data
def patch_v5_gists_id_with_http_info(self, id, **kwargs): # noqa: E501
"""修改代码片段 # noqa: E501
修改代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_v5_gists_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the gist (required)
:param str access_token:
:param object files:
:param str description:
:param bool public:
:return: CodeForksHistory
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'access_token', 'files', 'description', 'public'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_v5_gists_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_v5_gists_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'access_token' in params:
form_params.append(('access_token', params['access_token'])) # noqa: E501
if 'files' in params:
form_params.append(('files', params['files'])) # noqa: E501
if 'description' in params:
form_params.append(('description', params['description'])) # noqa: E501
if 'public' in params:
form_params.append(('public', params['public'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CodeForksHistory', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_v5_gists(self, body, **kwargs): # noqa: E501
"""创建代码片段 # noqa: E501
创建代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_v5_gists(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Body51 body: (required)
:return: list[CodeForksHistory]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_v5_gists_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.post_v5_gists_with_http_info(body, **kwargs) # noqa: E501
return data
def post_v5_gists_with_http_info(self, body, **kwargs): # noqa: E501
"""创建代码片段 # noqa: E501
创建代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_v5_gists_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Body51 body: (required)
:return: list[CodeForksHistory]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_v5_gists" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_v5_gists`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[CodeForksHistory]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_v5_gists_gist_id_comments(self, body, gist_id, **kwargs): # noqa: E501
"""增加代码片段的评论 # noqa: E501
增加代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_v5_gists_gist_id_comments(body, gist_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Body52 body: (required)
        :param str gist_id: ID of the code snippet (required)
:return: CodeComment
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_v5_gists_gist_id_comments_with_http_info(body, gist_id, **kwargs) # noqa: E501
else:
(data) = self.post_v5_gists_gist_id_comments_with_http_info(body, gist_id, **kwargs) # noqa: E501
return data
def post_v5_gists_gist_id_comments_with_http_info(self, body, gist_id, **kwargs): # noqa: E501
"""增加代码片段的评论 # noqa: E501
增加代码片段的评论 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_v5_gists_gist_id_comments_with_http_info(body, gist_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Body52 body: (required)
        :param str gist_id: ID of the code snippet (required)
:return: CodeComment
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'gist_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_v5_gists_gist_id_comments" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError(
"Missing the required parameter `body` when calling `post_v5_gists_gist_id_comments`") # noqa: E501
# verify the required parameter 'gist_id' is set
if ('gist_id' not in params or
params['gist_id'] is None):
raise ValueError(
"Missing the required parameter `gist_id` when calling `post_v5_gists_gist_id_comments`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gist_id' in params:
path_params['gist_id'] = params['gist_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{gist_id}/comments', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CodeComment', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_v5_gists_id_forks(self, id, **kwargs): # noqa: E501
"""Fork代码片段 # noqa: E501
Fork代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_v5_gists_id_forks(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the code snippet (required)
:param Body56 body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_v5_gists_id_forks_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.post_v5_gists_id_forks_with_http_info(id, **kwargs) # noqa: E501
return data
def post_v5_gists_id_forks_with_http_info(self, id, **kwargs): # noqa: E501
"""Fork代码片段 # noqa: E501
Fork代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_v5_gists_id_forks_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the code snippet (required)
:param Body56 body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_v5_gists_id_forks" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `post_v5_gists_id_forks`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}/forks', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def put_v5_gists_id_star(self, id, **kwargs): # noqa: E501
"""Star代码片段 # noqa: E501
Star代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_v5_gists_id_star(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the code snippet (required)
:param Body55 body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_v5_gists_id_star_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.put_v5_gists_id_star_with_http_info(id, **kwargs) # noqa: E501
return data
def put_v5_gists_id_star_with_http_info(self, id, **kwargs): # noqa: E501
"""Star代码片段 # noqa: E501
Star代码片段 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_v5_gists_id_star_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str id: ID of the code snippet (required)
:param Body55 body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_v5_gists_id_star" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `put_v5_gists_id_star`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/gists/{id}/star', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
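# A minimal usage sketch for the gist endpoints above. The package and class
# names in the imports are assumptions about this generated SDK's layout
# (swagger-codegen output), so adjust them to the real package:
#
#   from gitee.api_client import ApiClient      # assumed module path
#   from gitee.apis.gists_api import GistsApi   # assumed module/class name
#
#   api = GistsApi(ApiClient())
#   # Synchronous call: returns the parsed CodeForksHistory model.
#   history = api.patch_v5_gists_id("gist123", access_token="TOKEN",
#                                   description="updated description")
#   # Asynchronous call: returns a thread; .get() blocks for the result.
#   thread = api.put_v5_gists_id_star("gist123", async_req=True)
#   thread.get()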
| 38.313824
| 128
| 0.592667
|
841e1b4ea059f5fa1805d43217f2334f808d0f31
| 322
|
py
|
Python
|
sound.py
|
AileenBanshee/MicroPython
|
0f51759851d601780b6eac9d2201bf3e02022638
|
[
"CC0-1.0"
] | null | null | null |
sound.py
|
AileenBanshee/MicroPython
|
0f51759851d601780b6eac9d2201bf3e02022638
|
[
"CC0-1.0"
] | null | null | null |
sound.py
|
AileenBanshee/MicroPython
|
0f51759851d601780b6eac9d2201bf3e02022638
|
[
"CC0-1.0"
] | null | null | null |
from machine import Pin, ADC
import time

# sound_sensor functionality (sensor big sound):
adc = ADC(Pin(26))                      # analog output of the sound sensor
sound = Pin(27, Pin.IN, Pin.PULL_DOWN)  # digital output of the sound sensor

while True:
    # Count how many of 10000 digital samples read HIGH; a crude loudness measure.
    q = 0
    for i in range(10000):
        q = q + sound.value()
    print(q)
    #print(snd_secret)
    #time.sleep(1)
| 18.941176
| 48
| 0.627329
|
ce718f2abd81a257988bd5841be9afd57588e9b0
| 1,246
|
py
|
Python
|
src/tests/test_e2e_install.py
|
paul-maidment/assisted-test-infra
|
2b2c86e37a1a7c6b3c25b3899e6d87d6313333c4
|
[
"Apache-2.0"
] | null | null | null |
src/tests/test_e2e_install.py
|
paul-maidment/assisted-test-infra
|
2b2c86e37a1a7c6b3c25b3899e6d87d6313333c4
|
[
"Apache-2.0"
] | null | null | null |
src/tests/test_e2e_install.py
|
paul-maidment/assisted-test-infra
|
2b2c86e37a1a7c6b3c25b3899e6d87d6313333c4
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from junit_report import JunitTestSuite
import consts
from tests.base_test import BaseTest
from tests.conftest import get_available_openshift_versions, get_supported_operators
class TestInstall(BaseTest):
@JunitTestSuite()
@pytest.mark.parametrize("openshift_version", get_available_openshift_versions())
def test_install(self, cluster, openshift_version):
cluster.prepare_for_installation()
cluster.start_install_and_wait_for_installed()
@JunitTestSuite()
@pytest.mark.parametrize("openshift_version", get_available_openshift_versions())
def test_infra_env_install(self, infra_env, openshift_version):
infra_env.prepare_for_installation()
@JunitTestSuite()
@pytest.mark.parametrize("network_type", [consts.NetworkType.OpenShiftSDN, consts.NetworkType.OVNKubernetes])
def test_networking(self, cluster, network_type):
cluster.prepare_for_installation()
cluster.start_install_and_wait_for_installed()
@JunitTestSuite()
@pytest.mark.parametrize("olm_operators", get_supported_operators())
def test_olm_operator(self, cluster, olm_operators):
cluster.prepare_for_installation()
cluster.start_install_and_wait_for_installed()
| 38.9375
| 113
| 0.784109
|
72a3b7ac51fcd592cea11eb8491ba340b108c076
| 2,257
|
py
|
Python
|
manim/utils/caching.py
|
naveen521kk/manimce-deprecated
|
52a0cb0e49f79cb48f78b51c724f049d522fc465
|
[
"MIT"
] | 1
|
2021-05-06T13:05:01.000Z
|
2021-05-06T13:05:01.000Z
|
manim/utils/caching.py
|
naveen521kk/manimce-deprecated
|
52a0cb0e49f79cb48f78b51c724f049d522fc465
|
[
"MIT"
] | 1
|
2020-11-01T03:27:09.000Z
|
2020-11-01T03:27:09.000Z
|
manim/utils/caching.py
|
naveen521kk/manimce-deprecated
|
52a0cb0e49f79cb48f78b51c724f049d522fc465
|
[
"MIT"
] | null | null | null |
from .. import config, logger
from ..utils.hashing import get_hash_from_play_call
def handle_caching_play(func):
"""Decorator that returns a wrapped version of func that will compute
the hash of the play invocation.
The returned function will act according to the computed hash: either skip
the animation because it's already cached, or let the invoked function
play normally.
Parameters
----------
func : Callable[[...], None]
        The play-like function whose result has to be written to the video file
        stream. Takes the same parameters as `scene.play`.
"""
def wrapper(self, scene, *args, **kwargs):
self.skip_animations = self.original_skipping_status
self.update_skipping_status()
animations = scene.compile_play_args_to_animation_list(*args, **kwargs)
scene.add_mobjects_from_animations(animations)
if self.skip_animations:
logger.debug(f"Skipping animation {self.num_plays}")
func(self, scene, *args, **kwargs)
# If the animation is skipped, we mark its hash as None.
# When sceneFileWriter will start combining partial movie files, it won't take into account None hashes.
self.animations_hashes.append(None)
self.file_writer.add_partial_movie_file(None)
return
if not config["disable_caching"]:
mobjects_on_scene = scene.mobjects
hash_play = get_hash_from_play_call(
self, self.camera, animations, mobjects_on_scene
)
if self.file_writer.is_already_cached(hash_play):
logger.info(
f"Animation {self.num_plays} : Using cached data (hash : %(hash_play)s)",
{"hash_play": hash_play},
)
self.skip_animations = True
else:
hash_play = "uncached_{:05}".format(self.num_plays)
self.animations_hashes.append(hash_play)
self.file_writer.add_partial_movie_file(hash_play)
logger.debug(
"List of the first few animation hashes of the scene: %(h)s",
{"h": str(self.animations_hashes[:5])},
)
func(self, scene, *args, **kwargs)
return wrapper
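
# A minimal, self-contained sketch of the pattern handle_caching_play applies:
# hash the arguments of a play call and skip the body when the same hash was
# seen before. It uses hashlib/pickle directly and is only an illustration,
# not manim's actual get_hash_from_play_call machinery.
import functools
import hashlib
import pickle

def skip_if_cached(func):
    seen_hashes = set()

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        call_hash = hashlib.sha256(
            pickle.dumps((args, sorted(kwargs.items())))
        ).hexdigest()
        if call_hash in seen_hashes:
            print("Using cached data (hash : %s)" % call_hash[:10])
            return
        seen_hashes.add(call_hash)
        return func(*args, **kwargs)

    return wrapper

@skip_if_cached
def play(animation):
    print("rendering", animation)

play("FadeIn")   # rendered
play("FadeIn")   # skipped: identical call hash
play("FadeOut")  # rendered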
| 41.036364
| 116
| 0.6358
|
36459d4d2df050326f22f0deafbc3688585e698e
| 924
|
py
|
Python
|
setup.py
|
trialspark/aptible-api
|
c0b843b8de93b50eb3cafe051e13421bd8c3f039
|
[
"MIT"
] | 1
|
2021-07-13T14:43:43.000Z
|
2021-07-13T14:43:43.000Z
|
setup.py
|
trialspark/aptible-api
|
c0b843b8de93b50eb3cafe051e13421bd8c3f039
|
[
"MIT"
] | 1
|
2021-09-23T19:20:20.000Z
|
2021-09-23T19:20:20.000Z
|
setup.py
|
trialspark/aptible-api
|
c0b843b8de93b50eb3cafe051e13421bd8c3f039
|
[
"MIT"
] | null | null | null |
import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="aptible-api",
version="0.4.0",
author="Zachary Elliott",
author_email="zellio@trialspark.com",
description="Object Oriented interface for Aptible API",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/TrialSpark/aptible-api",
project_urls={
"Bug Tracker": "https://github.com/TrialSpark/aptible-api",
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
package_dir={"": "src"},
packages=setuptools.find_packages(where="src"),
python_requires=">=3.6",
install_requires=[
'PyYAML>=5.4',
'inflection>=0.5',
'requests>=2.25',
]
)
| 28.875
| 67
| 0.632035
|
e28aede06f39d2feaa354ecc49e4ce1f3b2df4a0
| 685
|
py
|
Python
|
space_script/pdm_space_netbeans.py
|
irving-muller/TraceSim_EMSE
|
f0d1f7c99fe4f427e40ac29f7736a30b8b513009
|
[
"MIT"
] | 3
|
2022-02-18T12:58:07.000Z
|
2022-03-02T09:48:39.000Z
|
space_script/pdm_space_netbeans.py
|
irving-muller/TraceSim_EMSE
|
f0d1f7c99fe4f427e40ac29f7736a30b8b513009
|
[
"MIT"
] | null | null | null |
space_script/pdm_space_netbeans.py
|
irving-muller/TraceSim_EMSE
|
f0d1f7c99fe4f427e40ac29f7736a30b8b513009
|
[
"MIT"
] | null | null | null |
from hyperopt import hp
fixed_values = {
"filter_func": "threshold_trim"
}
space = {
"c": hp.uniform("c", 0.0, 30.0),
"o": hp.uniform("o", 0.0, 30.0),
# "keep_ukn": hp.choice("keep_ukn", (False, True)),
# "static_df_ukn": hp.choice("static_df_ukn", (False, True)),
"aggregate": hp.choice("aggregate", ('max', 'avg_query', 'avg_cand', 'avg_short', 'avg_long', 'avg_query_cand')),
"rm_dup_stacks": hp.choice("rm_dup_stacks", (False, True)),
"freq_by_stacks": hp.choice("freq_by_stacks", (False, True)),
"filter_func_k": hp.uniform("filter_func_k", 0.0, 130.0),
"filter_recursion": hp.choice("filter_recursion", (None, 'modani', 'brodie')),
}
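
# A minimal sketch of consuming the space above with hyperopt's TPE optimizer.
# `toy_loss` is a stand-in objective written for illustration; the project's
# real evaluation function is not defined in this file.
from hyperopt import fmin, tpe, Trials

def toy_loss(params):
    # Exercise a few sampled parameters and return a float loss.
    penalty = 0.0 if params["rm_dup_stacks"] else 0.1
    return abs(params["c"] - params["o"]) + 0.01 * params["filter_func_k"] + penalty

trials = Trials()
best = fmin(fn=toy_loss, space=space, algo=tpe.suggest, max_evals=20, trials=trials)
print(best)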
| 32.619048
| 117
| 0.630657
|
f7f120247722d959a07182e68bad0b7a2ba61c0d
| 2,169
|
py
|
Python
|
geotrek/feedback/views.py
|
fossabot/Geotrek-admin
|
ea2c873511ad724c742c64d81cbf31f37dbe3093
|
[
"BSD-2-Clause"
] | null | null | null |
geotrek/feedback/views.py
|
fossabot/Geotrek-admin
|
ea2c873511ad724c742c64d81cbf31f37dbe3093
|
[
"BSD-2-Clause"
] | null | null | null |
geotrek/feedback/views.py
|
fossabot/Geotrek-admin
|
ea2c873511ad724c742c64d81cbf31f37dbe3093
|
[
"BSD-2-Clause"
] | null | null | null |
from django.conf import settings
from django.views.generic.list import ListView
from django.core.mail import send_mail
from rest_framework.decorators import list_route
from rest_framework.permissions import AllowAny
from mapentity import views as mapentity_views
from geotrek.feedback.filters import ReportFilterSet
from geotrek.feedback import models as feedback_models
from geotrek.feedback import serializers as feedback_serializers
class ReportLayer(mapentity_views.MapEntityLayer):
model = feedback_models.Report
filterform = ReportFilterSet
properties = ['name']
class ReportList(mapentity_views.MapEntityList):
model = feedback_models.Report
filterform = ReportFilterSet
columns = ['id', 'name', 'email', 'category', 'status', 'date_insert']
class ReportJsonList(mapentity_views.MapEntityJsonList, ReportList):
pass
class ReportFormatList(mapentity_views.MapEntityFormat, ReportList):
columns = [
'id', 'name', 'email', 'comment', 'category', 'status',
'date_insert', 'date_update',
]
class CategoryList(mapentity_views.JSONResponseMixin, ListView):
model = feedback_models.ReportCategory
def dispatch(self, *args, **kwargs):
return super(CategoryList, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
return [{'id': c.id,
'label': c.category} for c in self.object_list]
class ReportViewSet(mapentity_views.MapEntityViewSet):
"""Disable permissions requirement"""
model = feedback_models.Report
queryset = feedback_models.Report.objects.all()
serializer_class = feedback_serializers.ReportSerializer
authentication_classes = []
permission_classes = [AllowAny]
@list_route(methods=['post'])
def report(self, request, lang=None):
response = super(ReportViewSet, self).create(request)
if settings.MAILALERTSUBJECT and response.status_code == 201:
send_mail(
settings.MAILALERTSUBJECT,
settings.MAILALERTMESSAGE,
settings.DEFAULT_FROM_EMAIL,
[request.data.get('email')]
)
return response
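
# A minimal sketch of calling the ReportViewSet's `report` action from a
# client. The URL is an assumption about how the viewset is wired into the
# project's router; adapt it to the actual routing.
import requests

resp = requests.post(
    "https://example.org/api/report/report/",  # assumed route for the action
    data={
        "name": "Jane Doe",
        "email": "jane@example.org",
        "comment": "Fallen tree across the trail",
        "category": 1,
    },
)
# A 201 response also triggers the alert e-mail when MAILALERTSUBJECT is set.
resp.raise_for_status()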
| 32.863636
| 74
| 0.716459
|
ee5cf98a9afa3d611128c6b9d084f0f8ea329b38
| 1,216
|
py
|
Python
|
lib/spack/spack/schema/bootstrap.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
lib/spack/spack/schema/bootstrap.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 8
|
2021-11-09T20:28:40.000Z
|
2022-03-15T03:26:33.000Z
|
lib/spack/spack/schema/bootstrap.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2
|
2019-02-08T20:37:20.000Z
|
2019-03-31T15:19:26.000Z
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for bootstrap.yaml configuration file."""
#: Schema of a single source
_source_schema = {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'metadata': {'type': 'string'}
},
'additionalProperties': False,
'required': ['name', 'metadata']
}
properties = {
'bootstrap': {
'type': 'object',
'properties': {
'enable': {'type': 'boolean'},
'root': {
'type': 'string'
},
'sources': {
'type': 'array',
'items': _source_schema
},
'trusted': {
'type': 'object',
'patternProperties': {r'\w[\w-]*': {'type': 'boolean'}}
}
}
}
}
#: Full schema with metadata
schema = {
'$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack bootstrap configuration file schema',
'type': 'object',
'additionalProperties': False,
'properties': properties,
}
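
# A minimal sketch of validating a bootstrap configuration dict against the
# schema above using the third-party `jsonschema` package; the sample values
# are illustrative assumptions, not Spack defaults.
import jsonschema

sample = {
    'bootstrap': {
        'enable': True,
        'root': '~/.spack/bootstrap',
        'sources': [
            {'name': 'github-actions', 'metadata': 'path/to/metadata'},
        ],
        'trusted': {'github-actions': True},
    }
}
jsonschema.validate(sample, schema)  # raises ValidationError on a bad config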
| 26.434783
| 73
| 0.525493
|
0ade2dfd347d83daf0039aab394d3fac6f3ec8b6
| 161,382
|
py
|
Python
|
bin/temp/var/usd/cd/tmp/usd/data/data/data/data/data/temp/twitter.py
|
RazorKenway/All-Downloader
|
e1c6d9ee277166faff8876e967b521fd752f0e7f
|
[
"MIT"
] | 44
|
2021-06-28T15:57:18.000Z
|
2022-03-22T07:36:13.000Z
|
bin/temp/var/usd/cd/tmp/usd/data/data/data/data/data/temp/twitter.py
|
RazorKenway/All-Downloader
|
e1c6d9ee277166faff8876e967b521fd752f0e7f
|
[
"MIT"
] | 1
|
2021-11-26T13:28:10.000Z
|
2022-01-10T21:23:41.000Z
|
bin/temp/var/usd/cd/tmp/usd/data/data/data/data/data/temp/twitter.py
|
RazorKenway/All-Downloader
|
e1c6d9ee277166faff8876e967b521fd752f0e7f
|
[
"MIT"
] | 5
|
2021-08-23T17:34:56.000Z
|
2022-02-25T19:23:59.000Z
|
#ENCODE BY CRYPTO
#YOU CAN TRY THIS DECODE GOD BLESS
import gzip,marshal,zlib,base64,binascii,lzma
try:
exec(gzip.decompress(marshal.loads(b's\xd2\xdb\x00\x00\x1f\x8b\x08\x002\x87\x98a\x02\xffl]Y[\x14K\xb0|\xbf\xbf\xc2\x05\x95M\xe8\xbd\xabQdS\xdc\x15\x14Da\x04z\x05Q\xf6MA\xf8\xedw"2\xca9\xdfw\xef\x83\xe7\xb0\x0c3\xdd]U\xb9DFF\xfe\xd8?:<9\xbbS\x95\xa7m\x96\x8c\xef\x97\'\xa7\xbb\xe5\xaf\xf1\xea\xc7AyZ\xff\xf81\xbes\xf5\xe3h\xfc\xea\xd7\x8f\xea\x7f\xceN\xfeL\xb5\xbf\xdbzX\xaf\x99\xf8uX6\xa7\xc3x\xc1D\xd3\xd6\x87\xfbG\'\xed\xe9\xe9p\xf5\xa8\xf7;\xecz\xbf]\xd5\xfb\x1d\xb8\xfe\xbf \xea\x7f\x93\xf7~\x17\xae\xec\x7f\xd7\xff\xa6\xebz\xbd\xfe\x8f\xda\xcf\xfd\x9f\x05\x0f\xfb_\x05\xfd/\xfa?/k\xfe\xeew\x95\xed\xf7~7\xfd?/\xea\xff|\xf3\x7f\xbf\xf8\xcf\xaf\xa7\xf7\xfb_\xd7\xfd\xbf-\xda\x9b\xfe\x0f\x8b\xe3\xb1\xd1\xfe{\xc5\xfd\xf7\xec\xbf\xce\xf5\xaf\xa2.q\x11I\xff\x02\xb2\xfe/\xfa/\xce\xfb/\x0e\xfa/\xe8\x1a^\xee\x19~\xfa\xb2\xff\xbav\x13\x17\x11\xf6\x7f\x18\xf3\xca\xcep\x91\xbd\x93\xd9\xde\xef\xb6\xb2\xcb\xc3M\x95x\xa7\xb4\xff}\xb2\x85{\xfa\xd9\xffOh\xbf-\xf2;\xebw\xf0\xe5\x83\xfe\xab\xfa\xaf\x0c3\xfd\x1fO\xa0\x1e\xc1c\x18Y\xb2\x97Wn\xe4\x95}\xe5\x92\xde\xd9\xd1\xdd\xa5\xbbK\xfd\'\x11\xf2\xdf,\xaelf\xab\xc5\xf3\x1b\xee_\x16~\xd8\xff\xcc\xa6\xff\x98\x8a\xfe\x85\x05\xb9\xae#\xb0\xeb\xe0\xbf\xfe\xef\xbb`\xbe\xff\x13\x97fx_\xbcqj\xb7\x8e\xbb.\x9c=\xea\xb6\xff\xb5\xc32\xd4[\x8bxH3#\x8f\x87\xf7\xed"q\x03u\xff\x81t\xfd\xcfm\xfa_\xb7x\xdb\xfe\xd7mb\xff\xe7\xc7\xf5\xff\xb4\xc2\xa3pxMv\x82g\xd6\xbf\xb8\xfe\xbf\x1a\xaf\xc6+\x82\xb9d\x08\x9f5\xf2\xb4\xff\xc3\xd8n\xa9sk\xbdG\xdfq\tx\xb6\xb8\xfb\xfee\xd4Y\xb2aK\x81u\xe2\'gv\x15Ms\xd0\xff\xd8\xce\xb6R\xdd]\xc5v\xddu\xb9\x7f\xd8\x7fq\xff\x83\x9a\xfe%\xb5\xedq\x16\xda\xc3)\xb1op\xa5\xf8\xfc\xce\xae\x05\xfb\x01o\x81\xd5\xec\xc2\xebg\xb6\x18xlx+|jU\xae\xe39\x04v\xddU\xa8\xfd\xd9\xbf\x0e\x87WV\x8d\xed\x8b^\xffZB\\ \xfe\xb4\xb5\x0b\x0c\xa3\x17\xd8\x19/\xfbkY\'\xb3\xab\xfd\xf5\n\x0b\xdbY\x1d.\x07\xd7\x8b\xaf+,\\i\xcb\x1f\xe8\xda\xfc\xff\xfb\x8f\xe5\xc0\x9f\x91u\xbb 
lk\xee\xad\x80\x9b\x82\xff\xef\xa2\x1b{\xe3\xb6\xc1\x0f\xf0I\x19\xceVp\xc7n\xd1\x85O\xcb3[\xda\xb0^\xc6\xc6\r\xde\xe0Z\x0b\xbb\x94\x02G\xa6\xb1\xaf\xebl\xa9w\xb0\x8a\xcf\xbc\xb0G\x83E\xc6[\xd7\x15\x1e9\xee\xb6\xd1\xce\xc6U`\xdd\xb0\xe4]\xff\xe2\xabtg\xdf\x8e)\xde\x10{\r\xa7\x07\xff\xaf\xdc\x98\xdd#.\xb6\x8ep\x15\xeb\xbazn\x93\x1f\xfd7-zg\xfd\xcb\xa9\xeaa;\xe5\xae\xed\x1f\x9b&\xd6\xf1\xed\xf0\xe1\xa1}L\x8d-\xd7\x95\xcf~\xe2\xdb\xf3\xfeE\xf6\x7fV\xf6\xdf\xbb\xec\xdfA\x19\xda\xe7\x14\xfd\xff\x97\xfd\xcfiK{\xe8E\xb5k/\xe8\n\xde\x05\x16\xf0DW\xde=\xef\xff\x08v\xa2\xda\xc3\x93\xb4\r\x86\xcdVh\xb3\xe2\x8e\xb8\x95\xb1\xe0\t.\xb8x\x8c+\xa8l\x1f\xf4O\xc2\xc9U}\xc35?\xc1\xcf\xb7\xf5\xf3\x1a\xdfV\xb8\x99\xaa\xb9\xb4\'\\D\xb7\xfd\xff$v|\xc2\xeco\xff\x9bL\xdf$\xf8&\xfdk\xfb\x80?\x89\xec\xffN\x1f\xdf\xe9\x90\x96\x91l`\xba\xac\xa3V\xdb\x93.\xd2\xd9s[D\xfc\x14W\xde`\x81\xf3\xd9Y\xbbg\x1c\x80\xb2\xff\x06M\xf0\xc0\x96\x15\xdf\x14\xc17\xdb\x18Mt\xf9\x94\x86\xecw\xc1\xc31l\xa7\x05\xe7\x18\x0f\x00o\xd9$\x8b\xb4\t\xf6\xbc\x9an\xdf\xbe\xa8C3\xb4\xf8\x7f\x90\xcf\xd9\x07\xe1\x03\xb1\xf5\x1a\\%\x8e?.\xdb%\xb3v\x16\xeb\xea\x95\x8eY\xd0\xb7\xf3W\xcfpQ\xfd_w\xcf\xf0\xe4\x8ftH\xea\xf8)\xb6\xfb\x17\xdb\xf3\xf0\x0fu\xb3h\xdf`c\xbax\xd2\xceZ\x95\xd8\xda\x86:\x94\xb8\xa6\x02{)\xb7K\xc7\xcf\xca\xf8\x12Ot\xeeo`\xb7\x8e\x93\x84{\xe5"\xf6\xdf\xa3h\x0em\xb7`q\n\\n\xf7\xf6\xee\xd1\xdd#X\x92#\xfc\xb4\xbf\xa8m\x88\xcf[\x82\xa5\xb8\xb0m\x0c\xa3W\xe2\xb4\xa7\x17\xb6\x9d\xdb\xe6\xbbm\xd2.?\xc4fy\xd5{\xf4*\x871\xfbp\x17F@\xe7\x04\xa7#tf,\x1c\x7f=*\xb3\x84#\xectn\x9c-;}J\xfb\xd0\xf6TY\xbc\xd2\xc9\xc7\xab\xdd6\xfe$\x88\xa7\x7f\xcbz\xa4O~_\xc2p\xc0\nd[\xf6>x\xeeA\xfa\xc5\x9e)\x96\x89\x1b\xa3\xb4\xa3P\x14\xfd7\xaes\xed\x8d\xd6\x0e\x90\xd3\x01j\xea}Y\x95\xd0\x8c\x1bm2\x1c"\xee$\x90\xaf\xa9\x9c\xb9Q\x9cy\xac@\xdb\xae\xbe\x98\xf0;\x04;5\x1b\xd2\xe7\xb9E{\xdf\x90\xe7|\xff\xbc\xff*\x87%\x8b\xa6\x9c\x9d\xf9\xb2\xc0E\x1c\xe3\x9e\x1f\xda\x03\xc1;p\xf9\xb2cl\xf8_\xa7W\x0f\xf1\x7f,\xd5\xf6\xebY3E<\xa6\\\xf7\xfc&\xb4cRe\xc1\xa6=Z\x1c\xba\xba~c\x87\xb7k\xaf\xcd/\xc2\xdc\xe3(\xc1%a\xd9\xaa\xbew\xffk~\xb6n\x7f\xee\xd8]\xd3!\xe2\xba\xc3_\xb6\xc7\xba\x04\xe7\x02\x07=\xe1&X\xb6s\xd8\x96\xe6\xa9\n\xf9\x88\xc0\xfd\xb1\xb3\xd0\xc9\xa7\xe3D\xba\xe8\x18\x8e\xeaB\xebZ\xda\x13\xc7.\xac\xab\xd9)\\\xc1\xcb\x17\xb26\x9dm\x0c\x1c\xffZN\xb5\r\xef\xe0y\xdc\xef\xdf\x06~\x13\x0cO\xe8\xdd\xf1-\xdcgG\xcb>4\x8e\xab8\xf9\x08\xc72\x8b\xe3\xf4\xd9|v\xdd^\xca\xc4!\x84J\xbe\xd8\xd3\xae\xb8\xe4\x872\x9c|\xda\xed\xd2\xa9\x9dD<$\x17\xe2E\xe5\x07\xacVq(GC\xe3r\x8a\xabr\xf6\x94\x1a\xed(\xfc}\x1b\xd9v\x85]\t\xf2\xdc~\xe8\xf2+|\xdeM\xdf\xc6\x97\xb9\xb9\x91.j\xed\x0b8Y\x9c\x882\xbf\xbeA\xd4\x12\xc3\x9cW3\xb8\x9ce\xfc\xf5\xbb5X\xd0nyd_\xa7&\x9e6\xcbRdO\x9f\xcchyt\xd2i\x16\xeb\xab{7\xb8\xc2i\xbc\xf7\xc4\x93gwd\xe8\xdc\xe6-\xcd\xcbZ\xff\xb7ma\'\x1c[\x03\xc7\xbf\xa2\x13\x9e\xc6\x13\xfdp\xc3s\xfc\xd5\x9en\xd1\xc9\x8f\xe3\xc6\xab-=\xf2\xf0\x18;\xcd\x82\x94"\x99\xdb\xc5\x8f\xcfls\xb8\xbc\x7fMN\xfe\x1d\xae\x18>\x9e\xbf\x08\x7f\x8f`\xb5\'\x14_\xe4\xd7\xb08\xf6\x1e\x01\x0c?\xe2\x8a\xa6\x1d\xb1\x8f\xa8\x8b\xdf\x8atS\xb3\x96\x9d{\x87=\xf3\xdc~\x8d\xed_\xff\xc72\xb7\xed\x1b;\xc38LMz\xd5\x7f\x86\x08)\xb8%\x19\xb9\x9e\x7f\xbd\xb6\xbd]\xb4\xdf\xec\x90\xf9\xed\xda\xc6_\xec\xbd\xb0\x18\xdcG\xcd\x84}\x81\xa8\x10O.\xcc\xa6_|\xf8\xfe\xdb\xfe\x1e\xe1J\x11\xcf\xe3\n\x8fv\xcc\xf1\x8d\xbe>3\x8b\x8c\rO\xbf\x8d\x8b\x88G`\x17N\xcd8\xe0yW\xc9\xe7\xf4]\xfa\xfe\x0b\x82_\xbc3>\xb2=\xd5\x19\xcb\xa2\x05[\t\xd8\x97V\xbb9\xc0\xebpF\xb0Gh\x9d\xea\xb3i$\x05u}_.\xcf\xc9\x86%\xb6\x96!\xef\xf0\x19\xb6\xd74\x8c\xd8\x85\x99;\x18C
lLZ\xfb\xc8B\x05z\xf8@\x91\x0c\x9eC>j\xcb\x0c_\x8b\xdb\xe6\xa1\x835\xca\xff\xec\xec \xd0(^\xda\x87\xd1\x0b\xe1\x9d\xca\x91\xdc\xbc\x8f\xb3$\xc0\xac~\x8a]f\xd7\x8b\x15A\xd8R0f\xbb\x863\xbeT\xcaPj\xbd\xd2|\xb0\x14\x88n\x10\xee4\xf1Z\x86\x98\x8f\x99Pnv\xab\xd2\xeb\xf0\xb3\xae~2\x82k\xac\xce\xff\xb4\xb6\x98u\xb1aqTX|\xb4\xedWG\n\x95\xda\xe2\xe5\xca\xe0j\x10\x18\xd9O\xe1\xda\xe3\xd2\x16\xb5\x84\x95\xc0M\xb4\xc1\xe6\xf9\x90r\x06g\x87\x15\xfb\x13\x0b\x0bG\xce\xfb\x0bg\xdc\x1b=\xbe\xee\xa0\xf7h\x17\xcef\xce\xee\x91\xc9\\k^\x04yD\x7f\x8d\x1f\xc9\xc2\x16<\r\x07v\xa3U<\xfb\x00\xdb\xe8\t\\\xeb\x07\xfbmYn\xe2\xb1\xc2\'\x17/\xf0\x08_\xd8\xb3\xc3{r{\x16\xb6\xe4\xf8\xba\xb5\xfd\xd3\xdf\taj\xa6\xc6\xe1\xb94\xcd]\xf8\xa0\x8d^/0+\x99w\x7f3\xdb\x9a\x0c\xae#\x04&0\xd5\xf0\xd9\xb8\xa76\xdc\xc8\xe6\x0e\xec\xf3\x9bv\xdc>\x00O\xb9r\xe9{x\x99\xea\xc7\xf8\xbe\x99\xcc\xaa\xb4\x80"d\xa4y\xc7\\\x12\xf6\x15R\x08\xa6\x90\xc13s\xdcau\xa6OaP\xbf"\xa7\x9f\xbe\xb8D\xe6\x88\xd3\x8a\xc3\x85W2H\t\x9f\xe0\t}\xb8\xbb\x03;\xd0\xae\xe17\x9bf\x9cx\xec\xf3\xd5K\xec\xcd\xcd\xcf\xb65\x82lc\x1d\x07\xc8V\xb4\xc3\xa6f\x8e\x1b\x7f\x86\x01\xb90w\xdcaS\xc1\xdd\x16Yw\x8a\'>\xca\x1c\xcc\x0cO[\xed\\4\xfa\xec\xfc\'\xfe`\n\x87\x00\xb6\x11\x9fY\x9b\x9f\x0b\x195\xdc\xca\x06\x05\xe3fr\x19\x9f$\xe7\xef\xed4\x85\xdd\xcc\xeb{#\x96qV\xb87F\xab\x95\xfd\xab\xb2\xa7\xdf\xfc\x82\xdd\xda\xe3\xeb\xd2l\x1d_M\xe2\xed\x97a\xa1\xce\x95\x10\xd7\xf0\xe3\x81\x16\x19\x1b\x0ev\xadQ\xae\x88\xb3M\x03\x80Wfcv\xc2\xcaz\xc7\x8eo\x1dO\xd4k\xd8e\x8c\x02u@\x8a\xf2\xbe\x92`8\xd9\xe2-^z\xa6\x14&\x19\xb1\xbd\x8a\xa0\x1b\x07\xa5\x8c\xc6\xf1\xde\xcf\x86\x0f\xe5V\xdc\xaa\x05\xcd\xfc\x97}\xc4{c/\xe1\xf58\xa0\x8c\x8e"\xad\x01\xccM^/.\xdab\xc1\x94\xf47\xda\x81\xbd\xd0\x0c\xc3\xbeb\'\x06S\x8d\xfd\x14\x91k\x90\xde\x9b\xb4\xa3QU\xbfq\x16_(\x15\xa9\xf5\xd7\xc9\tl\xf7\x9c=\x97\xa2\xfenO\xbf\xb6\x14\xa9N\x16,\xda\xc2\r\xbah\xe5\xef\xbf\xb3\xaa\xab\x851l\x8b9\xac\xf3\xd4w{\x8e\xc07\xda\xf8\x8dE\x98e\xdal"\x1c\x8d\xee\xdd\xbbU\xf4\x9bX\x10\xd4\xb74\xdfm\xcb\xb7X\t\xb8\xb9"9\xa5S?[\x19\x85mpv\xe4\xea\\O\xa0\xc5qw\xb4\x7f0^\xc5\xcf\xe5\x1f\xbd\x83\xe9\r\xdb\x18A\xf0\ta\x89\xed,\xc4\xa25\x9ec2o\x07\x08\xa7)\xcc\xdf\x9b\xb5(q\xcb\xf8I\xdb\xb9\x17\xca\xc9;K\xc7p\xcc\x19\xa2\xa62\xa6\xd9\xca\xa5\xde\xce\xfd\xb4{\x0f\x84\xd40\x13\x90\xe3,\x89\x0cL}\xb0\x9dO\xe3\xde=xoO\x88\x8b\x91\x1e\x9a1\xa9x\xf0b\x8b\xe8;Y\xb0:[\xb5\xfc\xa7\x89fp\t[\x0f\n;\xf2Mu`K\x87\x05o\xe0\xf9-\x95h>\xe2\xf6\xdf(~+u\xca\x9a\xe4\xe1\x17{\xb2\xf4i\x15R\xcd\xa2\x83\t\xc5\xb9,jl\xbf\xfc\xf6\x8bb\xa36\x9b\xfe\xc8\xbd{\xf2S6&3\xa7\xdb\x147\x88K\xf0^\x01]\x1ec\xc3\x9b\x9bm\xcb\xcbh\x9e\xc3\xed\xef0\x1d\xe3\xdf\xec\xba\x19d\xa7\xf6\xafpD\xb3na\x81\xcb\x1fx\xeds\xc5/\xa99\x19l\x11\x1e\xb34\xba\xcb\x07}\x80?\x9cC\xc8\x1a\xce\x9aA\xe2\xa9\xe5\xe3\xe2R\xec\x8d\xec\x19\x08@\xa3\x11\x0b\xbc*\x17\x9e>y7\xd2\x11*\xda\xb00\x1b\x11\x00\xfe\x15\xd5\xed\x96Y\x07\xacc\xd8\x84\x9b\xfb\xa7v"\nd}\xb8\xc2P \r\xb1\x1b\x1c\xc0\xf4\x0c\x16\xf5\xcf\x07\xb3\xbeEyo\xe3\xadV*1O\x8c\x18\xcb[,ZL\x982\xd8\x91"]\xbd\xb1\x83\xd07\xef\'\xc5k\xd8\xa7w\x07\xb0\x05\xf1[%O\xd8.\x85\xad\x8dO\xf2\xdb\xfa\nk=\x87\x9b\xdc~j\xae\xb0\xe8\xce\xb08W\x06\xaa\xb9t\xb1\x93\xc9w:1\x88)\n\x9a\xaew\x0f\xec\xd1s\xe7e\x99\x99\xe1 
>\xbb\xb1\xa7\x84M\xd7\xff\x8b\xde\x97\rl\x10\x87H\x14k\x8b\x85\x87\x85\xc4a\x83\x17gx\xea\xec\xdc\xe0\xc15\xb9\xad\r\xd6\xca\x85\xb3\xdft\x83\xf2\xf0\xc0`\xca\xe4\xc6v\x1b!T\x18\x06\xbe_d\x17\x88x\xa4\n\x91\xb5\x84\'\xd7/\x9e\xda}t\xe5\x1fY1|z{\xf3\xfc\xcb{\xf3\x8a\x0cD+\xe5-.\x7f%\xfc&XR0S\x00\xe1H\x16\xfe\xfe\x98\xc1y\xf9\xb0-\x88Bq\x02\xf2\x1al\x16X\xa6"]\xb0`\xcb\xac\xbc]2\x93\xd9\xc2B\x81\xd7fW\xf1\xe0\xc3\xc6C\x83\x87\xdcaYd\xb1s\x9d>?\x97\xa1\xc4RD\x1f\x9e\xe8\xd9\xc0\x906#\x16m\xb6\xf5=\x9c\'\xac\x0b\xe2\x85\x8e\x18\'\xbc\x1b\x8cgP=\xb5\xe8&l\xf3[\xf8\xae\xf4`\x80-2n\xd0\xda\x13\xb8\x83\xebLil\x0f\xae\x15\x9dW\xa3>\xb5\xbd\x9a\xb0\x80\xbaUL\xdb_\xa0\xde\xf3t\x05[t\xcf\xc2\x88"|\xbab\xf7\\\x08\x8fj\xb2k3`\xf88\xfc!WQ\x11x\x18\xfcVN\x81\x8d^\xa6\xeb\x82sr\x0b\\\xfb;\x0c\xd7\x1a]\xd9\x85V\xd1[`\xa5\xcd\xdb\xa7\xef\xb0DS\x8aj\xebOo\x84<fX\xad\x86H`f\x9e\x93\xceY\xe6\xb1\x10P\xda\x16\xbff\xd7\x94\xd6wH\xc8\x0bd>\xc4\xd8\x93\x1d{\x0c\xfd\x1b8\xf9\xfc\xcb\xfe\x1c\xd7\x8b\x1b\xa9\xf0@\xdb`\xc5\x0cY\x13\xd5\x07g\x88\xb4c\x18\x91\xce\xe3P\xb1\xbd\x9e1_\xf9\x10O\xeb\xfe\x10a6\x1c\x8f\x0c^?\x85\x93\xa8\x0e\x05`1\x03\xc1\x06N\xaff~\x98Eu\xf5\xd1\x93\x85\xefB\x14\x1a=\xe8lT[*>R\xa4\x8b\x18\x0e~\xa4\xf5p\x853CY\x868\xc1\xcelH\x15\xc4\x0b\xd3\xf0\xac\xb7\xf6>D\x13;\xfbk"^\xee\x9e\xb9\x8a \xc1)\x08\xaa\x0cn4\xc9\xcc\xf6V:\x8f]\xee>\xa2\x0c\x91\xe2\ns\x98n\x04\x18-\x03\xef\xd2\x9cE\x95\xad\xe9o\x14\'\xd0\xf8\x96\xc1\x1e\x9c\xe5\xc8\n\x8e@ kQ\xcb\xf06rB\xb5p\xf1\x1a\xc8]z+\xe3\x17\x1d)\xdb\x00\xc2\x03+\x95{_\xd8\xf1i\xc2Wv\xc3\xb8\x8a\xbb\x1f\xcc:T\xf5L\xb4\xdf;\xf0 }\xf8\xe3=\xbe\xba\xb0\x83\xec\x92NXs\xfd\xf2\xd55\x0eyd\xa7\x83\x87 0\'\x81s\x8e\x1b\xc2\xe1\xa8\x19\xe3|\xc3]\xdf\xf9!4\xb3\xdc\x1aY\x05\x8a\x9b\xce\x08\xf9\x05\xfcVu\xe7\x08\xf2F6\x15\xfb7\xb6\x8a]1:o\xc7\x0e6\xaa\x89\x808\xf4\x0fY\xcf\x1ew[\x96\x0f\x17\x19\xa5\xae\xd9\x9f!\xda\x03V\xd6?\x9d\xd3\xd8\xb9;S\xb0rnJi\x8b\xfbl7\xfc\x14\x87~Z\xd9E\x04T\xa9~\xea\xcc\xa13\xca\x82\xd1\x88\x0e\xe7_\xac\xc2\x89f\xcf\xcd\x14\x84)\x9e\x04\x1e/n\x90\xe6\xae\x99\xb7\xfc6\xcc\x97\x87\xe5 \xdb1\x1c\xa0\xcb]\x83\x83\x1c\xc2\xe2\nY$.\xbf\x7fr\x1e!\xb4\x82\r/\x1c\x90\x94${\xb1\x88\xe3{\xf1K\xb0D=\x06cV\x0e\xcb\x1d\xf5\xaf\xf2\xc7\xa5\xc0\xad\xe0\xeb\x85\x1dr\xc7\x00\x18G\xa3{\x8d?\x01\xdc\x1b\x8c\xcd~\xb5s@\x141\xb6\xa3R \xdd/\x04\xf3\xf3\x90\x94}\x0fu\xa2\x17E\x16k\xc3\x93\xdc9\xb1\xa3_\x9a\xfbx\xb4\xa0\xb2K\xf8^h$\x9e-\xd6\x0by\x97kV\x87\x04\xc9\x86x\xb3\xd6\x01e\x88\x04\x90c\xdb1\x00\x8f\xf1\xd4\x9b\xc70\xbb\xc1=\x01A\xdd\x13\x0b{\x1d\x1d2~R\x9f}<9\xbf\xc4\xb6@\x19\x07;\x94\xe5\x81\xc0\x8e\x81\x13lY\xc4\x0b\xf0\xcb\xbf\xf1\xb6\xeb\x08kO\x7f\xd8\x92V\xf1/\xd8\x81\xb7I\x8c\x0b\t/{\xbd\xafw\xcc\xe7\xe3Vp\x8du0a\x9f\t\x84\xa0J/U\x9e\xab\xde\xcb\xb4\x11\x18,u\xea`\xce\xdb{\x16C\xb7\x0c\xcb\x9e\xdao[\xf7\xcd\\G\xd8`\xeb\xe6#\xbd\xb3o\x02\x0b\x12&9\xb3\xe6{\x1a\x1c\xe0\x9ay\xb7\xad\xb2\x1d\xff6\xb8|\xf5\x0c\'\xf9\x93b\x81r\xf2\x0e\x00X\x17\xdf\xb7H\xba\xe8^\xdaec\xb5\xba\x0c\xe9p\xff-\x1f\xd9\xce\xc3\xf9\xf6\xb9D\x1d\x9cj? 
\x17\x02\xc4\xe9\xa2y\xfb3<\xb8\xa0Z\x1a\xb5\xdb\xc5\xe5\xbbtv\xde\xb2H\xdc]\xdb\xce\x08~\xa4;\x13\x14_E\xf0\x16\xe9\x15V|Bo\x12(\xa6\x8eQ\xc7Hw\xee\x98\xc9\xb1\xa4\x13+\x03|\x89\xb0]\xf1+\xdf\xb3\xc7\x86\xd4\xb8\xe5E\\\xe1\x04\xa4SC\xcc\x86\x95\x8b\x04\xf9\x8eL+\xae\xbc\x9e\x87\t\xfd2\x88\x93\xf9A\x05v,c\x00\xc4+\xf5IN\x84\x8d\xbe\xfb\x13\xc0\xc4\x04\x90~\xcc`\x11\xd8}}\xf6\t\x8e\xe7\x13\x0ex\xf2\xdb\xde\x0bV\x03\x85\xbd\x86\xf0\xe1\xfa\xd4\x9b\x05\xb3\xe1]\xfb\xc1\xee\x88\xa1v&\xe3]\xed\xed\x08\xec\x0b\x95\' \xd3\xe8~\xd9\x96\x87)\xa9\xb8\xf2\x1fv\xf0\xeb3\xb3\xb5\x0cN\nnU8\xde&@\x06\x91\x15\xb6\xfae\x93\xfe\xde\xb2\x9d\xe6\xdd\x18s\x8d\xec\xa5\x120\x9c\xde\x0eqp\xd7\x9a\xa9\xa9\xa2msa\x8e\x88 \x8cKuo\xd9Y\xac\xc3\xf5\x160TF\x1fT0l}\xa9\xb8\x8b\xe6\xecR\xeb\xf2\x8bR\xf1\xf8\xdaB\xeb6{f\x7f\x1eV\xf3\xa3\xe6\xb2a\xb4\xe8kT\xe4*\xd2\xef\xe6\xcf\xab\xdc\x1b\xd3\xa9n\xcb\x1c\x0b\xa1\xd8\xc8\xfd\xb2<+tG:\x95\xd8\x04\xe1\x1f3\x1ba\xb6h\x01I\x9b`\xcb\xe1\xedq\xecai<4O\xd0\r!e\xd5\xee\xe9\xba\xaa\xe19\xb9\xf6r|\xc3\x12*~`\x15\x8d\xda#g\x1c\xcf\xf2\x18\xf61j-e\xf6}\xe6!<Dyd;\x1b\xd7\x84\xdd[\xf0\r\xd7&\x84\x8e\x96\xb4E\x00\xfb\xc2\xee\x1e\xf6\x93;W\xdd\xbc\xb1\x95$$W|\xc6\x83\xcb+\x81\xd5\xe5c\xbbdGl\xed\xc1\xd0\xe3\xc4\xc2\xd3P\x85\n\x8bj\xb1C\xcb\xa7xh\xcd/\x18\xcf\x9d=\x1fm\xde\xe3~\xea\x99\xc5(;\x02\xca\xbb\xf8%Np\xf8\r\xcb>\xd1{t\xd7\xcc_\xffN\x0f\xc6\x95L\x08\x8bT\x18k.\'\x18\xb1\xd5\xc0zsU`9\xf2\xb1\xbf\xf8\t?m\xdc\xb6\x19A\x98x\xd7\xbd\x1e\xb1[\xc7\xcd\xf5\xcf v(\x90\x8dV\xb1>.)\x8c\xbd\xe3^|s\xfbZ\xf9}\xac\xba^f1vK\xe8\x1a{)\x1fS\n\xa6h\x1a\xc7\xb7\xcc&al~\xd93\xc2\xb2\xb6\xdd\xed\xbce\x158N\xa5\x8a\x16\xdc\x13\x08\x9b]=a\xbb\xccc\xea\xdc$\xa9\x8eDgq\x02\x1c\x10kq\xcd\x05]2\xd2-\xc3\x93\xbf*0l\xde\xab\xd0\x9b\x9f08\xc53\xbb}\xa1jQ\xf7\xf61\xb7c\xef\xee\x8c"P\xb9n\x17o\xaaXYX"G\xeaE\xc2\x8a>\x0cA\xb2M`\xa5\xb4\xcd\x1a\xd6\x1fqz\xde)\x83\xc9TP!\xcc\xa8"T\xd5\xda\xe9`\xd5\n\x17\xdf\xcc\xd8\xbd"\xc4o\xa3\xf7\xb6cKbe\xb1=\xdf\x12\x00a\xff\xf8\xe0.waqG\x17\x15F\xb6;B\xf1Q\xb4,T\xfd\'#!8z\xab\x0c8\xe3\xc5\xc0GG\xcf\xaep\x1d\x93Xp\xa7\x1c#{`ykS|\x85\xfb\xba;\xe0{\x10\x9c)\xd7\x8fd\xb5*Ux\xb2ys\x05\xa1\xfb+\xf4\x13\xa1&\xeaXU0:\x8eE\xbd\xf7\\\xf5\xb2\xa6\xb6\xbdO\x17\x90\x87\x97\x1b_\xa2\xdd\x19\x9d\xcf\xf2Z\xa0|p\x0e\x07\xdc\x10$,\xd7\xde\x99E\xe7#pf\xb9\x99\x06\xa9V\xc5D\xaf\xb1\xdd\xc8X\xba\xfej\xab\xd32\xe9<]\x14\xf3\x03\xd9@\xe8l\xa7\x04\xed\xac\xb9D.+>2\xb9\x03\xab\xb8n\xe7\xc1\x05g\x80\x14\xb2\x17\x0f&\x8b\xf7\x87\xa2m`y\x8bQ\x0bi\nf\x91\xaf\x04\xf7\x86o\xe7\r^*\xdb\x12\x01f\x05H/\x1e\xdf\xb6\x88\xbc(?\xee\xe8+\x1c+:\xd2\xc0\x8e@\x15\x9d\t\xa0*\xcc\xc1y/\xca\xd2B\xfd\xc4\x8eC\xfb\x9f\xe3\x14&g\x82\x95J\xe2g\xb9\x180L\x03?\x99\xd9\xaa\x9bo7XY,y?\xee<\xb8\xb0<\xa9P9\xb4\xa8\x96\xf0\x90\xc4\x86\xc0SD\x86\xc1\xd2R\xe9\xe3\x84q\xad\x16\xf3\x94aAk8Y\xc5\x11n3u_uY\xd5=\x8bX\x98Wx\x18_\x9c\x98\xb2\xbbz\xca\xba\xcb\x8d\x1d\xc9\xc6M\x06BY\x98z\xbd\x99\xd1\x9b\xe0\xf4D\x91\xec\xad\xf8\x18N\xb5\x10\xfb\xff\xc9\xe3\x9d\xbb\xf6\xfbP\xbc\x19\xac$K\xebN.\x01\xfb1\xbdy#\x8f\xd675\x7f\x10\x9a\xac0\xcf\xee\xf5\x90X\x85?\xccB\x96*\x850-H\x01\x9c\xe4\xcd\xfc\xaa\xed\x128u\x87\x00\xd1S?H8A\xf2_\xc6\xf6\xce@bB\xa2)\xaffp\xaep}\x05\ni\xa8\x0e\x14\xd5\x0f\x03\xa7[dvX0\xc3\x0c\xf6uU\xd9}[\xbd\n/n\xac\xccs0i\xa7!\xf4\x91\t\x1cs\x85\xb2]\x82\xcc\x17\x08\x10\xeb\xc5\xb4t\xf0\xbd\xcej\xe4\xd8\xc5\xc09\x98\r\xf2\x9eH\x92\xea\xec\xc6\x10\x9d\x13\xca\x08m\x8dIA\x88T\x1c\xf7\x01c\xa2j\xaf\xa8.\x8e\x8c\xaf\xe11\xf3z]{\x81\xa2G\\\xee\xdeef\x9f\x0b\x10U\x11\x9a\xdc\x8eL\x956\xbc\xdc1\x93;\xc5\x19\x1f\xdeH\x9f\x98\xa9"\x92T\xed\
xc2X\xb7\x9f_\xdf\x9a\xa9+d\xfd\xbbt_\x8c\x92x\x05;\x1a<\x1e\xd6\x9c\xf1\xcc\xd3\x17\xb63\xea\xf8\x8e\x12b\'\xd3\x81d\x88\x9b\xb9\x90\x0bu\x87p\xca\x95\x9b\xc6z\xb4\x8a\'\xba#\xfc\xba\xf8)\xac?P\x99\xba\\z`\xab_\xd7/Q\xad\xcd\xef\xfc\xb22\xfc\x9e\x07\xe9\xedM\xf9A\xf1w3\xa5\xde]\xc1n\x87\xff)\x98\x11`\x84;\xee\xe6_\xe1\xe6K\x9d\xa1@i2\xf9[\x08\xc1k%\xc3\xad\x103\x1c\x13\xb2\x1db\xdb\xb9\x0e;\xac)O\xed\x10\x95@\xd8\xb1\x1d\xf0\xf4j\xfc\x94\x84?x\x9ef\xbb3\xeb\xc3\xe833\xb7\xd1\xb47\x0f\xc5\xa2#\x8b\xe9\xf3F`\x99X\xad\xa2=\x13\xe7J\xc8.\x82\xf4|k\xcd>\xaa\x0e\x96\xb1\x04\x7f>\xf1@"<G\x94\x97\xaf\xe3\xa9.bG\\\xb7\xe6\xe6\xca\xa4^P\xba\x9f\xca7U(2\xa6c\xe6\xe9\x18k\x88PG|\xcc=\xd3\xad\x15\xf6\xfc`\xb2\x19\xf2\xb5\xac\x1a*\xed\x02g\xa3\xe4m\x1c\xfd5kJ\xee\x05\xaaV\x9e@\xc4}\x8f\'\x0b\x1f\xddi\xd7\x19W\xef\xeec2|\x96\xb0\x17@9hP\\p\x88lY\xd7\x95\x8b\n\x83i\xa4\xd0`S\x10\xfe\xef\xe4\x03\x82\xd5\xdf2\\t|\xc5S\xb3$m\xfduRQ\x89J<\xae\xa8\xb1\xad\xabO2A\xed\xcb\tp&*\x80\x92\xa9\x98\x16e\xf5c\xe8\xef=\xac\xc2\x85Ly\xfd\x05~?\x04_\x0b\x07\xd0!\x9e/@/jT\x1f%\x7f\xa7\xd6\x16\xaa\x04\xf06\xd5\xa6\x80\xd5r_l\x95\xd4\xbcq%\x9eK\x1b\xde\x93\x83\x8d\xe5\xc3T\xa0e\xf5:\x11\xcb\x83\x80\x19\xf8t\xed\xd2\x8e\xbd\xb4\x88\x8ak\xf0A\xf3\x91\xbd\x138\xd7\x97\x87\xd1\xce\xf7\x9f\\\xa0G\xf14P:P?\xab\xc4\xec\x04\xc1\xe6<\xbc\x03\xe7\x028\xa2\r\xea\xe0\xc6\xee\x84\xd5wZ3a[.9\x19W\xd4\x16\xfa\x88p\x1f\x85\xa6&\x037\x8f\x1c\x0b\xd8\xf4\x06\tZ\x95\xbc\x9f%$~\xa2\x14!\x12@\x9a\xee\xff\x9c\xc3b!\xe8\n\x10\xf6\xb7\x08\xa2\xb2\x17[\xd8L\xf4\x0ec\xf9\x12\xce\xda0LAm;\x9c\x1fZ\x1c\x88"!\xb6\x12si\x90\x1b\xc8\xc3\xab\x85\xeb:;"\x85\x8a\xdf\xcc\xd0\x94G\xb0\x04\x95\x8a\xe4\xd1\x9e\xe1QN\xc2|\xbf\x98\xe8\x9b\xbeyA\x1e\x99\xb9\xfe*\xbc\x02,P\x9c\x90r7-\x04\t\xc0X\x15\xbf8\xb5\xfdN\xc2d\xe9\xf9\x97W\xca\x0c\x92a%\x8f\xa9\xb6\x1c)\xc4\x9b\x07S\x7fd\xa6\x158\xc15\xf3\xfeX\x99;\xb1K+\xdc\x07\x16\x91[\xa1p\x99,px\x7f\x00\xaa\xb0\x12\x1b\x8d\x1c\xe0;\xf2]\xce\xbf\t\x18\x04\x03\xc8\xb5\x1fy\'\'O<\xa9M\x90k\xfb\x1f\xc0;\xd3n\xac\x14JD\'\xe6j`\xcb\xc3J\xc4K\xd6\x8f\x8b\xf2D\x1c\xb6F\x86:\xb5#\x17\xc6\xed\xec\xb8\x19\xa8\x92pv\xf5\x00\xe9Xy\xf1aV\xa1_\xa1p(\xd7At\xa8\x18\xe7\xd7Oe\xaa\xe1\x82\xdd\xadBy\xbci\xd0(d\xe2\r\xc3Ut\xef\xc0r\x08_\xdbZ\x87\xe4k\x86n\xdf|\x04.\x88\x9c\xb8z\xd3\xdcK\x0e\x08\xab\x96\xdf\x0e\xc5C\x81Y\xe1#p\xaa\x04\xe3\xc4d\xaf-\xa2%\xa6\x93\xfcP}X\x04\xd9Z\xa8]\xc8\x87\x07\x98\x96\x89\x8d\xc8d%K\xee\xc1Zgg9H_\xddQ\x8d\x88\xa1U\xf5\xf2\ra\x99\x83\xdf\xe2:\xa7fL\x9a@\x0f$\xb9\xb0\x87\xdb\xf4\x1f\xee\x81\\+\xb6l\xfd\xbcw6\x8bc\x1a7"V\xaa\x12FVV`\xeb\xe4\x14~\x91i\x1b\xf5\x1e}\xf8\xf4\x1b\x86z{\xd46O\xd8\x8d\x88\xee\xd4\xac9\x05\x93\x99v\xae\x0cQ\x07p\x06\xb7\x1d6\x17\xdaIp@\x19\xacb\x89\x12\x19\xb6L\xe5\x0e\x18\xe8\x82\x9c\x9bNN\x9e\x9f\xda\xba0\x08\x06\xd3\xa2\x9f<\xfd\x1a\xda\x9e\xfec\x01; 0\xc2\xcb\xac]\xfc\xb0\xab-\x8a\x1f\n\x02\x90\x90\x06\xcb\xbf>\xac\xda"\x91+\xdd\xad\x8a\xf4\xde\x98\x7f\xc4\xb9\xcf\x15|\xd3\xff\xb5\xb3W\xf6\xa7\xf4\xa4M\xef\xd1}\x01;\xad\xeak]\xb2q+{W\xbc\x02_\x02D\x8bV\xfc\xda\xba\x02T\xe5\x94\xc8\x90\xaf\x1d\xca\x96\xe2\xf1\xd3+\'*\xebG+\xe6\xea\xaa\xe0\xe5\xed\xe7\xcb\x8b\x07\xa3L;\x9e\xf2\x81\x1e\x90h9b\x11(\xbd_2y\xa5t\x9a\xc1\xc4\xfdb\x92|\xabo\xb8\xa7]1T\xea7\x1b\xda*\x95\xaan\xe9\xd8\x03\xd9wX\xae\x025\xdc&(eC\x1b^\xe6:loq=\x1a\xa3\xa4\x1a\x7f\x13i\'&Y\x85\x0c l\xfdz\xac\xd1m\xd5[8\xc5\x11O\x15\x0b\x803\xac\xc4"\x9a;\xfc\xf9\\\xc7W\x89e\x97\xff5c\xd3d\x9f\xf6\xec\x80\xb0\xaa\xd5\xed\xd8! 
K\x0f\x8c\x88\xc6\xb8\xd4\x06\xe4\xa4\xa3")\x15\x8a\xa3\x8am<1\xe0\x07\xe57\x11\x83\x02\xe1k\xcad\xb0;\x99\xad\xb1\x13 \x07\x08\x99G\xcav\xdd\xccO\xf3b\x15\xae\xb2b\xc9\x074\x93\xea\x9d \xbfb\x05\xdb\x11\xe1y<\xb4\xad0\x1cO\xa8XY\xb6P\x90\xccH\xc4~L\xaf\x9d\xf3\x88\x11 \xd6P\xfe3\x14!\x9f;=\xb0\xddC\'\x1b\xda"\x93\xa0V\x0b\x82\xd6fl\x93\xfd{\xf6\xa0p\xdaH\xe1\x82#\x84\xcf\xc0\xf5\x86\xd1\xf1\x18\xea\x1e\xc9\xfb\xaf\xc7v\xa9L\x90\x92F\x98\x11\xde)\x10\xb1\x0eG\xb4\xd4\x89\x08\xd0\x8fQ\x05\x9fp\x92\xf2\xe1q\xbc\xf2.\x08`0\x82\xa1\xa88\x05KmL\xa7\xe6\x9f\x883P\xad<\xc6\x1e\x01\xe9\xaeA\x05:\x1c\x11\xb9C\xcc\xab*C\x19\x82\x8f\x9ep!\xf8\x9e\x8dp\x9cV\xe4QF\x0c\x085k\x10\x84\xb1\x0f\x9a\xd8\xed\x82\xb5\x03D#\xcc\xd0\xa1\x80r\x97\x0fH\xd8\xb7\xc0\xd3\x8bk\x8e?\xd8\xf1\x08\xc5\x9d\x038C:Rb\x1b\xd6\x91\xde\xe5Y\xdb*N\x86\x81\xcf\xd8\x1f+\xe2Q\xcb\x01\x1eR\xadP\xb2\x01G\x93\xe8X\xfc^5Mwv7\xb2\x8f`\xd9*\x84\xf3#\xe4\x97h\xe3 \x1a\x83\xf1/a\xfc\xebD\xe9Z\x1bND\xaf\x0cs\x0fd\xba]\xf6^\x04\xc9\xe0o4\xe8\x08\xa9\xcb-\x86\xc9\x95Y\xc62<\xfe5f\xd1\x02\xa9G\xc1[1\xb6\xf0x\xb1\x85\xe0\x1f\xca\xbe}:\x9b\x943\xc2\xc6\x0e\xfe\x8a\x80\x17#\x9ci\x88\xab\x15\xdb$\xddm\xae\x0b\xa8\xe9\xa6\x16I\xfai\x82\rE\xb7\x95x \xd8J5\x8b\xb9\xe0\x16F\x17f\xf7PL\r\x93\xf7\x07\xa3\xd8\xd8d\xba\xb5s*\x0bX\xde\xd7\xc3\xe5=\xeb\xbe\x9cM#\x9a\xeb\x1e\xdf\x9dz\xf0n\x17[1\xddP]\x8c\xe8\xeb"\x0c\xd2\x93A\x82Vw2u\xd121\xff\x96\xe9Cr\x8eO\x05`\xc0::@\xfb\xfa\xad}.k\xf44\xd3\x99m\xcd:z\xa2\x08S\xf1\x13\xcc\x1a#\xd0\xe8c\xef\xec\x9a\xd6\xf3D\xe0Y\xb6\xf3\xda\xd2\x05,C\x83\ny\x9b\x9e\xd4\xcf,\xe0\x0e\x14{1ig\xe8@\xee|\x9a\r+\x0b\xaf\x15;\x14Q\xff~\xcf\x14\x07W\x03\x86\x00}Yy}\x8e\xebA\xe4\x8fM\xd5\xe5_\x90\x84Uo\xcd\x14\x06\xa2\xd5\x97\x04\xeeVw\x10\xcbGw?u\xb6\xa9\xc8NWg\xc8A\xbb\xfb\n~\xefG%\x9f\xd5\xaa\xfaV\x7f\x16\xaa\x044\xaa&\xd6V\xbe\xb1\x00\xa5&\x1f\xb1U\xb3\x0b\xfc\x05\xd0\x1a\xb6b1C\xdd\xb1]Z5\xc9\xf6\xb1\x12\xc2\xca\x1cDM\x92\xeb+\xbb\x84\x82\xcf{V\xc0\x0fk\xe7\xc9\xd9\xe8\x1f;\x03\xe5\xbf \x0c\xe8\xc1\x17\x0bC\xbb\x02E\x0e\xf2\xf6\xd5\x0cT\x86K8\xc0\'\xd7\xdev\xe0\x0e@\xa5n\xb7\xd5P\x80ut\xcfy\xe1\x8f\xcc X\x98\x04&.[P\x8aOb\xb2\x8a<Y\x12Ym\x19\xb0\xc1\xf6%\x9f\x16l\x0f96\xf8\x14.\x8bgd\xfd\xe3_\x9d*P\xe5\x00\xa2\xeb\xc8ZK\x04t\xf2\x1c_\xa8O*\x9b\xd1o\xe2gC\xf3\xcb l\xe4O\xc5\xc4\xa8\xc7~w\x9f\xae==\xec1\xc2\xf5MsKU\xb2\xba\x89\x8d\x85\xaa~!\xcc5\x8c\xa7\xd6\r\xfc"\x01\xb8z5\xfd\x18+\x02\x87J~\x10?l\xe4\x1co\x05\xdb\\\xbeR2\x18+\x03g\xe13\xf6T\xd4\xb7\xda\xd4-\xbcQ\xf7\x81\xcf\xaa\xa7\xd3\x14\xe9\x90;o\x15K9\x8d\xd0,j(\x06\xba%\x01Bt#q\xd9#\x15\nSu\x82\xe1n\xe2%\xf474_\x81\xd8V\x0f\xee\xdd_xm\xd7[#\xd8"\xb4\x19\xef\xdd\xed=z\xa5\r\x93\nRs\xbe\xed\xc4\xc77\x89\x99Z\xdf2\xd7\xfaJ\x10vZ\xf0\xe7\xdc\xf6\x1a\x99q\x992\xfb\xb0^T\x80\x95\x08J&Lt(\x86/\xeb\xbe\xe8S3\xa6\xd5\xdf\x07\xc2\xbb\tq#>\r\xbe\x80\xd3\x15L\xa24\xd5,\xd8\xeb\x9a\xec\xc3C\x0b\x18\xb8a\x85\'\x86Nu\\\xd5FXzb\xa9f\xeb\nO\x96\r\\\xd8\x1d\xa4\x90\xe2)\x02\xb65\x17\xfe\xd7\x0e8\xbb\xd8\xaa?\x80\x7fS4\x1dt`^\xb14!>\x00\xc3\x90\xdc\x0em\xe8\xd9\x17\xbe\xdaJN/\t\xd4\xfb\xb6D-\x99\x94xy\xb25e\xf9I\x1d\xb3Y\xeb\x89yJW/\xfd\xbc\x9a\xc6\xda\x9d\x8ag\x06#\xd8\xa8\xcb\xae\xe9\x86\xd4\xb6\xd2\x99q!\xab+!\xf9\xb9\xd6\xeb\xa3U\xf34y\x17aS7\xe7\xe2jw\xc9~#>nh\xa7\xa5\xc8\xd1\xfc\x00\x80\xaf\x88\xee\xa07 
\xf4\x18\x1d)\x8b\x8fS\xb0\xa4\xe1\x7fB\xf7\x13K\x16\xaf\xde\x13\x84S}\x1a\x12=\x9e0Kd\xfb0\xefD\xac#\xd7\x8f\x9c\x17\xf5\xe0\x84(\xfeU\xdd\x04\xf0\x91jV\xf6\xb50+G\x87\xca\xd8\xb4;\x02\xc5\xb2mf\xc0\x11@\x85&\xf0|}\x16\x8e\xd2k`C\xe1\\7yk\x7fU$H\t\xb2\xb1\xd6\x9e\x057-i\xaa)Zt\n\xf5\xee\x90\xac \xb2^\xd7\xc2\x90\xb7\x8eY\xd0\xb9\xca\x84\xc8n\xe1\x12+5\n\xb4\xc1\x7fZ\x19\xed\xc2q=\xab\xab\xb6ck6gT\xbe\xe8g\xc7\xb1U3"\xfe\xc6\xa5\xa1\xc2\xc3\xd4\xec|\x0b\xe2\xa2c\xb0\x1b_\xa9.\x89B)\xd1m\x16\x9f\xceW\xbe\x08!\xc5\xce\xad\xd6\xf6\x94\x86\xd1p\xa2\xca\xe6\xd4\xd8\xd36\xda]L\x19\x10e\x90u\xc6OCWZ\xd7\xb7Y\xd5\x83m$\x8d\x0c\xdf\xbamp>\xb2C\xbc\xcf\xaa8\x1c]8i\x98\x85K\xe7\x15\x1e`\xdf\xa2I\xa1\x12N\xd7\xa1\xf6\xd9\xe6\xc3$\x02\'pp\xe9\xeb\xad\xed)\x1d\xe4fp\xdcq/u\x17\x8a\xa3\x00\xe3Rw\x07\xcf\xed\xb42l\xe0\xc9\x05\xed\xa9\x9a\x9d\xfc3}\xc4C\x83g\x15\xbcR1\x13\xeb\x18\xe3R\x8b=8\xbaoF^\xe9\xafT\xa3\xbc\x94\t[=\xe0\xf2\x14b\x11\x92\x15^#T+&&@#\x8c\xa3oo\x01\x9a\x91\x9c\xdf\x9a\xd7\xcc=\x8dTLxf\x11\xe2\x02\xd6jj)\xc4\x84\xc4\xd3\xf6\xbdyl\x88\xc4Z\xe6\xbf\x14\xa4\xa7v\xc4H"\x8c\xed\x88\x14\xe5\x8ez\xa7S>\x95\x9e:GkC\xae\x1c\xf7%\x00L\xf2\xe8k\x0b\x03j\x94o\xda\xfcV\x18}X-\xa2\xf3\xab=&\x8bu\xfd\x8b!\xc4\x8ck\xaa\x85\xb7\x7f\x95q\xa2\xec^\x16v8[6\x13\xb2l\xf0\xc4\xae\xc7\x15@o\\\xb05t\x87%\xc6f\x98\xdc;\xdcq`\x86\xa9\x16P\xdb\xa0\xf9\x93\xf0Yb\x07\x8b\xf0x\xdb\xfc\xb0G\xec"_5\x1b\xfd\x8d\x1d\xd1\x9d\xa2\x02\x85b\xad\xe83\x0c.Z\x15 \x1a\xf0\xddab\x83\xce\x93\x96\xccf\x927\x1e+\x0e\xac\xc5\xd7,#\xf6q\xb3\xe5\xa0\xfdk>\xb0\x03\xd7\xa4\x89.\x14U\xc4\x83&\xde\xba\xfd\xf9\xf4\xe6\xc5\xdc\xb9rTl\x1b \x8e\r\x031\x86\xf8b\x82\x91\xd5\x80S\xc8h;W\xf4\xda_\xfa\xb3\xefbzFO\xc7\xd4\xd7\x9c\xbcG\x84;)\xa7\xd37\x05\xa0\x01\xb2\xc97\x13\xf7\xb3\xed\x1d|\x14\xd3\xb0.\xa7^\x830Xnvb\xa1g7j\xe7\xa9\x7f\x99Ea\xae\x95\x8e\xab\x8e\x01o\x18\x07\xbf\xec\xf2\x88\x15\xa8_\xb2\x10\xe8\xe7\xb29K\x06j\xb5\xa9a\x93\xd1\x9f\xa7NM\x0f\xad\xfd%Y}%\xe0\xcc\xfcvY\xa8\x10\x89\x7f\xd8$\xf9\xd0w\xb9\xfdD!$\xee-\xbc\xd5\xceQ\x8bsa\r$\xfd\r}\x89o\x9a\xe3\xb1\x83u\xa6\xbcj\x00\x0f"\xe0\x00<\x9ed!aY\xf2\xfb\xd8\xe4\xe9\xe9\x00\xee&4\x17\x99y,Y\x0em\xedi\xf9\x84\x0f\xa1a\x7f/\x1c\xb0\xe2\xc2\xb5!a\xf3jV\r\x15\x89\xda:\xdc\xc636,\xbf\xf8,\xf2w\xe7\x0fJ)C\xc1r\xb95\x17__\xe8\xa6\x0c\xd4\x98\x00f\xcdM\x81\x80\x8f\xa0I8\xb9\x85\x8f\xfa\xa6\xfaK\xa0DK)_\xa7\x06U\xe6\x86%\x9f\xf3#Xc\xb2\x83\x1fL\x81PR\xbcWUL\xcbP0$\xba\xb3\x0c2I\xf8\xf0\x03\x16o\xad\x11\x95\xa4}\xfc]\x8d\xac)n\xab}3\xf3M\xeb\x0fk\xc9\x15@\xe1\x06g\x96\xcd\xad\xa0\xef\xb0C\']\x7f\xf9\x03\xfci\x00\xf7\xd8\x96%\xb0\x8e&\x0e\x17._\xec\xf8z\xbd\x8a\x88\xb9\xca\x02D\xab\x1e`}\x7f{R\xe8\xc1\x8a\x92\x9cF8AUm\x8e\x88\xe7\\\xca\x855C\xa0\xa0\x95/\xb1A\xd1\xb8Y_\xa3\x0c\x0bX\x16!\x8ak\x8e\xd6\xccd\xb3\x19>Y\xfd\xd9{\xf4E\xd1\x0f\xf1\xb8\x95\xc5[\xf5\x83s?L\x02u\x8aO\x95w\x92G}\xaa\xc8.\xf1\x90\x9c-?9\x84\x01\xab\xd6K\x8f\xb5;H\xcf\x89\x04\xec\xf5\xb3L\x00\xe8E\x81\xbd\x893\xcb\x9e\xe8\xe0b\xeaZo\xd2\n\xd5A\t\x08O\xafU\xabH\x83\xee\xf0*\x14nP\xe7\xa7\xab\xdflQi\\\x0808\xd8\xa5B\x12\x12\xfc\xb4\xf0iG\x18SpO\xab\xb6\x06\xc6\xa5\x8d\x07\xb8`"iy\xc1\x8c\xad\xc4\xc0&\x87\xb0{\'\xa9\x00\x10[pu\xec\xda\xa9\xfe\xb0\xe8\xf6\xfe@l)\x1a\x1e0\x86\xe1\x1eY 
\x03\n\x1b\xa8\xed?t\xdd\x9f\x7f\xd5\xa13\xf9\x0e\x84\xb4\xe8\xa3)\xc1Kc\xbb\x04{I\x9f=\xdbxc\xdf{\xd8\x87dl\x10\xe4\xba\xec\xc9[\xd5Da\xee\x1d\xb2\xf5"\xbe\'\x92\xaf\xcf\x0fA\xeflh\x80\xf7.\x9e\x93\x9a\xe5T\xd7\xf6\r\xedx\xea\xc1\xc6\xdew\xf5\x98\x95\xbe\xc7\x8c\xae\x97Ucp\x06;\x15\x9eK\xd4\xdc\xc9\xdf\x8c\x1f\xbcb\xb0\xa0L\xdau\xe9D\xefl\x0b\x7fYD\xcf\xcdHq\xef\xaa\x18^*$\xe8\x7fvO\x14<\x16\xe6a1+\xf1x;\xd1&:\xed\xf5Zy\xbb\xcbVT\x14\x89\xd7W\xd6\xec!\x87\xc4[\xd3\xe7h\xc8if\x95\xe9d*\x0f\x80\x14Gf\xa3\xde\x90\x00\x80\xca\x03tv\xe5w\x8a\n4\n\x03\xd4xVD\xe0 \xe7s\x9bc3*\xe1a!\xb2\x0ba\xeb\xe1\xaeP\xc7\x1a>2\x14XV(&\xacb\xd14k\x9e\x82I.-.\x9eU\x16\xb7L\xca\xfdcU\xea\x10\xc3\xe1\xee*\xa6\x1e\x0f\x01\xb2\xb5\x7f\xbf\xaa\x87&W\xab\xa4W\x16\xa8\xbak\xf1p\xe2\x97*\xfe\x88\xb3SV\xbe]B9\x18\x16\xb3\xbe\x0c\xcf\x1e(\xc5*\xd4\xf0S\xfc`\x8f\xd6\xaf?\xb6\xb1x\xc4re6\xb5p\xfaP\x1dpA\xa1\xad\x80 \xd0\xcb\xb84\xf1\xed\x18\x0ed\xf8\xf3\xae\xcf\x10\xb6\x06\xc1V\xd0ZJ\t\x9f\xdf\xe6\x1e\xb2\x13\x80\x15\x0c\xa9\x14\\{\xb5\x8a\x9f\xb6\r]{\xbd=\x81Z_\x03&\x19+\xd1\x9d=KW\t\xfb\xed\x87\x1cg\xb3\xe8\xef\xcd\x11\x7f\x80|Q\xd7GxB\xa3"B\x8b\x8f\x1f\xc6\xed\xbc\x1e\xad\xba\x01\xb1\xdcl\x06\xc6\xd3\xcf\x7f\xdd\x13\xa2W\xda\t"%[\xe7\xbf\n_\xf2\xe2N^~\x15v\xac8\xa7\x0cO\x84\xc2\xe0\xf1\xb6\x8b\xf4\x9d\x0f\xed\xb0\xe6\x1e\x9e\x93\xbc\x08\x0b\xa5\x85\x96\xa7Q\xcd\x1d\'\x9b%\x9a"\xfc\x11\xed\xae\xa0\x92H\xa6\'\xdb\x02c\x7f\x02\x06Y;C\x81rN\x17\x9e\x8ac#6W\x83\xaab\xa7\xd3D_\x97\xf4N\x9cH\x91\xe1\x9aj\xe4\x95\x146\xbc\xd2F\xed\x1b1\xb0\xb2\rYq\xbe\xb2S\xdd\xb5]E\xba\x05\xa54Z\x8b1\x98\x81V* au\xa3\xe5|e\x03\xc7\x97\xad\x821{zj\xbb=/6T9\xdf\xf6\xbd\xacMCQ\x04\xd5\xdb\xb8\xa68\x98\xc1\xda\xdc+\xed\xac:\xf6\xc4i\xd8\xd3\x9a\xad\xa2\xac\xbb\x1e\xca\x18\xc0n3\x10\x8dz=pW\x03\xd5\xb4\x1a\xf2\xaf\x8f\x9f.\xa9\xc1Ql\x86V=\xc0m\xf8\x9c\xb5\xea\xbb2]\xdc"\xc1\xef\xfc\x83z\x1f#\xcf/\x1d}\x0f\x03\x99V\xaav4\x15LI\xf4\x11\xef:\xa4\xfe8\xc43\x15\xd1\xdb\xe4\xb5\x08\xfb\xb1\x1a\xf0\xa8\x85\x12~\x11=\xc7M\x02\xd5\t>\xca\xca\xc3\x07\x82\xe6\xc9\xf2(\\U7\xad\xa2zQN\x96\xc7\x85\x9d\x16l\x03\x97\xbe\xb5%&|\x12\x81K\x99\xb3\xaf\xfc\xa7mP6\x89\x95c+x0o\x04%`3tjy(\xcbI\xd6\x01p6\x1b0C\x1b\xc9,\xb9x\xd6}\x14\xb70\xfbgz\xadc\xc34\x0e^\xd9f-\xf3;\xb0\xc8d\xde\xfdc\x1eF\xb6\xbb\x0b\xb1\xc1\xdar\xf7\x9d\xa07\xe7\xfb!\x9bXJ\x04E\xe9;!\x1f\xfa\x98}Q\\\x14\xdf\x04\x11Z 
f\x90\xcfVQ\x07.\xb7\x05b\x06YQ0Qq*\x01\x85\x8a\x9b\xbc\xe6R\x8a\xcbx\x89\xdd\n6\xf7u\xf2Erx\xc7\xc1\x9b\xd2\x08S\xae~V\x8b\x86\xa6\x82:wJ\x88\xf6\xa6E\xd6\xa6\x95jgCvF\xdc\xe5\xc5\x92@T\xde\x880F\x0e\x0bq\x9e\xf4\xa5\x90i\xfa\xe0N\x1c\xefojI*\xa3Aj\xf1\x15\xb4E\xf8\x87\xb6x\x1c\xa0bg\xf5\x0bT+1\xbb\x9e\x11\x80d\x95\x02\xc6m\xae\xa2\xf6\xb6\xbb\xdd\x0b69\xbf\xa75M\x12\xca.\x95\xb4\x1a\xaa\xa9\xa3\xcdiy\xddB?.\x82R\xf6\x01\xd9\xeaW%\xfd\x11\x8e}/\xb34\xc9\xb2\xb9\xbb\x02U\x8e0[\x87vk\xd5\x15M\x8cG\xc5\x84\xd2\x8c\x9d\x8b\xb0\xcd\xf8\x10<\xad`\x9b\x12|\xa4\xa7^\xbd\xd6\xed\xed\xf9\xd0M\x1d?vU\x93\x19\xd4\xc4U9/\xfalGQ\xc7\xb5\x02G\xaabj\xc1\x0b\xcfD~\x8e0\x8b\xc6C\xc2\xa9=s\'t\xca(\x1a\x86\xae=\xbe\xf7\xe2\xa8/\x0ckY\xfdr!\xc9\x1d@\xdd\xc0\xb8\x1d\xb5\xa4\xd9\xb9\xfd\xa1\x9fU\x07\xef?\xf8\x7f\x98{r\xa1\xfcF:\xc2mM@]&\x14h\x9d\xe5;\x85\x88!\x02\xc8aK$\xabO\xc0\x06\xba\x10\xa5\x85*d\xbb\xbe\x8c\x96\xf8\x13L\xa4s\xc9$\x02\x03O\xf1\xce\xb8\xd68\xc0\xc4\xaa\xc0\xd5\xf6\xbeF\x0bP\t\xf2b\xce\r\xabhY\xc6}~\xc7\x8ePY\x92\x13\x07d\xb5!\x0e\x01\xa3\x9b*(\xc2\xach\xbc\x06\xc4\xeb:{\x03\x82Q\x80\xf2[\xd6>j\xefwgT\x03\x10^~\xb2\xa9k\xf9K\xf1\x1b\x14\x18\x08`!DZ\x16\x87|H\xd3\x93\xe9K\xea\x02\xe1\x0b\xc40\xb9\xdagsb\x93Y\x8d\xeej\xad\x9bB\x97\x96\x91\xa3x\xbe\xb5\xab=X\x9d]\x8b\xa4\x19\xe3\x11\xcb\'\xbbO\x1f\xb5\xa6\xfaE\xbc\xd1\xeeNm\xe1\xb3\x92\xf5\xdc\xe6T\xcd\xf0I\xf21W5V\xbf\x1c5\xa2=<\x1f\xc7X\xe9\x16\xa8\xc2\xa1l\xbc\xc9\xeaa`\xab\xfc\xffx&%B\xbe\xaan\xff\xf7\x1c\x94\x90\x01\xfa\xe6x\xe6=/\x0e\xdd\x1d\x95\xaf\xcb\xc6\x8a\xa2\xf8\x99\x8b\xb5qP\xa2L\xba\x08\xe6J8\xfbU\xfe\xde\xeakI^\xc21\x15\x8an\x19\xd2\x96\x95<I\xf3\xc1ND\xc32\r\xa6\xd0+c\\\xfa\xa9\x0f\x88-\xa4\r\xe1^\xeav]\x83h\xf3G\xb9\xadE\xe8s\xb5\x16\x05O\x86\xa2\xa1\x98`\xacE\x1e\xa9\xc4VT^\xe1\xa9\t5\xea_\xa2\xb9\x96\xdc\x16\x08\xb5\x1e\xaa\x7fO\x7f!\xfb>\xb99\xb1\xcd\xddB(\xde\x1d\xfef2I5\x05\xa9\xfe30`\xaa\x96$:5\xad\xc5\xaeI\xe0\x94\\.\xbf\x17\xf8\x94\xba\xaf\x18*\xc2\xa5\x06;\x9bK\xe3\xb4\xdb\x13I\xa5\xf1\xd6\x8e\x16acu\xf6\xac\xfb\x94a\xa9\x13*Y~\xa9\xcb\x88\xa8\xfdK\x1d}\xf4\xc4]\xd7<\x13(\x15\xa1\xcc\xcc\xff\x06ZZ\x9a5\xda\xbe;\x8b\x9b\x8dV\x83\xc6\x8dg\xe7B\xb9g\x06;$\xb08\x0c(0\xb9NiNUD8~\x8e\xc0\x1e\xc2\x80\x19\x8fU\x15\xb9\\\x91\x83&\xf3\xec\xa3\xc0\xce\x00\xfd\x01N\xf22\x87\xc0+x\xdd\xed}\xf4F\x01r\xc7\xed\xd0\xd1\xfcF\x12\r4\xe8.?\x00\x9fi\xbbt\xcf\t\xd7\x1b\xb5\xde@\x8e\xbe\xb4\xec\x9a\xbdm\x9d\xbb\r&\x16\xd2\xa7\x1ej@_\x16\x82\xf82\x81\x0et4\x9b\r\t\x97\x91\xdc\xfeEL\xb0D\x06\xecx\xecm\xf5\xf0\x9f8z\xef\xfe\xd1>M\xf8m\xae\xd5/5"\xd5\x94DvA\x02\xfa "\xeb\xb9\xef\x13\xfaa\xe5\xbb\x1b>\xbc\xd4\xfd\x83\x01\xae\xca\x9ek\x8c\xc9\x05\xe8\xe5\xf7\xda\x0c\xa1\xa6G+\xab0\x013\xe9\xca\x92\xaf\x9b"\xf9\x98q\x9d\x1c\\D\x13\x10\xae%\x83\xa6X\x0f\x0fO\xdc\xff2"_\x98\xaa\xfb\xf4\xf7\x13\xc53\xe6\xe9\xc7\x19\xbc 
9O\x9a\xd5\x01\xf9Pd[\xf3\xbc\xff\x00\xfd!\x8e\xe7\xcb\x89\xb7\xc0J\x8cb\xe5w\xc3\x92\xcd\xe2\xf1VW\xa6\xfc;\'\xe3X\x8a\xa7ne\x86+\xaaE\xd5<\xe1>q\xf5\x87\xd4\x0e\n\x11\xc5\x7f\x0b\xa3\x0fY\xebxY6\x80\xdfIEH?A\x0f\xc0\xf6\xa2\xb9\x1fUf\xa9\xfeZ\x80\xb8+\xde\xb5\xe4\xa5\xdf\x89\xe1c.z\xfd\xe5\x81\xfe>\xdd\x90\xe7\xf5\xd1\x8cz\xff\xf2\xd3]s7\xf2\x16\x0c\x18\xd8j\xf10\x87?[]|\xfb\xe2u\x86\xf8\xf6J\r/r(-*\x1c=\xa1L\x92\x1b\xe3c5\xa6@\xa0\xa4<$\xbfW\xb6N\x08\x9b\x81\x92\xd2\xe8\xa6\xf4\x0bd\xfc\x8aC\xf7v5\xd4\x166\xf6\xcb{\xf9\xe8\xc7\xe2XZ\xe2\xbc\xec\xd57\x8e\xf4\xfakd\xe6\xb0\xc0o\x85\x89\xca\xbcy\xdfJ\xfdX\'+\x19\x86\xa2,A^UWs\xfe\x85mM\x91\xd9\xc3\xd4\xef\x89->\xf6\x1b\xc8\x13\xd9H\x87\xf6\x9d+\xa3\xdem!\xa4\x10\x9fB\x14`8\xa5\xee[]\r_\xbe\x979I \x94\x8c\xe4\x9c[\xd6\xce\xb7\x83\xdf\'\xd4\\\xe6C\xe7\xa0M\x80:\xd0\x94{\x87\xee\xef\x87\xd9o\xb0R\xa3\xc5\xd1\xdb\xd7\xc8\xb4\x83\xba\xab\xee\x04\xf7\x98\xc0\x815\x05\xdet\xb2\xf1$$\x99M\xe5\x9d\xc7\xcb\xa5ZPN\x85\x04\xbd\xd2\x1e\x9a\x01\x8e\x82\xe5\x0eH\xcc\xf8\x19n\xf6\xb9\x7f\x9ck@5\xde\xef\xfa\xf3O\x85\x95P\xb6\x889\x9f>\x97\xf4N\x0f`\xb4^\xe4\x97\x8a\xba\xd1\xfbL\xcbF\x06\xd0H\x03K\x13\xb7FI\xd4\xdc\x08(\xef\xde\')\x07\xe1\t\xcc\x03\x8b\xcd6\xc4\x8a$E\xe5\x9b\xd6"-P\x7f\x00\xf5^\x19Ob\xa4\t\xb0\xf9H\x87E6\xa3;\xa15\xf3\x13\\\xaf\xe0{\xd5\xd1w\xca\xc3?<\x96\x99e\xa5\xfe\x99\x9f\xe5\xfa;\xf8c9\xb4\x06<\xf1\x18\x7f|\xe5\x00\x02\xae\xb8c\xc7\xf7F\xd9\xc6\xe9\x9f\xdd\xfa\x9e\xdb\xdd|\xe3=\xe2;kB\x13\xac\x19\\\xb9\xc0\x14\x08@\x80\xda\xf0\x87\x16\xe95\x9f\x8e\xbe\xd0J\xba\x8fn\xb8\xff\x11\x98\xb73\x83jjsor\x80\xd2t\xe8\xe8p>\xd5bO\xa5\x99S|X/\x93\x93\x1d\xba\x99\xaa\xb2\xf3\xbc\xf0\xdd\xf0\x1d\x04\xc9\xf1\x1d\x8dR\xd0:\xc6+\x9b\xba\xc2\x9e\x84@H\xc6\xc7\x14\xd0\r\xb8{\xfa\xa1\x8b]\x89O\xd2h`\x8fYE\x8cClb+\nB\xd6E\x06P\x85\x8b\xc9\xe1\xae3\xa0d\xb6F$(\xf9\x17\x97r\xb7%\x11<Iw\xbaO\xbf<\xfc \xec\xf2\x92\xb3w\x0e\x19\xa3\x16\x89m\xdc\'\xe5\x83Y\xf4\x8a\xdc\x84Q\x1b\xf8\xe8\\Z$\x99\xdeZ\x84,4\xa5\xf7\xb0%\xc9\x7f\xe6pe\x9a\x17\x025t\x1d\xd6\n\xc1 \xde\\`\xde\x81F$\xe22F\x1b}Xy\xe6\xcd`.$\x8a\xa1T\xb0\xec3\xa2\x9c8\xe8W\xff;|}*vV\xb9&GS\xf2\xf9\x06\xdd\xfb\x1b \x11\xb2\xcc\xda\xe0:\x8e\x95X\xff\x97\x1c\xb7\x00\xe2\x99\x91\x17r\xe5%\x14\xb1x 
+\x99\xf4\xaa\xc7w\xa9\xd7\x99a\x02no\xeb\xa1\xe0-\xc9\tX\x92H\x9b<\xfe%C4\x1b\xbc\xec\xf6\x04\x19\x94\xe9\xae\x18\x99\xdd\xf72\xaa\x90\xb2\x97z\xfax*\xd5\x06\xa0\xd3=}-\x1b\x02\x11\x14\xc9\x7f\r\xdc\xb3j5\nRt+\x89;\xd1\x04\xb1A\x1a3\x9f\tL\r\x7f\x9d\xe9\xc4\xca\xfd\xfa/\x12x\xa0\xedx\x80\xa0\xbb\xd0\x87S\xf8@m*\x18\xb6(\xd79\x1c\xe6o\xefq\xdb?+\xd26`\x9d\x00\x97\xb8\xceI:\x95\x80\xb4\xd1\x86\xa0\xee\xc8B\xa0\xf1yw\x04>&\xb9.\xce\x9f{\x9e\xec\xed\xff\xc2\x01\xd9p/\xb6\x8c\x83\x87\xae\x1d\x08Mg\x03\xe3D-\xec\xc4\xfc\xf3b\x95`\x92\xc3\xe1\xf3\x9e\xb7\x1a\x13JD\xc6h\x7f\xa5?\xae|\'\xfa\xe56\x1a\x8eI4\xdb\xf0\t\xc9;\xdb\xdd\x9e\xcfZvO\xb5\xf7\xcek4\xb8\x13\xf07\xbf\x0en\xa5\xd0!\x1c\xb7\xaa&({\xb8\x97}\xbd,n]\xf7\xf0\x0c\xcc]y\xcc\xbcQ\xbb\x96Mx\xe8\x82\x1d\xd1\xd2\xc4\x13\xabEaG\xae\x0f\xcc~\x01\xc8\x1b\x0eJ\xdf\xcf\x97[RJ\xfa6\xe8\xc54\xfe\xa5\xe2\x98;/\xab\xbd\xf4\r\x94.\xd96\x8c\xb8\x18!\t\xdf\xbd\xb2\x91\x1f\xb23!%\xf4\xf6\xde}S.@\xb3TK\x04\x99=\xd8je:\xdbR\x8e\x9bZv~2\xd0H2\x05x{\x0f\x03\xab\x18\x00\xe2g\xc5\r\n@1\xd1\xeaD\x1a\xd1\x80\xac\xf8\x1cL\xe01>\xd8\xc0M\xff,\x07\xc5\x9b\xe1U\xdcY\xb4\xc8\x1c\x191\xe3\x135\x86\x08\xd6\xe4~;O\xea\xec\xba\xce!\xcf\xcaP|G\xd6<3\n\xe0b&\xb1\x9d\xd8|\x8f\x8c&*/\xb8p\xb4f\xff\x1b!\xd6\x7f\xe0\x1c>\xe2\x01\xdbh\xd4\xc9|Ju\x8d\xe7\xf7\x079g\x01zrV\xd9\x7f\x19\xef\xf4M\x81Q\x8dl\x9dm\xf2V\xf8\xb3\xf5\xf1\xc8\xf5\x8aPG\xabb\xa9V\xa4\x06Y\x01\x17\x95\x87F9\x88A\x9a\xc6\'\x90\xb5r\x08"M\xf3S\x1bfj\x8b\xaf\xad/+\xfe\xaf\x0bs\xfc\xabG\xc0\xcc\xa6^\x1d\xef\x82\xc2x\x91\xde|\x15L\xc7\xe6\xb3\x16\x86\xcb}f\x9d\xf8\x98\x84\x8f\xb0\xbai$p\xb69\x92\xa4\xebq\xa1<\x8c\xda\xc1\x9fD\xde\xc4\x92\x03\xcc\xf8\xf6\xe4\x05\x9d\\\xb1$\x82\xb4\x89\xd2Ds\xa5s\x9e\xc7v\x90\xb4"[\xe2\x8b8\xf8h\xcd\xe7\x89\x17\xe1\x8a\x10\x9d0\xbf|\xac\xf8\xe5\x84\xbc\xd2\xabgZX\x12\\\xb8\xd3"\x8b\r^-\'\x1a\xff\xbe\x93\xf7\xf6\\\xd3\x92\xf9\xadEk\x82\x9e*\x1e_\x16E=v\x86\xbd\x0fos\xa0(U\x9f\r\x86\xf9\xdf\xc2a1\xa8wN\x0e\xe1\xe2a.\xe0\xbe\x90\xd2L\x1fTa\xee\xeb\xec\xc9\xea\x81\x1a\x96\x90C\xb9\x15\xa8\xaf,\x1f\xcac\x13\x86\xda\xf8\x19){\xbc.\x86R\x8d}(\xb6\xa3\\~3 5\xd24\x95\xfc\x04\x86\xe3B\xbca`\x07k\x9e*\xd1\xda\xbf\xf0Kq\x80\xc6VkCFG\x94\xea\x94K}\n:=\xfd\xd2\x1d\x92\x85\xeb?\xb7\xa2lH\xe2\x86\xa2\x1a\x8b\xfc{\xec\x93\xde\xb1\xe8\x0c\x1aj\x18N\x86\xb9_U\xe7"7I\xe7\xc0\xd8\xcc\xe6\xae\xd7C\xcf\xc3\\3c\xda<\xa0RO{\xc9\xfc\xa2j\xc6\xabJ\x95\xa1\xe9\xe5{F\xf6[t\x0e\x0c<\xe1\xa5J\xdb\xbcj\x8a\x8b\xcd8p\xe7\xc2\xe7ws!\xb3<\xe2\xee\x83\xa7\xb0\x16\xdc\xa62y\xe2\x83\xd0\x9fh_~E\x15\xb4\xdca\xfa\xfe\x84\x17\xef\xa9xn\x91 o\xea[@^a\xdb\x87L\xb6\x10\xfeV9-\x1a\xaf\xa55D\xabnI\xfd\xc6\x98_\xa4\'vi\'s\x15!k<\x9f\xad\x18\xe8\xf6\xcfh\xa9\xe1=\x02\x83\x9fQ\x9b\xd8\xce\xc3\r\xb1K\xe4\x98\x95\xd4\xce\xe8\x10\xc0\xa3j\xcc\xdd\xdf\xfd\x91\x05\x11!\x1c\xa3\x89`N\x10+\x82\xba\xf6j\xae\xb2\x8d\x83\xe6\x1aL\x14U\x1b\xf7FE\x92\xe4\x92\xf4\x05\xda\xf8z\xe2\xb0?\x87\x83\xa3Y\xfe_]\xa6jZki\x93Q\xf7\xf3D\x04kj\x1bD\x974\xf2\x92\xe2h\x90.\x9c\xa8\xf8\xc0\x07\xd7J\xa8\xcc\x9e3\xf6\x1c\x90@^\x1c%\xa0Z\xd2f\xa3\xbf\x9d\xd6V2\xf0\xa9\xd7Lw\xf2~\xa2V\xb1n\x01)\xca{~J\x8d\xe8\x8b\x16v\xf6]q\x0f\x90~G\xdf\x0b(~\xc3r\x13T\xeab;\xd3V\x96\xcc\x1d0\xc8\xc1\xf0\xad\x81u\x92\xcc\x93:\t\x97\xce\x82\xba\xfc\x02I\x05\xcf\xaep+\xdcT\xeeT\xf0\xf8\xb1\xe4n)U\xadu\xf2kF[I\xf3~\x19\xd4\x11\xd2\xf5\xa0Mb\xcezr\xac\x8d1\xfa\x9a\xc6\x9f3\xe5\x95\x08l8\xe4\x91\xb0\xc3\xfa\x96 
&\xbd\xec\xd0Ww\xfd\xaf\x0e\x11J\xed\xb7\xc4\x08\x08Z\xa5J-\x1b\x02\xfay\xa4\x9f\x95u\xe6i\x88F[Gvc\x1d\x06z\xfbf#\xd4;_\x14\xcf^\x82\xd8\xc2\xaf\xa6\xd7\xbe\x0e\xeek\x85\x89\xa4\xfel\x00w\x18\xd6\xd5\x1c\x94\xf2\x1f\xf8\xad\\\x9aI/n\xbd\x15B\xd9\xac\x8b\x88^\xe4\x90H\x85\x03\xf3\xda>\xd1\x07\x11/\x04\x8c\x8eL\x83\xdd\\=\xe7\xfd\xd1Q\x13@8n\xee\xdfN\xcd\\\xaa\x90w\xfc\xd9\x16\xcfK\xcf\x80,\xdf\xc0\xb4\x8f\x9a\x9f/L\xd2_\x96\x94\\\xc4\x04\x0e\xd2\xe5\xb8zur\x00\xd2g\xd2T\xa2\xf3\r\x01$\xda\x0f\xe4\xa2\xbf\x8b\xc3?>\t\nEuJ\xfe/\xc5\xaa\x16W.\xed\x81\xf6\x1e\xdb~1\x81\x9a;\xd3\xed&\xa3w\x02g\x01`\x1b-;\x03qkr2\xfa\x9d\xc3\x92N\xea2\xf3b\x88\xc2\x87\xb9/*\x0fZ\xc2\xb9\\bW:\x04\x07\xb3\x7f\x85\xe4\xca\x1c\x87-@\xa5Z\x14\xaa@\x97\xbeXvZI\x10#\x86\x9a\xb3;\xb1\xddU9\x0b\x81\xa6\x0c\xf4\x8e\x16\xa7\x87uMR\'{BC\x13\xcc\x1c\xf1dU\x9e\x83\x16,\x01\xb1\x1f\x8f\x03tZ\x83}/Y1ZCZmG\xa9\xf8o\xc08l\xb2Q\x85r\xb3:\x13n\\\x17X\x1b\xfa\xf8\x9f)A\xbf\xf8\x16\x15\xf0\x07\xdepj\xde\xe7\x94\xd0N\xf9^"[Q\x93\xceOL^\xbdR\x83\xc8\xd4o>\x10OX\xac\xba\xe2\xf3z\x14R~\xeaA\xf2\xc4\xd0+\xa5ziU\x1br\xf14K\xc9&\x95{\xee\x88z\xde\x83hMybL\x9e\xd0\xca-\x04\x9b\x93n\xfd@?\x18\xf2\x1d*\xf0!\xd7\xd6\x8b?\x04\xacf\xe7\x06\x80kfd\x17\xd2\xc4\xaf\x97\x16\xe5\xa9\x9db?\xd2t\x82\x96e\xf0/\xab\x92\xd4\xc3\x97\xef\xdb\x9c\xef\x1c"\xd6\x9c\xe4\xff\xfc\x85@\xb5\xe4\x87\x86VUW\xca\x91;k\xc3\xa1\xaa?\x00\x81l\xfa{;>\xd3\xb1\x92\xda\xa37\xdfU\xd1\xb8\xf6\xb3\xcf\x10wY\x04\x93bW\xd0\xc4\x81\x8b6e\x9d\x17\xc2\xd6\x1f>I\x0f//QK\x97\x93\xfe\\\xdaE2;$\xd4/}\x06l\xeaZ\xfe\xa7\xfb\n\x99\xcf\xefO\xdcyOu;\xaeR\xfe\xc9}\xfbJ\xb8l\x03:rX\x0fjiYv\x1e]\xe9\xd9/\xd5}\x96\xb2+I%\xa6\xe9\xdf\x1f\xc3\xb5\x95P\x14\x1dl\xbe\xcd\xef\xa2\x8a=\x93\xabU\x93\xcd\xca\x9c+h\xfdz\x18q\x9d\xf9D\xc62x\xb7\xbf\t\xb7P2\xf3\xed\xe0\xf1\x05\x82\xb2\x02\xeb\x9e\xb1\x12A\xdc\xa6\xfc\x88\x9e\xac\xec\x9f\xba\xd7\x98g\xaf\xd4\xc7\x8eK\x9d\xc9\'P\xb6q\x0f\x07\xf8\xb6\xbe\xa3\xa8e4\x8fc\xfd\xc9\xdd9\x84\xf5S\xa0\xf5y\xe8\x0c\x8b\xba8\xab\xe2e\xdd\x1cmR\xf1u\xfdN\xafAT\x12\xeb\t\r\xcd\x16\x87q|\xc4\x03\xa2\xdd\xca\xb5\xd6v\x96:J\x87\x080\xc7\x15~\x9d\x05\xad\t\xf7\xac\xe5\xd1\x9d\x8c>H\xaf\x05R\x1d\xbd\xbbP\xcc:\x1e@\xc2U\xe6\x8d8\x9e\x85\x17@\xcdI\xa2\xd6\x8cz\xe5G\xd5\xc6\xb6\xf3=\x8a\xc8\xed\x88\x01\xc3\xa5}\xf9j\x96\x93\xa7\x9f\xe8/v\xe3\xae|X\xee\xefk)\x8b\xde\xb6"!\xe7\x05Z\x92bn\xd5\x0b>p\xa3\x06$\xee\xa0m\t\xfd\x97\x13\x9f\xd4\xa0\x9dr\x04\xa4\xd6\x1c\x1dnIl\x82\xee\xdd4\xd0~\x13i\x05\x9d\x89(\x9b4\xe8\x99\x02?K\xaf\x9a\xc3\xad4\x0b\xfa\x81\xa2[\xe5>=\xdd(\xcd\x8cT\xb8\xc9ya*\x9c\xaa87\x0f\x91g\x17\xb6v>5\xe0\xc7\x003\x05Q\xd1J\xe68\xdcR"\x9a\xa9\xb8\xa4\xd2u\xf4\x8f\x9c\xf54\xb8\x93\xfd\x94}\xff1\xa4\xb1\'"\xb9\x8bR\xe6y\xaa\xb5\x16\x1ba\xd7\\b5\xcf\xb5\xc1\xaf\xbd\xab\x8e\x1eFJ\xb4\x1e\x9aOp\xa3`N\xcc\x1b\x82\x8dl\x01\xf29\xaf\x03N\x13#- 
\xabI,\x9a\x8e\xb6\xf7\xdc)\x08\xb5\xad\x0b\xc8?\xcd\xe2\xf0\xc0\rl{\x9b\xe0\x8c\xe3\xfc\xd8\x82_\xef\xc8Y1\x17(\x16\x13\xba\x1c\x9a\x95\x07H\xae\x86\x93fo\xdf\xa4"\x8e\xe9d\xb6+\xd2\xa5\xa8gE\xe3\xd5\x00\xe0\x93F\x87\xb3\xc2z"\x909\xcd\xe4\x07\xf1}\xa5\xd7\xb2\x1c\xcb\xa6\xc4\xd8\x0b5\xa7y\xc5\x0f\xe2$\x8d\x17\x0b\x13$\xa1\r\xe6(?\xf9\xba;\xa6\n9\x11Q\xb4k\xdb\x94\x1e\xebI\xb3!\xec\x00\x8b\xfa\xf0n\xc9g\x16\x7fw?\x9e\xa8\x1e\x9d\x16\xbdO\x9d\x9a\x95\xbc\xe4\x8ew\x16\xbdZ\xb5\x82\xd3\x88lcp\xee\xdc\x83\xbbJ\xce\xea\x177\x8c\x04\x1e\x17\xaf\xc51\x01\x8eT\xd8\xf1\xe7-\xf2\x977\x1bXn\xd0\xcd\xd6K\x19\x8b\xf7\xe7\xf3}\xe5\xe9\x86\xee,/\xca\xf4\xbb\x18{\xc5\xbf\xf0\xc3o\x93\xcf\xbc5\xdf\x84\xbbn6\xd9\xdfS+N\x14~gzF\xbc\xccD\x89\xb5ax"J>\x1d\xe8\xaf\xba=\x16\xc6\xbd\xd6;\x85Sa~w\xc1\xd0\xdb]\xe1FT\x9c\x8e\t\x19\x0b7T\xab;\xeaE\x99\xf6\xb9\xb8\x84\xe0\xecT[\xa6\xc2z\xbb\x89P\xbbM\xb8)\xfc\xcbO\x92\x00\xd1\xe6\x83\xf7+\x02\xe0Hj\xc7?\xc70Pt\xfe\x12\xf6s\xba\xd8\xeeS\xfe\xb0\x81\xda\x8b\x15d\xb1\x90T\x0c2\x98\xe6\x959\xc5\x99Sa2\x0f\xf1*8\xa3\x8f\r\tF\x12\xe9\x00C}q6\x82\xd7\xcd\xa5a\xc8\x9e\xec}\xd8\xbb5\xdc\xc4l\xb2C\x85/\x82G\xdd\xc2\xdc|dr|\x7fR\r`\xc7\x13\xc9\xb8\xa2\xf2\xe4\xab\x1f\xf7\x92p\xd7\xdb\x1f\xb4\x86+^bef\x80\xceD\xc3\x1a\xdc\xd3\xffs\x98\x06\xd5\xce\xc9m\x8d@h\xe2\xd7\xd2\x94\xc09\n\xe0o\xb2\x01\xfb\xd2\xef\n\x8c#\x10_\xd0\x15\xe9P\xf3-\xdf\xb51`y\x88/@%\xaf\xa1W\xe7\x89S4\x08K\xad<\xc5\x98\xb4\xc0\r\xdduu\xa9\xb69\xd7V\x12\xa6Su\xcc\xa3\x08\xaa\xa4\x00\xa96Y\x7f\x87!\xd6b\xa0\xd1\xab\xd2\x84hci\xad\x05Yb\x9e\xa9\xaea\xb7c\xed\x81\xec/\xe5\xd4\xefi\x9fauT~\xd1\xccb,\xd9\xa7J\x10\xcd\xb1\x0cn\x94\x1ec1\xc0\xe5I\xa3\x9c\xd0"%\xc7\xf0\xf9\x887v\xdd\x9c<Q\xfb\x06CS\xb2\xe6\x9aWwm8g#O\xb8\x9a\xe3\xb6\x00@:7:\x18\xe0\x80\xebG0\x91x\xddHG\x9b\xe2\x9f\xb11\xb9\x91\xbe\x81\xdb\xc4#\x9e\x86\xfd\xff\xb9?\xe6W\xab>\x04rV(Zgo\xdd+^\x8f\x14\xbb\x0b\xcd\x82&\x8d\xfew\xbd\xe9\xa1\xa2\xc5"\xbcV\xc1\xdd:oTO\xaeA"3\xe5\xb1\xe6\x06\xa4\xe3\xb2CS\xf82M\x8d\xe2{\xad\xdc\xf046x\xfc\x83\xc3\xfe[y\xd6x\xdb\xc2\x80\x18h\xf6\x8c>+\xc1\xc2\x98`A\xb9\xb9\xe7\x96\xb5\x1f\x9cB\x93\\\xd3\xfb\xc1\xda=e-\xd1\xed\xbbz\xa4\x04\x9e\n\xcbZ\xa6J\x05\\\x05\xadm\xaf\x8e\xfdQ\x95\xa2\xb5\xc1\xa0\x9c\x9e#o\xeb\x88\xcb\xd7\x93\x9e\xb3~\r&\xacm\xfe\x85\x8a9\x11K\xb2\xde\xd9\x98\x96\xbbv\xb6\x06Aa\x8e\xb6\xd9,\xfe\xc0gXRj\x1f\x9b\xa2\x8d+\x11\xe3\xec{\xff\x1c\x9d\x95C;\x1a\xab\xa5\xcc\xa5A#\nY\xd2QBI]\x8dF>\xda\x99\xdd0]\xf1cZG\xa0|\xa5r\x10Jf\xa5\xde\xcf\x7f\x0b\xf4\xc8[p\x9a\xde6\xd1\xcc;A\xb9\x9d\xd6J\xd9\xe5}>G\xadW\xc62}V\xe1\xa1\x9c#\xf9\xb30\xe8\'\x93\xfa\xe5\x0fq^(\xc0B\xe7\x90+\xc2=\x14\xa18/&\xe5v\x1f\xa5\x1f~}\xfd\x8a\'\xff\xc5\x9e\x0f\t\x1dS\x0f\x16\xf8\xa8\xd2UY\x17R\xc8W\xde%\xea\xf8R\x96\xde\x9b\xd7H\xfa\xca+/>}!>\xa0\x1c,h\xb2H{\xcb\xb53\xcf\xbdz&$\xcf\x0fG.\x89W,0\x06\x0f\x17\xe9\xcc\xad\x16B\x85\\\xa2H\x01s\xa1o2\xff\xb2\xd5\x08\x1d\xfav\xdd}\xe7\x9c\x0e\x8eL\x08\x18\x91\x1c\x0c\xe2\x0e Q5;@\xd92\xd0\x8d\xf2\xe5Ob\xb5\x90P}!rA\xfe\xbf\xcf\xec\xf5\x0c|#\xdd\\\xe6\x1d\xb0Ih8\xed\x1dy#\xfcw*\xa7\xc6\xcb&\xac\xdaB\x1a\\U\x07>2\x99IM\xd4\x81Izk``\x11\xca\x1f\xd3\x00\x01c\xa6T\xb2\x7f 
|\xc7\x07\x91\xc3\x0c\xb8\x16M\x88\x86:I\xfe{\x8f[J\xd7n\x9bz{\xfa\xeb\xd2\xb2\xd8\xf3\xf1\x8b\xa9C\xed\xc4\x8dG\xe6\xb8!\x8d\x1e\xa9\xca\x90\xb4\xb7\xa4M%\xda\xb1F2\xac\\\xb7\x842\x04\x0bv`!\x91);\xd6\xf4lX\xe0&\x8f\x0e\xf8\x8eE\x9ds\xd1I\xc8\xd4\xaaw\xe4\xdb\xca5=H(\xceiQ\xfb\x99\xb4m1\xaa\xcet)\x86\x08wl\x90\xe52L:K\xaeO\xdb\x8cK\x9f\x0f\xc9W\x9c[\x10\xd3<Sg\x98o\x0f@\x1c\x96N\xa9\xb45G,%\x8b\xde\xc1s\x0cw\x05\x18\x0be\xd7\xd1\x84\xab\x84N\x81\x8a\xef\xc1p\xad\x8d\r\xdb\x9a*\xfaI\xb9(\x8a\xef\xeb?P\xbc\xe2r\xce\xe7\x99\x96!\x92er\x926B\xd1\xad\x83xU$+\xed\xbe]x\xf3\x90\xb6.\xda\tp\x0cP\x87\xc8\xfd\x10\xcc\xfc\xcb|\x8e\xee$\xb7\x84pn~\xbc\xd4\xa8\x10,\x8b\xe9\x10v{\x0b\xc2>\x99Sy\xbd\xebM\x0f?\xcbI\x98\x9f\xe4\xc7\x01\xa0g\xc0cU\xe6\x8d\x9fW\x8e\x05\x88\xc8\xcb\x8dw\xd1~\xc5\xe5\x85\xe5T\x88\xed\xbfM\xb9};[oop\x98\xf6|Z\xde\xce\x92\xb61\x9d\xc8\xe3<\\\x81\xf2\x1cF\xa7a\xde\xcf\xe0-\xe9)\xd5\x83e+\xfa}\xaf\x95P\xaex2#m{.\x07\xf8\xa2>\xbb\\T\x7fX\x9e\xeb \x15!\x89\x83\x8d=u\x97\x8dn\x13|\xe6\xb8\x8a\xfd%n\x89\x04\xec\x05\x9a\x84\xc27Q\x8c#D\xba\t\xf1\x8f\x00\xdc\xd9|hE68\xb4\x93\xbfmc\x1dK\x911\x03\xa4B\x11\x93\xa04\xd2\xca\xe5!D\xe7Q(t\x84\x8f\x04\xdd\xa0\x18D\x89f1S\x92$)\xf6\xd1\xf5.\xb1\x8f\xdd\xef\x87O8\xf9u\xa5%\x8c\x84c6\x1f\xbfC\x1aP_i\xd4V\x81\xa9T\x0e\xb8\xf4UBo\x95\xdea\xe9\x83,[:\xc2v>\xcf\xc9\x95\xa7T:\x887\xe2 r@\x8e\xaa\x08\x91>\x06\x89\xd5,x\xacm\x12S\x8d\x8c\xd0^>\x16H\xb31\x05\xa9\xb9)b\x06\xb8\xce\x8c\xae\xa6\xee\xe6D\xedQ\xa8O]/|\xd8\x97\x99\x8e\x10\x0e\x9e` w\xb2\x97@\xd0k\xad\xd6\xea\xe1\x89d`gOv\x10\x94^?4-o\x7f\xa9\xf5\xa2C\x1e%\x81\x08\xfcd}\xc4p\xc1\x15\x8b\xa1L\xb9/\x1aE\xd2}\xfaNT\x15\x16;)\x18\xad\xa7\x1e\x06\xb2\xe7\x00\xdd\xf9\x9dhc*\xb7\x8d|\x96\xbc\xae\x85\xbb>|\x15\xf3M\x01*\xfc\t\x1c\n\x10\x8eDG\xae2\xf7n/8\xed\xb6thE\xfb\x82h\xe5Da}lE(\x9aCg\x99#\xff\xe1\x9d\xe6\xe5B\xabI^*Uj\xda\x0c&\'\xdd>\x19XK\x9e\xcc\x19zI\x0b\x8a\xfa\xe3\xf7P(\xd8?\':\x00h\xe8\xfa\xea\x7f)r\x9819T\xe6\xf2\x1f"\x80G\xf6\x97\xe3\x94}\x9d#\xcc\xf2{I\xfds%q*\xb2\xdeB34\xe4\xcd\xad\xbez\t\xd8OwD\x04\xf5\xf4\x85\x96B\xbc\xaf\x17\x12\xe0f\x83\xc9\x10\xfen(\x1fO\x9b\xc2\x00\t29zB<\\\xbd\xde\x0e\x9d\x8f\x98\x9d\xbc3\xad\xc2\x02\x83\x85\xc2\x8bsx\xc7`\xcc\xee}4(\xa0\x894`w\xf0\xf3\xe9\xd6di\xac\xbb\xc2FG+\xd8\\\xf2\x1f\xee!\xdf\xb6\xfe\x8e\x04\xb6\nUg\x89\x9bl\xa6\xc8\x96\\f\x85M\xe7^\xcdA\xe9\x9d\xb5\x0f\xc0\xd7.V\xf0\xc0\x8c\xc9\x92\xb8\n\xb4\'\xcd\x01sZi\xa1\x1b)\x15\xf3\xe8\xbdf$\x10\x8a\x9b\xd8\xcf\x9a_u\x1fD/+upYs\xff\x1c\x8c,\x1b\x99\xf7W\x92\xeb\xe1\x00\xf4.1\x86f\x0egr\xbc\x97\xe1\x9d\xf7G\xa9\xd3\xdb[\xb9\xdc\xc3\x1d:\xd0\xf7"\x17!\xc1+C\x92|\xb4\xf1S\xff\x0c\xedc^MA\x07S\xaf\xcc\x91\x8d\x02J\\4x/nL%\xe8\xc5-\x7f\xc1\x104\xe6\x82,\xa8\x99I1\x88!\x1c/\xe2\xbf\xe3\xb9t\xfb\x8a\xed\xe6\xc6,z\'\x96{\xd1\xed\xbe\x95\xbf\x02\x96\x89\xb4\x16qo\x13|{(p\x05\xfd74\xf6\xf6\xe0.\x07h\xd1\xa18\x83X\xce\xb0p\xc7T\xa0A\xe76\x14\x0c\xde\xc8\xc4\xba0Hol\x16\xef\xcbP\xe9\x93\x15=\x0c\xb26\xab\x9d\xe2\x83J\xd7Z\xaf/\x11?\xb4\x87qDQ(\xbc\xb3\n\x04$\x96y\xb0\xfd{\xac\xdc\x1a\xfe\xfd/\xc0\xea)y\x1d\x86\xf1W\xd3\xab\xbaw\xd1\x8fI\xe5"P,G\xaa\xd4\x94*\rI-\xd1\xac\x1c\xf9\x18n\xb5@H\xad\xfa\xa6\x03XN\x9a\xb5\x19\xe1\x8d\x1e\x90j\xad\x14\xe18\x97K\x94\xfd\xa1;)\x13"[\xf5>\x0fI\xaf\x19\xd43\x027\x19\x14\xff\xb5\xf5\xda+B&u.LI\xb6U$\x19\xa3{\xef\x84\xfb*r\xb2\x81\xfb+\xf3J\xf3\xe6xY\x9e_b\x00\x19\xd4\xc5B>\xbc%\xbdW\t\xe1\x0f\xd2"V\xa6#\xde\x19\x8e*\xa7kw\xbe\x92f\x843\xa4\xb9DY\xe9|\x13\xd9\x82j\xb4\xba\xf5\x07\xa6\x11/\xfb\xe2\x97C\xf3\x03PD\x98m.@\x8f\x13\x10\x11\xbc3u\xcd\xf5}\xf9M-q\xc4`"\xcb\xa4\xcc]d\xe
a\xdaiLZ)aKa\xbbe\x0c/\xd1T\x81|\x8d2\xc8L\xe2E\xc5\x06\xe0+\x99[\x88O\xe2#DS?\xb3\xe3\x1e,nvx\x03\xbe,x\'a\xca_~\xcb#\xc2+K\x83\xfd\xd7S\xddn.\x97\xb1\xfav\x04gV\xa5\xdc\x0f[\xf8%\x86\xeev\x03*|\xf1\xa6O\xc3\x87\xb236#\x8e\x86\xb7\x80\xc6\xa6\x9e\xe6,\xf2\x85#\xc7"\x06"\xe8X+mr/E\xcb\xe9\xff\\\xf0\x99\x86\xf2\\e@i\xc4u|\xbc\xea\x03:\xe0\xc4\x10*\xc8\x88\xfdK>\xba\xa5\x87L\x11QU\xb8\'\xc3Js*T9\xf2F\x19\r\x13\xa8\x84\xa83\x84\xec\xcd\x16W\n\xb7\x8f\x07\x8f\x08\xb5\x9e\xe3\xaa\x02\xaa\x82\x02\xde\x00\xfao\n\x04\xb5\x87\xb4t\xfa\xe1r\x18\xfb !\xf0x\x10\x8d\xa1\xc9\xfe\xbcs\xea\x8e\x9e\xd4N\x18\xa0\xc2\xf7Stp4\x938\r\xec\x95\x96\xf6h\xc4b\xb10\x00a\xac=\xd2\xb4\x1e\xb8s9\x08_\x7f\x1fX\xa1\x12\x03\x94\xa2T\xc6\x1bar\x05s\xb2\xec\xb1\xfc\xff\xc9\x11\xbc>\x01\xdc\x18}\xd9}%\xaa\xa9i\xa3\xbf\x99\x98F\xad\x9ed\xc0\x81\xb2\xdc\x1a\xd1.\xd9\x13G\xbe\xe3\xdc\xde\xdce\xc8\xe3#\x99\x0b\xc9J\xa5V\x87\xe1L\xff\x1bE\xdf\x98}Q\xcd\xe4\xa2\xd0$\xa3b\xfak\xa7\xd6\x80,\xf0p[\xbaj\xf7k\xa7as\xd3H\xce\x9c6\x06\xf7<?\x10[\xca\xea\x03\xf9\x08\x12nP*Y\x178\x8a!\x9e\xf0\xe2\xbd\xc4\'\x958\xdce\xd9F\x1b\x11\xa3\x11y\x95Mh\xb9\x9bl\xfe\xfe\xc4@\xd1\xa9\xbd\xad\x17\x8e\ru\xac\t\x8a\xd4\x06T+^a&g%ow\xb1-\x8f\xf6\x8d;\x9b3\xbf\xa3L\x95cR\x894\xeb/3\x9b\xe0\x956\xa5\x81\x001#\xaap\x0c\xc4\xa5X\x9f\x1er\xc3\x9e\xa2\xf4\x16\x0c\xa5\xe8?\xc2\xe0Lyz~\xe5f)R1\x18OZiq\x16\n\xef\x89\x9e\xb1bU\xef\xde\xeauG\r\xa4\xf57\x1e8\xe41\xd7\xbe\xf1\x17E\x96\x04\xaar\xf1\xfa\xcc\xdbb\xe1\x07\xe4\xd5#\xb1\xb2R\xe3\xf9\xb5\x82g\xcb\xcd\xb7Jc&\xa2\x01U@4\x80\x8e\xb8\xe6\xc1I~\xa30\x8b\x87\'\xf9t\x1f\x10\x1c\xb7\xc4\xe5c\xfc\xaf\xce\xf1o\x15\x89\xce\xc2\xb1\'\xe0\x1fHP\x9dh\x85\xca\xf5ZS\xdd\xfdt/\n\x89\xb0MV?\x93c\x92\xec\xab\xc3x\x06\xfb\xe8\x92\xf7^\xe8\xb7m\xe4\n^\xe0\xfc\r\x9c\xf4\xee)F\xdb\x82\x8e\xa8\xd8\xa1%~\xc1\xd5Ya\x95\x83whM</\xa4\xa5E\x10\xf9\x96eoO\xac\x08\xa3d\x80(\xea+}\x90\xe6\xb7s\xaa\x97>\x0f\xbeY\x97\x8a\xb3\x0c\xae\xd1Z\xd4_\x17t\x0c1\x88a\xb1YY\x944\xbe\x1e\x9f\xd9\xde\x14\x04,Z\x109\x13\x91\'\\l\xa0q\x84\x97T\xe9\xca\xcb)\xf81u\xd4\x86\x066fi\xc8\xcd6\xa1,\x0c\x19V\xc1\xb5R}\x88\x0f^\xcb\xcb\xa0\xcc\x17(\r\xed;))\xd2\x14`\xfdV\xc2\xe8\xd0\xb7\xf1\x95b\x95\xea\x7f\xb6%\xf8\xd4f\xdc\xf7&\xb8\x06}q\x9ak\xc4\xa6qiX\xe9\xc1.?C\x9d\xd4\xcc\x9e\x81\xb1W\x8e\xe0\xd4\xeeL\xbe\xf2>X\xfd\xf83\x8d\x8f\xe5\x15WGk\xcf\xfd\xf9\xd8\x10\xe6\xfb\xe2C\xb5\x1d\xb5\x00\\\xaf\xfc\xcb=\xccT:W\xf7\xa2)Q<\\}\x05\xaa\xc2{W\xb8\xedU\xc0\x00\xba\xf4\x9bwV\x105\xdc@\xf7`\x9a{YUC\xba]\xc8\xb1\xa1tS!0\x9d`\x0b\x1a\xc2\xa6\x9c{\xc1g\xb6\xffRP8g\x9f;I\x9a}\xe1\x15\xfcb\xaa3Up\xd3GXX\x9d\xfc._m^\xb1\x8e7\xde\xdcI\xc8\x12Bmy\x1d\xd3i<\\5\x0cA\x829y`hC\xeb\x96\x8eo\xb4O3\x1d\xbf\x9f\x95:\xb8z\xde\xed\xdd\x02\xdd\x80\xff\x14Y\x05\xea\x8f\xad\xb8\xde\xa4\xa8\x88\x07\xe2\xcb\x1a\xdd\xa0C\xc4\xf9\x99\xfa\x83P/\x84\x86\x16\x01\xa3C!\x90\xa2{\x98\xa4y\x0foI\x7f\xc6}u\x93\xf5\xc6\xba=h]\x8a/\xee\x1ff\x8dJ&z\x19\xcc\x03\x85=\xb8\x9e\xd0\x9a\xc6\x8f\xeb\xb5\xbb\x19\xf4\xe5Bj\x1b\xc9;\xc9\xc9\xaa<\n C\xcd#y\xe5Z@\x8cD\t\x9d\xc2\xe1\xe1\xd4y&\xfbB\xcd/e\xbe\x0b\x95n\xf2S\x06\xd0\x0c\xb4\x81\xa3-\xd0\xb60A}\xb3\x87\x08\xe50\x8cC\xda\xd2\xd1\x1e\xcb\x97@\rI^O\x0b\xc6\xed\xf8u\xc1_\xf7\x040\xd3\x8f\xd6\n\xa1|"5b\xf8m\x0f=\xe2\t\xb07\xe52\xeb\x02\xfeC}\xbf\x03\xfaG\xad\xa9\xd9\x8a\x1ah\xcaO;\xab<\x9ezp\xc7\xcd\xb9\x05>\xfa\x81\xf7\x01\xbd\x02$\xa0\xb5\xb7\xf7B\xfa\\\xa9\x807\xf2\x9c\xab\x91\rI9i3\xfav]\x14\x00/\x89\x13\x88Yf?\x0b\xef\xcd\xaam\x0b\x99\xdf\x99\xa6\xc3\xfa\xa5\\\x82\x826\xef\x8e\xfb.\x17p\xf3Z0 
m\x1c\xe10L\x127#=@\xd0\xb3\xf6\xe8\x1bkB\xd0{C_\xc1\xfb#m-:\xaftvb`K\xd5\xd1\x06\xea\x14*\x99;p\xf0A\xd4g\x1e\x0e\xef-\xba\x0fP\x86$\x95\x81F\xdd9\xdfk#\'\xb9\xdc\x91\xb42\xd0\x93\x13\xa8\x89(\xc4\xcc)\x9d\x8cK\xe9\xdb%\xcc\xfc*\xbf\x8d\xce\xdb\x1c\xa9\xa6\xea\xb7\xfe\xd3r\x1e\x94]\\\x0e\xefpvX|\xfc\xc9!\xe8\x8bTG\xd6\xdd\r\x8e\x81\xf2\x07:\xe9J=+\xcf,\xd4\x89<!\x90\x17\xffv\x03>\t\x1c8\xb0\xb3A\xe9\x07\x88\xd4\xb3\x8b~\xad\'\xbd\xdaZ\xd7\xba\x07\x84\xe1\\\x0cR\x93\xe4k\x9a\x11+P"\xf08\xd42\xca\n\x1f\xc6\xa0y^\xcaS\x99\xd7\x82\xaf^\xab\xc5\xa9:\xed|Yj\xf0\xa0[e\xb1\x0fl\xb2\xb8\x00\x8d\x05\xb3L\xd3\x81\xadx\xa1\x8d?%G<D\xae\x85o\x1a\xe9\xc4\xe4y^:\xb2D\x87\xe4\xa8Y\xf1\x19M\xd0\xb8o\xef[\xef\x1c\xd5\x94\xf6y\x16\n{\x83-\xb4\x85\xa3t\xc0D\xe7\xe4\xee\x1d\xf1\x9c\xde\x10\x03g;\xbb\x9a\xb6+\x9f \xf8Y\tL\xa1\xd9\x18\xc5s\xdf\xd73A\xe5r\x02\x8c\x1b\x9b\\n\x81\xa8{:\xd0\xe5R\xa0\n]N9\xfe\x8eK\xcfL8\xa01\x1f>\xc8\xf3\xb1>~)\xae\x9d<\x0eX\xb0\xeb\xaf\x9f4y\'\x7f\xe2\x9d\x1d\x02k\x8e\xdfN\x0c\xadT\x99\xca\xb4w\xac\x14\xe6\xc0\xe8\x8a\xea\\\xedSG\x1b\xe1\x9a\x9c\xb4\xb97\xa3^\xbf\xed\xd2_\xa4\xcc\xad\xb0\xd14\x1a\xb6\xf3`\x9fZ\xf5\x9e\xda*\xe5x\xf2\x8d\xa8\xa4\xac\xf4I\x97yz\xa8\x11R\x94\xff\xd0\xca\x89\x87xx2\xf8\xd7\x99\xd3\xe2Gp\xa3\x81A&z#\xf8\x84\xcc\x98N\xab\xd4\xe0F\xc0\x1c\r\xe9\xec\xa3\x07\xfc\xa8\xf7n\xd9\xc5\x03\xf8\xe5\xb9p[G\xaaim\xdesQ)\x19-\xecT\xe0\xc2\x04)t?\x9f\x908\xa4\xday%\xaf\x16e\xc7\xe6\xf9\xbb\xaf\xa7x\xceJ\xad\xe7)eU9,\xb3W\xff\x94\xf4\x0f5sM\x13\xb08!\x81\xd4"V\xbdj\xa3\xdd(\x05/\xa2\xa8Uv\x8a\x94\x8e#\x98\xaev\xb2\x7f\xde\xdd}o\x01{\xe4~[\xae"\x17\n\x92`\xb5"\x87\xf0\x86M\x95\xa6\xac\x10\xe8t\xe8\xec\xb0K\x1fW\xe0\xdc\xd4(\x18\x84\x88\xa0\x19\xd7\x8bBj\xb2\x8c\xfd\xac\xec\xbe\x1e>+\xe6\xefxs\xe0\xd5\x0bM\xcc\xf1\xbb\xd1\x1dA\x1f\x9d\'8\xb9\x82Goe\x9c\xa4/\xc2\x0f\xc4n\xc6\xef\xc3\x1e~w\xf7\xe6B;n\x92<\xd5\xd5\x16\xe1\xad\xf3\xb6\x17?>gP&\x87{N\x00P\xf4\xc0\xa7\xc4\x80\x03Z\xc4\x92|\x94\x90\xd9\x07\xcd\xb4dc\xe1-\xda+(\xdc\xdc\xb3S\x8b\x1dRu\\\x0e\x8bp\xd3\xc6\x93I\xee\xbb\xa7{\xf4\xb3\xc2\x01V\xce\xfc\x8f\x1eQNO]|\xee\xf8\xa8\xd7/\xafe\x19\xc9\x1a\x86n\xb1\xa8~\xefk\xf5\x1f\xfb\xd8i\xbc\x0b\xce\x86\xf5\xcfu_\xae\x89\xa4\xd6\xc1\xe9$\x97\xa0\xa8\xd46\xa1B\xc6S\x15\x0e\xb4\x04xn\xddB\xa6\xaen\xe9\x1e\xe2\xa3\'g\x93\xca\xa7\xb7\xc9\xad>P\xb6",\xa5\x8c\xd6\xc6\xe8\x82,\x08F\xbeh\x81~\x11\x17\xcb Qp4\r\x16\x83h\xeao\xb7\xfbQv\x05\xda\xccR\xfd\x97zc\xbf@\xe7P\t}{hW@\xc6\xad\xd2\xabi\xedS\x1e\x06\xae>Xt\xc6\xb6A\n\x958oU\xe0\xf5\xad\xff\xb2k\x85\xbe\xd8\x8a\xd8\xcd\xdc\xad\xf0\xd9*\x9a\xa9\xd5;\xa6\x92\xcat\x00\xe3\x01q\xa6t*\xdbG\xc0#\xb5*\x9ey\x9a\xe0\xec\xae\xb613\x81 
\x13\xc2`\xb2*\xb4\x0b\xe0h\x0b\xc7\x91\xc6@\x89^\xa3p\xf3\xe6p\xc2\xecH\x88\x15\xb4\xd4\xf6\x87\xae\\C\x91\xc4|A8\x7fG\xb6\xfb\xb5\xda\x7f\x12\x96\x16:-\x83\xb5\x81r\x8ds4\x8b\xfc\xeb\xe2\xa5\xd8\xfe6h\x99Qe\xe7\xb8\x91\xf3\x00-\xc1\xe1\x8d\xd3l>\xd3\xcbYe>\x154\xd7Tl5~\xd3@\xc7\xca\xfe\x87\xf7\x82\xe9t\xc1#\xe2\x82\n\xbe\xa1\x9eF\xf3}\x9d\x0f\xb8a)\xc8\x12\xdd\xb1\x8c\x94\\\xf4\xb5\xd5\x1d\xf2a\xcc/mL$Tc\x12J\x80\x06\xa9\xe3S\x12\x11]L\x8dS\xba\xea\x13\xa7w\xa5sN?x\xc4v\r\xd7\xa5X\xf2jYL\x18\x90a\xb0\x00\'\x08\xf1*\xc1\n\xe0\x93\x90p00\xb7\x93RC\x17\xe2~\xe6\n\xb1\xe9l\xf4|F\xea\xa6P\xa9\xfa\xa3\x96\xf9\xb2\x06\xe2U$T\xad\xb8{o\xebG\xfd^\x825\x8f\t>\xcc\xac\xf8p\xb1[2[D:%#\x19\xc9\x18\xdc\x9bU\xaf\xf00#f\x91&}\x8c)b\x19\r>\xbcz\xc2\xb3\x10\xbf7(\xa7\x96|\xde\x00?\xe3\x95\x16\xa4\x05\xd8L\x95H\x99\x9e\x17\xe0\x83\xc9y~\xe0\xb1\xae\x10\x16P\xc4O`\x9b\xcapl;\xf9\\}\xa1\x0f\x9d/E\x8f"\x05\x99E\xbb\x94\xd4\x84\x9c\xab\xc4\x0f\x90\x94\x0c\xcbo\xcat\xe0\x8c\xe8b\x145-\xc6\x87g\x8a\xbe\xa8\xa7\x11\xfe\x01?,v\xc1p\x94\xb1\xe2tR\x85\x90\x88\xf3[t\xda\xe8w\xaevH\xf8\xaf\x15\xa5R*\xe6\x8d\xcer\xc0:\n8gp\xe1J\xba\xc0g\x9fvw\x04\xf1\xdcC\x89:\x15\x19\x81U\x0f\xea\x04\xca#:\xd2\x9fg\x02\xeb\xac\x0c\xe8lJ\x9a\xfd\xe4\xd9\xd0\xf6\x17\x81Ba1\xa4\xe7\x9e\xe2\xcc}\xf4\xc69\xa0\x98R\xa3u\xc0\xd8CE\xaaso\x1aD\x91\xf7y\x11o\xa4J\\D\xc4\xbd\x0c\xf6\xd8\x96\xc4\xafN?\r\xbcU\xee\xb4 #F)(\xe7\xa0\x94\xc4\x80\xce\xd5%\xf6\x96\xa1\x0c\x0fL\xf3\xf9\x8d\xe8\n\xe5#m\x18B\x83]G8\x89\xa7\xe2\xdb\xc0\xd1\x8a\xf7w\x19\xc6\x1f\xe7R\xf0\xf0\xc7Z\xcd3\xe9%\xbe\xc0gmQQ\x1c\xf3e\x915\xcaCU}B\xfb\x8f%\xdc\x8a\xfb#\x8c4Z\x14\xaaT\xa4\xcf\x15\x9eD\x9b\xb5jL\xc6*\x89\xde]\xe1|\x9e \xe0\xa4<\x1f]\xd7\xd6\x88\xd7\xe5R\xcc&\t$|\x99\xeek\x1d_a\xedL\xca*\xa3\xaa\x8d@\xd1`\xe2\xba\xbf\x1b`\t\\p\xcaO\xbf\xbeV\x9f\xfb\xd0\xaat\x0f%\xb1\n\xdck\xc6\xeaS\xfbP\xc2\t\xber\xcd\x9f\xe2\xfd\x96l\xd6e\xecDZO\xcc\xba{%\x8a\xb2Y<\x8f6/N=\xd7\x02\x06\x93\xc5\xef\x15\xf9\xa1\xc5\x92n\x00(~\x1dl\x88\xb3\xc5\xe6?<\xbf\x16{\x97B\x91]\x97H\xfd4y$wGR\x92\xc0\xc8i:8.M\x8a9\xb3x\'\xa2\x11\x9aI\x148j\x17R\xefe\xab8\xba\x10\xae\xb9|\xaf9e\xe0\xcc\x81\xd0\xc9\x98\xfdg\xec%7\xd2\x10t\xb7z\xef\xb5\xf4\x828\x16\xa1\x03\xaf\xf5\x83\x98\xe5\xa0pQn}\xf5\xc05\xd2lX\xacX\xaa\xf8\xeb\x170\x82\x8as\xc1\xea\xde\xe0Wr\x94:\xccN\x98x\xff\xe6\x12\xfb4\x83\x08U48\xd6\xee\x8f\x8f\xa3\xd2\xad^\x0c\x1e\xea\xbe2\x1a~\x07\xa9}\x18\xd4<\xa9\xc7\x02\xc3\xa8"f\xaa\xefJ+\x14\xfe|\xf2\xf3\xf6ZP,-\x9a\xbe\xca\x81\xf1\x7f)d\xecJ\xbc9\x8e\xe4x\x0eT\xc6)\x10=\x1e\xf9\xaa\xfd\x9bz\xc9<U\xa2R\xad\x1e\x83\x07$9M\xb4\xa3M8\xa1\x9e:\xb1N\xa0\xdaGI\xdaOi\xfb\xb8.\x05c\x03\xef7\x1be\x12\x8d\x8bVp\x13\xd2\'GD\xf7"[\xbd\x9c\x16,\x99\x8bU\xcf\xc2f\xed\xe5WYu\x11\x8d\xb7\xde${F$L\x9aN\x99C\xf5Zi\xf6\xba/\x11+;\xd2=\x89\xd3\xc8-\xff\x04;\rC|\xf5\x0c\xc6\xa2\x05\x03\xcd\xb9\xbc\xd8\x1e&z\'\x18\x05\n\x8a0\x1c\xbf\x99\x9eADv\x9e\xd2k6\x83is\xa3\xf8\xcb\x1aq\xd6\xceu\xf9H_\xa7\xe9\xf6\xbf\xba\x0f\xcd\x89\x1cd6r\xd2\xc8>E%\x83q]\x85\xa8\xcd2\x18\x98\x92\x07.\xfd\x82\xbe\xc8C\xa3\x19\xf6\x19i"\x8ds\xbf\xb3"\xba\x80=?\xf4\x0c\xe7w_\x88\xb9q\x9dt\xef\x8d\xfc\xc7\xf5f\xeade2\xb6\xf0\x9a\x92\xf6;7R\x1fw<\xa9Y\x14e\xe5m;\x9b\x8f-\t\x92\xac\xd7\xcf\xa9\xb47\xbbW\xbd-m.\xffY1\xbdS\xb9\n\xaf\xe4\x02\x8fc\xc3\xd4\xaf<AP\x1cB\x9b\x1e\xfc\x84\x9dw\x93\x8f\xfe\xddg\xc4\xae\xa0\xfd@\xd0Hc\xf6\x14\x82\x12\xf9\xceJ\xecq#\xac\x93*\x8b*%\x07b_H 
\x03\xe77\xf8\x92>\xd5WR\x7f\x166\xa7\xdeC[\x99Yd\xe6p\x1cy\x81\xb73\xfe\xea\x9e\x97\xdaG\x11*\x17\xaf\x92X9t\xd8YO:\x7f\xad\n\x9c\xdc\xae{\n\xc8\x0b\xae\x9ey\x07\xc0\xb4\x05Q,\xf2\xb8y2\x0c\xd4\x96??\xda\x83(7:\xf1\xae\x90\x87\x1e\xc9\xf8-?U5\xbb\x9c\xd6p\x9c\xe4\xfa\xedL#\x87\xd8\x99u\xa8\x10\x0eQ \xca\x8c\x90g\xf8\xbcMA\r\xa3\x173\x83tP\r)\xb9\x1c\xf0}(~\xd9\xbd<\'\xc08n\xb4*\xb2\xf2r/\xb5\xaa\xa9\xcc\xb2l\xf0`\xe1\xff\xf0$\x9f\x8a^\xc0\xb6\xcc\x07}\x00\')\xd7\xa5\xdd\x02\xf1\xa2D\x18A\x98u\xd0\x81\xc3>\xdf4^\'&\x9e\xb1\xb8\x1a\x14r\xab%\xcfI+\x97\xd1oc@*\x89O\xc8\xc8\xcc\xe1\xf9\x13\xa2\xae\x85_?\xea\x10\xaa=\xabK\xebO\x1eh\xc5s\xf3S\x9f\x87Ngrr\xa040X\xf9\t\x80\xd9\x9e\xe8\x15e\x878\xce\xb0`\x9f(\xd0\x13\xd3\x94G\xe2\'\x91(aGl\x15\xe0\xc4\xe3C\x9e7\x94\x97\xc3\xa0\xb3\x91n\xb0n\xee\xc7$\xd1\xa8\x84\xe5\xa5\xf2g\xa3\xc9\x1a\x88\x8d\xa1\x8c\xe7HL\xaa\x99\xf6\xf5\xdb;\x9b\xe3j|+ne\x00\x95\xfd%\x00\t\xef\x85\xa8\xfc\xb7\x9f\xeb\x89\xd2\x90<\xd6X\x0c\x7f\x18\x93\x93\xee\xb6\xfcK\x81p\xa9po\xf5\xc0\\\x80\xc0k\x95;\x92\xce\xf4G\xf1|Rzz\xd3C\x15\xbf*\xbf\xfe\xc7\xea\xe4\x04G\xbf\xf3\xec\xef-4s\xf9\x1b\xd6\xe2\xde\xa5{\x1ckI\xb7\\U\xa9\x9f\xa8\x17\x0b\x8e\xb9\x99QF\x19\xdd\x90pND\x97oG\xd6\xc4\xc3+\xdb\xd0d\x0b\x17\x0fj2\x03r\xba07\xe0b\x90\xa4\x88G7?[E\xfb\x0c\xa7g\xc6\x15\x92]\xa8\xd5\xb8\x9fS\x1b\x1f?Sa\xdd\x05\'\x15\xe7\x16N\xb8\xff\xf4\xdd \x1ef\xb9f\x14\xf2\xf1\xa2X\x88\x81I\r\xdb\xff;\x04\x130\xb5\x12Rgz\xbc\xc2\xe0\xa1\xc4>\xdb6B\x85\xd84S\x19 \x1dFe\xceW]\xfd\xa1\x00>f\x1d\x9b\xd4Y\x1c\xb6HT\xd9(\xe5\x9eO\xbe3q\x9e\x924\x07H\x14Q\xd2\x14\xb1\x8d&?i\xcb^\x99;\xbf\x88\x89\x11\x99+15\t\x8c&\xdek\xf3\xd2\x87\xc2\xe3\xa9\xc8\xdb5\x1ccc\xc8\xdf\xc6I\xd8\x80\x9a\xcb\x7f@>\xd6\xcf\xbe\xba+\x11\x00r\x91\x16\x9a\xe0\x1f\xc9\xabw\xa0\x8bd7DZ6W\xaf\x84\x8e\xf8\x98\x89\xd2\xb4\x03\x9aO\x17#\x1c!\x9e\xa8\x95GD\xa1\xa8&\xda:4Z\xe8\x90IWPi#"}\xf4b\xd5=le\xb3\xe6\xc5A*\xbd\xd4<ZsT\xbb?s\x05c\xce\r\x04\xb9\x16\x00A\xba\xc6\x87pF2\'\x14\xbe\xfc\x0e1k<\xea\x8a\x0f\xf0;\x03\xe5\xd8`~\xe1zA\x94\xef\xb0\x83Z\x11\x16\xdf\xfd\x8a\xf5\x19\x82w\x1a\x85tp=Xi\x9cI$\t\xa5M\xe4\xfc\x02_29w{\xb7Cn\xab\xa0\x14\t\x93\x11\x85\x12\xc5\xc7\'\xafg!\x0bN\xe6X\xc6`5\x1f\xca\x18\x9c\xf0\x9eMF\x97H\xfeZW\x1f\xa6\xe9\xc5\n2\x08\x05\x8ez\x16W\x10\xcf\xe8\x10\xaa>\x7f\xd0X!\xcf\xa3\x87jt\nJq\xa1\x90\xe9\xb23\xaa\x84cZ\xbat\xde^\x9fxb\x1a\x95\xf0\x9cQ/\x94m\xa3@\xd0\xa2\xf3\x0c\xadsM\xc9\xbd\xd4"$\xa6\x84\x8c\r@\xa7a\xf6\xb6UT\x02\x11wX\xfb\xa3\x1b\xa9x\xd3\xe9n\xf7\x88\x83G\xff\x86:T\x94\xd6Sy\xee\xd1.\xfa\']\xf1W\x07U\x14\x99\xa9\xed7\xd7F\xf0\xb2\xa1\xc5,\x16\x9aHto\xf2\xef\x9f\xf4T*\xb5)0B\xef\x9c\xdf\x1f\xbe\xfd\xed\xa97\xcd\xfc\xe3{\xd4_\xda\xd0\xda\xf5w\xfa\xacD[\xce\xd4z\x047j\xa7\xa2\xdd\xf9\xc5\xdf>\x94\x0e\x1ab\xea\x18q\x03\x01\x1c\x1a\xe3\xa7\xcc\x12\xdf(\x1a\x92\xdc\x93\xca\xb1\t%\xa7\xd6f\xf2\xe5\xb1\xb0K8\x07\x84"j\x19~F)7\x82\x02J7\xeez\xd36\xff\x8d\x8e\xb4\x92\x15R$*2~ 
?_\x12\x1f\x15\xf5\xcd\x94(\xd9aq\x9b\xe3\x17z\xa2s\x19\xebW\xad\n\xd5(\xed\xba\x0f\x8b"\x1d\xe2\xd1\x13\xd5?uB\x84\x1a\xb5\xfb12h\xb4\x10R\xd5r*\xf1\x80);\x1e\xd4+2\x8d2\xbeDOa8\xd7\xf4[i1\x0bB\xa2b\x04\x0b]EH\xfd\x0bl\xb6\xc1\xc7\xb5\xc5V\x16P\xe1b\x86\xa0,\x15\xb1G\xd5\xa5j\xe3Vm\xeeJ2\'\xa2\xfb\x0b\xfc-\xeew-\x94\xd3\x1c\xb4O\xf4\x8b\xc5\xac\xa8@[\xc2}\xfc\xf7\x87\xe4\x07\xd3$\xec\xb7F\x040\xc9\xaaj\xf6\x17c\x06\xc9q\xd3\xa0V\x83yf\x8b\xd1Z,R:\x01\x92\x17=\xb2?\x00\xc8(\xae\xdc\x15\xcfS\xd8S>\xdc\xd5\xde\xc1\xe7y\xbc"\x16~\x91e\xd8FS4\x12\x99z\x03\x0cv\xc1\xde\xfdWkJ\xa3\xadjE\xa4\xf7\xb7\xe8D\xb5\xec\xb6\xd4c\x94\x89d\x8b\xc4\x9fb2\xdd\'\xf1s\x15\x89\x90[\x12\xae\x97\x84T\x99n\xbf\xafu\nS\xab\xef\x04\xa9\xa9\x8b,\x99\x0eS\xed\x0c\xe8\xc1\xd4\x13g\n\x1a\x93$\x8asd&\x02"\xb1+\xf1\xe5+\xef\xaf\xe1\xdd\xe4\xb1\xca\x0b\x11\x94@\x12;P\x1d\xef\x98\x9a+\xf9\xda01!\xe8\x9e\xd2\x06\tE\x00+\xeb\x82\x01\x0c\x89\x9c)\x02\x17\x11\xe0\x8eB\xfc;\xc4\xbdq\xa7\x07\xadW]\xae\xbf\x89\xa4\xcdM\xea?P\xd2e\x0b\xbf\xd0_e\xd4\xf5\xdc\xf8`#\x12T\x89\x00e\xcd\x8fM\xcf\xf1\xfe\xaa\x05:I\xb7\x9eq]\xaf\xf7D\x8a\xc7\xc3I\x87e\x1au\xda7-\xef\x92\x0e\x9b\xe9\x9e\xa7\xf8\xa8OL[\x12l\x1d\\3\x83@\x93Q\xd0\xbfR?D\x13\xf5\\\xbaU\xc8\xc4\n\xddi\x12\t\xd2\xbf\xdb\x90\x99\xd5\xd7\xd7R\x19\xd3s9;\xf9\x9f\xedd\xe4\x93\xc2p\xfd\x19+\xf2R\xb1\xcaQ+|\xf2\t\xbf\xd4\xed\xc0\xeb\xc6\xd8\xbe\xac\xa8\xa1\xf4-\x15\xc4\xc6\x01\xbbi\x95\xd2ee\x1e\x7f\xd8\x11\xbf\x82l\xd4\xbf84\x03Y\xbb\xd5\xf9\xba6\x9d\xe6\x89Lv\x89NU\'\xda\x9c{/9\xd2P\xbc^K\xfb\x02\xbf\xd4)CX~\x97\xd1\n\x0c\x9fMEz\xf7\xb1\x1e\x92\x8e\x0eT\xf3^\x96)\xc8\xd23\xc5\xf4\xb7\xd6\x81\xaf\xb2-\xa9+\xac!\x801\xf9\xbed\xe1\xf2\xd4q\x8c\x11\\\xa2gJ\xd21?\xf5\xb9\xbc\x8d\x14\x05\xa0O\xbc\xb3\xf3\xf7\xd9n\xbf\xa7)\xc7v\xbb\xaf\xdaH\xa2\xaa\xf6\x19)>\xf7,P\x07\x95,\x899Vi\x00k\xe4eV!~\xddJ\x8f\x1f\x88\xca\xcf@#D%q\xe1\x91\xaf\x8fh\xd1\x12!\x8a\x9a\x02a\xc5F\xd3\x0bP\xeb!\xe7R1\xc7_x\xca\x1e\xa7h\xd5\xfeU\xdaZ\x904\x1c\x83\xf2lT\x02:g\x0c\x83\x97\xbc\x16\xe1\x85\xfd[1\xa6\xe2\x16\xc0\x8a\xa7\xba\xba\x14\xc7\xcf\xd6W\xbfe\xb8\x9d\xdb\x15G\xd3\x96\xf88\x9b\xf9t\xb6\xaf8\xa7\xe2G"\xac\xb0\xf0\xc9\xaes\x8bS(\x10\x7f\xb1\x8e\xee\xf0[\xdc\xbf\x97\xbbV-\xc4\xc0\x91\xe2\x9f\x08\xb9\x0bN\xc8s{\xaf\xc9\x0c\x07\x94Q\x9fN\xc5v\x9d-0AaY]T\xd6\xed-\xcbP\x88\x1b\xc5 \xff\x01\xff\x86\xea\xf9\x9a\xc0}\xe4\x92\x06z\xdax\xd8r\x9d\xcd\xfc\xaa\xa2\xf5\xc8o:<\xe1\xb7\xc4\xb2t\x11\x08\x1b\xb2\r\xe4%Z\xd3\xadQno.\xbbs\xbe\xda\x10\xbcWzC\xaa\xb4:\xb8b\x0b\xd6\x15\x9e\xa0A.\x04\xee\xc3o\xe6\xc9\xfa\x85\x9c\xab\xca\xe0p\xe7\xed\x0c\xd8\t\xa6X\xf9\xab\xb0 
\x1cX\xf4\xac%\xa1\xb5.\xb1\x1b+\x0f\x7f|k\xfb\xe6\xb2s\xcc\n\xd8\xf7\xb9\x98\xdfN\x89k\xe3\xeb\xc1Z\xb4r\x06\xf7\xbd\xab\t\xe0\xeb1Qy\x0c\xc3\xd9\xca\x85V\xe2\x81\xcf\x10\x90ID-\x0b2+C\xcb\xba\xf1m\xe2K5\x05\xf0(4\xd1\xa5h\x97\rHW\x15\xb7\xcd5\xce\x0e\xa8n\xf3\x14\x04\xcb\xeaj\xc1=\xa2\x8d\x16\x02@bq\x02?\x93L\x1b\xb0*\x17T\xe4\xd1\xbe\x91\xe3\xff\xc3\xff\xefT$\xa7RN\x8c\xe4\x04\xeek\xbf\xdd<\xc1O!m3\xbd\x82\x9d\x11\xe7\xd4\xfc\x1b\xcav\x19]\xdc\xf2M\xbbW\xc1!g\xba\xe3\x07Y\xb2s\x0e\x1e\x0b\xad2\xea\x94xj\xc6\xc3x\x85\xe2\xbb\xc7\x03\x12\x95Y\x9f\xe4\xf2A\xe0\x9fOwF\tKO\xc4Xt\xde\xb8\xceo%5D\xfe\x7f\x9e\xe0)\x8b/]\xb9\xf7\x8eBV\x04/\xae\xd2\x93\xcd\xd7\x9a\xe8\n\x950)\x18\x88\xb9\xd4~lS\xdfW\xfc\xda`\x8c\x08\xb1\xb0w\x8a\x12\x82\x0fA%\x04\xbb\xc9\x04\x9d\x14\\$\x9e\\\xe8\x955B\'\xbc\xddy\x95<\xd7\xca\xab\xcc>\xfda\ry\xa1*T\xcb\xebDv\xd1M\x1cl\x8d\xc2\x9b\xde\xce\xc87\xbeH^\x1f\xaeI\xbb]\xa5T\xea\xc0\xef\xb8\xe8\xfcX\x9e\x93\x9c\xc4\xb6i\xee\x9c\x13R\x85\xe9V\x19M\xe4>\x7fVl\x82$\x88%=\x9b\x18\x87\xab\x19\xd2\x92\x81\x10\xca0\xbc\x0e\xcfW*q\x05\xa4\xe35\xc1\xf6\xee\x17\x11\x04\xb8]\x01\xa7\x18\xb2L\xf2N:\x87\xfc\xf8T\xb9\x97\xfe\xacx\xa6\xae\x98{\xae/\xff\x1b\xd8\n\x7f\xa8\xce\xb2U\xeau5\xd0zsf\xa6i\xa1\xafgIR\xfe_h\x1e\xbf\xecvW\xe33\xfa*\xd3\xc8w~`\x1f4\x08\xb3\xa6\x8fY\xeb\xb0,d\x96\x12F\xee\x8b\xbd==\x7f7"\xf5\xb6\x93\x15\x9cjI\x9c\xa8lQZ2#*K\xf4\x81=\x1fJ\x13hV\x92\xb1\xb9Q\x9b[\xeb\x89\xaf:\nKo^\xe3\xb5Tx\xd3:\xc3\xb2\t O\xcd\xfc\xcfo\xab\xe3\xe5v\xd2y\x96\x82R*`6~\xa3M\xd2D\xf4\x1a\x10k\xf0X\xf4t\x8b\x04\xa3\x86\x02\xbdf\x9d\xd1[.\x98\xe9\x10\xcfPt\xaarX\x8e\xae\x92\xd9\x9d*\xd0\xb0\x04>\xca\x93=\xeb\x1c\xbc\xa7\xc1+\xa4;\x05e\xb4c-nlbTFN\xb5\xe5\xdf_N%\xde\x15\xd2OF\xdc\xb7\xf1\x87\x17\x9c#<\xb28H\x7f\x8cd\x9b\x17z\x07\x03\x12\xbaf4G[\x9d\xf3\xc5\xff2!\xabVE]$:\x10(-)F\x8e\xbf\x1f\xffa\x1a94\xf1\xa1\xc6\x15\xb2\xb6\xeb\xad\xf3gRd\xd5\xa2O\x16\xe3\xe8\xc9\xcc\x862\xdby\xd9\xa9\xcc\xdcB\xdbhr\x9f8")D$\xd0,\xaf\x80\xfdg7\xae\xec\x1b\xc5~s\xc3\x13O_j}\x9f+"\xbb\x16\x98\x92\x92\xa8\x8c\x8d_\x98M\xe7\xda\x8f4H\xcb\xaaP\xe1\x02\x99;\xb94\x8df\x92\xd7\xbe\xb7\xd8\x9e\xfb\xa40@\xe2U9e\x06\xaea\xf2Lm,\xf7\xe7\x9e\xff\xed\xa8;\x90e\xa5\x03\xd2\xf4h_\x1e\xa0\x8b-\xab\x05#P\xcaxf\xc4\x87\xee\xd3S\xc9\xd8}\x94A\x93\x1eM\xfe\x1e\x11\x00m\tV\x04\xd8m\xd2\xa7\x18\x86K<.\xa3\xeb\xb7\x8a\xea\xf3@\xb1\xf9\x17\xff\'\xc6a],\xd3\xd6\xac\x1aN%4a\x16\xc3_\xbf\x88\xc1O\xda\xf0\xfc\xac\xd6&\x93\xd6\x0b\xd5\xa0X\xa9D\x96\x11\xa4a\xf3\xd6\xe3#\xef\xf8\x8c\xa9\xb3TC\xac\xfch*8*\xd7\xda\x8b\x16\x92\x88\xf9\x14\xd0\xa2\xf3oDp\x8b\x17\xb5\xea\xa5\x1b\xacN\xff\xc7\xa4\xa1$\xcd\x8awS]\x88\x11W)\xe0\xd42\xd6\xc7\xd1\xa1\x0e\xfc\xa4\xf3X\xca\x10\xbc\xa8\xf0I 
\xf9?*[\x0ex^\xcc7\x19\xcd\xd4w\xb5\xed\x93.\x97\x00m\x9d=:\xd3f\n\x06\x02\x81\xb6\xf3\xd6\xbb\rF2\x03d\x97V\xeat@e,g\'<\x86\x8c\xb2\x05\xab\xf8\x87\xf2\xdd\xf5\xb2\xc5%9bS\x04\x86\x15\x07^\xb0\xbc\xf8|\x0b\xb8\xbb<\xd1&6|\xa1^\x9e#\xf5c\r\r\x89\xde\xa2R\xe6Q\x8d\xb7+|%\xaa\x1d\x92\x06\xe8h\x12z\xb3\x86#\xf4\x15\x10\x00\x17\xc5\xef\x8b!Bm\xcb\\\xe4\xe3\xba\xdbWg\n-!)\nQ\xefTtar\xf5\xab$\xbao\x04\x97b;\xfa\x05K5\xff?\x02\x1el\x1d=\x9c`t\x16\x98rW\xe3~z\xf3\x12z\xf3\xf8>\xd2\xd0-\x07\xdf@A\xb7\xa6\xccN\xd6\xb8\xa2\xde\x19\xfb~\xae\x85\'S\x07\xc0\x17\x8d\x03Q\xfdm\xf2\xa8T\xe6\xb0M\xff\x92\x910\xae\'\x0fP\x83\xf3\xc1\xc3\x05\x1e\xd3\x81\x96*G\xd7\x1bu\xe7\xef\xbe-\xc85\xc5\'\xcb\xc8\xbb\x8b\xc9\xb2\xbab<EI\xe3\xa5\n,=\x9f\x17\xbf!\xa1\xec\x13\x89\xe8\x80_\xabl2X\xea\xcd\x0c\xc9\x13\x91\x82\xe7\x86i\xc0U\xf8H\xd2\xb9x\xbd\xd8r\x95;\xf4\x1f\xceg\xd5\xea%(\x84\xde\xe4\x10H\x96\xf2Oo\xfc\xf9@0m\\\xba\xf8l^\xe6\xb5Y\xf7\xe9\xf0\xe5s`\xd5\xd8\xbc5\x15\xd4\x8e\xdc(u\x86\xf8\xac\xae\x10(\xd1\x9b`\xe4\x9e\x9b{Y-\x9dx\x02\xd2\xaa11U\xf9wv\xe3\xca\xbe\xaa\xe4\xfd\x91\'\xab\nR\x88\xbbO\xd3\xd7\x7f\xc5A\x91\x874\xb7\x8d\xb9`+\xbc\xb2A\xa0\xf2W$\xe7\xa7\x1a\x14\x13\xa3DL\xd5\x1a1"\x1d\x05\x92\xd4\xdd\xe9S\x06y\xfcV\xef/\xe5\\%\x0b\xbb\xba9]\x90z\xbb\x90\xab\xa8\xf5\x986c\x87h+&w\xac\xb2\xda\xfct#\x8d\x90\xd7\x07\xf3\xab0\xe4f\xf62\x94b9\x84\x9e?\xaf|\xd3).\x04\te\xac\xab\xb1:GZ\x8a\x85[?\xc4\xb3\xa5\x90\xf8Z\x9eg!\x04\xd8\x99\xe4\xd7\x0f\x8f_\x7f[\xd3-\xae\x88\x01\xcf\x0bl\\.\xde\xed\xbbs\xad0)Y;\x955I3\xb7:\xe4\x13\xd8\x16x\xf3\x9ezx\x11\x88Q\xd2W,[\xa3,%\xb2\x83;\x8f\xe4N\xd2h\xa2\xae\xc2\x97\x90\x87\xd8\xbd\x17\xae\x1f\xb0\x05d\x96\xe1\xd0\x91\xbc\xd9\xb5\x12\xb5\x18\n\x987h\xab\xec\xed\x0e\'Z\xea\x94\x02\xb1\xa8\xb8\xd2\xbf`\xa8\xc8\x86\x98t\xf9\xc5{\x19\x98\x92z\x8c\xe8Rb?q\xe2\xe3j\xf1\x88\xc3\xc9\xfb\xe1\x8a?\xf5\xe4Nq\xd1\xac\xd9?\xd5\xf7O\x87t\xe2\xd5\xc7$\xbcDX\xe9\x06:\xeb\x8c\xc2\xcfXX\xf1\xec\xc1)-@\x94\xa1\x95Se\r)n\xc5\x82ke\xa4\x10\xc5?\x04\xd3\x1bO\x86V\x83V\xb9\xf3\xc9\xe6\x17\xc2\x8a0\xf4\xe6\xd1\x89\xcf\x0e\xc6\x93q6\xa3\xda\xce\x9d\xee\x05\x93\xbc\x14X\x92\xbe99\x9e\xe4\xa3\xdb\x93PZ\xe9\xdf\x1c\xe1\xb2\xcdn\xff\x9b\xea?\x0e\xf5\xa2\xf1\xbd\x84\x12A\xc5j4R\x80\xe7\xd9W.\xc4\xa2D}\xcc\xbf%\xce\x98O\xda\xc3\x13\xce\xcb\x88\xa5P\x0e\xbe$\xf3\x94\xf8\xa8\xff\xcb\t\xd4\x1bi\xa3c\xb5\x03J\x97+\x02\xb4\x84\xf1\xd0\x1f\x95\x17\xa6\x15\xc1`\xc1\xce/3\xc5\x9dzK8j3M\x9cy\x03`w\x00\xab0\n\xb8\xa4C\x0f\x11\xc3\xbe\x1a\xa5LYM\x0ew\\\x99R\x8c\xa1\xccx\xd8\xfb\xd1\x13\xea\xd1q\xb7w\xcd\xf7Ec\xad\xf3\xdb\xd7~X\xe4vF\xfa5\xf5*\x83\xce@6\x08F\xc1\xa6J7\xd5]\xe5\x13\xd8k\x16\x93\x7f\xa5W\xa8[\xa1\x08\xa7h\xc3\xec\xc3\tY:\x8bu_+\xfc\x98\xcf\x0e\x07XF\xf9\xee$\xfa\x87\xc9\xdd\xb6\xa8\x87b\x91\xd9\x04\xc0\n\xa1\x1ez\xcekp\xb4\xd1\xb8\xcc&cy,\xb1\xd9|\xc6\nVC\xf3\x1ef\'4\xbb\xb8\x9c\x92\xe1\xa9\xe7v\xc4T\xa5l\xa2*/\xcb,)\x91\xf29\x15V_z&\x18\x1a\xf9\xe6\xf4\xaf\xceAA\xb0\x14\t\x18\xd9Z\xa7\x8f\x82\x1e\xfa\xf4\xf4X\x8a\x10\xb7-\xb8\x12\xd0\x00P\xdff_\xbe\x1d1\x0c\xf2\xf9\xf9Ou`VV\xf65\x8d\x16\xe0\x99f\xd5\xc8\x1a\xd9\x98\xd8hA\xe4\x9d\x02\x94|v\xd0\xc8\x87\xe1\x99\xadi\x8c\xde\x1d\xc1\x99o\x0e\xa9\x9e:\x10\xd6/W\x80\xeeS\x1d\x91\xb4\xef\xa6d%\xdc\\;\x17\x96\'\xe7\xf7\x9c\xbes\xfc\x1f\xbb\xa2\xa1\x16z\x833F\xe1t\xda\xec\\o?]\x14L.\x1c\x95\x1d\x9d\x81\xcf2\x07\xc2\xa0\xf9t)\xd3\xc50\x7f\x0e\x9d2\x07\x99DcK\xec\xf7\xc1\x82Z\xf9\xbc\xda\x9de\xb13\xa5\xe5\xaf\xaa=\x92`N\xb4\x97\xf4>R\x15Tw\x15\x99b#?\xd1\x98f\x1b\xd0\xb3\x12\xd5\xb7\xad\xc9\xaf3\x88\x13ML\'\xb1W1\x18u\xc5%\xeaKQ\xca\xad\xaa\x110\x1b\x88\xb5^e\xd1\x8
73\xbd_\xe0ZYtC\xb6\xfc\xfaD\x97\xb6\x92\x12-i#R\xca\x8e\xb3\x80\xc2Y\x00=m\x98l+]5\xf5\xd9U\x9e\xcbMq~u\xf3\x07,\x08\xb8e\x87\x0c\'y\xf1aS\xa9\xb2d\xbcaI\x10]\xe9\xd5\xb6~\x1c~\xfb\xf7\xf4\xd3\xf9\xbd\xe4\x15>\x05\x85<\xd3;\xe6_\xe7\x80\x87#\xc7\x1e%)\xec~@+]\xa1\x004\x95+v?aS\x07\xfc\xb3\x11\xc0\xdc\x94\x03\xdf\xc1\x84/~|\x16\xbb7\x94\xd2\xa5\x14\xab\xb9NO\xb5\x10\xb7\x1a\x91m\xfeu\xd5\xbd?\x86v]\xde5O\x9e\x18<`\x1b\x98\xa9U\x90$\xf9\xce\xc0*\xc0\xb7PX\r\xe3/\xd0\xf6\x14bU\xe6\x9d\xd68-k\xee\xc3\x97>\xe9\xee\x0f^\x89\xe6\xf6B\x92\xe0pS\\\xdd\x90\xb9F@\x1d\x90\x88Jt\x8d)[\xc5\xe6\x8dk\xa5\xa9&\x8c\x0f\xb9\xa1\x80WP%-\x8c\x91{\x0e\xbb \xe2|\x1f\x9f3\x1feJz\x9c\xe2\x83\xc8\xc7\xc9\xd87\xf5\x0f4\x06\x12y\xaa\xd6@aR\xb8\xefQ,OH\xad\xe7\x93/N\x15\xc0\xc1C\x9f\xe4\r\xb0vH4\x92\x19T\x90\x18\xb9\x8b\x15\'{"w\xf1\xde>\xd3\x19\x06Z\x1fQ\xbb\x08\xd8MU\x16z\x13\x1d\xbbc\x03\x04\xb6f=xq\xc8\x17\x0c\xc5\xc8\x8a\xc2\x1c\x181\x02\xe2\x96\xd5S\xa9\xcc\xd0\xda\rw\xb0\xf3\xb8\xffY\x1d\x8a?\x8d+o|4\xe9\xcd\x12\xab7\x03\xc4\xc9\xd0\x9at\xd0\xc3\x17=\xb1\x02j\xed\x88\x0bY\xbd\x94\x8a2i\xcaW\x03\xda{\xc3W:\xa4\x05{\x8cz\n\x11\x01\xe7\xc7p\x02\xc8\ts\xb3=a_G\x11\x1f\x8e\xe2j\t\xfa\xf5\x02D7\xb6\xdd\xf9\xd2?[\x15\xf9\xdcR\x83\xfc\xa1\'\x99W\xe7T~\x04.\xdc\xa3w=-\xa3}5\x03U\xc8\x90\xa53\xef\xdd03\xba.W\x9f\xf8\x17>\x88\x96\'\xb5\xf1C\xea\xa1\xd6\xd6x\xf4*\x98\xe5\xd1\xa6\\\xee\nav\xb3)%YAA\xcb\xf3\xce\xc5\x9ar+\xb9\x8b!O\xa1\xe7\xcd]\xcfd%\xc1\x91dL\xa9>l\xd0|\x97\x16{\x82\x17\x1e?\xe0\xa6\xe6\xe3\x8fn\xaf\xaf\x1dx>\x8b\x8bO\xea2\xean\xfe\x98D\x8a_\xc2j+\x01\xd1Q\xeb+\xc1\xbd\xb8y\x89\xdc\xae<\x10\x9f\xbf\xf6\xdc\xa5r\xf0R\xabH\xc6\xba\x07\x1a JmN\xcbfb\\cb\xf3b\x11<\xb0\xba\xd0\xc9\x93\x9d=\xc6\xf7\tI\x1a\x83\xde\xe2_\xcc\x11\xa8\xaf\x05\xce\x0f\x02\xdb \xf06d\xcf1\xc4G&@\xc4\xf6q\xecW\rHz8\xf7j\x04 \xb3\x80!y:\xcc>~qo\x16\x11<\x9aEX\xd1\x8f3\x05-e\xf8\xd6tQ\xafv\xda\xd1\xa4\x88\x7f\xb3V\xf5\\\x94\xdeS\xb5\x10\n\xc5n\x84\x88\x85\xe08>\xd4\xe2\xde\xfc\x01LyP\xffJ\xfb\xe4\xad\x98\xfd\x0f}\xfc\x87\x856\xbd\x9bk\xde\xe8\xea\xb3R\x14:\x80\x91\x9du\xd9q\x9b\xd9R%\xd91\x1f\xc5\x9e{o\x8d3\xe8\x8c\xd5\xd1\xc6~\x0c}\xa9\xb1\x8b\xa2\x86ue\xdb\xc2h\x9cB`\xea@\xe2tjC7Q\xc2\xc6\xb2\xb3\x9ei\xe5\\\xb4\x06\xcdV\xefX\x1e\xf9eg\xf8s\xb0U5\xe5\x1a\xe2\x0bp\x1dM\xfcM\xc0z\xd4666Xa]Rh2g\xd5\xc8;\x94_}\xb4\x93_\xa4[\xa0aHn\tJ\x94{\'\xe0*y\x03Z\xaaj39`\xd1\xc8\xaeXY_>\x11\xcb\x81\x04v\x0c\xaa\xc1h\xab\x10p\xaaS\x06/yKl\xc6\xcb\xbeZBX\xa2\x99VB\xb2l\xeb\x02\x05\x18TZ\xf9\xb0\x1c\xc3\xd8\x8f\x9d\x0f\x19Q\xa5h`\xb5\xb7\xda\x88[\x165c\xb9\xc9\x86\x0b\xe3\xdf?\x84:\xc7[c3\x1d\xa6H\xcb?\x95!x\xe7Je\xa7^\xd7;\x1dD\x9a@\xad\x86\x84P.\x98\x1c\xf3\xc8\x04\x81\x9e\xb9|,\rBF\xf0\x1a=Mif\xb1!\xcd\xb1\x98\xa7\x0e:\x17\x8f\xdd\xaa!)\x18d\xb8Aw\xca\x1f\x07:\xc5y+_zb\xed\x96:\x0ft\x189\x0e\x0cS\xff\xd3\xb7\x90\xa4\x88\x93\x82\xbe\xdd\x0f\x03\xe7\xe9 
\x91P0I\xebW\t\x11\xd6\x9d6\xc0\xad\xd6\xaa\xa7N\xdfR\xbf\xb0+\xee\xab\xb8\x82\xd4\x9f\xd0-\x95.\x1a\xb4\x1c\n\x18\xb0\xbdO\x03\x07<\xd0KK\'^\x0f\xa4\x1d\'\x0e\x92\xe4F\xda\x08\x02\xb6\xb8\xd8\xc5\x84\xcf\x95[\x94-x\xb2\xffm[\xda\xfah\xfe\x9ah\x13\xcab\xf8\xcd\xd5\x19\xf6\xba\xb0+#\xf1\x9bl\x99\xe1\x9b\x16m\x0c\x9b\x11\\\x9e\xf4XF\\\x16\x15\x10\xedR\xa5\x8d\x1c\x83\x00\x9a\x99\xf7:\x0e\x10\t\xcf\xd1\x83\xb49F-\xe1\x05+\xd5\xd2\x05\xca\x06yx\x0c\xfb`\xba\x83\x17F\xf9[\xf8\x8e\xe1\xc1[?i\xfe\x03#\x8fP\xb7$\x87\'D\x19\xb7!n\xc4\xca\xc3\xe8\xef\x1f\xcf\xf7\xc7\xb7\x01\xba\x19~\x7f\x8d\xe5)\x03F\x88\xb04\x82OXe\xb1Xu\xf9(+\x14\x92\xf6~jO\xc5}\xf7\x8d\x88\x91X\xbc8\x922\x1a\x1cH\xf4*9\xe7X\x17\xba\xf6\\\x12!\x82\xf9\xa3\x91\xd9:\x9aq\x1dU\x15<^\xbc\xf8\xed\x1e\xce\xbazI\xdb\xf70M\xc9\xe6\xe5\x8b\xf7\xd1\xbdO\x84\xc6\xb9$\xfd\xfdQ\x9bF\xb3\xd1\xeb\xca\xbc\xa2\x14\xeaP\x84Z\xc3r\x19C\xc396\x95`\xafA\xa7\xc8\x8e\xba\xdc\x05\xe5\x82\x82\xdb\xf25\xed\x8b\xc0\xb7\xc42\xba\xe8\xe0B\xdbU^\x86\xe9\x03%T\xa4b[P\x00\xbc\xbd\xf0\xc9/9\xc4\x05*\xd5s\x14;\\;e\'\x97K>\x9a\x0b\xeb(\xf8w\x97\xcez\xd8\xe7\xc2\xe7\xda\xd4\xa2\xf7#\x05\x01\xdb\x98JT\xe2R\xba^\x0b\x16\x9e\xc5\xa54\xe0?\x9b`\xfaF\xdb\xaeLI\xae\xf1w\xf1\xa3\xab\xd7\xf7R\x847\x0c\xfe\xa8\xdaI\xdb\x05\x1dEb)\xf9\xf5\x14M\xc2;\xf0\x1f\xae\xde\xe9h\x93oE\xa9\xf4\x00\x06\x96\x89\x91P\x02\xf3\xa6S\n\xad\x0c\xc8\x8a\x01\xfe\x97{_\xcad\xa5\xdb\x1d\x16\xd2P\xea\\\xadV\x7f\x9d\xf9Y\xee\xfem3\xf3\x1c\x17\xf1H\x03\xadXX\x94\xac\xf0N\x8e/\xf7\xb0,\x03\x19\xcbR\x00\xdey\xa2JVG\x1e\xfb\xda\xd7J@\xb7\x9a\xb6\xc4te\xa0v\xea\x80>\x0c o\xb0O*\xb3\x8f\xd8\xb41\xdd<\xf9%\xd7\x04\xb0c\xcd\xffb\xc9I\x93\xb3r=\xceeD\xe1Y/\\Z\xd3\xbf\x15,d\xf3\xe2\x0c<|S\xff\xf0a\x9f\xdb>\x8d~\xd63\x81\xf7\xb5\xb7h\xe2\'\x9ff>*;6\x04v\x8a[\xc9X\x8dlyz\xf8\x8b\x82\xf8\x9a\xb5c,G\x8aYD\xde\x16\xdb\xca\x9c\t\xd8q/v6\xb4\xf4\xc4h\x1b<\x0b\xbem\x0c\xd3\x11m\xd4\x87\xa9\x1c{\x87n\xd4\xca%\xb1~\xeb\x91[YP\xd96j\xda\xab:)#\x8c\xd7\xaf\xaee\xbc\x00;\xa5\x87+\xd0=\x03\xa4I\x92Q\x86\xca\x9e&sST00\xd1\x0b\x8e(\xff\x9c&\x9c\xb7\r\xbb\xd0\xbd.2\xe2K\xec\xa9j\xe1\xf2Fn\xf14u*?\x0f\x0b6I\xc1\x0f\xc1\ti\xdd\xad~,&\xac\xb1K7VQ\x07ysX\x9c\x92\xa8E\xdf\x1eQM\xc9/\xa4\x82\x92f\xbdt\xbcQv\x16\t?\xf91\xb7}7\xb2\x86\xb2\xf9\xec\x0b\rr\xd9\xb8{\xce\x89s\xc4\x82\xb7\xb5\x9eesB\xd3-\xd8N\xf2\x96+\xa9\xa3\n}\xbb.\x9d\xa8\xcf\xa5\xd7\x9a\xb5)q\xce\xdb\x8bI\xa9\x07;\xde\xe0\x1e\x97\xe7\x9b|\x0c\x94\x15\xcb\x05\x83t\xa9\x8cH\x161\xa0N.p\x8c\x85\xe7\x02\xec\xacPPj\x99\xa65\x90$\xbe\xdf\xddj]\xf4*e!?\xf4\x1c\xb2\x86OD\x96\xcc\xa5?\xf0\x06\xea\x86\x1c\xfdFg0\xe4\xb3\x94\x1c\xb8\x82\xd0]\xfd\xa9z\xee\xa9k\x19\xc20k\x896,R9,h\x1f\xb5\x97\xe6\xd7\xdaI\x1a$<\x06~\x8f\x02\x88\xccA\x99w\xfc\xed\xf4\\\xa5\xf8}\xef\xcf\xad6F\x12\xed\xd7\x10\xb95\x08\x93\xe0\xa5\x87.\xb7\xd2N"PDO!7Q\x1e!\x11\x89%\xccg\xbd\x10 
(\x99\xb5_\x12\x9b\xaa\xefr\x1e\x8f\xb1\x84\xcf\'9\r|\x95\n\xa3\xd2w\x8d5\xa9\x18\xaeu\x9e\xc1\xdc(\xca_\x8a\xb9\xd5\xe9k\xe9\x88\x83\x11\x83\xb2\x85\x19]\xc0k\xb2.YM)\xde\x10]\x95a\xd3[S\x83\xbd\xd4\xa2mU\x94J\xf1-I0a\x81\xce\x89\xc6!\xe3@\xfe\xd3\x96\xf3\xe9\\\'\xc7eE\x14\xfc\xd34\x05\xe7\xe3^\x1f\xfbM\x12RZ\xd3\x80\xef\x18,\r\xab=4\xc8@+\x8b\x13e+\xa4\xbdbv\x10\xac\x9b\xecJ\xbd\xa6\xfd_\x164\xbd\xec\x00\xbf\x91\x9b\x98\x7f\x9eTBU\xe2\xfd\x04%\x94S\xf8s\x93\xedI\xfb(\x16K\xde\xfc\xfa\xa2e\x7f\xa0c\x8b\x95\x92R\xf1\xaf\xf4T\xf1=f\x833\xa6\x8a\x1fO\t\xbd\x8a\xe1:\x82\xd6\xc0\xe3E\xec\xfa\xcbw\xf3\xe9\xed%~\x9c\x8d\xff\xd8\xe7;=\xb9\x166/\x95\xc1Z*9\xcc\xe4\x83\x14_\x068\x14\x93\xde\x1b\xc6\t\x07\x89\xca\xbf\xfb\x1b|T1\x8b\xb2v\x99\xf3\x19\xd1\xe3\xacL\xf6\xa2\xd6\xe4\x9e\x9a\x9bJE\x9f\xdaE\xac\xf2\xdc~\xfc\x87^n\xee\xe5&[\xbb\x1b.\x8a\xac\x97`\xee\xb8\xef\x99\xcb\xf37\x90r8\x14nP\xc5x3B\xedG-]\xe3\xf2\'[\xfc\xb1=\\.\xb4e\xdeH%\x97%O\xd5\xf1\xa9\x86\x1d\xef\xbft\xf4\x08Q~\xc9\xa1+6<\x94\xf5[mp\xbc\xad\xb5C\xad`\x12\n\xc1\x12%/\x91\x9c{\xdd\x19\xd4S\x89\x07<\xfa\xdc\xda\xd6\x95\xf2\xa2\xe2\x1b\xed\xcd\xef\x85\xab\xfa\xe5\x9c7\xe0\n4o\xf9\x10\xd7F\xf2\n#P\xdb\xa7\xb8\x98\xf2@\x01n\x95\x90Q\xcc\xda\xa5\x1cE\x1d\x0f\xd9n\x02tI\x9f\x01\xbaJ!D7\xb0\xde\x0c\xe0\xc7\xc0\xa4\xb8\x14^q\xb5\x8anh\x8f\xfeg\xb2\xc7\xec\xb2Q\x91\n\\T\xa6\x96\xec\x99@\xe4&y\xfdR\x9cb\xe5uU\xda\xf2\x85e\xdan\xc7K\xd5\xe4\xbc\x16\x8b\x99&:\xa9k\xdd\xfc\xfe\xc2Q\xef?t\x9fBgRo\xfe%\xa3\x11z\x99\xf2\x8dN\x83Ja\'U\x08\xad%p$\xa7G\xa3\xf0fZ\x92\x8b\xa6\x19\xc3$\xc3y\x8b\xda\xaa\xe4\x8f\xbe\xa9\xdc\xa4\x0b\xad\x9d,\x14=\x85\xa3\x88u\xa5\x81\xf0\x14\x00}4\x92 \xec\xdc\x1b\xcf\x83.5v\x9b\xf9>\t\xea\xfc:\xd48\xf1\xb4}|\xa9\x87\x9c\x800\x97N{\xb8\xbaU\xf1S\xe3h!\xfb\x1d\xe6\xc8A9\x95\xf6g\xce\n\x81~\xf2\xcc\xa7Y\xb6@\x18\xae\xbd\xb1\xf8\r"\xef\x95y!\x90\x8d\x1f\xef\x8dtR5l\xd5K-\xa7\x02\t\xce\xf9\x84\xd7\xa0\x87$\x15\x90\xb0\xaa\xf7\x96\x12\x07#zw%\xafP\x9f\x0c\x1cD\xe8\x81R\xb4\xc1\x0f\xf7\x99\xbc\xb1\xec\xf2\xa5\xcek\xf5\xacT\xd2!\x8a\xb8\x92\x11O(\xec\xbb\xd4hWf\xec-3\xd3\xed\xc2c\x86@\xb9\xc2\\\x8b\x9cn)\xfd\xc9\xf5i\xa9G\xdd\xc6\xbd\xa7\xa6W&\'L\x1a\xcd\xae`\x87Y\x97^\xa4!\xbf$\x99~\xd7R\xbe:tml\xb9\xd7\xfa\xa7\x84\xd0\x88\x942\x08m|\xaa+=\\\xe3_o\x17\xbbO\x91%Y<\xd2\xfd\xb1\xc7\x1f\xa5~\xa4\x9b\xfa:\xcd\x97p\xb1r\xcd\xcb\xb8\xe6\x99\x12\xa3\xb9\xec\xca\x81\xd6\xf0\nb\x03\x97\x83\xb7\x97\x17\xbe\xcbTzWL/\x9b\x15\xf9T\xf9`(\xe2!\x07\xff\xcb\x08jt\xc9X#U\x95\x8af\x18\xbe\xd0\x9dZ\xc5\x82\xfbk\xef\xa6e7&\xde\x8a\x0f\xcb\x1f\xe2\x8f\x90&VXgm/\x94\xef\xaa)/O\x84{H7Ib\x8e\xa5\x8d\xec\xebw\xca\x81&&y\xad\x04jXQ\xf0l\x8e\xe1\x06Q\xfdr\xed\x8e\r\xb9\x85x,_o\x9cR\xc44\x8c\x88\x15\xa1\xe7\x98\xbad\x9c\x9fZ\x00\x16\x13\x03\xb2U\x12\x0b\xa3\xd0i@\x13\xe3rA[\xa1\xff\xca\xba\xce\xae(\xb6\xae\xf9\xfd\xfd\x15\xa0 \x92\xa4\xc3\xf4t7 
# [unrecoverable payload elided: several kilobytes of a compressed bytes literal, closed with ")))", feeding an exec-style call inside the try block above]
except Exception as b:
    print(f'Error for: {b}')
| 14,671.090909
| 161,223
| 0.734258
|
9354540bc893f59e36ecde6e8e37256131b2eae2
| 23,746
|
py
|
Python
|
modular_client/modular_client.py
|
janelia-pypi/modular_device_python
|
dde4b9555c9898316f5afb6ba95a8110d1077ef9
|
[
"BSD-3-Clause"
] | 1
|
2021-03-23T15:10:22.000Z
|
2021-03-23T15:10:22.000Z
|
modular_client/modular_client.py
|
janelia-pypi/modular_device_python
|
dde4b9555c9898316f5afb6ba95a8110d1077ef9
|
[
"BSD-3-Clause"
] | null | null | null |
modular_client/modular_client.py
|
janelia-pypi/modular_device_python
|
dde4b9555c9898316f5afb6ba95a8110d1077ef9
|
[
"BSD-3-Clause"
] | 1
|
2015-12-10T19:35:22.000Z
|
2015-12-10T19:35:22.000Z
|
import serial
import time
import atexit
import json
import functools
import operator
import platform
import os
import inflection
import sre_yield
from serial_interface import SerialInterface, SerialInterfaces, find_serial_interface_ports, WriteFrequencyError
try:
from pkg_resources import get_distribution, DistributionNotFound
_dist = get_distribution('modular_client')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'modular_client')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except (ImportError,DistributionNotFound):
__version__ = None
else:
__version__ = _dist.version
DEBUG = False
BAUDRATE = 115200
class ModularClient(object):
'''ModularClient contains an instance of serial_interface.SerialInterface and
adds methods to it, like auto discovery of available modular devices in
Linux, Windows, and Mac OS X. This class automatically creates methods from
available functions reported by the modular device when it is running the
appropriate firmware. This is the modular device client library for
communicating with and calling remote methods on modular device servers.
Example Usage:
from modular_client import ModularClient
dev = ModularClient()
    # Will try to automatically find a device if one is available. This may be
    # slow if it needs to search many serial ports. If the device is not found
    # automatically, or to speed things up, specify the port directly.
dev = ModularClient(port='/dev/ttyACM0') # Linux specific port
dev = ModularClient(port='/dev/tty.usbmodem262471') # Mac OS X specific port
dev = ModularClient(port='COM3') # Windows specific port
dev.get_device_id()
dev.get_methods()
'''
_TIMEOUT = 0.05
_WRITE_READ_DELAY = 0.001
_WRITE_WRITE_DELAY = 0.005
_METHOD_ID_GET_METHOD_IDS = 0
_VERBOSE_HELP_STRING = '??'
def __init__(self,*args,**kwargs):
name = None
form_factor = None
serial_number = None
if 'debug' in kwargs:
self.debug = kwargs['debug']
else:
kwargs.update({'debug': DEBUG})
self.debug = DEBUG
if 'try_ports' in kwargs:
try_ports = kwargs.pop('try_ports')
else:
try_ports = None
if 'baudrate' not in kwargs:
kwargs.update({'baudrate': BAUDRATE})
elif (kwargs['baudrate'] is None) or (str(kwargs['baudrate']).lower() == 'default'):
kwargs.update({'baudrate': BAUDRATE})
if 'timeout' not in kwargs:
kwargs.update({'timeout': self._TIMEOUT})
if 'write_read_delay' not in kwargs:
kwargs.update({'write_read_delay': self._WRITE_READ_DELAY})
if 'write_write_delay' not in kwargs:
kwargs.update({'write_write_delay': self._WRITE_WRITE_DELAY})
if 'name' in kwargs:
name = kwargs.pop('name')
if 'form_factor' in kwargs:
form_factor = kwargs.pop('form_factor')
if 'serial_number' in kwargs:
serial_number = kwargs.pop('serial_number')
if ('port' not in kwargs) or (kwargs['port'] is None):
port = find_modular_device_port(baudrate=kwargs['baudrate'],
name=name,
form_factor=form_factor,
serial_number=serial_number,
try_ports=try_ports,
debug=kwargs['debug'])
kwargs.update({'port': port})
t_start = time.time()
self._serial_interface = SerialInterface(*args,**kwargs)
atexit.register(self._exit_modular_client)
self._create_methods()
# store the device id to output during debugging
self._device_id = self.get_device_id()
t_end = time.time()
self._debug_print('Initialization time =', (t_end - t_start))
def _debug_print(self, *args):
if self.debug:
print(*args)
def _exit_modular_client(self):
pass
def _args_to_request(self,*args):
request = json.dumps(args,separators=(',',':'))
        request += '\n'
return request
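    # For example (a sketch; real method ids come from the device's method
    # table at runtime):
    #   self._args_to_request(0)                       -> '[0]\n'
    #   self._args_to_request(2, 'setSerialNumber', 7) -> '[2,"setSerialNumber",7]\n'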
def _handle_response(self,response,request_id):
if response is None:
error_message = 'Did not receive server response.'
raise IOError(error_message)
try:
response_dict = json_string_to_dict(response)
except Exception as e:
error_message = 'Error:\n{0}\nUnable to parse server response:\n{1}'.format(str(e),response)
raise IOError(error_message)
try:
response_id = response_dict.pop('id')
except KeyError:
error_message = 'Server response does not contain id member:\n{0}'.format(response)
raise IOError(error_message)
        if response_id != request_id:
error_message = 'Response id:\n{0}\nDoes not match request id:\n{1}\nin response:{2}'.format(response_id,request_id,response)
raise IOError(error_message)
try:
error = response_dict.pop('error')
try:
message = error.pop('message')
except KeyError:
message = ''
try:
data = error.pop('data')
except KeyError:
data = ''
try:
code = error.pop('code')
except KeyError:
code = ''
error_message = '(from server) message: {0}, data: {1}, code: {2}'.format(message,data,code)
raise IOError(error_message)
except KeyError:
pass
try:
result = response_dict.pop('result')
except KeyError:
error_message = 'Server response does not contain result member:\n{0}'.format(response)
raise IOError(error_message)
return result
def _send_request_get_result(self,*args):
'''
Sends request to server over serial port and
returns response result
'''
request = self._args_to_request(*args)
self._debug_print('request', request)
response = self._serial_interface.write_read(request,use_readline=True,check_write_freq=True)
self._debug_print('response', response)
        if not isinstance(response, str):
response = response.decode('utf-8')
self._debug_print('type(response)', type(response))
result = self._handle_response(response,args[0])
return result
def _get_method_dict(self):
method_dict = self._send_request_get_result(self._METHOD_ID_GET_METHOD_IDS)
method_dict = dict([(inflection.underscore(method_name),method_id) for (method_name,method_id) in method_dict.items()])
return method_dict
def _send_request_by_method_id(self,method_id,*args):
method_args = [method_id]
method_args.extend(args)
result = self._send_request_get_result(*method_args)
return result
def _method_func_base(self,method_id,*args):
if len(args) == 1 and type(args[0]) is dict:
args_dict = args[0]
args_list = self._args_dict_to_list(args_dict)
else:
args_list = args
try:
result = self._send_request_by_method_id(method_id,*args_list)
except Exception as e:
device_id = self._device_id
port = self.get_port()
write_data = self._serial_interface._write_data
read_data = self._serial_interface._read_data
error_message = '\ndevice_id:\n{0}\nserial_port:\n{1}\nserial_write_data:\n{2}\nserial_read_data:\n{3}'.format(device_id,port,write_data,read_data)
raise e from Exception(error_message)
return result
def _create_method_docstring(self,method_id):
docstring = str(self._send_request_get_result(method_id,self._VERBOSE_HELP_STRING))
return docstring
def _create_methods(self):
self._method_dict = self._get_method_dict()
for method_name, method_id in sorted(self._method_dict.items()):
method_func = functools.partial(self._method_func_base, method_id)
method_func.__name__ = method_name
method_func.__doc__ = self._create_method_docstring(method_id)
setattr(self,method_name,method_func)
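            # note: a functools.partial stored on the instance is not a
            # descriptor, so calling the generated method does not receive
            # 'self' a second time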
    def _args_dict_to_list(self,args_dict):
        # The original body referenced an undefined 'order_dict' (a NameError
        # at runtime). As a hedged fix, fall back to the dict's own insertion
        # order, which matches the caller-supplied argument order on Python 3.7+.
        return list(args_dict.values())
def close(self):
'''
Close the device serial port.
'''
self._serial_interface.close()
def get_port(self):
return self._serial_interface.port
def get_methods(self):
'''
Get a list of modular methods automatically attached as class methods.
'''
return sorted(list(self._method_dict.keys()))
def call_get_result(self,method_name,*args):
method_name = inflection.camelize(method_name,False)
return self._send_request_get_result(method_name,*args)
def call(self,method_name,*args):
self.call_get_result(method_name,*args)
def send_json_request(self,request):
'''
Sends json request to device over serial port and returns result
'''
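        # Two request shapes are accepted (hypothetical method names shown):
        #   list form: '["getDeviceId"]'                 - element 0 is the method and the id
        #   dict form: '{"id":0,"method":"get_methods"}' - the method name is camelized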
request_python = json.loads(request)
try:
request_id = request_python["id"]
except TypeError:
pass
except KeyError:
error_message = 'Request does not contain an id:\n{0}'.format(request)
raise IOError(error_message)
try:
request_python["method"] = inflection.camelize(request_python["method"],False)
except TypeError:
pass
except KeyError:
error_message = 'Request does not contain a method:\n{0}'.format(request)
raise IOError(error_message)
        try:
            request_python[0] = inflection.camelize(request_python[0],False)
            request_id = request_python[0]
        except KeyError:
            # dict-form request; id and method were already handled above
            pass
        except IndexError:
            error_message = 'Request does not contain a method:\n{0}'.format(request)
            raise IOError(error_message)
request = json.dumps(request_python,separators=(',',':'))
request += '\n'
self._debug_print('request', request)
response = self._serial_interface.write_read(request,use_readline=True,check_write_freq=True)
self._debug_print('response', response)
result = self._handle_response(response,request_id)
return result
def convert_to_json(self,python_to_convert,response_indent=None):
'''
Convert python object to json string.
'''
converted_json = json.dumps(python_to_convert,separators=(',',':'),indent=response_indent)
return converted_json
def save_device_id(self,output_directory):
'''
Save device_id as a json file.
'''
if output_directory is None:
output_directory = os.path.join(os.path.curdir)
elif len(os.path.splitext(output_directory)[1]) > 0:
output_directory = os.path.dirname(output_directory)
if not os.path.exists(output_directory):
os.makedirs(output_directory)
result = self.call_get_result('getDeviceId')
output = {}
output['id'] = 'getDeviceId'
output['result'] = result
output_path = os.path.join(output_directory,'device_id.json')
with open(output_path,'w') as output_file:
json.dump(output,output_file,separators=(',',':'),indent=2)
try:
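            # removedirs only succeeds on an empty directory chain; after the
            # json dump above it normally is not empty, so this is a guarded
            # no-op cleanup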
os.removedirs(output_directory)
except OSError:
pass
def save_device_info(self,output_directory):
'''
Save device_info as a json file.
'''
if output_directory is None:
output_directory = os.path.join(os.path.curdir)
elif len(os.path.splitext(output_directory)[1]) > 0:
output_directory = os.path.dirname(output_directory)
if not os.path.exists(output_directory):
os.makedirs(output_directory)
result = self.call_get_result('getDeviceInfo')
output = {}
output['id'] = 'getDeviceInfo'
output['result'] = result
output_path = os.path.join(output_directory,'device_info.json')
with open(output_path,'w') as output_file:
json.dump(output,output_file,separators=(',',':'),indent=2)
try:
os.removedirs(output_directory)
except OSError:
pass
def save_api(self,output_directory,verbosity='DETAILED',firmware='ALL'):
'''
Save api as a set of json files.
'''
if output_directory is None:
output_directory = os.path.join(os.path.curdir,'api')
elif len(os.path.splitext(output_directory)[1]) > 0:
output_directory = os.path.dirname(output_directory)
if not os.path.exists(output_directory):
os.makedirs(output_directory)
device_info = self.call_get_result('getDeviceInfo')
for firmware_info in device_info['firmware']:
if (firmware == 'ALL') or (firmware == firmware_info['name']):
result = self.call_get_result('getApi',verbosity,[firmware_info['name']])
api = {}
api['id'] = 'getApi'
api['result'] = result
output_path = os.path.join(output_directory,firmware_info['name'] + '.json')
with open(output_path,'w') as api_file:
json.dump(api,api_file,separators=(',',':'),indent=2)
try:
os.removedirs(output_directory)
except OSError:
pass
class ModularClients(dict):
'''ModularClients inherits from dict and automatically populates it with
modular clients on all available serial ports. Access each individual client
with three keys, the device name, the form_factor, and the serial_number. If
you want to connect multiple ModularClients with the same name and
form_factor at the same time, first make sure they have unique
serial_numbers by connecting each device one by one and using the
set_serial_number method on each device.
Example Usage:
from modular_client import ModularClients
devs = ModularClients()
# Will try to automatically find all available devices. This may be slow if it
# needs to search many serial ports. If they are not found automatically or to
# speed up, specify ports to use.
devs = ModularClients(use_ports=['/dev/ttyACM0','/dev/ttyACM1']) # Linux
devs = ModularClients(use_ports='(/dev/ttyACM)[0-1]') # Linux string RE alternative
devs = ModularClients(use_ports=['/dev/tty.usbmodem262471','/dev/tty.usbmodem262472']) # Mac OS X
devs = ModularClients(use_ports='(/dev/tty\.usbmodem26247)[1-2]') # Mac OS X RE Alternative
devs = ModularClients(use_ports=['COM3','COM4']) # Windows
devs = ModularClients(use_ports='(COM)[3-4]') # Windows RE Alternative
devs.items()
# dev = devs[name][form_factor][serial_number]
devs = ModularClients(use_ports='(/dev/ttyACM)[0-1]',keys=[0,1])
dev = devs[0]
devs = ModularClients(use_ports='(/dev/ttyACM)[0-1]',keys='(device)[0-1]')
dev = devs['device0']
devs = ModularClients(use_ports='(/dev/ttyACM)[0-1]',ports_as_keys=True)
dev = devs['/dev/ttyACM0']
'''
def __init__(self,*args,**kwargs):
if 'key_port_debug' in kwargs:
self.key_port_debug = kwargs.pop('key_port_debug')
else:
self.key_port_debug = False
try:
modular_device_ports = kwargs.pop('use_ports')
if modular_device_ports is None:
raise KeyError
if isinstance(modular_device_ports,str):
modular_device_ports = list(sre_yield.AllStrings(modular_device_ports))
if len(modular_device_ports) != len(set(modular_device_ports)):
raise KeyError
except KeyError:
modular_device_ports = find_modular_device_ports(*args,**kwargs)
try:
keys = kwargs.pop('keys')
if keys is None:
raise KeyError
if isinstance(keys,str):
keys = list(sre_yield.AllStrings(keys))
if len(keys) != len(modular_device_ports):
raise KeyError
if len(keys) != len(set(keys)):
raise KeyError
except (KeyError,TypeError):
keys = [None] * len(modular_device_ports)
try:
ports_as_keys = kwargs.pop('ports_as_keys')
if ports_as_keys is None:
raise KeyError
except KeyError:
ports_as_keys = False
for key,port in zip(keys,modular_device_ports):
self._add_device(key,port,ports_as_keys,*args,**kwargs)
def _key_port_debug_print(self,key,port):
if self.key_port_debug:
print('key={0}, port={1}'.format(key,port))
def _add_device(self,key,port,ports_as_keys,*args,**kwargs):
kwargs.update({'port': port})
dev = ModularClient(*args,**kwargs)
if (key is None) and (not ports_as_keys):
device_id = dev.get_device_id()
name = device_id['name']
form_factor = device_id['form_factor']
serial_number = device_id['serial_number']
if name not in self:
self[name] = {}
if form_factor not in self[name]:
self[name][form_factor] = {}
self[name][form_factor][serial_number] = dev
self._key_port_debug_print('[{0}][{1}][{2}]'.format(name,form_factor,serial_number),port)
elif key is not None:
self[key] = dev
self._key_port_debug_print(key,port)
else:
key = dev.get_port()
self[key] = dev
self._key_port_debug_print(key,port)
def check_dict_for_key(d,k,dname=''):
    if k not in d:
if not dname:
dname = 'dictionary'
raise IOError('{0} does not contain {1}'.format(dname,k))
def json_string_to_dict(json_string):
json_dict = json.loads(json_string,object_hook=json_decode_dict)
return json_dict
def json_decode_dict(data):
'''
    Object hook for decoding dictionaries from serialized json data. On
    Python 2 it unpacks all strings as str objects rather than unicode; on
    Python 3 it simply recurses into nested containers.
'''
rv = {}
try:
for key, value in data.iteritems():
if isinstance(key, unicode):
key = key.encode('utf-8')
if isinstance(value, unicode):
value = value.encode('utf-8')
elif isinstance(value, list):
value = json_decode_list(value)
elif isinstance(value, dict):
value = json_decode_dict(value)
rv[key] = value
except (AttributeError,NameError):
for key, value in data.items():
if isinstance(value, list):
value = json_decode_list(value)
elif isinstance(value, dict):
value = json_decode_dict(value)
rv[key] = value
return rv
def json_decode_list(data):
'''
    Object hook for decoding lists from serialized json data. On Python 2 it
    unpacks all strings as str objects rather than unicode; on Python 3 it
    simply recurses into nested items.
'''
rv = []
try:
for item in data:
if isinstance(item, unicode):
item = item.encode('utf-8')
elif isinstance(item, list):
item = json_decode_list(item)
elif isinstance(item, dict):
item = json_decode_dict(item)
rv.append(item)
except NameError:
for item in data:
if isinstance(item, list):
item = json_decode_list(item)
elif isinstance(item, dict):
item = json_decode_dict(item)
rv.append(item)
return rv
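# Example of the two hooks above (a sketch; the unicode branch is reached on
# Python 2 only, so on Python 3 they simply recurse):
#   json_string_to_dict('{"a": [1, "b"]}')  ->  {'a': [1, 'b']}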
def find_modular_device_ports(baudrate=None,
name=None,
form_factor=None,
serial_number=None,
try_ports=None,
debug=DEBUG,
*args,
**kwargs):
serial_interface_ports = find_serial_interface_ports(try_ports=try_ports, debug=debug)
os_type = platform.system()
if os_type == 'Darwin':
serial_interface_ports = [x for x in serial_interface_ports if 'tty.usbmodem' in x or 'tty.usbserial' in x]
    if isinstance(name, str):
        name = [name]
    if isinstance(form_factor, str):
        form_factor = [form_factor]
    if isinstance(serial_number, int):
        serial_number = [serial_number]
modular_device_ports = {}
for port in serial_interface_ports:
try:
dev = ModularClient(port=port,baudrate=baudrate,debug=debug)
device_id = dev.get_device_id()
            if ((name is None) and (device_id['name'] is not None)) or ((name is not None) and (device_id['name'] in name)):
                if ((form_factor is None) and (device_id['form_factor'] is not None)) or ((form_factor is not None) and (device_id['form_factor'] in form_factor)):
                    if ((serial_number is None) and (device_id['serial_number'] is not None)) or ((serial_number is not None) and (device_id['serial_number'] in serial_number)):
modular_device_ports[port] = {'name': device_id['name'],
'form_factor': device_id['form_factor'],
'serial_number': device_id['serial_number']}
dev.close()
except (serial.SerialException, IOError):
pass
return modular_device_ports
def find_modular_device_port(baudrate=None,
name=None,
form_factor=None,
serial_number=None,
try_ports=None,
debug=DEBUG):
modular_device_ports = find_modular_device_ports(baudrate=baudrate,
name=name,
form_factor=form_factor,
serial_number=serial_number,
try_ports=try_ports,
debug=debug)
if len(modular_device_ports) == 1:
return list(modular_device_ports.keys())[0]
elif len(modular_device_ports) == 0:
serial_interface_ports = find_serial_interface_ports(try_ports)
err_string = 'Could not find any Modular devices. Check connections and permissions.\n'
err_string += 'Tried ports: ' + str(serial_interface_ports)
raise RuntimeError(err_string)
else:
err_string = 'Found more than one Modular device. Specify port or name and/or form_factor and/or serial_number.\n'
err_string += 'Matching ports: ' + str(modular_device_ports)
raise RuntimeError(err_string)
# -----------------------------------------------------------------------------------------
if __name__ == '__main__':
debug = False
dev = ModularClient(debug=debug)
| 41.154246
| 159
| 0.605323
|
0f412935d33f7885e2b545d945fbc07222feeab5
| 2,258
|
py
|
Python
|
swatcher/image.py
|
joshbduncan/swatcher
|
91e459df75be4c50d38540b8cf49c6c4ed6a5764
|
[
"MIT"
] | null | null | null |
swatcher/image.py
|
joshbduncan/swatcher
|
91e459df75be4c50d38540b8cf49c6c4ed6a5764
|
[
"MIT"
] | null | null | null |
swatcher/image.py
|
joshbduncan/swatcher
|
91e459df75be4c50d38540b8cf49c6c4ed6a5764
|
[
"MIT"
] | null | null | null |
from collections import Counter
from PIL import Image, ImageChops
from .color import normalize_rgb_values
def trim_excess(image: object) -> object:
"""
Trim excess background pixels from around an image.
:param image: PIL Image object
:returns: PIL Image object
"""
w, h = image.size
# get RGB value for each corner of image
corners = [
normalize_rgb_values(image.getpixel((0, 0))),
normalize_rgb_values(image.getpixel((w - 1, 0))),
normalize_rgb_values(image.getpixel((0, h - 1))),
normalize_rgb_values(image.getpixel((w - 1, h - 1))),
]
# count how many times each value is present
    color_count = Counter(corners).most_common()
# if multiple corners have the same pixel count don't trim
if len(color_count) > 1 and color_count[0][1] == color_count[1][1]:
return image
else: # set the comparison pixel to the most common value
bg_pixel = color_count[0][0]
# compare the original image to the excess pixels
comp = Image.new("RGB", image.size, bg_pixel)
diff = ImageChops.difference(image, comp)
bbox = diff.getbbox()
# crop the difference
return image.crop(bbox)
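# A quick behavioral sketch (hypothetical 10x10 white canvas with a 2x2 red
# square at (4, 4), assuming normalize_rgb_values returns the corner tuples
# unchanged):
#   canvas = Image.new("RGB", (10, 10), (255, 255, 255))
#   canvas.paste((255, 0, 0), (4, 4, 6, 6))
#   trim_excess(canvas).size  ->  (2, 2)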
def process_image(image: object, max_size: int = 500) -> object:
"""
Process the image for best color sampling results.
:param image: PIL Image object
:param max_size: maximum size of the image for color sampling
:returns: PIL Image object
"""
image = image.convert("RGBA")
# check to make sure image has pixels
w, h = image.size
if w == 0 or h == 0:
raise ValueError("The provided image has no pixels.")
# composite the image on a white background just in case it has transparency
bg = Image.new("RGBA", image.size, (255, 255, 255))
comp = Image.alpha_composite(bg, image)
# convert composite image to RGB since we only need the RGB color values
comp = comp.convert("RGB")
# crop the image if extra surrounding background pixels are found
comp = trim_excess(comp)
# reduce the image down to `max_size` to speed up processing
if comp.width > max_size or comp.height > max_size:
comp.thumbnail((max_size, max_size), resample=0)
return comp
| 34.738462
| 80
| 0.671833
|
731b3338ed4a11cc0ed88b9c776e65438c4f9969
| 850
|
py
|
Python
|
gcloud/apigw/constants.py
|
wkma/bk-sops
|
8fb5609c0c4495c28d588fbafa9d9f5f2976929b
|
[
"Apache-2.0"
] | 2
|
2021-07-28T01:48:31.000Z
|
2021-11-17T11:02:26.000Z
|
gcloud/apigw/constants.py
|
wkma/bk-sops
|
8fb5609c0c4495c28d588fbafa9d9f5f2976929b
|
[
"Apache-2.0"
] | null | null | null |
gcloud/apigw/constants.py
|
wkma/bk-sops
|
8fb5609c0c4495c28d588fbafa9d9f5f2976929b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
PROJECT_SCOPE_CMDB_BIZ = "cmdb_biz"
DEFAULT_APP_WHITELIST = {"bk_fta", "bk_bcs", "bk_datainstaller", "bk_dataadmin"}
| 53.125
| 115
| 0.785882
|
9decb5da52bef2cf77f44480edb90a21902e593d
| 1,423
|
py
|
Python
|
Kai/crab/NANOv7_Fri13/2017/ElMu/crab_cfg_2017_ElMu_C.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | 1
|
2022-01-17T17:29:38.000Z
|
2022-01-17T17:29:38.000Z
|
Kai/crab/NANOv7_Fri13/2017/ElMu/crab_cfg_2017_ElMu_C.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | null | null | null |
Kai/crab/NANOv7_Fri13/2017/ElMu/crab_cfg_2017_ElMu_C.py
|
NJManganelli/FourTopNAOD
|
9743d5b49bdbad27a74abb7b2d5b7295f678a0e3
|
[
"Apache-2.0"
] | 1
|
2021-12-15T10:56:50.000Z
|
2021-12-15T10:56:50.000Z
|
import os
from WMCore.Configuration import Configuration
from CRABClient.UserUtilities import getUsernameFromCRIC
config = Configuration()
config.section_("General")
config.General.requestName = '2017_ElMu_C'
config.General.transferOutputs = True
config.General.transferLogs = True
config.section_("JobType")
config.JobType.allowUndistributedCMSSW = True
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'PSet.py'
config.JobType.maxMemoryMB = 2000
config.JobType.maxJobRuntimeMin = 1315
config.JobType.numCores = 1
config.JobType.scriptExe = 'crab_script_2017_ElMu_C.sh'
config.JobType.inputFiles = ['crab_script_2017_ElMu_C.py',
os.path.join(os.environ['CMSSW_BASE'],'src/PhysicsTools/NanoAODTools/scripts/haddnano.py'),
]
config.JobType.outputFiles = ['hist.root']
config.JobType.sendPythonFolder = True
config.section_("Data")
config.Data.inputDataset = '/MuonEG/Run2017C-02Apr2020-v1/NANOAOD'
config.Data.inputDBS = 'global'
config.Data.splitting = 'FileBased'
if config.Data.splitting == 'FileBased':
config.Data.unitsPerJob = 1
# config.Data.totalUnits = $TOTAL_UNITS
# config.Data.userInputFiles = []
config.Data.outLFNDirBase = '/store/user/{user}/Fri13'.format(user=getUsernameFromCRIC())
config.Data.publication = True
config.Data.outputDatasetTag = 'Fri13'
config.section_("Site")
config.Site.storageSite = 'T2_CH_CERN'
| 36.487179
| 120
| 0.762474
|
556b11ae92eb13375000e60f7d49996e57129370
| 3,933
|
py
|
Python
|
openstack_exporter/exporter.py
|
sapcc/openstack-exporter
|
d9472fcf5790bd02664bf57a890ca4d3eab73960
|
[
"Apache-2.0"
] | null | null | null |
openstack_exporter/exporter.py
|
sapcc/openstack-exporter
|
d9472fcf5790bd02664bf57a890ca4d3eab73960
|
[
"Apache-2.0"
] | 3
|
2020-10-22T14:49:22.000Z
|
2022-03-23T13:53:51.000Z
|
openstack_exporter/exporter.py
|
sapcc/openstack-exporter
|
d9472fcf5790bd02664bf57a890ca4d3eab73960
|
[
"Apache-2.0"
] | null | null | null |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import click
import importlib
import logging
import os
import sys
import time
from prometheus_client.core import REGISTRY
from prometheus_client import start_http_server
import yaml
import openstack_exporter
LOG = logging.getLogger(__name__)
def factory(module_class_string, super_cls: type = None, **kwargs):
"""
:param module_class_string: full name of the class to create an object of
:param super_cls: expected super class for validity, None if bypass
:param kwargs: parameters to pass
:return:
"""
module_name, class_name = module_class_string.rsplit(".", 1)
module = importlib.import_module(module_name)
assert hasattr(module, class_name), (
"class {} is not in {}".format(class_name, module_name))
# click.echo('reading class {} from module {}'.format(
# class_name, module_name))
cls = getattr(module, class_name)
if super_cls is not None:
assert issubclass(cls, super_cls), (
"class {} should inherit from {}".format(
class_name, super_cls.__name__))
# click.echo('initialising {} with params {}'.format(class_name, kwargs))
obj = cls(**kwargs)
return obj
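# A minimal sanity check of factory() using only the standard library:
#   factory('collections.OrderedDict', super_cls=dict)  ->  OrderedDict()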
def load_and_register_collectors(collector_config, openstack_config):
"""Load all enabled collectors from config."""
for collector in collector_config:
cfg = collector_config[collector]
if cfg['enabled']:
LOG.info("Loading collector '{}'".format(cfg['collector']))
cls = factory(cfg['collector'], openstack_config=openstack_config)
REGISTRY.register(cls)
def run_prometheus_server(port, collector_config, openstack_config):
start_http_server(int(port))
load_and_register_collectors(collector_config, openstack_config)
while True:
time.sleep(1)
def get_config(config_file):
    if os.path.exists(config_file):
        try:
            with open(config_file) as f:
                return yaml.load(f, Loader=yaml.FullLoader)
        except IOError as e:
            # previously 'config' could be referenced while unbound after an IOError
            logging.error("Couldn't open configuration file: " + str(e))
            sys.exit(1)
    else:
        logging.error("Config file doesn't exist: " + config_file)
        sys.exit(1)
@click.command()
@click.option("--port", metavar="<port>", default=9102,
help="specify exporter serving port")
@click.option("-c", "--config", metavar="<config>",
help="path to rest config")
@click.version_option()
@click.help_option()
def main(port, config):
if not config:
raise click.ClickException("Missing OpenStack config yaml --config")
config_obj = get_config(config)
exporter_config = config_obj['exporter']
os_config = config_obj['openstack']
collector_config = config_obj['collectors']
    if exporter_config.get('log_level'):
        LOG.setLevel(logging.getLevelName(
            exporter_config['log_level'].upper()))
    else:
        LOG.setLevel(logging.getLevelName("INFO"))
    log_format = '[%(asctime)s] [%(levelname)s] %(message)s'
    logging.basicConfig(stream=sys.stdout, format=log_format)
LOG.info("Starting OpenStack Exporter {} on port={} config={}".format(
openstack_exporter.version_string(),
port,
config
))
run_prometheus_server(port, collector_config, os_config)
if __name__ == '__main__':
main()
| 32.504132
| 78
| 0.67938
|
0063ed4a186f3a11208534193b7127a86da1ac8f
| 4,456
|
py
|
Python
|
Utils.py
|
Alaszun/ALttPEntranceRandomizer
|
390bd1f5615deff621970e135c07d45b0bb59ec1
|
[
"MIT"
] | 27
|
2019-05-06T21:12:24.000Z
|
2020-08-17T17:33:34.000Z
|
Utils.py
|
Alaszun/ALttPEntranceRandomizer
|
390bd1f5615deff621970e135c07d45b0bb59ec1
|
[
"MIT"
] | 17
|
2019-05-04T18:18:19.000Z
|
2020-05-07T00:21:53.000Z
|
Utils.py
|
Alaszun/ALttPEntranceRandomizer
|
390bd1f5615deff621970e135c07d45b0bb59ec1
|
[
"MIT"
] | 37
|
2019-05-04T17:45:54.000Z
|
2022-01-30T10:20:23.000Z
|
import os
import subprocess
import sys
def int16_as_bytes(value):
value = value & 0xFFFF
return [value & 0xFF, (value >> 8) & 0xFF]
def int32_as_bytes(value):
value = value & 0xFFFFFFFF
return [value & 0xFF, (value >> 8) & 0xFF, (value >> 16) & 0xFF, (value >> 24) & 0xFF]
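# Both helpers emit little-endian byte lists, e.g.:
#   int16_as_bytes(0x1234)     -> [0x34, 0x12]
#   int32_as_bytes(0x01020304) -> [0x04, 0x03, 0x02, 0x01]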
def pc_to_snes(value):
return ((value<<1) & 0x7F0000)|(value & 0x7FFF)|0x8000
def snes_to_pc(value):
return ((value & 0x7F0000)>>1)|(value & 0x7FFF)
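# The PC <-> SNES (LoROM) mapping round-trips, e.g.:
#   pc_to_snes(0x012345) -> 0x02A345
#   snes_to_pc(0x02A345) -> 0x012345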
def is_bundled():
return getattr(sys, 'frozen', False)
def local_path(path):
if local_path.cached_path is not None:
return os.path.join(local_path.cached_path, path)
if is_bundled():
# we are running in a bundle
local_path.cached_path = sys._MEIPASS # pylint: disable=protected-access,no-member
else:
# we are running in a normal Python environment
local_path.cached_path = os.path.dirname(os.path.abspath(__file__))
return os.path.join(local_path.cached_path, path)
local_path.cached_path = None
def output_path(path):
if output_path.cached_path is not None:
return os.path.join(output_path.cached_path, path)
if not is_bundled():
output_path.cached_path = '.'
return os.path.join(output_path.cached_path, path)
else:
# has been packaged, so cannot use CWD for output.
if sys.platform == 'win32':
#windows
import ctypes.wintypes
CSIDL_PERSONAL = 5 # My Documents
SHGFP_TYPE_CURRENT = 0 # Get current, not default value
buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
ctypes.windll.shell32.SHGetFolderPathW(None, CSIDL_PERSONAL, None, SHGFP_TYPE_CURRENT, buf)
documents = buf.value
elif sys.platform == 'darwin':
from AppKit import NSSearchPathForDirectoriesInDomains # pylint: disable=import-error
# http://developer.apple.com/DOCUMENTATION/Cocoa/Reference/Foundation/Miscellaneous/Foundation_Functions/Reference/reference.html#//apple_ref/c/func/NSSearchPathForDirectoriesInDomains
NSDocumentDirectory = 9
NSUserDomainMask = 1
# True for expanding the tilde into a fully qualified path
documents = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, True)[0]
else:
raise NotImplementedError('Not supported yet')
output_path.cached_path = os.path.join(documents, 'ALttPEntranceRandomizer')
if not os.path.exists(output_path.cached_path):
os.mkdir(output_path.cached_path)
return os.path.join(output_path.cached_path, path)
output_path.cached_path = None
def open_file(filename):
if sys.platform == 'win32':
os.startfile(filename)
else:
open_command = 'open' if sys.platform == 'darwin' else 'xdg-open'
subprocess.call([open_command, filename])
def close_console():
if sys.platform == 'win32':
#windows
import ctypes.wintypes
try:
ctypes.windll.kernel32.FreeConsole()
except Exception:
pass
def new_logic_array():
import random
l = list(range(256))
random.SystemRandom().shuffle(l)
chunks = [l[i:i + 16] for i in range(0, len(l), 16)]
lines = [", ".join([str(j) for j in i]) for i in chunks]
print("logic_hash = ["+",\n ".join(lines)+"]")
def make_new_base2current(old_rom='Zelda no Densetsu - Kamigami no Triforce (Japan).sfc', new_rom='working.sfc'):
from collections import OrderedDict
import json
import hashlib
with open(old_rom, 'rb') as stream:
old_rom_data = bytearray(stream.read())
with open(new_rom, 'rb') as stream:
new_rom_data = bytearray(stream.read())
# extend to 2 mb
old_rom_data.extend(bytearray([0x00] * (2097152 - len(old_rom_data))))
out_data = OrderedDict()
for idx, old in enumerate(old_rom_data):
new = new_rom_data[idx]
if old != new:
out_data[idx] = [int(new)]
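    # merge runs of consecutive changed offsets into single patch entries,
    # walking backwards so each run collapses onto its lowest offset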
for offset in reversed(list(out_data.keys())):
if offset - 1 in out_data:
out_data[offset-1].extend(out_data.pop(offset))
with open('data/base2current.json', 'wt') as outfile:
json.dump([{key:value} for key, value in out_data.items()], outfile, separators=(",", ":"))
basemd5 = hashlib.md5()
basemd5.update(new_rom_data)
return "New Rom Hash: " + basemd5.hexdigest()
| 36.227642
| 196
| 0.654399
|
ef2f8b0cfa6914d4f5c741d8ac1eae90ce7c3bc0
| 1,184
|
py
|
Python
|
test_query_counter/models.py
|
silentninja/django-test-query-counter
|
0aadc440d0dad62586121db25aad9ff77cb34204
|
[
"MIT"
] | null | null | null |
test_query_counter/models.py
|
silentninja/django-test-query-counter
|
0aadc440d0dad62586121db25aad9ff77cb34204
|
[
"MIT"
] | null | null | null |
test_query_counter/models.py
|
silentninja/django-test-query-counter
|
0aadc440d0dad62586121db25aad9ff77cb34204
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
from django.http import HttpResponse, HttpRequest
@dataclass
class IArbitraryData:
shapeHashV1Base64: str
asJsonString: str
asText: str
@dataclass
class IBody:
contentType: str
value: IArbitraryData
@dataclass
class IResponse:
statusCode: int
headers: IArbitraryData
body: IBody
@classmethod
def convert(cls, response: HttpResponse) -> IResponse:
pass
@dataclass
class IHttpInteractionTag:
name: str
value: str
@dataclass
class IRequest:
host: str
method: str
path: str
query: IArbitraryData
headers: IArbitraryData
body: IBody
@classmethod
def convert(cls, request: HttpRequest) -> IRequest:
pass
@dataclass
class IHttpInteraction:
uuid: str
request: Optional[IRequest]
response: Optional[IResponse]
tags: Optional[list[IHttpInteractionTag]]
@classmethod
def generate_from(cls, request: HttpRequest, response: HttpResponse) -> IHttpInteraction:
# Todo extract data from request/response
return IHttpInteraction("", None, None, None)
| 18.5
| 93
| 0.711993
|
5fe04504e73a3b705fe931c04dce18255cdebf0d
| 5,685
|
py
|
Python
|
anytask/users/migrations/0003_auto__add_field_userprofile_ya_uid.py
|
AnnaSvalova/anytask
|
f814b43c496f67a2efe2a150873a1ae32ad97449
|
[
"MIT"
] | 1
|
2018-12-03T05:48:43.000Z
|
2018-12-03T05:48:43.000Z
|
anytask/users/migrations/0003_auto__add_field_userprofile_ya_uid.py
|
AnnaSvalova/anytask
|
f814b43c496f67a2efe2a150873a1ae32ad97449
|
[
"MIT"
] | null | null | null |
anytask/users/migrations/0003_auto__add_field_userprofile_ya_uid.py
|
AnnaSvalova/anytask
|
f814b43c496f67a2efe2a150873a1ae32ad97449
|
[
"MIT"
] | 1
|
2021-09-18T22:38:20.000Z
|
2021-09-18T22:38:20.000Z
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'UserProfile.ya_uid'
db.add_column('users_userprofile', 'ya_uid',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'UserProfile.ya_uid'
db.delete_column('users_userprofile', 'ya_uid')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'users.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'academic_degree': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'academic_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'added_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'second_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'unit': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'update_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'ya_contest_oauth': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'ya_passport_oauth': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'ya_uid': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['users']
| 72.884615
| 182
| 0.566755
|
4d39f3cc519187b6029ee0292f0ebe854f0db85e
| 24,527
|
py
|
Python
|
Assets/Python/Plugins/Resources/Lib/SocketServer.py
|
OpenColonyShip/OpenColonyShip-Core
|
3dd8fd5c86ca89b1bf76d4dc4e0372f2c924d2ae
|
[
"MIT"
] | 42
|
2018-12-12T01:00:59.000Z
|
2022-03-27T07:32:29.000Z
|
Assets/Python/Plugins/Resources/Lib/SocketServer.py
|
OpenColonyShip/OpenColonyShip-Core
|
3dd8fd5c86ca89b1bf76d4dc4e0372f2c924d2ae
|
[
"MIT"
] | 13
|
2020-11-06T13:50:45.000Z
|
2022-01-25T07:17:37.000Z
|
Assets/Python/Plugins/Resources/Lib/SocketServer.py
|
OpenColonyShip/OpenColonyShip-Core
|
3dd8fd5c86ca89b1bf76d4dc4e0372f2c924d2ae
|
[
"MIT"
] | 8
|
2020-11-14T04:30:26.000Z
|
2021-01-16T17:55:19.000Z
|
"""Generic socket server classes.
This module tries to capture the various aspects of defining a server:
For socket-based servers:
- address family:
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
- AF_UNIX: Unix domain sockets
  - others, e.g. AF_DECNET are conceivable (see <socket.h>)
- socket type:
- SOCK_STREAM (reliable stream, e.g. TCP)
- SOCK_DGRAM (datagrams, e.g. UDP)
For request-based servers (including socket-based):
- client address verification before further looking at the request
(This is actually a hook for any processing that needs to look
at the request before anything else, e.g. logging)
- how to handle multiple requests:
- synchronous (one request is handled at a time)
- forking (each request is handled by a new process)
- threading (each request is handled by a new thread)
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but
saves some typing. (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
synchronous servers of four types:
+------------+
| BaseServer |
+------------+
|
v
+-----------+ +------------------+
| TCPServer |------->| UnixStreamServer |
+-----------+ +------------------+
|
v
+-----------+ +--------------------+
| UDPServer |------->| UnixDatagramServer |
+-----------+ +--------------------+
Note that UnixDatagramServer derives from UDPServer, not from
UnixStreamServer -- the only difference between an IP and a Unix
stream server is the address family, which is simply repeated in both
unix server classes.
Forking and threading versions of each type of server can be created
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
instance, a threading UDP server class is created as follows:
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
The Mix-in class must come first, since it overrides a method defined
in UDPServer! Setting the various member variables also changes
the behavior of the underlying server mechanism.
To implement a service, you must derive a class from
BaseRequestHandler and redefine its handle() method. You can then run
various versions of the service by combining one of the server classes
with your request handler class.
The request handler class must be different for datagram or stream
services. This can be hidden by using the request handler
subclasses StreamRequestHandler or DatagramRequestHandler.
Of course, you still have to use your head!
For instance, it makes no sense to use a forking server if the service
contains state in memory that can be modified by requests (since the
modifications in the child process would never reach the initial state
kept in the parent process and passed to each child). In this case,
you can use a threading server, but you will probably have to use
locks to avoid two nearly simultaneous requests from applying
conflicting changes to the server state.
On the other hand, if you are building e.g. an HTTP server, where all
data is stored externally (e.g. in the file system), a synchronous
class will essentially render the service "deaf" while one request is
being handled -- which may be for a very long time if a client is slow
to read all the data it has requested. Here a threading or forking
server is appropriate.
In some cases, it may be appropriate to process part of a request
synchronously, but to finish processing in a forked child depending on
the request data. This can be implemented by using a synchronous
server and doing an explicit fork in the request handler class
handle() method.
Another approach to handling multiple simultaneous requests in an
environment that supports neither threads nor fork (or where these are
too expensive or inappropriate for the service) is to maintain an
explicit table of partially finished requests and to use select() to
decide which request to work on next (or whether to handle a new
incoming request). This is particularly important for stream services
where each client can potentially be connected for a long time (if
threads or subprocesses cannot be used).
Future work:
- Standard classes for Sun RPC (which uses either UDP or TCP)
- Standard mix-in classes to implement various authentication
and encryption schemes
- Standard framework for select-based multiplexing
XXX Open problems:
- What to do with out-of-band data?
BaseServer:
- split generic "request" functionality out into BaseServer class.
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
Example: read entries from a SQL database (requires overriding
get_request() to return a table entry from the database); each
entry is processed by a RequestHandlerClass.
"""
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
__version__ = "0.4"
import socket
import select
import sys
import os
import errno
try:
import threading
except ImportError:
import dummy_threading as threading
__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
"ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
"StreamRequestHandler","DatagramRequestHandler",
"ThreadingMixIn", "ForkingMixIn"]
if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
def _eintr_retry(func, *args):
"""restart a system call interrupted by EINTR"""
while True:
try:
return func(*args)
except (OSError, select.error) as e:
if e.args[0] != errno.EINTR:
raise
class BaseServer:
"""Base class for server classes.
Methods for the caller:
- __init__(server_address, RequestHandlerClass)
- serve_forever(poll_interval=0.5)
- shutdown()
- handle_request() # if you do not use serve_forever()
- fileno() -> int # for select()
Methods that may be overridden:
- server_bind()
- server_activate()
- get_request() -> request, client_address
- handle_timeout()
- verify_request(request, client_address)
- server_close()
- process_request(request, client_address)
- shutdown_request(request)
- close_request(request)
- handle_error()
Methods for derived classes:
- finish_request(request, client_address)
Class variables that may be overridden by derived classes or
instances:
- timeout
- address_family
- socket_type
- allow_reuse_address
Instance variables:
- RequestHandlerClass
- socket
"""
timeout = None
def __init__(self, server_address, RequestHandlerClass):
"""Constructor. May be extended, do not override."""
self.server_address = server_address
self.RequestHandlerClass = RequestHandlerClass
self.__is_shut_down = threading.Event()
self.__shutdown_request = False
def server_activate(self):
"""Called by constructor to activate the server.
May be overridden.
"""
pass
def serve_forever(self, poll_interval=0.5):
"""Handle one request at a time until shutdown.
Polls for shutdown every poll_interval seconds. Ignores
self.timeout. If you need to do periodic tasks, do them in
another thread.
"""
self.__is_shut_down.clear()
try:
while not self.__shutdown_request:
# XXX: Consider using another file descriptor or
# connecting to the socket to wake this up instead of
# polling. Polling reduces our responsiveness to a
# shutdown request and wastes cpu at all other times.
r, w, e = _eintr_retry(select.select, [self], [], [],
poll_interval)
if self in r:
self._handle_request_noblock()
finally:
self.__shutdown_request = False
self.__is_shut_down.set()
def shutdown(self):
"""Stops the serve_forever loop.
Blocks until the loop has finished. This must be called while
serve_forever() is running in another thread, or it will
deadlock.
"""
self.__shutdown_request = True
self.__is_shut_down.wait()
# The distinction between handling, getting, processing and
# finishing a request is fairly arbitrary. Remember:
#
# - handle_request() is the top-level call. It calls
# select, get_request(), verify_request() and process_request()
# - get_request() is different for stream or datagram sockets
# - process_request() is the place that may fork a new process
# or create a new thread to finish the request
# - finish_request() instantiates the request handler class;
# this constructor will handle the request all by itself
def handle_request(self):
"""Handle one request, possibly blocking.
Respects self.timeout.
"""
# Support people who used socket.settimeout() to escape
# handle_request before self.timeout was available.
timeout = self.socket.gettimeout()
if timeout is None:
timeout = self.timeout
elif self.timeout is not None:
timeout = min(timeout, self.timeout)
fd_sets = _eintr_retry(select.select, [self], [], [], timeout)
if not fd_sets[0]:
self.handle_timeout()
return
self._handle_request_noblock()
def _handle_request_noblock(self):
"""Handle one request, without blocking.
I assume that select.select has returned that the socket is
readable before this function was called, so there should be
no risk of blocking in get_request().
"""
try:
request, client_address = self.get_request()
except socket.error:
return
if self.verify_request(request, client_address):
try:
self.process_request(request, client_address)
except:
self.handle_error(request, client_address)
self.shutdown_request(request)
else:
self.shutdown_request(request)
def handle_timeout(self):
"""Called if no new request arrives within self.timeout.
Overridden by ForkingMixIn.
"""
pass
def verify_request(self, request, client_address):
"""Verify the request. May be overridden.
Return True if we should proceed with this request.
"""
return True
def process_request(self, request, client_address):
"""Call finish_request.
Overridden by ForkingMixIn and ThreadingMixIn.
"""
self.finish_request(request, client_address)
self.shutdown_request(request)
def server_close(self):
"""Called to clean-up the server.
May be overridden.
"""
pass
def finish_request(self, request, client_address):
"""Finish one request by instantiating RequestHandlerClass."""
self.RequestHandlerClass(request, client_address, self)
def shutdown_request(self, request):
"""Called to shutdown and close an individual request."""
self.close_request(request)
def close_request(self, request):
"""Called to clean up an individual request."""
pass
def handle_error(self, request, client_address):
"""Handle an error gracefully. May be overridden.
The default is to print a traceback and continue.
"""
print '-'*40
print 'Exception happened during processing of request from',
print client_address
import traceback
traceback.print_exc() # XXX But this goes to stderr!
print '-'*40
class TCPServer(BaseServer):
"""Base class for various socket-based server classes.
Defaults to synchronous IP stream (i.e., TCP).
Methods for the caller:
- __init__(server_address, RequestHandlerClass, bind_and_activate=True)
- serve_forever(poll_interval=0.5)
- shutdown()
- handle_request() # if you don't use serve_forever()
- fileno() -> int # for select()
Methods that may be overridden:
- server_bind()
- server_activate()
- get_request() -> request, client_address
- handle_timeout()
- verify_request(request, client_address)
- process_request(request, client_address)
- shutdown_request(request)
- close_request(request)
- handle_error()
Methods for derived classes:
- finish_request(request, client_address)
Class variables that may be overridden by derived classes or
instances:
- timeout
- address_family
- socket_type
- request_queue_size (only for stream sockets)
- allow_reuse_address
Instance variables:
- server_address
- RequestHandlerClass
- socket
"""
address_family = socket.AF_INET
socket_type = socket.SOCK_STREAM
request_queue_size = 5
allow_reuse_address = False
def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
"""Constructor. May be extended, do not override."""
BaseServer.__init__(self, server_address, RequestHandlerClass)
self.socket = socket.socket(self.address_family,
self.socket_type)
if bind_and_activate:
try:
self.server_bind()
self.server_activate()
except:
self.server_close()
raise
def server_bind(self):
"""Called by constructor to bind the socket.
May be overridden.
"""
if self.allow_reuse_address:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)
self.server_address = self.socket.getsockname()
def server_activate(self):
"""Called by constructor to activate the server.
May be overridden.
"""
self.socket.listen(self.request_queue_size)
def server_close(self):
"""Called to clean-up the server.
May be overridden.
"""
self.socket.close()
def fileno(self):
"""Return socket file number.
Interface required by select().
"""
return self.socket.fileno()
def get_request(self):
"""Get the request and client address from the socket.
May be overridden.
"""
return self.socket.accept()
def shutdown_request(self, request):
"""Called to shutdown and close an individual request."""
try:
#explicitly shutdown. socket.close() merely releases
#the socket and waits for GC to perform the actual close.
request.shutdown(socket.SHUT_WR)
except socket.error:
pass #some platforms may raise ENOTCONN here
self.close_request(request)
def close_request(self, request):
"""Called to clean up an individual request."""
request.close()
class UDPServer(TCPServer):
"""UDP server class."""
allow_reuse_address = False
socket_type = socket.SOCK_DGRAM
max_packet_size = 8192
def get_request(self):
data, client_addr = self.socket.recvfrom(self.max_packet_size)
return (data, self.socket), client_addr
def server_activate(self):
# No need to call listen() for UDP.
pass
def shutdown_request(self, request):
# No need to shutdown anything.
self.close_request(request)
def close_request(self, request):
# No need to close anything.
pass
class ForkingMixIn:
"""Mix-in class to handle each request in a new process."""
timeout = 300
active_children = None
max_children = 40
def collect_children(self):
"""Internal routine to wait for children that have exited."""
if self.active_children is None:
return
# If we're above the max number of children, wait and reap them until
# we go back below threshold. Note that we use waitpid(-1) below to be
# able to collect children in size(<defunct children>) syscalls instead
# of size(<children>): the downside is that this might reap children
# which we didn't spawn, which is why we only resort to this when we're
# above max_children.
while len(self.active_children) >= self.max_children:
try:
pid, _ = os.waitpid(-1, 0)
self.active_children.discard(pid)
except OSError as e:
if e.errno == errno.ECHILD:
# we don't have any children, we're done
self.active_children.clear()
elif e.errno != errno.EINTR:
break
# Now reap all defunct children.
for pid in self.active_children.copy():
try:
pid, _ = os.waitpid(pid, os.WNOHANG)
# if the child hasn't exited yet, pid will be 0 and ignored by
# discard() below
self.active_children.discard(pid)
except OSError as e:
if e.errno == errno.ECHILD:
# someone else reaped it
self.active_children.discard(pid)
def handle_timeout(self):
"""Wait for zombies after self.timeout seconds of inactivity.
May be extended, do not override.
"""
self.collect_children()
def process_request(self, request, client_address):
"""Fork a new subprocess to process the request."""
self.collect_children()
pid = os.fork()
if pid:
# Parent process
if self.active_children is None:
self.active_children = set()
self.active_children.add(pid)
self.close_request(request) #close handle in parent process
return
else:
# Child process.
# This must never return, hence os._exit()!
try:
self.finish_request(request, client_address)
self.shutdown_request(request)
os._exit(0)
except:
try:
self.handle_error(request, client_address)
self.shutdown_request(request)
finally:
os._exit(1)
class ThreadingMixIn:
"""Mix-in class to handle each request in a new thread."""
# Decides how threads will act upon termination of the
# main process
daemon_threads = False
def process_request_thread(self, request, client_address):
"""Same as in BaseServer but as a thread.
In addition, exception handling is done here.
"""
try:
self.finish_request(request, client_address)
self.shutdown_request(request)
except:
self.handle_error(request, client_address)
self.shutdown_request(request)
def process_request(self, request, client_address):
"""Start a new thread to process the request."""
t = threading.Thread(target = self.process_request_thread,
args = (request, client_address))
t.daemon = self.daemon_threads
t.start()
class ForkingUDPServer(ForkingMixIn, UDPServer): pass
class ForkingTCPServer(ForkingMixIn, TCPServer): pass
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
if hasattr(socket, 'AF_UNIX'):
class UnixStreamServer(TCPServer):
address_family = socket.AF_UNIX
class UnixDatagramServer(UDPServer):
address_family = socket.AF_UNIX
class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
class BaseRequestHandler:
"""Base class for request handler classes.
This class is instantiated for each request to be handled. The
constructor sets the instance variables request, client_address
and server, and then calls the handle() method. To implement a
specific service, all you need to do is to derive a class which
defines a handle() method.
The handle() method can find the request as self.request, the
client address as self.client_address, and the server (in case it
needs access to per-server information) as self.server. Since a
separate instance is created for each request, the handle() method
can define other arbitrary instance variables.
"""
def __init__(self, request, client_address, server):
self.request = request
self.client_address = client_address
self.server = server
self.setup()
try:
self.handle()
finally:
self.finish()
def setup(self):
pass
def handle(self):
pass
def finish(self):
pass
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
class StreamRequestHandler(BaseRequestHandler):
"""Define self.rfile and self.wfile for stream sockets."""
# Default buffer sizes for rfile, wfile.
# We default rfile to buffered because otherwise it could be
# really slow for large data (a getc() call per byte); we make
# wfile unbuffered because (a) often after a write() we want to
# read and we need to flush the line; (b) big writes to unbuffered
# files are typically optimized by stdio even when big reads
# aren't.
rbufsize = -1
wbufsize = 0
# A timeout to apply to the request socket, if not None.
timeout = None
# Disable nagle algorithm for this socket, if True.
# Use only when wbufsize != 0, to avoid small packets.
disable_nagle_algorithm = False
def setup(self):
self.connection = self.request
if self.timeout is not None:
self.connection.settimeout(self.timeout)
if self.disable_nagle_algorithm:
self.connection.setsockopt(socket.IPPROTO_TCP,
socket.TCP_NODELAY, True)
self.rfile = self.connection.makefile('rb', self.rbufsize)
self.wfile = self.connection.makefile('wb', self.wbufsize)
def finish(self):
if not self.wfile.closed:
try:
self.wfile.flush()
except socket.error:
# A final socket error may have occurred here, such as
# the local error ECONNABORTED.
pass
self.wfile.close()
self.rfile.close()
class DatagramRequestHandler(BaseRequestHandler):
"""Define self.rfile and self.wfile for datagram sockets."""
def setup(self):
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
self.packet, self.socket = self.request
self.rfile = StringIO(self.packet)
self.wfile = StringIO()
def finish(self):
self.socket.sendto(self.wfile.getvalue(), self.client_address)
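To make the mix-in pattern described in the module docstring above concrete, here is a minimal usage sketch: a threading TCP echo server built from these classes. The host and port are arbitrary values chosen for illustration.

# Minimal echo-server sketch using the classes above (host/port are arbitrary).
import SocketServer

class EchoHandler(SocketServer.StreamRequestHandler):
    def handle(self):
        # rfile/wfile are created by StreamRequestHandler.setup()
        for line in iter(self.rfile.readline, ''):
            self.wfile.write(line)

if __name__ == '__main__':
    # The mix-in comes first so its process_request() overrides TCPServer's.
    server = SocketServer.ThreadingTCPServer(('127.0.0.1', 9999), EchoHandler)
    try:
        server.serve_forever()
    finally:
        server.server_close()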
| 33.506831
| 85
| 0.63057
|
5af8129d7aeda12ef558a42ba50dc6f20579c668
| 12,130
|
py
|
Python
|
propagator/opentelemetry-propagator-ot-trace/tests/test_ot_trace_propagator.py
|
seemk/opentelemetry-python-contrib
|
10a448ee754394ccfff2c3a3c9af129af59e6775
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
propagator/opentelemetry-propagator-ot-trace/tests/test_ot_trace_propagator.py
|
seemk/opentelemetry-python-contrib
|
10a448ee754394ccfff2c3a3c9af129af59e6775
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
propagator/opentelemetry-propagator-ot-trace/tests/test_ot_trace_propagator.py
|
seemk/opentelemetry-python-contrib
|
10a448ee754394ccfff2c3a3c9af129af59e6775
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from opentelemetry.baggage import get_all, set_baggage
from opentelemetry.propagators.ot_trace import (
OT_BAGGAGE_PREFIX,
OT_SAMPLED_HEADER,
OT_SPAN_ID_HEADER,
OT_TRACE_ID_HEADER,
OTTracePropagator,
)
from opentelemetry.propagators.textmap import DictGetter
from opentelemetry.sdk.trace import _Span
from opentelemetry.trace import (
INVALID_SPAN_CONTEXT,
INVALID_SPAN_ID,
INVALID_TRACE_ID,
SpanContext,
TraceFlags,
set_span_in_context,
)
from opentelemetry.trace.propagation import get_current_span
carrier_getter = DictGetter()
class TestOTTracePropagator(TestCase):
ot_trace_propagator = OTTracePropagator()
def carrier_inject(self, trace_id, span_id, is_remote, trace_flags):
carrier = {}
self.ot_trace_propagator.inject(
dict.__setitem__,
carrier,
set_span_in_context(
_Span(
"child",
context=SpanContext(
trace_id=trace_id,
span_id=span_id,
is_remote=is_remote,
trace_flags=trace_flags,
),
)
),
)
return carrier
def test_inject_short_trace_id_short_span_id(self):
carrier = self.carrier_inject(
int("1", 16), int("2", 16), True, TraceFlags.SAMPLED,
)
self.assertEqual(carrier[OT_TRACE_ID_HEADER], "1")
self.assertEqual(carrier[OT_SPAN_ID_HEADER], "2")
def test_inject_trace_id_span_id_true(self):
"""Test valid trace_id, span_id and sampled true"""
carrier = self.carrier_inject(
int("80f198ee56343ba864fe8b2a57d3eff7", 16),
int("e457b5a2e4d86bd1", 16),
True,
TraceFlags.SAMPLED,
)
self.assertEqual(carrier[OT_TRACE_ID_HEADER], "64fe8b2a57d3eff7")
self.assertEqual(carrier[OT_SPAN_ID_HEADER], "e457b5a2e4d86bd1")
self.assertEqual(carrier[OT_SAMPLED_HEADER], "true")
def test_inject_trace_id_span_id_false(self):
"""Test valid trace_id, span_id and sampled true"""
carrier = self.carrier_inject(
int("80f198ee56343ba864fe8b2a57d3eff7", 16),
int("e457b5a2e4d86bd1", 16),
False,
TraceFlags.DEFAULT,
)
self.assertEqual(carrier[OT_TRACE_ID_HEADER], "64fe8b2a57d3eff7")
self.assertEqual(carrier[OT_SPAN_ID_HEADER], "e457b5a2e4d86bd1")
self.assertEqual(carrier[OT_SAMPLED_HEADER], "false")
def test_inject_truncate_traceid(self):
"""Test that traceid is truncated to 64 bits"""
self.assertEqual(
self.carrier_inject(
int("80f198ee56343ba864fe8b2a57d3eff7", 16),
int("e457b5a2e4d86bd1", 16),
True,
TraceFlags.DEFAULT,
)[OT_TRACE_ID_HEADER],
"64fe8b2a57d3eff7",
)
def test_inject_sampled_true(self):
"""Test that sampled true trace flags are injected"""
self.assertEqual(
self.carrier_inject(
int("80f198ee56343ba864fe8b2a57d3eff7", 16),
int("e457b5a2e4d86bd1", 16),
True,
TraceFlags.SAMPLED,
)[OT_SAMPLED_HEADER],
"true",
)
def test_inject_sampled_false(self):
"""Test that sampled false trace flags are injected"""
self.assertEqual(
self.carrier_inject(
int("80f198ee56343ba864fe8b2a57d3eff7", 16),
int("e457b5a2e4d86bd1", 16),
True,
TraceFlags.DEFAULT,
)[OT_SAMPLED_HEADER],
"false",
)
def test_inject_invalid_trace_id(self):
"""Test that no attributes are injected if the trace_id is invalid"""
self.assertEqual(
self.carrier_inject(
INVALID_TRACE_ID,
int("e457b5a2e4d86bd1", 16),
True,
TraceFlags.SAMPLED,
),
{},
)
def test_inject_set_baggage(self):
"""Test that baggage is set"""
carrier = {}
self.ot_trace_propagator.inject(
dict.__setitem__,
carrier,
set_baggage(
"key",
"value",
context=set_span_in_context(
_Span(
"child",
SpanContext(
trace_id=int(
"80f198ee56343ba864fe8b2a57d3eff7", 16
),
span_id=int("e457b5a2e4d86bd1", 16),
is_remote=True,
trace_flags=TraceFlags.SAMPLED,
),
)
),
),
)
self.assertEqual(carrier["".join([OT_BAGGAGE_PREFIX, "key"])], "value")
def test_inject_invalid_baggage_keys(self):
"""Test that invalid baggage keys are not set"""
carrier = {}
self.ot_trace_propagator.inject(
dict.__setitem__,
carrier,
set_baggage(
"(",
"value",
context=set_span_in_context(
_Span(
"child",
SpanContext(
trace_id=int(
"80f198ee56343ba864fe8b2a57d3eff7", 16
),
span_id=int("e457b5a2e4d86bd1", 16),
is_remote=True,
trace_flags=TraceFlags.SAMPLED,
),
)
),
),
)
self.assertNotIn("".join([OT_BAGGAGE_PREFIX, "!"]), carrier.keys())
def test_inject_invalid_baggage_values(self):
"""Test that invalid baggage values are not set"""
carrier = {}
self.ot_trace_propagator.inject(
dict.__setitem__,
carrier,
set_baggage(
"key",
"α",
context=set_span_in_context(
_Span(
"child",
SpanContext(
trace_id=int(
"80f198ee56343ba864fe8b2a57d3eff7", 16
),
span_id=int("e457b5a2e4d86bd1", 16),
is_remote=True,
trace_flags=TraceFlags.SAMPLED,
),
)
),
),
)
self.assertNotIn("".join([OT_BAGGAGE_PREFIX, "key"]), carrier.keys())
def test_extract_trace_id_span_id_sampled_true(self):
"""Test valid trace_id, span_id and sampled true"""
span_context = get_current_span(
self.ot_trace_propagator.extract(
carrier_getter,
{
OT_TRACE_ID_HEADER: "80f198ee56343ba864fe8b2a57d3eff7",
OT_SPAN_ID_HEADER: "e457b5a2e4d86bd1",
OT_SAMPLED_HEADER: "true",
},
)
).get_span_context()
self.assertEqual(
hex(span_context.trace_id)[2:], "80f198ee56343ba864fe8b2a57d3eff7"
)
self.assertEqual(hex(span_context.span_id)[2:], "e457b5a2e4d86bd1")
self.assertTrue(span_context.is_remote)
self.assertEqual(span_context.trace_flags, TraceFlags.SAMPLED)
def test_extract_trace_id_span_id_sampled_false(self):
"""Test valid trace_id, span_id and sampled false"""
span_context = get_current_span(
self.ot_trace_propagator.extract(
carrier_getter,
{
OT_TRACE_ID_HEADER: "80f198ee56343ba864fe8b2a57d3eff7",
OT_SPAN_ID_HEADER: "e457b5a2e4d86bd1",
OT_SAMPLED_HEADER: "false",
},
)
).get_span_context()
self.assertEqual(
hex(span_context.trace_id)[2:], "80f198ee56343ba864fe8b2a57d3eff7"
)
self.assertEqual(hex(span_context.span_id)[2:], "e457b5a2e4d86bd1")
self.assertTrue(span_context.is_remote)
self.assertEqual(span_context.trace_flags, TraceFlags.DEFAULT)
def test_extract_malformed_trace_id(self):
"""Test extraction with malformed trace_id"""
span_context = get_current_span(
self.ot_trace_propagator.extract(
carrier_getter,
{
OT_TRACE_ID_HEADER: "abc123!",
OT_SPAN_ID_HEADER: "e457b5a2e4d86bd1",
OT_SAMPLED_HEADER: "false",
},
)
).get_span_context()
self.assertEqual(span_context, INVALID_SPAN_CONTEXT)
def test_extract_malformed_span_id(self):
"""Test extraction with malformed span_id"""
span_context = get_current_span(
self.ot_trace_propagator.extract(
carrier_getter,
{
OT_TRACE_ID_HEADER: "64fe8b2a57d3eff7",
OT_SPAN_ID_HEADER: "abc123!",
OT_SAMPLED_HEADER: "false",
},
)
).get_span_context()
self.assertEqual(span_context, INVALID_SPAN_CONTEXT)
def test_extract_invalid_trace_id(self):
"""Test extraction with invalid trace_id"""
span_context = get_current_span(
self.ot_trace_propagator.extract(
carrier_getter,
{
OT_TRACE_ID_HEADER: INVALID_TRACE_ID,
OT_SPAN_ID_HEADER: "e457b5a2e4d86bd1",
OT_SAMPLED_HEADER: "false",
},
)
).get_span_context()
self.assertEqual(span_context, INVALID_SPAN_CONTEXT)
def test_extract_invalid_span_id(self):
"""Test extraction with invalid span_id"""
span_context = get_current_span(
self.ot_trace_propagator.extract(
carrier_getter,
{
OT_TRACE_ID_HEADER: "64fe8b2a57d3eff7",
OT_SPAN_ID_HEADER: INVALID_SPAN_ID,
OT_SAMPLED_HEADER: "false",
},
)
).get_span_context()
self.assertEqual(span_context, INVALID_SPAN_CONTEXT)
def test_extract_baggage(self):
"""Test baggage extraction"""
context = self.ot_trace_propagator.extract(
carrier_getter,
{
OT_TRACE_ID_HEADER: "64fe8b2a57d3eff7",
OT_SPAN_ID_HEADER: "e457b5a2e4d86bd1",
OT_SAMPLED_HEADER: "false",
"".join([OT_BAGGAGE_PREFIX, "abc"]): "abc",
"".join([OT_BAGGAGE_PREFIX, "def"]): "def",
},
)
span_context = get_current_span(context).get_span_context()
self.assertEqual(hex(span_context.trace_id)[2:], "64fe8b2a57d3eff7")
self.assertEqual(hex(span_context.span_id)[2:], "e457b5a2e4d86bd1")
self.assertTrue(span_context.is_remote)
self.assertEqual(span_context.trace_flags, TraceFlags.DEFAULT)
baggage = get_all(context)
self.assertEqual(baggage["abc"], "abc")
self.assertEqual(baggage["def"], "def")
| 32.872629
| 79
| 0.551278
|
2dedb37b353d5987e9d97b601cdeac32bb8317ca
| 5,958
|
py
|
Python
|
niftycloud/pyami/bootstrap.py
|
yudai09/niftycloud
|
b34ef2504f12e3f423c18b2fa155b17b9d0d7ca7
|
[
"MIT"
] | null | null | null |
niftycloud/pyami/bootstrap.py
|
yudai09/niftycloud
|
b34ef2504f12e3f423c18b2fa155b17b9d0d7ca7
|
[
"MIT"
] | null | null | null |
niftycloud/pyami/bootstrap.py
|
yudai09/niftycloud
|
b34ef2504f12e3f423c18b2fa155b17b9d0d7ca7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import os
import niftycloud
from niftycloud.utils import get_instance_metadata, get_instance_userdata
from niftycloud.pyami.config import Config, NiftycloudConfigPath
from niftycloud.pyami.scriptbase import ScriptBase
import time
class Bootstrap(ScriptBase):
"""
The Bootstrap class is instantiated and run as part of the PyAMI
instance initialization process. The methods in this class will
be run from the rc.local script of the instance and will be run
as the root user.
The main purpose of this class is to make sure the niftycloud distribution
on the instance is the one required.
"""
def __init__(self):
self.working_dir = '/mnt/pyami'
self.write_metadata()
super(Bootstrap, self).__init__()
def write_metadata(self):
fp = open(os.path.expanduser(NiftycloudConfigPath), 'w')
fp.write('[Instance]\n')
inst_data = get_instance_metadata()
for key in inst_data:
fp.write('%s = %s\n' % (key, inst_data[key]))
user_data = get_instance_userdata()
fp.write('\n%s\n' % user_data)
fp.write('[Pyami]\n')
fp.write('working_dir = %s\n' % self.working_dir)
fp.close()
# This file has the AWS credentials, should we lock it down?
# os.chmod(NiftycloudConfigPath, stat.S_IREAD | stat.S_IWRITE)
# now that we have written the file, read it into a pyami Config object
niftycloud.config = Config()
niftycloud.init_logging()
def create_working_dir(self):
niftycloud.log.info('Working directory: %s' % self.working_dir)
if not os.path.exists(self.working_dir):
os.mkdir(self.working_dir)
def load_niftycloud(self):
update = niftycloud.config.get('Niftycloud', 'niftycloud_update', 'svn:HEAD')
if update.startswith('svn'):
if update.find(':') >= 0:
method, version = update.split(':')
version = '-r%s' % version
else:
version = '-rHEAD'
location = niftycloud.config.get('Niftycloud', 'niftycloud_location', '/usr/local/niftycloud')
self.run('svn update %s %s' % (version, location))
elif update.startswith('git'):
location = niftycloud.config.get('Niftycloud', 'niftycloud_location', '/usr/share/python-support/python-niftycloud/niftycloud')
num_remaining_attempts = 10
while num_remaining_attempts > 0:
num_remaining_attempts -= 1
try:
self.run('git pull', cwd=location)
num_remaining_attempts = 0
except Exception as e:
niftycloud.log.info('git pull attempt failed with the following exception. Trying again in a bit. %s', e)
time.sleep(2)
if update.find(':') >= 0:
method, version = update.split(':')
else:
version = 'master'
self.run('git checkout %s' % version, cwd=location)
else:
# first remove the symlink needed when running from subversion
self.run('rm /usr/local/lib/python2.5/site-packages/niftycloud')
self.run('easy_install %s' % update)
def fetch_s3_file(self, s3_file):
try:
from niftycloud.utils import fetch_file
f = fetch_file(s3_file)
path = os.path.join(self.working_dir, s3_file.split("/")[-1])
open(path, "w").write(f.read())
except:
niftycloud.log.exception('Problem Retrieving file: %s' % s3_file)
path = None
return path
def load_packages(self):
package_str = niftycloud.config.get('Pyami', 'packages')
if package_str:
packages = package_str.split(',')
for package in packages:
package = package.strip()
if package.startswith('s3:'):
package = self.fetch_s3_file(package)
if package:
# if the "package" is really a .py file, it doesn't have to
# be installed, just being in the working dir is enough
if not package.endswith('.py'):
self.run('easy_install -Z %s' % package, exit_on_error=False)
def main(self):
self.create_working_dir()
self.load_niftycloud()
self.load_packages()
self.notify('Bootstrap Completed for %s' % niftycloud.config.get_instance('instance-id'))
if __name__ == "__main__":
# because bootstrap starts before any logging configuration can be loaded from
# the niftycloud config files, we will manually enable logging to /var/log/niftycloud.log
niftycloud.set_file_logger('bootstrap', '/var/log/niftycloud.log')
bs = Bootstrap()
bs.main()
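The branches in load_niftycloud() above key off the format of the niftycloud_update config value. Below is a small sketch of that dispatch logic; the sample values are assumptions matching the branches in this file.

# Sketch of how load_niftycloud() interprets niftycloud_update (sample values assumed).
def planned_command(update):
    if update.startswith('svn'):
        version = '-r' + (update.split(':')[1] if ':' in update else 'HEAD')
        return 'svn update %s <location>' % version
    elif update.startswith('git'):
        version = update.split(':')[1] if ':' in update else 'master'
        return 'git pull; git checkout %s' % version
    # anything else is treated as an easy_install target
    return 'easy_install %s' % update

assert planned_command('svn:1234') == 'svn update -r1234 <location>'
assert planned_command('git:master').endswith('git checkout master')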
| 44.133333
| 139
| 0.637462
|
f9e2bc5b69b415ba1cbe11e45e93a47aaf24aceb
| 188
|
py
|
Python
|
Farmacia/apps/ventas/urls.py
|
cluco91/Django_Farmacia
|
10d787533bcebac8ba35cf8a7feb10ffb14ca45f
|
[
"MIT"
] | 2
|
2018-11-06T04:05:20.000Z
|
2020-05-28T23:19:23.000Z
|
Farmacia/apps/ventas/urls.py
|
cluco91/Django_Farmacia
|
10d787533bcebac8ba35cf8a7feb10ffb14ca45f
|
[
"MIT"
] | null | null | null |
Farmacia/apps/ventas/urls.py
|
cluco91/Django_Farmacia
|
10d787533bcebac8ba35cf8a7feb10ffb14ca45f
|
[
"MIT"
] | 2
|
2018-11-06T04:12:17.000Z
|
2019-11-21T18:34:26.000Z
|
from django.conf.urls import patterns, url
from views import *
urlpatterns = patterns('',
url(r'^$',todo_listCreateView.as_view()),
url(r'^author-ajax/$', 'app.views.TodoitemAjax'),
)
| 23.5
| 50
| 0.707447
|
6c7814d19aed12312307675d5b66843e71bb227e
| 8,075
|
py
|
Python
|
cloudvolume/datasource/graphene/mesh/unsharded.py
|
austinhoag/cloud-volume
|
122c009f327a6ebcb1025732fac05affba4823d3
|
[
"BSD-3-Clause"
] | null | null | null |
cloudvolume/datasource/graphene/mesh/unsharded.py
|
austinhoag/cloud-volume
|
122c009f327a6ebcb1025732fac05affba4823d3
|
[
"BSD-3-Clause"
] | null | null | null |
cloudvolume/datasource/graphene/mesh/unsharded.py
|
austinhoag/cloud-volume
|
122c009f327a6ebcb1025732fac05affba4823d3
|
[
"BSD-3-Clause"
] | null | null | null |
import six
from collections import defaultdict
import itertools
import json
import os
import posixpath
import re
import requests
import numpy as np
from tqdm import tqdm
from ....lib import red, toiter, Bbox, Vec, jsonify
from ....mesh import Mesh
from .... import paths
from ....storage import Storage, GreenStorage
from ....scheduler import schedule_jobs
from ...precomputed.mesh import UnshardedLegacyPrecomputedMeshSource, PrecomputedMeshMetadata
class GrapheneUnshardedMeshSource(UnshardedLegacyPrecomputedMeshSource):
def compute_filename(self, label):
layer_id = self.meta.meta.decode_layer_id(label)
chunk_block_shape = 2 * Vec(*self.meta.meta.mesh_chunk_size)
start = self.meta.meta.decode_chunk_position(label)
start *= chunk_block_shape
bbx = Bbox(start, start + chunk_block_shape)
return "{}:0:{}".format(label, bbx.to_filename())
def exists(self, labels, progress=None):
"""
Checks for dynamic mesh existence.
Returns: { label: boolean, ... }
"""
labels = toiter(labels)
filenames = [
self.compute_filename(label) for label in labels
]
cloudpath = self.meta.join(self.meta.cloudpath, self.meta.mesh_path)
with Storage(cloudpath) as stor:
return stor.files_exist(filenames)
def get_fragment_labels(self, segid, lod=0, level=2, bbox=None, bypass=False):
if bypass:
return [ segid ]
manifest = self.fetch_manifest(segid, lod, level, bbox, return_segids=True)
return manifest["seg_ids"]
def get_fragment_filenames(self, segid, lod=0, level=2, bbox=None, bypass=False):
if bypass:
return [ self.compute_filename(segid) ]
manifest = self.fetch_manifest(segid, lod, level, bbox)
return manifest["fragments"]
def fetch_manifest(self, segid, lod=0, level=2, bbox=None, return_segids=False):
# TODO: add lod to endpoint
query_d = {
'verify': True,
}
if return_segids:
query_d['return_seg_ids'] = 1
if bbox is not None:
bbox = Bbox.create(bbox)
query_d['bounds'] = bbox.to_filename()
url = "%s/%s:%s" % (self.meta.meta.manifest_endpoint, segid, lod)
if level is not None:
res = requests.get(
url,
data=jsonify({ "start_layer": level }),
params=query_d,
headers=self.meta.meta.auth_header
)
else:
res = requests.get(url, params=query_d, headers=self.meta.meta.auth_header)
res.raise_for_status()
return json.loads(res.content.decode('utf8'))
def download_segid(self, seg_id, bounding_box, bypass, use_byte_offsets=True):
"""
Download a mesh for a single segment ID.
seg_id: Download the mesh for this segid.
bounding_box: Limit the query for child meshes to this bounding box.
bypass: Don't fetch the manifest, precompute the filename instead. Use this
only when you know the actual mesh labels in advance.
use_byte_offsets: Applicable only for the sharded format. Reuse the byte_offsets
into the sharded format that the server precalculated to accelerate download.
A time when you might want to switch this off is when you're working on a new
meshing job with different sharding parameters but are keeping the existing
meshes for visualization while it runs.
"""
import DracoPy
level = self.meta.meta.decode_layer_id(seg_id)
fragment_filenames = self.get_fragment_filenames(
seg_id, level=level, bbox=bounding_box, bypass=bypass
)
fragments = self._get_mesh_fragments(fragment_filenames)
fragments = sorted(fragments, key=lambda frag: frag[0]) # make decoding deterministic
fragiter = tqdm(fragments, disable=(not self.config.progress), desc="Decoding Mesh Buffer")
is_draco = False
for i, (filename, frag) in enumerate(fragiter):
mesh = None
if frag is not None:
try:
# Easier to ask forgiveness than permission
mesh = Mesh.from_draco(frag)
is_draco = True
except DracoPy.FileTypeException:
mesh = Mesh.from_precomputed(frag)
fragments[i] = mesh
fragments = [ f for f in fragments if f is not None ]
if len(fragments) == 0:
raise IndexError('No mesh fragments found for segment {}'.format(seg_id))
mesh = Mesh.concatenate(*fragments)
mesh.segid = seg_id
return mesh, is_draco
def get(
self, segids,
remove_duplicate_vertices=False,
fuse=False, bounding_box=None,
bypass=False, use_byte_offsets=True,
deduplicate_chunk_boundaries=True,
allow_missing=False,
):
"""
Merge fragments derived from these segids into a single vertex and face list.
Why merge multiple segids into one mesh? For example, if you have a set of
segids that belong to the same neuron.
segid: (iterable or int) segids to render into a single mesh
Optional:
remove_duplicate_vertices: bool, fuse exactly matching vertices within a chunk
fuse: bool, merge all downloaded meshes into a single mesh
bounding_box: Bbox, bounding box to restrict mesh download to
bypass: bypass requesting the manifest and attempt to get the
segids from storage directly by testing the dynamic and then the initial mesh.
This is an exceptional usage of this tool and should be applied only with
an understanding of what that entails.
use_byte_offsets: For sharded volumes, we can use the output of
exists(..., return_byte_offsets) that the server already did in order
to skip having to query the sharded format again.
deduplicate_chunk_boundaries: Our meshing is done in chunks and creates duplicate vertices
at the boundaries of chunks. This parameter will automatically deduplicate these if set
      to True. Superseded by remove_duplicate_vertices.
allow_missing: If set to True, missing segids will be ignored. If set to False, an error
is thrown.
Returns: Mesh object if fused, else { segid: Mesh, ... }
"""
segids = list(set([ int(segid) for segid in toiter(segids) ]))
meta = self.meta.meta
meshes = []
for seg_id in tqdm(segids, disable=(not self.config.progress), desc="Downloading Meshes"):
level = meta.decode_layer_id(seg_id)
if allow_missing:
try:
mesh, is_draco = self.download_segid(
seg_id, bounding_box, bypass, use_byte_offsets
)
except IndexError:
continue
else:
mesh, is_draco = self.download_segid(
seg_id, bounding_box, bypass, use_byte_offsets
)
resolution = meta.resolution(self.config.mip)
if meta.chunks_start_at_voxel_offset:
offset = meta.voxel_offset(self.config.mip)
else:
offset = Vec(0,0,0)
if remove_duplicate_vertices:
mesh = mesh.consolidate()
elif is_draco:
if not deduplicate_chunk_boundaries:
pass
elif level == 2:
# Deduplicate at quantized lvl2 chunk borders
draco_grid_size = meta.get_draco_grid_size(level)
mesh = mesh.deduplicate_chunk_boundaries(
meta.mesh_chunk_size * resolution,
offset=offset * resolution,
is_draco=True,
draco_grid_size=draco_grid_size,
)
else:
# TODO: cyclic draco quantization to properly
# stitch and deduplicate draco meshes at variable
# levels (see github issue #299)
print('Warning: deduplication not currently supported for this layer\'s variable layered draco meshes')
elif deduplicate_chunk_boundaries:
mesh = mesh.deduplicate_chunk_boundaries(
meta.mesh_chunk_size * resolution,
offset=offset * resolution,
is_draco=False,
)
meshes.append(mesh)
if not fuse:
return { m.segid: m for m in meshes }
return Mesh.concatenate(*meshes).consolidate()
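As a usage sketch: in cloud-volume this mesh source is normally reached through a CloudVolume opened on a graphene:// cloudpath. The path and segment ids below are placeholders, not a real dataset.

# Usage sketch (placeholder cloudpath and segment ids).
from cloudvolume import CloudVolume

cv = CloudVolume('graphene://https://example.com/segmentation/table/example')
# One mesh per segid:
meshes = cv.mesh.get([720575940621039145, 720575940621039146], fuse=False)
# Or a single fused, consolidated mesh for the whole set:
fused = cv.mesh.get([720575940621039145, 720575940621039146], fuse=True)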
| 35.262009
| 113
| 0.679133
|
21f6364fdc58e1d357d3fcb257701311088f7662
| 4,067
|
py
|
Python
|
packager.py
|
xpl/gop2
|
c0d76e5c46766887446419022b2701509cb0c218
|
[
"MIT"
] | 3
|
2019-03-30T15:43:18.000Z
|
2021-07-14T14:09:30.000Z
|
packager.py
|
xpl/gop2
|
c0d76e5c46766887446419022b2701509cb0c218
|
[
"MIT"
] | null | null | null |
packager.py
|
xpl/gop2
|
c0d76e5c46766887446419022b2701509cb0c218
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import sys, os, subprocess
if len(sys.argv) < 3:
print 'To package a single file:'
print ' python %s PACKAGE_NAME FILE' % sys.argv[0];
print 'To package a directory tree:'
print ' python %s PACKAGE_NAME DIRECTORY FILE_TO_RUN' % sys.argv[0];
print ''
print 'Requires dosbox.html as template.'
print 'Creates PACKAGE_NAME.data and PACKAGE_NAME.html.'
sys.exit(1)
def error(s):
print >> sys.stderr, s
sys.exit(1)
OUTPUT_HTML = sys.argv[1] + '.html'
OUTPUT_DATA = sys.argv[1] + '.data'
if os.path.isfile(sys.argv[2]):
( BASE_DIR, PACKAGE_ARG ) = os.path.split(sys.argv[2])
EXECUTABLE = PACKAGE_ARG
elif os.path.isdir(sys.argv[2]):
BASE_DIR = sys.argv[2];
PACKAGE_ARG = '.'
if (len(sys.argv) < 4):
error('When packaging directory, supply file to run as 3rd argument.')
else:
p = os.path.join(sys.argv[2], sys.argv[3])
if os.path.isfile(p):
EXECUTABLE = sys.argv[3]
else:
error("Did not find executable at %s" % p)
elif not os.path.exists(sys.argv[2]):
error("Can't find %s" % sys.argv[2])
else:
error("Don't know how to package %s" % sys.argv[2])
def getfiletext(fn):
try:
f = open(fn, 'r')
txt = f.read()
except Exception, e:
error('Error reading file: %s' % (str(e)))
  f.close()
return txt
try:
exec(getfiletext(os.path.expanduser('~/.emscripten')))
except Exception, e:
error('Error evaluating Emscripten configuration: %s' % (str(e)))
# Find folder in PATH environment variable which contains specified file
def find_in_path(fn):
for d in os.environ["PATH"].split(os.pathsep):
if os.path.isfile(os.path.join(d, fn)):
return d
return None
# Find Emscripten from EMSCRIPTEN_ROOT or by searching via PATH
def find_emscripten():
if 'EMSCRIPTEN_ROOT' in globals():
em_path = EMSCRIPTEN_ROOT
else:
em_path = find_in_path('emcc');
if em_path is None or not os.path.isdir(em_path):
error("Can't find Emscripten. Add it to PATH or set EMSCRIPTEN_ROOT.");
return em_path;
# Find Emscripten's file packager
def find_packager():
p = os.path.join(find_emscripten(), "tools", "file_packager.py");
if not os.path.isfile(p):
error('Emscripten file_packager.py not found.')
return p;
# Run Emscripten's file packager from the appropriate directory
def run_packager():
if BASE_DIR != '':
# Need to change directory because paths in package are
# relative to directory where Emscripten packager is run.
cwd = os.getcwd()
os.chdir(BASE_DIR)
if os.path.isabs(OUTPUT_DATA):
datafile = OUTPUT_DATA
else:
datafile = os.path.join(cwd, OUTPUT_DATA)
else:
datafile = OUTPUT_DATA
packager_path = find_packager();
if 'PYTHON' in globals():
python_path = PYTHON
else:
python_path = sys.executable
try:
res = subprocess.check_output([python_path, packager_path,
datafile,
"--no-heap-copy",
"--preload",
PACKAGE_ARG])
except:
error('Error reported by Emscripten packager.')
if BASE_DIR != '':
os.chdir(cwd)
return res
def inject_files(f):
f.write('<script type="text/javascript">')
f.write(run_packager())
f.write("Module['arguments'] = [ './" + EXECUTABLE + "' ];\n</script>\n")
try:
outf = open(OUTPUT_HTML, 'w')
except Exception, e:
error('Error opening %s for writing: %s' %( OUTPUT_HTML, (str(e)) ))
with open('dosbox.html') as f:
for line in iter(f.readline, ''):
if 'INJECT_HERE' in line:
inject_files(outf)
outf.write(line)
elif '<title>' in line:
outf.write(' <title>')
outf.write(sys.argv[1]);
outf.write('</title>\n')
else:
outf.write(line)
outf.close()
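For reference, hypothetical invocations matching the usage text printed at the top of this script (all names are placeholders):

# Hypothetical invocations per the usage text above (placeholder names):
#   python packager.py mygame GAME.EXE           # package a single file
#   python packager.py mygame gamedir GAME.EXE   # package a directory tree
# Either form reads dosbox.html and writes mygame.data and mygame.html.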
| 29.471014
| 79
| 0.592574
|
b176e9df9ecc2d5d8f0464dabc285c20995185c1
| 148
|
py
|
Python
|
gary/coordinates/setup_package.py
|
adrn/gary-old
|
065b371534baa03deeb860893640068d90ba5881
|
[
"MIT"
] | null | null | null |
gary/coordinates/setup_package.py
|
adrn/gary-old
|
065b371534baa03deeb860893640068d90ba5881
|
[
"MIT"
] | null | null | null |
gary/coordinates/setup_package.py
|
adrn/gary-old
|
065b371534baa03deeb860893640068d90ba5881
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
def get_package_data():
return {'gary.coordinates': ['tests/idl_vgsr_vhel.txt', 'tests/SgrCoord_data']}
| 29.6
| 83
| 0.77027
|
8ded974abad2d91d9ee610c6d863e154810b2393
| 2,392
|
py
|
Python
|
checkers/compare_json_outputs.py
|
vicpopov/subways
|
6596d9789cb280d9cc7ff1ac39cecf4d0317c4d1
|
[
"Apache-2.0"
] | 27
|
2017-11-04T19:19:49.000Z
|
2021-07-24T04:20:56.000Z
|
checkers/compare_json_outputs.py
|
vicpopov/subways
|
6596d9789cb280d9cc7ff1ac39cecf4d0317c4d1
|
[
"Apache-2.0"
] | 88
|
2017-10-07T08:34:05.000Z
|
2020-10-13T15:44:52.000Z
|
checkers/compare_json_outputs.py
|
vicpopov/subways
|
6596d9789cb280d9cc7ff1ac39cecf4d0317c4d1
|
[
"Apache-2.0"
] | 17
|
2018-02-19T03:14:59.000Z
|
2021-10-02T09:51:12.000Z
|
"""This utility allows one to check equivalency of outputs (defined
by --output command line parameter) of process_subways.py.
Due to the unordered nature of sets/dicts, two runs of process_subways.py,
even on the same input, generate jsons that are equivalent but not
byte-identical, so they cannot be compared with the 'diff' command. The
compare_jsons() function compares two osm_subways.json files taking into
account possible shuffling of dict items and items of some lists, as well
as system-specific subtleties.
This utility is useful to ensure that code improvements which must not
affect the process_subways.py output really don't change it.
"""
import sys
import json
import logging
from common import compare_stops, compare_transfers, compare_networks
def compare_jsons(result0, result1):
"""Compares two objects which are results of subway generation"""
network_names0 = sorted([x['network'] for x in result0['networks']])
network_names1 = sorted([x['network'] for x in result1['networks']])
if network_names0 != network_names1:
logging.debug("Different list of network names!")
return False
networks0 = sorted(result0['networks'], key=lambda x: x['network'])
networks1 = sorted(result1['networks'], key=lambda x: x['network'])
for network0, network1 in zip(networks0, networks1):
if not compare_networks(network0, network1):
return False
stop_ids0 = sorted(x['id'] for x in result0['stops'])
stop_ids1 = sorted(x['id'] for x in result1['stops'])
if stop_ids0 != stop_ids1:
logging.debug("Different stop_ids")
return False
stops0 = sorted(result0['stops'], key=lambda x: x['id'])
stops1 = sorted(result1['stops'], key=lambda x: x['id'])
for stop0, stop1 in zip(stops0, stops1):
if not compare_stops(stop0, stop1):
return False
if not compare_transfers(result0['transfers'], result1['transfers']):
return False
return True
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: {} <file1.json> <file2.json>".format(sys.argv[0]))
sys.exit()
logging.basicConfig(level=logging.DEBUG)
path0, path1 = sys.argv[1:3]
j0 = json.load(open(path0, encoding='utf-8'))
j1 = json.load(open(path1, encoding='utf-8'))
equal = compare_jsons(j0, j1)
print("The results are {}equal".format("" if equal else "NOT "))
| 36.8
| 77
| 0.686455
|
5e57839b82463bbda34ec0ba074df2123d5fdd44
| 1,464
|
py
|
Python
|
setup.py
|
remorses/yaml
|
9e6008ce47bf9700671c4fe3b24e65dd0ac17785
|
[
"MIT"
] | null | null | null |
setup.py
|
remorses/yaml
|
9e6008ce47bf9700671c4fe3b24e65dd0ac17785
|
[
"MIT"
] | null | null | null |
setup.py
|
remorses/yaml
|
9e6008ce47bf9700671c4fe3b24e65dd0ac17785
|
[
"MIT"
] | null | null | null |
from os import path
from codecs import open
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
with open(path.join(here, 'VERSION'), 'r') as f:
version = f.read().strip()
setup(
name='yaml',
version=version,
description='yaml library',
long_description=long_description,
long_description_content_type='text/markdown',
author='Tommaso De Rossi',
author_email='daer.tommy@gmail.com',
license='Apache Software License 2.0',
url='https://github.com/remorses/yaml',
keywords=['yaml', 'pyyaml alternative'],
classifiers=[
# How mature is this project? Common values are
# 'Development Status :: 5 - Production/Stable',
# Indicate who your project is intended for
'Intended Audience :: Information Technology',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: Apache Software License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
packages=find_packages(exclude=['tests']),
)
| 31.148936
| 77
| 0.668033
|
e5b683ef5531b5f0a6cf16e59296132540ac5f3d
| 1,774
|
py
|
Python
|
src/utils/corpus.py
|
HephaestusProject/pytorch-Sencnn
|
d7ff5725dbc4b8ffdb7a6ecfa50b859c3f3d883b
|
[
"MIT"
] | 5
|
2020-07-26T08:09:59.000Z
|
2020-10-27T21:52:08.000Z
|
src/utils/corpus.py
|
HephaestusProject/pytorch-Sencnn
|
d7ff5725dbc4b8ffdb7a6ecfa50b859c3f3d883b
|
[
"MIT"
] | 38
|
2020-07-25T06:17:39.000Z
|
2021-12-13T20:56:47.000Z
|
src/utils/corpus.py
|
HephaestusProject/pytorch-Sencnn
|
d7ff5725dbc4b8ffdb7a6ecfa50b859c3f3d883b
|
[
"MIT"
] | null | null | null |
from typing import Callable, List, Tuple
import pandas as pd
import torch
from torch.utils.data import Dataset
class NSMCCorpus(Dataset):
"""NSMCCorpus class"""
def __init__(self, filepath: str, encode_fn: Callable[[str], List[int]]) -> None:
"""Instantiating NSMCCorpus class
Args:
filepath (str): filepath
            encode_fn (Callable): a function that can act as an encoder
"""
self._corpus = pd.read_csv(filepath, sep="\t").loc[:, ["document", "label"]]
self._encode_fn = encode_fn
def __len__(self) -> int:
return len(self._corpus)
def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]:
tokens2indices = torch.tensor(
self._encode_fn(self._corpus.iloc[idx]["document"])
)
label = torch.tensor(self._corpus.iloc[idx]["label"])
return tokens2indices, label
class TREC6Corpus(Dataset):
"""TREC6Corpus class"""
def __init__(self, filepath: str, encode_fn: Callable[[str], List[int]]) -> None:
"""Instantiating SST2Corpus class
Args:
filepath (str): filepath
            encode_fn (Callable): a function that can act as an encoder
"""
self._corpus = pd.read_csv(filepath, sep="\t").loc[:, ["document", "label"]]
self._encode_fn = encode_fn
def __len__(self) -> int:
return len(self._corpus)
def __getitem__(self, idx: int) -> Tuple[torch.Tensor, torch.Tensor]:
tokens2indices = torch.tensor(
self._encode_fn(self._corpus.iloc[idx]["document"])
)
label = torch.tensor(self._corpus.iloc[idx]["label"])
return tokens2indices, label
CorpusRegistry = {"NSMCCorpus": NSMCCorpus, "TREC6Corpus": TREC6Corpus}
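A minimal usage sketch for the corpus classes above, assuming a TSV file with 'document' and 'label' columns and a toy whitespace encoder (both placeholders):

# Minimal usage sketch (placeholder file path and toy encoder).
from torch.utils.data import DataLoader

def toy_encode(text):
    # stand-in for a real tokenizer-to-index mapping
    return [hash(tok) % 10000 for tok in text.split()]

dataset = NSMCCorpus("ratings_train.txt", encode_fn=toy_encode)
# batch_size=1 avoids collating variable-length token sequences
loader = DataLoader(dataset, batch_size=1, shuffle=True)
tokens, label = next(iter(loader))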
| 30.067797
| 85
| 0.625141
|
e55437397f7adeb5c320fbc745f39a9591ebd8de
| 1,727
|
py
|
Python
|
adjutant/wsgi.py
|
CCI-MOC/adjutant
|
032db3124ea0b0632afdfc27afc60b6c66cf5f66
|
[
"Apache-2.0"
] | null | null | null |
adjutant/wsgi.py
|
CCI-MOC/adjutant
|
032db3124ea0b0632afdfc27afc60b6c66cf5f66
|
[
"Apache-2.0"
] | null | null | null |
adjutant/wsgi.py
|
CCI-MOC/adjutant
|
032db3124ea0b0632afdfc27afc60b6c66cf5f66
|
[
"Apache-2.0"
] | 1
|
2019-04-18T12:21:59.000Z
|
2019-04-18T12:21:59.000Z
|
# Copyright (C) 2015 Catalyst IT Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
WSGI config for Adjutant.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from django.conf import settings
from keystonemiddleware.auth_token import AuthProtocol
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "adjutant.settings")
application = get_wsgi_application()
# Here we replace the default application with one wrapped by
# the Keystone Auth Middleware.
conf = {
"auth_plugin": "password",
'username': settings.KEYSTONE['username'],
'password': settings.KEYSTONE['password'],
'project_name': settings.KEYSTONE['project_name'],
"project_domain_id": settings.KEYSTONE.get('domain_id', "default"),
"user_domain_id": settings.KEYSTONE.get('domain_id', "default"),
"auth_url": settings.KEYSTONE['auth_url'],
'delay_auth_decision': True,
'include_service_catalog': False,
'token_cache_time': settings.TOKEN_CACHE_TIME,
}
application = AuthProtocol(application, conf)
| 35.244898
| 78
| 0.745223
|
1b0caac652c81db1741175645eb45c73929aaaf7
| 979
|
py
|
Python
|
helios/url_names.py
|
cassius3m/Helios
|
e76171f57a7bdde680a7c488ccd2051cbdfb76ed
|
[
"Apache-2.0"
] | 525
|
2015-01-04T11:51:26.000Z
|
2022-03-31T17:15:20.000Z
|
helios/url_names.py
|
cassius3m/Helios
|
e76171f57a7bdde680a7c488ccd2051cbdfb76ed
|
[
"Apache-2.0"
] | 238
|
2015-01-02T17:50:37.000Z
|
2022-02-09T16:39:49.000Z
|
helios/url_names.py
|
cassius3m/Helios
|
e76171f57a7bdde680a7c488ccd2051cbdfb76ed
|
[
"Apache-2.0"
] | 238
|
2015-01-05T23:09:20.000Z
|
2022-03-21T16:47:33.000Z
|
from helios import election_url_names as election, stats_url_names as stats
__all__ = [
"election", "stats",
"COOKIE_TEST", "COOKIE_TEST_2", "COOKIE_NO",
"ELECTION_SHORTCUT", "ELECTION_SHORTCUT_VOTE", "CAST_VOTE_SHORTCUT", "CAST_VOTE_FULLHASH_SHORTCUT",
"TRUSTEE_LOGIN",
"ELECTIONS_PARAMS", "ELECTIONS_VERIFIER", "ELECTIONS_VERIFIER_SINGLE_BALLOT",
"ELECTIONS_NEW", "ELECTIONS_ADMINISTERED", "ELECTIONS_VOTED",
]
COOKIE_TEST="cookie@test"
COOKIE_TEST_2="cookie@test2"
COOKIE_NO="cookie@no"
ELECTION_SHORTCUT="shortcut@election"
ELECTION_SHORTCUT_VOTE="shortcut@election@vote"
CAST_VOTE_SHORTCUT="shortcut@vote"
CAST_VOTE_FULLHASH_SHORTCUT="shortcut-fullhash@vote"
TRUSTEE_LOGIN="trustee@login"
ELECTIONS_PARAMS="elections@params"
ELECTIONS_VERIFIER="elections@verifier"
ELECTIONS_VERIFIER_SINGLE_BALLOT="elections@verifier@single-ballot"
ELECTIONS_NEW="elections@new"
ELECTIONS_ADMINISTERED="elections@administered"
ELECTIONS_VOTED="elections@voted"
| 33.758621
| 103
| 0.812053
|
86d159a4592157382f878e299f05d05910cb36c9
| 1,601
|
py
|
Python
|
plate_recognition/ocr.py
|
73VW/ImageClassification
|
b4d15de0bf261b05612bd405005ff33b4e395121
|
[
"MIT"
] | null | null | null |
plate_recognition/ocr.py
|
73VW/ImageClassification
|
b4d15de0bf261b05612bd405005ff33b4e395121
|
[
"MIT"
] | 1
|
2018-02-22T15:09:36.000Z
|
2018-02-22T15:29:14.000Z
|
plate_recognition/ocr.py
|
73VW/ImageClassification
|
b4d15de0bf261b05612bd405005ff33b4e395121
|
[
"MIT"
] | null | null | null |
# import the necessary packages
from PIL import Image
import pytesseract
import argparse
import cv2
import os
from os import system
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=False,
help="path to input image to be OCR'd")
ap.add_argument("-p", "--preprocess", type=str, default="thresh",
help="type of preprocessing to be done")
args = vars(ap.parse_args())
# load the input image (fall back to the hardcoded example path) and convert it to grayscale
image = cv2.imread(args["image"] or 'C:\\Users\\laurent.gander\\Documents\\HE-ARC\\3eme\\Traitement_Image\\plates_regnonition\\plaque.jpg')
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# check to see if we should apply thresholding to preprocess the
# image
if args["preprocess"] == "thresh":
gray = cv2.threshold(gray, 0, 255,
cv2.THRESH_BINARY | cv2.THRESH_OTSU)[1]
# make a check to see if median blurring should be done to remove
# noise
elif args["preprocess"] == "blur":
gray = cv2.medianBlur(gray, 3)
# write the grayscale image to disk as a temporary file so we can
# apply OCR to it
filename = "{}.png".format(os.getpid())
#filename = "image.png".format(os.getpid)
cv2.imwrite(filename, gray)
# load the image as a PIL Pillow image, apply OCR, and then delete
# the temporary filename
print(filename)
#text = system("tesseract -l chr /" + filename + " text.txt")
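# NOTE: pytesseract requires the Tesseract binary on PATH, plus the 'fra' traineddata for lang='fra'.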
text = pytesseract.image_to_string(Image.open(filename), lang='fra')
os.remove(filename)
print(text)
# show the output images
cv2.imshow("Image", image)
cv2.imshow("Output", gray)
cv2.waitKey(0)
| 30.788462
| 122
| 0.733916
|
4f525d0a675ee652763e0e1fed3ea3796d0d71c1
| 4,486
|
py
|
Python
|
loophole/polar/pb/map_information_pb2.py
|
oscarpicas/loophole
|
f9389c73f06b419c97ad32847346663a30d80225
|
[
"MIT"
] | 153
|
2016-02-12T22:52:25.000Z
|
2022-02-05T21:55:19.000Z
|
loophole/polar/pb/map_information_pb2.py
|
oscarpicas/loophole
|
f9389c73f06b419c97ad32847346663a30d80225
|
[
"MIT"
] | 14
|
2016-02-28T13:56:34.000Z
|
2020-02-16T20:33:41.000Z
|
loophole/polar/pb/map_information_pb2.py
|
oscarpicas/loophole
|
f9389c73f06b419c97ad32847346663a30d80225
|
[
"MIT"
] | 20
|
2016-02-28T13:05:15.000Z
|
2021-07-20T18:21:46.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: map_information.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import types_pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='map_information.proto',
package='data',
serialized_pb=_b('\n\x15map_information.proto\x12\x04\x64\x61ta\x1a\x0btypes.proto\"4\n\rPbMapLocation\x12\x10\n\x08latitude\x18\x01 \x02(\x01\x12\x11\n\tlongitude\x18\x02 \x02(\x01\"\x80\x01\n\x10PbMapInformation\x12)\n\x0c\x63\x65ntre_point\x18\x01 \x02(\x0b\x32\x13.data.PbMapLocation\x12)\n\x0e\x64\x61ta_timestamp\x18\x02 \x01(\x0b\x32\x11.PbSystemDateTime\x12\x16\n\x07updated\x18\x03 \x01(\x08:\x05\x66\x61lse')
,
dependencies=[types_pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PBMAPLOCATION = _descriptor.Descriptor(
name='PbMapLocation',
full_name='data.PbMapLocation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='latitude', full_name='data.PbMapLocation.latitude', index=0,
number=1, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='longitude', full_name='data.PbMapLocation.longitude', index=1,
number=2, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=44,
serialized_end=96,
)
_PBMAPINFORMATION = _descriptor.Descriptor(
name='PbMapInformation',
full_name='data.PbMapInformation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='centre_point', full_name='data.PbMapInformation.centre_point', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data_timestamp', full_name='data.PbMapInformation.data_timestamp', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='updated', full_name='data.PbMapInformation.updated', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=99,
serialized_end=227,
)
_PBMAPINFORMATION.fields_by_name['centre_point'].message_type = _PBMAPLOCATION
_PBMAPINFORMATION.fields_by_name['data_timestamp'].message_type = types_pb2._PBSYSTEMDATETIME
DESCRIPTOR.message_types_by_name['PbMapLocation'] = _PBMAPLOCATION
DESCRIPTOR.message_types_by_name['PbMapInformation'] = _PBMAPINFORMATION
PbMapLocation = _reflection.GeneratedProtocolMessageType('PbMapLocation', (_message.Message,), dict(
DESCRIPTOR = _PBMAPLOCATION,
__module__ = 'map_information_pb2'
# @@protoc_insertion_point(class_scope:data.PbMapLocation)
))
_sym_db.RegisterMessage(PbMapLocation)
PbMapInformation = _reflection.GeneratedProtocolMessageType('PbMapInformation', (_message.Message,), dict(
DESCRIPTOR = _PBMAPINFORMATION,
__module__ = 'map_information_pb2'
# @@protoc_insertion_point(class_scope:data.PbMapInformation)
))
_sym_db.RegisterMessage(PbMapInformation)
# @@protoc_insertion_point(module_scope)
| 34.244275
| 420
| 0.756799
|
7664f482fc0986616be11689e511a09e9fbab706
| 8,446
|
py
|
Python
|
docs/conf.py
|
myuanz/wechatpy
|
f131e1808ffd51f61881188fc080c36523063c2b
|
[
"MIT"
] | 821
|
2016-09-16T03:05:37.000Z
|
2022-03-30T07:18:06.000Z
|
docs/conf.py
|
myuanz/wechatpy
|
f131e1808ffd51f61881188fc080c36523063c2b
|
[
"MIT"
] | 160
|
2017-08-28T03:32:56.000Z
|
2022-03-31T09:16:10.000Z
|
docs/conf.py
|
myuanz/wechatpy
|
f131e1808ffd51f61881188fc080c36523063c2b
|
[
"MIT"
] | 168
|
2016-05-30T16:40:07.000Z
|
2022-03-25T15:17:18.000Z
|
# -*- coding: utf-8 -*-
#
# wechatpy documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 25 14:26:14 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath("_themes"))
sys.path.insert(0, os.path.abspath(".."))
import wechatpy
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "wechatpy"
copyright = "2014 - 2020, Messense Lv"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = wechatpy.__version__
# The full version, including alpha/beta/rc tags.
release = wechatpy.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = "zh_CN"
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
html_search_language = "zh"
# Output file base name for HTML help builder.
htmlhelp_basename = "wechatpydoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
("index", "wechatpy.tex", "wechatpy Documentation", "messense", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "wechatpy", "wechatpy Documentation", ["messense"], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"wechatpy",
"wechatpy Documentation",
"messense",
"wechatpy",
"One line description of project.",
"Miscellaneous",
),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
| 31.514925
| 79
| 0.709567
|
4c68ae770d8e01cf9e8a3ce075048063cb29a20f
| 3,613
|
py
|
Python
|
commanderbot/lib/json_file_database_adapter.py
|
CommanderBot-Dev/commanderbot-ext
|
c8798b4475b892c234a1e4ffbfb4fed3fb702938
|
[
"MIT"
] | 4
|
2020-09-25T19:22:48.000Z
|
2021-06-16T18:08:49.000Z
|
commanderbot/lib/json_file_database_adapter.py
|
CommanderBot-Dev/commanderbot-py
|
835841f733e466c5a0e6724d4020747c55856fe3
|
[
"MIT"
] | 23
|
2021-08-30T04:07:29.000Z
|
2021-11-08T17:44:41.000Z
|
commanderbot/lib/json_file_database_adapter.py
|
CommanderBot-Dev/commanderbot-py
|
835841f733e466c5a0e6724d4020747c55856fe3
|
[
"MIT"
] | 3
|
2020-09-25T19:23:22.000Z
|
2021-03-16T18:19:48.000Z
|
import asyncio
from dataclasses import dataclass, field
from logging import Logger, getLogger
from typing import Callable, Generic, Optional, TypeVar
from commanderbot.lib.database_options import JsonFileDatabaseOptions
from commanderbot.lib.json import json_dump_async, json_load_async
from commanderbot.lib.types import JsonObject
__all__ = ("JsonFileDatabaseAdapter",)
CacheType = TypeVar("CacheType")
@dataclass
class JsonFileDatabaseAdapter(Generic[CacheType]):
"""
Wraps common operations for persistent data backed by a simple JSON file.
Attributes
----------
options
Immutable, pre-defined settings that define core database behaviour.
serializer
A callable that serializes Python objects into JSON objects.
deserializer
A callable that deserializes JSON objects into Python objects.
log
A logger named in a uniquely identifiable way.
"""
options: JsonFileDatabaseOptions
serializer: Callable[[CacheType], JsonObject]
deserializer: Callable[[JsonObject], CacheType]
log: Logger = field(init=False)
# Lazily-initialized in-memory representation of state. The reason this is lazy is
# because it needs to be asynchronously initialized from within an async method.
# **Do not use this member; use `_get_cache()` instead.**
__cache: Optional[CacheType] = field(init=False, default=None)
# Lock used to avoid a potential race condition where multiple concurrent asyncio
# tasks initialize the cache in parallel.
__cache_lock = asyncio.Lock()
def __post_init__(self):
self.log = getLogger(
f"{self.options.path.name} ({self.__class__.__name__}#{id(self)})"
)
async def _create_cache(self) -> CacheType:
"""Construct the initial cache from the database."""
data = await self.read()
assert isinstance(data, dict)
return self.deserializer(data)
async def get_cache(self) -> CacheType:
"""Create the cache if it doesn't already exist, and then return it."""
async with self.__cache_lock:
if self.__cache is None:
self.log.info("Lazily-initializing new cache...")
self.__cache = await self._create_cache()
return self.__cache
async def dirty(self):
"""Mark the cache as dirty, forcing a write to the database."""
cache = await self.get_cache()
data = self.serializer(cache)
await self.write(data)
async def read(self) -> JsonObject:
"""Read and return the data from the database file."""
try:
# Attempt to async load the file.
return await json_load_async(self.options.path)
except FileNotFoundError as ex:
if self.options.no_init:
# If the file doesn't exist, and we've been specifically told not to
# automatically create it, then let the error fall through.
raise ex
else:
# Otherwise, we can go ahead and automatically initialize the file.
self.log.warning(
f"Initializing database file because it doesn't already exist: {self.options.path}"
)
# We need to have valid JSON in the file, so just use an empty object.
await json_dump_async({}, self.options.path, mkdir=True)
return {}
async def write(self, data: JsonObject):
"""Write the given data to the database file."""
await json_dump_async(data, self.options.path, indent=self.options.indent)
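# Illustrative usage (a sketch; `MyState`, its (de)serializers, and the options fields are hypothetical):
#
# adapter = JsonFileDatabaseAdapter[MyState](
#     options=JsonFileDatabaseOptions(path=Path("data/state.json")),
#     serializer=lambda state: state.to_json(),
#     deserializer=MyState.from_json,
# )
# state = await adapter.get_cache()  # lazily loads (or initializes) the JSON file
# ...mutate state...
# await adapter.dirty()  # serializes the cache and writes it back to disk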
| 38.031579
| 103
| 0.661223
|
a27e76c3886a4475dc8d0e572c5643ee82b272fb
| 5,749
|
py
|
Python
|
test/integration/component/test_baremetal.py
|
ycyun/ablestack-cloud
|
b7bd36a043e2697d05303246373988aa033c9229
|
[
"Apache-2.0"
] | 1,131
|
2015-01-08T18:59:06.000Z
|
2022-03-29T11:31:10.000Z
|
test/integration/component/test_baremetal.py
|
ycyun/ablestack-cloud
|
b7bd36a043e2697d05303246373988aa033c9229
|
[
"Apache-2.0"
] | 5,908
|
2015-01-13T15:28:37.000Z
|
2022-03-31T20:31:07.000Z
|
test/integration/component/test_baremetal.py
|
ycyun/ablestack-cloud
|
b7bd36a043e2697d05303246373988aa033c9229
|
[
"Apache-2.0"
] | 1,083
|
2015-01-05T01:16:52.000Z
|
2022-03-31T12:14:10.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Test for baremetal
"""
#Import Local Modules
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.cloudstackAPI import createVlanIpRange
from marvin.lib.utils import cleanup_resources
from marvin.lib.base import (NetworkOffering,
NetworkServiceProvider,
PhysicalNetwork,
Network,
Pod)
#from marvin.lib.common import *
from nose.plugins.attrib import attr
#Import System modules
_multiprocess_shared_ = True
class Services:
"""Test Baremetal
"""
def __init__(self):
self.services = {
"network_offering": {
"name": 'Baremetal_network_offering',
"displaytext": 'Baremetal_network_offering',
"guestiptype": 'Shared',
"supportedservices": 'Dhcp,UserData,BaremetalPxeService',
"specifyVlan": "true",
"specifyIpRanges": "true",
"traffictype": 'GUEST',
"availability": 'Optional',
"conservemode": 'false',
"serviceProviderList": {
"Dhcp": 'BaremetalDhcpProvider',
"UserData": 'BaremetalUserdataProvider',
"BaremetalPxeService": 'BaremetalPxeProvider',
},
},
"network" :{
"name" : "defaultBaremetalNetwork",
"displaytext" : "defaultBaremetalNetwork",
},
"ostype": 'CentOS 5.3 (64-bit)',
# Cent OS 5.3 (64 bit)
"sleep": 60,
"timeout": 10,
}
class TestBaremetal(cloudstackTestCase):
zoneid = 1
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.services = Services().services
self.cleanup = []
def tearDown(self):
try:
self.debug("Cleaning up the resources")
#Clean up, terminate the created network offerings
cleanup_resources(self.apiclient, self.cleanup)
self.debug("Cleanup complete!")
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(tags = ["baremetal", "invalid"])
def test_baremetal(self):
self.debug("Test create baremetal network offering")
networkoffering = NetworkOffering.create(self.apiclient, self.services["network_offering"])
networkoffering.update(self.apiclient, state="Enabled")
self.cleanup.append(networkoffering)
physical_network = PhysicalNetwork.list(self.apiclient, zoneid=self.zoneid)[0]
dhcp_provider = NetworkServiceProvider.list(self.apiclient, name="BaremetalDhcpProvider", physical_network_id=physical_network.id)[0]
NetworkServiceProvider.update(
self.apiclient,
id=dhcp_provider.id,
state='Enabled'
)
pxe_provider = NetworkServiceProvider.list(self.apiclient, name="BaremetalPxeProvider", physical_network_id=physical_network.id)[0]
NetworkServiceProvider.update(
self.apiclient,
id=pxe_provider.id,
state='Enabled'
)
userdata_provider = NetworkServiceProvider.list(self.apiclient, name="BaremetalUserdataProvider", physical_network_id=physical_network.id)[0]
NetworkServiceProvider.update(
self.apiclient,
id=userdata_provider.id,
state='Enabled'
)
network = Network.create(self.apiclient, self.services["network"], zoneid=self.zoneid, networkofferingid=networkoffering.id)
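# Prepend so cleanup removes the network before the offering it depends on.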
self.cleanup.insert(0, network)
pod = Pod.list(self.apiclient)[0]
cmd = createVlanIpRange.createVlanIpRangeCmd()
cmd.podid = pod.id
cmd.networkid = network.id
cmd.gateway = "10.1.1.1"
cmd.netmask = "255.255.255.0"
cmd.startip = "10.1.1.20"
cmd.endip = "10.1.1.40"
cmd.forVirtualNetwork="false"
self.apiclient.createVlanIpRange(cmd)
| 46.362903
| 149
| 0.532788
|
3597991cfff65a24f4cbaf28bef6b1c70de99177
| 1,019
|
py
|
Python
|
data/checkpointManager.py
|
kennethsinder/exit-dash-hyperion
|
d44ea77ef5e8a6ce9490e3f802636da5b44e6e74
|
[
"MIT"
] | 2
|
2018-01-29T14:34:51.000Z
|
2020-10-20T23:30:26.000Z
|
data/checkpointManager.py
|
kennethsinder/exit-dash-hyperion
|
d44ea77ef5e8a6ce9490e3f802636da5b44e6e74
|
[
"MIT"
] | null | null | null |
data/checkpointManager.py
|
kennethsinder/exit-dash-hyperion
|
d44ea77ef5e8a6ce9490e3f802636da5b44e6e74
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import pygame
class CheckpointManager(object):
def __init__(self, platforms, blocks, character):
# Initialize lists for the co-ordinates for every checkpoint and the direction needed to get there
self.checkpointsLeft, self.checkpointsRight = [], []
self.checkpointDirectionsL, self.checkpointDirectionsR = [], []
self.indexLevel(platforms, blocks, character)
def indexLevel(self, platforms, blocks, char):
# This method obtains all of the level information necessary to pathfind
# Gather information about the character
maxW = char.maxJumpLength
maxH = char.maxJumpWidth
charRect = pygame.Rect(char.x, char.y, char.width, char.height)
charW = char.width
# Fill in the checkpoint coordinates and directions
for platform in platforms:
self.checkpointsLeft.append(platform.x + 5)
self.checkpointsRight.append(platform.x + platform.width - charW - 5)
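# NOTE: the checkpointDirections lists are initialized above but are expected to be
# populated by pathfinding logic elsewhere (not shown here).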
| 40.76
| 107
| 0.672228
|
70f70bbe883b92cb3fb625d507170482ad47c0c6
| 4,192
|
py
|
Python
|
utils/MQTT.py
|
JuanPabloBeco/py-nbiot
|
a498b34f53f5a50bacea8d6ab56c6a3071eb4de5
|
[
"MIT"
] | 1
|
2022-01-23T12:18:25.000Z
|
2022-01-23T12:18:25.000Z
|
utils/MQTT.py
|
JuanPabloBeco/py-nbiot
|
a498b34f53f5a50bacea8d6ab56c6a3071eb4de5
|
[
"MIT"
] | null | null | null |
utils/MQTT.py
|
JuanPabloBeco/py-nbiot
|
a498b34f53f5a50bacea8d6ab56c6a3071eb4de5
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append( '..' )
import serial
from serial_tools.send_cmd import send_cmd
from constants import MY_PHONE_PASS
DEFAULT_PRINT_RESPONSE = True
DEFAULT_TCP_CONNECT_ID = 0
DEFAULT_HOST_NAME = "54.191.221.113"
DEFAULT_PORT = 1883
DEFAULT_USERNAME = "topic"
DEFEULT_PASSWORD = "password o string secreto"
DEFAULT_TCP_CONNECT_ID = 0
DEFAULT_MSG_ID = 0
DEFAULT_QOS = 0
DEFAULT_RETAIN = 0
DEFAULT_TOPIC = "mychannel"
def check_mqtt_network(ser, print_response=DEFAULT_PRINT_RESPONSE):
cmd_response = send_cmd("AT+QMTOPEN?", ser, print_response=print_response)
print(cmd_response)
return(cmd_response)
def open_mqtt_network(
ser,
tcp_connect_id=DEFAULT_TCP_CONNECT_ID,
host_name=DEFAULT_HOST_NAME,
port=DEFAULT_PORT,
print_response=DEFAULT_PRINT_RESPONSE,
):
cmd_response = send_cmd("AT+QMTOPEN=" + str(tcp_connect_id) + ",\"" + host_name + "\"" + "," + str(port), ser, print_response=print_response)
print(cmd_response)
return(cmd_response)
def connect_to_mqtt_server(
ser,
tcp_connect_id=DEFAULT_TCP_CONNECT_ID,
username = DEFAULT_USERNAME,
password = DEFAULT_PASSWORD,
print_response=DEFAULT_PRINT_RESPONSE,
):
cmd_response = send_cmd(
"AT+QMTCONN=" + str(tcp_connect_id) + ",\"" + username + "\",\"" + password + "\"",
ser,
print_response=print_response,
ms_of_delay_before=100)
print(cmd_response)
return(cmd_response)
def check_connection_to_mqtt_server(
ser,
print_response=DEFAULT_PRINT_RESPONSE,
tcp_connect_id=DEFAULT_TCP_CONNECT_ID,
base_delay=500,
):
cmd_response={'status':'ERROR'}
print('check_connection_to_mqtt_server')
for time in range(0,3):
cmd_response = send_cmd(
"AT+QMTCONN?",
ser,
ms_of_delay_before=base_delay*(time+1),
print_response=print_response)
print('cmd_response: ')
print(cmd_response)
print(cmd_response.get('response')[1])
if cmd_response.get('response')[1].find('OK') != -1:
print(cmd_response.get('response')[1].find('OK'))
return(cmd_response)
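# Per Quectel's MQTT AT command set, a QMTCONN state of 3 indicates the client is connected.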
if cmd_response.get('response')[1].split(str(tcp_connect_id) + ",",1)[1][0] == '3':
print(cmd_response.get('response')[1].split(str(tcp_connect_id) + ",",1)[1][0])
return(cmd_response)
cmd_response['status'] = 'TIMEOUT'
print('TIMEOUT')
return(cmd_response)
def publish_mqtt_message(
ser,
str_to_send,
topic = DEFAULT_TOPIC,
tcp_connect_id = DEFAULT_TCP_CONNECT_ID,
msgID = DEFAULT_MSG_ID,
qos = DEFAULT_QOS,
retain = DEFAULT_RETAIN,
print_response=DEFAULT_PRINT_RESPONSE
):
cmd_response = send_cmd(
"AT+QMTPUB=" +
str(tcp_connect_id) + "," +
str(msgID) + "," +
str(qos) + "," +
str(retain) + "," +
"\"" + topic + "\"," +
str(len(str_to_send)), # these length is in bytes
ser,
custom_response_end='>',
print_response=print_response
)
print(cmd_response)
cmd_response = send_cmd(str_to_send, ser, print_response=print_response, ms_of_delay_before=200)
print(cmd_response)
return cmd_response
def close_mqtt_network(ser, tcp_connect_id=DEFAULT_TCP_CONNECT_ID, print_response=DEFAULT_PRINT_RESPONSE):
cmd_response = send_cmd("AT+QMTCLOSE=" + str(tcp_connect_id), ser, print_response=print_response)
print(cmd_response)
def disconnect_to_mqtt_server(ser, tcp_connect_id=DEFAULT_TCP_CONNECT_ID, print_response=DEFAULT_PRINT_RESPONSE):
cmd_response = send_cmd("AT+QMTDISC=" + str(tcp_connect_id), ser, print_response=print_response)
print(cmd_response)
return(cmd_response)
def subscribe_to_mqtt_topic(ser, print_response=DEFAULT_PRINT_RESPONSE):
cmd_response = send_cmd("AT+QMTSUB=0,1,\"mychannel\",0", ser, print_response=print_response)
print(cmd_response)
return(cmd_response)
def unsubscribe_from_mqtt_topic(ser, print_response=DEFAULT_PRINT_RESPONSE):
cmd_response = send_cmd("AT+QMTUNS=0,1,\"mychannel\"", ser, print_response=print_response)
print(cmd_response)
return(cmd_response)
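# Illustrative end-to-end flow (a sketch; the serial port name and payload are hypothetical):
#
# ser = serial.Serial('/dev/ttyUSB0', 115200, timeout=1)
# open_mqtt_network(ser)
# connect_to_mqtt_server(ser)
# check_connection_to_mqtt_server(ser)
# publish_mqtt_message(ser, '{"temp": 21.5}')
# close_mqtt_network(ser)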
| 32
| 146
| 0.683206
|
defecf8e2b27589433e6ea74f945233886053ec3
| 2,029
|
py
|
Python
|
scru/scru.py
|
Alquimista/scru
|
331e0213a0a3a3c53b8da5bfd541767412479bee
|
[
"MIT"
] | null | null | null |
scru/scru.py
|
Alquimista/scru
|
331e0213a0a3a3c53b8da5bfd541767412479bee
|
[
"MIT"
] | null | null | null |
scru/scru.py
|
Alquimista/scru
|
331e0213a0a3a3c53b8da5bfd541767412479bee
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import clipboard
import imgur
import screenshot
import utils
import os
import subprocess
import urllib
APP_ICON = '/usr/share/icons/hicolor/scalable/apps/scru.svg'
XDG_CACHE_HOME = os.environ['XDG_CACHE_HOME']
APP_CACHE = os.path.join(XDG_CACHE_HOME, 'scru')
if not os.path.isdir(APP_CACHE):
os.makedirs(APP_CACHE)
def complete(url, notify):
"""Show a notify message when the upload was commpleted"""
image = urllib.urlretrieve(
url, os.path.join(APP_CACHE, 'notify.png'))[0]
image_uri = 'file://' + image
if notify:
# Get the thumb image of the current uploaded image
utils.show_notification('Scru',
'The screenshot was uploaded to imgur', image_uri)
print 'The screenshot was uploaded to imgur'
print 'Link was copied to the clipboard'
def screen_to_imgur(filename, link, select, sound,
notify, quality, delay, optipng):
"""Take a screenshot and upload to imgur"""
# Default link argument
if not link:
link = 'original'
# Take the screenshot
screen = screenshot.grab(filename, select, sound, quality, delay, optipng)
# print 'Uploading image to imgur...'
data = imgur.upload(screen)
screen.close()
# Get the links of the uploaded screenshot
if link == 'html_clikeable_thumbail':
thumb = data['upload']['links']['large_thumbnail']
original = data['upload']['links']['original']
url = '<a href="%s"><img src=%s/></a>' % (original, thumb)
elif link == 'IM':
original = data['upload']['links']['original']
url = '[IMG]%s[/IMG]' % original
else:
url = data['upload']['links'][link]
notify_im = data['upload']['links']['small_square'] #thumb image
# Copy to the clipboard the url of the uploaded screenshot
clipboard.copy(url)
if notify:
# Notify when done
complete(notify_im, notify)
# print link.upper() + ': ' + url
print url
return url
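# Illustrative call (a sketch; the argument values are hypothetical):
#
# screen_to_imgur(filename='shot.png', link='original', select=True, sound=False,
#                 notify=True, quality=90, delay=0, optipng=True)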
| 31.703125
| 78
| 0.643667
|
3a047361ca9a1cd438a2f259e16d7d029337c536
| 11,508
|
py
|
Python
|
tests/performance/test_performance.py
|
gnomonsis/model_server
|
bdd7eccde5dfa4f518f6964c103b4389cd00caaf
|
[
"Apache-2.0"
] | null | null | null |
tests/performance/test_performance.py
|
gnomonsis/model_server
|
bdd7eccde5dfa4f518f6964c103b4389cd00caaf
|
[
"Apache-2.0"
] | null | null | null |
tests/performance/test_performance.py
|
gnomonsis/model_server
|
bdd7eccde5dfa4f518f6964c103b4389cd00caaf
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Can be run locally with: py.test tests/test_performance.py -v -s
import os
import subprocess
import datetime
import urllib.request
import pytest
from tabulate import tabulate
from data.performance_constants import DATASET, OVMS_CLIENT, OVMS_DATASET, ITERATIONS, AMS_PORT, \
AMS_ADDRESS, OVMS_PORT, OVMS_CLIENT_PATH, AMS_CLIENT_PATH, MODELS, PARAMS
from fixtures.performance import run_ams, cleanup_ams, run_ovms, cleanup_ovms, prepare_dataset_for_ovms, cleanup_dataset
LATENCY = []
THROUGHPUT = []
class TestPerformance:
@staticmethod
def inference(model_name, image, iterations: int):
responses = []
with open(os.path.join(DATASET, image), mode='rb') as image_file:
image_bytes = image_file.read()
for num in range(iterations):
start_time = datetime.datetime.now()
url = 'http://{}:{}/{}'.format(AMS_ADDRESS, AMS_PORT, model_name)
headers = {'Content-Type': 'image/png'}
req = urllib.request.Request(url, image_bytes, headers=headers)
response = urllib.request.urlopen(req)
assert response.getcode() == 200, "Unexpected response code: {}".format(response.getcode())
stop_time = datetime.datetime.now()
duration = (stop_time - start_time).total_seconds() * 1000
responses.append({"response": response,
"duration": duration})
return responses
@staticmethod
def inference_ovms(model_name, image_width, image_height, input_name):
cmd = ["python", OVMS_CLIENT, "--grpc_port", OVMS_PORT, "--model_name", model_name,
"--input_images_dir", OVMS_DATASET, "--width", str(image_width), "--height",
str(image_height), "--input_name", input_name]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
output = proc.communicate()[0].decode('utf-8')
print("Output for latency:\n{} \nCommand: {}".format(output, " ".join(cmd)))
metrics = [line for line in output.splitlines() if "average time" in line][0]
average_processing_time = metrics.split(";")[0].split(":")[1]
average_speed = metrics.split(";")[1].split(":")[1]
return average_processing_time, average_speed
@staticmethod
def return_metrics(cmd):
ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = ps.communicate()[0].decode()
print("Output for throughput:\n{} \nCommand: {}".format(output, cmd))
times = [line for line in output.splitlines() if "real" in line][0]
time = times.split("\t")[1].split("m")
final_time = float(time[0]) * 60 + float(time[1].replace("s", ""))
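# Throughput below: 10 parallel xargs clients times a presumed batch size of 1, each
# assumed to process ITERATIONS frames, giving frames per second.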
throughput = 10 * 1 * (ITERATIONS / final_time)
subprocess.call("rm -rf metrics", shell=True)
return final_time, throughput
@staticmethod
def measure_throughput_ams(model_name, image):
cmd = " ".join(["time printf '%s\\n' {1..10} | xargs -n 1 -P 10 python",
AMS_CLIENT_PATH, model_name, image])
return TestPerformance.return_metrics(cmd)
@staticmethod
def measure_throughput_ovms(model_name, width, height, input_name):
cmd = " ".join(["time printf '%s\\n' {1..10} | xargs -n 1 -P 10 sh",
OVMS_CLIENT_PATH, model_name, width, height, OVMS_PORT, OVMS_DATASET, input_name])
return TestPerformance.return_metrics(cmd)
@pytest.mark.parametrize("model", MODELS)
@pytest.mark.parametrize("params", PARAMS)
def test_performance_latency_one_client_ams(self, model, params):
"""
<b>Description:</b>
Checks AMS performance - latency
<b>Input data:</b>
- AMS
- model
- configuration
<b>Expected results:</b>
Test passes when AMS has results close to OVMS and OpenVino benchmark app.
<b>Steps:</b>
1. Run AMS and get response time.
"""
# running ams
print("Plugin config: {}".format(params["plugin_config"]))
container = run_ams(params)
# measure latency for ams
responses = self.inference(image=model["dataset"], iterations=ITERATIONS,
model_name=model["model_name"])
total_processing_time = 0
total_speed = 0
for rsp in responses:
total_processing_time = total_processing_time + round(rsp["duration"], 2)
total_speed = total_speed + round(1000/rsp["duration"], 2)
average_proccessing_time = "{} ms".format(round(total_processing_time / len(responses), 2))
average_speed = "{} fps".format(round(total_speed / len(responses), 2))
plugin_config = "singlestream" if params["plugin_config"] == '{"CPU_THROUGHPUT_STREAMS": "1"}' \
else "multistream"
ams = [model["model_name"], params["cores"],
"ams", params["nireq"], params["grpc_workers"], plugin_config,
average_processing_time, average_speed]
LATENCY.append(ams)
# remove ovms
cleanup_ams(container)
@pytest.mark.parametrize("model", MODELS)
@pytest.mark.parametrize("params", PARAMS)
def test_performance_latency_one_client_ovms(self, model, params):
"""
<b>Description:</b>
Checks AMS performance - latency
<b>Input data:</b>
- OVMS
- model
- configuration
<b>Expected results:</b>
Test passes when AMS has results close to OVMS and OpenVino benchmark app.
<b>Steps:</b>
1. Run OVMS and get response time.
"""
# prepare dataset
prepare_dataset_for_ovms(model["dataset"])
# running ams
container = run_ovms(params)
# measure latency for ovms
plugin_config = "singlestream" if params["plugin_config"] == '{"CPU_THROUGHPUT_STREAMS": "1"}' \
else "multistream"
average_processing_time, average_speed = self.inference_ovms(model_name=model["model_name_ovms"],
image_width=model["width"],
image_height=model["height"],
input_name=model["input_name"])
ovms = [model["model_name"], params["cores"], "ovms", params["nireq"],
params["grpc_workers"], plugin_config, average_proccessing_time, average_speed]
LATENCY.append(ovms)
# remove ovms
cleanup_ovms(container)
# remove dataset
cleanup_dataset()
@pytest.mark.parametrize("model", MODELS)
@pytest.mark.parametrize("params", PARAMS)
def test_performance_measure_throughput_ten_clients_ams(self, model, params):
"""
<b>Description:</b>
Measures OVMS and AMS performance - throughput
<b>Input data:</b>
- AMS
- model
- configuration
<b>Expected results:</b>
Test passes when AMS results are close to OVMS results - throughput
<b>Steps:</b>
1. Measure AMS results
"""
# run ams
container = run_ams(params)
# measure throughput for ams
plugin_config = "singlestream" if params["plugin_config"] == '{"CPU_THROUGHPUT_STREAMS": "1"}' \
else "multistream"
final_time, throughput = self.measure_throughput_ams(model_name=model["model_name"], image=model["dataset"])
ams = [model["model_name"], params["cores"], "ams", params["nireq"],
params["grpc_workers"], plugin_config, final_time, throughput]
THROUGHPUT.append(ams)
# remove ams
cleanup_ams(container)
@pytest.mark.parametrize("model", MODELS)
@pytest.mark.parametrize("params", PARAMS)
def test_performance_measure_throughput_ten_clients_ovms(self, model, params):
"""
<b>Description:</b>
Measures OVMS and AMS performance - throughput
<b>Input data:</b>
- OVMS
- model
- configuration
<b>Expected results:</b>
Test passes when AMS results are close to OVMS results - throughput
<b>Steps:</b>
1. Measure OVMS results
"""
# prepare dataset
prepare_dataset_for_ovms(model["dataset"])
# run ovms
container = run_ovms(params)
# measure throughput for ovms
final_time, throughput = self.measure_throughput_ovms(model_name=model["model_name_ovms"],
width=model["width"],
height=model["height"],
input_name=model["input_name"])
plugin_config = "singlestream" if params["plugin_config"] == '{"CPU_THROUGHPUT_STREAMS": "1"}' \
else "multistream"
ovms = [model["model_name"], params["cores"], "ovms", params["nireq"],
params["grpc_workers"], plugin_config, final_time, throughput]
THROUGHPUT.append(ovms)
# remove ovms
cleanup_ovms(container)
# remove dataset
cleanup_dataset()
def test_performance_compare_latency_one_client(self):
"""
<b>Description:</b>
Compares OVMS and AMS performance - latency
<b>Assumptions:</b>
- All models present on local machine, location: /opt/models/models
<b>Input data:</b>
- OVMS performance - latency results
- AMS performance - latency results
<b>Expected results:</b>
Test passes when AMS results are close to OVMS results - latency
<b>Steps:</b>
1. Compare OVMS and AMS results
"""
print("\n")
print(tabulate(LATENCY, headers=["Model", "Cores", "Service", "Nireq", "Grpc_workers",
"Singlestream/Multistream", "Average processing time", "Average speed"]))
def test_performance_compare_throughput_ten_clients(self):
"""
<b>Description:</b>
Compares OVMS and AMS performance - throughput
<b>Assumptions:</b>
- All models present on local machine, location: /opt/models/models
<b>Input data:</b>
- OVMS performance - throughput results
- AMS performance - throughput results
<b>Expected results:</b>
Test passes when AMS results are close to OVMS results - throughput
<b>Steps:</b>
1. Compare OVMS and AMS results
"""
print("\n")
print(tabulate(THROUGHPUT, headers=["Model", "Cores", "Service", "Nireq", "Grpc_workers",
"Singlestream/Multistream", "Final time", "Throughput"]))
| 39.010169
| 120
| 0.602103
|
cf92e06022a0033791565af4034c4ae9f87afbca
| 13,864
|
py
|
Python
|
tests/api/test_slack.py
|
cham11ng/boss
|
71e67cf2c4411787d319e2bd842fd93402aeaef3
|
[
"MIT"
] | 25
|
2017-10-23T09:22:06.000Z
|
2021-09-15T11:04:51.000Z
|
tests/api/test_slack.py
|
cham11ng/boss
|
71e67cf2c4411787d319e2bd842fd93402aeaef3
|
[
"MIT"
] | 37
|
2017-10-18T15:40:18.000Z
|
2021-12-19T12:59:29.000Z
|
tests/api/test_slack.py
|
cham11ng/boss
|
71e67cf2c4411787d319e2bd842fd93402aeaef3
|
[
"MIT"
] | 17
|
2017-10-19T08:39:09.000Z
|
2021-11-01T09:35:05.000Z
|
''' Tests for boss.api.slack module. '''
from mock import patch
from pytest import fixture
from boss.api import slack
from boss.core.constants.notification_types import (
DEPLOYMENT_STARTED,
DEPLOYMENT_FINISHED,
RUNNING_SCRIPT_STARTED,
RUNNING_SCRIPT_FINISHED
)
@fixture(scope='function')
def slack_url():
return slack.slack_url(
slack.config()['base_url'],
slack.config()['endpoint']
)
def test_create_link():
''' Test slack.create_link(). '''
url = 'http://test-link-url'
title = 'Test link'
expected_link = '<{url}|{title}>'.format(url=url, title=title)
assert slack.create_link(url, title) == expected_link
def test_create_link_supports_empty_url():
''' Test slack.create_link() supports empty url. '''
assert slack.create_link(None, 'Test') == 'Test'
def test_slack_url():
''' Test slack_url() works. '''
assert slack.slack_url('', '') == ''
assert slack.slack_url(
'https://hooks.slack.com/services',
'/foo/bar'
) == 'https://hooks.slack.com/services/foo/bar'
assert slack.slack_url(
'https://hooks.slack.com/services',
'https://hooks.slack.com/services/foo/bar'
) == 'https://hooks.slack.com/services/foo/bar'
assert slack.slack_url(
'',
'https://hooks.slack.com/services/foo/bar'
) == 'https://hooks.slack.com/services/foo/bar'
def test_slack_url_with_no_leading_trailing_slashes():
''' Test no trailing or leading slashes are required. '''
assert slack.slack_url(
'https://hooks.slack.com/services',
'just-test'
) == 'https://hooks.slack.com/services/just-test'
def test_send(slack_url):
''' Test slack.send(). '''
notify_params = dict(
branch_url='http://branch-url',
branch='temp',
commit='tttt',
commit_url='http://commit-url',
public_url='http://public-url',
host='test-notify-deploying-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user',
)
payload = {
'attachments': [
{
'color': 'good',
'text': 'user is deploying <http://repository-url|project-name>:<http://branch-url|temp> (<http://commit-url|tttt>) to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_STARTED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_send_deployment_started_with_no_repository_url(slack_url):
''' Test deployment started notification with no repository url. '''
notify_params = dict(
branch='temp',
commit='tttt',
commit_url=None,
branch_url=None,
repository_url=None,
public_url='http://public-url',
host='test-notify-deploying-host',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user',
)
payload = {
'attachments': [
{
'color': 'good',
'text': 'user is deploying project-name:temp (tttt) to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_STARTED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_send_deployment_finished_with_no_repository_url(slack_url):
''' Test deployment finished notification with no repository url. '''
notify_params = dict(
branch='temp',
commit='tttt',
commit_url=None,
branch_url=None,
repository_url=None,
public_url='http://public-url',
host='test-notify-deploying-host',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user',
)
payload = {
'attachments': [
{
'color': '#764FA5',
'text': 'user finished deploying project-name:temp (tttt) to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_FINISHED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_send_with_no_branch_name(slack_url):
'''
Test slack.send() doesn't show the branch link,
if branch name is not provided.
'''
notify_params = dict(
public_url='http://public-url',
host='test-notify-deploying-host',
repository_url='http://repository-url',
commit='tttt',
commit_url='http://commit-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user',
)
payload = {
'attachments': [
{
'color': 'good',
'text': 'user is deploying <http://repository-url|project-name> (<http://commit-url|tttt>) to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_STARTED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_notify_deployed(slack_url):
''' Test slack.notify_deployed(). '''
notify_params = dict(
branch_url='http://branch-url',
branch='temp',
commit='tttt',
commit_url='http://commit-url',
public_url='http://public-url',
host='test-notify-deployed-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user'
)
payload = {
'attachments': [
{
'color': '#764FA5',
'text': 'user finished deploying <http://repository-url|project-name>:<http://branch-url|temp> (<http://commit-url|tttt>) to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_FINISHED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_notify_deployed_with_no_commit(slack_url):
''' Test sending deployment finished notification with no commit. '''
notify_params = dict(
branch_url='http://branch-url',
branch='temp',
public_url='http://public-url',
host='test-notify-deployed-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user'
)
payload = {
'attachments': [
{
'color': '#764FA5',
'text': 'user finished deploying <http://repository-url|project-name>:<http://branch-url|temp> to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_FINISHED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_notify_deploying_with_no_commit(slack_url):
''' Test sending deployment started notification with no commit. '''
notify_params = dict(
branch_url='http://branch-url',
branch='temp',
public_url='http://public-url',
host='test-notify-deployed-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user'
)
payload = {
'attachments': [
{
'color': 'good',
'text': 'user is deploying <http://repository-url|project-name>:<http://branch-url|temp> to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_STARTED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_notify_deployed_with_no_branch_name(slack_url):
'''
Test slack.notify_deployed() doesn't show the branch link,
if branch name is not provided.
'''
notify_params = dict(
public_url='http://public-url',
host='test-notify-deployed-host',
commit='tttt',
commit_url='http://commit-url',
repository_url='http://repository-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user'
)
payload = {
'attachments': [
{
'color': '#764FA5',
'text': 'user finished deploying <http://repository-url|project-name> (<http://commit-url|tttt>) to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_FINISHED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_notify_deployment_finished_with_no_commit_no_branch(slack_url):
''' Test sending deployment finished notification with no commit and no branch. '''
notify_params = dict(
public_url='http://public-url',
host='test-notify-deployed-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user'
)
payload = {
'attachments': [
{
'color': '#764FA5',
'text': 'user finished deploying <http://repository-url|project-name> to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_FINISHED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_notify_deployment_started_with_no_commit_no_branch(slack_url):
''' Test sending deployment started notification with no commit and no branch. '''
notify_params = dict(
public_url='http://public-url',
host='test-notify-deployed-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='server-name',
server_link='http://server-link',
user='user'
)
payload = {
'attachments': [
{
'color': 'good',
'text': 'user is deploying <http://repository-url|project-name> to <http://public-url|server-name> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_STARTED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_notify_deployment_started_no_links_at_all(slack_url):
''' Test deployment started notification with no links or urls at all. '''
notify_params = dict(
project_name='project-name',
server_name='staging',
user='user',
)
payload = {
'attachments': [
{
'color': 'good',
'text': 'user is deploying project-name to staging server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(DEPLOYMENT_STARTED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_send_running_script_started_notification(slack_url):
''' Test send() sends RUNNING_SCRIPT_STARTED notification. '''
notify_params = dict(
public_url='http://public-url',
host='test-notify-deploying-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='stage',
server_link='http://server-link',
script='migration',
user='user'
)
payload = {
'attachments': [
{
'color': 'good',
'text': 'user is running <http://repository-url|project-name>:migration on <http://public-url|stage> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(RUNNING_SCRIPT_STARTED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
def test_send_running_script_finished_notification(slack_url):
''' Test send() sends RUNNING_SCRIPT_FINISHED notification. '''
notify_params = dict(
public_url='http://public-url',
host='test-notify-deploying-host',
repository_url='http://repository-url',
project_name='project-name',
server_name='stage',
server_link='http://server-link',
script='migration',
user='user'
)
payload = {
'attachments': [
{
'color': '#764FA5',
'text': 'user finished running <http://repository-url|project-name>:migration on <http://public-url|stage> server.',
'mrkdwn_in': ['text']
}
]
}
with patch('requests.post') as mock_post:
slack.send(RUNNING_SCRIPT_FINISHED, **notify_params)
mock_post.assert_called_once_with(slack_url, json=payload)
| 31.9447
| 182
| 0.593263
|
545a3084f8f487541f95e312e36ccea39a2792b7
| 2,032
|
py
|
Python
|
update_price.py
|
Block-Way/sync
|
48970311e29c4e2fbe1db56140328f22c4accca8
|
[
"Apache-2.0"
] | null | null | null |
update_price.py
|
Block-Way/sync
|
48970311e29c4e2fbe1db56140328f22c4accca8
|
[
"Apache-2.0"
] | null | null | null |
update_price.py
|
Block-Way/sync
|
48970311e29c4e2fbe1db56140328f22c4accca8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
import time
import config
import pymysql
def Update():
conn = pymysql.connect(host=config.host, port=config.port, user=config.user, password=config.password, db=config.db)
cursor = conn.cursor()
url = 'https://dncapi.fxhapp.com/api/coin/web-coinrank?page=1&type=-1&pagesize=50&webp=1'
response = requests.get(url=url)
objs = json.loads(response.text)
sqls = []
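# price24h recovers the price 24 hours ago: given change_percent = (now - then) / then * 100,
# it follows that then = now * 100 / (100 + change_percent).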
for obj in objs["data"]:
if obj["name"] == 'BTC':
sql = 'update quotations set price = %s,price24h = %s where tradePairId = "BTC/USDT"' \
% (obj["current_price_usd"],(obj["current_price_usd"] * 100) / (100 + obj["change_percent"]))
sqls.append(sql)
elif obj["name"] == "ETH":
sql = 'update quotations set price = %s,price24h = %s where tradePairId = "ETH/USDT"' \
% (obj["current_price_usd"],(obj["current_price_usd"] * 100) / (100 + obj["change_percent"]))
sqls.append(sql)
elif obj["name"] == "BNB":
sql = 'update quotations set price = %s,price24h = %s where tradePairId = "BNB/USDT"' \
% (obj["current_price_usd"],(obj["current_price_usd"] * 100) / (100 + obj["change_percent"]))
sqls.append(sql)
elif obj["name"] == "TRX":
sql = 'update quotations set price = %s,price24h = %s where tradePairId = "TRX/USDT"' \
% (obj["current_price_usd"],(obj["current_price_usd"] * 100) / (100 + obj["change_percent"]))
sqls.append(sql)
elif obj["name"] == "XRP":
sql = 'update quotations set price = %s,price24h = %s where tradePairId = "XRP/USDT"' \
% (obj["current_price_usd"],(obj["current_price_usd"] * 100) / (100 + obj["change_percent"]))
else:
pass
for sql in sqls:
print(sql)
cursor.execute(sql)
conn.commit()
if __name__ == '__main__':
while True:
Update()
time.sleep(120)
| 42.333333
| 120
| 0.576772
|
907ea6700105a12cb9ef69caf9a93a97247f1b0e
| 3,344
|
py
|
Python
|
openprocurement/medicines/registry/databridge/caching.py
|
ProzorroUKR/openprocurement.medicines.registry
|
3faa63de9eeb21fbf94346d3b67339bae7389099
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/medicines/registry/databridge/caching.py
|
ProzorroUKR/openprocurement.medicines.registry
|
3faa63de9eeb21fbf94346d3b67339bae7389099
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/medicines/registry/databridge/caching.py
|
ProzorroUKR/openprocurement.medicines.registry
|
3faa63de9eeb21fbf94346d3b67339bae7389099
|
[
"Apache-2.0"
] | 1
|
2018-07-25T14:07:17.000Z
|
2018-07-25T14:07:17.000Z
|
import logging
import redis
from ConfigParser import ConfigParser
from rediscluster import StrictRedisCluster
logger = logging.getLogger(__name__)
class DB(object):
def __init__(self, config):
self.config = config
cache_backend = self.config_get('cache_backend') or 'redis'
if cache_backend == 'redis':
self.__backend = cache_backend
self.__host = self.config_get('cache_host') or '127.0.0.1'
self.__port = self.config_get('cache_port') or 6379
self.__db_name = self.config_get('cache_db_name') or 0
self.db = redis.StrictRedis(host=self.__host, port=self.__port, db=self.__db_name)
elif cache_backend == 'redis-cluster':
self.__backend = cache_backend
node1_host = self.config_get('node1_host')
node1_port = self.config_get('node1_port')
node2_host = self.config_get('node2_host')
node2_port = self.config_get('node2_port')
node3_host = self.config_get('node3_host')
node3_port = self.config_get('node3_port')
node4_host = self.config_get('node4_host')
node4_port = self.config_get('node4_port')
node5_host = self.config_get('node5_host')
node5_port = self.config_get('node5_port')
node6_host = self.config_get('node6_host')
node6_port = self.config_get('node6_port')
self.__host = (node1_host, node2_host, node3_host, node4_host, node5_host, node6_host)
self.__port = (node1_port, node2_port, node3_port, node4_port, node5_port, node6_port)
self.__db_name = 'cluster'
cluster_nodes = [
{'host': node1_host, 'port': node1_port},
{'host': node2_host, 'port': node2_port},
{'host': node3_host, 'port': node3_port},
{'host': node4_host, 'port': node4_port},
{'host': node5_host, 'port': node5_port},
{'host': node6_host, 'port': node6_port}
]
self.db = StrictRedisCluster(startup_nodes=cluster_nodes, decode_responses=True)
self.set_value = self.db.set
self.has_value = self.db.exists
self.remove_value = self.db.delete
def config_get(self, name):
if isinstance(self.config, ConfigParser):
return self.config.get('app:api', name)
else:
return self.config.get('app:api').get(name)
def get(self, key):
return self.db.get(key)
def keys(self, prefix):
keys = self.db.keys(prefix)
return keys
def put(self, key, value, ex=90000):
self.set_value(key, value, ex)
def remove(self, key):
self.remove_value(key)
def has(self, key):
return self.has_value(key)
def scan_iter(self, prefix=None):
return [key for key in self.db.scan_iter(prefix)]
def remove_pattern(self, prefix):
for key in self.db.scan_iter(prefix):
self.remove(key)
def flushall(self):
self.db.flushall()
@property
def backend(self):
return self.__backend
@property
def host(self):
return self.__host
@property
def port(self):
return self.__port
@property
def db_name(self):
return self.__db_name
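# Minimal usage sketch (assumes a reachable Redis instance and an INI-style
# config whose [app:api] section supplies cache_host/cache_port):
#   config = ConfigParser()
#   config.read('service.ini')
#   db = DB(config)
#   db.put('registry:last_sync', '2018-07-25', ex=3600)
#   assert db.has('registry:last_sync')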
| 31.847619
| 98
| 0.610048
|
0725493300e9c62b869e9db92cdfd21c74233609
| 1,281
|
py
|
Python
|
parse_apache_configs/test/test_get_apache_config.py
|
daladim/parse_apache_configs
|
5f585c099095176ae4708c65593bef315d930033
|
[
"Apache-2.0"
] | 7
|
2017-05-10T12:03:48.000Z
|
2022-01-31T09:05:25.000Z
|
parse_apache_configs/test/test_get_apache_config.py
|
daladim/parse_apache_configs
|
5f585c099095176ae4708c65593bef315d930033
|
[
"Apache-2.0"
] | 8
|
2015-11-04T16:08:25.000Z
|
2015-12-01T16:16:16.000Z
|
parse_apache_configs/test/test_get_apache_config.py
|
elextro/parse_apache_configs
|
d40292e5f938147a558f4a8c94a5e6668d213df3
|
[
"Apache-2.0"
] | 5
|
2017-01-17T15:17:52.000Z
|
2022-01-11T22:48:28.000Z
|
from os import listdir
from os.path import isfile, join
import unittest
from parse_apache_configs import parse_config
import pprint
class testGetApacheConfig(unittest.TestCase):
#print "ENTERING TEST_PARSE_CONFIG" + "-"*8
def test_get_apache_config(self):
test_files = [ f for f in listdir("./test_conf_files") if isfile(join("./test_conf_files", f)) ]
for file_name in test_files:
pac = parse_config.ParseApacheConfig("./test_conf_files/" + file_name)
#pp = pprint.pprint
conf_list = pac.parse_config()
conf_string = pac.get_apache_config(conf_list)
#print conf_string
#TODO make sure we get the right config file.
def test_get_apache_config_string_config(self):
test_files = [ f for f in listdir("./test_conf_files") if isfile(join("./test_conf_files", f)) ]
for file_name in test_files:
full_file_path = "./test_conf_files/" + file_name
with open(full_file_path, 'r') as fp:
file_as_string = fp.read()
pac = parse_config.ParseApacheConfig(apache_file_as_string=file_as_string)
conf_list = pac.parse_config()
conf_string = pac.get_apache_config(conf_list)
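            # A natural follow-up assertion (a sketch; it assumes
            # get_apache_config() round-trips the parsed file verbatim) would be:
            #     self.assertEqual(conf_string.strip(), file_as_string.strip())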
| 44.172414
| 108
| 0.654957
|
84ee6ce295c0fbb5469f9ecd92400430d966a489
| 58
|
py
|
Python
|
dev.py
|
actiniumn404/SICL
|
218b832fafd7613051a9b29b10b3223e4f7b102a
|
[
"MIT"
] | null | null | null |
dev.py
|
actiniumn404/SICL
|
218b832fafd7613051a9b29b10b3223e4f7b102a
|
[
"MIT"
] | null | null | null |
dev.py
|
actiniumn404/SICL
|
218b832fafd7613051a9b29b10b3223e4f7b102a
|
[
"MIT"
] | null | null | null |
import main
with open("tests/dev.sicl", "r") as f:
    main.SICL(f.read())
| 19.333333
| 45
| 0.672414
|
0552fc4833f35fb1f18e735402bcad9ed192d9c2
| 16,767
|
py
|
Python
|
twilio/rest/sync/v1/service/sync_stream/__init__.py
|
NCPlayz/twilio-python
|
08898a4a1a43b636a64c9e98fbb0b6ee1792c687
|
[
"MIT"
] | null | null | null |
twilio/rest/sync/v1/service/sync_stream/__init__.py
|
NCPlayz/twilio-python
|
08898a4a1a43b636a64c9e98fbb0b6ee1792c687
|
[
"MIT"
] | 1
|
2021-06-02T00:27:34.000Z
|
2021-06-02T00:27:34.000Z
|
exercise/venv/lib/python3.7/site-packages/twilio/rest/sync/v1/service/sync_stream/__init__.py
|
assuzzanne/notifications-dispatcher-api
|
81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b
|
[
"MIT"
] | null | null | null |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.sync.v1.service.sync_stream.stream_message import StreamMessageList
class SyncStreamList(ListResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, service_sid):
"""
Initialize the SyncStreamList
:param Version version: Version that contains the resource
:param service_sid: The SID of the Sync Service that the resource is associated with
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamList
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamList
"""
super(SyncStreamList, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, }
self._uri = '/Services/{service_sid}/Streams'.format(**self._solution)
def create(self, unique_name=values.unset, ttl=values.unset):
"""
Create a new SyncStreamInstance
:param unicode unique_name: An application-defined string that uniquely identifies the resource
:param unicode ttl: How long, in seconds, before the Stream expires and is deleted
:returns: Newly created SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
data = values.of({'UniqueName': unique_name, 'Ttl': ttl, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return SyncStreamInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def stream(self, limit=None, page_size=None):
"""
Streams SyncStreamInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists SyncStreamInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of SyncStreamInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
"""
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return SyncStreamPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of SyncStreamInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return SyncStreamPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a SyncStreamContext
:param sid: The SID of the Stream resource to fetch
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
return SyncStreamContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __call__(self, sid):
"""
Constructs a SyncStreamContext
:param sid: The SID of the Stream resource to fetch
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
return SyncStreamContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Sync.V1.SyncStreamList>'
class SyncStreamPage(Page):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, response, solution):
"""
Initialize the SyncStreamPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param service_sid: The SID of the Sync Service that the resource is associated with
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamPage
"""
super(SyncStreamPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of SyncStreamInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
return SyncStreamInstance(self._version, payload, service_sid=self._solution['service_sid'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Sync.V1.SyncStreamPage>'
class SyncStreamContext(InstanceContext):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, service_sid, sid):
"""
Initialize the SyncStreamContext
:param Version version: Version that contains the resource
:param service_sid: The SID of the Sync Service with the Sync Stream resource to fetch
:param sid: The SID of the Stream resource to fetch
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
super(SyncStreamContext, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'sid': sid, }
self._uri = '/Services/{service_sid}/Streams/{sid}'.format(**self._solution)
# Dependents
self._stream_messages = None
def fetch(self):
"""
Fetch a SyncStreamInstance
:returns: Fetched SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return SyncStreamInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the SyncStreamInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete('delete', self._uri)
def update(self, ttl=values.unset):
"""
Update the SyncStreamInstance
:param unicode ttl: How long, in seconds, before the Stream expires and is deleted
:returns: Updated SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
data = values.of({'Ttl': ttl, })
payload = self._version.update(
'POST',
self._uri,
data=data,
)
return SyncStreamInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
@property
def stream_messages(self):
"""
Access the stream_messages
:returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
:rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
"""
if self._stream_messages is None:
self._stream_messages = StreamMessageList(
self._version,
service_sid=self._solution['service_sid'],
stream_sid=self._solution['sid'],
)
return self._stream_messages
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Sync.V1.SyncStreamContext {}>'.format(context)
class SyncStreamInstance(InstanceResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, payload, service_sid, sid=None):
"""
Initialize the SyncStreamInstance
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
super(SyncStreamInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload['sid'],
'unique_name': payload['unique_name'],
'account_sid': payload['account_sid'],
'service_sid': payload['service_sid'],
'url': payload['url'],
'links': payload['links'],
'date_expires': deserialize.iso8601_datetime(payload['date_expires']),
'date_created': deserialize.iso8601_datetime(payload['date_created']),
'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
'created_by': payload['created_by'],
}
# Context
self._context = None
self._solution = {'service_sid': service_sid, 'sid': sid or self._properties['sid'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: SyncStreamContext for this SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamContext
"""
if self._context is None:
self._context = SyncStreamContext(
self._version,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def unique_name(self):
"""
:returns: An application-defined string that uniquely identifies the resource
:rtype: unicode
"""
return self._properties['unique_name']
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def service_sid(self):
"""
:returns: The SID of the Sync Service that the resource is associated with
:rtype: unicode
"""
return self._properties['service_sid']
@property
def url(self):
"""
:returns: The absolute URL of the Message Stream resource
:rtype: unicode
"""
return self._properties['url']
@property
def links(self):
"""
:returns: The URLs of the Stream's nested resources
:rtype: unicode
"""
return self._properties['links']
@property
def date_expires(self):
"""
:returns: The ISO 8601 date and time in GMT when the Message Stream expires
:rtype: datetime
"""
return self._properties['date_expires']
@property
def date_created(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def created_by(self):
"""
:returns: The Identity of the Stream's creator
:rtype: unicode
"""
return self._properties['created_by']
def fetch(self):
"""
Fetch a SyncStreamInstance
:returns: Fetched SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the SyncStreamInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def update(self, ttl=values.unset):
"""
Update the SyncStreamInstance
:param unicode ttl: How long, in seconds, before the Stream expires and is deleted
:returns: Updated SyncStreamInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance
"""
return self._proxy.update(ttl=ttl, )
@property
def stream_messages(self):
"""
Access the stream_messages
:returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
:rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageList
"""
return self._proxy.stream_messages
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Sync.V1.SyncStreamInstance {}>'.format(context)
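# Illustrative usage (a sketch; the account SID, auth token, and service SID are
# placeholders, and Client construction follows the usual twilio-python pattern):
#   from twilio.rest import Client
#   client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'auth_token')
#   stream = client.sync.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
#                          .sync_streams.create(unique_name='scores', ttl=3600)
#   print(stream.sid)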
| 33.941296
| 103
| 0.62957
|
2781964a090b618563f2c2fba7803bccd90b8f69
| 23,412
|
py
|
Python
|
toontown/building/DistributedElevator.py
|
AnonymousDeveloper65535/open-toontown
|
3d05c22a7d960ad843dde231140447c46973dba5
|
[
"BSD-3-Clause"
] | 8
|
2017-10-10T11:41:01.000Z
|
2021-02-23T12:55:47.000Z
|
toontown/building/DistributedElevator.py
|
AnonymousDeveloper65535/open-toontown
|
3d05c22a7d960ad843dde231140447c46973dba5
|
[
"BSD-3-Clause"
] | 1
|
2018-07-28T20:07:04.000Z
|
2018-07-30T18:28:34.000Z
|
toontown/building/DistributedElevator.py
|
AnonymousDeveloper65535/open-toontown
|
3d05c22a7d960ad843dde231140447c46973dba5
|
[
"BSD-3-Clause"
] | 2
|
2019-04-06T16:18:23.000Z
|
2021-02-25T06:25:01.000Z
|
from pandac.PandaModules import *
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from ElevatorConstants import *
from ElevatorUtils import *
from direct.showbase import PythonUtil
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM, State
from direct.distributed import DistributedObject
from direct.fsm import State
from toontown.toonbase import TTLocalizer, ToontownGlobals
from direct.task.Task import Task
from toontown.distributed import DelayDelete
from toontown.hood import ZoneUtil
from toontown.toontowngui import TeaserPanel
from toontown.building import BoardingGroupShow
class DistributedElevator(DistributedObject.DistributedObject):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedElevator')
JumpOutOffsets = JumpOutOffsets
def __init__(self, cr):
DistributedObject.DistributedObject.__init__(self, cr)
self.bldgRequest = None
self.toonRequests = {}
self.deferredSlots = []
self.localToonOnBoard = 0
self.boardedAvIds = {}
self.openSfx = base.loadSfx('phase_5/audio/sfx/elevator_door_open.mp3')
self.finalOpenSfx = None
self.closeSfx = base.loadSfx('phase_5/audio/sfx/elevator_door_close.mp3')
self.elevatorFSM = None
self.finalCloseSfx = None
self.elevatorPoints = ElevatorPoints
self.fillSlotTrack = None
self.type = ELEVATOR_NORMAL
self.countdownTime = ElevatorData[self.type]['countdown']
self.__toonTracks = {}
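        # Client-side door state machine, driven by the AI through setState();
        # states: off, opening, waitEmpty, waitCountdown, closing, closed.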
self.fsm = ClassicFSM.ClassicFSM('DistributedElevator', [State.State('off', self.enterOff, self.exitOff, ['opening',
'waitEmpty',
'waitCountdown',
'closing',
'closed']),
State.State('opening', self.enterOpening, self.exitOpening, ['waitEmpty', 'waitCountdown']),
State.State('waitEmpty', self.enterWaitEmpty, self.exitWaitEmpty, ['waitCountdown']),
State.State('waitCountdown', self.enterWaitCountdown, self.exitWaitCountdown, ['waitEmpty', 'closing']),
State.State('closing', self.enterClosing, self.exitClosing, ['closed', 'waitEmpty']),
State.State('closed', self.enterClosed, self.exitClosed, ['opening'])], 'off', 'off')
self.fsm.enterInitialState()
self.isSetup = 0
self.__preSetupState = None
self.bigElevator = 0
return
def generate(self):
DistributedObject.DistributedObject.generate(self)
def setupElevator(self):
collisionRadius = ElevatorData[self.type]['collRadius']
self.elevatorSphere = CollisionSphere(0, 5, 0, collisionRadius)
self.elevatorSphere.setTangible(0)
self.elevatorSphereNode = CollisionNode(self.uniqueName('elevatorSphere'))
self.elevatorSphereNode.setIntoCollideMask(ToontownGlobals.WallBitmask)
self.elevatorSphereNode.addSolid(self.elevatorSphere)
self.elevatorSphereNodePath = self.getElevatorModel().attachNewNode(self.elevatorSphereNode)
self.elevatorSphereNodePath.hide()
self.elevatorSphereNodePath.reparentTo(self.getElevatorModel())
self.elevatorSphereNodePath.stash()
self.boardedAvIds = {}
self.openDoors = getOpenInterval(self, self.leftDoor, self.rightDoor, self.openSfx, self.finalOpenSfx, self.type)
self.closeDoors = getCloseInterval(self, self.leftDoor, self.rightDoor, self.closeSfx, self.finalCloseSfx, self.type)
self.closeDoors = Sequence(self.closeDoors, Func(self.onDoorCloseFinish))
self.finishSetup()
def finishSetup(self):
self.isSetup = 1
self.offsetNP = self.getElevatorModel().attachNewNode('dummyNP')
if self.__preSetupState:
self.fsm.request(self.__preSetupState, [0])
self.__preSetupState = None
for slot in self.deferredSlots:
self.fillSlot(*slot)
self.deferredSlots = []
return
def disable(self):
if self.bldgRequest:
self.cr.relatedObjectMgr.abortRequest(self.bldgRequest)
self.bldgRequest = None
for request in self.toonRequests.values():
self.cr.relatedObjectMgr.abortRequest(request)
self.toonRequests = {}
if hasattr(self, 'openDoors'):
self.openDoors.pause()
if hasattr(self, 'closeDoors'):
self.closeDoors.pause()
self.clearToonTracks()
self.fsm.request('off')
DistributedObject.DistributedObject.disable(self)
return
def delete(self):
if self.isSetup:
self.elevatorSphereNodePath.removeNode()
del self.elevatorSphereNodePath
del self.elevatorSphereNode
del self.elevatorSphere
del self.bldg
if self.leftDoor:
del self.leftDoor
if self.rightDoor:
del self.rightDoor
if hasattr(self, 'openDoors'):
del self.openDoors
if hasattr(self, 'closeDoors'):
del self.closeDoors
del self.fsm
del self.openSfx
del self.closeSfx
self.isSetup = 0
self.fillSlotTrack = None
self.offsetNP.removeNode()
if hasattr(base.localAvatar, 'elevatorNotifier'):
base.localAvatar.elevatorNotifier.cleanup()
DistributedObject.DistributedObject.delete(self)
return
def setBldgDoId(self, bldgDoId):
self.bldgDoId = bldgDoId
self.bldgRequest = self.cr.relatedObjectMgr.requestObjects([bldgDoId], allCallback=self.gotBldg, timeout=2)
def gotBldg(self, buildingList):
self.bldgRequest = None
self.bldg = buildingList[0]
if not self.bldg:
self.notify.error('setBldgDoId: elevator %d cannot find bldg %d!' % (self.doId, self.bldgDoId))
return
self.setupElevator()
return
def gotToon(self, index, avId, toonList):
request = self.toonRequests.get(index)
if request:
del self.toonRequests[index]
self.fillSlot(index, avId)
else:
self.notify.error('gotToon: already had got toon in slot %s.' % index)
def setState(self, state, timestamp):
if self.isSetup:
self.fsm.request(state, [globalClockDelta.localElapsedTime(timestamp)])
else:
self.__preSetupState = state
def fillSlot0(self, avId, wantBoardingShow):
self.fillSlot(0, avId, wantBoardingShow)
def fillSlot1(self, avId, wantBoardingShow):
self.fillSlot(1, avId, wantBoardingShow)
def fillSlot2(self, avId, wantBoardingShow):
self.fillSlot(2, avId, wantBoardingShow)
def fillSlot3(self, avId, wantBoardingShow):
self.fillSlot(3, avId, wantBoardingShow)
def fillSlot4(self, avId, wantBoardingShow):
self.fillSlot(4, avId, wantBoardingShow)
def fillSlot5(self, avId, wantBoardingShow):
self.fillSlot(5, avId, wantBoardingShow)
def fillSlot6(self, avId, wantBoardingShow):
self.fillSlot(6, avId, wantBoardingShow)
def fillSlot7(self, avId, wantBoardingShow):
self.fillSlot(7, avId, wantBoardingShow)
def fillSlot(self, index, avId, wantBoardingShow = 0):
self.notify.debug('%s.fillSlot(%s, %s, ...)' % (self.doId, index, avId))
request = self.toonRequests.get(index)
if request:
self.cr.relatedObjectMgr.abortRequest(request)
del self.toonRequests[index]
if avId == 0:
pass
elif not self.cr.doId2do.has_key(avId):
func = PythonUtil.Functor(self.gotToon, index, avId)
self.toonRequests[index] = self.cr.relatedObjectMgr.requestObjects([avId], allCallback=func)
elif not self.isSetup:
self.deferredSlots.append((index, avId, wantBoardingShow))
else:
if avId == base.localAvatar.getDoId():
place = base.cr.playGame.getPlace()
if not place:
return
place.detectedElevatorCollision(self)
elevator = self.getPlaceElevator()
                if elevator is None:
if place.fsm.hasStateNamed('elevator'):
place.fsm.request('elevator')
elif place.fsm.hasStateNamed('Elevator'):
place.fsm.request('Elevator')
elevator = self.getPlaceElevator()
if not elevator:
return
self.localToonOnBoard = 1
if hasattr(localAvatar, 'boardingParty') and localAvatar.boardingParty:
localAvatar.boardingParty.forceCleanupInviteePanel()
localAvatar.boardingParty.forceCleanupInviterPanels()
if hasattr(base.localAvatar, 'elevatorNotifier'):
base.localAvatar.elevatorNotifier.cleanup()
cameraTrack = Sequence()
cameraTrack.append(Func(elevator.fsm.request, 'boarding', [self.getElevatorModel()]))
cameraTrack.append(Func(elevator.fsm.request, 'boarded'))
toon = self.cr.doId2do[avId]
toon.stopSmooth()
if not wantBoardingShow:
toon.setZ(self.getElevatorModel(), self.elevatorPoints[index][2])
toon.setShadowHeight(0)
if toon.isDisguised:
animInFunc = Sequence(Func(toon.suit.loop, 'walk'))
animFunc = Sequence(Func(toon.setAnimState, 'neutral', 1.0), Func(toon.suit.loop, 'neutral'))
else:
animInFunc = Sequence(Func(toon.setAnimState, 'run', 1.0))
animFunc = Func(toon.setAnimState, 'neutral', 1.0)
toon.headsUp(self.getElevatorModel(), apply(Point3, self.elevatorPoints[index]))
track = Sequence(animInFunc, LerpPosInterval(toon, TOON_BOARD_ELEVATOR_TIME * 0.75, apply(Point3, self.elevatorPoints[index]), other=self.getElevatorModel()), LerpHprInterval(toon, TOON_BOARD_ELEVATOR_TIME * 0.25, Point3(180, 0, 0), other=self.getElevatorModel()), Func(self.clearToonTrack, avId), animFunc, name=toon.uniqueName('fillElevator'), autoPause=1)
if wantBoardingShow:
boardingTrack, boardingTrackType = self.getBoardingTrack(toon, index, False)
track = Sequence(boardingTrack, track)
if avId == base.localAvatar.getDoId():
cameraWaitTime = 2.5
if boardingTrackType == BoardingGroupShow.TRACK_TYPE_RUN:
cameraWaitTime = 0.5
elif boardingTrackType == BoardingGroupShow.TRACK_TYPE_POOF:
cameraWaitTime = 1
cameraTrack = Sequence(Wait(cameraWaitTime), cameraTrack)
if self.canHideBoardingQuitBtn(avId):
track = Sequence(Func(localAvatar.boardingParty.groupPanel.disableQuitButton), track)
if avId == base.localAvatar.getDoId():
track = Parallel(cameraTrack, track)
track.delayDelete = DelayDelete.DelayDelete(toon, 'Elevator.fillSlot')
self.storeToonTrack(avId, track)
track.start()
self.fillSlotTrack = track
self.boardedAvIds[avId] = None
return
def emptySlot0(self, avId, bailFlag, timestamp, time):
self.emptySlot(0, avId, bailFlag, timestamp, time)
def emptySlot1(self, avId, bailFlag, timestamp, time):
self.emptySlot(1, avId, bailFlag, timestamp, time)
def emptySlot2(self, avId, bailFlag, timestamp, time):
self.emptySlot(2, avId, bailFlag, timestamp, time)
def emptySlot3(self, avId, bailFlag, timestamp, time):
self.emptySlot(3, avId, bailFlag, timestamp, time)
def emptySlot4(self, avId, bailFlag, timestamp, time):
self.emptySlot(4, avId, bailFlag, timestamp, time)
def emptySlot5(self, avId, bailFlag, timestamp, time):
        self.emptySlot(5, avId, bailFlag, timestamp, time)
def emptySlot6(self, avId, bailFlag, timestamp, time):
self.emptySlot(6, avId, bailFlag, timestamp, time)
def emptySlot7(self, avId, bailFlag, timestamp, time):
self.emptySlot(7, avId, bailFlag, timestamp, time)
def notifyToonOffElevator(self, toon):
toon.setAnimState('neutral', 1.0)
if toon == base.localAvatar:
doneStatus = {'where': 'exit'}
elevator = self.getPlaceElevator()
if elevator:
elevator.signalDone(doneStatus)
self.localToonOnBoard = 0
else:
toon.startSmooth()
def emptySlot(self, index, avId, bailFlag, timestamp, timeSent = 0):
if self.fillSlotTrack:
self.fillSlotTrack.finish()
self.fillSlotTrack = None
if avId == 0:
pass
elif not self.isSetup:
newSlots = []
for slot in self.deferredSlots:
if slot[0] != index:
newSlots.append(slot)
self.deferredSlots = newSlots
else:
timeToSet = self.countdownTime
if timeSent > 0:
timeToSet = timeSent
if self.cr.doId2do.has_key(avId):
if bailFlag == 1 and hasattr(self, 'clockNode'):
if timestamp < timeToSet and timestamp >= 0:
self.countdown(timeToSet - timestamp)
else:
self.countdown(timeToSet)
toon = self.cr.doId2do[avId]
toon.stopSmooth()
if toon.isDisguised:
toon.suit.loop('walk')
animFunc = Func(toon.suit.loop, 'neutral')
else:
toon.setAnimState('run', 1.0)
animFunc = Func(toon.setAnimState, 'neutral', 1.0)
track = Sequence(LerpPosInterval(toon, TOON_EXIT_ELEVATOR_TIME, Point3(*self.JumpOutOffsets[index]), other=self.getElevatorModel()), animFunc, Func(self.notifyToonOffElevator, toon), Func(self.clearToonTrack, avId), name=toon.uniqueName('emptyElevator'), autoPause=1)
if self.canHideBoardingQuitBtn(avId):
track.append(Func(localAvatar.boardingParty.groupPanel.enableQuitButton))
track.append(Func(localAvatar.boardingParty.enableGoButton))
track.delayDelete = DelayDelete.DelayDelete(toon, 'Elevator.emptySlot')
self.storeToonTrack(avId, track)
track.start()
if avId == base.localAvatar.getDoId():
messenger.send('exitElevator')
if avId in self.boardedAvIds:
del self.boardedAvIds[avId]
else:
self.notify.warning('toon: ' + str(avId) + " doesn't exist, and" + ' cannot exit the elevator!')
return
def allowedToEnter(self, zoneId = None):
allowed = False
if hasattr(base, 'ttAccess') and base.ttAccess:
if zoneId:
allowed = base.ttAccess.canAccess(zoneId)
else:
allowed = base.ttAccess.canAccess()
return allowed
def handleEnterSphere(self, collEntry):
self.notify.debug('Entering Elevator Sphere....')
if self.allowedToEnter(self.zoneId):
if self.elevatorTripId and localAvatar.lastElevatorLeft == self.elevatorTripId:
self.rejectBoard(base.localAvatar.doId, REJECT_SHUFFLE)
elif base.localAvatar.hp > 0:
self.cr.playGame.getPlace().detectedElevatorCollision(self)
toon = base.localAvatar
self.sendUpdate('requestBoard', [])
else:
place = base.cr.playGame.getPlace()
if place:
place.fsm.request('stopped')
self.dialog = TeaserPanel.TeaserPanel(pageName='cogHQ', doneFunc=self.handleOkTeaser)
def handleOkTeaser(self):
self.dialog.destroy()
del self.dialog
place = base.cr.playGame.getPlace()
if place:
place.fsm.request('walk')
def rejectBoard(self, avId, reason = 0):
print 'rejectBoard %s' % reason
if hasattr(base.localAvatar, 'elevatorNotifier'):
if reason == REJECT_SHUFFLE:
base.localAvatar.elevatorNotifier.showMe(TTLocalizer.ElevatorHoppedOff)
elif reason == REJECT_MINLAFF:
base.localAvatar.elevatorNotifier.showMe(TTLocalizer.ElevatorMinLaff % self.minLaff)
elif reason == REJECT_PROMOTION:
base.localAvatar.elevatorNotifier.showMe(TTLocalizer.BossElevatorRejectMessage)
elif reason == REJECT_NOT_YET_AVAILABLE:
base.localAvatar.elevatorNotifier.showMe(TTLocalizer.NotYetAvailable)
doneStatus = {'where': 'reject'}
elevator = self.getPlaceElevator()
if elevator:
elevator.signalDone(doneStatus)
def timerTask(self, task):
countdownTime = int(task.duration - task.time)
timeStr = str(countdownTime)
if self.clockNode.getText() != timeStr:
self.clockNode.setText(timeStr)
if task.time >= task.duration:
return Task.done
else:
return Task.cont
def countdown(self, duration):
countdownTask = Task(self.timerTask)
countdownTask.duration = duration
taskMgr.remove(self.uniqueName('elevatorTimerTask'))
return taskMgr.add(countdownTask, self.uniqueName('elevatorTimerTask'))
def handleExitButton(self):
localAvatar.lastElevatorLeft = self.elevatorTripId
self.sendUpdate('requestExit')
def enterWaitCountdown(self, ts):
self.elevatorSphereNodePath.unstash()
self.accept(self.uniqueName('enterelevatorSphere'), self.handleEnterSphere)
self.accept('elevatorExitButton', self.handleExitButton)
def exitWaitCountdown(self):
self.elevatorSphereNodePath.stash()
self.ignore(self.uniqueName('enterelevatorSphere'))
self.ignore('elevatorExitButton')
self.ignore('localToonLeft')
taskMgr.remove(self.uniqueName('elevatorTimerTask'))
self.clock.removeNode()
del self.clock
del self.clockNode
def enterClosing(self, ts):
if self.localToonOnBoard:
elevator = self.getPlaceElevator()
if elevator:
elevator.fsm.request('elevatorClosing')
self.closeDoors.start(ts)
def exitClosing(self):
pass
def onDoorCloseFinish(self):
for avId in self.boardedAvIds.keys():
av = self.cr.doId2do.get(avId)
if av is not None:
if av.getParent().compareTo(self.getElevatorModel()) == 0:
av.detachNode()
self.boardedAvIds = {}
return
def enterClosed(self, ts):
self.forceDoorsClosed()
self.__doorsClosed(self.getZoneId())
def exitClosed(self):
pass
def forceDoorsOpen(self):
openDoors(self.leftDoor, self.rightDoor)
def forceDoorsClosed(self):
self.closeDoors.finish()
closeDoors(self.leftDoor, self.rightDoor)
def enterOff(self):
pass
def exitOff(self):
pass
def enterWaitEmpty(self, ts):
pass
def exitWaitEmpty(self):
pass
def enterOpening(self, ts):
self.openDoors.start(ts)
def exitOpening(self):
pass
def startCountdownClock(self, countdownTime, ts):
self.clockNode = TextNode('elevatorClock')
self.clockNode.setFont(ToontownGlobals.getSignFont())
self.clockNode.setAlign(TextNode.ACenter)
self.clockNode.setTextColor(0.5, 0.5, 0.5, 1)
self.clockNode.setText(str(int(countdownTime)))
self.clock = self.getElevatorModel().attachNewNode(self.clockNode)
self.clock.setPosHprScale(0, 2.0, 7.5, 0, 0, 0, 2.0, 2.0, 2.0)
if ts < countdownTime:
self.countdown(countdownTime - ts)
def _getDoorsClosedInfo(self):
return ('suitInterior', 'suitInterior')
def __doorsClosed(self, zoneId):
if self.localToonOnBoard:
hoodId = ZoneUtil.getHoodId(zoneId)
loader, where = self._getDoorsClosedInfo()
doneStatus = {'loader': loader,
'where': where,
'hoodId': hoodId,
'zoneId': zoneId,
'shardId': None}
elevator = self.elevatorFSM
del self.elevatorFSM
elevator.signalDone(doneStatus)
return
def getElevatorModel(self):
self.notify.error('getElevatorModel: pure virtual -- inheritors must override')
def getPlaceElevator(self):
place = self.cr.playGame.getPlace()
if place:
if hasattr(place, 'elevator'):
return place.elevator
else:
self.notify.warning("Place was in state '%s' instead of Elevator." % place.fsm.getCurrentState().getName())
place.detectedElevatorCollision(self)
else:
self.notify.warning("Place didn't exist")
return None
def setElevatorTripId(self, id):
self.elevatorTripId = id
def getElevatorTripId(self):
return self.elevatorTripId
def setAntiShuffle(self, antiShuffle):
self.antiShuffle = antiShuffle
def getAntiShuffle(self):
return self.antiShuffle
def setMinLaff(self, minLaff):
self.minLaff = minLaff
def getMinLaff(self):
return self.minLaff
def storeToonTrack(self, avId, track):
self.clearToonTrack(avId)
self.__toonTracks[avId] = track
def clearToonTrack(self, avId):
oldTrack = self.__toonTracks.get(avId)
if oldTrack:
oldTrack.pause()
if self.__toonTracks.get(avId):
DelayDelete.cleanupDelayDeletes(self.__toonTracks[avId])
del self.__toonTracks[avId]
def clearToonTracks(self):
keyList = []
for key in self.__toonTracks:
keyList.append(key)
for key in keyList:
if self.__toonTracks.has_key(key):
self.clearToonTrack(key)
def getDestName(self):
return None
def getOffsetPos(self, seatIndex = 0):
return self.JumpOutOffsets[seatIndex]
def getOffsetPosWrtToonParent(self, toon, seatIndex = 0):
self.offsetNP.setPos(apply(Point3, self.getOffsetPos(seatIndex)))
return self.offsetNP.getPos(toon.getParent())
def getOffsetPosWrtRender(self, seatIndex = 0):
self.offsetNP.setPos(apply(Point3, self.getOffsetPos(seatIndex)))
return self.offsetNP.getPos(render)
def canHideBoardingQuitBtn(self, avId):
if avId == localAvatar.doId and hasattr(localAvatar, 'boardingParty') and localAvatar.boardingParty and localAvatar.boardingParty.groupPanel:
return True
else:
return False
def getBoardingTrack(self, toon, seatIndex, wantToonRotation):
self.boardingGroupShow = BoardingGroupShow.BoardingGroupShow(toon)
track, trackType = self.boardingGroupShow.getBoardingTrack(self.getElevatorModel(), self.getOffsetPosWrtToonParent(toon, seatIndex), self.getOffsetPosWrtRender(seatIndex), wantToonRotation)
return (track, trackType)
| 40.858639
| 370
| 0.63019
|
fcec59580e4a552078198972ec1e8f72d7db34e6
| 735
|
py
|
Python
|
accounts/urls.py
|
abhishek593/IITISOC_LaFrescoInPocket
|
dd91b99ea60fb2753d4715e8890c0c7dc26b8b99
|
[
"MIT"
] | null | null | null |
accounts/urls.py
|
abhishek593/IITISOC_LaFrescoInPocket
|
dd91b99ea60fb2753d4715e8890c0c7dc26b8b99
|
[
"MIT"
] | null | null | null |
accounts/urls.py
|
abhishek593/IITISOC_LaFrescoInPocket
|
dd91b99ea60fb2753d4715e8890c0c7dc26b8b99
|
[
"MIT"
] | null | null | null |
from django.urls import path
from accounts import views
app_name = 'accounts'
urlpatterns = [
path('profile/<int:user_id>/', views.profile, name='profile'),
path('login/', views.login_user, name='login'),
path('logout/', views.logout_user, name='logout'),
path('register/', views.register_user, name='register'),
path('confirm_registration/<str:uidb64>/<str:token>/', views.confirm_register_user, name='confirm_registration'),
path('password_reset/', views.password_reset, name='password-reset'),
path('password_reset_confirm/<str:uidb64>/<str:token>/', views.password_reset_confirm,
name='password_reset_confirm'),
path('password_change/', views.password_change, name='password-change'),
]
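# URLs resolve through the 'accounts' namespace, e.g. reverse('accounts:login')
# (illustrative; the final path prefix depends on how this urlconf is included).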
| 40.833333
| 117
| 0.717007
|
5b506ac97aad12c0b004564221cea7987e848f50
| 55
|
py
|
Python
|
wf_core_database/__init__.py
|
WildflowerSchools/wf-core-database
|
9b8eecd84a1451831634fef3681f38a03850830e
|
[
"MIT"
] | null | null | null |
wf_core_database/__init__.py
|
WildflowerSchools/wf-core-database
|
9b8eecd84a1451831634fef3681f38a03850830e
|
[
"MIT"
] | null | null | null |
wf_core_database/__init__.py
|
WildflowerSchools/wf-core-database
|
9b8eecd84a1451831634fef3681f38a03850830e
|
[
"MIT"
] | null | null | null |
from .utils import *
from .wf_database_pandas import *
| 18.333333
| 33
| 0.781818
|
a42dfca47500827207f8d5321444817b92be3c4c
| 8,023
|
py
|
Python
|
amap_distance_matrix/services/osrm.py
|
Euraxluo/distance_matrix
|
680e3147c263ea5f1abb26998aeb0b1985442a4b
|
[
"MIT"
] | 1
|
2022-03-15T06:47:36.000Z
|
2022-03-15T06:47:36.000Z
|
amap_distance_matrix/services/osrm.py
|
Euraxluo/distance_matrix
|
680e3147c263ea5f1abb26998aeb0b1985442a4b
|
[
"MIT"
] | null | null | null |
amap_distance_matrix/services/osrm.py
|
Euraxluo/distance_matrix
|
680e3147c263ea5f1abb26998aeb0b1985442a4b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Time: 2022-03-01 11:00
# Copyright (c) 2022
# author: Euraxluo
import copy
import asyncio
import warnings
import polyline
from concurrent import futures
from amap_distance_matrix.helper import *
from amap_distance_matrix.services.register import register
"""
Wrappers around the OSRM web API.
"""
###############osrm################
def osrm_distance(origin: list, destination: list):
    """
    OSRM driving distance between two points.
    :param origin: [lon, lat]
    :param destination: [lon, lat]
    :return: distance in meters
    """
    urls = osrm_url(origin=origin, destination=destination, service="route")
    data = futures_osrm(urls=urls)
    return data['total_distance']
def request_osrm(url, idx, data_list):
    """
    Fetch routing data from the given URL and store the JSON response at data_list[idx].
    :param url: routing request URL
    :param idx: index into the shared result list
    :param data_list: shared list collecting per-URL responses
    :return:
    """
try:
data = register.session().get(url).json()
data_list[idx] = data
except Exception as e:
register.logger.warning(f"Osrm Error:{e},url:{url}")
def futures_osrm(urls: list) -> dict:
    """
    Asynchronously request every OSRM URL and merge the responses into one route-planning result.
    :param urls:
    :return:
    """
data_collections = [None] * len(urls)
pack_data_result = {}
all_tasks = []
    # prepare an event loop for this thread (creating one if needed)
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
try:
event_loop = asyncio.get_event_loop()
except Exception as _:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
event_loop = asyncio.get_event_loop()
    # schedule one request task per URL on the thread pool
for idx in range(len(urls)):
all_tasks.append(event_loop.run_in_executor(register.pool, request_osrm, urls[idx], idx, data_collections))
    # run all request tasks to completion
event_loop.run_until_complete(asyncio.wait(all_tasks))
service = urls[0].split('/')[3] + 's'
for idx in range(len(urls)):
        api_data_result = data_collections[idx]
        if not api_data_result:
            # retry once and re-read the slot if the first attempt failed
            request_osrm(urls[idx], idx, data_collections)
            api_data_result = data_collections[idx]
if not pack_data_result:
pack_data_result = api_data_result
pack_data_result['strategy'] = service
pack_data_result['total_duration'] = api_data_result[service][0]['duration']
pack_data_result['total_distance'] = api_data_result[service][0]['distance']
pack_data_result['steps'] = []
loc_list: List[float] = polyline.decode(api_data_result[service][0]['geometry']) # [(lat,lon)]
waypoint_locations = [waypoint['location'] for waypoint in api_data_result['waypoints']]
steps = [(i['distance'], i['duration'], i['weight']) for i in api_data_result[service][0]['legs']]
for step_idx, (b, e) in enumerate(zip(waypoint_locations, waypoint_locations[1:])):
b_geohash = geo_encode(*b)
e_geohash = geo_encode(*e)
polyline_point_list = []
begin = False
tmp_loc_list = copy.deepcopy(loc_list)
precision = 8
while not begin and precision > 6:
for loc_idx, i in enumerate(tmp_loc_list):
poly_geohash = geo_encode(i[1], i[0], precision=precision)
if poly_geohash == b_geohash:
begin = True
if poly_geohash == e_geohash:
polyline_point_list.append((i[1], i[0]))
loc_list = loc_list[loc_idx:]
break
if begin:
polyline_point_list.append((i[1], i[0]))
else:
precision -= 1
continue
break
pack_data_result['steps'].append({
"distance": steps[step_idx][0],
"duration": steps[step_idx][1],
"polyline": loc_to_str(polyline_point_list),
"tmc": steps[step_idx][2],
"origin": b,
"destination": e,
})
pack_data_result['origin'] = waypoint_locations[0]
pack_data_result['destination'] = waypoint_locations[-1]
else:
pack_data_result['total_duration'] += api_data_result[service][0]['duration']
pack_data_result['total_distance'] += api_data_result[service][0]['distance']
waypoint_locations = [waypoint['location'] for waypoint in api_data_result['waypoints']]
pack_data_result['destination'] = waypoint_locations[-1]
loc_list: List[float] = polyline.decode(api_data_result[service][0]['geometry']) # [(lat,lon)]
steps = [(i['distance'], i['duration'], i['weight']) for i in api_data_result[service][0]['legs']]
for step_idx, (b, e) in enumerate(zip(waypoint_locations, waypoint_locations[1:])):
b_geohash = geo_encode(*b)
e_geohash = geo_encode(*e)
polyline_point_list = []
begin = False
tmp_loc_list = copy.deepcopy(loc_list)
precision = 8
while not begin:
for loc_idx, i in enumerate(tmp_loc_list):
poly_geohash = geo_encode(i[1], i[0], precision=precision)
if poly_geohash == b_geohash:
begin = True
if poly_geohash == e_geohash:
polyline_point_list.append((i[1], i[0]))
loc_list = loc_list[loc_idx:]
break
if begin:
polyline_point_list.append((i[1], i[0]))
precision -= 1
pack_data_result['steps'].append({
"distance": steps[step_idx][0],
"duration": steps[step_idx][1],
"polyline": loc_to_str(polyline_point_list),
"tmc": steps[step_idx][2],
"origin": b,
"destination": e,
})
del pack_data_result['code']
del pack_data_result[service]
del pack_data_result['waypoints']
return pack_data_result
def osrm_url(origin: list, destination: list, waypoints: list = None, service: str = 'route', profile: str = 'car', version: str = 'v1',
             batch_size: int = 100, host: str = None) -> list:
    """
    Build OSRM request URLs for the given points, batched by batch_size.
    All longitude/latitude pairs should use 'Mars' (GCJ-02) coordinates.
    :param origin:
    :param destination:
    :param waypoints:
    :param service:
    :param version:
    :param profile:
    :param batch_size:
    :param host:
    :return:
    """
if waypoints is None:
waypoints = []
if host is None:
host = register.osrm_host
loc_list = [origin] + waypoints + [destination]
urls = []
for idx in [(i, i + batch_size) for i in range(0, len(loc_list), batch_size)]:
tmp_points = loc_list[idx[0] - 1 if idx[0] > 1 else 0:idx[1]]
urls.append(f"{host}/{service}/{version}/{profile}/{loc_to_str(tmp_points)}")
return urls
def osrm_batch(origin: list, destination: list, waypoints: list = None, profile="car", version="v1", batch_size=100, host: str = None):
    """
    Build the OSRM URLs and issue the requests asynchronously.
    All longitude/latitude pairs should use 'Mars' (GCJ-02) coordinates.
    :param origin:
    :param destination:
    :param waypoints:
    :param profile:
    :param version:
    :param batch_size:
    :param host:
    :return:
    """
if host is None:
host = register.osrm_host
    service = "route"  # must always be "route" for this endpoint
urls = osrm_url(origin=origin, destination=destination, waypoints=waypoints, service=service, profile=profile, version=version, batch_size=batch_size, host=host)
data = futures_osrm(urls=urls)
if data['total_distance'] == 0 and geohash.encode(longitude=origin[0], latitude=origin[1], precision=8) != geohash.encode(longitude=destination[0], latitude=destination[1], precision=8):
raise ValueError("Osrm Data Abort!")
return data
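# Illustrative call (a sketch; assumes register.osrm_host points at a reachable
# OSRM instance and that coordinates are [lon, lat] pairs in the expected datum):
#   route = osrm_batch(origin=[114.05, 22.55], destination=[114.10, 22.60])
#   print(route['total_distance'], route['total_duration'])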
| 37.84434
| 190
| 0.567369
|
42b627cf4566be7c1549c7b495d42c1acfe0e71c
| 5,332
|
py
|
Python
|
nova/api/openstack/compute/flavors.py
|
stryng/nova
|
5dba32a23e67341bfdc03a00781ab491238e21f4
|
[
"Apache-2.0"
] | 1
|
2021-04-08T10:13:03.000Z
|
2021-04-08T10:13:03.000Z
|
nova/api/openstack/compute/flavors.py
|
stryng/nova
|
5dba32a23e67341bfdc03a00781ab491238e21f4
|
[
"Apache-2.0"
] | null | null | null |
nova/api/openstack/compute/flavors.py
|
stryng/nova
|
5dba32a23e67341bfdc03a00781ab491238e21f4
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack import common
from nova.api.openstack.compute.views import flavors as flavors_view
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.compute import flavors
from nova import exception
def make_flavor(elem, detailed=False):
elem.set('name')
elem.set('id')
if detailed:
elem.set('ram')
elem.set('disk')
elem.set('vcpus', xmlutil.EmptyStringSelector('vcpus'))
xmlutil.make_links(elem, 'links')
flavor_nsmap = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
class FlavorTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('flavor', selector='flavor')
make_flavor(root, detailed=True)
return xmlutil.MasterTemplate(root, 1, nsmap=flavor_nsmap)
class MinimalFlavorsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('flavors')
elem = xmlutil.SubTemplateElement(root, 'flavor', selector='flavors')
make_flavor(elem)
return xmlutil.MasterTemplate(root, 1, nsmap=flavor_nsmap)
class FlavorsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('flavors')
elem = xmlutil.SubTemplateElement(root, 'flavor', selector='flavors')
make_flavor(elem, detailed=True)
return xmlutil.MasterTemplate(root, 1, nsmap=flavor_nsmap)
class Controller(wsgi.Controller):
"""Flavor controller for the OpenStack API."""
_view_builder_class = flavors_view.ViewBuilder
@wsgi.serializers(xml=MinimalFlavorsTemplate)
def index(self, req):
"""Return all flavors in brief."""
limited_flavors = self._get_flavors(req)
return self._view_builder.index(req, limited_flavors)
@wsgi.serializers(xml=FlavorsTemplate)
def detail(self, req):
"""Return all flavors in detail."""
limited_flavors = self._get_flavors(req)
req.cache_db_flavors(limited_flavors)
return self._view_builder.detail(req, limited_flavors)
@wsgi.serializers(xml=FlavorTemplate)
def show(self, req, id):
"""Return data about the given flavor id."""
try:
flavor = flavors.get_instance_type_by_flavor_id(id)
req.cache_db_flavor(flavor)
except exception.NotFound:
raise webob.exc.HTTPNotFound()
return self._view_builder.show(req, flavor)
def _get_is_public(self, req):
"""Parse is_public into something usable."""
is_public = req.params.get('is_public', None)
if is_public is None:
# preserve default value of showing only public flavors
return True
elif is_public is True or \
is_public.lower() in ['t', 'true', 'yes', '1']:
return True
elif is_public is False or \
is_public.lower() in ['f', 'false', 'no', '0']:
return False
elif is_public.lower() == 'none':
# value to match all flavors, ignore is_public
return None
else:
msg = _('Invalid is_public filter [%s]') % req.params['is_public']
raise webob.exc.HTTPBadRequest(explanation=msg)
def _get_flavors(self, req):
"""Helper function that returns a list of flavor dicts."""
filters = {}
context = req.environ['nova.context']
if context.is_admin:
# Only admin has query access to all flavor types
filters['is_public'] = self._get_is_public(req)
else:
filters['is_public'] = True
filters['disabled'] = False
if 'minRam' in req.params:
try:
filters['min_memory_mb'] = int(req.params['minRam'])
except ValueError:
msg = _('Invalid minRam filter [%s]') % req.params['minRam']
raise webob.exc.HTTPBadRequest(explanation=msg)
if 'minDisk' in req.params:
try:
filters['min_root_gb'] = int(req.params['minDisk'])
except ValueError:
msg = _('Invalid minDisk filter [%s]') % req.params['minDisk']
raise webob.exc.HTTPBadRequest(explanation=msg)
limited_flavors = flavors.get_all_types(context, filters=filters)
flavors_list = limited_flavors.values()
sorted_flavors = sorted(flavors_list,
key=lambda item: item['flavorid'])
limited_flavors = common.limited_by_marker(sorted_flavors, req)
return limited_flavors
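# Example (illustrative): "GET /flavors/detail?minRam=2048&minDisk=20" narrows
# the listing to flavors with at least 2048 MB of RAM and a 20 GB root disk;
# non-admin callers additionally see only public, enabled flavors.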
def create_resource():
return wsgi.Resource(Controller())
| 35.546667
| 78
| 0.652101
|
fc59b77d1d807915a342dd5c515a1ebe06412148
| 1,513
|
py
|
Python
|
pyshadowsocks/packet/datagram_packer.py
|
FTwOoO/pyShadowsocks
|
452323e30c4b97d322cbb67e9bbc7c4549e67b5f
|
[
"MIT"
] | 21
|
2016-08-01T06:48:01.000Z
|
2021-04-05T18:20:53.000Z
|
pyshadowsocks/packet/datagram_packer.py
|
zen-of-proxy/pyShadowsocks
|
452323e30c4b97d322cbb67e9bbc7c4549e67b5f
|
[
"MIT"
] | 2
|
2016-07-23T02:33:17.000Z
|
2018-03-13T09:50:02.000Z
|
pyshadowsocks/packet/datagram_packer.py
|
FTwOoO/pyShadowsocks
|
452323e30c4b97d322cbb67e9bbc7c4549e67b5f
|
[
"MIT"
] | 7
|
2017-04-22T16:53:53.000Z
|
2021-02-08T06:33:05.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: booopooob@gmail.com
#
# Info:
#
#
from packet.packet_header import PacketHeader
class DatagramPacker(object):
    def __init__(self, header_type=None):
        self.header_type = header_type
        # byte counters and reassembly buffer used by pack()/unpack()
        self.in_bytes = 0
        self.out_bytes = 0
        self.data_buffer = b''
def pack(self, header: PacketHeader = None, data=None):
"""return encode or compress data"""
encoded_data = b''
if header:
encoded_data += header.to_bytes()
if data:
encoded_data += data
if len(encoded_data) > 0:
self.in_bytes += len(encoded_data)
self.out_bytes += len(encoded_data)
return encoded_data
def unpack(self, header: PacketHeader = None, data=None):
"""return header and raw content"""
self.in_bytes += len(data)
if header is not None:
all_data = self.data_buffer + data
try:
header_length = header.from_bytes(all_data)
except ValueError:
# need more data
self.data_buffer = all_data
return None, None
except Exception as ex:
# TODO:do something
return None, None
else:
self.data_buffer = b''
out_data = all_data[header_length:]
self.out_bytes += len(all_data)
return header, out_data
else:
out_data = self.data_buffer + data
self.out_bytes += len(out_data)
return None, out_data
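# Usage sketch (assumes a concrete PacketHeader subclass, here called MyHeader,
# whose from_bytes() raises ValueError while the header is still incomplete):
#   packer = DatagramPacker()
#   wire = packer.pack(header=MyHeader(), data=b'payload')
#   hdr, payload = packer.unpack(header=MyHeader(), data=wire)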
| 27.017857
| 61
| 0.547918
|
d2b86ab173254e8698faa3d138f606052f8462f2
| 910
|
py
|
Python
|
Lectures/revision lecture/5_loops_revision_update_variable_using_if.py
|
CLAHRCWessex/math6005-python
|
934d3eeccf7a2a08da995dcd2f441304115578c6
|
[
"MIT"
] | 2
|
2019-11-25T14:02:23.000Z
|
2020-04-27T13:39:35.000Z
|
Lectures/revision lecture/5_loops_revision_update_variable_using_if.py
|
CLAHRCWessex/math6005-python
|
934d3eeccf7a2a08da995dcd2f441304115578c6
|
[
"MIT"
] | null | null | null |
Lectures/revision lecture/5_loops_revision_update_variable_using_if.py
|
CLAHRCWessex/math6005-python
|
934d3eeccf7a2a08da995dcd2f441304115578c6
|
[
"MIT"
] | 3
|
2019-01-30T18:30:55.000Z
|
2022-02-05T18:28:34.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
MATH6005: Loops Revision
Example: Looping over a list to search for a largest or smallest value
We need to make use of if-statements to keep track of 'if'
an update to a variable is required.
"""
# =============================================================================
# The function below finds the smallest value in an array
# =============================================================================
def find_smallest_value(data):
smallest = data[0]
for i in range(1, len(data)):
#we only update 'smallest' if we have found
#a new minimum value in the list
if data[i] < smallest:
smallest = data[i]
return smallest
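    # For reference, the built-in min(data) returns the same value in one call;
    # the explicit loop is written out here to illustrate the
    # update-a-variable-with-if pattern.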
def main():
data = [10.6, 32.1, 4.2, 45.0, 8.6]
smallest = find_smallest_value(data)
print(smallest)
if __name__ == '__main__':
main()
| 25.277778
| 79
| 0.517582
|
5303d243acccde16f65b604899852e9dfef3fd74
| 5,317
|
py
|
Python
|
docs/conf.py
|
Aniket979Kanodia/primitive
|
70148ffa508c26a55e09110ec5d435d774354895
|
[
"Apache-2.0"
] | 1
|
2018-01-13T03:15:56.000Z
|
2018-01-13T03:15:56.000Z
|
docs/conf.py
|
Aniket979Kanodia/primitive
|
70148ffa508c26a55e09110ec5d435d774354895
|
[
"Apache-2.0"
] | 17
|
2018-09-22T13:48:58.000Z
|
2018-10-03T21:17:37.000Z
|
docs/conf.py
|
Aniket979Kanodia/primitive
|
70148ffa508c26a55e09110ec5d435d774354895
|
[
"Apache-2.0"
] | 1
|
2021-06-30T15:09:46.000Z
|
2021-06-30T15:09:46.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# primitivefxmvc documentation build configuration file, created by
# sphinx-quickstart on Thu Nov 23 13:17:10 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.ifconfig',
'sphinx.ext.githubpages', 'sphinx.ext.autosectionlabel']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'primitivefxmvc'
copyright = '2017, Eray Erdin'
author = 'Eray Erdin'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.1-alpha'
# The full version, including alpha/beta/rc tags.
release = '0.1.1-alpha'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
import sphinx_rtd_theme
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'globaltoc.html',
'relations.html', # needs 'show_related': True theme option to display
'sourcelink.html',
'searchbox.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'primitivefxmvcdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'primitivefxmvc.tex', 'primitivefxmvc Documentation',
'Eray Erdin', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'primitivefxmvc', 'primitivefxmvc Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'primitivefxmvc', 'primitivefxmvc Documentation',
author, 'primitivefxmvc', 'One line description of project.',
'Miscellaneous'),
]
| 30.557471
| 79
| 0.684597
|
36bdb2e6c906ba035db37c2eff4027850af212a8
| 1,272
|
py
|
Python
|
moi/indicators/migrations/0002_auto_20151229_2332.py
|
Ecotrust/F2S-MOI
|
aeb38942d6539c50f252ea3ff6fbff07aabc5088
|
[
"Apache-2.0"
] | null | null | null |
moi/indicators/migrations/0002_auto_20151229_2332.py
|
Ecotrust/F2S-MOI
|
aeb38942d6539c50f252ea3ff6fbff07aabc5088
|
[
"Apache-2.0"
] | 33
|
2015-05-06T00:47:20.000Z
|
2016-11-08T21:13:44.000Z
|
moi/indicators/migrations/0002_auto_20151229_2332.py
|
Ecotrust/F2S-MOI
|
aeb38942d6539c50f252ea3ff6fbff07aabc5088
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-29 23:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0010_change_on_delete_behaviour'),
('indicators', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='indicator',
name='displayTitle',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
migrations.AddField(
model_name='indicator',
name='image',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='indicator',
name='main_content',
field=wagtail.wagtailcore.fields.RichTextField(blank=True, default=None, null=True),
),
migrations.AddField(
model_name='indicator',
name='sub_title',
field=wagtail.wagtailcore.fields.RichTextField(blank=True, default=None, null=True),
),
]
| 32.615385
| 163
| 0.632862
|
396725c20117532b0ba89f769a7fe85a85fa926e
| 594
|
py
|
Python
|
_unittests/ut_helpgen/test_helper_helpgen.py
|
Pandinosaurus/pyquickhelper
|
326276f656cf88989e4d0fcd006ada0d3735bd9e
|
[
"MIT"
] | null | null | null |
_unittests/ut_helpgen/test_helper_helpgen.py
|
Pandinosaurus/pyquickhelper
|
326276f656cf88989e4d0fcd006ada0d3735bd9e
|
[
"MIT"
] | null | null | null |
_unittests/ut_helpgen/test_helper_helpgen.py
|
Pandinosaurus/pyquickhelper
|
326276f656cf88989e4d0fcd006ada0d3735bd9e
|
[
"MIT"
] | null | null | null |
"""
@brief test log(time=8s)
@author Xavier Dupre
"""
import unittest
from pyquickhelper.loghelper.flog import fLOG
from pyquickhelper.helpgen.utils_sphinx_config import locate_image_documentation, NbImage
from IPython.core.display import Image
class TestHelperHelpGen(unittest.TestCase):
def test_NbImage(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
r = NbImage("completion.png")
assert isinstance(r, Image)
if __name__ == "__main__":
unittest.main()
| 20.482759
| 89
| 0.688552
|
191653264c00da00f325e066dc53b74bbf16ab41
| 757
|
py
|
Python
|
dags/DagFetcher.py
|
cicerojmm/aws-ecs-airflow
|
665d859760c2231a692246b5b8ad1df92ee09c93
|
[
"MIT"
] | 1
|
2022-03-29T00:24:12.000Z
|
2022-03-29T00:24:12.000Z
|
dags/DagFetcher.py
|
cicerojmm/aws-ecs-airflow
|
665d859760c2231a692246b5b8ad1df92ee09c93
|
[
"MIT"
] | null | null | null |
dags/DagFetcher.py
|
cicerojmm/aws-ecs-airflow
|
665d859760c2231a692246b5b8ad1df92ee09c93
|
[
"MIT"
] | null | null | null |
from airflow.models import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.utils.dates import days_ago
from airflow.models import Variable
airflow_bucket = Variable.get('airflow_bucket_dags')
airflow_home = Variable.get('airflow_home_dags')
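# Both paths are read from Airflow Variables, so the bucket and local DAG
# directory can be changed without redeploying this file.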
args = {
'owner': 'Airflow',
'start_date': days_ago(0),
'depends_on_past': False
}
with DAG(dag_id='DagFetcher',
default_args=args,
schedule_interval='*/5 * * * *',
tags=['example'],
catchup=False,
is_paused_upon_creation=False) as dag:
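    # Sync the S3 DAG bucket into the local Airflow home every 5 minutes;
    # --delete removes local files that no longer exist in the bucket.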
tasks = BashOperator(task_id="folder",
bash_command=f"aws s3 sync {airflow_bucket} {airflow_home} --delete",
dag=dag)
| 27.035714
| 93
| 0.675033
|
f943a11e9408079b336950ed516590e4cb2d9a96
| 1,619
|
py
|
Python
|
tests/core/test_merkle_set.py
|
Flax-Network/flax-light-wallet
|
1745850a28a47bbbc4b5f3d460f35b34b4ed4f25
|
[
"Apache-2.0"
] | 1
|
2021-12-02T14:38:11.000Z
|
2021-12-02T14:38:11.000Z
|
tests/core/test_merkle_set.py
|
Flax-Network/flax-light-wallet
|
1745850a28a47bbbc4b5f3d460f35b34b4ed4f25
|
[
"Apache-2.0"
] | null | null | null |
tests/core/test_merkle_set.py
|
Flax-Network/flax-light-wallet
|
1745850a28a47bbbc4b5f3d460f35b34b4ed4f25
|
[
"Apache-2.0"
] | 6
|
2021-11-21T00:38:27.000Z
|
2021-12-03T01:25:19.000Z
|
import asyncio
import itertools
import pytest
from flaxlight.util.merkle_set import MerkleSet, confirm_included_already_hashed
from tests.setup_nodes import bt
@pytest.fixture(scope="module")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
class TestMerkleSet:
@pytest.mark.asyncio
async def test_basics(self):
num_blocks = 20
blocks = bt.get_consecutive_blocks(num_blocks)
merkle_set = MerkleSet()
merkle_set_reverse = MerkleSet()
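        # The same reward coins are inserted into the two sets in opposite
        # orders so the final root comparison can verify order independence.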
coins = list(itertools.chain.from_iterable(map(lambda block: block.get_included_reward_coins(), blocks)))
# excluded coin (not present in 'coins' and Merkle sets)
excl_coin = coins.pop()
for coin in reversed(coins):
merkle_set_reverse.add_already_hashed(coin.name())
for coin in coins:
merkle_set.add_already_hashed(coin.name())
for coin in coins:
result, proof = merkle_set.is_included_already_hashed(coin.name())
assert result is True
result_excl, proof_excl = merkle_set.is_included_already_hashed(excl_coin.name())
assert result_excl is False
validate_proof = confirm_included_already_hashed(merkle_set.get_root(), coin.name(), proof)
validate_proof_excl = confirm_included_already_hashed(merkle_set.get_root(), excl_coin.name(), proof_excl)
assert validate_proof is True
assert validate_proof_excl is False
# Test if the order of adding items changes the outcome
assert merkle_set.get_root() == merkle_set_reverse.get_root()
| 34.446809
| 118
| 0.697344
|
3ef50be98944e9e332a7173ddde86c957e8c987a
| 202
|
py
|
Python
|
ecommerce/users/tests/test_models.py
|
sorwarduet/ecommerce
|
d1e0818d2b8372d668b672a5f0737900d144c38a
|
[
"MIT"
] | null | null | null |
ecommerce/users/tests/test_models.py
|
sorwarduet/ecommerce
|
d1e0818d2b8372d668b672a5f0737900d144c38a
|
[
"MIT"
] | null | null | null |
ecommerce/users/tests/test_models.py
|
sorwarduet/ecommerce
|
d1e0818d2b8372d668b672a5f0737900d144c38a
|
[
"MIT"
] | null | null | null |
import pytest
from ecommerce.users.models import User
pytestmark = pytest.mark.django_db
def test_user_get_absolute_url(user: User):
assert user.get_absolute_url() == f"/users/{user.username}/"
| 20.2
| 64
| 0.772277
|
add63f96d39f5140700e9ed0bcc835f045153122
| 5,851
|
py
|
Python
|
data/utils_optuna_trials/hulm_sample_data_utils.py
|
humanlab/HaRT
|
ab5da16cb1bfdadc3fb77a99f0fda8123ea1fe02
|
[
"Apache-2.0"
] | null | null | null |
data/utils_optuna_trials/hulm_sample_data_utils.py
|
humanlab/HaRT
|
ab5da16cb1bfdadc3fb77a99f0fda8123ea1fe02
|
[
"Apache-2.0"
] | null | null | null |
data/utils_optuna_trials/hulm_sample_data_utils.py
|
humanlab/HaRT
|
ab5da16cb1bfdadc3fb77a99f0fda8123ea1fe02
|
[
"Apache-2.0"
] | null | null | null |
import os
import pandas as pd
from sqlalchemy import create_engine
from sqlalchemy.engine.url import URL
from data.utils_hart.hulm_data_utils import transform_data, group_data
def get_conn(data_args):
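    # Build a MySQL DSN; credentials are not inlined here but read from the
    # user's ~/.my.cnf via the read_default_file query option below.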
myDB = URL(drivername='mysql', host=data_args.hostname,
database=data_args.db, query={'read_default_file': '~/.my.cnf', 'charset': 'utf8mb4'})
engine = create_engine(myDB, encoding='latin1')
conn = engine.connect()
return conn
'''
To be run only once! This will save the sampled users' IDs in a csv that will be used for all following optuna trials.
'''
def sample_train_users(logger, table, data_args, filename):
logger.info("Getting {} sampled train users from table:{} in {} database, to run optuna trials.".format(str(data_args.num_users_for_optuna), table, data_args.db))
conn = get_conn(data_args)
select_clause = 'select distinct user_dataset_id from ' + table
order_clause = ' order by rand() limit ' + str(data_args.num_users_for_optuna)
dev_filter_column = 'is_oosusr_dev'
test_filter_column = 'is_oosusr_test'
source_filter_column = 'dataset '
source_not_included = "'fb'"
where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ') and ' + dev_filter_column + '=0' + ' and ' + test_filter_column + '=0'
stmt = select_clause + where_clause + order_clause
results = conn.execute(stmt)
data = pd.DataFrame(results.fetchall())
data.columns = results.keys()
data.to_csv(filename, index=False)
conn.close()
return data
def get_data(logger, table, data_args, data_type, sampled_users):
logger.info("Getting data from table:{} in {} database".format(table, data_args.db))
conn = get_conn(data_args)
select_clause = 'select user_dataset_id, message_id, message, updated_time from ' + table
order_clause = ' order by user_dataset_id, updated_time'
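    # __debug__ is True unless Python runs with -O, so non-optimized runs cap
    # each query at 100 rows for faster iteration.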
limit_clause = '' if not __debug__ else ' limit 100'
source_filter_column = 'dataset '
source_not_included = "'fb'"
if data_type=='train':
if "en_non_oosmsgs" in table:
dev_filter_column = 'is_oosusr_dev'
test_filter_column = 'is_oosusr_test'
users_id_column = 'user_dataset_id'
where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ') and ' + dev_filter_column + '=0' + ' and ' + test_filter_column + '=0' + ' and ' + users_id_column + ' in (' + sampled_users + ')'
stmt = select_clause + where_clause + order_clause + limit_clause
else:
where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ')'
stmt = select_clause + where_clause + order_clause + limit_clause
results = conn.execute(stmt)
elif data_type=='dev':
if 'en_non_oosmsgs' in table:
filter_column = 'is_oosusr_dev'
where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ') and ' + filter_column + '=1'
stmt = select_clause + where_clause + order_clause + limit_clause
elif 'en_oosmsgs' in table:
filter_column = 'is_oosmsg_dev'
where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ') and ' + filter_column + '=1'
stmt = select_clause + where_clause + order_clause + limit_clause
else:
where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ')'
stmt = select_clause + where_clause + order_clause + limit_clause
results = conn.execute(stmt)
    elif data_type=='test':
        if 'en_non_oosmsgs' in table:
            filter_column = 'is_oosusr_test'
            where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ') and ' + filter_column + '=1'
            stmt = select_clause + where_clause + order_clause + limit_clause
        elif 'en_oosmsgs' in table:
            filter_column = 'is_oosmsg_test'
            where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ') and ' + filter_column + '=1'
            stmt = select_clause + where_clause + order_clause + limit_clause
        else:
            # Fall back to the unfiltered query so 'stmt' is always bound.
            where_clause = ' where ' + source_filter_column + 'not in (' + source_not_included + ')'
            stmt = select_clause + where_clause + order_clause + limit_clause
        results = conn.execute(stmt)
data = pd.DataFrame(results.fetchall())
data.columns = results.keys()
data = data[data.message.notnull()]
conn.close()
return data
def sample_users_if_train(logger, table, data_args):
dirname = os.path.dirname(__file__)
filename = os.path.join(dirname, '../datasets/pt_sampled_users.csv')
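    # Reuse the cached sample when the CSV exists and its size matches the
    # requested user count; otherwise re-sample and overwrite the file.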
try:
sampled_users = pd.read_csv(filename)
if sampled_users.size != data_args.num_users_for_optuna:
sampled_users = sample_train_users(logger, table, data_args, filename)
except FileNotFoundError:
sampled_users = sample_train_users(logger, table, data_args, filename)
sampled_users_string = ', '.join(["'{}'".format(e) for e in sampled_users['user_dataset_id'].to_list()])
return sampled_users_string
def load_dataset(logger, tokenizer, table, block_size, max_blocks, data_args, data_type, disable_hulm_batching):
sampled_users_string = sample_users_if_train(logger, table, data_args) if data_type=='train' else ''
data = get_data(logger, table, data_args, data_type, sampled_users_string)
data = transform_data(logger, tokenizer, data, block_size)
logger.info('************** Block size = {} *************'.format(block_size))
if not disable_hulm_batching:
return group_data(data, max_blocks, logger)
else:
instances, uncut_num_blocks = group_data(data, max_blocks, logger)
flat_list = [item for sublist in instances for item in sublist if item is not None]
return flat_list, uncut_num_blocks
| 50.008547
| 230
| 0.667065
|
ade7b9ca40b92e1d0a0842cc3ef1a99b348e1347
| 2,358
|
py
|
Python
|
pupa/cli/commands/dbinit.py
|
azban/pupa
|
158378e19bcc322796aa4fb766784cbd4fd08413
|
[
"BSD-3-Clause"
] | 62
|
2015-01-08T05:46:46.000Z
|
2022-01-31T03:27:14.000Z
|
pupa/cli/commands/dbinit.py
|
azban/pupa
|
158378e19bcc322796aa4fb766784cbd4fd08413
|
[
"BSD-3-Clause"
] | 199
|
2015-01-10T03:19:37.000Z
|
2021-05-21T20:34:58.000Z
|
pupa/cli/commands/dbinit.py
|
azban/pupa
|
158378e19bcc322796aa4fb766784cbd4fd08413
|
[
"BSD-3-Clause"
] | 35
|
2015-03-09T19:41:42.000Z
|
2021-06-22T20:01:35.000Z
|
import django
from django.db import connection
from django.core.management import call_command
from .base import BaseCommand
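# copy_tmp/restore_from_tmp implement a simple preserve-and-restore pattern:
# rows are copied into a tmp_* table before the schema is dropped, then copied
# back once migrations have recreated the original tables.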
def copy_tmp(tablename):
cursor = connection.cursor()
print('copying data from table ' + tablename)
cursor.execute("DROP TABLE IF EXISTS tmp_{t};".format(t=tablename))
cursor.execute("CREATE TABLE tmp_{t} (LIKE {t});".format(t=tablename))
cursor.execute("INSERT INTO tmp_{t} SELECT * FROM {t};".format(t=tablename))
def restore_from_tmp(tablename):
print('restoring data to table ' + tablename)
cursor = connection.cursor()
cursor.execute("INSERT INTO {t} SELECT * FROM tmp_{t};".format(t=tablename))
cursor.execute("DROP TABLE IF EXISTS tmp_{t};".format(t=tablename))
def drop_tables():
tables = connection.introspection.table_names()
cursor = connection.cursor()
for table in tables:
if table.startswith(('opencivicdata_', 'pupa_')):
print('dropping table ' + table)
cursor.execute("DROP TABLE IF EXISTS {} CASCADE;".format(table))
cursor.execute("DELETE FROM django_migrations WHERE app='core';")
cursor.execute("DELETE FROM django_migrations WHERE app='legislative';")
cursor.execute("DELETE FROM django_migrations WHERE app='pupa';")
class Command(BaseCommand):
name = 'dbinit'
help = 'initialize a pupa database'
def add_args(self):
self.add_argument('--reset', action='store_true', default=False,
help='reset entire database - USE WITH CAUTION')
self.add_argument('--partial-reset', action='store_true', default=False,
help='reset entire database, except for divisions - USE WITH CAUTION')
self.add_argument(type=str, dest='country', nargs='+',
help='country to load divisions for')
def handle(self, args, other):
django.setup()
        if args.partial_reset:
            copy_tmp('opencivicdata_division')
            drop_tables()
        elif args.reset:
            drop_tables()
call_command('migrate', interactive=False)
if args.partial_reset:
restore_from_tmp('opencivicdata_division')
else:
for country in args.country:
call_command('loaddivisions', country)
| 36.276923
| 96
| 0.647159
|
87b306af7991f1dc913ce49227f425a9318f1cd3
| 743
|
py
|
Python
|
redirect/tests.py
|
seancallaway/shrtn
|
730cf0c0f70617a88a431c47fc6dd4f83b6c6461
|
[
"MIT"
] | 2
|
2020-12-02T17:36:02.000Z
|
2021-03-29T15:09:04.000Z
|
redirect/tests.py
|
seancallaway/shrtn
|
730cf0c0f70617a88a431c47fc6dd4f83b6c6461
|
[
"MIT"
] | null | null | null |
redirect/tests.py
|
seancallaway/shrtn
|
730cf0c0f70617a88a431c47fc6dd4f83b6c6461
|
[
"MIT"
] | null | null | null |
from django.shortcuts import reverse
from django.test import TestCase
from redirect.models import Redirect, SLUG_LENGTH
class RedirectModelTests(TestCase):
def setUp(self) -> None:
self.redirect = Redirect.objects.create(
target='https://mail.google.com/'
)
def test_redirect_creation(self) -> None:
# Ensure slug was created and is of proper length
self.assertEqual(len(self.redirect.slug), SLUG_LENGTH)
def test_absolute_url(self) -> None:
self.assertEqual(self.redirect.get_absolute_url(), reverse('redirect', kwargs={'slug': self.redirect.slug}))
def test_str_repr(self) -> None:
self.assertEqual(f'Redirect to {self.redirect.target}', str(self.redirect))
| 32.304348
| 116
| 0.69852
|
1f9190c838c22954346476e35c5c50fabf5bffa6
| 11,218
|
py
|
Python
|
release/stubs.min/Autodesk/Revit/DB/Analysis_parts/SpatialFieldManager.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 182
|
2017-06-27T02:26:15.000Z
|
2022-03-30T18:53:43.000Z
|
release/stubs.min/Autodesk/Revit/DB/Analysis_parts/SpatialFieldManager.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 28
|
2017-06-27T13:38:23.000Z
|
2022-03-15T11:19:44.000Z
|
release/stubs.min/Autodesk/Revit/DB/Analysis_parts/SpatialFieldManager.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 67
|
2017-06-28T09:43:59.000Z
|
2022-03-20T21:17:10.000Z
|
class SpatialFieldManager(Element,IDisposable):
"""
Exposes all API for an external analysis application.
Its primary role is creation,deletion and modification of SpatialFieldElement elements.
"""
def AddSpatialFieldPrimitive(self,*__args):
"""
AddSpatialFieldPrimitive(self: SpatialFieldManager,reference: Reference) -> int
Creates an empty analysis results primitive associated with a reference.
reference: Reference pointing to the curve or face to be associated with the primitive
Returns: Unique index of primitive for future references
AddSpatialFieldPrimitive(self: SpatialFieldManager) -> int
Creates empty analysis results primitive not associated with any geometry
element
Returns: Unique index of primitive for future references
AddSpatialFieldPrimitive(self: SpatialFieldManager,reference: Reference,hidingMode: SpatialFieldPrimitiveHideMode) -> int
Creates an empty analysis results primitive associated with a reference,with
the option to control how the reference element is hidden.
reference: Reference pointing to the curve or face to be associated with the primitive
hidingMode: The mode used to hide the original model element
Returns: Unique index of primitive for future references
AddSpatialFieldPrimitive(self: SpatialFieldManager,curve: Curve,trf: Transform) -> int
Creates empty analysis results primitive associated with a curve and a
transform.
curve: Curve to be associated with the primitive.
%curve% does NOT correspond to
actual Revit geometry,i.e. it cannot be associated with reference;
otherwise the other overload of the method must be used (taking "reference" as
the input)
trf: Conformal Transform to be applied to %curve%.
Returns: Unique index of primitive for future references
AddSpatialFieldPrimitive(self: SpatialFieldManager,face: Face,trf: Transform) -> int
Creates empty analysis results primitive associated with a face and a transform.
face: Face to be associated with the primitive
trf: Conformal Transform to be applied to %face%
Returns: Unique index of primitive for future references
"""
pass
def Clear(self):
"""
Clear(self: SpatialFieldManager)
Clear all analysis results managed by this manager object
"""
pass
@staticmethod
def CreateSpatialFieldManager(view,numberOfMeasurements):
"""
CreateSpatialFieldManager(view: View,numberOfMeasurements: int) -> SpatialFieldManager
Factory method - creates manager object for the given view
view: View for which manager object is created or retrieved
numberOfMeasurements: Total number of measurements in the calculated results.
This number defines
the length of value arrays in ValueAtPoint objects
Returns: Manager object for the view passed in the argument
"""
pass
def Dispose(self):
""" Dispose(self: Element,A_0: bool) """
pass
def getBoundingBox(self,*args):
""" getBoundingBox(self: Element,view: View) -> BoundingBoxXYZ """
pass
def GetLegend(self):
"""
GetLegend(self: SpatialFieldManager) -> AnalysisDisplayLegend
Returns legend element or NULL
Returns: The legend element or NULL
"""
pass
def GetMaximum(self,resultIndex,rawValue):
"""
GetMaximum(self: SpatialFieldManager,resultIndex: int,rawValue: bool) -> float
Calculates the maximum value for all primitives
resultIndex: Index of result schema
rawValue: If true returned value is NOT multiplied by the current result's units
multiplier,otherwise it IS
Returns: Resulting maximum value
"""
pass
def GetMinimum(self,resultIndex,rawValue):
"""
GetMinimum(self: SpatialFieldManager,resultIndex: int,rawValue: bool) -> float
Calculates the minimum value for all primitives
resultIndex: Index of result schema
rawValue: If true returned value is NOT multiplied by the current result's units
multiplier,otherwise it IS
Returns: Resulting minimum value
"""
pass
def GetRegisteredResults(self):
"""
GetRegisteredResults(self: SpatialFieldManager) -> IList[int]
Returns an array of indices of all registered results
"""
pass
def GetResultSchema(self,idx):
"""
GetResultSchema(self: SpatialFieldManager,idx: int) -> AnalysisResultSchema
Returns result schema by index
idx: Index of registered result schema
"""
pass
@staticmethod
def GetSpatialFieldManager(view):
"""
GetSpatialFieldManager(view: View) -> SpatialFieldManager
Retrieves manager object for the given view or returns NULL
view: View for which manager object is retrieved
Returns: Manager object for the view passed in the argument
"""
pass
def IsResultSchemaNameUnique(self,name,resultIndexToSkip):
"""
IsResultSchemaNameUnique(self: SpatialFieldManager,name: str,resultIndexToSkip: int) -> bool
Verify the uniqueness of the name among all registered result schemas.
name: Name to verify uniqueness of.
resultIndexToSkip: Index of result (e.g. to be replaced) which names should not count for
uniqueness; negative number means nothing is excluded from comparison.
Returns: True if name is unique,false otherwise.
"""
pass
@staticmethod
def IsTextTypeIdValid(textTypeId,doc):
"""
IsTextTypeIdValid(textTypeId: ElementId,doc: Document) -> bool
Verify if text type id is valid.
textTypeId: Text type id to be validated.
doc: Document for which %textTypeId% is validated.
Returns: True if text type id is valid,false otherwise.
"""
pass
def RegisterResult(self,resultSchema):
"""
RegisterResult(self: SpatialFieldManager,resultSchema: AnalysisResultSchema) -> int
Registers result and assigns it a unique result index
resultSchema: Result schema to be registered
Returns: Unique index assigned to the result
"""
pass
def ReleaseUnmanagedResources(self,*args):
""" ReleaseUnmanagedResources(self: Element,disposing: bool) """
pass
def RemoveSpatialFieldPrimitive(self,idx):
"""
RemoveSpatialFieldPrimitive(self: SpatialFieldManager,idx: int)
Removes analysis results primitive identified by the unique index
idx: Unique index identifying the primitive
"""
pass
def setElementType(self,*args):
""" setElementType(self: Element,type: ElementType,incompatibleExceptionMessage: str) """
pass
def SetMeasurementDescriptions(self,measurementDescriptions):
""" SetMeasurementDescriptions(self: SpatialFieldManager,measurementDescriptions: IList[str]) """
pass
def SetMeasurementNames(self,measurementNames):
""" SetMeasurementNames(self: SpatialFieldManager,measurementNames: IList[str]) """
pass
def SetResultSchema(self,idx,resultSchema):
"""
SetResultSchema(self: SpatialFieldManager,idx: int,resultSchema: AnalysisResultSchema)
Sets a new value for an existing result schema in the result registry
idx: Index of registered result schema
resultSchema: Result schema replacing the existent one
"""
pass
def UpdateSpatialFieldPrimitive(self,idx,fieldDomainPoints,fieldValues,resultIndex):
"""
UpdateSpatialFieldPrimitive(self: SpatialFieldManager,idx: int,fieldDomainPoints: FieldDomainPoints,fieldValues: FieldValues,resultIndex: int)
Populates analysis results data (or replaces the existing data) in the existing
primitive identified by the unique index
idx: Unique index identifying the primitive
fieldDomainPoints: Set of domain points.
If the new set of domain points is supplied,all
previously supplied domain points and field values for all results are removed
from the primitive.
If %fieldDomainPoints% is null only fieldValues are
updated
fieldValues: Set of data values.
Number of values in fieldValues must coincide with the
number of points in fieldDomainPoints
resultIndex: Unique index identifying the result schema
"""
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
CurrentMeasurement=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Stores the currently displayed measurement
Get: CurrentMeasurement(self: SpatialFieldManager) -> int
Set: CurrentMeasurement(self: SpatialFieldManager)=value
"""
LegendPosition=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Stores current position of analysis results legend element in view
Get: LegendPosition(self: SpatialFieldManager) -> XYZ
Set: LegendPosition(self: SpatialFieldManager)=value
"""
LegendShowConfigurationName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""If true legend contains analysis configuration name.
Get: LegendShowConfigurationName(self: SpatialFieldManager) -> bool
Set: LegendShowConfigurationName(self: SpatialFieldManager)=value
"""
LegendShowDescription=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""If true legend contains analysis description.
Get: LegendShowDescription(self: SpatialFieldManager) -> bool
Set: LegendShowDescription(self: SpatialFieldManager)=value
"""
LegendTextTypeId=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Stores element id of text associated with common (result-independent) part of legend in view.
Get: LegendTextTypeId(self: SpatialFieldManager) -> ElementId
Set: LegendTextTypeId(self: SpatialFieldManager)=value
"""
NumberOfMeasurements=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Stores the total number of measurements
Get: NumberOfMeasurements(self: SpatialFieldManager) -> int
"""
ResultsVisibleInView=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Enables analysis results visibility in the view.
Get: ResultsVisibleInView(self: SpatialFieldManager) -> bool
Set: ResultsVisibleInView(self: SpatialFieldManager)=value
"""
UseRangeForAllMeasurements=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Governs how minimum and maximum values (the data range) are calculated.
Get: UseRangeForAllMeasurements(self: SpatialFieldManager) -> bool
Set: UseRangeForAllMeasurements(self: SpatialFieldManager)=value
"""
| 24.124731
| 215
| 0.717864
|
b93c48fb34e165465f94848f00504f4b25650638
| 588
|
py
|
Python
|
Actividades/LimpiezaRobot/cleaning_test.py
|
joeirigoyen/MultiAgent-Projects
|
c3112dc999e7db3b2295cddbf5d7660896ead551
|
[
"MIT"
] | null | null | null |
Actividades/LimpiezaRobot/cleaning_test.py
|
joeirigoyen/MultiAgent-Projects
|
c3112dc999e7db3b2295cddbf5d7660896ead551
|
[
"MIT"
] | null | null | null |
Actividades/LimpiezaRobot/cleaning_test.py
|
joeirigoyen/MultiAgent-Projects
|
c3112dc999e7db3b2295cddbf5d7660896ead551
|
[
"MIT"
] | null | null | null |
#%%
from cleaning_model import CleaningModel
MAX_STEPS = 800
model = CleaningModel(10, 10, 20, 50)
for i in range(MAX_STEPS):
model.step()
required_steps = model.ticks
cleaned_cells = model.dirtycell_datacollector.get_model_vars_dataframe()
total_moves = model.dirtycell_datacollector.get_agent_vars_dataframe()
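# Select the per-agent values recorded at the final step of the run.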
end_moves = total_moves.xs(MAX_STEPS - 1, level="Step")
cleaned_cells.plot()
end_moves.plot(kind="bar")
print(f"Completion steps: {required_steps}")
print(f"Clean cell percentage: {(model.total_cells - model.dirty_cells_count) / (model.total_cells) * 100}%")
# %%
| 24.5
| 109
| 0.767007
|
d6f11a9990152426431d94e5c01fc165d9e54bb5
| 295
|
py
|
Python
|
app/setdb.py
|
gregdek/factoryman
|
c4056e6b23ca6a6bb50d5e1e768d3da322592fb4
|
[
"Apache-2.0"
] | null | null | null |
app/setdb.py
|
gregdek/factoryman
|
c4056e6b23ca6a6bb50d5e1e768d3da322592fb4
|
[
"Apache-2.0"
] | null | null | null |
app/setdb.py
|
gregdek/factoryman
|
c4056e6b23ca6a6bb50d5e1e768d3da322592fb4
|
[
"Apache-2.0"
] | null | null | null |
# This is a separate script that sets game-independent
# variables, like commodity prices.
import redis
r = redis.StrictRedis('localhost', 6379, charset="utf-8", decode_responses=True)
# Set commodity prices
r.set("buy:A", "2")
r.set("sell:A", "1")
r.set("buy:B", "10")
r.set("sell:B", "8")
| 22.692308
| 80
| 0.681356
|