text stringlengths 8 6.05M |
|---|
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# Django settings module. NOTE(review): this targets a legacy Django
# (<1.10): MIDDLEWARE_CLASSES, TEMPLATE_DIRS and 'south' migrations are
# all pre-1.10 idioms — confirm the deployed Django version before upgrading.
import socket
from path import path
from datetime import timedelta
from handy.cipher import MessageCipher
import django.conf.global_settings as DEFAULT_SETTINGS

# Internationalization / time zone (naive datetimes: USE_TZ is off).
LANGUAGE_CODE = 'en-us'
USE_TZ = False
TIME_ZONE = 'America/Chicago'
USE_I18N = True
USE_L10N = True

# IPs allowed to see debug-only output (e.g. django-debug-toolbar).
INTERNAL_IPS = ('127.0.0.1',)

# Project root = two directories above this settings file.
BASE_DIR = path(__file__).abspath().dirname().dirname()
HOSTNAME = socket.getfqdn()

# Media / static file layout.
MEDIA_ROOT = BASE_DIR / 'uploads'
MEDIA_URL = '/uploads/'
STATIC_ROOT = BASE_DIR / 'static'
STATIC_URL = "/static/"

# Auth redirects; LOGIN_EXEMPT_URLS is consumed by LoginRequiredMiddleware.
LOGIN_URL = '/login/'
LOGIN_EXEMPT_URLS = []

STATICFILES_DIRS = (
    BASE_DIR / 'assets',
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'compressor.finders.CompressorFinder',
)

# SECURITY WARNING: keep the secret key used in production secret!
#SECRET_KEY = 'hdi%asicub%e1z$=9h&g&m8$m$n2+bwo*_gne2zqbt5-+u+!##'
# Old dashboard's so that the old passwords work. (god help us all)
# NOTE(review): a hard-coded, trivially guessable SECRET_KEY checked into
# source is a serious security issue — it should be injected from the
# environment; kept here only for legacy password compatibility per the
# comment above.
SECRET_KEY = '0xdeadbeefcafebabe'

# Order matters: request-logging first, project LoginRequiredMiddleware last.
MIDDLEWARE_CLASSES = (
    'middleware.RequestTimeLoggingMiddleware',
    'middleware.QueryCountDebugMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'dynamicresponse.middleware.api.APIMiddleware',
    'dynamicresponse.middleware.dynamicformat.DynamicFormatMiddleware',
    'org.middleware.LoginRequiredMiddleware',
)

# Organization backend is tried before the stock model backend.
AUTHENTICATION_BACKENDS = (
    'org.backends.OrganizationBackend',
    'django.contrib.auth.backends.ModelBackend',
)

ROOT_URLCONF = 'urls'

TEMPLATE_DIRS = (
    BASE_DIR / 'templates',
)

# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'south',
    'compressor',
    'raven.contrib.django.raven_compat',
    'org',
    'inventory',
    'agreement',
    'handy',
    'regional',
    'monkeypatch',
)

# This is tacked onto the end of COMPRESS_ROOT (which defaults to STATIC_ROOT)
COMPRESS_OUTPUT_DIR = '_cache'

# NOTE(review): the production hostname gets a garbage lessc binary path —
# presumably to force precompilation to fail/no-op there; confirm intent.
if HOSTNAME == 'secure.protectamerica.com':
    LESSC_BIN = ";lasdkfasd;lk"
else:
    LESSC_BIN = "lessc"

# django-compressor: compile .less via the binary chosen above.
COMPRESS_PRECOMPILERS = (
    ('text/less', '{0} {{infile}} {{outfile}}'.format(LESSC_BIN)),
)
COMPRESS_CSS_FILTERS = [
    'compressor.filters.css_default.CssAbsoluteFilter',
    'compressor.filters.cssmin.CSSMinFilter'
]
COMPRESS_JS_FILTERS = [
    'compressor.filters.jsmin.JSMinFilter'
]

# Extend Django's defaults so `request` is available in all templates.
TEMPLATE_CONTEXT_PROCESSORS = DEFAULT_SETTINGS.TEMPLATE_CONTEXT_PROCESSORS + (
    'django.core.context_processors.request',
)

# Console handler for DB-backend errors; email admins on request errors
# (only when DEBUG is off, via the require_debug_false filter).
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'verbose': {
            'format': ('%(levelname)s %(asctime)s %(module)s %(process)d '
                       '%(thread)d %(message)s'),
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose',
        },
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler',
        }
    },
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'django.db.backends': {
            'level': 'ERROR',
            'handlers': ['console'],
            'propagate': False,
        }
    }
}

# Credit-check business constants.
CREDIT_BUREAUS = 'equifax,transUnion'
STOP_RUNNING_AT_BEACON = 625
# How long a previous credit pull may be reused before re-checking.
CREDIT_REUSABLE_SPAN = timedelta(days=30)
CREDIT_APPROVED_BEACON = 600
# Cipher for SSN-like data; NOTE(review): key paths are dev/test fixtures.
SOCIAL_CIPHER = MessageCipher(private_file='/devd/trashboard/social_cipher_test.private', public_file='/devd/trashboard/social_cipher_test.public')
|
from abc import ABC
from abc import abstractmethod
class State(ABC):
    """Abstract lifecycle interface for an application/game state.

    Concrete states implement: enter -> (handle_event / update / render)*
    -> exit.
    """

    def __init__(self):
        # No shared state; present so subclasses can chain super().__init__().
        pass

    @abstractmethod
    def enter(self, data):
        """Activate the state; `data` carries hand-off info from the caller."""

    @abstractmethod
    def exit(self):
        """Deactivate the state and release anything it holds."""

    @abstractmethod
    def handle_event(self, event):
        """React to one input event."""

    @abstractmethod
    def update(self, dt):
        """Advance the state by `dt` time units."""

    @abstractmethod
    def render(self, renderer):
        """Draw the state via `renderer`."""


class NullState(State):
    """Do-nothing State (null-object pattern): safe placeholder state."""

    def __init__(self):
        super().__init__()

    def enter(self, data):
        return None

    def exit(self):
        return None

    def handle_event(self, event):
        return None

    def update(self, dt):
        return None

    def render(self, renderer):
        return None
|
from control import Control
def test_control_degree():
    """Control.degree maps a raw value in [lo, hi] onto [0, 180] degrees,
    clamping out-of-range inputs to the endpoints."""
    assert Control(1000, 2000).degree(-5000) == 0, "Should be 0"
    assert Control(1000, 2000).degree(910) == 0, "Should be 0"
    assert Control(1000, 2000).degree(5010) == 180, "Should be 180"
    assert Control().degree(1) == 0, "Should be 0"
    assert Control(-500, 500).degree(0) == 90, "Should be 90"
    assert Control(0, 180).degree(163) == 163, "Should be 163"
    a = Control(1000, 2000)
    assert a.degree(1500) == 90, "Should be 90"
    a = Control(1000, 2000)
    # Fix: the failure message previously said "Should be 90" although the
    # assertion (correctly) expects 180.
    assert a.degree(5000) == 180, "Should be 180"
def test_control_switch():
    """Control.switch is truthy at/above the threshold band."""
    a = Control(19000, 20000)
    assert a.switch(19700), "Should be True"
    a = Control(19000, 20000)
    # NOTE(review): the message says "Should be False" but the check is
    # `is None` — Control.switch apparently returns None (not False) below
    # the threshold; confirm against control.Control before tightening.
    assert a.switch(19699) is None, "Should be False"
def test_control_variator():
    """Control.variator linearly rescales a value from [lo, hi] onto the
    requested output interval."""
    ctl = Control(1000, 2000)
    assert ctl.variator(1500, 0.0, 1.0) == 0.5, "Should be 0.5"
    ctl = Control(1000, 2000)
    assert ctl.variator(1500, 0.0, 0.1) == 0.05, "Should be 0.05"
|
from django.urls import path, include
from . import views
from rest_framework import routers
from django.conf.urls import url

# DRF router: no viewsets registered yet, so it only serves the API root.
router = routers.DefaultRouter()

urlpatterns = [
    path('',include(router.urls)),
    # NOTE(review): django.conf.urls.url is deprecated (removed in Django 4;
    # use re_path), and the pattern lacks anchors (r'^prueba/$') so it
    # matches any path containing 'prueba/'. Left unchanged.
    url(r'prueba/',views.prueba.as_view(),name="prueba")
] |
import matplotlib; matplotlib.use('Agg') # NOQA
import os
import json
import skimage.io
import skimage.transform
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Arrow
from .config import cfg
from util import boxes
def vis_one_vqa(img_path, words, vqa_scores, label, module_names, answers,
                txt_att, att_stack, stack_ptr, module_prob, save_path):
    """Save a single 5x3 diagnostic figure for one VQA example.

    Panels: image+question, controller module weights, textual attention,
    answer logits, stack pointer, the flattened attention stack, and the
    per-timestep image attention overlays. Writes the figure to `save_path`.
    """
    img = skimage.io.imread(img_path)
    h = plt.figure(figsize=(20, 20))
    T = cfg.MODEL.T_CTRL
    # img
    plt.subplot(5, 3, 1)
    plt.imshow(img)
    # question wrapped to 10 words per line
    plt.title(
        '\n'.join([' '.join(words[b:b+10]) for b in range(0, len(words), 10)]))
    # module weights
    plt.subplot(5, 3, 2)
    plt.imshow(module_prob.T, cmap='Reds')
    plt.colorbar()
    plt.xticks(range(T), range(T))
    plt.yticks(range(len(module_names)), module_names, size='small')
    plt.title('module weights at controller timestep')
    # textual attention
    plt.subplot(5, 3, 3)
    # print(np.sum(txt_att, axis=1))
    # print(np.sum(txt_att[:, :len(words)], axis=1))
    plt.imshow(txt_att[:, :len(words)], cmap='Reds')
    plt.colorbar()
    plt.xticks(range(len(words)), words, rotation=90)
    plt.yticks(range(T), range(T))
    plt.ylabel('controller timestep')
    plt.title('textual attention at controller timestep')
    # scores
    plt.subplot(5, 3, 4)
    plt.imshow(vqa_scores[np.newaxis, :], cmap='Reds')
    plt.xticks(range(len(answers)), answers, rotation=90)
    plt.yticks([], [])
    plt.xlabel('answer logits')
    plt.title('prediction: %s label: %s' % (
        answers[np.argmax(vqa_scores)], answers[label]))
    plt.subplot(5, 3, 5)
    plt.imshow(stack_ptr.T, cmap='Reds')
    plt.colorbar()
    plt.xticks(range(T), range(T))
    plt.yticks(range(stack_ptr.shape[1]), range(stack_ptr.shape[1]))
    plt.ylabel('stack depth')
    plt.xlabel('stack pointer at controller timestep')
    # Visualize the attention stack
    # att_stack is T x H x W x L -> L x H x T x W
    plt.subplot(5, 3, 6)
    T, H, W, L = att_stack.shape
    plt.imshow(att_stack.transpose((3, 1, 0, 2)).reshape((L*H, T*W)))
    plt.colorbar()
    plt.xticks(W // 2 + np.arange(T) * W, range(T))
    plt.yticks(np.arange(L) * H, np.arange(L) * H)
    plt.ylabel('stack depth')
    plt.xlabel('image attention at controller timestep')
    # image attention at each timestep
    for t in range(T):
        plt.subplot(5, 3, t+7)
        # soft-read the attention map at the current (soft) stack pointer
        att = np.sum(att_stack[t] * stack_ptr[t], axis=-1)
        img_with_att = attention_interpolation(img, att)
        plt.imshow(img_with_att)
        plt.xlabel('controller timestep t = %d' % t)
    plt.savefig(save_path)
    print('visualization saved to ' + save_path)
    plt.close(h)
def vis_one_loc(img_path, words, loc_scores, bbox_pred, bbox_gt, module_names,
                txt_att, att_stack, stack_ptr, module_prob, save_path):
    """Save a single 5x3 diagnostic figure for one localization example.

    Like vis_one_vqa, but panel 1 overlays predicted (red) vs ground-truth
    (yellow) boxes and panel 4 shows the H_FEAT x W_FEAT localization scores.
    """
    img = skimage.io.imread(img_path)
    h = plt.figure(figsize=(20, 20))
    T = cfg.MODEL.T_CTRL
    # img
    plt.subplot(5, 3, 1)
    plt.imshow(img)
    _print_bbox(bbox_pred, 'r')
    _print_bbox(bbox_gt, 'y')
    plt.title(
        '\n'.join([' '.join(words[b:b+10]) for b in range(0, len(words), 10)])
        + '\nred: prediction yellow: ground-truth')
    # module weights
    plt.subplot(5, 3, 2)
    plt.imshow(module_prob.T, cmap='Reds')
    plt.colorbar()
    plt.xticks(range(T), range(T))
    plt.yticks(range(len(module_names)), module_names, size='small')
    plt.title('module weights at controller timestep')
    # textual attention
    plt.subplot(5, 3, 3)
    # print(np.sum(txt_att, axis=1))
    # print(np.sum(txt_att[:, :len(words)], axis=1))
    plt.imshow(txt_att[:, :len(words)], cmap='Reds')
    plt.colorbar()
    plt.xticks(range(len(words)), words, rotation=90)
    plt.yticks(range(T), range(T))
    plt.ylabel('controller timestep')
    plt.title('textual attention at controller timestep')
    # scores
    plt.subplot(5, 3, 4)
    plt.imshow(loc_scores.reshape(cfg.MODEL.H_FEAT, cfg.MODEL.W_FEAT))
    plt.colorbar()
    plt.title('localization scores')
    plt.subplot(5, 3, 5)
    plt.imshow(stack_ptr.T, cmap='Reds')
    plt.colorbar()
    plt.xticks(range(T), range(T))
    plt.yticks(range(stack_ptr.shape[1]), range(stack_ptr.shape[1]))
    plt.ylabel('stack depth')
    plt.xlabel('stack pointer at controller timestep')
    # Visualize the attention stack
    # att_stack is T x H x W x L -> L x H x T x W
    plt.subplot(5, 3, 6)
    T, H, W, L = att_stack.shape
    plt.imshow(att_stack.transpose((3, 1, 0, 2)).reshape((L*H, T*W)))
    plt.colorbar()
    plt.xticks(W // 2 + np.arange(T) * W, range(T))
    plt.yticks(np.arange(L) * H, np.arange(L) * H)
    plt.ylabel('stack depth')
    plt.xlabel('image attention at controller timestep')
    # image attention at each timestep
    for t in range(T):
        plt.subplot(5, 3, t+7)
        # soft-read the attention map at the current (soft) stack pointer
        att = np.sum(att_stack[t] * stack_ptr[t], axis=-1)
        img_with_att = attention_interpolation(img, att)
        plt.imshow(img_with_att)
        plt.xlabel('controller timestep t = %d' % t)
    plt.savefig(save_path)
    print('visualization saved to ' + save_path)
    plt.close(h)
def _format_str(s):
words = s.split()
s = '\n'.join([' '.join(words[b:b+8]) for b in range(0, len(words), 8)])
return s
# Human-readable, one-sentence descriptions of each neural-module op,
# keyed by the module's internal '_Name'.
MODULE_DESCRIPTION_TEXT = {
    '_NoOp':
        'it doesn\'t do anything (i.e. nothing is updated in this timestep).',  # NoQA
    '_Find':
        'it looks at new image regions based on attended text.',  # NoQA
    '_Transform':
        'it shifts the image attention to somewhere new, conditioned on its previous glimpse.',  # NoQA
    '_Filter':
        'it tries to select out some image regions from where it looked before (based on attended text).',  # NoQA
    '_And':
        'it takes the intersection of the program\'s two previous glimpses as inputs, returning their intersection.',  # NoQA
    '_Or':
        'it takes the union of the program\'s two previous glimpses as inputs, returning their union.',  # NoQA
    '_Scene':
        'it tries to look at some objects in the image.',  # NoQA
    '_DescribeOne':
        'it takes the program\'s previous glimpse as input, and tries to infer the answer from it.',  # NoQA
    '_DescribeTwo':
        'it takes the program\'s two previous glimpses as inputs, and tries to infer the answer from them.',  # NoQA
}
def _find_txt_segs(keep, words):
segs = []
elems = []
for n, k in enumerate(keep):
if k:
elems.append(words[n])
else:
if elems:
segs.append('"' + ' '.join(elems) + '"')
elems = []
if elems:
segs.append('"' + ' '.join(elems) + '"')
return segs
def _extract_txt_att(words, atts, thresh=0.5):
    """
    Take at most 3 words that have at least 50% of the max attention.

    Returns a comma-separated string of quoted word segments (via
    _find_txt_segs). `atts` must be a non-empty 1-D array aligned
    with `words`.
    """
    atts_sorted = np.sort(atts)[::-1]
    # Fix: guard short inputs — indexing atts_sorted[2] raised IndexError
    # whenever fewer than 3 words were attended.
    k = min(2, len(atts_sorted) - 1)
    att_min = max(atts_sorted[k], atts_sorted[0]*thresh)
    # collect those words above att_min
    keep = (atts >= att_min)
    # assert np.any(keep)
    vis_txt = ', '.join(_find_txt_segs(keep, words))
    return vis_txt
def vis_one_stepwise(img_path, words, module_names, txt_att, att_stack,
                     stack_ptr, module_prob, save_path, vis_type,
                     vqa_scores=None, label=None, answers=None,
                     loc_scores=None, bbox_pred=None, bbox_gt=None):
    """Save a step-by-step figure for one example: one panel per displayed
    controller timestep, plus an answer/bbox panel, joined by arrows.

    `vis_type` is 'vqa' (needs vqa_scores/label/answers) or 'loc' (needs
    loc_scores/bbox_pred/bbox_gt). Also dumps the question and prediction
    to a sibling .txt file (JSON) next to `save_path`.
    """
    T = cfg.MODEL.T_CTRL
    # M = len(module_names)
    img = skimage.io.imread(img_path)
    # scale factors from the original image to the fixed 320x480 canvas
    scale_x = 480. / img.shape[1]
    scale_y = 320. / img.shape[0]
    img = skimage.transform.resize(img, (320, 480))
    h = plt.figure(figsize=(18, (T+2) * 5))
    if cfg.TEST.VIS_SHOW_IMG:
        # Image and question
        plt.subplot((T+2)*2, 3, (3, 6))
        plt.imshow(img)
        plt.axis('off')
        plt.title('\n'.join(
            [' '.join(words[b:b+6]) for b in range(0, len(words), 6)]),
            fontsize=20)
    # Modules at each timestep
    # hard-argmax the soft module weights into one module name per step
    m_list = [module_names[np.argmax(module_prob[t])] for t in range(T)]
    # NOTE(review): np.bool was removed in NumPy >= 1.24; this needs an
    # older NumPy (or np.bool_) — confirm the pinned version.
    is_disp = np.ones(T, np.bool)
    is_ans = np.zeros(T, np.bool)
    if vis_type == 'vqa':
        """
        Show the output of the last "_Describe*"
        """
        describe_t = -1
        for t in range(T-1, -1, -1):
            if m_list[t].startswith('_Describe'):
                describe_t = t
                break
        # hide _NoOp steps and all _Describe steps except the last one
        for t in range(T):
            is_disp[t] = not (
                (m_list[t] == '_NoOp') or
                (m_list[t].startswith('_Describe') and t != describe_t))
        is_ans[describe_t] = True
    else:
        # 'loc': always show the final step; it carries the answer panel
        for t in range(T):
            is_disp[t] = (t == T-1) or not (
                (m_list[t] == '_NoOp') or
                (m_list[t].startswith('_Describe')))
        is_ans[T-1] = True
    t_disp = 0
    for t in range(T):
        if not is_disp[t]:
            continue
        show_ans = is_ans[t]
        m = m_list[t]
        # these modules take no textual argument
        if m in {'_Scene', '_NoOp', '_And', '_Or'}:
            att_txt = ''
        else:
            att_txt = _extract_txt_att(words, txt_att[t, :len(words)])
        # map internal module names to friendlier display names
        if t == 0 and m == '_Filter':
            m_display = 'find'
        else:
            m_display = m[1:].replace(
                'Find', 'look_for').replace(
                'Filter', 'select').replace(
                'Transform', 'related_by').replace(
                'DescribeOne', 'Answer').replace(
                'DescribeTwo', 'Compare_Two').replace(
                'And', 'Intersect').replace('Or', 'Combine').lower()
        if show_ans and vis_type == 'loc' and \
                m in {'_NoOp', '_DescribeOne', '_DescribeTwo'}:
            m_display = 'bbox_regression'
            att_txt = ''
        # output attention
        if show_ans:
            if vis_type == 'vqa':
                # blank panel carrying the predicted/true answer text
                plt.subplot((T+2)*2, 3, (6*t_disp+9, 6*t_disp+12))
                plt.imshow(np.ones(img.shape, np.float32))
                plt.axis('off')
                if cfg.TEST.VIS_SHOW_ANSWER:
                    answer_txt = (
                        'predicted answer: "%s"\ntrue answer: "%s"' % (
                            answers[np.argmax(vqa_scores)], answers[label]))
                else:
                    answer_txt = '(model prediction not shown)'
                plt.text(10, 100, answer_txt, fontsize=20)
            elif vis_type == 'loc':
                # image panel with ground-truth (and optionally predicted) box
                plt.subplot((T+2)*2, 3, (6*t_disp+9, 6*t_disp+12))
                plt.imshow(img)
                _print_bbox(bbox_gt, 'y', scale_x, scale_y)
                if cfg.TEST.VIS_SHOW_ANSWER:
                    _print_bbox(bbox_pred, 'r', scale_x, scale_y)
                    IoU = boxes.bbox_iou(bbox_pred, bbox_gt)
                    txt = 'prediction: red box\nground-truth: yellow box\n' \
                        '(IoU = %.2f)' % IoU
                else:
                    txt = 'prediction: (not shown)\nground-truth: yellow box'
                plt.xticks([], [])
                plt.yticks([], [])
                plt.xlabel(txt, fontsize=20)
            else:
                raise ValueError('Unknow vis_type ' + str(vis_type))
        else:
            # ordinary step: image overlaid with the soft-read attention
            plt.subplot((T+2)*2, 3, (6*t_disp+9, 6*t_disp+12))
            att = np.sum(att_stack[t] * stack_ptr[t], axis=-1)
            img_with_att = attention_interpolation(img, att)
            plt.imshow(img_with_att)
            plt.xticks([], [])
            plt.yticks([], [])
        plt.title('%s(%s)\n' % (m_display, att_txt), fontsize=24)
        # downward arrow linking this panel to the next one
        patches = Arrow(
            img.shape[1] // 2, -35, 0, 32, width=40, color='k', clip_on=False)
        plt.gca().add_patch(patches)
        t_disp += 1
    plt.savefig(save_path, bbox_inches='tight')
    # sidecar JSON (as .txt) with the question and the prediction
    with open(save_path.replace('.png', '') + '.txt', 'w') as f:
        question = (' '.join(words)).replace(' ?', '?')
        if vis_type == 'vqa':
            ans_pred, ans_gt = answers[np.argmax(vqa_scores)], answers[label]
            json.dump({'question': question, 'ans_pred': ans_pred,
                       'ans_gt': ans_gt}, f)
        elif vis_type == 'loc':
            json.dump({'question': question, 'bbox_pred': list(bbox_pred),
                       'bbox_gt': list(bbox_gt)}, f)
        else:
            raise ValueError('Unknow vis_type ' + str(vis_type))
    print('visualization saved to ' + save_path)
    plt.close(h)
def vis_batch_vqa(model, data_reader, batch, vis_outputs, start_idx,
                  start_idx_correct, start_idx_incorrect, vis_dir):
    """Visualize (a subset of) one VQA batch into `vis_dir`.

    When cfg.TEST.VIS_SEPARATE_CORRECTNESS is set, picks up to
    NUM_VIS_CORRECT correct and NUM_VIS_INCORRECT incorrect examples and
    prefixes filenames accordingly; otherwise takes the first examples up
    to the NUM_VIS budget. Delegates to vis_one_stepwise or vis_one_vqa.
    """
    module_names = model.nmn.module_names
    answers = data_reader.batch_loader.answer_dict.word_list
    if cfg.TEST.VIS_SEPARATE_CORRECTNESS:
        # remaining quotas for each bucket
        num_correct = max(cfg.TEST.NUM_VIS_CORRECT-start_idx_correct, 0)
        num_incorrect = max(cfg.TEST.NUM_VIS_INCORRECT-start_idx_incorrect, 0)
        labels = batch['answer_label_batch']
        predictions = np.argmax(vis_outputs['vqa_scores'], axis=1)
        is_correct = predictions == labels
        inds = (list(np.where(is_correct)[0][:num_correct]) +
                list(np.where(~is_correct)[0][:num_incorrect]))
    else:
        num = min(len(batch['image_path_list']), cfg.TEST.NUM_VIS - start_idx)
        inds = range(num)
    for n in inds:
        img_path = batch['image_path_list'][n]
        if cfg.TEST.VIS_SEPARATE_CORRECTNESS:
            if is_correct[n]:
                save_name = 'correct_%08d_%s.png' % (
                    start_idx_correct,
                    os.path.basename(img_path).split('.')[0])
                start_idx_correct += 1
            else:
                save_name = 'incorrect_%08d_%s.png' % (
                    start_idx_incorrect,
                    os.path.basename(img_path).split('.')[0])
                start_idx_incorrect += 1
        else:
            save_name = '%08d_%s.png' % (
                start_idx, os.path.basename(img_path).split('.')[0])
            start_idx += 1
        save_path = os.path.join(vis_dir, save_name)
        # decode this example's token ids back to words (time-major batch)
        words = [
            data_reader.batch_loader.vocab_dict.idx2word(n_w) for n_w in
            batch['input_seq_batch'][:batch['seq_length_batch'][n], n]]
        vqa_scores = vis_outputs['vqa_scores'][n]
        label = batch['answer_label_batch'][n]
        txt_att = vis_outputs['txt_att'][n]
        att_stack = vis_outputs['att_stack'][n]
        stack_ptr = vis_outputs['stack_ptr'][n]
        module_prob = vis_outputs['module_prob'][n]
        if cfg.TEST.STEPWISE_VIS:
            vis_one_stepwise(img_path, words, module_names, txt_att, att_stack,
                             stack_ptr, module_prob, save_path, vis_type='vqa',
                             vqa_scores=vqa_scores, label=label,
                             answers=answers)
        else:
            vis_one_vqa(img_path, words, vqa_scores, label, module_names,
                        answers, txt_att, att_stack, stack_ptr, module_prob,
                        save_path)
def vis_batch_loc(model, data_reader, batch, vis_outputs, start_idx,
                  start_idx_correct, start_idx_incorrect, vis_dir):
    """Visualize (a subset of) one localization batch into `vis_dir`.

    Correctness is IoU >= cfg.TEST.BBOX_IOU_THRESH between the decoded
    predicted box and the ground truth. Selection/naming mirrors
    vis_batch_vqa; delegates to vis_one_stepwise or vis_one_loc.
    """
    module_names = model.nmn.module_names
    iou_th = cfg.TEST.BBOX_IOU_THRESH
    if cfg.TEST.VIS_SEPARATE_CORRECTNESS:
        # remaining quotas for each bucket
        num_correct = max(cfg.TEST.NUM_VIS_CORRECT-start_idx_correct, 0)
        num_incorrect = max(cfg.TEST.NUM_VIS_INCORRECT-start_idx_incorrect, 0)
        # decode the whole batch's predicted boxes from grid argmax + offsets
        bbox_pred = boxes.batch_feat_grid2bbox(
            np.argmax(vis_outputs['loc_scores'], axis=1),
            vis_outputs['bbox_offset'],
            data_reader.batch_loader.stride_H,
            data_reader.batch_loader.stride_W,
            data_reader.batch_loader.feat_H, data_reader.batch_loader.feat_W)
        bbox_gt = batch['bbox_batch']
        is_correct = boxes.batch_bbox_iou(bbox_pred, bbox_gt) >= iou_th
        inds = (list(np.where(is_correct)[0][:num_correct]) +
                list(np.where(~is_correct)[0][:num_incorrect]))
    else:
        num = min(len(batch['image_path_list']), cfg.TEST.NUM_VIS - start_idx)
        inds = range(num)
    for n in inds:
        img_path = batch['image_path_list'][n]
        if cfg.TEST.VIS_SEPARATE_CORRECTNESS:
            if is_correct[n]:
                save_name = 'correct_%08d_%s.png' % (
                    start_idx_correct,
                    os.path.basename(img_path).split('.')[0])
                start_idx_correct += 1
            else:
                save_name = 'incorrect_%08d_%s.png' % (
                    start_idx_incorrect,
                    os.path.basename(img_path).split('.')[0])
                start_idx_incorrect += 1
        else:
            save_name = '%08d_%s.png' % (
                start_idx, os.path.basename(img_path).split('.')[0])
            start_idx += 1
        save_path = os.path.join(vis_dir, save_name)
        # decode this example's token ids back to words (time-major batch)
        words = [
            data_reader.batch_loader.vocab_dict.idx2word(n_w) for n_w in
            batch['input_seq_batch'][:batch['seq_length_batch'][n], n]]
        loc_scores = vis_outputs['loc_scores'][n]
        bbox_offset = vis_outputs['bbox_offset'][n]
        # per-example decode (recomputed even when the batch decode above ran)
        bbox_pred = boxes.feat_grid2bbox(
            np.argmax(loc_scores), bbox_offset,
            data_reader.batch_loader.stride_H,
            data_reader.batch_loader.stride_W, data_reader.batch_loader.feat_H,
            data_reader.batch_loader.feat_W)
        bbox_gt = boxes.feat_grid2bbox(
            batch['bbox_ind_batch'][n], batch['bbox_offset_batch'][n],
            data_reader.batch_loader.stride_H,
            data_reader.batch_loader.stride_W, data_reader.batch_loader.feat_H,
            data_reader.batch_loader.feat_W)
        # bbox_gt = batch['bbox_batch'][n]
        txt_att = vis_outputs['txt_att'][n]
        att_stack = vis_outputs['att_stack'][n]
        stack_ptr = vis_outputs['stack_ptr'][n]
        module_prob = vis_outputs['module_prob'][n]
        if cfg.TEST.STEPWISE_VIS:
            vis_one_stepwise(img_path, words, module_names, txt_att, att_stack,
                             stack_ptr, module_prob, save_path, vis_type='loc',
                             loc_scores=loc_scores, bbox_pred=bbox_pred,
                             bbox_gt=bbox_gt)
        else:
            vis_one_loc(
                img_path, words, loc_scores, bbox_pred, bbox_gt, module_names,
                txt_att, att_stack, stack_ptr, module_prob, save_path)
def _print_bbox(bbox, color='r', scale_x=1., scale_y=1.):
    """Draw one rectangle on the current axes.

    `bbox` unpacks as (x1, y1, h, w) — note the h-before-w order — in the
    original image frame; `scale_x`/`scale_y` map it onto a resized canvas.
    """
    x1, y1, h, w = bbox
    left = x1 * scale_x
    top = y1 * scale_y
    right = (x1 + w - 1) * scale_x
    bottom = (y1 + h - 1) * scale_y
    # closed polyline: TL -> TR -> BR -> BL -> TL
    xs = [left, right, right, left, left]
    ys = [top, top, bottom, bottom, top]
    plt.plot(xs, ys, color)
def _att_softmax(att):
exps = np.exp(att - np.max(att))
softmax = exps / np.sum(exps)
return softmax
def attention_interpolation(im, att):
    """Overlay an attention map on image `im` by brightness modulation.

    Softmaxes `att`, upsamples it to the image size (bicubic), and darkens
    low-attention regions to 45% brightness while attended regions stay
    near full brightness. Returns an image with `im`'s dtype.
    """
    softmax = _att_softmax(att)
    att_reshaped = skimage.transform.resize(softmax, im.shape[:2], order=3)
    # normalize the attention
    # make sure the 255 alpha channel is at least 3x uniform attention
    att_reshaped /= np.maximum(np.max(att_reshaped), 3. / att.size)
    att_reshaped = att_reshaped[..., np.newaxis]
    # make the attention area brighter than the rest of the area
    vis_im = att_reshaped * im + (1-att_reshaped) * im * .45
    vis_im = vis_im.astype(im.dtype)
    return vis_im
def _move_ptr_bw(stack_ptr):
    """Shift a soft stack pointer one position toward the stack bottom.

    `stack_ptr` is a distribution over stack depths (last axis of the
    attention stack). With cfg.MODEL.NMN.STACK.GUARD_STACK_PTR set, mass
    already at the bottom slot stays there instead of falling off.
    """
    new_stack_ptr = np.zeros_like(stack_ptr)
    new_stack_ptr[:-1] = stack_ptr[1:]
    if cfg.MODEL.NMN.STACK.GUARD_STACK_PTR:
        # keep bottom-of-stack probability mass in place
        stack_bottom_mask = np.zeros_like(stack_ptr)
        stack_bottom_mask[0] = 1.
        new_stack_ptr += stack_bottom_mask * stack_ptr
    return new_stack_ptr
def _read_two_from_stack(att_stack, stack_ptr):
    """Soft-read the top two attention maps from the attention stack.

    Returns (second_from_top, top): the map one slot below the pointer and
    the map at the pointer.
    """
    top = np.sum(att_stack * stack_ptr, axis=-1)
    below = np.sum(att_stack * _move_ptr_bw(stack_ptr), axis=-1)
    return below, top
|
import numpy as np
from sklearn.datasets import load_boston
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn.model_selection import train_test_split, KFold, cross_val_score
from sklearn.metrics import accuracy_score, r2_score
# from sklearn.svm import LinearSVC, SVC
from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor
from sklearn.linear_model import LinearRegression
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
from sklearn.linear_model import LinearRegression
from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
# 1. Data
# NOTE(review): load_boston was deprecated in scikit-learn 1.0 and removed
# in 1.2 — this script requires an older sklearn.
dataset = load_boston()
x = dataset.data
y = dataset.target
print(x.shape, y.shape) # (506, 13), (506,)

x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, shuffle=True, random_state=45)
kfold = KFold(n_splits=5, shuffle=True)

# 2. Models: compare four regressors by 5-fold cross-validated R^2
# on the training split. (kfold is unseeded, so scores vary per run.)
for i in [LinearRegression, KNeighborsRegressor, DecisionTreeRegressor, RandomForestRegressor]:
    print()
    model = i()
    # training / scoring
    scores = cross_val_score(model, x_train, y_train, cv=kfold)
    print(i.__name__ + '\'s score(R2) :', scores)
'''
LinearRegression's score(R2) : [0.68548713 0.73717088 0.7014253 0.60038314 0.78640173]
KNeighborsRegressor's score(R2) : [0.53018174 0.49457994 0.51838518 0.4337409 0.42476561]
DecisionTreeRegressor's score(R2) : [0.74241591 0.72411056 0.82158985 0.75469256 0.73659862]
RandomForestRegressor's score(R2) : [0.8863208 0.76628895 0.91202741 0.87049145 0.78873223]
''' |
# -*- coding:UTF-8 -*-
# DRF serializers for the store/finance app; most are thin ModelSerializer
# wrappers exposing all model fields.
from rest_framework import serializers
from . import models
from index.models import Application


class CheckApplicationSerializer(serializers.ModelSerializer):
    # only applications already approved (status 1) may be referenced
    application = serializers.PrimaryKeyRelatedField(queryset=Application.objects.filter(application_status=1))

    class Meta:
        model = models.CheckApplication
        fields = '__all__'


class StoreActivitySerializer(serializers.ModelSerializer):
    class Meta:
        model = models.StoreActivityType
        fields = '__all__'


class DeliverSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.Delivers
        fields = '__all__'


class DeliverServiceSerializer(serializers.ModelSerializer):
    # nested read-only list of this service's delivers
    delivers = DeliverSerializer(many=True, read_only=True)

    class Meta:
        model = models.DeliverServices
        fields = '__all__'


class GenerateCodeSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.CodeWarehouse
        fields = '__all__'


class AccountRechargeSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.AccountRecharge
        fields = '__all__'


class AccountSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.Account
        fields = ('id', 'account_type', 'bank_balance')


class KeepAccountSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.KeepAccounts
        fields = '__all__'


class DeliverReasonSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.DeliveryReason
        fields = '__all__'


class ProtocolSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.Protocol
        fields = '__all__'


class RefundReasonSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.RefundReason
        fields = '__all__'


class BargainPosterSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.BargainPoster
        fields = '__all__'


class BankCardSerializer(serializers.ModelSerializer):
    """Bank card: the full card number is write-only; reads expose only the
    owning bank's name and the last four digits."""
    bank_name = serializers.ReadOnlyField(source='receiver_bank_no.bank_name')
    receiver_account_num = serializers.CharField(max_length=30,write_only=True)
    last_four_num = serializers.SerializerMethodField()

    class Meta:
        model = models.BankCard
        fields ='__all__'

    def get_last_four_num(self,obj):
        # last four digits of the stored card number
        return obj.receiver_account_num[-4:]


class TolingqiangSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.ToLingqiang
        fields = '__all__'


class BankNoSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.BankNo
        fields ='__all__'


class ToBankSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.ToBank
        fields ='__all__'


class StoreTransferChargeSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.StoreTransferCharge
fields = '__all__' |
from socket import *
import time
import threading as th
from multiprocessing import Process, Lock,RLock, Semaphore
def fromGateway():
print "The server for Gateway is ready to receive"
while 1:
messageFromGateway, gatewayClientAddress = fromGatewayserverSocket.recvfrom(2048)
fromGatewayLock.acquire()
messageFromGatewayReserve = messageFromGateway
print messageFromGateway
fromGatewayFlag=1
fromGatewayLock.release()
#serverSocket.sendto(modifiedMessage, clientAddress)
#if (messageFromGateway[-2:]=="U3")
def toGateway():
print "The client for Gateway is ready to receive"
while 1:
fromU3Lock.acquire()
if fromU3Flag==1:
clientSocketToGateway.sendto(messageFromU3Reserve,(toGatewayServerName, toGatewayServerPort))
print messageFromGatewayReserve
fromU3Flag=0
fromU3Lock.release()
def fromU3():
print "The server for U3 is ready to receive"
while 1:
messageFromU3, U3ClientAddress = fromU3serverSocket.recvfrom(2048)
fromU3Lock.acquire()
messageFromU3Reserve = messageFromU3
print messageFromU3
fromU3Flag=1
fromU3Lock.release()
def toU3():
print "The client for U3 is ready to receive"
'''while 1:
fromGatewayLock.acquire()
if fromGatewayFlag==1:
clientSocketToU3.sendto(messageFromGatewayReserve,(toU3ServerName, toU3ServerPort))
print messageFromGatewayReserve
fromGatewayFlag=0
fromGatewayLock.release()'''
while 1:
fromU3Lock.acquire()
if fromU3Flag==1:
messageFromU3Reserve=messageFromU3Reserve + "python krali"
clientSocketToU3.sendto(messageFromU3Reserve,(toU3ServerName, toU3ServerPort))
print messageFromU3Reserve
fromU3Flag=0
fromU3Lock.release()
# Shared mailbox state: latest raw packets, "new data" flags, and forward
# buffers, protected by the two locks below.
messageFromGateway=""
messageFromU3=""
fromGatewayFlag=0
fromU3Flag=0
messageFromGatewayReserve=""
messageFromU3Reserve=""
fromGatewayLock=th.Lock()
fromU3Lock=th.Lock()

# Outbound endpoints (hostnames resolved at send time).
toGatewayServerName = "Gateway"
toGatewayServerPort= 10004
clientSocketToGateway = socket(AF_INET, SOCK_DGRAM)
toU3ServerName = "U3"
toU3ServerPort= 30003
clientSocketToU3 = socket(AF_INET, SOCK_DGRAM)

# Inbound UDP listeners, bound on all interfaces.
fromGatewayServerPort=30002
fromGatewayserverSocket = socket(AF_INET, SOCK_DGRAM)
fromGatewayserverSocket.bind(("",fromGatewayServerPort))
fromU3ServerPort=30022
fromU3serverSocket = socket(AF_INET, SOCK_DGRAM)
fromU3serverSocket.bind(("", fromU3ServerPort))

# NOTE(review): these threads are created but never started in the visible
# file — presumably `.start()` (and `.join()`) calls are missing; confirm
# against the full script before relying on it.
gatewayAcceptor = th.Thread(target=fromGateway)
gatewaySender = th.Thread(target=toGateway)
U3Acceptor = th.Thread(target=fromU3)
U3Sender = th.Thread(target=toU3)
|
# -*- coding: utf-8 -*-
import scrapy
class MalaysiaSongSpider(scrapy.Spider):
    """Spider that saves a YouTube playlist page to a local HTML file."""
    name = 'malaysia_song'
    # NOTE(review): allowed_domains should hold bare domains (e.g.
    # 'www.youtube.com'); a full URL here likely disables/breaks offsite
    # filtering. Left unchanged to avoid altering crawl behavior.
    allowed_domains = ['www.youtube.com/playlist?list=PLgNjz5kKawRiPXT7l3XT60v3iWogBvGP0']
    start_urls = ['http://www.youtube.com/playlist?list=PLgNjz5kKawRiPXT7l3XT60v3iWogBvGP0/']

    def parse(self, response):
        # extracted text is currently unused; kept for parity with original
        content = response.xpath('//s/text()').extract()
        # Fix: response.body is bytes — writing it to a text-mode file
        # raised TypeError; open in binary mode instead.
        with open('C:\\Users\\Administrator\\Desktop\\youtu.html', 'wb') as f:
            f.write(response.body)
|
# Notebook-exported matplotlib walkthrough: line/point styles, multiple
# figures and subplots, text/annotations, axis scales, and spine styling.
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter
import numpy as np
%matplotlib inline

y = [1,2,3,4,10]
x = [1,2,3,4,10]
line, = plt.plot(x, y, "-", linewidth=5.0) # linewidth means the width of the line
line.set_antialiased(False) # this removes the blur
plt.plot(x, y, "ro") # r means red o means o which makes the plot represent points instead of a line
plt.axis([0, 6, 0, 20])

sequence = np.arange(0.0, 10.0, 0.2)
plt.plot(x, y, "r--", sequence, sequence**2, "gs") # represents two functions in one plot; r-- means red and dashed; gs means green and squares
plt.plot(x, y, "r--", sequence, sequence**2, "gs", sequence, sequence**3, "b^") # represents two functions in one plot; r-- means red and dashed; gs means green and squares;
# b^ means blue and with triangles

# setp: set properties on every line at once (keyword form...)
lines = plt.plot(sequence, sequence, sequence, sequence**2)
plt.setp(lines, color="r", linewidth=2.0)
plt.show()
# (...or MATLAB-style name/value pair form)
lines = plt.plot(sequence, sequence, sequence, sequence**2)
plt.setp(lines, "color", "r", "linewidth", 2.0)
plt.show()
plt.plot(sequence, sequence, "r.", sequence, sequence**2, "g:", marker="+", animated=True)
plt.setp(lines) #shows all parameters
plt.show()

# Representing functions
def f(x):
    # damped cosine: exp(-x) * cos(2*pi*x)
    return np.exp(-x)*np.cos(2*np.pi*x)

x1 = np.arange(0.0, 5.0, 0.1)
x2 = np.arange(0.0, 5.0, 0.2)
plt.figure(1)
plt.subplot(211) # length:height ratio for plot 1 by default is 111 the numbers actually mean 2 rows and 1 column. The third one represents the figure whose data
# is just going to be introduced
plt.plot(x1, f(x1), "ro", x2, f(x2))
plt.subplot(212) # length:height ratio for plot 2
plt.plot(x1, f(x1), "go")

# Once you have passed one figure, you can still go back to a certain plot in a given figure
plt.figure(1)
plt.subplot(211) # length:height ratio for plot 1 by default is 111 the numbers actually mean 2 rows and 1 column. The third one represents the figure whose data
# is just going to be introduced
plt.plot(x1, f(x1), "ro", x2, f(x2))
plt.subplot(212) # length:height ratio for plot 2
plt.plot(x1, f(x1), "go")
plt.figure(2)
plt.plot([1,5,10])
plt.figure(1)
plt.subplot(211)
plt.title("This is a plot")
plt.close()
# plt.hold() erases the function represented in the last plot it is still the focus (haven't stated another one or closed the last one) and then the next function will
# be represented in the same plot

# Adding text to a plot
mu = 100
sigma = 20
x = mu+sigma*np.random.randn(10000)
# NOTE(review): hist(..., normed=1) was removed in matplotlib 3.1 — modern
# matplotlib needs density=True; this cell requires an older matplotlib.
n, bins, patches = plt.hist(x, 50, normed=1, facecolor="g", alpha=0.6)
plt.xlabel("numeros aleatorios $N(\mu,\sigma)$", fontsize=15, color="green")
plt.ylabel("probabilidad de que al generarlos aleatoriamente se genere dicho numero")
plt.title("simplemente numeros aleatorios, literalmente. No, te lo digo de verdad, son aleatorios")
plt.text(40, 0.015, "$\mu=100,\ \sigma=20$")
plt.ylim(0, 0.03)
plt.grid(True)
plt.annotate("Maximo", xy=(105,0.02), xytext=(110, 0.025), arrowprops = dict(facecolor = "blue", shrink = 0.05))
plt.show()
plt.close()

# Changing the scale
mu = 0.5
sd = 0.3
y = mu+ sd*np.random.randn(1000)
y = y[(y>0)&(y<1)]  # keep samples in (0, 1) so the logit scale is defined
y.sort()
x = np.arange(len(y))
plt.figure(figsize=(10, 8))
plt.subplot(221)
plt.plot(x, y)
plt.yscale("linear")
plt.xscale("linear")
plt.title("Escala lineal")
plt.grid(True)
plt.subplot(222)
plt.plot(x, y)
plt.xscale("log")
plt.title("Escala logaritmica")
plt.grid(True)
plt.subplot(223)
plt.plot(x, y-y.mean())
# NOTE(review): the linthreshy kwarg was renamed to linthresh in
# matplotlib 3.3 — confirm the pinned matplotlib version.
plt.yscale("symlog", linthreshy=0.01)
plt.title("Escala logaritmica simetrica")
plt.grid(True)
plt.subplot(224)
plt.plot(x, y)
plt.yscale("logit")
plt.title("Escala logistica")
plt.gca().yaxis.set_minor_formatter(NullFormatter())
plt.grid(True)
plt.subplots_adjust(top = 0.92, bottom = 0.08, left = 0.10, right = 0.95, hspace = 0.45, wspace = 0.4)

# More configuration parameters for plots
plt.figure(figsize=(10,8))
x = np.linspace(-np.pi, np.pi, 256, endpoint=True)
S, C = np.sin(x), np.cos(x)
plt.plot(x, S, color = "green", linestyle = "-", label = "Sin")
plt.plot(x, C, color = "blue", linestyle = "--", label = "Cos")
plt.xlim(-4,4)
plt.ylim(S.min()*1.2, S.max()*1.2)
plt.xticks(np.linspace(-4, 4, 9, endpoint = True))
plt.yticks( [-1, 1], ["-1", "+1"])
# move the spines so the axes cross at the origin
ax = plt.gca()
ax.spines["right"].set_color("none")
ax.spines["top"].set_color("none")
ax.xaxis.set_ticks_position("bottom")
ax.spines["bottom"].set_position(("data", 0))
ax.yaxis.set_ticks_position("left")
ax.spines["left"].set_position(("data", 0))
plt.legend(loc = "upper left")
# making changes in axis labels
for label in ax.get_xticklabels() + ax.get_yticklabels():
    label.set_fontsize(16)
    label.set_bbox(dict(facecolor="white", edgecolor="None", alpha=0.3))
plt.show()
|
# Generated by Django 3.0.3 on 2020-03-17 15:37
from django.db import migrations
class Migration(migrations.Migration):
    """Rename WishItem.Item to lowercase 'item' (PEP 8 field naming)."""

    dependencies = [
        ('shop', '0006_auto_20200317_1832'),
    ]

    operations = [
        migrations.RenameField(
            model_name='wishitem',
            old_name='Item',
            new_name='item',
        ),
    ]
|
# from django.test import TestCase
# I would create tests here if this were intended for production. Upon request I can add some.
|
import functools
import logging
import re
import time
import typing
logger = logging.getLogger()

# Just for performance timing tests
def timed_request(method):
    """Decorator that logs the wall-clock duration of *method* in ms.

    Expects to wrap an instance method whose ``self`` carries a ``base_url``
    attribute; falls back to only the ``resource_path`` kwarg otherwise.
    """
    @functools.wraps(method)  # preserve the wrapped method's name/docstring
    def timed(*args, **kw):
        ts = time.time()
        try:
            # args[0] is expected to be the client instance carrying base_url.
            uri = args[0].__dict__.get('base_url', '') + kw.get('resource_path', '')
        except (IndexError, AttributeError):
            # Called with no positional args, or args[0] has no __dict__
            # (the original only caught IndexError and crashed in that case).
            uri = kw.get('resource_path', '')
        logger.info(f'{method.__name__}({uri})')
        result = method(*args, **kw)
        te = time.time()
        response_time = (te - ts) * 1000
        logger.info(f'Response Time({uri})==>{response_time:2.2f} ms')
        return result
    return timed
|
def authenticate(uname, pword):
    """Return True iff *uname* exists and *pword* matches its stored password.

    Bug fix: the original fell off the end (returning None) when the user
    was unknown; this version always returns a bool.

    SECURITY NOTE: plaintext, hard-coded credentials — demo use only.
    """
    login = {"abc": "123",
             "helen": "li",
             "soft": "dev"
             }
    return login.get(uname) == pword
|
import numpy as np
import pandas as pd
import random
# Draw a reproducible subset of 100 place_ids and save the matching rows.
train = pd.read_csv('../data/train.csv')
# sorted() gives a deterministic population order; random.sample() no longer
# accepts sets (deprecated in 3.9, TypeError since Python 3.11).
sample_places = random.sample(sorted(set(train['place_id'])), 100)
sample_train = train[train['place_id'].isin(sample_places)]
sample_train.to_csv('../processing/random.100.places.csv')
|
# nc2pdf - main program
# vim:fileencoding=utf-8
"""Plot cuts from a Gerber cloth cutter NC file to a PDF."""
__version__ = '1.12-beta'
_lic = """nc2pdf {}
Copyright © 2013, 2015 R.F. Smith <rsmith@xs4all.nl>. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.""".format(__version__)
import argparse
import datetime
import os.path
import time
import sys
import cairo
from nctools import gerbernc, plot, utils
class LicenseAction(argparse.Action):
    """argparse action that prints the license text and exits."""

    def __call__(self, parser, namespace, values, option_string=None):
        print(_lic)
        sys.exit()
def getcuts(rd):
    """Make a list of cuts.

    :rd: nctools.gerbernc.Reader object
    :returns: (cuts, x, y) where cuts is a list of cut sections (each a list
        of (x, y) tuples) and x, y collect the coordinates of every move.
    """
    cuts = []
    xcoords, ycoords = [], []
    current = None        # section being cut, or None while the knife is up
    knife_down = False
    last = None           # most recent position seen
    for cmd, cargs in rd:
        if cmd.startswith('down'):
            knife_down = True
            if not last:
                raise ValueError('Start of cutting without pos')
            current = [last]
        elif cmd.startswith('up'):
            knife_down = False
            if current:
                cuts.append(current)
            current = None
        elif cmd.startswith('moveto'):
            _, dest = cargs
            if knife_down:
                current.append(dest)
            dx, dy = dest
            xcoords.append(dx)
            ycoords.append(dy)
            last = dest
    return cuts, xcoords, ycoords
def main(argv):
    """Main program for the nc2pdf utility.

    :argv: command line arguments (without the program name)
    """
    parser = argparse.ArgumentParser(description=__doc__)
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-L', '--license', action=LicenseAction, nargs=0,
                       help="print the license")
    group.add_argument('-V', '--version', action='version',
                       version=__version__)
    parser.add_argument('-v', '--verbose', dest='verbose', action="store_true")
    parser.add_argument('files', nargs='*', help='one or more file names',
                        metavar='file')
    pv = parser.parse_args(argv)
    msg = utils.Msg(pv.verbose)
    offset = 40  # margin (mm) added around the bounding box of the cuts
    if not pv.files:
        parser.print_help()
        sys.exit(0)
    for fn in utils.xpand(pv.files):
        msg.say('Starting file "{}"'.format(fn))
        try:
            ofn = utils.outname(fn, extension='.pdf', addenum='_nc')
            rd = gerbernc.Reader(fn)
        except ValueError as e:
            msg.say(str(e))
            fns = "Cannot construct output filename. Skipping file '{}'."
            msg.say(fns.format(fn))
            continue
        except IOError as e:
            msg.say("Cannot read file: {}".format(e))
            msg.say("Skipping file '{}'".format(fn))
            continue
        cuts, xvals, yvals = getcuts(rd)
        cnt = len(cuts)
        msg.say('Got {} cuts'.format(cnt))
        # Bounding box of all recorded positions, used to size the page.
        minx, maxx = min(xvals), max(xvals)
        miny, maxy = min(yvals), max(yvals)
        bs = '{} range from {:.1f} mm to {:.1f} mm'
        msg.say(bs.format('X', minx, maxx))
        msg.say(bs.format('Y', miny, maxy))
        w = maxx - minx + offset
        h = maxy - miny + offset
        msg.say('Plotting the cuts')
        # Produce PDF output. Scale factor is 1 mm real =
        # 1 PostScript point in the PDF file
        # The matrix flips the y axis so the origin is at the bottom-left.
        xf = cairo.Matrix(xx=1.0, yy=-1.0, y0=h)
        out = cairo.PDFSurface(ofn, w, h)
        ctx = cairo.Context(out)
        ctx.set_matrix(xf)
        ctx.set_line_cap(cairo.LINE_CAP_ROUND)
        ctx.set_line_join(cairo.LINE_JOIN_ROUND)
        ctx.set_line_width(0.5)
        # Plot a grid in red
        plot.plotgrid(ctx, w, h)
        # Plot the cutlines, one colour per cut along the 380-650 nm spectrum.
        colors = plot.crange(380, 650, cnt)
        # Plot in colors
        ctx.save()
        ctx.translate(offset/2-minx, offset/2-miny)
        for section, (r, g, b) in zip(cuts, colors):
            x1, y1 = section.pop(0)
            ctx.move_to(x1, y1)
            ctx.set_source_rgb(r/255.0, g/255.0, b/255.0)
            for x2, y2 in section:
                ctx.line_to(x2, y2)
            ctx.stroke()
        ctx.restore()
        # plot the color bar
        plot.plotcolorbar(ctx, w, cnt, colors)
        # Plot the filename (in untransformed coordinates so text is upright)
        ctx.save()
        ctx.set_matrix(cairo.Matrix(xx=1.0, yy=1.0))
        ctx.select_font_face('Sans')
        fh = min(10, h/40)
        ctx.set_source_rgb(0.0, 0.0, 0.0)
        ctx.set_font_size(fh)
        ctx.move_to(5, fh+5)
        txt = ' '.join(['Produced by: nc2pdf', __version__, 'on',
                        str(datetime.datetime.now())[:-10]])
        ctx.show_text(txt)
        ctx.stroke()
        # NOTE(review): fh is recomputed here but set_font_size() is not
        # called again, so the footer keeps the header font size — confirm intent.
        fh = min(30, h/20)
        ctx.move_to(5, h-15)
        txt = 'File: "{}", last modified: {}'
        ctx.show_text(txt.format(fn, time.ctime(os.path.getmtime(fn))))
        ctx.stroke()
        ctx.restore()
        # Finish the page.
        out.show_page()
        msg.say('Writing output file "{}"'.format(ofn))
        out.finish()
        msg.say('File "{}" done.'.format(fn))


if __name__ == '__main__':
    main(sys.argv[1:])
|
# NOTE(review): 'session' is imported twice below — harmless, but one should go.
from flask import Flask, request, jsonify, session,\
    make_response, url_for, redirect, abort, Response, \
    session
# from flask_script import Manager
import json, os

app = Flask(__name__)
# manager = Manager(app)
# Example: 127.0.0.1:5000?city=Beijing&country=china&city=nanchang — the part
# after '?' is the query string.
# POST requests can carry a query string just like GET.
@app.route('/', methods=['GET', 'POST'])
def index():
    """Demonstrate reading form data and query-string parameters."""
    # `request` carries all data the client sent.
    # request.form is a dict-like object with the request-body form fields.
    # .get() returns only the first value of a repeated field; use .getlist()
    # to retrieve every value submitted under the same name.
    name = request.form.get("name")
    age = request.form.get("age")
    name = request.form.get("name")
    name_list = request.form.getlist("name")
    # request.args holds the URL query-string parameters.
    city = request.args.get("city")
    country = request.args.get("country")
    city = request.args.get("city")
    city_list = request.args.getlist("city")
    # Non-form payloads (e.g. JSON) are available as raw bytes via request.data.
    print("request.data:{0}".format(request.data))
    # if request.methods == 'GET':
    #     pass
    # elif request.methods == 'POST':
    #     pass
    return '<h1>Hello {0}, age = {1}, city={2}, country={3}, form_list={4}, city_list={5}</h1>'\
        .format(name, age, city, country, name_list, city_list)
@app.route('/upload', methods=['POST'])
def upload():
    """Save an uploaded file (the 'pic' form field) to disk."""
    f_obj = request.files.get("pic")
    if f_obj is None:
        return "未上传文件"
    # Option 1: manual open/write/close
    # 1. create a file
    # f = open('./demos/http/demo.png', 'wb')
    # 2. write the upload's content into it
    # data = f_obj.read()
    # f.write(data)
    # 3. close the file
    # f.close()
    # Option 2: 'with open' closes the file automatically
    # with open('./demos/http/demo.png', 'wb') as f:
    #     f.write(f_obj.read())
    # Option 3: Flask's built-in save helper
    f_obj.save('./demos/http/demo.png')
    return "上传成功"
@app.route('/res')
def res():
    """Demonstrate the different ways of building a response."""
    # 1. Return a tuple: (body, status code, headers)
    # return "index", 666, [("name", "ai"), ("city", "Beijing")]
    # return "index", 666, {"name": "ai", "city": "Beijing"}
    # return "index", "666 statuscode description",
    # 2. Build the response explicitly with make_response
    resp = make_response("index page")
    resp.status = "999 status description"
    resp.headers['city'] = 'Beijing'
    return resp


@app.route('/login')
def login():
    """Mark the session as logged in and redirect to /hello."""
    session['logged_in'] = True
    return redirect(url_for('hello'))


@app.route('/set/<name>')
def set(name):
    # NOTE(review): this view shadows the builtin set(); renaming it would
    # change the endpoint name, so it is only flagged here.
    response = make_response(redirect(url_for('hello', name=name)))
    response.set_cookie('name', name)
    return response


@app.route('/json')
def json_res():
    """Return a JSON response."""
    data = {
        "city": "Beijing",
        "country": "china"
    }
    # # 1. With the standard library:
    # json.dumps(dic) turns a Python dict into a JSON string
    # json.loads(str) turns a JSON string back into a Python dict
    # json_str = json.dumps(data)
    # # Without setting Content-Type the response type stays application/text
    # return json_str
    # return json_str, 500, {"Content-Type": "application/json"}
    # 2. Flask's jsonify serializes the data and sets Content-Type to
    # application/json
    return jsonify(data)
    # return jsonify(name='Grey Li', gender='male'), 500


@app.route('/set_cookie')
def set_cookie():
    """Set several cookies with different lifetimes."""
    # Cookies default to session lifetime: they vanish when the browser closes.
    res = make_response('success')
    res.set_cookie('name1', 'ai')
    res.set_cookie('name2', 'ying')
    # max_age sets the lifetime in seconds
    res.set_cookie('name3', 'tom', max_age=3600)
    # A cookie can also be set through the raw Set-Cookie header
    res.headers['Set-Cookie'] = "name4=join; Expires=Thu," \
        + "28-Feb-2019 08:30:14 GMT; Max-Age=3600; Path=/"
    return res


@app.route('/get_cookie')
def get_cookie():
    # Read a cookie sent back by the client
    cookie = request.cookies.get('name3')
    return cookie


@app.route('/delete_cookie')
def delete_cookie():
    res = make_response('delete_cookie')
    res.delete_cookie('name3')
    return res


@app.route('/note')
def foo():
    """Return a plain-text response by overriding the mimetype."""
    response = make_response('Hello')
    response.mimetype = 'text/plain'
    return response


@app.route('/secret')
def secret():
    """Expose the configured secret key (demo only)."""
    # Read SECRET_KEY from the environment (.env); fall back to the default.
    app.secret_key = os.getenv('SECRET_KEY', 'ksjdaksdfjak8934523kjfka')
    return app.secret_key


@app.route('/set_session')
def set_session():
    # Flask sessions require SECRET_KEY to sign the session cookie.
    app.secret_key = os.getenv('SECRET_KEY', 'ksjdaksdfjak8934523kjfka')
    # By default Flask stores the session client-side in a signed cookie.
    print(app.secret_key)
    session['name'] = 'ai'
    return 'login sucess'
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
    """Greet by name, falling back to the ?name= query arg, then the cookie.

    Bug fix: the '/hello/<name>' rule passes `name` as a keyword argument,
    which raised TypeError because the view accepted no parameters.
    """
    if name is None:
        name = request.args.get('name')
    if name is None:
        name = request.cookies.get('name')
    return '<h1>Hello {0}</h1>'.format(name)
@app.route('/goback/<int:year>')
def goback(year):
    """Converter demo: <int:year> arrives already converted to int."""
    return '<p> Welcome to %d.' % (2019-year)


@app.route('/bar')
def bar():
    """Link back to /hello, carrying the current path in ?next=."""
    return '<h1> Foo Page</h1><a href="{}">Do something and redirect</a>'\
        .format(url_for('hello', next=request.full_path))


@app.route('/hook')
def hook():
    """Target view for exercising the request hooks below."""
    # 1/0
    print('hook test')
    return 'hook test'
@app.before_first_request
def hand_before_first_request():
    # Runs once, before the very first request is handled.
    print("hand_before_first_request():第一次请求处理前被执行")


@app.before_request
def hand_before_request():
    # Runs before every request.
    print("hand_before_request():每次请求处理前被执行")


# @app.after_this_request
# def hand_after_this_request(res):
#     # Runs after this particular view only; must accept the response object.
#     print("hand_after_this_request(res):某个视图函数请求处理后被执行")
#     return res


@app.after_request
def hand_after_request(res):
    # Runs after every request, provided the view raised no exception.
    print("hand_after_request(res):每次请求视图函数处理后执行,前提是视图函数没有异常")
    return res


@app.teardown_request
def hand_teardown_request(res):
    # Runs after every request, even when the view raised, in production
    # mode (Debug=False).
    print("hand_teardown_request(res):# 每次请求视图函数处理后执行,无论视图函数是否有异常")
    return res
@app.route('/abort')
def abort_error():
    """Abort with 404 unless the expected name/age form fields are sent.

    Bug fix: form values are strings, so the original `age == 18`
    (str vs int) could never be true and every request aborted.
    """
    name = request.form.get('name')
    age = request.form.get('age')
    if name == 'ai' and age == '18':
        return "name={0}, age={1}".format(name, age)
    else:
        # 1. Abort with a status code (most common)
        abort(404)
        # 2. Abort with a custom response body
        # res = response("failed")
        # abort(res)
# Custom handler for 404 errors
@app.errorhandler(404)
def error(err):
    return "自定义错误方法{0}".format(err)
|
#!/usr/bin/env python
# Command-line options. NOTE: this script targets Python 2 (it uses print
# statements further down), which matches the use of optparse here.
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--noMultiThreading", dest="noMultiThreading", default = False, action="store_true", help="noMultiThreading?")
parser.add_option("--selectWeight", dest="selectWeight", default=None, action="store", help="select weight?")
parser.add_option("--PDFset", dest="PDFset", default="NNPDF30", choices=["NNPDF30", "PDF4LHC15_nlo_100"], help="select the PDF set")
parser.add_option("--selectRegion", dest="selectRegion", default=None, type="int", action="store", help="select region?")
parser.add_option("--sample", dest='sample', action='store', default='TTZ_NLO_16', choices=["TTZ_LO_16", "TTZ_NLO_16", "TTZ_NLO_17", "WZ_pow_16"], help="which sample?")
parser.add_option("--small", action='store_true', help="small?")
parser.add_option("--reducedPDF", action='store_true', help="Don't use all PDF variations for tests?")
parser.add_option("--combine", action='store_true', help="Combine results?")
parser.add_option("--noKeepNorm", action='store_true', help="Keep the normalization = acceptance uncertainty only?")
parser.add_option('--logLevel', dest="logLevel", default='INFO', action='store', help="log level?", choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'TRACE', 'NOTSET'])
parser.add_option('--overwrite', dest="overwrite", default = False, action = "store_true", help="Overwrite existing output files, bool flag set to True if used")
parser.add_option('--skipCentral', dest="skipCentral", default = False, action = "store_true", help="Skip central weights")
parser.add_option('--btagWZ', dest="btagWZ", default = False, action = "store_true", help="Get the uncertainties for b-tag extrapolation of WZ")
parser.add_option('--regionsXSec', dest="regionsXSec", default = False, action = "store_true", help="Use nJet and nBTag binning")
(options, args) = parser.parse_args()
# Standard imports
import ROOT
import os
import sys
import pickle
import math
# Analysis
from TopEFT.Analysis.SetupHelpers import channel, trilepChannels, allTrilepChannels, allQuadlepChannels, quadlepChannels
from TopEFT.Analysis.regions import regionsE, noRegions, btagRegions, regions4lB, regionsXSec
from TopEFT.Tools.u_float import u_float
from TopEFT.Tools.resultsDB import resultsDB
from TopEFT.Analysis.Region import Region
#RootTools
from RootTools.core.standard import *
from TopEFT.samples.color import color
from TopEFT.Tools.cutInterpreter import cutInterpreter
import TopEFT.Tools.logger as logger
import RootTools.core.logger as logger_rt
# The module names are rebound to configured logger instances.
logger = logger.get_logger( options.logLevel, logFile = None)
logger_rt = logger_rt.get_logger(options.logLevel, logFile = None)
data_directory = "/afs/hephy.at/data/dspitzbart02/cmgTuples/"
## 2016 ##
# NOTE(review): the star-imported sample modules presumably read
# data_directory / postProcessing_directory at import time — hence the
# repeated assignments before each import. Confirm before reordering.
postProcessing_directory = "TopEFT_PP_2016_mva_v21/trilep/"
from TopEFT.samples.cmgTuples_Summer16_mAODv2_postProcessed import *
postProcessing_directory = "TopEFT_PP_2016_mva_v21/trilep/"
from TopEFT.samples.cmgTuples_Data25ns_80X_07Aug17_postProcessed import *
## 2017 ##
postProcessing_directory = "TopEFT_PP_2017_mva_v21/trilep/"
from TopEFT.samples.cmgTuples_Fall17_94X_mAODv2_postProcessed import *
postProcessing_directory = "TopEFT_PP_2017_mva_v21/trilep/"
from TopEFT.samples.cmgTuples_Data25ns_94X_Run2017_postProcessed import *
from TopEFT.Analysis.Setup import Setup
# Data-taking year is inferred from the sample name ("...17" -> 2017).
year = 2017 if options.sample.count("17") else 2016
setup3l = Setup(year=year, nLeptons=3)
if options.btagWZ:
    setup3l.parameters.update({"nBTags":(0,-1), "nJets":(1,-1)})
elif options.regionsXSec:
    setup3l.parameters.update({"nBTags":(0,-1), "nJets":(2,-1)})
setup4l = Setup(year=year, nLeptons=4)
setup4l.parameters.update({'nJets':(1,-1), 'nBTags':(1,-1), 'zMassRange':20, 'zWindow2':"offZ"})
##Summer16 samples
data_directory = "/afs/hephy.at/data/dspitzbart02/cmgTuples/"
postProcessing_directory = "TopEFT_PP_2016_mva_v21/trilep/"
dirs = {}
dirs['TTZ_LO'] = ["TTZ_LO"]
dirs['TTZToLLNuNu_ext'] = ['TTZToLLNuNu_ext_comb']
dirs['WZTo3LNu_comb'] = ['WZTo3LNu_comb']
directories = { key : [ os.path.join( data_directory, postProcessing_directory, dir) for dir in dirs[key]] for key in dirs.keys()}
#TTZ_LO_16 = Sample.fromDirectory(name="TTZ_LO", treeName="Events", isData=False, color=color.TTJets, texName="t#bar{t}Z (LO)", directory=directories['TTZ_LO'])
TTZ_NLO_16 = Sample.fromDirectory(name="TTZ_NLO", treeName="Events", isData=False, color=color.TTJets, texName="t#bar{t}Z, Z#rightarrowll (NLO)", directory=directories['TTZToLLNuNu_ext'])
WZ_pow_16 = Sample.fromDirectory(name="WZ_pow", treeName="Events", isData=False, color=color.TTJets, texName="WZ (powheg)", directory=directories['WZTo3LNu_comb'])
## Fall17 samples
#data_directory = "/afs/hephy.at/data/dspitzbart02/cmgTuples/"
#postProcessing_directory = "TopEFT_PP_2017_Fall17_v3/trilep/"
data_directory = "/afs/hephy.at/data/dspitzbart02/cmgTuples/"
postProcessing_directory = "TopEFT_PP_2017_mva_v21/trilep/"
dirs = {}
dirs['TTZToLLNuNu'] = ['TTZToLLNuNu_amc_psw']
directories = { key : [ os.path.join( data_directory, postProcessing_directory, dir) for dir in dirs[key]] for key in dirs.keys()}
TTZ_NLO_17 = Sample.fromDirectory(name="TTZ_NLO_17", treeName="Events", isData=False, color=color.TTJets, texName="t#bar{t}Z, Z#rightarrowll (NLO)", directory=directories['TTZToLLNuNu'])
# Pick the sample object matching the --sample option.
if options.sample == "TTZ_LO_16":
    sample = TTZ_LO_16
elif options.sample == "TTZ_NLO_16":
    sample = TTZ_NLO_16
elif options.sample == "TTZ_NLO_17":
    sample = TTZ_NLO_17
elif options.sample == "WZ_pow_16":
    sample = WZ_pow_16
if options.small:
    sample.reduceFiles( to = 1 )
# Region selection depends on the run mode.
if options.btagWZ:
    allRegions = btagRegions + noRegions
elif options.regionsXSec:
    allRegions = noRegions + regionsXSec
else:
    allRegions = noRegions + regionsE + regions4lB
regions = allRegions if not options.selectRegion else [allRegions[options.selectRegion]]
#setupIncl = setup.systematicClone(parameters={'mllMin':0, 'nJets':(0,-1), 'nBTags':(0,-1), 'zWindow1':'allZ'})
# Inclusive setups used to compute the cross-section normalization.
setupIncl3l = setup3l.systematicClone(parameters={'mllMin':0, 'nJets':(2,-1), 'nBTags':(1,-1), 'zWindow1':'onZ'})
setupIncl4l = setup4l.systematicClone(parameters={'mllMin':0, 'nJets':(1,-1), 'nBTags':(1,-1), 'zWindow1':'onZ'})
#setup.verbose = True
# use more inclusive selection in terms of lepton multiplicity in the future?
from TopEFT.Analysis.MCBasedEstimate import MCBasedEstimate
from TopEFT.Tools.user import analysis_results
'''
check all PDF sets that are available. will only implement parts for now.
'''
# Per-sample LHE weight bookkeeping: which stored weight indices correspond
# to scale, PDF replica/eigenvector, alpha_s and parton-shower variations.
PSweights = False
PDFset = options.PDFset
#PDFset = "NNPDF30"
scale_indices = [1,2,3,4,6,8]
LHEweight_original = 'abs(LHEweight_original)' # should be similar to genWeight
if options.sample == "TTZ_NLO_16":
    if PDFset == "NNPDF30":
        PDFType = "replicas"
        centralWeight = "genWeight" # sample produced with NNPDF30, so no central LHEweight saved apart from genWeight
        PDF_indices = range(9,109)
        aS_indices = [109,110]
    else:
        raise NotImplementedError
elif options.sample == "TTZ_NLO_17":
    if PDFset == "NNPDF31":
        raise NotImplementedError
    elif PDFset == "NNPDF30":
        PDFType = "replicas"
        centralWeight = "abs(LHEweight_wgt[972])"
        PDF_indices = range(973,1073)
        aS_indices = [1073, 1074]
    elif PDFset == "PDF4LHC15_nlo_100":
        PDFType = "hessian"
        centralWeight = "abs(LHEweight_wgt[475])"
        PDF_indices = range(476,576)
        aS_indices = [576, 577]
    else:
        raise NotImplementedError
    ## PS weights ##
    PSweights = True
    # starting from 1080: 0,1 are central. 2,3,4,5 are reduced, 6,7,8,9 are nominal, 10,11,12,13 are enhanced.
    PS_indices = [1086, 1088, 1083, 1085]#range(1086, 1090)
    #PS_indices = range(1090, 1094)
    PSweight_original = "abs(LHEweight_wgt[1080])"
elif options.sample == "WZ_pow_16":
    if PDFset == "NNPDF30":
        PDFType = "replicas"
        centralWeight = "genWeight" # sample produced with NNPDF30, so no central LHEweight saved apart from genWeight
        PDF_indices = range(9,109)
        aS_indices = [109,110]
    else:
        # CT10nlo and MMHT2014nlo68clas118 also included
        raise NotImplementedError
else:
    raise NotImplementedError
# central weights here should cancel out, but are necessary to not change the sign for NLO samples
if not options.selectWeight:
    scale_variations= [ "abs(LHEweight_wgt[%i])"%(i) for i in scale_indices ]
    PDF_variations = [ "abs(LHEweight_wgt[%i])"%(i) for i in PDF_indices ] if not options.reducedPDF else [ "abs(LHEweight_wgt[%i])"%(i) for i in PDF_indices ][:5]
    aS_variations = [ "abs(LHEweight_wgt[%i])"%(i) for i in aS_indices ]
    variations = scale_variations + PDF_variations + aS_variations + [LHEweight_original, centralWeight]
    if PSweights:
        PS_variations = [ "abs(LHEweight_wgt[%i])"%(i) for i in PS_indices ] + [PSweight_original]
        variations += PS_variations
else:
    variations = [ "abs(LHEweight_wgt[%s])"%(options.selectWeight) ]
results = {}
scale_systematics = {}
cacheDir = "/afs/hephy.at/data/dspitzbart01/TopEFT/results/PDF_v2_%s/"%(PDFset)
estimate = MCBasedEstimate(name=sample.name, sample=sample )
estimate.initCache(cacheDir)
## Results DB for scale and PDF uncertainties
PDF_cache = resultsDB(cacheDir+sample.name+'_unc.sq', "PDF", ["region", "channel", "PDFset"])
scale_cache = resultsDB(cacheDir+sample.name+'_unc.sq', "scale", ["region", "channel", "PDFset"])
PS_cache = resultsDB(cacheDir+sample.name+'_unc.sq', "PSscale", ["region", "channel", "PDFset"])
'''
Recommendation from arxiv:1510.03865
for MC sets sort the obtained values e.g. in a list, then calculate
delta(PDF)sigma = (sigma[84] - sigma[16])/2
which gives the 68% CL
'''
def wrapper(args):
    # Compute (and cache) one estimate; args = (region, channel, setup).
    r, c, setup = args
    res = estimate.cachedEstimate(r, c, setup, save=True, overwrite=options.overwrite)
    return (estimate.uniqueKey(r, c, setup), res )
jobs=[]
# remove all so to avoid unnecessary concurrency. All will be calculated as sum of the individual channels later
# NOTE(review): pop() mutates the imported channel lists in place.
seperateChannels3l = allTrilepChannels
allTrilepChannelNames = [ c.name for c in allTrilepChannels ]
seperateChannels3l.pop(allTrilepChannelNames.index('all'))
seperateChannels4l = allQuadlepChannels
allQuadlepChannelNames4l = [ c.name for c in allQuadlepChannels ]
seperateChannels4l.pop(allQuadlepChannelNames4l.index('all'))
if not options.skipCentral:
    # First run over seperate channels
    jobs.append((noRegions[0], channel(-1,-1), setupIncl3l))
    jobs.append((noRegions[0], channel(-1,-1), setupIncl4l))
    for var in variations:
        for c in seperateChannels3l:
            jobs.append((noRegions[0], c, setupIncl3l.systematicClone(sys={'reweight':[var]})))
        for c in seperateChannels4l:
            jobs.append((noRegions[0], c, setupIncl4l.systematicClone(sys={'reweight':[var]})))
## then one can sum up over all (currently done in the combine step)
#for var in variations:
#    jobs.append((noRegions[0], "all", setupIncl.systematicClone(sys={'reweight':[var]})))
if not options.combine:
    for region in regions:
        seperateChannels = seperateChannels4l if region in regions4lB else seperateChannels3l
        for c in seperateChannels:
            #for region in regions:
            setup = setup4l if region in regions4lB else setup3l
            jobs.append((region, c, setup))
            for var in variations:
                jobs.append((region, c, setup.systematicClone(sys={'reweight':[var]})))
logger.info("Created %s jobs",len(jobs))
if options.noMultiThreading:
    results = map(wrapper, jobs)
else:
    from multiprocessing import Pool
    pool = Pool(processes=8)
    results = pool.map(wrapper, jobs)
    pool.close()
    pool.join()
logger.info("All done.")
PDF_unc = []
Scale_unc = []
PS_unc = []
#regions = regionsE[1:13:3]
regions = regionsE + regions4lB
# Combine step: turn the cached per-variation yields into relative
# scale / PDF / alpha_s / parton-shower uncertainties per region.
if options.combine:
    for c in [channel(-1,-1)]:#allChannels:
        for region in regions:
            setup = setup4l if region in regions4lB else setup3l
            setupIncl = setupIncl4l if region in regions4lB else setupIncl3l
            logger.info("Region: %s", region)
            scales = []
            showerScales = []
            deltas = []
            delta_squared = 0
            # central yield inclusive and in region
            sigma_incl_central = estimate.cachedEstimate(noRegions[0], channel(-1,-1), setupIncl.systematicClone(sys={'reweight':[LHEweight_original]}))
            sigma_incl_centralWeight = estimate.cachedEstimate(noRegions[0], channel(-1,-1), setupIncl.systematicClone(sys={'reweight':[centralWeight]}))
            sigma_central = estimate.cachedEstimate(region, c, setup.systematicClone(sys={'reweight':[LHEweight_original]}))
            sigma_centralWeight = estimate.cachedEstimate(region, c, setup.systematicClone(sys={'reweight':[centralWeight]}))
            # Scale (muR/muF) envelope: keep the maximum relative deviation.
            for var in scale_variations:
                simga_incl_reweight = estimate.cachedEstimate(noRegions[0], channel(-1,-1), setupIncl.systematicClone(sys={'reweight':[var]}))
                norm = sigma_incl_central/simga_incl_reweight if not options.noKeepNorm else 1
                sigma_reweight = estimate.cachedEstimate(region, c, setup.systematicClone(sys={'reweight':[var]}))
                sigma_reweight_acc = sigma_reweight * norm
                unc = abs( ( sigma_reweight_acc - sigma_central) / sigma_central )
                scales.append(unc.val)
            scale_rel = max(scales)
            for var in PDF_variations:
                # calculate x-sec noramlization
                simga_incl_reweight = estimate.cachedEstimate(noRegions[0], channel(-1,-1), setupIncl.systematicClone(sys={'reweight':[var]}))
                norm = sigma_incl_central/simga_incl_reweight if not options.noKeepNorm else 1
                norm_centralWeight = sigma_incl_central/sigma_incl_centralWeight
                sigma_reweight = estimate.cachedEstimate(region, c, setup.systematicClone(sys={'reweight':[var]}))
                sigma_reweight_acc = sigma_reweight * norm
                ## For replicas, just get a list of all sigmas, sort it and then get the 68% interval
                deltas.append(sigma_reweight_acc.val)
                ## recommendation for hessian is to have delta_sigma = sum_k=1_N( (sigma_k - sigma_0)**2 )
                ## so I keep the norm for both sigma_k and sigma_0 to obtain the acceptance uncertainty. Correct?
                delta_squared += ( sigma_reweight.val - sigma_centralWeight.val )**2
            deltas = sorted(deltas)
            # calculate uncertainty
            if PDFType == "replicas":
                # get the 68% interval (NB: relies on Python 2 integer division)
                upper = len(deltas)*84/100-1
                lower = len(deltas)*16/100 - 1
                delta_sigma = (deltas[upper]-deltas[lower])/2
            elif PDFType == "hessian":
                delta_sigma = math.sqrt(delta_squared)
            # recommendation is to multiply uncertainty by 1.5
            deltas_as = []
            for var in aS_variations:
                simga_incl_reweight = estimate.cachedEstimate(noRegions[0], channel(-1,-1), setupIncl.systematicClone(sys={'reweight':[var]}))
                norm = sigma_incl_central/simga_incl_reweight if not options.noKeepNorm else 1
                sigma_reweight = estimate.cachedEstimate(region, c, setup.systematicClone(sys={'reweight':[var]}))
                sigma_reweight_acc = sigma_reweight * norm
                deltas_as.append(sigma_reweight_acc.val)
            scale = 1.5 if PDFset.count("NNPDF") else 1.0
            delta_sigma_alphaS = scale * ( deltas_as[0] - deltas_as[1] ) / 2.
            # add alpha_s and PDF in quadrature
            delta_sigma_total = math.sqrt( delta_sigma_alphaS**2 + delta_sigma**2 )
            # make it relative wrt central value in region
            delta_sigma_rel = delta_sigma_total/sigma_central.val
            # calculate the PS uncertainties
            if PSweights:
                sigma_incl_central = estimate.cachedEstimate(noRegions[0], channel(-1,-1), setupIncl.systematicClone(sys={'reweight':[PSweight_original]}))
                sigma_central = estimate.cachedEstimate(region, c, setup.systematicClone(sys={'reweight':[PSweight_original]}))
                print "Count:", sigma_central.val/0.5148500
                # NOTE(review): shower_scales is never used; the loop below
                # fills showerScales instead.
                shower_scales = []
                for var in PS_variations:
                    simga_incl_reweight = estimate.cachedEstimate(noRegions[0], channel(-1,-1), setupIncl.systematicClone(sys={'reweight':[var]}))
                    norm = sigma_incl_central/simga_incl_reweight
                    sigma_reweight = estimate.cachedEstimate(region, c, setup.systematicClone(sys={'reweight':[var]}))
                    sigma_reweight_acc = sigma_reweight #* norm
                    #unc = ( ( sigma_reweight_acc - sigma_central) / sigma_central ) # no abs atm
                    unc = sigma_reweight_acc / sigma_central
                    #print ( sigma_reweight_acc - sigma_central) / sigma_central
                    showerScales.append(unc.val)
                print "ISR up/down", round(showerScales[0], 3), round( showerScales[2], 3)
                print "FSR up/down", round(showerScales[1], 3), round( showerScales[3], 3)
                PS_scale_rel = max(showerScales)
            else:
                PS_scale_rel = 0.
            logger.info("Calculated PDF and alpha_s uncertainties for region %s in channel %s"%(region, c.name))
            logger.info("Central x-sec: %s", sigma_central)
            logger.info("Delta x-sec using PDF variations: %s", delta_sigma)
            logger.info("Delta x-sec using alpha_S variations: %s", delta_sigma_alphaS)
            logger.info("Delta x-sec total: %s", delta_sigma_total)
            logger.info("Relative uncertainty: %s", delta_sigma_rel)
            logger.info("Relative scale uncertainty: %s", scale_rel)
            logger.info("Relative shower scale uncertainty: %s", PS_scale_rel)
            PDF_unc.append(delta_sigma_rel)
            Scale_unc.append(scale_rel)
            PS_unc.append(PS_scale_rel)
            # Store results
            if not options.reducedPDF:
                PDF_cache.add({"region":region, "channel":c.name, "PDFset":options.PDFset}, delta_sigma_rel, overwrite=True)
            scale_cache.add({"region":region, "channel":c.name, "PDFset":'scale'}, scale_rel, overwrite=True)
            PS_cache.add({"region":region, "channel":c.name, "PDFset":'PSscale'}, PS_scale_rel, overwrite=True)
            if not options.reducedPDF:
                PDF_cache.get({"region":region, "channel":c.name, "PDFset":options.PDFset})
            scale_cache.get({"region":region, "channel":c.name, "PDFset":'scale'})
            PS_cache.get({"region":region, "channel":c.name, "PDFset":'PSscale'})
    logger.info('Min. PDF uncertainty: %.3f', min(PDF_unc))
    logger.info('Max. PDF uncertainty: %.3f', max(PDF_unc))
    logger.info('Min. scale uncertainty: %.3f', min(Scale_unc))
    logger.info('Max. scale uncertainty: %.3f', max(Scale_unc))
    logger.info('Min. PS scale uncertainty: %.3f', min(PS_unc))
    logger.info('Max. PS scale uncertainty: %.3f', max(PS_unc))
|
r"""
###############################################################################
:mod:`OpenPNM.Utilities` -- IO, geometry tools and other functions
###############################################################################
.. automodule:: OpenPNM.Utilities.IO
:members:
:undoc-members:
:show-inheritance:
"""
from . import IO
from . import transformations
from . import misc
from . import vertexops
from .__topology__ import topology
|
import math, operator
def choose(n, k):
    """Return the binomial coefficient C(n, k), or 0 when k is out of range.

    Delegates to math.comb (Python 3.8+) instead of the hand-rolled
    multiply/divide loop; identical results for the valid range.
    """
    if 0 <= k <= n:
        return math.comb(n, k)
    return 0
def is_prime(x):
    """Return True iff x is prime (trial division up to sqrt(x)).

    Bug fix: the original returned True for x < 2 (0, 1 and negatives)
    because all() over an empty range is vacuously True.
    """
    if x < 2:
        return False
    return all(x % d for d in range(2, math.isqrt(x) + 1))
def get_prime_factor(x):
    """Return the smallest prime factor of x (x itself when x is prime)."""
    limit = 1 + math.floor(math.sqrt(x))
    for cand in range(2, limit):
        if x % cand == 0:
            return cand
    # No divisor up to sqrt(x): x is prime.
    return x
# __________________________
def pe18():
    """Project Euler 18: print the maximum top-to-bottom path sum."""
    rows = """75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23""".split("\n")
    grid = [[int(tok) for tok in line.split()] for line in rows]
    # Collapse bottom-up: after processing, each cell holds the best partial
    # sum reachable from it. The loop stops at row 1, so the final answer is
    # the apex plus the best value in row 1.
    for row in range(len(grid) - 2, 0, -1):
        for col in range(row + 1):
            grid[row][col] += max(grid[row + 1][col], grid[row + 1][col + 1])
    print(grid[0][0] + max(grid[1]))
def pe33():
    """Project Euler 33: print the digit-cancelling fraction pairs (x, y).

    NOTE(review): relies on exact float equality (int/int == x/y); adequate
    for two-digit inputs, fragile in general.
    """
    for x in range(10,100):
        for y in range(x+1,100):
            a, b = str(x), str(y)
            # Cancel a digit of the numerator that also appears in the
            # denominator and compare the reduced fraction with x/y.
            if a[0] in b:
                if a[0] == b[0]:
                    if int(a[1])/int(b[1]) == x/y:
                        print(x,y)
                else:
                    if int(a[1])/int(b[0]) == x/y:
                        print(x,y)
            elif a[1] in b and a[1] != '0':
                if a[1] == b[0] and b[1] != '0':
                    if int(a[0])/int(b[1]) == x/y:
                        print(x,y)
                else:
                    if int(a[0])/int(b[0]) == x/y:
                        print(x,y)
def pe35():
    """Project Euler 35: count numbers below 10**6 whose digit rotations are all prime."""
    derp = 0  # running count of circular primes
    for x in range(2,10**6):
        # Doubling the digit string makes every rotation a contiguous slice.
        X = str(x)*2
        L = len(X)//2
        bad = False
        for n in range(L):
            num = int(X[n:n+L])
            if not is_prime(num):
                bad = True
        if not bad:
            derp += 1
    return derp
def pe47():
    """Project Euler 47: print the starts of runs of four consecutive integers
    with exactly four distinct prime factors."""
    def distinct_primes_count(n):
        # Count distinct prime factors by repeatedly stripping the smallest one.
        # NOTE(review): `n /= prime` uses float division; appears to work for
        # this range, but `//=` would be the exact form — confirm.
        f = set()
        while n > 1:
            prime = get_prime_factor(n)
            n /= prime
            f.add(prime)
        return len(f)
    counts = list(map(distinct_primes_count, range(2*3*5*7, 1000000)))
    print(counts[:100])
    for x in range(len(counts)):
        if counts[x] == 4 and counts[x] == counts[x+1] and counts[x] == counts[x+2] and counts[x] == counts[x+3]:
            print(x+2*3*5*7)
def pe67():
    """Project Euler 67: maximum path sum through the triangle in p067_triangle.txt."""
    tri = open("p067_triangle.txt")
    triangle = tri.read().splitlines()
    tri.close()
    triangle = [[int(n) for n in row.split()] for row in triangle]
    #print(triangle)
    # Collapse bottom-up (same scheme as pe18): each cell becomes the best
    # partial sum reachable from it; the loop stops at row 1.
    for y in range(len(triangle)-2,0,-1):
        for x in range(y+1):
            triangle[y][x] += max(triangle[y+1][x], triangle[y+1][x+1])
    print(triangle[0][0]+max(triangle[1]))
def pe81():
    """Project Euler 81: minimal path sum (right/down moves) through the matrix
    in p081_matrix.txt, computed along anti-diagonals."""
    mat = open("p081_matrix.txt")
    matrix = mat.read().splitlines()
    mat.close()
    matrix = [[int(n) for n in row.split(',')] for row in matrix]
    #matrix = [[131,673,234,103,18],[201,96,342,965,150],[630,803,746,422,111],[537,699,497,121,956],[805,732,524,37,331]]
    L = len(matrix)-1  # last valid row/column index
    #print(matrix)
    #print(matrix[0][0])
    # First pass: anti-diagonals in the lower-right half; edge cells have only
    # one successor (right on the bottom row, down on the right column).
    for rowColumnSum in range(L*2-1,L-1,-1):
        for y in range(L,rowColumnSum-L-1,-1):
            x = rowColumnSum-y
            #print(str(y)+" "+str(x)+" "+str(rowColumnSum))
            if y == L: matrix[y][x] += matrix[y][x+1]
            elif x == L: matrix[y][x] += matrix[y+1][x]
            else:
                #print(matrix[y+1][x], matrix[y][x+1])
                matrix[y][x] += min(matrix[y+1][x], matrix[y][x+1])
    # Second pass: anti-diagonals in the upper-left half; both successors
    # always exist here.
    for rowColumnSum in range(L-1,-1,-1):
        for y in range(rowColumnSum,-1,-1):
            x = rowColumnSum-y
            #print(y, x, rowColumnSum)
            matrix[y][x] += min(matrix[y+1][x], matrix[y][x+1])
    print(matrix[0][0], '\n'.join(str(r[:10]) for r in matrix[:10]))
##def pe493():
## f = math.factorial
## E = 0
## E += 0 * (f(50)/f(30)) / (f(70)/f(50)) * choose(70,0)
## E += 1 *
## pass
def pe587(n=2239, res=10000000):
    """Project Euler 587: fraction of the concave triangle cut off by a line.

    A unit circle is inscribed between y=0, x=1... the "concave triangle" is
    the region between the circle, the x-axis and the y-axis, with total area
    1 - pi/4. A line of slope 1/n from the origin cuts off part of it; this
    returns that part's area as a fraction of the whole, estimated by a
    Riemann sum.

    :param n: number of circles along the base, i.e. the line has slope 1/n
              (default 2239, previously hard-coded).
    :param res: number of Riemann-sum strips (default 10**7, as before).
    :return: float fraction in [0, 1].
    """
    area = 0.0
    slope = 1 / n
    for step in range(res):
        x = step / res
        # Height below both the line y = x/n and the circle (x-1)^2+(y-1)^2=1.
        area += min(slope * x, 1 - math.sqrt(2 * x - x**2))
    return area / (1 - math.pi / 4) / res
|
import os
import random
from collections import defaultdict, OrderedDict
import chainer
import cv2
import numpy as np
import config
from dataset_toolkit.compress_utils import get_zip_ROI_AU, get_AU_couple_child
from img_toolkit.face_mask_cropper import FaceMaskCropper
# obtain the cropped face image and bounding box and ground truth label for each box
class AUDataset(chainer.dataset.DatasetMixin):
    """Chainer dataset of face images with AU (Action Unit) intensity labels.

    Each example is a tuple ``(cropped_face, bbox, label)``: a CHW RGB image,
    one bounding box per AU-region, and the per-box AU intensity vectors.
    Examples are read from an id-list file with tab-separated fields:
    ``img_path \\t au_intensity_csv \\t from_img_path \\t database_name``.
    """

    def __init__(self, img_resolution, database, fold, split_name, split_index, mc_manager, prefix="", pretrained_target=""):
        self.database = database
        self.img_resolution = img_resolution
        self.split_name = split_name
        self.au_couple_dict = get_zip_ROI_AU()
        self.mc_manager = mc_manager
        self.au_couple_child_dict = get_AU_couple_child(self.au_couple_dict)
        self.AU_intensity_label = {}  # subject + "/" + emotion_seq + "/" + frame => ... not implemented
        self.pretrained_target = pretrained_target
        self.dir = config.DATA_PATH[database]  # BP4D/DISFA/ BP4D_DISFA
        id_list_file_path = os.path.join(self.dir + "/idx/{0}_fold{1}".format(fold, prefix), "intensity_{0}_{1}.txt".format(split_name, split_index))
        self.result_data = []
        self.video_offset = OrderedDict()
        self.video_count = defaultdict(int)
        print("idfile:{}".format(id_list_file_path))
        with open(id_list_file_path, "r") as file_obj:
            for idx, line in enumerate(file_obj):
                if line.rstrip():
                    line = line.rstrip()
                    img_path, au_set_str, from_img_path, current_database_name = line.split("\t")
                    # NOTE(review): np.fromstring's text mode is deprecated in
                    # modern NumPy — confirm version before swapping it out.
                    AU_intensity = np.fromstring(au_set_str, dtype=np.int32, sep=',')
                    from_img_path = img_path if from_img_path == "#" else from_img_path
                    img_path = config.RGB_PATH[current_database_name] + os.path.sep + img_path  # paths in the id file are relative
                    from_img_path = config.RGB_PATH[current_database_name] + os.path.sep + from_img_path
                    video_id = "/".join([img_path.split("/")[-3], img_path.split("/")[-2]])
                    if video_id not in self.video_offset:
                        self.video_offset[video_id] = len(self.result_data)
                    self.video_count[video_id] += 1
                    if os.path.exists(img_path):
                        self.result_data.append((img_path, from_img_path, AU_intensity, current_database_name))
        # Keep frames in order: sort by (subject, sequence, numeric frame index).
        self.result_data.sort(key=lambda entry: (entry[0].split("/")[-3], entry[0].split("/")[-2],
                                                 int(entry[0].split("/")[-1][:entry[0].split("/")[-1].rindex(".")])))
        self._num_examples = len(self.result_data)
        print("read id file done, all examples:{}".format(self._num_examples))

    def __len__(self):
        return self._num_examples

    def assign_label(self, couple_box_dict, current_AU_couple, bbox, label):
        """Fill ``bbox`` and ``label`` in place: one intensity vector per region box."""
        AU_couple_bin = dict()
        for au_couple_tuple, _ in couple_box_dict.items():
            # use connectivity components to seperate polygon
            AU_inside_box = current_AU_couple[au_couple_tuple]  # AU: intensity
            AU_bin = np.zeros(shape=len(config.AU_INTENSITY_DICT), dtype=np.int32)  # all zeros = background (no facial motion)
            for AU, intensity in sorted(AU_inside_box.items(), key=lambda e: int(e[0])):
                # NOTE(review): membership is tested against AU_SQUEEZE but the
                # index comes from AU_INTENSITY_DICT — confirm the two agree.
                if AU not in config.AU_SQUEEZE.inv:
                    continue
                idx = config.AU_INTENSITY_DICT.inv[AU]
                np.put(AU_bin, idx, intensity)
            AU_couple_bin[au_couple_tuple] = AU_bin  # for the child
        # Second pass: merge each couple's vector with its child couples' vectors.
        for au_couple_tuple, box_list in couple_box_dict.items():
            AU_child_bin = np.zeros(shape=len(config.AU_INTENSITY_DICT), dtype=np.int32)
            if au_couple_tuple in self.au_couple_child_dict:
                for au_couple_child in self.au_couple_child_dict[au_couple_tuple]:
                    AU_child_bin = np.maximum(AU_child_bin, AU_couple_bin[au_couple_child])
            AU_bin_tmp = AU_couple_bin[au_couple_tuple]  # all zeros = background (no facial motion)
            AU_bin = np.maximum(AU_child_bin, AU_bin_tmp)
            bbox.extend(box_list)
            for _ in box_list:
                label.append(AU_bin)

    def get_example(self, i):
        '''
        Returns a color image and bounding boxes. The image is in CHW format.
        The returned image is RGB.
        :param i: the index of the example
        :return: tuple of an image and its all bounding box
        '''
        # Bug fix: guard was `i > len(...)`, which let i == len slip through to
        # a raw IndexError inside self.result_data[i] with no message.
        if i >= len(self.result_data):
            raise IndexError("Index too large")
        img_path, from_img_path, AU_intensity, database_name = self.result_data[i]
        if not os.path.exists(img_path):
            raise IndexError("image file_path: {} not exist!".format(img_path))
        try:
            # print("begin fetch cropped image and bbox {}".format(img_path))
            read_img_path = img_path if from_img_path == "#" else from_img_path
            rgb_img_path = config.RGB_PATH[self.database] + os.path.sep + os.path.sep.join(read_img_path.split("/")[-3:])
            key_prefix = self.database + "@{}".format(self.img_resolution) + "|"
            if self.pretrained_target is not None and len(self.pretrained_target) > 0:
                key_prefix = self.pretrained_target + "|"
            cropped_face, AU_box_dict = FaceMaskCropper.get_cropface_and_box(read_img_path, rgb_img_path,
                                                                             channel_first=True,
                                                                             mc_manager=self.mc_manager, key_prefix=key_prefix)
        except IndexError:
            # Cropping failed: fall back to the whole resized image with one
            # full-frame box (and the frame's label) per expected box slot.
            print("crop image error:{}".format(img_path))
            face = np.transpose(cv2.resize(cv2.imread(img_path), config.IMG_SIZE), (2, 0, 1))
            whole_bbox = np.tile(np.array([1, 1, config.IMG_SIZE[1] - 1, config.IMG_SIZE[0] - 1], dtype=np.float32),
                                 (config.BOX_NUM[database_name], 1))
            whole_label = np.tile(AU_intensity, (config.BOX_NUM[database_name], 1))
            return face, whole_bbox, whole_label
        current_AU_couple = defaultdict(dict)  # key = AU couple, value = {active AU: intensity}
        couple_box_dict = OrderedDict()  # key = AU couple
        for idx, intensity in enumerate(AU_intensity):
            AU = str(config.AU_INTENSITY_DICT[idx])
            if intensity > 0:
                try:
                    current_AU_couple[self.au_couple_dict[AU]][AU] = intensity
                except KeyError:
                    print(list(self.au_couple_dict.keys()), AU)
                    raise
        for AU, box_list in sorted(AU_box_dict.items(), key=lambda e: int(e[0])):
            AU = str(AU)
            couple_box_dict[self.au_couple_dict[AU]] = box_list  # adds boxes for every AU couple, active or not
        label = []  # one box may have multiple labels, so each entry is a binary-coded vector like 10101110
        bbox = []   # background (AU = 0) boxes are chosen at random
        self.assign_label(couple_box_dict, current_AU_couple, bbox, label)
        bbox = np.stack(bbox).astype(np.float32)
        label = np.stack(label).astype(np.int32)
        assert bbox.shape[0] == label.shape[0]
        return cropped_face, bbox, label
# Read an integer and print the list of odd numbers below it (1, 3, 5, ...).
limite = int(input("Digite o numero: "))
impares = [valor for valor in range(1, limite, 2)]
print(impares)
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 15 14:49:49 2018
@author: shams
reading in raw data json files for each user, reorganizng and pickling the data
"""
import numpy as np
import pandas as pd
import networkx as nx
def usr_top_chans(usr, netWindow, nchans=5):
    """Return the user's top-``nchans`` channels ranked by HITS authority score.

    :param usr: user id whose channels should be ranked
    :param netWindow: message-log DataFrame with at least 'user', 'channel',
                      'text', 'subtype', 'type', 'ts', 'time', 'date' columns
    :param nchans: number of top channels to return (default 5)
    :return: DataFrame indexed by channel with a single 'hScore' column
    """
    chanList = list(netWindow.loc[netWindow['user'] == usr]['channel'].unique())
    # One weighted edge per (user, channel) pair; weight = message count.
    b = netWindow.groupby(['user', 'channel']).count().reset_index()
    b['weight'] = b['text']
    b = b.drop(['subtype', 'type', 'ts', 'time', 'date', 'text'], axis=1)
    G = nx.DiGraph()
    networkG = nx.from_pandas_edgelist(b, source='user', target='channel', create_using=G)
    networkG.add_weighted_edges_from(list(b.itertuples(index=False, name=None)))

    def _rank(**hits_kwargs):
        # Score this user's channels by HITS authority, best first.
        h, a = nx.hits(networkG, **hits_kwargs)
        bib = dict((k, a[k]) for k in chanList if k in a)
        chScore = pd.DataFrame.from_dict(bib, orient='index')
        chScore.columns = ['hScore']
        return chScore.sort_values(by='hScore', ascending=False)

    try:
        chScore = _rank()
    except nx.PowerIterationFailedConvergence:
        # HITS did not converge at the default tolerance; retry much looser.
        # (Was a bare `except:` that silently swallowed every error.)
        chScore = _rank(tol=1e-01)
    return chScore.iloc[0:nchans]
# ------------------------------------------------------------------ load data
user_file = pd.read_json('C:/Users/shams/OneDrive/Documents/Projects/Insight/datasets/users.json')
channel_file = pd.read_json('C:/Users/shams/OneDrive/Documents/Projects/Insight/datasets/channels.json')
allData = pd.read_json('C:/Users/shams/OneDrive/Documents/Projects/Insight/datasets/allData.json')
# ---------------------------------------------------- prepare training data
freq = 'D'    # resampling frequency (daily)
winSize = 10  # moving-average window, in periods of `freq`
#freq = 'W'
# NOTE(review): `allData['user'] == None` is elementwise and always False for
# object columns, the mask is passed where `drop` expects labels, and the
# result is not assigned — this line appears to have no effect. Probably
# `allData = allData.dropna(subset=['user'])` was intended; confirm.
allData.drop(allData['user']==None)
allData['time'] = pd.to_datetime(allData['ts'],unit='s')
networkLog = allData.sort_values(by=['ts'] )
networkLog['date']=networkLog['time'].apply(lambda x : x.date())
networkLog['date'] = pd.to_datetime(networkLog['date'])
usr_list = [x for x in allData['user'].unique() if x is not None]
bigData = pd.DataFrame()
for usr in usr_list:
    # ----------------------build user's time series
    startDate = networkLog.loc[networkLog['user']==usr]['date'].min()
    endDate = networkLog.loc[networkLog['user']==usr]['date'].max()
    usrWindow = networkLog.loc[(networkLog['date']>= startDate )& (networkLog['date']<= endDate) ]
    usrLog = usrWindow.loc[usrWindow['user']== usr]
    usr_daily = usrLog['date'].value_counts().sort_index()
    # NOTE(review): reindex() without a target index is effectively a copy, so
    # fill_value=0 does nothing here — a full date-range index was likely
    # intended. (The resample below does fill gaps, so this may be harmless.)
    usr_daily= usr_daily.reindex(fill_value=0)
    usr_daily.index = pd.DatetimeIndex(usr_daily.index)
    #usr_weekly = usr_daily.resample('W').sum()
    usr_ts = usr_daily.resample(freq).sum()
    input_ts = pd.DataFrame(usr_ts,index = usr_ts.index)
    input_ts = input_ts.rename(columns={'date':'user_ts'})
    input_ts.fillna(0, inplace=True)
    input_ts['usr_ma'] = input_ts['user_ts'].rolling(window=winSize).mean()
    # ----------------------find corresponding high score channels
    topChans = usr_top_chans(usr, usrWindow , nchans = 3)
    topChans_list = topChans.index.values.tolist()
    ch_counter = list(enumerate(topChans_list, 1))
    for counter , ch in ch_counter:
        # Per-channel daily activity, aligned to the user's index on assignment.
        channel_log = usrWindow[usrWindow['channel']==ch].sort_values(by='ts')
        channel_log['date']=channel_log['time'].apply(lambda x : x.date())
        channel_daily = channel_log['date'].value_counts().sort_index()
        channel_daily = channel_daily.reindex(fill_value=0)  # NOTE(review): same no-op reindex as above
        channel_daily.index = pd.DatetimeIndex(channel_daily.index)
        channel_ts = channel_daily.resample(freq).sum()
        input_ts['ch'+str(counter)] = channel_ts
        input_ts['ch'+str(counter)].fillna(0,inplace = True)
        input_ts['ch'+str(counter)+'_ma'] = input_ts['ch'+str(counter)].rolling(window=winSize).mean()
    # Drop the warm-up rows where the rolling means are still NaN.
    input_ts = input_ts.iloc[winSize:,:]
    input_ts['usr_tag'] = [usr for x in range(len(input_ts))]
    bigData = bigData.append(input_ts)
    #input_ts.to_json('/scratch/nshams/data/byUser/'+usr+'.json')
orig_index = bigData.index
bigData.index = [x for x in range(len(bigData))]
bigData.to_json('C:/Users/shams/OneDrive/Documents/Projects/Insight/datasets/bigData.json')
# append to the training data
|
import numpy as np
import pandas as pd
from matplotlib.colors import ListedColormap
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
import os
from sklearn.utils import resample
# ------------------------------------------------------------------ load data
df = pd.read_csv(os.path.abspath('dataset.csv'),header=None)
y = df.iloc[:, 11].values
X = df.iloc[:, [2, 4]].values  # the two features that scored highest in SBS, kept for 2-D visualization
# Upsample class 1 to a 1:1 ratio with class 0; 9040 samples used in total.
X_upsampled, y_upsampled = resample(X[y == 1], y[y == 1], replace=True, n_samples=X[y == 0].shape[0], random_state=1)
X = np.vstack((X[y==0], X_upsampled))
y = np.hstack((y[y==0], y_upsampled))
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=1, stratify=y) #30% test set
# ------------------------------------------------------ candidate classifiers
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=5, p=2, metric='minkowski')
from sklearn.linear_model import LogisticRegression
lr = LogisticRegression(solver='liblinear', penalty='l2', C=1, random_state=1)
from sklearn.tree import DecisionTreeClassifier
tree = DecisionTreeClassifier(max_depth=3, criterion='entropy', random_state=1)
from sklearn.linear_model import SGDClassifier
SGD = SGDClassifier(max_iter=70, eta0=0.01, tol=1e-3, random_state=1)
# Scale-sensitive models get a StandardScaler pipeline; the decision tree is
# scale-invariant and used as-is.
from sklearn.pipeline import Pipeline
pipeKNN = Pipeline([['sc', StandardScaler()], ['clf', knn]])
pipelr = Pipeline([['sc', StandardScaler()], ['clf', lr]])
pipeSGD = Pipeline([['sc', StandardScaler()], ['clf', SGD]])
clf_labels = ['KNN', 'Logistic Regression', 'Decision tree', 'SGD']
all_clf = [pipeKNN, pipelr, tree, pipeSGD]
# 10-fold cross-validated ROC AUC plus train/test accuracy for each model.
from sklearn.model_selection import cross_val_score
from sklearn.metrics import accuracy_score
for clf, label in zip(all_clf, clf_labels):
    scores = cross_val_score(estimator=clf, X=X_train, y=y_train, cv=10, scoring='roc_auc')
    print("ROC AUC: %0.3f (+/- %0.2f) [%s]" % (scores.mean(), scores.std(), label))
    clf.fit(X_train, y_train)
    print("train/test accuracy: %0.3f/%0.3f" %(accuracy_score(y_train, clf.predict(X_train)), accuracy_score(y_test, clf.predict(X_test))))
sc = StandardScaler()
X_train_std = sc.fit_transform(X_train)
###decision region###
# NOTE(review): the scaler fit above is duplicated here, and the pipelines are
# refit below on already-standardized data (their internal StandardScaler then
# scales twice). Harmless for plotting but confirm it is intentional.
sc = StandardScaler()
X_train_std = sc.fit_transform(X_train)
from itertools import product
x_min = X_train_std[:, 0].min() -1
x_max = X_train_std[:, 0].max() +1
y_min = X_train_std[:, 1].min() -1
y_max = X_train_std[:, 1].max() +1
xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1), np.arange(y_min, y_max, 0.1))
f, axarr = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(7,5))
# One subplot per classifier: decision regions plus the training points.
for idx, clf, tt in zip(product([0, 1], [0, 1]), all_clf, clf_labels):
    clf.fit(X_train_std, y_train)
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    axarr[idx[0], idx[1]].contourf(xx, yy, Z, alpha = 0.3)
    axarr[idx[0], idx[1]].scatter(X_train_std[y_train == 0, 0], X_train_std[y_train == 0, 1], c='blue', marker='^', s=5)
    axarr[idx[0], idx[1]].scatter(X_train_std[y_train == 1, 0], X_train_std[y_train == 1, 1], c='green', marker='o', s=5)
    axarr[idx[0], idx[1]].set_title(tt)
plt.text(-3.5, -3.5, s = 'Feature 3 [standardized]', ha='center', va='center', fontsize=12)
plt.text(-11.5, 6.5, s = 'Feature 5 [standardized]', ha='center', va='center', fontsize=12, rotation=90)
plt.show()
import graphene
from models import PhaseEnumGraphene
from utils.ordering import OrderedList
from .case import CaseTypeGrapheneEnum, DeleteCase, ImportCase, Case, resolve_case, resolve_case_types, resolve_cases
from .field import Case as CaseType, AddField
from .role import AssignRole, resolve_role_by_user, resolve_role_by_field, Role, DeleteRole
from .tasks import Task, resolve_tasks
from .user import CreateUser
from .volumetrics import VolumetricsType, resolve_volumetrics
class Query(graphene.ObjectType):
    # Root GraphQL query type: read access to tasks, cases, roles and
    # volumetrics. (Comments, not a docstring, since graphene would expose a
    # docstring as the public schema description.)
    tasks = graphene.List(
        Task, user=graphene.String(), field=graphene.String(), hours=graphene.Int(), resolver=resolve_tasks)
    cases = OrderedList(
        CaseType, field_name=graphene.String(), case_ids=graphene.List(graphene.Int), resolver=resolve_cases)
    case_types = graphene.List(CaseTypeGrapheneEnum, resolver=resolve_case_types)
    # NOTE(review): `Case` here is the type imported from .case, while `cases`
    # above uses `CaseType` (the .field module's Case) — confirm this mix of
    # two same-named types is intended.
    case = graphene.Field(Case, case_id=graphene.Int(), resolver=resolve_case)
    role_by_user = graphene.List(Role, user=graphene.String(), resolver=resolve_role_by_user)
    roles_by_field = graphene.List(Role, field=graphene.String(), resolver=resolve_role_by_field)
    volumetrics = graphene.List(
        VolumetricsType,
        facies_names=graphene.List(graphene.String),
        region_names=graphene.List(graphene.String),
        zone_names=graphene.List(graphene.String),
        phase=graphene.Argument(PhaseEnumGraphene),
        case_ids=graphene.List(graphene.Int),
        resolver=resolve_volumetrics)
class Mutations(graphene.ObjectType):
    # Root GraphQL mutation type: create/import/delete operations for fields,
    # cases, users and role assignments.
    add_field = AddField.Field()
    import_case = ImportCase.Field()
    create_user = CreateUser.Field()
    delete_case = DeleteCase.Field()
    assign_role = AssignRole.Field()
    delete_role = DeleteRole.Field()

# The executable GraphQL schema exposed by the server.
schema = graphene.Schema(query=Query, mutation=Mutations)
|
from logs import logDecorator as lD
import jsonref
import vtk
import numpy as np
config = jsonref.load(open('../config/config.json'))
logBase = config['logging']['logBase'] + '.lib.simpleFunctions.simpleObjects'
class MeshXZ():
    """Wireframe rectilinear mesh lying in the plane y = yValue (an XZ slab)."""
    def __init__(self, startX, startZ, endX, endZ, yValue=0, nPoints=20):
        # nPoints samples along X and Z; the grid is a single layer in Y.
        self.xCoords = vtk.vtkFloatArray()
        self.yCoords = vtk.vtkFloatArray()
        self.zCoords = vtk.vtkFloatArray()
        for x in np.linspace(startX, endX, nPoints):
            self.xCoords.InsertNextValue(x)
            # NOTE(review): this inserts a y value per x sample, but the grid
            # dimensions below use only one Y coordinate — confirm whether this
            # was meant to run once, outside the loop (cf. MeshXY).
            self.yCoords.InsertNextValue(yValue)
        for z in np.linspace(startZ, endZ, nPoints):
            self.zCoords.InsertNextValue(z)
        # Assemble the rectilinear grid and extract its surface geometry.
        self.rgrid = vtk.vtkRectilinearGrid()
        self.rgrid.SetDimensions(nPoints, 1, nPoints)
        self.rgrid.SetXCoordinates(self.xCoords)
        self.rgrid.SetYCoordinates(self.yCoords)
        self.rgrid.SetZCoordinates(self.zCoords)
        self.plane = vtk.vtkRectilinearGridGeometryFilter()
        self.plane.SetInputData(self.rgrid)
        self.plane.SetExtent(0, nPoints-1, 0, 0, 0, nPoints-1)
        # Render as a black wireframe with visible edges.
        self.mapper = vtk.vtkPolyDataMapper()
        self.mapper.SetInputConnection(self.plane.GetOutputPort())
        self.actor = vtk.vtkActor()
        self.actor.SetMapper(self.mapper)
        self.actor.GetProperty().SetRepresentationToWireframe()
        self.actor.GetProperty().SetColor((0,0,0))
        self.actor.GetProperty().EdgeVisibilityOn()
        return
class MeshXY():
    """Wireframe rectilinear mesh lying in the plane z = zValue (an XY slab)."""

    def __init__(self, startX, startY, endX, endY, zValue=0, nPoints=20):
        # Coordinate arrays: nPoints samples along X and Y, one fixed Z value.
        self.xCoords = vtk.vtkFloatArray()
        self.yCoords = vtk.vtkFloatArray()
        self.zCoords = vtk.vtkFloatArray()
        for sample in np.linspace(startX, endX, nPoints):
            self.xCoords.InsertNextValue(sample)
        for sample in np.linspace(startY, endY, nPoints):
            self.yCoords.InsertNextValue(sample)
        self.zCoords.InsertNextValue(zValue)
        # Assemble the grid and extract its geometry as polydata.
        self.rgrid = vtk.vtkRectilinearGrid()
        self.rgrid.SetDimensions(nPoints, nPoints, 1)
        self.rgrid.SetXCoordinates(self.xCoords)
        self.rgrid.SetYCoordinates(self.yCoords)
        self.rgrid.SetZCoordinates(self.zCoords)
        self.plane = vtk.vtkRectilinearGridGeometryFilter()
        self.plane.SetInputData(self.rgrid)
        self.plane.SetExtent(0, nPoints - 1, 0, nPoints - 1, 0, 0)
        # Standard mapper/actor pipeline, rendered as a black wireframe.
        self.mapper = vtk.vtkPolyDataMapper()
        self.mapper.SetInputConnection(self.plane.GetOutputPort())
        self.actor = vtk.vtkActor()
        self.actor.SetMapper(self.mapper)
        prop = self.actor.GetProperty()
        prop.SetRepresentationToWireframe()
        prop.SetColor((0, 0, 0))
        prop.EdgeVisibilityOn()
class Line():
    """A black 2-point line actor built from a vtkPolyData with one line cell."""
    def __init__(self, p1, p2):
        # Two endpoints (each an (x, y, z) triple) stored as the polydata points.
        self.linesPolyData = vtk.vtkPolyData()
        self.pts = vtk.vtkPoints()
        self.pts.InsertNextPoint(*p1)
        self.pts.InsertNextPoint(*p2)
        self.linesPolyData.SetPoints(self.pts)
        # A single line cell connecting point 0 to point 1.
        self.line0 = vtk.vtkLine()
        self.line0.GetPointIds().SetId(0, 0)
        self.line0.GetPointIds().SetId(1, 1)
        self.lines = vtk.vtkCellArray()
        self.lines.InsertNextCell(self.line0)
        self.linesPolyData.SetLines(self.lines)
        # Per-cell RGB color (black), attached as cell scalars.
        self.namedColors = vtk.vtkNamedColors()
        self.colors = vtk.vtkUnsignedCharArray()
        self.colors.SetNumberOfComponents(3)
        # self.colors.InsertNextTypedTuple(self.namedColors.GetColor3ub("Tomato"))
        self.colors.InsertNextTypedTuple((0,0,0))
        self.linesPolyData.GetCellData().SetScalars(self.colors)
        # Mapper/actor pipeline; line drawn 2 pixels wide.
        self.mapper = vtk.vtkPolyDataMapper()
        self.mapper.SetInputData( self.linesPolyData )
        self.actor = vtk.vtkActor()
        self.actor.GetProperty().SetLineWidth(2)
        self.actor.SetMapper( self.mapper )
        return
class Sphere():
    """A sphere actor wrapping vtkSphereSource with convenience setters."""

    def __init__(self):
        source = vtk.vtkSphereSource()
        source.SetThetaResolution(100)
        source.SetPhiResolution(100)
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(source.GetOutputPort())
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        self.source = source
        self.mapper = mapper
        self.actor = actor

    def setColor(self, color):
        """Color the sphere with an RGB triple."""
        self.actor.GetProperty().SetColor(color)

    def setResolution(self, resolution=100):
        """Set both angular (theta and phi) resolutions of the sphere."""
        self.source.SetThetaResolution(resolution)
        self.source.SetPhiResolution(resolution)
class Cone():
    """A cone actor wrapping vtkConeSource with convenience setters."""
    def __init__(self):
        # Bug fix: `vtk.vtkConeCone` does not exist and raised AttributeError
        # at construction; the correct source class is vtkConeSource.
        self.source = vtk.vtkConeSource()
        self.source.SetResolution(100)
        self.mapper = vtk.vtkPolyDataMapper()
        self.mapper.SetInputConnection( self.source.GetOutputPort() )
        self.actor = vtk.vtkActor()
        self.actor.SetMapper( self.mapper )
        return
    def setColor(self, color):
        """Color the cone with an RGB triple."""
        self.actor.GetProperty().SetColor( color )
        return
    def setResolution(self, resolution=100):
        """Set the number of facets around the cone."""
        self.source.SetResolution(resolution)
        return
class Cylinder():
    """A cylinder actor wrapping vtkCylinderSource with convenience setters."""

    def __init__(self):
        source = vtk.vtkCylinderSource()
        source.SetResolution(100)
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(source.GetOutputPort())
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        self.source = source
        self.mapper = mapper
        self.actor = actor

    def setColor(self, color):
        """Color the cylinder with an RGB triple."""
        self.actor.GetProperty().SetColor(color)

    def setResolution(self, resolution=100):
        """Set the number of facets around the cylinder barrel."""
        self.source.SetResolution(resolution)

    def setSize(self, size):
        """Make the cylinder `size` tall with a diameter equal to its height."""
        self.source.SetHeight(size)
        self.source.SetRadius(size / 2)
class Cube():
    """A unit cube actor centred at the origin, built from vtkCubeSource."""

    def __init__(self):
        source = vtk.vtkCubeSource()
        source.SetCenter(0, 0, 0)
        # Unit edge length on all three axes.
        for set_length in (source.SetXLength, source.SetYLength, source.SetZLength):
            set_length(1)
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(source.GetOutputPort())
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        self.source = source
        self.mapper = mapper
        self.actor = actor

    def setSize(self, size=1):
        """Set all three edge lengths to `size`."""
        for set_length in (self.source.SetXLength, self.source.SetYLength, self.source.SetZLength):
            set_length(size)

    def setColor(self, color):
        """Color the cube with an RGB triple."""
        self.actor.GetProperty().SetColor(color)
class Voxel():
    """A unit voxel (hexahedral cell) actor rendered as a translucent solid
    with visible edges."""
    def __init__(self):
        # The eight corners of a unit cube, in vtkVoxel's expected point
        # order (x varies fastest, then y, then z).
        pts = [
            [0, 0, 0],
            [1, 0, 0],
            [0, 1, 0],
            [1, 1, 0],
            [0, 0, 1],
            [1, 0, 1],
            [0, 1, 1],
            [1, 1, 1],
        ]
        self.points = vtk.vtkPoints()
        self.voxel = vtk.vtkVoxel()
        self.mapper = vtk.vtkDataSetMapper()
        self.actor = vtk.vtkActor()
        for i, p in enumerate(pts):
            self.points.InsertNextPoint( *p )
            self.voxel.GetPointIds().SetId(i, i)
        # Wrap the single voxel cell in an unstructured grid for the mapper.
        self.ug = vtk.vtkUnstructuredGrid()
        self.ug.SetPoints(self.points)
        self.ug.InsertNextCell(self.voxel.GetCellType(), self.voxel.GetPointIds())
        self.mapper.SetInputData(self.ug)
        self.actor.SetMapper(self.mapper)
        # Tomato-colored, mostly transparent, with thick visible edges.
        self.actor.GetProperty().SetColor(vtk.vtkNamedColors().GetColor3d("Tomato"))
        self.actor.GetProperty().EdgeVisibilityOn()
        self.actor.GetProperty().SetLineWidth(3)
        self.actor.GetProperty().SetOpacity(.1)
        return
class Text():
    """A 3-D vector-text actor built from vtkVectorText."""

    def __init__(self, text='None'):
        source = vtk.vtkVectorText()
        source.SetText(text)
        source.Update()
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(source.GetOutputPort())
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        self.source = source
        self.mapper = mapper
        self.actor = actor

    def setColor(self, color):
        """Color the text with an RGB triple."""
        self.actor.GetProperty().SetColor(color)
|
# Generated by Django 2.2.5 on 2019-12-05 22:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the SelectedTrip table: a trip the user has chosen, with the
    # hotel and transportation details attached to it.
    # (Field definitions are kept byte-identical: auto-generated migrations
    # must stay stable once applied.)

    dependencies = [
        ('trips', '0008_auto_20191129_1613'),
    ]

    operations = [
        migrations.CreateModel(
            name='SelectedTrip',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(help_text='The name of the trip', max_length=300, unique=True)),
                ('travel_style', models.CharField(help_text='The type of G Adventures trip', max_length=300)),
                ('destination', models.CharField(help_text='Initial location of the trip', max_length=300)),
                ('cost', models.IntegerField(help_text='Total cost of the trip')),
                ('duration_days', models.IntegerField(help_text='How many days this trip runs')),
                ('hotelName', models.CharField(max_length=300)),
                ('hotelCheckIn', models.CharField(max_length=300)),
                ('hotelCheckOut', models.CharField(max_length=300)),
                ('hotelCost', models.CharField(max_length=300)),
                ('transportationType', models.CharField(max_length=300)),
                ('transportationCost', models.CharField(max_length=300)),
            ],
            options={
                'ordering': ('title',),
            },
        ),
    ]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Filesystem path constants for the project's flat-file databases.

Exposes the project base directory and the admin/teacher/course
database directories under ``<BASE_DIR>/db``.
"""
import os
import sys

# abspath() guards against a relative __file__ (e.g. when the script is
# run from its own directory), where dirname() alone would yield "".
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

BASE_ADMIN_DB = os.path.join(BASE_DIR, "db", "admin")
BASE_TEACHER_DB = os.path.join(BASE_DIR, "db", "teachers")
# Backward-compatible alias: the original constant used inconsistent
# casing; keep the old name so existing importers still work.
Base_TEACHER_DB = BASE_TEACHER_DB
BASE_COURSE_DB = os.path.join(BASE_DIR, "db", "courses")
#!/usr/bin/python
# -*- coding: utf-8 -*-
data = \
'''3COM ,CoreBuilder ,7000/6000/3500/2500 ,Telnet ,debug ,synnet , ,
3COM ,CoreBuilder ,7000/6000/3500/2500 ,Telnet ,tech ,tech , ,
3COM ,HiPerARC ,v4.1.x ,Telnet ,adm ,(none) , ,
3COM ,LANplex ,2500 ,Telnet ,debug ,synnet , ,
3COM ,LANplex ,2500 ,Telnet ,tech ,tech , ,
3COM ,LinkSwitch ,2000/2700 ,Telnet ,tech ,tech , ,
3COM ,NetBuilder , ,SNMP , ,ILMI ,snmp-read ,
3COM ,Netbuilder , ,Multi ,admin ,(none) ,Admin ,
3COM ,Office Connect ISDN Routers ,5×0 ,Telnet ,n/a ,PASSWORD ,Admin ,
3COM ,SuperStack II Switch ,2200 ,Telnet ,debug ,synnet , ,
3COM ,SuperStack II Switch ,2700 ,Telnet ,tech ,tech , ,
3COM ,OfficeConnect 812 ADSL , ,Multi ,adminttd ,adminttd ,Admin ,
3COM ,Wireless AP ,ANY ,Multi ,admin ,comcomcom ,Admin ,Works on all 3com wireless APs
3COM ,CellPlex ,7000 ,Telnet ,tech ,tech ,User ,
3COM ,cellplex ,7000 ,Telnet ,admin ,admin ,Admin ,
3COM ,cellplex ,7000 , ,operator ,(none) ,Admin ,
3COM ,HiPerARC ,v4.1.x ,Telnet ,adm ,(none) ,Admin ,
3COM ,3Com SuperStack 3 , , ,security ,security ,Admin ,
3COM ,superstack II ,1100/3300 , ,3comcso ,RIP000 ,initialize ,resets all pws to defaults
3COM ,LANplex ,2500 ,Telnet ,tech ,(none) ,Admin ,
3COM ,CellPlex , ,HTTP ,admin ,synnet ,Admin ,
3COM ,NetBuilder , , ,(none) ,admin ,User ,SNMP_READ
3COM ,SuperStack II Switch ,2700 ,Telnet ,tech ,tech ,Admin ,
3COM ,CellPlex ,7000 ,Telnet ,root ,(none) ,Admin ,
3COM ,HiPerACT ,v4.1.x ,Telnet ,admin ,(none) ,Admin ,
3COM ,CellPlex ,7000 ,Telnet ,tech ,(none) ,Admin ,
3COM ,CellPlex ,7000 ,Telnet ,admin ,admin ,Admin ,
3com ,CellPlex ,7000 ,Telnet ,tech ,tech ,Admin ,
3com ,super , ,Telnet ,admin ,(none) ,Admin ,
3com ,cellplex ,7000 ,Multi ,admin ,admin ,Admin ,RS-232/telnet
3COM ,SuperStack 3 ,4XXX ,Multi ,admin ,(none) ,Admin ,
3COM ,SuperStack 3 ,4XXX ,Multi ,monitor ,monitor ,User ,
3COM ,SuperStack 3 ,4400-49XX ,Multi ,manager ,manager ,User can access/ ,
3com ,CellPlex ,7000 ,Telnet ,root ,(none) ,Admin ,
3com ,Netbuilder , ,Multi ,admin ,(none) ,Admin ,
3com ,cellplex ,7000 ,Telnet ,operator ,(none) ,Admin ,
3com ,OfficeConnect 812 ADSL ,01.50-01 ,Multi ,admin ,(none) ,Admin ,
3com ,cellplex , ,Multi ,admin ,admin ,Admin ,
3com ,HiPerACT ,v4.1.x ,Telnet ,admin ,(none) ,Admin ,
3com ,3c16405 , ,Multi ,n/a ,(none) ,Admin ,
3com ,3c16405 , ,Console ,Administrator ,(none) ,Admin ,
3com ,Switch ,3300XM ,Multi ,admin ,admin ,Admin ,
3com ,SS III Switch ,4xxx (4900 – sure) ,Telnet ,recovery ,recovery ,resets_all_to_default ,u need to power offunit.tbl_
3com ,OfficeConnect Wireless 11g , ,HTTP ,(none) ,admin ,Admin ,
3COM ,Netbuilder , ,HTTP ,Root ,(none) ,Admin ,http://10.1.0.1
3COM ,3C16405 , ,Multi ,admin ,(none) ,Admin ,
3COM ,3C16450 , ,Multi ,admin ,(none) ,Admin ,telnet or serial
3COM ,3C16406 , ,Multi ,admin ,(none) ,Admin ,telnet or serial
3COM ,OfficeConnect 812 ADSL ,01.50-01 ,Multi ,admin ,(none) ,Admin ,
3COM ,cellplex , ,Multi ,n/a ,(none) ,Admin ,
3COM ,cellplex , ,Multi ,admin ,admin ,Admin ,
3COM ,HiPerACT ,v4.1.x ,Telnet ,admin ,(none) ,Admin ,
3COM ,3c16405 , ,Console ,Administrator ,(none) ,Admin ,
3COM ,CellPlex ,7000 ,Telnet ,tech ,(none) ,Admin ,
3COM ,Switch ,3300XM ,Multi ,admin ,admin ,Admin ,
3COM ,SS III Switch ,4xxx (4900 – sure) ,Telnet ,recovery ,recovery ,resets_all_to_default ,u need to power off unit. tbl_
3COM ,OfficeConnect Wireless , ,HTTP ,(none) ,admin ,Admin ,
3COM ,11g Cable/DSL Gateway , , , , , ,
3COM ,3CRADSL72 ,1.2 ,Multi ,(none) ,1234admin ,Admin ,snmp open by default with
3COM , , , , , , ,public / private community
3COM ,CB9000 / 4007 ,3 ,Console ,Type User: FORCE ,(none) ,Admin ,This will recover a lost password
3COM , , , , , , ,and reset the switch configto
3COM , , , , , , ,Factory Default
3COM ,officeconnect , ,Multi ,n/a ,(none) ,Admin ,
3COM ,Internet Firewall ,3C16770 ,HTTP ,admin ,password ,Admin ,
3COM ,superstack II Netbuilder ,11.1 ,Multi ,n/a ,(none) ,Admin ,
3COM ,Office Connect ISDN Routers ,5×0 ,Telnet? ,n/a ,PASSWORD ,Admin ,
3com ,812 , ,HTTP ,Administrator ,admin ,Admin ,
3COM ,CoreBuilder ,7000/6000/3500/2500 ,Telnet ,n/a ,admin ,Admin ,
3COM ,CoreBuilder ,7000/6000/3500/2500 ,Telnet ,n/a ,(none) ,Admin ,
3com ,officeconnect , ,Multi ,admin ,(none) ,Admin ,
3com ,office connect ,11g ,Multi ,admin ,(none) ,User ,
3COM ,OfficeConnect ADSL ,3CRWDR100-72 ,HTTP ,(none) ,admin ,Admin ,http://192.168.1.1
3Com ,Shark Fin ,Comcast-supplied ,HTTP ,User ,Password ,Diagnostics page ,192.160.100.1
3com ,LANplex ,2500 ,Telnet ,n/a ,admin ,Admin ,
3Com ,3CRWDR100A-72 ,2.06 (Sep 21 2005 14:24:48) ,HTTP ,admin ,1234admin ,Admin ,Provided by Ya.com provider in
3COM ,OfficeConnect 812 ADSL , ,Multi ,Administrator ,admin ,Admin ,terra
3com ,corebuilder ,7000/600/3500/2500 , ,defug ,synnet , ,
3M ,VOL-0215 etc. , ,SNMP ,volition ,volition ,Admin ,for Volition fibre switches
3ware ,3DM , ,HTTP ,Administrator ,3ware ,Admin ,
5200-Serie , , , , , , ,
Accelerated ,DSL CPE and DSLAM , ,Telnet ,sysadm ,anicust , ,
ACCTON ,Wirelessrouter ,T-online ,HTTP ,none ,0 ,Admin ,YEAHH
accton t-online ,accton , ,Multi ,(none) ,0 ,Admin ,
accton t-online ,accton , ,Multi ,(none) ,0 ,Admin ,
Aceex ,Modem ADSL Router , ,HTTP ,admin ,(none) ,Admin ,
Aceex ,Modem ADSL Router , ,HTTP ,admin ,(none) ,Admin ,
Actiontec ,Wireless Broadband Router , ,Multi ,admin ,password ,Admin ,Verizon Fios Setup
ADC Kentrox ,Pacesetter Router , ,Telnet ,n/a ,secret , ,
Addon ,GWAR3000/ARM8100 , ,HTTP ,admin ,admin ,Admin ,http://www.addon-tech.com
ADIC ,Scalar 100/1000 , ,HTTP ,admin ,secure ,Admin ,
ADIC ,Scalar i2000 , ,Multi ,admin ,password ,Admin ,
adtran ,MX2800 , ,Telnet ,n/a ,adtran ,Admin ,hit enter a few times
adtran ,Smart 16/16e , ,Telnet ,n/a ,(none) ,Admin ,hit enter a few times
adtran ,Atlas 800/800Plus/810Plus/ , ,Telnet ,n/a ,Password ,Admin ,crtl-L
adtran ,Smart 16/16e , ,Telnet ,n/a ,PASSWORD ,Admin ,hit enter a few times
adtran ,NxIQ , ,Telnet ,n/a ,adtran ,Admin ,hit enter a few times
adtran ,TSU IQ/DSU IQ , ,Telnet ,n/a ,(none) ,Admin ,hit enter a few times
adtran ,Express 5110/5200/5210 , ,Telnet ,n/a ,adtran ,Admin ,hit enter a few times
adtran ,Agent Card , ,Telnet ,n/a ,ADTRAN ,Admin ,ctrl-PTT
adtran ,TSU Router Module/ , ,Telnet ,n/a ,(none) ,Admin ,hit enter a few times
adtran ,T3SU 300 , ,Telnet ,n/a ,adtran ,Admin ,Hit enter a few times
Advantek Networks ,Wireless LAN 802.11 g/b , ,Multi ,admin ,(none) ,Admin ,http://www.advanteknetworks.com/
Aethra ,Starbridge EU , ,HTTP ,admin ,password ,Admin ,
AirTies RT-210 ,AirTies RT-210 ,AirTies RT-210 ,Telnet ,admin ,admin ,Admin ,AirTies RT-210
Alcatel ,PBX ,4400 ,Port 2533 ,kermit ,kermit ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,dhs3mt ,dhs3mt ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,at4400 ,at4400 ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,mtch ,mtch ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,mtcl ,mtcl ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,root ,letacla ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,dhs3pms ,dhs3pms ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,adfexc ,adfexc ,unknown ,thanks to Nicolas Gregoire
Alcatel ,PBX ,4400 ,Port 2533 ,client ,client ,unknown ,
Alcatel ,PBX ,4400 ,Port 2533 ,install ,llatsni ,unknown ,thanks to Nicolas
Alcatel ,PBX ,4400 ,Port 2533 ,halt ,tlah ,unknown ,thanks to Nicolas
Alcatel ,Office 4200 , ,Multi ,n/a ,1064 ,Admin ,by Bazille
Alcatel ,OmniStack 6024 , ,Telnet ,admin ,switch ,Admin ,
Alcatel ,Omnistack/Omniswitch , ,Telnet/ ,diag ,switch ,Admin ,
Alcatel ,Omnistack/omniswitch , ,Telnet ,diag ,switch ,Admin ,
Alcatel ,Timestep VPN 1520 ,3.00.026 ,Permit config ,root ,permit ,Admin ,Perm/Config port 38036
Alcatel ,OXO ,1.3 ,Multi ,(none) ,admin ,User ,
Alcatel ,OmniPCX Office ,4.1 ,FTP ,ftp_inst ,pbxk1064 ,Installer ,
Alcatel ,OmniPCX Office ,4.1 ,FTP ,ftp_admi ,kilo1987 ,Admin ,
Alcatel ,OmniPCX Office ,4.1 ,FTP ,ftp_oper ,help1954 ,Operator ,
Alcatel ,OmniPCX Office ,4.1 ,FTP ,ftp_nmc ,tuxalize ,NMC ,
ALCATEL ,4400 , ,Console ,mtcl ,(none) ,User ,
Alcatel ,7300 ASAM , ,TL1 ,SUPERUSER ,ANS#150 ,Admin ,
Allied ,Telesyn , ,Multi ,manager ,friend ,Admin ,
Allied Telesyn ,AT-8024(GB) , ,Console ,n/a ,admin ,Admin ,
Allied Telesyn ,AT-8024(GB) , ,HTTP ,manager ,admin ,Admin ,
Allied Telesyn ,AT Router , ,HTTP ,root ,(none) ,Admin ,
Allied Telesyn ,ALAT8326GB , ,Multi ,manager ,manager ,Admin ,
Allied Telesyn ,AT8016F , ,Console ,manager ,friend ,Admin ,
Allied Telesyn ,AT-AR130 (U) -10 , ,HTTP ,Manager ,friend ,Admin ,Default IP is192.168.242.242
ALLNET ,T-DSL Modem ,Software Version: ,HTTP ,admin ,admin ,Admin ,
Allnet ,ALL0275 802.11g AP ,1.0.6 ,HTTP ,none ,admin ,Admin ,
ALLNET ,ALL 130DSL , , ,admin ,password , ,
Alteon ,ACEDirector3 , ,console ,admin ,(none) , ,
Alteon ,ACEswitch ,180e ,HTTP ,admin ,admin ,Admin ,
Alteon ,ACEswitch ,180e ,Telnet ,admin ,(none) , ,
Alteon ,ACEswitch ,180e ,HTTP ,admin ,linga ,Admin ,none
Alteon ,AD4 ,9 ,Console ,admin ,admin ,Admin ,Factory default
AMBIT ,ADSL , ,Telnet ,root ,(none) ,Admin ,
Ambit ,Cable Modem 60678eu ,1.12 ,Multi ,root ,root ,Admin ,
Ambit ,Cable Modem , ,Multi ,root ,root ,Admin ,Time Warner Cable issued modem
Ambit ,ntl:home 200 ,2.67.1011 ,HTTP ,root ,root ,Admin ,This is the cable modem supplied
Amitech ,wireless router and ,any ,HTTP ,admin ,admin ,Admin ,Web interface is
Andover Controls ,Infinity ,any ,Console ,acc ,acc ,Admin ,Building management system
AOC ,zenworks 4.0 , ,Multi ,n/a ,admin ,Admin ,
Apache ,Tomcat Web Server ,5 ,HTTP ,admin ,(none) ,Admin ,
APC ,9606 Smart Slot , ,Telnet ,n/a ,backdoor ,Admin ,
APC ,USV Network Management Card , ,SNMP ,n/a ,TENmanUFactOryPOWER ,Admin ,nachzulesen unter http://www.
apc ,Smartups 3000 , ,HTTP ,apc ,apc ,Admin ,By Sentinel Software.net
APC ,UPSes (Web/SNMP Mgmt Card) , ,HTTP ,device ,device ,Admin ,Secondary access account
APC ,Smart UPS , ,Multi ,apc ,apc ,Admin ,
Apple ,AirPort Base Station (Graphite) ,2 ,Multi ,(none) ,public ,public ,See Apple article number 58613
Apple ,Airport Base Station (Dual Ethernet) ,2 ,Multi ,n/a ,password ,Guest ,See Apple article number 106597
Apple ,Airport Extreme Base Station ,2 ,Multi ,n/a ,admin ,Guest ,see Apple article number 107518
Apple ,airport5 ,1.0.09 ,Multi ,root ,admin ,Admin ,192.168.1.1
Apple ,Network Assistant , , ,(none) ,xyzzy ,admin ,
Applied Innovations ,AIscout , ,Multi ,scout ,scout ,supervisor ,
Areca ,RAID controllers , ,Console ,admin ,0 ,Admin ,
Areca ,RAID controllers ,Any , ,admin ,0 ,Administrator ,http://ArecaIP:81
Arescom ,modem/router ,10XX ,Telnet ,n/a ,atc123 ,Admin ,
ARtem ,ComPoint – CPD-XT-b ,CPD-XT-b ,Telnet ,(none) ,admin ,Admin ,
Asante ,IntraSwitch , ,multi ,IntraSwitch ,Asante ,Admin ,
Asante ,IntraStack , ,multi ,IntraStack ,Asante ,Admin ,
Asante ,FM2008 , ,Telnet ,superuser ,(none) ,Admin ,
Asante ,FM2008 , ,Multi ,admin ,asante ,Admin ,
Ascend ,Yurie , ,Multi ,readonly ,lucenttech2 , ,
Ascend ,Router , ,Telnet ,n/a ,ascend ,Admin ,
Ascend ,Sahara , ,Multi ,root ,ascend , ,
Ascom ,Ascotel PBX ,ALL ,Multi ,(none) ,3ascotel ,Admin ,no user req. _by DTK
asmack ,router ,ar804u ,HTTP ,admin ,epicrouter ,Admin ,
ASMAX ,AR701u / ASMAX AR6024 , ,HTTP ,admin ,epicrouter ,Admin ,
ASMAX ,AR800C2 , ,HTTP ,admin ,epicrouter ,Admin ,
ASMAX ,AR800C2 , ,HTTP ,admin ,epicrouter ,Admin ,
Asmax ,Ar-804u , ,HTTP ,admin ,epicrouter ,Admin ,
Aspect ,ACD ,6 ,HTTP ,customer ,none ,User ,views error logs
Aspect ,ACD ,6 ,Oracle ,DTA ,TJM ,User ,
Aspect ,ACD ,7 ,Oracle ,DTA ,TJM ,User ,
Aspect ,ACD ,8 ,Oracle ,DTA ,TJM ,User ,
Asus ,wl503g ,All ,HTTP ,admin ,admin ,Admin ,
Asus ,wl500 ,All ,HTTP ,admin ,admin ,Admin ,
Asus ,wl300 ,All ,HTTP ,admin ,admin ,Admin ,
ASUS ,WL-500G ,1.7.5.6 ,HTTP ,admin ,admin ,Admin ,
asus ,WL500g , ,HTTP ,admin ,admin ,Admin ,
Asus ,WL500g Deluxe , ,HTTP ,admin ,admin ,Admin ,
Asus ,P5P800 , ,Multi ,n/a ,admin ,User ,
ASUS ,ASUS WL-330 Pocket , ,HTTP ,admin ,admin ,Admin ,http://192.168.1.1
ASUS ,ASUS SMTA Router ,Firmware: 3.5.1.3 ,HTTP + Telnet ,admin ,admin ,Admin ,Router / VoIP Gateway
ATL ,P1000 , ,Multi ,operator ,1234 ,User ,Tape Library Operator Access
ATL ,P1000 , ,Multi ,Service ,5678 ,Service Maintenance ,Tape Library Service Access
Atlantis ,A02-RA141 , ,Multi ,admin ,atlantis ,Admin ,
Atlantis ,I-Storm Lan Router ADSL , ,Multi ,admin ,atlantis ,Admin ,(submit by fedematico)
AVAYA ,g3R ,v6 ,Console ,root ,ROOT500 ,Admin ,
Avaya ,Definity ,G3Si ,Multi ,craft ,(none) ,Admin ,
Avaya ,Cajun Pxxx , ,Multi ,root ,root ,Admin ,
Avaya ,Cajun ,P550R P580 P880 ,Multi ,diag ,danger ,Developer ,
Avaya ,Cajun ,P550R P580 P880 ,Multi ,manuf ,xxyyzz ,Developer ,
Avaya ,Pxxx ,41761 ,Multi ,diag ,danger ,Admin ,
Avaya ,Pxxx ,41761 ,Multi ,manuf ,xxyyzz ,Admin ,
AVAYA ,Cajun P33x ,firmware before 3.11.0 ,SNMP ,n/a ,admin ,Admin ,check the Bugtraq archives for
Avaya ,definity ,up to rev. 6 ,any ,craft ,crftpw ,Admin ,
Avaya ,CMS Supervisor ,11 ,Console ,root ,cms500 ,Admin ,
Avaya ,Definity , ,Multi ,dadmin ,dadmin01 ,Admin ,
AVAYA ,P333 , ,Telnet ,Administrator ,ggdaseuaimhrke ,Admin ,
AVAYA ,P333 , ,Telnet ,root ,ggdaseuaimhrke ,Admin ,
AVM ,Fritz!Box ,any , ,n/a ,0 ,admin ,
AVM ,Fritz!Box Fon ,7270 , ,n/a ,n/a , ,http://fritz.box
Avocent ,Cyclade ,Linux hostnamehere ,Multi ,root ,tslinux ,Admin ,http://www.cyclades.com.au
Axis ,NETCAM ,200/240 ,Telnet ,root ,pass ,Admin ,
Axis ,All Axis Printserver ,All ,Multi ,root ,pass ,Admin ,
Axis ,Webcams , ,HTTP ,root ,pass ,Admin ,
Axis ,540/542 Print Server , ,Multi ,root ,pass ,Admin ,
axis ,2100 , ,Multi ,n/a ,(none) ,Admin ,
Axis ,NETCAM ,200/240 , ,root ,pass , ,
AXUS ,AXUS YOTTA , ,Multi ,n/a ,0 ,Admin ,Storage DAS SATA to SCSI/FC
aztech ,DSL-600E , ,HTTP ,admin ,admin ,Admin ,
Aztecj ,DSL 600EU ,62.53.2 ,Telnet ,root ,admin ,Admin ,
Aztecj ,DSL 600EU ,62.53.2 ,HTTP ,isp ,isp ,Admin ,backdoor – not in all f/w versions
Bausch Datacom ,Proxima PRI ADSL PSTN , ,Multi ,admin ,epicrouter ,Admin ,
Bay Networks ,Switch ,350T ,Telnet ,n/a ,NetICs ,Admin ,
Bay Networks ,SuperStack II , ,Telnet ,security ,security ,Admin ,
Bay Networks ,Router , ,Telnet ,User ,(none) ,User ,
Bay Networks ,Router , ,Telnet ,Manager ,(none) ,Admin ,
Bay Networks ,Router , , ,User ,(none) ,User ,
Bay Networks ,SuperStack II , , ,security ,security ,Admin ,
Bay Networks ,Switch ,350T , ,n/a ,NetICs ,Admin ,
BBR-4MG and ,BUFFALO ,ALL ,HTTP ,root ,n/a ,Admin ,
Beetel ,ADSL Modem ,220X ,Multi ,admin ,password ,Admin ,Beetel Model Provided By Airtel
Belkin ,F5D6130 , ,SNMP ,(none) ,MiniAP ,Admin ,Wireless Acess Point IEEE802.11b
Belkin ,F5D7150 ,FB ,Multi ,n/a ,admin ,Admin ,
Benq ,awl 700 wireless router ,1.3.6 Beta-002 ,Multi ,admin ,admin ,Admin ,
Billion ,Bipac 5100 , ,HTTP ,admin ,admin ,Admin ,
Billion ,BIPAC-640 AC ,640AE100 ,HTTP ,(none) ,(none) ,Admin ,
Bintec ,Bianka Routers , ,Multi ,admin ,bintec ,Admin ,
BinTec ,Bianca/Brick ,XM-5.1 ,SNMP ,n/a ,snmp-Trap ,read/write ,by rootkid
BinTec ,x1200 ,37834 ,Multi ,admin ,bintec ,Admin ,
BinTec ,x2300i ,37834 ,Multi ,admin ,bintec ,Admin ,
BinTec ,x3200 ,37834 ,Multi ,admin ,bintec ,Admin ,
Blue Coat Systems ,ProxySG ,3.x ,HTTP ,admin ,articon ,Admin ,access to command line interface
Bluecoat ,ProxySG (all model) ,SGOS 3 / SGOS4 ,HTTPS (8082) ,admin ,admin ,Admin ,
BMC ,Patrol ,6 ,Multi ,patrol ,patrol ,User ,
BMC Software ,Patrol ,all ,BMC unique ,Administrator ,the same all over ,Admin ,this default user normally for ALL
Bosch ,NWC-0455 Dinion IP Cameras , , ,service ,service ,admin ,may work in other bosch
Bosch ,NWC-0455 Dinion IP Cameras , , ,user ,user ,regular user ,
Bosch ,NWC-0455 Dinion IP Cameras , , ,live ,live ,monitor – low priv ,
Breezecom ,Breezecom Adapters ,3.x , ,n/a ,Master ,Admin ,
Breezecom ,Breezecom Adapters ,2.x , ,n/a ,laflaf ,Admin ,
Breezecom ,Breezecom Adapters ,4.4.x ,Console ,n/a ,Helpdesk ,Admin ,
Breezecom ,Breezecom Adapters ,4.x , ,n/a ,Super , ,
Breezecom ,Breezecom Adapters ,3.x , ,n/a ,Master , ,
Breezecom ,Breezecom Adapters ,2.x , ,n/a ,laflaf , ,
Broadlogic ,XLT router , ,HTTP ,webadmin ,webadmin ,Admin ,
Broadlogic ,XLT router , ,Telnet ,admin ,admin ,Admin ,
Broadlogic ,XLT router , ,Telnet ,installer ,installer ,Admin ,
Brocade ,Fabric OS ,All ,Multi ,root ,fivranne ,Admin ,Gigiabit SAN (by Nicolas Gregoire)
Brocade ,Silkworm ,all ,Multi ,admin ,password ,Admin ,Also on other Fiberchannel
Brocade ,Fabric OS , ,Multi ,admin ,password ,Admin ,Gigabit SAN
Brocade ,Fabric OS ,5320 , ,user ,password ,user ,Also on other SAN equipment
Brocade ,Fabric OS , , ,root ,Serv4EMC ,root ,
Brocade ,Fabric OS , , ,factory ,Fact4EMC ,wipe? ,I think this may wipe the device
Brother ,NC-3100h , , ,(none) ,access ,network board access ,
Brother ,NC-4100h , , ,(none) ,access ,network board access ,
Brother ,HL-1270n , ,Multi ,n/a ,access ,network board access ,
Brother ,NC-2100p , ,Multi ,(none) ,access ,Admin ,NC-2100p Print Server
Brother ,MFC-420CN ,Firmware Ver.C ,Multi ,n/a ,access ,Admin ,multifunction printer copier
Brother ,HL5270DN , ,HTTP ,admin ,access ,Admin ,
Brother ,MFC-7225 , , ,admin ,access ,admin ,
Buffalo ,Wireless Broadband Base ,WLA-G54 WBR-G54 ,HTTP ,root ,(none) ,Admin ,http://192.168.11.1
BUFFALO ,WLAR-L11-L / WLAR-L11G-L , ,HTTP ,root ,(none) ,Admin ,
Buffalo ,WHR-G300N , , ,root , ,Administrator ,
Buffalo Technology ,TeraStation , ,Multi ,admin ,password ,Admin ,
Cable And Wireless ,ADSL Modem/Router , ,Multi ,admin ,1234 ,Admin ,
Cabletron ,Netgear modem/router , , ,netman ,(none) ,Admin ,
canyon ,router , ,Multi ,Administrator ,admin ,Admin ,
Cayman ,Cayman DSL , , ,n/a ,(none) ,Admin ,
Celerity ,Mediator ,Multi ,Multi ,mediator ,mediator ,User ,
Celerity ,Mediator , ,Multi ,root ,Mau’dib ,Admin ,Assumption: the password is
Cellit ,CCPro , ,Multi ,cellit ,cellit ,Admin ,
Checkpoint ,SecurePlatform ,NG FP3 ,Console ,admin ,admin ,Admin ,
CipherTrust ,IronMail ,Any ,Multi ,admin ,password ,Admin ,
CISCO ,Cache Engine , ,Console ,admin ,diamond ,Admin ,
Cisco ,ConfigMaker , , ,cmaker ,cmaker ,Admin ,
cisco ,cva 122 , ,Telnet ,admin ,admin ,Admin ,
Cisco ,CNR ,All ,CNR GUI ,admin ,changeme ,Admin ,This is the default password for
Cisco ,Netranger/secure IDS , ,Multi ,netrangr ,attack , ,
Cisco ,BBSM ,5.0 and 5.1 ,Telnet or Named ,bbsd-client ,changeme2 ,database ,The BBSD Windows Client
Cisco ,BBSD MSDE Client ,5.0 and 5.1 ,Telnet or Named ,bbsd-client ,NULL ,database ,The BBSD Windows Client
Cisco ,BBSM Administrator ,5.0 and 5.1 ,Multi ,Administrator ,changeme ,Admin ,
Cisco ,Netranger/secure IDS ,3.0(5)S17 ,Multi ,root ,attack ,Admin ,must be changed at
Cisco ,BBSM MSDE Administrator ,5.0 and 5.1 ,IP and ,sa ,(none) ,Admin ,
Cisco ,Catalyst 4000/5000/6000 ,All ,SNMP ,(none) ,public/private/secret ,RO/RW/RW+change ,default on All Cat switches running the native CatOS CLI software.
Cisco ,PIX firewall , ,Telnet ,(none) ,cisco ,User ,
Cisco ,VPN Concentrator 3000 series ,3 ,Multi ,admin ,admin ,Admin ,
Cisco ,Content Engine , ,Telnet ,admin ,default ,Admin ,
cisco ,3600 , ,Telnet ,Administrator ,admin ,Guest ,
Cisco ,AP1200 ,IOS ,Multi ,Cisco ,Cisco ,Admin ,This is when you convert AP1200
cisco ,GSR , ,Telnet ,admin ,admin ,admin ,
Cisco ,CiscoWorks 2000 , , ,guest ,(none) ,User ,
Cisco ,CiscoWorks 2000 , , ,admin ,cisco ,Admin ,
Cisco ,ConfigMaker , , ,cmaker ,cmaker ,Admin ,
Cisco ,Ciso Aironet 1100 series ,Rev. 01 ,HTTP ,(none) ,Cisco ,Admin ,
cisco ,2600 , ,Telnet ,Administrator ,admin ,Admin ,
Cisco ,Aironet , ,Multi ,(none) ,_Cisco ,Admin ,
Cisco ,Aironet , ,Multi ,Cisco ,Cisco ,Admin ,
Cisco ,HSE , ,Multi ,root ,blender ,Admin ,
Cisco ,HSE , ,Multi ,hsa ,hsadb ,Admin ,
Cisco ,WLSE , ,Multi ,root ,blender ,Admin ,
Cisco ,WLSE , ,Multi ,wlse ,wlsedb ,Admin ,
Cisco ,Aironet 1200 , ,HTTP ,root ,Cisco ,Admin ,
Cisco ,CallManager , ,HTTP ,admin ,admin ,Admin ,nabil ouchn
Cisco ,WSLE ,all ,all ,wlseuser ,wlsepassword ,User ,see also enable passwd
Cisco ,WLSE ,all ,Console ,enable ,(none) ,enable ,use with wlseuser
Cisco ,Cisco Wireless Location Appliance ,2700 Series prior to 2.1.34.0 ,Multi ,root ,password ,Admin ,Added by DPL admin. From
Cisco ,MeetingPlace , ,Console ,technician ,2 + last 4 of Audio ,Admin ,Used for Audio Server or
Cisco ,ONS ,all ,Multi ,CISCO15 ,otbu+1 ,Admin ,Optical Network System – http/TL1
cisco ,2600 router , ,Telnet ,cisco ,(none) ,Admin ,
Cisco ,PIX ,6.3 ,Console ,enable ,(none) ,Admin ,
Cisco-Arrowpoint ,Arrowpoint , , ,admin ,system ,Admin ,
Citel ,Handset Gateway , ,HTTP ,citel ,password ,Admin ,
Citel ,Handset Gateway , ,Telnet ,(none) ,citel ,Admin ,
CNET ,CNET 4PORT ADSL MODEM ,CNAD NF400 ,Multi ,admin ,epicrouter ,Admin ,
CNET ,CSH-2400W ,unk ,HTTP ,admin ,1234 ,Admin ,
CNet ,CWR- 500 Wireless-B Router , , ,Admin ,admin ,Admin ,
Colubris ,MSC , ,HTTP ,admin ,admin ,User ,for all Colubris Devices
COM3 ,OLe , ,HTTP ,admin ,admin ,User ,
Comcast Home Networking ,Comcast Home Networking ,ALL ,HTTP ,comcast ,(none) ,Admin ,
Compaq ,Insight Manager , , ,administrator ,administrator ,Admin ,
Compaq ,Insight Manager , , ,anonymous ,(none) ,User ,
Compaq ,Insight Manager , , ,user ,user ,User ,
Compaq ,Insight Manager , , ,operator ,operator , ,
Compaq ,Insight Manager , , ,user ,public ,User ,
Compaq ,Insight Manager , , ,PFCUser ,240653C9467E45 ,User ,
comtrend ,ct536+ , ,Multi ,admin ,(none) ,Admin ,
Comtrend ,ct-536+ , ,HTTP ,admin ,admin ,Admin ,
Comtrend ,ct-536+ , ,HTTP ,admin ,1234 ,Admin ,
Conceptronic ,C54BRS4 , ,Multi ,admin ,1234 ,Admin ,Its a Generic Router From
conexant ,ACCESS RUNNER ADSL , ,Telnet ,Administrator ,admin ,Admin ,
Conexant ,Router , ,HTTP ,n/a ,epicrouter ,Admin ,
Conexant ,Router , ,HTTP ,n/a ,admin ,Admin ,yes
Corecess ,Corecess 3112 , ,HTTP ,Administrator ,admin ,Admin ,
Corecess ,6808 APC , ,Telnet ,corecess ,corecess ,User ,
corecess ,3113 , ,Multi ,admin ,(none) ,Admin ,
creative ,2015U , ,Multi ,n/a ,(none) ,Admin ,
Crossbeam ,COS / XOS , ,Lilo boot ,(none) ,x40rocks ,Admin ,At the LILO boot prompt type CTC
Union ,ATU-R130 ,81001a ,Multi ,root ,root ,Admin ,
cuproplus ,bus , ,Multi ,n/a ,(none) ,Admin ,
cyberguard ,all firewalls ,all ,console + passport1 ,cgadmin ,cgadmin ,Admin ,
Cyclades ,PR 1000 , ,Telnet ,super ,surt ,Admin ,mpacheco.inimigo.com
Cyclades ,TS800 , ,HTTP ,root ,tslinux ,Admin ,mpacheco.inimigo.com
D9287ar ,Pavilion6640c , , ,Clarissa , , ,
Dallas Semiconductors ,TINI embedded JAVA Module ,<= 1.0 ,Telnet ,root ,tini ,Admin ,
Datacom ,BSASX/101 , , ,n/a ,letmein ,Admin ,
Datawizard.net ,FTPXQ server , ,FTP ,anonymous ,any@ ,read/write on c:,
Davolink ,DV2020 , ,HTTP ,user ,user ,unknown ,
Davox ,Unison , ,Multi ,root ,davox ,Admin ,
Davox ,Unison , ,Multi ,admin ,admin ,User ,
Davox ,Unison , ,Multi ,davox ,davox ,User ,
Davox ,Unison , ,Sybase ,sa ,(none) ,Admin ,
Deerfield ,MDaemon , ,HTTP ,MDaemon ,MServer ,Admin ,web interface to manage MDaemon. fixed June 2002
Dell ,Laser Printer 3000cn / 3100cn , ,HTTP ,admin ,password ,Admin ,
Dell ,Remote Access Card , ,HTTP ,root ,calvin ,Admin ,
Dell ,2161DS Console Switch , ,HTTP ,Admin ,(none) ,Admin ,case sensitive username
Dell ,PowerConnect 2724 , ,HTTP ,admin ,(none) ,Admin ,
Dell ,WRTA-108GD , , ,admin ,admin ,Admin ,192.168.2.1
Demarc ,Network Monitor , ,multi ,admin ,my_DEMARC ,Admin ,
Deutsch Telekomm ,T-Sinus 130 DSL , ,HTTP ,(none) ,0 ,Admin ,
Deutsche Telekom ,T-Sinus DSL 130 , ,HTTP ,admin ,(none) ,Admin ,Usuallay also a WirelessLan AP :)
Deutsche Telekom ,T-Sinus 154 DSL ,13.9.38 ,HTTP ,(none) ,0 ,Admin ,thx to AwdCzAb
Deutsche Telekom ,T-Sinus 1054 DSL ,All ,HTTP ,(none) ,0 ,Admin ,
Develcon ,Orbitor Default Console , , ,n/a ,BRIDGE ,Admin ,
Develcon ,Orbitor Default Console , , ,n/a ,password ,Admin ,
DI624 ,D-LINK ,C3 ,HTTP ,admin ,password ,Admin ,hardcoded for Verizon FiOS
Dictaphone ,ProLog , , ,PBX ,PBX , ,
Dictaphone ,ProLog , , ,NETWORK ,NETWORK , ,
Dictaphone ,ProLog , , ,NETOP ,(none) , ,
Digicom ,Michelangelo , ,Multi ,admin ,michelangelo ,Admin ,
Digicom ,Michelangelo , ,Multi ,user ,password ,User ,
digicom ,Wavegate 54C , ,HTTP ,Admin ,(none) ,Admin ,
DIGICOM ,Michelangelo Wave108 , ,HTTP ,root ,admin ,Admin ,
Digicorp ,Viper , ,Telnet ,n/a ,BRIDGE ,Admin ,
Digicorp ,Viper , ,Telnet ,n/a ,password ,Admin ,
Digicorp ,Router , , ,n/a ,BRIDGE ,Admin ,
Digicorp ,Router , , ,n/a ,password ,Admin ,
Dlink ,DSL-500 , ,Multi ,admin ,admin ,Admin ,
dlink ,adsl , ,HTTP ,admin ,admin ,Admin ,
DLINK ,604 , ,Multi ,n/a ,admin ,Admin ,
D-Link ,hubs/switches , ,Telnet ,D-Link ,D-Link , ,
D-Link ,DI-704 ,rev a ,Multi ,(none) ,admin ,Admin ,Cable/DSL Routers/Switches
D-Link ,DI-804 ,v2.03 ,Multi ,admin ,(none) ,Admin ,Contributed by _CR_
D-Link ,DWL 900AP , ,Multi ,(none) ,public ,Admin ,
D-Link ,DI-614+ , ,HTTP ,user ,(none) ,User ,by rootkid
D-Link ,DWL-614+ ,rev a rev b ,HTTP ,admin ,(none) ,Admin ,http://192.168.0.1
D-Link ,D-704P ,rev b ,Multi ,admin ,(none) ,Admin ,
D-link ,DWL-900AP+ ,rev a rev b rev c ,HTTP ,admin ,(none) ,Admin ,http://192.168.0.50
D-Link ,DI-604 ,rev a rev b rev c rev e ,Multi ,admin ,(none) ,Admin ,http://192.168.0.1
D-Link ,DWL-614+ ,2.03 ,HTTP ,admin ,(none) ,Admin ,
D-Link ,D-704P , ,Multi ,admin ,admin ,Admin ,
D-Link ,DWL-900+ , ,HTTP ,admin ,(none) ,Admin ,
D-Link ,DI-704 , ,Multi ,n/a ,admin ,Admin ,
D-Link ,DI-604 ,1.62b+ ,HTTP ,admin ,(none) ,Admin ,
D-Link ,DI-624 ,all ,HTTP ,admin ,(none) ,Admin ,
D-Link ,DI-624 ,all ,HTTP ,User ,(none) ,Admin ,
D-Link ,DI-604 ,2.02 ,HTTP ,admin ,admin ,Admin ,
D-Link ,DWL 1000 , ,HTTP ,admin ,(none) ,Admin ,
D-Link ,DI-514 , ,Multi ,user ,(none) ,Admin ,
D-Link ,DI-614+ ,any ,HTTP ,admin ,(none) ,Admin ,all access ??
D-Link ,DWL 2100AP , ,Multi ,admin ,(none) ,Admin ,
D-LINK ,DSL-G664T ,A1 ,HTTP ,admin ,admin ,Admin ,SSID : G664T_WIRELESS
D-link ,504g adsl router , ,HTTP ,admin ,admin ,Admin ,from product doco
D-Link ,DSL-302G , ,Multi ,admin ,admin ,Admin ,
D-Link ,DI-624+ ,A3 ,HTTP ,admin ,admin ,Admin ,
D-Link ,DWL-2000AP+ ,1.13 ,HTTP ,admin ,(none) ,Admin ,Wireless Access Point
D-Link ,DI-614+ , ,HTTP ,admin ,admin ,Admin ,
D-Link ,Dsl-300g+ ,Teo ,Telnet ,(none) ,private ,Admin ,
D-Link ,DSL-300g+ ,Teo ,HTTP ,admin ,admin ,Admin ,
D-Link ,DI-524 ,all ,HTTP ,admin ,(none) ,Admin ,http://192.168.0.1
D-Link ,firewall ,dfl-200 ,HTTP ,admin ,admin ,Admin ,
D-link ,DSL500G , ,Multi ,admin ,admin ,Admin ,
d-link ,di-524 , ,HTTP ,admin ,(none) ,Admin ,
D-link ,DSL-504T , ,HTTP ,admin ,admin ,Admin ,
D-link ,DSL-G604T , ,Multi ,admin ,admin ,Admin ,
D-link ,Di-707p router , ,HTTP ,admin ,(none) ,Admin ,
D-Link ,DI-524 ,all ,HTTP ,user ,(none) ,User ,
D-link ,ads500g , ,HTTP ,admin ,admin ,Admin ,
D-Link ,DI-634M , ,Multi ,admin ,(none) ,Admin ,
D-Link ,DI-524 ,E1 ,Telnet ,Alphanetworks ,wrgg15_di524 ,Admin ,Password is actually firmware
D-Link ,DWL-G730AP ,1.1 ,HTTP ,admin ,(none) ,Admin ,http://192.168.0.30
D-Link ,G624T , ,Multi ,admin ,admin ,Admin ,
D-Link ,WBR-1310 ,B-1 ,Multi ,admin ,(none) ,Admin ,
Draytek ,Vigor ,all ,HTTP ,admin ,admin ,Admin ,
Draytek ,Vigor 2600 , ,HTTP ,admin ,(none) ,Admin ,
Draytek ,Vigor 2900+ , ,HTTP ,admin ,admin ,Admin ,
Draytek ,Vigor3300 series , ,Telnet ,draytek ,1234 ,Admin ,
Dynalink ,RTA230 , ,Multi ,admin ,admin ,Admin ,
E-Con ,Econ DSL Router , ,Router ,admin ,epicrouter ,Admin ,DSL Router
Edimax ,Broadband Router ,Hardware: Rev A. ,HTTP ,admin ,1234 ,Admin ,
Edimax ,EW-7205APL ,Firmware release ,Multi ,guest ,(none) ,Admin ,http://secunia.com/advisories/
Edimax ,ES-5224RXM , ,Multi ,admin ,123 ,Admin ,
edimax ,wireless adsl router ,AR-7024 ,Multi ,admin ,epicrouter ,Admin ,
Edimax ,EW-7206APG , ,HTTP ,admin ,1234 ,Admin ,
Edimax ,Edimax Fast Ethernet Switch , ,HTTP ,admin ,password ,Admin ,
Edimax ,PS-1208MFG , , ,edimax ,software01 ,Admin ,for most Edimax HW????
Edimax ,PS-1203/PS-1205Um/PS-3103 ,( not applicable ) , ,admin ,(none) OR su@psir ,Administration ,
Efficient ,Speedstream DSL , ,Telnet ,n/a ,admin ,Admin ,
Efficient ,5871 DSL Router ,v 5.3.3-0 ,Multi ,login ,admin ,Admin ,This is for access to HTTP
Efficient ,5851 , ,Telnet ,login ,password ,Admin ,might be all 5800 series
Efficient ,Speedstream DSL , , ,n/a ,admin ,Admin ,
Efficient Networks ,Speedstream 5711 ,Teledanmark version ,Console ,n/a ,4getme2 ,Admin ,for all your TDC router needs
Efficient Networks ,EN 5861 , ,Telnet ,login ,admin ,Admin ,
Efficient Networks ,5851 SDSL Router ,N/A ,Console ,(none) ,hs7mwxkk ,Admin ,On some Covad Routers
Elsa ,LANCom Office ISDN Router ,800/1000/1100 ,Telnet ,n/a ,cisco ,Admin ,
EMC ,DS-4100B , ,Console ,admin ,(none) ,Admin ,EMC Fiber Switch
Enterasys ,ANG-1105 ,unknown ,HTTP ,admin ,netadmin ,Admin ,default IP is 192.168.1.1
Enterasys ,ANG-1105 ,unknown ,Telnet ,(none) ,netadmin ,Admin ,default IP is 192.168.1.1
Enterasys ,Vertical Horizon ,ANY ,Multi ,admin ,(none) ,Admin ,this works in telnet or http
Enterasys ,Vertical Horizon ,VH-2402S ,Multi ,tiger ,tiger123 ,Admin ,
Entrust ,getAccess ,4.x and 7.x ,Web Admin gui ,websecadm ,changeme ,Admin ,Access to Admin Gui via /sek-bin/
Ericsson ,Ericsson Acc , , ,netman ,netman , ,
ericsson ,md110 pabx ,up-to-bc9 ,Multi ,(none) ,help ,varies depending on ,
ericsson ,ericsson acc , ,Multi ,n/a ,(none) ,Admin ,
Ericsson ,Ericsson Acc , , ,netman ,netman , ,
Ericsson ,MD110 , ,Telnet ,MD110 ,help ,Admin ,
Ericsson ,BP250 , ,HTTP ,admin ,default ,Admin ,
Ericsson ,SBG ,3.1 , ,expert ,expert , ,
Ericsson ACC ,Tigris Platform ,All ,Multi ,public ,(none) ,Guest ,
E-Tech ,ADSL Ethernet Router ,Annex A v2 ,HTTP ,admin ,epicrouter ,Admin ,Password can also be password
E-Tech ,Wireless 11Mbps Router , ,HTTP ,(none) ,admin ,Admin ,
E-Tech ,Router ,RTBR03 ,HTTP ,(none) ,admin ,Admin ,1wan/4ports switch router
EverFocus ,PowerPlex ,EDR1600 ,Multi ,admin ,admin ,Admin ,
EverFocus ,PowerPlex ,EDR1600 ,Multi ,supervisor ,supervisor ,Admin ,
EverFocus ,PowerPlex ,EDR1600 ,Multi ,operator ,operator ,Admin ,
Exabyte ,Magnum20 , ,FTP ,anonymous ,Exabyte ,Admin ,
Extended Systems ,Print Servers ,– , ,admin ,extendnet ,Admin ,
Extreme Networks ,All Switches , ,Multi ,admin ,(none) ,Admin ,Submitted by Eastman Rivai
F5 ,Bigip 540 , ,Multi ,root ,default ,Admin ,
F5-Networks ,BIGIP , ,Multi ,n/a ,(none) ,Admin ,
Flowpoint ,2200 SDSL , ,Telnet ,admin ,admin ,Admin ,
Flowpoint ,DSL , ,Telnet ,n/a ,password ,Admin ,Installed by Covad
Flowpoint ,100 IDSN , ,Telnet ,admin ,admin ,Admin ,
Flowpoint ,40 IDSL , ,Telnet ,admin ,admin ,Admin ,
Flowpoint ,Flowpoint DSL , , ,admin ,admin ,Admin ,
fon ,La fonera ,0.7.1 r1 ,HTTP ,admin ,admin ,Admin ,fon.com
Fortinet ,Fortigate , ,Telnet ,admin ,(none) ,Admin ,
Fortinet ,Fortigate , ,Console ,maintainer ,bcpb+serial# ,Admin ,serial# has to be in caps
Fortinet ,Fortigate , ,Console ,maintainer ,admin ,Admin ,
Foundry Networks ,IronView Network Manager ,Version 01.6.00a ,HTTP ,admin ,admin ,Admin ,http://www.foundrynet.com/
Freetech ,PC BIOS , ,Console ,n/a ,Posterie ,Admin ,
Freetech ,BIOS , ,Console ,n/a ,Posterie ,Admin ,
Fujitsu Siemens ,Routers , ,HTTP ,(none) ,connect ,Admin ,
Fujitsu Siemens ,Fibre Channel SAN storage , ,HTTP ,manage ,!manage ,Admin ,
Fujitsu Siemens ,Fibre Channel SAN storage , ,Telnet ,manage ,!manage ,Admin ,
Funk Software ,Steel Belted Radius ,3.x ,Proprietary ,admin ,radius ,Admin ,
Gericom ,Phoenix , ,Multi ,Administrator ,(none) ,Admin ,
giga ,8ippro1000 , ,Multi ,Administrator ,admin ,Admin ,
Grandstream ,GXP-2000 , ,HTTP ,admin ,1234 ,Admin ,
greatspeed ,DSL , ,HTTP ,netadmin ,nimdaten ,Admin ,ETB Colombia
Guru ,Wireless ADSL2 , ,HTTP ,admin ,admin ,Admin ,
GVC ,e800/rb4 , ,HTTP ,Administrator ,admin ,Admin ,
Hewlett Packard ,Power Manager ,3 ,HTTP ,admin ,admin ,Admin ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,HPP187 , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,HPP189 , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,HPP196 , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,INTX3 , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,ITF3000 , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,NETBASE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,REGO , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,RJE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,CONV , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,OPERATOR ,SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,OPERATOR ,DISC , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,OPERATOR ,SYSTEM , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,OPERATOR ,SUPPORT , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,OPERATOR ,COGNOS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,PCUSER ,SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,RSBCMON ,SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,SPOOLMAN ,HPOFFICE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,WP ,HPOFFICE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,ADVMAIL ,HPOFFICE DATA , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,ADVMAIL ,HP , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,SUPPORT , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,MGR , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,SERVICE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,MANAGER , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,HPP187 SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,LOTUS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,HPWORD PUB , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,FIELD ,HPONLY , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,HELLO ,MANAGER.SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,HELLO ,MGR.SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,HELLO ,FIELD.SUPPORT , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,HELLO ,OP.OPERATOR , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MAIL ,MAIL , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MAIL ,REMOTE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MAIL ,TELESUP , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MAIL ,HPOFFICE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MAIL ,MPE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MANAGER ,TCH , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MANAGER ,SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MANAGER ,SECURITY , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MANAGER ,ITF3000 , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MANAGER ,HPOFFICE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MANAGER ,COGNOS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MANAGER ,TELESUP , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,SYS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,CAROLIAN , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,VESOFT , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,XLSERVER , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,SECURITY , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,TELESUP , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,HPDESK , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,CCC , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,CNAS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,WORD , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,COGNOS , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,ROBELLE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,HPOFFICE , ,
Hewlett-Packard ,HP 2000/3000 MPE/xx , ,Multi ,MGR ,HPONLY , ,
Hewlett-Packard ,LaserJet Net Printers ,Ones with Jetdirect ,Telnet ,(none) ,(none) ,Admin ,press enter twice if no response
Hewlett-Packard ,LaserJet Net Printers ,Ones with Jetdirect ,HTTP ,(none) ,(none) ,Admin ,HTTP interface -submit
Hewlett-Packard ,LaserJet Net Printers ,Ones with Jetdirect ,FTP ,Anonymous ,(none) ,User ,send files to be printed -submit
Hewlett-Packard ,LaserJet Net Printers ,Ones with Jetdirect ,9100 ,(none) ,(none) ,User ,Type what you want and
Hewlett-Packard ,webmin ,0.84 ,HTTP ,admin ,hp.com ,Admin ,you can find more passwords
hp ,sa7200 , ,Multi ,admin ,admin ,Admin ,
hp ,sa7200 , ,Multi ,admin ,(none) ,Admin ,
HP ,ISEE , ,Multi ,admin ,isee ,Admin ,
hp ,2300 , ,Multi ,admin ,admin ,Admin ,
HP ,MSL Series Libraries , ,Multi ,Factory ,56789 ,Admin ,Factory password under Utilities.
HP ,t5000 Thin Client series , ,Console ,Administrator ,admin ,Admin ,
HP ,E1200 ,Network Storage Router , ,root ,password ,admin ,
Huawei ,E960 , , ,admin ,admin ,Admin ,
Huawei ,MT880r , ,Multi ,TMAR#HWMT8007079 ,(none) ,Admin ,mpacheco.inimigo.com
Huawei ,mt820 ,V100R006C01B021 ,HTTP ,admin ,admin ,Admin ,Telefonica Colombia ADSL
Huawei ,MT880 , ,HTTP ,admin ,admin ,Admin ,
huawei incorporate ,k3765 ,9.4.3.16284 , ,admin ,admin , ,
iblitzz ,BWA711/All Models ,All ,HTTP ,admin ,admin ,Admin ,This Information Works On All
IBM ,Ascend OEM Routers , ,Telnet ,n/a ,ascend ,Admin ,
IBM ,A21m , ,Multi ,n/a ,(none) ,Admin ,
IBM ,390e , ,Multi ,n/a ,admin ,Admin ,
ibm ,a20m , ,Multi ,n/a ,admin ,Admin ,
IBM ,TotalStorage Enterprise Server , ,Multi ,storwatch ,specialist ,Admin ,By Nicolas Gregoire
IBM ,8239 Token Ring HUB ,2.5 ,Console ,n/a ,R1QTPS ,Utility Program ,submitted by FX
IBM ,8224 HUB , ,Multi ,vt100 ,public ,Admin ,Swap MAC address chip from
IBM ,3534 F08 Fibre Switch , ,Multi ,admin ,password ,Admin ,
IBM ,switch ,8275-217 ,Telnet ,admin ,(none) ,Admin ,
IBM ,Directory – Web ,5.1 ,HTTP ,superadmin ,secret ,Admin ,Documented in Web Administration Guide
IBM ,Hardware Management ,3 ,ssh ,hscroot ,abc123 ,Admin ,
IBM ,3583 Tape Library , ,HTTP ,admin ,secure ,Admin ,
IBM ,Infoprint 6700 ,http://www.phenoelit. ,Multi ,root ,(none) ,Admin ,Also works for older 4400 printers
IBM ,T20 , ,Multi ,n/a ,admin ,Admin ,
IBM ,IBM , ,Multi ,n/a ,(none) ,Admin ,
IBM ,Remote Supervisor Adapter (RSA) , ,HTTP ,USERID ,PASSW0RD ,Admin ,
IBM ,BladeCenter Mgmt Console , ,HTTP ,USERID ,PASSW0RD ,Admin ,
IBM ,T42 , ,HTTP ,Administrator ,admin ,Admin ,
iDirect ,iNFINITY series ,3000/5000/7000 ,Telnet ,admin ,P@55w0rd! ,Admin ,to enable ssh connections to the
iDirect ,iNFINITY series ,3000/5000/7000 ,ssh ,root ,iDirect ,Admin ,first enable sshd telnet to router:
ihoi ,oihoh ,lknlkn ,HTTP ,Administrator ,pilou ,Admin ,
IMAI ,Traffic Shaper ,TS-1012 ,HTTP ,n/a ,(none) ,Admin ,default IP 1.2.3.4
inchon ,inchon ,inchon ,Multi ,admin ,admin ,Admin ,inchon
infacta ,group mail , ,Multi ,Administrator ,(none) ,Admin ,
Infoblox ,INFOBLOX Appliance , ,Multi ,admin ,(none) ,Admin ,
Infosmart ,SOHO router , ,HTTP ,admin ,0 ,Admin ,
INOVA ,ONT4BKP (IP clock) ,all ,Telnet ,iclock ,timely ,Admin ,Network clock
Integral ,RemoteView ,4 ,Console ,Administrator ,letmein ,Admin ,
Intel ,Shiva , ,Multi ,root ,(none) ,Admin ,
Intel ,Express 9520 Router , ,Multi ,NICONEX ,NICONEX ,User ,
Intel ,Express 520T Switch , ,Multi ,setup ,setup ,User ,
intel ,netstructure ,480t ,Telnet ,admin ,(none) ,Admin ,
Intel ,Wireless AP 2011 ,2.21 ,Multi ,(none) ,Intel ,Admin ,by FX
Intel ,Wireless Gateway ,3.x ,HTTP ,intel ,intel ,Admin ,
Intel ,Shiva , , ,Guest ,(none) ,User ,
Intel ,Shiva , , ,root ,(none) ,Admin ,
Intel/Shiva ,Mezza ISDN Router ,All ,Telnet ,admin ,hello ,Admin ,
Intel/Shiva ,Access Port ,All ,Telnet ,admin ,hello ,Admin ,
Interbase ,Interbase Database Server ,All ,Multi ,SYSDBA ,masterkey ,Admin ,
Intermec ,Mobile LAN ,5.25 ,Multi ,intermec ,intermec ,Admin ,
Intershop ,Intershop ,4 ,HTTP ,operator ,$chwarzepumpe ,Admin ,
Intersystems ,Cache Post-RDMS , ,Console ,system ,sys ,Admin ,Change immediately
intex ,organizer , ,Multi ,n/a ,(none) ,Admin ,
Intracom ,jetSpeed ,520/520i ,Multi ,admin ,admin ,Admin ,L3x
Inventel ,Livebox , ,Multi ,admin ,admin ,Admin ,
ion ,nelu ,nel ,Multi ,n/a ,admin ,Admin ,vreau ceva
ion ,nelu ,nel ,Multi ,Administrator ,admin ,Admin ,vreau ceva
iPSTAR ,iPSTAR Satellite Router/Radio ,v2 ,HTTP ,admin ,operator ,Admin ,For CSLoxInfo and iPSTAR Customers
iPSTAR ,iPSTAR Network Box ,v.2+ ,HTTP ,admin ,operator ,Admin ,iPSTAR Network Box is used
Irongate ,NetSurvibox 266 ,1 ,HTTP ,admin ,NetSurvibox ,Admin ,
IronPort ,Messaging Gateway Appliance , ,Multi ,admin ,ironport ,Admin ,
JAHT ,adsl router ,AR41/2A ,HTTP ,admin ,epicrouter ,Admin ,
JD Edwards ,WorldVision/OneWorld ,All(?) ,Console ,JDE ,JDE ,Admin/SECOFR ,
JDE ,WorldVision/OneWorld , ,Multi ,PRODDTA ,PRODDTA ,Admin ,Owner of database tables and
JDS ,Hydra 3000 ,r2.02 ,Console ,hydrasna ,(none) ,Admin ,www.hydrasystems.com/
Juniper ,ISG2000 , ,Multi ,netscreen ,netscreen ,Admin ,Just a note – netscreen is now made by Juniper – otherwise no change
Juniper ,Netscreen ,3.2 ,Console ,serial# ,serial# ,Admin ,Resets to factory settings
Kalatel ,Calibur DSR-2000e , ,Multi ,n/a ,3477 ,Admin ,
Kalatel ,Calibur DSR-2000e , ,on-screen ,n/a ,8111 ,restore factory ,
KASDA ,KD318-MUI ,kasda adsl router ,Multi ,admin ,adslroot ,Admin ,
Konica Minolta ,magicolor 2300 DL , ,Multi ,(none) ,1234 ,Admin ,info from: ftp://ftp.minolta-qms.com/pub/cts/out_going/
Konica Minolta ,magicolor 2430DL ,All ,Multi ,(none) ,(none) ,Admin ,Taken from reference manual
Konica Minolta ,magicolor 5430 DL , ,HTTP ,admin ,administrator ,Admin ,
Konica Minolta ,magicolor 1690MF , , ,(non) ,sysAdmin ,Administrator ,
Konica Minolta ,magicolor 1690MF , , ,(non) ,sysAdmin ,Administrator ,
Konica Minolta ,Di 2010f ,n/a ,HTTP ,n/a ,0 ,Admin ,Printer configuration interface
KTI ,KS-2260 , ,Telnet ,superuser ,123456 ,special CLI ,can be disabled by renaming the
KTI ,KS2600 , ,Console ,admin ,123456 ,Admin ,
KTI ,KS2260 , ,Console ,admin ,123 ,Admin ,
Kyocera ,EcoLink ,7.2 ,HTTP ,n/a ,PASSWORD ,Admin ,
Kyocera ,Telnet Server IB-20/21 , ,multi ,root ,root ,Admin ,
Kyocera ,Intermate LAN FS Pro 10/100 ,K82_0371 ,HTTP ,admin ,admin ,Admin ,
Kyocera ,Printer ,any , ,(none) ,admin00 , ,
Kyocera ,FS-2020D , , ,– ,admin00 ,Admin ,
Kyocera Printers ,2020D , , ,n/a ,admin00 ,Admin ,
LANCOM ,IL11 , ,Multi ,n/a ,(none) ,Admin ,
Lanier ,Digital Imager ,LD124c ,HTTP ,admin ,(none) ,Admin ,
Lanier ,LD335 , ,HTTP ,supervisor ,(none) ,Admin ,
Lantronics ,Lantronics Terminal Server , ,TCP 7000 ,n/a ,access ,Admin ,
Lantronics ,Lantronics Terminal Server , ,TCP 7000 ,n/a ,system ,Admin ,
Lantronix ,Lantronix Terminal , ,TCP 7000 ,n/a ,lantronix ,Admin ,
Lantronix ,SCS1620 , ,Multi ,sysadmin ,PASS ,Admin ,9600/N/8/1 XON/XOFF
Lantronix ,SCS3200 , ,EZWebCon ,login ,access ,Admin ,secondary port settings login:
Lantronix ,SCS400 , ,Multi ,n/a ,admin ,Admin ,secondary priv. password:
Lantronix ,SCS200 , ,Multi ,n/a ,admin ,Admin ,secondary priv. password: system
Lantronix ,SCS100 , ,Multi ,n/a ,access ,Admin ,secondary priv. password: system
Lantronix ,ETS4P , ,Multi ,n/a ,(none) ,Admin ,secondary priv. password: system
Lantronix ,ETS16P , ,Multi ,n/a ,(none) ,Admin ,secondary priv. password: system
Lantronix ,ETS32PR , ,Multi ,n/a ,(none) ,Admin ,secondary priv. password: system
Lantronix ,ETS422PR , ,Multi ,n/a ,(none) ,Admin ,secondary priv. password: system
latis network ,border guard , ,Multi ,n/a ,(none) ,Admin ,
LAXO ,IS-194G ,1.0a , ,admin ,admin ,admin ,192.168.1.254
Leviton ,47611-GT5 , ,Multi ,admin ,leviton ,Admin ,
LG ,Aria iPECS ,All ,Console ,(none) ,jannie ,maintenance ,dealer backdoor password
LG ,LAM200E / LAM200R , ,Multi ,admin ,epicrouter ,Admin ,
Linksys ,WAP11 , ,Multi ,n/a ,(none) ,Admin ,
Linksys ,DSL , ,Telnet ,n/a ,admin ,Admin ,
Linksys ,EtherFast Cable/DSL Router , ,Multi ,Administrator ,admin ,Admin ,
Linksys ,Linksys Router DSL/Cable , ,HTTP ,(none) ,admin ,Admin ,
Linksys ,BEFW11S4 ,1 ,HTTP ,admin ,(none) ,Admin ,
Linksys ,BEFSR41 ,2 ,HTTP ,(none) ,admin ,Admin ,
Linksys ,WRT54G , ,HTTP ,admin ,admin ,Admin ,
Linksys ,WAG54G , ,HTTP ,admin ,admin ,Admin ,
linksys ,ap 1120 , ,Multi ,n/a ,(none) ,Admin ,
Linksys ,Linksys DSL , , ,n/a ,admin ,Admin ,
Linksys ,WAP54G ,2 ,HTTP ,(none) ,admin ,Admin ,
Linksys ,WRT54G ,All Revisions ,HTTP ,(none) ,admin ,Admin ,
Linksys ,model WRT54GC compact , ,Multi ,(none) ,admin ,Admin ,
Linksys ,AG 241 – ADSL2 Gateway , ,Multi ,admin ,admin ,Admin ,
Linksys ,Comcast ,Comcast-supplied ,HTTP ,comcast ,1234 ,diagnostics ,192.168.0.1/docsisdevicestatus.asp
Linksys ,WAG54GS , ,Multi ,admin ,admin ,Admin ,
Linksys ,rv082 , ,Multi ,admin ,(none) ,Admin ,
linksys ,wrt54g , ,Multi ,admin ,admin ,Admin ,
Linksys ,WRT54GS ,V4 ,HTTP ,admin ,admin ,Admin ,
Linksys ,WAG354G ,2 ,HTTP ,admin ,admin ,Admin ,Applies to other linksys too
linksys ,wag354g , ,Telnet ,admin ,admin ,User ,
linksys ,BEFW11S4 ,2 ,Multi ,(none) ,admin ,Admin ,Comes up as BEFW11S4 V.2
Linksys ,ADSLME3 , , ,root ,orion99 ,Admin ,From Telus ISP (Canada)
Linksys Cisco ,RTP300 w/2 phone ports ,1 ,HTTP ,admin ,admin ,Admin ,
Linksys Cisco ,RTP300 w/2 phone ports ,1 ,HTTP ,user ,tivonpw ,update access ,use for flashing firmware
Livingston ,IRX Router , ,Telnet ,!root ,(none) , ,
Livingston ,Livingston Portmaster 3 , ,Telnet ,!root ,(none) , ,
Livingston ,Officerouter , ,Telnet ,!root ,(none) , ,
Livingstone ,Portmaster 2R , ,Telnet ,root ,(none) ,Admin ,
Lockdown ,All Lockdown Products ,up to 2.7 ,Console ,setup ,changeme (exclamation) ,User ,
LogiLink ,WL0026 ,1.68 , ,admin ,1234 ,Admin ,Realtek chipset. Default IP 192.168.2.1
Logitech ,Logitech Mobile Headset , ,Bluetooth ,(none) ,0 ,audio access ,Thierry Zoller (Thierry@sniff-em.com)
longshine ,isscfg , ,HTTP ,admin ,0 ,Admin ,
Lucent ,B-STDX9000 , ,Multi ,(any 3 characters) ,cascade , ,
Lucent ,B-STDX9000 , ,debug mode ,n/a ,cascade , ,
Lucent ,B-STDX9000 ,all ,SNMP ,n/a ,cascade ,Admin ,
Lucent ,CBX 500 , ,Multi ,(any 3 characters) ,cascade , ,
Lucent ,CBX 500 , ,debug mode ,n/a ,cascade , ,
Lucent ,GX 550 , ,SNMP readwrite ,n/a ,cascade , ,
Lucent ,MAX-TNT , ,Multi ,admin ,Ascend , ,
Lucent ,PSAX 1200 and below , ,Multi ,root ,ascend , ,
Lucent ,PSAX 1250 and above , ,Multi ,readwrite ,lucenttech1 ,Admin ,
Lucent ,PSAX 1250 and above , ,Multi ,readonly ,lucenttech2 ,Admin ,
Lucent ,Anymedia , ,Console ,LUCENT01 ,UI-PSWD-01 ,Admin ,requires GSI software
Lucent ,Anymedia , ,Console ,LUCENT02 ,UI-PSWD-02 ,Admin ,requires GSI software
Lucent ,PacketStar , ,Multi ,Administrator ,(none) ,Admin ,
Lucent ,Cellpipe 22A-BX-AR USB D , ,Console ,admin ,AitbISP4eCiG ,Admin ,
LUCENT ,M770 , ,Telnet ,super ,super ,Admin ,
Lucent ,System 75 , , ,bciim ,bciimpw , ,
Lucent ,System 75 , , ,bcim ,bcimpw , ,
Lucent ,System 75 , , ,bcms ,bcmspw , ,
Lucent ,System 75 , , ,bcnas ,bcnaspw , ,
Lucent ,System 75 , , ,blue ,bluepw , ,
Lucent ,System 75 , , ,browse ,browsepw , ,
Lucent ,System 75 , , ,browse ,looker , ,
Lucent ,System 75 , , ,craft ,craft , ,
Lucent ,System 75 , , ,craft ,craftpw , ,
Lucent ,System 75 , , ,cust ,custpw , ,
Lucent ,System 75 , , ,enquiry ,enquirypw , ,
Lucent ,System 75 , , ,field ,support , ,
Lucent ,System 75 , , ,inads ,indspw , ,
Lucent ,System 75 , , ,inads ,inads , ,
Lucent ,System 75 , , ,init ,initpw , ,
Lucent ,System 75 , , ,locate ,locatepw , ,
Lucent ,System 75 , , ,maint ,maintpw , ,
Lucent ,System 75 , , ,maint ,rwmaint , ,
Lucent ,System 75 , , ,nms ,nmspw , ,
Lucent ,System 75 , , ,rcust ,rcustpw , ,
Lucent ,System 75 , , ,support ,supportpw , ,
Lucent ,System 75 , , ,tech ,field , ,
Lucent ,Cellpipe ,20A-GX-UK ,Console ,n/a ,admin ,Admin ,
m0n0wall ,m0n0wall ,1.3 , ,admin ,mono ,Administrator ,Firewall
m0n0wall ,m0n0wall ,1.3 , ,admin ,mono ,Administrator ,Firewall
Marconi ,Fore ATM Switches , ,Multi ,ami ,(none) ,Admin ,
maxdata ,ms2137 , ,Multi ,n/a ,(none) ,Admin ,
maxdata ,7000x , ,Multi ,n/a ,(none) ,Admin ,
McAfee ,SCM 3100 ,4.1 ,Multi ,scmadmin ,scmchangeme ,Admin ,
McData ,FC Switches/Directors , ,Multi ,Administrator ,password ,Admin ,
McData ,i10k Switch , , ,McdataSE ,redips ,admin ,
Mediatrix ,MDD 2400/2600 , ,Console ,administrator ,(none) ,Admin ,From the Getting Started Guide
mediatrix 2102 ,mediatrix 2102 , ,HTTP ,admin ,1234 ,Admin ,
medion ,Routers , ,HTTP ,n/a ,medion ,Admin ,
Megastar ,BIOS , ,Console ,n/a ,star ,Admin ,
Mentec ,Micro/RSX , ,Multi ,MICRO ,RSX ,Admin ,
Mentec ,Micro/RSX , , ,MICRO ,RSX ,Admin ,
MERCURY ,234234 ,234234 ,SNMP ,Administrator ,admin ,Admin ,
MERCURY ,KT133A/686B , ,SNMP ,Administrator ,admin ,Admin ,
Meridian ,PBX ,ANY ,Telnet ,service ,smile ,System ,This is the default password
Micronet ,Access Point ,SP912 ,Telnet ,root ,default ,Admin ,
Micronet ,Micronet SP5002 , ,Console ,mac ,(none) ,Admin ,
Micronet ,3351 / 3354 , ,Multi ,admin ,epicrouter ,Admin ,
Microplex ,Print Server , ,Telnet ,root ,root ,Admin ,
Microprocessing , , , , , , ,h3kdocs/H3R25C04.pdf
microRouter ,900i , ,Console/Multi ,n/a ,letmein ,Admin ,
Mikrotik ,Router OS ,all ,Telnet ,admin ,(none) ,Admin ,also for SSH and Web access
Mikrotik ,Router OS ,42775 ,HTTP ,admin ,(none) ,Admin ,
Mikrotik ,Mikrotik , ,Telnet ,admin ,(none) ,Admin ,
Milan ,mil-sm801p , ,Multi ,root ,root ,Admin ,
Minolta PagrPro ,QMS 4100GN PagePro , ,HTTP ,n/a ,sysadm ,Admin ,
Minolta QMS ,Magicolor 3100 ,3.0.0 ,HTTP ,operator ,(none) ,Admin ,
Minolta QMS ,Magicolor 3100 ,3.0.0 ,HTTP ,admin ,(none) ,Admin ,Gives access to Accounting
Mintel ,Mintel PBX , , ,n/a ,SYSTEM ,Admin ,
Mintel ,Mintel PBX , , ,n/a ,SYSTEM ,Admin ,
Mitel ,3300 ICP ,all ,HTTP ,system ,password ,Admin ,
Mitel ,SX2000 ,all ,Multi ,n/a ,(none) ,Admin ,
Motorola ,Cablerouter , ,Telnet ,cablecom ,router ,Admin ,
Motorola ,WR850G ,4.03 ,HTTP ,admin ,motorola ,Admin ,higher revisions likely the same
Motorola ,Wireless Router ,WR850G ,HTTP ,admin ,motorola ,Admin ,
Motorola ,SBG900 , ,HTTP ,admin ,motorola ,Admin ,
Motorola ,Motorola Cablerouter , , ,cablecom ,router ,Admin ,
motorola ,vanguard , ,Multi ,n/a ,(none) ,Admin ,
motorola ,sgb900 , ,HTTP ,admin ,motorola ,Admin ,
Motorola ,SURFboard ,SBV5120 ,HTTP ,admin ,motorola ,Admin ,
mro software ,maximo ,v4.1 ,Multi ,SYSADM ,sysadm ,Admin ,
Mutare ,EVM Admin ,All ,HTTP ,(none) ,admin ,Admin ,http://www.mutare.com/data/kb/
NAI ,Intrushield IPS ,1200/2600/4000 ,SSH + Web ,admin ,admin123 ,Admin ,By Nicolas Gregoire
NAI ,Entercept , ,Management console ,GlobalAdmin ,GlobalAdmin ,Admin ,By Nicolas Gregoire : must
NEC ,WARPSTAR-BaseStation , ,Telnet ,n/a ,(none) ,Admin ,
Netcomm ,NB1300 , ,HTTP ,admin ,password ,Admin ,
Netgear ,FR314 , ,HTTP ,admin ,password ,Admin ,
NetGear ,RM356 ,None ,Telnet ,(none) ,1234 ,Admin ,shutdown the router via internet
Netgear ,MR-314 ,3.26 ,HTTP ,admin ,1234 ,Admin ,
Netgear ,RT314 , ,HTTP ,admin ,admin ,Admin ,
Netgear ,RP614 , ,HTTP ,admin ,password ,Admin ,
Netgear ,RP114 ,3.26 ,Telnet ,(none) ,1234 ,Admin ,telnet 192.168.0.1
Netgear ,WG602 ,Firmware Version ,HTTP ,super ,5777364 ,Admin ,
Netgear ,WG602 ,Firmware Version ,HTTP ,superman ,21241036 ,Admin ,
Netgear ,WG602 ,Firmware Version ,HTTP ,super ,5777364 ,Admin ,
Netgear ,MR814 , ,HTTP ,admin ,password ,Admin ,
Netgear ,FVS318 , ,HTTP ,admin ,password ,Admin ,
Netgear ,DM602 , ,FTP Telnet and HTTP ,admin ,password ,Admin ,
netgear ,FM114P , ,Multi ,n/a ,(none) ,Admin ,
NetGear ,WGT624 ,2 ,HTTP ,admin ,password ,Admin ,
Netgear ,FR114P , ,HTTP ,admin ,password ,Admin ,
Netgear ,ME102 , ,SNMP ,(none) ,private ,Admin ,Standard IP-Address is
Netgear ,WGR614 ,v4 ,Multi ,admin ,password ,Admin ,192.168.0.1 OR
Netgear ,RP114 ,3.20-3.26 ,HTTP ,admin ,1234 ,Admin ,default http://192.168.0.1
Netgear ,dg834g , ,HTTP ,admin ,password ,Admin ,it should be work also with dg834gt
Netgear ,Router/Modem , ,Multi ,admin ,password ,Admin ,
Netgear ,MR314 , ,Multi ,admin ,1234 ,Admin ,
Netgear ,GSM7224 , ,HTTP ,admin ,(none) ,Admin ,
Netgear ,ADSL Modem DG632 ,V3.3.0a_cx ,HTTP ,admin ,password ,Admin ,
Netgear ,WGT634U , ,HTTP ,admin ,password ,Admin ,
Netgear ,FWG114P , ,Multi ,n/a ,admin ,password ,
Netgear ,GS724t ,V1.0.1_1104 ,HTTP ,n/a ,password ,Admin ,
NetGear ,Comcast ,Comcast-supplied ,HTTP ,comcast ,1234 ,diagnostics page ,192.168.0.1/docsisdevicestatus.html
Netgear ,WG602 ,1.7.x ,HTTP ,admin ,password ,Admin ,Default IP: DHCP or
Netgear ,CG814CCR ,2 ,Multi ,cusadmin ,highspeed ,Admin ,Comcast small business router.
netgear ,sc101 , ,management ,admin ,password ,Admin ,
netgear ,DG834GT ,192.168.0.1 ,Multi ,admin ,Password ,Admin ,
Netgear ,FVS114 ,GR ,HTTP ,admin ,password ,Admin ,
Netgear ,RO318 , ,Multi ,admin ,1234 ,Admin ,
Netgear ,WGT624 , ,Serial console ,Gearguy ,Geardog ,Admin ,see http://wiki.openwrt.org/
Netgear ,Wifi Router ,WGT 624 v3 ,HTTP ,admin ,password ,Admin ,slawcio26
netgear ,dg834 , ,Multi ,n/a ,admin ,Admin ,
Netgear ,FSM7326P 24+2 L3 mANAGED , ,HTTP ,admin ,(none) ,Admin ,
Netgear ,WGR614 ,v6 ,HTTP ,admin ,draadloos ,Admin ,Dutch routers
Netgear ,MR814 ,v1 ,HTTP ,admin ,password ,Admin ,
Netgear ,ReadyNas Duo ,RND2000 , ,admin ,netgear1 ,Admin ,v4 firmware onwards
Netgear ,ReadyNas Duo ,RND2000 , ,admin ,infrant1 ,Admin ,Upto v3 firmware
Netgear ,WGR614 ,9 , ,admin ,password ,Admin ,192.168.1.1
NETGEAR ,DG834G ,3 , ,admin ,password , ,
NetGenesis ,NetAnalysis Web Reporting , ,HTTP ,naadmin ,naadmin ,Admin ,
Netopia ,Netopia 9500 , ,Telnet ,netopia ,netopia ,Admin ,
Netopia ,R910 , ,Multi ,admin ,(none) ,Admin ,
Netopia ,3351 , ,Multi ,n/a ,(none) ,Admin ,
Netopia ,4542 , ,Multi ,admin ,noway ,Admin ,
Netopia ,Netopia 7100 , , ,(none) ,(none) , ,
Netopia ,Netopia 9500 , , ,netopia ,netopia , ,
Netport ,Express 10/100 , ,multi ,setup ,setup ,Admin ,
Netscreen ,Firewall , ,multi ,netscreen ,netscreen ,Admin ,
netscreen ,firewall , ,Telnet ,Administrator ,(none) ,Admin ,
netscreen ,firewall , ,Telnet ,admin ,(none) ,Admin ,
netscreen ,firewall , ,Telnet ,operator ,(none) ,Admin ,
netscreen ,firewall , ,HTTP ,Administrator ,(none) ,Admin ,
Netstar ,Netpilot , ,Multi ,admin ,password ,Admin ,http://www.netpilot.com/products/
Network Appliance ,NetCache ,any ,Multi ,admin ,NetCache ,Admin ,
Network Associates ,WebShield Security , ,HTTP ,e500 ,e500changeme ,Admin ,
Network Associates ,WebShield Security , ,HTTP ,e250 ,e250changeme ,Admin ,
Network Everywhere ,NWR11B , ,HTTP ,(none) ,admin ,Admin ,
NGSec , GSecureWeb , ,HTTP ,admin ,(none) ,Admin ,
NGSec ,NGSecureWeb , ,HTTP ,admin ,asd ,Admin ,
Niksun ,NetDetector , ,Multi ,vcr ,NetVCR ,Admin ,su after login with empty password
Nimble ,PC BIOS , ,Console ,n/a ,xdfk9874t3 ,Admin ,
Nimble ,BIOS , ,Console ,n/a ,xdfk9874t3 ,Admin ,
Nokia ,DSL Router M1122 ,1.1 – 1.2 ,Multi ,m1122 ,m1122 ,User ,
Nokia ,MW1122 , ,Multi ,telecom ,telecom ,Admin ,Only in New Zealand.
NOKIA ,7360 , ,Multi ,(none) ,9999 ,Admin ,
Nokia ,M1921 , ,Telnet ,(none) ,nokai ,Admin ,
Nokia ,ADSL router M1921 , ,Telnet ,(none) ,nokia ,Admin ,
Nokia ,M1122 ,unknown ,Multi ,(none) ,Telecom ,Admin ,New Zealand
NOMADIX ,AG5000 , ,Telnet ,admin ,(none) ,Admin ,
Nortel ,Meridian Link , ,Multi ,disttech ,4tas ,engineer account ,
Nortel ,Meridian Link , ,Multi ,maint ,maint ,Maintenance account ,
Nortel ,Meridian Link , ,Multi ,mlusr ,mlusr ,user account ,
Nortel ,Remote Office 9150 , ,Client ,admin ,root ,Admin ,
Nortel ,Accelar (Passport) 1000 , ,Multi ,l2 ,l2 ,Layer 2 Read Write ,
Nortel ,Accelar (Passport) 1000 , ,Multi ,l3 ,l3 ,Layer 3 (and layer 2) ,
Nortel ,Accelar (Passport) 1000 , ,Multi ,ro ,ro ,Read Only ,
Nortel ,Accelar (Passport) 1000 , ,Multi ,rw ,rw ,Read Write ,
Nortel ,Accelar (Passport) 1000 , ,Multi ,rwa ,rwa ,Read Write All ,
Nortel ,Extranet Switches , ,Multi ,admin ,setup ,Admin ,
Nortel ,Baystack 350-24T , ,Telnet ,n/a ,secure ,Admin ,
Nortel ,Meridian PBX , ,Serial ,login ,0 , ,AUTH codes in LD 8
Nortel ,Meridian PBX , ,Serial ,login ,1111 , ,AUTH codes in LD 8
Nortel ,Meridian PBX , ,Serial ,login ,8429 , ,AUTH codes in LD 8
Nortel ,Meridian PBX , ,Serial ,spcl ,0 , ,AUTH codes in LD 8
Nortel ,Meridian MAX , ,Multi ,service ,smile ,general engineer account ,
Nortel ,Meridian MAX , ,Multi ,root ,3ep5w2u ,Admin ,
Nortel ,Matra 6501 PBX , ,Console ,(none) ,0 ,Admin ,
Nortel ,Meridian MAX , ,Multi ,maint ,ntacdmax ,Maintenance account ,
Nortel ,Meridian CCR , ,Multi ,service ,smile ,general engineer account ,
Nortel ,Meridian CCR , ,Multi ,disttech ,4tas ,engineer account ,enter 3letter of day from
Nortel ,Meridian CCR , ,Multi ,maint ,maint ,Maintenance account ,
Nortel ,Meridian CCR , ,Multi ,ccrusr ,ccrusr ,User account ,
Nortel ,Meridian , ,Multi ,n/a ,(none) ,Admin ,
Nortel ,Meridian Link , ,Multi ,service ,smile ,general engineer account ,
Nortel ,Contivity ,Extranet/VPN switches ,HTTP ,admin ,setup ,Admin ,
nortel ,dms , ,Multi ,n/a ,(none) ,Admin ,
Nortel ,Business Communications Manager ,3.5 and 3.6 ,HTTPS ,supervisor ,PlsChgMe ,Admin ,there is an exclamation point
Nortel ,Phone System ,All ,From Phone ,n/a ,266344 ,Installers ,
Nortel ,Norstar , ,Console ,266344 ,266344 ,Admin ,
nortel ,p8600 , ,Multi ,n/a ,(none) ,Admin ,
Nortel ,VPN Gateway , ,Console ,admin ,admin ,Admin ,
Nortel ,Passport 2430 , ,Telnet ,Manager ,(none) ,Admin ,
NRG or RICOH ,DSc338 Printer ,1.19 ,HTTP ,(none) ,password ,Admin ,no user
Nullsoft ,Shoutcast ,38361 ,PLS ,admin ,changeme ,Admin ,
OKI ,C5700 , ,HTTP ,root ,the 6 last digit of the ,Admin ,running with other models
OKI ,6120e and 421n , ,HTTP ,admin ,OkiLAN ,Admin ,
olitec ,sx 200 adsl modem router , ,Multi ,admin ,adslolitec ,Admin ,default ip 192.168.0.250
olitec (Trendchip) ,sx 202 adsl modem router , ,HTTP ,admin ,admin ,Admin ,Firmware: 2.7.0.9
Omnitronix ,Data-Link ,DL150 ,Multi ,(none) ,SUPER ,Admin ,
Omnitronix ,Data-Link ,DL150 ,Multi ,(none) ,SMDR ,Admin ,
OMRON ,MR104FH , ,Multi ,n/a ,(none) ,Admin ,
OPEN Networks ,812L , ,HTTP ,root ,0P3N ,Admin ,
OpenConnect ,OC://WebConnect Pro , ,Multi ,admin ,OCS ,Admin ,
OpenConnect ,OC://WebConnect Pro , ,Multi ,adminstat ,OCS ,Admin ,
OpenConnect ,OC://WebConnect Pro , ,Multi ,adminview ,OCS ,Admin ,
OpenConnect ,OC://WebConnect Pro , ,Multi ,adminuser ,OCS ,Admin ,
OpenConnect ,OC://WebConnect Pro , ,Multi ,adminview ,OCS ,Admin ,
OpenConnect ,OC://WebConnect Pro , ,Multi ,helpdesk ,OCS ,Admin ,
Openwave ,WAP Gateway ,Any ,HTTP ,sys ,uplink ,Admin ,
Openwave ,MSP ,Any ,HTTP ,cac_admin ,cacadmin ,Admin ,
Oracle ,Oracle RDBMS ,Any ,Multi ,system/manager ,sys/change_on_install ,Admin ,
Osicom ,NETPrint ,500 1000 1500 and ,Telnet ,Manager ,Manager ,Admin ,
Osicom ,NETPrint and JETX Print ,500 1000 1500 and ,Telnet ,sysadm ,sysadm ,Admin ,
Osicom ,Osicom Plus T1/PLUS 56k , ,Telnet ,write ,private , ,
Osicom ,NETCommuter , ,Telnet ,debug ,d.e.b.u.g ,User ,No
Osicom ,NETCommuter , ,Telnet ,echo ,echo ,User ,No
Osicom ,NETCommuter , ,Telnet ,guest ,guest ,User ,No
Osicom ,NETCommuter , ,Telnet ,Manager ,Manager ,Admin ,No
Osicom ,NETCommuter , ,Telnet ,sysadm ,sysadm ,Admin ,No
Osicom ,Osicom Plus T1/PLUS 56k , , ,write ,private , ,
Osicom ,NETCommuter Remote , ,Telnet ,sysadm ,sysadm ,Admin ,
Osicom ,JETXPrint ,1000E/B ,Telnet ,sysadm ,sysadm ,Admin ,
Osicom ,JETXPrint ,1000E/N ,Telnet ,sysadm ,sysadm ,Admin ,
Osicom ,JETXPrint ,1000T/N ,Telnet ,sysadm ,sysadm ,Admin ,
Osicom ,JETXPrint ,500 E/B ,Telnet ,sysadm ,sysadm ,Admin ,
Osicom ,NETPrint ,500 1000 1500 and 2000 Series ,Telnet ,Manager ,Manager ,Admin ,
Overland ,NEO Series Libraries , ,Multi ,Factory ,56789 ,Admin ,Factory password under Utilities.
ovislink ,WL-1120AP , ,Multi ,root ,(none) ,Admin ,
Pacific Micro Data ,MAST 9500 Universal Disk Array ,ESM ver. 2.11 / 1 ,Console ,pmd ,(none) ,Admin ,
Panasonic ,CF-28 , ,Multi ,n/a ,(none) ,Admin ,
panasonic ,cf 27 ,4 ,Multi ,n/a ,(none) ,Admin ,
Panasonic ,CF-45 , ,Multi ,n/a ,(none) ,Admin ,
Panasonic ,PBX TDA 100/200/400 ,all ,Console ,(none) ,1234 ,Admin ,google.com search q panasonic
Pansonic ,KXTD1232 , ,Multi ,admin ,1234 ,Admin ,
penril datability ,vcp300 terminal server , ,Multi ,n/a ,system ,Admin ,
Pentagram ,Cerberus ADSL modem + router , ,HTTP ,admin ,password ,Admin ,
Pentaoffice ,Sat Router , ,Telnet ,(none) ,pento ,Admin ,
PentaSafe ,VigilEnt Security Manager ,3 ,VigilEnt Security ,PSEAdmin ,$secure$ ,Admin ,
Perle ,CS9000 ,any ,Console ,admin ,superuser ,Admin ,
pfSense ,pfSense Firewall ,1.0.1 ,Multi ,admin ,pfsense ,Admin ,http://www.pfsense.com
Phoenix v1.14 ,Phoenix v1.14 , ,Multi ,Administrator ,admin ,Admin ,
Pirelli ,Pirelli Router , ,Multi ,admin ,mu ,Admin ,
Pirelli ,Pirelli Router , ,Multi ,admin ,microbusiness ,Admin ,
Pirelli ,Pirelli Router , ,Multi ,user ,password ,Admin ,
Pirelli ,Pirelli AGE-SB , ,HTTP ,admin ,smallbusiness ,Admin ,
Pirelli ,AGE ADSL Router , ,Multi ,admin ,microbusiness ,Admin ,
Pirelli ,AGE ADSL Router , ,Multi ,user ,password ,User ,
Pirelli ,DRG A125G ,4.5.3 , ,admin ,admin ,Admin ,
Planet ,WAP-1900/1950/2000 ,36561 ,Multi ,(none) ,default ,Admin ,
planet ,Akcess Point , ,HTTP ,admin ,admin ,Admin ,
Planet ,ADE-4110 , ,HTTP ,admin ,epicrouter ,Admin ,
Planet ,XRT-401D , ,HTTP ,admin ,1234 ,Admin ,
Planet ,ADE-4000 , ,Multi ,admin ,epicrouter ,Admin ,
planet ,akcess point , ,HTTP ,admin ,admin ,Admin ,
Planet ,WAP 4000 , ,Multi ,admin ,admin ,Admin ,Default IP is 192.168.1.1
Planex ,BRL-04UR , ,Multi ,admin ,0 ,Admin ,
Polycom ,Soundpoint VoIP phones , ,HTTP ,Polycom ,SpIp ,User ,
Polycom ,ViewStation 4000 ,3.5 ,Multi ,(none) ,admin ,Admin ,
Polycom ,iPower 9000 , ,Multi ,(none) ,(none) ,Admin ,
Polycom ,SoundPoint IP Phones , ,HTTP ,Polycom ,456 ,Admin ,username is case sensitive
Prestigio ,Nobile ,156 ,Multi ,n/a ,(none) ,Admin ,
Prolink ,H9000 Series , ,HTTP ,admin ,password ,Admin ,
Promise ,NS4300N NAS , ,Shell ,engmode ,hawk201 ,Admin ,
Proxim ,Orinoco 600/2000 ,All ,HTTP ,(none) ,(none) ,Admin ,WLAN accesspoint
Psionteklogix ,9150 , ,HTTP ,support ,h179350 ,Admin ,
Psionteklogix ,9160 ,1 ,HTTP ,admin ,admin ,Admin ,
Psionteklogix ,9160 ,1 ,HTTP ,admin ,admin ,Admin ,
ptcl ,zxdsl831cii , , ,admin ,admin , ,
Pyramid Computer ,BenHur ,all ,HTTP ,admin ,admin ,Admin ,
QLogic ,SANbox 5602 Fibre Channel Switch , ,Multi ,admin ,password ,Admin ,
QLogic ,SANbox 5602 Fibre Channel Switch , ,Multi ,images ,images ,User ,
Quintum Technologies Inc. ,Tenor Series ,all ,Multi ,admin ,admin ,Admin ,
Radware ,Linkproof , ,ssh ,lp ,lp ,Admin ,
Radware ,Linkproof ,3.73.03 ,Multi ,radware ,radware ,Admin ,
Radware ,AppXcel , , ,radware ,radware ,Admin ,
Radware ,AppDirect , , ,radware ,radware ,Admin ,
Raidzone ,raid arrays , , ,n/a ,raidzone , ,
Ramp Networks ,WebRamp , , ,wradmin ,trancell , ,
Ramp Networks ,WebRamp , , ,wradmin ,trancell , ,
RaspberryPi ,Pi Boards , ,SSH ,pi ,raspberry , ,
RedHat ,Redhat 6.2 , ,HTTP ,piranha ,q ,User ,
RedHat ,Redhat 6.2 , ,HTTP ,piranha ,piranha ,User ,
Research ,PC BIOS , ,Console ,n/a ,Col2ogro2 ,Admin ,
Research ,BIOS , ,Console ,n/a ,Col2ogro2 ,Admin ,
Ricoh ,Aficio ,AP3800C ,HTTP ,sysadmin ,password ,Admin ,
Ricoh ,Aficio 2228c , ,Multi ,sysadmin ,password ,Admin ,Webpage admin
Ricoh ,Aficio AP3800C ,2.17 ,HTTP ,(none) ,password ,Admin ,alternative to sysadmin and Admin
Ricoh ,Aficio 2232C , ,Telnet ,n/a ,password ,Admin ,
Ricoh ,AP410N ,1.13 ,HTTP ,admin ,(none) ,Admin ,
Ricoh ,Aficio 2020D , ,HTTP ,admin ,password ,Admin ,
Ricoh ,Ricoh ,Aficio MP 3500 1.0 ,Multi ,admin ,(none) ,Admin ,Nabil OUCHN
Ricoh ,Aficio 551 , ,Multi ,(none) ,sysadm ,Admin ,
Ricoh ,Aficio 1018d , ,HTTP ,n/a ,sysadm ,Admin ,
Ricoh ,Aficio 1013F , ,HTTP ,n/a ,sysadm ,Admin ,
Ricoh ,Aficio MP 161L ,( Printer MP 161L ) , ,(none – Not required) ,sysadm ,Administration ,
Ricoh ,Aficio ,2016 , ,(none) ,password ,all ,
RM ,RM Connect , ,Multi ,setup ,changeme , ,
RM ,RM Connect , ,Multi ,teacher ,password , ,
RM ,RM Connect , ,Multi ,temp1 ,password , ,
RM ,RM Connect , ,Multi ,admin ,rmnetlm , ,
RM ,RM Connect , ,Multi ,admin2 ,changeme , ,
RM ,RM Connect , ,Multi ,adminstrator ,changeme , ,
RM ,RM Connect , ,Multi ,deskalt ,password , ,
RM ,RM Connect , ,Multi ,deskman ,changeme , ,
RM ,RM Connect , ,Multi ,desknorm ,password , ,
RM ,RM Connect , ,Multi ,deskres ,password , ,
RM ,RM Connect , ,Multi ,guest ,(none) , ,
RM ,RM Connect , ,Multi ,replicator ,replicator , ,
RM ,RM Connect , ,Multi ,RMUser1 ,password , ,
RM ,RM Connect , ,Multi ,topicalt ,password , ,
RM ,RM Connect , ,Multi ,topicnorm ,password , ,
RM ,RM Connect , ,Multi ,topicres ,password , ,
RoamAbout ,RoamAbout R2 Wireless , ,Multi ,admin ,password ,Admin ,
SAF Tehnika ,CFQ series modems , , ,integrator ,p1nacate ,Integrator ,
SAF Tehnika ,CFQ series modems , , ,administrator ,d1scovery ,Admin ,
SAF Tehnika ,CFQ series modems , , ,operator ,col1ma ,Operator ,
SAF Tehnika ,CFQ series modems , , ,monitor ,monitor ,Monitor ,
sagem ,fast 1400w , ,Multi ,root ,1234 ,Admin ,
SAGEM ,FAST 1400 , ,Multi ,admin ,epicrouter ,Admin ,
Sagem ,F@st 1200 (Fast 1200) , ,Telnet ,root ,1234 ,User ,root/1234
Sagem ,Livebox , ,Multi ,admin ,admin ,Admin ,
Sagem ,Fast 3504 v2 , , ,Menara ,Menara ,admin ,moroccan internet provider’s router
samsung ,n620 , ,Multi ,n/a ,(none) ,Admin ,
Samsung ,MagicLAN SWL-3500RG ,2.15 ,HTTP ,public ,public ,Admin ,def. WEP keys: 0123456789
samsung ,modem/router ,aht-e300 ,Multi ,admin ,password ,Admin ,after reset
Scientific Atlanta ,DPX2100 ,Comcast-supplied ,HTTP ,admin ,w2402 ,diagnostics page ,192.168.100.1
Secure Computing ,Webwasher ,all ,HTTP ,admin ,(none) ,Admin ,
Sempre ,54M Wireless Router ,V 1.00 , ,admin ,admin , ,
Senao ,2611CB3+D (802.11b , ,HTTP ,admin ,(none) ,Admin ,Default IP: 192.168.1.1
seninleyimben ,@skan ,el rattani ,FTP ,admin ,admin ,Admin ,11182360608
Sercom ,IP806GA , ,HTTP ,admin ,admin ,Admin ,
Sercom ,IP806GB , ,HTTP ,admin ,admin ,Admin ,
Server Technology ,Sentry Remote Power , ,Multi ,GEN1 ,gen1 ,view/control ,Telnet port 2001
Server Technology ,Sentry Remote Power , ,Multi ,GEN2 ,gen2 ,view/control ,Telnet port 2001
Server Technology ,Sentry Remote Power , ,Multi ,ADMN ,admn ,Admin ,Telnet port 2001
sharp ,AR-407/S402 , ,Multi ,n/a ,(none) ,Admin ,
Sharp ,AR-M355N , ,HTTP ,admin ,Sharp ,Admin ,
Sharp ,MX-3501n , ,HTTP ,Administrator ,admin ,Admin ,
Sharp ,AL-1655CS , ,HTTP ,admin ,Sharp ,Admin ,
Sharp ,AR-M155 , ,HTTP ,admin ,Sharp ,Admin ,Note the Capital S
Sharp ,MX-5500 , ,HTTP ,admin ,admin ,Admin ,Different to other sharp units
Sharp ,AR-M237 , , ,admin ,Sharp ,Admin ,
Sharp ,AR-M237 , , ,admin ,Sharp ,Admin ,pass case-sensitive
Shoretel ,ALL , ,HTTP ,admin ,changeme ,Admin ,
siemen ,speedstream 5400 ,059-e440-a02 ,HTTP ,admin ,(none) ,Admin ,
Siemens ,ROLM PBX , , ,eng ,engineer , ,
Siemens ,ROLM PBX , , ,op ,op , ,
Siemens ,ROLM PBX , , ,op ,operator , ,
siemens ,hipath , ,Multi ,n/a ,(none) ,Admin ,
Siemens ,ROLM PBX , , ,su ,super , ,
Siemens ,PhoneMail , , ,poll ,tech , ,
Siemens ,PhoneMail , , ,sysadmin ,sysadmin , ,
Siemens ,ROLM PBX , , ,admin ,pwp , ,
Siemens ,PhoneMail , , ,tech ,tech , ,
SIEMENS ,SE515 , ,HTTP ,admin ,n/a ,Admin ,
Siemens ,5940 T1E1 Router ,5940-001 v6.0.180-2 ,Telnet ,superuser ,admin ,Admin ,
Siemens ,PhoneMail , , ,poll ,tech , ,
Siemens ,PhoneMail , , ,sysadmin ,sysadmin , ,
Siemens ,PhoneMail , , ,tech ,tech , ,
Siemens ,ROLM PBX , , ,admin ,pwp , ,
Siemens ,ROLM PBX , , ,eng ,engineer , ,
Siemens ,ROLM PBX , , ,op ,op , ,
Siemens ,ROLM PBX , , ,op ,operator , ,
Siemens ,ROLM PBX , , ,su ,super , ,
Siemens ,SpeedStream 4100 , ,HTTP ,admin ,hagpolm1 ,Admin ,DSL Modem and Router
Siemens ,SE560dsl , ,Multi ,admin ,admin ,Admin ,Also has an account with:
Siemens ,Speedstream SS2614 ,Hardware V. 01 ,HTTP ,n/a ,admin ,Admin ,
Siemens ,Hipath ,3300-3750 ,Custom program ,31994 ,31994 ,Admin ,
Siemens ,Gigaset ,All ,Multi ,(none) ,0 ,Admin ,
Siemens Nixdorf ,PC BIOS , ,Console ,n/a ,SKY_FOX ,Admin ,
Siemens Nixdorf ,BIOS , ,Console ,n/a ,SKY_FOX ,Admin ,
Siemens Pro C5 ,Siemens , ,Multi ,n/a ,(none) ,Admin ,
Sigma ,Sigmacoma IPshare ,Sigmacom router v1.0 ,HTTP ,admin ,admin ,Admin ,effes2004@gmail.com
Signamax ,065-7726S , ,Multi ,admin ,admin ,Admin ,Switch
Siips ,Trojan ,8974202 ,Multi ,Administrator ,ganteng ,Admin ,Thx silex
technology ,PRICOM (Printserver) , ,Multi ,root ,(none) ,Admin ,for telnet / HTTP
Silvercrest ,WR-6640Sg , ,HTTP ,admin ,admin ,Admin ,
sitara ,qosworks , ,Console ,root ,(none) ,Admin ,
Sitecom ,All WiFi routers , ,Multi ,(none) ,sitecom ,Admin ,
Sitecom ,WL-0xx up to WL-17x ,all ,Multi ,admin ,admin ,Admin ,often on port 88
SmartSwitch ,Router 250 ssr2500 ,v3.0.9 ,Multi ,admin ,(none) ,Admin ,
SMC ,Barricade 7004 AWBR , ,Multi ,admin ,(none) ,Admin ,192.168.123.254 (WiFi AP)
SMC ,Router ,All ,HTTP ,admin ,admin ,Admin ,
SMC ,SMC broadband router , ,HTTP ,admin ,admin ,Admin ,
SMC ,SMC2804WBR ,v.1 ,HTTP ,(none) ,smcadmin ,Admin ,
SMC ,WiFi Router ,All ,HTTP ,n/a ,smcadmin ,Admin ,model #2804WBRP-G
SMC ,SMB2804WBR ,V2 ,Multi ,Administrator ,smcadmin ,Admin ,
SMC ,7401BRA ,1 ,HTTP ,admin ,barricade ,Admin ,
SMC ,7401BRA ,2 ,HTTP ,smc ,smcadmin ,Admin ,
SMC ,Barricade7204BRB , ,HTTP ,admin ,smcadmin ,Admin ,
SMC ,2804wr , ,HTTP ,(none) ,smcadmin ,Admin ,
SMC ,Router/Modem ,BR7401 ,Multi ,admin ,barricade ,Admin ,
SMC ,SMCWBR14-G ,SMCWBR14-G ,HTTP ,(none) ,smcadmin ,Admin ,
SMC ,Modem/Router , ,HTTP ,cusadmin ,highspeed ,Customer Admin ,Comcast Commercial High Speed Modem model number 8013WG
SMC ,7204BRA , ,Multi ,smc ,smcadmin ,Admin ,
SMC ,SMCWBR14-G , ,HTTP ,n/a ,smcadmin ,Admin ,mentioned password (no passwd)
smc ,smc 7904BRA , ,Multi ,(none) ,smcadmin ,Admin ,
SMC ,smc7904wbrb , ,Multi ,(none) ,smcadmin ,Admin ,
SMC ,SMC7004VBR , ,HTTP ,n/a ,smcadmin ,Admin ,
SMC ,SMCWBR14-G , ,HTTP ,(none) ,smcadmin ,Admin ,DeFaults:IP Address:192.168.2.1
SMC ,SMC8013WG-CCR ,2.11.19-1d ,HTTP ,mso ,w0rkplac3rul3s ,Admin ,Comcast Business Gateway w
Snapgear ,Pro ,Lite ,and SOHO ,1.79 ,Multi ,root ,default
Software , , , , , , ,EVMAdminGuide.pdf
Solution 6 ,Viztopia Accounts , ,Multi ,aaa ,often blank ,Admin ,
SonicWALL ,ALL ,ALL ,HTTP ,admin ,password ,Admin ,
Sonic-X ,SonicAnime ,on ,Telnet ,root ,admin ,Admin ,1.01E+14
SOPHIA (Schweiz) ,Protector , ,HTTPS ,admin ,Protector ,Admin ,
SOPHIA (Schweiz) ,Protector , ,SSH ,root ,root ,Admin ,
Sorenson ,SR-200 , ,HTTP ,(none) ,admin ,Admin ,
Sparklan ,Wx-6215 D and G , ,HTTP ,admin ,admin ,Admin ,
Spectra Logic ,64000 Gator , ,Multi ,administrator ,(none) ,Admin ,Has no password
Spectra Logic ,64000 Gator , ,Multi ,operator ,(none) ,User ,Has no password
Speedstream ,5861 SMT Router , ,Multi ,admin ,admin ,Admin ,
Speedstream ,5871 IDSL Router , ,Multi ,admin ,admin ,Admin ,
Speedstream ,Router 250 ssr250 , ,Multi ,admin ,admin ,Admin ,
Speedstream ,DSL , ,Multi ,admin ,admin ,Admin ,
Speedstream ,5667 ,R4.0.1 ,HTTP ,(none) ,admin ,Admin ,
SpeedStream ,5660 , ,Telnet ,n/a ,adminttd ,Admin ,
SpeedStream ,SpeedStream , ,Telnet ,Administrator ,admin ,Admin ,
SpeedXess ,HASE-120 , ,Multi ,(none) ,speedxess ,Admin ,
Sphairon ,(Versatel WLAN-Router) , ,Multi ,admin ,passwort ,Admin ,
Spike ,CPE , ,Console ,enable ,(none) ,Admin ,
SSA ,BPCS ,Up to 5.02 ,Multi ,SSA ,SSA ,Admin ,rarely changed/used for
stratacom ,all ,all ,Multi ,stratacom ,stratauser ,Admin ,
Sun ,JavaWebServer ,1.x 2.x ,AdminSrv ,admin ,admin ,Admin ,
Sun ,Cobalt , ,HTTP ,admin ,admin ,Admin ,submit by Nabil Ouchn
Sun Microsystems ,ILOM of X4100 ,1 ,HTTP ,root ,changeme ,Admin ,
SWEEX , , , ,sweex ,mysweex ,Admin ,
Swissvoice ,IP 10S , ,Telnet ,target ,password ,Admin ,
Syabas Technology ,Popcorn Hour A-110 ,all , ,nmt ,1234 ,admin ,
Syabas Technology ,Popcorn Hour C-200 ,all , ,nmt ,1234 ,admin ,
Syabas Technology ,Popcorn Hour A-110 ,all , ,ftpuser ,1234 ,admin ,
Sybase ,EAServer , ,HTTP ,jagadmin ,(none) ,Admin ,Source : Manufactor documentation
Symbol ,Spectrum ,series 4100-4121 ,HTTP ,n/a ,Symbol ,Admin ,Access Point Wireless
Symbol ,AP-2412 , ,Multi ,n/a ,Symbol ,Admin ,2Mbps FH AccessPoint
Symbol ,AP-3020 , ,Multi ,n/a ,Symbol ,Admin ,2Mbps FH AccessPoint
Symbol ,AP-4111 , ,Multi ,n/a ,Symbol ,Admin ,11Mbps DS AccessPoint
Symbol ,AP-4121 , ,Multi ,n/a ,Symbol ,Admin ,11Mbps DS AccessPoint
Symbol ,AP-4131 , ,Multi ,n/a ,Symbol ,Admin ,11Mbps DS AccessPoint
Symbol ,CB3000 ,A1 ,HTTPS ,admin ,symbol ,Admin ,Default IP 10.10.1.1
Symmetricom ,NTS-200 ,All , ,operator ,mercury ,Admin ,Symmetricom NTP Network
Symmetricom ,NTS-200 ,All , ,guest ,truetime ,guest ,Symmetricom NTP Network
SysMaster ,M10 , ,HTTP ,admin ,12345 ,Admin ,
System/32 ,VOS , ,Multi ,install ,secret ,Admin ,
T-com ,sinus ,1054dsl , ,veda ,12871 , ,
TANDBERG ,TANDBERG ,8000 ,Multi ,(none) ,TANDBERG ,Admin ,http://www.tandberg.net/
Tandberg ,6000MXP , ,Multi ,Admin ,(none) ,Admin ,
Tandberg Data ,DLT8000 Autoloader 10x , ,Console ,n/a ,10023 ,Maintenance ,
Tandem ,TACL , ,Multi ,super.super ,(none) ,Admin ,
Tandem ,TACL , ,Multi ,super.super ,master ,Admin ,
T-Com ,Speedport Router Family ,all ,HTTP ,(none) ,0 ,Admin ,works with nearly all routers of
T-Com ,Speedport ,any , ,n/a ,0 ,admin ,192.168.2.1 |
T-Com ,Speedport W701V ,any , ,n/a ,0 ,admin ,
T-Com ,Speedport W900V ,any , ,n/a ,0 ,admin ,
T-Com ,Speedport 503V ,any , , ,123456 , ,
T-Comfort ,Routers , ,HTTP ,Administrator ,(none) ,Admin ,
Team Xodus ,XeniumOS ,2.3 ,FTP ,xbox ,xbox ,Admin ,
Technologies , , , , , , ,
Teklogix ,Accesspoint , ,Multi ,Administrator ,(none) ,Admin ,
Telco Systems ,Edge Link 100 , ,Console ,telco ,telco ,telco ,
telecom ,home hauwei , ,Multi ,operator ,(none) ,Admin ,
Teledat ,Routers , ,HTTP ,admin ,1234 ,Admin ,
Teletronics ,WL-CPE-Router ,37320 ,HTTPS ,admin ,1234 ,Admin ,
Telewell ,TW-EA200 , ,Multi ,admin ,password ,Admin ,
Telewell ,TW-EA501 ,v1 ,Multi ,admin ,admin ,Admin ,
Telindus ,1124 , ,HTTP ,n/a ,(none) ,Admin ,
Telindus ,SHDSL1421 ,yes ,HTTP ,admin ,admin ,Admin ,
telindus ,telindus ,2002 ,Telnet ,admin ,admin ,Admin ,
Tellabs ,Titan 5500 ,FP 6.x ,Multi ,tellabs ,tellabs#1 ,Admin ,
Tellabs ,7120 , ,Multi ,root ,admin_1 ,Admin ,telnet on port 3083
Terayon ,Unknown ,Comcast-supplied ,HTTP ,(none) ,(none) ,diagnostics page ,192.168.100.1/diagnosti
Terayon ,Unknown ,Comcast-supplied ,HTTP ,(none) ,(none) ,diagnostics page ,192.168.100.1/diagnosti
Thomson ,Wireless Cable Gateway ,DCW725 ,HTTP ,(none) ,admin ,Admin ,SSID : THOMSON (Credit to
Thomson ,SpeedTouch AP ,180 ,HTTP ,n/a ,admin ,Admin ,SSID : SpeedTouch180
Thomson ,TCW-710 , ,Multi ,(none) ,admin ,Admin ,ono
thomson ,speedtouch 585 v7 ,2+ , ,admin ,password ,administrator ,uw club supply u wid dese boxes
Tiara ,1400 ,3.x ,Console ,tiara ,tiaranet ,Admin ,also known as Tasman Networks
Topcom ,Wireless Webr@cer 1154+ ,V 4.00.0 ,HTTP ,admin ,admin ,Admin ,G+ mode (125Mbps) integration
Topcom ,Wireless Webr@cer 1154+ ,V 0.01.06 ,HTTP ,admin ,admin ,Admin ,WPA-PSK implemented
Topcom ,Wireless Webr@cer 1154+ ,V 0.01.09 ,HTTP ,admin ,admin ,Admin ,Improved wireless stability
Topcom ,Skyr@cer Pro AP 554 ,1.93 ,HTTP ,admin ,admin ,Admin ,Wireless Access Point
topsec ,firewall , ,Multi ,superman ,talent ,Admin ,
Toshiba ,E-Studio 3511c , ,HTTP ,Admin ,123456 ,Admin ,Multifunction
Prin ter/Copier/Toshiba ,E-Studio 4511c , ,HTTP ,admin ,123456 ,Admin ,
Toshiba ,Most e-Studio copiers , , ,admin ,123456 ,Admin ,
TrendMicro ,InterScan 7.0 , ,HTTP ,admin ,imss7.0 ,Admin ,
TrendNET ,TEW-435BRM ,1 ,HTTP ,admin ,password ,Admin ,
Troy ,ExtendNet 100zx , ,Multi ,admin ,extendnet ,Admin ,dizphunKt
TVT System ,Expresse G5 , ,Multi ,craft ,(none) ,Admin ,
TVT System ,Expresse G5 DS1 Module , ,Multi ,(none) ,enter ,Admin ,
U.S. Robotics ,SureConnect 9003 ADSL , ,Multi ,root ,12345 ,Admin ,
U.S. Robotics ,SureConnect 9105 ADSL , ,HTTP ,admin ,admin ,Admin ,
UNEX ,Routers , ,HTTP ,n/a ,password ,Admin ,
Unisys ,ClearPath MCP , ,Multi ,NAU ,NAU ,Privileged ,Network Administration Utility
Unisys ,ClearPath MCP , ,Multi ,ADMINISTRATOR ,ADMINISTRATOR ,Admin ,
Unisys ,ClearPath MCP , ,Multi ,HTTP ,HTTP ,Web Server Administration ,
us robotic ,adsl gateway wireless router , ,wireless router ,support ,support ,super user access ,I find it on a manual
US Robotics ,USR8000 ,1.23 / 1.25 ,Multi ,root ,admin ,Admin ,DSL-Router. Web-Login always
US Robotics ,USR8550 ,3.0.5 ,Multi ,Any ,12345 ,Any ,Best Modem
US ROBOTICS ,ADSL Ethernet Modem , ,HTTP ,(none) ,12345 ,Admin ,
US Robotics ,SureConnect ADSL ,SureConnect ADSL ,Telnet ,support ,support ,User ,works after 3rd login trial
US Robotics ,USR9110 , ,HTTP ,admin ,(none) ,Admin ,default IP subnet: 192.168.1.0
US Robotics ,USR9106 , ,HTTP ,admin ,admin ,Admin ,
US Robotics ,USR5462 , ,HTTP ,n/a ,admin ,Admin ,
us21100060 ,hp omibook 6100 , ,Multi ,n/a ,(none) ,Admin ,
Various ,DD-WRT ,v23 SP1 Final ,HTTP ,root ,admin ,Admin ,Alternative firmware
VASCO ,VACMAN Middleware ,2.x ,Multi ,admin ,(none) ,Admin ,strong authentication server
Verifone ,Verifone Junior ,2.05 , ,(none) ,166816 , ,
Verilink ,NE6100-4 NetEngine ,IAD 3.4.8 ,Telnet ,(none) ,(none) ,Guest ,
Visual Networks ,Visual Uptime T1 CSU/DSU ,1 ,Console ,admin ,visual ,Admin ,
VxWorks ,misc , ,Multi ,admin ,admin ,Admin ,
VxWorks ,misc , ,Multi ,guest ,guest ,Guest ,
Wanadoo ,Livebox , ,Multi ,admin ,admin ,Admin ,
Wang ,Wang , ,Multi ,CSG ,SESAME ,Admin ,
Watch guard ,firebox 1000 , ,Multi ,admin ,(none) ,Admin ,
Watchguard ,SOHO and SOHO6 ,all versions ,FTP ,user ,pass ,Admin ,works only from the inside LAN
Watchguard ,Firebox , , ,(blank) ,wg ,admin ,
Weidmüeller ,IE-SW16-M , , ,admin ,detmond ,admin ,
westell ,2200 , ,Multi ,admin ,password ,Admin ,
Westell ,Versalink 327 , ,Multi ,admin ,(none) ,Admin ,
Westell ,Wirespeed , ,Multi ,admin ,password ,Admin ,also try password: sysAdmin
Westell ,Wang , ,Multi ,CSG ,SESAME ,Admin ,
Westell ,Wirespeed wireless router , ,Multi ,admin ,sysAdmin ,Admin ,
Westell ,Ultraline Series3 A90- ,1.02.00.04 , ,admin ,password1 ,Admin ,Verizon cable router (Model
WLAN_3D ,Router , ,HTTP ,Administrator ,admin ,Admin ,
wline ,w3000g , ,HTTP ,admin ,1234 ,Admin ,
Wyse ,Winterm ,5440XL ,Console ,root ,wyse ,Admin ,
Wyse ,Winterm ,5440XL ,VNC ,VNC ,winterm ,VNC ,
Wyse ,Winterm ,9455XL ,BIOS ,(none) ,Fireport ,BIOS ,Case Sensitive
Wyse ,winterm , ,Multi ,root ,(none) ,Admin ,
Wyse ,rapport ,4.4 ,FTP ,rapport ,r@p8p0r+ ,ftp logon to ,
Wyse ,Winterm 3150 , ,VNC ,n/a ,password ,Admin ,by satadru
XAMPP ,XAMPP Filezilla FTP Server , , ,newuser ,wampp ,User ,
Xavi ,7000-ABA-ST1 , ,Console ,n/a ,(none) ,Admin ,
Xavi ,7001 , ,Console ,n/a ,(none) ,Admin ,
xd ,xdd ,xddd ,Multi ,xd ,xd ,Admin ,
Xerox ,Multi Function Equipment , ,Multi ,admin ,2222 ,Admin ,combo fax/scanner/printer
Xerox ,WorkCenter Pro 428 , ,HTTP ,admin ,admin ,Admin ,
xerox ,xerox , ,Multi ,admin ,admin ,Admin ,
xerox ,xerox , ,Multi ,n/a ,admin ,Admin ,
Xerox ,Document Centre 425 , ,HTTP ,admin ,(none) ,Admin ,
xerox ,work centre pro 35 , ,HTTP ,admin ,1111 ,Admin ,
Xerox ,DocuCentre 425 , ,HTTP ,admin ,22222 ,Admin ,works for access panel 2
Xerox ,Document Centre 405 ,– ,HTTP ,admin ,admin ,Admin ,
Xerox ,240a , ,HTTP ,admin ,x-admin ,Admin ,
Xerox ,WorkCentre 7132 , ,Multi ,11111 ,x-admin ,Admin ,
Xerox ,6204 , ,Multi ,n/a ,0 ,Admin ,
Xerox ,DocumentCenter 186 ,2007 , ,admin ,x-admin ,admin ,
X-Micro ,X-Micro WLAN 11b Broadband Router ,1.2.2 1.2.2.3 1.2.2.4 1.6.0.0 ,Multi ,super ,super ,Admin ,From BUGTRAQ
X-Micro ,X-Micro WLAN 11b Broadband Router ,1.6.0.1 ,HTTP ,1502 ,1502 ,Admin ,From BUGTRAQ
X-Micro ,WLAN 11b Access Point ,37258 ,Multi ,super ,super ,Admin ,http://secunia.com/advisories/
Xylan ,Omniswitch , ,Telnet ,admin ,switch ,Admin ,
Xylan ,Omniswitch , ,Telnet ,diag ,switch ,Admin ,
Xylan ,omniswitch , ,Multi ,admin ,switch ,Admin ,
Xyplex ,Routers , ,Port 7000 ,n/a ,system ,Admin ,
Xyplex ,Terminal Server , ,Port 7000 ,n/a ,access ,User ,
Xyplex ,Terminal Server , ,Port 7000 ,n/a ,system ,Admin ,
Xyplex ,Routers , ,Port 7000 ,n/a ,access ,User ,
xyplex ,switch ,3.2 ,Console ,n/a ,(none) ,Admin ,
Xyplex ,Routers , ,Port 7000 ,n/a ,access ,User ,
Xyplex ,Terminal Server , ,Port 7000 ,n/a ,access ,User ,
Xyplex ,Terminal Server , ,Port 7000 ,n/a ,system ,Admin ,
Yakumo ,Routers , ,HTTP ,admin ,admin ,Admin ,
Zcom ,Wireless , ,SNMP ,root ,admin ,Admin ,
Zebra ,10/100 Print Server , ,Multi ,admin ,1234 ,Admin ,
ZOOM ,ZOOM ADSL Modem , ,Console ,admin ,zoomadsl ,Admin ,
ZTE ,ZXDSL 831 ,4.2 ,Multi ,ADSL ,expert03 ,Admin ,Default Password if user does
ZyXEL ,Prestige , ,HTTP ,n/a ,1234 ,Admin ,http://192.168.1.1
ZyXEL ,Prestige , ,FTP ,root ,1234 ,Admin ,
ZyXEL ,Prestige , ,Telnet ,(none) ,1234 ,Admin ,
ZyXEL ,Prestige 643 , ,Console ,(none) ,1234 ,Admin ,
ZyXEL ,Prestige 652HW-31 ADSL Router , ,HTTP ,admin ,1234 ,Admin ,http://192.168.1.1
ZyXEL ,Prestige 100IH , ,Console ,n/a ,1234 ,Admin ,
Zyxel ,ZyWall 2 , ,HTTP ,n/a ,(none) ,Admin ,
Zyxel ,adsl routers ,All ZyNOS Firmwares ,Multi ,admin ,1234 ,Admin ,this is default for dsl routers
ZyXEL ,Prestige 650 , ,Multi ,1234 ,1234 ,Admin ,
ZyXEL ,Prestige 900 , ,HTTP ,webadmin ,1234 ,Admin ,192.168.1.1:8080
ZyXEL ,Prestige 645 , ,HTTP ,admin ,1234 ,Admin ,
Zyxel ,Prestige 660HW , ,Multi ,admin ,admin ,Admin ,
ZyXel ,Prestige P660HW , ,Multi ,admin ,1234 ,Admin ,
Zyxel ,Router ,650-1 ,Telnet ,(none) ,1234 ,Admin ,Telefonica
ZyXeL ,660HW , ,HTTP ,admin ,(none) ,Admin ,terra
Zyxel ,ES-2108 , ,Multi ,admin ,1234 ,Admin ,
zyxel ,g-570s , ,Multi ,n/a ,admin ,Admin ,
Zyxel ,Prestige 650HW31 ,31 ,Telnet ,192.168.1.1 60020 ,@dsl_xilno ,Admin ,
Zyxel ,NWA1100 , , , ,1234 ,Admin ,
Zyxel ,G570S ,v2 , , ,1234 ,Admin ,
ZyXel Based (Generic) ,Broadband SOHO Router ,925ahcd on circuit , ,admin ,0 ,Admin ,Password is 4 zeros. Gray router
ZyWALL Series ,Prestige 660R-61C , ,Multi ,n/a ,admin ,Admin , '''
search = ''
while search != 'exit':
search = raw_input('Default Creds search string (exit): ')
found = 0
records = data.split('\n')
for ix,record in enumerate(records):
#print 'Busy with record ',str(ix), record
Vendor ,Model ,Version ,AccessType ,Username ,Password ,Rights ,Comments = record.split(',')
if search.lower() in record.lower():
found += 1
print '%s. Vendor : %s ' % (found, Vendor)
print ' Model : %s ' % Model
print ' Version : %s ' % Version
print ' AccessType: %s ' % AccessType
print ' Username : %s ' % Username
print ' Password : %s ' % Password
print ' Rights : %s ' % Rights
print ' Comments : %s ' % Comments
print ''
|
import threading
import time
from socket import *
#设计思想:
#1.让服务器连接多台客户端
#2.服务器可以客户端进行收发操作,互不干扰
class tcpserver_test(threading.Thread):
    """Non-blocking TCP echo-chat server that polls multiple clients.

    One instance listens on the given port; rec() loops forever, accepting
    new connections and printing whatever each client sends, while a helper
    thread forwards stdin lines back to the most recently accepted client.
    """

    def __init__(self, port):
        """Create and bind the non-blocking listening socket."""
        threading.Thread.__init__(self)
        self.port = port  # listening port (clobbered with the peer port in rec())
        # NOTE(review): interface IP is hard-coded — confirm it matches the host.
        addr = ('10.48.41.61', self.port)
        self.tcpCon = socket(AF_INET, SOCK_STREAM)
        # Non-blocking so accept()/recv() never stall the polling loop in rec().
        self.tcpCon.setblocking(False)
        self.tcpCon.bind(addr)
        self.tcpCon.listen(5)
        self.sock_list = []  # list of (client_socket, client_ip) tuples

    def rec(self):
        """Accept new clients and print incoming data, forever."""
        while True:
            try:
                self.sock, recv = self.tcpCon.accept()
                self.ip = recv[0]    # client IP address
                self.port = recv[1]  # client port (note: overwrites the listen port attr)
                self.sock_list.append((self.sock, self.ip))
                # Per-client sockets are also non-blocking so one silent
                # client cannot stall the others.
                self.sock.setblocking(False)
                print("与客户端%s连接成功,开始你的表演....."%self.ip)
            except BlockingIOError:
                pass  # no pending connection on this pass (socket is non-blocking)
            dead = []
            for client, ip in self.sock_list:
                try:
                    data = client.recv(1024)
                except BlockingIOError:
                    continue  # this client has sent nothing yet
                except OSError:
                    # Connection reset/aborted: drop the client below.
                    dead.append((client, ip))
                    continue
                if data:
                    print("从", ip, "收到的信息是:", data.decode())
                    # Hand replies off to a stdin-reading sender thread.
                    t_sendNews = threading.Thread(target=self.sendNews)
                    t_sendNews.start()
                else:
                    # Empty read means the peer closed the connection.
                    print("没有收到数据")
                    # Bug fix: the original called close() on the (socket, ip)
                    # tuple and never removed dead clients from sock_list.
                    client.close()
                    dead.append((client, ip))
            for entry in dead:
                self.sock_list.remove(entry)
            # Avoid burning 100% CPU in the non-blocking poll loop.
            time.sleep(0.05)

    def sendNews(self):
        """Forward stdin lines to the most recently accepted client."""
        try:
            while True:
                news = input()
                self.sock.send(bytes(news, encoding='gbk'))
        except (OSError, EOFError):
            print("客户端关闭连接")
if __name__ == "__main__":
    # Start the demo server on port 6666. Note rec() is called directly
    # rather than start(), so the poll loop runs in the main thread even
    # though the class subclasses threading.Thread.
    t = tcpserver_test(6666)
    t.rec()
|
# Generated by Django 3.1.6 on 2021-04-07 01:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the log app's Entry model.

    Renames Entry.img to image2, drops Entry.fyle, and adds a new optional
    image1 FileField. Operation order matters: the rename frees no name
    here, but Django applies operations sequentially as listed.
    """

    dependencies = [
        ('log', '0015_auto_20210406_2035'),
    ]

    operations = [
        migrations.RenameField(
            model_name='entry',
            old_name='img',
            new_name='image2',
        ),
        migrations.RemoveField(
            model_name='entry',
            name='fyle',
        ),
        migrations.AddField(
            model_name='entry',
            name='image1',
            # blank/null so existing rows and empty forms remain valid.
            field=models.FileField(blank=True, null=True, upload_to='media/entry_images/'),
        ),
    ]
|
'''++
Copyright (C) 2019 PrimeDevelopers
All rights reserved.
This file has been generated by the Automatic Component Toolkit (ACT) version 1.4.0.
Abstract: This is an autogenerated Python application that demonstrates the
usage of the Python bindings of Ray Marching Library
Interface version: 1.1.0
'''
import os
import sys
import math
import LibRayMarching
from PIL import Image
import math
def Progress(Percentage, ShouldAbort):
    """Draw a one-line textual progress bar, overwriting itself via '\\r'.

    Percentage: completion in [0, 100]; rounded up to whole percent.
    ShouldAbort: required by the LibRayMarching callback signature but
    unused here. Returns None.
    """
    pct = math.ceil(Percentage)  # compute once (original called ceil twice)
    print(("=" * pct) + "> " + str(pct) + "%", end="\r", flush=True)
class RayMarching:
    """Demo wrapper around the LibRayMarching native bindings.

    Configures screen/camera/shader settings, builds a showcase scene of
    primitives and CSG groups, and renders it to a PNG via PIL.
    """

    def __init__(self, width, height, dx, dy):
        """Load the library and configure a width x height scene.

        (dx, dy) offsets the camera eye/direction; main() feeds in
        sin/cos pairs to orbit the camera around the scene.
        """
        self.Wrapper = LibRayMarching.Wrapper("../build/libraymarching");
        major, minor, micro = self.Wrapper.GetLibraryVersion();
        print("LibRayMarching version: {:d}.{:d}.{:d}".format(major, minor, micro));
        self.Scene = self.Wrapper.CreateRayMarching();
        print("Set size")
        self.Scene.SetScreenSize(width, height);
        # Progress (module-level function) is reported during RenderScene.
        self.Scene.SetProgressCallback(LibRayMarching.ProgressCallback(Progress));
        self.Width = width;
        self.Height = height;
        print("Set viewport")
        # Eye position, view direction, up vector, and field of view (20 deg).
        self.Scene.SetViewport(
            LibRayMarching.Vector(X = 3 + 14 * dx, Y = -3 + 14 * dy, Z = 4),
            LibRayMarching.Vector(X = -dx, Y = -dy, Z = -0.2),
            LibRayMarching.Vector(X = 0, Y = 0, Z = 1), math.pi*20/180);
        print("Set properties");
        self.Scene.SetShaderProperties(LibRayMarching.ShaderProperties(
            Oversampling = 2,
            MaxBouncing = 5,
            SoftShadow = 50,
            FastNormalCalculation = True));
        self.Scene.SetBackground(LibRayMarching.Vector(X = 0.05, Y = 0, Z = 0.2), 30, 40);

    def BuildScene(self):
        """Populate the scene: one light plus a grid of demo primitives."""
        print ("Create light")
        self.Scene.AddLight(LibRayMarching.Vector(9, -20, 25), LibRayMarching.Vector(1, 1, 1));
        print ("Create sphere")
        Sphere = self.Wrapper.CreateSphere(1);
        Sphere.SetMaterial(LibRayMarching.Material(
            Red = 0.5, Green = 0.1, Blue = 0.3,
            Ambient = 0.3,
            Diffuse = 1,
            Specular = 1,
            SpecularAlpha = 15));
        Sphere.SetSinusDistortion(0.06, 0.6);
        Sphere.Translate(LibRayMarching.Vector(X = 0, Y = 0, Z = 2));
        self.Scene.AddPrimitive(Sphere);
        print ("Create box")
        Box = self.Wrapper.CreateBox(
            LibRayMarching.Vector(X = 0.5, Y = 0.5, Z = 0.5)
        );
        Box.SetMaterial(LibRayMarching.Material(
            Red = 0.5, Blue = 0.1, Green = 0.3,
            Ambient = 0.3,
            Diffuse = 1,
            Specular = 1,
            SpecularAlpha = 15));
        Box.Translate(LibRayMarching.Vector(X = 3, Y = -3, Z = 2));
        self.Scene.AddPrimitive(Box);
        print ("Create capsule")
        Capsule = self.Wrapper.CreateCapsule(
            0.5,
            LibRayMarching.Vector(X = -0.5, Y = -0.5, Z = -0.5),
            LibRayMarching.Vector(X = 0.5, Y = 0.5, Z = 0.5)
        );
        Capsule.SetMaterial(LibRayMarching.Material(
            Blue = 0.5, Green = 0.1, Red = 0.3,
            Ambient = 0.3,
            Diffuse = 1,
            Specular = 1,
            SpecularAlpha = 15));
        Capsule.Translate(LibRayMarching.Vector(X = 6, Y = 0, Z = 2));
        self.Scene.AddPrimitive(Capsule);
        print ("Create cylinder")
        Cylinder = self.Wrapper.CreateCylinder(1, 2);
        Cylinder.SetMaterial(LibRayMarching.Material(
            Green = 0.5, Red = 0.1, Blue = 0.3,
            Ambient = 0.3,
            Diffuse = 1,
            Specular = 1,
            SpecularAlpha = 15));
        Cylinder.Translate(LibRayMarching.Vector(X = 0, Y = -3, Z = 2));
        self.Scene.AddPrimitive(Cylinder);
        print ("Create torus")
        # Shiny reflective torus (Reflection = 0.6), tipped upright.
        Torus = self.Wrapper.CreateTorus(0.7, 0.3);
        Torus.SetMaterial(LibRayMarching.Material(
            Green = 1, Blue = 1, Red = 1,
            Ambient = 0.05,
            Diffuse = 0.1,
            Specular = 0.3,
            SpecularAlpha = 15,
            Reflection = 0.6));
        Torus.Rotate(LibRayMarching.Vector(X = 1, Y = 0, Z = 0), math.pi/2);
        Torus.Translate(LibRayMarching.Vector(X = 3, Y = 0, Z = 2));
        self.Scene.AddPrimitive(Torus);
        print ("Create transparent sphere")
        Sphere = self.Wrapper.CreateSphere(1);
        # NOTE(review): kwarg is spelled 'TransparencY' — presumably this
        # matches the ACT-generated binding's spelling; verify against
        # LibRayMarching.Material before "fixing" it.
        Sphere.SetMaterial(LibRayMarching.Material(
            Blue = 0.5, Red = 0.1, Green = 0.3,
            Ambient = 0.1,
            Diffuse = 0.2,
            Specular = 0.4,
            SpecularAlpha = 15,
            TransparencY = 0.5,
            Refraction = 1.33));
        Sphere.Translate(LibRayMarching.Vector(X = 6, Y = -3, Z = 2));
        self.Scene.AddPrimitive(Sphere);
        print ("Create menger sponge")
        MengerSponge = self.Wrapper.CreateMengerSponge(3);
        MengerSponge.SetMaterial(LibRayMarching.Material(
            Green = 0.5, Blue = 0.1, Red = 0.3,
            Ambient = 0.3,
            Diffuse = 0.5,
            Specular = 0.4,
            SpecularAlpha = 15));
        MengerSponge.Translate(LibRayMarching.Vector(X = 0, Y = -6, Z = 2));
        self.Scene.AddPrimitive(MengerSponge);
        print ("Create julia quaternion fractal")
        QuaternionFractal = self.Wrapper.CreateQuaternionFractal(10);
        QuaternionFractal.SetQuaternion(0.2, -0.4, 0, -0.6);
        QuaternionFractal.SetMaterial(LibRayMarching.Material(
            Green = 0.5, Blue = 0.1, Red = 0.5,
            Ambient = 0.3,
            Diffuse = 0.5,
            Specular = 0.4,
            SpecularAlpha = 15));
        QuaternionFractal.Translate(LibRayMarching.Vector(X = 3, Y = -6, Z = 2));
        self.Scene.AddPrimitive(QuaternionFractal);
        # CSG demo: (sphere INTERSECT box) MINUS rotated sphere.
        GSphere = self.Wrapper.CreateSphere(1.25);
        GBox = self.Wrapper.CreateBox(
            LibRayMarching.Vector(X = 1, Y = 1, Z = 1)
        );
        GSphere2 = self.Wrapper.CreateSphere(1.1);
        GSphere2.Rotate(LibRayMarching.Vector(X = 1, Y = 0, Z = 0), math.pi/2);
        GroupRCube = self.Wrapper.CreatePrimitiveGroup(LibRayMarching.GroupAction.Intersect);
        GroupRCube.AddPrimitive(GSphere);
        GroupRCube.AddPrimitive(GBox);
        GroupM = self.Wrapper.CreatePrimitiveGroup(LibRayMarching.GroupAction.Subtract);
        GroupM.AddPrimitive(GroupRCube);
        GroupM.AddPrimitive(GSphere2);
        GroupM.Translate(LibRayMarching.Vector(X = 6, Y = -6, Z = 2));
        GroupM.SetMaterial(LibRayMarching.Material(
            Green = 0.5, Blue = 0.5, Red = 0.1,
            Ambient = 0.3,
            Diffuse = 0.5,
            Specular = 0.4,
            SpecularAlpha = 15));
        self.Scene.AddPrimitive(GroupM);
        print ("Create pane")
        # Ground plane at z=0 with +z normal.
        Plane = self.Wrapper.CreatePlane(
            LibRayMarching.Vector(X = 0, Y = 0, Z = 0),
            LibRayMarching.Vector(X = 0, Y = 0, Z = 1)
        );
        Plane.SetMaterial(LibRayMarching.Material(
            Red = 0.2, Green = 0.2, Blue = 0.2,
            Ambient = 0.3,
            Diffuse = 1,
            Specular = 1,
            SpecularAlpha = 15));
        self.Scene.AddPrimitive(Plane);

    def RenderPixel(self, x, y):
        """Render and print a single pixel — handy as a debug probe."""
        print ("Render scene")
        Pixel = self.Scene.RenderPixel(x, y);
        print ("Pixelcolor is " + str(Pixel))

    def Render(self, file, show):
        """Render the scene and save it as '<file>.png'; optionally show it.

        `file` is the output path without extension (name shadows the
        builtin, kept for interface compatibility).
        """
        print ("Render scene")
        self.Scene.RenderScene();
        print ("")
        print ("Get buffer")
        # Flat row-major buffer of packed 0xRRGGBB ints, one per pixel.
        color_buffer = self.Scene.GetColorBuffer();
        print ("Buffer length " + str(len(color_buffer)))
        img = Image.new('RGB', (self.Width, self.Height))
        pixels = img.load()
        for i in range(img.size[0]): # for every col:
            for j in range(img.size[1]): # For every row
                color = color_buffer[i + j * img.size[0]];
                # print color;
                # Unpack 0xRRGGBB into an (r, g, b) tuple for PIL.
                pixels[i,j] = ((color >> 16) & 0xFF, (color >> 8) & 0xFF, color & 0xFF) # set the colour accordingly
        img.save(file + '.png')
        if show:
            img.show()
def main():
    """Render a 400-frame camera orbit of the demo scene, one PNG each.

    Frames already present under round/ are skipped, so an interrupted
    run can simply be restarted to resume where it left off.
    """
    for frame in range(400):
        out_name = 'round/frame_' + str(frame)
        if os.path.isfile(out_name + '.png'):
            continue  # frame already rendered by a previous run
        # One step around a full circle; sin/cos place the camera.
        radian = 2 * math.pi * frame / 400
        rm = RayMarching(600, 400, math.sin(radian), math.cos(radian))
        rm.BuildScene()
        rm.RenderPixel(225, 172)  # single-pixel debug probe
        rm.Render(out_name, False)
if __name__ == "__main__":
    try:
        main()
    except Exception:
        # Top-level boundary: report the full traceback. The original
        # `print(e)` discarded the stack, making failures hard to locate.
        import traceback
        traceback.print_exc()
|
import tensorflow as tf
import numpy as np
# Mask-aware embedding demo: index 0 is reserved as the padding token.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Embedding(1001, 64, input_length=3, mask_zero=True))
# The model takes an integer matrix of shape (batch, input_length); with
# mask_zero=True, index 0 is masked, so valid word indices are 1..1000.
# model.output_shape is (None, 3, 64) — input_length is 3 here, and `None`
# is the batch dimension. (The original comment said (None, 10, 64).)
input_array = np.random.randint(1000, size=(32, 3))
# Zero two positions of the first sample so the mask has something to hide.
input_array[0,2] = 0
input_array[0,1] = 0
print(input_array.shape, input_array)
model.compile('rmsprop', 'mse')
output_array = model.predict(input_array)
print(output_array.shape, output_array[0])
# (32, 3) -> (32, 3, 64)
|
# Advent of Code day 8 puzzle input; swap the comment to run the example.
in_file = open('input_8.txt', 'r')
# in_file = open('test_8.txt', 'r')
def op(line):
    """Execute one handheld-console instruction, mutating global state.

    `line` is "<opcode> <signed-number>", e.g. "acc +7" or "jmp -3".
    'acc' adds to the global accumulator, 'jmp' moves the global pointer,
    anything else ('nop') does nothing.

    Returns the pointer step for the caller: 1 to advance to the next
    instruction, 0 when the instruction already moved `pointer` itself.
    """
    global acc
    global pointer
    ops, num = line.split(' ')
    # int() already parses a leading '+' or '-', so the original manual
    # sign branching was redundant.
    if ops == 'acc':
        acc += int(num)
        return 1
    if ops == 'jmp':
        pointer += int(num)
        return 0
    return 1  # 'nop' (and any unknown opcode) just advances
# Run the program until any instruction is about to execute a second time
# (the first repeat marks the start of the infinite loop), then report acc.
acc = 0
pointer = 0
evaled = []  # instruction indices already executed (read by the loop guard)
program = [line.rstrip() for line in in_file.readlines()]

while pointer not in evaled:
    evaled.append(pointer)
    step = op(program[pointer])
    if step == 1:
        pointer += 1

print(acc)
import logging
import pickle
import time
import h5py
import numpy as np
import tensorflow as tf
import tensorflow.contrib.layers as layers
from . import tf_util as U
logger = logging.getLogger(__name__)
class Policy:
    """Base class for TF1-graph policies used in evolution strategies.

    Subclasses implement _initialize() (build the graph, return its
    variable scope) and act(). The base class wires up flat-parameter
    get/set helpers and HDF5 save/load of all variables.
    """

    def __init__(self, *args, **kwargs):
        # Constructor args are kept so save()/Load() can pickle them and
        # rebuild an identical policy later.
        self.args, self.kwargs = args, kwargs
        self.scope = self._initialize(*args, **kwargs)
        # Collect this policy's variables by scope name from the graph.
        self.all_variables = tf.get_collection(tf.GraphKeys.VARIABLES, self.scope.name)
        self.trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope.name)
        self.num_params = sum(int(np.prod(v.get_shape().as_list())) for v in self.trainable_variables)
        # Helpers that read/write every trainable variable as one flat vector.
        self._setfromflat = U.SetFromFlat(self.trainable_variables)
        self._getflat = U.GetFlat(self.trainable_variables)
        logger.info('Trainable variables ({} parameters)'.format(self.num_params))
        for v in self.trainable_variables:
            shp = v.get_shape().as_list()
            logger.info('- {} shape:{} size:{}'.format(v.name, shp, np.prod(shp)))
        logger.info('All variables')
        for v in self.all_variables:
            shp = v.get_shape().as_list()
            logger.info('- {} shape:{} size:{}'.format(v.name, shp, np.prod(shp)))
        # One grouped assign op so set_all_vars(*arrays) overwrites the
        # whole variable state in a single session call (used by Load()).
        placeholders = [tf.placeholder(v.value().dtype, v.get_shape().as_list()) for v in self.all_variables]
        self.set_all_vars = U.function(
            inputs=placeholders,
            outputs=[],
            updates=[tf.group(*[v.assign(p) for v, p in zip(self.all_variables, placeholders)])]
        )

    def reinitialize(self):
        # NOTE(review): `v.reinitialize` is not a stock TF Variable
        # attribute — presumably attached elsewhere in this project; confirm.
        for v in self.trainable_variables:
            v.reinitialize.eval()

    def _initialize(self, *args, **kwargs):
        # Subclass hook: build the graph and return its tf variable scope.
        raise NotImplementedError

    def save(self, filename):
        """Serialize all variables plus constructor args into an HDF5 file."""
        assert filename.endswith('.h5')
        with h5py.File(filename, 'w', libver='latest') as f:
            for v in self.all_variables:
                f[v.name] = v.eval()
            # TODO: it would be nice to avoid pickle, but it's convenient to pass Python objects to _initialize
            # (like Gym spaces or numpy arrays)
            f.attrs['name'] = type(self).__name__
            f.attrs['args_and_kwargs'] = np.void(pickle.dumps((self.args, self.kwargs), protocol=-1))

    @classmethod
    def Load(cls, filename, extra_kwargs=None):
        """Rebuild a policy saved by save() and restore its variables.

        SECURITY NOTE: constructor args are unpickled from the file —
        only load files from trusted sources.
        """
        with h5py.File(filename, 'r') as f:
            args, kwargs = pickle.loads(f.attrs['args_and_kwargs'].tostring())
            if extra_kwargs:
                kwargs.update(extra_kwargs)
            policy = cls(*args, **kwargs)
            policy.set_all_vars(*[f[v.name][...] for v in policy.all_variables])
        return policy

    # === Rollouts/training ===

    def rollout(self, env, *, render=False, timestep_limit=None, save_obs=False, random_stream=None):
        """
        If random_stream is provided, the rollout will take noisy actions with noise drawn from that stream.
        Otherwise, no action noise will be added.

        Returns (rewards, num_steps), plus the stacked observation array
        when save_obs is True. Episode length is capped by the env's own
        timestep limit.
        """
        env_timestep_limit = env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps')
        timestep_limit = env_timestep_limit if timestep_limit is None else min(timestep_limit, env_timestep_limit)
        rews = []
        t = 0
        if save_obs:
            obs = []
        ob = env.reset()
        for _ in range(timestep_limit):
            # ob[None] adds a batch dimension of 1; [0] strips it again.
            ac = self.act(ob[None], random_stream=random_stream)[0]
            if save_obs:
                obs.append(ob)
            ob, rew, done, _ = env.step(ac)
            rews.append(rew)
            t += 1
            if render:
                env.render()
            if done:
                break
        rews = np.array(rews, dtype=np.float32)
        if save_obs:
            return rews, t, np.array(obs)
        return rews, t

    def act(self, ob, random_stream=None):
        # Subclass hook: map batched observations to actions.
        raise NotImplementedError

    def set_trainable_flat(self, x):
        # Overwrite all trainable parameters from one flat vector.
        self._setfromflat(x)

    def get_trainable_flat(self):
        # Return all trainable parameters as one flat vector.
        return self._getflat()

    @property
    def needs_ob_stat(self):
        # Subclass hook: whether the policy consumes observation statistics.
        raise NotImplementedError

    def set_ob_stat(self, ob_mean, ob_std):
        # Subclass hook: install observation normalization statistics.
        raise NotImplementedError
def bins(x, dim, num_bins, name):
    """Discretized output head: score `num_bins` options for each of the
    `dim` action dimensions and pick the argmax, yielding integers in
    [0, num_bins) with shape (batch, dim)."""
    flat_scores = U.dense(x, dim * num_bins, name, U.normc_initializer(0.01))
    per_dim_scores = tf.reshape(flat_scores, [-1, dim, num_bins])
    return tf.argmax(per_dim_scores, 2)
class MujocoPolicy(Policy):
    """Feed-forward policy for continuous-control (MuJoCo) environments.

    Observations are normalized with externally supplied running mean/std
    (see set_ob_stat) and clipped to [-5, 5] standard deviations. Actions
    are produced either continuously or by discretizing each action
    dimension into bins, controlled by the `ac_bins` spec string.
    """

    def _initialize(self, ob_space, ac_space, ac_bins, ac_noise_std, nonlin_type, hidden_dims, connection_type):
        """Build the TF graph for this policy.

        ob_space/ac_space: Gym spaces; both must be rank-1 with finite action bounds.
        ac_bins: '<mode>:<arg>' string — 'uniform:<nbins>', 'custom:<v1,v2,...>'
            or 'continuous:' (see _make_net).
        ac_noise_std: std of Gaussian action noise added in act() during rollouts.
        nonlin_type: one of 'tanh', 'relu', 'lrelu', 'elu'.
        hidden_dims: sizes of the hidden layers.
        connection_type: only 'ff' (plain feed-forward) is implemented.
        Returns the tf variable scope that holds all policy variables.
        """
        self.ac_space = ac_space
        self.ac_bins = ac_bins
        self.ac_noise_std = ac_noise_std
        self.hidden_dims = hidden_dims
        self.connection_type = connection_type

        assert len(ob_space.shape) == len(self.ac_space.shape) == 1
        assert np.all(np.isfinite(self.ac_space.low)) and np.all(np.isfinite(self.ac_space.high)), \
            'Action bounds required'

        self.nonlin = {'tanh': tf.tanh, 'relu': tf.nn.relu, 'lrelu': U.lrelu, 'elu': tf.nn.elu}[nonlin_type]

        with tf.variable_scope(type(self).__name__) as scope:
            # Observation normalization: stats live in non-trainable variables,
            # initialized to NaN so use before set_ob_stat is immediately visible.
            ob_mean = tf.get_variable(
                'ob_mean', ob_space.shape, tf.float32, tf.constant_initializer(np.nan), trainable=False)
            ob_std = tf.get_variable(
                'ob_std', ob_space.shape, tf.float32, tf.constant_initializer(np.nan), trainable=False)
            in_mean = tf.placeholder(tf.float32, ob_space.shape)
            in_std = tf.placeholder(tf.float32, ob_space.shape)
            self._set_ob_mean_std = U.function([in_mean, in_std], [], updates=[
                tf.assign(ob_mean, in_mean),
                tf.assign(ob_std, in_std),
            ])

            # Policy network: normalized observations clipped to +/-5 stds.
            o = tf.placeholder(tf.float32, [None] + list(ob_space.shape))
            a = self._make_net(tf.clip_by_value((o - ob_mean) / ob_std, -5.0, 5.0))
            self._act = U.function([o], a)
        return scope

    def _make_net(self, o):
        """Map an observation batch `o` to an action batch per `ac_bins` mode."""
        # Process observation
        if self.connection_type == 'ff':
            x = o
            for ilayer, hd in enumerate(self.hidden_dims):
                x = self.nonlin(U.dense(x, hd, 'l{}'.format(ilayer), U.normc_initializer(1.0)))
        else:
            raise NotImplementedError(self.connection_type)

        # Map to action
        adim, ahigh, alow = self.ac_space.shape[0], self.ac_space.high, self.ac_space.low
        assert isinstance(self.ac_bins, str)
        ac_bin_mode, ac_bin_arg = self.ac_bins.split(':')

        if ac_bin_mode == 'uniform':
            # Uniformly spaced bins, from ac_space.low to ac_space.high
            num_ac_bins = int(ac_bin_arg)
            aidx_na = bins(x, adim, num_ac_bins, 'out')  # 0 ... num_ac_bins-1
            ac_range_1a = (ahigh - alow)[None, :]
            a = 1. / (num_ac_bins - 1.) * tf.to_float(aidx_na) * ac_range_1a + alow[None, :]

        elif ac_bin_mode == 'custom':
            # Custom bins specified as a list of values from -1 to 1
            # The bins are rescaled to ac_space.low to ac_space.high
            acvals_k = np.array(list(map(float, ac_bin_arg.split(','))), dtype=np.float32)
            logger.info('Custom action values: ' + ' '.join('{:.3f}'.format(x) for x in acvals_k))
            assert acvals_k.ndim == 1 and acvals_k[0] == -1 and acvals_k[-1] == 1
            acvals_ak = (
                (ahigh - alow)[:, None] / (acvals_k[-1] - acvals_k[0]) * (acvals_k - acvals_k[0])[None, :]
                + alow[:, None]
            )
            # NOTE(review): tf.concat(axis, values) argument order is the pre-TF-1.0 API.
            aidx_na = bins(x, adim, len(acvals_k), 'out')  # values in [0, k-1]
            a = tf.gather_nd(
                acvals_ak,
                tf.concat(2, [
                    tf.tile(np.arange(adim)[None, :, None], [tf.shape(aidx_na)[0], 1, 1]),
                    tf.expand_dims(aidx_na, -1)
                ])  # (n,a,2)
            )  # (n,a)

        elif ac_bin_mode == 'continuous':
            a = U.dense(x, adim, 'out', U.normc_initializer(0.01))
        else:
            raise NotImplementedError(ac_bin_mode)
        return a

    def act(self, ob, random_stream=None):
        """Compute actions for observation batch `ob`; optionally add Gaussian noise."""
        a = self._act(ob)
        if random_stream is not None and self.ac_noise_std != 0:
            a += random_stream.randn(*a.shape) * self.ac_noise_std
        return a

    @property
    def needs_ob_stat(self):
        # This policy normalizes observations, so callers must supply obs stats.
        return True

    @property
    def needs_ref_batch(self):
        # No virtual batch norm here (contrast with ESAtariPolicy).
        return False

    def set_ob_stat(self, ob_mean, ob_std):
        # Push the normalization statistics into the graph variables.
        self._set_ob_mean_std(ob_mean, ob_std)

    def initialize_from(self, filename, ob_stat=None):
        """
        Initializes weights from another policy, which must have the same architecture (variable names),
        but the weight arrays can be smaller than the current policy.
        """
        with h5py.File(filename, 'r') as f:
            f_var_names = []
            f.visititems(lambda name, obj: f_var_names.append(name) if isinstance(obj, h5py.Dataset) else None)
            assert set(v.name for v in self.all_variables) == set(f_var_names), 'Variable names do not match'

            init_vals = []
            for v in self.all_variables:
                shp = v.get_shape().as_list()
                f_shp = f[v.name].shape
                assert len(shp) == len(f_shp) and all(a >= b for a, b in zip(shp, f_shp)), \
                    'This policy must have more weights than the policy to load'
                init_val = v.eval()
                # ob_mean and ob_std are initialized with nan, so set them manually
                if 'ob_mean' in v.name:
                    init_val[:] = 0
                    init_mean = init_val
                elif 'ob_std' in v.name:
                    init_val[:] = 0.001
                    init_std = init_val
                # Fill in subarray from the loaded policy
                init_val[tuple([np.s_[:s] for s in f_shp])] = f[v.name]
                init_vals.append(init_val)
            self.set_all_vars(*init_vals)

        if ob_stat is not None:
            ob_stat.set_from_init(init_mean, init_std, init_count=1e5)

    def _get_pos(self, model):
        # Mass-weighted mean of body positions = center of mass of the robot.
        mass = model.body_mass
        xpos = model.data.xipos
        center = (np.sum(mass * xpos, 0) / np.sum(mass))
        return center[0], center[1], center[2]

    def rollout(self, env, *, render=False, timestep_limit=None, save_obs=False, random_stream=None, policy_seed=None, bc_choice=None):
        """
        If random_stream is provided, the rollout will take noisy actions with noise drawn from that stream.
        Otherwise, no action noise will be added.

        Returns (rews, t, novelty_vector) — or (rews, t, obs, novelty_vector)
        when save_obs. novelty_vector is the full (x, y) center-of-mass
        trajectory when bc_choice == 'traj', else the final (x, y) position.
        """
        env_timestep_limit = env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps')
        # NOTE(review): if the env spec lacks the timestep tag AND timestep_limit
        # is None, this stays None and np.zeros/range below would fail — confirm callers.
        timestep_limit = env_timestep_limit if timestep_limit is None else min(timestep_limit, env_timestep_limit)
        rews = []
        x_traj, y_traj = np.zeros(timestep_limit), np.zeros(timestep_limit)
        t = 0
        if save_obs:
            obs = []
        if policy_seed:  # NOTE(review): seed 0 is falsy and silently skipped
            env.seed(policy_seed)
            np.random.seed(policy_seed)
            if random_stream:
                random_stream.seed(policy_seed)
        ob = env.reset()
        for _ in range(timestep_limit):
            ac = self.act(ob[None], random_stream=random_stream)[0]
            if save_obs:
                obs.append(ob)
            ob, rew, done, _ = env.step(ac)
            # Record the (x, y) center of mass at every step for the trajectory BC.
            x_traj[t], y_traj[t], _ = self._get_pos(env.unwrapped.model)
            rews.append(rew)
            t += 1
            if render:
                env.render()
            if done:
                break
        x_pos, y_pos, _ = self._get_pos(env.unwrapped.model)
        rews = np.array(rews, dtype=np.float32)
        # Pad the trajectory with the final position so its length is fixed.
        x_traj[t:] = x_traj[t-1]
        y_traj[t:] = y_traj[t-1]
        if bc_choice and bc_choice == "traj":
            novelty_vector = np.concatenate((x_traj, y_traj), axis=0)
        else:
            novelty_vector = np.array([x_pos, y_pos])
        if save_obs:
            return rews, t, np.array(obs), novelty_vector
        return rews, t, novelty_vector
class MlpPolicy(Policy):
    """Feed-forward policy like MujocoPolicy, but with observation
    normalization disabled in the network (the mean/std variables are still
    created so snapshots stay compatible). The novelty vector collected by
    rollout() is the sequence of raw observations.
    """

    def _initialize(self, ob_space, ac_space, ac_bins, ac_noise_std, nonlin_type, hidden_dims, connection_type):
        """Build the TF graph; same parameters as MujocoPolicy._initialize.

        Returns the tf variable scope that holds all policy variables.
        """
        self.ac_space = ac_space
        self.ac_bins = ac_bins
        self.ac_noise_std = ac_noise_std
        self.hidden_dims = hidden_dims
        self.connection_type = connection_type

        assert len(ob_space.shape) == len(self.ac_space.shape) == 1
        assert np.all(np.isfinite(self.ac_space.low)) and np.all(np.isfinite(self.ac_space.high)), \
            'Action bounds required'

        self.nonlin = {'tanh': tf.tanh, 'relu': tf.nn.relu, 'lrelu': U.lrelu, 'elu': tf.nn.elu}[nonlin_type]

        with tf.variable_scope(type(self).__name__) as scope:
            # Observation normalization variables are created (and settable via
            # set_ob_stat) but intentionally NOT used by the network below.
            ob_mean = tf.get_variable(
                'ob_mean', ob_space.shape, tf.float32, tf.constant_initializer(np.nan), trainable=False)
            ob_std = tf.get_variable(
                'ob_std', ob_space.shape, tf.float32, tf.constant_initializer(np.nan), trainable=False)
            in_mean = tf.placeholder(tf.float32, ob_space.shape)
            in_std = tf.placeholder(tf.float32, ob_space.shape)
            self._set_ob_mean_std = U.function([in_mean, in_std], [], updates=[
                tf.assign(ob_mean, in_mean),
                tf.assign(ob_std, in_std),
            ])

            # Policy network
            o = tf.placeholder(tf.float32, [None] + list(ob_space.shape))
            #a = self._make_net(tf.clip_by_value((o - ob_mean) / ob_std, -5.0, 5.0))
            # No normalization needed for the envs we use
            a = self._make_net(o)
            self._act = U.function([o], a)
        return scope

    def _make_net(self, o):
        """Map an observation batch `o` to an action batch per `ac_bins` mode."""
        # Process observation
        if self.connection_type == 'ff':
            x = o
            for ilayer, hd in enumerate(self.hidden_dims):
                x = self.nonlin(U.dense(x, hd, 'l{}'.format(ilayer), U.normc_initializer(1.0)))
        else:
            raise NotImplementedError(self.connection_type)

        # Map to action
        adim, ahigh, alow = self.ac_space.shape[0], self.ac_space.high, self.ac_space.low
        assert isinstance(self.ac_bins, str)
        ac_bin_mode, ac_bin_arg = self.ac_bins.split(':')

        if ac_bin_mode == 'uniform':
            # Uniformly spaced bins, from ac_space.low to ac_space.high
            num_ac_bins = int(ac_bin_arg)
            aidx_na = bins(x, adim, num_ac_bins, 'out')  # 0 ... num_ac_bins-1
            ac_range_1a = (ahigh - alow)[None, :]
            a = 1. / (num_ac_bins - 1.) * tf.to_float(aidx_na) * ac_range_1a + alow[None, :]

        elif ac_bin_mode == 'custom':
            # Custom bins specified as a list of values from -1 to 1
            # The bins are rescaled to ac_space.low to ac_space.high
            acvals_k = np.array(list(map(float, ac_bin_arg.split(','))), dtype=np.float32)
            logger.info('Custom action values: ' + ' '.join('{:.3f}'.format(x) for x in acvals_k))
            assert acvals_k.ndim == 1 and acvals_k[0] == -1 and acvals_k[-1] == 1
            acvals_ak = (
                (ahigh - alow)[:, None] / (acvals_k[-1] - acvals_k[0]) * (acvals_k - acvals_k[0])[None, :]
                + alow[:, None]
            )
            # NOTE(review): tf.concat(axis, values) argument order is the pre-TF-1.0 API.
            aidx_na = bins(x, adim, len(acvals_k), 'out')  # values in [0, k-1]
            a = tf.gather_nd(
                acvals_ak,
                tf.concat(2, [
                    tf.tile(np.arange(adim)[None, :, None], [tf.shape(aidx_na)[0], 1, 1]),
                    tf.expand_dims(aidx_na, -1)
                ])  # (n,a,2)
            )  # (n,a)

        elif ac_bin_mode == 'continuous':
            a = U.dense(x, adim, 'out', U.normc_initializer(0.01))
        else:
            raise NotImplementedError(ac_bin_mode)
        return a

    def act(self, ob, random_stream=None):
        """Compute actions for observation batch `ob`; optionally add Gaussian noise."""
        a = self._act(ob)
        if random_stream is not None and self.ac_noise_std != 0:
            a += random_stream.randn(*a.shape) * self.ac_noise_std
        return a

    @property
    def needs_ob_stat(self):
        # The network does not consume observation statistics (see _initialize).
        return False

    @property
    def needs_ref_batch(self):
        return False

    def set_ob_stat(self, ob_mean, ob_std):
        # Kept for interface compatibility; the stats are stored but unused by the net.
        self._set_ob_mean_std(ob_mean, ob_std)

    def initialize_from(self, filename, ob_stat=None):
        """
        Initializes weights from another policy, which must have the same architecture (variable names),
        but the weight arrays can be smaller than the current policy.
        """
        with h5py.File(filename, 'r') as f:
            f_var_names = []
            f.visititems(lambda name, obj: f_var_names.append(name) if isinstance(obj, h5py.Dataset) else None)
            assert set(v.name for v in self.all_variables) == set(f_var_names), 'Variable names do not match'

            init_vals = []
            for v in self.all_variables:
                shp = v.get_shape().as_list()
                f_shp = f[v.name].shape
                assert len(shp) == len(f_shp) and all(a >= b for a, b in zip(shp, f_shp)), \
                    'This policy must have more weights than the policy to load'
                init_val = v.eval()
                # ob_mean and ob_std are initialized with nan, so set them manually
                if 'ob_mean' in v.name:
                    init_val[:] = 0
                    init_mean = init_val
                elif 'ob_std' in v.name:
                    init_val[:] = 0.001
                    init_std = init_val
                # Fill in subarray from the loaded policy
                init_val[tuple([np.s_[:s] for s in f_shp])] = f[v.name]
                init_vals.append(init_val)
            self.set_all_vars(*init_vals)

        if ob_stat is not None:
            ob_stat.set_from_init(init_mean, init_std, init_count=1e5)

    def rollout(self, env, *, render=False, timestep_limit=None, save_obs=False, random_stream=None, policy_seed=None, bc_choice=None, bc_only_final_state=False):
        """
        If random_stream is provided, the rollout will take noisy actions with noise drawn from that stream.
        Otherwise, no action noise will be added.

        Returns (rews, t, novelty_vector) — or (rews, t, obs, novelty_vector)
        when save_obs. novelty_vector is the per-step observation sequence,
        or only the final observation when bc_only_final_state is True.
        """
        # Assume timelimit is wrapper
        env_timestep_limit = env._max_episode_steps
        timestep_limit = env_timestep_limit if timestep_limit is None else min(timestep_limit, env_timestep_limit)
        rews = []; novelty_vector = []
        t = 0
        if save_obs:
            obs = []
        if policy_seed:  # NOTE(review): seed 0 is falsy and silently skipped
            env.seed(policy_seed)
            np.random.seed(policy_seed)
            if random_stream:
                random_stream.seed(policy_seed)
        ob = env.reset()
        for _ in range(timestep_limit):
            start_time = time.time()  # NOTE(review): unused — leftover timing instrumentation
            ac = self.act(ob[None], random_stream=random_stream)[0]
            start_time = time.time()  # NOTE(review): unused — leftover timing instrumentation
            ob, rew, done, info = env.step(ac)
            if save_obs:
                obs.append(ob)
            rews.append(rew)
            novelty_vector.append(ob)
            t += 1
            if render:
                env.render()
            if done:
                break
        if bc_only_final_state:
            novelty_vector = ob.copy()
        rews = np.array(rews, dtype=np.float32)
        if save_obs:
            return rews, t, np.array(obs), np.array(novelty_vector)
        return rews, t, np.array(novelty_vector)
class ESAtariPolicy(Policy):
    """Atari policy for ES: conv net whose batch-norm layers are driven by a
    reference batch (set via set_ref_batch) — the `is_ref` placeholder
    switches batch norm into training mode when that batch is passed through.
    Actions are discrete argmax over the output logits; the novelty vector
    collected by rollout() is the per-step emulator RAM state.
    """

    def _initialize(self, ob_space, ac_space):
        """Build the TF graph; returns the scope holding all policy variables."""
        self.ob_space_shape = ob_space.shape
        self.ac_space = ac_space
        self.num_actions = ac_space.n

        with tf.variable_scope(type(self).__name__) as scope:
            o = tf.placeholder(tf.float32, [None] + list(self.ob_space_shape))
            # Scalar flag: True when the reference batch is being fed through.
            is_ref_ph = tf.placeholder(tf.bool, shape=[])
            a = self._make_net(o, is_ref_ph)
            self._act = U.function([o, is_ref_ph] , a)
        return scope

    def _make_net(self, o, is_ref):
        """Conv(16,8x8,/4) -> BN -> Conv(32,4x4,/2) -> BN -> FC(256) -> BN -> logits; greedy argmax action."""
        x = o
        x = layers.convolution2d(x, num_outputs=16, kernel_size=8, stride=4, activation_fn=None, scope='conv1')
        # decay=0. makes the moving statistics come entirely from the most
        # recent (reference) batch rather than a running average.
        x = layers.batch_norm(x, scale=True, is_training=is_ref, decay=0., updates_collections=None, activation_fn=tf.nn.relu, epsilon=1e-3)
        x = layers.convolution2d(x, num_outputs=32, kernel_size=4, stride=2, activation_fn=None, scope='conv2')
        x = layers.batch_norm(x, scale=True, is_training=is_ref, decay=0., updates_collections=None, activation_fn=tf.nn.relu, epsilon=1e-3)
        x = layers.flatten(x)
        x = layers.fully_connected(x, num_outputs=256, activation_fn=None, scope='fc')
        x = layers.batch_norm(x, scale=True, is_training=is_ref, decay=0., updates_collections=None, activation_fn=tf.nn.relu, epsilon=1e-3)
        a = layers.fully_connected(x, num_outputs=self.num_actions, activation_fn=None, scope='out')
        return tf.argmax(a,1)

    def set_ref_batch(self, ref_batch):
        # Argument list later splatted into self._act: (observations, is_ref=True).
        self.ref_list = []
        self.ref_list.append(ref_batch)
        self.ref_list.append(True)

    @property
    def needs_ob_stat(self):
        return False

    @property
    def needs_ref_batch(self):
        # Callers must invoke set_ref_batch before rollouts.
        return True

    def initialize_from(self, filename):
        """
        Initializes weights from another policy, which must have the same architecture (variable names),
        but the weight arrays can be smaller than the current policy.
        """
        with h5py.File(filename, 'r') as f:
            f_var_names = []
            f.visititems(lambda name, obj: f_var_names.append(name) if isinstance(obj, h5py.Dataset) else None)
            assert set(v.name for v in self.all_variables) == set(f_var_names), 'Variable names do not match'

            init_vals = []
            for v in self.all_variables:
                shp = v.get_shape().as_list()
                f_shp = f[v.name].shape
                assert len(shp) == len(f_shp) and all(a >= b for a, b in zip(shp, f_shp)), \
                    'This policy must have more weights than the policy to load'
                init_val = v.eval()
                # ob_mean and ob_std are initialized with nan, so set them manually
                # NOTE(review): this policy declares no ob_mean/ob_std variables,
                # so these branches look like copied-over dead code — confirm.
                if 'ob_mean' in v.name:
                    init_val[:] = 0
                    init_mean = init_val
                elif 'ob_std' in v.name:
                    init_val[:] = 0.001
                    init_std = init_val
                # Fill in subarray from the loaded policy
                init_val[tuple([np.s_[:s] for s in f_shp])] = f[v.name]
                init_vals.append(init_val)
            self.set_all_vars(*init_vals)

    def act(self, train_vars, random_stream=None):
        # train_vars is [obs_batch, is_ref_flag]; see set_ref_batch and rollout.
        return self._act(*train_vars)

    def rollout(self, env, *, render=False, timestep_limit=None, save_obs=False, random_stream=None, worker_stats=None, policy_seed=None):
        """
        If random_stream is provided, the rollout will take noisy actions with noise drawn from that stream.
        Otherwise, no action noise will be added.

        Returns (rews, t, novelty_vector) — or (rews, t, obs, novelty_vector)
        when save_obs — where novelty_vector is the per-step Atari RAM state.
        Optionally accumulates act/step wall-clock time into worker_stats.
        """
        env_timestep_limit = env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps')
        timestep_limit = env_timestep_limit if timestep_limit is None else min(timestep_limit, env_timestep_limit)
        rews = []; novelty_vector = []
        t = 0
        if save_obs:
            obs = []
        if policy_seed:  # NOTE(review): seed 0 is falsy and silently skipped
            env.seed(policy_seed)
            np.random.seed(policy_seed)
            if random_stream:
                random_stream.seed(policy_seed)
        ob = env.reset()
        self.act(self.ref_list, random_stream=random_stream) #passing ref batch through network
        for _ in range(timestep_limit):
            start_time = time.time()
            ac = self.act([ob[None], False], random_stream=random_stream)[0]
            if worker_stats:
                worker_stats.time_comp_act += time.time() - start_time
            start_time = time.time()
            ob, rew, done, info = env.step(ac)
            ram = env.unwrapped._get_ram() # extracts RAM state information
            if save_obs:
                obs.append(ob)
            if worker_stats:
                worker_stats.time_comp_step += time.time() - start_time
            rews.append(rew)
            novelty_vector.append(ram)
            t += 1
            if render:
                env.render()
            if done:
                break
        rews = np.array(rews, dtype=np.float32)
        if save_obs:
            return rews, t, np.array(obs), np.array(novelty_vector)
        return rews, t, np.array(novelty_vector)
class GAAtariPolicy(Policy):
    """Atari policy for the GA: plain conv net, greedy discrete actions,
    no observation normalization and no reference batch. The novelty vector
    returned by rollout() is the emulator RAM state at episode end.
    """

    def _initialize(self, ob_space, ac_space, nonlin_type, ac_init_std=0.1):
        """Build the TF graph; ac_init_std scales the output layer init.
        Returns the scope holding all policy variables."""
        self.ob_space_shape = ob_space.shape
        self.ac_space = ac_space
        self.ac_init_std = ac_init_std
        self.num_actions = self.ac_space.n
        self.nonlin = {'tanh': tf.tanh, 'relu': tf.nn.relu, 'lrelu': U.lrelu, 'elu': tf.nn.elu}[nonlin_type]

        with tf.variable_scope(type(self).__name__) as scope:
            o = tf.placeholder(tf.float32, [None] + list(self.ob_space_shape))
            a = self._make_net(o)
            self._act = U.function([o] , a)
        return scope

    def _make_net(self, o):
        # Conv(16,8x8,/4) -> Conv(32,4x4,/2) -> FC(256) -> logits; greedy argmax action.
        x = o
        x = self.nonlin(U.conv(x, name='conv1', num_outputs=16, kernel_size=8, stride=4, std=1.0))
        x = self.nonlin(U.conv(x, name='conv2', num_outputs=32, kernel_size=4, stride=2, std=1.0))
        x = U.flattenallbut0(x)
        x = self.nonlin(U.dense(x, 256, 'fc', U.normc_initializer(1.0), std=1.0))
        a = U.dense(x, self.num_actions, 'out', U.normc_initializer(self.ac_init_std), std=self.ac_init_std)
        return tf.argmax(a,1)

    @property
    def needs_ob_stat(self):
        return False

    @property
    def needs_ref_batch(self):
        return False

    # Dont add random noise since action space is discrete
    def act(self, train_vars, random_stream=None):
        # train_vars is the observation batch; random_stream is accepted but unused.
        return self._act(train_vars)

    def rollout(self, env, *, render=False, timestep_limit=None, save_obs=False, random_stream=None, worker_stats=None, policy_seed=None):
        """
        If random_stream is provided, the rollout will take noisy actions with noise drawn from that stream.
        Otherwise, no action noise will be added.

        Returns (rews, t, novelty_vector) — or (rews, t, obs, novelty_vector)
        when save_obs — where novelty_vector is the final-state Atari RAM.
        NOTE(review): worker_stats is accepted but never used here.
        """
        env_timestep_limit = env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps')
        timestep_limit = env_timestep_limit if timestep_limit is None else min(timestep_limit, env_timestep_limit)
        rews = []; novelty_vector = []
        rollout_details = {}  # NOTE(review): never populated or returned — dead leftover
        t = 0
        if save_obs:
            obs = []
        if policy_seed:  # NOTE(review): seed 0 is falsy and silently skipped
            env.seed(policy_seed)
            np.random.seed(policy_seed)
            if random_stream:
                random_stream.seed(policy_seed)
        ob = env.reset()
        for _ in range(timestep_limit):
            ac = self.act(ob[None], random_stream=random_stream)[0]
            if save_obs:
                obs.append(ob)
            ob, rew, done, info = env.step(ac)
            rews.append(rew)
            t += 1
            if render:
                env.render()
            if done:
                break
        # Copy over final positions to the max timesteps
        rews = np.array(rews, dtype=np.float32)
        novelty_vector = env.unwrapped._get_ram() # extracts RAM state information
        if save_obs:
            return rews, t, np.array(obs), np.array(novelty_vector)
        return rews, t, np.array(novelty_vector)
|
#!/usr/bin/python3
"""Pick a random integer and report its (sign-preserving) last digit."""
import random


def last_digit(number):
    """Return the last digit of *number*, negated when *number* is negative."""
    if number < 0:
        return (abs(number) % 10) * -1
    return number % 10


def describe(last):
    """Return the classification string for a last digit in [-9, 9]."""
    if last > 5:
        return "and is greater than 5"
    if last == 0:
        return "and is 0"
    # Covers every remaining case (last < 6 and last != 0, including
    # negatives); the original elif chain could leave the variable unset —
    # and it shadowed the builtin `str` — both fixed here.
    return "and is less than 6 and not 0"


number = random.randint(-10000, 10000)
last = last_digit(number)
print("Last digit of {} is {} {}".format(number, last, describe(last)))
|
import random

# Number-guessing game: the player gets up to three attempts to guess a
# random integer between 1 and 10; a hint is printed after each miss.
num = random.randint(1, 10)

# --- attempt 1 ---
answer = input('Guess number:')
if answer.isdigit():
    answer = int(answer)
    if num > answer:
        print(f'Bigger..{answer}')  # the secret number is bigger than the guess
    elif num < answer:
        print(f'less...')           # the secret number is smaller
    else:
        print('BINGO!')
else:
    print('oops... need number')    # non-numeric input; answer stays a str

# --- attempt 2 ---
# Also reached after non-numeric input: a str answer never equals the int num.
if answer != num:
    print('second try')
    answer_2 = input('try again: ')
    if answer_2.isdigit():
        answer_2 = int(answer_2)
        if num > answer_2:
            print('Комп загадав більше', answer_2)
        elif num < answer_2:
            print('Комп загадав менше', answer_2)
        else:
            print('BINGO!')
    else:
        print('oops... need number')
    # else:
    # print('Congrat again!')
    # --- attempt 3 ---
    if answer_2 != num:
        print('3 try')
        answer_3 = input('try again: ')
        if answer_3.isdigit():
            answer_3 = int(answer_3)
            if num > answer_3:
                print('Комп загадав більше', answer_3)
            elif num < answer_3:
                print('Комп загадав менше', answer_3)
            else:
                print('BINGO!')
        else:
            print('oops... need number')
        # else:
        # print('Congrat again and again!')
|
from rest_framework import generics, permissions
from rest_framework.response import Response
#from knox.models import AuthToken
from .serializers import *
from django.contrib.auth import login
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.renderers import TemplateHTMLRenderer
#from knox.views import LoginView as KnoxLoginView
from myauth.models import *
from LandingPage.models import *
from facilitators.models import *
from django.shortcuts import render , redirect
import json
from django.contrib import messages
from facilitators.forms import *
import io
from rest_framework.parsers import JSONParser
from rest_framework.views import APIView
from django.views import View
from rest_framework.decorators import parser_classes
from rest_framework.parsers import FileUploadParser
from django.core import serializers
from mailing.views import *
from django.contrib.auth.models import Group
# Facilitator Register API
class FacilitatorRegisterAPI(APIView):
    """Facilitator signup endpoint.

    GET renders the signup page; POST registers the applicant: creates the
    user account (unless the email is taken), the Applicants row with the
    uploaded portfolio, the experience record, an optional query, and sends
    notification emails.
    """

    def get(self, request, *args, **kwargs):
        """Render the signup form with category/subcategory choices."""
        category = Category.objects.all()
        subcategory = SubCategory.objects.all()
        context = {
            'form': UserForm(),
            'expform': ExperienceForm(),
            'fquery': FacilitatorQueriesForm(),
            'category': category,
            'subcategory': subcategory,
        }
        return render(request, 'facilitators/register/mysignup.html', context)

    def post(self, request, *args, **kwargs):
        """Register a facilitator.

        Expects a multipart request with a 'file' upload (portfolio) and a
        'data' field holding JSON with nested 'facilitator' (experience) and
        'fquery' (query) payloads. Returns 201 with a redirect hint, or
        redirects back to the form with an error message.
        """
        file = request.FILES['file']
        personal_detail = json.loads(request.data.pop('data')[0])
        exp_form = personal_detail.pop('facilitator')
        facilitator_query = personal_detail.pop('fquery')
        expform = ExperienceSerializer(data=exp_form)
        form = RegisterSerializer(data=personal_detail)
        phone = personal_detail.get('phone')
        course = personal_detail.get('course')
        # Join the selected courses. The previous manual loop compared each
        # element against the last one and dropped commas whenever a course
        # equal to the final entry appeared earlier in the list.
        catlist = ",".join(course)
        try:
            user = CustomUser.objects.get(email=personal_detail['email'])
        except CustomUser.DoesNotExist:
            # Was a bare `except:`; catch only the expected "no such user" case.
            user = None
        if user is None:
            try:
                if form.is_valid(raise_exception=True):
                    user = form.save()
                    group = Group.objects.get(name='Visiters')
                    user.groups.add(group)
                    user.save()
            except Exception:
                # Serializer rejected the payload (e.g. duplicate email).
                messages.error(request, 'Email already exists!')
                return redirect('facilitator-register')
        applicant = Applicants.objects.create(
            name=personal_detail['first_name'] + " " + personal_detail['last_name'],
            phone=phone,
            user=user,
            intrest=catlist,
            portfolio=file,
            status="Due",
        )
        applicant.save()
        exp_form["facilitator"] = applicant.Aid
        if expform.is_valid(raise_exception=True):
            expform.save()
        else:
            # Unreachable while raise_exception=True; kept as a safety net.
            messages.error(request, 'Invalid Experience Details!')
            return redirect('register')
        if facilitator_query:
            facilitator_query['user'] = user
            ToAdminFacilitatorRegistrationQuery(facilitator_query)
        successOnRegistration(user)
        RegistrationSuccessAdminEmail(personal_detail['first_name'] + " " + personal_detail['last_name'], catlist)
        return Response({'redirect': '{% url "facilitator-register" %}'}, status=201)
from rest_framework.generics import CreateAPIView
# councelling section api
class OnlineCouncelling(APIView):
    """Accept an online-counselling request posted from the landing page."""
    renderer_classes = [TemplateHTMLRenderer]
    template_name = 'LandingPage/index.html'

    def post(self, request, *args, **kwargs):
        """Validate and save the counselling form, then notify the admin."""
        clForm = onlinecounsellingSerializer(data=request.data)
        # NOTE(review): raise_exception=True makes is_valid raise a
        # ValidationError on bad data, so the else branch below is
        # unreachable — confirm whether the error Response is ever wanted.
        if clForm.is_valid(raise_exception=True):
            co = clForm.save()
            ToAdminCouncellingDetail(co)  # email the submitted details to the admin
            messages.success(self.request, 'Thank You For Choosing Us!')
            # redirect('/')
            return Response({'success':"Done"})
        else:
            messages.error(self.request, 'Invalid Form Detail')
            # redirect('/')
            return Response({'error':"something went wrong"})
|
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "NPK predictionAI .ipynb",
"version": "0.3.2",
"provenance": [],
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/github/Edie738/competitions/blob/master/ANPK_predictionAI_.py\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "CODnInYGcafn",
"colab_type": "code",
"colab": {}
},
"source": [
"import pandas as pd\n",
"import numpy as np\n",
"import seaborn as sns\n",
"from sklearn.linear_model import LinearRegression\n",
"from sklearn.model_selection import train_test_split"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "tH_V18oKvS02",
"colab_type": "code",
"colab": {}
},
"source": [
"from sklearn.tree import DecisionTreeRegressor"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "JXR98iClwJiE",
"colab_type": "code",
"colab": {}
},
"source": [
"from sklearn.model_selection import GridSearchCV\n",
"from sklearn.neighbors import KNeighborsRegressor\n"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "tsn6JqZmhkiN",
"colab_type": "code",
"colab": {}
},
"source": [
"Data = pd.read_csv('https://raw.githubusercontent.com/Edie738/ProjectStuff/master/Soil-Analysis-and-Yield-Prediction-master/Soil-Analysis-and-Yield-Prediction-master/TestingAndTrainingDataSet.csv?token=AM5B2MY2HSDQ53WI5HOLWAS5PO4M4')"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "ItT5DQSNlEUr",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 347
},
"outputId": "ca3bd11f-6495-492c-ad17-7486c1350f70"
},
"source": [
"Data"
],
"execution_count": 157,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>name</th>\n",
" <th>pH</th>\n",
" <th>EC</th>\n",
" <th>%O.C</th>\n",
" <th>Aval N</th>\n",
" <th>Aval P</th>\n",
" <th>Aval K</th>\n",
" <th>mg kg</th>\n",
" <th>Cu</th>\n",
" <th>m..Fe</th>\n",
" <th>mg..Mn</th>\n",
" <th>S</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ram Charan</td>\n",
" <td>7.5</td>\n",
" <td>0.263</td>\n",
" <td>0.840</td>\n",
" <td>403.768</td>\n",
" <td>46.33</td>\n",
" <td>793.520</td>\n",
" <td>1.03</td>\n",
" <td>3.82</td>\n",
" <td>26.95</td>\n",
" <td>19.19</td>\n",
" <td>10.35</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>Gajju Patel</td>\n",
" <td>7.5</td>\n",
" <td>0.286</td>\n",
" <td>0.810</td>\n",
" <td>389.407</td>\n",
" <td>23.46</td>\n",
" <td>778.400</td>\n",
" <td>1.35</td>\n",
" <td>2.75</td>\n",
" <td>14.72</td>\n",
" <td>16.77</td>\n",
" <td>8.28</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>Madan Lal</td>\n",
" <td>7.2</td>\n",
" <td>0.268</td>\n",
" <td>0.750</td>\n",
" <td>360.685</td>\n",
" <td>9.90</td>\n",
" <td>554.064</td>\n",
" <td>0.60</td>\n",
" <td>3.11</td>\n",
" <td>15.32</td>\n",
" <td>13.27</td>\n",
" <td>16.56</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>Shiv Prasad</td>\n",
" <td>7.5</td>\n",
" <td>0.138</td>\n",
" <td>0.330</td>\n",
" <td>159.631</td>\n",
" <td>6.73</td>\n",
" <td>214.928</td>\n",
" <td>0.28</td>\n",
" <td>1.76</td>\n",
" <td>12.70</td>\n",
" <td>10.85</td>\n",
" <td>13.11</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>Sunil Patel</td>\n",
" <td>7.3</td>\n",
" <td>0.170</td>\n",
" <td>0.495</td>\n",
" <td>238.617</td>\n",
" <td>16.23</td>\n",
" <td>135.073</td>\n",
" <td>0.60</td>\n",
" <td>1.40</td>\n",
" <td>10.64</td>\n",
" <td>10.95</td>\n",
" <td>15.18</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>Sohan Dahiya</td>\n",
" <td>7.6</td>\n",
" <td>0.268</td>\n",
" <td>0.420</td>\n",
" <td>202.714</td>\n",
" <td>2.37</td>\n",
" <td>341.600</td>\n",
" <td>0.67</td>\n",
" <td>3.18</td>\n",
" <td>22.13</td>\n",
" <td>18.46</td>\n",
" <td>8.28</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>Mahendra</td>\n",
" <td>7.6</td>\n",
" <td>0.152</td>\n",
" <td>0.480</td>\n",
" <td>231.436</td>\n",
" <td>9.50</td>\n",
" <td>407.344</td>\n",
" <td>0.32</td>\n",
" <td>1.41</td>\n",
" <td>17.91</td>\n",
" <td>8.98</td>\n",
" <td>20.01</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>Rampyare</td>\n",
" <td>7.4</td>\n",
" <td>0.199</td>\n",
" <td>0.855</td>\n",
" <td>410.949</td>\n",
" <td>5.54</td>\n",
" <td>165.536</td>\n",
" <td>0.60</td>\n",
" <td>5.00</td>\n",
" <td>24.49</td>\n",
" <td>26.39</td>\n",
" <td>6.21</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>Sanjay Choudhary</td>\n",
" <td>7.1</td>\n",
" <td>0.226</td>\n",
" <td>0.660</td>\n",
" <td>317.602</td>\n",
" <td>14.65</td>\n",
" <td>334.096</td>\n",
" <td>0.71</td>\n",
" <td>2.57</td>\n",
" <td>16.62</td>\n",
" <td>19.18</td>\n",
" <td>17.25</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9</th>\n",
" <td>RajKumar</td>\n",
" <td>7.0</td>\n",
" <td>0.090</td>\n",
" <td>0.450</td>\n",
" <td>217.075</td>\n",
" <td>7.12</td>\n",
" <td>562.240</td>\n",
" <td>0.57</td>\n",
" <td>2.59</td>\n",
" <td>18.38</td>\n",
" <td>12.44</td>\n",
" <td>5.52</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" name pH EC %O.C ... Cu m..Fe mg..Mn S\n",
"0 Ram Charan 7.5 0.263 0.840 ... 3.82 26.95 19.19 10.35\n",
"1 Gajju Patel 7.5 0.286 0.810 ... 2.75 14.72 16.77 8.28\n",
"2 Madan Lal 7.2 0.268 0.750 ... 3.11 15.32 13.27 16.56\n",
"3 Shiv Prasad 7.5 0.138 0.330 ... 1.76 12.70 10.85 13.11\n",
"4 Sunil Patel 7.3 0.170 0.495 ... 1.40 10.64 10.95 15.18\n",
"5 Sohan Dahiya 7.6 0.268 0.420 ... 3.18 22.13 18.46 8.28\n",
"6 Mahendra 7.6 0.152 0.480 ... 1.41 17.91 8.98 20.01\n",
"7 Rampyare 7.4 0.199 0.855 ... 5.00 24.49 26.39 6.21\n",
"8 Sanjay Choudhary 7.1 0.226 0.660 ... 2.57 16.62 19.18 17.25\n",
"9 RajKumar 7.0 0.090 0.450 ... 2.59 18.38 12.44 5.52\n",
"\n",
"[10 rows x 12 columns]"
]
},
"metadata": {
"tags": []
},
"execution_count": 157
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "mYskuisLxUA4",
"colab_type": "code",
"colab": {}
},
"source": [
"import matplotlib.pyplot as plt"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "zhiMM4abvJ84",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 612
},
"outputId": "fb45872c-9a1f-4b3e-f8e5-9cb409942a55"
},
"source": [
"plt.figure(figsize=(15,10))\n",
"sns.heatmap(Data.corr(), annot = True)"
],
"execution_count": 159,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"<matplotlib.axes._subplots.AxesSubplot at 0x7fdd18f0c828>"
]
},
"metadata": {
"tags": []
},
"execution_count": 159
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAyEAAAJCCAYAAADX+cizAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd8VFX6x/HPmUlvJCEkoROaICod\nUZFmAwugrAUFUdeCiK7+ECt2QHRl7a6rrr13d1eq0pQiIE2K1BBaeu/JzNzfHxNTCBCk3Anwfb9e\neTH33ufOPId7p5x57jljLMtCRERERETELg5fJyAiIiIiIicXdUJERERERMRW6oSIiIiIiIit1AkR\nERERERFbqRMiIiIiIiK2UidERERERERspU6IiIiIiIjYSp0QERERERGxlTohIiIiIiJiK79j/QDl\nGdtPup9kTxl8i69TsNVnqY19nYLYoFtpma9TsF2if4CvU7BdA/dJ95JNptP4OgXbnRuY7esUbLei\nJMrXKdjK7esEfOTGPR8eF09oOz8f+8e0rpf/J6qEiIiIiIiIrdQJERERERERWx3zy7FERERERKQa\nz8l6wVwVVUJERERERMRWqoSIiIiIiNjJ8vg6A59TJURERERERGylSoiIiIiIiJ08qoSoEiIiIiIi\nIrZSJURERERExEaWxoSoEiIiIiIiIvZSJURERERExE4aE6JKiIiIiIiI2EuVEBERERERO2lMiCoh\nIiIiIiJiL3VCRERERETEVrocS0RERETETh63rzPwOVVCRERERETEVqqEiIiIiIjYSQPTVQkRERER\nERF7nZCVkIlT/sHCRcuIjork2w9f93U6R0XQWT2JHH8HOBwUfjed/Pc+rbE99IpLCbtyKHg8WEXF\nZE15HldiEoG9uhM57mbw94NyFzkv/YvSFat91IpDM/CJUSQM6IKruJQZ498gbd2OWjFxp7di0LTb\n8AsKIHHeauY+9kGN7T1uGUz/R67j1c5jKM4uoHnvjgx76x5yd6UDsGXmcpa8+K0dzTkkJ2Ob/xA9\noDPtJt2IcTpI/uhHkl7+rsb2yN4daffUaEJPbcn6214g/X+/1NjuDAvmzJ/+QcaM5Wx+6G07Uz9k\nTfufQe8nRuFwOtj0yXzWvvrfGtsdAX70e2EMMWckUJKdz7zbX6FgdwaBkWEMfOMuGnVuzZYvFrJk\n4vsA+IcGccnXj1TuH9o4mq1fL+KXxz+0tV0HEzfgDLo8OQrjdJD48Xw2vVK7zT1fup2oM1pRll3A\n0ttepmh3Bs2vOJtTbr+0Mq7Bqc354cKJ5K5Pot9XDxMUG4m7pByAn66ZSmlmnp3NqqV5/zM4+wlv\nO3//ZD6r93NsB1Y7tj9UHFuALndcRocR/bHcHhY9+j67F/xWuZ9xGK6Y/hSFKdnMvGFa5fqe911J\n60t7Ybk9bPjgR9a9Pduehh6C0L7diX/kVozTQfZns8n81xc1tkffNIyoqy7CcrtxZ+Wy9/4XKN+b\nXrndERZMm5mvkz9nCSlP1N/37ib9z6Dnk6MwDgdbP5nPuv0c8z4vjiH69ARKs/NZePsrFO7OoGGX\n1pz17F+9QQbWTPuGXTNXANDxlkG0G9Efy7LI+X03i/7vDTyl5XY37YCa9j+DMyvavPmT+fy2nzb3\nfXEMDSvaPL/iPG9y7ml0f+hqnP5+uMtdrJj0CcmLNgAw6IuHCYmLxFVSBsDsEc9Q4uPns630Y4Un\nZidk2MUXcO3wITz01HO+TuXocDiIuu8u0sbdhzs1nbj3XqN44RJciUmVIUWz5lL49f8ACOp7FpH3\njCHjrgfx5OSS/n8T8WRk4t+mFTEvPUPyJVf7qiV1ShjQmahW8fy773gad23DBZNv4KOhj9eKO3/y\njcy+/y2SV21j+HsTSOh/Bonz1wIQ3jialn1P
J6/ijf4Pu5dv4psbp9W6L187GdtcyWE4ZepfWXXV\nJEr3ZtJj1tOkz1pB0eY9lSElezLY8LfXaHH7Zfu9i9YPXE3O0o12ZfynGYfh7EmjmXntVAqTsxjy\n/ZPsnP0rOVv2Vsacck1/SnML+aLPeFoP6U3Ph65h3thXcJeWs/LvXxJ1SjOiOjSrjC8vLOHbix6u\nXB46/SmSZiy3tV0H5TB0nXIDP139NEXJWZw34yn2zl5JfrXj2mpEf8pyC5l59niaDe3N6RNH8MuY\nl9n19WJ2fb0YgIgOzTn7nXvIXV/1Wrds3Gtkr0m0vUn7YxyGcyaN5vuKY3vF90+yY59j26Hi2H7a\nZzxthvSm90PX8MPYV4hs14S2Q3vz+cD7CY2L4pJPHuCzvvdieSwATvvrILK37iUgLLjyvk65qi9h\nTaL5rN99YFkENYywvc0H5HDQ+PHbSRo9kfKUDFp/8zz5Py6lbOuuypCSDdvZPuxurJJSoq69mNgH\nbmLPXc9Ubm90zyiKlq/zRfaHzDgMZ04ezZwRUylKzuLi6U+ya/av5FY75u1GeI/5t33G02pIb7o/\nfA0Lb3+FnN938/3gR7DcHoJjI7l0zmR2z1lJUKMGdLjpQv4z4H7cJeX0ff1OEob2ZtvnP/mwpVWM\nw9B78mhmVbT5sune17DqbW5f0eav+ownYUhvejx8DfNvf4WSrHx+uGEaxak5RJ7SjAs/uo/Pe9xV\nud+Cca+RubZ+PJ/Ffifk5Vg9upxOg4hwX6dx1AR06kD5rj249ySDy0XRnHkE9zu7RoxVWFR52xEU\nBN73Mco3b8WTkem9vW0HJjAA/P1ty/3Panthd9Z/9TMAyau2ERgRSmhsZI2Y0NhIAsKCSV61DYD1\nX/1M24t6VG4f8NhIFk75FMuy7Ev8CJyMbf5DRLe2FCWmUJKUhlXuJu3bxTQa1LNGTMmudAo37ARP\n7baFn5FAQKMGZM1fY1fKf1qjLm3I25FK/s50POVutn+3lBYXdq8R0+LCbmz9wvuBI/H7ZTTp0wkA\nV3Epqcs34z7IN6IRCfEExUSQ8sumY9eIPym6axsKdqRSuDMdq9zNru+W0uSimm1uMqg7SZ8vBGDP\n/5YRe26nWvfT4vKz2PXdEltyPhyx+xzbrd8tpdU+x7bVhd3YXHFst1c7tq0u7M7W75biKXORvyud\nvB2pxHZpA3grWy3P68LvH8+vcV+nXn8ev77wLVQ8z+vTt8bBndtTlrSX8l0pUO4i938LCT+/d42Y\noqVrsUpKAShe/Tv+8TGV24JOa4tfTCQFP6+yNe8/q2HXNuTvSKWg4pjv+G4pzfc5t5tf2I1tFcc8\n6ftlxFccc3dJGZbb++23M9C/8n0awOHnxBkUgHE68AsOoCgl254GHYKYfdq8/bultLjowK9hO75f\nRuOKNmetT6I4NQeAnE278QsKwBFwQn7//adZlse2v/rqhOyEnGicjWJwp1aVrN2p6TgbxdSKC7ty\nKI2/+YAGd91KznOv1NoePLAv5Zu2QHn9KfHuKyw+ivzkzMrl/JQswuKjasUUpGTtN6bNBd3IT8km\nfePOWvfdpFtbrp85meHvTaBh+6bHqAV/3snY5j8ExkdTureq7aV7MwmMjz60nY2h7ePXs/XxD+qO\n9aGQxlEUJlcdu6KULEIb1zy+ofFRFFTEWG4PZXlFBEaFHdL9tx7am8T/LD16CR8FwfHRFO+pOq7F\nyVkE73NOB8dHUby3qs3leUUERNdsc7Mhvdn1Tc1OSI/nb+P8OVPoeM+wY5T9oQtpXHXcAAoP8dgG\nRYURus95UZiSRUjFvmc/PpKlkz+p9aVCRMtY2lx2Jld8/ySDP5hARELcsWran+YX15Dy5KpKrCsl\nA/+4hgeMj7zyQgoWeC9FwhjiHvwrqU//+1inecRC4qMo3Fvt+ZycRch+zu2ifc7tP57PMV3bMGTu\nVC778WmW
PvAOlttDcUo261+fzvBlL3LlqlcoyysieWH9qQjtr82h+7S5esyBXsNaXtKTzHU78JS5\nKted+49bGTJ7Mp3v9v3zWex30E6IMeY3Y8za/fz9ZoxZa1eScmgKvviO5MtHkfvym0TcNLLGNr/W\nLYm88xaypjzvo+yOPb+gAHqPG8KiaV/W2pa6bgdvnHU37w96mJXvzmbYm/f4IMOj72Rs8x+a3ngh\nmT+uorTaB7mTUeshZ7GtHlcLDld01za4i8vI27S7ct0vd7zGnIEPMH/Yk8Sc2YEWV/bxYYbHRovz\nulCckUfGbztqbXMG+OMuLefrSx7l94/n0f+5W+1P8ChoMHQAQae3I/PNrwCIGnkJBQtW4ErJrGPP\n41/Gqm38Z+ADTL/4UU4fdxmOQH8CGoTQ/KJufN37Hr7odid+IYEkXHGOr1M9qiLbN6XHQ9ew+P6q\ncXsL73yNb89/kOmXP0Vcr1No85cT7/l8UB6PfX/1VF01sT9GBxrge+DiQ7lTY8ytwK0Ar02bxM3X\njzjsBAXc6Rk44xpVLjvjGuFOzzhgfNHseUQ98Dd4oiI+NoaYZ58k87Gp3ku66pku15/PGSMGAJCy\ndjvhjau+PQuPj6Zgn7J0QUo2YdW+Lf8jJrJlLA2aN2L0zCne9Y2jGTV9Eh8OeYyi9NzK+MR5a3BM\nuoHgqDCKswuOZdMO6GRs8/6UpmQR2KSq7YFNGlKacmidigY92hN5Zkea3nAhztAgHAF+uItK2Dbp\n42OV7mEpSs4mtHHVsQuJj6YwuebxLUzJJqxxNEXJWRing4CIEEoP4ThFd2yBw89B5n4+sPpScUoW\nwU2rjmtw42iK9zmni1OyCW4STXFFm/0jQijLqmpz82FnsevbxTX2Kam4D1dhCTu/Xkx0lzbs/OLn\nY9iSgytK9h63P4Qe5NgWVju2JdkFFO5zXoTGR1OUnE3LC7vR8sJutBjYGWegP/7hwQx86Xbm3vVP\nCpKzSJzhrR4kzlhBv2n1pxPiSs3Ev3FVhd4vPoby1NqditCzuxAz9mp2XHs/VsU34iFdOxDSsxNR\n112CIyQI4++Pp6iEtL+/a1f6h6woJZvQJtWez42ja106VZySTUiTquez/36ez7lb91JeVELUKc0I\na9GIgp3plGblA7Bzxgpie7Qj8etFx75Bh2B/bS7cp81/xOzvNSykcTQD/303P/3tdfKT0mrsA97n\n8/ZvF9OoS2u2fem757PY76CVEMuykir+dgCl1ZaTLMtKOsh+b1iW1cOyrB7qgBy5sg2/49+iKc4m\n8eDnR8gFAyheWPPN2a951aU2QX1649rpHQBqwkKJeX4Kua++Sdna9bbmfahWv/8D7w9+mPcHP8zW\nWb/Sabj325DGXdtQml9EYVpOjfjCtBzKCopp3NV7/XSn4X3YOvtXMjbt5rVud/DmOffw5jn3kJ+c\nxQcXT6QoPZeQRg0q94/v3BrjMD79MH4ytnl/8ldtI6R1Y4JaNML4O4kddjYZs1Yc0r4bxr7M4u5j\nWdJzHFuf+ICUzxfWuw4IQPqa7UQkxBPWvBEOfyeth/Zm55yVNWJ2zllJ2yvPBSDhkl7srZg9pi6t\nh9XPKkj26u2EJcQT0tx7XJsP7U3yrF9rxCTPWknLq/oC0PTSXqT9XO31yRiaXXYmu76taptxOiov\n1zJ+Thpf0LVGlcQX0tZsp0FCPOEVx7bt0N4k7XNsk+aspH3FsW1d7dgmzVlJ26G9cQT4Ed68EQ0S\n4klbvY1lUz/no5538fFZ9/DDHa+yd9EG5t71TwB2zPqVJmd3BKDxWR3J3Z5iY2sPrnjtZgJaNcW/\nWRz4+9Hg0r4U/FhzJrugU1vTeNI4dt32JO7Mqi9J9vzfc2w590a29ruJ1Klvk/vNj/WyAwKQuXo7\n4dWez62G9mbX7JrHfNfslbSpOOYtL+lFSsUxD2veCOP0fuwKbdqQBm2aUL
ArncI9mTTq1hZnUAAA\njft0InfLHuqLjNW1X8P2bfPO2VWvYa0u6VU5A1ZARAgXvD+eX6d8RtqKLZXxxumovFzL+Dlpfn5X\nsn38fLad5bHvr546IUcHTXhsKstXrSUnJ4/zho1k7F9HMfyyi3yd1uFze8h+9mUavfQMxumg4D8z\ncG1PIuK2GyjbuImShUsIu2oYQb26YblcePIKyHzCO+NI+FXD8GvehIibRxFx8ygA0sfdjyc752CP\n6DPb564mYUBnbv5pGuXFZcy8943KbdfPmMz7g70zAv0w8V0GT7u1YrraNSTOO/jA5FMu7kXnUefh\ncblxlZTzv3GvHtN2/BknY5v/YLk9bH7wbbp8+jDG6WDvJ/Mo3LSbhPuuIn/NNjJm/Up4lzac/s69\n+EeGEnNhdxImXMWyfuN9nfohs9weljzyHoM+us87veVnC8jZvIdu9w4nY00iO+esZPOnC+j34hiu\n/HkapTkFzBtbNabrqiXPExAejMPfj5YX9WDmtVMrZ19KuPRMZl//d1817YAst4fVD73LuZ/cj3E6\n2PHpAvI27+HUCcPJXpNI8uyVJH4yn14v386gxdMoyynklzEvV+7fqHcHivZmUbiz2vStAf6c+8kD\nGD8nxukg7ad1bP9wri+aV8lye/j5kfe4uOLYbvpsAdmb99Dj3uGkr0kkac5Kfv90AQNeHMM1Fcf2\nh4pjm715D9v++wtXzX3Gez8T362cGetAVr/6Xwa+PJbTbxmMq7CEBRPesqOZh8btIeWJf9Li3acw\nDgc5X86hdMtOGt09kuLftlDw4y/EPvBXHKFBNHv5QQDK96az67YnfZz4n2O5PSyb+B7nf+w95ls/\nW0Du5j10vnc4mWsS2T1nJVs+XUCfl8Yw7OdplOUUsLDimMf2as9pd1yGx+XG8lj88tC7lGYXUJpd\nQNL3y7h01iQ8LjdZ65PY/NE8H7e0iuX2sHTie1xY0eYtFa9hXStew3ZVtPncl8YwvOI8n1/R5o43\nXkB4qzg633M5ne+5HPBOxesqKuXCj+/HUfF8Tv5pfb1qs9jDHGw2HWNMt2qLHwHXVd9uWVbNrvB+\nlGdsP76m6zkKUgbf4usUbPVZamNfpyA26FZa5usUbJfoH+DrFGzXwH3SvWST6TS+TsF25wbWn9mX\n7LKiJKruoBOI29cJ+MiNez48Lp7Qpb8vsO3FNrBDv3r5f1JXJaT6DwykAH/88IbBO7ncwGORlIiI\niIiInLgO2gmxLGsAgDEmGBgL9MHb+fgJ+Ocxz05ERERE5ERTj8dq2OVQfyfkPaAj8BLwMnAq8P6x\nSkpERERERI49Y8wgY8wmY8xWY8wD+9ne0hjzY8XPdMw3xjQ7Go97qAPTT7Ms69Rqy/OMMYc2fYuI\niIiIiNQ7xhgn8CpwAbAbWG6M+Y9lWdU/5z8HvG9Z1nvGmIHA08CoI33sQ62ErDTG9K6W8JnAoc2j\nKSIiIiIiVerPjxX2ArZalrXdsqwy4FNg6D4xpwJ/TEc4bz/bD8uhdkK6A4uNMTuMMTuAJUBP/XK6\niIiIiEj9ZYy51Rizotpf9V86bQrsqra8u2JddWuAKypuXw6EG2MacoQO9XKsQUf6QCIiIiIigq0D\n0y3LegN4o87AA7sXeMUYcwOwENjDUZgF+pA6IQf7dXQRERERETku7QGaV1tuVrGukmVZe6mohBhj\nwoDhlmUd8a9en5C/mC4iIiIiUm/VPVbDLsuBdsaYBLydj2uAa6sHGGNigCzLsjzAg8DbR+OBD3VM\niIiIiIiInEAsy3IB44BZwEbgc8uy1htjnjTGDKkI6w9sMsZsBuKAyUfjsVUJERERERGxkWUd8ZCK\no8ayrOnA9H3WPVrt9pfAl0f7cVUJERERERERW6kSIiIiIiJiJxtnx6qvVAkRERERERFbqRIiIiIi\nImKn+jM7ls+oEiIiIiIiIrZSJURERE
RExE4aE6JKiIiIiIiI2EuVEBERERERO3nqz++E+IoqISIi\nIiIiYqtjXglJGXzLsX6Ieid+xpu+TsFWYz961tcpiA1K5m70dQq26xZ08n1PE3hJb1+nYLvSWb/4\nOgXbZazy93UKtht2frqvU7DVe9Mb+ToFkYPS5VgiIiIiInbSwHRdjiUiIiIiIvZSJURERERExE76\nsUJVQkRERERExF6qhIiIiIiI2EljQlQJERERERERe6kSIiIiIiJiJ40JUSVERERERETspUqIiIiI\niIidVAlRJUREREREROylSoiIiIiIiI0sy+3rFHxOlRAREREREbGVKiEiIiIiInbSmBBVQkRERERE\nxF6qhIiIiIiI2Em/mK5KiIiIiIiI2EudEBERERERsZUuxxIRERERsZMGpqsSIiIiIiIi9lIlRERE\nRETEThqYfnx2QoLO6knk+DvA4aDwu+nkv/dpje2hV1xK2JVDwePBKioma8rzuBKTCOzVnchxN4O/\nH5S7yHnpX5SuWO2jVhw9E6f8g4WLlhEdFcm3H77u63SOGkfLUwnodxUYB671i3CtmFUrxtmuO/5n\nXgpYeDJ2UzbzbUx4NIGXjgFjwOHEtWYert9+sr8Bh+FkbLN/116E3HInOByUzvmekq8+3n/cWX0J\nf+ApcsffinvrJkx4BGH3P4lf21MonTuTojdetDnzw+fXuSfBN4wDh5Oyud9T+t0n+43z79WX0PFP\nkP/gbbi3b8bZpgMht473bjSGki/epXz5zzZmfngWbUvh2dlr8VgWl3dpxU1nn1IrZtaG3fzrp40A\ntI9rwNRhvQB4Ye46ftqaAsCtfTpw0anN7Ev8CPid0ZPgUePA4aBs/nRK/3uAY9zzXELvfoL8iWNw\nJ26uXG8axhLx7DuUfPUepdM/tyvtIxLSpwcxD44Bp5O8L2eQ81bNvCNHX0HEXwZhudy4s3NJm/gP\nXHvTCO7VmZgHbquM809oTuq9Uyj8cYndTfjTnJ16EHTVGIzDSdnPMyibtf9j5de1DyFjHqFgyjg8\nSVvA4STo+ntwtmgLDiflS3+gbOZnNmf/5/R9YhQtB3bBVVzKD//3BunrdtSKaXR6K87/x234BQWQ\nNHc1Cx/7AIBzHh5BwvldcZe7yE1K44fxb1CWV0RQZBiD/3UXsZ1b8/sXC1nwyPs2t0p86fjrhDgc\nRN13F2nj7sOdmk7ce69RvHAJrsSkypCiWXMp/Pp/AAT1PYvIe8aQcdeDeHJySf+/iXgyMvFv04qY\nl54h+ZKrfdWSo2bYxRdw7fAhPPTUc75O5egxhoD+Iyj95kWsgmyCrnkQ9/a1WFnJVSGRsfj3uIiS\nL/4OpUUQHA6AVZhLyefPgtsF/oEEjXzUu29hrq9ac2hOxjY7HITcdjf5j43Hk5lOxHP/omzZIjy7\nkmrGBQcTdNlfcG1aX7nKKiuj+KN/42yZgLNFgs2JHwHjIPimv1E4eQKezHTCn36d8hWL8ezZp81B\nwQRefAWuLRsqV7l3JZL/4G3g8WAiowl/9i3Kf11cr68tdnssnp65htev7UNcRDDXvT2Pfu0a06ZR\nRGVMUlYBby/exLvX9yMiOICswhIAFm5JZmNKDp/dPJByl4e/friQc9rEERbo76vmHBrjIPiGv1H4\n9AQ8WemEP/VPylce4BgPGo5r64ZadxE88nbK1yyzKeGjwOGg0cQ72HPzg7hSM2j+2csUzltK+bad\nlSGlG7ex68o7sUpKibj6UhqOv5nU8VMoXraGXVeM9d5Ng3BaznyHokUrfdWSQ2ccBI+4g8IXHsTK\nziD0wZdxrV2KJ3lnzbjAYALOG4Zr+8bKVX7d+2L8/Cl8cgz4BxL2+BuUL5+PlZlqcyMOTcsBnYlM\niOeDc8cT17UN/afcwBdDHq8VN2DKjcy97y1SV21jyPsTaNn/DJLmr2XnT7+xeOpnWG4PZz94NT3u\nuI
zFT3+Gq7Scpc99ScNTmtHwlOPjC4ajph6/btvluBsTEtCpA+W79uDekwwuF0Vz5hHc7+waMVZh\nUeVtR1AQWN7b5Zu34snI9N7etgMTGAD+9fzN7BD06HI6DSLCfZ3GUeWIa4WVm4aVlwEeN67Ny3G2\nPqNGjF+nPpSvXeD9MA5QnO/91+P2fhgHcPp5qwPHgZOxzX7tOuJJ2YMn1ft8LvtpLgG9+tSKC7n2\nr5R89TFWWVnVytISXBt/q7nuOOBs2wFP6l48acngdlG2eC7+Pc+pFRd89U2UfPcpVG9fWWnlG5fx\nDwDLsivtw7ZubxbNo0NpFhWKv9PBRac2Y/7m5BoxX69K5OrurYkIDgAgOjQIgO0Z+XRv3hA/h4Pg\nAD/axzZg0bb6+SGtOmebDnhS9+BJrzjGS+fi3/3sWnHBf7mJkv9+UvMYA/7dz8GTloJn9w6bMj5y\nQaefQvnOvbh2p0C5i4IZ8wkbeFaNmOJla7BKSgEoWbsRv7iYWvcTdmEfin5aXhlXnzkTTsGTthcr\nIwXcLspXzMev81m14gKHjqZs5udQXv04WxAYBA4HJiAAy+3CKi6qtW990frC7mz8ylt1TV21jcCI\nUEJiI2vEhMRGEhAWTOqqbQBs/OpnWl/UA4BdC9dhub2vXSmrthHWOBoAV3Epycs34yott6spUo8c\ntBNijLnIGPOX/az/izHmgmOX1oE5G8XgTk2vXHanpuNstJ8XsiuH0vibD2hw163kPPdKre3BA/tS\nvmkLlOvEr49MWBRWfnblslWQgwmLqhkTFYsjMo7AKycQeNV9OFqeWmP/oOsmEnzT07hWzKr/FQFO\n0jY3jMGdkVa57MlMx9Gw5vPZ2bodjphYyn9dand6x4QjOgZP5j5tjtqnzQntMA1jca2q3WZn246E\nP/cO4c+9TfFbz9f7b9PS8kuIDw+uXI6LCCYtv7hGTFJWAUlZBYx+bz6j3pnHom3ey6/axzVg0fZU\nistdZBeVsjwpndS8mvvWR7WOcVYGjqhGNWKcrdphGjbCtfqXmjsHBhF42TWUfP2eHakeNc64hpSn\nVL03u1IycMbWfm/+Q8QVgyj6aXmt9WGD+5P//fxjkeJRZyIb4smuarOVnYEjsmabHc3b4ohqhGtd\nzaqW69efoLSEsGc/IezpDymb8yUU5duS9+EIjY+iYG9m5XJBchZh8TXfn8LioyhIzqpcLkzOInSf\nGIBTr+pL0ry1xy7Z44Xlse+vnqqrEvIosGA/6+cDTx5oJ2PMrcaYFcaYFR+l7zmC9A5fwRffkXz5\nKHJffpOIm0bW2ObXuiWRd95C1pTnfZKbHB3G4cBExlL61TTKZv6bgPNGQoD3w45VkE3JR5Moee8R\nnB3PgpATo1J00rXZGEJuuoPM2QYAAAAgAElEQVSid17zdSb2MYbgUWMp+WD/bXZv3Uj+vTeS/9AY\nAodde0JUc90ei51ZBbw1si9TL+/Fk9+vIq+kjLNbx9GnTTyj313AA98u54ymDXE4jo8q30EZQ/B1\nt1Py0T9rbQoafgOlM76E0hIfJGaPsMsGEnRaO7Lf/rLGemdMNIHtW1G0aIWPMjvKjCHoylsp+fKN\nWpucCaeAx0PBfddS8PD1BJw/HBMT74Mk7dXjziF43B42fbPI16lIPVDXmJBAy7LS911pWVaGMSb0\nQDtZlvUG8AbArp7nHdXrBdzpGTjjqr5RcsY1wp2eccD4otnziHrgb/BERXxsDDHPPknmY1O9l3RJ\nvWQVZGPCq75BMWGRWAXZNWI8BTl4UhK9ExDkZWLlpOGIisWTWnXdtVWYi5W5B2eTdri31u9rjE/K\nNmdm4IyJrVx2NGyEJ7Pq+WyCQ3C2TCB80gve7VHRhD88hfzJD+Heusn2fI8GT1YGjob7tDm72mtY\nUAiO5gmEPepts4mMJnTCZAr//jDu7VUDlz17dmKVFONsnlBjfX0T
Gx5ESrXKR2peMbHVKiMAceHB\nnNY0Cn+ng6aRobRsGMbOrAJOaxLNLX06cEufDgA88O0yWkaH2Zr/4ah1jKNjanxjXnmMJ3q/CDMN\nogkdP4nCaRPxa9OBgF59CR5xGyYkDMvyYJWXUTbnW7ub8ae4UzPxj696b/aLj8GdVvu9OfisrkTf\nOoI9o++tdSVC2KC+FPywGFzuY57v0WDlZNaocJmoGDw51docGIyjaStC/+9Z7/YG0YSMfYKi1x7D\nv9cAXOtXgMeNlZ+Le9sGnC3b48pIsbsZB3T66PPpNGIAAGlrthPWpGHltrDG0RSk1Hx/KkjJrrzM\nCiC0cTSF1WI6XHkurc7ryrfXPH2MMz9O1PMqth3qqoREGGNqdVSMMf5A8H7ij7myDb/j36Ipzibx\n4OdHyAUDKF64uEaMX/OmlbeD+vTGtdNbjTFhocQ8P4XcV9+kbO16pP7ypCZhImMxEQ3B4cSvfU/c\n22uWb93bVuNs2t67EBSKiYzFk5uBCYsEZ8W3w4EhOJq0xZNdf17YD+RkbLNry+84GjfDEet9Pgec\nO5DyZVXfkFlFheSMGkrurdeQe+s1uDZtOK47IADubb/jiG+Ko1E8OP0IOHsg5SuqvYYVF5J3yzDy\n7hxB3p0jcG/ZUNkBcTSKB4f3ZdvExOFs0gJPev0+zp2aRLEzq4A9OYWUuz3M2rCbfu0b14gZcEpj\nViR5P7xlF5WSlFlAs8hQ3B6LnCLv2IDNqblsScvjrNaxtR6jvnFv3+cY9x5I+a/VZnoqLiRvzOXk\n3X0teXdfi3vrBgqnTcSduJmCp+6uXF868ytKv/u43ndAAErWbcK/ZVP8msaBvx9hg/tTOK/m5YQB\nHdsQ+9hdJI97DHdW7ctFwy/pT8H0+TZlfOTcOzbhiG2KaRgHTj/8e/THtaZam0uKKBh/FQUPj6bg\n4dG4t2+k6LXH8CRtwZOVjrNDF29cQCDOhA54Unb5piEH8Nt7P/DpoIf5dNDDbJ/1Kx2He8frxXVt\nQ1l+EUVpOTXii9JyKCsoJq5rGwA6Du/D9tm/AtCi/xl0H3Mp/7vpH7hKjq9xfHLs1FUJ+Rp40xgz\nzrKsQgBjTBjwYsU2+7k9ZD/7Mo1eegbjdFDwnxm4ticRcdsNlG3cRMnCJYRdNYygXt2wXC48eQVk\nPvEMAOFXDcOveRMibh5FxM2jAEgfdz+e7JyDPWK9N+GxqSxftZacnDzOGzaSsX8dxfDLLvJ1WkfG\n8lA2/zMCh93lna52w2KsrGT8e1+GJzUJd+JaPEkbsFqcStDIx8DyUP7z11BSiGnRkcBzh3snJDBQ\nvnIOVuZeX7eobidjmz1uit54gfDHn/NO0fvjdNy7dhB87U24tv5O+bLFB929wRufYkJCMX5+BJzZ\nh7zH7609s1Z94/FQ/PZLhD70bMX0rTPw7N5B0JU34tq+CdevB26zs8PphA69FtwuLMtD8b9fwMrP\nszH5P8/P4eCBi7pw+yeL8HgshnZuSdtGEby2YAOnNo6kf/smnN06jiXb07jiX3NwGMM9551GZEgg\npS43N32wEIDQAD8mD+mBn+M4mE/F46H43ZcJvf8Z7zTMC2bg2bODoOE34ErcjGvlwc/r45LbQ/rk\nV2ny5hSMw0HeN7Mp25pE9LjrKVm/maJ5S4m59xZMSDDxz08EwLU3jeRxjwPg1yQOv/hGFC8/jsYK\neDyUfPoqIX/ztrls0Ww8yUkEXnY97qTNuNYeeBxb2fz/EDx6PKGPeS/VKl8yG8+eRLsy/9N2zF1N\ny4Gduf7naZQXl/Hj+KpLzK6ZOZlPBz0MwPyH3+X8f9zqnaJ33hqS5q0BoN9To3EG+DHs4wcASFm5\nlfkPvQPA6MXPExAejMPfj9YX9eDb66aSveU4eP86UqqEYKyDzK5SUQWZBNwM/PHO3gL4N/CIZVl1\njuo+2pdjHQ/iZ7zp6xRsVf7R
s75OQWxQMndj3UEnGEfQcfCB9ygLvKS3r1OwXemsX+oOOsFkrDr+\nxxL9WbHn+joDe703vVHdQSegO3d9eFwMHCv+/gXbPh8HX3J3vfw/OWglxLIsF/CAMeYJoG3F6q2W\nZdX/6UlEREREROqjejxrlV3qmqL3PoCKTkcHy7J++6MDYoyZYkN+IiIiIiJygqnrWoNrqt1+cJ9t\ng45yLiIiIiIiJz6Px76/eqquTog5wO39LYuIiIiIiNSprk6IdYDb+1sWERERERGpU11T9HY2xuTh\nrXoEV9ymYjnomGYmIiIiInIi0sD0OmfHctqViIiIiIiInBzqqoSIiIiIiMjRVI8HjNvl5PslLhER\nERER8SlVQkRERERE7KQxIaqEiIiIiIiIvVQJERERERGxk8aEqBIiIiIiIiL2UiVERERERMROqoSo\nEiIiIiIiIvZSJURERERExE6W5esMfE6VEBERERERsZUqISIiIiIidtKYEFVCRERERETEXqqEiIiI\niIjYSZWQY98J+Sy18bF+iHpn7EfP+joFW/lfd5+vUxAbeFIm+DoF27lTcn2dgv3KSn2dge0CLz7H\n1ynYLoZFvk7Bdn7dO/s6BVutm7Xb1ymIHJQqISIiIiIidrJUCdGYEBERERERsZU6ISIiIiIiYitd\njiUiIiIiYicNTFclRERERERE7KVKiIiIiIiInSzL1xn4nCohIiIiIiJiK1VCRERERETspDEhqoSI\niIiIiIi9VAkREREREbGTKiGqhIiIiIiIiL1UCRERERERsZOlSogqISIiIiIiYitVQkREREREbGR5\n9DshqoSIiIiIiIitVAkREREREbGTZsdSJUREREREROylSoiIiIiIiJ00O5YqISIiIiIiYq/jqhIy\n8IlRJAzogqu4lBnj3yBt3Y5aMXGnt2LQtNvwCwogcd5q5j72QY3tPW4ZTP9HruPVzmMozi6gee+O\nDHvrHnJ3pQOwZeZylrz4rR3N+VMcLU8loN9VYBy41i/CtWJWrRhnu+74n3kpYOHJ2E3ZzLcx4dEE\nXjoGjAGHE9eaebh++8n+BhxlE6f8g4WLlhEdFcm3H77u63RscaK22dm+C4GX3gQOB+XLf6R8wTc1\ntvt1G0Dg4FF48rIAKF8yA9eKHwEInfw5npSdAFg5GZR8MNXe5A+T32k9Cbp2rLfNC2dQOv3T/cd1\nP5fQcY9R8MRY3Ds2A+BolkDw6HswwSFgWRQ8MRZc5Xam/6ctSkzj2R834LEsLj+jOTed2bZWzKzf\n9/KvxVsAaB8bwdRLu7J8ZwZ/n7uxMmZHVgFTL+vKwHbxtuV+uBZtTebZWavxeCwu75rATX061tj+\n91mrWL7D+75TUu4iq7CUn++/HICxHy1k7e5MuraI4eUR59qe++HyO6MnwaPGgcNB2fzplP73k/3G\n+fc8l9C7nyB/4hjciZsr15uGsUQ8+w4lX71H6fTP7Ur7iCxKTOfv87zn9rDTmnPTmW1qxczelMzr\ni7dgDLRvFM7Tl3Rl+c5Mnpu/oTJmR1YhUy/pwoDj4NwGuPqxGzltQDfKikt5995X2bU+sVbM0HtH\n0PuKvoQ0CONvnUZVro9qEsON0+4gOCIUh8PBN898xLr5q+xMX+qJ46YTkjCgM1Gt4vl33/E07tqG\nCybfwEdDH68Vd/7kG5l9/1skr9rG8PcmkND/DBLnrwUgvHE0LfueTt7ujBr77F6+iW9unGZHMw6P\nMQT0H0HpNy9iFWQTdM2DuLevxcpKrgqJjMW/x0WUfPF3KC2C4HAArMJcSj5/Ftwu8A8kaOSj3n0L\nc33VmqNi2MUXcO3wITz01HO+TsU2J2SbjYPAIbdQ/O8nsfIyCb7jGVwbl2Ol7a4RVv7bYsr+81bt\n/cvLKH75XpuSPUqMg6BRd1L43P1YWemEPfoq5asX49m7s2ZcUDCBF1yOa1vVh3AcDkJufZCiN6
fi\n2bUdExoBbre9+f9Jbo/F03PW8/pVZxIXHsR1H/xMvzZxtIkJr4xJyi7k7V+28e61ZxMR5E9WYSkA\nPVvE8PkN3g/hucVlXPbWfM5q1cgn7fgz3B4PT89Yyesj+xEXEcx1b/1Av1Oa0KZRg8qYCRd1rbz9\nybIt/J6SXbk8+qxTKCl38+XKbbbmfUSMg+Ab/kbh0xPwZKUT/tQ/KV+5GM+epJpxQcEEDhqOa+uG\nWncRPPJ2ytcssynhI+f2WEz9cT3//Esv77n90SL6tY2lTcP9nNsjzvKe20V/nNsN+ez6qnN7yNsL\n6H0cnNsAp/XvSmxCYx7pfycJXdtx3eRbmDrsoVpxa39cwbz3ZvDU/JdrrL9k3HBWfL+EhR/OpnHb\nZox790Ee7nOHXenXH5qi9/i5HKvthd1Z/9XPACSv2kZgRCihsZE1YkJjIwkICyZ5lfeFe/1XP9P2\noh6V2wc8NpKFUz7Fso6vA++Ia4WVm4aVlwEeN67Ny3G2PqNGjF+nPpSvXeDtgAAU53v/9bi9HRAA\np5+3InIC6NHldBpEhNcdeAI5EdvsaN4WT2YKVnYquF241vyMX8eevk7rmHK2PgVP2l6s9GRwuyhf\nNh//rufUigu6/AZKp38G5WWV6/xO64F793Y8u7YDYBXm1fvritcl59A8KoRmkSH4Ox1c1KEJ87em\n1oj5es1Oru7akoggfwCiQwNr3c+czSmck9CIYH+nLXkfiXV7smgeFUazqDD8nU4u6tSC+Zv2HjB+\nxrqdDOrUonL5zNZxhAQeN98RAuBs0wFP6h48Fed12dK5+Hc/u1Zc8F9uouS/n0BZWY31/t3PwZOW\ngmf3DpsyPnLrUnJoHlnt3D6lca1z+5u1u7iqS7VzO6T2uf3DlhTOaXV8nNsAnS/sydKvFwCQuGoL\nweGhRDSKrBWXuGoLeek5tdZbWASHBQMQHBFCbmp2rRg5ORz0Vc4YMxIwlmV9sM/6UYDbsqyPj2Vy\n1YXFR5GfnFm5nJ+SRVh8FIVpOTViClKyasUAtLmgG/kp2aRv3OfbRqBJt7ZcP3Myhak5zJ/8MZmb\n9xzDlvx5JiwKK7/qSWoV5OCIT6gZExWLA/C7cgIYQ/kv/8OTtKFy/8Chd2AaxFL+81fHfRVEThwm\nIhort6oyaeVl4WjerlacX6feOFudipWxl9Lv38HKrXgt8Asg+I5nwOOhbME3uDfU/29RTVQMVlZa\n5bInKx1nmw41Yhwt2+KIjsW19hcCB19VtT6uGVgWIeOn4ghvQNkv8yibUb8vW0krKCE+PLhyOS48\niN+Sa34wScouBGD0R4vxWBZjzmnHOQmxNWJm/b6XUT1qvu7VV2n5xcQ3CKlcjosI5rc9WfuN3ZtT\nyN6cQnrt097jjSM6Bk9m9fM6A782NS9Bc7Zqh2nYCNfqX+CSq6s2BAYReNk1FDw9gaDq6+u5tIIS\n4sKDKpfjwoNZd4Bz+4ZPluCxLG47qx3nJNSseMz6PZmR3Vsd83yPlsi4aLL2Vn0ey0nJJCo+er8d\njv357/Ofc/cHjzBg9GACQgJ54bqnjlWq9Zum6K3zcqw7gfP2s/5rYCFgWyfkSPgFBdB73BC+GPlM\nrW2p63bwxll3U15USsKAzgx78x7+3e84u7wDMA4HRMZS+tU0b6fjL+Mp+fApKCvGKsim5KNJmNAG\nBFx6O66tK6Eo39cpixwS1+/Lca35Cdwu/HpdQOCVd1Ly1uMAFD07BisvCxMVR/Atj1OckoSVlXrw\nO6zvjCH4mtspeuvZ2tucTvzanUbBk3dglZUSOuHvuHdswb3x+L6e2u2x2JldyFvX9CYtv4SbPl3C\nFzf0rfz2OL2ghK3p+cfFpVh/1qz1Ozm/YzOcjuPmwoTDYwzB191O0b9qvw8HDb+B0hlfQmmJDxI7\nttyWh505hbx51ZmkFZTw10+X8sXocwmvdm5vyTgxz+0D6T
WkD4u/nMcPb/2P1t3ac+Pzd/Lkhf93\n3F2lIkeurk6Iv2VZBfuutCyr0Bjjf6CdjDG3ArcCDI/qRe+w2t9sHoou15/PGSMGAJCydjvhjRtW\nbguPj6YgpWYJryAlm7D46FoxkS1jadC8EaNnTvGubxzNqOmT+HDIYxSlV1UFEuetwTHpBoKjwijO\nrtVsn7EKsjHhUZXLJiwSq6Bm2z0FOXhSEsHjwcrLxMpJwxEViye16npcqzAXK3MPzibtcG9daVv+\nIgdi5WVhGsRULnsrI5k1g4qqnouu5T8SOLhqgKNVMVjdyk7FvX09jiYJuOt5J8TKzsBEV33r7Yhu\nhJVdrc1BITiatiLsAe84NdMgmpC7nqTopUexstJxbf4NqyAPANfaX3C2bFevOyGxYUGk5BdXLqfm\nlxAbFlQjJi48iNMaR+LvdNA0MoSWUaHszC7ktMbeSzxmb0pmQLs4/J3Hxwf12PBgUnKLKpdT84qJ\nrVYNqm7m+l08OLibXakdM56sDBwNq5/XMXiy06sCgkJwNE8gbOLzgPe8Dh0/icJpE/Fr04GAXn0J\nHnEbJiQMy/JglZdRNqf+TRJTXWxYEKn5VR2n1PxiGoUF1oo5/Y9zu0EILaND2ZlTSKd477k9Z3My\nA9vW/3O7/6iL6DPifAB2rNlKdJOG/DFiKTK+Idkp+6/07c85Vw/kpdGTAdi+cjP+gf6ERYeTn5l3\ntNOu31QJqXNMSLAxJnTflcaYcCDgQDtZlvWGZVk9LMvqcbgdEIDV7//A+4Mf5v3BD7N11q90Gt4H\ngMZd21CaX1TjUiyAwrQcygqKadzVOztFp+F92Dr7VzI27ea1bnfw5jn38OY595CfnMUHF0+kKD2X\nkGoDBeM7t8Y4TL3qgAB4UpMwkbGYiIbgcOLXvifu7WtrxLi3rcbZtL13ISgUExmLJzcDExYJzor+\nYmAIjiZt8WSn2NwCkf3z7N6KI6YxJioWnH74de6De+OKGjEmvOpaY2fHHnjSKi6XDAr1jnMCCAnH\n2bIDnn0GtNdH7sRNOGObYmLiwemHf6/+lK9aXBVQXEj+XcPJnzCS/AkjcW/bSNFLj+LesZnydStw\nNkuAgEBwOPA7pTOevUkHfrB6oFPjBuzMLmRPThHlbg+zft9Lv7ZxNWIGtItjxS5vRyy7qIyk7EKa\nRVZdzjRz414Gd2xia95HolPTaHZmFbAnu4Byt5tZ63fSr33t/BMz8sgrLqNzs4b7uZfji3v77zji\nm+Jo5D2vA3oPpPzXJVUBxYXkjbmcvLuvJe/ua3Fv3UDhtIm4EzdT8NTdletLZ35F6Xcf1/sOCECn\n+AbszClkT27Fub0pmf5t9jm328azYpf3A3p2URlJWYU0rXap3szfkxnUof6f2/M/mMWkiycw6eIJ\nrJ69nN5X9AMgoWs7ivOLDvlSLICsvRl0OOd0AOLbNMU/0P/k64AIUHcl5N/Al8aYMZZlJQEYY1oB\nr1Zss832uatJGNCZm3+aRnlxGTPvfaNy2/UzJvP+4IcB+GHiuwyedmvFFL1rSJy35qD3e8rFveg8\n6jw8LjeuknL+N+7VY9qOw2J5KJv/GYHD7vJO0bthMVZWMv69L8OTmoQ7cS2epA1YLU4laORjYHko\n//lrKCnEtOhI4LnDwQIMlK+cg5V54AGSx4sJj01l+aq15OTkcd6wkYz96yiGX3aRr9M6pk7INns8\nlP7nLYJvegSMg/IVc/Gk7SLg/Gtw79mKe+MK/M++BGfHnuBxYxUVUPLlKwA4YpsRePltYFlgDGUL\nvqk1q1a95PFQ/NHLhI6f6p2i96eZePYmEThsNO4dm3GtXnLgfYsKKJ31JWGPvgqWhWvtMlxrf7Ev\n98Pg53DwwPmncfuXy/B4LIae3oy2MeG89vMmTo2PpH/bOM5u1YgliRlc8fYCHMZwT7+ORAZ7v+fa\nk1tESn4x3ZsfPx/U/R
wOHhjcjds/WojHshjaJYG2sQ14bd46Tm0SRf9TmgIws2JAutlnwpAb35nL\njsx8ispcXPj8f3n8sp6c3baeT93q8VD87suE3v8MOJyULZiBZ88OgobfgCtxM66Vi+u+j+OMn8PB\n/QM7MfarZXg8MPS0ZrSJCee1RZs5Na5Bxbkdw5KkdK54ZyFOB9zdr0Plub238tyOruOR6pd181Zy\n+oCuTFrwMmXFZbw3oepz08Tpf2fSxRMAuOKBkfQa2oeA4ACmLnmdnz/7kf+98AVfTnqfkVNv47y/\nXgIWvHtvPfzcZQddfoap6xo8Y8wY4EEgrGJVATDVsqx/HsoDPNdi5En3vzx2fFjdQScQ/+vu83UK\nYoPSaRN8nYLt3Ckn3yQOAeecUXfQiSa4VsH/hFc6fZGvU7BdQL/Ovk7BVvdMOQ6+lDkG/rXji+Ni\nGtCiF26z7fNxyN3/qpf/J3XOAWhZ1uvA6xWXYGFZlkY0i4iIiIgcLo0JOfTfCbEsK796B8QYc/yP\npBMREREREdsdyXQMtx+1LEREREREThYey76/euqwOyGWZd1yNBMREREREZGTQ51jQowxAcB1QKeK\nVeuBjy3LKj2WiYmIiIiInJCs+jMmxBgzCHgRcAJvWZY1dT8xVwGP451vdY1lWdce6eMetBJijDkV\n2AD0B3ZW/PUH1htjOh14TxERERERqc+MMU68P70xGDgVGFHx+b96TDu8M+WeY1lWJ+Duo/HYdVVC\nXgZutyxrzj7JnA+8Agw4GkmIiIiIiJw06s9YjV7AVsuytgMYYz4FhuItQvzhFuBVy7KyASzLSjsa\nD1zXmJCm+3ZAKh78B6Ce/3KSiIiIiMjJzRhzqzFmRbW/W6ttbgrsqra8u2Jdde2B9saYRcaYpRWX\nbx2xuiohDmNM4L7jP4wxQYewr4iIiIiI+JBlWW8AbxzBXfgB7fAOyWgGLDTGnG5ZVs6R5FVXJeR9\n4CtjTMs/VhhjWgFfAB8cyQOLiIiIiJyMLI/Htr867AGaV1tuVrGuut3AfyzLKrcsKxHYjLdTckQO\n2gmxLGsSMBP4yRiTYYzJABYAsyzLevJIH1xERERERHxmOdDOGJNQMSPuNcB/9on5Fm8VBGNMDN7L\ns7Yf6QPX+TshlmW9YllWCyABuAL4HfiLMWbYkT64iIiIiMhJp578WKFlWS5gHDAL2Ah8blnWemPM\nk8aYIRVhs4BMY8wGYB4wwbKszCP9LzjouA5jTLxlWSkVSeYbY8YBwwAD/IK3ZyQiIiIiIschy7Km\nA9P3WfdotdsW8H8Vf0dNXYPLXzfGrASetSyrBMgB/gJ4gLyjmYiIiIiIyEmhHv1Yoa/UNSZkGLAK\n+J8x5nq8P04SCDTEWxERERERERH5Uw5lTMh/gYuABsA3wGbLsl6yLCv9WCcnIiIiInLCqSdjQnzp\noJ0QY8wQY8w8vDNkrQOuBoYaYz41xrSxI0ERERERETmx1DUmZBLen3MPxjstby9gvDGmHTAZ7zRe\nIiIiIiJyqOr+/Y4TXl2dkFy80/KGAGl/rLQsawvqgIiIiIiIyGGoqxNyOTACKAeuPfbpiIiIiIic\n4OrxWA27HLQTYllWBvCyTbmIiIiIiMhJoK5KiIiIiIiIHE36nZC6p+gVERERERE5mlQJERERERGx\nk8aEqBIiIiIiIiL2UidERERERERspcuxRERERERsZOnHClUJERERERERe6kSIiIiIiJiJw1MVyVE\nRERERETspUqIiIiIiIidVAlRJUREREREROylSoiIiIiIiJ0szY6lSoiIiIiIiNhKlRARERERETtp\nTIgqISIiIiIiYi9VQkREREREbGSpEqJKiIiIiIiI2EuVEBERERERO6kSokqIiIiIiIjYS5UQERER\nERE7efQ7IcdVJ2TgE6NIGNAFV3EpM8a/Qdq6HbVi4k5vxaBpt+EXFEDivNXMfeyDGtt7
3DKY/o9c\nx6udx1CcXUDz3h0Z9tY95O5KB2DLzOUsefFbO5rzpzhankpAv6vAOHCtX4RrxaxaMc523fE/81LA\nwpOxm7KZb2PCowm8dAwYAw4nrjXzcP32k/0NOMomTvkHCxctIzoqkm8/fN3X6djiRG2zs30XAi+9\nCRwOypf/SPmCb2ps9+s2gMDBo/DkZQFQvmQGrhU/AhA6+XM8KTsBsHIyKPlgqr3JHya/03oSdO1Y\nb5sXzqB0+qf7j+t+LqHjHqPgibG4d2wGwNEsgeDR92CCQ8CyKHhiLPw/e/cdHkW1PnD8e3bTewIk\nAYIQEjoIoYmAAhbEgiAoAooiFhTUqxcQ27VhQdGrV+xiv1bsVxFQehWQ3gkloSSkk2RTd+f8/tj8\nEtZACJDMbsL7eZ59YHbe2X1PdpI9Z04Ze6mZ6Z+2FfvTeGnBdgytue78Zoy7IL5SzLydR3h35R4A\nWkeGMP2aBNYmZzBj4Y7ymANZ+UwfnMAlraJNy/1MrUhM4aV5GzEMzXUJsYzr285l/4x5G1h7wPm9\nU1RqJ8tWzPKp1wEw4fOlbD6UScJ5DZk56iLTcz9TXuf3wH/MvWCxULJ4DsX/+/KEcd49LiLwgafJ\ne/xuHPt3lz+vGkQS8hBUfvUAACAASURBVNJHFH33CcVzvjEr7bOyYn86MxY5z+2hHZsx7oK4SjHz\nd6Xwzso9KAWtGwXzwtUJrE3O5OXF28tjDmTZmH51FwbUgXMb4MYnb6PjgK6UFBbz8eQ3Obhtf6WY\nIZNH0WvYxQSEBvGPDmPKnw9v0pDbXpmIf0ggFouFH178nK2LN5iZvvAQdaYREjugM+Etovng4kk0\nTojj8ufG8vmQpyrFXfbcbcyfOouUDXsZ/skUYvufz/7FmwEIbhxB84s7kXsow+WYQ2t38cNtr5hR\njDOjFD79R1H8w3/Q+dn4jXwEx77N6KyUipCwSLy7X0HR7BlQXAD+wQBo2zGKvnkJHHbw9sXv5iec\nx9qOuas0NWLoVZczevi1PDrtZXenYpp6WWZlwffaOyn84Bl0bib+E1/EvmMtOu2QS1jplpWU/Dyr\n8vGlJRTOnGxSsjVEWfAbcx+2l6eis9IJeuJNSjeuxDiS7Brn54/v5ddh31tRCcdiIeCuRyh4fzrG\nwX2owBBwOMzN/zQ5DM0Lv2/jnREXEBXsx02fLadfXBRxDYPLY5KybXz4514+Ht2bED9vsmzFAPQ4\nryHfjHVWwo8VljB41mIubNHILeU4HQ7D4IXf1vPOzf2ICvHnpll/0K9NE+IahZbHTLkiofz/X67Z\nw87U7PLtWy9sQ1Gpg2/X7zU177OiLPiP/Qe2F6ZgZKUTPO1tStevxDic5Brn54/voOHYE7dXegn/\nm++hdNMakxI+ew5DM33BNt6+vqfz3P58Bf3iI4lrcIJze9SFznO74P/P7QZ8fUvFuX3th0voVQfO\nbYCO/ROIjG3Mv/rfR2xCK2567k6mD320UtzmBetY9MlvTFs80+X5q+8dzrpfV7H0v/NpHB/DvR8/\nwmN9J5qVvvAgJ50TopS6paqHmUkCxA/sxrbvlgOQsmEvviGBBEaGucQERobhE+RPygbnH+5t3y0n\n/oru5fsHPHkzS5//Cq3r1mQgS1QL9LE0dG4GGA7su9dibXm+S4xXh76Ubl7ibIAAFOY5/zUczgYI\ngNXL2SNSD3Tv0onQkOBTB9Yj9bHMlmbxGJmp6Oyj4LBj37Qcr3Y93J1WrbK2bIORdgSdngIOO6Vr\nFuOd0KdSnN91Yyme8zWUlpQ/59WxO45D+zAO7gNA23JBe3aX/taUHJqFBxATFoC31cIVbZuwOPGo\nS8z3m5K5MaE5IX7eAEQE+lZ6nd93p9InthH+3lZT8j4bWw9n0Sw8iJjwILytVq7ocB6Ldx05afxv\nW5MZ1OG88u0LWkYR4FtnrhECYI1ri3H0MEbZeV2y
eiHe3XpXivO/fhxF//sSSkpcnvfu1gcjLRXj\n0AGTMj57W1NzaBZ23LndpnGlc/uHzQcZ0eW4czug8rn9x55U+rSoG+c2QOeBPVj9/RIA9m/Yg39w\nICGNwirF7d+wh9z0nErPazT+Qf4A+IcEcOxodqWYc4KhzXt4qKr+yp2sJnAt0BT4tObTObmg6HDy\nUjLLt/NSswiKDseWluMSk5+aVSkGIO7yruSlZpO+429XG4EmXeO5Ze5z2I7msPi5L8jcfbgWS3L6\nVFA4Oq/il1Tn52CJjnWNCY/EAnjdMAWUovTPXzCStpcf7ztkIio0ktLl39X5XhBRf6iQCPSxip5J\nnZuFpVmrSnFeHXphbdEenXGE4l8/Qh8r+1vg5YP/xBfBMChZ8gOO7Z5/FVWFN0RnpZVvG1npWOPa\nusRYmsdjiYjEvvlPfK8cUfF8VAxoTcCk6ViCQyn5cxElv3n2sJW0/CKig/3Lt6OC/diS4loxScq2\nAXDr5ysxtObuPq3oExvpEjNv5xHGdHf9u+ep0vIKiQ4NKN+OCvFny+GsE8YeybFxJMdGz7+Vt66x\nRDTEyDz+vM7AK851CJq1RStUg0bYN/4JV99YscPXD9/BI8l/YQp+xz/v4dLyi4gK9ivfjgr2Z+tJ\nzu2xX67C0JrxF7aiT6xrj8e8nSnc3K1FredbU8KiIsg6UlEfy0nNJDw64oQNjhP536vf8MBn/2LA\nrVfiE+DLazdNq61UhYc7aSNEa33f//9fKaWAm4CpwGrguapeVCl1F3AXwPDwnvQKqlypMJOXnw+9\n7r2W2Te/WGnf0a0HeO/CBygtKCZ2QGeGvv8gH/SrY8M7AGWxQFgkxd+94mx0XD+Jov9Og5JCdH42\nRZ8/iwoMxeeae7AnroeCPHenLES12Heuxb5pGTjsePW8HN8b7qNo1lMAFLx0Nzo3CxUehf+dT1GY\nmoTOOlr1C3o6pfAfeQ8Fs16qvM9qxatVR/KfmYguKSZwygwcB/bg2FG3x1M7DE1yto1ZI3uRllfE\nuK9WMXvsxeVXj9Pzi0hMz6sTQ7FO17xtyVzWLgarpZ4vVqkU/jfdQ8G7lb+H/YaPpfi3b6G4yA2J\n1S6HNkjOsfH+iAtIyy/i9q9WM/vWiwg+7tzek1E/z+2T6XltX1Z+u4g/Zv1Cy66tue3V+3hm4D/r\n3CiVs+bBPRRmqbK/VynlBYwFJuNsfFyvtd51qhfVWr8HvAfw8nk3n/FPucstl3H+qAEApG7eR3Dj\nBuX7gqMjyE917cLLT80mKDqiUkxY80hCmzXi1rnPO59vHMGYOc/y32ufpCC9oldg/6JNWJ4di394\nEIXZ+Weado3T+dmo4PDybRUUhs53LbuRn4ORuh8MA52bic5JwxIeiXG0Yjyuth1DZx7G2qQVjsT1\npuUvxMno3CxUaMPybWfPSKZrUEHF76J97QJ8r6yY4KjLJqvr7KM49m3D0iQWh4c3QnR2Biqi4qq3\nJaIROvu4MvsFYGnagqCHnfPUVGgEAfc/Q8HrT6Cz0rHv3oLOzwXAvvlPrM1beXQjJDLIj9S8wvLt\no3lFRAb5ucREBfvRsXEY3lYLTcMCaB4eSHK2jY6NnUM85u9KYUCrKLytdaOiHhnsT+qxgvLto7mF\nRB7XG3S8udsO8siVXc1KrdYYWRlYGhx/XjfEyE6vCPALwNIslqDHXwWc53XgpGexvfI4XnFt8el5\nMf6jxqMCgtDaQJeWUPK75y0Sc7zIID+O5lU0nI7mFdIoyLdSTKf/P7dDA2geEUhyjo0O0c5z+/fd\nKVwS7/nndv8xV9B31GUAHNiUSESTBvz/jKWw6AZkp564p+9E+tx4Ca/f6ryWvW/9brx9vQmKCCYv\nM7em0xYerqo5IROB7UA3YJDWemx1GiA1aeOnf/DplY/x6ZWPkTjvLzoM7wtA44Q4ivMKXIZiAdjS\ncijJL6RxgnN1
ig7D+5I4/y8ydh3ira4Teb/Pg7zf50HyUrL47KrHKUg/RsBxEwWjO7dEWZRHNUAA\njKNJqLBIVEgDsFjxat0Dx77NLjGOvRuxNm3t3PALRIVFYhzLQAWFgdV5xQXfACxN4jGyU00ugRAn\nZhxKxNKwMSo8EqxeeHXui2PHOpcYFVwx1tjarjtGWtlwSb9A5zwngIBgrM3bYvxtQrsncuzfhTWy\nKaphNFi98O7Zn9INKysCCm3k3T+cvCk3kzflZhx7d1Dw+hM4DuymdOs6rDGx4OMLFgtebTpjHEk6\n+Zt5gA6NQ0nOtnE4p4BSh8G8nUfoFx/lEjOgVRTrDjobYtkFJSRl24gJqxjONHfHEa5s18TUvM9G\nh6YRJGflczg7n1KHg3nbkunXunL++zNyyS0soXNMgxO8St3i2LcTS3RTLI2c57VPr0so/WtVRUCh\njdy7ryP3gdHkPjAaR+J2bK88jmP/bvKnPVD+fPHc7yj+6QuPb4AAdIgOJTnHxuFjZef2rhT6x/3t\n3I6PZt1BZwU9u6CEpCwbTY8bqjd3ZwqD2nr+ub34s3k8e9UUnr1qChvnr6XXsH4AxCa0ojCvoNpD\nsQCyjmTQtk8nAKLjmuLt631ONkC01qY9PFVVPSEzgTSgL9BHVUxoVoDWWp9/sgNrw76FG4kd0Jk7\nlr1CaWEJcye/V77vlt+e49MrHwPgj8c/5spX7ipboncT+xdtqvJ121zVk85jLsWwO7AXlfLLvW/W\najnOiDYoWfw1vkPvdy7Ru30lOisF716DMY4m4di/GSNpO/q89vjd/CRog9Ll30ORDXVeO3wvGg4a\nUFC6/nd05sknSNYVU56cztoNm8nJyeXSoTcz4fYxDB98hbvTqlX1ssyGQfHPs/Af9y9QFkrXLcRI\nO4jPZSNxHE7EsWMd3r2vxtquBxgOdEE+Rd++AYAlMgbf68aD1qAUJUt+qLSqlkcyDAo/n0ngpOnO\nJXqXzcU4koTv0FtxHNiNfeOqkx9bkE/xvG8JeuJN0Br75jXYN/9pXu5nwMti4eHLOnLPt2swDM2Q\nTjHENwzmreW7aB8dRv/4KHq3aMSq/RkM+3AJFqV4sF87wvx9ADh8rIDUvEK6Nas7FXUvi4WHr+zK\nPZ8vxdCaIV1iiY8M5a1FW2nfJJz+bZoCMLdsQrr624Iht320kAOZeRSU2Bn46v94anAPesd7+NKt\nhkHhxzMJnPoiWKyULPkN4/AB/IaPxb5/N/b1K0/9GnWMl8XC1Es6MOG7NRgGDOkYQ1zDYN5asZv2\nUaFl53ZDViWlM+yjpVgt8EC/tuXn9pHyczviFO/kWbYuWk+nAQk8u2QmJYUlfDKlot70+JwZPHvV\nFACGPXwzPYf0xcffh+mr3mH51wv45bXZfPvsp9w8fTyX3n41aPh4sgfWu4Qp1MlaSEqp5lUdqLWu\n1uW3sxmOVVdNmBTk7hRM5X3TQ+5OQZig+JUp7k7BdI7Uc28RB58+pl5f8gz+ge7OwHTFc1a4OwXT\n+fTr7O4UTPXg83XgokwtePfA7DqxDGjunQNNqx+HvD/fI38mVU1M9+w+fiGEEEIIIUSdVLcWIhdC\nCCGEEKKuk9WxTj4xXQghhBBCCCFqg/SECCGEEEIIYSItPSEnb4QopbbgXFOp0i7csDqWEEIIIYQQ\non6oqifkGtOyEEIIIYQQ4lwhPSGyOpYQQgghhBDCXKecmK6U6qWUWquUyldKlSilHEqpc+/WlkII\nIYQQQtQEw8SHh6rO6lhvAKOAPYA/cAcgt7cUQgghhBBCnJFqLdGrtU4ErFprh9b6I2BQ7aYlhBBC\nCCGEqK+qs0RvgVLKB9iolHoJSEHuLyKEEEIIIcQZkSV6q9eYGFMWdy9gA5oBw2szKSGEEEIIIUT9\nVZ2ekG7Ar1rrXODpWs5HCCGEEEKI+k16QqrVEzIY2K2U+kwpdY1SSu6yLoQQQg
ghhDhjp2yEaK1v\nA+KB2ThXydqrlJpV24kJIYQQQghRL8kSvdUajoXWulQp9RugcS7TOxTnUr1CCCGEEEIIcVpO2QhR\nSl0J3Aj0BxYDs4ARtZqVEEIIIYQQ9ZSsjlW9npBbgK+B8Vrr4lrORwghhBBCCFHPnbIRorUedfy2\nUqovMEprPbHWshJCCCGEEKK+8uC5Gmap1pwQpVQCMBq4AdgPfF/dN+haXHJmmdVhRQt3uDsFUxmp\nU9ydgjCB76QZ7k7BdKWzX3V3CqbTdoe7UzDdukk73Z2C6VrEKHenYLp9fxxydwqm6uzj6+4UhKjS\nSRshSqnWOFfDGgVk4BySpbTWA0zKTQghhBBCiHpH5oRU3ROyE1gGXKO1TgRQSj1oSlZCCCGEEEKI\nequq+4QMA1KARUqp95VSlwLnXv+tEEIIIYQQNUnuE3LyRojW+ket9UigLbAIeACIVEq9rZQaaFaC\nQgghhBBCiPqlOndMt2mtv9BaDwZigA3A1FrPTAghhBBCiHpIG+Y9PNUpGyHH01pna63f01pfWlsJ\nCSGEEEIIIeq302qECCGEEEIIIcTZqtZ9QoQQQgghhBA1xIOHSZlFekKEEEIIIYQQppKeECGEEEII\nIUzkyRPGzSI9IUIIIYQQQghTSU+IEEIIIYQQZpKeEOkJEUIIIYQQQphLekKEEEIIIYQwkcwJkZ4Q\nIYQQQgghhMmkJ0QIIYQQQggTSU+I9IQIIYQQQgghTCY9IUIIIYQQQphIekKkJ0QIIYQQQghhMukJ\nEUIIIYQQwkxauTsDt6uTjZCIAZ1p9extKKuFlM8XkDTzJ5f9Yb3a0WrarQS2b8628a+R/sufLvut\nQf5csOzfZPy2lt2Pfmhm6mfMO6EnAXfeBxYLxb//StF3X5w47sKLCX54Gscm3YUjcRcqOISgqc/g\nFd+G4oVzKXjvPyZnfuasrbvge804sFgoXbuA0iU/uOz36joA3yvHYORmAVC66jfs6xYAEPjcNxip\nyQDonAyKPptubvJn6Fwsc1Uef/7fLF2xhojwMH787zvuTqfGrDiQwYwlOzEMzdCOMYzrEVspZv7u\nVN5ZvRcFtG4UzAtXng9At//MJ75BMADRIX7859oEM1M/IysOpDNj8Q4MA2d5e7asFDN/VwrvrE5E\noZzlvaozACm5hTzz+1aO5hcB8MbQbjQJDTA1/zMRPqALcdOc31Opny/g4Bs/uuwP7dWOls+MJah9\nc3bc/RoZv6wu3+fbtCGtX7kb3yYN0MDWm56n+GC6ySU4fX4X9iB88kSwWLD9OIfcT75y2R80/BqC\nbhgCDgOjsJCs517Fvj8JS2gIDV98Ep/2bbD9Mo/sl2a6qQSnL2JAF+KPq48kz6z8OcdPc37O28e/\nRvpxn3O/I19j2+H8m110OIOtt7xoYubV0/fpMTS/pAv2wmIW/PM9MrYeqBTTqFMLLvn3eLz8fEha\nuJHlT34GgG9YIAPfvJfgZo3IO5jO/AkzKT5WQJfxV9P6ut4AKC8L4fFN+ajLPRTn2Bjw8p00v7QL\nhZm5fH3ZI2YWVbhB3WuEWBRtpt/OhhHPUnwkk+7zXiB93joKdh8uDyk6nMH2f7zFefcMPuFLtHz4\nRnJW7zAr47NnsRAw/gHynpyEkZlOyMvvUrJmBcbBJNc4f3/8Bl+Pfde28qd0SQmFn3+AtXks1vMq\nV3Q8lrLge+2dFH7wDDo3E/+JL2LfsRaddsglrHTLSkp+nlX5+NISCmdONinZGnIulvkUhl51OaOH\nX8uj0152dyo1xmFopi/awdvDuhEV5MdNX66mX8tGxDUIKo9Jyrbx4dr9fDyiJyF+3mQVFJfv8/Wy\n8vXNF7oj9TPiMDTTF27n7WE9iAr246YvVtEvLvIE5d3Hxzf2qlTef83bzB094+jVvCEFJXaUqgNX\nDy0W4l+4nS0jplGckkXC3BfInL+Ogt0Vv8
tFhzPY/Y83iZlwbaXD28y8l+TXvidn6WYsAX51Y/C4\nxUL41PtJm/gQjqPpRH/6FgVLV2HfX/E9ZZu7kPzvfgHA/+ILCX/wbtLvfwRdXMKxtz/CO74F3nF1\n6HvKYqHV9NvZNGIaxUey6DbvBTLmuX7OxYcz2PmPN2l2T+XP2SgqYd2lU8zM+LScN6AzobHRfH7R\nJKIS4uj3/Fi+u/apSnEXP38bix+axdENe7n60ymc1/98khdvpuuEwRxasZ0Nb/2PhAmDSZgwmNUv\nfM3Gd39l47u/AtD8sgQ63zGI4hwbADtnL2XLx79z6WvjzSyqW9SFX+vaVuWcEKXUUKXUZKXUFWYl\ndCohXeMp2J9KUVIautRB2o8raTSoh0tM0cF0bNuTwdCVjg8+PxafRqFkLd5kVspnzatVO4zUwxhH\nU8Bup2TZQnx69q0UFzD6doq++wJdUlLxZHER9h1bXJ+rAyzN4jEyU9HZR8Fhx75pOV7tepz6wDrs\nXCzzqXTv0onQkGB3p1GjtqYeo1loADGhAXhbLVzROprFe9NcYn7YepgRnZsR4ucNQESArztSrRFb\nU3NoFhZATFhZedtEs3jvUZeYH7YcYkTn8yqVd29mPg5D06t5QwACfLzw97aaW4AzEJwQT+H+VIqS\n09CldtJ/XEGDK7q7xBQfTMe2Ixn9t++pgNYxKKuVnKWbATAKijAKPf/vt0+HttgPHsZx2Pk9VTB/\nEQH9ervEaFtB+f+Vvx+UFV0XFVG8aSu6uNTMlM9aSNeyzznJ+Tmn/biChoNcP+eq6iOeLnZgN3Z9\ntxyAoxv24hMSSEBkmEtMQGQYPkH+HN2wF4Bd3y0ntuxcbzGwG7u+XeZ8/ttl5c8fr9WQC9nz06ry\n7ZQ/d1Gck18r5RGe56Q9IUqpt4AOwEpgmlKqp9Z6mmmZnYRvdATFRzLLt4uPZBLStVX1DlaK+Kdu\nYfvEmYRf3KmWMqx5qkFDHBkVlRQjMx2v1u1cYqwtW2FpGEnpX6vxu26k2SnWOBUSgT6WUb6tc7Ow\nNKv8OXt16IW1RXt0xhGKf/0Ifazs3PDywX/ii2AYlCz5Acf2NWalfsbOxTKfi9JsRUQF+5VvRwX7\nsTX1mEtMUrbzquDYr9dgaM34XnH0aeGsiJfYDUZ/sRovi+K27rEMiI80L/kzkJZfTFSwf/l2VNAJ\nylt2FXTsV6ud5b0wnj4tGpGcbSPY15tJ/9vA4WMFXHBeA+7v2warxbN7Q3wb/+17KiWL4Gp+T/m3\nbIw910b7Dybjd14k2cu2sP/Zz8Hw7Mum1siGOI5WDBmzp6Xj27FdpbigG4YQfNP1KC8v0u6p2z23\nlesjWdWvjwAWX2+6zZuOdjhInvkjGb+trY00z1hgdDj5x5XPlpJFYHQ4BWk5rjEpWZViAAIahpTH\nFqTlENAwxOX1vfx8OK//+Sz71ye1WQzhwaoajnUx0Flr7VBKBQDLALc3Qs5G09sGkrlgA8XH/cLU\nC0oRMG4ittfr/hyA02HfuRb7pmXgsOPV83J8b7iPollPAVDw0t3o3CxUeBT+dz5FYWoSOuto1S9Y\nB5yLZT4XObQmOaeA96/vTlp+EbfPXsvsm3sT7OfNnNsvIjLIj0PHCrjr23XENwyiWZjnz5GoisPQ\nJOfYeP+Gns7yfrOG2WP6YDc0Gw5n8+VNvYkO8WPqr5v4efthrusY4+6Ua43yshJ6QTvWXzaFosMZ\ntHv3QaJv7E/qlwvdnVqNyJ/9E/mzfyLgiksIuf1msp7yvHkQZlnVbQIlqVn4NY+ky7dPkr89maKk\n+vs3W/+tM6jF5Qmkrt1dPhTrXKMNz76YYoaqhmOVaK0dAFrrAqDaPy2l1F1KqXVKqXW/FO472xxd\nFKdm4dukQfm2b5MGFKdWr1ER2r01MeMGceHaN4h/cgzRIy4m7vHRNZpfbdCZGVgbVlzttDRohJFZ\nccVc+Q
dgbR5L8LOvEfreV3i1aU/wY89jjW/jjnRrhM7NQoU2LN929hJkugYV5IPDDoB97QKsTVu6\nHA+gs4/i2LcNSxPPH2d8Lpb5XBQZ6MfRvKLy7aN5RTQKdB1uFRnkR7+WjfC2WmgaGkDz8ECScwrK\n9wHEhAbQPSaCnem55iV/BiKDfDmaV1i+fTS/iEZBJyhvXORx5Q0gOaeAqGA/WjcKJiYsAC+LhQFx\nkexM8+zygrPnw+V7qnEEJSmZVRxx3LFHMsnfdoCi5DRwGGTOXUvQ+Z7/u+xIy8Aa1ah82yuyEY60\njJPGF8xfRED/3ifdXxdUro9EUJxavc8ZoKSs7lKUlEbOyu0Ed3L/59zx1ssYMfc5Rsx9joK0HIKO\nK19g4whsqdku8bbUbIIaR5wwpiAjt3z4VkBkGIWZrr+78ddeyJ6fVyHOXVU1QtoqpTaXPbYct71F\nKbW5qhfVWr+nte6ute5+jX/lVVDORt6GvQS0bIzfeY1Q3lYih/YmY966ah27fcJMVnabwKoe95L4\n9GekfrOUvc+eeJUpT2LfsxNL4xgskdHg5YXPRZdQumZF+X5dYCNnzBCO3TWSY3eNxL5rO3nPPYoj\ncZcbsz47xqFELA0bo8IjweqFV+e+OHa4fs4quGJsqrVdd4y0ssUJ/ALBWtbJFxCMtXlbjL9N7vZE\n52KZz0UdokNIzing8LECSh0G83an0j/OdUjVgLhI1h1yVlCyC0tIyrbRNNSf3KJSSuxG+fMbU3Jo\nGRFU6T08SYfoUJKzjyvvrlT6t/xbeeMjWXfw+PIW0DTUnw5RoeQV28kqcM6JWHswi5YRgaaX4XTl\nbUzEv2Vj/M6LRHl70WhoHzLnV+97Km/jXrxCAvBu4By6Eta3I7bdnv+7XLJ9J97NmmJt4vyeChg4\ngMKlK11ivJo1Lf+/f99elCYf/vvL1Cl5G1w/58ihfapdH/EKDUT5OP9me0cEE9KzjUd8zls/+YNv\nBj3GN4MeY/+8v2gz3Dn/NCohjpK8ApehWOAcZlWSX0hUQhwAbYb3Zf/8vwA48Pt62lx/kfP56y/i\nQNnzAD7B/jTp1Zb989abUSyPpA3zHp6qquFYlQdzegDtMNj9yId0+eoxlNXCkS8XYdt1iNiHRpC3\naS8Z8/4iuEscnT6ajHdYIA0HdiN2ygjW9Jvk7tTPnOGg4L3XCH7qZecSvQvm4Dh4AP/R47An7qR0\nzcoqDw997ytUQCDKywufC/qS+9TkyitreRrDoPjnWfiP+xcoC6XrFmKkHcTnspE4Difi2LEO795X\nY23XAwwHuiCfom/fAMASGYPvdeOdfb9KUbLkh0orTHmkc7HMpzDlyems3bCZnJxcLh16MxNuH8Pw\nwR6zTsYZ8bJYmDqgLRN+WI+hNUM6NCWuQRBvrUqkfWQI/eMi6d28AauSMhn26QqsSvHARa0J8/dh\n45EcnluwHaWcH/Vt3Vu4rDLlibwsFqZe0p4J368rK28McQ2DeWvlHtpHhZaVtyGrkjIY9skyZ3kv\nbkOYvw8A/7y4DXd/twatoV1UCMM6NXNziarBYZD46Ad0/NL5PZX65SIKdh2i+UM3krdxL1nz1xHU\nJY4OH07BKyyQBpd3o/mUEfzV759gGOx7+jM6zX4CpRR5m/eR+t8F7i7RqTkMsmbMJHLmi2C1YPv5\nN0r3JRE6fiwlO3ZRuHQVwSOG4tuzK9jtGHn5LkOxmvz8OSowAOXtjX+/PqTdO9VlZS1PpB0Gex75\ngPPL6iMpZZ9zi4duJG/TXjLnrSO4SxwdPyr7nAd2o8WUEazt908CWjWl9cvjnXN9LBaSZ/7osqqW\nJ0hauJHzLunMzNyj7QAAIABJREFUTctfwV5YwsJJ75XvGzH3Ob4Z9BgASx/7mEv+fRdefj4kL9pE\n8iLnwj/r3/wfV7x9H+1G9iPvUAbzJ1QsvRw7qDsHl27BXljs8p6XvzGR
Jr3a4RcRxC1rXmftK9+x\n4+slJpRWuIPSfx+kV8MWRo2oe0tCnKUuvervmM4T8Wnf8NRBos7znTTD3SmYrnT2q+5OwXyqykUT\n66V1Tx1xdwqmaxFTz+ZGVsO+gxGnDqpHtvvU3VX1zsaEg/+tE5MtDl94iWn146arFnrkz+Tc+7YR\nQgghhBBCuFXdu1mhEEIIIYQQdZgnz9Uwi/SECCGEEEIIIUxV1c0Kt1B+P1PXXYDWWp9fa1kJIYQQ\nQghRT8l9QqoejnWNaVkIIYQQQgghzhknbYRorT17bTwhhBBCCCHqoFpenLZOOOWcEKVUL6XUWqVU\nvlKqRCnlUEp5/i1rhRBCCCGEEFVSSg1SSu1SSiUqpR4+wf67y25WvlEptVwp1b4m3rc6q2O9AYwE\nZgPdgVuA1jXx5kIIIYQQQpxrPGVOiFLKCrwJXA4cAtYqpX7WWm8/LuwLrfU7ZfHXAv8GBp3te1dr\ndSytdSJg1Vo7tNYf1cQbCyGEEEIIIdyqJ5Cotd6ntS4BvgKGHB+gtT5+BFQgJ1646rRVpyekQCnl\nA2xUSr0EpCBL+wohhBBCCHFGPKUnBGgKHDxu+xBwwd+DlFITgX8CPsAlNfHG1WlMjCmLuxewAc2A\n4TXx5kIIIYQQQojao5S6Sym17rjHXaf7GlrrN7XWccBU4PGayKs6PSHdgF/LumKerok3FUIIIYQQ\nQtQ+rfV7wHsn2X0YZwfD/4spe+5kvgLerom8qtMTMhjYrZT6TCl1jVKqOg0XIYQQQgghxAlobd7j\nFNYCrZRSsWXTL0YCPx8foJRqddzm1cCemvgZnLIRorW+DYjHuTrWKGCvUmpWTby5EEIIIYQQwj20\n1nacUy7mATuAb7TW25RSz5SthAVwr1Jqm1JqI855IbfWxHtXq1dDa12qlPoN52x4f2AocEdNJCCE\nEEIIIcS5xIMmpqO1ngPM+dtzTxz3/3/UxvtW52aFVyqlPsbZ9TIcmAVE10YyQgghhBBCiPqvOj0h\ntwBfA+O11sW1nI8QQgghhBD1mtae0xPiLqdshGitRx2/rZTqC4zSWk+stayEEEIIIYQQ9Va15oQo\npRKA0cANwH7g+9pMSgghhBBCiPpKG+7OwP1O2ghRSrXGuRrWKCAD55AspbUeYFJuQgghhBBCiHqo\nqp6QncAy4BqtdSKAUurB032D/d4+Z5ha3dXVrzq3X6k/HKnH3J2CMEHp7FfdnYLpvG847T95dZ59\n7gfuTsF0vhaHu1Mwnb3Y6u4UTLfH29fdKZhqnbXI3SmIKhgyJ6TK1bGGASnAIqXU+0qpSwH5iQkh\nhBBCCCHOykl7QrTWPwI/KqUCgSHAA0CkUupt4Aet9XyTchRCCCGEEKLekNWxqnfHdJvW+gut9WAg\nBtgATK31zIQQQgghhBD1UrVWx/p/Wuts4L2yhxBCCCGEEOI0edId093l3JpBLYQQQgghhHC70+oJ\nEUIIIYQQQpwdrd2dgftJT4gQQgghhBDCVNIIEUIIIYQQQphKhmMJIYQQQghhIpmYLj0hQgghhBBC\nCJNJT4gQQgghhBAmMuRmhdITIoQQQgghhDCX9IQIIYQQQghhIi09IdITIoQQQgghhDCX9IQIIYQQ\nQghhIrlZofSECCGEEEIIIUwmPSFCCCGEEEKYSFbHkp4QIYQQQgghhMmkJ0QIIYQQQggTyepYdagR\n0rT/+fR6egwWq4VdXy5m85v/c9lv8fGi32t30/D8WIqy81h0zxvkH8rANyyIS967n0adW7Jn9lJW\nPf4pAN6Bflz9/b/Kjw9sHEHi9yv486n/mlqu6vLq3AP/sfeCxUrJwl8p/unLE8Z597yYwElPk/fI\neBz7dmONa0vAXZOcO5WiaPbHlK5dbmLmZ86rYw/8Rk8Ai4XSpb9RPOerE8d1u4jAe58k/+kJOA7s\nBsASE4v/rQ+i/ANAa/KfngD2UjPT
PyPnYplXHMhgxpKdGIZmaMcYxvWIrRQzf3cq76zeiwJaNwrm\nhSvPB6Dbf+YT3yAYgOgQP/5zbYKZqdeKx5//N0tXrCEiPIwf//uOu9OpFSsSU3lp3kYMrbkuIZZx\nfdq67J8xfyNrD6QDUFTqIMtWzPKHhrgj1TMW2j+B5tPGoSwW0r78g5Q3fnDZH3xBe5o/M46Ads1J\nvOffZP26qnxfs8fHEHZpN5TFwrGlm0j61wdmp39G/Pt0p8HUe1BWC7nfz+XYB1+77A+9ZTjBwwah\nHQ6MrGOkP/EK9pQ0ACIevJ2Aiy4AIPvdz7HNW2J6/lWJ6X8+Fz49BlVWB9l0gjpI/7I6SHF2HgvK\n6iAAnScOps2o/miHwaonPuXQki0AdLxjEG1H9UdrTdbOQyyd9B6O4lKa9OnABY+PQlkUpbYilvzz\nPXIPHDW9zNU1+slxdBqQQElhCR9MfoPkbftd9vv4+XDPW5OIbB6N4TDYtGAd3774uZuyFZ6kTjRC\nlEXR+9lbmTt6OraULK799RmS5/9Fzp4j5TFtRvan+JiN2X0n0fLaXvR4dCSLJryBo7iU9TO+JbxN\nDOFtY8rjS21F/HjFY+XbQ+ZMI+m3taaWq9qUBf9x/8D23BSMzHSCX3iH0nUrMQ4nucb5+eN71TDs\ne7aXP+U4uJ+8R8aDYaDCIgh+aRalf60EwzC5EKdJWfAbcx+2l6eis9IJeuJNSjeuxDiS7Brn54/v\n5ddh37uj4jmLhYC7HqHg/ekYB/ehAkPA4TA3/zNxDpbZYWimL9rB28O6ERXkx01frqZfy0bENQgq\nj0nKtvHh2v18PKInIX7eZBUUl+/z9bLy9c0XuiP1WjP0qssZPfxaHp32srtTqRUOQ/PC3A28c9NF\nRIUEcNOsBfRr3YS4RiHlMVMGdin//5drEtmZmuOOVM+cxUKL5+9k58inKUnJpMOcl8iZt5bCPYfK\nQ4oPp7P3gZk0vtu1cRXUvQ3BPdqx5dJ/AtD+x+cIvrADeau2mVqE02ax0PCxe0m562HsqRk0/Wom\nBYtWUbqv4u9X8Y5Eckfeiy4qJnjENUT88w7SpjyP/0U98WnXikM33I3y8aHxhzMoWL4WbStwY4Eq\nKIuiz7O3MqesDjL012dIOkEdpOSYjW/K6iA9Hx3JwglvENaqCXFDevHtJVMJjArnqi8f5puLJ+Mf\nGUbHcQOZfclUHEWlXPr2fbS8thd7Zi+j7wtjmT/uVXISj9DulstIuH8IS/75nht/AifXqX8CUbGN\neaT/fbRMaMUtz93Fs0MfqRQ37/2f2blqG1ZvL6Z8/iSd+iewZfEGN2TsOWR1rCrmhCilQqvY1712\n0jmxRl3iyD1wlLzkdIxSB/t+Ws15A7u5xJw3sCuJs5cBsP/XNTTp2wEAe2ExR9fuxlF88ivCIbHR\n+DUMIfXPXbVXiLNgjW+LcfQIRloKOOyUrFyId48+leL8bxxH0U9fQUlJxZMlxeUNDuXtU2fOemvL\nNhhpR9DpzjKXrlmMd0LlMvtdN5biOV9DaUWZvTp2x3FoH8bBfQBoWy5oD290cW6WeWvqMZqFBhAT\nGoC31cIVraNZvDfNJeaHrYcZ0bkZIX7eAEQE+LojVdN079KJ0JBgd6dRa7YeyaJZeBAx4UHOz7xD\nMxbvOnLS+N+2JTOoYzMTMzx7QQnxFB1IoTj5KLrUTtZPywm/oqdLTMmhdAp3JFW+IKQ1Fl9vlI8X\nFl8vlLeV0nTPb4T5dmpDafIR7IdSwW7H9tsSAgf0dokpWrsJXeS8iFC8eQdeUY0A8IlrTtFfW8Bh\noAuLKNm9n4C+plYzqvT3Osjen1bT/G91kBYDu7L7uDpI07I6SPOB3dj702qMEjt5B9PJPXCURl3i\nAFBeVrz8fFBWC17+PhQczQacX9Pewf4A+AT7YzvquZ9/wsAerPx+MQD7NuwhIDiA0EZhLjElRSXs\n
LGtEO0rtJG3bR3h0A7NTFR6oqonpfyilwv/+pFJqIPDDCeJrTUDjcGwpWeXbBalZBDZ2TS0wOpz8\nshjtMCjJLcA3PIjqaDmkF/t/Xl1zCdcwS0RDjMyKipmRmY4lvKFLjDW2FapBJPYNlcthjW9H8Msf\nEfzyhxTOetXze0EAFd4QnXVcmbPSUeGuf7QszeOxRERi3/yn6/NRMaA1AZOmE/TU2/hcOcKUnM/W\nuVjmNFsRUcF+5dtRwX6k24pdYpKybSRnFzD26zXc8tWfrDiQUb6vxG4w+ovV3PLVnyxKdG28CM+U\nlltIdIh/+XZUiD9peYUnjD2SY+NITgE9W0SalV6N8IluQMmRzPLtkpRMvBtHVOvY/L92k7tyK103\nfEDChg84tngjRYmHayvVGuMV2RB7anr5tv1oOtaok1c0g4cNomC5c/RBya59BPTpjvLzxRIWgn/P\nzuUNFE8Q2LiifgFgO0EdJCC6op5yfB0k8G/1l/8/tiA1m83vzmHUn//hpvVvUJJXwOGlWwFYNmUW\ngz6dzKi1r9NqeN9KQ788SXhUA7KOO9ezUrOqbGD4hwTQ5dLu7Fix2Yz0PJqhlWkPT1VVI+Q9YJFS\nqvwvgVJqNPAucHVtJ2amltdeyN6fVp060FMphf+YCRR99tYJdzsSd5A3+TbyHr0b36Gjwdvb5ARr\ngVL4j7yHwq9OMGbeasWrVUcK332e/OcfwLtrX6zt6v5cgXOyzIBDa5JzCnj/+u68cGUnpv2xjbwi\nZ8/mnNsv4ovRvXj+yk7MWLKTgzmeMXxD1Ix52w5yWbumWC2e+yVa03xbROMXH8OGbneyoeudhPTp\nRHDPdu5Oq0YFXXMpvu1bk/PRbAAKV/1FwbI1NPnsNSJfepSiTTvQdeBi2dnwCQ2gxcCufHXhg3ze\n7T68/H2JH+bs+e545yDm3vIyX/a4n93fLKXXkze5OduaYbFauPv1B/nj4zmkH5SLRqKKRojW+n3g\nFWChUqqxUuoB4AlggNa6yiasUuoupdQ6pdS6JbY9Z51kQUo2gcddRQqIjsCWku0SY0vNJqgsRlkt\n+IQEUJydf8rXjmh3HhYvC5lbDpx1nrXFyMrA0qDiSqClQSOM7IqrwfgFYGkWS9ATrxEy80usrdoT\nOOU5rC1bu77O4WR0USHWZpUn/noanZ2BijiuzBGN0NkVV1vwC8DStAVBD79C8Iz/Yo1rR8D9z2Bt\n0RqdlY599xZ0fi6UFGPf/CfW5q3cUIrTcy6WOTLQj6N5ReXbR/OKaBToOtwqMsiPfi0b4W210DQ0\ngObhgSSXNTYig5y9KDGhAXSPiWBneq55yYszEhniT2puRc/H0dxCIoP9Txg7d9shBnWoW0OxAEpS\nM/FpUnE12KdxA0qPuxpelYgrLyB//W6MgiKMgiKOLVpPUPc2tZVqjbGnZeAVXdF74RXVCMfRzEpx\n/r0SCLtzFKn3PwmlFcOkc97/ksM33EPqXQ+jFJQmHap0rLvYUirqFwCBJ6iDFKRW1FOOr4PY/lZ/\n+f9jm/btSN7BdIqy8tB2Bwd+W0dUt1b4RQTToN15pG/YC8Den1cT1c2z/pZfMmYQT82ZwVNzZpCT\nlk3Eced6RHQE2amVP3eAW1+4m6P7U/j9w1/NStWjaa1Me3iqKu8TorX+DHgG2ACMBvpqrQ+c6kW1\n1u9prbtrrbv3Czz7X570TfsIiY0mqFkjLN5WWg7pRfLv611ikn9fT/wNFwEQe3VPjqzYfqKXqqTl\nUM/vBXHs3YkluimWRtFg9cKn9yWUrltZEVBoI/fOoeTeN4rc+0bh2LMd24zHcOzb7TzG4vyYVcMo\nrE3Ow0hPdVNJqs+xfxfWyKaohs4ye/fsT+kG1zLn3T+cvCk3kzflZhx7d1Dw+hM4DuymdOs6rDGx\n4OMLFgtebTpjHEk6+Zt5iHOxzB2iQ0jOKeDwsQJKHQbzdqfSP8
516M2AuEjWHXJW4LILS0jKttE0\n1J/colJK7Eb58xtTcmgZUb0hmMJ9OjQJJzkrn8PZNudnvu0g/Vo3rhS3PyOX3KISOsfUvbHj+RsT\n8YttjG+zSJS3FxFD+pI9v3oLnxQfziDkwvZgtaC8rAT36uAyod1TFW/dhXfzpng1jQYvLwKv7Idt\nset3q0/bOBo+8Q9S73sCI+u4eQ4WC5ZQ5zwon9ax+LRqSeHKv8xMv0r/XwcJLquDxJ2gDpL0+3pa\nn6AOkvz7euKG9MLi40Vws0aExEaTvnEv+UcyiUyIx+rnA0CTvh3ISTxM8TEbPiEBhMZGAxBzcUdy\nPGw43sLP5vLUVVN46qopbJi/ht7D+gPQMqEVBXkFHDvBHKbrJo3EPziAL5/5yORshSc76epYSqkt\ngAYUEAA0wNkrogCttT7fnBSd4ytX/esTBn3+EMpiYffXS8jZfZiuk4eTsWk/yb+vZ/dXS+j3n7u5\nYfkrFOfks2jCG+XHj1j1Kj7B/li8vWh+RXfmjp5evqpF7DUXMP+WGWYV5cwYBoUfvk7goy+BxULJ\n4t8wDh3A74bbsO/bhf2vlSc91Nq2E4FDRoPDjtYGhR+8hs6rA1eLDYPCz2cSOGm6c7naZXMxjiTh\nO/RWHAd2Y99YRcOxIJ/ied8S9MSboDX2zWsqzaHwSOdgmb0sFqYOaMuEH9ZjaM2QDk2JaxDEW6sS\naR8ZQv+4SHo3b8CqpEyGfboCq1I8cFFrwvx92Hgkh+cWbEcp50TO27q3cFlVq66a8uR01m7YTE5O\nLpcOvZkJt49h+OAr3J1WjfGyWHh4UBfu+WKZ8zPv3IL4yFDeWryN9o3D6d+mCQBztx1kUIdmOL9y\n6hiHwYHHZtHmiydQVgvpXy2gcPdBmk4ZiW3TXnLmryWwczytP5iKNSyQsMt70HTyjWwZ8ABZv6wi\npE8nzl/4GmhNzqIN5Py+zt0lOjWHQcbzbxD9zvMoq4W8H+ZRujeJ8Im3ULxtNwWLVxMx6U5UgD9R\nrziXx7enpHH0/idRXlaafPJvAIz8AtIemQ4OzxmOpR0GK//1CVeW1UF2fb2E7N2H6TZ5OOlldZBd\nXy2h/3/uZkRZHWRhWR0ke/dh9v3vT25Y+CKGw2DF4x+jDU36hr3sm7OGYXOfxbA7yNyWxI7PF6Ed\nBsse+oDL3v8H2jAoPlbA0kmeuTIWwOZF6zl/QFemL3mDksJiPpxSMSz8qTkzeOqqKYRHRzD4vus5\nkniIJ399CYAFn8xl2dcL3JW28BBKn2S1JKVU86oO1FpX6zLrBzE3143lmGrQ8D6eddWitqkAq7tT\nECbwrmfj0qvD+4YH3Z2C6exz68Y9KWrS5od2nDqonmnU4NTDleubBdl1a4GDs7XKq+jUQfXQhwe+\nrRNXLv5sMsy0+vEFR773yJ/JSXtCqtvIEEIIIYQQQojTUSduViiEEEIIIUR9cc4NEzqBKiemCyGE\nEEIIIURNk54QIYQQQgghTOTJNxE0S3VWx6q0C5NXxxJCCCGEEELUH1X1hFxjWhZCCCGEEEKcIzz5\nJoJmkdWxhBBCCCGEEKY65cR0pVQvpdRapVS+UqpEKeVQStWBu90JIYQQQgjheQwTH56qOqtjvQGM\nAvYA/sAdwJu1mZQQQgghhBCi/qrWEr1a60TAqrV2aK0/AgbVblpCCCGEEELUTxpl2sNTVWeJ3gKl\nlA+wUSn1EpCC3F9ECCGEEEIIcYaq05gYUxZ3L2ADmgHDazMpIYQQQggh6itDm/fwVNXpCekG/Kq1\nzgWeruV8hBBCCCGEEPVcdXpCBgO7lVKfKaWuUUrJXdaFEEIIIYQ4QwbKtIenOmUjRGt9GxAPzMa5\nStZepdSs2k5MCCGEEEIIUT9Vq1dDa12qlPoN0DiX6R2Kc6leIYQQQgghhDgt1blZ4ZVKqY9x3idk\nODALiK7lvIQQQgghhKiXZI
ne6vWE3AJ8DYzXWhfXcj5CCCGEEEKIeu6UjRCt9ajjt5VSfYFRWuuJ\ntZaVEEIIIYQQ9ZTh7gQ8QLXmhCilEoDRwA3AfuD72kxKCCGEEEIIUX+dtBGilGqNczWsUUAGziFZ\nSms94HTeINThwXdJqSW+V/dydwrmKpFReucCbXe4OwXT2ed+4O4UTOc16HZ3p2C62I9uc3cKpjuS\nGOruFEx3/YWH3J2Cqd5dfu79za5LPHmuhlmq6gnZCSwDrtFaJwIopR40JSshhBBCCCFEvVXV6ljD\ngBRgkVLqfaXUpSDNNiGEEEIIIc6GYeLDU520EaK1/lFrPRJoCywCHgAilVJvK6UGmpWgEEIIIYQQ\non6pzh3TbVrrL7TWg4EYYAMwtdYzE0IIIYQQoh6SnpBqNEKOp7XO1lq/p7W+tLYSEkIIIYQQQtRv\n1VqiVwghhBBCCFEzZHWs0+wJEUIIIYQQQoizJT0hQgghhBBCmMiQjhDpCRFCCCGEEEKYS3pChBBC\nCCGEMJEhc0KkJ0QIIYQQQghhLmmECCGEEEIIIUwlw7GEEEIIIYQwkXZ3Ah5AekKEEEIIIYQQppKe\nECGEEEIIIUxkuDsBDyA9IUIIIYQQQghTSU+IEEIIIYQQJjKULNErPSFCCCGEEEIIU0lPiBBCCCGE\nECaS1bGkJ0QIIYQQQghhsjrTExI14Hy6PDMGZbWw/4vF7Hrjfy77LT5e9Hj9HsLPb0FJdj6rx8+k\n4FAGzYb1ps0915THhbZvxh8DH+fYtiT6ffcYfpFhOIpKAVg2cjrFmblmFqvaVuxN5aX5mzG05rou\nLRjXu02lmHnbD/Hush0AtI4KZfrQngC8tnAryxJTAbirb1uuaB9jXuJnYcX+NF5asN1Z5vObMe6C\n+Eox83Ye4d2VewBoHRnC9GsSWJucwYyFO8pjDmTlM31wApe0ijYt9zN1Tpb5QDozFu/AMGBoxxjG\n9WxZKWb+rhTeWZ2IQtG6UTAvXNUZgJTcQp75fStH84sAeGNoN5qEBpia/9lakZjKS/M2Oj/zhFjG\n9Wnrsn/G/I2sPZAOQFGpgyxbMcsfGuKOVGvN48//m6Ur1hARHsaP/33H3enUCJ8ePQmaeB9YLBTN\n+ZWCr75w2e93zbUEDLkObTjQhYXkvfoyjqQkfC+9jIARI8vjvFrGkX33ndj3JppdhNMW3K8rTZ+8\nA2W1kvnVfNLe/s5lf6M7htBg5OVou4E96xjJU16n9HA6/u1jiXnuHixBAeAwOPrGN+T8stxNpTg9\nXl16EnDbvWCxUrzgV4p//OKEcd4XXEzQ5GfInToex75dWOPbEjB+cvn+otkfU7qmbpQZYPK0f9Dn\n0l4UFRbz1APPs2vL7koxr3/xMg0jG2D1srLxz028+MirGIZBq/ZxPPLiZAIC/TlyMJV/TXwGW36B\nG0rhXrI6Vl1phFgUCc+PZdmNL1CQksWlv03jyPz15O0+XB7SYlR/So7ZmNt7EjFDetHp8VH8efdM\nDn6/koPfrwQgpG0zen/0IMe2JZUft+bet8jetN/0Ip0Oh6F5Ye4m3hndl6gQf276cBH9WjUmrlFI\neUxSVj4frtzFx7f0I8Tfhyybs1K2dE8KO1Jz+PqOSyi1G9z+36X0iYsiyNfbXcWpFoeheeH3bbwz\n4gKigv246bPl9IuLIq5hcHlMUraND//cy8ejexPi502WrRiAHuc15JuxFwFwrLCEwbMWc2GLRm4p\nx+k4V8s8feF23h7Ww1nmL1bRLy6SuAZB5TFJ2TY+XLuPj2/s5SxzQXH5vn/N28wdPePo1bwhBSV2\nVB2b6Of83d7AOzddRFRIADfNWkC/1k1cfrenDOxS/v8v1ySyMzXHHanWqqFXXc7o4dfy6LSX3Z1K\nzbBYCL7/AbIfmoSRnk74W+9SvGoFjqSK757ihX9Q9MvPAPhc2Juguydy7JGHKF7wB8UL/gDA
GtuS\nsGeerRMNECwWYqaNZ+9NT1Camknrn1/h2B9rKN5zsDykcNs+dl3zT3RRCQ1uvpImj4wl6d4ZGIXF\nJD34KiUHUvCKjKDNr/8mb+kGHLk2NxaoGiwWAm7/B/nTJmNkpRP8wjuUrluBcSjJNc7PH9+rhmPf\nvb38KUfyfvKmjgfDgQqLIOTlDzi2bhUYDpMLcfr6XNKLZi1juK73KDp2bc8j0ycx9urxleIeueuJ\n8sbFS7OmcdngAcz/aQGPvzKV/zzzFutXbeTakVcxZsIo3nnpA7OLITzAKYdjKaWGneBxqVIq0owE\nASIS4sg/cBRbcjq61MHBn1bT5IpuLjFNBnUj6ZulABz+ZQ2RF3Wo9Dr/x959x0dRrQ0c/53dTe89\nAQKE0EF6B6UqsVAEC6IIggXU67WDXl9sVwGvvV3btVwLtotgoSO9SO/SIYH0SnrZnXn/2LjJEkqA\nZLKB5+tnP+7MPLN5Ttid7JnnnJnGN/bm+Lz1huRck3YnZREd7EOjIB/czCaGtm3EigPJTjFzth3l\n1q7N8PdyByDYxxOAIxl5dI0OwWIy4eVuoWV4AGsPpxrehvO1OzmH6CBvGgV629vcugErDjnnPWdH\nArd2boK/p71DFezjUeV1lhxIoW9MGF5uZkPyvhiXZZtTcogOrNTmVpGsOOX9+dOuE9zSsXFFm73t\nbT6cmY9N0+nVJBQAb3dLvWhzZbuTsogO8qVRkK+9/e2iWbE/6YzxC/YkENc+2sAMjdGt0xUE+Pud\nO7CesLRugzUxES05GaxWSpb/jkeffk4xemHFmV/l6XXa1/EcNJji5b/Xaq41xbtTC0qOJVN6PBW9\nzEr2L6sJuLqnU0z++l3oxaUAFG7bj1uU/bNbcjSJ0mP2v2nWtCysGScxB/vj6szNW6OlJKKl2f+d\ny9b+jnu3vlXivMZMonjebPSy0oqVpSWODodydwe9/swQ6B/Xj/k/LARg99a9+Pn7EhIeUiXurw6I\n2WLG4uaGXt7GJs2i2bp+OwB/rNrMoOsHGJO4i9GUcQ9XVZ05IZOAT4Dbyx8fA1OBtUqpcbWYm4NX\nZDBFiZm6W7EDAAAgAElEQVSO5aLkLLwig06JCaIoKQsA3aZRlluIe7CvU0yj4b04/pNzJ6TbG/cx\nZMnLtHlkZC1lf/HS8oqJ9Kv4IxXh70VaXpFTTHxWPvFZ+Yz/YgXjPlvO2sP24VctIwJYeySVojIr\n2YUlbIpPJzXXeV9XlJZ/Spv9PEkrH3Lzl/jsAuKzChj/9TrGfbWWtUfTqrzOon1JXNumQa3nWxMu\nzzaXEFG5zb6epOeXOMXE5xSQkF3IhG83cOfs9awtH5qUkF2An4cbj/2yjTFfreWNVfuwafXnDzlA\nWm4Rkf5n/2z/JSmngKScQno0Nez8j7hA5tBQtPSKz6aWno4pNLRKnNeIkYR8+Q2+904m/923qmz3\nHDCQ4t+X1WquNcUtMoSy5AzHcllyBm6RVb+Y/iX41qvJW7Glynrvji1Q7hZK41NqJc+aZAoOQ8tM\ndyxrWemoEOcKtDmmBaaQMKxbN1TZ39y8Df6vf4b/a59R+PHr9aIKAhAWGUZKUsX7OzU5nfCoqu9v\ngHdmv8aSXb9QmF/Isl9XAHB4/1H6x9kr90OGDSSigRzTLlfV6YRYgDa6ro/WdX000Bb7pP6e2Dsj\nVSil7lVKbVZKbV5S6Bpl5ODOsdiKSsndf8Kx7o8H3mfJoGmsGPkCoT1b0/jmfmd5Bddm03QSsvL5\n5I6rmHljD174bRu5xaX0aRZBv9hIxn++kmlzN9GhYQgmkwt3i8+DTdNJyC7gkzG9mHlDZ15YtIvc\n8vk9AOn5xRxKz6sXw5Kq67Jtc04BH9/cgxnXdeTFJXvIKy7DqulsS8zmkStb8dXY3pw4WcTPexPP\n/YL11KI9xxnSpiHmS+TzK6Bo3lwyx40l/+MP8b7jTqdt
ltZt0ItLsB1z7eHCFyLoxgF4X9GctA/n\nOK23hAfR+I1HSHj87XpVGTgjpfAa/wBF//33aTfbDv1J7qN3kTvtPjxvvB3c3A1OsPb97bbHiOs0\nEncPN7r36wLAC4/O5OYJI/ly0Sd4+3hRVlp2jle5NGkowx6uqjqdkGhd1yuPj0grX5cFnPado+v6\nR7qud9N1vdvV3lUn1p6vopQsvBpWnFHxigqmKCX7lJhsvBoEA6DMJtz8vSnNyq9oxMjeHJ+7zmmf\n4vLXsBYUkzBnHcGdYi8619oQ7udJSqWzo6m5RYT7OZfvI/y86N8yCjeziYaBPjQJ8SWhvP339GvN\n9/cM5sOx/dDRaXJKhcgVhfue0ua8YsJ9PZ1iIvw86d88orzN3jQJ8iEhu2IM8eL9yQxsYd9eH1ye\nbfYgtXKb84sJ8/U4JcaT/rHh9jYHeNMkyJuEnEIi/DxpGeZHo0BvLCYTA2PD2ZfmmheWOJNwfy9S\ncs/+2f7Lwj0niGt36Q3FuhTZMjIwhVWc3TWFhaFlZJwxvmT5sirDtTwHDqJ4ef2oggCUpWQ6hlcB\nuEWFUpaSWSXOt29HIh68maN3/xO91OpYb/L1otln00l+9SsKt+03JOeLpWWlY6pU+TAFh6FXqozg\n5Y05Ogbf597E/71vsbRoi+/UlzA3c76wjJaYgF5chDk6xqjUz9vNE27k6yWf8vWST8lIyySyUvUi\nIiqMtOQzv79LS0pZuWgN/Yfa3+PxhxJ4cMxjjBt6N4vmLiMx/tI9eSTOrjrfVFYopX5VSo1XSo0H\n5pWv8wEMmSGZvf0IvjGReEeHodzMRI/oRfIi5zJu8qKtNLnlKgAa3tCDtDV7KjYqRaNhPTk+t2Io\nljKbHMO1lMVM1NWdnaokrqRdgyASsvJJzCmgzKaxaO8J+reMcooZ2CqKzfH2g0B2YQnxmfk0CvTB\npunklE/kPZB6koNpufRu5vqlz3ZRASRkF5CYU2hv874k+jePcIoZ2CKCzcftf+SyC0uJzy6gUWDF\nlZEW/ll/hiXBZdrmyAASsgtJPFne5v0pDDjl/TmweTibj9uHWmYXlRKfXUjDAC/aRQSQV2Ilq9A+\nznrT8SyaBfsY3oaL4fhsZ5d/tvccr/LZBjiakUtucSkdG515eItwHdZ9+7A0bIQpMhIsFjwGDqJk\n3VqnGHPDho7n7r16Y0us9PdHKTwGDKxXnZDCHQfxiGmAe3QEys1C0LAryV3yh1OMV7tmRM+4nyOT\n/ok186RjvXKzEPPR02T/bzkn56879aVdlu3QfkxRjTCF2/+d3foOonRzpfwLCzg5aQS5D4wh94Ex\nWA/uJX/WP7Ad2W/fx2Sfw2YKjcDcoDFauusOQfvh85+4/eqJ3H71RFYsWM11N8cB0L5LW/Lz8slM\nc+5wenl7OeaJmM1m+g7uzbFDCQAEhQQCoJRi0sN38r//zjOwJa5DN/DhqqpzdawHgFHAX6dp/qvr\n+o/lzwfWSlan0G0a25/+nCtnT0WZTRz7diW5BxJp+8RosnccJXnxVo7OXkGPd6YQt+41SnMK+GPy\nO479w3q1pjApi4KEijMUJnc3rpw9DWUxo8wm0lbv5shXrjkB0GIyMW1oJ6bMXoum6Yzo2ITmYf68\nv3IvbaMCGdCyAX2aRbD+SBqjPlyCSSkeGdyeQG8PSqw2Jn5pn7Dv427hpeHdsJhc/yy5xWRi2pD2\nTPlxo73NVzSieagf76/ZT9vIQAY0j6BP0zDWH81g1Kcr7W3u34bA8on5iScLSckromt0/fnSdrm2\neeqgttw/ZzOarjOiXSNiQ/14f91B2kYEMCA2nD5NQlkfn8GoL1ZjVoqHr2rlaPOjV7Vi8v82ouvQ\nJsKfUVfUr0qBxWRiWlwnpnyz2t7+jk1pHh7A+yv20DYqiAGt7B3KhXuOE9cuut5d/au6nnh2Jpu2\n7SQnJ5fBI+/g/knj
GD1saF2ndeE0G3nvvEngrFdRJhNFC+Zjiz+Gz4SJlO3fR+n6dXiNHIV7l67o\nVit6fj65s2Y4dnfr0BEtLc0+sb2+sGmcmP4hzf77HMpsIuv7pRQfPE7ko2Mp3HmI3KUbafD0BEze\nXsS8bx/JXZqUztG7XyLwhn749miHJdCP4JsGAZDw+FsU7XXxoWiajcL/vIXvP/4FJhOlyxegnTiG\n5613YTu8n7LNZ+5QWVpfgefIseg2G2gahZ+8iZ538ozxrmTtsvX0HdyLueu/pbiomOcfqXjvfr3k\nU26/eiJe3p68/sUM3N3dMZkUm9duc3Q2ht44hJsnjAJg+fyV/Pzt/Dpph6h7Sj/HuEul1LW6ri84\nZd1kXderdTH3H6Nud+VOWK24flbjuk7BWKUl544R9Z5urR+TJmuS8nX9oYs1zRI3qa5TMFz2rXfV\ndQqGSzoUUNcpGK5Jj/o1XPNiXb3m8jtmA2xOXl0vztZ81eAOw74f35H0lUv+TqpzSvz/lFKD/lpQ\nSj0JXFp3yhJCCCGEEMIgcone6g3HGg78qpR6AogDWiOdECGEEEIIIcQFOmcnRNf1DKXUcGApsAW4\nST/XGC4hhBBCCCHEaWl1nYALOGMnRCmVh/OkenegGXCTUkrXdd31b2cqhBBCCCGEcDln7ITouu5n\nZCJCCCGEEEJcDmRIUfUmpgshhBBCCCFEjanOxHQhhBBCCCFEDXHlq1YZRSohQgghhBBCCEOdsxKi\nlAo+zeo8XdfLaiEfIYQQQgghLmlydazqVUK2AunAAeBg+fNjSqmtSqmutZmcEEIIIYQQovYopeKU\nUvuVUoeUUtNOs91DKfVd+fY/lFJNa+LnVqcTsgS4Ttf1UF3XQ4BrgV+B+4H3ayIJIYQQQgghLhea\ngY+zUUqZgfewf79vC9ymlGp7StgkIFvX9ebAG8CsC2y2k+p0Qnrpur7orwVd1xcDvXVd3wB41EQS\nQgghhBBCCMP1AA7pun5E1/VS4FtgxCkxI4Avyp//CAxWSl301PrqdEKSlVJTlVJNyh9PAqnlPScZ\n0iaEEEIIIcR50JVxD6XUvUqpzZUe91ZKpSFwvNLyifJ1nC5G13UrcBIIudjfQXUu0TsWeBaYW768\ntnydGbjlYhMQQgghhBBC1A5d1z8CPqrrPE51zk6IrusZwN/OsPlQzaYjhBBCCCHEpc2FhhIlAtGV\nlhuVrztdzAmllAUIADIv9gefcziWUqqbUmpO+dWwdv71uNgfLIQQQgghhKhTm4AWSqkYpZQ7MAb4\n+ZSYn4Hx5c9vAn7XdV2/2B9cneFYXwNPALtwqY6bEEIIIYQQ4kLpum5VSj0ILMI+1eJTXdf3KKVe\nADbruv4z8B/gS6XUISALe0flolWnE5JenoAQQgghhBDiIrnSWX1d1+cD809ZN73S82Lg5pr+udXp\nhDyrlPoEWAaUVEpoTk0nI4QQQgghhLj0VacTchfQGnCjouOmA9IJEUIIIYQQ4jxd9ISKS0B1OiHd\ndV1vdaE/INN80fcyqXdKFv1R1ykYyuO6vnWdgjDA5sf21XUKhvMw2eo6BcPFfHZXXadguKDvPqvr\nFAznt+nXuk7BcKbG7eo6BUNt73xnXacgxFlVpxOyTinVVtf1vbWejRBCCCGEEJc47fI7R19FdToh\nvYDtSqmj2OeEKEDXdb1DrWYmhBBCCCGEuCRVpxMSV+tZCCGEEEIIcZlwpatj1ZXq3DE93ohEhBBC\nCCGEEJeH6lRChBBCCCGEEDVEKiFgqusEhBBCCCGEEJcXqYQIIYQQQghhILlPiFRChBBCCCGEEAaT\nSogQQgghhBAGkvuESCVECCGEEEIIYTCphAghhBBCCGEguTqWVEKEEEIIIYQQBpNOiBBCCCGEEMJQ\nMhxLCCGEEEIIA8kleqUSIoQQQgghhDCYVEKEEEIIIYQwkCa1EKmECCGEEEIIIYwllR
AhhBBCCCEM\nJJfolUqIEEIIIYQQwmAuXQmJHtCBPs+PQ5lN7Ju9gu3v/eK03eRuYdCbkwntEENxdh5Lp7xL/okM\nADo9MIzWtw1At2msnf5fTqzc5dhPmRSj5r9IQUo2Cye85ljf/cmbaXZDD3Sbxt4vl7H708XGNLQa\nLB264zXuQTCZKF0xn5JfZp82zq37lfg8/Dx5z0zGdvSAY70KCcf/lc8o/t8XlMz/3qi0L8raQ8m8\nsmg7mqZzY+cYJvZr47T9X4u2selYOgDFZVayCkpYM/VGAO7/ehU7T2TSuXEo79x2peG5X6jLsc1B\nAzsR++JdKLOJlK+XcfzduU7bA3q1odkLE/Bt24Q/J79Jxq8bHNs8GobS8rXJeDQIQQd23/4yJcfT\nDW7B+QsY0JkmL05EmUykzV5K8rs/OW3369mWJi9MxLtNEw5NeZ2s39Y7tkU/M47AwV1RJhMnV+0g\n/v/+Y3T65829ew98H/gbmEwUz/+Nwm+/cdruecNwvEfciK7Z0IuKyHvjVWzx8XgMHoL3LWMccZZm\nsWRPvgfr4UNGN6HGPfPy66xau5HgoEDmfvVBXadTI9b+Gc8rc9ag6Ro39mrLxCFdnbb/66c1bDp4\nAig/fuUVsWbmPQC88fM6Vu89hq5Br1aNeHLUlSilDG/DxVizdQ+zPv0BTdMZNaQPk0YNddqelJbJ\n9Pe+Ijs3jwBfH17++wQiQ4PqKNuL88brL3Bt3CAKi4qYNOkRtm3fXSXGzc2Nt9/6J/3790HTNP5v\n+ix++mk+V/bryWuvPU+HK9ow9o77mTPntzpoQd2TGSEu3AlRJkXff47nt7EzKUjOYtRvL3Bs8RZy\nDiY5YlqPGUDJyQK+7fcYscN70evpMSy9/10CWzSg+YhefD9oKj4RQVw/exrfXfU4umb/J28/KY7s\nQ0m4+3o5XqvVLVfh2yCY7/o/CbqOZ4i/4W0+I2XCa8LfKZjxBFpWOn4v/puyrevQEuOd4zy98Igb\njfXQ3iov4XXHFMp2bDQo4Ytn0zRmLNjKB3f0J8Lfi9s/WUr/Vg2IDQtwxDwxtLPj+eyNB9mXku1Y\nHt+7FcVlNn7cetjQvC/G5dhmTCaaz5jErltepCQ5i84LZ5C5eDOFB044QooTMzjw9/dodP/wKru3\neudBEt6cQ86qnZi8PUGvBwVuk4mmL9/DvjHPU5qcSbv5r5CzaBNFByvaXJKYzuGH3yFq8ginXX27\ntcKvext2DX4UgLZzX8Kvdzvy1u8xtAnnxWTC76GHyX7yMbT0dILe/5CS9WuxxVccv0p+X0rxrz8D\n4N67D76TH+DkU09SsmwpJcuWAmCOaUbgC/+8JDogACOvu5qxo4fz9Iuv1nUqNcKmacz4cRUfTBlO\nRKAvt7/+A/3bxxAbGeyIeeLGfo7ns1ftZN8J+wmD7UeT2X40mR+etHc473prDpsPJdG9RUNjG3ER\nbDaNlz/+jo+efYiIkEBue3IWA7p3IDY6yhHz2hdzGDagJyMG9uKPXft5++t5vPz3CXWX9AW6Nm4Q\nLZrH0LptP3r26MJ7786gT79hVeKefuoh0tMzadvO3qEMDg4EIOF4IpPufoRHH5lsdOrCxbjscKzw\nTrHkHkslLyEdrczGoXkbaHqN81mVptd04cAPqwE48ttGGvRrV76+K4fmbUArtZJ3PJ3cY6mEd4oF\nwCcqmCaDO7HvmxVOr9X2zsFseXMu6PaOSnFmbu028DyYY1ujpSaipSeDzUrpht9x69qnSpzXTRMp\n/mU2lJY6rXfr2hctLQXtxDGDMr54uxOziA7ypVGQL25mM0PbNWbF/qQzxi/YnUBcu8aO5Z7NIvD2\ncNk+9mldjm3269ycoqMpFCekoZdZSZ+7lpCh3ZxiSo6nU/BnguMkwl+8WzZCmc3krNoJgFZYjFbk\n/N53Rb6dm1N8LJmShFT0MitZ89YQNLSHU0zpiX
SK/owH7ZROla5j8nBDuVsweVhQbmbK0nMMzP78\nWVq3wZqYiJacDFYrJct/x6NPP6cYvbDQ8Vx5ep36EgB4DhpM8fLfazVXI3XrdAUB/n51nUaN2R2f\nRnRoAI1CA3CzmBnauQUrdh09Y/yCrQeJ69oSAIWitMxGmVWj1GrDqmmE+J3+feCqdh86RuOoMBpF\nhuLmZiGuX1eWb9zhFHPkRAo9r7C3uUf7lizfuLMuUr1ow4YN5cuvfwTgj41bCQgMIDIyvErchPFj\nmDnrHQB0XScz037SLD7+BLt2/Yl26vHtMqMZ+HBVLtsJ8Y4KIj85y7FckJKFT5Rz2dInsiJGt2mU\n5hbiGeSLT1QQBafs612+b5/n7mDDS7PRdecvNP5Nwokd1pNRv73AtV8+gX9MRG017byZgkPRMtMc\ny1pWBqagMKcYc9MWqJAwrNv/cN7ZwxOPYWMonvOFEanWmLS8IiIDvB3LEf5epOUVnTY2KaeApJwC\nesRUPQjWJ5djmz2igilJynQslyRn4R4VUq19vZpFYc0toO1/HqfLkleImT4OTC57SHNwjwyhtFKb\nS5MzcYsKPsseFfK3HCB33W66bPsPnbf9h5MrtlN8KLG2Uq0R5tBQtPRKx6/0dEyhoVXivEaMJOTL\nb/C9dzL5775VZbvngIEU/76sVnMVFy7tZD6RQb6O5YhAX9JOFpw2Nikrl6SsXHqUVzo6xkTSvUVD\nhkz/jKunf07v1o1pFlm9z4SrSM3MISKk4jtKREgQaVknnWJaNm3I0g3bAVj2x3YKiorJycs3NM+a\n0LBBJCeOV5wgSzyRTMMGkU4xAQH20SQvPPckG/9YyLezPyQ8vOrnXlzeqvUXWyl15+ketZ1cTWs8\nuBNFGblk7DpWZZvZ3Q1bSRlzrp/Ovm+WM+DVe41P8EIphdftUyj++t9VNnmOnkDJgh+hpLgOEjPG\noj0JDGnTCHM9+AJaUy7HNp9KWcwE9GzDkef/y9a4aXg2Dify1gF1nVat8mgaiWfzRmzreg/butyD\nf98r8OvR5tw71gNF8+aSOW4s+R9/iPcdzn9eLK3boBeXYDt25jProv5YtPUQQzrGOo5fCek5HEnN\nZvHz41n8/Hg2HTjB1sNnrgLXV4+NH8WWPQe55bGX2bznIOHBgZgu0WO4xWImOroB6zZspkfPODZs\n2MIrs6bXdVouRVPGPVxVdd/93Ss9rgSeA6oO0C6nlLpXKbVZKbV5dcHBC0qsMDkb30pnB30igylI\nznaKKUipiFFmE+7+3hRn51OQnI3PKfsWJmcT2b0lTa7pwtj1bzDkvQdo0Lctg96eAkB+chZHF2wG\n4OiCzQS3ib6gvGuDlpWBKaTijLcpOBQtu9LkW09vTNEx+D7zBv5vfoO5eVt8Hvsn5piWWGJb43Xb\nffi/+Q0ecaPxGDEW96tH1kErzk+4nxcpJyuGaKTmFhF+hvL8wj3HiWvf+LTb6pPLsc0lyVl4NKio\nfHhEBVOanHmWPSrtm5RJ/p5jFCekgU0jc+EmfDvE1FaqNaY0JRP3Sm12jwqhrFLl9myCr+1J/tYD\n9qFnhcWcXL4V326taivVGmHLyMAUVun4FRaGlpFxxviS5cuqDNfyHDiI4uVSBXFl4QG+pGRXnNVP\nzcknPMDntLELtx0krksLx/Lvu47QoUkE3h7ueHu407dNE3YcS6n1nGtSREggqZkV31FSM7MJDw5w\nigkPDuSNqffx/WtP89BY+1cofx9v6oMpk8ezedNiNm9aTHJKKo2iGzi2NWwURWKS879XZmY2BQWF\n/PTTfAB+/N+vdO7c3tCcheurVidE1/W/VXrcA3QBfM8S/5Gu6910Xe92pU+LM4WdVdqOIwTEROIX\nHYbJzUzzEb2IX7LVKSZ+yVZa3my/ClCz63uQtHavY33zEb0wuVvwiw4jICaStO2H2Tjze77u/hDf\n9H6EpQ+8R9
Lavfz+kL16cGzRFhr0sZ9RjOrdhpNHXOcAaDuyD1NkQ0xhkWC24N5rEGVbKq6WQ1EB\nuZNvJPfhseQ+PBbbob0UvPYMtqMHyH/xYcf6koX/o2TeN5QumXvmH+Yi2jUMJiErn8TsfMpsNhbt\nSaB/ywZV4o5m5JJbVErHRtUbwuPKLsc2520/hFezKDwbh6PcLISN7Evm4s3V3PcwFn9v3MovIhHY\nrz0FlSa0u6r87YfwjInCI9re5uAR/chevKla+5YkZuDfuy2YTSiLGb9e7ZwmtLsi6759WBo2whQZ\nCRYLHgMHUbJurVOMuWHFBGT3Xr2xJVZqk1J4DBgonRAX165xOAkZJ0nMzKXMamPRtoP0b9+0StzR\n1GxyC0vo2LRi+E5UoB9bDidhtWmU2WxsOZxIs4j6ddWods2bEJ+cxonUDMrKrCxcs4UB3Ts4xWTn\n5jvmQXwyZxE3Du5dF6lekH9/8AXdul9Dt+7X8PPPixh3+00A9OzRhdyTuaSkpFXZ59ffljCgv33+\n6qCB/fjzzws7KX2p0tANe7iqC53FWgA0q8lETqXbNNb83xdc9/WTKJOJ/d+tJPtAIt0eH036jqPE\nL9nKvm9XMvCtyYxZ8xolOfksvf9dALIPJHL4lz+45fdZ9td55vMqk1pPtf29Xxj0zv1ccc+1WAuK\nWfnEJ7XZvPOjaRR9/g4+U2eByUzpygVoicfwHD0B69EDWLeuq+sMa5zFZGLatV2Y8vUqNF1nRKcY\nmocH8P7y3bRtEMSAVvYvLQvLJ2efeinHuz77nWOZeRSWWrnmjV94blh3+jSPPN2PchmXY5uxaRx6\n+j+0n/0P+yV6Zy+ncP8Jmjx5K3nbD5O1eDO+nWJp9+kTWAJ9CLm6K02euIUt/R8FTePI819yxQ/T\nUUqRt/MIKV/Vgy+qNo1j//iEVt9MR5lNpH+7jKIDx2n4xBgKdhwmZ/EmfDo2p+V/pmIO9CHw6u40\nfPxWdg18mKxf1+Pf9wo6/P4m6Do5y7eRs6R6nbY6o9nIe+dNAme9ijKZKFowH1v8MXwmTKRs/z5K\n16/Da+Qo3Lt0Rbda0fPzyZ01w7G7W4eOaGlp9ontl5Annp3Jpm07ycnJZfDIO7h/0jhGDxt67h1d\nlMVsYtroK5nywc9oms6Inm1oHhXC+/P/oG3jcAa0t1cpF261V0EqH7+GdIpl48ET3DzrW5SCPq0b\n07+961c1K7OYzTx9961MeeFdbJrGyMG9ad64Ae/N/oW2sU0Y2KMDm3Yf4O2v56FQdGnbnH/ce2td\np31B5i9YRlzcIPb/uZbCoiLuvvtRx7bNmxbTrfs1ADz19Et88dnbvPbac2SkZzHpnkcA6Na1Iz/+\n8B+CggK44fqreXb6Y3TsNKhO2iLqljp1gvZpg5T6hYpLGpuAtsD3uq5PO9e+Hza6w3W7YLXk1v6X\n3ljWs/G4rm9dpyAMsOmxfXWdguE8TLa6TsFwMW2qNxzuUhL03Wd1nYLhrJt+resUDGdq3K6uUzCU\nT+d6N3W3RlhLE114FkSFfzQda9j345eOfeOSv5OzVkKUUs2BCKDyhcytgAIurdNSQgghhBBCCEOc\na07Im0CurusrKz3WAifLtwkhhBBCCCHEeTnXnJAIXdd3nbpS1/VdSqmmtZKREEIIIYQQlzBXvomg\nUc5VCQk8y7b6dTtTIYQQQgghhEs4Vydks1LqnlNXKqXuBrbUTkpCCCGEEEJcuuQSvecejvUw8JNS\n6nYqOh3dAHfgxtpMTAghhBBCCHFpOmsnRNf1VKCPUmog8NetLn/Tdf33Ws9MCCGEEEKIS5Dr1ieM\nU62bFeq6vhxYXsu5CCGEEEIIIS4DF3rHdCGEEEIIIcQFkKtjnXtiuhBCCCGEEELUKKmECCGEEEII\nYSBXvmqVUaQSIoQQQgghhDCUVEKEEEIIIYQwkNRBpBIihBBCCCGEMJhUQoQQ
QgghhDCQXB1LKiFC\nCCGEEEIIg0klRAghhBBCCAPpMitEKiFCCCGEEEIIY0knRAghhBBCCGEoGY4lhBBCCCGEgWRiugGd\nkCs9smv7R7icjG1udZ2CoUJZW9cpCAM0baTqOgXDWUvMdZ2C4ZIOBdR1Cobz2/RrXadgOEv3G+o6\nBcNZf/mgrlMwVExAZF2nIMRZSSVECCGEEEIIA2kyMV3mhAghhBBCCCGMJZUQIYQQQgghDCR1EKmE\nCCGEEEIIIQwmlRAhhBBCCCEMJHNCpBIihBBCCCGEMJhUQoQQQgghhDCQ3CdEKiFCCCGEEEIIg0kl\nRLA08V0AACAASURBVAghhBBCCAPpMidEKiFCCCGEEEIIY0klRAghhBBCCAPJnBCphAghhBBCCCEM\nJpUQIYQQQgghDCRzQqQSIoQQQgghhDCYdEKEEEIIIYQQhpLhWEIIIYQQQhhIJqZLJUQIIYQQQghh\nMKmECCGEEEIIYSBNl4npUgkRQgghhBBCGEoqIUIIIYQQQhhI6iD1tBPic1VXIv/vXpTZRPZ3i8n8\n8Aen7cETRxJ0y1B0mw1b1kmSpr5JWVK6Y7vJ14vYhR+Qt2Q9Kc9/YHT6F8S7XzdCn5oMZjO5Py4g\n55PvnbYHjh+F/01x6FYbtuyTpD3zOtakNLx6dCR02n2OOLeYaFIff5mCZeuNbsJ5s3Tojte4B8Fk\nonTFfEp+mX3aOLfuV+Lz8PPkPTMZ29EDjvUqJBz/Vz6j+H9fUDL/+9Pu62ouxzZ79u5O0OMPgMlE\nwdz55H7xrdN239E34HvzCLBpaEVFZL30Btaj8ZgC/Amd9SzubVtR8Osisl95p45acP68+nYjZOoU\nlNlE7pyFnPzPd07bA+4cjd+oOHSbDS3rJOnTX8OanAZA8COT8L6yJwDZH35NwaKVhud/vvz6d6Hh\ns3ejzGYyv11M2r//57Q97O4RhIy5Gt2qYc06ScITb1OWmI5X2xgavTQFk6832DRS3/2enF/X1FEr\nzs/aP+N5Zc4aNF3jxl5tmTikq9P2f/20hk0HTwBQXGYlK6+INTPvAeCNn9exeu8xdA16tWrEk6Ou\nRClleBtq2jMvv86qtRsJDgpk7lf142/vuaw9ksorS3ehaXBjx8ZM7N2ySsyiPxP5cM0+UIqW4f7M\nHN4NgDeX72H14VQA7u3biqFtGhqa+8X4v5efoP+QvhQVFjP1oefYu3PfGWM/+PJ1ops05PqrbnWs\nG3f3rdw+8RY0m40VS9bwygtvG5G2cDH1rxNiMhH13BTixz9DWUoGzX56g7xlGyg9dNwRUrz3CEdG\nPoxeXELQ2OsInzaRxIdmObaHPTKOwk276yL7C2MyEfbMAyTe/RTW1Ayiv3uHguUbKDuc4Agp+fMw\nx2/+G3pxCf633kDIY3eT+tjLFG3cwfFR99tfJsCPJgs/o3Dt1rpqSfUpE14T/k7BjCfQstLxe/Hf\nlG1dh5YY7xzn6YVH3Gish/ZWeQmvO6ZQtmOjQQnXgMuxzSYTQVMfIu2BJ7GlphP53/cpXLUe69GK\nNhcs/J38//0KgNdVvQl6ZDLpDz2FXlLKyX9/hlvzprjFxtRVC86fyUToPx4k+d5pWFMyaPjtOxQu\nX0/Zkcqf50PkjnkQvbgEv1tuIPjRu0l74mW8ruyBe5sWnLh5MsrdnahP/0Xhmk3oBYV12KBzMJlo\n9OJ9HL59OmUpmbT8+TVOLt1IycGKY3bRniPsv+FR9OJSQu64lgZPTSD+wX+hFZUQ/8gblB5LxhIe\nTKvfXidv1TZsuQV12KBzs2kaM35cxQdThhMR6Mvtr/9A//YxxEYGO2KeuLGf4/nsVTvZd8J+omz7\n0WS2H03mhyfHAHDXW3PYfCiJ7i3qzxfUMxl53dWMHT2cp198ta5TqRE2TWfG4p18MKYPEX5e3P75\nSvq3iCQ21N8RE5+Vz6frD/L5uCvx93Qn
q6AEgFWHUvgz9STfTRxAmVVj0jdr6dssHF8PtzpqTfX1\nH9KXJs2iGdJjJJ26tueFV57iprjxp4295vqBFBYUOa3r2bcbg+P6M3zAGEpLywgODTIibZejSS2k\n/s0J8erYktL4JMqOp0CZlZO/rsJvSC+nmMINO9GL7R/0ou37cIsMdWzzbN8cS2gg+Wu2GZr3xfC8\nohVlCUlYT9jbnL9gBb6DejvFFG3c4Whz8c4/sUSEVnkd32v6Ubh6kyPOlZljW6OlJqKlJ4PNSumG\n33Hr2qdKnNdNEyn+ZTaUljqtd+vaFy0tBe3EMYMyvniXY5vd27XGejwRW2IyWK0ULl6Od3/nNlf+\ngq28PB01bL24mJIdu9FLyoxM+aJ5VP48W60ULFiJz0DnNhdvqvg8l+z8E0tEGADusU0o3rILbBp6\nUTGlB47i3a+b4W04H96dWlByLJnS46noZVayf1lNwNU9nWLy1+9CL7a/nwu37cctyn78KjmaROmx\nZACsaVlYM05iDvbH1e2OTyM6NIBGoQG4WcwM7dyCFbuOnjF+wdaDxHW1n0FXKErLbJRZNUqtNqya\nRoifl1Gp16puna4gwN+vrtOoMbuTs4kO8qFRoA9uZhND2zZkxcEUp5g5O+K5tWsM/p7uAAT7eABw\nJDOPrtEhWEwmvNwttAz3Z+2RNMPbcCGGxPVn7ne/AbB9y278AnwJO813Dm8fL+6acgfvv/6J0/qx\nd93ER29/Tmmp/didlZFd+0kLl1TtTohSqp9S6q7y52FKqTo59WiJCKEsOcOxbE3JwC0i5IzxgTdf\nQ/7KzfYFpYh4ahKpM/5T22nWKHNECGUpFcPJrCkZmMOrfuD/4j8qjsLVm6qs9712AHm/raiNFGuc\nKTgULbPigKxlZWAKCnOKMTdtgQoJw7r9D+edPTzxGDaG4jlfGJFqjbkc22wOD8WWWum9nZZ+2ve2\n780jiJr7JYF/u5fsV981MsUaZwkPxVr585yajvksxzC/UXEUrrF/nkv3H8G7bzeUpwemQH+8enR0\ndFBclVuk8zG7LDkDt8gztzf41qvJW7Glynrvji1Q7hZK41NOs5drSTuZT2SQr2M5ItCXtJOnr94k\nZeWSlJVLj/JKR8eYSLq3aMiQ6Z9x9fTP6d26Mc0qVVCE60jLKyayUgcxws+LtLxip5j4rHzis/IZ\n/+Vqxv13FWuP2IdftQwPYO2RNIrKrGQXlrApPoPUXOeKgauKiAonOSnVsZySlEZEZNXj0MPTpvDp\n+19RVOT8O4mJbUy3Xp35ceEXfD3vI67o1LbWc3ZFuoH/uapqdUKUUs8CU4Gnyle5AV+dJf5epdRm\npdTm73MTzhRW6wJGDMTzihZkfmwffxx0x/Xkr9yMNSWzznKqbb7DBuHZvgXZn/7otN4cGoxHy6YU\nrt1cR5nVMKXwun0KxV//u8omz9ETKFnwI5QUn2bHeuxybHO5/B/mkTxyHDnvfIz/pDvqOh3D+N4w\nGI+2Lcn5zD7vrWj9FgpXb6TBl28S/srTFO/4E127dG55FXTjALyvaE7ah3Oc1lvCg2j8xiMkPP42\nXGKXtVy09RBDOsZiNtn/HCek53AkNZvFz49n8fPj2XTgBFsPJ9VxluJC2TSdhKx8Phnbl5nDu/LC\ngu3kFpfRJyacfrHhjP9yNdN+3kKHhsGYTPV/3s9f2rRvSeOmjVgyf3mVbWazmYAgf26KG8+s597i\nrU9m1kGGwhVUd07IjUBnYCuArutJSqkz1lR1Xf8I+Ahgb+z1NfoXw5qa6SjVA1giQylLrdqp8OnT\nidD7b+XY2KnopVYAvDu3xrt7O4Juvx6TtyfKzQ2tsJi0f31ekynWOFtqJm6VzjJYIkOxpWVUifPq\n3Znge28jcfzjUOY8RMU37iryl64Dq63W860JWlYGppBwx7IpOBQtu+LsMZ7emKJj8H3mDQBUQDA+\nj/2T
gteewRLbGvceV+F1230ob190XUMvK6V0yVyjm3FeLsc229IyMFc6k28JDzvte/svhYuXE/zU\n38kyIrlaYk3LwFL58xwRhu00xzCvXp0JvOc2ku5y/jznfDybnI/tFywInzWNsvgTtZ/0RShLcT5m\nu0WFUnaaE0G+fTsS8eDNHLrlaccxG+wXEmn22XSSX/2Kwm37Dcn5YoUH+JKSne9YTs3JJzzA57Sx\nC7cd5KmbrnIs/77rCB2aRODtYR++07dNE3YcS6FLbIPaTVqct3A/T1LyKqoXqXlFhPt5OsVE+HnR\nvkEgbmYTDQN9aBLsS0J2Pu2jgrinTyvu6dMKgGk/b6ZJsC+u6vaJN3PruBsB2LltL1ENIhzbIhuE\nk1qpugvQuVsH2ndqy/Itv2CxmAkODearuR9yx8j7SElOY/Gvy8tfaw+6phMcEkhWZo5xDXIBl87p\nowtX3eFYpbqu65SPxlZKnf5oaoCinQdwb9oQt0YR4GYh4IaryF/mPDTFs20zov75IMfvewFb5knH\n+sRHX+XglXdxqP9EUmd+ysmflrl8BwSgePd+3Jo0xNLQ3mbfawdQsHyDU4x7m1jCn32I5AefxZZ1\nsspr+F0/gPz5KwzK+OLZjuzDFNkQU1gkmC249xpE2ZZKV/QqKiB38o3kPjyW3IfHYju0l4LXnsF2\n9AD5Lz7sWF+y8H+UzPvG5b+Mw+XZ5tK9+3CLboi5QSRYLHhfM5CiVeucYizRFRNyvfr1oiwh0eg0\na1SJ4/Nsb7PPtf0pWOF8tTr31rGETv87KX+bjpZV6Q+zyYQpwH7+x71lDO4tmlG0rurQJVdSuOMg\nHjENcI+OQLlZCBp2JblLnI/ZXu2aET3jfo5M+ifWSsds5WYh5qOnyf7fck7OX3fqS7usdo3DScg4\nSWJmLmVWG4u2HaR/+6ZV4o6mZpNbWELHppGOdVGBfmw5nITVplFms7HlcCLNIi7Pibuurl1UIAlZ\nBSTmFFBm01i0N5H+zSOdYga2jGRzgr3TnV1YQnxWPo0CfbBpOjlF9nlQB9JOcjAtl94xrju08utP\nf2D4wLEMHziWpQtWMPLW6wHo1LU9ebn5pKc6nzz65vMf6XdFHAO7DmPMDZM4djieO0bar9S5dP4K\nepXPZWvarDFu7pbLrgMi7KpbCfleKfUhEKiUugeYCHxce2mdhU0j5fl/0/jzF1EmEzk/LqHkYAJh\nD99B0a6D5C/7g/BpkzD5eNLoHfvosbKkdI7f90KdpFsjbBrpL71Hg49fRplM5P60mNJD8QQ/eCfF\new5QuHwDoY/fg/L2IvKNZwCwJqWR/OBzAFgaRGCJDKNo0846bMR50jSKPn8Hn6mzwGSmdOUCtMRj\neI6egPXoAaxb688Xkmq7HNts08j61zuEvzMLzCYKfl5A2ZF4Au6bQOmf+ylatR6/W0bi0aMLWK1o\neflkPVdxpbsGP3+N8vFGubnh1b8vaQ9OdbqylkuyaWS8/C6RH7yMMpvI+2kRZYfjCXrgTkr2HKBw\nxQaCH7N/niNe+z8ArMlppD70LMpipsEXrwOg5ReS9tRMsLn4+TSbxonpH9Lsv8+hzCayvl9K8cHj\nRD46lsKdh8hdupEGT0/A5O1FzPtTAShNSufo3S8ReEM/fHu0wxLoR/BNgwBIePwtivaeeZK3K7CY\nTUwbfSVTPvgZTdMZ0bMNzaNCeH/+H7RtHM6A9vYplQu3HiSuSwuny+8O6RTLxoMnuHnWtygFfVo3\npn/7enT1t7N44tmZbNq2k5ycXAaPvIP7J41j9LChdZ3WBbOYTEy7pgNTvluPpuuM6NCY5mH+vL/q\nT9pGBTKgRRR9YsJZfzSdUR8vw2RSPDKwHYFe7pRYbUz8ajUAPh5uvDSsKxZT/bhW0Iola+g/pC/L\nNs6jqKiYaQ8959j28/JvGD5w7Fn3//Gbecx461l+W/UdZWVWnnzwub
PGX6rk6lig9GqOr1VKXQ1c\nAyhgka7rS6qzX00Px6oP3D2s5w66hIR2rl9XJxIXJnf/pTNeubqsJea6TsFwubme5w66xLT6YEhd\np2A4S/cb6joFw1l/uTTuTVJdHZ5cVtcp1ImD6VvqxR+rm5uMMOz78Q/x81zyd3LWSohSqpeu6xsA\nyjsd1ep4CCGEEEIIIU7Pla9aZZRz1f7e/+uJUsr1b7EthBBCCCGEcHnn6oRULt9cfjV6IYQQQggh\nRI0718R0k1IqCHtn5a/njo6Jruv1+UqZQgghhBBCGM7FLyliiHN1QgKALVR0PLZW2qYDzWojKSGE\nEEIIIcSl66ydEF3XmxqUhxBCCCGEEJeF6l6d9lJWPy5KLYQQQgghhLhkVPdmhVUopbbqut6lJpMR\nQgghhBDiUic3K7yISoh0QIQQQgghhBAX4oIrIUIIIYQQQojzJ1fHuohKiFLqo5pMRAghhBBCCHF5\nuJiJ6R/UWBZCCCGEEEJcJnQD/7sYSqlgpdQSpdTB8v8HnSamiVJqq1Jqu1Jqj1JqcnVeu1qdEKVU\nN6XUT+U/YKdSahfw+fk1QwghhBBCCFGPTAOW6breAlhWvnyqZKC3ruudgJ7ANKVUg3O9cHXnhHwN\nPAHsQoaxCSGEEEIIccHq0dWxRgADyp9/AawAplYO0HW9tNKiB9UsclS3E5Ku6/rP1YwVQgghhBBC\n1H8Ruq4nlz9PASJOF6SUigZ+A5oDT+i6nnSuF65uJ+RZpdQn2MswJX+t1HV9TjX3F0IIIYQQQmDs\nHdOVUvcC91Za9ZGu6x9V2r4UiDzNrv+ovKDruq6UOm3iuq4fBzqUD8Oaq5T6Udf11LPlVd1OyF1A\na8CNiuFYOiCdECGEEEIIIVxUeYfjjFe11XV9yJm2KaVSlVJRuq4nK6WigLRz/KwkpdRu4Ergx7PF\nVrcT0l3X9VbVjBVCCCGEEEKcQT2aYP0zMB6YWf7/eacGKKUaAZm6rheVXz2rH/DGuV64upfoXaeU\nalv9fIUQQgghhBD13EzgaqXUQWBI+fJfV879pDymDfCHUmoHsBJ4Vdf1Xed64epWQnoB25VSR7HP\nCVHYh4Z1ONeOm4urXE74kjdySHpdp2AwNyxdpY96qTuy9ERdp2C4g24edZ2C4W7qffn9O5sat6vr\nFAxn/eXyu9WXZVi1bl1wydg7bDJRzeLqOg1xBhd7/w6j6LqeCQw+zfrNwN3lz5cA5+wTnKq6nRB5\nF4szkg6IEEII4VqkAyJcXbU6Ibqux9d2IkIIIYQQQojLQ3UrIUIIIYQQQogaUI9uVlhrqjsxXQgh\nhBBCCCFqhFRChBBCCCGEMJCRNyt0VVIJEUIIIYQQQhhKKiFCCCGEEEIYSOaESCVECCGEEEIIYTCp\nhAghhBBCCGGg+nKzwtoklRAhhBBCCCGEoaQSIoQQQgghhIE0uTqWVEKEEEIIIYQQxpJKiBBCCCGE\nEAaSOohUQoQQQgghhBAGk0qIEEIIIYQQBpL7hEglRAghhBBCCGEwqYQIIYQQQghhIKmESCVECCGE\nEEIIYTDphAghhBBCCCEMJcOxhBBCCCGEMJAuNyusP52QBgM60P2FcSiTiUOzV7D7vV+ctpvcLfR7\nazLBV8RQkp3HqinvUnAig5BOzej9yiR7kIIdr/3E8YWbAWhzTxwtbhuAruvk7DvB2kc/QispM7pp\n1WJu1w3PWyajTGZK1yygdNH3p42zdO6H9+T/I//lB9HiD4LJjOedj2Bu3BxMZso2LKV04XcGZ39h\n1h5N51/L96LpOiPbRzOxZ2yVmMX7k/lg3UGUgpZhfsy4vjObEjJ5dcVeR8yxrAJmXt+JgS0ijUz/\nglyObQ4e2Inm/7wLZTaR/PUyEt6Z67Q9oFcbmr84Ad+2Tdh735uk/7rBsa1/0ncU/JkAQHFiBrvv\nnGVg5mfXaEAHej8/DmU2sX/2Cn
ac5pg14M3JhHawH7OWTXmX/BMZAHR8YBitbhuAbtNYP/2/nFi5\nC4D2d8fRuvyYlbXvBKse+whbSRkN+raj5zO3oUyKsoJiVj76EbnHUg1v85lYOvXA+64HwWSmZNlv\nlMz95rRxbj2vwvfxF8ideh+2I/sxN2+N932PO7YX//A5ZRvXGJV2jVmzdQ+zPv0BTdMZNaQPk0YN\nddqelJbJ9Pe+Ijs3jwBfH17++wQiQ4PqKNsLt/ZIKq8s3YWmwY0dGzOxd8sqMYv+TOTDNftAKVqG\n+zNzeDcA3ly+h9WH7e/Ze/u2YmibhobmXhueefl1Vq3dSHBQIHO/+qCu06lRL7/yDEOu6U9RYRF/\nmzKNnTv2VomZ99uXRESGUVRUAsDNI+8iIyOLCRPHMPGe27HZNAoKCnn0oWc4sP+w0U0QLqBedEKU\nSdHzpfEsuW0mhclZXDf/BY4v3sLJg0mOmBa3DaDkZAFz+z1G0+G96PqPMaya8i45+07w27X/h27T\n8AoP5IYlL3FiyVY8wwJoPfEafh44FVtxGVd98DdiRvTi8Per67ClZ6BMeN32AAVvPoWenYHPU+9g\n3bkBLTnBOc7DC/fBI7Ee+dOxytL1KpTFjYIXJoObB77PfUTZphXoma7zBeV0bJrOzGV7+PdNPYjw\n8+T2r9fSv3k4sSF+jpj47AI+/eMwn9/WG39PN7IK7Qe67o1D+O7OKwE4WVTK8E9X0qtpWJ2043xc\njm3GZKLFzEnsuOVFSpKy6LpoBhmLNlN44IQjpCQxg31/f4/oKcOr7K4Vl7J58BNGZlwtyqTo+8/x\nzB87k4LkLEb+9gLxi7eQU+mY1WrMAEpPFvB9v8doNrwXPZ4ew+/3v0tgiwbEjujFj4Om4hMRxHWz\np/H9VY/jFR5I+4nX8MMg+zFr8L//RrPhvTj4w2r6zZjA4olvkHMoiTZ3DqHzQyNY+ehHdfgbqMRk\nwnvS38l/8XG0rHT8ZnxA2ea1aCfineM8vfC4bjTWAxVfZmwJR8mbeh9oNlRgMP6v/oeTm9eDZjO4\nERfOZtN4+ePv+OjZh4gICeS2J2cxoHsHYqOjHDGvfTGHYQN6MmJgL/7YtZ+3v57Hy3+fUHdJXwCb\npjNj8U4+GNOHCD8vbv98Jf1bRBIb6u+Iic/K59P1B/l83JX4e7qTVWA/fq06lMKfqSf5buIAyqwa\nk75ZS99m4fh6uNVRa2rGyOuuZuzo4Tz94qt1nUqNGnJNf5rFNqVHp6vp2r0j/3rjeYYOuvm0sZPv\nfpzt23Y7rfvxh1/4/NNvAYi7dhAvzniKW0fdXet5uxqZmF5P5oSEdI4l71gq+QnpaGU2js3bQPTQ\nrk4x0dd04fAP9g5E/G8biezXDgBbcSm6TQPA7OHmdItKk8WM2dMdZTZh8XKnMCXbmAadJ3NMK7S0\nJPSMFLBZKdu8AkvH3lXiPEaMp3Th91BWWmmtDh6eYDKh3N3RbVb0okLjkr9Au1NyiA70plGgN25m\nE0NbRbHikHPH6aedx7mlUxP8Pe1/qIK9Paq8ztKDKfRtGoaXm9mQvC/G5dhm/y7NKTqaQnF8GnqZ\nlbS5awmN6+YUU3w8nYK9CaDVnwN2WKdYco+lkld+zDo8bwNNrnE+ZjW9pgsHyo9ZR3/bSMPyY1aT\na7pyeN4GtFIrecfTyT2WSlgne0VMWcxYKh+zUu3HLF0HNz8vANz9vChIzTGqqedkbt4aLSURLS0Z\nrFbK1v6Oe7e+VeK8xkyieN5s9MrHr9ISR4dDubvbG1rP7D50jMZRYTSKDMXNzUJcv64s37jDKebI\niRR6XmGvGvRo35LlG3fWRaoXZXdyNtFBPjQK9LEfv9o2ZMXBFKeYOTviubVrDP6e7gAE+9iPX0cy\n8+gaHYLFZMLL3ULLcH/WHkkzvA01rVunKwjw9zt3YD1z7XWD+X72TwBs2bSDgAA/IiKqf9IrP6/A
\n8dzbx6s+fqxFDakXlRDvyCAKkrIcy4XJWYR2dh6m4hUZRGF5jG7TKMstxCPIl5LsfEI7x9LntXvw\naRTKmoc+QLdpFKVks+eD+Yze+Ba24lKSVu4ieZVzb91VqMAQtOx0x7KenYE5prVTjCm6OaagMEp2\nb8T9mpsc661bVuPWsTe+r8xGuXtS/MMHUJhnWO4XKi2/mAg/T8dyhJ8Xu5Odv1jFZ9sPZBNmr0fT\nde7r3YK+Mc4HwkX7krmja9Naz7cmXI5t9ogMpiQp07FckpSFf5cW1d7f5OFG10Uz0W02Et6ZS8aC\nTbWR5nnziQoiP7nimFWQkkX4Kccs78ggCpIrjlml5ccsn6gg0rYedtrXvu4QOz+cz21/vIW1uJTE\nVbtILD9mrX7iE+L++zjW4jLK8oqYN/y52m9kNZmCw9AyK45fWlY65hZtnWLMMS0whYRh3boBho9x\n3ta8DT73P4kpLJKCd16qV1UQgNTMHCJCKoZWRYQEsevgMaeYlk0bsnTDdu64YRDL/thOQVExOXn5\nBPr5GpzthUvLKyayvCMM9uPXriTnE3vxWfkAjP9yNZquM7lfK/o2i6BleAAfrtnPuB6xFJfZ2BSf\nQbOQS+/L+6UiqkEEiScqOphJialENYggNTW9Suzb78/AZtP49edFvPbK+471E++5nSkP3oW7mxs3\nDrvTkLxdjS6VkOpVQpRSYUqpp5VSHymlPv3rUdvJ1ZSMbYf5edA05l83nSseHIbJww33AG+ih3Zh\nTq9H+KHL37B4exAzqurZuXpBKTxvvpfiH6sOvzDHtAJNI//JseT/407ch4xGhbr+PIHqsOkaCf/f\n3p3HRVX1Dxz/fBlABRREZXHfKnM3l9RQQUktH9M2l8xM2+1pVbOyfpalaT1tPuVTtlpZarZYaa65\nlyVuueW+C4orsgjCnN8fd0AQFBRmAb5vX/OSe+fcy/fMXO7cc7/nnDmVxEd9rue1Hs15Zf5Gzpw9\nP6YnPvEsO46doV1x6JZUQKWxzpfyR8uhrOn2LFseeZf6Y+6lbK1Qd4fkNL6BftTueh3T2j3F1JaP\n4V2uDPUd56zGD3Rn7j3/4ZvWj7N9xjLajh7g5mgvgwjlBj1Kyhf/y/PpjJ1bSXh6MAnPPkTZWweA\nj6+LA3S+YYNuY83mHfQZNo6YzTsICQ7Cy6tYdFS4LBl2w/4TiXx81w2Mv6UlY35dT8LZc7SvE0JE\nvRAGfbmcZ39aQ9NqwXh5ibvDVYX00P3D6diuJz2730Xb9q3o07931nOffjSV1s2iGTP6DZ4eMdSN\nUSp3KuhZbhYQCCwEZmd75ElEHhSRGBGJWZy0o9BBJsedxL9qcNayX3hwrq5TKXEn8XOUEZsXPhX8\nSD2ZmKPM6Z2HOZd8lorXVCe8Q2MS98eTeuIMJj2D/b/GENKq4HdgXcmcOo5XxfMXlVKxMvZTx84X\nKFMOr2q18X/6dQLGTsFW91r8hr6MV62r8GkTRfrmGLBnYM6cJmPXFmy1cg8W9DQhAWU5cuZs9BGK\n/AAAIABJREFU1vKRMylUCSiTq0yneiH42LyoFuhHrWB/9p86n+ZdsD2WzvVD8bEVjw/z0ljn1LgT\nlKlaKWu5TNVgUuOOX2KLnNLirEzC2X1HOfX7Fso3qVPkMV6JpNiTBISfP2f5hwWTFJvznJUcdxL/\n8PPnLF/HOSsp9vz67NtWi2jMmQPxnHWcs/b+GkNoy6soG1yeStfWJH6dlT3Z9dMqQlt6zrnMfiIe\nr0rnz19ewVUw2TIjlPPDVqMOAS+9Q4X3p+F9VUMCRo7FVveanPs5tB9zNgVbDc94jwsqtFIQR46f\nf++PHD9JSHBgjjIhwUG8PfIhZrz5PI/fZY19quDv59I4CyukfFnizqRkLR85k0JItswuWNmRTleF\nWeevIH9qBQew3/E5/UD7a5gxJIoP+7XHYKgVXHyyQKXBkAcG
sHjFLBavmMWRuHiqVT9/M7NqtVBi\nD+ceZxoXa61LTEziuxk/c13LprnKfD9zNjf3iHZe4B7MGOOyh6cq6JWKnzFmpDFmhjHmu8zHxQob\nYyYbY1oZY1pF+Rf+w/D4+t2UrxNGQI0qePnYqN2rLQfmr81R5sD8tdS70xqYW6tHG+JWWoMbA2pU\nQRwXZP7VKhFYryqJB+JJOnScKtfVx+bomxoe0YjTOw4VOlZnyNi7Da+QakilULB549MqkvQN52cI\n4mwyicP6kDhqEImjBpGxeyvJk0Zj37fD6vrQoLlVzrcMtjoNsMcdcE9FLkOjsED2n0ri0OlkzmXY\nmbctlsh6Oe9yR9UPI+aAdRF6MjmNfSeSqBZ4/oN77j+xdG9Q1aVxF0ZprPOZdTspVzecsjVDEB9v\nQnrfwLF5MQXa1jvQH/G1epT6BJenQptrSMo2oN2d4jfspkKdMMo7zln1erVl/4Kc56x9C9ZyteOc\nVadHGw47zln7F6ylXq+2ePl6U75GFSrUCSN+/S4SDx8npMX5c1bViEac2nmI1NNJ+FbwI7COdVFQ\nvWNjTu30nHNZxs5teIVXxyskDLy98bmhM2kxv58vkJzE6ft6kfBoPxIe7Uf6ji0kThhFxu5t1jZe\n1tgmr8qh2KrWxB4fd5Hf5Jka1a/FvtijHDxyjHPn0pm7Yg2RrXNejJ1MSMRut8Yufvz9PG7tknvM\nn6drFB7E/hNJHDqVZJ2/thyiU/2cWfeoq8OI2W/dZDiZnMq+E4lUD/Inw244lWKNBdp+9DQ7jibQ\nrk7pyOYWF59+NJWoiF5ERfRizuyF9Ol/KwAtWzcjISExV1csm81GcLDVDdHb25uu3aP4Z8t2AOrW\nq5VVrmu3SHbv2uuaSiiPU9AxIb+IyM3GmDlOjeYiTIadv16YQvTXz1hT9E5fyunth2g2/HaOb9jD\nwQVr2TFtKRETH6b3ijdJO5XIsqHvARDS5moaP9oTe3oGxm748/nPST2ZSOrJRPbN/ot/zXsVe3oG\nJzbvY/vUxe6oXv7sds5Oex+/J8YhXl6krZyPPXYfZXreQ8a+7aT/veqim6Yt+Ylyg4bhP9rqqnXu\nj/nYD+1xVeRXzNvLi5GdGzH0u7+w26FX4+rUq1yeSSu30zA0kMj6obSvXZk/9sVz22fLsHnBk50a\nEFTOukA7fDqZuDMptKwRnM9v8hylsc4mw86O5z6h6bRR1hS93ywmedtBaj/TlzMbdnF8Xgzlm9ej\n8Wcj8A7yp1LXltQe0YfVnZ7G76pqXP2fh8BuBy8v9v/3xxyzarmTybDz+4tTuGmqdc7aNn0pJ7cf\nouXw24nfsIf9C9aybdpSIt99mD4r3iT1VCK/Oc5ZJ7cfYvfPf3LnbxOwZ9hZ+cLnGLshft0uds/5\ni9vmWues45v3sXXqYkyGneXPfEL0R09g7HZSTyezbJiHzIwFYM8g+ZN3CRj1Bnh5kbb4V+wH91K2\n72Aydm3jXPYGyQW8GzShbO+7MBkZYLeT/PE7mDOnXRh84XnbbDx/f18eGfMeGXY7vbu0o37Nqrz/\nzc80rFeLqDZNWb1pOxOnzkIQrmtYn1EP9nV32JfN28uLZ7s25ZHp1ni1Xk1rUr9KBSYt20rD8CAi\nrwqnfZ0Q/tgTz20fLcLLS3gqqhFB5XxJTc9gyFfWJA3+ZXwY27Ml3iWgO9qI0eNZve5vTp1KoEvv\nuxl630Bu79kt/w093IJ5S4ju2onVGxaSkpzC40Ofy3pu8YpZREX0okwZX7794RO8fbyx2WwsXfI7\nX3xufbXAfQ/eTafI9pw7l87pU6d59OGR7qqKW+nsWCAFSdOIyBnAH0gFzgECGGNMhUtuCHxR7e5S\n9yr3/lfuwVklmXfLhvkXUsXeXy96xgW+K+3wyT37WEl3R7vS9z77vfSiu0NwOftf89wdgst593zY\n3SG4VHjd7u4OwS2OJWwv
FgOKrguPcNn18drYFR75mhQoE2KM0WkqlFJKKaWUKgKePFbDVS7ZCBGR\n6y71vDFm7aWeV0oppZRSSqkL5ZcJiQE2AZlTMWVP5xigszOCUkoppZRSqqTSMSH5N0KeBu4AUoBp\nwA/GmMRLb6KUUkoppZRSF3fJ6SeMMe8YYyKAx4AawCIRmSEizV0SnVJKKaWUUiWMceE/T1WgOfCM\nMbuxvrBwPtAG8Pxvu1NKKaWUUkp5pPwGptcF+gG9gANYXbLGGWNSLrWdUkoppZRSSl1MfmNCdgJ/\nY2VBEoCawCMi1vh0Y8xbTo1OKaWUUkqpEsauU/Tm2wgZA1mdyQKcHItSSimllFKqFLhkI8QY85KL\n4lBKKaWUUqpU8OQB465SoIHpeRGRfxVlIEoppZRSSqnSIb/uWJfSGvilqAJRSimllFKqNNAxIYXI\nhBhjRhdlIEoppZRSSqnS4YozISISZoyJK8pglFJKKaWUKul0TEghMiHAJ0UWhVJKKaWUUqrUuOJM\niDGmR1EGopRSSimlVGmgY0IK2AgRkeA8Vp8xxpwr4niUUkoppZRSJVxBMyFrgRrASUCAICBORI4A\nDxhj1jgpPqWUUkoppUoUHRNS8DEhC4CbjTGVjTGVgJuwpucdCkxyVnBKKaWUUkqpkqegjZC2xph5\nmQvGmPlAO2PMKqCMUyJTSimllFKqBLIb47KHpypod6xYERkJTHMs9wWOiogNsF9qw4xCBFdcTZlT\nxd0huNSmeQfdHYJygWa+pe9+Q4ztrLtDcLkPV5S+s/b6Fve4OwSXqxMY5u4QXO7kEz+6OwSXit09\n190hKHVJBW2E3AWMBn5wLK8E+gE2oI8T4lJKKaWUUqpE0jEhBe+OVRuojjUo3QeIBH4zxqQZY3Y6\nJzSllFJKKaVUSVTQTMhUYDiwiXy6XymllFJKKaXUpRS0ERJvjPnZqZEopZRSSilVChij9/QL2ggZ\nLSIfA4uA1MyVxpjvnRKVUkoppZRSqsQqaCNkMNAAazxIZtPNANoIUUoppZRS6jLYdWB6gRshrY0x\n1zg1EqWUUkoppVSpUNBGyO8i0tAYs8Wp0SillFJKKVXCGQ/+EkFXKWgjpC2wXkT2YI0JEcAYY5o6\nLTKllFJKKaVUiVTQRkh3p0ahlFJKKaVUKaFjQgrYCDHG7HN2IEoppZRSSqnSoaCZEKWUUkoppVQR\n0DEh4OXuAJRSSimllFKli2ZClFJKKaWUciG7ZkI0E6KUUkoppZRyLc2EKKWUUkop5UJGZ8fSTIhS\nSimllFLKtTQTopRSSimllAvp7FiaCVFKKaWUUkq5mDZClFJKKaWUUi5VbLpjVYtsyvVjBiJeXmz/\nZgkb3/85x/Nevt50fPdhKjWpQ+rJMyx55D0SDx6jaofGtHy+LzYfbzLOpRPz6jfErtwCQPdvR+EX\nGkT62TQA5vefwNnjCS6v26V0fHkgtTo3Jz0llYVPTyZ+095cZao0qU30Ww/hXdaXfb+tZ9noLwG4\nYVR/6kS3IONcOqf3HWXhsMmkJSRTNiiAmz58nJBmdfnn22UsffELF9eq4PqOHkzjqOtIS0nl8+Hv\nc2Dznlxleg3vT9vbOuIXGMATjQZmra9YtTKD33yUchX88fLy4ocJU9m0ZJ0rw78iJbXOEdmO5UVP\nT+bYRY7lztmO5RWOY7lMkD9d3/835WtU4cyBeOYP/S+pp5Np/lAPrr61PQDi7UXF+tX4rPkjpJ5K\nIuo/D1CrS3NSjicwPfo5V1b1st01eghNolqQlpLGJ8PfY/8F77lvWV8emTSMkFph2DPsbFgUw8wJ\nU90U7ZUZ/soT3NClLWdTUnnpyXFs27g9V5mJX/+HyiGVsHnbWP/nBiY89zZ2u52rGtbjuQnD8fMv\nx+EDcbz46BiSEpPdUIvL8/ZbY7ipe2eSU1K4776nWLd+U64yPj4+THz3VTp1ao/dbufF/5
vADz/M\noUPE9bz55ss0bXItd909lO+/n+2GGly+F8eNoFP0DaQkn2Xk4y+x5e9/Llr2gy/fokatavTo2Ddr\n3cD7+zJgSB/sGRksWbCC18dMdEXYhTLu9ReI7tqJlOQUHnvkWf7esCVXmVmzvyQ0rAopKakA3Nl7\nMMeOneDeIf0Y8sAAMjLsJCUl8/TjL7B92y5XV6HIvDDuLZat/IvgikH8+NUH7g7Ho9l1YHrxyISI\nl9B27CDm3/06P0Q9Q93ebQm8qmqOMlf3jyT1dBLfRQxj80dzaTWqHwBnT5xh4b1v8mP0cyx/8kM6\nvPtwju2W/nsSP3UdxU9dR3lcA6RWVDOC6oTxZYdh/DbyEyLH3Ztnuahxg/ntmY/5ssMwguqEUSuy\nKQD7l29kavSzfNP1eU7tjqXVoz0BSE89x6r/zGTlq1+7qipXpHFkC0LqhPNi5GN89fyHDBj7QJ7l\n/l4Uw2u9cl9k9vj37cTM/oOxPZ7h48feof+r9zs75EIrqXWuGdWMwDphTO0wjCUjP6HTRY7ljuMG\ns+SZj5naYRiBdcKo6TiWrxvak4Mrt/B1x+EcXLmFFkOtY3n9h7OZ0X0UM7qPYtX4GRxetZXUU0kA\n/PPtMn4Z+IZL6lcYTSJbEFonnOciH2PK8x9wz9gH8yw376OfGNXlCV7qMYL6LRvQJLKFiyO9cjd0\nbkuNutW5tX1/xo54nefGD8uz3HMP/h93RQ+mb+Q9VKwURHTPKABeeHMk7437kH6d72XJr8sYOLS/\nK8O/Ijd178xV9evQoGEEjzwykvffey3Pcs8/9zjx8cdp2KgDTZpGsmzZHwDsP3CI++5/im+m/ejK\nsAulU/QN1Kpbg+g2vXlx2KuMef3ijf+uPaJITkrJse76G1rRpXsnbonsx80d+vDxpC+dHXKhRXft\nRN16tWnT/EaefuJF3nj75YuWffj+4URF9CIqohfHjp0AYOa3P9OxXU+iInrx3jsf8cprnn3DJD+9\nb76RD9561d1hqGLiko0QEWktImHZlu8RkVkiMlFEgp0fnqVyi3qc2XuExP3x2M9lsHvWKmp2a5mj\nTM2u17Hz2+UA7J39F+ERjQA4sXkfKUdOAXBq20G8y/ri5Vs8EkB1u7Zk63crADiybhdlKvjjFxKU\no4xfSBC+AeU4ss66c7L1uxXU7dYKgAPLNmEy7ADErdtFQLj1lqWnpBK7ejvpqedcVZUr0qxra1Z9\nvxSAPet2UK68PxWqBOUqt2fdDhLiT+VabzCUCygHQLkKfpw+ctK5AReBklrnOl1bsi3bsexbgGN5\n23crqOM4lmt3bcm2mdbf97aZy7PWZ3dVr3bsmPVH1nLsn9tIPZXolPoUpRZdW/P790sA2L1uB37l\n/Qi84D1PO5vGP39sBiDjXDr7Nu+mYlglV4d6xTp1j2DOt3MB2LR2C+UrBFApJHf8mdkNm7cNbx+f\nrIGbterWYO0f6wH4c1kMnXtEuibwQujZsxtfTp0JwJ9/rSUwKJCwsJBc5e4d1I/xE/4LWANVjx+3\n/mb37TvIxo1bsdvtrgu6kKK7d+LH6VbGZv2aTZQPDKBKaOVc5fz8yzH4kbuZ9NbHOdbfNfgOJk/8\nnLQ067PpxDHPOH9dyk03d2HGNz8AsGb1BgIDyxMaWqXA2yeeScr62c+/HMV9rHKr5k0IrFDe3WEU\nC8YYlz08VX6ZkA+BNAAR6QiMB74ATgOTnRvaeX5hFUk6fCJrOTn2BP5hFS9axmTYSUtIpkzFgBxl\navVozfFNe7GnpWet6/DWg9wyfyzNnuztxBpcGf+wiiQePp61nBh7goAL6h0QVpHE2POvTVIerw1A\nwz4d2bf4b+cF6wRBocGcyFb/U3HHqRhW8Lbvz2/P4PreHRn/xwf8+7PnmDb6U2eEWaRKap0vPJbz\nOk79L3Es+1WuQPJRq9GVfPQUfpUr5NjWu6wvNSObsv
vX1c6qgtNUDK2U4z0/EXfikg2MchX8aN6l\nFVtXFp+/5yphVYg7fDRr+UhsPCHhuS9OAf77zZss2PgzyYnJLPplCQC7tu2hU/cOAET3jCK0au6L\neU9TrWoYBw8czlo+dDCWalXDcpQJDLSO4zEvPcNff85l2jcfEhKS9+tSHISGhxB7+EjWctzho4SG\n5b4gf/LZR/h00lekpJzNsb5OvZq0atuCmXOnMHXWZJo0b+j0mAsrvGoohw7GZS0fPnSE8KqheZad\nOOk1Fq+YxbBnhuZYP+SBAazesJDRY57h+WdecWq8SnmS/BohNmNM5lVBX2CyMeY7Y8yLQP2LbSQi\nD4pIjIjELEnaUVSxFkrQ1dVo9Xw/fh95/qJs2WOT+DH6Oebc+gqhba6h3h0RbozQeVo9dgv2DDvb\nfljp7lBcqs0tEfw+czHPtnuY9wa/xuC3H0NE3B2WU5WWOl94Y6f2jS2IW709qytWSeVl8+LhiU+x\n8PM5xB84mv8GxdBj/YfRvXlvfMv40DriOgDGPD2eO+/tzZfzPsbPvxzn0jw7i1tQ3t42atSoyu+r\nYmhzfXdWrVrD6xP+z91hOdW1ja+mZu3qLJizONdzNpuNwIoVuKP7ICa89C7vfjzeDRE6x0P3D6dj\nu5707H4Xbdu3ok//8zc+P/1oKq2bRTNm9Bs8PWLoJfaiShK7MS57eKr8+iXZRMTbGJMOdAGyd1a+\n6LbGmMk4MiWfVbu70LVPjjuJf9Xzd4P9woNJijuZZ5nk2BOIzQvfCn6knkzMKt/5kydZ/sQHnNl3\nNMc2AOlJZ9n94+9UaV6XXTNXFDbcQmkyKJpG/a1+0Ec37Cag6vk7ogHhwSReUO/EuJNZ3awA/C94\nbRrc2YHaXVrwY7+8+yN7msiB3YjoHw3A3g07Ca5aicwhekFhlTgZd+LiG1/ghr6dmThoLAC7127H\np4wPAcHlOeNhY39Kap0bD4qm4UWO5QuPU4CkSxzLyccS8AsJsrIgIUGkXFCf+re0Y8dPf1BcdB7Y\nnY79uwCwZ8MugrO9NsFhwZyMO57ndoNee5gje2JZ8KnnD1K+895b6T3AGruzZcM/hFUNYYPjudDw\nKhyNPXbRbdNS01g6bwWdukXw57IY9u3cz7/7WeNIatatQUR0O2eHf0UeeXgQ9903AICYmPVUr3F+\n7GK16uEcOhyXo/zx4ydJSkrmhx/mADDzu18YPLif6wIuAgOG3EnfgbcC8Pe6LTmyAGFVQzgSF5+j\nfItWTWncvCGL1/yMt7eN4MrBfPXjh9zd+yHiYo8y/5fFjn1txtgNwZWCOHE8d9dTdxrywAAGDuoD\nwPq1G6lW/XyGq2q10BzZoExxsda6xMQkvpvxM9e1bMqMb3KO9/l+5mzeeOviY0qUKmnyy4R8AywV\nkVlACrAcQETqY3XJcolj63dToU4YATWq4OVjo26vthyYvzZHmf3z11L/TitdX7tHm6wZsHwr+HHj\nF8NYM246R2POZ2XE5pXVXUu8bdSIbsHJbQddVKOL2zhlIdO6j2Ja91HsnreGa2+3sjOhLeqRdiY5\nq0tKpuSjp0hLTCG0RT0Arr09gt3z1wBQM7IpLR/+F78MeStrBjBPt+TLebx68whevXkE6+evpu1t\nnQCo0+IqUs4k5zkO4mJOHD5GgxuaABBWrxo+ZXw8rgECJbfOm6YszBo0vmfeGq65zGP5mtsj2OM4\nlvcuWMs1d1h/39fc0YG9jvUAvuXLUbVtA/bMy3lO8GS/fTmXl24ewUs3j2Dd/L9of1skAHVbXEXy\nmWRO5/Ge3zqsH+XK+/HNmM9cHO2V+fbzHxhw4xAG3DiEJb8u5+Y7uwPQ+LqGJJ5J5PjRnA2tcn7l\nssaJ2Gw2bujSjr079wNQsZI1RkZEuO/Je/jui1kurEnB/e+DKbRq3ZVWrbvy00/zGDjgDgCub3Md\nCacTiIvLnb36Zf
YCIjtZM7x1jopg61bP6D1QUFM//ZZbou7ilqi7WPjrEnr37QFA85aNOZOQSPyR\nnI3Nrz+fSUST7kS17Em/f93H3l37uLv3QwAsnLOEthGOcWB1a+Lj6+1xDRCwMheZA8znzF5In/5W\nI6xl62YkJCRy5EjOhpfNZiM42Opa6u3tTdfuUfyzxZodrm69WlnlunaLZPeuva6phHI7HRMCkl9w\nItIWCAfmG2OSHOuuBgKMMfl+6hdFJgSgeudmtHn5bsTLix3Tl/L3xJ9oMfx2jm3Yw4EFa7GV8aHD\nxIep1Kg2qacSWTL0PRL3x9PsiV40+XdPEvacvzMxv/8E0pNTuen7F/DytiE2L2KXb+avl7/C2Asf\nbmIRzjnW6dVB1IpsyrmUNBYNm8zRv62pO/vNHcu07qMACGlah+i3HrSmNV28IWvK3YHL38Tm681Z\nR0Yobu1OljxvXcAM+v1tfMuXw8vHm7SEZH4cMJ6TOw7nEUH+NtlSC1vNi+o/5j4adWpOWkoaU0a8\nz76NuwF4Yc4bvHrzCABue/Zu2vSKIDC0IqePnGTF9EX88s63hNevzt3jH6KMf1kw8N1rX7J1uef3\no/fUOjfLKFOo7Tu8OoiakU1JT0njt2GTiXccy33mjmWG41iu0rQOnR3H8v7FG1juOJbLBAXQ7X+P\nEVCtEmcOHrOm6HV0vbrmzg7UjGzKgkffz/H7bnzvUaq2vZaywQGkHEtg9ZvfsXX60suKOcZ2Nv9C\nReDuMffTuFNz0lJS+XTEJPZutHJhL815g5duHkHFsGDeXDWZwzsPku7oirRoylyWT19U5LH8nZr7\nLm5ReGbcU7SPup6zKWd5+anX2LphGwBTF3zKgBuHEFy5Im9/OQFfX1+8vISYlet4a/R/ycjIoN/9\nd3DnvbcBsHjOUt4b92GRxrb++O4i3V+mie+OpVvXSJJTUrj//qdZs9b6W4xZPZ9WrbsCULNmNaZ8\nNpHAoAociz/BfQ88xYEDh2nVshkzv/2EihUDOXs2lbgjR2nWvHORxVYnMCz/Qldg9ISRdIxqT0rK\nWZ59/CU2bdgKwE+Lv+aWqLtylK1WI5zJU9/JmqLXx8eb194dzbWNr+bcuXTGj36HVSuKbpzXydQz\nRbav7Ca8OZrO0R1ISU7h8aHPsX6dNRXz4hWziIrohZ9fOX7+dSrePt7YbDaWLvmdF597DbvdztgJ\no+gU2Z5z59I5feo0I4ePYds/O4skrtjdc4tkP5djxOjxrF73N6dOJVApOIih9w3k9p7dXBqDT+W6\nxaIPcsWA+i5rHZxM3OmRr0m+jZDCKqpGSHFSlI2Q4sCZjRDlOQrbCCmOXNUI8STOaoR4Mmc1QjyZ\nsxohnsxZjRBP5Y5GiCcoLo2QwIB6Lrs+Pp24yyNfk1J2uayUUkoppZRyt+LxhRlKKaWUUkqVEJ48\nVsNVNBOilFJKKaWUcinNhCillFJKKeVCnvz9Ha6imRCllFJKKaWUS2kmRCmllFJKKRcyaCZEMyFK\nKaWUUkopl9JGiFJKKaWUUsqltDuWUkoppZRSLqQD0zUTopRSSimllHIxzYQopZRSSinlQvplhZoJ\nUUoppZRSSrmYZkKUUkoppZRyIZ2iVzMhSimllFJKKRfTRohSSimllFIuZIxx2aMwRCRYRBaIyA7H\n/xUvUq6miMwXka0iskVEaue3b22EKKWUUkoppfLyLLDIGHMVsMixnJcvgDeMMdcCbYCj+e1Yx4Qo\npZRSSinlQsVodqxeQKTj5ynAEmBk9gIi0hDwNsYsADDGJBZkx5oJUUoppZRSqoQSkQdFJCbb48HL\n2DzUGBPr+DkOCM2jzNXAKRH5XkTWicgbImLLb8eaCVFKKaWUUsqFXJkHMcZMBiZf7HkRWQiE5fHU\nqAv2Y0Qkr9C9gQ5AC2A/MB24F/jkUnFpI0QppZRSSqlSyhgTfbHnROSIiIQbY2JF
JJy8x3ocBNYb\nY3Y7tvkRaEs+jRApRn3SLpuIPOho/ZUapa3Opa2+oHUuLbTOpYPWueQrbfWF0lnnkkpE3gCOG2PG\ni8izQLAx5pkLytiAtUC0MSZeRD4DYowx719q3yV9TMjl9HkrKUpbnUtbfUHrXFponUsHrXPJV9rq\nC6WzziXVeOBGEdkBRDuWEZFWIvIxgDEmAxgOLBKRjYAAH+W3Y+2OpZRSSimllMrFGHMc6JLH+hjg\n/mzLC4Cml7Pvkp4JUUoppZRSSnmYkt4IKY39EUtbnUtbfUHrXFponUsHrXPJV9rqC6WzzuoyleiB\n6UoppZRSSinPU9IzIUoppZRSSikPU6IbISKyRERaZVuuLSKb3BlTURGRDBFZn+3xrGO9j4iMF5Ed\nIrJWRP4QkZvcHW9BiEgVEVkhIptEpHe29bNEpKrjZxGRFxz12y4ii0Wk0SX2OVxE/nG8RqtF5B5X\n1KUgRKS3iBgRaVCIfeR5TDvWGxF5LNu690Tk3iv9XUXFBfVOcbzfW0TkAxFx63nOBfXdlG35ARFZ\nIyIVr/R3eRIRSXR3DEVJRMJEZJqI7HK8T3NE5Gp3x+UuIrJXRDZm+xxr7+6YXEVEXnKcF+pnW/ek\nY12rS23r6URklIhsFpG/He/r9e6OSXmmEt0IKeFSjDHNsz3GO9a/AoQDjY0x1wG9gfJui/Ly9Ac+\nANoATwKISE9gnTHmsKPMo0B7oJkx5mrgNeAnESl74c5E5GHgRqCNMaY51uwO4vRaFFxwT3eVAAAH\nWElEQVR/YIXjf2c4CjwhIr5O2v+Vcna9dzne76ZAQ6y/AXdydn0BEJGBwGNAN2PMSWf+LnX5RESA\nH4Alxph6xpiWwHNAqHsjc7uobJ9jv7s7GBfbCPTLtnwnsNlNsRQJEWkH/Au4zhjTFGtK1wPujUp5\nqhLRCHHcDfxHRKaKyFYRmSkifu6Oy9UcdX4AeMwYkwpgjDlijJnh3sgK7BzgB5QBMkTEG6sx8nq2\nMiOBfxtjkgGMMfOB34EBeezveeARY0yCo2yCMWaKE+MvMBEJACKA+8j2IeS4S9oj2/LnInKH4xhf\n7shurS3gHcN4YBEwqKjjv1IuqjcAxph0rGOjfn5lncVV9RWRPsCzQFdjzLEirkaBZTsXf+7IVE4V\nkWgRWenIXrZxlKsiIgscd0s/FpF9IlL5EvutLFZWt4eIeInIJMfvWeDIJtzhulpesSjgnDHmg8wV\nxpgNgE1Efslc5ykZy4Io6Pt9BfsdIVbm+m8Rebmo477MWPKt4+Uez9n8CPRy/J56wGkg6+9XRBJF\nZKyIbBCRVSJSHBqs4cCxbNcgx7LdRFQqhxLRCHG4BphkjLkWSACGOtZPzUz1AnPcFl3RKyc5u2P1\nxbrY2p950V0MfY11Ql4AjMN6D7/MbHCISAXA3xiz+4LtYoAcXbIcZcvnUdZT9ALmGmO2A8dFpKVj\n/XSgD4Ajg9EFmI2V1bjRkd3qC0ws4O+ZAAwX69tMPYGr6p3ZKO+CdbfRXVxR31rAe1gNkLgijv9K\n1AfeBBo4HndhNcSGY90YABgN/GaMaQTMBGpebGeOC6/ZwP8ZY2YDtwG1sbJcA4F2TqlF0WsMrHF3\nEE5QkPf7UhY7PsP+BBCRrsBVWBnx5kBLEenojMAvQ351LPDxfIEE4ICINMa6STH9guf9gVXGmGbA\nMqybjJ5uPlDD0WCbJCKd3B2Q8lwlqRFywBiz0vHzV1gnCIABmale4Gb3hOYUF3bHuvDkVewYY04b\nY3oYY1oBa4GewEwR+UhEZgIt3BthkeoPTHP8PI3zXXV+BaJEpAxwE7DMGJMC+AAfifVNpN9iXYDl\ny9EI+xPrQ9MTuKLe9Rw3HVYCs40xvxZlBS6TK+obD+zH0ajxAHuMMRuNMXasriWLjDUN40asxgNY\n5+dpAMaYucDFuo/5YGXznnF8EVbmtt8aY+yO
Rtdi51RDFVBB3u9LyeyOlTluoKvjsQ7rc6ABVqPE\nnfKrY0GP57xMw2qA9MbqrpddGpCZJVtDwV5PtzLGJAItsb4xPR6YXlwye8r1StI3pl8413BpnHt4\nJ1BTRCoU42xIpheBsZzvTz8T+B5IEpG6F2Q4WgJLs29sjElwpLIvLOt2IhIMdAaaiIgBbIARkRHG\nmLMisgTohnUnPPMC9ingCNAM6+bB2cv4leOwXr+l+RV0JhfWO3NMiFu5sL7JWDdYlovIUWPM1KKt\nyWVLzfazPduyncv/zEnHuvjqhpuP3yKwGcir21g6OW8I5hrf5uGK8v0Ga9zea8aYDwsbWBHKr47p\nhdj3L8AbQIzjcyv7c+fM+e9RyKCYXLMZYzKAJcASxw2VQcDn7oxJeaaSlAmpKdaAKLDu+q5wZzDu\n4Oi29AnwrqOLR2bf6zvdG9nlEZGrgOrGmCVYY0TsWI3Kclgn64kiUs5RNhrrLtTXeezqNeB9R9cs\nRCRAPGN2rDuwupnVMsbUNsbUAPYAHRzPTwcGO5bnOtYFArGOO3EDsS5oC8QY8w+wBSuz5E4urbcH\ncFl9jTFHge7AOBHpVoR1cJaVnO+O1hW42GxeBhgCNBCRkdm2vd0xNiQUiHRyrEXlN6CMiDyYuUJE\nmmJddDcUkTIiEoTVNa80mwcMEWs8FSJSTURC3BxTfgp6POfi+NweiXXTrdgTkWscn+GZmgP73BWP\n8mwlqRGyDXhURLZinQD+5+Z4nO3CMSGZs2O9gJUC3SLW1J2/YPU7LU7GAqMcP38DPAKsBt4F/uv4\neaOIbMPKmPRydGXBMSgwc3rD/2F11VjteC2WYzVo3K0/udPu33G+q858oBOw0BiT5lg3CRgkIhuw\nuickXebvHAtUv7Jwi4w76u1OLq2vMWYPcAvw6ZUOCHahl4Gujr/LO4E44ExeBR13VfsDnUVkKNZr\neBCrYf0VVped064IujAcd7RvBaLFmqJ3M9aNkjhgBrDJ8f8690VZ9ESklYh8nG15/aXKOyYb+Rr4\nw3EXfSaeP8PjRY9nsSZOyJxifoyI3HLhxsaYacaYta4M2IkCgCliTZH+N1aX0pfcG5LyVCXiG9NF\npDbwizGmsZtDUUoplQ/HWJgMY0y6I4P9v8vpQiciAcaYRBGpBPwF3OAhg/JVKVTY41mp0qpY9C9U\nSilVotQEZoj1RZJpXP6sP784ui75Aq9oA0S5WWGPZ6VKpRKRCVFKKaWUUkoVHyVpTIhSSimllFKq\nGNBGiFJKKaWUUsqltBGilFJKKaWUcilthCillFJKKaVcShshSimllFJKKZfSRohSSimllFLKpf4f\nes8YIHy4dFoAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 1080x720 with 2 Axes>"
]
},
"metadata": {
"tags": []
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "p0ZyxQyql3nW",
"colab_type": "code",
"colab": {}
},
"source": [
"X = Data.drop(['name','Aval N','Aval P','Aval K','mg kg','Cu','m..Fe','mg..Mn','S'], axis =1)\n",
"y1 = Data['Aval N']\n",
"y2 = Data['Aval P']\n",
"y3 = Data['Aval K']"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "f38uhd0Rxs5z",
"colab_type": "code",
"colab": {}
},
"source": [
"import tensorflow as tf"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "_3sVfu-Lua2X",
"colab_type": "code",
"colab": {}
},
"source": [
"X_train, X_test, y1_train, y1_test = train_test_split(X,y1, test_size = 0.2, random_state = 0)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "EpUJVtYP6EZH",
"colab_type": "code",
"colab": {}
},
"source": [
"y2_train, y2_test = train_test_split(y2, test_size = 0.2, random_state = 0)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "U-_sAGUS6Km4",
"colab_type": "code",
"colab": {}
},
"source": [
"y3_train, y3_test = train_test_split(y3, test_size = 0.2, random_state = 0)"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "N20NIOw8yF5A",
"colab_type": "code",
"colab": {}
},
"source": [
"modelA1 = LinearRegression()\n",
"modelA2 = LinearRegression()\n",
"modelA3 = LinearRegression()"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "0JWX38FUyhkd",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "cd3b037f-67cf-45c6-b4c8-7f6271d5dcff"
},
"source": [
"modelA1.fit(X_train, y1_train)"
],
"execution_count": 180,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None, normalize=False)"
]
},
"metadata": {
"tags": []
},
"execution_count": 180
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "Dc6xWi2kE2p4",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "777545e1-1657-4846-fa26-51a4f292a774"
},
"source": [
"modelA2.fit(X_train, y2_train)"
],
"execution_count": 181,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None, normalize=False)"
]
},
"metadata": {
"tags": []
},
"execution_count": 181
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "HROjYNQeE6Q_",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "3ca3790b-da96-4f98-c485-d811bf8f6413"
},
"source": [
"modelA3.fit(X_train, y3_train)"
],
"execution_count": 182,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None, normalize=False)"
]
},
"metadata": {
"tags": []
},
"execution_count": 182
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "wtpsst3Ny40Y",
"colab_type": "code",
"colab": {}
},
"source": [
"L1_pred = modelA1.predict(X_test)\n",
"L2_pred = modelA2.predict(X_test)\n",
"L3_pred = modelA3.predict(X_test)\n"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "z26eHvgoDRKv",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "c1a682a6-a579-4ef9-95f1-b001655defe5"
},
"source": [
"L1_pred"
],
"execution_count": 177,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"array([375.66964207, 332.26213321])"
]
},
"metadata": {
"tags": []
},
"execution_count": 177
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "QZRdmjjTBEoI",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "0dc5bf0b-d638-46bd-feb2-ff43fa2849dc"
},
"source": [
"L2_pred"
],
"execution_count": 171,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"array([25.37942654, 22.06209337])"
]
},
"metadata": {
"tags": []
},
"execution_count": 171
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "HkdmVdJnDYgg",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "c04e3e7e-b7c3-4914-9411-d085111ca0f0"
},
"source": [
"L3_pred"
],
"execution_count": 178,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"array([597.89753357, 558.19831197])"
]
},
"metadata": {
"tags": []
},
"execution_count": 178
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "F4hwZKV13ltU",
"colab_type": "code",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 34
},
"outputId": "c64104fe-5f1d-4dec-c20d-f311a29c2fce"
},
"source": [
"a= modelA1.score(X_test, y1_test)\n",
"b= modelA2.score(X_test, y2_test)\n",
"c= modelA3.score(X_test, y3_test)\n",
"print('Score for Aval N is {}, Score for Aval P is {}, Score for Aval K is{} '.format(a, b, c))"
],
"execution_count": 184,
"outputs": [
{
"output_type": "stream",
"text": [
"Score for Aval N is 0.5264820043653415, Score for Aval P is -25.109852553550727, Score for Aval K is-1.1553056672076427 \n"
],
"name": "stdout"
}
]
}
]
} |
# coding: utf-8
"""
NiFi Rest API
The Rest API provides programmatic access to command and control a NiFi instance in real time. Start and stop processors, monitor queues, query provenance data, and more. Each endpoint below includes a description, definitions of the expected input and output, potential response codes, and the authorizations required to invoke each service.
OpenAPI spec version: 1.19.0
Contact: dev@nifi.apache.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class RegisteredFlowSnapshotMetadata(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # swagger_types maps attribute name -> attribute type.
    # attribute_map maps attribute name -> JSON key in the API definition.
    swagger_types = {
        'bucket_identifier': 'str',
        'flow_identifier': 'str',
        'version': 'int',
        'timestamp': 'int',
        'author': 'str',
        'comments': 'str'
    }

    attribute_map = {
        'bucket_identifier': 'bucketIdentifier',
        'flow_identifier': 'flowIdentifier',
        'version': 'version',
        'timestamp': 'timestamp',
        'author': 'author',
        'comments': 'comments'
    }

    def __init__(self, bucket_identifier=None, flow_identifier=None, version=None, timestamp=None, author=None, comments=None):
        """
        RegisteredFlowSnapshotMetadata - a model defined in Swagger
        """
        # Backing fields all start as None; the property setters below are
        # invoked only for arguments the caller actually supplied.
        for attr_name in self.swagger_types:
            setattr(self, '_' + attr_name, None)

        supplied = {
            'bucket_identifier': bucket_identifier,
            'flow_identifier': flow_identifier,
            'version': version,
            'timestamp': timestamp,
            'author': author,
            'comments': comments,
        }
        for attr_name, value in supplied.items():
            if value is not None:
                setattr(self, attr_name, value)

    @property
    def bucket_identifier(self):
        """
        The bucket_identifier of this RegisteredFlowSnapshotMetadata.

        :rtype: str
        """
        return self._bucket_identifier

    @bucket_identifier.setter
    def bucket_identifier(self, bucket_identifier):
        """
        Set the bucket_identifier of this RegisteredFlowSnapshotMetadata.

        :type: str
        """
        self._bucket_identifier = bucket_identifier

    @property
    def flow_identifier(self):
        """
        The flow_identifier of this RegisteredFlowSnapshotMetadata.

        :rtype: str
        """
        return self._flow_identifier

    @flow_identifier.setter
    def flow_identifier(self, flow_identifier):
        """
        Set the flow_identifier of this RegisteredFlowSnapshotMetadata.

        :type: str
        """
        self._flow_identifier = flow_identifier

    @property
    def version(self):
        """
        The version of this RegisteredFlowSnapshotMetadata.

        :rtype: int
        """
        return self._version

    @version.setter
    def version(self, version):
        """
        Set the version of this RegisteredFlowSnapshotMetadata.

        :type: int
        """
        self._version = version

    @property
    def timestamp(self):
        """
        The timestamp of this RegisteredFlowSnapshotMetadata.

        :rtype: int
        """
        return self._timestamp

    @timestamp.setter
    def timestamp(self, timestamp):
        """
        Set the timestamp of this RegisteredFlowSnapshotMetadata.

        :type: int
        """
        self._timestamp = timestamp

    @property
    def author(self):
        """
        The author of this RegisteredFlowSnapshotMetadata.

        :rtype: str
        """
        return self._author

    @author.setter
    def author(self, author):
        """
        Set the author of this RegisteredFlowSnapshotMetadata.

        :type: str
        """
        self._author = author

    @property
    def comments(self):
        """
        The comments of this RegisteredFlowSnapshotMetadata.

        :rtype: str
        """
        return self._comments

    @comments.setter
    def comments(self, comments):
        """
        Set the comments of this RegisteredFlowSnapshotMetadata.

        :type: str
        """
        self._comments = comments

    def to_dict(self):
        """
        Return the model properties as a dict, converting nested models
        (anything exposing to_dict) along the way.
        """
        result = {}
        for attr_name in self.swagger_types:
            value = getattr(self, attr_name)
            if isinstance(value, list):
                result[attr_name] = [item.to_dict() if hasattr(item, "to_dict") else item
                                     for item in value]
            elif hasattr(value, "to_dict"):
                result[attr_name] = value.to_dict()
            elif isinstance(value, dict):
                result[attr_name] = {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                                     for k, v in value.items()}
            else:
                result[attr_name] = value
        return result

    def to_str(self):
        """
        Return the string representation of the model.
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`.
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Two instances are equal when they are the same model type and all
        their attributes match.
        """
        return (isinstance(other, RegisteredFlowSnapshotMetadata)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """
        Inverse of __eq__.
        """
        return not self == other
|
from linear_algebra import *
# Demonstrate the linear_algebra helpers on two sample 2x2 matrices.
A = [[1, 4], [-2, 3]]
B = [[-2, 5], [6, 7]]
print('Matrix A:')
print_matrix(A)
print('Matrix B:')
print_matrix(B)
print('Addition of matrices A and B:')
# Addition
print_matrix(add_matrix(A, B))
print('Subtraction of matrices A and B:')
# Subtraction
print_matrix(subtract_matrix(A, B))
print('Multiplication of matrices A and B:')
# Multiplication
print_matrix(multiply_matrix(A, B))
print('Division of matrices A and B (A * B ^ (-1)):')
# Division
print_matrix(divide_matrix(A, B))
print('Transpose of matrix A:')
# Transpose
print_matrix(transpose_matrix(A))
# Determinant
print(f'Determinant of matrix A: {determinant_matrix(A)}')
# Bug fix: this line computes determinant_matrix(B) but was labeled "matrix A".
print(f'Determinant of matrix B: {determinant_matrix(B)}')
|
from django import forms
from .models import Blog, Comment
# from pagedown.widgets import PagedownWidget
class BlogCreateForm(forms.ModelForm):
    """ModelForm for authoring a Blog: exposes only the title and content fields."""
    # for pagedown (markdown editor widget) — currently disabled along with its import
    # content = forms.CharField(widget=PagedownWidget(show_preview=False))
    class Meta:
        model = Blog
        fields = ('title', 'content')
class BlogPublishForm(forms.ModelForm):
    """ModelForm for publishing a Blog: publish date, category, cover and series metadata."""
    # DateField rendered with the 'timepicker' CSS class so the frontend
    # date-picker widget can attach to it.
    publish_date = forms.DateField(widget=forms.DateInput(attrs={'class':'timepicker'}))
    class Meta:
        model = Blog
        # NOTE(review): 'cover_breif' looks like a typo of 'cover_brief', but it must
        # match the Blog model's field name — confirm against the model before renaming.
        fields = ('publish_date', 'category', 'cover_image_url', 'cover_breif', 'is_series',
                  'series_keyword', 'series_index')
|
#Data correlation source: http://tylervigen.com/view_correlation?id=28590
# Plots a spurious correlation: UK military spending vs. lawnmower deaths.
import matplotlib.pyplot as plt
from numpy.random import rand  # NOTE(review): unused here — confirm before removing

Year = ['2008', '2009', '2010']
Color = ['green', 'blue', 'red']  # NOTE(review): unused here — confirm before removing
MilitarySpending = [38579, 40246, 39461]
LawnmowerDeaths = [43, 84, 73]

plt.subplot(211)
# Plot both series against the year labels so the x-axis is meaningful
# (previously the lists were plotted against their bare indices 0..2).
plt.plot(Year, MilitarySpending, label='UK Military Spending')
plt.plot(Year, LawnmowerDeaths, label='Lawnmower Deaths')
# Bug fix: labels were assigned to each line but plt.legend() was never
# called, so no legend was ever displayed.
plt.legend()
plt.show()
'''Jinjer操作サービス'''
import os
import time
from urllib import parse
from collections import defaultdict
from django.utils import timezone
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from sns.models import Report
class LoginPageLocators(object):
    '''
    [Locators]
    Login page: username/password inputs and the login button.
    '''
    USERNAME_TEXT = (By.ID, 'username')
    PASSWORD_TEXT = (By.ID, 'password')
    LOGIN_BUTTON = (By.XPATH, '//*[@id=\"button_login\"]')
class ReportSearchLocators(object):
    '''
    [Locators]
    Monthly-report search page.
    '''
    # Anchor tags in the first column of the bordered result table — these
    # link to the individual report-detail pages.
    REPORT_DETAIL_URLS = (By.XPATH, '//table[@border="1" and @cellspacing="0"]/tbody/tr/td[1]//a')
    YM_DROPDOWN = (By.NAME, 'report_ym')
    SUBMIT_BUTTON = (By.XPATH, '//td/input')
class ReportDetailLocators(object):
    '''
    [Locators]
    Monthly-report detail page.
    '''
    # The div elements holding field labels and values, laid out alternately
    # (label div followed by its value div).
    REPORT_DETAIL_ITEMS = (By.XPATH, '//form[1]/table/tbody/tr[2]/td[2]/table[2]/tbody/tr/td/div')
def login(driver):
    '''
    [Control]
    Log in to the SNS site through the given Selenium driver.

    Credentials come from environment variables: DJANGO_SNS_BASIC_USER /
    DJANGO_SNS_BASIC_PASS (HTTP basic auth embedded in the URL) and
    DJANGO_SNS_EMAIL / DJANGO_SNS_PASSWORD (the login form itself).
    '''
    print('[Start]:Login.')
    basic_info = os.getenv("DJANGO_SNS_BASIC_USER") + ':' + os.getenv('DJANGO_SNS_BASIC_PASS') + '@'
    driver.get("https://" + basic_info + "sv27.wadax.ne.jp/~stylagy-co-jp/sns/")
    driver.find_element(*LoginPageLocators.USERNAME_TEXT).send_keys(os.getenv('DJANGO_SNS_EMAIL'))
    driver.find_element(*LoginPageLocators.PASSWORD_TEXT).send_keys(os.getenv('DJANGO_SNS_PASSWORD'))
    driver.find_element(*LoginPageLocators.LOGIN_BUTTON).click()
    print('[Complete]:Login.')
def select_dropdown(driver, ym):
    '''
    Not used because it does not work reliably.

    Intended to select the report year-month ``ym`` from the dropdown on the
    search page and submit the form.
    '''
    element = driver.find_element(*ReportSearchLocators.YM_DROPDOWN)
    ym_dropdown = Select(element)
    ym_dropdown.select_by_value(ym)
    print(driver.page_source)
    # An error occurs here: even with the 10-second wait the submit element
    # is apparently never found.
    submit_element = WebDriverWait(driver, 10).until(
        expected_conditions.visibility_of_element_located(
            ReportSearchLocators.SUBMIT_BUTTON)
    )
    submit_element.click()
def get_report_urls(driver, ym):
    '''
    Collect the report-detail URLs from the monthly-report search page.

    Links whose anchor text is empty (members who did not write a report)
    are skipped.  ``ym`` is currently ignored: selecting the year/month via
    the dropdown is broken (see select_dropdown), so only the month shown
    by default on the page is scraped.
    '''
    driver.get("https://sv27.wadax.ne.jp/~stylagy-co-jp/sns/?m=pc&a=page_h_report_m_search")
    elements = driver.find_elements(*ReportSearchLocators.REPORT_DETAIL_URLS)
    # Idiom fix: iterate the elements directly instead of indexing with
    # range(len(elements)); the order of collected URLs is unchanged.
    return [element.get_attribute('href')
            for element in elements
            if element.text]
def get_report_item_list(driver):
    '''
    Scrape the report-detail page and return its fields as a dict.

    The page lays out label divs immediately followed by value divs, so the
    element list is consumed pairwise: when a div's text is a known field
    label (see check_item), the next div's text becomes its value.  Missing
    keys read as '' thanks to the defaultdict.
    '''
    elements_iter = iter(driver.find_elements(*ReportDetailLocators.REPORT_DETAIL_ITEMS))
    d = defaultdict(lambda: '')
    # Idiom fix: a for-loop over the iterator plus next() replaces the
    # while-True / __next__() / StopIteration pattern of the original.
    for label_element in elements_iter:
        item_name = label_element.text
        if check_item(item_name):
            value_element = next(elements_iter, None)
            if value_element is None:
                break  # label at the very end of the page with no value div
            d[item_name] = value_element.text
    return d
def open_report_detail(driver, url):
    '''
    Open one report-detail page and save its contents as a Report record.

    The report id is taken from the URL's query string; the field values are
    scraped from the page via get_report_item_list.
    '''
    driver.get(url)
    # Parse the URL to extract the query-string parameters (contains the report id).
    qs_d = parse.parse_qs(parse.urlparse(url).query)
    # print(driver.page_source)  # dump the HTML of the currently open URL
    d = get_report_item_list(driver)
    # The dict keys below are the literal (Japanese) field labels scraped from
    # the page; they are runtime data and must not be translated.
    obj = Report(
        id=qs_d['id'][0],
        submitter=d['提出者'],
        submission_year_month=d['提出年月'],
        work_place=d['作業場所'],
        work_content=d['作業内容'],
        environment=d['環境\n(使用しているOS/言語/アプリケーションなど)'],
        problem=d['問題点/懸念事項'],
        self_assessment=d['自己評価'],
        acquisition_skill=d['習得したスキル\n又は\n習得中のスキル'],
        work_schedule=d['今後の作業予定'],
        goal=d['来月の目標\n又は\n習得したいスキル'],
        other=d['そ の 他'],
        created_at=timezone.now(),
        updated_at=timezone.now(),
    )
    obj.save()
    return
def check_item(item_name):
    '''
    Return True if ``item_name`` is one of the known monthly-report field labels.

    The labels are the literal (Japanese) strings shown on the report-detail
    page; they are matched exactly, including embedded newlines.
    '''
    # Fixes: the parameter was previously named 'str', shadowing the builtin,
    # and the if/return True/return False pattern is collapsed into a direct
    # membership test (same True/False results for all inputs).
    return item_name in {
        '提出者', '提出年月', '作業場所', '作業内容',
        '環境\n(使用しているOS/言語/アプリケーションなど)',
        '問題点/懸念事項', '自己評価',
        '習得したスキル\n又は\n習得中のスキル',
        '今後の作業予定', '来月の目標\n又は\n習得したいスキル',
        'そ の 他',
    }
|
#!/usr/bin/env python
# Author: Jin Lee (leepc12@gmail.com)
import sys
import os
import argparse
from encode_lib_common import (
assert_file_not_empty,
log,
ls_l,
mkdir_p,
)
from encode_lib_genomic import (
peak_to_bigbed,
peak_to_hammock,
get_region_size_metrics,
get_num_peaks,
peak_to_starch,
)
from encode_lib_blacklist_filter import blacklist_filter
from encode_lib_frip import frip_shifted
def parse_arguments():
    """Build and parse command-line arguments for ENCODE chip post-peak-call.

    Returns:
        argparse.Namespace; ``blacklist`` is normalized to '' when absent
        or when it is Cromwell's literal 'null' path.
    """
    parser = argparse.ArgumentParser(prog='ENCODE post_call_peak (chip)',
                                     description='')
    parser.add_argument('peak', type=str,
                        help='Path for PEAK file. Peak filename should be "*.*Peak.gz". '
                             'e.g. rep1.narrowPeak.gz')
    parser.add_argument('--ta', type=str,
                        help='TAG-ALIGN file.')
    parser.add_argument('--peak-type', type=str, required=True,
                        choices=['narrowPeak', 'regionPeak',
                                 'broadPeak', 'gappedPeak'],
                        help='Peak file type.')
    parser.add_argument('--fraglen', type=int, required=True,
                        help='Fragment length.')
    parser.add_argument('--chrsz', type=str,
                        help='2-col chromosome sizes file.')
    parser.add_argument('--blacklist', type=str,
                        help='Blacklist BED file.')
    parser.add_argument('--regex-bfilt-peak-chr-name',
                        help='Keep chromosomes matching this pattern only '
                             'in .bfilt. peak files.')
    parser.add_argument('--mem-gb', type=float, default=4.0,
                        help='Max. memory for this job in GB. '
                             'This will be used to determine GNU sort -S (defaulting to 0.5 of this value). '
                             'It should be total memory for this task (not memory per thread).')
    parser.add_argument('--out-dir', default='', type=str,
                        help='Output directory.')
    # Fixed: the choices list previously contained 'CRITICAL' twice; it is
    # now the standard logging levels in ascending severity.
    parser.add_argument('--log-level', default='INFO',
                        choices=['NOTSET', 'DEBUG', 'INFO',
                                 'WARNING', 'ERROR', 'CRITICAL'],
                        help='Log level')
    args = parser.parse_args()
    # Treat a missing blacklist (or a path ending in 'null') as "no blacklist".
    if args.blacklist is None or args.blacklist.endswith('null'):
        args.blacklist = ''
    log.setLevel(args.log_level)
    log.info(sys.argv)
    return args
def main():
    """Run the chip post-peak-call pipeline end to end: blacklist-filter the
    peak file, convert it to bigBed/starch/hammock, then compute FRiP and
    peak-region QC metrics into the output directory."""
    # read params
    args = parse_arguments()
    log.info('Initializing and making output directory...')
    mkdir_p(args.out_dir)
    log.info('Blacklist-filtering peaks...')
    bfilt_peak = blacklist_filter(
        args.peak, args.blacklist, args.regex_bfilt_peak_chr_name, args.out_dir)
    log.info('Checking if output is empty...')
    # Fail fast: an empty filtered peak file means upstream peak calling failed.
    assert_file_not_empty(bfilt_peak)
    log.info('Converting peak to bigbed...')
    peak_to_bigbed(bfilt_peak, args.peak_type, args.chrsz,
                   args.mem_gb, args.out_dir)
    log.info('Converting peak to starch...')
    peak_to_starch(bfilt_peak, args.out_dir)
    log.info('Converting peak to hammock...')
    peak_to_hammock(bfilt_peak, args.mem_gb, args.out_dir)
    log.info('Shifted FRiP with fragment length...')
    frip_qc = frip_shifted(args.ta, bfilt_peak,
                           args.chrsz, args.fraglen, args.out_dir)
    log.info('Calculating (blacklist-filtered) peak region size QC/plot...')
    region_size_qc, region_size_plot = get_region_size_metrics(bfilt_peak)
    log.info('Calculating number of peaks (blacklist-filtered)...')
    num_peak_qc = get_num_peaks(bfilt_peak)
    log.info('List all files in output directory...')
    ls_l(args.out_dir)
    log.info('All done.')

# Script entry point.
if __name__ == '__main__':
    main()
|
import json

# Load the flow-field configuration written by the upstream step.
with open('data.json') as data_file:
    data = json.load(data_file)

# Unpack the configuration values into module-level names.
(contour_points, x_max, y_max, init_flow_len, alfa, circulation) = (
    data[key] for key in (
        "contour_points", "x_max", "y_max",
        "init_flow_len", "alfa", "circulation"))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Sends JSON-formatted tweets related to meteors to stdout in real time
Author: Geert Barentsen (geert.io)
Inspired by: http://peter-hoffmann.com/2012/simple-twitter-streaming-api-access-with-python-and-oauth.html
"""
import sys
import tweepy
import secrets
class CustomStreamListener(tweepy.StreamListener):
    """Stream listener that echoes every raw tweet payload to stdout and
    keeps the stream alive on errors and timeouts."""

    def on_data(self, data):
        # Emit the raw JSON payload as-is; downstream consumers parse it.
        print(data)
        return True

    def on_error(self, status_code):
        sys.stderr.write('Encountered error with status code: {}'.format(status_code))
        return True  # Don't kill the stream

    def on_timeout(self):
        sys.stderr.write('Timeout...')
        return True  # Don't kill the stream
if __name__ == '__main__':
    # Setup authentication
    auth = tweepy.OAuthHandler(secrets.CONSUMER_KEY, secrets.CONSUMER_SECRET)
    auth.set_access_token(secrets.ACCESS_KEY, secrets.ACCESS_SECRET)
    # Start listening to the streaming API, tracking meteor-related keywords
    # in several languages; stall_warnings surfaces falling-behind notices.
    sapi = tweepy.streaming.Stream(auth, CustomStreamListener())
    sapi.filter(track=["meteor", "meteors", "meteorite" , "meteorites",
                       "fireball", "fireballs" , "meteorwatch",
                       "meteoro", "meteoros", "meteorito", "meteoritos",
                       "метеор", "метеорит", "نيزك"],
                stall_warnings=True)
|
# -*- coding: UTF-8 -*-
import MySQLdb
def saveToMysql():
    """Read activation codes (one per line) from the 'activation_code' file
    and insert them into the `test` table as (row_number, code) rows.

    Fixes over the original:
      * parameterized query instead of string-concatenated SQL (quoting
        bugs / SQL injection);
      * explicit db.commit() — MySQLdb disables autocommit, so the inserts
        were previously never persisted;
      * `with`/`finally` so the file and connection are closed on error.
    """
    db = MySQLdb.connect(host="localhost",
                         user="",
                         passwd="",
                         db="")
    table_name = "test"
    try:
        cur = db.cursor()
        with open('activation_code', 'r') as file_in:
            for count, line in enumerate(file_in):
                # Let the driver quote/escape the value.
                cur.execute(
                    "insert into " + table_name + " values(%s, %s)",
                    (count, line))
        db.commit()
    finally:
        db.close()

if __name__ == "__main__":
    saveToMysql()
|
from django.shortcuts import render, get_object_or_404, redirect
from .models import ClubTeam
from .forms import ClubTeamForm
# Create your views here.
def club_teams(request):
    """List every club team, oldest first."""
    ordered_clubs = ClubTeam.objects.all().order_by('created_date')
    context = {'clubs': ordered_clubs}
    return render(request, 'club_team/club_team_list.html', context)
def club_detail(request, pk):
    """Show a single club team, 404-ing when the pk does not exist."""
    team = get_object_or_404(ClubTeam, pk=pk)
    return render(request, 'club_team/club_detail.html', {'club': team})
def club_new(request):
    """Create a club team: blank form on GET, save-and-redirect on valid POST."""
    if request.method != "POST":
        # GET: render an unbound form.
        form = ClubTeamForm()
        return render(request, 'club_team/club_edit.html', {'form': form})
    form = ClubTeamForm(request.POST)
    if form.is_valid():
        club = form.save(commit=False)
        club.author = request.user
        club.save()
        return redirect('club_detail', pk=club.pk)
    # Invalid POST: re-render with the bound form so errors are shown.
    return render(request, 'club_team/club_edit.html', {'form': form})
def club_edit(request, pk):
    """Edit an existing club team; 404 when the pk does not exist."""
    club = get_object_or_404(ClubTeam, pk=pk)
    if request.method != "POST":
        # GET: pre-populate the form from the existing instance.
        form = ClubTeamForm(instance=club)
        return render(request, "club_team/club_edit.html", {'form': form})
    form = ClubTeamForm(request.POST, instance=club)
    if form.is_valid():
        club = form.save(commit=False)
        club.author = request.user
        club.save()
        return redirect('club_detail', pk=club.pk)
    # Invalid POST: re-render with validation errors.
    return render(request, "club_team/club_edit.html", {'form': form})
|
"""
File for ingredient parser implemented using a linked list.
Used to make ingredients parsing faster relative to using a list to parse.
Each removal operation is O(1) instead of O(n) where n is the number of words
in the ingredient description.
Author: John Li
"""
import re
# special cases: wanted to get rid of garlic {cloves} but cloves are
# their own thing too
# Words (units, quantities, qualifiers) stripped out of ingredient text.
# Fixed: a missing comma between 'container' and 'containers' made Python
# concatenate them into the single bogus entry 'containercontainers', so
# neither word was actually stripped.
TO_STRIP = {'pinch', 'dash', 'teaspoons', 'teaspoon', 'fluid',
            'tablespoons', 'tablespoon', 'cup', 'cups', 'ounce', 'ounces',
            'pint', 'pints', 'quart', 'quarts', 'milliliter', 'milliters',
            'liter', 'liters', 'gram', 'grams', 'kilogram', 'kilograms',
            'pound', 'pounds', 'large', 'medium', 'small', 'gallon', 'gallons',
            'sprig', 'sprigs', 'ground', 'piece', 'pieces', 'of', 'to',
            'bottle', 'bottles', 'carton', 'cartons', 'slice', 'slices',
            'fresh', 'freshly', 'hard', 'boiled', 'boiling', 'chopped',
            'yolk', 'fine', 'finely', 'zest', 'diced', 'thin', 'thinly',
            'taste', 'sharp', 'inch', 'inches', 'sliced', 'with', 'generous',
            'grated', 'can', 'stick', 'a', 'few', 'dash', 'dashes',
            'really', 'sheet', 'one', 'two', 'three', 'four', 'five', 'six',
            'seven', 'eight', 'nine', 'store-bought', '-one', '-oz.',
            '-size', 'about', 'the', 'rind', '/', 'an', 'from', 'bunch',
            'bunches', 'coarsely', 'cooked', 'cubed', 'cubes', 'container',
            'containers', 'bag', 'bags', 'box', 'boxes', 'crushed', 'each',
            'plus', 'yolks', 'envelope', 'envelopes', 'garnish', 'good', 'quality',
            'good', 'half', 'handful', 'jar', 'julienned', 'leftover', 'millileter',
            'milliliters', 'minced', 'natural', 'brewed', 'other', 'part', 'parts',
            'pack', 'package', 'packages', 'packed', 'packet', 'packets',
            'peeled', 'pkg', 'pkgs', 'premium', 'prepared', 'roughly',
            'round', 'rounded', 'sauteed', 'scant', 'palmful', 'scoop', 'scoops',
            'serving', 'suggestions', 'shredded', 'splash', 'squeezed',
            'store', 'bought', 'tbs.', 'uncooked', 'very', 'your', 'favourite',
            'favorite', 'rounds', 'pkg.', 'percent', 'whites', 'sticks'}
# Tokens that cause everything after them to be dropped (e.g. "for garnish").
RM_EVERYTHING_AFTER = {'for', ','}
# Characters that split an ingredient string into word nodes.
DELIMITERS = {',', ' ', '-', '/', ':', '*'}
# Matches "and" / "or" / "and/or" separators between alternative ingredients.
MULT_INGREDIENT_DELIM = r'([/,\s]?(and|or|and/or)[/,\s]?)'
# Regex patterns for tokens to strip: integers, fractions, parentheticals,
# decimals, and number+unit abbreviations.
PATTERNS = [
    r'[0-9]+',
    r'.*/[0-9]+',
    r'\(.*\)',
    r'[0-9]*\.[0-9]+',
    r'[0-9]*(g|g\.|oz|oz\.|ml|ml\.|lb\.|lb|lbs|lbs\.|tsp|tsp\.|tbsp|tbsp\.)']
# Punctuation/whitespace trimmed from both ends of a finished ingredient.
END_STRIP = '.-,/:!*? '
# Matches an all-whitespace (possibly empty) string.
WHITE_SPACE = r'\s*'
OPEN_PAR = '('
CLOSE_PAR = ')'
class Node:
    """ Linked list node class.
    Attributes:
        data: Generic data stored in the node
        next: Reference to next node in linked list
        prev: Reference to previous node in linked list.
    """
    # Many Node instances are created per ingredient; __slots__ drops the
    # per-instance __dict__, saving memory and speeding attribute access.
    __slots__ = ('data', 'next', 'prev')

    def __init__(self, data):
        """ Constructor to initialize node data.
        Args:
            data: Data to initialize node with.
        """
        self.data = data
        self.next = None
        self.prev = None

    def __repr__(self):
        return f'Node({self.data!r})'
class IngredientParser:
    """ Linked list class to use for ingredients parsing
    Uses sentinal nodes.
    Attributes:
        _head: Reference to head node
        _tail: Reference to tail node
    """
    def __init__(self, ingredient):
        """ Constructor to initialize linked list using ingredient string
        Args:
            ingredients (string): Ingredient to parse
        """
        # Sentinel head/tail nodes (data is None) bracket the word nodes, so
        # traversal loops can stop on `curr.data != None` without None checks.
        self._head = Node(None)
        self._tail = Node(None)
        self._head.next = self._tail
        self._tail.prev = self._head
        self._size = 0
        curr = self._head
        parenthesis = False
        # each word in string is a node in the linked list
        word = []
        for char in ingredient:
            # group everything in parenthesis as one word
            if char == OPEN_PAR:
                parenthesis = True
            elif char == CLOSE_PAR:
                parenthesis = False
            # also want delimiters in list
            if not parenthesis and char in DELIMITERS:
                # could be two delimiters right after each other
                if len(word) > 0:
                    self.append(''.join(word))
                    word = []
                self.append(char)
            else:
                word.append(char)
        # Might be one more word leftover
        if len(word) > 0:
            self.append(''.join(word))

    def append(self, elem):
        """ Appends element to end of the list
        Args:
            elem: Element to append
        """
        self.insert(self._size, elem)

    def insert(self, idx, elem):
        """ Inserts element at specified index
        Args:
            elem: Element to insert
            idx: Index to insert at
        """
        # _get returns the node *before* idx; splice the new node after it.
        curr = self._get(idx)
        elem_node = Node(elem)
        elem_node.next = curr.next
        elem_node.next.prev = elem_node
        elem_node.prev = curr
        curr.next = elem_node
        self._size += 1

    def _get(self, idx):
        """ Gets node at specified index - 1
        Args:
            idx: Index of node to return
        Returns:
            The node at the specified index - 1
        """
        curr = self._head
        curr_idx = 0
        while curr_idx != idx:
            curr = curr.next
            curr_idx += 1
        return curr

    def _rm_curr_node(self, curr):
        """ Remove passed in node from list
        Args:
            curr: Node to remove
        """
        # NOTE(review): _size is not decremented here; callers rely on
        # _recount_size() afterwards (see parse()).
        curr.prev.next = curr.next
        curr.next.prev = curr.prev

    def _recount_size(self):
        """ Gets size by counting number of nodes in list
        Return:
            The number of valid nodes in the list
        """
        self._size = 0
        curr = self._head.next
        while curr.data != None:
            self._size += 1
            curr = curr.next
        return self._size

    def join_list(self):
        """ Combines strings in list
        Returns:
            List of ingredient strings, all in lowercase
        """
        curr = self._head.next
        final_product = [[]]
        while curr.data != None:
            if re.fullmatch(MULT_INGREDIENT_DELIM, curr.data):
                # "and"/"or" separator: start a new alternative ingredient.
                self._rm_curr_node(curr)
                # check if one of the optional ingredients (delimited
                # by or / and) isn't just white space
                option_ingredient = final_product[len(final_product) - 1]
                if not re.fullmatch(WHITE_SPACE, ''.join(option_ingredient)):
                    final_product.append([])
            else:
                final_product[len(final_product) - 1].append(curr.data.lower())
            curr = curr.next
        # nested join to remove extra white space
        list_of_ingredients = []
        for ingredient in final_product:
            final_ingredient = ' '.join(''.join(ingredient).split()).strip(END_STRIP)
            if not re.fullmatch(WHITE_SPACE, final_ingredient):
                list_of_ingredients.append(final_ingredient)
        return list_of_ingredients

    def join(self):
        """ Combines string in list into one whole string
        Returns:
            Combined strings from list data
        """
        curr = self._head.next
        final_product = []
        while curr.data != None:
            final_product.append(curr.data)
            curr = curr.next
        # nested join to remove extra white space
        return ' '.join(''.join(final_product).split())

    def parse(self):
        """ Parses ingredient to remove qualifiers and quantifiers.
        Returns:
            List of ingredient strings
        """
        curr = self._head.next
        # Remove unnecessary elements
        while curr.data != None:
            normalized = curr.data.lower()
            if normalized in RM_EVERYTHING_AFTER:
                # Remove curr node and everything after
                curr.prev.next = self._tail
                # save previous _tail.prev just in case we remove too much
                # and want to go back
                prev_tail = self._tail.prev
                self._tail.prev = curr.prev
                result_string = self.join()
                # if everything is whitespace, we removed too much
                if re.fullmatch(WHITE_SPACE, result_string):
                    # unremove everything
                    curr.prev.next = curr
                    self._tail.prev = prev_tail
                    # but remove current node
                    self._rm_curr_node(curr)
                    curr = curr.next
                else:
                    # Truncation kept; jump to the tail to end the loop.
                    curr = self._tail
            # else if element matches a regex pattern or is one of the words we
            # don't want, remove
            elif normalized in TO_STRIP or \
                    any(re.fullmatch(pattern, normalized) for pattern in PATTERNS):
                self._rm_curr_node(curr)
                curr = curr.next
            # otherwise we keep the word
            else:
                curr = curr.next
        # Nodes were unlinked without bookkeeping; recount before joining.
        self._recount_size()
        return self.join_list()
if __name__ == '__main__':
    # Simple REPL: read one ingredient line from stdin per iteration and
    # show it both unparsed and parsed. Loops until stdin closes
    # (input() then raises EOFError).
    while True:
        parser = IngredientParser(input())
        print(f'Unparsed: {parser.join()}')
        print(f'Parsed: {parser.parse()}')
import logic
def input_params():
    '''
    Prompt for the ODE choice, initial conditions (x0, y0), interval end
    and step size; return them as (x0, y0, x1, h, t), all floats.
    '''
    print('Выберите уравнение:\n'
          '1. y\' + 2y - x^2 = 0\n'
          '2. y\' + 5ln(x) = 0\n'
          '3. y\' + 2xy = 0')
    equation_choice = float(input())
    print('Введите начальные условия через пробел (х0 у0)')
    start_x, start_y = input().split()
    print('Введите конец отрезка')
    interval_end = float(input())
    print('Введите точность')
    step = float(input())
    return float(start_x), float(start_y), interval_end, step, equation_choice
# Script body: gather parameters, integrate with Runge-Kutta, print the
# final value and plot the computed solution.
x0, y0, x1, h, t = input_params()
y, y_vals = logic.Runge_Kutte(x0, y0, x1, h, t)
print(y)
logic.get_graph(x0, x1, y_vals)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# The above lines tell the shell to use python as interpreter when the
# script is called directly, and that this file uses utf-8 encoding,
# because of the country specific letter in my surname.
"""
Name: Christmas Tree
Author: Martin Bo Kristensen Grønholdt.
Version: 1.0 (2016-12-11)
Print a christmas tree.
"""
from random import randrange
from colorama import Fore
def stem(tree_width, stem_width):
    '''
    Print the stem of the christmas tree.
    :param tree_width: Width of the christmas tree.
    :param stem_width: Width of the christmas tree stem.
    :return: Nothing.
    '''
    # NOTE(review): the stem_width parameter is overwritten immediately
    # below, so the caller-supplied value is ignored — confirm intended.
    stem_width = int(tree_width / 7)
    stem_height = int(tree_width / 6)
    # Find the offset where the stem starts
    stem_start = int(tree_width / 2 - (stem_width / 2))
    for h in range(0, stem_height):
        # Loop until the end of the stem.
        for w in range(0, stem_start + stem_width):
            if w < stem_start:
                # Non stem position, print space.
                print(' ', end='')
            else:
                # Stem position, print #.
                print(Fore.RED + '#', end='')
        print()
def print_tree_char(dist):
    '''
    Print one character of the actual tree. Chooses randomly among the rest of
    the characters available.
    :param dist: List of (character, remaining_count) tuples; mutated in place
                 as characters are consumed.
    :return: The character that was printed.
    '''
    # Total characters still available across all types.
    count = sum([c[1] for c in dist])
    # We're at the last character.
    if count == 1:
        i = 0
        # Don't waste time, just find it.
        while dist[i][1] == 0:
            i += 1
        # And print it.  (This branch prints with a trailing newline.)
        dist[i] = (dist[i][0], dist[i][1] - 1)
        print(dist[i][0])
    # Some other character in the tree.
    else:
        # Pick a random character among the rest needed.
        choice = randrange(0, count)
        # Find the choice by distributing them among all the possible characters
        # in the tree.
        i = 0
        while choice > sum([c[1] for c in dist[0:i + 1]]):
            i += 1
        # Bad choice, all used up.
        if dist[i][1] == 0:
            # Make another choice
            return (print_tree_char(dist))
        else:
            # Print the character.
            dist[i] = (dist[i][0], dist[i][1] - 1)
            print(dist[i][0], end='')
            return (dist[i][0])
def tree(width, height, stem_width, dist):
    '''
    Print the tree.
    :param width: Width, in characters, of the widest tree row.
    :param height: Height, in characters, of the tree.
    :param stem_width: Width, in characters, of the tree.
    :param dist: Characters and the number to be used in the tree.
    :return: Dict mapping character -> how many times it was printed.
    '''
    # Get the area.
    # NOTE(review): hard-coded 12 here (not `height`), and n_chars is never
    # used afterwards — looks like leftover code; confirm before removing.
    n_chars = 0
    for i in range(0, 12):
        n_chars += i * 2 + 1
    # Dictionary of chars, and how many have been used.
    use = dict()
    # Create a dictionary to keep a record of the number of used characters.
    for char in dist:
        # Add all characters to the dictionary with 0 used.
        use[char[0]] = 0
    # Find the middle of the tree.
    tree_middle = int(width / 2)
    # Row width of the top.
    row_width = 1
    # Loop from the top down.
    for h in range(0, height):
        # New line.
        print()
        # Loop from left to right.
        for w in range(0, tree_middle + row_width):
            # Print the filling spaces before the tree.
            if w < (tree_middle - int(row_width / 2)):
                print(' ', end='')
            # Print the top.
            elif row_width == 1:
                # A star at the top.
                i = 0
                while '*' not in dist[i][0]:
                    i += 1
                # Print the character.
                dist[i] = (dist[i][0], dist[i][1] - 1)
                print(dist[i][0], end='')
                # Add to used
                use[dist[i][0]] += 1
            # Print the rows of the tree itself.
            elif w <= (tree_middle + int(row_width / 2)):
                use[print_tree_char(dist)] += 1
        # Next row is wider.
        row_width += 2
    stem(width, stem_width)
    return(use)
def main():
    '''
    Program main entry point.
    '''
    # Height of the tree, and stem width.
    height = 1
    stem_width = 3
    try:
        height = int(input('Input the height of the christmas tree: '))
    except ValueError:
        # Complain when something unexpected was entered.
        print('\nPlease use only numbers.')
        exit(1)
    # Get the number of characters in the tree.
    n_chars = 0
    for i in range(0, height):
        n_chars += i * 2 + 1
    # NOTE(review): `i` leaks from the loop above; if height == 0 this raises
    # NameError. Width is the last (widest) row's width.
    width = i * 2 + 1
    # Set number of characters for each type of decorations.
    tree_chars = [
        (Fore.MAGENTA + 'O', int((n_chars / 100) * 10)),
        (Fore.LIGHTWHITE_EX + 'i', int((n_chars / 100) * 10)),
        (Fore.LIGHTRED_EX + 'V', int((n_chars / 100) * 10)),
        (Fore.LIGHTYELLOW_EX + '*', int((n_chars / 100) * 10)),
        (Fore.GREEN + '#', int((n_chars / 100) * 60))]
    # Fix rounding errors by adding more green.
    tree_chars[4] = (tree_chars[4][0], tree_chars[4][1] + n_chars -
                     sum([c[1] for c in tree_chars]))
    # Print the tree after a new line.
    print()
    use = tree(width, height, stem_width, tree_chars)
    # Print statistics after a new line.
    print()
    total_number = 0
    total_percent = 0
    print(Fore.WHITE + ' Type |  Nr.  | Percent')
    print(Fore.WHITE + '-------------------------------')
    for char, number in use.items():
        percent = number / (n_chars / 100)
        print('   {}'.format(char) + Fore.WHITE +
              '  |{:6} | {:6.2f}%'.format(number, percent))
        total_number += number
        total_percent += percent
    print(Fore.WHITE + '-------------------------------')
    print(Fore.WHITE + '      |{:6} | {:6.2f}%'.format(total_number, total_percent))

# Run this when invoked directly
if __name__ == '__main__':
    main()
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class AHDRNet(nn.Module):
    """Attention-guided HDR merging network (AHDR).

    Three LDR/HDR exposure pairs are encoded separately; the side exposures
    (1 and 3) are gated by attention maps computed against the reference
    (middle) exposure, and the fused features pass through three dilated
    residual dense blocks before reconstruction.
    """
    def __init__(self, in_c=6, out_c=3, fc=64, growth_rate=32):
        super(AHDRNet, self).__init__()
        # Per-exposure feature extractors (input = LDR and HDR concatenated).
        self.z1 = BasicBlock(in_c, fc)
        self.z2 = BasicBlock(in_c, fc)
        self.z3 = BasicBlock(in_c, fc)
        # Attention modules for the two non-reference exposures.
        self.a1 = AttentionModule(fc)
        self.a3 = AttentionModule(fc)
        self.f0 = BasicBlock(fc*3, fc)
        self.f1 = DRDB(fc, growth_rate)
        self.f2 = DRDB(fc, growth_rate)
        self.f3 = DRDB(fc, growth_rate)
        self.f5 = BasicBlock(fc*3, fc)
        self.f6 = BasicBlock(fc, fc)
        self.f7 = nn.Conv2d(fc, out_c, 1)

    def forward(self, in_LDRs, in_HDRs):
        """Merge three exposures; expects 3-element sequences of tensors."""
        in_1 = torch.cat((in_LDRs[0], in_HDRs[0]), 1)
        in_2 = torch.cat((in_LDRs[1], in_HDRs[1]), 1)
        in_3 = torch.cat((in_LDRs[2], in_HDRs[2]), 1)
        z1 = self.z1(in_1)
        z2 = self.z2(in_2)
        z3 = self.z3(in_3)
        # Attention maps are conditioned on the reference features z2.
        a1 = self.a1(torch.cat((z1, z2), 1))
        a3 = self.a3(torch.cat((z3, z2), 1))
        z1_2 = z1 * a1
        z3_2 = z3 * a3
        zs = torch.cat((z1_2, z2, z3_2), 1)
        f0 = self.f0(zs)
        f1 = self.f1(f0)
        f2 = self.f2(f1)
        f3 = self.f3(f2)
        f4 = torch.cat((f1, f2, f3), 1)
        f5 = self.f5(f4)
        # Global residual back to the reference features.
        f6 = self.f6(f5 + z2)
        f7 = self.f7(f6)
        # Fixed: F.tanh (torch.nn.functional.tanh) is deprecated and removed
        # in newer PyTorch releases; use torch.tanh.
        return torch.tanh(f7)
class BasicBlock(nn.Module):
    """3x3 Conv2d followed by in-place ReLU; spatial size is preserved with
    the default kernel/stride/padding."""

    def __init__(self, in_c, out_c, ks=3, stride=1, padding=1):
        super(BasicBlock, self).__init__()
        layers = [
            nn.Conv2d(in_c, out_c, kernel_size=ks, stride=stride, padding=padding),
            nn.ReLU(inplace=True),
        ]
        # Keep the attribute name so existing state_dicts still load.
        self.conv_block = nn.Sequential(*layers)

    def forward(self, x):
        return self.conv_block(x)
class AttentionModule(nn.Module):
    """Two 3x3 convs producing a sigmoid attention map in [0, 1].

    Takes 2*fc input channels (two feature maps concatenated) and emits an
    fc-channel gate of the same spatial size.
    """

    def __init__(self, fc=64, ks=3, stride=1, padding=1):
        super(AttentionModule, self).__init__()
        layers = [
            nn.Conv2d(fc * 2, fc, kernel_size=ks, stride=stride, padding=padding),
            nn.ReLU(inplace=True),
            nn.Conv2d(fc, fc, kernel_size=ks, stride=stride, padding=padding),
            nn.Sigmoid(),
        ]
        # Attribute name preserved for state_dict compatibility.
        self.attention_block = nn.Sequential(*layers)

    def forward(self, x):
        return self.attention_block(x)
class DRDB(nn.Module):
    """Dilated Residual Dense Block: a chain of densely connected dilated
    convolutions, fused back down to `in_c` channels and added to the input."""

    def __init__(self, in_c, fc=32, conv_num=6):
        super(DRDB, self).__init__()
        # Each DRDB_C concatenates fc new channels onto its input, so the
        # k-th conv sees in_c + k*fc channels.
        self.convs = nn.Sequential(
            *[DRDB_C(step * fc + in_c, fc) for step in range(conv_num)])
        # Local Feature Fusion: 1x1 conv back to the input channel count.
        self.LFF = nn.Sequential(
            nn.Conv2d(conv_num*fc+in_c, in_c, kernel_size=1, stride=1, padding=0),
            nn.ReLU(inplace=True)
        )

    def forward(self, x):
        fused = self.LFF(self.convs(x))
        # Residual connection around the whole block.
        return fused + x
class DRDB_C(nn.Module):
    """One dense-block step: dilated 3x3 conv + ReLU whose output is
    concatenated onto the input along the channel axis."""

    def __init__(self, in_c, out_c=32, dialation=2):
        super(DRDB_C, self).__init__()
        # Padding equals the dilation, so spatial size is preserved.
        self.conv = nn.Sequential(
            nn.Conv2d(in_c, out_c, kernel_size=3, padding=dialation, stride=1, dilation=dialation),
            nn.ReLU(inplace=True)
        )

    def forward(self, x):
        features = self.conv(x)
        return torch.cat((x, features), 1)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: widens/redefines Image.category as a
    # nullable CharField with a fixed set of category choices.

    dependencies = [
        ('imagematch', '0002_auto_20150421_0508'),
    ]

    operations = [
        migrations.AlterField(
            model_name='image',
            name='category',
            field=models.CharField(default=None, max_length=15, null=True, help_text=b'Category of image.', choices=[(b'Interior', b'Interior'), (b'Exterior', b'Exterior'), (b'Garden', b'Garden'), (b'Land', b'Land'), (b'Map', b'Map'), (b'FloorPlan', b'FloorPlan'), (b'View', b'View'), (b'Other', b'Other')]),
        ),
    ]
|
import numpy as np
import math
class RobotKol:
    """6-DOF robot arm kinematics from a Denavit-Hartenberg table.

    dh rows are assumed [alpha, a, d, theta] (see the matrix built in FK);
    Uzunluk holds the link lengths used by IK.
    """

    def __init__(self, DH_Parametresi, UzuvUzunluklari):
        self.dh = DH_Parametresi
        self.Uzunluk = np.array(UzuvUzunluklari)
        self.EklemSayisi = len(self.dh)  # number of joints
        self.TransferFonksiyonlari = []  # per-joint homogeneous transforms

    def FK(self):
        """Return the 4x4 base-to-tool transform for the current DH table.

        Fixed: the transform list is rebuilt from scratch on every call —
        previously each call appended another full set of joint transforms
        to self.TransferFonksiyonlari, corrupting repeated FK evaluations.
        The chain product now starts from the identity, so any joint count
        >= 1 works (the old code indexed element [1] unconditionally).
        """
        self.TransferFonksiyonlari = []
        for i in range(self.EklemSayisi):
            T = np.array([[np.cos(self.dh[i][3]), -np.sin(self.dh[i][3]), 0, self.dh[i][1]],
                          [np.sin(self.dh[i][3]) * np.cos(self.dh[i][0]), np.cos(self.dh[i][3]) * np.cos(self.dh[i][0]),
                           -np.sin(self.dh[i][0]),
                           -(self.dh[i][2] * np.sin(self.dh[i][0]))],
                          [np.sin(self.dh[i][3]) * np.sin(self.dh[i][0]), np.cos(self.dh[i][3]) * np.sin(self.dh[i][0]),
                           np.cos(self.dh[i][0]), np.cos(self.dh[i][0]) * self.dh[i][2]],
                          [0, 0, 0, 1]])
            self.TransferFonksiyonlari.append(T)
        Carpim = np.eye(4)
        for T in self.TransferFonksiyonlari:
            Carpim = np.dot(Carpim, T)
        return Carpim

    def IK(self, alfa, beta, gama, px, py, pz):
        """Inverse kinematics for a ZYZ-Euler tool pose at (px, py, pz).

        Returns [q1..q6] joint angles, or all zeros when the pose is
        unreachable (acos domain error / degenerate geometry).
        """
        R_zyz = self.ZYZEuler(alfa, beta, gama, px, py, pz)
        merkezKonum = self.MerkezKonumBul(R_zyz, self.Uzunluk[5], self.dh[5][1])
        a1, a2, a3, a4, a5 = self.Uzunluk[0], self.Uzunluk[1], self.Uzunluk[2], self.Uzunluk[3], self.Uzunluk[4]
        q1 = math.atan2(merkezKonum[1], merkezKonum[0])
        # Branch on whether q1 lies inside the +/-170 degree joint range.
        if 170 * (math.pi / 180) >= q1 >= -170 * (math.pi / 180):
            _px = math.sqrt(merkezKonum[0] ** 2 + merkezKonum[1] ** 2) - a2
        else:
            _px = math.sqrt(merkezKonum[0] ** 2 + merkezKonum[1] ** 2) + a2
        if merkezKonum[2] >= 0:
            _pz = abs(merkezKonum[2] - a1)
        else:
            _pz = abs(merkezKonum[2]) + a1
        try:
            k = math.sqrt((_pz ** 2 + _px ** 2))
            L = math.sqrt((a5 ** 2 + a4 ** 2))
            a = math.atan2(_pz, _px)
            c = math.atan2(a5, a4)
            b = math.acos((a3 ** 2 + L ** 2 - k ** 2) / (2 * a3 * L))
            d = math.acos((a3 ** 2 + k ** 2 - L ** 2) / (2 * a3 * k))
            if 170 * (math.pi / 180) >= q1 >= -170 * (math.pi / 180):
                q3 = c + b - math.pi
            else:
                q3 = c - b + math.pi
            if merkezKonum[2] >= a1:
                if 170 * (math.pi / 180) >= q1 >= -170 * (math.pi / 180):
                    q2 = d + a - (math.pi / 2)
                else:
                    q2 = (math.pi / 2) - a - d
                    q1 = q1 - (180 * (math.pi / 180)) if q1 > 0 else q1 + (180 * (math.pi / 180))
            else:
                if 170 * (math.pi / 180) >= q1 >= -170 * (math.pi / 180):
                    q2 = d - (math.pi / 2) - a
                else:
                    q2 = (math.pi / 2) + a - d
                    q1 = q1 - (180 * (math.pi / 180)) if q1 > 0 else q1 + (180 * (math.pi / 180))
            # Wrist orientation: factor the remaining rotation R3_6.
            T0_3 = self.T03Matrisi(q1, q2, q3, a1, a2, a3)
            tT0_3 = self.TersMatrix(T0_3)
            R3_6 = np.dot(tT0_3, R_zyz)
            q5 = math.acos(-R3_6[1][2])
            q4 = math.atan2(R3_6[2][2], R3_6[0][2])
            q6 = math.atan2(-R3_6[1][1], R3_6[1][0])
            return [q1, q2, q3, q4, q5, q6]
        except (ValueError, ZeroDivisionError):
            # acos out of [-1, 1] or zero link length: pose unreachable.
            # (Was a bare `except:` which also swallowed programming errors.)
            return [0, 0, 0, 0, 0, 0]

    @staticmethod
    def T03Matrisi(q1, q2, q3, a1, a2, a3):
        """Homogeneous transform from base to joint 3 for angles q1..q3."""
        return [[-(np.cos(q1) * np.sin(q2) * np.cos(q3)) - (np.cos(q1) * np.cos(q2) * np.sin(q3)),
                 np.cos(q1) * np.sin(q2) * np.sin(q3) - (np.cos(q1) * np.cos(q2) * np.cos(q3)),
                 np.sin(q1), -(np.cos(q1) * np.sin(q2) * a3) + np.cos(q1) * a2],
                [-(np.sin(q1) * np.sin(q2) * np.cos(q3)) - (np.sin(q1) * np.cos(q2) * np.sin(q3)),
                 np.sin(q1) * np.sin(q2) * np.sin(q3) - (np.sin(q1) * np.cos(q2) * np.cos(q3)),
                 -np.cos(q1), -(np.sin(q1) * np.sin(q2) * a3) + np.sin(q1) * a2],
                [np.cos(q2) * np.cos(q3) - (np.sin(q2) * np.sin(q3)),
                 -(np.cos(q2) * np.sin(q3)) - (np.sin(q2) * np.cos(q3)), 0, np.cos(q2) * a3 + a1],
                [0, 0, 0, 1]]

    @staticmethod
    def MerkezKonumBul(Matrix, d6, d7=0):
        """Back off the tool position along the approach axis to find the
        wrist-center position (optionally also along the X axis by d7)."""
        P_6 = [[Matrix[0][3]], [Matrix[1][3]], [Matrix[2][3]], [1]]
        a_6 = [[Matrix[0][2]], [Matrix[1][2]], [Matrix[2][2]], [0]]  # Z axis of T06
        P4 = np.subtract(P_6, np.dot(d6, a_6))
        if d7 != 0:
            a_7 = [[Matrix[0][0]], [Matrix[1][0]], [Matrix[2][0]], [0]]  # X axis of T06
            P4 = P4 - np.dot(d7, a_7)
        return P4

    @staticmethod
    def ZYZEuler(alfa, beta, gama, px, py, pz):
        """Homogeneous transform from ZYZ Euler angles plus a position."""
        R_zyz = [
            [np.cos(alfa) * np.cos(beta) * np.cos(gama) - (np.sin(alfa) * np.sin(gama)),
             -(np.cos(alfa) * np.cos(beta) * np.sin(gama)) - (np.sin(alfa) * np.cos(gama)), np.cos(alfa) * np.sin(beta),
             px],
            [np.sin(alfa) * np.cos(beta) * np.cos(gama) + np.cos(alfa) * np.sin(gama),
             -(np.sin(alfa) * np.cos(beta) * np.sin(gama)) + np.cos(alfa) * np.cos(gama),
             np.sin(alfa) * np.sin(beta), py],
            [-(np.sin(beta) * np.cos(gama)), np.sin(beta) * np.sin(gama), np.cos(beta), pz], [0, 0, 0, 1]]
        return R_zyz

    @staticmethod
    def TersMatrix(matrix):
        """Invert a homogeneous transform: transpose the rotation and
        rotate/negate the translation."""
        rr = [[matrix[0][0], matrix[1][0], matrix[2][0]],
              [matrix[0][1], matrix[1][1], matrix[2][1]],
              [matrix[0][2], matrix[1][2], matrix[2][2]]]
        _rr = np.dot(-1, rr)
        pp = np.dot(_rr, [[matrix[0][3]], [matrix[1][3]], [matrix[2][3]]])
        return [[rr[0][0], rr[0][1], rr[0][2], pp[0]],
                [rr[1][0], rr[1][1], rr[1][2], pp[1]],
                [rr[2][0], rr[2][1], rr[2][2], pp[2]],
                [0, 0, 0, 1]]
def main():
    '''Demo: run IK for a target pose, feed the resulting joint angles back
    into a fresh DH table, and print the FK result plus angles in degrees.'''
    # Link lengths (mm) of the demo arm.
    a1, a2, a3, a4, a5, a6 = 328, 40.2, 445, 39.95, 438.35, 164.2
    # a1, a2, a3, a4, a5, a6 = 111, 14, 120, 14, 116, 80
    # DH table rows: [alpha, a, d, theta] with theta initially zero.
    PT = [[0, 0, a1, 0],
          [np.pi / 2, a2, 0, np.pi / 2],
          [0, a3, 0, 0],
          [np.pi / 2, a4, a5, 0],
          [-np.pi / 2, 0, 0, 0],
          [np.pi / 2, 0, a6, 0]]
    uzuvlar = [a1, a2, a3, a4, a5, a6]
    robot = RobotKol(PT, uzuvlar)
    # Solve IK for a ZYZ pose; plug the joint angles into theta of each row.
    ik = robot.IK(0, np.pi / 2, np.pi, 6.42750000e+02, 0, 8.12950000e+02)
    PT2 = [[0, 0, a1, ik[0]],
           [np.pi / 2, a2, 0, ik[1] + np.pi / 2],
           [0, a3, 0, ik[2]],
           [np.pi / 2, a4, a5, ik[3]],
           [-np.pi / 2, 0, 0, ik[4]],
           [np.pi / 2, 0, a6, ik[5]]]
    robot1 = RobotKol(PT2, uzuvlar)
    sonuc = robot1.FK()
    print("-" * 50)
    print(np.matrix(sonuc))
    print("-" * 50)
    # Report the joint angles in degrees.
    for i in range(len(ik)):
        print(f"Q{i + 1} :", end=" ")
        print(ik[i] * (180 / np.pi))

if __name__ == '__main__':
    main()
|
from django.shortcuts import render
from app.models import TrendRepo
from django.views import generic
# Create your views here.
def index(request):
    """Render all trending repositories."""
    repos = TrendRepo.objects.all()
    return render(request, 'app/index.html', {'trend_list': repos})
def detail(request, pk):
    """Render a single trending repository looked up by primary key."""
    repo = TrendRepo.objects.get(id=pk)
    return render(request, 'app/detail.html', {'trend': repo})
|
#!/usr/bin/env python
"""
_New_
MySQL implementation of Masks.New
"""
__all__ = []
import logging
from WMCore.Database.DBFormatter import DBFormatter
class New(DBFormatter):
    """MySQL implementation of Masks.New: inserts wmbs_job_mask rows either
    in bulk (jobList, full mask fields) or for a single job id."""

    plainsql = """INSERT INTO wmbs_job_mask (job, inclusivemask) VALUES (:jobid, :inclusivemask)"""
    sql = """INSERT INTO wmbs_job_mask (job, firstevent, lastevent, firstrun, lastrun, firstlumi, lastlumi, inclusivemask) VALUES
             (:jobid, :firstevent, :lastevent, :firstrun, :lastrun, :firstlumi, :lastlumi, :inclusivemask)"""

    def format(self, result):
        """Inserts produce no rows to format; just signal success."""
        return True

    def getDictBinds(self, jobList, inclusivemask = True):
        """Build one bind dictionary per job from its 'mask' fields."""
        binds = []
        for job in jobList:
            binds.append({'jobid': job['id'], 'inclusivemask': inclusivemask,
                          'firstevent': job['mask']['FirstEvent'],
                          'lastevent': job['mask']['LastEvent'],
                          'firstrun': job['mask']['FirstRun'],
                          'lastrun': job['mask']['LastRun'],
                          'firstlumi': job['mask']['FirstLumi'],
                          'lastlumi': job['mask']['LastLumi'],})
        return binds

    def execute(self, jobid = None, inclusivemask = None, conn = None,
                transaction = False, jobList = None):
        """Insert mask rows; prefers jobList (full mask) over a lone jobid.

        Logs an error and returns None when neither is supplied.
        """
        if jobList:
            binds = self.getDictBinds(jobList, inclusivemask)
            result = self.dbi.processData(self.sql, binds, conn = conn, transaction = transaction)
            return self.format(result)
        elif jobid:
            # Fixed idiom: compare with None using `is`, not `==` (PEP 8).
            if inclusivemask is None:
                binds = self.getBinds(jobid = jobid, inclusivemask=True)
            else:
                binds = self.getBinds(jobid = jobid, inclusivemask = inclusivemask)
            result = self.dbi.processData(self.plainsql, binds, conn = conn,
                                          transaction = transaction)
            return self.format(result)
        else:
            logging.error('Masks.New asked to create Mask with no Job ID')
            return
|
'''
This module defines the behaviour of a client in your Chat Application
'''
import sys
import getopt
import socket
import random
from threading import Thread
import os
import util
import time
import select
import re
'''
Write your code inside this class.
In the start() function, you will read user-input and act accordingly.
receive_handler() function is running another thread and you have to listen
for incoming messages in this function.
'''
def helper():
    '''
    Print command-line usage for the Client module.
    '''
    usage_lines = (
        "Client",
        "-u username | --user=username The username of Client",
        "-p PORT | --port=PORT The server port, defaults to 15000",
        "-a ADDRESS | --address=ADDRESS The server ip or hostname, defaults to localhost",
        "-w WINDOW_SIZE | --window=WINDOW_SIZE The window_size, defaults to 3",
        "-h | --help Print this help",
    )
    # One write; '\n'.join + print yields the same stdout as the per-line prints.
    print("\n".join(usage_lines))
class Client:
    '''
    Main chat client: sends user commands to the server over UDP and
    processes server responses on a background thread.
    '''
    def __init__(self, username, dest, port, window_size):
        self.server_addr = dest
        self.server_port = port
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.settimeout(None)
        # Bind to a random ephemeral local port.
        self.sock.bind(('', random.randint(10000, 40000)))
        self.name = username
        self.window = window_size
        # Message-type numbers used by the wire protocol.
        self.dict={'disconnect':1,'join':1,'request_list':2,'msg':3,'send_file':4}

    def start(self):
        '''
        Main Loop is here
        Start by sending the server a JOIN message.
        Waits for userinput and then process it
        '''
        #Join
        string = f"{self.name}"
        msg=util.make_message('join',self.dict['join'],string)
        packet=util.make_packet('data',0,msg)
        self.sock.sendto(packet.encode('utf-8'),(self.server_addr,self.server_port))
        while True:
            inp=str(input(''))
            if 'quit' == inp:
                s='quitting'
                print(s)
                sys.stdout.flush()
                inp = str(inp.replace('msg',''))
                msg=util.make_message('disconnect',self.dict['disconnect'],inp)
                packet=util.make_packet('data',0,msg)
                self.sock.sendto(packet.encode('utf-8'),(self.server_addr,self.server_port))
                self.sock.close()
                os._exit(1)
            elif 'list'==inp:
                inp=inp.replace('list','')
                msg=util.make_message('request_list',self.dict['request_list'],inp)
                packet=util.make_packet('data',0,msg)
                self.sock.sendto(packet.encode('utf-8'),(self.server_addr,self.server_port))
            elif 'msg' in inp:
                inp=inp.replace('msg','')
                msg=util.make_message('msg',self.dict['msg'],inp)
                packet=util.make_packet('data',0,msg)
                self.sock.sendto(packet.encode('utf-8'),(self.server_addr,self.server_port))
            elif 'file' in inp:
                file=inp.split(' ')[-1]
                try:
                    with open(file,'r') as f:
                        data=f.read()
                except OSError:
                    # Fixed: previously this printed and fell through, then
                    # used the undefined `data` (NameError). Skip the command.
                    print('file not found')
                    continue
                # Escape spaces/newlines so the payload survives the
                # space-delimited wire format.
                data=data.replace(' ','[s]').replace('\n','[nl]')
                string =" ".join(inp.split()[1:])
                string=string + ' ' + data
                msg=util.make_message('send_file',self.dict['send_file'],string)
                packet=util.make_packet('data',0,msg)
                self.sock.sendto(packet.encode('utf-8'),(self.server_addr,self.server_port))
            else:
                print(f'incorrect userinput format')

    def receive_handler(self):
        '''
        Waits for a message from server and process it accordingly
        '''
        while True:
            packet=self.sock.recv(util.CHUNK_SIZE)
            packet=packet.decode('utf-8')
            message=util.parse_packet(packet)[2]
            msg=message.split(' ')
            if 'ERR_SERVER_FULL' in msg[0]:
                print(msg[2])
                self.sock.close()
                os._exit(1)
            elif 'ERR_USERNAME_UNAVAILABLE' in msg[0]:
                print(msg[2])
                self.sock.close()
                os._exit(1)
            elif 'response_list' in msg[0]:
                msg1=msg[2].replace('$'," ")
                print(f"list: {msg1}")
            elif 'FORWARD' in msg[0]:
                msg1=' '.join(msg[2:])
                msg1=msg1.replace('  ',' ')
                print(msg1)
            elif 'DISCONNECT' in msg[0]:
                print('disconnected: server received a unknown command')
                self.sock.close()
                os._exit(1)
            elif 'forward_file' in msg[0]:
                # Persist the forwarded file as '<sender>_<filename>',
                # undoing the space/newline escaping applied on send.
                client = msg[2]
                file=msg[3]
                data=msg[4].replace('[s]',' ').replace('[nl]','\n')
                with open(f'{client}_{file}','w+') as f:
                    f.write(data)
                    f.close()
                print(f'file: {client}: {file}')
#raise NotImplementedError
# Do not change this part of code
if __name__ == "__main__":
try:
OPTS, ARGS = getopt.getopt(sys.argv[1:],
"u:p:a:w", ["user=", "port=", "address=","window="])
except getopt.error:
helper()
exit(1)
PORT = 15000
DEST = "localhost"
USER_NAME = None
WINDOW_SIZE = 3
for o, a in OPTS:
if o in ("-u", "--user="):
USER_NAME = a
elif o in ("-p", "--port="):
PORT = int(a)
elif o in ("-a", "--address="):
DEST = a
elif o in ("-w", "--window="):
WINDOW_SIZE = a
if USER_NAME is None:
print("Missing Username.")
helper()
exit(1)
S = Client(USER_NAME, DEST, PORT, WINDOW_SIZE)
try:
# Start receiving Messages
T = Thread(target=S.receive_handler)
T.daemon = True
T.start()
# Start Client
S.start()
#T.join()
except (KeyboardInterrupt, SystemExit):
sys.exit()
|
# Expose the user-registration viewset through a DRF DefaultRouter.
from rest_framework.routers import DefaultRouter
from .views import UserRegistrationView
router = DefaultRouter()
# basename is supplied explicitly (required when the viewset defines no queryset)
router.register(r'register', UserRegistrationView, basename='user_register')
urlpatterns = router.urls
#! /usr/bin/python
import time
import re
import datetime
# non stdlib external
import wikitools
class WikiHandler():
    """Anti-spam helper wrapping a wikitools.Wiki connection.

    Supports polling recent changes/log events, reading page text, and
    cleaning up after spammers: deleting pages they created, rolling back
    their edits, and blocking the account.
    """

    def __init__(self, apiURL, username, password):
        self.apiURL = apiURL
        self.username = username
        self.password = password
        # start polling from "now" so old history is not replayed on startup
        self.lastTimestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

    def connect(self):
        """Open the wiki API connection and authenticate."""
        self.wiki = wikitools.Wiki(self.apiURL)
        self.wiki.login(self.username, self.password)

    def recentChanges(self):
        """Return one formatted message per change since the last poll."""
        if not self.lastTimestamp:
            self.lastTimestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        params = {'action' : 'query',
                  'list' : 'recentchanges|logevents',
                  'rclimit': '50',
                  'rcstart': self.lastTimestamp,
                  'rcdir' : 'newer',
                  'rcprop' : 'user|timestamp|title|comment',
                  'lelimit': '50',
                  'lestart': self.lastTimestamp,
                  'ledir' : 'newer',
                  'leprop' : 'user|timestamp|title|comment|type'
                  }
        response = wikitools.APIRequest(self.wiki, params).query(querycontinue = False)
        recentChanges = response['query']['recentchanges'] + response['query']['logevents']
        recentChanges = sorted(recentChanges, key = lambda x: x['timestamp'])
        if recentChanges:
            # advance the cursor one second past the newest event so the
            # same events are not reported twice on the next poll
            self.lastTimestamp = self.newTimeStamp(recentChanges[-1]['timestamp'])
        return [self.makeMessage(change) for change in recentChanges]

    def pageText(self, title):
        """Return the wikitext of *title*, or None if the page is missing."""
        try:
            # bug fix: Page() takes the wiki connection first, as every
            # other call site in this class does; the old code passed only
            # the title
            page = wikitools.Page(self.wiki, title)
            response = page.getWikiText()
            return response
        except wikitools.page.NoPage:
            return None

    def userEdits(self, username):
        """Return the user's contributions (up to 1000, oldest first)."""
        params = {'action' : 'query',
                  'list' : 'usercontribs',
                  'ucuser' : username,
                  'uclimit': 1000,
                  'ucdir' : 'newer'
                  }
        response = wikitools.APIRequest(self.wiki, params).query(querycontinue = False)
        return response['query']['usercontribs']

    def deletePage(self, title, reason = 'spam', *args):
        """Delete a page; any extra args are appended to the reason text."""
        if args:
            reason = "{} {}".format(reason, ' '.join(args))
        page = wikitools.Page(self.wiki, title)
        page.delete(reason)
        return "Page {pageName} deleted".format(pageName = title)

    def removeAllChanges(self, user):
        """Undo everything *user* did: delete pages they created, roll back the rest."""
        edits = self.userEdits(user)
        fixedTitles = []
        for edit in edits:
            # handle each title only once, even when the user edited it repeatedly
            if edit['title'] not in fixedTitles:
                fixedTitles.append(edit['title'])
                if 'new' in edit.keys():
                    self.deletePage(edit['title'])
                else:
                    self.revertPage(user, edit['title'])

    def revertPage(self, user, title, reason = 'spam'):
        """Roll back the user's edits on *title* (marked as a bot action)."""
        page = wikitools.Page(self.wiki, title)
        rollbackToken = self.getRollbackToken(page.pageid)
        if rollbackToken:
            params = {'action' : 'rollback',
                      'title' : title,
                      'user' : user,
                      'token' : rollbackToken,
                      'markbot': 1
                      }
            response = wikitools.APIRequest(self.wiki, params).query(querycontinue = False)
            return "Page {} reverted to previous edit".format(title)

    def getRollbackToken(self, pageID):
        """Fetch a rollback token for a page, or False if unavailable."""
        params = {'action' : 'query',
                  'prop' : 'revisions',
                  # bug fix: revision queries use the 'rv' prefix; the old
                  # 'rclimit' key belongs to list=recentchanges and was ignored
                  'rvlimit': 1,
                  'pageids' : pageID,
                  'rvtoken': 'rollback'
                  }
        response = wikitools.APIRequest(self.wiki, params).query(querycontinue = False)
        try:
            return response['query']['pages'][str(pageID)]['revisions'][0]['rollbacktoken']
        except KeyError:
            return False

    def blockUser(self, user, reason = 'spambot', *args):
        """Permanently block *user* (autoblock, account creation disabled)."""
        if args:
            reason = "{} {}".format(reason, ' '.join(args))
        userObject = wikitools.User(self.wiki, user)
        userObject.block(reason = reason, autoblock = True, expiry = 'never', nocreate = True)
        return "User {username} blocked".format(username = user)

    def blockAndRemovePages(self, user):
        """Convenience: wipe the user's changes, then block them."""
        self.removeAllChanges(user)
        self.blockUser(user)
        return "User {username} blocked, pages deleted".format(username = user)

    def newTimeStamp(self, oldTimestamp):
        """Return *oldTimestamp* (MediaWiki ISO format) advanced by one second."""
        oldTime = datetime.datetime.strptime(oldTimestamp, "%Y-%m-%dT%H:%M:%SZ")
        newTime = oldTime + datetime.timedelta(seconds = 1)
        return datetime.datetime.strftime(newTime, "%Y-%m-%dT%H:%M:%SZ")

    def makeMessage(self, change):
        """Render one recentchanges/logevents record as a one-line message."""
        message = None
        for key in change:
            try:
                # best-effort UTF-8 byte coercion (Python 2 era); values
                # without .encode are left untouched
                change[key] = change[key].encode('UTF-8', 'ignore')
            except Exception:
                pass
        if not change['comment']:
            change['comment'] = '-'
        if change['type'] == 'new':
            message = "%s made new page titled %s with comment: %s" % (change['user'], change['title'], change['comment'])
        if change['type'] == 'edit':
            message = "%s edited %s with comment: %s" % (change['user'], change['title'], change['comment'])
        if change['type'] == 'newusers':
            message = "New user: %s" % change['user']
        if change['type'] == 'block':
            message = '%s blocked user %s with comment %s' % (change['user'], change['title'].split(":", 1)[1],
                                                              change['comment'])
        if change['type'] == 'delete':
            message = '%s deleted page %s with comment %s' % (change['user'], change['title'], change['comment'])
        return message if message else None
|
from os import environ
from flask import Flask, jsonify, request, make_response
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.sql import func
from flask_migrate import Migrate
from flask_cors import CORS
import boto3
from datetime import datetime
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = environ.get("dbURL")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db = SQLAlchemy(app)
migrate = Migrate(app, db)
CORS(app)
# -> POST order
# -> PATCH delivery status
# -> GET by OrderID
class Order(db.Model):
    """One line item of an invoice: a product, its quantity and unit price."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # owning invoice (Order_invoice.id)
    invoice_id = db.Column(
        db.Integer, db.ForeignKey("order_invoice.id"), nullable=False
    )
    customer_id = db.Column(db.Integer, nullable=False)
    product_id = db.Column(db.String(120), nullable=False)
    product_name = db.Column(db.String(64), nullable=False)
    product_image = db.Column(db.String(64), nullable=False)
    quantity = db.Column(db.Integer, nullable=False)
    price = db.Column(db.Float(6, 2), nullable=False)
    created_at = db.Column(db.DateTime, server_default=func.now(), nullable=False)
    def __init__(
        self,
        id,
        invoice_id,
        customer_id,
        product_id,
        product_name,
        product_image,
        quantity,
        price,
        created_at,
    ):
        # id is passed as None by callers so the DB autoincrements it
        self.id = id
        self.invoice_id = invoice_id
        self.customer_id = customer_id
        self.product_id = product_id
        self.product_name = product_name
        self.product_image = product_image
        self.quantity = quantity
        self.price = price
        self.created_at = created_at
    def json(self):
        """Serializable dict view of this row, used in API responses."""
        return {
            "id": self.id,
            "invoice_id": self.invoice_id,
            "customer_id": self.customer_id,
            "product_id": self.product_id,
            "product_name": self.product_name,
            "product_image": self.product_image,
            "quantity": self.quantity,
            "price": self.price,
            "created_at": self.created_at,
        }
class Order_invoice(db.Model):
    """Invoice header: one per checkout, owning a set of Order line items."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    total_amount = db.Column(db.Integer, nullable=False)
    customer_id = db.Column(db.Integer, nullable=False)
    # free-text workflow state, e.g. "Pending" / "delivered"
    delivery_status = db.Column(db.String(200), nullable=False)
    # line items whose Order.invoice_id points at this invoice
    Orders = db.relationship("Order", backref="orders", lazy=True)
    def __init__(self, id, total_amount, customer_id, delivery_status):
        # id is passed as None by callers so the DB autoincrements it
        self.id = id
        self.total_amount = total_amount
        self.customer_id = customer_id
        self.delivery_status = delivery_status
    def json(self):
        """Serializable dict view of this invoice, used in API responses."""
        return {
            "id": self.id,
            "customer_id": self.customer_id,
            "total_amount": self.total_amount,
            "delivery_status": self.delivery_status,
        }
url_route = "/api"
@app.route("/")
def test():
    """Health-check endpoint: confirms the ORDER service is reachable."""
    payload = {"status": "success", "message": "ORDER Service is working"}
    resp = make_response(jsonify(payload), 200)
    resp.headers["Content-Type"] = "application/json"
    return resp
@app.route(url_route + "/create_order", methods=["POST"])
def create_order():
    """Create an invoice plus one Order row per cart item.

    Expects JSON: {"cart": [...], "customer_id": ..., "total_amount": ...}
    (total is computed in the front end and passed over).

    Fix vs. the old version: the invoice is flushed (not committed) so its
    generated id is available, and everything is committed in one
    transaction; any failure rolls the whole thing back, so a failed cart
    item can no longer leave an orphaned invoice behind.
    """
    order_data = request.get_json()
    cart = order_data["cart"]
    customer_id = order_data["customer_id"]
    total_amount = order_data["total_amount"]
    invoice = Order_invoice(None, total_amount, customer_id, "Pending")
    try:
        db.session.add(invoice)
        # flush assigns invoice.id without ending the transaction
        db.session.flush()
        invoice_id = invoice.id
    except Exception:
        db.session.rollback()
        return jsonify(
            {
                "status": "fail",
                "message": "An error occurred when creating order invoice.",
            }
        )
    try:
        for item in cart:
            db.session.add(
                Order(
                    None,
                    invoice_id,
                    customer_id,
                    item["pId"],
                    item["pName"],
                    item["pImg"],
                    item["quantity"],
                    item["price"],
                    datetime.now(),
                )
            )
        db.session.commit()
    except Exception:
        # undo the invoice and any partially-added order rows
        db.session.rollback()
        return jsonify(
            {
                "status": "fail",
                "message": "An error occurred when creating order",
            }
        )
    return jsonify({"status": "success", "message": invoice.json()})
@app.route(url_route + "/invoice/<int:id>", methods=["GET"])
def get_invoice(id):
    """Return a single order invoice looked up by primary key."""
    found = Order_invoice.query.filter_by(id=id).first()
    if found is None:
        return jsonify({"status": "fail", "message": "Invoice not found."})
    return jsonify({"status": "success", "data": found.json()})
@app.route(url_route + "/invoices/<int:customer_id>", methods=["GET"])
def get_customer_invoices(customer_id):
    """List every invoice belonging to one customer."""
    rows = Order_invoice.query.filter_by(customer_id=customer_id).all()
    if not rows:
        return jsonify({"status": "fail", "message": "There are no invoices."})
    payload = {"invoices": [row.json() for row in rows]}
    return jsonify({"status": "success", "data": payload})
@app.route(url_route + "/invoices", methods=["GET"])
def get_all_invoices():
    """List every invoice in the system."""
    rows = Order_invoice.query.all()
    if not rows:
        return jsonify({"status": "fail", "message": "There are no invoices."})
    payload = {"invoices": [row.json() for row in rows]}
    return jsonify({"status": "success", "data": payload})
# -> PATCH delivery status
@app.route(url_route + "/invoice/<int:id>/delivery_status", methods=["PATCH"])
def update_delivery_status(id):
    """Mark invoice *id* as delivered.

    Fix vs. the old version: a missing invoice now yields a fail response
    instead of an AttributeError on None, and the bare except is narrowed.
    """
    invoice = Order_invoice.query.filter_by(id=id).first()
    if invoice is None:
        response = make_response(
            jsonify({"status": "fail", "message": "Invoice not found."})
        )
        response.headers["Content-Type"] = "application/json"
        return response
    invoice.delivery_status = "delivered"
    try:
        db.session.commit()
        response = make_response(jsonify({"status": "success", "data": invoice.json()}))
    except Exception:
        db.session.rollback()
        response = make_response(
            jsonify(
                {
                    "status": "fail",
                    "message": "An error occurred when updating delivery status",
                }
            )
        )
    finally:
        db.session.close()
    response.headers["Content-Type"] = "application/json"
    return response
from numpy import *
from glue.ligolw import utils, ligolw, lsctables
from glue.lal import LIGOTimeGPS
from glue import lal
from glue import segments as seg
import os
from optparse import OptionParser
from gwpy.timeseries import TimeSeries
from glue import datafind
# Command-line interface: a GPS time range plus the channel to scan.
parser = OptionParser(
    version = "Name: Overflow Trigger Generator",
    usage = "%prog --gps-start-time --gps-end-time --channel ",
    description = "Makes triggers for overflows from a single channel"
)
parser.add_option("-s", "--gps-start-time", metavar = "gps_start_time", help = "Start of GPS time range")
parser.add_option("-e", "--gps-end-time", metavar = "gps_end_time", help = "End of GPS time range")
parser.add_option("-c", "--channel", metavar = "channel", help="Channel name.")
args, others = parser.parse_args()
channel = args.channel
gps_start = int(args.gps_start_time)
gps_end = int(args.gps_end_time)
# Hanford raw frame type is hard-coded
ifo = 'H'
frames = 'H1_R'
# generate frame cache and connection
connection = datafind.GWDataFindHTTPConnection()
cache = connection.find_frame_urls(ifo, frames, gps_start, gps_end, urltype='file')
# read in master list that contains ADC model numbers, names, and cumulative status
model_list = []
model_read = open("/home/tjmassin/adc_overflow_condor/master/h1_model_info.txt")
for line in model_read.readlines():
    model_list.append(map(str,line.split()))
# pick off ndcuid list to enable direct searching of channels for overflow status
ndcuid_list = []
for item in model_list:
    ndcuid_list.append(item[2])
# functions to find the start of overflow segments
# if x and y are equal in value and z jumps, we'll pull off the timestamp attached to z
# and record that as the beginning of a segment
def startCumuSegTest(x,y,z):
    """Return True when a cumulative-overflow segment starts at z.

    A start is flagged when two consecutive samples are equal (x == y)
    and the third jumps upward (z > y) -- a cumulative counter increment.
    The chained comparison replaces the old if/else-returning-booleans.
    """
    return x == y < z
# for non-cumulative overflows, we mark every time we see a change in channel value
# from here we will create one-second segments every time we see a change and then perform
# a coalesce at the end to find the overall start/end points of overflowing times
def startSegTest(x,y,z):
    """Return True when a non-cumulative overflow transition occurs at z.

    Flags the sample where a stable value (x == y) changes to a non-zero
    value z.  The boolean expression replaces the old if/else-returning-
    booleans.
    """
    return x == y != z and z != 0
# check if an overflow channel is cumulative by crosschecking ndcuid_list and model_list
# model list lines are recorded as <model_name.mdl> <cumulative status> <ndcuid=num>
def checkCumulative(chan_name,model_list,ndcuid_list):
    """True when the channel's front-end model is flagged 'cumulative'.

    Model rows look like [model_name, cumulative_status, 'ndcuid=N']; the
    ndcuid is embedded in the channel name between '-' and '_'.
    """
    name = str(chan_name)
    uid = 'ndcuid=' + name[name.find('-') + 1:name.find('_')]
    row = model_list[ndcuid_list.index(uid)]
    return row[1] == 'cumulative'
# pull the channel's samples for the requested span out of the frame cache
data=TimeSeries.read(cache, channel, start=gps_start, end=gps_end)
# one timestamp per sample; a 16 Hz channel rate is assumed here -- TODO confirm
time_vec = linspace(gps_start,gps_end,(gps_end - gps_start)*16,endpoint=False)
'''
We are interested in times when the channels switch from a normal state to an overflowing
state or vice versa. We're not checking the first and last data point of each set because it's not
possible to tell whether or not a channel has just started overflowing at our first data
point or if it had been overflowing beforehand.
This big loop will test every data point (that isn't an endpoint) and record it in the
trigger vector if it's an overflow transition.
'''
trig_segs = seg.segmentlist()
if checkCumulative(channel,model_list,ndcuid_list):
    for j in arange(size(data,0)):
        if (0 < j < (size(data,0) - 1)):
            if startCumuSegTest(data[j-1],data[j],data[j+1]):
                trig_segs |= seg.segmentlist([seg.segment(time_vec[j+1],time_vec[j+1]+1)])
else:
    for j in arange(size(data,0)):
        if (0 < j < (size(data,0) - 1)):
            if startSegTest(data[j-1],data[j],data[j+1]):
                trig_segs |= seg.segmentlist([seg.segment(time_vec[j+1],time_vec[j+1]+1)])
# merge the per-sample one-second segments into contiguous overflow spans
trig_segs = trig_segs.coalesce()
if (size(trig_segs) == 0):
    print "No triggers found for " + str(channel)
    exit()
else:
    print "Found triggers for " + str(channel)
# make vectors of up and down transitions and feed into XML output
up_trigger_vec = []
for i in arange(size(trig_segs,0)):
    up_trigger_vec.append(trig_segs[i][0] - 0.5)
down_trigger_vec = []
for i in arange(size(trig_segs,0)):
    down_trigger_vec.append(trig_segs[i][1] - 0.5)
# map triggers into float type and then convert them all into LIGOTimeGPS notation
up_trig_times = map(LIGOTimeGPS,map(float,up_trigger_vec))
down_trig_times = map(LIGOTimeGPS,map(float,down_trigger_vec))
# create mocked up frequency and SNR vectors to fill in XML tables
freqs = empty(size(up_trigger_vec))
freqs.fill(100)
snrs = empty(size(up_trigger_vec))
snrs.fill(10)
sngl_burst_table_up = lsctables.New(lsctables.SnglBurstTable, ["peak_time", "peak_time_ns","peak_frequency","snr"])
sngl_burst_table_down = lsctables.New(lsctables.SnglBurstTable, ["peak_time", "peak_time_ns","peak_frequency","snr"])
for t,f,s in zip(up_trig_times, freqs, snrs):
    row = sngl_burst_table_up.RowType()
    row.set_peak(t)
    row.peak_frequency = f
    row.snr = s
    sngl_burst_table_up.append(row)
for t,f,s in zip(down_trig_times, freqs, snrs):
    row = sngl_burst_table_down.RowType()
    row.set_peak(t)
    row.peak_frequency = f
    row.snr = s
    sngl_burst_table_down.append(row)
# wrap each burst table in its own LIGO_LW XML document
xmldoc_up = ligolw.Document()
xmldoc_up.appendChild(ligolw.LIGO_LW())
xmldoc_up.childNodes[0].appendChild(sngl_burst_table_up)
xmldoc_down = ligolw.Document()
xmldoc_down.appendChild(ligolw.LIGO_LW())
xmldoc_down.childNodes[0].appendChild(sngl_burst_table_down)
# output layout: <base>/<ifo>/<channel>_{UP,DOWN}/<first 5 GPS digits>/
directory_up = ("/home/tjmassin/triggers/POSTER5/" + channel[:2] + "/" +
    channel[3:] + "_UP/" + str(gps_start)[:5] + "/")
directory_down = ("/home/tjmassin/triggers/POSTER5/" + channel[:2] + "/" +
    channel[3:] + "_DOWN/" + str(gps_start)[:5] + "/")
if not os.path.exists(directory_up):
    os.makedirs(directory_up)
if not os.path.exists(directory_down):
    os.makedirs(directory_down)
utils.write_filename(xmldoc_up, directory_up + channel[:2] + "-" + channel[3:6] +
    "_" + channel[7:] + "_UP_ADC-" + str(gps_start) + "-" + str(gps_end - gps_start) +
    ".xml.gz", gz=True)
utils.write_filename(xmldoc_down, directory_down + channel[:2] + "-" + channel[3:6] +
    "_" + channel[7:] + "_DOWN_ADC-" + str(gps_start) + "-" + str(gps_end - gps_start) +
    ".xml.gz", gz=True)
|
import requests
from bs4 import BeautifulSoup
import simplejson
def getLensList():
    """Scrape surplusshed.com's lens finder and return a list of
    (type, diameter, focal_length) string tuples.

    The site has no API; values are mined out of the quantity <input>
    tags on the results page, so all heuristics below are tied to that
    exact markup.
    """
    headers = {
        'Connection': 'keep-alive',
    }
    data = [
        ('Type', ''),
        ('Diameter_min', ''),
        ('Diameter_max', ''),
        ('Length_min', ''),
        ('Length_max', ''),
        ('Sort', 'Diameter'),
        ('Alpha', 'Desc'),
        ('Submit.x', '61'),
        ('Submit.y', '13'),
    ]
    #first we need to request the lenses from surplusshed
    r = requests.post('http://www.surplusshed.com/finder.cfm', headers=headers, data=data)
    #unfortunately they have no api, and don't structure their classes/tags/results in a way that makes it easy for us
    soup = BeautifulSoup(r.text, 'html.parser')
    # table = soup.find('table')
    sku = soup.find_all("input")
    #we find through experimentation that attributes of where we input our desired quantity has the lens attributes, but there are more inputs than just desired quantity
    rawListOfLenses = []
    for each in sku:
        value = str(each.get('value'))
        #now we have to hunt through each input and find the ones with value is both greater than 1 and the second letter is e - this comes from experimentation and looking at what the results we want have in common
        # NOTE(review): str() never returns None, so the None check below is vacuous
        if value is not None and len(value) > 1 and value[1] == 'e':
            #lets add these results to a list to manipulate further.
            rawListOfLenses.append(value)
    #now we need to clean up our list of lenses. Let's make each lens a tuple of (type, diameter, focal length). We also know if the lens is coated but that's unnecessary complexity for now.
    listOfLensesSplit = []
    for each in rawListOfLenses:
        splitByComma = each.split(',') #first we cut each result at the comma
        newLens = []
        for i in splitByComma:
            newLens.append(i.split(' ')[-1])
        listOfLensesSplit.append(newLens)
    #Now we tuplify the results
    listOfLensTuples = []
    for each in listOfLensesSplit:
        if len(each) > 2:
            lensTuple = tuple([each[0], each[1], each[2]])
            listOfLensTuples.append(lensTuple)
    return(listOfLensTuples)
def saveLenses(lenses, filename):
    """Serialize the lens list to *filename* as JSON."""
    with open(filename, 'w') as myfile:
        simplejson.dump(lenses, myfile)
def loadLenses(filename):
    """Load a lens list previously written by saveLenses().

    Note: JSON round-trips tuples as lists, so entries come back as lists.
    """
    with open(filename,'r') as myfile:
        lenses = simplejson.load(myfile)
    return(lenses)
def filterSmallLenses(lenses, minSize = 28.8):
    """Return the lenses whose diameter (field 1, stored as a string)
    is at least *minSize*, normalized to tuples.

    The comprehension replaces the old manual append loop; behavior is
    unchanged, including the tuple() normalization of each entry.
    """
    return [tuple(lens) for lens in lenses if float(lens[1]) >= minSize]
def filterByType(lenses, typeOfLens = "PMN"):
    """Return only the lenses whose type field (index 0) equals *typeOfLens*.

    The comprehension replaces the old manual append loop; entries are
    returned as-is (not normalized to tuples), matching the original.
    """
    return [lens for lens in lenses if lens[0] == typeOfLens]
def lensCalc(focalOne, focalTwo, distance):
    """Compound focal length of two thin lenses separated by *distance*.

    Standard two-lens formula: f = f1*f2 / (f1 + f2 - d).
    """
    f1, f2, d = float(focalOne), float(focalTwo), float(distance)
    return (f1 * f2) / (f1 + f2 - d)
def distanceCalc(focalDesired, focalOne, focalTwo):
    """Separation needed so two thin lenses combine to *focalDesired*.

    Rearrangement of the two-lens formula: d = (f1 + f2) - f1*f2/fD.
    """
    f1 = float(focalOne)
    f2 = float(focalTwo)
    target = float(focalDesired)
    return (f1 + f2) - (f1 * f2) / target
def computeTripletDist(focalDesired, focalOne, focalTwo, focalThree):
    """Brute-force the lens spacing d (0..focalDesired-1) that brings a
    three-lens compound focal length closest to *focalDesired*.

    The same spacing d is applied between both lens pairs.  Prints each
    candidate focal length and a summary line (as before) and returns the
    best spacing found.  The bare except is narrowed to ZeroDivisionError
    (raised by lensCalc when the separation equals the combined focal
    length, i.e. an afocal system), and the unused `eps` local is gone.
    """
    pocketD = 1000000      # best spacing found so far
    pocketFL = 100000      # compound focal length at pocketD
    for d in range(focalDesired):
        try:
            compoundOne = lensCalc(focalOne, focalTwo, d)
            compoundTwo = lensCalc(compoundOne, focalThree, d)
        except ZeroDivisionError:
            print('oops')
            continue
        if abs(compoundTwo - focalDesired) < abs(pocketFL - focalDesired):
            pocketFL = compoundTwo
            pocketD = d
        print(compoundTwo)
    print('Best FL: ' + str(pocketFL) + ' at ' + str(pocketD))
    return pocketD
computeTripletDist(220, 222, -150, 222)
# lenses = getLensList()
# saveLenses(lenses, 'lenses')
# lenses = loadLenses('lenses')
# # lenses = list(lenses)
# lenses = filterSmallLenses(lenses, 40.0)
# lenses = filterByType(lenses, "NMN")
# # saveLenses(lenses, 'verylargeLenses')
# print(lenses)
# print(len(lenses))
# print(distanceCalc(-330.0, 220.0, -150.0 ))
# print(distanceCalc(220.0, -485.0, 220.0 ))
#
# print(lensCalc(220.0, -150.0, 2.0 ))
# print(lensCalc(-485.0, 220.0, 2.0 ))
#
#
# print(lensCalc(20, 20, 10))
#
# print(distanceCalc(13, 20, 20))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from typing import Any, Union
from smorest_sfs.extensions import db
from smorest_sfs.modules.menus import models
from smorest_sfs.plugins.hierachy_xlsx.parsers import HierachyParser
from smorest_sfs.plugins.hierachy_xlsx.transformers import (
HierachyModelProtocol,
Transformer,
)
from smorest_sfs.utils.paths import Path, ProjectPath
class MenuTransformer(Transformer):
    """Transformer that materializes Menu rows from parsed hierarchy-xlsx data."""
    def _get_instance(self, **kwargs: Any) -> HierachyModelProtocol:
        # local import -- presumably to break an import cycle with
        # menus.schemas; confirm before hoisting to module level
        from smorest_sfs.modules.menus import schemas
        # validate/deserialize the row through the Menu schema
        schema = schemas.MenuSchema()
        return schema.load(kwargs)
def import_menus_from_filepath(filepath: Union[str, Path]) -> None:
    """Parse the hierarchy xlsx at *filepath* (project-relative) and
    persist the resulting Menu rows through the shared db session."""
    resolved = ProjectPath.get_subpath_from_project(filepath)
    xlsx_parser = HierachyParser(filepath=resolved)
    xlsx_parser.parse()
    MenuTransformer(xlsx_parser, models.Menu, db.session).transform()
|
import re
class BibParser:
    """Minimal BibTeX file parser.

    Splits a .bib file into raw '@entry{...}' strings, then reduces each
    entry to a dict holding its type, citation tag and attributes.
    """

    def __init__(self, fname):
        # path of the .bib file to parse
        self.filename = fname

    def read_raw_content(self):
        """Return the file content as a list of raw entries, one string each.

        A new entry starts at any line beginning with '@'; lines are
        stripped and concatenated so every entry becomes a single string.
        """
        item = ""
        items = []
        with open(self.filename) as f:
            while True:
                line = f.readline()
                if not line:
                    # EOF: flush whatever entry was being accumulated
                    items.append(item)
                    break
                if line.startswith('@') and item:
                    items.append(item)
                    item = ""
                item += line.strip()
        return items

    def get_pre_parsed_data(self, items):
        """
        Get the pre-parsed data (tuple), with only the following values:
        (Entry_type, Entry_tag, Entry_content)
        """
        tmp_data = []
        for item in items:
            r = re.match(r"^@.*?{", item)
            if r:
                # '@article{' -> 'article'
                entry_type = r.group()[1:-1]
                # text between '{' and the first ',' is the citation tag
                entry_tag = item[len(entry_type)+1:].split(",")[0][1:]
                entry_raw_content = item[len(entry_type)+len(entry_tag)+3:-1]
                tmp_data.append((entry_type, entry_tag, entry_raw_content))
        return tmp_data

    def vstrip(self, val):
        """
        Remove the following elements on a string:
        * ending comma
        * initial quote
        * ending quote
        * initial bracket
        * ending bracket
        """
        if val.endswith(","):
            val = val[:-1]
        if val.startswith("\""):
            val = val[1:]
        if val.endswith("\""):
            val = val[:-1]
        if val.startswith("{"):
            val = val[1:]
        if val.endswith("}"):
            val = val[:-1]
        return val

    def get_parsed_data(self, pparsed):
        """
        Takes the pre-parsed data and applies some last rules to generate
        one dictionary per entry with all of its information.
        """
        entries = []
        for ttype, tag, content in pparsed:
            d = {"type": ttype, "tag": tag}
            # left side of each attribute: 'name ='
            attr_name = [m for m in re.findall(r"\w+\s*=", content) if m]
            # right side of each attribute (the values between the names).
            # Fix: the old code kept lazy `filter` objects and called
            # len() on them, which raises TypeError on Python 3.
            attr_value = [v for v in re.split(r"\w+\s*=", content) if v]
            assert len(attr_name) == len(attr_value)
            for name, value in zip(attr_name, attr_value):
                # drop the ' =' suffix from the name
                n = name.strip().split()[0]
                # drop the trailing "," plus surrounding quotes/braces
                v = self.vstrip(value.strip())
                d[n] = v
            entries.append(d)
        return entries
|
class Product:
    """Plain value object describing a stock product.

    id defaults to None for products that have not been persisted yet
    (the storage layer is expected to assign it).
    """

    def __init__(self, name, description, quantity, buying_cost, selling_price, manufacturer_id, id=None):
        self.name = name
        self.description = description
        self.quantity = quantity
        self.buying_cost = buying_cost
        self.selling_price = selling_price
        self.manufacturer_id = manufacturer_id
        self.id = id

    def __repr__(self):
        # added for debuggability; purely additive, no caller relies on it
        return (f"Product(name={self.name!r}, quantity={self.quantity!r}, "
                f"id={self.id!r})")
import numpy as np

# numpy.frombuffer interprets an existing buffer; it needs a bytes-like
# object (a Python 3 str has no buffer interface, so b'' is required)
s = b'Hello World'
a = np.frombuffer(s, dtype='S1')
print(a)

# ragged input (rows of different lengths): modern NumPy refuses to guess
# and raises ValueError unless dtype=object is requested explicitly
x = [(1, 2, 3), (4, 5)]
a = np.asarray(x, dtype=object)
print(a)

# numpy.asarray: list, typed list, and tuple inputs
x = [1, 2, 3]
a = np.asarray(x)
print(a)
x = [1, 2, 3]
a = np.asarray(x, dtype=float)
print(a)
x = (1, 2, 3)
a = np.asarray(x)
print(a)

# numpy.fromiter builds an array from any iterable
# (variable renamed from 'list', which shadowed the builtin)
values = range(5)
print(values)
it = iter(range(5))
x = np.fromiter(it, dtype=float)
print(x)
|
from django.db import models
from django.utils import timezone
class Bug(models.Model):
    """A reported bug with free-text status and simple upvote counting."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=200)
    description = models.TextField()
    created_date = models.DateTimeField(auto_now_add=True)
    # NOTE(review): auto_now_add stamps this at creation, so completed_date
    # always equals created_date and never updates -- confirm intent
    completed_date = models.DateTimeField(auto_now_add=True)
    upvotes = models.IntegerField(default=0)
    status = models.CharField(max_length=20, default="To do")
    def __unicode__(self):
        return self.title
class Comment(models.Model):
    """A user comment attached to a Bug."""
    id = models.AutoField(primary_key=True)
    bug = models.ForeignKey(Bug, related_name="bug")
    contents = models.TextField()
    user = models.TextField(default="unknown")
    created_date = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        # bug fix: Comment has no 'title' field, so the old
        # `return self.title` raised AttributeError whenever a
        # Comment was stringified (e.g. in the admin)
        return self.contents
|
def lowestCommonAncestor(self, root, p, q):
    """Return the lowest common ancestor of nodes p and q in a BST.

    While p and q lie on the same side of the current node, the two
    value differences share a sign (their product is positive), so keep
    descending toward them; the first node where they split is the LCA.
    """
    node = root
    while (node.val - p.val) * (node.val - q.val) > 0:
        node = node.left if p.val < node.val else node.right
    return node
# Scrape attendee records out of attendees.html (a fixed-format export)
# and dump them to JSON, both as a list and keyed by attendee name.
# NOTE: Python 2 script (print statements); the slicing offsets below are
# tied to the exact markup of the exported HTML.
with open('attendees.html') as in_file:
    attendees = []
    attendees_by_name = {}
    current = {}
    info_count = 0
    for line in in_file:
        if '<h3>' in line:
            # strip the '<h3>' prefix and '</h3>\n' suffix
            current['name'] = line[4:-6]
        if '<img' in line:
            out = line.split('src=')
            current['image'] = out[1][1:-3]
        if 'attendee-info' in line:
            info_count += 1
            out = line[27:-7]
            if out.endswith(','):
                out = out[:-1]
            current['info' + str(info_count)] = out
        if 'attendee-country' in line:
            # country is the last field of a record: flush the attendee
            info_count = 0
            current['country'] = line[30:-7]
            attendees.append(current)
            attendees_by_name[current['name']] = current
            current = {}
import json
out_file = open('attendees.json', 'w+')
out_file.write(json.dumps(attendees, sort_keys=True, indent=2, separators=(',', ': ')))
out_file.close()
out_file1 = open('attendees_by_name.json', 'w+')
out_file1.write(json.dumps(attendees_by_name, sort_keys=True, indent=2, separators=(',', ': ')))
out_file1.close()
print len(attendees)
print len(attendees_by_name.items())
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
import singlylinkedlist
class Solution:
    """Checks whether a singly linked list reads the same in both directions."""

    def isPalindrome(self, head):
        # Collect every node's payload in order, then compare the sequence
        # with its own reversal.  This is observably equivalent to the
        # classic "find the middle, compare the halves" walk, including
        # the empty-list case (an empty sequence equals its reversal).
        values = []
        node = head
        while node is not None:
            values.append(node.data)
            node = node.next
        return values == values[::-1]
if __name__ == '__main__':
    # ad-hoc manual check: an empty linked list is a palindrome (prints True);
    # uncomment the addElement lines to try non-empty lists
    s = Solution()
    mylist1 = singlylinkedlist.linked_list()
    # mylist1.addElement(2)
    # mylist1.addElement(1)
    # mylist1.addElement(2)
    # mylist1.addElement(1)
    print(s.isPalindrome(mylist1.head))
|
# Generated by Django 2.2 on 2019-06-10 17:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django 2.2 migration for django_nginx_access.

    Adds dictionary tables for distinct URLs and User-Agents, plus the
    monthly aggregate tables referencing them, and sets LogItem's plural
    admin label.  Auto-generated -- avoid editing by hand.
    """
    dependencies = [
        ('django_nginx_access', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='UrlsDictionary',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField()),
            ],
        ),
        migrations.CreateModel(
            name='UserAgentsDictionary',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_agent', models.CharField(max_length=100)),
            ],
        ),
        migrations.AlterModelOptions(
            name='logitem',
            options={'verbose_name_plural': 'Записи логов'},
        ),
        migrations.CreateModel(
            name='UserAgentsAgg',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('agg_month', models.DateField()),
                ('amount', models.IntegerField()),
                ('user_agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django_nginx_access.UserAgentsDictionary')),
            ],
        ),
        migrations.CreateModel(
            name='UrlsAgg',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('agg_month', models.DateField()),
                ('amount', models.IntegerField()),
                ('url', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django_nginx_access.UrlsDictionary')),
            ],
        ),
    ]
|
"""
GAME CLASS FILE
this file can be used to import the game class into the
main server loop
"""
import random
import re
import asyncio
# stores a reference to the file containing the game's phrases
phrases_file = "src/phrases"
# used to check if a guess is in the alphabet
alphabet = "abcdefghijklmnopqrstuvwxyz"
class Game:
"""
The parent class for the entire game instance.
contains all of the specific handlers which are used to
interact with the game
"""
    class Service:
        """
        The parent service class. Services are all children of the game
        instance, and are used to implement functionality, such as
        generating the random number, or storing the score for players
        """
        def __init__(self, game):
            # back-reference to the owning Game instance
            self.game = game
    class PlayerHandler(Service):
        """
        The players service stores the player instances, as well as the
        scores so far in the game. allows for adding and removing
        players, as well as other functions such as sending a websocket
        message to all players
        """
        class Player:
            """
            the player class, stores information such as their name,
            their score, etc.
            """
            def __init__(self, game, client):
                self.game = game
                self.client = client
                # cumulative score for the current game
                self.score = 0
        def __init__(self, game):
            super().__init__(game)
            self.game = game
            # all Player instances currently in this game
            self.players = []
        # allows players to be added to the game
        def add_player(self, client):
            player = self.Player(self.game, client)
            self.players.append(player)
            client.player = player
        # allows players to be removed from the game
        async def remove_player(self, player):
            self.players.remove(player)
            player.client.game = None
            player.client.player = None
            player.client.location = "ROOM_LIST"
            # shifts to the next person's turn if needed
            current_round = self.game.round_handler.current_round
            if len(self.players) == 0:
                # last player left
                # NOTE(review): this removes only the back-reference
                # attribute, not the Game itself -- confirm teardown intent
                if hasattr(self, 'game'):
                    del self.game
            else:
                if player == current_round.current_player:
                    # the leaver was mid-turn: step the index back so
                    # advance() lands on the correct next player
                    current_round.current_player_index -= 1
                    await current_round.advance()
                else:
                    # re-sync the index after the player list shrank
                    current_round.current_player_index = self.players.index(
                        current_round.current_player
                    )
                if hasattr(self, 'game'):
                    await self.game.update_players()
    class PhraseHandler(Service):
        """
        The phrases service stores the different sayings/text which can
        appear in a round
        """
        class Phrase:
            """
            phrase objects store some text as well as the genre for the
            text. These may be selected during a game to be guessed by
            the players
            """
            def __init__(self):
                # pick one random line from the phrases file
                # NOTE(review): the file handle from open() is never
                # closed explicitly
                self.text = random.choice(
                    open(phrases_file).readlines()
                ).rstrip()
                """
                this section of code is unused. It was originally
                planned for displaying the phrase on a 3d board on
                the client with special formatting
                line_lengths = [12, 14, 14, 12]
                lines = [[], [], [], []]
                line_num = 0
                cur_line_lengths = [0, 0, 0, 0]
                word_num = 0
                words = self.text.split(" ")
                while True:
                    word = words[word_num]
                    if cur_line_lengths[line_num] +
                        len(word) > line_lengths[line_num]:
                        line_num += 1
                    else:
                        lines[line_num].append(word)
                        cur_line_lengths[line_num] += len(word) + 1
                        word_num += 1
                    if word_num >= len(words):
                        break
                if len(lines[3]) + len(lines[2]) == 0:
                    lines[2] = lines[1]
                    lines[1] = lines[0]
                    lines[0] = []
                for line in lines:
                    print(f"line: {' '.join(line)}")
                self.lines = lines
                """
        def __init__(self, game):
            super().__init__(game)
    class RoundHandler(Service):
        """
        The round handler service handles what round the game is
        currently on, by storing a reference to it. Allows new rounds
        to be created.
        """
        class Round:
            """
            This class is the main functionality of the game, as it is
            what allows functionality to be added to individual rounds,
            such as making guesses, advancing to the next player,
            awarding points, etc.
            """
            def __init__(self, game):
                self.game = game
                # One phrase per round; a random player opens the round
                self.phrase = game.phrase_handler.Phrase()
                self.current_player = random.choice(
                    game.player_handler.players
                )
                self.current_player_index = game.player_handler.players.index(
                    self.current_player
                )
            # this allows a new round to be started asynchronously, as
            # __init__ cannot be awaited
            async def start(self):
                self.prize = self.game.wheel_handler.generate_prize()
                # Punctuation/whitespace start out "guessed" so they always
                # display; ignore_letters keeps them out of the guessed-letter
                # list shown to players
                self.guessed_letters = set([",", "-", "'", '"', " "])
                self.ignore_letters = set([",", "-", "'", '"', " "])
                # Running count of revealed letters (used to detect completion)
                self.total_guessed = 0
                await self.game.update_players()
                await self.update_phrase()
                await self.game.send_all(
                    {"TYPE": "GAME_MESSAGE", "DATA": "A new round is starting"}
                )
                await asyncio.sleep(1.5)
                await self.send_turn_data()
                await self.update_phrase()
                await self.game.send_all(
                    {"TYPE": "SET_PRIZE", "DATA": self.prize}
                )
                await asyncio.sleep(5)
            # moves onto the next person's turn. Maintains the correct
            # order even when people leave the game
            async def advance(self, change_score=True):
                players = self.game.player_handler.players
                # Wrap around with modulo so turn order survives departures
                next_index = (self.current_player_index + 1) % len(players)
                self.current_player = players[next_index]
                self.current_player_index = next_index
                await self.game.update_players()
                await self.send_turn_data()
                await self.update_phrase()
                # Every turn spins the wheel again for a fresh prize
                self.prize = self.game.wheel_handler.generate_prize()
                await self.game.send_all(
                    {"TYPE": "SET_PRIZE", "DATA": self.prize}
                )
                await asyncio.sleep(5)
            # this sends a message to the client telling the player
            # whose turn it is
            async def send_turn_data(self):
                for player in self.game.player_handler.players:
                    name = self.current_player.client.name
                    is_turn = player == self.current_player
                    # "Your turn" for the active player, "<name>'s turn" otherwise
                    msg_data = "Your" if is_turn else f"{name}'s"
                    msg = f"It is {msg_data} turn"
                    data = {"TYPE": "GAME_MESSAGE", "DATA": msg}
                    await player.client.send(data)
            # this updates the phrase representation (with underscores)
            # and sends it to all the players. Also adds the guessed
            # letters
            async def update_phrase(self):
                # Unguessed letters render as "_"; the trailing list shows
                # guessed letters minus the always-revealed punctuation
                self.display_phrase = (
                    "".join(
                        [
                            letter if letter.lower()
                            in self.guessed_letters else "_"
                            for letter in self.phrase.text
                        ]
                    ) +
                    " guessedletters: " +
                    ", ".join(
                        list(
                            filter(
                                (None).__ne__,
                                [
                                    letter
                                    if letter not in self.ignore_letters
                                    else None
                                    for letter in self.guessed_letters
                                ],
                            )
                        )
                    )
                )
                await self.game.send_all(
                    {"TYPE": "UPDATE_PHRASE", "DATA": self.display_phrase}
                )
            # this is fired when the server class receives a guess
            # attempt from a client. It handles checking the
            # validity of the guess, awarding points, etc.
            async def make_guess(self, player, guess: str):
                if player != self.current_player:
                    await player.client.error(
                        "You can only make a guess when it is your turn"
                    )
                    return
                guess = guess.lower()
                phrase = self.phrase.text.lower()
                if guess in self.guessed_letters:
                    await player.client.error("Please submit a new letter")
                    return
                if len(guess) == 1:
                    # client guessed a single letter
                    if guess in alphabet:
                        if guess not in self.guessed_letters:
                            occurances = phrase.count(guess)
                            self.guessed_letters.add(guess)
                            self.total_guessed += occurances
                            if occurances == 0:
                                # client guessed incorrectly
                                name = player.client.name
                                data = f"{name} incorrectly guessed the letter\
 '{guess}'"
                                await self.game.send_all(
                                    {
                                        "TYPE": "GAME_MESSAGE",
                                        "DATA": data,
                                    }
                                )
                                await asyncio.sleep(1)
                                # change_score=False: wrong guesses earn nothing
                                await self.advance(False)
                            else:
                                # client guessed correctly
                                score = self.prize * occurances
                                player.score += score
                                name = player.client.name
                                await self.game.update_players()
                                await self.game.send_all(
                                    {
                                        "TYPE": "GAME_MESSAGE",
                                        "DATA": f"{name} guessed the letter \
 '{guess}' and won {self.prize * occurances} dollars!",
                                    }
                                )
                                # Compare revealed count against the phrase with
                                # non-word characters stripped: all letters found?
                                if self.total_guessed >= len(
                                    re.sub(r"\W+", "", phrase)
                                ):
                                    await asyncio.sleep(3)
                                    await self.game.round_handler.new_round()
                                    return
                                else:
                                    await asyncio.sleep(2)
                                    # Correct guess keeps the turn; re-spin the wheel
                                    wheel_handler = self.game.wheel_handler
                                    self.prize = (
                                        wheel_handler.generate_prize()
                                    )
                                    await self.game.send_all(
                                        {
                                            "TYPE": "SET_PRIZE",
                                            "DATA": self.prize
                                        }
                                    )
                    else:
                        await player.client.error(
                            "Your guess must only be alphabetical characters"
                        )
                elif len(guess) == 0:
                    # client guessed nothing
                    await player.client.error(
                        "You must submit a guess; you attempted to submit \
 nothing"
                    )
                else:
                    # client guessed a phrase
                    if guess == phrase:
                        # client got phrase correct
                        player.score += 1000
                        await self.game.send_all(
                            {
                                "TYPE": "GAME_MESSAGE",
                                "DATA": f"{player.client.name} correctly \
 guessed the phrase '{guess}' and won $1000",
                            }
                        )
                        await self.update_phrase()
                        await asyncio.sleep(3)
                        await self.game.round_handler.new_round()
                        return
                    else:
                        # client got phrase incorrect
                        await self.game.send_all(
                            {
                                "TYPE": "GAME_MESSAGE",
                                "DATA": f"{player.client.name} incorrectly \
 guessed the phrase '{guess}'",
                            }
                        )
                        await self.advance(False)
                await self.update_phrase()
        def __init__(self, game):
            super().__init__(game)
            # No round exists until new_round() is first awaited
            self.current_round = None
            self.total_rounds = 0
        # this creates a new round and sets the current round to it
        async def new_round(self):
            self.total_rounds += 1
            if self.total_rounds > 3:
                # 3 rounds have already been played, finishing game
                max_score = 0
                best_player = "NOBODY"
                # Ties go to the later player in join order (>= comparison)
                for player in self.game.player_handler.players:
                    if player.score >= max_score:
                        max_score = player.score
                        best_player = player.client.name
                await self.game.send_all(
                    {
                        "TYPE": "GAME_MESSAGE",
                        "DATA": f"GAME FINISHED! Winner: {best_player}",
                    }
                )
                # this is in order to keep clients connected with very
                # little overhead
                await asyncio.sleep(99999)
            round = self.Round(self.game)
            self.current_round = round
            await round.start()
            return round
class WheelHandler(Service):
"""
this class handles generating a random priE each time the wheel
is spun
"""
def __init__(self, game):
super().__init__(game)
self.prizes = [50, 100, 150, 200, 250, 500, 1000]
# generates a random prize from the list of possible prizes
def generate_prize(self):
return random.choice(self.prizes)
def __init__(self, room):
self.add_services()
for client in room.connected:
client.game = self
self.player_handler.add_player(client)
# this starts up a new game instance. This is done seperately from
# the __init__ dunder function, as it needs to be run asynchronously
async def start(self):
data = {"TYPE": "JOINED_GAME", "DATA": ""}
await self.send_all(data)
self.current_round = await self.round_handler.new_round()
# this starts up and creates a reference to all of the services
def add_services(self):
self.player_handler = self.PlayerHandler(self)
self.phrase_handler = self.PhraseHandler(self)
self.round_handler = self.RoundHandler(self)
self.wheel_handler = self.WheelHandler(self)
# this sends out a websocket to all the players telling them who's
# turn it is, who is in the game, and what score everyone has
async def update_players(self):
current_player = self.round_handler.current_round.current_player
for player in self.player_handler.players:
player_info = {}
player_id = 0
for other in self.player_handler.players:
info = {
"NAME": other.client.name,
"YOU": other == player,
"SCORE": other.score,
"IS_TURN": current_player == other,
}
player_info[player_id] = info
player_id += 1
await player.client.send(
{"TYPE": "GAME_CONNECTED_UPDATE", "DATA": player_info}
)
# this function can be used to send a message to every player in the game
async def send_all(self, data):
for player in self.player_handler.players:
await player.client.send(data)
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from __future__ import division
from tornado.web import authenticated, HTTPError
from tornado.escape import url_escape
from json import dumps
from qiita_files.demux import stats as demux_stats
from qiita_core.qiita_settings import r_client, qiita_config
from qiita_core.util import execute_as_transaction
from qiita_db.metadata_template.constants import (SAMPLE_TEMPLATE_COLUMNS,
PREP_TEMPLATE_COLUMNS)
from qiita_db.exceptions import QiitaDBUnknownIDError
from qiita_db.artifact import Artifact
from qiita_db.processing_job import ProcessingJob
from qiita_db.software import Software, Parameters
from qiita_pet.handlers.base_handlers import BaseHandler
# Artifact types that are eligible for submission to EBI
VALID_SUBMISSION_TYPES = ['Demultiplexed', 'per_sample_FASTQ']
class EBISubmitHandler(BaseHandler):
    """Tornado handler to inspect and launch EBI submissions of an artifact."""

    @execute_as_transaction
    def display_template(self, artifact_id, msg, msg_level):
        """Render the EBI submission page (shared by ``get`` and ``post``).

        Parameters
        ----------
        artifact_id : int or str
            Artifact to display; coerced to ``int``.
        msg : str
            Message shown on the page.
        msg_level : str
            Bootstrap alert level for ``msg`` (e.g. 'success', 'danger').

        Raises
        ------
        HTTPError
            404 if the artifact does not exist; 403 if the user is not admin.
        """
        artifact_id = int(artifact_id)
        try:
            artifact = Artifact(artifact_id)
        except QiitaDBUnknownIDError:
            raise HTTPError(404, reason="Artifact %d does not exist!" %
                            artifact_id)
        else:
            user = self.current_user
            if user.level != 'admin':
                raise HTTPError(403, reason="No permissions of admin, "
                                "get/EBISubmitHandler: %s!" % user.id)

        prep_templates = artifact.prep_templates
        allow_submission = len(prep_templates) == 1
        msg_list = ["Submission to EBI disabled:"]
        if not allow_submission:
            msg_list.append(
                "Only artifacts with a single prep template can be submitted")
        # If allow_submission is already false, we technically don't need to
        # do the following work. However, there is no clean way to fix this
        # using the current structure, so we perform the work as we
        # did so it doesn't fail.
        # We currently support only one prep template for submission, so
        # grabbing the first one
        prep_template = prep_templates[0]
        study = artifact.study
        sample_template = study.sample_template
        stats = {
            'Number of samples': len(prep_template),
            'Number of metadata headers': len(sample_template.categories()),
            'Number of sequences': 'N/A',
            'Total forward': 'N/A',
            'Total reverse': 'N/A'
        }
        artifact_type = artifact.artifact_type
        if artifact_type not in VALID_SUBMISSION_TYPES:
            msg = "You can only submit: '%s' and this artifact is '%s'" % (
                ', '.join(VALID_SUBMISSION_TYPES), artifact_type)
            msg_level = 'danger'
        elif artifact_type == 'Demultiplexed':
            demux = [x['fp'] for x in artifact.filepaths
                     if x['fp_type'] == 'preprocessed_demux']
            demux_length = len(demux)
            if demux_length > 1:
                msg = "Study appears to have multiple demultiplexed files!"
                msg_level = 'danger'
            elif demux_length == 0:
                # BUGFIX: previously only the >1 case was handled, so an
                # artifact with no demux file crashed on demux[0] below.
                msg = "Study appears to be missing its demultiplexed file!"
                msg_level = 'danger'
            else:
                demux_file = demux[0]
                demux_file_stats = demux_stats(demux_file)
                stats['Number of sequences'] = demux_file_stats.n
                msg_level = 'success'
        elif artifact_type == 'per_sample_FASTQ':
            raw_forward_seqs = []
            raw_reverse_seqs = []
            for x in artifact.filepaths:
                if x['fp_type'] == 'raw_forward_seqs':
                    raw_forward_seqs.append(x['fp'])
                elif x['fp_type'] == 'raw_reverse_seqs':
                    raw_reverse_seqs.append(x['fp'])
            stats['Total forward'] = len(raw_forward_seqs)
            stats['Total reverse'] = len(raw_reverse_seqs)
            msg_level = 'success'
        # Check if the templates have all the required columns for EBI
        pt_missing_cols = prep_template.check_restrictions(
            [PREP_TEMPLATE_COLUMNS['EBI']])
        st_missing_cols = sample_template.check_restrictions(
            [SAMPLE_TEMPLATE_COLUMNS['EBI']])
        allow_submission = (len(pt_missing_cols) == 0 and
                            len(st_missing_cols) == 0 and allow_submission)
        if not allow_submission:
            if len(pt_missing_cols) > 0:
                msg_list.append("Columns missing in prep template: %s"
                                % ', '.join(pt_missing_cols))
            if len(st_missing_cols) > 0:
                msg_list.append("Columns missing in sample template: %s"
                                % ', '.join(st_missing_cols))
            ebi_disabled_msg = "<br/>".join(msg_list)
        else:
            ebi_disabled_msg = None
        self.render('ebi_submission.html',
                    study_title=study.title, stats=stats.items(), message=msg,
                    study_id=study.id, level=msg_level,
                    preprocessed_data_id=artifact_id,
                    investigation_type=prep_template.investigation_type,
                    allow_submission=allow_submission,
                    ebi_disabled_msg=ebi_disabled_msg)

    @authenticated
    def get(self, preprocessed_data_id):
        """Render the submission page with no initial message."""
        self.display_template(preprocessed_data_id, "", "")

    @authenticated
    @execute_as_transaction
    def post(self, preprocessed_data_id):
        """Queue an EBI submission job (admin only) and redirect to the study."""
        user = self.current_user
        # make sure user is admin and can therefore actually submit to EBI
        if user.level != 'admin':
            raise HTTPError(403, reason="User %s cannot submit to EBI!" %
                            user.id)
        submission_type = self.get_argument('submission_type')
        if submission_type not in ['ADD', 'MODIFY']:
            raise HTTPError(403, reason="User: %s, %s is not a recognized "
                            "submission type" % (user.id, submission_type))
        study = Artifact(preprocessed_data_id).study
        state = study.ebi_submission_status
        if state == 'submitting':
            message = "Cannot resubmit! Current state is: %s" % state
            self.display_template(preprocessed_data_id, message, 'danger')
        else:
            qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
            cmd = qiita_plugin.get_command('submit_to_EBI')
            params = Parameters.load(
                cmd, values_dict={'artifact': preprocessed_data_id,
                                  'submission_type': submission_type})
            job = ProcessingJob.create(user, params, True)
            # Record the job id in redis so the UI can poll submission status
            r_client.set('ebi_submission_%s' % preprocessed_data_id,
                         dumps({'job_id': job.id, 'is_qiita_job': True}))
            job.submit()
            level = 'success'
            message = 'EBI submission started. Job id: %s' % job.id
            self.redirect("%s/study/description/%d?level=%s&message=%s" % (
                qiita_config.portal_dir, study.id, level, url_escape(message)))
|
#Programa: raices.py
#Propósito: Calcular la raíz cuadrada y la raíz cúbica de un número
#Autor: Jose Manuel Serrano Palomo.
#Fecha: 13/10/2019
#
#Variables a usar:
# n1 es el numero que vamos a usar
# sq1 es la raíz cuadrada
# sq2 es la raíz cúbica
#
#Algoritmo:
# LEER n1
# sq1 <-- math.sqrt(n1)
# sq2 <-- n1 ** (1/3)
# ESCRIBIR sq1 y sq2
# Imports belong at the top of the script, before any other statement
# (the original imported math between the banner prints and the input).
import math

print("Calcular la raíz cuadrada y la raíz cúbica")
print("------------------------------------------\n")
# Read the input number (int() raises ValueError for non-integers;
# math.sqrt raises for negative values, as before)
n1 = int(input("Ingrese un número: "))
# Compute the square root and the cube root
sq1 = math.sqrt(n1)
sq2 = n1 ** (1/3)
# Print both results
print("El resultado de la raíz cuadrada es ", sq1, " y el de la raíz cúbica es ", sq2)
|
import bs4
import re
import io
import logging
import zipfile
import webFunctions
import mimetypes
import urllib.parse
import urllib.error
class GDocExtractor(object):
	"""Downloads a Google Doc as an exported zip and splits it into the
	base HTML file plus its resources (images, css, ...)."""

	log = logging.getLogger("Main.GDoc")
	wg = webFunctions.WebGetRobust(logPath="Main.GDoc.Web")

	def __init__(self, targetUrl):
		"""Validate *targetUrl* as a Google Doc and build the export URL.

		Raises ValueError if the URL is not a Google document.
		"""
		isGdoc, url = self.isGdocUrl(targetUrl)
		if not isGdoc:
			raise ValueError("Passed URL '%s' is not a google document?" % targetUrl)
		self.url = url+'/export?format=zip'
		self.refererUrl = targetUrl
		self.document = ''
		self.currentChunk = ''

	@classmethod
	def getDriveFileUrls(cls, url):
		"""Scrape *url* for Google Doc links; return (doc_urls, page_title)."""
		ctnt, handle = cls.wg.getpage(url, returnMultiple=True)
		# Pull out the title for the disambiguation page.
		soup = bs4.BeautifulSoup(ctnt)
		title = soup.title.string
		# horrible keyhole optimization regex abomination
		# this really, /REALLY/ should be a actual parser.
		# Unfortunately, the actual google doc URLs are only available in some JS literals,
		# so we have to deal with it.
		driveFolderRe = re.compile(r'(https://docs.google.com/document/d/[-_0-9a-zA-Z]+)')
		items = driveFolderRe.findall(ctnt)
		ret = set()
		# Google drive supports a `read?{google doc path} mode. As such, we look at the actual URL,
		# which tells us if we redirected to a plain google doc, and add it if we did.
		handleUrl = handle.geturl()
		if handleUrl != url:
			# BUGFIX: isGdocUrl() returns a (bool, url) tuple, which is always
			# truthy -- unpack and test the flag instead of the tuple.
			isGdoc, _ = cls.isGdocUrl(handleUrl)
			if isGdoc:
				cls.log.info("Direct read redirect: '%s'", handleUrl)
				ret.add(handleUrl)
		for item in items:
			ret.add(item)
		# BUGFIX: 'ret' (deduplicated, including any direct-read redirect)
		# was previously built and then discarded in favour of the raw list.
		return list(ret), title

	@classmethod
	def isGdocUrl(cls, url):
		"""Return (True, base_doc_url) when *url* is a Google Doc, else (False, url)."""
		# This is messy, because it has to work through bit.ly redirects.
		# I'm just resolving them here, rather then keeping them around because it makes things easier.
		gdocBaseRe = re.compile(r'(https?://docs.google.com/document/d/[-_0-9a-zA-Z]+)')
		simpleCheck = gdocBaseRe.search(url)
		if simpleCheck and not url.endswith("/pub"):
			return True, simpleCheck.group(1)
		return False, url

	@classmethod
	def clearBitLy(cls, url):
		"""Resolve bit.ly links recursively; None if resolution fails."""
		if "bit.ly" in url:
			try:
				dummy_ctnt, handle = cls.wg.getpage(url, returnMultiple=True)
				# Recurse into redirects
				return cls.clearBitLy(handle.geturl())
			except urllib.error.URLError:
				print("Error resolving redirect!")
				return None
		return url

	@classmethod
	def clearOutboundProxy(cls, url):
		'''
		So google proxies all their outbound links through a redirect so they can detect outbound links.
		This call strips them out if they are present.
		'''
		if url.startswith("http://www.google.com/url?q="):
			qs = urllib.parse.urlparse(url).query
			query = urllib.parse.parse_qs(qs)
			if "q" not in query:
				raise ValueError("No target?")
			return query["q"].pop()
		return url

	def extract(self):
		"""Download the export zip; return (baseFile, resources).

		baseFile is (filename, unicode_html); resources is a list of
		(filename, mimetype, raw_bytes). Returns (None, []) on fetch failure.
		"""
		try:
			arch, fName = self.wg.getFileAndName(self.url, addlHeaders={'Referer': self.refererUrl})
		except IndexError:
			print("ERROR: Failure retreiving page!")
			return None, []

		baseName = fName.split(".")[0]

		if not isinstance(arch, bytes):
			# A str response means we got an HTML error page, not the zip.
			# BUGFIX: the second operand was a bare string literal (always
			# truthy), so this condition was unconditionally True before.
			if 'You need permission' in arch or 'Sign in to continue to Docs' in arch:
				self.log.critical("Retreiving zip archive failed?")
				self.log.critical("Retreived content type: '%s'", type(arch))
				raise TypeError("Cannot access document? Is it protected?")
			else:
				with open("tmp_page.html", "w") as fp:
					fp.write(arch)
				raise ValueError("Doc not valid?")

		zp = io.BytesIO(arch)
		zfp = zipfile.ZipFile(zp)

		resources = []
		baseFile = None
		for item in zfp.infolist():
			# The first top-level (no "/") entry is the main document
			if "/" not in item.filename and not baseFile:
				contents = zfp.open(item).read()
				contents = bs4.UnicodeDammit(contents).unicode_markup
				baseFile = (item.filename, contents)
			elif baseName in item.filename and baseName:
				raise ValueError("Multiple base file items?")
			else:
				resources.append((item.filename, mimetypes.guess_type(item.filename)[0], zfp.open(item).read()))

		if not baseFile:
			raise ValueError("No base file found!")

		return baseFile, resources
def makeDriveDisambiguation(urls, pageHeader):
	"""Render a minimal HTML page: a heading plus one link per URL."""
	soup = bs4.BeautifulSoup()
	heading = soup.new_tag('h3')
	heading.string = 'Google Drive directory: %s' % pageHeader
	soup.append(heading)
	for link in urls:
		anchor = soup.new_tag('a', href=link)
		anchor.string = link
		soup.append(anchor)
		soup.append(soup.new_tag('br'))
	return soup.prettify()
def test():
	"""Manual smoke test against live Google URLs (network required).

	The commented-out lines are alternate experiments kept for reference;
	only the final folderview scrape actually runs.
	"""
	import webFunctions
	# NOTE(review): wg is created but never used below -- leftover scaffolding?
	wg = webFunctions.WebGetRobust()
	# url = 'https://docs.google.com/document/d/1ljoXDy-ti5N7ZYPbzDsj5kvYFl3lEWaJ1l3Lzv1cuuM/preview'
	# url = 'https://docs.google.com/document/d/17__cAhkFCT2rjOrJN1fK2lBdpQDSO0XtZBEvCzN5jH8/preview'
	url = 'https://docs.google.com/document/d/1t4_7X1QuhiH9m3M8sHUlblKsHDAGpEOwymLPTyCfHH0/preview'
	urls = [
		'https://docs.google.com/document/d/1RrLZ-j9uS5dJPXR44VLajWrGPJl34CVfAeJ7pELPMy4',
		'https://docs.google.com/document/d/1_1e7D30N16Q1Pw6q68iCrOGhHZNhXd3C9jDrRXbXCTc',
		'https://docs.google.com/document/d/1ke-eW78CApO0EgfY_X_ZgLyEEcEQ2fH8vK_oGbhROPM',
		'https://docs.google.com/document/d/1Dl5XbPHThX6xCxhIHL9oY0zDbIuQn6fXckXQ16rECps',
		'https://docs.google.com/document/d/12UHbPduKDVjSk99VVdf5OHdaHxzN3nuIcAGrW5oV5E8',
		'https://docs.google.com/document/d/1ebJOszL08TqJw1VvyaVfO72On4rQBPca6CujSYy-McY',
		'https://docs.google.com/document/d/19vXfdmkAyLWcfV2BkgIxNawD2QwCoeFEQtV8wYwTamU',
		'https://docs.google.com/document/d/1RGqoPR6sfjJY_ZxLfQGa4YLNIW5zKj1HTWa6qmFLQfg',
		'https://docs.google.com/document/d/1TDmwoB6Y7XiPJRZ7-OGjAhEqPPbdasazn0vBbCvj8IM',
		'https://docs.google.com/document/d/1o40vXZAW6v81NlNl4o6Jvjch0GO2ETv5JgwKqXfOpOQ',
		'https://docs.google.com/document/d/1STcAhI6J9CEEx7nQFGAt_mnxfgo0fMOrb4Ls0EYWRHk',
		'https://docs.google.com/document/d/1xyyhV5yeoRTZHPCPX6yeL8BbVzybhFM27EyInFtjwZQ',
		'https://docs.google.com/document/d/11RzD2ILc1MKH5VA4jBzCDO7DIFRzUFCjAe7-MnJfDLY',
		'https://docs.google.com/document/d/1AVyCN0nXTTqVrrMaqJRUSkTP1Ksyop9H-UHWvdMB5Ps',
		'https://docs.google.com/document/d/18VaVO2VnFMo5Lv6VFZ4hP-lbX3XxHKnPu6wc2sxxA6U',
		'https://docs.google.com/document/d/1XuD5iloTWdpFAAzuSHpQuPKVwsrQeyAlT0CSFoIYk3A',
		'https://docs.google.com/document/d/1yoKoZq3DBCXLJ__1LNod_d_p6SkKC2VzQ3r-pjlOa4M',
		'https://docs.google.com/document/d/1CIJLV1CN57naLf9gG9Y6C7aZ6ieLM9uL5CGquxCNPQM',
		'https://docs.google.com/document/d/1m9yGcNhNfQRCfdcmwb4mAy2sVG3BXHjM6cBFKjzmvFw',
	]
	# print(makeDriveDisambiguation(urls))
	# parse = GDocExtractor(url)
	# base, resc = parse.extract()
	# # parse.getTitle()
	print(GDocExtractor.getDriveFileUrls('https://drive.google.com/folderview?id=0B_mXfd95yvDfQWQ1ajNWZTJFRkk&usp=drive_web'))
	# with open("test.html", "wb") as fp:
	# 	fp.write(ret.encode("utf-8"))
if __name__ == "__main__":
	# BUGFIX: the guard was redundantly nested twice; one check suffices.
	# logSetup is only needed when run as a script, so import it here.
	import logSetup
	print("Initializing logging")
	logSetup.initLogging()
	test()
|
# Module-level string constant; "SI" is Spanish for "yes" -- presumably an
# affirmative flag value. TODO(review): confirm intended use at call sites.
S_ = "SI"
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-04-15 07:46
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the cmdb app (Django 1.11).

    Migrations are historical records: change the schema with a new
    migration rather than editing this one.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Cloud disk records; linked to Host by the ForeignKey added below.
        migrations.CreateModel(
            name='Disk',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('disk_id', models.CharField(blank=True, max_length=22, null=True, verbose_name='磁盘ID')),
                ('device', models.CharField(blank=True, max_length=15, null=True, verbose_name='所属Instance的Device信息')),
                ('size', models.IntegerField(verbose_name='磁盘大小,单位GB')),
                ('type', models.CharField(choices=[('system', '系统盘'), ('data', '数据盘')], default='data', max_length=6, verbose_name='磁盘类型')),
                ('creation_time', models.DateTimeField(verbose_name='创建时间')),
                ('expired_time', models.DateTimeField(verbose_name='过期时间')),
                ('add_time', models.DateTimeField(auto_now_add=True, verbose_name='入库时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
            ],
            options={
                'verbose_name_plural': '磁盘',
                'verbose_name': '磁盘',
            },
        ),
        # Cloud host (VM instance) records for Aliyun/QCloud.
        migrations.CreateModel(
            name='Host',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cloud_type', models.CharField(choices=[('aliyun', '阿里云'), ('qcloud', '腾讯云')], default='aliyun', max_length=20, verbose_name='云主机类型')),
                ('instance_id', models.CharField(max_length=22, unique=True, verbose_name='实例ID')),
                ('instance_name', models.CharField(max_length=22, verbose_name='实例的显示名称')),
                ('description', models.CharField(blank=True, max_length=128, null=True, verbose_name='实例的描述')),
                ('image_id', models.CharField(max_length=50, verbose_name='镜像ID')),
                ('region_id', models.CharField(max_length=30, verbose_name='实例所属地域ID')),
                ('zone_id', models.CharField(max_length=30, verbose_name='实例所属可用区')),
                ('cpu', models.IntegerField(verbose_name='CPU核数')),
                ('memory', models.IntegerField(verbose_name='内存大小,单位: GB')),
                ('instance_type', models.CharField(max_length=30, verbose_name='实例资源规格')),
                ('status', models.CharField(choices=[('Running', '运行中'), ('Starting', '启动中'), ('Stopping', '停止中'), ('Stopped', '已停止')], default='Running', max_length=8, verbose_name='实例状态')),
                ('hostname', models.CharField(blank=True, max_length=23, null=True, verbose_name='实例机器名称')),
                ('public_ip', models.GenericIPAddressField(blank=True, null=True, verbose_name='公网IP')),
                ('private_ip', models.GenericIPAddressField(verbose_name='私网IP')),
                ('os_type', models.CharField(default='linux', max_length=10, verbose_name='操作系统类型')),
                ('os_name', models.CharField(default='', max_length=20, verbose_name='操作系统名称')),
                ('instance_charge_type', models.CharField(choices=[('PrePaid', '预付费'), ('PostPaid', '后付费')], default='PrePaid', max_length=8, verbose_name='实例的付费方式')),
                ('creation_time', models.DateTimeField(verbose_name='创建时间')),
                ('expired_time', models.DateTimeField(verbose_name='过期时间')),
                ('add_time', models.DateTimeField(auto_now_add=True, verbose_name='入库时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
            ],
            options={
                'verbose_name_plural': '主机',
                'verbose_name': '主机',
            },
        ),
        # Business line ("product") tree; self-referencing via 'pid'.
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=32, verbose_name='业务线名称')),
                ('module_letter', models.CharField(max_length=32, verbose_name='字母简称')),
                ('dev_interface', models.ManyToManyField(related_name='dev', to=settings.AUTH_USER_MODEL, verbose_name='业务负责人')),
                ('op_interface', models.ManyToManyField(related_name='op', to=settings.AUTH_USER_MODEL, verbose_name='运维负责人')),
                ('pid', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='cmdb.Product', verbose_name='上级业务线')),
            ],
        ),
        # Cross-model links added after both endpoints exist.
        migrations.AddField(
            model_name='host',
            name='business_line',
            field=models.ManyToManyField(blank=True, to='cmdb.Product', verbose_name='业务线'),
        ),
        migrations.AddField(
            model_name='disk',
            name='host',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cmdb.Host', verbose_name='主机'),
        ),
    ]
|
{
"name" : "Redondeo en pedidos",
"version" : "1.0",
"author" : "Econube Pablo Cabezas",
"website" : "http://openerp.com",
"category" : "Econube",
"description": """
Se redonde segun la configuracion de contabilidad metodo de redondeo
""",
"depends" : ['base','point_of_sale','account'],
"data": [
],
"update_xml" : [],
"installable": True,
"active": False
}
|
# %%
import os
import shutil
from glob import glob
from pathlib import Path
# %%
def combinetxt(dir_path, pattern, out_name):
    """Concatenate every file under ``dir_path/data`` matching ``pattern``
    into ``dir_path/values/out_name`` (created if needed).

    Returns the path of the combined output file.
    """
    out_dir = dir_path + '/values'
    Path(out_dir).mkdir(parents=True, exist_ok=True)
    out_path = out_dir + '/' + out_name
    with open(out_path, 'wb') as out_file:
        for file_name in glob(os.path.join(dir_path, 'data', pattern)):
            # Skip the output file itself. BUGFIX: the old check compared
            # the full glob path against the bare filename, so it never
            # matched; compare basenames instead.
            if os.path.basename(file_name) == out_name:
                continue
            with open(file_name, 'rb') as read_file:
                shutil.copyfileobj(read_file, out_file)
    return out_path
|
"""Base class for LegendreTransformer and PrimitiveTransformer
"""
from sklearn.base import TransformerMixin, BaseEstimator
import dask.array as da
class BasisTransformer(BaseEstimator, TransformerMixin):
    """Basis transformer for Sklearn pipelines

    Attributes:
      discretize: function to discretize the data
      n_state: the number of local states
      min_: the minimum local state
      max_: the maximum local state
      chunks: chunks size for state axis

    >>> import numpy as np
    >>> f = lambda *_, **__: None
    >>> BasisTransformer(f).fit().transform(np.arange(4).reshape(1, 2, 2))
    """

    # pylint: disable=too-many-arguments
    def __init__(self, discretize, n_state=2, min_=0.0, max_=1.0, chunks=None):
        """Instantiate the transformer, recording the discretization settings."""
        self.discretize = discretize
        self.n_state = n_state
        self.min_ = min_
        self.max_ = max_
        self.chunks = chunks

    def transform(self, data):
        """Discretize *data* via the configured ``discretize`` callable.

        Args:
          data: the data to discretize (wrapped into a dask array when it
            is not one already)

        Returns:
          the discretized data
        """
        if hasattr(data, "chunks"):
            dask_data = data
        else:
            dask_data = da.from_array(data, data.shape)
        return self.discretize(
            dask_data,
            n_state=self.n_state,
            min_=self.min_,
            max_=self.max_,
            chunks=self.chunks,
        )

    def fit(self, *_):
        """Only necessary to make pipelines work"""
        return self
|
def digit_sum(value, base=6):
    """Return the sum of the digits of non-negative *value* written in *base*.

    The base is parameterized (default 6, matching the original task).
    """
    total = 0
    while value > 0:
        total += value % base
        value //= base
    return total


def count_inversions(values, n=None):
    """Count index pairs i < j < n with values[i] > values[j] (O(n^2))."""
    if n is None:
        n = len(values)
    count = 0
    for i in range(n):
        for j in range(i, n):
            if values[i] > values[j]:
                count += 1
    return count


if __name__ == "__main__":
    # First line: n; second line: comma-separated integers.
    n = int(input())
    l = list(map(int, input().split(', ')))
    # Replace each number by its base-6 digit sum, then count inversions
    # among the first n entries (matching the original script's range(n)).
    l = [digit_sum(x) for x in l]
    print(count_inversions(l, n))
from pwn import *
def main(input):
    """Run one oracle query: feed *input* to service.py and return the
    hex characters at offsets 64:96 of its full output.
    """
    proc = process(['python2', 'service.py'])
    proc.recvline()
    proc.sendline(input)
    captured = proc.recvall().decode()
    return captured[64:96]
if __name__ == '__main__':
    # Byte-at-a-time oracle attack: pad the input so the next unknown flag
    # byte falls at a fixed position, then brute-force printable characters
    # until the observed ciphertext slice matches the reference.
    flag = ''
    while True:
        print("===============================================================================")
        # presumably 47 aligns the unknown byte to a cipher-block boundary
        # (main() extracts hex chars 64:96) -- TODO confirm block size
        payload = "0"*(47-len(flag))
        hex = main(payload)  # NOTE(review): shadows the builtin hex()
        print(hex)
        for i in range(33, 125):
            if main(payload + flag + chr(i)) == hex:
                flag += chr(i)
                print("Flag: ", flag)
                break
        # NOTE(review): if no candidate in 33..124 matches, the while-loop
        # retries the same position forever -- confirm that is intended.
import cv2
import numpy as np
import os
from PIL import Image
# LBPH face recognizer to be trained on the images found under 'dataSet'
recognizer = cv2.face.LBPHFaceRecognizer_create()
path = 'dataSet'
def getImagesWidthID(path):
    """Load every image under *path* as grayscale training data.

    Filenames are expected to follow '<label>.<id>.<index>.<ext>'; the
    numeric <id> becomes the training label.

    Returns:
        (faces, IDs): list of uint8 numpy arrays and their integer labels.
    """
    # Build the full path of every file in the data directory
    imagePaths = [os.path.join(path, f) for f in os.listdir(path)]
    print(imagePaths)
    faces = []
    IDs = []
    # BUGFIX: the loop variable previously shadowed the list it iterated
    # over ('for imagePaths in imagePaths'), which worked by accident.
    for imagePath in imagePaths:
        faceImg = Image.open(imagePath).convert('L')
        faceNp = np.array(faceImg, 'uint8')
        print(faceNp)
        # Extract the ID from the filename; os.path.basename is portable,
        # unlike the previous split on a literal backslash (Windows-only).
        ID = int(os.path.basename(imagePath).split('.')[1])
        faces.append(faceNp)
        IDs.append(ID)
        # Show each training image briefly (5 s) for visual verification
        cv2.imshow('Training', faceNp)
        cv2.waitKey(5000)
    return faces, IDs
faces, IDs = getImagesWidthID(path)
# training
recognizer.train(faces, np.array(IDs))
# save the trained model to file
if not os.path.exists('recognizer'):
    os.makedirs('recognizer')
recognizer.save ('recognizer/trainningData.yml')
cv2.destroyAllWindows()
|
# Package-wide singleton configuration object.
global_config = None
# NOTE(review): at import time this check is trivially true (global_config
# was just set to None above) -- presumably kept as a guard against module
# re-execution; confirm intent before simplifying.
if global_config is None:
    from .config import Config
    global_config = Config()
"""
#------------------------------------------------------------------------------
# Input generation for Boom Crane - inputgen.py
#
# Create a specific vibration and a shaped command that is designed to offset that vibration.
# Formatted for input to a small-scale boom crane
#
# Created: 4/26/17 - Daniel Newman -- danielnewman09@gmail.com
#
# Modified:
#   * 4/26/17 - DMN -- danielnewman09@gmail.com
# - Added documentation for this script
# * 6/27/17 - DMN -- danielnewman09@gmail.com
# - Added modifying values for offsetting the shaper timing
# * 2/15/18 - DMN -- danielnewman09@gmail.com
#------------------------------------------------------------------------------
"""
# Matplotlib generates an annoying warning that I don't like to see in my terminal
import warnings
warnings.simplefilter("ignore", UserWarning)
# Import the necessary python library modules
import numpy as np
from scipy.signal import lsim
from scipy import optimize
import os
import sys
import pdb
# Add my local path to the relevant modules list
path = os.getcwd()
rootpath = path.split('Daniel Newman')
rootpath = rootpath[0] + 'Daniel Newman/Python Modules'
sys.path.append(rootpath)
# Import my python modules
import InputShaping as shaping
import Generate_Plots as genplt
import ic_impulse as ic
# Sweep of frequency-error multipliers applied to the modeled frequency
# (0.1x through 1.9x) used by the simulation loop further below.
omega_error = np.arange(.1,2.,0.1)
unshaped_ampsfile = 'Amplitudes/Unshaped_Simulated.csv'
siic_ampsfile = 'Amplitudes/SI-IC_Simulated.csv'
zvic_ampsfile = 'Amplitudes/ZV-IC_Simulated.csv'
# NOTE(review): these handles are opened for writing and are not closed in
# the visible portion of this script -- confirm they are closed later.
unshaped_data = open(unshaped_ampsfile,'w')
siic_data = open(siic_ampsfile,'w')
zvic_data = open(zvic_ampsfile,'w')
# Simulation time step and 10-second time vector
dt = 0.001
t = np.arange(0,10,dt)
# Modeled natural frequency (rad/s) and damping ratio
modeled_freq = 2 * np.pi
zeta = 0.1
#Designed Values
detected_phase = 5 * np.pi/4
design_amp = 0.5
# Actuator constraints
design_Vmax = 1. # m/s
# Time over which the system accelerates to maximum velocity
tacc = 0.1
# Undamped period of oscillation
tau = 2 * np.pi / modeled_freq
# Normalized acceleration time by period
tacc_norm = tacc / tau
def position_response(t, omega_n, zeta, normalized_amp, impulse_amp):
    """Position response of a damped linear second-order system to an impulse.

    Args:
        t: time (scalar or array)
        omega_n: undamped natural frequency (rad/s)
        zeta: damping ratio (0 <= zeta < 1)
        normalized_amp, impulse_amp: amplitude scaling factors
    """
    damped_ratio = np.sqrt(1 - zeta**2)
    envelope = np.exp(-zeta * omega_n * t)
    oscillation = np.sin(omega_n * damped_ratio * t)
    return (omega_n / damped_ratio) * envelope * oscillation \
        * impulse_amp * normalized_amp
def velocity_response(t, omega, zeta, normalized_amp, impulse_amp):
    """Velocity response of a damped linear second-order system to an impulse.

    Args:
        t: time (scalar or array)
        omega: undamped natural frequency (rad/s)
        zeta: damping ratio (0 <= zeta < 1)
        normalized_amp, impulse_amp: amplitude scaling factors
    """
    damped_ratio = np.sqrt(1 - zeta**2)
    decay = np.exp(-omega * zeta * t)
    cos_term = omega**2 * decay * np.cos(omega * damped_ratio * t)
    sin_term = omega**2 * zeta * decay * np.sin(omega * damped_ratio * t) / damped_ratio
    return impulse_amp * normalized_amp * (cos_term - sin_term)
# Response used to generate desired initial conditions
modeled_response = position_response(t,modeled_freq,zeta,design_Vmax,design_amp)
modeled_response_deriv = velocity_response(t,modeled_freq,zeta,design_Vmax,design_amp)
# Convert the detected phase into a time, then into a sample index
ic_time = detected_phase / (modeled_freq * np.sqrt(1 - zeta**2))
ic_time_step = np.round(ic_time / (t[1] - t[0])).astype(int)
# Position/velocity of the modeled response at that instant become the
# "actual" initial conditions used to design the shapers below
ic_init_pos_act = modeled_response[ic_time_step]
ic_init_vel_act = modeled_response_deriv[ic_time_step]
# End impulse response amplitude generation
################################################################################################
################################################################################################
# Generate the IC shapers and the corresponding responses
# Create the input shapers
shaper_freq = modeled_freq / (2 * np.pi)
# Parameter pack consumed by ic.si() -- see the ic_impulse module
p_shaper = [ic_init_pos_act,ic_init_vel_act,tacc,1,1,modeled_freq,True]
res,zvic_shaper = ic.si(0, 1.0, None, 1.0 * shaper_freq, 1. * shaper_freq, 0.00, 0.001,p_shaper,zeta,iterating=True,)
res,siic_shaper = ic.si(2, 1.0, None, 0.9 * shaper_freq, 1.1 * shaper_freq, 0.05, 0.001,p_shaper,zeta,iterating=True,)
# Cache the shapers to disk, then reload them (round-trips through .npy)
np.save('zvic_shaper',zvic_shaper)
np.save('siic_shaper',siic_shaper)
zvic_shaper = np.load('zvic_shaper.npy')
siic_shaper = np.load('siic_shaper.npy')
def get_local_Extrema(time, data):
    '''Return the local extrema of a response and the times they occur.

    Inputs:
        time = time array corresponding to the data
        data = the response data array (only pass a single dimension/state
               at a time)

    Output:
        (localMaxes, localMax_Times, localMins, localMin_Times)

    Originally created 03/28/14 by Joshua Vaughan
    (joshua.vaughan@louisiana.edu, http://www.ucs.louisiana.edu/~jev9637);
    updated for Python 3 on 03/17/16.
    '''
    from scipy import signal

    # Strict neighbor comparisons: flat plateaus are not reported
    max_idx = signal.argrelextrema(data, np.greater)
    min_idx = signal.argrelextrema(data, np.less)
    return data[max_idx], time[max_idx], data[min_idx], time[min_idx]
################################################################################################
# Generate an initial oscillation condition
# We want to make sure that the initial oscillation amplitude is appropriate as the natural
# frequency (length) changes. We can manipulate the initial oscillation amplitude by changing
# the cable length and the start angle for this command.
for i in range(len(omega_error)):
    modeled_error = omega_error[i]
    ############################################################################################
    # Shaped and unshaped versions of the pulse command.
    zvic_shaped_input = shaping.shaped_input(shaping.pulse, t, zvic_shaper, 1/tacc, tacc)
    siic_shaped_input = shaping.shaped_input(shaping.pulse, t, siic_shaper, 1/tacc, tacc)
    unshaped_input = shaping.shaped_input(shaping.pulse, t, np.array([[0, 1]]), 1/tacc, tacc)
    # Scale the initial conditions for the (erroneous) actual frequency.
    ic_init_pos = ic_init_pos_act / ((modeled_freq * np.sqrt(1 - zeta**2)) * modeled_error)**2
    ic_init_vel = ic_init_vel_act / ((modeled_freq * np.sqrt(1 - zeta**2)) * modeled_error)**2
    # Transfer function of the second-order plant at the erroneous frequency.
    err_num = [(modeled_freq * modeled_error)**2]
    err_den = [1, 2 * zeta * (modeled_freq * modeled_error), ((modeled_error * modeled_freq)**2)]
    err_sys = (err_num, err_den)
    ty, unshaped_response, xy = lsim(err_sys, unshaped_input, t, X0=(ic_init_vel, ic_init_pos))
    ty, zvic_response, xy = lsim(err_sys, zvic_shaped_input, t, X0=(ic_init_vel, ic_init_pos))
    ty, siic_response, xy = lsim(err_sys, siic_shaped_input, t, X0=(ic_init_vel, ic_init_pos))
    # The sample index where each shaped command ends.
    zvic_end = int(np.round(zvic_shaper[-1, 0] / dt))
    siic_end = int(np.round(siic_shaper[-1, 0] / dt))
    # --- SI-IC residual amplitude, normalized by the unshaped amplitude over the same window ---
    unshapedMaxes, unshapedMax_Times, unshapedMins, unshapedMin_Times = get_local_Extrema(
        t[siic_end:],
        unshaped_response[siic_end:]
    )
    unshaped_amp = (np.average(unshapedMaxes) - np.average(unshapedMins)) / 2
    siic_Maxes, siic_Max_Times, siic_Mins, siic_Min_Times = get_local_Extrema(
        t[siic_end:],
        siic_response[siic_end:]
    )
    siic_amp = (np.average(siic_Maxes) - np.average(siic_Mins)) / 2
    siic_amp /= unshaped_amp
    # --- ZV-IC residual amplitude ---
    # BUG FIX: the original mixed siic_end and zvic_end in this section
    # (t[zvic_end:] paired with unshaped_response[siic_end:], and siic_end used
    # for the ZV-IC response), which misaligns the time/response arrays and left
    # zvic_end unused. Use zvic_end consistently for the ZV-IC window.
    unshapedMaxes, unshapedMax_Times, unshapedMins, unshapedMin_Times = get_local_Extrema(
        t[zvic_end:],
        unshaped_response[zvic_end:]
    )
    unshaped_amp = (np.average(unshapedMaxes) - np.average(unshapedMins)) / 2
    zvic_Maxes, zvic_Max_Times, zvic_Mins, zvic_Min_Times = get_local_Extrema(
        t[zvic_end:],
        zvic_response[zvic_end:]
    )
    zvic_amp = (np.average(zvic_Maxes) - np.average(zvic_Mins)) / 2
    zvic_amp /= unshaped_amp
    # Record the normalized residual amplitudes for this frequency error.
    unshaped_data.write('{}\n'.format(unshaped_amp))
    siic_data.write('{}\n'.format(siic_amp))
    zvic_data.write('{}\n'.format(zvic_amp))
    #genplt.compare_responses(t,
    #                         zvic_response,'ZV-IC',
    #                         unshaped_response,'Unshaped',
    #                         siic_response,'SI-IC',
    #                         name_append='Responses_{}'.format(omega_error[i]),
    #                         xlabel='Time (s)',ylabel='Position (m)',
    #                         grid=False,save_data=False,ncol=1,ymax=0.1
    #                         )
unshaped_data.close()
siic_data.close()
zvic_data.close()
#!/usr/bin/env python3
import argparse
import sys
from time import time
import yaml
# Global instrumentation counters, reported by write_result().
n_comparisons = 0
n_swaps = 0


def insertion_sort(array):
    """Sort ``array`` in place with insertion sort and return it.

    Updates the global n_comparisons / n_swaps counters.

    BUG FIX: the original shifted only array[k+1:i] (losing array[k] and
    leaving a duplicate of the inserted element at array[i]) and performed
    the insertion even when the element was already in place, e.g.
    insertion_sort([2, 1]) returned [1, 1] and [1, 2] returned [2, 2].
    """
    global n_comparisons
    global n_swaps
    for i in range(1, len(array)):
        elem = array[i]
        # Find the first position k whose element is greater than elem.
        for k in range(i):
            n_comparisons += 1
            if array[k] > elem:
                break
        else:
            # elem is >= everything before it: already in place.
            continue
        # Shift array[k:i] one slot right, then drop elem into the gap.
        for j in reversed(range(k, i)):
            n_swaps += 1
            array[j + 1] = array[j]
        array[k] = elem
    return array
def bubble_sort(array):
    """Sort ``array`` in place with bubble sort and return it.

    Each pass bubbles the smallest remaining element down to position
    ``pass_idx``. Updates the global n_comparisons / n_swaps counters.
    """
    global n_comparisons
    global n_swaps
    length = len(array)
    for pass_idx in range(length):
        j = length - 1
        while j > pass_idx:
            n_comparisons += 1
            if array[j] < array[j - 1]:
                n_swaps += 1
                array[j - 1], array[j] = array[j], array[j - 1]
            j -= 1
    return array
def write_result(algo, sorted_array, t):
    """Print a YAML summary of one sorting run.

    Includes the algorithm name, the sorted output, the global
    comparison/swap counters, and the elapsed wall time.
    """
    summary = dict(
        algo=algo,
        result=sorted_array,
        comparisons=n_comparisons,
        swaps=n_swaps,
        time=t,
    )
    # yaml.dump sorts keys by default, so construction order is irrelevant.
    print(yaml.dump(summary, default_flow_style=False))
def read_list(instance):
    """Read one integer per line from the file at path ``instance``.

    IOErrors propagate to the caller (the original's except/raise was a no-op).
    """
    with open(instance, 'r') as f:
        return [int(line.strip()) for line in f]
def run_experiment(algo, instance):
    """Load the instance file, time the requested sort, and print the summary.

    Exits with status 1 on an unknown algorithm name.
    """
    array = read_list(instance)
    sorters = {
        'bubble-sort': bubble_sort,
        'insertion-sort': insertion_sort,
    }
    if algo not in sorters:
        print("Unknown algorithm: ", algo, file=sys.stderr)
        sys.exit(1)
    start = time()
    sorted_array = sorters[algo](array)
    elapsed = time() - start
    write_result(algo, sorted_array, elapsed)
def do_main():
    """Parse CLI arguments and run the requested sorting experiment.

    Usage: prog [--algo {bubble-sort,insertion-sort}] instance
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--algo', type=str, choices=['bubble-sort', 'insertion-sort'])
    parser.add_argument('instance', type=str)
    args = parser.parse_args()
    run_experiment(args.algo, args.instance)


# FIX: guard the entry point so importing this module doesn't run argparse
# against the importer's argv (the original called do_main() unconditionally).
if __name__ == "__main__":
    do_main()
|
class Solution(object):
    def spiralOrder(self, matrix):
        """
        Return the elements of the matrix in clockwise spiral order.

        :type matrix: List[List[int]]
        :rtype: List[int]
        """
        if not matrix:
            return matrix
        out = []
        top, left = 0, 0
        bottom, right = len(matrix) - 1, len(matrix[0]) - 1
        while top <= bottom and left <= right:
            # Top row, left -> right.
            out.extend(matrix[top][col] for col in range(left, right + 1))
            # Right column, top+1 -> bottom.
            out.extend(matrix[row][right] for row in range(top + 1, bottom + 1))
            # Only walk back when a distinct bottom row / left column remain,
            # otherwise the single remaining row/column would be duplicated.
            if top < bottom and left < right:
                # Bottom row, right-1 -> left.
                out.extend(matrix[bottom][col] for col in range(right - 1, left - 1, -1))
                # Left column, bottom-1 -> top+1.
                out.extend(matrix[row][left] for row in range(bottom - 1, top, -1))
            top += 1
            left += 1
            bottom -= 1
            right -= 1
        return out
import turtle

# Single shared turtle used by the leaf-drawing helpers below.
bob = turtle.Turtle()
print(bob)  # sanity check: prints the turtle's repr
def draw_leaf(t, n, l, a):
    """Draw one leaf outline and restore the turtle's original heading.

    The leaf is two mirrored arcs: walk n short segments of length l,
    turning a degrees left after each, then turn around at the tip and
    trace the matching arc back.

    t: turtle to draw with
    n: number of segments per arc (about 30 gives a smooth curve)
    l: segment length (controls leaf size)
    a: per-segment turn in degrees (controls how thin the leaf is)
    """
    def arc():
        for _ in range(n):
            t.fd(l)
            t.lt(a)

    turn_back = 180 - (a * n)
    arc()             # outbound edge
    t.lt(turn_back)   # flip around at the tip
    arc()             # return edge
    t.lt(turn_back)   # restore the starting heading
def draw_leafs(k):
    """Draw k leaves evenly fanned around the global turtle ``bob``.

    k: number of leaves (each is rotated 360/k degrees from the last)
    """
    segments, seg_len, bend = 30, 5, 0.5
    for _ in range(k):
        draw_leaf(bob, segments, seg_len, bend)
        bob.lt(360 / k)


draw_leafs(12)
|
# A simple MDP where agent has to traverse a specific path
# in gridworld - wrong action will throw player back to start or do nothing.
# Player is rewarded for reaching new maximum length in the episode.
#
# State is represented by a positive ndim vector that tells
# where the player is. This is designed to mimic coordinate-systems
# and also deliberately confuse networks (e.g. might think higher value
# on axis 0 means we should take one specific action always)
#
import random
import numpy as np
import gym
# Fix for older gym versions
import gym.spaces
def generate_path(game_length: int, ndim: int, num_mines: int, seed: int = 42) -> tuple:
    """Generate the path player has to follow.
    Args:
        game_length: Length of the path to generate
        ndim: Number of dimensions (i.e. actions) in the environment
        num_mines: Number of mines per step
        seed: Seed used to generate path
    Returns:
        A ``(path, mines)`` tuple (NOTE: the original annotation claimed
        ``np.ndarray``, but two lists are returned):
        path: List of ints, representing actions player should take in each state.
        mines: List of List of ints, representing which actions are mines in each state.
    """
    path = []
    mines = []
    gen = np.random.default_rng(seed)
    for i in range(game_length):
        # Random ranking of the ndim actions for this step.
        action_ordering = gen.permutation(ndim)
        # First item goes to path, next num_mines go to mines
        path.append(action_ordering[0].item())
        mines.append(action_ordering[1:1 + num_mines].tolist())
    return path, mines
class DangerousPathEnv(gym.Env):
    """
    A N-dimensional environment where player has to choose
    the exact correct action at any given location (follow
    a very specific path). Otherwise game terminates or player stays
    still, depending on if they hit a mine or not.

    If `discrete_obs` is True, observation space tells location
    of player in path. If False, uses continuous observations
    that tell coordinate-like information of location of the player.

    `mine_ratio` specifies the amount of mines (terminal states) versus
    no-move moves per state.
    """

    def __init__(
        self,
        game_length=100,
        ndim=2,
        seed=42,
        discrete_obs=False,
        random_action_p=0.0,
        mine_ratio=1.0
    ):
        super().__init__()
        self.game_length = game_length
        self.ndim = ndim
        self.mine_ratio = mine_ratio
        # int() so the count is usable as a slice bound in generate_path
        # (np.floor returns a float, which numpy slicing rejects).
        self.num_mines_per_step = int(np.floor(ndim * mine_ratio))
        # BUG FIX: the original called generate_path(game_length, ndim, seed),
        # binding the seed to generate_path's num_mines parameter and silently
        # ignoring num_mines_per_step (mine_ratio had no effect).
        self.path, self.mines = generate_path(
            game_length, ndim, self.num_mines_per_step, seed
        )
        # Emperically found to be a necessary adjustment
        self.step_size = 1.0
        self.discrete_obs = discrete_obs
        self.random_action_p = random_action_p
        if discrete_obs:
            self.observation_space = gym.spaces.Discrete(n=self.game_length)
        else:
            self.observation_space = gym.spaces.Box(0, 1, shape=(self.ndim,))
        self.action_space = gym.spaces.Discrete(n=self.ndim)
        # Episode state (also re-initialized in reset()).
        self.path_location = 0
        self.max_path_location = 0
        self.num_steps = 0
        self.player_location = np.zeros((self.ndim,))

    def step(self, action):
        """Advance one step; returns (obs, reward, done, info)."""
        # Optionally replace the chosen action with a random one (action noise).
        if self.random_action_p > 0.0 and random.random() < self.random_action_p:
            action = self.action_space.sample()
        done = False
        reward = 0
        action = int(action)
        if action == self.path[self.path_location]:
            # You chose wisely: advance along the path.
            self.path_location += 1
            # Only reward progressing past the episode's furthest point once.
            if self.path_location > self.max_path_location:
                reward = 1
                self.max_path_location += 1
            self.player_location[action] += self.step_size
            # NOTE(review): terminating at game_length - 1 means the final path
            # entry is never required — looks like an off-by-one; confirm intent.
            if self.path_location == (self.game_length - 1):
                done = True
        else:
            # You chose poorly: stand still, or hit a mine.
            reward = 0
            if action in self.mines[self.path_location]:
                # You chose very poorly, back to start.
                self.path_location = 0
                self.player_location = np.zeros((self.ndim,))
        self.num_steps += 1
        if self.num_steps >= self.game_length:
            done = True
        return self.path_location if self.discrete_obs else self.player_location, reward, done, {}

    def reset(self):
        """Reset episode state and return the initial observation."""
        self.path_location = 0
        self.max_path_location = 0
        self.num_steps = 0
        self.player_location = np.zeros((self.ndim,))
        return self.path_location if self.discrete_obs else self.player_location

    def seed(self, seed):
        """Regenerate the path and mines from a new seed.

        BUG FIX: same argument-binding fix as in __init__ — the original
        passed only (game_length, ndim, seed), dropping num_mines_per_step.
        """
        self.path, self.mines = generate_path(
            self.game_length, self.ndim, self.num_mines_per_step, seed
        )
|
import warnings
warnings.filterwarnings("ignore")
from fingerprinter.reader import read
from fingerprinter.fingerprint import fingerprint
from database.fingerprint_db import FingerprintDatabase
from etc.util import get_args_for_input_file
DESCRIPTION = """
This script will load a single wav file (-f), fingerprint it, and look up all hashes in the fingerprint database.
"""

# Set to True to show the fingerprint plot while processing.
PLOT_FINGERPRINT = False

if __name__ == "__main__":
    args = get_args_for_input_file(DESCRIPTION)
    print("Loading and fingerprinting wav file {}".format(args.file))
    db = FingerprintDatabase()
    channels, frame_rate = read(args.file)
    # The channel index was never used, so iterate the channels directly
    # instead of enumerate().
    for channel in channels:
        hash_tuples = fingerprint(channel, frame_rate, PLOT_FINGERPRINT)
        # pair[1] is the hash used as the lookup key — presumably pair is
        # (offset, hash); confirm against fingerprint()'s return format.
        for pair in hash_tuples:
            result = db.lookup(pair[1])
            # fp_hash instead of `hash`, which shadowed the builtin.
            for (fp_hash, episode, play_head) in result:
                print("hash:{} episode:{} play_head:{}".format(fp_hash, episode, play_head))
|
# It reads two values and show a menu screen:
# [1] Addition
# [2] Multiplication
# [3] Greater Number
# [4] New Numbers
# [5] Finish the program
# It accomplish the requested operaton of each option:
from time import sleep

n1 = int(input('First Value: '))
n2 = int(input('Second Value: '))
option = 0
while option != 5:
    print('''
    [1] Addition
    [2] Multiplication
    [3] Greater Number
    [4] New Numbers
    [5] Finish the Program''')
    option = int(input('>>>>>>> What is your option? '))
    if option == 1:
        # `total` instead of `sum`: don't shadow the builtin.
        total = n1 + n2
        print(f'The sum between {n1} + {n2} is {total} ')
    elif option == 2:
        times = n1 * n2
        print(f'The result of {n1} x {n2} is {times}')
    elif option == 3:
        if n1 > n2:
            greater = n1
        else:
            greater = n2
        print(f'Between {n1} and {n2} the greater number is {greater}')
    elif option == 4:
        print('Give me new numbers: ')
        n1 = int(input('First Value: '))
        n2 = int(input('Second Value:'))
    elif option == 5:
        print('Finishing ...')
    else:
        print('Invalid Option. Try Again!')
    # Separator + pause runs after every menu round, including the last.
    print('=-=' * 10)
    sleep(2)
print('End of Program! Bye!')
|
# Tiny demo script: print two greetings and a computed total.
print("Hello, World from python")
print("Hi there, webdev!")
total = 2 + 2
print("the new total is", total)
|
import math, time, functools

# All sums are reduced modulo this prime.
M = 1000000007
# We see that s(n) looks like 19, 29, 39, etc.
# The sum is given easily via summation.
# For S(20), we get the following sum:
# 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8
# + 9 + 19 + 29 + 39 + 49 + 59 + 69 + 79 + 89
# + 99 + 199 + 299 => 100 + 200 + 300 - 3 = 597 = sum(1, r + 1) * 10^q - (r + 1)
@functools.lru_cache()
def f(i):
    """Return the i-th Fibonacci number (f(0) = 0, f(1) = 1), memoized."""
    return i if i < 2 else f(i - 1) + f(i - 2)
def S(k):
    """Sum of s(n) for n up to k, with powers of 10 taken modulo M.

    k is split as k = 9*q + r: the first term covers the q complete blocks
    of nine, the second the r + 1 leftover values.
    """
    q, r = divmod(k, 9)
    full_blocks = 5 * (pow(10, q, M) - 1) - 9 * q
    partial = (r + 2) * (r + 1) // 2 * pow(10, q, M) - (r + 1)
    return full_blocks + partial
# Accumulate S(F(i)) over Fibonacci indices 2..90, reducing mod M each step.
val = 0
for i in range(2, 91):
    ## print("i", i)
    val += S(f(i))
    val %= M
print(val)
|
def getIntersectionNode(self, headA, headB):
    """
    Return the node where the two singly linked lists intersect, or None.

    :type head1, head1: ListNode
    :rtype: ListNode
    """
    if headA is None or headB is None:
        return None
    # Two-pointer walk: each pointer traverses its own list, then switches
    # to the other's head. Both travel lenA + lenB steps in total, so they
    # meet at the intersection node — or both reach None simultaneously
    # when the lists are disjoint.
    pa, pb = headA, headB
    while pa is not pb:
        pa = pa.next if pa is not None else headB
        pb = pb.next if pb is not None else headA
    return pa
|
# Data file all student records are appended to.
# NOTE(review): a .py extension for a data file looks accidental — confirm.
file = 'Textfile.py'


def write_to_file(student):
    """Append one student record (plus a trailing newline) to the data file.

    FIX: use a `with` block so the handle is closed even if write() raises
    (the original's explicit close() was skipped on error).
    """
    with open(file, 'a') as new_file:
        new_file.write(student + "\n")
def get_student_info(studentName):
    """Interactively collect test scores for one student and persist them.

    Keeps prompting for integer scores until the user enters "s" at the
    stop prompt, then writes the (name, scores) tuple via write_to_file().
    """
    scores = []
    print("Enter scores for: " + studentName)
    while True:
        scores.append(int(input("Enter the scores ")))
        # Anything other than "s" continues with the same student.
        if input("Press s to stop") == "s":
            break
    write_to_file(str((studentName, tuple(scores))))
def read_from_file():
    """Print every line of the data file.

    FIX: iterate the handle directly instead of materializing readlines(),
    and use `with` so the file is closed even if print() raises.
    """
    with open(file) as read:
        for text in read:
            print(text)
def main():
    """Collect scores for a fixed roster of students, then echo the file."""
    for name in ('Ben', 'Eric', 'David', 'Frank'):
        get_student_info(name)
    read_from_file()
if __name__ == "__main__":
    main()

# FIX: the lines below were sample program output pasted into the source as
# bare module-level tuple expressions; keep them as documentation only.
# Output:
# ('Ben', (80,))
# ('Ben', (80, 70))
# ('Eric', (90,))
# ('Eric', (90, 20))
# ('David', (67,))
# ('David', (67, 51))
# ('Frank', (56,))
# ('Frank', (56, 45))
|
# Generated by Django 3.1.1 on 2020-10-02 11:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Set comment.blog_title to CharField(max_length=100, verbose_name='Blog Title')."""

    dependencies = [
        ('joseph_blog', '0005_auto_20201002_1859'),
    ]

    operations = [
        migrations.AlterField(
            model_name='comment',
            name='blog_title',
            field=models.CharField(max_length=100, verbose_name='Blog Title'),
        ),
    ]
|
from random import randint
from time import sleep

print('='*10+' JOGO DE ADIVINHA '+'='*10)
print('\n>> Vou pensar em um numero de 0 a 10. Tente adivinhar.')
# BUG FIX: the prompt announces a number from 0 to 10, but the original drew
# from randint(0, 5); widen the draw to match the announced range.
numeroComputador = randint(0, 10)
# Up to 5 guesses ("palpites" — original local was misspelled "palplites").
for palpites in range(1, 6):
    numeroUtilizador = int(input('>> Resposta: '))
    print('PROCESSANDO...')
    sleep(1)
    if numeroUtilizador < numeroComputador:
        print('\n>> Errado! O numero {} e\' menor.'.format(numeroUtilizador))
    elif numeroUtilizador > numeroComputador:
        print('\n>> Errado! O numero {} e\' maior.'.format(numeroUtilizador))
    else:
        print('\n>> PARABENS! Voce consegui!')
        print('>> Depois de {} tentativas.'.format(palpites))
        break
|
class LinearRegression:
    """Simple linear model y = slope * x + intercept.

    Fitting minimizes the sum of squared residuals (SSR) with a crude
    halving coordinate search: tune the intercept, then the slope.
    """

    def __init__(self):
        self.intercept = 0  # fitted y-intercept
        self.slope = 0      # fitted slope

    def predict(self, data):
        """Predict for a scalar, or element-wise (recursively) for an iterable."""
        try:
            items = iter(data)
        except TypeError:
            # Scalar input: apply the fitted line directly.
            return data * self.slope + self.intercept
        return [self.predict(x) for x in items]

    def fit(self, x_arr, y_arr):
        """Fit intercept then slope on paired samples x_arr / y_arr."""
        self.intercept = 1
        self.slope = 1
        # The two search loops in the original were identical except for the
        # coefficient being tuned; factored into one helper.
        self._tune('intercept', x_arr, y_arr)
        self._tune('slope', x_arr, y_arr)

    def _tune(self, attr, x_arr, y_arr):
        """Greedy 1-D search on one coefficient.

        Step the coefficient; whenever the SSR worsens, reverse direction and
        halve the step. Stops after 11 such halvings, like the original loops.
        """
        direction = 1  # renamed from `dir`, which shadowed the builtin
        step = 1
        halvings = 0
        cur_ssr = self.__calc_ssr(x_arr, y_arr)
        while halvings <= 10:
            setattr(self, attr, getattr(self, attr) + direction * step)
            prev_ssr = cur_ssr
            cur_ssr = self.__calc_ssr(x_arr, y_arr)
            if cur_ssr > prev_ssr:
                direction *= -1
                step /= 2
                halvings += 1

    def __calc_ssr(self, x_arr, y_arr):
        """Sum of squared residuals of the current line over the data."""
        sqr_res = []
        for z in range(len(x_arr)):
            expected = self.intercept + (self.slope * x_arr[z])
            sqr_res.append((expected - y_arr[z]) ** 2)
        return sum(sqr_res)
|
from collections import defaultdict
class TopoSort(object):
    """Topological sort of a directed graph via DFS post-order."""

    def __init__(self, n, e):
        """n: vertex count (vertices are 0..n-1); e: edges [[u, v], ...] meaning u -> v."""
        super(TopoSort, self).__init__()
        self.vertices = n
        self.g = defaultdict(list)
        for edge in e:
            self.g[edge[0]].append(edge[1])

    def Sort(self):
        """Return the vertices in topological order (assumes an acyclic graph)."""
        visited = {}
        order = []
        for vertex in range(self.vertices):
            if vertex not in visited:
                self.dfs(vertex, visited, order)
        # Post-order finishing times, reversed, give a topological order.
        order.reverse()
        return order

    def dfs(self, v, used, res):
        """Post-order DFS from v; appends each finished vertex to res."""
        used[v] = 1
        for neighbor in self.g[v]:
            if neighbor not in used:
                self.dfs(neighbor, used, res)
        res.append(v)
# Smoke test: edges 2 -> 0 and 1 -> 2; prints the order [1, 2, 0].
main = TopoSort(3,[[2,0],[1,2]])
print(main.Sort())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.