content
stringlengths 5
1.05M
|
|---|
#!/usr/bin/python
#
# Copyright 2019 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
from __future__ import absolute_import, division, print_function
import sys
import click
from polyaxon.cli.errors import handle_cli_error
from polyaxon.client import PolyaxonClient
from polyaxon.exceptions import (
PolyaxonClientException,
PolyaxonHTTPError,
PolyaxonShouldExitError,
)
from polyaxon.logger import clean_outputs
from polyaxon.utils.formatting import Printer
# Click command group that namespaces the user-management subcommands
# (``polyaxon user ...``); the docstring below becomes the CLI help text.
@click.group()
@clean_outputs
def user():
    """Commands for user management."""
@user.command()
@click.argument("username", type=str)
@clean_outputs
def activate(username):
    """Activate a user.

    Example:

    \b
    ```bash
    $ polyaxon user activate david
    ```
    """
    # The client is constructed inside the try block so that construction
    # failures are reported through the same CLI error path.
    try:
        PolyaxonClient().user.activate_user(username)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as exc:
        handle_cli_error(exc, message="Could not activate user `{}`.".format(username))
        sys.exit(1)
    else:
        Printer.print_success("User `{}` was activated successfully.".format(username))
@user.command()
@click.argument("username", type=str)
@clean_outputs
def delete(username):
    """Delete a user.

    Example:

    \b
    ```bash
    $ polyaxon user delete david
    ```
    """
    # Mirror of `activate`: report API failures via the CLI error handler
    # and exit non-zero; print the success message only when no error occurred.
    try:
        PolyaxonClient().user.delete_user(username)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as exc:
        handle_cli_error(exc, message="Could not delete user `{}`.".format(username))
        sys.exit(1)
    else:
        Printer.print_success("User `{}` was deleted successfully.".format(username))
|
import pytest
from centpy.equations import Equation1d
from centpy.parameters import Pars1d
class Scalar(Equation1d):
    """Minimal Equation1d implementation with constant test data."""

    def initial_data(self):
        """Return the fixed two-element initial state."""
        return [20, 12]

    def boundary_conditions(self, u):
        """Pin the leftmost cell of ``u`` to 10 (mutates ``u`` in place)."""
        u[0] = 10

    def flux_x(self, u):
        """Linear flux in x: half of ``u``."""
        return u * 0.5

    def spectral_radius_x(self, u):
        """Linear spectral radius in x: one tenth of ``u``."""
        return u * 0.1
@pytest.fixture
def eqn():
    # Provide a Scalar equation built with default 1-D parameters.
    return Scalar(Pars1d())
def test_equation1d(eqn):
    """Smoke-test the Equation1d interface implemented by Scalar."""
    tmp_u = [100, 200]
    # boundary_conditions must overwrite only the left cell.
    eqn.boundary_conditions(tmp_u)
    assert eqn.initial_data() == [20, 12]
    assert tmp_u == [10, 200]
    # flux is 0.5*u and spectral radius is 0.1*u.
    assert eqn.flux_x(tmp_u[0]) == 5
    assert eqn.spectral_radius_x(tmp_u[1]) == 20
|
##################################################################
# test T-splines mesh creation. The example is taken from Fig. 23,
# Isogeometric Analysis using T-splines
##################################################################
#importing Kratos modules
from KratosMultiphysics import *
from KratosMultiphysics.StructuralApplication import *
from KratosMultiphysics.IsogeometricApplication import *
kernel = Kernel() #defining kernel
# Reader for the model data stored under the "model" base name
# (presumably a Bezier/T-splines mesh file — confirm against the example data).
model_part_io = BezierModelPartIO("model")
mp = ModelPart()
# Populate the empty model part from the file.
model_part_io.ReadModelPart(mp)
|
from onegov.org import _
from onegov.form import Form
from wtforms import StringField, TextAreaField, SelectField
from wtforms.fields.html5 import URLField
from wtforms.validators import InputRequired
from onegov.org.models.external_link import ExternalLinkCollection
class ExternalLinkForm(Form):
    """Form for creating/editing an external link entry."""

    # Required display title of the link.
    title = StringField(
        label=_("Title"),
        validators=[InputRequired()])

    # Required short description shown with the entry.
    lead = TextAreaField(
        label=_("Lead"),
        description=_("Describes briefly what this entry is about"),
        validators=[InputRequired()],
        render_kw={'rows': 4})

    # Required target URL of the external site.
    url = URLField(
        label=_("URL"),
        description=_("Url pointing to another website"),
        validators=[InputRequired()]
    )

    # Optional grouping label used in the overview.
    group = StringField(
        label=_("Group"),
        description=_("Used to group this link in the overview")
    )

    # Which list view the link appears in; choices are filled per-request
    # in on_request() below.
    member_of = SelectField(
        label=_("Name of the list view this link will be shown"),
        choices=[]
    )

    def on_request(self):
        # Populate the select choices at request time, translating each
        # collection-provided name for the current locale.
        self.member_of.choices = [
            (id_, self.request.translate(_(name)))
            for id_, name in ExternalLinkCollection.form_choices()
        ]
|
#!/usr/bin/env python
from setuptools import setup
# Package metadata and entry point for the broti IRC bot.
setup(
    name='broti',
    version='1.0',
    description='A bot for IRC',
    author='Stefan Koch',
    author_email='programming@stefan-koch.name',
    packages=['broti', 'broti.modules', 'broti.providers'],
    install_requires=['irc'],
    entry_points={
        'console_scripts': ['broti = broti.cmdline:execute'],
    },
)
|
# Copyright The IETF Trust 2021, All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Slavomir Mazur'
__copyright__ = 'Copyright The IETF Trust 2021, All Rights Reserved'
__license__ = 'Apache License, Version 2.0'
__email__ = 'slavomir.mazur@pantheon.tech'
import json
from create_config import create_config
from redis import Redis
class RedisConnection:
    """Wrapper around a Redis connection used to store YANG module data.

    Modules are stored as JSON strings under keys of the form
    ``<name>@<revision>/<organization>``.
    """

    def __init__(self, modules_db: int = 1):
        # Connection parameters come from the 'DB-Section' of the shared config.
        config = create_config()
        self.__redis_host = config.get('DB-Section', 'redis-host')
        self.__redis_port = config.get('DB-Section', 'redis-port')
        self.modulesDB = Redis(host=self.__redis_host, port=self.__redis_port, db=modules_db)

    ### MODULES DATABASE COMMUNICATION ###
    def update_module_properties(self, new_module: dict, existing_module: dict) -> dict:
        """Merge ``new_module`` into ``existing_module`` and return the result.

        Merge rules per property:
        - 'implementations': append implementations not already present
          (identity determined by :meth:`_create_implementation_key`).
        - 'dependents'/'dependencies': append new entries by name, or replace
          the existing entry of the same name with the new one.
        - any other key: overwrite when the new value differs and is not None.

        Note: ``existing_module`` is mutated in place and also returned.
        """
        keys = {**new_module, **existing_module}.keys()
        for key in keys:
            if key == 'implementations':
                new_impls = new_module.get('implementations', {}).get('implementation', [])
                existing_impls = existing_module.get('implementations', {}).get('implementation', [])
                existing_impls_names = [self._create_implementation_key(impl) for impl in existing_impls]
                for new_impl in new_impls:
                    new_impl_name = self._create_implementation_key(new_impl)
                    if new_impl_name not in existing_impls_names:
                        existing_impls.append(new_impl)
                        existing_impls_names.append(new_impl_name)
            elif key in ['dependents', 'dependencies']:
                new_prop_list = new_module.get(key, [])
                existing_prop_list = existing_module.get(key, [])
                existing_prop_names = [existing_prop.get('name') for existing_prop in existing_prop_list]
                for new_prop in new_prop_list:
                    new_prop_name = new_prop.get('name')
                    if new_prop_name not in existing_prop_names:
                        existing_prop_list.append(new_prop)
                        existing_prop_names.append(new_prop_name)
                    else:
                        # Same name already stored: replace with the newer entry.
                        index = existing_prop_names.index(new_prop_name)
                        existing_prop_list[index] = new_prop
            else:
                new_value = new_module.get(key)
                existing_value = existing_module.get(key)
                if existing_value != new_value and new_value is not None:
                    existing_module[key] = new_value
        return existing_module

    def populate_modules(self, new_modules: list) -> dict:
        """ Merge new data of each module in 'new_modules' list with existing data already stored in Redis.
        Set updated data to Redis under created key in format: <name>@<revision>/<organization>

        Argument:
            :param new_modules (list) list of modules which need to be stored into Redis cache
        :return dict mapping each Redis key to the module data that was stored.
        """
        new_merged_modules = {}
        for new_module in new_modules:
            redis_key = self._create_module_key(new_module)
            redis_module = self.get_module(redis_key)
            if redis_module == '{}':
                # Nothing stored yet — take the new data as-is.
                updated_module = new_module
            else:
                updated_module = self.update_module_properties(new_module, json.loads(redis_module))
            self.set_redis_module(updated_module, redis_key)
            new_merged_modules[redis_key] = updated_module
        # Bug fix: this mapping was previously built and then discarded;
        # returning it lets callers inspect what was stored (was None before,
        # so this is backward compatible).
        return new_merged_modules

    def get_module(self, key: str) -> str:
        """Return the stored JSON string for ``key``, or '{}' if absent."""
        data = self.modulesDB.get(key)
        return (data or b'{}').decode('utf-8')

    def set_redis_module(self, module: dict, redis_key: str):
        """Serialize ``module`` to JSON and store it under ``redis_key``."""
        result = self.modulesDB.set(redis_key, json.dumps(module))
        if result:
            print('{} key updated'.format(redis_key), flush=True)
        else:
            print('Problem while setting {}'.format(redis_key), flush=True)
        return result

    def _create_module_key(self, module: dict) -> str:
        """Build the '<name>@<revision>/<organization>' Redis key for a module."""
        return '{}@{}/{}'.format(module.get('name'), module.get('revision'), module.get('organization'))

    def _create_implementation_key(self, impl: dict) -> str:
        """Build a unique implementation id; spaces are replaced by '#'."""
        return '{}/{}/{}/{}'.format(impl['vendor'].replace(' ', '#'), impl['platform'].replace(' ', '#'),
                                    impl['software-version'].replace(' ', '#'), impl['software-flavor'].replace(' ', '#'))
|
import sys
import os
import ssl
import time
import json
import MySQLdb
from datetime import datetime
import socket
import random
import threading
class GetSvaData():
    """Generates simulated SVA network-signal messages and writes them to MySQL.

    Python 2 code (print statements). Run() loops forever, emitting a random
    fake 'networkinfo' message every 2 seconds and persisting the parsed data;
    CheckSvaError() is polled by the watchdog in __main__.
    """
    # Set to True when Run() dies; checked by the watchdog.
    isError = False
    # Timestamp of the last successfully processed message.
    nowTime = datetime.now()
    appname = ""
    brokeip = ""
    brokerport = ""
    queueid = ""
    companyid = ""

    def __init__(self,appName,brokeIP,brokerPort,queueID,companyID):
        # Broker parameters are stored but not used in this simulation code.
        self.appname = appName
        self.brokeip = brokeIP
        self.brokerport = brokerPort
        self.queueid = queueID
        self.companyid = companyID

    def CheckSvaError(self):
        # Error if the worker flagged a failure, or if no message has been
        # processed in the last 5 minutes (stale nowTime).
        if(self.isError == True):
            return True
        messageTime = self.nowTime
        nowTime = datetime.now()
        if((nowTime - messageTime).seconds > 60 * 5):
            return True
        return False

    def Run(self):
        # Worker loop: build one of six canned 'networkinfo' JSON payloads,
        # parse it, and insert the signal rows into MySQL. Inner failures are
        # printed and retried; an outer failure sets isError for the watchdog.
        try:
            while True:
                timestamp = int(time.time())* 1000
                print timestamp
                userId = "C0A80A66"
                message1 = "{\"networkinfo\":[{\"userid\":\""+str(userId)+"\",\"infotype\":\"ransignal\",\"lampsiteinfo\":{\"enbid\":\"509146\",\"prrusignal\":[{\"gpp\":\"0_2_1\",\"rsrp\":\"-1100\"},{\"gpp\":\"0_2_2\",\"rsrp\":\"-1000\"},{\"gpp\":\"0_2_3\",\"rsrp\":\"-1300\"},{\"gpp\":\"0_2_4\",\"rsrp\":\"-1100\"},{\"gpp\":\"0_2_5\",\"rsrp\":\"-900\"},{\"gpp\":\"0_2_6\",\"rsrp\":\"-1300\"}]},\"timestamp\":"+str(timestamp)+"}]}"
                message2 = "{\"networkinfo\":[{\"userid\":\""+str(userId)+"\",\"infotype\":\"ransignal\",\"lampsiteinfo\":{\"enbid\":\"509146\",\"prrusignal\":[{\"gpp\":\"0_2_2\",\"rsrp\":\"-1400\"},{\"gpp\":\"0_2_3\",\"rsrp\":\"-1100\"},{\"gpp\":\"0_2_5\",\"rsrp\":\"-1000\"},{\"gpp\":\"0_2_6\",\"rsrp\":\"-1350\"},{\"gpp\":\"0_2_7\",\"rsrp\":\"-1500\"},{\"gpp\":\"0_2_8\",\"rsrp\":\"-1100\"}]},\"timestamp\":"+str(timestamp)+"}]}"
                message3 = "{\"networkinfo\":[{\"userid\":\""+str(userId)+"\",\"infotype\":\"ransignal\",\"lampsiteinfo\":{\"enbid\":\"509146\",\"prrusignal\":[{\"gpp\":\"0_2_2\",\"rsrp\":\"-1100\"},{\"gpp\":\"0_2_3\",\"rsrp\":\"-900\"},{\"gpp\":\"0_2_4\",\"rsrp\":\"-1600\"},{\"gpp\":\"0_2_5\",\"rsrp\":\"-800\"},{\"gpp\":\"0_2_6\",\"rsrp\":\"-1360\"},{\"gpp\":\"0_2_7\",\"rsrp\":\"-1100\"}]},\"timestamp\":"+str(timestamp)+"}]}"
                message4 = "{\"networkinfo\":[{\"userid\":\""+str(userId)+"\",\"infotype\":\"ransignal\",\"lampsiteinfo\":{\"enbid\":\"509146\",\"prrusignal\":[{\"gpp\":\"0_2_2\",\"rsrp\":\"-1400\"},{\"gpp\":\"0_2_3\",\"rsrp\":\"-1500\"},{\"gpp\":\"0_2_4\",\"rsrp\":\"-1500\"},{\"gpp\":\"0_2_5\",\"rsrp\":\"-1400\"},{\"gpp\":\"0_2_6\",\"rsrp\":\"-1400\"}]},\"timestamp\":"+str(timestamp)+"}]}"
                message5 = "{\"networkinfo\":[{\"userid\":\""+str(userId)+"\",\"infotype\":\"ransignal\",\"lampsiteinfo\":{\"enbid\":\"509146\",\"prrusignal\":[{\"gpp\":\"0_2_1\",\"rsrp\":\"-900\"},{\"gpp\":\"0_2_2\",\"rsrp\":\"-1100\"},{\"gpp\":\"0_2_3\",\"rsrp\":\"-1300\"},{\"gpp\":\"0_2_4\",\"rsrp\":\"-1100\"},{\"gpp\":\"0_2_5\",\"rsrp\":\"-1400\"},{\"gpp\":\"0_2_6\",\"rsrp\":\"-1360\"}]},\"timestamp\":"+str(timestamp)+"}]}"
                message6 = "{\"networkinfo\":[{\"userid\":\""+str(userId)+"\",\"infotype\":\"ransignal\",\"lampsiteinfo\":{\"enbid\":\"509146\",\"prrusignal\":[{\"gpp\":\"0_2_1\",\"rsrp\":\"-900\"},{\"gpp\":\"0_2_3\",\"rsrp\":\"-1100\"},{\"gpp\":\"0_2_4\",\"rsrp\":\"-800\"},{\"gpp\":\"0_2_5\",\"rsrp\":\"-1200\"},{\"gpp\":\"0_2_6\",\"rsrp\":\"-1400\"},{\"gpp\":\"0_2_7\",\"rsrp\":\"-1100\"}]},\"timestamp\":"+str(timestamp)+"}]}"
                messageList = [message1,message2,message3,message4,message5,message6]
                # Pick one canned message at random each cycle.
                count = random.randint(0, 5)
                message = messageList[count]
                #print message
                #message = '{"geofencing":[{"IdType": "IP", "userid": ["bea80202"], "mapid": 2, "zoneid": 0, "zone_event": "exit", "Timestamp":1461054031000}]}'
                #{"locationstream":[{"IdType":"IP","Timestamp":1427560872000,"datatype":"coordinates","location":{"x":1133.0,"y":492.0,"z":1},"userid":["0a26d23d"]}]}
                try:
                    print message
                    jsonData = json.loads(message)
                    # NOTE(review): hard-coded DB credentials; a new connection
                    # is opened (and closed below) on every iteration.
                    conn=MySQLdb.connect(host='127.0.0.1',user='root',passwd='123456',port=3306)
                    cursor = conn.cursor()
                    conn.select_db('sva')
                    # Branch on the (single) top-level message type.
                    if jsonData.keys()[0] == 'locationstream':
                        jsonList = jsonData["locationstream"]
                        for index in range(len(jsonList)):
                            IdType = jsonList[index]["IdType"]
                            Timestamp = jsonList[index]["Timestamp"]
                            datatype = jsonList[index]["datatype"]
                            x = jsonList[index]["location"]["x"]
                            y = jsonList[index]["location"]["y"]
                            z = jsonList[index]["location"]["z"]
                            # Encode company id into the floor/z value; negative
                            # floors are offset by 5000.
                            if z > 0:
                                z = z + int(self.companyid)*10000
                            else:
                                z = abs(z) + 5000 + int(self.companyid)*10000
                            if len(jsonList[index]["userid"]) < 1:
                                continue
                            userid = jsonList[index]["userid"][0]
                            sqlparam = [IdType,Timestamp,datatype,x,y,z,userid]
                            print sqlparam
                            time_begin = Timestamp
                            loc_count= 1
                            during = 0
                            time_local = time.time() * 1000
                            # Update row if this user already exists, else insert.
                            cursor.execute ("select loc_count, time_begin,timestamp,userid from locationPhone where userid=%s",[userid])
                            row = cursor.fetchone ()
                            if row != None:
                                sqlparam = [IdType,Timestamp,time_local,datatype,x,y,z,userid]
                                cursor.execute("update locationphone set IdType=%s, Timestamp = %s,time_local= %s,datatype= %s,x=%s, y =%s, z = %s where userid = %s",sqlparam)
                            else:
                                sqlparam = [IdType,Timestamp,time_begin,time_local,loc_count,during,datatype,x,y,z,userid]
                                cursor.execute("replace into locationPhone (IdType,Timestamp,time_begin,time_local,loc_count,during,datatype,x,y,z,userid) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",sqlparam)
                                #cursor.execute("replace into locationPhone (IdType,Timestamp,datatype,x,y,z,userid) values (%s,%s,%s,%s,%s,%s,%s)",sqlparam)
                    if jsonData.keys()[0] == 'locationstreamanonymous':
                        jsonList = jsonData["locationstreamanonymous"]
                        dataStr = ""
                        for index in range(len(jsonList)):
                            IdType = jsonList[index]["IdType"]
                            Timestamp = jsonList[index]["Timestamp"]
                            datatype = jsonList[index]["datatype"]
                            x = jsonList[index]["location"]["x"]
                            y = jsonList[index]["location"]["y"]
                            z = jsonList[index]["location"]["z"]
                            if z > 0:
                                z = z + int(self.companyid)*10000
                            else:
                                z = abs(z) + 5000 + int(self.companyid)*10000
                            if len(jsonList[index]["userid"]) < 1:
                                continue
                            userid = jsonList[index]["userid"][0]
                            sqlparam = [IdType,Timestamp,datatype,x,y,z,userid]
                            # Per-day table suffix (YYYYMMDD) derived from the
                            # first record's timestamp.
                            if dataStr == "":
                                ltime=time.localtime(Timestamp/1000)
                                dataStr=time.strftime("%Y%m%d", ltime)
                            print sqlparam
                            time_begin = Timestamp
                            loc_count= 1
                            during = 0
                            time_local = time.time() * 1000
                            # NOTE(review): table name built by string
                            # concatenation; dataStr comes from strftime so it
                            # is digits-only here, but parameterization is not
                            # possible for identifiers in MySQLdb.
                            cursor.execute ("select loc_count, time_begin,timestamp,userid from location"+dataStr+" where userid=%s and z = %s",[userid,z])
                            row = cursor.fetchone ()
                            if row != None:
                                # Accumulate visit count and elapsed time.
                                loc_count = loc_count + int(row[0])
                                during = Timestamp - int(row[1]);
                                sqlparam = [IdType,Timestamp,time_local,loc_count,during,datatype,x,y,z,userid]
                                print sqlparam
                                cursor.execute("update location"+dataStr+" set IdType=%s, Timestamp = %s,time_local=%s,loc_count=%s, during=%s,datatype=%s,x=%s, y =%s where z = %s and userid = %s ",sqlparam)
                            else:
                                sqlparam = [IdType,Timestamp,time_begin,time_local,loc_count,during,datatype,x,y,z,userid]
                                cursor.execute("insert into location"+dataStr+" (IdType,Timestamp,time_begin,time_local,loc_count,during,datatype,x,y,z,userid) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",sqlparam)
                                #cursor.execute("insert into location"+dataStr+" (IdType,Timestamp,datatype,x,y,z,userid) values (%s,%s,%s,%s,%s,%s,%s)",sqlparam)
                    if jsonData.keys()[0] == 'networkinfo':
                        print "in"
                        time_local = time.time() * 1000
                        userid = jsonData["networkinfo"][0]["userid"]
                        enbid = jsonData["networkinfo"][0]["lampsiteinfo"]["enbid"]
                        jsonList = jsonData["networkinfo"][0]["lampsiteinfo"]["prrusignal"]
                        # One row per pRRU signal reading.
                        for index in range(len(jsonList)):
                            gpp = jsonList[index]["gpp"]
                            rsrp = jsonList[index]["rsrp"]
                            sqlparam = [userid,enbid,gpp,rsrp,time_local]
                            print sqlparam
                            cursor.execute("insert into prrusignal (userId,enbid,gpp,rsrp,timestamp) values (%s,%s,%s,%s,%s)",sqlparam)
                    if jsonData.keys()[0] == 'geofencing':
                        jsonList = jsonData["geofencing"]
                        for index in range(len(jsonList)):
                            IdType = jsonList[index]["IdType"]
                            if len(jsonList[index]["userid"]) < 1:
                                continue
                            userid = jsonList[index]["userid"][0]
                            mapid = jsonList[index]["mapid"]
                            zoneid = jsonList[index]["zoneid"]
                            zone_event = jsonList[index]["zone_event"]
                            Timestamp = jsonList[index]["Timestamp"]
                            time_local = time.time() * 1000
                            sqlparam = [IdType,userid,mapid,zoneid,zone_event,Timestamp,time_local]
                            print sqlparam
                            cursor.execute("insert into geofencing (IdType,userid,mapid,zoneid,enter,Timestamp,time_local) values (%s,%s,%s,%s,%s,%s,%s)",sqlparam)
                    conn.commit()
                    # Record progress for the staleness watchdog.
                    self.nowTime = datetime.now()
                    cursor.close()
                    conn.close()
                    time.sleep(2)
                except Exception as e:
                    # Per-message failure: log and continue the loop.
                    print e
        except Exception as m:
            # Loop-level failure: flag for the watchdog instead of exiting.
            print m
            self.isError = True
            #sys.exit(-1)
if __name__ == "__main__":
#appName = "app0"
#brokeIP = "182.138.104.35"
#brokerPort = "4703"
#queueID = "app0.7ce75a30c6184ef08b20994bdcb53dcb.66fc8841"
#companyID = "861300010010300005"
#appName = sys.argv[1] #app0
#brokeIP = sys.argv[2] #182.138.104.35
#brokerPort = sys.argv[3] #4703
#queueID = sys.argv[4] #app0.7ce75a30c6184ef08b20994bdcb53dcb.66fc8841
#companyID = sys.argv[5] #861300010010300005
appName = "app0"
brokeIP = "182.138.104.35"
brokerPort = 4703
queueID = "app0.7ce75a30c6184ef08b20994bdcb53dcb.66fc8841"
companyID = 1
getSvaData = GetSvaData(appName,brokeIP,brokerPort,queueID,companyID)
try:
thread1 = threading.Thread(target=getSvaData.Run)
thread1.setDaemon(True)
thread1.start()
except Exception as e:
sys.exit(-1)
while True:
try:
time.sleep(20)
if(getSvaData.CheckSvaError()):
sys.exit(-1)
except Exception as e:
sys.exit(-1)
|
import itertools
import numpy as np
from config import *
from time import time
def full(num, count):
    """Return a list containing ``num`` repeated ``count`` times."""
    return [num for _ in range(count)]
def zeros(count):
    """Return a list of ``count`` zeros."""
    return [0] * count
def ones(count):
    """Return a list of ``count`` ones."""
    return [1] * count
def minus(count):
    """Return a list of ``count`` minus-ones."""
    return [-1] * count
class Pattern:
    """Generators for nonogram-style row/column cell patterns.

    Patterns are numpy arrays where 1 marks a black cell and -1 a white cell.
    """

    @classmethod
    def divisions(cls, num, space=float('inf'), lim=float('inf'), depth=0, fill_space=False):
        # Yield the partitions of `num` into non-increasing chunks, each
        # chunk <= `lim`, using at most `space` slots.  With fill_space=True
        # (top level only) results are right-padded with zeros to exactly
        # `space` entries — this requires a finite `space`.
        for main_chunk in range(min(num, lim), 0, -1):
            rest_chunk = num - main_chunk
            if rest_chunk == 0:
                # `num` consumed entirely by this chunk.
                if fill_space and depth == 0:
                    yield [main_chunk] + zeros(space - 1)
                else:
                    yield [main_chunk]
                continue
            if depth + 2 > space:
                # No slot left for the remainder.
                continue
            # Recurse on the remainder; lim=main_chunk keeps chunks non-increasing.
            rchunk_divisions = cls.divisions(
                rest_chunk, space, lim=main_chunk, depth=depth+1)
            for rest_chunk_division in rchunk_divisions:
                if fill_space and depth == 0:
                    yield [main_chunk, *rest_chunk_division] + zeros(space - (1 + len(rest_chunk_division)))
                else:
                    yield [main_chunk, *rest_chunk_division]

    @classmethod
    def permutations_without_duplication(cls, iterable, l=0, r=None):
        # In-place backtracking generator of the distinct permutations of
        # `iterable[l..r]`, yielded as tuples.  `black_list` skips values
        # already tried at position `l` to avoid duplicate permutations.
        r = len(iterable)-1 if r is None else r
        black_list = []
        for i in range(l, r+1):
            if (iterable[l] == iterable[i] and l != i) or\
               (iterable[i] in black_list):
                continue
            black_list.append(iterable[i])
            iterable[l], iterable[i] = iterable[i], iterable[l]
            if l+1 == r:
                yield tuple(iterable)
            else:
                for case in cls.permutations_without_duplication(iterable, l+1, r):
                    yield case
            iterable[l], iterable[i] = iterable[i], iterable[l] # backtrack
 
    @classmethod
    def white_cell_patterns(cls, space, num):
        # All distinct arrangements of `num` extra white cells into `space` gaps.
        for v in cls.divisions(num, space, fill_space=True):
            for pattern in cls.permutations_without_duplication(v):
                yield pattern

    @classmethod
    def patterns_from_map(cls, arg):
        # arg[0] -> 'row' or 'col'
        # arg[1] -> index
        # arg[2] -> keys
        # arg[3] -> length
        # Wrapper for use with pool.map(); also returns timing statistics.
        start_time = time()
        result = cls.patterns(arg[2], arg[3])
        if sum(arg[2]) != 0:
            timedata = (time()-start_time, sum(arg[2]), len(arg[2]), len(arg[2])/sum(arg[2]))
        else:
            timedata = (time()-start_time, sum(arg[2]), len(arg[2]), 0)
        return arg[0], arg[1], result, timedata

    @classmethod
    def patterns(cls, key, length):
        # Enumerate every placement of the black runs in `key` within a line
        # of `length` cells as rows of a 2-D array (1 = black, -1 = white).
        S = len(key) # number of keys
        B = sum(key) # number of black cells
        W = length - B # number of white cells
        # Free white cells beyond the S-1 mandatory separators.
        VARIABLE_FACTOR = W - (S - 1)
        if not VARIABLE_FACTOR:
            # Exactly one possible placement: runs separated by single whites.
            pattern = []
            for i, black_cell in enumerate(key):
                pattern += ones(black_cell)
                if i != S - 1:
                    pattern.append(-1)
            return np.array([pattern], dtype=DTYPE)
        white_cell_patterns = cls.white_cell_patterns(S + 1, VARIABLE_FACTOR)
        pattern_set = None
        for white_cell_pattern in white_cell_patterns:
            # Interleave variable white gaps with the fixed black runs
            # (plus the mandatory single separator between runs).
            pattern = []
            for i, white_cell in enumerate(white_cell_pattern):
                pattern += minus(white_cell)
                if i < S:
                    pattern += ones(key[i])
                    if i != S - 1:
                        pattern.append(-1)
            # NOTE(review): np.append re-allocates the whole array each time;
            # collecting rows in a list and converting once would be O(n).
            if pattern_set is None:
                pattern_set = np.array([pattern], dtype=DTYPE)
            else:
                pattern_set = np.append(pattern_set, np.array(
                    [pattern], dtype=DTYPE), axis=0)
        return pattern_set
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import wraps
import json
from django.db import models
from django import forms
from . import conf
__all__ = [
'override_djconfig',
'serialize']
def override_djconfig(**new_cache_values):
    """
    Temporarily override config values.

    This is similar to :py:func:`django.test.override_settings`,\
    use it in testing.

    :param new_cache_values: Keyword arguments,\
    the key should match one in the config,\
    a new one is created otherwise,\
    the value is overridden within\
    the decorated function
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Snapshot the current values so they can be restored afterwards.
            previous_values = {}
            for key in new_cache_values:
                previous_values[key] = getattr(conf.config, key)
            conf.config._set_many(new_cache_values)
            try:
                # NOTE: don't populate the config within migrations — this
                # works because the config table is empty, so even if the
                # middleware gets called it won't update the config
                # (_updated_at will be None), assuming the table is not
                # populated by the user (i.e. within a migration), in which
                # case it will load all the default values.
                return func(*args, **kwargs)
            finally:
                conf.config._set_many(previous_values)
        return wrapper
    return decorator
# todo: add DateField
# todo: add DateField
def serialize(value, field):
    """
    Form values serialization.

    :param object value: A value to be serialized\
    for saving it into the database and later\
    loading it into the form as initial value
    """
    assert isinstance(field, forms.Field)
    # Multiple-choice model fields are stored as a JSON list of primary keys.
    if isinstance(field, forms.ModelMultipleChoiceField):
        return json.dumps([item.pk for item in value])
    # todo: remove
    if isinstance(value, models.Model):
        return value.pk
    # Plain values are stored as-is.
    return value
|
'''
Given a binary tree and a number, return true if the tree has a
root-to-leaf path such that adding up all the values along the
path equals the given number. Return false if no such path can be found.
'''
def targetsum(root, target):
    """Return True if some root-to-leaf path sums exactly to ``target``."""
    if not root:
        # Empty tree: no path exists.
        return False
    remaining = target - root.val
    if not root.left and not root.right:
        # Leaf: the path works iff this node consumes the remaining target.
        return remaining == 0
    # Otherwise recurse into either subtree with the reduced target.
    return targetsum(root.left, remaining) or targetsum(root.right, remaining)
|
# coding: utf-8
from abc import ABC, abstractmethod
from .ingredient_factory import (
NYPizzaIngredientFactory,
ChicagoPizzaIngredientFactory
)
from .pizza import (
CheesePizza,
ClamPizza
)
class PizzaStore(ABC):
    """Abstract store: subclasses supply the factory method create_pizza."""

    def order_pizza(self, pizza_type):
        """Create a pizza of ``pizza_type``, run the preparation steps, return it."""
        pizza = self.create_pizza(pizza_type)
        for step in (pizza.prepare, pizza.bake, pizza.cut, pizza.box):
            step()
        return pizza

    @abstractmethod
    def create_pizza(self, item):
        """
        :param item:
        :return:
        :rtype: lib.pizza.Pizza
        """
class NYPizzaStore(PizzaStore):
    """Concrete store that builds NY-style pizzas."""

    def create_pizza(self, item):
        """Return the named NY-style pizza, or None for unknown items."""
        ingredient_factory = NYPizzaIngredientFactory()
        if item == 'チーズ':
            pizza = CheesePizza(ingredient_factory)
            pizza.set_name('NYスタイルチーズピザ')
            return pizza
        if item == 'クラム':
            pizza = ClamPizza(ingredient_factory)
            pizza.set_name('NYスタイルクラムピザ')
            return pizza
        return None
class ChicagoPizzaStore(PizzaStore):
    """Concrete store that builds Chicago-style pizzas."""

    def create_pizza(self, item):
        """Return the named Chicago-style pizza, or None for unknown items."""
        ingredient_factory = ChicagoPizzaIngredientFactory()
        if item == 'チーズ':
            pizza = CheesePizza(ingredient_factory)
            pizza.set_name('シカゴスタイルチーズピザ')
            return pizza
        if item == 'クラム':
            pizza = ClamPizza(ingredient_factory)
            pizza.set_name('シカゴスタイルクラムピザ')
            return pizza
        return None
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 Doug Hellmann. All rights reserved.
#
"""Demonstrate the representations of values using different encodings.
"""
#end_pymotw_header
from codecs_to_hex import to_hex
# A short Unicode string containing a non-ASCII character.
text = u'pi: π'
# Round-trip through UTF-8: encode to a byte string, then decode back.
encoded = text.encode('utf-8')
decoded = encoded.decode('utf-8')
# Python 2 print statements: show the repr, hex dump and types of each form.
print 'Original :', repr(text)
print 'Encoded :', to_hex(encoded, 1), type(encoded)
print 'Decoded :', repr(decoded), type(decoded)
|
"""
Crie um programa que lê 6 valores inteiros pares e, em seguida, mostre na tela os valores lidos na ordem
inversa
"""
num = list(range(0, 11, 2))
print(num)
num.reverse()
print(num)
|
"""Tests for refactoring requests."""
from hamcrest import assert_that, is_
from tests import TEST_DATA
from tests.lsp_test_client import session
from tests.lsp_test_client.utils import StringPattern, as_uri
REFACTOR_TEST_ROOT = TEST_DATA / "refactoring"
def test_lsp_rename_function():
    """Tests single file function rename."""
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri((REFACTOR_TEST_ROOT / "rename_test1.py"))
        # Rename the symbol at line 12, col 4 to "my_function_1".
        actual = ls_session.text_document_rename(
            {
                "textDocument": {"uri": uri},
                "position": {"line": 12, "character": 4},
                "newName": "my_function_1",
            }
        )
        # The server reports minimal insert edits ("_" and "tion_") at each
        # occurrence rather than whole-identifier replacements.
        expected = {
            "documentChanges": [
                {
                    "textDocument": {
                        "uri": uri,
                        "version": 0,
                    },
                    "edits": [
                        {
                            "range": {
                                "start": {"line": 3, "character": 6},
                                "end": {"line": 3, "character": 6},
                            },
                            "newText": "_",
                        },
                        {
                            "range": {
                                "start": {"line": 3, "character": 10},
                                "end": {"line": 3, "character": 10},
                            },
                            "newText": "tion_",
                        },
                        {
                            "range": {
                                "start": {"line": 8, "character": 6},
                                "end": {"line": 8, "character": 6},
                            },
                            "newText": "_",
                        },
                        {
                            "range": {
                                "start": {"line": 8, "character": 10},
                                "end": {"line": 8, "character": 10},
                            },
                            "newText": "tion_",
                        },
                        {
                            "range": {
                                "start": {"line": 12, "character": 2},
                                "end": {"line": 12, "character": 2},
                            },
                            "newText": "_",
                        },
                        {
                            "range": {
                                "start": {"line": 12, "character": 6},
                                "end": {"line": 12, "character": 6},
                            },
                            "newText": "tion_",
                        },
                    ],
                }
            ],
        }
        assert_that(actual, is_(expected))
def test_lsp_rename_variable_at_line_start():
    """Tests renaming a variable that appears at the start of a line."""
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri((REFACTOR_TEST_ROOT / "rename_test2.py"))
        # Rename the single-character variable "x" at column 0 to "y".
        actual = ls_session.text_document_rename(
            {
                "textDocument": {"uri": uri},
                "position": {"line": 1, "character": 0},
                "newName": "y",
            }
        )
        # Expect a single one-character replacement edit.
        expected = {
            "documentChanges": [
                {
                    "textDocument": {
                        "uri": uri,
                        "version": 0,
                    },
                    "edits": [
                        {
                            "range": {
                                "start": {"line": 1, "character": 0},
                                "end": {"line": 1, "character": 1},
                            },
                            "newText": "y",
                        },
                    ],
                }
            ],
        }
        assert_that(actual, is_(expected))
def test_lsp_rename_inserts_at_line_start():
    """Tests renaming a variable by inserting text at the start of a line."""
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri((REFACTOR_TEST_ROOT / "rename_test2.py"))
        actual = ls_session.text_document_rename(
            {
                "textDocument": {"uri": uri},
                "position": {"line": 1, "character": 0},
                # old name is "x", so we will insert "a"
                "newName": "ax",
            }
        )
        # Expect a pure insertion (zero-width range) of "a" at column 0.
        expected = {
            "documentChanges": [
                {
                    "textDocument": {
                        "uri": uri,
                        "version": 0,
                    },
                    "edits": [
                        {
                            "range": {
                                "start": {"line": 1, "character": 0},
                                "end": {"line": 1, "character": 0},
                            },
                            "newText": "a",
                        },
                    ],
                }
            ],
        }
        assert_that(actual, is_(expected))
def test_lsp_rename_last_line():
    """Tests whether rename works for end of file edge case.

    This example was receiving a KeyError, but now we check for end-1 to
    fit within correct range.
    """
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri((REFACTOR_TEST_ROOT / "rename_test3.py"))
        # Rename "args" on the last line of the file to "args2".
        actual = ls_session.text_document_rename(
            {
                "textDocument": {"uri": uri},
                "position": {"line": 14, "character": 7},
                "newName": "args2",
            }
        )
        expected = {
            "documentChanges": [
                {
                    "textDocument": {
                        "uri": uri,
                        "version": 0,
                    },
                    "edits": [
                        {
                            "range": {
                                "start": {"line": 11, "character": 4},
                                "end": {"line": 11, "character": 4},
                            },
                            "newText": "2",
                        },
                        {
                            "range": {
                                "start": {"line": 12, "character": 7},
                                "end": {"line": 12, "character": 7},
                            },
                            "newText": "2",
                        },
                        {
                            "range": {
                                "start": {"line": 12, "character": 15},
                                "end": {"line": 12, "character": 15},
                            },
                            "newText": "2",
                        },
                        {
                            # The final edit replaces the end-of-file span,
                            # re-emitting the closing paren and newline.
                            "range": {
                                "start": {"line": 14, "character": 10},
                                "end": {"line": 14, "character": 12},
                            },
                            "newText": "2)\n",
                        },
                    ],
                }
            ],
        }
        assert_that(actual, is_(expected))
def test_rename_package() -> None:
    """Tests renaming of an imported package."""
    test_root = REFACTOR_TEST_ROOT / "rename_package_test1"
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri(test_root / "rename_test_main.py")
        # Rename the imported package "old_name" to "new_name".
        actual = ls_session.text_document_rename(
            {
                "textDocument": {"uri": uri},
                "position": {"line": 2, "character": 12},
                "newName": "new_name",
            }
        )
        old_name_uri = as_uri(test_root / "old_name")
        new_name_uri = as_uri(test_root / "new_name")
        # Expect both a text edit in the importing module and a file-level
        # "rename" resource operation for the package directory.
        expected = {
            "documentChanges": [
                {
                    "textDocument": {
                        "uri": uri,
                        "version": 0,
                    },
                    "edits": [
                        {
                            "range": {
                                "start": {"line": 2, "character": 5},
                                "end": {"line": 2, "character": 8},
                            },
                            "newText": "new",
                        }
                    ],
                },
                {
                    "kind": "rename",
                    "oldUri": old_name_uri,
                    "newUri": new_name_uri,
                    "options": {"overwrite": True, "ignoreIfExists": True},
                },
            ]
        }
        assert_that(actual, is_(expected))
def test_rename_module() -> None:
    """Tests example from the following example.

    https://github.com/pappasam/jedi-language-server/issues/159
    """
    test_root = REFACTOR_TEST_ROOT
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri(test_root / "rename_module.py")
        # Rename the imported module "somemodule" to "new_somemodule".
        actual = ls_session.text_document_rename(
            {
                "textDocument": {"uri": uri},
                "position": {"line": 0, "character": 24},
                "newName": "new_somemodule",
            }
        )
        old_name_uri = as_uri(test_root / "somepackage" / "somemodule.py")
        new_name_uri = as_uri(test_root / "somepackage" / "new_somemodule.py")
        # Expect insert edits ("new_") at each reference plus a file-level
        # "rename" resource operation for the module file itself.
        expected = {
            "documentChanges": [
                {
                    "textDocument": {
                        "uri": uri,
                        "version": 0,
                    },
                    "edits": [
                        {
                            "range": {
                                "start": {"line": 0, "character": 24},
                                "end": {"line": 0, "character": 24},
                            },
                            "newText": "new_",
                        },
                        {
                            "range": {
                                "start": {"line": 4, "character": 4},
                                "end": {"line": 4, "character": 4},
                            },
                            "newText": "new_",
                        },
                    ],
                },
                {
                    "kind": "rename",
                    "oldUri": old_name_uri,
                    "newUri": new_name_uri,
                    "options": {"overwrite": True, "ignoreIfExists": True},
                },
            ]
        }
        assert_that(actual, is_(expected))
def test_lsp_code_action() -> None:
    """Tests code actions like extract variable and extract function."""
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri((REFACTOR_TEST_ROOT / "code_action_test1.py"))
        # Request code actions at a point inside an expression.
        actual = ls_session.text_document_code_action(
            {
                "textDocument": {"uri": uri},
                "range": {
                    "start": {"line": 4, "character": 10},
                    "end": {"line": 4, "character": 10},
                },
                "context": {"diagnostics": []},
            }
        )
        # Two refactor.extract actions are expected; their edits are
        # compared loosely (see note below).
        expected = [
            {
                "title": StringPattern(
                    r"Extract expression into variable 'jls_extract_var'"
                ),
                "kind": "refactor.extract",
                "edit": {
                    "documentChanges": [
                        {
                            "textDocument": {
                                "uri": uri,
                                "version": 0,
                            },
                            "edits": [],
                        }
                    ]
                },
            },
            {
                "title": StringPattern(
                    r"Extract expression into function 'jls_extract_def'"
                ),
                "kind": "refactor.extract",
                "edit": {
                    "documentChanges": [
                        {
                            "textDocument": {
                                "uri": uri,
                                "version": 0,
                            },
                            "edits": [],
                        }
                    ]
                },
            },
        ]
        # Cannot use hamcrest directly for this due to unpredictable
        # variations in how the text edits are generated.
        assert_that(len(actual), is_(len(expected)))
        # Remove the edits
        actual[0]["edit"]["documentChanges"][0]["edits"] = []
        actual[1]["edit"]["documentChanges"][0]["edits"] = []
        assert_that(actual, is_(expected))
def test_lsp_code_action2() -> None:
    """Tests edge case for code actions.

    Identified in: https://github.com/pappasam/jedi-language-server/issues/96
    """
    with session.LspSession() as ls_session:
        ls_session.initialize()
        uri = as_uri((REFACTOR_TEST_ROOT / "code_action_test2.py"))
        actual = ls_session.text_document_code_action(
            {
                "textDocument": {"uri": uri},
                "range": {
                    "start": {"line": 2, "character": 6},
                    "end": {"line": 2, "character": 6},
                },
                "context": {"diagnostics": []},
            }
        )
        # No applicable refactorings at this position: server must return
        # None rather than raising.
        assert_that(actual, is_(None))
|
# draw a house
from turtle import *
# Base edge of the house (141 ≈ 100*sqrt(2), presumably sized to close
# the 45-degree roof below — TODO confirm by running).
forward(141)
left(90)
forward(100)   # wall
left(45)
forward(100)   # first roof slope
left(90)
forward(100)   # second roof slope
left(45)
forward(100)   # opposite wall
# Finish the drawing and keep the window open.
done()
|
"""
WSGI config for djhome project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
from datetime import timedelta
import os
from django.core.wsgi import get_wsgi_application
from redis import Redis
from rq_scheduler import Scheduler
def load():
    """One-time startup hook executed when the WSGI module is imported.

    Resets all Input states in the database and schedules the initial
    Phidget output sync 5 seconds in the future via rq-scheduler. Any
    failure is logged and swallowed so it cannot prevent the WSGI
    application itself from starting.
    """
    import logging
    logger = logging.getLogger(__name__)
    logger.info('WSGI started')
    try:
        logger.debug('Clearing input states')
        # Imported lazily: Django's app registry is not ready at module
        # import time, so model imports must happen inside the function.
        from ios.models import Input
        Input.objects.all().update(state=None)
        # Send all default output states
        from ios.tasks import set_initial_phidget_outputs
        scheduler = Scheduler(connection=Redis()) # Get a scheduler for the "default" queue
        scheduler.enqueue_in(func=set_initial_phidget_outputs, time_delta=timedelta(seconds=5))
        #TODO: Need to ask Phidget to send current status for everything, so that UI can be updated
        #TODO: Need to send outputs with default inital state of true
        # This will make sure the app is always imported when Django starts so that shared_task will use this app.
        # from djhome.celery import app as celery_app
        # set_default_phidget_outputs.apply_async((), countdown=5)
        # async did not work, so calling synchronously
        #set_default_phidget_outputs()
    except Exception:
        # Deliberate best-effort: log the full traceback but never block startup.
        logger.exception('WSGI loading failed')
# The settings module must be set before get_wsgi_application() configures Django.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djhome.settings")
application = get_wsgi_application()
# Run the startup hook at import time, i.e. once per WSGI worker process.
load()
|
# Copyright 2017 F5 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controller Unit Tests.
Units tests for testing command-line args, Marathon state parsing, and
BIG-IP resource management.
"""
import unittest
import json
import requests
from mock import Mock
import f5
import icontrol
class Pool(object):
    """A mock BIG-IP Pool."""

    def __init__(self, name, **kwargs):
        """Initialize the pool; unspecified settings default to None."""
        self.name = name
        # Map constructor kwargs onto BIG-IP-style attribute names.
        for attr, key in (('monitor', 'monitor'),
                          ('loadBalancingMode', 'balance'),
                          ('partition', 'partition'),
                          ('members', 'members')):
            setattr(self, attr, kwargs.get(key, None))

    def modify(self, **kwargs):
        """Placeholder: This will be mocked."""
        pass

    def update(self, **kwargs):
        """Placeholder: This will be mocked."""
        pass

    def create(self, partition=None, name=None, **kwargs):
        """Create the pool object (no-op in the mock)."""
        pass

    def delete(self):
        """Delete the pool object (no-op in the mock)."""
        pass
class Member(object):
    """A mock BIG-IP Pool Member."""

    def __init__(self, name, **kwargs):
        """Initialize the member.

        A ``state`` kwarg of 'user-up' maps to internal state 'up'; any
        other value (or no value) maps to 'user-down'.
        """
        self.name = name
        self.session = kwargs.get('session', None)
        self.state = 'up' if kwargs.get('state', None) == 'user-up' else 'user-down'

    def modify(self, **kwargs):
        """Placeholder: This will be mocked."""
        pass
class Profiles(object):
    """A container of Virtual Server Profiles."""

    def __init__(self, **kwargs):
        """Initialize with an optional 'profiles' list (default empty)."""
        self.profiles = kwargs.get('profiles', [])

    def exists(self, name, partition):
        """Return True if a profile with this name and partition is present."""
        return any(p['name'] == name and p['partition'] == partition
                   for p in self.profiles)

    def create(self, name, partition):
        """Placeholder: This will be mocked."""
        pass
class ProfileSet(object):
    """A set of Virtual Server Profiles.

    Thin wrapper exposing a `profiles` attribute, mirroring the iControl
    REST `profiles_s` sub-collection layout.
    """
    def __init__(self, **kwargs):
        """Initialize the object; kwargs are forwarded to Profiles."""
        self.profiles = Profiles(**kwargs)
class Policies(object):
    """A container of Virtual Server Policies."""

    def __init__(self, **kwargs):
        """Initialize with an optional 'policies' list (default empty)."""
        self.policies = kwargs.get('policies', [])

    def exists(self, name, partition):
        """Return True if a policy with this name and partition is present."""
        return any(p['name'] == name and p['partition'] == partition
                   for p in self.policies)

    def create(self, name, partition):
        """Placeholder: This will be mocked."""
        pass
class PolicySet(object):
    """A set of Virtual Server Policies.

    Thin wrapper exposing a `policies` attribute, mirroring the iControl
    REST `policies_s` sub-collection layout.
    """
    def __init__(self, **kwargs):
        """Initialize the object; kwargs are forwarded to Policies."""
        self.policies = Policies(**kwargs)
class Virtual(object):
    """A mock BIG-IP Virtual Server."""

    def __init__(self, name, **kwargs):
        """Initialize the virtual server from keyword settings."""
        self.profiles_s = ProfileSet(**kwargs)
        self.policies_s = PolicySet(**kwargs)
        self.name = name
        # Scalar settings default to None when not supplied.
        for key in ('enabled', 'disabled', 'ipProtocol', 'destination',
                    'pool', 'sourceAddressTranslation', 'partition'):
            setattr(self, key, kwargs.get(key, None))
        # List-valued settings default to fresh empty lists.
        for key in ('profiles', 'policies', 'rules'):
            setattr(self, key, kwargs.get(key, []))

    def modify(self, **kwargs):
        """Placeholder: This will be mocked."""
        pass

    def create(self, name=None, partition=None, **kwargs):
        """Create the virtual object (no-op in the mock)."""
        pass

    def delete(self):
        """Delete the virtual object (no-op in the mock)."""
        pass

    def load(self, name=None, partition=None):
        """Load the virtual object (no-op in the mock)."""
        pass
class HealthCheck(object):
    """A mock BIG-IP Health Monitor."""

    def __init__(self, name, **kwargs):
        """Initialize the monitor; unspecified settings default to None."""
        self.name = name
        for setting in ('interval', 'timeout', 'send', 'partition'):
            setattr(self, setting, kwargs.get(setting, None))

    def modify(self, **kwargs):
        """Placeholder: This will be mocked."""
        pass

    def delete(self):
        """Delete the healthcheck object (no-op in the mock)."""
        pass
class MockService(object):
    """A mock Services service object.

    All methods are no-ops; tests replace them with Mock side effects.
    """
    def __init__(self):
        """Initialize the object."""
        pass
    def load(self, name, partition):
        """Load a mock iapp (no-op)."""
        pass
    def create(self, name=None, template=None, partition=None, variables=None,
               tables=None, trafficGroup=None, description=None):
        """Create a mock iapp (no-op)."""
        pass
class MockServices(object):
    """A mock Application services object.

    Holds a single MockService and an empty iapp collection.
    """
    def __init__(self):
        """Initialize the object."""
        self.service = MockService()
    def get_collection(self):
        """Get collection of iapps (always empty in the mock)."""
        return []
class MockApplication(object):
    """A mock Sys application object.

    Mirrors the `sys.application.services` attribute chain of the SDK.
    """
    def __init__(self):
        """Initialize the object."""
        self.services = MockServices()
class MockFolders(object):
    """A mock Sys folders object.

    Empty shell; `get_collection` is patched onto it with a Mock in setUp.
    """
    def __init__(self):
        """Initialize the object."""
class MockSys(object):
    """A mock BIG-IP sys object.

    Mirrors the SDK's `sys.application` / `sys.folders` attribute chain.
    """
    def __init__(self):
        """Initialize the object."""
        self.application = MockApplication()
        self.folders = MockFolders()
class MockIapp(object):
    """A mock BIG-IP iapp object."""

    def __init__(self, name=None, template=None, partition=None,
                 variables=None, tables=None, trafficGroup=None,
                 description=None):
        """Store every constructor argument as a same-named attribute."""
        for attr, value in (('name', name),
                            ('template', template),
                            ('partition', partition),
                            ('variables', variables),
                            ('tables', tables),
                            ('trafficGroup', trafficGroup),
                            ('description', description)):
            setattr(self, attr, value)

    def delete(self):
        """Mock delete method (no-op)."""

    def update(self, executeAction=None, name=None, partition=None,
               variables=None, tables=None, **kwargs):
        """Mock update method (no-op)."""
class MockFolder(object):
    """A mock BIG-IP folder (partition) object.

    Carries only the `name` attribute that the controller inspects.
    """
    def __init__(self, name):
        """Initialize the object."""
        self.name = name
class MockHttp(object):
    """A mock Https http object.

    All methods are no-ops; tests replace them with Mock side effects.
    """
    def __init__(self):
        """Initialize the object."""
    def create(self, partition=None, **kwargs):
        """Create a http healthcheck object (no-op)."""
        pass
    def load(self, name=None, partition=None):
        """Load a http healthcheck object (no-op)."""
        pass
class MockHttps(object):
    """A mock Monitor https object."""

    def __init__(self):
        """Initialize the object.

        BUG FIX: previously `self.http = MockHttp` assigned the CLASS
        itself (missing call parentheses), unlike the parallel MockTcps
        which holds a MockTcp instance. Patching attributes on the class
        would leak between MockHttps instances; hold an instance instead.
        """
        self.http = MockHttp()

    def get_collection(self):
        """Get collection of http healthchecks (no-op; mocked in setUp)."""
        pass
class MockTcp(object):
    """A mock Tcps tcp object.

    All methods are no-ops; tests replace them with Mock side effects.
    """
    def __init__(self):
        """Initialize the object."""
        pass
    def create(self, partition=None, **kwargs):
        """Create a tcp healthcheck object (no-op)."""
        pass
    def load(self, name=None, partition=None):
        """Load a tcp healthcheck object (no-op)."""
        pass
class MockTcps(object):
    """A mock Monitor tcps object.

    Holds a MockTcp instance, mirroring the SDK's `tcps.tcp` chain.
    """
    def __init__(self):
        """Initialize the object."""
        self.tcp = MockTcp()
    def get_collection(self):
        """Get collection of tcp healthchecks (no-op; mocked in setUp)."""
        pass
class MockMonitor(object):
    """A mock Ltm monitor object.

    Mirrors the SDK's `ltm.monitor.https` / `ltm.monitor.tcps` chain.
    """
    def __init__(self):
        """Initialize the object."""
        self.https = MockHttps()
        self.tcps = MockTcps()
class MockVirtuals(object):
    """A mock Ltm virtuals object.

    Holds a single placeholder Virtual whose methods setUp replaces.
    """
    def __init__(self):
        """Initialize the object."""
        self.virtual = Virtual('test')
class MockPools(object):
    """A mock Ltm pools object.

    Holds a single placeholder Pool whose methods setUp replaces.
    """
    def __init__(self):
        """Initialize the object."""
        self.pool = Pool('test')
    def get_collection(self):
        """Get collection of pools (no-op; mocked in setUp)."""
        pass
class MockLtm(object):
    """A mock BIG-IP ltm object.

    Mirrors the SDK's `ltm.*` attribute chain used by the controller.
    """
    def __init__(self):
        """Initialize the object."""
        self.monitor = MockMonitor()
        self.virtuals = MockVirtuals()
        self.pools = MockPools()
        # NOTE(review): named `data_group` but holds a
        # MockDataGroupInternals (not a MockDataGroup) — presumably
        # intentional to expose get_collection() directly; confirm.
        self.data_group = MockDataGroupInternals()
class MockHealthMonitor(object):
    """A mock BIG-IP healthmonitor object.

    Carries only the `name`/`partition` attributes the controller reads.
    """
    def __init__(self, name, partition):
        """Initialize the object."""
        self.name = name
        self.partition = partition
class MockDataGroupInternals(object):
    """A mock Ltm data-group internals object."""
    def __init__(self):
        """Initialize the object."""
        pass
    def get_collection(self):
        """Get collection of nodes (no-op; may be mocked by tests)."""
        pass
class MockDataGroup(object):
    """A mock Ltm data_group object.

    Mirrors the SDK's `data_group.internals` attribute chain.
    """
    def __init__(self):
        """Initialize the object."""
        self.internals = MockDataGroupInternals()
class BigIPTest(unittest.TestCase):
    """BIG-IP configuration tests.
    Test BIG-IP configuration given various cloud states and existing
    BIG-IP states
    """
    # NOTE(review): these are CLASS-level mutable dicts, shared across all
    # test instances of every subclass; `virtuals` is declared twice (the
    # first assignment is redundant).
    virtuals = {}
    profiles = {}
    policies = {}
    rules = {}
    pools = {}
    virtuals = {}
    members = {}
    healthchecks = {}
    def mock_get_pool_member_list(self, partition, pool):
        """Mock: Get a mocked list of pool members.

        Looks the pool up in the BIG-IP state fixture loaded by
        read_test_vectors(); unknown pools yield an empty list.
        """
        try:
            return self.bigip_data[pool]
        except KeyError:
            return []
    def mock_get_node_list(self, partition):
        """Mock: Get a mocked list of nodes (fixed single address)."""
        return ['10.141.141.10']
    def mock_get_http_healthcheck_collection(self):
        """Mock: Get a mocked list of http health monitors.

        Includes 'https' type monitors too, since it matches on substring.
        """
        monitors = []
        for key in self.hm_data:
            if 'http' in self.hm_data[key]['type']:
                monitors.append(MockHealthMonitor(key, self.test_partition))
        return monitors
    def mock_get_tcp_healthcheck_collection(self):
        """Mock: Get a mocked list of tcp health monitors."""
        monitors = []
        for key in self.hm_data:
            if self.hm_data[key]['type'] == 'tcp':
                monitors.append(MockHealthMonitor(key, self.test_partition))
        return monitors
    def mock_iapp_service_create(self, name, template, partition, variables,
                                 tables, trafficGroup, description):
        """Mock: Create a mocked iapp and remember it in self.test_iapp."""
        self.test_iapp = MockIapp(name=name, template=template,
                                  partition=partition, variables=variables,
                                  tables=tables, trafficGroup=trafficGroup,
                                  description=description)
        return self.test_iapp
    def mock_iapp_service_load(self, name, partition):
        """Mock: Get a mocked iapp (also stored in self.test_iapp)."""
        self.test_iapp = MockIapp(name=name, partition=partition)
        return self.test_iapp
    def mock_iapp_update_service_load(self, name, partition):
        """Mock: Wrapper to get previouly created mocked iapp."""
        return self.test_iapp
    def mock_iapp_services_get_collection(self):
        """Mock: Get a mocked collection of iapps."""
        self.test_iapp_list = \
            [MockIapp(name='server-app2_iapp_10000_vs',
                      partition=self.test_partition)]
        return self.test_iapp_list
    def mock_iapp_update_services_get_collection(self):
        """Mock: Get a mocked collection of iapps for iapp update."""
        self.test_iapp_list = \
            [MockIapp(name='default_configmap',
                      partition=self.test_partition)]
        return self.test_iapp_list
    def mock_partition_folders_get_collection(self):
        """Mock: Get a mocked collection of partitions."""
        folder = MockFolder('mesos')
        folder2 = MockFolder('mesos2')
        return [folder, folder2]
    def create_mock_pool(self, name, **kwargs):
        """Create a mock pool server object and register it in self.pools."""
        pool = Pool(name, **kwargs)
        self.pools[name] = pool
        pool.modify = Mock()
        return pool
    def create_mock_virtual(self, name, **kwargs):
        """Create a mock virtual server object and register it."""
        virtual = Virtual(name, **kwargs)
        self.virtuals[name] = virtual
        virtual.modify = Mock()
        virtual.profiles_s.profiles.create = Mock()
        virtual.policies_s.policies.create = Mock()
        # NOTE(review): this overwrites the class-level `profiles`/`policies`
        # dicts with the lists passed for THIS virtual — confirm intended.
        self.profiles = kwargs.get('profiles', [])
        self.policies = kwargs.get('policies', [])
        return virtual
    def create_mock_pool_member(self, name, **kwargs):
        """Create a mock pool member object and register it."""
        member = Member(name, **kwargs)
        self.members[name] = member
        member.modify = Mock()
        return member
    def create_mock_healthcheck(self, name, **kwargs):
        """Create a mock healthcheck object and register it."""
        healthcheck = HealthCheck(name, **kwargs)
        self.healthchecks[name] = healthcheck
        healthcheck.modify = Mock()
        return healthcheck
    def mock_get_pool(self, partition, name):
        """Lookup a mock pool object by name."""
        return self.pools.get(name, None)
    def mock_get_virtual(self, partition, name):
        """Lookup a mock virtual server object by name."""
        return self.virtuals.get(name, None)
    def mock_get_virtual_address(self, partition, name):
        """Lookup a mock virtual Address object by name (identity)."""
        return name
    def mock_get_member(self, partition, pool, name):
        """Lookup a mock pool member object by name."""
        return self.members.get(name, None)
    def mock_get_healthcheck(self, partition, hc, hc_type):
        """Lookup a mock healthcheck object by name."""
        return self.healthchecks.get(hc, None)
    def mock_get_virtual_profiles(self, virtual):
        """Return a list of Virtual Server profiles."""
        return self.profiles
    def mock_get_virtual_policies(self, virtual):
        """Return a list of Virtual Server policies."""
        return self.policies
    def mock_virtual_create(self, name=None, partition=None, **kwargs):
        """Mock: Creates a mocked virtual server (records name/partition)."""
        self.test_virtual.append({'name': name, 'partition': partition})
    def mock_pool_create(self, partition=None, name=None, **kwargs):
        """Mock: Create a mocked pool (records name/partition)."""
        self.test_pool.append({'name': name, 'partition': partition})
    def mock_healthmonitor_create(self, partition=None, **kwargs):
        """Mock: Create a mocked tcp or http healthmonitor."""
        self.test_monitor.append({'partition': partition,
                                  'name': kwargs['name']})
    def mock_virtual_load(self, name=None, partition=None):
        """Mock: Return a mocked virtual.

        NOTE(review): appends a Virtual OBJECT, whereas mock_virtual_create
        appends dicts — mock_get_virtual_list only understands the dicts.
        """
        v = Virtual(name, kwargs={'partition': partition})
        self.test_virtual.append(v)
        return v
    def mock_healtcheck_load(self, name=None, partition=None):
        """Mock: Return a mocked healthcheck (appended as an object)."""
        hc = HealthCheck(name, kwargs={'partition': partition})
        self.test_monitor.append(hc)
        return hc
    def mock_pools_get_collection(self):
        """Mock: Return a mocked collection of pools.

        Replaces self.test_pool wholesale with Pool objects built from the
        BIG-IP state fixture, all in partition 'mesos'.
        """
        p_collection = []
        for key in self.bigip_data:
            p = Pool(key)
            p.partition = 'mesos'
            p_collection.append(p)
        self.test_pool = p_collection
        return p_collection
    def mock_pool_load(self, name=None, partition=None, cow=3):
        """Mock: Return a mocked pool. (`cow` is unused; kept for
        signature compatibility with existing callers.)"""
        pool = Pool(name)
        self.test_pool.append(pool)
        return pool
    def mock_get_pool_list(self, partition, all_pools=False):
        """Mock: Return previouly created pools.

        Expects dict-style entries ({'name', 'partition'}) as appended by
        mock_pool_create.
        """
        pool_list = []
        if self.test_pool is not None:
            for pool in self.test_pool:
                if pool['partition'] == partition:
                    pool_list.append(pool['name'])
        return pool_list
    def mock_get_virtual_list(self, partition):
        """Mock: Return previously created virtuals (dict-style entries)."""
        virtual_list = []
        if self.test_virtual is not None:
            for virtual in self.test_virtual:
                if virtual['partition'] == partition:
                    virtual_list.append(virtual['name'])
        return virtual_list
    def mock_get_healthcheck_list(self, partition):
        """Mock: Return previously created healthchecks (dict-style entries)."""
        monitor_list = {}
        if self.test_monitor is not None:
            for monitor in self.test_monitor:
                if monitor['partition'] == partition:
                    monitor_list.update({monitor['name']: 'mocked'})
        return monitor_list
    def mock_virtual_delete(self, partition, virtual):
        """Mock: deletion of a virtual server."""
        if self.test_virtual is not None:
            for i in range(0, len(self.test_virtual)):
                if (self.test_virtual[i]['name'] == virtual and
                        self.test_virtual[i]['partition'] == partition):
                    self.test_virtual.pop(i)
    def read_test_vectors(self, cloud_state, bigip_state=None,
                          hm_state=None, network_state=None):
        """Read test vectors for the various states.

        Populates self.cloud_data, self.bigip_data, self.hm_data and
        self.network_data from JSON fixture files, and mocks the BIG-IP
        pool/virtual list getters accordingly.
        """
        # Read the Marathon state
        if cloud_state:
            with open(cloud_state) as json_data:
                self.cloud_data = json.load(json_data)
        # Read the BIG-IP state
        if bigip_state:
            with open(bigip_state) as json_data:
                self.bigip_data = json.load(json_data)
            self.bigip.get_pool_list = Mock(
                return_value=self.bigip_data.keys())
            self.bigip.get_virtual_list = Mock(
                return_value=self.bigip_data.keys())
        else:
            self.bigip_data = {}
            self.bigip.get_pool_list = Mock(
                return_value=[])
            self.bigip.get_virtual_list = Mock(
                return_value=[])
        if hm_state:
            with open(hm_state) as json_data:
                self.hm_data = json.load(json_data)
        else:
            self.hm_data = {}
        if network_state:
            with open(network_state) as json_data:
                self.network_data = json.load(json_data)
    def raiseTypeError(self, cfg):
        """Raise a TypeError exception."""
        raise TypeError
    def raiseSDKError(self, cfg):
        """Raise an F5SDKError exception."""
        raise f5.sdk_exception.F5SDKError
    def raiseConnectionError(self, cfg):
        """Raise a ConnectionError exception."""
        raise requests.exceptions.ConnectionError
    def raiseBigIPInvalidURL(self, cfg):
        """Raise a BigIPInvalidURL exception."""
        raise icontrol.exceptions.BigIPInvalidURL
    def raiseBigiControlUnexpectedHTTPError(self, cfg):
        """Raise an iControlUnexpectedHTTPError exception."""
        raise icontrol.exceptions.iControlUnexpectedHTTPError
    def setUp(self, partition, bigip):  # pylint: disable=arguments-differ
        """Test suite set up.

        Wires the supplied `bigip` fake with the mock SDK object tree and
        replaces its create/load/update entry points with Mocks, saving
        the originals so individual tests can restore them.
        """
        self.bigip = bigip
        self.test_partition = partition
        self.test_virtual = []
        self.test_pool = []
        self.test_monitor = []
        self.test_iapp = []
        self.bigip.sys = MockSys()
        self.bigip.get_pool_member_list = \
            Mock(side_effect=self.mock_get_pool_member_list)
        self.bigip.ltm = MockLtm()
        self.bigip.ltm.virtuals.virtual.create = \
            Mock(side_effect=self.mock_virtual_create)
        self.bigip.ltm.virtuals.virtual.load = \
            Mock(side_effect=self.mock_virtual_load)
        self.bigip.ltm.pools.pool.create = \
            Mock(side_effect=self.mock_pool_create)
        self.bigip.ltm.pools.get_collection = \
            Mock(side_effect=self.mock_pools_get_collection)
        self.bigip.ltm.monitor.https.get_collection = \
            Mock(side_effect=self.mock_get_http_healthcheck_collection)
        self.bigip.ltm.monitor.tcps.get_collection = \
            Mock(side_effect=self.mock_get_tcp_healthcheck_collection)
        self.bigip.ltm.monitor.https.http.create = \
            Mock(side_effect=self.mock_healthmonitor_create)
        self.bigip.ltm.monitor.tcps.tcp.create = \
            Mock(side_effect=self.mock_healthmonitor_create)
        self.bigip.ltm.monitor.https.http.load = \
            Mock(side_effect=self.mock_healtcheck_load)
        self.bigip.ltm.monitor.tcps.tcp.load = \
            Mock(side_effect=self.mock_healtcheck_load)
        # Save the original update functions (to be restored when needed)
        self.bigip.pool_update_orig = self.bigip.pool_update
        self.bigip.virtual_update_orig = self.bigip.virtual_update
        self.bigip.member_update_orig = self.bigip.member_update
        self.bigip.healthcheck_update_orig = self.bigip.healthcheck_update
        self.bigip.healthcheck_exists_orig = self.bigip.healthcheck_exists
        self.bigip.iapp_delete_orig = self.bigip.iapp_delete
        self.bigip.iapp_create_orig = self.bigip.iapp_create
        self.bigip.pool_delete_orig = self.bigip.pool_delete
        self.bigip.iapp_update_orig = self.bigip.iapp_update
        self.bigip.get_node = Mock()
        self.bigip.pool_update = Mock()
        self.bigip.healthcheck_update = Mock()
        self.bigip.healthcheck_exists = Mock()
        self.bigip.healthcheck_exists.return_value = {'http': True,
                                                      'tcp': True}
        self.bigip.virtual_update = Mock()
        self.bigip.virtual_address_create = Mock()
        self.bigip.virtual_address_update = Mock()
        self.bigip.member_create = Mock()
        self.bigip.member_delete = Mock()
        self.bigip.member_update = Mock()
        self.bigip.iapp_create = Mock()
        self.bigip.iapp_delete = Mock()
        self.bigip.iapp_update = Mock()
        self.bigip.node_delete = Mock()
        self.bigip.sys.folders.get_collection = \
            Mock(side_effect=self.mock_partition_folders_get_collection)
        self.bigip.get_node_list = Mock(side_effect=self.mock_get_node_list)
    def tearDown(self):
        """Test suite tear down."""
        self.test_partition = None
        self.test_iapp = None
        self.test_iapp_list = None
        self.test_virtual = None
        self.test_pool = None
        self.test_monitor = None
|
import json
import logging
from datetime import datetime
from typing import List, Dict, Any, Type
from pydantic import BaseModel
from csr.tabular_file_reader import TabularFileReader
logger = logging.getLogger(__name__)
class EntityReader:
    """Reader that reads entity data from tab delimited files.
    """

    def __init__(self, input_dir: str):
        self.input_dir = input_dir

    @staticmethod
    def get_date_fields(schema: Dict[str, Any]) -> List[str]:
        """Names of schema properties whose 'format' is 'date'."""
        return [name
                for name, field in schema['properties'].items()
                if field.get('format') == 'date']

    @staticmethod
    def get_array_fields(schema: Dict[str, Any]) -> List[str]:
        """Names of schema properties whose 'type' is 'array'."""
        array_fields: List[str] = []
        for name, field in schema['properties'].items():
            if field.get('type') == 'array':
                array_fields.append(name)
        return array_fields

    def read_entities(self, file_path: str, entity_type: Type[BaseModel]) -> List[Any]:
        """Read rows from a tab-delimited file and build entity objects.

        Empty and 'NA' cells become None; date columns are parsed as
        ISO dates and array columns as JSON. A missing file yields [].
        """
        try:
            data = TabularFileReader(file_path).read_data()
        except FileNotFoundError:
            return []
        schema = entity_type.schema()
        date_columns = self.get_date_fields(schema)
        array_columns = self.get_array_fields(schema)
        for row in data:
            for column, cell in row.items():
                if cell in ('', 'NA'):
                    row[column] = None
                elif column in date_columns:
                    row[column] = datetime.strptime(cell, '%Y-%m-%d')
                elif column in array_columns:
                    row[column] = json.loads(cell)
        return [entity_type(**row) for row in list(data)]
|
# -*- coding: utf-8 -*-
"""
A telemetry recording pipeline.
"""
import zmq
from ..cyloop.loop import IOLoop
from ..utils.msg import internal_address
from .handlers import Telemetry, TelemetryWriter
__all__ = ['PipelineIOLoop', 'create_pipeline']
def create_pipeline(address, context=None, chunksize=1024, filename="telemetry.{0:d}.hdf5", kind=zmq.SUB):
    """Create a telemetry writing pipeline."""
    pipeline = PipelineIOLoop(address, context, chunksize, filename, kind)
    return pipeline
class PipelineIOLoop(IOLoop):
    """Expose pipeline parts at IOLoop top level.

    Wires a Telemetry receiver and its HDF5 writer into the loop and
    exposes them as `record` and `write`.
    """
    def __init__(self, address, context=None, chunksize=1024, filename="telemetry.{0:d}.hdf5", kind=zmq.SUB):
        # Fall back to the process-wide shared ZMQ context when none is given.
        context = context or zmq.Context.instance()
        super(PipelineIOLoop, self).__init__(context)
        self.add_worker()
        # Receiver subscribed at `address`; chunksize/filename are forwarded
        # to the writer. NOTE(review): attach order (receiver at slot 0,
        # writer at slot 1) presumably matters to IOLoop scheduling — confirm.
        self.record = Telemetry.at_address(address, context, kind=kind, chunksize=chunksize, filename=filename)
        self.attach(self.record, 0)
        self.attach(self.record.writer, 1)
        # Convenience alias so callers can reach the writer directly.
        self.write = self.record.writer
|
from urlparse import urlparse
from threading import Thread
import httplib, sys, multiprocessing
from Queue import Queue
import simplejson
import time
#Run as "time python perfprocess.py > out 2>&1 &" then "tail -f out"
# Load-test parameters, all taken from the command line:
#   perfprocess.py <processes> <threads> <requests> <metrics-per-request>
num_processes = int(sys.argv[1])
num_threads = int(sys.argv[2])
num_requests = int(sys.argv[3])
num_metrics_per_request = int(sys.argv[4])
# Total number of metrics that will be POSTed over the whole run.
print num_processes * num_threads * num_requests * num_metrics_per_request
# NOTE(review): hard-coded auth token — presumably a throwaway dev token; verify.
headers = {"Content-type": "application/json", "X-Auth-Token": "2685f55a60324c2ca6b5b4407d52f39a"}
urls = [
    'http://localhost:8080/v2.0/metrics',
]
def doWork(q):
    """Thread worker: take one URL from the queue and hammer it.

    Issues `num_requests` sequential POSTs to the URL, then marks its
    single queue item done so doProcess() can join the queue.
    """
    url=q.get()
    for x in xrange(num_requests):
        status,response=getStatus(url)
        doSomethingWithResult(status,response)
    q.task_done()
def getStatus(ourl):
    """POST one batch of test metrics to `ourl`.

    Builds `num_metrics_per_request` metric dicts (timestamped 120s in
    the past), POSTs them as JSON, and returns (HTTP status, url).
    Any failure is printed and reported as ("error", url).
    """
    try:
        url = urlparse(ourl)
        conn = httplib.HTTPConnection(url.netloc)
        body = []
        for i in xrange(num_metrics_per_request):
            epoch = (int)(time.time()) - 120
            body.append({"name": "test-" + str(i), "dimensions": {"dim-1": "value-1"}, "timestamp": epoch, "value": i})
        body = simplejson.dumps(body)
        conn.request("POST", url.path, body, headers)
        res = conn.getresponse()
        # The API signals success with 204 No Content; anything else is a failure.
        if res.status != 204:
            raise Exception(res.status)
        return res.status, ourl
    except Exception as ex:
        print ex
        return "error", ourl
def doSomethingWithResult(status, url):
    """Hook for per-request result handling; intentionally a no-op."""
    pass
def doProcess():
    """Process worker: spin up `num_threads` threads, one URL each, and wait.

    Threads are daemonized so a KeyboardInterrupt can exit immediately;
    q.join() blocks until every thread calls task_done().
    """
    q=Queue(num_threads)
    for i in range(num_threads):
        t=Thread(target=doWork, args=(q,))
        t.daemon=True
        t.start()
    try:
        for i in xrange(num_threads):
            url = urls[i%len(urls)]
            q.put(url.strip())
        q.join()
    except KeyboardInterrupt:
        sys.exit(1)
if __name__ == '__main__':
    # Launch one worker process per requested process count.
    jobs = []
    for i in range(num_processes):
        p = multiprocessing.Process(target=doProcess)
        jobs.append(p)
        p.start()
    # BUG FIX: the join was outside the loop and only waited on the LAST
    # process started, skewing the wall-clock timing of the run. Wait for
    # every worker before exiting.
    for p in jobs:
        p.join()
|
#!/usr/bin/env python3
# Software Name: pyngsild
# SPDX-FileCopyrightText: Copyright (c) 2021 Orange
# SPDX-License-Identifier: Apache 2.0
#
# This software is distributed under the Apache 2.0;
# see the NOTICE file for more details.
#
# Author: Fabien BATTELLO <fabien.battello@orange.com> et al.
import logging
import threading
from datetime import datetime
from queue import SimpleQueue as Queue
from typing import Any, Callable, Literal

import anyio
from paho.mqtt.client import MQTTMessage

from pyngsild.source import Row, ROW_NOT_SET as QUEUE_EOT, SourceSingle
from pyngsild.utils.mqttclient import MqttClient, MQTT_DEFAULT_PORT
from pyngsild.sink import *

from . import ManagedDaemon
logger = logging.getLogger(__name__)
class MqttAgent(ManagedDaemon):
    """A MqttAgent receives data from a MQTT broker on a given topic.

    Each time a message is received on the subscribed topic(s), the MQTT
    callback enqueues a Row composed of the message payload; a background
    thread drains the queue and triggers the processing pipeline. The row
    provider is set to "mqtt".
    """

    def __init__(
        self,
        sink: Sink = SinkStdout(),
        # BUG FIX: `Callable[[Row]]` raises TypeError at class-definition
        # time — typing.Callable must be used as Callable[[args], result].
        process: Callable[[Row], Any] = lambda row: row.record,
        host: str = "localhost",
        port: int = MQTT_DEFAULT_PORT,
        credentials: tuple[str, str] = (None, None),
        topic: str | list[str] = "#",  # all topics
        qos: Literal[0, 1, 2] = 0,  # no ack
    ):
        """Returns a MqttAgent instance.

        Args:
            sink (Sink): Sink processed rows are sent to. Defaults to stdout.
            process (Callable): Row-to-record mapping applied before sinking.
            host (str): Hostname or IP address of the remote broker. Defaults to "localhost".
            port (int): Network port of the server host to connect to. Defaults to 1883.
            credentials (str,str): Username and password used in broker authentication. Defaults to no auth.
            topic (str | list[str]): Topic (or list of topics) to subscribe to. Defaults to "#" (all topics).
            qos (Literal[0, 1, 2]) : QoS : 0, 1 or 2 according to the MQTT protocol. Defaults to 0 (no ack).
        """
        super().__init__(sink, process)
        self.topic = topic
        # Rows travel from the MQTT callback thread to the drain loop
        # through this queue; QUEUE_EOT is the shutdown sentinel.
        self._queue: Queue[Row] = Queue()
        user, passwd = credentials
        self._mcsub: MqttClient = MqttClient(
            host, port, user, passwd, qos, callback=self._callback
        )

    async def _aloop(self):
        """Drain the queue, triggering the pipeline once per received row."""
        while True:
            row: Row = self._queue.get(True)
            # Identity check: the sentinel is a unique object, so `is` cannot
            # be fooled by a payload that happens to compare equal.
            if row is QUEUE_EOT:  # End Of Transmission
                logger.info("EOT received")
                break
            src = SourceSingle(row, provider="mqtt")
            await self.trigger(src)  # TODO => sync it or all async
        self._mcsub.stop()

    def _loop(self):
        """Thread entry point: run the async drain loop to completion."""
        anyio.run(self._aloop)

    def run(self):
        """Subscribe to the topic(s) and start the background drain thread."""
        super().run()
        self._mcsub.subscribe(self.topic)
        thread = threading.Thread(target=self._loop)
        thread.start()

    def _callback(self, msg: MQTTMessage):
        """MQTT client callback: record stats and enqueue the message as a Row."""
        logger.debug(f"Received MQTT message : {msg}")
        self.status.lastcalltime = datetime.now()
        self.status.calls += 1
        payload = str(msg.payload.decode("utf-8"))
        self._queue.put(Row(msg.topic, payload))

    def close(self):
        """Properly disconnect from MQTT broker and free resources"""
        self._queue.put(QUEUE_EOT)  # unblock the drain loop so it can exit
        self._mcsub.stop()
        super().close()
|
# -*- coding: utf-8 -*-
from datetime import datetime
class LedgerEntry(object):
    """A single ledger line: a date, a description and a change in cents."""

    def __init__(self):
        """Create an empty entry; all fields start as None."""
        self.date = None         # datetime of the transaction
        self.description = None  # free-text description
        self.change = None       # signed amount in cents
def create_entry(date, description, change):
    """Build a LedgerEntry from an ISO date string, description and change."""
    entry = LedgerEntry()
    entry.date, entry.description, entry.change = (
        datetime.strptime(date, '%Y-%m-%d'),
        description,
        change,
    )
    return entry
def _format_date(date, locale):
    """Render *date* as MM/DD/YYYY (en_US) or DD-MM-YYYY (nl_NL)."""
    if locale == 'en_US':
        return '{:02d}/{:02d}/{:04d}'.format(date.month, date.day, date.year)
    return '{:02d}-{:02d}-{:04d}'.format(date.day, date.month, date.year)


def _format_description(description):
    """Truncate to 22 chars + '...' when longer than 25, else pad to 25."""
    if len(description) > 25:
        return description[:22] + '...'
    return description.ljust(25)


def _group_units(amount, separator):
    """Join the base-1000 groups of *amount* with *separator*.

    NOTE: faithful to the original implementation, inner groups are NOT
    zero-padded (e.g. 1005 renders as '1,5', not '1,005').
    """
    parts = []
    while amount > 0:
        parts.insert(0, str(amount % 1000))
        amount = amount // 1000
    return separator.join(parts) if parts else '0'


def _format_change(change, currency, locale):
    """Render *change* (cents) as a 13-char right-aligned money column.

    en_US: '$1,234.56', negatives wrapped in parentheses, positives padded
    with a trailing space. nl_NL: '$ ' / '€ ' prefix, '-' sign, '.' as the
    thousands separator, ',' as the decimal separator, always a trailing
    space.

    Raises:
        ValueError: for an unknown currency (the original code crashed
            here with a NameError; fail loudly with a clear message).
    """
    if currency == 'USD':
        symbol = '$'
    elif currency == 'EUR':
        symbol = u'€'
    else:
        raise ValueError('unknown currency: {!r}'.format(currency))
    units = _group_units(abs(int(change / 100.0)),
                         ',' if locale == 'en_US' else '.')
    cents = str(abs(change) % 100).zfill(2)
    if locale == 'en_US':
        if change < 0:
            formatted = '(' + symbol + units + '.' + cents + ')'
        else:
            formatted = symbol + units + '.' + cents + ' '
    else:
        sign = '-' if change < 0 else ''
        formatted = symbol + ' ' + sign + units + ',' + cents + ' '
    return formatted.rjust(13)


def format_entries(currency, locale, entries):
    """Render *entries* as a fixed-width ledger table for *locale*.

    Args:
        currency: 'USD' or 'EUR'.
        locale: 'en_US' or 'nl_NL'; any other locale returns None
            (preserved from the original implementation).
        entries: list of objects with .date, .description and .change;
            the list is emptied as a side effect (callers relied on the
            original's destructive pop-based ordering).

    Returns:
        The table as one newline-joined string, or None for an unknown
        locale.
    """
    if locale == 'en_US':
        table = ('Date' + ' ' * 7 + '| Description' + ' ' * 15 +
                 '| Change' + ' ' * 7)
    elif locale == 'nl_NL':
        table = ('Datum' + ' ' * 6 + '| Omschrijving' + ' ' * 14 +
                 '| Verandering' + ' ' * 2)
    else:
        return None
    # The original repeatedly extracted the minimum by (date, change,
    # description); a stable sort reproduces that order exactly, and
    # clearing the caller's list reproduces the side effect.
    ordered = sorted(entries,
                     key=lambda e: (e.date, e.change, e.description))
    del entries[:]
    rows = [table]
    for entry in ordered:
        rows.append(_format_date(entry.date, locale) + ' | ' +
                    _format_description(entry.description) + ' | ' +
                    _format_change(entry.change, currency, locale))
    return '\n'.join(rows)
|
import unittest
from mock import patch, MagicMock
from add_user import AddUser
from user_repository import UserRepository
class AddUserTest(unittest.TestCase):
    """Unit tests for the AddUser use case, with the repository mocked out."""

    def setUp(self):
        self.user_repo = UserRepository()

    def test_add_user_should_add_user_to_repository(self):
        # Repository reports the nickname as new, so the use case succeeds.
        self.user_repo.user_exists = MagicMock(return_value=False)
        self.user_repo.add_user = MagicMock()
        use_case = AddUser(self.user_repo)
        result = use_case({'data': {'nickname': 'juancho'}})
        self.assertEqual('success', result['status'])

    def test_add_user_should_fail_if_user_already_exists(self):
        # Repository reports a duplicate nickname, so the use case fails.
        self.user_repo.user_exists = MagicMock(return_value=True)
        use_case = AddUser(self.user_repo)
        result = use_case({'data': {'nickname': 'juancho'}})
        self.assertEqual('fail', result['status'])
|
import numpy as np
from casim.utils import to_binary, to_decimal
def test_to_binary():
    """54 should encode to the 8-bit binary vector 00110110."""
    expected = [0, 0, 1, 1, 0, 1, 1, 0]
    assert np.array_equal(to_binary(54), expected)
def test_to_decimal():
    """The 8-bit vector 00110110 should decode back to 54."""
    bits = np.array([0, 0, 1, 1, 0, 1, 1, 0])
    assert to_decimal(bits, 8) == 54
|
# Using Try, Except, Finally and Else clauses
# handle errors and exceptions gracefully
# comment out examples as required so they run
# Test 1 - Generating a single error, change number1 to zero
# Test 2 - Adding a second error type, change number1 to 'apple'
# Test 3 - Use an 'else' clause if no error is generated, change number1 to 2
# Test 4 - Use a 'finally' clause to run if the code is successful or generates an error
number = 2
number1 = 2

# handling a single error type
try:
    print(f'The outcome of the sum was: {number / number1}')
except ZeroDivisionError as error:
    print(f'You attempted to divide by zero this raised an {error=}')

# handling multiple error types
try:
    print(f'The outcome of the sum is: {number / number1}')
except ZeroDivisionError as error:
    print(f'You attempted to divide by zero this raised an {error=}')
except TypeError as error:
    # Fixed typo: "attemed" -> "attempted" in the user-facing message.
    print(
        f'You attempted to divide an integer '
        f'by a string which raised an {error=}')

# Using an else clause to run code if no exception is raised.
try:
    print(f'The outcome of the sum is: {number / number1}')
except ZeroDivisionError as error:
    print(f'You attempted to divide by zero, this raised an {error=}')
else:
    # Fixed typo: "sucess" -> "success".
    print('Your sum was a success and did not raise an error, congratulations')

# Using 'finally' to run code irrespective of success or error being generated
try:
    print(f'The outcome of the sum is: {number / number1}')
except ZeroDivisionError as error:
    print(f'You attempted to divide by zero, this raised an {error=}')
finally:
    print(
        'This code will run irrespective'
        ' of success or an error being generated')
|
from ansiblelint import AnsibleLintRule
class PlaysContainLogicRule(AnsibleLintRule):
    """Lint rule: plays should delegate tasks/vars/handlers rather than inline them."""

    id = 'EXTRA0008'
    shortdesc = 'plays should not contain logic'
    description = 'plays should not contain tasks, handlers or vars'
    tags = ['dry']

    def matchplay(self, file, play):
        # we can only access line number of first thing in the section
        # so we guess the section starts on the line above.
        logic_sections = ('tasks', 'pre_tasks', 'post_tasks', 'vars', 'handlers')
        return [
            ({file['type']: play}, "%s should not be required in a play" % section)
            for section in logic_sections
            if section in play and play[section]
        ]
|
from Crypto.Random.random import randrange
import argparse
def sequence(max_num_incl=2048, sequence_len=24):
    """Print sequence_len cryptographically random integers in [1, max_num_incl]."""
    print(f"Generating sequence of {sequence_len} numbers 1-{max_num_incl}...")
    for _ in range(sequence_len):
        print(randrange(1, max_num_incl + 1))
def run():
    """CLI entry point: parse -m/-l options and print a one-time pad sequence.

    Fix: the argparse defaults were hard-coded (2048, 24) separately from
    default_max/default_len, which are interpolated into the help text —
    changing one without the other would make the help lie. The variables
    are now the single source of truth; the effective defaults are unchanged.
    """
    cli_name = "germ-otp"
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                     description="Generate a one-time pad.\n\n"
                                                 "examples:\n"
                                                 f"{cli_name}\n "
                                                 f"{cli_name} -m 2048 -l 24\n")
    max_num_nm = 'maxnumincl'
    default_max = 2048
    max_help = f"Maximum generated number, corresponds to the modulo number (default: {default_max})"
    parser.add_argument('-m', f'--{max_num_nm}', default=default_max, help=max_help, required=False)
    seq_len_nm = 'seqlen'
    default_len = 24
    len_help = f"Sequence length, how many numbers should be generated (default: {default_len})."
    parser.add_argument('-l', f'--{seq_len_nm}', default=default_len, help=len_help, required=False)
    config = vars(parser.parse_args())
    sequence(int(config[max_num_nm]), int(config[seq_len_nm]))
|
#!/usr/local/bin/python
#
# t t y L i n u x . p y
#
# getLookAhead reads lookahead chars from the keyboard without
# echoing them. It still honors ^C etc
#
import termios, sys, time
if sys.version > "2.1" : TERMIOS = termios
else : import TERMIOS
def setSpecial () :
    """Put the keyboard into single-char, non-blocking (lookahead) mode.

    Saves the current terminal attributes into the module global
    ``oldSettings`` so that setNormal() can restore them later.
    """
    global oldSettings
    fd = sys.stdin.fileno()
    # Snapshot current settings for later restoration.
    oldSettings = termios.tcgetattr(fd)
    new = termios.tcgetattr(fd)
    new[3] = new[3] & ~TERMIOS.ECHO # lflags: stop echoing typed characters
    new[3] = new[3] & ~TERMIOS.ICANON # lflags: leave canonical (line-buffered) mode
    # NOTE(review): index 6 of the attr list is the cc array; cc[6] is
    # presumably termios.VMIN (which is 6 on Linux) — confirm for portability.
    # VMIN=0 makes reads return immediately even when no input is pending.
    new[6][6] = '\000' # Set VMIN to zero for lookahead only
    termios.tcsetattr(fd, TERMIOS.TCSADRAIN, new)
def setNormal () :
    """Restore the terminal settings previously saved by setSpecial()."""
    termios.tcsetattr(sys.stdin.fileno(), TERMIOS.TCSADRAIN, oldSettings)
def readLookAhead () :
    "read max 3 chars (arrow escape seq) from look ahead"
    # With VMIN set to 0 by setSpecial(), this returns immediately with up to
    # 3 pending characters (enough for a 3-byte arrow-key escape sequence),
    # or an empty string when nothing has been typed.
    return sys.stdin.read(3)
|
# Generated by Django 2.1.7 on 2019-02-28 18:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 2.1.7).

    Redefines FetchedGame.platforms as a plain ManyToManyField to
    catalogsources.FetchedPlatform with no extra field options.
    """

    dependencies = [
        ("catalogsources", "0001_initial"),
    ]
    operations = [
        migrations.AlterField(
            model_name="fetchedgame",
            name="platforms",
            field=models.ManyToManyField(to="catalogsources.FetchedPlatform"),
        ),
    ]
|
import numpy as np
import scipy.signal as scisig
def FindPedestal(p, m):
    """Estimate the waveform pedestal: mean of samples surviving outlier rejection."""
    inliers = RejectOutliers(p, m=m)
    return np.mean(inliers)
def RejectOutliers(data, m=2.):
    """Return the elements of data within m scaled absolute deviations of the median.

    Args:
        data: 1-D numpy array of samples.
        m: cutoff in units of deviation / median-absolute-deviation.

    Returns:
        1-D numpy array containing only the inliers. If the MAD is zero
        (e.g. constant input), every sample is kept.
    """
    deviation = np.abs(data - np.median(data))
    mad = np.median(deviation)
    if mad:
        score = deviation / mad
    else:
        # Fix: the original fell back to the *scalar* 0., which made
        # `data[score < m]` index with a single boolean and mangle the
        # result shape; a zero score per sample keeps all of them instead.
        score = np.zeros_like(deviation)
    return data[score < m]
def WaveformDiscriminator(p,
                          noiseSigma,
                          nNoiseSigmaThreshold=1,
                          sgFilter=True,
                          sgWindow=15,
                          sgPolyOrder=3):
    """Flag samples that dip below the pedestal by more than
    nNoiseSigmaThreshold noise units, optionally after Savitzky-Golay
    smoothing. Pedestal estimation is delegated to FindDigitizedPedestal
    (defined elsewhere in this module).

    Returns [hitLogic, baselineVal, noiseInADC].
    """
    baselineVal, noiseInADC = FindDigitizedPedestal(p=p, m=3, nBits=12, dynamicRange=1, noiseSigma=noiseSigma)
    waveform = scisig.savgol_filter(x=p, window_length=sgWindow, polyorder=sgPolyOrder) if sgFilter else p
    cutoff = baselineVal - nNoiseSigmaThreshold * noiseInADC
    hitLogic = np.array([sample < cutoff for sample in waveform])
    return [hitLogic, baselineVal, noiseInADC]
def DiscriminatorConditioning(p,
                              noiseSigmaInVolt,
                              durationTheshold=5,
                              adjDurationThreshold=5,
                              nNoiseSigmaThreshold=1,
                              sgFilter=True,
                              sgWindow=15,
                              sgPolyOrder=3):
    """Threshold the waveform into a hit mask, then clean the mask up.

    A sample is a "hit" when it falls more than
    nNoiseSigmaThreshold * noiseSigmaInVolt below the baseline (the mean of
    the first 50 samples; pulses are negative-going). Hits shorter than
    durationTheshold samples are removed, then gaps between hits shorter
    than adjDurationThreshold samples are filled in.

    Fixes: removed the duplicated `hitLogic = hitLogic =` assignment, the
    redundant boolean conjunctions, and the dead np.std(p[:50]) computation
    whose result was never used.

    Args:
        p: 1-D numpy array of waveform samples.
        noiseSigmaInVolt: noise sigma used to set the hit threshold.
        durationTheshold: minimum hit length in samples (original spelling
            kept for caller compatibility).
        adjDurationThreshold: maximum gap length (samples) to merge.
        nNoiseSigmaThreshold: threshold in units of noiseSigmaInVolt.
        sgFilter, sgWindow, sgPolyOrder: accepted for interface
            compatibility but unused (no smoothing is applied here).

    Returns:
        [hitLogic, baseline, noiseSigmaInVolt] where hitLogic is a boolean
        numpy array the same length as p.
    """
    baseline = np.mean(p[:50])
    threshold = baseline - nNoiseSigmaThreshold * noiseSigmaInVolt
    hitLogic = np.array([sample < threshold for sample in p])
    n = np.size(hitLogic)
    # Pass 1: drop hits shorter than durationTheshold samples.
    for i in range(1, n):
        if hitLogic[i] and not hitLogic[i - 1]:  # rising edge at i
            countDuration = 0
            for j in range(i, n - 1):
                if hitLogic[j]:
                    countDuration += 1
                if not hitLogic[j + 1]:
                    break
            if countDuration < durationTheshold:
                hitLogic[i:i + countDuration] = False
    # Pass 2: fill gaps shorter than adjDurationThreshold samples.
    for i in range(1, n):
        if hitLogic[i - 1] and not hitLogic[i]:  # falling edge at i
            countDuration = 0
            for j in range(i, n - 1):
                if not hitLogic[j]:
                    countDuration += 1
                if hitLogic[j + 1]:
                    break
            if countDuration < adjDurationThreshold:
                hitLogic[i:i + countDuration] = True
    return [hitLogic, baseline, noiseSigmaInVolt]
def startTimeFinder(p,
                    noiseSigmaInVolt,
                    cfdThreshold=0.2,
                    durationTheshold=10,
                    adjDurationThreshold=5,
                    nNoiseSigmaThreshold=3,
                    sgFilter=True,
                    sgWindow=15,
                    sgPolyOrder=3):
    """Locate the CFD-style start index of the first hit in waveform p.

    Runs DiscriminatorConditioning to obtain a cleaned hit mask, finds the
    first hit's peak (the minimum sample — pulses are negative-going), and
    walks backwards from the peak to the sample where the waveform crosses
    baseline + cfdThreshold * (peak - baseline).

    Returns the index of that crossing, or 0 when no hit is found.
    """
    [hitLogic, baseline, noiseSigma] = DiscriminatorConditioning(p=p,
                                                                 noiseSigmaInVolt=noiseSigmaInVolt,
                                                                 durationTheshold=durationTheshold,
                                                                 adjDurationThreshold=adjDurationThreshold,
                                                                 nNoiseSigmaThreshold=nNoiseSigmaThreshold,
                                                                 sgFilter=sgFilter,
                                                                 sgWindow=sgWindow,
                                                                 sgPolyOrder=sgPolyOrder)
    # These accumulators are never filled in (see commented-out appends below);
    # kept for compatibility with the original code.
    hitStartIndexList = []
    hitPeakAmplitude = []
    hitPeakIndexArray = []
    hitStartIndex = 0
    hitAmplitude=0
    hitPeakIndex=0
    for i in range(1, np.size(hitLogic)):
        # Rising edge of a hit. NOTE(review): the `hitStartIndex == 0` guard
        # means only the FIRST hit is ever processed (unless its start index
        # happens to be 0) — confirm single-hit behavior is intended.
        if ((not hitLogic[i - 1]) and hitLogic[i]) and hitLogic[i] and hitStartIndex == 0:
            # Large sentinel so the first in-hit sample always replaces it;
            # the peak is the minimum sample within the hit.
            hitAmplitude = 1E100
            hitPeakIndex = i
            for j in range(i, np.size(hitLogic) - 1):
                if p[j] < hitAmplitude:
                    hitAmplitude = p[j]
                    hitPeakIndex = j
                if not hitLogic[j + 1]:
                    break
            # Constant-fraction threshold between baseline and peak amplitude.
            ThresholdADC = baseline + (cfdThreshold * (hitAmplitude-baseline ))
            hitStartIndex = i
            # Walk backwards from the peak to the first threshold crossing.
            for j in range(hitPeakIndex, 0, -1):
                if (p[j-1] <= ThresholdADC and p[j ] > ThresholdADC):
                    hitStartIndex = j-1
                    break
    #hitStartIndexList = np.append(hitStartIndexList, hitStartIndex)
    #hitPeakAmplitude = np.append(hitPeakAmplitude, hitAmplitude)
    #hitPeakIndexArray = np.append(hitPeakIndexArray, hitPeakIndex)
    return hitStartIndex
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from dataclasses import dataclass
from typing import Union
import torch
@dataclass
class DensePoseChartPredictorOutput:
    """
    Predictor output holding segmentation and chart (U/V) coordinate
    predictions for predefined body parts:
    * coarse_segm: [N, K, Hout, Wout] coarse segmentation
    * fine_segm:   [N, C, Hout, Wout] fine segmentation
    * u, v:        [N, C, Hout, Wout] chart coordinates
    where N is the number of instances, K the number of coarse channels
    (2 = foreground/background, 15 = 14 body parts + background), C the
    number of fine channels (24 fine body parts + background), and
    Hout/Wout the prediction height and width.
    """
    coarse_segm: torch.Tensor
    fine_segm: torch.Tensor
    u: torch.Tensor
    v: torch.Tensor

    def __len__(self):
        """Return the number of instances (N) in the output."""
        return self.coarse_segm.size(0)

    def __getitem__(
        self, item: Union[int, slice, torch.BoolTensor]
    ) -> "DensePoseChartPredictorOutput":
        """Return a new output restricted to the selected instance(s).

        Args:
            item (int or slice or tensor): selected items
        """
        if isinstance(item, int):
            def pick(tensor):
                # A bare int index drops the batch dim; re-add it so the
                # result still has an explicit N axis.
                return tensor[item].unsqueeze(0)
        else:
            def pick(tensor):
                return tensor[item]
        return DensePoseChartPredictorOutput(
            coarse_segm=pick(self.coarse_segm),
            fine_segm=pick(self.fine_segm),
            u=pick(self.u),
            v=pick(self.v),
        )
|
import logging
from flask import redirect, render_template, request, url_for
from structlog import wrap_logger
from frontstage import app
from frontstage.common.authorisation import jwt_authorization
from frontstage.controllers import collection_instrument_controller, party_controller, conversation_controller
from frontstage.exceptions.exceptions import CiUploadError
from frontstage.views.surveys import surveys_bp
logger = wrap_logger(logging.getLogger(__name__))
@surveys_bp.route('/upload-survey', methods=['POST'])
@jwt_authorization(request)
def upload_survey(session):
    """Upload a respondent's completed collection instrument for a case.

    Expects `case_id`, `business_party_id` and `survey_short_name` query
    parameters and the file in the `file` multipart field. On failure,
    redirects to the upload-failed page with a categorised `error_info`;
    on success, renders the upload-success template.
    """
    party_id = session.get_party_id()
    case_id = request.args['case_id']
    business_party_id = request.args['business_party_id']
    survey_short_name = request.args['survey_short_name']
    logger.info('Attempting to upload collection instrument', case_id=case_id, party_id=party_id)
    # Reject oversized requests up front, before touching the file stream.
    if request.content_length > app.config['MAX_UPLOAD_LENGTH']:
        return redirect(url_for('surveys_bp.upload_failed',
                                _external=True,
                                case_id=case_id,
                                business_party_id=business_party_id,
                                survey_short_name=survey_short_name,
                                error_info='size'))
    # Check if respondent has permission to upload for this case
    party_controller.is_respondent_enrolled(party_id, business_party_id, survey_short_name)
    # Get the uploaded file
    upload_file = request.files['file']
    upload_filename = upload_file.filename
    # Repackage as the multipart tuple expected by the collection instrument
    # service client ({'Expires': 0} sets a no-cache header on the part).
    upload_file = {
        'file': (upload_filename, upload_file.stream, upload_file.mimetype, {'Expires': 0})
    }
    try:
        # Upload the file to the collection instrument service
        collection_instrument_controller.upload_collection_instrument(upload_file, case_id, party_id)
    except CiUploadError as ex:
        # Map the service's error message onto a UI error category.
        # NOTE(review): this matches substrings of the remote service's
        # message text and silently breaks if that wording changes — confirm
        # against the collection instrument service.
        if ".xlsx format" in ex.error_message:
            error_info = "type"
        elif "50 characters" in ex.error_message:
            error_info = "charLimit"
        elif "File too large" in ex.error_message:
            error_info = 'size'
        elif "File too small" in ex.error_message:
            error_info = 'sizeSmall'
        else:
            logger.error('Unexpected error message returned from collection instrument service',
                         status=ex.status_code,
                         error_message=ex.error_message,
                         party_id=party_id,
                         case_id=case_id)
            error_info = "unexpected"
        return redirect(url_for('surveys_bp.upload_failed',
                                _external=True,
                                case_id=case_id,
                                business_party_id=business_party_id,
                                survey_short_name=survey_short_name,
                                error_info=error_info))
    logger.info('Successfully uploaded collection instrument', party_id=party_id, case_id=case_id)
    unread_message_count = { 'unread_message_count': conversation_controller.try_message_count_from_session(session) }
    return render_template('surveys/surveys-upload-success.html', upload_filename=upload_filename,
                           unread_message_count=unread_message_count)
|
# Copyright (c) 2017-2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, AsyncIterable, Callable, Iterable, List, Tuple, TypeVar, Union, cast
from urllib.parse import urlparse
from grpc import (
AuthMetadataContext,
AuthMetadataPlugin,
AuthMetadataPluginCallback,
ChannelCredentials,
composite_channel_credentials,
metadata_call_credentials,
ssl_channel_credentials,
)
from grpc.aio import (
Channel,
ClientCallDetails,
StreamStreamCall,
StreamStreamClientInterceptor,
StreamUnaryCall,
StreamUnaryClientInterceptor,
UnaryStreamCall,
UnaryStreamClientInterceptor,
UnaryUnaryCall,
UnaryUnaryClientInterceptor,
insecure_channel,
secure_channel,
)
from ..config import Config
__all__ = ["create_channel"]
RequestType = TypeVar("RequestType")
RequestIterableType = Union[Iterable[Any], AsyncIterable[Any]]
ResponseIterableType = AsyncIterable[Any]
def create_channel(config: "Config") -> "Channel":
    """
    Create a :class:`Channel` for the specified configuration.

    The channel is secure (TLS) when the URL scheme is https/grpcs or SSL
    settings are present. Access tokens are attached as composite call
    credentials on TLS channels, or via a header-injecting interceptor on
    plaintext channels (gRPC refuses call credentials without TLS).
    """
    u = urlparse(config.url.url)
    # Allow unbounded message sizes in both directions.
    options = [
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ]
    if not config.url.use_http_proxy:
        options.append(("grpc.enable_http_proxy", 0))
    if (u.scheme in ("https", "grpcs")) or config.ssl:
        # NOTE(review): when only a cert (and no CA) is configured, the cert
        # itself is used as the root certificate — presumably for self-signed
        # setups; confirm this is intentional.
        if config.ssl.ca is None and config.ssl.cert is not None:
            credentials = ssl_channel_credentials(root_certificates=config.ssl.cert)
        else:
            credentials = ssl_channel_credentials(
                root_certificates=config.ssl.ca,
                private_key=config.ssl.cert_key,
                certificate_chain=config.ssl.cert,
            )
        if config.access.token_version is not None:
            # The grpc Credential objects do not actually define a formal interface, and are
            # used interchangeably in the code.
            #
            # Additionally there are some incorrect rules in the grpc-stubs typing rules that force
            # us to work around the type system.
            credentials = cast(
                ChannelCredentials,
                composite_channel_credentials(
                    credentials, metadata_call_credentials(GrpcAuth(config), name="auth gateway")
                ),
            )
        return secure_channel(u.netloc, credentials, tuple(options))
    elif config.access.token_version is not None:
        # Python/C++ libraries refuse to allow "credentials" objects to be passed around on
        # non-TLS channels, but they don't check interceptors; use an interceptor to inject
        # an Authorization header instead
        return insecure_channel(u.netloc, options, interceptors=[GrpcAuthInterceptor(config)])
    else:
        # no TLS, no tokens--simply create an insecure channel with no adornments
        return insecure_channel(u.netloc, options)
class GrpcAuth(AuthMetadataPlugin):
    """gRPC auth metadata plugin that injects a Bearer token from the config."""

    def __init__(self, config: "Config"):
        self._config = config

    def __call__(self, context: "AuthMetadataContext", callback: "AuthMetadataPluginCallback"):
        # This overly verbose type signature is here to satisfy mypy and grpc-stubs
        options = []  # type: List[Tuple[str, Union[str, bytes]]]
        # TODO: Add support here for refresh tokens
        token = self._config.access.token
        if token:
            # note: gRPC headers MUST be lowercased
            # Fix: use the token we already read instead of re-reading
            # config.access.token, which could change between the truthiness
            # check and the append.
            options.append(("authorization", "Bearer " + token))
        callback(tuple(options), None)
class GrpcAuthInterceptor(
    UnaryUnaryClientInterceptor,
    UnaryStreamClientInterceptor,
    StreamUnaryClientInterceptor,
    StreamStreamClientInterceptor,
):
    """
    An interceptor that injects "Authorization" metadata into a request.
    This works around the fact that the C++ gRPC libraries (which Python is built on) highly
    discourage sending authorization data over the wire unless the connection is protected with TLS.
    """
    # NOTE: There are a number of typing errors in the grpc.aio classes, so we're ignoring a handful
    # of lines until those problems are addressed.
    def __init__(self, config: "Config"):
        self._config = config
    # Each intercept_* variant below rewrites the call details to carry the
    # Authorization header, then delegates to the continuation unchanged.
    async def intercept_unary_unary(
        self,
        continuation: "Callable[[ClientCallDetails, RequestType], UnaryUnaryCall]",
        client_call_details: ClientCallDetails,
        request: RequestType,
    ) -> "Union[UnaryUnaryCall, RequestType]":
        return await continuation(self._modify_client_call_details(client_call_details), request)
    async def intercept_unary_stream(
        self,
        continuation: "Callable[[ClientCallDetails, RequestType], UnaryStreamCall]",
        client_call_details: ClientCallDetails,
        request: RequestType,
    ) -> "Union[ResponseIterableType, UnaryStreamCall]":
        return await continuation(self._modify_client_call_details(client_call_details), request)
    async def intercept_stream_unary(
        self,
        continuation: "Callable[[ClientCallDetails, RequestType], StreamUnaryCall]",
        client_call_details: ClientCallDetails,
        request_iterator: RequestIterableType,
    ) -> StreamUnaryCall:
        return await continuation(
            self._modify_client_call_details(client_call_details), request_iterator  # type: ignore
        )
    async def intercept_stream_stream(
        self,
        continuation: Callable[[ClientCallDetails, RequestType], StreamStreamCall],
        client_call_details: ClientCallDetails,
        request_iterator: RequestIterableType,
    ) -> "Union[ResponseIterableType, StreamStreamCall]":
        return await continuation(
            self._modify_client_call_details(client_call_details), request_iterator  # type: ignore
        )
    def _modify_client_call_details(self, client_call_details: ClientCallDetails):
        # Only add the header when a token scheme is configured and the caller
        # hasn't already supplied an "authorization" entry.
        # NOTE(review): assumes client_call_details.metadata is a non-None,
        # mutable metadata object — confirm grpc.aio always populates it.
        if (
            "authorization" not in client_call_details.metadata
            and self._config.access.token_version is not None
        ):
            client_call_details.metadata.add("authorization", f"Bearer {self._config.access.token}")
        return client_call_details
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import super
from future import standard_library
standard_library.install_aliases()
from vcfx.field.nodes import Field
#######
# TODO(cassidy): Figure out what `CALADRURI` actually is and implement support
#######
class BusyTime(Field):
    """vCard FBURL field: a URL pointing at the contact's free/busy time data."""

    KEY = "FBURL"

    def __init__(self, *a, **kw):
        # Zero-argument super() works on both Python 2 and 3 here thanks to
        # the module's `from builtins import super` shim.
        super().__init__(*a, **kw)
|
#! /usr/bin/env python
# -*- coding:UTF-8 -*-
# 一些比较好的代码片段
def pick_and_reorder_columns(listofRows, column_indexes):
    """Project each row onto the given columns, in the given order."""
    projected = []
    for row in listofRows:
        projected.append([row[index] for index in column_indexes])
    return projected
def pairwise(iterable):
    """Yield consecutive non-overlapping (a, b) pairs from iterable.

    Fixes the original, which was broken in three ways: it consumed an
    element at setup via `iter(iterable).next()` (Py2-only API), referenced
    an undefined name `itnext`, and looped without ever terminating cleanly.
    A trailing odd element is dropped (zip semantics).
    """
    it = iter(iterable)
    # zip over the same iterator pulls two items per output tuple.
    return zip(it, it)
def dictFromSeq(seq):
    """Build a dict from a flat key, value, key, value, ... sequence."""
    pairs = pairwise(seq)
    return dict(pairs)
|
__author__ = 'konradjk'
import gzip
import argparse
def main(args):
    """Copy a VCF, stripping it down to the 9 fixed columns (through FORMAT).

    `##` meta-information lines are copied through unchanged; all other lines
    (the `#CHROM` header and data rows) lose their per-sample columns.

    Args:
        args: namespace with `vcf` (input path) and `output` (output path);
            either may be gzipped if it ends in .gz.

    Fixes: gzip files are now opened in text mode ('rt'/'wt') — the original
    opened them in binary, so on Python 3 `line.startswith('##')` compared
    bytes to str and never matched, mangling the header. Files are also
    closed via context managers even if an exception occurs.
    """
    source = gzip.open(args.vcf, 'rt') if args.vcf.endswith('.gz') else open(args.vcf)
    sink = gzip.open(args.output, 'wt') if args.output.endswith('.gz') else open(args.output, 'w')
    with source as f, sink as g:
        for line in f:
            if line.startswith('##'):
                g.write(line)
            else:
                fields = line.strip().split('\t')[:9]
                g.write('\t'.join(fields) + '\n')
if __name__ == '__main__':
    # CLI entry point: read a (possibly gzipped) VCF and write a copy kept to
    # the first 9 columns. `--vcf`, `--input` and `-i` all map to args.vcf.
    parser = argparse.ArgumentParser()
    parser.add_argument('--vcf', '--input', '-i', help='Input VCF file; may be gzipped', required=True)
    parser.add_argument('--output', '-o', help='Output file, may be gzipped if ends in .gz', required=True)
    args = parser.parse_args()
    main(args)
|
"""
MicroPython Eduponics mini water level sensor - demo
https://github.com/STEMinds/micropython-eduponics
MIT License
Copyright (c) 2021 STEMinds
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import machine
# define water level sensor as INPUT on IO pin number 21
water_level = machine.Pin(21, machine.Pin.IN)
# this function will return 0 if container have no water and 1 if it has water
def is_empty():
    # Raw digital read of the sensor pin (0 or 1).
    # NOTE(review): per the comment above, 1 means water IS present, which
    # would make the name `is_empty` and the print branches below inverted —
    # confirm the sensor's actual polarity on hardware.
    return water_level.value()
# check if the water container is empty is not
if(is_empty()):
    print("The water container is empty")
else:
    print("The water container is full")
|
import unittest
from translate.util.baidu_sign import generate_token
class BaiduSignTest(unittest.TestCase):
    """Regression tests for baidu_sign.generate_token against known token values."""

    def test_token(self):
        gtk = '320305.131321201'
        # (input text, expected token) pairs, checked in order.
        cases = [
            ('hello', '54706.276099'),
            ('hello world are you ok ? this is funny.', '865383.644950'),
            ('你好', '232427.485594'),
            ('抽刀断水水更流,举杯消愁愁更愁。', '641727.862606'),
        ]
        for text, expected in cases:
            self.assertEqual(generate_token(text, gtk), expected)
|
"""
A collection of functions that call Instagram Basic Display API endpoints.
Functions:
get_auth_url(app_id, redirect_url, state) -> string
get_short_token(app_id, app_secret, redirect_url, auth_code) -> string
get_long_token(app_id, app_secret, redirect_url, auth_code) -> string
refresh_long_token(token) -> string
get_media(token, media_id, fields) -> object
get_album_images(token, media_id, fields) -> object
get_user(token, api_version, fields) -> object
get_user_media(token, api_version, fields) -> object
"""
import requests
# Instagram
# Instagram
# Endpoint roots and the default field selections requested from the API.
API_VERSION = "v12.0"
GRAPH_URL = "https://graph.instagram.com"
OAUTH_URL = "https://api.instagram.com/oauth"
MEDIA_FIELDS = (
    "id,media_type,media_url,permalink,thumbnail_url,caption,timestamp,username"
)
ALBUM_IMAGE_FIELDS = "id,media_type,media_url,thumbnail_url"
USER_FIELDS = "id,username,account_type,media_count,media"
# --------------------
# Authorization
# --------------------
def get_auth_url(app_id, redirect_url, state=False):
    """Build the app's user-authorization URL, optionally carrying a state value."""
    state_param = f"&state={state}" if state else ""
    return (
        f"{OAUTH_URL}/authorize"
        f"?client_id={app_id}"
        f"&redirect_uri={redirect_url}"
        "&response_type=code"
        "&scope=user_profile,user_media"
        f"{state_param}"
    )
def get_short_token(app_id, app_secret, redirect_url, auth_code):
    """Exchange an authorization code for a short-lived access token."""
    payload = {
        "client_id": app_id,
        "client_secret": app_secret,
        "grant_type": "authorization_code",
        "redirect_uri": redirect_url,
        "code": auth_code,
    }
    response = requests.post(f"{OAUTH_URL}/access_token", data=payload)
    return response.json()["access_token"]
def get_long_token(app_id, app_secret, redirect_url, auth_code):
    """Exchange an authorization code for a long-lived access token
    (via an intermediate short-lived token)."""
    short_token = get_short_token(app_id, app_secret, redirect_url, auth_code)
    query = {
        "client_secret": app_secret,
        "grant_type": "ig_exchange_token",
        "access_token": short_token,
    }
    response = requests.get(f"{GRAPH_URL}/access_token", params=query)
    return response.json()["access_token"]
def refresh_long_token(long_token):
    """Refresh a long-lived access token before it expires."""
    params = {"grant_type": "ig_refresh_token", "access_token": long_token}
    payload = requests.get(f"{GRAPH_URL}/refresh_access_token", params=params).json()
    return payload["access_token"]
# --------------------
# Media
# --------------------
def get_media(token, media_id, fields=MEDIA_FIELDS):
    """Fetch a media object's attributes; carousel albums get children inlined."""
    query = {
        "fields": "".join(fields),
        "access_token": token,
    }
    media = requests.get(f"{GRAPH_URL}/{media_id}", params=query).json()
    if media["media_type"] == "CAROUSEL_ALBUM":
        # Albums carry no meaningful top-level media_url; drop it and expose
        # the album's child images instead.
        del media["media_url"]
        media["album_images"] = get_album_images(token, media["id"])["data"]
    return media
def get_album_images(token, media_id, fields=ALBUM_IMAGE_FIELDS):
    """Fetch all child images of a carousel album media object."""
    query = {
        "fields": "".join(fields),
        "access_token": token,
    }
    return requests.get(f"{GRAPH_URL}/{media_id}/children", params=query).json()
# --------------------
# User
# --------------------
def get_user(token, api_version=API_VERSION, fields=USER_FIELDS):
    """Fetch the authenticated user's profile information."""
    query = {
        "fields": "".join(fields),
        "access_token": token,
    }
    return requests.get(f"{GRAPH_URL}/{api_version}/me", params=query).json()
def get_user_media(token, api_version=API_VERSION, fields=MEDIA_FIELDS):
    """Fetch the authenticated user's media list, expanding carousel albums."""
    query = {
        "fields": "".join(fields),
        "access_token": token,
    }
    media_list = requests.get(f"{GRAPH_URL}/{api_version}/me/media", params=query).json()
    for idx, media in enumerate(media_list["data"]):
        if media["media_type"] == "CAROUSEL_ALBUM":
            # Re-fetch albums through get_media so child images get attached.
            media_list["data"][idx] = get_media(token, media["id"])
    return media_list
|
import math
import torch
import torch.nn as nn
import numpy as np
from outside_index import get_outside_index, OutsideIndexCheck
from inside_index import get_inside_index, InsideIndexCheck
from inside_index import get_inside_index_unique
from offset_cache import get_offset_cache
from inside_index import get_inside_components
from outside_index import get_outside_components
from base_model import *
TINY = 1e-8
# Composition Functions
class ComposeMLP(nn.Module):
    """MLP composition function for DIORA cells.

    Composes two child representations (optionally plus one or two "side"
    inputs) into a parent representation of the same size via an n-layer MLP.
    When `leaf=True`, also owns a leaf transform mapping token embeddings to
    the hidden space.

    Fix: removed the dead `device = torch.cuda.current_device() ...` lines
    in leaf_transform() and forward() — their result was never used.
    """

    def __init__(self, size, activation, n_layers=2, leaf=False, side_1_size=None, side_2_size=None):
        super(ComposeMLP, self).__init__()
        self.size = size
        self.activation = activation
        self.n_layers = n_layers
        if leaf:
            # Leaf transform parameters (token embedding -> hidden state).
            self.V = nn.Parameter(torch.FloatTensor(self.size, self.size))
        # First MLP layer: concatenated children (2*size) -> size.
        self.W = nn.Parameter(torch.FloatTensor(2 * self.size, self.size))
        self.B = nn.Parameter(torch.FloatTensor(self.size))
        self.side_1_size = side_1_size
        if side_1_size is not None:
            self.W_side_1 = nn.Parameter(torch.FloatTensor(side_1_size, self.size))
        self.side_2_size = side_2_size
        if side_2_size is not None:
            self.W_side_2 = nn.Parameter(torch.FloatTensor(side_2_size, self.size))
        # Additional square layers for n_layers > 1.
        for i in range(1, n_layers):
            setattr(self, 'W_{}'.format(i), nn.Parameter(torch.FloatTensor(self.size, self.size)))
            setattr(self, 'B_{}'.format(i), nn.Parameter(torch.FloatTensor(self.size)))
        self.reset_parameters()

    @property
    def device(self):
        return next(self.parameters()).device

    @property
    def is_cuda(self):
        # CPU devices have index None; CUDA devices have a non-negative index.
        device = self.device
        return device.index is not None and device.index >= 0

    def reset_parameters(self):
        # Initialize every trainable parameter from a standard normal.
        params = [p for p in self.parameters() if p.requires_grad]
        for param in params:
            param.data.normal_()

    def leaf_transform(self, x, side=None):
        """Map token embeddings x (batch, size) to hidden states via tanh(xV + B).

        `side` is accepted for interface compatibility but unused.
        """
        return torch.tanh(torch.matmul(x, self.V) + self.B)

    def forward(self, hs, constant=1.0, side_1=None, side_2=None):
        """Compose the child states in `hs` (list of (batch, size) tensors).

        `constant` is accepted for interface compatibility but unused.
        """
        input_h = torch.cat(hs, 1)
        h = torch.matmul(input_h, self.W)
        if side_1 is not None:
            h = h + torch.matmul(side_1, self.W_side_1)
        if side_2 is not None:
            h = h + torch.matmul(side_2, self.W_side_2)
        h = self.activation(h + self.B)
        for i in range(1, self.n_layers):
            W = getattr(self, 'W_{}'.format(i))
            B = getattr(self, 'B_{}'.format(i))
            h = self.activation(torch.matmul(h, W) + B)
        return h
# Score Functions
class Bilinear(nn.Module):
    """Batched bilinear scoring function: score(v1, v2) = v1 @ mat @ v2^T."""

    def __init__(self, size_1, size_2=None):
        super(Bilinear, self).__init__()
        self.size_1 = size_1
        # Default to a square form when only one size is given.
        self.size_2 = size_2 or size_1
        self.mat = nn.Parameter(torch.FloatTensor(self.size_1, self.size_2))
        self.reset_parameters()

    def reset_parameters(self):
        # Standard-normal init for every trainable parameter.
        for param in (p for p in self.parameters() if p.requires_grad):
            param.data.normal_()

    def forward(self, vector1, vector2):
        """Return a (batch, 1) bilinear score for each pair of row vectors.

        vector1: (b, m), self.mat: (m, n), vector2: (b, n).
        """
        projected = torch.matmul(vector1, self.mat).unsqueeze(1)  # (b, 1, n)
        score = torch.matmul(projected, vector2.unsqueeze(2))     # (b, 1, 1)
        return score.view(-1, 1)
# Base
class DioraMLP(DioraBase):
    """DIORA variant using MLP composition and bilinear score functions.

    Relies on attributes provided by DioraBase (``size``, ``activation``,
    ``compress``) — defined in base_model.
    """
    K = 1
    def __init__(self, *args, **kwargs):
        # Capture n_layers before delegating; the base __init__ eventually
        # calls init_parameters(), which reads self.n_layers.
        self.n_layers = kwargs.get('n_layers', None)
        super(DioraMLP, self).__init__(*args, **kwargs)
    @classmethod
    def from_kwargs_dict(cls, context, kwargs_dict):
        # Alternate constructor; `context` is accepted for interface
        # compatibility but unused here.
        return cls(**kwargs_dict)
    def init_parameters(self):
        # Model parameters for transformation required at both input and output
        self.inside_score_func = Bilinear(self.size)
        self.outside_score_func = Bilinear(self.size)
        if self.compress:
            self.root_mat_out = nn.Parameter(torch.FloatTensor(self.size, self.size))
        else:
            self.root_vector_out_h = nn.Parameter(torch.FloatTensor(self.size))
            self.root_vector_out_c = None
        self.inside_compose_func = ComposeMLP(self.size, self.activation, n_layers=self.n_layers, leaf=True)
        self.outside_compose_func = ComposeMLP(self.size, self.activation, n_layers=self.n_layers)
|
from move_images.tasks import get_posts, update_post
# One-shot batch job: fetch every post and run update_post on each of them.
posts = get_posts()
for p in posts:
    update_post(p)
|
from dataclasses import dataclass
import numpy as np
import random
from typing import Dict, Mapping
from entity_gym.environment import (
CategoricalAction,
DenseCategoricalActionMask,
Environment,
CategoricalActionSpace,
ActionSpace,
EpisodeStats,
ObsSpace,
Observation,
Action,
)
from entity_gym.dataclass_utils import obs_space_from_dataclasses, extract_features
@dataclass
class Lava:
    # Grid position of a lava tile.
    x: float
    y: float
@dataclass
class HighGround:
    # Grid position of the single safe tile.
    x: float
    y: float
@dataclass
class Player:
    # Grid position of the player.
    x: float
    y: float
class FloorIsLava(Environment):
    """
    The player is surrounded by 8 tiles, 7 of which are lava and 1 of which is high ground.
    The player must move to one of the tiles.
    The player receives a reward of 1 if they move to the high ground, and 0 otherwise.

    NOTE(review): reset() actually samples between 1 and 8 of the neighbouring
    tiles and converts one of them to high ground, so there may be 0-7 lava
    tiles and the rest plain floor — the "7 lava" description above is the
    worst case; confirm the intended distribution.
    """
    @classmethod
    def obs_space(cls) -> ObsSpace:
        # Observation features derive from the three dataclasses' fields.
        return obs_space_from_dataclasses(Lava, HighGround, Player)
    @classmethod
    def action_space(cls) -> Dict[str, ActionSpace]:
        # Eight compass directions; this index order defines the action codes
        # interpreted in act().
        return {
            "move": CategoricalActionSpace(["n", "ne", "e", "se", "s", "sw", "w", "nw"])
        }
    def reset(self, obs_space: ObsSpace) -> Observation:
        # Spawn the player at a random position in a large square arena.
        width = 1000
        x = random.randint(-width, width)
        y = random.randint(-width, width)
        self.player = Player(x, y)
        # Pick a random subset (1-8) of the 8 neighbouring tiles as lava.
        self.lava = random.sample(
            [
                Lava(x + i, y + j)
                for i in range(-1, 2)
                for j in range(-1, 2)
                if not (i == 0 and j == 0)
            ],
            random.randint(1, 8),
        )
        # Convert one sampled tile into the (only) safe high ground.
        safe = random.randint(0, len(self.lava) - 1)
        self.high_ground = HighGround(self.lava[safe].x, self.lava[safe].y)
        self.lava.pop(safe)
        obs = self.observe(obs_space)
        return obs
    def _reset(self) -> Observation:
        return self.reset(FloorIsLava.obs_space())
    def act(self, action: Mapping[str, Action], obs_filter: ObsSpace) -> Observation:
        for action_name, a in action.items():
            assert isinstance(a, CategoricalAction) and action_name == "move"
            # a.actions[0][1] is the chosen index into
            # ["n", "ne", "e", "se", "s", "sw", "w", "nw"] (north = +y, east = +x).
            if a.actions[0][1] == 0:
                self.player.y += 1
            elif a.actions[0][1] == 1:
                self.player.y += 1
                self.player.x += 1
            elif a.actions[0][1] == 2:
                self.player.x += 1
            elif a.actions[0][1] == 3:
                self.player.y -= 1
                self.player.x += 1
            elif a.actions[0][1] == 4:
                self.player.y -= 1
            elif a.actions[0][1] == 5:
                self.player.y -= 1
                self.player.x -= 1
            elif a.actions[0][1] == 6:
                self.player.x -= 1
            elif a.actions[0][1] == 7:
                self.player.y += 1
                self.player.x -= 1
        # Single-step episode: the episode is done after one move.
        obs = self.observe(obs_filter, done=True)
        return obs
    def _act(self, action: Mapping[str, Action]) -> Observation:
        return self.act(
            action,
            FloorIsLava.obs_space(),
        )
    def observe(self, obs_filter: ObsSpace, done: bool = False) -> Observation:
        # Reward 1.0 only at episode end when the player stands on high ground.
        if (
            done
            and self.player.x == self.high_ground.x
            and self.player.y == self.high_ground.y
        ):
            reward = 1.0
        else:
            reward = 0.0
        return Observation(
            entities=extract_features(
                {
                    "Player": [self.player],
                    "Lava": self.lava,
                    "HighGround": [self.high_ground],
                },
                obs_filter,
            ),
            action_masks={
                # Only the single player entity (actor index 0) acts; all
                # eight moves are always available (mask=None).
                "move": DenseCategoricalActionMask(actors=np.array([0]), mask=None),
            },
            # NOTE(review): ids is hard-coded to 3 entries while the number of
            # entities varies with len(self.lava) — confirm downstream usage.
            ids=list(range(3)),
            reward=reward,
            done=done,
            end_of_episode_info=EpisodeStats(1, reward) if done else None,
        )
|
import pandas as pd
import matplotlib.pyplot as plt
import os
'''
© 2018 Aaron Penne
Taken from https://github.com/aaronpenne/data_visualization
Assumes the data is already normalized.
'''
def make_output_dir(output_name):
    """Create (if needed) and return the absolute path of an output directory.

    Parameters
    ----------
    output_name : str
        Directory name or path, relative or absolute.

    Returns
    -------
    str
        The resolved absolute path of the directory.
    """
    output_dir = os.path.realpath(output_name)
    # makedirs with exist_ok avoids the isdir/mkdir check-then-act race and
    # also creates any missing parent directories (os.mkdir would fail there).
    os.makedirs(output_dir, exist_ok=True)
    return output_dir
def dot_pair_plot(
    df,
    title,
    first_label,
    second_label,
    colors=('#FC8D62', '#65C2A5', '#C947F5'),
    line_color='gray'
):
    """Draw a dumbbell ("dot pair") plot of two value columns per row.

    Parameters
    ----------
    df : pandas.DataFrame
        Columns by position: 0 and 1 are the two (normalized) values,
        2 and 3 are their display labels.
    title : str
        Plot title drawn at the top-left corner.
    first_label, second_label : str
        Legend-style captions drawn below the title.
    colors : sequence of three colour strings, optional
        [0] first point, [1] second point, [2] merged point when the two
        values (nearly) coincide.  A tuple default replaces the original
        mutable list default (classic mutable-default-argument pitfall).
    line_color : str, optional
        Colour of the connecting line.

    Returns
    -------
    matplotlib.figure.Figure
    """
    fig, ax = plt.subplots(figsize=(8, 6), dpi=150)
    for i in df.index:
        x = [df.iloc[i, 0], df.iloc[i, 1]]
        y = [i, i]
        # Connecting line between the pair of values.
        plt.plot(x, y,
                 color=line_color,
                 linestyle='-',
                 linewidth=1)
        if (abs(x[0] - x[1]) < 1.0):
            # Values (nearly) overlap: draw both labels but a single merged
            # point in the third colour.
            plt.text(x[0]+4, y[0], df.iloc[i, 2] + ' ({})'.format(df.iloc[i, 0]), horizontalalignment='left', verticalalignment='center', weight='bold')
            plt.text(x[1]-4, y[1], df.iloc[i, 3] + ' ({})'.format(df.iloc[i, 1]), horizontalalignment='right', verticalalignment='center')
            plot_point(plt, df.iloc[i, 0], i, colors[2])
        elif x[0] > x[1]:
            plt.text(x[0]+4, y[0], df.iloc[i, 2] + ' ({})'.format(df.iloc[i, 0]), horizontalalignment='left', verticalalignment='center', weight='bold')
            plt.text(x[1]-4, y[1], df.iloc[i, 3] + ' ({})'.format(df.iloc[i, 1]), horizontalalignment='right', verticalalignment='center')
            plot_point(plt, df.iloc[i, 0], i, colors[0])
            plot_point(plt, df.iloc[i, 1], i, colors[1])
        else:
            # First value left of the second: mirror the label sides.
            plt.text(x[0]-4, y[0], df.iloc[i, 2] + ' ({})'.format(df.iloc[i, 0]), horizontalalignment='right', verticalalignment='center', weight='bold')
            plt.text(x[1]+4, y[1], df.iloc[i, 3] + ' ({})'.format(df.iloc[i, 1]), horizontalalignment='left', verticalalignment='center')
            plot_point(plt, df.iloc[i, 0], i, colors[0])
            plot_point(plt, df.iloc[i, 1], i, colors[1])
    # Hide the axes frame; keep only the x tick labels.
    for side in ['right', 'left', 'top', 'bottom']:
        ax.spines[side].set_visible(False)
    plt.ylim([-1, 13])
    plt.xlim([-50, 150])
    plt.xticks(range(0,101,10), color='gray')
    plt.yticks([])
    plt.text(-45, 12, title,
             horizontalalignment='left',
             size=16,
             weight='bold')
    plt.text(-45, 11, first_label,
             horizontalalignment='left',
             color=colors[1],
             size=14)
    plt.text(50, 11, second_label,
             horizontalalignment='left',
             color=colors[0],
             size=14)
    return fig
def plot_point(plt, x, index, color):
    """Draw a single filled circular marker at (x, index) via the given plot module."""
    marker_style = dict(
        color=color,
        linestyle='None',
        marker='o',
        markersize=7,
        fillstyle='full',
    )
    plt.plot(x, index, **marker_style)
def save_figure(fig, output_dir, filename, pad_inches=0.3):
    """Write *fig* to output_dir/filename at the figure's own DPI."""
    destination = os.path.join(output_dir, filename)
    fig.savefig(
        destination,
        dpi=fig.dpi,
        bbox_inches='tight',
        pad_inches=pad_inches,
    )
|
from twisted.web.server import Site, Request
from twisted.web.resource import Resource
from twisted.internet import reactor, endpoints
# Site root; child resources are attached below via putChild.
root = Resource()
class Foo(Resource):
    """Leaf resource that prints request attributes and returns a fixed body."""
    def render(self, request: Request): # $ requestHandler
        # Dump the raw body stream and cookie state for inspection.
        print(f"{request.content=}")
        print(f"{request.cookies=}")
        print(f"{request.received_cookies=}")
        return b"I am Foo" # $ HttpResponse
# Serve Foo at /foo (twisted child paths are bytes).
root.putChild(b"foo", Foo())
class Child(Resource):
    """Resource created per request path segment; *name* arrives as bytes."""
    def __init__(self, name):
        # Path segments arrive as bytes; decode once for display.
        self.name = name.decode("utf-8")
    def render_GET(self, request): # $ requestHandler
        return f"Hi, I'm child '{self.name}'".encode("utf-8") # $ HttpResponse
class Parent(Resource):
    """Resource that spawns a Child for any sub-path it is asked for."""
    def getChild(self, path, request): # $ requestHandler
        print(path, type(path))
        return Child(path)
    def render_GET(self, request): # $ requestHandler
        return b"Hi, I'm parent" # $ HttpResponse
root.putChild(b"parent", Parent())
if __name__ == "__main__":
    # Serve the resource tree on TCP port 8880 until interrupted.
    factory = Site(root)
    endpoint = endpoints.TCP4ServerEndpoint(reactor, 8880)
    endpoint.listen(factory)
    print("Will run on http://localhost:8880")
    reactor.run()
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Population analyses based on cclib data."""
import logging
import numpy
from .calculationmethod import Method
class Population(Method):
    """An abstract base class for population-type methods."""

    def __init__(self, data, progress=None,
                 loglevel=logging.INFO, logname="Log"):
        """Initialise a population analysis.

        Inputs:
            data -- parsed calculation data to analyse
            progress -- optional progress-reporting object
            loglevel -- logging verbosity (see the logging module)
            logname -- name for the logger object
        """
        # Call the __init__ method of the superclass.
        super(Population, self).__init__(data, progress, loglevel, logname)
        # Populated by partition(); holds per-fragment (per-atom) results.
        self.fragresults = None

    def __str__(self):
        """Return a string representation of the object."""
        return "Population"

    def __repr__(self):
        """Return a representation of the object."""
        return "Population"

    def partition(self, indices=None):
        """Sum the AO-level results into per-fragment (per-atom) results.

        Inputs:
            indices -- optional list of lists, each inner list holding the
                orbital indices belonging to one fragment; when omitted, the
                grouping is derived from aonames (or fonames) by taking the
                text before the first underscore as the fragment label.

        Stores the summed arrays (one per spin) in self.fragresults and
        returns True.
        """
        if not hasattr(self, "aoresults"):
            self.calculate()

        if not indices:
            # Build list of groups of orbitals in each atom for atomresults.
            if hasattr(self.data, "aonames"):
                names = self.data.aonames
            elif hasattr(self.data, "fonames"):
                names = self.data.fonames
            else:
                # Bug fix: this case previously fell through and raised an
                # opaque NameError on `names`; fail with a clear message.
                raise AttributeError(
                    "The parsed data contains neither aonames nor fonames, "
                    "so default fragment indices cannot be built.")

            atoms = []
            indices = []

            name = names[0].split('_')[0]
            atoms.append(name)
            indices.append([0])

            for i in range(1, len(names)):
                name = names[i].split('_')[0]
                try:
                    index = atoms.index(name)
                except ValueError:  # not found in atom list
                    atoms.append(name)
                    indices.append([i])
                else:
                    indices[index].append(i)

        natoms = len(indices)

        # Allocate one (nmocoeffs x natoms) result array per spin.
        alpha = len(self.aoresults[0])
        results = []
        results.append(numpy.zeros([alpha, natoms], "d"))
        if len(self.aoresults) == 2:
            beta = len(self.aoresults[1])
            results.append(numpy.zeros([beta, natoms], "d"))

        # For each spin, splice numpy array at ao index,
        # and add to correct result row.
        for spin in range(len(results)):
            for i in range(natoms):  # Number of groups.
                for j in range(len(indices[i])):  # For each group.
                    temp = self.aoresults[spin][:, indices[i][j]]
                    results[spin][:, i] = numpy.add(results[spin][:, i], temp)

        self.logger.info("Saving partitioned results in fragresults: [array[2]]")
        self.fragresults = results

        return True
if __name__ == "__main__":
    # Run this module's doctests; `population` here imports this very module
    # by its top-level name (requires the module to be importable from sys.path).
    import doctest, population
    doctest.testmod(population, verbose=False)
|
from django.test import TestCase
from authorization.views import LoginView, LoginRefreshView, RegisterView
class TestRegisterView(TestCase):
    """Checks the static configuration of RegisterView."""

    def setUp(self):
        self.view = RegisterView()

    def test_queryset_belongs_to_user_model(self):
        model_name = self.view.queryset.model.__name__
        self.assertEqual("User", model_name)

    def test_serializer_class_is_register_serializer(self):
        serializer_name = self.view.serializer_class.__name__
        self.assertEqual("RegisterSerializer", serializer_name)
class TestLoginView(TestCase):
    """Checks the static configuration of LoginView."""

    def setUp(self):
        self.view = LoginView()

    def test_serializer_class_is_login_serializer(self):
        serializer_name = self.view.serializer_class.__name__
        self.assertEqual("LoginSerializer", serializer_name)

    def test_permissions_classes_only_include_allowany(self):
        permission_names = [
            permission.__name__ for permission in self.view.permission_classes
        ]
        self.assertEqual(["AllowAny"], permission_names)

    def test_expected_allowed_methods(self):
        self.assertEqual(["POST", "OPTIONS"], self.view.allowed_methods)
class TestLoginRefreshView(TestCase):
    """Checks the static configuration of LoginRefreshView."""
    def setUp(self):
        self.view = LoginRefreshView()
    def test_serializer_class_is_login_refresh_serializer(self):
        self.assertEqual(
            "LoginRefreshSerializer", self.view.serializer_class.__name__
        )
    def test_permissions_classes_only_include_isauthenicated(self):
        # NOTE(review): the method name says "isauthenicated" (sic) but the
        # assertion expects "AllowAny" — confirm against the actual
        # LoginRefreshView.permission_classes which side is wrong.
        permissions_classes = self.view.permission_classes
        self.assertEqual(1, len(permissions_classes))
        self.assertEqual(
            "AllowAny", self.view.permission_classes[0].__name__
        )
    def test_expected_allowed_methods(self):
        self.assertEqual(["GET", "OPTIONS"], self.view.allowed_methods)
    def test_authentication_classes_only_includes_jwtauthentication(self):
        authentication_classes = self.view.authentication_classes
        self.assertEqual(1, len(authentication_classes))
        self.assertEqual(
            "JWTAuthentication", authentication_classes[0].__name__
        )
|
# https://github.com/rafa-acioly/animal_case
import re
def _unpack(data):
if isinstance(data, dict):
return data.items()
return data
def to_snake_case(value):
    """
    Convert a camel case string to snake case.

    :param value: string
    :return: string
    """
    # First pass splits e.g. "HTTPResponse" -> "HTTP_Response"; the second
    # pass handles lower/digit-to-upper boundaries like "getH" -> "get_H".
    partially_split = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', value)
    fully_split = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partially_split)
    return fully_split.lower()
def keys_to_snake_case(content):
    """
    Convert all keys of the given dict to snake case.

    :param content: dict
    :return: dict
    """
    converted = {}
    for key, value in _unpack(content):
        converted[to_snake_case(key)] = value
    return converted
def to_camel_case(value):
    """
    Convert the given snake case string to camel case.

    :param value: string
    :return: string
    """
    head, *tail = value.split('_')
    capitalised = [word.title() for word in tail if not word.isspace()]
    return head + ''.join(capitalised)
def keys_to_camel_case(content):
    """
    Convert all keys of the given dict to camel case.

    :param content: dict
    :return: dict
    """
    pairs = _unpack(dict(content))
    return {to_camel_case(key): value for key, value in pairs}
def animalify(*args, **kwargs):
    """Recursively convert all dict keys to snake or camel case.

    Accepted call forms:
        animalify(data)                  -- camel case (the default)
        animalify(data, 'snake')         -- positional conversion type
        animalify(data, types='snake')   -- keyword conversion type
        animalify(key=value, ...)        -- treat the kwargs themselves as data

    :param args: (data,) or (data, types)
    :param kwargs: either the data itself, optionally with a 'types' entry
    :return: dict/list with converted keys; any other input is returned as-is
    :raises ValueError: on more than two positional args or an unknown type
    :raises TypeError: when both data args and data kwargs are supplied
    """
    types = 'camel'
    if len(args) > 2:
        raise ValueError("Invalid number of arguments")
    if len(args) == 2:
        types = args[1]
    # A truthy 'types' kwarg overrides the positional one; a falsy value is
    # deliberately left in kwargs (matching the historical behaviour).
    if kwargs.get('types'):
        types = kwargs.pop('types')
    if types not in ('snake', 'camel'):
        raise ValueError("Invalid parse type, use snake or camel")
    # This check must run AFTER 'types' is removed, so that
    # animalify(data, types='snake') remains legal.
    if args and kwargs:
        raise TypeError('animalify() behavior undefined when passed both args and kwargs')
    data = args[0] if args else kwargs

    # isinstance (rather than an exact type() comparison, as previously used)
    # also accepts dict/list subclasses such as OrderedDict.
    if isinstance(data, dict):
        formatter = keys_to_snake_case if types == 'snake' else keys_to_camel_case
        formatted = {}
        for key, value in _unpack(formatter(data)):
            if isinstance(value, (dict, list)):
                formatted[key] = animalify(value, types)
            else:
                formatted[key] = value
        return formatted
    if isinstance(data, list):
        # animalify() passes scalars through unchanged, so mapping it over
        # every element reproduces the original per-item dispatch.
        return [animalify(item, types) for item in data]
    return data
|
import _plotly_utils.basevalidators
class SlidersValidator(_plotly_utils.basevalidators.CompoundArrayValidator):
    """Validator for the `layout.sliders` compound-array property.

    Auto-generated; `data_docs` below is the user-facing description of each
    Slider attribute and is forwarded verbatim to the base validator.
    """
    def __init__(self, plotly_name='sliders', parent_name='layout', **kwargs):
        # Delegate all validation to CompoundArrayValidator; this subclass
        # only supplies the property name, data class and documentation text.
        super(SlidersValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str='Slider',
            data_docs="""
            active
                Determines which button (by index starting from
                0) is considered active.
            activebgcolor
                Sets the background color of the slider grip
                while dragging.
            bgcolor
                Sets the background color of the slider.
            bordercolor
                Sets the color of the border enclosing the
                slider.
            borderwidth
                Sets the width (in px) of the border enclosing
                the slider.
            currentvalue
                plotly.graph_objs.layout.slider.Currentvalue
                instance or dict with compatible properties
            font
                Sets the font of the slider step labels.
            len
                Sets the length of the slider This measure
                excludes the padding of both ends. That is, the
                slider's length is this length minus the
                padding on both ends.
            lenmode
                Determines whether this slider length is set in
                units of plot *fraction* or in *pixels. Use
                `len` to set the value.
            minorticklen
                Sets the length in pixels of minor step tick
                marks
            name
                When used in a template, named items are
                created in the output figure in addition to any
                items the figure already has in this array. You
                can modify these items in the output figure by
                making your own item with `templateitemname`
                matching this `name` alongside your
                modifications (including `visible: false` or
                `enabled: false` to hide it). Has no effect
                outside of a template.
            pad
                Set the padding of the slider component along
                each side.
            steps
                plotly.graph_objs.layout.slider.Step instance
                or dict with compatible properties
            templateitemname
                Used to refer to a named item in this array in
                the template. Named items from the template
                will be created even without a matching item in
                the input figure, but you can modify one by
                making an item with `templateitemname` matching
                its `name`, alongside your modifications
                (including `visible: false` or `enabled: false`
                to hide it). If there is no template or no
                matching item, this item will be hidden unless
                you explicitly show it with `visible: true`.
            tickcolor
                Sets the color of the border enclosing the
                slider.
            ticklen
                Sets the length in pixels of step tick marks
            tickwidth
                Sets the tick width (in px).
            transition
                plotly.graph_objs.layout.slider.Transition
                instance or dict with compatible properties
            visible
                Determines whether or not the slider is
                visible.
            x
                Sets the x position (in normalized coordinates)
                of the slider.
            xanchor
                Sets the slider's horizontal position anchor.
                This anchor binds the `x` position to the
                *left*, *center* or *right* of the range
                selector.
            y
                Sets the y position (in normalized coordinates)
                of the slider.
            yanchor
                Sets the slider's vertical position anchor This
                anchor binds the `y` position to the *top*,
                *middle* or *bottom* of the range selector.""",
            **kwargs
        )
|
#!/usr/bin/env python
# Class autogenerated from /home/sam/Downloads/aldebaran_sw/nao/naoqi-sdk-2.1.4.13-linux64/include/alproxies/alautonomouslifeproxy.h
# by Sammy Pfeiffer's <Sammy.Pfeiffer at student.uts.edu.au> generator
# You need an ALBroker running
from naoqi import ALProxy
class ALAutonomousLife(object):
    """Lazily-connecting wrapper for the NAOqi "ALAutonomousLife" service.

    Each public method resolves the service proxy on first use (see
    _ensure_proxy) and forwards the call to it, so constructing this wrapper
    is cheap and does not require a running service.
    """

    def __init__(self, session):
        """Store the qi session; the service proxy is resolved lazily."""
        self.session = session
        self.proxy = None

    def _ensure_proxy(self):
        """Return the service proxy, connecting to the session on first use."""
        if not self.proxy:
            self.proxy = self.session.service("ALAutonomousLife")
        return self.proxy

    def force_connect(self):
        """Unconditionally (re)acquire the service proxy."""
        self.proxy = self.session.service("ALAutonomousLife")

    def focusedActivity(self):
        """Returns the currently focused activity
        :returns str: The name of the focused activity
        """
        return self._ensure_proxy().focusedActivity()

    def getActivityNature(self, activity_name):
        """Returns the nature of an activity
        :param str activity_name: The package_name/activity_name to check
        :returns str: Possible values are: solitary, interactive
        """
        return self._ensure_proxy().getActivityNature(activity_name)

    def getActivityStatistics(self):
        """Get launch count, last completion time, etc for activities.
        :returns std::map<std::string , std::map<std::string , int> >: A map of activity names, with a cooresponding map of "prevStartTime", "prevCompletionTime", "startCount", "totalDuration". Times are 0 for unlaunched Activities
        """
        return self._ensure_proxy().getActivityStatistics()

    def getAutonomousActivityStatistics(self):
        """Get launch count, last completion time, etc for activities with autonomous launch trigger conditions.
        :returns std::map<std::string , std::map<std::string , int> >: A map of activity names, with a cooresponding map of "prevStartTime", "prevCompletionTime", "startCount", "totalDuration". Times are 0 for unlaunched Activities
        """
        return self._ensure_proxy().getAutonomousActivityStatistics()

    def getEnabledLaunchpadPlugins(self):
        """Get a list of enabled AutonomousLaunchpad Plugins. Enabled plugins will run when AutonomousLaunchpad is started
        :returns std::vector<std::string>: A list of strings of enabled plugins.
        """
        return self._ensure_proxy().getEnabledLaunchpadPlugins()

    def getFocusHistory(self):
        """Get a list of the order that activities that have been focused, and their time focused.
        :returns std::vector<std::pair<std::string , int> >: A list of pairs, each pair is ActivityName/PreviousFocusedTime
        """
        return self._ensure_proxy().getFocusHistory()

    def getFocusHistory2(self, depth):
        """Get a list of the order that activities that have been focused, and their time focused.
        :param int depth: How many items of history to report, starting from most recent.
        :returns std::vector<std::pair<std::string , int> >: A list of pairs, each pair is ActivityName/PreviousFocusedTime
        """
        # Forwards to the depth overload of the remote getFocusHistory.
        return self._ensure_proxy().getFocusHistory(depth)

    def getLaunchpadPluginsForGroup(self, group):
        """Get a list of AutonomousLaunchpad Plugins that belong to specified group
        :param str group: The group to search for the plugins
        :returns std::vector<std::string>: A list of strings of the plugins belonging to the group.
        """
        return self._ensure_proxy().getLaunchpadPluginsForGroup(group)

    def getLifeTime(self):
        """Get the time in seconds as life sees it. Based on gettimeofday()
        :returns int: The int time in seconds as Autonomous Life sees it
        """
        return self._ensure_proxy().getLifeTime()

    def getRobotOffsetFromFloor(self):
        """Get the vertical offset (in meters) of the base of the robot with respect to the floor
        :returns float: Current vertical offset (in meters)
        """
        return self._ensure_proxy().getRobotOffsetFromFloor()

    def getState(self):
        """Returns the current state of AutonomousLife
        :returns str: Can be: solitary, interactive, safeguard, disabled
        """
        return self._ensure_proxy().getState()

    def getStateHistory(self):
        """Get a list of the order that states that have been entered, and their time entered.
        :returns std::vector<std::pair<std::string , int> >: A list of pairs, each pair is StateName/PreviousEnteredTime
        """
        return self._ensure_proxy().getStateHistory()

    def getStateHistory2(self, depth):
        """Get a list of the order that states that have been entered, and their time entered.
        :param int depth: How many items of history to report, starting from most recent.
        :returns std::vector<std::pair<std::string , int> >: A list of pairs, each pair is StateName/PreviousEnteredTime
        """
        # Forwards to the depth overload of the remote getStateHistory.
        return self._ensure_proxy().getStateHistory(depth)

    def isMonitoringLaunchpadConditions(self):
        """Gets running status of AutonomousLaunchpad
        :returns bool: True if AutonomousLaunchpad is monitoring ALMemory and reporting conditional triggers.
        """
        return self._ensure_proxy().isMonitoringLaunchpadConditions()

    def isSafeguardEnabled(self, name):
        """Get if a given safeguard will be handled by Autonomous Life or not.
        :param str name: Name of the safeguard to consider: RobotPushed, RobotFell,CriticalDiagnosis, CriticalTemperature
        :returns bool: True if life handles the safeguard.
        """
        return self._ensure_proxy().isSafeguardEnabled(name)

    def ping(self):
        """Just a ping. Always returns true
        :returns bool: returns true
        """
        return self._ensure_proxy().ping()

    def setLaunchpadPluginEnabled(self, plugin_name, enabled):
        """Temporarily enables/disables AutonomousLaunchpad Plugins
        :param str plugin_name: The name of the plugin to enable/disable
        :param bool enabled: Whether or not to enable this plugin
        """
        return self._ensure_proxy().setLaunchpadPluginEnabled(plugin_name, enabled)

    def setRobotOffsetFromFloor(self, offset):
        """Set the vertical offset (in meters) of the base of the robot with respect to the floor
        :param float offset: The new vertical offset (in meters)
        """
        return self._ensure_proxy().setRobotOffsetFromFloor(offset)

    def setSafeguardEnabled(self, name, enabled):
        """Set if a given safeguard will be handled by Autonomous Life or not.
        :param str name: Name of the safeguard to consider: RobotPushed, RobotFell,CriticalDiagnosis, CriticalTemperature
        :param bool enabled: True if life handles the safeguard.
        """
        return self._ensure_proxy().setSafeguardEnabled(name, enabled)

    def setState(self, state):
        """Programatically control the state of Autonomous Life
        :param str state: The possible states of AutonomousLife are: interactive, solitary, safeguard, disabled
        """
        return self._ensure_proxy().setState(state)

    def startMonitoringLaunchpadConditions(self):
        """Start monitoring ALMemory and reporting conditional triggers with AutonomousLaunchpad.
        """
        return self._ensure_proxy().startMonitoringLaunchpadConditions()

    def stopAll(self):
        """Stops the focused activity and clears stack of activities
        """
        return self._ensure_proxy().stopAll()

    def stopFocus(self):
        """Stops the focused activity. If another activity is stacked it will be started.
        """
        return self._ensure_proxy().stopFocus()

    def stopMonitoringLaunchpadConditions(self):
        """Stop monitoring ALMemory and reporting conditional triggers with AutonomousLaunchpad.
        """
        return self._ensure_proxy().stopMonitoringLaunchpadConditions()

    def switchFocus(self, activity_name, flags):
        """Set an activity as running with user focus
        :param str activity_name: The package_name/activity_name to run
        :param int flags: Flags for focus changing. STOP_CURRENT or STOP_AND_STACK_CURRENT
        """
        return self._ensure_proxy().switchFocus(activity_name, flags)

    def switchFocus2(self, activity_name):
        """Set an activity as running with user focus
        :param str activity_name: The package_name/activity_name to run
        """
        # Forwards to the single-argument overload of the remote switchFocus.
        return self._ensure_proxy().switchFocus(activity_name)

    def version(self):
        """Returns the version of the module.
        :returns str: A string containing the version of the module.
        """
        return self._ensure_proxy().version()
|
import numpy as np
import torch
import matplotlib.pyplot as plt
import numba as nb
def parity_mapping(state, parity):
    '''
    Apply a parity mapping to a photon-number state.

    Args:
        state: iterable with the number of photons in each mode.
        parity: parity offset (0 or 1) added before taking modulo 2.
    Returns:
        a list of bits, one per mode.
    '''
    mapped_bits = []
    for photons in state:
        mapped_bits.append((photons + parity) % 2)
    return mapped_bits
|
import transmitm
import pytest
def test_api():
    """Test imported names"""
    expected_names = (
        '__version__',
        'Dispatcher',
        'Tap',
        'TCPProxy',
        'UDPProxy',
        'Proxy',
    )
    assert all(hasattr(transmitm, name) for name in expected_names) is True
def test_tap_bad():
    """Tap subclasses should define a handle method"""
    class BadTap(transmitm.Tap):
        pass
    # Abstract 'handle' is left unimplemented, so instantiation must fail.
    with pytest.raises(TypeError, match="Can't instantiate abstract class*"):
        BadTap()
def test_tap_api():
    # The abstract base class itself must expose the handler hook.
    assert hasattr(transmitm.Tap, 'handle') is True
def test_proxy_bad():
    """Proxy subclasses should define a spawn method"""
    class BadProxy(transmitm.Proxy):
        pass
    # Abstract 'spawn' is left unimplemented, so instantiation must fail.
    with pytest.raises(TypeError, match="Can't instantiate abstract class*"):
        BadProxy()
def test_proxy_api():
    # The abstract base class itself must expose the spawn hook.
    assert hasattr(transmitm.Proxy, 'spawn') is True
def test_dispatcher_instance():
    """Dispatcher should not be instantiable"""
    with pytest.raises(TypeError,
                       match="Dispatcher class cannot be instantiated"):
        transmitm.Dispatcher()
|
import pytest
PREDICTION_TEXT = "This is a test prediction. If a finetune model predicts on this we may get an empty list as result."
@pytest.fixture(scope="module")
def model_group(indico):
    """Return the first fully trained, up-to-date model group of the user."""
    results = indico.model_groups()
    try:
        # A usable group must be trained (COMPLETE), not awaiting a retrain,
        # and have a concrete selected model id.
        return next(
            result
            for result in results
            if not result["retrainRequired"]
            and result["status"] == "COMPLETE"
            and result.get_selected_model().get("id")
        )
    except StopIteration:
        # No group satisfied all three conditions.
        raise AssertionError(
            "The authenticated user does not have a successfully trained model"
        )
def test_model_group_predict(model_group):
    """Predicting on a single document should yield a one-element list."""
    result = model_group.predict([PREDICTION_TEXT])
    # TODO: Break this test by task_type and have saved model groups for these tests. this will require a test user api token.
    assert isinstance(result, list)
    assert len(result) == 1
def test_model_group_info(model_group):
    """info() should return a dict carrying the core metadata keys."""
    info = model_group.info()
    assert isinstance(info, dict)
    for expected_key in ("class_counts", "class_names", "metrics"):
        assert expected_key in info
def test_model_group_load(model_group):
    """
    TODO: Ensure this test passes with Finetune model
    """
    result = model_group.load()
    assert result == "ready"
def test_model_group_predict_with_model_id(model_group):
    """Predicting with an explicit model_id should behave like plain predict."""
    selected_id = model_group.get_selected_model().get("id")
    predictions = model_group.predict([PREDICTION_TEXT], model_id=selected_id)
    assert isinstance(predictions, list)
    assert len(predictions) == 1
|
from math import tan, pi
def polysum(n: int, s: int) -> float:
    """
    n: number of sides (must be >= 3 for a valid polygon),
    s: length of sides (must be non-negative)
    Return the sum of the area and square of the perimeter
    of the regular polygon, rounded to 4 decimal places.

    Raises:
        ValueError: if n < 3 or s < 0, which would not describe a polygon.
    """
    if n < 3:
        raise ValueError("A polygon needs at least 3 sides, got {}".format(n))
    if s < 0:
        raise ValueError("Side length must be non-negative, got {}".format(s))
    # Area of a regular n-gon with side s: n*s^2 / (4*tan(pi/n)).
    area = (0.25 * n * s**2)/(tan(pi/n))
    perimeter = n * s
    return round(area + perimeter**2, 4)
|
"""
The :mod:`fatf.utils.data.segmentation` module implements image segmenters.
.. versionadded:: 0.1.1
"""
# Author: Kacper Sokol <k.sokol@bristol.ac.uk>
# License: new BSD
# pylint: disable=too-many-lines
from numbers import Number
from typing import List, Optional, Tuple, Union
import abc
import logging
import warnings
import numpy as np
from fatf.exceptions import IncorrectShapeError
import fatf.utils.array.validation as fuav
try:
import skimage.color as ski_colour
import skimage.segmentation as ski_segmentation
except ImportError:
raise ImportError(
'scikit-image Python package is not installed on your system. '
'You must install it in order to use the fatf.utils.data.segmentation '
'functionality. '
'One possibility is to install scikit-image alongside this package '
'via auxiliary dependencies with: pip install fat-forensics[all].')
try:
from PIL import Image, ImageFont, ImageDraw
except ImportError:
raise ImportError(
'PIL Python package is not installed on your system. '
'You must install it in order to use the fatf.utils.data.segmentation '
'functionality. '
'One possibility is to install PIL alongside this package via '
'auxiliary dependencies with: pip install fat-forensics[all].')
__all__ = ['get_segment_mask',
'Segmentation',
'Slic',
'QuickShift'] # yapf: disable
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
RGBcolour = Tuple[int, int, int]
def _validate_image_array(image: np.ndarray, image_name: str) -> bool:
    """
    Checks whether a numpy array has properties expected of images.
    A numpy representation of an image should be a non-structured, 2- or
    3-dimensional, integer-valued array with elements in the 0--255 range.
    Parameters
    ----------
    image : numpy.ndarray
        A 2- or 3-dimensional numpy array representing an image.
    image_name : string
        A name of the validated array to be used in error messages.
    Raises
    ------
    IncorrectShapeError
        The input ``image`` is neither a 2- nor 3-dimensional numpy array.
    TypeError
        The input ``image`` is either a structured numpy array or it is not a
        numerical array.
    ValueError
        The elements of the input ``image`` are not integers in
        the 0--255 range.
    Returns
    -------
    is_valid : boolean
        Indicates whether the input ``image`` is a valid numpy array.
    """
    is_valid = False
    # Internal-use precondition: a bad name is a programming error, not user input.
    assert (isinstance(image_name, str)
            and image_name), 'image_name must be a non-empty string.'
    # Validate the image.
    # NOTE(review): the check order (structure, dtype, value range, shape)
    # determines which exception fires first — presumably part of the
    # contract; confirm before reordering.
    if fuav.is_structured_array(image):
        raise TypeError(('The input {} must not be a structured '
                         'numpy array.').format(image_name))
    if not fuav.is_numerical_array(image):
        raise TypeError(
            'The input {} must be of a numerical type.'.format(image_name))
    # Ensure that we are dealing with integers within the 0--255 range
    _image_is_int = image.dtype.kind in 'iu'
    _image_min, _image_max = image.min(), image.max()
    if _image_min < 0 or _image_max > 255 or not _image_is_int:
        raise ValueError(('The numpy representation of the input {} '
                          'should have its values (integers) between the '
                          '0--255 range.').format(image_name))
    # Ensure 2- or 3-dimensional
    _image_in_shape = len(image.shape) in (2, 3)
    if not _image_in_shape:
        raise IncorrectShapeError(
            'The input {} must be a 2- or 3-dimensional numpy array.'.format(
                image_name))
    is_valid = True
    return is_valid
def _validate_input(image: np.ndarray,
                    segmentation_mask: Union[None, np.ndarray]) -> bool:
    """
    Validates the input parameters of a segmentation class.
    For the description of the input parameters and exceptions raised by this
    function, please see the documentation of the
    :func:`~fatf.utils.data.segmentation._validate_image_array` function.
    Raises
    ------
    IncorrectShapeError
        The width and height of ``image`` and ``segmentation_mask`` do not
        agree.
    Returns
    -------
    is_valid : boolean
        ``True`` if input is valid, ``False`` otherwise.
    """
    is_valid = False
    # Both arrays must individually be valid images before comparing shapes.
    assert _validate_image_array(image, 'image'), 'image is invalid.'
    if segmentation_mask is not None:
        assert _validate_image_array(
            segmentation_mask,
            'image segmentation mask'), 'image segmentation mask is invalid.'
        # Check shape
        # Only the first two axes (height, width) must match; the mask may
        # differ from the image in the colour-channel axis.
        if image.shape[:2] != segmentation_mask.shape[:2]:
            raise IncorrectShapeError(
                'The width and height of the input image and the segmentation '
                'mask do not agree.')
    is_valid = True
    return is_valid
def _validate_segmentation(segments: np.ndarray, image: np.ndarray) -> bool:
    """
    Checks whether a segmentation array is valid.
    A numpy representation of a segmentation of an image should be
    a non-structured, 2-dimensional, integer-valued array with a continuous
    sequence of unique elements starting at 1.
    Parameters
    ----------
    segments : numpy.ndarray
        A 2-dimensional numpy array representing a segmentation.
    image : numpy.ndarray
        A 2- or 3-dimensional numpy array representing an image.
    Raises
    ------
    IncorrectShapeError
        The ``segments`` array is not 2-dimensional.
        The the height or width the ``segments`` array does not agree with
        the dimensions of the segmented image.
    TypeError
        The ``segments`` array is either a structured numpy array or
        it is not an integer-valued array.
    ValueError
        The unique elements of the ``segments`` array do not form a continuous
        sequence starting at 1.
    Returns
    -------
    is_valid : boolean
        Indicates whether the ``segments`` array is valid.
    """
    is_valid = False
    # Validate image
    assert _validate_image_array(image, 'image'), 'image is invalid.'
    # Validate segments
    if fuav.is_structured_array(segments):
        raise TypeError('The segmentation array must not be a structured '
                        'numpy array.')
    if not fuav.is_2d_array(segments):
        raise IncorrectShapeError('The segmentation array must be a 2-'
                                  'dimensional numpy array.')
    _segments_is_int = segments.dtype.kind in 'iu'
    if not fuav.is_numerical_array(segments) or not _segments_is_int:
        raise TypeError('The segmentation array must be of integer type.')
    _segments_min, _segments_max = segments.min(), segments.max()
    _segments_unique = np.unique(segments)
    # np.unique returns the sorted distinct values, so for integers they form
    # a continuous sequence iff their count equals the span of the range.
    # (This replaces the original O(range * unique) per-value membership loop.)
    _segments_is_continuous = (
        _segments_unique.size == int(_segments_max) - int(_segments_min) + 1)
    if _segments_min != 1 or not _segments_is_continuous:
        raise ValueError('The segmentation array should encode unique '
                         'segments with a continuous sequence of integers '
                         'starting at 1.')
    # Check shape
    if segments.shape[:2] != image.shape[:2]:
        raise IncorrectShapeError(
            'The width and height of the segmentation array and '
            'the input image do not agree.')
    is_valid = True
    return is_valid
def _validate_colour(colour: Union[None, RGBcolour]) -> bool:
    """
    Validates RGB colour triplet.
    ``colour`` must either be ``None`` or a 3-tuple of integers within the
    0--255 range.
    Parameters
    ----------
    colour : tuple(integer, integer, integer) or None
        RGB colour triplet.
    Raises
    ------
    TypeError
        The ``colour`` parameter is neither a tuple nor a ``None``;
        or one of its elements is not an integer.
    ValueError
        The ``colour`` parameter is not a 3-tuple, one of its elements
        is outside of the 0--255 range.
    Returns
    -------
    is_valid : boolean
        Indicates whether the ``colour`` RGB triplet is valid.
    """
    is_valid = False
    # ``None`` is explicitly allowed and means "use the default colour".
    if colour is not None:
        if not isinstance(colour, tuple):
            # Fixed typo in the user-facing message: 'thriplet' -> 'triplet'.
            raise TypeError('The colour must either be None or a triplet '
                            'representing an RGB colour.')
        if len(colour) != 3:
            raise ValueError('The colour tuple must be a triplet.')
        for i in colour:
            if not isinstance(i, int):
                raise TypeError(
                    'Each element of the colour tuple must be an integer.')
            if i < 0 or i > 255:
                raise ValueError('Each RGB value must be between 0 and 255.')
    is_valid = True
    return is_valid
class Segmentation(abc.ABC):
"""
An abstract class implementing image segmentation functionality.
.. versionadded:: 0.1.1
An abstract class that all segmentation classes should inherit from.
It contains an abstract ``_segment`` method to be implemented by
individual segmenters.
This methods should return a 2-dimensional numpy array assigning each
pixel of an image to a segment by using unique integers from a sequence
starting at 1.
The ``kwargs`` attribute can be used to collect optional parameters
upon initialising this class that can be used within the ``_segment``
method.
This class is designed for images represented as numpy arrays with their
values in the 0--255 range:
- 2-dimensional arrays for grayscale (0--255 range) and
black-and-white (0 and 255 valued) images; and
- 3-dimensional arrays for colour images.
The segmentation stored by this class can be overwritten either with the
``set_segments`` method or by directly setting the ``segments`` attribute,
both of which will perform the necessary validation.
Parameters
----------
image : numpy.ndarray
A numpy array representing an image to be segmented.
segmentation_mask : numpy.ndarray, optional (default=None)
A numpy array representing an image to be used for generating the
segmentation. If this parameter is not provided, the ``image`` will
be used to generate the segmentation.
**kwargs : dictionary
A list of named parameters saved to the ``kwargs`` attribute,
which can be used to pass configuration options to the ``_segment``
method.
Warns
-----
UserWarning
Inform the user that only a single segment was found.
Raises
------
IncorrectShapeError
The input ``image`` is neither a 2- nor 3-dimensional numpy array.
The width and height of ``image`` and ``segmentation_mask`` do not
agree.
The segmentation array is not 2-dimensional.
        The height or width of the segmentation array does not agree with
        the dimensions of the segmented image.
RuntimeError
A black-and-white image does not use 0 as black and 1 or 255 as white.
TypeError
The input ``image`` is either a structured numpy array or it is not a
numerical array.
The segmentation array is either a structured numpy array or
it is not an integer-valued array.
ValueError
The elements of the input ``image`` are not integers in
the 0--255 range.
The unique elements of the segmentation array do not form a continuous
sequence starting at 1.
Attributes
----------
image : numpy.ndarray
A numpy array representing an image to be segmented.
segmentation_mask : numpy.ndarray
A numpy array representing an image used to perform segmentation.
is_rgb : boolean
Indicates whether the ``image`` is RGB or black-and-white.
kwargs : dictionary
A list of named parameters stored as a dictionary;
it is used to pass configuration options to the ``_segment`` method.
segments : numpy.ndarray
A 2-dimensional numpy array representing segmentation of the ``image``.
segments_number : integer
The number of segments.
"""
GRAYSCALE_TRANSFORMATION = np.asarray([0.2989, 0.5870, 0.1140])
def __init__(self,
image: np.ndarray,
segmentation_mask: Optional[np.ndarray] = None,
**kwargs):
"""Constructs a ``Segmentation`` abstract class."""
assert _validate_input(image, segmentation_mask), 'Invalid input.'
# The image and the segmentation mask in numpy representation
self.image = image.copy() # (np.array(image) * 255).astype(np.uint8)
if segmentation_mask is None:
self.segmentation_mask = self.image.copy()
else:
self.segmentation_mask = segmentation_mask.copy()
# Check whether the image is RGB, greyscale or black-and-white
self.is_rgb = len(self.image.shape) == 3
# If {0, 1} black-and-white, scale to {0, 255}
if not self.is_rgb:
# For the image
_unique_intensities = set(np.unique(self.image))
_unique_intensities_n = len(_unique_intensities)
if _unique_intensities_n in (1, 2):
logger.info('Assuming a black-and-white image.')
if 1 in _unique_intensities:
logger.info('Rescale 0/1 black-and-white image to 0/255.')
_bnw_mask = (self.image == 1)
self.image[_bnw_mask] = 255
if _unique_intensities.difference((0, 1, 255)):
raise RuntimeError('Black-and-white images must use 0 as '
'black and 1 or 255 as white.')
# Repeat the same for the mask
if len(self.segmentation_mask.shape) != 3:
_unique_intensities = set(np.unique(self.segmentation_mask))
_unique_intensities_n = len(_unique_intensities)
if _unique_intensities_n in (1, 2):
logger.info('Assuming a black-and-white segmentation mask.')
print(_unique_intensities, _unique_intensities_n)
if 1 in _unique_intensities:
logger.info('Rescale 0/1 black-and-white segmentation '
'mask to 0/255.')
_bnw_mask = (self.segmentation_mask == 1)
self.segmentation_mask[_bnw_mask] = 255
if _unique_intensities.difference((0, 1, 255)):
raise RuntimeError(
'Black-and-white segmentation masks must use 0 as '
'black and 1 or 255 as white.')
# Memorise optional arguments used for the _segment method
self.kwargs = kwargs
# Segments map
self._segments = self._segment()
assert _validate_segmentation(self._segments,
self.image), 'Invalid segments.'
# Number of segments
self.segments_number = np.unique(self._segments).shape[0]
if self.segments_number == 1:
warnings.warn(
'The segmentation returned only **one** segment. '
'Consider tweaking the parameters to generate a reasonable '
'segmentation.', UserWarning)
@abc.abstractmethod
def _segment(self) -> np.ndarray:
"""
Segments ``self.image``.
This methods must be implemented with the desired segmentation
algorithm.
It should return a two-dimensional numpy array whose shape corresponds
to the width and height of ``self.image`` assigning a segment id to
each of its pixels.
The segment ids should start at 1 and be a continuous series of
integers.
Use the ``self.kwargs`` dictionary to pass (optional) configuration
parameters to the segmentation function.
Raises
------
NotImplementedError
Raised when the ``_segment`` method is not overwritten by the child
class.
Returns
-------
segmentation : numpy.ndarray
A two-dimensional numpy array encoding segment id for each pixel
of the segmented image.
"""
raise NotImplementedError( # pragma: nocover
'Overwrite this method with your implementation of a bespoke '
'segmentation algorithm.')
# pylint: disable=unreachable
segmentation = None # Use self.kwargs # pragma: nocover
return segmentation # pragma: nocover
@property
def segments(self) -> np.ndarray:
"""Retrieves the segments."""
return self._segments
@segments.setter
def segments(self, segments: np.ndarray):
"""Setups the segments manually."""
assert _validate_segmentation(segments, self.image), 'Bad segments.'
if np.unique(segments).shape[0] == 1:
warnings.warn('The segmentation has only **one** segment.',
UserWarning)
self._segments = segments
def set_segments(self, segments: np.ndarray):
"""
Manually overwrites the segmentation with custom ``segments``.
``segments`` must be a non-structured, 2-dimensional, integer-valued
numpy array with a continuous sequence of unique elements starting
at 1, which indicate the segment assignment of each pixel.
The dimension of ``segments`` must agree with the width and height of
the segmented image.
.. note::
The same can be achieved by directly setting the ``self.segments``
with ``my_segmenter.segments = segments``.
(A dedicated *setter* method takes care of validating the
correctness of ``segments``.)
Parameters
----------
segments : numpy.ndarray
A 2-dimensional numpy array representing a segmentation.
Raises
------
IncorrectShapeError
The ``segments`` array is not 2-dimensional.
The the height or width the ``segments`` array does not agree with
the dimensions of the segmented image.
TypeError
The ``segments`` array is either a structured numpy array or
it is not an integer-valued array.
ValueError
The unique elements of the ``segments`` array do not form a
continuous sequence starting at 1.
"""
assert _validate_segmentation(segments, self.image), 'Bad segments.'
if np.unique(segments).shape[0] == 1:
warnings.warn('The segmentation has only **one** segment.',
UserWarning)
self._segments = segments
def mark_boundaries(self,
mask: bool = False,
image: Optional[np.ndarray] = None,
colour: Optional[RGBcolour] = None) -> np.ndarray:
"""
Marks segment boundaries atop the image used to initialise this class.
The boundaries can either be overlaid on top of the image or
segmentation mask (``mask=True``) used to initialise this class.
Alternatively, an external ``image`` of the same dimensions can be
supplied.
.. note::
If the image is grayscale, it will be converted to RGB to display
the segment boundaries.
Parameters
----------
mask : boolean, optional (default=False)
If ``True``, plot the segment boundaries on top of
the segmentation mask;
if ``False``, plot the segment boundaries atop the image.
image : numpy.ndarray, optional (default=None)
If provided, the segment boundaries will be overlaid atop this
``image`` instead of the one used to initialise this segmenter.
colour : tuple(integer, integer, integer), optional (default=None)
If provided, the segment boundaries will be plotted with this
RGB colour.
Raises
------
IncorrectShapeError
The the height or width the ``image`` array does not agree with
the dimensions of the class image.
TypeError
The ``mask`` parameter is not a boolean.
The ``colour`` parameter is neither a tuple nor a ``None``;
or one of its elements is not an integer.
ValueError
The ``colour`` parameter is not a 3-tuple, one of its elements
is outside of the 0--255 range.
Returns
-------
marked_image : numpy.ndarray
A numpy array holding the image with overlaid segment boundaries.
"""
assert self._segments is not None, 'The segmenter was not initialised.'
if not isinstance(mask, bool):
raise TypeError('The mask parameter must be a boolean.')
assert _validate_colour(colour), 'Invalid colour.'
if colour is None:
_colour = colour
else:
_colour = tuple([i / 255 for i in colour]) # Avoids a UserWarning
if image is None:
if mask:
canvas = self.segmentation_mask
else:
canvas = self.image
else:
assert _validate_image_array(image, 'image'), 'Invalid image.'
if image.shape[:2] != self.image.shape[:2]:
raise IncorrectShapeError(
'The width and height of the input image do not agree '
'with the dimensions of the original image.')
canvas = image
bnd_float = ski_segmentation.mark_boundaries(
canvas, self._segments, color=_colour)
marked_image = (bnd_float * 255).astype(np.uint8)
assert _validate_image_array(
marked_image,
'image with boundaries'), 'Invalid integer-based image.'
return marked_image
    def number_segments(
            self,
            segments_subset: Optional[Union[int, List[int]]] = None,
            mask: bool = False,
            image: Optional[np.ndarray] = None,
            colour: Optional[RGBcolour] = None) -> np.ndarray:
        """
        Plots segment numbers on top of the image.

        The numbering can either be overlaid on top of the image or
        segmentation mask (``mask=True``) used to initialise this class.
        Alternatively, an external ``image`` of the same dimensions can be
        supplied.
        By default all the segments are numbered; a selected subset of
        segments can be numbered by providing the ``segments_subset``
        parameter.
        The colour of the numbers can be specified via the ``colour``
        parameter by passing an RGB triplet.

        .. note::
           The numbers may not be printed within the bounds of their
           respective segments when these are not convex.

        Parameters
        ----------
        segments_subset : integer or list(integer), optional (default=None)
            A number of a specific segment to be numbered or a list of
            segments to be numbered. By default (``None``) all the segments
            are numbered.
        mask : boolean, optional (default=False)
            If ``True``, number the segmentation mask;
            if ``False``, number the image (default).
        image : numpy.ndarray, optional (default=None)
            If provided, this ``image`` will be numbered instead of the one
            used to initialise this segmenter.
        colour : tuple(integer, integer, integer), optional (default=None)
            If provided, the numbers will be plotted with this RGB colour.

        Raises
        ------
        IncorrectShapeError
            The height or width of the ``image`` array does not agree with
            the dimensions of the class image.
        TypeError
            The ``mask`` parameter is not a boolean.
            The ``colour`` parameter is neither a tuple nor a ``None``;
            or one of its elements is not an integer.
            The ``segments_subset`` parameter is neither ``None``, an
            integer, or a list of integers; one of the segment ids in this
            list is not an integer.
        ValueError
            The ``colour`` parameter is not a 3-tuple, one of its elements
            is outside of the 0--255 range.
            One of the segment ids provided via ``segments_subset`` is
            invalid for the class segmentation, the list of segments is
            empty or some of its elements are duplicated.

        Returns
        -------
        numbered_image : numpy.ndarray
            A numpy array holding the image with the selected subset of
            segments numbered.
        """
        # pylint: disable=too-many-locals,too-many-branches,too-many-statements
        assert self._segments is not None, 'The segmenter was not initialised.'
        unique_segments = np.unique(self._segments)
        # Normalise segments_subset into a 1-D numpy array of segment ids.
        if segments_subset is None:
            segments_subset_ = unique_segments
        else:
            if isinstance(segments_subset, int):
                if segments_subset not in unique_segments:
                    raise ValueError(
                        ('The segment id {} does not correspond to any of '
                         'the known segments ({}).').format(
                             segments_subset, unique_segments.tolist()))
                segments_subset_ = np.asarray([segments_subset])
            elif isinstance(segments_subset, list):
                if not segments_subset:
                    raise ValueError('The list of segments cannot be empty.')
                if len(segments_subset) != len(set(segments_subset)):
                    raise ValueError('The list of segments has duplicates.')
                for i in segments_subset:
                    if not isinstance(i, int):
                        raise TypeError(
                            'The segment id {} is not an integer.'.format(i))
                    if i not in unique_segments:
                        raise ValueError(
                            ('The segment id {} does not correspond to any of '
                             'the known segments ({}).').format(
                                 i, unique_segments.tolist()))
                segments_subset_ = np.asarray(segments_subset)
            else:
                raise TypeError('Segments subset must be either of None, '
                                'an integer or a list of integers.')
        if not isinstance(mask, bool):
            raise TypeError('The mask parameter must be a boolean.')
        assert _validate_colour(colour), 'Invalid colour.'
        # Select the canvas: the class image/mask or a user-provided image.
        if image is None:
            if mask:
                canvas = self.segmentation_mask
            else:
                canvas = self.image
            is_rgb = self.is_rgb
        else:
            assert _validate_image_array(image, 'image'), 'Invalid image.'
            if image.shape[:2] != self.image.shape[:2]:
                raise IncorrectShapeError(
                    'The width and height of the input image do not agree '
                    'with the dimensions of the original image.')
            canvas = image
            is_rgb = len(image.shape) == 3
        # Grayscale canvases are promoted to RGB so the numbers can be drawn
        # in colour.
        if not is_rgb:
            canvas = ski_colour.gray2rgb(canvas)
            assert _validate_image_array(
                canvas, 'grayscale->RGB image'), 'Invalid image.'
        canvas = canvas.astype(np.uint8)
        # canvas = self.mark_boundaries(image=canvas, colour=colour)
        # font = ImageFont.truetype('~/Library/Fonts/Calibri.ttf', 11)
        font = ImageFont.load_default()
        numbered_canvas = Image.fromarray(canvas)
        numbered_canvas_draw = ImageDraw.Draw(numbered_canvas)
        for segment_id in segments_subset_:
            segment_id_mask = (self._segments == segment_id)
            segment_id_indices = np.argwhere(segment_id_mask)
            # Anchor the label at the top-most row of the segment, then at
            # the left-most pixel within that row.
            # segment_x_left = segment_id_indices[:, 1].min().astype(int)
            segment_y_top = segment_id_indices[:, 0].min().astype(int)
            eligible_y_ind = np.where(
                segment_id_indices[:, 0] == segment_y_top)[0]
            segment_x_middle = segment_id_indices[eligible_y_ind].min(
                axis=0)[1]
            numbered_canvas_draw.text((segment_x_middle, segment_y_top),
                                      '{}'.format(segment_id),
                                      fill=colour,
                                      font=font)
        numbered_image = np.asarray(numbered_canvas)
        assert _validate_image_array(
            numbered_image, 'numbered image'), 'Invalid numbered image.'
        return numbered_image
    def highlight_segments(
            self,
            segments_subset: Optional[Union[int, List[int]]] = None,
            mask: bool = False,
            image: Optional[np.ndarray] = None,
            colour: Optional[Union[RGBcolour, List[RGBcolour]]] = None
    ) -> np.ndarray:
        """
        Highlights image segments by translucently colouring them.

        The highlighting can either be applied on top of the image or
        segmentation mask (``mask=True``) used to initialise this class.
        Alternatively, an external ``image`` of the same dimensions can be
        supplied.
        By default all the segments are highlighted; a selected subset of
        segments can be highlighted by providing the ``segments_subset``
        parameter.
        The segments are highlighted with different colours by default
        (``colour=None``);
        alternatively, a single colour can be supplied with the ``colour``
        parameter.
        It is also possible to specify a unique colour for each segment
        by setting ``colour`` to a list of RGB triplets;
        in this case the list must have the same length as the number of
        segments being highlighted.

        Parameters
        ----------
        segments_subset : integer or list(integer), optional (default=None)
            A number of a specific segment or a list of segments to be
            highlighted. By default (``None``) all the segments are
            highlighted.
        mask : boolean, optional (default=False)
            If ``True``, highlight the segmentation mask;
            if ``False``, highlight the image (default).
        image : numpy.ndarray, optional (default=None)
            If provided, this ``image`` will be highlighted instead of the
            one used to initialise this segmenter.
        colour : tuple(integer, integer, integer) or \
list(tuple(integer, integer, integer)), optional (default=None)
            If provided, the regions will be highlighted with a single RGB
            colour or each segment will be highlighted with its unique
            colour. By default (``None``) every segment receives a unique
            colour.

        Raises
        ------
        IncorrectShapeError
            The height or width of the ``image`` array does not agree with
            the dimensions of the class image.
        TypeError
            The ``mask`` parameter is not a boolean.
            The ``colour`` parameter is neither of ``None``, a tuple or a
            list of tuples; or one of its elements is not an integer.
            The ``segments_subset`` parameter is neither ``None``, an
            integer, or a list of integers; one of the segment ids in this
            list is not an integer.
        ValueError
            If ``colour`` is provided as a list, the list is either empty or
            the number of colours provided is not the same as the number of
            segments chosen to be highlighted.
            A colour is not a 3-tuple or one of its elements is outside of
            the 0--255 range.
            One of the segment ids provided via ``segments_subset`` is
            invalid for the class segmentation, the list of segments is
            empty or some of its elements are duplicated.

        Returns
        -------
        image_highlighted : numpy.ndarray
            A numpy array holding the image with the selected subset of
            segments highlighted.
        """
        # pylint: disable=too-many-locals,too-many-branches,too-many-statements
        assert self._segments is not None, 'The segmenter was not initialised.'
        unique_segments = np.unique(self._segments)
        # Normalise segments_subset into a 1-D numpy array of segment ids.
        if segments_subset is None:
            segments_subset_ = unique_segments
        else:
            if isinstance(segments_subset, int):
                if segments_subset not in unique_segments:
                    raise ValueError(
                        ('The segment id {} does not correspond to any of '
                         'the known segments ({}).').format(
                             segments_subset, unique_segments.tolist()))
                segments_subset_ = np.asarray([segments_subset])
            elif isinstance(segments_subset, list):
                if not segments_subset:
                    raise ValueError('The list of segments cannot be empty.')
                if len(segments_subset) != len(set(segments_subset)):
                    raise ValueError('The list of segments has duplicates.')
                for i in segments_subset:
                    if not isinstance(i, int):
                        raise TypeError(
                            'The segment id {} is not an integer.'.format(i))
                    if i not in unique_segments:
                        raise ValueError(
                            ('The segment id {} does not correspond to any of '
                             'the known segments ({}).').format(
                                 i, unique_segments.tolist()))
                segments_subset_ = np.asarray(segments_subset)
            else:
                raise TypeError('Segments subset must be either of None, '
                                'an integer or a list of integers.')
        if not isinstance(mask, bool):
            raise TypeError('The mask parameter must be a boolean.')
        # Normalise colour into either None (auto colours) or a list of
        # RGB triplets.
        if isinstance(colour, tuple):
            assert _validate_colour(colour), 'Invalid colour.'
            colour = [colour]
        elif isinstance(colour, list):
            if not colour:
                raise ValueError('The colour list cannot be empty.')
            if len(colour) != segments_subset_.shape[0]:
                raise ValueError('If colours are provided as a list, their '
                                 'number must match the number of segments '
                                 'chosen to be highlighted.')
            for clr in colour:
                assert _validate_colour(clr), 'Invalid colour.'
        else:
            if colour is not None:
                raise TypeError('The colour can be either of an RGB tuple, '
                                'a list of RGB tuples or None.')
        # Select the canvas: the class image/mask or a user-provided image.
        if image is None:
            if mask:
                canvas = self.segmentation_mask
            else:
                canvas = self.image
            is_rgb = self.is_rgb
        else:
            assert _validate_image_array(image, 'image'), 'Invalid image.'
            if image.shape[:2] != self.image.shape[:2]:
                raise IncorrectShapeError(
                    'The width and height of the input image do not agree '
                    'with the dimensions of the original image.')
            canvas = image
            is_rgb = len(image.shape) == 3
        if not is_rgb:
            canvas = ski_colour.gray2rgb(canvas)
        # Build a label image: 0 for background, i+1 for the i-th chosen
        # segment.
        highlight_mask = np.zeros(shape=self._segments.shape, dtype=int)
        for i, segments in enumerate(segments_subset_):
            s_mask = (self._segments == segments)
            highlight_mask[s_mask] = i + 1
        # NOTE(review): the RGB triplets are forwarded to label2rgb in the
        # 0--255 range -- confirm this matches the 0--1 colour convention
        # expected by scikit-image.
        # This step converts the image to grayscale first...
        image_highlighted_ = ski_colour.label2rgb(
            highlight_mask,
            image=canvas,
            colors=colour,
            bg_label=0,
            bg_color=None,
            kind='overlay')
        image_highlighted_ = (image_highlighted_ * 255).astype(np.uint8)
        assert _validate_image_array(
            image_highlighted_, 'highlighted image'), 'Bad highlighted image.'
        # ... so we need to restore the colour to the background
        image_highlighted = canvas.copy()
        colour_mask = highlight_mask.astype(bool)
        image_highlighted[colour_mask] = image_highlighted_[colour_mask]
        return image_highlighted
    def _stain_segments(
            self,
            segments_subset: Optional[Union[int, List[int]]] = None,
            mask: bool = False,
            image: Optional[np.ndarray] = None,
            colour: Optional[Union[str, List[str]]] = None) -> np.ndarray:
        """
        Stains selected segments of the image with a red, green or blue tint.

        The staining can either be applied on top of the image or
        segmentation mask (``mask=True``) used to initialise this class.
        Alternatively, an external RGB ``image`` of the same dimensions can
        be supplied.
        By default all the segments are stained in *blue*; a selected subset
        of segments can be stained by providing the ``segments_subset``
        parameter.
        The ``colour`` can be either of ``'r'``, ``'g'`` or ``'b'`` --
        respectively for red, green and blue -- or a list thereof, which has
        the same length as the number of segments specified via
        ``segments_subset``.

        .. note::
           This method works only with RGB images.

        Parameters
        ----------
        segments_subset : integer or list(integer), optional (default=None)
            A number of a specific segment or a list of segments to be
            stained. By default (``None``) all the segments are stained.
        mask : boolean, optional (default=False)
            If ``True``, stain the segmentation mask;
            if ``False``, stain the image (default).
        image : numpy.ndarray, optional (default=None)
            If provided, this ``image`` will be stained instead of the one
            used to initialise this segmenter.
        colour : string or list(string), optional (default=None)
            Either of ``'r'``, ``'g'`` or ``'b'`` for red, green or blue
            respectively; or a list thereof of the length equal to the
            subset of segments being stained.
            By default (``None``) every segment is stained in *blue*.
            If provided as a string, the regions will be stained in a single
            colour; if provided as a list, each segment will be stained with
            its unique colour.

        Raises
        ------
        IncorrectShapeError
            The height or width of the ``image`` array does not agree with
            the dimensions of the class image or the ``image`` is not RGB.
        RuntimeError
            The class has been initialised with a black-and-white or
            grayscale image.
        TypeError
            The ``mask`` parameter is not a boolean.
            The ``segments_subset`` parameter is neither ``None``, an
            integer, or a list of integers; one of the segment ids in this
            list is not an integer.
            The ``colour`` is neither a string nor a list of strings.
        ValueError
            One of the segment ids provided via ``segments_subset`` is
            invalid for the class segmentation, the list of segments is
            empty or some of its elements are duplicated.
            One of the colour strings is neither of ``'r'``, ``'g'`` or
            ``'b'``.
            The colour list is empty or its length is different to the
            number of segments selected to be stained.

        Returns
        -------
        image_stained : numpy.ndarray
            A numpy array holding the image with the selected subset of
            segments stained.
        """
        # pylint: disable=too-many-locals,too-many-branches,too-many-statements
        _accepted_colours = ('r', 'g', 'b')
        # Maps a colour string to the index of the RGB channel to saturate.
        _colour_map = {'r': 0, 'g': 1, 'b': 2}
        if not self.is_rgb:
            raise RuntimeError('Staining segments of an image can only be '
                               'performed on RGB images.')
        assert self._segments is not None, 'The segmenter was not initialised.'
        unique_segments = np.unique(self._segments)
        # Normalise segments_subset into a list of segment ids.
        if segments_subset is None:
            segments_subset_ = unique_segments.astype(int).tolist()
        else:
            if isinstance(segments_subset, int):
                if segments_subset not in unique_segments:
                    raise ValueError(
                        ('The segment id {} does not correspond to any of '
                         'the known segments ({}).').format(
                             segments_subset, unique_segments.tolist()))
                segments_subset_ = [segments_subset]
            elif isinstance(segments_subset, list):
                if not segments_subset:
                    raise ValueError('The list of segments cannot be empty.')
                if len(segments_subset) != len(set(segments_subset)):
                    raise ValueError('The list of segments has duplicates.')
                for i in segments_subset:
                    if not isinstance(i, int):
                        raise TypeError(
                            'The segment id {} is not an integer.'.format(i))
                    if i not in unique_segments:
                        raise ValueError(
                            ('The segment id {} does not correspond to any of '
                             'the known segments ({}).').format(
                                 i, unique_segments.tolist()))
                segments_subset_ = segments_subset
            else:
                raise TypeError('Segments subset must be either of None, '
                                'an integer or a list of integers.')
        if not isinstance(mask, bool):
            raise TypeError('The mask parameter must be a boolean.')
        # Normalise colour into a per-segment list of 'r'/'g'/'b' strings.
        segments_subset_n = len(segments_subset_)
        if colour is None:
            colour = segments_subset_n * ['b']
        elif isinstance(colour, str):
            if colour not in _accepted_colours:
                raise ValueError(('One of the provided colour strings ({}) is '
                                  "not 'r', 'g' or 'b'.").format(colour))
            colour = segments_subset_n * [colour]
        elif isinstance(colour, list):
            if not colour:
                raise ValueError('The colour list cannot be empty.')
            if len(colour) != segments_subset_n:
                raise ValueError('If colours are provided as a list, their '
                                 'number must match the number of segments '
                                 'chosen to be highlighted.')
            for clr in colour:
                if clr not in _accepted_colours:
                    raise ValueError(
                        ('One of the provided colour strings ({}) is not '
                         "'r', 'g' or 'b'.").format(clr))
        else:
            raise TypeError("The colour can be either of 'r', 'g' or 'b' "
                            'strings, a list thereof or None.')
        # Select the canvas: the class image/mask or a user-provided image.
        if image is None:
            if mask:
                canvas = self.segmentation_mask
            else:
                canvas = self.image
        else:
            assert _validate_image_array(image, 'image'), 'Invalid image.'
            if len(image.shape) != 3:
                raise IncorrectShapeError(
                    'The user-provided image is not RGB.')
            if image.shape[:2] != self.image.shape[:2]:
                raise IncorrectShapeError(
                    'The width and height of the input image do not agree '
                    'with the dimensions of the original image.')
            canvas = image
        # Stain each chosen segment by saturating its designated channel to
        # the brightest value present in the canvas.
        image_stained = canvas.copy()
        max_value = np.max(image_stained)
        for id_, clr in zip(segments_subset_, colour):
            pixel_mask = get_segment_mask(id_, self._segments)
            colour_channel = _colour_map[clr]
            image_stained[pixel_mask, colour_channel] = max_value
        return image_stained
def grayout_segments(
self,
segments_subset: Optional[Union[int, List[int]]] = None,
mask: bool = False,
image: Optional[np.ndarray] = None) -> np.ndarray:
"""
Grays out a selected subset of segments in the RGB image.
The graying out can either be applied on top of the image or
segmentation mask (``mask=True``) used to initialise this class.
Alternatively, an external RGB ``image`` of the same dimensions can be
supplied.
By default all the segments are grayed out; a selected subset of
segments can be grayed out by providing the ``segments_subset``
parameter.
Parameters
----------
segments_subset : intiger or list(integer), optional (default=None)
A number of a specific segment or a list of segments to be
grayed out. By default (``None``) all the segments are grayed out.
mask : boolean, optional (default=False)
If ``True``, gray out the segmentation mask;
if ``False``, gray out the image (default).
image : numpy.ndarray, optional (default=None)
If provided, this ``image`` will be grayed out instead of the one
used to initialise this segmenter.
Raises
------
IncorrectShapeError
The the height or width the ``image`` array does not agree with
the dimensions of the class image or the ``image`` is not RGB.
RuntimeError
The class has been initialised with a black-and-white or
grayscale image.
TypeError
The ``mask`` parameter is not a boolean.
The ``segments_subset`` parameter is neither ``None``, an integer,
or a list of integers; one of the segment ids in this list is not
an integer.
ValueError
One of the segment ids provided via ``segments_subset`` is invalid
for the class segmentation, the list of segments is empty or some
of its elements are duplicated.
Returns
-------
image_grayscale : numpy.ndarray
A numpy array holding the image with the selected subset of
segments grayed out.
"""
# pylint: disable=too-many-branches
assert self._segments is not None, 'The segmenter was not initialised.'
if not self.is_rgb:
raise RuntimeError('Graying out segments of an image can only be '
'performed on RGB images.')
unique_segments = np.unique(self._segments)
if segments_subset is None:
segments_subset = unique_segments.astype(int).tolist()
else:
if isinstance(segments_subset, int):
if segments_subset not in unique_segments:
raise ValueError(
('The segment id {} does not correspond to any of '
'the known segments ({}).').format(
segments_subset, unique_segments.tolist()))
segments_subset = [segments_subset]
elif isinstance(segments_subset, list):
if not segments_subset:
raise ValueError('The list of segments cannot be empty.')
if len(segments_subset) != len(set(segments_subset)):
raise ValueError('The list of segments has duplicates.')
for i in segments_subset:
if not isinstance(i, int):
raise TypeError(
'The segment id {} is not an integer.'.format(i))
if i not in unique_segments:
raise ValueError(
('The segment id {} does not correspond to any of '
'the known segments ({}).').format(
i, unique_segments.tolist()))
segments_subset = segments_subset
else:
raise TypeError('Segments subset must be either of None, '
'an integer or a list of integers.')
if not isinstance(mask, bool):
raise TypeError('The mask parameter must be a boolean.')
if image is None:
if mask:
canvas = self.segmentation_mask
else:
canvas = self.image
else:
assert _validate_image_array(image, 'image'), 'Invalid image.'
if len(image.shape) != 3:
raise IncorrectShapeError(
'The user-provided image is not RGB.')
if image.shape[:2] != self.image.shape[:2]:
raise IncorrectShapeError(
'The width and height of the input image do not agree '
'with the dimensions of the original image.')
canvas = image
# Convert RGB into a grayscale representation
image_grayscale_ = np.dot(canvas, self.GRAYSCALE_TRANSFORMATION)
image_grayscale_ = np.repeat(
image_grayscale_[:, :, np.newaxis], 3, axis=2).astype(np.uint8)
# grayscale_image_ = np.clip(
# (0.1 * grayscale_image_ + 200), 0, 255)
# grayscale_image = np.dstack(
# [np.zeros((grayscale_image_.shape[0],
# grayscale_image_.shape[1],
# 2)),
# grayscale_image_]).astype(np.uint8)
# Filter out segments
image_grayscale = canvas.copy()
grayscale_mask = get_segment_mask(segments_subset, self._segments)
image_grayscale[grayscale_mask] = image_grayscale_[grayscale_mask]
return image_grayscale
    def merge_segments(self,
                       segments_grouping: Union[List[int], List[List[int]]],
                       inplace: bool = True,
                       segments: Optional[np.ndarray] = None) -> np.ndarray:
        """
        Merges segments based on the provided grouping.

        The merging can either be applied to the segmentation stored in the
        class or to a segmentation passed as a parameter (``segments``).
        By default (``inplace=True``) the segmentation stored in the class
        will be updated to the merged segmentation.

        Parameters
        ----------
        segments_grouping : list(integer) or list(list(integer))
            A collection or a set of collections of segment ids to be merged.
        inplace : boolean, optional (default=True)
            If ``True``, overwrite the segmentation stored in the class.
        segments : numpy.ndarray, optional (default=None)
            If provided, the merging will be performed on this segmentation
            instead of the one stored in the class.

        Raises
        ------
        IncorrectShapeError
            The ``segments`` array is not 2-dimensional.
            The height or width of the ``segments`` array does not agree with
            the dimensions of the segmented image.
        TypeError
            The ``segments`` array is either a structured numpy array or
            it is not an integer-valued array.
            The inplace parameter is not a boolean.
            The segments grouping is not a list of integers or lists.
            One of the segment ids is not an integer.
        ValueError
            The unique elements of the ``segments`` array do not form a
            continuous sequence starting at 1.
            The segments grouping is an empty list or the list has duplicates.
            One of the segment ids is invalid or appears across different
            groupings.

        Returns
        -------
        merged_segments : numpy.ndarray
            A 2-dimensional numpy array holding the merged segmentation.
        """
        # pylint: disable=too-many-branches,too-many-locals
        assert self._segments is not None, 'The segmenter was not initialised.'
        # Operate either on the stored segmentation or the user-provided one.
        if segments is None:
            segments_ = self._segments
        else:
            assert _validate_segmentation(segments,
                                          self.image), 'Invalid segmentation.'
            segments_ = segments
        if not isinstance(inplace, bool):
            raise TypeError('The inplace parameter must be a boolean.')
        unique_segments = np.unique(segments_)
        if isinstance(segments_grouping, list):
            if not segments_grouping:
                raise ValueError(
                    'The segments grouping cannot be an empty list.')
            # The type of the first element decides which of the two accepted
            # formats -- a flat list of ids (one group) or a list of lists
            # (many groups) -- is expected for the whole argument.
            if isinstance(segments_grouping[0], int):
                if len(segments_grouping) != len(set(segments_grouping)):
                    raise ValueError('The segments grouping has duplicates.')
                for i in segments_grouping:
                    if not isinstance(i, int):
                        raise TypeError(
                            'The segment id {} is not an integer.'.format(i))
                    if i not in unique_segments:
                        raise ValueError(
                            ('The segment id {} does not correspond to any of '
                             'the known segments ({}).').format(
                                 i, unique_segments.tolist()))
                # Wrap the single group so the merging loop below can treat
                # both input formats uniformly.
                segments_grouping_ = [segments_grouping]
            elif isinstance(segments_grouping[0], list):
                # Collects ids seen so far to catch cross-group duplicates.
                _item_collector = []
                for i in segments_grouping:
                    if not isinstance(i, list):
                        raise TypeError(
                            'The nested elements of segments grouping are not '
                            'consistent. If one is a list, all must be lists.')
                    if len(i) != len(set(i)):
                        raise ValueError(
                            'The segments grouping has duplicates.')
                    for j in i:
                        if not isinstance(j, int):
                            raise TypeError(
                                'The segment id {} is not an integer.'.format(
                                    j))
                        if j not in unique_segments:
                            raise ValueError(
                                ('The segment id {} does not correspond to '
                                 'any of the known segments ({}).').format(
                                     j, unique_segments.tolist()))
                        if j in _item_collector:
                            raise ValueError(
                                ('The segment id {} is duplicated across '
                                 'grouping lists.').format(j))
                        _item_collector.append(j)
                segments_grouping_ = segments_grouping  # type: ignore
            else:
                raise TypeError('The segments grouping must either be a list '
                                'of integers or a list of lists.')
        else:
            raise TypeError('Segments grouping must be a list.')
        merged_segments_ = segments_.copy()
        for group in segments_grouping_:
            mask = get_segment_mask(group, segments_)  # type: ignore
            # use the smallest id to avoid collisions
            merged_segments_[mask] = min(group)
        # Remap segment ids to ensure continuous numbering starting at 1
        merged_segments = np.full(
            merged_segments_.shape, -1, dtype=merged_segments_.dtype)
        for new_id, old_id in enumerate(np.unique(merged_segments_)):
            mask = (merged_segments_ == old_id)
            merged_segments[mask] = new_id + 1
        # Every pixel must have been visited by the remapping loop above,
        # hence no -1 placeholder may remain.
        assert not (merged_segments == -1).any(), 'Internal remapping error.'
        assert _validate_segmentation(merged_segments,
                                      self.image), 'Invalid segmentation.'
        if inplace:
            self.set_segments(merged_segments)
        return merged_segments
class Slic(Segmentation):
    """
    Wraps the slic segmentation algorithm implemented in scikit-image.

    .. versionadded:: 0.1.1

    This class is a thin adapter around the
    :func:`skimage.segmentation.slic` function.
    For the shared documentation see the specification of the
    :class:`fatf.utils.data.segmentation.Segmentation` abstract class;
    only the slic-specific initialisation parameters are documented below.

    Parameters
    ----------
    n_segments : integer, optional (default=10)
        The number of segments desired of slic.

    Raises
    ------
    TypeError
        The number of segments parameter is not an integer.
    ValueError
        The number of segments parameter is less than 2.
    """

    def __init__(self,
                 image: np.ndarray,
                 segmentation_mask: Optional[np.ndarray] = None,
                 n_segments: int = 10):
        """Constructs a ``slic`` segmenter."""
        super().__init__(image, segmentation_mask, n_segments=n_segments)

    def _segment(self):
        """
        Wraps the :func:`skimage.segmentation.slic` function.

        Raises
        ------
        TypeError
            The number of segments parameter is not an integer.
        ValueError
            The number of segments parameter is less than 2.

        Returns
        -------
        segments : numpy.ndarray
            Segments of the image (segmentation mask).
        """
        assert 'n_segments' in self.kwargs, 'Parameter missing.'
        segment_count = self.kwargs.get('n_segments')
        # Validate the user-provided segment count before calling slic.
        if not isinstance(segment_count, int):
            raise TypeError('The n_segments parameter must be an integer.')
        if segment_count < 2:
            raise ValueError('The n_segments parameter must be at least 2.')
        # start_label=1 keeps segment ids in a 1-based continuous sequence.
        return ski_segmentation.slic(
            self.segmentation_mask, start_label=1, **self.kwargs)
class QuickShift(Segmentation):
    """
    Wraps the quickshift segmentation algorithm implemented in scikit-image.

    .. versionadded:: 0.1.1

    This class is a thin adapter around the
    :func:`skimage.segmentation.quickshift` function.
    For the shared documentation see the specification of the
    :class:`fatf.utils.data.segmentation.Segmentation` abstract class;
    only the quickshift-specific initialisation parameters are listed below.

    The defaults for ``ratio``, ``kernel_size`` and ``max_dist`` follow the
    values used by the official LIME_ implementation.

    .. _LIME: https://github.com/marcotcr/lime

    Parameters
    ----------
    ratio : number, optional (default=0.2)
        Balances color-space proximity and image-space proximity.
        Higher values give more weight to color-space.
        Between 0 and 1.
    kernel_size : number, optional (default=4)
        Width of Gaussian kernel used in smoothing the sample density.
        Higher means fewer clusters.
    max_dist : number, optional (default=200)
        Cut-off point for data distances. Higher means fewer clusters.

    Raises
    ------
    TypeError
        The ratio, kernel size or max dist parameter is not a number.
    ValueError
        The ratio parameter is outside of the 0--1 range.
    """

    def __init__(self,
                 image: np.ndarray,
                 segmentation_mask: Optional[np.ndarray] = None,
                 ratio: float = 0.2,
                 kernel_size: float = 4,
                 max_dist: float = 200):
        """Constructs a ``quickshift`` segmenter."""
        # pylint: disable=too-many-arguments
        super().__init__(
            image,
            segmentation_mask,
            ratio=ratio,
            kernel_size=kernel_size,
            max_dist=max_dist)

    def _segment(self):
        """
        Wraps the :func:`skimage.segmentation.quickshift` function.

        Raises
        ------
        TypeError
            The ratio, kernel size or max dist parameter is not a number.
        ValueError
            The ratio parameter is outside of the 0--1 range.

        Returns
        -------
        segments : numpy.ndarray
            Segments of the image (segmentation mask).
        """
        for param in ('ratio', 'kernel_size', 'max_dist'):
            assert param in self.kwargs, 'Parameters missing.'
        # All three parameters must be numeric; the ratio is additionally
        # restricted to the [0, 1] interval.
        ratio_ = self.kwargs.get('ratio')
        if not isinstance(ratio_, Number):
            raise TypeError('Ratio should be a number.')
        if ratio_ < 0 or ratio_ > 1:
            raise ValueError('Ratio must be between 0 and 1.')
        if not isinstance(self.kwargs.get('kernel_size'), Number):
            raise TypeError('Kernel size should be a number.')
        if not isinstance(self.kwargs.get('max_dist'), Number):
            raise TypeError('Max dist should be a number.')
        raw_segments = ski_segmentation.quickshift(self.segmentation_mask,
                                                   **self.kwargs)
        # quickshift numbers segments from 0; shift to a 1-based sequence.
        return raw_segments + 1
def get_segment_mask(segments_subset: Union[int, List[int]],
                     segmentation: np.ndarray) -> np.ndarray:
    """
    Generates a boolean mask for pixels belonging to the specified segments.

    .. versionadded:: 0.1.1

    The mask holds ``True`` wherever a pixel belongs to one of the requested
    segments.

    Parameters
    ----------
    segments_subset : integer or list(integer)
        A single segment id or a list of segment ids for which the mask is
        built.
    segmentation : np.ndarray
        A 2-dimensional numpy array defining segmentation of an image
        (each unique integer -- in sequence starting at 1 -- indicates the
        segment id of the pixel at this coordinate).

    Raises
    ------
    IncorrectShapeError
        The ``segmentation`` array is not 2-dimensional.
    TypeError
        The ``segments_subset`` parameter is neither an integer nor a list of
        integers; one of the segment ids in this list is not an integer.
        The ``segmentation`` array is either a structured numpy array or
        it is not an integer-valued array.
    ValueError
        One of the segment ids provided via ``segments_subset`` is invalid
        for the ``segmentation`` or some of its elements are duplicated.
        The unique elements of the ``segmentation`` array do not form a
        continuous sequence starting at 1.

    Returns
    -------
    segment_mask : numpy.ndarray
        A boolean numpy array of the same shape as ``segmentation`` marking
        the pixels belonging to the specified segments with ``True``.
    """
    # A dummy all-zeros "image" of matching height and width is sufficient
    # for validating the segmentation array itself.
    assert _validate_segmentation(
        segmentation, np.zeros(shape=segmentation.shape,
                               dtype=np.int8)), 'Invalid segmentation array.'
    known_ids = np.unique(segmentation)
    if isinstance(segments_subset, int):
        if segments_subset not in known_ids:
            raise ValueError(
                ('The segment id {} does not correspond to any of '
                 'the known segments ({}).').format(segments_subset,
                                                    known_ids.tolist()))
        selected_ids = np.asarray([segments_subset])
    elif isinstance(segments_subset, list):
        if len(segments_subset) != len(set(segments_subset)):
            raise ValueError('The list of segments has duplicates.')
        for segment_id in segments_subset:
            if not isinstance(segment_id, int):
                raise TypeError(
                    'The segment id {} is not an integer.'.format(segment_id))
            if segment_id not in known_ids:
                raise ValueError(
                    ('The segment id {} does not correspond to any of '
                     'the known segments ({}).').format(
                         segment_id, known_ids.tolist()))
        selected_ids = np.asarray(segments_subset)
    else:
        raise TypeError('Segments subset must either be an integer '
                        'or a list of integers.')
    # Vectorised membership test replaces a per-segment accumulation loop;
    # the result is a boolean array of the same shape as the segmentation.
    return np.isin(segmentation, selected_ids)
|
from django.shortcuts import render
from django.views.generic.base import TemplateView
from django.views.generic.edit import CreateView
class Homepage(TemplateView):
    """Render the site landing page (``index.html``)."""
    template_name = 'index.html'
class BaseTemplateView(TemplateView):
    """Render the shared base template (``base.html``)."""
    template_name = 'base.html'
class Demo(CreateView):
    """Render the student registration form.

    NOTE(review): a ``CreateView`` normally requires ``model``/``fields`` or
    ``form_class``; none is visible here -- confirm they are supplied
    elsewhere (e.g. via a subclass or URL configuration).
    """
    template_name = 'student/student_register.html'
|
#!/usr/bin/env python
'''
readSequenceFromListTest.py: Example reading a list of PDB IDs from a local MMTF Hadoop sequence \
file into a tupleRDD.
'''
__author__ = "Mars (Shih-Cheng) Huang"
__maintainer__ = "Mars (Shih-Cheng) Huang"
__email__ = "marshuang80@gmail.com"
__status__ = "Warning"
import unittest
from pyspark.sql import SparkSession
from mmtfPyspark.io.mmtfReader import read_sequence_file
class ReadSequenceFileTest(unittest.TestCase):
    """Checks that a subset of PDB ids can be read from a sequence file."""

    def setUp(self):
        """Start a local Spark session and read the sample entries."""
        sample_path = 'resources/mmtf_full_sample'
        #TODO
        self.pdbIds = "1FDK,1FDL,1FDM,1FDN,1FDO,1FDP,1FDQ,1FDR,1FDS,1FDT" \
            .split(',')
        self.spark = SparkSession.builder \
            .master("local[*]") \
            .appName("read_sequence_file") \
            .getOrCreate()
        self.pdb = read_sequence_file(sample_path, pdbId=self.pdbIds)

    def test_size(self):
        """One entry should be read per requested PDB id."""
        self.assertEqual(len(self.pdbIds), self.pdb.count())

    def test_result(self):
        """The keys of the read RDD should match the requested PDB ids."""
        self.assertEqual(set(self.pdbIds), set(self.pdb.keys().collect()))

    def tearDown(self):
        """Shut the Spark session down."""
        self.spark.stop()
# Allow running this test module directly with ``python``.
if __name__ == '__main__':
    unittest.main()
|
"""
database manager functions
edits text file databases
Functions: fillDB(), searchDB()
"""
def fillDB(data):
    """
    This function will open memberDB and append the data.

    input = type str
    example input = 'name money level daily' followed by a newline

    void
    """
    # The context manager guarantees the handle is closed even if the
    # write raises (the original open/close pair leaked on failure).
    with open("memberDB.txt", "a") as db_file:
        db_file.write(data)
def searchDB(data):
    """
    This function will open memberDB and search for name.

    input = type str
    example input = 'name'

    Returns the first matching line split on spaces (the trailing element
    keeps its newline), or False if no line contains ``data``.
    """
    # The context manager closes the file on every exit path, replacing
    # the two manual close() calls of the original implementation.
    with open("memberDB.txt", "r") as db_file:
        for line in db_file:
            # NOTE: substring match -- searching 'al' also matches 'alice'.
            if data in line:
                return line.split(" ")
    return False
|
# Module-level counter mutated by add_number() below.
number = 0
def add_number():
    """Increment the shared module-level counter by one."""
    global number
    number += 1
def print_number():
    """Print the current value of the shared counter."""
    print(f'number is {number}')
# Runs at import time as well as when executed as a script.
print('module executed')
|
x = int(raw_input())
y = int(raw_input())
z = int(raw_input())
n = int(raw_input())
print sum(sum([[ [ [X,Y,Z] for Z in range(z+1) if X+Y+Z != 2 ] for Y in range(y+1)] for X in range(x+1) ], []), [])
|
import unittest
from RomanNumeralsConverter import RomanNumeralsConverter
class RomanNumeralsConverterTests(unittest.TestCase):
    """Round-trip tests for ``RomanNumeralsConverter.convert``."""

    # Each entry pairs an Arabic number with its Roman-numeral spelling.
    test_cases = [
        [1, 'I'],
        [2, 'II'],
        [3, 'III'],
        [4, 'IV'],
        [5, 'V'],
        [6, 'VI'],
        [7, 'VII'],
        [8, 'VIII'],
        [9, 'IX'],
        [10, 'X'],
        [12, 'XII'],
        [14, 'XIV'],
        [19, 'XIX'],
        [29, 'XXIX'],
        [49, 'XLIX'],
        [50, 'L'],
        [73, 'LXXIII'],
        [99, 'XCIX'],
        [100, 'C'],
        [349, 'CCCXLIX'],
        [443, 'CDXLIII'],
        [500, 'D'],
        [900, 'CM'],
        [1000, 'M'],
        [1903, 'MCMIII'],
        [1999, 'MCMXCIX'],
        [2019, 'MMXIX'],
    ]

    def test_all_cases_arabic_to_numeral(self):
        """Converting the Arabic number should yield the Roman numeral."""
        for arabic, numeral in self.test_cases:
            self.assertEqual(numeral, RomanNumeralsConverter().convert(arabic))

    def test_all_cases_numeral_to_arabic(self):
        """Converting the Roman numeral should yield the Arabic number."""
        for arabic, numeral in self.test_cases:
            self.assertEqual(arabic, RomanNumeralsConverter().convert(numeral))
# Allow running this test module directly with ``python``.
if __name__ == '__main__':
    unittest.main()
|
# Factorial
# n! = 1 x 2 x 3 x ... x n
def fact(n):
    """
    Return ``n!`` computed recursively.

    Notes (translated from the original comments):
    1. The function calls itself in its own definition.
    2. A stopping condition is required.
    3. Guard against stack overflow by controlling the recursion depth.
    Example application: listing all sub-directories and files of the
    current directory.

    Raises ValueError for negative input. The base case covers both
    ``0! == 1`` and ``1! == 1`` (previously ``fact(0)`` recursed past the
    base case until it hit the recursion limit).
    """
    if n < 0:
        raise ValueError('n must be a non-negative integer.')
    if n <= 1:
        return 1
    return n * fact(n - 1)
|
#!/usr/bin/env python
import csv, re
from datetime import datetime, timezone
# --- Conversion configuration: input CSV log and output ADIF file. ---
inputFile = "log.csv"
outputFile = "log.adif"
contest = "ARRL-SCR"
# "CLASS-I" for individual.
# "CLASS-C" for club (non-school).
# "CLASS-S-EL" for elementary school.
# "CLASS-S-JH" for middle/intermediate/junior high school.
# "CLASS-S-HS" for senior high school.
# "CLASS-S-UN" for college/university.
category_station = "CLASS-S-HS"
# --- Station identity and contact details written into the log header. ---
callsign = "W1HLO"
arrl_section = "CT"
email = "girasolia@nfaschool.org"
grid_locator = "FN31xm"
name = "Anthony Girasoli"
club = "Norwich Free Academy Amateur Radio and Engineering Club"
address_1 = "Norwich Free Academy"
address_2 = "305 Broadway"
address_city = "Norwich"
address_state_province = "CT"
address_postalcode = "06360"
address_country = "US"
operators = "W1TTL"
soapbox = "W1HLO -- Calling CQ Since 1944. Go Wildcats!"
# Capture the current UTC time once so all derived fields agree.
nowtime = datetime.now(timezone.utc).replace(microsecond=0)
# strftime zero-pads each field, replacing the manual single-digit checks.
hour = nowtime.strftime("%H")
minute = nowtime.strftime("%M")
second = nowtime.strftime("%S")
todayDate = nowtime.date()
# UTC ("Zulu") wall-clock time, e.g. "09:05:03Z".
current_zulu_time = hour + ":" + minute + ":" + second + "Z"
# Read the CSV log into memory. The with-block closes the file on exit,
# so the redundant fd.close() after the block (a second close on an
# already-closed handle) is gone.
with open(inputFile, 'r') as fd:
    logLines = list(csv.reader(fd, delimiter=','))
# Write the new log file
# ADIF tags have the form <field:length>value; the header ends with <EOH>
# and every QSO record ends with <eor>.
logFile = open(outputFile, "w")
logFile.write("Generated on " + str(todayDate) + " at " + current_zulu_time + "\n")
logFile.write("<adif_ver:5>3.1.2\n")
logFile.write("<programid:21>W1TTL Rig Control 1.0\n")
logFile.write("<USERDEF1:1:S>class_sent\n")
logFile.write("<USERDEF2:1:S>class_rcv\n")
logFile.write("<USERDEF3:50:S>qth_sent\n")
logFile.write("<EOH>\n\n")
for logLine in logLines:
    # Each CSV row is positional:
    # freq, mode, date, time, sent call/RST/class/QTH, rcv call/RST/class/QTH.
    freq = str(logLine[0])
    mo = logLine[1]
    date = str(logLine[2])
    time = str(logLine[3])
    sent_call = logLine[4]
    sent_rst = str(logLine[5])
    sent_class = logLine[6]
    sent_qth = logLine[7]
    rcv_call = logLine[8]
    rcv_rst = str(logLine[9])
    rcv_class = logLine[10]
    rcv_qth = logLine[11]
    # Remove the dashes from date
    date = re.sub('[-]', '', date)
    logFile.write("<qso_date:" + str(len(date)) + ">" + date + "\n")
    logFile.write("<time_on:" + str(len(time)) + ">" + time + "\n")
    logFile.write("<operator:" + str(len(sent_call)) + ">" + sent_call + "\n")
    logFile.write("<rst_sent:" + str(len(sent_rst)) + ">" + sent_rst + "\n")
    logFile.write("<class_sent:" + str(len(sent_class)) + ">" + sent_class + "\n")
    logFile.write("<qth_sent:" + str(len(sent_qth)) + ">" + sent_qth + "\n")
    logFile.write("<call:" + str(len(rcv_call)) + ">" + rcv_call + "\n")
    #logFile.write("<band:" + str(len(band)) + ">" + band)
    logFile.write("<mode:" + str(len(mo)) + ">" + mo + "\n")
    logFile.write("<freq:" + str(len(freq)) + ">" + freq + "\n")
    logFile.write("<rst_rcvd:" + str(len(rcv_rst)) + ">" + rcv_rst + "\n")
    logFile.write("<class_rcv:" + str(len(rcv_class)) + ">" + rcv_class + "\n")
    logFile.write("<qth:" + str(len(rcv_qth)) + ">" + rcv_qth + "\n")
    logFile.write("<eor>\n\n")
logFile.close()
|
import time
import unittest
from datetime import datetime
from nokia import NokiaObject
class TestNokiaObject(unittest.TestCase):
    """Tests for attribute mapping and date handling on ``NokiaObject``."""
    def test_attributes(self):
        """Dictionary entries should surface as attributes on the object."""
        data = {
            "date": "2013-04-10",
            "string": "FAKE_STRING",
            "integer": 55555,
            "float": 5.67
        }
        obj = NokiaObject(data)
        # ISO date strings are expected to be parsed into a date object.
        self.assertEqual(obj.date.date().isoformat(), data['date'])
        self.assertEqual(obj.string, data['string'])
        self.assertEqual(obj.integer, data['integer'])
        self.assertEqual(obj.float, data['float'])
        # Test time as epoch
        data = {"date": 1409596058}
        obj = NokiaObject(data)
        # NOTE(review): ``timestamp`` without parentheses suggests an
        # arrow-style property rather than datetime's method -- confirm
        # against the NokiaObject implementation.
        self.assertEqual(obj.date.timestamp, data['date'])
        # Test funky time
        data = {"date": "weird and wacky date format"}
        obj = NokiaObject(data)
        # The test expects unparseable dates to pass through unchanged.
        self.assertEqual(obj.date, data['date'])
|
# -*- coding: utf-8 -*-
from os import getenv
from enum import Enum
# Verify that using ANSI color is supported; setting the
# ANSI_COLORS_DISABLED environment variable (to any value) disables color.
_color_is_supported: bool = getenv('ANSI_COLORS_DISABLED') is None
class _ANSIColor(Enum):
    """ANSI SGR foreground color codes (30--37)."""
    COLOR_BLACK = 30
    COLOR_RED = 31
    COLOR_GREEN = 32
    COLOR_YELLOW = 33
    COLOR_BLUE = 34
    COLOR_MAGENTA = 35
    COLOR_CYAN = 36
    COLOR_WHITE = 37
def _colorize_string(value: str, color: _ANSIColor) -> str:
    """Wrap ``value`` in the ANSI escape sequence for ``color``.

    Returns ``value`` unchanged when color output is disabled.
    """
    if not _color_is_supported:
        return value
    return f'\033[0;{color.value}m{value}\033[0m'
class Colors(object):
    """
    Contains all the base methods responsible for wrapping
    input strings in ANSI escape codes
    """

    @staticmethod
    def black(value: str) -> str:
        """Return ``value`` colorized to black."""
        return _colorize_string(value, _ANSIColor.COLOR_BLACK)

    @staticmethod
    def red(value: str) -> str:
        """Return ``value`` colorized to red."""
        return _colorize_string(value, _ANSIColor.COLOR_RED)

    @staticmethod
    def green(value: str) -> str:
        """Return ``value`` colorized to green."""
        return _colorize_string(value, _ANSIColor.COLOR_GREEN)

    @staticmethod
    def yellow(value: str) -> str:
        """Return ``value`` colorized to yellow."""
        return _colorize_string(value, _ANSIColor.COLOR_YELLOW)

    @staticmethod
    def blue(value: str) -> str:
        """Return ``value`` colorized to blue."""
        return _colorize_string(value, _ANSIColor.COLOR_BLUE)

    @staticmethod
    def magenta(value: str) -> str:
        """Return ``value`` colorized to magenta."""
        return _colorize_string(value, _ANSIColor.COLOR_MAGENTA)

    @staticmethod
    def cyan(value: str) -> str:
        """Return ``value`` colorized to cyan."""
        return _colorize_string(value, _ANSIColor.COLOR_CYAN)

    @staticmethod
    def white(value: str) -> str:
        """Return ``value`` colorized to white."""
        return _colorize_string(value, _ANSIColor.COLOR_WHITE)
# This module only defines helpers; nothing to do when run directly.
if __name__ == "__main__":
    pass
|
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXX XXXXXX XXXXXXXXXXXXXX XXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXX XXXXXXXX XXX XXXXX XX XXXXXXXX XXXXXXXXXXXXXX XXXXXX XX XXX
XXXXXXX XXXXXXXXXXXXXX XXXX XXXXXXXXXXXXX XXX XXXXXXX XX XXXXX XXX XXXX XXXXXX
XXXXXXX XXXXXX XXXXXXXX X XXXXXXXXXX XXXX XXXXX XX XXXXXX XXX XXX X XXXXXXX
XXXXXXXXXXXXXX XX XXXXXXXXX XXX XXXXXXXXXXXX XXX XXXXXXXX XXXXX XXXXXXXXXXXXXX
XXXXX XXXXXX XXXX XXX XXXXXXXX XXXXXX XXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXX XXX
XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXX XXXXXXXX XXXX XXXXXXXXXXXXXX XXX XXXXXXXXXXXXX XXXXXXXX
XXX XX XXXXXXXXX XXXXXXXX XX XX XXX XXXXXXXXXXXXXX XXXXXXX XX XXXXX XXXXXXXX
XXX XXXXXXXX XXXXXXXX
XX XXXXXXXXXXXXXX
XXXXXXXX XXXXXXX
XXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXX XXX XXXX XX XXX
XXXXXXXXXXXXXX XXXXXXX XXXX XXXXXXXXX XXXXXXXXX XXX XXXXXX XXXXXXXXXXX XXXX
XXXX XXXX XXX XXX XXXX XX XXXXXX XXXXXX XXXX XXXXXXXXXXX XXXXXXX XXXXXXXXXXX
XXXX XXXXXXXXX XXXXXXXXXXX XXXXXXX XXXX XXXXXXXX XXXX XXXX XXX XXXXX XX XXXX
XXXXXX XX XXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXX XXXX XXXX XXXXXXX XXXX
XXXXXXX XXXXXXXXXX XXXX XXX XXXXXXXXX XXXXXXX XX XXXX XXXXXXXX
XXX XXXXXXX XXXXXXXXXX XX XXX XXXXXXX XXXX XXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXXXXXXXXXXX XXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXXX XXXXXXXXXX XXX XXXXXXXXXXXXX XXXX XXXXXXX XX XXXX XXXX XXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XXXXX
XXXXXXXXXXXXXX
XXX XXXX XXXXXX XXX XX XXXXXX XXXXX XX XX XXX XXX XXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
X XX XXXX XXXXXX XXXX XX X XXXX XXXXXX XXXX XXX XXXXXXX XXXX XXXXX
X XX XXX XXXXXXXXX XXX XXX XXXXXXXX XX XXXXXX XXX XXXXXXXXXX
X XX XXX XXXX XX XXXXXX XXXXX XXXXXXX
XXX XXXXXXXXXXXXXX X XXXXXXXX
XXX XXXXXXXXXXX
XX XXX XXXX XXX XXXXXX XXXXX XXXXXXXXXX XXX XXX XXXX XXXXXXXXXXXX XXXXX
XXXXXXXXXXXXX XXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
X XXXXXX XXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXX XX XXXXXXXX XXX X XXXXXXXXX XXXXX XXX XXXXX XXXX XXX XXXX XXXX XX
XXXXXXX XXXXXXXXXXXX XX XXX XXXXX XXX XXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXX XX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXXX XX XXXX
XXXXXX XXX XXX XXXXX XXXXXXX
XXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXX XXXX XXX XXXXX XXX XXXXXX XXXXX XXXXXXXXX XX XXX XXXX XXXXXX XXX XXXX
X XXXX XXXX XXXXXXXXXXXXXXXXXXX XX XXX XXXXXXXXX XXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XXXXXXXXX XXXXXXX XX XXXXX XX XXX XXXXXXX XX
XXXXXXXXXX XXX XXXXXXXX XXXXXXXXX XX XXX XXXX XXXXXXXXX XXXX XX XXX X XXXXXX
XXXXXXXX XX XXXX XXXX XXXXXXXX X XXXXX
XX XXXXXX X XXXXXX XXXXXXXXX XXX XXXX XXXXXXX XXXXXXXX
XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXX X XXXXXX
XX XXXXXXXX X XXXXXX XXXXXXXX XXXX XXX XXXXXXX XXXXX XX XXXXXXX XXX XX
XXXXXX XXX XXXXXXXX XX X XXXXX XXXX XXXXX XXX XXXX XXXXX XXXXXX XX
XXXX XXXX XXXXXX XXX XXX XXXXXXXX XXXX XX XXXXXXX XXXXXXXXXXXX XX XXX
XX XXX XXXXXX X XXXXX XXX XXXXXXX XXXX XXXXXXX XX XXXXXX XXX XXXXXXXX
XXXXX XXXXXXXX XXXXXXX XXX XXXXXXX XXXXXX XXXXX
XXX XXX XXXX XXXXXX X XXXXXXXX XXXXXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXX XXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX X X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXX
XXX XXXXXXXX
XX XXX XXXX XXX XXXXXX XXXXX XXXXXXXXXX XXX XXX XXXX XXXXXX XXXXXX XXXXXXXXX
XX XXX XXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXX XXXXX XXXXXXXXXXXXXX
XXXXXX XXXX XXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXX XXXX XXX XX XXXX XX XXXXX XXXXX XX XXXXXX XXXXX XXX
XXXXXXXXXX
XXXXXXXX X XXXXXX XXXXXXXX XXXX XXX XXX XXX XXXXX XXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXX
XXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXX X XXX XX
XXXXXXXXXXXX XX XXXXX XXXXXXXXXXX XX XXXXXXX XXXXXXXXXX XXXXXXXXXXXX XXX
XXXXXXXXXXXX XXX XXX XXXXXXX XXXXX XXXXXX XXXX XXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXX X
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX XXX XXXXXXXXXXX XXX
XXXXX XXX X XXXXXXXX XX XXX XXXXXXXXXXX XXXXXX XXXXX XXX XXX XXXXXXX XX XX
X XXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX
XXXXXXX XXXXXXXXX XXX XXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXX
XXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XX XXXX XX XXX XXXXX
X X XXXXXXX XXXXXXXXXXXXX XXX XXXXXXXXXXX
XXXXX
X XX XXXXXXX XXXXXXXXXXXXX XXX XXXXXXXXXXX
XXXXXXXXXXX XX XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX
XXXXXX XX XXX XXXXXXXXXXXXXXXXXX XXXXXX XX XXX XXXXXXXXXXXXXX XXXXXXXXX
XX XXXXXX
XXXX XX X XXX XXXXX XXX XX XXXXXXXXXXXX X XXX XX XXXXXXXXXXXX XXX
XXXXXXXX XXXX XXXX XX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXX XXXXXXX XXXX XXX XXXXXXXXXXXXXX XXXXXXX XXX XXXXXXXX XXXXX XXX
XXXXX XXXXXX XX XXXXXX XXXXXXX XXX X XXX XX XXXXX X XXXXX XXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXX XXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXX XXXX X XXXXXXXX XXXXXXXXXXX XXXXXXX XX XXXXXXXX X XXX XX XXXXXX
XXXXXXXXXXX XX XXXXXXXX XXXXX XXX XXXXXX XX XXXXXX
XXXX XXXX XX XXX XXXXXX XXXXX XXXXX XXX XXXXXX XXXXXXX XX XXX XX XX XXXX XXX
XXXXX
XXX XXXXXX XXXXX XXXX XXXX XXXXXXXXXXX XX XXXXXXXX
X XXXXXX XX XXXX XXXXXXX XX XXXXXXX XX XXXXX XXXX XXX XXXXXX XX XXXXXXXX
XXXXXXXXXX XXX XXXX XXXX XX XXXXXXX
X XXXXXX XX XXXX XXX XXXXX XXXX XXX XXX XX XXXXXX XX XXXXXXX XX XXXXX XXXX
XXX XXXXX XXXXXXXXXX XXX XXXX XXXX XX XXXXXXX
X XXXXXX XX XXXX XXX XXXXXX XXXXX XXXX XXX XXXXXXXX XXXX XXX XXXXXX XX
XXXXXX XX XXXXXXX XX XXXXX XXXX XXX XXXXXXXX XXXXXXXXXX XXX XXXX XXXX XX
XXXXXXX
X XXXXXX XX XXXXXX XX XXXXXX XX XXXXXXX XX XXXXX XXXX XXX XXXXXXXX
XXXXXXXXXX XXX XXXX XXXX XX XXXXXXX
XXXXXXXXXXX XXX XX XXX XXX XXXX XXX XXXX XX XXXXXXX XXX XXXX XXX XXXXXXXX
XXXXXX XXXXXXXXX XX XXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXX
XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX XX XXXXXXXX XX
XXXXXXXXX XXXXXXXXXXX XXX XXXXXXXXX XXXXXX XXXXXXXXX XX XXX XXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXX XXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXX XXXXXX XXXXX XXXXXXX
XXXXXXX XX XXX XXXX XXX XX XXX XXXXX XXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXX XX XXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XX XXXX XXXXXX XXXX XXXX XXXXXXX XXXXXXXXXXX XX XXXX XXXXXXX XXXXXXX
XXX XXXX XX XXX XXXXXXX XXX XXXX XXXXXX XXXXX XXXXXXX XX XXX XX XXXX XXXXXXXXX
XXXXXXXXXXXXX
XXXXX XXXXXXXXXXX XXXX XX XXXXXXX XXXX XXX XXX XXXXXXXXXXXXXXXXXXX XXXXXXX
XXXXXXXXXXX XXX XXXXX XXXX XXX XXX XXXXXXXXXXX XXXXX XXXXXX
XXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXX XXXXXXXXXXX
XXXX XX XXXXXXX XXX XXX XXXXXXXXXXXXXXXXXXXX XXXXXXX XX XXXX XX XXX XXX XXX
XXXXXX XXXXX XXXXXXXXX XX XXXX XXXXX XXXXXXXXXX XX XXXX XXXXXX XXXXXXX
XXXXXXXXXXX XXX XXX XXXXXX XXXX XXXX XXX XXX XXXXXXXXXXXXXXXXXXX XXXXXXX
XXXXXXXXXX XXXX XXXXXXXX XXXX XXXXXXX XXXXXXXXXXX XX XXXXXXXXX XX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXXX XXX XXXX XX XXXXXXXXXXX XXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXX X XXXXX XXXXX XXXXXXXX
XX XXXX XXX XXXXX XXXXXXXXXXX XXX XXXXXX XXXX
X XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX XXXXXX XXXXXXXX
XXXXXXXXX
XXXXXX
XXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX X XXXXXXX XXX XX
XXXXXXXXXXXX XXXXX XX XXX XXX XXXXX XXXXXXXXXXXX XX XXXX XXXXX XXXXXX XX XXXXX
XXXXXX X XXXX XXX XXXXXX XX XXX XXXXXX XX XXXXXXX
X XXXX XX X XXXXX XXXXXXXXXXXXX XXX XXX XXXXXXXXXXX XXXXXXX XX XXXX XXXXXX XXX
XXXXXXXX XX XXX XXXXX XXXXXX XXXXXXXXX XXX XXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XX XXXX XXXXX XXXX XXXX XXXX XXXXXXXXXXX
XXXXXX XXXXXXXXXXXX XXXXXX XXX X XXXXXXXXXX XXX XX XXXXXXXXXX XXXXX XX XXXX
XXXX XXXX XXXXXX XX XXXXXXXX XXXXXXXXXXXXXX XXX XXXXXXXX XXX XXXXX XXXXXX X
XXXXX XXXXXXXXXX XXXXXXXXX XXX XXX XXXXX XXXXX XXXX XXXX XXXXXX XXXX XXXX XXXX
XXXXXX XX X XXXXXXXXXXXX XXXXXXX XX XXXX XXXXX XX XXXX XXXX XXXXXXXXXXXX XXXXX
XXXXXXXXX
XXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXX XX XXXXXXX XXXXXX
X XXXXXXX XXXXXXXX XXXXXX XXX XXX XXXX XXXXXX XXXXXXXXXXX XXXXXXXXX XXX
XXXXXXXX XXX XXX XXXXXX XXX XXXXXXXXXXXXXXX XXXXXXXXXX XXX X XXXXXXXXXXXX XXXXX
XX XXXXXXXXXXX
XXXX XXXXXXXXXXXX XXXXXX XXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX
XXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XXXXXXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXX
X
XXX XXXXXXXXXX XXX XXXX XX XXXXXXXX XX X
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX XXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XX XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX
XXXXXXXXXXXXXXX XXXXXXXXXX
XX XXXXXXXXXXXX XXXXX XXXXXX XXXX XXXXX XXX XXXXXXX XXXX
XX XXX XXXX XX XXXXXX XXXXXXXXXXXXXXXXX XXX X XXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX XXX XXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXX XXXXXXX
XXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX XX
XXX XXXX XXXXXX XXXXX XXX XXXXX XXXX XXXX XXXX XX XX XXXXXXX XXX X XXXXXXXXXXX
XXXXXX XXXX XX XXXXXXXXX XXXX XXX XXX XXXXXXXXXXXXXXXX XXXXX XXXXX XXXXXXXXXXX
XXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXXX XXXXX XXXX XXX XXXXX XXX XXX XXXXXX XXX
XXXXXXXXX XX XXX XXX XXXXXX XXXXXXXXXXX XXX XXXXXXXX XXXX XXXXXXXXXXX
XXXXXXXXXX XX X XXXX XX XXXX XXX XXXXXXXX XXX XXXXXXX XXXXXXXX XX XX XXXXXXXX
XXX XXXX XXXX XXX XXXXXXXXX XXX XXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX XXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXX XXXXXX XXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXX X XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
X XXX XXXXXXXXXX XXXXX XXXX XXXXX XXX XXXXXXX XXX XX XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXX
X
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXX XXX XXXXXX XXXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXXXX
X XXXXXXX XXX XXXXXXXX XX XXXX
X XX XXXXX XXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXX XXX XXXXXX
XXXX X XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
X XXXXXXXXXX XXXXX XX XXXXXXXXXXX XXXX XXX XXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXXX
XXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXX
XXXXXXXXXXXX
XXXXX XXXXXX XXXX XXXXXXX XXX XXXX XXX XX XXXXXXXX XXXXXXX XXXXXXXXXXX XXX
XXXXXXX XXXXX XXX XXX XXXXXXX XXXX XX XXX XXXXX XXXXXX XXXXX XXXXXX XXXXX
XXXXXXX XXX XXXXXXXXXXX XX XXX XXXXXXXX XXXXX XXXX XXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXX
XXXXXXXXXXX X XXXXXXXXXXXXXXXXXXX XXXX XXX XXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX
XXXXX XXXXX
XXXXX X XXXX
XXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXX XXXX XXXXXXX XXXXXXXXX
XXX X XXXXX XXX XXXXXXX XXXX XXX XXX XXXXX XXXXXX
XXX XXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXX XXX X XX XXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXX
XXX XXX XXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX
XX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XX XXX XXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXX XX XXXX XXX
XXXXXXXXXXXXXX XXXXXX XXXX XXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXX X XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XX XXXXX XXXXXXX XXXXX XXXXXXXXXX XXX XXXXXXX XXXXX XX XXX XXXXXXX XXXX XXX XXX
XXXXXX XXX XXXX XXXXXXXXX XXXX XX XXX XX XX XXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XX XXXX XX XX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXXX XXXX XXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XX XXXXXXXXX XXX XXXXXXXXXXXXX XXXXXX
XXX
XXXXX
X XX XXXXXXXXX XXX XXXXXXXXX XXXXXX
XXX
XX XXXXXXXXXXXXXXXXXXXXXX
XXX XX XXX X XXXX XX
XXXXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXXXXX XXXX XXX XXXX XX XXXXXX XX XXX XXXXXXX XXXXXXX
X XXXX XX XXXX XXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX
XX XXX X XXXX XXX XXXX X XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX
XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX X
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXX XXXXXX XX XX XXX XXXXXXXX
XXXXX XXXXXXXX XXXXXXX XXXXXXXXXX
XXXX XXXX XXX XXXX XXX XXXXXX XXX XXXXXXXXX XXXXXXX XX XXXXXXXX XX XXX
XXXXXXX XXXXX X XXXX XXXX XXX
XXXX XXXXXXX XXXXX XXX XXX XXXXX XXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXX XXXXX
XXX XXXXXXXXXXXXXXXXX
XXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXX
XXXX X XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XX XXXX XX XXX XXXXX
XXXXXXXXXXXXXX XXXXX
X XXXXXXXX XX X XXXXXXX XXXXX
XXX
XXXXX
X XXXXXX XX XXXXXXXX XXXXXX XXXXX XXXXXXXX
XXX
XXXXXXXXX XXX XXXXXXXXXXXXXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX X XXXX XXXX XXX XXX XXXXXX XX XXX XXX XXXXXXX XXXX XXX XXXX XXX
XXXXXXXXXXXXXX XXX XXXXX XX XXX XXXXXX XXXXXXXX XXXX XXXXXX XXX XXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXX XXX XXXXXX
XXXXXXX XX X XXXXXX XXXXXXXX XXX XXXXXXXXXXXXXX XXXXXXX XX XXXX XX XXX XXXXXXX
XX XXXXXXXX XX XXXXXXXX
XX XXX XXX XXXXX XX XXX XXXXXXXX XXXXXXXXXXX XXXXXXXXX XX XXXXXXXXX
XX XXX XXX XXXXX XX XXX XXXXXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXXX XXXX XXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXX XXXXXXXXXXXXXXXX XXXXXXXXX XX XXX XXXX XXXXXX XX XXXXXXXX
XX XXX XXX XXXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXX XX XXXX
XXXX
XX XXXXXXXXXX XXXXX XX XXXXXXXXXX
XX XXXXX X XXX XX XXX XXXXX XX XXX XXXXXXXXXXX XXXXXXXX XX XXX XXXXXXXXXXXXXXXX
XXXXXXXXX XXXXXX XX X XXXXXX XXXXXX XXXX XXXXXX XXXXX XXXX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX XXXXXX XXXXXXX XXXXXX
XXX XX XXX X XXXX XXX
XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXXX
XX XXX XXX X XXXX XXX XXX XXXX XXXXXX XX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX XXXXX XX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX XXX XX XXXXXX XXXXXX
XXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX
XXX XXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXX XX X XXXXXXX XXXXX
XXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXX XXX XXXXXX XX
XXX XXXX XXXXXX XXXXXX XXX
XXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXX XXXX XXX
XXX XXXXXXX XXXXXXX XX XXXXXXXXXX XXXXXXX XXXX XXX XXXXXXXX XXXX XX
XXXXXXXX XXXX XX XX XXXXXXX XXXXXXX XXXXXX XXXX XXXXX XXX XXXX XXX XXXXXXX
XX XXX XX XXX XXXX XXXXXX XX XXX XXXXXXXX XXXXXX XXXXXXX XXXXX XX XXX XXXX
XX XXX XXXXXXXX XXXX XXX XXXXXXX XXXX XXXX XX XXXXXXXXX XX XXX XXXX
XXXXXXXXXXX XXXXX XXXXXXX XXXX XX XXXX XXXXXXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XXXXXX XX XXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXX
XXXXXXXXXXX
XXX XXX XXX XX XXXXX XXXXXX XX XXXXX XX XX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXX XXXXXXXX XX X
XXXXX XXXXXX
XXXX XXXXXXXXXXX XXXXXX XXXXXXXX
XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
X XXX
XXXXX XXXXXXX XX XXXXX XXXXXXXXX
XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXX
XXX XXXXXXXXXXXXXXXXX
XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
X XXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XX X XXXXXXXXX XXX XXX XXX XXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXX XXXXXXXXXX
X XX XXX XXXX XXXXX XXXXXX XXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XXX XXXXXXX XXXXXXXX
XXXX XX XXX XXXXX XXXXXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XX XXX XXXX XX XXXXXX XXX XXXXXXX XXX XXXX XXXXXXXXX XXX XXXX XXXX XX
XXXX XX XXXXXX XXX XXXX XX XXXXXX XXX
XX XXXXXXXX XXX XXXX XXXX XXX XXXX XXXXXX XX XXXXXXXXXX XX XXXX
XXXXXXXXXX XXXXXXXXXXXXXX XX XXXXXX XX X XXXXX XXXXXX XXXXXXXXX XXXXXX
XXXXXXXXXXX XX XXX XXXXX XXXXXX XX XXX X XXXXXXXXX XXXX XXX XXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX
XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XXXX XXXX XX XXX XXXXXXX X XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XXXX
XXXXXX XXXX XX XXXXXXXXX XXXX XXXXX XXXXXXXX XX XXXXX XXXXX XXX XXXXXXXX
XXXXXXX XXXXXXXX XXXXX XXXXXX XXX XXXXXXXX XXXX XXXX XXX XXX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXX XX XXX XXX XXXXXX XXXX XXXXXXXXXX XXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXXXX XX
XXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XXXX XXXX XX XXX XXXXX XXXXXXX XXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXX XXXX XX
XXXXXX XXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXX XXXX XXXXX
XXXX XXX XXXXXXXX XXXXXXXXXXX XXX XXXXXXXX XXXXX XXX XXXXXXXXX XXX XXX
XXXXXXXXX XXXXX XX XXXX XXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXX XXXXXX XXXXX XX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXX XXXXXXX XXXX XXXXXXXX
XXXXX XXX XXXXXXXXXXX XXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXX XXXXXX XXX
XX XXXXXX XXXXX XXXX XXXXX XXXX XXXXXX XXXX XXXXXXX XXXXXXX XXXXXX XX
XXXXXX XXX XXXXXXXX
XX XXXXXX
XXX XXXXXXXXXXXXXXXXXX XXXXXXXXX XXXX XXX XXXXX XXX XXXXXXXXXXXXX XXXX XX X
XXXXX XXX XXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXX
XXXXXX
XX XXXXXXXXX
XX XXX XXX XXXXXXX XXXXXX XXXXX XXX XXXXXXXX XXXXX XXX XXXX XXX XXXX
XXXXXXXXXXXXX XXXXX XXXX XXX XXXXXXXX XXXXX XXXXX XXX XXX XXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX X XXXXXX XXXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXX XXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX
XXXXXXX XXX XXXX XXXXXXXX XX XXXX XXXXXXXXXXXXXXXXXX XX XXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXX XXXX XXXXX XXXXXX XX XX XXX XXXXXXXX XXXXXXXX XX XXX
XXXXXXXXXXX XXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXX
XX X XXXX XX XXXXX XXXX XXXXXX XXX XXXXXXXX XX XXXXXXXXXXXXXXXXX XXXXX XXXX
XX XXXXXXXXXX XX XXX XXXXX XXXX XX XXXXX XX XXXX XXX XXXXXXXXX XXXXXX
XXXXXXXXX XX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX
XXX XXX XXX XXX XX XXX XXXXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXX XXX XXXXXXXX
XX XXXXXXXXXXXX XXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXXXXXXXX X XXXXXXXXX
XXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXX
XX XXXXXX
XXXX XX XXX XXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXX XXXXX XXXX XXX XXXXX XXX
XXXXXXXXXXXXX XXXX XX X XXXXX XXX XXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXX XXXXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XXXXXX XX XXXXXXXXX XXXXX XXXX XXXX X XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXX XXXXXX XXXXX XX XXXXXXX XXXXXXXXXXX XX XXXX XXXXX XXXXX XXXXX XX
XXXXXXXXXXX XXX XXXX XXXXX XX XXXXXXXXX XX XXX XXXXXXXX XXXXXXXX
XXX XXX XXX XXXX XXXX XX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX
XXX XXXX XXXXXXXXX XXX XXXXXXXX XXXX XXXX XXXXXX XX XXXX XXXX XXX XXXX XXX XX
XXXXX XX XXX XXXXXXX XXXXXX XXX XX XXXX XXXXXXXXX XX XXX XXXXX XXXXXX
XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXX
X XXX
XX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XX X XXXXXXXXX XXX XXX XXX XXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXXX XXXXXXXX X XXXXXXXX XXXX XXX XXXXXXXX XXXXXXX XXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX X XXXXXXXX
XXXXXXXXX X XXXXXXXX XXXX XXXXX X
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX XXXXXXX XXXXXXXX XX
XXX XXXX XX XXXXXXX XX XXXX XXX XXXXX XXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXX
XXXXXXXXXXXXX XXXXX XXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX
XXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXX
XXXX XXX XXXXXXX XXX XXX XXXX XXXXX XXX XXXXX XXXX XXX XXXX XXXX XX
XXXXXXXXXX XXX XX XXX XX X XXXXX XXXX XXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XX XXX XXXXX XXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXX
XXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XX XX XXXXXXXX XXXXXXX XX XXXX XXX XXXX XXXXX XXX XXX XXXX XX XX
XX XXX XXX XXXXXXXXXXX XXXXX XXXX XXXXX XXXX XXX XXXX XX X XXXXXXXXX
XXXX XXXXX XXXXXXX XX XXXXX XXXXXX
XXX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXX XXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXX XXX XXX XXXXXXXXXXXXXXXXXXXXXXX XX XX XXXXX
XX XXXXXXXX XXXXXXXXXXX
XXX XXXX XX XXXXXXXX XXX XXXXXXXXXXXXXXX XXXXXX XX XXX XXXXX XX
XXXXXXX XXX XXXX XXXX XX XXXXXXXXXX XXXXXXXXXXXX XXX XXX XXX XXX XX XXX
XXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX
XXXXXXXXX XXX XXXXXXXX XX XXXXXXXXXXXX XXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXX
XXX XXX XXXX XXXXXXXX XXX XXXXXXXXXXXXXXXXXXX XXXXXX XX XXXX XXX XXXXX
XXX X XXXXXXXXXXX XXXXX XXXXXXXX XXX XXX XXXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XX XXX XXX XXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXXXX XXX XXXXXX XXXXX
XXXX XX XXXX XXXXXXXXXXX XXXXX XXX XXXXXXXXX XXXX XXX XXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXX
XXX
XX XXXXXXXXXXXXXX XXXXX XXXX XXXXXXXXXXX XXX XXXX XXXX XXXXXX XXXX
XXXXXXXX XXXXXXXXXXXXXX XXXXXXXX XXXX XXXXXXXXXX XXXXXXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXXX X XXXXXXXXXX XXXXXX XXXX XX XXXXX XXXXXXX X XXXX XXX X XXXXXXXXXX
XXXXXXXXXXX XXX XXXX XXXXXXX XXXXXX XXXXXXXX X XXXXXXXX XXX XXXX XXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XXXX XXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXXXXXXXXX XXXXX XXXX XXX XXXX XXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXX X XXXXXXXXXX XX X XXXXX XX XXX XXXXXXXXX
XXXXXXXXXXXXX
XXX XXXXXXXXX XXX XXXX XXXX XX XXXXXXXX XX XXXXXXXXXXXX XX XXXXX XXXX XXX
XXXX XXXX XXXX XXX XX XXX XXXXXXXXXXX XX XXXXX XX XXXXXX XXX XXXXX
XXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXX XX XXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XX XXX XXXXXXXXXXXXXXXXXXX XXXXXXXXX XX XXXXXX XXX XXXXXXXXX XXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXXXX XXX
XXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XX XXXXXXXXXXX XX XXX
XXXXX XXXXX
XX XXX XXXX XX XXX XXXXXXXXXXXXXXXXXXX XXX XXXX XXXX XXXX XXXXX X XXXXXX XX
XXXXX XXXXXX XXX XXX XXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX
XXX
XXXX XXXX XXXXXX X XXXXXXXX XXXX XXXX XXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX XXXXXXXXX XXXX XXXXXXX XXXX
XXX XX XXX XXXXXXXX XXXXXXXXXXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXX XXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXX XXXX XXXX XXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXX XXXXXX XXXXXXX XXX XXXX XXXXXXXXX X XXXX XXX XXX XXXXX
XXXXXXXXXXXX XXX XXXXXX XXXXXXX XXX XXXXXXXXXX XXX XX XXXXXXXX XX
XXXXXXXXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXXXXXXX
X XX XXXXXXXX XX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXX XXX XX XXX XXXXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXX XXX XXXXXXXX
XX XXXXXXXXXXXX XXXXXX
XXX XXX XXXX XXXXXXXX XXXXX XXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XX XXXXXXXX XX XXXXXXXXXX XXXXX XXXX XX XXX XXXXXX XXXXXXXX XX
XXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXXXXXX XX X XXXXX XX
XXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXX X XXXXXXX XXXXXXXX XXXXXXX XXX XXXXXXX XXXX XXX XXXXXXXXXX XX
XXXXXXX XXX XXXXXXXXX XXXXX XX XXXXXXXX XXXX XXXXXXX XXX XXXXXX XX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXX
XXXX XX XXXXXXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXX XXXXXXXX XX XXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXX XXX XXXXXXXX XX XXXXXX XXXXXXXXXXXX XX XXXXXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXX XXX XX XXXX XX XXXXXXXXX
XXX XXXXXXXX XX X XXXX XXXX XXXXXX XX XXXXXXX XXXXXXXXXXXXX XXXXX XXX XXXXXX
XXXXXX XXXX XX XXXX XXX XXXXXXXXX XXXXXXXXX XXXXXXXXX XXXXX XXX XXXXXXXXXX XX
XXX XXXXX XXXX XX XXXXX XX XXXX XXX XXXXXXXXX XXXXXXXXX XXXXXXXXX XX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX
XX XXXXXXX XXXXXXXXXXX
XX XXXXXXXXXXX XXXXXXXXX
XXXXXXX XXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XX XXXXXXXX
XX XXXXX XXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XX XX XXXXX XXXXXXX
XX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XX
XXXXXXXXXXX
XX XXXXXXXXXXX XXXXXXXXXXXXXXX
XX XXXX XXXXXXXXX XX XXX XX XXXXXXXXX X
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XX XXXXXX
XXXX XXX XXXXXXXXXX XXX XXX XXXX XXXX XXXXXXXXX XXXX XXXXXXXXX
XXXXXXXXX XXXXX XXX XXXXXXXXXX XX XXX XXXXX XXXXX
XX XXXXXXXX XXXXXXXXXXXXXXX
XXXXXXX XXX XXX XXXX XXXXX XXX XXXXX XXXX XXX XXXX XXXX XX XXXXXXXXXX
XXX XXXXXXX XXXXXXXXXXXXXXXXX XX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXX XXXX XXXXXX XXX XX XXXX XX
XXXXXXX XXX XXXXX XXXXXXX XXXXXX XX XXX XXXXX XXXXXXX XXX XXXXXXX XX
XXX XXXXX XXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XX
XXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXX XXXX XX XXX XXXXX XXXXXXXXX XXXX XXXX XXXXXXX XXX XXX XXX
XXXX XXXXXX XX XXXXXXXXXX XX XXXXX X XXXXXXXXXX XXXXXX XX XXX XXX XXXX
XX XXXXXXXXX X XXXXX XXXXXXXXX XXXXX XX XXXXXX XXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XX XXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XX XXX XXXXX XX XXXXXXXXXXXXXXXXXXXX XXX XXXXXX XXXXXX XXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XX
XXXXXXXXX XXX XXXX XX XXX XXXXXXXXXXXXXX XXXXXXXXXX XXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXX XX XX XX XXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXX XX XXXXXXXX XXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXX XXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXX XXXXXXXX XXXX XXXXXXX XXX XXXX XXXXXXXX XX XXXX XXXXXXXXX
XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXX XX XX
XXXX XX XXX XXXXXXXX XXXXXX XXXXXX XXXXXXXX XXXX XXX XXXX XX XXX XXXXXXX XXX
XXXX XXXXXXX XXXXXXX XXX XXX XXXXXX XXXXXXXX XXXXXX XXX XXXXXXXX XXXX XXXXXX X
XXXX XX XXX XXX XXX XX XXXXX XXXXXXXX XX XXXXXXXX XXXXX XXXXXXXXX
XXX XXXXXXX XXXXXXXX XXXXXX XXXXX XXXXXXXX XXXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX
XXXXXXXXXXXXXXXXXXXXXXXX XXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX
XXX XXXXXXX XXXX XXX XXX XXXXXXXX XXXX XX XXXX X XXXX XXXXXXXX XXXXX XXX
XXXXXXXX XXXXX XXX XXXXXXXXXX XXXX XX XXX XXXX X XXXXXX XXXXXXXX XXXXXX XXXX
XXX XXXX XX XXXX XXXXXXX XXXXXXXXX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX
XXXX XXXXXXXX XXXXX XXX XXXXXXX XXXXXXX XXX XXX XXXXXXX XXXX XXXXXX XXXX
XXXXX XXX XXX XXXXXXX XXXX XXXX XX XXXXXXX XXX XXXXXXX XXX XXXXXXX XXXX
XXXXXXXXXXXXXX XX XXXX XXXXXXX XXX XXXXXXX XXX XX XXXX X XXXXXX XXXXXXX
XXXXXX XXXX XX XXXXXXXXXXXX
XXXXXXX XXXXXXX
XXXX XXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXX XX XXXXXXX
XXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX
XXXXX
XXX
XX XXXXXX
XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXX XX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXX XXXX XX XXX X XXX XXXXXX
XXXX XXXXXXXXXX XXX XXXXXXXX XXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXX XXXXXXX XXXXX XXXX XXX XXX XXX XXX XXXXXXXX XXXXXX XXXXXXX XXX
XXXXXXXX XXXXXXXXXXX XXXXX XXXX XXX XX XXX XXXXXXXXXXX XXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXX XXX XXX XXX XXXX XX XXXX XXX XXXXX XX XXXXX
XXXXXX XXXXXXXX XX XXXXXXX XXXXXXXX XXX XXX XXXXXXXXXXXXXX XXXXXX XXX XXXXXX
XXXXXX XXXX XXX XXXXXXXXX XXX XXX XXXXX XXX XXXX XX XXXX XXX XXXXXXXX XXXXXXX
XX XXXXXXXXXX XX XXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXX
XXXXX XXX XXXXX
XXXXXXXXXXXXXXX
XXXXX XXX XXXXXXXXX XXXXXXX XX XXXXXXXXX XXXXX XXXXX XX XXXX XXXXXXXX XXX
XXXXXXX XXX XX XX XXXXXXX XXX XXXXXXXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXX XXX XXXXXXXX XXX XXXXXXXXX
XXXXXXXXXXX X X
XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X
XXXX XXXX XXXXXXX XXX XXXXXXXXX XXX XXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXX XXXXXXX X XXX XXXX XXX XXXXXX XXXXXXXXXX XXX XXXXXXXXX XXX
XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXX XXXXXXX XX XXXXX XXXXX XXX XXXXXXXXX
XX XXX XXXX XXXX XXXXXXX XXXX XXXX XXXXX XXX XXX XXXXXXXXX X XXXXXXXX XXXX XX
XXXX XXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXX XXXXXX XXXXX XX XXXXXXXXXX
XXXXXXXXXXX X X
XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X
XXX XXXXX XXXX XXXXXXXX XXXXXXXXX XXX XXX XXX XX XXXXX XXX XXXXXXXX XX XXX
XXXXX XXX XXXXXXXX XX XXX XXXX XX XXXXXX XXX XXXXXXXX XXXX X XXXX XXXXX XXX XXX
XXXXXXX XXX XXXXXXXXXXXXXXXXX XXXXXXXXX X XXX XX XX XXXX XX XX XXXXXXX XXXXXXX
XXXXXXXXX XX XXX XXXXXXXX XXXXX XXXX XX XXXXXX XX XX XXX XXXXX XXX XXXXXXXXX
XXXXXXXXXXX X X
XXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX
X
XXX XXXXX XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXX
XXX XX XXXXXX XXXXXXXXX XXXX XX XXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXX
XXXX XX X XXXX XXXX XXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXX
XXXXXXXXXXXXXX XXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXX
XXXXX XXXXXXX XXXXXXXXX
XXX XXXXXXXXX XXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXX XXXXXXX XX XXXXX
XXXXX XXX XXXXXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XX X XXXXXXXX XX XXXXXXX XXX XXX XXXX XXXX XX
XXX XXX XXXX XXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XX X XXXXXXX XXXXX XXXXXXXXXX XXX
XXX XX XXXXXXXX XX XXXXX XXXXXX XXXXXXXX XX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXX X XXXXXXXX XXXXXXXXXX X XXXX XXXXXX XX XXX XXX
XXXXXXXXXXXXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXXXXXX XXXX XXXXXXXX XXXXXXX XX XXX
XXXXXXXXXXXXX XXXXX XXXXXXXXX XXX XXXXX XXXX XXXX XX XXXXXXXXXX XX XX
XXXX XXX XXXX XXXXXXXXXXXX XXXXXX XXX XXXXXXXX XX XXXXXXXXXX
XX XXXXXXXXX
XX XXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXX XXXX XX
XXXX XX XXXXXXXXX XX XXXXX XXXXXXXX XXX XXXXXXXXXXXXX XX XXXX XXXX XX
XXXXXXXXXX XXXXXXXX XXXX XX XXXXX XXXXX XX XXXX XXXXXXXX XX XXXXX
XXXX XXXXXXXX XXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXX XXXX XXX XXXXXX XXX XXXX XXXXXXX XX X XXXXXXXX XXXXXXX
XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XXXXXX XX X XXXXXXXX
XXXX XXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXXXX XXX XXXXXXXXXXXXXXXXXXX XXXXXXXXX XX XXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXX XX XXXXXX XX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXX
XXXX XXX XXXXXXXXXXX XXXXX XXXXXX XXXXXXXX XX XX XXXXX XXXXXXXXXXXXX
XXXXXX XXXX XXXXXXXXXXXXX XXXXX
X XX XXXXXX XXX XXXXXXXX XX XXXXXXXX X XXXXX XXXX XXXX XXXXX XX XXX
XXXX XXXX XXXX XX XXXX XX X XXXX
X XX XXXXXX XXX XXXXXXXX XXXX XXXX XXXXXXXXX XXXXXXXXXXXX XX XXXXX XX XXX
XXX XXXX XXX XX XXXXX XX XXXXXXXXXXX XXX XXXX XXXXXXXXX XX XXX XXX
XXXXXXXXX XX XXXXXXXXX XX XXXXXXXX XXXXX XXXXXXXXX XX XXXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXX XX XXXXX XXXXX XXXXXXXXXXX XX
XXXXXXXXXX XXX XXXXX XXXXX
XXXX XXXX XXXXXXXXXXXXXX XX XXXXXXX XXX XXXX XXX XXX XXXXX XXXXXXXX
X XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXX XXXX XXXXXXXX XXXX XXXXXX
XXXX XXXXXXXX XXXXXXX XXXXXXXXXX
X XXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXX XXX XXX XX XXXXXXXX XX XXXXX XXXXXXXXXX XXXXXX XXXX XXX
XXXXXXX X XXXXX XXXXXXX XXXX
X XXXXXXXXX XXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XX XXX XXXXXXXXXXXXXXXXXX XXXXXXXX XX XXX XXXXX XXXX XXX
XXXX XXXXXXXXX XXXXXXXXXX XXXX XXXX XX XXX XX XX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXX XXX
XXXX XXXX XXX XXXXXX XXXX XXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXX XX XXXXX XXX XXXXXXXXXXXXXX XX XXX XXXXX XXXX XXX XXXX
XXXXXXXXX XXXXXXXXXX XXXX XXXX XX XXX XX XXX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXX XX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXX XXXXXX XXX XX XXXX XXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXXX XXX XXXXXXXXXXXXXXXXX XXXXXXXXX XXX XXX XXXXX XXXXXXXXX XX
XXX XXXXXXXXXXX XXXXXX XX XXXX XXXXXXXX XXX XXXXXXXX XXXX XXXXXXX XXXX XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXXX XXXXXXX XXX XXXX XX XXX XXXXXXX XXXXX XXXXX XXXXXXXX XXX XXX
XX XXXXXXXX XX XXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXX XXX
XXXXX XX XXXXXX XXXXXXXXX
XXXXXX X XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXX XXX XXX XX X
XXXXXXXX XXXXXX XX XXXXXXX XXX XXXX X XXXXXXXXXXXXXXXXX XXXXXXXX XXXX
XXXXXXX X XXXXXXXXXXX XXXXXX
XX XXXXXXXXXXXX XXXXXXXXXXX
BBBBBBB BBBBBBBBBBB
BBBBB BBBBBBB
BB BBBBBBBBBBB
XXXXXXX XXXXXXXX XXX XXXXXXXX XXXXXX XXXXXX XXXXXX XXX XXXXXXXXXX
BBBBB
BB BBBB
BB BBBBBBBBBBBBBBBBBBBBB
XXXXXXX XXXXXXX XXXXXXX XXXX XXXXXX XX XXXX XXXXX XX XXXXXXXX
XXXXXX XXXXX XXXX XX XXXXXXX XXXX XXX XXXXXXXXXXX
BBBB
XXXXXXXXX XXXXX XX XXX XXXX XXXXXXXXX
BBBBB
BBBBB
XXXXX XXXXXXXXXXXXX XXXXXXXXBBB BBBBBBBXX
BBBBBBBBBB
XXXXXXX
XXXX
XXXXXXXXX
XXXXXXXXX
XXXXX
XXXX
XXXXXXXXX
XXXXXXXXX
XXXXX
XXXXXXXX
XXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXX
XXXXXXX
XXXXX XXXXXXBBB BBBBBBBBBBBBBBBBXXXXXX XXXXXXXXXXXXXXXXX
BBBBBBBB
XX XXX XXXX XXXXXXXXXX XXXXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX XXX X XXXXXX XXXXXXXXXXXXXX XXXX XX
XXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXX XXXX XXXX XXXXXX X
XXXXXXXXXXX XXXXXXX XXXXXXXX XX XXX XXXXXXXXXXXXXX XXXXXX XXX XXXXXXX X
XXXXXXXXXXXXXX XXXXXX XXXXX XXXXXXX XXX XXXXXXXXXXXXX XXXX XXXXXX XXXXX
XXXXXX XX XXXX XXXX XXXXXX XXXXX XXXXXXXXXX XXXX XXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXX
XXXX X XXXX XXXX
XXXXX XXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXX XXX XXX XX XXXXXXXX XX XXXXX XXXXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXXXXXX XXXXX
XXXXXXX XXX XXXX XXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XX X XXXXXXX XXXXX XXXXXXXXXX XXX
XXX XX XXXXXXXX XX XXXXX XXX XXXX XXXXXXXX XX XXXXXXXXX XXXXXXXXX XXX
XXXXXXXXXXXXX XXX XX XXX XXXXX XXXXXXX XXXXXXXXX XX XXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXX XX XXXXXX XX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XXX
XXXX XXX XXXXXXXXXXX XXXXX XXXXXXX XXXXXXXX XX XX XXXXX XXXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXX
X XXXXXXXXXX XXX XXXXXX XXXXXXX XXXXX XXXXXXXXXX
X XXXXXXXXX XXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XX XXX XXXXXXXXXXXXXXXXXX XXXXXXXX XX XXX XXXXX XXXX XXX
XXXX XXXXXXXXX XXXXXXXXXX XXXX XXXX XX XXX XX XX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXX XXX
XXXX XXXX XXX XXXXXX XXXX XXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXX XX XXXXX XXX XXXXXXXXXXXXXX XX XXX XXXXX XXXX XXX XXXX
XXXXXXXXX XXXXXXXXXX XXXX XXXX XX XXX XX XXX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXX XX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXX X XXXX XXXX XXXX XXXXXXXXX XX XXX XXXXX XXXXX
XXXXX XXXXXXX XX XXXXXXX XXX XXXXXXXX
XXXXXXXXXX XXXXXXXXXXXX
X XXXXXXXXXXXXXX XXX XXX XX XXX XXXXX XXXX XX XXXXXXXX XXX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XX XXX XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXX
XXXXX XXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXX X XXXX XX XXXXXX XXXXX XXXXXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXX XXX
XXXXXXXXXX XXX XXXXXXXX XXXXXX XXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXX XXX XXX XX XXXXXXXX XX XXXXX X XXXXXXXXXX XXXXXXXX
XXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXX X XXXXXX XXXXXXX XXXXXXXXX XXXX XXXXX XXXX XXXXXX X
XXXXXXXX XXXXXXX XXXXXXXXX XXX XXXX XX XXXXXXXXXXX XXX XXXXXXXX XXXXXXXX
XXX XXXXXX XXXXXXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
XXXXXXXXXX XXXXXXXXXX
X XXXXXXXXX XXX XXXXXXXX XXXXXX XXXX XXXX XXXXXXXXXXXXXX XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXX XXXXX XXXXX X XXXX XXX XXXXXXX XXXXX XXXXXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXXX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX
XXXXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX
XXXXX XXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXX X XXXX XX XXXXX XXXXX XXXXXXXX XX XXXXXXXXXX X XXXXXXXX XXX XXXX
XXXX XXX XX XXXX XX XXXXX XXX XXXXXXXXX XXX XXXXXXX XXXX XXXX XX XXX
XXXXXX XXXXXXXXXX XXXXX XXXXXXXX
XX XXX XXXXX XXXXXXX XXXXXXXX XXXX XXX XXXXX XX XXX XXXXXXX XXXX XXXX
XXXXX XXXX XX XXXXXX XXX XXX XXXX XXXXX XXXXXXX XXX XXXXX XXXXXXX XXXXXXX
XXXX XXXXXXXX XXXXXXXXXXX XXXXXXX XX XXXXXXXXX XXXXXXXXXX XX XXX XXXX XX
XXXXXXX XX XXXXX XXXXXXX XX XXXX XXXXX XXX XXX XXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX XXX
XXXXXXXXXXXXXX XXXXXXXXXX
XX XXXXXX
XX XXXXX XXXX XXXXXXX XX XXXXX XXXXX XXXXX XXXXX XXXXX XXX XXX XX
XXXXXXXXXX XX XX XXXXX XXXXXXX XXXXXXXXXXX XXXXXX XXXXXX XXX XX X
XXXXXXXXXX XXXXXXX XXX XXXXXXXX XX X XXXXX XXXXXXX XXX XX XXXXXXXX
XXXXX XXXXXXX XXX XXX XXXXXXXX XX X XXXXX XXXXXXX XXX X XXXXXXXXXXX
XXXXX XXXXXXXX XX XXXXXX XXX XXXXXXXXX XXX XXX XXX X XXX XXXXX XXXXXXX
XXXX XXXXXX XX XXXX XXXXXX XXXXXXXXXXXXXXX XXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXX XXXX XX XXXXXXXX XXXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXXXXXX XX XXXXXXX X XXXXXXXX XXXXX XX XXXXXXX XXXXXX XXXX XXXXX XX
XXXXXXXX XXXXXXXXXXXXXX XXXXXX XXXX XXXXX XXXX XXXX XXXX XXXXX XXXXXXX XXX
XXXXX XXXXXXX XXXXX XXXX XXXXX XXXXXX XXXXX XXXXXXXXX XXXXXXXXX XXX XX
XXXX XXXX XX XXXX XXXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXX XXX
XXXXXXXXXX XXX XXXXXXXX XXXXX XXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXX XXXX XXXX XXXX XX XXXX XX XXX XXX XXXXX XX
XXX XXXX XX XXXXX XXX XXXXXXXX XXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXX XXX
XXXXXXXXXX XXX XXXXX XXXX XXX XXXXX XXXXXXXX XXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXX XXX
XXX XXXXXXX XX XXX XXXXX XXXX XXX XXXXX XXXXXXXX XXXXX XXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXX XXXXXXXX XX XXX XXXXX XX XXXXX XXX XXX XXXX XXXXX
XXXX XXXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXX XXX XXX XX XXXXXXXX XX XXXXX X XXXXXXXXXX XXXXXXXX XXXXX
XXXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXX X XXXXX XXXXX XXXXXXXX XX XXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXX
XXX XXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXX XXXX XXX XXXXXXXX
XXXXX XXXXX XX XXXXXXXX XXXX XXXXX XX XXX XXXXX
X XXXXXXXXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX
XXXXXXXXX XX XXX XXXXX XXXXXXXXX XX XXX XX XXXX XX XXXXXXXX XXXXXXX
XXXXXXXX XXXXXXX XXXXXX XXXXXX XXXXX XXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXX
X XXXXXXXXX XXX XXXX XXXX XXXXXXXXXXXXXX XXXXXX XXX XXXXXXXXX XXX XXXXXX
XXXXXXXXX
XXXXXXX XXXXXXXX XXXXXXXXXX
X XXXXXXXXXX XX XXXXX XXX XXXXXXXXXXXXXX
X XXXXXXXXX XXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XX XXX XXXXXXXXX XXXX XXXXXX XXXX XXXXXX XXXXX XXX XXXX XX
XXXXX XXXXX XXXXXXXXX XXXXXXXXXXXXXXXXX XX XXXXXXXX
X XXXXXXXXXXXXXX XX XXXXX XXX XXXXXXXXXXXXXX XX XXX XXXXX XXXX XXX XXXX
XXXXXXXXX XXXXXXXXXX XXXX XXXX XX XXX XX XXX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXX XX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXX XX XXXXX XXX XXXXXXXXXXXXXXXX XX XXX XXXXX XXXX XXX XXXX
XXXXXXXXX XXXXXXXXXX XXXX XXXX XX XXX XX XXX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXX XXXX XX XXXXX
X XXXXXXXX XXX XXXXXX XXXXXXX XXX XXXXXXX XX XXXX XXX
X XXXXXXXXXX XXXXX XX XXXXX XXXX XXX XXXXX XXXX XX XXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX XXXXXXXXXX
XX XXXXXXXXXXXX XXXXXXXXXXX
XXXXXXX XXXXX XXX XXXXXXXX XXXXX XXX XXXXX X XXXXXX XXX XXXX XXXXXX
XXXBBB BBBBBBBBBBBBBBBBBBBBBBBB BBBBBBBBBB BBBBBBBBBBB
XXX XXXX XXXXXXXX XXXXXXX XX XXXX XXX XXXXXXX XXXXXXXXX XXXXXXX XXXX XX
XXXXXX XXXX XXXXX XXXX XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXX XXXXX XXXXX X XXXX XXX XXXX XXXXXXX X XXXX XX XXXXX XXXXX
XXXXXXXXX XXXX XXXX XX XXXXXX XX XXXXXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXX XX XXXXXXXX XXXXXXXXXXXXXXX XXX XXXX
XX XXXXXX
XX XXX XXXXX XXXXXXX XXXXXXXX XXXX XXX XXXXX XX XXX XXXXXXX XXX XXXX XX
XXXXXXXXX XX XXX XX XXXXXXXX XXXXXXXXX XXX XXXX XXXX XXXXX XX
XXXXXXXXXX XX XXXX XXXX XXX XX XXXXX XXXX XX XXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXXX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX
XXXXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX X XXXX XXX XXXXXXXX X XXX XXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXX XXX XXXXXX
X XXXXXXXXXXX XXX XXXXXX XX XXXXXXX XX XXXX XXX
X XXXXXXXXXX XXXXX XX XXXXX XXXX XXX XXXXXXXX XX XXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXXXXXX XXX XXXXXXX
XXXXXXXX XXXXX XXXXXXX XXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXX XXXXXXXX XX XXX XXXXX XX XXXXX XXX XXXXXXXXX XXXX
XXXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXX X XXXXXXX XXXXXXXXXX XX XXX XXXX XXXXXX XX
XXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX X XXXXXXXXXX XXXXXXXX XXXXXX XXXXXXXX
XX XXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX X XXXXXX XXXX XX XXX XXXXXXXXXXXXXX
XXXXXXX XX XXX XXXX XXXXXXXXXXXXXX X XXXX XX XXXXXXXXXXXXXXXXXXXX XX
XXXXXXXXX XXXXXXXX XXXX XX XXX XXXX XXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXX XX XXXXXXXXX
X XXXXXXXXXXXXXXX XXXX XXXX XXXX XX XXXX XX XXX XXX XXXXXXXXX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXX XXX XX XXXXXXXX XXXXX XXX XXXXXXXX XXXXX XXXXX XXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXX XXXXXXXXX XX X XXXXXXXXX XX XXXXXXXX
XXXXX XXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXX
X XXXXXXXXX XXX XXXX XXXX XXXXXXXXXXXXXX XXXXXX XXX XXXXXXX XXX XXX XXXXXX
XXXXXXXXX
X XXXXXXXXXXXXXX XXXXXXXX XXXX XX XXX XXXX XXXXXXXXXXXX XX XXXXXXXXXX XXX
XXXXXXXXXX XX XXXXX XX XXXXXX XXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX X XXXX XXXXX XXXXXXX XXX XXXX XXXX XXX XXXXXXXX XXX XXXX
XXXXXXXXXXXX XXXXXXXX
XXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX XXX XXXX XXXX XX X XXXXXXXX XX XXXXXXX XXX XXXXX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXX X XXXXXXXXXX XX XXXXXXX XXXX XXXX XXXX XX XXXXX XX XXX
XXXXXXX XXXXXXX XXXX XXXXXX XX XXX XXXXXXXXX
XXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XX XXX XXXXX XXXXX XXX XXXX XXXX XX XXXXXXX XXX XXXXX X
XXXXXXXXXX XXXXXX
XXXXXXXXXX XXXXXXXXXXXX
X XXXXXXXXX XXX XXX XX XXXXXXXX XX XXXXX X XXXXXXXXXX XXXXXX
XXXXXXXXXX XXXXXXXXXXXX
X XXXXXXXXXXXXXX XXX XXX XX XXX XXXXX XXXX XX XXXXXXXX XXX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XX XXX XXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XX X XXXXXXX XXXXX XXXXXXXXXX XXX
XXX XX XXXXXXXX XX XXXXX XXX XXXX XXXXXXXXX XXXXXXXX XX XXX XXXXX
XXXXXXX XXXXXXXXX XX XXXXXXX
XX XXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XXXXX
XXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XX XXX XXXXX XXXX XX XXX XXX XXXXXXXX XXXXXX XXX XXXX XXX XXXXXXXXXXX XX XXX
XXXXXX XX XXXXX XXXXX XXX XXXX XXXXXXXXXXXXXX XXX XXXXXXXXXXXXXX XXXXXX
XXXXXXXX XXXXXXX XXXXXXXX XXXXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXX
XXX XXXXXXXX XXXXXXXXXXXXXX XXXXX XXXX XXXXXXX XXXXXXXXXXX XXXXX XXX XXXX
XXXXX XXXX XXXX XXX XXXXXXX XXXXX XX XXXXXX XXXXX X XXXXXXXXXXXX XXXX XXXXX
XXXXXXXXXXXXXXXXXXXX XX XXX XX XXXXXXXXX XX XXXXXX XXXX XXX XXXXX XXX XXX
XXXXXXXXXXXXXX XXXXXXX XXX XXXX XXXXXXXXXXXX XXXXX XX XXX XXXXXXXXXXXXX
XXXXX XXXXXXXXXXX XXX XXXXXXXX XXXXXXXXXXXXXX XXXXX XXXX XXXXXX XXXX XXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
X XXXX XXXX XX XXX XXXXX XXXXXXXXX XX XXXXXX X XXXXXX XXXXXXXXX
XXXXX XXX XXXXXXXX XX XXX XXXXX XXXXXXXXXX XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXX
X XXXX XXX XXXXXXX X XXXX XXX
XXXXX XXXXXXXXXXX XX XXX XXXXX XXXXXXXXXX XXXXXXXXX XXXXX XX XXXXXX XX XXX
XXXX XXXXXXXX XXX XXX XX XXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXX XXXXX XXXXXXXXXXXXX
XXXX XX XXX XX XXXXXXXXXX XXX XXX XXXXXXXX XXXX XXXXXXXX XXXX X XXXXXX
XXXXXX XX XXXXXXXXX XXXXX XXXXX XXX XXX XXX XX XXXX XXXX X XXXXXX XXXX
XXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXX XXXXXX XXXXXX XXXXX X
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXX XXXXX XXXX XXX
XXX XXX XXX
XXX XXXXXXXX XX XXXXX XXX XXXXX XX XXX XX XXXXXXXXXX XX XXXXXXXX
XXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXXX
XXX XXXX XXXXX XXXXXX XXXX XXXX XX XXX XX XXXXXXXXXXXXXX XXXXXXX XXXX
XXXXXX XXXXXXXX XXXXXX XXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XX XXXXX XXXX XXXX XXXXXX XXXXX XX XXX XXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XX XXX XXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX
XXXXXXX XXXXXXX XX XXXXXXXXXXXX
XXXXXXXXXXXXXXXX
X
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX
XXXXXXXXX XXXXXXXX XXXXXXXX XXXX XXX XXXXXX XXXXXXX XXXXXXXX
XXXXXXXXXXXXXXXXXX
X
XX XXXXXXX XXXXXXXXXXXXXXXXXX
X XXXX XXX XXXXXXXX X XXXX XX XXXXXX XXXXX XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX
X XXXX XXX XXXXXXXXXX XXX XXXXXXXX X XXXXXXXX XXX XXXX XX XXXXX X
XXXXXX XXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXX XXXXXXXXX XX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XX XXXXXXXXXX XX XXXXXXXXX XXX XXX XXXXX XX XXXX XX XXX XXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXX XXX XXX XXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXX XXX XXXXXXXX XXX XXX XXXXX XXXXX
XXXXXX XXXXXXXX XXXXXXX XXXXXX XX XXX XXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXX XXX XX XX XXX
XXXXXXXXXX
XXXXXX XXXXXXXXXXX XXX XXXXXXXX XXXXXX
XXXXXX XXXXXXXXX XXX XXXXX XX XXX XXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXX XXX XXX XXXX XXXXX
XXXXXXXX XX XXXXXXXXX XX XXXXX XXXX X XXXXX XXXX XXXXX XX XXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXX XXX XXXXXXXXXXX XXXX XXX
XXXX XXXXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX XXX
XXXXX XXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXX
X XXXX XXXX XXXX X XXXX XXXXXX XXXXX XXXXXXXX XXXXXXX XXXXXXXX XXX XXX
XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXX
X XXXX XXXX XX XXX XXXXX XXXXXXXXX XX XXXXXX X XXXXXX XXXXXXXXXXX XXX
XXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX
X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXX X XXX XXXXX
XX XXX XXXXX XXXXXXX XXXXXXXXXXXX XXXXX XXX XXXX XXXXXXX XXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX XX XXXXXXXX XXXX XXXXXXXXXXXXX XXX XXXXXXXXXXXXX XXXXXX
XXXXXXXXX XXX XXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXXX XXX XXXXXX XXXXXXXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXX XX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXX XXXXXXXXX XXXX XXX XXXXX XXXXXXXXXXX XXX XXXX XXXXXXXXX XX XXX
XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXX XXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXX XXXXX XXXXXXXXX XXX XXXX XXXX XXXXXXXXX XX XXX XXXXXXXX
XXXXXXX XX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXX XX
XXXXXXXX XX XX XX XXX XXXXXXX XXXXXXXXX XXXXXXXX XXXXX XXX XXXXX XXX XXX
XXXXXXXXXXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXXX
XXXX XXXXXXXXX X XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX
XXXXXXXXX XXXXXXXXX XXXXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XX
XXXXXX XX XXX XXXXXXXX XXXXXXXX XXXXX
XX XXXXXXXXXXXX XXXXXXXXXXX
BB BBBBBBBBBBBBBBBBBBBBB
XXXXXXXXXXX X XXXXXX XXX XXXXXXX XXXXXXX
BBBB
XXXXXXXXXXX XXX XXXXX XXXXXX XXX XXXXXXX
BBBBB
XXXX XXXXXXXX XXXXXXX XXXXXXXX XX XXX XXXXXXXXX XX X XXXXXXXXXXXXXXXXXX XX XXX
XXXXX XXXXX
XXXXXXXXXXX
XXXXXXXXXXX
XXX XXXXXXXXX XXXXXXXXX XXXXXX XXXXXXXXXXX XXX XXXXXX XX XXX XXXXXXXX XXXXXXXX
XXXXX XXXX XX XX XXXXXXXX XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX X
XXXXXXXXXXXXXXXXX XXXXX XX XXXXXXXXXXXX
XXXXXXXXXX X XXXXXXXXXXXXXXXX XXXXXX XX XXXX XX X XXXXXXX XX X XXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXX XX XXXXX XX
XXX XXXXXXXXX XXXX XXX XXX XXXXXXXXXXX XX XXX XXXXXXX XXXXX
BB BBBBBBBBB
XXXXXXXXXX X XXXXXXXXXXXXXXXXXXX XXXXXX XX X XXXXXXX XX X XXXXX XX
XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXX
XX XXXXX XX XXX XXXXXXXXX XXXX XXX XXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX
BB BBBBBBBBBBBBBBBBBB
XXXXXX X XXXX XXXXXXXX XXXXXXX XX XXXXXXXX XXXXXXXXXXX XX X XXXXXXXXX
XX XXXXXXXXXXXX XXXXXXXXXXX
BB BBBBBBBBB
XXXXXX XXXX XXXXXXXXXX XX XX XXXXXXXXX XX XXX XXX XXXXXXXX
BB BBBBBBBBBBBBBBBBBB
XXXXXX XXX XXXXXXXXX
BBBBB
BB BBBBBBBBBBBBBBBBBBBBB
XXXXXX XXX XXXXXXXXXX
BBBBB
BBBB
XXXXXX XXXXX XXXX XXXXXXXXXX XX XX XXXXXXXX XX XXX XXX XXXXXXXX
BBBBB
XX XX XXXXXXXX XX XXXX XXXX XXXXXXXXXXX XX XX XXBB BBXX XXXXXXXXXXX
XXX XXXXXXXX
XX XXXXXXXXXXXX XXXXXXXXXXX
BB BBBBB BB BBBBB
BB BBBBBBBBBBBBBB BB BBBBB
XXXXX XXXXXX XXXXXX XXXXXXXX
BBBBB
BBBBB
XX XXXXXXXXXXXX
XXXXXXXX XXXXX XX XXX XXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXX XXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXX XXX XXXXX XXXXXXXX X XXXXXXXXXX XXX XX XXXX XXX XXXXXX XXXXXX
XXXXXXX XXX XXXXXXXXXXXX XXXXX XXX XX XXXXXXX XXX XXXXXXX XXXX XXX XXXXXX
XXXXXX XXXXXX XXX XX XXXXXXXX XXX XXXXXXXXXXX XXX XX XXXXXXXX XX XXXXX XX
XXXXXXX X XXX XX XXXX XXXXX XX XXXXXX XXXX XXXXXX XXX XXXXX XX XXXX XXXXXX XXX
XXXXXXXXXX
XXXXXXXX XXXXX
XXXXXXXXXXXXXX
XXX XXXXXX XXX X XXXX XX XXXXXXX XX XXX XXXXXX
XXXXXXX XX XXX XXXX XXXXX XXXXX XXXXX XXX XXXX XXXXX XXXXX XXXX XX XXXXXXXXX
XXXX XXXXXXXX XXXXX XXXXX XX XXXX XX XXXXXXXX XXX XX XXXXXX X XXXXXXXX XXX
XXXXXXXX XXXXXX XXXXXXXX XXX XX XXXX XXX XXXX XX XXX XXXXXX XXXXXXX
XXXX XXXXX XX XXX XXXX X XXXX XXXXXXX XX XX XXXX XX XXXXXX XXXXX XXXXX XXX
XXXXXX XXXXX XXXXX XXXXXX XXXX XX XXXX XXXX XXXXXXXXXX XX XXX XXXXX XXXXX
XXXXXX XXXXX XXXXXX XXX XXXX XXXXX XXX XXXXXXX XXXXX XXXXXXXXXXXXX XX XX
XXXXXXX XXX XXXXXXXXXX XX XXX XXXXX XXX XXX XX XXXXXX XXXXX XXXX XXXXXXX XXXXX
XX XXXX XX XXX XXXXXX XXXX XXXXXXX XX XXX XXXX XXXXXXXXXX XX XXX XXXXXX XXX
XXXX XXX XXXXX XX XXXXXX XXXXXXXXXXX XXXXX XXX XXXXX XX XXXXX XXXXXX XXXXX
XXXXXX XX XXXXXX XXXXXXXX XXX XXXXX XXXXXX XXXXXXXXXXX XX X XXXXXX XXXXXXXX
XXXXXXXX
XX XXXXXXXXXX XXXXX XXX XXX XXXXX XXXXX XX XXXXXX XXXXXXXXXXXX XX XXX XXXX X
XXXXXXXXXXXXX XXX XXXXXXX XX XXXX XXXXXX XXXX XX XXXXXXXXXX XXX XXXX XX XXXXXX
XXXX XXXXXXXXX XXXXXX XXXXXXX XXXX XXXX XX XXXX XX XXXXXXX XXXXXXXXXXX XX
XXXXX XXXXXXXXX XXXXXXXXXXX
XXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXX XXX XXX XXXXXXXXX XX XXX XXXXX XXXX XXXXXX XX XXX XXXXXXXXXX XXX
XXX XXXXXXXXXXXXXX XXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXXXXXXXX
XXXXXXXX XX XXX XXXXXXX XX XXXX XXXXXXXXXXX XX X XXXX XX
X XXXXXXXX XXXXXX XXXX XXXX XXXXXX XXXXXX XX XXXXXX XXXX XXXXXXXXXX
|
"""
This is a script to convert the predictions to regions
-------------------------------------
Author: Sushanth Kathirvelu
"""
import json
import matplotlib.pyplot as plt
from numpy import array, zeros
import numpy as np
from scipy.misc import imread, imsave
from PIL import Image
mask = Image.open('../../training_masks/neurofinder.00.00.png')
mask = array(mask)
coordinates = []
print(mask.shape)
print(np.unique(mask))
for x in range(mask.shape[0]):
for y in range(mask.shape[1]):
#print(mask[x][y])
if mask[x , y] != 0:
#print(mask[x, y])
coordinates.append([x, y])
imsave('../../predictions/neurofinder.00.00.png', coordinates)
|
# python decorator function
# Demonstration of functools.partial: freezing arguments of an existing
# callable to derive a new one.
# NOTE: converted from Python 2 (`print x`, `raw_input`) to Python 3 —
# the original syntax no longer runs on any supported interpreter.
import functools

print('************** partial function Test Programs **************')

# int() with an explicit base reinterprets the same digit string.
print(int('11111111', base=2))    # 255
print(int('11111111', base=8))    # 2396745
print(int('11111111', base=16))   # 286331153

# Freeze base=2: int2(s) == int(s, base=2)
int2 = functools.partial(int, base=2)
print(int2('11111111'))           # 255

# Positional args are frozen at the front: max2(...) == max(10, ...)
max2 = functools.partial(max, 10)
print(max2(2, 3, 4, 5))           # 10

# Pause so a console window stays open (Python 2's raw_input -> input).
input()
|
import ssl
import tarfile
import logging
import time
import os
import io
import tempfile
import http
import hashlib
from threading import Timer
from datetime import datetime, timedelta
from urllib import request
from pyVmomi import vim, vmodl
from subcontractor_plugins.common.files import file_reader, file_writer
"""
Initially derived from code from https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/deploy_ova.py and deploy_ovf.py
"""
PROGRESS_INTERVAL = 10 # in seconds
DOWNLOAD_FILE_TIMEOUT = 60 # in seconds
def _get_tarfile_size( tarfile ):
"""
Determine the size of a file inside the tarball.
If the object has a size attribute, use that. Otherwise seek to the end
and report that.
"""
try:
return tarfile.size
except AttributeError:
pass
size = tarfile.seek( 0, 2 )
tarfile.seek( 0, 0 )
return size
class Lease():
  """
  Thin wrapper around a vim.HttpNfcLease that adds ready-waiting and a
  periodic keep-alive timer.

  Subclasses must provide a ``_timer_cb`` method; ``start()`` schedules it
  once on a PROGRESS_INTERVAL timer and the callback is expected to
  re-schedule itself while ``self.cont`` is True.
  """
  def __init__( self, nfc_lease ):
    super().__init__()
    self.lease = nfc_lease
    self.cont = False  # timer-chain flag: set False to stop the keep-alive

  def start_wait( self ):
    """
    Block until the lease leaves the 'initializing' state, polling every
    4 seconds (~4 minute cap).  Raises Exception on timeout, on a lease
    error, or if the lease is already done.
    """
    count = 0
    while self.lease.state == vim.HttpNfcLease.State.initializing:
      count += 1
      if count > 60:
        raise Exception( 'Timeout waiting for lease to be ready' )  # fixed typo: was 'least'
      logging.info( 'Lease: Waiting for lease to be ready...' )
      time.sleep( 4 )

    if self.lease.state == vim.HttpNfcLease.State.error:
      raise Exception( 'Lease error: "{0}"'.format( self.lease.error ) )

    if self.lease.state == vim.HttpNfcLease.State.done:
      raise Exception( 'Lease done before we start?' )

  def complete( self ):
    # Mark the transfer as successfully finished on the vSphere side.
    self.lease.Complete()

  def abort( self, msg ):
    # Abort the transfer; msg is a vmodl fault describing why.
    self.lease.Abort( msg )

  @property
  def state( self ):
    return self.lease.state

  @property
  def info( self ):
    return self.lease.info

  def start( self ):
    """Start the keep-alive timer chain (subclass supplies _timer_cb)."""
    self.cont = True
    Timer( PROGRESS_INTERVAL, self._timer_cb ).start()

  def stop( self ):
    """Ask the timer chain to stop; the pending callback exits early."""
    self.cont = False
class ImportLease( Lease ):
  """
  Lease for an import (upload) operation.  Progress reported to vSphere is
  the current read offset in the source OVA file as a percentage of its
  total size — note this is the file position, not bytes uploaded, so if
  the vmdks are uploaded out of order the reported progress can jump around.
  """
  def __init__( self, nfc_lease, file_handle ):
    super().__init__( nfc_lease )
    self.file_handle = file_handle
    self.file_size = os.stat( file_handle.name ).st_size

  def get_device_url( self, fileItem ):
    """Return the lease deviceUrl entry matching fileItem's device id."""
    found = next( ( d for d in self.lease.info.deviceUrl if d.importKey == fileItem.deviceId ), None )
    if found is None:
      raise Exception( 'Failed to find device.url for file {0}'.format( fileItem.path ) )
    return found

  def _timer_cb( self ):
    # Keep-alive: report progress and re-arm while the lease stays ready.
    if not self.cont:
      return
    try:
      percent = self.file_handle.tell() * 100 / self.file_size
      self.lease.Progress( int( percent ) )
      logging.debug( 'Lease: import progress at {0}%'.format( percent ) )
      if self.lease.state != vim.HttpNfcLease.State.ready:
        self.cont = False
        return
      Timer( PROGRESS_INTERVAL, self._timer_cb ).start()
    except Exception as e:  # don't renew the timer
      self.cont = False
      logging.warning( 'ImportLease: Exception during _timer_cb: "{0}"'.format( e ) )
class ExportLease( Lease ):
  """
  Lease for an export (download) operation.  Whatever value is stored in
  ``self.progress`` (a percentage, externally updated) is periodically
  reported to vSphere as the keep-alive.
  """
  def __init__( self, nfc_lease ):
    super().__init__( nfc_lease )
    self.progress = 0

  def _timer_cb( self ):
    # Keep-alive: report the cached progress and re-arm while ready.
    if not self.cont:
      return
    try:
      self.lease.Progress( int( self.progress ) )
      logging.debug( 'ExportLease: export progress at {0}%'.format( self.progress ) )
      if self.lease.state != vim.HttpNfcLease.State.ready:
        self.cont = False
        return
      Timer( PROGRESS_INTERVAL, self._timer_cb ).start()
    except Exception as e:  # don't renew the timer
      self.cont = False
      logging.warning( 'ExportLease: Exception during _timer_cb: "{0}"'.format( e ) )
# TODO: validate the hashes against the .mf file, so far SHA256 and SHA1 hashes are used
class OVAImportHandler():
  """
  OVAImportHandler handles most of the OVA operations.
  It processes the tarfile, matches disk keys to files and
  uploads the disks, while keeping the progress up to date for the lease.
  """
  def __init__( self, ova_file, sslContext ):
    """
    Performs necessary initialization: opens the OVA file (via file_reader,
    so ova_file may be a url), wraps it in a tarfile, and reads the embedded
    .ovf descriptor into ``self.descriptor``.
    """
    self.handle = file_reader( ova_file, None, sslContext )
    self.tarfile = tarfile.open( fileobj=self.handle, mode='r' )
    # the first *.ovf member is the descriptor; IndexError here means the OVA has none
    ovf_filename = list( filter( lambda x: x.endswith( '.ovf' ), self.tarfile.getnames() ) )[0]
    ovf_file = self.tarfile.extractfile( ovf_filename )
    self.descriptor = ovf_file.read().decode()

  def _get_disk( self, fileItem ):
    """
    Does translation for disk key to file name, returning a file handle
    into the tarball for the member named by fileItem.path.
    """
    ovf_filename = list( filter( lambda x: x == fileItem.path, self.tarfile.getnames() ) )[0]
    return self.tarfile.extractfile( ovf_filename )

  def _upload_disk( self, fileItem, lease, host ):
    """
    Upload an individual disk.  Passes the file handle of the
    disk directly to the urlopen request (streamed, not buffered).
    """
    logging.info( 'OVAImportHandler: Uploading "{0}"...'.format( fileItem ) )
    disk_file = self._get_disk( fileItem )  # renamed from `file`: shadowed the builtin
    if disk_file is None:
      return

    device = lease.get_device_url( fileItem )
    url = device.url.replace( '*', host )  # vSphere returns '*' as a placeholder for the host
    headers = { 'Content-length': _get_tarfile_size( disk_file ) }
    if hasattr( ssl, '_create_unverified_context' ):
      sslContext = ssl._create_unverified_context()
    else:
      sslContext = None

    try:
      req = request.Request( url, data=disk_file, headers=headers, method='POST' )
      request.urlopen( req, context=sslContext )
    except Exception as e:
      # fixed format-argument order: the message previously printed the
      # exception where the fileItem belonged (and vice versa)
      logging.error( 'OVAImportHandler: Exception Uploading "{0}", lease info: "{1}": "{2}"'.format( fileItem, lease.info, e ) )
      raise e

  def upload( self, host, resource_pool, import_spec_result, datacenter ):
    """
    Uploads all the disks, with a progress keep-alive.
    return uuid of vm
    Raises whatever _upload_disk raised after aborting the lease.
    """
    lease = ImportLease( resource_pool.ImportVApp( spec=import_spec_result.importSpec, folder=datacenter.vmFolder ), self.handle )
    lease.start_wait()
    uuid = lease.info.entity.config.instanceUuid
    try:
      lease.start()
      logging.debug( 'OVAImportHandler: Starting file upload(s)...' )
      for fileItem in import_spec_result.fileItem:
        self._upload_disk( fileItem, lease, host )
      logging.debug( 'OVAImportHandler: File upload(s) complete' )
      lease.complete()
    except Exception as e:
      logging.error( 'OVAImportHandler: Exception uploading files' )
      lease.abort( vmodl.fault.SystemError( reason=str( e ) ) )
      raise e
    finally:
      lease.stop()

    return uuid
class OVAExportHandler():
  """
  Exports a VM to an OVA: downloads the exported device files to a work
  dir, generates the OVF descriptor and .mf manifest, packs everything into
  a tar (the OVA) and hands it to file_writer for delivery to self.url.
  """
  def __init__( self, ovf_manager, url, sslContext ):
    """
    ovf_manager: the vim OvfManager used to create the OVF descriptor
    url: destination the finished OVA is written to (via file_writer)
    """
    super().__init__()
    self.ovf_manager = ovf_manager
    self.url = url
    self.sslContext = sslContext

  def _downloadFiles( self, wrk_dir, lease, host, header_map, proxy ):
    """
    Download every device file offered by the export lease into wrk_dir.
    Returns a list of ( vim.OvfManager.OvfFile, sha256 hexdigest ) tuples.
    """
    ovf_files = []
    opener = request.OpenerDirector()
    if proxy:  # not doing 'is not None', so empty strings don't try and proxy # TODO: option to take the proxy from the environment vars
      opener.add_handler( request.ProxyHandler( { 'http': proxy, 'https': proxy } ) )
    else:
      opener.add_handler( request.ProxyHandler( {} ) )
    opener.add_handler( request.HTTPHandler() )
    if hasattr( http.client, 'HTTPSConnection' ):
      opener.add_handler( request.HTTPSHandler() )  # context=self.sslContext
    opener.add_handler( request.UnknownHandler() )

    logging.debug( 'OVAExportHandler: Starting file downloads(s)...' )
    for device in lease.info.deviceUrl:
      url = device.url.replace( '*', host )  # vSphere returns '*' as a placeholder for the host
      if not device.targetId:
        logging.debug( 'ExportLease: No targetId for "{0}", skipping...'.format( url ) )
        continue

      logging.debug( 'OVAExportHandler: Downloading "{0}"...'.format( device.url ) )
      req = request.Request( url, headers=header_map, method='GET' )
      resp = opener.open( req, timeout=DOWNLOAD_FILE_TIMEOUT )
      try:
        content_length = int( resp.headers[ 'content-length' ] )
      except TypeError:  # ESX doesn't always supply content-length
        content_length = '<unknown>'  # fixed typo: was '<unknwon>'
      file_hash = hashlib.sha256()
      # `with` closes the output file even if the download loop raises (was leaked on error)
      with open( os.path.join( wrk_dir, device.targetId ), 'wb' ) as local_file:
        buff = resp.read( 4096 * 1024 )
        cp = datetime.utcnow()
        while buff:
          if datetime.utcnow() > cp:  # rate-limit the progress log to one line per PROGRESS_INTERVAL
            cp = datetime.utcnow() + timedelta( seconds=PROGRESS_INTERVAL )
            logging.debug( 'OVAExportHandler: download at {0} of {1}'.format( local_file.tell(), content_length ) )
          local_file.write( buff )
          file_hash.update( buff )
          buff = resp.read( 4096 * 1024 )

        ovf_file = vim.OvfManager.OvfFile()
        ovf_file.deviceId = device.key
        ovf_file.path = device.targetId
        ovf_file.size = local_file.tell()
        ovf_files.append( ( ovf_file, file_hash.hexdigest() ) )

    return ovf_files

  def export( self, host, vm, vm_name ):
    """
    Export vm to an OVA named '<vm_name>.ova' and write it to self.url.
    Returns the url the OVA was (nominally) written to.
    """
    headers = {}
    proxy = None
    ova_file = tempfile.NamedTemporaryFile( mode='w+b', dir='/tmp', prefix='subcontractor_vcenter_' )
    wrk_dir = tempfile.TemporaryDirectory( prefix='subcontractor_vcenter_', dir='/tmp' )
    try:
      nfc_lease = vm.ExportVm()
      lease = ExportLease( nfc_lease )
      lease.start_wait()
      try:
        lease.start()
        # NOTE(review): lease.progress is never updated during the download,
        # so the keep-alive always reports 0% — confirm whether that matters.
        ovf_file_list = self._downloadFiles( wrk_dir.name, lease, host, headers, proxy )
        logging.debug( 'OVAExportHandler: File download(s) complete' )
        lease.complete()
      except Exception as e:
        logging.error( 'OVAExportHandler: Exception downloading files' )
        lease.abort( vmodl.fault.SystemError( reason=str( e ) ) )
        raise e
      finally:
        lease.stop()

      logging.debug( 'OVAExportHandler: Generating OVF...' )
      ovf_parameters = vim.OvfManager.CreateDescriptorParams()
      ovf_parameters.name = vm_name
      ovf_parameters.ovfFiles = [ i[0] for i in ovf_file_list ]
      ovf_descriptor = self.ovf_manager.CreateDescriptor( obj=vm, cdp=ovf_parameters )
      if ovf_descriptor.error:
        # NOTE(review): join() requires str items — confirm i.fault is a string here
        msg = '"{0}"'.format( '", "'.join( [ i.fault for i in ovf_descriptor.error ] ) )
        logging.error( 'vcenter: error creating ovf descriptor ' + msg )
        raise Exception( 'Error creating ovf descriptor: ' + msg )  # fixed typo: was 'createing'
      if ovf_descriptor.warning:
        msg = '"{0}"'.format( '", "'.join( [ i.fault for i in ovf_descriptor.warning ] ) )
        logging.warning( 'vcenter: warning creating ovf descriptor ' + msg )

      # assemble the OVA: .ovf descriptor first, then the .mf manifest, then the disks
      ova_tarfile = tarfile.open( fileobj=ova_file, mode='w' )
      ovf_file = tarfile.TarInfo( name='{0}.ovf'.format( vm_name ) )
      payload = ovf_descriptor.ovfDescriptor.encode( 'utf-8' )  # renamed from `bytes`: shadowed the builtin
      ovf_file.size = len( payload )
      ova_tarfile.addfile( ovf_file, fileobj=io.BytesIO( payload ) )
      ovf_hash = hashlib.sha256( payload ).hexdigest()

      logging.debug( 'OVAExportHandler: Generating mf...' )
      mf = 'SHA256({0}.ovf)={1}\n'.format( vm_name, ovf_hash )
      for item, digest in ovf_file_list:  # renamed from `hash`: shadowed the builtin
        mf += 'SHA256({0})={1}\n'.format( item.path, digest )

      mf_file = tarfile.TarInfo( name='{0}.mf'.format( vm_name ) )
      payload = mf.encode( 'utf-8' )
      mf_file.size = len( payload )
      ova_tarfile.addfile( mf_file, fileobj=io.BytesIO( payload ) )

      for item, _ in ovf_file_list:
        logging.debug( 'OVAExportHandler: adding "{0}"...'.format( item.path ) )
        item_file = tarfile.TarInfo( name=item.path )
        item_file.size = item.size
        # `with` closes the source file after adding it (was left open / leaked)
        with open( os.path.join( wrk_dir.name, item.path ), 'rb' ) as src:
          ova_tarfile.addfile( item_file, fileobj=src )

      ova_tarfile.close()
    finally:
      wrk_dir.cleanup()

    ova_file.flush()
    ova_file.seek( 0 )
    file_writer( self.url, ova_file, '{0}.ova'.format( vm_name ), None, self.sslContext )
    ova_file.close()

    # NOTE(review): placeholder url preserved from the original — confirm intent
    return 'http://somplace/somepath/{0}.ova'.format( vm_name )
class VMDKHandler():
  """
  Placeholder handler for uploading a bare VMDK disk image.
  Opens the source file on construction; upload is not implemented yet.
  """
  def __init__( self, vmdk_file, sslContext ):
    super().__init__()
    self.handle = file_reader( vmdk_file, None, sslContext )

  def upload( self, host, resource_pool, datacenter ):
    """
    Not implemented.  Raises NotImplementedError (the idiomatic signal for
    an unfinished API; still an Exception subclass, so existing
    ``except Exception`` callers are unaffected).
    """
    raise NotImplementedError( 'Not implemented' )
|
"""Client to connect to a database server."""
import hmac
import socket
from contextlib import contextmanager
from typing import Dict, List
from luxdb.commands import (AddItemsCommand, ConnectCommand, CountCommand, CreateIndexCommand, DeleteIndexCommand,
DeleteItemCommand, GetEFCommand, GetEFConstructionCommand, GetIdsCommand, GetIndexesCommand,
GetItemsCommand, IndexExistsCommand, InfoCommand, InitIndexCommand, MaxElementsCommand,
QueryIndexCommand, ResizeIndexCommand, Result, SetEFCommand)
from luxdb.connection import (gen_key, receive_result_sync, send_close_sync, send_command_sync)
class SyncClient:
"""Client to connect to a database. This is the synchronous version of the client."""
def __init__(self, host, port, secret):
self.host = host
self.port = port
self.socket = None
self.secret = gen_key(secret)
def connect(self):
"""Connect to the server"""
self.socket = socket.create_connection((self.host, self.port))
connect_command = ConnectCommand()
try:
result = self.send_command(connect_command)
except TypeError:
result = b''
if not hmac.compare_digest(connect_command.payload, result):
self.socket.close()
self.socket = None
raise RuntimeError('Connect failed, make sure your secret is correct')
def send_command(self, command) -> Result:
"""Send the command to the server and return the result the server sends back."""
send_command_sync(self.socket, command, self.secret)
result = receive_result_sync(self.socket, self.secret)
return result.get_value()
def index_exists(self, name: str) -> bool:
"""Check if the index already exists."""
command = IndexExistsCommand(name=name)
return self.send_command(command)
def create_index(self, name: str, space: str, dim: int) -> bool:
"""Create a new index with the given space (l2, ip, cosine) and dimension.
More information about the parameters is available here:
https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
"""
command = CreateIndexCommand(name=name, space=space, dim=dim)
return self.send_command(command)
def init_index(self, name: str, max_elements: int, ef_construction: int = 200, M: int = 16) -> None:
"""Initialize the index with the max_elements, ef_construction and M.
More information about the parameters is available here:
https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md
"""
command = InitIndexCommand(name=name, max_elements=max_elements, ef_construction=ef_construction, M=M)
return self.send_command(command)
def delete_index(self, name: str) -> None:
"""Delete the index with the given name."""
command = DeleteIndexCommand(name=name)
return self.send_command(command)
def add_items(self, name: str, data, ids) -> None:
"""Add the given items to the index."""
command = AddItemsCommand(name=name, data=data, ids=ids)
return self.send_command(command)
def set_ef(self, name: str, new_ef: int):
"""Set the ef to a new value."""
command = SetEFCommand(name=name, new_ef=new_ef)
return self.send_command(command)
def query_index(self, name: str, vector, k: int):
"""Find the k nearest neighbors of every element in vector.
Returns a tuple with the labels of the found neighbors and the distances.
"""
command = QueryIndexCommand(name=name, vector=vector, k=k)
return self.send_command(command)
def get_ef(self, name: str) -> int:
"""Get the ef value."""
command = GetEFCommand(name=name)
return self.send_command(command)
def get_ef_construction(self, name: str) -> int:
"""Get the ef construction value"""
command = GetEFConstructionCommand(name=name)
return self.send_command(command)
def delete_item(self, name: str, label: int) -> None:
"""Mark an item as deleted, this will exclude it from search results."""
command = DeleteItemCommand(name=name, label=label)
return self.send_command(command)
def resize_index(self, name: str, new_size: int) -> None:
"""Resize the index to fit more ore less items"""
command = ResizeIndexCommand(name=name, new_size=new_size)
return self.send_command(command)
def count(self, name: str) -> int:
    """Return the number of items currently stored in index `name`."""
    return self.send_command(CountCommand(name=name))
def max_elements(self, name: str) -> int:
    """Return the configured capacity (maximum item count) of index `name`."""
    return self.send_command(MaxElementsCommand(name=name))
def info(self, name: str) -> Dict:
    """Return a dict describing index `name`.

    Contains space, dim, M, ef_construction, ef, max_elements and
    element_count.
    """
    return self.send_command(InfoCommand(name=name))
def get_indexes(self) -> None:
    """Return the names of all indexes present in the database."""
    return self.send_command(GetIndexesCommand())
def quit(self) -> None:
    """Quit the connection and inform the server about it."""
    # Sends a close message over the socket so the server can release the
    # connection's resources cleanly instead of waiting for a timeout.
    send_close_sync(self.socket)
def get_items(self, name: str, ids: List[int]):
    """Return the stored vectors for the given `ids` from index `name`."""
    return self.send_command(GetItemsCommand(name=name, ids=ids))
def get_ids(self, name: str):
    """Return every id stored in index `name`."""
    return self.send_command(GetIdsCommand(name=name))
@contextmanager
def connect(host, port, secret) -> SyncClient:
    """Context manager yielding a connected SyncClient.

    The client is connected on entry and always closed (via quit()) on
    exit, even when the body raises.
    """
    conn = SyncClient(host, port, secret)
    conn.connect()
    try:
        yield conn
    finally:
        conn.quit()
|
# Generated by Django 2.2.5 on 2020-07-24 10:23
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the spaceplanner app: workstations, per-workstation
    # attributes, per-employee preferences, and weekly schedules keyed both
    # by workstation (Workweek) and by employee (Userweek).

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # A physical desk, identified by a hand-assigned integer id.
        migrations.CreateModel(
            name='Workstation',
            fields=[
                ('ws_id', models.IntegerField(primary_key=True, serialize=False)),
            ],
        ),
        # Fixed attributes of one desk (window seat, noise level, screen, mac).
        migrations.CreateModel(
            name='WorkstationPreferences',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('window', models.BooleanField(default=False, verbose_name='window')),
                ('noise', models.BooleanField(default=False, verbose_name='noise')),
                ('large_screen', models.BooleanField(default=False, verbose_name='large_screen')),
                ('is_mac', models.BooleanField(default=False, verbose_name='is_mac')),
                ('workstation', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='spaceplanner.Workstation')),
            ],
        ),
        # What an employee wants in a desk; each boolean is paired with a
        # 0..3 weight saying how much that attribute matters to them.
        migrations.CreateModel(
            name='EmployeePreferences',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('window', models.BooleanField(default=False, verbose_name='window')),
                ('window_preference', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='window_preference')),
                ('noise', models.BooleanField(default=False, verbose_name='noise')),
                ('noise_preference', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='noise_preference')),
                ('large_screen', models.BooleanField(default=False, verbose_name='large_screen')),
                ('large_screen_preference', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='large_screen_preference')),
                ('is_mac', models.BooleanField(default=False, verbose_name='is_mac')),
                ('is_mac_preference', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='is_mac_preference')),
                ('employee', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('favourite_workspace', models.ManyToManyField(default=None, to='spaceplanner.Workstation')),
            ],
        ),
        # One week of occupancy for one workstation: each weekday points at
        # the user (if any) who has the desk that day. SET_NULL keeps the
        # schedule row alive when a user account is deleted.
        migrations.CreateModel(
            name='Workweek',
            fields=[
                ('week_id', models.AutoField(primary_key=True, serialize=False)),
                ('year', models.IntegerField(verbose_name='year')),
                ('week', models.IntegerField(verbose_name='week')),
                ('Friday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Friday', to=settings.AUTH_USER_MODEL)),
                ('Monday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Monday', to=settings.AUTH_USER_MODEL)),
                ('Saturday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Saturday', to=settings.AUTH_USER_MODEL)),
                ('Sunday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Sunday', to=settings.AUTH_USER_MODEL)),
                ('Thursday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Thursday', to=settings.AUTH_USER_MODEL)),
                ('Tuesday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Tuesday', to=settings.AUTH_USER_MODEL)),
                ('Wednesday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Wednesday', to=settings.AUTH_USER_MODEL)),
                ('workstation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='spaceplanner.Workstation', verbose_name='workstation')),
            ],
            options={
                'unique_together': {('workstation', 'year', 'week')},
            },
        ),
        # The mirror view: one week for one employee, each weekday pointing
        # at the workstation (if any) they sit at that day.
        migrations.CreateModel(
            name='Userweek',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.IntegerField(verbose_name='year')),
                ('week', models.IntegerField(verbose_name='week')),
                ('monday_date', models.DateField(verbose_name='monday_date')),
                ('Friday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Friday', to='spaceplanner.Workstation')),
                ('Monday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Monday', to='spaceplanner.Workstation')),
                ('Saturday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Saturday', to='spaceplanner.Workstation')),
                ('Sunday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Sunday', to='spaceplanner.Workstation')),
                ('Thursday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Thursday', to='spaceplanner.Workstation')),
                ('Tuesday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Tuesday', to='spaceplanner.Workstation')),
                ('Wednesday', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Wednesday', to='spaceplanner.Workstation')),
                ('employee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='employee')),
            ],
            options={
                'unique_together': {('employee', 'year', 'week')},
            },
        ),
    ]
|
# -*- coding:utf-8 -*-
#########################################################
# Rutap Bot 2019 Timeform Module (By. Preta) #
# 모든 저작권은 Preta가 소유합니다. 모든 권리를 보유합니다. #
#########################################################
import time, datetime
def timeform(dt1):
    """Convert a datetime (or its string form) to a Unix timestamp float.

    `dt1` must render as 'YYYY-mm-dd HH:MM:SS.ffffff' via str(). The
    seconds part is converted with the local timezone (time.mktime) and
    the microsecond digits are re-attached as the fractional part.
    """
    text = str(dt1)
    micros = text[-6:]   # the six microsecond digits
    base = text[:-7]     # strip '.ffffff'
    seconds = time.mktime(time.strptime(base, '%Y-%m-%d %H:%M:%S'))
    # mktime returns whole seconds as a float; splice the microseconds
    # back on to keep sub-second precision.
    return float('{}.{}'.format(int(seconds), micros))
|
# Read n (ignored) and k, then print the array rotated left by k positions.
header = input().strip().split(' ')
k = int(header[1])
items = input().strip().split(' ')
rotated = items[k:] + items[:k]
print(' '.join(rotated))
|
from __future__ import annotations
import datetime
from typing import Iterable, Optional
from django.db.models import QuerySet
from django.template import Library
from django.utils import timezone
from django.utils.safestring import SafeText, mark_safe
from markdown import markdown as md
from ..models import Show, Track
from ..utils import length_str
register = Library()
@register.filter
def votes_for(track, show: Show) -> QuerySet[Track]:
    """
    Return all votes applicable to `track` for `show`.
    """
    # Delegates to the model; the filter exists only for template access.
    return track.votes_for(show)
@register.filter
def when(date: datetime.datetime) -> str:
    """
    Convert a date into an appropriately relative human-readable date.

    The datetime is converted to the active timezone BEFORE extracting the
    calendar day; the original code took .date() first, so the
    today/this-week bucketing could be off by a day near midnight when the
    stored timezone differed from the display timezone.
    """
    show_locale = timezone.get_current_timezone()
    date = date.astimezone(show_locale)
    our_day = date.date()
    today = timezone.now().astimezone(show_locale).date()
    if our_day == today:
        return date.strftime('%I:%M %p').lower()
    elif today - our_day <= datetime.timedelta(days=6):
        return date.strftime('%A at %I:%M %p').lower()
    elif today - our_day <= datetime.timedelta(days=364):
        return date.strftime('%a %b %d at %I:%M %p').lower()
    else:
        return date.strftime('%a %b %d %Y at %I:%M %p').lower()
@register.filter
def percent(flt: Optional[float]) -> str:
    """
    Render a 0..1 float as a whole-number percentage string.

    None (an unknown value) renders as the placeholder '[in flux]'.
    """
    if flt is None:
        return '[in flux]'
    return f'{flt * 100:.0f}%'
@register.filter
def total_length(tracks: Iterable[Track]):
    """Return the human-readable combined duration of the given tracks."""
    total_msec = sum(t.msec for t in tracks if t.msec is not None)
    return length_str(total_msec)
@register.filter
def markdown(text: str) -> SafeText:
    """Render `text` as Markdown and mark the resulting HTML as safe."""
    # NOTE(review): the markdown output is not sanitized, so this filter
    # must only be used on trusted (site-authored) text, never on
    # user-supplied input.
    return mark_safe(md(text))
|
from selenium import webdriver
from NewItems.pages.main_page import MainPage
from NewItems.pages.item_page import ItemPage
from NewItems.pages.trash_page import TrashPage
class Application:
    """Facade over the browser driver and the shop's page objects."""

    def __init__(self):
        self.driver = webdriver.Chrome()
        self.main_page = MainPage(self.driver)
        self.item_page = ItemPage(self.driver)
        self.trash_page = TrashPage(self.driver)

    def quit(self):
        """Close the browser and release the driver."""
        self.driver.quit()

    def add_items(self):
        """Open the main page, open an item and add it to the trash."""
        self.main_page.open_page()
        self.main_page.item_click()
        count = int(self.item_page.get_items_counter_in_trash())
        self.item_page.select_option()
        self.item_page.add_item(count)

    def get_trash_count(self):
        """Return the number of items currently shown in the trash."""
        self.trash_page.open_page()
        return len(self.trash_page.get_items_in_trash())

    def delete_items(self, count):
        """Delete the `count`-th item (1-based) from the trash."""
        # Renamed from `list`, which shadowed the builtin.
        items = self.trash_page.get_items_in_trash()
        target = items[count - 1]
        self.trash_page.select_item()
        self.trash_page.delete_item(target)
|
import numpy as np
import copy
from unsafe_runaway import *
import time
from nose.tools import assert_raises
class mockup:
    # Empty placeholder class used to fake a LaserScan-like message:
    # tests attach range_max / angle_min / angle_increment / ranges to it.
    pass
def makeLaserData():
    """Build a fake laser scan: 69 random ranges in (1, 6), fixed geometry."""
    scan = mockup()
    scan.range_max = 5.6
    scan.angle_min = -2.1
    scan.angle_increment = 0.06136
    scan.ranges = list(1 + np.random.rand(69) * 5)
    return scan
class Test_simpleTracker:
    """Tests for simpleTracker.registerScan (from unsafe_runaway)."""

    def setUp(self):
        self.tracker = simpleTracker()

    def test_ignor_first_scan(self):
        """The very first scan must be rejected with UserWarning."""
        # Removed an unused local `tracker = simpleTracker()` that shadowed
        # the fixture tracker and was never referenced.
        laser_data = makeLaserData()
        assert_raises(UserWarning, self.tracker.registerScan, laser_data)

    def test_unmuted(self):
        """registerScan must not mutate the scan it is given."""
        laser_data = makeLaserData()
        backup = copy.copy(laser_data)
        try:
            angle, distance = self.tracker.registerScan(laser_data)
        except Exception:
            # The first scan may raise; this test only checks for mutation.
            pass
        assert backup.ranges == laser_data.ranges

    def test_nan(self):
        """A single NaN range must not break tracking."""
        laser_data = makeLaserData()
        assert_raises(UserWarning, self.tracker.registerScan, laser_data)
        laser_data.ranges[12] = float('nan')
        angle, dist = self.tracker.registerScan(laser_data)

    def test_only_nan(self):
        """All-NaN scans must keep raising UserWarning."""
        laser_data = makeLaserData()
        laser_data.ranges = [float('nan') for _ in laser_data.ranges]
        assert_raises(UserWarning, self.tracker.registerScan, laser_data)
        assert_raises(UserWarning, self.tracker.registerScan, laser_data)

    def test_real_real_min(self):
        """The tracker should keep following the previous minimum rather
        than jumping to a suddenly-appearing closer reading."""
        laser_data = makeLaserData()
        laser_data.ranges[-1] = 0.5  # real min
        assert_raises(UserWarning, self.tracker.registerScan, laser_data)
        laser_data.ranges[-1] = 0.6
        laser_data.ranges[42] = 0.1  # fake min
        ang, dist = self.tracker.registerScan(laser_data)
        assert dist == 0.6
        assert ang == laser_data.angle_min + 68 * laser_data.angle_increment
class Test_PID:
    """Tests for simplePID (from unsafe_runaway)."""

    def setUp(self):
        pass

    def test_convergence(self):
        """With scalar gains the controller converges to the 2-D target."""
        self.pid = simplePID([0, 30], 0.8, 0.001, 0.0001)
        x = np.array([23, 12])
        for i in range(20):
            update = self.pid.update(x)
            print('added {} to current x {}'.format(update, x))
            x = x + update
            time.sleep(0.1)
        assert np.all(abs(x - [0, 30]) <= 0.01)

    def test_convergence_differentParamShape(self):
        """Same convergence check, exercising the scalar-gain code path."""
        self.pid = simplePID([0, 30], 0.8, 0.001, 0.0001)
        x = np.array([23, 12])
        for i in range(20):
            update = self.pid.update(x)
            print('added {} to current x {}'.format(update, x))
            x = x + update
            time.sleep(0.1)
        assert np.all(abs(x - [0, 30]) <= 0.01)

    def test_raises_unequal_param_shape_at_creation(self):
        """Mismatched target/gain shapes must raise TypeError at creation."""
        assert_raises(TypeError, simplePID, [0, 30], [0.8, 0.7, 0.1], 0.001, 0.0001)
        assert_raises(TypeError, simplePID, [0, 30], [0.8, 0.7], 0.001, 0.0001)
        assert_raises(TypeError, simplePID, 0, [0.8, 0.7], 0.001, 0.0001)
        # BUG FIX: the last Kd argument was written `[0.0001, 0,0001]` - a
        # comma typo producing a 3-element list (and `0001` is a syntax
        # error on Python 3). The intended value is a two-element list.
        assert_raises(TypeError, simplePID, 0, [0.8, 0.7], [0.001, 0.001], [0.0001, 0.0001])
        _ = simplePID([0, 30], [0.8, 0.7], [0.001, 0.001], [0.0001, 0.0001])
        _ = simplePID([0, 30], 0.8, 0.001, 0.0001)
        _ = simplePID(0, 0.8, 0.001, 0.0001)

    def test_raise_incompatable_input(self):
        """update() with a scalar where a 2-vector is expected raises, and
        the controller still converges afterwards."""
        self.pid = simplePID([0, 30], 0.8, 0.001, 0.0001)
        assert_raises(TypeError, self.pid.update, 3)
        x = np.array([23, 12])
        for i in range(50):
            update = self.pid.update(x)
            print('added {} to current x {}'.format(update, x))
            x = x + update
            time.sleep(0.1)
        assert np.all(abs(x - [0, 30]) <= 0.001)
|
"""
Author: Jacob Dachenhaus, jdachenh@purdue.edu
Assignment: 05.4 - Hello Turtle
Date: 10/11/2021
Description:
Program that draws text with a limited selection of characters.
Currently hard-coded to draw "hello turtle"
Contributors:
None
My contributor(s) helped me:
[x] understand the assignment expectations without
telling me how they will approach it.
[x] understand different ways to think about a solution
without helping me plan my solution.
[x] think through the meaning of a specific error or
bug present in my code without looking at my code.
Note that if you helped somebody else with their code, you
have to list that person as a contributor.
Academic Integrity Statement:
I have not used source code obtained from any unauthorized
source, either modified or unmodified; nor have I provided
another student access to my code. The project I am
submitting is my own original work.
"""
from turtle import *
def start():
    """This function initializes the window and the turtle.
    Do not modify this function.
    """
    setup(600, 400)  # 600x400 px drawing window
    width(9)         # thick pen so strokes are clearly visible
CAP_SIZE = 60        # base letter size unit, in pixels
NUM_SEGMENTS = 16    # straight segments used to approximate a full circle
THETA = 360.0 / NUM_SEGMENTS  # turn angle per segment, in degrees
# Arc length per segment; 3.14 is an intentional approximation of pi.
SEGMENT_LENGTH = (3.14 * CAP_SIZE) / NUM_SEGMENTS
def drawCircle(numSegments, h=90, dir=1):
    """Trace `numSegments` arc segments of the standard letter circle.

    `h` is the starting heading in degrees; `dir` (+1 or -1) chooses the
    turn direction. Drawing a full circle takes NUM_SEGMENTS segments.
    """
    seth(h)
    pd()
    remaining = numSegments
    while remaining > 0:
        rt(dir * THETA)
        forward(SEGMENT_LENGTH)
        remaining -= 1
def draw_e():
    """Draw a lowercase 'e': a horizontal bar plus an almost-full circle."""
    [x, y] = pos()
    pu()
    setpos(x, y + (CAP_SIZE / 2) + (SEGMENT_LENGTH / 2))
    pd()
    seth(0)
    forward(CAP_SIZE)  # the crossbar of the 'e'
    drawCircle(NUM_SEGMENTS - 2, dir=-1)  # open circle, gap at the bar
    # Reset pos
    pu()
    setpos(x + CAP_SIZE, y)
def draw_h():
    """Draw a lowercase 'h': full-height stem plus a shoulder arc."""
    [x, y] = pos()
    pu()
    setpos(x, y + (CAP_SIZE * 2))
    pd()
    setpos(x, y)  # tall left stem
    pu()
    setpos(x, y + (CAP_SIZE / 2) + (SEGMENT_LENGTH / 2))
    pd()
    drawCircle(int(NUM_SEGMENTS / 2))  # shoulder: half circle
    setpos(x + CAP_SIZE, y)  # pen is still down: draws the right leg
def draw_l():
    """Draw a lowercase 'l': a single full-height vertical stroke."""
    # NOTE(review): unlike the other letters this does not advance the
    # turtle by CAP_SIZE afterwards - the narrow 'l' relies on kerning
    # alone for spacing. Confirm this is intentional.
    [x, y] = pos()
    pu()
    setpos(x, y + (CAP_SIZE * 2))
    pd()
    setpos(x, y)
def draw_o():
    """Draw a lowercase 'o': one full circle."""
    [x, y] = pos()
    pu()
    setpos(x, y + (CAP_SIZE / 2) + (SEGMENT_LENGTH / 2))
    pd()
    drawCircle(NUM_SEGMENTS)  # full circle
    pu()
    setpos(x + CAP_SIZE, y)  # advance to the next letter position
def draw_r():
    """Draw a lowercase 'r': short stem plus a quarter-circle arm."""
    [x, y] = pos()
    pu()
    setpos(x, y + CAP_SIZE)
    pd()
    setpos(x, y)  # short stem
    pu()
    setpos(x, y + (CAP_SIZE / 2) + (SEGMENT_LENGTH / 2))
    pd()
    drawCircle(int(NUM_SEGMENTS / 4))  # quarter circle arm
    pu()
    setpos(x + (CAP_SIZE / 2), y)  # 'r' is a half-width letter
def draw_t():
    """Draw a lowercase 't': tall center stem with a crossbar."""
    [x, y] = pos()
    pu()
    setpos(x + (CAP_SIZE / 2), y + (CAP_SIZE * 2))
    pd()
    setpos(x + (CAP_SIZE / 2), y)  # center stem
    pu()
    setpos(x, y + (CAP_SIZE * 1.5))
    pd()
    setpos(x + CAP_SIZE, y + (CAP_SIZE * 1.5))  # crossbar
    pu()
    setpos(x + CAP_SIZE, y)  # advance to the next letter position
def draw_u():
    """Draw a lowercase 'u': two short stems joined by a bottom arc."""
    [x, y] = pos()
    pu()
    setpos(x, y + CAP_SIZE)
    pd()
    setpos(x, y + (CAP_SIZE / 2) - (SEGMENT_LENGTH / 2))  # left stem
    drawCircle(int(NUM_SEGMENTS / 2), h=270, dir=-1)  # bottom half circle
    pu()
    setpos(x + CAP_SIZE, y + CAP_SIZE)
    pd()
    setpos(x + CAP_SIZE, y)  # right stem
def drawText(text, kern=(CAP_SIZE * 0.35), lh=(CAP_SIZE * 2.3)):
    """Draw `text` starting at the current turtle position.

    Supported letters: e, h, l, o, r, t, u. A newline moves `lh` pixels
    down and back to the starting x; any other character (including
    space) advances by a full CAP_SIZE. `kern` is added after every
    drawn letter.
    """
    # Dispatch table replaces the original if/elif chain (same behavior).
    letters = {
        "e": draw_e, "h": draw_h, "l": draw_l, "o": draw_o,
        "r": draw_r, "t": draw_t, "u": draw_u,
    }
    # Keep track of y level
    [startX, startY] = pos()
    dy = 0  # cumulative vertical offset from newlines
    for c in text:
        if c in letters:
            # Draw letters
            letters[c]()
        elif c == "\n":
            # Draw newlines
            dy += lh
            pu()
            setpos(startX, startY - dy)
            continue
        else:
            # Draw spaces (or invalid characters)
            [lineX, lineY] = pos()
            pu()
            setpos(lineX + CAP_SIZE, lineY)
            continue
        # Add kerning after each letter
        [lineX, lineY] = pos()
        pu()
        setpos(lineX + kern, lineY)
def main():
    """Position the turtle and draw 'hello' / 'turtle' on two lines."""
    pu()
    setpos(-3.35 * CAP_SIZE, CAP_SIZE)  # start left of center, slightly up
    drawText(" hello\nturtle")
# Do not change anything after this line.
if __name__ == '__main__':
    start()  # create the window and configure the pen
    main()   # draw the text
    done()   # keep the window open until the user closes it
|
class Solution:
    """
    Automatic proofreading (LintCode).

    @param str: The string before proofreading.
    @return: Return the string after proofreading.

    Scanning left to right, repeatedly fix two kinds of typos:
      1. Three identical characters in a row -> drop one
         ("ooops" -> "oops").
      2. Two identical adjacent pairs (AABB) -> drop one character of the
         second pair ("helloo" -> "hello").
    Rule application is greedy from the left, e.g. "aabbcc" -> "aabcc".

    Examples:
        "helloo" -> "hello"   (the second 'o' of "lloo" is removed)
        "woooow" -> "woow"    ("oooo" loses one 'o', then "ooo" loses one)

    1 <= len(str) <= 10^5; the string is all lowercase letters.
    https://www.jiuzhang.com/solution/automatic-proofreading-program/
    """

    def automaticProofreadingProgram(self, str):
        """Return the corrected string.

        Single stack-based pass: append each character, then pop it again
        if it just completed a forbidden "AAA" or "AABB" pattern. Because
        patterns are removed as soon as they form, the greedy left-to-right
        rule is satisfied. O(n) time and space.
        """
        fixed = []
        for ch in str:
            fixed.append(ch)
            # Rule 1: three identical characters in a row.
            if len(fixed) >= 3 and fixed[-1] == fixed[-2] == fixed[-3]:
                fixed.pop()
            # Rule 2: two adjacent identical pairs (AABB).
            elif (len(fixed) >= 4 and fixed[-1] == fixed[-2]
                    and fixed[-3] == fixed[-4]):
                fixed.pop()
        return ''.join(fixed)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2007 Søren Roug, European Environment Agency
#
# This is free software. You may redistribute it under the terms
# of the Apache license and the GNU General Public License Version
# 2 or at your option any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
import unittest, os
import io
import zipfile
from odf import teletype
from odf.opendocument import OpenDocumentText, load
from odf import style, text
from odf.text import P
class TestWhite(unittest.TestCase):
    """Whitespace handling: tabs, newlines and space runs <-> ODF elements."""

    def test_create(self):
        """ Test that tabs and newlines are converted to elements """
        para = P()
        teletype.addTextToElement(para,
            u"The boy stood on the burning deck,\n" +
            u"\tHis feet\twere\t\tfull of blisters.\n" +
            u"The captain stood in\tthe public house\n" +
            u" With beer running down his whiskers. " );
        outfp = io.StringIO()
        para.toXml(1,outfp)
        self.assertEqual(u'''<text:p>The boy stood <text:s text:c="2"/>on the burning deck,<text:line-break/>''' +
            u'''<text:tab/>His feet<text:tab/>were<text:tab/><text:tab/>full of blisters.<text:line-break/>''' +
            u'''The captain <text:s text:c="1"/>stood in<text:tab/>the public house<text:line-break/>''' +
            u''' <text:s text:c="8"/>With beer running down his whiskers. <text:s text:c="2"/></text:p>''', outfp.getvalue())

    def test_extract(self):
        """ Convert a paragraph to plain text """
        # Removed an unused local (`content`) that held the expected XML of
        # the example document but was never asserted against.
        poem_odt = os.path.join(
            os.path.dirname(__file__), u"examples", u"serious_poem.odt")
        d = load(poem_odt)
        allparas = d.getElementsByType(P)
        self.assertEqual(u"The boy stood on the burning deck,\n\tWhence all\tbut\t\thim had fled.\nThe flames that lit\tthe battle's\twreck,\n Shone o'er him, round the dead. ", teletype.extractText(allparas[0]))

    def test_extract_with_span(self):
        """ Extract a text with a bold/italic span """
        # Removed a duplicated extractText() call whose result was discarded.
        poem_odt = os.path.join(
            os.path.dirname(__file__), u"examples", u"simplestyles.odt")
        d = load(poem_odt)
        self.assertEqual(u'Plain textBoldItalicBold italicUnderlineUnderline italicUnderline bold italicKm2 - superscriptH2O - subscript', teletype.extractText(d.body))
if __name__ == '__main__':
unittest.main()
|
# Copyright 2020 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__all__ = ['Deny', 'Identifier', 'ScopedIdentifier', 'Style', 'Use']
from dataclasses import dataclass, field
from typing import List, Callable, IO, Optional, Tuple
from functools import reduce
@dataclass
class Deny:
    """A deny rule; empty lists act as wildcards matching everything."""
    styles: List[str] = field(default_factory=list)
    uses: List[str] = field(default_factory=list)
    bindings: List[str] = field(default_factory=list)

    def matches(self, style: 'Style', use: 'Use') -> bool:
        """True when this rule applies to the given style and use."""
        style_ok = not self.styles or style.name in self.styles
        use_ok = not self.uses or use.name in self.uses
        return style_ok and use_ok
@dataclass
class Identifier:
    """An identifier under test, with optional per-style/use deny rules."""
    name: str
    tag: int
    deny: List[Deny] = field(default_factory=list)

    @property
    def parts(self) -> List[str]:
        """The underscore-separated words of the name."""
        return self.name.split('_')

    def scoped(self, style: 'Style', use: 'Use') -> 'ScopedIdentifier':
        """Render this identifier for one particular style and use."""
        # Deny rules that apply to this style/use combination.
        applicable = [rule for rule in self.deny if rule.matches(style, use)]
        # Union of all bindings deny-lists across the applicable rules.
        denylist = sorted({b for rule in applicable for b in rule.bindings})
        # A rule with an empty bindings list denies the identifier outright.
        fully_denied = any(rule.bindings == [] for rule in applicable)
        return ScopedIdentifier(
            style(self.parts), self.tag, style, use,
            fully_denied, ','.join(denylist))
@dataclass
class ScopedIdentifier:
    """An identifier already rendered in a given style for a given use."""
    name: str
    tag: int
    style: 'Style'
    use: 'Use'
    denied: bool
    bindings_denylist: str

    def __str__(self):
        return self.name

    @property
    def decl_attributes(self) -> str:
        '''Attributes to put on a declaration with this name.'''
        if not self.bindings_denylist:
            return ''
        return f'[BindingsDenylist="{self.bindings_denylist}"]\n'
@dataclass
class Style:
    """A naming style; `func` is a 1-tuple holding the rendering callable
    (wrapped in a tuple so dataclass machinery does not bind it as a method)."""
    name: str
    func: Tuple[Callable[[List[str]], str]]

    def __call__(self, parts: List[str]) -> str:
        render = self.func[0]
        return render(parts)
@dataclass
class Use:
    """A context identifiers appear in; `func` is a 1-tuple writer callable."""
    name: str
    func: Tuple[Callable[[IO, List[ScopedIdentifier]], None]]

    def __call__(self, f: IO, idents: List[ScopedIdentifier]):
        writer = self.func[0]
        return writer(f, idents)
|
import unittest
from dpipe import make_dataset
import tensorflow as tf
DATAPATH_IMAGES = 'images_dataset'
DATAPATH_VIDEOS = 'videos_dataset'
def make_model():
    """Build a small dense classifier over 128x128 RGB inputs (2 logits)."""
    inputs = tf.keras.Input(shape=(128, 128, 3))
    hidden = tf.keras.layers.Flatten()(inputs)
    # Two identical hidden layers.
    for _ in range(2):
        hidden = tf.keras.layers.Dense(64, activation='relu')(hidden)
    outputs = tf.keras.layers.Dense(2)(hidden)
    return tf.keras.Model(inputs=inputs, outputs=outputs)
class TestFromListFactory(unittest.TestCase):
    """Integration tests for dpipe.make_dataset over the bundled example
    image/video folders (6 images, 4 videos)."""

    def test_images_label(self):
        """Image+label dataset yields one element per example image."""
        dataset = make_dataset('image', 'label', x_path=DATAPATH_IMAGES, x_size=(128, 128)).build()
        self.assertEqual(len(list(dataset.as_numpy_iterator())), 6)

    def test_videos_label(self):
        """Video+label dataset yields one element per example video."""
        dataset = make_dataset('video', 'label', x_path=DATAPATH_VIDEOS, x_size=(128, 128)).build()
        self.assertEqual(len(list(dataset.as_numpy_iterator())), 4)

    def test_videos_label_cropping_single(self):
        ## TEST single
        # test external defined video frames
        dataset = make_dataset('video', 'label', x_path=DATAPATH_VIDEOS, x_size=(128, 128), video_frames=10,
                               video_cropping='single').build()
        self.assertEqual(len(list(dataset.as_numpy_iterator())), 4)
        self.assertEqual([10,128,128,3],dataset.element_spec[0].shape.as_list())
        # test inferred out video frames
        dataset = make_dataset('video', 'label', x_path=DATAPATH_VIDEOS, x_size=(128, 128),
                               video_cropping='single').build()
        self.assertEqual(len(list(dataset.as_numpy_iterator())), 4)
        self.assertEqual([140, 128, 128, 3], dataset.element_spec[0].shape.as_list())

    def test_videos_label_cropping_multi(self):
        ## TEST multi
        # test external defined video frames
        # 'multi' cropping splits each video into several 10-frame clips,
        # so the dataset grows from 4 videos to 58 clips.
        dataset = make_dataset('video', 'label', x_path=DATAPATH_VIDEOS, x_size=(128, 128), video_frames=10,
                               video_cropping='multi').recompute_length().build()
        for m,n in dataset.as_numpy_iterator():
            self.assertEqual((10, 128, 128, 3), m.shape)
            self.assertEqual((), n.shape)
            break
        self.assertEqual(58, len(list(dataset.as_numpy_iterator())))
        self.assertEqual(58, dataset.length)
        self.assertEqual([10,128,128,3],dataset.element_spec[0].shape.as_list())
        # test inferred out video frames
        dataset = make_dataset('video', 'label', x_path=DATAPATH_VIDEOS, x_size=(128, 128),
                               video_cropping='multi').build()
        self.assertEqual(len(list(dataset.as_numpy_iterator())), 4)
        self.assertEqual([140, 128, 128, 3], dataset.element_spec[0].shape.as_list())

    def test_parallel_training(self):
        """A built dataset plugs straight into Keras model.fit."""
        EPOCHS = 10
        LENGTH = 50
        dataset_builder = make_dataset('image', 'label', x_path=DATAPATH_IMAGES, x_size=(128,128), one_hot_encoding=False)
        dataset = dataset_builder.\
            shuffle(LENGTH, reshuffle_each_iteration=True). \
            batch(1).\
            repeat(EPOCHS).\
            build()
        print(dataset.element_spec)
        model = make_model()
        model.compile(loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
                      optimizer=tf.keras.optimizers.RMSprop())
        print("Build arguments: ", dataset.built_args)
        model.fit(x=dataset, epochs=EPOCHS, **dataset.built_args)

    # TODO: implement test cases video video, video image, image video, label video and label image
if __name__ == '__main__':
unittest.main()
|
from absl import flags
from celery import Task
import tensorflow as tf
from pysc2.env import sc2_env
import importlib
from evolution.model_input import ModelInput
from evolution.model_output import ModelOutput
from evolution.model_config import ModelConfig
from common.env_wrapper import EnvWrapper
from common.feature_dimensions import NUM_FUNCTIONS
from common.enums import ModelDataFormat as DataFormat
import common.feature_dimensions as feature_dims
from common.random_util import RandomUtil
class WorkerEnvironment(Task):
    """Celery Task base class managing the RL environments.

    (Only) the `env_params` setter needs to be called before it can be used.
    When env_params is set, it (re-)initializes
    - the SC2 environment
    - the agent (and neural network model)
    or reuses them when already initialized and the params are the same.
    """

    # Lazily-created singletons, cached per worker process.
    _agent = None
    _env = None
    _sess = None
    _model_config = None
    _env_params = dict()

    def __init__(self):
        tf.reset_default_graph()
        # PySC2 needs flags parsed or it will cause exceptions
        flags.FLAGS(['distributed_main.py'])

    @property
    def sess(self):
        # Lazily create the TF session on first access.
        if self._sess is None:
            self._sess = tf.Session()
        return self._sess

    @sess.setter
    def sess(self, value):
        self._sess = value

    @property
    def model_config(self):
        # Lazily build the model configuration from the env params.
        if self._model_config is None:
            self._model_config = self.setup_model_config()
        return self._model_config

    @model_config.setter
    def model_config(self, value):
        self._model_config = value

    @property
    def env(self):
        # Lazily create the wrapped SC2 environment.
        if self._env is None:
            self._env = self.setup_environment()
        return self._env

    @property
    def agent(self):
        # Lazily create the agent (and its TF model).
        if self._agent is None:
            self._agent = self.setup_agent()
        return self._agent

    @agent.setter
    def agent(self, value):
        self._agent = value

    @property
    def env_params(self):
        return self._env_params

    @env_params.setter
    def env_params(self, params):
        """Setter for environment parameters.

        Will set up environment and agent according to parameters in
        `params`; everything is rebuilt only when at least one value
        actually changed.
        """
        params_changed = False
        # Check if parameters changed
        for key, value in params.items():
            if key in self._env_params.keys():
                if self._env_params[key] == value:
                    continue
            self._env_params[key] = value
            params_changed = True
        # If changed: re-initializes random table, environment and agent
        if params_changed:
            RandomUtil.reinitialize_random_table(size=params['random_table_size'],
                                                 sigma=params['random_table_sigma'],
                                                 seed=params['random_table_seed'])
            self.setup_model_config()
            self.setup_environment()
            self.setup_agent()

    def shut_down_env(self):
        """Close the SC2 environment and TF session and reset the graph."""
        if self._env is not None:
            self._env.close()
        if self._sess is not None:
            self._sess.close()
            self._sess = None
        tf.reset_default_graph()

    def setup_agent(self):
        """Set up the agent (incl. model configuration for inputs/outputs).

        Replaces any existing session/graph first. Returns the agent.
        """
        if self._sess is not None:
            self._sess.close()
            self._sess = None
        tf.reset_default_graph()
        # The agent class is configured as a dotted path, e.g. "pkg.mod.Agent".
        agent_module, agent_name = self._env_params['agent'].rsplit(".", 1)
        agent_cls = getattr(importlib.import_module(agent_module), agent_name)
        self._agent = agent_cls(self.sess, self.model_config, tf.global_variables_initializer)
        return self._agent

    def setup_environment(self):
        """Set up the wrapped SC2 environment. Returns the EnvWrapper."""
        if self._env is not None:
            self._env.close()
        players = list()
        players.append(sc2_env.Agent(sc2_env.Race[self._env_params['agent_race']]))
        sc2_environment = sc2_env.SC2Env(map_name=self._env_params['map_name'],
                                         players=players,
                                         step_mul=self._env_params['step_mul'],
                                         agent_interface_format=sc2_env.parse_agent_interface_format(
                                             feature_screen=self._env_params['screen_size'],
                                             feature_minimap=self._env_params['screen_size'],
                                             rgb_screen=self._env_params['rgb_screen_size'],
                                             rgb_minimap=self._env_params['rgb_screen_size'],
                                             action_space=self._env_params['action_space'],
                                             use_feature_units=self._env_params['use_feature_units']))
        self._env = EnvWrapper(sc2_environment, self.model_config)
        return self._env

    def setup_model_config(self):
        """Create the ModelConfig (neural network inputs/outputs) from the
        environment parameters. Returns the ModelConfig."""
        feature_inputs = list()
        # Flat (non-spatial) features come as a comma-separated name list.
        flat_feature_names = self._env_params['features_flat']
        flat_feature_names = flat_feature_names.split(',')
        feature_inputs.append(ModelInput('flat', flat_feature_names,
                                         feature_dims.get_flat_feature_dims(flat_feature_names)))
        if self._env_params['use_minimap']:
            size = self._env_params['screen_size']
            feature_inputs.append(ModelInput('minimap', ['feature_minimap'], feature_dims.get_minimap_dims(), size))
        # Screen input is always present.
        size = self._env_params['screen_size']
        feature_inputs.append(ModelInput('screen', ['feature_screen'], feature_dims.get_screen_dims(), size))
        arg_outputs = []
        # One output head per SC2 action argument type.
        for arg_type in feature_dims.ACTION_TYPES:
            arg_outputs.append(ModelOutput(arg_type, arg_type.sizes[0], feature_dims.is_spacial_action[arg_type]))
        scope = "test"
        self._model_config = ModelConfig(feature_inputs, arg_outputs, size, NUM_FUNCTIONS, DataFormat.NHWC, scope, self._env_params['use_biases'])
        return self._model_config
|
import math
import pyglet
from pyglet import gl
import graphicutils as gu
from app import colors
from .camera import Camera
def draw_lines(vertices):
    """Draw GL line segments from a flat (x, y, x, y, ...) vertex sequence."""
    vertex_count = len(vertices) // 2
    pyglet.graphics.draw(vertex_count, gl.GL_LINES, ('v2f', vertices))
def draw_circle(x, y, r, color, mode=gl.GL_LINE_LOOP, resolution=32):
    """Draw a circle of radius `r` at (x, y) in the given RGBA `color`.

    `mode` selects outline (GL_LINE_LOOP) vs filled (GL_POLYGON);
    `resolution` is the number of segments approximating the circle.
    """
    gl.glColor4f(*color)
    gu.draw_circle(int(x), int(y), int(r), resolution, mode)
def draw_grid():
    """Draw the background grid for the active camera view."""
    camera = Camera.get_active()
    # Grid spacing in screen pixels; skip drawing entirely when zoomed out
    # so far that a cell would round down to zero.
    size = int(20 * camera.zoom)
    if size > 0:
        gl.glColor3f(*colors.CONTEXT_GRID_COLOR)
        gu.draw_grid(
            camera.w,
            camera.h,
            int(camera.centerx),
            int(camera.centery),
            size,
            0,
            0
        )
def draw_axes():
    """Draw the world X (red) and Y (green) axes with arrow markers."""
    camera = Camera.get_active()
    center_x = int(camera.centerx)
    center_y = int(camera.centery)
    gl.glColor3f(1, 0, 0)
    gu.draw_arrow(20, 40, 40, 0)  # fixed screen-corner X arrow marker
    draw_lines((0, center_y, camera.w, center_y))  # horizontal axis line
    gl.glColor3f(0, 1, 0)
    gu.draw_arrow(20, 40, 0, 40)  # fixed screen-corner Y arrow marker
    draw_lines((center_x, 0, center_x, camera.h))  # vertical axis line
def draw_path(obj):
    """Draw (at most the first 100 points of) `obj`'s trajectory."""
    camera = Camera.get_active()
    gl.glColor4f(1, 0.76, 0.12, 0.8)
    gl.glBegin(gl.GL_LINES)
    # NOTE(review): GL_LINES consumes vertices in independent pairs, so this
    # draws disjoint segments between alternating points - confirm that
    # GL_LINE_STRIP was not intended for a continuous path.
    for x, y in obj.path[:100]:
        # World -> screen coordinates.
        pos_x = int(x * camera.zoom + camera.centerx)
        pos_y = int(y * camera.zoom + camera.centery)
        gl.glVertex2d(pos_x, pos_y)
    gl.glEnd()
def draw_object(obj):
    """Draw a body as a filled circle plus its force/velocity/acceleration
    vectors (green / blue / pink arrows), all in screen coordinates."""
    camera = Camera.get_active()
    # World -> screen coordinates.
    pos = (obj.position * camera.zoom)
    x = int(pos[0] + camera.centerx)
    y = int(pos[1] + camera.centery)
    # Forces: green arrows (only non-zero ones).
    gl.glColor4f(0, 1, 0, 1)
    for force in obj.forces:
        if any(force):
            w = int(force[0] * camera.zoom)
            h = int(force[1] * camera.zoom)
            gu.draw_arrow(x, y, w, h)
    # Velocity: blue arrow.
    gl.glColor4f(0.2, 0.5, 1, 1)
    if any(obj.velocity):
        w = int(obj.velocity[0] * camera.zoom)
        h = int(obj.velocity[1] * camera.zoom)
        gu.draw_arrow(x, y, w, h)
    # Acceleration: pink arrow.
    gl.glColor4f(0.9, 0.29, 0.58, 1)
    if any(obj.acceleration):
        w = int(obj.acceleration[0] * camera.zoom)
        h = int(obj.acceleration[1] * camera.zoom)
        gu.draw_arrow(x, y, w, h)
    # Body: filled disc with a separate border circle on top.
    draw_circle(
        x,
        y,
        20 * camera.zoom,
        colors.RIGIDBODY_COLOR,
        mode=gl.GL_POLYGON
    )
    draw_circle(
        x,
        y,
        20 * camera.zoom,
        colors.RIGIDBODY_BORDER_COLOR,
    )
def draw_ruler(x1, y1, x2, y2):
    """Draw a measuring line between two world points with a length label.

    Inputs are world coordinates; the dashed line, end markers and label are
    positioned in screen space via the active camera. The label shows the
    world-space distance in meters.
    """
    camera = Camera.get_active()
    vx1 = int(x1 * camera.zoom + camera.centerx)
    vy1 = int(y1 * camera.zoom + camera.centery)
    vx2 = int(x2 * camera.zoom + camera.centerx)
    vy2 = int(y2 * camera.zoom + camera.centery)
    gl.glColor4f(0.27, 0.63, 0.78, 0.8)
    gu.draw_dashed_line(vx2, vy2, vx1, vy1)
    # Small circles marking both endpoints.
    gu.draw_circle(vx1, vy1, 4, 8, gl.GL_LINE_LOOP)
    gu.draw_circle(vx2, vy2, 4, 8, gl.GL_LINE_LOOP)
    # Distance measured in world units, not pixels.
    size = math.hypot(x2 - x1, y2 - y1)
    # NOTE(review): a new Label is allocated on every call — consider caching
    # if rulers are redrawn every frame.
    label = pyglet.text.Label(
        font_name='verdana',
        font_size=12,
        color=(255, 255, 255, 200))
    label.text = f'{size:.2f}m'
    # Label sits at the midpoint of the drawn line.
    label.x = (vx1 + vx2) // 2
    label.y = (vy1 + vy2) // 2
    label.draw()
def draw_select_area(x1, y1, x2, y2):
    """Draw the translucent rectangular selection spanned by two corners."""
    corners = (x1, y1, x2, y1, x2, y2, x1, y2)
    # Filled interior first, then the brighter outline on top.
    gl.glColor4f(0.1, 0.2, 0.3, 0.2)
    pyglet.graphics.draw(4, gl.GL_QUADS, ('v2f', corners))
    gl.glColor4f(0.3, 0.5, 0.8, 0.5)
    pyglet.graphics.draw(4, gl.GL_LINE_LOOP, ('v2f', corners))
|
import hashlib
def dict_to_hash(key):
    """
    Given a dictionary `key`, returns a hash string.

    The MD5 digest is fed the stringified keys and values in sorted key
    order, so the result is independent of insertion order.
    """
    digest = hashlib.md5()
    for name in sorted(key):
        digest.update(str(name).encode())
        digest.update(str(key[name]).encode())
    return digest.hexdigest()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: gexiao
# Created on 2017-09-08 15:28
import sys
sys.path.append('..')
import math
import pymongo
import json
import re
from datetime import datetime
from elasticsearch import Elasticsearch
from pymongo import MongoClient
from config.config import *
# Single-shard, no-replica setup: this is a one-box migration target.
SHARDS_NUMBER = 1
REPLICAS_NUMBER = 0
# Index settings applied to every index created by this script.
DEFAULT_SETTINGS = {"settings": {
    "index": {
        "number_of_shards": SHARDS_NUMBER,
        "number_of_replicas": REPLICAS_NUMBER,
        "refresh_interval": "30s"
    }
}}
# Cap for the computed topic quality bonus (see assemble_topic below).
TOPIC_MAX_BONUS = 15.0
# Physical index name vs. the stable alias the application queries.
TOPIC_INDEX_NAME = "topic_v1"
TOPIC_ALIAS_NAME = "topic"
# Standalone postscript mapping; its "properties" keys double as the field
# whitelist used by filter_postscript().
POSTSCRIPT_MAPPING = {
    "mappings": {
        "postscript": {
            "_all": {
                "enabled": False
            },
            "dynamic": "strict",
            "properties": {
                "content": {
                    "type": "text",
                    "analyzer": "ik_max_word"
                },
                "sequence": {
                    "type": "short"
                },
                "topic_id": {
                    "type": "integer"
                },
                "id": {
                    "type": "keyword"
                }
            }
        }
    }
}
# Standalone topic mapping; its "properties" keys double as the field
# whitelist used by filter_topic().
TOPIC_MAPPING = {
    "mappings": {
        "topic": {
            "_all": {
                "enabled": False
            },
            "dynamic": "strict",
            "properties": {
                "click": {
                    "type": "integer"
                },
                "content": {
                    "type": "text",
                    "analyzer": "ik_max_word"
                },
                "created": {
                    "type": "date"
                },
                "deleted": {
                    "type": "boolean"
                },
                "favorite": {
                    "type": "integer"
                },
                "id": {
                    "type": "integer"
                },
                "last_modified": {
                    "type": "date"
                },
                "last_touched": {
                    "type": "date"
                },
                "member": {
                    "type": "keyword"
                },
                "node": {
                    "type": "integer"
                },
                "replies": {
                    "type": "integer"
                },
                "thank": {
                    "type": "integer"
                },
                "title": {
                    "type": "text",
                    "analyzer": "ik_max_word"
                }
            }
        }
    }
}
# Standalone reply mapping; its "properties" keys double as the field
# whitelist used by filter_reply().
REPLY_MAPPING = {
    "mappings": {
        "reply": {
            "_all": {
                "enabled": False
            },
            "dynamic": "strict",
            "properties": {
                "content": {
                    "type": "text",
                    "analyzer": "ik_max_word"
                },
                "created": {
                    "type": "date"
                },
                "deleted": {
                    "type": "boolean"
                },
                "id": {
                    "type": "integer"
                },
                "last_modified": {
                    "type": "date"
                },
                "member": {
                    "type": "keyword"
                },
                "thanks": {
                    "type": "integer"
                },
                "topic_id": {
                    "type": "integer"
                }
            }
        }
    }
}
# Denormalized mapping actually used for the index: a topic with nested
# reply/postscript documents.  content/title/postscripts copy into
# all_content (and replies additionally into all_reply) for full-text search.
ASSEMBLED_TOPIC_MAPPING = {
    "mappings": {
        "topic": {
            "dynamic": "strict",
            "_all": {
                "enabled": False
            },
            "properties": {
                "all_content": {
                    "type": "text",
                    "analyzer": "ik_max_word"
                },
                "all_reply": {
                    "type": "text",
                    "analyzer": "ik_max_word"
                },
                "click": {
                    "type": "integer"
                },
                "content": {
                    "type": "text",
                    "copy_to": [
                        "all_content"
                    ],
                    "analyzer": "ik_max_word"
                },
                "created": {
                    "type": "date"
                },
                "deleted": {
                    "type": "boolean"
                },
                "favorite": {
                    "type": "integer"
                },
                "id": {
                    "type": "integer"
                },
                "last_modified": {
                    "type": "date"
                },
                "last_touched": {
                    "type": "date"
                },
                "member": {
                    "type": "keyword"
                },
                "node": {
                    "type": "integer"
                },
                "bonus": {
                    "type": "float"
                },
                "postscript_list": {
                    "type": "nested",
                    "dynamic": "strict",
                    "properties": {
                        "content": {
                            "type": "text",
                            "copy_to": [
                                "all_content"
                            ],
                            "analyzer": "ik_max_word"
                        },
                        "id": {
                            "type": "keyword"
                        },
                        "sequence": {
                            "type": "short"
                        },
                        "topic_id": {
                            "type": "integer"
                        }
                    }
                },
                "replies": {
                    "type": "integer"
                },
                "reply_list": {
                    "type": "nested",
                    "dynamic": "strict",
                    "properties": {
                        "content": {
                            "type": "text",
                            "copy_to": [
                                "all_content", "all_reply"
                            ],
                            "analyzer": "ik_max_word"
                        },
                        "created": {
                            "type": "date"
                        },
                        "deleted": {
                            "type": "boolean"
                        },
                        "id": {
                            "type": "integer"
                        },
                        "last_modified": {
                            "type": "date"
                        },
                        "member": {
                            "type": "keyword"
                        },
                        "reply_id": {
                            "type": "integer"
                        },
                        "thanks": {
                            "type": "integer"
                        },
                        "topic_id": {
                            "type": "integer"
                        }
                    }
                },
                "thank": {
                    "type": "integer"
                },
                "title": {
                    "type": "text",
                    "copy_to": [
                        "all_content"
                    ],
                    "analyzer": "ik_max_word"
                }
            }
        }
    }
}
# Matches bare image URLs so they can be blanked before indexing.
image_url_pattern = re.compile(r"https?:\/\/[A-Za-z0-9_\-\/\.]+?\.(jpg|jpeg|gif|png)")


def json_serial(obj):
    """JSON serializer for objects not serializable by default json code."""
    if isinstance(obj, datetime):
        return obj.isoformat()
    raise TypeError("Type %s not serializable" % type(obj))


def sub_image_url(content):
    """Replace image URLs in *content* with a space; '' for falsy input."""
    if not content:
        return ""
    return re.sub(image_url_pattern, " ", content)


def filter_topic(topic, wanted_keys=None):
    """Project *topic* onto the indexable topic fields.

    :param topic: raw topic dict from MongoDB.
    :param wanted_keys: field whitelist; defaults to the topic mapping's
        properties.  (Fix: the original evaluated the default at function
        definition time — the def-time-default antipattern; it is now
        resolved at call time.)
    :returns: filtered dict with cleaned content and empty reply_list /
        postscript_list ready to be filled by assemble_topic().
    """
    if wanted_keys is None:
        wanted_keys = TOPIC_MAPPING["mappings"]["topic"]["properties"].keys()
    assert len(wanted_keys) > 0
    wanted_topic = {key: topic[key] for key in wanted_keys if key in topic}
    if "content" in wanted_topic:
        wanted_topic["content"] = sub_image_url(wanted_topic["content"])
    wanted_topic["reply_list"] = []
    wanted_topic["postscript_list"] = []
    return wanted_topic


def filter_reply(reply, wanted_keys=None):
    """Project *reply* onto the indexable reply fields (see filter_topic)."""
    if wanted_keys is None:
        wanted_keys = REPLY_MAPPING["mappings"]["reply"]["properties"].keys()
    assert len(wanted_keys) > 0
    wanted_reply = {key: reply[key] for key in wanted_keys if key in reply}
    if "content" in wanted_reply:
        wanted_reply["content"] = sub_image_url(wanted_reply["content"])
    return wanted_reply


def filter_postscript(postscript, wanted_keys=None):
    """Project *postscript* onto its indexable fields and synthesize its id.

    The ES id is "<topic_id>_<sequence>", so postscripts are unique per
    topic and position.
    """
    if wanted_keys is None:
        wanted_keys = POSTSCRIPT_MAPPING["mappings"]["postscript"]["properties"].keys()
    wanted_postscript = {key: postscript[key] for key in wanted_keys if key in postscript}
    if "content" in wanted_postscript:
        wanted_postscript["content"] = sub_image_url(wanted_postscript["content"])
    wanted_postscript["id"] = str(wanted_postscript["topic_id"]) + "_" + str(wanted_postscript["sequence"])
    return wanted_postscript
def assemble_topic(topic, reply_collection, postscript_collection):
    """Build the denormalized ES document for *topic*.

    Embeds the topic's replies (ordered by id) and postscripts (ordered by
    sequence) and attaches the ranking "bonus" score.
    """
    topic_id = topic["id"]
    doc = filter_topic(topic)
    replies = reply_collection.find({"topic_id": topic_id}).sort("id", pymongo.ASCENDING)
    doc["reply_list"] = [filter_reply(reply) for reply in replies]
    postscripts = postscript_collection.find({"topic_id": topic_id}).sort("sequence", pymongo.ASCENDING)
    doc["postscript_list"] = [filter_postscript(ps) for ps in postscripts]
    doc["bonus"] = _topic_bonus(topic)
    return doc


def _topic_bonus(topic):
    """Quality bonus: log2((1+5*favorites)*(1+5*thanks)) capped at
    TOPIC_MAX_BONUS, plus a flat +5 outside the sandbox/ohno nodes.
    Deleted topics score 0."""
    if topic["deleted"]:
        return 0.0
    favorite = int(topic["favorite"]) if "favorite" in topic else 0
    thank = int(topic["thank"]) if "thank" in topic else 0
    node_id = int(topic["node"])
    score = round(float(math.log2((1 + 5 * favorite) * (1 + 5 * thank))), 2)
    score = min(score, TOPIC_MAX_BONUS)
    # sandbox: 542, ohno: 983
    if node_id not in (542, 983):
        score += 5
    return score
class Migrate(object):
    """One-shot migration of topics (with embedded replies/postscripts)
    from MongoDB into a denormalized Elasticsearch index, followed by an
    alias swap onto the new index."""
    def __init__(self):
        # Optional SCRAM-SHA-1 auth; all MONGODB_*/ES_HOST constants come
        # from the star-import of config.config.
        if MONGODB_USER:
            self.client = MongoClient(MONGODB_HOST, MONGODB_PORT,
                                      username=MONGODB_USER, password=MONGODB_PASSWORD,
                                      authSource=MONGODB_DBNAME, authMechanism='SCRAM-SHA-1')
        else:
            self.client = MongoClient(MONGODB_HOST, MONGODB_PORT)
        db = self.client.v2ex
        self.topic_collection = db.topic
        self.reply_collection = db.reply
        self.postscript_collection = db.postscript
        self.es = Elasticsearch([ES_HOST])
    def create_indices(self):
        # ignore=400: tolerate "resource_already_exists_exception" on re-runs.
        self.es.indices.create(index=TOPIC_INDEX_NAME, body={**DEFAULT_SETTINGS, **ASSEMBLED_TOPIC_MAPPING}, ignore=400)
    def migrate_assembled_topics(self):
        """Bulk-index every topic whose id is above the max id already in ES."""
        # Resume support: ask ES for the highest indexed topic id.
        search_body = {
            "aggs": {
                "max_id": {
                    "max": {
                        "field": "id"
                    }
                }
            },
            "size": 1
        }
        max_topic = self.es.search(index=TOPIC_INDEX_NAME, doc_type="topic", body=search_body)
        es_max_topic_id = max_topic["aggregations"]["max_id"]["value"]
        if not es_max_topic_id:
            es_max_topic_id = 0
        es_max_topic_id = int(es_max_topic_id)
        beginning_time = datetime.utcnow()
        cursor = self.topic_collection.find({"id": {"$gt": es_max_topic_id}}).sort("id", pymongo.ASCENDING)
        bulk_body = ""
        bulk_count = 0
        for topic in cursor:
            topic_id = topic["id"]
            wanted_topic = assemble_topic(topic, self.reply_collection, self.postscript_collection)
            # One bulk action = action line + document line (NDJSON).
            single_doc_str = json.dumps({"index": {"_index": TOPIC_INDEX_NAME, "_type": "topic", "_id": topic_id}}) + \
                             "\n" + \
                             json.dumps(wanted_topic, default=json_serial) + \
                             "\n"
            bulk_body = bulk_body + single_doc_str
            bulk_count += 1
            # Flush every 3000 documents to bound the request size.
            if bulk_count >= 3000:
                print("Start inserting topic " + str(topic_id))
                self.es.bulk(body=bulk_body, index=TOPIC_INDEX_NAME, doc_type="topic", request_timeout=20)
                bulk_count = 0
                bulk_body = ""
                print("Inserted topic " + str(topic_id))
        if bulk_body:
            self.es.bulk(body=bulk_body, index=TOPIC_INDEX_NAME, doc_type="topic")
        # Mark everything crawled before this run as synced.
        modified_count = self.update_topics_synced_state(beginning_time, True)
        print("Modified topic count: " + str(modified_count))
    def update_topics_synced_state(self, latest_crawled, synced):
        # Returns the modified-document count, or None when latest_crawled is
        # missing or not a datetime (NOTE(review): callers should handle the
        # implicit None return).
        if latest_crawled and isinstance(latest_crawled, datetime):
            return self.topic_collection.update_many({"crawled": {"$lt": latest_crawled},
                                                      "web_crawled": {"$lt": latest_crawled}},
                                                     {"$set": {"synced": synced}},
                                                     upsert=False).modified_count
    def create_aliases(self):
        """Point TOPIC_ALIAS_NAME at the new index, detaching any old ones."""
        if self.es.indices.exists_alias(name=TOPIC_ALIAS_NAME):
            # Remove + add in one update_aliases call so the swap is atomic.
            existing_alias = self.es.indices.get_alias(name=TOPIC_ALIAS_NAME)
            body = {
                "actions": [
                ]
            }
            for index_name, alias in existing_alias.items():
                body["actions"].append({"remove": {"index": index_name, "alias": TOPIC_ALIAS_NAME}})
            body["actions"].append({"add": {"index": TOPIC_INDEX_NAME, "alias": TOPIC_ALIAS_NAME}})
            self.es.indices.update_aliases(body)
        else:
            self.es.indices.put_alias(TOPIC_INDEX_NAME, TOPIC_ALIAS_NAME)
__all__ = ["filter_topic", "filter_reply", "filter_postscript", "assemble_topic",
"TOPIC_ALIAS_NAME"]
if __name__ == "__main__":
migrate = Migrate()
migrate.create_indices()
migrate.migrate_assembled_topics()
migrate.create_aliases()
|
from output.models.nist_data.atomic.name.schema_instance.nistschema_sv_iv_atomic_name_min_length_5_xsd.nistschema_sv_iv_atomic_name_min_length_5 import NistschemaSvIvAtomicNameMinLength5
# Auto-generated re-export of the xsdata model class for this XSD.
__all__ = [
    "NistschemaSvIvAtomicNameMinLength5",
]
|
# This script reads in a GTF transcript annotation and extracts the splice
# junctions. Exons must be in order.
# The output format is designed to match the STAR SJ file output format
from optparse import OptionParser
from pyfasta import Fasta
def getOptions():
    """Parse command-line options: input GTF, genome FASTA, minimum intron
    size, and output path.  Reads sys.argv via optparse."""
    parser = OptionParser()
    parser.add_option("--f", dest = "infile", help = "Input GTF file",
                      metavar = "FILE", type = "string", default = "")
    parser.add_option("--g", dest = "genomeFile", help = "Reference genome",
                      metavar = "FILE", type = "string", default = "")
    parser.add_option("--minIntronSize", "-m", dest = "minIntron", default = 21,
                      help = "Minimum size of intron to consider a junction. Default: 21 bp.")
    parser.add_option("--o", dest = "outfile",
                      help = "output file", metavar = "FILE", type = "string", default = "out.txt")
    (options, args) = parser.parse_args()
    return options
def formatSJOutput(currExon, prev_exonEnd, genome, minIntron):
    """Format one splice junction in STAR SJ-file style.

    currExon: GTF fields of the current exon (list of strings).
    prev_exonEnd: end (on '+') or start (on '-') coordinate of the previous
        exon of the same transcript.
    genome: pyfasta-style object exposing .sequence({...}, one_based=True).
    minIntron: introns shorter than this return None (skipped).
    Returns a tab-joined line or None.
    """
    chromosome = currExon[0]
    strand = currExon[6]
    if strand == "+":
        strand = "1"
        intron_start = int(prev_exonEnd) + 1
        intron_end = int(currExon[3]) - 1
    elif strand == "-":
        strand = "2"
        intron_start = int(currExon[4]) + 1
        intron_end = int(prev_exonEnd) - 1
    else:
        # Fix: the original fell through here with intron_start/intron_end
        # unbound (NameError); skip junctions with an unknown strand instead.
        return None
    if abs(intron_end - intron_start + 1) < minIntron:
        return None
    # Fix: compute the motif once, and only for junctions that pass the
    # length filter (the original called getIntronMotif up to twice).
    intronMotif = getIntronMotif(chromosome, intron_start, intron_end, genome)
    annotationStatus = "1"
    # Read-derived columns are unknown here; placeholders match the format.
    nUniqueReads = "NA"
    nMultiReads = "NA"
    maxSpliceOverhang = "NA"
    return "\t".join([chromosome, str(intron_start), str(intron_end), strand, intronMotif, annotationStatus, nUniqueReads, nMultiReads, maxSpliceOverhang])

def getIntronMotif(chrom, start, end, genome):
    """Return the motif code for the intron's first and last dinucleotides.

    Codes: 21=GT/AG, 22=CT/AC, 23=GC/AG, 24=CT/GC, 25=AT/AC, 26=GT/AT,
    20=non-canonical.
    """
    startBases = genome.sequence({'chr': chrom, 'start': start, 'stop': start + 1}, one_based=True)
    endBases = genome.sequence({'chr': chrom, 'start': end - 1, 'stop': end}, one_based=True)
    motif = (startBases + endBases).upper()
    codes = {"GTAG": "21", "CTAC": "22", "GCAG": "23",
             "CTGC": "24", "ATAC": "25", "GTAT": "26"}
    return codes.get(motif, "20")
if __name__ == "__main__":
junctions_seen = {}
# Read input arguments
options = getOptions()
gtf = options.infile
genome = Fasta(options.genomeFile)
minIntron = int(options.minIntron)
o = open(options.outfile, 'w')
# Read in the GTF
prev_transcriptID = ""
prev_exonEnd = 0
with open(gtf, 'r') as f:
for line in f:
# Prep
line = line.strip()
# Ignore header
if line.startswith("#"):
continue
# Split GTF line on tab
info = line.split("\t")
# Ignore entries that are not exons
if info[2] != "exon":
continue
# Extract transcriptID and exonID from description field
description = info[-1]
# Skip entries that lack a transcript ID
if "transcript_id" not in description:
continue
if "exon_number" not in description:
continue
transcriptID = (description.split("transcript_id ")[1]).split('"')[1]
exonNumber = int((description.split("exon_number ")[1]).split(';')[0])
strand = info[6]
if transcriptID != prev_transcriptID:
# Start new transcript
if exonNumber != 1:
print "Error: exons are not listed in order"
exit()
prev_transcriptID = transcriptID
if strand == "+":
prev_exonEnd = info[4]
else:
prev_exonEnd = info[3]
else:
# Output the current junction
spliceJn = formatSJOutput(info, prev_exonEnd, genome, minIntron)
if strand == "+":
prev_exonEnd = info[4]
else:
prev_exonEnd = info[3]
if spliceJn != None:
if spliceJn not in junctions_seen:
o.write(spliceJn + "\n")
junctions_seen[spliceJn] = 1
o.close()
|
from PyPDF2 import PdfFileReader, PdfFileWriter
def merge_pdf(infnList, outfn):
    """Concatenate the pages of every PDF in *infnList* (in order) into *outfn*.

    Fix: the original leaked every input handle and the output handle.
    Input streams must stay open until the final write because
    PdfFileReader reads page data lazily from the underlying stream.
    """
    pdf_output = PdfFileWriter()
    open_inputs = []
    try:
        for infn in infnList:
            stream = open(infn, 'rb')
            open_inputs.append(stream)
            pdf_input = PdfFileReader(stream)
            page_count = pdf_input.getNumPages()
            for i in range(page_count):
                pdf_output.addPage(pdf_input.getPage(i))
        with open(outfn, 'wb') as out_stream:
            pdf_output.write(out_stream)
    finally:
        for stream in open_inputs:
            stream.close()
def split_pdf(input_pdf, out_num, split_site):
    """Split *input_pdf* into *out_num* files named 0.pdf, 1.pdf, ... each
    holding *split_site* consecutive pages."""
    for part in range(out_num):
        first_page = part * split_site
        last_page = (part + 1) * split_site
        with open(input_pdf, 'rb') as src, open(str(part)+'.pdf', 'wb') as dst:
            reader = PdfFileReader(src)
            writer = PdfFileWriter()
            for page_no in range(first_page, last_page):
                writer.addPage(reader.getPage(page_no))
            writer.write(dst)
|
#!/usr/bin/python
# vim:fileencoding=utf-8
# (c) 2011 Michał Górny <mgorny@gentoo.org>
# Released under the terms of the 2-clause BSD license.
import shutil, subprocess, tempfile
from ..exceptions import InvalidBashCodeError
from . import BashParser
_bash_script = '''
while
(
while read -r __GENTOOPM_CMD; do
eval ${__GENTOOPM_CMD}
done
exit 1
)
do
:
done
'''
class BashServer(BashParser):
	"""
	Bash script parser built on backgrounded bash process.

	Commands are written to the process's stdin one per line; replies are
	read back as NUL-terminated strings on stdout (see _read1/_write).
	"""
	def __init__(self):
		# env={} gives the server a clean environment, so sourced files see
		# only the variables they define themselves.
		self._bashproc = subprocess.Popen(['bash', '-c', _bash_script],
				stdin = subprocess.PIPE, stdout = subprocess.PIPE,
				env = {})
	def terminate(self):
		"""Stop the background bash process (idempotent)."""
		if self._bashproc is not None:
			self._bashproc.terminate()
			self._bashproc.communicate()
			self._bashproc = None
	def load_file(self, envf):
		"""Source the environment dump *envf* into the bash process.

		The dump is copied to a temp file, syntax-checked with `bash -n`
		(raising InvalidBashCodeError on failure), then sourced.
		"""
		with tempfile.NamedTemporaryFile('w+b') as f:
			shutil.copyfileobj(envf, f)
			f.flush()
			# 'exit 0' restarts the inner subshell, wiping any state left
			# by a previously loaded file.
			self._write('exit 0',
					'bash -n %s &>/dev/null && printf "OK\\0" || printf "FAIL\\0"' % repr(f.name))
			if self._read1() != 'OK':
				raise InvalidBashCodeError()
			self._write('source %s &>/dev/null; printf "DONE\\0"' % repr(f.name))
			if self._read1() != 'DONE':
				raise AssertionError('Sourcing unexpected caused stdout output')
	def _read1(self):
		"""Read one NUL-terminated reply from the bash process as str.

		Raises InvalidBashCodeError on EOF (process died or produced no
		terminator).
		"""
		assert self._bashproc is not None
		f = self._bashproc.stdout
		# Seed byte keeps the endswith() check from matching an empty buffer;
		# it is stripped (with the trailing NUL) before decoding.
		buf = b' '
		while not buf.endswith(b'\0'):
			x = f.read(1)
			if len(x) < 1:
				# end-of-file
				raise InvalidBashCodeError()
			buf += x
		return buf[1:-1].decode('utf-8')
	def _write(self, *cmds):
		"""Send each command as its own line to the bash process."""
		assert self._bashproc is not None
		for cmd in cmds:
			self._bashproc.stdin.write(('%s\n' % cmd).encode('ASCII'))
		self._bashproc.stdin.flush()
	def _cmd_print(self, *varlist):
		"""Return the current values of the named bash variables (as str)."""
		# set -- re-binds the positional parameters so one printf can emit
		# every value NUL-terminated.
		q = ' '.join(['"${%s}"' % v for v in varlist])
		self._write('set -- %s' % q,
				'printf "%s\\0" "${@}"')
		return [self._read1() for v in varlist]
	def __getitem__(self, k):
		return self._cmd_print(k)[0]
	def __call__(self, code):
		# Run *code* in a subshell (output discarded) and return its exit status.
		self._write('( %s ) &>/dev/null; printf "%%d\\0" "${?}"' % code)
		return int(self._read1())
	def copy(self, *varlist):
		"""Return {name: value} for the requested bash variables."""
		ret = self._cmd_print(*varlist)
		return dict(zip(varlist, ret))
|
#!/usr/bin/env python
"""
Oracle procedures in schema
"""
import sys
import logging
from lib_properties import pc
import lib_oracle
import lib_common
from sources_types.oracle import schema as oracle_schema
from sources_types.oracle import procedure as oracle_procedure
def Main():
    """CGI entry point: graph all PROCEDURE objects of one Oracle schema."""
    cgiEnv = lib_oracle.OracleEnv()
    ora_schema = cgiEnv.m_entity_id_dict["Schema"]
    grph = cgiEnv.GetGraph()
    # NOTE(review): the schema name is concatenated straight into the SQL
    # text — potential SQL injection if the CGI parameter is attacker
    # controlled; use a bind variable if lib_oracle.ExecuteQuery supports one.
    sql_query = "SELECT OBJECT_NAME,STATUS,CREATED FROM ALL_OBJECTS WHERE OBJECT_TYPE = 'PROCEDURE' AND OWNER = '" + ora_schema + "'"
    logging.debug("sql_query=%s", sql_query)
    node_oraschema = oracle_schema.MakeUri(cgiEnv.m_oraDatabase, ora_schema)
    result = lib_oracle.ExecuteQuery(cgiEnv.ConnectStr(), sql_query)
    for row in result:
        procedure_name = str(row[0])
        # Link schema -> procedure, annotated with STATUS and CREATED
        # (literals skipped when the column is NULL).
        node_procedure = oracle_procedure.MakeUri(cgiEnv.m_oraDatabase, ora_schema, procedure_name)
        grph.add((node_oraschema, pc.property_oracle_procedure, node_procedure))
        lib_oracle.AddLiteralNotNone(grph, node_procedure, "Status", row[1])
        lib_oracle.AddLiteralNotNone(grph, node_procedure, "Creation", row[2])
    cgiEnv.OutCgiRdf("LAYOUT_RECT", [pc.property_oracle_procedure])
# CGI scripts are executed directly; Main() emits the RDF response.
if __name__ == '__main__':
    Main()
|
# Generated by Django 3.2.3 on 2021-05-30 22:58
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates College and Student."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='College',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('roll_number', models.IntegerField(unique=True)),
                # Deleting a College cascades to its Students.
                ('college', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.college')),
            ],
        ),
    ]
|
import os
import time
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
from scipy import signal
from matplotlib.ticker import StrMethodFormatter
# School ID = 03118942
# F=9+4+2=15=>1+5=6
# Base frequency (Hz) derived from the student-ID digit sum above; A is the
# signal amplitude in volts.
myFreq=6000
A=4
# Helper functions
def PlotYLim(Max, Min):
    """Set the y-axis limits of the current matplotlib plot to [Min, Max]."""
    plt.ylim([Min,Max])
def plotSignals(time1, signal1, color1, legend1, PlotTitle, numberOfSignals=1, time2=None, signal2=None, color2=None, legend2=None):
    """Plot one or two signals against time and show the figure.

    time1/signal1/color1/legend1: first trace (always drawn).
    numberOfSignals: 1 or 2; anything else returns None without plotting.
    time2/signal2/color2/legend2: optional second trace (drawn with '.').
    """
    if numberOfSignals==1:
        plt.plot(time1, signal1, color1)
        # Fix: plt.legend() treats a bare string as an iterable of labels
        # (one label per character); wrap the single label in a list.
        plt.legend([legend1])
    elif numberOfSignals==2:
        plt.plot(time1, signal1, color1, time2, signal2, '.', color2)
        plt.legend([legend1, legend2])
    else:
        return None
    plt.xlabel('Seconds')
    plt.ylabel('Volts')
    plt.title(PlotTitle)
    plt.grid()
    plt.show()
#---------------| Exercise 2 |-------------------
#(A)
# Two sampling rates, expressed as multiples of the base frequency.
fs1=30*myFreq #180kHz
fs2=50*myFreq #300kHz
def mid_riser(signal):
    """Quantize *signal* in place to 4-bit integer levels in [-8, 7].

    Values above +7 clip to 7 and below -8 clip to -8; everything else is
    rounded away from zero to the nearest integer level.
    Returns the (mutated) input sequence.
    """
    for i in range(len(signal)):
        # Fix: the original compared against the hex literals 0xb0111
        # (721169) and -0xb1000 (-724992) where the binary literals
        # 0b0111 (7) and -0b1000 (-8) were clearly intended, so the
        # clipping branches could never fire for real signal values.
        if signal[i] > 0b0111:
            signal[i] = 7
        elif signal[i] < -0b1000:
            signal[i] = -8
        else:
            if (signal[i] - round(signal[i]) > 0) and (signal[i] > 0):
                signal[i] = round(signal[i]) + 1
            elif (signal[i] - round(signal[i]) < 0) and (signal[i] < 0):
                signal[i] = round(signal[i]) - 1
            else:
                signal[i] = round(signal[i])
    return signal
# grayCodeBinary = [0000, 0001, 0011, 0010, 0110, 0111, 0101, 0100, 1100, 1101, 1111, 1110, 1010, 1011, 1001, 1000]
def grayCodeMap(signal):
    """Map each quantized level in [-8, 7] to its 4-bit Gray code, in place."""
    gray_table = (0, 1, 3, 2, 6, 7, 5, 4, 12, 13, 15, 14, 10, 11, 9, 8)
    for idx, level in enumerate(signal):
        # Shift the level by +8 so it indexes the 16-entry table.
        signal[idx] = gray_table[int(level) + 8]
    return signal
def calcError(QuantifiedSamples, accualSignalSamples, numOfSamples):
    """Mean difference (bias) between the real and the quantized samples,
    averaged over the first numOfSamples entries."""
    diffs = (accualSignalSamples[k] - QuantifiedSamples[k]
             for k in range(numOfSamples))
    return sum(diffs) / numOfSamples
def calcAverageSigPower(signal, numOfSamples):
    """Mean power (mean of squared samples) of the first numOfSamples samples.

    Fix: the original while-loop never incremented its counter, so any call
    with numOfSamples > 0 hung forever.
    """
    total = 0
    for i in range(numOfSamples):
        total += signal[i]**2
    return total / numOfSamples
def calcSNR(StartingSignal, numOfSamples):
    """Theoretical SNR of a 4-bit quantizer driven by StartingSignal,
    estimated from its average power over numOfSamples samples."""
    numOfBitsPerSample = 4
    maxSigVoltage = 7
    avg_power = calcAverageSigPower(StartingSignal, numOfSamples)
    scaled_power = 3 * avg_power / maxSigVoltage**2
    return (2**(2*numOfBitsPerSample)) * scaled_power
#(a)
# t1 = np.linspace(0, 4/myFreq, 4*int(fs1/myFreq))
# Four periods of the triangle wave, sampled at fs1.
t1 = np.arange(0, 4/myFreq, 1/fs1)
triangle1 = signal.sawtooth(2 * np.pi * myFreq * t1, 0.5)*4
trigCopy = signal.sawtooth(2 * np.pi * myFreq * t1, 0.5)
# NOTE(review): mid_riser mutates its argument, so after this call
# triangle1 holds the quantized samples and x refers to the same object.
x = mid_riser(triangle1)
# y = grayCodeMap(x)
fig, ax = plt.subplots()
# Render y-axis tick labels as 4-bit binary codes.
ax.yaxis.set_major_formatter(StrMethodFormatter("{x:04b}"))
ax.yaxis.set_ticks(np.arange(-4, 15, 1))
plotSignals(t1, 4*trigCopy, 'o', 'Fs1', 'Quantified Triangle sampled Fs1')
plotSignals(t1, x, 'o', 'Fs1', 'Quantified Triangle sampled Fs1')
plt.show()
# Mean quantization error over the first 10 / 20 samples.
print(calcError(mid_riser(triangle1), trigCopy, 10))
print(calcError(mid_riser(triangle1), trigCopy, 20))
# print(calcSNR(4*triangle1, 10))
# print(calcSNR(4*triangle1, 20))
|
from chapter_04.binary_search_tree import BinarySearchTree
from chapter_04.binary_tree import BinaryTree
def is_binary_search_tree(tree):
    """Return True iff *tree* satisfies the BST invariant.

    Left-subtree keys must be strictly smaller than every ancestor upper
    bound; duplicates are only allowed in a right subtree (key >= parent).
    """
    return _is_bst(tree.root)


def _is_bst(node, min_val=None, max_val=None):
    """Recursively check that every key lies in (min_val, max_val);
    None means the bound is open."""
    if not node:
        return True
    # Fix: the original tested `min_val and ...` / `max_val and ...`, which
    # silently skipped the bound check whenever a bound was 0 (falsy).
    if (min_val is not None and node.key < min_val) or (
        max_val is not None and node.key >= max_val
    ):
        return False
    return _is_bst(node.left, min_val, node.key) and _is_bst(
        node.right, node.key, max_val
    )
def test_is_binary_search_tree():
    """is_binary_search_tree accepts an ordered-insert BST and rejects a
    manually built non-BST binary tree."""
    # Valid BST: BinarySearchTree.insert places each key correctly.
    bst = BinarySearchTree()
    bst.insert(20)
    bst.insert(9)
    bst.insert(25)
    bst.insert(5)
    bst.insert(12)
    bst.insert(11)
    bst.insert(14)
    # Plain binary tree with positions chosen by hand — ordering is violated
    # (e.g. 6 inserted under the 4-subtree).
    t = BinaryTree()
    n1 = t.insert(5, None)
    n2 = t.insert(4, n1)
    n3 = t.insert(6, n1)
    n4 = t.insert(3, n2)
    t.insert(6, n2)
    t.insert(5, n3)
    t.insert(2, n4)
    assert not is_binary_search_tree(t)
    assert is_binary_search_tree(bst)
|
"""The following module demonstrates how to read files"""
import json
import csv
def write_json_file(file_name, json_file_content):
    """Serialize *json_file_content* to *file_name* as JSON.

    :type file_name: str
    :param file_name: path of the file to create or overwrite.
    :param json_file_content: any json.dump-serializable object.
    :raises TypeError: if the content is not JSON serializable.
    """
    # Explicit UTF-8 keeps the output independent of the platform's
    # default encoding (the original relied on the locale default).
    with open(file_name, 'w', encoding='utf-8') as json_file:
        json.dump(json_file_content, json_file)
def write_csv_file(file_name, headers, csv_file_content):
    """Example of how to write a csv file.

    :type file_name: str
    :param file_name: path of the file to create or overwrite.
    :type headers: sequence
    :param headers: column names written as the first row.
    :type csv_file_content: iterable of sequences
    :param csv_file_content: the data rows, one sequence per row.
    """
    # Fix: newline='' is required when handing a file to csv.writer;
    # without it every row is followed by a blank line on Windows.
    with open(file_name, 'w', newline='', encoding='utf-8') as csv_file:
        csv_writer = csv.writer(csv_file)
        csv_writer.writerow(headers)
        csv_writer.writerows(csv_file_content)
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Sione Taumoepeau and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import math
import frappe
from frappe import _
from frappe.utils import cstr, formatdate, cint, getdate, date_diff, add_days, time_diff_in_hours, rounded, now
from frappe.utils.user import get_user_fullname
def get_create_cargo_devan(doctype, cargo_ref, final_work_type, secondary_work_type, cargo_type, devan):
    """Create and submit a new Cargo document for a devanned (emptied)
    container, copying most fields from the source Pre Advice / Cargo row.

    NOTE(review): `val` is only assigned in the two doctype branches below;
    any other doctype value raises NameError at the first `val.` access.
    Likewise `container_content` is only bound inside the two devanning
    branches — if neither matches, the doc.update() call below fails.
    """
    last_work_type, third_work_type, movement, payment, gate, yard_status, delivery_code = "", "", "", "", "", "", ""
    custom_warrant, custom_code, inspection_status, yard_date, final_status = "", "", "", "", ""
    if doctype == "Pre Advice":
        val = frappe.db.get_value(doctype, {"name": cargo_ref}, ["booking_ref","pat_code","net_weight","cargo_type","last_port",\
        "qty","container_no","voyage_no", "bol","work_type","secondary_work_type","pol","agents","commodity_code","vessel","pod",\
        "temperature", "container_type","mark","final_dest_port","volume", "container_size","consignee","container_content",\
        "stowage","hazardous","hazardous_code", "status","seal_1","seal_2","eta_date","cargo_description","etd_date",\
        "chasis_no","yard_slot","inspection_status","yard_status","final_status", "third_work_type"], as_dict=True)
    elif doctype == "Cargo":
        val = frappe.db.get_value(doctype, {"name": cargo_ref}, ["booking_ref","pat_code","net_weight","cargo_type","last_port",\
        "qty","container_no","voyage_no","custom_code","bol","work_type","secondary_work_type","pol","agents","commodity_code",\
        "vessel","pod","temperature", "container_type","mark","final_dest_port","volume","custom_warrant", "container_size",\
        "consignee","container_content","stowage","hazardous","hazardous_code", "status","seal_1","seal_2","eta_date",\
        "cargo_description","etd_date","delivery_code","chasis_no","yard_slot","inspection_status","yard_status","final_status"], as_dict=True)
    # Devanning of a discharged container that is scheduled for loading.
    if final_work_type == "Discharged" and devan == "EMPTY" and val.third_work_type == "Loading" and cargo_type == "Container":
        secondary_work_type = "Devanning"
        movement = "Inspection"
        inspection_status = "Closed"
        yard_status = "Closed"
        final_status = "Discharged"
        payment = "Closed"
        gate = "Closed"
        # NOTE(review): '==' below is a no-op comparison, not an assignment
        # (harmless here since the branch condition already guarantees the
        # value, but presumably '=' was intended).
        final_work_type == "Discharged"
        yard_date = None
        third_work_type =None
        container_content = "EMPTY"
        # NOTE(review): this overwrites the "Devanning" value set above with
        # the source document's secondary_work_type — confirm intended.
        secondary_work_type = val.secondary_work_type
    # Devanning of a discharged container that stays in stock.
    if final_work_type == "Discharged" and devan == "EMPTY" and val.third_work_type == "Stock" and cargo_type == "Container":
        secondary_work_type = "Devanning"
        movement = "Inspection"
        inspection_status = "Closed"
        yard_status = "Closed"
        final_status = "Discharged"
        payment = "Closed"
        gate = "Closed"
        # NOTE(review): '==' is a no-op comparison here as well.
        final_work_type == "Discharged"
        yard_date = None
        third_work_type ="Stock"
        container_content = "EMPTY"
        secondary_work_type = val.secondary_work_type
    # Build the new Cargo document; docstatus 1 means it is created in the
    # submitted state.
    doc = frappe.new_doc("Cargo")
    doc.update({
        "docstatus" : 1,
        "cargo_ref": cargo_ref,
        "booking_ref" : val.booking_ref,
        "pat_code" : val.pat_code,
        "net_weight" : val.net_weight,
        "cargo_type" : cargo_type,
        "qty" : val.qty,
        "container_no" : val.container_no,
        "voyage_no" : val.voyage_no,
        "bol" : val.bol,
        "work_type" : final_work_type,
        "work_type_date": now(),
        "secondary_work_type" : secondary_work_type,
        "additional_work": third_work_type,
        "pol" : val.pol,
        "agents" : val.agents,
        "commodity_code" : val.commodity_code,
        "vessel" : val.vessel,
        "pod" : val.pod,
        "temperature" : val.temperature,
        "container_type" : val.container_type,
        "mark" : val.mark,
        "final_dest_port" : val.final_dest_port,
        "volume" : val.volume,
        "container_size" : val.container_size,
        "consignee" : val.consignee,
        "container_content" : container_content,
        "stowage" : val.stowage,
        "hazardous" : val.hazardous,
        "hazardous_code" : val.hazardous_code,
        "status" : movement,
        "seal_1" : val.seal_1,
        "seal_2" : val.seal_2,
        "eta_date" : val.eta_date,
        "cargo_description" : val.cargo_description,
        "etd_date" : val.etd_date,
        "chasis_no" : val.chasis_no,
        "inspection_status" : inspection_status,
        "yard_status" : yard_status,
        "yard_date" : yard_date,
        "final_status" : final_status,
        "payment_status" : payment,
        "gate1_status" : gate,
        "gate2_status" : gate,
        "custom_warrant" : val.custom_warrant,
        "custom_code" : val.custom_code,
        "delivery_code" : val.delivery_code,
        "inspection_date": now()
    })
    doc.insert(ignore_permissions=True)
    doc.submit()
@frappe.whitelist()
def get_create_cargo(doctype, cargo_ref, final_work_type, secondary_work_type, cargo_type):
    """Create and submit a Cargo document derived from a source record.

    Copies the cargo details of the document named ``cargo_ref`` (``doctype``
    must be "Pre Advice" or "Cargo") and derives the movement, gate, payment,
    yard and inspection statuses from the requested ``final_work_type`` /
    ``secondary_work_type`` / ``cargo_type`` combination, then inserts a new
    submitted Cargo.

    NOTE(review): the original body contained byte-for-byte duplicates of
    several condition blocks (differing only in dead ``concontent``
    assignments), plus no-op statements (``x = x`` self-assignments and a
    bare ``final_work_type == "Discharged"`` comparison used as a statement).
    Each duplicate was either unreachable (the first copy mutates its own
    guard variables) or re-assigned identical values; they have been removed
    without changing behaviour.

    NOTE(review): for any ``doctype`` other than the two supported ones the
    source fields are never bound and building the Cargo below raises
    NameError — confirm callers only ever pass "Pre Advice" or "Cargo".
    """
    # Defaults for the derived status fields; the matching rule below
    # overrides the relevant ones.
    last_work_type, third_work_type, movement, payment, gate, yard_status, delivery_code = "", "", "", "", "", "", ""
    container_content, custom_warrant, custom_code, inspection_status, yard_date, final_status = "", "", "", "", "", ""
    if doctype == "Pre Advice":
        booking_ref, pat_code, net_weight, last_port, qty, container_no, voyage_no, bol,work_type,secondary_work_type,\
        pol,agents,commodity_code,vessel,pod,temperature, container_type,mark, final_dest_port,volume, container_size,consignee,\
        container_content,stowage,hazardous,hazardous_code, status, seal_1,seal_2,eta_date,cargo_description,etd_date, chasis_no,\
        yard_slot,inspection_status,yard_status, final_status, third_work_type = frappe.db.get_value(doctype, {'name': cargo_ref},\
        ['booking_ref','pat_code','net_weight','last_port','qty','container_no','voyage_no','bol','work_type',\
        'secondary_work_type','pol','agents','commodity_code', 'vessel','pod','temperature', 'container_type','mark','final_dest_port',\
        'volume', 'container_size','consignee','container_content', 'stowage','hazardous','hazardous_code', 'status','seal_1','seal_2',\
        'eta_date','cargo_description','etd_date', 'chasis_no', 'yard_slot','inspection_status','yard_status','final_status','third_work_type'])
    if doctype == "Cargo":
        booking_ref,pat_code,net_weight,last_port,qty,container_no,voyage_no,custom_code, bol,work_type,\
        secondary_work_type, pol,agents,commodity_code,vessel,pod,temperature, mark,container_type,final_dest_port,\
        volume,custom_warrant, container_size,consignee,container_content,stowage,hazardous,hazardous_code, status,\
        seal_1,seal_2,eta_date,cargo_description, etd_date,delivery_code, chasis_no,yard_slot,inspection_status,yard_status,\
        final_status = frappe.db.get_value(doctype, {'name': cargo_ref}, ['booking_ref','pat_code','net_weight',\
        'last_port','qty','container_no','voyage_no','custom_code', 'bol','work_type','secondary_work_type','pol','agents',\
        'commodity_code','vessel','pod','temperature', 'mark','container_type','final_dest_port','volume','custom_warrant',\
        'container_size','consignee','container_content','stowage','hazardous','hazardous_code', 'status','seal_1','seal_2',\
        'eta_date','cargo_description','etd_date','delivery_code', 'chasis_no','yard_slot','inspection_status','yard_status','final_status'])
    # --- Loading rules -------------------------------------------------
    if final_work_type == "Loading" and secondary_work_type == "Export":
        inspection_status = "Closed"
        movement = "Outbound"
        yard_status = "Closed"
        payment = "Closed"
        gate = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Loading" and secondary_work_type == "Stock":
        inspection_status = "Closed"
        movement = "Outbound"
        yard_status = "Closed"
        payment = "Closed"
        gate = "Closed"
        yard_date = now()
        final_status = final_work_type
        last_work_type = None
    if final_work_type == "Loading" and secondary_work_type == "Transhipment":
        movement = "Transshipment"
        payment = "Closed"
        gate = "Closed"
        inspection_status = "Closed"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    # Split Ports overrides whatever the secondary work type set above.
    if final_work_type == "Loading" and cargo_type == "Split Ports":
        movement = "Split Ports"
        payment = "Closed"
        gate = "Open"
        inspection_status = "Open"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    # --- Discharged rules ----------------------------------------------
    if final_work_type == "Discharged" and not secondary_work_type:
        inspection_status = "Closed"
        movement = "Inspection"
        yard_status = "Open"
        yard_date = None
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Re-stowing":
        movement = "Re-stowing"
        payment = "Closed"
        gate = "Closed"
        inspection_status = "Closed"
        yard_status = "Open"
        yard_date = None
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Transhipment":
        movement = "Transshipment"
        payment = "Closed"
        gate = "Closed"
        inspection_status = "Closed"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and cargo_type == "Split Ports":
        movement = "Split Ports"
        payment = "Open"
        gate = "Open"
        inspection_status = "Closed"
        yard_status = "Closed"
        yard_date = now()
        final_status = final_work_type
        third_work_type = None
        last_work_type = None
    # --- Discharged + Devanning combinations ---------------------------
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Loading" and cargo_type == "Container":
        movement = "Devanning"
        inspection_status = "Closed"
        yard_status = "Closed"
        final_status = "Discharged"
        payment = "Closed"
        gate = "Closed"
        yard_date = None
        third_work_type = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Loading" and cargo_type != "Container":
        secondary_work_type = None
        movement = "Inspection"
        inspection_status = "Closed"
        yard_status = "Open"
        final_status = "Devanning"
        payment = "Open"
        gate = "Open"
        yard_date = None
        third_work_type = None
        container_content = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Stock" and cargo_type != "Container":
        movement = "Inspection"
        inspection_status = "Closed"
        yard_status = "Open"
        final_status = "Devanning"
        payment = "Open"
        gate = "Open"
        yard_date = None
        # The new Cargo records the devanning itself as its work type.
        final_work_type = "Devanning"
        container_content = None
        last_work_type = None
    if final_work_type == "Discharged" and secondary_work_type == "Devanning" and third_work_type == "Stock" and cargo_type == "Container":
        inspection_status = "Closed"
        yard_status = "Open"
        final_status = "Discharged"
        payment = "Open"
        gate = "Open"
        yard_date = None
        final_work_type = "Devanning"
        container_content = "FULL"
        movement = "Devanning"
        last_work_type = None
    # Build the new Cargo with docstatus=1 so insert()+submit() records it
    # as a submitted document.
    doc = frappe.new_doc("Cargo")
    doc.update({
        "docstatus": 1,
        "cargo_ref": cargo_ref,
        "booking_ref": booking_ref,
        "pat_code": pat_code,
        "net_weight": net_weight,
        "cargo_type": cargo_type,
        "qty": qty,
        "container_no": container_no,
        "voyage_no": voyage_no,
        "bol": bol,
        "work_type": final_work_type,
        "work_type_date": now(),
        "secondary_work_type": secondary_work_type,
        "additional_work": third_work_type,
        "last_work": last_work_type,
        "pol": pol,
        "agents": agents,
        "commodity_code": commodity_code,
        "vessel": vessel,
        "pod": pod,
        "temperature": temperature,
        "container_type": container_type,
        "mark": mark,
        "final_dest_port": final_dest_port,
        "volume": volume,
        "container_size": container_size,
        "consignee": consignee,
        "container_content": container_content,
        "stowage": stowage,
        "hazardous": hazardous,
        "hazardous_code": hazardous_code,
        "status": movement,
        "seal_1": seal_1,
        "seal_2": seal_2,
        "cargo_description": cargo_description,
        "eta_date": eta_date,
        "etd_date": etd_date,
        "chasis_no": chasis_no,
        "inspection_status": inspection_status,
        "yard_status": yard_status,
        "yard_date": yard_date,
        "final_status": final_status,
        "payment_status": payment,
        "gate1_status": gate,
        "gate2_status": gate,
        "custom_warrant": custom_warrant,
        "custom_code": custom_code,
        "delivery_code": delivery_code,
        "inspection_date": now()
    })
    doc.insert(ignore_permissions=True)
    doc.submit()
@frappe.whitelist()
def create_cargo_movement(cargo_ref, work_type, gate_status, gate):
    """Create and submit a Cargo Movement row for a gate event.

    :param cargo_ref: name of the Cargo document the movement refers to
    :param work_type: "Loading" (export leaving via Gate2) or
        "Discharged" (import arriving via Gate1)
    :param gate_status: status value recorded for the gate that fired
    :param gate: doctype of the gate record ("Gate1" or "Gate2") that holds
        the truck/driver details for this cargo

    NOTE(review): only the (Gate2, Loading) and (Gate1, Discharged)
    combinations bind the gate_*/reference locals; any other combination
    raises NameError below — confirm callers never mix them differently.
    """
    # Bug fix: "name" is now fetched so the Gate1 branch's ``val.name`` is
    # populated; it was missing from the original field list and always
    # resolved to None.
    val = frappe.db.get_value("Cargo", {"name": cargo_ref}, ["name", "pat_code", "cargo_type", "container_no",
        "agents", "container_type", "container_size", "chasis_no", "mark", "qty", "consignee",
        "container_content", "cargo_description", "custom_warrant", "eta_date", "etd_date",
        "booking_ref"], as_dict=True)
    # Truck/driver details captured on the gate record for this cargo.
    info = frappe.db.get_value(gate, {"cargo_ref": cargo_ref}, ['truck_licenses_plate','drivers_information','modified','name'], as_dict=True)
    if gate == "Gate2" and work_type == "Loading":
        # Export: the container leaves FULL through the main gate now.
        gate2_no = gate_status
        gate2_date = now()
        gate2_time = now()
        gate_content = "FULL"
        gate_no = None
        gate_date = None
        gate_time = None
        reference = info.name
    if gate == "Gate1" and work_type == "Discharged":
        # Import: stamp the movement with the gate record's timestamps.
        # (A duplicated ``gate_content = None`` assignment was removed.)
        gate_no = gate_status
        gate_date = info.modified
        gate_time = info.modified
        gate_content = None
        gate2_no = None
        gate2_date = None
        gate2_time = None
        reference = val.name
    doc = frappe.new_doc("Cargo Movement")
    doc.update({
        "docstatus": 1,
        "pat_code": val.pat_code,
        "cargo_type": val.cargo_type,
        "container_no": val.container_no,
        "work_type": work_type,
        "agents": val.agents,
        "container_type": val.container_type,
        "container_size": val.container_size,
        "consignee": val.consignee,
        "container_content": val.container_content,
        "cargo_description": val.cargo_description,
        "main_gate_status": gate2_no,
        "main_gate_date": gate2_date,
        "main_gate_time": gate2_time,
        "gate_status": gate_no,
        "movement_date": gate_date,
        "gate1_time": gate_time,
        "truck": info.truck_licenses_plate,
        "truck_driver": info.drivers_information,
        "refrence": reference,  # [sic] — field name in the Cargo Movement doctype
        "chasis_no": val.chasis_no,
        "main_gate_content": gate_content,
        "mark": val.mark,
        "qty": val.qty,
        "warrant_number": val.custom_warrant,
        "eta_date": val.eta_date,
        "etd_date": val.etd_date,
        "booking_ref": val.booking_ref
    })
    doc.insert(ignore_permissions=True)
    doc.submit()
@frappe.whitelist()
def create_preadvise_history(cargo_ref):
    """Archive a Pre Advice record into "Pre Advise History".

    Copies every tracked field of the Pre Advice named ``cargo_ref`` into a
    new, submitted "Pre Advise History" document (the status is forced to
    "Yard"), then deletes the source Pre Advice row.
    """
    src = frappe.db.get_value("Pre Advice", {"name": cargo_ref}, ["booking_ref","pat_code","net_weight","cargo_type","qty",\
    "container_no","voyage_no","bol","work_type","secondary_work_type","pol","agents","commodity_code","vessel","pod",\
    "temperature", "container_type","mark","final_dest_port","volume","container_size","consignee","container_content",\
    "stowage","hazardous","hazardous_code","status","seal_1","seal_2","eta_date","cargo_description","etd_date","chasis_no",\
    "yard_slot","inspection_status","yard_status","final_status","break_bulk_item_count","security_item_count"], as_dict=True)
    # Snapshot of the source record; docstatus=1 makes the inserted
    # document a submitted one.
    snapshot = {
        "docstatus": 1,
        "booking_ref": src.booking_ref,
        "pat_code": src.pat_code,
        "net_weight": src.net_weight,
        "cargo_type": src.cargo_type,
        "qty": src.qty,
        "container_no": src.container_no,
        "voyage_no": src.voyage_no,
        "bol": src.bol,
        "work_type": src.work_type,
        "secondary_work_type": src.secondary_work_type,
        "pol": src.pol,
        "agents": src.agents,
        "commodity_code": src.commodity_code,
        "vessel": src.vessel,
        "pod": src.pod,
        "temperature": src.temperature,
        "container_type": src.container_type,
        "mark": src.mark,
        "final_dest_port": src.final_dest_port,
        "volume": src.volume,
        "container_size": src.container_size,
        "consignee": src.consignee,
        "container_content": src.container_content,
        "stowage": src.stowage,
        "hazardous": src.hazardous,
        "hazardous_code": src.hazardous_code,
        "status": "Yard",
        "seal_1": src.seal_1,
        "seal_2": src.seal_2,
        "eta_date": src.eta_date,
        "cargo_description": src.cargo_description,
        "etd_date": src.etd_date,
        "chasis_no": src.chasis_no,
        "yard_slot": src.yard_slot,
        "inspection_status": src.inspection_status,
        "yard_status": src.yard_status,
        "final_status": src.final_status,
        "break_bulk_item_count": src.break_bulk_item_count,
        "security_item_count": src.security_item_count
    }
    history = frappe.new_doc("Pre Advise History")
    history.update(snapshot)
    history.insert(ignore_permissions=True)
    history.submit()
    # The Pre Advice has been archived; drop the original row.
    frappe.db.delete('Pre Advice', {'name': cargo_ref })
@frappe.whitelist()
def update_main_gate_status(name_ref, truck_licenses_plate, drivers_information):
    """Close the main gate on an Export record and log a Cargo Movement.

    Stamps the Export row with the truck/driver details and the acting
    user, then records a "Main Gate IN" Cargo Movement that points back at
    the Export via the ``refrence`` [sic] field.
    """
    full_name = get_user_fullname(frappe.session['user'])
    frappe.db.sql("""UPDATE `tabExport` SET truck_licenses_plate=%s, drivers_information=%s, main_gate_status="Closed",
    main_gate_date =%s, status="Main Gate IN", main_gate_created_by=%s, main_gate_user_name=%s
     WHERE name=%s""", (truck_licenses_plate, drivers_information, now(), frappe.session.user, full_name, name_ref))
    # Bug fix: "consignee" is written to the movement below but was missing
    # from the original field list, so it always came back None.
    val = frappe.db.get_value("Export", {"name": name_ref}, ["name","cargo_type","container_no","agents","container_type","container_size","consignee","container_content","cargo_description"], as_dict=True)
    if not val.cargo_type:
        if val.container_content == "EMPTY" or val.container_content == "FULL":
            # Bug fix: this line was ``val.cargo_type == "Container"`` — a
            # bare comparison that silently did nothing instead of
            # defaulting the cargo type for containerised content.
            val.cargo_type = "Container"
    doc = frappe.new_doc("Cargo Movement")
    doc.update({
        "docstatus": 1,
        "cargo_type": val.cargo_type,
        "container_no": val.container_no,
        "agents": val.agents,
        "container_type": val.container_type,
        "container_size": val.container_size,
        "consignee": val.consignee,
        "main_gate_content": val.container_content,
        "cargo_description": val.cargo_description,
        "main_gate_status": "IN",
        "main_gate_date": now(),
        "main_gate_time": now(),
        "truck": truck_licenses_plate,
        "truck_driver": drivers_information,
        "refrence": val.name  # [sic] — field name in the Cargo Movement doctype
    })
    # NOTE(review): unlike the sibling helpers this insert does not pass
    # ignore_permissions — confirm whether that difference is intentional.
    doc.insert()
    doc.submit()
@frappe.whitelist()
def update_gate1_status(name_ref):
    """Close gate 1 on an Export record and flag its movement as IN.

    Marks the Export row's gate-1 fields with the current time and acting
    user, then updates the matching Cargo Movement (looked up through the
    ``refrence`` [sic] column) with the container content and timestamps.
    """
    acting_user = frappe.session['user']
    acting_user_full_name = get_user_fullname(acting_user)
    frappe.db.sql("""UPDATE `tabExport` SET export_gate1_status="Closed", export_gate1_date =%s, status="Gate1 IN",
     gate1_created_by=%s, gate1_user_name=%s
     WHERE name=%s""", (now(), frappe.session.user, acting_user_full_name, name_ref))
    export_row = frappe.db.get_value("Export", {"name": name_ref}, ["name","container_content"], as_dict=True)
    frappe.db.sql("""Update `tabCargo Movement` set gate_status='IN', container_content=%s, movement_date=%s, gate1_time=%s where refrence=%s""",
    (export_row.container_content, now(), now(), name_ref))
|
GITHUB_URL = 'https://github.com/esdandreu/gcal2clickup/tree/main'
RAWGITHUB_URL = 'https://raw.githubusercontent.com/esdandreu/gcal2clickup/main'


def readme(title: str = 'gcal2clickup') -> str:
    """Return a link to the given section anchor of the project README."""
    return f'{GITHUB_URL}#{title}'


def readme_image_url(filename: str) -> str:
    """Return the raw-content URL of an image stored in the README folder.

    Bug fix: the URL previously embedded a literal placeholder instead of
    interpolating ``filename``, so every generated image link was broken.
    """
    return f'{RAWGITHUB_URL}/README/{filename}'
|
# coding: utf8
import cv2 as cv
if __name__ == '__main__':
    # Character palette, ordered dark -> light; a pixel's brightness
    # selects one of these.
    ascii_char = list(r"#8XOHLTI)i=+;:,. ")
    # Alternative, denser palette:
    # ascii_char = list(r"$@B%8&WM#*oahkbdpqwmZO0QLCJUYXzcvunxrjft/\|()1{}[]?-_+~<>i!lI;:,\"^`'. ")
    char_len = len(ascii_char)
    # Load the source image (BGR).
    frame = cv.imread("img.jpg")
    # Collapse to a single grayscale channel.
    img_gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
    # Shrink the image, scaling the two axes unevenly to compensate for the
    # tall aspect ratio of monospace characters.
    # NOTE(review): cv.resize expects dsize as (width, height) but this
    # passes (rows/5, cols/10) — confirm the axis swap is intentional.
    img_resize = cv.resize(img_gray, (int(img_gray.shape[0] / 5), int(img_gray.shape[1] / 10)))
    # Map each pixel (0-255) to a palette character. Rows are joined once
    # instead of repeated ``+=`` so the build is linear, not quadratic.
    lines = [
        ''.join(ascii_char[int(pixel / 256 * char_len)] for pixel in row)
        for row in img_resize
    ]
    text = '\n'.join(lines) + '\n'
    # Print the rendering and save it next to the script.
    print(text)
    with open('imgout.txt', 'w') as doc:
        print(text, file=doc)
|
"""2019-05-28 10:50:34"""
|
# -*- coding:utf-8 -*-
from . import douyin

# Re-export the douyin submodule as this package's only public name.
__all__ = ['douyin']
|
from typing import Any, Sequence, Tuple, cast, TypeVar, Dict
from scipy.stats import mode
from numpy.typing import NDArray
def windowed_mean(
    array: NDArray[Any], window_size: Tuple[int, ...], **kwargs: Any
) -> NDArray[Any]:
    """
    Compute the windowed (block) mean of an array.

    Each non-overlapping window of shape ``window_size`` collapses to its
    mean, so the result has ``array.shape[i] // window_size[i]`` elements
    along axis ``i``. Extra keyword arguments are forwarded to
    ``ndarray.mean``.
    """
    reshaped = reshape_with_windows(array, window_size)
    # Odd axes of ``reshaped`` index positions *within* a window; averaging
    # over them collapses each window to a single value.
    result = reshaped.mean(axis=tuple(range(1, reshaped.ndim, 2)), **kwargs)
    # Bug fix: the cast result was previously computed and discarded.
    # ``cast`` is a no-op at runtime; returning it only satisfies typing.
    return cast(NDArray[Any], result)


def windowed_mode(array: NDArray[Any], window_size: Tuple[int, ...]) -> NDArray[Any]:
    """
    Coarsen an array by taking the n-dimensional mode of each window.

    The result has ``array.shape[i] // window_size[i]`` elements along each
    axis; each output element is the most common value of its window.
    """
    reshaped = reshape_with_windows(array, window_size)
    # Bring the window-grid axes (even) to the front and the within-window
    # axes (odd) to the back, then flatten every window into one trailing
    # axis so the mode can be taken along it.
    transposed_shape = tuple(range(0, reshaped.ndim, 2)) + tuple(
        range(1, reshaped.ndim, 2)
    )
    transposed = reshaped.transpose(transposed_shape)
    collapsed = transposed.reshape(tuple(reshaped.shape[slice(0, None, 2)]) + (-1,))
    # Bug fix: pass keepdims=True explicitly. SciPy >= 1.11 defaults to
    # keepdims=False, which changes the result's shape and makes the
    # trailing squeeze fail whenever an axis has more than one window.
    result = mode(collapsed, axis=collapsed.ndim - 1, keepdims=True).mode.squeeze(axis=-1)
    return result


def reshape_with_windows(
    array: NDArray[Any], window_size: Sequence[int]
) -> NDArray[Any]:
    """
    Reshape ``array`` so each axis splits into (n_windows, window_length).

    Axis ``i`` of size ``s`` becomes the two axes
    ``(s // window_size[i], window_size[i])``: even axes of the result
    index windows, odd axes index positions within a window. Each axis
    length must be an exact multiple of the corresponding window size for
    the reshape to succeed.
    """
    new_shape: Tuple[int, ...] = ()
    for dim, factor in zip(array.shape, window_size):
        new_shape += (dim // factor, factor)
    return array.reshape(new_shape)
|
# ReaScript snippet (REAPER's embedded Python API): the RPR_* functions
# are injected by REAPER at runtime, which is why there are no imports.
# Resolve the numeric command id of the named action — presumably the
# SWS/FNG "select notes near edit cursor" action; verify that extension
# is installed, otherwise the lookup returns 0.
cmdId = RPR_NamedCommandLookup("_FNG_SELECT_NOTES_NEAR_EDIT_CURSOR")
# Run the resolved action in the main section (second argument 0 = no flag).
RPR_Main_OnCommand(cmdId, 0)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.